@nicnocquee/dataqueue 1.22.0 → 1.25.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +44 -0
- package/dist/index.cjs +2822 -583
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +589 -12
- package/dist/index.d.ts +589 -12
- package/dist/index.js +2818 -584
- package/dist/index.js.map +1 -1
- package/migrations/1751131910825_add_timeout_seconds_to_job_queue.sql +2 -2
- package/migrations/1751186053000_add_job_events_table.sql +12 -8
- package/migrations/1751984773000_add_tags_to_job_queue.sql +1 -1
- package/migrations/1765809419000_add_force_kill_on_timeout_to_job_queue.sql +6 -0
- package/migrations/1771100000000_add_idempotency_key_to_job_queue.sql +7 -0
- package/migrations/1781200000000_add_wait_support.sql +12 -0
- package/migrations/1781200000001_create_waitpoints_table.sql +18 -0
- package/migrations/1781200000002_add_performance_indexes.sql +34 -0
- package/migrations/1781200000003_add_progress_to_job_queue.sql +7 -0
- package/package.json +20 -6
- package/src/backend.ts +163 -0
- package/src/backends/postgres.ts +1111 -0
- package/src/backends/redis-scripts.ts +533 -0
- package/src/backends/redis.test.ts +543 -0
- package/src/backends/redis.ts +834 -0
- package/src/db-util.ts +4 -2
- package/src/handler-validation.test.ts +414 -0
- package/src/handler-validation.ts +168 -0
- package/src/index.test.ts +230 -1
- package/src/index.ts +128 -32
- package/src/processor.test.ts +612 -16
- package/src/processor.ts +759 -47
- package/src/queue.test.ts +736 -3
- package/src/queue.ts +346 -660
- package/src/test-util.ts +32 -0
- package/src/types.ts +451 -16
- package/src/wait.test.ts +698 -0
|
@@ -0,0 +1,834 @@
|
|
|
1
|
+
import { createRequire } from 'module';
|
|
2
|
+
import type { Redis as RedisType } from 'ioredis';
|
|
3
|
+
import {
|
|
4
|
+
JobOptions,
|
|
5
|
+
JobRecord,
|
|
6
|
+
FailureReason,
|
|
7
|
+
JobEvent,
|
|
8
|
+
JobEventType,
|
|
9
|
+
TagQueryMode,
|
|
10
|
+
JobType,
|
|
11
|
+
RedisJobQueueConfig,
|
|
12
|
+
} from '../types.js';
|
|
13
|
+
import { QueueBackend, JobFilters, JobUpdates } from '../backend.js';
|
|
14
|
+
import { log } from '../log-context.js';
|
|
15
|
+
import {
|
|
16
|
+
ADD_JOB_SCRIPT,
|
|
17
|
+
GET_NEXT_BATCH_SCRIPT,
|
|
18
|
+
COMPLETE_JOB_SCRIPT,
|
|
19
|
+
FAIL_JOB_SCRIPT,
|
|
20
|
+
RETRY_JOB_SCRIPT,
|
|
21
|
+
CANCEL_JOB_SCRIPT,
|
|
22
|
+
PROLONG_JOB_SCRIPT,
|
|
23
|
+
RECLAIM_STUCK_JOBS_SCRIPT,
|
|
24
|
+
CLEANUP_OLD_JOBS_SCRIPT,
|
|
25
|
+
} from './redis-scripts.js';
|
|
26
|
+
|
|
27
|
+
/** Helper: convert a Redis hash flat array [k,v,k,v,...] to a JS object */
|
|
28
|
+
function hashToObject(arr: string[]): Record<string, string> {
|
|
29
|
+
const obj: Record<string, string> = {};
|
|
30
|
+
for (let i = 0; i < arr.length; i += 2) {
|
|
31
|
+
obj[arr[i]] = arr[i + 1];
|
|
32
|
+
}
|
|
33
|
+
return obj;
|
|
34
|
+
}
|
|
35
|
+
|
|
36
|
+
/**
 * Deserialise a Redis hash object into a JobRecord.
 *
 * Hash fields are stored as strings by the Lua scripts; this function
 * revives them: epoch-ms fields become Dates, numeric fields become
 * numbers, and the sentinels `'null'` / `''` / missing all mean "no value".
 * JSON fields (payload, tags, errorHistory) degrade gracefully on
 * malformed input rather than throwing.
 */
function deserializeJob<PayloadMap, T extends JobType<PayloadMap>>(
  h: Record<string, string>,
): JobRecord<PayloadMap, T> {
  // Missing field, empty string, or the literal 'null' sentinel -> null.
  const nullish = (v: string | undefined) =>
    v === undefined || v === 'null' || v === '' ? null : v;

  // Numeric field, or null when absent.
  const numOrNull = (v: string | undefined): number | null => {
    const n = nullish(v);
    return n === null ? null : Number(n);
  };
  // Epoch-ms field revived as a Date, or null when absent.
  const dateOrNull = (v: string | undefined): Date | null => {
    const n = numOrNull(v);
    return n === null ? null : new Date(n);
  };

  // errorHistory is a JSON array; malformed JSON degrades to [].
  let errorHistory: { message: string; timestamp: string }[] = [];
  try {
    const raw = h.errorHistory;
    if (raw && raw !== '[]') {
      errorHistory = JSON.parse(raw);
    }
  } catch {
    /* ignore */
  }

  // tags is a JSON array or the 'null' sentinel; malformed -> undefined.
  let tags: string[] | undefined;
  try {
    const raw = h.tags;
    if (raw && raw !== 'null') {
      tags = JSON.parse(raw);
    }
  } catch {
    /* ignore */
  }

  // Payload is normally JSON; fall back to the raw string if parsing fails
  // so non-JSON values are preserved rather than lost.
  let payload: any;
  try {
    payload = JSON.parse(h.payload);
  } catch {
    payload = h.payload;
  }

  return {
    id: Number(h.id),
    jobType: h.jobType as T,
    payload,
    status: h.status as any,
    createdAt: new Date(Number(h.createdAt)),
    updatedAt: new Date(Number(h.updatedAt)),
    lockedAt: dateOrNull(h.lockedAt),
    lockedBy: nullish(h.lockedBy) as string | null,
    attempts: Number(h.attempts),
    maxAttempts: Number(h.maxAttempts),
    nextAttemptAt: dateOrNull(h.nextAttemptAt),
    priority: Number(h.priority),
    runAt: new Date(Number(h.runAt)),
    pendingReason: nullish(h.pendingReason) as string | null | undefined,
    errorHistory,
    timeoutMs: numOrNull(h.timeoutMs),
    // Stored as 'true'/'1' or 'false'/'0'; any other value means unknown.
    forceKillOnTimeout:
      h.forceKillOnTimeout === 'true' || h.forceKillOnTimeout === '1'
        ? true
        : h.forceKillOnTimeout === 'false' || h.forceKillOnTimeout === '0'
          ? false
          : null,
    failureReason: (nullish(h.failureReason) as FailureReason | null) ?? null,
    completedAt: dateOrNull(h.completedAt),
    startedAt: dateOrNull(h.startedAt),
    lastRetriedAt: dateOrNull(h.lastRetriedAt),
    lastFailedAt: dateOrNull(h.lastFailedAt),
    lastCancelledAt: dateOrNull(h.lastCancelledAt),
    tags,
    idempotencyKey: nullish(h.idempotencyKey) as string | null | undefined,
    progress: numOrNull(h.progress),
  };
}
|
|
113
|
+
|
|
114
|
+
/**
 * QueueBackend implementation backed by Redis (via ioredis). All keys are
 * namespaced under `prefix`; mutations are delegated to atomic Lua scripts
 * from redis-scripts.js.
 */
export class RedisBackend implements QueueBackend {
  // Raw ioredis connection shared by all operations.
  private client: RedisType;
  // Key namespace prefix (default 'dq:') prepended to every Redis key.
  private prefix: string;

  /**
   * Creates a backend over a new ioredis connection.
   *
   * ioredis is resolved lazily with `createRequire` so it can remain an
   * optional dependency; construction throws with an install hint when the
   * package is absent.
   *
   * @param redisConfig Connection settings. `url` takes precedence over the
   *   discrete host/port/password/db fields; `keyPrefix` defaults to 'dq:'.
   * @throws Error when the 'ioredis' package cannot be resolved.
   */
  constructor(redisConfig: RedisJobQueueConfig['redisConfig']) {
    // Dynamically require ioredis to avoid hard dep
    let IORedis: any;
    try {
      const _require = createRequire(import.meta.url);
      IORedis = _require('ioredis');
    } catch {
      throw new Error(
        'Redis backend requires the "ioredis" package. Install it with: npm install ioredis',
      );
    }

    this.prefix = redisConfig.keyPrefix ?? 'dq:';

    if (redisConfig.url) {
      // URL form: only tls/db are layered on top of what the URL encodes.
      this.client = new IORedis(redisConfig.url, {
        ...(redisConfig.tls ? { tls: redisConfig.tls } : {}),
        ...(redisConfig.db !== undefined ? { db: redisConfig.db } : {}),
      });
    } else {
      this.client = new IORedis({
        host: redisConfig.host ?? '127.0.0.1',
        port: redisConfig.port ?? 6379,
        password: redisConfig.password,
        db: redisConfig.db ?? 0,
        ...(redisConfig.tls ? { tls: redisConfig.tls } : {}),
      });
    }
  }

  /** Expose the raw ioredis client for advanced usage. */
  getClient(): RedisType {
    return this.client;
  }

  /** Current wall-clock time in ms; the single seam for time-based logic. */
  private nowMs(): number {
    return Date.now();
  }
|
|
156
|
+
|
|
157
|
+
  // ── Events ──────────────────────────────────────────────────────────

  /**
   * Appends an audit event to the per-job list `<prefix>events:<jobId>`,
   * with an id drawn from the shared `<prefix>event_id_seq` counter.
   * Best-effort: errors are logged and swallowed so event bookkeeping can
   * never fail the operation that triggered it.
   *
   * @param metadata Optional extra context stored alongside the event
   *   (null when omitted).
   */
  async recordJobEvent(
    jobId: number,
    eventType: JobEventType,
    metadata?: any,
  ): Promise<void> {
    try {
      const eventId = await this.client.incr(`${this.prefix}event_id_seq`);
      const event = JSON.stringify({
        id: eventId,
        jobId,
        eventType,
        createdAt: this.nowMs(),
        metadata: metadata ?? null,
      });
      await this.client.rpush(`${this.prefix}events:${jobId}`, event);
    } catch (error) {
      log(`Error recording job event for job ${jobId}: ${error}`);
      // Do not throw
    }
  }

  /**
   * Returns all recorded events for a job in insertion order, reviving
   * `createdAt` (stored as epoch ms) into a Date.
   */
  async getJobEvents(jobId: number): Promise<JobEvent[]> {
    const raw = await this.client.lrange(
      `${this.prefix}events:${jobId}`,
      0,
      -1,
    );
    return raw.map((r: string) => {
      const e = JSON.parse(r);
      return {
        ...e,
        createdAt: new Date(e.createdAt),
      };
    });
  }
|
|
194
|
+
|
|
195
|
+
  // ── Job CRUD ──────────────────────────────────────────────────────────

  /**
   * Enqueues a job atomically via the ADD_JOB Lua script, then records an
   * 'Added' event. Optional values are encoded for the script as the
   * string sentinel 'null' (eval arguments are strings/numbers only);
   * a missing runAt is passed as 0 — presumably meaning "run immediately",
   * per the script contract (not visible in this file).
   *
   * @returns The numeric id the script assigned to the new job.
   */
  async addJob<PayloadMap, T extends JobType<PayloadMap>>({
    jobType,
    payload,
    maxAttempts = 3,
    priority = 0,
    runAt = null,
    timeoutMs = undefined,
    forceKillOnTimeout = false,
    tags = undefined,
    idempotencyKey = undefined,
  }: JobOptions<PayloadMap, T>): Promise<number> {
    const now = this.nowMs();
    const runAtMs = runAt ? runAt.getTime() : 0;

    const result = (await this.client.eval(
      ADD_JOB_SCRIPT,
      1,
      this.prefix,
      jobType,
      JSON.stringify(payload),
      maxAttempts,
      priority,
      runAtMs.toString(),
      timeoutMs !== undefined ? timeoutMs.toString() : 'null',
      forceKillOnTimeout ? 'true' : 'false',
      tags ? JSON.stringify(tags) : 'null',
      idempotencyKey ?? 'null',
      now,
    )) as number;

    const jobId = Number(result);
    log(
      `Added job ${jobId}: payload ${JSON.stringify(payload)}, ${runAt ? `runAt ${runAt.toISOString()}, ` : ''}priority ${priority}, maxAttempts ${maxAttempts}, jobType ${jobType}, tags ${JSON.stringify(tags)}${idempotencyKey ? `, idempotencyKey "${idempotencyKey}"` : ''}`,
    );
    await this.recordJobEvent(jobId, JobEventType.Added, {
      jobType,
      payload,
      tags,
      idempotencyKey,
    });
    return jobId;
  }

  /** Loads one job hash by id; null when the key is missing or empty. */
  async getJob<PayloadMap, T extends JobType<PayloadMap>>(
    id: number,
  ): Promise<JobRecord<PayloadMap, T> | null> {
    const data = await this.client.hgetall(`${this.prefix}job:${id}`);
    if (!data || Object.keys(data).length === 0) {
      log(`Job ${id} not found`);
      return null;
    }
    log(`Found job ${id}`);
    return deserializeJob<PayloadMap, T>(data);
  }

  /**
   * Jobs in a given status, newest first.
   * NOTE: status index sets are unordered, so this loads EVERY member
   * before sorting/paginating in memory — O(jobs in status), not O(limit).
   */
  async getJobsByStatus<PayloadMap, T extends JobType<PayloadMap>>(
    status: string,
    limit = 100,
    offset = 0,
  ): Promise<JobRecord<PayloadMap, T>[]> {
    const ids = await this.client.smembers(`${this.prefix}status:${status}`);
    if (ids.length === 0) return [];

    // Load all, sort by createdAt DESC, then paginate
    const jobs = await this.loadJobsByIds<PayloadMap, T>(ids);
    jobs.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
    return jobs.slice(offset, offset + limit);
  }

  /**
   * All jobs, newest first. Pagination happens in Redis: the 'all' sorted
   * set is scored by createdAt ms, so ZREVRANGE yields the page directly.
   */
  async getAllJobs<PayloadMap, T extends JobType<PayloadMap>>(
    limit = 100,
    offset = 0,
  ): Promise<JobRecord<PayloadMap, T>[]> {
    // All jobs sorted by createdAt DESC (the 'all' sorted set is scored by createdAt ms)
    const ids = await this.client.zrevrange(
      `${this.prefix}all`,
      offset,
      offset + limit - 1,
    );
    if (ids.length === 0) return [];
    return this.loadJobsByIds<PayloadMap, T>(ids);
  }

  /**
   * Filtered job listing, newest first. jobType and tags narrow the
   * candidate set via index sets; priority and runAt require job data and
   * are applied in memory after loading every remaining candidate.
   */
  async getJobs<PayloadMap, T extends JobType<PayloadMap>>(
    filters?: JobFilters,
    limit = 100,
    offset = 0,
  ): Promise<JobRecord<PayloadMap, T>[]> {
    // Start with all job IDs
    let candidateIds: string[];

    if (filters?.jobType) {
      candidateIds = await this.client.smembers(
        `${this.prefix}type:${filters.jobType}`,
      );
    } else {
      candidateIds = await this.client.zrevrange(`${this.prefix}all`, 0, -1);
    }

    if (candidateIds.length === 0) return [];

    // Apply tag filter via set operations
    if (filters?.tags && filters.tags.values.length > 0) {
      candidateIds = await this.filterByTags(
        candidateIds,
        filters.tags.values,
        filters.tags.mode || 'all',
      );
    }

    // Load and filter remaining criteria in-memory
    let jobs = await this.loadJobsByIds<PayloadMap, T>(candidateIds);

    if (filters) {
      if (filters.priority !== undefined) {
        jobs = jobs.filter((j) => j.priority === filters.priority);
      }
      if (filters.runAt) {
        jobs = this.filterByRunAt(jobs, filters.runAt);
      }
    }

    // Sort by createdAt DESC
    jobs.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
    return jobs.slice(offset, offset + limit);
  }

  /**
   * Jobs matching the given tags (see filterByTags for the semantics of
   * mode 'all'|'any'|'none'|'exact'), newest first.
   */
  async getJobsByTags<PayloadMap, T extends JobType<PayloadMap>>(
    tags: string[],
    mode: TagQueryMode = 'all',
    limit = 100,
    offset = 0,
  ): Promise<JobRecord<PayloadMap, T>[]> {
    // Start with all IDs
    const allIds = await this.client.zrevrange(`${this.prefix}all`, 0, -1);
    if (allIds.length === 0) return [];

    const filtered = await this.filterByTags(allIds, tags, mode);
    if (filtered.length === 0) return [];

    const jobs = await this.loadJobsByIds<PayloadMap, T>(filtered);
    jobs.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
    return jobs.slice(offset, offset + limit);
  }
|
|
341
|
+
|
|
342
|
+
  // ── Processing lifecycle ──────────────────────────────────────────────

  /**
   * Atomically claims up to batchSize due jobs for workerId via the
   * GET_NEXT_BATCH Lua script. The script returns one flat array of
   * field/value pairs with the '__JOB_SEP__' marker terminating each job;
   * that wire format is parsed back into JobRecords here. A 'Processing'
   * event is recorded for every claimed job.
   *
   * @param jobType Optional type filter; an array is JSON-encoded for the
   *   script, absence is signalled with the 'null' sentinel.
   */
  async getNextBatch<PayloadMap, T extends JobType<PayloadMap>>(
    workerId: string,
    batchSize = 10,
    jobType?: string | string[],
  ): Promise<JobRecord<PayloadMap, T>[]> {
    const now = this.nowMs();
    const jobTypeFilter =
      jobType === undefined
        ? 'null'
        : Array.isArray(jobType)
          ? JSON.stringify(jobType)
          : jobType;

    const result = (await this.client.eval(
      GET_NEXT_BATCH_SCRIPT,
      1,
      this.prefix,
      workerId,
      batchSize,
      now,
      jobTypeFilter,
    )) as string[];

    if (!result || result.length === 0) {
      log('Found 0 jobs to process');
      return [];
    }

    // Parse the flat result into jobs separated by __JOB_SEP__
    const jobs: JobRecord<PayloadMap, T>[] = [];
    let current: string[] = [];
    for (const item of result) {
      if (item === '__JOB_SEP__') {
        if (current.length > 0) {
          const h = hashToObject(current);
          jobs.push(deserializeJob<PayloadMap, T>(h));
        }
        current = [];
      } else {
        current.push(item);
      }
    }

    log(`Found ${jobs.length} jobs to process`);

    // Record processing events
    for (const job of jobs) {
      await this.recordJobEvent(job.id, JobEventType.Processing);
    }

    return jobs;
  }

  /** Marks a job completed (COMPLETE_JOB script) and records the event. */
  async completeJob(jobId: number): Promise<void> {
    const now = this.nowMs();
    await this.client.eval(COMPLETE_JOB_SCRIPT, 1, this.prefix, jobId, now);
    await this.recordJobEvent(jobId, JobEventType.Completed);
    log(`Completed job ${jobId}`);
  }

  /**
   * Marks a job failed via the FAIL_JOB script, passing a one-entry
   * error-history record to append; a missing failureReason is sent as the
   * 'null' sentinel. Records a 'Failed' event with the message and reason.
   */
  async failJob(
    jobId: number,
    error: Error,
    failureReason?: FailureReason,
  ): Promise<void> {
    const now = this.nowMs();
    const errorJson = JSON.stringify([
      {
        message: error.message || String(error),
        timestamp: new Date(now).toISOString(),
      },
    ]);
    await this.client.eval(
      FAIL_JOB_SCRIPT,
      1,
      this.prefix,
      jobId,
      errorJson,
      failureReason ?? 'null',
      now,
    );
    await this.recordJobEvent(jobId, JobEventType.Failed, {
      message: error.message || String(error),
      failureReason,
    });
    log(`Failed job ${jobId}`);
  }

  /**
   * Refreshes a running job's claim via the PROLONG_JOB script so it is
   * not reclaimed as stuck. Best-effort: errors are logged, never thrown.
   */
  async prolongJob(jobId: number): Promise<void> {
    try {
      const now = this.nowMs();
      await this.client.eval(PROLONG_JOB_SCRIPT, 1, this.prefix, jobId, now);
      await this.recordJobEvent(jobId, JobEventType.Prolonged);
      log(`Prolonged job ${jobId}`);
    } catch (error) {
      log(`Error prolonging job ${jobId}: ${error}`);
      // Best-effort, do not throw
    }
  }

  // ── Progress ──────────────────────────────────────────────────────────

  /**
   * Writes a progress value (and a fresh updatedAt) onto the job hash.
   * No validation/clamping is done here; the value is stored as given.
   * Best-effort: failures are logged so a progress update can never kill
   * the running handler.
   */
  async updateProgress(jobId: number, progress: number): Promise<void> {
    try {
      const now = this.nowMs();
      await this.client.hset(
        `${this.prefix}job:${jobId}`,
        'progress',
        progress.toString(),
        'updatedAt',
        now.toString(),
      );
      log(`Updated progress for job ${jobId}: ${progress}%`);
    } catch (error) {
      log(`Error updating progress for job ${jobId}: ${error}`);
      // Best-effort: do not throw to avoid killing the running handler
    }
  }
|
|
462
|
+
|
|
463
|
+
  // ── Job management ────────────────────────────────────────────────────

  /** Re-queues a job via the RETRY_JOB script and records the event. */
  async retryJob(jobId: number): Promise<void> {
    const now = this.nowMs();
    await this.client.eval(RETRY_JOB_SCRIPT, 1, this.prefix, jobId, now);
    await this.recordJobEvent(jobId, JobEventType.Retried);
    log(`Retried job ${jobId}`);
  }

  /** Cancels a job via the CANCEL_JOB script and records the event. */
  async cancelJob(jobId: number): Promise<void> {
    const now = this.nowMs();
    await this.client.eval(CANCEL_JOB_SCRIPT, 1, this.prefix, jobId, now);
    await this.recordJobEvent(jobId, JobEventType.Cancelled);
    log(`Cancelled job ${jobId}`);
  }

  /**
   * Cancels every pending job matching the optional filters, one
   * CANCEL_JOB script call per id (each call is individually atomic; the
   * batch as a whole is not). Counts only calls the script reported as 1
   * (presumably "actually cancelled" — per the script contract).
   *
   * @returns Number of jobs the script confirmed cancelled.
   */
  async cancelAllUpcomingJobs(filters?: JobFilters): Promise<number> {
    // Get all pending IDs
    let ids = await this.client.smembers(`${this.prefix}status:pending`);
    if (ids.length === 0) return 0;

    if (filters) {
      ids = await this.applyFilters(ids, filters);
    }

    const now = this.nowMs();
    let count = 0;
    for (const id of ids) {
      const result = await this.client.eval(
        CANCEL_JOB_SCRIPT,
        1,
        this.prefix,
        id,
        now,
      );
      if (Number(result) === 1) count++;
    }

    log(`Cancelled ${count} jobs`);
    return count;
  }

  /**
   * Edits a PENDING job in place; any other status is a logged no-op.
   * Besides updating hash fields, this keeps the secondary structures in
   * sync: the queue zset score when priority changes, and the tag index
   * sets when tags change. Records an 'Edited' event with the applied
   * updates. NOTE(review): the status check and the writes below are not
   * atomic — the job could be picked up between them; confirm acceptable.
   */
  async editJob(jobId: number, updates: JobUpdates): Promise<void> {
    const jk = `${this.prefix}job:${jobId}`;
    const status = await this.client.hget(jk, 'status');
    if (status !== 'pending') {
      log(`Job ${jobId} is not pending (status: ${status}), skipping edit`);
      return;
    }

    const now = this.nowMs();
    const fields: string[] = [];
    const metadata: any = {};

    if (updates.payload !== undefined) {
      fields.push('payload', JSON.stringify(updates.payload));
      metadata.payload = updates.payload;
    }
    if (updates.maxAttempts !== undefined) {
      fields.push('maxAttempts', updates.maxAttempts.toString());
      metadata.maxAttempts = updates.maxAttempts;
    }
    if (updates.priority !== undefined) {
      fields.push('priority', updates.priority.toString());
      metadata.priority = updates.priority;

      // Recompute queue score: priority-major, older-first within a
      // priority (must mirror the scoring used by the Lua scripts).
      // NOTE(review): for |priority| > ~8 this exceeds
      // Number.MAX_SAFE_INTEGER — confirm precision is acceptable.
      const createdAt = await this.client.hget(jk, 'createdAt');
      const score = updates.priority * 1e15 + (1e15 - Number(createdAt));
      // Update score in queue if present
      const inQueue = await this.client.zscore(
        `${this.prefix}queue`,
        jobId.toString(),
      );
      if (inQueue !== null) {
        await this.client.zadd(`${this.prefix}queue`, score, jobId.toString());
      }
    }
    if (updates.runAt !== undefined) {
      if (updates.runAt === null) {
        // null runAt means "run now".
        fields.push('runAt', now.toString());
      } else {
        fields.push('runAt', updates.runAt.getTime().toString());
      }
      metadata.runAt = updates.runAt;
    }
    if (updates.timeoutMs !== undefined) {
      fields.push(
        'timeoutMs',
        updates.timeoutMs !== null ? updates.timeoutMs.toString() : 'null',
      );
      metadata.timeoutMs = updates.timeoutMs;
    }
    if (updates.tags !== undefined) {
      // Update tag indexes: remove old, add new
      const oldTagsJson = await this.client.hget(jk, 'tags');
      if (oldTagsJson && oldTagsJson !== 'null') {
        try {
          const oldTags = JSON.parse(oldTagsJson) as string[];
          for (const tag of oldTags) {
            await this.client.srem(
              `${this.prefix}tag:${tag}`,
              jobId.toString(),
            );
          }
        } catch {
          /* ignore */
        }
      }
      await this.client.del(`${this.prefix}job:${jobId}:tags`);

      if (updates.tags !== null) {
        for (const tag of updates.tags) {
          await this.client.sadd(`${this.prefix}tag:${tag}`, jobId.toString());
          await this.client.sadd(`${this.prefix}job:${jobId}:tags`, tag);
        }
        fields.push('tags', JSON.stringify(updates.tags));
      } else {
        fields.push('tags', 'null');
      }
      metadata.tags = updates.tags;
    }

    if (fields.length === 0) {
      log(`No fields to update for job ${jobId}`);
      return;
    }

    fields.push('updatedAt', now.toString());
    await (this.client as any).hmset(jk, ...fields);

    await this.recordJobEvent(jobId, JobEventType.Edited, metadata);
    log(`Edited job ${jobId}: ${JSON.stringify(metadata)}`);
  }

  /**
   * Applies the same updates to every pending job matching the filters by
   * delegating to editJob per id. The count includes ids editJob skipped
   * (e.g. ones that left 'pending' mid-loop).
   */
  async editAllPendingJobs(
    filters: JobFilters | undefined,
    updates: JobUpdates,
  ): Promise<number> {
    let ids = await this.client.smembers(`${this.prefix}status:pending`);
    if (ids.length === 0) return 0;

    if (filters) {
      ids = await this.applyFilters(ids, filters);
    }

    let count = 0;
    for (const id of ids) {
      await this.editJob(Number(id), updates);
      count++;
    }

    log(`Edited ${count} pending jobs`);
    return count;
  }

  /**
   * Deletes jobs older than the cutoff via the CLEANUP_OLD_JOBS script.
   * @returns Number of jobs the script reported deleted.
   */
  async cleanupOldJobs(daysToKeep = 30): Promise<number> {
    const cutoffMs = this.nowMs() - daysToKeep * 24 * 60 * 60 * 1000;
    const result = (await this.client.eval(
      CLEANUP_OLD_JOBS_SCRIPT,
      1,
      this.prefix,
      cutoffMs,
    )) as number;
    log(`Deleted ${result} old jobs`);
    return Number(result);
  }

  /** Intentional no-op on Redis: events live with their job's keys. */
  async cleanupOldJobEvents(daysToKeep = 30): Promise<number> {
    // Redis events are stored per-job; cleaning up old events requires
    // iterating event lists and filtering by date. For now, we skip
    // events belonging to jobs that have been cleaned up (their keys are gone).
    // A full implementation would iterate all events:* keys.
    log(
      `cleanupOldJobEvents is a no-op for Redis backend (events are cleaned up with their jobs)`,
    );
    return 0;
  }

  /**
   * Returns jobs locked longer than the threshold to the queue via the
   * RECLAIM_STUCK_JOBS script.
   * @returns Number of jobs the script reported reclaimed.
   */
  async reclaimStuckJobs(maxProcessingTimeMinutes = 10): Promise<number> {
    const maxAgeMs = maxProcessingTimeMinutes * 60 * 1000;
    const now = this.nowMs();
    const result = (await this.client.eval(
      RECLAIM_STUCK_JOBS_SCRIPT,
      1,
      this.prefix,
      maxAgeMs,
      now,
    )) as number;
    log(`Reclaimed ${result} stuck jobs`);
    return Number(result);
  }

  // ── Internal helpers ──────────────────────────────────────────────────

  /**
   * Writes a pendingReason onto every pending job (optionally limited to
   * the given job type(s)), one HSET per id — e.g. to explain why a
   * processor run left them unpicked.
   */
  async setPendingReasonForUnpickedJobs(
    reason: string,
    jobType?: string | string[],
  ): Promise<void> {
    let ids = await this.client.smembers(`${this.prefix}status:pending`);
    if (ids.length === 0) return;

    if (jobType) {
      // Restrict to ids present in any of the requested type index sets.
      const types = Array.isArray(jobType) ? jobType : [jobType];
      const typeSet = new Set<string>();
      for (const t of types) {
        const typeIds = await this.client.smembers(`${this.prefix}type:${t}`);
        for (const id of typeIds) typeSet.add(id);
      }
      ids = ids.filter((id: string) => typeSet.has(id));
    }

    for (const id of ids) {
      await this.client.hset(
        `${this.prefix}job:${id}`,
        'pendingReason',
        reason,
      );
    }
  }
|
|
683
|
+
|
|
684
|
+
  // ── Private helpers ───────────────────────────────────────────────────

  /**
   * Loads job hashes for the given ids with one pipelined HGETALL per id.
   * Ids whose hash is missing/empty or whose read errored are silently
   * skipped, so the result may be shorter than the input.
   */
  private async loadJobsByIds<PayloadMap, T extends JobType<PayloadMap>>(
    ids: string[],
  ): Promise<JobRecord<PayloadMap, T>[]> {
    const pipeline = this.client.pipeline();
    for (const id of ids) {
      pipeline.hgetall(`${this.prefix}job:${id}`);
    }
    const results = await pipeline.exec();
    const jobs: JobRecord<PayloadMap, T>[] = [];
    if (results) {
      // ioredis pipeline results are [error, reply] tuples.
      for (const [err, data] of results) {
        if (
          !err &&
          data &&
          typeof data === 'object' &&
          Object.keys(data as object).length > 0
        ) {
          jobs.push(
            deserializeJob<PayloadMap, T>(data as Record<string, string>),
          );
        }
      }
    }
    return jobs;
  }

  /**
   * Narrows candidateIds by tag membership using the `tag:<tag>` and
   * `job:<id>:tags` index sets.
   *
   * Modes: 'exact' — the job's tag set equals `tags`; 'all' — the job has
   * every tag; 'any' — at least one tag; 'none' — none of the tags.
   * An unrecognised mode falls back to 'all'. Preserves the order of
   * candidateIds except in 'all'/'any', which return set-derived order.
   */
  private async filterByTags(
    candidateIds: string[],
    tags: string[],
    mode: TagQueryMode,
  ): Promise<string[]> {
    const candidateSet = new Set(candidateIds.map(String));

    if (mode === 'exact') {
      // Jobs whose tags set is exactly equal to the given tags
      const tagSet = new Set(tags);
      const result: string[] = [];
      for (const id of candidateIds) {
        const jobTags = await this.client.smembers(
          `${this.prefix}job:${id}:tags`,
        );
        if (
          jobTags.length === tagSet.size &&
          jobTags.every((t: string) => tagSet.has(t))
        ) {
          result.push(id);
        }
      }
      return result;
    }

    if (mode === 'all') {
      // Jobs that have ALL the given tags
      let intersection = new Set(candidateIds.map(String));
      for (const tag of tags) {
        const tagMembers = await this.client.smembers(
          `${this.prefix}tag:${tag}`,
        );
        const tagSet = new Set(tagMembers.map(String));
        intersection = new Set(
          [...intersection].filter((id) => tagSet.has(id)),
        );
      }
      return [...intersection].filter((id) => candidateSet.has(id));
    }

    if (mode === 'any') {
      // Jobs that have at least ONE of the given tags
      const union = new Set<string>();
      for (const tag of tags) {
        const tagMembers = await this.client.smembers(
          `${this.prefix}tag:${tag}`,
        );
        for (const id of tagMembers) union.add(String(id));
      }
      return [...union].filter((id) => candidateSet.has(id));
    }

    if (mode === 'none') {
      // Jobs that have NONE of the given tags
      const exclude = new Set<string>();
      for (const tag of tags) {
        const tagMembers = await this.client.smembers(
          `${this.prefix}tag:${tag}`,
        );
        for (const id of tagMembers) exclude.add(String(id));
      }
      return candidateIds.filter((id) => !exclude.has(String(id)));
    }

    // Default: 'all'
    return this.filterByTags(candidateIds, tags, 'all');
  }

  /**
   * In-memory runAt filter. A bare Date means exact equality; the object
   * form combines gt/gte/lt/lte/eq constraints (all present ones must
   * hold). Constraints are checked by truthiness, so they cannot be
   * "unset" with null — only by omission.
   */
  private filterByRunAt<PayloadMap, T extends JobType<PayloadMap>>(
    jobs: JobRecord<PayloadMap, T>[],
    runAt: Date | { gt?: Date; gte?: Date; lt?: Date; lte?: Date; eq?: Date },
  ): JobRecord<PayloadMap, T>[] {
    if (runAt instanceof Date) {
      return jobs.filter((j) => j.runAt.getTime() === runAt.getTime());
    }
    return jobs.filter((j) => {
      const t = j.runAt.getTime();
      if (runAt.gt && !(t > runAt.gt.getTime())) return false;
      if (runAt.gte && !(t >= runAt.gte.getTime())) return false;
      if (runAt.lt && !(t < runAt.lt.getTime())) return false;
      if (runAt.lte && !(t <= runAt.lte.getTime())) return false;
      if (runAt.eq && t !== runAt.eq.getTime()) return false;
      return true;
    });
  }

  /**
   * Applies JobFilters to a list of ids: jobType and tags via index sets,
   * then priority/runAt by loading the surviving jobs and filtering their
   * data in memory.
   */
  private async applyFilters(
    ids: string[],
    filters: JobFilters,
  ): Promise<string[]> {
    let result = ids;

    if (filters.jobType) {
      const typeIds = new Set(
        await this.client.smembers(`${this.prefix}type:${filters.jobType}`),
      );
      result = result.filter((id) => typeIds.has(id));
    }

    if (filters.tags && filters.tags.values.length > 0) {
      result = await this.filterByTags(
        result,
        filters.tags.values,
        filters.tags.mode || 'all',
      );
    }

    // For priority and runAt, we need to load job data
    if (filters.priority !== undefined || filters.runAt) {
      const jobs = await this.loadJobsByIds(result);
      let filtered = jobs;
      if (filters.priority !== undefined) {
        filtered = filtered.filter((j) => j.priority === filters.priority);
      }
      if (filters.runAt) {
        filtered = this.filterByRunAt(filtered, filters.runAt);
      }
      result = filtered.map((j) => j.id.toString());
    }

    return result;
  }
}
|