sidekiq-ts 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86) hide show
  1. package/README.md +686 -0
  2. package/dist/api.d.ts +172 -0
  3. package/dist/api.d.ts.map +1 -0
  4. package/dist/api.js +679 -0
  5. package/dist/backtrace.d.ts +3 -0
  6. package/dist/backtrace.d.ts.map +1 -0
  7. package/dist/backtrace.js +16 -0
  8. package/dist/cli-helpers.d.ts +22 -0
  9. package/dist/cli-helpers.d.ts.map +1 -0
  10. package/dist/cli-helpers.js +152 -0
  11. package/dist/cli.d.ts +3 -0
  12. package/dist/cli.d.ts.map +1 -0
  13. package/dist/cli.js +143 -0
  14. package/dist/client.d.ts +25 -0
  15. package/dist/client.d.ts.map +1 -0
  16. package/dist/client.js +212 -0
  17. package/dist/config-loader.d.ts +16 -0
  18. package/dist/config-loader.d.ts.map +1 -0
  19. package/dist/config-loader.js +37 -0
  20. package/dist/config.d.ts +59 -0
  21. package/dist/config.d.ts.map +1 -0
  22. package/dist/config.js +155 -0
  23. package/dist/context.d.ts +10 -0
  24. package/dist/context.d.ts.map +1 -0
  25. package/dist/context.js +29 -0
  26. package/dist/cron.d.ts +44 -0
  27. package/dist/cron.d.ts.map +1 -0
  28. package/dist/cron.js +173 -0
  29. package/dist/index.d.ts +16 -0
  30. package/dist/index.d.ts.map +1 -0
  31. package/dist/index.js +14 -0
  32. package/dist/interrupt-handler.d.ts +8 -0
  33. package/dist/interrupt-handler.d.ts.map +1 -0
  34. package/dist/interrupt-handler.js +24 -0
  35. package/dist/iterable-constants.d.ts +3 -0
  36. package/dist/iterable-constants.d.ts.map +1 -0
  37. package/dist/iterable-constants.js +2 -0
  38. package/dist/iterable-errors.d.ts +10 -0
  39. package/dist/iterable-errors.d.ts.map +1 -0
  40. package/dist/iterable-errors.js +18 -0
  41. package/dist/iterable.d.ts +44 -0
  42. package/dist/iterable.d.ts.map +1 -0
  43. package/dist/iterable.js +298 -0
  44. package/dist/job-logger.d.ts +12 -0
  45. package/dist/job-logger.d.ts.map +1 -0
  46. package/dist/job-logger.js +64 -0
  47. package/dist/job-util.d.ts +8 -0
  48. package/dist/job-util.d.ts.map +1 -0
  49. package/dist/job-util.js +158 -0
  50. package/dist/job.d.ts +73 -0
  51. package/dist/job.d.ts.map +1 -0
  52. package/dist/job.js +200 -0
  53. package/dist/json.d.ts +3 -0
  54. package/dist/json.d.ts.map +1 -0
  55. package/dist/json.js +2 -0
  56. package/dist/leader.d.ts +63 -0
  57. package/dist/leader.d.ts.map +1 -0
  58. package/dist/leader.js +193 -0
  59. package/dist/logger.d.ts +53 -0
  60. package/dist/logger.d.ts.map +1 -0
  61. package/dist/logger.js +143 -0
  62. package/dist/middleware.d.ts +23 -0
  63. package/dist/middleware.d.ts.map +1 -0
  64. package/dist/middleware.js +92 -0
  65. package/dist/periodic.d.ts +80 -0
  66. package/dist/periodic.d.ts.map +1 -0
  67. package/dist/periodic.js +205 -0
  68. package/dist/redis.d.ts +3 -0
  69. package/dist/redis.d.ts.map +1 -0
  70. package/dist/redis.js +1 -0
  71. package/dist/registry.d.ts +11 -0
  72. package/dist/registry.d.ts.map +1 -0
  73. package/dist/registry.js +8 -0
  74. package/dist/runner.d.ts +81 -0
  75. package/dist/runner.d.ts.map +1 -0
  76. package/dist/runner.js +791 -0
  77. package/dist/sidekiq.d.ts +43 -0
  78. package/dist/sidekiq.d.ts.map +1 -0
  79. package/dist/sidekiq.js +189 -0
  80. package/dist/testing.d.ts +32 -0
  81. package/dist/testing.d.ts.map +1 -0
  82. package/dist/testing.js +112 -0
  83. package/dist/types.d.ts +116 -0
  84. package/dist/types.d.ts.map +1 -0
  85. package/dist/types.js +1 -0
  86. package/package.json +42 -0
package/dist/api.js ADDED
@@ -0,0 +1,679 @@
1
+ import { Client } from "./client.js";
2
+ import { dumpJson, loadJson } from "./json.js";
3
+ import { Sidekiq } from "./sidekiq.js";
4
// Resolve the Redis client from the supplied config, falling back to the
// process-wide default configuration when none is given.
const getRedis = async (config) => {
    const effective = config ?? Sidekiq.defaultConfiguration;
    return effective.getRedisClient();
};
5
/**
 * Read-only view of the global Sidekiq counters and queue metrics in Redis.
 */
export class Stats {
    config;
    constructor(config) {
        this.config = config;
    }
    /** Lifetime count of successfully processed jobs. */
    async processed() {
        const conn = await getRedis(this.config);
        return Number(await conn.get("stat:processed")) || 0;
    }
    /** Lifetime count of failed jobs. */
    async failed() {
        const conn = await getRedis(this.config);
        return Number(await conn.get("stat:failed")) || 0;
    }
    /** Sum of the `busy` counters across all live process records. */
    async workersSize() {
        const conn = await getRedis(this.config);
        const members = await conn.sMembers("processes");
        if (members.length === 0) {
            return 0;
        }
        const batch = conn.multi();
        members.forEach((member) => batch.hGet(member, "busy"));
        const counts = (await batch.exec()) ?? [];
        let total = 0;
        for (const count of counts) {
            total += Number(count ?? 0);
        }
        return total;
    }
    /** Number of jobs waiting in the scheduled set. */
    async scheduledSize() {
        const conn = await getRedis(this.config);
        return Number(await conn.sendCommand(["ZCARD", "schedule"])) || 0;
    }
    /** Number of jobs waiting in the retry set. */
    async retrySize() {
        const conn = await getRedis(this.config);
        return Number(await conn.sendCommand(["ZCARD", "retry"])) || 0;
    }
    /** Number of jobs in the dead set. */
    async deadSize() {
        const conn = await getRedis(this.config);
        return Number(await conn.sendCommand(["ZCARD", "dead"])) || 0;
    }
    /** Number of registered Sidekiq processes. */
    async processesSize() {
        const conn = await getRedis(this.config);
        return Number(await conn.sendCommand(["SCARD", "processes"])) || 0;
    }
    /** Total number of jobs currently sitting in all known queues. */
    async enqueued() {
        const conn = await getRedis(this.config);
        const names = await conn.sMembers("queues");
        if (names.length === 0) {
            return 0;
        }
        const batch = conn.multi();
        names.forEach((name) => batch.lLen(`queue:${name}`));
        const lengths = (await batch.exec()) ?? [];
        let total = 0;
        for (const length of lengths) {
            total += Number(length ?? 0);
        }
        return total;
    }
    /**
     * Seconds the oldest job in the default queue has been waiting.
     * Returns 0 when the queue is empty or the tail entry is unreadable.
     */
    async defaultQueueLatency() {
        const conn = await getRedis(this.config);
        const tail = (await conn.sendCommand([
            "LINDEX",
            "queue:default",
            "-1",
        ]));
        if (!tail) {
            return 0;
        }
        let job;
        try {
            job = loadJson(tail);
        }
        catch {
            return 0;
        }
        const enqueuedAt = Number(job.enqueued_at);
        if (!Number.isFinite(enqueuedAt) || enqueuedAt <= 0) {
            return 0;
        }
        // enqueued_at may be epoch seconds or milliseconds; values above 1e12
        // are treated as milliseconds.
        const now = Date.now();
        const elapsedMs = enqueuedAt > 1_000_000_000_000
            ? now - enqueuedAt
            : now - enqueuedAt * 1000;
        return elapsedMs / 1000;
    }
    /**
     * Zero out the given counters ("processed" and/or "failed").
     * With no arguments, both are reset; unknown names are ignored.
     */
    async reset(...stats) {
        const requested = stats.length === 0 ? ["processed", "failed"] : stats.map(String);
        const valid = requested.filter((name) => name === "processed" || name === "failed");
        if (valid.length === 0) {
            return;
        }
        const conn = await getRedis(this.config);
        const kv = [];
        for (const name of valid) {
            kv.push(`stat:${name}`, "0");
        }
        await conn.mSet(kv);
    }
    /** Map of queue name -> length, ordered by length descending. */
    async queues() {
        const conn = await getRedis(this.config);
        const names = await conn.sMembers("queues");
        if (names.length === 0) {
            return {};
        }
        const batch = conn.multi();
        names.forEach((name) => batch.lLen(`queue:${name}`));
        const lengths = await batch.exec();
        const pairs = names.map((name, i) => [name, Number(lengths?.[i] ?? 0)]);
        pairs.sort((a, b) => b[1] - a[1]);
        return Object.fromEntries(pairs);
    }
}
119
/**
 * Per-day processed/failed counters covering the last `daysPrevious` days.
 */
export class StatsHistory {
    config;
    daysPrevious;
    startDate;
    /**
     * @param daysPrevious number of days to report (1..1825)
     * @param startDate most recent day to include (defaults to now)
     * @param config optional configuration overriding the global default
     */
    constructor(daysPrevious, startDate = new Date(), config) {
        if (daysPrevious < 1 || daysPrevious > 5 * 365) {
            throw new Error("daysPrevious must be between 1 and 1825");
        }
        this.config = config;
        this.daysPrevious = daysPrevious;
        // Normalize to UTC midnight so date arithmetic is timezone-independent.
        this.startDate = new Date(Date.UTC(startDate.getUTCFullYear(), startDate.getUTCMonth(), startDate.getUTCDate()));
    }
    /** Map of ISO date -> processed count. */
    processed() {
        return this.dateStat("processed");
    }
    /** Map of ISO date -> failed count. */
    failed() {
        return this.dateStat("failed");
    }
    async dateStat(stat) {
        const days = [];
        for (let offset = 0; offset < this.daysPrevious; offset += 1) {
            const day = new Date(this.startDate);
            day.setUTCDate(this.startDate.getUTCDate() - offset);
            days.push(day.toISOString().slice(0, 10));
        }
        const conn = await getRedis(this.config);
        const counts = await conn.mGet(days.map((day) => `stat:${stat}:${day}`));
        const out = {};
        for (const [i, day] of days.entries()) {
            out[day] = Number(counts[i] ?? 0);
        }
        return out;
    }
}
154
/**
 * API wrapper around a single Sidekiq queue (a Redis list).
 */
export class Queue {
    config;
    name;
    constructor(name = "default", config) {
        this.name = name;
        this.config = config;
    }
    /** All known queues, sorted by name. */
    static async all(config) {
        const conn = await getRedis(config);
        const names = await conn.sMembers("queues");
        return names.sort().map((name) => new Queue(name, config));
    }
    /** Current number of jobs in the queue. */
    async size() {
        const conn = await getRedis(this.config);
        return Number(await conn.lLen(`queue:${this.name}`));
    }
    /** Delete the queue list and deregister it from the `queues` set. */
    async clear() {
        const conn = await getRedis(this.config);
        const batch = conn.multi();
        batch.unlink(`queue:${this.name}`);
        batch.sRem("queues", [this.name]);
        await batch.exec();
    }
    /** Job payloads for the given list range, parsed from JSON. */
    async entries(start = 0, stop = -1) {
        const conn = await getRedis(this.config);
        const raw = await conn.lRange(`queue:${this.name}`, start, stop);
        return raw.map((item) => loadJson(item));
    }
    /**
     * Seconds the oldest job in this queue has been waiting.
     * Returns 0 when empty or the tail entry cannot be parsed.
     */
    async latency() {
        const conn = await getRedis(this.config);
        const tail = (await conn.sendCommand([
            "LINDEX",
            `queue:${this.name}`,
            "-1",
        ]));
        if (!tail) {
            return 0;
        }
        let job;
        try {
            job = loadJson(tail);
        }
        catch {
            return 0;
        }
        const enqueuedAt = Number(job.enqueued_at);
        if (!Number.isFinite(enqueuedAt) || enqueuedAt <= 0) {
            return 0;
        }
        // Heuristic: values above 1e12 are epoch milliseconds, else seconds.
        const now = Date.now();
        const elapsedMs = enqueuedAt > 1_000_000_000_000
            ? now - enqueuedAt
            : now - enqueuedAt * 1000;
        return elapsedMs / 1000;
    }
    /**
     * Iterate jobs page by page, compensating for concurrent deletions so
     * entries are not skipped when the list shrinks mid-scan.
     */
    async *each(pageSize = 50) {
        const conn = await getRedis(this.config);
        const originalLength = await conn.lLen(`queue:${this.name}`);
        let shrinkage = 0;
        let pageIndex = 0;
        while (true) {
            const from = Math.max(pageIndex * pageSize - shrinkage, 0);
            const to = from + pageSize - 1;
            const chunk = await conn.lRange(`queue:${this.name}`, from, to);
            if (chunk.length === 0) {
                break;
            }
            pageIndex += 1;
            for (const item of chunk) {
                yield new JobRecord(item, this.name, this.config);
            }
            const currentLength = await conn.lLen(`queue:${this.name}`);
            shrinkage = Math.max(originalLength - currentLength, 0);
        }
    }
    /** Linear scan for a job by jid; null when not found. */
    async findJob(jid) {
        for await (const job of this.each()) {
            if (job.jid === jid) {
                return job;
            }
        }
        return null;
    }
}
238
/**
 * A single enqueued job: the raw JSON string plus its parsed payload.
 */
export class JobRecord {
    queue;
    value;
    payload;
    config;
    constructor(value, queue, config) {
        this.value = value;
        this.queue = queue;
        this.config = config;
        this.payload = loadJson(value);
    }
    /** Job id, when present in the payload. */
    get jid() {
        const { jid } = this.payload;
        return typeof jid === "string" ? jid : undefined;
    }
    /** Remove this exact payload from its queue; true if anything was removed. */
    async delete() {
        const conn = await getRedis(this.config);
        const count = await conn.lRem(`queue:${this.queue}`, 1, this.value);
        return count > 0;
    }
}
258
/**
 * Minimal wrapper around a Redis sorted set, shared by the scheduled,
 * retry and dead sets.
 */
class SortedSet {
    config;
    key;
    constructor(key, config) {
        this.key = key;
        this.config = config;
    }
    /** Number of members in the set. */
    async size() {
        const conn = await getRedis(this.config);
        return Number(await conn.sendCommand(["ZCARD", this.key]));
    }
    /** Remove every member of the set. */
    async clear() {
        const conn = await getRedis(this.config);
        await conn.sendCommand(["ZREMRANGEBYRANK", this.key, "0", "-1"]);
    }
}
274
/**
 * One member of a job set: its score (timestamp), raw value and parsed
 * payload, with mutations delegated back to the owning set.
 */
export class SortedEntry {
    score;
    payload;
    value;
    parent;
    constructor(parent, score, value) {
        this.parent = parent;
        this.score = score;
        this.value = value;
        this.payload = loadJson(value);
    }
    /** Job id, when present in the payload. */
    get jid() {
        const { jid } = this.payload;
        return typeof jid === "string" ? jid : undefined;
    }
    /** Remove this entry from its set. */
    async delete() {
        await this.parent.deleteByValue(this.value);
    }
    /** Move this entry to a new score (timestamp). */
    async reschedule(at) {
        await this.parent.rescheduleValue(this.value, at);
    }
    /** Push this job onto its queue immediately. */
    async addToQueue() {
        await this.parent.addToQueueValue(this.value);
    }
    /** Re-enqueue this job as a retry. */
    async retry() {
        await this.parent.retryValue(this.value);
    }
    /** Move this job to the dead set. */
    async kill() {
        await this.parent.killValue(this.value);
    }
}
304
/**
 * A sorted set of serialized jobs scored by timestamp, with the bulk and
 * per-value operations shared by the scheduled/retry/dead sets.
 */
class JobSet extends SortedSet {
    /** Entries in rank order for the given range, including scores. */
    async entries(start = 0, stop = -1) {
        const conn = await getRedis(this.config);
        const flat = (await conn.sendCommand([
            "ZRANGE",
            this.key,
            String(start),
            String(stop),
            "WITHSCORES",
        ]));
        const out = [];
        for (let i = 0; i < flat.length; i += 2) {
            out.push(new SortedEntry(this, Number(flat[i + 1]), flat[i]));
        }
        return out;
    }
    /**
     * Incrementally iterate entries whose value matches the pattern.
     * A bare substring is wrapped as `*substring*`.
     */
    async *scan(match, count = 100) {
        const conn = await getRedis(this.config);
        const pattern = match.includes("*") ? match : `*${match}*`;
        let cursor = "0";
        do {
            const reply = (await conn.sendCommand([
                "ZSCAN",
                this.key,
                cursor,
                "MATCH",
                pattern,
                "COUNT",
                String(count),
            ]));
            cursor = reply[0];
            const flat = reply[1] ?? [];
            for (let i = 0; i < flat.length; i += 2) {
                yield new SortedEntry(this, Number(flat[i + 1]), flat[i]);
            }
        } while (cursor !== "0");
    }
    // biome-ignore lint/suspicious/useAwait: yield* delegates to an async generator
    async *each() {
        yield* this.scan("*");
    }
    /** Find an entry by jid via a pattern scan; null when absent. */
    async findJob(jid) {
        for await (const candidate of this.scan(jid)) {
            if (candidate.jid === jid) {
                return candidate;
            }
        }
        return null;
    }
    /** Add a payload to the set at the given timestamp. */
    async schedule(timestamp, payload) {
        const conn = await getRedis(this.config);
        await conn.zAdd(this.key, [
            { score: timestamp, value: dumpJson(payload) },
        ]);
    }
    /** Drain the set, re-enqueueing every job (decrementing retry_count). */
    async retryAll() {
        const client = new Client({ config: this.config });
        await this.popEach(async (raw) => {
            const job = loadJson(raw);
            if (typeof job.retry_count === "number") {
                job.retry_count -= 1;
            }
            await client.push(job);
        });
    }
    /** Drain the set into the dead set, trimming it once at the end. */
    async killAll() {
        const graveyard = new DeadSet(this.config);
        await this.popEach(async (raw) => {
            await graveyard.kill(raw, { trim: false });
        });
        await graveyard.trim();
    }
    async deleteByValue(value) {
        const conn = await getRedis(this.config);
        await conn.zRem(this.key, value);
    }
    async rescheduleValue(value, at) {
        const conn = await getRedis(this.config);
        await conn.zAdd(this.key, [{ score: at, value }]);
    }
    /** Remove the value and push it onto its queue (no-op if already gone). */
    async addToQueueValue(value) {
        const conn = await getRedis(this.config);
        const removed = await conn.zRem(this.key, value);
        if (removed === 0) {
            return;
        }
        const client = new Client({ config: this.config });
        await client.push(loadJson(value));
    }
    /** Remove the value and re-enqueue it, decrementing retry_count. */
    async retryValue(value) {
        const conn = await getRedis(this.config);
        const removed = await conn.zRem(this.key, value);
        if (removed === 0) {
            return;
        }
        const job = loadJson(value);
        if (typeof job.retry_count === "number") {
            job.retry_count -= 1;
        }
        const client = new Client({ config: this.config });
        await client.push(job);
    }
    /** Remove the value and move it to the dead set (no-op if already gone). */
    async killValue(value) {
        const conn = await getRedis(this.config);
        const removed = await conn.zRem(this.key, value);
        if (removed === 0) {
            return;
        }
        await new DeadSet(this.config).kill(value);
    }
    /** Pop members lowest-score-first, invoking fn on each until empty. */
    async popEach(fn) {
        const conn = await getRedis(this.config);
        while (true) {
            const popped = (await conn.sendCommand([
                "ZPOPMIN",
                this.key,
                "1",
            ]));
            if (!popped || popped.length === 0) {
                break;
            }
            await fn(popped[0]);
        }
    }
}
436
/** Jobs scheduled to run in the future (the `schedule` sorted set). */
export class ScheduledSet extends JobSet {
    constructor(config) {
        super("schedule", config);
    }
}
441
/** Jobs awaiting another retry attempt (the `retry` sorted set). */
export class RetrySet extends JobSet {
    constructor(config) {
        super("retry", config);
    }
}
446
/** Jobs that exhausted their retries (the `dead` sorted set). */
export class DeadSet extends JobSet {
    constructor(config) {
        super("dead", config);
    }
    /**
     * Add a job to the dead set, scored by the current time.
     * Accepts either a serialized string or a payload object.
     * Trims the set afterwards unless `options.trim` is false.
     */
    async kill(payload, options = {}) {
        const conn = await getRedis(this.config);
        const serialized = typeof payload === "string" ? payload : dumpJson(payload);
        await conn.zAdd(this.key, [{ score: Date.now() / 1000, value: serialized }]);
        if (options.trim !== false) {
            await this.trim();
        }
    }
    /** Drop entries older than the dead timeout and enforce the max size. */
    async trim() {
        const conn = await getRedis(this.config);
        const maxJobs = this.config?.deadMaxJobs ?? Sidekiq.defaultConfiguration.deadMaxJobs;
        const timeout = this.config?.deadTimeoutInSeconds ??
            Sidekiq.defaultConfiguration.deadTimeoutInSeconds;
        const cutoff = Date.now() / 1000 - timeout;
        const batch = conn.multi();
        batch.zRemRangeByScore(this.key, 0, cutoff);
        batch.zRemRangeByRank(this.key, 0, -maxJobs);
        await batch.exec();
    }
}
471
/**
 * The set of live Sidekiq process records: the `processes` Redis set with
 * one heartbeat hash per process.
 */
export class ProcessSet {
    config;
    constructor(config) {
        this.config = config;
    }
    /** Load a single process entry by identity; null when unknown. */
    static async get(identity, config) {
        const conn = await getRedis(config);
        const batch = conn.multi();
        batch.sIsMember("processes", identity);
        batch.hGetAll(identity);
        const replies = await batch.exec();
        const isMember = Number(replies?.[0] ?? 0);
        const hash = (replies?.[1] ?? {});
        if (isMember === 0 || Object.keys(hash).length === 0) {
            return null;
        }
        const info = hash.info
            ? loadJson(hash.info)
            : {};
        return {
            identity,
            info,
            busy: Number(hash.busy ?? 0),
            beat: Number(hash.beat ?? 0),
            quiet: hash.quiet === "true",
            rtt_us: Number(hash.rtt_us ?? 0),
            rss: Number(hash.rss ?? 0),
        };
    }
    /** All process entries, one per registered identity. */
    async entries() {
        const conn = await getRedis(this.config);
        const identities = await conn.sMembers("processes");
        if (identities.length === 0) {
            return [];
        }
        const batch = conn.multi();
        identities.forEach((identity) => batch.hGetAll(identity));
        const replies = await batch.exec();
        return identities.map((identity, i) => {
            const hash = (replies?.[i] ?? {});
            const info = hash.info
                ? loadJson(hash.info)
                : {};
            return {
                identity,
                info,
                busy: Number(hash.busy ?? 0),
                beat: Number(hash.beat ?? 0),
                quiet: hash.quiet === "true",
                rtt_us: Number(hash.rtt_us ?? 0),
                rss: Number(hash.rss ?? 0),
            };
        });
    }
    /** Number of registered processes. */
    async size() {
        const conn = await getRedis(this.config);
        return Number(await conn.sCard("processes"));
    }
    /** Sum of configured concurrency across all processes. */
    async totalConcurrency() {
        const all = await this.entries();
        return all.reduce((sum, entry) => sum + Number(entry.info.concurrency ?? 0), 0);
    }
    /** Sum of RSS across all processes. */
    async totalRss() {
        const all = await this.entries();
        return all.reduce((sum, entry) => sum + Number(entry.rss ?? 0), 0);
    }
    /**
     * Prune process identities whose heartbeat hash is gone (expired).
     * Guarded by a 60-second NX lock so only one cleaner runs at a time.
     * Returns the number of identities removed.
     */
    async cleanup() {
        const conn = await getRedis(this.config);
        const lock = await conn.set("process_cleanup", "1", { NX: true, EX: 60 });
        if (lock !== "OK") {
            return 0;
        }
        const identities = await conn.sMembers("processes");
        if (identities.length === 0) {
            return 0;
        }
        const batch = conn.multi();
        identities.forEach((identity) => batch.hGet(identity, "info"));
        const replies = await batch.exec();
        const stale = identities.filter((_, i) => !replies?.[i]);
        if (stale.length === 0) {
            return 0;
        }
        await conn.sRem("processes", stale);
        return stale.length;
    }
}
562
/**
 * Represents a single Sidekiq process and allows remote signaling.
 */
export class Process {
    config;
    identity;
    info;
    busy;
    beat;
    quiet;
    rtt_us;
    rss;
    constructor(entry, config) {
        this.config = config;
        this.identity = entry.identity;
        this.info = entry.info;
        this.busy = entry.busy;
        this.beat = entry.beat;
        this.quiet = entry.quiet;
        this.rtt_us = entry.rtt_us;
        this.rss = entry.rss;
    }
    /** Optional human-readable tag from the process info blob. */
    get tag() {
        return this.info.tag;
    }
    /** Labels attached to the process; empty array when none. */
    get labels() {
        return this.info.labels ?? [];
    }
    /** Queues this process works; empty array when unknown. */
    get queues() {
        return this.info.queues ?? [];
    }
    /** Version string reported by the process. */
    get version() {
        return this.info.version;
    }
    /** True when the process is embedded in another application. */
    get embedded() {
        return this.info.embedded === true;
    }
    /**
     * Whether this process is quiet or shutting down.
     */
    get stopping() {
        return this.quiet;
    }
    /**
     * Signal this process to stop processing new jobs. It keeps executing
     * jobs it has already fetched. Asynchronous: it can take 5-10 seconds
     * for the process to quiet.
     */
    async quietProcess() {
        if (this.embedded) {
            throw new Error("Can't quiet an embedded process");
        }
        await this.signal("TSTP");
    }
    /**
     * Signal this process to shut down within its configured timeout
     * (default 25 seconds). Asynchronous: it can take 5-10 seconds for
     * the shutdown to begin.
     */
    async stopProcess() {
        if (this.embedded) {
            throw new Error("Can't stop an embedded process");
        }
        await this.signal("TERM");
    }
    /**
     * Signal this process to log its current worker state — useful for
     * debugging frozen or deadlocked processes. Asynchronous; it can take
     * 5-10 seconds.
     */
    async dumpThreads() {
        await this.signal("TTIN");
    }
    /** Push a signal onto the process's signal list with a 60s TTL. */
    async signal(sig) {
        const conn = await getRedis(this.config);
        const listKey = `${this.identity}-signals`;
        const batch = conn.multi();
        batch.lPush(listKey, sig);
        batch.expire(listKey, 60);
        await batch.exec();
    }
}
646
/**
 * Enumerates in-progress work units across every Sidekiq process.
 */
export class Workers {
    config;
    constructor(config) {
        this.config = config;
    }
    /**
     * One entry per busy thread: process identity, thread id, queue,
     * parsed job payload and run_at timestamp.
     */
    async entries() {
        const conn = await getRedis(this.config);
        const identities = await conn.sMembers("processes");
        if (identities.length === 0) {
            return [];
        }
        const batch = conn.multi();
        identities.forEach((identity) => batch.hGetAll(`${identity}:work`));
        const replies = await batch.exec();
        const out = [];
        identities.forEach((identity, i) => {
            const work = (replies?.[i] ?? {});
            for (const [thread, raw] of Object.entries(work)) {
                const unit = loadJson(raw);
                out.push({
                    process: identity,
                    thread,
                    queue: unit.queue,
                    payload: loadJson(unit.payload),
                    run_at: unit.run_at,
                });
            }
        });
        return out;
    }
}
@@ -0,0 +1,3 @@
1
+ export declare const extractBacktrace: (error: Error) => string[];
2
+ export declare const compressBacktrace: (lines: string[]) => string;
3
+ //# sourceMappingURL=backtrace.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"backtrace.d.ts","sourceRoot":"","sources":["../src/backtrace.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,gBAAgB,GAAI,OAAO,KAAK,KAAG,MAAM,EASrD,CAAC;AAEF,eAAO,MAAM,iBAAiB,GAAI,OAAO,MAAM,EAAE,KAAG,MAInD,CAAC"}
@@ -0,0 +1,16 @@
1
+ import { deflateSync } from "node:zlib";
2
/**
 * Turn an Error's stack into an array of trimmed frame lines, dropping the
 * leading "Name: message" line when present.
 */
export const extractBacktrace = (error) => {
    const stack = error.stack;
    if (!stack) {
        return [];
    }
    const frames = stack.split("\n").map((frame) => frame.trim());
    return frames.length > 0 && frames[0].startsWith(error.name)
        ? frames.slice(1)
        : frames;
};
12
/**
 * JSON-encode the backtrace lines, deflate them and return base64 —
 * a compact form suitable for storage in a job payload.
 */
export const compressBacktrace = (lines) => deflateSync(JSON.stringify(lines)).toString("base64");