queasy 0.3.0 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. package/.github/workflows/check.yml +3 -0
  2. package/.github/workflows/publish.yml +3 -0
  3. package/CLAUDE.md +5 -4
  4. package/biome.json +5 -1
  5. package/dist/client.d.ts +33 -0
  6. package/dist/client.d.ts.map +1 -0
  7. package/dist/client.js +199 -0
  8. package/dist/client.js.map +1 -0
  9. package/dist/constants.d.ts +10 -0
  10. package/dist/constants.d.ts.map +1 -0
  11. package/{src → dist}/constants.js +1 -9
  12. package/dist/constants.js.map +1 -0
  13. package/dist/errors.d.ts +7 -0
  14. package/dist/errors.d.ts.map +1 -0
  15. package/{src → dist}/errors.js +1 -13
  16. package/dist/errors.js.map +1 -0
  17. package/dist/index.d.ts +3 -0
  18. package/dist/index.d.ts.map +1 -0
  19. package/dist/index.js +3 -0
  20. package/dist/index.js.map +1 -0
  21. package/dist/manager.d.ts +19 -0
  22. package/dist/manager.d.ts.map +1 -0
  23. package/dist/manager.js +67 -0
  24. package/dist/manager.js.map +1 -0
  25. package/dist/pool.d.ts +29 -0
  26. package/dist/pool.d.ts.map +1 -0
  27. package/{src → dist}/pool.js +23 -82
  28. package/dist/pool.js.map +1 -0
  29. package/dist/queasy.lua +390 -0
  30. package/dist/queue.d.ts +22 -0
  31. package/dist/queue.d.ts.map +1 -0
  32. package/dist/queue.js +81 -0
  33. package/dist/queue.js.map +1 -0
  34. package/dist/types.d.ts +92 -0
  35. package/dist/types.d.ts.map +1 -0
  36. package/dist/types.js +2 -0
  37. package/dist/types.js.map +1 -0
  38. package/dist/utils.d.ts +4 -0
  39. package/dist/utils.d.ts.map +1 -0
  40. package/dist/utils.js +24 -0
  41. package/dist/utils.js.map +1 -0
  42. package/dist/worker.d.ts +2 -0
  43. package/dist/worker.d.ts.map +1 -0
  44. package/dist/worker.js +42 -0
  45. package/dist/worker.js.map +1 -0
  46. package/fuzztest/{fuzz.js → fuzz.ts} +53 -51
  47. package/fuzztest/handlers/{cascade-a.js → cascade-a.ts} +11 -15
  48. package/fuzztest/handlers/{cascade-b.js → cascade-b.ts} +8 -9
  49. package/fuzztest/handlers/{fail-handler.js → fail-handler.ts} +7 -12
  50. package/fuzztest/handlers/{periodic.js → periodic.ts} +11 -15
  51. package/fuzztest/{process.js → process.ts} +15 -15
  52. package/fuzztest/shared/{chaos.js → chaos.ts} +5 -4
  53. package/fuzztest/shared/{stream.js → stream.ts} +7 -7
  54. package/package.json +7 -5
  55. package/src/{client.js → client.ts} +86 -128
  56. package/src/constants.ts +33 -0
  57. package/src/errors.ts +13 -0
  58. package/src/index.ts +2 -0
  59. package/src/manager.ts +78 -0
  60. package/src/pool.ts +129 -0
  61. package/src/queue.ts +108 -0
  62. package/src/types.ts +1 -0
  63. package/src/{utils.js → utils.ts} +3 -20
  64. package/src/{worker.js → worker.ts} +5 -12
  65. package/test/{client.test.js → client.test.ts} +6 -7
  66. package/test/{errors.test.js → errors.test.ts} +1 -1
  67. package/test/fixtures/always-fail-handler.ts +5 -0
  68. package/test/fixtures/data-logger-handler.ts +11 -0
  69. package/test/fixtures/failure-handler.ts +6 -0
  70. package/test/fixtures/permanent-error-handler.ts +6 -0
  71. package/test/fixtures/slow-handler.ts +6 -0
  72. package/test/fixtures/success-handler.js +0 -5
  73. package/test/fixtures/success-handler.ts +6 -0
  74. package/test/fixtures/with-failure-handler.ts +5 -0
  75. package/test/{guards.test.js → guards.test.ts} +9 -12
  76. package/test/{manager.test.js → manager.test.ts} +23 -33
  77. package/test/{pool.test.js → pool.test.ts} +10 -14
  78. package/test/{queue.test.js → queue.test.ts} +16 -17
  79. package/test/{redis-functions.test.js → redis-functions.test.ts} +14 -20
  80. package/test/{utils.test.js → utils.test.ts} +1 -1
  81. package/tsconfig.json +20 -0
  82. package/jsconfig.json +0 -17
  83. package/src/index.js +0 -2
  84. package/src/manager.js +0 -94
  85. package/src/queue.js +0 -154
  86. package/test/fixtures/always-fail-handler.js +0 -8
  87. package/test/fixtures/data-logger-handler.js +0 -19
  88. package/test/fixtures/failure-handler.js +0 -9
  89. package/test/fixtures/permanent-error-handler.js +0 -10
  90. package/test/fixtures/slow-handler.js +0 -9
  91. package/test/fixtures/with-failure-handler.js +0 -8
  92. package/test/fixtures/{no-handle-handler.js → no-handle-handler.ts} +0 -0
@@ -0,0 +1,92 @@
1
// Generated declaration file for src/types.ts — regenerate via the build
// rather than editing by hand (the sibling .d.ts.map points back to source).

import type { RedisClientOptions, RedisClusterOptions } from 'redis';

/** Connection options accepted for a single Redis node. */
type SingleNodeOptions = Pick<RedisClientOptions, 'url' | 'socket' | 'username' | 'password' | 'database'>;

/** Either a single-node connection or a cluster description (root nodes + defaults). */
export type RedisOptions = SingleNodeOptions | {
    rootNodes: SingleNodeOptions[];
    defaults?: Partial<SingleNodeOptions>;
    nodeAddressMap?: RedisClusterOptions['nodeAddressMap'];
};
/**
 * Core job identification and data
 */
export interface JobCoreOptions {
    /** Job ID (auto-generated if not provided) */
    id?: string;
    /** Job data (any JSON-serializable value) */
    data?: any;
    /** Wall clock timestamp (ms) before which job must not run */
    runAt?: number;
}
/**
 * Update behavior flags
 */
export interface JobUpdateOptions {
    /** Whether to replace data of waiting job with same ID */
    updateData?: boolean;
    /** How to update runAt */
    updateRunAt?: boolean | 'if_later' | 'if_earlier';
    /** Whether to reset retry_count and stall_count to 0 */
    resetCounts?: boolean;
}
/**
 * Complete options accepted by dispatch()
 */
export type JobOptions = JobCoreOptions & JobUpdateOptions;
/**
 * Job runtime state
 */
export interface JobState {
    /** Number of times this job has been retried */
    retryCount: number;
    /** Number of times this job has stalled */
    stallCount: number;
}
/**
 * Complete job representation passed to handlers
 */
export type Job = Required<JobCoreOptions> & JobState;
/**
 * Handler options
 */
export interface HandlerOptions {
    /** Maximum number of retries before permanent failure */
    maxRetries?: number;
    /** Maximum number of stalls before permanent failure */
    maxStalls?: number;
    /** Minimum backoff in milliseconds */
    minBackoff?: number;
    /** Maximum backoff in milliseconds */
    maxBackoff?: number;
    /** Size of the job (as a percent of total worker capacity) */
    size?: number;
    /** Maximum processing duration before considering stalled */
    timeout?: number;
    /** Priority of this queue (vs other queues) */
    priority?: number;
}
/**
 * Options for listen() - queue-level retry strategy
 */
export interface ListenOptions extends HandlerOptions {
    /** Path to failure handler module (optional) */
    failHandler?: string;
    /** Retry options of the fail job */
    failRetryOptions?: HandlerOptions;
}
/** Request sent to a worker thread: run `job` with the handler at `handlerPath`. */
export type ExecMessage = {
    op: 'exec';
    queue: string;
    handlerPath: string;
    job: Job;
};
/** Result sent back from a worker thread; `error` is absent on success. */
export type DoneMessage = {
    op: 'done';
    jobId: string;
    error?: {
        name: string;
        message: string;
        retryAt?: number;
        kind?: 'retriable' | 'permanent' | 'stall';
    };
};
export {};
//# sourceMappingURL=types.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,kBAAkB,EAAE,mBAAmB,EAAE,MAAM,OAAO,CAAC;AAErE,KAAK,iBAAiB,GAAG,IAAI,CACzB,kBAAkB,EAClB,KAAK,GAAG,QAAQ,GAAG,UAAU,GAAG,UAAU,GAAG,UAAU,CAC1D,CAAC;AAEF,MAAM,MAAM,YAAY,GAClB,iBAAiB,GACjB;IACI,SAAS,EAAE,iBAAiB,EAAE,CAAC;IAC/B,QAAQ,CAAC,EAAE,OAAO,CAAC,iBAAiB,CAAC,CAAC;IACtC,cAAc,CAAC,EAAE,mBAAmB,CAAC,gBAAgB,CAAC,CAAC;CAC1D,CAAC;AAER;;GAEG;AACH,MAAM,WAAW,cAAc;IAC3B,8CAA8C;IAC9C,EAAE,CAAC,EAAE,MAAM,CAAC;IACZ,6CAA6C;IAE7C,IAAI,CAAC,EAAE,GAAG,CAAC;IACX,8DAA8D;IAC9D,KAAK,CAAC,EAAE,MAAM,CAAC;CAClB;AAED;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC7B,0DAA0D;IAC1D,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB,0BAA0B;IAC1B,WAAW,CAAC,EAAE,OAAO,GAAG,UAAU,GAAG,YAAY,CAAC;IAClD,wDAAwD;IACxD,WAAW,CAAC,EAAE,OAAO,CAAC;CACzB;AAED;;GAEG;AACH,MAAM,MAAM,UAAU,GAAG,cAAc,GAAG,gBAAgB,CAAC;AAE3D;;GAEG;AACH,MAAM,WAAW,QAAQ;IACrB,gDAAgD;IAChD,UAAU,EAAE,MAAM,CAAC;IACnB,2CAA2C;IAC3C,UAAU,EAAE,MAAM,CAAC;CACtB;AAED;;GAEG;AACH,MAAM,MAAM,GAAG,GAAG,QAAQ,CAAC,cAAc,CAAC,GAAG,QAAQ,CAAC;AAEtD;;GAEG;AACH,MAAM,WAAW,cAAc;IAC3B,yDAAyD;IACzD,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,wDAAwD;IACxD,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,sCAAsC;IACtC,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,sCAAsC;IACtC,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,8DAA8D;IAC9D,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,6DAA6D;IAC7D,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,+CAA+C;IAC/C,QAAQ,CAAC,EAAE,MAAM,CAAC;CACrB;AAED;;GAEG;AACH,MAAM,WAAW,aAAc,SAAQ,cAAc;IACjD,gDAAgD;IAChD,WAAW,CAAC,EAAE,MAAM,CAAC;IAErB,oCAAoC;IACpC,gBAAgB,CAAC,EAAE,cAAc,CAAC;CACrC;AAED,MAAM,MAAM,WAAW,GAAG;IACtB,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,MAAM,CAAC;IACd,WAAW,EAAE,MAAM,CAAC;IACpB,GAAG,EAAE,GAAG,CAAC;CACZ,CAAC;AAEF,MAAM,MAAM,WAAW,GAAG;IACtB,EAAE,EAAE,MAAM,CAAC;IACX,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,CAAC,EAAE;QACJ,IAAI,EAAE,MAAM,CAAC;QACb,OAAO,EAAE,MAAM,CAAC;QAChB,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,IAAI,CAAC,EAAE,WAAW,GAAG,WAAW,GAAG,OAAO,CAAC;KAC9C,CAAC;CACL,CAAC"}
package/dist/types.js ADDED
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=types.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"types.js","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":""}
@@ -0,0 +1,4 @@
1
// Generated declarations for src/utils.ts — regenerate via the build.

/** Generates a random alphanumeric identifier (default 20 characters). */
export declare function generateId(length?: number): string;
/** Parses a dotted version string into numeric parts; returns [0] when any part is not numeric. */
export declare function parseVersion(version: string | null | undefined): number[];
/** Compares two parsed version arrays component-by-component. */
export declare function compareSemver(a: number[], b: number[]): -1 | 0 | 1;
//# sourceMappingURL=utils.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":"AAAA,wBAAgB,UAAU,CAAC,MAAM,SAAK,GAAG,MAAM,CAO9C;AAED,wBAAgB,YAAY,CAAC,OAAO,EAAE,MAAM,GAAG,IAAI,GAAG,SAAS,GAAG,MAAM,EAAE,CAIzE;AAED,wBAAgB,aAAa,CAAC,CAAC,EAAE,MAAM,EAAE,EAAE,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAMlE"}
package/dist/utils.js ADDED
@@ -0,0 +1,24 @@
1
/**
 * Generate a random alphanumeric identifier.
 *
 * Uses Math.random(), so the result is unique-ish but NOT cryptographically
 * secure — suitable for job IDs, not for secrets.
 *
 * @param {number} [length=20] Number of characters to produce.
 * @returns {string} Random string drawn from [A-Za-z0-9].
 */
export function generateId(length = 20) {
    const alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
    return Array.from(
        { length },
        () => alphabet[Math.floor(Math.random() * alphabet.length)]
    ).join('');
}
9
/**
 * Parse a dotted version string (e.g. "0.3.1") into numeric components.
 *
 * @param {string|null|undefined} version Version string to parse.
 * @returns {number[]} Numeric parts, or [0] when any part fails to parse
 *   (including null/undefined input, which stringifies to a non-number).
 */
export function parseVersion(version) {
    const parts = String(version).split('.').map(Number);
    return parts.every((part) => !Number.isNaN(part)) ? parts : [0];
}
15
/**
 * Compare two parsed version arrays.
 *
 * Compares component-by-component; when one array is a strict prefix of the
 * other, the shorter one sorts first (so [1,2] < [1,2,0]).
 * NOTE(review): strict SemVer would treat "1.2" and "1.2.0" as equal — confirm
 * the prefix tie-break is intentional for version gating.
 *
 * @param {number[]} a First parsed version.
 * @param {number[]} b Second parsed version.
 * @returns {-1|0|1} -1 when a < b, 1 when a > b, 0 when identical.
 */
export function compareSemver(a, b) {
    const shared = Math.min(a.length, b.length);
    for (let idx = 0; idx < shared; idx++) {
        if (a[idx] < b[idx]) return -1;
        if (a[idx] > b[idx]) return 1;
    }
    if (a.length === b.length) return 0;
    return a.length < b.length ? -1 : 1;
}
24
+ //# sourceMappingURL=utils.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"utils.js","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":"AAAA,MAAM,UAAU,UAAU,CAAC,MAAM,GAAG,EAAE;IAClC,MAAM,KAAK,GAAG,gEAAgE,CAAC;IAC/E,IAAI,EAAE,GAAG,EAAE,CAAC;IACZ,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QAC9B,EAAE,IAAI,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC;IACjE,CAAC;IACD,OAAO,EAAE,CAAC;AACd,CAAC;AAED,MAAM,UAAU,YAAY,CAAC,OAAkC;IAC3D,MAAM,MAAM,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;IACtD,IAAI,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;QAAE,OAAO,CAAC,CAAC,CAAC,CAAC;IACpD,OAAO,MAAM,CAAC;AAClB,CAAC;AAED,MAAM,UAAU,aAAa,CAAC,CAAW,EAAE,CAAW;IAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QACpD,IAAI,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;YAAE,OAAO,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IACnD,CAAC;IACD,IAAI,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,MAAM;QAAE,OAAO,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAC/D,OAAO,CAAC,CAAC;AACb,CAAC"}
@@ -0,0 +1,2 @@
1
+ export {};
2
+ //# sourceMappingURL=worker.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"worker.d.ts","sourceRoot":"","sources":["../src/worker.ts"],"names":[],"mappings":""}
package/dist/worker.js ADDED
@@ -0,0 +1,42 @@
1
// TypeScript emit helper for the `rewriteRelativeImportExtensions` compiler
// option. Rewrites *relative* specifiers ("./x.ts", "../x.mts", "./x.tsx") to
// their runtime extensions (".js", ".mjs"/".cjs", ".jsx" or ".js"). Anything
// non-relative, and declaration-style specifiers ("*.d.ts"), pass through
// unchanged. Reuses a helper already installed on `this`, if present.
var __rewriteRelativeImportExtension = (this && this.__rewriteRelativeImportExtension) || function (path, preserveJsx) {
    if (typeof path === "string" && /^\.\.?\//.test(path)) {
        // Capture groups: tsx → ".tsx" suffix; d → optional ".d"; ext → an
        // optional extra extension segment; cm → the "c"/"m" of ".cts"/".mts".
        return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) {
            // ".d.ts"-style matches keep the original text (`m`); all other
            // matches are rebuilt with a "js" ending, preserving c/m.
            return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : (d + ext + "." + cm.toLowerCase() + "js");
        });
    }
    return path;
};
9
import { pathToFileURL } from 'node:url';
import { parentPort, setEnvironmentData } from 'node:worker_threads';
import { PermanentError } from "./errors.js";

// Worker-thread entry point: receives job requests from the parent thread
// over `parentPort`, dynamically imports the handler module, runs its
// `handle` export, and reports the outcome back as a 'done' message.
if (!parentPort)
    throw new Error('Worker cannot be executed directly.');
// Flag readable via getEnvironmentData() so other code can detect it is
// running inside a queasy worker thread.
setEnvironmentData('queasy_worker_context', true);
parentPort.on('message', async (msg) => {
    const { handlerPath, job } = msg;
    try {
        // NOTE: the extension-rewrite helper only touches relative ("./", "../")
        // specifiers, so it is effectively a no-op on the absolute file:// URL
        // built here; it is emitted by tsc's rewriteRelativeImportExtensions.
        const mod = await import(__rewriteRelativeImportExtension(pathToFileURL(handlerPath).href));
        if (typeof mod.handle !== 'function') {
            throw new Error(`Unable to load handler ${handlerPath}`);
        }
        await mod.handle(job.data, job);
        // Success: no `error` field on the done message.
        send({ op: 'done', jobId: job.id });
    }
    catch (err) {
        // Assumes err is Error-like; `retryAt` looks like an extra field set by
        // the project's retriable error types — confirm against src/errors.ts.
        const { message, name, retryAt } = err;
        send({
            op: 'done',
            jobId: job.id,
            error: {
                name,
                message,
                retryAt,
                // PermanentError instances are marked 'permanent'; every other
                // thrown error is reported as 'retriable'.
                kind: err instanceof PermanentError ? 'permanent' : 'retriable',
            },
        });
    }
});

// Post a message back to the parent thread. parentPort is guaranteed non-null
// by the guard above; the optional chain only satisfies the type checker.
function send(message) {
    parentPort?.postMessage(message);
}
//# sourceMappingURL=worker.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"worker.js","sourceRoot":"","sources":["../src/worker.ts"],"names":[],"mappings":";;;;;;;;AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,UAAU,CAAC;AACzC,OAAO,EAAE,UAAU,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AACrE,OAAO,EAAE,cAAc,EAAE,MAAM,aAAa,CAAC;AAG7C,IAAI,CAAC,UAAU;IAAE,MAAM,IAAI,KAAK,CAAC,qCAAqC,CAAC,CAAC;AACxE,kBAAkB,CAAC,uBAAuB,EAAE,IAAI,CAAC,CAAC;AAElD,UAAU,CAAC,EAAE,CAAC,SAAS,EAAE,KAAK,EAAE,GAAgB,EAAE,EAAE;IAChD,MAAM,EAAE,WAAW,EAAE,GAAG,EAAE,GAAG,GAAG,CAAC;IACjC,IAAI,CAAC;QACD,MAAM,GAAG,GAAG,MAAM,MAAM,kCAAC,aAAa,CAAC,WAAW,CAAC,CAAC,IAAI,EAAC,CAAC;QAC1D,IAAI,OAAO,GAAG,CAAC,MAAM,KAAK,UAAU,EAAE,CAAC;YACnC,MAAM,IAAI,KAAK,CAAC,0BAA0B,WAAW,EAAE,CAAC,CAAC;QAC7D,CAAC;QAED,MAAM,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;QAChC,IAAI,CAAC,EAAE,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,GAAG,CAAC,EAAE,EAAE,CAAC,CAAC;IACxC,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACX,MAAM,EAAE,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,GAAmC,CAAC;QAEvE,IAAI,CAAC;YACD,EAAE,EAAE,MAAM;YACV,KAAK,EAAE,GAAG,CAAC,EAAE;YACb,KAAK,EAAE;gBACH,IAAI;gBACJ,OAAO;gBACP,OAAO;gBACP,IAAI,EAAE,GAAG,YAAY,cAAc,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,WAAW;aAClE;SACJ,CAAC,CAAC;IACP,CAAC;AACL,CAAC,CAAC,CAAC;AAEH,SAAS,IAAI,CAAC,OAAoB;IAC9B,UAAU,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;AACrC,CAAC"}
@@ -1,7 +1,7 @@
1
1
  /**
2
2
  * Fuzz test orchestrator.
3
3
  *
4
- * - Spawns NUM_PROCESSES child processes, each running fuzztest/process.js
4
+ * - Spawns NUM_PROCESSES child processes, each running fuzztest/process.ts
5
5
  * - Dispatches seed periodic jobs at startup
6
6
  * - Reads events from the fuzz:events Redis stream
7
7
  * - Checks system invariants after each event
@@ -10,12 +10,14 @@
10
10
  */
11
11
 
12
12
  import { fork } from 'node:child_process';
13
+ import type { ChildProcess } from 'node:child_process';
13
14
  import { createWriteStream } from 'node:fs';
14
15
  import { dirname, join } from 'node:path';
15
16
  import { fileURLToPath } from 'node:url';
16
17
  import { createClient } from 'redis';
17
- import { Client } from '../src/index.js';
18
- import { readEvents, STREAM_KEY } from './shared/stream.js';
18
+ import type { RedisClientType } from 'redis';
19
+ import { Client } from '../src/index.ts';
20
+ import { STREAM_KEY, readEvents } from './shared/stream.ts';
19
21
 
20
22
  const __dirname = dirname(fileURLToPath(import.meta.url));
21
23
 
@@ -35,47 +37,61 @@ const LOG_FILE = join(__dirname, '..', 'fuzz-output.log');
35
37
 
36
38
  const logStream = createWriteStream(LOG_FILE, { flags: 'a' });
37
39
 
38
- function log(level, msg, data = {}) {
39
- const entry = JSON.stringify({ time: new Date().toISOString(), level, msg, data });
40
- if (level === 'error') process.stdout.write(`VIOLATION: ${entry}\n`);
41
- else process.stdout.write(`${entry}\n`);
40
+ function log(level: string, msg: string, data: Record<string, unknown> = {}): void {
41
+ let entry = `${process.uptime().toFixed(2)} ${level.toUpperCase().padEnd(5)}`;
42
+ const { type = '', queue = '', id = '', ...rest } = data;
43
+ entry = `${entry} ${msg.padEnd(10)} ${(type as string).padEnd(10)} ${(queue as string).padEnd(15)} ${(id as string).padEnd(15)} ${JSON.stringify(rest)}`;
44
+ process.stdout.write(`${entry}\n`);
42
45
  logStream.write(`${entry}\n`);
43
46
  }
44
47
 
45
48
  // ── Invariant state ────────────────────────────────────────────────────────────
46
49
 
47
- /** @type {Map<string, {queue: string, startedAt: number, runAt: number, pid: number}>} */
48
- const activeJobs = new Map();
50
+ interface ActiveJob {
51
+ queue: string;
52
+ startedAt: number;
53
+ runAt: number;
54
+ pid: number;
55
+ }
56
+
57
+ interface WaitingJob {
58
+ id: string;
59
+ runAt: number;
60
+ dispatchedAt: number;
61
+ }
49
62
 
50
- /** @type {Set<string>} */
51
- const succeededJobs = new Set();
63
+ const activeJobs = new Map<string, ActiveJob>();
64
+ const succeededJobs = new Set<string>();
52
65
 
53
66
  /**
54
67
  * Per-queue: list of {id, runAt, dispatchedAt} for jobs seen but not yet started.
55
68
  * Used to check priority ordering.
56
- * @type {Map<string, {id: string, runAt: number, dispatchedAt: number}[]>}
57
69
  */
58
- const waitingByQueue = new Map();
70
+ const waitingByQueue = new Map<string, WaitingJob[]>();
59
71
 
60
- /** @type {Map<string, number>} last start event timestamp per queue */
61
- const lastStartPerQueue = new Map();
72
+ /** last start event timestamp per queue */
73
+ const lastStartPerQueue = new Map<string, number>();
62
74
 
63
75
  let violationCount = 0;
64
76
  let eventCount = 0;
65
77
 
66
- function violation(invariant, msg, data = {}) {
78
+ function violation(invariant: string, msg: string, data: Record<string, unknown> = {}): void {
67
79
  violationCount++;
68
80
  log('error', `[${invariant}] ${msg}`, data);
69
81
  }
70
82
 
71
83
  // ── Invariant checks ───────────────────────────────────────────────────────────
72
84
 
85
+ interface IpcDequeueMsg {
86
+ queue: string;
87
+ jobId: string;
88
+ runAt: number;
89
+ }
90
+
73
91
  /**
74
92
  * Called when a child process dequeues a job (via IPC).
75
- * @param {number} pid
76
- * @param {{queue: string, jobId: string, runAt: number}} msg
77
93
  */
78
- function onIpcDequeue(pid, msg) {
94
+ function onIpcJobStart(pid: number, msg: IpcDequeueMsg): void {
79
95
  const { jobId: id, queue, runAt } = msg;
80
96
 
81
97
  // Mutual exclusion: job must not already be active
@@ -92,13 +108,12 @@ function onIpcDequeue(pid, msg) {
92
108
 
93
109
  /**
94
110
  * Called when a child process finishes/retries/fails a job (via IPC).
95
- * @param {string} jobId
96
111
  */
97
- function onIpcJobDone(jobId) {
112
+ function onIpcJobDone(jobId: string): void {
98
113
  activeJobs.delete(jobId);
99
114
  }
100
115
 
101
- function onStart(event) {
116
+ function onStart(event: Record<string, string>): void {
102
117
  const { id, queue, runAt: runAtStr, startedAt: startedAtStr } = event;
103
118
  const runAt = Number(runAtStr);
104
119
  const startedAt = Number(startedAtStr);
@@ -148,12 +163,12 @@ function onStart(event) {
148
163
  if (waitingByQueue.has(queue)) {
149
164
  waitingByQueue.set(
150
165
  queue,
151
- waitingByQueue.get(queue).filter((w) => w.id !== id)
166
+ (waitingByQueue.get(queue) ?? []).filter((w) => w.id !== id)
152
167
  );
153
168
  }
154
169
  }
155
170
 
156
- function onFinish(event) {
171
+ function onFinish(event: Record<string, string>): void {
157
172
  succeededJobs.add(event.id);
158
173
  }
159
174
 
@@ -161,9 +176,8 @@ function onFinish(event) {
161
176
  * Called when a child process exits. Clears all active jobs belonging to that
162
177
  * PID so they don't trigger spurious MutualExclusion violations when the
163
178
  * queasy sweep retries them and a new process picks them up.
164
- * @param {number} pid
165
179
  */
166
- function onProcessExit(pid) {
180
+ function onProcessExit(pid: number): void {
167
181
  for (const [id, entry] of activeJobs) {
168
182
  if (entry.pid === pid) {
169
183
  activeJobs.delete(id);
@@ -174,7 +188,7 @@ function onProcessExit(pid) {
174
188
  /**
175
189
  * Called periodically to check queue progress and priority starvation.
176
190
  */
177
- function checkProgress() {
191
+ function checkProgress(): void {
178
192
  const now = Date.now();
179
193
  for (const [queue, lastStart] of lastStartPerQueue) {
180
194
  const idle = now - lastStart;
@@ -209,7 +223,7 @@ function checkProgress() {
209
223
 
210
224
  // ── Event dispatch ─────────────────────────────────────────────────────────────
211
225
 
212
- function handleEvent(event) {
226
+ function handleEvent(event: Record<string, string>): void {
213
227
  eventCount++;
214
228
  const { type } = event;
215
229
  log('info', 'event', event);
@@ -231,7 +245,7 @@ function handleEvent(event) {
231
245
 
232
246
  // ── Summary ────────────────────────────────────────────────────────────────────
233
247
 
234
- function printSummary() {
248
+ function printSummary(): void {
235
249
  const summary = {
236
250
  events: eventCount,
237
251
  violations: violationCount,
@@ -240,26 +254,19 @@ function printSummary() {
240
254
  lastStartPerQueue: Object.fromEntries(lastStartPerQueue),
241
255
  };
242
256
  log('info', 'Summary', summary);
243
- console.log(`\n=== Fuzz Summary ===`);
244
- console.log(` Events processed : ${eventCount}`);
245
- console.log(` Violations found : ${violationCount}`);
246
- console.log(` Active jobs : ${activeJobs.size}`);
247
- console.log(` Succeeded jobs : ${succeededJobs.size}`);
248
- console.log('===================\n');
249
257
  }
250
258
 
251
259
  // ── Child process management ───────────────────────────────────────────────────
252
260
 
253
- /** @type {Set<import('node:child_process').ChildProcess>} */
254
- const processes = new Set();
261
+ const processes = new Set<ChildProcess>();
255
262
 
256
- function spawnProcess() {
257
- const child = fork(join(__dirname, 'process.js'));
263
+ function spawnProcess(): ChildProcess {
264
+ const child = fork(join(__dirname, 'process.ts'));
258
265
  processes.add(child);
259
266
 
260
- child.on('message', (msg) => {
267
+ child.on('message', (msg: { type: string; queue: string; jobId: string; runAt: number }) => {
261
268
  if (msg.type === 'dequeue') {
262
- onIpcDequeue(child.pid, msg);
269
+ onIpcJobStart(child.pid!, msg);
263
270
  } else if (msg.type === 'finish' || msg.type === 'retry' || msg.type === 'fail') {
264
271
  onIpcJobDone(msg.jobId);
265
272
  }
@@ -268,7 +275,7 @@ function spawnProcess() {
268
275
  child.on('exit', (code, signal) => {
269
276
  processes.delete(child);
270
277
  log('info', 'Child process exited', { pid: child.pid, code, signal });
271
- onProcessExit(child.pid);
278
+ onProcessExit(child.pid!);
272
279
  setTimeout(spawnProcess, PROCESS_RESTART_DELAY_MS);
273
280
  });
274
281
 
@@ -279,7 +286,7 @@ function spawnProcess() {
279
286
  return child;
280
287
  }
281
288
 
282
- function killRandomProcess() {
289
+ function killRandomProcess(): void {
283
290
  const list = [...processes];
284
291
  if (list.length === 0) return;
285
292
  const target = list[Math.floor(Math.random() * list.length)];
@@ -289,18 +296,15 @@ function killRandomProcess() {
289
296
 
290
297
  // ── Redis setup ────────────────────────────────────────────────────────────────
291
298
 
292
- const redis = createClient();
293
- const dispatchRedis = createClient();
294
-
299
+ const redis = createClient() as RedisClientType;
295
300
  await redis.connect();
296
- await dispatchRedis.connect();
297
301
 
298
302
  // Clean up state from previous runs
299
303
  await redis.del(STREAM_KEY);
300
304
  log('info', 'Cleared fuzz:events stream from previous run');
301
305
 
302
306
  // Dispatch seed periodic jobs (await ready to avoid Function not found race)
303
- const dispatchClient = await new Promise((resolve) => new Client(dispatchRedis, 0, resolve));
307
+ const dispatchClient = await new Promise<Client>((resolve) => new Client({}, 0, resolve));
304
308
  const periodicQueue = dispatchClient.queue('{fuzz}:periodic', true);
305
309
 
306
310
  for (let i = 0; i < NUM_PERIODIC_JOBS; i++) {
@@ -311,9 +315,7 @@ for (let i = 0; i < NUM_PERIODIC_JOBS; i++) {
311
315
 
312
316
  // ── Spawn child processes ──────────────────────────────────────────────────────
313
317
 
314
- for (let i = 0; i < NUM_PROCESSES; i++) {
315
- spawnProcess();
316
- }
318
+ for (let i = 0; i < NUM_PROCESSES; i++) spawnProcess();
317
319
 
318
320
  // Periodically kill a random process to simulate crashes
319
321
  const crashTimer = setInterval(killRandomProcess, CRASH_INTERVAL_MS);
@@ -5,27 +5,23 @@
5
5
 
6
6
  import { BroadcastChannel } from 'node:worker_threads';
7
7
  import { createClient } from 'redis';
8
- import { Client, PermanentError } from '../../src/index.js';
9
- import { pickChaos } from '../shared/chaos.js';
10
- import { emitEvent } from '../shared/stream.js';
8
+ import type { RedisClientType } from 'redis';
9
+ import { Client, PermanentError } from '../../src/index.ts';
10
+ import type { Job } from '../../src/types.ts';
11
+ import { pickChaos } from '../shared/chaos.ts';
12
+ import { emitEvent } from '../shared/stream.ts';
11
13
 
12
- const redis = createClient();
13
- const eventRedis = createClient();
14
-
15
- await redis.connect();
14
+ const eventRedis = createClient() as RedisClientType;
16
15
  await eventRedis.connect();
17
16
 
18
17
  // Dispatch-only queasy client (await ready to avoid Function not found race)
19
- const client = await new Promise((resolve) => new Client(redis, 0, resolve));
18
+ const client = await new Promise<Client>((resolve) => new Client({}, 0, resolve));
20
19
  const cascadeBQueue = client.queue('{fuzz}:cascade-b', true);
21
20
 
22
21
  const crashChannel = new BroadcastChannel('fuzz-crash');
23
22
 
24
- /**
25
- * @param {any} data
26
- * @param {import('../../src/types.js').Job} job
27
- */
28
- export async function handle(_data, job) {
23
+ // biome-ignore lint/suspicious/noExplicitAny: Job data is arbitrary
24
+ export async function handle(_data: any, job: Job): Promise<void> {
29
25
  const startedAt = Date.now();
30
26
  await emitEvent(eventRedis, {
31
27
  type: 'start',
@@ -71,7 +67,7 @@ export async function handle(_data, job) {
71
67
  // Normal completion: dispatch 1-2 cascade-b jobs
72
68
  const count = Math.random() < 0.5 ? 1 : 2;
73
69
  const runAtOffset = Math.random() * 2000;
74
- const dispatchPromises = [];
70
+ const dispatchPromises: Promise<string>[] = [];
75
71
  for (let i = 0; i < count; i++) {
76
72
  dispatchPromises.push(
77
73
  cascadeBQueue.dispatch(
@@ -88,7 +84,7 @@ export async function handle(_data, job) {
88
84
  type: 'finish',
89
85
  queue: '{fuzz}:cascade-a',
90
86
  id: job.id,
91
- finishedAt: String(Date.now()),
87
+ finishedAt: process.uptime().toFixed(2),
92
88
  dispatched: ids.join(','),
93
89
  });
94
90
  }
@@ -5,20 +5,19 @@
5
5
 
6
6
  import { BroadcastChannel } from 'node:worker_threads';
7
7
  import { createClient } from 'redis';
8
- import { PermanentError } from '../../src/index.js';
9
- import { pickChaos } from '../shared/chaos.js';
10
- import { emitEvent } from '../shared/stream.js';
8
+ import type { RedisClientType } from 'redis';
9
+ import { PermanentError } from '../../src/index.ts';
10
+ import type { Job } from '../../src/types.ts';
11
+ import { pickChaos } from '../shared/chaos.ts';
12
+ import { emitEvent } from '../shared/stream.ts';
11
13
 
12
- const eventRedis = createClient();
14
+ const eventRedis = createClient() as RedisClientType;
13
15
  await eventRedis.connect();
14
16
 
15
17
  const crashChannel = new BroadcastChannel('fuzz-crash');
16
18
 
17
- /**
18
- * @param {any} data
19
- * @param {import('../../src/types.js').Job} job
20
- */
21
- export async function handle(_data, job) {
19
+ // biome-ignore lint/suspicious/noExplicitAny: Job data is arbitrary
20
+ export async function handle(_data: any, job: Job): Promise<void> {
22
21
  const startedAt = Date.now();
23
22
  await emitEvent(eventRedis, {
24
23
  type: 'start',
@@ -9,26 +9,21 @@
9
9
  */
10
10
 
11
11
  import { createClient } from 'redis';
12
- import { Client } from '../../src/index.js';
13
- import { emitEvent } from '../shared/stream.js';
12
+ import type { RedisClientType } from 'redis';
13
+ import { Client } from '../../src/index.ts';
14
+ import type { Job } from '../../src/types.ts';
15
+ import { emitEvent } from '../shared/stream.ts';
14
16
 
15
- const redis = createClient();
16
- const eventRedis = createClient();
17
-
18
- await redis.connect();
17
+ const eventRedis = createClient() as RedisClientType;
19
18
  await eventRedis.connect();
20
19
 
21
20
  // Dispatch-only queasy client (await ready to avoid Function not found race)
22
- const client = await new Promise((resolve) => new Client(redis, 0, resolve));
21
+ const client = await new Promise<Client>((resolve) => new Client({}, 0, resolve));
23
22
 
24
23
  // Queue references (keys already include braces)
25
24
  const periodicQueue = client.queue('{fuzz}:periodic', true);
26
25
 
27
- /**
28
- * @param {[string, any, {message: string}]} data
29
- * @param {import('../../src/types.js').Job} job
30
- */
31
- export async function handle(data, job) {
26
+ export async function handle(data: [string, unknown, { message: string }], job: Job): Promise<void> {
32
27
  const [originalId, originalData, error] = data;
33
28
 
34
29
  await emitEvent(eventRedis, {
@@ -6,28 +6,24 @@
6
6
 
7
7
  import { BroadcastChannel } from 'node:worker_threads';
8
8
  import { createClient } from 'redis';
9
- import { Client, PermanentError } from '../../src/index.js';
10
- import { pickChaos } from '../shared/chaos.js';
11
- import { emitEvent } from '../shared/stream.js';
9
+ import type { RedisClientType } from 'redis';
10
+ import { Client, PermanentError } from '../../src/index.ts';
11
+ import type { Job } from '../../src/types.ts';
12
+ import { pickChaos } from '../shared/chaos.ts';
13
+ import { emitEvent } from '../shared/stream.ts';
12
14
 
13
- const redis = createClient();
14
- const eventRedis = createClient();
15
-
16
- await redis.connect();
15
+ const eventRedis = createClient() as RedisClientType;
17
16
  await eventRedis.connect();
18
17
 
19
18
  // Dispatch-only queasy client (await ready to avoid Function not found race)
20
- const client = await new Promise((resolve) => new Client(redis, 0, resolve));
19
+ const client = await new Promise<Client>((resolve) => new Client({}, 0, resolve));
21
20
  const periodicQueue = client.queue('{fuzz}:periodic', true);
22
21
  const cascadeAQueue = client.queue('{fuzz}:cascade-a', true);
23
22
 
24
23
  const crashChannel = new BroadcastChannel('fuzz-crash');
25
24
 
26
- /**
27
- * @param {any} data
28
- * @param {import('../../src/types.js').Job} job
29
- */
30
- export async function handle(data, job) {
25
+ // biome-ignore lint/suspicious/noExplicitAny: Job data is arbitrary
26
+ export async function handle(data: any, job: Job): Promise<void> {
31
27
  const startedAt = Date.now();
32
28
  await emitEvent(eventRedis, {
33
29
  type: 'start',
@@ -71,8 +67,8 @@ export async function handle(data, job) {
71
67
  }
72
68
 
73
69
  // Normal completion: dispatch a cascade-a job and re-queue self
74
- const cascadeRunAt = Date.now() + Math.random() * 2000;
75
- const selfDelay = 1000 + Math.random() * 4000;
70
+ const cascadeRunAt = 0;
71
+ const selfDelay = 500;
76
72
 
77
73
  const [cascadeId] = await Promise.all([
78
74
  cascadeAQueue.dispatch({ from: job.id }, { runAt: cascadeRunAt }),