@nicnocquee/dataqueue 1.24.0 → 1.25.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,8 +6,9 @@ import {
6
6
  processJobWithHandlers,
7
7
  } from './processor.js';
8
8
  import * as queue from './queue.js';
9
+ import { PostgresBackend } from './backends/postgres.js';
9
10
  import { createTestDbAndPool, destroyTestDb } from './test-util.js';
10
- import { FailureReason, JobHandler } from './types.js';
11
+ import { FailureReason, JobHandler, JobContext } from './types.js';
11
12
 
12
13
  // Define the payload map for test jobs
13
14
  interface TestPayloadMap {
@@ -21,16 +22,29 @@ interface TestPayloadMap {
21
22
  typeC: { n: number };
22
23
  }
23
24
 
25
+ /**
26
+ * Claims a job by transitioning it to 'processing' status (simulates getNextBatch).
27
+ * Tests that call processJobWithHandlers directly need the job in 'processing' state.
28
+ */
29
+ async function claimJob(p: Pool, jobId: number) {
30
+ await p.query(
31
+ `UPDATE job_queue SET status = 'processing', locked_by = 'test-worker', locked_at = NOW() WHERE id = $1`,
32
+ [jobId],
33
+ );
34
+ }
35
+
24
36
  // Integration tests for processor
25
37
 
26
38
  describe('processor integration', () => {
27
39
  let pool: Pool;
28
40
  let dbName: string;
41
+ let backend: PostgresBackend;
29
42
 
30
43
  beforeEach(async () => {
31
44
  const setup = await createTestDbAndPool();
32
45
  pool = setup.pool;
33
46
  dbName = setup.dbName;
47
+ backend = new PostgresBackend(pool);
34
48
  });
35
49
 
36
50
  afterEach(async () => {
@@ -54,12 +68,17 @@ describe('processor integration', () => {
54
68
  jobType: 'test',
55
69
  payload: { foo: 'bar' },
56
70
  });
57
- const job = await queue.getJob<TestPayloadMap, 'test'>(pool, jobId);
71
+ // Claim the job so it's in 'processing' status
72
+ const [job] = await queue.getNextBatch(pool, 'test-worker', 1);
58
73
  expect(job).not.toBeNull();
59
- await processJobWithHandlers(pool, job!, handlers);
74
+ await processJobWithHandlers(backend, job!, handlers);
60
75
  expect(handler).toHaveBeenCalledWith(
61
76
  { foo: 'bar' },
62
77
  expect.any(AbortSignal),
78
+ expect.objectContaining({
79
+ prolong: expect.any(Function),
80
+ onTimeout: expect.any(Function),
81
+ }),
63
82
  );
64
83
  const completed = await queue.getJob(pool, jobId);
65
84
  expect(completed?.status).toBe('completed');
@@ -83,9 +102,10 @@ describe('processor integration', () => {
83
102
  jobType: 'fail',
84
103
  payload: {},
85
104
  });
105
+ await claimJob(pool, jobId);
86
106
  const job = await queue.getJob<TestPayloadMap, 'fail'>(pool, jobId);
87
107
  expect(job).not.toBeNull();
88
- await processJobWithHandlers(pool, job!, handlers);
108
+ await processJobWithHandlers(backend, job!, handlers);
89
109
  const failed = await queue.getJob(pool, jobId);
90
110
  expect(failed?.status).toBe('failed');
91
111
  expect(failed?.errorHistory?.[0]?.message).toBe('fail!');
@@ -109,10 +129,11 @@ describe('processor integration', () => {
109
129
  jobType: 'missing',
110
130
  payload: {},
111
131
  });
132
+ await claimJob(pool, jobId);
112
133
  const job = await queue.getJob<TestPayloadMap, 'missing'>(pool, jobId);
113
134
  expect(job).not.toBeNull();
114
135
  // @ts-expect-error - test handler is missing
115
- await processJobWithHandlers(pool, job!, handlers);
136
+ await processJobWithHandlers(backend, job!, handlers);
116
137
  const failed = await queue.getJob(pool, jobId);
117
138
  expect(failed?.status).toBe('failed');
118
139
  expect(failed?.errorHistory?.[0]?.message).toContain(
@@ -144,7 +165,7 @@ describe('processor integration', () => {
144
165
  }),
145
166
  ]);
146
167
  const processed = await processBatchWithHandlers(
147
- pool,
168
+ backend,
148
169
  'worker-batch',
149
170
  2,
150
171
  undefined,
@@ -175,7 +196,7 @@ describe('processor integration', () => {
175
196
  jobType: 'proc',
176
197
  payload: { x: 1 },
177
198
  });
178
- const processor = createProcessor(pool, handlers, { pollInterval: 200 });
199
+ const processor = createProcessor(backend, handlers, { pollInterval: 200 });
179
200
  processor.start();
180
201
  // Wait for job to be processed
181
202
  await new Promise((r) => setTimeout(r, 500));
@@ -212,7 +233,7 @@ describe('processor integration', () => {
212
233
  });
213
234
  // Only process typeA
214
235
  const processed = await processBatchWithHandlers(
215
- pool,
236
+ backend,
216
237
  'worker-typeA',
217
238
  10,
218
239
  'typeA',
@@ -259,7 +280,7 @@ describe('processor integration', () => {
259
280
  });
260
281
  // Only process typeA and typeC
261
282
  const processed = await processBatchWithHandlers(
262
- pool,
283
+ backend,
263
284
  'worker-multi',
264
285
  10,
265
286
  ['typeA', 'typeC'],
@@ -300,7 +321,7 @@ describe('processor integration', () => {
300
321
  jobType: 'typeB',
301
322
  payload: { n: 2 },
302
323
  });
303
- const processor = createProcessor(pool, handlers, {
324
+ const processor = createProcessor(backend, handlers, {
304
325
  pollInterval: 100,
305
326
  jobType: 'typeA',
306
327
  });
@@ -320,11 +341,13 @@ describe('processor integration', () => {
320
341
  describe('concurrency option', () => {
321
342
  let pool: Pool;
322
343
  let dbName: string;
344
+ let backend: PostgresBackend;
323
345
 
324
346
  beforeEach(async () => {
325
347
  const setup = await createTestDbAndPool();
326
348
  pool = setup.pool;
327
349
  dbName = setup.dbName;
350
+ backend = new PostgresBackend(pool);
328
351
  });
329
352
 
330
353
  afterEach(async () => {
@@ -352,7 +375,7 @@ describe('concurrency option', () => {
352
375
  };
353
376
  const handlers = { test: handler };
354
377
  await addJobs(10);
355
- const processor = createProcessor(pool, handlers, { batchSize: 10 });
378
+ const processor = createProcessor(backend, handlers, { batchSize: 10 });
356
379
  await processor.start();
357
380
  expect(maxParallel).toBeLessThanOrEqual(3);
358
381
  });
@@ -368,7 +391,7 @@ describe('concurrency option', () => {
368
391
  };
369
392
  const handlers = { test: handler };
370
393
  await addJobs(10);
371
- const processor = createProcessor(pool, handlers, {
394
+ const processor = createProcessor(backend, handlers, {
372
395
  batchSize: 10,
373
396
  concurrency: 2,
374
397
  });
@@ -387,7 +410,7 @@ describe('concurrency option', () => {
387
410
  };
388
411
  const handlers = { test: handler };
389
412
  await addJobs(2);
390
- const processor = createProcessor(pool, handlers, {
413
+ const processor = createProcessor(backend, handlers, {
391
414
  batchSize: 2,
392
415
  concurrency: 5,
393
416
  });
@@ -406,7 +429,7 @@ describe('concurrency option', () => {
406
429
  };
407
430
  const handlers = { test: handler };
408
431
  await addJobs(5);
409
- const processor = createProcessor(pool, handlers, {
432
+ const processor = createProcessor(backend, handlers, {
410
433
  batchSize: 5,
411
434
  concurrency: 1,
412
435
  });
@@ -418,11 +441,13 @@ describe('concurrency option', () => {
418
441
  describe('per-job timeout', () => {
419
442
  let pool: Pool;
420
443
  let dbName: string;
444
+ let backend: PostgresBackend;
421
445
 
422
446
  beforeEach(async () => {
423
447
  const setup = await createTestDbAndPool();
424
448
  pool = setup.pool;
425
449
  dbName = setup.dbName;
450
+ backend = new PostgresBackend(pool);
426
451
  });
427
452
 
428
453
  afterEach(async () => {
@@ -448,9 +473,10 @@ describe('per-job timeout', () => {
448
473
  payload: {},
449
474
  timeoutMs: 50, // 50ms
450
475
  });
476
+ await claimJob(pool, jobId);
451
477
  const job = await queue.getJob<{ test: {} }, 'test'>(pool, jobId);
452
478
  expect(job).not.toBeNull();
453
- await processJobWithHandlers(pool, job!, handlers);
479
+ await processJobWithHandlers(backend, job!, handlers);
454
480
  const failed = await queue.getJob(pool, jobId);
455
481
  expect(failed?.status).toBe('failed');
456
482
  expect(failed?.errorHistory?.[0]?.message).toContain('timed out');
@@ -469,9 +495,10 @@ describe('per-job timeout', () => {
469
495
  payload: {},
470
496
  timeoutMs: 200, // 200ms
471
497
  });
498
+ await claimJob(pool, jobId);
472
499
  const job = await queue.getJob<{ test: {} }, 'test'>(pool, jobId);
473
500
  expect(job).not.toBeNull();
474
- await processJobWithHandlers(pool, job!, handlers);
501
+ await processJobWithHandlers(backend, job!, handlers);
475
502
  const completed = await queue.getJob(pool, jobId);
476
503
  expect(completed?.status).toBe('completed');
477
504
  });
@@ -497,10 +524,11 @@ describe('per-job timeout', () => {
497
524
  timeoutMs: 50, // 50ms timeout
498
525
  forceKillOnTimeout: true, // Force kill on timeout
499
526
  });
527
+ await claimJob(pool, jobId);
500
528
  const job = await queue.getJob<{ test: {} }, 'test'>(pool, jobId);
501
529
  expect(job).not.toBeNull();
502
530
  expect(job?.forceKillOnTimeout).toBe(true);
503
- await processJobWithHandlers(pool, job!, handlers);
531
+ await processJobWithHandlers(backend, job!, handlers);
504
532
  const failed = await queue.getJob(pool, jobId);
505
533
  expect(failed?.status).toBe('failed');
506
534
  expect(failed?.errorHistory?.[0]?.message).toContain('timed out');
@@ -524,10 +552,523 @@ describe('per-job timeout', () => {
524
552
  timeoutMs: 200, // 200ms
525
553
  forceKillOnTimeout: true,
526
554
  });
555
+ await claimJob(pool, jobId);
527
556
  const job = await queue.getJob<{ test: {} }, 'test'>(pool, jobId);
528
557
  expect(job).not.toBeNull();
529
- await processJobWithHandlers(pool, job!, handlers);
558
+ await processJobWithHandlers(backend, job!, handlers);
559
+ const completed = await queue.getJob(pool, jobId);
560
+ expect(completed?.status).toBe('completed');
561
+ });
562
+ });
563
+
564
+ describe('prolong', () => {
565
+ let pool: Pool;
566
+ let dbName: string;
567
+ let backend: PostgresBackend;
568
+
569
+ beforeEach(async () => {
570
+ const setup = await createTestDbAndPool();
571
+ pool = setup.pool;
572
+ dbName = setup.dbName;
573
+ backend = new PostgresBackend(pool);
574
+ });
575
+
576
+ afterEach(async () => {
577
+ vi.restoreAllMocks();
578
+ await pool.end();
579
+ await destroyTestDb(dbName);
580
+ });
581
+
582
+ it('should extend timeout when prolong is called with explicit duration', async () => {
583
+ // Setup
584
+ const handler: JobHandler<{ test: {} }, 'test'> = async (
585
+ _payload,
586
+ _signal,
587
+ ctx,
588
+ ) => {
589
+ // Wait 30ms, prolong to 100ms, then wait 60ms more (~90ms total) -- would fail the initial 50ms timeout without prolong
590
+ await new Promise((r) => setTimeout(r, 30));
591
+ ctx.prolong(100); // extend to 100ms from now
592
+ await new Promise((r) => setTimeout(r, 60));
593
+ };
594
+ const handlers: { test: JobHandler<{ test: {} }, 'test'> } = {
595
+ test: handler,
596
+ };
597
+ const jobId = await queue.addJob<{ test: {} }, 'test'>(pool, {
598
+ jobType: 'test',
599
+ payload: {},
600
+ timeoutMs: 50,
601
+ });
602
+ await claimJob(pool, jobId);
603
+ const job = await queue.getJob<{ test: {} }, 'test'>(pool, jobId);
604
+
605
+ // Act
606
+ await processJobWithHandlers(backend, job!, handlers);
607
+
608
+ // Assert
609
+ const completed = await queue.getJob(pool, jobId);
610
+ expect(completed?.status).toBe('completed');
611
+ });
612
+
613
+ it('should extend timeout when prolong is called without arguments (heartbeat)', async () => {
614
+ // Setup
615
+ const handler: JobHandler<{ test: {} }, 'test'> = async (
616
+ _payload,
617
+ _signal,
618
+ ctx,
619
+ ) => {
620
+ // Initial timeout is 80ms, total work ~110ms
621
+ await new Promise((r) => setTimeout(r, 50));
622
+ ctx.prolong(); // reset to original 80ms from now
623
+ await new Promise((r) => setTimeout(r, 60));
624
+ };
625
+ const handlers: { test: JobHandler<{ test: {} }, 'test'> } = {
626
+ test: handler,
627
+ };
628
+ const jobId = await queue.addJob<{ test: {} }, 'test'>(pool, {
629
+ jobType: 'test',
630
+ payload: {},
631
+ timeoutMs: 80,
632
+ });
633
+ await claimJob(pool, jobId);
634
+ const job = await queue.getJob<{ test: {} }, 'test'>(pool, jobId);
635
+
636
+ // Act
637
+ await processJobWithHandlers(backend, job!, handlers);
638
+
639
+ // Assert
640
+ const completed = await queue.getJob(pool, jobId);
641
+ expect(completed?.status).toBe('completed');
642
+ });
643
+
644
+ it('should still timeout if prolong is not called', async () => {
645
+ // Setup
646
+ const handler: JobHandler<{ test: {} }, 'test'> = async (
647
+ _payload,
648
+ signal,
649
+ ) => {
650
+ await new Promise((resolve, reject) => {
651
+ const t = setTimeout(resolve, 200);
652
+ signal.addEventListener('abort', () => {
653
+ clearTimeout(t);
654
+ reject(new Error('aborted'));
655
+ });
656
+ });
657
+ };
658
+ const handlers: { test: JobHandler<{ test: {} }, 'test'> } = {
659
+ test: handler,
660
+ };
661
+ const jobId = await queue.addJob<{ test: {} }, 'test'>(pool, {
662
+ jobType: 'test',
663
+ payload: {},
664
+ timeoutMs: 50,
665
+ });
666
+ await claimJob(pool, jobId);
667
+ const job = await queue.getJob<{ test: {} }, 'test'>(pool, jobId);
668
+
669
+ // Act
670
+ await processJobWithHandlers(backend, job!, handlers);
671
+
672
+ // Assert
673
+ const failed = await queue.getJob(pool, jobId);
674
+ expect(failed?.status).toBe('failed');
675
+ expect(failed?.failureReason).toBe(FailureReason.Timeout);
676
+ });
677
+
678
+ it('should be a no-op when job has no timeout', async () => {
679
+ // Setup
680
+ let ctxReceived: JobContext | undefined;
681
+ const handler: JobHandler<{ test: {} }, 'test'> = async (
682
+ _payload,
683
+ _signal,
684
+ ctx,
685
+ ) => {
686
+ ctxReceived = ctx;
687
+ ctx.prolong(1000); // should be a no-op
688
+ await new Promise((r) => setTimeout(r, 20));
689
+ };
690
+ const handlers: { test: JobHandler<{ test: {} }, 'test'> } = {
691
+ test: handler,
692
+ };
693
+ const jobId = await queue.addJob<{ test: {} }, 'test'>(pool, {
694
+ jobType: 'test',
695
+ payload: {},
696
+ // no timeoutMs
697
+ });
698
+ await claimJob(pool, jobId);
699
+ const job = await queue.getJob<{ test: {} }, 'test'>(pool, jobId);
700
+
701
+ // Act
702
+ await processJobWithHandlers(backend, job!, handlers);
703
+
704
+ // Assert
705
+ const completed = await queue.getJob(pool, jobId);
706
+ expect(completed?.status).toBe('completed');
707
+ expect(ctxReceived).toBeDefined();
708
+ expect(ctxReceived!.prolong).toBeTypeOf('function');
709
+ });
710
+
711
+ it('should update locked_at in the database when prolong is called', async () => {
712
+ // Setup
713
+ const handler: JobHandler<{ test: {} }, 'test'> = async (
714
+ _payload,
715
+ _signal,
716
+ ctx,
717
+ ) => {
718
+ await new Promise((r) => setTimeout(r, 30));
719
+ ctx.prolong(200);
720
+ // Give DB time to update (fire-and-forget)
721
+ await new Promise((r) => setTimeout(r, 50));
722
+ };
723
+ const handlers: { test: JobHandler<{ test: {} }, 'test'> } = {
724
+ test: handler,
725
+ };
726
+ const jobId = await queue.addJob<{ test: {} }, 'test'>(pool, {
727
+ jobType: 'test',
728
+ payload: {},
729
+ timeoutMs: 100,
730
+ });
731
+ const jobBefore = await queue.getJob<{ test: {} }, 'test'>(pool, jobId);
732
+ // Pick up the job so it gets locked_at set
733
+ const batch = await queue.getNextBatch<{ test: {} }, 'test'>(
734
+ pool,
735
+ 'test-worker',
736
+ 1,
737
+ );
738
+ const lockedAtBefore = batch[0]!.lockedAt;
739
+
740
+ // Act
741
+ await processJobWithHandlers(backend, batch[0]!, handlers);
742
+
743
+ // Assert - check that a prolonged event was recorded
744
+ const events = await queue.getJobEvents(pool, jobId);
745
+ const prolongedEvents = events.filter((e) => e.eventType === 'prolonged');
746
+ expect(prolongedEvents.length).toBeGreaterThanOrEqual(1);
747
+ });
748
+ });
749
+
750
+ describe('onTimeout', () => {
751
+ let pool: Pool;
752
+ let dbName: string;
753
+ let backend: PostgresBackend;
754
+
755
+ beforeEach(async () => {
756
+ const setup = await createTestDbAndPool();
757
+ pool = setup.pool;
758
+ dbName = setup.dbName;
759
+ backend = new PostgresBackend(pool);
760
+ });
761
+
762
+ afterEach(async () => {
763
+ vi.restoreAllMocks();
764
+ await pool.end();
765
+ await destroyTestDb(dbName);
766
+ });
767
+
768
+ it('should extend timeout reactively when onTimeout callback returns a positive number', async () => {
769
+ // Setup
770
+ const handler: JobHandler<{ test: {} }, 'test'> = async (
771
+ _payload,
772
+ _signal,
773
+ ctx,
774
+ ) => {
775
+ ctx.onTimeout(() => {
776
+ return 100; // extend by 100ms
777
+ });
778
+ // Total work: ~80ms, initial timeout 50ms -- would fail without onTimeout extension
779
+ await new Promise((r) => setTimeout(r, 80));
780
+ };
781
+ const handlers: { test: JobHandler<{ test: {} }, 'test'> } = {
782
+ test: handler,
783
+ };
784
+ const jobId = await queue.addJob<{ test: {} }, 'test'>(pool, {
785
+ jobType: 'test',
786
+ payload: {},
787
+ timeoutMs: 50,
788
+ });
789
+ await claimJob(pool, jobId);
790
+ const job = await queue.getJob<{ test: {} }, 'test'>(pool, jobId);
791
+
792
+ // Act
793
+ await processJobWithHandlers(backend, job!, handlers);
794
+
795
+ // Assert
530
796
  const completed = await queue.getJob(pool, jobId);
531
797
  expect(completed?.status).toBe('completed');
532
798
  });
799
+
800
+ it('should let timeout proceed when onTimeout callback returns nothing', async () => {
801
+ // Setup
802
+ const onTimeoutCalled = vi.fn();
803
+ const handler: JobHandler<{ test: {} }, 'test'> = async (
804
+ _payload,
805
+ signal,
806
+ ctx,
807
+ ) => {
808
+ ctx.onTimeout(() => {
809
+ onTimeoutCalled();
810
+ // Return nothing -- let timeout proceed
811
+ });
812
+ await new Promise((resolve, reject) => {
813
+ const t = setTimeout(resolve, 200);
814
+ signal.addEventListener('abort', () => {
815
+ clearTimeout(t);
816
+ reject(new Error('aborted'));
817
+ });
818
+ });
819
+ };
820
+ const handlers: { test: JobHandler<{ test: {} }, 'test'> } = {
821
+ test: handler,
822
+ };
823
+ const jobId = await queue.addJob<{ test: {} }, 'test'>(pool, {
824
+ jobType: 'test',
825
+ payload: {},
826
+ timeoutMs: 50,
827
+ });
828
+ await claimJob(pool, jobId);
829
+ const job = await queue.getJob<{ test: {} }, 'test'>(pool, jobId);
830
+
831
+ // Act
832
+ await processJobWithHandlers(backend, job!, handlers);
833
+
834
+ // Assert
835
+ const failed = await queue.getJob(pool, jobId);
836
+ expect(failed?.status).toBe('failed');
837
+ expect(failed?.failureReason).toBe(FailureReason.Timeout);
838
+ expect(onTimeoutCalled).toHaveBeenCalledTimes(1);
839
+ });
840
+
841
+ it('should allow repeated extensions via onTimeout', async () => {
842
+ // Setup
843
+ let callCount = 0;
844
+ const handler: JobHandler<{ test: {} }, 'test'> = async (
845
+ _payload,
846
+ _signal,
847
+ ctx,
848
+ ) => {
849
+ ctx.onTimeout(() => {
850
+ callCount++;
851
+ if (callCount <= 3) {
852
+ return 40; // extend by 40ms each time
853
+ }
854
+ // After 3 extensions, let it complete (job should be done by then)
855
+ });
856
+ // Total work: ~130ms, initial timeout 40ms
857
+ // Will need ~3 extensions of 40ms each
858
+ await new Promise((r) => setTimeout(r, 130));
859
+ };
860
+ const handlers: { test: JobHandler<{ test: {} }, 'test'> } = {
861
+ test: handler,
862
+ };
863
+ const jobId = await queue.addJob<{ test: {} }, 'test'>(pool, {
864
+ jobType: 'test',
865
+ payload: {},
866
+ timeoutMs: 40,
867
+ });
868
+ await claimJob(pool, jobId);
869
+ const job = await queue.getJob<{ test: {} }, 'test'>(pool, jobId);
870
+
871
+ // Act
872
+ await processJobWithHandlers(backend, job!, handlers);
873
+
874
+ // Assert
875
+ const completed = await queue.getJob(pool, jobId);
876
+ expect(completed?.status).toBe('completed');
877
+ expect(callCount).toBeGreaterThanOrEqual(2);
878
+ });
879
+
880
+ it('should allow onTimeout with progress-based logic', async () => {
881
+ // Setup
882
+ let progress = 0;
883
+ const handler: JobHandler<{ test: {} }, 'test'> = async (
884
+ _payload,
885
+ _signal,
886
+ ctx,
887
+ ) => {
888
+ ctx.onTimeout(() => {
889
+ if (progress < 100) {
890
+ return 60; // still working, extend
891
+ }
892
+ // done, let timeout proceed if it fires again
893
+ });
894
+ // Simulate progress
895
+ for (let i = 0; i < 5; i++) {
896
+ await new Promise((r) => setTimeout(r, 25));
897
+ progress += 20;
898
+ }
899
+ };
900
+ const handlers: { test: JobHandler<{ test: {} }, 'test'> } = {
901
+ test: handler,
902
+ };
903
+ const jobId = await queue.addJob<{ test: {} }, 'test'>(pool, {
904
+ jobType: 'test',
905
+ payload: {},
906
+ timeoutMs: 50,
907
+ });
908
+ await claimJob(pool, jobId);
909
+ const job = await queue.getJob<{ test: {} }, 'test'>(pool, jobId);
910
+
911
+ // Act
912
+ await processJobWithHandlers(backend, job!, handlers);
913
+
914
+ // Assert
915
+ const completed = await queue.getJob(pool, jobId);
916
+ expect(completed?.status).toBe('completed');
917
+ expect(progress).toBe(100);
918
+ });
919
+
920
+ it('should work when both prolong and onTimeout are used together', async () => {
921
+ // Setup
922
+ let onTimeoutCalled = false;
923
+ const handler: JobHandler<{ test: {} }, 'test'> = async (
924
+ _payload,
925
+ _signal,
926
+ ctx,
927
+ ) => {
928
+ // Register reactive fallback
929
+ ctx.onTimeout(() => {
930
+ onTimeoutCalled = true;
931
+ return 100;
932
+ });
933
+ // Proactively extend before timeout hits
934
+ await new Promise((r) => setTimeout(r, 30));
935
+ ctx.prolong(100);
936
+ await new Promise((r) => setTimeout(r, 60));
937
+ };
938
+ const handlers: { test: JobHandler<{ test: {} }, 'test'> } = {
939
+ test: handler,
940
+ };
941
+ const jobId = await queue.addJob<{ test: {} }, 'test'>(pool, {
942
+ jobType: 'test',
943
+ payload: {},
944
+ timeoutMs: 50,
945
+ });
946
+ await claimJob(pool, jobId);
947
+ const job = await queue.getJob<{ test: {} }, 'test'>(pool, jobId);
948
+
949
+ // Act
950
+ await processJobWithHandlers(backend, job!, handlers);
951
+
952
+ // Assert
953
+ const completed = await queue.getJob(pool, jobId);
954
+ expect(completed?.status).toBe('completed');
955
+ // onTimeout should NOT have been called since prolong extended before timeout fired
956
+ expect(onTimeoutCalled).toBe(false);
957
+ });
958
+
959
+ it('should persist progress via ctx.setProgress', async () => {
960
+ // Setup
961
+ const handler: JobHandler<TestPayloadMap, 'test'> = async (
962
+ _payload,
963
+ _signal,
964
+ ctx,
965
+ ) => {
966
+ await ctx.setProgress(25);
967
+ await ctx.setProgress(50);
968
+ await ctx.setProgress(100);
969
+ };
970
+ const handlers = {
971
+ test: handler,
972
+ fail: vi.fn(async () => {}),
973
+ missing: vi.fn(async () => {}),
974
+ batch: vi.fn(async () => {}),
975
+ proc: vi.fn(async () => {}),
976
+ typeA: vi.fn(async () => {}),
977
+ typeB: vi.fn(async () => {}),
978
+ typeC: vi.fn(async () => {}),
979
+ };
980
+ const jobId = await queue.addJob<TestPayloadMap, 'test'>(pool, {
981
+ jobType: 'test',
982
+ payload: { foo: 'bar' },
983
+ });
984
+ await claimJob(pool, jobId);
985
+ const job = await queue.getJob<TestPayloadMap, 'test'>(pool, jobId);
986
+
987
+ // Act
988
+ await processJobWithHandlers(backend, job!, handlers);
989
+
990
+ // Assert
991
+ const completed = await queue.getJob(pool, jobId);
992
+ expect(completed?.status).toBe('completed');
993
+ expect(completed?.progress).toBe(100);
994
+ });
995
+
996
+ it('should reject progress values outside 0-100', async () => {
997
+ expect.assertions(2);
998
+
999
+ // Setup
1000
+ const handler: JobHandler<TestPayloadMap, 'test'> = async (
1001
+ _payload,
1002
+ _signal,
1003
+ ctx,
1004
+ ) => {
1005
+ try {
1006
+ await ctx.setProgress(-1);
1007
+ } catch (err) {
1008
+ expect((err as Error).message).toBe(
1009
+ 'Progress must be between 0 and 100',
1010
+ );
1011
+ }
1012
+ try {
1013
+ await ctx.setProgress(101);
1014
+ } catch (err) {
1015
+ expect((err as Error).message).toBe(
1016
+ 'Progress must be between 0 and 100',
1017
+ );
1018
+ }
1019
+ };
1020
+ const handlers = {
1021
+ test: handler,
1022
+ fail: vi.fn(async () => {}),
1023
+ missing: vi.fn(async () => {}),
1024
+ batch: vi.fn(async () => {}),
1025
+ proc: vi.fn(async () => {}),
1026
+ typeA: vi.fn(async () => {}),
1027
+ typeB: vi.fn(async () => {}),
1028
+ typeC: vi.fn(async () => {}),
1029
+ };
1030
+ const jobId = await queue.addJob<TestPayloadMap, 'test'>(pool, {
1031
+ jobType: 'test',
1032
+ payload: { foo: 'bar' },
1033
+ });
1034
+ await claimJob(pool, jobId);
1035
+ const job = await queue.getJob<TestPayloadMap, 'test'>(pool, jobId);
1036
+
1037
+ // Act
1038
+ await processJobWithHandlers(backend, job!, handlers);
1039
+ });
1040
+
1041
+ it('should round fractional progress values', async () => {
1042
+ // Setup
1043
+ const handler: JobHandler<TestPayloadMap, 'test'> = async (
1044
+ _payload,
1045
+ _signal,
1046
+ ctx,
1047
+ ) => {
1048
+ await ctx.setProgress(33.7);
1049
+ };
1050
+ const handlers = {
1051
+ test: handler,
1052
+ fail: vi.fn(async () => {}),
1053
+ missing: vi.fn(async () => {}),
1054
+ batch: vi.fn(async () => {}),
1055
+ proc: vi.fn(async () => {}),
1056
+ typeA: vi.fn(async () => {}),
1057
+ typeB: vi.fn(async () => {}),
1058
+ typeC: vi.fn(async () => {}),
1059
+ };
1060
+ const jobId = await queue.addJob<TestPayloadMap, 'test'>(pool, {
1061
+ jobType: 'test',
1062
+ payload: { foo: 'bar' },
1063
+ });
1064
+ await claimJob(pool, jobId);
1065
+ const job = await queue.getJob<TestPayloadMap, 'test'>(pool, jobId);
1066
+
1067
+ // Act
1068
+ await processJobWithHandlers(backend, job!, handlers);
1069
+
1070
+ // Assert
1071
+ const completed = await queue.getJob(pool, jobId);
1072
+ expect(completed?.progress).toBe(34);
1073
+ });
533
1074
  });