@convex-dev/workpool 0.2.0-beta.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (106)
  1. package/README.md +7 -16
  2. package/dist/commonjs/client/index.d.ts +3 -3
  3. package/dist/commonjs/client/index.d.ts.map +1 -1
  4. package/dist/commonjs/client/index.js +10 -5
  5. package/dist/commonjs/client/index.js.map +1 -1
  6. package/dist/commonjs/component/complete.d.ts +89 -0
  7. package/dist/commonjs/component/complete.d.ts.map +1 -0
  8. package/dist/commonjs/component/complete.js +80 -0
  9. package/dist/commonjs/component/complete.js.map +1 -0
  10. package/dist/commonjs/component/kick.d.ts +1 -2
  11. package/dist/commonjs/component/kick.d.ts.map +1 -1
  12. package/dist/commonjs/component/kick.js +7 -5
  13. package/dist/commonjs/component/kick.js.map +1 -1
  14. package/dist/commonjs/component/lib.d.ts +3 -3
  15. package/dist/commonjs/component/lib.d.ts.map +1 -1
  16. package/dist/commonjs/component/lib.js +43 -20
  17. package/dist/commonjs/component/lib.js.map +1 -1
  18. package/dist/commonjs/component/logging.d.ts.map +1 -1
  19. package/dist/commonjs/component/logging.js +1 -2
  20. package/dist/commonjs/component/logging.js.map +1 -1
  21. package/dist/commonjs/component/loop.d.ts +1 -14
  22. package/dist/commonjs/component/loop.d.ts.map +1 -1
  23. package/dist/commonjs/component/loop.js +215 -178
  24. package/dist/commonjs/component/loop.js.map +1 -1
  25. package/dist/commonjs/component/recovery.d.ts +16 -0
  26. package/dist/commonjs/component/recovery.d.ts.map +1 -1
  27. package/dist/commonjs/component/recovery.js +64 -44
  28. package/dist/commonjs/component/recovery.js.map +1 -1
  29. package/dist/commonjs/component/schema.d.ts +6 -2
  30. package/dist/commonjs/component/schema.d.ts.map +1 -1
  31. package/dist/commonjs/component/schema.js +5 -3
  32. package/dist/commonjs/component/schema.js.map +1 -1
  33. package/dist/commonjs/component/shared.d.ts +20 -11
  34. package/dist/commonjs/component/shared.d.ts.map +1 -1
  35. package/dist/commonjs/component/shared.js +18 -5
  36. package/dist/commonjs/component/shared.js.map +1 -1
  37. package/dist/commonjs/component/stats.d.ts +21 -13
  38. package/dist/commonjs/component/stats.d.ts.map +1 -1
  39. package/dist/commonjs/component/stats.js +32 -22
  40. package/dist/commonjs/component/stats.js.map +1 -1
  41. package/dist/commonjs/component/worker.d.ts +2 -12
  42. package/dist/commonjs/component/worker.d.ts.map +1 -1
  43. package/dist/commonjs/component/worker.js +23 -36
  44. package/dist/commonjs/component/worker.js.map +1 -1
  45. package/dist/esm/client/index.d.ts +3 -3
  46. package/dist/esm/client/index.d.ts.map +1 -1
  47. package/dist/esm/client/index.js +10 -5
  48. package/dist/esm/client/index.js.map +1 -1
  49. package/dist/esm/component/complete.d.ts +89 -0
  50. package/dist/esm/component/complete.d.ts.map +1 -0
  51. package/dist/esm/component/complete.js +80 -0
  52. package/dist/esm/component/complete.js.map +1 -0
  53. package/dist/esm/component/kick.d.ts +1 -2
  54. package/dist/esm/component/kick.d.ts.map +1 -1
  55. package/dist/esm/component/kick.js +7 -5
  56. package/dist/esm/component/kick.js.map +1 -1
  57. package/dist/esm/component/lib.d.ts +3 -3
  58. package/dist/esm/component/lib.d.ts.map +1 -1
  59. package/dist/esm/component/lib.js +43 -20
  60. package/dist/esm/component/lib.js.map +1 -1
  61. package/dist/esm/component/logging.d.ts.map +1 -1
  62. package/dist/esm/component/logging.js +1 -2
  63. package/dist/esm/component/logging.js.map +1 -1
  64. package/dist/esm/component/loop.d.ts +1 -14
  65. package/dist/esm/component/loop.d.ts.map +1 -1
  66. package/dist/esm/component/loop.js +215 -178
  67. package/dist/esm/component/loop.js.map +1 -1
  68. package/dist/esm/component/recovery.d.ts +16 -0
  69. package/dist/esm/component/recovery.d.ts.map +1 -1
  70. package/dist/esm/component/recovery.js +64 -44
  71. package/dist/esm/component/recovery.js.map +1 -1
  72. package/dist/esm/component/schema.d.ts +6 -2
  73. package/dist/esm/component/schema.d.ts.map +1 -1
  74. package/dist/esm/component/schema.js +5 -3
  75. package/dist/esm/component/schema.js.map +1 -1
  76. package/dist/esm/component/shared.d.ts +20 -11
  77. package/dist/esm/component/shared.d.ts.map +1 -1
  78. package/dist/esm/component/shared.js +18 -5
  79. package/dist/esm/component/shared.js.map +1 -1
  80. package/dist/esm/component/stats.d.ts +21 -13
  81. package/dist/esm/component/stats.d.ts.map +1 -1
  82. package/dist/esm/component/stats.js +32 -22
  83. package/dist/esm/component/stats.js.map +1 -1
  84. package/dist/esm/component/worker.d.ts +2 -12
  85. package/dist/esm/component/worker.d.ts.map +1 -1
  86. package/dist/esm/component/worker.js +23 -36
  87. package/dist/esm/component/worker.js.map +1 -1
  88. package/package.json +7 -6
  89. package/src/client/index.ts +18 -8
  90. package/src/component/README.md +15 -15
  91. package/src/component/_generated/api.d.ts +7 -2
  92. package/src/component/complete.test.ts +508 -0
  93. package/src/component/complete.ts +98 -0
  94. package/src/component/kick.test.ts +13 -13
  95. package/src/component/kick.ts +13 -8
  96. package/src/component/lib.test.ts +262 -17
  97. package/src/component/lib.ts +55 -24
  98. package/src/component/logging.ts +1 -2
  99. package/src/component/loop.test.ts +1158 -0
  100. package/src/component/loop.ts +289 -221
  101. package/src/component/recovery.test.ts +541 -0
  102. package/src/component/recovery.ts +80 -63
  103. package/src/component/schema.ts +6 -4
  104. package/src/component/shared.ts +21 -6
  105. package/src/component/stats.ts +48 -25
  106. package/src/component/worker.ts +25 -38
@@ -0,0 +1,1158 @@
1
+ import { convexTest } from "convex-test";
2
+ import { WithoutSystemFields } from "convex/server";
3
+ import {
4
+ afterEach,
5
+ assert,
6
+ beforeEach,
7
+ describe,
8
+ expect,
9
+ it,
10
+ vi,
11
+ } from "vitest";
12
+ import { api, internal } from "./_generated/api";
13
+ import { Doc, Id } from "./_generated/dataModel";
14
+ import { MutationCtx } from "./_generated/server";
15
+ import schema from "./schema";
16
+ import {
17
+ currentSegment,
18
+ DEFAULT_MAX_PARALLELISM,
19
+ nextSegment,
20
+ toSegment,
21
+ } from "./shared";
22
+
23
+ const modules = import.meta.glob("./**/*.ts");
24
+
25
+ describe("loop", () => {
26
+ async function setupTest() {
27
+ const t = convexTest(schema, modules);
28
+ return t;
29
+ }
30
+
31
+ let t: Awaited<ReturnType<typeof setupTest>>;
32
+
33
+ async function setMaxParallelism(maxParallelism: number) {
34
+ await t.run(async (ctx) => {
35
+ await ctx.db.patch((await ctx.db.query("globals").unique())!._id, {
36
+ maxParallelism,
37
+ });
38
+ });
39
+ }
40
+
41
+ async function makeDummyWork(
42
+ ctx: MutationCtx,
43
+ overrides: Partial<WithoutSystemFields<Doc<"work">>> = {}
44
+ ) {
45
+ return ctx.db.insert("work", {
46
+ fnType: "action",
47
+ fnHandle: "test_handle",
48
+ fnName: "test_handle",
49
+ fnArgs: {},
50
+ attempts: 0,
51
+ ...overrides,
52
+ });
53
+ }
54
+
55
+ async function makeDummyScheduledFunction(
56
+ ctx: MutationCtx,
57
+ workId: Id<"work">
58
+ ) {
59
+ return ctx.scheduler.runAfter(0, internal.worker.runActionWrapper, {
60
+ workId,
61
+ fnHandle: "test_handle",
62
+ fnArgs: {},
63
+ logLevel: "WARN",
64
+ attempt: 0,
65
+ });
66
+ }
67
+
68
+ async function insertInternalState(
69
+ ctx: MutationCtx,
70
+ overrides: Partial<WithoutSystemFields<Doc<"internalState">>> = {}
71
+ ) {
72
+ await ctx.db.insert("internalState", {
73
+ generation: 1n,
74
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
75
+ lastRecovery: currentSegment(),
76
+ report: {
77
+ completed: 0,
78
+ succeeded: 0,
79
+ failed: 0,
80
+ retries: 0,
81
+ canceled: 0,
82
+ lastReportTs: Date.now(),
83
+ },
84
+ running: [],
85
+ ...overrides,
86
+ });
87
+ }
88
+
89
+ beforeEach(async () => {
90
+ vi.useFakeTimers();
91
+ t = await setupTest();
92
+ await t.run(async (ctx) => {
93
+ await ctx.db.insert("globals", {
94
+ logLevel: "WARN",
95
+ maxParallelism: DEFAULT_MAX_PARALLELISM,
96
+ });
97
+ });
98
+ });
99
+
100
+ afterEach(() => {
101
+ vi.useRealTimers();
102
+ });
103
+
104
+ describe("data state machine", () => {
105
+ it("should follow the pendingStart -> workerRunning -> complete flow", async () => {
106
+ // Setup initial state
107
+ const workId = await t.run<Id<"work">>(async (ctx) => {
108
+ // Create internal state
109
+ await insertInternalState(ctx);
110
+
111
+ // Create running runStatus
112
+ await ctx.db.insert("runStatus", {
113
+ state: { kind: "running" },
114
+ });
115
+
116
+ // Create work
117
+ const workId = await makeDummyWork(ctx, { attempts: 0 });
118
+
119
+ // Create pendingStart
120
+ await ctx.db.insert("pendingStart", {
121
+ workId,
122
+ segment: 1n,
123
+ });
124
+
125
+ return workId;
126
+ });
127
+
128
+ // Run main loop to process pendingStart -> workerRunning
129
+ await t.mutation(internal.loop.main, { generation: 1n, segment: 1n });
130
+
131
+ // Verify work is now in running state
132
+ await t.run(async (ctx) => {
133
+ // Check that pendingStart was deleted
134
+ const pendingStarts = await ctx.db.query("pendingStart").collect();
135
+ expect(pendingStarts).toHaveLength(0);
136
+
137
+ // Check that work is in running list
138
+ const state = await ctx.db.query("internalState").unique();
139
+ expect(state).toBeDefined();
140
+ assert(state);
141
+ expect(state.running).toHaveLength(1);
142
+ expect(state.running[0].workId).toBe(workId);
143
+ });
144
+
145
+ // Complete the work (workerRunning -> complete)
146
+ await t.mutation(internal.complete.complete, {
147
+ jobs: [
148
+ {
149
+ workId,
150
+ runResult: { kind: "success", returnValue: null },
151
+ attempt: 0,
152
+ },
153
+ ],
154
+ });
155
+
156
+ // Verify pendingCompletion was created
157
+ await t.run(async (ctx) => {
158
+ const pendingCompletions = await ctx.db
159
+ .query("pendingCompletion")
160
+ .collect();
161
+ expect(pendingCompletions).toHaveLength(1);
162
+ expect(pendingCompletions[0].workId).toBe(workId);
163
+ expect(pendingCompletions[0].runResult.kind).toBe("success");
164
+ expect(pendingCompletions[0].retry).toBe(false);
165
+ });
166
+ });
167
+
168
+ it("should follow the pendingStart + pendingCancelation -> complete flow", async () => {
169
+ // Setup initial state
170
+ const workId = await t.run<Id<"work">>(async (ctx) => {
171
+ // Create internal state
172
+ await insertInternalState(ctx);
173
+
174
+ // Create running runStatus
175
+ await ctx.db.insert("runStatus", {
176
+ state: { kind: "running" },
177
+ });
178
+
179
+ // Create work
180
+ const workId = await makeDummyWork(ctx, { attempts: 0 });
181
+
182
+ // Create pendingStart
183
+ await ctx.db.insert("pendingStart", {
184
+ workId,
185
+ segment: 1n,
186
+ });
187
+
188
+ // Create pendingCancelation
189
+ await ctx.db.insert("pendingCancelation", {
190
+ workId,
191
+ segment: 1n,
192
+ });
193
+
194
+ return workId;
195
+ });
196
+
197
+ // Run main loop to process pendingStart and pendingCancelation
198
+ await t.mutation(internal.loop.main, { generation: 1n, segment: 1n });
199
+
200
+ // Verify work was canceled
201
+ await t.run(async (ctx) => {
202
+ // Check that pendingStart was deleted
203
+ const pendingStarts = await ctx.db.query("pendingStart").collect();
204
+ expect(pendingStarts).toHaveLength(0);
205
+
206
+ // Check that pendingCancelation was deleted
207
+ const pendingCancelations = await ctx.db
208
+ .query("pendingCancelation")
209
+ .collect();
210
+ expect(pendingCancelations).toHaveLength(0);
211
+
212
+ // Check that work is not in running list
213
+ const state = await ctx.db.query("internalState").unique();
214
+ expect(state).toBeDefined();
215
+ assert(state);
216
+ expect(state.running).toHaveLength(0);
217
+ expect(state.report.canceled).toBe(1);
218
+
219
+ const work = await ctx.db.get(workId);
220
+ expect(work).not.toBeNull();
221
+ expect(work!.canceled).toBe(true);
222
+ });
223
+ });
224
+
225
+ it("should follow the complete -> pendingCompletion -> pendingStart flow for retries", async () => {
226
+ // Setup initial state with a running job that will need retry
227
+ const workId = await t.run<Id<"work">>(async (ctx) => {
228
+ // Create internal state
229
+ await insertInternalState(ctx);
230
+
231
+ // Create running runStatus
232
+ await ctx.db.insert("runStatus", {
233
+ state: { kind: "running" },
234
+ });
235
+
236
+ // Create work with retry behavior
237
+ const workId = await makeDummyWork(ctx, {
238
+ attempts: 0,
239
+ retryBehavior: {
240
+ maxAttempts: 3,
241
+ initialBackoffMs: 1000,
242
+ base: 2,
243
+ },
244
+ });
245
+
246
+ // Schedule a function and get its ID
247
+ const scheduledId = await makeDummyScheduledFunction(ctx, workId);
248
+
249
+ // Add to running list
250
+ const state = await ctx.db.query("internalState").unique();
251
+ assert(state);
252
+ await ctx.db.patch(state._id, {
253
+ running: [{ workId, scheduledId, started: Date.now() }],
254
+ });
255
+
256
+ return workId;
257
+ });
258
+
259
+ // Complete the work with failure (workerRunning -> complete)
260
+ await t.mutation(internal.complete.complete, {
261
+ jobs: [
262
+ {
263
+ workId,
264
+ runResult: { kind: "failed", error: "Test error" },
265
+ attempt: 0,
266
+ },
267
+ ],
268
+ });
269
+
270
+ // Verify pendingCompletion was created with retry=true
271
+ await t.run(async (ctx) => {
272
+ const pendingCompletions = await ctx.db
273
+ .query("pendingCompletion")
274
+ .collect();
275
+ expect(pendingCompletions).toHaveLength(1);
276
+ expect(pendingCompletions[0].workId).toBe(workId);
277
+ expect(pendingCompletions[0].runResult.kind).toBe("failed");
278
+ expect(pendingCompletions[0].retry).toBe(true);
279
+ });
280
+
281
+ // Run main loop to process pendingCompletion -> pendingStart
282
+ await t.mutation(internal.loop.main, {
283
+ generation: 1n,
284
+ segment: nextSegment(),
285
+ });
286
+
287
+ // Verify work is now in pendingStart for retry
288
+ await t.run(async (ctx) => {
289
+ // Check that pendingCompletion was deleted
290
+ const pendingCompletions = await ctx.db
291
+ .query("pendingCompletion")
292
+ .collect();
293
+ expect(pendingCompletions).toHaveLength(0);
294
+
295
+ // Check that pendingStart was created for retry
296
+ const pendingStarts = await ctx.db.query("pendingStart").collect();
297
+ expect(pendingStarts).toHaveLength(1);
298
+ expect(pendingStarts[0].workId).toBe(workId);
299
+
300
+ // Check that work still exists
301
+ const work = await ctx.db.get(workId);
302
+ expect(work).not.toBeNull();
303
+ expect(work!.attempts).toBe(1);
304
+ });
305
+ });
306
+ });
307
+
308
+ describe("status transitions", () => {
309
+ it("should transition from idle to running when work is enqueued", async () => {
310
+ // Setup initial idle state
311
+ await t.run(async (ctx) => {
312
+ // Create internal state
313
+ await insertInternalState(ctx);
314
+
315
+ // Create idle runStatus
316
+ await ctx.db.insert("runStatus", {
317
+ state: { kind: "idle", generation: 1n },
318
+ });
319
+ });
320
+
321
+ // Enqueue work
322
+ await t.mutation(api.lib.enqueue, {
323
+ fnHandle: "testHandle",
324
+ fnName: "testFunction",
325
+ fnArgs: { test: true },
326
+ fnType: "mutation",
327
+ runAt: Date.now(),
328
+ config: {
329
+ maxParallelism: 10,
330
+ logLevel: "INFO",
331
+ },
332
+ });
333
+
334
+ // Verify state transition to running
335
+ await t.run(async (ctx) => {
336
+ const runStatus = await ctx.db.query("runStatus").unique();
337
+ expect(runStatus).toBeDefined();
338
+ assert(runStatus);
339
+ expect(runStatus.state.kind).toBe("running");
340
+ });
341
+ });
342
+
343
+ it("should transition from running to scheduled when all work is started and there's leftover capacity", async () => {
344
+ // Setup initial running state with work
345
+ await t.run(async (ctx) => {
346
+ // Create internal state
347
+ await insertInternalState(ctx);
348
+
349
+ // Create running runStatus
350
+ await ctx.db.insert("runStatus", {
351
+ state: { kind: "running" },
352
+ });
353
+
354
+ // Create work
355
+ const workId = await makeDummyWork(ctx);
356
+
357
+ // Create pendingStart
358
+ await ctx.db.insert("pendingStart", {
359
+ workId,
360
+ segment: 1n,
361
+ });
362
+ });
363
+
364
+ // Run main loop to process the work
365
+ await t.mutation(internal.loop.main, {
366
+ generation: 1n,
367
+ segment: nextSegment(),
368
+ });
369
+
370
+ // Run updateRunStatus to transition to scheduled
371
+ await t.mutation(internal.loop.updateRunStatus, {
372
+ generation: 2n,
373
+ segment: nextSegment(),
374
+ });
375
+
376
+ // Verify state transition to scheduled
377
+ await t.run(async (ctx) => {
378
+ const runStatus = await ctx.db.query("runStatus").unique();
379
+ expect(runStatus).toBeDefined();
380
+ assert(runStatus);
381
+ expect(runStatus.state.kind).toBe("scheduled");
382
+ assert(runStatus.state.kind === "scheduled");
383
+ expect(runStatus.state.saturated).toBe(false);
384
+ });
385
+ });
386
+
387
+ it("should transition from running to saturated when maxed out", async () => {
388
+ // Setup initial running state with max capacity
389
+ await setMaxParallelism(1);
390
+ const segment = currentSegment();
391
+ await t.run(async (ctx) => {
392
+ // Create work item
393
+ const workId = await makeDummyWork(ctx);
394
+
395
+ // Schedule a function and get its ID
396
+ const scheduledId = await makeDummyScheduledFunction(ctx, workId);
397
+
398
+ // Create internal state with running job
399
+ await insertInternalState(ctx, {
400
+ running: [{ workId, scheduledId, started: Date.now() }],
401
+ });
402
+
403
+ // Create running runStatus
404
+ await ctx.db.insert("runStatus", {
405
+ state: { kind: "running" },
406
+ });
407
+
408
+ // Create another pendingStart to exceed capacity
409
+ const anotherWorkId = await makeDummyWork(ctx);
410
+
411
+ await ctx.db.insert("pendingStart", {
412
+ workId: anotherWorkId,
413
+ segment,
414
+ });
415
+ });
416
+
417
+ // Run updateRunStatus to transition to scheduled with saturated=true
418
+ await t.mutation(internal.loop.updateRunStatus, {
419
+ generation: 1n,
420
+ segment,
421
+ });
422
+
423
+ // Verify state transition to scheduled with saturated=true
424
+ await t.run(async (ctx) => {
425
+ const runStatus = await ctx.db.query("runStatus").unique();
426
+ expect(runStatus).toBeDefined();
427
+ assert(runStatus);
428
+ expect(runStatus.state.kind).toBe("scheduled");
429
+ assert(runStatus.state.kind === "scheduled");
430
+ expect(runStatus.state.saturated).toBe(true);
431
+ });
432
+ });
433
+
434
+ it("should transition from scheduled to running when new work is enqueued", async () => {
435
+ // Setup initial scheduled state
436
+ await t.run<Id<"_scheduled_functions">>(async (ctx) => {
437
+ // Create internal state
438
+ await insertInternalState(ctx);
439
+
440
+ // Schedule main loop
441
+ const scheduledId = await ctx.scheduler.runAfter(
442
+ 1000,
443
+ internal.loop.main,
444
+ { generation: 1n, segment: nextSegment() + 10n }
445
+ );
446
+
447
+ // Create scheduled runStatus
448
+ await ctx.db.insert("runStatus", {
449
+ state: {
450
+ kind: "scheduled",
451
+ segment: nextSegment() + 10n,
452
+ scheduledId,
453
+ saturated: false,
454
+ generation: 1n,
455
+ },
456
+ });
457
+
458
+ return scheduledId;
459
+ });
460
+
461
+ // Enqueue work to trigger transition to running
462
+ await t.mutation(api.lib.enqueue, {
463
+ fnHandle: "testHandle",
464
+ fnName: "testFunction",
465
+ fnArgs: { test: true },
466
+ fnType: "mutation",
467
+ runAt: Date.now(),
468
+ config: {
469
+ maxParallelism: 10,
470
+ logLevel: "INFO",
471
+ },
472
+ });
473
+
474
+ // Verify state transition to running
475
+ await t.run(async (ctx) => {
476
+ const runStatus = await ctx.db.query("runStatus").unique();
477
+ expect(runStatus).toBeDefined();
478
+ assert(runStatus);
479
+ expect(runStatus.state.kind).toBe("running");
480
+ });
481
+ });
482
+
483
+ it("should transition from running to idle when all work is done", async () => {
484
+ const segment = nextSegment();
485
+ // Setup initial running state with work
486
+ const workId = await t.run<Id<"work">>(async (ctx) => {
487
+ // Create internal state
488
+ await insertInternalState(ctx);
489
+
490
+ // Create running runStatus
491
+ await ctx.db.insert("runStatus", {
492
+ state: { kind: "running" },
493
+ });
494
+
495
+ // Create work
496
+ const workId = await makeDummyWork(ctx, { attempts: 0 });
497
+
498
+ // Create pendingStart
499
+ await ctx.db.insert("pendingStart", {
500
+ workId,
501
+ segment,
502
+ });
503
+
504
+ return workId;
505
+ });
506
+
507
+ // Run main loop to process the work
508
+ await t.mutation(internal.loop.main, { generation: 1n, segment });
509
+
510
+ // Complete the work
511
+ await t.mutation(internal.complete.complete, {
512
+ jobs: [
513
+ {
514
+ workId,
515
+ runResult: { kind: "success", returnValue: null },
516
+ attempt: 0,
517
+ },
518
+ ],
519
+ });
520
+
521
+ // Run main loop again to process the completion
522
+ await t.mutation(internal.loop.main, { generation: 2n, segment });
523
+
524
+ // Run updateRunStatus to transition to idle
525
+ await t.mutation(internal.loop.updateRunStatus, {
526
+ generation: 3n,
527
+ segment,
528
+ });
529
+
530
+ // Verify state transition to idle
531
+ await t.run(async (ctx) => {
532
+ const runStatus = await ctx.db.query("runStatus").unique();
533
+ expect(runStatus).toBeDefined();
534
+ assert(runStatus);
535
+ expect(runStatus.state.kind).toBe("idle");
536
+ assert(runStatus.state.kind === "idle");
537
+ });
538
+ });
539
+ it("should transition from scheduled to running when main loop runs", async () => {
540
+ const segment = nextSegment();
541
+ await t.run(async (ctx) => {
542
+ await insertInternalState(ctx);
543
+
544
+ const scheduledId = await ctx.scheduler.runAfter(
545
+ 1000,
546
+ internal.loop.main,
547
+ { generation: 1n, segment }
548
+ );
549
+
550
+ await ctx.db.insert("runStatus", {
551
+ state: {
552
+ kind: "scheduled",
553
+ scheduledId,
554
+ generation: 1n,
555
+ segment,
556
+ saturated: false,
557
+ },
558
+ });
559
+ });
560
+ // Run main loop
561
+ await t.mutation(internal.loop.main, { generation: 1n, segment });
562
+
563
+ // Verify state transition to running
564
+ await t.run(async (ctx) => {
565
+ const runStatus = await ctx.db.query("runStatus").unique();
566
+ expect(runStatus).toBeDefined();
567
+ assert(runStatus);
568
+ expect(runStatus.state.kind).toBe("running");
569
+ });
570
+ });
571
+ });
572
+
573
+ describe("main function", () => {
574
+ it("should handle generation mismatch", async () => {
575
+ // Setup state with different generation
576
+ await t.run(async (ctx) => {
577
+ await ctx.db.insert("internalState", {
578
+ generation: 2n,
579
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
580
+ lastRecovery: 0n,
581
+ report: {
582
+ completed: 0,
583
+ succeeded: 0,
584
+ failed: 0,
585
+ retries: 0,
586
+ canceled: 0,
587
+ lastReportTs: Date.now(),
588
+ },
589
+ running: [],
590
+ });
591
+ });
592
+
593
+ // Call main with mismatched generation
594
+ await expect(
595
+ t.mutation(internal.loop.main, { generation: 1n, segment: 1n })
596
+ ).rejects.toThrow("generation mismatch");
597
+ });
598
+
599
+ it("should process pending completions", async () => {
600
+ // Setup state with a running job
601
+ await t.run(async (ctx) => {
602
+ // Create a work item for the running list
603
+ const workId = await makeDummyWork(ctx);
604
+
605
+ // Schedule a function and get its ID
606
+ const scheduledId = await makeDummyScheduledFunction(ctx, workId);
607
+
608
+ // Create internal state
609
+ await ctx.db.insert("internalState", {
610
+ generation: 1n,
611
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
612
+ lastRecovery: 0n,
613
+ report: {
614
+ completed: 0,
615
+ succeeded: 0,
616
+ failed: 0,
617
+ retries: 0,
618
+ canceled: 0,
619
+ lastReportTs: Date.now(),
620
+ },
621
+ running: [
622
+ {
623
+ workId,
624
+ scheduledId,
625
+ started: 900000,
626
+ },
627
+ ],
628
+ });
629
+
630
+ // Create pending completion
631
+ await ctx.db.insert("pendingCompletion", {
632
+ workId,
633
+ runResult: { kind: "success", returnValue: null },
634
+ segment: 1n,
635
+ retry: false,
636
+ });
637
+ });
638
+
639
+ // Call main
640
+ await t.mutation(internal.loop.main, { generation: 1n, segment: 1n });
641
+
642
+ // Verify completion was processed
643
+ await t.run(async (ctx) => {
644
+ // Check that pendingCompletion was deleted
645
+ const completions = await ctx.db.query("pendingCompletion").collect();
646
+ expect(completions).toHaveLength(0);
647
+
648
+ // Check that work was removed from running list
649
+ const state = await ctx.db.query("internalState").unique();
650
+ expect(state).toBeDefined();
651
+ assert(state);
652
+ expect(state.running).toHaveLength(0);
653
+ expect(state.report.completed).toBe(1);
654
+ expect(state.report.succeeded).toBe(1);
655
+ });
656
+ });
657
+
658
+ it("should handle job retries", async () => {
659
+ // Setup state with a job that needs retry
660
+ const workId = await t.run<Id<"work">>(async (ctx) => {
661
+ // Create a work item for the running list
662
+ const workId = await makeDummyWork(ctx, {
663
+ attempts: 1,
664
+ retryBehavior: {
665
+ maxAttempts: 3,
666
+ initialBackoffMs: 1000,
667
+ base: 2,
668
+ },
669
+ });
670
+
671
+ // Schedule a function and get its ID
672
+ const scheduledId = await makeDummyScheduledFunction(ctx, workId);
673
+
674
+ // Create internal state
675
+ await ctx.db.insert("internalState", {
676
+ generation: 1n,
677
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
678
+ lastRecovery: 0n,
679
+ report: {
680
+ completed: 0,
681
+ succeeded: 0,
682
+ failed: 0,
683
+ retries: 0,
684
+ canceled: 0,
685
+ lastReportTs: Date.now(),
686
+ },
687
+ running: [
688
+ {
689
+ workId,
690
+ scheduledId,
691
+ started: 900000,
692
+ },
693
+ ],
694
+ });
695
+
696
+ // Create pending completion with failed result
697
+ await ctx.db.insert("pendingCompletion", {
698
+ workId,
699
+ runResult: { kind: "failed", error: "test error" },
700
+ segment: 1n,
701
+ retry: true,
702
+ });
703
+
704
+ return workId;
705
+ });
706
+
707
+ // Call main
708
+ await t.mutation(internal.loop.main, { generation: 1n, segment: 1n });
709
+
710
+ // Verify job was retried
711
+ await t.run(async (ctx) => {
712
+ // Check that pendingCompletion was deleted
713
+ const completions = await ctx.db.query("pendingCompletion").collect();
714
+ expect(completions).toHaveLength(0);
715
+
716
+ // Check that work was updated
717
+ const work = await ctx.db.get(workId);
718
+ expect(work).toBeDefined();
719
+ expect(work!.attempts).toBe(1);
720
+
721
+ // Check that a new pendingStart was created
722
+ const pendingStarts = await ctx.db.query("pendingStart").collect();
723
+ expect(pendingStarts).toHaveLength(1);
724
+ expect(pendingStarts[0].workId).toBe(workId);
725
+
726
+ // Check that report was updated
727
+ const state = await ctx.db.query("internalState").unique();
728
+ expect(state).toBeDefined();
729
+ expect(state!.report.retries).toBe(1);
730
+ });
731
+ });
732
+
733
+ it("should process pending cancelations", async () => {
734
+ // Setup state with a pending cancelation
735
+ const workId = await t.run<Id<"work">>(async (ctx) => {
736
+ // Create a work item for the running list
737
+ const runningWorkId = await makeDummyWork(ctx);
738
+
739
+ // Schedule a function and get its ID
740
+ const scheduledId = await makeDummyScheduledFunction(
741
+ ctx,
742
+ runningWorkId
743
+ );
744
+
745
+ // Create internal state
746
+ await ctx.db.insert("internalState", {
747
+ generation: 1n,
748
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
749
+ lastRecovery: 0n,
750
+ report: {
751
+ completed: 0,
752
+ succeeded: 0,
753
+ failed: 0,
754
+ retries: 0,
755
+ canceled: 0,
756
+ lastReportTs: Date.now(),
757
+ },
758
+ running: [
759
+ {
760
+ workId: runningWorkId,
761
+ scheduledId,
762
+ started: 900000,
763
+ },
764
+ ],
765
+ });
766
+
767
+ // Create work
768
+ const workId = await makeDummyWork(ctx, {
769
+ retryBehavior: {
770
+ maxAttempts: 3,
771
+ initialBackoffMs: 1000,
772
+ base: 2,
773
+ },
774
+ });
775
+
776
+ // Create pending start
777
+ await ctx.db.insert("pendingStart", {
778
+ workId,
779
+ segment: 1n,
780
+ });
781
+
782
+ // Create pending cancelation
783
+ await ctx.db.insert("pendingCancelation", {
784
+ workId,
785
+ segment: 1n,
786
+ });
787
+
788
+ return workId;
789
+ });
790
+
791
+ // Call main
792
+ await t.mutation(internal.loop.main, { generation: 1n, segment: 1n });
793
+
794
+ // Verify cancelation was processed
795
+ await t.run(async (ctx) => {
796
+ // Check that pendingCancelation was deleted
797
+ const cancelations = await ctx.db.query("pendingCancelation").collect();
798
+ expect(cancelations).toHaveLength(0);
799
+
800
+ // Check that pendingStart was deleted
801
+ const pendingStarts = await ctx.db.query("pendingStart").collect();
802
+ expect(pendingStarts).toHaveLength(0);
803
+
804
+ const work = await ctx.db.get(workId);
805
+ expect(work).toBeDefined();
806
+ expect(work!.canceled).toBe(true);
807
+
808
+ // Check that report was updated
809
+ const state = await ctx.db.query("internalState").unique();
810
+ expect(state).toBeDefined();
811
+ expect(state!.report.canceled).toBe(1);
812
+ });
813
+ });
814
+
815
+ it("should schedule new work", async () => {
816
+ // Setup state with pending start items
817
+ const workId = await t.run<Id<"work">>(async (ctx) => {
818
+ // Create internal state
819
+ await ctx.db.insert("internalState", {
820
+ generation: 1n,
821
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
822
+ lastRecovery: 0n,
823
+ report: {
824
+ completed: 0,
825
+ succeeded: 0,
826
+ failed: 0,
827
+ retries: 0,
828
+ canceled: 0,
829
+ lastReportTs: Date.now(),
830
+ },
831
+ running: [],
832
+ });
833
+
834
+ // Create work
835
+ const workId = await makeDummyWork(ctx);
836
+
837
+ // Create pending start
838
+ await ctx.db.insert("pendingStart", {
839
+ workId,
840
+ segment: 1n,
841
+ });
842
+
843
+ return workId;
844
+ });
845
+
846
+ // Call main
847
+ await t.mutation(internal.loop.main, { generation: 1n, segment: 1n });
848
+
849
+ // Verify work was started
850
+ await t.run(async (ctx) => {
851
+ // Check that pendingStart was deleted
852
+ const pendingStarts = await ctx.db.query("pendingStart").collect();
853
+ expect(pendingStarts).toHaveLength(0);
854
+
855
+ // Check that work was added to running list
856
+ const state = await ctx.db.query("internalState").unique();
857
+ expect(state).toBeDefined();
858
+ expect(state!.running).toHaveLength(1);
859
+ expect(state!.running[0].workId).toBe(workId);
860
+ });
861
+ });
862
+
863
+ it("should schedule recovery for old jobs", async () => {
864
+ // Setup state with old running jobs
865
+ const oldTime = Date.now() - 5 * 60 * 1000 - 1000; // Older than recovery threshold
866
+
867
+ await t.run(async (ctx) => {
868
+ // Create work for the running list
869
+ const workId = await makeDummyWork(ctx);
870
+
871
+ // Schedule a function and get its ID
872
+ const scheduledId = await makeDummyScheduledFunction(ctx, workId);
873
+
874
+ // Create internal state with old job
875
+ await ctx.db.insert("internalState", {
876
+ generation: 1n,
877
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
878
+ lastRecovery: 0n,
879
+ report: {
880
+ completed: 0,
881
+ succeeded: 0,
882
+ failed: 0,
883
+ retries: 0,
884
+ canceled: 0,
885
+ lastReportTs: Date.now(),
886
+ },
887
+ running: [
888
+ {
889
+ workId,
890
+ scheduledId,
891
+ started: oldTime,
892
+ },
893
+ ],
894
+ });
895
+ });
896
+
897
+ // Call main
898
+ const segment = toSegment(60 * 60 * 1000);
899
+ await t.mutation(internal.loop.main, {
900
+ generation: 1n,
901
+ segment,
902
+ });
903
+
904
+ // Verify recovery was scheduled
905
+ await t.run(async (ctx) => {
906
+ // Check that lastRecovery was updated
907
+ const state = await ctx.db.query("internalState").unique();
908
+ expect(state).toBeDefined();
909
+ expect(state!.lastRecovery).toBe(segment);
910
+
911
+ // We can't directly check if recovery.recover was scheduled,
912
+ // but we can verify the state was updated correctly
913
+ });
914
+ });
915
+ });
916
+
917
+ describe("updateRunStatus function", () => {
918
+ it("should handle generation mismatch", async () => {
919
+ // Setup state with different generation
920
+ await t.run(async (ctx) => {
921
+ await ctx.db.insert("internalState", {
922
+ generation: 2n,
923
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
924
+ lastRecovery: 0n,
925
+ report: {
926
+ completed: 0,
927
+ succeeded: 0,
928
+ failed: 0,
929
+ retries: 0,
930
+ canceled: 0,
931
+ lastReportTs: Date.now(),
932
+ },
933
+ running: [],
934
+ });
935
+ });
936
+
937
+ // Call updateRunStatus with mismatched generation
938
+ await expect(
939
+ t.mutation(internal.loop.updateRunStatus, {
940
+ generation: 1n,
941
+ segment: 1n,
942
+ })
943
+ ).rejects.toThrow("generation mismatch");
944
+ });
945
+
946
+ it("should schedule main immediately if there are outstanding cancelations", async () => {
947
+ // Setup state with outstanding cancelations
948
+ await t.run(async (ctx) => {
949
+ // Create work for cancelation
950
+ const workId = await makeDummyWork(ctx);
951
+
952
+ // Create internal state
953
+ await ctx.db.insert("internalState", {
954
+ generation: 1n,
955
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
956
+ lastRecovery: 0n,
957
+ report: {
958
+ completed: 0,
959
+ succeeded: 0,
960
+ failed: 0,
961
+ retries: 0,
962
+ canceled: 0,
963
+ lastReportTs: Date.now(),
964
+ },
965
+ running: [],
966
+ });
967
+
968
+ // Create run status
969
+ await ctx.db.insert("runStatus", {
970
+ state: { kind: "running" },
971
+ });
972
+
973
+ // Create pending cancelation
974
+ await ctx.db.insert("pendingCancelation", {
975
+ workId,
976
+ segment: 1n,
977
+ });
978
+ });
979
+
980
+ // Call updateRunStatus
981
+ await t.mutation(internal.loop.updateRunStatus, {
982
+ generation: 1n,
983
+ segment: 1n,
984
+ });
985
+
986
+ // Verify main was scheduled (indirectly by checking runStatus)
987
+ await t.run(async (ctx) => {
988
+ // We can't directly check if main was scheduled,
989
+ // but we can verify the state was updated correctly
990
+ const runStatus = await ctx.db.query("runStatus").unique();
991
+ expect(runStatus).toBeDefined();
992
+ // The state should no longer be idle
993
+ expect(runStatus!.state.kind).not.toBe("idle");
994
+ });
995
+ });
996
+
997
+ it("should transition to idle state when there is no work", async () => {
998
+ // Setup state with no work
999
+ await t.run(async (ctx) => {
1000
+ // Create internal state with no running jobs
1001
+ await ctx.db.insert("internalState", {
1002
+ generation: 1n,
1003
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
1004
+ lastRecovery: 0n,
1005
+ report: {
1006
+ completed: 0,
1007
+ succeeded: 0,
1008
+ failed: 0,
1009
+ retries: 0,
1010
+ canceled: 0,
1011
+ lastReportTs: Date.now(),
1012
+ },
1013
+ running: [],
1014
+ });
1015
+
1016
+ // Create run status in running state
1017
+ await ctx.db.insert("runStatus", {
1018
+ state: { kind: "running" },
1019
+ });
1020
+ });
1021
+
1022
+ // Call updateRunStatus
1023
+ await t.mutation(internal.loop.updateRunStatus, {
1024
+ generation: 1n,
1025
+ segment: 1n,
1026
+ });
1027
+
1028
+ // Verify idle state was set
1029
+ await t.run(async (ctx) => {
1030
+ const runStatus = await ctx.db.query("runStatus").unique();
1031
+ expect(runStatus).toBeDefined();
1032
+ expect(runStatus!.state.kind).toBe("idle");
1033
+ assert(runStatus!.state.kind === "idle");
1034
+ expect(runStatus!.state.generation).toBe(1n);
1035
+ });
1036
+ });
1037
+
1038
+ it("should set saturated flag when at max capacity", async () => {
1039
+ // Setup state with running jobs at max capacity
1040
+ const now = currentSegment();
1041
+ const later = now + 10n;
1042
+ await setMaxParallelism(10);
1043
+ await t.run(async (ctx) => {
1044
+ // Create 10 work items and scheduled functions
1045
+ const runningJobs = await Promise.all(
1046
+ Array(10)
1047
+ .fill(0)
1048
+ .map(async () => {
1049
+ const workId = await makeDummyWork(ctx);
1050
+
1051
+ // Schedule a function and get its ID
1052
+ const scheduledId = await makeDummyScheduledFunction(ctx, workId);
1053
+
1054
+ return { workId, scheduledId, started: Date.now() };
1055
+ })
1056
+ );
1057
+
1058
+ // Create internal state with max running jobs
1059
+ await ctx.db.insert("internalState", {
1060
+ generation: 1n,
1061
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
1062
+ lastRecovery: now,
1063
+ report: {
1064
+ completed: 0,
1065
+ succeeded: 0,
1066
+ failed: 0,
1067
+ retries: 0,
1068
+ canceled: 0,
1069
+ lastReportTs: Date.now(),
1070
+ },
1071
+ running: runningJobs,
1072
+ });
1073
+
1074
+ // Create run status
1075
+ await ctx.db.insert("runStatus", {
1076
+ state: { kind: "running" },
1077
+ });
1078
+
1079
+ // Create future completion to trigger scheduling
1080
+ await ctx.db.insert("pendingCompletion", {
1081
+ workId: runningJobs[0].workId,
1082
+ runResult: { kind: "success", returnValue: null },
1083
+ segment: later,
1084
+ retry: false,
1085
+ });
1086
+ });
1087
+
1088
+ // Call updateRunStatus
1089
+ await t.mutation(internal.loop.updateRunStatus, {
1090
+ generation: 1n,
1091
+ segment: 1n,
1092
+ });
1093
+
1094
+ // Verify scheduled state was set with saturated flag
1095
+ await t.run(async (ctx) => {
1096
+ const runStatus = await ctx.db.query("runStatus").unique();
1097
+ expect(runStatus).toBeDefined();
1098
+ expect(runStatus!.state.kind).toBe("scheduled");
1099
+ assert(runStatus!.state.kind === "scheduled");
1100
+ expect(runStatus!.state.saturated).toBe(true);
1101
+ });
1102
+ });
1103
+ });
1104
+
1105
+ describe("complete function", () => {
1106
+ it("should run onComplete handlers and delete work", async () => {
1107
+ // Setup mock work with onComplete handler
1108
+ const workId = await t.run<Id<"work">>(async (ctx) => {
1109
+ const workId = await makeDummyWork(ctx, {
1110
+ attempts: 0,
1111
+ onComplete: {
1112
+ // TODO: make this a real handle
1113
+ fnHandle: "onComplete_handle",
1114
+ context: { data: "test" },
1115
+ },
1116
+ });
1117
+ return workId;
1118
+ });
1119
+
1120
+ // Call complete
1121
+ await t.mutation(internal.complete.complete, {
1122
+ jobs: [
1123
+ {
1124
+ workId,
1125
+ runResult: { kind: "success", returnValue: null },
1126
+ attempt: 0,
1127
+ },
1128
+ ],
1129
+ });
1130
+
1131
+ // Verify work was deleted
1132
+ await t.run(async (ctx) => {
1133
+ const work = await ctx.db.get(workId);
1134
+ expect(work).toBeNull();
1135
+ });
1136
+ });
1137
+
1138
+ it("should handle missing work gracefully", async () => {
1139
+ // Call complete with non-existent work ID
1140
+ const workId = await t.run(async (ctx) => {
1141
+ const id = await makeDummyWork(ctx, { attempts: 0 });
1142
+ await ctx.db.delete(id);
1143
+ return id;
1144
+ });
1145
+ await t.mutation(internal.complete.complete, {
1146
+ jobs: [
1147
+ {
1148
+ workId,
1149
+ runResult: { kind: "success", returnValue: null },
1150
+ attempt: 0,
1151
+ },
1152
+ ],
1153
+ });
1154
+
1155
+ // No error should be thrown
1156
+ });
1157
+ });
1158
+ });