@convex-dev/workpool 0.1.2 → 0.2.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (125)
  1. package/README.md +155 -17
  2. package/dist/commonjs/client/index.d.ts +123 -35
  3. package/dist/commonjs/client/index.d.ts.map +1 -1
  4. package/dist/commonjs/client/index.js +122 -15
  5. package/dist/commonjs/client/index.js.map +1 -1
  6. package/dist/commonjs/client/utils.d.ts +16 -0
  7. package/dist/commonjs/client/utils.d.ts.map +1 -0
  8. package/dist/commonjs/client/utils.js +2 -0
  9. package/dist/commonjs/client/utils.js.map +1 -0
  10. package/dist/commonjs/component/complete.d.ts +89 -0
  11. package/dist/commonjs/component/complete.d.ts.map +1 -0
  12. package/dist/commonjs/component/complete.js +80 -0
  13. package/dist/commonjs/component/complete.js.map +1 -0
  14. package/dist/commonjs/component/convex.config.d.ts.map +1 -1
  15. package/dist/commonjs/component/convex.config.js +0 -2
  16. package/dist/commonjs/component/convex.config.js.map +1 -1
  17. package/dist/commonjs/component/kick.d.ts +9 -0
  18. package/dist/commonjs/component/kick.d.ts.map +1 -0
  19. package/dist/commonjs/component/kick.js +97 -0
  20. package/dist/commonjs/component/kick.js.map +1 -0
  21. package/dist/commonjs/component/lib.d.ts +23 -32
  22. package/dist/commonjs/component/lib.d.ts.map +1 -1
  23. package/dist/commonjs/component/lib.js +91 -563
  24. package/dist/commonjs/component/lib.js.map +1 -1
  25. package/dist/commonjs/component/logging.d.ts +5 -3
  26. package/dist/commonjs/component/logging.d.ts.map +1 -1
  27. package/dist/commonjs/component/logging.js +13 -2
  28. package/dist/commonjs/component/logging.js.map +1 -1
  29. package/dist/commonjs/component/loop.d.ts +13 -0
  30. package/dist/commonjs/component/loop.d.ts.map +1 -0
  31. package/dist/commonjs/component/loop.js +482 -0
  32. package/dist/commonjs/component/loop.js.map +1 -0
  33. package/dist/commonjs/component/recovery.d.ts +24 -0
  34. package/dist/commonjs/component/recovery.d.ts.map +1 -0
  35. package/dist/commonjs/component/recovery.js +94 -0
  36. package/dist/commonjs/component/recovery.js.map +1 -0
  37. package/dist/commonjs/component/schema.d.ts +167 -93
  38. package/dist/commonjs/component/schema.d.ts.map +1 -1
  39. package/dist/commonjs/component/schema.js +56 -65
  40. package/dist/commonjs/component/schema.js.map +1 -1
  41. package/dist/commonjs/component/shared.d.ts +138 -0
  42. package/dist/commonjs/component/shared.d.ts.map +1 -0
  43. package/dist/commonjs/component/shared.js +77 -0
  44. package/dist/commonjs/component/shared.js.map +1 -0
  45. package/dist/commonjs/component/stats.d.ts +6 -3
  46. package/dist/commonjs/component/stats.d.ts.map +1 -1
  47. package/dist/commonjs/component/stats.js +23 -4
  48. package/dist/commonjs/component/stats.js.map +1 -1
  49. package/dist/commonjs/component/worker.d.ts +15 -0
  50. package/dist/commonjs/component/worker.d.ts.map +1 -0
  51. package/dist/commonjs/component/worker.js +73 -0
  52. package/dist/commonjs/component/worker.js.map +1 -0
  53. package/dist/esm/client/index.d.ts +123 -35
  54. package/dist/esm/client/index.d.ts.map +1 -1
  55. package/dist/esm/client/index.js +122 -15
  56. package/dist/esm/client/index.js.map +1 -1
  57. package/dist/esm/client/utils.d.ts +16 -0
  58. package/dist/esm/client/utils.d.ts.map +1 -0
  59. package/dist/esm/client/utils.js +2 -0
  60. package/dist/esm/client/utils.js.map +1 -0
  61. package/dist/esm/component/complete.d.ts +89 -0
  62. package/dist/esm/component/complete.d.ts.map +1 -0
  63. package/dist/esm/component/complete.js +80 -0
  64. package/dist/esm/component/complete.js.map +1 -0
  65. package/dist/esm/component/convex.config.d.ts.map +1 -1
  66. package/dist/esm/component/convex.config.js +0 -2
  67. package/dist/esm/component/convex.config.js.map +1 -1
  68. package/dist/esm/component/kick.d.ts +9 -0
  69. package/dist/esm/component/kick.d.ts.map +1 -0
  70. package/dist/esm/component/kick.js +97 -0
  71. package/dist/esm/component/kick.js.map +1 -0
  72. package/dist/esm/component/lib.d.ts +23 -32
  73. package/dist/esm/component/lib.d.ts.map +1 -1
  74. package/dist/esm/component/lib.js +91 -563
  75. package/dist/esm/component/lib.js.map +1 -1
  76. package/dist/esm/component/logging.d.ts +5 -3
  77. package/dist/esm/component/logging.d.ts.map +1 -1
  78. package/dist/esm/component/logging.js +13 -2
  79. package/dist/esm/component/logging.js.map +1 -1
  80. package/dist/esm/component/loop.d.ts +13 -0
  81. package/dist/esm/component/loop.d.ts.map +1 -0
  82. package/dist/esm/component/loop.js +482 -0
  83. package/dist/esm/component/loop.js.map +1 -0
  84. package/dist/esm/component/recovery.d.ts +24 -0
  85. package/dist/esm/component/recovery.d.ts.map +1 -0
  86. package/dist/esm/component/recovery.js +94 -0
  87. package/dist/esm/component/recovery.js.map +1 -0
  88. package/dist/esm/component/schema.d.ts +167 -93
  89. package/dist/esm/component/schema.d.ts.map +1 -1
  90. package/dist/esm/component/schema.js +56 -65
  91. package/dist/esm/component/schema.js.map +1 -1
  92. package/dist/esm/component/shared.d.ts +138 -0
  93. package/dist/esm/component/shared.d.ts.map +1 -0
  94. package/dist/esm/component/shared.js +77 -0
  95. package/dist/esm/component/shared.js.map +1 -0
  96. package/dist/esm/component/stats.d.ts +6 -3
  97. package/dist/esm/component/stats.d.ts.map +1 -1
  98. package/dist/esm/component/stats.js +23 -4
  99. package/dist/esm/component/stats.js.map +1 -1
  100. package/dist/esm/component/worker.d.ts +15 -0
  101. package/dist/esm/component/worker.d.ts.map +1 -0
  102. package/dist/esm/component/worker.js +73 -0
  103. package/dist/esm/component/worker.js.map +1 -0
  104. package/package.json +6 -5
  105. package/src/client/index.ts +232 -68
  106. package/src/client/utils.ts +45 -0
  107. package/src/component/README.md +73 -0
  108. package/src/component/_generated/api.d.ts +38 -66
  109. package/src/component/complete.test.ts +508 -0
  110. package/src/component/complete.ts +98 -0
  111. package/src/component/convex.config.ts +0 -3
  112. package/src/component/kick.test.ts +285 -0
  113. package/src/component/kick.ts +118 -0
  114. package/src/component/lib.test.ts +448 -0
  115. package/src/component/lib.ts +105 -667
  116. package/src/component/logging.ts +24 -12
  117. package/src/component/loop.test.ts +1204 -0
  118. package/src/component/loop.ts +637 -0
  119. package/src/component/recovery.test.ts +541 -0
  120. package/src/component/recovery.ts +96 -0
  121. package/src/component/schema.ts +61 -77
  122. package/src/component/setup.test.ts +5 -0
  123. package/src/component/shared.ts +141 -0
  124. package/src/component/stats.ts +26 -8
  125. package/src/component/worker.ts +81 -0
@@ -0,0 +1,1204 @@
1
+ import { convexTest } from "convex-test";
2
+ import {
3
+ describe,
4
+ expect,
5
+ it,
6
+ beforeEach,
7
+ afterEach,
8
+ vi,
9
+ assert,
10
+ } from "vitest";
11
+ import { Doc, Id } from "./_generated/dataModel";
12
+ import schema from "./schema";
13
+ import { internal } from "./_generated/api";
14
+ import { currentSegment, nextSegment, toSegment } from "./shared";
15
+ import { api } from "./_generated/api";
16
+ import { DEFAULT_MAX_PARALLELISM } from "./kick";
17
+ import { WithoutSystemFields } from "convex/server";
18
+ import { MutationCtx } from "./_generated/server";
19
+
20
+ const modules = import.meta.glob("./**/*.ts");
21
+
22
+ describe("loop", () => {
23
+ async function setupTest() {
24
+ const t = convexTest(schema, modules);
25
+ return t;
26
+ }
27
+
28
+ let t: Awaited<ReturnType<typeof setupTest>>;
29
+
30
+ async function setMaxParallelism(maxParallelism: number) {
31
+ await t.run(async (ctx) => {
32
+ await ctx.db.patch((await ctx.db.query("globals").unique())!._id, {
33
+ maxParallelism,
34
+ });
35
+ });
36
+ }
37
+
38
+ async function makeDummyWork(
39
+ ctx: MutationCtx,
40
+ overrides: Partial<WithoutSystemFields<Doc<"work">>> = {}
41
+ ) {
42
+ return ctx.db.insert("work", {
43
+ fnType: "action",
44
+ fnHandle: "test_handle",
45
+ fnName: "test_handle",
46
+ fnArgs: {},
47
+ attempts: 0,
48
+ ...overrides,
49
+ });
50
+ }
51
+
52
+ async function makeDummyScheduledFunction(
53
+ ctx: MutationCtx,
54
+ workId: Id<"work">
55
+ ) {
56
+ return ctx.scheduler.runAfter(0, internal.worker.runActionWrapper, {
57
+ workId,
58
+ fnHandle: "test_handle",
59
+ fnArgs: {},
60
+ logLevel: "DEBUG",
61
+ attempt: 0,
62
+ });
63
+ }
64
+
65
+ beforeEach(async () => {
66
+ vi.useFakeTimers();
67
+ t = await setupTest();
68
+ await t.run(async (ctx) => {
69
+ await ctx.db.insert("globals", {
70
+ logLevel: "DEBUG",
71
+ maxParallelism: DEFAULT_MAX_PARALLELISM,
72
+ });
73
+ });
74
+ });
75
+
76
+ afterEach(() => {
77
+ vi.useRealTimers();
78
+ });
79
+
80
+ describe("data state machine", () => {
81
+ it("should follow the pendingStart -> workerRunning -> complete flow", async () => {
82
+ // Setup initial state
83
+ const workId = await t.run<Id<"work">>(async (ctx) => {
84
+ // Create internal state
85
+ await ctx.db.insert("internalState", {
86
+ generation: 1n,
87
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
88
+ lastRecovery: 0n,
89
+ report: {
90
+ completed: 0,
91
+ succeeded: 0,
92
+ failed: 0,
93
+ retries: 0,
94
+ canceled: 0,
95
+ lastReportTs: Date.now(),
96
+ },
97
+ running: [],
98
+ });
99
+
100
+ // Create running runStatus
101
+ await ctx.db.insert("runStatus", {
102
+ state: { kind: "running" },
103
+ });
104
+
105
+ // Create work
106
+ const workId = await makeDummyWork(ctx, { attempts: 0 });
107
+
108
+ // Create pendingStart
109
+ await ctx.db.insert("pendingStart", {
110
+ workId,
111
+ segment: 1n,
112
+ });
113
+
114
+ return workId;
115
+ });
116
+
117
+ // Run main loop to process pendingStart -> workerRunning
118
+ await t.mutation(internal.loop.main, { generation: 1n, segment: 1n });
119
+
120
+ // Verify work is now in running state
121
+ await t.run(async (ctx) => {
122
+ // Check that pendingStart was deleted
123
+ const pendingStarts = await ctx.db.query("pendingStart").collect();
124
+ expect(pendingStarts).toHaveLength(0);
125
+
126
+ // Check that work is in running list
127
+ const state = await ctx.db.query("internalState").unique();
128
+ expect(state).toBeDefined();
129
+ assert(state);
130
+ expect(state.running).toHaveLength(1);
131
+ expect(state.running[0].workId).toBe(workId);
132
+ });
133
+
134
+ // Complete the work (workerRunning -> complete)
135
+ await t.mutation(internal.complete.complete, {
136
+ jobs: [
137
+ {
138
+ workId,
139
+ runResult: { kind: "success", returnValue: null },
140
+ attempt: 0,
141
+ },
142
+ ],
143
+ });
144
+
145
+ // Verify pendingCompletion was created
146
+ await t.run(async (ctx) => {
147
+ const pendingCompletions = await ctx.db
148
+ .query("pendingCompletion")
149
+ .collect();
150
+ expect(pendingCompletions).toHaveLength(1);
151
+ expect(pendingCompletions[0].workId).toBe(workId);
152
+ expect(pendingCompletions[0].runResult.kind).toBe("success");
153
+ expect(pendingCompletions[0].retry).toBe(false);
154
+ });
155
+ });
156
+
157
+ it("should follow the pendingStart + pendingCancelation -> complete flow", async () => {
158
+ // Setup initial state
159
+ const workId = await t.run<Id<"work">>(async (ctx) => {
160
+ // Create internal state
161
+ await ctx.db.insert("internalState", {
162
+ generation: 1n,
163
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
164
+ lastRecovery: 0n,
165
+ report: {
166
+ completed: 0,
167
+ succeeded: 0,
168
+ failed: 0,
169
+ retries: 0,
170
+ canceled: 0,
171
+ lastReportTs: Date.now(),
172
+ },
173
+ running: [],
174
+ });
175
+
176
+ // Create running runStatus
177
+ await ctx.db.insert("runStatus", {
178
+ state: { kind: "running" },
179
+ });
180
+
181
+ // Create work
182
+ const workId = await makeDummyWork(ctx, { attempts: 0 });
183
+
184
+ // Create pendingStart
185
+ await ctx.db.insert("pendingStart", {
186
+ workId,
187
+ segment: 1n,
188
+ });
189
+
190
+ // Create pendingCancelation
191
+ await ctx.db.insert("pendingCancelation", {
192
+ workId,
193
+ segment: 1n,
194
+ });
195
+
196
+ return workId;
197
+ });
198
+
199
+ // Run main loop to process pendingStart and pendingCancelation
200
+ await t.mutation(internal.loop.main, { generation: 1n, segment: 1n });
201
+
202
+ // Verify work was canceled
203
+ await t.run(async (ctx) => {
204
+ // Check that pendingStart was deleted
205
+ const pendingStarts = await ctx.db.query("pendingStart").collect();
206
+ expect(pendingStarts).toHaveLength(0);
207
+
208
+ // Check that pendingCancelation was deleted
209
+ const pendingCancelations = await ctx.db
210
+ .query("pendingCancelation")
211
+ .collect();
212
+ expect(pendingCancelations).toHaveLength(0);
213
+
214
+ // Check that work is not in running list
215
+ const state = await ctx.db.query("internalState").unique();
216
+ expect(state).toBeDefined();
217
+ assert(state);
218
+ expect(state.running).toHaveLength(0);
219
+ expect(state.report.canceled).toBe(1);
220
+
221
+ const work = await ctx.db.get(workId);
222
+ expect(work).not.toBeNull();
223
+ expect(work!.canceled).toBe(true);
224
+ });
225
+ });
226
+
227
+ it("should follow the complete -> pendingCompletion -> pendingStart flow for retries", async () => {
228
+ // Setup initial state with a running job that will need retry
229
+ const workId = await t.run<Id<"work">>(async (ctx) => {
230
+ // Create internal state
231
+ await ctx.db.insert("internalState", {
232
+ generation: 1n,
233
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
234
+ lastRecovery: 0n,
235
+ report: {
236
+ completed: 0,
237
+ succeeded: 0,
238
+ failed: 0,
239
+ retries: 0,
240
+ canceled: 0,
241
+ lastReportTs: Date.now(),
242
+ },
243
+ running: [],
244
+ });
245
+
246
+ // Create running runStatus
247
+ await ctx.db.insert("runStatus", {
248
+ state: { kind: "running" },
249
+ });
250
+
251
+ // Create work with retry behavior
252
+ const workId = await makeDummyWork(ctx, {
253
+ attempts: 0,
254
+ retryBehavior: {
255
+ maxAttempts: 3,
256
+ initialBackoffMs: 1000,
257
+ base: 2,
258
+ },
259
+ });
260
+
261
+ // Schedule a function and get its ID
262
+ const scheduledId = await makeDummyScheduledFunction(ctx, workId);
263
+
264
+ // Add to running list
265
+ const state = await ctx.db.query("internalState").unique();
266
+ assert(state);
267
+ await ctx.db.patch(state._id, {
268
+ running: [{ workId, scheduledId, started: Date.now() }],
269
+ });
270
+
271
+ return workId;
272
+ });
273
+
274
+ // Complete the work with failure (workerRunning -> complete)
275
+ await t.mutation(internal.complete.complete, {
276
+ jobs: [
277
+ {
278
+ workId,
279
+ runResult: { kind: "failed", error: "Test error" },
280
+ attempt: 0,
281
+ },
282
+ ],
283
+ });
284
+
285
+ // Verify pendingCompletion was created with retry=true
286
+ await t.run(async (ctx) => {
287
+ const pendingCompletions = await ctx.db
288
+ .query("pendingCompletion")
289
+ .collect();
290
+ expect(pendingCompletions).toHaveLength(1);
291
+ expect(pendingCompletions[0].workId).toBe(workId);
292
+ expect(pendingCompletions[0].runResult.kind).toBe("failed");
293
+ expect(pendingCompletions[0].retry).toBe(true);
294
+ });
295
+
296
+ // Run main loop to process pendingCompletion -> pendingStart
297
+ await t.mutation(internal.loop.main, {
298
+ generation: 1n,
299
+ segment: nextSegment(),
300
+ });
301
+
302
+ // Verify work is now in pendingStart for retry
303
+ await t.run(async (ctx) => {
304
+ // Check that pendingCompletion was deleted
305
+ const pendingCompletions = await ctx.db
306
+ .query("pendingCompletion")
307
+ .collect();
308
+ expect(pendingCompletions).toHaveLength(0);
309
+
310
+ // Check that pendingStart was created for retry
311
+ const pendingStarts = await ctx.db.query("pendingStart").collect();
312
+ expect(pendingStarts).toHaveLength(1);
313
+ expect(pendingStarts[0].workId).toBe(workId);
314
+
315
+ // Check that work still exists
316
+ const work = await ctx.db.get(workId);
317
+ expect(work).not.toBeNull();
318
+ expect(work!.attempts).toBe(1);
319
+ });
320
+ });
321
+ });
322
+
323
+ describe("status transitions", () => {
324
+ it("should transition from idle to running when work is enqueued", async () => {
325
+ // Setup initial idle state
326
+ await t.run(async (ctx) => {
327
+ // Create internal state
328
+ await ctx.db.insert("internalState", {
329
+ generation: 1n,
330
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
331
+ lastRecovery: 0n,
332
+ report: {
333
+ completed: 0,
334
+ succeeded: 0,
335
+ failed: 0,
336
+ retries: 0,
337
+ canceled: 0,
338
+ lastReportTs: Date.now(),
339
+ },
340
+ running: [],
341
+ });
342
+
343
+ // Create idle runStatus
344
+ await ctx.db.insert("runStatus", {
345
+ state: { kind: "idle", generation: 1n },
346
+ });
347
+ });
348
+
349
+ // Enqueue work
350
+ await t.mutation(api.lib.enqueue, {
351
+ fnHandle: "testHandle",
352
+ fnName: "testFunction",
353
+ fnArgs: { test: true },
354
+ fnType: "mutation",
355
+ runAt: Date.now(),
356
+ config: {
357
+ maxParallelism: 10,
358
+ logLevel: "INFO",
359
+ },
360
+ });
361
+
362
+ // Verify state transition to running
363
+ await t.run(async (ctx) => {
364
+ const runStatus = await ctx.db.query("runStatus").unique();
365
+ expect(runStatus).toBeDefined();
366
+ assert(runStatus);
367
+ expect(runStatus.state.kind).toBe("running");
368
+ });
369
+ });
370
+
371
+ it("should transition from running to scheduled when all work is started and there's leftover capacity", async () => {
372
+ // Setup initial running state with work
373
+ await t.run(async (ctx) => {
374
+ // Create internal state
375
+ await ctx.db.insert("internalState", {
376
+ generation: 1n,
377
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
378
+ lastRecovery: 0n,
379
+ report: {
380
+ completed: 0,
381
+ succeeded: 0,
382
+ failed: 0,
383
+ retries: 0,
384
+ canceled: 0,
385
+ lastReportTs: Date.now(),
386
+ },
387
+ running: [],
388
+ });
389
+
390
+ // Create running runStatus
391
+ await ctx.db.insert("runStatus", {
392
+ state: { kind: "running" },
393
+ });
394
+
395
+ // Create work
396
+ const workId = await makeDummyWork(ctx);
397
+
398
+ // Create pendingStart
399
+ await ctx.db.insert("pendingStart", {
400
+ workId,
401
+ segment: 1n,
402
+ });
403
+ });
404
+
405
+ // Run main loop to process the work
406
+ await t.mutation(internal.loop.main, {
407
+ generation: 1n,
408
+ segment: nextSegment(),
409
+ });
410
+
411
+ // Run updateRunStatus to transition to scheduled
412
+ await t.mutation(internal.loop.updateRunStatus, {
413
+ generation: 2n,
414
+ segment: nextSegment(),
415
+ });
416
+
417
+ // Verify state transition to scheduled
418
+ await t.run(async (ctx) => {
419
+ const runStatus = await ctx.db.query("runStatus").unique();
420
+ expect(runStatus).toBeDefined();
421
+ assert(runStatus);
422
+ expect(runStatus.state.kind).toBe("scheduled");
423
+ assert(runStatus.state.kind === "scheduled");
424
+ expect(runStatus.state.saturated).toBe(false);
425
+ });
426
+ });
427
+
428
+ it("should transition from running to saturated when maxed out", async () => {
429
+ // Setup initial running state with max capacity
430
+ await setMaxParallelism(1);
431
+ await t.run(async (ctx) => {
432
+ // Create work item
433
+ const workId = await makeDummyWork(ctx);
434
+
435
+ // Schedule a function and get its ID
436
+ const scheduledId = await makeDummyScheduledFunction(ctx, workId);
437
+
438
+ // Create internal state with running job
439
+ await ctx.db.insert("internalState", {
440
+ generation: 1n,
441
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
442
+ lastRecovery: 0n,
443
+ report: {
444
+ completed: 0,
445
+ succeeded: 0,
446
+ failed: 0,
447
+ retries: 0,
448
+ canceled: 0,
449
+ lastReportTs: Date.now(),
450
+ },
451
+ running: [{ workId, scheduledId, started: Date.now() }],
452
+ });
453
+
454
+ // Create running runStatus
455
+ await ctx.db.insert("runStatus", {
456
+ state: { kind: "running" },
457
+ });
458
+
459
+ // Create another pendingStart to exceed capacity
460
+ const anotherWorkId = await makeDummyWork(ctx);
461
+
462
+ await ctx.db.insert("pendingStart", {
463
+ workId: anotherWorkId,
464
+ segment: 1n,
465
+ });
466
+ });
467
+
468
+ // Run updateRunStatus to transition to scheduled with saturated=true
469
+ await t.mutation(internal.loop.updateRunStatus, {
470
+ generation: 1n,
471
+ segment: 1n,
472
+ });
473
+
474
+ // Verify state transition to scheduled with saturated=true
475
+ await t.run(async (ctx) => {
476
+ const runStatus = await ctx.db.query("runStatus").unique();
477
+ expect(runStatus).toBeDefined();
478
+ assert(runStatus);
479
+ expect(runStatus.state.kind).toBe("scheduled");
480
+ assert(runStatus.state.kind === "scheduled");
481
+ expect(runStatus.state.saturated).toBe(true);
482
+ });
483
+ });
484
+
485
+ it("should transition from scheduled to running when new work is enqueued", async () => {
486
+ // Setup initial scheduled state
487
+ const scheduledId = await t.run<Id<"_scheduled_functions">>(
488
+ async (ctx) => {
489
+ // Create internal state
490
+ await ctx.db.insert("internalState", {
491
+ generation: 1n,
492
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
493
+ lastRecovery: 0n,
494
+ report: {
495
+ completed: 0,
496
+ succeeded: 0,
497
+ failed: 0,
498
+ retries: 0,
499
+ canceled: 0,
500
+ lastReportTs: Date.now(),
501
+ },
502
+ running: [],
503
+ });
504
+
505
+ // Schedule main loop
506
+ const scheduledId = await ctx.scheduler.runAfter(
507
+ 1000,
508
+ internal.loop.main,
509
+ { generation: 1n, segment: nextSegment() + 10n }
510
+ );
511
+
512
+ // Create scheduled runStatus
513
+ await ctx.db.insert("runStatus", {
514
+ state: {
515
+ kind: "scheduled",
516
+ segment: nextSegment() + 10n,
517
+ scheduledId,
518
+ saturated: false,
519
+ generation: 1n,
520
+ },
521
+ });
522
+
523
+ return scheduledId;
524
+ }
525
+ );
526
+
527
+ // Enqueue work to trigger transition to running
528
+ await t.mutation(api.lib.enqueue, {
529
+ fnHandle: "testHandle",
530
+ fnName: "testFunction",
531
+ fnArgs: { test: true },
532
+ fnType: "mutation",
533
+ runAt: Date.now(),
534
+ config: {
535
+ maxParallelism: 10,
536
+ logLevel: "INFO",
537
+ },
538
+ });
539
+
540
+ // Verify state transition to running
541
+ await t.run(async (ctx) => {
542
+ const runStatus = await ctx.db.query("runStatus").unique();
543
+ expect(runStatus).toBeDefined();
544
+ assert(runStatus);
545
+ expect(runStatus.state.kind).toBe("running");
546
+ });
547
+ });
548
+
549
+ it("should transition from running to idle when all work is done", async () => {
550
+ const segment = nextSegment();
551
+ // Setup initial running state with work
552
+ const workId = await t.run<Id<"work">>(async (ctx) => {
553
+ // Create internal state
554
+ await ctx.db.insert("internalState", {
555
+ generation: 1n,
556
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
557
+ lastRecovery: 0n,
558
+ report: {
559
+ completed: 0,
560
+ succeeded: 0,
561
+ failed: 0,
562
+ retries: 0,
563
+ canceled: 0,
564
+ lastReportTs: Date.now(),
565
+ },
566
+ running: [],
567
+ });
568
+
569
+ // Create running runStatus
570
+ await ctx.db.insert("runStatus", {
571
+ state: { kind: "running" },
572
+ });
573
+
574
+ // Create work
575
+ const workId = await makeDummyWork(ctx, { attempts: 0 });
576
+
577
+ // Create pendingStart
578
+ await ctx.db.insert("pendingStart", {
579
+ workId,
580
+ segment,
581
+ });
582
+
583
+ return workId;
584
+ });
585
+
586
+ // Run main loop to process the work
587
+ await t.mutation(internal.loop.main, { generation: 1n, segment });
588
+
589
+ // Complete the work
590
+ await t.mutation(internal.complete.complete, {
591
+ jobs: [
592
+ {
593
+ workId,
594
+ runResult: { kind: "success", returnValue: null },
595
+ attempt: 0,
596
+ },
597
+ ],
598
+ });
599
+
600
+ // Run main loop again to process the completion
601
+ await t.mutation(internal.loop.main, { generation: 2n, segment });
602
+
603
+ // Run updateRunStatus to transition to idle
604
+ await t.mutation(internal.loop.updateRunStatus, {
605
+ generation: 3n,
606
+ segment,
607
+ });
608
+
609
+ // Verify state transition to idle
610
+ await t.run(async (ctx) => {
611
+ const runStatus = await ctx.db.query("runStatus").unique();
612
+ expect(runStatus).toBeDefined();
613
+ assert(runStatus);
614
+ expect(runStatus.state.kind).toBe("idle");
615
+ assert(runStatus.state.kind === "idle");
616
+ });
617
+ });
618
+ });
619
+
620
+ describe("main function", () => {
621
+ it("should handle generation mismatch", async () => {
622
+ // Setup state with different generation
623
+ await t.run(async (ctx) => {
624
+ await ctx.db.insert("internalState", {
625
+ generation: 2n,
626
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
627
+ lastRecovery: 0n,
628
+ report: {
629
+ completed: 0,
630
+ succeeded: 0,
631
+ failed: 0,
632
+ retries: 0,
633
+ canceled: 0,
634
+ lastReportTs: Date.now(),
635
+ },
636
+ running: [],
637
+ });
638
+ });
639
+
640
+ // Call main with mismatched generation
641
+ await expect(
642
+ t.mutation(internal.loop.main, { generation: 1n, segment: 1n })
643
+ ).rejects.toThrow("generation mismatch");
644
+ });
645
+
646
+ it("should process pending completions", async () => {
647
+ // Setup state with a running job
648
+ await t.run(async (ctx) => {
649
+ // Create a work item for the running list
650
+ const workId = await makeDummyWork(ctx);
651
+
652
+ // Schedule a function and get its ID
653
+ const scheduledId = await makeDummyScheduledFunction(ctx, workId);
654
+
655
+ // Create internal state
656
+ await ctx.db.insert("internalState", {
657
+ generation: 1n,
658
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
659
+ lastRecovery: 0n,
660
+ report: {
661
+ completed: 0,
662
+ succeeded: 0,
663
+ failed: 0,
664
+ retries: 0,
665
+ canceled: 0,
666
+ lastReportTs: Date.now(),
667
+ },
668
+ running: [
669
+ {
670
+ workId,
671
+ scheduledId,
672
+ started: 900000,
673
+ },
674
+ ],
675
+ });
676
+
677
+ // Create pending completion
678
+ await ctx.db.insert("pendingCompletion", {
679
+ workId,
680
+ runResult: { kind: "success", returnValue: null },
681
+ segment: 1n,
682
+ retry: false,
683
+ });
684
+ });
685
+
686
+ // Call main
687
+ await t.mutation(internal.loop.main, { generation: 1n, segment: 1n });
688
+
689
+ // Verify completion was processed
690
+ await t.run(async (ctx) => {
691
+ // Check that pendingCompletion was deleted
692
+ const completions = await ctx.db.query("pendingCompletion").collect();
693
+ expect(completions).toHaveLength(0);
694
+
695
+ // Check that work was removed from running list
696
+ const state = await ctx.db.query("internalState").unique();
697
+ expect(state).toBeDefined();
698
+ assert(state);
699
+ expect(state.running).toHaveLength(0);
700
+ expect(state.report.completed).toBe(1);
701
+ expect(state.report.succeeded).toBe(1);
702
+ });
703
+ });
704
+
705
+ it("should handle job retries", async () => {
706
+ // Setup state with a job that needs retry
707
+ const workId = await t.run<Id<"work">>(async (ctx) => {
708
+ // Create a work item for the running list
709
+ const workId = await makeDummyWork(ctx, {
710
+ attempts: 1,
711
+ retryBehavior: {
712
+ maxAttempts: 3,
713
+ initialBackoffMs: 1000,
714
+ base: 2,
715
+ },
716
+ });
717
+
718
+ // Schedule a function and get its ID
719
+ const scheduledId = await makeDummyScheduledFunction(ctx, workId);
720
+
721
+ // Create internal state
722
+ await ctx.db.insert("internalState", {
723
+ generation: 1n,
724
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
725
+ lastRecovery: 0n,
726
+ report: {
727
+ completed: 0,
728
+ succeeded: 0,
729
+ failed: 0,
730
+ retries: 0,
731
+ canceled: 0,
732
+ lastReportTs: Date.now(),
733
+ },
734
+ running: [
735
+ {
736
+ workId,
737
+ scheduledId,
738
+ started: 900000,
739
+ },
740
+ ],
741
+ });
742
+
743
+ // Create pending completion with failed result
744
+ await ctx.db.insert("pendingCompletion", {
745
+ workId,
746
+ runResult: { kind: "failed", error: "test error" },
747
+ segment: 1n,
748
+ retry: true,
749
+ });
750
+
751
+ return workId;
752
+ });
753
+
754
+ // Call main
755
+ await t.mutation(internal.loop.main, { generation: 1n, segment: 1n });
756
+
757
+ // Verify job was retried
758
+ await t.run(async (ctx) => {
759
+ // Check that pendingCompletion was deleted
760
+ const completions = await ctx.db.query("pendingCompletion").collect();
761
+ expect(completions).toHaveLength(0);
762
+
763
+ // Check that work was updated
764
+ const work = await ctx.db.get(workId);
765
+ expect(work).toBeDefined();
766
+ expect(work!.attempts).toBe(1);
767
+
768
+ // Check that a new pendingStart was created
769
+ const pendingStarts = await ctx.db.query("pendingStart").collect();
770
+ expect(pendingStarts).toHaveLength(1);
771
+ expect(pendingStarts[0].workId).toBe(workId);
772
+
773
+ // Check that report was updated
774
+ const state = await ctx.db.query("internalState").unique();
775
+ expect(state).toBeDefined();
776
+ expect(state!.report.retries).toBe(1);
777
+ });
778
+ });
779
+
780
+ it("should process pending cancelations", async () => {
781
+ // Setup state with a pending cancelation
782
+ const workId = await t.run<Id<"work">>(async (ctx) => {
783
+ // Create a work item for the running list
784
+ const runningWorkId = await makeDummyWork(ctx);
785
+
786
+ // Schedule a function and get its ID
787
+ const scheduledId = await makeDummyScheduledFunction(
788
+ ctx,
789
+ runningWorkId
790
+ );
791
+
792
+ // Create internal state
793
+ await ctx.db.insert("internalState", {
794
+ generation: 1n,
795
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
796
+ lastRecovery: 0n,
797
+ report: {
798
+ completed: 0,
799
+ succeeded: 0,
800
+ failed: 0,
801
+ retries: 0,
802
+ canceled: 0,
803
+ lastReportTs: Date.now(),
804
+ },
805
+ running: [
806
+ {
807
+ workId: runningWorkId,
808
+ scheduledId,
809
+ started: 900000,
810
+ },
811
+ ],
812
+ });
813
+
814
+ // Create work
815
+ const workId = await makeDummyWork(ctx, {
816
+ retryBehavior: {
817
+ maxAttempts: 3,
818
+ initialBackoffMs: 1000,
819
+ base: 2,
820
+ },
821
+ });
822
+
823
+ // Create pending start
824
+ await ctx.db.insert("pendingStart", {
825
+ workId,
826
+ segment: 1n,
827
+ });
828
+
829
+ // Create pending cancelation
830
+ await ctx.db.insert("pendingCancelation", {
831
+ workId,
832
+ segment: 1n,
833
+ });
834
+
835
+ return workId;
836
+ });
837
+
838
+ // Call main
839
+ await t.mutation(internal.loop.main, { generation: 1n, segment: 1n });
840
+
841
+ // Verify cancelation was processed
842
+ await t.run(async (ctx) => {
843
+ // Check that pendingCancelation was deleted
844
+ const cancelations = await ctx.db.query("pendingCancelation").collect();
845
+ expect(cancelations).toHaveLength(0);
846
+
847
+ // Check that pendingStart was deleted
848
+ const pendingStarts = await ctx.db.query("pendingStart").collect();
849
+ expect(pendingStarts).toHaveLength(0);
850
+
851
+ const work = await ctx.db.get(workId);
852
+ expect(work).toBeDefined();
853
+ expect(work!.canceled).toBe(true);
854
+
855
+ // Check that report was updated
856
+ const state = await ctx.db.query("internalState").unique();
857
+ expect(state).toBeDefined();
858
+ expect(state!.report.canceled).toBe(1);
859
+ });
860
+ });
861
+
862
+ it("should schedule new work", async () => {
863
+ // Setup state with pending start items
864
+ const workId = await t.run<Id<"work">>(async (ctx) => {
865
+ // Create internal state
866
+ await ctx.db.insert("internalState", {
867
+ generation: 1n,
868
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
869
+ lastRecovery: 0n,
870
+ report: {
871
+ completed: 0,
872
+ succeeded: 0,
873
+ failed: 0,
874
+ retries: 0,
875
+ canceled: 0,
876
+ lastReportTs: Date.now(),
877
+ },
878
+ running: [],
879
+ });
880
+
881
+ // Create work
882
+ const workId = await makeDummyWork(ctx);
883
+
884
+ // Create pending start
885
+ await ctx.db.insert("pendingStart", {
886
+ workId,
887
+ segment: 1n,
888
+ });
889
+
890
+ return workId;
891
+ });
892
+
893
+ // Call main
894
+ await t.mutation(internal.loop.main, { generation: 1n, segment: 1n });
895
+
896
+ // Verify work was started
897
+ await t.run(async (ctx) => {
898
+ // Check that pendingStart was deleted
899
+ const pendingStarts = await ctx.db.query("pendingStart").collect();
900
+ expect(pendingStarts).toHaveLength(0);
901
+
902
+ // Check that work was added to running list
903
+ const state = await ctx.db.query("internalState").unique();
904
+ expect(state).toBeDefined();
905
+ expect(state!.running).toHaveLength(1);
906
+ expect(state!.running[0].workId).toBe(workId);
907
+ });
908
+ });
909
+
910
+ it("should schedule recovery for old jobs", async () => {
911
+ // Setup state with old running jobs
912
+ const oldTime = Date.now() - 5 * 60 * 1000 - 1000; // Older than recovery threshold
913
+
914
+ await t.run(async (ctx) => {
915
+ // Create work for the running list
916
+ const workId = await makeDummyWork(ctx);
917
+
918
+ // Schedule a function and get its ID
919
+ const scheduledId = await makeDummyScheduledFunction(ctx, workId);
920
+
921
+ // Create internal state with old job
922
+ await ctx.db.insert("internalState", {
923
+ generation: 1n,
924
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
925
+ lastRecovery: 0n,
926
+ report: {
927
+ completed: 0,
928
+ succeeded: 0,
929
+ failed: 0,
930
+ retries: 0,
931
+ canceled: 0,
932
+ lastReportTs: Date.now(),
933
+ },
934
+ running: [
935
+ {
936
+ workId,
937
+ scheduledId,
938
+ started: oldTime,
939
+ },
940
+ ],
941
+ });
942
+ });
943
+
944
+ // Call main
945
+ const segment = toSegment(60 * 60 * 1000);
946
+ await t.mutation(internal.loop.main, {
947
+ generation: 1n,
948
+ segment,
949
+ });
950
+
951
+ // Verify recovery was scheduled
952
+ await t.run(async (ctx) => {
953
+ // Check that lastRecovery was updated
954
+ const state = await ctx.db.query("internalState").unique();
955
+ expect(state).toBeDefined();
956
+ expect(state!.lastRecovery).toBe(segment);
957
+
958
+ // We can't directly check if recovery.recover was scheduled,
959
+ // but we can verify the state was updated correctly
960
+ });
961
+ });
962
+ });
963
+
964
+ describe("updateRunStatus function", () => {
965
+ it("should handle generation mismatch", async () => {
966
+ // Setup state with different generation
967
+ await t.run(async (ctx) => {
968
+ await ctx.db.insert("internalState", {
969
+ generation: 2n,
970
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
971
+ lastRecovery: 0n,
972
+ report: {
973
+ completed: 0,
974
+ succeeded: 0,
975
+ failed: 0,
976
+ retries: 0,
977
+ canceled: 0,
978
+ lastReportTs: Date.now(),
979
+ },
980
+ running: [],
981
+ });
982
+ });
983
+
984
+ // Call updateRunStatus with mismatched generation
985
+ await expect(
986
+ t.mutation(internal.loop.updateRunStatus, {
987
+ generation: 1n,
988
+ segment: 1n,
989
+ })
990
+ ).rejects.toThrow("generation mismatch");
991
+ });
992
+
993
+ it("should schedule main immediately if there are outstanding cancelations", async () => {
994
+ // Setup state with outstanding cancelations
995
+ await t.run(async (ctx) => {
996
+ // Create work for cancelation
997
+ const workId = await makeDummyWork(ctx);
998
+
999
+ // Create internal state
1000
+ await ctx.db.insert("internalState", {
1001
+ generation: 1n,
1002
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
1003
+ lastRecovery: 0n,
1004
+ report: {
1005
+ completed: 0,
1006
+ succeeded: 0,
1007
+ failed: 0,
1008
+ retries: 0,
1009
+ canceled: 0,
1010
+ lastReportTs: Date.now(),
1011
+ },
1012
+ running: [],
1013
+ });
1014
+
1015
+ // Create run status
1016
+ await ctx.db.insert("runStatus", {
1017
+ state: { kind: "running" },
1018
+ });
1019
+
1020
+ // Create pending cancelation
1021
+ await ctx.db.insert("pendingCancelation", {
1022
+ workId,
1023
+ segment: 1n,
1024
+ });
1025
+ });
1026
+
1027
+ // Call updateRunStatus
1028
+ await t.mutation(internal.loop.updateRunStatus, {
1029
+ generation: 1n,
1030
+ segment: 1n,
1031
+ });
1032
+
1033
+ // Verify main was scheduled (indirectly by checking runStatus)
1034
+ await t.run(async (ctx) => {
1035
+ // We can't directly check if main was scheduled,
1036
+ // but we can verify the state was updated correctly
1037
+ const runStatus = await ctx.db.query("runStatus").unique();
1038
+ expect(runStatus).toBeDefined();
1039
+ // The state should no longer be idle
1040
+ expect(runStatus!.state.kind).not.toBe("idle");
1041
+ });
1042
+ });
1043
+
1044
+ it("should transition to idle state when there is no work", async () => {
1045
+ // Setup state with no work
1046
+ await t.run(async (ctx) => {
1047
+ // Create internal state with no running jobs
1048
+ await ctx.db.insert("internalState", {
1049
+ generation: 1n,
1050
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
1051
+ lastRecovery: 0n,
1052
+ report: {
1053
+ completed: 0,
1054
+ succeeded: 0,
1055
+ failed: 0,
1056
+ retries: 0,
1057
+ canceled: 0,
1058
+ lastReportTs: Date.now(),
1059
+ },
1060
+ running: [],
1061
+ });
1062
+
1063
+ // Create run status in running state
1064
+ await ctx.db.insert("runStatus", {
1065
+ state: { kind: "running" },
1066
+ });
1067
+ });
1068
+
1069
+ // Call updateRunStatus
1070
+ await t.mutation(internal.loop.updateRunStatus, {
1071
+ generation: 1n,
1072
+ segment: 1n,
1073
+ });
1074
+
1075
+ // Verify idle state was set
1076
+ await t.run(async (ctx) => {
1077
+ const runStatus = await ctx.db.query("runStatus").unique();
1078
+ expect(runStatus).toBeDefined();
1079
+ expect(runStatus!.state.kind).toBe("idle");
1080
+ assert(runStatus!.state.kind === "idle");
1081
+ expect(runStatus!.state.generation).toBe(1n);
1082
+ });
1083
+ });
1084
+
1085
+ it("should set saturated flag when at max capacity", async () => {
1086
+ // Setup state with running jobs at max capacity
1087
+ const now = currentSegment();
1088
+ const later = now + 10n;
1089
+ await setMaxParallelism(10);
1090
+ await t.run(async (ctx) => {
1091
+ // Create 10 work items and scheduled functions
1092
+ const runningJobs = await Promise.all(
1093
+ Array(10)
1094
+ .fill(0)
1095
+ .map(async () => {
1096
+ const workId = await makeDummyWork(ctx);
1097
+
1098
+ // Schedule a function and get its ID
1099
+ const scheduledId = await makeDummyScheduledFunction(ctx, workId);
1100
+
1101
+ return { workId, scheduledId, started: Date.now() };
1102
+ })
1103
+ );
1104
+
1105
+ // Create internal state with max running jobs
1106
+ await ctx.db.insert("internalState", {
1107
+ generation: 1n,
1108
+ segmentCursors: { incoming: 0n, completion: 0n, cancelation: 0n },
1109
+ lastRecovery: 0n,
1110
+ report: {
1111
+ completed: 0,
1112
+ succeeded: 0,
1113
+ failed: 0,
1114
+ retries: 0,
1115
+ canceled: 0,
1116
+ lastReportTs: Date.now(),
1117
+ },
1118
+ running: runningJobs,
1119
+ });
1120
+
1121
+ // Create run status
1122
+ await ctx.db.insert("runStatus", {
1123
+ state: { kind: "running" },
1124
+ });
1125
+
1126
+ // Create future completion to trigger scheduling
1127
+ await ctx.db.insert("pendingCompletion", {
1128
+ workId: runningJobs[0].workId,
1129
+ runResult: { kind: "success", returnValue: null },
1130
+ segment: later,
1131
+ retry: false,
1132
+ });
1133
+ });
1134
+
1135
+ // Call updateRunStatus
1136
+ await t.mutation(internal.loop.updateRunStatus, {
1137
+ generation: 1n,
1138
+ segment: 1n,
1139
+ });
1140
+
1141
+ // Verify scheduled state was set with saturated flag
1142
+ await t.run(async (ctx) => {
1143
+ const runStatus = await ctx.db.query("runStatus").unique();
1144
+ expect(runStatus).toBeDefined();
1145
+ expect(runStatus!.state.kind).toBe("scheduled");
1146
+ assert(runStatus!.state.kind === "scheduled");
1147
+ expect(runStatus!.state.saturated).toBe(true);
1148
+ });
1149
+ });
1150
+ });
1151
+
1152
+ describe("complete function", () => {
1153
+ it("should run onComplete handlers and delete work", async () => {
1154
+ // Setup mock work with onComplete handler
1155
+ const workId = await t.run<Id<"work">>(async (ctx) => {
1156
+ const workId = await makeDummyWork(ctx, {
1157
+ attempts: 0,
1158
+ onComplete: {
1159
+ fnHandle: "onComplete_handle",
1160
+ context: { data: "test" },
1161
+ },
1162
+ });
1163
+ return workId;
1164
+ });
1165
+
1166
+ // Call complete
1167
+ await t.mutation(internal.complete.complete, {
1168
+ jobs: [
1169
+ {
1170
+ workId,
1171
+ runResult: { kind: "success", returnValue: null },
1172
+ attempt: 0,
1173
+ },
1174
+ ],
1175
+ });
1176
+
1177
+ // Verify work was deleted
1178
+ await t.run(async (ctx) => {
1179
+ const work = await ctx.db.get(workId);
1180
+ expect(work).toBeNull();
1181
+ });
1182
+ });
1183
+
1184
+ it("should handle missing work gracefully", async () => {
1185
+ // Call complete with non-existent work ID
1186
+ const workId = await t.run(async (ctx) => {
1187
+ const id = await makeDummyWork(ctx, { attempts: 0 });
1188
+ await ctx.db.delete(id);
1189
+ return id;
1190
+ });
1191
+ await t.mutation(internal.complete.complete, {
1192
+ jobs: [
1193
+ {
1194
+ workId,
1195
+ runResult: { kind: "success", returnValue: null },
1196
+ attempt: 0,
1197
+ },
1198
+ ],
1199
+ });
1200
+
1201
+ // No error should be thrown
1202
+ });
1203
+ });
1204
+ });