@convex-dev/workpool 0.1.2 → 0.2.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (125)
  1. package/README.md +155 -17
  2. package/dist/commonjs/client/index.d.ts +123 -35
  3. package/dist/commonjs/client/index.d.ts.map +1 -1
  4. package/dist/commonjs/client/index.js +122 -15
  5. package/dist/commonjs/client/index.js.map +1 -1
  6. package/dist/commonjs/client/utils.d.ts +16 -0
  7. package/dist/commonjs/client/utils.d.ts.map +1 -0
  8. package/dist/commonjs/client/utils.js +2 -0
  9. package/dist/commonjs/client/utils.js.map +1 -0
  10. package/dist/commonjs/component/complete.d.ts +89 -0
  11. package/dist/commonjs/component/complete.d.ts.map +1 -0
  12. package/dist/commonjs/component/complete.js +80 -0
  13. package/dist/commonjs/component/complete.js.map +1 -0
  14. package/dist/commonjs/component/convex.config.d.ts.map +1 -1
  15. package/dist/commonjs/component/convex.config.js +0 -2
  16. package/dist/commonjs/component/convex.config.js.map +1 -1
  17. package/dist/commonjs/component/kick.d.ts +9 -0
  18. package/dist/commonjs/component/kick.d.ts.map +1 -0
  19. package/dist/commonjs/component/kick.js +97 -0
  20. package/dist/commonjs/component/kick.js.map +1 -0
  21. package/dist/commonjs/component/lib.d.ts +23 -32
  22. package/dist/commonjs/component/lib.d.ts.map +1 -1
  23. package/dist/commonjs/component/lib.js +91 -563
  24. package/dist/commonjs/component/lib.js.map +1 -1
  25. package/dist/commonjs/component/logging.d.ts +5 -3
  26. package/dist/commonjs/component/logging.d.ts.map +1 -1
  27. package/dist/commonjs/component/logging.js +13 -2
  28. package/dist/commonjs/component/logging.js.map +1 -1
  29. package/dist/commonjs/component/loop.d.ts +13 -0
  30. package/dist/commonjs/component/loop.d.ts.map +1 -0
  31. package/dist/commonjs/component/loop.js +482 -0
  32. package/dist/commonjs/component/loop.js.map +1 -0
  33. package/dist/commonjs/component/recovery.d.ts +24 -0
  34. package/dist/commonjs/component/recovery.d.ts.map +1 -0
  35. package/dist/commonjs/component/recovery.js +94 -0
  36. package/dist/commonjs/component/recovery.js.map +1 -0
  37. package/dist/commonjs/component/schema.d.ts +167 -93
  38. package/dist/commonjs/component/schema.d.ts.map +1 -1
  39. package/dist/commonjs/component/schema.js +56 -65
  40. package/dist/commonjs/component/schema.js.map +1 -1
  41. package/dist/commonjs/component/shared.d.ts +138 -0
  42. package/dist/commonjs/component/shared.d.ts.map +1 -0
  43. package/dist/commonjs/component/shared.js +77 -0
  44. package/dist/commonjs/component/shared.js.map +1 -0
  45. package/dist/commonjs/component/stats.d.ts +6 -3
  46. package/dist/commonjs/component/stats.d.ts.map +1 -1
  47. package/dist/commonjs/component/stats.js +23 -4
  48. package/dist/commonjs/component/stats.js.map +1 -1
  49. package/dist/commonjs/component/worker.d.ts +15 -0
  50. package/dist/commonjs/component/worker.d.ts.map +1 -0
  51. package/dist/commonjs/component/worker.js +73 -0
  52. package/dist/commonjs/component/worker.js.map +1 -0
  53. package/dist/esm/client/index.d.ts +123 -35
  54. package/dist/esm/client/index.d.ts.map +1 -1
  55. package/dist/esm/client/index.js +122 -15
  56. package/dist/esm/client/index.js.map +1 -1
  57. package/dist/esm/client/utils.d.ts +16 -0
  58. package/dist/esm/client/utils.d.ts.map +1 -0
  59. package/dist/esm/client/utils.js +2 -0
  60. package/dist/esm/client/utils.js.map +1 -0
  61. package/dist/esm/component/complete.d.ts +89 -0
  62. package/dist/esm/component/complete.d.ts.map +1 -0
  63. package/dist/esm/component/complete.js +80 -0
  64. package/dist/esm/component/complete.js.map +1 -0
  65. package/dist/esm/component/convex.config.d.ts.map +1 -1
  66. package/dist/esm/component/convex.config.js +0 -2
  67. package/dist/esm/component/convex.config.js.map +1 -1
  68. package/dist/esm/component/kick.d.ts +9 -0
  69. package/dist/esm/component/kick.d.ts.map +1 -0
  70. package/dist/esm/component/kick.js +97 -0
  71. package/dist/esm/component/kick.js.map +1 -0
  72. package/dist/esm/component/lib.d.ts +23 -32
  73. package/dist/esm/component/lib.d.ts.map +1 -1
  74. package/dist/esm/component/lib.js +91 -563
  75. package/dist/esm/component/lib.js.map +1 -1
  76. package/dist/esm/component/logging.d.ts +5 -3
  77. package/dist/esm/component/logging.d.ts.map +1 -1
  78. package/dist/esm/component/logging.js +13 -2
  79. package/dist/esm/component/logging.js.map +1 -1
  80. package/dist/esm/component/loop.d.ts +13 -0
  81. package/dist/esm/component/loop.d.ts.map +1 -0
  82. package/dist/esm/component/loop.js +482 -0
  83. package/dist/esm/component/loop.js.map +1 -0
  84. package/dist/esm/component/recovery.d.ts +24 -0
  85. package/dist/esm/component/recovery.d.ts.map +1 -0
  86. package/dist/esm/component/recovery.js +94 -0
  87. package/dist/esm/component/recovery.js.map +1 -0
  88. package/dist/esm/component/schema.d.ts +167 -93
  89. package/dist/esm/component/schema.d.ts.map +1 -1
  90. package/dist/esm/component/schema.js +56 -65
  91. package/dist/esm/component/schema.js.map +1 -1
  92. package/dist/esm/component/shared.d.ts +138 -0
  93. package/dist/esm/component/shared.d.ts.map +1 -0
  94. package/dist/esm/component/shared.js +77 -0
  95. package/dist/esm/component/shared.js.map +1 -0
  96. package/dist/esm/component/stats.d.ts +6 -3
  97. package/dist/esm/component/stats.d.ts.map +1 -1
  98. package/dist/esm/component/stats.js +23 -4
  99. package/dist/esm/component/stats.js.map +1 -1
  100. package/dist/esm/component/worker.d.ts +15 -0
  101. package/dist/esm/component/worker.d.ts.map +1 -0
  102. package/dist/esm/component/worker.js +73 -0
  103. package/dist/esm/component/worker.js.map +1 -0
  104. package/package.json +6 -5
  105. package/src/client/index.ts +232 -68
  106. package/src/client/utils.ts +45 -0
  107. package/src/component/README.md +73 -0
  108. package/src/component/_generated/api.d.ts +38 -66
  109. package/src/component/complete.test.ts +508 -0
  110. package/src/component/complete.ts +98 -0
  111. package/src/component/convex.config.ts +0 -3
  112. package/src/component/kick.test.ts +285 -0
  113. package/src/component/kick.ts +118 -0
  114. package/src/component/lib.test.ts +448 -0
  115. package/src/component/lib.ts +105 -667
  116. package/src/component/logging.ts +24 -12
  117. package/src/component/loop.test.ts +1204 -0
  118. package/src/component/loop.ts +637 -0
  119. package/src/component/recovery.test.ts +541 -0
  120. package/src/component/recovery.ts +96 -0
  121. package/src/component/schema.ts +61 -77
  122. package/src/component/setup.test.ts +5 -0
  123. package/src/component/shared.ts +141 -0
  124. package/src/component/stats.ts +26 -8
  125. package/src/component/worker.ts +81 -0
@@ -0,0 +1,508 @@
1
+ import { convexTest } from "convex-test";
2
+ import {
3
+ describe,
4
+ expect,
5
+ it,
6
+ beforeEach,
7
+ afterEach,
8
+ vi,
9
+ assert,
10
+ } from "vitest";
11
+ import schema from "./schema";
12
+ import { api } from "./_generated/api";
13
+ import { completeHandler } from "./complete";
14
+
15
// Unit tests for `completeHandler` (complete.ts): verifies result recording,
// retry scheduling, onComplete callbacks, and idempotent handling of
// duplicate/stale completion reports.
const modules = import.meta.glob("./**/*.ts");

describe("complete", () => {
  async function setupTest() {
    const t = convexTest(schema, modules);
    return t;
  }

  let t: Awaited<ReturnType<typeof setupTest>>;

  beforeEach(async () => {
    vi.useFakeTimers();
    t = await setupTest();

    // Set up globals for logging
    await t.run(async (ctx) => {
      await ctx.db.insert("globals", {
        maxParallelism: 10,
        logLevel: "INFO",
      });
    });
  });

  afterEach(() => {
    vi.useRealTimers();
  });

  describe("completeHandler", () => {
    it("should process a successful job and delete the work", async () => {
      // Enqueue a work item
      const workId = await t.mutation(api.lib.enqueue, {
        fnHandle: "testHandle",
        fnName: "testFunction",
        fnArgs: { test: "data" },
        fnType: "mutation",
        runAt: Date.now(),
        config: {
          maxParallelism: 10,
          logLevel: "INFO",
        },
      });

      // Simulate a successful job completion
      await t.run(async (ctx) => {
        await completeHandler(ctx, {
          jobs: [
            {
              workId,
              runResult: { kind: "success", returnValue: "test result" },
              attempt: 0,
            },
          ],
        });
      });

      // Verify work was deleted
      await t.run(async (ctx) => {
        const work = await ctx.db.get(workId);
        expect(work).toBeNull();
      });

      // Verify pendingCompletion was created
      await t.run(async (ctx) => {
        const pendingCompletions = await ctx.db
          .query("pendingCompletion")
          .withIndex("workId", (q) => q.eq("workId", workId))
          .collect();
        expect(pendingCompletions).toHaveLength(1);
        expect(pendingCompletions[0].runResult.kind).toBe("success");
        expect(pendingCompletions[0].retry).toBe(false);
      });
    });

    it("should process a failed job with retry behavior", async () => {
      // Enqueue a work item with retry behavior
      const workId = await t.mutation(api.lib.enqueue, {
        fnHandle: "testHandle",
        fnName: "testFunction",
        fnArgs: { test: "data" },
        fnType: "mutation",
        runAt: Date.now(),
        config: {
          maxParallelism: 10,
          logLevel: "INFO",
        },
        retryBehavior: {
          maxAttempts: 3,
          initialBackoffMs: 100,
          base: 2,
        },
      });

      // Simulate a failed job completion
      await t.run(async (ctx) => {
        await completeHandler(ctx, {
          jobs: [
            {
              workId,
              runResult: { kind: "failed", error: "test error" },
              attempt: 0,
            },
          ],
        });
      });

      // Verify work was not deleted (since it should be retried)
      await t.run(async (ctx) => {
        const work = await ctx.db.get(workId);
        expect(work).not.toBeNull();
        expect(work?.attempts).toBe(1); // Incremented from 0
      });

      // Verify pendingCompletion was created with retry=true
      await t.run(async (ctx) => {
        const pendingCompletions = await ctx.db
          .query("pendingCompletion")
          .withIndex("workId", (q) => q.eq("workId", workId))
          .collect();
        expect(pendingCompletions).toHaveLength(1);
        expect(pendingCompletions[0].runResult.kind).toBe("failed");
        expect(pendingCompletions[0].retry).toBe(true);
      });
    });

    it("should process a failed job that has reached max attempts", async () => {
      // Enqueue a work item with retry behavior
      const workId = await t.mutation(api.lib.enqueue, {
        fnHandle: "testHandle",
        fnName: "testFunction",
        fnArgs: { test: "data" },
        fnType: "mutation",
        runAt: Date.now(),
        config: {
          maxParallelism: 10,
          logLevel: "INFO",
        },
        retryBehavior: {
          maxAttempts: 2, // Only 1 retry allowed
          initialBackoffMs: 100,
          base: 2,
        },
      });

      // Update the work to simulate it's already been attempted once
      await t.run(async (ctx) => {
        const work = await ctx.db.get(workId);
        if (work) {
          await ctx.db.patch(work._id, { attempts: 1 });
        }
      });

      // Simulate a failed job completion on the final attempt
      await t.run(async (ctx) => {
        await completeHandler(ctx, {
          jobs: [
            {
              workId,
              runResult: { kind: "failed", error: "test error" },
              attempt: 1,
            },
          ],
        });
      });

      // Verify work was deleted (since max attempts reached)
      await t.run(async (ctx) => {
        const work = await ctx.db.get(workId);
        expect(work).toBeNull();
      });

      // Verify pendingCompletion was created with retry=false
      await t.run(async (ctx) => {
        const pendingCompletions = await ctx.db
          .query("pendingCompletion")
          .withIndex("workId", (q) => q.eq("workId", workId))
          .collect();
        expect(pendingCompletions).toHaveLength(1);
        expect(pendingCompletions[0].runResult.kind).toBe("failed");
        expect(pendingCompletions[0].retry).toBe(false);
      });
    });

    it("should process a canceled job", async () => {
      // Enqueue a work item
      const workId = await t.mutation(api.lib.enqueue, {
        fnHandle: "testHandle",
        fnName: "testFunction",
        fnArgs: { test: "data" },
        fnType: "mutation",
        runAt: Date.now(),
        config: {
          maxParallelism: 10,
          logLevel: "INFO",
        },
      });

      // Simulate a canceled job completion
      await t.run(async (ctx) => {
        await completeHandler(ctx, {
          jobs: [
            {
              workId,
              runResult: { kind: "canceled" },
              attempt: 0,
            },
          ],
        });
      });

      // Verify work was deleted
      await t.run(async (ctx) => {
        const work = await ctx.db.get(workId);
        expect(work).toBeNull();
      });

      // Verify no pendingCompletion was created for canceled jobs
      await t.run(async (ctx) => {
        const pendingCompletions = await ctx.db
          .query("pendingCompletion")
          .withIndex("workId", (q) => q.eq("workId", workId))
          .collect();
        expect(pendingCompletions).toHaveLength(0);
      });
    });

    it("should call onComplete handler for successful jobs", async () => {
      // Create a spy on runMutation
      const runMutationSpy = vi.fn();

      // Enqueue a work item with onComplete handler
      const workId = await t.mutation(api.lib.enqueue, {
        fnHandle: "testHandle",
        fnName: "testFunction",
        fnArgs: { test: "data" },
        fnType: "mutation",
        runAt: Date.now(),
        config: {
          maxParallelism: 10,
          logLevel: "INFO",
        },
        onComplete: {
          fnHandle: "testOnComplete",
          context: { someContext: "value" },
        },
      });

      // Simulate a successful job completion with a spy on runMutation
      await t.run(async (ctx) => {
        // Create a modified context with a spy on runMutation
        const spyCtx = {
          ...ctx,
          runMutation: runMutationSpy,
        };

        await completeHandler(spyCtx, {
          jobs: [
            {
              workId,
              runResult: { kind: "success", returnValue: "test result" },
              attempt: 0,
            },
          ],
        });

        // Verify onComplete was called with the right arguments
        expect(runMutationSpy).toHaveBeenCalledWith(
          "testOnComplete",
          expect.objectContaining({
            workId,
            context: { someContext: "value" },
            result: { kind: "success", returnValue: "test result" },
          })
        );
      });
    });

    it("should handle multiple jobs in a single call", async () => {
      // Enqueue multiple work items
      const workId1 = await t.mutation(api.lib.enqueue, {
        fnHandle: "testHandle",
        fnName: "testFunction",
        fnArgs: { test: 1 },
        fnType: "mutation",
        runAt: Date.now(),
        config: {
          maxParallelism: 10,
          logLevel: "INFO",
        },
      });

      const workId2 = await t.mutation(api.lib.enqueue, {
        fnHandle: "testHandle",
        fnName: "testFunction",
        fnArgs: { test: 2 },
        fnType: "mutation",
        runAt: Date.now(),
        config: {
          maxParallelism: 10,
          logLevel: "INFO",
        },
        retryBehavior: {
          maxAttempts: 3,
          initialBackoffMs: 100,
          base: 2,
        },
      });

      // Simulate completion of multiple jobs
      await t.run(async (ctx) => {
        await completeHandler(ctx, {
          jobs: [
            {
              workId: workId1,
              runResult: { kind: "success", returnValue: "result 1" },
              attempt: 0,
            },
            {
              workId: workId2,
              runResult: { kind: "failed", error: "error 2" },
              attempt: 0,
            },
          ],
        });
      });

      // Verify both jobs were processed correctly
      await t.run(async (ctx) => {
        // First job should be deleted
        const work1 = await ctx.db.get(workId1);
        expect(work1).toBeNull();

        // Second job should still exist (for retry)
        const work2 = await ctx.db.get(workId2);
        expect(work2).not.toBeNull();
        expect(work2?.attempts).toBe(1);

        // Both should have pendingCompletion entries
        const pendingCompletions = await ctx.db
          .query("pendingCompletion")
          .collect();
        expect(pendingCompletions).toHaveLength(2);
      });
    });

    it("should handle mismatched attempt numbers", async () => {
      // Enqueue a work item
      const workId = await t.mutation(api.lib.enqueue, {
        fnHandle: "testHandle",
        fnName: "testFunction",
        fnArgs: { test: "data" },
        fnType: "mutation",
        runAt: Date.now(),
        config: {
          maxParallelism: 10,
          logLevel: "INFO",
        },
      });

      // Update the work to have a different attempt number
      await t.run(async (ctx) => {
        const work = await ctx.db.get(workId);
        if (work) {
          await ctx.db.patch(work._id, { attempts: 5 });
        }
      });

      // Simulate a job completion with mismatched attempt number
      await t.run(async (ctx) => {
        await completeHandler(ctx, {
          jobs: [
            {
              workId,
              runResult: { kind: "success", returnValue: "test result" },
              attempt: 0, // Mismatched with the work's attempt number (5)
            },
          ],
        });
      });

      // Verify work was not modified
      await t.run(async (ctx) => {
        const work = await ctx.db.get(workId);
        expect(work).not.toBeNull();
        expect(work?.attempts).toBe(5); // Should remain unchanged
      });

      // Verify no pendingCompletion was created
      await t.run(async (ctx) => {
        const pendingCompletions = await ctx.db
          .query("pendingCompletion")
          .withIndex("workId", (q) => q.eq("workId", workId))
          .collect();
        expect(pendingCompletions).toHaveLength(0);
      });
    });

    it("should only process the first call with the same attempt number for retries", async () => {
      // Enqueue a work item with retry behavior
      const workId = await t.mutation(api.lib.enqueue, {
        fnHandle: "testHandle",
        fnName: "testFunction",
        fnArgs: { test: "data" },
        fnType: "mutation",
        runAt: Date.now(),
        config: {
          maxParallelism: 10,
          logLevel: "INFO",
        },
        retryBehavior: {
          maxAttempts: 3,
          initialBackoffMs: 100,
          base: 2,
        },
      });

      // First call to completeHandler with a failed result
      await t.run(async (ctx) => {
        await completeHandler(ctx, {
          jobs: [
            {
              workId,
              runResult: { kind: "failed", error: "first error" },
              attempt: 0,
            },
          ],
        });
      });

      // Verify the first call was processed correctly
      await t.run(async (ctx) => {
        // Work should still exist (for retry)
        const work = await ctx.db.get(workId);
        expect(work).not.toBeNull();
        expect(work?.attempts).toBe(1); // Incremented from 0

        // pendingCompletion should be created with retry=true
        const pendingCompletions = await ctx.db
          .query("pendingCompletion")
          .withIndex("workId", (q) => q.eq("workId", workId))
          .collect();
        expect(pendingCompletions).toHaveLength(1);
        expect(pendingCompletions[0].runResult.kind).toBe("failed");
        expect(pendingCompletions[0].retry).toBe(true);
        assert(pendingCompletions[0].runResult.kind === "failed");
        // Check the error message from the first call
        expect(pendingCompletions[0].runResult.error).toBe("first error");
      });

      // Create a spy to track if the second call processes anything
      const runMutationSpy = vi.fn();

      // Second call to completeHandler with the same attempt number
      await t.run(async (ctx) => {
        // Create a modified context with a spy on runMutation
        const spyCtx = {
          ...ctx,
          runMutation: runMutationSpy,
        };

        await completeHandler(spyCtx, {
          jobs: [
            {
              workId,
              runResult: { kind: "failed", error: "second error" },
              attempt: 0, // Same attempt number as the first call
            },
          ],
        });
      });

      // Verify the second call was not processed
      await t.run(async (ctx) => {
        // Work should still have the same attempt count
        const work = await ctx.db.get(workId);
        expect(work).not.toBeNull();
        expect(work?.attempts).toBe(1); // Still 1, not incremented again

        // No additional pendingCompletion should be created
        const pendingCompletions = await ctx.db
          .query("pendingCompletion")
          .withIndex("workId", (q) => q.eq("workId", workId))
          .collect();
        expect(pendingCompletions).toHaveLength(1);
        expect(pendingCompletions[0].runResult.kind).toBe("failed");
        assert(pendingCompletions[0].runResult.kind === "failed");
        expect(pendingCompletions[0].retry).toBe(true);
        expect(pendingCompletions[0].runResult.error).toBe("first error");

        // The runMutation spy should not have been called
        expect(runMutationSpy).not.toHaveBeenCalled();
      });
    });
  });
});
@@ -0,0 +1,98 @@
1
+ import { FunctionHandle } from "convex/server";
2
+ import { Infer, v } from "convex/values";
3
+ import { internalMutation, MutationCtx } from "./_generated/server.js";
4
+ import { kickMainLoop } from "./kick.js";
5
+ import { createLogger } from "./logging.js";
6
+ import { nextSegment, OnCompleteArgs, runResult } from "./shared.js";
7
+ import { recordCompleted } from "./stats.js";
8
+
9
+ export type CompleteJob = Infer<typeof completeArgs.fields.jobs.element>;
10
+
11
+ export const completeArgs = v.object({
12
+ jobs: v.array(
13
+ v.object({
14
+ runResult: runResult,
15
+ workId: v.id("work"),
16
+ attempt: v.number(),
17
+ })
18
+ ),
19
+ });
20
/**
 * Records the results of a batch of finished jobs.
 *
 * For each reported job:
 * - Drops the report if the work row is gone or the reported attempt number
 *   does not match the row's current `attempts` (stale or duplicate report).
 * - Increments `attempts` so a repeated report of the same attempt fails the
 *   mismatch check above on subsequent calls.
 * - Decides whether to retry: only failed results, only when `retryBehavior`
 *   is set and attempts remain.
 * - For terminal (non-retried) results, runs the enqueuer's onComplete
 *   mutation (errors are logged and swallowed), records completion stats,
 *   and deletes the work row.
 * - For non-canceled results, inserts a `pendingCompletion` row for the main
 *   loop; canceled jobs get no pendingCompletion entry.
 *
 * Kicks the main loop once at the end if any pendingCompletion was inserted.
 */
export async function completeHandler(
  ctx: MutationCtx,
  args: Infer<typeof completeArgs>
) {
  const globals = await ctx.db.query("globals").unique();
  // Deliberately shadows the global `console` with a level-filtered logger.
  const console = createLogger(globals?.logLevel);
  let anyPendingCompletions = false;
  await Promise.all(
    args.jobs.map(async (job) => {
      const work = await ctx.db.get(job.workId);
      if (!work) {
        console.warn(`[complete] ${job.workId} is done, but its work is gone`);
        return;
      }
      if (work.attempts !== job.attempt) {
        // Report is from an attempt other than the current one; ignore it.
        console.warn(`[complete] ${job.workId} mismatched attempt number`);
        return;
      }
      // Consume this attempt number so duplicate reports are rejected.
      work.attempts++;
      await ctx.db.patch(work._id, { attempts: work.attempts });
      const pendingCompletion = await ctx.db
        .query("pendingCompletion")
        .withIndex("workId", (q) => q.eq("workId", job.workId))
        .unique();
      if (pendingCompletion) {
        // Defensive: a completion for this work is already queued.
        console.warn(`[complete] ${job.workId} already in pendingCompletion`);
        return;
      }
      const maxAttempts = work.retryBehavior?.maxAttempts;
      // Retry only failed runs that still have attempts remaining.
      const retry =
        job.runResult.kind === "failed" &&
        !!maxAttempts &&
        work.attempts < maxAttempts;
      if (!retry) {
        if (work.onComplete) {
          try {
            const handle = work.onComplete.fnHandle as FunctionHandle<
              "mutation",
              OnCompleteArgs,
              void
            >;
            await ctx.runMutation(handle, {
              workId: work._id,
              context: work.onComplete.context,
              result: job.runResult,
            });
            console.debug(`[complete] onComplete for ${job.workId} completed`);
          } catch (e) {
            // An onComplete failure must not block completing the work.
            console.error(
              `[complete] error running onComplete for ${job.workId}`,
              e
            );
            // TODO: store failures in a table for later debugging
          }
        }
        // NOTE(review): recordCompleted appears to return a loggable stats
        // summary that is emitted here — confirm against stats.ts.
        console.info(recordCompleted(work, job.runResult.kind));
        // This is the terminating state for work.
        await ctx.db.delete(job.workId);
      }
      if (job.runResult.kind !== "canceled") {
        await ctx.db.insert("pendingCompletion", {
          runResult: job.runResult,
          workId: job.workId,
          segment: nextSegment(),
          retry,
        });
        anyPendingCompletions = true;
      }
    })
  );
  if (anyPendingCompletions) {
    await kickMainLoop(ctx, "complete");
  }
}
94
+
95
/**
 * Internal mutation workers call to report finished jobs.
 * Thin wrapper around {@link completeHandler}.
 */
export const complete = internalMutation({
  args: completeArgs,
  handler: completeHandler,
});
@@ -1,8 +1,5 @@
1
1
  import { defineComponent } from "convex/server";
2

// The workpool component stands alone (the crons child component used in
// 0.1.x was removed in this release).
const component = defineComponent("workpool");

export default component;