convex-batch-processor 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +337 -0
  3. package/dist/client/index.d.ts +194 -0
  4. package/dist/client/index.d.ts.map +1 -0
  5. package/dist/client/index.js +75 -0
  6. package/dist/client/index.js.map +1 -0
  7. package/dist/component/_generated/api.d.ts +34 -0
  8. package/dist/component/_generated/api.d.ts.map +1 -0
  9. package/dist/component/_generated/api.js +31 -0
  10. package/dist/component/_generated/api.js.map +1 -0
  11. package/dist/component/_generated/component.d.ts +77 -0
  12. package/dist/component/_generated/component.d.ts.map +1 -0
  13. package/dist/component/_generated/component.js +11 -0
  14. package/dist/component/_generated/component.js.map +1 -0
  15. package/dist/component/_generated/dataModel.d.ts +46 -0
  16. package/dist/component/_generated/dataModel.d.ts.map +1 -0
  17. package/dist/component/_generated/dataModel.js +11 -0
  18. package/dist/component/_generated/dataModel.js.map +1 -0
  19. package/dist/component/_generated/server.d.ts +121 -0
  20. package/dist/component/_generated/server.d.ts.map +1 -0
  21. package/dist/component/_generated/server.js +78 -0
  22. package/dist/component/_generated/server.js.map +1 -0
  23. package/dist/component/convex.config.d.ts +3 -0
  24. package/dist/component/convex.config.d.ts.map +1 -0
  25. package/dist/component/convex.config.js +3 -0
  26. package/dist/component/convex.config.js.map +1 -0
  27. package/dist/component/lib.d.ts +261 -0
  28. package/dist/component/lib.d.ts.map +1 -0
  29. package/dist/component/lib.js +629 -0
  30. package/dist/component/lib.js.map +1 -0
  31. package/dist/component/schema.d.ts +100 -0
  32. package/dist/component/schema.d.ts.map +1 -0
  33. package/dist/component/schema.js +49 -0
  34. package/dist/component/schema.js.map +1 -0
  35. package/package.json +63 -0
  36. package/src/client/index.test.ts +121 -0
  37. package/src/client/index.ts +308 -0
  38. package/src/component/_generated/api.ts +50 -0
  39. package/src/component/_generated/component.ts +133 -0
  40. package/src/component/_generated/dataModel.ts +60 -0
  41. package/src/component/_generated/server.ts +156 -0
  42. package/src/component/convex.config.ts +3 -0
  43. package/src/component/lib.ts +792 -0
  44. package/src/component/schema.ts +57 -0
@@ -0,0 +1,792 @@
1
+ import {
2
+ mutation,
3
+ query,
4
+ internalAction,
5
+ internalMutation,
6
+ internalQuery,
7
+ } from "./_generated/server";
8
+ import { internal } from "./_generated/api";
9
+ import type { Doc } from "./_generated/dataModel";
10
+ import { v } from "convex/values";
11
+ import { FunctionHandle } from "convex/server";
12
+
13
+ // ============================================================================
14
+ // Batch Accumulator - Public API
15
+ // ============================================================================
16
+
17
/**
 * Append `items` to the batch identified by `batchId`, creating the batch
 * document on first use.
 *
 * Behavior:
 * - If the accumulated count reaches `config.maxBatchSize`, the batch is
 *   flushed immediately via a 0-delay scheduled `executeFlush`.
 * - Otherwise the items are stored, and a time-based flush may be scheduled
 *   (only on the first write into a new/empty batch, so at most one interval
 *   flush is pending per batch).
 * - A "completed" batch is reset back to "accumulating" so it can be reused.
 *
 * Throws if the batch is mid-flush ("flushing") or fails to persist.
 * Returns `{ batchId, itemCount, flushed, status }`.
 */
export const addItems = mutation({
  args: {
    batchId: v.string(),
    items: v.array(v.any()),
    config: v.object({
      maxBatchSize: v.number(),
      flushIntervalMs: v.number(),
      processBatchHandle: v.string(),
    }),
  },
  handler: async (ctx, { batchId, items, config }) => {
    const now = Date.now();

    let batch = await ctx.db
      .query("batches")
      .withIndex("by_batchId", (q) => q.eq("batchId", batchId))
      .first();

    // First use of this batchId: create an empty accumulating batch.
    let isNewBatch = false;
    if (!batch) {
      isNewBatch = true;
      const batchDocId = await ctx.db.insert("batches", {
        batchId,
        items: [],
        itemCount: 0,
        createdAt: now,
        lastUpdatedAt: now,
        status: "accumulating",
        config,
      });
      batch = await ctx.db.get(batchDocId);
    }

    if (!batch) {
      throw new Error(`Failed to create batch ${batchId}`);
    }

    // Reset completed batches to accumulating so they can accept new items
    if (batch.status === "completed") {
      await ctx.db.patch(batch._id, {
        status: "accumulating",
        items: [],
        itemCount: 0,
        lastUpdatedAt: now,
      });
      batch = await ctx.db.get(batch._id);
      if (!batch) {
        throw new Error(`Failed to reset batch ${batchId}`);
      }
    }

    // Only "accumulating" batches accept items; "flushing" would race with
    // the in-flight executeFlush.
    if (batch.status !== "accumulating") {
      throw new Error(`Batch ${batchId} is not in accumulating state (current: ${batch.status})`);
    }

    const newItems = [...batch.items, ...items];
    const newItemCount = newItems.length;

    // Size threshold reached: flush now instead of waiting for the timer.
    if (newItemCount >= config.maxBatchSize) {
      // Cancel any pending interval flush so the batch isn't flushed twice.
      if (batch.scheduledFlushId) {
        await ctx.scheduler.cancel(batch.scheduledFlushId);
      }

      await ctx.db.patch(batch._id, {
        items: newItems,
        itemCount: newItemCount,
        lastUpdatedAt: now,
        status: "flushing",
        scheduledFlushId: undefined,
      });

      // Run the user-supplied processing action asynchronously so this
      // mutation returns quickly.
      await ctx.scheduler.runAfter(0, internal.lib.executeFlush, {
        batchDocId: batch._id,
        items: newItems,
        processBatchHandle: config.processBatchHandle,
      });

      return {
        batchId,
        itemCount: newItemCount,
        flushed: true,
        status: "flushing",
      };
    }

    // Below the threshold: ensure a time-based flush is queued. Only the
    // first write into a brand-new or just-emptied batch schedules one.
    let scheduledFlushId = batch.scheduledFlushId;
    const shouldScheduleFlush =
      config.flushIntervalMs > 0 &&
      !scheduledFlushId &&
      (isNewBatch || batch.itemCount === 0);

    if (shouldScheduleFlush) {
      scheduledFlushId = await ctx.scheduler.runAfter(
        config.flushIntervalMs,
        internal.lib.scheduledIntervalFlush,
        { batchDocId: batch._id }
      );
    }

    // Persist items and the latest config (callers may change it per call).
    await ctx.db.patch(batch._id, {
      items: newItems,
      itemCount: newItemCount,
      lastUpdatedAt: now,
      config,
      scheduledFlushId,
    });

    return {
      batchId,
      itemCount: newItemCount,
      flushed: false,
      status: "accumulating",
    };
  },
});
132
+
133
+ export const flushBatch = mutation({
134
+ args: { batchId: v.string() },
135
+ handler: async (ctx, { batchId }) => {
136
+ const batch = await ctx.db
137
+ .query("batches")
138
+ .withIndex("by_batchId", (q) => q.eq("batchId", batchId))
139
+ .first();
140
+
141
+ if (!batch) {
142
+ throw new Error(`Batch ${batchId} not found`);
143
+ }
144
+
145
+ if (batch.status !== "accumulating") {
146
+ throw new Error(`Batch ${batchId} is not in accumulating state (current: ${batch.status})`);
147
+ }
148
+
149
+ if (batch.itemCount === 0) {
150
+ return { batchId, itemCount: 0, flushed: false, reason: "Batch is empty" };
151
+ }
152
+
153
+ if (!batch.config.processBatchHandle) {
154
+ throw new Error(`Batch ${batchId} has no processBatchHandle configured`);
155
+ }
156
+
157
+ if (batch.scheduledFlushId) {
158
+ await ctx.scheduler.cancel(batch.scheduledFlushId);
159
+ }
160
+
161
+ await ctx.db.patch(batch._id, {
162
+ status: "flushing",
163
+ scheduledFlushId: undefined,
164
+ });
165
+
166
+ await ctx.scheduler.runAfter(0, internal.lib.executeFlush, {
167
+ batchDocId: batch._id,
168
+ items: batch.items,
169
+ processBatchHandle: batch.config.processBatchHandle,
170
+ });
171
+
172
+ return {
173
+ batchId,
174
+ itemCount: batch.itemCount,
175
+ flushed: true,
176
+ status: "flushing",
177
+ };
178
+ },
179
+ });
180
+
181
+ export const getBatchStatus = query({
182
+ args: { batchId: v.string() },
183
+ handler: async (ctx, { batchId }) => {
184
+ const batch = await ctx.db
185
+ .query("batches")
186
+ .withIndex("by_batchId", (q) => q.eq("batchId", batchId))
187
+ .first();
188
+
189
+ if (!batch) {
190
+ return null;
191
+ }
192
+
193
+ return {
194
+ batchId: batch.batchId,
195
+ itemCount: batch.itemCount,
196
+ status: batch.status,
197
+ createdAt: batch.createdAt,
198
+ lastUpdatedAt: batch.lastUpdatedAt,
199
+ config: batch.config,
200
+ };
201
+ },
202
+ });
203
+
204
+ export const getFlushHistory = query({
205
+ args: {
206
+ batchId: v.string(),
207
+ limit: v.optional(v.number()),
208
+ },
209
+ handler: async (ctx, { batchId, limit }) => {
210
+ let query = ctx.db
211
+ .query("flushHistory")
212
+ .withIndex("by_batchId", (q) => q.eq("batchId", batchId))
213
+ .order("desc");
214
+
215
+ if (limit) {
216
+ return await query.take(limit);
217
+ }
218
+
219
+ return await query.collect();
220
+ },
221
+ });
222
+
223
+ export const deleteBatch = mutation({
224
+ args: { batchId: v.string() },
225
+ handler: async (ctx, { batchId }) => {
226
+ const batch = await ctx.db
227
+ .query("batches")
228
+ .withIndex("by_batchId", (q) => q.eq("batchId", batchId))
229
+ .first();
230
+
231
+ if (!batch) {
232
+ return { deleted: false, reason: "Batch not found" };
233
+ }
234
+
235
+ if (batch.status === "flushing") {
236
+ return { deleted: false, reason: "Cannot delete batch while flushing" };
237
+ }
238
+
239
+ if (batch.status === "accumulating" && batch.itemCount > 0) {
240
+ return { deleted: false, reason: "Cannot delete batch with pending items" };
241
+ }
242
+
243
+ if (batch.scheduledFlushId) {
244
+ await ctx.scheduler.cancel(batch.scheduledFlushId);
245
+ }
246
+
247
+ await ctx.db.delete(batch._id);
248
+ return { deleted: true };
249
+ },
250
+ });
251
+
252
+ // ============================================================================
253
+ // Batch Accumulator - Internal Functions
254
+ // ============================================================================
255
+
256
+ export const getBatch = internalQuery({
257
+ args: { batchId: v.string() },
258
+ handler: async (ctx, { batchId }) => {
259
+ return await ctx.db
260
+ .query("batches")
261
+ .withIndex("by_batchId", (q) => q.eq("batchId", batchId))
262
+ .first();
263
+ },
264
+ });
265
+
266
+ export const executeFlush = internalAction({
267
+ args: {
268
+ batchDocId: v.id("batches"),
269
+ items: v.array(v.any()),
270
+ processBatchHandle: v.string(),
271
+ },
272
+ handler: async (ctx, { batchDocId, items, processBatchHandle }) => {
273
+ const startTime = Date.now();
274
+ let success = true;
275
+ let errorMessage: string | undefined;
276
+
277
+ try {
278
+ const handle = processBatchHandle as FunctionHandle<"action", { items: unknown[] }>;
279
+ await ctx.runAction(handle, { items });
280
+ } catch (error) {
281
+ success = false;
282
+ errorMessage = error instanceof Error ? error.message : String(error);
283
+ }
284
+
285
+ const durationMs = Date.now() - startTime;
286
+
287
+ await ctx.runMutation(internal.lib.recordFlushResult, {
288
+ batchDocId,
289
+ itemCount: items.length,
290
+ durationMs,
291
+ success,
292
+ errorMessage,
293
+ });
294
+
295
+ return { success, errorMessage, durationMs };
296
+ },
297
+ });
298
+
299
/**
 * Persist the outcome of a flush attempt and transition the batch's state.
 *
 * On success: items are cleared and the batch is marked "completed".
 * On failure: items are KEPT, the batch returns to "accumulating", and —
 * when the config allows — a retry flush is scheduled after the interval.
 */
export const recordFlushResult = internalMutation({
  args: {
    batchDocId: v.id("batches"),
    itemCount: v.number(),
    durationMs: v.number(),
    success: v.boolean(),
    errorMessage: v.optional(v.string()),
  },
  handler: async (ctx, { batchDocId, itemCount, durationMs, success, errorMessage }) => {
    const batch = await ctx.db.get(batchDocId);
    // The batch may have been deleted while the flush ran; nothing to record.
    if (!batch) return;

    // Append an audit record regardless of outcome.
    await ctx.db.insert("flushHistory", {
      batchId: batch.batchId,
      itemCount,
      flushedAt: Date.now(),
      durationMs,
      success,
      errorMessage,
    });

    if (success) {
      // Flushed items are done; reset the accumulator for reuse.
      await ctx.db.patch(batchDocId, {
        status: "completed",
        items: [],
        itemCount: 0,
        scheduledFlushId: undefined,
      });
    } else {
      // Keep the items and retry on the next interval tick (if configured).
      let scheduledFlushId: typeof batch.scheduledFlushId = undefined;
      if (batch.config.flushIntervalMs > 0 && batch.config.processBatchHandle) {
        scheduledFlushId = await ctx.scheduler.runAfter(
          batch.config.flushIntervalMs,
          internal.lib.scheduledIntervalFlush,
          { batchDocId }
        );
      }

      await ctx.db.patch(batchDocId, {
        status: "accumulating",
        scheduledFlushId,
      });
    }
  },
});
344
+
345
+ export const markBatchFlushing = internalMutation({
346
+ args: { batchDocId: v.id("batches") },
347
+ handler: async (ctx, { batchDocId }) => {
348
+ const batch = await ctx.db.get(batchDocId);
349
+ if (!batch || batch.status !== "accumulating" || batch.itemCount === 0) {
350
+ return null;
351
+ }
352
+
353
+ await ctx.db.patch(batchDocId, {
354
+ status: "flushing",
355
+ scheduledFlushId: undefined,
356
+ });
357
+
358
+ return {
359
+ items: batch.items,
360
+ processBatchHandle: batch.config.processBatchHandle,
361
+ };
362
+ },
363
+ });
364
+
365
+ export const scheduledIntervalFlush = internalAction({
366
+ args: { batchDocId: v.id("batches") },
367
+ handler: async (ctx, { batchDocId }): Promise<{
368
+ flushed: boolean;
369
+ reason?: string;
370
+ success?: boolean;
371
+ errorMessage?: string;
372
+ durationMs?: number;
373
+ }> => {
374
+ const batchData: { items: unknown[]; processBatchHandle: string } | null = await ctx.runMutation(internal.lib.markBatchFlushing, {
375
+ batchDocId,
376
+ });
377
+
378
+ if (!batchData || !batchData.processBatchHandle) {
379
+ return { flushed: false, reason: "Batch not ready for flush" };
380
+ }
381
+
382
+ const result: { success: boolean; errorMessage?: string; durationMs: number } = await ctx.runAction(internal.lib.executeFlush, {
383
+ batchDocId,
384
+ items: batchData.items,
385
+ processBatchHandle: batchData.processBatchHandle,
386
+ });
387
+
388
+ return { flushed: true, ...result };
389
+ },
390
+ });
391
+
392
+ // ============================================================================
393
+ // Table Iterator - Public API
394
+ // ============================================================================
395
+
396
+ export const startIteratorJob = mutation({
397
+ args: {
398
+ jobId: v.string(),
399
+ config: v.object({
400
+ batchSize: v.number(),
401
+ delayBetweenBatchesMs: v.optional(v.number()),
402
+ getNextBatchHandle: v.string(),
403
+ processBatchHandle: v.string(),
404
+ onCompleteHandle: v.optional(v.string()),
405
+ maxRetries: v.optional(v.number()),
406
+ }),
407
+ },
408
+ handler: async (ctx, { jobId, config }) => {
409
+ const existingJob = await ctx.db
410
+ .query("iteratorJobs")
411
+ .withIndex("by_jobId", (q) => q.eq("jobId", jobId))
412
+ .first();
413
+
414
+ if (existingJob) {
415
+ throw new Error(`Job ${jobId} already exists`);
416
+ }
417
+
418
+ const now = Date.now();
419
+
420
+ const jobDocId = await ctx.db.insert("iteratorJobs", {
421
+ jobId,
422
+ cursor: undefined,
423
+ processedCount: 0,
424
+ status: "running",
425
+ config: {
426
+ batchSize: config.batchSize,
427
+ delayBetweenBatchesMs: config.delayBetweenBatchesMs ?? 100,
428
+ getNextBatchHandle: config.getNextBatchHandle,
429
+ processBatchHandle: config.processBatchHandle,
430
+ onCompleteHandle: config.onCompleteHandle,
431
+ maxRetries: config.maxRetries,
432
+ },
433
+ retryCount: 0,
434
+ createdAt: now,
435
+ lastRunAt: now,
436
+ });
437
+
438
+ await ctx.scheduler.runAfter(0, internal.lib.processNextBatch, { jobDocId });
439
+
440
+ return { jobId, status: "running" };
441
+ },
442
+ });
443
+
444
+ export const pauseIteratorJob = mutation({
445
+ args: { jobId: v.string() },
446
+ handler: async (ctx, { jobId }) => {
447
+ const job = await ctx.db
448
+ .query("iteratorJobs")
449
+ .withIndex("by_jobId", (q) => q.eq("jobId", jobId))
450
+ .first();
451
+
452
+ if (!job) {
453
+ throw new Error(`Job ${jobId} not found`);
454
+ }
455
+
456
+ if (job.status !== "running") {
457
+ throw new Error(`Job ${jobId} is not running (current: ${job.status})`);
458
+ }
459
+
460
+ await ctx.db.patch(job._id, {
461
+ status: "paused",
462
+ });
463
+
464
+ return { jobId, status: "paused" };
465
+ },
466
+ });
467
+
468
+ export const resumeIteratorJob = mutation({
469
+ args: { jobId: v.string() },
470
+ handler: async (ctx, { jobId }) => {
471
+ const job = await ctx.db
472
+ .query("iteratorJobs")
473
+ .withIndex("by_jobId", (q) => q.eq("jobId", jobId))
474
+ .first();
475
+
476
+ if (!job) {
477
+ throw new Error(`Job ${jobId} not found`);
478
+ }
479
+
480
+ if (job.status !== "paused") {
481
+ throw new Error(`Job ${jobId} is not paused (current: ${job.status})`);
482
+ }
483
+
484
+ await ctx.db.patch(job._id, {
485
+ status: "running",
486
+ retryCount: 0,
487
+ });
488
+
489
+ await ctx.scheduler.runAfter(0, internal.lib.processNextBatch, { jobDocId: job._id });
490
+
491
+ return { jobId, status: "running" };
492
+ },
493
+ });
494
+
495
+ export const cancelIteratorJob = mutation({
496
+ args: { jobId: v.string() },
497
+ handler: async (ctx, { jobId }) => {
498
+ const job = await ctx.db
499
+ .query("iteratorJobs")
500
+ .withIndex("by_jobId", (q) => q.eq("jobId", jobId))
501
+ .first();
502
+
503
+ if (!job) {
504
+ throw new Error(`Job ${jobId} not found`);
505
+ }
506
+
507
+ if (job.status === "completed" || job.status === "failed") {
508
+ return { jobId, status: job.status, reason: "Job already finished" };
509
+ }
510
+
511
+ await ctx.db.patch(job._id, {
512
+ status: "failed",
513
+ errorMessage: "Cancelled by user",
514
+ });
515
+
516
+ return { jobId, status: "failed" };
517
+ },
518
+ });
519
+
520
+ export const getIteratorJobStatus = query({
521
+ args: { jobId: v.string() },
522
+ handler: async (ctx, { jobId }) => {
523
+ const job = await ctx.db
524
+ .query("iteratorJobs")
525
+ .withIndex("by_jobId", (q) => q.eq("jobId", jobId))
526
+ .first();
527
+
528
+ if (!job) {
529
+ return null;
530
+ }
531
+
532
+ return {
533
+ jobId: job.jobId,
534
+ status: job.status,
535
+ processedCount: job.processedCount,
536
+ cursor: job.cursor,
537
+ retryCount: job.retryCount,
538
+ errorMessage: job.errorMessage,
539
+ createdAt: job.createdAt,
540
+ lastRunAt: job.lastRunAt,
541
+ config: {
542
+ batchSize: job.config.batchSize,
543
+ delayBetweenBatchesMs: job.config.delayBetweenBatchesMs,
544
+ },
545
+ };
546
+ },
547
+ });
548
+
549
+ export const listIteratorJobs = query({
550
+ args: {
551
+ status: v.optional(
552
+ v.union(
553
+ v.literal("pending"),
554
+ v.literal("running"),
555
+ v.literal("paused"),
556
+ v.literal("completed"),
557
+ v.literal("failed")
558
+ )
559
+ ),
560
+ limit: v.optional(v.number()),
561
+ },
562
+ handler: async (ctx, { status, limit }) => {
563
+ let queryBuilder;
564
+
565
+ if (status) {
566
+ queryBuilder = ctx.db
567
+ .query("iteratorJobs")
568
+ .withIndex("by_status", (q) => q.eq("status", status));
569
+ } else {
570
+ queryBuilder = ctx.db.query("iteratorJobs");
571
+ }
572
+
573
+ const jobs = limit ? await queryBuilder.take(limit) : await queryBuilder.collect();
574
+
575
+ return jobs.map((job) => ({
576
+ jobId: job.jobId,
577
+ status: job.status,
578
+ processedCount: job.processedCount,
579
+ createdAt: job.createdAt,
580
+ lastRunAt: job.lastRunAt,
581
+ errorMessage: job.errorMessage,
582
+ }));
583
+ },
584
+ });
585
+
586
+ export const deleteIteratorJob = mutation({
587
+ args: { jobId: v.string() },
588
+ handler: async (ctx, { jobId }) => {
589
+ const job = await ctx.db
590
+ .query("iteratorJobs")
591
+ .withIndex("by_jobId", (q) => q.eq("jobId", jobId))
592
+ .first();
593
+
594
+ if (!job) {
595
+ return { deleted: false, reason: "Job not found" };
596
+ }
597
+
598
+ if (job.status === "running" || job.status === "paused") {
599
+ return { deleted: false, reason: "Cannot delete active job" };
600
+ }
601
+
602
+ await ctx.db.delete(job._id);
603
+ return { deleted: true };
604
+ },
605
+ });
606
+
607
+ // ============================================================================
608
+ // Table Iterator - Internal Functions
609
+ // ============================================================================
610
+
611
+ export const getIteratorJob = internalQuery({
612
+ args: { jobId: v.string() },
613
+ handler: async (ctx, { jobId }) => {
614
+ return await ctx.db
615
+ .query("iteratorJobs")
616
+ .withIndex("by_jobId", (q) => q.eq("jobId", jobId))
617
+ .first();
618
+ },
619
+ });
620
+
621
+ export const getIteratorJobById = internalQuery({
622
+ args: { jobDocId: v.id("iteratorJobs") },
623
+ handler: async (ctx, { jobDocId }) => {
624
+ return await ctx.db.get(jobDocId);
625
+ },
626
+ });
627
+
628
/**
 * Core iterator loop: fetch one page via the job's getNextBatch query, run
 * the processBatch action on its items, then either finish the job, schedule
 * the next page, or retry with exponential backoff on error.
 *
 * Self-reschedules through the scheduler; each invocation handles exactly
 * one page. Stops when the job is missing or no longer "running" (paused,
 * cancelled, completed, or failed).
 */
export const processNextBatch = internalAction({
  args: { jobDocId: v.id("iteratorJobs") },
  handler: async (ctx, { jobDocId }): Promise<{
    processed: boolean;
    done?: boolean;
    processedCount?: number;
    reason?: string;
    error?: string;
    retryCount?: number;
  }> => {
    const job: Doc<"iteratorJobs"> | null = await ctx.runQuery(internal.lib.getIteratorJobById, { jobDocId });
    // Paused/cancelled/deleted jobs end the loop here.
    if (!job || job.status !== "running") {
      return { processed: false, reason: "Job not found or not running" };
    }

    // Default retry budget when the caller did not configure one.
    const maxRetries = job.config.maxRetries ?? 5;

    try {
      const getNextBatchHandle = job.config.getNextBatchHandle as FunctionHandle<
        "query",
        { cursor: string | undefined; batchSize: number }
      >;

      const batchResult = await ctx.runQuery(getNextBatchHandle, {
        cursor: job.cursor ?? undefined,
        batchSize: job.config.batchSize,
      });

      // Contract with the caller's query: a page of items, the next cursor,
      // and a done flag marking the final page.
      const { items, cursor: nextCursor, done } = batchResult as {
        items: unknown[];
        cursor: string | undefined;
        done: boolean;
      };

      if (items.length > 0) {
        const processBatchHandle = job.config.processBatchHandle as FunctionHandle<
          "action",
          { items: unknown[] }
        >;

        await ctx.runAction(processBatchHandle, { items });
      }

      const newProcessedCount = job.processedCount + items.length;

      if (done) {
        await ctx.runMutation(internal.lib.markJobCompleted, {
          jobDocId,
          processedCount: newProcessedCount,
        });

        // Optional completion callback supplied by the caller.
        if (job.config.onCompleteHandle) {
          const onCompleteHandle = job.config.onCompleteHandle as FunctionHandle<
            "mutation",
            { jobId: string; processedCount: number }
          >;
          await ctx.runMutation(onCompleteHandle, {
            jobId: job.jobId,
            processedCount: newProcessedCount,
          });
        }

        return { processed: true, done: true, processedCount: newProcessedCount };
      }

      // More pages remain: persist progress, then self-schedule the next one.
      await ctx.runMutation(internal.lib.updateJobProgress, {
        jobDocId,
        cursor: nextCursor,
        processedCount: newProcessedCount,
      });

      await ctx.scheduler.runAfter(
        job.config.delayBetweenBatchesMs,
        internal.lib.processNextBatch,
        { jobDocId }
      );

      return { processed: true, done: false, processedCount: newProcessedCount };
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error);
      const newRetryCount = job.retryCount + 1;

      // Retry budget exhausted: record the failure and stop the loop.
      if (newRetryCount >= maxRetries) {
        await ctx.runMutation(internal.lib.markJobFailed, {
          jobDocId,
          errorMessage,
          retryCount: newRetryCount,
        });
        return { processed: false, reason: "Max retries exceeded", error: errorMessage };
      }

      // Exponential backoff: 2s, 4s, 8s, ... capped at 30s.
      const backoffMs = Math.min(1000 * Math.pow(2, newRetryCount), 30000);
      await ctx.runMutation(internal.lib.incrementRetryCount, {
        jobDocId,
        retryCount: newRetryCount,
        errorMessage,
      });

      await ctx.scheduler.runAfter(backoffMs, internal.lib.processNextBatch, { jobDocId });

      return { processed: false, reason: "Retrying", error: errorMessage, retryCount: newRetryCount };
    }
  },
});
732
+
733
+ export const updateJobProgress = internalMutation({
734
+ args: {
735
+ jobDocId: v.id("iteratorJobs"),
736
+ cursor: v.optional(v.string()),
737
+ processedCount: v.number(),
738
+ },
739
+ handler: async (ctx, { jobDocId, cursor, processedCount }) => {
740
+ await ctx.db.patch(jobDocId, {
741
+ cursor,
742
+ processedCount,
743
+ lastRunAt: Date.now(),
744
+ retryCount: 0,
745
+ });
746
+ },
747
+ });
748
+
749
+ export const markJobCompleted = internalMutation({
750
+ args: {
751
+ jobDocId: v.id("iteratorJobs"),
752
+ processedCount: v.number(),
753
+ },
754
+ handler: async (ctx, { jobDocId, processedCount }) => {
755
+ await ctx.db.patch(jobDocId, {
756
+ status: "completed",
757
+ processedCount,
758
+ lastRunAt: Date.now(),
759
+ });
760
+ },
761
+ });
762
+
763
+ export const markJobFailed = internalMutation({
764
+ args: {
765
+ jobDocId: v.id("iteratorJobs"),
766
+ errorMessage: v.string(),
767
+ retryCount: v.number(),
768
+ },
769
+ handler: async (ctx, { jobDocId, errorMessage, retryCount }) => {
770
+ await ctx.db.patch(jobDocId, {
771
+ status: "failed",
772
+ errorMessage,
773
+ retryCount,
774
+ lastRunAt: Date.now(),
775
+ });
776
+ },
777
+ });
778
+
779
+ export const incrementRetryCount = internalMutation({
780
+ args: {
781
+ jobDocId: v.id("iteratorJobs"),
782
+ retryCount: v.number(),
783
+ errorMessage: v.string(),
784
+ },
785
+ handler: async (ctx, { jobDocId, retryCount, errorMessage }) => {
786
+ await ctx.db.patch(jobDocId, {
787
+ retryCount,
788
+ errorMessage,
789
+ lastRunAt: Date.now(),
790
+ });
791
+ },
792
+ });