@convex-dev/workpool 0.2.20-alpha.0 → 0.3.1-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. package/README.md +77 -68
  2. package/dist/client/index.d.ts +3 -3
  3. package/dist/client/index.d.ts.map +1 -1
  4. package/dist/client/index.js.map +1 -1
  5. package/dist/client/utils.d.ts +1 -8
  6. package/dist/client/utils.d.ts.map +1 -1
  7. package/dist/client/utils.js.map +1 -1
  8. package/dist/component/_generated/api.d.ts +27 -124
  9. package/dist/component/_generated/api.d.ts.map +1 -1
  10. package/dist/component/_generated/api.js +10 -1
  11. package/dist/component/_generated/api.js.map +1 -1
  12. package/dist/component/_generated/component.d.ts +98 -0
  13. package/dist/component/_generated/component.d.ts.map +1 -0
  14. package/dist/component/_generated/component.js +11 -0
  15. package/dist/component/_generated/component.js.map +1 -0
  16. package/dist/component/_generated/dataModel.d.ts +4 -18
  17. package/dist/component/_generated/dataModel.d.ts.map +1 -0
  18. package/dist/component/_generated/dataModel.js +11 -0
  19. package/dist/component/_generated/dataModel.js.map +1 -0
  20. package/dist/component/_generated/server.d.ts +10 -38
  21. package/dist/component/_generated/server.d.ts.map +1 -1
  22. package/dist/component/_generated/server.js +9 -5
  23. package/dist/component/_generated/server.js.map +1 -1
  24. package/dist/component/danger.d.ts +2 -2
  25. package/dist/component/lib.d.ts +9 -9
  26. package/dist/component/lib.js +2 -2
  27. package/dist/component/lib.js.map +1 -1
  28. package/dist/component/schema.d.ts +15 -15
  29. package/dist/component/shared.d.ts +3 -3
  30. package/dist/component/stats.d.ts +2 -2
  31. package/dist/component/worker.d.ts +3 -3
  32. package/package.json +30 -29
  33. package/src/client/index.ts +19 -20
  34. package/src/client/utils.ts +3 -30
  35. package/src/component/README.md +6 -6
  36. package/src/component/_generated/api.ts +70 -0
  37. package/src/component/_generated/component.ts +117 -0
  38. package/src/component/_generated/{server.d.ts → server.ts} +33 -21
  39. package/src/component/complete.test.ts +1 -1
  40. package/src/component/complete.ts +6 -6
  41. package/src/component/danger.ts +3 -3
  42. package/src/component/kick.test.ts +5 -5
  43. package/src/component/kick.ts +6 -6
  44. package/src/component/lib.test.ts +5 -5
  45. package/src/component/lib.ts +10 -10
  46. package/src/component/logging.ts +2 -2
  47. package/src/component/loop.test.ts +9 -9
  48. package/src/component/loop.ts +33 -33
  49. package/src/component/recovery.test.ts +7 -7
  50. package/src/component/recovery.ts +2 -2
  51. package/src/component/schema.ts +2 -2
  52. package/src/component/shared.ts +5 -5
  53. package/src/component/stats.test.ts +3 -3
  54. package/src/component/stats.ts +6 -6
  55. package/src/test.ts +10 -3
  56. package/src/component/_generated/api.d.ts +0 -151
  57. package/src/component/_generated/api.js +0 -23
  58. package/src/component/_generated/server.js +0 -90
  59. package/src/component/_generated/{dataModel.d.ts → dataModel.ts} +0 -0
@@ -52,7 +52,7 @@ export const main = internalMutation({
52
52
  const state = await getOrCreateState(ctx);
53
53
  if (generation !== state.generation) {
54
54
  throw new Error(
55
- `generation mismatch: ${generation} !== ${state.generation}`
55
+ `generation mismatch: ${generation} !== ${state.generation}`,
56
56
  );
57
57
  }
58
58
  state.generation++;
@@ -129,7 +129,7 @@ export const updateRunStatus = internalMutation({
129
129
  const state = await getOrCreateState(ctx);
130
130
  if (generation !== state.generation) {
131
131
  throw new Error(
132
- `generation mismatch: ${generation} !== ${state.generation}`
132
+ `generation mismatch: ${generation} !== ${state.generation}`,
133
133
  );
134
134
  }
135
135
 
@@ -154,7 +154,7 @@ export const updateRunStatus = internalMutation({
154
154
  ctx,
155
155
  state,
156
156
  maxParallelism,
157
- nextSegment
157
+ nextSegment,
158
158
  );
159
159
  console.timeEnd("[updateRunStatus] nextSegmentIsActionable");
160
160
 
@@ -165,7 +165,7 @@ export const updateRunStatus = internalMutation({
165
165
  {
166
166
  generation,
167
167
  segment: nextSegment,
168
- }
168
+ },
169
169
  );
170
170
  return;
171
171
  }
@@ -174,7 +174,7 @@ export const updateRunStatus = internalMutation({
174
174
  const [oldIsActionable, cursors] = await oldSegmentIsActionable(
175
175
  ctx,
176
176
  state,
177
- maxParallelism
177
+ maxParallelism,
178
178
  );
179
179
  console.timeEnd("[updateRunStatus] oldSegmentIsActionable");
180
180
 
@@ -204,8 +204,8 @@ export const updateRunStatus = internalMutation({
204
204
  }
205
205
  const docs = await Promise.all(
206
206
  actionableTables.map(async (tableName) =>
207
- getNextUp(ctx, tableName, { start: nextSegment })
208
- )
207
+ getNextUp(ctx, tableName, { start: nextSegment }),
208
+ ),
209
209
  );
210
210
  console.timeEnd("[updateRunStatus] findNextSegment");
211
211
  let targetSegment = docs.map((d) => d?.segment).sort()[0];
@@ -221,7 +221,7 @@ export const updateRunStatus = internalMutation({
221
221
  const scheduledId = await ctx.scheduler.runAt(
222
222
  boundScheduledTime(fromSegment(targetSegment), console),
223
223
  internal.loop.main,
224
- { generation, segment: targetSegment }
224
+ { generation, segment: targetSegment },
225
225
  );
226
226
  if (targetSegment > getNextSegment()) {
227
227
  await ctx.db.patch(runStatus._id, {
@@ -235,7 +235,7 @@ export const updateRunStatus = internalMutation({
235
235
  });
236
236
  } else {
237
237
  console.debug(
238
- `[updateRunStatus] staying running because it's the next segment`
238
+ `[updateRunStatus] staying running because it's the next segment`,
239
239
  );
240
240
  }
241
241
  return;
@@ -251,7 +251,7 @@ async function nextSegmentIsActionable(
251
251
  ctx: MutationCtx,
252
252
  state: Doc<"internalState">,
253
253
  maxParallelism: number,
254
- end: bigint
254
+ end: bigint,
255
255
  ): Promise<boolean> {
256
256
  // First, try with our cursor range, up to end.
257
257
  if (
@@ -286,7 +286,7 @@ async function nextSegmentIsActionable(
286
286
  async function oldSegmentIsActionable(
287
287
  ctx: MutationCtx,
288
288
  state: Doc<"internalState">,
289
- maxParallelism: number
289
+ maxParallelism: number,
290
290
  ): Promise<
291
291
  [boolean, { completion?: bigint; cancelation?: bigint; incoming?: bigint }]
292
292
  > {
@@ -318,7 +318,7 @@ async function oldSegmentIsActionable(
318
318
  async function getNextUp(
319
319
  ctx: MutationCtx,
320
320
  table: "pendingCompletion" | "pendingCancelation" | "pendingStart",
321
- range: { start?: bigint; end?: bigint }
321
+ range: { start?: bigint; end?: bigint },
322
322
  ) {
323
323
  return ctx.db
324
324
  .query(table)
@@ -331,7 +331,7 @@ async function getNextUp(
331
331
  : q.gt("segment", range.start - CURSOR_BUFFER_SEGMENTS)
332
332
  : range.end !== undefined
333
333
  ? q.lt("segment", range.end)
334
- : q
334
+ : q,
335
335
  )
336
336
  .first();
337
337
  }
@@ -344,7 +344,7 @@ async function handleCompletions(
344
344
  ctx: MutationCtx,
345
345
  state: Doc<"internalState">,
346
346
  segment: bigint,
347
- console: Logger
347
+ console: Logger,
348
348
  ) {
349
349
  const startSegment = state.segmentCursors.completion - CURSOR_BUFFER_SEGMENTS;
350
350
  // This won't be too many because the jobs all correspond to being scheduled
@@ -352,7 +352,7 @@ async function handleCompletions(
352
352
  const completed = await ctx.db
353
353
  .query("pendingCompletion")
354
354
  .withIndex("segment", (q) =>
355
- q.gte("segment", startSegment).lte("segment", segment)
355
+ q.gte("segment", startSegment).lte("segment", segment),
356
356
  )
357
357
  .collect();
358
358
  state.segmentCursors.completion = segment;
@@ -365,7 +365,7 @@ async function handleCompletions(
365
365
  const running = state.running.find((r) => r.workId === c.workId);
366
366
  if (!running) {
367
367
  console.error(
368
- `[main] completing ${c.workId} but it's not in "running"`
368
+ `[main] completing ${c.workId} but it's not in "running"`,
369
369
  );
370
370
  return;
371
371
  }
@@ -396,12 +396,12 @@ async function handleCompletions(
396
396
  state.report.failed++;
397
397
  }
398
398
  }
399
- })
399
+ }),
400
400
  );
401
401
  // We do this after so the stats above know if it was in progress.
402
402
  const before = state.running.length;
403
403
  state.running = state.running.filter(
404
- (r) => !completed.some((c) => c.workId === r.workId)
404
+ (r) => !completed.some((c) => c.workId === r.workId),
405
405
  );
406
406
  const numCompleted = before - state.running.length;
407
407
  state.report.completed += numCompleted;
@@ -414,13 +414,13 @@ async function handleCancelation(
414
414
  state: Doc<"internalState">,
415
415
  segment: bigint,
416
416
  console: Logger,
417
- toCancel: CompleteJob[]
417
+ toCancel: CompleteJob[],
418
418
  ) {
419
419
  const start = state.segmentCursors.cancelation - CURSOR_BUFFER_SEGMENTS;
420
420
  const canceled = await ctx.db
421
421
  .query("pendingCancelation")
422
422
  .withIndex("segment", (q) =>
423
- q.gte("segment", start).lte("segment", segment)
423
+ q.gte("segment", start).lte("segment", segment),
424
424
  )
425
425
  .take(CANCELLATION_BATCH_SIZE);
426
426
  state.segmentCursors.cancelation = canceled.at(-1)?.segment ?? segment;
@@ -458,9 +458,9 @@ async function handleCancelation(
458
458
  return { workId, runResult, attempt: work.attempts };
459
459
  }
460
460
  return null;
461
- })
461
+ }),
462
462
  )
463
- ).flatMap((r) => (r ? [r] : []))
463
+ ).flatMap((r) => (r ? [r] : [])),
464
464
  );
465
465
  if (jobs.length) {
466
466
  await ctx.scheduler.runAfter(0, internal.complete.complete, { jobs });
@@ -470,7 +470,7 @@ async function handleCancelation(
470
470
  async function handleRecovery(
471
471
  ctx: MutationCtx,
472
472
  state: Doc<"internalState">,
473
- console: Logger
473
+ console: Logger,
474
474
  ) {
475
475
  const missing = new Set<Id<"work">>();
476
476
  const oldEnoughToConsider = Date.now() - RECOVERY_THRESHOLD_MS;
@@ -487,7 +487,7 @@ async function handleRecovery(
487
487
  return null;
488
488
  }
489
489
  return { ...r, attempt: work.attempts };
490
- })
490
+ }),
491
491
  )
492
492
  ).flatMap((r) => (r ? [r] : []));
493
493
  state.running = state.running.filter((r) => !missing.has(r.workId));
@@ -501,7 +501,7 @@ async function handleStart(
501
501
  state: Doc<"internalState">,
502
502
  segment: bigint,
503
503
  console: Logger,
504
- { maxParallelism, logLevel }: Config
504
+ { maxParallelism, logLevel }: Config,
505
505
  ) {
506
506
  // Schedule as many as needed to reach maxParallelism.
507
507
  const toSchedule = maxParallelism - state.running.length;
@@ -514,9 +514,9 @@ async function handleStart(
514
514
  q
515
515
  .gte(
516
516
  "segment",
517
- state.segmentCursors.incoming - CURSOR_BUFFER_SEGMENTS
517
+ state.segmentCursors.incoming - CURSOR_BUFFER_SEGMENTS,
518
518
  )
519
- .lte("segment", segment)
519
+ .lte("segment", segment),
520
520
  )
521
521
  .take(toSchedule)
522
522
  : [];
@@ -543,9 +543,9 @@ async function handleStart(
543
543
  const scheduledId = await beginWork(ctx, workId, logLevel, lagMs);
544
544
  await ctx.db.delete(_id);
545
545
  return { scheduledId, workId, started: Date.now() };
546
- })
546
+ }),
547
547
  )
548
- ).flatMap((r) => (r ? [r] : []))
548
+ ).flatMap((r) => (r ? [r] : [])),
549
549
  );
550
550
  }
551
551
 
@@ -553,7 +553,7 @@ async function beginWork(
553
553
  ctx: MutationCtx,
554
554
  workId: Id<"work">,
555
555
  logLevel: LogLevel,
556
- lagMs: number
556
+ lagMs: number,
557
557
  ): Promise<Id<"_scheduled_functions">> {
558
558
  const console = createLogger(logLevel);
559
559
  const work = await ctx.db.get(workId);
@@ -583,7 +583,7 @@ async function beginWork(
583
583
  async function rescheduleJob(
584
584
  ctx: MutationCtx,
585
585
  work: Doc<"work">,
586
- console: Logger
586
+ console: Logger,
587
587
  ): Promise<boolean> {
588
588
  const pendingCancelation = await ctx.db
589
589
  .query("pendingCancelation")
@@ -645,7 +645,7 @@ async function getOrCreateState(ctx: MutationCtx) {
645
645
  const console = createLogger(globals.logLevel);
646
646
  console.error("No internalState in running loop! Re-creating empty one...");
647
647
  return (await ctx.db.get(
648
- await ctx.db.insert("internalState", INITIAL_STATE)
648
+ await ctx.db.insert("internalState", INITIAL_STATE),
649
649
  ))!;
650
650
  }
651
651
 
@@ -656,7 +656,7 @@ async function getOrCreateRunningStatus(ctx: MutationCtx) {
656
656
  const console = createLogger(globals.logLevel);
657
657
  console.error("No runStatus in running loop! Re-creating one...");
658
658
  return (await ctx.db.get(
659
- await ctx.db.insert("runStatus", { state: { kind: "running" } })
659
+ await ctx.db.insert("runStatus", { state: { kind: "running" } }),
660
660
  ))!;
661
661
  }
662
662
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
@@ -28,7 +28,7 @@ describe("recovery", () => {
28
28
  // Helper function to create a work item
29
29
  async function makeDummyWork(
30
30
  ctx: MutationCtx,
31
- overrides: Partial<WithoutSystemFields<Doc<"work">>> = {}
31
+ overrides: Partial<WithoutSystemFields<Doc<"work">>> = {},
32
32
  ) {
33
33
  return ctx.db.insert("work", {
34
34
  fnType: "action",
@@ -43,7 +43,7 @@ describe("recovery", () => {
43
43
  // Helper function to create a scheduled function
44
44
  async function makeDummyScheduledFunction(
45
45
  ctx: MutationCtx,
46
- workId: Id<"work">
46
+ workId: Id<"work">,
47
47
  ) {
48
48
  return ctx.scheduler.runAfter(0, internal.worker.runActionWrapper, {
49
49
  workId,
@@ -226,7 +226,7 @@ describe("recovery", () => {
226
226
  expect(pendingCompletions[0].runResult.kind).toBe("failed");
227
227
  assert(pendingCompletions[0].runResult.kind === "failed");
228
228
  expect(pendingCompletions[0].runResult.error).toContain(
229
- "Scheduled job not found"
229
+ "Scheduled job not found",
230
230
  );
231
231
  });
232
232
  });
@@ -292,7 +292,7 @@ describe("recovery", () => {
292
292
  expect(pendingCompletions[0].runResult.kind).toBe("failed");
293
293
  assert(pendingCompletions[0].runResult.kind === "failed");
294
294
  expect(pendingCompletions[0].runResult.error).toBe(
295
- "Function execution failed"
295
+ "Function execution failed",
296
296
  );
297
297
  });
298
298
  });
@@ -357,7 +357,7 @@ describe("recovery", () => {
357
357
  expect(pendingCompletions[0].runResult.kind).toBe("failed");
358
358
  assert(pendingCompletions[0].runResult.kind === "failed");
359
359
  expect(pendingCompletions[0].runResult.error).toBe(
360
- "Canceled via scheduler"
360
+ "Canceled via scheduler",
361
361
  );
362
362
  });
363
363
  });
@@ -451,10 +451,10 @@ describe("recovery", () => {
451
451
 
452
452
  // Find completions for each work ID
453
453
  const completion1 = pendingCompletions.find(
454
- (pc) => pc.workId === workId1
454
+ (pc) => pc.workId === workId1,
455
455
  );
456
456
  const completion2 = pendingCompletions.find(
457
- (pc) => pc.workId === workId2
457
+ (pc) => pc.workId === workId2,
458
458
  );
459
459
 
460
460
  expect(completion1).toBeDefined();
@@ -10,7 +10,7 @@ const recoveryArgs = v.object({
10
10
  workId: v.id("work"),
11
11
  attempt: v.number(),
12
12
  started: v.number(),
13
- })
13
+ }),
14
14
  ),
15
15
  });
16
16
 
@@ -37,7 +37,7 @@ export const recover = internalMutation({
37
37
  // only exported for testing
38
38
  export async function recoveryHandler(
39
39
  ctx: MutationCtx,
40
- { jobs }: Infer<typeof recoveryArgs>
40
+ { jobs }: Infer<typeof recoveryArgs>,
41
41
  ) {
42
42
  const globals = await ctx.db.query("globals").unique();
43
43
  const console = createLogger(globals?.logLevel);
@@ -37,7 +37,7 @@ export default defineSchema({
37
37
  workId: v.id("work"),
38
38
  scheduledId: v.id("_scheduled_functions"),
39
39
  started: v.number(),
40
- })
40
+ }),
41
41
  ),
42
42
  }),
43
43
 
@@ -53,7 +53,7 @@ export default defineSchema({
53
53
  saturated: v.boolean(),
54
54
  generation: v.int64(),
55
55
  }),
56
- v.object({ kind: v.literal("idle"), generation: v.int64() })
56
+ v.object({ kind: v.literal("idle"), generation: v.int64() }),
57
57
  ),
58
58
  }),
59
59
 
@@ -6,7 +6,7 @@ import { type Logger, logLevel } from "./logging.js";
6
6
  export const fnType = v.union(
7
7
  v.literal("action"),
8
8
  v.literal("mutation"),
9
- v.literal("query")
9
+ v.literal("query"),
10
10
  );
11
11
 
12
12
  export const DEFAULT_MAX_PARALLELISM = 10;
@@ -81,7 +81,7 @@ export const vResultValidator = v.union(
81
81
  }),
82
82
  v.object({
83
83
  kind: v.literal("canceled"),
84
- })
84
+ }),
85
85
  );
86
86
  export type RunResult = Infer<typeof vResultValidator>;
87
87
 
@@ -119,8 +119,8 @@ export const status = v.union(
119
119
  }),
120
120
  v.object({
121
121
  state: v.literal("finished"),
122
- })
123
- )
122
+ }),
123
+ ),
124
124
  );
125
125
  export type Status = Infer<typeof status>;
126
126
 
@@ -132,7 +132,7 @@ export function boundScheduledTime(ms: number, console: Logger): number {
132
132
  if (ms > Date.now() + 4 * YEAR) {
133
133
  console.error(
134
134
  "scheduled time is too far in the future, defaulting to 1 year from now",
135
- ms
135
+ ms,
136
136
  );
137
137
  return Date.now() + YEAR;
138
138
  }
@@ -240,7 +240,7 @@ describe("stats", () => {
240
240
 
241
241
  // Check that one of the scheduled functions is calculateBacklogAndReport
242
242
  const calculateBacklogScheduled = scheduledFunctions.find(
243
- (sf) => sf.name === "stats:calculateBacklogAndReport"
243
+ (sf) => sf.name === "stats:calculateBacklogAndReport",
244
244
  );
245
245
  expect(calculateBacklogScheduled).toBeDefined();
246
246
  assert(calculateBacklogScheduled);
@@ -306,7 +306,7 @@ describe("stats", () => {
306
306
  return await paginator(ctx.db, schema)
307
307
  .query("pendingStart")
308
308
  .withIndex("segment", (q) =>
309
- q.gte("segment", 0n).lt("segment", currentSegment)
309
+ q.gte("segment", 0n).lt("segment", currentSegment),
310
310
  )
311
311
  .paginate({
312
312
  numItems: 1,
@@ -336,7 +336,7 @@ describe("stats", () => {
336
336
 
337
337
  // Since our backlog is small, no additional scheduled functions should be created
338
338
  const calculateBacklogScheduled = scheduledFunctions.find(
339
- (sf) => sf.name === "stats:calculateBacklogAndReport"
339
+ (sf) => sf.name === "stats:calculateBacklogAndReport",
340
340
  );
341
341
  expect(calculateBacklogScheduled).toBeUndefined();
342
342
  });
@@ -26,7 +26,7 @@ export function recordEnqueued(
26
26
  workId: Id<"work">;
27
27
  fnName: string;
28
28
  runAt: number;
29
- }
29
+ },
30
30
  ) {
31
31
  console.event("enqueued", {
32
32
  ...data,
@@ -37,7 +37,7 @@ export function recordEnqueued(
37
37
  export function recordStarted(
38
38
  console: Logger,
39
39
  work: Doc<"work">,
40
- lagMs: number
40
+ lagMs: number,
41
41
  ) {
42
42
  console.event("started", {
43
43
  workId: work._id,
@@ -51,7 +51,7 @@ export function recordStarted(
51
51
  export function recordCompleted(
52
52
  console: Logger,
53
53
  work: Doc<"work">,
54
- status: "success" | "failed" | "canceled" | "retrying"
54
+ status: "success" | "failed" | "canceled" | "retrying",
55
55
  ) {
56
56
  console.event("completed", {
57
57
  workId: work._id,
@@ -66,7 +66,7 @@ export async function generateReport(
66
66
  ctx: MutationCtx,
67
67
  console: Logger,
68
68
  state: Doc<"internalState">,
69
- { maxParallelism, logLevel }: Config
69
+ { maxParallelism, logLevel }: Config,
70
70
  ) {
71
71
  if (!shouldLog(logLevel, "REPORT")) {
72
72
  // Don't waste time if we're not going to log.
@@ -78,7 +78,7 @@ export async function generateReport(
78
78
  .withIndex("segment", (q) =>
79
79
  q
80
80
  .gte("segment", state.segmentCursors.incoming)
81
- .lt("segment", currentSegment)
81
+ .lt("segment", currentSegment),
82
82
  )
83
83
  .paginate({
84
84
  numItems: maxParallelism,
@@ -125,7 +125,7 @@ export const calculateBacklogAndReport = internalMutation({
125
125
 
126
126
  function recordReport(
127
127
  console: Logger,
128
- report: Doc<"internalState">["report"] & { running: number; backlog: number }
128
+ report: Doc<"internalState">["report"] & { running: number; backlog: number },
129
129
  ) {
130
130
  const { completed, failed, retries } = report;
131
131
  const withoutRetries = completed - retries;
package/src/test.ts CHANGED
@@ -1,11 +1,18 @@
1
+ /// <reference types="vite/client" />
1
2
  import type { TestConvex } from "convex-test";
2
3
  import type { GenericSchema, SchemaDefinition } from "convex/server";
3
4
  import schema from "./component/schema.js";
4
5
  const modules = import.meta.glob("./component/**/*.ts");
5
- function register(
6
+
7
+ /**
8
+ * Register the component with the test convex instance.
9
+ * @param t - The test convex instance, e.g. from calling `convexTest`.
10
+ * @param name - The name of the component, as registered in convex.config.ts.
11
+ */
12
+ export function register(
6
13
  t: TestConvex<SchemaDefinition<GenericSchema, boolean>>,
7
- name: string = "workpool"
14
+ name: string = "workpool",
8
15
  ) {
9
16
  t.registerComponent(name, schema, modules);
10
17
  }
11
- export default { schema, modules, register };
18
+ export default { register, schema, modules };
@@ -1,151 +0,0 @@
1
- /* eslint-disable */
2
- /**
3
- * Generated `api` utility.
4
- *
5
- * THIS CODE IS AUTOMATICALLY GENERATED.
6
- *
7
- * To regenerate, run `npx convex dev`.
8
- * @module
9
- */
10
-
11
- import type * as complete from "../complete.js";
12
- import type * as crons from "../crons.js";
13
- import type * as danger from "../danger.js";
14
- import type * as kick from "../kick.js";
15
- import type * as lib from "../lib.js";
16
- import type * as logging from "../logging.js";
17
- import type * as loop from "../loop.js";
18
- import type * as recovery from "../recovery.js";
19
- import type * as shared from "../shared.js";
20
- import type * as stats from "../stats.js";
21
- import type * as worker from "../worker.js";
22
-
23
- import type {
24
- ApiFromModules,
25
- FilterApi,
26
- FunctionReference,
27
- } from "convex/server";
28
-
29
- /**
30
- * A utility for referencing Convex functions in your app's API.
31
- *
32
- * Usage:
33
- * ```js
34
- * const myFunctionReference = api.myModule.myFunction;
35
- * ```
36
- */
37
- declare const fullApi: ApiFromModules<{
38
- complete: typeof complete;
39
- crons: typeof crons;
40
- danger: typeof danger;
41
- kick: typeof kick;
42
- lib: typeof lib;
43
- logging: typeof logging;
44
- loop: typeof loop;
45
- recovery: typeof recovery;
46
- shared: typeof shared;
47
- stats: typeof stats;
48
- worker: typeof worker;
49
- }>;
50
- export type Mounts = {
51
- lib: {
52
- cancel: FunctionReference<
53
- "mutation",
54
- "public",
55
- {
56
- id: string;
57
- logLevel: "DEBUG" | "TRACE" | "INFO" | "REPORT" | "WARN" | "ERROR";
58
- },
59
- any
60
- >;
61
- cancelAll: FunctionReference<
62
- "mutation",
63
- "public",
64
- {
65
- before?: number;
66
- limit?: number;
67
- logLevel: "DEBUG" | "TRACE" | "INFO" | "REPORT" | "WARN" | "ERROR";
68
- },
69
- any
70
- >;
71
- enqueue: FunctionReference<
72
- "mutation",
73
- "public",
74
- {
75
- config: {
76
- logLevel: "DEBUG" | "TRACE" | "INFO" | "REPORT" | "WARN" | "ERROR";
77
- maxParallelism: number;
78
- };
79
- fnArgs: any;
80
- fnHandle: string;
81
- fnName: string;
82
- fnType: "action" | "mutation" | "query";
83
- onComplete?: { context?: any; fnHandle: string };
84
- retryBehavior?: {
85
- base: number;
86
- initialBackoffMs: number;
87
- maxAttempts: number;
88
- };
89
- runAt: number;
90
- },
91
- string
92
- >;
93
- enqueueBatch: FunctionReference<
94
- "mutation",
95
- "public",
96
- {
97
- config: {
98
- logLevel: "DEBUG" | "TRACE" | "INFO" | "REPORT" | "WARN" | "ERROR";
99
- maxParallelism: number;
100
- };
101
- items: Array<{
102
- fnArgs: any;
103
- fnHandle: string;
104
- fnName: string;
105
- fnType: "action" | "mutation" | "query";
106
- onComplete?: { context?: any; fnHandle: string };
107
- retryBehavior?: {
108
- base: number;
109
- initialBackoffMs: number;
110
- maxAttempts: number;
111
- };
112
- runAt: number;
113
- }>;
114
- },
115
- Array<string>
116
- >;
117
- status: FunctionReference<
118
- "query",
119
- "public",
120
- { id: string },
121
- | { previousAttempts: number; state: "pending" }
122
- | { previousAttempts: number; state: "running" }
123
- | { state: "finished" }
124
- >;
125
- statusBatch: FunctionReference<
126
- "query",
127
- "public",
128
- { ids: Array<string> },
129
- Array<
130
- | { previousAttempts: number; state: "pending" }
131
- | { previousAttempts: number; state: "running" }
132
- | { state: "finished" }
133
- >
134
- >;
135
- };
136
- };
137
- // For now fullApiWithMounts is only fullApi which provides
138
- // jump-to-definition in component client code.
139
- // Use Mounts for the same type without the inference.
140
- declare const fullApiWithMounts: typeof fullApi;
141
-
142
- export declare const api: FilterApi<
143
- typeof fullApiWithMounts,
144
- FunctionReference<any, "public">
145
- >;
146
- export declare const internal: FilterApi<
147
- typeof fullApiWithMounts,
148
- FunctionReference<any, "internal">
149
- >;
150
-
151
- export declare const components: {};
@@ -1,23 +0,0 @@
1
- /* eslint-disable */
2
- /**
3
- * Generated `api` utility.
4
- *
5
- * THIS CODE IS AUTOMATICALLY GENERATED.
6
- *
7
- * To regenerate, run `npx convex dev`.
8
- * @module
9
- */
10
-
11
- import { anyApi, componentsGeneric } from "convex/server";
12
-
13
- /**
14
- * A utility for referencing Convex functions in your app's API.
15
- *
16
- * Usage:
17
- * ```js
18
- * const myFunctionReference = api.myModule.myFunction;
19
- * ```
20
- */
21
- export const api = anyApi;
22
- export const internal = anyApi;
23
- export const components = componentsGeneric();