@convex-dev/workpool 0.3.2 → 0.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client/index.d.ts.map +1 -1
- package/dist/client/index.js +33 -7
- package/dist/client/index.js.map +1 -1
- package/dist/component/complete.d.ts.map +1 -1
- package/dist/component/complete.js +50 -4
- package/dist/component/complete.js.map +1 -1
- package/dist/component/danger.d.ts +0 -2
- package/dist/component/danger.d.ts.map +1 -1
- package/dist/component/danger.js +59 -37
- package/dist/component/danger.js.map +1 -1
- package/dist/component/lib.d.ts +1 -1
- package/dist/component/lib.d.ts.map +1 -1
- package/dist/component/lib.js +43 -3
- package/dist/component/lib.js.map +1 -1
- package/dist/component/loop.d.ts.map +1 -1
- package/dist/component/loop.js +7 -10
- package/dist/component/loop.js.map +1 -1
- package/dist/component/recovery.js +7 -7
- package/dist/component/recovery.js.map +1 -1
- package/dist/component/stats.d.ts +2 -2
- package/dist/component/stats.d.ts.map +1 -1
- package/dist/component/stats.js +2 -1
- package/dist/component/stats.js.map +1 -1
- package/dist/component/worker.d.ts +7 -2
- package/dist/component/worker.d.ts.map +1 -1
- package/dist/component/worker.js +37 -7
- package/dist/component/worker.js.map +1 -1
- package/package.json +1 -1
- package/src/client/index.ts +42 -6
- package/src/component/complete.ts +77 -11
- package/src/component/danger.ts +95 -73
- package/src/component/lib.ts +54 -4
- package/src/component/loop.ts +18 -13
- package/src/component/recovery.ts +7 -7
- package/src/component/stats.ts +2 -0
- package/src/component/worker.ts +46 -7
package/src/component/danger.ts
CHANGED
@@ -1,44 +1,53 @@
-import { v } from "convex/values";
+import { v, getConvexSize } from "convex/values";
 import { internal } from "./_generated/api.js";
 import { internalMutation } from "./_generated/server.js";
-import { paginator } from "convex-helpers/server/pagination";
-import schema from "./schema.js";
 
 const DEFAULT_OLDER_THAN = 1000 * 60 * 60 * 24;
+const MAX_ROWS_READ = 100;
+const MAX_BYTES_READ = 4_000_000;
 
 export const clearPending = internalMutation({
   args: {
     olderThan: v.optional(v.number()),
     before: v.optional(v.number()),
-    cursor: v.optional(v.string()),
   },
   handler: async (ctx, args) => {
     const time =
       args.before ?? Date.now() - (args.olderThan ?? DEFAULT_OLDER_THAN);
+    let i = 0,
+      totalBytes = 0,
+      hasMore = false,
+      nextTime;
+    console.log("Clearing pending before", new Date(time).toUTCString());
+    for await (const entry of ctx.db
       .query("pendingStart")
-      .withIndex("by_creation_time", (q) => q.
+      .withIndex("by_creation_time", (q) => q.lte("_creationTime", time))
+      .order("desc")) {
+      i++;
+      const work = await ctx.db.get(entry.workId);
+      totalBytes +=
+        getConvexSize(entry) + getConvexSize(work) + (work?.payloadSize ?? 0);
+      if (i > MAX_ROWS_READ || totalBytes > MAX_BYTES_READ) {
+        hasMore = true;
+        nextTime = entry._creationTime;
+        console.log(`Continuing after ${i} entries, ${totalBytes} bytes`);
+        break;
+      }
+      await ctx.db.delete(entry._id);
+      if (work) {
+        // Clean up any large data stored separately
+        if (work.payloadId) {
+          await ctx.db.delete("payload", work.payloadId);
         }
+        await ctx.db.delete("work", work._id);
+      }
+    }
+    if (hasMore) {
       await ctx.scheduler.runAfter(0, internal.danger.clearPending, {
-        before:
-        cursor: entries.continueCursor,
+        before: nextTime,
       });
+    } else {
+      console.log(`Done clearing pending entries. ${i} in the last batch.`);
     }
   },
 });
@@ -47,62 +56,75 @@ export const clearOldWork = internalMutation({
   args: {
     olderThan: v.optional(v.number()),
     before: v.optional(v.number()),
-    cursor: v.optional(v.string()),
   },
   handler: async (ctx, args) => {
     const time =
       args.before ?? Date.now() - (args.olderThan ?? DEFAULT_OLDER_THAN);
+    let i = 0,
+      totalBytes = 0,
+      hasMore = false,
+      nextTime;
+    console.log("Clearing old work before", new Date(time).toUTCString());
+    for await (const entry of ctx.db
       .query("work")
-      .withIndex("by_creation_time", (q) => q.
+      .withIndex("by_creation_time", (q) => q.lte("_creationTime", time))
+      .order("desc")) {
+      i++;
+      const pendingStart = await ctx.db
+        .query("pendingStart")
+        .withIndex("workId", (q) => q.eq("workId", entry._id))
+        .unique();
+      const pendingCompletion = await ctx.db
+        .query("pendingCompletion")
+        .withIndex("workId", (q) => q.eq("workId", entry._id))
+        .unique();
+      const pendingCancelation = await ctx.db
+        .query("pendingCancelation")
+        .withIndex("workId", (q) => q.eq("workId", entry._id))
+        .unique();
+      totalBytes +=
+        getConvexSize(entry) +
+        getConvexSize(pendingStart) +
+        getConvexSize(pendingCompletion) +
+        getConvexSize(pendingCancelation) +
+        (entry.payloadSize ?? 0);
+      if (i > MAX_ROWS_READ || totalBytes > MAX_BYTES_READ) {
+        hasMore = true;
+        nextTime = entry._creationTime;
+        console.log(`Continuing after ${i} entries, ${totalBytes} bytes`);
+        break;
+      }
+      if (pendingStart) {
+        await ctx.db.delete(pendingStart._id);
+      }
+      if (pendingCompletion) {
+        await ctx.db.delete(pendingCompletion._id);
+      }
+      if (pendingCancelation) {
+        await ctx.db.delete(pendingCancelation._id);
+      }
+      // Clean up any large data stored separately
+      if (entry.payloadId) {
+        await ctx.db.delete(entry.payloadId);
+      }
+      console.debug(
+        `cleared ${entry.fnName}: ${entry.fnArgs} (${Object.entries({
+          pendingStart,
+          pendingCompletion,
+          pendingCancelation,
+        })
+          .filter(([_, v]) => v !== null)
+          .map(([name]) => name)
+          .join(", ")})`,
+      );
+      await ctx.db.delete(entry._id);
+    }
+    if (hasMore) {
       await ctx.scheduler.runAfter(0, internal.danger.clearOldWork, {
-        before:
-        cursor: entries.continueCursor,
+        before: nextTime,
      });
+    } else {
+      console.log(`Done clearing old work. ${i} in the last batch.`);
     }
   },
 });
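Both cleanup mutations now follow the same pattern: scan entries created before `before` (newest first), stop once a row or byte budget is exhausted, and reschedule themselves with the `_creationTime` of the first unprocessed entry as the next cursor. A minimal sketch of that pattern, using hypothetical helper callbacks rather than the component's Convex APIs:

type Entry = { creationTime: number; sizeBytes: number };

const MAX_ROWS = 100;
const MAX_BYTES = 4_000_000;

// Budgeted batch delete: processes at most MAX_ROWS rows / MAX_BYTES bytes per
// run, then hands the remaining range to a follow-up run via `reschedule`.
async function clearBatch(
  listBefore: (before: number) => Promise<Entry[]>, // newest first
  deleteEntry: (entry: Entry) => Promise<void>,
  reschedule: (nextBefore: number) => Promise<void>,
  before: number,
): Promise<void> {
  let rows = 0;
  let bytes = 0;
  for (const entry of await listBefore(before)) {
    rows++;
    bytes += entry.sizeBytes;
    if (rows > MAX_ROWS || bytes > MAX_BYTES) {
      // Budget exhausted: continue from this entry in a fresh run.
      await reschedule(entry.creationTime);
      return;
    }
    await deleteEntry(entry);
  }
  // Everything older than `before` has been removed; no follow-up needed.
}

In the real mutations the follow-up run is scheduled with `ctx.scheduler.runAfter(0, ..., { before: nextTime })`, so a single transaction never reads an unbounded number of rows or bytes.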
package/src/component/lib.ts
CHANGED
@@ -1,6 +1,7 @@
-import { type ObjectType, v } from "convex/values";
+import { type ObjectType, v, getConvexSize } from "convex/values";
+import type { WithoutSystemFields } from "convex/server";
 import { api } from "./_generated/api.js";
-import type
+import { type Doc, type Id } from "./_generated/dataModel.js";
 import {
   mutation,
   type MutationCtx,
@@ -28,6 +29,10 @@ import {
 import { recordEnqueued } from "./stats.js";
 import { getOrUpdateGlobals } from "./config.js";
 
+const INLINE_METADATA_THRESHOLD = 8_000; // 8KB threshold
+const MAX_DOC_SIZE = 1_000_000; // Some buffer for 1MiB actual limit
+const PAYLOAD_DOC_OVERHEAD = 78; // Size of { args: null, context: null }
+
 const itemArgs = {
   fnHandle: v.string(),
   fnName: v.string(),
@@ -59,10 +64,55 @@ async function enqueueHandler(
   { runAt, ...workArgs }: ObjectType<typeof itemArgs>,
 ) {
   runAt = boundScheduledTime(runAt, console);
+
+  const fnArgsSize = getConvexSize(workArgs.fnArgs);
+  if (fnArgsSize > MAX_DOC_SIZE) {
+    throw new Error(
+      `Function arguments for function ${workArgs.fnName} too large: ${fnArgsSize} bytes (max: ${MAX_DOC_SIZE} bytes)`,
+    );
+  }
+
+  let contextSize = 0;
+  const context = workArgs.onComplete?.context;
+  if (context !== undefined) {
+    contextSize = getConvexSize(context);
+    if (contextSize > MAX_DOC_SIZE) {
+      throw new Error(
+        `OnComplete context for function ${workArgs.fnName} too large: ${contextSize} bytes (max: ${MAX_DOC_SIZE} bytes)`,
+      );
+    }
+  }
+
+  const workItem: WithoutSystemFields<Doc<"work">> = {
     ...workArgs,
     attempts: 0,
-  }
+  };
+
+  if (fnArgsSize >= INLINE_METADATA_THRESHOLD) {
+    // Args are large, store separately
+    const payloadDoc: { args: Record<string, any>; context?: unknown } = {
+      args: workArgs.fnArgs,
+    };
+    workItem.payloadSize = fnArgsSize + PAYLOAD_DOC_OVERHEAD;
+    delete workItem.fnArgs;
+    if (contextSize >= INLINE_METADATA_THRESHOLD) {
+      // Context is also too big to inline
+      payloadDoc.context = context;
+      workItem.payloadSize += contextSize;
+      delete workItem.onComplete!.context;
+    }
+    workItem.payloadId = await ctx.db.insert("payload", payloadDoc);
+  } else if (fnArgsSize + contextSize >= INLINE_METADATA_THRESHOLD) {
+    // Args are small enough, but combined with context it's too big.
+    // Store just context in this case.
+    workItem.payloadId = await ctx.db.insert("payload", { context });
+    delete workItem.onComplete!.context;
+    workItem.payloadSize = contextSize + PAYLOAD_DOC_OVERHEAD;
+  }
+
+  // Store the work item
+  const workId = await ctx.db.insert("work", workItem);
+
   await ctx.db.insert("pendingStart", {
     workId,
     segment: max(toSegment(runAt), kickSegment),
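The enqueue path now measures `fnArgs` and the optional `onComplete.context` with `getConvexSize` and decides whether to keep them inline on the `work` document or move them to a separate `payload` document. A small sketch of that decision (hypothetical helper, mirroring the constants and branches above):

const INLINE_METADATA_THRESHOLD = 8_000;
const PAYLOAD_DOC_OVERHEAD = 78;

type Layout =
  | { kind: "inline" } // args and context stay on the work document
  | { kind: "argsInPayload"; payloadSize: number } // args (and possibly context) move out
  | { kind: "contextInPayload"; payloadSize: number }; // only the context moves out

function chooseLayout(fnArgsSize: number, contextSize: number): Layout {
  if (fnArgsSize >= INLINE_METADATA_THRESHOLD) {
    // Large args always move to the payload table; a large context rides along.
    const payloadSize =
      fnArgsSize +
      PAYLOAD_DOC_OVERHEAD +
      (contextSize >= INLINE_METADATA_THRESHOLD ? contextSize : 0);
    return { kind: "argsInPayload", payloadSize };
  }
  if (fnArgsSize + contextSize >= INLINE_METADATA_THRESHOLD) {
    // Args fit inline, but together with the context the document would get
    // too big, so only the context is stored separately.
    return {
      kind: "contextInPayload",
      payloadSize: contextSize + PAYLOAD_DOC_OVERHEAD,
    };
  }
  return { kind: "inline" };
}

// Example: chooseLayout(10_000, 500) -> { kind: "argsInPayload", payloadSize: 10_078 }

Oversized inputs are rejected before this point: anything above MAX_DOC_SIZE throws instead of being stored.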
package/src/component/loop.ts
CHANGED
@@ -560,24 +560,29 @@ async function beginWork(
   if (!work) {
     throw new Error("work not found");
   }
-    const payload = await ctx.db.get(work.payloadId);
-    fnArgs = payload?.args ?? {};
-  }
-  const { attempts: attempt, fnHandle } = work;
-  const args = { workId, fnHandle, fnArgs, logLevel, attempt };
+  const { attempts: attempt, fnHandle, fnArgs, payloadId } = work;
+  const args = { workId, fnHandle, fnArgs, payloadId, logLevel, attempt };
+  let scheduleId;
   if (work.fnType === "action") {
+    scheduleId = await ctx.scheduler.runAfter(
+      0,
+      internal.worker.runActionWrapper,
+      args,
+    );
   } else if (work.fnType === "mutation" || work.fnType === "query") {
+    scheduleId = await ctx.scheduler.runAfter(
+      0,
+      internal.worker.runMutationWrapper,
+      {
+        ...args,
+        fnType: work.fnType,
+      },
+    );
   } else {
     throw new Error(`Unexpected fnType ${work.fnType}`);
   }
+  recordStarted(console, work, lagMs, scheduleId);
+  return scheduleId;
 }
 
 /**
package/src/component/recovery.ts
CHANGED
@@ -1,7 +1,7 @@
 import { type Infer, v } from "convex/values";
 import { internalMutation, type MutationCtx } from "./_generated/server.js";
-import { completeArgs, completeHandler } from "./complete.js";
 import { createLogger } from "./logging.js";
+import { type CompleteJob, completeHandler } from "./complete.js";
 
 const recoveryArgs = v.object({
   jobs: v.array(
@@ -41,7 +41,7 @@ export async function recoveryHandler(
 ) {
   const globals = await ctx.db.query("globals").unique();
   const console = createLogger(globals?.logLevel);
-  const
+  const completionJobs: CompleteJob[] = [];
   for (let i = 0; i < jobs.length; i++) {
     const job = jobs[i];
     const preamble = `[recovery] Scheduled job ${job.scheduledId} for work ${job.workId}`;
@@ -68,7 +68,7 @@ export async function recoveryHandler(
     const scheduled = await ctx.db.system.get(job.scheduledId);
     if (scheduled === null) {
       console.warn(`${preamble} not found in _scheduled_functions`);
+      completionJobs.push({
         workId: job.workId,
         runResult: { kind: "failed", error: `Scheduled job not found` },
         attempt: job.attempt,
@@ -80,7 +80,7 @@ export async function recoveryHandler(
     switch (scheduled.state.kind) {
       case "failed": {
         console.debug(`${preamble} failed and detected in recovery`);
+        completionJobs.push({
           workId: job.workId,
           runResult: scheduled.state,
           attempt: job.attempt,
@@ -89,7 +89,7 @@ export async function recoveryHandler(
       }
       case "canceled": {
         console.debug(`${preamble} was canceled and detected in recovery`);
+        completionJobs.push({
           workId: job.workId,
           runResult: { kind: "failed", error: "Canceled via scheduler" },
           attempt: job.attempt,
@@ -98,7 +98,7 @@ export async function recoveryHandler(
       }
     }
   }
-  if (
-    await completeHandler(ctx, { jobs:
+  if (completionJobs.length > 0) {
+    await completeHandler(ctx, { jobs: completionJobs });
   }
 }
package/src/component/stats.ts
CHANGED
@@ -38,11 +38,13 @@ export function recordStarted(
   console: Logger,
   work: Doc<"work">,
   lagMs: number,
+  scheduledFunctionId: Id<"_scheduled_functions">,
 ) {
   console.event("started", {
     workId: work._id,
     fnName: work.fnName,
     enqueuedAt: work._creationTime,
+    scheduledFunctionId,
     startedAt: Date.now(),
     startLag: lagMs,
   });
package/src/component/worker.ts
CHANGED
@@ -6,26 +6,40 @@
 import type { FunctionHandle } from "convex/server";
 import { v } from "convex/values";
 import { internal } from "./_generated/api.js";
-import {
+import {
+  internalAction,
+  internalMutation,
+  internalQuery,
+} from "./_generated/server.js";
 import { createLogger, logLevel } from "./logging.js";
 import type { RunResult } from "./shared.js";
+import { assert } from "convex-helpers";
 
 export const runMutationWrapper = internalMutation({
   args: {
     workId: v.id("work"),
     fnHandle: v.string(),
+    payloadId: v.optional(v.id("payload")),
+    fnArgs: v.optional(v.record(v.string(), v.any())),
     fnType: v.union(v.literal("query"), v.literal("mutation")),
     logLevel,
     attempt: v.number(),
   },
   handler: async (ctx, { workId, attempt, ...args }) => {
     const console = createLogger(args.logLevel);
+
+    let fnArgs = args.fnArgs;
+    if (!fnArgs) {
+      assert(args.payloadId);
+      const payload = await ctx.db.get(args.payloadId);
+      assert(payload?.args);
+      fnArgs = payload.args;
+    }
+
     try {
       const returnValue = await (args.fnType === "query"
-        ? ctx.runQuery(fnHandle as FunctionHandle<"query">,
-        : ctx.runMutation(fnHandle as FunctionHandle<"mutation">,
+        ? ctx.runQuery(args.fnHandle as FunctionHandle<"query">, fnArgs)
+        : ctx.runMutation(args.fnHandle as FunctionHandle<"mutation">, fnArgs));
       // NOTE: we could run the `saveResult` handler here, or call `ctx.runMutation`,
       // but we want the mutation to be a separate transaction to reduce the window for OCCs.
       await ctx.scheduler.runAfter(0, internal.complete.complete, {
@@ -54,15 +68,26 @@ export const runActionWrapper = internalAction({
   args: {
     workId: v.id("work"),
     fnHandle: v.string(),
-    fnArgs: v.any(),
+    fnArgs: v.optional(v.record(v.string(), v.any())),
+    payloadId: v.optional(v.id("payload")),
     logLevel,
     attempt: v.number(),
   },
   handler: async (ctx, { workId, attempt, ...args }) => {
     const console = createLogger(args.logLevel);
+
+    // Fetch args from payload if stored separately
+    let fnArgs = args.fnArgs;
+    if (fnArgs === undefined) {
+      assert(args.payloadId);
+      fnArgs = await ctx.runQuery(internal.worker.getWorkArgs, {
+        payloadId: args.payloadId,
+      });
+    }
+
     const fnHandle = args.fnHandle as FunctionHandle<"action">;
     try {
-      const returnValue = await ctx.runAction(fnHandle,
+      const returnValue = await ctx.runAction(fnHandle, fnArgs);
       // NOTE: we could run `ctx.runMutation`, but we want to guarantee execution,
       // and `ctx.scheduler.runAfter` won't OCC.
       const runResult: RunResult = { kind: "success", returnValue };
@@ -80,5 +105,19 @@ export const runActionWrapper = internalAction({
   },
 });
 
+// Helper mutation for actions to fetch work args
+export const getWorkArgs = internalQuery({
+  args: {
+    payloadId: v.id("payload"),
+  },
+  returns: v.record(v.string(), v.any()),
+  handler: async (ctx, args) => {
+    const payload = await ctx.db.get("payload", args.payloadId);
+    assert(payload);
+    assert(payload.args);
+    return payload.args;
+  },
+});
+
 // eslint-disable-next-line @typescript-eslint/no-unused-vars
 const console = "THIS IS A REMINDER TO USE createLogger";