@convex-dev/workpool 0.1.3-alpha.0 → 0.2.0-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +144 -4
- package/dist/commonjs/client/index.d.ts +123 -35
- package/dist/commonjs/client/index.d.ts.map +1 -1
- package/dist/commonjs/client/index.js +122 -15
- package/dist/commonjs/client/index.js.map +1 -1
- package/dist/commonjs/client/utils.d.ts +16 -0
- package/dist/commonjs/client/utils.d.ts.map +1 -0
- package/dist/commonjs/client/utils.js +2 -0
- package/dist/commonjs/client/utils.js.map +1 -0
- package/dist/commonjs/component/convex.config.d.ts.map +1 -1
- package/dist/commonjs/component/convex.config.js +0 -2
- package/dist/commonjs/component/convex.config.js.map +1 -1
- package/dist/commonjs/component/kick.d.ts +9 -0
- package/dist/commonjs/component/kick.d.ts.map +1 -0
- package/dist/commonjs/component/kick.js +97 -0
- package/dist/commonjs/component/kick.js.map +1 -0
- package/dist/commonjs/component/lib.d.ts +23 -32
- package/dist/commonjs/component/lib.d.ts.map +1 -1
- package/dist/commonjs/component/lib.js +70 -564
- package/dist/commonjs/component/lib.js.map +1 -1
- package/dist/commonjs/component/logging.d.ts +6 -4
- package/dist/commonjs/component/logging.d.ts.map +1 -1
- package/dist/commonjs/component/logging.js +13 -2
- package/dist/commonjs/component/logging.js.map +1 -1
- package/dist/commonjs/component/loop.d.ts +26 -0
- package/dist/commonjs/component/loop.d.ts.map +1 -0
- package/dist/commonjs/component/loop.js +453 -0
- package/dist/commonjs/component/loop.js.map +1 -0
- package/dist/commonjs/component/recovery.d.ts +8 -0
- package/dist/commonjs/component/recovery.d.ts.map +1 -0
- package/dist/commonjs/component/recovery.js +74 -0
- package/dist/commonjs/component/recovery.js.map +1 -0
- package/dist/commonjs/component/schema.d.ts +163 -93
- package/dist/commonjs/component/schema.d.ts.map +1 -1
- package/dist/commonjs/component/schema.js +54 -65
- package/dist/commonjs/component/schema.js.map +1 -1
- package/dist/commonjs/component/shared.d.ts +130 -0
- package/dist/commonjs/component/shared.d.ts.map +1 -0
- package/dist/commonjs/component/shared.js +65 -0
- package/dist/commonjs/component/shared.js.map +1 -0
- package/dist/commonjs/component/stats.d.ts +3 -2
- package/dist/commonjs/component/stats.d.ts.map +1 -1
- package/dist/commonjs/component/stats.js +17 -3
- package/dist/commonjs/component/stats.js.map +1 -1
- package/dist/commonjs/component/worker.d.ts +25 -0
- package/dist/commonjs/component/worker.d.ts.map +1 -0
- package/dist/commonjs/component/worker.js +86 -0
- package/dist/commonjs/component/worker.js.map +1 -0
- package/dist/esm/client/index.d.ts +123 -35
- package/dist/esm/client/index.d.ts.map +1 -1
- package/dist/esm/client/index.js +122 -15
- package/dist/esm/client/index.js.map +1 -1
- package/dist/esm/client/utils.d.ts +16 -0
- package/dist/esm/client/utils.d.ts.map +1 -0
- package/dist/esm/client/utils.js +2 -0
- package/dist/esm/client/utils.js.map +1 -0
- package/dist/esm/component/convex.config.d.ts.map +1 -1
- package/dist/esm/component/convex.config.js +0 -2
- package/dist/esm/component/convex.config.js.map +1 -1
- package/dist/esm/component/kick.d.ts +9 -0
- package/dist/esm/component/kick.d.ts.map +1 -0
- package/dist/esm/component/kick.js +97 -0
- package/dist/esm/component/kick.js.map +1 -0
- package/dist/esm/component/lib.d.ts +23 -32
- package/dist/esm/component/lib.d.ts.map +1 -1
- package/dist/esm/component/lib.js +70 -564
- package/dist/esm/component/lib.js.map +1 -1
- package/dist/esm/component/logging.d.ts +6 -4
- package/dist/esm/component/logging.d.ts.map +1 -1
- package/dist/esm/component/logging.js +13 -2
- package/dist/esm/component/logging.js.map +1 -1
- package/dist/esm/component/loop.d.ts +26 -0
- package/dist/esm/component/loop.d.ts.map +1 -0
- package/dist/esm/component/loop.js +453 -0
- package/dist/esm/component/loop.js.map +1 -0
- package/dist/esm/component/recovery.d.ts +8 -0
- package/dist/esm/component/recovery.d.ts.map +1 -0
- package/dist/esm/component/recovery.js +74 -0
- package/dist/esm/component/recovery.js.map +1 -0
- package/dist/esm/component/schema.d.ts +163 -93
- package/dist/esm/component/schema.d.ts.map +1 -1
- package/dist/esm/component/schema.js +54 -65
- package/dist/esm/component/schema.js.map +1 -1
- package/dist/esm/component/shared.d.ts +130 -0
- package/dist/esm/component/shared.d.ts.map +1 -0
- package/dist/esm/component/shared.js +65 -0
- package/dist/esm/component/shared.js.map +1 -0
- package/dist/esm/component/stats.d.ts +3 -2
- package/dist/esm/component/stats.d.ts.map +1 -1
- package/dist/esm/component/stats.js +17 -3
- package/dist/esm/component/stats.js.map +1 -1
- package/dist/esm/component/worker.d.ts +25 -0
- package/dist/esm/component/worker.d.ts.map +1 -0
- package/dist/esm/component/worker.js +86 -0
- package/dist/esm/component/worker.js.map +1 -0
- package/package.json +6 -5
- package/src/client/index.ts +231 -70
- package/src/client/utils.ts +45 -0
- package/src/component/README.md +73 -0
- package/src/component/_generated/api.d.ts +36 -66
- package/src/component/convex.config.ts +0 -3
- package/src/component/kick.test.ts +286 -0
- package/src/component/kick.ts +118 -0
- package/src/component/lib.test.ts +203 -0
- package/src/component/lib.ts +80 -671
- package/src/component/logging.ts +24 -10
- package/src/component/loop.ts +579 -0
- package/src/component/recovery.ts +79 -0
- package/src/component/schema.ts +59 -77
- package/src/component/setup.test.ts +5 -0
- package/src/component/shared.ts +127 -0
- package/src/component/stats.ts +20 -6
- package/src/component/worker.ts +94 -0
|
@@ -0,0 +1,286 @@
|
|
|
1
|
+
// Unit tests for kickMainLoop: the helper that bootstraps the singleton
// globals / runStatus / internalState documents and (re)schedules the
// component's main loop when work is enqueued, canceled, saved, or recovered.
import { convexTest } from "convex-test";
import {
  afterEach,
  assert,
  beforeEach,
  describe,
  expect,
  test,
  vi,
} from "vitest";
import schema from "./schema.js";
import { modules } from "./setup.test.js";
import { DEFAULT_MAX_PARALLELISM, kickMainLoop } from "./kick.js";
import { DEFAULT_LOG_LEVEL } from "./logging.js";
import { internal } from "./_generated/api";
import { toSegment, fromSegment, nextSegment } from "./shared";
import { Id } from "./_generated/dataModel.js";

describe("kickMainLoop", () => {
  beforeEach(() => {
    // Segment math depends on Date.now(), so pin the clock for determinism.
    vi.useFakeTimers();
    vi.setSystemTime(new Date(1765432101234)); // Set to a known time
  });
  afterEach(() => {
    vi.useRealTimers();
  });

  test("ensures it creates globals on first call", async () => {
    const t = convexTest(schema, modules);
    await t.run(async (ctx) => {
      // A first kick should bootstrap all three singleton documents.
      await kickMainLoop(ctx, "enqueue");
      const globals = await ctx.db.query("globals").unique();
      expect(globals).not.toBeNull();
      const runStatus = await ctx.db.query("runStatus").unique();
      expect(runStatus).not.toBeNull();
      assert(runStatus);
      expect(runStatus.state.kind).toBe("running");
      const internalState = await ctx.db.query("internalState").unique();
      expect(internalState).not.toBeNull();
      assert(internalState);
      expect(internalState.generation).toBe(0n);
    });
  });

  test("it updates the globals when they change", async () => {
    const t = convexTest(schema, modules);
    await t.run(async (ctx) => {
      await kickMainLoop(ctx, "enqueue");
      const globals = await ctx.db.query("globals").unique();
      expect(globals).not.toBeNull();
      assert(globals);
      // Defaults are applied when no config is passed on the first kick.
      expect(globals.maxParallelism).toBe(DEFAULT_MAX_PARALLELISM);
      expect(globals.logLevel).toBe(DEFAULT_LOG_LEVEL);
      await kickMainLoop(ctx, "enqueue", {
        maxParallelism: DEFAULT_MAX_PARALLELISM + 1,
        logLevel: "DEBUG",
      });
      // A later kick with explicit config overwrites the stored globals.
      const after = await ctx.db.query("globals").unique();
      expect(after).not.toBeNull();
      assert(after);
      expect(after.maxParallelism).toBe(DEFAULT_MAX_PARALLELISM + 1);
      expect(after.logLevel).toBe("DEBUG");
    });
  });

  test("does not kick when already running", async () => {
    const t = convexTest(schema, modules);
    await t.run(async (ctx) => {
      // First kick to set up initial state
      await kickMainLoop(ctx, "enqueue");
      const runStatus = await ctx.db.query("runStatus").unique();
      assert(runStatus);
      expect(runStatus.state.kind).toBe("running");

      // Second kick should not change state
      await kickMainLoop(ctx, "enqueue");
      const afterStatus = await ctx.db.query("runStatus").unique();
      assert(afterStatus);
      expect(afterStatus.state.kind).toBe("running");
      expect(afterStatus._id).toBe(runStatus._id);
    });
  });

  test("kicks when scheduled with later segment", async () => {
    const t = convexTest(schema, modules);
    await t.run(async (ctx) => {
      // Set up initial scheduled state
      await kickMainLoop(ctx, "enqueue");
      const runStatus = await ctx.db.query("runStatus").unique();
      assert(runStatus);

      // Get current segment and schedule for future
      const now = Date.now();
      const futureTime = now + 10000; // 10 seconds in future
      const futureSegment = toSegment(futureTime);

      // Manually set to scheduled state with future segment
      const scheduledId = await ctx.scheduler.runAfter(
        fromSegment(futureSegment) - now,
        internal.loop.main,
        {
          generation: 0n,
          segment: futureSegment,
        }
      );
      await ctx.db.patch(runStatus._id, {
        state: {
          kind: "scheduled",
          scheduledId,
          saturated: false,
          generation: 0n,
          segment: futureSegment,
        },
      });

      // Kick should reschedule to run sooner
      await kickMainLoop(ctx, "enqueue");

      const afterStatus = await ctx.db.query("runStatus").unique();
      assert(afterStatus);
      expect(afterStatus.state.kind).toBe("running");
    });
  });

  test("does not kick when scheduled and saturated", async () => {
    const t = convexTest(schema, modules);
    await t.run(async (ctx) => {
      // Set up initial scheduled state
      await kickMainLoop(ctx, "enqueue");
      const runStatus = await ctx.db.query("runStatus").unique();
      assert(runStatus);

      // Get current segment
      const now = Date.now();
      const nearFutureTime = now + 1000; // 1 second in future
      const nearFutureSegment = toSegment(nearFutureTime);

      // Manually set to scheduled saturated state
      const scheduledId = await ctx.scheduler.runAfter(
        fromSegment(nearFutureSegment) - now,
        internal.loop.main,
        {
          generation: 0n,
          segment: nearFutureSegment,
        }
      );
      await ctx.db.patch(runStatus._id, {
        state: {
          kind: "scheduled",
          scheduledId,
          saturated: true,
          generation: 0n,
          segment: nearFutureSegment,
        },
      });

      // Kick should not change state when saturated
      await kickMainLoop(ctx, "enqueue");
      const afterStatus = await ctx.db.query("runStatus").unique();
      assert(afterStatus);
      expect(afterStatus.state.kind).toBe("scheduled");
      assert(afterStatus.state.kind === "scheduled");
      expect(afterStatus.state.saturated).toBe(true);
    });
  });

  test("recovers if runStatus is deleted but other state exists", async () => {
    const t = convexTest(schema, modules);
    await t.run(async (ctx) => {
      // First create all state
      await kickMainLoop(ctx, "enqueue");

      // Delete runStatus
      const runStatus = await ctx.db.query("runStatus").unique();
      assert(runStatus);
      await ctx.db.delete(runStatus._id);

      // Kick should recreate runStatus
      await kickMainLoop(ctx, "recovery");
      const newRunStatus = await ctx.db.query("runStatus").unique();
      expect(newRunStatus).not.toBeNull();
      assert(newRunStatus);
      expect(newRunStatus.state.kind).toBe("running");
    });
  });

  test("recovers if globals is deleted but other state exists", async () => {
    const t = convexTest(schema, modules);
    await t.run(async (ctx) => {
      // First create all state
      await kickMainLoop(ctx, "enqueue");

      // Delete globals
      const globals = await ctx.db.query("globals").unique();
      assert(globals);
      await ctx.db.delete(globals._id);

      // Kick should recreate globals
      await kickMainLoop(ctx, "recovery");
      const newGlobals = await ctx.db.query("globals").unique();
      expect(newGlobals).not.toBeNull();
      assert(newGlobals);
      expect(newGlobals.maxParallelism).toBe(DEFAULT_MAX_PARALLELISM);
      expect(newGlobals.logLevel).toBe(DEFAULT_LOG_LEVEL);
    });
  });

  test("handles race conditions between multiple kicks", async () => {
    const t = convexTest(schema, modules);
    // Run kicks in separate transactions to simulate concurrent access
    await Promise.all(
      Array.from({ length: 10 }, () =>
        t.run(async (ctx) => {
          await kickMainLoop(ctx, "enqueue");
        })
      )
    );

    // Check final state in a new transaction
    await t.run(async (ctx) => {
      // Should end up with single consistent state
      const runStatus = await ctx.db.query("runStatus").unique();
      const internalState = await ctx.db.query("internalState").unique();
      const globals = await ctx.db.query("globals").unique();

      // .unique() would throw if duplicates had been inserted by the races.
      expect(runStatus).not.toBeNull();
      expect(internalState).not.toBeNull();
      expect(globals).not.toBeNull();
      assert(runStatus);
      assert(internalState);
      assert(globals);

      expect(runStatus.state.kind).toBe("running");
      expect(internalState.generation).toBe(0n);
      expect(globals.maxParallelism).toBe(DEFAULT_MAX_PARALLELISM);
    });
  });

  test("preserves state between kicks with different sources", async () => {
    const t = convexTest(schema, modules);
    await t.run(async (ctx) => {
      // Initial kick with custom config
      await kickMainLoop(ctx, "enqueue", {
        maxParallelism: 5,
        logLevel: "DEBUG",
      });

      // Kick from different sources
      await kickMainLoop(ctx, "cancel");
      await kickMainLoop(ctx, "saveResult");
      await kickMainLoop(ctx, "recovery");

      // Config should be preserved
      const globals = await ctx.db.query("globals").unique();
      expect(globals).not.toBeNull();
      assert(globals);
      expect(globals.maxParallelism).toBe(5);
      expect(globals.logLevel).toBe("DEBUG");
    });
  });

  test("cancels and starts running when scheduled", async () => {
    const t = convexTest(schema, modules);
    await t.run(async (ctx) => {
      await kickMainLoop(ctx, "enqueue");
      const runStatus = await ctx.db.query("runStatus").unique();
      assert(runStatus);
      // A segment 10 slots past the next one, i.e. "scheduled for later".
      const segment = nextSegment() + 10n;
      await ctx.db.patch(runStatus._id, {
        state: {
          generation: 0n,
          saturated: false,
          kind: "scheduled",
          segment,
          // Empty id: the referenced scheduled function does not exist,
          // exercising the "marked scheduled but not pending" path.
          scheduledId: "" as Id<"_scheduled_functions">,
        },
      });
      // await all scheduled functions to run
      await kickMainLoop(ctx, "enqueue");
      const afterStatus = await ctx.db.query("runStatus").unique();
      assert(afterStatus);
      expect(afterStatus.state.kind).toBe("running");
      assert(afterStatus.state.kind === "running");
    });
  });
});
|
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
import { internal } from "./_generated/api.js";
|
|
2
|
+
import { internalMutation, MutationCtx } from "./_generated/server.js";
|
|
3
|
+
import { createLogger, DEFAULT_LOG_LEVEL } from "./logging.js";
|
|
4
|
+
import { INITIAL_STATE } from "./loop.js";
|
|
5
|
+
import { Config, nextSegment } from "./shared.js";
|
|
6
|
+
|
|
7
|
+
export const DEFAULT_MAX_PARALLELISM = 10;
|
|
8
|
+
/**
|
|
9
|
+
* Called from outside the loop:
|
|
10
|
+
*/
|
|
11
|
+
|
|
12
|
+
export async function kickMainLoop(
|
|
13
|
+
ctx: MutationCtx,
|
|
14
|
+
source: "enqueue" | "cancel" | "saveResult" | "recovery",
|
|
15
|
+
config?: Partial<Config>
|
|
16
|
+
): Promise<void> {
|
|
17
|
+
const globals = await getOrUpdateGlobals(ctx, config);
|
|
18
|
+
const console = createLogger(globals.logLevel);
|
|
19
|
+
const runStatus = await getOrCreateRunStatus(ctx);
|
|
20
|
+
|
|
21
|
+
// Only kick to run now if we're scheduled or idle.
|
|
22
|
+
if (runStatus.state.kind === "running") {
|
|
23
|
+
console.debug(
|
|
24
|
+
`[${source}] main is actively running, so we don't need to kick it`
|
|
25
|
+
);
|
|
26
|
+
return;
|
|
27
|
+
}
|
|
28
|
+
const segment = nextSegment();
|
|
29
|
+
// main is scheduled to run later, so we should cancel it and reschedule.
|
|
30
|
+
if (runStatus.state.kind === "scheduled") {
|
|
31
|
+
if (source === "enqueue" && runStatus.state.saturated) {
|
|
32
|
+
console.debug(
|
|
33
|
+
`[${source}] main is saturated, so we don't need to kick it`
|
|
34
|
+
);
|
|
35
|
+
return;
|
|
36
|
+
}
|
|
37
|
+
if (runStatus.state.segment <= segment) {
|
|
38
|
+
console.debug(
|
|
39
|
+
`[${source}] main is scheduled to run soon enough, so we don't need to kick it`
|
|
40
|
+
);
|
|
41
|
+
return;
|
|
42
|
+
}
|
|
43
|
+
console.debug(
|
|
44
|
+
`[${source}] main is scheduled to run later, so reschedule it to run now`
|
|
45
|
+
);
|
|
46
|
+
const scheduled = await ctx.db.system.get(runStatus.state.scheduledId);
|
|
47
|
+
if (scheduled && scheduled.state.kind === "pending") {
|
|
48
|
+
await ctx.scheduler.cancel(runStatus.state.scheduledId);
|
|
49
|
+
} else {
|
|
50
|
+
console.warn(
|
|
51
|
+
`[${source}] main is marked as scheduled, but it's status is ${scheduled?.state.kind}`
|
|
52
|
+
);
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
console.debug(
|
|
56
|
+
`[${source}] main was scheduled later, so reschedule it to run now`
|
|
57
|
+
);
|
|
58
|
+
await ctx.db.patch(runStatus._id, { state: { kind: "running" } });
|
|
59
|
+
await ctx.scheduler.runAfter(0, internal.loop.main, {
|
|
60
|
+
generation: runStatus.state.generation,
|
|
61
|
+
segment,
|
|
62
|
+
});
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
export const forceKick = internalMutation({
|
|
66
|
+
args: {},
|
|
67
|
+
handler: async (ctx) => {
|
|
68
|
+
const runStatus = await getOrCreateRunStatus(ctx);
|
|
69
|
+
await ctx.db.delete(runStatus._id);
|
|
70
|
+
await kickMainLoop(ctx, "recovery");
|
|
71
|
+
},
|
|
72
|
+
});
|
|
73
|
+
|
|
74
|
+
async function getOrCreateRunStatus(ctx: MutationCtx) {
|
|
75
|
+
let runStatus = await ctx.db.query("runStatus").unique();
|
|
76
|
+
if (!runStatus) {
|
|
77
|
+
const state = await ctx.db.query("internalState").unique();
|
|
78
|
+
const id = await ctx.db.insert("runStatus", {
|
|
79
|
+
state: {
|
|
80
|
+
kind: "idle",
|
|
81
|
+
generation: state?.generation ?? INITIAL_STATE.generation,
|
|
82
|
+
},
|
|
83
|
+
});
|
|
84
|
+
runStatus = (await ctx.db.get(id))!;
|
|
85
|
+
if (!state) {
|
|
86
|
+
await ctx.db.insert("internalState", INITIAL_STATE);
|
|
87
|
+
}
|
|
88
|
+
}
|
|
89
|
+
return runStatus;
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
async function getOrUpdateGlobals(ctx: MutationCtx, config?: Partial<Config>) {
|
|
93
|
+
const globals = await ctx.db.query("globals").unique();
|
|
94
|
+
if (!globals) {
|
|
95
|
+
const id = await ctx.db.insert("globals", {
|
|
96
|
+
maxParallelism: config?.maxParallelism ?? DEFAULT_MAX_PARALLELISM,
|
|
97
|
+
logLevel: config?.logLevel ?? DEFAULT_LOG_LEVEL,
|
|
98
|
+
});
|
|
99
|
+
return (await ctx.db.get(id))!;
|
|
100
|
+
} else if (config) {
|
|
101
|
+
let updated = false;
|
|
102
|
+
if (
|
|
103
|
+
config.maxParallelism &&
|
|
104
|
+
config.maxParallelism !== globals.maxParallelism
|
|
105
|
+
) {
|
|
106
|
+
globals.maxParallelism = config.maxParallelism;
|
|
107
|
+
updated = true;
|
|
108
|
+
}
|
|
109
|
+
if (config.logLevel && config.logLevel !== globals.logLevel) {
|
|
110
|
+
globals.logLevel = config.logLevel;
|
|
111
|
+
updated = true;
|
|
112
|
+
}
|
|
113
|
+
if (updated) {
|
|
114
|
+
await ctx.db.replace(globals._id, globals);
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
return globals;
|
|
118
|
+
}
|
|
@@ -0,0 +1,203 @@
|
|
|
1
|
+
// Tests for the public lib API of the workpool component: enqueueing work,
// canceling single/all items, and reading work status.
import { convexTest } from "convex-test";
import { describe, expect, it, beforeEach, afterEach, vi } from "vitest";
import { Id } from "./_generated/dataModel";
import schema from "./schema";
import { api } from "./_generated/api";

const modules = import.meta.glob("./**/*.ts");

// Mock Id type
type WorkId = Id<"work">;

describe("lib", () => {
  // Builds a fresh in-memory Convex backend for each test.
  async function setupTest() {
    const t = convexTest(schema, modules);
    return t;
  }

  let t: Awaited<ReturnType<typeof setupTest>>;

  beforeEach(async () => {
    vi.useFakeTimers();
    t = await setupTest();
  });

  afterEach(() => {
    vi.useRealTimers();
  });

  describe("enqueue", () => {
    it("should successfully enqueue a work item", async () => {
      const id = await t.mutation(api.lib.enqueue, {
        fnHandle: "testHandle",
        fnName: "testFunction",
        fnArgs: { test: true },
        fnType: "mutation",
        runAt: Date.now(),
        config: {
          maxParallelism: 10,
          logLevel: "INFO",
        },
      });

      // Freshly enqueued work is immediately visible as pending, attempt 0.
      expect(id).toBeDefined();
      const status = await t.query(api.lib.status, { id });
      expect(status).toEqual({ state: "pending", attempt: 0 });
    });

    it("should throw error if maxParallelism is too high", async () => {
      await expect(
        t.mutation(api.lib.enqueue, {
          fnHandle: "testHandle",
          fnName: "testFunction",
          fnArgs: { test: true },
          fnType: "mutation",
          runAt: Date.now(),
          config: {
            maxParallelism: 101, // More than MAX_POSSIBLE_PARALLELISM
            logLevel: "INFO",
          },
        })
      ).rejects.toThrow("maxParallelism must be <= 100");
    });
  });

  describe("cancel", () => {
    it("should successfully queue a work item for cancelation", async () => {
      const id = await t.mutation(api.lib.enqueue, {
        fnHandle: "testHandle",
        fnName: "testFunction",
        fnArgs: { test: true },
        fnType: "mutation",
        runAt: Date.now(),
        config: {
          maxParallelism: 10,
          logLevel: "INFO",
        },
      });

      await t.mutation(api.lib.cancel, {
        id,
        logLevel: "INFO",
      });

      // Verify a pending cancelation was created
      await t.run(async (ctx) => {
        const pendingCancelations = await ctx.db
          .query("pendingCancelation")
          .collect();
        expect(pendingCancelations).toHaveLength(1);
        expect(pendingCancelations[0].workId).toBe(id);
      });
    });
  });

  describe("cancelAll", () => {
    it("should queue multiple work items for cancelation", async () => {
      const ids: WorkId[] = [];
      for (let i = 0; i < 3; i++) {
        const id = await t.mutation(api.lib.enqueue, {
          fnHandle: "testHandle",
          fnName: "testFunction",
          fnArgs: { test: i },
          fnType: "mutation",
          runAt: Date.now(),
          config: {
            maxParallelism: 10,
            logLevel: "INFO",
          },
        });
        ids.push(id);
      }

      await t.mutation(api.lib.cancelAll, {
        logLevel: "INFO",
        before: Date.now() + 1000,
      });

      // Verify pending cancelations were created
      await t.run(async (ctx) => {
        const pendingCancelations = await ctx.db
          .query("pendingCancelation")
          .collect();
        expect(pendingCancelations).toHaveLength(3);
        const canceledIds = pendingCancelations.map((pc) => pc.workId);
        expect(canceledIds).toEqual(expect.arrayContaining(ids));
      });
    });
  });

  describe("status", () => {
    it("should return finished state for non-existent work", async () => {
      const id = await t.mutation(api.lib.enqueue, {
        fnHandle: "testHandle",
        fnName: "testFunction",
        fnArgs: { test: true },
        fnType: "mutation",
        runAt: Date.now(),
        config: {
          maxParallelism: 10,
          logLevel: "INFO",
        },
      });
      // Deleting the work doc simulates work whose record is gone.
      await t.run(async (ctx) => {
        await ctx.db.delete(id);
      });

      const status = await t.query(api.lib.status, { id });
      expect(status).toEqual({ state: "finished" });
    });

    it("should return pending state for newly enqueued work", async () => {
      const id = await t.mutation(api.lib.enqueue, {
        fnHandle: "testHandle",
        fnName: "testFunction",
        fnArgs: { test: true },
        fnType: "mutation",
        runAt: Date.now(),
        config: {
          maxParallelism: 10,
          logLevel: "INFO",
        },
      });

      // Verify work item and pending start were created
      await t.run(async (ctx) => {
        const work = await ctx.db.get(id);
        expect(work).toBeDefined();
        const pendingStarts = await ctx.db.query("pendingStart").collect();
        expect(pendingStarts).toHaveLength(1);
        expect(pendingStarts[0].workId).toBe(id);
      });

      const status = await t.query(api.lib.status, { id });
      expect(status).toEqual({ state: "pending", attempt: 0 });
    });

    it("should return running state when work is in progress", async () => {
      const id = await t.mutation(api.lib.enqueue, {
        fnHandle: "testHandle",
        fnName: "testFunction",
        fnArgs: { test: true },
        fnType: "mutation",
        runAt: Date.now(),
        config: {
          maxParallelism: 10,
          logLevel: "INFO",
        },
      });

      // Delete the pendingStart to simulate work in progress
      await t.run(async (ctx) => {
        const pendingStart = await ctx.db.query("pendingStart").first();
        expect(pendingStart).toBeDefined();
        if (pendingStart) {
          await ctx.db.delete(pendingStart._id);
        }
      });

      const status = await t.query(api.lib.status, { id });
      expect(status).toEqual({ state: "running", attempt: 0 });
    });
  });
});
|