@rivetkit/workflow-engine 0.0.0-pr.4600.32b0fc8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/schemas/v1.ts +781 -0
- package/dist/tsup/chunk-4SWXLWKL.cjs +4180 -0
- package/dist/tsup/chunk-4SWXLWKL.cjs.map +1 -0
- package/dist/tsup/chunk-UMFB2AR3.js +4180 -0
- package/dist/tsup/chunk-UMFB2AR3.js.map +1 -0
- package/dist/tsup/index.cjs +93 -0
- package/dist/tsup/index.cjs.map +1 -0
- package/dist/tsup/index.d.cts +1009 -0
- package/dist/tsup/index.d.ts +1009 -0
- package/dist/tsup/index.js +93 -0
- package/dist/tsup/index.js.map +1 -0
- package/dist/tsup/testing.cjs +328 -0
- package/dist/tsup/testing.cjs.map +1 -0
- package/dist/tsup/testing.d.cts +53 -0
- package/dist/tsup/testing.d.ts +53 -0
- package/dist/tsup/testing.js +328 -0
- package/dist/tsup/testing.js.map +1 -0
- package/package.json +70 -0
- package/schemas/serde.ts +609 -0
- package/schemas/v1.bare +203 -0
- package/schemas/versioned.ts +107 -0
- package/src/context.ts +2585 -0
- package/src/driver.ts +108 -0
- package/src/error-utils.ts +87 -0
- package/src/errors.ts +171 -0
- package/src/index.ts +1075 -0
- package/src/keys.ts +303 -0
- package/src/location.ts +171 -0
- package/src/storage.ts +417 -0
- package/src/testing.ts +313 -0
- package/src/types.ts +623 -0
- package/src/utils.ts +48 -0
package/src/storage.ts
ADDED
|
@@ -0,0 +1,417 @@
|
|
|
1
|
+
import {
|
|
2
|
+
deserializeEntry,
|
|
3
|
+
deserializeEntryMetadata,
|
|
4
|
+
deserializeName,
|
|
5
|
+
deserializeWorkflowError,
|
|
6
|
+
deserializeWorkflowOutput,
|
|
7
|
+
deserializeWorkflowState,
|
|
8
|
+
serializeEntry,
|
|
9
|
+
serializeEntryMetadata,
|
|
10
|
+
serializeName,
|
|
11
|
+
serializeWorkflowError,
|
|
12
|
+
serializeWorkflowOutput,
|
|
13
|
+
serializeWorkflowState,
|
|
14
|
+
} from "../schemas/serde.js";
|
|
15
|
+
import type { EngineDriver, KVWrite } from "./driver.js";
|
|
16
|
+
import {
|
|
17
|
+
buildEntryMetadataKey,
|
|
18
|
+
buildEntryMetadataPrefix,
|
|
19
|
+
buildHistoryKey,
|
|
20
|
+
buildHistoryPrefix,
|
|
21
|
+
buildHistoryPrefixAll,
|
|
22
|
+
buildNameKey,
|
|
23
|
+
buildNamePrefix,
|
|
24
|
+
buildWorkflowErrorKey,
|
|
25
|
+
buildWorkflowOutputKey,
|
|
26
|
+
buildWorkflowStateKey,
|
|
27
|
+
compareKeys,
|
|
28
|
+
parseEntryMetadataKey,
|
|
29
|
+
parseNameKey,
|
|
30
|
+
} from "./keys.js";
|
|
31
|
+
import { isLocationPrefix, locationToKey } from "./location.js";
|
|
32
|
+
import type {
|
|
33
|
+
Entry,
|
|
34
|
+
EntryKind,
|
|
35
|
+
EntryMetadata,
|
|
36
|
+
Location,
|
|
37
|
+
Storage,
|
|
38
|
+
WorkflowEntryMetadataSnapshot,
|
|
39
|
+
WorkflowHistoryEntry,
|
|
40
|
+
WorkflowHistorySnapshot,
|
|
41
|
+
} from "./types.js";
|
|
42
|
+
|
|
43
|
+
/**
|
|
44
|
+
* Create an empty storage instance.
|
|
45
|
+
*/
|
|
46
|
+
export function createStorage(): Storage {
|
|
47
|
+
return {
|
|
48
|
+
nameRegistry: [],
|
|
49
|
+
flushedNameCount: 0,
|
|
50
|
+
history: { entries: new Map() },
|
|
51
|
+
entryMetadata: new Map(),
|
|
52
|
+
output: undefined,
|
|
53
|
+
state: "pending",
|
|
54
|
+
flushedState: undefined,
|
|
55
|
+
error: undefined,
|
|
56
|
+
flushedError: undefined,
|
|
57
|
+
flushedOutput: undefined,
|
|
58
|
+
};
|
|
59
|
+
}
|
|
60
|
+
|
|
61
|
+
/**
|
|
62
|
+
* Create a snapshot of workflow history for observers.
|
|
63
|
+
*/
|
|
64
|
+
export function createHistorySnapshot(
|
|
65
|
+
storage: Storage,
|
|
66
|
+
): WorkflowHistorySnapshot {
|
|
67
|
+
const entryMetadata = new Map<string, WorkflowEntryMetadataSnapshot>();
|
|
68
|
+
for (const [id, metadata] of storage.entryMetadata) {
|
|
69
|
+
const { dirty, ...rest } = metadata;
|
|
70
|
+
entryMetadata.set(id, rest);
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
const entries: WorkflowHistoryEntry[] = [];
|
|
74
|
+
const entryKeys = Array.from(storage.history.entries.keys()).sort();
|
|
75
|
+
for (const key of entryKeys) {
|
|
76
|
+
const entry = storage.history.entries.get(key);
|
|
77
|
+
if (!entry) continue;
|
|
78
|
+
const { dirty, ...rest } = entry;
|
|
79
|
+
entries.push(rest);
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
return {
|
|
83
|
+
nameRegistry: [...storage.nameRegistry],
|
|
84
|
+
entries,
|
|
85
|
+
entryMetadata,
|
|
86
|
+
};
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
/**
|
|
90
|
+
* Generate a UUID v4.
|
|
91
|
+
*/
|
|
92
|
+
export function generateId(): string {
|
|
93
|
+
return crypto.randomUUID();
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
/**
|
|
97
|
+
* Create a new entry.
|
|
98
|
+
*/
|
|
99
|
+
export function createEntry(location: Location, kind: EntryKind): Entry {
|
|
100
|
+
return {
|
|
101
|
+
id: generateId(),
|
|
102
|
+
location,
|
|
103
|
+
kind,
|
|
104
|
+
dirty: true,
|
|
105
|
+
};
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
/**
|
|
109
|
+
* Create or get metadata for an entry.
|
|
110
|
+
*/
|
|
111
|
+
export function getOrCreateMetadata(
|
|
112
|
+
storage: Storage,
|
|
113
|
+
entryId: string,
|
|
114
|
+
): EntryMetadata {
|
|
115
|
+
let metadata = storage.entryMetadata.get(entryId);
|
|
116
|
+
if (!metadata) {
|
|
117
|
+
metadata = {
|
|
118
|
+
status: "pending",
|
|
119
|
+
attempts: 0,
|
|
120
|
+
lastAttemptAt: 0,
|
|
121
|
+
createdAt: Date.now(),
|
|
122
|
+
rollbackCompletedAt: undefined,
|
|
123
|
+
rollbackError: undefined,
|
|
124
|
+
dirty: true,
|
|
125
|
+
};
|
|
126
|
+
storage.entryMetadata.set(entryId, metadata);
|
|
127
|
+
}
|
|
128
|
+
return metadata;
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
/**
|
|
132
|
+
* Load storage from the driver.
|
|
133
|
+
*/
|
|
134
|
+
export async function loadStorage(driver: EngineDriver): Promise<Storage> {
|
|
135
|
+
const storage = createStorage();
|
|
136
|
+
|
|
137
|
+
// Load name registry
|
|
138
|
+
const nameEntries = await driver.list(buildNamePrefix());
|
|
139
|
+
// Sort by index to ensure correct order
|
|
140
|
+
nameEntries.sort((a, b) => compareKeys(a.key, b.key));
|
|
141
|
+
for (const entry of nameEntries) {
|
|
142
|
+
const index = parseNameKey(entry.key);
|
|
143
|
+
storage.nameRegistry[index] = deserializeName(entry.value);
|
|
144
|
+
}
|
|
145
|
+
// Track how many names are already persisted
|
|
146
|
+
storage.flushedNameCount = storage.nameRegistry.length;
|
|
147
|
+
|
|
148
|
+
// Load history entries
|
|
149
|
+
const historyEntries = await driver.list(buildHistoryPrefixAll());
|
|
150
|
+
for (const entry of historyEntries) {
|
|
151
|
+
const parsed = deserializeEntry(entry.value);
|
|
152
|
+
parsed.dirty = false;
|
|
153
|
+
// Use locationToKey to match how context.ts looks up entries
|
|
154
|
+
const key = locationToKey(storage, parsed.location);
|
|
155
|
+
storage.history.entries.set(key, parsed);
|
|
156
|
+
}
|
|
157
|
+
|
|
158
|
+
// Load entry metadata so observers can reconstruct workflow state after
|
|
159
|
+
// the actor wakes and rebuilds storage from persisted history.
|
|
160
|
+
const metadataEntries = await driver.list(buildEntryMetadataPrefix());
|
|
161
|
+
for (const entry of metadataEntries) {
|
|
162
|
+
const entryId = parseEntryMetadataKey(entry.key);
|
|
163
|
+
const metadata = deserializeEntryMetadata(entry.value);
|
|
164
|
+
metadata.dirty = false;
|
|
165
|
+
storage.entryMetadata.set(entryId, metadata);
|
|
166
|
+
}
|
|
167
|
+
|
|
168
|
+
// Load workflow state
|
|
169
|
+
const stateValue = await driver.get(buildWorkflowStateKey());
|
|
170
|
+
if (stateValue) {
|
|
171
|
+
storage.state = deserializeWorkflowState(stateValue);
|
|
172
|
+
storage.flushedState = storage.state;
|
|
173
|
+
}
|
|
174
|
+
|
|
175
|
+
// Load output if present
|
|
176
|
+
const outputValue = await driver.get(buildWorkflowOutputKey());
|
|
177
|
+
if (outputValue) {
|
|
178
|
+
storage.output = deserializeWorkflowOutput(outputValue);
|
|
179
|
+
storage.flushedOutput = storage.output;
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
// Load error if present
|
|
183
|
+
const errorValue = await driver.get(buildWorkflowErrorKey());
|
|
184
|
+
if (errorValue) {
|
|
185
|
+
storage.error = deserializeWorkflowError(errorValue);
|
|
186
|
+
storage.flushedError = storage.error;
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
return storage;
|
|
190
|
+
}
|
|
191
|
+
|
|
192
|
+
/**
|
|
193
|
+
* Load metadata for an entry (lazy loading).
|
|
194
|
+
*/
|
|
195
|
+
export async function loadMetadata(
|
|
196
|
+
storage: Storage,
|
|
197
|
+
driver: EngineDriver,
|
|
198
|
+
entryId: string,
|
|
199
|
+
): Promise<EntryMetadata> {
|
|
200
|
+
// Check if already loaded
|
|
201
|
+
const existing = storage.entryMetadata.get(entryId);
|
|
202
|
+
if (existing) {
|
|
203
|
+
return existing;
|
|
204
|
+
}
|
|
205
|
+
|
|
206
|
+
// Load from driver
|
|
207
|
+
const value = await driver.get(buildEntryMetadataKey(entryId));
|
|
208
|
+
if (value) {
|
|
209
|
+
const metadata = deserializeEntryMetadata(value);
|
|
210
|
+
metadata.dirty = false;
|
|
211
|
+
storage.entryMetadata.set(entryId, metadata);
|
|
212
|
+
return metadata;
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
// Create new metadata
|
|
216
|
+
return getOrCreateMetadata(storage, entryId);
|
|
217
|
+
}
|
|
218
|
+
|
|
219
|
+
/**
 * Pending deletions collected by collectLoopPruning to be included
 * in the next flush alongside the state write.
 */
export interface PendingDeletions {
	/** Key prefixes removed via driver.deletePrefix (e.g. a history subtree). */
	prefixes: Uint8Array[];
	/** Individual keys removed via driver.delete (e.g. entry metadata keys). */
	keys: Uint8Array[];
	/** Key ranges removed via driver.deleteRange(start, end). */
	ranges: { start: Uint8Array; end: Uint8Array }[];
}
|
|
228
|
+
|
|
229
|
+
/**
|
|
230
|
+
* Flush all dirty data to the driver. Optionally includes pending
|
|
231
|
+
* deletions so that history pruning happens alongside the
|
|
232
|
+
* state write.
|
|
233
|
+
*/
|
|
234
|
+
export async function flush(
|
|
235
|
+
storage: Storage,
|
|
236
|
+
driver: EngineDriver,
|
|
237
|
+
onHistoryUpdated?: () => void,
|
|
238
|
+
pendingDeletions?: PendingDeletions,
|
|
239
|
+
): Promise<void> {
|
|
240
|
+
const writes: KVWrite[] = [];
|
|
241
|
+
let historyUpdated = false;
|
|
242
|
+
|
|
243
|
+
// Flush only new names (those added since last flush)
|
|
244
|
+
for (
|
|
245
|
+
let i = storage.flushedNameCount;
|
|
246
|
+
i < storage.nameRegistry.length;
|
|
247
|
+
i++
|
|
248
|
+
) {
|
|
249
|
+
const name = storage.nameRegistry[i];
|
|
250
|
+
if (name !== undefined) {
|
|
251
|
+
writes.push({
|
|
252
|
+
key: buildNameKey(i),
|
|
253
|
+
value: serializeName(name),
|
|
254
|
+
});
|
|
255
|
+
historyUpdated = true;
|
|
256
|
+
}
|
|
257
|
+
}
|
|
258
|
+
|
|
259
|
+
// Flush dirty entries
|
|
260
|
+
for (const [, entry] of storage.history.entries) {
|
|
261
|
+
if (entry.dirty) {
|
|
262
|
+
writes.push({
|
|
263
|
+
key: buildHistoryKey(entry.location),
|
|
264
|
+
value: serializeEntry(entry),
|
|
265
|
+
});
|
|
266
|
+
entry.dirty = false;
|
|
267
|
+
historyUpdated = true;
|
|
268
|
+
}
|
|
269
|
+
}
|
|
270
|
+
|
|
271
|
+
// Flush dirty metadata
|
|
272
|
+
for (const [id, metadata] of storage.entryMetadata) {
|
|
273
|
+
if (metadata.dirty) {
|
|
274
|
+
writes.push({
|
|
275
|
+
key: buildEntryMetadataKey(id),
|
|
276
|
+
value: serializeEntryMetadata(metadata),
|
|
277
|
+
});
|
|
278
|
+
metadata.dirty = false;
|
|
279
|
+
historyUpdated = true;
|
|
280
|
+
}
|
|
281
|
+
}
|
|
282
|
+
|
|
283
|
+
// Flush workflow state if changed
|
|
284
|
+
if (storage.state !== storage.flushedState) {
|
|
285
|
+
writes.push({
|
|
286
|
+
key: buildWorkflowStateKey(),
|
|
287
|
+
value: serializeWorkflowState(storage.state),
|
|
288
|
+
});
|
|
289
|
+
}
|
|
290
|
+
|
|
291
|
+
// Flush output if changed
|
|
292
|
+
if (
|
|
293
|
+
storage.output !== undefined &&
|
|
294
|
+
storage.output !== storage.flushedOutput
|
|
295
|
+
) {
|
|
296
|
+
writes.push({
|
|
297
|
+
key: buildWorkflowOutputKey(),
|
|
298
|
+
value: serializeWorkflowOutput(storage.output),
|
|
299
|
+
});
|
|
300
|
+
}
|
|
301
|
+
|
|
302
|
+
// Flush error if changed (compare by message since objects aren't reference-equal)
|
|
303
|
+
const errorChanged =
|
|
304
|
+
storage.error !== undefined &&
|
|
305
|
+
(storage.flushedError === undefined ||
|
|
306
|
+
storage.error.name !== storage.flushedError.name ||
|
|
307
|
+
storage.error.message !== storage.flushedError.message);
|
|
308
|
+
if (errorChanged) {
|
|
309
|
+
writes.push({
|
|
310
|
+
key: buildWorkflowErrorKey(),
|
|
311
|
+
value: serializeWorkflowError(storage.error!),
|
|
312
|
+
});
|
|
313
|
+
}
|
|
314
|
+
|
|
315
|
+
if (writes.length > 0) {
|
|
316
|
+
await driver.batch(writes);
|
|
317
|
+
}
|
|
318
|
+
|
|
319
|
+
// Apply pending deletions after the batch write. These are collected
|
|
320
|
+
// by collectLoopPruning so pruning happens alongside the state write.
|
|
321
|
+
if (pendingDeletions) {
|
|
322
|
+
const deleteOps: Promise<void>[] = [];
|
|
323
|
+
for (const prefix of pendingDeletions.prefixes) {
|
|
324
|
+
deleteOps.push(driver.deletePrefix(prefix));
|
|
325
|
+
}
|
|
326
|
+
for (const range of pendingDeletions.ranges) {
|
|
327
|
+
deleteOps.push(driver.deleteRange(range.start, range.end));
|
|
328
|
+
}
|
|
329
|
+
for (const key of pendingDeletions.keys) {
|
|
330
|
+
deleteOps.push(driver.delete(key));
|
|
331
|
+
}
|
|
332
|
+
if (deleteOps.length > 0) {
|
|
333
|
+
await Promise.all(deleteOps);
|
|
334
|
+
historyUpdated = true;
|
|
335
|
+
}
|
|
336
|
+
}
|
|
337
|
+
|
|
338
|
+
// Update flushed tracking after successful write
|
|
339
|
+
storage.flushedNameCount = storage.nameRegistry.length;
|
|
340
|
+
storage.flushedState = storage.state;
|
|
341
|
+
storage.flushedOutput = storage.output;
|
|
342
|
+
storage.flushedError = storage.error;
|
|
343
|
+
|
|
344
|
+
if (historyUpdated && onHistoryUpdated) {
|
|
345
|
+
onHistoryUpdated();
|
|
346
|
+
}
|
|
347
|
+
}
|
|
348
|
+
|
|
349
|
+
/**
|
|
350
|
+
* Delete entries with a given location prefix (used for loop forgetting).
|
|
351
|
+
* Also cleans up associated metadata from both memory and driver.
|
|
352
|
+
*/
|
|
353
|
+
export async function deleteEntriesWithPrefix(
|
|
354
|
+
storage: Storage,
|
|
355
|
+
driver: EngineDriver,
|
|
356
|
+
prefixLocation: Location,
|
|
357
|
+
onHistoryUpdated?: () => void,
|
|
358
|
+
): Promise<void> {
|
|
359
|
+
const deletions = collectDeletionsForPrefix(storage, prefixLocation);
|
|
360
|
+
|
|
361
|
+
// Apply deletions to driver
|
|
362
|
+
await driver.deletePrefix(deletions.prefixes[0]!);
|
|
363
|
+
await Promise.all(deletions.keys.map((key) => driver.delete(key)));
|
|
364
|
+
|
|
365
|
+
if (deletions.keys.length > 0 && onHistoryUpdated) {
|
|
366
|
+
onHistoryUpdated();
|
|
367
|
+
}
|
|
368
|
+
}
|
|
369
|
+
|
|
370
|
+
/**
|
|
371
|
+
* Remove entries matching a location prefix from memory and collect
|
|
372
|
+
* the driver-level deletion operations. The returned PendingDeletions
|
|
373
|
+
* can be applied immediately or batched with a flush.
|
|
374
|
+
*/
|
|
375
|
+
export function collectDeletionsForPrefix(
|
|
376
|
+
storage: Storage,
|
|
377
|
+
prefixLocation: Location,
|
|
378
|
+
): PendingDeletions {
|
|
379
|
+
const pending: PendingDeletions = {
|
|
380
|
+
prefixes: [buildHistoryPrefix(prefixLocation)],
|
|
381
|
+
keys: [],
|
|
382
|
+
ranges: [],
|
|
383
|
+
};
|
|
384
|
+
|
|
385
|
+
for (const [key, entry] of storage.history.entries) {
|
|
386
|
+
if (isLocationPrefix(prefixLocation, entry.location)) {
|
|
387
|
+
pending.keys.push(buildEntryMetadataKey(entry.id));
|
|
388
|
+
storage.entryMetadata.delete(entry.id);
|
|
389
|
+
storage.history.entries.delete(key);
|
|
390
|
+
}
|
|
391
|
+
}
|
|
392
|
+
|
|
393
|
+
return pending;
|
|
394
|
+
}
|
|
395
|
+
|
|
396
|
+
/**
|
|
397
|
+
* Get an entry by location.
|
|
398
|
+
*/
|
|
399
|
+
export function getEntry(
|
|
400
|
+
storage: Storage,
|
|
401
|
+
location: Location,
|
|
402
|
+
): Entry | undefined {
|
|
403
|
+
const key = locationToKey(storage, location);
|
|
404
|
+
return storage.history.entries.get(key);
|
|
405
|
+
}
|
|
406
|
+
|
|
407
|
+
/**
|
|
408
|
+
* Set an entry by location.
|
|
409
|
+
*/
|
|
410
|
+
export function setEntry(
|
|
411
|
+
storage: Storage,
|
|
412
|
+
location: Location,
|
|
413
|
+
entry: Entry,
|
|
414
|
+
): void {
|
|
415
|
+
const key = locationToKey(storage, location);
|
|
416
|
+
storage.history.entries.set(key, entry);
|
|
417
|
+
}
|
package/src/testing.ts
ADDED
|
@@ -0,0 +1,313 @@
|
|
|
1
|
+
import type { EngineDriver, KVEntry, KVWrite } from "./driver.js";
|
|
2
|
+
import { EvictedError } from "./errors.js";
|
|
3
|
+
import { compareKeys, keyStartsWith, keyToHex } from "./keys.js";
|
|
4
|
+
import type { Message, WorkflowMessageDriver } from "./types.js";
|
|
5
|
+
import { sleep } from "./utils.js";
|
|
6
|
+
|
|
7
|
+
/** A pending waitForMessages call, parked until a matching message arrives. */
interface Waiter {
	/** Message names this waiter cares about; undefined means any message. */
	nameSet?: Set<string>;
	/** Resolves the waiting promise (and removes the waiter). */
	resolve: () => void;
	/** Rejects the waiting promise (and removes the waiter). */
	reject: (error: Error) => void;
	/** Signal whose abort cancels the wait. */
	abortSignal: AbortSignal;
	/** Abort listener; rejects the waiter with EvictedError. */
	onAbort: () => void;
}
|
|
14
|
+
|
|
15
|
+
class InMemoryWorkflowMessageDriver implements WorkflowMessageDriver {
|
|
16
|
+
#messages: Message[] = [];
|
|
17
|
+
#waiters = new Set<Waiter>();
|
|
18
|
+
|
|
19
|
+
async addMessage(message: Message): Promise<void> {
|
|
20
|
+
this.#messages.push(message);
|
|
21
|
+
this.#notifyWaiters(message.name);
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
async receiveMessages(opts: {
|
|
25
|
+
names?: readonly string[];
|
|
26
|
+
count: number;
|
|
27
|
+
completable: boolean;
|
|
28
|
+
}): Promise<Message[]> {
|
|
29
|
+
const limitedCount = Math.max(1, opts.count);
|
|
30
|
+
const nameSet =
|
|
31
|
+
opts.names && opts.names.length > 0
|
|
32
|
+
? new Set(opts.names)
|
|
33
|
+
: undefined;
|
|
34
|
+
const selected: Array<{ message: Message; index: number }> = [];
|
|
35
|
+
|
|
36
|
+
for (
|
|
37
|
+
let i = 0;
|
|
38
|
+
i < this.#messages.length && selected.length < limitedCount;
|
|
39
|
+
i++
|
|
40
|
+
) {
|
|
41
|
+
const message = this.#messages[i];
|
|
42
|
+
if (nameSet && !nameSet.has(message.name)) {
|
|
43
|
+
continue;
|
|
44
|
+
}
|
|
45
|
+
selected.push({ message, index: i });
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
if (selected.length === 0) {
|
|
49
|
+
return [];
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
if (!opts.completable) {
|
|
53
|
+
for (let i = selected.length - 1; i >= 0; i--) {
|
|
54
|
+
this.#messages.splice(selected[i].index, 1);
|
|
55
|
+
}
|
|
56
|
+
return selected.map((entry) => entry.message);
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
return selected.map((entry) => {
|
|
60
|
+
const { message } = entry;
|
|
61
|
+
return {
|
|
62
|
+
...message,
|
|
63
|
+
complete: async () => {
|
|
64
|
+
await this.completeMessage(message.id);
|
|
65
|
+
},
|
|
66
|
+
};
|
|
67
|
+
});
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
async completeMessage(messageId: string): Promise<void> {
|
|
71
|
+
const index = this.#messages.findIndex(
|
|
72
|
+
(message) => message.id === messageId,
|
|
73
|
+
);
|
|
74
|
+
if (index !== -1) {
|
|
75
|
+
this.#messages.splice(index, 1);
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
|
|
79
|
+
async waitForMessages(
|
|
80
|
+
messageNames: string[],
|
|
81
|
+
abortSignal: AbortSignal,
|
|
82
|
+
): Promise<void> {
|
|
83
|
+
if (abortSignal.aborted) {
|
|
84
|
+
throw new EvictedError();
|
|
85
|
+
}
|
|
86
|
+
|
|
87
|
+
const nameSet =
|
|
88
|
+
messageNames.length > 0 ? new Set(messageNames) : undefined;
|
|
89
|
+
if (
|
|
90
|
+
this.#messages.some((message) =>
|
|
91
|
+
nameSet ? nameSet.has(message.name) : true,
|
|
92
|
+
)
|
|
93
|
+
) {
|
|
94
|
+
return;
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
await new Promise<void>((resolve, reject) => {
|
|
98
|
+
const waiter: Waiter = {
|
|
99
|
+
nameSet,
|
|
100
|
+
resolve: () => {
|
|
101
|
+
this.#removeWaiter(waiter);
|
|
102
|
+
resolve();
|
|
103
|
+
},
|
|
104
|
+
reject: (error) => {
|
|
105
|
+
this.#removeWaiter(waiter);
|
|
106
|
+
reject(error);
|
|
107
|
+
},
|
|
108
|
+
abortSignal,
|
|
109
|
+
onAbort: () => {
|
|
110
|
+
waiter.reject(new EvictedError());
|
|
111
|
+
},
|
|
112
|
+
};
|
|
113
|
+
abortSignal.addEventListener("abort", waiter.onAbort, {
|
|
114
|
+
once: true,
|
|
115
|
+
});
|
|
116
|
+
this.#waiters.add(waiter);
|
|
117
|
+
});
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
#removeWaiter(waiter: Waiter): void {
|
|
121
|
+
if (this.#waiters.delete(waiter)) {
|
|
122
|
+
waiter.abortSignal.removeEventListener("abort", waiter.onAbort);
|
|
123
|
+
}
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
#notifyWaiters(name: string): void {
|
|
127
|
+
for (const waiter of [...this.#waiters]) {
|
|
128
|
+
if (waiter.nameSet && !waiter.nameSet.has(name)) {
|
|
129
|
+
continue;
|
|
130
|
+
}
|
|
131
|
+
waiter.resolve();
|
|
132
|
+
}
|
|
133
|
+
}
|
|
134
|
+
|
|
135
|
+
clear(): void {
|
|
136
|
+
this.#messages = [];
|
|
137
|
+
for (const waiter of [...this.#waiters]) {
|
|
138
|
+
waiter.reject(new Error("cleared"));
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
/**
|
|
144
|
+
* In-memory implementation of EngineDriver for testing.
|
|
145
|
+
* Uses binary keys (Uint8Array) with hex encoding for internal Map storage.
|
|
146
|
+
*/
|
|
147
|
+
export class InMemoryDriver implements EngineDriver {
|
|
148
|
+
// Map from hex-encoded key to { originalKey, value }
|
|
149
|
+
private kv = new Map<string, { key: Uint8Array; value: Uint8Array }>();
|
|
150
|
+
private alarms = new Map<string, number>();
|
|
151
|
+
#inMemoryMessageDriver = new InMemoryWorkflowMessageDriver();
|
|
152
|
+
|
|
153
|
+
/** Simulated latency per operation (ms) */
|
|
154
|
+
latency = 10;
|
|
155
|
+
|
|
156
|
+
/** How often the worker polls for work */
|
|
157
|
+
workerPollInterval = 100;
|
|
158
|
+
messageDriver: WorkflowMessageDriver = this.#inMemoryMessageDriver;
|
|
159
|
+
|
|
160
|
+
async get(key: Uint8Array): Promise<Uint8Array | null> {
|
|
161
|
+
await sleep(this.latency);
|
|
162
|
+
const entry = this.kv.get(keyToHex(key));
|
|
163
|
+
return entry?.value ?? null;
|
|
164
|
+
}
|
|
165
|
+
|
|
166
|
+
async set(key: Uint8Array, value: Uint8Array): Promise<void> {
|
|
167
|
+
await sleep(this.latency);
|
|
168
|
+
this.kv.set(keyToHex(key), { key, value });
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
async delete(key: Uint8Array): Promise<void> {
|
|
172
|
+
await sleep(this.latency);
|
|
173
|
+
this.kv.delete(keyToHex(key));
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
async deletePrefix(prefix: Uint8Array): Promise<void> {
|
|
177
|
+
await sleep(this.latency);
|
|
178
|
+
for (const [hexKey, entry] of this.kv) {
|
|
179
|
+
if (keyStartsWith(entry.key, prefix)) {
|
|
180
|
+
this.kv.delete(hexKey);
|
|
181
|
+
}
|
|
182
|
+
}
|
|
183
|
+
}
|
|
184
|
+
|
|
185
|
+
async deleteRange(start: Uint8Array, end: Uint8Array): Promise<void> {
|
|
186
|
+
await sleep(this.latency);
|
|
187
|
+
for (const [hexKey, entry] of this.kv) {
|
|
188
|
+
if (
|
|
189
|
+
compareKeys(entry.key, start) >= 0 &&
|
|
190
|
+
compareKeys(entry.key, end) < 0
|
|
191
|
+
) {
|
|
192
|
+
this.kv.delete(hexKey);
|
|
193
|
+
}
|
|
194
|
+
}
|
|
195
|
+
}
|
|
196
|
+
|
|
197
|
+
async list(prefix: Uint8Array): Promise<KVEntry[]> {
|
|
198
|
+
await sleep(this.latency);
|
|
199
|
+
const results: KVEntry[] = [];
|
|
200
|
+
for (const entry of this.kv.values()) {
|
|
201
|
+
if (keyStartsWith(entry.key, prefix)) {
|
|
202
|
+
results.push({ key: entry.key, value: entry.value });
|
|
203
|
+
}
|
|
204
|
+
}
|
|
205
|
+
// Sort by key lexicographically
|
|
206
|
+
return results.sort((a, b) => compareKeys(a.key, b.key));
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
async batch(writes: KVWrite[]): Promise<void> {
|
|
210
|
+
await sleep(this.latency);
|
|
211
|
+
for (const { key, value } of writes) {
|
|
212
|
+
this.kv.set(keyToHex(key), { key, value });
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
|
|
216
|
+
async setAlarm(workflowId: string, wakeAt: number): Promise<void> {
|
|
217
|
+
await sleep(this.latency);
|
|
218
|
+
this.alarms.set(workflowId, wakeAt);
|
|
219
|
+
}
|
|
220
|
+
|
|
221
|
+
async clearAlarm(workflowId: string): Promise<void> {
|
|
222
|
+
await sleep(this.latency);
|
|
223
|
+
this.alarms.delete(workflowId);
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
async waitForMessages(
|
|
227
|
+
messageNames: string[],
|
|
228
|
+
abortSignal: AbortSignal,
|
|
229
|
+
): Promise<void> {
|
|
230
|
+
const driver = this.messageDriver as WorkflowMessageDriver & {
|
|
231
|
+
waitForMessages?: (
|
|
232
|
+
messageNames: string[],
|
|
233
|
+
abortSignal: AbortSignal,
|
|
234
|
+
) => Promise<void>;
|
|
235
|
+
};
|
|
236
|
+
if (driver.waitForMessages) {
|
|
237
|
+
await driver.waitForMessages(messageNames, abortSignal);
|
|
238
|
+
return;
|
|
239
|
+
}
|
|
240
|
+
|
|
241
|
+
while (true) {
|
|
242
|
+
if (abortSignal.aborted) {
|
|
243
|
+
throw new EvictedError();
|
|
244
|
+
}
|
|
245
|
+
const messages = await this.messageDriver.receiveMessages({
|
|
246
|
+
names: messageNames.length > 0 ? messageNames : undefined,
|
|
247
|
+
count: 1,
|
|
248
|
+
completable: true,
|
|
249
|
+
});
|
|
250
|
+
if (messages.length > 0) {
|
|
251
|
+
return;
|
|
252
|
+
}
|
|
253
|
+
await sleep(Math.max(1, this.latency));
|
|
254
|
+
}
|
|
255
|
+
}
|
|
256
|
+
|
|
257
|
+
/**
|
|
258
|
+
* Get the alarm time for a workflow (for testing).
|
|
259
|
+
*/
|
|
260
|
+
getAlarm(workflowId: string): number | undefined {
|
|
261
|
+
return this.alarms.get(workflowId);
|
|
262
|
+
}
|
|
263
|
+
|
|
264
|
+
/**
|
|
265
|
+
* Check if any alarms are due and return their workflow IDs.
|
|
266
|
+
*/
|
|
267
|
+
getDueAlarms(): string[] {
|
|
268
|
+
const now = Date.now();
|
|
269
|
+
const due: string[] = [];
|
|
270
|
+
for (const [workflowId, wakeAt] of this.alarms) {
|
|
271
|
+
if (wakeAt <= now) {
|
|
272
|
+
due.push(workflowId);
|
|
273
|
+
}
|
|
274
|
+
}
|
|
275
|
+
return due;
|
|
276
|
+
}
|
|
277
|
+
|
|
278
|
+
/**
|
|
279
|
+
* Clear all data (for testing).
|
|
280
|
+
*/
|
|
281
|
+
clear(): void {
|
|
282
|
+
this.kv.clear();
|
|
283
|
+
this.alarms.clear();
|
|
284
|
+
this.#inMemoryMessageDriver.clear();
|
|
285
|
+
}
|
|
286
|
+
|
|
287
|
+
/**
|
|
288
|
+
* Get a snapshot of all data (for testing/debugging).
|
|
289
|
+
*/
|
|
290
|
+
snapshot(): {
|
|
291
|
+
kv: Record<string, Uint8Array>;
|
|
292
|
+
alarms: Record<string, number>;
|
|
293
|
+
} {
|
|
294
|
+
const kvSnapshot: Record<string, Uint8Array> = {};
|
|
295
|
+
for (const [hexKey, entry] of this.kv) {
|
|
296
|
+
kvSnapshot[hexKey] = entry.value;
|
|
297
|
+
}
|
|
298
|
+
return {
|
|
299
|
+
kv: kvSnapshot,
|
|
300
|
+
alarms: Object.fromEntries(this.alarms),
|
|
301
|
+
};
|
|
302
|
+
}
|
|
303
|
+
|
|
304
|
+
/**
|
|
305
|
+
* Get all hex-encoded keys (for testing).
|
|
306
|
+
*/
|
|
307
|
+
keys(): string[] {
|
|
308
|
+
return [...this.kv.keys()];
|
|
309
|
+
}
|
|
310
|
+
}
|
|
311
|
+
|
|
312
|
+
// Re-export main exports for convenience
|
|
313
|
+
export * from "./index.js";
|