workerflow 0.2.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +12 -28
- package/package.json +1 -1
- package/src/json.ts +5 -7
- package/src/migrations/0000_initial.ts +15 -10
- package/src/runtime.ts +30 -53
- package/test/runtime.spec.ts +110 -58
- package/test/tsconfig.json +1 -4
- package/test/worker.ts +1 -3
package/README.md
CHANGED
|
@@ -33,22 +33,14 @@ tag = "v1"
|
|
|
33
33
|
new_sqlite_classes = ["OrderWorkflowRuntime"]
|
|
34
34
|
```
|
|
35
35
|
|
|
36
|
-
In your Worker module, export the runtime, the definition, and a **`fetch`** handler (or queue consumer, cron trigger, and so on) that obtains a namespace stub and calls **`create`** to pin
|
|
36
|
+
In your Worker module, export the runtime, the definition, and a **`fetch`** handler (or queue consumer, cron trigger, and so on) that obtains a namespace stub and calls **`create`** to pin the workflow input:
|
|
37
37
|
|
|
38
38
|
```ts
|
|
39
39
|
// src/worker.ts
|
|
40
40
|
import { WorkflowDefinition, WorkflowRuntime } from "workerflow";
|
|
41
41
|
|
|
42
42
|
export class OrderWorkflowRuntime extends WorkflowRuntime<{ orderId: string }> {
|
|
43
|
-
|
|
44
|
-
protected getDefinition(version: string) {
|
|
45
|
-
switch (version) {
|
|
46
|
-
case "2026-04-01":
|
|
47
|
-
return this.ctx.exports.OrderWorkflowDefinition;
|
|
48
|
-
default:
|
|
49
|
-
throw new Error(`Unsupported workflow definition version: ${version}`);
|
|
50
|
-
}
|
|
51
|
-
}
|
|
43
|
+
protected readonly definition = this.ctx.exports.OrderWorkflowDefinition;
|
|
52
44
|
}
|
|
53
45
|
|
|
54
46
|
export class OrderWorkflowDefinition extends WorkflowDefinition<{ orderId: string }> {
|
|
@@ -79,7 +71,7 @@ export default {
|
|
|
79
71
|
if (url.pathname === "/orders") {
|
|
80
72
|
const orderId = "new-order";
|
|
81
73
|
const stub = env.ORDER_WORKFLOW.getByName(orderId);
|
|
82
|
-
await stub.create({
|
|
74
|
+
await stub.create({ orderId });
|
|
83
75
|
return Response.json({ id: orderId });
|
|
84
76
|
}
|
|
85
77
|
|
|
@@ -88,18 +80,18 @@ export default {
|
|
|
88
80
|
} satisfies ExportedHandler<Env>;
|
|
89
81
|
```
|
|
90
82
|
|
|
91
|
-
Workflow input is **`this.ctx.props.input`**, populated from **`create(
|
|
83
|
+
Workflow input is **`this.ctx.props.input`**, populated from **`create(input)`**. TypeScript requires an input argument when your runtime's **`TInput`** excludes **`undefined`**; no-input workflows can use **`WorkflowRuntime<undefined>`**, and workflows whose input is optional can include **`undefined`** in the input type. The runtime also sets **`this.ctx.props.requestId`** (a new UUID each time the run loop invokes your definition) and **`this.ctx.props.runtimeInstanceId`** (this Durable Object’s id) for logs and correlation.
|
|
92
84
|
|
|
93
85
|
### Runtime control
|
|
94
86
|
|
|
95
87
|
From the Durable Object stub you can:
|
|
96
88
|
|
|
97
|
-
- **`create(
|
|
89
|
+
- **`create(input)`** — Pins the workflow input in SQLite the **first** time the instance is initialized, then starts execution. The input argument is required unless **`TInput`** includes **`undefined`**. **No-op** if the workflow is already **completed**, **failed**, **cancelled**, or **paused**.
|
|
98
90
|
- **`pause()`** — When status is **running**, moves to **paused**, clears alarms, and stops driving **`execute()`** until **`resume()`**. Inbound events are queued and applied when a matching **`wait`** runs again after resume.
|
|
99
91
|
- **`resume()`** — When status is **paused**, moves to **running** and continues the loop. Throws if the workflow is not paused.
|
|
100
92
|
- **`cancel(reason?)`** — Moves to terminal **cancelled** and clears alarms.
|
|
101
93
|
|
|
102
|
-
New instances start in **`pending`**
|
|
94
|
+
New instances start in **`pending`**. The first **`create()`** call moves the instance through the durable **`initialized`** state before execution enters **`running`**.
|
|
103
95
|
|
|
104
96
|
### Experimental introspection
|
|
105
97
|
|
|
@@ -193,27 +185,21 @@ At this point there is no sleep alarm, no retry alarm, and no wait-timeout alarm
|
|
|
193
185
|
|
|
194
186
|
There is also a guard for the case where an alarm fires while the run loop is already active — for example, a sleep's precise alarm arriving while the loop is processing another step in the same Durable Object invocation. In that situation the alarm handler simply reschedules the watchdog for another 30 minutes rather than starting a second concurrent loop, keeping the safety net in place until the active loop finishes.
|
|
195
187
|
|
|
196
|
-
### Versioning
|
|
197
|
-
|
|
198
|
-
`create({ definitionVersion, input })` **pins** the definition version and optional input in SQLite the first time the instance is initialized (see [Runtime control](#runtime-control) for no-op cases). **The version cannot be changed later** for that Durable Object id; attempting a different version throws. Every subsequent `next()` resolves the worker implementation via **`getDefinition(version)`** using that pinned value, so **long-lived workflows keep running the definition lineage they started with**, while new instances can use newer version strings you add to `getDefinition`.
|
|
199
|
-
|
|
200
188
|
## Why this exists
|
|
201
189
|
|
|
202
|
-
Cloudflare Workflows is a strong managed option, and for many use cases it is the right tradeoff. I built `workerflow` for cases where I wanted tighter control over runtime behavior,
|
|
190
|
+
Cloudflare Workflows is a strong managed option, and for many use cases it is the right tradeoff. I built `workerflow` for cases where I wanted tighter control over runtime behavior, replay semantics, and state projection than the managed model naturally gives me.
|
|
203
191
|
|
|
204
192
|
1. Explicit ownership of workflow state and lifecycle
|
|
205
|
-
2.
|
|
193
|
+
2. Durable replay semantics that are explicit in userland code
|
|
206
194
|
3. Separation between workflow execution and external state synchronization
|
|
207
195
|
4. Extension points for streaming, WebSockets, and custom hooks
|
|
208
196
|
5. Fewer surprises around long-lived execution and error handling
|
|
209
197
|
|
|
210
|
-
###
|
|
198
|
+
### Definition compatibility
|
|
211
199
|
|
|
212
200
|
One of the biggest concerns in long-running workflows is definition drift. A normal Worker request is typically bound to a single in-flight execution on one deployed version, but a Workflow is durable: it persists state and resumes across multiple executions over time. A workflow may start on one version of its definition and resume later after a deploy has changed or removed a step. That means the next invocation of the workflow entry point could repeat steps unsafely or leave the runtime in an invalid state.
|
|
213
201
|
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
`workerflow` takes a different approach: the runtime pins a definition version when the instance is created and resolves future execution against that pinned version. The goal is not to make compatibility problems disappear, but to make the version boundary explicit in the runtime rather than implicit in workflow input and application code.
|
|
202
|
+
`workerflow` keeps definition selection simple: each runtime points at one definition entrypoint, and the input is the only per-instance payload pinned by `create(input)`. If a workflow needs version-aware behavior, model that explicitly in your input shape and keep old branches compatible until the long-lived instances that need them have completed.
|
|
217
203
|
|
|
218
204
|
### Keeping workflow execution separate from state projection
|
|
219
205
|
|
|
@@ -245,11 +231,9 @@ I think a cleaner design is to keep synchronization logic out of workflow steps
|
|
|
245
231
|
|
|
246
232
|
```ts
|
|
247
233
|
export class MyWorkflowRuntime extends WorkflowRuntime {
|
|
248
|
-
async onStatusChange_experimental(
|
|
249
|
-
status: "running" | "paused" | "completed" | "failed" | "cancelled"
|
|
250
|
-
) {
|
|
234
|
+
async onStatusChange_experimental(status: "running" | "paused" | "completed" | "failed" | "cancelled") {
|
|
251
235
|
// Update your database, or push to a queue for streaming.
|
|
252
|
-
// Note: the hook is also invoked with "running" when leaving
|
|
236
|
+
// Note: the hook is also invoked with "running" when transitioning from initialized or paused to running.
|
|
253
237
|
}
|
|
254
238
|
}
|
|
255
239
|
```
|
package/package.json
CHANGED
package/src/json.ts
CHANGED
|
@@ -1,12 +1,10 @@
|
|
|
1
1
|
/**
|
|
2
|
-
* Represents an indefinitely deep arbitrary JSON data structure. There are
|
|
3
|
-
* four types that make up the Json family:
|
|
4
|
-
*
|
|
5
|
-
* - Json any legal JSON value
|
|
6
|
-
* - JsonScalar any legal JSON leaf value (no lists or objects)
|
|
7
|
-
* - JsonArray a JSON value whose outer type is an array
|
|
8
|
-
* - JsonObject a JSON value whose outer type is an object
|
|
2
|
+
* Represents an indefinitely deep arbitrary JSON data structure. There are four types that make up the Json family:
|
|
9
3
|
*
|
|
4
|
+
* - Json any legal JSON value
|
|
5
|
+
* - JsonScalar any legal JSON leaf value (no lists or objects)
|
|
6
|
+
* - JsonArray a JSON value whose outer type is an array
|
|
7
|
+
* - JsonObject a JSON value whose outer type is an object
|
|
10
8
|
*/
|
|
11
9
|
export type Json = JsonScalar | JsonArray | JsonObject;
|
|
12
10
|
export type JsonScalar = string | number | boolean | null;
|
|
@@ -5,7 +5,7 @@ export default `
|
|
|
5
5
|
id INTEGER NOT NULL PRIMARY KEY CHECK (id = 1),
|
|
6
6
|
|
|
7
7
|
status TEXT NOT NULL CHECK (
|
|
8
|
-
status IN ('pending', 'running', 'paused', 'completed', 'failed', 'cancelled')
|
|
8
|
+
status IN ('pending', 'initialized', 'running', 'paused', 'completed', 'failed', 'cancelled')
|
|
9
9
|
),
|
|
10
10
|
|
|
11
11
|
created_at INTEGER NOT NULL
|
|
@@ -15,18 +15,13 @@ export default `
|
|
|
15
15
|
updated_at INTEGER NOT NULL
|
|
16
16
|
DEFAULT (CAST(unixepoch('subsecond') * 1000 AS INTEGER)),
|
|
17
17
|
|
|
18
|
-
definition_version TEXT
|
|
19
|
-
CHECK (definition_version IS NULL OR length(definition_version) > 0),
|
|
20
18
|
definition_input TEXT
|
|
21
19
|
CHECK (definition_input IS NULL OR json_valid(definition_input)),
|
|
22
20
|
|
|
23
21
|
CHECK (updated_at >= created_at),
|
|
24
22
|
|
|
25
|
-
-- definition_input must be NULL
|
|
26
|
-
CHECK (
|
|
27
|
-
|
|
28
|
-
-- definition must be pinned before running/paused/completing/failing; cancelled is always allowed
|
|
29
|
-
CHECK (status IN ('pending', 'cancelled') OR definition_version IS NOT NULL)
|
|
23
|
+
-- definition_input must be NULL until create() initializes the workflow.
|
|
24
|
+
CHECK (status <> 'pending' OR definition_input IS NULL)
|
|
30
25
|
) STRICT;
|
|
31
26
|
|
|
32
27
|
CREATE TABLE steps (
|
|
@@ -278,8 +273,10 @@ export default `
|
|
|
278
273
|
WHEN NEW.status <> OLD.status
|
|
279
274
|
BEGIN
|
|
280
275
|
SELECT CASE
|
|
281
|
-
WHEN OLD.status = 'pending' AND NEW.status NOT IN ('
|
|
282
|
-
RAISE(ABORT, 'pending can only transition to
|
|
276
|
+
WHEN OLD.status = 'pending' AND NEW.status NOT IN ('initialized', 'cancelled') THEN
|
|
277
|
+
RAISE(ABORT, 'pending can only transition to initialized or cancelled')
|
|
278
|
+
WHEN OLD.status = 'initialized' AND NEW.status NOT IN ('running', 'cancelled') THEN
|
|
279
|
+
RAISE(ABORT, 'initialized can only transition to running or cancelled')
|
|
283
280
|
WHEN OLD.status = 'running' AND NEW.status NOT IN ('paused', 'completed', 'failed', 'cancelled') THEN
|
|
284
281
|
RAISE(ABORT, 'running can only transition to paused, completed, failed, or cancelled')
|
|
285
282
|
WHEN OLD.status = 'paused' AND NEW.status NOT IN ('running', 'cancelled') THEN
|
|
@@ -289,6 +286,14 @@ export default `
|
|
|
289
286
|
END;
|
|
290
287
|
END;
|
|
291
288
|
|
|
289
|
+
CREATE TRIGGER workflow_metadata_definition_input_immutable_after_init
|
|
290
|
+
BEFORE UPDATE ON workflow_metadata
|
|
291
|
+
FOR EACH ROW
|
|
292
|
+
WHEN OLD.status <> 'pending' AND NEW.definition_input IS NOT OLD.definition_input
|
|
293
|
+
BEGIN
|
|
294
|
+
SELECT RAISE(ABORT, 'workflow_metadata.definition_input is immutable after initialization');
|
|
295
|
+
END;
|
|
296
|
+
|
|
292
297
|
CREATE TRIGGER steps_immutable_identity_fields
|
|
293
298
|
BEFORE UPDATE ON steps
|
|
294
299
|
FOR EACH ROW
|
package/src/runtime.ts
CHANGED
|
@@ -4,15 +4,11 @@ import type { Json } from "./json";
|
|
|
4
4
|
import mig000 from "./migrations/0000_initial";
|
|
5
5
|
import type { Brand } from "./brand";
|
|
6
6
|
|
|
7
|
-
export abstract class WorkflowRuntime<
|
|
8
|
-
TInput extends Json | undefined = Json | undefined,
|
|
9
|
-
TVersion extends string = string
|
|
10
|
-
> extends DurableObject {
|
|
7
|
+
export abstract class WorkflowRuntime<TInput extends Json | undefined = Json | undefined> extends DurableObject {
|
|
11
8
|
private static readonly MIGRATIONS = [mig000];
|
|
12
9
|
private readonly sql: SqlStorage;
|
|
13
10
|
#status: WorkflowStatus;
|
|
14
11
|
#isRunLoopActive: boolean = false;
|
|
15
|
-
#definitionVersion: TVersion | undefined;
|
|
16
12
|
#definitionInput: TInput | undefined;
|
|
17
13
|
|
|
18
14
|
/**
|
|
@@ -46,24 +42,19 @@ export abstract class WorkflowRuntime<
|
|
|
46
42
|
console.error("Database migration version is ahead of the codebase. Please check your migrations.");
|
|
47
43
|
}
|
|
48
44
|
|
|
49
|
-
const [metadata] = this.sql
|
|
50
|
-
.exec<WorkflowMetadata_Row<TVersion>>("SELECT * FROM workflow_metadata WHERE id = 1")
|
|
51
|
-
.toArray();
|
|
45
|
+
const [metadata] = this.sql.exec<WorkflowMetadata_Row>("SELECT * FROM workflow_metadata WHERE id = 1").toArray();
|
|
52
46
|
if (metadata === undefined) {
|
|
53
47
|
this.sql.exec("INSERT INTO workflow_metadata (id, status) VALUES (1, ?)", "pending");
|
|
54
48
|
this.sql.exec("INSERT INTO workflow_events (type) VALUES (?)", "created");
|
|
55
49
|
this.#status = "pending";
|
|
56
50
|
} else {
|
|
57
51
|
this.#status = metadata.status;
|
|
58
|
-
this.#definitionVersion = metadata.definition_version === null ? undefined : metadata.definition_version;
|
|
59
52
|
this.#definitionInput =
|
|
60
53
|
metadata.definition_input === null ? undefined : (JSON.parse(metadata.definition_input) as TInput);
|
|
61
54
|
}
|
|
62
55
|
}
|
|
63
56
|
|
|
64
|
-
protected abstract
|
|
65
|
-
version: TVersion
|
|
66
|
-
): (options: {
|
|
57
|
+
protected abstract readonly definition: (options: {
|
|
67
58
|
props: { requestId: string; runtimeInstanceId: string; input: TInput };
|
|
68
59
|
}) => Fetcher<WorkflowDefinition<TInput>>;
|
|
69
60
|
|
|
@@ -321,49 +312,42 @@ export abstract class WorkflowRuntime<
|
|
|
321
312
|
}
|
|
322
313
|
|
|
323
314
|
/**
|
|
324
|
-
* Creates a new workflow instance and pins the
|
|
325
|
-
* return early. Otherwise, it will pin the
|
|
326
|
-
* pinned to a different version, it will throw an error.
|
|
315
|
+
* Creates a new workflow instance and pins the input. If the workflow is in a terminal state or paused, it will
|
|
316
|
+
* return early. Otherwise, it will pin the input the first time the instance is initialized and start execution.
|
|
327
317
|
*
|
|
328
|
-
* @param
|
|
329
|
-
*
|
|
330
|
-
* @param options.input - The input to the workflow instance. This will be passed to the workflow definition as the
|
|
331
|
-
* `input` property.
|
|
318
|
+
* @param input - The input to the workflow instance. This will be passed to the workflow definition as the `input`
|
|
319
|
+
* property.
|
|
332
320
|
*/
|
|
333
|
-
public async create(
|
|
321
|
+
public async create(...args: undefined extends TInput ? [input?: TInput] : [input: TInput]): Promise<void> {
|
|
322
|
+
const input = args[0];
|
|
334
323
|
if (this.isTerminalStatus(this.#status)) return;
|
|
335
324
|
if (this.#status === "paused") return;
|
|
336
325
|
|
|
337
|
-
const version = options.definitionVersion;
|
|
338
326
|
let metadata = this.sql
|
|
339
|
-
.exec<Pick<WorkflowMetadata_Row
|
|
340
|
-
"SELECT
|
|
327
|
+
.exec<Pick<WorkflowMetadata_Row, "status" | "definition_input">>(
|
|
328
|
+
"SELECT status, definition_input FROM workflow_metadata WHERE id = 1"
|
|
341
329
|
)
|
|
342
330
|
.one();
|
|
343
331
|
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
`Workflow definition version is already pinned to '${metadata.definition_version}' and cannot be changed to '${version}'.`
|
|
347
|
-
);
|
|
348
|
-
}
|
|
349
|
-
|
|
350
|
-
// If the workflow is not yet pinned to a definition version, we pin it to the new version and set the input.
|
|
351
|
-
if (metadata.definition_version === null) {
|
|
332
|
+
// If the workflow is not yet initialized, pin the input. `undefined` is encoded as SQL NULL.
|
|
333
|
+
if (metadata.status === "pending") {
|
|
352
334
|
metadata = this.sql
|
|
353
|
-
.exec<Pick<WorkflowMetadata_Row
|
|
335
|
+
.exec<Pick<WorkflowMetadata_Row, "status" | "definition_input">>(
|
|
354
336
|
`UPDATE workflow_metadata
|
|
355
|
-
SET
|
|
337
|
+
SET status = 'initialized',
|
|
356
338
|
definition_input = ?,
|
|
357
339
|
updated_at = CAST(unixepoch('subsecond') * 1000 AS INTEGER)
|
|
358
|
-
WHERE id = 1
|
|
359
|
-
|
|
360
|
-
|
|
340
|
+
WHERE id = 1
|
|
341
|
+
AND status = 'pending'
|
|
342
|
+
RETURNING status, definition_input`,
|
|
343
|
+
input === undefined ? null : JSON.stringify(input)
|
|
361
344
|
)
|
|
362
345
|
.one();
|
|
363
346
|
}
|
|
364
347
|
|
|
365
|
-
this.#
|
|
366
|
-
this.#definitionInput =
|
|
348
|
+
this.#status = metadata.status;
|
|
349
|
+
this.#definitionInput =
|
|
350
|
+
metadata.definition_input === null ? undefined : (JSON.parse(metadata.definition_input) as TInput);
|
|
367
351
|
|
|
368
352
|
await this.run();
|
|
369
353
|
}
|
|
@@ -372,7 +356,7 @@ export abstract class WorkflowRuntime<
|
|
|
372
356
|
if (this.isTerminalStatus(this.#status)) return;
|
|
373
357
|
if (this.#status === "paused") return;
|
|
374
358
|
|
|
375
|
-
if (this.#
|
|
359
|
+
if (this.#status === "pending") return;
|
|
376
360
|
|
|
377
361
|
if (this.#status !== "running") {
|
|
378
362
|
this.#setStatus({ type: "running" });
|
|
@@ -411,15 +395,11 @@ export abstract class WorkflowRuntime<
|
|
|
411
395
|
}
|
|
412
396
|
|
|
413
397
|
try {
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
throw new Error(
|
|
417
|
-
"Workflow definition version has not been initialized. Call 'create()' before running the workflow."
|
|
418
|
-
);
|
|
398
|
+
if (this.#status === "pending") {
|
|
399
|
+
throw new Error("Workflow input has not been initialized. Call 'create()' before running the workflow.");
|
|
419
400
|
}
|
|
420
401
|
|
|
421
|
-
const
|
|
422
|
-
const executor = definition({
|
|
402
|
+
const executor = this.definition({
|
|
423
403
|
props: {
|
|
424
404
|
runtimeInstanceId: this.ctx.id.toString(),
|
|
425
405
|
requestId,
|
|
@@ -887,10 +867,7 @@ export type RunStepAttempt = {
|
|
|
887
867
|
startedAt: Date;
|
|
888
868
|
} & (
|
|
889
869
|
| { state: "started" }
|
|
890
|
-
| ({ state: "succeeded"; endedAt: Date } & (
|
|
891
|
-
| { resultType: "json"; resultJson: string }
|
|
892
|
-
| { resultType: "none" }
|
|
893
|
-
))
|
|
870
|
+
| ({ state: "succeeded"; endedAt: Date } & ({ resultType: "json"; resultJson: string } | { resultType: "none" }))
|
|
894
871
|
| { state: "failed"; errorMessage: string; errorName?: string; endedAt: Date; nextAttemptAt?: Date }
|
|
895
872
|
);
|
|
896
873
|
|
|
@@ -1294,18 +1271,18 @@ type TimedOutWaitStep_Row = Extract<WaitStep_Row, { state: "timed_out" }>;
|
|
|
1294
1271
|
type Step_Row = RunStep_Row | SleepStep_Row | WaitStep_Row;
|
|
1295
1272
|
|
|
1296
1273
|
export type WorkflowStatus =
|
|
1297
|
-
| "pending" //
|
|
1274
|
+
| "pending" // Durable metadata exists, but create() has not initialized the workflow input yet
|
|
1275
|
+
| "initialized" // create() has pinned the workflow input, but execution has not started yet
|
|
1298
1276
|
| "running" // The workflow is currently executing; steps are being created/processed
|
|
1299
1277
|
| "paused" // The workflow is paused and will not make progress until resumed
|
|
1300
1278
|
| "completed" // The workflow completed successfully; ('Workflow.next' returned { done: true, status: "succeeded" })
|
|
1301
1279
|
| "failed" // A step exhausted retries and the workflow aborted; ('Workflow.next' returned { done: true, status: "failed" })
|
|
1302
1280
|
| "cancelled"; // The workflow was terminated explicitly by the user.
|
|
1303
1281
|
|
|
1304
|
-
type WorkflowMetadata_Row
|
|
1282
|
+
type WorkflowMetadata_Row = {
|
|
1305
1283
|
created_at: number;
|
|
1306
1284
|
updated_at: number;
|
|
1307
1285
|
status: WorkflowStatus;
|
|
1308
|
-
definition_version: TVersion | null;
|
|
1309
1286
|
definition_input: string | null;
|
|
1310
1287
|
};
|
|
1311
1288
|
|
package/test/runtime.spec.ts
CHANGED
|
@@ -48,7 +48,7 @@ describe("WorkflowRuntime", () => {
|
|
|
48
48
|
resolve(status);
|
|
49
49
|
};
|
|
50
50
|
|
|
51
|
-
await instance.create(
|
|
51
|
+
await instance.create();
|
|
52
52
|
await expect(promise).resolves.toBe("failed");
|
|
53
53
|
});
|
|
54
54
|
} finally {
|
|
@@ -71,7 +71,7 @@ describe("WorkflowRuntime", () => {
|
|
|
71
71
|
if (status === "running") return;
|
|
72
72
|
resolve(status);
|
|
73
73
|
};
|
|
74
|
-
await instance.create(
|
|
74
|
+
await instance.create();
|
|
75
75
|
await expect(promise).resolves.toBe("failed");
|
|
76
76
|
});
|
|
77
77
|
} finally {
|
|
@@ -96,7 +96,7 @@ describe("WorkflowRuntime", () => {
|
|
|
96
96
|
resolve(status);
|
|
97
97
|
};
|
|
98
98
|
|
|
99
|
-
await instance.create(
|
|
99
|
+
await instance.create();
|
|
100
100
|
await expect(promise).resolves.toBe("failed");
|
|
101
101
|
const steps = instance.getSteps_experimental();
|
|
102
102
|
expect(steps).toHaveLength(1);
|
|
@@ -136,7 +136,7 @@ describe("WorkflowRuntime", () => {
|
|
|
136
136
|
resolve(status);
|
|
137
137
|
};
|
|
138
138
|
|
|
139
|
-
await instance.create(
|
|
139
|
+
await instance.create();
|
|
140
140
|
await expect(promise).resolves.toBe("failed");
|
|
141
141
|
const steps = instance.getSteps_experimental();
|
|
142
142
|
expect(steps).toHaveLength(1);
|
|
@@ -170,7 +170,7 @@ describe("WorkflowRuntime", () => {
|
|
|
170
170
|
resolve(status);
|
|
171
171
|
};
|
|
172
172
|
|
|
173
|
-
await instance.create(
|
|
173
|
+
await instance.create();
|
|
174
174
|
await expect(promise).resolves.toBe("failed");
|
|
175
175
|
});
|
|
176
176
|
} finally {
|
|
@@ -200,7 +200,7 @@ describe("WorkflowRuntime", () => {
|
|
|
200
200
|
if (status === "running") return;
|
|
201
201
|
resolve(status);
|
|
202
202
|
};
|
|
203
|
-
await instance.create(
|
|
203
|
+
await instance.create();
|
|
204
204
|
await expect(promise).resolves.toBe("completed");
|
|
205
205
|
const steps = instance.getSteps_experimental();
|
|
206
206
|
expect(steps).toHaveLength(1);
|
|
@@ -237,7 +237,7 @@ describe("WorkflowRuntime", () => {
|
|
|
237
237
|
resolve(status);
|
|
238
238
|
};
|
|
239
239
|
|
|
240
|
-
await instance.create(
|
|
240
|
+
await instance.create();
|
|
241
241
|
await expect(promise).resolves.toBe("failed");
|
|
242
242
|
const steps = instance.getSteps_experimental();
|
|
243
243
|
expect(steps).toHaveLength(1);
|
|
@@ -268,7 +268,7 @@ describe("WorkflowRuntime", () => {
|
|
|
268
268
|
resolve(status);
|
|
269
269
|
};
|
|
270
270
|
|
|
271
|
-
await instance.create(
|
|
271
|
+
await instance.create();
|
|
272
272
|
await expect(promise).resolves.toBe("completed");
|
|
273
273
|
|
|
274
274
|
const steps = instance.getSteps_experimental();
|
|
@@ -311,7 +311,7 @@ describe("WorkflowRuntime", () => {
|
|
|
311
311
|
resolve(status);
|
|
312
312
|
};
|
|
313
313
|
|
|
314
|
-
await instance.create(
|
|
314
|
+
await instance.create();
|
|
315
315
|
await expect(promise).resolves.toBe("completed");
|
|
316
316
|
|
|
317
317
|
const steps = instance.getSteps_experimental();
|
|
@@ -351,7 +351,7 @@ describe("WorkflowRuntime", () => {
|
|
|
351
351
|
if (status === "running") return;
|
|
352
352
|
resolve(status);
|
|
353
353
|
};
|
|
354
|
-
await instance.create(
|
|
354
|
+
await instance.create(input);
|
|
355
355
|
await expect(promise).resolves.toBe("completed");
|
|
356
356
|
});
|
|
357
357
|
expect(received.length).toBeGreaterThanOrEqual(1);
|
|
@@ -363,27 +363,80 @@ describe("WorkflowRuntime", () => {
|
|
|
363
363
|
}
|
|
364
364
|
});
|
|
365
365
|
|
|
366
|
-
it("
|
|
366
|
+
it("does not repin input after the workflow is initialized", async () => {
|
|
367
|
+
const received: unknown[] = [];
|
|
367
368
|
const executeSpy = vi
|
|
368
369
|
.spyOn(TestWorkflowDefinition.prototype, "execute")
|
|
369
370
|
.mockImplementation(async function (this: TestWorkflowDefinition) {
|
|
370
|
-
|
|
371
|
+
received.push(this.ctx.props.input);
|
|
372
|
+
await this.wait("wait-1", "event-done", {
|
|
371
373
|
timeoutAt: Date.now() + 86_400_000
|
|
372
374
|
});
|
|
373
375
|
});
|
|
374
376
|
try {
|
|
375
377
|
const stub = env.TEST_WORKFLOW_RUNTIME.getByName(crypto.randomUUID());
|
|
376
378
|
await runInDurableObject(stub, async (instance) => {
|
|
377
|
-
const { resolve, promise } = Promise.withResolvers<WorkflowStatus>();
|
|
379
|
+
const { resolve: resolveRunning, promise: running } = Promise.withResolvers<WorkflowStatus>();
|
|
380
|
+
const { resolve: resolveDone, promise: done } = Promise.withResolvers<WorkflowStatus>();
|
|
378
381
|
instance.onStatusChange_experimental = async (status) => {
|
|
379
|
-
|
|
382
|
+
if (status === "running") {
|
|
383
|
+
resolveRunning(status);
|
|
384
|
+
} else {
|
|
385
|
+
resolveDone(status);
|
|
386
|
+
}
|
|
380
387
|
};
|
|
381
|
-
|
|
382
|
-
await
|
|
388
|
+
const input = { key: "original" };
|
|
389
|
+
await instance.create(input);
|
|
390
|
+
await expect(running).resolves.toBe("running");
|
|
383
391
|
|
|
384
|
-
await
|
|
385
|
-
|
|
386
|
-
);
|
|
392
|
+
await instance.create({ key: "ignored" });
|
|
393
|
+
await instance.handleInboundEvent("event-done");
|
|
394
|
+
await expect(done).resolves.toBe("completed");
|
|
395
|
+
|
|
396
|
+
expect(received.length).toBeGreaterThanOrEqual(1);
|
|
397
|
+
for (const row of received) {
|
|
398
|
+
expect(row).toEqual(input);
|
|
399
|
+
}
|
|
400
|
+
});
|
|
401
|
+
} finally {
|
|
402
|
+
executeSpy.mockRestore();
|
|
403
|
+
}
|
|
404
|
+
});
|
|
405
|
+
|
|
406
|
+
it("does not repin undefined input after the workflow is initialized", async () => {
|
|
407
|
+
const received: unknown[] = [];
|
|
408
|
+
const executeSpy = vi
|
|
409
|
+
.spyOn(TestWorkflowDefinition.prototype, "execute")
|
|
410
|
+
.mockImplementation(async function (this: TestWorkflowDefinition) {
|
|
411
|
+
received.push(this.ctx.props.input);
|
|
412
|
+
await this.wait("wait-1", "event-done", {
|
|
413
|
+
timeoutAt: Date.now() + 86_400_000
|
|
414
|
+
});
|
|
415
|
+
});
|
|
416
|
+
try {
|
|
417
|
+
const stub = env.TEST_WORKFLOW_RUNTIME.getByName(crypto.randomUUID());
|
|
418
|
+
await runInDurableObject(stub, async (instance) => {
|
|
419
|
+
const { resolve: resolveRunning, promise: running } = Promise.withResolvers<WorkflowStatus>();
|
|
420
|
+
const { resolve: resolveDone, promise: done } = Promise.withResolvers<WorkflowStatus>();
|
|
421
|
+
instance.onStatusChange_experimental = async (status) => {
|
|
422
|
+
if (status === "running") {
|
|
423
|
+
resolveRunning(status);
|
|
424
|
+
} else {
|
|
425
|
+
resolveDone(status);
|
|
426
|
+
}
|
|
427
|
+
};
|
|
428
|
+
|
|
429
|
+
await instance.create();
|
|
430
|
+
await expect(running).resolves.toBe("running");
|
|
431
|
+
|
|
432
|
+
await instance.create({ key: "ignored" });
|
|
433
|
+
await instance.handleInboundEvent("event-done");
|
|
434
|
+
await expect(done).resolves.toBe("completed");
|
|
435
|
+
|
|
436
|
+
expect(received.length).toBeGreaterThanOrEqual(1);
|
|
437
|
+
for (const row of received) {
|
|
438
|
+
expect(row).toBeUndefined();
|
|
439
|
+
}
|
|
387
440
|
});
|
|
388
441
|
} finally {
|
|
389
442
|
executeSpy.mockRestore();
|
|
@@ -398,11 +451,11 @@ describe("WorkflowRuntime", () => {
|
|
|
398
451
|
if (status === "running") return;
|
|
399
452
|
resolve(status);
|
|
400
453
|
};
|
|
401
|
-
await instance.create(
|
|
454
|
+
await instance.create();
|
|
402
455
|
await expect(promise).resolves.toBe("completed");
|
|
403
456
|
expect(instance.getStatus()).toBe("completed");
|
|
404
457
|
|
|
405
|
-
await instance.create(
|
|
458
|
+
await instance.create();
|
|
406
459
|
expect(instance.getStatus()).toBe("completed");
|
|
407
460
|
});
|
|
408
461
|
});
|
|
@@ -428,7 +481,7 @@ describe("WorkflowRuntime", () => {
|
|
|
428
481
|
resolve(status);
|
|
429
482
|
};
|
|
430
483
|
|
|
431
|
-
await instance.create(
|
|
484
|
+
await instance.create();
|
|
432
485
|
await expect(promise).resolves.toBe("completed");
|
|
433
486
|
|
|
434
487
|
const steps = instance.getSteps_experimental();
|
|
@@ -466,7 +519,7 @@ describe("WorkflowRuntime", () => {
|
|
|
466
519
|
resolve(status);
|
|
467
520
|
};
|
|
468
521
|
|
|
469
|
-
await instance.create(
|
|
522
|
+
await instance.create();
|
|
470
523
|
await expect(promise).resolves.toBe("completed");
|
|
471
524
|
|
|
472
525
|
const steps = instance.getSteps_experimental();
|
|
@@ -508,7 +561,7 @@ describe("WorkflowRuntime", () => {
|
|
|
508
561
|
resolve(status);
|
|
509
562
|
};
|
|
510
563
|
|
|
511
|
-
await instance.create(
|
|
564
|
+
await instance.create();
|
|
512
565
|
await expect(promise).resolves.toBe("completed");
|
|
513
566
|
|
|
514
567
|
const steps = instance.getSteps_experimental();
|
|
@@ -549,7 +602,7 @@ describe("WorkflowRuntime", () => {
|
|
|
549
602
|
resolve(status);
|
|
550
603
|
};
|
|
551
604
|
|
|
552
|
-
await instance.create(
|
|
605
|
+
await instance.create();
|
|
553
606
|
await expect(promise).resolves.toBe("completed");
|
|
554
607
|
|
|
555
608
|
const steps = instance.getSteps_experimental();
|
|
@@ -585,7 +638,7 @@ describe("WorkflowRuntime", () => {
|
|
|
585
638
|
resolve(status);
|
|
586
639
|
};
|
|
587
640
|
|
|
588
|
-
await instance.create(
|
|
641
|
+
await instance.create();
|
|
589
642
|
await expect(promise).resolves.toBe("completed");
|
|
590
643
|
|
|
591
644
|
const steps = instance.getSteps_experimental();
|
|
@@ -619,7 +672,7 @@ describe("WorkflowRuntime", () => {
|
|
|
619
672
|
resolve(status);
|
|
620
673
|
};
|
|
621
674
|
|
|
622
|
-
await instance.create(
|
|
675
|
+
await instance.create();
|
|
623
676
|
|
|
624
677
|
await expect
|
|
625
678
|
.poll(() => {
|
|
@@ -673,7 +726,7 @@ describe("WorkflowRuntime", () => {
|
|
|
673
726
|
resolve(status);
|
|
674
727
|
};
|
|
675
728
|
|
|
676
|
-
await instance.create(
|
|
729
|
+
await instance.create();
|
|
677
730
|
await expect(promise).resolves.toBe("failed");
|
|
678
731
|
|
|
679
732
|
const steps = instance.getSteps_experimental();
|
|
@@ -720,7 +773,7 @@ describe("WorkflowRuntime", () => {
|
|
|
720
773
|
resolve(status);
|
|
721
774
|
};
|
|
722
775
|
|
|
723
|
-
await instance.create(
|
|
776
|
+
await instance.create();
|
|
724
777
|
await expect(promise).resolves.toBe("completed");
|
|
725
778
|
|
|
726
779
|
expect(innerAttempts).toBe(2);
|
|
@@ -762,7 +815,7 @@ describe("WorkflowRuntime", () => {
|
|
|
762
815
|
resolve(status);
|
|
763
816
|
};
|
|
764
817
|
|
|
765
|
-
await instance.create(
|
|
818
|
+
await instance.create();
|
|
766
819
|
await expect(promise).resolves.toBe("failed");
|
|
767
820
|
|
|
768
821
|
const steps = instance.getSteps_experimental();
|
|
@@ -805,7 +858,7 @@ describe("WorkflowRuntime", () => {
|
|
|
805
858
|
resolve(status);
|
|
806
859
|
};
|
|
807
860
|
|
|
808
|
-
await instance.create(
|
|
861
|
+
await instance.create();
|
|
809
862
|
await expect(promise).resolves.toBe("failed");
|
|
810
863
|
|
|
811
864
|
const steps = instance.getSteps_experimental();
|
|
@@ -845,7 +898,7 @@ describe("WorkflowRuntime", () => {
|
|
|
845
898
|
resolve(status);
|
|
846
899
|
};
|
|
847
900
|
|
|
848
|
-
await instance.create(
|
|
901
|
+
await instance.create();
|
|
849
902
|
await expect
|
|
850
903
|
.poll(() => {
|
|
851
904
|
const step = instance.getSteps_experimental().find((s) => s.id === "root-deep-wait");
|
|
@@ -895,7 +948,7 @@ describe("WorkflowRuntime", () => {
|
|
|
895
948
|
resolve(status);
|
|
896
949
|
};
|
|
897
950
|
|
|
898
|
-
await instance.create(
|
|
951
|
+
await instance.create();
|
|
899
952
|
await expect(promise).resolves.toBe("completed");
|
|
900
953
|
|
|
901
954
|
const steps = instance.getSteps_experimental();
|
|
@@ -934,7 +987,7 @@ describe("WorkflowRuntime", () => {
|
|
|
934
987
|
resolve(status);
|
|
935
988
|
};
|
|
936
989
|
|
|
937
|
-
await instance.create(
|
|
990
|
+
await instance.create();
|
|
938
991
|
await expect(promise).resolves.toBe("completed");
|
|
939
992
|
|
|
940
993
|
const steps = instance.getSteps_experimental();
|
|
@@ -978,7 +1031,7 @@ describe("WorkflowRuntime", () => {
|
|
|
978
1031
|
terminalStatuses.push(status);
|
|
979
1032
|
};
|
|
980
1033
|
|
|
981
|
-
await instance.create(
|
|
1034
|
+
await instance.create();
|
|
982
1035
|
await expect.poll(() => instance.getStatus()).toBe("running");
|
|
983
1036
|
expect(terminalStatuses).toHaveLength(0);
|
|
984
1037
|
|
|
@@ -1028,7 +1081,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1028
1081
|
terminalStatuses.push(status);
|
|
1029
1082
|
};
|
|
1030
1083
|
|
|
1031
|
-
await instance.create(
|
|
1084
|
+
await instance.create();
|
|
1032
1085
|
await expect.poll(() => instance.getStatus()).toBe("running");
|
|
1033
1086
|
expect(terminalStatuses).toHaveLength(0);
|
|
1034
1087
|
|
|
@@ -1065,7 +1118,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1065
1118
|
if (status === "running") return;
|
|
1066
1119
|
resolve(status);
|
|
1067
1120
|
};
|
|
1068
|
-
await instance.create(
|
|
1121
|
+
await instance.create();
|
|
1069
1122
|
await expect(promise).resolves.toBe("completed");
|
|
1070
1123
|
});
|
|
1071
1124
|
|
|
@@ -1084,7 +1137,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1084
1137
|
if (status === "running") return;
|
|
1085
1138
|
resolve(status);
|
|
1086
1139
|
};
|
|
1087
|
-
await instance.create(
|
|
1140
|
+
await instance.create();
|
|
1088
1141
|
await expect(promise).resolves.toBe("failed");
|
|
1089
1142
|
});
|
|
1090
1143
|
|
|
@@ -1117,7 +1170,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1117
1170
|
try {
|
|
1118
1171
|
const stub = env.TEST_WORKFLOW_RUNTIME.getByName(crypto.randomUUID());
|
|
1119
1172
|
await runInDurableObject(stub, async (instance) => {
|
|
1120
|
-
await instance.create(
|
|
1173
|
+
await instance.create();
|
|
1121
1174
|
await expect.poll(() => instance.getStatus()).toBe("running");
|
|
1122
1175
|
await expect
|
|
1123
1176
|
.poll(() => {
|
|
@@ -1151,7 +1204,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1151
1204
|
if (status === "paused") resolve();
|
|
1152
1205
|
};
|
|
1153
1206
|
|
|
1154
|
-
await instance.create(
|
|
1207
|
+
await instance.create();
|
|
1155
1208
|
await instance.pause();
|
|
1156
1209
|
await promise;
|
|
1157
1210
|
});
|
|
@@ -1179,7 +1232,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1179
1232
|
try {
|
|
1180
1233
|
const stub = env.TEST_WORKFLOW_RUNTIME.getByName(crypto.randomUUID());
|
|
1181
1234
|
await runInDurableObject(stub, async (instance) => {
|
|
1182
|
-
await instance.create(
|
|
1235
|
+
await instance.create();
|
|
1183
1236
|
await expect.poll(() => instance.getStatus()).toBe("running");
|
|
1184
1237
|
await instance.pause();
|
|
1185
1238
|
expect(instance.getStatus()).toBe("paused");
|
|
@@ -1208,7 +1261,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1208
1261
|
resolve(status);
|
|
1209
1262
|
};
|
|
1210
1263
|
|
|
1211
|
-
await instance.create(
|
|
1264
|
+
await instance.create();
|
|
1212
1265
|
await expect.poll(() => instance.getStatus()).toBe("running");
|
|
1213
1266
|
await instance.pause();
|
|
1214
1267
|
expect(instance.getStatus()).toBe("paused");
|
|
@@ -1241,7 +1294,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1241
1294
|
try {
|
|
1242
1295
|
const stub = env.TEST_WORKFLOW_RUNTIME.getByName(crypto.randomUUID());
|
|
1243
1296
|
await runInDurableObject(stub, async (instance) => {
|
|
1244
|
-
await instance.create(
|
|
1297
|
+
await instance.create();
|
|
1245
1298
|
await expect.poll(() => instance.getStatus()).toBe("running");
|
|
1246
1299
|
await expect(instance.resume()).rejects.toThrow(
|
|
1247
1300
|
"Cannot resume workflow: expected status 'paused' but got 'running'."
|
|
@@ -1262,7 +1315,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1262
1315
|
try {
|
|
1263
1316
|
const stub = env.TEST_WORKFLOW_RUNTIME.getByName(crypto.randomUUID());
|
|
1264
1317
|
await runInDurableObject(stub, async (instance) => {
|
|
1265
|
-
await instance.create(
|
|
1318
|
+
await instance.create();
|
|
1266
1319
|
|
|
1267
1320
|
await expect
|
|
1268
1321
|
.poll(() => {
|
|
@@ -1306,7 +1359,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1306
1359
|
resolve(status);
|
|
1307
1360
|
};
|
|
1308
1361
|
|
|
1309
|
-
await instance.create(
|
|
1362
|
+
await instance.create();
|
|
1310
1363
|
await expect
|
|
1311
1364
|
.poll(() => {
|
|
1312
1365
|
const step = instance.getSteps_experimental().find((s) => s.id === "wait-1");
|
|
@@ -1343,7 +1396,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1343
1396
|
try {
|
|
1344
1397
|
const stub = env.TEST_WORKFLOW_RUNTIME.getByName(crypto.randomUUID());
|
|
1345
1398
|
await runInDurableObject(stub, async (instance) => {
|
|
1346
|
-
await instance.create(
|
|
1399
|
+
await instance.create();
|
|
1347
1400
|
await expect.poll(() => instance.getStatus()).toBe("running");
|
|
1348
1401
|
await expect
|
|
1349
1402
|
.poll(() => {
|
|
@@ -1386,7 +1439,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1386
1439
|
resolve(status);
|
|
1387
1440
|
};
|
|
1388
1441
|
|
|
1389
|
-
await instance.create(
|
|
1442
|
+
await instance.create();
|
|
1390
1443
|
|
|
1391
1444
|
await expect
|
|
1392
1445
|
.poll(() => {
|
|
@@ -1437,7 +1490,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1437
1490
|
resolve(status);
|
|
1438
1491
|
};
|
|
1439
1492
|
|
|
1440
|
-
await instance.create(
|
|
1493
|
+
await instance.create();
|
|
1441
1494
|
|
|
1442
1495
|
await expect
|
|
1443
1496
|
.poll(() => {
|
|
@@ -1485,7 +1538,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1485
1538
|
resolve(status);
|
|
1486
1539
|
};
|
|
1487
1540
|
|
|
1488
|
-
await instance.create(
|
|
1541
|
+
await instance.create();
|
|
1489
1542
|
|
|
1490
1543
|
await expect
|
|
1491
1544
|
.poll(() => {
|
|
@@ -1533,7 +1586,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1533
1586
|
resolve(status);
|
|
1534
1587
|
};
|
|
1535
1588
|
|
|
1536
|
-
await instance.create(
|
|
1589
|
+
await instance.create();
|
|
1537
1590
|
|
|
1538
1591
|
await expect
|
|
1539
1592
|
.poll(() => {
|
|
@@ -1583,7 +1636,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1583
1636
|
resolve(status);
|
|
1584
1637
|
};
|
|
1585
1638
|
|
|
1586
|
-
await instance.create(
|
|
1639
|
+
await instance.create();
|
|
1587
1640
|
await expect(promise).resolves.toBe("completed");
|
|
1588
1641
|
|
|
1589
1642
|
// Should not throw even though there is no matching wait step
|
|
@@ -1609,7 +1662,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1609
1662
|
});
|
|
1610
1663
|
});
|
|
1611
1664
|
|
|
1612
|
-
it("records 'started' when workflow transitions from
|
|
1665
|
+
it("records 'started' when workflow transitions from initialized to running", async () => {
|
|
1613
1666
|
const executeSpy = vi
|
|
1614
1667
|
.spyOn(TestWorkflowDefinition.prototype, "execute")
|
|
1615
1668
|
.mockImplementation(async function (this: TestWorkflowDefinition) {
|
|
@@ -1625,7 +1678,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1625
1678
|
resolve(status);
|
|
1626
1679
|
};
|
|
1627
1680
|
|
|
1628
|
-
await instance.create(
|
|
1681
|
+
await instance.create();
|
|
1629
1682
|
await expect(promise).resolves.toBe("completed");
|
|
1630
1683
|
|
|
1631
1684
|
const events = instance.getWorkflowEvents_experimental();
|
|
@@ -1646,7 +1699,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1646
1699
|
try {
|
|
1647
1700
|
const stub = env.TEST_WORKFLOW_RUNTIME.getByName(crypto.randomUUID());
|
|
1648
1701
|
await runInDurableObject(stub, async (instance) => {
|
|
1649
|
-
await instance.create(
|
|
1702
|
+
await instance.create();
|
|
1650
1703
|
await expect.poll(() => instance.getStatus()).toBe("running");
|
|
1651
1704
|
await instance.pause();
|
|
1652
1705
|
|
|
@@ -1674,7 +1727,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1674
1727
|
resolve(status);
|
|
1675
1728
|
};
|
|
1676
1729
|
|
|
1677
|
-
await instance.create(
|
|
1730
|
+
await instance.create();
|
|
1678
1731
|
await expect.poll(() => instance.getStatus()).toBe("running");
|
|
1679
1732
|
await instance.pause();
|
|
1680
1733
|
await instance.resume();
|
|
@@ -1706,7 +1759,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1706
1759
|
resolve(status);
|
|
1707
1760
|
};
|
|
1708
1761
|
|
|
1709
|
-
await instance.create(
|
|
1762
|
+
await instance.create();
|
|
1710
1763
|
await expect(promise).resolves.toBe("failed");
|
|
1711
1764
|
|
|
1712
1765
|
const events = instance.getWorkflowEvents_experimental();
|
|
@@ -1727,7 +1780,7 @@ describe("WorkflowRuntime", () => {
|
|
|
1727
1780
|
try {
|
|
1728
1781
|
const stub = env.TEST_WORKFLOW_RUNTIME.getByName(crypto.randomUUID());
|
|
1729
1782
|
await runInDurableObject(stub, async (instance) => {
|
|
1730
|
-
await instance.create(
|
|
1783
|
+
await instance.create();
|
|
1731
1784
|
await expect.poll(() => instance.getStatus()).toBe("running");
|
|
1732
1785
|
await instance.cancel("user requested cancellation");
|
|
1733
1786
|
|
|
@@ -1761,7 +1814,6 @@ describe("WorkflowRuntime", () => {
|
|
|
1761
1814
|
});
|
|
1762
1815
|
});
|
|
1763
1816
|
|
|
1764
|
-
|
|
1765
1817
|
describe("WorkflowRuntimeContext", () => {
|
|
1766
1818
|
describe("run steps", () => {
|
|
1767
1819
|
describe("getOrCreateRunStep()", () => {
|
package/test/tsconfig.json
CHANGED
|
@@ -1,10 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"extends": "../tsconfig.json",
|
|
3
3
|
"compilerOptions": {
|
|
4
|
-
"types": [
|
|
5
|
-
"@cloudflare/vitest-pool-workers",
|
|
6
|
-
"@cloudflare/workers-types/experimental"
|
|
7
|
-
]
|
|
4
|
+
"types": ["@cloudflare/vitest-pool-workers", "@cloudflare/workers-types/experimental"]
|
|
8
5
|
},
|
|
9
6
|
"include": ["./**/*.ts"],
|
|
10
7
|
"exclude": []
|
package/test/worker.ts
CHANGED
|
@@ -6,9 +6,7 @@ export type Env = {
|
|
|
6
6
|
};
|
|
7
7
|
|
|
8
8
|
export class TestWorkflowRuntime extends WorkflowRuntime {
|
|
9
|
-
protected
|
|
10
|
-
return this.ctx.exports.TestWorkflowDefinition;
|
|
11
|
-
}
|
|
9
|
+
protected readonly definition = this.ctx.exports.TestWorkflowDefinition;
|
|
12
10
|
}
|
|
13
11
|
export class TestWorkflowDefinition extends WorkflowDefinition {
|
|
14
12
|
async execute(): Promise<void> {}
|