@stepflowjs/core 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index-D_2FGtKL.d.ts +155 -0
- package/dist/index.d.ts +466 -0
- package/dist/index.js +1256 -0
- package/dist/index.js.map +1 -0
- package/dist/storage/index.d.ts +114 -0
- package/dist/storage/index.js +3 -0
- package/dist/storage/index.js.map +1 -0
- package/dist/triggers/index.d.ts +1 -0
- package/dist/triggers/index.js +3 -0
- package/dist/triggers/index.js.map +1 -0
- package/package.json +68 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,1256 @@
|
|
|
1
|
+
import { EventEmitter } from 'events';
|
|
2
|
+
import { nanoid } from 'nanoid';
|
|
3
|
+
import ms from 'ms';
|
|
4
|
+
import { createHmac } from 'crypto';
|
|
5
|
+
|
|
6
|
+
// src/stepflow.ts
|
|
7
|
+
|
|
8
|
+
// src/workflow.ts
|
|
9
|
+
/**
 * Build a workflow object from user options and a handler function.
 * Defaults: retries = 0, retryDelay = 1000ms; other options pass through.
 */
function createWorkflow(options, handler) {
  const { id, version, retries, retryDelay, timeout, cron } = options;
  return {
    id,
    config: {
      id,
      version,
      retries: retries ?? 0,
      retryDelay: retryDelay ?? 1e3,
      timeout,
      cron
    },
    handler
  };
}
|
|
24
|
+
var WorkflowRegistry = class {
  /**
   * Registered workflows keyed by workflow id; each entry maps a
   * version string to the workflow registered under it.
   */
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  workflows = /* @__PURE__ */ new Map();
  /**
   * Register a workflow under its id and version.
   * Defaults the version to "1.0.0" when the workflow config omits it.
   * @throws {Error} if the same id/version pair is already registered.
   */
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  register(workflow) {
    const version = workflow.config.version ?? "1.0.0";
    const id = workflow.id;
    if (!this.workflows.has(id)) {
      this.workflows.set(id, /* @__PURE__ */ new Map());
    }
    const versions = this.workflows.get(id);
    if (versions.has(version)) {
      throw new Error(
        `Workflow "${id}" version "${version}" is already registered`
      );
    }
    versions.set(version, workflow);
  }
  /**
   * Get a workflow by ID and optional version.
   * Without a version, falls back to the latest registered version.
   */
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  get(id, version) {
    const versions = this.workflows.get(id);
    if (!versions) return void 0;
    if (version) {
      return versions.get(version);
    }
    return this.getLatest(id);
  }
  /**
   * Get the latest version of a workflow (highest numeric version).
   */
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  getLatest(id) {
    const versions = this.workflows.get(id);
    if (!versions || versions.size === 0) return void 0;
    const sortedVersions = Array.from(versions.keys()).sort(
      this.compareVersions
    );
    return versions.get(sortedVersions[sortedVersions.length - 1]);
  }
  /**
   * Compare dotted numeric versions (e.g. "1.10.0" sorts after "1.2.0").
   * NOTE: purely numeric comparison — prerelease tags such as
   * "1.0.0-beta" are not supported (segments become NaN).
   */
  compareVersions(a, b) {
    const partsA = a.split(".").map(Number);
    const partsB = b.split(".").map(Number);
    for (let i = 0; i < Math.max(partsA.length, partsB.length); i++) {
      const numA = partsA[i] ?? 0;
      const numB = partsB[i] ?? 0;
      if (numA !== numB) return numA - numB;
    }
    return 0;
  }
  /**
   * Check if a workflow has at least one registered version.
   */
  has(id) {
    const versions = this.workflows.get(id);
    return versions !== void 0 && versions.size > 0;
  }
  /**
   * Get all registered workflows (latest version of each).
   * Delegates to getLatest() instead of duplicating the sort logic
   * (the original inlined a second copy of the version sort here).
   */
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  all() {
    const result = [];
    for (const id of this.workflows.keys()) {
      const latest = this.getLatest(id);
      if (latest) result.push(latest);
    }
    return result;
  }
  /**
   * Get all registered versions of a workflow (registration order).
   */
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  allVersions(id) {
    const versions = this.workflows.get(id);
    if (!versions) return [];
    return Array.from(versions.values());
  }
  /**
   * Get all workflows (latest versions) that declare a cron schedule.
   */
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  scheduled() {
    return this.all().filter((w) => w.config.cron);
  }
  /**
   * Unregister a single version, or every version when none is given.
   * @returns {boolean} true if something was removed.
   */
  unregister(id, version) {
    if (version) {
      const versions = this.workflows.get(id);
      return versions?.delete(version) ?? false;
    }
    return this.workflows.delete(id);
  }
  /**
   * Remove all registered workflows.
   */
  clear() {
    this.workflows.clear();
  }
};
|
|
137
|
+
/**
 * Convert a duration into milliseconds.
 * Numbers are treated as milliseconds already. Strings of the form
 * "<digits><unit>" with unit in ms|s|m|h|d are converted directly;
 * anything else is delegated to the `ms` package.
 * @throws {Error} if the string cannot be parsed.
 */
function parseDuration(duration) {
  if (typeof duration === "number") {
    return duration;
  }
  const simple = /^(\d+)(ms|s|m|h|d)$/.exec(duration);
  if (simple) {
    // Milliseconds per unit for the fast path.
    const unitMs = { ms: 1, s: 1e3, m: 6e4, h: 36e5, d: 864e5 };
    return parseInt(simple[1], 10) * unitMs[simple[2]];
  }
  // Fall back to the `ms` package for formats like "1.5h" or "2 days".
  const fallback = ms(duration);
  if (typeof fallback !== "number") {
    throw new Error(`Invalid duration: ${duration}`);
  }
  return fallback;
}
|
|
166
|
+
/**
 * Render a millisecond duration as a short human-readable string,
 * using the two most significant units: "750ms", "45s", "2m 30s",
 * "1h 5m", "3d 2h". Zero remainders are omitted ("2m", "1h", "3d").
 */
function formatDuration(ms2) {
  if (ms2 < 1e3) {
    return `${ms2}ms`;
  }
  const totalSeconds = Math.floor(ms2 / 1e3);
  if (totalSeconds < 60) {
    return `${totalSeconds}s`;
  }
  const totalMinutes = Math.floor(totalSeconds / 60);
  if (totalMinutes < 60) {
    const secs = totalSeconds % 60;
    return secs === 0 ? `${totalMinutes}m` : `${totalMinutes}m ${secs}s`;
  }
  const totalHours = Math.floor(totalMinutes / 60);
  if (totalHours < 24) {
    const mins = totalMinutes % 60;
    return mins === 0 ? `${totalHours}h` : `${totalHours}h ${mins}m`;
  }
  const days = Math.floor(totalHours / 24);
  const hrs = totalHours % 24;
  return hrs === 0 ? `${days}d` : `${days}d ${hrs}h`;
}
|
|
188
|
+
// Build the per-execution context handed to a workflow handler.
// Exposes durable step helpers (step.run/parallel/invoke), structured
// logging, sleep/wait primitives, and metadata accessors.
// sleep(), waitForEvent() and step.invoke() do NOT block in-process:
// they persist state, schedule a follow-up queue job, then throw an
// *Interrupt error that the Worker catches to suspend the execution.
// On replay, previously completed steps are skipped via cached results.
function createContext(options) {
  const {
    event,
    executionId,
    runId,
    attempt,
    startedAt,
    storage,
    onTimelineEvent
  } = options;
  // In-memory metadata mirror; persisted on each setMetadata() call.
  const metadata = {};
  // Record a timeline event and forward it to the optional observer.
  const addTimelineEvent = (type, data, stepName) => {
    const timelineEvent = {
      id: nanoid(),
      type,
      timestamp: /* @__PURE__ */ new Date(),
      data,
      stepName
    };
    onTimelineEvent?.(timelineEvent);
    return timelineEvent;
  };
  const step = {
    // Run a step at most once per execution: the result is memoized in
    // execution storage under options2.idempotencyKey (default: the
    // step name), so replays after an interrupt return the cached data.
    async run(name, fn, options2) {
      const cacheKey = options2?.idempotencyKey ?? name;
      const cached = await storage.execution.getStepResult(
        executionId,
        cacheKey
      );
      if (cached) {
        addTimelineEvent("step:cached", cached.data, name);
        return cached.data;
      }
      addTimelineEvent("step:start", { name }, name);
      const stepStartedAt = /* @__PURE__ */ new Date();
      try {
        let result;
        if (options2?.timeout) {
          // Race the step against a timer. NOTE: losing the race does
          // not cancel fn() — it keeps running in the background.
          const timeoutMs = parseDuration(options2.timeout);
          result = await Promise.race([
            Promise.resolve(fn()),
            new Promise(
              (_, reject) => setTimeout(
                () => reject(new StepTimeoutError(name, timeoutMs)),
                timeoutMs
              )
            )
          ]);
        } else {
          result = await fn();
        }
        const completedAt = /* @__PURE__ */ new Date();
        const durationMs = completedAt.getTime() - stepStartedAt.getTime();
        // Persist the result before emitting step:complete so the step
        // is durable for future replays.
        await storage.execution.saveStepResult(executionId, cacheKey, {
          data: result,
          startedAt: stepStartedAt,
          completedAt,
          durationMs
        });
        addTimelineEvent("step:complete", result, name);
        return result;
      } catch (error) {
        // Failures are recorded in the timeline but not cached, so a
        // retry re-runs this step.
        addTimelineEvent(
          "step:failed",
          {
            name: error.name,
            message: error.message,
            stack: error.stack
          },
          name
        );
        throw error;
      }
    },
    // Run several functions concurrently as one memoized step; the
    // cached value is the array of all results (Promise.all semantics:
    // fails fast on the first rejection).
    async parallel(name, fns, options2) {
      const cacheKey = options2?.idempotencyKey ?? name;
      const cached = await storage.execution.getStepResult(
        executionId,
        cacheKey
      );
      if (cached) {
        addTimelineEvent("step:cached", cached.data, name);
        return cached.data;
      }
      addTimelineEvent(
        "step:start",
        { name, parallel: true, count: fns.length },
        name
      );
      const stepStartedAt = /* @__PURE__ */ new Date();
      try {
        let results;
        if (options2?.timeout) {
          // Same caveat as step.run: timed-out branches are not cancelled.
          const timeoutMs = parseDuration(options2.timeout);
          results = await Promise.race([
            Promise.all(fns.map((fn) => fn())),
            new Promise(
              (_, reject) => setTimeout(
                () => reject(new StepTimeoutError(name, timeoutMs)),
                timeoutMs
              )
            )
          ]);
        } else {
          results = await Promise.all(fns.map((fn) => fn()));
        }
        const completedAt = /* @__PURE__ */ new Date();
        const durationMs = completedAt.getTime() - stepStartedAt.getTime();
        await storage.execution.saveStepResult(executionId, cacheKey, {
          data: results,
          startedAt: stepStartedAt,
          completedAt,
          durationMs
        });
        addTimelineEvent("step:complete", results, name);
        return results;
      } catch (error) {
        // NOTE(review): unlike step.run, the raw error object (not a
        // name/message/stack summary) is put on the timeline here.
        addTimelineEvent("step:failed", error, name);
        throw error;
      }
    },
    // Invoke a child workflow. If a cached result exists (recorded by
    // the Worker's invoke-complete handler), return it; otherwise throw
    // an InvokeInterrupt so the Worker can enqueue the child and
    // suspend this execution until the child finishes.
    async invoke(workflowId, payload, invokeOptions) {
      const stepName = `invoke:${workflowId}`;
      const cached = await storage.execution.getStepResult(
        executionId,
        stepName
      );
      if (cached) {
        addTimelineEvent("step:cached", cached.data, stepName);
        return cached.data;
      }
      const childExecutionId = nanoid();
      addTimelineEvent(
        "invoke:start",
        { workflowId, childExecutionId, payload, options: invokeOptions },
        stepName
      );
      throw new InvokeInterrupt(
        stepName,
        workflowId,
        payload,
        childExecutionId,
        invokeOptions
      );
    },
    // Publish an event to the event bus (e.g. to wake another
    // execution blocked in waitForEvent).
    async sendEvent(eventId, data) {
      await storage.events.publish(eventId, data);
      // NOTE(review): the timeline type is "event:received" even though
      // this is the sending side — confirm the intended event type.
      addTimelineEvent("event:received", { eventId, data });
    }
  };
  // Leveled logger: entries go onto the execution timeline rather than
  // to the console.
  const log = {
    info(message, data) {
      addTimelineEvent("log:info", { message, ...data });
    },
    warn(message, data) {
      addTimelineEvent("log:warn", { message, ...data });
    },
    error(message, data) {
      addTimelineEvent("log:error", { message, ...data });
    },
    debug(message, data) {
      addTimelineEvent("log:debug", { message, ...data });
    }
  };
  const context = {
    event,
    execution: {
      id: executionId,
      runId,
      attempt,
      startedAt
    },
    step,
    log,
    // Durable sleep: marks the execution "sleeping", schedules a
    // __resume job for wakeAt, records a step result (so the replay
    // skips this sleep), then throws SleepInterrupt to suspend.
    async sleep(name, duration) {
      const cached = await storage.execution.getStepResult(executionId, name);
      if (cached) {
        // Already slept on a previous run of this handler — no-op.
        addTimelineEvent("sleep:complete", { name, cached: true }, name);
        return;
      }
      const durationMs = parseDuration(duration);
      const wakeAt = new Date(Date.now() + durationMs);
      addTimelineEvent("sleep:start", { name, durationMs, wakeAt }, name);
      await storage.execution.update(executionId, {
        status: "sleeping",
        metadata: {
          ...metadata,
          _sleepName: name,
          _sleepWakeAt: wakeAt.toISOString()
        }
      });
      await storage.queue.schedule(
        {
          id: nanoid(),
          workflowId: event.name,
          eventName: `__resume:${executionId}`,
          payload: { sleepName: name },
          metadata: { executionId, type: "sleep-wake" },
          priority: 0,
          attempts: 0,
          maxAttempts: 1,
          createdAt: /* @__PURE__ */ new Date()
        },
        wakeAt
      );
      // Recorded eagerly: completedAt is the *future* wake time and
      // durationMs is the requested duration, not a measured one.
      await storage.execution.saveStepResult(executionId, name, {
        data: { sleptUntil: wakeAt },
        startedAt: /* @__PURE__ */ new Date(),
        completedAt: wakeAt,
        durationMs
      });
      throw new SleepInterrupt(name, wakeAt);
    },
    // Sleep until an absolute timestamp (clamped to now at minimum).
    async sleepUntil(name, timestamp) {
      const now = /* @__PURE__ */ new Date();
      const durationMs = Math.max(0, timestamp.getTime() - now.getTime());
      return context.sleep(name, durationMs);
    },
    // Suspend until an external event arrives: subscribes to the event,
    // marks the execution "waiting", schedules a __timeout job
    // (default timeout: 24h), then throws WaitForEventInterrupt.
    async waitForEvent(name, eventId, waitOptions) {
      const cached = await storage.execution.getStepResult(executionId, name);
      if (cached) {
        addTimelineEvent("step:cached", cached.data, name);
        return cached.data;
      }
      const timeoutMs = waitOptions?.timeout ? parseDuration(waitOptions.timeout) : 24 * 60 * 60 * 1e3;
      const timeoutAt = new Date(Date.now() + timeoutMs);
      addTimelineEvent("event:wait", { name, eventId, timeoutAt }, name);
      await storage.events.subscribe(eventId, executionId, timeoutAt);
      await storage.execution.update(executionId, {
        status: "waiting",
        metadata: {
          ...metadata,
          _waitEventId: eventId,
          _waitTimeoutAt: timeoutAt.toISOString()
        }
      });
      await storage.queue.schedule(
        {
          id: nanoid(),
          workflowId: event.name,
          eventName: `__timeout:${executionId}`,
          payload: { eventId, stepName: name },
          metadata: { executionId, type: "event-timeout" },
          priority: 0,
          attempts: 0,
          maxAttempts: 1,
          createdAt: /* @__PURE__ */ new Date()
        },
        timeoutAt
      );
      throw new WaitForEventInterrupt(name, eventId, timeoutAt);
    },
    // Set a metadata key: updates the in-memory mirror, persists the
    // whole metadata object, and publishes it on the realtime channel.
    async setMetadata(key, value) {
      metadata[key] = value;
      addTimelineEvent("metadata:update", { [key]: value });
      await storage.execution.update(executionId, { metadata });
      await storage.realtime.publish(`execution:${executionId}`, { metadata });
    },
    // Read a metadata key from the in-memory mirror only. NOTE(review):
    // values set on previous runs are not reloaded from storage here.
    getMetadata(key) {
      return metadata[key];
    }
  };
  return context;
}
|
|
452
|
+
/**
 * Raised when a step's configured timeout elapses before its
 * function settles.
 */
var StepTimeoutError = class extends Error {
  name = "StepTimeoutError";
  constructor(stepName, timeoutMs) {
    super(`Step "${stepName}" timed out after ${timeoutMs}ms`);
    this.stepName = stepName;
    this.timeoutMs = timeoutMs;
  }
};
|
|
460
|
+
/**
 * Control-flow signal thrown by ctx.sleep(): suspends the execution
 * until `wakeAt`. Not an error condition — the Worker catches it.
 */
var SleepInterrupt = class extends Error {
  name = "SleepInterrupt";
  constructor(stepName, wakeAt) {
    super(`Sleep interrupt: ${stepName}`);
    this.stepName = stepName;
    this.wakeAt = wakeAt;
  }
};
|
|
468
|
+
/**
 * Control-flow signal thrown by ctx.waitForEvent(): suspends the
 * execution until event `eventId` arrives or `timeoutAt` passes.
 */
var WaitForEventInterrupt = class extends Error {
  name = "WaitForEventInterrupt";
  constructor(stepName, eventId, timeoutAt) {
    super(`WaitForEvent interrupt: ${stepName}`);
    this.stepName = stepName;
    this.eventId = eventId;
    this.timeoutAt = timeoutAt;
  }
};
|
|
477
|
+
/**
 * Control-flow signal thrown by ctx.step.invoke(): suspends the parent
 * execution while child workflow `workflowId` runs as
 * `childExecutionId`.
 */
var InvokeInterrupt = class extends Error {
  name = "InvokeInterrupt";
  constructor(stepName, workflowId, payload, childExecutionId, options) {
    super(`Invoke interrupt: ${stepName} -> ${workflowId}`);
    this.stepName = stepName;
    this.workflowId = workflowId;
    this.payload = payload;
    this.childExecutionId = childExecutionId;
    this.options = options;
  }
};
|
|
488
|
+
|
|
489
|
+
// src/worker.ts
|
|
490
|
+
// Polling worker: pops jobs off the queue, runs workflow handlers, and
// translates the *Interrupt control-flow errors (sleep / wait / invoke)
// into durable scheduling operations. Extends EventEmitter so callers
// can observe execution lifecycle events.
var Worker = class extends EventEmitter {
  storage;       // storage adapters: queue, execution, events, realtime
  registry;      // WorkflowRegistry used to resolve workflowId/version
  concurrency;   // max jobs processed simultaneously (default 1)
  pollInterval;  // ms between queue polls (default 1000)
  workerId;      // identifies this worker in queue locks and logs
  logging;       // when true, log() writes to the console
  running = false;
  activeJobs = 0;
  pollTimeout;   // handle for the pending setTimeout poll cycle
  constructor(options) {
    super();
    this.storage = options.storage;
    this.registry = options.registry;
    this.concurrency = options.concurrency ?? 1;
    this.pollInterval = options.pollInterval ?? 1e3;
    this.workerId = options.workerId ?? `worker-${nanoid(8)}`;
    this.logging = options.logging ?? false;
  }
  /**
   * Start the worker. Idempotent: a second call while running is a
   * no-op. poll() is fired without await — it reschedules itself.
   */
  async start() {
    if (this.running) return;
    this.running = true;
    this.log("Worker started", { concurrency: this.concurrency });
    this.poll();
  }
  /**
   * Stop the worker gracefully: cancel the next poll and busy-wait
   * (100ms ticks) until all in-flight jobs finish.
   */
  async stop() {
    this.running = false;
    if (this.pollTimeout) {
      clearTimeout(this.pollTimeout);
    }
    while (this.activeJobs > 0) {
      await new Promise((resolve) => setTimeout(resolve, 100));
    }
    this.log("Worker stopped");
  }
  /**
   * Poll for jobs: pop until the queue is empty or concurrency is
   * saturated, then schedule the next poll. Jobs are processed without
   * await so multiple can run concurrently; the finally() releases the
   * concurrency slot.
   */
  async poll() {
    if (!this.running) return;
    while (this.running && this.activeJobs < this.concurrency) {
      const job = await this.storage.queue.pop({
        workerId: this.workerId,
        lockDuration: 3e4
      });
      if (!job) break;
      this.activeJobs++;
      this.processJob(job).finally(() => {
        this.activeJobs--;
      });
    }
    this.pollTimeout = setTimeout(() => this.poll(), this.pollInterval);
  }
  /**
   * Process a single job. Internal jobs (identified by a reserved
   * "__resume:" / "__timeout:" / "__invoke-complete:" eventName prefix)
   * are dispatched to their handlers; anything else is resolved against
   * the registry and executed. On unexpected failure the job is nacked
   * with exponential backoff.
   */
  async processJob(job) {
    this.log("Processing job", { jobId: job.id, workflowId: job.workflowId });
    try {
      if (job.eventName.startsWith("__resume:")) {
        await this.handleResume(job);
        return;
      }
      if (job.eventName.startsWith("__timeout:")) {
        await this.handleTimeout(job);
        return;
      }
      if (job.eventName.startsWith("__invoke-complete:")) {
        await this.handleInvokeComplete(job);
        return;
      }
      // Pin the workflow version of an existing execution so a replay
      // runs the same code it started with.
      const executionId = job.metadata.executionId;
      let workflowVersion;
      if (executionId) {
        const execution = await this.storage.execution.get(executionId);
        workflowVersion = execution?.workflowVersion;
      }
      const workflow = this.registry.get(job.workflowId, workflowVersion);
      if (!workflow) {
        // Unknown workflow: ack (drop) rather than retry forever.
        this.log("Workflow not found", { workflowId: job.workflowId });
        await this.storage.queue.ack(job.id);
        return;
      }
      await this.executeWorkflow(workflow, job);
    } catch (error) {
      this.log("Job processing error", { error });
      this.emit("error", { error });
      await this.storage.queue.nack(job.id, {
        delay: 1e3 * Math.pow(2, job.attempts)
        // Exponential backoff
      });
    }
  }
  /**
   * Execute a workflow: create or resume the execution record, build
   * the handler context, run the handler, and interpret its outcome
   * (completion, interrupt suspension, or failure/retry).
   */
  async executeWorkflow(workflow, job) {
    const runId = job.metadata.runId ?? nanoid();
    const executionId = job.metadata.executionId ?? nanoid();
    const attempt = job.attempts + 1;
    let execution = await this.storage.execution.get(executionId);
    if (!execution) {
      // First run: create a fresh execution record.
      execution = {
        id: executionId,
        runId,
        workflowId: workflow.id,
        workflowVersion: workflow.config.version ?? "1.0.0",
        eventName: job.eventName,
        payload: job.payload,
        status: "running",
        steps: [],
        metadata: job.metadata,
        attempt,
        startedAt: /* @__PURE__ */ new Date(),
        timeline: []
      };
      await this.storage.execution.create(execution);
    } else {
      // Replay/retry: flip the existing record back to running.
      await this.storage.execution.update(executionId, {
        status: "running",
        attempt
      });
    }
    this.emit("execution:start", {
      executionId,
      workflowId: workflow.id,
      eventName: job.eventName
    });
    const event = {
      id: job.id,
      name: job.eventName,
      data: job.payload,
      timestamp: job.createdAt,
      metadata: job.metadata
    };
    const context = createContext({
      event,
      executionId,
      runId,
      attempt,
      startedAt: execution.startedAt,
      storage: this.storage,
      onTimelineEvent: (timelineEvent) => {
        this.handleTimelineEvent(executionId, timelineEvent);
      }
    });
    try {
      const result = await workflow.handler(context);
      // Success path: persist, ack, notify.
      await this.storage.execution.update(executionId, {
        status: "completed",
        result,
        completedAt: /* @__PURE__ */ new Date()
      });
      await this.storage.queue.ack(job.id);
      this.emit("execution:complete", { executionId, result });
      this.log("Workflow completed", { executionId, workflowId: workflow.id });
      await this.storage.realtime.publish(`execution:${executionId}`, {
        status: "completed",
        result
      });
      // If this execution was invoked by a parent, enqueue an
      // __invoke-complete job so the parent can resume with the result.
      const parentExecutionId = job.metadata.parentExecutionId;
      const parentStepName = job.metadata.parentStepName;
      if (parentExecutionId && parentStepName) {
        await this.storage.queue.push({
          id: nanoid(),
          workflowId: job.workflowId,
          eventName: `__invoke-complete:${parentExecutionId}`,
          payload: {
            parentExecutionId,
            parentStepName,
            childExecutionId: executionId,
            result
          },
          metadata: {
            executionId: parentExecutionId,
            type: "invoke-complete"
          },
          priority: 0,
          attempts: 0,
          maxAttempts: 1,
          createdAt: /* @__PURE__ */ new Date()
        });
      }
    } catch (error) {
      // Sleep suspension: the context already scheduled the __resume
      // job and updated the execution status; just ack and report.
      if (error instanceof SleepInterrupt) {
        await this.storage.queue.ack(job.id);
        this.emit("execution:sleeping", {
          executionId,
          wakeAt: error.wakeAt
        });
        this.log("Workflow sleeping", {
          executionId,
          stepName: error.stepName,
          wakeAt: error.wakeAt
        });
        return;
      }
      // Event-wait suspension: subscription and __timeout job were
      // already created by the context; ack and report.
      if (error instanceof WaitForEventInterrupt) {
        await this.storage.queue.ack(job.id);
        this.emit("execution:waiting", {
          executionId,
          eventId: error.eventId,
          timeoutAt: error.timeoutAt
        });
        this.log("Workflow waiting for event", {
          executionId,
          stepName: error.stepName,
          eventId: error.eventId
        });
        return;
      }
      // Child-invoke suspension: enqueue the child workflow job and
      // park this execution as "waiting" until __invoke-complete.
      if (error instanceof InvokeInterrupt) {
        await this.storage.queue.ack(job.id);
        const childJob = {
          id: nanoid(),
          workflowId: error.workflowId,
          eventName: error.workflowId,
          payload: error.payload,
          metadata: {
            ...error.options?.metadata ?? {},
            parentExecutionId: executionId,
            parentStepName: error.stepName,
            runId,
            executionId: error.childExecutionId
          },
          priority: 0,
          attempts: 0,
          // NOTE(review): this uses the *parent* workflow's retries as
          // the child's maxAttempts — confirm intended.
          maxAttempts: workflow.config.retries ?? 0,
          createdAt: /* @__PURE__ */ new Date()
        };
        await this.storage.queue.push(childJob);
        const invokeMetadata = {
          ...execution.metadata,
          _invokeWorkflowId: error.workflowId,
          _invokeChildExecutionId: error.childExecutionId,
          _invokeStepName: error.stepName
        };
        await this.storage.execution.update(executionId, {
          status: "waiting",
          metadata: invokeMetadata
        });
        await this.storage.realtime.publish(`execution:${executionId}`, {
          status: "waiting",
          metadata: invokeMetadata
        });
        this.emit("execution:invoking", {
          executionId,
          childExecutionId: error.childExecutionId,
          childWorkflowId: error.workflowId
        });
        this.log("Workflow invoking child", {
          executionId,
          stepName: error.stepName,
          childWorkflowId: error.workflowId,
          childExecutionId: error.childExecutionId
        });
        return;
      }
      // Genuine failure: decide whether this attempt is retryable.
      const err = error;
      const willRetry = workflow.config.retries !== void 0 && attempt <= workflow.config.retries;
      await this.storage.execution.update(executionId, {
        status: willRetry ? "pending" : "failed",
        error: {
          name: err.name,
          message: err.message,
          stack: err.stack
        }
      });
      if (willRetry) {
        // Retry with exponential backoff on the configured base delay.
        const delay = workflow.config.retryDelay ? parseDuration(workflow.config.retryDelay) : 1e3;
        await this.storage.queue.nack(job.id, {
          delay: delay * Math.pow(2, attempt - 1)
        });
      } else {
        await this.storage.queue.ack(job.id);
      }
      this.emit("execution:failed", { executionId, error: err, willRetry });
      this.log("Workflow failed", {
        executionId,
        error: err.message,
        willRetry
      });
      // NOTE(review): this publishes status "failed" even when the
      // execution was just marked "pending" for a retry — confirm
      // whether realtime subscribers should see the retry state instead.
      await this.storage.realtime.publish(`execution:${executionId}`, {
        status: "failed",
        error: { name: err.name, message: err.message }
      });
    }
  }
  /**
   * Handle resume from sleep: reload the execution, re-resolve its
   * pinned workflow version, and replay the handler (cached step
   * results skip everything up to and including the sleep).
   * Missing execution/workflow: ack and drop.
   */
  async handleResume(job) {
    const executionId = job.metadata.executionId;
    const execution = await this.storage.execution.get(executionId);
    if (!execution) {
      await this.storage.queue.ack(job.id);
      return;
    }
    const workflow = this.registry.get(
      execution.workflowId,
      execution.workflowVersion
    );
    if (!workflow) {
      await this.storage.queue.ack(job.id);
      return;
    }
    this.log("Resuming workflow from sleep", { executionId });
    // Replay with the original trigger event/payload, not the internal
    // __resume job's.
    await this.executeWorkflow(workflow, {
      ...job,
      workflowId: execution.workflowId,
      eventName: execution.eventName,
      payload: execution.payload,
      metadata: { ...execution.metadata, executionId, runId: execution.runId }
    });
  }
  /**
   * Handle event-wait timeout: if the execution is still waiting,
   * record a timed-out step result ({ eventData: null, timeout: true })
   * so the replayed waitForEvent resolves with the timeout marker,
   * then replay the workflow. If the event already arrived (status is
   * no longer "waiting"), the timeout job is simply acked.
   */
  async handleTimeout(job) {
    const executionId = job.metadata.executionId;
    const stepName = job.payload.stepName;
    const execution = await this.storage.execution.get(executionId);
    if (!execution || execution.status !== "waiting") {
      await this.storage.queue.ack(job.id);
      return;
    }
    this.log("Event wait timeout", { executionId, stepName });
    await this.storage.execution.saveStepResult(executionId, stepName, {
      data: { eventData: null, timeout: true },
      startedAt: /* @__PURE__ */ new Date(),
      completedAt: /* @__PURE__ */ new Date(),
      durationMs: 0
    });
    const workflow = this.registry.get(
      execution.workflowId,
      execution.workflowVersion
    );
    if (workflow) {
      await this.executeWorkflow(workflow, {
        ...job,
        workflowId: execution.workflowId,
        eventName: execution.eventName,
        payload: execution.payload,
        metadata: {
          ...execution.metadata,
          executionId,
          runId: execution.runId
        }
      });
    }
    await this.storage.queue.ack(job.id);
  }
  /**
   * Handle child workflow completion: store the child's result as the
   * parent's invoke step result, then replay the parent so its
   * step.invoke call returns the cached value.
   */
  async handleInvokeComplete(job) {
    const executionId = job.metadata.executionId;
    const execution = await this.storage.execution.get(executionId);
    if (!execution) {
      await this.storage.queue.ack(job.id);
      return;
    }
    const payload = job.payload;
    await this.storage.execution.saveStepResult(
      executionId,
      payload.parentStepName,
      {
        data: payload.result,
        startedAt: /* @__PURE__ */ new Date(),
        completedAt: /* @__PURE__ */ new Date(),
        durationMs: 0
      }
    );
    const workflow = this.registry.get(
      execution.workflowId,
      execution.workflowVersion
    );
    if (workflow) {
      await this.executeWorkflow(workflow, {
        ...job,
        workflowId: execution.workflowId,
        eventName: execution.eventName,
        payload: execution.payload,
        metadata: {
          ...execution.metadata,
          executionId,
          runId: execution.runId
        }
      });
    }
    await this.storage.queue.ack(job.id);
  }
  /**
   * Handle timeline events from the context: re-emit step completions
   * and failures as worker events, and mirror every event onto the
   * realtime channel. NOTE(review): the publish promise is not awaited
   * — rejections here would be unhandled.
   */
  handleTimelineEvent(executionId, event) {
    if (event.type === "step:complete") {
      this.emit("step:complete", {
        executionId,
        stepName: event.stepName,
        result: event.data
      });
    }
    if (event.type === "step:failed") {
      this.emit("step:failed", {
        executionId,
        stepName: event.stepName,
        error: event.data
      });
    }
    this.storage.realtime.publish(`execution:${executionId}`, event);
  }
  /**
   * Log helper: console output gated on the `logging` option.
   */
  log(message, data) {
    if (this.logging) {
      console.log(`[${this.workerId}] ${message}`, data ?? "");
    }
  }
};
|
|
917
|
+
/**
 * Create a signed public access token of the form `<payload>.<signature>`.
 * The payload is base64url-encoded JSON `{ runId, executionId, exp }` and
 * the signature is an HMAC-SHA256 (base64url) over the encoded payload.
 *
 * @param {{ runId: string, executionId: string }} data - identifiers to embed
 * @param {string} secret - HMAC signing secret
 * @param {number} expirySeconds - lifetime from now, in seconds
 * @returns {string} the token string
 */
function createAccessToken(data, secret, expirySeconds) {
  const { runId, executionId } = data;
  const expiresAt = Math.floor(Date.now() / 1e3) + expirySeconds;
  const encodedPayload = Buffer.from(
    JSON.stringify({ runId, executionId, exp: expiresAt })
  ).toString("base64url");
  const hmac = createHmac("sha256", secret);
  const signature = hmac.update(encodedPayload).digest("base64url");
  return `${encodedPayload}.${signature}`;
}
|
|
927
|
+
/**
 * Verify a token produced by createAccessToken.
 *
 * @param {string} token - `<payload>.<signature>` token string
 * @param {string} secret - HMAC secret used when the token was created
 * @returns {{ runId: string, executionId: string, exp: number } | null}
 *   the decoded payload, or null when the token is malformed, the
 *   signature does not match, the payload is not valid JSON, the `exp`
 *   claim is missing/not a number, or the token has expired.
 */
function verifyAccessToken(token, secret) {
  const parts = token.split(".");
  if (parts.length !== 2) {
    return null;
  }
  const [payloadStr, signature] = parts;
  const expectedSignature = createHmac("sha256", secret).update(payloadStr).digest("base64url");
  // Constant-time comparison: a plain `!==` short-circuits on the first
  // differing character, leaking signature bytes through timing. The loop
  // always scans the full expected signature and folds every difference
  // (including a length mismatch) into one accumulator.
  const given = Buffer.from(signature);
  const expected = Buffer.from(expectedSignature);
  let diff = given.length ^ expected.length;
  for (let i = 0; i < expected.length; i++) {
    diff |= expected[i] ^ (given[i] ?? 0);
  }
  if (diff !== 0) {
    return null;
  }
  try {
    const payload = JSON.parse(
      Buffer.from(payloadStr, "base64url").toString()
    );
    // Reject tokens with a missing or non-numeric exp claim: previously a
    // payload without `exp` compared as `undefined < n` (false) and never
    // expired. createAccessToken always sets a numeric exp, so no
    // legitimately issued token is affected.
    if (typeof payload.exp !== "number" || payload.exp < Math.floor(Date.now() / 1e3)) {
      return null;
    }
    return payload;
  } catch {
    // Signature matched but payload is not parseable JSON.
    return null;
  }
}
|
|
949
|
+
|
|
950
|
+
// src/stepflow.ts
|
|
951
|
+
/**
 * Stepflow orchestrator.
 *
 * Owns the storage backend, the workflow registry, an embedded Worker, and
 * a set of named triggers. Worker lifecycle events ("execution:start",
 * "execution:complete", "execution:failed", "execution:sleeping",
 * "execution:waiting", "step:complete", "step:failed", "error") are all
 * re-emitted on this instance, so consumers only subscribe here.
 */
var Stepflow = class extends EventEmitter {
  // Storage backend providing queue/execution/events/realtime sub-stores.
  storage;
  // Registry of workflow definitions (id -> version -> workflow).
  registry;
  // Embedded worker that consumes queue jobs and executes workflows.
  worker;
  // Named triggers; started in start() and stopped in stop().
  triggers = /* @__PURE__ */ new Map();
  // When true, log() writes to the console.
  logging;
  // HMAC secret for public access tokens.
  // NOTE(review): defaults to a random per-process nanoid(32), so tokens
  // minted by one instance will NOT verify in another process or after a
  // restart unless options.tokenSecret is configured — confirm intended.
  tokenSecret;
  // Public-access-token lifetime in seconds (default 3600 = 1 hour).
  tokenExpiry;
  /**
   * @param options - `storage` (required), plus optional `logging`,
   *   `tokenSecret`, `tokenExpiry`, and `concurrency` (forwarded to the
   *   embedded Worker, default 1).
   */
  constructor(options) {
    super();
    this.storage = options.storage;
    this.logging = options.logging ?? false;
    this.tokenSecret = options.tokenSecret ?? nanoid(32);
    this.tokenExpiry = options.tokenExpiry ?? 3600;
    this.registry = new WorkflowRegistry();
    this.worker = new Worker({
      storage: this.storage,
      registry: this.registry,
      concurrency: options.concurrency ?? 1,
      logging: this.logging
    });
    // Forward every worker event unchanged so callers can listen on the
    // Stepflow instance instead of reaching into the worker.
    this.worker.on(
      "execution:start",
      (data) => this.emit("execution:start", data)
    );
    this.worker.on(
      "execution:complete",
      (data) => this.emit("execution:complete", data)
    );
    this.worker.on(
      "execution:failed",
      (data) => this.emit("execution:failed", data)
    );
    this.worker.on(
      "execution:sleeping",
      (data) => this.emit("execution:sleeping", data)
    );
    this.worker.on(
      "execution:waiting",
      (data) => this.emit("execution:waiting", data)
    );
    this.worker.on("step:complete", (data) => this.emit("step:complete", data));
    this.worker.on("step:failed", (data) => this.emit("step:failed", data));
    this.worker.on("error", (data) => this.emit("error", data));
  }
  /**
   * Register a workflow definition with the registry.
   * Returns `this` for chaining.
   */
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  register(workflow) {
    this.registry.register(workflow);
    this.log("Registered workflow", { id: workflow.id });
    return this;
  }
  /**
   * Register multiple workflows in one call. Returns `this` for chaining.
   */
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  registerAll(workflows) {
    for (const workflow of workflows) {
      this.register(workflow);
    }
    return this;
  }
  /**
   * Add a named trigger. It is not started until start() is called.
   * Returns `this` for chaining.
   */
  addTrigger(name, trigger) {
    this.triggers.set(name, trigger);
    return this;
  }
  /**
   * Start the orchestrator: connect storage, start the worker, then start
   * every registered trigger with handleTrigger as its callback.
   */
  async start() {
    await this.storage.connect();
    await this.worker.start();
    for (const [name, trigger] of this.triggers) {
      await trigger.start(this.handleTrigger.bind(this));
      this.log("Started trigger", { name, type: trigger.type });
    }
    this.log("Stepflow started");
  }
  /**
   * Stop the orchestrator in reverse order: triggers, worker, storage.
   */
  async stop() {
    // eslint-disable-next-line @typescript-eslint/no-unused-vars -- name unused, Map iteration yields [key, value]
    for (const [name, trigger] of this.triggers) {
      await trigger.stop();
    }
    await this.worker.stop();
    await this.storage.disconnect();
    this.log("Stepflow stopped");
  }
  /**
   * Trigger a workflow execution.
   *
   * When options.idempotencyKey is set and an execution already exists for
   * (workflowId, key), the existing run is returned with
   * `deduplicated: true` and no new job is enqueued. Otherwise a job is
   * pushed (or scheduled, when options.delay is set) and a fresh public
   * access token is minted for the new run.
   *
   * @throws Error when no workflow with `workflowId` is registered.
   */
  async trigger(workflowId, payload, options) {
    const workflow = this.registry.get(workflowId);
    if (!workflow) {
      throw new Error(`Workflow "${workflowId}" not found`);
    }
    if (options?.idempotencyKey) {
      const existing = await this.storage.execution.getByIdempotencyKey(
        workflowId,
        options.idempotencyKey
      );
      if (existing) {
        this.log("Deduplicated workflow trigger", {
          workflowId,
          runId: existing.runId,
          executionId: existing.id,
          idempotencyKey: options.idempotencyKey
        });
        return {
          runId: existing.runId,
          executionId: existing.id,
          publicAccessToken: this.createPublicToken(
            existing.runId,
            existing.id
          ),
          deduplicated: true
        };
      }
    }
    const runId = options?.runId ?? nanoid();
    const executionId = nanoid();
    const job = {
      id: nanoid(),
      workflowId,
      // eventName mirrors the workflow id for directly-triggered runs.
      eventName: workflowId,
      payload,
      metadata: {
        ...options?.metadata,
        workflowVersion: workflow.config.version ?? "1.0.0",
        idempotencyKey: options?.idempotencyKey,
        runId,
        executionId
      },
      priority: 0,
      attempts: 0,
      // NOTE(review): maxAttempts comes straight from config.retries
      // (default 0) — confirm the queue treats 0 as "run once".
      maxAttempts: workflow.config.retries ?? 0,
      createdAt: /* @__PURE__ */ new Date()
    };
    if (options?.delay) {
      // Delayed start: schedule for `delay` milliseconds from now.
      await this.storage.queue.schedule(
        job,
        new Date(Date.now() + options.delay)
      );
    } else {
      await this.storage.queue.push(job);
    }
    const publicAccessToken = createAccessToken(
      { runId, executionId },
      this.tokenSecret,
      this.tokenExpiry
    );
    this.log("Triggered workflow", { workflowId, runId, executionId });
    return {
      runId,
      executionId,
      publicAccessToken
    };
  }
  /**
   * Get a run by ID. When options.accessToken is supplied it is verified
   * against this instance's secret and must reference the same runId.
   *
   * @throws Error("Invalid access token") on a bad or mismatched token.
   * @returns the first matching execution, or null when none exists.
   */
  async getRun(runId, options) {
    if (options?.accessToken) {
      const payload = verifyAccessToken(options.accessToken, this.tokenSecret);
      if (!payload || payload.runId !== runId) {
        throw new Error("Invalid access token");
      }
    }
    const executions = await this.storage.execution.list({ runId, limit: 1 });
    return executions[0] ?? null;
  }
  /**
   * Get an execution by ID (no token check — internal/trusted access).
   */
  async getExecution(executionId) {
    return this.storage.execution.get(executionId);
  }
  /**
   * List executions, forwarding filter options to the execution store.
   */
  async listRuns(options) {
    return this.storage.execution.list(options ?? {});
  }
  /**
   * Notify an event (for waitForEvent): publish the event data, save a
   * step result for every execution currently waiting on this eventId,
   * and enqueue a resume job for each.
   *
   * @returns { waiters, executions } — the publish count and the ids of
   *   the executions that were woken.
   */
  async notify(eventId, data) {
    const waiters = await this.storage.events.getWaiters(eventId);
    const count = await this.storage.events.publish(eventId, data);
    for (const waiter of waiters) {
      const execution = await this.storage.execution.get(waiter.executionId);
      if (execution && execution.status === "waiting") {
        // NOTE(review): `_waitEventId` metadata appears to hold the name of
        // the waiting step — confirm against the waitForEvent writer.
        const stepName = execution.metadata._waitEventId;
        await this.storage.execution.saveStepResult(
          waiter.executionId,
          stepName,
          {
            // timeout: false distinguishes a real event delivery from the
            // timeout path, which records { eventData: null, timeout: true }.
            data: { eventData: data, timeout: false },
            startedAt: /* @__PURE__ */ new Date(),
            completedAt: /* @__PURE__ */ new Date(),
            durationMs: 0
          }
        );
        // Enqueue a resume job so the worker replays the execution past
        // the now-completed wait step.
        await this.storage.queue.push({
          id: nanoid(),
          workflowId: execution.workflowId,
          eventName: `__resume:${waiter.executionId}`,
          payload: { eventId, data },
          metadata: { executionId: waiter.executionId, type: "event-resume" },
          priority: 0,
          attempts: 0,
          maxAttempts: 1,
          createdAt: /* @__PURE__ */ new Date()
        });
      }
    }
    return {
      waiters: count,
      executions: waiters.map((w) => w.executionId)
    };
  }
  /**
   * Create a public access token for a run, signed with this instance's
   * secret and expiring after tokenExpiry seconds.
   */
  createPublicToken(runId, executionId) {
    return createAccessToken(
      { runId, executionId },
      this.tokenSecret,
      this.tokenExpiry
    );
  }
  /**
   * Subscribe to run updates on the `run:<runId>` realtime channel.
   * Returns whatever the realtime store's subscribe returns (presumably an
   * unsubscribe handle — confirm against the storage implementation).
   */
  subscribeToRun(runId, callback) {
    return this.storage.realtime.subscribe(`run:${runId}`, (data) => {
      callback(data);
    });
  }
  /**
   * Subscribe to execution updates on the `execution:<id>` realtime channel.
   */
  subscribeToExecution(executionId, callback) {
    return this.storage.realtime.subscribe(
      `execution:${executionId}`,
      callback
    );
  }
  /**
   * Handle trigger events: look up the workflow named by event.source and
   * trigger it with the event's data, recording the trigger type in
   * metadata. Silently logs and returns when no workflow matches.
   */
  async handleTrigger(event) {
    const workflow = this.registry.get(event.source);
    if (!workflow) {
      this.log("No workflow found for trigger", { source: event.source });
      return;
    }
    await this.trigger(workflow.id, event.data, {
      metadata: {
        triggerType: event.type,
        ...event.metadata
      }
    });
  }
  /**
   * Get the first registered trigger whose `type` matches, or undefined.
   * NOTE(review): despite the doc text "by name", lookup is by type, and
   * the rest parameter `args` is unused.
   */
  getTrigger(type, ...args) {
    // eslint-disable-next-line @typescript-eslint/no-unused-vars -- name unused, Map iteration yields [key, value]
    for (const [name, trigger] of this.triggers) {
      if (trigger.type === type) {
        return trigger;
      }
    }
    return void 0;
  }
  /**
   * Health check, delegated to the storage backend.
   */
  async healthCheck() {
    return this.storage.healthCheck();
  }
  /**
   * Get the workflow registry (shared with the embedded worker).
   */
  getRegistry() {
    return this.registry;
  }
  /**
   * Log helper: writes to the console only when logging is enabled.
   */
  log(message, data) {
    if (this.logging) {
      console.log(`[Stepflow] ${message}`, data ?? "");
    }
  }
};
|
|
1253
|
+
|
|
1254
|
+
export { InvokeInterrupt, SleepInterrupt, StepTimeoutError, Stepflow, WaitForEventInterrupt, Worker, WorkflowRegistry, createAccessToken, createContext, createWorkflow, formatDuration, parseDuration, verifyAccessToken };
|
|
1255
|
+
//# sourceMappingURL=index.js.map
|