@gravito/flux 3.0.1 → 3.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +298 -0
- package/bin/flux.js +25 -1
- package/dev/viewer/app.js +4 -4
- package/dist/bun.cjs +2 -2
- package/dist/bun.cjs.map +1 -1
- package/dist/bun.d.cts +65 -26
- package/dist/bun.d.ts +65 -26
- package/dist/bun.js +1 -1
- package/dist/chunk-4DXCQ6CL.js +3486 -0
- package/dist/chunk-4DXCQ6CL.js.map +1 -0
- package/dist/chunk-6AZNHVEO.cjs +316 -0
- package/dist/chunk-6AZNHVEO.cjs.map +1 -0
- package/dist/{chunk-ZAMVC732.js → chunk-NAIVO7RR.js} +64 -15
- package/dist/chunk-NAIVO7RR.js.map +1 -0
- package/dist/chunk-WAPZDXSX.cjs +3486 -0
- package/dist/chunk-WAPZDXSX.cjs.map +1 -0
- package/dist/chunk-WGDTB6OC.js +316 -0
- package/dist/chunk-WGDTB6OC.js.map +1 -0
- package/dist/{chunk-SJSPR4ZU.cjs → chunk-YXBEYVGY.cjs} +66 -17
- package/dist/chunk-YXBEYVGY.cjs.map +1 -0
- package/dist/cli/flux-visualize.cjs +108 -0
- package/dist/cli/flux-visualize.cjs.map +1 -0
- package/dist/cli/flux-visualize.d.cts +1 -0
- package/dist/cli/flux-visualize.d.ts +1 -0
- package/dist/cli/flux-visualize.js +108 -0
- package/dist/cli/flux-visualize.js.map +1 -0
- package/dist/index.cjs +97 -9
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +369 -13
- package/dist/index.d.ts +369 -13
- package/dist/index.js +96 -8
- package/dist/index.js.map +1 -1
- package/dist/index.node.cjs +11 -3
- package/dist/index.node.cjs.map +1 -1
- package/dist/index.node.d.cts +1110 -247
- package/dist/index.node.d.ts +1110 -247
- package/dist/index.node.js +10 -2
- package/dist/types-CRz5XdLd.d.cts +433 -0
- package/dist/types-CRz5XdLd.d.ts +433 -0
- package/package.json +17 -6
- package/dist/chunk-3JGQYHUN.js +0 -1006
- package/dist/chunk-3JGQYHUN.js.map +0 -1
- package/dist/chunk-5OXXH442.cjs +0 -1006
- package/dist/chunk-5OXXH442.cjs.map +0 -1
- package/dist/chunk-SJSPR4ZU.cjs.map +0 -1
- package/dist/chunk-ZAMVC732.js.map +0 -1
- package/dist/types-CZwYGpou.d.cts +0 -353
- package/dist/types-CZwYGpou.d.ts +0 -353
|
@@ -0,0 +1,3486 @@
|
|
|
1
|
+
"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { newObj[key] = obj[key]; } } } newObj.default = obj; return newObj; } } function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3; var _class4; var _class5; var _class6; var _class7; var _class8;
|
|
2
|
+
|
|
3
|
+
var _chunkYXBEYVGYcjs = require('./chunk-YXBEYVGY.cjs');
|
|
4
|
+
|
|
5
|
+
// src/errors.ts
// Machine-readable error codes raised by the Flux engine. Each value equals its
// key so codes round-trip cleanly through serialization.
var FluxErrorCode = {
  WORKFLOW_NOT_FOUND: "WORKFLOW_NOT_FOUND",
  WORKFLOW_INVALID_INPUT: "WORKFLOW_INVALID_INPUT",
  WORKFLOW_DEFINITION_CHANGED: "WORKFLOW_DEFINITION_CHANGED",
  WORKFLOW_NAME_MISMATCH: "WORKFLOW_NAME_MISMATCH",
  INVALID_STATE_TRANSITION: "INVALID_STATE_TRANSITION",
  WORKFLOW_NOT_SUSPENDED: "WORKFLOW_NOT_SUSPENDED",
  INVALID_STEP_INDEX: "INVALID_STEP_INDEX",
  STEP_TIMEOUT: "STEP_TIMEOUT",
  STEP_NOT_FOUND: "STEP_NOT_FOUND",
  CONCURRENT_MODIFICATION: "CONCURRENT_MODIFICATION",
  EMPTY_WORKFLOW: "EMPTY_WORKFLOW",
  NO_RECOVERY_ACTION: "NO_RECOVERY_ACTION",
  INVALID_JSON_POINTER: "INVALID_JSON_POINTER",
  INVALID_PATH_TRAVERSAL: "INVALID_PATH_TRAVERSAL",
  CANNOT_REPLACE_ROOT: "CANNOT_REPLACE_ROOT",
  CANNOT_REMOVE_ROOT: "CANNOT_REMOVE_ROOT"
};
/**
 * Base error type for all Flux failures. Carries a machine-readable `code`
 * (one of FluxErrorCode) and an optional `context` metadata object.
 */
var FluxError = class extends Error {
  /**
   * Creates a new FluxError.
   *
   * @param message - Human-readable error description.
   * @param code - Machine-readable error code.
   * @param context - Additional metadata related to the error.
   */
  constructor(message, code, context) {
    super(message);
    Object.assign(this, { code, context, name: "FluxError" });
  }
};
/** Builds a WORKFLOW_NOT_FOUND error for the given workflow id. */
function workflowNotFound(id) {
  const context = { workflowId: id };
  return new FluxError(`Workflow not found: ${id}`, FluxErrorCode.WORKFLOW_NOT_FOUND, context);
}
/** Builds an INVALID_STATE_TRANSITION error describing the rejected move. */
function invalidStateTransition(from, to) {
  const message = `Invalid state transition: ${from} \u2192 ${to}`;
  return new FluxError(message, FluxErrorCode.INVALID_STATE_TRANSITION, { from, to });
}
/** Builds a WORKFLOW_INVALID_INPUT error for the named workflow. */
function invalidInput(workflowName) {
  const message = `Invalid input for workflow "${workflowName}"`;
  return new FluxError(message, FluxErrorCode.WORKFLOW_INVALID_INPUT, { workflowName });
}
/** Builds a WORKFLOW_NAME_MISMATCH error comparing expected vs. received names. */
function workflowNameMismatch(expected, received) {
  const message = `Workflow name mismatch: ${received} !== ${expected}`;
  return new FluxError(message, FluxErrorCode.WORKFLOW_NAME_MISMATCH, { expected, received });
}
/** Builds a WORKFLOW_DEFINITION_CHANGED error (no extra context). */
function workflowDefinitionChanged() {
  return new FluxError(
    "Workflow definition changed; operation is not safe",
    FluxErrorCode.WORKFLOW_DEFINITION_CHANGED
  );
}
/** Builds a WORKFLOW_NOT_SUSPENDED error reporting the actual status. */
function workflowNotSuspended(status) {
  const message = `Workflow is not suspended (status: ${status})`;
  return new FluxError(message, FluxErrorCode.WORKFLOW_NOT_SUSPENDED, { status });
}
/** Builds a STEP_NOT_FOUND error for the named step. */
function stepNotFound(step) {
  return new FluxError(`Step not found: ${step}`, FluxErrorCode.STEP_NOT_FOUND, { step });
}
/** Builds an INVALID_STEP_INDEX error for the out-of-range index. */
function invalidStepIndex(index) {
  return new FluxError(`Invalid step index: ${index}`, FluxErrorCode.INVALID_STEP_INDEX, { index });
}
/** Builds an EMPTY_WORKFLOW error for a workflow with no steps. */
function emptyWorkflow(workflowName) {
  const message = `Workflow "${workflowName}" has no steps`;
  return new FluxError(message, FluxErrorCode.EMPTY_WORKFLOW, { workflowName });
}
/** Builds a NO_RECOVERY_ACTION error for the named step. */
function noRecoveryAction(stepName) {
  const message = `No recovery action registered for step: ${stepName}`;
  return new FluxError(message, FluxErrorCode.NO_RECOVERY_ACTION, { stepName });
}
/** Builds an INVALID_JSON_POINTER error for the malformed path. */
function invalidJsonPointer(path) {
  return new FluxError(`Invalid JSON Pointer: ${path}`, FluxErrorCode.INVALID_JSON_POINTER, { path });
}
/** Builds an INVALID_PATH_TRAVERSAL error for a non-traversable value. */
function invalidPathTraversal(segment, current) {
  const message = `Cannot access property '${segment}' on ${current}`;
  return new FluxError(message, FluxErrorCode.INVALID_PATH_TRAVERSAL, {
    segment,
    currentType: typeof current
  });
}
/** Builds a CANNOT_REPLACE_ROOT error (root replacement is never allowed). */
function cannotReplaceRoot() {
  return new FluxError("Cannot replace root object", FluxErrorCode.CANNOT_REPLACE_ROOT);
}
/** Builds a CANNOT_REMOVE_ROOT error (root removal is never allowed). */
function cannotRemoveRoot() {
  return new FluxError("Cannot remove root object", FluxErrorCode.CANNOT_REMOVE_ROOT);
}
|
|
115
|
+
|
|
116
|
+
// src/builder/WorkflowBuilder.ts
/**
 * Fluent builder that accumulates step definitions and produces an immutable
 * workflow blueprint via build().
 */
var WorkflowBuilder = class {
  /**
   * Initializes a new workflow builder with a unique name.
   * @param name - The identifier for this workflow definition.
   */
  constructor(name) {
    this._steps = [];
    this._parallelGroupCounter = 0;
    this._name = name;
  }
  /**
   * Declares the expected input type for the workflow.
   * Type-only operation: no runtime effect beyond returning the builder.
   * @returns The builder instance for chaining.
   */
  input() {
    return this;
  }
  /**
   * Declares the structure of the shared data object used across steps.
   * Type-only operation: no runtime effect beyond returning the builder.
   * @returns The builder instance for chaining.
   */
  data() {
    return this;
  }
  /**
   * Sets the semantic version of this workflow definition.
   * @param v - A semantic version string (e.g., "1.0.0").
   * @returns The builder instance for chaining.
   */
  version(v) {
    this._version = v;
    return this;
  }
  /**
   * Attaches a runtime validator (type guard) for the workflow input.
   * @param validator - Guard function used to verify input integrity.
   * @returns The builder instance for chaining.
   */
  validate(validator) {
    this._validateInput = validator;
    return this;
  }
  /**
   * Appends a standard processing step (subject to compensation on failure).
   * @param name - Unique name for the step.
   * @param handler - The business logic to execute.
   * @param options - Optional retries/timeout/when/compensate configuration.
   * @returns The builder instance for chaining.
   */
  step(name, handler, options) {
    const { retries, timeout, when, compensate } = options ?? {};
    this._steps.push({ name, handler, retries, timeout, when, compensate, commit: false });
    return this;
  }
  /**
   * Appends a group of steps that execute concurrently; all members share one
   * generated `parallelGroup` id and must succeed before the workflow proceeds.
   * Per-step settings may be given flat on the config or nested under `options`
   * (the flat value wins when both are present).
   * @param steps - Array of step configurations to run in parallel.
   * @returns The builder instance for chaining.
   */
  stepParallel(steps) {
    if (!steps.length) {
      return this;
    }
    const parallelGroup = `parallel-${this._parallelGroupCounter++}`;
    for (const cfg of steps) {
      const nested = cfg.options ?? {};
      this._steps.push({
        name: cfg.name,
        handler: cfg.handler,
        retries: cfg.retries ?? nested.retries,
        timeout: cfg.timeout ?? nested.timeout,
        when: cfg.when ?? nested.when,
        compensate: cfg.compensate ?? nested.compensate,
        commit: false,
        parallelGroup
      });
    }
    return this;
  }
  /**
   * Appends a "commit" step: a permanent side-effect that is never compensated
   * (note: no `compensate` option is accepted here).
   * @param name - Unique name for the step.
   * @param handler - The side-effect logic to execute.
   * @param options - Optional retries/timeout/when configuration.
   * @returns The builder instance for chaining.
   */
  commit(name, handler, options) {
    const { retries, timeout, when } = options ?? {};
    this._steps.push({ name, handler, retries, timeout, when, commit: true });
    return this;
  }
  /**
   * Finalizes the workflow definition.
   * @returns A complete workflow blueprint ready for execution.
   * @throws {FluxError} EMPTY_WORKFLOW when no steps were defined.
   */
  build() {
    if (!this._steps.length) {
      throw emptyWorkflow(this._name);
    }
    return {
      name: this._name,
      version: this._version,
      steps: this._steps.slice(),
      validateInput: this._validateInput
    };
  }
  /**
   * Produces a structural description of the workflow for introspection.
   * @returns A descriptor containing per-step metadata.
   */
  describe() {
    return {
      name: this._name,
      version: this._version,
      steps: this._steps.map(({ name, commit, retries, timeout, when }) => ({
        name,
        commit: Boolean(commit),
        retries,
        timeout,
        hasCondition: Boolean(when)
      }))
    };
  }
  /** The name of the workflow being built. */
  get name() {
    return this._name;
  }
  /** The number of steps currently defined in the workflow. */
  get stepCount() {
    return this._steps.length;
  }
};
/**
 * Convenience factory for starting a new workflow definition.
 * @param name - The identifier for the workflow.
 * @returns A fresh WorkflowBuilder.
 */
function createWorkflow(name) {
  return new WorkflowBuilder(name);
}
|
|
286
|
+
|
|
287
|
+
// src/engine/BatchExecutor.ts
/**
 * Counting semaphore used to cap the number of concurrently running workflows.
 */
var Semaphore = class {
  /** @param max - Maximum number of concurrently held permits. */
  constructor(max) {
    this.max = max;
    this.current = 0;
    // Waiters resolved FIFO as permits are released.
    this.queue = [];
  }
  /** Acquires a permit, waiting in FIFO order when all permits are in use. */
  async acquire() {
    if (this.current < this.max) {
      this.current++;
      return;
    }
    await new Promise((resolve) => {
      this.queue.push(() => {
        this.current++;
        resolve();
      });
    });
  }
  /** Releases a permit and wakes the next queued waiter, if any. */
  release() {
    this.current--;
    const next = this.queue.shift();
    if (next) {
      next();
    }
  }
};
/**
 * Executes workflows in batches with bounded concurrency.
 */
var BatchExecutor = class {
  /** @param engine - Workflow engine exposing `execute(workflow, input)`. */
  constructor(engine) {
    this.engine = engine;
  }
  /**
   * Execute a workflow for multiple inputs with controlled concurrency.
   *
   * Implemented as a thin wrapper over executeMany(): every input is paired
   * with the same workflow definition. (Previously this method duplicated the
   * entire executeMany() implementation; delegation removes that copy.)
   *
   * @param workflow - The workflow definition or builder to execute.
   * @param inputs - Array of inputs, one per workflow execution.
   * @param options - Execution options (concurrency, error handling, etc.).
   * @returns Batch result containing all individual execution results.
   */
  async execute(workflow, inputs, options) {
    return this.executeMany(
      inputs.map((input) => ({ workflow, input })),
      options
    );
  }
  /**
   * Execute different workflows in a single batch.
   *
   * Each item may use a different workflow definition, enabling heterogeneous
   * batch processing.
   *
   * @param items - Array of `{ workflow, input }` pairs to execute.
   * @param options - Execution options:
   *   - concurrency: max simultaneous executions (default 10)
   *   - continueOnError: keep going after a failure (default true)
   *   - signal: AbortSignal; aborted items are recorded as failures
   *   - onProgress(completed, total, lastResult): called after each item
   * @returns `{ total, succeeded, failed, results, duration }` where `results`
   *   preserves input order via the `index` field.
   */
  async executeMany(items, options) {
    const startTime = Date.now();
    const concurrency = options?.concurrency ?? 10;
    const continueOnError = options?.continueOnError ?? true;
    const semaphore = new Semaphore(concurrency);
    const results = new Array(items.length);
    let succeeded = 0;
    let failed = 0;
    let completed = 0;
    let shouldStop = false;
    // Result recorded for an item skipped because of abort/early-stop.
    const abortedResult = (index, input) => ({
      index,
      input,
      result: null,
      error: new Error("Execution aborted"),
      success: false
    });
    const executeOne = async (item, index) => {
      // Fast-path skip before taking a permit.
      if (options?.signal?.aborted || shouldStop) {
        results[index] = abortedResult(index, item.input);
        failed++;
        completed++;
        options?.onProgress?.(completed, items.length, results[index]);
        return;
      }
      await semaphore.acquire();
      try {
        // Re-check after waiting for a permit: abort/stop may have occurred.
        if (options?.signal?.aborted || shouldStop) {
          results[index] = abortedResult(index, item.input);
          failed++;
        } else {
          const result = await this.engine.execute(item.workflow, item.input);
          const success = result.status === "completed";
          results[index] = { index, input: item.input, result, error: result.error, success };
          if (success) {
            succeeded++;
          } else {
            failed++;
            if (!continueOnError) {
              shouldStop = true;
            }
          }
        }
      } catch (error) {
        // Engine threw (rather than returning a failed result): record it.
        const err = error instanceof Error ? error : new Error(String(error));
        results[index] = { index, input: item.input, result: null, error: err, success: false };
        failed++;
        if (!continueOnError) {
          shouldStop = true;
        }
      } finally {
        semaphore.release();
        completed++;
        options?.onProgress?.(completed, items.length, results[index]);
      }
    };
    await Promise.all(items.map((item, index) => executeOne(item, index)));
    return {
      total: items.length,
      succeeded,
      failed,
      results,
      duration: Date.now() - startTime
    };
  }
};
|
|
504
|
+
|
|
505
|
+
// src/core/ContextManager.ts
|
|
506
|
+
function generateId() {
|
|
507
|
+
return crypto.randomUUID();
|
|
508
|
+
}
|
|
509
|
+
var ContextManager = class {
|
|
510
|
+
/**
|
|
511
|
+
* Initializes a fresh workflow context with a pending status and empty history.
|
|
512
|
+
*
|
|
513
|
+
* @param name - The human-readable identifier for the workflow type.
|
|
514
|
+
* @param input - The initial data required to start the workflow.
|
|
515
|
+
* @param stepCount - Total number of steps defined in the workflow for history pre-allocation.
|
|
516
|
+
* @returns A new WorkflowContext instance.
|
|
517
|
+
*
|
|
518
|
+
* @example
|
|
519
|
+
* ```typescript
|
|
520
|
+
* const ctx = manager.create('signup', { email: 'user@example.com' }, 3);
|
|
521
|
+
* ```
|
|
522
|
+
*/
|
|
523
|
+
create(name, input, stepCount) {
|
|
524
|
+
const history = Array.from({ length: stepCount }, (_, _i) => ({
|
|
525
|
+
name: "",
|
|
526
|
+
status: "pending",
|
|
527
|
+
retries: 0
|
|
528
|
+
}));
|
|
529
|
+
return {
|
|
530
|
+
id: generateId(),
|
|
531
|
+
name,
|
|
532
|
+
input,
|
|
533
|
+
data: {},
|
|
534
|
+
status: "pending",
|
|
535
|
+
currentStep: 0,
|
|
536
|
+
history,
|
|
537
|
+
version: 1
|
|
538
|
+
};
|
|
539
|
+
}
|
|
540
|
+
/**
|
|
541
|
+
* Reconstructs a workflow context from a previously persisted state.
|
|
542
|
+
*
|
|
543
|
+
* Used for resuming suspended workflows or replaying failed ones from a specific point.
|
|
544
|
+
*
|
|
545
|
+
* @param state - The persisted state object.
|
|
546
|
+
* @returns A hydrated WorkflowContext ready for execution.
|
|
547
|
+
*
|
|
548
|
+
* @example
|
|
549
|
+
* ```typescript
|
|
550
|
+
* const state = await storage.load(id);
|
|
551
|
+
* const ctx = manager.restore(state);
|
|
552
|
+
* ```
|
|
553
|
+
*/
|
|
554
|
+
restore(state) {
|
|
555
|
+
return {
|
|
556
|
+
id: state.id,
|
|
557
|
+
name: state.name,
|
|
558
|
+
input: state.input,
|
|
559
|
+
data: { ...state.data },
|
|
560
|
+
status: state.status,
|
|
561
|
+
currentStep: state.currentStep,
|
|
562
|
+
history: state.history.map((h) => ({ ...h })),
|
|
563
|
+
version: state.version || 1
|
|
564
|
+
};
|
|
565
|
+
}
|
|
566
|
+
/**
|
|
567
|
+
* Converts a runtime context into a serializable state for persistence.
|
|
568
|
+
*
|
|
569
|
+
* Captures the current progress, data, and execution history.
|
|
570
|
+
*
|
|
571
|
+
* @param ctx - The active workflow context.
|
|
572
|
+
* @returns A serializable WorkflowState object.
|
|
573
|
+
*
|
|
574
|
+
* @example
|
|
575
|
+
* ```typescript
|
|
576
|
+
* const state = manager.toState(ctx);
|
|
577
|
+
* await storage.save(state);
|
|
578
|
+
* ```
|
|
579
|
+
*/
|
|
580
|
+
toState(ctx) {
|
|
581
|
+
return {
|
|
582
|
+
id: ctx.id,
|
|
583
|
+
name: ctx.name,
|
|
584
|
+
status: ctx.status,
|
|
585
|
+
input: ctx.input,
|
|
586
|
+
data: { ...ctx.data },
|
|
587
|
+
currentStep: ctx.currentStep,
|
|
588
|
+
history: ctx.history.map((h) => ({ ...h })),
|
|
589
|
+
createdAt: /* @__PURE__ */ new Date(),
|
|
590
|
+
updatedAt: /* @__PURE__ */ new Date(),
|
|
591
|
+
version: ctx.version
|
|
592
|
+
};
|
|
593
|
+
}
|
|
594
|
+
/**
|
|
595
|
+
* Updates the overall status of the workflow.
|
|
596
|
+
*
|
|
597
|
+
* @param ctx - The current context.
|
|
598
|
+
* @param status - The new status to apply.
|
|
599
|
+
* @returns A new context instance with the updated status.
|
|
600
|
+
*
|
|
601
|
+
* @example
|
|
602
|
+
* ```typescript
|
|
603
|
+
* const runningCtx = manager.updateStatus(ctx, 'running');
|
|
604
|
+
* ```
|
|
605
|
+
*/
|
|
606
|
+
updateStatus(ctx, status) {
|
|
607
|
+
return {
|
|
608
|
+
...ctx,
|
|
609
|
+
status
|
|
610
|
+
};
|
|
611
|
+
}
|
|
612
|
+
/**
|
|
613
|
+
* Increments the current step pointer.
|
|
614
|
+
*
|
|
615
|
+
* @param ctx - The current context.
|
|
616
|
+
* @returns A new context instance pointing to the next step.
|
|
617
|
+
*
|
|
618
|
+
* @example
|
|
619
|
+
* ```typescript
|
|
620
|
+
* const nextStepCtx = manager.advanceStep(ctx);
|
|
621
|
+
* console.log(nextStepCtx.currentStep); // ctx.currentStep + 1
|
|
622
|
+
* ```
|
|
623
|
+
*/
|
|
624
|
+
advanceStep(ctx) {
|
|
625
|
+
return {
|
|
626
|
+
...ctx,
|
|
627
|
+
currentStep: ctx.currentStep + 1
|
|
628
|
+
};
|
|
629
|
+
}
|
|
630
|
+
/**
|
|
631
|
+
* Assigns a name to a specific step in the execution history.
|
|
632
|
+
*
|
|
633
|
+
* Useful for tracking which step is currently being executed or has been completed.
|
|
634
|
+
*
|
|
635
|
+
* @param ctx - The current context.
|
|
636
|
+
* @param index - The index of the step in the history array.
|
|
637
|
+
* @param name - The name to assign to the step.
|
|
638
|
+
* @returns A new context instance with the updated history.
|
|
639
|
+
*
|
|
640
|
+
* @example
|
|
641
|
+
* ```typescript
|
|
642
|
+
* const namedCtx = manager.setStepName(ctx, 0, 'validate-user');
|
|
643
|
+
* console.log(namedCtx.history[0].name); // 'validate-user'
|
|
644
|
+
* ```
|
|
645
|
+
*/
|
|
646
|
+
setStepName(ctx, index, name) {
|
|
647
|
+
if (!ctx.history[index]) {
|
|
648
|
+
return ctx;
|
|
649
|
+
}
|
|
650
|
+
const history = [...ctx.history];
|
|
651
|
+
history[index] = { ...history[index], name };
|
|
652
|
+
return {
|
|
653
|
+
...ctx,
|
|
654
|
+
history
|
|
655
|
+
};
|
|
656
|
+
}
|
|
657
|
+
};
|
|
658
|
+
|
|
659
|
+
// src/core/StateMachine.ts
// Legal status transitions: each status maps to the statuses it may move to.
var TRANSITIONS = {
  pending: ["running", "failed"],
  running: ["paused", "completed", "failed", "suspended", "rolling_back"],
  paused: ["running", "failed"],
  suspended: ["running", "failed"],
  rolling_back: ["rolled_back", "failed", "compensation_failed"],
  rolled_back: ["pending"],
  // allow retry from scratch
  completed: [],
  // terminal state
  failed: ["pending"],
  // allow retry
  compensation_failed: ["pending"]
  // allow retry even if compensation failed
};
/**
 * Event-emitting finite state machine that guards workflow status changes.
 * Dispatches a "transition" CustomEvent (detail: { from, to }) on every
 * successful transition.
 */
var StateMachine = class extends EventTarget {
  constructor(...args) {
    super(...args);
    this._status = "pending";
  }
  /** The current operational status of the workflow. */
  get status() {
    return this._status;
  }
  /**
   * Evaluates whether moving to `to` is legal from the current status.
   *
   * @param to - The target status to check.
   * @returns True if the transition is permitted by the transition map.
   */
  canTransition(to) {
    return TRANSITIONS[this._status].includes(to);
  }
  /**
   * Moves the workflow to a new status if the transition is valid, then emits
   * a "transition" event (listeners observe the already-updated status).
   *
   * @param to - The target status.
   * @throws {Error} INVALID_STATE_TRANSITION when the move is illegal.
   */
  transition(to) {
    if (!this.canTransition(to)) {
      throw invalidStateTransition(this._status, to);
    }
    const detail = { from: this._status, to };
    this._status = to;
    this.dispatchEvent(new CustomEvent("transition", { detail }));
  }
  /**
   * Overrides the current status without validation. Use only when restoring
   * from persisted storage or replaying known-valid history.
   *
   * @param status - The status to force set.
   */
  forceStatus(status) {
    this._status = status;
  }
  /**
   * Determines whether no further execution is possible.
   *
   * @returns True for "completed", "failed", "rolled_back", or
   *   "compensation_failed".
   */
  isTerminal() {
    return ["completed", "failed", "rolled_back", "compensation_failed"].includes(this._status);
  }
  /**
   * Checks whether the workflow may be started or resumed.
   *
   * @returns True for "pending", "paused", or "suspended".
   */
  canExecute() {
    return ["pending", "paused", "suspended"].includes(this._status);
  }
};
|
|
774
|
+
|
|
775
|
+
// src/orbit/CronTrigger.ts
|
|
776
|
+
var _cronparser = require('cron-parser');
|
|
777
|
+
var CronTrigger = (_class4 = class {
  /**
   * Creates a new CronTrigger instance.
   *
   * @param engine - The Flux engine to use for executing scheduled workflows.
   */
  constructor(engine) {;_class4.prototype.__init6.call(this);_class4.prototype.__init7.call(this);_class4.prototype.__init8.call(this);
    this.engine = engine;
  }
  __init6() {this.schedules = /* @__PURE__ */ new Map()}
  __init7() {this.timers = /* @__PURE__ */ new Map()}
  __init8() {this.running = false}
  /**
   * Starts the scheduler. Calling it while already running is a no-op.
   */
  start() {
    if (this.running) {
      return;
    }
    this.running = true;
    this.refreshAll();
  }
  /**
   * Stops the scheduler and clears all pending timers.
   */
  stop() {
    this.running = false;
    for (const timer of this.timers.values()) {
      clearTimeout(timer);
    }
    this.timers.clear();
  }
  /**
   * Adds a new schedule or updates an existing one. Updating a schedule to
   * `enabled: false` also cancels its pending timer.
   *
   * @param options - The schedule configuration.
   */
  addSchedule(options) {
    this.schedules.set(options.id, options);
    if (this.running) {
      this.refreshSchedule(options.id);
    }
  }
  /**
   * Removes a schedule and cancels its pending timer, if any.
   *
   * @param id - The ID of the schedule to remove.
   */
  removeSchedule(id) {
    this.schedules.delete(id);
    const timer = this.timers.get(id);
    if (timer) {
      clearTimeout(timer);
      this.timers.delete(id);
    }
  }
  /**
   * Refreshes all registered schedules.
   * @private
   */
  refreshAll() {
    for (const id of this.schedules.keys()) {
      this.refreshSchedule(id);
    }
  }
  /**
   * Calculates the next execution time and (re)arms the timer for a schedule.
   *
   * @param id - The ID of the schedule to refresh.
   * @private
   */
  refreshSchedule(id) {
    // BUGFIX: cancel any stale timer BEFORE the existence/enabled early
    // return. Previously, a schedule updated to `enabled: false` (via
    // addSchedule) kept its old timer armed and fired one more time.
    const existingTimer = this.timers.get(id);
    if (existingTimer) {
      clearTimeout(existingTimer);
      this.timers.delete(id);
    }
    const schedule = this.schedules.get(id);
    if (!schedule || schedule.enabled === false) {
      return;
    }
    try {
      const interval = _cronparser.CronExpressionParser.parse(schedule.cron);
      const nextDate = interval.next().toDate();
      const delay = nextDate.getTime() - Date.now();
      if (delay <= 0) {
        // Next occurrence is already due; re-evaluate shortly rather than firing immediately.
        const timer2 = setTimeout(() => this.refreshSchedule(id), 1e3);
        this.timers.set(id, timer2);
        return;
      }
      const timer = setTimeout(async () => {
        if (!this.running) {
          return;
        }
        try {
          await this.engine.execute(schedule.workflow, schedule.input);
        } catch (error) {
          console.error(`[CronTrigger] Failed to execute scheduled workflow "${id}":`, error);
        } finally {
          // Re-arm for the next occurrence unless stopped or removed meanwhile.
          if (this.running && this.schedules.has(id)) {
            this.refreshSchedule(id);
          }
        }
      }, delay);
      this.timers.set(id, timer);
    } catch (error) {
      console.error(`[CronTrigger] Invalid cron expression for schedule "${id}":`, error);
    }
  }
  /**
   * Lists all registered schedules.
   * @returns An array of schedule configurations.
   */
  listSchedules() {
    return Array.from(this.schedules.values());
  }
}, _class4);
|
|
893
|
+
|
|
894
|
+
// src/storage/MemoryStorage.ts
|
|
895
|
+
var MemoryStorage = (_class5 = class {constructor() { _class5.prototype.__init9.call(this); }
  /** Initializes the backing Map (compiled class-field initializer). */
  __init9() {this.store = /* @__PURE__ */ new Map()}
  /**
   * Persists a workflow state in the in-memory Map, refreshing its
   * `updatedAt` stamp with the current time.
   *
   * @param state - The workflow state to persist.
   */
  async save(state) {
    const record = { ...state, updatedAt: /* @__PURE__ */ new Date() };
    this.store.set(state.id, record);
  }
  /**
   * Fetches a workflow state by ID.
   *
   * @param id - The unique identifier of the workflow.
   * @returns The stored state, or null when no entry exists.
   */
  async load(id) {
    return _nullishCoalesce(this.store.get(id), () => ( null));
  }
  /**
   * Returns stored workflow states matching the given filter, newest first.
   *
   * Supports name/status/version filtering plus offset/limit pagination.
   *
   * @param filter - Optional filtering and pagination criteria.
   * @returns Matching states sorted by creation date, descending.
   */
  async list(filter) {
    let matches = Array.from(this.store.values());
    if (_optionalChain([filter, 'optionalAccess', _37 => _37.name])) {
      matches = matches.filter((entry) => entry.name === filter.name);
    }
    if (_optionalChain([filter, 'optionalAccess', _38 => _38.status])) {
      const wanted = Array.isArray(filter.status) ? filter.status : [filter.status];
      matches = matches.filter((entry) => wanted.includes(entry.status));
    }
    if (_optionalChain([filter, 'optionalAccess', _39 => _39.version])) {
      matches = matches.filter((entry) => entry.definitionVersion === filter.version);
    }
    matches.sort((first, second) => second.createdAt.getTime() - first.createdAt.getTime());
    if (_optionalChain([filter, 'optionalAccess', _40 => _40.offset])) {
      matches = matches.slice(filter.offset);
    }
    if (_optionalChain([filter, 'optionalAccess', _41 => _41.limit])) {
      matches = matches.slice(0, filter.limit);
    }
    return matches;
  }
  /**
   * Deletes a workflow state.
   *
   * @param id - The unique identifier of the workflow to delete.
   */
  async delete(id) {
    this.store.delete(id);
  }
  /**
   * No-op initialization; present to satisfy the WorkflowStorage interface.
   */
  async init() {
  }
  /**
   * Drops every stored workflow state and resets the storage.
   */
  async close() {
    this.store.clear();
  }
  /**
   * Counts the workflow states currently stored (handy for test assertions).
   *
   * @returns The number of entries in the store.
   */
  size() {
    return this.store.size;
  }
}, _class5);
|
|
982
|
+
|
|
983
|
+
// src/core/executionUpdater.ts
|
|
984
|
+
// Produces a new step-execution record: a shallow merge where fields in
// `updates` override those already on `execution`. Neither input is mutated.
function updateStepExecution(execution, updates) {
  return Object.assign({}, execution, updates);
}
|
|
990
|
+
|
|
991
|
+
// src/core/StepExecutor.ts
|
|
992
|
+
var StepExecutor = class {
  /**
   * Creates a new StepExecutor with global defaults.
   *
   * @param options - Default retry count, default timeout, and retry hook.
   */
  constructor(options = {}) {
    this.defaultRetries = _nullishCoalesce(options.defaultRetries, () => ( 3));
    this.defaultTimeout = _nullishCoalesce(options.defaultTimeout, () => ( 3e4));
    this.onRetry = options.onRetry;
  }
  /**
   * Runs a single step definition against a workflow context.
   *
   * Evaluates the optional `when` guard (skipping the step when it returns
   * false), then attempts the handler up to `retries + 1` times with each
   * attempt bounded by the timeout. A handler returning a `flux_wait` marker
   * suspends the step instead of completing it; exhausted retries produce a
   * failed result carrying the last error.
   *
   * @param step - The step definition to execute.
   * @param ctx - The current workflow context.
   * @param execution - The execution record being updated.
   * @returns The execution result plus the updated execution record.
   */
  async execute(step, ctx, execution) {
    const maxRetries = _nullishCoalesce(step.retries, () => ( this.defaultRetries));
    const timeout = _nullishCoalesce(step.timeout, () => ( this.defaultTimeout));
    const startedAtMs = Date.now();
    let record = execution;
    const guard = step.when;
    if (guard && !guard(ctx)) {
      record = updateStepExecution(record, { status: "skipped" });
      return { result: { success: true, duration: 0 }, execution: record };
    }
    record = updateStepExecution(record, {
      status: "running",
      startedAt: /* @__PURE__ */ new Date()
    });
    let lastError;
    for (let attempt = 0; attempt <= maxRetries; attempt++) {
      record = updateStepExecution(record, { retries: attempt });
      try {
        const outcome = await this.executeWithTimeout(step.handler, ctx, timeout);
        const isWait = Boolean(outcome) && typeof outcome === "object" && "__kind" in outcome && outcome.__kind === "flux_wait";
        const elapsed = Date.now() - startedAtMs;
        if (isWait) {
          // Suspension signal: park the step until the awaited signal arrives.
          record = updateStepExecution(record, {
            status: "suspended",
            waitingFor: outcome.signal,
            suspendedAt: /* @__PURE__ */ new Date(),
            duration: elapsed
          });
          return {
            result: { success: true, suspended: true, waitingFor: outcome.signal, duration: elapsed },
            execution: record
          };
        }
        record = updateStepExecution(record, {
          status: "completed",
          completedAt: /* @__PURE__ */ new Date(),
          duration: elapsed
        });
        return { result: { success: true, duration: elapsed }, execution: record };
      } catch (error) {
        lastError = error instanceof Error ? error : new Error(String(error));
        if (attempt < maxRetries) {
          // Surface the retry to the hook, then back off exponentially (capped at 10s).
          await _optionalChain([this, 'access', _42 => _42.onRetry, 'optionalCall', _43 => _43(step, ctx, lastError, attempt + 1, maxRetries)]);
          const backoff = Math.min(1e3 * 2 ** attempt, 1e4);
          await this.sleep(backoff);
        }
      }
    }
    const totalDuration = Date.now() - startedAtMs;
    record = updateStepExecution(record, {
      status: "failed",
      completedAt: /* @__PURE__ */ new Date(),
      duration: totalDuration,
      error: _optionalChain([lastError, 'optionalAccess', _44 => _44.message])
    });
    return {
      result: { success: false, error: lastError, duration: totalDuration },
      execution: record
    };
  }
  /**
   * Races the step handler against a timeout.
   *
   * @param handler - The user-defined step handler.
   * @param ctx - The workflow context passed to the handler.
   * @param timeout - Milliseconds before the attempt is aborted.
   * @returns The handler result or a suspension signal.
   * @throws {Error} "Step timeout" when the deadline elapses first.
   * @private
   */
  async executeWithTimeout(handler, ctx, timeout) {
    let timer = null;
    try {
      const deadline = new Promise((_, reject) => {
        timer = setTimeout(() => reject(new Error("Step timeout")), timeout);
      });
      const work = Promise.resolve(handler(ctx));
      return await Promise.race([work, deadline]);
    } finally {
      if (timer) {
        clearTimeout(timer);
      }
    }
  }
  /**
   * Resolves after the given number of milliseconds.
   *
   * @param ms - Milliseconds to sleep.
   * @private
   */
  async sleep(ms) {
    return new Promise((done) => setTimeout(done, ms));
  }
};
|
|
1148
|
+
|
|
1149
|
+
// src/core/DataOptimizer.ts
|
|
1150
|
+
var DataOptimizer = class _DataOptimizer {
  /**
   * Creates a new DataOptimizer.
   *
   * @param config - Optional threshold (bytes) and default storage location.
   */
  constructor(config = {}) {
    this.threshold = _nullishCoalesce(config.threshold, () => ( 10 * 1024));
    this.defaultLocation = _nullishCoalesce(config.defaultLocation, () => ( "database"));
  }
  /**
   * Approximates the serialized size of `data` in bytes via JSON.stringify.
   *
   * @param data - The value to measure.
   * @returns Estimated byte size; Number.MAX_SAFE_INTEGER when serialization fails.
   */
  static estimateSize(data) {
    try {
      return JSON.stringify(data).length;
    } catch (e2) {
      return Number.MAX_SAFE_INTEGER;
    }
  }
  /**
   * Replaces top-level values larger than the threshold with DataReference stubs.
   *
   * Null/undefined values and pre-existing references pass through untouched.
   *
   * @param data - The data object to optimize.
   * @param customThreshold - Optional per-call threshold override.
   * @returns A new object with oversized values swapped for references.
   */
  optimizeForStorage(data, customThreshold) {
    const limit = _nullishCoalesce(customThreshold, () => ( this.threshold));
    const optimized = {};
    for (const [key, value] of Object.entries(data)) {
      if (value === null || value === void 0 || this.isReference(value)) {
        optimized[key] = value;
        continue;
      }
      const byteSize = _DataOptimizer.estimateSize(value);
      optimized[key] = byteSize > limit ? this.createReference(value, byteSize) : value;
    }
    return optimized;
  }
  /**
   * Tests whether a value is a DataReference stub.
   *
   * @param value - The value to inspect.
   * @returns True when the value carries `__ref: true`.
   */
  isReference(value) {
    if (typeof value !== "object" || value === null) {
      return false;
    }
    return "__ref" in value && value.__ref === true;
  }
  /**
   * Builds a DataReference stub for an oversized value.
   *
   * @param _value - The original value (not stored on the stub).
   * @param size - Its estimated size in bytes.
   * @returns A DataReference object.
   * @private
   */
  createReference(_value, size) {
    return {
      __ref: true,
      id: this.generateReferenceId(),
      location: this.defaultLocation,
      size
      // Note: the actual storage and load implementation
      // is the storage adapter's responsibility.
    };
  }
  /**
   * Produces a unique identifier for a data reference.
   *
   * @returns A unique reference ID.
   * @private
   */
  generateReferenceId() {
    const suffix = Math.random().toString(36).substring(2, 11);
    return `ref_${Date.now()}_${suffix}`;
  }
  /**
   * Replaces DataReference stubs with their loaded values (inverse of
   * optimizeForStorage). References without a `load` callback are kept as-is.
   *
   * @param data - The data object possibly containing references.
   * @returns The data with loadable references resolved.
   */
  async resolveReferences(data) {
    const resolved = {};
    for (const [key, value] of Object.entries(data)) {
      const loadable = this.isReference(value) && value.load;
      resolved[key] = loadable ? await value.load() : value;
    }
    return resolved;
  }
  /**
   * Computes size statistics comparing original and optimized payloads.
   *
   * @param original - The pre-optimization object.
   * @param optimized - The post-optimization object.
   * @returns Original/optimized sizes, percentage reduction, and reference count.
   */
  getOptimizationStats(original, optimized) {
    const originalSize = _DataOptimizer.estimateSize(original);
    const optimizedSize = _DataOptimizer.estimateSize(optimized);
    const referencesCreated = Object.values(optimized).filter((entry) => this.isReference(entry)).length;
    const reduction = (originalSize - optimizedSize) / originalSize * 100;
    return { originalSize, optimizedSize, reduction, referencesCreated };
  }
};
|
|
1323
|
+
|
|
1324
|
+
// src/engine/CompensationRetryPolicy.ts
|
|
1325
|
+
var CompensationRetryPolicy = class {
  /**
   * Creates a retry policy for compensation operations.
   *
   * @param config - Optional overrides: maxAttempts, initialDelay,
   *   backoffCoefficient, maxDelay, jitter.
   */
  constructor(config = {}) {
    this.config = {
      maxAttempts: _nullishCoalesce(config.maxAttempts, () => ( 3)),
      initialDelay: _nullishCoalesce(config.initialDelay, () => ( 1e3)),
      backoffCoefficient: _nullishCoalesce(config.backoffCoefficient, () => ( 2)),
      maxDelay: _nullishCoalesce(config.maxDelay, () => ( 3e4)),
      jitter: _nullishCoalesce(config.jitter, () => ( 0.1))
    };
  }
  /**
   * Runs `operation`, retrying every failure with exponential backoff and
   * jitter until it succeeds or the attempt budget is exhausted.
   *
   * @param operation - The async operation to run.
   * @returns A result carrying success flag, value/error, attempts, and duration.
   */
  async execute(operation) {
    const begin = Date.now();
    let failure;
    let attempts = 0;
    for (let round = 0; round < this.config.maxAttempts; round++) {
      attempts = round + 1;
      try {
        const value = await operation();
        return { success: true, value, attempts, duration: Date.now() - begin };
      } catch (err) {
        failure = err instanceof Error ? err : new Error(String(err));
        if (attempts < this.config.maxAttempts) {
          await this.sleep(this.calculateDelay(attempts));
        }
      }
    }
    return { success: false, error: failure, attempts, duration: Date.now() - begin };
  }
  /**
   * Exponential-backoff delay for the given attempt, capped at maxDelay and
   * randomized by +/- jitter to avoid synchronized retries.
   *
   * @param attempt - The attempt number (1-indexed).
   * @returns The delay in milliseconds (never negative).
   * @private
   */
  calculateDelay(attempt) {
    const raw = this.config.initialDelay * this.config.backoffCoefficient ** (attempt - 1);
    const capped = Math.min(raw, this.config.maxDelay);
    const spread = capped * this.config.jitter;
    const offset = (Math.random() - 0.5) * 2 * spread;
    return Math.max(0, capped + offset);
  }
  /**
   * Resolves after the given number of milliseconds.
   *
   * @param ms - Duration in milliseconds.
   * @private
   */
  async sleep(ms) {
    return new Promise((done) => setTimeout(done, ms));
  }
  /**
   * Decides whether an error is worth retrying. The base policy retries
   * everything; subclasses may override for selective retry logic.
   *
   * @param _error - The error under consideration.
   * @returns True in the base policy.
   */
  isRetryable(_error) {
    return true;
  }
  /**
   * Like execute(), but stops retrying as soon as `isRetryable` rejects
   * the encountered error.
   *
   * @param operation - The async operation to run.
   * @param isRetryable - Predicate: return false to stop retrying.
   * @returns A result carrying success flag, value/error, attempts, and duration.
   */
  async executeWithPredicate(operation, isRetryable) {
    const begin = Date.now();
    let failure;
    let attempts = 0;
    while (attempts < this.config.maxAttempts) {
      attempts += 1;
      try {
        const value = await operation();
        return { success: true, value, attempts, duration: Date.now() - begin };
      } catch (err) {
        failure = err instanceof Error ? err : new Error(String(err));
        if (!isRetryable(failure)) {
          break;
        }
        if (attempts < this.config.maxAttempts) {
          await this.sleep(this.calculateDelay(attempts));
        }
      }
    }
    return { success: false, error: failure, attempts, duration: Date.now() - begin };
  }
  /**
   * Snapshot of the effective retry configuration.
   *
   * @returns A shallow copy of the resolved config.
   */
  getConfig() {
    return { ...this.config };
  }
};
|
|
1488
|
+
|
|
1489
|
+
// src/engine/stateUpdater.ts
|
|
1490
|
+
// Produces a new workflow context: a shallow merge where fields in `updates`
// override those already on `ctx`. Neither input is mutated.
function updateWorkflowContext(ctx, updates) {
  return Object.assign({}, ctx, updates);
}
|
|
1496
|
+
|
|
1497
|
+
// src/engine/FluxEngineHelpers.ts
|
|
1498
|
+
// Builds a StepExecutor wired to the engine config, surfacing every retry
// attempt as a "step:retry" trace event.
function createStepExecutor(config, traceEmitter) {
  const handleRetry = async (step, ctx, error, attempt, maxRetries) => {
    const event = {
      type: "step:retry",
      timestamp: Date.now(),
      workflowId: ctx.id,
      workflowName: ctx.name,
      stepName: step.name,
      stepIndex: ctx.currentStep,
      commit: Boolean(step.commit),
      retries: attempt,
      maxRetries,
      error: error.message,
      status: "running"
    };
    await traceEmitter.emit(event);
  };
  return new StepExecutor({
    defaultRetries: config.defaultRetries,
    defaultTimeout: config.defaultTimeout,
    onRetry: handleRetry
  });
}
|
|
1519
|
+
// Normalizes a workflow input: builders are materialized via build(), while
// plain definitions pass through untouched.
function resolveDefinition(workflow) {
  if (workflow instanceof WorkflowBuilder) {
    return workflow.build();
  }
  return workflow;
}
|
|
1522
|
+
// Resolves where execution should (re)start inside a definition.
// Accepts an explicit index (validated), a step name (looked up), or neither
// (fallback clamped into the valid range).
function resolveStartIndex(definition, fromStep, fallback) {
  const stepCount = definition.steps.length;
  if (typeof fromStep === "number") {
    const outOfRange = fromStep < 0 || fromStep >= stepCount;
    if (outOfRange) {
      throw invalidStepIndex(fromStep);
    }
    return fromStep;
  }
  if (typeof fromStep === "string") {
    const byName = definition.steps.findIndex((candidate) => candidate.name === fromStep);
    if (byName === -1) {
      throw stepNotFound(fromStep);
    }
    return byName;
  }
  return Math.max(0, Math.min(fallback, stepCount - 1));
}
|
|
1538
|
+
// Returns every history entry at or after startIndex to its pristine pending
// state, mutating the records in place so existing references stay valid.
// Null/undefined slots are left untouched.
function resetHistoryFrom(ctx, startIndex) {
  const blank = {
    status: "pending",
    startedAt: void 0,
    completedAt: void 0,
    duration: void 0,
    error: void 0,
    retries: 0
  };
  for (let index = startIndex; index < ctx.history.length; index++) {
    const entry = ctx.history[index];
    if (entry) {
      Object.assign(entry, blank);
    }
  }
}
|
|
1552
|
+
async function handleExecutionResult(definition, ctx, result, contextManager, rollbackManager, storage) {
|
|
1553
|
+
if (result.status === "failed" && result.error) {
|
|
1554
|
+
const failedIndex = result.history.findIndex((h) => h.status === "failed");
|
|
1555
|
+
if (failedIndex !== -1) {
|
|
1556
|
+
const latestState = await storage.load(ctx.id);
|
|
1557
|
+
const restoredCtx = contextManager.restore({
|
|
1558
|
+
...contextManager.toState(ctx),
|
|
1559
|
+
history: result.history,
|
|
1560
|
+
data: result.data,
|
|
1561
|
+
status: "failed",
|
|
1562
|
+
version: _nullishCoalesce(_optionalChain([latestState, 'optionalAccess', _45 => _45.version]), () => ( result.version))
|
|
1563
|
+
});
|
|
1564
|
+
const rolledBackCtx = await rollbackManager.rollback(
|
|
1565
|
+
definition,
|
|
1566
|
+
restoredCtx,
|
|
1567
|
+
failedIndex,
|
|
1568
|
+
result.error
|
|
1569
|
+
);
|
|
1570
|
+
return {
|
|
1571
|
+
...result,
|
|
1572
|
+
status: rolledBackCtx.status,
|
|
1573
|
+
history: rolledBackCtx.history,
|
|
1574
|
+
data: rolledBackCtx.data
|
|
1575
|
+
};
|
|
1576
|
+
}
|
|
1577
|
+
}
|
|
1578
|
+
return result;
|
|
1579
|
+
}
|
|
1580
|
+
async function persistContext(ctx, storage, contextManager, definitionVersion) {
|
|
1581
|
+
const state = contextManager.toState(ctx);
|
|
1582
|
+
const stored = await storage.load(state.id);
|
|
1583
|
+
if (stored && stored.version !== state.version) {
|
|
1584
|
+
throw new FluxError(
|
|
1585
|
+
"Concurrent modification detected",
|
|
1586
|
+
"CONCURRENT_MODIFICATION" /* CONCURRENT_MODIFICATION */
|
|
1587
|
+
);
|
|
1588
|
+
}
|
|
1589
|
+
if (definitionVersion !== void 0) {
|
|
1590
|
+
state.definitionVersion = definitionVersion;
|
|
1591
|
+
} else if (_optionalChain([stored, 'optionalAccess', _46 => _46.definitionVersion])) {
|
|
1592
|
+
state.definitionVersion = stored.definitionVersion;
|
|
1593
|
+
}
|
|
1594
|
+
const nextVersion = state.version + 1;
|
|
1595
|
+
await storage.save({ ...state, version: nextVersion });
|
|
1596
|
+
const result = updateWorkflowContext(ctx, { version: nextVersion });
|
|
1597
|
+
return result;
|
|
1598
|
+
}
|
|
1599
|
+
async function acquireEngineLock(config, workflowId) {
|
|
1600
|
+
if (!config.lockProvider) {
|
|
1601
|
+
return null;
|
|
1602
|
+
}
|
|
1603
|
+
const owner = `node_${Math.random().toString(36).substring(7)}`;
|
|
1604
|
+
return await config.lockProvider.acquire(workflowId, owner, 3e4);
|
|
1605
|
+
}
|
|
1606
|
+
|
|
1607
|
+
// src/core/IdempotencyGuard.ts
|
|
1608
|
+
var IdempotencyGuard = class {
|
|
1609
|
+
/**
|
|
1610
|
+
* Checks if a step can be compensated based on its execution history.
|
|
1611
|
+
*
|
|
1612
|
+
* A step can be compensated if:
|
|
1613
|
+
* - It has completed successfully
|
|
1614
|
+
* - It has NOT already been compensated
|
|
1615
|
+
* - It is not currently being compensated
|
|
1616
|
+
*
|
|
1617
|
+
* @param ctx - The current workflow context.
|
|
1618
|
+
* @param stepName - The name of the step to check.
|
|
1619
|
+
* @returns True if the step can be safely compensated.
|
|
1620
|
+
*
|
|
1621
|
+
* @example
|
|
1622
|
+
* ```typescript
|
|
1623
|
+
* const guard = new IdempotencyGuard();
|
|
1624
|
+
*
|
|
1625
|
+
* if (guard.canCompensate(ctx, 'reserve-inventory')) {
|
|
1626
|
+
* await inventoryService.release(ctx.data.reservationId);
|
|
1627
|
+
* }
|
|
1628
|
+
* ```
|
|
1629
|
+
*/
|
|
1630
|
+
canCompensate(ctx, stepName) {
|
|
1631
|
+
const execution = this.findExecution(ctx, stepName);
|
|
1632
|
+
if (!execution) {
|
|
1633
|
+
return false;
|
|
1634
|
+
}
|
|
1635
|
+
return execution.status === "completed";
|
|
1636
|
+
}
|
|
1637
|
+
/**
|
|
1638
|
+
* Checks if a step has already been compensated.
|
|
1639
|
+
*
|
|
1640
|
+
* @param ctx - The current workflow context.
|
|
1641
|
+
* @param stepName - The name of the step to check.
|
|
1642
|
+
* @returns True if the step has already been compensated.
|
|
1643
|
+
*
|
|
1644
|
+
* @example
|
|
1645
|
+
* ```typescript
|
|
1646
|
+
* if (guard.isCompensated(ctx, 'book-flight')) {
|
|
1647
|
+
* console.log('Flight booking already cancelled');
|
|
1648
|
+
* }
|
|
1649
|
+
* ```
|
|
1650
|
+
*/
|
|
1651
|
+
isCompensated(ctx, stepName) {
|
|
1652
|
+
const execution = this.findExecution(ctx, stepName);
|
|
1653
|
+
return _optionalChain([execution, 'optionalAccess', _47 => _47.status]) === "compensated";
|
|
1654
|
+
}
|
|
1655
|
+
/**
|
|
1656
|
+
* Checks if a step is currently being compensated.
|
|
1657
|
+
*
|
|
1658
|
+
* @param ctx - The current workflow context.
|
|
1659
|
+
* @param stepName - The name of the step to check.
|
|
1660
|
+
* @returns True if the step is currently in the compensating state.
|
|
1661
|
+
*/
|
|
1662
|
+
isCompensating(ctx, stepName) {
|
|
1663
|
+
const execution = this.findExecution(ctx, stepName);
|
|
1664
|
+
return _optionalChain([execution, 'optionalAccess', _48 => _48.status]) === "compensating";
|
|
1665
|
+
}
|
|
1666
|
+
/**
|
|
1667
|
+
* Retrieves the compensation timestamp for a step.
|
|
1668
|
+
*
|
|
1669
|
+
* @param ctx - The current workflow context.
|
|
1670
|
+
* @param stepName - The name of the step.
|
|
1671
|
+
* @returns The compensation timestamp if available, otherwise undefined.
|
|
1672
|
+
*/
|
|
1673
|
+
getCompensationTimestamp(ctx, stepName) {
|
|
1674
|
+
const execution = this.findExecution(ctx, stepName);
|
|
1675
|
+
return _optionalChain([execution, 'optionalAccess', _49 => _49.compensatedAt]);
|
|
1676
|
+
}
|
|
1677
|
+
/**
|
|
1678
|
+
* Counts how many times compensation has been attempted for a step.
|
|
1679
|
+
*
|
|
1680
|
+
* This is useful for implementing retry limits or circuit breakers.
|
|
1681
|
+
*
|
|
1682
|
+
* @param ctx - The current workflow context.
|
|
1683
|
+
* @param stepName - The name of the step.
|
|
1684
|
+
* @returns The number of compensation attempts (0 if never attempted).
|
|
1685
|
+
*/
|
|
1686
|
+
getCompensationAttempts(ctx, stepName) {
|
|
1687
|
+
const executions = ctx.history.filter((h) => h.name === stepName);
|
|
1688
|
+
return executions.filter((e) => e.status === "compensated" || e.status === "compensating").length;
|
|
1689
|
+
}
|
|
1690
|
+
/**
|
|
1691
|
+
* Finds the most recent execution record for a given step.
|
|
1692
|
+
*
|
|
1693
|
+
* @param ctx - The current workflow context.
|
|
1694
|
+
* @param stepName - The name of the step to find.
|
|
1695
|
+
* @returns The step execution record, or undefined if not found.
|
|
1696
|
+
* @private
|
|
1697
|
+
*/
|
|
1698
|
+
findExecution(ctx, stepName) {
|
|
1699
|
+
for (let i = ctx.history.length - 1; i >= 0; i--) {
|
|
1700
|
+
if (ctx.history[i].name === stepName) {
|
|
1701
|
+
return ctx.history[i];
|
|
1702
|
+
}
|
|
1703
|
+
}
|
|
1704
|
+
return void 0;
|
|
1705
|
+
}
|
|
1706
|
+
/**
|
|
1707
|
+
* Verifies that all completed steps have been compensated.
|
|
1708
|
+
*
|
|
1709
|
+
* Used to ensure rollback has fully completed.
|
|
1710
|
+
*
|
|
1711
|
+
* @param ctx - The current workflow context.
|
|
1712
|
+
* @param stepNames - Array of step names that should be compensated.
|
|
1713
|
+
* @returns True if all specified steps are compensated.
|
|
1714
|
+
*
|
|
1715
|
+
* @example
|
|
1716
|
+
* ```typescript
|
|
1717
|
+
* const guard = new IdempotencyGuard();
|
|
1718
|
+
* const completedSteps = ['reserve', 'charge', 'notify'];
|
|
1719
|
+
*
|
|
1720
|
+
* if (guard.allCompensated(ctx, completedSteps)) {
|
|
1721
|
+
* console.log('Rollback complete');
|
|
1722
|
+
* }
|
|
1723
|
+
* ```
|
|
1724
|
+
*/
|
|
1725
|
+
allCompensated(ctx, stepNames) {
|
|
1726
|
+
return stepNames.every((name) => this.isCompensated(ctx, name));
|
|
1727
|
+
}
|
|
1728
|
+
/**
|
|
1729
|
+
* Finds all steps that need compensation but haven't been compensated yet.
|
|
1730
|
+
*
|
|
1731
|
+
* @param ctx - The current workflow context.
|
|
1732
|
+
* @param stepNames - Array of step names to check.
|
|
1733
|
+
* @returns Array of step names that still need compensation.
|
|
1734
|
+
*
|
|
1735
|
+
* @example
|
|
1736
|
+
* ```typescript
|
|
1737
|
+
* const guard = new IdempotencyGuard();
|
|
1738
|
+
* const pending = guard.getPendingCompensations(ctx, ['step1', 'step2', 'step3']);
|
|
1739
|
+
* console.log(`Still need to compensate: ${pending.join(', ')}`);
|
|
1740
|
+
* ```
|
|
1741
|
+
*/
|
|
1742
|
+
getPendingCompensations(ctx, stepNames) {
|
|
1743
|
+
return stepNames.filter((name) => {
|
|
1744
|
+
const execution = this.findExecution(ctx, name);
|
|
1745
|
+
return _optionalChain([execution, 'optionalAccess', _50 => _50.status]) === "completed";
|
|
1746
|
+
});
|
|
1747
|
+
}
|
|
1748
|
+
};
|
|
1749
|
+
|
|
1750
|
+
// src/engine/RecoveryManager.ts
|
|
1751
|
+
var RecoveryManager = (_class6 = class {constructor() { _class6.prototype.__init10.call(this);_class6.prototype.__init11.call(this);_class6.prototype.__init12.call(this); }
|
|
1752
|
+
__init10() {this.recoveryCallbacks = []}
|
|
1753
|
+
__init11() {this.actions = /* @__PURE__ */ new Map()}
|
|
1754
|
+
__init12() {this.pendingRecoveries = /* @__PURE__ */ new Map()}
|
|
1755
|
+
/**
|
|
1756
|
+
* Registers a callback to be invoked when recovery is needed.
|
|
1757
|
+
*
|
|
1758
|
+
* @param callback - Function to call when a recovery event occurs.
|
|
1759
|
+
*
|
|
1760
|
+
* @example
|
|
1761
|
+
* ```typescript
|
|
1762
|
+
* manager.onRecoveryNeeded(async (ctx, stepName, error) => {
|
|
1763
|
+
* await slack.send(`#alerts`, `Workflow ${ctx.id} needs recovery at ${stepName}`);
|
|
1764
|
+
* });
|
|
1765
|
+
* ```
|
|
1766
|
+
*/
|
|
1767
|
+
onRecoveryNeeded(callback) {
|
|
1768
|
+
this.recoveryCallbacks.push(callback);
|
|
1769
|
+
}
|
|
1770
|
+
/**
|
|
1771
|
+
* Emits a recovery needed event.
|
|
1772
|
+
*
|
|
1773
|
+
* @param ctx - The workflow context.
|
|
1774
|
+
* @param stepName - The step that failed compensation.
|
|
1775
|
+
* @param error - The error that occurred.
|
|
1776
|
+
*/
|
|
1777
|
+
async notifyRecoveryNeeded(ctx, stepName, error) {
|
|
1778
|
+
this.pendingRecoveries.set(ctx.id, { stepName, error });
|
|
1779
|
+
for (const callback of this.recoveryCallbacks) {
|
|
1780
|
+
await callback(ctx, stepName, error);
|
|
1781
|
+
}
|
|
1782
|
+
}
|
|
1783
|
+
/**
|
|
1784
|
+
* Registers a recovery action for a specific step.
|
|
1785
|
+
*
|
|
1786
|
+
* @param stepName - The step name to associate the action with.
|
|
1787
|
+
* @param action - The recovery action to perform.
|
|
1788
|
+
*
|
|
1789
|
+
* @example
|
|
1790
|
+
* ```typescript
|
|
1791
|
+
* manager.registerAction('book-flight', {
|
|
1792
|
+
* type: 'retry',
|
|
1793
|
+
* maxAttempts: 5
|
|
1794
|
+
* });
|
|
1795
|
+
*
|
|
1796
|
+
* manager.registerAction('charge-card', {
|
|
1797
|
+
* type: 'manual',
|
|
1798
|
+
* handler: async () => {
|
|
1799
|
+
* await accountingSystem.manualRefund(transactionId);
|
|
1800
|
+
* }
|
|
1801
|
+
* });
|
|
1802
|
+
* ```
|
|
1803
|
+
*/
|
|
1804
|
+
registerAction(stepName, action) {
|
|
1805
|
+
this.actions.set(stepName, action);
|
|
1806
|
+
}
|
|
1807
|
+
/**
|
|
1808
|
+
* Gets the registered recovery action for a step.
|
|
1809
|
+
*
|
|
1810
|
+
* @param stepName - The step name.
|
|
1811
|
+
* @returns The recovery action if registered, otherwise undefined.
|
|
1812
|
+
*/
|
|
1813
|
+
getAction(stepName) {
|
|
1814
|
+
return this.actions.get(stepName);
|
|
1815
|
+
}
|
|
1816
|
+
/**
|
|
1817
|
+
* Checks if a workflow has a pending recovery.
|
|
1818
|
+
*
|
|
1819
|
+
* @param workflowId - The workflow ID.
|
|
1820
|
+
* @returns True if recovery is pending.
|
|
1821
|
+
*/
|
|
1822
|
+
hasPendingRecovery(workflowId) {
|
|
1823
|
+
return this.pendingRecoveries.has(workflowId);
|
|
1824
|
+
}
|
|
1825
|
+
/**
|
|
1826
|
+
* Gets the pending recovery details for a workflow.
|
|
1827
|
+
*
|
|
1828
|
+
* @param workflowId - The workflow ID.
|
|
1829
|
+
* @returns The pending recovery details if any.
|
|
1830
|
+
*/
|
|
1831
|
+
getPendingRecovery(workflowId) {
|
|
1832
|
+
return this.pendingRecoveries.get(workflowId);
|
|
1833
|
+
}
|
|
1834
|
+
/**
|
|
1835
|
+
* Marks a recovery as resolved.
|
|
1836
|
+
*
|
|
1837
|
+
* @param workflowId - The workflow ID.
|
|
1838
|
+
*/
|
|
1839
|
+
resolveRecovery(workflowId) {
|
|
1840
|
+
this.pendingRecoveries.delete(workflowId);
|
|
1841
|
+
}
|
|
1842
|
+
/**
|
|
1843
|
+
* Executes the registered recovery action for a step.
|
|
1844
|
+
*
|
|
1845
|
+
* @param stepName - The step name.
|
|
1846
|
+
* @returns The recovery action result.
|
|
1847
|
+
*
|
|
1848
|
+
* @example
|
|
1849
|
+
* ```typescript
|
|
1850
|
+
* const action = manager.getAction('failed-step');
|
|
1851
|
+
* if (action && action.type === 'manual') {
|
|
1852
|
+
* await manager.executeRecovery('failed-step');
|
|
1853
|
+
* }
|
|
1854
|
+
* ```
|
|
1855
|
+
*/
|
|
1856
|
+
async executeRecovery(stepName) {
|
|
1857
|
+
const action = this.actions.get(stepName);
|
|
1858
|
+
if (!action) {
|
|
1859
|
+
throw noRecoveryAction(stepName);
|
|
1860
|
+
}
|
|
1861
|
+
if (action.type === "manual") {
|
|
1862
|
+
await action.handler();
|
|
1863
|
+
}
|
|
1864
|
+
}
|
|
1865
|
+
/**
|
|
1866
|
+
* Clears all registered actions.
|
|
1867
|
+
*/
|
|
1868
|
+
clearActions() {
|
|
1869
|
+
this.actions.clear();
|
|
1870
|
+
}
|
|
1871
|
+
/**
|
|
1872
|
+
* Clears all recovery callbacks.
|
|
1873
|
+
*/
|
|
1874
|
+
clearCallbacks() {
|
|
1875
|
+
this.recoveryCallbacks = [];
|
|
1876
|
+
}
|
|
1877
|
+
/**
|
|
1878
|
+
* Gets all pending recoveries.
|
|
1879
|
+
*
|
|
1880
|
+
* @returns A map of workflow IDs to pending recovery details.
|
|
1881
|
+
*/
|
|
1882
|
+
getAllPendingRecoveries() {
|
|
1883
|
+
return new Map(this.pendingRecoveries);
|
|
1884
|
+
}
|
|
1885
|
+
/**
|
|
1886
|
+
* Clears all pending recoveries.
|
|
1887
|
+
*/
|
|
1888
|
+
clearPendingRecoveries() {
|
|
1889
|
+
this.pendingRecoveries.clear();
|
|
1890
|
+
}
|
|
1891
|
+
/**
|
|
1892
|
+
* Gets the count of registered callbacks.
|
|
1893
|
+
* @internal For testing purposes.
|
|
1894
|
+
*/
|
|
1895
|
+
getCallbackCount() {
|
|
1896
|
+
return this.recoveryCallbacks.length;
|
|
1897
|
+
}
|
|
1898
|
+
}, _class6);
|
|
1899
|
+
|
|
1900
|
+
// src/engine/RollbackManager.ts
|
|
1901
|
+
var RollbackManager = class {
|
|
1902
|
+
/**
|
|
1903
|
+
* Initializes the RollbackManager.
|
|
1904
|
+
*
|
|
1905
|
+
* @param storage - The storage adapter for persisting rollback progress.
|
|
1906
|
+
* @param contextManager - Manager for workflow context operations.
|
|
1907
|
+
* @param traceEmitter - Emitter for rollback-related trace events.
|
|
1908
|
+
* @param onPersist - Optional callback to handle custom persistence logic.
|
|
1909
|
+
* @param config - Optional configuration for retry, idempotency, and recovery.
|
|
1910
|
+
*/
|
|
1911
|
+
constructor(storage, contextManager, traceEmitter, onPersist, config) {
|
|
1912
|
+
this.storage = storage;
|
|
1913
|
+
this.contextManager = contextManager;
|
|
1914
|
+
this.traceEmitter = traceEmitter;
|
|
1915
|
+
this.onPersist = onPersist;
|
|
1916
|
+
this.retryPolicy = _nullishCoalesce(_optionalChain([config, 'optionalAccess', _51 => _51.retryPolicy]), () => ( new CompensationRetryPolicy()));
|
|
1917
|
+
this.idempotencyGuard = _nullishCoalesce(_optionalChain([config, 'optionalAccess', _52 => _52.idempotencyGuard]), () => ( new IdempotencyGuard()));
|
|
1918
|
+
this.recoveryManager = _nullishCoalesce(_optionalChain([config, 'optionalAccess', _53 => _53.recoveryManager]), () => ( new RecoveryManager()));
|
|
1919
|
+
}
|
|
1920
|
+
|
|
1921
|
+
|
|
1922
|
+
|
|
1923
|
+
/**
|
|
1924
|
+
* Executes the rollback process for a failed workflow.
|
|
1925
|
+
*
|
|
1926
|
+
* Iterates backwards from the failed step and executes the `compensate` handler
|
|
1927
|
+
* for each completed step that has one defined.
|
|
1928
|
+
*
|
|
1929
|
+
* @param definition - The definition of the workflow being rolled back.
|
|
1930
|
+
* @param ctx - The current workflow context.
|
|
1931
|
+
* @param failedAtIndex - The index of the step where the failure occurred.
|
|
1932
|
+
* @param originalError - The error that triggered the rollback.
|
|
1933
|
+
* @returns A promise resolving to the updated workflow context after rollback.
|
|
1934
|
+
* @throws {Error} If compensation logic itself fails and recovery options are exhausted.
|
|
1935
|
+
*
|
|
1936
|
+
* @example
|
|
1937
|
+
* ```typescript
|
|
1938
|
+
* const rolledBackCtx = await rollbackManager.rollback(
|
|
1939
|
+
* definition,
|
|
1940
|
+
* ctx,
|
|
1941
|
+
* failedIndex,
|
|
1942
|
+
* error
|
|
1943
|
+
* );
|
|
1944
|
+
* ```
|
|
1945
|
+
*/
|
|
1946
|
+
async rollback(definition, ctx, failedAtIndex, originalError) {
|
|
1947
|
+
let currentCtx = updateWorkflowContext(ctx, { status: "rolling_back" });
|
|
1948
|
+
await this.traceEmitter.emit({
|
|
1949
|
+
type: "workflow:rollback_start",
|
|
1950
|
+
timestamp: Date.now(),
|
|
1951
|
+
workflowId: currentCtx.id,
|
|
1952
|
+
workflowName: currentCtx.name,
|
|
1953
|
+
status: "rolling_back",
|
|
1954
|
+
error: originalError.message
|
|
1955
|
+
});
|
|
1956
|
+
let compensatedCount = 0;
|
|
1957
|
+
for (let i = failedAtIndex - 1; i >= 0; i--) {
|
|
1958
|
+
const step = definition.steps[i];
|
|
1959
|
+
let execution = currentCtx.history[i];
|
|
1960
|
+
if (!step || !step.compensate || !execution || execution.status !== "completed") {
|
|
1961
|
+
continue;
|
|
1962
|
+
}
|
|
1963
|
+
if (this.idempotencyGuard.isCompensated(currentCtx, step.name)) {
|
|
1964
|
+
continue;
|
|
1965
|
+
}
|
|
1966
|
+
try {
|
|
1967
|
+
execution = { ...execution, status: "compensating" };
|
|
1968
|
+
currentCtx = updateWorkflowContext(currentCtx, {
|
|
1969
|
+
history: currentCtx.history.map((h, idx) => idx === i ? execution : h)
|
|
1970
|
+
});
|
|
1971
|
+
currentCtx = await this.persist(currentCtx);
|
|
1972
|
+
if (this.retryPolicy.getConfig().maxAttempts === 0) {
|
|
1973
|
+
await _optionalChain([step, 'access', _54 => _54.compensate, 'optionalCall', _55 => _55(currentCtx)]);
|
|
1974
|
+
} else {
|
|
1975
|
+
const result = await this.retryPolicy.execute(async () => {
|
|
1976
|
+
await _optionalChain([step, 'access', _56 => _56.compensate, 'optionalCall', _57 => _57(currentCtx)]);
|
|
1977
|
+
});
|
|
1978
|
+
if (!result.success) {
|
|
1979
|
+
throw _nullishCoalesce(result.error, () => ( new Error("Compensation failed")));
|
|
1980
|
+
}
|
|
1981
|
+
}
|
|
1982
|
+
execution = { ...execution, status: "compensated", compensatedAt: /* @__PURE__ */ new Date() };
|
|
1983
|
+
currentCtx = updateWorkflowContext(currentCtx, {
|
|
1984
|
+
history: currentCtx.history.map((h, idx) => idx === i ? execution : h)
|
|
1985
|
+
});
|
|
1986
|
+
compensatedCount++;
|
|
1987
|
+
await this.traceEmitter.stepCompensate(currentCtx, step.name, i);
|
|
1988
|
+
} catch (err) {
|
|
1989
|
+
const error = err instanceof Error ? err : new Error(String(err));
|
|
1990
|
+
const action = this.recoveryManager.getAction(step.name);
|
|
1991
|
+
if (_optionalChain([action, 'optionalAccess', _58 => _58.type]) === "skip") {
|
|
1992
|
+
continue;
|
|
1993
|
+
}
|
|
1994
|
+
if (_optionalChain([action, 'optionalAccess', _59 => _59.type]) === "abort") {
|
|
1995
|
+
currentCtx = updateWorkflowContext(currentCtx, { status: "compensation_failed" });
|
|
1996
|
+
await this.traceEmitter.emit({
|
|
1997
|
+
type: "workflow:error",
|
|
1998
|
+
timestamp: Date.now(),
|
|
1999
|
+
workflowId: currentCtx.id,
|
|
2000
|
+
workflowName: currentCtx.name,
|
|
2001
|
+
status: "compensation_failed",
|
|
2002
|
+
error: `Compensation aborted at step "${step.name}": ${error.message}`
|
|
2003
|
+
});
|
|
2004
|
+
return currentCtx;
|
|
2005
|
+
}
|
|
2006
|
+
await this.recoveryManager.notifyRecoveryNeeded(currentCtx, step.name, error);
|
|
2007
|
+
currentCtx = updateWorkflowContext(currentCtx, { status: "compensation_failed" });
|
|
2008
|
+
await this.traceEmitter.emit({
|
|
2009
|
+
type: "workflow:error",
|
|
2010
|
+
timestamp: Date.now(),
|
|
2011
|
+
workflowId: currentCtx.id,
|
|
2012
|
+
workflowName: currentCtx.name,
|
|
2013
|
+
status: "compensation_failed",
|
|
2014
|
+
error: `Compensation failed at step "${step.name}": ${error.message}`
|
|
2015
|
+
});
|
|
2016
|
+
return currentCtx;
|
|
2017
|
+
}
|
|
2018
|
+
currentCtx = await this.persist(currentCtx);
|
|
2019
|
+
}
|
|
2020
|
+
if (compensatedCount > 0) {
|
|
2021
|
+
currentCtx = updateWorkflowContext(currentCtx, { status: "rolled_back" });
|
|
2022
|
+
await this.traceEmitter.emit({
|
|
2023
|
+
type: "workflow:rollback_complete",
|
|
2024
|
+
timestamp: Date.now(),
|
|
2025
|
+
workflowId: currentCtx.id,
|
|
2026
|
+
workflowName: currentCtx.name,
|
|
2027
|
+
status: "rolled_back"
|
|
2028
|
+
});
|
|
2029
|
+
} else {
|
|
2030
|
+
currentCtx = updateWorkflowContext(currentCtx, { status: "failed" });
|
|
2031
|
+
}
|
|
2032
|
+
return currentCtx;
|
|
2033
|
+
}
|
|
2034
|
+
/**
|
|
2035
|
+
* Persists the current context using the configured mechanism.
|
|
2036
|
+
* @private
|
|
2037
|
+
*/
|
|
2038
|
+
async persist(ctx) {
|
|
2039
|
+
if (this.onPersist) {
|
|
2040
|
+
return await this.onPersist(ctx);
|
|
2041
|
+
} else {
|
|
2042
|
+
await this.storage.save(this.contextManager.toState(ctx));
|
|
2043
|
+
return ctx;
|
|
2044
|
+
}
|
|
2045
|
+
}
|
|
2046
|
+
/**
|
|
2047
|
+
* Gets the retry policy instance used by this manager.
|
|
2048
|
+
* @returns The CompensationRetryPolicy instance.
|
|
2049
|
+
*/
|
|
2050
|
+
getRetryPolicy() {
|
|
2051
|
+
return this.retryPolicy;
|
|
2052
|
+
}
|
|
2053
|
+
/**
|
|
2054
|
+
* Gets the idempotency guard instance used by this manager.
|
|
2055
|
+
* @returns The IdempotencyGuard instance.
|
|
2056
|
+
*/
|
|
2057
|
+
getIdempotencyGuard() {
|
|
2058
|
+
return this.idempotencyGuard;
|
|
2059
|
+
}
|
|
2060
|
+
/**
|
|
2061
|
+
* Gets the recovery manager instance used by this manager.
|
|
2062
|
+
* @returns The RecoveryManager instance.
|
|
2063
|
+
*/
|
|
2064
|
+
getRecoveryManager() {
|
|
2065
|
+
return this.recoveryManager;
|
|
2066
|
+
}
|
|
2067
|
+
};
|
|
2068
|
+
|
|
2069
|
+
// src/engine/TraceEmitter.ts
|
|
2070
|
+
var TraceEmitter = class {
|
|
2071
|
+
/**
|
|
2072
|
+
* Initializes the TraceEmitter.
|
|
2073
|
+
*
|
|
2074
|
+
* @param traceSink - The destination for trace events.
|
|
2075
|
+
*/
|
|
2076
|
+
constructor(traceSink) {
|
|
2077
|
+
this.traceSink = traceSink;
|
|
2078
|
+
}
|
|
2079
|
+
/**
|
|
2080
|
+
* Emits a raw trace event to the configured sink.
|
|
2081
|
+
*
|
|
2082
|
+
* @param event - The trace event to emit.
|
|
2083
|
+
*/
|
|
2084
|
+
async emit(event) {
|
|
2085
|
+
try {
|
|
2086
|
+
await _optionalChain([this, 'access', _60 => _60.traceSink, 'optionalAccess', _61 => _61.emit, 'call', _62 => _62(event)]);
|
|
2087
|
+
} catch (e3) {
|
|
2088
|
+
}
|
|
2089
|
+
}
|
|
2090
|
+
/**
|
|
2091
|
+
* Emits an event indicating that a workflow has started.
|
|
2092
|
+
*
|
|
2093
|
+
* @param ctx - The workflow context at the start.
|
|
2094
|
+
*/
|
|
2095
|
+
async workflowStart(ctx) {
|
|
2096
|
+
await this.emit({
|
|
2097
|
+
type: "workflow:start",
|
|
2098
|
+
timestamp: Date.now(),
|
|
2099
|
+
workflowId: ctx.id,
|
|
2100
|
+
workflowName: ctx.name,
|
|
2101
|
+
status: "running",
|
|
2102
|
+
input: ctx.input
|
|
2103
|
+
});
|
|
2104
|
+
}
|
|
2105
|
+
/**
|
|
2106
|
+
* Emits an event indicating that a workflow has completed successfully.
|
|
2107
|
+
*
|
|
2108
|
+
* @param ctx - The final workflow context.
|
|
2109
|
+
* @param duration - The total execution time in milliseconds.
|
|
2110
|
+
*/
|
|
2111
|
+
async workflowComplete(ctx, duration) {
|
|
2112
|
+
await this.emit({
|
|
2113
|
+
type: "workflow:complete",
|
|
2114
|
+
timestamp: Date.now(),
|
|
2115
|
+
workflowId: ctx.id,
|
|
2116
|
+
workflowName: ctx.name,
|
|
2117
|
+
status: "completed",
|
|
2118
|
+
duration,
|
|
2119
|
+
data: ctx.data
|
|
2120
|
+
});
|
|
2121
|
+
}
|
|
2122
|
+
/**
|
|
2123
|
+
* Emits an event indicating that a workflow has failed.
|
|
2124
|
+
*
|
|
2125
|
+
* @param ctx - The workflow context at the time of failure.
|
|
2126
|
+
* @param error - The error that caused the failure.
|
|
2127
|
+
* @param duration - The execution time until failure in milliseconds.
|
|
2128
|
+
*/
|
|
2129
|
+
async workflowError(ctx, error, duration) {
|
|
2130
|
+
await this.emit({
|
|
2131
|
+
type: "workflow:error",
|
|
2132
|
+
timestamp: Date.now(),
|
|
2133
|
+
workflowId: ctx.id,
|
|
2134
|
+
workflowName: ctx.name,
|
|
2135
|
+
status: "failed",
|
|
2136
|
+
duration,
|
|
2137
|
+
error: error.message
|
|
2138
|
+
});
|
|
2139
|
+
}
|
|
2140
|
+
/**
|
|
2141
|
+
* Emits an event indicating that a specific step has started.
|
|
2142
|
+
*
|
|
2143
|
+
* @param ctx - The current workflow context.
|
|
2144
|
+
* @param stepName - The name of the step.
|
|
2145
|
+
* @param stepIndex - The index of the step in the workflow.
|
|
2146
|
+
* @param commit - Whether the step is a commit step.
|
|
2147
|
+
*/
|
|
2148
|
+
async stepStart(ctx, stepName, stepIndex, commit) {
|
|
2149
|
+
await this.emit({
|
|
2150
|
+
type: "step:start",
|
|
2151
|
+
timestamp: Date.now(),
|
|
2152
|
+
workflowId: ctx.id,
|
|
2153
|
+
workflowName: ctx.name,
|
|
2154
|
+
stepName,
|
|
2155
|
+
stepIndex,
|
|
2156
|
+
commit,
|
|
2157
|
+
status: "running"
|
|
2158
|
+
});
|
|
2159
|
+
}
|
|
2160
|
+
/**
|
|
2161
|
+
* Emits an event indicating that a step has completed successfully.
|
|
2162
|
+
*
|
|
2163
|
+
* @param ctx - The current workflow context.
|
|
2164
|
+
* @param stepName - The name of the step.
|
|
2165
|
+
* @param stepIndex - The index of the step.
|
|
2166
|
+
* @param result - The result of the step execution.
|
|
2167
|
+
*/
|
|
2168
|
+
async stepComplete(ctx, stepName, stepIndex, result) {
|
|
2169
|
+
await this.emit({
|
|
2170
|
+
type: "step:complete",
|
|
2171
|
+
timestamp: Date.now(),
|
|
2172
|
+
workflowId: ctx.id,
|
|
2173
|
+
workflowName: ctx.name,
|
|
2174
|
+
stepName,
|
|
2175
|
+
stepIndex,
|
|
2176
|
+
duration: result.duration,
|
|
2177
|
+
status: "completed"
|
|
2178
|
+
});
|
|
2179
|
+
}
|
|
2180
|
+
/**
|
|
2181
|
+
* Emits an event indicating that a step has failed.
|
|
2182
|
+
*
|
|
2183
|
+
* @param ctx - The current workflow context.
|
|
2184
|
+
* @param stepName - The name of the step.
|
|
2185
|
+
* @param stepIndex - The index of the step.
|
|
2186
|
+
* @param result - The result containing the error.
|
|
2187
|
+
*/
|
|
2188
|
+
async stepError(ctx, stepName, stepIndex, result) {
|
|
2189
|
+
await this.emit({
|
|
2190
|
+
type: "step:error",
|
|
2191
|
+
timestamp: Date.now(),
|
|
2192
|
+
workflowId: ctx.id,
|
|
2193
|
+
workflowName: ctx.name,
|
|
2194
|
+
stepName,
|
|
2195
|
+
stepIndex,
|
|
2196
|
+
duration: result.duration,
|
|
2197
|
+
error: _optionalChain([result, 'access', _63 => _63.error, 'optionalAccess', _64 => _64.message]),
|
|
2198
|
+
status: "failed"
|
|
2199
|
+
});
|
|
2200
|
+
}
|
|
2201
|
+
/**
|
|
2202
|
+
* Emits an event indicating that a step has been suspended.
|
|
2203
|
+
*
|
|
2204
|
+
* @param ctx - The current workflow context.
|
|
2205
|
+
* @param stepName - The name of the step.
|
|
2206
|
+
* @param stepIndex - The index of the step.
|
|
2207
|
+
* @param signal - The name of the signal the step is waiting for.
|
|
2208
|
+
*/
|
|
2209
|
+
async stepSuspended(ctx, stepName, stepIndex, signal) {
|
|
2210
|
+
await this.emit({
|
|
2211
|
+
type: "step:suspend",
|
|
2212
|
+
timestamp: Date.now(),
|
|
2213
|
+
workflowId: ctx.id,
|
|
2214
|
+
workflowName: ctx.name,
|
|
2215
|
+
stepName,
|
|
2216
|
+
stepIndex,
|
|
2217
|
+
meta: { signal }
|
|
2218
|
+
});
|
|
2219
|
+
}
|
|
2220
|
+
/**
|
|
2221
|
+
* Emits an event indicating that a step's compensation logic has been executed.
|
|
2222
|
+
*
|
|
2223
|
+
* @param ctx - The current workflow context.
|
|
2224
|
+
* @param stepName - The name of the step being compensated.
|
|
2225
|
+
* @param stepIndex - The index of the step.
|
|
2226
|
+
*/
|
|
2227
|
+
async stepCompensate(ctx, stepName, stepIndex) {
|
|
2228
|
+
await this.emit({
|
|
2229
|
+
type: "step:compensate",
|
|
2230
|
+
timestamp: Date.now(),
|
|
2231
|
+
workflowId: ctx.id,
|
|
2232
|
+
workflowName: ctx.name,
|
|
2233
|
+
stepName,
|
|
2234
|
+
stepIndex,
|
|
2235
|
+
status: "compensated"
|
|
2236
|
+
});
|
|
2237
|
+
}
|
|
2238
|
+
};
|
|
2239
|
+
|
|
2240
|
+
// src/engine/ParallelExecutor.ts
|
|
2241
|
+
var ParallelExecutor = class {
|
|
2242
|
+
/**
|
|
2243
|
+
* Executes a list of steps concurrently.
|
|
2244
|
+
*
|
|
2245
|
+
* @param steps - The definitions of the steps to run in parallel.
|
|
2246
|
+
* @param ctx - The shared workflow context.
|
|
2247
|
+
* @param executeStep - A callback function to execute a single step.
|
|
2248
|
+
* @returns A promise resolving to the aggregated execution results.
|
|
2249
|
+
*/
|
|
2250
|
+
async executeGroup(steps, ctx, executeStep) {
|
|
2251
|
+
const results = await Promise.allSettled(
|
|
2252
|
+
steps.map(async (step, index) => {
|
|
2253
|
+
try {
|
|
2254
|
+
const result = await executeStep(step, ctx);
|
|
2255
|
+
return { step, result, index, success: true };
|
|
2256
|
+
} catch (error) {
|
|
2257
|
+
const err = error instanceof Error ? error : new Error(String(error));
|
|
2258
|
+
return { step, error: err, index, success: false };
|
|
2259
|
+
}
|
|
2260
|
+
})
|
|
2261
|
+
);
|
|
2262
|
+
const successes = [];
|
|
2263
|
+
const failures = [];
|
|
2264
|
+
for (const promiseResult of results) {
|
|
2265
|
+
if (promiseResult.status === "fulfilled") {
|
|
2266
|
+
const value = promiseResult.value;
|
|
2267
|
+
if (value.success) {
|
|
2268
|
+
successes.push(value.result.execution);
|
|
2269
|
+
} else {
|
|
2270
|
+
const failedExecution = {
|
|
2271
|
+
name: value.step.name,
|
|
2272
|
+
status: "failed",
|
|
2273
|
+
startedAt: /* @__PURE__ */ new Date(),
|
|
2274
|
+
error: value.error.message,
|
|
2275
|
+
retries: 0
|
|
2276
|
+
};
|
|
2277
|
+
failures.push({
|
|
2278
|
+
step: value.step,
|
|
2279
|
+
error: value.error,
|
|
2280
|
+
execution: failedExecution
|
|
2281
|
+
});
|
|
2282
|
+
}
|
|
2283
|
+
} else {
|
|
2284
|
+
const error = promiseResult.reason instanceof Error ? promiseResult.reason : new Error(String(promiseResult.reason));
|
|
2285
|
+
const failedExecution = {
|
|
2286
|
+
name: "unknown-step",
|
|
2287
|
+
status: "failed",
|
|
2288
|
+
startedAt: /* @__PURE__ */ new Date(),
|
|
2289
|
+
error: error.message,
|
|
2290
|
+
retries: 0
|
|
2291
|
+
};
|
|
2292
|
+
failures.push({
|
|
2293
|
+
step: steps[0],
|
|
2294
|
+
error,
|
|
2295
|
+
execution: failedExecution
|
|
2296
|
+
});
|
|
2297
|
+
}
|
|
2298
|
+
}
|
|
2299
|
+
return { successes, failures };
|
|
2300
|
+
}
|
|
2301
|
+
};
|
|
2302
|
+
|
|
2303
|
+
// src/engine/WorkflowExecutor.ts
|
|
2304
|
+
var WorkflowExecutor = class {
|
|
2305
|
+
/**
|
|
2306
|
+
* Initializes the WorkflowExecutor.
|
|
2307
|
+
*
|
|
2308
|
+
* @param storage - The storage adapter for persistence.
|
|
2309
|
+
* @param contextManager - Manager for workflow context operations.
|
|
2310
|
+
* @param stepExecutor - Executor for individual steps.
|
|
2311
|
+
* @param traceEmitter - Emitter for execution-related trace events.
|
|
2312
|
+
* @param config - Global engine configuration.
|
|
2313
|
+
* @param onPersist - Optional callback for custom persistence logic.
|
|
2314
|
+
*/
|
|
2315
|
+
constructor(storage, contextManager, stepExecutor, traceEmitter, config = {}, onPersist, optimizer) {
|
|
2316
|
+
this.storage = storage;
|
|
2317
|
+
this.contextManager = contextManager;
|
|
2318
|
+
this.stepExecutor = stepExecutor;
|
|
2319
|
+
this.traceEmitter = traceEmitter;
|
|
2320
|
+
this.config = config;
|
|
2321
|
+
this.onPersist = onPersist;
|
|
2322
|
+
this.optimizer = optimizer;
|
|
2323
|
+
this.parallelExecutor = new ParallelExecutor();
|
|
2324
|
+
}
|
|
2325
|
+
|
|
2326
|
+
/**
|
|
2327
|
+
* Executes the steps of a workflow definition.
|
|
2328
|
+
*
|
|
2329
|
+
* This method manages the loop over workflow steps, handling suspensions,
|
|
2330
|
+
* failures, and successful completions. It delegates parallel step groups
|
|
2331
|
+
* to the ParallelExecutor.
|
|
2332
|
+
*
|
|
2333
|
+
* @param definition - The workflow definition to execute.
|
|
2334
|
+
* @param ctx - The current workflow context.
|
|
2335
|
+
* @param stateMachine - The state machine governing the workflow status.
|
|
2336
|
+
* @param startTime - The timestamp when the workflow execution originally started.
|
|
2337
|
+
* @param startIndex - The index of the step to start execution from.
|
|
2338
|
+
* @param meta - Metadata about the execution (e.g., if it's a resume or retry).
|
|
2339
|
+
* @returns A promise resolving to the result of the workflow execution.
|
|
2340
|
+
* @throws {Error} If execution fails and no rollback/recovery was successful (handled internally but re-thrown if critical).
|
|
2341
|
+
*
|
|
2342
|
+
* @example
|
|
2343
|
+
* ```typescript
|
|
2344
|
+
* const result = await executor.execute(definition, ctx, stateMachine, Date.now(), 0);
|
|
2345
|
+
* ```
|
|
2346
|
+
*/
|
|
2347
|
+
async execute(definition, ctx, stateMachine, startTime, startIndex, meta) {
|
|
2348
|
+
let currentCtx = ctx;
|
|
2349
|
+
try {
|
|
2350
|
+
if (stateMachine.canExecute()) {
|
|
2351
|
+
stateMachine.transition("running");
|
|
2352
|
+
currentCtx = updateWorkflowContext(currentCtx, { status: "running" });
|
|
2353
|
+
}
|
|
2354
|
+
if (!_optionalChain([meta, 'optionalAccess', _65 => _65.resume]) && !_optionalChain([meta, 'optionalAccess', _66 => _66.retry])) {
|
|
2355
|
+
await this.traceEmitter.workflowStart(currentCtx);
|
|
2356
|
+
}
|
|
2357
|
+
for (let i = startIndex; i < definition.steps.length; ) {
|
|
2358
|
+
const step = definition.steps[i];
|
|
2359
|
+
if (step.parallelGroup) {
|
|
2360
|
+
const { lastIndex, updatedCtx, result } = await this.executeParallelGroup(
|
|
2361
|
+
definition,
|
|
2362
|
+
currentCtx,
|
|
2363
|
+
i,
|
|
2364
|
+
startTime
|
|
2365
|
+
);
|
|
2366
|
+
currentCtx = updatedCtx;
|
|
2367
|
+
currentCtx = await this.persist(currentCtx);
|
|
2368
|
+
if (result) {
|
|
2369
|
+
return result;
|
|
2370
|
+
}
|
|
2371
|
+
i = lastIndex + 1;
|
|
2372
|
+
} else {
|
|
2373
|
+
const { updatedCtx, shouldReturn, result } = await this.executeSequentialStep(
|
|
2374
|
+
definition,
|
|
2375
|
+
currentCtx,
|
|
2376
|
+
i,
|
|
2377
|
+
startTime,
|
|
2378
|
+
stateMachine
|
|
2379
|
+
);
|
|
2380
|
+
currentCtx = updatedCtx;
|
|
2381
|
+
currentCtx = await this.persist(currentCtx);
|
|
2382
|
+
if (shouldReturn && result) {
|
|
2383
|
+
return result;
|
|
2384
|
+
}
|
|
2385
|
+
i++;
|
|
2386
|
+
}
|
|
2387
|
+
}
|
|
2388
|
+
stateMachine.transition("completed");
|
|
2389
|
+
currentCtx = updateWorkflowContext(currentCtx, { status: "completed" });
|
|
2390
|
+
currentCtx = await this.persist(currentCtx);
|
|
2391
|
+
_optionalChain([this, 'access', _67 => _67.config, 'access', _68 => _68.on, 'optionalAccess', _69 => _69.workflowComplete, 'optionalCall', _70 => _70(currentCtx)]);
|
|
2392
|
+
await this.traceEmitter.workflowComplete(currentCtx, Date.now() - startTime);
|
|
2393
|
+
return {
|
|
2394
|
+
id: currentCtx.id,
|
|
2395
|
+
status: "completed",
|
|
2396
|
+
data: currentCtx.data,
|
|
2397
|
+
history: currentCtx.history,
|
|
2398
|
+
duration: Date.now() - startTime,
|
|
2399
|
+
version: currentCtx.version
|
|
2400
|
+
};
|
|
2401
|
+
} catch (error) {
|
|
2402
|
+
console.log(
|
|
2403
|
+
"In catch block, history:",
|
|
2404
|
+
currentCtx.history.map((h) => ({ name: h.name, status: h.status }))
|
|
2405
|
+
);
|
|
2406
|
+
const err = error instanceof Error ? error : new Error(String(error));
|
|
2407
|
+
stateMachine.forceStatus("failed");
|
|
2408
|
+
currentCtx = updateWorkflowContext(currentCtx, { status: "failed" });
|
|
2409
|
+
currentCtx = await this.persist(currentCtx);
|
|
2410
|
+
_optionalChain([this, 'access', _71 => _71.config, 'access', _72 => _72.on, 'optionalAccess', _73 => _73.workflowError, 'optionalCall', _74 => _74(currentCtx, err)]);
|
|
2411
|
+
await this.traceEmitter.workflowError(currentCtx, err, Date.now() - startTime);
|
|
2412
|
+
return {
|
|
2413
|
+
id: currentCtx.id,
|
|
2414
|
+
status: "failed",
|
|
2415
|
+
data: currentCtx.data,
|
|
2416
|
+
history: currentCtx.history,
|
|
2417
|
+
duration: Date.now() - startTime,
|
|
2418
|
+
error: err,
|
|
2419
|
+
version: currentCtx.version
|
|
2420
|
+
};
|
|
2421
|
+
}
|
|
2422
|
+
}
|
|
2423
|
+
/**
|
|
2424
|
+
* Executes a single sequential step.
|
|
2425
|
+
* @private
|
|
2426
|
+
*/
|
|
2427
|
+
async executeSequentialStep(definition, ctx, index, startTime, stateMachine) {
|
|
2428
|
+
const step = definition.steps[index];
|
|
2429
|
+
let execution = ctx.history[index];
|
|
2430
|
+
let currentCtx = ctx;
|
|
2431
|
+
currentCtx = this.contextManager.setStepName(currentCtx, index, step.name);
|
|
2432
|
+
execution = currentCtx.history[index];
|
|
2433
|
+
currentCtx = updateWorkflowContext(currentCtx, { currentStep: index });
|
|
2434
|
+
_optionalChain([this, 'access', _75 => _75.config, 'access', _76 => _76.on, 'optionalAccess', _77 => _77.stepStart, 'optionalCall', _78 => _78(step.name, currentCtx)]);
|
|
2435
|
+
await this.traceEmitter.stepStart(currentCtx, step.name, index, Boolean(step.commit));
|
|
2436
|
+
const { result, execution: updatedExecution } = await this.stepExecutor.execute(
|
|
2437
|
+
step,
|
|
2438
|
+
currentCtx,
|
|
2439
|
+
execution
|
|
2440
|
+
);
|
|
2441
|
+
execution = updatedExecution;
|
|
2442
|
+
currentCtx = updateWorkflowContext(currentCtx, {
|
|
2443
|
+
history: currentCtx.history.map((h, idx) => idx === index ? execution : h)
|
|
2444
|
+
});
|
|
2445
|
+
if (result.success) {
|
|
2446
|
+
if (result.suspended) {
|
|
2447
|
+
stateMachine.transition("suspended");
|
|
2448
|
+
currentCtx = updateWorkflowContext(currentCtx, { status: "suspended" });
|
|
2449
|
+
await this.traceEmitter.stepSuspended(currentCtx, step.name, index, result.waitingFor);
|
|
2450
|
+
const suspendedResult = {
|
|
2451
|
+
id: currentCtx.id,
|
|
2452
|
+
status: "suspended",
|
|
2453
|
+
data: currentCtx.data,
|
|
2454
|
+
history: currentCtx.history,
|
|
2455
|
+
duration: Date.now() - startTime,
|
|
2456
|
+
version: currentCtx.version
|
|
2457
|
+
};
|
|
2458
|
+
return { updatedCtx: currentCtx, shouldReturn: true, result: suspendedResult };
|
|
2459
|
+
}
|
|
2460
|
+
_optionalChain([this, 'access', _79 => _79.config, 'access', _80 => _80.on, 'optionalAccess', _81 => _81.stepComplete, 'optionalCall', _82 => _82(step.name, currentCtx, result)]);
|
|
2461
|
+
await this.traceEmitter.stepComplete(currentCtx, step.name, index, result);
|
|
2462
|
+
} else {
|
|
2463
|
+
await this.traceEmitter.stepError(currentCtx, step.name, index, result);
|
|
2464
|
+
const failedResult = {
|
|
2465
|
+
id: currentCtx.id,
|
|
2466
|
+
status: "failed",
|
|
2467
|
+
data: currentCtx.data,
|
|
2468
|
+
history: currentCtx.history,
|
|
2469
|
+
duration: Date.now() - startTime,
|
|
2470
|
+
error: result.error,
|
|
2471
|
+
version: currentCtx.version
|
|
2472
|
+
};
|
|
2473
|
+
return { updatedCtx: currentCtx, shouldReturn: true, result: failedResult };
|
|
2474
|
+
}
|
|
2475
|
+
return { updatedCtx: currentCtx, shouldReturn: false };
|
|
2476
|
+
}
|
|
2477
|
+
/**
 * Executes a group of parallel steps.
 *
 * Collects the contiguous run of steps sharing the same `parallelGroup` id
 * starting at `startIndex`, runs them through the ParallelExecutor, then
 * merges each step's execution record back into the shared context history.
 *
 * @param definition - The workflow definition containing the steps.
 * @param ctx - The workflow context at the moment the group begins.
 * @param startIndex - Index of the first step of the group.
 * @param startTime - Timestamp when the overall workflow execution started.
 * @returns `{ lastIndex, updatedCtx }`, plus a failed workflow `result`
 *   when any step of the group failed or the executor itself threw.
 * @private
 */
async executeParallelGroup(definition, ctx, startIndex, startTime) {
  const firstStep = definition.steps[startIndex];
  const groupId = firstStep.parallelGroup;
  const groupSteps = [];
  // The group is the contiguous run of steps with the same group id;
  // it ends at the first step whose id differs (or is absent).
  for (let i = startIndex; i < definition.steps.length; i++) {
    const step = definition.steps[i];
    if (step.parallelGroup === groupId) {
      groupSteps.push({ step, index: i });
    } else {
      break;
    }
  }
  const lastIndex = _optionalChain([groupSteps, 'access', _83 => _83[groupSteps.length - 1], 'optionalAccess', _84 => _84.index]);
  let currentCtx = ctx;
  // Runs one step against its own derived context; failures throw so the
  // ParallelExecutor can collect them per step.
  const executeStepWrapper = async (step, stepCtx) => {
    const stepIndex = _optionalChain([groupSteps, 'access', _85 => _85.find, 'call', _86 => _86((gs) => gs.step === step), 'optionalAccess', _87 => _87.index]);
    let execution = stepCtx.history[stepIndex];
    const localCtx = this.contextManager.setStepName(stepCtx, stepIndex, step.name);
    execution = localCtx.history[stepIndex];
    _optionalChain([this, 'access', _88 => _88.config, 'access', _89 => _89.on, 'optionalAccess', _90 => _90.stepStart, 'optionalCall', _91 => _91(step.name, localCtx)]);
    await this.traceEmitter.stepStart(localCtx, step.name, stepIndex, Boolean(step.commit));
    const { result, execution: updatedExecution } = await this.stepExecutor.execute(
      step,
      localCtx,
      execution
    );
    if (result.success) {
      _optionalChain([this, 'access', _92 => _92.config, 'access', _93 => _93.on, 'optionalAccess', _94 => _94.stepComplete, 'optionalCall', _95 => _95(step.name, localCtx, result)]);
      await this.traceEmitter.stepComplete(localCtx, step.name, stepIndex, result);
    } else {
      await this.traceEmitter.stepError(localCtx, step.name, stepIndex, result);
      throw result.error || new Error(`Step ${step.name} failed`);
    }
    return { ctx: localCtx, execution: updatedExecution };
  };
  try {
    const parallelResult = await this.parallelExecutor.executeGroup(
      groupSteps.map((gs) => gs.step),
      currentCtx,
      executeStepWrapper
    );
    // Merge results back by step NAME.
    // NOTE(review): this assumes step names are unique within the group —
    // confirm against the definition validator.
    for (const execution of parallelResult.successes) {
      const stepIndex = _optionalChain([groupSteps, 'access', _96 => _96.find, 'call', _97 => _97((gs) => gs.step.name === execution.name), 'optionalAccess', _98 => _98.index]);
      currentCtx = updateWorkflowContext(currentCtx, {
        history: currentCtx.history.map((h, idx) => idx === stepIndex ? execution : h)
      });
    }
    for (const failure of parallelResult.failures) {
      const stepIndex = _optionalChain([groupSteps, 'access', _99 => _99.find, 'call', _100 => _100((gs) => gs.step.name === failure.step.name), 'optionalAccess', _101 => _101.index]);
      currentCtx = updateWorkflowContext(currentCtx, {
        history: currentCtx.history.map((h, idx) => idx === stepIndex ? failure.execution : h)
      });
    }
    if (parallelResult.failures.length > 0) {
      // The first recorded failure becomes the workflow-level error.
      const firstFailure = parallelResult.failures[0];
      const failedResult = {
        id: currentCtx.id,
        status: "failed",
        data: currentCtx.data,
        history: currentCtx.history,
        duration: Date.now() - startTime,
        error: firstFailure.error,
        version: currentCtx.version
      };
      return { lastIndex, updatedCtx: currentCtx, result: failedResult };
    }
    return { lastIndex, updatedCtx: currentCtx };
  } catch (error) {
    // Unexpected throw from the parallel executor itself (not a collected
    // per-step failure): report the whole workflow as failed.
    const err = error instanceof Error ? error : new Error(String(error));
    const failedResult = {
      id: currentCtx.id,
      status: "failed",
      data: currentCtx.data,
      history: currentCtx.history,
      duration: Date.now() - startTime,
      error: err,
      version: currentCtx.version
    };
    return { lastIndex, updatedCtx: currentCtx, result: failedResult };
  }
}
|
|
2562
|
+
/**
|
|
2563
|
+
* Persists the context to storage.
|
|
2564
|
+
* @private
|
|
2565
|
+
*/
|
|
2566
|
+
async persist(ctx) {
|
|
2567
|
+
let currentCtx = ctx;
|
|
2568
|
+
if (this.optimizer) {
|
|
2569
|
+
const optimizedData = this.optimizer.optimizeForStorage(ctx.data);
|
|
2570
|
+
currentCtx = updateWorkflowContext(ctx, { data: optimizedData });
|
|
2571
|
+
}
|
|
2572
|
+
if (this.onPersist) {
|
|
2573
|
+
return await this.onPersist(currentCtx);
|
|
2574
|
+
} else {
|
|
2575
|
+
await this.storage.save(this.contextManager.toState(currentCtx));
|
|
2576
|
+
return currentCtx;
|
|
2577
|
+
}
|
|
2578
|
+
}
|
|
2579
|
+
};
|
|
2580
|
+
|
|
2581
|
+
// src/engine/FluxEngine.ts
|
|
2582
|
+
var FluxEngine = class {
  /**
   * Creates a new engine instance and wires together its collaborators:
   * storage (defaults to in-memory), context manager, trace emitter,
   * optional data optimizer, step/workflow executors, rollback manager
   * and cron trigger.
   *
   * @param config - Engine configuration; every field is optional.
   */
  constructor(config = {}) {
    this.storage = _nullishCoalesce(config.storage, () => ( new MemoryStorage()));
    this.contextManager = new ContextManager();
    this.traceEmitter = new TraceEmitter(config.trace);
    if (_optionalChain([config, 'access', _102 => _102.optimizer, 'optionalAccess', _103 => _103.enabled])) {
      this.dataOptimizer = new DataOptimizer({
        threshold: config.optimizer.threshold,
        defaultLocation: config.optimizer.defaultLocation
      });
    }
    const stepExecutor = createStepExecutor(config, this.traceEmitter);
    // Bound persistence callback shared by the workflow executor and the
    // rollback manager.
    const persist = (ctx) => this.persist(ctx);
    this.executor = new WorkflowExecutor(
      this.storage,
      this.contextManager,
      stepExecutor,
      this.traceEmitter,
      config,
      persist,
      this.dataOptimizer
    );
    this.rollbackManager = new RollbackManager(
      this.storage,
      this.contextManager,
      this.traceEmitter,
      persist,
      {
        retryPolicy: new CompensationRetryPolicy({
          maxAttempts: config.defaultRetries !== void 0 ? config.defaultRetries : 3
        })
      }
    );
    this.cronTrigger = new CronTrigger(this);
    this.config = config;
  }

  /**
   * Starts a new workflow instance from the beginning.
   *
   * @param workflow - Workflow definition (or value resolvable to one).
   * @param input - Input payload; checked via `definition.validateInput` when present.
   * @returns A promise resolving to the workflow execution result.
   * @throws If `validateInput` rejects the input.
   */
  async execute(workflow, input) {
    const definition = resolveDefinition(workflow);
    if (definition.validateInput && !definition.validateInput(input)) {
      throw invalidInput(definition.name);
    }
    let ctx = this.contextManager.create(
      definition.name,
      input,
      definition.steps.length
    );
    ctx = await this.persist(ctx, definition.version);
    return this.executeWithLock(definition, ctx, 0, {}, Date.now());
  }
  /**
   * Executes the same workflow for a batch of inputs via a BatchExecutor.
   */
  async executeBatch(workflow, inputs, options) {
    const executor = new BatchExecutor(this);
    return executor.execute(workflow, inputs, options);
  }
  /**
   * Runs the workflow executor while holding the engine lock for this
   * workflow id. The lock is released via optional chaining in `finally`,
   * so configurations without a lock provider are a no-op.
   */
  async executeWithLock(definition, ctx, startIndex, options, startTime) {
    const lock = await acquireEngineLock(this.config, ctx.id);
    try {
      const result = await this.executor.execute(
        definition,
        ctx,
        new StateMachine(),
        _nullishCoalesce(startTime, () => ( Date.now())),
        startIndex,
        options
      );
      return handleExecutionResult(
        definition,
        ctx,
        result,
        this.contextManager,
        this.rollbackManager,
        this.storage
      );
    } finally {
      await _optionalChain([lock, 'optionalAccess', _104 => _104.release, 'call', _105 => _105()]);
    }
  }
  /**
   * Resumes a stored workflow instance, optionally from a named step.
   *
   * @returns The execution result, or null when no state exists for the id.
   * @throws On workflow-name mismatch, or when the definition's step count
   *   no longer matches the stored history.
   */
  async resume(workflow, workflowId, options) {
    const definition = resolveDefinition(workflow);
    const state = await this.storage.load(workflowId);
    if (!state || state.name !== definition.name) {
      if (!state) {
        return null;
      }
      throw workflowNameMismatch(definition.name, state.name);
    }
    if (state.history.length !== definition.steps.length) {
      throw workflowDefinitionChanged();
    }
    if (state.definitionVersion && definition.version && state.definitionVersion !== definition.version) {
      // Version drift is tolerated (warn-only); a changed step count is not.
      _optionalChain([this, 'access', _106 => _106.config, 'access', _107 => _107.logger, 'optionalAccess', _108 => _108.warn, 'call', _109 => _109(
        `Workflow version mismatch: instance was created with v${state.definitionVersion}, but current definition is v${definition.version}. Continuing execution.`
      )]);
    }
    let ctx = this.contextManager.restore(
      state
    );
    const startIndex = resolveStartIndex(definition, _optionalChain([options, 'optionalAccess', _110 => _110.fromStep]), ctx.currentStep);
    resetHistoryFrom(ctx, startIndex);
    ctx = updateWorkflowContext(ctx, { status: "pending", currentStep: startIndex });
    ctx = await this.persist(ctx);
    return this.executeWithLock(definition, ctx, startIndex, { resume: true, fromStep: startIndex });
  }
  /**
   * Delivers a signal to a suspended workflow and resumes it at the step
   * after the suspended one. The suspended step must be waiting for exactly
   * `signalName`; its output is set to `payload` and it is marked completed.
   *
   * @throws If the workflow is missing, not suspended, or waiting for a
   *   different signal.
   */
  async signal(workflow, workflowId, signalName, payload) {
    const definition = resolveDefinition(workflow);
    const state = await this.storage.load(workflowId);
    if (!state) {
      throw workflowNotFound(workflowId);
    }
    if (state.status !== "suspended") {
      throw workflowNotSuspended(state.status);
    }
    let ctx = this.contextManager.restore(
      state
    );
    const idx = ctx.currentStep;
    const exec = ctx.history[idx];
    if (!exec || exec.status !== "suspended" || exec.waitingFor !== signalName) {
      const isSuspended = _optionalChain([exec, 'optionalAccess', _111 => _111.status]) === "suspended";
      throw new FluxError(
        isSuspended ? `Workflow waiting for signal "${exec.waitingFor}", received "${signalName}"` : "Workflow state invalid: no suspended step found",
        isSuspended ? "INVALID_STATE_TRANSITION" /* INVALID_STATE_TRANSITION */ : "STEP_NOT_FOUND" /* STEP_NOT_FOUND */
      );
    }
    // Complete the suspended step with the signal payload as its output.
    ctx = updateWorkflowContext(ctx, {
      history: ctx.history.map(
        (h, i) => i === idx ? { ...h, status: "completed", completedAt: /* @__PURE__ */ new Date(), output: payload } : h
      )
    });
    await this.traceEmitter.emit({
      type: "signal:received",
      timestamp: Date.now(),
      workflowId: ctx.id,
      workflowName: ctx.name,
      status: "suspended",
      input: payload
    });
    return this.executeWithLock(definition, ctx, idx + 1, { resume: true, fromStep: idx + 1 });
  }
  /**
   * Re-runs a stored workflow from a given step (by name), clearing the
   * history from that step onward first.
   *
   * @returns The execution result, or null when no state exists for the id.
   * @throws On workflow-name mismatch or when the definition changed.
   */
  async retryStep(workflow, workflowId, stepName) {
    const definition = resolveDefinition(workflow);
    const state = await this.storage.load(workflowId);
    if (!state || state.name !== definition.name) {
      if (!state) {
        return null;
      }
      throw workflowNameMismatch(definition.name, state.name);
    }
    if (state.history.length !== definition.steps.length) {
      throw workflowDefinitionChanged();
    }
    let ctx = this.contextManager.restore(
      state
    );
    const idx = resolveStartIndex(definition, stepName, ctx.currentStep);
    resetHistoryFrom(ctx, idx);
    ctx = updateWorkflowContext(ctx, { status: "pending", currentStep: idx });
    ctx = await this.persist(ctx);
    return this.executeWithLock(definition, ctx, idx, { retry: true, fromStep: idx });
  }
  /**
   * Retrieves the current state of a workflow instance from storage.
   *
   * @param workflowId - The unique identifier of the workflow.
   * @returns A promise resolving to the workflow state or null if not found.
   */
  async get(workflowId) {
    return this.storage.load(workflowId);
  }
  /**
   * Saves a workflow state with optimistic concurrency control: rejects the
   * write when the stored version differs from `state.version`, otherwise
   * persists with the version incremented.
   *
   * @throws {FluxError} CONCURRENT_MODIFICATION on a version conflict.
   */
  async saveState(state) {
    const stored = await this.storage.load(state.id);
    if (stored && stored.version !== state.version) {
      throw new FluxError(
        "Concurrent modification detected",
        "CONCURRENT_MODIFICATION" /* CONCURRENT_MODIFICATION */
      );
    }
    return this.storage.save({ ...state, version: state.version + 1 });
  }
  /**
   * Lists stored workflow states matching the filter.
   */
  async list(filter) {
    return this.storage.list(filter);
  }
  /**
   * Registers a cron schedule that executes the workflow with the given
   * input. The default id is time-based — pass an explicit `id` when
   * schedules may be created in the same millisecond.
   *
   * @returns The schedule id.
   */
  schedule(cron, workflow, input, id = `schedule_${Date.now()}`) {
    this.cronTrigger.addSchedule({
      id,
      cron,
      workflow: resolveDefinition(workflow),
      input,
      enabled: true
    });
    return id;
  }
  /**
   * Removes a previously registered schedule by id.
   */
  unschedule(id) {
    this.cronTrigger.removeSchedule(id);
  }
  /**
   * Lists all active workflow schedules.
   */
  listSchedules() {
    return this.cronTrigger.listSchedules();
  }
  /**
   * Gets the recovery manager for handling manual intervention.
   */
  getRecoveryManager() {
    return this.rollbackManager.getRecoveryManager();
  }
  /**
   * Gets the lock provider used for cluster mode.
   */
  getLockProvider() {
    return this.config.lockProvider;
  }
  /**
   * Initializes the engine and its underlying storage, and starts the scheduler.
   */
  async init() {
    await _optionalChain([this, 'access', _112 => _112.storage, 'access', _113 => _113.init, 'optionalCall', _114 => _114()]);
    this.cronTrigger.start();
  }
  /**
   * Closes the engine and releases storage resources.
   */
  async close() {
    this.cronTrigger.stop();
    await _optionalChain([this, 'access', _115 => _115.storage, 'access', _116 => _116.close, 'optionalCall', _117 => _117()]);
  }
  /**
   * Persists a context through the shared persistContext helper.
   * @private
   */
  async persist(ctx, definitionVersion) {
    return persistContext(ctx, this.storage, this.contextManager, definitionVersion);
  }
};
|
|
2820
|
+
|
|
2821
|
+
// src/storage/PostgreSQLStorage.ts
|
|
2822
|
+
var PostgreSQLStorage = (_class7 = class {

  // Lazy-init guard: flipped to true once the pool and schema exist.
  __init13() {this.initialized = false}

  /**
   * Creates a new PostgreSQL storage instance.
   *
   * NOTE(review): `tableName` is interpolated directly into SQL below —
   * it is assumed to come from trusted application config, never from
   * user input. Confirm at the call sites.
   *
   * @param options - Connection and configuration options.
   */
  constructor(options = {}) {;_class7.prototype.__init13.call(this);
    this.options = options;
    this.tableName = _nullishCoalesce(options.tableName, () => ( "flux_workflows"));
  }
  /**
   * Initializes the database connection pool and schema.
   *
   * Creates the workflow table and indexes if they do not exist.
   * This method is idempotent.
   *
   * NOTE(review): the `initialized` flag is only set at the end, so two
   * concurrent first calls can both run the schema DDL — harmless thanks
   * to IF NOT EXISTS, but each creates a Pool; verify if that matters.
   *
   * @throws {Error} If connection fails or schema creation fails.
   */
  async init() {
    if (this.initialized) {
      return;
    }
    // `pg` is loaded lazily so the package works without it installed
    // unless PostgreSQL storage is actually used.
    const { Pool } = await Promise.resolve().then(() => _interopRequireWildcard(require("pg")));
    const config = this.options.connectionString ? { connectionString: this.options.connectionString } : {
      host: _nullishCoalesce(this.options.host, () => ( "localhost")),
      port: _nullishCoalesce(this.options.port, () => ( 5432)),
      database: _nullishCoalesce(this.options.database, () => ( "postgres")),
      user: this.options.user,
      password: this.options.password
    };
    if (this.options.ssl !== void 0) {
      config.ssl = this.options.ssl;
    }
    this.pool = new Pool(config);
    await this.pool.query(`
      CREATE TABLE IF NOT EXISTS ${this.tableName} (
        id TEXT PRIMARY KEY,
        name TEXT NOT NULL,
        status TEXT NOT NULL,
        input JSONB NOT NULL,
        data JSONB NOT NULL,
        current_step INTEGER NOT NULL,
        history JSONB NOT NULL,
        error TEXT,
        created_at TIMESTAMPTZ NOT NULL,
        updated_at TIMESTAMPTZ NOT NULL,
        completed_at TIMESTAMPTZ,
        version INTEGER NOT NULL DEFAULT 1,
        definition_version TEXT
      )
    `);
    await this.pool.query(`
      CREATE INDEX IF NOT EXISTS idx_${this.tableName}_name
      ON ${this.tableName}(name)
    `);
    await this.pool.query(`
      CREATE INDEX IF NOT EXISTS idx_${this.tableName}_status
      ON ${this.tableName}(status)
    `);
    await this.pool.query(`
      CREATE INDEX IF NOT EXISTS idx_${this.tableName}_created
      ON ${this.tableName}(created_at DESC)
    `);
    this.initialized = true;
  }
  /**
   * Persists the current state of a workflow.
   *
   * Uses upsert (INSERT ... ON CONFLICT) to save or update the workflow state.
   * `updated_at` is set to the write time, not taken from `state`.
   *
   * @param state - The workflow state to save.
   * @throws {Error} If the database operation fails.
   */
  async save(state) {
    await this.init();
    await this.pool.query(
      `
      INSERT INTO ${this.tableName}
      (id, name, status, input, data, current_step, history, error, created_at, updated_at, completed_at, version, definition_version)
      VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
      ON CONFLICT (id) DO UPDATE SET
        name = EXCLUDED.name,
        status = EXCLUDED.status,
        input = EXCLUDED.input,
        data = EXCLUDED.data,
        current_step = EXCLUDED.current_step,
        history = EXCLUDED.history,
        error = EXCLUDED.error,
        updated_at = EXCLUDED.updated_at,
        completed_at = EXCLUDED.completed_at,
        version = EXCLUDED.version,
        definition_version = EXCLUDED.definition_version
      `,
      [
        state.id,
        state.name,
        state.status,
        JSON.stringify(state.input),
        JSON.stringify(state.data),
        state.currentStep,
        JSON.stringify(state.history),
        _nullishCoalesce(state.error, () => ( null)),
        state.createdAt,
        /* @__PURE__ */ new Date(),
        _nullishCoalesce(state.completedAt, () => ( null)),
        state.version,
        _nullishCoalesce(state.definitionVersion, () => ( null))
      ]
    );
  }
  /**
   * Loads a workflow state by its ID.
   *
   * @param id - The unique identifier of the workflow.
   * @returns The workflow state, or null if not found.
   * @throws {Error} If the database query fails.
   */
  async load(id) {
    await this.init();
    const result = await this.pool.query(`SELECT * FROM ${this.tableName} WHERE id = $1`, [id]);
    if (result.rows.length === 0) {
      return null;
    }
    return this.rowToState(result.rows[0]);
  }
  /**
   * Lists workflows matching the given filter.
   *
   * Supports name, single or multiple statuses, definition version,
   * limit and offset; results are ordered by created_at descending.
   * All filter values are passed as bind parameters.
   *
   * @param filter - Criteria to filter the results.
   * @returns A list of matching workflow states.
   * @throws {Error} If the database query fails.
   */
  async list(filter) {
    await this.init();
    let query = `SELECT * FROM ${this.tableName} WHERE 1=1`;
    const params = [];
    let paramIndex = 1;
    if (_optionalChain([filter, 'optionalAccess', _118 => _118.name])) {
      query += ` AND name = $${paramIndex++}`;
      params.push(filter.name);
    }
    if (_optionalChain([filter, 'optionalAccess', _119 => _119.status])) {
      if (Array.isArray(filter.status)) {
        const placeholders = filter.status.map(() => `$${paramIndex++}`).join(", ");
        query += ` AND status IN (${placeholders})`;
        params.push(...filter.status);
      } else {
        query += ` AND status = $${paramIndex++}`;
        params.push(filter.status);
      }
    }
    if (_optionalChain([filter, 'optionalAccess', _120 => _120.version])) {
      query += ` AND definition_version = $${paramIndex++}`;
      params.push(filter.version);
    }
    query += " ORDER BY created_at DESC";
    if (_optionalChain([filter, 'optionalAccess', _121 => _121.limit])) {
      query += ` LIMIT $${paramIndex++}`;
      params.push(filter.limit);
    }
    if (_optionalChain([filter, 'optionalAccess', _122 => _122.offset])) {
      query += ` OFFSET $${paramIndex++}`;
      params.push(filter.offset);
    }
    const result = await this.pool.query(query, params);
    return result.rows.map((row) => this.rowToState(row));
  }
  /**
   * Deletes a workflow state by its ID.
   *
   * @param id - The unique identifier of the workflow to delete.
   * @throws {Error} If the database operation fails.
   */
  async delete(id) {
    await this.init();
    await this.pool.query(`DELETE FROM ${this.tableName} WHERE id = $1`, [id]);
  }
  /**
   * Closes the database connection pool.
   *
   * Should be called when shutting down the application.
   */
  async close() {
    if (this.pool) {
      await this.pool.end();
      this.initialized = false;
    }
  }
  /**
   * Converts a database row to a WorkflowState object.
   *
   * JSONB columns may come back as objects or strings depending on the
   * driver configuration, so both forms are handled.
   *
   * @param row - The raw database row.
   * @returns The parsed workflow state.
   */
  rowToState(row) {
    return {
      id: row.id,
      name: row.name,
      status: row.status,
      input: typeof row.input === "string" ? JSON.parse(row.input) : row.input,
      data: typeof row.data === "string" ? JSON.parse(row.data) : row.data,
      currentStep: row.current_step,
      history: typeof row.history === "string" ? JSON.parse(row.history) : row.history,
      error: _nullishCoalesce(row.error, () => ( void 0)),
      createdAt: new Date(row.created_at),
      updatedAt: new Date(row.updated_at),
      completedAt: row.completed_at ? new Date(row.completed_at) : void 0,
      version: row.version,
      definitionVersion: _nullishCoalesce(row.definition_version, () => ( void 0))
    };
  }
  /**
   * Returns the underlying pg.Pool instance.
   *
   * Useful for testing or advanced database operations.
   *
   * @returns The PostgreSQL connection pool.
   */
  getPool() {
    return this.pool;
  }
  /**
   * Reclaims storage and refreshes planner statistics.
   *
   * Runs a plain `VACUUM ANALYZE` — note this operates database-wide,
   * not only on the workflow table.
   *
   * @throws {Error} If the maintenance operation fails.
   */
  async vacuum() {
    await this.init();
    await this.pool.query("VACUUM ANALYZE");
  }
}, _class7);
|
|
3059
|
+
|
|
3060
|
+
// src/trace/JsonFileTraceSink.ts
|
|
3061
|
+
var _promises = require('fs/promises');
|
|
3062
|
+
var _path = require('path');
|
|
3063
|
+
var JsonFileTraceSink = class {
  /**
   * Trace sink that appends events to a newline-delimited JSON (NDJSON) file.
   *
   * @param options - Sink configuration: `path` (target file) and optional
   *   `reset` (truncate an existing file; defaults to true).
   */
  constructor(options) {
    this.path = options.path;
    // Start async setup immediately; emit() awaits it before writing.
    const shouldReset = _nullishCoalesce(options.reset, () => true);
    this.ready = this.init(shouldReset);
  }
  /**
   * Creates the parent directory if needed and optionally truncates the file.
   *
   * @param reset - When true, the target file is emptied.
   * @throws {Error} If directory creation or the truncating write fails.
   */
  async init(reset) {
    const dir = _path.dirname(this.path);
    await _promises.mkdir(dir, { recursive: true });
    if (!reset) {
      return;
    }
    await _promises.writeFile(this.path, "", "utf8");
  }
  /**
   * Appends one trace event as a single NDJSON line.
   *
   * Initialization is awaited first so writes never race directory setup.
   *
   * @param event - The trace event to record.
   * @throws {Error} If appending to the file fails.
   *
   * @example
   * ```typescript
   * await sink.emit({
   *   type: 'step_start',
   *   workflowId: 'wf-1',
   *   timestamp: Date.now(),
   *   data: { step: 'validate' }
   * });
   * ```
   */
  async emit(event) {
    await this.ready;
    const line = `${JSON.stringify(event)}\n`;
    await _promises.appendFile(this.path, line, "utf8");
  }
};
|
|
3111
|
+
|
|
3112
|
+
// src/core/LockProvider.ts
|
|
3113
|
+
var MemoryLockProvider = (_class8 = class {
  constructor() {
    _class8.prototype.__init14.call(this);
  }
  /** Sets up the in-process lock table: resourceId -> { owner, expiresAt }. */
  __init14() {
    this.locks = /* @__PURE__ */ new Map();
  }
  /**
   * Attempts to take (or re-enter) a lock on a resource.
   *
   * @param resourceId - The resource to lock.
   * @param owner - Identifier of the requesting node/process.
   * @param ttl - Lock lifetime in milliseconds.
   * @returns A Lock handle, or null when another live owner holds it.
   */
  async acquire(resourceId, owner, ttl) {
    const now = Date.now();
    const held = this.locks.get(resourceId);
    const isLive = held !== undefined && held.expiresAt > now;
    if (isLive && held.owner !== owner) {
      return null;
    }
    if (isLive) {
      // Re-entrant acquisition by the same owner extends the lease.
      held.expiresAt = now + ttl;
      return this.createLock(resourceId, owner, held.expiresAt);
    }
    const expiresAt = now + ttl;
    this.locks.set(resourceId, { owner, expiresAt });
    return this.createLock(resourceId, owner, expiresAt);
  }
  /**
   * Extends a live lock's lifetime when it is still held by `owner`.
   *
   * @param resourceId - The locked resource.
   * @param owner - The owner expected to hold the lock.
   * @param ttl - New lifetime in milliseconds from now.
   * @returns True when the lease was extended.
   */
  async refresh(resourceId, owner, ttl) {
    const now = Date.now();
    const held = this.locks.get(resourceId);
    if (!held || held.owner !== owner || held.expiresAt <= now) {
      return false;
    }
    held.expiresAt = now + ttl;
    return true;
  }
  /** Removes the lock entry unconditionally, regardless of owner. */
  async release(resourceId) {
    this.locks.delete(resourceId);
  }
  /** Builds the Lock handle handed back to callers. */
  createLock(id, owner, expiresAt) {
    const release = () => this.release(id);
    return { id, owner, expiresAt, release };
  }
}, _class8);
|
|
3150
|
+
|
|
3151
|
+
// src/core/RedisLockProvider.ts
|
|
3152
|
+
var RELEASE_LOCK_SCRIPT = `
|
|
3153
|
+
if redis.call("get", KEYS[1]) == ARGV[1] then
|
|
3154
|
+
return redis.call("del", KEYS[1])
|
|
3155
|
+
else
|
|
3156
|
+
return 0
|
|
3157
|
+
end
|
|
3158
|
+
`;
|
|
3159
|
+
var REFRESH_LOCK_SCRIPT = `
|
|
3160
|
+
if redis.call("get", KEYS[1]) == ARGV[1] then
|
|
3161
|
+
return redis.call("pexpire", KEYS[1], ARGV[2])
|
|
3162
|
+
else
|
|
3163
|
+
return 0
|
|
3164
|
+
end
|
|
3165
|
+
`;
|
|
3166
|
+
var RedisLockProvider = class {
  /**
   * Redis-backed distributed lock provider.
   *
   * @param options - `client` (Redis client) plus optional `keyPrefix`
   *   (default "flux:lock:"), `defaultTtl` in ms (default 30000),
   *   `retryDelay` in ms (default 100), and `maxRetries` (default 0).
   */
  constructor(options) {
    this.client = options.client;
    this.keyPrefix = _nullishCoalesce(options.keyPrefix, () => "flux:lock:");
    this.defaultTtl = _nullishCoalesce(options.defaultTtl, () => 3e4);
    this.retryDelay = _nullishCoalesce(options.retryDelay, () => 100);
    this.maxRetries = _nullishCoalesce(options.maxRetries, () => 0);
  }
  /**
   * Tries to acquire the lock for a resource, retrying up to `maxRetries` times.
   *
   * Acquisition is atomic via SET with NX (only if absent) and PX (ms expiry).
   * When the key is already held by the same owner, the lease is refreshed
   * instead, giving re-entrant behavior.
   *
   * @param resourceId - The unique ID of the resource to lock.
   * @param owner - The identifier of the node/process requesting the lock.
   * @param ttl - Lock lifetime in milliseconds (falls back to defaultTtl).
   * @returns A Lock object if successful, otherwise null.
   */
  async acquire(resourceId, owner, ttl) {
    const key = this.getKey(resourceId);
    const leaseMs = ttl || this.defaultTtl;
    let attemptsLeft = this.maxRetries + 1;
    while (attemptsLeft > 0) {
      attemptsLeft -= 1;
      const reply = await this.client.set(key, owner, { NX: true, PX: leaseMs });
      if (reply === "OK") {
        return this.createLock(resourceId, owner, Date.now() + leaseMs);
      }
      // SET NX failed: we may already hold the lock — re-enter by refreshing.
      const holder = await this.client.get(key);
      if (holder === owner && await this.refresh(resourceId, owner, leaseMs)) {
        return this.createLock(resourceId, owner, Date.now() + leaseMs);
      }
      if (attemptsLeft > 0) {
        await this.sleep(this.retryDelay);
      }
    }
    return null;
  }
  /**
   * Atomically extends a held lock's TTL via a Lua ownership check.
   *
   * @param resourceId - The resource whose lock to extend.
   * @param owner - The owner expected to hold the lock.
   * @param ttl - New lifetime in ms from now (falls back to defaultTtl).
   * @returns True if the lock existed, was owned by `owner`, and was extended.
   */
  async refresh(resourceId, owner, ttl) {
    const key = this.getKey(resourceId);
    const leaseMs = ttl || this.defaultTtl;
    const reply = await this.client.eval(REFRESH_LOCK_SCRIPT, [key], [owner, leaseMs]);
    return reply === 1;
  }
  /**
   * Forcefully deletes a lock key, regardless of the current owner.
   *
   * @param resourceId - The resource to unlock.
   */
  async release(resourceId) {
    await this.client.del(this.getKey(resourceId));
  }
  /**
   * Releases a lock only if still held by `owner` (atomic Lua check-and-delete).
   *
   * @param resourceId - The resource to unlock.
   * @param owner - The owner that must currently hold the lock.
   * @returns True if the lock was released, false if not owned.
   */
  async releaseIfOwned(resourceId, owner) {
    const key = this.getKey(resourceId);
    const reply = await this.client.eval(RELEASE_LOCK_SCRIPT, [key], [owner]);
    return reply === 1;
  }
  /** Maps a resource ID to its namespaced Redis key. */
  getKey(resourceId) {
    return `${this.keyPrefix}${resourceId}`;
  }
  /** Wraps lock metadata together with an owner-safe async release method. */
  createLock(id, owner, expiresAt) {
    return {
      id,
      owner,
      expiresAt,
      release: async () => {
        await this.releaseIfOwned(id, owner);
      }
    };
  }
  /** Resolves after `ms` milliseconds. */
  sleep(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
  }
};
|
|
3281
|
+
|
|
3282
|
+
// src/logger/FluxLogger.ts
|
|
3283
|
+
var FluxConsoleLogger = class {
  /**
   * Console-backed logger that tags every line with a fixed prefix.
   *
   * @param prefix - Text prepended to each message (defaults to "[Flux]").
   */
  constructor(prefix = "[Flux]") {
    this.prefix = prefix;
  }
  /**
   * Emits a fine-grained diagnostic message via console.debug.
   *
   * @param message - The primary log message.
   * @param args - Extra values forwarded to the console untouched.
   */
  debug(message, ...args) {
    const line = `${this.prefix} ${message}`;
    console.debug(line, ...args);
  }
  /**
   * Emits a coarse-grained progress message via console.info.
   *
   * @param message - The primary log message.
   * @param args - Extra values forwarded to the console untouched.
   */
  info(message, ...args) {
    const line = `${this.prefix} ${message}`;
    console.info(line, ...args);
  }
  /**
   * Emits a non-fatal warning via console.warn.
   *
   * @param message - The primary log message.
   * @param args - Extra values forwarded to the console untouched.
   */
  warn(message, ...args) {
    const line = `${this.prefix} ${message}`;
    console.warn(line, ...args);
  }
  /**
   * Emits an error message via console.error.
   *
   * @param message - The primary log message.
   * @param args - Extra values forwarded to the console untouched.
   */
  error(message, ...args) {
    const line = `${this.prefix} ${message}`;
    console.error(line, ...args);
  }
};
|
|
3330
|
+
var FluxSilentLogger = class {
  /** No-op logger: every level accepts any arguments and discards them. */
  debug(..._args) {
    // intentionally empty
  }
  /** Discards info-level messages. */
  info(..._args) {
    // intentionally empty
  }
  /** Discards warning-level messages. */
  warn(..._args) {
    // intentionally empty
  }
  /** Discards error-level messages. */
  error(..._args) {
    // intentionally empty
  }
};
|
|
3344
|
+
|
|
3345
|
+
// src/orbit/OrbitFlux.ts
|
|
3346
|
+
var OrbitFlux = class _OrbitFlux {
  /**
   * Gravito orbit that wires the Flux workflow engine into a PlanetCore app.
   *
   * @param options - Integration options; unspecified fields fall back to
   *   memory storage, container key "flux", 3 retries and a 30s timeout.
   */
  constructor(options = {}) {
    const defaults = {
      storage: "memory",
      exposeAs: "flux",
      defaultRetries: 3,
      defaultTimeout: 3e4
    };
    this.options = { ...defaults, ...options };
  }
  /**
   * Static factory equivalent to `new OrbitFlux(options)`.
   *
   * @param options - Integration options.
   * @returns A configured OrbitFlux instance.
   */
  static configure(options = {}) {
    return new _OrbitFlux(options);
  }
  /**
   * Installs the Flux engine into the core.
   *
   * Resolves the storage adapter (named preset or caller-supplied instance),
   * initializes it, bridges engine logging and lifecycle hooks onto the core,
   * and registers the engine in the IoC container under `exposeAs`.
   *
   * @param core - The PlanetCore instance being booted.
   * @throws {Error} If storage initialization fails or engine registration conflicts occur.
   */
  async install(core) {
    const { storage, dbPath, exposeAs, defaultRetries, defaultTimeout, logger } = this.options;
    let resolvedStorage;
    if (typeof storage !== "string") {
      resolvedStorage = storage;
    } else if (storage === "sqlite") {
      resolvedStorage = new (0, _chunkYXBEYVGYcjs.BunSQLiteStorage)({ path: dbPath });
    } else {
      resolvedStorage = new MemoryStorage();
    }
    await _optionalChain([resolvedStorage, 'access', _123 => _123.init, 'optionalCall', _124 => _124()]);
    // Route engine logs through the core logger unless a custom logger was given.
    const bridgeLogger = _nullishCoalesce(logger, () => ({
      debug: (msg) => core.logger.debug(`[Flux] ${msg}`),
      info: (msg) => core.logger.info(`[Flux] ${msg}`),
      warn: (msg) => core.logger.warn(`[Flux] ${msg}`),
      error: (msg) => core.logger.error(`[Flux] ${msg}`)
    }));
    this.engine = new FluxEngine({
      storage: resolvedStorage,
      defaultRetries,
      defaultTimeout,
      logger: bridgeLogger,
      // Forward engine lifecycle events onto the core hook bus.
      on: {
        stepStart: (step) => {
          core.hooks.doAction("flux:step:start", { step });
        },
        stepComplete: (step, ctx, result) => {
          core.hooks.doAction("flux:step:complete", { step, ctx, result });
        },
        stepError: (step, ctx, error) => {
          core.hooks.doAction("flux:step:error", { step, ctx, error });
        },
        workflowComplete: (ctx) => {
          core.hooks.doAction("flux:workflow:complete", { ctx });
        },
        workflowError: (ctx, error) => {
          core.hooks.doAction("flux:workflow:error", { ctx, error });
        }
      }
    });
    core.container.instance(exposeAs, this.engine);
    core.logger.info(
      `[OrbitFlux] Initialized (Storage: ${typeof storage === "string" ? storage : "custom"})`
    );
  }
  /**
   * Retrieves the managed FluxEngine instance.
   *
   * @returns The FluxEngine, or undefined if install() has not run.
   */
  getEngine() {
    return this.engine;
  }
  /**
   * Shuts down the engine (and its storage connections) if one was created.
   *
   * @throws {Error} If the engine fails to close cleanly.
   */
  async cleanup() {
    if (!this.engine) {
      return;
    }
    await this.engine.close();
  }
};
|
|
3451
|
+
|
|
3452
|
+
|
|
3453
|
+
|
|
3454
|
+
|
|
3455
|
+
|
|
3456
|
+
|
|
3457
|
+
|
|
3458
|
+
|
|
3459
|
+
|
|
3460
|
+
|
|
3461
|
+
|
|
3462
|
+
|
|
3463
|
+
|
|
3464
|
+
|
|
3465
|
+
|
|
3466
|
+
|
|
3467
|
+
|
|
3468
|
+
|
|
3469
|
+
|
|
3470
|
+
|
|
3471
|
+
|
|
3472
|
+
|
|
3473
|
+
|
|
3474
|
+
|
|
3475
|
+
|
|
3476
|
+
|
|
3477
|
+
|
|
3478
|
+
|
|
3479
|
+
|
|
3480
|
+
|
|
3481
|
+
|
|
3482
|
+
|
|
3483
|
+
|
|
3484
|
+
|
|
3485
|
+
// Public CommonJS surface of this chunk: error codes and factory helpers,
// workflow-building primitives, storage adapters, lock providers, the trace
// sink, loggers, and the OrbitFlux core integration.
exports.FluxErrorCode = FluxErrorCode; exports.FluxError = FluxError; exports.workflowNotFound = workflowNotFound; exports.invalidStateTransition = invalidStateTransition; exports.invalidInput = invalidInput; exports.workflowNameMismatch = workflowNameMismatch; exports.workflowDefinitionChanged = workflowDefinitionChanged; exports.workflowNotSuspended = workflowNotSuspended; exports.stepNotFound = stepNotFound; exports.invalidStepIndex = invalidStepIndex; exports.emptyWorkflow = emptyWorkflow; exports.noRecoveryAction = noRecoveryAction; exports.invalidJsonPointer = invalidJsonPointer; exports.invalidPathTraversal = invalidPathTraversal; exports.cannotReplaceRoot = cannotReplaceRoot; exports.cannotRemoveRoot = cannotRemoveRoot; exports.WorkflowBuilder = WorkflowBuilder; exports.createWorkflow = createWorkflow; exports.BatchExecutor = BatchExecutor; exports.ContextManager = ContextManager; exports.StateMachine = StateMachine; exports.CronTrigger = CronTrigger; exports.MemoryStorage = MemoryStorage; exports.StepExecutor = StepExecutor; exports.FluxEngine = FluxEngine; exports.PostgreSQLStorage = PostgreSQLStorage; exports.JsonFileTraceSink = JsonFileTraceSink; exports.MemoryLockProvider = MemoryLockProvider; exports.RedisLockProvider = RedisLockProvider; exports.FluxConsoleLogger = FluxConsoleLogger; exports.FluxSilentLogger = FluxSilentLogger; exports.OrbitFlux = OrbitFlux;
|
|
3486
|
+
//# sourceMappingURL=chunk-WAPZDXSX.cjs.map
|