@gravito/flux 1.0.0-beta.2 → 1.0.0-beta.3
This diff covers publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/README.zh-TW.md +189 -0
- package/assets/flux-branching.svg +84 -0
- package/dist/bun.cjs +7 -0
- package/dist/bun.cjs.map +1 -0
- package/dist/{storage/BunSQLiteStorage.d.ts → bun.d.cts} +8 -5
- package/dist/bun.d.ts +72 -5
- package/dist/bun.js +2 -2
- package/dist/bun.js.map +1 -0
- package/dist/chunk-J37UUMLM.js +858 -0
- package/dist/chunk-J37UUMLM.js.map +1 -0
- package/dist/chunk-RPECIW7O.cjs +858 -0
- package/dist/chunk-RPECIW7O.cjs.map +1 -0
- package/dist/chunk-SJSPR4ZU.cjs +173 -0
- package/dist/chunk-SJSPR4ZU.cjs.map +1 -0
- package/dist/{chunk-qjdtqchy.js → chunk-ZAMVC732.js} +35 -7
- package/dist/chunk-ZAMVC732.js.map +1 -0
- package/dist/index.cjs +121 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +43 -0
- package/dist/index.d.ts +40 -35
- package/dist/index.js +102 -460
- package/dist/index.js.map +1 -0
- package/dist/index.node.cjs +28 -0
- package/dist/index.node.cjs.map +1 -0
- package/dist/index.node.d.cts +499 -0
- package/dist/index.node.d.ts +494 -13
- package/dist/index.node.js +28 -0
- package/dist/index.node.js.map +1 -0
- package/dist/{types.d.ts → types-DvVHBmP6.d.cts} +59 -18
- package/dist/types-DvVHBmP6.d.ts +235 -0
- package/package.json +26 -22
- package/dist/builder/WorkflowBuilder.d.ts +0 -96
- package/dist/builder/WorkflowBuilder.d.ts.map +0 -1
- package/dist/builder/index.d.ts +0 -2
- package/dist/builder/index.d.ts.map +0 -1
- package/dist/bun.d.ts.map +0 -1
- package/dist/core/ContextManager.d.ts +0 -40
- package/dist/core/ContextManager.d.ts.map +0 -1
- package/dist/core/StateMachine.d.ts +0 -43
- package/dist/core/StateMachine.d.ts.map +0 -1
- package/dist/core/StepExecutor.d.ts +0 -34
- package/dist/core/StepExecutor.d.ts.map +0 -1
- package/dist/core/index.d.ts +0 -4
- package/dist/core/index.d.ts.map +0 -1
- package/dist/engine/FluxEngine.d.ts +0 -66
- package/dist/engine/FluxEngine.d.ts.map +0 -1
- package/dist/engine/index.d.ts +0 -2
- package/dist/engine/index.d.ts.map +0 -1
- package/dist/index.d.ts.map +0 -1
- package/dist/index.node.d.ts.map +0 -1
- package/dist/logger/FluxLogger.d.ts +0 -40
- package/dist/logger/FluxLogger.d.ts.map +0 -1
- package/dist/logger/index.d.ts +0 -2
- package/dist/logger/index.d.ts.map +0 -1
- package/dist/node/index.mjs +0 -619
- package/dist/orbit/OrbitFlux.d.ts +0 -107
- package/dist/orbit/OrbitFlux.d.ts.map +0 -1
- package/dist/orbit/index.d.ts +0 -2
- package/dist/orbit/index.d.ts.map +0 -1
- package/dist/storage/BunSQLiteStorage.d.ts.map +0 -1
- package/dist/storage/MemoryStorage.d.ts +0 -28
- package/dist/storage/MemoryStorage.d.ts.map +0 -1
- package/dist/storage/index.d.ts +0 -3
- package/dist/storage/index.d.ts.map +0 -1
- package/dist/types.d.ts.map +0 -1
@@ -0,0 +1,858 @@
+"use strict";Object.defineProperty(exports, "__esModule", {value: true}); function _nullishCoalesce(lhs, rhsFn) { if (lhs != null) { return lhs; } else { return rhsFn(); } } function _optionalChain(ops) { let lastAccessLHS = undefined; let value = ops[0]; let i = 1; while (i < ops.length) { const op = ops[i]; const fn = ops[i + 1]; i += 2; if ((op === 'optionalAccess' || op === 'optionalCall') && value == null) { return undefined; } if (op === 'access' || op === 'optionalAccess') { lastAccessLHS = value; value = fn(value); } else if (op === 'call' || op === 'optionalCall') { value = fn((...args) => value.call(lastAccessLHS, ...args)); lastAccessLHS = undefined; } } return value; } var _class; var _class2; var _class3;
+
+var _chunkSJSPR4ZUcjs = require('./chunk-SJSPR4ZU.cjs');
+
+// src/builder/WorkflowBuilder.ts
+var WorkflowBuilder = (_class = class {
+
+  __init() {this._steps = []}
+
+  constructor(name) {;_class.prototype.__init.call(this);
+    this._name = name;
+  }
+  /**
+   * Define input type
+   *
+   * This method is used for TypeScript type inference.
+   */
+  input() {
+    return this;
+  }
+  /**
+   * Define workflow data (state) type
+   *
+   * This method is used for TypeScript type inference.
+   */
+  data() {
+    return this;
+  }
+  /**
+   * Add input validator
+   */
+  validate(validator) {
+    this._validateInput = validator;
+    return this;
+  }
+  /**
+   * Add a step to the workflow
+   */
+  step(name, handler, options) {
+    this._steps.push({
+      name,
+      handler,
+      retries: _optionalChain([options, 'optionalAccess', _2 => _2.retries]),
+      timeout: _optionalChain([options, 'optionalAccess', _3 => _3.timeout]),
+      when: _optionalChain([options, 'optionalAccess', _4 => _4.when]),
+      commit: false
+    });
+    return this;
+  }
+  /**
+   * Add a commit step (always executes, even on replay)
+   *
+   * Commit steps are for side effects that should not be skipped,
+   * such as database writes or external API calls.
+   */
+  commit(name, handler, options) {
+    this._steps.push({
+      name,
+      handler,
+      retries: _optionalChain([options, 'optionalAccess', _5 => _5.retries]),
+      timeout: _optionalChain([options, 'optionalAccess', _6 => _6.timeout]),
+      when: _optionalChain([options, 'optionalAccess', _7 => _7.when]),
+      commit: true
+    });
+    return this;
+  }
+  /**
+   * Build the workflow definition
+   */
+  build() {
+    if (this._steps.length === 0) {
+      throw new Error(`Workflow "${this._name}" has no steps`);
+    }
+    return {
+      name: this._name,
+      steps: [...this._steps],
+      validateInput: this._validateInput
+    };
+  }
+  /**
+   * Describe workflow (serializable metadata)
+   */
+  describe() {
+    const steps = this._steps.map((step) => ({
+      name: step.name,
+      commit: Boolean(step.commit),
+      retries: step.retries,
+      timeout: step.timeout,
+      hasCondition: Boolean(step.when)
+    }));
+    return {
+      name: this._name,
+      steps
+    };
+  }
+  /**
+   * Get workflow name
+   */
+  get name() {
+    return this._name;
+  }
+  /**
+   * Get step count
+   */
+  get stepCount() {
+    return this._steps.length;
+  }
+}, _class);
+function createWorkflow(name) {
+  return new WorkflowBuilder(name);
+}
+
+// src/core/ContextManager.ts
+function generateId() {
+  return crypto.randomUUID();
+}
+var ContextManager = class {
+  /**
+   * Create a new workflow context
+   */
+  create(name, input, stepCount) {
+    const history = Array.from({ length: stepCount }, (_, _i) => ({
+      name: "",
+      status: "pending",
+      retries: 0
+    }));
+    return {
+      id: generateId(),
+      name,
+      input,
+      data: {},
+      status: "pending",
+      currentStep: 0,
+      history
+    };
+  }
+  /**
+   * Restore context from saved state
+   */
+  restore(state) {
+    return {
+      id: state.id,
+      name: state.name,
+      input: state.input,
+      data: { ...state.data },
+      status: state.status,
+      currentStep: state.currentStep,
+      history: state.history.map((h) => ({ ...h }))
+    };
+  }
+  /**
+   * Convert context to serializable state
+   */
+  toState(ctx) {
+    return {
+      id: ctx.id,
+      name: ctx.name,
+      status: ctx.status,
+      input: ctx.input,
+      data: { ...ctx.data },
+      currentStep: ctx.currentStep,
+      history: ctx.history.map((h) => ({ ...h })),
+      createdAt: /* @__PURE__ */ new Date(),
+      updatedAt: /* @__PURE__ */ new Date()
+    };
+  }
+  /**
+   * Update context status (returns new context for immutability)
+   */
+  updateStatus(ctx, status) {
+    return {
+      ...ctx,
+      status
+    };
+  }
+  /**
+   * Advance to next step
+   */
+  advanceStep(ctx) {
+    return {
+      ...ctx,
+      currentStep: ctx.currentStep + 1
+    };
+  }
+  /**
+   * Update step name in history
+   */
+  setStepName(ctx, index, name) {
+    if (ctx.history[index]) {
+      ctx.history[index].name = name;
+    }
+  }
+};
+
+// src/core/StateMachine.ts
+var TRANSITIONS = {
+  pending: ["running", "failed"],
+  running: ["paused", "completed", "failed"],
+  paused: ["running", "failed"],
+  completed: [],
+  // terminal state
+  failed: ["pending"]
+  // allow retry
+};
+var StateMachine = (_class2 = class extends EventTarget {constructor(...args2) { super(...args2); _class2.prototype.__init2.call(this); }
+  __init2() {this._status = "pending"}
+  /**
+   * Current status
+   */
+  get status() {
+    return this._status;
+  }
+  /**
+   * Check if transition to target status is allowed
+   */
+  canTransition(to) {
+    return TRANSITIONS[this._status].includes(to);
+  }
+  /**
+   * Transition to a new status
+   *
+   * @throws {Error} If transition is not allowed
+   */
+  transition(to) {
+    if (!this.canTransition(to)) {
+      throw new Error(`Invalid state transition: ${this._status} \u2192 ${to}`);
+    }
+    const from = this._status;
+    this._status = to;
+    this.dispatchEvent(
+      new CustomEvent("transition", {
+        detail: { from, to }
+      })
+    );
+  }
+  /**
+   * Force set status (for replay/restore)
+   */
+  forceStatus(status) {
+    this._status = status;
+  }
+  /**
+   * Check if workflow is in terminal state
+   */
+  isTerminal() {
+    return this._status === "completed" || this._status === "failed";
+  }
+  /**
+   * Check if workflow can be executed
+   */
+  canExecute() {
+    return this._status === "pending" || this._status === "paused";
+  }
+}, _class2);
+
+// src/core/StepExecutor.ts
+var StepExecutor = class {
+
+
+
+  constructor(options = {}) {
+    this.defaultRetries = _nullishCoalesce(options.defaultRetries, () => ( 3));
+    this.defaultTimeout = _nullishCoalesce(options.defaultTimeout, () => ( 3e4));
+    this.onRetry = options.onRetry;
+  }
+  /**
+   * Execute a step with retry and timeout
+   */
+  async execute(step, ctx, execution) {
+    const maxRetries = _nullishCoalesce(step.retries, () => ( this.defaultRetries));
+    const timeout = _nullishCoalesce(step.timeout, () => ( this.defaultTimeout));
+    const startTime = Date.now();
+    if (step.when && !step.when(ctx)) {
+      execution.status = "skipped";
+      return {
+        success: true,
+        duration: 0
+      };
+    }
+    execution.status = "running";
+    execution.startedAt = /* @__PURE__ */ new Date();
+    let lastError;
+    for (let attempt = 0; attempt <= maxRetries; attempt++) {
+      execution.retries = attempt;
+      try {
+        await this.executeWithTimeout(step.handler, ctx, timeout);
+        execution.status = "completed";
+        execution.completedAt = /* @__PURE__ */ new Date();
+        execution.duration = Date.now() - startTime;
+        return {
+          success: true,
+          duration: execution.duration
+        };
+      } catch (error) {
+        lastError = error instanceof Error ? error : new Error(String(error));
+        if (attempt < maxRetries) {
+          await _optionalChain([this, 'access', _8 => _8.onRetry, 'optionalCall', _9 => _9(step, ctx, lastError, attempt + 1, maxRetries)]);
+          await this.sleep(Math.min(1e3 * 2 ** attempt, 1e4));
+        }
+      }
+    }
+    execution.status = "failed";
+    execution.completedAt = /* @__PURE__ */ new Date();
+    execution.duration = Date.now() - startTime;
+    execution.error = _optionalChain([lastError, 'optionalAccess', _10 => _10.message]);
+    return {
+      success: false,
+      error: lastError,
+      duration: execution.duration
+    };
+  }
+  /**
+   * Execute handler with timeout
+   */
+  async executeWithTimeout(handler, ctx, timeout) {
+    let timer = null;
+    try {
+      const timeoutPromise = new Promise((_, reject) => {
+        timer = setTimeout(() => reject(new Error("Step timeout")), timeout);
+      });
+      await Promise.race([Promise.resolve(handler(ctx)), timeoutPromise]);
+    } finally {
+      if (timer) {
+        clearTimeout(timer);
+      }
+    }
+  }
+  /**
+   * Sleep helper
+   */
+  sleep(ms) {
+    return new Promise((resolve) => setTimeout(resolve, ms));
+  }
+};
+
+// src/storage/MemoryStorage.ts
+var MemoryStorage = (_class3 = class {constructor() { _class3.prototype.__init3.call(this); }
+  __init3() {this.store = /* @__PURE__ */ new Map()}
+  async save(state) {
+    this.store.set(state.id, {
+      ...state,
+      updatedAt: /* @__PURE__ */ new Date()
+    });
+  }
+  async load(id) {
+    return _nullishCoalesce(this.store.get(id), () => ( null));
+  }
+  async list(filter) {
+    let results = Array.from(this.store.values());
+    if (_optionalChain([filter, 'optionalAccess', _11 => _11.name])) {
+      results = results.filter((s) => s.name === filter.name);
+    }
+    if (_optionalChain([filter, 'optionalAccess', _12 => _12.status])) {
+      const statuses = Array.isArray(filter.status) ? filter.status : [filter.status];
+      results = results.filter((s) => statuses.includes(s.status));
+    }
+    results.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
+    if (_optionalChain([filter, 'optionalAccess', _13 => _13.offset])) {
+      results = results.slice(filter.offset);
+    }
+    if (_optionalChain([filter, 'optionalAccess', _14 => _14.limit])) {
+      results = results.slice(0, filter.limit);
+    }
+    return results;
+  }
+  async delete(id) {
+    this.store.delete(id);
+  }
+  async init() {
+  }
+  async close() {
+    this.store.clear();
+  }
+  /**
+   * Get store size (for testing)
+   */
+  size() {
+    return this.store.size;
+  }
+}, _class3);
+
+// src/engine/FluxEngine.ts
+var FluxEngine = class {
+
+
+
+
+  constructor(config = {}) {
+    this.config = config;
+    this.storage = _nullishCoalesce(config.storage, () => ( new MemoryStorage()));
+    this.executor = new StepExecutor({
+      defaultRetries: config.defaultRetries,
+      defaultTimeout: config.defaultTimeout,
+      onRetry: async (step, ctx, error, attempt, maxRetries) => {
+        await this.emitTrace({
+          type: "step:retry",
+          timestamp: Date.now(),
+          workflowId: ctx.id,
+          workflowName: ctx.name,
+          stepName: step.name,
+          stepIndex: ctx.currentStep,
+          commit: Boolean(step.commit),
+          retries: attempt,
+          maxRetries,
+          error: error.message,
+          status: "running"
+        });
+      }
+    });
+    this.contextManager = new ContextManager();
+  }
+  /**
+   * Execute a workflow with input data
+   *
+   * @param workflow - Workflow builder or definition
+   * @param input - Input data for the workflow
+   * @returns Execution result
+   */
+  async execute(workflow, input) {
+    const startTime = Date.now();
+    const definition = this.resolveDefinition(workflow);
+    if (definition.validateInput && !definition.validateInput(input)) {
+      throw new Error(`Invalid input for workflow "${definition.name}"`);
+    }
+    const ctx = this.contextManager.create(
+      definition.name,
+      input,
+      definition.steps.length
+    );
+    const stateMachine = new StateMachine();
+    await this.storage.save(this.contextManager.toState(ctx));
+    return this.runFrom(definition, ctx, stateMachine, startTime, 0);
+  }
+  /**
+   * Resume a paused or failed workflow
+   *
+   * @param workflowId - Workflow instance ID
+   * @returns Execution result or null if not found
+   */
+  async resume(workflow, workflowId, options) {
+    const definition = this.resolveDefinition(workflow);
+    const state = await this.storage.load(workflowId);
+    if (!state) {
+      return null;
+    }
+    if (state.name !== definition.name) {
+      throw new Error(`Workflow name mismatch: ${state.name} !== ${definition.name}`);
+    }
+    if (state.history.length !== definition.steps.length) {
+      throw new Error("Workflow definition changed; resume is not safe");
+    }
+    const ctx = this.contextManager.restore(state);
+    const stateMachine = new StateMachine();
+    stateMachine.forceStatus("pending");
+    const startIndex = this.resolveStartIndex(definition, _optionalChain([options, 'optionalAccess', _15 => _15.fromStep]), ctx.currentStep);
+    this.resetHistoryFrom(ctx, startIndex);
+    Object.assign(ctx, { status: "pending", currentStep: startIndex });
+    await this.storage.save(this.contextManager.toState(ctx));
+    return this.runFrom(definition, ctx, stateMachine, Date.now(), startIndex, {
+      resume: true,
+      fromStep: startIndex
+    });
+  }
+  /**
+   * Retry a specific step (replays from that step onward)
+   */
+  async retryStep(workflow, workflowId, stepName) {
+    const definition = this.resolveDefinition(workflow);
+    const state = await this.storage.load(workflowId);
+    if (!state) {
+      return null;
+    }
+    if (state.name !== definition.name) {
+      throw new Error(`Workflow name mismatch: ${state.name} !== ${definition.name}`);
+    }
+    if (state.history.length !== definition.steps.length) {
+      throw new Error("Workflow definition changed; retry is not safe");
+    }
+    const ctx = this.contextManager.restore(state);
+    const stateMachine = new StateMachine();
+    stateMachine.forceStatus("pending");
+    const startIndex = this.resolveStartIndex(definition, stepName, ctx.currentStep);
+    this.resetHistoryFrom(ctx, startIndex);
+    Object.assign(ctx, { status: "pending", currentStep: startIndex });
+    await this.storage.save(this.contextManager.toState(ctx));
+    return this.runFrom(definition, ctx, stateMachine, Date.now(), startIndex, {
+      retry: true,
+      fromStep: startIndex
+    });
+  }
+  /**
+   * Get workflow state by ID
+   */
+  async get(workflowId) {
+    return this.storage.load(workflowId);
+  }
+  /**
+   * Save workflow state manually (e.g., for external updates)
+   */
+  async saveState(state) {
+    return this.storage.save(state);
+  }
+  /**
+   * List workflows
+   */
+  async list(filter) {
+    return this.storage.list(filter);
+  }
+  /**
+   * Initialize engine (init storage)
+   */
+  async init() {
+    await _optionalChain([this, 'access', _16 => _16.storage, 'access', _17 => _17.init, 'optionalCall', _18 => _18()]);
+  }
+  /**
+   * Shutdown engine (cleanup)
+   */
+  async close() {
+    await _optionalChain([this, 'access', _19 => _19.storage, 'access', _20 => _20.close, 'optionalCall', _21 => _21()]);
+  }
+  resolveDefinition(workflow) {
+    return workflow instanceof WorkflowBuilder ? workflow.build() : workflow;
+  }
+  resolveStartIndex(definition, fromStep, fallback) {
+    if (typeof fromStep === "number") {
+      if (fromStep < 0 || fromStep >= definition.steps.length) {
+        throw new Error(`Invalid step index: ${fromStep}`);
+      }
+      return fromStep;
+    }
+    if (typeof fromStep === "string") {
+      const index = definition.steps.findIndex((step) => step.name === fromStep);
+      if (index === -1) {
+        throw new Error(`Step not found: ${fromStep}`);
+      }
+      return index;
+    }
+    return Math.max(0, Math.min(fallback, definition.steps.length - 1));
+  }
+  resetHistoryFrom(ctx, startIndex) {
+    for (let i = startIndex; i < ctx.history.length; i++) {
+      const entry = ctx.history[i];
+      if (!entry) {
+        continue;
+      }
+      entry.status = "pending";
+      entry.startedAt = void 0;
+      entry.completedAt = void 0;
+      entry.duration = void 0;
+      entry.error = void 0;
+      entry.retries = 0;
+    }
+  }
+  async runFrom(definition, ctx, stateMachine, startTime, startIndex, meta) {
+    try {
+      stateMachine.transition("running");
+      Object.assign(ctx, { status: "running" });
+      await this.emitTrace({
+        type: "workflow:start",
+        timestamp: Date.now(),
+        workflowId: ctx.id,
+        workflowName: ctx.name,
+        status: ctx.status,
+        input: ctx.input,
+        meta
+      });
+      for (let i = startIndex; i < definition.steps.length; i++) {
+        const step = definition.steps[i];
+        const execution = ctx.history[i];
+        this.contextManager.setStepName(ctx, i, step.name);
+        Object.assign(ctx, { currentStep: i });
+        _optionalChain([this, 'access', _22 => _22.config, 'access', _23 => _23.on, 'optionalAccess', _24 => _24.stepStart, 'optionalCall', _25 => _25(step.name, ctx)]);
+        await this.emitTrace({
+          type: "step:start",
+          timestamp: Date.now(),
+          workflowId: ctx.id,
+          workflowName: ctx.name,
+          stepName: step.name,
+          stepIndex: i,
+          commit: Boolean(step.commit),
+          retries: execution.retries,
+          status: execution.status,
+          meta
+        });
+        const result = await this.executor.execute(step, ctx, execution);
+        if (result.success) {
+          _optionalChain([this, 'access', _26 => _26.config, 'access', _27 => _27.on, 'optionalAccess', _28 => _28.stepComplete, 'optionalCall', _29 => _29(step.name, ctx, result)]);
+          if (execution.status === "skipped") {
+            await this.emitTrace({
+              type: "step:skipped",
+              timestamp: Date.now(),
+              workflowId: ctx.id,
+              workflowName: ctx.name,
+              stepName: step.name,
+              stepIndex: i,
+              commit: Boolean(step.commit),
+              retries: execution.retries,
+              duration: result.duration,
+              status: execution.status,
+              meta
+            });
+          } else {
+            await this.emitTrace({
+              type: "step:complete",
+              timestamp: Date.now(),
+              workflowId: ctx.id,
+              workflowName: ctx.name,
+              stepName: step.name,
+              stepIndex: i,
+              commit: Boolean(step.commit),
+              retries: execution.retries,
+              duration: result.duration,
+              status: execution.status,
+              meta
+            });
+          }
+        } else {
+          _optionalChain([this, 'access', _30 => _30.config, 'access', _31 => _31.on, 'optionalAccess', _32 => _32.stepError, 'optionalCall', _33 => _33(step.name, ctx, result.error)]);
+          await this.emitTrace({
+            type: "step:error",
+            timestamp: Date.now(),
+            workflowId: ctx.id,
+            workflowName: ctx.name,
+            stepName: step.name,
+            stepIndex: i,
+            commit: Boolean(step.commit),
+            retries: execution.retries,
+            duration: result.duration,
+            error: _optionalChain([result, 'access', _34 => _34.error, 'optionalAccess', _35 => _35.message]),
+            status: execution.status,
+            meta
+          });
+          stateMachine.transition("failed");
+          Object.assign(ctx, { status: "failed" });
+          await this.storage.save({
+            ...this.contextManager.toState(ctx),
+            error: _optionalChain([result, 'access', _36 => _36.error, 'optionalAccess', _37 => _37.message])
+          });
+          return {
+            id: ctx.id,
+            status: "failed",
+            data: ctx.data,
+            history: ctx.history,
+            duration: Date.now() - startTime,
+            error: result.error
+          };
+        }
+        await this.storage.save(this.contextManager.toState(ctx));
+      }
+      stateMachine.transition("completed");
+      Object.assign(ctx, { status: "completed" });
+      await this.storage.save({
+        ...this.contextManager.toState(ctx),
+        completedAt: /* @__PURE__ */ new Date()
+      });
+      _optionalChain([this, 'access', _38 => _38.config, 'access', _39 => _39.on, 'optionalAccess', _40 => _40.workflowComplete, 'optionalCall', _41 => _41(ctx)]);
+      await this.emitTrace({
+        type: "workflow:complete",
+        timestamp: Date.now(),
+        workflowId: ctx.id,
+        workflowName: ctx.name,
+        status: ctx.status,
+        duration: Date.now() - startTime,
+        data: ctx.data,
+        meta
+      });
+      return {
+        id: ctx.id,
+        status: "completed",
+        data: ctx.data,
+        history: ctx.history,
+        duration: Date.now() - startTime
+      };
+    } catch (error) {
+      const err = error instanceof Error ? error : new Error(String(error));
+      _optionalChain([this, 'access', _42 => _42.config, 'access', _43 => _43.on, 'optionalAccess', _44 => _44.workflowError, 'optionalCall', _45 => _45(ctx, err)]);
+      await this.emitTrace({
+        type: "workflow:error",
+        timestamp: Date.now(),
+        workflowId: ctx.id,
+        workflowName: ctx.name,
+        status: "failed",
+        duration: Date.now() - startTime,
+        error: err.message,
+        meta
+      });
+      stateMachine.forceStatus("failed");
+      Object.assign(ctx, { status: "failed" });
+      await this.storage.save({
+        ...this.contextManager.toState(ctx),
+        error: err.message
+      });
+      return {
+        id: ctx.id,
+        status: "failed",
+        data: ctx.data,
+        history: ctx.history,
+        duration: Date.now() - startTime,
+        error: err
+      };
+    }
+  }
+  async emitTrace(event) {
+    try {
+      await _optionalChain([this, 'access', _46 => _46.config, 'access', _47 => _47.trace, 'optionalAccess', _48 => _48.emit, 'call', _49 => _49(event)]);
+    } catch (e) {
+    }
+  }
+};
+
+// src/trace/JsonFileTraceSink.ts
+var _promises = require('fs/promises');
+var _path = require('path');
+var JsonFileTraceSink = class {
+
+
+  constructor(options) {
+    this.path = options.path;
+    this.ready = this.init(_nullishCoalesce(options.reset, () => ( true)));
+  }
+  async init(reset) {
+    await _promises.mkdir.call(void 0, _path.dirname.call(void 0, this.path), { recursive: true });
+    if (reset) {
+      await _promises.writeFile.call(void 0, this.path, "", "utf8");
+    }
+  }
+  async emit(event) {
+    await this.ready;
+    await _promises.appendFile.call(void 0, this.path, `${JSON.stringify(event)}
+`, "utf8");
+  }
+};
+
+// src/logger/FluxLogger.ts
+var FluxConsoleLogger = class {
+
+  constructor(prefix = "[Flux]") {
+    this.prefix = prefix;
+  }
+  debug(message, ...args) {
+    console.debug(`${this.prefix} ${message}`, ...args);
+  }
+  info(message, ...args) {
+    console.info(`${this.prefix} ${message}`, ...args);
+  }
+  warn(message, ...args) {
+    console.warn(`${this.prefix} ${message}`, ...args);
+  }
+  error(message, ...args) {
+    console.error(`${this.prefix} ${message}`, ...args);
+  }
+};
+var FluxSilentLogger = class {
+  debug() {
+  }
+  info() {
+  }
+  warn() {
+  }
+  error() {
+  }
+};
+
+// src/orbit/OrbitFlux.ts
+var OrbitFlux = class _OrbitFlux {
+
+
+  constructor(options = {}) {
+    this.options = {
+      storage: "memory",
+      exposeAs: "flux",
+      defaultRetries: 3,
+      defaultTimeout: 3e4,
+      ...options
+    };
+  }
+  /**
+   * Create OrbitFlux with configuration
+   */
+  static configure(options = {}) {
+    return new _OrbitFlux(options);
+  }
+  /**
+   * Install into PlanetCore
+   *
+   * @param core - The PlanetCore instance
+   */
+  async install(core) {
+    const { storage, dbPath, exposeAs, defaultRetries, defaultTimeout, logger } = this.options;
+    let storageAdapter;
+    if (typeof storage === "string") {
+      switch (storage) {
+        case "sqlite":
+          storageAdapter = new (0, _chunkSJSPR4ZUcjs.BunSQLiteStorage)({ path: dbPath });
+          break;
+        default:
+          storageAdapter = new MemoryStorage();
+      }
+    } else {
+      storageAdapter = storage;
+    }
+    await _optionalChain([storageAdapter, 'access', _50 => _50.init, 'optionalCall', _51 => _51()]);
+    const engineConfig = {
+      storage: storageAdapter,
+      defaultRetries,
+      defaultTimeout,
+      logger: _nullishCoalesce(logger, () => ( {
+        debug: (msg) => core.logger.debug(`[Flux] ${msg}`),
+        info: (msg) => core.logger.info(`[Flux] ${msg}`),
+        warn: (msg) => core.logger.warn(`[Flux] ${msg}`),
+        error: (msg) => core.logger.error(`[Flux] ${msg}`)
+      })),
+      on: {
+        stepStart: (step) => {
+          core.hooks.doAction("flux:step:start", { step });
+        },
+        stepComplete: (step, ctx, result) => {
+          core.hooks.doAction("flux:step:complete", { step, ctx, result });
+        },
+        stepError: (step, ctx, error) => {
+          core.hooks.doAction("flux:step:error", { step, ctx, error });
+        },
+        workflowComplete: (ctx) => {
+          core.hooks.doAction("flux:workflow:complete", { ctx });
+        },
+        workflowError: (ctx, error) => {
+          core.hooks.doAction("flux:workflow:error", { ctx, error });
+        }
+      }
+    };
+    this.engine = new FluxEngine(engineConfig);
+    core.services.set(exposeAs, this.engine);
+    core.logger.info(
+      `[OrbitFlux] Initialized (Storage: ${typeof storage === "string" ? storage : "custom"})`
+    );
+  }
+  /**
+   * Get the FluxEngine instance
+   */
+  getEngine() {
+    return this.engine;
+  }
+};
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+exports.WorkflowBuilder = WorkflowBuilder; exports.createWorkflow = createWorkflow; exports.ContextManager = ContextManager; exports.StateMachine = StateMachine; exports.StepExecutor = StepExecutor; exports.MemoryStorage = MemoryStorage; exports.FluxEngine = FluxEngine; exports.JsonFileTraceSink = JsonFileTraceSink; exports.FluxConsoleLogger = FluxConsoleLogger; exports.FluxSilentLogger = FluxSilentLogger; exports.OrbitFlux = OrbitFlux;
+//# sourceMappingURL=chunk-RPECIW7O.cjs.map
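
For orientation, the code added in this chunk exposes a fluent builder (createWorkflow / WorkflowBuilder) and a FluxEngine that runs the built definition against a storage adapter. Below is a minimal TypeScript sketch of how that API appears to fit together, based only on the signatures visible in this diff; it assumes the package's main entry re-exports these names, and the workflow name, step names, and input shape are hypothetical.

import { createWorkflow, FluxEngine, MemoryStorage } from "@gravito/flux";

// Hypothetical two-step workflow: one ordinary step plus one commit step
// (per the JSDoc above, commit steps are for side effects that should not
// be skipped on replay).
const onboarding = createWorkflow("user-onboarding")
  .validate((input: any) => typeof input?.email === "string")
  .step("create-account", async (ctx: any) => {
    // ctx.data starts as an empty object and is persisted with the context
    ctx.data.accountId = `acct-${ctx.input.email}`;
  })
  .commit(
    "send-welcome-email",
    async (ctx: any) => {
      console.log(`welcome ${ctx.input.email}`);
    },
    { retries: 1, timeout: 5_000 }
  );

// MemoryStorage is the engine's default; a persistent adapter can be passed instead.
const engine = new FluxEngine({ storage: new MemoryStorage() });
await engine.init();

const result = await engine.execute(onboarding, { email: "ada@example.com" });
console.log(result.status, result.duration, result.id);

// A stored run can later be replayed from a named or numbered step:
// await engine.resume(onboarding, result.id, { fromStep: "send-welcome-email" });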