@unlaxer/tramli 1.5.2 → 1.5.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. package/dist/cjs/data-flow-graph.d.ts +92 -0
  2. package/dist/cjs/data-flow-graph.js +451 -0
  3. package/dist/cjs/flow-context.d.ts +19 -0
  4. package/dist/cjs/flow-context.js +44 -0
  5. package/dist/cjs/flow-definition.d.ts +85 -0
  6. package/dist/cjs/flow-definition.js +488 -0
  7. package/dist/cjs/flow-engine.d.ts +13 -0
  8. package/dist/cjs/flow-engine.js +242 -0
  9. package/dist/cjs/flow-error.d.ts +14 -0
  10. package/dist/cjs/flow-error.js +34 -0
  11. package/dist/cjs/flow-instance.d.ts +44 -0
  12. package/dist/cjs/flow-instance.js +104 -0
  13. package/dist/cjs/flow-key.d.ts +10 -0
  14. package/dist/cjs/flow-key.js +6 -0
  15. package/dist/cjs/in-memory-flow-store.d.ts +19 -0
  16. package/dist/cjs/in-memory-flow-store.js +27 -0
  17. package/dist/cjs/index.d.ts +16 -0
  18. package/dist/cjs/index.js +29 -0
  19. package/dist/cjs/mermaid-generator.d.ts +9 -0
  20. package/dist/cjs/mermaid-generator.js +80 -0
  21. package/dist/cjs/package.json +1 -0
  22. package/dist/cjs/skeleton-generator.d.ts +10 -0
  23. package/dist/cjs/skeleton-generator.js +48 -0
  24. package/dist/cjs/tramli.d.ts +8 -0
  25. package/dist/cjs/tramli.js +14 -0
  26. package/dist/cjs/types.d.ts +75 -0
  27. package/dist/cjs/types.js +2 -0
  28. package/dist/esm/data-flow-graph.d.ts +92 -0
  29. package/dist/esm/data-flow-graph.js +447 -0
  30. package/dist/esm/flow-context.d.ts +19 -0
  31. package/dist/esm/flow-context.js +40 -0
  32. package/dist/esm/flow-definition.d.ts +85 -0
  33. package/dist/esm/flow-definition.js +480 -0
  34. package/dist/esm/flow-engine.d.ts +13 -0
  35. package/dist/esm/flow-engine.js +238 -0
  36. package/dist/esm/flow-error.d.ts +14 -0
  37. package/dist/esm/flow-error.js +30 -0
  38. package/dist/esm/flow-instance.d.ts +44 -0
  39. package/dist/esm/flow-instance.js +100 -0
  40. package/dist/esm/flow-key.d.ts +10 -0
  41. package/dist/esm/flow-key.js +3 -0
  42. package/dist/esm/in-memory-flow-store.d.ts +19 -0
  43. package/dist/esm/in-memory-flow-store.js +23 -0
  44. package/dist/esm/index.d.ts +16 -0
  45. package/dist/esm/index.js +11 -0
  46. package/dist/esm/mermaid-generator.d.ts +9 -0
  47. package/dist/esm/mermaid-generator.js +76 -0
  48. package/dist/esm/skeleton-generator.d.ts +10 -0
  49. package/dist/esm/skeleton-generator.js +44 -0
  50. package/dist/esm/tramli.d.ts +8 -0
  51. package/dist/esm/tramli.js +10 -0
  52. package/dist/esm/types.d.ts +75 -0
  53. package/dist/esm/types.js +1 -0
  54. package/package.json +8 -4
@@ -0,0 +1,447 @@
1
+ /**
2
+ * Bipartite graph of data types (FlowKey) and processors/guards.
3
+ * Built automatically during FlowDefinition.build().
4
+ */
5
+ export class DataFlowGraph {
6
+ _availableAtState;
7
+ _producers;
8
+ _consumers;
9
+ _allProduced;
10
+ _allConsumed;
11
+ constructor(availableAtState, producers, consumers, allProduced, allConsumed) {
12
+ this._availableAtState = availableAtState;
13
+ this._producers = producers;
14
+ this._consumers = consumers;
15
+ this._allProduced = allProduced;
16
+ this._allConsumed = allConsumed;
17
+ }
18
+ /** Data types available in context when the flow reaches the given state. */
19
+ availableAt(state) {
20
+ return this._availableAtState.get(state) ?? new Set();
21
+ }
22
+ /** Processors/guards that produce the given type. */
23
+ producersOf(key) {
24
+ return this._producers.get(key) ?? [];
25
+ }
26
+ /** Processors/guards that consume (require) the given type. */
27
+ consumersOf(key) {
28
+ return this._consumers.get(key) ?? [];
29
+ }
30
+ /** Types produced but never required by any downstream processor/guard. */
31
+ deadData() {
32
+ const dead = new Set(this._allProduced);
33
+ for (const c of this._allConsumed)
34
+ dead.delete(c);
35
+ return dead;
36
+ }
37
+ /** Data lifetime: which states a type is first produced and last consumed. */
38
+ lifetime(key) {
39
+ const prods = this._producers.get(key);
40
+ const cons = this._consumers.get(key);
41
+ if (!prods || prods.length === 0)
42
+ return null;
43
+ const firstProduced = prods[0].toState;
44
+ const lastConsumed = cons && cons.length > 0 ? cons[cons.length - 1].fromState : firstProduced;
45
+ return { firstProduced, lastConsumed };
46
+ }
47
+ /** Context pruning hints: for each state, types available but not required at that state. */
48
+ pruningHints() {
49
+ const consumedAt = new Map();
50
+ for (const [typeName, nodes] of this._consumers) {
51
+ for (const node of nodes) {
52
+ if (!consumedAt.has(node.fromState))
53
+ consumedAt.set(node.fromState, new Set());
54
+ consumedAt.get(node.fromState).add(typeName);
55
+ }
56
+ }
57
+ const hints = new Map();
58
+ for (const [state, available] of this._availableAtState) {
59
+ const needed = consumedAt.get(state) ?? new Set();
60
+ const prunable = new Set();
61
+ for (const type of available) {
62
+ if (!needed.has(type))
63
+ prunable.add(type);
64
+ }
65
+ if (prunable.size > 0)
66
+ hints.set(state, prunable);
67
+ }
68
+ return hints;
69
+ }
70
+ /**
71
+ * Check if processor B can replace processor A without breaking data-flow.
72
+ * B is compatible with A if: B requires no more than A, and B produces at least what A produces.
73
+ */
74
+ static isCompatible(a, b) {
75
+ const aReqs = new Set(a.requires);
76
+ const bReqs = new Set(b.requires);
77
+ const aProds = new Set(a.produces);
78
+ const bProds = new Set(b.produces);
79
+ for (const r of bReqs) {
80
+ if (!aReqs.has(r))
81
+ return false;
82
+ }
83
+ for (const p of aProds) {
84
+ if (!bProds.has(p))
85
+ return false;
86
+ }
87
+ return true;
88
+ }
89
+ /**
90
+ * Verify a processor's declared requires/produces against actual context usage.
91
+ * Returns list of violations (empty = OK).
92
+ */
93
+ static async verifyProcessor(processor, ctx) {
94
+ const violations = [];
95
+ for (const req of processor.requires) {
96
+ if (!ctx.has(req))
97
+ violations.push(`requires ${req} but not in context`);
98
+ }
99
+ const beforeKeys = new Set();
100
+ for (const req of processor.requires) {
101
+ if (ctx.has(req))
102
+ beforeKeys.add(req);
103
+ }
104
+ // Capture all existing keys
105
+ const snapshot = ctx.snapshot();
106
+ const existingKeys = new Set(snapshot.keys());
107
+ try {
108
+ await processor.process(ctx);
109
+ }
110
+ catch (e) {
111
+ violations.push(`threw ${e.constructor.name}: ${e.message}`);
112
+ return violations;
113
+ }
114
+ const afterSnapshot = ctx.snapshot();
115
+ for (const prod of processor.produces) {
116
+ if (!afterSnapshot.has(prod))
117
+ violations.push(`declares produces ${prod} but did not put it`);
118
+ }
119
+ for (const [key] of afterSnapshot) {
120
+ if (!existingKeys.has(key) && !processor.produces.includes(key)) {
121
+ violations.push(`put ${key} but did not declare it in produces`);
122
+ }
123
+ }
124
+ return violations;
125
+ }
126
+ /** All type nodes in the graph. */
127
+ allTypes() {
128
+ const types = new Set(this._allProduced);
129
+ for (const c of this._allConsumed)
130
+ types.add(c);
131
+ return types;
132
+ }
133
+ /**
134
+ * Assert that a flow instance's context satisfies the data-flow invariant.
135
+ * Returns list of missing type keys (empty = OK).
136
+ */
137
+ assertDataFlow(ctx, currentState) {
138
+ const missing = [];
139
+ for (const type of this.availableAt(currentState)) {
140
+ if (!ctx.has(type))
141
+ missing.push(type);
142
+ }
143
+ return missing;
144
+ }
145
+ /** Impact analysis: all producers and consumers of a given type. */
146
+ impactOf(key) {
147
+ return { producers: this.producersOf(key), consumers: this.consumersOf(key) };
148
+ }
149
+ /** Parallelism hints: pairs of processors with no data dependency. */
150
+ parallelismHints() {
151
+ const allNodes = new Set();
152
+ for (const nodes of this._producers.values())
153
+ for (const n of nodes)
154
+ allNodes.add(n.name);
155
+ for (const nodes of this._consumers.values())
156
+ for (const n of nodes)
157
+ allNodes.add(n.name);
158
+ const list = [...allNodes];
159
+ const hints = [];
160
+ for (let i = 0; i < list.length; i++) {
161
+ for (let j = i + 1; j < list.length; j++) {
162
+ const aProds = new Set(), bReqs = new Set();
163
+ const bProds = new Set(), aReqs = new Set();
164
+ for (const [t, ns] of this._producers) {
165
+ for (const n of ns) {
166
+ if (n.name === list[i])
167
+ aProds.add(t);
168
+ if (n.name === list[j])
169
+ bProds.add(t);
170
+ }
171
+ }
172
+ for (const [t, ns] of this._consumers) {
173
+ for (const n of ns) {
174
+ if (n.name === list[i])
175
+ aReqs.add(t);
176
+ if (n.name === list[j])
177
+ bReqs.add(t);
178
+ }
179
+ }
180
+ const aDepB = [...aReqs].some(r => bProds.has(r));
181
+ const bDepA = [...bReqs].some(r => aProds.has(r));
182
+ if (!aDepB && !bDepA)
183
+ hints.push([list[i], list[j]]);
184
+ }
185
+ }
186
+ return hints;
187
+ }
188
+ /** Structured JSON representation. */
189
+ toJson() {
190
+ const types = [...this.allTypes()].map(t => {
191
+ const entry = { name: t };
192
+ const prods = this.producersOf(t);
193
+ if (prods.length)
194
+ entry.producers = prods.map(p => p.name);
195
+ const cons = this.consumersOf(t);
196
+ if (cons.length)
197
+ entry.consumers = cons.map(c => c.name);
198
+ return entry;
199
+ });
200
+ return JSON.stringify({ types, deadData: [...this.deadData()] }, null, 2);
201
+ }
202
+ /** Generate Mermaid data-flow diagram. */
203
+ toMermaid() {
204
+ const lines = ['flowchart LR'];
205
+ const seen = new Set();
206
+ for (const [typeName, nodes] of this._producers) {
207
+ for (const node of nodes) {
208
+ const edge = `${node.name} -->|produces| ${typeName}`;
209
+ if (!seen.has(edge)) {
210
+ seen.add(edge);
211
+ lines.push(` ${edge}`);
212
+ }
213
+ }
214
+ }
215
+ for (const [typeName, nodes] of this._consumers) {
216
+ for (const node of nodes) {
217
+ const edge = `${typeName} -->|requires| ${node.name}`;
218
+ if (!seen.has(edge)) {
219
+ seen.add(edge);
220
+ lines.push(` ${edge}`);
221
+ }
222
+ }
223
+ }
224
+ return lines.join('\n') + '\n';
225
+ }
226
+ /** Recommended migration order: processors sorted by dependency (fewest first). */
227
+ migrationOrder() {
228
+ const nodeReqs = new Map();
229
+ const nodeProds = new Map();
230
+ for (const [t, ns] of this._consumers)
231
+ for (const n of ns) {
232
+ if (!nodeReqs.has(n.name))
233
+ nodeReqs.set(n.name, new Set());
234
+ nodeReqs.get(n.name).add(t);
235
+ }
236
+ for (const [t, ns] of this._producers)
237
+ for (const n of ns) {
238
+ if (!nodeProds.has(n.name))
239
+ nodeProds.set(n.name, new Set());
240
+ nodeProds.get(n.name).add(t);
241
+ }
242
+ const order = [];
243
+ const available = new Set();
244
+ for (const [t, ns] of this._producers) {
245
+ if (ns.some(n => n.name === 'initial'))
246
+ available.add(t);
247
+ }
248
+ const remaining = new Set([...nodeReqs.keys(), ...nodeProds.keys()]);
249
+ remaining.delete('initial');
250
+ while (remaining.size > 0) {
251
+ let next = null;
252
+ for (const name of remaining) {
253
+ const reqs = nodeReqs.get(name) ?? new Set();
254
+ if ([...reqs].every(r => available.has(r))) {
255
+ next = name;
256
+ break;
257
+ }
258
+ }
259
+ if (!next) {
260
+ order.push(...remaining);
261
+ break;
262
+ }
263
+ order.push(next);
264
+ remaining.delete(next);
265
+ for (const p of nodeProds.get(next) ?? [])
266
+ available.add(p);
267
+ }
268
+ return order;
269
+ }
270
+ /** Generate Markdown migration checklist. */
271
+ toMarkdown() {
272
+ const lines = ['# Migration Checklist\n'];
273
+ const order = this.migrationOrder();
274
+ for (let i = 0; i < order.length; i++) {
275
+ const name = order[i];
276
+ const reqs = [];
277
+ for (const [t, ns] of this._consumers)
278
+ if (ns.some(n => n.name === name))
279
+ reqs.push(t);
280
+ const prods = [];
281
+ for (const [t, ns] of this._producers)
282
+ if (ns.some(n => n.name === name))
283
+ prods.push(t);
284
+ let line = `- [ ] **${i + 1}. ${name}**`;
285
+ if (reqs.length)
286
+ line += ` requires: [${reqs.join(', ')}]`;
287
+ if (prods.length)
288
+ line += ` produces: [${prods.join(', ')}]`;
289
+ lines.push(line);
290
+ }
291
+ const dead = this.deadData();
292
+ if (dead.size > 0) {
293
+ lines.push('\n## Dead Data\n');
294
+ for (const d of dead)
295
+ lines.push(`- ${d}`);
296
+ }
297
+ return lines.join('\n') + '\n';
298
+ }
299
+ /** Test scaffold: for each processor, list required type names. */
300
+ testScaffold() {
301
+ const scaffold = new Map();
302
+ for (const [typeName, nodes] of this._consumers) {
303
+ for (const node of nodes) {
304
+ if (!scaffold.has(node.name))
305
+ scaffold.set(node.name, []);
306
+ scaffold.get(node.name).push(typeName);
307
+ }
308
+ }
309
+ return scaffold;
310
+ }
311
+ /** Generate data-flow invariant assertions as strings. */
312
+ generateInvariantAssertions() {
313
+ const assertions = [];
314
+ for (const [state, types] of this._availableAtState) {
315
+ assertions.push(`At state ${state}: context must contain [${[...types].sort().join(', ')}]`);
316
+ }
317
+ return assertions;
318
+ }
319
+ // ─── Cross-flow / Versioning utilities ─────────────────────
320
+ /** Cross-flow map: types that one flow produces and another requires. */
321
+ static crossFlowMap(...graphs) {
322
+ const results = [];
323
+ for (let i = 0; i < graphs.length; i++) {
324
+ for (let j = 0; j < graphs.length; j++) {
325
+ if (i === j)
326
+ continue;
327
+ for (const produced of graphs[i]._allProduced) {
328
+ if (graphs[j]._allConsumed.has(produced)) {
329
+ results.push(`${produced}: flow ${i} produces → flow ${j} consumes`);
330
+ }
331
+ }
332
+ }
333
+ }
334
+ return results;
335
+ }
336
+ /** Diff two data-flow graphs. */
337
+ static diff(before, after) {
338
+ const beforeTypes = before.allTypes(), afterTypes = after.allTypes();
339
+ const addedTypes = new Set([...afterTypes].filter(t => !beforeTypes.has(t)));
340
+ const removedTypes = new Set([...beforeTypes].filter(t => !afterTypes.has(t)));
341
+ const beforeEdges = DataFlowGraph.collectEdges(before), afterEdges = DataFlowGraph.collectEdges(after);
342
+ const addedEdges = new Set([...afterEdges].filter(e => !beforeEdges.has(e)));
343
+ const removedEdges = new Set([...beforeEdges].filter(e => !afterEdges.has(e)));
344
+ return { addedTypes, removedTypes, addedEdges, removedEdges };
345
+ }
346
+ static collectEdges(graph) {
347
+ const edges = new Set();
348
+ for (const [t, ns] of graph._producers)
349
+ for (const n of ns)
350
+ edges.add(`${n.name} --produces--> ${t}`);
351
+ for (const [t, ns] of graph._consumers)
352
+ for (const n of ns)
353
+ edges.add(`${t} --requires--> ${n.name}`);
354
+ return edges;
355
+ }
356
+ /** Version compatibility: check if v1 instances can resume on v2 definition. */
357
+ static versionCompatibility(before, after) {
358
+ const issues = [];
359
+ for (const [state, beforeAvail] of before._availableAtState) {
360
+ const afterAvail = after._availableAtState.get(state) ?? new Set();
361
+ for (const type of afterAvail) {
362
+ if (!beforeAvail.has(type)) {
363
+ issues.push(`State ${state}: v2 expects ${type} but v1 instances may not have it`);
364
+ }
365
+ }
366
+ }
367
+ return issues;
368
+ }
369
+ // ─── Builder ─────────────────────────────────────────────
370
+ static build(def, initiallyAvailable) {
371
+ const stateAvail = new Map();
372
+ const producers = new Map();
373
+ const consumers = new Map();
374
+ const allProduced = new Set(initiallyAvailable);
375
+ const allConsumed = new Set();
376
+ if (def.initialState) {
377
+ traverse(def, def.initialState, new Set(initiallyAvailable), stateAvail, producers, consumers, allProduced, allConsumed);
378
+ // Mark initially available types as produced by "initial"
379
+ for (const key of initiallyAvailable) {
380
+ if (!producers.has(key))
381
+ producers.set(key, []);
382
+ producers.get(key).push({
383
+ name: 'initial', fromState: def.initialState, toState: def.initialState, kind: 'initial',
384
+ });
385
+ }
386
+ }
387
+ return new DataFlowGraph(stateAvail, producers, consumers, allProduced, allConsumed);
388
+ }
389
+ }
390
/**
 * Depth-first propagation of available data types through the flow graph,
 * recording producer/consumer edges along the way.
 *
 * The set stored in `stateAvail` for a state is the INTERSECTION of the types
 * available on every path reaching it (only types guaranteed on all paths may
 * be relied upon). On a re-visit we therefore narrow the recorded set to its
 * intersection with the incoming `available` set and re-propagate; when the
 * intersection changes nothing we have reached a fixpoint and stop.
 *
 * BUGFIX vs previous revision: when the incoming set was a strict subset of
 * the recorded one, the old code returned without narrowing, so a state could
 * over-report types that one of its incoming paths never produced.
 * Termination: every re-entry either returns or strictly shrinks a finite
 * set, so the recursion is finite even if the graph had cycles.
 *
 * @param def        flow definition exposing transitionsFrom(state)
 * @param state      state currently being visited
 * @param available  types available on the path that reached `state`
 * @param stateAvail out: Map<state, Set<typeName>>
 * @param producers  out: Map<typeName, node[]>
 * @param consumers  out: Map<typeName, node[]>
 * @param allProduced out: Set of every produced type
 * @param allConsumed out: Set of every consumed type
 */
function traverse(def, state, available, stateAvail, producers, consumers, allProduced, allConsumed) {
    const existing = stateAvail.get(state);
    if (existing) {
        // Narrow to the intersection with the newly arrived set.
        let narrowed = false;
        for (const a of [...existing]) {
            if (!available.has(a)) {
                existing.delete(a);
                narrowed = true;
            }
        }
        // Fixpoint: nothing changed, downstream was already propagated with
        // a set at least this precise — stop to avoid redundant re-traversal.
        if (!narrowed)
            return;
    }
    else {
        stateAvail.set(state, new Set(available));
    }
    for (const t of def.transitionsFrom(state)) {
        // Start from the (possibly narrowed) set recorded for this state.
        const newAvail = new Set(stateAvail.get(state));
        if (t.guard) {
            for (const req of t.guard.requires) {
                addTo(consumers, req, { name: t.guard.name, fromState: t.from, toState: t.to, kind: 'guard' });
                allConsumed.add(req);
            }
            for (const prod of t.guard.produces) {
                addTo(producers, prod, { name: t.guard.name, fromState: t.from, toState: t.to, kind: 'guard' });
                allProduced.add(prod);
                newAvail.add(prod);
            }
        }
        if (t.branch) {
            // Branches only consume; their outgoing legs carry the production.
            for (const req of t.branch.requires) {
                addTo(consumers, req, { name: t.branch.name, fromState: t.from, toState: t.to, kind: 'branch' });
                allConsumed.add(req);
            }
        }
        if (t.processor) {
            for (const req of t.processor.requires) {
                addTo(consumers, req, { name: t.processor.name, fromState: t.from, toState: t.to, kind: 'processor' });
                allConsumed.add(req);
            }
            for (const prod of t.processor.produces) {
                addTo(producers, prod, { name: t.processor.name, fromState: t.from, toState: t.to, kind: 'processor' });
                allProduced.add(prod);
                newAvail.add(prod);
            }
        }
        traverse(def, t.to, newAvail, stateAvail, producers, consumers, allProduced, allConsumed);
    }
}
/**
 * Append an edge record to map[key], creating the list on first use.
 * Re-traversal of shared states (diamond shapes) revisits the same
 * transitions, so an identical edge (same name/fromState/toState/kind)
 * is recorded only once.
 */
function addTo(map, key, info) {
    const list = map.get(key);
    if (!list) {
        map.set(key, [info]);
        return;
    }
    const duplicate = list.some(e => e.name === info.name
        && e.fromState === info.fromState
        && e.toState === info.toState
        && e.kind === info.kind);
    if (!duplicate)
        list.push(info);
}
@@ -0,0 +1,19 @@
1
+ import type { FlowKey } from './flow-key.js';
2
+ /**
3
+ * Accumulator for flow data. Keyed by FlowKey — each key appears at most once.
4
+ *
5
+ * Use dedicated FlowKey instances as keys (e.g., flowKey<OrderRequest>('OrderRequest')),
6
+ * not raw strings. Putting the same key twice silently overwrites the previous value.
7
+ */
8
export declare class FlowContext {
    /** Identifier of the flow instance this context belongs to. */
    readonly flowId: string;
    /** Timestamp of creation; defaults to construction time when omitted. */
    readonly createdAt: Date;
    private attributes;
    constructor(flowId: string, createdAt?: Date, attributes?: Map<string, unknown>);
    /** Returns the stored value; throws a FlowError when the key is absent. */
    get<T>(key: FlowKey<T>): T;
    /** Like get(), but returns undefined instead of throwing when absent. */
    find<T>(key: FlowKey<T>): T | undefined;
    /** Stores a value; silently overwrites any previous value for the key. */
    put<T>(key: FlowKey<T>, value: T): void;
    /** True when a value has been stored under the key. */
    has(key: FlowKey<unknown>): boolean;
    /** Shallow copy of all stored attributes (values are not cloned). */
    snapshot(): Map<string, unknown>;
    /** Replaces the entire attribute set with the entries of the snapshot. */
    restoreFrom(snapshot: Map<string, unknown>): void;
}
@@ -0,0 +1,40 @@
1
+ import { FlowError } from './flow-error.js';
2
+ /**
3
+ * Accumulator for flow data. Keyed by FlowKey — each key appears at most once.
4
+ *
5
+ * Use dedicated FlowKey instances as keys (e.g., flowKey<OrderRequest>('OrderRequest')),
6
+ * not raw strings. Putting the same key twice silently overwrites the previous value.
7
+ */
8
/**
 * Accumulator for flow data. Keyed by FlowKey — each key appears at most once.
 *
 * Use dedicated FlowKey instances as keys (e.g., flowKey<OrderRequest>('OrderRequest')),
 * not raw strings. Putting the same key twice silently overwrites the previous value.
 */
export class FlowContext {
    flowId; // Identifier of the owning flow instance.
    createdAt; // Creation timestamp.
    attributes; // Backing store: FlowKey -> value.
    /**
     * @param flowId     identifier of the flow instance
     * @param createdAt  optional creation time; defaults to "now"
     * @param attributes optional initial entries (copied defensively)
     */
    constructor(flowId, createdAt, attributes) {
        this.flowId = flowId;
        this.createdAt = createdAt ?? new Date();
        // Copy so callers cannot mutate our store through their own map.
        this.attributes = new Map(attributes ?? []);
    }
    /**
     * Return the value stored under `key`.
     * @throws FlowError (missingContext) when the key was never put.
     */
    get(key) {
        // BUGFIX: use has() rather than a `value === undefined` check, so an
        // explicitly stored `undefined` counts as present — consistent with has().
        if (!this.attributes.has(key))
            throw FlowError.missingContext(key);
        return this.attributes.get(key);
    }
    /** Like get(), but returns undefined instead of throwing when absent. */
    find(key) {
        return this.attributes.get(key);
    }
    /** Store a value; silently overwrites any previous value for the key. */
    put(key, value) {
        this.attributes.set(key, value);
    }
    /** True when a value (possibly undefined) has been stored under the key. */
    has(key) {
        return this.attributes.has(key);
    }
    /** Shallow copy of the current attributes (values are not cloned). */
    snapshot() {
        return new Map(this.attributes);
    }
    /** Replace all attributes with the entries of `snapshot`. */
    restoreFrom(snapshot) {
        this.attributes.clear();
        for (const [k, v] of snapshot)
            this.attributes.set(k, v);
    }
}
@@ -0,0 +1,85 @@
1
+ import type { FlowKey } from './flow-key.js';
2
+ import type { StateConfig, Transition, StateProcessor, TransitionGuard, BranchProcessor } from './types.js';
3
+ import { DataFlowGraph } from './data-flow-graph.js';
4
/**
 * Immutable description of a flow's state machine: states, transitions,
 * error routing and the derived data-flow graph. Construct via builder().
 */
export declare class FlowDefinition<S extends string> {
    readonly name: string;
    readonly stateConfig: Record<S, StateConfig>;
    /** Time-to-live in milliseconds (set via Builder.setTtl). */
    readonly ttl: number;
    readonly maxGuardRetries: number;
    readonly transitions: Transition<S>[];
    /** Per-state error routing (from-state -> error state). */
    readonly errorTransitions: Map<S, S>;
    /** Null when no transitions were declared — TODO confirm against implementation. */
    readonly initialState: S | null;
    readonly terminalStates: Set<S>;
    /** Data-flow graph derived at build time; may be null. */
    readonly dataFlowGraph: DataFlowGraph<S> | null;
    private constructor();
    /** All transitions leaving the given state. */
    transitionsFrom(state: S): Transition<S>[];
    /** The externally-triggered (guard-gated) transition from the state, if any. */
    externalFrom(state: S): Transition<S> | undefined;
    allStates(): S[];
    /**
     * Create a new FlowDefinition with a sub-flow inserted before a specific transition.
     */
    withPlugin(from: S, to: S, pluginFlow: FlowDefinition<any>): FlowDefinition<S>;
    /** Sole entry point for constructing definitions (constructor is private). */
    static builder<S extends string>(name: string, stateConfig: Record<S, StateConfig>): Builder<S>;
}
24
/**
 * Fluent builder for FlowDefinition. Collects transitions and settings,
 * then validates the whole definition in build().
 */
export declare class Builder<S extends string> {
    private readonly name;
    private readonly stateConfig;
    private ttl;
    private maxGuardRetries;
    private readonly transitions;
    private readonly errorTransitions;
    private readonly initiallyAvailableKeys;
    constructor(name: string, stateConfig: Record<S, StateConfig>);
    /** Declare FlowKeys already present in the context when the flow starts. */
    initiallyAvailable(...keys: FlowKey<unknown>[]): this;
    /** Set the flow's time-to-live in milliseconds. */
    setTtl(ms: number): this;
    setMaxGuardRetries(max: number): this;
    /** Begin describing transitions leaving the given state. */
    from(state: S): FromBuilder<S>;
    /** Route errors raised while in `from` to state `to`. */
    onError(from: S, to: S): this;
    /** Fallback error state for states without a specific onError mapping. */
    onAnyError(errorState: S): this;
    /** @internal */
    addTransition(t: Transition<S>): void;
    /** Validate the collected configuration and produce the immutable definition. */
    build(): FlowDefinition<S>;
    // Validation passes run during build(); each name states the checked invariant.
    private validate;
    private checkReachability;
    private checkPathToTerminal;
    private canReachTerminal;
    private checkDag;
    private hasCycle;
    private checkExternalUniqueness;
    private checkBranchCompleteness;
    private checkRequiresProduces;
    private checkRequiresProducesFrom;
    private checkAutoExternalConflict;
    private checkTerminalNoOutgoing;
    private checkSubFlowNestingDepth;
    private checkSubFlowCircularRef;
    private checkSubFlowExitCompleteness;
}
58
/** Builder stage describing transitions that leave a fixed source state. */
export declare class FromBuilder<S extends string> {
    private readonly builder;
    private readonly fromState;
    constructor(builder: Builder<S>, fromState: S);
    /** Automatic transition to `to`, running `processor` — presumably fires without an external event (cf. checkAutoExternalConflict). */
    auto(to: S, processor: StateProcessor<S>): Builder<S>;
    /** Externally-triggered transition gated by `guard`, optionally running `processor`. */
    external(to: S, guard: TransitionGuard<S>, processor?: StateProcessor<S>): Builder<S>;
    /** Begin a multi-target branch decided by `branch`. */
    branch(branch: BranchProcessor<S>): BranchBuilder<S>;
    /** Embed another flow definition as a sub-flow starting at this state. */
    subFlow(subFlowDef: FlowDefinition<any>): SubFlowBuilder<S>;
}
67
/** Builder stage mapping a sub-flow's terminal states back to parent states. */
export declare class SubFlowBuilder<S extends string> {
    private readonly builder;
    private readonly fromState;
    private readonly subFlowDef;
    private readonly exitMap;
    constructor(builder: Builder<S>, fromState: S, subFlowDef: FlowDefinition<any>);
    /** Map the sub-flow terminal named `terminalName` to `parentState`. */
    onExit(terminalName: string, parentState: S): this;
    /** Finish the sub-flow description and return to the main builder. */
    endSubFlow(): Builder<S>;
}
76
/** Builder stage enumerating the labelled targets of a branch decision. */
export declare class BranchBuilder<S extends string> {
    private readonly builder;
    private readonly fromState;
    private readonly branch;
    private readonly targets;
    private readonly processors;
    constructor(builder: Builder<S>, fromState: S, branch: BranchProcessor<S>);
    /** Add a branch target: when the branch yields `label`, go to `state`, optionally running `processor`. */
    to(state: S, label: string, processor?: StateProcessor<S>): this;
    /** Finish the branch description and return to the main builder. */
    endBranch(): Builder<S>;
}