flowcraft 2.9.3 → 2.10.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +3 -3
- package/dist/adapter-DzeZVjSE.d.mts +133 -0
- package/dist/adapters/index.d.mts +2 -0
- package/dist/adapters/index.mjs +3 -0
- package/dist/adapters/persistent-event-bus.d.mts +2 -0
- package/dist/adapters/persistent-event-bus.mjs +59 -0
- package/dist/analysis-B5Twr7sD.d.mts +52 -0
- package/dist/analysis.d.mts +2 -0
- package/dist/analysis.mjs +164 -0
- package/dist/batch-gather-BhF-IzQR.d.mts +8 -0
- package/dist/batch-scatter-DD8TU0Wm.d.mts +8 -0
- package/dist/container-BKdd-9wf.d.mts +24 -0
- package/dist/container-factory-fDY2kkxt.d.mts +17 -0
- package/dist/container-factory.d.mts +2 -0
- package/dist/container-factory.mjs +23 -0
- package/dist/container.d.mts +2 -0
- package/dist/container.mjs +43 -0
- package/dist/context-ZVtzXuZu.d.mts +64 -0
- package/dist/context.d.mts +2 -0
- package/dist/context.mjs +145 -0
- package/dist/error-mapper-BAv_YQMQ.d.mts +14 -0
- package/dist/error-mapper.d.mts +2 -0
- package/dist/error-mapper.mjs +37 -0
- package/dist/errors-CyyIj3OO.d.mts +21 -0
- package/dist/errors.d.mts +2 -0
- package/dist/errors.mjs +24 -0
- package/dist/evaluator-Dnj5qJ92.d.mts +31 -0
- package/dist/evaluator.d.mts +2 -0
- package/dist/evaluator.mjs +80 -0
- package/dist/flow-CZGpYpl-.d.mts +94 -0
- package/dist/flow.d.mts +2 -0
- package/dist/flow.mjs +328 -0
- package/dist/index-9iG2qHLe.d.mts +1 -0
- package/dist/index-Bk0eNZmQ.d.mts +1 -0
- package/dist/index-CNgSR_kt.d.mts +1 -0
- package/dist/index-CW2WHUXP.d.mts +1 -0
- package/dist/index.d.mts +24 -1
- package/dist/index.mjs +31 -746
- package/dist/linter-B8KALEae.d.mts +25 -0
- package/dist/linter.d.mts +2 -0
- package/dist/linter.mjs +74 -0
- package/dist/logger-BvDgvNHQ.d.mts +19 -0
- package/dist/logger.d.mts +2 -0
- package/dist/logger.mjs +26 -0
- package/dist/node.d.mts +2 -0
- package/dist/node.mjs +55 -0
- package/dist/nodes/batch-gather.d.mts +2 -0
- package/dist/nodes/batch-gather.mjs +47 -0
- package/dist/nodes/batch-scatter.d.mts +2 -0
- package/dist/nodes/batch-scatter.mjs +52 -0
- package/dist/nodes/index.d.mts +7 -0
- package/dist/nodes/index.mjs +8 -0
- package/dist/nodes/sleep.d.mts +2 -0
- package/dist/nodes/sleep.mjs +41 -0
- package/dist/nodes/subflow.d.mts +2 -0
- package/dist/nodes/subflow.mjs +64 -0
- package/dist/nodes/wait.d.mts +2 -0
- package/dist/nodes/wait.mjs +12 -0
- package/dist/nodes/webhook.d.mts +2 -0
- package/dist/nodes/webhook.mjs +24 -0
- package/dist/orchestrator-DwMIJRFI.d.mts +8 -0
- package/dist/persistent-event-bus-COiQOpWh.d.mts +68 -0
- package/dist/replay-CVOy6d_L.d.mts +44 -0
- package/dist/runtime/adapter.d.mts +2 -0
- package/dist/runtime/adapter.mjs +349 -0
- package/dist/runtime/builtin-keys.d.mts +37 -0
- package/dist/runtime/builtin-keys.mjs +12 -0
- package/dist/runtime/execution-context.d.mts +2 -0
- package/dist/runtime/execution-context.mjs +26 -0
- package/dist/runtime/executors.d.mts +2 -0
- package/dist/runtime/executors.mjs +259 -0
- package/dist/runtime/index.d.mts +6 -0
- package/dist/runtime/index.mjs +10 -0
- package/dist/runtime/node-executor-factory.d.mts +11 -0
- package/dist/runtime/node-executor-factory.mjs +41 -0
- package/dist/runtime/orchestrator.d.mts +2 -0
- package/dist/runtime/orchestrator.mjs +41 -0
- package/dist/runtime/orchestrators/replay.d.mts +2 -0
- package/dist/{replay-BB11M6K1.mjs → runtime/orchestrators/replay.mjs} +1 -20
- package/dist/runtime/orchestrators/step-by-step.d.mts +15 -0
- package/dist/runtime/orchestrators/step-by-step.mjs +41 -0
- package/dist/runtime/orchestrators/utils.d.mts +2 -0
- package/dist/runtime/orchestrators/utils.mjs +79 -0
- package/dist/runtime/runtime.d.mts +2 -0
- package/dist/runtime/runtime.mjs +425 -0
- package/dist/runtime/scheduler.d.mts +2 -0
- package/dist/runtime/scheduler.mjs +64 -0
- package/dist/runtime/state.d.mts +2 -0
- package/dist/runtime/state.mjs +127 -0
- package/dist/runtime/traverser.d.mts +2 -0
- package/dist/runtime/traverser.mjs +213 -0
- package/dist/runtime/types.d.mts +2 -0
- package/dist/runtime/types.mjs +1 -0
- package/dist/runtime/workflow-logic-handler.d.mts +16 -0
- package/dist/runtime/workflow-logic-handler.mjs +159 -0
- package/dist/sanitizer-Bi00YjvO.d.mts +11 -0
- package/dist/sanitizer.d.mts +2 -0
- package/dist/sanitizer.mjs +37 -0
- package/dist/sdk.d.mts +1 -2
- package/dist/sdk.mjs +1 -2
- package/dist/serializer-BnmJr13R.d.mts +17 -0
- package/dist/serializer.d.mts +2 -0
- package/dist/serializer.mjs +34 -0
- package/dist/sleep-DpwYaY5b.d.mts +8 -0
- package/dist/subflow-n2IMsRe2.d.mts +8 -0
- package/dist/testing/event-logger.d.mts +62 -0
- package/dist/testing/event-logger.mjs +98 -0
- package/dist/testing/index.d.mts +5 -172
- package/dist/testing/index.mjs +6 -276
- package/dist/testing/run-with-trace.d.mts +37 -0
- package/dist/testing/run-with-trace.mjs +49 -0
- package/dist/testing/stepper.d.mts +78 -0
- package/dist/testing/stepper.mjs +100 -0
- package/dist/types-BcrXJEPI.d.mts +687 -0
- package/dist/types.d.mts +2 -0
- package/dist/types.mjs +1 -0
- package/dist/utils-BUEgr9V2.d.mts +34 -0
- package/dist/wait-2Q-LA7V7.d.mts +8 -0
- package/dist/webhook-BiCm-HLx.d.mts +12 -0
- package/package.json +4 -4
- package/dist/index-DUPpyNvU.d.mts +0 -1326
- package/dist/index.mjs.map +0 -1
- package/dist/replay-BB11M6K1.mjs.map +0 -1
- package/dist/runtime-lNm7WbD1.mjs +0 -2250
- package/dist/runtime-lNm7WbD1.mjs.map +0 -1
- package/dist/sdk.mjs.map +0 -1
- package/dist/testing/index.mjs.map +0 -1
package/README.md
CHANGED
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
# `flowcraft`
|
|
2
2
|
|
|
3
|
-
[](https://www.npmjs.com/package/flowcraft)
|
|
4
3
|
[](https://opensource.org/licenses/MIT)
|
|
5
|
-
[](https://www.npmjs.com/package/flowcraft)
|
|
5
|
+
[](https://codecov.io/github/gorango/flowcraft/tree/master/packages/core/src?flags[0]=core)
|
|
6
6
|
|
|
7
7
|
Build complex, multi-step processes with a lightweight, composable, and type-safe approach. Model complex business processes, data pipelines, ETL workflows, or AI agents and scale from in-memory scripts to distributed systems without changing the core business logic.
|
|
8
8
|
|
|
@@ -106,4 +106,4 @@ For a complete overview of features, patterns, examples, and APIs, see the full
|
|
|
106
106
|
|
|
107
107
|
## License
|
|
108
108
|
|
|
109
|
-
Flowcraft is licensed under the [MIT License](LICENSE).
|
|
109
|
+
Flowcraft is licensed under the [MIT License](https://github.com/gorango/flowcraft/blob/master/LICENSE).
|
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
import { A as FlowRuntime, D as WorkflowResult, c as ISerializer, i as IAsyncContext, m as NodeDefinition, o as IEventBus, s as ILogger, w as WorkflowBlueprint, x as RuntimeOptions } from "./types-BcrXJEPI.mjs";
|
|
2
|
+
|
|
3
|
+
//#region src/runtime/adapter.d.ts
|
|
4
|
+
/**
|
|
5
|
+
* Defines the contract for an atomic, distributed key-value store required by
|
|
6
|
+
* the adapter for coordination tasks like fan-in joins and locking.
|
|
7
|
+
*/
|
|
8
|
+
interface ICoordinationStore {
|
|
9
|
+
/** Atomically increments a key and returns the new value. Ideal for 'all' joins. */
|
|
10
|
+
increment: (key: string, ttlSeconds: number) => Promise<number>;
|
|
11
|
+
/** Sets a key only if it does not already exist. Ideal for 'any' joins (locking). */
|
|
12
|
+
setIfNotExist: (key: string, value: string, ttlSeconds: number) => Promise<boolean>;
|
|
13
|
+
/** Extends the TTL of an existing key. Used for heartbeat mechanism in long-running jobs. */
|
|
14
|
+
extendTTL: (key: string, ttlSeconds: number) => Promise<boolean>;
|
|
15
|
+
/** Deletes a key. Used for cleanup. */
|
|
16
|
+
delete: (key: string) => Promise<void>;
|
|
17
|
+
/** Gets the value of a key. */
|
|
18
|
+
get: (key: string) => Promise<string | undefined>;
|
|
19
|
+
}
|
|
20
|
+
/** Configuration options for constructing a BaseDistributedAdapter. */
|
|
21
|
+
interface AdapterOptions {
|
|
22
|
+
runtimeOptions: RuntimeOptions<any>;
|
|
23
|
+
coordinationStore: ICoordinationStore;
|
|
24
|
+
eventBus?: IEventBus;
|
|
25
|
+
}
|
|
26
|
+
/** The data payload expected for a job in the queue. */
|
|
27
|
+
interface JobPayload {
|
|
28
|
+
runId: string;
|
|
29
|
+
blueprintId: string;
|
|
30
|
+
nodeId: string;
|
|
31
|
+
/** Used to suppress intermediate failures and fallback triggers when delegating to Queue retries */
|
|
32
|
+
isLastAttempt?: boolean;
|
|
33
|
+
/** Optional attempt tracking metadata */
|
|
34
|
+
attempt?: number;
|
|
35
|
+
}
|
|
36
|
+
/**
|
|
37
|
+
* The base class for all distributed adapters. It handles the technology-agnostic
|
|
38
|
+
* orchestration logic and leaves queue-specific implementation to subclasses.
|
|
39
|
+
*/
|
|
40
|
+
declare abstract class BaseDistributedAdapter {
|
|
41
|
+
protected readonly runtime: FlowRuntime<any, any>;
|
|
42
|
+
protected readonly store: ICoordinationStore;
|
|
43
|
+
protected readonly serializer: ISerializer;
|
|
44
|
+
protected readonly logger: ILogger;
|
|
45
|
+
protected readonly eventBus?: IEventBus;
|
|
46
|
+
constructor(options: AdapterOptions);
|
|
47
|
+
/**
|
|
48
|
+
* Starts the worker, which begins listening for and processing jobs from the queue.
|
|
49
|
+
*/
|
|
50
|
+
start(): void;
|
|
51
|
+
/**
|
|
52
|
+
* Hook called by the execution factory to determine if a node's automatic
|
|
53
|
+
* retries should be executed synchronously in-process (true) or delegated
|
|
54
|
+
* to the Queue backoff behavior configured by the adapter (false).
|
|
55
|
+
*/
|
|
56
|
+
protected shouldRetryInProcess(_nodeDef: NodeDefinition): boolean;
|
|
57
|
+
/**
|
|
58
|
+
* Returns queue-level retry options for enqueuing successor jobs.
|
|
59
|
+
* Only used when shouldRetryInProcess() returns false.
|
|
60
|
+
*/
|
|
61
|
+
protected getQueueRetryOptions(_nodeDef: NodeDefinition): Record<string, any> | undefined;
|
|
62
|
+
/**
|
|
63
|
+
* Creates a technology-specific distributed context for a given workflow run.
|
|
64
|
+
* @param runId The unique ID for the workflow execution.
|
|
65
|
+
*/
|
|
66
|
+
protected abstract createContext(runId: string): IAsyncContext<Record<string, any>>;
|
|
67
|
+
/**
|
|
68
|
+
* Sets up the listener for the message queue. The implementation should call the
|
|
69
|
+
* provided `handler` function for each new job received.
|
|
70
|
+
* @param handler The core logic to execute for each job.
|
|
71
|
+
*/
|
|
72
|
+
protected abstract processJobs(handler: (job: JobPayload) => Promise<void>): void;
|
|
73
|
+
/**
|
|
74
|
+
* Enqueues a new job onto the message queue.
|
|
75
|
+
* @param job The payload for the job to be enqueued.
|
|
76
|
+
*/
|
|
77
|
+
protected abstract enqueueJob(job: JobPayload): Promise<void>;
|
|
78
|
+
/**
|
|
79
|
+
* Publishes the final result of a completed or failed workflow run.
|
|
80
|
+
* @param runId The unique ID of the workflow run.
|
|
81
|
+
* @param result The final status and payload of the workflow.
|
|
82
|
+
*/
|
|
83
|
+
protected abstract publishFinalResult(runId: string, result: {
|
|
84
|
+
status: 'completed' | 'failed';
|
|
85
|
+
payload?: WorkflowResult;
|
|
86
|
+
reason?: string;
|
|
87
|
+
}): Promise<void>;
|
|
88
|
+
/**
|
|
89
|
+
* Registers a webhook endpoint for a specific node in a workflow run.
|
|
90
|
+
* @param runId The unique ID of the workflow run.
|
|
91
|
+
* @param nodeId The ID of the node that will wait for the webhook.
|
|
92
|
+
* @returns The URL and event name for the webhook.
|
|
93
|
+
*/
|
|
94
|
+
abstract registerWebhookEndpoint(runId: string, nodeId: string): Promise<{
|
|
95
|
+
url: string;
|
|
96
|
+
event: string;
|
|
97
|
+
}>;
|
|
98
|
+
/**
|
|
99
|
+
* Hook called at the start of job processing. Subclasses can override this
|
|
100
|
+
* to perform additional setup (e.g., timestamp tracking for reconciliation).
|
|
101
|
+
*/
|
|
102
|
+
protected onJobStart(_runId: string, _blueprintId: string, _nodeId: string): Promise<void>;
|
|
103
|
+
/**
|
|
104
|
+
* The main handler for processing a single job from the queue.
|
|
105
|
+
*/
|
|
106
|
+
protected handleJob(job: JobPayload): Promise<void>;
|
|
107
|
+
/**
|
|
108
|
+
* Handles post-execution logic: terminal node checks, successor determination,
|
|
109
|
+
* and enqueueing of ready nodes. Extracted to support the idempotency guard.
|
|
110
|
+
*/
|
|
111
|
+
private handleNodeCompletion;
|
|
112
|
+
/**
|
|
113
|
+
* Encapsulates the fan-in join logic using the coordination store.
|
|
114
|
+
*/
|
|
115
|
+
protected isReadyForFanIn(runId: string, blueprint: WorkflowBlueprint, targetNodeId: string): Promise<boolean>;
|
|
116
|
+
/**
|
|
117
|
+
* Reconciles the state of a workflow run. It inspects the persisted
|
|
118
|
+
* context to find completed nodes, determines the next set of executable
|
|
119
|
+
* nodes (the frontier), and enqueues jobs for them if they aren't
|
|
120
|
+
* already running. This is the core of the resume functionality.
|
|
121
|
+
*
|
|
122
|
+
* @param runId The unique ID of the workflow execution to reconcile.
|
|
123
|
+
* @returns The set of node IDs that were enqueued for execution.
|
|
124
|
+
*/
|
|
125
|
+
reconcile(runId: string): Promise<Set<string>>;
|
|
126
|
+
private calculateResumedFrontier;
|
|
127
|
+
/**
|
|
128
|
+
* Writes a poison pill for 'all' join successors and a cancellation pill for 'any' join successors of a failed node to prevent stalling or ambiguous states.
|
|
129
|
+
*/
|
|
130
|
+
private writePoisonPillForSuccessors;
|
|
131
|
+
}
|
|
132
|
+
//#endregion
|
|
133
|
+
export { JobPayload as i, BaseDistributedAdapter as n, ICoordinationStore as r, AdapterOptions as t };
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
//#region src/adapters/persistent-event-bus.ts
|
|
2
|
+
/**
|
|
3
|
+
* A pluggable event bus adapter that persists all workflow events
|
|
4
|
+
* to a configurable storage backend, enabling time-travel debugging and replay.
|
|
5
|
+
*
|
|
6
|
+
* @example
|
|
7
|
+
* ```typescript
|
|
8
|
+
* // Using a database-backed store
|
|
9
|
+
* const eventStore = new DatabaseEventStore(dbConnection)
|
|
10
|
+
* const eventBus = new PersistentEventBusAdapter(eventStore)
|
|
11
|
+
* const runtime = new FlowRuntime({ eventBus })
|
|
12
|
+
*
|
|
13
|
+
* // Later, replay the execution
|
|
14
|
+
* const events = await eventStore.retrieve(executionId)
|
|
15
|
+
* const finalState = await runtime.replay(blueprint, events)
|
|
16
|
+
* ```
|
|
17
|
+
*/
|
|
18
|
+
var PersistentEventBusAdapter = class {
|
|
19
|
+
constructor(store) {
|
|
20
|
+
this.store = store;
|
|
21
|
+
}
|
|
22
|
+
/**
|
|
23
|
+
* Emit an event by storing it persistently.
|
|
24
|
+
* Also emits to console for debugging (can be made configurable).
|
|
25
|
+
*/
|
|
26
|
+
async emit(event) {
|
|
27
|
+
let executionId = "unknown";
|
|
28
|
+
if ("executionId" in event.payload) executionId = event.payload.executionId;
|
|
29
|
+
await this.store.store(event, executionId);
|
|
30
|
+
}
|
|
31
|
+
};
|
|
32
|
+
/**
|
|
33
|
+
* Simple in-memory event store for testing and development.
|
|
34
|
+
* Not suitable for production use.
|
|
35
|
+
*/
|
|
36
|
+
var InMemoryEventStore = class {
|
|
37
|
+
events = /* @__PURE__ */ new Map();
|
|
38
|
+
async store(event, executionId) {
|
|
39
|
+
if (!this.events.has(executionId)) this.events.set(executionId, []);
|
|
40
|
+
this.events.get(executionId)?.push(event);
|
|
41
|
+
}
|
|
42
|
+
async retrieve(executionId) {
|
|
43
|
+
return this.events.get(executionId) || [];
|
|
44
|
+
}
|
|
45
|
+
async retrieveMultiple(executionIds) {
|
|
46
|
+
const result = /* @__PURE__ */ new Map();
|
|
47
|
+
for (const id of executionIds) result.set(id, await this.retrieve(id));
|
|
48
|
+
return result;
|
|
49
|
+
}
|
|
50
|
+
/**
|
|
51
|
+
* Clear all stored events (useful for testing).
|
|
52
|
+
*/
|
|
53
|
+
clear() {
|
|
54
|
+
this.events.clear();
|
|
55
|
+
}
|
|
56
|
+
};
|
|
57
|
+
|
|
58
|
+
//#endregion
|
|
59
|
+
export { InMemoryEventStore, PersistentEventBusAdapter };
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import { r as FlowcraftEvent, w as WorkflowBlueprint } from "./types-BcrXJEPI.mjs";
|
|
2
|
+
|
|
3
|
+
//#region src/analysis.d.ts
|
|
4
|
+
/**
|
|
5
|
+
* A list of cycles found in the graph. Each cycle is an array of node IDs.
|
|
6
|
+
*/
|
|
7
|
+
type Cycles = string[][];
|
|
8
|
+
/**
|
|
9
|
+
* Analysis result for a workflow blueprint
|
|
10
|
+
*/
|
|
11
|
+
interface BlueprintAnalysis {
|
|
12
|
+
/** Cycles found in the graph */
|
|
13
|
+
cycles: Cycles;
|
|
14
|
+
/** Node IDs that have no incoming edges (start nodes) */
|
|
15
|
+
startNodeIds: string[];
|
|
16
|
+
/** Node IDs that have no outgoing edges (terminal nodes) */
|
|
17
|
+
terminalNodeIds: string[];
|
|
18
|
+
/** Total number of nodes */
|
|
19
|
+
nodeCount: number;
|
|
20
|
+
/** Total number of edges */
|
|
21
|
+
edgeCount: number;
|
|
22
|
+
/** Whether the graph is a valid DAG (no cycles) */
|
|
23
|
+
isDag: boolean;
|
|
24
|
+
}
|
|
25
|
+
/**
|
|
26
|
+
* Analyzes a workflow blueprint to detect cycles using an iterative DFS algorithm.
|
|
27
|
+
* This avoids stack overflow issues for deep graphs compared to the recursive version.
|
|
28
|
+
* @param blueprint The WorkflowBlueprint object containing nodes and edges.
|
|
29
|
+
* @returns An array of cycles found. Each cycle is represented as an array of node IDs.
|
|
30
|
+
*/
|
|
31
|
+
declare function checkForCycles(blueprint: WorkflowBlueprint): Cycles;
|
|
32
|
+
/**
|
|
33
|
+
* Generates Mermaid diagram syntax from a WorkflowBlueprint
|
|
34
|
+
* @param blueprint The WorkflowBlueprint object containing nodes and edges
|
|
35
|
+
* @returns Mermaid syntax string for the flowchart
|
|
36
|
+
*/
|
|
37
|
+
declare function generateMermaid(blueprint: WorkflowBlueprint): string;
|
|
38
|
+
/**
|
|
39
|
+
* Generates Mermaid diagram syntax from a WorkflowBlueprint with execution history styling
|
|
40
|
+
* @param blueprint The WorkflowBlueprint object containing nodes and edges
|
|
41
|
+
* @param events Array of FlowcraftEvent objects from the workflow execution
|
|
42
|
+
* @returns Mermaid syntax string for the flowchart with execution path highlighting
|
|
43
|
+
*/
|
|
44
|
+
declare function generateMermaidForRun(blueprint: WorkflowBlueprint, events: FlowcraftEvent[]): string;
|
|
45
|
+
/**
|
|
46
|
+
* Analyzes a workflow blueprint and returns comprehensive analysis
|
|
47
|
+
* @param blueprint The WorkflowBlueprint object containing nodes and edges
|
|
48
|
+
* @returns Analysis result with cycles, start nodes, terminal nodes, and other metrics
|
|
49
|
+
*/
|
|
50
|
+
declare function analyzeBlueprint(blueprint: WorkflowBlueprint): BlueprintAnalysis;
|
|
51
|
+
//#endregion
|
|
52
|
+
export { generateMermaid as a, checkForCycles as i, Cycles as n, generateMermaidForRun as o, analyzeBlueprint as r, BlueprintAnalysis as t };
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
import { a as generateMermaid, i as checkForCycles, n as Cycles, o as generateMermaidForRun, r as analyzeBlueprint, t as BlueprintAnalysis } from "./analysis-B5Twr7sD.mjs";
|
|
2
|
+
export { BlueprintAnalysis, Cycles, analyzeBlueprint, checkForCycles, generateMermaid, generateMermaidForRun };
|
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
//#region src/analysis.ts
|
|
2
|
+
/**
|
|
3
|
+
* Analyzes a workflow blueprint to detect cycles using an iterative DFS algorithm.
|
|
4
|
+
* This avoids stack overflow issues for deep graphs compared to the recursive version.
|
|
5
|
+
* @param blueprint The WorkflowBlueprint object containing nodes and edges.
|
|
6
|
+
* @returns An array of cycles found. Each cycle is represented as an array of node IDs.
|
|
7
|
+
*/
|
|
8
|
+
function checkForCycles(blueprint) {
|
|
9
|
+
const cycles = [];
|
|
10
|
+
if (!blueprint?.nodes || blueprint.nodes.length === 0) return cycles;
|
|
11
|
+
const allNodeIds = blueprint.nodes.map((node) => node.id);
|
|
12
|
+
const adj = /* @__PURE__ */ new Map();
|
|
13
|
+
for (const id of allNodeIds) adj.set(id, []);
|
|
14
|
+
for (const edge of blueprint.edges) adj.get(edge.source)?.push(edge.target);
|
|
15
|
+
const state = /* @__PURE__ */ new Map();
|
|
16
|
+
for (const id of allNodeIds) state.set(id, 0);
|
|
17
|
+
for (const startNode of allNodeIds) {
|
|
18
|
+
if (state.get(startNode) !== 0) continue;
|
|
19
|
+
const stack = [{
|
|
20
|
+
node: startNode,
|
|
21
|
+
path: []
|
|
22
|
+
}];
|
|
23
|
+
const pathSet = /* @__PURE__ */ new Set();
|
|
24
|
+
while (stack.length > 0) {
|
|
25
|
+
const { node, path } = stack[stack.length - 1];
|
|
26
|
+
if (state.get(node) === 0) {
|
|
27
|
+
state.set(node, 1);
|
|
28
|
+
pathSet.add(node);
|
|
29
|
+
path.push(node);
|
|
30
|
+
}
|
|
31
|
+
const neighbors = adj.get(node) || [];
|
|
32
|
+
let foundUnvisited = false;
|
|
33
|
+
for (const neighbor of neighbors) if (state.get(neighbor) === 1) {
|
|
34
|
+
const cycleStartIndex = path.indexOf(neighbor);
|
|
35
|
+
const cycle = path.slice(cycleStartIndex);
|
|
36
|
+
cycles.push([...cycle, neighbor]);
|
|
37
|
+
} else if (state.get(neighbor) === 0) {
|
|
38
|
+
stack.push({
|
|
39
|
+
node: neighbor,
|
|
40
|
+
path: [...path]
|
|
41
|
+
});
|
|
42
|
+
foundUnvisited = true;
|
|
43
|
+
break;
|
|
44
|
+
}
|
|
45
|
+
if (!foundUnvisited) {
|
|
46
|
+
state.set(node, 2);
|
|
47
|
+
stack.pop();
|
|
48
|
+
pathSet.delete(node);
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
return cycles;
|
|
53
|
+
}
|
|
54
|
+
/**
|
|
55
|
+
* Generates Mermaid diagram syntax from a WorkflowBlueprint
|
|
56
|
+
* @param blueprint The WorkflowBlueprint object containing nodes and edges
|
|
57
|
+
* @returns Mermaid syntax string for the flowchart
|
|
58
|
+
*/
|
|
59
|
+
function generateMermaid(blueprint) {
|
|
60
|
+
if (!blueprint?.nodes || blueprint.nodes.length === 0) return "flowchart TD\n empty[Empty Blueprint]";
|
|
61
|
+
let mermaid = "flowchart TD\n";
|
|
62
|
+
for (const node of blueprint.nodes) {
|
|
63
|
+
const paramsString = node.params ? `<br/>params: ${JSON.stringify(node.params)}` : "";
|
|
64
|
+
const nodeLabel = `${node.id}${paramsString}`;
|
|
65
|
+
mermaid += ` ${node.id}["${nodeLabel}"]\n`;
|
|
66
|
+
}
|
|
67
|
+
for (const edge of blueprint.edges || []) {
|
|
68
|
+
const labelParts = [];
|
|
69
|
+
if (edge.action) labelParts.push(edge.action);
|
|
70
|
+
if (edge.condition) labelParts.push(edge.condition);
|
|
71
|
+
if (edge.transform) labelParts.push(edge.transform);
|
|
72
|
+
if (labelParts.length > 0) {
|
|
73
|
+
const edgeLabel = labelParts.join(" | ");
|
|
74
|
+
mermaid += ` ${edge.source} -- "${edgeLabel}" --> ${edge.target}\n`;
|
|
75
|
+
} else mermaid += ` ${edge.source} --> ${edge.target}\n`;
|
|
76
|
+
}
|
|
77
|
+
return mermaid;
|
|
78
|
+
}
|
|
79
|
+
/**
|
|
80
|
+
* Generates Mermaid diagram syntax from a WorkflowBlueprint with execution history styling
|
|
81
|
+
* @param blueprint The WorkflowBlueprint object containing nodes and edges
|
|
82
|
+
* @param events Array of FlowcraftEvent objects from the workflow execution
|
|
83
|
+
* @returns Mermaid syntax string for the flowchart with execution path highlighting
|
|
84
|
+
*/
|
|
85
|
+
function generateMermaidForRun(blueprint, events) {
|
|
86
|
+
if (!blueprint?.nodes || blueprint.nodes.length === 0) return "flowchart TD\n empty[Empty Blueprint]";
|
|
87
|
+
let mermaid = "flowchart TD\n";
|
|
88
|
+
const successfulNodes = /* @__PURE__ */ new Set();
|
|
89
|
+
const failedNodes = /* @__PURE__ */ new Set();
|
|
90
|
+
const takenEdges = /* @__PURE__ */ new Set();
|
|
91
|
+
for (const event of events) switch (event.type) {
|
|
92
|
+
case "node:finish":
|
|
93
|
+
successfulNodes.add(event.payload.nodeId);
|
|
94
|
+
break;
|
|
95
|
+
case "node:error":
|
|
96
|
+
failedNodes.add(event.payload.nodeId);
|
|
97
|
+
break;
|
|
98
|
+
case "edge:evaluate":
|
|
99
|
+
if (event.payload.result) {
|
|
100
|
+
const edgeKey = `${event.payload.source}->${event.payload.target}`;
|
|
101
|
+
takenEdges.add(edgeKey);
|
|
102
|
+
}
|
|
103
|
+
break;
|
|
104
|
+
}
|
|
105
|
+
for (const node of blueprint.nodes) {
|
|
106
|
+
const paramsString = node.params ? `<br/>params: ${JSON.stringify(node.params)}` : "";
|
|
107
|
+
const nodeLabel = `${node.id}${paramsString}`;
|
|
108
|
+
mermaid += ` ${node.id}["${nodeLabel}"]\n`;
|
|
109
|
+
}
|
|
110
|
+
for (const node of blueprint.nodes) if (successfulNodes.has(node.id)) mermaid += ` style ${node.id} fill:#d4edda,stroke:#c3e6cb\n`;
|
|
111
|
+
else if (failedNodes.has(node.id)) mermaid += ` style ${node.id} fill:#f8d7da,stroke:#f5c6cb\n`;
|
|
112
|
+
let edgeIndex = 0;
|
|
113
|
+
for (const edge of blueprint.edges || []) {
|
|
114
|
+
const labelParts = [];
|
|
115
|
+
if (edge.action) labelParts.push(edge.action);
|
|
116
|
+
if (edge.condition) labelParts.push(edge.condition);
|
|
117
|
+
if (edge.transform) labelParts.push(edge.transform);
|
|
118
|
+
const edgeKey = `${edge.source}->${edge.target}`;
|
|
119
|
+
const isTaken = takenEdges.has(edgeKey);
|
|
120
|
+
let edgeLine;
|
|
121
|
+
if (labelParts.length > 0) {
|
|
122
|
+
const edgeLabel = labelParts.join(" | ");
|
|
123
|
+
edgeLine = ` ${edge.source} -- "${edgeLabel}" --> ${edge.target}\n`;
|
|
124
|
+
} else edgeLine = ` ${edge.source} --> ${edge.target}\n`;
|
|
125
|
+
mermaid += edgeLine;
|
|
126
|
+
if (isTaken) mermaid += ` linkStyle ${edgeIndex} stroke:#007bff,stroke-width:3px\n`;
|
|
127
|
+
edgeIndex++;
|
|
128
|
+
}
|
|
129
|
+
return mermaid;
|
|
130
|
+
}
|
|
131
|
+
/**
|
|
132
|
+
* Analyzes a workflow blueprint and returns comprehensive analysis
|
|
133
|
+
* @param blueprint The WorkflowBlueprint object containing nodes and edges
|
|
134
|
+
* @returns Analysis result with cycles, start nodes, terminal nodes, and other metrics
|
|
135
|
+
*/
|
|
136
|
+
function analyzeBlueprint(blueprint) {
|
|
137
|
+
if (!blueprint?.nodes || blueprint.nodes.length === 0) return {
|
|
138
|
+
cycles: [],
|
|
139
|
+
startNodeIds: [],
|
|
140
|
+
terminalNodeIds: [],
|
|
141
|
+
nodeCount: 0,
|
|
142
|
+
edgeCount: 0,
|
|
143
|
+
isDag: true
|
|
144
|
+
};
|
|
145
|
+
const cycles = checkForCycles(blueprint);
|
|
146
|
+
const nodeCount = blueprint.nodes.length;
|
|
147
|
+
const edgeCount = blueprint.edges?.length || 0;
|
|
148
|
+
const nodesWithIncoming = /* @__PURE__ */ new Set();
|
|
149
|
+
for (const edge of blueprint.edges || []) nodesWithIncoming.add(edge.target);
|
|
150
|
+
const startNodeIds = blueprint.nodes.map((node) => node.id).filter((nodeId) => !nodesWithIncoming.has(nodeId));
|
|
151
|
+
const nodesWithOutgoing = /* @__PURE__ */ new Set();
|
|
152
|
+
for (const edge of blueprint.edges || []) nodesWithOutgoing.add(edge.source);
|
|
153
|
+
return {
|
|
154
|
+
cycles,
|
|
155
|
+
startNodeIds,
|
|
156
|
+
terminalNodeIds: blueprint.nodes.map((node) => node.id).filter((nodeId) => !nodesWithOutgoing.has(nodeId)),
|
|
157
|
+
nodeCount,
|
|
158
|
+
edgeCount,
|
|
159
|
+
isDag: cycles.length === 0
|
|
160
|
+
};
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
//#endregion
|
|
164
|
+
export { analyzeBlueprint, checkForCycles, generateMermaid, generateMermaidForRun };
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import { K as BaseNode, p as NodeContext, v as NodeResult } from "./types-BcrXJEPI.mjs";
|
|
2
|
+
|
|
3
|
+
//#region src/nodes/batch-gather.d.ts
|
|
4
|
+
declare class BatchGatherNode extends BaseNode {
|
|
5
|
+
exec(_prepResult: any, context: NodeContext<any, any, any>): Promise<Omit<NodeResult, 'error'>>;
|
|
6
|
+
}
|
|
7
|
+
//#endregion
|
|
8
|
+
export { BatchGatherNode as t };
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import { K as BaseNode, p as NodeContext, v as NodeResult } from "./types-BcrXJEPI.mjs";
|
|
2
|
+
|
|
3
|
+
//#region src/nodes/batch-scatter.d.ts
|
|
4
|
+
declare class BatchScatterNode extends BaseNode {
|
|
5
|
+
exec(_prepResult: any, context: NodeContext<any, any, any>): Promise<Omit<NodeResult, 'error'>>;
|
|
6
|
+
}
|
|
7
|
+
//#endregion
|
|
8
|
+
export { BatchScatterNode as t };
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
//#region src/container.d.ts
|
|
2
|
+
type ServiceToken<_T = any> = string | symbol;
|
|
3
|
+
declare class DIContainer {
|
|
4
|
+
private services;
|
|
5
|
+
private factories;
|
|
6
|
+
register<T>(token: ServiceToken<T>, implementation: T): void;
|
|
7
|
+
registerFactory<T>(token: ServiceToken<T>, factory: (container: DIContainer) => T): void;
|
|
8
|
+
resolve<T>(token: ServiceToken<T>): T;
|
|
9
|
+
has(token: ServiceToken): boolean;
|
|
10
|
+
createChild(): DIContainer;
|
|
11
|
+
}
|
|
12
|
+
declare const ServiceTokens: {
|
|
13
|
+
readonly Logger: symbol;
|
|
14
|
+
readonly Serializer: symbol;
|
|
15
|
+
readonly Evaluator: symbol;
|
|
16
|
+
readonly EventBus: symbol;
|
|
17
|
+
readonly Orchestrator: symbol;
|
|
18
|
+
readonly Middleware: symbol;
|
|
19
|
+
readonly NodeRegistry: symbol;
|
|
20
|
+
readonly BlueprintRegistry: symbol;
|
|
21
|
+
readonly Dependencies: symbol;
|
|
22
|
+
};
|
|
23
|
+
//#endregion
|
|
24
|
+
export { ServiceToken as n, ServiceTokens as r, DIContainer as t };
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { a as IEvaluator, b as RuntimeDependencies, c as ISerializer, d as NodeClass, h as NodeFunction, o as IEventBus, s as ILogger, u as Middleware, w as WorkflowBlueprint } from "./types-BcrXJEPI.mjs";
|
|
2
|
+
import { t as DIContainer } from "./container-BKdd-9wf.mjs";
|
|
3
|
+
|
|
4
|
+
//#region src/container-factory.d.ts
|
|
5
|
+
interface ContainerOptions<TDependencies extends RuntimeDependencies = RuntimeDependencies> {
|
|
6
|
+
logger?: ILogger;
|
|
7
|
+
serializer?: ISerializer;
|
|
8
|
+
evaluator?: IEvaluator;
|
|
9
|
+
eventBus?: IEventBus;
|
|
10
|
+
middleware?: Middleware[];
|
|
11
|
+
registry?: Record<string, NodeFunction | NodeClass>;
|
|
12
|
+
blueprints?: Record<string, WorkflowBlueprint>;
|
|
13
|
+
dependencies?: TDependencies;
|
|
14
|
+
}
|
|
15
|
+
declare function createDefaultContainer<TDependencies extends RuntimeDependencies = RuntimeDependencies>(options?: ContainerOptions<TDependencies>): DIContainer;
|
|
16
|
+
//#endregion
|
|
17
|
+
export { createDefaultContainer as n, ContainerOptions as t };
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
import { DIContainer, ServiceTokens } from "./container.mjs";
|
|
2
|
+
import { PropertyEvaluator } from "./evaluator.mjs";
|
|
3
|
+
import { NullLogger } from "./logger.mjs";
|
|
4
|
+
import { DefaultOrchestrator } from "./runtime/orchestrator.mjs";
|
|
5
|
+
import { JsonSerializer } from "./serializer.mjs";
|
|
6
|
+
|
|
7
|
+
//#region src/container-factory.ts
|
|
8
|
+
function createDefaultContainer(options = {}) {
|
|
9
|
+
const container = new DIContainer();
|
|
10
|
+
container.register(ServiceTokens.Logger, options.logger || new NullLogger());
|
|
11
|
+
container.register(ServiceTokens.Serializer, options.serializer || new JsonSerializer());
|
|
12
|
+
container.register(ServiceTokens.Evaluator, options.evaluator || new PropertyEvaluator());
|
|
13
|
+
container.register(ServiceTokens.EventBus, options.eventBus || { emit: async () => {} });
|
|
14
|
+
container.register(ServiceTokens.Middleware, options.middleware || []);
|
|
15
|
+
container.register(ServiceTokens.NodeRegistry, options.registry || {});
|
|
16
|
+
container.register(ServiceTokens.BlueprintRegistry, options.blueprints || {});
|
|
17
|
+
container.register(ServiceTokens.Dependencies, options.dependencies || {});
|
|
18
|
+
container.registerFactory(ServiceTokens.Orchestrator, () => new DefaultOrchestrator());
|
|
19
|
+
return container;
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
//#endregion
|
|
23
|
+
export { createDefaultContainer };
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
//#region src/container.ts
|
|
2
|
+
var DIContainer = class DIContainer {
	/** Ready-made singletons, keyed by service token. */
	services = /* @__PURE__ */ new Map();
	/** Lazy builders; each runs at most once, its product memoized into `services`. */
	factories = /* @__PURE__ */ new Map();
	/** Binds a concrete implementation to `token`. */
	register(token, implementation) {
		this.services.set(token, implementation);
	}
	/** Binds a factory that will construct the service on first resolution. */
	registerFactory(token, factory) {
		this.factories.set(token, factory);
	}
	/**
	 * Looks up a service for `token`. Direct registrations win; otherwise a
	 * registered factory is invoked (receiving this container) and its result
	 * cached for subsequent resolves.
	 * @throws Error when no registration or factory exists for the token.
	 */
	resolve(token) {
		if (this.services.has(token)) return this.services.get(token);
		if (!this.factories.has(token)) throw new Error(`Service not found for token: ${String(token)}`);
		const built = this.factories.get(token)?.(this);
		this.services.set(token, built);
		return built;
	}
	/** True when the token has either a registration or a pending factory. */
	has(token) {
		return this.services.has(token) || this.factories.has(token);
	}
	/**
	 * Produces an independent container seeded with snapshots of this one's
	 * registrations; later changes to either container do not affect the other.
	 */
	createChild() {
		const clone = new DIContainer();
		clone.services = new Map(this.services);
		clone.factories = new Map(this.factories);
		return clone;
	}
};
|
|
30
|
+
/** Builds a namespaced DI token; Symbol.for keeps tokens shared across realms/instances of the module. */
const makeServiceToken = (name) => Symbol.for(`flowcraft:${name}`);
/**
 * Well-known tokens identifying the runtime's core services in the container.
 */
const ServiceTokens = {
	Logger: makeServiceToken("logger"),
	Serializer: makeServiceToken("serializer"),
	Evaluator: makeServiceToken("evaluator"),
	EventBus: makeServiceToken("eventBus"),
	Orchestrator: makeServiceToken("orchestrator"),
	Middleware: makeServiceToken("middleware"),
	NodeRegistry: makeServiceToken("nodeRegistry"),
	BlueprintRegistry: makeServiceToken("blueprintRegistry"),
	Dependencies: makeServiceToken("dependencies")
};
|
|
41
|
+
|
|
42
|
+
//#endregion
|
|
43
|
+
export { DIContainer, ServiceTokens };
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
import { i as IAsyncContext, l as ISyncContext, y as PatchOperation } from "./types-BcrXJEPI.mjs";
|
|
2
|
+
|
|
3
|
+
//#region src/context.d.ts
|
|
4
|
+
/**
 * A default, high-performance, in-memory implementation of ISyncContext using a Map.
 */
declare class Context<TContext extends Record<string, any>> implements ISyncContext<TContext> {
	// Discriminant tag: the runtime uses this to tell sync contexts from async ones.
	readonly type: "sync";
	// Internal backing store (implementation lives in src/context.ts — not visible here).
	private data;
	constructor(initialData?: Partial<TContext>);
	/** Returns the value stored under `key`, or undefined when absent. */
	get<K extends keyof TContext>(key: K): TContext[K] | undefined;
	/** Stores `value` under `key`, replacing any existing entry. */
	set<K extends keyof TContext>(key: K, value: TContext[K]): void;
	/** True when an entry exists for `key`. */
	has<K extends keyof TContext>(key: K): boolean;
	/** Removes the entry for `key`; presumably returns whether an entry existed — confirm in src/context.ts. */
	delete<K extends keyof TContext>(key: K): boolean;
	/** Snapshots the context as a plain object (e.g. for serialization). */
	toJSON(): Record<string, any>;
}
|
|
17
|
+
/**
 * An adapter that provides a consistent, Promise-based view of a synchronous context.
 * This is created by the runtime and is transparent to the node author.
 */
declare class AsyncContextView<TContext extends Record<string, any>> implements IAsyncContext<TContext> {
	// The wrapped synchronous context that actually holds the data.
	private syncContext;
	// Discriminant tag: marks this view as asynchronous to the runtime.
	readonly type: "async";
	constructor(syncContext: ISyncContext<TContext>);
	/** Promise-wrapped counterpart of the sync context's get(). */
	get<K extends keyof TContext>(key: K): Promise<TContext[K] | undefined>;
	/** Promise-wrapped counterpart of the sync context's set(). */
	set<K extends keyof TContext>(key: K, value: TContext[K]): Promise<void>;
	/** Promise-wrapped counterpart of the sync context's has(). */
	has<K extends keyof TContext>(key: K): Promise<boolean>;
	/** Promise-wrapped counterpart of the sync context's delete(). */
	delete<K extends keyof TContext>(key: K): Promise<boolean>;
	/** Async snapshot of the underlying context as a plain object. */
	toJSON(): Promise<Record<string, any>>;
	// NOTE(review): the underscore-prefixed parameter suggests patches may be
	// ignored by this adapter — confirm against src/context.ts.
	patch(_operations: PatchOperation[]): Promise<void>;
}
|
|
32
|
+
/**
 * A proxy wrapper that tracks changes to an async context for delta-based persistence.
 * Records all mutations (set/delete operations) to enable efficient partial updates.
 */
declare class TrackedAsyncContext<TContext extends Record<string, any>> implements IAsyncContext<TContext> {
	// Discriminant tag: marks this context as asynchronous to the runtime.
	readonly type: "async";
	// Accumulated mutation records since the last clearDeltas(); see getDeltas().
	private deltas;
	// The wrapped context that all reads/writes are delegated to.
	private innerContext;
	// Optional event sink for mutation notifications; typed `any` in this build — see configureEventEmitter.
	private eventBus?;
	// Identifier of the workflow execution these mutations belong to, when configured.
	private executionId?;
	// Identifier of the node credited with the mutations, when configured.
	private sourceNode?;
	constructor(innerContext: IAsyncContext<TContext>, eventBus?: any, executionId?: string, sourceNode?: string);
	/** Reads `key` from the wrapped context. */
	get<K extends keyof TContext>(key: K): Promise<TContext[K] | undefined>;
	/** Writes `key` to the wrapped context; presumably recorded as a delta — confirm in src/context.ts. */
	set<K extends keyof TContext>(key: K, value: TContext[K]): Promise<void>;
	/** Existence check against the wrapped context. */
	has<K extends keyof TContext>(key: K): Promise<boolean>;
	/** Deletes `key` from the wrapped context; presumably recorded as a delta — confirm in src/context.ts. */
	delete<K extends keyof TContext>(key: K): Promise<boolean>;
	/** Snapshot of the wrapped context as a plain object. */
	toJSON(): Promise<Record<string, any>>;
	/** Applies a batch of patch operations to the wrapped context. */
	patch(operations: PatchOperation[]): Promise<void>;
	/** Returns the mutations recorded so far (for delta-based persistence). */
	getDeltas(): PatchOperation[];
	/** Discards all recorded mutations. */
	clearDeltas(): void;
	/**
	 * Configures the event emitter for tracking context changes.
	 * This enables the context to emit events when set/delete operations occur,
	 * allowing for external monitoring and persistence of context mutations.
	 *
	 * @param eventBus - The event bus instance to emit context change events
	 * @param executionId - The unique identifier for the current workflow execution
	 * @param sourceNode - Optional identifier for the node that triggered the context change
	 */
	configureEventEmitter(eventBus: any, executionId: string, sourceNode?: string): void;
}
|
|
63
|
+
//#endregion
|
|
64
|
+
export { Context as n, TrackedAsyncContext as r, AsyncContextView as t };
|