@uploadista/core 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +5 -0
- package/.turbo/turbo-check.log +231 -0
- package/.turbo/turbo-format.log +5 -0
- package/LICENSE +21 -0
- package/README.md +1120 -0
- package/dist/chunk-CUT6urMc.cjs +1 -0
- package/dist/debounce-C2SeqcxD.js +2 -0
- package/dist/debounce-C2SeqcxD.js.map +1 -0
- package/dist/debounce-LZK7yS7Z.cjs +1 -0
- package/dist/errors/index.cjs +1 -0
- package/dist/errors/index.d.cts +3 -0
- package/dist/errors/index.d.ts +3 -0
- package/dist/errors/index.d.ts.map +1 -0
- package/dist/errors/index.js +2 -0
- package/dist/errors/uploadista-error.d.ts +209 -0
- package/dist/errors/uploadista-error.d.ts.map +1 -0
- package/dist/errors/uploadista-error.js +322 -0
- package/dist/errors-8i_aMxOE.js +1 -0
- package/dist/errors-CRm1FHHT.cjs +0 -0
- package/dist/flow/edge.d.ts +47 -0
- package/dist/flow/edge.d.ts.map +1 -0
- package/dist/flow/edge.js +40 -0
- package/dist/flow/event.d.ts +206 -0
- package/dist/flow/event.d.ts.map +1 -0
- package/dist/flow/event.js +53 -0
- package/dist/flow/flow-server.d.ts +223 -0
- package/dist/flow/flow-server.d.ts.map +1 -0
- package/dist/flow/flow-server.js +614 -0
- package/dist/flow/flow.d.ts +238 -0
- package/dist/flow/flow.d.ts.map +1 -0
- package/dist/flow/flow.js +629 -0
- package/dist/flow/index.cjs +1 -0
- package/dist/flow/index.d.cts +6 -0
- package/dist/flow/index.d.ts +24 -0
- package/dist/flow/index.d.ts.map +1 -0
- package/dist/flow/index.js +24 -0
- package/dist/flow/node.d.ts +136 -0
- package/dist/flow/node.d.ts.map +1 -0
- package/dist/flow/node.js +153 -0
- package/dist/flow/nodes/index.d.ts +8 -0
- package/dist/flow/nodes/index.d.ts.map +1 -0
- package/dist/flow/nodes/index.js +7 -0
- package/dist/flow/nodes/input-node.d.ts +78 -0
- package/dist/flow/nodes/input-node.d.ts.map +1 -0
- package/dist/flow/nodes/input-node.js +233 -0
- package/dist/flow/nodes/storage-node.d.ts +67 -0
- package/dist/flow/nodes/storage-node.d.ts.map +1 -0
- package/dist/flow/nodes/storage-node.js +94 -0
- package/dist/flow/nodes/streaming-input-node.d.ts +69 -0
- package/dist/flow/nodes/streaming-input-node.d.ts.map +1 -0
- package/dist/flow/nodes/streaming-input-node.js +156 -0
- package/dist/flow/nodes/transform-node.d.ts +85 -0
- package/dist/flow/nodes/transform-node.d.ts.map +1 -0
- package/dist/flow/nodes/transform-node.js +107 -0
- package/dist/flow/parallel-scheduler.d.ts +175 -0
- package/dist/flow/parallel-scheduler.d.ts.map +1 -0
- package/dist/flow/parallel-scheduler.js +193 -0
- package/dist/flow/plugins/credential-provider.d.ts +47 -0
- package/dist/flow/plugins/credential-provider.d.ts.map +1 -0
- package/dist/flow/plugins/credential-provider.js +24 -0
- package/dist/flow/plugins/image-ai-plugin.d.ts +61 -0
- package/dist/flow/plugins/image-ai-plugin.d.ts.map +1 -0
- package/dist/flow/plugins/image-ai-plugin.js +21 -0
- package/dist/flow/plugins/image-plugin.d.ts +52 -0
- package/dist/flow/plugins/image-plugin.d.ts.map +1 -0
- package/dist/flow/plugins/image-plugin.js +22 -0
- package/dist/flow/plugins/types/describe-image-node.d.ts +16 -0
- package/dist/flow/plugins/types/describe-image-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/describe-image-node.js +9 -0
- package/dist/flow/plugins/types/index.d.ts +9 -0
- package/dist/flow/plugins/types/index.d.ts.map +1 -0
- package/dist/flow/plugins/types/index.js +8 -0
- package/dist/flow/plugins/types/optimize-node.d.ts +20 -0
- package/dist/flow/plugins/types/optimize-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/optimize-node.js +11 -0
- package/dist/flow/plugins/types/remove-background-node.d.ts +16 -0
- package/dist/flow/plugins/types/remove-background-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/remove-background-node.js +9 -0
- package/dist/flow/plugins/types/resize-node.d.ts +21 -0
- package/dist/flow/plugins/types/resize-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/resize-node.js +16 -0
- package/dist/flow/plugins/zip-plugin.d.ts +62 -0
- package/dist/flow/plugins/zip-plugin.d.ts.map +1 -0
- package/dist/flow/plugins/zip-plugin.js +21 -0
- package/dist/flow/typed-flow.d.ts +90 -0
- package/dist/flow/typed-flow.d.ts.map +1 -0
- package/dist/flow/typed-flow.js +59 -0
- package/dist/flow/types/flow-file.d.ts +45 -0
- package/dist/flow/types/flow-file.d.ts.map +1 -0
- package/dist/flow/types/flow-file.js +27 -0
- package/dist/flow/types/flow-job.d.ts +118 -0
- package/dist/flow/types/flow-job.d.ts.map +1 -0
- package/dist/flow/types/flow-job.js +11 -0
- package/dist/flow/types/flow-types.d.ts +321 -0
- package/dist/flow/types/flow-types.d.ts.map +1 -0
- package/dist/flow/types/flow-types.js +52 -0
- package/dist/flow/types/index.d.ts +4 -0
- package/dist/flow/types/index.d.ts.map +1 -0
- package/dist/flow/types/index.js +3 -0
- package/dist/flow/types/run-args.d.ts +38 -0
- package/dist/flow/types/run-args.d.ts.map +1 -0
- package/dist/flow/types/run-args.js +30 -0
- package/dist/flow/types/type-validator.d.ts +26 -0
- package/dist/flow/types/type-validator.d.ts.map +1 -0
- package/dist/flow/types/type-validator.js +134 -0
- package/dist/flow/utils/resolve-upload-metadata.d.ts +11 -0
- package/dist/flow/utils/resolve-upload-metadata.d.ts.map +1 -0
- package/dist/flow/utils/resolve-upload-metadata.js +28 -0
- package/dist/flow-2zXnEiWL.cjs +1 -0
- package/dist/flow-CRaKy7Vj.js +2 -0
- package/dist/flow-CRaKy7Vj.js.map +1 -0
- package/dist/generate-id-Dm-Vboxq.d.ts +34 -0
- package/dist/generate-id-Dm-Vboxq.d.ts.map +1 -0
- package/dist/generate-id-LjJRLD6N.d.cts +34 -0
- package/dist/generate-id-LjJRLD6N.d.cts.map +1 -0
- package/dist/generate-id-xHp_Z7Cl.cjs +1 -0
- package/dist/generate-id-yohS1ZDk.js +2 -0
- package/dist/generate-id-yohS1ZDk.js.map +1 -0
- package/dist/index-BO8GZlbD.d.cts +1040 -0
- package/dist/index-BO8GZlbD.d.cts.map +1 -0
- package/dist/index-BoGG5KAY.d.ts +1 -0
- package/dist/index-BtBZHVmz.d.cts +1 -0
- package/dist/index-D-CoVpkZ.d.ts +1004 -0
- package/dist/index-D-CoVpkZ.d.ts.map +1 -0
- package/dist/index.cjs +1 -0
- package/dist/index.d.cts +6 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +5 -0
- package/dist/logger/logger.cjs +1 -0
- package/dist/logger/logger.d.cts +8 -0
- package/dist/logger/logger.d.cts.map +1 -0
- package/dist/logger/logger.d.ts +5 -0
- package/dist/logger/logger.d.ts.map +1 -0
- package/dist/logger/logger.js +10 -0
- package/dist/logger/logger.js.map +1 -0
- package/dist/semaphore-0ZwjVpyF.js +2 -0
- package/dist/semaphore-0ZwjVpyF.js.map +1 -0
- package/dist/semaphore-BHprIjFI.d.cts +37 -0
- package/dist/semaphore-BHprIjFI.d.cts.map +1 -0
- package/dist/semaphore-DThupBkc.d.ts +37 -0
- package/dist/semaphore-DThupBkc.d.ts.map +1 -0
- package/dist/semaphore-DVrONiAV.cjs +1 -0
- package/dist/stream-limiter-CoWKv39w.js +2 -0
- package/dist/stream-limiter-CoWKv39w.js.map +1 -0
- package/dist/stream-limiter-JgOwmkMa.cjs +1 -0
- package/dist/streams/multi-stream.cjs +1 -0
- package/dist/streams/multi-stream.d.cts +91 -0
- package/dist/streams/multi-stream.d.cts.map +1 -0
- package/dist/streams/multi-stream.d.ts +86 -0
- package/dist/streams/multi-stream.d.ts.map +1 -0
- package/dist/streams/multi-stream.js +149 -0
- package/dist/streams/multi-stream.js.map +1 -0
- package/dist/streams/stream-limiter.cjs +1 -0
- package/dist/streams/stream-limiter.d.cts +36 -0
- package/dist/streams/stream-limiter.d.cts.map +1 -0
- package/dist/streams/stream-limiter.d.ts +27 -0
- package/dist/streams/stream-limiter.d.ts.map +1 -0
- package/dist/streams/stream-limiter.js +49 -0
- package/dist/streams/stream-splitter.cjs +1 -0
- package/dist/streams/stream-splitter.d.cts +68 -0
- package/dist/streams/stream-splitter.d.cts.map +1 -0
- package/dist/streams/stream-splitter.d.ts +51 -0
- package/dist/streams/stream-splitter.d.ts.map +1 -0
- package/dist/streams/stream-splitter.js +175 -0
- package/dist/streams/stream-splitter.js.map +1 -0
- package/dist/types/data-store-registry.d.ts +13 -0
- package/dist/types/data-store-registry.d.ts.map +1 -0
- package/dist/types/data-store-registry.js +4 -0
- package/dist/types/data-store.d.ts +316 -0
- package/dist/types/data-store.d.ts.map +1 -0
- package/dist/types/data-store.js +157 -0
- package/dist/types/event-broadcaster.d.ts +28 -0
- package/dist/types/event-broadcaster.d.ts.map +1 -0
- package/dist/types/event-broadcaster.js +6 -0
- package/dist/types/event-emitter.d.ts +378 -0
- package/dist/types/event-emitter.d.ts.map +1 -0
- package/dist/types/event-emitter.js +223 -0
- package/dist/types/index.cjs +1 -0
- package/dist/types/index.d.cts +6 -0
- package/dist/types/index.d.ts +10 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +9 -0
- package/dist/types/input-file.d.ts +104 -0
- package/dist/types/input-file.d.ts.map +1 -0
- package/dist/types/input-file.js +27 -0
- package/dist/types/kv-store.d.ts +281 -0
- package/dist/types/kv-store.d.ts.map +1 -0
- package/dist/types/kv-store.js +234 -0
- package/dist/types/middleware.d.ts +17 -0
- package/dist/types/middleware.d.ts.map +1 -0
- package/dist/types/middleware.js +21 -0
- package/dist/types/upload-event.d.ts +105 -0
- package/dist/types/upload-event.d.ts.map +1 -0
- package/dist/types/upload-event.js +71 -0
- package/dist/types/upload-file.d.ts +136 -0
- package/dist/types/upload-file.d.ts.map +1 -0
- package/dist/types/upload-file.js +34 -0
- package/dist/types/websocket.d.ts +144 -0
- package/dist/types/websocket.d.ts.map +1 -0
- package/dist/types/websocket.js +40 -0
- package/dist/types-BT-cvi7T.cjs +1 -0
- package/dist/types-DhU2j-XF.js +2 -0
- package/dist/types-DhU2j-XF.js.map +1 -0
- package/dist/upload/convert-to-stream.d.ts +38 -0
- package/dist/upload/convert-to-stream.d.ts.map +1 -0
- package/dist/upload/convert-to-stream.js +43 -0
- package/dist/upload/convert-upload-to-flow-file.d.ts +14 -0
- package/dist/upload/convert-upload-to-flow-file.d.ts.map +1 -0
- package/dist/upload/convert-upload-to-flow-file.js +21 -0
- package/dist/upload/create-upload.d.ts +68 -0
- package/dist/upload/create-upload.d.ts.map +1 -0
- package/dist/upload/create-upload.js +157 -0
- package/dist/upload/index.cjs +1 -0
- package/dist/upload/index.d.cts +6 -0
- package/dist/upload/index.d.ts +4 -0
- package/dist/upload/index.d.ts.map +1 -0
- package/dist/upload/index.js +3 -0
- package/dist/upload/mime.d.ts +24 -0
- package/dist/upload/mime.d.ts.map +1 -0
- package/dist/upload/mime.js +351 -0
- package/dist/upload/upload-chunk.d.ts +58 -0
- package/dist/upload/upload-chunk.d.ts.map +1 -0
- package/dist/upload/upload-chunk.js +277 -0
- package/dist/upload/upload-server.d.ts +221 -0
- package/dist/upload/upload-server.d.ts.map +1 -0
- package/dist/upload/upload-server.js +181 -0
- package/dist/upload/upload-strategy-negotiator.d.ts +148 -0
- package/dist/upload/upload-strategy-negotiator.d.ts.map +1 -0
- package/dist/upload/upload-strategy-negotiator.js +217 -0
- package/dist/upload/upload-url.d.ts +68 -0
- package/dist/upload/upload-url.d.ts.map +1 -0
- package/dist/upload/upload-url.js +142 -0
- package/dist/upload/write-to-store.d.ts +77 -0
- package/dist/upload/write-to-store.d.ts.map +1 -0
- package/dist/upload/write-to-store.js +147 -0
- package/dist/upload-DLuICjpP.cjs +1 -0
- package/dist/upload-DaXO34dE.js +2 -0
- package/dist/upload-DaXO34dE.js.map +1 -0
- package/dist/uploadista-error-BB-Wdiz9.cjs +22 -0
- package/dist/uploadista-error-BVsVxqvz.js +23 -0
- package/dist/uploadista-error-BVsVxqvz.js.map +1 -0
- package/dist/uploadista-error-CwxYs4EB.d.ts +52 -0
- package/dist/uploadista-error-CwxYs4EB.d.ts.map +1 -0
- package/dist/uploadista-error-kKlhLRhY.d.cts +52 -0
- package/dist/uploadista-error-kKlhLRhY.d.cts.map +1 -0
- package/dist/utils/checksum.d.ts +22 -0
- package/dist/utils/checksum.d.ts.map +1 -0
- package/dist/utils/checksum.js +49 -0
- package/dist/utils/debounce.cjs +1 -0
- package/dist/utils/debounce.d.cts +38 -0
- package/dist/utils/debounce.d.cts.map +1 -0
- package/dist/utils/debounce.d.ts +36 -0
- package/dist/utils/debounce.d.ts.map +1 -0
- package/dist/utils/debounce.js +73 -0
- package/dist/utils/generate-id.cjs +1 -0
- package/dist/utils/generate-id.d.cts +2 -0
- package/dist/utils/generate-id.d.ts +32 -0
- package/dist/utils/generate-id.d.ts.map +1 -0
- package/dist/utils/generate-id.js +23 -0
- package/dist/utils/md5.cjs +1 -0
- package/dist/utils/md5.d.cts +73 -0
- package/dist/utils/md5.d.cts.map +1 -0
- package/dist/utils/md5.d.ts +71 -0
- package/dist/utils/md5.d.ts.map +1 -0
- package/dist/utils/md5.js +417 -0
- package/dist/utils/md5.js.map +1 -0
- package/dist/utils/once.cjs +1 -0
- package/dist/utils/once.d.cts +25 -0
- package/dist/utils/once.d.cts.map +1 -0
- package/dist/utils/once.d.ts +21 -0
- package/dist/utils/once.d.ts.map +1 -0
- package/dist/utils/once.js +54 -0
- package/dist/utils/once.js.map +1 -0
- package/dist/utils/semaphore.cjs +1 -0
- package/dist/utils/semaphore.d.cts +3 -0
- package/dist/utils/semaphore.d.ts +78 -0
- package/dist/utils/semaphore.d.ts.map +1 -0
- package/dist/utils/semaphore.js +134 -0
- package/dist/utils/throttle.cjs +1 -0
- package/dist/utils/throttle.d.cts +24 -0
- package/dist/utils/throttle.d.cts.map +1 -0
- package/dist/utils/throttle.d.ts +18 -0
- package/dist/utils/throttle.d.ts.map +1 -0
- package/dist/utils/throttle.js +20 -0
- package/dist/utils/throttle.js.map +1 -0
- package/docs/PARALLEL_EXECUTION.md +206 -0
- package/docs/PARALLEL_EXECUTION_QUICKSTART.md +142 -0
- package/docs/PARALLEL_EXECUTION_REFACTOR.md +184 -0
- package/package.json +80 -0
- package/src/errors/__tests__/uploadista-error.test.ts +251 -0
- package/src/errors/index.ts +2 -0
- package/src/errors/uploadista-error.ts +394 -0
- package/src/flow/README.md +352 -0
- package/src/flow/edge.test.ts +146 -0
- package/src/flow/edge.ts +60 -0
- package/src/flow/event.ts +229 -0
- package/src/flow/flow-server.ts +1089 -0
- package/src/flow/flow.ts +1050 -0
- package/src/flow/index.ts +28 -0
- package/src/flow/node.ts +249 -0
- package/src/flow/nodes/index.ts +8 -0
- package/src/flow/nodes/input-node.ts +296 -0
- package/src/flow/nodes/storage-node.ts +128 -0
- package/src/flow/nodes/transform-node.ts +154 -0
- package/src/flow/parallel-scheduler.ts +259 -0
- package/src/flow/plugins/credential-provider.ts +48 -0
- package/src/flow/plugins/image-ai-plugin.ts +66 -0
- package/src/flow/plugins/image-plugin.ts +60 -0
- package/src/flow/plugins/types/describe-image-node.ts +16 -0
- package/src/flow/plugins/types/index.ts +9 -0
- package/src/flow/plugins/types/optimize-node.ts +18 -0
- package/src/flow/plugins/types/remove-background-node.ts +18 -0
- package/src/flow/plugins/types/resize-node.ts +26 -0
- package/src/flow/plugins/zip-plugin.ts +69 -0
- package/src/flow/typed-flow.ts +279 -0
- package/src/flow/types/flow-file.ts +51 -0
- package/src/flow/types/flow-job.ts +138 -0
- package/src/flow/types/flow-types.ts +353 -0
- package/src/flow/types/index.ts +6 -0
- package/src/flow/types/run-args.ts +40 -0
- package/src/flow/types/type-validator.ts +204 -0
- package/src/flow/utils/resolve-upload-metadata.ts +48 -0
- package/src/index.ts +5 -0
- package/src/logger/logger.ts +14 -0
- package/src/streams/stream-limiter.test.ts +150 -0
- package/src/streams/stream-limiter.ts +75 -0
- package/src/types/data-store.ts +427 -0
- package/src/types/event-broadcaster.ts +39 -0
- package/src/types/event-emitter.ts +349 -0
- package/src/types/index.ts +9 -0
- package/src/types/input-file.ts +107 -0
- package/src/types/kv-store.ts +375 -0
- package/src/types/middleware.ts +54 -0
- package/src/types/upload-event.ts +75 -0
- package/src/types/upload-file.ts +139 -0
- package/src/types/websocket.ts +65 -0
- package/src/upload/convert-to-stream.ts +48 -0
- package/src/upload/create-upload.ts +214 -0
- package/src/upload/index.ts +3 -0
- package/src/upload/mime.ts +436 -0
- package/src/upload/upload-chunk.ts +364 -0
- package/src/upload/upload-server.ts +390 -0
- package/src/upload/upload-strategy-negotiator.ts +316 -0
- package/src/upload/upload-url.ts +173 -0
- package/src/upload/write-to-store.ts +211 -0
- package/src/utils/checksum.ts +61 -0
- package/src/utils/debounce.test.ts +126 -0
- package/src/utils/debounce.ts +89 -0
- package/src/utils/generate-id.ts +35 -0
- package/src/utils/md5.ts +475 -0
- package/src/utils/once.test.ts +83 -0
- package/src/utils/once.ts +63 -0
- package/src/utils/throttle.test.ts +101 -0
- package/src/utils/throttle.ts +29 -0
- package/tsconfig.json +20 -0
- package/tsconfig.tsbuildinfo +1 -0
- package/tsdown.config.ts +25 -0
- package/vitest.config.ts +15 -0
package/src/flow/flow.ts
ADDED
|
@@ -0,0 +1,1050 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Core Flow Engine implementation using Effect-based DAG execution.
|
|
3
|
+
*
|
|
4
|
+
* This module implements the Flow Engine, which executes directed acyclic graphs (DAGs)
|
|
5
|
+
* of processing nodes. It supports sequential execution with topological sorting,
|
|
6
|
+
* conditional node execution, retry logic, and pausable flows.
|
|
7
|
+
*
|
|
8
|
+
* @module flow
|
|
9
|
+
* @see {@link createFlowWithSchema} for creating new flows
|
|
10
|
+
* @see {@link Flow} for the flow type definition
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
/** biome-ignore-all lint/suspicious/noExplicitAny: any is used to allow for dynamic types */
|
|
14
|
+
|
|
15
|
+
import { Effect } from "effect";
|
|
16
|
+
import { z } from "zod";
|
|
17
|
+
|
|
18
|
+
import { UploadistaError } from "../errors";
|
|
19
|
+
import type { FlowEdge } from "./edge";
|
|
20
|
+
import { EventType } from "./event";
|
|
21
|
+
import { getNodeData } from "./node";
|
|
22
|
+
import { ParallelScheduler } from "./parallel-scheduler";
|
|
23
|
+
import type { FlowConfig, FlowNode, FlowNodeData } from "./types/flow-types";
|
|
24
|
+
import { FlowTypeValidator } from "./types/type-validator";
|
|
25
|
+
|
|
26
|
+
/**
 * Serialized flow data for storage and transport.
 * Contains the minimal information needed to reconstruct a flow,
 * stripped of all execution logic (node `run` functions are omitted).
 *
 * @property id - Unique flow identifier
 * @property name - Human-readable flow name
 * @property nodes - Array of node data (without execution logic)
 * @property edges - Connections between nodes defining data flow
 */
export type FlowData = {
  id: string;
  name: string;
  nodes: FlowNodeData[];
  edges: FlowEdge[];
};
|
|
41
|
+
|
|
42
|
+
/**
|
|
43
|
+
* Extracts serializable flow data from a Flow instance.
|
|
44
|
+
* Useful for storing flow definitions or sending them over the network.
|
|
45
|
+
*
|
|
46
|
+
* @template TRequirements - Effect requirements for the flow
|
|
47
|
+
* @param flow - Flow instance to extract data from
|
|
48
|
+
* @returns Serializable flow data without execution logic
|
|
49
|
+
*
|
|
50
|
+
* @example
|
|
51
|
+
* ```typescript
|
|
52
|
+
* const flowData = getFlowData(myFlow);
|
|
53
|
+
* // Store in database or send to client
|
|
54
|
+
* await db.flows.save(flowData);
|
|
55
|
+
* ```
|
|
56
|
+
*/
|
|
57
|
+
export const getFlowData = <TRequirements>(
|
|
58
|
+
flow: Flow<any, any, TRequirements>,
|
|
59
|
+
): FlowData => {
|
|
60
|
+
return {
|
|
61
|
+
id: flow.id,
|
|
62
|
+
name: flow.name,
|
|
63
|
+
nodes: flow.nodes.map(getNodeData),
|
|
64
|
+
edges: flow.edges,
|
|
65
|
+
};
|
|
66
|
+
};
|
|
67
|
+
|
|
68
|
+
/**
 * Result of a flow execution - either completed or paused.
 *
 * @template TOutput - Type of the flow's output data
 *
 * @remarks
 * Flows can pause when a node needs additional data (e.g., waiting for user input
 * or an external service). The `executionState` captured on pause holds the
 * topological execution order, the index of the paused node, and the original
 * inputs, which is exactly what `flow.resume` needs to continue from that point.
 *
 * @example
 * ```typescript
 * const result = await Effect.runPromise(flow.run({ inputs, storageId, jobId }));
 *
 * if (result.type === "completed") {
 *   console.log("Flow completed:", result.result);
 * } else {
 *   console.log("Flow paused at node:", result.nodeId);
 *   // Can resume later with: flow.resume({ jobId, executionState: result.executionState, ... })
 * }
 * ```
 */
export type FlowExecutionResult<TOutput> =
  | { type: "completed"; result: TOutput }
  | {
      type: "paused";
      // Identifier of the node that requested the pause.
      nodeId: string;
      // Snapshot of progress so the run can be resumed later.
      executionState: {
        executionOrder: string[];
        currentIndex: number;
        inputs: Record<string, unknown>;
      };
    };
|
|
100
|
+
|
|
101
|
+
/**
 * A Flow represents a directed acyclic graph (DAG) of processing nodes.
 *
 * Flows execute nodes in topological order, passing data between nodes through edges.
 * They support conditional execution, retry logic, pausable nodes, and event emission.
 *
 * @template TFlowInputSchema - Zod schema defining the shape of input data
 * @template TFlowOutputSchema - Zod schema defining the shape of output data
 * @template TRequirements - Effect requirements (services/contexts) needed by nodes
 *
 * @property id - Unique flow identifier
 * @property name - Human-readable flow name
 * @property nodes - Array of nodes in the flow
 * @property edges - Connections between nodes
 * @property inputSchema - Zod schema for validating flow inputs
 * @property outputSchema - Zod schema for validating flow outputs
 * @property onEvent - Optional callback for flow execution events
 * @property run - Executes the flow from the beginning
 * @property resume - Resumes a paused flow execution
 * @property validateTypes - Validates node type compatibility
 * @property validateInputs - Validates input data against schema
 * @property validateOutputs - Validates output data against schema
 *
 * @remarks
 * Flows are created using {@link createFlowWithSchema}. The Effect-based design
 * allows for composable error handling, resource management, and dependency injection.
 *
 * @example
 * ```typescript
 * const flow = yield* createFlowWithSchema({
 *   flowId: "image-pipeline",
 *   name: "Image Processing Pipeline",
 *   nodes: [inputNode, resizeNode, optimizeNode, storageNode],
 *   edges: [
 *     { source: "input", target: "resize" },
 *     { source: "resize", target: "optimize" },
 *     { source: "optimize", target: "storage" }
 *   ],
 *   inputSchema: z.object({ file: z.instanceof(File) }),
 *   outputSchema: uploadFileSchema
 * });
 *
 * const result = yield* flow.run({
 *   inputs: { input: { file: myFile } },
 *   storageId: "storage-1",
 *   jobId: "job-123"
 * });
 * ```
 */
export type Flow<
  TFlowInputSchema extends z.ZodSchema<any>,
  TFlowOutputSchema extends z.ZodSchema<any>,
  TRequirements,
> = {
  id: string;
  name: string;
  nodes: FlowNode<any, any, UploadistaError>[];
  edges: FlowEdge[];
  inputSchema: TFlowInputSchema;
  outputSchema: TFlowOutputSchema;
  onEvent?: FlowConfig<
    TFlowInputSchema,
    TFlowOutputSchema,
    TRequirements
  >["onEvent"];
  // Starts a fresh execution; `inputs` are keyed by node id.
  run: (args: {
    inputs?: Record<string, z.infer<TFlowInputSchema>>;
    storageId: string;
    jobId: string;
    clientId: string | null;
  }) => Effect.Effect<
    FlowExecutionResult<Record<string, z.infer<TFlowOutputSchema>>>,
    UploadistaError,
    TRequirements
  >;
  // Continues a paused execution from the saved execution state.
  resume: (args: {
    jobId: string;
    storageId: string;
    nodeResults: Record<string, unknown>; // Reconstructed from tasks
    executionState: {
      executionOrder: string[];
      currentIndex: number;
      inputs: Record<string, z.infer<TFlowInputSchema>>;
    };
    clientId: string | null;
  }) => Effect.Effect<
    FlowExecutionResult<Record<string, z.infer<TFlowOutputSchema>>>,
    UploadistaError,
    TRequirements
  >;
  validateTypes: () => { isValid: boolean; errors: string[] };
  validateInputs: (inputs: unknown) => { isValid: boolean; errors: string[] };
  validateOutputs: (outputs: unknown) => { isValid: boolean; errors: string[] };
};
|
|
195
|
+
|
|
196
|
+
/**
|
|
197
|
+
* Creates a new Flow with Zod schema-based type validation.
|
|
198
|
+
*
|
|
199
|
+
* This is the primary way to create flows in Uploadista. It constructs a Flow
|
|
200
|
+
* instance that validates inputs/outputs, executes nodes in topological order,
|
|
201
|
+
* handles errors with retries, and emits events during execution.
|
|
202
|
+
*
|
|
203
|
+
* @template TFlowInputSchema - Zod schema for flow input validation
|
|
204
|
+
* @template TFlowOutputSchema - Zod schema for flow output validation
|
|
205
|
+
* @template TRequirements - Effect requirements/services needed by the flow
|
|
206
|
+
* @template TNodeError - Union of possible errors from nodes
|
|
207
|
+
* @template TNodeRequirements - Union of requirements from nodes
|
|
208
|
+
*
|
|
209
|
+
* @param config - Flow configuration object
|
|
210
|
+
* @param config.flowId - Unique identifier for the flow
|
|
211
|
+
* @param config.name - Human-readable flow name
|
|
212
|
+
* @param config.nodes - Array of nodes (can be plain nodes or Effects resolving to nodes)
|
|
213
|
+
* @param config.edges - Array of edges connecting nodes
|
|
214
|
+
* @param config.inputSchema - Zod schema for validating inputs
|
|
215
|
+
* @param config.outputSchema - Zod schema for validating outputs
|
|
216
|
+
* @param config.typeChecker - Optional custom type compatibility checker
|
|
217
|
+
* @param config.onEvent - Optional event callback for monitoring execution
|
|
218
|
+
*
|
|
219
|
+
* @returns Effect that resolves to a Flow instance
|
|
220
|
+
*
|
|
221
|
+
* @throws {UploadistaError} FLOW_CYCLE_ERROR if the graph contains cycles
|
|
222
|
+
* @throws {UploadistaError} FLOW_NODE_NOT_FOUND if a node is referenced but missing
|
|
223
|
+
* @throws {UploadistaError} FLOW_NODE_ERROR if node execution fails
|
|
224
|
+
* @throws {UploadistaError} FLOW_OUTPUT_VALIDATION_ERROR if outputs don't match schema
|
|
225
|
+
*
|
|
226
|
+
* @remarks
|
|
227
|
+
* - Nodes can be provided as plain objects or as Effects that resolve to nodes
|
|
228
|
+
* - The flow performs topological sorting to determine execution order
|
|
229
|
+
* - Conditional nodes are evaluated before execution
|
|
230
|
+
* - Nodes can specify retry configuration with exponential backoff
|
|
231
|
+
* - Pausable nodes can halt execution and resume later
|
|
232
|
+
*
|
|
233
|
+
* @example
|
|
234
|
+
* ```typescript
|
|
235
|
+
* const flow = yield* createFlowWithSchema({
|
|
236
|
+
* flowId: "image-upload",
|
|
237
|
+
* name: "Image Upload with Processing",
|
|
238
|
+
* nodes: [
|
|
239
|
+
* inputNode,
|
|
240
|
+
* yield* createResizeNode({ width: 1920, height: 1080 }),
|
|
241
|
+
* optimizeNode,
|
|
242
|
+
* storageNode
|
|
243
|
+
* ],
|
|
244
|
+
* edges: [
|
|
245
|
+
* { source: "input", target: "resize" },
|
|
246
|
+
* { source: "resize", target: "optimize" },
|
|
247
|
+
* { source: "optimize", target: "storage" }
|
|
248
|
+
* ],
|
|
249
|
+
* inputSchema: z.object({
|
|
250
|
+
* file: z.instanceof(File),
|
|
251
|
+
* metadata: z.record(z.string(), z.any()).optional()
|
|
252
|
+
* }),
|
|
253
|
+
* outputSchema: uploadFileSchema,
|
|
254
|
+
* onEvent: (event) => Effect.gen(function* () {
|
|
255
|
+
* console.log("Flow event:", event);
|
|
256
|
+
* return { eventId: event.jobId };
|
|
257
|
+
* })
|
|
258
|
+
* });
|
|
259
|
+
* ```
|
|
260
|
+
*
|
|
261
|
+
* @see {@link Flow} for the returned flow type
|
|
262
|
+
* @see {@link FlowConfig} for configuration options
|
|
263
|
+
*/
|
|
264
|
+
export function createFlowWithSchema<
|
|
265
|
+
TFlowInputSchema extends z.ZodSchema<any>,
|
|
266
|
+
TFlowOutputSchema extends z.ZodSchema<any>,
|
|
267
|
+
TRequirements = never,
|
|
268
|
+
TNodeError = never,
|
|
269
|
+
TNodeRequirements = never,
|
|
270
|
+
>(
|
|
271
|
+
config: FlowConfig<
|
|
272
|
+
TFlowInputSchema,
|
|
273
|
+
TFlowOutputSchema,
|
|
274
|
+
TNodeError,
|
|
275
|
+
TNodeRequirements
|
|
276
|
+
>,
|
|
277
|
+
): Effect.Effect<
|
|
278
|
+
Flow<TFlowInputSchema, TFlowOutputSchema, TRequirements>,
|
|
279
|
+
TNodeError,
|
|
280
|
+
TNodeRequirements
|
|
281
|
+
> {
|
|
282
|
+
return Effect.gen(function* () {
|
|
283
|
+
// Resolve nodes - handle mixed arrays of pure nodes and Effect nodes
|
|
284
|
+
const resolvedNodes: Array<FlowNode<any, any, UploadistaError>> =
|
|
285
|
+
yield* Effect.all(
|
|
286
|
+
config.nodes.map((node) =>
|
|
287
|
+
Effect.isEffect(node)
|
|
288
|
+
? (node as Effect.Effect<
|
|
289
|
+
FlowNode<any, any, UploadistaError>,
|
|
290
|
+
TNodeError,
|
|
291
|
+
TNodeRequirements
|
|
292
|
+
>)
|
|
293
|
+
: Effect.succeed(node as FlowNode<any, any, UploadistaError>),
|
|
294
|
+
),
|
|
295
|
+
);
|
|
296
|
+
|
|
297
|
+
const {
|
|
298
|
+
flowId,
|
|
299
|
+
name,
|
|
300
|
+
onEvent,
|
|
301
|
+
edges,
|
|
302
|
+
inputSchema,
|
|
303
|
+
outputSchema,
|
|
304
|
+
typeChecker,
|
|
305
|
+
} = config;
|
|
306
|
+
const nodes = resolvedNodes;
|
|
307
|
+
const typeValidator = new FlowTypeValidator(typeChecker);
|
|
308
|
+
|
|
309
|
+
// Build adjacency list for topological sorting
|
|
310
|
+
const buildGraph = () => {
|
|
311
|
+
const graph: Record<string, string[]> = {};
|
|
312
|
+
const inDegree: Record<string, number> = {};
|
|
313
|
+
const reverseGraph: Record<string, string[]> = {};
|
|
314
|
+
|
|
315
|
+
// Initialize
|
|
316
|
+
nodes.forEach((node: any) => {
|
|
317
|
+
graph[node.id] = [];
|
|
318
|
+
reverseGraph[node.id] = [];
|
|
319
|
+
inDegree[node.id] = 0;
|
|
320
|
+
});
|
|
321
|
+
|
|
322
|
+
// Build edges
|
|
323
|
+
edges.forEach((edge: any) => {
|
|
324
|
+
graph[edge.source]?.push(edge.target);
|
|
325
|
+
reverseGraph[edge.target]?.push(edge.source);
|
|
326
|
+
inDegree[edge.target] = (inDegree[edge.target] || 0) + 1;
|
|
327
|
+
});
|
|
328
|
+
|
|
329
|
+
return { graph, reverseGraph, inDegree };
|
|
330
|
+
};
|
|
331
|
+
|
|
332
|
+
// Topological sort to determine execution order
|
|
333
|
+
const topologicalSort = () => {
|
|
334
|
+
const { graph, inDegree } = buildGraph();
|
|
335
|
+
const queue: string[] = [];
|
|
336
|
+
const result: string[] = [];
|
|
337
|
+
|
|
338
|
+
// Add nodes with no incoming edges
|
|
339
|
+
Object.keys(inDegree).forEach((nodeId) => {
|
|
340
|
+
if (inDegree[nodeId] === 0) {
|
|
341
|
+
queue.push(nodeId);
|
|
342
|
+
}
|
|
343
|
+
});
|
|
344
|
+
|
|
345
|
+
while (queue.length > 0) {
|
|
346
|
+
const current = queue.shift();
|
|
347
|
+
if (!current) {
|
|
348
|
+
throw new Error("No current node found");
|
|
349
|
+
}
|
|
350
|
+
result.push(current);
|
|
351
|
+
|
|
352
|
+
graph[current]?.forEach((neighbor: any) => {
|
|
353
|
+
inDegree[neighbor] = (inDegree[neighbor] || 0) - 1;
|
|
354
|
+
if (inDegree[neighbor] === 0) {
|
|
355
|
+
queue.push(neighbor);
|
|
356
|
+
}
|
|
357
|
+
});
|
|
358
|
+
}
|
|
359
|
+
|
|
360
|
+
return result;
|
|
361
|
+
};
|
|
362
|
+
|
|
363
|
+
// Evaluate condition for conditional nodes using Effect
|
|
364
|
+
const evaluateCondition = (
|
|
365
|
+
node: FlowNode<any, any, UploadistaError>,
|
|
366
|
+
data: unknown,
|
|
367
|
+
): Effect.Effect<boolean, never> => {
|
|
368
|
+
if (!node.condition) return Effect.succeed(true);
|
|
369
|
+
|
|
370
|
+
const { field, operator, value } = node.condition;
|
|
371
|
+
const dataRecord = data as Record<string, unknown>;
|
|
372
|
+
const metadata = dataRecord?.metadata as
|
|
373
|
+
| Record<string, unknown>
|
|
374
|
+
| undefined;
|
|
375
|
+
const fieldValue = metadata?.[field] || dataRecord?.[field];
|
|
376
|
+
|
|
377
|
+
const result = (() => {
|
|
378
|
+
switch (operator) {
|
|
379
|
+
case "equals":
|
|
380
|
+
return fieldValue === value;
|
|
381
|
+
case "notEquals":
|
|
382
|
+
return fieldValue !== value;
|
|
383
|
+
case "greaterThan":
|
|
384
|
+
return Number(fieldValue) > Number(value);
|
|
385
|
+
case "lessThan":
|
|
386
|
+
return Number(fieldValue) < Number(value);
|
|
387
|
+
case "contains":
|
|
388
|
+
return String(fieldValue).includes(String(value));
|
|
389
|
+
case "startsWith":
|
|
390
|
+
return String(fieldValue).startsWith(String(value));
|
|
391
|
+
default:
|
|
392
|
+
return true;
|
|
393
|
+
}
|
|
394
|
+
})();
|
|
395
|
+
|
|
396
|
+
return Effect.succeed(result);
|
|
397
|
+
};
|
|
398
|
+
|
|
399
|
+
// Get all inputs for a node
|
|
400
|
+
const getNodeInputs = (
|
|
401
|
+
nodeId: string,
|
|
402
|
+
nodeResults: Map<string, unknown>,
|
|
403
|
+
) => {
|
|
404
|
+
const { reverseGraph } = buildGraph();
|
|
405
|
+
const incomingNodes = reverseGraph[nodeId] || [];
|
|
406
|
+
const inputs: Record<string, unknown> = {};
|
|
407
|
+
|
|
408
|
+
incomingNodes.forEach((sourceNodeId: any) => {
|
|
409
|
+
const result = nodeResults.get(sourceNodeId);
|
|
410
|
+
if (result !== undefined) {
|
|
411
|
+
inputs[sourceNodeId] = result;
|
|
412
|
+
}
|
|
413
|
+
});
|
|
414
|
+
|
|
415
|
+
return inputs;
|
|
416
|
+
};
|
|
417
|
+
|
|
418
|
+
// Map flow inputs to input nodes
|
|
419
|
+
const mapFlowInputsToNodes = (
|
|
420
|
+
flowInputs: Record<string, z.infer<TFlowInputSchema>>,
|
|
421
|
+
) => {
|
|
422
|
+
const inputNodes = nodes.filter((node: any) => node.type === "input");
|
|
423
|
+
const mappedInputs: Record<string, z.infer<TFlowInputSchema>> = {};
|
|
424
|
+
|
|
425
|
+
inputNodes.forEach((node: any) => {
|
|
426
|
+
if (
|
|
427
|
+
flowInputs &&
|
|
428
|
+
typeof flowInputs === "object" &&
|
|
429
|
+
node.id in flowInputs
|
|
430
|
+
) {
|
|
431
|
+
mappedInputs[node.id] = inputSchema.parse(flowInputs[node.id]);
|
|
432
|
+
}
|
|
433
|
+
});
|
|
434
|
+
|
|
435
|
+
return mappedInputs;
|
|
436
|
+
};
|
|
437
|
+
|
|
438
|
+
// Collect outputs from output nodes
|
|
439
|
+
const collectFlowOutputs = (
|
|
440
|
+
nodeResults: Map<string, unknown>,
|
|
441
|
+
): Record<string, z.infer<TFlowInputSchema>> => {
|
|
442
|
+
const outputNodes = nodes.filter((node: any) => node.type === "output");
|
|
443
|
+
const flowOutputs: Record<string, unknown> = {};
|
|
444
|
+
|
|
445
|
+
outputNodes.forEach((node: any) => {
|
|
446
|
+
const result = nodeResults.get(node.id);
|
|
447
|
+
if (result !== undefined) {
|
|
448
|
+
flowOutputs[node.id] = result;
|
|
449
|
+
}
|
|
450
|
+
});
|
|
451
|
+
|
|
452
|
+
return flowOutputs as Record<string, z.infer<TFlowInputSchema>>;
|
|
453
|
+
};
|
|
454
|
+
|
|
455
|
+
// Execute a single node of the flow using Effect.
// Resolves the node's input(s), honours conditional skipping, invokes
// node.run, and applies the node's per-node retry policy. Emits
// NodeStart / NodeEnd / NodePause / NodeError lifecycle events via
// onEvent when a handler is configured.
// Returns { nodeId, result, success, waiting }; `waiting: true` means
// the node asked to pause (partial data) and the flow should suspend.
const executeNode = (
  nodeId: string,
  storageId: string,
  nodeInputs: Record<string, z.infer<TFlowInputSchema>>,
  nodeResults: Map<string, unknown>,
  nodeMap: Map<string, FlowNode<any, any, UploadistaError>>,
  jobId: string,
  clientId: string | null,
): Effect.Effect<
  { nodeId: string; result: unknown; success: boolean; waiting: boolean },
  UploadistaError
> => {
  return Effect.gen(function* () {
    const node = nodeMap.get(nodeId);
    if (!node) {
      return yield* UploadistaError.fromCode(
        "FLOW_NODE_NOT_FOUND",
      ).toEffect();
    }

    // Emit NodeStart event if provided
    if (onEvent) {
      yield* onEvent({
        jobId,
        flowId,
        nodeId,
        eventType: EventType.NodeStart,
        nodeName: node.name,
        nodeType: node.type,
      });
    }

    // Get retry configuration (defaults: no retries, 1s base delay,
    // exponential backoff on).
    const maxRetries = node.retry?.maxRetries ?? 0;
    const baseDelay = node.retry?.retryDelay ?? 1000;
    const useExponentialBackoff = node.retry?.exponentialBackoff ?? true;

    let retryCount = 0;
    let lastError: UploadistaError | null = null;

    // Retry loop
    // NOTE(review): try/catch catches synchronous throws from the body;
    // whether a *failed* yielded Effect (e.g. node.run failing on the
    // error channel) surfaces here as a JS exception depends on the
    // effect library's generator semantics — confirm the retry path
    // actually engages for Effect-channel failures.
    while (retryCount <= maxRetries) {
      try {
        // Prepare input data for the node
        let nodeInput: unknown;
        let nodeInputsForExecution: Record<string, unknown> = {};

        if (node.type === "input") {
          // For input nodes, use the mapped flow input
          nodeInput = nodeInputs[nodeId];
          if (nodeInput === undefined) {
            return yield* UploadistaError.fromCode("FLOW_NODE_ERROR", {
              cause: new Error(`Input node ${nodeId} has no input data`),
            }).toEffect();
          }
        } else {
          // Get all inputs for the node
          nodeInputsForExecution = getNodeInputs(nodeId, nodeResults);

          if (Object.keys(nodeInputsForExecution).length === 0) {
            return yield* UploadistaError.fromCode("FLOW_NODE_ERROR", {
              cause: new Error(`Node ${nodeId} has no input data`),
            }).toEffect();
          }

          // For single input nodes, use the first input
          if (!node.multiInput) {
            const firstInputKey = Object.keys(nodeInputsForExecution)[0];
            if (!firstInputKey) {
              return yield* UploadistaError.fromCode("FLOW_NODE_ERROR", {
                cause: new Error(`Node ${nodeId} has no input data`),
              }).toEffect();
            }
            nodeInput = nodeInputsForExecution[firstInputKey];
          } else {
            // For multi-input nodes, pass all inputs
            nodeInput = nodeInputsForExecution;
          }
        }

        // Check condition for conditional nodes
        if (node.type === "conditional") {
          const conditionResult = yield* evaluateCondition(node, nodeInput);
          if (!conditionResult) {
            // Condition failed: skip the node but pass its input through
            // unchanged as the "result", and report success.
            if (onEvent) {
              yield* onEvent({
                jobId,
                flowId,
                nodeId,
                eventType: EventType.NodeEnd,
                nodeName: node.name,
              });
            }
            return {
              nodeId,
              result: nodeInput,
              success: true,
              waiting: false,
            };
          }
        }

        // Execute the node
        const executionResult = yield* node.run({
          data: nodeInput,
          inputs: nodeInputsForExecution,
          jobId,
          flowId,
          storageId,
          clientId,
        });

        // Handle execution result
        if (executionResult.type === "waiting") {
          // Node is waiting for more data - pause execution
          const result = executionResult.partialData;

          // Emit NodePause event with partial data result
          if (onEvent) {
            yield* onEvent({
              jobId,
              flowId,
              nodeId,
              eventType: EventType.NodePause,
              nodeName: node.name,
              partialData: result,
            });
          }

          return {
            nodeId,
            result,
            success: true,
            waiting: true,
          };
        }

        // Node completed successfully
        const result = executionResult.data;

        // Emit NodeEnd event with result
        if (onEvent) {
          yield* onEvent({
            jobId,
            flowId,
            nodeId,
            eventType: EventType.NodeEnd,
            nodeName: node.name,
            result,
          });
        }

        return { nodeId, result, success: true, waiting: false };
      } catch (error) {
        // Store the error, normalizing foreign throws into UploadistaError.
        lastError =
          error instanceof UploadistaError
            ? error
            : UploadistaError.fromCode("FLOW_NODE_ERROR", { cause: error });

        // Check if we should retry
        if (retryCount < maxRetries) {
          retryCount++;

          // Calculate delay with exponential backoff if enabled
          const delay = useExponentialBackoff
            ? baseDelay * 2 ** (retryCount - 1)
            : baseDelay;

          // Log retry attempt
          yield* Effect.logWarning(
            `Node ${nodeId} (${node.name}) failed, retrying (${retryCount}/${maxRetries}) after ${delay}ms`,
          );

          // Wait before retrying
          yield* Effect.sleep(delay);

          // Continue to next iteration of retry loop
          continue;
        }

        // No more retries - emit final error event
        if (onEvent) {
          yield* onEvent({
            jobId,
            flowId,
            nodeId,
            eventType: EventType.NodeError,
            nodeName: node.name,
            error: lastError.body,
            retryCount,
          });
        }

        return yield* lastError.toEffect();
      }
    }

    // If we get here, all retries failed
    if (lastError) {
      return yield* lastError.toEffect();
    }

    // Should never reach here
    return yield* UploadistaError.fromCode("FLOW_NODE_ERROR", {
      cause: new Error("Unexpected error in retry loop"),
    }).toEffect();
  });
};
|
|
666
|
+
|
|
667
|
+
// Internal execution function that can start fresh or resume.
// Orchestrates a whole flow run: topologically orders the nodes (or
// restores a saved order when resuming), executes them sequentially or
// level-by-level in parallel, then validates the collected outputs
// against the flow output schema. Returns either a "completed" result
// or a "paused" snapshot that `resume` can pick up later.
const executeFlow = ({
  inputs,
  storageId,
  jobId,
  resumeFrom,
  clientId,
}: {
  inputs?: Record<string, z.infer<TFlowInputSchema>>;
  storageId: string;
  jobId: string;
  resumeFrom?: {
    executionOrder: string[];
    nodeResults: Map<string, unknown>;
    currentIndex: number;
  };
  clientId: string | null;
}): Effect.Effect<
  | {
      type: "completed";
      result: Record<string, z.infer<TFlowOutputSchema>>;
    }
  | {
      type: "paused";
      nodeId: string;
      executionState: {
        executionOrder: string[];
        currentIndex: number;
        inputs: Record<string, z.infer<TFlowInputSchema>>;
      };
    },
  UploadistaError
> => {
  return Effect.gen(function* () {
    // Emit FlowStart event only if starting fresh
    if (!resumeFrom && onEvent) {
      yield* onEvent({
        jobId,
        eventType: EventType.FlowStart,
        flowId,
      });
    }

    // Map flow inputs to input nodes
    const nodeInputs = mapFlowInputsToNodes(inputs || {});

    // Get execution order and results - either fresh or from resume state
    let executionOrder: string[];
    let nodeResults: Map<string, unknown>;
    let startIndex: number;

    if (resumeFrom) {
      // Resume from saved state
      executionOrder = resumeFrom.executionOrder;
      nodeResults = resumeFrom.nodeResults;
      startIndex = resumeFrom.currentIndex;
    } else {
      // Start fresh
      executionOrder = topologicalSort();
      nodeResults = new Map<string, unknown>();
      startIndex = 0;

      // Check for cycles: a cyclic graph yields a shorter topological
      // order than the node count.
      if (executionOrder.length !== nodes.length) {
        return yield* UploadistaError.fromCode(
          "FLOW_CYCLE_ERROR",
        ).toEffect();
      }
    }

    // Create node map for quick lookup
    const nodeMap = new Map(nodes.map((node) => [node.id, node]));

    // Determine execution strategy
    const useParallelExecution =
      config.parallelExecution?.enabled ?? false;

    if (useParallelExecution) {
      // Parallel execution using execution levels
      yield* Effect.logDebug(
        `Flow ${flowId}: Executing in parallel mode (maxConcurrency: ${config.parallelExecution?.maxConcurrency ?? 4})`,
      );

      const scheduler = new ParallelScheduler({
        maxConcurrency: config.parallelExecution?.maxConcurrency ?? 4,
      });

      // Get execution levels
      const executionLevels = scheduler.groupNodesByExecutionLevel(
        nodes,
        edges,
      );

      yield* Effect.logDebug(
        `Flow ${flowId}: Grouped nodes into ${executionLevels.length} execution levels`,
      );

      // Build reverse graph for dependency checking
      const reverseGraph: Record<string, string[]> = {};
      nodes.forEach((node) => {
        reverseGraph[node.id] = [];
      });
      edges.forEach((edge) => {
        reverseGraph[edge.target]?.push(edge.source);
      });

      // Execute each level sequentially, but nodes within level in parallel
      // NOTE(review): unlike the sequential branch, this loop does not
      // consult startIndex, so on resume all levels (including ones whose
      // nodes already have results) appear to be re-executed — confirm
      // this is intended for parallel-mode resume.
      for (const level of executionLevels) {
        yield* Effect.logDebug(
          `Flow ${flowId}: Executing level ${level.level} with nodes: ${level.nodes.join(", ")}`,
        );

        // Create executor functions for all nodes in this level
        const nodeExecutors = level.nodes.map((nodeId) => () =>
          Effect.gen(function* () {
            // Emit NodeResume event if we're resuming from a paused state at this node
            if (resumeFrom && nodeId === resumeFrom.executionOrder[startIndex] && onEvent) {
              const node = nodeMap.get(nodeId);
              if (node) {
                yield* onEvent({
                  jobId,
                  flowId,
                  nodeId,
                  eventType: EventType.NodeResume,
                  nodeName: node.name,
                  nodeType: node.type,
                });
              }
            }

            const nodeResult = yield* executeNode(
              nodeId,
              storageId,
              nodeInputs,
              nodeResults,
              nodeMap,
              jobId,
              clientId,
            );

            return { nodeId, nodeResult };
          }),
        );

        // Execute all nodes in this level in parallel
        const levelResults = yield* scheduler.executeNodesInParallel(
          nodeExecutors,
        );

        // Process results and check for waiting nodes
        for (const { nodeId, nodeResult } of levelResults) {
          if (nodeResult.waiting) {
            // Node is waiting - pause execution and return state
            if (nodeResult.result !== undefined) {
              nodeResults.set(nodeId, nodeResult.result);
            }

            return {
              type: "paused" as const,
              nodeId,
              executionState: {
                executionOrder,
                currentIndex: executionOrder.indexOf(nodeId),
                inputs: nodeInputs,
              },
            };
          }

          if (nodeResult.success) {
            nodeResults.set(nodeId, nodeResult.result);
          }
        }
      }
    } else {
      // Sequential execution (original behavior)
      yield* Effect.logDebug(`Flow ${flowId}: Executing in sequential mode`);

      for (let i = startIndex; i < executionOrder.length; i++) {
        const nodeId = executionOrder[i];
        if (!nodeId) {
          return yield* UploadistaError.fromCode(
            "FLOW_NODE_NOT_FOUND",
          ).toEffect();
        }

        // Emit NodeResume event if we're resuming from a paused state at this node
        if (resumeFrom && i === startIndex && onEvent) {
          const node = nodeMap.get(nodeId);
          if (node) {
            yield* onEvent({
              jobId,
              flowId,
              nodeId,
              eventType: EventType.NodeResume,
              nodeName: node.name,
              nodeType: node.type,
            });
          }
        }

        const nodeResult = yield* executeNode(
          nodeId,
          storageId,
          nodeInputs,
          nodeResults,
          nodeMap,
          jobId,
          clientId,
        );

        if (nodeResult.waiting) {
          // Node is waiting - pause execution and return state
          if (nodeResult.result !== undefined) {
            nodeResults.set(nodeResult.nodeId, nodeResult.result);
          }

          return {
            type: "paused" as const,
            nodeId: nodeResult.nodeId,
            executionState: {
              executionOrder,
              currentIndex: i, // Stay at current index to re-execute this node on resume
              inputs: nodeInputs,
            },
          };
        }

        if (nodeResult.success) {
          nodeResults.set(nodeResult.nodeId, nodeResult.result);
        }
      }
    }

    // All nodes completed - collect outputs
    const finalResult = collectFlowOutputs(nodeResults);

    const finalResultSchema = z.record(z.string(), outputSchema);

    // Validate the final result against the output schema
    const parseResult = finalResultSchema.safeParse(finalResult);
    if (!parseResult.success) {
      const validationError = `Flow output validation failed: ${parseResult.error.message}. Expected outputs: ${JSON.stringify(Object.keys(collectFlowOutputs(nodeResults)))}. Output nodes: ${nodes
        .filter((n: any) => n.type === "output")
        .map((n: any) => n.id)
        .join(", ")}`;

      // Emit FlowError event for validation failure
      if (onEvent) {
        yield* onEvent({
          jobId,
          eventType: EventType.FlowError,
          flowId,
          error: validationError,
        });
      }
      return yield* UploadistaError.fromCode(
        "FLOW_OUTPUT_VALIDATION_ERROR",
        {
          body: validationError,
          cause: parseResult.error,
        },
      ).toEffect();
    }
    const validatedResult = parseResult.data;

    // Emit FlowEnd event
    if (onEvent) {
      yield* onEvent({
        jobId,
        eventType: EventType.FlowEnd,
        flowId,
        result: validatedResult,
      });
    }

    return { type: "completed" as const, result: validatedResult };
  });
};
|
|
945
|
+
|
|
946
|
+
const run = ({
|
|
947
|
+
inputs,
|
|
948
|
+
storageId,
|
|
949
|
+
jobId,
|
|
950
|
+
clientId,
|
|
951
|
+
}: {
|
|
952
|
+
inputs?: Record<string, z.infer<TFlowInputSchema>>;
|
|
953
|
+
storageId: string;
|
|
954
|
+
jobId: string;
|
|
955
|
+
clientId: string | null;
|
|
956
|
+
}): Effect.Effect<
|
|
957
|
+
| {
|
|
958
|
+
type: "completed";
|
|
959
|
+
result: Record<string, z.infer<TFlowOutputSchema>>;
|
|
960
|
+
}
|
|
961
|
+
| {
|
|
962
|
+
type: "paused";
|
|
963
|
+
nodeId: string;
|
|
964
|
+
executionState: {
|
|
965
|
+
executionOrder: string[];
|
|
966
|
+
currentIndex: number;
|
|
967
|
+
inputs: Record<string, z.infer<TFlowInputSchema>>;
|
|
968
|
+
};
|
|
969
|
+
},
|
|
970
|
+
UploadistaError,
|
|
971
|
+
TRequirements
|
|
972
|
+
> => {
|
|
973
|
+
return executeFlow({ inputs, storageId, jobId, clientId });
|
|
974
|
+
};
|
|
975
|
+
|
|
976
|
+
const resume = ({
|
|
977
|
+
jobId,
|
|
978
|
+
storageId,
|
|
979
|
+
nodeResults,
|
|
980
|
+
executionState,
|
|
981
|
+
clientId,
|
|
982
|
+
}: {
|
|
983
|
+
jobId: string;
|
|
984
|
+
storageId: string;
|
|
985
|
+
nodeResults: Record<string, unknown>;
|
|
986
|
+
executionState: {
|
|
987
|
+
executionOrder: string[];
|
|
988
|
+
currentIndex: number;
|
|
989
|
+
inputs: Record<string, z.infer<TFlowInputSchema>>;
|
|
990
|
+
};
|
|
991
|
+
clientId: string | null;
|
|
992
|
+
}): Effect.Effect<
|
|
993
|
+
| {
|
|
994
|
+
type: "completed";
|
|
995
|
+
result: Record<string, z.infer<TFlowOutputSchema>>;
|
|
996
|
+
}
|
|
997
|
+
| {
|
|
998
|
+
type: "paused";
|
|
999
|
+
nodeId: string;
|
|
1000
|
+
executionState: {
|
|
1001
|
+
executionOrder: string[];
|
|
1002
|
+
currentIndex: number;
|
|
1003
|
+
inputs: Record<string, z.infer<TFlowInputSchema>>;
|
|
1004
|
+
};
|
|
1005
|
+
},
|
|
1006
|
+
UploadistaError
|
|
1007
|
+
> => {
|
|
1008
|
+
return executeFlow({
|
|
1009
|
+
inputs: executionState.inputs,
|
|
1010
|
+
storageId,
|
|
1011
|
+
jobId,
|
|
1012
|
+
resumeFrom: {
|
|
1013
|
+
executionOrder: executionState.executionOrder,
|
|
1014
|
+
nodeResults: new Map(Object.entries(nodeResults)),
|
|
1015
|
+
currentIndex: executionState.currentIndex,
|
|
1016
|
+
},
|
|
1017
|
+
clientId,
|
|
1018
|
+
});
|
|
1019
|
+
};
|
|
1020
|
+
|
|
1021
|
+
const validateTypes = () => {
|
|
1022
|
+
// Convert FlowNode to FlowNode for validation
|
|
1023
|
+
const compatibleNodes = nodes as FlowNode<any, any>[];
|
|
1024
|
+
return typeValidator.validateFlow(compatibleNodes, edges);
|
|
1025
|
+
};
|
|
1026
|
+
|
|
1027
|
+
const validateInputs = (inputs: unknown) => {
|
|
1028
|
+
return typeValidator.validateData(inputs, inputSchema);
|
|
1029
|
+
};
|
|
1030
|
+
|
|
1031
|
+
const validateOutputs = (outputs: unknown) => {
|
|
1032
|
+
return typeValidator.validateData(outputs, outputSchema);
|
|
1033
|
+
};
|
|
1034
|
+
|
|
1035
|
+
// Public flow handle: the static definition (id, name, graph, schemas)
// plus the execution (run/resume) and validation entry points.
return {
  id: flowId,
  name,
  nodes,
  edges,
  inputSchema,
  outputSchema,
  onEvent,
  run,
  resume,
  validateTypes,
  validateInputs,
  validateOutputs,
};
|
|
1049
|
+
});
|
|
1050
|
+
}
|