@uploadista/core 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +5 -0
- package/.turbo/turbo-check.log +231 -0
- package/.turbo/turbo-format.log +5 -0
- package/LICENSE +21 -0
- package/README.md +1120 -0
- package/dist/chunk-CUT6urMc.cjs +1 -0
- package/dist/debounce-C2SeqcxD.js +2 -0
- package/dist/debounce-C2SeqcxD.js.map +1 -0
- package/dist/debounce-LZK7yS7Z.cjs +1 -0
- package/dist/errors/index.cjs +1 -0
- package/dist/errors/index.d.cts +3 -0
- package/dist/errors/index.d.ts +3 -0
- package/dist/errors/index.d.ts.map +1 -0
- package/dist/errors/index.js +2 -0
- package/dist/errors/uploadista-error.d.ts +209 -0
- package/dist/errors/uploadista-error.d.ts.map +1 -0
- package/dist/errors/uploadista-error.js +322 -0
- package/dist/errors-8i_aMxOE.js +1 -0
- package/dist/errors-CRm1FHHT.cjs +0 -0
- package/dist/flow/edge.d.ts +47 -0
- package/dist/flow/edge.d.ts.map +1 -0
- package/dist/flow/edge.js +40 -0
- package/dist/flow/event.d.ts +206 -0
- package/dist/flow/event.d.ts.map +1 -0
- package/dist/flow/event.js +53 -0
- package/dist/flow/flow-server.d.ts +223 -0
- package/dist/flow/flow-server.d.ts.map +1 -0
- package/dist/flow/flow-server.js +614 -0
- package/dist/flow/flow.d.ts +238 -0
- package/dist/flow/flow.d.ts.map +1 -0
- package/dist/flow/flow.js +629 -0
- package/dist/flow/index.cjs +1 -0
- package/dist/flow/index.d.cts +6 -0
- package/dist/flow/index.d.ts +24 -0
- package/dist/flow/index.d.ts.map +1 -0
- package/dist/flow/index.js +24 -0
- package/dist/flow/node.d.ts +136 -0
- package/dist/flow/node.d.ts.map +1 -0
- package/dist/flow/node.js +153 -0
- package/dist/flow/nodes/index.d.ts +8 -0
- package/dist/flow/nodes/index.d.ts.map +1 -0
- package/dist/flow/nodes/index.js +7 -0
- package/dist/flow/nodes/input-node.d.ts +78 -0
- package/dist/flow/nodes/input-node.d.ts.map +1 -0
- package/dist/flow/nodes/input-node.js +233 -0
- package/dist/flow/nodes/storage-node.d.ts +67 -0
- package/dist/flow/nodes/storage-node.d.ts.map +1 -0
- package/dist/flow/nodes/storage-node.js +94 -0
- package/dist/flow/nodes/streaming-input-node.d.ts +69 -0
- package/dist/flow/nodes/streaming-input-node.d.ts.map +1 -0
- package/dist/flow/nodes/streaming-input-node.js +156 -0
- package/dist/flow/nodes/transform-node.d.ts +85 -0
- package/dist/flow/nodes/transform-node.d.ts.map +1 -0
- package/dist/flow/nodes/transform-node.js +107 -0
- package/dist/flow/parallel-scheduler.d.ts +175 -0
- package/dist/flow/parallel-scheduler.d.ts.map +1 -0
- package/dist/flow/parallel-scheduler.js +193 -0
- package/dist/flow/plugins/credential-provider.d.ts +47 -0
- package/dist/flow/plugins/credential-provider.d.ts.map +1 -0
- package/dist/flow/plugins/credential-provider.js +24 -0
- package/dist/flow/plugins/image-ai-plugin.d.ts +61 -0
- package/dist/flow/plugins/image-ai-plugin.d.ts.map +1 -0
- package/dist/flow/plugins/image-ai-plugin.js +21 -0
- package/dist/flow/plugins/image-plugin.d.ts +52 -0
- package/dist/flow/plugins/image-plugin.d.ts.map +1 -0
- package/dist/flow/plugins/image-plugin.js +22 -0
- package/dist/flow/plugins/types/describe-image-node.d.ts +16 -0
- package/dist/flow/plugins/types/describe-image-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/describe-image-node.js +9 -0
- package/dist/flow/plugins/types/index.d.ts +9 -0
- package/dist/flow/plugins/types/index.d.ts.map +1 -0
- package/dist/flow/plugins/types/index.js +8 -0
- package/dist/flow/plugins/types/optimize-node.d.ts +20 -0
- package/dist/flow/plugins/types/optimize-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/optimize-node.js +11 -0
- package/dist/flow/plugins/types/remove-background-node.d.ts +16 -0
- package/dist/flow/plugins/types/remove-background-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/remove-background-node.js +9 -0
- package/dist/flow/plugins/types/resize-node.d.ts +21 -0
- package/dist/flow/plugins/types/resize-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/resize-node.js +16 -0
- package/dist/flow/plugins/zip-plugin.d.ts +62 -0
- package/dist/flow/plugins/zip-plugin.d.ts.map +1 -0
- package/dist/flow/plugins/zip-plugin.js +21 -0
- package/dist/flow/typed-flow.d.ts +90 -0
- package/dist/flow/typed-flow.d.ts.map +1 -0
- package/dist/flow/typed-flow.js +59 -0
- package/dist/flow/types/flow-file.d.ts +45 -0
- package/dist/flow/types/flow-file.d.ts.map +1 -0
- package/dist/flow/types/flow-file.js +27 -0
- package/dist/flow/types/flow-job.d.ts +118 -0
- package/dist/flow/types/flow-job.d.ts.map +1 -0
- package/dist/flow/types/flow-job.js +11 -0
- package/dist/flow/types/flow-types.d.ts +321 -0
- package/dist/flow/types/flow-types.d.ts.map +1 -0
- package/dist/flow/types/flow-types.js +52 -0
- package/dist/flow/types/index.d.ts +4 -0
- package/dist/flow/types/index.d.ts.map +1 -0
- package/dist/flow/types/index.js +3 -0
- package/dist/flow/types/run-args.d.ts +38 -0
- package/dist/flow/types/run-args.d.ts.map +1 -0
- package/dist/flow/types/run-args.js +30 -0
- package/dist/flow/types/type-validator.d.ts +26 -0
- package/dist/flow/types/type-validator.d.ts.map +1 -0
- package/dist/flow/types/type-validator.js +134 -0
- package/dist/flow/utils/resolve-upload-metadata.d.ts +11 -0
- package/dist/flow/utils/resolve-upload-metadata.d.ts.map +1 -0
- package/dist/flow/utils/resolve-upload-metadata.js +28 -0
- package/dist/flow-2zXnEiWL.cjs +1 -0
- package/dist/flow-CRaKy7Vj.js +2 -0
- package/dist/flow-CRaKy7Vj.js.map +1 -0
- package/dist/generate-id-Dm-Vboxq.d.ts +34 -0
- package/dist/generate-id-Dm-Vboxq.d.ts.map +1 -0
- package/dist/generate-id-LjJRLD6N.d.cts +34 -0
- package/dist/generate-id-LjJRLD6N.d.cts.map +1 -0
- package/dist/generate-id-xHp_Z7Cl.cjs +1 -0
- package/dist/generate-id-yohS1ZDk.js +2 -0
- package/dist/generate-id-yohS1ZDk.js.map +1 -0
- package/dist/index-BO8GZlbD.d.cts +1040 -0
- package/dist/index-BO8GZlbD.d.cts.map +1 -0
- package/dist/index-BoGG5KAY.d.ts +1 -0
- package/dist/index-BtBZHVmz.d.cts +1 -0
- package/dist/index-D-CoVpkZ.d.ts +1004 -0
- package/dist/index-D-CoVpkZ.d.ts.map +1 -0
- package/dist/index.cjs +1 -0
- package/dist/index.d.cts +6 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +5 -0
- package/dist/logger/logger.cjs +1 -0
- package/dist/logger/logger.d.cts +8 -0
- package/dist/logger/logger.d.cts.map +1 -0
- package/dist/logger/logger.d.ts +5 -0
- package/dist/logger/logger.d.ts.map +1 -0
- package/dist/logger/logger.js +10 -0
- package/dist/logger/logger.js.map +1 -0
- package/dist/semaphore-0ZwjVpyF.js +2 -0
- package/dist/semaphore-0ZwjVpyF.js.map +1 -0
- package/dist/semaphore-BHprIjFI.d.cts +37 -0
- package/dist/semaphore-BHprIjFI.d.cts.map +1 -0
- package/dist/semaphore-DThupBkc.d.ts +37 -0
- package/dist/semaphore-DThupBkc.d.ts.map +1 -0
- package/dist/semaphore-DVrONiAV.cjs +1 -0
- package/dist/stream-limiter-CoWKv39w.js +2 -0
- package/dist/stream-limiter-CoWKv39w.js.map +1 -0
- package/dist/stream-limiter-JgOwmkMa.cjs +1 -0
- package/dist/streams/multi-stream.cjs +1 -0
- package/dist/streams/multi-stream.d.cts +91 -0
- package/dist/streams/multi-stream.d.cts.map +1 -0
- package/dist/streams/multi-stream.d.ts +86 -0
- package/dist/streams/multi-stream.d.ts.map +1 -0
- package/dist/streams/multi-stream.js +149 -0
- package/dist/streams/multi-stream.js.map +1 -0
- package/dist/streams/stream-limiter.cjs +1 -0
- package/dist/streams/stream-limiter.d.cts +36 -0
- package/dist/streams/stream-limiter.d.cts.map +1 -0
- package/dist/streams/stream-limiter.d.ts +27 -0
- package/dist/streams/stream-limiter.d.ts.map +1 -0
- package/dist/streams/stream-limiter.js +49 -0
- package/dist/streams/stream-splitter.cjs +1 -0
- package/dist/streams/stream-splitter.d.cts +68 -0
- package/dist/streams/stream-splitter.d.cts.map +1 -0
- package/dist/streams/stream-splitter.d.ts +51 -0
- package/dist/streams/stream-splitter.d.ts.map +1 -0
- package/dist/streams/stream-splitter.js +175 -0
- package/dist/streams/stream-splitter.js.map +1 -0
- package/dist/types/data-store-registry.d.ts +13 -0
- package/dist/types/data-store-registry.d.ts.map +1 -0
- package/dist/types/data-store-registry.js +4 -0
- package/dist/types/data-store.d.ts +316 -0
- package/dist/types/data-store.d.ts.map +1 -0
- package/dist/types/data-store.js +157 -0
- package/dist/types/event-broadcaster.d.ts +28 -0
- package/dist/types/event-broadcaster.d.ts.map +1 -0
- package/dist/types/event-broadcaster.js +6 -0
- package/dist/types/event-emitter.d.ts +378 -0
- package/dist/types/event-emitter.d.ts.map +1 -0
- package/dist/types/event-emitter.js +223 -0
- package/dist/types/index.cjs +1 -0
- package/dist/types/index.d.cts +6 -0
- package/dist/types/index.d.ts +10 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +9 -0
- package/dist/types/input-file.d.ts +104 -0
- package/dist/types/input-file.d.ts.map +1 -0
- package/dist/types/input-file.js +27 -0
- package/dist/types/kv-store.d.ts +281 -0
- package/dist/types/kv-store.d.ts.map +1 -0
- package/dist/types/kv-store.js +234 -0
- package/dist/types/middleware.d.ts +17 -0
- package/dist/types/middleware.d.ts.map +1 -0
- package/dist/types/middleware.js +21 -0
- package/dist/types/upload-event.d.ts +105 -0
- package/dist/types/upload-event.d.ts.map +1 -0
- package/dist/types/upload-event.js +71 -0
- package/dist/types/upload-file.d.ts +136 -0
- package/dist/types/upload-file.d.ts.map +1 -0
- package/dist/types/upload-file.js +34 -0
- package/dist/types/websocket.d.ts +144 -0
- package/dist/types/websocket.d.ts.map +1 -0
- package/dist/types/websocket.js +40 -0
- package/dist/types-BT-cvi7T.cjs +1 -0
- package/dist/types-DhU2j-XF.js +2 -0
- package/dist/types-DhU2j-XF.js.map +1 -0
- package/dist/upload/convert-to-stream.d.ts +38 -0
- package/dist/upload/convert-to-stream.d.ts.map +1 -0
- package/dist/upload/convert-to-stream.js +43 -0
- package/dist/upload/convert-upload-to-flow-file.d.ts +14 -0
- package/dist/upload/convert-upload-to-flow-file.d.ts.map +1 -0
- package/dist/upload/convert-upload-to-flow-file.js +21 -0
- package/dist/upload/create-upload.d.ts +68 -0
- package/dist/upload/create-upload.d.ts.map +1 -0
- package/dist/upload/create-upload.js +157 -0
- package/dist/upload/index.cjs +1 -0
- package/dist/upload/index.d.cts +6 -0
- package/dist/upload/index.d.ts +4 -0
- package/dist/upload/index.d.ts.map +1 -0
- package/dist/upload/index.js +3 -0
- package/dist/upload/mime.d.ts +24 -0
- package/dist/upload/mime.d.ts.map +1 -0
- package/dist/upload/mime.js +351 -0
- package/dist/upload/upload-chunk.d.ts +58 -0
- package/dist/upload/upload-chunk.d.ts.map +1 -0
- package/dist/upload/upload-chunk.js +277 -0
- package/dist/upload/upload-server.d.ts +221 -0
- package/dist/upload/upload-server.d.ts.map +1 -0
- package/dist/upload/upload-server.js +181 -0
- package/dist/upload/upload-strategy-negotiator.d.ts +148 -0
- package/dist/upload/upload-strategy-negotiator.d.ts.map +1 -0
- package/dist/upload/upload-strategy-negotiator.js +217 -0
- package/dist/upload/upload-url.d.ts +68 -0
- package/dist/upload/upload-url.d.ts.map +1 -0
- package/dist/upload/upload-url.js +142 -0
- package/dist/upload/write-to-store.d.ts +77 -0
- package/dist/upload/write-to-store.d.ts.map +1 -0
- package/dist/upload/write-to-store.js +147 -0
- package/dist/upload-DLuICjpP.cjs +1 -0
- package/dist/upload-DaXO34dE.js +2 -0
- package/dist/upload-DaXO34dE.js.map +1 -0
- package/dist/uploadista-error-BB-Wdiz9.cjs +22 -0
- package/dist/uploadista-error-BVsVxqvz.js +23 -0
- package/dist/uploadista-error-BVsVxqvz.js.map +1 -0
- package/dist/uploadista-error-CwxYs4EB.d.ts +52 -0
- package/dist/uploadista-error-CwxYs4EB.d.ts.map +1 -0
- package/dist/uploadista-error-kKlhLRhY.d.cts +52 -0
- package/dist/uploadista-error-kKlhLRhY.d.cts.map +1 -0
- package/dist/utils/checksum.d.ts +22 -0
- package/dist/utils/checksum.d.ts.map +1 -0
- package/dist/utils/checksum.js +49 -0
- package/dist/utils/debounce.cjs +1 -0
- package/dist/utils/debounce.d.cts +38 -0
- package/dist/utils/debounce.d.cts.map +1 -0
- package/dist/utils/debounce.d.ts +36 -0
- package/dist/utils/debounce.d.ts.map +1 -0
- package/dist/utils/debounce.js +73 -0
- package/dist/utils/generate-id.cjs +1 -0
- package/dist/utils/generate-id.d.cts +2 -0
- package/dist/utils/generate-id.d.ts +32 -0
- package/dist/utils/generate-id.d.ts.map +1 -0
- package/dist/utils/generate-id.js +23 -0
- package/dist/utils/md5.cjs +1 -0
- package/dist/utils/md5.d.cts +73 -0
- package/dist/utils/md5.d.cts.map +1 -0
- package/dist/utils/md5.d.ts +71 -0
- package/dist/utils/md5.d.ts.map +1 -0
- package/dist/utils/md5.js +417 -0
- package/dist/utils/md5.js.map +1 -0
- package/dist/utils/once.cjs +1 -0
- package/dist/utils/once.d.cts +25 -0
- package/dist/utils/once.d.cts.map +1 -0
- package/dist/utils/once.d.ts +21 -0
- package/dist/utils/once.d.ts.map +1 -0
- package/dist/utils/once.js +54 -0
- package/dist/utils/once.js.map +1 -0
- package/dist/utils/semaphore.cjs +1 -0
- package/dist/utils/semaphore.d.cts +3 -0
- package/dist/utils/semaphore.d.ts +78 -0
- package/dist/utils/semaphore.d.ts.map +1 -0
- package/dist/utils/semaphore.js +134 -0
- package/dist/utils/throttle.cjs +1 -0
- package/dist/utils/throttle.d.cts +24 -0
- package/dist/utils/throttle.d.cts.map +1 -0
- package/dist/utils/throttle.d.ts +18 -0
- package/dist/utils/throttle.d.ts.map +1 -0
- package/dist/utils/throttle.js +20 -0
- package/dist/utils/throttle.js.map +1 -0
- package/docs/PARALLEL_EXECUTION.md +206 -0
- package/docs/PARALLEL_EXECUTION_QUICKSTART.md +142 -0
- package/docs/PARALLEL_EXECUTION_REFACTOR.md +184 -0
- package/package.json +80 -0
- package/src/errors/__tests__/uploadista-error.test.ts +251 -0
- package/src/errors/index.ts +2 -0
- package/src/errors/uploadista-error.ts +394 -0
- package/src/flow/README.md +352 -0
- package/src/flow/edge.test.ts +146 -0
- package/src/flow/edge.ts +60 -0
- package/src/flow/event.ts +229 -0
- package/src/flow/flow-server.ts +1089 -0
- package/src/flow/flow.ts +1050 -0
- package/src/flow/index.ts +28 -0
- package/src/flow/node.ts +249 -0
- package/src/flow/nodes/index.ts +8 -0
- package/src/flow/nodes/input-node.ts +296 -0
- package/src/flow/nodes/storage-node.ts +128 -0
- package/src/flow/nodes/transform-node.ts +154 -0
- package/src/flow/parallel-scheduler.ts +259 -0
- package/src/flow/plugins/credential-provider.ts +48 -0
- package/src/flow/plugins/image-ai-plugin.ts +66 -0
- package/src/flow/plugins/image-plugin.ts +60 -0
- package/src/flow/plugins/types/describe-image-node.ts +16 -0
- package/src/flow/plugins/types/index.ts +9 -0
- package/src/flow/plugins/types/optimize-node.ts +18 -0
- package/src/flow/plugins/types/remove-background-node.ts +18 -0
- package/src/flow/plugins/types/resize-node.ts +26 -0
- package/src/flow/plugins/zip-plugin.ts +69 -0
- package/src/flow/typed-flow.ts +279 -0
- package/src/flow/types/flow-file.ts +51 -0
- package/src/flow/types/flow-job.ts +138 -0
- package/src/flow/types/flow-types.ts +353 -0
- package/src/flow/types/index.ts +6 -0
- package/src/flow/types/run-args.ts +40 -0
- package/src/flow/types/type-validator.ts +204 -0
- package/src/flow/utils/resolve-upload-metadata.ts +48 -0
- package/src/index.ts +5 -0
- package/src/logger/logger.ts +14 -0
- package/src/streams/stream-limiter.test.ts +150 -0
- package/src/streams/stream-limiter.ts +75 -0
- package/src/types/data-store.ts +427 -0
- package/src/types/event-broadcaster.ts +39 -0
- package/src/types/event-emitter.ts +349 -0
- package/src/types/index.ts +9 -0
- package/src/types/input-file.ts +107 -0
- package/src/types/kv-store.ts +375 -0
- package/src/types/middleware.ts +54 -0
- package/src/types/upload-event.ts +75 -0
- package/src/types/upload-file.ts +139 -0
- package/src/types/websocket.ts +65 -0
- package/src/upload/convert-to-stream.ts +48 -0
- package/src/upload/create-upload.ts +214 -0
- package/src/upload/index.ts +3 -0
- package/src/upload/mime.ts +436 -0
- package/src/upload/upload-chunk.ts +364 -0
- package/src/upload/upload-server.ts +390 -0
- package/src/upload/upload-strategy-negotiator.ts +316 -0
- package/src/upload/upload-url.ts +173 -0
- package/src/upload/write-to-store.ts +211 -0
- package/src/utils/checksum.ts +61 -0
- package/src/utils/debounce.test.ts +126 -0
- package/src/utils/debounce.ts +89 -0
- package/src/utils/generate-id.ts +35 -0
- package/src/utils/md5.ts +475 -0
- package/src/utils/once.test.ts +83 -0
- package/src/utils/once.ts +63 -0
- package/src/utils/throttle.test.ts +101 -0
- package/src/utils/throttle.ts +29 -0
- package/tsconfig.json +20 -0
- package/tsconfig.tsbuildinfo +1 -0
- package/tsdown.config.ts +25 -0
- package/vitest.config.ts +15 -0
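The dist/ layout above implies one root entry plus errors, flow, types, and upload subpath entries, each shipped as ESM and CJS with matching .d.ts/.d.cts typings. A minimal import sketch, assuming the "exports" map in package/package.json (not expanded here) mirrors these dist folders:

```ts
// Hypothetical subpath imports -- the exact "exports" map lives in
// package/package.json, so these paths are assumptions.
import { createFlow, createFlowNode, createFlowEdge } from "@uploadista/core/flow";
import { UploadistaError } from "@uploadista/core/errors";
import { uploadFileSchema } from "@uploadista/core/types";
```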
package/dist/flow-2zXnEiWL.cjs
@@ -0,0 +1 @@
const e=require(`./chunk-CUT6urMc.cjs`),t=require(`./uploadista-error-BB-Wdiz9.cjs`),n=require(`./types-BT-cvi7T.cjs`),r=require(`./upload-DLuICjpP.cjs`),i=require(`./semaphore-DVrONiAV.cjs`);let a=require(`effect`);a=e.__toESM(a);let o=require(`zod`);o=e.__toESM(o);function s({source:e,target:t,sourcePort:n,targetPort:r}){return{source:e,target:t,sourcePort:n,targetPort:r}}let c=function(e){return e.JobStart=`job-start`,e.JobEnd=`job-end`,e.FlowStart=`flow-start`,e.FlowEnd=`flow-end`,e.NodeStart=`node-start`,e.NodeEnd=`node-end`,e.NodeError=`node-error`,e.NodeStream=`node-stream`,e.NodeResponse=`node-response`,e}({}),l=function(e){return e.input=`input`,e.process=`process`,e.output=`output`,e.conditional=`conditional`,e.multiplex=`multiplex`,e.merge=`merge`,e}({});function u({id:e,name:n,description:r,type:i,inputSchema:o,outputSchema:s,run:c,condition:l,multiInput:u=!1,multiOutput:d=!1,pausable:f=!1}){return a.Effect.succeed({id:e,name:n,description:r,type:i,inputSchema:o,outputSchema:s,pausable:f,run:({data:e,runId:n,flowId:r,storageId:i})=>a.Effect.gen(function*(){let l=yield*a.Effect.try({try:()=>o.parse(e),catch:e=>t.UploadistaError.fromCode(`FLOW_INPUT_VALIDATION_ERROR`,{cause:e})}),u=yield*c({data:l,runId:n,storageId:i,flowId:r});return u.type===`waiting`?u:{type:`complete`,data:yield*a.Effect.try({try:()=>s.parse(u.data),catch:e=>t.UploadistaError.fromCode(`FLOW_OUTPUT_VALIDATION_ERROR`,{cause:e})})}}),condition:l,multiInput:u,multiOutput:d})}const d=e=>({id:e.id,name:e.name,description:e.description,type:e.type}),f=(e,t)=>{if(e===t)return!0;try{return!!(e&&t&&typeof e==`object`&&typeof t==`object`)}catch{return!0}};var p=class{typeChecker;constructor(e=f){this.typeChecker=e}validateConnection(e,t,n){return this.getCompatibleTypes(e.outputSchema,t.inputSchema)}getCompatibleTypes(e,t){return this.typeChecker(e,t)}validateFlow(e,t){let n=[],r=new Map(e.map(e=>[e.id,e]));for(let e of t){let t=r.get(e.source),i=r.get(e.target);if(!t){n.push(`Source node ${e.source} not found`);continue}if(!i){n.push(`Target node ${e.target} not found`);continue}this.validateConnection(t,i,e)||n.push(`Schema mismatch: ${t.id} output schema incompatible with ${i.id} input schema`)}return{isValid:n.length===0,errors:n}}getExpectedInputSchemas(e,t,n){let r=new Map(t.map(e=>[e.id,e])),i={};for(let t of n)if(t.target===e){let e=r.get(t.source);if(e){let n=t.sourcePort||t.source;i[n]=e.outputSchema}}return i}getActualOutputSchemas(e,t,n){let r=new Map(t.map(e=>[e.id,e])),i={};for(let t of n)if(t.source===e){let e=r.get(t.target);if(e){let n=t.targetPort||t.target;i[n]=e.inputSchema}}return i}validateData(e,t){try{return t.parse(e),{isValid:!0,errors:[]}}catch(e){return e instanceof Error&&`errors`in e?{isValid:!1,errors:e.errors.map(e=>`${e.path.join(`.`)}: ${e.message}`)}:{isValid:!1,errors:[e instanceof Error?e.message:`Validation failed`]}}}};const m=e=>({id:e.id,name:e.name,nodes:e.nodes.map(d),edges:e.edges});function h({flowId:e,name:n,onEvent:r,edges:i,nodes:o,inputSchema:s,outputSchema:l,typeChecker:u}){let d=new p(u),f=()=>{let e={},t={},n={};return o.forEach(r=>{e[r.id]=[],n[r.id]=[],t[r.id]=0}),i.forEach(r=>{e[r.source]?.push(r.target),n[r.target]?.push(r.source),t[r.target]=(t[r.target]||0)+1}),{graph:e,reverseGraph:n,inDegree:t}},m=()=>{let{graph:e,inDegree:t}=f(),n=[],r=[];for(Object.keys(t).forEach(e=>{t[e]===0&&n.push(e)});n.length>0;){let i=n.shift();if(!i)throw Error(`No current node found`);r.push(i),e[i]?.forEach(e=>{t[e]=(t[e]||0)-1,t[e]===0&&n.push(e)})}return 
r},h=(e,t)=>{if(!e.condition)return a.Effect.succeed(!0);let{field:n,operator:r,value:i}=e.condition,o=t,s=o?.metadata?.[n]||o?.[n],c=(()=>{switch(r){case`equals`:return s===i;case`notEquals`:return s!==i;case`greaterThan`:return Number(s)>Number(i);case`lessThan`:return Number(s)<Number(i);case`contains`:return String(s).includes(String(i));case`startsWith`:return String(s).startsWith(String(i));default:return!0}})();return a.Effect.succeed(c)},g=(e,t)=>{let{reverseGraph:n}=f(),r=n[e]||[],i={};return r.forEach(e=>{let n=t.get(e);n!==void 0&&(i[e]=n)}),i},_=e=>{let t=o.filter(e=>e.type===`input`),n={};return t.forEach(t=>{e&&typeof e==`object`&&t.id in e&&(n[t.id]=s.parse(e[t.id]))}),n},v=e=>{let t=o.filter(e=>e.type===`output`),n={};return t.forEach(t=>{let r=e.get(t.id);r!==void 0&&(n[t.id]=r)}),n},y=(n,i,o,s,l,u)=>a.Effect.gen(function*(){let a=l.get(n);if(!a)return yield*t.UploadistaError.fromCode(`FLOW_NODE_NOT_FOUND`).toEffect();r&&(yield*r({jobId:u,flowId:e,nodeId:n,eventType:c.NodeStart,nodeName:a.name,nodeType:a.type}));try{let l,d={};if(a.type===`input`){if(l=o[n],l===void 0)return yield*t.UploadistaError.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Input node ${n} has no input data`)}).toEffect()}else{if(d=g(n,s),Object.keys(d).length===0)return yield*t.UploadistaError.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Node ${n} has no input data`)}).toEffect();if(a.multiInput)l=d;else{let e=Object.keys(d)[0];if(!e)return yield*t.UploadistaError.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Node ${n} has no input data`)}).toEffect();l=d[e]}}if(a.type===`conditional`&&!(yield*h(a,l)))return r&&(yield*r({eventId:``,jobId:u,flowId:e,nodeId:n,eventType:c.NodeEnd,nodeName:a.name})),{nodeId:n,result:l,success:!0,waiting:!1};let f=yield*a.run({data:l,inputs:d,runId:u,flowId:e,storageId:i});if(f.type===`waiting`)return{nodeId:n,result:f.partialData,success:!0,waiting:!0};let p=f.data;return r&&(yield*r({eventId:``,jobId:u,flowId:e,nodeId:n,eventType:c.NodeEnd,nodeName:a.name})),{nodeId:n,result:p,success:!0,waiting:!1}}catch(i){return r&&(yield*r({eventId:``,jobId:u,flowId:e,nodeId:n,eventType:c.NodeError,nodeName:a.name,error:i.message})),yield*t.UploadistaError.fromCode(`FLOW_NODE_ERROR`,{cause:i}).toEffect()}}),b=({inputs:n,storageId:i,jobId:s,resumeFrom:u})=>a.Effect.gen(function*(){!u&&r&&(yield*r({jobId:s,eventType:c.FlowStart,flowId:e}));let d=_(n||{}),f,p,h;if(u)f=u.executionOrder,p=u.nodeResults,h=u.currentIndex;else if(f=m(),p=new Map,h=0,f.length!==o.length)return yield*t.UploadistaError.fromCode(`FLOW_CYCLE_ERROR`).toEffect();let g=new Map(o.map(e=>[e.id,e]));for(let e=h;e<f.length;e++){let n=f[e];if(!n)return yield*t.UploadistaError.fromCode(`FLOW_NODE_NOT_FOUND`).toEffect();let r=yield*y(n,i,d,p,g,s);if(r.waiting)return r.result!==void 0&&p.set(r.nodeId,r.result),{type:`paused`,nodeId:r.nodeId,executionState:{nodeResults:Object.fromEntries(p),executionOrder:f,currentIndex:e,inputs:d}};r.success&&p.set(r.nodeId,r.result)}let b=v(p),x=yield*a.Effect.try({try:()=>l.parse(b),catch:e=>t.UploadistaError.fromCode(`FLOW_OUTPUT_VALIDATION_ERROR`,{cause:e})});return r&&(yield*r({eventId:``,jobId:s,eventType:c.FlowEnd,flowId:e})),{type:`completed`,result:x}});return{id:e,name:n,nodes:o,edges:i,inputSchema:s,outputSchema:l,run:({inputs:e,storageId:t,jobId:n})=>b({inputs:e,storageId:t,jobId:n}),resume:({jobId:e,storageId:t,executionState:n})=>b({inputs:n.inputs,storageId:t,jobId:e,resumeFrom:{executionOrder:n.executionOrder,nodeResults:new 
Map(Object.entries(n.nodeResults)),currentIndex:n.currentIndex}}),validateTypes:()=>{let e=o;return d.validateFlow(e,i)},validateInputs:e=>d.validateData(e,s),validateOutputs:e=>d.validateData(e,l)}}var g=class extends a.Context.Tag(`FlowProvider`)(){},_=class extends a.Context.Tag(`FlowServer`)(){};function v(e,t,n){return{...e,run:r=>a.Effect.gen(function*(){let i=r.jobId||crypto.randomUUID(),o=e=>a.Effect.gen(function*(){let t=yield*n.get(i);t&&(yield*n.set(i,{...t,...e,updatedAt:new Date}))});return yield*h({flowId:e.id,name:e.name,nodes:e.nodes,edges:e.edges,inputSchema:e.inputSchema,outputSchema:e.outputSchema,onEvent:e=>a.Effect.gen(function*(){switch(yield*t.emit(i,e),a.Effect.logInfo(`Updating job ${i} with event ${e.eventType}`),e.eventType){case`flow-start`:yield*o({status:`running`});break;case`node-start`:yield*a.Effect.gen(function*(){let t=yield*n.get(i);if(t){let r=t.tasks.find(t=>t.nodeId===e.nodeId)?t.tasks.map(t=>t.nodeId===e.nodeId?{...t,status:`running`,updatedAt:new Date}:t):[...t.tasks,{nodeId:e.nodeId,status:`running`,createdAt:new Date,updatedAt:new Date}];yield*n.set(i,{...t,tasks:r,updatedAt:new Date})}});break;case`node-end`:yield*a.Effect.gen(function*(){let t=yield*n.get(i);if(t){let r=t.tasks.map(t=>t.nodeId===e.nodeId?{...t,status:`completed`,updatedAt:new Date}:t);yield*n.set(i,{...t,tasks:r,updatedAt:new Date})}});break;case`node-error`:yield*a.Effect.gen(function*(){let t=yield*n.get(i);if(t){let r=t.tasks.map(t=>t.nodeId===e.nodeId?{...t,status:`failed`,updatedAt:new Date}:t);yield*n.set(i,{...t,tasks:r,error:e.error,updatedAt:new Date})}});break}return{eventId:i}})}).run({...r,jobId:i})})}}function y(){return a.Effect.gen(function*(){let e=yield*g,r=yield*n.FlowEventEmitter,i=yield*n.FlowJobKVStore,o=(e,n)=>a.Effect.gen(function*(){let r=yield*i.get(e);return r?yield*i.set(e,{...r,...n}):yield*a.Effect.fail(t.UploadistaError.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found`}))}),s=(e,n,s,c)=>a.Effect.gen(function*(){yield*o(e,{status:`running`});let t=yield*v(n,r,i).run({inputs:c,storageId:s,jobId:e});return t.type===`paused`?yield*o(e,{status:`paused`,pausedAt:t.nodeId,executionState:t.executionState,updatedAt:new Date}):yield*o(e,{status:`completed`,updatedAt:new Date,endedAt:new Date}),t}).pipe(a.Effect.catchAll(n=>a.Effect.gen(function*(){let r=n instanceof t.UploadistaError?n.body:String(n);return yield*o(e,{status:`failed`,error:r,updatedAt:new Date}),a.Effect.fail(n)})));return{getFlow:t=>a.Effect.gen(function*(){return yield*e.getFlow(t)}),getFlowData:t=>a.Effect.gen(function*(){let n=yield*e.getFlow(t);return m(n)}),runFlow:(n,r,o)=>a.Effect.gen(function*(){let c=yield*a.Effect.try({try:()=>V.parse({inputs:o}),catch:e=>t.UploadistaError.fromCode(`FLOW_INPUT_VALIDATION_ERROR`,{cause:e})}),l=crypto.randomUUID(),u=new Date,d={id:l,flowId:n,storageId:r,status:`started`,createdAt:u,updatedAt:u,tasks:[]};yield*i.set(l,d);let f=yield*e.getFlow(n);return yield*a.Effect.forkDaemon(s(l,f,r,c.inputs).pipe(a.Effect.tapErrorCause(e=>a.Effect.logError(`Flow execution failed`,e)))),d}),getJobStatus:e=>a.Effect.gen(function*(){return(yield*i.get(e))||(yield*a.Effect.fail(t.UploadistaError.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found`})))}),continueFlow:(n,s,c)=>a.Effect.gen(function*(){let l=yield*i.get(n);if(!l)return yield*a.Effect.fail(t.UploadistaError.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${n} not found`}));if(l.status!==`paused`)return yield*a.Effect.fail(t.UploadistaError.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${n} is not paused 
(status: ${l.status})`}));if(l.pausedAt!==s)return yield*a.Effect.fail(t.UploadistaError.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${n} is paused at node ${l.pausedAt}, not ${s}`}));if(!l.executionState)return yield*a.Effect.fail(t.UploadistaError.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${n} has no execution state`}));let u={...l.executionState.nodeResults,[s]:c},d={...l.executionState.inputs,[s]:c},f=yield*e.getFlow(l.flowId),p=a.Effect.gen(function*(){yield*o(n,{status:`running`});let e=v(f,r,i);if(!l.executionState)return yield*a.Effect.fail(t.UploadistaError.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${n} has no execution state`}));let s=yield*e.resume({jobId:n,storageId:l.storageId,executionState:{...l.executionState,nodeResults:u,inputs:d}});return s.type===`paused`?yield*o(n,{status:`paused`,pausedAt:s.nodeId,executionState:s.executionState,updatedAt:new Date}):yield*o(n,{status:`completed`,pausedAt:void 0,executionState:void 0,updatedAt:new Date,endedAt:new Date}),s}).pipe(a.Effect.catchAll(e=>a.Effect.gen(function*(){let r=e instanceof t.UploadistaError?e.body:String(e);return yield*o(n,{status:`failed`,error:r,updatedAt:new Date}),a.Effect.fail(e)})));return yield*a.Effect.forkDaemon(p.pipe(a.Effect.tapErrorCause(e=>a.Effect.logError(`Flow resume failed`,e)))),(yield*i.get(n))||(yield*a.Effect.fail(t.UploadistaError.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${n} not found after update`})))}),subscribeToFlowEvents:(e,t)=>a.Effect.gen(function*(){yield*r.subscribe(e,t)}),unsubscribeFromFlowEvents:e=>a.Effect.gen(function*(){yield*r.unsubscribe(e)})}})}const b=a.Layer.effect(_,y()),x=o.z.object({fileId:o.z.string()}),S=e=>a.Effect.tryPromise({try:async()=>await fetch(e),catch:e=>t.UploadistaError.fromCode(`UNKNOWN_ERROR`,{cause:e})}),C=e=>a.Effect.tryPromise({try:async()=>await e.arrayBuffer(),catch:e=>t.UploadistaError.fromCode(`UNKNOWN_ERROR`,{cause:e})});function w(e,t=e=>a.Effect.succeed(e)){return u({id:e,name:`Input`,description:`Reads an asset from the database`,type:l.input,inputSchema:x,outputSchema:L,run:({data:{fileId:e}})=>a.Effect.gen(function*(){let n=yield*t(e),r=yield*S(n),i=yield*C(r);return{type:`complete`,data:{path:n,inputBytes:new Uint8Array(i),metadata:{mimeType:r.headers.get(`content-type`)??``,size:Number(r.headers.get(`content-length`)??0)}}}})})}const T=e=>({type:`complete`,data:e}),E=e=>({type:`waiting`,partialData:e}),D=o.z.object({});function O(e){return a.Effect.gen(function*(){let t=yield*r.UploadServer;return yield*u({id:e,name:`Storage`,description:`Stores a file in the asset storage`,type:l.output,inputSchema:n.uploadFileSchema,outputSchema:n.uploadFileSchema,run:({data:e,storageId:n})=>a.Effect.gen(function*(){let r=yield*t.read(e.id),i=new ReadableStream({start(e){e.enqueue(r),e.close()}}),a=yield*t.upload({storageId:n,size:r.byteLength,type:e.metadata?.mimeType??``,fileName:e.metadata?.originalName??``,lastModified:0,metadata:JSON.stringify(e.metadata)},i),o=a.url||a.id;return T({...e,path:o})})})})}const k=o.z.object({operation:o.z.literal(`init`),storageId:o.z.string(),metadata:o.z.object({originalName:o.z.string().optional(),mimeType:o.z.string().optional(),size:o.z.number().optional(),extension:o.z.string().optional()}).optional()}),A=o.z.object({operation:o.z.literal(`finalize`),uploadId:o.z.string()}),j=o.z.union([k,A]);function M(e){return a.Effect.gen(function*(){let i=yield*r.UploadServer;return yield*u({id:e,name:`Streaming Input`,description:`Handles file uploads through the flow - init creates upload, client uploads chunks directly, finalize 
completes the flow`,type:l.input,inputSchema:j,outputSchema:n.uploadFileSchema,run:({data:e})=>a.Effect.gen(function*(){switch(e.operation){case`init`:{let t={storageId:e.storageId,size:e.metadata?.size||0,type:e.metadata?.mimeType||`application/octet-stream`,fileName:e.metadata?.originalName,lastModified:e.metadata?.size?Date.now():void 0},n=yield*i.createUpload(t);return E(n)}case`finalize`:{let t=yield*i.getUpload(e.uploadId);return T(t)}default:throw yield*t.UploadistaError.fromCode(`VALIDATION_ERROR`,{cause:Error(`Invalid operation`)}).toEffect()}})})})}var N=class{maxConcurrency;resourceSemaphore;constructor(e={}){this.maxConcurrency=e.maxConcurrency??4,this.resourceSemaphore=e.resourceSemaphore??i.semaphore(this.maxConcurrency)}groupNodesByExecutionLevel(e,t){let n={},r={};e.forEach(e=>{n[e.id]=[],r[e.id]=0}),t.forEach(e=>{n[e.source]?.push(e.target),r[e.target]=(r[e.target]||0)+1});let i=[],a=new Set,o=0;for(;a.size<e.length;){let e=Object.keys(r).filter(e=>r[e]===0&&!a.has(e));if(e.length===0)throw Error(`Cycle detected in flow graph - cannot execute in parallel`);i.push({level:o++,nodes:e}),e.forEach(e=>{a.add(e),delete r[e],n[e]?.forEach(e=>{r[e]!==void 0&&r[e]--})})}return i}async executeNodesInParallel(e){let t=[],n=[],r=e.map(async(e,r)=>{let i=await this.resourceSemaphore.acquire();try{let n=await e();return t[r]=n,n}catch(e){throw n[r]=e,e}finally{await i.release()}});try{return await Promise.all(r),t}catch(e){if(n.length>0){let e=n.find(e=>e!==void 0);if(e)throw e}throw e}}canExecuteInParallel(e,t,n){return e.every(e=>(n[e]||[]).every(e=>t.has(e)))}getStats(){return{maxConcurrency:this.maxConcurrency}}},P=class extends a.Context.Tag(`ImagePlugin`)(){};const F=o.z.object({quality:o.z.number().min(0).max(100),format:o.z.enum([`jpeg`])}),I=o.z.object({width:o.z.number().positive().optional(),height:o.z.number().positive().optional(),fit:o.z.enum([`contain`,`cover`,`fill`])}).refine(e=>e.width||e.height,`Either width or height must be specified for resize`),L=o.z.object({path:o.z.string(),inputBytes:o.z.instanceof(Uint8Array),metadata:o.z.object({mimeType:o.z.string(),size:o.z.number(),width:o.z.number().optional(),height:o.z.number().optional(),format:o.z.string().optional(),originalName:o.z.string().optional(),extension:o.z.string().optional()})}),R=o.z.object({files:o.z.array(L),metadata:o.z.object({batchId:o.z.string(),totalSize:o.z.number(),fileCount:o.z.number()}).optional()});function z(e){return`path`in e&&`inputBytes`in e&&`metadata`in e}const B=o.z.union([L,R]),V=o.z.object({inputs:o.z.record(o.z.string(),o.z.any())});Object.defineProperty(exports,`EventType`,{enumerable:!0,get:function(){return c}}),Object.defineProperty(exports,`FlowProvider`,{enumerable:!0,get:function(){return g}}),Object.defineProperty(exports,`FlowServer`,{enumerable:!0,get:function(){return _}}),Object.defineProperty(exports,`ImagePlugin`,{enumerable:!0,get:function(){return P}}),Object.defineProperty(exports,`NodeType`,{enumerable:!0,get:function(){return l}}),Object.defineProperty(exports,`ParallelScheduler`,{enumerable:!0,get:function(){return N}}),Object.defineProperty(exports,`completeNodeExecution`,{enumerable:!0,get:function(){return T}}),Object.defineProperty(exports,`createFlow`,{enumerable:!0,get:function(){return h}}),Object.defineProperty(exports,`createFlowEdge`,{enumerable:!0,get:function(){return s}}),Object.defineProperty(exports,`createFlowNode`,{enumerable:!0,get:function(){return u}}),Object.defineProperty(exports,`createFlowServer`,{enumerable:!0,get:function(){return 
y}}),Object.defineProperty(exports,`createInputNode`,{enumerable:!0,get:function(){return w}}),Object.defineProperty(exports,`createStorageNode`,{enumerable:!0,get:function(){return O}}),Object.defineProperty(exports,`createStreamingInputNode`,{enumerable:!0,get:function(){return M}}),Object.defineProperty(exports,`flowDataSchema`,{enumerable:!0,get:function(){return B}}),Object.defineProperty(exports,`flowFileBatchSchema`,{enumerable:!0,get:function(){return R}}),Object.defineProperty(exports,`flowFileSchema`,{enumerable:!0,get:function(){return L}}),Object.defineProperty(exports,`flowServer`,{enumerable:!0,get:function(){return b}}),Object.defineProperty(exports,`getFlowData`,{enumerable:!0,get:function(){return m}}),Object.defineProperty(exports,`getNodeData`,{enumerable:!0,get:function(){return d}}),Object.defineProperty(exports,`isFlowFile`,{enumerable:!0,get:function(){return z}}),Object.defineProperty(exports,`optimizeParamsSchema`,{enumerable:!0,get:function(){return F}}),Object.defineProperty(exports,`resizeParamsSchema`,{enumerable:!0,get:function(){return I}}),Object.defineProperty(exports,`runArgsSchema`,{enumerable:!0,get:function(){return V}}),Object.defineProperty(exports,`storageParamsSchema`,{enumerable:!0,get:function(){return D}}),Object.defineProperty(exports,`waitingNodeExecution`,{enumerable:!0,get:function(){return E}});
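The CJS bundle above exposes the flow-building API (createFlowNode, createFlowEdge, createFlow, the NodeType and EventType enums, and several zod schemas). A minimal sketch of wiring two nodes into a runnable flow, based on the signatures visible in the bundle; the ids, schemas, and storageId are illustrative, and the subpath import is the assumption noted earlier:

```ts
import { Effect } from "effect";
import { z } from "zod";
import {
  createFlow,
  createFlowEdge,
  createFlowNode,
  NodeType,
} from "@uploadista/core/flow"; // subpath assumed

const payload = z.object({ text: z.string() });

const program = Effect.gen(function* () {
  // createFlowNode returns an Effect; the created node validates its input
  // and output against the zod schemas around the user-supplied run function.
  const source = yield* createFlowNode({
    id: "source",
    name: "Source",
    description: "Passes the initial payload through",
    type: NodeType.input,
    inputSchema: payload,
    outputSchema: payload,
    run: ({ data }) => Effect.succeed({ type: "complete" as const, data }),
  });

  const sink = yield* createFlowNode({
    id: "sink",
    name: "Sink",
    description: "Uppercases the text and emits it as the flow output",
    type: NodeType.output,
    inputSchema: payload,
    outputSchema: payload,
    run: ({ data }) =>
      Effect.succeed({
        type: "complete" as const,
        data: { text: data.text.toUpperCase() },
      }),
  });

  const flow = createFlow({
    flowId: "demo-flow",
    name: "Demo",
    nodes: [source, sink],
    edges: [createFlowEdge({ source: "source", target: "sink" })],
    // Per the bundle, inputSchema parses each input node's payload, while
    // outputSchema parses the record of output-node results keyed by node id.
    inputSchema: payload,
    outputSchema: z.object({ sink: payload }),
  });

  // Inputs are keyed by input-node id; the result is either
  // { type: "completed", result } or { type: "paused", nodeId, executionState }.
  return yield* flow.run({
    inputs: { source: { text: "hello" } },
    storageId: "storage-1",
    jobId: crypto.randomUUID(),
  });
});
```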
package/dist/flow-CRaKy7Vj.js
@@ -0,0 +1,2 @@
import{UploadistaError as e}from"./uploadista-error-BVsVxqvz.js";import{FlowEventEmitter as t,FlowJobKVStore as n,uploadFileSchema as r}from"./types-DhU2j-XF.js";import{UploadServer as i}from"./upload-DaXO34dE.js";import{semaphore as a}from"./semaphore-0ZwjVpyF.js";import{Context as o,Effect as s,Layer as c}from"effect";import{z as l}from"zod";function u({source:e,target:t,sourcePort:n,targetPort:r}){return{source:e,target:t,sourcePort:n,targetPort:r}}let d=function(e){return e.JobStart=`job-start`,e.JobEnd=`job-end`,e.FlowStart=`flow-start`,e.FlowEnd=`flow-end`,e.NodeStart=`node-start`,e.NodeEnd=`node-end`,e.NodeError=`node-error`,e.NodeStream=`node-stream`,e.NodeResponse=`node-response`,e}({}),f=function(e){return e.input=`input`,e.process=`process`,e.output=`output`,e.conditional=`conditional`,e.multiplex=`multiplex`,e.merge=`merge`,e}({});function p({id:t,name:n,description:r,type:i,inputSchema:a,outputSchema:o,run:c,condition:l,multiInput:u=!1,multiOutput:d=!1,pausable:f=!1}){return s.succeed({id:t,name:n,description:r,type:i,inputSchema:a,outputSchema:o,pausable:f,run:({data:t,runId:n,flowId:r,storageId:i})=>s.gen(function*(){let l=yield*s.try({try:()=>a.parse(t),catch:t=>e.fromCode(`FLOW_INPUT_VALIDATION_ERROR`,{cause:t})}),u=yield*c({data:l,runId:n,storageId:i,flowId:r});return u.type===`waiting`?u:{type:`complete`,data:yield*s.try({try:()=>o.parse(u.data),catch:t=>e.fromCode(`FLOW_OUTPUT_VALIDATION_ERROR`,{cause:t})})}}),condition:l,multiInput:u,multiOutput:d})}const m=e=>({id:e.id,name:e.name,description:e.description,type:e.type}),h=(e,t)=>{if(e===t)return!0;try{return!!(e&&t&&typeof e==`object`&&typeof t==`object`)}catch{return!0}};var g=class{typeChecker;constructor(e=h){this.typeChecker=e}validateConnection(e,t,n){return this.getCompatibleTypes(e.outputSchema,t.inputSchema)}getCompatibleTypes(e,t){return this.typeChecker(e,t)}validateFlow(e,t){let n=[],r=new Map(e.map(e=>[e.id,e]));for(let e of t){let t=r.get(e.source),i=r.get(e.target);if(!t){n.push(`Source node ${e.source} not found`);continue}if(!i){n.push(`Target node ${e.target} not found`);continue}this.validateConnection(t,i,e)||n.push(`Schema mismatch: ${t.id} output schema incompatible with ${i.id} input schema`)}return{isValid:n.length===0,errors:n}}getExpectedInputSchemas(e,t,n){let r=new Map(t.map(e=>[e.id,e])),i={};for(let t of n)if(t.target===e){let e=r.get(t.source);if(e){let n=t.sourcePort||t.source;i[n]=e.outputSchema}}return i}getActualOutputSchemas(e,t,n){let r=new Map(t.map(e=>[e.id,e])),i={};for(let t of n)if(t.source===e){let e=r.get(t.target);if(e){let n=t.targetPort||t.target;i[n]=e.inputSchema}}return i}validateData(e,t){try{return t.parse(e),{isValid:!0,errors:[]}}catch(e){return e instanceof Error&&`errors`in e?{isValid:!1,errors:e.errors.map(e=>`${e.path.join(`.`)}: ${e.message}`)}:{isValid:!1,errors:[e instanceof Error?e.message:`Validation failed`]}}}};const _=e=>({id:e.id,name:e.name,nodes:e.nodes.map(m),edges:e.edges});function v({flowId:t,name:n,onEvent:r,edges:i,nodes:a,inputSchema:o,outputSchema:c,typeChecker:l}){let u=new g(l),f=()=>{let e={},t={},n={};return a.forEach(r=>{e[r.id]=[],n[r.id]=[],t[r.id]=0}),i.forEach(r=>{e[r.source]?.push(r.target),n[r.target]?.push(r.source),t[r.target]=(t[r.target]||0)+1}),{graph:e,reverseGraph:n,inDegree:t}},p=()=>{let{graph:e,inDegree:t}=f(),n=[],r=[];for(Object.keys(t).forEach(e=>{t[e]===0&&n.push(e)});n.length>0;){let i=n.shift();if(!i)throw Error(`No current node found`);r.push(i),e[i]?.forEach(e=>{t[e]=(t[e]||0)-1,t[e]===0&&n.push(e)})}return 
r},m=(e,t)=>{if(!e.condition)return s.succeed(!0);let{field:n,operator:r,value:i}=e.condition,a=t,o=a?.metadata?.[n]||a?.[n],c=(()=>{switch(r){case`equals`:return o===i;case`notEquals`:return o!==i;case`greaterThan`:return Number(o)>Number(i);case`lessThan`:return Number(o)<Number(i);case`contains`:return String(o).includes(String(i));case`startsWith`:return String(o).startsWith(String(i));default:return!0}})();return s.succeed(c)},h=(e,t)=>{let{reverseGraph:n}=f(),r=n[e]||[],i={};return r.forEach(e=>{let n=t.get(e);n!==void 0&&(i[e]=n)}),i},_=e=>{let t=a.filter(e=>e.type===`input`),n={};return t.forEach(t=>{e&&typeof e==`object`&&t.id in e&&(n[t.id]=o.parse(e[t.id]))}),n},v=e=>{let t=a.filter(e=>e.type===`output`),n={};return t.forEach(t=>{let r=e.get(t.id);r!==void 0&&(n[t.id]=r)}),n},y=(n,i,a,o,c,l)=>s.gen(function*(){let s=c.get(n);if(!s)return yield*e.fromCode(`FLOW_NODE_NOT_FOUND`).toEffect();r&&(yield*r({jobId:l,flowId:t,nodeId:n,eventType:d.NodeStart,nodeName:s.name,nodeType:s.type}));try{let c,u={};if(s.type===`input`){if(c=a[n],c===void 0)return yield*e.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Input node ${n} has no input data`)}).toEffect()}else{if(u=h(n,o),Object.keys(u).length===0)return yield*e.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Node ${n} has no input data`)}).toEffect();if(s.multiInput)c=u;else{let t=Object.keys(u)[0];if(!t)return yield*e.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Node ${n} has no input data`)}).toEffect();c=u[t]}}if(s.type===`conditional`&&!(yield*m(s,c)))return r&&(yield*r({eventId:``,jobId:l,flowId:t,nodeId:n,eventType:d.NodeEnd,nodeName:s.name})),{nodeId:n,result:c,success:!0,waiting:!1};let f=yield*s.run({data:c,inputs:u,runId:l,flowId:t,storageId:i});if(f.type===`waiting`)return{nodeId:n,result:f.partialData,success:!0,waiting:!0};let p=f.data;return r&&(yield*r({eventId:``,jobId:l,flowId:t,nodeId:n,eventType:d.NodeEnd,nodeName:s.name})),{nodeId:n,result:p,success:!0,waiting:!1}}catch(i){return r&&(yield*r({eventId:``,jobId:l,flowId:t,nodeId:n,eventType:d.NodeError,nodeName:s.name,error:i.message})),yield*e.fromCode(`FLOW_NODE_ERROR`,{cause:i}).toEffect()}}),b=({inputs:n,storageId:i,jobId:o,resumeFrom:l})=>s.gen(function*(){!l&&r&&(yield*r({jobId:o,eventType:d.FlowStart,flowId:t}));let u=_(n||{}),f,m,h;if(l)f=l.executionOrder,m=l.nodeResults,h=l.currentIndex;else if(f=p(),m=new Map,h=0,f.length!==a.length)return yield*e.fromCode(`FLOW_CYCLE_ERROR`).toEffect();let g=new Map(a.map(e=>[e.id,e]));for(let t=h;t<f.length;t++){let n=f[t];if(!n)return yield*e.fromCode(`FLOW_NODE_NOT_FOUND`).toEffect();let r=yield*y(n,i,u,m,g,o);if(r.waiting)return r.result!==void 0&&m.set(r.nodeId,r.result),{type:`paused`,nodeId:r.nodeId,executionState:{nodeResults:Object.fromEntries(m),executionOrder:f,currentIndex:t,inputs:u}};r.success&&m.set(r.nodeId,r.result)}let b=v(m),x=yield*s.try({try:()=>c.parse(b),catch:t=>e.fromCode(`FLOW_OUTPUT_VALIDATION_ERROR`,{cause:t})});return r&&(yield*r({eventId:``,jobId:o,eventType:d.FlowEnd,flowId:t})),{type:`completed`,result:x}});return{id:t,name:n,nodes:a,edges:i,inputSchema:o,outputSchema:c,run:({inputs:e,storageId:t,jobId:n})=>b({inputs:e,storageId:t,jobId:n}),resume:({jobId:e,storageId:t,executionState:n})=>b({inputs:n.inputs,storageId:t,jobId:e,resumeFrom:{executionOrder:n.executionOrder,nodeResults:new Map(Object.entries(n.nodeResults)),currentIndex:n.currentIndex}}),validateTypes:()=>{let e=a;return u.validateFlow(e,i)},validateInputs:e=>u.validateData(e,o),validateOutputs:e=>u.validateData(e,c)}}var y=class extends 
o.Tag(`FlowProvider`)(){},b=class extends o.Tag(`FlowServer`)(){};function x(e,t,n){return{...e,run:r=>s.gen(function*(){let i=r.jobId||crypto.randomUUID(),a=e=>s.gen(function*(){let t=yield*n.get(i);t&&(yield*n.set(i,{...t,...e,updatedAt:new Date}))});return yield*v({flowId:e.id,name:e.name,nodes:e.nodes,edges:e.edges,inputSchema:e.inputSchema,outputSchema:e.outputSchema,onEvent:e=>s.gen(function*(){switch(yield*t.emit(i,e),s.logInfo(`Updating job ${i} with event ${e.eventType}`),e.eventType){case`flow-start`:yield*a({status:`running`});break;case`node-start`:yield*s.gen(function*(){let t=yield*n.get(i);if(t){let r=t.tasks.find(t=>t.nodeId===e.nodeId)?t.tasks.map(t=>t.nodeId===e.nodeId?{...t,status:`running`,updatedAt:new Date}:t):[...t.tasks,{nodeId:e.nodeId,status:`running`,createdAt:new Date,updatedAt:new Date}];yield*n.set(i,{...t,tasks:r,updatedAt:new Date})}});break;case`node-end`:yield*s.gen(function*(){let t=yield*n.get(i);if(t){let r=t.tasks.map(t=>t.nodeId===e.nodeId?{...t,status:`completed`,updatedAt:new Date}:t);yield*n.set(i,{...t,tasks:r,updatedAt:new Date})}});break;case`node-error`:yield*s.gen(function*(){let t=yield*n.get(i);if(t){let r=t.tasks.map(t=>t.nodeId===e.nodeId?{...t,status:`failed`,updatedAt:new Date}:t);yield*n.set(i,{...t,tasks:r,error:e.error,updatedAt:new Date})}});break}return{eventId:i}})}).run({...r,jobId:i})})}}function S(){return s.gen(function*(){let r=yield*y,i=yield*t,a=yield*n,o=(t,n)=>s.gen(function*(){let r=yield*a.get(t);return r?yield*a.set(t,{...r,...n}):yield*s.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${t} not found`}))}),c=(t,n,r,c)=>s.gen(function*(){yield*o(t,{status:`running`});let e=yield*x(n,i,a).run({inputs:c,storageId:r,jobId:t});return e.type===`paused`?yield*o(t,{status:`paused`,pausedAt:e.nodeId,executionState:e.executionState,updatedAt:new Date}):yield*o(t,{status:`completed`,updatedAt:new Date,endedAt:new Date}),e}).pipe(s.catchAll(n=>s.gen(function*(){let r=n instanceof e?n.body:String(n);return yield*o(t,{status:`failed`,error:r,updatedAt:new Date}),s.fail(n)})));return{getFlow:e=>s.gen(function*(){return yield*r.getFlow(e)}),getFlowData:e=>s.gen(function*(){let t=yield*r.getFlow(e);return _(t)}),runFlow:(t,n,i)=>s.gen(function*(){let o=yield*s.try({try:()=>W.parse({inputs:i}),catch:t=>e.fromCode(`FLOW_INPUT_VALIDATION_ERROR`,{cause:t})}),l=crypto.randomUUID(),u=new Date,d={id:l,flowId:t,storageId:n,status:`started`,createdAt:u,updatedAt:u,tasks:[]};yield*a.set(l,d);let f=yield*r.getFlow(t);return yield*s.forkDaemon(c(l,f,n,o.inputs).pipe(s.tapErrorCause(e=>s.logError(`Flow execution failed`,e)))),d}),getJobStatus:t=>s.gen(function*(){return(yield*a.get(t))||(yield*s.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${t} not found`})))}),continueFlow:(t,n,c)=>s.gen(function*(){let l=yield*a.get(t);if(!l)return yield*s.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${t} not found`}));if(l.status!==`paused`)return yield*s.fail(e.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${t} is not paused (status: ${l.status})`}));if(l.pausedAt!==n)return yield*s.fail(e.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${t} is paused at node ${l.pausedAt}, not ${n}`}));if(!l.executionState)return yield*s.fail(e.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${t} has no execution state`}));let u={...l.executionState.nodeResults,[n]:c},d={...l.executionState.inputs,[n]:c},f=yield*r.getFlow(l.flowId),p=s.gen(function*(){yield*o(t,{status:`running`});let n=x(f,i,a);if(!l.executionState)return yield*s.fail(e.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${t} has no execution 
state`}));let r=yield*n.resume({jobId:t,storageId:l.storageId,executionState:{...l.executionState,nodeResults:u,inputs:d}});return r.type===`paused`?yield*o(t,{status:`paused`,pausedAt:r.nodeId,executionState:r.executionState,updatedAt:new Date}):yield*o(t,{status:`completed`,pausedAt:void 0,executionState:void 0,updatedAt:new Date,endedAt:new Date}),r}).pipe(s.catchAll(n=>s.gen(function*(){let r=n instanceof e?n.body:String(n);return yield*o(t,{status:`failed`,error:r,updatedAt:new Date}),s.fail(n)})));return yield*s.forkDaemon(p.pipe(s.tapErrorCause(e=>s.logError(`Flow resume failed`,e)))),(yield*a.get(t))||(yield*s.fail(e.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${t} not found after update`})))}),subscribeToFlowEvents:(e,t)=>s.gen(function*(){yield*i.subscribe(e,t)}),unsubscribeFromFlowEvents:e=>s.gen(function*(){yield*i.unsubscribe(e)})}})}const C=c.effect(b,S()),w=l.object({fileId:l.string()}),T=t=>s.tryPromise({try:async()=>await fetch(t),catch:t=>e.fromCode(`UNKNOWN_ERROR`,{cause:t})}),E=t=>s.tryPromise({try:async()=>await t.arrayBuffer(),catch:t=>e.fromCode(`UNKNOWN_ERROR`,{cause:t})});function D(e,t=e=>s.succeed(e)){return p({id:e,name:`Input`,description:`Reads an asset from the database`,type:f.input,inputSchema:w,outputSchema:B,run:({data:{fileId:e}})=>s.gen(function*(){let n=yield*t(e),r=yield*T(n),i=yield*E(r);return{type:`complete`,data:{path:n,inputBytes:new Uint8Array(i),metadata:{mimeType:r.headers.get(`content-type`)??``,size:Number(r.headers.get(`content-length`)??0)}}}})})}const O=e=>({type:`complete`,data:e}),k=e=>({type:`waiting`,partialData:e}),A=l.object({});function j(e){return s.gen(function*(){let t=yield*i;return yield*p({id:e,name:`Storage`,description:`Stores a file in the asset storage`,type:f.output,inputSchema:r,outputSchema:r,run:({data:e,storageId:n})=>s.gen(function*(){let r=yield*t.read(e.id),i=new ReadableStream({start(e){e.enqueue(r),e.close()}}),a=yield*t.upload({storageId:n,size:r.byteLength,type:e.metadata?.mimeType??``,fileName:e.metadata?.originalName??``,lastModified:0,metadata:JSON.stringify(e.metadata)},i),o=a.url||a.id;return O({...e,path:o})})})})}const M=l.object({operation:l.literal(`init`),storageId:l.string(),metadata:l.object({originalName:l.string().optional(),mimeType:l.string().optional(),size:l.number().optional(),extension:l.string().optional()}).optional()}),N=l.object({operation:l.literal(`finalize`),uploadId:l.string()}),P=l.union([M,N]);function F(t){return s.gen(function*(){let n=yield*i;return yield*p({id:t,name:`Streaming Input`,description:`Handles file uploads through the flow - init creates upload, client uploads chunks directly, finalize completes the flow`,type:f.input,inputSchema:P,outputSchema:r,run:({data:t})=>s.gen(function*(){switch(t.operation){case`init`:{let e={storageId:t.storageId,size:t.metadata?.size||0,type:t.metadata?.mimeType||`application/octet-stream`,fileName:t.metadata?.originalName,lastModified:t.metadata?.size?Date.now():void 0},r=yield*n.createUpload(e);return k(r)}case`finalize`:{let e=yield*n.getUpload(t.uploadId);return O(e)}default:throw yield*e.fromCode(`VALIDATION_ERROR`,{cause:Error(`Invalid operation`)}).toEffect()}})})})}var I=class{maxConcurrency;resourceSemaphore;constructor(e={}){this.maxConcurrency=e.maxConcurrency??4,this.resourceSemaphore=e.resourceSemaphore??a(this.maxConcurrency)}groupNodesByExecutionLevel(e,t){let n={},r={};e.forEach(e=>{n[e.id]=[],r[e.id]=0}),t.forEach(e=>{n[e.source]?.push(e.target),r[e.target]=(r[e.target]||0)+1});let i=[],a=new 
Set,o=0;for(;a.size<e.length;){let e=Object.keys(r).filter(e=>r[e]===0&&!a.has(e));if(e.length===0)throw Error(`Cycle detected in flow graph - cannot execute in parallel`);i.push({level:o++,nodes:e}),e.forEach(e=>{a.add(e),delete r[e],n[e]?.forEach(e=>{r[e]!==void 0&&r[e]--})})}return i}async executeNodesInParallel(e){let t=[],n=[],r=e.map(async(e,r)=>{let i=await this.resourceSemaphore.acquire();try{let n=await e();return t[r]=n,n}catch(e){throw n[r]=e,e}finally{await i.release()}});try{return await Promise.all(r),t}catch(e){if(n.length>0){let e=n.find(e=>e!==void 0);if(e)throw e}throw e}}canExecuteInParallel(e,t,n){return e.every(e=>(n[e]||[]).every(e=>t.has(e)))}getStats(){return{maxConcurrency:this.maxConcurrency}}},L=class extends o.Tag(`ImagePlugin`)(){};const R=l.object({quality:l.number().min(0).max(100),format:l.enum([`jpeg`])}),z=l.object({width:l.number().positive().optional(),height:l.number().positive().optional(),fit:l.enum([`contain`,`cover`,`fill`])}).refine(e=>e.width||e.height,`Either width or height must be specified for resize`),B=l.object({path:l.string(),inputBytes:l.instanceof(Uint8Array),metadata:l.object({mimeType:l.string(),size:l.number(),width:l.number().optional(),height:l.number().optional(),format:l.string().optional(),originalName:l.string().optional(),extension:l.string().optional()})}),V=l.object({files:l.array(B),metadata:l.object({batchId:l.string(),totalSize:l.number(),fileCount:l.number()}).optional()});function H(e){return`path`in e&&`inputBytes`in e&&`metadata`in e}const U=l.union([B,V]),W=l.object({inputs:l.record(l.string(),l.any())});export{d as EventType,y as FlowProvider,b as FlowServer,L as ImagePlugin,f as NodeType,I as ParallelScheduler,O as completeNodeExecution,v as createFlow,u as createFlowEdge,p as createFlowNode,S as createFlowServer,D as createInputNode,j as createStorageNode,F as createStreamingInputNode,U as flowDataSchema,V as flowFileBatchSchema,B as flowFileSchema,C as flowServer,_ as getFlowData,m as getNodeData,H as isFlowFile,R as optimizeParamsSchema,z as resizeParamsSchema,W as runArgsSchema,A as storageParamsSchema,k as waitingNodeExecution};
//# sourceMappingURL=flow-CRaKy7Vj.js.map
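The ESM build above is the same module graph re-exported with ESM syntax. It also shows the pause/resume protocol used by the streaming input node: an `init` operation pauses the job via waitingNodeExecution, the client uploads directly, and a `finalize` operation completes it through the FlowServer service. A hedged sketch of driving that protocol; the flow id, the input-node id "ingest", and the upload id are hypothetical, and FlowServer must be provided via the exported flowServer layer together with FlowProvider, FlowEventEmitter, and FlowJobKVStore implementations:

```ts
import { Effect } from "effect";
import { FlowServer } from "@uploadista/core/flow"; // subpath assumed

const drive = Effect.gen(function* () {
  const server = yield* FlowServer;

  // runFlow forks the job as a daemon and returns the job record immediately
  // (status "started"); the pausable node later flips it to "paused".
  const job = yield* server.runFlow("demo-flow", "storage-1", {
    ingest: { operation: "init", storageId: "storage-1" },
  });

  // ...client uploads its chunks against the upload created by `init`; in
  // practice, poll getJobStatus or use subscribeToFlowEvents to detect the pause.

  const status = yield* server.getJobStatus(job.id);
  if (status.status === "paused" && status.pausedAt) {
    // continueFlow re-runs the paused node with this payload as its input.
    yield* server.continueFlow(job.id, status.pausedAt, {
      operation: "finalize",
      uploadId: "upload-123", // hypothetical id from the paused init step
    });
  }
});
```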
package/dist/flow-CRaKy7Vj.js.map
@@ -0,0 +1 @@
{"version":3,"file":"flow-CRaKy7Vj.js","names":["defaultTypeChecker: TypeCompatibilityChecker","errors: string[]","expectedSchemas: Record<string, unknown>","actualSchemas: Record<string, unknown>","graph: Record<string, string[]>","inDegree: Record<string, number>","reverseGraph: Record<string, string[]>","queue: string[]","result: string[]","inputs: Record<string, unknown>","mappedInputs: Record<string, z.infer<TFlowInputSchema>>","flowOutputs: Record<string, unknown>","nodeInput: unknown","nodeInputsForExecution: Record<string, unknown>","executionOrder: string[]","nodeResults: Map<string, unknown>","startIndex: number","job: FlowJob","inputFile: InputFile","graph: Record<string, string[]>","inDegree: Record<string, number>","levels: ExecutionLevel[]","results: T[]","errors: Error[]"],"sources":["../src/flow/edge.ts","../src/flow/event.ts","../src/flow/node.ts","../src/flow/types/type-validator.ts","../src/flow/flow.ts","../src/flow/flow-server.ts","../src/flow/nodes/input-node.ts","../src/flow/types/flow-types.ts","../src/flow/nodes/storage-node.ts","../src/flow/nodes/streaming-input-node.ts","../src/flow/parallel-scheduler.ts","../src/flow/plugins/image-plugin.ts","../src/flow/plugins/types/optimize-node.ts","../src/flow/plugins/types/resize-node.ts","../src/flow/types/flow-file.ts","../src/flow/types/run-args.ts"],"sourcesContent":["import type { FlowEdge as EnhancedFlowEdge } from \"./types/flow-types\";\n\n// Legacy FlowEdge type for backward compatibility\nexport type FlowEdge = EnhancedFlowEdge;\n\nexport function createFlowEdge({\n source,\n target,\n sourcePort,\n targetPort,\n}: {\n source: string;\n target: string;\n sourcePort?: string;\n targetPort?: string;\n}): FlowEdge {\n return {\n source,\n target,\n sourcePort,\n targetPort,\n };\n}\n","import type { NodeType } from \"./node\";\n\nexport enum EventType {\n JobStart = \"job-start\",\n JobEnd = \"job-end\",\n FlowStart = \"flow-start\",\n FlowEnd = \"flow-end\",\n NodeStart = \"node-start\",\n NodeEnd = \"node-end\",\n NodeError = \"node-error\",\n NodeStream = \"node-stream\",\n NodeResponse = \"node-response\",\n}\n\nexport type FlowEventJobStart = {\n jobId: string;\n eventType: EventType.JobStart;\n};\n\nexport type FlowEventJobEnd = {\n eventId: string;\n jobId: string;\n eventType: EventType.JobEnd;\n};\n\nexport type FlowEventFlowStart = {\n jobId: string;\n flowId: string;\n eventType: EventType.FlowStart;\n};\n\nexport type FlowEventFlowEnd = {\n eventId: string;\n jobId: string;\n flowId: string;\n eventType: EventType.FlowEnd;\n};\n\nexport type FlowEventNodeStart = {\n jobId: string;\n flowId: string;\n nodeId: string;\n eventType: EventType.NodeStart;\n nodeName: string;\n nodeType: NodeType;\n};\n\nexport type FlowEventNodeError = {\n eventId: string;\n jobId: string;\n flowId: string;\n nodeId: string;\n nodeName: string;\n eventType: EventType.NodeError;\n error: string;\n};\n\nexport type FlowEventNodeEnd = {\n eventId: string;\n jobId: string;\n flowId: string;\n nodeId: string;\n eventType: EventType.NodeEnd;\n nodeName: string;\n};\n\nexport type FlowEventNodeResponse = {\n jobId: string;\n flowId: string;\n nodeId: string;\n eventType: EventType.NodeResponse;\n nodeName: string;\n data: unknown;\n};\n\nexport type FlowEvent =\n | FlowEventJobStart\n | FlowEventJobEnd\n | FlowEventFlowStart\n | FlowEventFlowEnd\n | FlowEventNodeStart\n | FlowEventNodeEnd\n | FlowEventNodeError;\n","import { Effect } from \"effect\";\nimport type { z } from \"zod\";\nimport { UploadistaError } from 
\"../errors\";\nimport type {\n FlowNode,\n FlowNodeData,\n NodeExecutionResult,\n} from \"./types/flow-types\";\n\nexport enum NodeType {\n input = \"input\",\n process = \"process\",\n output = \"output\",\n conditional = \"conditional\",\n multiplex = \"multiplex\",\n merge = \"merge\",\n}\n\n// Type for condition field values\nexport type ConditionField =\n | \"mimeType\"\n | \"size\"\n | \"width\"\n | \"height\"\n | \"extension\";\n\n// Type for condition operators\nexport type ConditionOperator =\n | \"equals\"\n | \"notEquals\"\n | \"greaterThan\"\n | \"lessThan\"\n | \"contains\"\n | \"startsWith\";\n\n// Type for condition value (string or number)\nexport type ConditionValue = string | number;\n\n// New Effect-based FlowNode creator\nexport function createFlowNode<Input, Output, Requirements = never>({\n id,\n name,\n description,\n type,\n inputSchema,\n outputSchema,\n run,\n condition,\n multiInput = false,\n multiOutput = false,\n pausable = false,\n}: {\n id: string;\n name: string;\n description: string;\n type: NodeType;\n inputSchema: z.ZodSchema<Input>;\n outputSchema: z.ZodSchema<Output>;\n run: (args: {\n data: Input;\n runId: string;\n storageId: string;\n flowId: string;\n }) => Effect.Effect<NodeExecutionResult<Output>, UploadistaError, Requirements>;\n condition?: {\n field: ConditionField;\n operator: ConditionOperator;\n value: ConditionValue;\n };\n multiInput?: boolean;\n multiOutput?: boolean;\n pausable?: boolean;\n}): Effect.Effect<\n FlowNode<Input, Output, UploadistaError, Requirements>,\n UploadistaError\n> {\n return Effect.succeed({\n id,\n name,\n description,\n type,\n inputSchema,\n outputSchema,\n pausable,\n run: ({\n data,\n runId,\n flowId,\n storageId,\n }: {\n data: Input;\n runId: string;\n flowId: string;\n storageId: string;\n }) =>\n Effect.gen(function* () {\n // Validate input data against schema\n const validatedData = yield* Effect.try({\n try: () => inputSchema.parse(data),\n catch: (error) =>\n UploadistaError.fromCode(\"FLOW_INPUT_VALIDATION_ERROR\", {\n cause: error,\n }),\n });\n\n // Run the node logic\n const result = yield* run({\n data: validatedData,\n runId,\n storageId,\n flowId,\n });\n\n // If the node returned waiting state, pass it through\n if (result.type === \"waiting\") {\n return result;\n }\n\n // Validate output data against schema for completed results\n const validatedResult = yield* Effect.try({\n try: () => outputSchema.parse(result.data),\n catch: (error) =>\n UploadistaError.fromCode(\"FLOW_OUTPUT_VALIDATION_ERROR\", {\n cause: error,\n }),\n });\n\n return { type: \"complete\" as const, data: validatedResult };\n }),\n condition,\n multiInput,\n multiOutput,\n });\n}\n\nexport const getNodeData = <Requirements>(\n // biome-ignore lint/suspicious/noExplicitAny: maybe type later\n node: FlowNode<any, any, UploadistaError, Requirements>,\n): FlowNodeData => {\n return {\n id: node.id,\n name: node.name,\n description: node.description,\n type: node.type,\n };\n};\n","/** biome-ignore-all lint/suspicious/noExplicitAny: any is used to allow for dynamic types */\nimport type { z } from \"zod\";\n\nimport type {\n FlowEdge,\n FlowNode,\n NodeConnectionValidator,\n TypeCompatibilityChecker,\n} from \"./flow-types\";\n\n// Default type compatibility checker using Zod schemas\nexport const defaultTypeChecker: TypeCompatibilityChecker = (\n fromSchema,\n toSchema,\n) => {\n // Basic schema compatibility rules\n if (fromSchema === toSchema) return true;\n\n // Check if schemas are compatible by comparing their types\n 
try {\n // For now, assume schemas are compatible if they're both Zod schemas\n // In a more sophisticated system, you'd check actual schema compatibility\n if (\n fromSchema &&\n toSchema &&\n typeof fromSchema === \"object\" &&\n typeof toSchema === \"object\"\n ) {\n return true;\n }\n\n return false;\n } catch {\n // If schema comparison fails, assume compatible\n return true;\n }\n};\n\n// Enhanced type validator with Zod schema support\nexport class FlowTypeValidator implements NodeConnectionValidator {\n private typeChecker: TypeCompatibilityChecker;\n\n constructor(typeChecker: TypeCompatibilityChecker = defaultTypeChecker) {\n this.typeChecker = typeChecker;\n }\n\n validateConnection(\n sourceNode: FlowNode<any, any>,\n targetNode: FlowNode<any, any>,\n _edge: FlowEdge,\n ): boolean {\n // Check if source node output schema is compatible with target node input schema\n return this.getCompatibleTypes(\n sourceNode.outputSchema,\n targetNode.inputSchema,\n );\n }\n\n getCompatibleTypes(\n sourceSchema: z.ZodSchema<any>,\n targetSchema: z.ZodSchema<any>,\n ): boolean {\n return this.typeChecker(sourceSchema, targetSchema);\n }\n\n // Validate entire flow for type compatibility\n validateFlow(\n nodes: FlowNode<any, any>[],\n edges: FlowEdge[],\n ): {\n isValid: boolean;\n errors: string[];\n } {\n const errors: string[] = [];\n const nodeMap = new Map(nodes.map((node) => [node.id, node]));\n\n for (const edge of edges) {\n const sourceNode = nodeMap.get(edge.source);\n const targetNode = nodeMap.get(edge.target);\n\n if (!sourceNode) {\n errors.push(`Source node ${edge.source} not found`);\n continue;\n }\n\n if (!targetNode) {\n errors.push(`Target node ${edge.target} not found`);\n continue;\n }\n\n if (!this.validateConnection(sourceNode, targetNode, edge)) {\n errors.push(\n `Schema mismatch: ${sourceNode.id} output schema incompatible with ${targetNode.id} input schema`,\n );\n }\n }\n\n return {\n isValid: errors.length === 0,\n errors,\n };\n }\n\n // Get expected input schemas for a node based on its incoming edges\n getExpectedInputSchemas(\n nodeId: string,\n nodes: FlowNode<any, any>[],\n edges: FlowEdge[],\n ): Record<string, unknown> {\n const nodeMap = new Map(nodes.map((node) => [node.id, node]));\n const expectedSchemas: Record<string, unknown> = {};\n\n for (const edge of edges) {\n if (edge.target === nodeId) {\n const sourceNode = nodeMap.get(edge.source);\n if (sourceNode) {\n const portKey = edge.sourcePort || edge.source;\n expectedSchemas[portKey] = sourceNode.outputSchema;\n }\n }\n }\n\n return expectedSchemas;\n }\n\n // Get actual output schemas for a node based on its outgoing edges\n getActualOutputSchemas(\n nodeId: string,\n nodes: FlowNode<any, any>[],\n edges: FlowEdge[],\n ): Record<string, unknown> {\n const nodeMap = new Map(nodes.map((node) => [node.id, node]));\n const actualSchemas: Record<string, unknown> = {};\n\n for (const edge of edges) {\n if (edge.source === nodeId) {\n const targetNode = nodeMap.get(edge.target);\n if (targetNode) {\n const portKey = edge.targetPort || edge.target;\n actualSchemas[portKey] = targetNode.inputSchema;\n }\n }\n }\n\n return actualSchemas;\n }\n\n // Validate data against a schema\n validateData(\n data: unknown,\n schema: unknown,\n ): { isValid: boolean; errors: string[] } {\n try {\n (schema as z.ZodSchema<any>).parse(data);\n return { isValid: true, errors: [] };\n } catch (error) {\n if (error instanceof Error && \"errors\" in error) {\n return {\n isValid: false,\n errors: (\n error as { errors: 
Array<{ path: string[]; message: string }> }\n ).errors.map((err) => `${err.path.join(\".\")}: ${err.message}`),\n };\n }\n return {\n isValid: false,\n errors: [error instanceof Error ? error.message : \"Validation failed\"],\n };\n }\n }\n}\n\n// Utility functions for common type checks\nexport const typeUtils = {\n // Check if a schema is assignable to another\n isAssignable(\n fromSchema: z.ZodSchema<any>,\n toSchema: z.ZodSchema<any>,\n ): boolean {\n return defaultTypeChecker(fromSchema, toSchema);\n },\n\n // Get the most specific common schema\n getCommonSchema(\n schema1: z.ZodSchema<any>,\n schema2: z.ZodSchema<any>,\n ): z.ZodSchema<any> {\n if (schema1 === schema2) return schema1;\n\n // For now, return the more specific schema or schema1\n // In a more sophisticated system, you'd compute the intersection\n return schema1;\n },\n\n // Check if a value matches a schema\n matchesSchema(value: unknown, schema: z.ZodSchema<any>): boolean {\n try {\n schema.parse(value);\n return true;\n } catch {\n return false;\n }\n },\n};\n","/** biome-ignore-all lint/suspicious/noExplicitAny: any is used to allow for dynamic types */\n\nimport { Effect } from \"effect\";\nimport type { z } from \"zod\";\nimport { UploadistaError } from \"../errors\";\nimport type { FlowEdge } from \"./edge\";\n\nimport { EventType } from \"./event\";\nimport { getNodeData } from \"./node\";\n\nimport type { FlowConfig, FlowNode, FlowNodeData } from \"./types/flow-types\";\nimport { FlowTypeValidator } from \"./types/type-validator\";\n\nexport type FlowData = {\n id: string;\n name: string;\n nodes: FlowNodeData[];\n edges: FlowEdge[];\n};\n\nexport const getFlowData = <TRequirements>(\n flow: Flow<any, any, TRequirements>,\n): FlowData => {\n return {\n id: flow.id,\n name: flow.name,\n nodes: flow.nodes.map(getNodeData),\n edges: flow.edges,\n };\n};\n\n// Flow execution result types\nexport type FlowExecutionResult<TOutput> =\n | { type: \"completed\"; result: TOutput }\n | {\n type: \"paused\";\n nodeId: string;\n executionState: {\n nodeResults: Record<string, unknown>;\n executionOrder: string[];\n currentIndex: number;\n inputs: Record<string, unknown>;\n };\n };\n\n// Effect-based Flow Type\nexport type Flow<\n TFlowInputSchema extends z.ZodSchema<any>,\n TFlowOutputSchema extends z.ZodSchema<any>,\n TRequirements,\n> = {\n id: string;\n name: string;\n nodes: FlowNode<any, any, UploadistaError, TRequirements>[];\n edges: FlowEdge[];\n inputSchema: TFlowInputSchema;\n outputSchema: TFlowOutputSchema;\n run: (args: {\n inputs?: Record<string, z.infer<TFlowInputSchema>>;\n storageId: string;\n jobId: string;\n }) => Effect.Effect<\n FlowExecutionResult<z.infer<TFlowOutputSchema>>,\n UploadistaError,\n TRequirements\n >;\n resume: (args: {\n jobId: string;\n storageId: string;\n executionState: {\n nodeResults: Record<string, unknown>;\n executionOrder: string[];\n currentIndex: number;\n inputs: Record<string, z.infer<TFlowInputSchema>>;\n };\n }) => Effect.Effect<\n FlowExecutionResult<z.infer<TFlowOutputSchema>>,\n UploadistaError,\n TRequirements\n >;\n validateTypes: () => { isValid: boolean; errors: string[] };\n validateInputs: (inputs: unknown) => { isValid: boolean; errors: string[] };\n validateOutputs: (outputs: unknown) => { isValid: boolean; errors: string[] };\n};\n\n// Effect-based Flow Creator\nexport function createFlow<\n TFlowInputSchema extends z.ZodSchema<any>,\n TFlowOutputSchema extends z.ZodSchema<any>,\n TRequirements,\n>({\n flowId,\n name,\n onEvent,\n edges,\n nodes,\n 
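// (the single flow-level inputSchema below is re-applied to each input node's\n  // payload in mapFlowInputsToNodes)\n  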
inputSchema,\n outputSchema,\n typeChecker,\n}: FlowConfig<TFlowInputSchema, TFlowOutputSchema, TRequirements>): Flow<\n TFlowInputSchema,\n TFlowOutputSchema,\n TRequirements\n> {\n const typeValidator = new FlowTypeValidator(typeChecker);\n\n // Build adjacency list for topological sorting\n const buildGraph = () => {\n const graph: Record<string, string[]> = {};\n const inDegree: Record<string, number> = {};\n const reverseGraph: Record<string, string[]> = {};\n\n // Initialize\n nodes.forEach((node: any) => {\n graph[node.id] = [];\n reverseGraph[node.id] = [];\n inDegree[node.id] = 0;\n });\n\n // Build edges\n edges.forEach((edge: any) => {\n graph[edge.source]?.push(edge.target);\n reverseGraph[edge.target]?.push(edge.source);\n inDegree[edge.target] = (inDegree[edge.target] || 0) + 1;\n });\n\n return { graph, reverseGraph, inDegree };\n };\n\n // Topological sort to determine execution order\n const topologicalSort = () => {\n const { graph, inDegree } = buildGraph();\n const queue: string[] = [];\n const result: string[] = [];\n\n // Add nodes with no incoming edges\n Object.keys(inDegree).forEach((nodeId) => {\n if (inDegree[nodeId] === 0) {\n queue.push(nodeId);\n }\n });\n\n while (queue.length > 0) {\n const current = queue.shift();\n if (!current) {\n throw new Error(\"No current node found\");\n }\n result.push(current);\n\n graph[current]?.forEach((neighbor: any) => {\n inDegree[neighbor] = (inDegree[neighbor] || 0) - 1;\n if (inDegree[neighbor] === 0) {\n queue.push(neighbor);\n }\n });\n }\n\n return result;\n };\n\n // Evaluate condition for conditional nodes using Effect\n const evaluateCondition = <TRequirements>(\n node: FlowNode<any, any, UploadistaError, TRequirements>,\n data: unknown,\n ): Effect.Effect<boolean, never> => {\n if (!node.condition) return Effect.succeed(true);\n\n const { field, operator, value } = node.condition;\n const dataRecord = data as Record<string, unknown>;\n const metadata = dataRecord?.metadata as\n | Record<string, unknown>\n | undefined;\n const fieldValue = metadata?.[field] || dataRecord?.[field];\n\n const result = (() => {\n switch (operator) {\n case \"equals\":\n return fieldValue === value;\n case \"notEquals\":\n return fieldValue !== value;\n case \"greaterThan\":\n return Number(fieldValue) > Number(value);\n case \"lessThan\":\n return Number(fieldValue) < Number(value);\n case \"contains\":\n return String(fieldValue).includes(String(value));\n case \"startsWith\":\n return String(fieldValue).startsWith(String(value));\n default:\n return true;\n }\n })();\n\n return Effect.succeed(result);\n };\n\n // Get all inputs for a node\n const getNodeInputs = (nodeId: string, nodeResults: Map<string, unknown>) => {\n const { reverseGraph } = buildGraph();\n const incomingNodes = reverseGraph[nodeId] || [];\n const inputs: Record<string, unknown> = {};\n\n incomingNodes.forEach((sourceNodeId: any) => {\n const result = nodeResults.get(sourceNodeId);\n if (result !== undefined) {\n inputs[sourceNodeId] = result;\n }\n });\n\n return inputs;\n };\n\n // Map flow inputs to input nodes\n const mapFlowInputsToNodes = (\n flowInputs: Record<string, z.infer<TFlowInputSchema>>,\n ) => {\n const inputNodes = nodes.filter((node: any) => node.type === \"input\");\n const mappedInputs: Record<string, z.infer<TFlowInputSchema>> = {};\n\n inputNodes.forEach((node: any) => {\n if (\n flowInputs &&\n typeof flowInputs === \"object\" &&\n node.id in flowInputs\n ) {\n mappedInputs[node.id] = inputSchema.parse(flowInputs[node.id]);\n }\n });\n\n 
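// Input nodes missing from flowInputs get no entry here; executeNode later\n    // fails them with FLOW_NODE_ERROR (no input data) rather than failing fast.\n    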
return mappedInputs;\n };\n\n // Collect outputs from output nodes\n const collectFlowOutputs = (\n nodeResults: Map<string, unknown>,\n ): z.infer<TFlowOutputSchema> => {\n const outputNodes = nodes.filter((node: any) => node.type === \"output\");\n const flowOutputs: Record<string, unknown> = {};\n\n outputNodes.forEach((node: any) => {\n const result = nodeResults.get(node.id);\n if (result !== undefined) {\n flowOutputs[node.id] = result;\n }\n });\n\n return flowOutputs as z.infer<TFlowOutputSchema>;\n };\n\n // Execute a single node using Effect\n const executeNode = <TRequirements>(\n nodeId: string,\n storageId: string,\n nodeInputs: Record<string, z.infer<TFlowInputSchema>>,\n nodeResults: Map<string, unknown>,\n nodeMap: Map<string, FlowNode<any, any, UploadistaError, TRequirements>>,\n jobId: string,\n ): Effect.Effect<\n { nodeId: string; result: unknown; success: boolean; waiting: boolean },\n UploadistaError,\n TRequirements\n > => {\n return Effect.gen(function* () {\n const node = nodeMap.get(nodeId);\n if (!node) {\n return yield* UploadistaError.fromCode(\n \"FLOW_NODE_NOT_FOUND\",\n ).toEffect();\n }\n\n // Emit NodeStart event if provided\n if (onEvent) {\n yield* onEvent({\n jobId,\n flowId,\n nodeId,\n eventType: EventType.NodeStart,\n nodeName: node.name,\n nodeType: node.type,\n });\n }\n\n try {\n // Prepare input data for the node\n let nodeInput: unknown;\n let nodeInputsForExecution: Record<string, unknown> = {};\n\n if (node.type === \"input\") {\n // For input nodes, use the mapped flow input\n nodeInput = nodeInputs[nodeId];\n if (nodeInput === undefined) {\n return yield* UploadistaError.fromCode(\"FLOW_NODE_ERROR\", {\n cause: new Error(`Input node ${nodeId} has no input data`),\n }).toEffect();\n }\n } else {\n // Get all inputs for the node\n nodeInputsForExecution = getNodeInputs(nodeId, nodeResults);\n\n if (Object.keys(nodeInputsForExecution).length === 0) {\n return yield* UploadistaError.fromCode(\"FLOW_NODE_ERROR\", {\n cause: new Error(`Node ${nodeId} has no input data`),\n }).toEffect();\n }\n\n // For single input nodes, use the first input\n if (!node.multiInput) {\n const firstInputKey = Object.keys(nodeInputsForExecution)[0];\n if (!firstInputKey) {\n return yield* UploadistaError.fromCode(\"FLOW_NODE_ERROR\", {\n cause: new Error(`Node ${nodeId} has no input data`),\n }).toEffect();\n }\n nodeInput = nodeInputsForExecution[firstInputKey];\n } else {\n // For multi-input nodes, pass all inputs\n nodeInput = nodeInputsForExecution;\n }\n }\n\n // Check condition for conditional nodes\n if (node.type === \"conditional\") {\n const conditionResult = yield* evaluateCondition(node, nodeInput);\n if (!conditionResult) {\n // Skip this node - return success but no result\n if (onEvent) {\n yield* onEvent({\n eventId: \"\",\n jobId,\n flowId,\n nodeId,\n eventType: EventType.NodeEnd,\n nodeName: node.name,\n });\n }\n return { nodeId, result: nodeInput, success: true, waiting: false };\n }\n }\n\n // Execute the node\n const executionResult = yield* node.run({\n data: nodeInput,\n inputs: nodeInputsForExecution,\n runId: jobId,\n flowId,\n storageId,\n });\n\n // Handle execution result\n if (executionResult.type === \"waiting\") {\n // Node is waiting for more data - pause execution\n return {\n nodeId,\n result: executionResult.partialData,\n success: true,\n waiting: true,\n };\n }\n\n // Node completed successfully\n const result = executionResult.data;\n\n // Emit NodeEnd event\n if (onEvent) {\n yield* onEvent({\n eventId: \"\",\n jobId,\n 
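// eventId left blank here; the configured onEvent callback returns its own id\n          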
flowId,\n nodeId,\n eventType: EventType.NodeEnd,\n nodeName: node.name,\n });\n }\n\n return { nodeId, result, success: true, waiting: false };\n } catch (error) {\n // Emit NodeError event\n if (onEvent) {\n yield* onEvent({\n eventId: \"\",\n jobId,\n flowId,\n nodeId,\n eventType: EventType.NodeError,\n nodeName: node.name,\n error: (error as Error).message,\n });\n }\n return yield* UploadistaError.fromCode(\"FLOW_NODE_ERROR\", {\n cause: error,\n }).toEffect();\n }\n });\n };\n\n // Internal execution function that can start fresh or resume\n const executeFlow = ({\n inputs,\n storageId,\n jobId,\n resumeFrom,\n }: {\n inputs?: Record<string, z.infer<TFlowInputSchema>>;\n storageId: string;\n jobId: string;\n resumeFrom?: {\n executionOrder: string[];\n nodeResults: Map<string, unknown>;\n currentIndex: number;\n };\n }): Effect.Effect<\n | { type: \"completed\"; result: z.infer<TFlowOutputSchema> }\n | {\n type: \"paused\";\n nodeId: string;\n executionState: {\n nodeResults: Record<string, unknown>;\n executionOrder: string[];\n currentIndex: number;\n inputs: Record<string, z.infer<TFlowInputSchema>>;\n };\n },\n UploadistaError,\n TRequirements\n > => {\n return Effect.gen(function* () {\n // Emit FlowStart event only if starting fresh\n if (!resumeFrom && onEvent) {\n yield* onEvent({\n jobId,\n eventType: EventType.FlowStart,\n flowId,\n });\n }\n\n // Map flow inputs to input nodes\n const nodeInputs = mapFlowInputsToNodes(inputs || {});\n\n // Get execution order and results - either fresh or from resume state\n let executionOrder: string[];\n let nodeResults: Map<string, unknown>;\n let startIndex: number;\n\n if (resumeFrom) {\n // Resume from saved state\n executionOrder = resumeFrom.executionOrder;\n nodeResults = resumeFrom.nodeResults;\n startIndex = resumeFrom.currentIndex;\n } else {\n // Start fresh\n executionOrder = topologicalSort();\n nodeResults = new Map<string, unknown>();\n startIndex = 0;\n\n // Check for cycles\n if (executionOrder.length !== nodes.length) {\n return yield* UploadistaError.fromCode(\"FLOW_CYCLE_ERROR\").toEffect();\n }\n }\n\n // Create node map for quick lookup\n const nodeMap = new Map(nodes.map((node) => [node.id, node]));\n\n // Execute nodes sequentially starting from startIndex\n for (let i = startIndex; i < executionOrder.length; i++) {\n const nodeId = executionOrder[i];\n if (!nodeId) {\n return yield* UploadistaError.fromCode(\n \"FLOW_NODE_NOT_FOUND\",\n ).toEffect();\n }\n const nodeResult = yield* executeNode(\n nodeId,\n storageId,\n nodeInputs,\n nodeResults,\n nodeMap,\n jobId,\n );\n\n if (nodeResult.waiting) {\n // Node is waiting - pause execution and return state\n if (nodeResult.result !== undefined) {\n nodeResults.set(nodeResult.nodeId, nodeResult.result);\n }\n\n return {\n type: \"paused\" as const,\n nodeId: nodeResult.nodeId,\n executionState: {\n nodeResults: Object.fromEntries(nodeResults),\n executionOrder,\n currentIndex: i, // Stay at current index to re-execute this node on resume\n inputs: nodeInputs,\n },\n };\n }\n\n if (nodeResult.success) {\n nodeResults.set(nodeResult.nodeId, nodeResult.result);\n }\n }\n\n // All nodes completed - collect outputs\n const finalResult = collectFlowOutputs(nodeResults);\n\n // Validate the final result against the output schema\n const validatedResult = yield* Effect.try({\n try: () => outputSchema.parse(finalResult),\n catch: (error) =>\n UploadistaError.fromCode(\"FLOW_OUTPUT_VALIDATION_ERROR\", {\n cause: error,\n }),\n });\n\n // Emit FlowEnd event\n if (onEvent) {\n 
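// Runs only after outputSchema.parse succeeded, so a failed output\n        // validation means no FlowEnd event is emitted for this job.\n        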
yield* onEvent({\n eventId: \"\",\n jobId,\n eventType: EventType.FlowEnd,\n flowId,\n });\n }\n\n return { type: \"completed\" as const, result: validatedResult };\n });\n };\n\n const run = ({\n inputs,\n storageId,\n jobId,\n }: {\n inputs?: Record<string, z.infer<TFlowInputSchema>>;\n storageId: string;\n jobId: string;\n }): Effect.Effect<\n | { type: \"completed\"; result: z.infer<TFlowOutputSchema> }\n | {\n type: \"paused\";\n nodeId: string;\n executionState: {\n nodeResults: Record<string, unknown>;\n executionOrder: string[];\n currentIndex: number;\n inputs: Record<string, z.infer<TFlowInputSchema>>;\n };\n },\n UploadistaError,\n TRequirements\n > => {\n return executeFlow({ inputs, storageId, jobId });\n };\n\n const resume = ({\n jobId,\n storageId,\n executionState,\n }: {\n jobId: string;\n storageId: string;\n executionState: {\n nodeResults: Record<string, unknown>;\n executionOrder: string[];\n currentIndex: number;\n inputs: Record<string, z.infer<TFlowInputSchema>>;\n };\n }): Effect.Effect<\n | { type: \"completed\"; result: z.infer<TFlowOutputSchema> }\n | {\n type: \"paused\";\n nodeId: string;\n executionState: {\n nodeResults: Record<string, unknown>;\n executionOrder: string[];\n currentIndex: number;\n inputs: Record<string, z.infer<TFlowInputSchema>>;\n };\n },\n UploadistaError,\n TRequirements\n > => {\n return executeFlow({\n inputs: executionState.inputs,\n storageId,\n jobId,\n resumeFrom: {\n executionOrder: executionState.executionOrder,\n nodeResults: new Map(Object.entries(executionState.nodeResults)),\n currentIndex: executionState.currentIndex,\n },\n });\n };\n\n const validateTypes = () => {\n // Convert FlowNode to FlowNode for validation\n const compatibleNodes = nodes as FlowNode<any, any>[];\n return typeValidator.validateFlow(compatibleNodes, edges);\n };\n\n const validateInputs = (inputs: unknown) => {\n return typeValidator.validateData(inputs, inputSchema);\n };\n\n const validateOutputs = (outputs: unknown) => {\n return typeValidator.validateData(outputs, outputSchema);\n };\n\n return {\n id: flowId,\n name,\n nodes,\n edges,\n inputSchema,\n outputSchema,\n run,\n resume,\n validateTypes,\n validateInputs,\n validateOutputs,\n };\n}\n","import { Context, Effect, Layer } from \"effect\";\nimport type { z } from \"zod\";\nimport { UploadistaError } from \"../errors\";\nimport {\n createFlow,\n type Flow,\n type FlowData,\n getFlowData,\n runArgsSchema,\n} from \"../flow\";\nimport type { EventEmitter, KvStore, WebSocketConnection } from \"../types\";\nimport { FlowEventEmitter, FlowJobKVStore } from \"../types\";\nimport type { FlowEvent } from \"./event\";\nimport type { FlowJob } from \"./types/flow-job\";\n\n// Define the Flow provider interface that applications must implement\nexport type FlowProviderShape<TRequirements = any> = {\n getFlow: (\n flowId: string,\n ) => Effect.Effect<Flow<any, any, TRequirements>, UploadistaError>;\n};\n\n// Context Tag for FlowProvider\nexport class FlowProvider extends Context.Tag(\"FlowProvider\")<\n FlowProvider,\n FlowProviderShape<any>\n>() {}\n\n// Effect-based FlowServer interface - the core abstraction\nexport type FlowServerShape = {\n getFlow: <TRequirements>(\n flowId: string,\n ) => Effect.Effect<Flow<any, any, TRequirements>, UploadistaError>;\n\n getFlowData: (flowId: string) => Effect.Effect<FlowData, UploadistaError>;\n\n runFlow: <TRequirements>(\n flowId: string,\n storageId: string,\n inputs: any,\n ) => Effect.Effect<FlowJob, UploadistaError, TRequirements>;\n\n continueFlow: 
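/* resumes a paused job, feeding new data to the node it is waiting on */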
<TRequirements>(\n jobId: string,\n nodeId: string,\n newData: unknown,\n ) => Effect.Effect<FlowJob, UploadistaError, TRequirements>;\n\n getJobStatus: (jobId: string) => Effect.Effect<FlowJob, UploadistaError>;\n\n subscribeToFlowEvents: (\n jobId: string,\n connection: WebSocketConnection,\n ) => Effect.Effect<void, UploadistaError>;\n\n unsubscribeFromFlowEvents: (\n jobId: string,\n ) => Effect.Effect<void, UploadistaError>;\n};\n\n// Context Tag for FlowServer\nexport class FlowServer extends Context.Tag(\"FlowServer\")<\n FlowServer,\n FlowServerShape\n>() {}\n\n// Legacy types for backward compatibility\nexport type FlowServerOptions = {\n getFlow: <TRequirements>({\n flowId,\n storageId,\n }: {\n flowId: string;\n storageId: string;\n }) => Promise<Flow<any, any, TRequirements>>;\n kvStore: KvStore<FlowJob>;\n};\n\n// Function to enhance a flow with event emission capabilities\nfunction withFlowEvents<\n TFlowInputSchema extends z.ZodSchema<any>,\n TFlowOutputSchema extends z.ZodSchema<any>,\n TRequirements,\n>(\n flow: Flow<TFlowInputSchema, TFlowOutputSchema, TRequirements>,\n eventEmitter: EventEmitter<FlowEvent>,\n kvStore: KvStore<FlowJob>,\n): Flow<TFlowInputSchema, TFlowOutputSchema, TRequirements> {\n return {\n ...flow,\n run: (args: {\n inputs?: Record<string, z.infer<TFlowInputSchema>>;\n storageId: string;\n jobId?: string;\n }) => {\n return Effect.gen(function* () {\n // Use provided jobId or generate a new one\n const executionJobId = args.jobId || crypto.randomUUID();\n\n // Helper to update job in KV store\n const updateJobInStore = (updates: Partial<FlowJob>) =>\n Effect.gen(function* () {\n const job = yield* kvStore.get(executionJobId);\n if (job) {\n yield* kvStore.set(executionJobId, {\n ...job,\n ...updates,\n updatedAt: new Date(),\n });\n }\n });\n\n // Create the onEvent callback that emits to the eventEmitter and updates job\n const onEventCallback = (event: FlowEvent) =>\n Effect.gen(function* () {\n // Emit event\n yield* eventEmitter.emit(executionJobId, event);\n\n Effect.logInfo(\n `Updating job ${executionJobId} with event ${event.eventType}`,\n );\n\n // Update job based on event type\n switch (event.eventType) {\n case \"flow-start\":\n yield* updateJobInStore({ status: \"running\" });\n break;\n\n case \"node-start\":\n yield* Effect.gen(function* () {\n const job = yield* kvStore.get(executionJobId);\n if (job) {\n const existingTask = job.tasks.find(\n (t) => t.nodeId === event.nodeId,\n );\n const updatedTasks = existingTask\n ? job.tasks.map((t) =>\n t.nodeId === event.nodeId\n ? {\n ...t,\n status: \"running\" as const,\n updatedAt: new Date(),\n }\n : t,\n )\n : [\n ...job.tasks,\n {\n nodeId: event.nodeId,\n status: \"running\" as const,\n createdAt: new Date(),\n updatedAt: new Date(),\n },\n ];\n\n yield* kvStore.set(executionJobId, {\n ...job,\n tasks: updatedTasks,\n updatedAt: new Date(),\n });\n }\n });\n break;\n\n case \"node-end\":\n yield* Effect.gen(function* () {\n const job = yield* kvStore.get(executionJobId);\n if (job) {\n const updatedTasks = job.tasks.map((t) =>\n t.nodeId === event.nodeId\n ? {\n ...t,\n status: \"completed\" as const,\n updatedAt: new Date(),\n }\n : t,\n );\n\n yield* kvStore.set(executionJobId, {\n ...job,\n tasks: updatedTasks,\n updatedAt: new Date(),\n });\n }\n });\n break;\n\n case \"node-error\":\n yield* Effect.gen(function* () {\n const job = yield* kvStore.get(executionJobId);\n if (job) {\n const updatedTasks = job.tasks.map((t) =>\n t.nodeId === event.nodeId\n ? 
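/* flag the errored node's task as failed */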
{\n ...t,\n status: \"failed\" as const,\n updatedAt: new Date(),\n }\n : t,\n );\n\n yield* kvStore.set(executionJobId, {\n ...job,\n tasks: updatedTasks,\n error: event.error,\n updatedAt: new Date(),\n });\n }\n });\n break;\n }\n\n return { eventId: executionJobId };\n });\n\n // Create a new flow with the same configuration but with onEvent callback\n const flowWithEvents = createFlow({\n flowId: flow.id,\n name: flow.name,\n nodes: flow.nodes,\n edges: flow.edges,\n inputSchema: flow.inputSchema,\n outputSchema: flow.outputSchema,\n onEvent: onEventCallback,\n });\n\n // Run the enhanced flow with consistent jobId\n const result = yield* flowWithEvents.run({\n ...args,\n jobId: executionJobId,\n });\n\n // Return the result directly (can be completed or paused)\n return result;\n });\n },\n };\n}\n\n// Core FlowServer implementation\nexport function createFlowServer() {\n return Effect.gen(function* () {\n const flowProvider = yield* FlowProvider;\n const eventEmitter = yield* FlowEventEmitter;\n const kvStore = yield* FlowJobKVStore;\n\n const updateJob = (jobId: string, updates: Partial<FlowJob>) =>\n Effect.gen(function* () {\n const job = yield* kvStore.get(jobId);\n if (!job) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_NOT_FOUND\", {\n cause: `Job ${jobId} not found`,\n }),\n );\n }\n return yield* kvStore.set(jobId, { ...job, ...updates });\n });\n\n // Helper function to execute flow in background\n const executeFlowInBackground = (\n jobId: string,\n flow: Flow<any, any, any>,\n storageId: string,\n inputs: Record<string, any>,\n ) =>\n Effect.gen(function* () {\n // Update job status to running\n yield* updateJob(jobId, {\n status: \"running\",\n });\n\n const flowWithEvents = withFlowEvents(flow, eventEmitter, kvStore);\n\n // Run the flow with the consistent jobId\n const result = yield* flowWithEvents.run({\n inputs,\n storageId,\n jobId,\n });\n\n // Handle result based on type\n if (result.type === \"paused\") {\n // Update job as paused\n yield* updateJob(jobId, {\n status: \"paused\",\n pausedAt: result.nodeId,\n executionState: result.executionState,\n updatedAt: new Date(),\n });\n } else {\n // Update job as completed\n yield* updateJob(jobId, {\n status: \"completed\",\n updatedAt: new Date(),\n endedAt: new Date(),\n });\n }\n\n return result;\n }).pipe(\n Effect.catchAll((error) =>\n Effect.gen(function* () {\n // Update job as failed\n const errorMessage =\n error instanceof UploadistaError ? 
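/* an UploadistaError's body becomes the persisted job error message */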
error.body : String(error);\n yield* updateJob(jobId, {\n status: \"failed\",\n error: errorMessage,\n updatedAt: new Date(),\n });\n return Effect.fail(error);\n }),\n ),\n );\n\n return {\n getFlow: (flowId) =>\n Effect.gen(function* () {\n const flow = yield* flowProvider.getFlow(flowId);\n return flow;\n }),\n\n getFlowData: (flowId) =>\n Effect.gen(function* () {\n const flow = yield* flowProvider.getFlow(flowId);\n return getFlowData(flow);\n }),\n\n runFlow: (flowId: string, storageId: string, inputs: unknown) =>\n Effect.gen(function* () {\n const parsedParams = yield* Effect.try({\n try: () => runArgsSchema.parse({ inputs }),\n catch: (error) =>\n UploadistaError.fromCode(\"FLOW_INPUT_VALIDATION_ERROR\", {\n cause: error,\n }),\n });\n\n // Generate a unique jobId\n const jobId = crypto.randomUUID();\n const createdAt = new Date();\n\n // Store initial job metadata\n const job: FlowJob = {\n id: jobId,\n flowId,\n storageId,\n status: \"started\",\n createdAt,\n updatedAt: createdAt,\n tasks: [],\n };\n\n yield* kvStore.set(jobId, job);\n\n // Get the flow and start background execution\n const flow = yield* flowProvider.getFlow(flowId);\n\n // Fork the flow execution to run in background as daemon\n yield* Effect.forkDaemon(\n executeFlowInBackground(\n jobId,\n flow,\n storageId,\n parsedParams.inputs,\n ).pipe(\n Effect.tapErrorCause((cause) =>\n Effect.logError(\"Flow execution failed\", cause),\n ),\n ),\n );\n\n // Return immediately with jobId\n return job;\n }),\n\n getJobStatus: (jobId: string) =>\n Effect.gen(function* () {\n const job = yield* kvStore.get(jobId);\n if (!job) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_NOT_FOUND\", {\n cause: `Job ${jobId} not found`,\n }),\n );\n }\n\n return job;\n }),\n\n continueFlow: (jobId: string, nodeId: string, newData: unknown) =>\n Effect.gen(function* () {\n // Get the current job\n const job = yield* kvStore.get(jobId);\n if (!job) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_NOT_FOUND\", {\n cause: `Job ${jobId} not found`,\n }),\n );\n }\n\n // Verify job is paused\n if (job.status !== \"paused\") {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_ERROR\", {\n cause: `Job ${jobId} is not paused (status: ${job.status})`,\n }),\n );\n }\n\n // Verify it's paused at the expected node\n if (job.pausedAt !== nodeId) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_ERROR\", {\n cause: `Job ${jobId} is paused at node ${job.pausedAt}, not ${nodeId}`,\n }),\n );\n }\n\n // Verify we have execution state\n if (!job.executionState) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_ERROR\", {\n cause: `Job ${jobId} has no execution state`,\n }),\n );\n }\n\n // Update both node results and inputs with new data\n // For input nodes, the data comes from executionState.inputs[nodeId]\n // For other nodes, the data comes from nodeResults of their dependencies\n const updatedNodeResults = {\n ...job.executionState.nodeResults,\n [nodeId]: newData,\n };\n\n const updatedInputs = {\n ...job.executionState.inputs,\n [nodeId]: newData,\n };\n\n // Get the flow\n const flow = yield* flowProvider.getFlow(job.flowId);\n\n // Helper to resume flow in background\n const resumeFlowInBackground = Effect.gen(function* () {\n // Update job status to running\n yield* updateJob(jobId, {\n status: \"running\",\n });\n\n const flowWithEvents = withFlowEvents(flow, eventEmitter, kvStore);\n\n if (!job.executionState) {\n return yield* Effect.fail(\n 
UploadistaError.fromCode(\"FLOW_JOB_ERROR\", {\n cause: `Job ${jobId} has no execution state`,\n }),\n );\n }\n\n // Resume the flow with updated state\n const result = yield* flowWithEvents.resume({\n jobId,\n storageId: job.storageId,\n executionState: {\n ...job.executionState,\n nodeResults: updatedNodeResults,\n inputs: updatedInputs,\n },\n });\n\n // Handle result based on type\n if (result.type === \"paused\") {\n // Update job as paused again (might pause at another node)\n yield* updateJob(jobId, {\n status: \"paused\",\n pausedAt: result.nodeId,\n executionState: result.executionState,\n updatedAt: new Date(),\n });\n } else {\n // Update job as completed\n yield* updateJob(jobId, {\n status: \"completed\",\n pausedAt: undefined,\n executionState: undefined,\n updatedAt: new Date(),\n endedAt: new Date(),\n });\n }\n\n return result;\n }).pipe(\n Effect.catchAll((error) =>\n Effect.gen(function* () {\n // Update job as failed\n const errorMessage =\n error instanceof UploadistaError ? error.body : String(error);\n yield* updateJob(jobId, {\n status: \"failed\",\n error: errorMessage,\n updatedAt: new Date(),\n });\n return Effect.fail(error);\n }),\n ),\n );\n\n // Fork the resume execution to run in background as daemon\n yield* Effect.forkDaemon(\n resumeFlowInBackground.pipe(\n Effect.tapErrorCause((cause) =>\n Effect.logError(\"Flow resume failed\", cause),\n ),\n ),\n );\n\n // Return immediately with updated job\n const updatedJob = yield* kvStore.get(jobId);\n if (!updatedJob) {\n return yield* Effect.fail(\n UploadistaError.fromCode(\"FLOW_JOB_NOT_FOUND\", {\n cause: `Job ${jobId} not found after update`,\n }),\n );\n }\n return updatedJob;\n }),\n\n subscribeToFlowEvents: (jobId: string, connection: WebSocketConnection) =>\n Effect.gen(function* () {\n yield* eventEmitter.subscribe(jobId, connection);\n }),\n\n unsubscribeFromFlowEvents: (jobId: string) =>\n Effect.gen(function* () {\n yield* eventEmitter.unsubscribe(jobId);\n }),\n } satisfies FlowServerShape;\n });\n}\n\n// Export the FlowServer layer with job store dependency\nexport const flowServer = Layer.effect(FlowServer, createFlowServer());\nexport type FlowServerLayer = typeof flowServer;\n","import { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport { UploadistaError } from \"../../errors\";\nimport { createFlowNode, flowFileSchema, NodeType } from \"..\";\n\n// Define schemas for input and output\nconst inputDataSchema = z.object({\n fileId: z.string(),\n});\n\nconst fetchFile = (url: string) => {\n return Effect.tryPromise({\n try: async () => {\n return await fetch(url);\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", {\n cause: error,\n });\n },\n });\n};\n\nconst arrayBuffer = (response: Response) => {\n return Effect.tryPromise({\n try: async () => {\n return await response.arrayBuffer();\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", {\n cause: error,\n });\n },\n });\n};\n\nexport function createInputNode(\n id: string,\n prepareFile: (fileId: string) => Effect.Effect<string> = (fileId) =>\n Effect.succeed(fileId),\n) {\n // TODO: use a service instead ?\n return createFlowNode({\n id,\n name: \"Input\",\n description: \"Reads an asset from the database\",\n type: NodeType.input,\n inputSchema: inputDataSchema,\n outputSchema: flowFileSchema,\n run: ({ data: { fileId } }) => {\n return Effect.gen(function* () {\n const url = yield* prepareFile(fileId);\n const response = yield* fetchFile(url);\n const buffer = yield* 
arrayBuffer(response);\n return {\n type: \"complete\" as const,\n data: {\n path: url,\n inputBytes: new Uint8Array(buffer),\n metadata: {\n mimeType: response.headers.get(\"content-type\") ?? \"\",\n size: Number(response.headers.get(\"content-length\") ?? 0),\n },\n },\n };\n });\n },\n });\n}\n","/** biome-ignore-all lint/suspicious/noExplicitAny: any is used to allow for dynamic types */\n\nimport type { Effect } from \"effect\";\nimport type { z } from \"zod\";\nimport type { UploadistaError } from \"../../errors\";\nimport type { FlowEvent, FlowEventFlowEnd, FlowEventFlowStart } from \"../event\";\nimport { NodeType } from \"../node\";\n\n// Type for node input/output mapping\nexport type NodeTypeMap = Record<string, { input: unknown; output: unknown }>;\n\nexport type FlowNodeData = {\n id: string;\n name: string;\n description: string;\n type: NodeType;\n};\n\n// Node execution result - can be complete or waiting for more data\nexport type NodeExecutionResult<TOutput> =\n | { type: \"complete\"; data: TOutput }\n | { type: \"waiting\"; partialData?: unknown };\n\nexport const completeNodeExecution = <TOutput>(data: TOutput) => ({\n type: \"complete\" as const,\n data,\n});\n\nexport const waitingNodeExecution = (partialData?: unknown) => ({\n type: \"waiting\" as const,\n partialData,\n});\n\n// Enhanced node type with Zod schemas\nexport type FlowNode<\n TInput = unknown,\n TOutput = unknown,\n TError = UploadistaError,\n TRequirements = never,\n> = FlowNodeData & {\n inputSchema: z.ZodSchema<TInput>;\n outputSchema: z.ZodSchema<TOutput>;\n run: (args: {\n data: TInput;\n runId: string;\n storageId: string;\n flowId: string;\n inputs?: Record<string, unknown>;\n }) => Effect.Effect<NodeExecutionResult<TOutput>, TError, TRequirements>;\n condition?: {\n field: string;\n operator: string;\n value: unknown;\n };\n multiInput?: boolean;\n multiOutput?: boolean;\n pausable?: boolean; // Flag to indicate this node can pause execution\n};\n\n// Enhanced edge type with type validation\nexport type FlowEdge = {\n source: string;\n target: string;\n sourcePort?: string; // For multi-output nodes\n targetPort?: string; // For multi-input nodes\n};\n\n// Type compatibility checker for Zod schemas\nexport type TypeCompatibilityChecker = (\n from: z.ZodSchema<any>,\n to: z.ZodSchema<any>,\n) => boolean;\n\n// Type for validating node connections (schema-based)\nexport type NodeConnectionValidator = {\n validateConnection: (\n sourceNode: FlowNode<any, any>,\n targetNode: FlowNode<any, any>,\n edge: FlowEdge,\n ) => boolean;\n getCompatibleTypes: (\n sourceSchema: z.ZodSchema<any>,\n targetSchema: z.ZodSchema<any>,\n ) => boolean;\n};\n\n// Flow configuration with Zod schemas\nexport type FlowConfig<\n TFlowInputSchema extends z.ZodSchema<any>,\n TFlowOutputSchema extends z.ZodSchema<any>,\n TRequirements,\n> = {\n flowId: string;\n name: string;\n nodes: Array<FlowNode<any, any, UploadistaError, TRequirements>>;\n edges: FlowEdge[];\n inputSchema: TFlowInputSchema;\n outputSchema: TFlowOutputSchema;\n typeChecker?: TypeCompatibilityChecker;\n onEvent?: (\n event: FlowEvent,\n ) => Effect.Effect<{ eventId: string | null }, UploadistaError>;\n parallelExecution?: {\n enabled?: boolean;\n maxConcurrency?: number;\n };\n};\n\n// Flow execution context\nexport type FlowExecutionContext = {\n runId: string;\n flowId: string;\n nodeResults: Map<string, unknown>;\n nodeInputs: Map<string, Record<string, unknown>>;\n};\n\n// Re-export existing types for compatibility\nexport { NodeType };\nexport type 
{ FlowEvent, FlowEventFlowEnd, FlowEventFlowStart };\n","import { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport { uploadFileSchema } from \"../../types\";\nimport { UploadServer } from \"../../upload\";\nimport { createFlowNode, NodeType } from \"../node\";\nimport { completeNodeExecution } from \"../types\";\n\nexport const storageParamsSchema = z.object({});\n\nexport type StorageParams = z.infer<typeof storageParamsSchema>;\n\nexport function createStorageNode(id: string) {\n return Effect.gen(function* () {\n const uploadServer = yield* UploadServer;\n return yield* createFlowNode({\n id,\n name: \"Storage\",\n description: \"Stores a file in the asset storage\",\n type: NodeType.output,\n inputSchema: uploadFileSchema,\n outputSchema: uploadFileSchema,\n run: ({ data: file, storageId }) => {\n return Effect.gen(function* () {\n const inputBytes = yield* uploadServer.read(file.id);\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(inputBytes);\n controller.close();\n },\n });\n\n const uploadResult = yield* uploadServer.upload(\n {\n storageId,\n size: inputBytes.byteLength,\n type: file.metadata?.mimeType ?? \"\",\n fileName: file.metadata?.originalName ?? \"\",\n lastModified: 0,\n metadata: JSON.stringify(file.metadata),\n },\n stream,\n );\n\n const url = uploadResult.url || uploadResult.id;\n return completeNodeExecution({\n ...file,\n path: url,\n });\n });\n },\n });\n });\n}\n","import { Effect } from \"effect\";\nimport { z } from \"zod\";\nimport { UploadistaError } from \"../../errors\";\nimport type { InputFile } from \"../../types\";\nimport { uploadFileSchema } from \"../../types\";\nimport { UploadServer } from \"../../upload\";\nimport { createFlowNode, NodeType } from \"../node\";\nimport { completeNodeExecution, waitingNodeExecution } from \"../types\";\n\n// Input schemas for different operations\nconst initStreamingInputSchema = z.object({\n operation: z.literal(\"init\"),\n storageId: z.string(),\n metadata: z\n .object({\n originalName: z.string().optional(),\n mimeType: z.string().optional(),\n size: z.number().optional(),\n extension: z.string().optional(),\n })\n .optional(),\n});\n\nconst finalizeStreamingInputSchema = z.object({\n operation: z.literal(\"finalize\"),\n uploadId: z.string(),\n});\n\nconst streamingInputDataSchema = z.union([\n initStreamingInputSchema,\n finalizeStreamingInputSchema,\n]);\n\ntype StreamingInputData = z.infer<typeof streamingInputDataSchema>;\n\nexport function createStreamingInputNode(id: string) {\n return Effect.gen(function* () {\n const uploadServer = yield* UploadServer;\n return yield* createFlowNode({\n id,\n name: \"Streaming Input\",\n description:\n \"Handles file uploads through the flow - init creates upload, client uploads chunks directly, finalize completes the flow\",\n type: NodeType.input,\n inputSchema: streamingInputDataSchema,\n outputSchema: uploadFileSchema,\n run: ({\n data,\n }: {\n data: StreamingInputData;\n runId: string;\n storageId: string;\n flowId: string;\n }) => {\n return Effect.gen(function* () {\n switch (data.operation) {\n case \"init\": {\n // Create upload using upload server - it handles all state management\n const inputFile: InputFile = {\n storageId: data.storageId,\n size: data.metadata?.size || 0,\n type: data.metadata?.mimeType || \"application/octet-stream\",\n fileName: data.metadata?.originalName,\n lastModified: data.metadata?.size ? 
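/* NB: gated on metadata.size rather than a real modification time */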
Date.now() : undefined,\n };\n\n const uploadFile = yield* uploadServer.createUpload(inputFile);\n\n // Return waiting state with the upload file\n // Client will upload chunks directly to the upload API\n return waitingNodeExecution(uploadFile);\n }\n\n case \"finalize\": {\n // Get final upload file from upload server's KV store\n const finalUploadFile = yield* uploadServer.getUpload(\n data.uploadId,\n );\n\n // Complete the node execution with the final upload file\n // Flow can now continue to next nodes (e.g., save to storage, optimize)\n return completeNodeExecution(finalUploadFile);\n }\n\n default:\n throw yield* UploadistaError.fromCode(\"VALIDATION_ERROR\", {\n cause: new Error(\"Invalid operation\"),\n }).toEffect();\n }\n });\n },\n });\n });\n}\n","import type { Semaphore } from \"../utils/semaphore\";\nimport { semaphore } from \"../utils/semaphore\";\nimport type { FlowNode } from \"./types/flow-types\";\n\nexport interface ExecutionLevel {\n level: number;\n nodes: string[];\n}\n\nexport interface ParallelSchedulerConfig {\n maxConcurrency?: number;\n resourceSemaphore?: Semaphore;\n}\n\nexport class ParallelScheduler {\n private maxConcurrency: number;\n private resourceSemaphore: Semaphore;\n\n constructor(config: ParallelSchedulerConfig = {}) {\n this.maxConcurrency = config.maxConcurrency ?? 4;\n this.resourceSemaphore =\n config.resourceSemaphore ?? semaphore(this.maxConcurrency);\n }\n\n /**\n * Groups nodes into execution levels where nodes in the same level can run in parallel\n * @param nodes Array of flow nodes\n * @param edges Array of flow edges\n * @returns Array of execution levels\n */\n groupNodesByExecutionLevel(\n nodes: FlowNode<unknown, unknown>[],\n edges: Array<{ source: string; target: string }>,\n ): ExecutionLevel[] {\n // Build dependency graph\n const graph: Record<string, string[]> = {};\n const inDegree: Record<string, number> = {};\n\n // Initialize\n nodes.forEach((node) => {\n graph[node.id] = [];\n inDegree[node.id] = 0;\n });\n\n // Build edges and calculate in-degrees\n edges.forEach((edge) => {\n graph[edge.source]?.push(edge.target);\n inDegree[edge.target] = (inDegree[edge.target] || 0) + 1;\n });\n\n const levels: ExecutionLevel[] = [];\n const processedNodes = new Set<string>();\n let levelIndex = 0;\n\n while (processedNodes.size < nodes.length) {\n // Find all nodes with zero in-degree that haven't been processed\n const currentLevelNodes = Object.keys(inDegree).filter(\n (nodeId) => inDegree[nodeId] === 0 && !processedNodes.has(nodeId),\n );\n\n if (currentLevelNodes.length === 0) {\n throw new Error(\n \"Cycle detected in flow graph - cannot execute in parallel\",\n );\n }\n\n levels.push({\n level: levelIndex++,\n nodes: currentLevelNodes,\n });\n\n // Remove current level nodes and update in-degrees\n currentLevelNodes.forEach((nodeId) => {\n processedNodes.add(nodeId);\n delete inDegree[nodeId];\n\n // Decrease in-degree for all dependent nodes\n graph[nodeId]?.forEach((dependentId) => {\n if (inDegree[dependentId] !== undefined) {\n inDegree[dependentId]--;\n }\n });\n });\n }\n\n return levels;\n }\n\n /**\n * Executes a batch of nodes in parallel with resource management\n * @param nodeExecutors Array of async functions that execute individual nodes\n * @returns Promise that resolves when all nodes complete\n */\n async executeNodesInParallel<T>(\n nodeExecutors: Array<() => Promise<T>>,\n ): Promise<T[]> {\n const results: T[] = [];\n const errors: Error[] = [];\n\n // Execute all node executors in parallel with semaphore 
control\n const promises = nodeExecutors.map(async (executor, index) => {\n const permit = await this.resourceSemaphore.acquire();\n\n try {\n const result = await executor();\n results[index] = result;\n return result;\n } catch (error) {\n errors[index] = error as Error;\n throw error;\n } finally {\n await permit.release();\n }\n });\n\n try {\n await Promise.all(promises);\n return results;\n } catch (error) {\n // If any node fails, we still want to return partial results\n // The calling code can decide how to handle partial failures\n if (errors.length > 0) {\n const firstError = errors.find((e) => e !== undefined);\n if (firstError) {\n throw firstError;\n }\n }\n throw error;\n }\n }\n\n /**\n * Determines if nodes can be safely executed in parallel\n * @param nodes Nodes to check\n * @param nodeResults Current execution results\n * @returns true if all nodes have their dependencies satisfied\n */\n canExecuteInParallel(\n nodeIds: string[],\n nodeResults: Map<string, unknown>,\n reverseGraph: Record<string, string[]>,\n ): boolean {\n return nodeIds.every((nodeId) => {\n const dependencies = reverseGraph[nodeId] || [];\n return dependencies.every((depId) => nodeResults.has(depId));\n });\n }\n\n /**\n * Gets execution statistics for monitoring\n */\n getStats() {\n return {\n maxConcurrency: this.maxConcurrency,\n // Could add more stats like current active tasks, total completed, etc.\n };\n }\n}\n","import type { UploadistaError } from \"@uploadista/core/errors\";\nimport { Context, type Effect } from \"effect\";\nimport type { OptimizeParams } from \"./types/optimize-node\";\nimport type { ResizeParams } from \"./types/resize-node\";\n\nexport type ImagePluginShape = {\n optimize: (\n input: Uint8Array,\n options: OptimizeParams,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n resize: (\n input: Uint8Array,\n options: ResizeParams,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n};\n\nexport class ImagePlugin extends Context.Tag(\"ImagePlugin\")<\n ImagePlugin,\n ImagePluginShape\n>() {}\n","import { z } from \"zod\";\n\nexport const optimizeParamsSchema = z.object({\n quality: z.number().min(0).max(100),\n format: z.enum([\"jpeg\"] as const),\n});\n\nexport type OptimizeParams = z.infer<typeof optimizeParamsSchema>;\n","import { z } from \"zod\";\n\nexport const resizeParamsSchema = z\n .object({\n width: z.number().positive().optional(),\n height: z.number().positive().optional(),\n fit: z.enum([\"contain\", \"cover\", \"fill\"]),\n })\n .refine(\n (data) => data.width || data.height,\n \"Either width or height must be specified for resize\",\n );\n\nexport type ResizeParams = z.infer<typeof resizeParamsSchema>;\n","import { z } from \"zod\";\n\nexport type FlowFile = {\n path: string;\n inputBytes: Uint8Array<ArrayBufferLike>;\n metadata: {\n size: number;\n mimeType: string;\n width?: number;\n height?: number;\n format?: string;\n originalName?: string;\n extension?: string;\n };\n};\n\nexport const flowFileSchema = z.object({\n path: z.string(),\n inputBytes: z.instanceof(Uint8Array<ArrayBufferLike>),\n metadata: z.object({\n mimeType: z.string(),\n size: z.number(),\n width: z.number().optional(),\n height: z.number().optional(),\n format: z.string().optional(),\n originalName: z.string().optional(),\n extension: z.string().optional(),\n }),\n});\n\nexport type FlowFileBatch = {\n files: FlowFile[];\n metadata?: {\n batchId: string;\n totalSize: number;\n fileCount: number;\n };\n};\n\nexport const flowFileBatchSchema = z.object({\n files: 
z.array(flowFileSchema),\n metadata: z\n .object({\n batchId: z.string(),\n totalSize: z.number(),\n fileCount: z.number(),\n })\n .optional(),\n});\n\nexport type FlowFileData = FlowFile | FlowFileBatch;\n\nexport function isFlowFile(data: FlowFileData): data is FlowFile {\n return \"path\" in data && \"inputBytes\" in data && \"metadata\" in data;\n}\n\nexport const flowDataSchema = z.union([flowFileSchema, flowFileBatchSchema]);\n\nexport type FlowCondition = {\n field: \"mimeType\" | \"size\" | \"width\" | \"height\" | \"extension\";\n operator:\n | \"equals\"\n | \"notEquals\"\n | \"greaterThan\"\n | \"lessThan\"\n | \"contains\"\n | \"startsWith\";\n value: string | number;\n};\n","import { z } from \"zod\";\n\nexport const runArgsSchema = z.object({\n inputs: z.record(z.string(), z.any()),\n});\n\nexport type RunArgs = z.infer<typeof runArgsSchema>;\n"],"mappings":"…
ACN,KAAM,CACJ,KAAM,EACN,WAAY,IAAI,WAAW,EAAO,CAClC,SAAU,CACR,SAAU,EAAS,QAAQ,IAAI,eAAe,EAAI,GAClD,KAAM,OAAO,EAAS,QAAQ,IAAI,iBAAiB,EAAI,EAAE,CAC1D,CACF,CACF,EACD,CAEL,CAAC,CC5CJ,MAAa,EAAkC,IAAmB,CAChE,KAAM,WACN,OACD,EAEY,EAAwB,IAA2B,CAC9D,KAAM,UACN,cACD,ECxBY,EAAsB,EAAE,OAAO,EAAE,CAAC,CAI/C,SAAgB,EAAkB,EAAY,CAC5C,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EAC5B,OAAO,MAAO,EAAe,CAC3B,KACA,KAAM,UACN,YAAa,qCACb,KAAM,EAAS,OACf,YAAa,EACb,aAAc,EACd,KAAM,CAAE,KAAM,EAAM,eACX,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAa,MAAO,EAAa,KAAK,EAAK,GAAG,CAC9C,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAW,CAC9B,EAAW,OAAO,EAErB,CAAC,CAEI,EAAe,MAAO,EAAa,OACvC,CACE,YACA,KAAM,EAAW,WACjB,KAAM,EAAK,UAAU,UAAY,GACjC,SAAU,EAAK,UAAU,cAAgB,GACzC,aAAc,EACd,SAAU,KAAK,UAAU,EAAK,SAAS,CACxC,CACD,EACD,CAEK,EAAM,EAAa,KAAO,EAAa,GAC7C,OAAO,EAAsB,CAC3B,GAAG,EACH,KAAM,EACP,CAAC,EACF,CAEL,CAAC,EACF,CCzCJ,MAAM,EAA2B,EAAE,OAAO,CACxC,UAAW,EAAE,QAAQ,OAAO,CAC5B,UAAW,EAAE,QAAQ,CACrB,SAAU,EACP,OAAO,CACN,aAAc,EAAE,QAAQ,CAAC,UAAU,CACnC,SAAU,EAAE,QAAQ,CAAC,UAAU,CAC/B,KAAM,EAAE,QAAQ,CAAC,UAAU,CAC3B,UAAW,EAAE,QAAQ,CAAC,UAAU,CACjC,CAAC,CACD,UAAU,CACd,CAAC,CAEI,EAA+B,EAAE,OAAO,CAC5C,UAAW,EAAE,QAAQ,WAAW,CAChC,SAAU,EAAE,QAAQ,CACrB,CAAC,CAEI,EAA2B,EAAE,MAAM,CACvC,EACA,EACD,CAAC,CAIF,SAAgB,EAAyB,EAAY,CACnD,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAe,MAAO,EAC5B,OAAO,MAAO,EAAe,CAC3B,KACA,KAAM,kBACN,YACE,2HACF,KAAM,EAAS,MACf,YAAa,EACb,aAAc,EACd,KAAM,CACJ,UAOO,EAAO,IAAI,WAAa,CAC7B,OAAQ,EAAK,UAAb,CACE,IAAK,OAAQ,CAEX,IAAMC,EAAuB,CAC3B,UAAW,EAAK,UAChB,KAAM,EAAK,UAAU,MAAQ,EAC7B,KAAM,EAAK,UAAU,UAAY,2BACjC,SAAU,EAAK,UAAU,aACzB,aAAc,EAAK,UAAU,KAAO,KAAK,KAAK,CAAG,IAAA,GAClD,CAEK,EAAa,MAAO,EAAa,aAAa,EAAU,CAI9D,OAAO,EAAqB,EAAW,CAGzC,IAAK,WAAY,CAEf,IAAM,EAAkB,MAAO,EAAa,UAC1C,EAAK,SACN,CAID,OAAO,EAAsB,EAAgB,CAG/C,QACE,MAAM,MAAO,EAAgB,SAAS,mBAAoB,CACxD,MAAW,MAAM,oBAAoB,CACtC,CAAC,CAAC,UAAU,GAEjB,CAEL,CAAC,EACF,CC9EJ,IAAa,EAAb,KAA+B,CAC7B,eACA,kBAEA,YAAY,EAAkC,EAAE,CAAE,CAChD,KAAK,eAAiB,EAAO,gBAAkB,EAC/C,KAAK,kBACH,EAAO,mBAAqB,EAAU,KAAK,eAAe,CAS9D,2BACE,EACA,EACkB,CAElB,IAAMC,EAAkC,EAAE,CACpCC,EAAmC,EAAE,CAG3C,EAAM,QAAS,GAAS,CACtB,EAAM,EAAK,IAAM,EAAE,CACnB,EAAS,EAAK,IAAM,GACpB,CAGF,EAAM,QAAS,GAAS,CACtB,EAAM,EAAK,SAAS,KAAK,EAAK,OAAO,CACrC,EAAS,EAAK,SAAW,EAAS,EAAK,SAAW,GAAK,GACvD,CAEF,IAAMC,EAA2B,EAAE,CAC7B,EAAiB,IAAI,IACvB,EAAa,EAEjB,KAAO,EAAe,KAAO,EAAM,QAAQ,CAEzC,IAAM,EAAoB,OAAO,KAAK,EAAS,CAAC,OAC7C,GAAW,EAAS,KAAY,GAAK,CAAC,EAAe,IAAI,EAAO,CAClE,CAED,GAAI,EAAkB,SAAW,EAC/B,MAAU,MACR,4DACD,CAGH,EAAO,KAAK,CACV,MAAO,IACP,MAAO,EACR,CAAC,CAGF,EAAkB,QAAS,GAAW,CACpC,EAAe,IAAI,EAAO,CAC1B,OAAO,EAAS,GAGhB,EAAM,IAAS,QAAS,GAAgB,CAClC,EAAS,KAAiB,IAAA,IAC5B,EAAS,MAEX,EACF,CAGJ,OAAO,EAQT,MAAM,uBACJ,EACc,CACd,IAAMC,EAAe,EAAE,CACjBC,EAAkB,EAAE,CAGpB,EAAW,EAAc,IAAI,MAAO,EAAU,IAAU,CAC5D,IAAM,EAAS,MAAM,KAAK,kBAAkB,SAAS,CAErD,GAAI,CACF,IAAM,EAAS,MAAM,GAAU,CAE/B,MADA,GAAQ,GAAS,EACV,QACA,EAAO,CAEd,KADA,GAAO,GAAS,EACV,SACE,CACR,MAAM,EAAO,SAAS,GAExB,CAEF,GAAI,CAEF,OADA,MAAM,QAAQ,IAAI,EAAS,CACpB,QACA,EAAO,CAGd,GAAI,EAAO,OAAS,EAAG,CACrB,IAAM,EAAa,EAAO,KAAM,GAAM,IAAM,IAAA,GAAU,CACtD,GAAI,EACF,MAAM,EAGV,MAAM,GAUV,qBACE,EACA,EACA,EACS,CACT,OAAO,EAAQ,MAAO,IACC,EAAa,IAAW,EAAE,EAC3B,MAAO,GAAU,EAAY,IAAI,EAAM,CAAC,CAC5D,CAMJ,UAAW,CACT,MAAO,CACL,eAAgB,KAAK,eAEtB,GC3IQ,EAAb,cAAiC,EAAQ,IAAI,cAAc,EAGxD,AAAC,GCjBJ,MAAa,EAAuB,EAAE,OAAO,CAC3C,QAAS,EAAE,QAAQ,CAAC,IAAI,EAAE,CAAC,IAAI,IAAI,CACnC,OAAQ,EAAE,KAAK,CAAC,OAAO,CAAU,CAClC,CAAC,CCHW,EAAqB,EAC/B,OAAO,CACN,MAAO,EAAE,QAAQ,CAAC,UAAU,CAAC,UAAU,CACvC,OAAQ,EAAE,QAAQ,CAAC,UAAU,CAAC,UAAU,CACxC,IAAK,EAAE,KAAK,CAAC,UAAW,QAAS,OAAO,CAAC,CAC1C,CAAC,CACD,OACE,GAAS,EAAK,OAAS,EAAK,OAC7B,sDACD,
CCKU,EAAiB,EAAE,OAAO,CACrC,KAAM,EAAE,QAAQ,CAChB,WAAY,EAAE,WAAW,WAA4B,CACrD,SAAU,EAAE,OAAO,CACjB,SAAU,EAAE,QAAQ,CACpB,KAAM,EAAE,QAAQ,CAChB,MAAO,EAAE,QAAQ,CAAC,UAAU,CAC5B,OAAQ,EAAE,QAAQ,CAAC,UAAU,CAC7B,OAAQ,EAAE,QAAQ,CAAC,UAAU,CAC7B,aAAc,EAAE,QAAQ,CAAC,UAAU,CACnC,UAAW,EAAE,QAAQ,CAAC,UAAU,CACjC,CAAC,CACH,CAAC,CAWW,EAAsB,EAAE,OAAO,CAC1C,MAAO,EAAE,MAAM,EAAe,CAC9B,SAAU,EACP,OAAO,CACN,QAAS,EAAE,QAAQ,CACnB,UAAW,EAAE,QAAQ,CACrB,UAAW,EAAE,QAAQ,CACtB,CAAC,CACD,UAAU,CACd,CAAC,CAIF,SAAgB,EAAW,EAAsC,CAC/D,MAAO,SAAU,GAAQ,eAAgB,GAAQ,aAAc,EAGjE,MAAa,EAAiB,EAAE,MAAM,CAAC,EAAgB,EAAoB,CAAC,CCtD/D,EAAgB,EAAE,OAAO,CACpC,OAAQ,EAAE,OAAO,EAAE,QAAQ,CAAE,EAAE,KAAK,CAAC,CACtC,CAAC"}
@@ -0,0 +1,34 @@
+import { Context, Effect, Layer } from "effect";
+
+//#region src/utils/generate-id.d.ts
+type GenerateIdShape = {
+  generateId: () => Effect.Effect<string>;
+};
+declare const GenerateId_base: Context.TagClass<GenerateId, "UploadistaGenerateIdService", {
+  readonly generateId: () => Effect.Effect<string>;
+}>;
+declare class GenerateId extends GenerateId_base {}
+/**
+ * Effect-based ID generation services
+ */
+declare const GenerateIdService: {
+  readonly generateId: () => Effect.Effect<string>;
+};
+/**
+ * Generates a random UUID using Effect
+ * @returns Effect that produces a random UUID string
+ */
+declare const GenerateIdRandom: {
+  readonly generateId: () => Effect.Effect<string>;
+};
+declare const GenerateIdLive: Layer.Layer<GenerateId, never, never>;
+/**
+ * Generates a timestamp-based ID using Effect
+ * @returns Effect that produces a timestamp-based ID
+ */
+declare const GenerateIdTimestamp: {
+  readonly generateId: () => Effect.Effect<string>;
+};
+//#endregion
+export { GenerateId, GenerateIdLive, GenerateIdRandom, GenerateIdService, GenerateIdShape, GenerateIdTimestamp };
+//# sourceMappingURL=generate-id-Dm-Vboxq.d.ts.map
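For orientation, a minimal consumption sketch (not part of the package diff; assumes Effect v3 semantics and that these symbols are re-exported from the package root, which this diff does not confirm): resolve the `GenerateId` tag from context and run with the UUID-backed `GenerateIdLive` layer.

```ts
import { Effect } from "effect";
// Assumed import path; the actual export subpath may differ.
import { GenerateId, GenerateIdLive } from "@uploadista/core";

// Resolve the tagged service from the Effect context and call generateId().
const program = Effect.gen(function* () {
  const ids = yield* GenerateId;
  return yield* ids.generateId();
});

// GenerateIdLive binds the tag to the crypto.randomUUID() implementation.
Effect.runPromise(Effect.provide(program, GenerateIdLive)).then(console.log);
```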
@@ -0,0 +1 @@
+{"version":3,"file":"generate-id-Dm-Vboxq.d.ts.map","names":[],"sources":["../src/utils/generate-id.ts"],"sourcesContent":[],"mappings":";;;KAEY,eAAA;oBACQ,MAAA,CAAO;AAD3B,CAAA;AAEE,cAAA,eAAA,kBAAA,WAAA,EAAA,6BAAA,EAAA;6BAK6B,MAAA,CAAO;;AAAP,cAFlB,UAAA,SAAmB,eAAA,CAEM;;AAFtC;AAQA;AAMa,cANA,iBANkB,EAAA;EAgBlB,SAAA,UAA4D,EAAA,GAAA,GAhB1C,MAAA,CAAO,MAgBmC,CAAA,MAAA,CAAA;CAAA;;;;AAMzE;cAVa;6BAZkB,MAAA,CAAO;;cAgBzB,gBAAc,KAAA,CAAA,MAAA;;;;;cAMd;6BAtBkB,MAAA,CAAO"}
@@ -0,0 +1,34 @@
+import { Context, Effect, Layer } from "effect";
+
+//#region src/utils/generate-id.d.ts
+type GenerateIdShape = {
+  generateId: () => Effect.Effect<string>;
+};
+declare const GenerateId_base: Context.TagClass<GenerateId, "UploadistaGenerateIdService", {
+  readonly generateId: () => Effect.Effect<string>;
+}>;
+declare class GenerateId extends GenerateId_base {}
+/**
+ * Effect-based ID generation services
+ */
+declare const GenerateIdService: {
+  readonly generateId: () => Effect.Effect<string>;
+};
+/**
+ * Generates a random UUID using Effect
+ * @returns Effect that produces a random UUID string
+ */
+declare const GenerateIdRandom: {
+  readonly generateId: () => Effect.Effect<string>;
+};
+declare const GenerateIdLive: Layer.Layer<GenerateId, never, never>;
+/**
+ * Generates a timestamp-based ID using Effect
+ * @returns Effect that produces a timestamp-based ID
+ */
+declare const GenerateIdTimestamp: {
+  readonly generateId: () => Effect.Effect<string>;
+};
+//#endregion
+export { GenerateId, GenerateIdLive, GenerateIdRandom, GenerateIdService, GenerateIdShape, GenerateIdTimestamp };
+//# sourceMappingURL=generate-id-LjJRLD6N.d.cts.map
@@ -0,0 +1 @@
+{"version":3,"file":"generate-id-LjJRLD6N.d.cts","names":[],"sources":["../src/utils/generate-id.ts"],"sourcesContent":[],"mappings":";;;KAEY,eAAA;oBACQ,MAAA,CAAO;AAD3B,CAAA;AAEE,cAAA,eAAA,kBAAA,WAAA,EAAA,6BAAA,EAAA;6BAK6B,MAAA,CAAO;;AAAP,cAFlB,UAAA,SAAmB,eAAA,CAEM;;AAFtC;AAQA;AAMa,cANA,iBANkB,EAAO;EAgBzB,SAAA,UAA4D,EAAA,GAAA,GAhB1C,MAAA,CAAO,MAgBmC,CAAA,MAAA,CAAA;CAAA;;;;AAMzE;cAVa;6BAZkB,MAAA,CAAO;;cAgBzB,gBAAc,KAAA,CAAA,MAAA;;;;;cAMd;6BAtBkB,MAAA,CAAO"}
@@ -0,0 +1 @@
+const e=require(`./chunk-CUT6urMc.cjs`);let t=require(`effect`);t=e.__toESM(t);var n=class extends t.Context.Tag(`UploadistaGenerateIdService`)(){};const r=n.Service,i=n.of({generateId:()=>t.Effect.succeed(crypto.randomUUID())}),a=t.Layer.succeed(n,i),o=n.of({generateId:()=>t.Effect.succeed(`${Date.now()}-${Math.random().toString(36).slice(2,11)}`)});Object.defineProperty(exports,`GenerateId`,{enumerable:!0,get:function(){return n}}),Object.defineProperty(exports,`GenerateIdLive`,{enumerable:!0,get:function(){return a}}),Object.defineProperty(exports,`GenerateIdRandom`,{enumerable:!0,get:function(){return i}}),Object.defineProperty(exports,`GenerateIdService`,{enumerable:!0,get:function(){return r}}),Object.defineProperty(exports,`GenerateIdTimestamp`,{enumerable:!0,get:function(){return o}});
@@ -0,0 +1,2 @@
+import{Context as e,Effect as t,Layer as n}from"effect";var r=class extends e.Tag(`UploadistaGenerateIdService`)(){};const i=r.Service,a=r.of({generateId:()=>t.succeed(crypto.randomUUID())}),o=n.succeed(r,a),s=r.of({generateId:()=>t.succeed(`${Date.now()}-${Math.random().toString(36).slice(2,11)}`)});export{r as GenerateId,o as GenerateIdLive,a as GenerateIdRandom,i as GenerateIdService,s as GenerateIdTimestamp};
+//# sourceMappingURL=generate-id-yohS1ZDk.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"generate-id-yohS1ZDk.js","names":[],"sources":["../src/utils/generate-id.ts"],"sourcesContent":["import { Context, Effect, Layer } from \"effect\";\n\nexport type GenerateIdShape = {\n  generateId: () => Effect.Effect<string>;\n};\n\n// Declaring a tag for a service that generates random id\nexport class GenerateId extends Context.Tag(\"UploadistaGenerateIdService\")<\n  GenerateId,\n  { readonly generateId: () => Effect.Effect<string> }\n>() {}\n\n/**\n * Effect-based ID generation services\n */\nexport const GenerateIdService = GenerateId.Service;\n\n/**\n * Generates a random UUID using Effect\n * @returns Effect that produces a random UUID string\n */\nexport const GenerateIdRandom = GenerateId.of({\n  generateId: () => Effect.succeed(crypto.randomUUID()),\n});\n\nexport const GenerateIdLive = Layer.succeed(GenerateId, GenerateIdRandom);\n\n/**\n * Generates a timestamp-based ID using Effect\n * @returns Effect that produces a timestamp-based ID\n */\nexport const GenerateIdTimestamp = GenerateId.of({\n  generateId: () =>\n    Effect.succeed(`${Date.now()}-${Math.random().toString(36).slice(2, 11)}`),\n});\n"],"mappings":"wDAOA,IAAa,EAAb,cAAgC,EAAQ,IAAI,8BAA8B,EAGvE,AAAC,GAKJ,MAAa,EAAoB,EAAW,QAM/B,EAAmB,EAAW,GAAG,CAC5C,eAAkB,EAAO,QAAQ,OAAO,YAAY,CAAC,CACtD,CAAC,CAEW,EAAiB,EAAM,QAAQ,EAAY,EAAiB,CAM5D,EAAsB,EAAW,GAAG,CAC/C,eACE,EAAO,QAAQ,GAAG,KAAK,KAAK,CAAC,GAAG,KAAK,QAAQ,CAAC,SAAS,GAAG,CAAC,MAAM,EAAG,GAAG,GAAG,CAC7E,CAAC"}
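Similarly, a sketch (same assumptions as above, not part of the package) of opting into the timestamp-based generator: `GenerateIdTimestamp` is a plain service instance, so it gets wrapped in a Layer the same way `GenerateIdLive` wraps `GenerateIdRandom` in the source shown in the sourcemap above.

```ts
import { Effect, Layer } from "effect";
// Assumed import path; the actual export subpath may differ.
import { GenerateId, GenerateIdTimestamp } from "@uploadista/core";

// Mirror the GenerateIdLive construction with the timestamp-based instance.
const GenerateIdTimestampLive = Layer.succeed(GenerateId, GenerateIdTimestamp);

const id = Effect.runSync(
  Effect.provide(
    Effect.flatMap(GenerateId, (svc) => svc.generateId()),
    GenerateIdTimestampLive,
  ),
);
// id looks like "1718000000000-x3k9q2f7m": Date.now() plus a base-36 suffix.
```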