@uploadista/core 0.2.0 → 1.0.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (107)
  1. package/dist/{checksum-BjP9nb5b.mjs → checksum-BRjFmTRk.mjs} +2 -2
  2. package/dist/{checksum-BjP9nb5b.mjs.map → checksum-BRjFmTRk.mjs.map} +1 -1
  3. package/dist/{checksum-B7RDiO7V.cjs → checksum-BrjQ8GJL.cjs} +1 -1
  4. package/dist/errors/index.cjs +1 -1
  5. package/dist/errors/index.d.cts +1 -1
  6. package/dist/errors/index.d.mts +1 -1
  7. package/dist/errors/index.mjs +1 -1
  8. package/dist/flow/index.cjs +1 -1
  9. package/dist/flow/index.d.cts +3 -2
  10. package/dist/flow/index.d.mts +8 -5
  11. package/dist/flow/index.mjs +1 -1
  12. package/dist/generate-id-BAMRQzMr.d.cts +34 -0
  13. package/dist/generate-id-BAMRQzMr.d.cts.map +1 -0
  14. package/dist/generate-id-DuZwLm4m.d.mts +34 -0
  15. package/dist/generate-id-DuZwLm4m.d.mts.map +1 -0
  16. package/dist/index.cjs +1 -1
  17. package/dist/index.d.cts +8 -5
  18. package/dist/index.d.mts +8 -5
  19. package/dist/index.mjs +1 -1
  20. package/dist/middleware-BlrOGKrp.d.cts +4129 -0
  21. package/dist/middleware-BlrOGKrp.d.cts.map +1 -0
  22. package/dist/middleware-BmRmwme_.d.mts +4129 -0
  23. package/dist/middleware-BmRmwme_.d.mts.map +1 -0
  24. package/dist/resolve-upload-metadata-B2C5e1y1.d.cts +4533 -0
  25. package/dist/resolve-upload-metadata-B2C5e1y1.d.cts.map +1 -0
  26. package/dist/resolve-upload-metadata-DbkBzxm8.d.mts +4533 -0
  27. package/dist/resolve-upload-metadata-DbkBzxm8.d.mts.map +1 -0
  28. package/dist/run-args-C4no7Ny4.cjs +1 -0
  29. package/dist/run-args-CIqI4Zc7.mjs +2 -0
  30. package/dist/run-args-CIqI4Zc7.mjs.map +1 -0
  31. package/dist/{stream-limiter-BCFULdAM.d.cts → stream-limiter-7wkBVLWT.d.mts} +2 -2
  32. package/dist/{stream-limiter-BCFULdAM.d.cts.map → stream-limiter-7wkBVLWT.d.mts.map} +1 -1
  33. package/dist/{stream-limiter-DZ22uIqf.cjs → stream-limiter-B-Y0DTgA.cjs} +1 -1
  34. package/dist/{stream-limiter-CTJPEJqE.mjs → stream-limiter-CvDuNIyd.mjs} +2 -2
  35. package/dist/{stream-limiter-CTJPEJqE.mjs.map → stream-limiter-CvDuNIyd.mjs.map} +1 -1
  36. package/dist/{stream-limiter-Bi7OTbRp.d.mts → stream-limiter-D1KC-6pK.d.cts} +2 -2
  37. package/dist/{stream-limiter-Bi7OTbRp.d.mts.map → stream-limiter-D1KC-6pK.d.cts.map} +1 -1
  38. package/dist/streams/index.cjs +1 -1
  39. package/dist/streams/index.d.cts +1 -1
  40. package/dist/streams/index.d.mts +2 -2
  41. package/dist/streams/index.mjs +1 -1
  42. package/dist/testing/index.cjs +1 -1
  43. package/dist/testing/index.d.cts +2 -1
  44. package/dist/testing/index.d.cts.map +1 -1
  45. package/dist/testing/index.d.mts +7 -4
  46. package/dist/testing/index.d.mts.map +1 -1
  47. package/dist/testing/index.mjs +1 -1
  48. package/dist/{throttle-Da0OA8JT.d.cts → throttle-3FRcr7MU.d.mts} +4 -34
  49. package/dist/throttle-3FRcr7MU.d.mts.map +1 -0
  50. package/dist/{throttle-ibiT6E4U.d.mts → throttle-BlH27EGu.d.cts} +4 -34
  51. package/dist/throttle-BlH27EGu.d.cts.map +1 -0
  52. package/dist/{throttle-KnkRgZPi.cjs → throttle-Dp59f37i.cjs} +1 -1
  53. package/dist/{throttle-CnDa3v1k.mjs → throttle-TFY-V41R.mjs} +2 -2
  54. package/dist/{throttle-CnDa3v1k.mjs.map → throttle-TFY-V41R.mjs.map} +1 -1
  55. package/dist/types/index.cjs +1 -1
  56. package/dist/types/index.d.cts +2 -2
  57. package/dist/types/index.d.mts +3 -5
  58. package/dist/types/index.mjs +1 -1
  59. package/dist/upload/index.cjs +1 -1
  60. package/dist/upload/index.d.cts +1 -1
  61. package/dist/upload/index.d.mts +4 -4
  62. package/dist/upload/index.mjs +1 -1
  63. package/dist/{upload-strategy-negotiator-DfiQ0Fy0.cjs → upload-strategy-negotiator-5da9ZySO.cjs} +1 -1
  64. package/dist/{upload-strategy-negotiator-BuxPf1sa.mjs → upload-strategy-negotiator-ChKvppnA.mjs} +2 -2
  65. package/dist/{upload-strategy-negotiator-BuxPf1sa.mjs.map → upload-strategy-negotiator-ChKvppnA.mjs.map} +1 -1
  66. package/dist/upload-strategy-negotiator-EmOrc2bn.d.cts +455 -0
  67. package/dist/upload-strategy-negotiator-EmOrc2bn.d.cts.map +1 -0
  68. package/dist/upload-strategy-negotiator-a2O28qPf.d.mts +455 -0
  69. package/dist/upload-strategy-negotiator-a2O28qPf.d.mts.map +1 -0
  70. package/dist/{uploadista-error-B-geDgi8.cjs → uploadista-error-CZx1JU_L.cjs} +3 -1
  71. package/dist/{uploadista-error-Fsfvr2Bb.mjs → uploadista-error-DQ7V1FlX.mjs} +3 -1
  72. package/dist/uploadista-error-DQ7V1FlX.mjs.map +1 -0
  73. package/dist/{uploadista-error-BragVhIs.d.mts → uploadista-error-LtiZn-R_.d.mts} +2 -2
  74. package/dist/{uploadista-error-BragVhIs.d.mts.map → uploadista-error-LtiZn-R_.d.mts.map} +1 -1
  75. package/dist/{uploadista-error-Cj_pAFck.d.cts → uploadista-error-eZtG4iyf.d.cts} +2 -2
  76. package/dist/{uploadista-error-Cj_pAFck.d.cts.map → uploadista-error-eZtG4iyf.d.cts.map} +1 -1
  77. package/dist/utils/index.cjs +1 -1
  78. package/dist/utils/index.d.cts +2 -1
  79. package/dist/utils/index.d.mts +3 -2
  80. package/dist/utils/index.mjs +1 -1
  81. package/dist/websocket-Br0ijEZA.cjs +1 -0
  82. package/dist/websocket-DftnHFfN.mjs +2 -0
  83. package/dist/websocket-DftnHFfN.mjs.map +1 -0
  84. package/package.json +3 -3
  85. package/src/errors/uploadista-error.ts +11 -1
  86. package/src/flow/README.md +115 -0
  87. package/src/flow/flow-engine.ts +34 -2
  88. package/src/flow/flow-queue-store.ts +155 -0
  89. package/src/flow/flow-queue.ts +640 -0
  90. package/src/flow/index.ts +4 -0
  91. package/src/flow/types/flow-queue-item.ts +154 -0
  92. package/src/types/kv-store.ts +31 -1
  93. package/tests/flow-queue-store.test.ts +150 -0
  94. package/tests/flow-queue.test.ts +308 -0
  95. package/dist/resolve-upload-metadata-BUVl1LoS.d.cts +0 -8723
  96. package/dist/resolve-upload-metadata-BUVl1LoS.d.cts.map +0 -1
  97. package/dist/resolve-upload-metadata-MPDmDfOZ.d.mts +0 -8723
  98. package/dist/resolve-upload-metadata-MPDmDfOZ.d.mts.map +0 -1
  99. package/dist/run-args-WD1otVrz.mjs +0 -2
  100. package/dist/run-args-WD1otVrz.mjs.map +0 -1
  101. package/dist/run-args-g74p8pEZ.cjs +0 -1
  102. package/dist/throttle-Da0OA8JT.d.cts.map +0 -1
  103. package/dist/throttle-ibiT6E4U.d.mts.map +0 -1
  104. package/dist/uploadista-error-Fsfvr2Bb.mjs.map +0 -1
  105. package/dist/websocket-Avz4T8YB.cjs +0 -1
  106. package/dist/websocket-CdgVhVJs.mjs +0 -2
  107. package/dist/websocket-CdgVhVJs.mjs.map +0 -1
@@ -1 +0,0 @@
1
- const e=require(`./websocket-Avz4T8YB.cjs`),t=require(`./uploadista-error-B-geDgi8.cjs`),n=require(`./upload-strategy-negotiator-DfiQ0Fy0.cjs`);let r=require(`effect`),i=require(`zod`),a=require(`micromustache`);const o={enabled:!1,failureThreshold:5,resetTimeout:3e4,halfOpenRequests:3,windowDuration:6e4,fallback:{type:`fail`}},s=`uploadista:circuit-breaker:`;function c(t){let n=e=>`${s}${e}`,i=i=>r.Effect.gen(function*(){let r=n(i),a=yield*t.get(r);if(a===null)return null;try{return e.F.deserialize(a)}catch{return yield*t.delete(r),null}}),a=(r,i)=>{let a=n(r),o=e.F.serialize(i);return t.set(a,o)};return{getState:i,setState:a,incrementFailures:(t,n)=>r.Effect.gen(function*(){let r=Date.now(),o=yield*i(t);return o===null&&(o=e.R({failureThreshold:5,resetTimeout:3e4,halfOpenRequests:3,windowDuration:n})),o=r-o.windowStart>n?{...o,failureCount:1,windowStart:r}:{...o,failureCount:o.failureCount+1},yield*a(t,o),o.failureCount}),resetFailures:e=>r.Effect.gen(function*(){let t=yield*i(e);t!==null&&(yield*a(e,{...t,failureCount:0,windowStart:Date.now()}))}),incrementHalfOpenSuccesses:e=>r.Effect.gen(function*(){let t=yield*i(e);if(t===null)return 1;let n={...t,halfOpenSuccesses:t.halfOpenSuccesses+1};return yield*a(e,n),n.halfOpenSuccesses}),getAllStats:()=>r.Effect.gen(function*(){let e=new Map;if(!t.list)return e;let n=yield*t.list(s),r=Date.now();for(let t of n){let n=t,a=yield*i(n);if(a!==null){let t=r-a.lastStateChange;e.set(n,{nodeType:n,state:a.state,failureCount:a.failureCount,halfOpenSuccesses:a.halfOpenSuccesses,timeSinceLastStateChange:t,timeUntilHalfOpen:a.state===`open`?Math.max(0,a.config.resetTimeout-t):void 0})}}return e}),delete:e=>t.delete(n(e))}}function l(){let t=new Map;return{getState:e=>r.Effect.succeed(t.get(e)??null),setState:(e,n)=>r.Effect.sync(()=>{t.set(e,n)}),incrementFailures:(n,i)=>r.Effect.sync(()=>{let r=Date.now(),a=t.get(n);return a===void 
0&&(a=e.R({failureThreshold:5,resetTimeout:3e4,halfOpenRequests:3,windowDuration:i})),a=r-a.windowStart>i?{...a,failureCount:1,windowStart:r}:{...a,failureCount:a.failureCount+1},t.set(n,a),a.failureCount}),resetFailures:e=>r.Effect.sync(()=>{let n=t.get(e);n!==void 0&&t.set(e,{...n,failureCount:0,windowStart:Date.now()})}),incrementHalfOpenSuccesses:e=>r.Effect.sync(()=>{let n=t.get(e);if(n===void 0)return 1;let r={...n,halfOpenSuccesses:n.halfOpenSuccesses+1};return t.set(e,r),r.halfOpenSuccesses}),getAllStats:()=>r.Effect.sync(()=>{let e=new Map,n=Date.now();for(let[r,i]of t){let t=n-i.lastStateChange;e.set(r,{nodeType:r,state:i.state,failureCount:i.failureCount,halfOpenSuccesses:i.halfOpenSuccesses,timeSinceLastStateChange:t,timeUntilHalfOpen:i.state===`open`?Math.max(0,i.config.resetTimeout-t):void 0})}return e}),delete:e=>r.Effect.sync(()=>{t.delete(e)})}}const u=r.Layer.effect(e.L,r.Effect.gen(function*(){return c(yield*e.O)})),d=r.Layer.succeed(e.L,l());var f=class{eventHandler;nodeType;config;store;constructor(e,t,n){this.nodeType=e,this.config={enabled:t.enabled??o.enabled,failureThreshold:t.failureThreshold??o.failureThreshold,resetTimeout:t.resetTimeout??o.resetTimeout,halfOpenRequests:t.halfOpenRequests??o.halfOpenRequests,windowDuration:t.windowDuration??o.windowDuration,fallback:t.fallback??o.fallback},this.store=n}setEventHandler(e){this.eventHandler=e}allowRequest(){let t=this;return r.Effect.gen(function*(){if(!t.config.enabled)return{allowed:!0,state:`closed`,failureCount:0};let n=yield*t.store.getState(t.nodeType),r=Date.now();if(n===null&&(n=e.R({failureThreshold:t.config.failureThreshold,resetTimeout:t.config.resetTimeout,halfOpenRequests:t.config.halfOpenRequests,windowDuration:t.config.windowDuration}),yield*t.store.setState(t.nodeType,n)),n.state===`open`&&r-n.lastStateChange>=t.config.resetTimeout){let 
e=n.state;n={...n,state:`half-open`,halfOpenSuccesses:0,lastStateChange:r},yield*t.store.setState(t.nodeType,n),yield*t.emitEvent(e,`half-open`,n.failureCount)}return{allowed:n.state!==`open`,state:n.state,failureCount:n.failureCount}})}getState(){let e=this;return r.Effect.gen(function*(){return(yield*e.store.getState(e.nodeType))?.state??`closed`})}getFailureCount(){let e=this;return r.Effect.gen(function*(){return(yield*e.store.getState(e.nodeType))?.failureCount??0})}recordSuccess(){let e=this;return r.Effect.gen(function*(){if(!e.config.enabled)return;let t=yield*e.store.getState(e.nodeType);t!==null&&(t.state===`half-open`?(yield*e.store.incrementHalfOpenSuccesses(e.nodeType))>=e.config.halfOpenRequests&&(yield*e.transitionTo(`closed`,t.failureCount)):t.state===`closed`&&(yield*e.store.resetFailures(e.nodeType)))})}recordFailure(e){let t=this;return r.Effect.gen(function*(){if(!t.config.enabled)return;let e=yield*t.store.getState(t.nodeType);if(e===null||e.state===`closed`){let e=yield*t.store.incrementFailures(t.nodeType,t.config.windowDuration);e>=t.config.failureThreshold&&(yield*t.transitionTo(`open`,e))}else e.state===`half-open`&&(yield*t.transitionTo(`open`,e.failureCount))})}getFallback(){return this.config.fallback}reset(){let t=this;return r.Effect.gen(function*(){let n=(yield*t.store.getState(t.nodeType))?.state??`closed`;yield*t.store.setState(t.nodeType,e.R({failureThreshold:t.config.failureThreshold,resetTimeout:t.config.resetTimeout,halfOpenRequests:t.config.halfOpenRequests,windowDuration:t.config.windowDuration})),n!==`closed`&&(yield*t.emitEvent(n,`closed`,0))})}transitionTo(e,t){let n=this;return r.Effect.gen(function*(){let r=yield*n.store.getState(n.nodeType),i=r?.state??`closed`;if(i===e)return;let 
a=Date.now(),o={state:e,failureCount:e===`closed`?0:t,lastStateChange:a,halfOpenSuccesses:0,windowStart:e===`closed`?a:r?.windowStart??a,config:{failureThreshold:n.config.failureThreshold,resetTimeout:n.config.resetTimeout,halfOpenRequests:n.config.halfOpenRequests,windowDuration:n.config.windowDuration}};yield*n.store.setState(n.nodeType,o),yield*n.emitEvent(i,e,t)})}emitEvent(e,t,n){let i=this;return r.Effect.gen(function*(){i.eventHandler&&(yield*i.eventHandler({nodeType:i.nodeType,previousState:e,newState:t,timestamp:Date.now(),failureCount:n}))})}},p=class{breakers=new Map;eventHandler;constructor(e){this.store=e}setEventHandler(e){this.eventHandler=e;for(let t of this.breakers.values())t.setEventHandler(e)}getOrCreate(e,t){let n=this.breakers.get(e);return n||(n=new f(e,t,this.store),this.eventHandler&&n.setEventHandler(this.eventHandler),this.breakers.set(e,n)),n}get(e){return this.breakers.get(e)}getAllStats(){return this.store.getAllStats()}resetAll(){let e=this;return r.Effect.gen(function*(){for(let t of e.breakers.values())yield*t.reset()})}clear(){this.breakers.clear()}};function m({source:e,target:t,sourcePort:n,targetPort:r}){return{source:e,target:t,sourcePort:n,targetPort:r}}let h=function(e){return e.JobStart=`job-start`,e.JobEnd=`job-end`,e.FlowStart=`flow-start`,e.FlowEnd=`flow-end`,e.FlowError=`flow-error`,e.FlowPause=`flow-pause`,e.FlowCancel=`flow-cancel`,e.NodeStart=`node-start`,e.NodeEnd=`node-end`,e.NodePause=`node-pause`,e.NodeResume=`node-resume`,e.NodeError=`node-error`,e.NodeStream=`node-stream`,e.NodeResponse=`node-response`,e.DlqItemAdded=`dlq-item-added`,e.DlqRetryStart=`dlq-retry-start`,e.DlqRetrySuccess=`dlq-retry-success`,e.DlqRetryFailed=`dlq-retry-failed`,e.DlqItemExhausted=`dlq-item-exhausted`,e.DlqItemResolved=`dlq-item-resolved`,e}({});var g=class{types;constructor(){this.types=new Map}register(e){if(this.types.has(e.id))throw t.n.fromCode(`VALIDATION_ERROR`,{body:`Input type "${e.id}" is already registered. 
Types cannot be modified or re-registered.`,details:{typeId:e.id}});this.types.set(e.id,e)}get(e){return this.types.get(e)}list(){return Array.from(this.types.values())}validate(e,n){let r=this.types.get(e);if(!r)return{success:!1,error:t.n.fromCode(`VALIDATION_ERROR`,{body:`Input type "${e}" is not registered`,details:{typeId:e}})};try{return{success:!0,data:r.schema.parse(n)}}catch(n){return{success:!1,error:t.n.fromCode(`VALIDATION_ERROR`,{body:`Data validation failed for input type "${e}"`,cause:n,details:{typeId:e,validationErrors:n}})}}}has(e){return this.types.has(e)}size(){return this.types.size}};const _=new g;function v(e,t){return _.validate(e,t)}var y=class{types;constructor(){this.types=new Map}register(e){if(this.types.has(e.id))throw t.n.fromCode(`VALIDATION_ERROR`,{body:`Output type "${e.id}" is already registered. Types cannot be modified or re-registered.`,details:{typeId:e.id}});this.types.set(e.id,e)}get(e){return this.types.get(e)}list(){return Array.from(this.types.values())}validate(e,n){let r=this.types.get(e);if(!r)return{success:!1,error:t.n.fromCode(`VALIDATION_ERROR`,{body:`Output type "${e}" is not registered`,details:{typeId:e}})};try{return{success:!0,data:r.schema.parse(n)}}catch(n){return{success:!1,error:t.n.fromCode(`VALIDATION_ERROR`,{body:`Data validation failed for output type "${e}"`,cause:n,details:{typeId:e,validationErrors:n}})}}}has(e){return this.types.has(e)}size(){return this.types.size}};const b=new y;function x(e,t){return b.validate(e,t)}let S=function(e){return e.input=`input`,e.process=`process`,e.conditional=`conditional`,e.multiplex=`multiplex`,e.merge=`merge`,e}({});function C({id:e,name:n,description:i,type:a,inputSchema:o,outputSchema:s,run:c,condition:l,multiInput:u=!1,multiOutput:d=!1,pausable:f=!1,retry:p,inputTypeId:m,outputTypeId:h,keepOutput:g=!1,circuitBreaker:v,nodeTypeId:y}){return r.Effect.gen(function*(){return m&&!_.get(m)?yield*t.n.fromCode(`INVALID_INPUT_TYPE`,{body:`Input type "${m}" is not 
registered in inputTypeRegistry`,details:{inputTypeId:m,nodeId:e}}).toEffect():h&&!b.get(h)?yield*t.n.fromCode(`INVALID_OUTPUT_TYPE`,{body:`Output type "${h}" is not registered in outputTypeRegistry`,details:{outputTypeId:h,nodeId:e}}).toEffect():{id:e,name:n,description:i,type:a,inputTypeId:m,outputTypeId:h,keepOutput:g,inputSchema:o,outputSchema:s,pausable:f,run:({data:i,jobId:a,flowId:l,storageId:u,clientId:d})=>r.Effect.gen(function*(){let f=yield*c({data:yield*r.Effect.try({try:()=>o.parse(i),catch:r=>{let i=r instanceof Error?r.message:String(r);return t.n.fromCode(`FLOW_INPUT_VALIDATION_ERROR`,{body:`Node '${n}' (${e}) input validation failed: ${i}`,cause:r})}}),jobId:a,storageId:u,flowId:l,clientId:d});return f.type===`waiting`?{type:`waiting`,partialData:f.partialData,nodeType:h,nodeId:e}:{type:`complete`,data:yield*r.Effect.try({try:()=>s.parse(f.data),catch:r=>{let i=r instanceof Error?r.message:String(r);return t.n.fromCode(`FLOW_OUTPUT_VALIDATION_ERROR`,{body:`Node '${n}' (${e}) output validation failed: ${i}`,cause:r})}}),nodeType:h,nodeId:e}}),condition:l,multiInput:u,multiOutput:d,retry:p,circuitBreaker:v,nodeTypeId:y}})}const w=e=>({id:e.id,name:e.name,description:e.description,type:e.type,inputTypeId:e.inputTypeId,outputTypeId:e.outputTypeId,nodeTypeId:e.nodeTypeId}),T=e=>t=>t.nodeType===e,E=T(`storage-output-v1`),D=T(`streaming-input-v1`),O=e=>({type:`complete`,data:e}),k=e=>({type:`waiting`,partialData:e}),A=(e,t)=>{if(e===t)return!0;try{return!!(e&&t&&typeof e==`object`&&typeof t==`object`)}catch{return!0}};var ee=class{typeChecker;constructor(e=A){this.typeChecker=e}validateConnection(e,t,n){return this.getCompatibleTypes(e.outputSchema,t.inputSchema)}getCompatibleTypes(e,t){return this.typeChecker(e,t)}validateFlow(e,t){let n=[],r=new Map(e.map(e=>[e.id,e]));for(let e of t){let t=r.get(e.source),i=r.get(e.target);if(!t){n.push(`Source node ${e.source} not found`);continue}if(!i){n.push(`Target node ${e.target} not 
found`);continue}this.validateConnection(t,i,e)||n.push(`Schema mismatch: ${t.id} output schema incompatible with ${i.id} input schema`)}return{isValid:n.length===0,errors:n}}getExpectedInputSchemas(e,t,n){let r=new Map(t.map(e=>[e.id,e])),i={};for(let t of n)if(t.target===e){let e=r.get(t.source);if(e){let n=t.sourcePort||t.source;i[n]=e.outputSchema}}return i}getActualOutputSchemas(e,t,n){let r=new Map(t.map(e=>[e.id,e])),i={};for(let t of n)if(t.source===e){let e=r.get(t.target);if(e){let n=t.targetPort||t.target;i[n]=e.inputSchema}}return i}validateData(e,t){try{return t.parse(e),{isValid:!0,errors:[]}}catch(e){return e instanceof Error&&`errors`in e?{isValid:!1,errors:e.errors.map(e=>`${e.path.join(`.`)}: ${e.message}`)}:{isValid:!1,errors:[e instanceof Error?e.message:`Validation failed`]}}}};function j(e){if(!e)return{type:``,fileName:``,metadata:void 0,metadataJson:void 0};let t={...e},n=String(t.type||t.mimeType||t[`content-type`]||``);n&&(t.type||=n,t.mimeType||=n);let r=String(t.fileName||t.originalName||t.name||``);return r&&(t.fileName||=r,t.originalName||=r,t.name||=r),{type:n,fileName:r,metadata:t,metadataJson:JSON.stringify(t)}}const M=i.z.object({operation:i.z.literal(`init`),storageId:i.z.string(),metadata:i.z.record(i.z.string(),i.z.any()).optional()}),N=i.z.object({operation:i.z.literal(`finalize`),uploadId:i.z.string()}),P=i.z.object({operation:i.z.literal(`url`),url:i.z.string(),storageId:i.z.string().optional(),metadata:i.z.record(i.z.string(),i.z.any()).optional()}),F=i.z.union([M,N,P]),te=i.z.object({allowedMimeTypes:i.z.array(i.z.string()).optional(),minSize:i.z.number().positive().optional(),maxSize:i.z.number().positive().optional()});function ne(e,n){return r.Effect.gen(function*(){if(n){if(n.allowedMimeTypes&&n.allowedMimeTypes.length>0&&!n.allowedMimeTypes.some(t=>{if(t.endsWith(`/*`)){let n=t.slice(0,-2);return e.type.startsWith(n)}return e.type===t}))throw yield*t.n.fromCode(`VALIDATION_ERROR`,{cause:Error(`File type "${e.type}" is 
not allowed. Allowed types: ${n.allowedMimeTypes.join(`, `)}`)}).toEffect();if(n.minSize!==void 0&&e.size<n.minSize)throw yield*t.n.fromCode(`VALIDATION_ERROR`,{cause:Error(`File size (${e.size} bytes) is below minimum (${n.minSize} bytes)`)}).toEffect();if(n.maxSize!==void 0&&e.size>n.maxSize)throw yield*t.n.fromCode(`VALIDATION_ERROR`,{cause:Error(`File size (${e.size} bytes) exceeds maximum (${n.maxSize} bytes)`)}).toEffect()}})}function re(i,a,o){let s=o?.keepOutput??!1;return r.Effect.gen(function*(){let o=yield*n.n;return yield*C({id:i,name:`Input`,description:`Handles file input through multiple methods - streaming upload (init/finalize) or direct URL fetch`,type:S.input,nodeTypeId:`input`,inputSchema:F,outputSchema:e.D,keepOutput:s,inputTypeId:z,outputTypeId:I,run:({data:e,flowId:s,jobId:c,clientId:l})=>r.Effect.gen(function*(){switch(e.operation){case`init`:{let t={storageId:e.storageId,size:e.metadata?.size||0,type:e.metadata?.mimeType||`application/octet-stream`,fileName:e.metadata?.originalName,lastModified:e.metadata?.size?Date.now():void 0,metadata:e.metadata?JSON.stringify(e.metadata):void 0,flow:{flowId:s,nodeId:i,jobId:c}};return k(yield*o.createUpload(t,l))}case`finalize`:{let t=yield*o.getUpload(e.uploadId),{type:n}=j(t.metadata);return yield*ne({type:n,size:t.size||0},a),O(t)}case`url`:{let t=yield*n.o(e.url),r=yield*n.a(t),u=e.metadata?.mimeType||t.headers.get(`content-type`)||`application/octet-stream`,d=e.metadata?.size||Number(t.headers.get(`content-length`)||0),f=e.metadata?.originalName||e.url.split(`/`).pop()||`file`;yield*ne({type:u,size:d},a);let p=new ReadableStream({start(e){e.enqueue(new Uint8Array(r)),e.close()}}),m={storageId:e.storageId||`buffer`,size:d,type:u,fileName:f,lastModified:Date.now(),metadata:e.metadata?JSON.stringify(e.metadata):void 0};return O({...yield*o.upload(m,l,p),flow:{flowId:s,nodeId:i,jobId:c}})}default:throw yield*t.n.fromCode(`VALIDATION_ERROR`,{cause:Error(`Invalid operation`)}).toEffect()}})})})}const 
I=`storage-output-v1`,L=`ocr-output-v1`,R=`image-description-output-v1`,z=`streaming-input-v1`,ie=i.z.object({extractedText:i.z.string(),format:i.z.enum([`markdown`,`plain`,`structured`]),taskType:i.z.enum([`convertToMarkdown`,`freeOcr`,`parseFigure`,`locateObject`]),confidence:i.z.number().min(0).max(1).optional()}),ae=i.z.object({description:i.z.string(),confidence:i.z.number().min(0).max(1).optional(),metadata:i.z.record(i.z.string(),i.z.unknown()).optional()});_.register({id:z,schema:F,version:`1.0.0`,description:`Streaming file input with init/finalize/url operations for flexible file ingestion`}),b.register({id:I,schema:e.D,version:`1.0.0`,description:`Storage output node that saves files to configured storage backend`}),b.register({id:L,schema:ie,version:`1.0.0`,description:`OCR output node that extracts structured text from documents using AI`}),b.register({id:R,schema:ae,version:`1.0.0`,description:`Image description output node that generates AI-powered descriptions of images`});const B={enabled:!0,maxRetries:3,backoff:{type:`exponential`,initialDelayMs:1e3,maxDelayMs:3e5,multiplier:2,jitter:!0},ttlMs:6048e5};function V(e,t){switch(e.type){case`immediate`:return 0;case`fixed`:return e.delayMs;case`exponential`:{let n=e.initialDelayMs*e.multiplier**t,r=Math.min(n,e.maxDelayMs);if(e.jitter){let e=.5+Math.random();return Math.floor(r*e)}return r}default:return 0}}function oe(e,t){return!t.enabled||t.nonRetryableErrors?.includes(e)?!1:t.retryableErrors&&t.retryableErrors.length>0?t.retryableErrors.includes(e):!0}function se(e,t){if(!(t===void 0||t<=0))return new Date(e.getTime()+t)}var H=class e extends r.Context.Tag(`DeadLetterQueueService`)(){static optional=r.Effect.serviceOption(e)};function ce(){return r.Effect.gen(function*(){let t=yield*e.k,n=()=>`dlq_${crypto.randomUUID()}`,i=e=>({...e,createdAt:new Date(e.createdAt),updatedAt:new Date(e.updatedAt),expiresAt:e.expiresAt?new Date(e.expiresAt):void 0,nextRetryAt:e.nextRetryAt?new 
Date(e.nextRetryAt):void 0,retryHistory:e.retryHistory.map(e=>({...e,attemptedAt:new Date(e.attemptedAt)}))}),a=()=>r.Effect.gen(function*(){if(!t.list)return[];let e=yield*t.list(),n=[];for(let a of e){let e=yield*r.Effect.catchAll(t.get(a),()=>r.Effect.succeed(null));e&&n.push(i(e))}return n});return{add:(e,i,a=B)=>r.Effect.gen(function*(){let r=n(),o=new Date,s={code:i.code||`UNKNOWN_ERROR`,message:i.body||i.message||`Unknown error`,nodeId:void 0,stack:i.stack},c=e.tasks.find(e=>e.status===`failed`);c&&(s.nodeId=c.nodeId);let l={};for(let t of e.tasks)t.result!==void 0&&(l[t.nodeId]=t.result);let u=oe(s.code,a),d;if(a.enabled&&u&&a.maxRetries>0){let e=V(a.backoff,0);d=new Date(o.getTime()+e)}let f={id:r,jobId:e.id,flowId:e.flowId,storageId:e.storageId,clientId:e.clientId,error:s,inputs:e.executionState?.inputs||{},nodeResults:l,failedAtNodeId:s.nodeId,retryCount:0,maxRetries:a.maxRetries,nextRetryAt:d,retryHistory:[],createdAt:o,updatedAt:o,expiresAt:se(o,a.ttlMs),status:u&&a.enabled?`pending`:`exhausted`};return yield*t.set(r,f),f}),get:e=>r.Effect.gen(function*(){return i(yield*t.get(e))}),getOption:e=>r.Effect.gen(function*(){let n=yield*r.Effect.either(t.get(e));return n._tag===`Left`?n.left.code===`FILE_NOT_FOUND`?r.Option.none():yield*r.Effect.fail(n.left):r.Option.some(i(n.right))}),delete:e=>t.delete(e),list:(e={})=>r.Effect.gen(function*(){let t=yield*a(),{status:n,flowId:r,clientId:i,limit:o=50,offset:s=0}=e,c=t;n&&(c=c.filter(e=>e.status===n)),r&&(c=c.filter(e=>e.flowId===r)),i&&(c=c.filter(e=>e.clientId===i)),c.sort((e,t)=>t.createdAt.getTime()-e.createdAt.getTime());let l=c.length;return{items:c.slice(s,s+o),total:l}}),update:(e,n)=>r.Effect.gen(function*(){let r={...i(yield*t.get(e)),...n,updatedAt:new Date};return yield*t.set(e,r),r}),markRetrying:e=>r.Effect.gen(function*(){let n={...i(yield*t.get(e)),status:`retrying`,updatedAt:new Date};return yield*t.set(e,n),n}),recordRetryFailure:(e,n,a)=>r.Effect.gen(function*(){let 
r=i(yield*t.get(e)),o=new Date,s=r.retryCount+1,c=[...r.retryHistory,{attemptedAt:o,error:n,durationMs:a}],l=`pending`,u;if(s>=r.maxRetries)l=`exhausted`,u=void 0;else{let e=V(B.backoff,s);u=new Date(o.getTime()+e)}let d={...r,retryCount:s,retryHistory:c,status:l,nextRetryAt:u,updatedAt:o};return yield*t.set(e,d),d}),markResolved:e=>r.Effect.gen(function*(){let n={...i(yield*t.get(e)),status:`resolved`,nextRetryAt:void 0,updatedAt:new Date};return yield*t.set(e,n),n}),getScheduledRetries:(e=100)=>r.Effect.gen(function*(){let t=yield*a(),n=new Date;return t.filter(e=>e.status===`pending`&&e.nextRetryAt&&e.nextRetryAt<=n).sort((e,t)=>(e.nextRetryAt?.getTime()||0)-(t.nextRetryAt?.getTime()||0)).slice(0,e)}),cleanup:(e={})=>r.Effect.gen(function*(){let n=yield*a(),{olderThan:i,status:o}=e,s=new Date,c=0;for(let e of n){let n=!1;e.expiresAt&&e.expiresAt<=s&&(n=!0),i&&e.createdAt<=i&&(o?n=e.status===o:(e.status===`exhausted`||e.status===`resolved`)&&(n=!0)),n&&(yield*r.Effect.catchAll(t.delete(e.id),()=>r.Effect.succeed(void 0)),c++)}return{deleted:c}}),getStats:()=>r.Effect.gen(function*(){let e=yield*a(),t={pending:0,retrying:0,exhausted:0,resolved:0},n={},r,i=0;for(let a of e)t[a.status]++,n[a.flowId]=(n[a.flowId]||0)+1,(!r||a.createdAt<r)&&(r=a.createdAt),i+=a.retryCount;let o=e.length>0?i/e.length:0;return{totalItems:e.length,byStatus:t,byFlow:n,oldestItem:r,averageRetryCount:o}})}})}const le=r.Layer.effect(H,ce());var U=class{maxConcurrency;constructor(e={}){this.maxConcurrency=e.maxConcurrency??4}groupNodesByExecutionLevel(e,t){let n={},r={};e.forEach(e=>{n[e.id]=[],r[e.id]=0}),t.forEach(e=>{n[e.source]?.push(e.target),r[e.target]=(r[e.target]||0)+1});let i=[],a=new Set,o=0;for(;a.size<e.length;){let e=Object.keys(r).filter(e=>r[e]===0&&!a.has(e));if(e.length===0)throw Error(`Cycle detected in flow graph - cannot execute in parallel`);i.push({level:o++,nodes:e}),e.forEach(e=>{a.add(e),delete r[e],n[e]?.forEach(e=>{r[e]!==void 0&&r[e]--})})}return 
i}executeNodesInParallel(e){return r.Effect.all(e.map(e=>e()),{concurrency:this.maxConcurrency})}canExecuteInParallel(e,t,n){return e.every(e=>(n[e]||[]).every(e=>t.has(e)))}getStats(){return{maxConcurrency:this.maxConcurrency}}};function W(e){return t=>{if(t.nodeType!==e)return!1;let n=b.get(e);return n?n.schema.safeParse(t.data).success:!1}}function G(t){return!t||typeof t!=`object`?!1:e.D.safeParse(t).success}const ue=W(`storage-output-v1`),de=W(L),fe=W(R);function K(e,t){return e.filter(t)}function pe(e,n){return r.Effect.gen(function*(){let r=K(e,n);return r.length===0?yield*t.n.fromCode(`OUTPUT_NOT_FOUND`,{body:`No output of the specified type was found in the flow results`}).toEffect():r.length>1?yield*t.n.fromCode(`MULTIPLE_OUTPUTS_FOUND`,{body:`Found ${r.length} outputs of the specified type, expected exactly one`,details:{foundCount:r.length,nodeIds:r.map(e=>e.nodeId)}}).toEffect():r[0]})}function me(e,t){return K(e,t)[0]}function he(e,t){return e.find(e=>e.nodeId===t)}function ge(e,t){return e.some(t)}function _e(e){return e.operation===`init`}function ve(e){return e.operation===`finalize`}function ye(e){return e.operation===`url`}function be(e){return e.operation===`init`||e.operation===`url`}const xe=e=>({id:e.id,name:e.name,nodes:e.nodes.map(w),edges:e.edges});function q(n){return r.Effect.gen(function*(){let a=yield*r.Effect.all(n.nodes.map(e=>r.Effect.isEffect(e)?e:r.Effect.succeed(e))),{flowId:o,name:s,onEvent:c,checkJobStatus:l,edges:u,inputSchema:d,outputSchema:f,typeChecker:m,circuitBreaker:g}=n,_=a,v=new ee(m),y=e=>{let t=e.circuitBreaker,n=e.nodeTypeId?g?.nodeTypeOverrides?.[e.nodeTypeId]:void 0,r=g?.defaults;if(!(!t&&!n&&!r))return{...r,...n,...t}},b=()=>{let e={},t={},n={};return _.forEach(r=>{e[r.id]=[],n[r.id]=[],t[r.id]=0}),u.forEach(r=>{e[r.source]?.push(r.target),n[r.target]?.push(r.source),t[r.target]=(t[r.target]||0)+1}),{graph:e,reverseGraph:n,inDegree:t}},x=new Map;u.forEach(e=>{let 
t=x.get(e.target)||[];t.push({source:e.source,sourcePort:e.sourcePort,targetPort:e.targetPort}),x.set(e.target,t)});let S=new Map,C=()=>{let{graph:e,inDegree:t}=b(),n=[],r=[];for(Object.keys(t).forEach(e=>{t[e]===0&&n.push(e)});n.length>0;){let i=n.shift();if(!i)throw Error(`No current node found`);r.push(i),e[i]?.forEach(e=>{t[e]=(t[e]||0)-1,t[e]===0&&n.push(e)})}return r},w=(e,t)=>{if(!e.condition)return r.Effect.succeed(!0);let{field:n,operator:i,value:a}=e.condition,o=t,s=o?.metadata?.[n]||o?.[n],c=(()=>{switch(i){case`equals`:return s===a;case`notEquals`:return s!==a;case`greaterThan`:return Number(s)>Number(a);case`lessThan`:return Number(s)<Number(a);case`contains`:return String(s).includes(String(a));case`startsWith`:return String(s).startsWith(String(a));default:return!0}})();return r.Effect.succeed(c)},T=(e,t)=>{let n=x.get(e)||[],r={};return n.forEach(e=>{let n=e.source,i=t.get(n);if(i!==void 0){if(_.find(e=>e.id===n)?.type===`conditional`&&e.sourcePort){let t=S.get(n)?`true`:`false`;if(e.sourcePort!==t)return}r[n]=i}}),r},E=(e,t,n)=>{let r=x.get(e)||[];if(_.find(t=>t.id===e)?.type===`input`||r.length===0)return`execute`;let i=!1,a=!1,o=!1,s=!1;for(let e of r){let r=e.source;if(n.has(r)){a=!0;continue}if(t.get(r)===void 0){s=!0;continue}if(_.find(e=>e.id===r)?.type===`conditional`&&e.sourcePort){let t=S.get(r);if(t===void 0){s=!0;continue}let n=t?`true`:`false`;e.sourcePort===n?o=!0:i=!0}else o=!0}return o?`execute`:(i||a)&&!s?`skip`:s?`wait`:`execute`},D=e=>{let t=_.filter(e=>e.type===`input`),n={};return t.forEach(t=>{e&&typeof e==`object`&&t.id in e&&(n[t.id]=d.parse(e[t.id]))}),n},O=e=>!u.some(t=>t.source===e),k=e=>{let t=_.find(t=>t.id===e);return O(e)||t?.keepOutput===!0},A=e=>{let t=_.filter(e=>k(e.id)),n={};return t.forEach(t=>{let r=e.get(t.id);r!==void 0&&(n[t.id]=r)}),n},j=(e,t)=>{let n=_.filter(e=>k(e.id)),r=[];return n.forEach(n=>{let i=e.get(n.id);if(i!==void 0){let e=t.get(n.id);r.push({nodeId:n.id,nodeType:e,data:i,timestamp:new 
Date().toISOString()})}}),r},M=(t,n,i)=>r.Effect.gen(function*(){if(t.storage.id===n)return t;let a=yield*e.S,o=yield*a.getDataStore(t.storage.id,i),s=yield*a.getDataStore(n,i),c=yield*o.read(t.id),l=r.Stream.make(c),u={...t,storage:{id:n,type:t.storage.type}},d=yield*s.create(u);return yield*s.write({file_id:d.id,stream:l,offset:0},{}),d}),N=(e,i,a,s,u,d,f,p)=>r.Effect.gen(function*(){let m=u.get(e);if(!m)return yield*t.n.fromCode(`FLOW_NODE_NOT_FOUND`).toEffect();if(l){let e=yield*l(d);if(e===`paused`)return yield*t.n.fromCode(`FLOW_PAUSED`,{cause:`Flow ${o} was paused by user at job ${d}`}).toEffect();if(e===`cancelled`)return yield*t.n.fromCode(`FLOW_CANCELLED`,{cause:`Flow ${o} was cancelled by user at job ${d}`}).toEffect()}c&&(yield*c({jobId:d,flowId:o,nodeId:e,eventType:h.NodeStart,nodeName:m.name,nodeType:m.type}));let g=m.retry?.maxRetries??0,_=m.retry?.retryDelay??1e3,v=m.retry?.exponentialBackoff??!0,b=y(m),x=b?.enabled&&m.nodeTypeId&&p?p.getOrCreate(m.nodeTypeId,b):null;if(x){let{allowed:n,state:i,failureCount:s}=yield*x.allowRequest();if(!n){let n=x.getFallback();return yield*r.Effect.logWarning(`Circuit breaker OPEN for node type "${m.nodeTypeId}" - applying fallback`),n.type===`skip`?(c&&(yield*c({jobId:d,flowId:o,nodeId:e,eventType:h.NodeEnd,nodeName:m.name})),{nodeId:e,result:a[e],success:!0,waiting:!1}):n.type===`default`?(c&&(yield*c({jobId:d,flowId:o,nodeId:e,eventType:h.NodeEnd,nodeName:m.name,result:n.value})),{nodeId:e,result:n.value,success:!0,waiting:!1}):yield*t.n.fromCode(`CIRCUIT_BREAKER_OPEN`,{body:`Circuit breaker is open for node type "${m.name}"`,details:{nodeType:m.name,nodeId:e,state:i,failureCount:s}}).toEffect()}}let C=0,E=null;for(;C<=g;)try{let l,u={};if(m.type===`input`){if(l=a[e],l===void 0)return yield*r.Effect.logError(`Input node ${e} has no input data`),yield*t.n.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Input node ${e} has no input data`)}).toEffect()}else{if(u=T(e,s),Object.keys(u).length===0)return 
yield*r.Effect.logError(`Node ${e} has no input data`),yield*t.n.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Node ${e} has no input data`)}).toEffect();if(m.multiInput)l=u;else{let n=Object.keys(u)[0];if(!n)return yield*t.n.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Node ${e} has no input data`)}).toEffect();l=u[n]}}if(m.type===`conditional`){let t=yield*w(m,l);S.set(e,t)}let p=yield*m.run({data:l,inputs:u,jobId:d,flowId:o,storageId:i,clientId:f});if(p.type===`waiting`){let t=p.partialData;return c&&(yield*c({jobId:d,flowId:o,nodeId:e,eventType:h.NodePause,nodeName:m.name,partialData:t})),{nodeId:e,result:t,success:!0,waiting:!0,nodeType:p.nodeType}}let g=p.data;if(k(e)&&(G(g)&&g.storage.id!==i&&(yield*r.Effect.logDebug(`Auto-persisting output node ${e} output from ${g.storage.id} to ${i}`),g=yield*M(g,i,f)),n.hooks?.onNodeOutput)){yield*r.Effect.logDebug(`Calling onNodeOutput hook for sink node ${e}`);let t=n.hooks.onNodeOutput({output:g,nodeId:e,flowId:o,jobId:d,storageId:i,clientId:f});g=yield*r.Effect.isEffect(t)?t:r.Effect.promise(()=>t)}return x&&(yield*x.recordSuccess()),c&&(yield*c({jobId:d,flowId:o,nodeId:e,eventType:h.NodeEnd,nodeName:m.name,result:g})),{nodeId:e,result:g,success:!0,waiting:!1,nodeType:p.nodeType}}catch(n){if(E=n instanceof t.n?n:t.n.fromCode(`FLOW_NODE_ERROR`,{cause:n}),x&&(yield*x.recordFailure(E.body)),C<g){C++;let t=v?_*2**(C-1):_;yield*r.Effect.logWarning(`Node ${e} (${m.name}) failed, retrying (${C}/${g}) after ${t}ms`),yield*r.Effect.sleep(t);continue}return c&&(yield*c({jobId:d,flowId:o,nodeId:e,eventType:h.NodeError,nodeName:m.name,error:E.body,retryCount:C})),yield*E.toEffect()}return E?yield*E.toEffect():yield*t.n.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Unexpected error in retry loop`)}).toEffect()}).pipe((()=>{let t=u.get(e),n=t?.nodeTypeId??t?.type??`unknown`;return 
r.Effect.withSpan(`node-${n}`,{attributes:{"node.id":e,"node.type":t?.type??`unknown`,"node.type_id":t?.nodeTypeId??`unknown`,"node.name":t?.name??`unknown`,"flow.id":o,"flow.job_id":d}})})()),P=({inputs:a,storageId:s,jobId:l,resumeFrom:d,clientId:m})=>r.Effect.gen(function*(){let g=yield*r.Effect.serviceOption(e.L),v=g._tag===`Some`?new p(g.value):null;!d&&c&&(yield*c({jobId:l,eventType:h.FlowStart,flowId:o}));let y=D(a||{}),b,x,S;d?(b=d.executionOrder,x=d.nodeResults,S=d.currentIndex):(b=C(),x=new Map,S=0);let w=new Map;if(b.length!==_.length)return yield*t.n.fromCode(`FLOW_CYCLE_ERROR`).toEffect();let T=new Map(_.map(e=>[e.id,e])),O=new Set;if(n.parallelExecution?.enabled??!1){yield*r.Effect.logDebug(`Flow ${o}: Executing in parallel mode (maxConcurrency: ${n.parallelExecution?.maxConcurrency??4})`);let e=new U({maxConcurrency:n.parallelExecution?.maxConcurrency??4}),t=e.groupNodesByExecutionLevel(_,u);yield*r.Effect.logDebug(`Flow ${o}: Grouped nodes into ${t.length} execution levels`);let i={};_.forEach(e=>{i[e.id]=[]}),u.forEach(e=>{i[e.target]?.push(e.source)});for(let n of t){yield*r.Effect.logDebug(`Flow ${o}: Executing level ${n.level} with nodes: ${n.nodes.join(`, `)}`);let t=[],i=[];for(let e of n.nodes)E(e,x,O)===`skip`?(i.push(e),O.add(e)):t.push(e);if(i.length>0&&(yield*r.Effect.logDebug(`Flow ${o}: Skipping nodes due to conditional routing: ${i.join(`, `)}`)),t.length===0){yield*r.Effect.logDebug(`Flow ${o}: All nodes in level ${n.level} skipped due to conditional routing`);continue}let a=t.map(e=>()=>r.Effect.gen(function*(){if(d&&e===d.executionOrder[S]&&c){let t=T.get(e);t&&(yield*c({jobId:l,flowId:o,nodeId:e,eventType:h.NodeResume,nodeName:t.name,nodeType:t.type}))}return{nodeId:e,nodeResult:yield*N(e,s,y,x,T,l,m,v)}})),u=yield*e.executeNodesInParallel(a);for(let{nodeId:e,nodeResult:t}of u){if(t.waiting)return t.result!==void 
0&&(x.set(e,t.result),t.nodeType&&w.set(e,t.nodeType)),{type:`paused`,nodeId:e,executionState:{executionOrder:b,currentIndex:b.indexOf(e),inputs:y}};t.success&&(x.set(e,t.result),t.nodeType&&w.set(e,t.nodeType))}}}else{yield*r.Effect.logDebug(`Flow ${o}: Executing in sequential mode`);for(let e=S;e<b.length;e++){let n=b[e];if(!n)return yield*t.n.fromCode(`FLOW_NODE_NOT_FOUND`).toEffect();if(E(n,x,O)===`skip`){yield*r.Effect.logDebug(`Flow ${o}: Skipping node ${n} due to conditional routing`),O.add(n);continue}if(d&&e===S&&c){let e=T.get(n);e&&(yield*c({jobId:l,flowId:o,nodeId:n,eventType:h.NodeResume,nodeName:e.name,nodeType:e.type}))}let i=yield*N(n,s,y,x,T,l,m,v);if(i.waiting)return i.result!==void 0&&(x.set(i.nodeId,i.result),i.nodeType&&w.set(i.nodeId,i.nodeType)),{type:`paused`,nodeId:i.nodeId,executionState:{executionOrder:b,currentIndex:e,inputs:y}};i.success&&(x.set(i.nodeId,i.result),i.nodeType&&w.set(i.nodeId,i.nodeType))}}let ee=A(x),M=j(x,w),P=i.z.record(i.z.string(),f).safeParse(ee);if(!P.success){let e=`Flow output validation failed: ${P.error.message}. Expected outputs: ${JSON.stringify(Object.keys(A(x)))}. 
Output nodes (sinks + keepOutput): ${_.filter(e=>k(e.id)).map(e=>e.id).join(`, `)}`;return c&&(yield*c({jobId:l,eventType:h.FlowError,flowId:o,error:e})),yield*t.n.fromCode(`FLOW_OUTPUT_VALIDATION_ERROR`,{body:e,cause:P.error}).toEffect()}let F=P.data;return c&&(yield*c({jobId:l,eventType:h.FlowEnd,flowId:o,outputs:M,result:F})),{type:`completed`,result:F,outputs:M}});return{id:o,name:s,nodes:_,edges:u,inputSchema:d,outputSchema:f,onEvent:c,checkJobStatus:l,hooks:n.hooks,run:({inputs:e,storageId:t,jobId:n,clientId:r})=>P({inputs:e,storageId:t,jobId:n,clientId:r}),resume:({jobId:e,storageId:t,nodeResults:n,executionState:r,clientId:i})=>P({inputs:r.inputs,storageId:t,jobId:e,resumeFrom:{executionOrder:r.executionOrder,nodeResults:new Map(Object.entries(n)),currentIndex:r.currentIndex},clientId:i}),validateTypes:()=>{let e=_;return v.validateFlow(e,u)},validateInputs:e=>v.validateData(e,d),validateOutputs:e=>v.validateData(e,f)}})}var J=class e extends r.Context.Tag(`FlowWaitUntil`)(){static optional=r.Effect.serviceOption(e)},Se=class e extends r.Context.Tag(`FlowLifecycleHook`)(){static optional=r.Effect.serviceOption(e)},Ce=class extends r.Context.Tag(`FlowProvider`)(){},we=class extends r.Context.Tag(`FlowEngine`)(){};const Y=e=>typeof e==`object`&&!!e&&`id`in e,Te=e=>typeof e==`object`&&e&&`nodeId`in e&&`data`in e&&`timestamp`in e?e.data:e;function Ee(e,n,i){let a=t=>{let a=e=>r.Effect.gen(function*(){let n=yield*i.get(t);n&&(yield*i.set(t,{...n,...e,updatedAt:new Date}))});return o=>r.Effect.gen(function*(){switch(e.onEvent&&(yield*r.Effect.catchAll(e.onEvent(o),e=>(r.Effect.logError(`Original onEvent failed`,e),r.Effect.succeed({eventId:null})))),yield*n.emit(t,o),r.Effect.logInfo(`Updating job ${t} with event ${o.eventType}`),o.eventType){case h.FlowStart:yield*a({status:`running`});break;case h.FlowEnd:yield*r.Effect.gen(function*(){let e=yield*i.get(t);e&&o.outputs&&(yield*i.set(t,{...e,result:o.outputs,updatedAt:new Date}))});break;case 
h.FlowError:yield*a({status:`failed`,error:o.error});break;case h.NodeStart:yield*r.Effect.gen(function*(){let e=yield*i.get(t);if(e){let n=e.tasks.find(e=>e.nodeId===o.nodeId)?e.tasks.map(e=>e.nodeId===o.nodeId?{...e,status:`running`,updatedAt:new Date}:e):[...e.tasks,{nodeId:o.nodeId,status:`running`,createdAt:new Date,updatedAt:new Date}];yield*i.set(t,{...e,tasks:n,updatedAt:new Date})}});break;case h.NodePause:yield*r.Effect.gen(function*(){let e=yield*i.get(t);if(e){let n=e.tasks.find(e=>e.nodeId===o.nodeId)?e.tasks.map(e=>e.nodeId===o.nodeId?{...e,status:`paused`,result:o.partialData,updatedAt:new Date}:e):[...e.tasks,{nodeId:o.nodeId,status:`paused`,result:o.partialData,createdAt:new Date,updatedAt:new Date}],r=o.partialData?.id,a=r?[...e.activeUploads||[],r]:e.activeUploads;yield*i.set(t,{...e,tasks:n,activeUploads:a,updatedAt:new Date})}});break;case h.NodeResume:yield*r.Effect.gen(function*(){let e=yield*i.get(t);if(e){let n=e.tasks.map(e=>e.nodeId===o.nodeId?{...e,status:`running`,updatedAt:new Date}:e);yield*i.set(t,{...e,tasks:n,updatedAt:new Date})}});break;case h.NodeEnd:yield*r.Effect.gen(function*(){let n=yield*i.get(t);if(n){let a=n.tasks.map(e=>e.nodeId===o.nodeId?{...e,status:`completed`,result:o.result,updatedAt:new Date}:e),s=!e.edges.some(e=>e.source===o.nodeId),c=e.nodes.find(e=>e.id===o.nodeId)?.keepOutput===!0,l=o.result,u=Te(l),d=n.intermediateFiles||[],f=s||c;f&&Y(u)&&u.id?(d=d.filter(e=>e!==u.id),c&&!s&&r.Effect.logInfo(`Preserving output from node ${o.nodeId} due to keepOutput flag`)):!f&&Y(u)&&u.id&&(d.includes(u.id)||d.push(u.id));let p=n.activeUploads||[];Y(u)&&u.id&&(p=p.filter(e=>e!==u.id)),yield*i.set(t,{...n,tasks:a,intermediateFiles:d,activeUploads:p,updatedAt:new Date})}});break;case h.NodeError:yield*r.Effect.gen(function*(){let e=yield*i.get(t);if(e){let n=e.tasks.map(e=>e.nodeId===o.nodeId?{...e,status:`failed`,error:o.error,retryCount:o.retryCount,updatedAt:new 
Date}:e);yield*i.set(t,{...e,tasks:n,error:o.error,updatedAt:new Date})}});break}return{eventId:t}})},o=e=>e=>r.Effect.gen(function*(){let n=yield*i.get(e);return n?n.status===`paused`?`paused`:n.status===`cancelled`?`cancelled`:`running`:yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found`}))});return{...e,run:t=>r.Effect.gen(function*(){let n=t.jobId||crypto.randomUUID(),r=a(n),i=o(n);return yield*(yield*q({flowId:e.id,name:e.name,nodes:e.nodes,edges:e.edges,inputSchema:e.inputSchema,outputSchema:e.outputSchema,onEvent:r,checkJobStatus:i})).run({...t,jobId:n,clientId:t.clientId})}),resume:t=>r.Effect.gen(function*(){let n=t.jobId,r=a(n),i=o(n);return yield*(yield*q({flowId:e.id,name:e.name,nodes:e.nodes,edges:e.edges,inputSchema:e.inputSchema,outputSchema:e.outputSchema,onEvent:r,checkJobStatus:i})).resume(t)})}}function De(){return r.Effect.gen(function*(){let i=yield*Ce,a=yield*e.d,o=yield*e.A,s=yield*n.n,c=yield*H.optional,l=yield*Se.optional,u=(e,n)=>r.Effect.gen(function*(){let i=yield*o.get(e);return i?yield*o.set(e,{...i,...n}):yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found`}))}),d=(e,t)=>r.Effect.gen(function*(){let n=yield*o.get(e);!n||!n.intermediateFiles||n.intermediateFiles.length===0||(yield*r.Effect.logInfo(`Cleaning up ${n.intermediateFiles.length} intermediate files for job ${e}`),yield*r.Effect.all(n.intermediateFiles.map(e=>r.Effect.gen(function*(){yield*s.delete(e,t),yield*r.Effect.logDebug(`Deleted intermediate file ${e}`)}).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logWarning(`Failed to delete intermediate file ${e}: ${t}`),r.Effect.succeed(void 0)})))),{concurrency:5}),yield*u(e,{intermediateFiles:[]}))}),f=(e,t)=>r.Effect.gen(function*(){if(r.Option.isNone(c)){yield*r.Effect.logDebug(`[FlowServer] DLQ not configured, skipping for job: ${e}`);return}let 
n=c.value,i=yield*r.Effect.catchAll(o.get(e),()=>r.Effect.succeed(null));if(!i){yield*r.Effect.logWarning(`[FlowServer] Job ${e} not found when adding to DLQ`);return}yield*r.Effect.catchAll(n.add(i,t),t=>r.Effect.gen(function*(){return yield*r.Effect.logError(`[FlowServer] Failed to add job ${e} to DLQ`,t),r.Effect.succeed(void 0)})),yield*r.Effect.logInfo(`[FlowServer] Added job ${e} to Dead Letter Queue`)}),p=e=>r.Option.isSome(l)?l.value.onComplete(e).pipe(r.Effect.catchAll(e=>r.Effect.logWarning(`FlowLifecycleHook.onComplete failed: ${e}`))):r.Effect.void,m=r.Effect.gen(function*(){let e=yield*r.Effect.currentSpan.pipe(r.Effect.option);return r.Option.match(e,{onNone:()=>void 0,onSome:e=>({traceId:e.traceId,spanId:e.spanId,traceFlags:e.sampled?1:0})})}),g=({jobId:e,flow:n,storageId:i,clientId:c,inputs:l})=>r.Effect.gen(function*(){return console.log(`[FlowServer] executeFlowInBackground started for job: ${e}`),yield*u(e,{status:`running`,traceContext:yield*m}),yield*r.Effect.gen(function*(){console.log(`[FlowServer] Creating flowWithEvents for job: ${e}`);let t=Ee(n,a,o);console.log(`[FlowServer] Running flow for job: ${e}`);let r=yield*t.run({inputs:l,storageId:i,jobId:e,clientId:c});return console.log(`[FlowServer] Flow completed for job: ${e}, result type: ${r.type}`),r.type===`paused`?yield*u(e,{status:`paused`,pausedAt:r.nodeId,executionState:r.executionState,updatedAt:new Date}):(yield*u(e,{status:`completed`,updatedAt:new Date,endedAt:new Date}),yield*d(e,c),yield*p({jobId:e,flowId:n.id,clientId:c,status:`completed`})),r}).pipe(r.Effect.withSpan(`flow-execution`,{attributes:{"flow.id":n.id,"flow.name":n.name,"flow.job_id":e,"flow.storage_id":i,"flow.node_count":n.nodes.length}}))}).pipe(r.Effect.withSpan(`flow`,{attributes:{"flow.id":n.id,"flow.name":n.name,"flow.job_id":e,"flow.storage_id":i,"flow.node_count":n.nodes.length}}),r.Effect.catchAll(i=>r.Effect.gen(function*(){yield*r.Effect.logError(`Flow execution failed`,i);let l=i instanceof 
t.n?i.body:String(i);yield*r.Effect.logInfo(`Updating job ${e} to failed status with error: ${l}`),yield*u(e,{status:`failed`,error:l,updatedAt:new Date}).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logError(`Failed to update job ${e}`,t),r.Effect.succeed(void 0)})));let m=yield*o.get(e);throw m&&(yield*a.emit(e,{jobId:e,eventType:h.FlowError,flowId:m.flowId,error:l}).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logError(`Failed to emit FlowError event for job ${e}`,t),r.Effect.succeed(void 0)})))),n.onEvent&&(yield*n.onEvent({jobId:e,eventType:h.FlowError,flowId:n.id,error:l}).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logError(`Failed to call flow.onEvent for FlowError event for job ${e}`,t),r.Effect.succeed({eventId:null})})))),yield*d(e,c).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logWarning(`Failed to cleanup intermediate files for job ${e}`,t),r.Effect.succeed(void 0)}))),yield*r.Effect.gen(function*(){let t=yield*o.get(e);t&&t.activeUploads&&t.activeUploads.length>0&&(yield*r.Effect.logInfo(`Cleaning up ${t.activeUploads.length} active uploads for failed job ${e}`),yield*r.Effect.all(t.activeUploads.map(t=>r.Effect.gen(function*(){yield*s.delete(t,c),yield*r.Effect.logDebug(`Aborted active upload ${t} for failed job ${e}`)}).pipe(r.Effect.catchAll(e=>r.Effect.gen(function*(){return yield*r.Effect.logWarning(`Failed to abort active upload ${t}: ${e}`),r.Effect.succeed(void 0)})))),{concurrency:5}),yield*u(e,{activeUploads:[]}))}).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logWarning(`Failed to cleanup active uploads for job ${e}`,t),r.Effect.succeed(void 0)}))),yield*f(e,i instanceof t.n?i:new t.n({code:`UNKNOWN_ERROR`,status:500,body:String(i),cause:i})),yield*p({jobId:e,flowId:n.id,clientId:c,status:`failed`}),i})));return{getFlow:(e,t)=>r.Effect.gen(function*(){return 
yield*i.getFlow(e,t)}),getFlowData:(e,t)=>r.Effect.gen(function*(){return xe(yield*i.getFlow(e,t))}),runFlow:({flowId:e,storageId:n,clientId:a,inputs:s})=>r.Effect.gen(function*(){let c=yield*J.optional,l=yield*r.Effect.try({try:()=>pt.parse({inputs:s}),catch:e=>t.n.fromCode(`FLOW_INPUT_VALIDATION_ERROR`,{cause:e})}),u=crypto.randomUUID(),d=new Date,f={id:u,flowId:e,storageId:n,clientId:a,status:`started`,createdAt:d,updatedAt:d,tasks:[]};yield*o.set(u,f);let p=yield*i.getFlow(e,a);console.log(`[FlowServer] About to fork flow execution for job: ${u}`);let m=g({jobId:u,flow:p,storageId:n,clientId:a,inputs:l.inputs}).pipe(r.Effect.tapErrorCause(e=>r.Effect.logError(`Flow execution failed`,e)));if(r.Option.isSome(c)){console.log(`[FlowServer] Using waitUntil for job: ${u}`);let e=yield*r.Effect.runtime(),t=r.Runtime.runPromise(e)(m);c.value(t)}else console.log(`[FlowServer] Using Effect.forkDaemon for job: ${u}`),yield*r.Effect.forkDaemon(m);return console.log(`[FlowServer] Flow execution started for job: ${u}`),f}),getJobStatus:e=>r.Effect.gen(function*(){return(yield*o.get(e))||(yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found`})))}),resumeFlow:({jobId:e,nodeId:n,newData:c,clientId:l})=>r.Effect.gen(function*(){let m=yield*J.optional,g=yield*o.get(e);if(!g)return console.error(`Job not found`),yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found`}));if(g.status!==`paused`)return console.error(`Job is not paused`),yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${e} is not paused (status: ${g.status})`}));if(g.pausedAt!==n)return console.error(`Job is not paused at the expected node`),yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${e} is paused at node ${g.pausedAt}, not ${n}`}));if(!g.executionState)return console.error(`Job has no execution state`),yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${e} has no execution state`}));let 
_={...g.tasks.reduce((e,t)=>(t.result!==void 0&&(e[t.nodeId]=t.result),e),{}),[n]:c},v={...g.executionState.inputs,[n]:c};yield*u(e,{status:`running`});let y=yield*i.getFlow(g.flowId,g.clientId),b=g.traceContext?r.Tracer.externalSpan({traceId:g.traceContext.traceId,spanId:g.traceContext.spanId,sampled:g.traceContext.traceFlags===1}):void 0,x=r.Effect.gen(function*(){let n=Ee(y,a,o);if(!g.executionState)return yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${e} has no execution state`}));let i=yield*n.resume({jobId:e,storageId:g.storageId,nodeResults:_,executionState:{...g.executionState,inputs:v},clientId:g.clientId});return i.type===`paused`?yield*u(e,{status:`paused`,pausedAt:i.nodeId,executionState:i.executionState,updatedAt:new Date}):(yield*u(e,{status:`completed`,pausedAt:void 0,executionState:void 0,updatedAt:new Date,endedAt:new Date}),yield*d(e,l),yield*p({jobId:e,flowId:y.id,clientId:g.clientId,status:`completed`})),i}).pipe(r.Effect.withSpan(`flow-execution-resume`,{attributes:{"flow.id":y.id,"flow.name":y.name,"flow.job_id":e,"flow.storage_id":g.storageId,"flow.resumed_from_node":n},parent:b})).pipe(r.Effect.catchAll(n=>r.Effect.gen(function*(){yield*r.Effect.logError(`Flow resume failed`,n);let i=n instanceof t.n?n.body:String(n);yield*r.Effect.logInfo(`Updating job ${e} to failed status with error: ${i}`),yield*u(e,{status:`failed`,error:i,updatedAt:new Date}).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logError(`Failed to update job ${e}`,t),r.Effect.succeed(void 0)})));let c=yield*o.get(e);throw c&&(yield*a.emit(e,{jobId:e,eventType:h.FlowError,flowId:c.flowId,error:i}).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logError(`Failed to emit FlowError event for job ${e}`,t),r.Effect.succeed(void 0)})))),yield*d(e,l).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logWarning(`Failed to cleanup intermediate files for job ${e}`,t),r.Effect.succeed(void 
0)}))),yield*r.Effect.gen(function*(){let t=yield*o.get(e);t&&t.activeUploads&&t.activeUploads.length>0&&(yield*r.Effect.logInfo(`Cleaning up ${t.activeUploads.length} active uploads for failed job ${e}`),yield*r.Effect.all(t.activeUploads.map(t=>r.Effect.gen(function*(){yield*s.delete(t,l),yield*r.Effect.logDebug(`Aborted active upload ${t} for failed job ${e}`)}).pipe(r.Effect.catchAll(e=>r.Effect.gen(function*(){return yield*r.Effect.logWarning(`Failed to abort active upload ${t}: ${e}`),r.Effect.succeed(void 0)})))),{concurrency:5}),yield*u(e,{activeUploads:[]}))}).pipe(r.Effect.catchAll(t=>r.Effect.gen(function*(){return yield*r.Effect.logWarning(`Failed to cleanup active uploads for job ${e}`,t),r.Effect.succeed(void 0)}))),yield*f(e,n instanceof t.n?n:new t.n({code:`UNKNOWN_ERROR`,status:500,body:String(n),cause:n})),yield*p({jobId:e,flowId:y.id,clientId:g.clientId,status:`failed`}),n}))).pipe(r.Effect.tapErrorCause(e=>r.Effect.logError(`Flow resume failed`,e)));if(r.Option.isSome(m)){console.log(`[FlowServer] Using waitUntil for resume job: ${e}`);let t=yield*r.Effect.runtime(),n=r.Runtime.runPromise(t)(x);m.value(n)}else console.log(`[FlowServer] Using Effect.forkDaemon for resume job: ${e}`),yield*r.Effect.forkDaemon(x);return(yield*o.get(e))||(yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found after update`})))}),pauseFlow:(e,n)=>r.Effect.gen(function*(){let i=yield*o.get(e);if(!i)return yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found`}));if(n!==null&&i.clientId!==n)return yield*r.Effect.fail(t.n.fromCode(`FLOW_NOT_AUTHORIZED`,{cause:`Client ${n} is not authorized to pause job ${e}`}));if(i.status!==`running`)return yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${e} cannot be paused (current status: ${i.status})`}));let s=i.tasks.find(e=>e.status===`running`)?.nodeId;return yield*u(e,{status:`paused`,pausedAt:s,updatedAt:new 
Date}),yield*a.emit(e,{jobId:e,flowId:i.flowId,eventType:h.FlowPause,pausedAt:s}),(yield*o.get(e))||(yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found after pause`})))}),cancelFlow:(e,n)=>r.Effect.gen(function*(){let i=yield*o.get(e);return i?n!==null&&i.clientId!==n?yield*r.Effect.fail(t.n.fromCode(`FLOW_NOT_AUTHORIZED`,{cause:`Client ${n} is not authorized to cancel job ${e}`})):i.status!==`running`&&i.status!==`paused`&&i.status!==`started`?yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_ERROR`,{cause:`Job ${e} cannot be cancelled (current status: ${i.status})`})):(yield*u(e,{status:`cancelled`,updatedAt:new Date,endedAt:new Date}),yield*a.emit(e,{jobId:e,flowId:i.flowId,eventType:h.FlowCancel}),i.activeUploads&&i.activeUploads.length>0&&(yield*r.Effect.logInfo(`Cleaning up ${i.activeUploads.length} active uploads for job ${e}`),yield*r.Effect.all(i.activeUploads.map(t=>r.Effect.gen(function*(){yield*s.delete(t,n),yield*r.Effect.logDebug(`Aborted active upload ${t} for job ${e}`)}).pipe(r.Effect.catchAll(e=>r.Effect.gen(function*(){return yield*r.Effect.logWarning(`Failed to abort active upload ${t}: ${e}`),r.Effect.succeed(void 0)})))),{concurrency:5}),yield*u(e,{activeUploads:[]})),yield*d(e,n),(yield*o.get(e))||(yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found after cancellation`})))):yield*r.Effect.fail(t.n.fromCode(`FLOW_JOB_NOT_FOUND`,{cause:`Job ${e} not found`}))}),subscribeToFlowEvents:(e,t)=>r.Effect.gen(function*(){yield*a.subscribe(e,t)}),unsubscribeFromFlowEvents:e=>r.Effect.gen(function*(){yield*a.unsubscribe(e)})}})}const Oe=r.Layer.effect(we,De());function ke(e){let t=e.lastIndexOf(`.`);return t===-1||t===0?e:e.substring(0,t)}function Ae(e){let t=e.lastIndexOf(`.`);return t===-1||t===0?``:e.substring(t+1)}function X(e,t,n){let 
r=e.metadata??{},i=r.fileName??r.originalName??r.name??`unnamed`;return{baseName:ke(i),extension:Ae(i),fileName:i,nodeType:t.nodeType,nodeId:t.nodeId,flowId:t.flowId,jobId:t.jobId,timestamp:new Date().toISOString(),...n}}function je(e,t){try{let n={};for(let[e,r]of Object.entries(t))r!==void 0&&(n[e]=String(r));return(0,a.render)(e,n)}catch{return e}}function Z(e,t,n){let r=t.fileName;if(!n)return r;try{if(n.mode===`auto`){if(n.autoSuffix){let e=n.autoSuffix(t);if(e){let{baseName:n,extension:r}=t;return r?`${n}-${e}.${r}`:`${n}-${e}`}}return r}if(n.mode===`custom`){if(n.rename)return n.rename(e,t)||r;if(n.pattern)return je(n.pattern,t)||r}return r}catch{return r}}function Me(e){if(!e||e.trim()===``)return{isValid:!1,error:`Pattern cannot be empty`};let t=(e.match(/\{\{/g)||[]).length,n=(e.match(/\}\}/g)||[]).length;if(t!==n)return{isValid:!1,error:`Unbalanced braces: ${t} opening, ${n} closing`};let r=e.match(/\{\{[^}]*[^a-zA-Z0-9_}][^}]*\}\}/g);return r?{isValid:!1,error:`Invalid variable syntax: ${r[0]}`}:{isValid:!0}}const Ne=[{name:`baseName`,description:`Filename without extension`,example:`photo`},{name:`extension`,description:`File extension without dot`,example:`jpg`},{name:`fileName`,description:`Full original filename`,example:`photo.jpg`},{name:`nodeType`,description:`Type of processing node`,example:`resize`},{name:`nodeId`,description:`Specific node instance ID`,example:`resize-1`},{name:`flowId`,description:`Flow identifier`,example:`flow-abc`},{name:`jobId`,description:`Execution job ID`,example:`job-123`},{name:`timestamp`,description:`ISO 8601 processing time`,example:`2024-01-15T10:30:00Z`},{name:`width`,description:`Output width (image/video)`,example:`800`},{name:`height`,description:`Output height (image/video)`,example:`600`},{name:`format`,description:`Output format`,example:`webp`},{name:`quality`,description:`Quality setting`,example:`80`},{name:`pageNumber`,description:`Page number (documents)`,example:`1`}];function 
Q(e){return!(`stream`in e)}function Pe({id:t,name:i,description:a,outputTypeId:o,keepOutput:s,naming:c,nodeType:l=`transform`,nodeTypeId:u,namingVars:d,circuitBreaker:f,mode:p=`auto`,streamingConfig:m,transform:h,streamingTransform:g}){if(p===`streaming`&&!g)throw Error(`Transform node "${t}": mode is "streaming" but no streamingTransform function provided`);if(p===`buffered`&&!h)throw Error(`Transform node "${t}": mode is "buffered" but no transform function provided`);if(p===`auto`&&!h&&!g)throw Error(`Transform node "${t}": mode is "auto" but neither transform nor streamingTransform provided`);let _={...e.b,...m};return r.Effect.gen(function*(){let m=yield*n.n;return yield*C({id:t,name:i,description:a,type:S.process,outputTypeId:o,keepOutput:s,nodeTypeId:u,circuitBreaker:f,inputSchema:e.D,outputSchema:e.D,run:({data:e,storageId:n,flowId:i,jobId:a,clientId:o})=>r.Effect.gen(function*(){let s={flowId:i,nodeId:t,jobId:a},u=yield*r.Effect.gen(function*(){if(p===`buffered`)return!1;if(p===`streaming`)return!0;let t=e.size??0,i=_.fileSizeThreshold;return t>0&&t<i?(yield*r.Effect.logDebug(`File ${e.id} (${t} bytes) below threshold (${i}), using buffered mode`),!1):g?(yield*m.getCapabilities(n,o)).supportsStreamingRead?(yield*r.Effect.logDebug(`File ${e.id} qualifies for streaming mode`),!0):(yield*r.Effect.logDebug(`DataStore doesn't support streaming read, using buffered mode`),!1):(yield*r.Effect.logDebug(`No streamingTransform function, using buffered mode`),!1)}),{type:f,fileName:v,metadata:y,metadataJson:b}=j(e.metadata);if(u&&g){yield*r.Effect.logDebug(`Using streaming transform for ${e.id}`);let u=yield*g(yield*m.readStream(e.id,o,_),e),p=Q(u)?u:u.stream,h=Q(u)?void 0:u.type,x=Q(u)?void 0:u.estimatedSize,S=Q(u)?void 0:u.fileName;!S&&c&&(S=Z(e,X(e,{flowId:i,jobId:a,nodeId:t,nodeType:l},d),c));let C=yield*m.getCapabilities(n,o),w;if(C.supportsStreamingWrite)yield*r.Effect.logDebug(`Using streaming write for ${e.id} - no intermediate 
buffering`),w=yield*m.uploadStream({storageId:n,uploadLengthDeferred:!0,sizeHint:x,type:h??f,fileName:S??v,lastModified:0,metadata:b,flow:s},o,p);else{yield*r.Effect.logDebug(`Falling back to buffered upload for ${e.id} (streaming write not supported)`);let t=[];yield*r.Stream.runForEach(p,e=>r.Effect.sync(()=>{t.push(e)}));let i=t.reduce((e,t)=>e+t.byteLength,0),a=new Uint8Array(i),c=0;for(let e of t)a.set(e,c),c+=e.byteLength;let l=new ReadableStream({start(e){e.enqueue(a),e.close()}});w=yield*m.upload({storageId:n,size:a.byteLength,type:h??f,fileName:S??v,lastModified:0,metadata:b,flow:s},o,l)}let T=y?{...y,...h&&{mimeType:h,type:h,"content-type":h},...S&&{fileName:S,originalName:S,name:S,extension:S.split(`.`).pop()||y.extension}}:w.metadata;return O(T?{...w,metadata:T}:w)}if(!h)throw Error(`Transform node "${t}": buffered mode selected but no transform function provided`);let x=yield*h(yield*m.read(e.id,o),e),S=x instanceof Uint8Array?x:x.bytes,C=x instanceof Uint8Array?void 0:x.type,w=x instanceof Uint8Array?void 0:x.fileName,T=x instanceof Uint8Array?void 0:x.metadata;!w&&c&&(w=Z(e,X(e,{flowId:i,jobId:a,nodeId:t,nodeType:l},d),c));let E=new ReadableStream({start(e){e.enqueue(S),e.close()}}),D=yield*m.upload({storageId:n,size:S.byteLength,type:C??f,fileName:w??v,lastModified:0,metadata:b,flow:s},o,E),k=y?{...y,...T,...C&&{mimeType:C,type:C,"content-type":C},...w&&{fileName:w,originalName:w,name:w,extension:w.split(`.`).pop()||y.extension}}:D.metadata;return O(k?{...D,metadata:k}:D)})})})}var Fe=class extends r.Context.Tag(`CredentialProvider`)(){},Ie=class extends r.Context.Tag(`DocumentAiPlugin`)(){},Le=class extends r.Context.Tag(`DocumentPlugin`)(){},Re=class extends r.Context.Tag(`ImageAiPlugin`)(){},ze=class extends r.Context.Tag(`ImagePlugin`)(){};const 
Be=i.z.object({serviceType:i.z.enum([`replicate`]).optional()}),Ve=i.z.object({duration:i.z.number().nonnegative(),width:i.z.number().positive(),height:i.z.number().positive(),codec:i.z.string(),format:i.z.string(),bitrate:i.z.number().nonnegative(),frameRate:i.z.number().positive(),aspectRatio:i.z.string(),hasAudio:i.z.boolean(),audioCodec:i.z.string().optional(),audioBitrate:i.z.number().nonnegative().optional(),size:i.z.number().nonnegative()}),He=i.z.object({timestamp:i.z.number().nonnegative(),format:i.z.enum([`png`,`jpeg`]).optional(),quality:i.z.number().min(1).max(100).optional()}),Ue=i.z.object({quality:i.z.number().min(0).max(100),format:i.z.enum([`jpeg`,`webp`,`png`,`avif`])}),We=i.z.object({serviceType:i.z.enum([`replicate`]).optional()}),Ge=i.z.object({width:i.z.number().positive().optional(),height:i.z.number().positive().optional(),fit:i.z.enum([`contain`,`cover`,`fill`])}).refine(e=>e.width||e.height,`Either width or height must be specified for resize`),Ke=i.z.object({width:i.z.number().positive().optional(),height:i.z.number().positive().optional(),aspectRatio:i.z.enum([`keep`,`ignore`]).optional(),scaling:i.z.enum([`bicubic`,`bilinear`,`lanczos`]).optional()}).refine(e=>e.width||e.height,`Either width or height must be specified for video 
resize`),qe=i.z.object({format:i.z.enum([`mp4`,`webm`,`mov`,`avi`]),codec:i.z.enum([`h264`,`h265`,`vp9`,`av1`]).optional(),videoBitrate:i.z.string().optional(),audioBitrate:i.z.string().optional(),audioCodec:i.z.enum([`aac`,`mp3`,`opus`,`vorbis`]).optional()}),Je=i.z.object({type:i.z.literal(`resize`),width:i.z.number().positive().optional(),height:i.z.number().positive().optional(),fit:i.z.enum([`contain`,`cover`,`fill`])}),Ye=i.z.object({type:i.z.literal(`blur`),sigma:i.z.number().min(.3).max(1e3)}),Xe=i.z.object({type:i.z.literal(`rotate`),angle:i.z.number(),background:i.z.string().optional()}),Ze=i.z.object({type:i.z.literal(`flip`),direction:i.z.enum([`horizontal`,`vertical`])}),Qe=i.z.object({type:i.z.literal(`grayscale`)}),$e=i.z.object({type:i.z.literal(`sepia`)}),et=i.z.object({type:i.z.literal(`brightness`),value:i.z.number().min(-100).max(100)}),tt=i.z.object({type:i.z.literal(`contrast`),value:i.z.number().min(-100).max(100)}),$=i.z.object({type:i.z.literal(`sharpen`),sigma:i.z.number().positive().optional()}),nt=i.z.object({type:i.z.literal(`watermark`),imagePath:i.z.string().min(1).url(),position:i.z.enum([`top-left`,`top-right`,`bottom-left`,`bottom-right`,`center`]),opacity:i.z.number().min(0).max(1),offsetX:i.z.number().optional(),offsetY:i.z.number().optional()}),rt=i.z.object({type:i.z.literal(`logo`),imagePath:i.z.string().min(1).url(),position:i.z.enum([`top-left`,`top-right`,`bottom-left`,`bottom-right`,`center`]),scale:i.z.number().min(.1).max(2),offsetX:i.z.number().optional(),offsetY:i.z.number().optional()}),it=i.z.object({type:i.z.literal(`text`),text:i.z.string().min(1),position:i.z.enum([`top-left`,`top-right`,`bottom-left`,`bottom-right`,`center`]),fontSize:i.z.number().positive(),color:i.z.string().min(1),fontFamily:i.z.string().optional(),offsetX:i.z.number().optional(),offsetY:i.z.number().optional()}),at=i.z.discriminatedUnion(`type`,[Je,Ye,Xe,Ze,Qe,$e,et,tt,$,nt,rt,it]),ot=i.z.object({transformations:i.z.array(at).min(1)}),st=i.z.o
bject({startTime:i.z.number().nonnegative(),endTime:i.z.number().positive().optional(),duration:i.z.number().positive().optional()}).refine(e=>!e.endTime||!e.duration,`Cannot specify both endTime and duration`).refine(e=>!e.endTime||e.endTime>e.startTime,`endTime must be greater than startTime`);var ct=class extends r.Context.Tag(`VideoPlugin`)(){},lt=class extends r.Context.Tag(`VirusScanPlugin`)(){},ut=class extends r.Context.Tag(`ZipPlugin`)(){};const dt=(e,t)=>{if(e.length===0)return t;let[n,...r]=e;return r.reduce((e,t)=>i.z.union([e,t]),n)};function ft(e){return r.Effect.gen(function*(){let n=Object.entries(e.nodes),a=e=>r.Effect.isEffect(e)?e:r.Effect.succeed(e),o=yield*r.Effect.forEach(n,([e,n])=>r.Effect.flatMap(a(n),n=>n.id===e?r.Effect.succeed([e,n]):r.Effect.fail(t.n.fromCode(`FLOW_NODE_ERROR`,{cause:Error(`Node key ${e} does not match node id ${n.id}`)})))),s=Object.fromEntries(o),c=o.map(([,e])=>e),l=o.filter(([,e])=>e.type===S.input).map(([,e])=>e.inputSchema),u=e.edges.map(e=>({source:s[e.source]?.id??e.source,target:s[e.target]?.id??e.target,sourcePort:e.sourcePort,targetPort:e.targetPort})),d=new Set(o.map(([e])=>s[e]?.id).filter(e=>e&&!u.some(t=>t.source===e))),f=o.filter(([,e])=>d.has(e.id)||e.keepOutput===!0).map(([,e])=>e.outputSchema),p=e.inputSchema??dt(l,i.z.unknown()),m=e.outputSchema??dt(f,i.z.unknown());return yield*q({flowId:e.flowId,name:e.name,nodes:c,edges:u,inputSchema:p,outputSchema:m,typeChecker:e.typeChecker,onEvent:e.onEvent,parallelExecution:e.parallelExecution,hooks:e.hooks,circuitBreaker:e.circuitBreaker})})}const pt=i.z.object({inputs:i.z.record(i.z.string(),i.z.any())});Object.defineProperty(exports,`$`,{enumerable:!0,get:function(){return he}}),Object.defineProperty(exports,`A`,{enumerable:!0,get:function(){return Re}}),Object.defineProperty(exports,`At`,{enumerable:!0,get:function(){return E}}),Object.defineProperty(exports,`B`,{enumerable:!0,get:function(){return 
je}}),Object.defineProperty(exports,`Bt`,{enumerable:!0,get:function(){return _}}),Object.defineProperty(exports,`C`,{enumerable:!0,get:function(){return Ge}}),Object.defineProperty(exports,`Ct`,{enumerable:!0,get:function(){return ie}}),Object.defineProperty(exports,`D`,{enumerable:!0,get:function(){return Ve}}),Object.defineProperty(exports,`Dt`,{enumerable:!0,get:function(){return j}}),Object.defineProperty(exports,`E`,{enumerable:!0,get:function(){return He}}),Object.defineProperty(exports,`Et`,{enumerable:!0,get:function(){return te}}),Object.defineProperty(exports,`F`,{enumerable:!0,get:function(){return Ne}}),Object.defineProperty(exports,`Ft`,{enumerable:!0,get:function(){return w}}),Object.defineProperty(exports,`G`,{enumerable:!0,get:function(){return J}}),Object.defineProperty(exports,`Gt`,{enumerable:!0,get:function(){return p}}),Object.defineProperty(exports,`H`,{enumerable:!0,get:function(){return we}}),Object.defineProperty(exports,`Ht`,{enumerable:!0,get:function(){return h}}),Object.defineProperty(exports,`I`,{enumerable:!0,get:function(){return Z}}),Object.defineProperty(exports,`It`,{enumerable:!0,get:function(){return y}}),Object.defineProperty(exports,`J`,{enumerable:!0,get:function(){return q}}),Object.defineProperty(exports,`Jt`,{enumerable:!0,get:function(){return l}}),Object.defineProperty(exports,`K`,{enumerable:!0,get:function(){return De}}),Object.defineProperty(exports,`Kt`,{enumerable:!0,get:function(){return u}}),Object.defineProperty(exports,`L`,{enumerable:!0,get:function(){return X}}),Object.defineProperty(exports,`Lt`,{enumerable:!0,get:function(){return b}}),Object.defineProperty(exports,`M`,{enumerable:!0,get:function(){return Ie}}),Object.defineProperty(exports,`Mt`,{enumerable:!0,get:function(){return k}}),Object.defineProperty(exports,`N`,{enumerable:!0,get:function(){return Fe}}),Object.defineProperty(exports,`Nt`,{enumerable:!0,get:function(){return S}}),Object.defineProperty(exports,`O`,{enumerable:!0,get:function(){return 
Be}}),Object.defineProperty(exports,`Ot`,{enumerable:!0,get:function(){return O}}),Object.defineProperty(exports,`P`,{enumerable:!0,get:function(){return Pe}}),Object.defineProperty(exports,`Pt`,{enumerable:!0,get:function(){return C}}),Object.defineProperty(exports,`Q`,{enumerable:!0,get:function(){return me}}),Object.defineProperty(exports,`R`,{enumerable:!0,get:function(){return ke}}),Object.defineProperty(exports,`Rt`,{enumerable:!0,get:function(){return x}}),Object.defineProperty(exports,`S`,{enumerable:!0,get:function(){return Ke}}),Object.defineProperty(exports,`St`,{enumerable:!0,get:function(){return ae}}),Object.defineProperty(exports,`T`,{enumerable:!0,get:function(){return Ue}}),Object.defineProperty(exports,`Tt`,{enumerable:!0,get:function(){return F}}),Object.defineProperty(exports,`U`,{enumerable:!0,get:function(){return Se}}),Object.defineProperty(exports,`Ut`,{enumerable:!0,get:function(){return m}}),Object.defineProperty(exports,`V`,{enumerable:!0,get:function(){return Me}}),Object.defineProperty(exports,`Vt`,{enumerable:!0,get:function(){return v}}),Object.defineProperty(exports,`W`,{enumerable:!0,get:function(){return Ce}}),Object.defineProperty(exports,`Wt`,{enumerable:!0,get:function(){return f}}),Object.defineProperty(exports,`X`,{enumerable:!0,get:function(){return W}}),Object.defineProperty(exports,`Xt`,{enumerable:!0,get:function(){return o}}),Object.defineProperty(exports,`Y`,{enumerable:!0,get:function(){return xe}}),Object.defineProperty(exports,`Yt`,{enumerable:!0,get:function(){return d}}),Object.defineProperty(exports,`Z`,{enumerable:!0,get:function(){return K}}),Object.defineProperty(exports,`_`,{enumerable:!0,get:function(){return it}}),Object.defineProperty(exports,`_t`,{enumerable:!0,get:function(){return oe}}),Object.defineProperty(exports,`a`,{enumerable:!0,get:function(){return ct}}),Object.defineProperty(exports,`at`,{enumerable:!0,get:function(){return 
de}}),Object.defineProperty(exports,`b`,{enumerable:!0,get:function(){return nt}}),Object.defineProperty(exports,`bt`,{enumerable:!0,get:function(){return I}}),Object.defineProperty(exports,`c`,{enumerable:!0,get:function(){return et}}),Object.defineProperty(exports,`ct`,{enumerable:!0,get:function(){return be}}),Object.defineProperty(exports,`d`,{enumerable:!0,get:function(){return Qe}}),Object.defineProperty(exports,`dt`,{enumerable:!0,get:function(){return H}}),Object.defineProperty(exports,`et`,{enumerable:!0,get:function(){return pe}}),Object.defineProperty(exports,`f`,{enumerable:!0,get:function(){return rt}}),Object.defineProperty(exports,`ft`,{enumerable:!0,get:function(){return ce}}),Object.defineProperty(exports,`g`,{enumerable:!0,get:function(){return $}}),Object.defineProperty(exports,`gt`,{enumerable:!0,get:function(){return se}}),Object.defineProperty(exports,`h`,{enumerable:!0,get:function(){return $e}}),Object.defineProperty(exports,`ht`,{enumerable:!0,get:function(){return V}}),Object.defineProperty(exports,`i`,{enumerable:!0,get:function(){return lt}}),Object.defineProperty(exports,`it`,{enumerable:!0,get:function(){return _e}}),Object.defineProperty(exports,`j`,{enumerable:!0,get:function(){return Le}}),Object.defineProperty(exports,`jt`,{enumerable:!0,get:function(){return D}}),Object.defineProperty(exports,`k`,{enumerable:!0,get:function(){return ze}}),Object.defineProperty(exports,`kt`,{enumerable:!0,get:function(){return T}}),Object.defineProperty(exports,`l`,{enumerable:!0,get:function(){return tt}}),Object.defineProperty(exports,`lt`,{enumerable:!0,get:function(){return ye}}),Object.defineProperty(exports,`m`,{enumerable:!0,get:function(){return Xe}}),Object.defineProperty(exports,`mt`,{enumerable:!0,get:function(){return B}}),Object.defineProperty(exports,`n`,{enumerable:!0,get:function(){return ft}}),Object.defineProperty(exports,`nt`,{enumerable:!0,get:function(){return 
ve}}),Object.defineProperty(exports,`o`,{enumerable:!0,get:function(){return st}}),Object.defineProperty(exports,`ot`,{enumerable:!0,get:function(){return ue}}),Object.defineProperty(exports,`p`,{enumerable:!0,get:function(){return Je}}),Object.defineProperty(exports,`pt`,{enumerable:!0,get:function(){return le}}),Object.defineProperty(exports,`q`,{enumerable:!0,get:function(){return Oe}}),Object.defineProperty(exports,`qt`,{enumerable:!0,get:function(){return c}}),Object.defineProperty(exports,`r`,{enumerable:!0,get:function(){return ut}}),Object.defineProperty(exports,`rt`,{enumerable:!0,get:function(){return fe}}),Object.defineProperty(exports,`s`,{enumerable:!0,get:function(){return Ye}}),Object.defineProperty(exports,`st`,{enumerable:!0,get:function(){return G}}),Object.defineProperty(exports,`t`,{enumerable:!0,get:function(){return pt}}),Object.defineProperty(exports,`tt`,{enumerable:!0,get:function(){return ge}}),Object.defineProperty(exports,`u`,{enumerable:!0,get:function(){return Ze}}),Object.defineProperty(exports,`ut`,{enumerable:!0,get:function(){return U}}),Object.defineProperty(exports,`v`,{enumerable:!0,get:function(){return ot}}),Object.defineProperty(exports,`vt`,{enumerable:!0,get:function(){return R}}),Object.defineProperty(exports,`w`,{enumerable:!0,get:function(){return We}}),Object.defineProperty(exports,`wt`,{enumerable:!0,get:function(){return re}}),Object.defineProperty(exports,`x`,{enumerable:!0,get:function(){return qe}}),Object.defineProperty(exports,`xt`,{enumerable:!0,get:function(){return z}}),Object.defineProperty(exports,`y`,{enumerable:!0,get:function(){return at}}),Object.defineProperty(exports,`yt`,{enumerable:!0,get:function(){return L}}),Object.defineProperty(exports,`z`,{enumerable:!0,get:function(){return Ae}}),Object.defineProperty(exports,`zt`,{enumerable:!0,get:function(){return g}});
@@ -1 +0,0 @@
1
- {"version":3,"file":"throttle-Da0OA8JT.d.cts","names":[],"sources":["../src/utils/generate-id.ts","../src/utils/checksum.ts","../src/utils/md5.ts","../src/utils/once.ts","../src/utils/throttle.ts"],"mappings":";;;;KAEY,eAAA;EACV,UAAA,QAAkB,MAAA,CAAO,MAAA;AAAA;AAAA,cACzB,eAAA;6BAK6B,MAAA,CAAO,MAAA;AAAA;AAAA,cAFzB,UAAA,SAAmB,eAAA;;;;cAQnB,iBAAA;EAAA,2BANkB,MAAA,CAAO,MAAA;AAAA;;;;;cAYzB,gBAAA;EAAA,2BAZkB,MAAA,CAAO,MAAA;AAAA;AAAA,cAgBzB,cAAA,EAAc,KAAA,CAAA,KAAA,CAAA,UAAA;;;;;cAMd,mBAAA;EAAA,2BAtBkB,MAAA,CAAO,MAAA;AAAA;;;;;AAPtC;cCIM,oBAAA;AAAA,KACM,iBAAA,WAA4B,oBAAA;;;;iBAKxB,oBAAA,CACd,SAAA,WACC,SAAA,IAAa,iBAAA;;;ADVd;;;;;;iBCsBc,eAAA,CACd,KAAA,EAAO,UAAA,EACP,SAAA,WACC,MAAA,CAAO,MAAA,SAAe,eAAA;;;KCqBpB,WAAA;EACH,MAAA;EACA,MAAA;EACA,MAAA;EACA,KAAA;AAAA;AAAA,cAGW,GAAA;EFtDoB;;;;;EAAA,OE4DjB,OAAA,CAAQ,GAAA,UAAa,GAAA;EAAA,OACrB,OAAA,CAAQ,GAAA,UAAa,GAAA,SAAY,UAAA;;;;;;SAUjC,YAAA,CAAa,GAAA,UAAa,GAAA;EAAA,OAC1B,YAAA,CAAa,GAAA,UAAa,GAAA,SAAY,UAAA;EAAA,eAMrC,aAAA;EAAA,eAIA,gBAAA;EAAA,eAIA,QAAA;EAAA,eACA,MAAA;EAAA,eAGA,aAAA;EAAA,eAEA,IAAA;EAAA,eAqBA,SAAA;EAAA,QAmJP,WAAA;EAAA,QACA,aAAA;EAAA,QAEA,MAAA;EAAA,QACA,OAAA;EAAA,QACA,QAAA;EAAA,QACA,SAAA;;EF9PyC;;;EEyQ1C,KAAA,CAAA;EF/QsB;;;;EE8RtB,SAAA,CAAU,GAAA;EFhRjB;;;;EE8TO,cAAA,CAAe,GAAA;EF5Uc;;;AAgBtC;EEuVS,eAAA,CAAgB,KAAA,EAAO,UAAA;;;;EA0BvB,QAAA,CAAA,GAAY,WAAA;EFjXM;;;AAM3B;EE0XS,QAAA,CAAS,KAAA,EAAO,WAAA;;;;;EAsBhB,GAAA,CAAI,GAAA,sBAAW,UAAA,CAAA,eAAA;AAAA;;;iBC3aR,IAAA,gCAAA,CACd,EAAA,GAAK,IAAA,EAAM,CAAA,KAAM,IAAA,EAAM,CAAA,KAAM,MAAA,IAAM,IAAA,EAIT,CAAA,KAAC,IAAA,EAAW,CAAA,KAAI,MAAA;;AHP5C;;cGwBa,UAAA;EHvBoB;;;;;kBG6BjB,MAAA,EACJ,MAAA,CAAO,MAAA,CAAO,CAAA,EAAG,CAAA,EAAG,CAAA,MAC3B,MAAA,CAAO,MAAA,CAAO,CAAA,EAAG,CAAA,GAAI,eAAA,EAAiB,CAAA;EH9BzC;;;;;;;;;iBIFc,QAAA,gCAAA,CACd,EAAA,GAAK,IAAA,EAAM,CAAA,KAAM,IAAA,EAAM,CAAA,KAAM,MAAA,EAC7B,IAAA;EAEE,OAAA;EACA;AAAA;EACG,OAAA;EAAmB,QAAA;AAAA,KAAyB,IAAA,EAAA,CAAA,KAAA,IAAA,EAAA,CAAA;AJNnD;;;AAAA,cIiBa,cAAA;EJhBX;;;;;AACA;;iBIwBD,QAAA;AAAA"}
@@ -1 +0,0 @@
1
- {"version":3,"file":"throttle-ibiT6E4U.d.mts","names":[],"sources":["../src/utils/generate-id.ts","../src/utils/checksum.ts","../src/utils/md5.ts","../src/utils/once.ts","../src/utils/throttle.ts"],"mappings":";;;;KAEY,eAAA;EACV,UAAA,QAAkB,MAAA,CAAO,MAAA;AAAA;AAAA,cACzB,eAAA;6BAK6B,MAAA,CAAO,MAAA;AAAA;AAAA,cAFzB,UAAA,SAAmB,eAAA;;;;cAQnB,iBAAA;EAAA,2BANkB,MAAA,CAAO,MAAA;AAAA;;;;;cAYzB,gBAAA;EAAA,2BAZkB,MAAA,CAAO,MAAA;AAAA;AAAA,cAgBzB,cAAA,EAAc,KAAA,CAAA,KAAA,CAAA,UAAA;;;;;cAMd,mBAAA;EAAA,2BAtBkB,MAAA,CAAO,MAAA;AAAA;;;;;AAPtC;cCIM,oBAAA;AAAA,KACM,iBAAA,WAA4B,oBAAA;;;;iBAKxB,oBAAA,CACd,SAAA,WACC,SAAA,IAAa,iBAAA;;;ADVd;;;;;;iBCsBc,eAAA,CACd,KAAA,EAAO,UAAA,EACP,SAAA,WACC,MAAA,CAAO,MAAA,SAAe,eAAA;;;KCqBpB,WAAA;EACH,MAAA;EACA,MAAA;EACA,MAAA;EACA,KAAA;AAAA;AAAA,cAGW,GAAA;EFtDoB;;;;;EAAA,OE4DjB,OAAA,CAAQ,GAAA,UAAa,GAAA;EAAA,OACrB,OAAA,CAAQ,GAAA,UAAa,GAAA,SAAY,UAAA;;;;;;SAUjC,YAAA,CAAa,GAAA,UAAa,GAAA;EAAA,OAC1B,YAAA,CAAa,GAAA,UAAa,GAAA,SAAY,UAAA;EAAA,eAMrC,aAAA;EAAA,eAIA,gBAAA;EAAA,eAIA,QAAA;EAAA,eACA,MAAA;EAAA,eAGA,aAAA;EAAA,eAEA,IAAA;EAAA,eAqBA,SAAA;EAAA,QAmJP,WAAA;EAAA,QACA,aAAA;EAAA,QAEA,MAAA;EAAA,QACA,OAAA;EAAA,QACA,QAAA;EAAA,QACA,SAAA;;EF9PyC;;;EEyQ1C,KAAA,CAAA;EF/QsB;;;;EE8RtB,SAAA,CAAU,GAAA;EFhRjB;;;;EE8TO,cAAA,CAAe,GAAA;EF5Uc;;;AAgBtC;EEuVS,eAAA,CAAgB,KAAA,EAAO,UAAA;;;;EA0BvB,QAAA,CAAA,GAAY,WAAA;EFjXM;;;AAM3B;EE0XS,QAAA,CAAS,KAAA,EAAO,WAAA;;;;;EAsBhB,GAAA,CAAI,GAAA,sBAAW,UAAA,CAAA,eAAA;AAAA;;;iBC3aR,IAAA,gCAAA,CACd,EAAA,GAAK,IAAA,EAAM,CAAA,KAAM,IAAA,EAAM,CAAA,KAAM,MAAA,IAAM,IAAA,EAIT,CAAA,KAAC,IAAA,EAAW,CAAA,KAAI,MAAA;;AHP5C;;cGwBa,UAAA;EHvBoB;;;;;kBG6BjB,MAAA,EACJ,MAAA,CAAO,MAAA,CAAO,CAAA,EAAG,CAAA,EAAG,CAAA,MAC3B,MAAA,CAAO,MAAA,CAAO,CAAA,EAAG,CAAA,GAAI,eAAA,EAAiB,CAAA;EH9BzC;;;;;;;;;iBIFc,QAAA,gCAAA,CACd,EAAA,GAAK,IAAA,EAAM,CAAA,KAAM,IAAA,EAAM,CAAA,KAAM,MAAA,EAC7B,IAAA;EAEE,OAAA;EACA;AAAA;EACG,OAAA;EAAmB,QAAA;AAAA,KAAyB,IAAA,EAAA,CAAA,KAAA,IAAA,EAAA,CAAA;AJNnD;;;AAAA,cIiBa,cAAA;EJhBX;;;;;AACA;;iBIwBD,QAAA;AAAA"}
@@ -1 +0,0 @@
1
- {"version":3,"file":"uploadista-error-Fsfvr2Bb.mjs","names":[],"sources":["../src/errors/uploadista-error.ts"],"sourcesContent":["import { Data, Effect } from \"effect\";\n\n/**\n * Union type of all possible error codes in the Uploadista system.\n *\n * Each error code corresponds to a specific error condition with predefined\n * HTTP status codes and messages in the ERROR_CATALOG.\n */\nexport type UploadistaErrorCode =\n | \"MISSING_OFFSET\"\n | \"ABORTED\"\n | \"INVALID_TERMINATION\"\n | \"ERR_LOCK_TIMEOUT\"\n | \"INVALID_CONTENT_TYPE\"\n | \"FLOW_STRUCTURE_ERROR\"\n | \"FLOW_CYCLE_ERROR\"\n | \"FLOW_NODE_NOT_FOUND\"\n | \"FLOW_NODE_ERROR\"\n | \"FLOW_NOT_AUTHORIZED\"\n | \"FLOW_NOT_FOUND\"\n | \"FLOW_PAUSED\"\n | \"FLOW_CANCELLED\"\n | \"FILE_READ_ERROR\"\n | \"FLOW_JOB_NOT_FOUND\"\n | \"FLOW_JOB_ERROR\"\n | \"DATASTORE_NOT_FOUND\"\n | \"FILE_NOT_FOUND\"\n | \"UPLOAD_CANCELLED\"\n | \"INVALID_OFFSET\"\n | \"FILE_NO_LONGER_EXISTS\"\n | \"ERR_SIZE_EXCEEDED\"\n | \"ERR_MAX_SIZE_EXCEEDED\"\n | \"INVALID_LENGTH\"\n | \"INVALID_METADATA\"\n | \"VALIDATION_ERROR\"\n | \"STORAGE_NOT_AUTHORIZED\"\n | \"UNKNOWN_ERROR\"\n | \"FILE_WRITE_ERROR\"\n | \"UPLOAD_ID_NOT_FOUND\"\n | \"FLOW_OUTPUT_VALIDATION_ERROR\"\n | \"FLOW_INPUT_VALIDATION_ERROR\"\n | \"CHECKSUM_MISMATCH\"\n | \"MIMETYPE_MISMATCH\"\n | \"UNSUPPORTED_CHECKSUM_ALGORITHM\"\n | \"VIDEO_PROCESSING_FAILED\"\n | \"INVALID_VIDEO_FORMAT\"\n | \"CODEC_NOT_SUPPORTED\"\n | \"VIDEO_METADATA_EXTRACTION_FAILED\"\n | \"FFMPEG_NOT_INSTALLED\"\n | \"INVALID_NODE_TYPE\"\n | \"TYPE_CATEGORY_MISMATCH\"\n | \"INVALID_INPUT_TYPE\"\n | \"INVALID_OUTPUT_TYPE\"\n | \"OUTPUT_NOT_FOUND\"\n | \"MULTIPLE_OUTPUTS_FOUND\"\n | \"VIRUS_SCAN_FAILED\"\n | \"VIRUS_DETECTED\"\n | \"CLAMAV_NOT_INSTALLED\"\n | \"VIRUS_DEFINITIONS_OUTDATED\"\n | \"SCAN_TIMEOUT\"\n | \"DOCUMENT_PROCESSING_FAILED\"\n | \"INVALID_DOCUMENT_FORMAT\"\n | \"OCR_FAILED\"\n | \"PDF_ENCRYPTED\"\n | \"PDF_CORRUPTED\"\n | \"PAGE_RANGE_INVALID\"\n | 
\"CIRCUIT_BREAKER_OPEN\";\n\n/**\n * Catalog of all predefined errors in the Uploadista system.\n *\n * Maps error codes to their HTTP status codes and default error messages.\n * This centralized catalog ensures consistent error handling across all\n * Uploadista packages and adapters.\n *\n * Each error entry contains:\n * - `status`: HTTP status code (400-500 range)\n * - `body`: Human-readable error message\n *\n * @example\n * ```typescript\n * // Access a specific error definition\n * const fileNotFound = ERROR_CATALOG.FILE_NOT_FOUND;\n * console.log(fileNotFound.status); // 404\n * console.log(fileNotFound.body); // \"The file for this url was not found\\n\"\n *\n * // Use with UploadistaError\n * const error = UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n * ```\n */\nexport const ERROR_CATALOG: Readonly<\n Record<UploadistaErrorCode, { status: number; body: string }>\n> = {\n MISSING_OFFSET: { status: 403, body: \"Upload-Offset header required\\n\" },\n ABORTED: { status: 400, body: \"Request aborted due to lock acquired\" },\n INVALID_TERMINATION: {\n status: 400,\n body: \"Cannot terminate an already completed upload\",\n },\n ERR_LOCK_TIMEOUT: {\n status: 500,\n body: \"failed to acquire lock before timeout\",\n },\n INVALID_CONTENT_TYPE: {\n status: 403,\n body: \"Content-Type header required\\n\",\n },\n DATASTORE_NOT_FOUND: {\n status: 500,\n body: \"The datastore was not found\\n\",\n },\n UPLOAD_ID_NOT_FOUND: {\n status: 500,\n body: \"The upload id was not found\\n\",\n },\n FILE_NOT_FOUND: {\n status: 404,\n body: \"The file for this url was not found\\n\",\n },\n UPLOAD_CANCELLED: {\n status: 410,\n body: \"The upload was cancelled\\n\",\n },\n FLOW_NOT_AUTHORIZED: {\n status: 401,\n body: \"The flow is not authorized\\n\",\n },\n FLOW_NOT_FOUND: {\n status: 404,\n body: \"The flow was not found\\n\",\n },\n FLOW_PAUSED: {\n status: 409,\n body: \"The flow execution was paused by user\\n\",\n },\n FLOW_CANCELLED: {\n status: 409,\n body: \"The 
flow execution was cancelled by user\\n\",\n },\n FLOW_STRUCTURE_ERROR: {\n status: 500,\n body: \"The flow structure is invalid\\n\",\n },\n FLOW_CYCLE_ERROR: {\n status: 500,\n body: \"The flow contains a cycle\\n\",\n },\n FLOW_NODE_NOT_FOUND: {\n status: 500,\n body: \"The flow node was not found\\n\",\n },\n FLOW_NODE_ERROR: {\n status: 500,\n body: \"The flow node failed\\n\",\n },\n FLOW_JOB_NOT_FOUND: {\n status: 404,\n body: \"The flow job was not found\\n\",\n },\n FLOW_JOB_ERROR: {\n status: 500,\n body: \"The flow job failed\\n\",\n },\n FLOW_INPUT_VALIDATION_ERROR: {\n status: 500,\n body: \"The flow input validation failed\\n\",\n },\n FLOW_OUTPUT_VALIDATION_ERROR: {\n status: 500,\n body: \"The flow output validation failed\\n\",\n },\n INVALID_OFFSET: { status: 409, body: \"Upload-Offset conflict\\n\" },\n FILE_NO_LONGER_EXISTS: {\n status: 410,\n body: \"The file for this url no longer exists\\n\",\n },\n FILE_READ_ERROR: {\n status: 500,\n body: \"Something went wrong reading the file\\n\",\n },\n ERR_SIZE_EXCEEDED: { status: 413, body: \"upload's size exceeded\\n\" },\n ERR_MAX_SIZE_EXCEEDED: { status: 413, body: \"Maximum size exceeded\\n\" },\n INVALID_LENGTH: {\n status: 400,\n body: \"Upload-Length or Upload-Defer-Length header required\\n\",\n },\n INVALID_METADATA: {\n status: 400,\n body: \"Upload-Metadata is invalid. It MUST consist of one or more comma-separated key-value pairs. The key and value MUST be separated by a space. The key MUST NOT contain spaces and commas and MUST NOT be empty. The key SHOULD be ASCII encoded and the value MUST be Base64 encoded. 
All keys MUST be unique\",\n },\n VALIDATION_ERROR: {\n status: 400,\n body: \"Validation failed\\n\",\n },\n STORAGE_NOT_AUTHORIZED: {\n status: 401,\n body: \"The storage is not authorized\\n\",\n },\n UNKNOWN_ERROR: {\n status: 500,\n body: \"Something went wrong with that request\\n\",\n },\n FILE_WRITE_ERROR: {\n status: 500,\n body: \"Something went wrong receiving the file\\n\",\n },\n CHECKSUM_MISMATCH: {\n status: 400,\n body: \"The file checksum does not match the provided checksum\\n\",\n },\n MIMETYPE_MISMATCH: {\n status: 400,\n body: \"The file MIME type does not match the declared type\\n\",\n },\n UNSUPPORTED_CHECKSUM_ALGORITHM: {\n status: 400,\n body: \"The specified checksum algorithm is not supported\\n\",\n },\n VIDEO_PROCESSING_FAILED: {\n status: 500,\n body: \"Video processing operation failed\\n\",\n },\n INVALID_VIDEO_FORMAT: {\n status: 400,\n body: \"The video format is not supported\\n\",\n },\n CODEC_NOT_SUPPORTED: {\n status: 400,\n body: \"The specified video codec is not supported\\n\",\n },\n VIDEO_METADATA_EXTRACTION_FAILED: {\n status: 500,\n body: \"Failed to extract video metadata\\n\",\n },\n FFMPEG_NOT_INSTALLED: {\n status: 500,\n body: \"FFmpeg is not installed or not available in PATH\\n\",\n },\n INVALID_NODE_TYPE: {\n status: 500,\n body: \"The specified node type is not registered\\n\",\n },\n TYPE_CATEGORY_MISMATCH: {\n status: 500,\n body: \"Node type category does not match the node configuration\\n\",\n },\n INVALID_INPUT_TYPE: {\n status: 500,\n body: \"The input type is not registered\\n\",\n },\n INVALID_OUTPUT_TYPE: {\n status: 500,\n body: \"The output type is not registered\\n\",\n },\n OUTPUT_NOT_FOUND: {\n status: 404,\n body: \"No output of the specified type was found\\n\",\n },\n MULTIPLE_OUTPUTS_FOUND: {\n status: 409,\n body: \"Multiple outputs of the specified type found, expected single output\\n\",\n },\n VIRUS_SCAN_FAILED: {\n status: 500,\n body: \"Virus scanning operation failed\\n\",\n },\n 
VIRUS_DETECTED: {\n status: 400,\n body: \"Virus or malware detected in file\\n\",\n },\n CLAMAV_NOT_INSTALLED: {\n status: 500,\n body: \"ClamAV is not installed or not available\\n\",\n },\n VIRUS_DEFINITIONS_OUTDATED: {\n status: 500,\n body: \"Virus definitions are outdated and should be updated\\n\",\n },\n SCAN_TIMEOUT: {\n status: 500,\n body: \"Virus scan exceeded timeout limit\\n\",\n },\n DOCUMENT_PROCESSING_FAILED: {\n status: 500,\n body: \"Document processing operation failed\\n\",\n },\n INVALID_DOCUMENT_FORMAT: {\n status: 400,\n body: \"The document format is not supported\\n\",\n },\n OCR_FAILED: {\n status: 500,\n body: \"OCR operation failed\\n\",\n },\n PDF_ENCRYPTED: {\n status: 400,\n body: \"The PDF is password-protected and cannot be processed\\n\",\n },\n PDF_CORRUPTED: {\n status: 400,\n body: \"The PDF file is corrupted or malformed\\n\",\n },\n PAGE_RANGE_INVALID: {\n status: 400,\n body: \"The specified page range is invalid\\n\",\n },\n CIRCUIT_BREAKER_OPEN: {\n status: 503,\n body: \"Circuit breaker is open - service temporarily unavailable\\n\",\n },\n} as const;\n\n/**\n * Standard error class for all Uploadista operations.\n *\n * UploadistaError provides a consistent error handling approach across the entire\n * Uploadista ecosystem. 
Each error has:\n * - A typed error code from the ERROR_CATALOG\n * - An HTTP-compatible status code\n * - A human-readable error message (body)\n * - Optional additional details and cause information\n *\n * This class integrates with Effect-TS for functional error handling and can be\n * easily converted to an Effect that fails.\n *\n * @example\n * ```typescript\n * // Create from error code\n * const error = UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n *\n * // Create with custom details\n * const customError = UploadistaError.fromCode(\"FLOW_NODE_ERROR\", {\n * body: \"Failed to process image\",\n * cause: originalError,\n * details: { nodeId: \"resize-1\", fileId: \"abc123\" }\n * });\n *\n * // Use with Effect\n * const effect = customError.toEffect<void>();\n *\n * // In an Effect pipeline\n * return Effect.gen(function* () {\n * const file = yield* getFile(id);\n * if (!file) {\n * return yield* UploadistaError.fromCode(\"FILE_NOT_FOUND\").toEffect();\n * }\n * return file;\n * });\n * ```\n */\nexport class UploadistaError extends Data.TaggedError(\"UploadistaError\") {\n readonly code: string;\n readonly status: number;\n // Keep legacy property names for backward compatibility\n readonly status_code: number;\n readonly body: string;\n readonly details?: unknown;\n\n constructor({\n code,\n status,\n body,\n cause,\n details,\n }: {\n code: UploadistaErrorCode | string;\n status: number;\n body: string;\n cause?: unknown;\n details?: unknown;\n }) {\n super();\n this.name = \"UploadistaError\";\n this.code = code;\n this.status = status;\n this.status_code = status; // legacy alias\n this.body = body;\n // Set message property from body so it's serializable to JSON\n (this as unknown as { message: string }).message = body;\n this.details = details;\n if (cause) (this as unknown as { cause?: unknown }).cause = cause;\n }\n\n /**\n * Creates an UploadistaError from a predefined error code.\n *\n * This is the primary way to create errors in the 
Uploadista system. Each error code\n * has a default status and message defined in ERROR_CATALOG, but these can be overridden\n * for specific use cases.\n *\n * @param code - One of the predefined error codes from UploadistaErrorCode\n * @param overrides - Optional overrides for the default error properties\n * @param overrides.status - Custom HTTP status code (overrides the default)\n * @param overrides.body - Custom error message (overrides the default)\n * @param overrides.details - Additional structured data about the error\n * @param overrides.cause - The underlying error that caused this error (for error chaining)\n *\n * @returns A new UploadistaError instance\n *\n * @example\n * ```typescript\n * // Use default error\n * const error = UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n *\n * // Override message\n * const customError = UploadistaError.fromCode(\"FILE_NOT_FOUND\", {\n * body: `File with ID ${fileId} was not found in storage`\n * });\n *\n * // Include cause and details\n * const detailedError = UploadistaError.fromCode(\"DATASTORE_NOT_FOUND\", {\n * cause: storageException,\n * details: { storageId: \"s3-prod\", region: \"us-east-1\" }\n * });\n * ```\n */\n static fromCode(\n code: UploadistaErrorCode,\n overrides?: Partial<Pick<UploadistaError, \"status\" | \"body\">> & {\n details?: unknown;\n cause?: unknown;\n },\n ): UploadistaError {\n const base = ERROR_CATALOG[code];\n return new UploadistaError({\n code,\n status: overrides?.status ?? base.status,\n body: overrides?.body ?? 
base.body,\n details: overrides?.details,\n cause: overrides?.cause,\n });\n }\n\n /**\n * Converts this error to an Effect that immediately fails.\n *\n * This method integrates UploadistaError with Effect-TS's error handling system,\n * allowing errors to be used in Effect pipelines with proper type checking.\n *\n * @template T - The success type of the Effect (defaults to never since it always fails)\n * @returns An Effect that fails with this UploadistaError\n *\n * @example\n * ```typescript\n * const error = UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n *\n * // Use in an Effect pipeline\n * return Effect.gen(function* () {\n * const file = yield* kvStore.get(fileId);\n * if (!file) {\n * return yield* error.toEffect();\n * }\n * return file;\n * });\n * ```\n */\n toEffect<T = never>(): Effect.Effect<T, UploadistaError> {\n return Effect.fail(this);\n }\n}\n\n/**\n * Type guard to check if an unknown value is an UploadistaError.\n *\n * Useful for error handling when catching errors that might be from\n * different sources or libraries.\n *\n * @param error - The value to check\n * @returns True if the value is an UploadistaError instance\n *\n * @example\n * ```typescript\n * try {\n * await someOperation();\n * } catch (error) {\n * if (isUploadistaError(error)) {\n * console.log(`Uploadista error: ${error.code} (${error.status})`);\n * console.log(error.body);\n * } else {\n * console.error(\"Unknown error:\", error);\n * }\n * }\n * ```\n */\nexport function isUploadistaError(error: unknown): error is UploadistaError {\n return error instanceof UploadistaError;\n}\n\n/**\n * Creates an Effect that immediately fails with an UploadistaError.\n *\n * This is a convenience function that combines error creation with Effect conversion.\n * It's equivalent to calling `UploadistaError.fromCode(code, overrides).toEffect()`.\n *\n * @param code - One of the predefined error codes from UploadistaErrorCode\n * @param overrides - Optional overrides for the default 
error properties\n * @param overrides.status - Custom HTTP status code\n * @param overrides.body - Custom error message\n * @param overrides.details - Additional structured data about the error\n * @param overrides.cause - The underlying error that caused this error\n *\n * @returns An Effect that immediately fails with the created UploadistaError\n *\n * @example\n * ```typescript\n * // In an Effect pipeline\n * return Effect.gen(function* () {\n * const file = yield* kvStore.get(fileId);\n * if (!file) {\n * return yield* httpFailure(\"FILE_NOT_FOUND\", {\n * details: { fileId }\n * });\n * }\n * return file;\n * });\n * ```\n */\nexport function httpFailure(\n code: UploadistaErrorCode,\n overrides?: Partial<Pick<UploadistaError, \"status\" | \"body\">> & {\n details?: unknown;\n cause?: unknown;\n },\n): Effect.Effect<never, UploadistaError> {\n return UploadistaError.fromCode(code, overrides).toEffect();\n}\n"],"mappings":"0CA0FA,MAAa,EAET,CACF,eAAgB,CAAE,OAAQ,IAAK,KAAM;EAAmC,CACxE,QAAS,CAAE,OAAQ,IAAK,KAAM,uCAAwC,CACtE,oBAAqB,CACnB,OAAQ,IACR,KAAM,+CACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM,wCACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,YAAa,CACX,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,gBAAiB,CACf,OAAQ,IACR,KAAM;EACP,CACD,mBAAoB,CAClB,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,4BAA6B,CAC3B,OAAQ,IACR,KAAM;EACP,CACD,6BAA8B,CAC5B,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CAAE,OAAQ,IAAK,KAAM;EAA4B,CACjE,sBAAuB,CACrB,OAAQ,IACR,KAAM;EACP,CACD,gBAAiB,CACf,OAAQ,IACR,KAAM;EACP,CACD,kBAAmB,CAAE,OAAQ,IAAK,KAAM;EAA4B,CACpE,sBAAuB,CAAE,OAAQ,IAAK,KAAM;EAA2B,CACvE,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,iBAAkB,CAChB,OAAQ,IA
CR,KAAM,8SACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM;EACP,CACD,uBAAwB,CACtB,OAAQ,IACR,KAAM;EACP,CACD,cAAe,CACb,OAAQ,IACR,KAAM;EACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM;EACP,CACD,kBAAmB,CACjB,OAAQ,IACR,KAAM;EACP,CACD,kBAAmB,CACjB,OAAQ,IACR,KAAM;EACP,CACD,+BAAgC,CAC9B,OAAQ,IACR,KAAM;EACP,CACD,wBAAyB,CACvB,OAAQ,IACR,KAAM;EACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,iCAAkC,CAChC,OAAQ,IACR,KAAM;EACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACD,kBAAmB,CACjB,OAAQ,IACR,KAAM;EACP,CACD,uBAAwB,CACtB,OAAQ,IACR,KAAM;EACP,CACD,mBAAoB,CAClB,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM;EACP,CACD,uBAAwB,CACtB,OAAQ,IACR,KAAM;EACP,CACD,kBAAmB,CACjB,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACD,2BAA4B,CAC1B,OAAQ,IACR,KAAM;EACP,CACD,aAAc,CACZ,OAAQ,IACR,KAAM;EACP,CACD,2BAA4B,CAC1B,OAAQ,IACR,KAAM;EACP,CACD,wBAAyB,CACvB,OAAQ,IACR,KAAM;EACP,CACD,WAAY,CACV,OAAQ,IACR,KAAM;EACP,CACD,cAAe,CACb,OAAQ,IACR,KAAM;EACP,CACD,cAAe,CACb,OAAQ,IACR,KAAM;EACP,CACD,mBAAoB,CAClB,OAAQ,IACR,KAAM;EACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACF,CAwCD,IAAa,EAAb,MAAa,UAAwB,EAAK,YAAY,kBAAkB,AAAC,CACvE,KACA,OAEA,YACA,KACA,QAEA,YAAY,CACV,OACA,SACA,OACA,QACA,WAOC,CACD,OAAO,CACP,KAAK,KAAO,kBACZ,KAAK,KAAO,EACZ,KAAK,OAAS,EACd,KAAK,YAAc,EACnB,KAAK,KAAO,EAEX,KAAwC,QAAU,EACnD,KAAK,QAAU,EACX,IAAQ,KAAwC,MAAQ,GAoC9D,OAAO,SACL,EACA,EAIiB,CACjB,IAAM,EAAO,EAAc,GAC3B,OAAO,IAAI,EAAgB,CACzB,OACA,OAAQ,GAAW,QAAU,EAAK,OAClC,KAAM,GAAW,MAAQ,EAAK,KAC9B,QAAS,GAAW,QACpB,MAAO,GAAW,MACnB,CAAC,CA0BJ,UAAyD,CACvD,OAAO,EAAO,KAAK,KAAK,GA2B5B,SAAgB,EAAkB,EAA0C,CAC1E,OAAO,aAAiB,EAgC1B,SAAgB,EACd,EACA,EAIuC,CACvC,OAAO,EAAgB,SAAS,EAAM,EAAU,CAAC,UAAU"}
@@ -1 +0,0 @@
1
- var e=Object.create,t=Object.defineProperty,n=Object.getOwnPropertyDescriptor,r=Object.getOwnPropertyNames,i=Object.getPrototypeOf,a=Object.prototype.hasOwnProperty,o=(e,i,o,s)=>{if(i&&typeof i==`object`||typeof i==`function`)for(var c=r(i),l=0,u=c.length,d;l<u;l++)d=c[l],!a.call(e,d)&&d!==o&&t(e,d,{get:(e=>i[e]).bind(null,d),enumerable:!(s=n(i,d))||s.enumerable});return e},s=(n,r,a)=>(a=n==null?{}:e(i(n)),o(r||!n||!n.__esModule?t(a,`default`,{value:n,enumerable:!0}):a,n));const c=require(`./uploadista-error-B-geDgi8.cjs`);let l=require(`effect`),u=require(`zod`);u=s(u);var d=class extends l.Context.Tag(`CircuitBreakerStoreService`)(){};function f(e){let t=Date.now();return{state:`closed`,failureCount:0,lastStateChange:t,halfOpenSuccesses:0,windowStart:t,config:e}}var p=class{constructor(e,t,n,r){this.baseStore=e,this.keyPrefix=t,this.serialize=n,this.deserialize=r}get=e=>this.baseStore.get(this.keyPrefix+e).pipe(l.Effect.flatMap(t=>{if(t===null)return l.Effect.fail(c.n.fromCode(`FILE_NOT_FOUND`,{cause:`Key "${e}" not found`}));try{return l.Effect.succeed(this.deserialize(t))}catch(t){return l.Effect.fail(new c.n({code:`VALIDATION_ERROR`,status:400,body:`Failed to deserialize value for key "${e}"`,cause:t}))}}));set=(e,t)=>{try{let n=this.serialize(t);return this.baseStore.set(this.keyPrefix+e,n)}catch(t){return l.Effect.fail(new c.n({code:`VALIDATION_ERROR`,status:400,body:`Failed to serialize value for key "${e}"`,cause:t}))}};delete=e=>this.baseStore.delete(this.keyPrefix+e);list=()=>this.baseStore.list?this.baseStore.list(this.keyPrefix).pipe(l.Effect.map(e=>e.map(e=>e.startsWith(this.keyPrefix)?e.slice(this.keyPrefix.length):e))):l.Effect.fail(new c.n({code:`UNKNOWN_ERROR`,status:501,body:`List operation not supported by this store`}))};const m={serialize:e=>JSON.stringify(e),deserialize:e=>JSON.parse(e)};var h=class extends l.Context.Tag(`BaseKvStore`)(){},g=class extends l.Context.Tag(`UploadFileKVStore`)(){};const 
_=l.Layer.effect(g,l.Effect.gen(function*(){return new p(yield*h,`uploadista:upload-file:`,m.serialize,m.deserialize)}));var v=class extends l.Context.Tag(`FlowJobKVStore`)(){};const y=l.Layer.effect(v,l.Effect.gen(function*(){return new p(yield*h,`uploadista:flow-job:`,m.serialize,m.deserialize)}));var b=class extends l.Context.Tag(`DeadLetterQueueKVStore`)(){};const x=l.Layer.effect(b,l.Effect.gen(function*(){return new p(yield*h,`uploadista:dlq:`,m.serialize,m.deserialize)})),S=u.z.object({traceId:u.z.string(),spanId:u.z.string(),traceFlags:u.z.number()}),C=u.z.lazy(()=>u.z.union([u.z.string(),u.z.number(),u.z.boolean(),u.z.null(),u.z.array(C),u.z.record(u.z.string(),C)])),w=u.z.object({id:u.z.string(),size:u.z.number().optional(),offset:u.z.number(),metadata:u.z.record(u.z.string(),C).optional(),creationDate:u.z.string().optional(),url:u.z.string().optional(),sizeIsDeferred:u.z.boolean().optional(),checksum:u.z.string().optional(),checksumAlgorithm:u.z.string().optional(),storage:u.z.object({id:u.z.string(),type:u.z.string(),path:u.z.string().optional(),uploadId:u.z.string().optional(),bucket:u.z.string().optional(),parts:u.z.array(u.z.object({partNumber:u.z.number(),etag:u.z.string(),size:u.z.number()})).optional()}),flow:u.z.object({flowId:u.z.string(),nodeId:u.z.string(),jobId:u.z.string()}).optional(),traceContext:S.optional()}),T={fileSizeThreshold:1048576,chunkSize:65536};var E=class extends l.Context.Tag(`UploadFileDataStore`)(){},D=class extends l.Context.Tag(`BufferedUploadFileDataStore`)(){},O=class extends l.Context.Tag(`UploadFileDataStores`)(){};const k=e=>`create`in e&&`write`in e,A=async e=>{if(l.Layer.isLayer(e))return e;if(l.Effect.isEffect(e))return l.Layer.effect(O,l.Effect.gen(function*(){let t=yield*e;return{getDataStore:e=>l.Effect.succeed(t),bufferedDataStore:l.Effect.succeed(void 0)}}));if(k(e)){let t=e;return l.Layer.succeed(O,{getDataStore:e=>l.Effect.succeed(t),bufferedDataStore:l.Effect.succeed(void 0)})}let 
t=e,n=t.default||Object.keys(t.stores)[0],r={};for(let[e,n]of Object.entries(t.stores))`pipe`in n&&!(`create`in n)?r[e]=await l.Effect.runPromise(n):r[e]=n;return l.Layer.succeed(O,{getDataStore:e=>{let t=r[e]||(n?r[n]:void 0);return t?l.Effect.succeed(t):l.Effect.fail(c.n.fromCode(`FILE_NOT_FOUND`))},bufferedDataStore:l.Effect.succeed(void 0)})};var j=class extends l.Context.Tag(`EventBroadcaster`)(){},M=class{constructor(e,t){this.baseEmitter=e,this.eventToMessage=t}subscribe=(e,t)=>this.baseEmitter.subscribe(e,t);unsubscribe=e=>this.baseEmitter.unsubscribe(e);emit=(e,t)=>{let n=this.eventToMessage(t);return this.baseEmitter.emit(e,n)}};const N=e=>({eventToMessage:t=>JSON.stringify({type:e,payload:t,timestamp:new Date().toISOString()})});var P=class extends l.Context.Tag(`BaseEventEmitter`)(){},F=class extends l.Context.Tag(`UploadEventEmitter`)(){};const I=l.Layer.effect(F,l.Effect.gen(function*(){return new M(yield*P,N(`upload_event`).eventToMessage)}));var L=class extends l.Context.Tag(`FlowEventEmitter`)(){};const R=l.Layer.effect(L,l.Effect.gen(function*(){return new M(yield*P,N(`flow_event`).eventToMessage)})),z={timeout:5e3,checkStorage:!0,checkKvStore:!0,checkEventBroadcaster:!0};function B(e){return e?.includes(`text/plain`)?`text`:`json`}function V(e){switch(e){case`healthy`:return`OK`;case`degraded`:return`OK`;case`unhealthy`:return`Service Unavailable`}}const H=u.z.object({uploadLengthDeferred:u.z.boolean().optional(),storageId:u.z.string(),size:u.z.number().optional(),sizeHint:u.z.number().optional(),type:u.z.string(),fileName:u.z.string().optional(),lastModified:u.z.number().optional(),metadata:u.z.string().optional(),checksum:u.z.string().optional(),checksumAlgorithm:u.z.string().optional(),flow:u.z.object({flowId:u.z.string(),nodeId:u.z.string(),jobId:u.z.string()}).optional()}).refine(e=>e.uploadLengthDeferred===!0?!0:e.size!==void 0&&e.size>=0,{message:`size is required when uploadLengthDeferred is not true`,path:[`size`]});var U=class extends 
l.Context.Tag(`MiddlewareService`)(){};const W=l.Layer.succeed(U,U.of({execute:(e,t,n)=>l.Effect.gen(function*(){if(e.length===0)return yield*l.Effect.tryPromise({try:()=>n(),catch:e=>e});let r=e.reduceRight((e,n)=>()=>n(t,e),n);return yield*l.Effect.tryPromise({try:()=>r(),catch:e=>e})})}));let G=function(e){return e.UPLOAD_STARTED=`upload-started`,e.UPLOAD_PROGRESS=`upload-progress`,e.UPLOAD_COMPLETE=`upload-complete`,e.UPLOAD_FAILED=`upload-failed`,e.UPLOAD_VALIDATION_SUCCESS=`upload-validation-success`,e.UPLOAD_VALIDATION_FAILED=`upload-validation-failed`,e.UPLOAD_VALIDATION_WARNING=`upload-validation-warning`,e}({});const K=u.z.object({flowId:u.z.string(),nodeId:u.z.string(),jobId:u.z.string()}).optional(),q=u.z.union([u.z.object({type:u.z.union([u.z.literal(G.UPLOAD_STARTED),u.z.literal(G.UPLOAD_COMPLETE)]),data:w,flow:K}),u.z.object({type:u.z.literal(G.UPLOAD_PROGRESS),data:u.z.object({id:u.z.string(),progress:u.z.number(),total:u.z.number()}),flow:K}),u.z.object({type:u.z.literal(G.UPLOAD_FAILED),data:u.z.object({id:u.z.string(),error:u.z.string()}),flow:K}),u.z.object({type:u.z.literal(G.UPLOAD_VALIDATION_SUCCESS),data:u.z.object({id:u.z.string(),validationType:u.z.enum([`checksum`,`mimetype`]),algorithm:u.z.string().optional()}),flow:K}),u.z.object({type:u.z.literal(G.UPLOAD_VALIDATION_FAILED),data:u.z.object({id:u.z.string(),reason:u.z.string(),expected:u.z.string(),actual:u.z.string()}),flow:K}),u.z.object({type:u.z.literal(G.UPLOAD_VALIDATION_WARNING),data:u.z.object({id:u.z.string(),message:u.z.string()}),flow:K})]),J=u.default.union([u.default.object({type:u.default.literal(`upload_event`),payload:q,timestamp:u.default.string().optional()}),u.default.object({type:u.default.literal(`flow_event`),payload:u.default.any(),timestamp:u.default.string().optional()}),u.default.object({type:u.default.literal(`subscribed`),payload:u.default.object({eventKey:u.default.string()}),timestamp:u.default.string().optional()}),u.default.object({type:u.default.literal(`
error`),message:u.default.string().optional()}),u.default.object({type:u.default.literal(`pong`),timestamp:u.default.string().optional()}),u.default.object({type:u.default.literal(`ping`),timestamp:u.default.string().optional()}),u.default.object({type:u.default.literal(`connection`),message:u.default.string().optional(),uploadId:u.default.string().optional(),timestamp:u.default.string().optional()})]);Object.defineProperty(exports,`A`,{enumerable:!0,get:function(){return v}}),Object.defineProperty(exports,`C`,{enumerable:!0,get:function(){return A}}),Object.defineProperty(exports,`D`,{enumerable:!0,get:function(){return w}}),Object.defineProperty(exports,`E`,{enumerable:!0,get:function(){return S}}),Object.defineProperty(exports,`F`,{enumerable:!0,get:function(){return m}}),Object.defineProperty(exports,`I`,{enumerable:!0,get:function(){return _}}),Object.defineProperty(exports,`L`,{enumerable:!0,get:function(){return d}}),Object.defineProperty(exports,`M`,{enumerable:!0,get:function(){return g}}),Object.defineProperty(exports,`N`,{enumerable:!0,get:function(){return x}}),Object.defineProperty(exports,`O`,{enumerable:!0,get:function(){return h}}),Object.defineProperty(exports,`P`,{enumerable:!0,get:function(){return y}}),Object.defineProperty(exports,`R`,{enumerable:!0,get:function(){return f}}),Object.defineProperty(exports,`S`,{enumerable:!0,get:function(){return O}}),Object.defineProperty(exports,`T`,{enumerable:!0,get:function(){return C}}),Object.defineProperty(exports,`_`,{enumerable:!0,get:function(){return j}}),Object.defineProperty(exports,`a`,{enumerable:!0,get:function(){return W}}),Object.defineProperty(exports,`b`,{enumerable:!0,get:function(){return T}}),Object.defineProperty(exports,`c`,{enumerable:!0,get:function(){return V}}),Object.defineProperty(exports,`d`,{enumerable:!0,get:function(){return L}}),Object.defineProperty(exports,`f`,{enumerable:!0,get:function(){return M}}),Object.defineProperty(exports,`g`,{enumerable:!0,get:function(){return 
I}}),Object.defineProperty(exports,`h`,{enumerable:!0,get:function(){return R}}),Object.defineProperty(exports,`i`,{enumerable:!0,get:function(){return U}}),Object.defineProperty(exports,`j`,{enumerable:!0,get:function(){return p}}),Object.defineProperty(exports,`k`,{enumerable:!0,get:function(){return b}}),Object.defineProperty(exports,`l`,{enumerable:!0,get:function(){return B}}),Object.defineProperty(exports,`m`,{enumerable:!0,get:function(){return N}}),Object.defineProperty(exports,`n`,{enumerable:!0,get:function(){return G}}),Object.defineProperty(exports,`o`,{enumerable:!0,get:function(){return H}}),Object.defineProperty(exports,`p`,{enumerable:!0,get:function(){return F}}),Object.defineProperty(exports,`r`,{enumerable:!0,get:function(){return q}}),Object.defineProperty(exports,`s`,{enumerable:!0,get:function(){return z}}),Object.defineProperty(exports,`t`,{enumerable:!0,get:function(){return J}}),Object.defineProperty(exports,`u`,{enumerable:!0,get:function(){return P}}),Object.defineProperty(exports,`v`,{enumerable:!0,get:function(){return D}}),Object.defineProperty(exports,`w`,{enumerable:!0,get:function(){return k}}),Object.defineProperty(exports,`x`,{enumerable:!0,get:function(){return E}}),Object.defineProperty(exports,`y`,{enumerable:!0,get:function(){return 5242880}}),Object.defineProperty(exports,`z`,{enumerable:!0,get:function(){return s}});
@@ -1,2 +0,0 @@
1
- import{n as e}from"./uploadista-error-Fsfvr2Bb.mjs";import{Context as t,Effect as n,Layer as r}from"effect";import i,{z as a}from"zod";var o=class extends t.Tag(`CircuitBreakerStoreService`)(){};function s(e){let t=Date.now();return{state:`closed`,failureCount:0,lastStateChange:t,halfOpenSuccesses:0,windowStart:t,config:e}}var c=class{constructor(e,t,n,r){this.baseStore=e,this.keyPrefix=t,this.serialize=n,this.deserialize=r}get=t=>this.baseStore.get(this.keyPrefix+t).pipe(n.flatMap(r=>{if(r===null)return n.fail(e.fromCode(`FILE_NOT_FOUND`,{cause:`Key "${t}" not found`}));try{return n.succeed(this.deserialize(r))}catch(r){return n.fail(new e({code:`VALIDATION_ERROR`,status:400,body:`Failed to deserialize value for key "${t}"`,cause:r}))}}));set=(t,r)=>{try{let e=this.serialize(r);return this.baseStore.set(this.keyPrefix+t,e)}catch(r){return n.fail(new e({code:`VALIDATION_ERROR`,status:400,body:`Failed to serialize value for key "${t}"`,cause:r}))}};delete=e=>this.baseStore.delete(this.keyPrefix+e);list=()=>this.baseStore.list?this.baseStore.list(this.keyPrefix).pipe(n.map(e=>e.map(e=>e.startsWith(this.keyPrefix)?e.slice(this.keyPrefix.length):e))):n.fail(new e({code:`UNKNOWN_ERROR`,status:501,body:`List operation not supported by this store`}))};const l={serialize:e=>JSON.stringify(e),deserialize:e=>JSON.parse(e)};var u=class extends t.Tag(`BaseKvStore`)(){},d=class extends t.Tag(`UploadFileKVStore`)(){};const f=r.effect(d,n.gen(function*(){return new c(yield*u,`uploadista:upload-file:`,l.serialize,l.deserialize)}));var p=class extends t.Tag(`FlowJobKVStore`)(){};const m=r.effect(p,n.gen(function*(){return new c(yield*u,`uploadista:flow-job:`,l.serialize,l.deserialize)}));var h=class extends t.Tag(`DeadLetterQueueKVStore`)(){};const g=r.effect(h,n.gen(function*(){return new 
c(yield*u,`uploadista:dlq:`,l.serialize,l.deserialize)})),_=a.object({traceId:a.string(),spanId:a.string(),traceFlags:a.number()}),v=a.lazy(()=>a.union([a.string(),a.number(),a.boolean(),a.null(),a.array(v),a.record(a.string(),v)])),y=a.object({id:a.string(),size:a.number().optional(),offset:a.number(),metadata:a.record(a.string(),v).optional(),creationDate:a.string().optional(),url:a.string().optional(),sizeIsDeferred:a.boolean().optional(),checksum:a.string().optional(),checksumAlgorithm:a.string().optional(),storage:a.object({id:a.string(),type:a.string(),path:a.string().optional(),uploadId:a.string().optional(),bucket:a.string().optional(),parts:a.array(a.object({partNumber:a.number(),etag:a.string(),size:a.number()})).optional()}),flow:a.object({flowId:a.string(),nodeId:a.string(),jobId:a.string()}).optional(),traceContext:_.optional()}),b={fileSizeThreshold:1048576,chunkSize:65536},x=5*1024*1024;var S=class extends t.Tag(`UploadFileDataStore`)(){},C=class extends t.Tag(`BufferedUploadFileDataStore`)(){},w=class extends t.Tag(`UploadFileDataStores`)(){};const T=e=>`create`in e&&`write`in e,E=async t=>{if(r.isLayer(t))return t;if(n.isEffect(t))return r.effect(w,n.gen(function*(){let e=yield*t;return{getDataStore:t=>n.succeed(e),bufferedDataStore:n.succeed(void 0)}}));if(T(t)){let e=t;return r.succeed(w,{getDataStore:t=>n.succeed(e),bufferedDataStore:n.succeed(void 0)})}let i=t,a=i.default||Object.keys(i.stores)[0],o={};for(let[e,t]of Object.entries(i.stores))`pipe`in t&&!(`create`in t)?o[e]=await n.runPromise(t):o[e]=t;return r.succeed(w,{getDataStore:t=>{let r=o[t]||(a?o[a]:void 0);return r?n.succeed(r):n.fail(e.fromCode(`FILE_NOT_FOUND`))},bufferedDataStore:n.succeed(void 0)})};var D=class extends t.Tag(`EventBroadcaster`)(){},O=class{constructor(e,t){this.baseEmitter=e,this.eventToMessage=t}subscribe=(e,t)=>this.baseEmitter.subscribe(e,t);unsubscribe=e=>this.baseEmitter.unsubscribe(e);emit=(e,t)=>{let n=this.eventToMessage(t);return 
this.baseEmitter.emit(e,n)}};const k=e=>({eventToMessage:t=>JSON.stringify({type:e,payload:t,timestamp:new Date().toISOString()})});var A=class extends t.Tag(`BaseEventEmitter`)(){},j=class extends t.Tag(`UploadEventEmitter`)(){};const M=r.effect(j,n.gen(function*(){return new O(yield*A,k(`upload_event`).eventToMessage)}));var N=class extends t.Tag(`FlowEventEmitter`)(){};const P=r.effect(N,n.gen(function*(){return new O(yield*A,k(`flow_event`).eventToMessage)})),F={timeout:5e3,checkStorage:!0,checkKvStore:!0,checkEventBroadcaster:!0};function I(e){return e?.includes(`text/plain`)?`text`:`json`}function L(e){switch(e){case`healthy`:return`OK`;case`degraded`:return`OK`;case`unhealthy`:return`Service Unavailable`}}const R=a.object({uploadLengthDeferred:a.boolean().optional(),storageId:a.string(),size:a.number().optional(),sizeHint:a.number().optional(),type:a.string(),fileName:a.string().optional(),lastModified:a.number().optional(),metadata:a.string().optional(),checksum:a.string().optional(),checksumAlgorithm:a.string().optional(),flow:a.object({flowId:a.string(),nodeId:a.string(),jobId:a.string()}).optional()}).refine(e=>e.uploadLengthDeferred===!0?!0:e.size!==void 0&&e.size>=0,{message:`size is required when uploadLengthDeferred is not true`,path:[`size`]});var z=class extends t.Tag(`MiddlewareService`)(){};const B=r.succeed(z,z.of({execute:(e,t,r)=>n.gen(function*(){if(e.length===0)return yield*n.tryPromise({try:()=>r(),catch:e=>e});let i=e.reduceRight((e,n)=>()=>n(t,e),r);return yield*n.tryPromise({try:()=>i(),catch:e=>e})})}));let V=function(e){return e.UPLOAD_STARTED=`upload-started`,e.UPLOAD_PROGRESS=`upload-progress`,e.UPLOAD_COMPLETE=`upload-complete`,e.UPLOAD_FAILED=`upload-failed`,e.UPLOAD_VALIDATION_SUCCESS=`upload-validation-success`,e.UPLOAD_VALIDATION_FAILED=`upload-validation-failed`,e.UPLOAD_VALIDATION_WARNING=`upload-validation-warning`,e}({});const 
H=a.object({flowId:a.string(),nodeId:a.string(),jobId:a.string()}).optional(),U=a.union([a.object({type:a.union([a.literal(V.UPLOAD_STARTED),a.literal(V.UPLOAD_COMPLETE)]),data:y,flow:H}),a.object({type:a.literal(V.UPLOAD_PROGRESS),data:a.object({id:a.string(),progress:a.number(),total:a.number()}),flow:H}),a.object({type:a.literal(V.UPLOAD_FAILED),data:a.object({id:a.string(),error:a.string()}),flow:H}),a.object({type:a.literal(V.UPLOAD_VALIDATION_SUCCESS),data:a.object({id:a.string(),validationType:a.enum([`checksum`,`mimetype`]),algorithm:a.string().optional()}),flow:H}),a.object({type:a.literal(V.UPLOAD_VALIDATION_FAILED),data:a.object({id:a.string(),reason:a.string(),expected:a.string(),actual:a.string()}),flow:H}),a.object({type:a.literal(V.UPLOAD_VALIDATION_WARNING),data:a.object({id:a.string(),message:a.string()}),flow:H})]),W=i.union([i.object({type:i.literal(`upload_event`),payload:U,timestamp:i.string().optional()}),i.object({type:i.literal(`flow_event`),payload:i.any(),timestamp:i.string().optional()}),i.object({type:i.literal(`subscribed`),payload:i.object({eventKey:i.string()}),timestamp:i.string().optional()}),i.object({type:i.literal(`error`),message:i.string().optional()}),i.object({type:i.literal(`pong`),timestamp:i.string().optional()}),i.object({type:i.literal(`ping`),timestamp:i.string().optional()}),i.object({type:i.literal(`connection`),message:i.string().optional(),uploadId:i.string().optional(),timestamp:i.string().optional()})]);export{p as A,E as C,y as D,_ as E,l as F,f as I,o as L,d as M,g as N,u as O,m as P,s as R,w as S,v as T,D as _,B as a,b,L as c,N as d,O as f,M as g,P as h,z as i,c as j,h as k,I as l,k as m,V as n,R as o,j as p,U as r,F as s,W as t,A as u,C as v,T as w,S as x,x as y};
2
- //# sourceMappingURL=websocket-CdgVhVJs.mjs.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"websocket-CdgVhVJs.mjs","names":["z"],"sources":["../src/types/circuit-breaker-store.ts","../src/types/kv-store.ts","../src/types/upload-file.ts","../src/types/data-store.ts","../src/types/event-broadcaster.ts","../src/types/event-emitter.ts","../src/types/health-check.ts","../src/types/input-file.ts","../src/types/middleware.ts","../src/types/upload-event.ts","../src/types/websocket.ts"],"sourcesContent":["/**\n * Circuit Breaker Store - Distributed state storage for circuit breakers.\n *\n * This module defines the interface for storing circuit breaker state in\n * distributed environments. It allows circuit breaker state to be shared\n * across multiple instances in a cluster.\n *\n * @module types/circuit-breaker-store\n */\n\nimport { Context, type Effect, Layer } from \"effect\";\nimport type { UploadistaError } from \"../errors\";\n\n// ============================================================================\n// State Types\n// ============================================================================\n\n/**\n * Circuit breaker state values.\n */\nexport type CircuitBreakerStateValue = \"closed\" | \"open\" | \"half-open\";\n\n/**\n * Persisted circuit breaker state data.\n *\n * This represents the full state of a circuit breaker that needs to be\n * stored and shared across instances.\n */\nexport interface CircuitBreakerStateData {\n /** Current circuit state */\n state: CircuitBreakerStateValue;\n /** Number of failures in current window */\n failureCount: number;\n /** Timestamp of last state transition */\n lastStateChange: number;\n /** Number of successful requests in half-open state */\n halfOpenSuccesses: number;\n /** Timestamp when the current failure window started */\n windowStart: number;\n /** Configuration snapshot for consistency */\n config: {\n failureThreshold: number;\n resetTimeout: number;\n halfOpenRequests: number;\n windowDuration: number;\n };\n}\n\n/**\n * Statistics about a circuit breaker.\n 
*/\nexport interface CircuitBreakerStats {\n nodeType: string;\n state: CircuitBreakerStateValue;\n failureCount: number;\n halfOpenSuccesses: number;\n timeSinceLastStateChange: number;\n timeUntilHalfOpen?: number; // Only when state is \"open\"\n}\n\n// ============================================================================\n// Store Interface\n// ============================================================================\n\n/**\n * Interface for circuit breaker state storage.\n *\n * Implementations should handle distributed state for circuit breakers,\n * allowing multiple instances to share circuit state. The interface is\n * designed to work with eventually consistent stores - perfect consistency\n * is not required for circuit breaker functionality.\n *\n * @example\n * ```typescript\n * // Using the store\n * const store: CircuitBreakerStore = yield* CircuitBreakerStoreService;\n *\n * // Record a failure\n * const newCount = yield* store.incrementFailures(\"describe-image\", 60000);\n * if (newCount >= 5) {\n * yield* store.setState(\"describe-image\", {\n * state: \"open\",\n * failureCount: newCount,\n * lastStateChange: Date.now(),\n * // ...\n * });\n * }\n * ```\n */\nexport interface CircuitBreakerStore {\n /**\n * Gets the current state data for a circuit breaker.\n *\n * @param nodeType - The node type identifier\n * @returns The state data or null if no state exists\n */\n readonly getState: (\n nodeType: string,\n ) => Effect.Effect<CircuitBreakerStateData | null, UploadistaError>;\n\n /**\n * Sets the complete state for a circuit breaker.\n *\n * @param nodeType - The node type identifier\n * @param state - The new state data\n */\n readonly setState: (\n nodeType: string,\n state: CircuitBreakerStateData,\n ) => Effect.Effect<void, UploadistaError>;\n\n /**\n * Increments the failure count and returns the new count.\n *\n * This operation should be atomic where possible. 
For stores that don't\n * support atomic increment, a read-modify-write is acceptable as circuit\n * breakers tolerate eventual consistency.\n *\n * The implementation should also handle window expiry - if the window\n * has expired, reset the count before incrementing.\n *\n * @param nodeType - The node type identifier\n * @param windowDuration - Duration of the sliding window in milliseconds\n * @returns The new failure count after incrementing\n */\n readonly incrementFailures: (\n nodeType: string,\n windowDuration: number,\n ) => Effect.Effect<number, UploadistaError>;\n\n /**\n * Resets the failure count to zero.\n *\n * Called when circuit closes or on successful requests.\n *\n * @param nodeType - The node type identifier\n */\n readonly resetFailures: (\n nodeType: string,\n ) => Effect.Effect<void, UploadistaError>;\n\n /**\n * Increments the half-open success count.\n *\n * @param nodeType - The node type identifier\n * @returns The new half-open success count\n */\n readonly incrementHalfOpenSuccesses: (\n nodeType: string,\n ) => Effect.Effect<number, UploadistaError>;\n\n /**\n * Gets statistics for all tracked circuit breakers.\n *\n * @returns Map of node type to stats\n */\n readonly getAllStats: () => Effect.Effect<\n Map<string, CircuitBreakerStats>,\n UploadistaError\n >;\n\n /**\n * Deletes circuit breaker state for a node type.\n *\n * @param nodeType - The node type identifier\n */\n readonly delete: (nodeType: string) => Effect.Effect<void, UploadistaError>;\n}\n\n// ============================================================================\n// Effect Context\n// ============================================================================\n\n/**\n * Effect-TS context tag for the CircuitBreakerStore service.\n *\n * Use this to inject a circuit breaker store into your Effect programs.\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const cbStore = yield* CircuitBreakerStoreService;\n * const state = 
yield* cbStore.getState(\"my-node-type\");\n * // ...\n * });\n *\n * // Provide the implementation\n * const result = yield* program.pipe(\n * Effect.provide(kvCircuitBreakerStoreLayer)\n * );\n * ```\n */\nexport class CircuitBreakerStoreService extends Context.Tag(\n \"CircuitBreakerStoreService\",\n)<CircuitBreakerStoreService, CircuitBreakerStore>() {}\n\n// ============================================================================\n// Default State Factory\n// ============================================================================\n\n/**\n * Creates a default initial state for a circuit breaker.\n *\n * @param config - Circuit breaker configuration\n * @returns Initial state data with closed circuit\n */\nexport function createInitialCircuitBreakerState(config: {\n failureThreshold: number;\n resetTimeout: number;\n halfOpenRequests: number;\n windowDuration: number;\n}): CircuitBreakerStateData {\n const now = Date.now();\n return {\n state: \"closed\",\n failureCount: 0,\n lastStateChange: now,\n halfOpenSuccesses: 0,\n windowStart: now,\n config,\n };\n}\n","import { Context, Effect, Layer } from \"effect\";\nimport { UploadistaError } from \"../errors\";\nimport type { DeadLetterItem, FlowJob } from \"../flow\";\nimport type { UploadFile } from \"./upload-file\";\n\n/**\n * Base key-value store interface for raw string storage.\n *\n * This is the low-level interface that storage adapters implement.\n * It stores raw string values without type safety or serialization.\n *\n * @property get - Retrieves a value by key, returns null if not found\n * @property set - Stores a value with the given key\n * @property delete - Removes a value by key\n * @property list - Optional operation to list all keys with a given prefix\n *\n * @example\n * ```typescript\n * // Implement a BaseKvStore with Redis\n * const redisKvStore: BaseKvStore = {\n * get: (key) => Effect.tryPromise({\n * try: () => redis.get(key),\n * catch: (error) => 
UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error })\n * }),\n *\n * set: (key, value) => Effect.tryPromise({\n * try: () => redis.set(key, value),\n * catch: (error) => UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error })\n * }),\n *\n * delete: (key) => Effect.tryPromise({\n * try: () => redis.del(key),\n * catch: (error) => UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error })\n * }),\n *\n * list: (prefix) => Effect.tryPromise({\n * try: () => redis.keys(`${prefix}*`),\n * catch: (error) => UploadistaError.fromCode(\"UNKNOWN_ERROR\", { cause: error })\n * })\n * };\n * ```\n */\nexport interface BaseKvStore {\n readonly get: (key: string) => Effect.Effect<string | null, UploadistaError>;\n readonly set: (\n key: string,\n value: string,\n ) => Effect.Effect<void, UploadistaError>;\n readonly delete: (key: string) => Effect.Effect<void, UploadistaError>;\n readonly list?: (\n keyPrefix: string,\n ) => Effect.Effect<Array<string>, UploadistaError>;\n}\n\n/**\n * Type-safe key-value store interface with automatic serialization.\n *\n * This wraps a BaseKvStore and handles JSON serialization/deserialization\n * for a specific data type, providing type safety and eliminating the need\n * for manual JSON.stringify/parse calls.\n *\n * @template TData - The type of data stored in this KV store\n *\n * @property get - Retrieves and deserializes a value, fails if not found\n * @property set - Serializes and stores a value\n * @property delete - Removes a value by key\n * @property list - Optional operation to list all keys (without prefix)\n *\n * @example\n * ```typescript\n * // Use a typed KV store\n * const uploadStore: KvStore<UploadFile> = new TypedKvStore(\n * baseStore,\n * \"uploads:\",\n * jsonSerializer.serialize,\n * jsonSerializer.deserialize\n * );\n *\n * // Store and retrieve typed data\n * const program = Effect.gen(function* () {\n * const file: UploadFile = {\n * id: \"file123\",\n * offset: 0,\n * storage: { id: \"s3\", 
type: \"s3\" }\n * };\n *\n * // Automatic serialization\n * yield* uploadStore.set(\"file123\", file);\n *\n * // Automatic deserialization with type safety\n * const retrieved = yield* uploadStore.get(\"file123\");\n * console.log(retrieved.offset); // TypeScript knows this is a number\n * });\n * ```\n */\nexport type KvStore<TData> = {\n readonly get: (key: string) => Effect.Effect<TData, UploadistaError>;\n readonly set: (\n key: string,\n value: TData,\n ) => Effect.Effect<void, UploadistaError>;\n readonly delete: (key: string) => Effect.Effect<void, UploadistaError>;\n readonly list?: () => Effect.Effect<Array<string>, UploadistaError>;\n};\n\n/**\n * Typed wrapper class that adds serialization to a BaseKvStore.\n *\n * This class implements the KvStore interface by wrapping a BaseKvStore\n * and handling serialization/deserialization for a specific type. It also\n * adds a key prefix to isolate different data types in the same store.\n *\n * @template TData - The type of data to store\n *\n * @example\n * ```typescript\n * // Create a typed store for UploadFile\n * const uploadFileStore = new TypedKvStore<UploadFile>(\n * baseKvStore,\n * \"uploadista:upload-file:\", // All keys will be prefixed\n * (data) => JSON.stringify(data),\n * (str) => JSON.parse(str) as UploadFile\n * );\n *\n * // Use the store\n * const effect = Effect.gen(function* () {\n * const file: UploadFile = { ... 
};\n * yield* uploadFileStore.set(\"abc123\", file);\n * // Internally stores at key \"uploadista:upload-file:abc123\"\n *\n * const retrieved = yield* uploadFileStore.get(\"abc123\");\n * return retrieved;\n * });\n *\n * // Custom serialization for binary data\n * const binaryStore = new TypedKvStore<Uint8Array>(\n * baseKvStore,\n * \"binary:\",\n * (data) => btoa(String.fromCharCode(...data)), // Base64 encode\n * (str) => Uint8Array.from(atob(str), c => c.charCodeAt(0)) // Base64 decode\n * );\n * ```\n */\nexport class TypedKvStore<TData> implements KvStore<TData> {\n constructor(\n private baseStore: BaseKvStore,\n private keyPrefix: string,\n private serialize: (data: TData) => string,\n private deserialize: (str: string) => TData,\n ) {}\n\n get = (key: string): Effect.Effect<TData, UploadistaError> =>\n this.baseStore.get(this.keyPrefix + key).pipe(\n Effect.flatMap((value) => {\n if (value === null) {\n return Effect.fail(\n UploadistaError.fromCode(\"FILE_NOT_FOUND\", {\n cause: `Key \"${key}\" not found`,\n }),\n );\n }\n try {\n return Effect.succeed(this.deserialize(value));\n } catch (error) {\n return Effect.fail(\n new UploadistaError({\n code: \"VALIDATION_ERROR\",\n status: 400,\n body: `Failed to deserialize value for key \"${key}\"`,\n cause: error,\n }),\n );\n }\n }),\n );\n\n set = (key: string, value: TData): Effect.Effect<void, UploadistaError> => {\n try {\n const serialized = this.serialize(value);\n return this.baseStore.set(this.keyPrefix + key, serialized);\n } catch (error) {\n return Effect.fail(\n new UploadistaError({\n code: \"VALIDATION_ERROR\",\n status: 400,\n body: `Failed to serialize value for key \"${key}\"`,\n cause: error,\n }),\n );\n }\n };\n\n delete = (key: string): Effect.Effect<void, UploadistaError> =>\n this.baseStore.delete(this.keyPrefix + key);\n\n list = (): Effect.Effect<Array<string>, UploadistaError> => {\n if (this.baseStore.list) {\n // Get keys with prefix and strip the prefix for use with 
get/set/delete\n return this.baseStore\n .list(this.keyPrefix)\n .pipe(\n Effect.map((keys) =>\n keys.map((key) =>\n key.startsWith(this.keyPrefix)\n ? key.slice(this.keyPrefix.length)\n : key,\n ),\n ),\n );\n }\n return Effect.fail(\n new UploadistaError({\n code: \"UNKNOWN_ERROR\",\n status: 501,\n body: \"List operation not supported by this store\",\n }),\n );\n };\n}\n\n/**\n * Default JSON serialization helpers.\n *\n * These functions provide standard JSON serialization for use with TypedKvStore.\n * They work with any JSON-serializable type.\n *\n * @example\n * ```typescript\n * const store = new TypedKvStore<MyType>(\n * baseStore,\n * \"mydata:\",\n * jsonSerializer.serialize,\n * jsonSerializer.deserialize\n * );\n * ```\n */\nexport const jsonSerializer = {\n serialize: <T>(data: T): string => JSON.stringify(data),\n deserialize: <T>(str: string): T => JSON.parse(str),\n};\n\n/**\n * Effect-TS context tag for the base untyped KV store.\n *\n * This is the low-level store that storage adapter implementations provide.\n * Most application code should use typed stores like UploadFileKVStore instead.\n *\n * @example\n * ```typescript\n * // Provide a base store implementation\n * const baseStoreLayer = Layer.succeed(BaseKvStoreService, redisKvStore);\n *\n * // Use in an Effect\n * const effect = Effect.gen(function* () {\n * const baseStore = yield* BaseKvStoreService;\n * yield* baseStore.set(\"raw-key\", \"raw-value\");\n * });\n * ```\n */\nexport class BaseKvStoreService extends Context.Tag(\"BaseKvStore\")<\n BaseKvStoreService,\n BaseKvStore\n>() {}\n\n/**\n * Effect-TS context tag for the UploadFile typed KV store.\n *\n * This provides type-safe storage for UploadFile metadata. 
It's the primary\n * way to store and retrieve upload metadata in the system.\n *\n * @example\n * ```typescript\n * const uploadEffect = Effect.gen(function* () {\n * const kvStore = yield* UploadFileKVStore;\n *\n * // Store upload metadata\n * const file: UploadFile = {\n * id: \"upload123\",\n * offset: 0,\n * storage: { id: \"s3\", type: \"s3\" }\n * };\n * yield* kvStore.set(\"upload123\", file);\n *\n * // Retrieve with type safety\n * const retrieved = yield* kvStore.get(\"upload123\");\n * return retrieved;\n * });\n * ```\n */\nexport class UploadFileKVStore extends Context.Tag(\"UploadFileKVStore\")<\n UploadFileKVStore,\n KvStore<UploadFile>\n>() {}\n\n/**\n * Effect Layer that creates the UploadFileKVStore from a BaseKvStore.\n *\n * This layer automatically wires up JSON serialization for UploadFile objects\n * with the \"uploadista:upload-file:\" key prefix.\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const kvStore = yield* UploadFileKVStore;\n * // Use the store...\n * }).pipe(\n * Effect.provide(uploadFileKvStore),\n * Effect.provide(baseStoreLayer)\n * );\n * ```\n */\nexport const uploadFileKvStore = Layer.effect(\n UploadFileKVStore,\n Effect.gen(function* () {\n const baseStore = yield* BaseKvStoreService;\n return new TypedKvStore<UploadFile>(\n baseStore,\n \"uploadista:upload-file:\",\n jsonSerializer.serialize,\n jsonSerializer.deserialize,\n );\n }),\n);\n\n/**\n * Effect-TS context tag for the FlowJob typed KV store.\n *\n * This provides type-safe storage for FlowJob metadata, tracking the\n * execution state of flow processing jobs.\n *\n * @example\n * ```typescript\n * const flowEffect = Effect.gen(function* () {\n * const jobStore = yield* FlowJobKVStore;\n *\n * // Store job state\n * const job: FlowJob = {\n * id: \"job123\",\n * flowId: \"flow_resize\",\n * status: \"running\",\n * tasks: [],\n * createdAt: new Date(),\n * updatedAt: new Date()\n * };\n * yield* jobStore.set(\"job123\", 
job);\n *\n * // Retrieve and check status\n * const retrieved = yield* jobStore.get(\"job123\");\n * return retrieved.status;\n * });\n * ```\n */\nexport class FlowJobKVStore extends Context.Tag(\"FlowJobKVStore\")<\n FlowJobKVStore,\n KvStore<FlowJob>\n>() {}\n\n/**\n * Effect Layer that creates the FlowJobKVStore from a BaseKvStore.\n *\n * This layer automatically wires up JSON serialization for FlowJob objects\n * with the \"uploadista:flow-job:\" key prefix.\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const jobStore = yield* FlowJobKVStore;\n * // Use the store...\n * }).pipe(\n * Effect.provide(flowJobKvStore),\n * Effect.provide(baseStoreLayer)\n * );\n * ```\n */\nexport const flowJobKvStore = Layer.effect(\n FlowJobKVStore,\n Effect.gen(function* () {\n const baseStore = yield* BaseKvStoreService;\n return new TypedKvStore<FlowJob>(\n baseStore,\n \"uploadista:flow-job:\",\n jsonSerializer.serialize,\n jsonSerializer.deserialize,\n );\n }),\n);\n\n/**\n * Effect-TS context tag for the Dead Letter Queue typed KV store.\n *\n * This provides type-safe storage for DeadLetterItem objects, tracking\n * failed flow jobs for retry, debugging, and manual intervention.\n *\n * @example\n * ```typescript\n * const dlqEffect = Effect.gen(function* () {\n * const dlqStore = yield* DeadLetterQueueKVStore;\n *\n * // Store a DLQ item\n * const item: DeadLetterItem = {\n * id: \"dlq_123\",\n * jobId: \"job_456\",\n * flowId: \"image-pipeline\",\n * storageId: \"s3\",\n * clientId: \"client_789\",\n * error: { code: \"FLOW_NODE_ERROR\", message: \"Timeout\" },\n * inputs: { input: { uploadId: \"upload_abc\" } },\n * nodeResults: {},\n * retryCount: 0,\n * maxRetries: 3,\n * retryHistory: [],\n * createdAt: new Date(),\n * updatedAt: new Date(),\n * status: \"pending\"\n * };\n * yield* dlqStore.set(\"dlq_123\", item);\n *\n * // Retrieve with type safety\n * const retrieved = yield* dlqStore.get(\"dlq_123\");\n * return 
retrieved.status;\n * });\n * ```\n */\nexport class DeadLetterQueueKVStore extends Context.Tag(\n \"DeadLetterQueueKVStore\",\n)<DeadLetterQueueKVStore, KvStore<DeadLetterItem>>() {}\n\n/**\n * Effect Layer that creates the DeadLetterQueueKVStore from a BaseKvStore.\n *\n * This layer automatically wires up JSON serialization for DeadLetterItem objects\n * with the \"uploadista:dlq:\" key prefix.\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const dlqStore = yield* DeadLetterQueueKVStore;\n * // Use the store...\n * }).pipe(\n * Effect.provide(deadLetterQueueKvStore),\n * Effect.provide(baseStoreLayer)\n * );\n * ```\n */\nexport const deadLetterQueueKvStore = Layer.effect(\n DeadLetterQueueKVStore,\n Effect.gen(function* () {\n const baseStore = yield* BaseKvStoreService;\n return new TypedKvStore<DeadLetterItem>(\n baseStore,\n \"uploadista:dlq:\",\n jsonSerializer.serialize,\n jsonSerializer.deserialize,\n );\n }),\n);\n","import { z } from \"zod\";\n\n/**\n * Zod schema for validating UploadFile objects.\n *\n * This schema defines the structure and validation rules for upload file metadata.\n * Use this schema to parse and validate UploadFile data from external sources.\n *\n * @see {@link UploadFile} for the TypeScript type\n */\n/**\n * Zod schema for trace context used in distributed tracing.\n */\nexport const traceContextSchema = z.object({\n traceId: z.string(),\n spanId: z.string(),\n traceFlags: z.number(),\n});\n\n/**\n * JSON value type that allows any JSON-serializable data.\n * Used for metadata values which can be primitives, arrays, or nested objects.\n */\nexport type JsonValue =\n | string\n | number\n | boolean\n | null\n | JsonValue[]\n | { [key: string]: JsonValue };\n\n/**\n * JSON value schema that allows any JSON-serializable data.\n * This is used for metadata values which can be primitives, arrays, or objects.\n */\nexport const jsonValueSchema: z.ZodType<JsonValue> = z.lazy(() =>\n z.union([\n 
z.string(),\n z.number(),\n z.boolean(),\n z.null(),\n z.array(jsonValueSchema),\n z.record(z.string(), jsonValueSchema),\n ]),\n);\n\nexport const uploadFileSchema = z.object({\n id: z.string(),\n size: z.number().optional(),\n offset: z.number(),\n metadata: z.record(z.string(), jsonValueSchema).optional(),\n creationDate: z.string().optional(),\n url: z.string().optional(),\n sizeIsDeferred: z.boolean().optional(),\n checksum: z.string().optional(),\n checksumAlgorithm: z.string().optional(),\n storage: z.object({\n id: z.string(),\n type: z.string(),\n path: z.string().optional(),\n uploadId: z.string().optional(),\n bucket: z.string().optional(),\n parts: z\n .array(\n z.object({\n partNumber: z.number(),\n etag: z.string(),\n size: z.number(),\n }),\n )\n .optional(),\n }),\n flow: z\n .object({\n flowId: z.string(),\n nodeId: z.string(),\n jobId: z.string(),\n })\n .optional(),\n traceContext: traceContextSchema.optional(),\n});\n\n/**\n * Represents an uploaded file with its metadata and storage information.\n *\n * This is the core data structure that tracks file uploads throughout their lifecycle.\n * It contains all metadata needed to resume uploads, track progress, and locate files\n * in storage backends.\n *\n * @property id - Unique identifier for this upload\n * @property offset - Current byte offset (how many bytes have been uploaded)\n * @property storage - Storage backend information\n * @property storage.id - Storage backend identifier (e.g., \"s3-production\")\n * @property storage.type - Storage backend type (e.g., \"s3\", \"azure\", \"gcs\")\n * @property storage.path - Optional path prefix within the storage backend\n * @property storage.uploadId - Optional backend-specific upload ID (e.g., S3 multipart upload ID)\n * @property storage.bucket - Optional bucket or container name\n * @property storage.parts - Optional array of uploaded parts (used by data stores that need to track parts locally, like R2)\n * @property flow - Optional flow 
processing information (when file is part of a flow)\n * @property flow.flowId - ID of the flow processing this file\n * @property flow.nodeId - ID of the flow node that created this file\n * @property flow.jobId - ID of the flow job execution\n * @property size - Total file size in bytes (undefined if deferred)\n * @property metadata - Custom key-value metadata attached to the file\n * @property creationDate - ISO 8601 timestamp when upload was created\n * @property url - Optional public URL to access the file\n * @property sizeIsDeferred - True if file size is not known at upload start\n * @property checksum - Optional file checksum/hash value\n * @property checksumAlgorithm - Algorithm used for checksum (e.g., \"md5\", \"sha256\")\n *\n * @example\n * ```typescript\n * // Create an UploadFile for a new upload\n * const uploadFile: UploadFile = {\n * id: \"upload_abc123\",\n * offset: 0,\n * size: 1024000,\n * storage: {\n * id: \"s3-production\",\n * type: \"s3\",\n * bucket: \"my-uploads\",\n * path: \"files/\"\n * },\n * metadata: {\n * fileName: \"image.jpg\",\n * contentType: \"image/jpeg\",\n * userId: \"user_123\"\n * },\n * creationDate: new Date().toISOString(),\n * checksum: \"5d41402abc4b2a76b9719d911017c592\",\n * checksumAlgorithm: \"md5\"\n * };\n *\n * // UploadFile with flow processing\n * const flowFile: UploadFile = {\n * id: \"upload_xyz789\",\n * offset: 0,\n * size: 2048000,\n * storage: {\n * id: \"s3-temp\",\n * type: \"s3\",\n * bucket: \"temp-processing\"\n * },\n * flow: {\n * flowId: \"flow_resize_optimize\",\n * nodeId: \"input_1\",\n * jobId: \"job_456\"\n * }\n * };\n *\n * // Resume an interrupted upload\n * const resumingFile: UploadFile = {\n * id: \"upload_resume\",\n * offset: 524288, // Already uploaded 512KB\n * size: 1024000,\n * storage: {\n * id: \"s3-production\",\n * type: \"s3\",\n * uploadId: \"multipart_xyz\" // S3 multipart upload ID\n * }\n * };\n * ```\n */\n/**\n * Trace context for distributed tracing.\n * Allows 
upload operations to be linked under a single trace.\n */\nexport type UploadFileTraceContext = {\n /** 128-bit trace identifier (32 hex characters) */\n traceId: string;\n /** 64-bit span identifier (16 hex characters) */\n spanId: string;\n /** Trace flags (1 = sampled) */\n traceFlags: number;\n};\n\nexport type UploadFile = {\n id: string;\n offset: number;\n storage: {\n id: string;\n type: string;\n path?: string | undefined;\n uploadId?: string | undefined;\n bucket?: string | undefined;\n parts?:\n | Array<{\n partNumber: number;\n etag: string;\n size: number;\n }>\n | undefined;\n };\n flow?: {\n flowId: string;\n nodeId: string;\n jobId: string;\n };\n size?: number | undefined;\n metadata?: Record<string, JsonValue> | undefined;\n creationDate?: string | undefined;\n url?: string | undefined;\n sizeIsDeferred?: boolean | undefined;\n checksum?: string | undefined;\n checksumAlgorithm?: string | undefined;\n /**\n * OpenTelemetry trace context for distributed tracing.\n * When set, subsequent upload operations (chunks, validation) will be\n * linked as children of this trace context.\n */\n traceContext?: UploadFileTraceContext | undefined;\n};\n","import { Context, Effect, Layer, type Stream } from \"effect\";\nimport { UploadistaError } from \"../errors/uploadista-error\";\nimport type { UploadFileKVStore } from \"./kv-store\";\nimport type { UploadFile } from \"./upload-file\";\n\n/**\n * Options for writing data to a DataStore.\n *\n * @property file_id - Unique identifier for the file being written\n * @property stream - Stream of byte chunks to write to storage\n * @property offset - Byte offset where writing should begin (for resumable uploads)\n */\nexport type DataStoreWriteOptions = {\n file_id: string;\n stream: Stream.Stream<Uint8Array, UploadistaError>;\n offset: number;\n};\n\n/**\n * Upload strategy type indicating how chunks are uploaded.\n *\n * - `single`: Upload file in a single request (traditional upload)\n * - `parallel`: Upload 
file chunks in parallel (for large files)\n */\nexport type UploadStrategy = \"single\" | \"parallel\";\n\n/**\n * Configuration options for streaming file reads.\n *\n * Used to control streaming behavior in transform nodes and data stores.\n *\n * @property fileSizeThreshold - Files below this size use buffered mode (default: 1MB)\n * @property chunkSize - Chunk size for streaming reads in bytes (default: 64KB)\n *\n * @example\n * ```typescript\n * const config: StreamingConfig = {\n * fileSizeThreshold: 1_048_576, // 1MB - use buffered for smaller files\n * chunkSize: 65_536, // 64KB chunks\n * };\n * ```\n */\nexport type StreamingConfig = {\n /** Files below this size use buffered mode (default: 1MB = 1_048_576 bytes) */\n fileSizeThreshold?: number;\n /** Chunk size for streaming reads in bytes (default: 64KB = 65_536 bytes) */\n chunkSize?: number;\n};\n\n/**\n * Default streaming configuration values.\n */\nexport const DEFAULT_STREAMING_CONFIG: Required<StreamingConfig> = {\n fileSizeThreshold: 1_048_576, // 1MB\n chunkSize: 65_536, // 64KB\n};\n\n/**\n * Default multipart part size for S3/R2 streaming writes.\n * S3 requires minimum 5MB parts (except for the last part).\n */\nexport const DEFAULT_MULTIPART_PART_SIZE = 5 * 1024 * 1024; // 5MB\n\n/**\n * Options for streaming write operations.\n *\n * Used when writing file content from a stream with unknown final size.\n * The store will finalize the upload when the stream completes.\n *\n * @property stream - Effect Stream of byte chunks to write\n * @property contentType - Optional MIME type for the file\n * @property metadata - Optional metadata to store with the file\n * @property sizeHint - Optional estimated size for optimization (e.g., multipart part sizing)\n *\n * @example\n * ```typescript\n * const options: StreamWriteOptions = {\n * stream: transformedStream,\n * contentType: \"image/webp\",\n * metadata: { originalName: \"photo.jpg\" },\n * sizeHint: 5_000_000, // ~5MB expected\n * };\n * 
```\n */\nexport type StreamWriteOptions = {\n stream: Stream.Stream<Uint8Array, UploadistaError>;\n contentType?: string;\n metadata?: Record<string, string>;\n /** Optional size hint for optimization (not required) */\n sizeHint?: number;\n};\n\n/**\n * Result of a streaming write operation.\n *\n * Contains the final size after the stream completes, along with\n * storage location information.\n *\n * @property id - Unique identifier of the written file\n * @property size - Final size in bytes after stream completed\n * @property path - Storage path or key where file was written\n * @property bucket - Optional bucket/container name (for cloud storage)\n *\n * @example\n * ```typescript\n * const result = yield* dataStore.writeStream(fileId, options);\n * console.log(`Wrote ${result.size} bytes to ${result.path}`);\n * ```\n */\nexport type StreamWriteResult = {\n id: string;\n size: number;\n path: string;\n bucket?: string;\n /** Public URL for accessing the uploaded file (if available) */\n url?: string;\n};\n\n/**\n * Capabilities and constraints of a DataStore implementation.\n *\n * This type describes what features a storage backend supports and what\n * limitations it has. 
Use this to determine the optimal upload strategy\n * and validate client requests.\n *\n * @property supportsParallelUploads - Can upload chunks in parallel (e.g., S3 multipart)\n * @property supportsConcatenation - Can concatenate multiple uploads into one file\n * @property supportsDeferredLength - Can start upload without knowing final size\n * @property supportsResumableUploads - Can resume interrupted uploads from last offset\n * @property supportsTransactionalUploads - Guarantees atomic upload success/failure\n * @property supportsStreamingRead - Can read file content as a stream instead of buffering\n * @property maxConcurrentUploads - Maximum parallel upload parts (if parallel supported)\n * @property minChunkSize - Minimum size in bytes for each chunk (except last)\n * @property maxChunkSize - Maximum size in bytes for each chunk\n * @property maxParts - Maximum number of parts in a multipart upload\n * @property optimalChunkSize - Recommended chunk size for best performance\n * @property requiresOrderedChunks - Must receive chunks in sequential order\n * @property requiresMimeTypeValidation - Validates file MIME type matches declaration\n * @property maxValidationSize - Maximum file size for MIME type validation\n *\n * @example\n * ```typescript\n * const capabilities = dataStore.getCapabilities();\n *\n * if (capabilities.supportsParallelUploads && fileSize > 10_000_000) {\n * // Use parallel upload for large files\n * const chunkSize = capabilities.optimalChunkSize || 5_242_880; // 5MB default\n * uploadInParallel(file, chunkSize);\n * } else {\n * // Use single upload\n * uploadAsSingleChunk(file);\n * }\n *\n * // Check for streaming support\n * if (capabilities.supportsStreamingRead) {\n * // Use streaming for memory-efficient processing\n * const stream = yield* dataStore.readStream(fileId);\n * }\n * ```\n */\nexport type DataStoreCapabilities = {\n supportsParallelUploads: boolean;\n supportsConcatenation: boolean;\n supportsDeferredLength: 
boolean;\n supportsResumableUploads: boolean;\n supportsTransactionalUploads: boolean;\n /** Whether the store supports streaming reads via readStream() */\n supportsStreamingRead?: boolean;\n /** Whether the store supports streaming writes via writeStream() with unknown final size */\n supportsStreamingWrite?: boolean;\n maxConcurrentUploads?: number;\n minChunkSize?: number;\n maxChunkSize?: number;\n maxParts?: number;\n optimalChunkSize?: number;\n requiresOrderedChunks: boolean;\n requiresMimeTypeValidation?: boolean;\n maxValidationSize?: number;\n};\n\n/**\n * Core interface for all storage backend implementations.\n *\n * DataStore abstracts file storage operations across different backends\n * (S3, Azure Blob, GCS, local filesystem, etc.). All storage adapters\n * must implement this interface.\n *\n * @template TData - The data type stored (typically UploadFile)\n *\n * @property bucket - Optional storage bucket or container name\n * @property path - Optional base path prefix for all stored files\n * @property create - Creates a new file record in storage\n * @property remove - Deletes a file from storage\n * @property read - Reads complete file contents as bytes\n * @property write - Writes data stream to storage at specified offset\n * @property deleteExpired - Optional cleanup of expired files\n * @property getCapabilities - Returns storage backend capabilities\n * @property validateUploadStrategy - Validates if strategy is supported\n *\n * @example\n * ```typescript\n * // Implement a custom DataStore\n * const myDataStore: DataStore<UploadFile> = {\n * bucket: \"my-uploads\",\n * path: \"files/\",\n *\n * create: (file) => Effect.gen(function* () {\n * // Store file metadata\n * yield* saveMetadata(file);\n * return file;\n * }),\n *\n * write: ({ file_id, stream, offset }, { onProgress }) => Effect.gen(function* () {\n * // Write chunks to storage\n * let bytesWritten = offset;\n * yield* Stream.runForEach(stream, (chunk) => Effect.sync(() => {\n * 
writeChunk(file_id, chunk, bytesWritten);\n * bytesWritten += chunk.byteLength;\n * onProgress?.(chunk.byteLength);\n * }));\n * return bytesWritten;\n * }),\n *\n * read: (file_id) => Effect.gen(function* () {\n * // Read complete file\n * const data = yield* readFromStorage(file_id);\n * return data;\n * }),\n *\n * remove: (file_id) => Effect.gen(function* () {\n * yield* deleteFromStorage(file_id);\n * }),\n *\n * getCapabilities: () => ({\n * supportsParallelUploads: true,\n * supportsConcatenation: false,\n * supportsDeferredLength: true,\n * supportsResumableUploads: true,\n * supportsTransactionalUploads: false,\n * maxConcurrentUploads: 10,\n * optimalChunkSize: 5_242_880, // 5MB\n * requiresOrderedChunks: false,\n * }),\n *\n * validateUploadStrategy: (strategy) =>\n * Effect.succeed(strategy === \"parallel\" || strategy === \"single\"),\n * };\n * ```\n */\nexport type DataStore<TData = unknown> = {\n readonly bucket?: string;\n readonly path?: string;\n readonly create: (file: TData) => Effect.Effect<TData, UploadistaError>;\n readonly remove: (file_id: string) => Effect.Effect<void, UploadistaError>;\n /**\n * Reads the complete file contents as bytes (buffered mode).\n * For large files, consider using readStream() if available.\n */\n readonly read: (\n file_id: string,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n /**\n * Reads file content as a stream of chunks for memory-efficient processing.\n * Optional - check getCapabilities().supportsStreamingRead before using.\n *\n * @param file_id - The unique identifier of the file to read\n * @param config - Optional streaming configuration (chunk size)\n * @returns An Effect that resolves to a Stream of byte chunks\n *\n * @example\n * ```typescript\n * const capabilities = dataStore.getCapabilities();\n * if (capabilities.supportsStreamingRead && dataStore.readStream) {\n * const stream = yield* dataStore.readStream(fileId, { chunkSize: 65536 });\n * // Process stream chunk by chunk\n * }\n * 
```\n */\n readonly readStream?: (\n file_id: string,\n config?: StreamingConfig,\n ) => Effect.Effect<\n Stream.Stream<Uint8Array, UploadistaError>,\n UploadistaError\n >;\n readonly write: (\n options: DataStoreWriteOptions,\n dependencies: {\n onProgress?: (chunkSize: number) => void;\n },\n ) => Effect.Effect<number, UploadistaError>;\n /**\n * Writes file content from a stream with unknown final size.\n * Optional - check getCapabilities().supportsStreamingWrite before using.\n *\n * This method is optimized for end-to-end streaming where the output\n * size isn't known until the stream completes. It uses store-specific\n * mechanisms like multipart uploads (S3/R2), resumable uploads (GCS),\n * or block staging (Azure) to efficiently handle streaming data.\n *\n * @param fileId - Unique identifier for the file being written\n * @param options - Stream and optional metadata\n * @returns StreamWriteResult containing final size after completion\n *\n * @example\n * ```typescript\n * const capabilities = dataStore.getCapabilities();\n * if (capabilities.supportsStreamingWrite && dataStore.writeStream) {\n * const result = yield* dataStore.writeStream(fileId, {\n * stream: transformedStream,\n * contentType: \"image/webp\",\n * });\n * console.log(`Wrote ${result.size} bytes`);\n * }\n * ```\n */\n readonly writeStream?: (\n fileId: string,\n options: StreamWriteOptions,\n ) => Effect.Effect<StreamWriteResult, UploadistaError>;\n readonly deleteExpired?: () => Effect.Effect<number, UploadistaError>;\n readonly getCapabilities: () => DataStoreCapabilities;\n readonly validateUploadStrategy: (\n strategy: UploadStrategy,\n ) => Effect.Effect<boolean, never>;\n};\n\n/**\n * Effect-TS context tag for UploadFile DataStore.\n *\n * Use this tag to access the primary DataStore in an Effect context.\n * This is the standard storage backend for uploaded files.\n *\n * @example\n * ```typescript\n * const uploadEffect = Effect.gen(function* () {\n * const dataStore = yield* 
UploadFileDataStore;\n * const file = yield* dataStore.create(uploadFile);\n * return file;\n * });\n * ```\n */\nexport class UploadFileDataStore extends Context.Tag(\"UploadFileDataStore\")<\n UploadFileDataStore,\n DataStore<UploadFile>\n>() {}\n\n/**\n * Effect-TS context tag for buffered/temporary DataStore.\n *\n * This is an optional storage backend used for temporary or intermediate files\n * during flow processing. Not all implementations provide a buffered store.\n *\n * @example\n * ```typescript\n * const processEffect = Effect.gen(function* () {\n * const bufferedStore = yield* BufferedUploadFileDataStore;\n * // Store intermediate processing results\n * const tempFile = yield* bufferedStore.create(intermediateFile);\n * return tempFile;\n * });\n * ```\n */\nexport class BufferedUploadFileDataStore extends Context.Tag(\n \"BufferedUploadFileDataStore\",\n)<BufferedUploadFileDataStore, DataStore<UploadFile>>() {}\n\n/**\n * Service interface for managing multiple DataStore instances.\n *\n * This allows routing files to different storage backends based on\n * storageId (e.g., different S3 buckets, Azure containers, or storage tiers).\n *\n * @property getDataStore - Retrieves the appropriate DataStore for a given storage ID\n * @property bufferedDataStore - Optional temporary storage for intermediate files\n */\nexport type UploadFileDataStoresShape = {\n getDataStore: (\n storageId: string,\n clientId: string | null,\n ) => Effect.Effect<DataStore<UploadFile>, UploadistaError>;\n bufferedDataStore: Effect.Effect<\n DataStore<UploadFile> | undefined,\n UploadistaError\n >;\n};\n\n/**\n * Effect-TS context tag for the DataStore routing service.\n *\n * Provides access to multiple DataStore instances with routing logic.\n *\n * @example\n * ```typescript\n * const uploadEffect = Effect.gen(function* () {\n * const dataStores = yield* UploadFileDataStores;\n * // Route to specific storage based on storageId\n * const dataStore = yield* 
dataStores.getDataStore(\"s3-production\", clientId);\n * const file = yield* dataStore.create(uploadFile);\n * return file;\n * });\n * ```\n */\nexport class UploadFileDataStores extends Context.Tag(\"UploadFileDataStores\")<\n UploadFileDataStores,\n UploadFileDataStoresShape\n>() {}\n\n/**\n * Simplified DataStore configuration for easy setup.\n *\n * This type allows flexible configuration:\n * - Single DataStore instance\n * - Multiple named stores with routing\n * - Effect that resolves to a DataStore\n * - Pre-built Effect Layer\n *\n * @example\n * ```typescript\n * // Single store\n * const config: DataStoreConfig = s3DataStore;\n *\n * // Multiple stores with routing\n * const config: DataStoreConfig = {\n * stores: {\n * \"s3-prod\": s3ProdStore,\n * \"s3-dev\": s3DevStore,\n * \"local\": localFileStore,\n * },\n * default: \"s3-prod\"\n * };\n *\n * // Effect that creates a store\n * const config: DataStoreConfig = Effect.gen(function* () {\n * const kvStore = yield* UploadFileKVStore;\n * return s3Store(kvStore);\n * });\n *\n * // Pre-built Layer\n * const config: DataStoreConfig = Layer.succeed(UploadFileDataStores, {...});\n * ```\n */\nexport type DataStoreConfig =\n | DataStore<UploadFile>\n | Effect.Effect<DataStore<UploadFile>, never, UploadFileKVStore>\n | {\n stores: Record<\n string,\n | DataStore<UploadFile>\n | Effect.Effect<DataStore<UploadFile>, never, UploadFileKVStore>\n >;\n default?: string;\n }\n | Layer.Layer<UploadFileDataStores, never, UploadFileKVStore>;\n\n/**\n * Type guard to check if a value is a DataStore instance.\n *\n * @param config - The value to check\n * @returns True if the value is a DataStore\n *\n * @example\n * ```typescript\n * if (isDataStore(config)) {\n * const capabilities = config.getCapabilities();\n * }\n * ```\n */\nexport const isDataStore = (\n config: DataStoreConfig,\n): config is DataStore<UploadFile> => {\n return \"create\" in config && \"write\" in config;\n};\n\n/**\n * Creates an Effect Layer 
from simplified DataStoreConfig.\n *\n * This function converts any DataStoreConfig format into a proper Effect Layer\n * that can be provided to the UploadFileDataStores context tag.\n *\n * It handles:\n * - Single DataStore: Wraps in a Layer that always returns that store\n * - Multiple stores: Creates routing logic with optional default\n * - Effect<DataStore>: Executes the Effect and wraps the result\n * - Layer: Returns as-is\n *\n * @param config - The DataStore configuration\n * @returns A Layer that provides UploadFileDataStores service\n *\n * @example\n * ```typescript\n * // Create from single store\n * const layer = await createDataStoreLayer(s3DataStore);\n *\n * // Create from multiple stores\n * const layer = await createDataStoreLayer({\n * stores: {\n * \"production\": s3Store,\n * \"development\": localStore,\n * },\n * default: \"development\"\n * });\n *\n * // Use the layer\n * const program = Effect.gen(function* () {\n * const stores = yield* UploadFileDataStores;\n * const store = yield* stores.getDataStore(\"production\", null);\n * return store;\n * }).pipe(Effect.provide(layer));\n * ```\n */\nexport const createDataStoreLayer = async (\n config: DataStoreConfig,\n): Promise<Layer.Layer<UploadFileDataStores, never, UploadFileKVStore>> => {\n // Already a Layer, return as-is\n if (Layer.isLayer(config)) {\n return config as Layer.Layer<\n UploadFileDataStores,\n never,\n UploadFileKVStore\n >;\n }\n\n // Check if it's an Effect\n if (Effect.isEffect(config)) {\n return Layer.effect(\n UploadFileDataStores,\n Effect.gen(function* () {\n const dataStore = config as Effect.Effect<\n DataStore<UploadFile>,\n never,\n UploadFileKVStore\n >;\n const resolvedStore = yield* dataStore;\n return {\n getDataStore: (_storageId: string) => Effect.succeed(resolvedStore),\n bufferedDataStore: Effect.succeed(undefined),\n };\n }),\n );\n }\n\n // Single store (most common case)\n if (isDataStore(config)) {\n const store = config as 
DataStore<UploadFile>;\n return Layer.succeed(UploadFileDataStores, {\n getDataStore: (_storageId: string) => Effect.succeed(store),\n bufferedDataStore: Effect.succeed(undefined),\n });\n }\n\n // Multiple stores with routing\n const multiConfig = config as {\n stores: Record<\n string,\n DataStore<UploadFile> | Effect.Effect<DataStore<UploadFile>>\n >;\n default?: string;\n };\n\n const defaultKey = multiConfig.default || Object.keys(multiConfig.stores)[0];\n\n // Resolve any Effects in the stores\n const resolvedStores: Record<string, DataStore<UploadFile>> = {};\n for (const [key, storeOrEffect] of Object.entries(multiConfig.stores)) {\n if (\"pipe\" in storeOrEffect && !(\"create\" in storeOrEffect)) {\n resolvedStores[key] = await Effect.runPromise(\n storeOrEffect as Effect.Effect<DataStore<UploadFile>>,\n );\n } else {\n resolvedStores[key] = storeOrEffect as DataStore<UploadFile>;\n }\n }\n\n return Layer.succeed(UploadFileDataStores, {\n getDataStore: (storageId: string) => {\n const store =\n resolvedStores[storageId] ||\n (defaultKey ? 
resolvedStores[defaultKey] : undefined);\n if (store) {\n return Effect.succeed(store);\n }\n return Effect.fail(UploadistaError.fromCode(\"FILE_NOT_FOUND\"));\n },\n bufferedDataStore: Effect.succeed(undefined),\n });\n};\n","import { Context, type Effect } from \"effect\";\nimport type { UploadistaError } from \"../errors\";\n\n/**\n * Event broadcaster interface for pub/sub messaging across distributed instances.\n * Used by WebSocketManager to broadcast upload events to all connected instances.\n */\nexport interface EventBroadcaster {\n /**\n * Publish a message to a channel\n */\n readonly publish: (\n channel: string,\n message: string,\n ) => Effect.Effect<void, UploadistaError>;\n\n /**\n * Subscribe to messages on a channel\n */\n readonly subscribe: (\n channel: string,\n handler: (message: string) => void,\n ) => Effect.Effect<void, UploadistaError>;\n\n /**\n * Unsubscribe from a channel (optional - not all implementations may support)\n */\n readonly unsubscribe?: (\n channel: string,\n ) => Effect.Effect<void, UploadistaError>;\n}\n\n/**\n * Context tag for EventBroadcaster service\n */\nexport class EventBroadcasterService extends Context.Tag(\"EventBroadcaster\")<\n EventBroadcasterService,\n EventBroadcaster\n>() {}\n","import { Context, Effect, Layer } from \"effect\";\nimport type { UploadistaError } from \"../errors\";\nimport type { FlowEvent } from \"../flow/event\";\nimport type { UploadEvent } from \"./upload-event\";\nimport type { WebSocketConnection } from \"./websocket\";\n\n/**\n * Base event emitter interface for raw string message broadcasting.\n *\n * This is the low-level interface that event broadcasting implementations\n * (WebSocket, Server-Sent Events, etc.) implement. 
It emits raw string messages\n * without type safety or serialization.\n *\n * @property subscribe - Registers a WebSocket connection to receive events for a key\n * @property unsubscribe - Removes subscription for a key\n * @property emit - Broadcasts a string message to all subscribers of a key\n *\n * @example\n * ```typescript\n * // Implement BaseEventEmitter with WebSocket broadcast\n * const websocketEmitter: BaseEventEmitter = {\n * subscribe: (key, connection) => Effect.sync(() => {\n * connections.set(key, [...(connections.get(key) || []), connection]);\n * }),\n *\n * unsubscribe: (key) => Effect.sync(() => {\n * connections.delete(key);\n * }),\n *\n * emit: (key, event) => Effect.sync(() => {\n * const subs = connections.get(key) || [];\n * subs.forEach(conn => conn.send(event));\n * })\n * };\n * ```\n */\nexport interface BaseEventEmitter {\n readonly subscribe: (\n key: string,\n connection: WebSocketConnection,\n ) => Effect.Effect<void, UploadistaError>;\n readonly unsubscribe: (key: string) => Effect.Effect<void, UploadistaError>;\n readonly emit: (\n key: string,\n event: string,\n ) => Effect.Effect<void, UploadistaError>;\n}\n\n/**\n * Type-safe event emitter interface with automatic serialization.\n *\n * This wraps a BaseEventEmitter and handles event serialization to JSON messages,\n * providing type safety for events and ensuring consistent message format.\n *\n * @template TEvent - The type of events emitted by this emitter\n *\n * @property subscribe - Registers a WebSocket connection to receive typed events\n * @property unsubscribe - Removes subscription\n * @property emit - Serializes and broadcasts a typed event\n *\n * @example\n * ```typescript\n * // Use a typed event emitter\n * const uploadEmitter: EventEmitter<UploadEvent> = new TypedEventEmitter(\n * baseEmitter,\n * (event) => JSON.stringify({ type: 'upload', payload: event })\n * );\n *\n * // Emit type-safe events\n * const program = Effect.gen(function* () {\n * const 
event: UploadEvent = {\n * uploadId: \"upload123\",\n * type: \"progress\",\n * offset: 1024,\n * size: 2048\n * };\n *\n * // Automatic serialization\n * yield* uploadEmitter.emit(\"upload123\", event);\n * });\n * ```\n */\nexport type EventEmitter<TEvent> = {\n readonly subscribe: (\n key: string,\n connection: WebSocketConnection,\n ) => Effect.Effect<void, UploadistaError>;\n readonly unsubscribe: (key: string) => Effect.Effect<void, UploadistaError>;\n readonly emit: (\n key: string,\n event: TEvent,\n ) => Effect.Effect<void, UploadistaError>;\n};\n\n/**\n * Typed wrapper class that adds event serialization to a BaseEventEmitter.\n *\n * This class implements the EventEmitter interface by wrapping a BaseEventEmitter\n * and handling serialization for a specific event type. It converts typed events\n * to JSON message strings before broadcasting.\n *\n * @template TEvent - The type of events to emit\n *\n * @example\n * ```typescript\n * // Create a typed emitter for UploadEvent\n * const uploadEmitter = new TypedEventEmitter<UploadEvent>(\n * baseEmitter,\n * (event) => JSON.stringify({\n * type: \"upload_event\",\n * payload: event,\n * timestamp: new Date().toISOString()\n * })\n * );\n *\n * // Use the emitter\n * const effect = Effect.gen(function* () {\n * // Subscribe a WebSocket connection\n * yield* uploadEmitter.subscribe(\"upload123\", websocket);\n *\n * // Emit an event (automatically serialized)\n * yield* uploadEmitter.emit(\"upload123\", {\n * uploadId: \"upload123\",\n * type: \"completed\",\n * offset: 2048,\n * size: 2048\n * });\n *\n * // Unsubscribe when done\n * yield* uploadEmitter.unsubscribe(\"upload123\");\n * });\n *\n * // Custom message format\n * const customEmitter = new TypedEventEmitter<MyEvent>(\n * baseEmitter,\n * (event) => `EVENT:${event.type}:${JSON.stringify(event.data)}`\n * );\n * ```\n */\nexport class TypedEventEmitter<TEvent> implements EventEmitter<TEvent> {\n constructor(\n private baseEmitter: 
BaseEventEmitter,\n private eventToMessage: (event: TEvent) => string,\n ) {}\n\n subscribe = (\n key: string,\n connection: WebSocketConnection,\n ): Effect.Effect<void, UploadistaError> =>\n this.baseEmitter.subscribe(key, connection);\n\n unsubscribe = (key: string): Effect.Effect<void, UploadistaError> =>\n this.baseEmitter.unsubscribe(key);\n\n emit = (key: string, event: TEvent): Effect.Effect<void, UploadistaError> => {\n const message = this.eventToMessage(event);\n return this.baseEmitter.emit(key, message);\n };\n}\n\n/**\n * Default event-to-message serialization helper.\n *\n * Creates a standardized JSON message format with type, payload, and timestamp.\n * This is the recommended way to serialize events for WebSocket transmission.\n *\n * @param messageType - The message type identifier (\"upload_event\" or \"flow_event\")\n * @returns An object with an eventToMessage function\n *\n * @example\n * ```typescript\n * // Create emitter with standard serialization\n * const emitter = new TypedEventEmitter<UploadEvent>(\n * baseEmitter,\n * eventToMessageSerializer(\"upload_event\").eventToMessage\n * );\n *\n * // Messages will be formatted as:\n * // {\n * // \"type\": \"upload_event\",\n * // \"payload\": { ...event data... 
},\n * // \"timestamp\": \"2024-01-15T10:30:00.000Z\"\n * // }\n * ```\n */\nexport const eventToMessageSerializer = (\n messageType: \"upload_event\" | \"flow_event\",\n) => ({\n eventToMessage: <T>(event: T): string =>\n JSON.stringify({\n type: messageType,\n payload: event,\n timestamp: new Date().toISOString(),\n }),\n});\n\n/**\n * Effect-TS context tag for the base untyped event emitter.\n *\n * This is the low-level emitter that broadcasting implementations provide.\n * Most application code should use typed emitters like UploadEventEmitter instead.\n *\n * @example\n * ```typescript\n * // Provide a base emitter implementation\n * const baseEmitterLayer = Layer.succeed(BaseEventEmitterService, websocketEmitter);\n *\n * // Use in an Effect\n * const effect = Effect.gen(function* () {\n * const baseEmitter = yield* BaseEventEmitterService;\n * yield* baseEmitter.emit(\"channel1\", \"raw message\");\n * });\n * ```\n */\nexport class BaseEventEmitterService extends Context.Tag(\"BaseEventEmitter\")<\n BaseEventEmitterService,\n BaseEventEmitter\n>() {}\n\n/**\n * Effect-TS context tag for the UploadEvent typed emitter.\n *\n * This provides type-safe event emission for upload progress and lifecycle events.\n * It's the primary way to broadcast upload events to connected clients.\n *\n * @example\n * ```typescript\n * const uploadEffect = Effect.gen(function* () {\n * const emitter = yield* UploadEventEmitter;\n *\n * // Subscribe a client to upload events\n * yield* emitter.subscribe(\"upload123\", websocketConnection);\n *\n * // Emit progress event\n * yield* emitter.emit(\"upload123\", {\n * uploadId: \"upload123\",\n * type: \"progress\",\n * offset: 512000,\n * size: 1024000\n * });\n *\n * // Emit completion event\n * yield* emitter.emit(\"upload123\", {\n * uploadId: \"upload123\",\n * type: \"completed\",\n * offset: 1024000,\n * size: 1024000\n * });\n * });\n * ```\n */\nexport class UploadEventEmitter extends Context.Tag(\"UploadEventEmitter\")<\n 
UploadEventEmitter,\n EventEmitter<UploadEvent>\n>() {}\n\n/**\n * Effect Layer that creates the UploadEventEmitter from a BaseEventEmitter.\n *\n * This layer automatically wires up JSON serialization for UploadEvent objects\n * with the standard \"upload_event\" message format.\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const emitter = yield* UploadEventEmitter;\n * // Use the emitter...\n * }).pipe(\n * Effect.provide(uploadEventEmitter),\n * Effect.provide(baseEmitterLayer)\n * );\n * ```\n */\nexport const uploadEventEmitter = Layer.effect(\n UploadEventEmitter,\n Effect.gen(function* () {\n const baseEmitter = yield* BaseEventEmitterService;\n return new TypedEventEmitter<UploadEvent>(\n baseEmitter,\n eventToMessageSerializer(\"upload_event\").eventToMessage,\n );\n }),\n);\n\n/**\n * Effect-TS context tag for the FlowEvent typed emitter.\n *\n * This provides type-safe event emission for flow processing lifecycle events.\n * It's used to broadcast flow execution progress, node completion, and errors.\n *\n * @example\n * ```typescript\n * const flowEffect = Effect.gen(function* () {\n * const emitter = yield* FlowEventEmitter;\n *\n * // Subscribe a client to flow job events\n * yield* emitter.subscribe(\"job123\", websocketConnection);\n *\n * // Emit node start event\n * yield* emitter.emit(\"job123\", {\n * jobId: \"job123\",\n * eventType: \"NodeStart\",\n * flowId: \"flow_resize\",\n * nodeId: \"resize_1\"\n * });\n *\n * // Emit node completion event\n * yield* emitter.emit(\"job123\", {\n * jobId: \"job123\",\n * eventType: \"NodeEnd\",\n * flowId: \"flow_resize\",\n * nodeId: \"resize_1\",\n * result: { width: 800, height: 600 }\n * });\n * });\n * ```\n */\nexport class FlowEventEmitter extends Context.Tag(\"FlowEventEmitter\")<\n FlowEventEmitter,\n EventEmitter<FlowEvent>\n>() {}\n\n/**\n * Effect Layer that creates the FlowEventEmitter from a BaseEventEmitter.\n *\n * This layer automatically wires up 
JSON serialization for FlowEvent objects\n * with the standard \"flow_event\" message format.\n *\n * @example\n * ```typescript\n * const program = Effect.gen(function* () {\n * const emitter = yield* FlowEventEmitter;\n * // Use the emitter...\n * }).pipe(\n * Effect.provide(flowEventEmitter),\n * Effect.provide(baseEmitterLayer)\n * );\n * ```\n */\nexport const flowEventEmitter = Layer.effect(\n FlowEventEmitter,\n Effect.gen(function* () {\n const baseEmitter = yield* BaseEventEmitterService;\n return new TypedEventEmitter<FlowEvent>(\n baseEmitter,\n eventToMessageSerializer(\"flow_event\").eventToMessage,\n );\n }),\n);\n","/**\n * Health Check Types for Uploadista SDK.\n *\n * This module provides types for the health monitoring system including:\n * - Liveness probes (`/health`)\n * - Readiness probes (`/ready`)\n * - Component health details (`/health/components`)\n *\n * @module types/health-check\n */\n\n// ============================================================================\n// Health Status Types\n// ============================================================================\n\n/**\n * Health status values for components and overall system health.\n *\n * - `healthy`: All checks passed, system is fully operational\n * - `degraded`: Some non-critical issues detected, but system is functional\n * - `unhealthy`: Critical components unavailable, system cannot serve requests\n */\nexport type HealthStatus = \"healthy\" | \"degraded\" | \"unhealthy\";\n\n// ============================================================================\n// Component Health Types\n// ============================================================================\n\n/**\n * Health status for an individual component (storage, KV store, etc.).\n */\nexport interface ComponentHealth {\n /** Current health status of the component */\n status: HealthStatus;\n /** Latency of the last health check in milliseconds */\n latency?: number;\n /** Human-readable status message */\n 
message?: string;\n /** ISO 8601 timestamp of the last health check */\n lastCheck?: string;\n}\n\n/**\n * Circuit breaker health summary aggregating all circuit states.\n */\nexport interface CircuitBreakerHealthSummary {\n /** Overall circuit breaker system status */\n status: HealthStatus;\n /** Number of circuits currently in open state */\n openCircuits: number;\n /** Total number of tracked circuits */\n totalCircuits: number;\n /** Detailed stats for each circuit (optional, for debugging) */\n circuits?: Array<{\n nodeType: string;\n state: \"closed\" | \"open\" | \"half-open\";\n failureCount: number;\n timeSinceLastStateChange: number;\n }>;\n}\n\n/**\n * Dead Letter Queue health summary.\n */\nexport interface DlqHealthSummary {\n /** Overall DLQ status */\n status: HealthStatus;\n /** Number of items pending retry */\n pendingItems: number;\n /** Number of items that have exhausted all retries */\n exhaustedItems: number;\n /** ISO 8601 timestamp of the oldest item in the queue */\n oldestItem?: string;\n}\n\n// ============================================================================\n// Response Types\n// ============================================================================\n\n/**\n * Components health map for detailed health responses.\n */\nexport interface HealthComponents {\n /** Storage backend health */\n storage?: ComponentHealth;\n /** KV store health */\n kvStore?: ComponentHealth;\n /** Event broadcaster health */\n eventBroadcaster?: ComponentHealth;\n /** Circuit breaker summary (if enabled) */\n circuitBreaker?: CircuitBreakerHealthSummary;\n /** Dead letter queue summary (if enabled) */\n deadLetterQueue?: DlqHealthSummary;\n}\n\n/**\n * Standard health response structure.\n *\n * Used for all health endpoints with varying levels of detail.\n */\nexport interface HealthResponse {\n /** Overall health status */\n status: HealthStatus;\n /** ISO 8601 timestamp of the response */\n timestamp: string;\n /** Optional version string 
for deployment identification */\n version?: string;\n /** Server uptime in milliseconds */\n uptime?: number;\n /** Component-level health details (for /health/components) */\n components?: HealthComponents;\n}\n\n// ============================================================================\n// Configuration Types\n// ============================================================================\n\n/**\n * Configuration options for health check behavior.\n */\nexport interface HealthCheckConfig {\n /**\n * Timeout for dependency health checks in milliseconds.\n * @default 5000\n */\n timeout?: number;\n\n /**\n * Whether to check storage backend health.\n * @default true\n */\n checkStorage?: boolean;\n\n /**\n * Whether to check KV store health.\n * @default true\n */\n checkKvStore?: boolean;\n\n /**\n * Whether to check event broadcaster health.\n * @default true\n */\n checkEventBroadcaster?: boolean;\n\n /**\n * Optional version string to include in health responses.\n * Useful for identifying deployed versions.\n */\n version?: string;\n}\n\n/**\n * Default health check configuration values.\n */\nexport const DEFAULT_HEALTH_CHECK_CONFIG: Required<\n Omit<HealthCheckConfig, \"version\">\n> = {\n timeout: 5000,\n checkStorage: true,\n checkKvStore: true,\n checkEventBroadcaster: true,\n};\n\n// ============================================================================\n// Request Types (for Accept header handling)\n// ============================================================================\n\n/**\n * Supported response formats for health endpoints.\n */\nexport type HealthResponseFormat = \"json\" | \"text\";\n\n/**\n * Determines the response format based on Accept header.\n *\n * @param acceptHeader - The Accept header value from the request\n * @returns The response format to use\n */\nexport function getHealthResponseFormat(\n acceptHeader?: string | null,\n): HealthResponseFormat {\n if (acceptHeader?.includes(\"text/plain\")) {\n return 
\"text\";\n }\n return \"json\";\n}\n\n/**\n * Formats a health response as plain text.\n *\n * @param status - The health status\n * @returns Plain text representation\n */\nexport function formatHealthAsText(status: HealthStatus): string {\n switch (status) {\n case \"healthy\":\n return \"OK\";\n case \"degraded\":\n return \"OK\"; // Degraded still returns OK for liveness\n case \"unhealthy\":\n return \"Service Unavailable\";\n }\n}\n","import { z } from \"zod\";\n\n/**\n * Zod schema for validating InputFile objects.\n *\n * This schema defines the structure and validation rules for file upload requests.\n * Use this schema to parse and validate input data when creating new uploads.\n *\n * @see {@link InputFile} for the TypeScript type\n */\nexport const inputFileSchema = z\n .object({\n uploadLengthDeferred: z.boolean().optional(),\n storageId: z.string(),\n /** File size in bytes. Optional when uploadLengthDeferred is true. */\n size: z.number().optional(),\n /** Optional size hint for optimization when size is unknown */\n sizeHint: z.number().optional(),\n type: z.string(),\n fileName: z.string().optional(),\n lastModified: z.number().optional(),\n metadata: z.string().optional(),\n checksum: z.string().optional(),\n checksumAlgorithm: z.string().optional(),\n flow: z\n .object({\n flowId: z.string(),\n nodeId: z.string(),\n jobId: z.string(),\n })\n .optional(),\n })\n .refine(\n (data) => {\n // Size is required unless uploadLengthDeferred is true\n if (data.uploadLengthDeferred === true) {\n return true; // Size can be omitted\n }\n return data.size !== undefined && data.size >= 0;\n },\n {\n message: \"size is required when uploadLengthDeferred is not true\",\n path: [\"size\"],\n },\n );\n\n/**\n * Represents the input data for creating a new file upload.\n *\n * This type defines the information required to initiate an upload.\n * It's used by clients to provide upload metadata before sending file data.\n *\n * @property storageId - Target storage 
backend identifier (e.g., \"s3-production\", \"azure-blob\")\n * @property size - File size in bytes. Optional when uploadLengthDeferred is true.\n * @property sizeHint - Optional size hint for optimization when exact size is unknown\n * @property type - MIME type of the file (e.g., \"image/jpeg\", \"application/pdf\")\n * @property uploadLengthDeferred - If true, file size is not known upfront (streaming upload)\n * @property fileName - Original filename from the client\n * @property lastModified - File's last modified timestamp in milliseconds since epoch\n * @property metadata - Base64-encoded metadata string (as per tus protocol)\n * @property checksum - Expected file checksum for validation\n * @property checksumAlgorithm - Algorithm used for checksum (e.g., \"md5\", \"sha256\")\n * @property flow - Optional flow processing configuration\n * @property flow.flowId - ID of the flow to execute on this file\n * @property flow.nodeId - Starting node ID in the flow\n * @property flow.jobId - Flow job execution ID\n *\n * @example\n * ```typescript\n * // Basic file upload\n * const inputFile: InputFile = {\n * storageId: \"s3-production\",\n * size: 1024000,\n * type: \"image/jpeg\",\n * fileName: \"photo.jpg\",\n * lastModified: Date.now()\n * };\n *\n * // Upload with metadata (base64 encoded as per tus protocol)\n * const metadata = btoa(JSON.stringify({\n * userId: \"user_123\",\n * albumId: \"album_456\"\n * }));\n * const inputWithMetadata: InputFile = {\n * storageId: \"s3-production\",\n * size: 2048000,\n * type: \"image/png\",\n * fileName: \"screenshot.png\",\n * metadata\n * };\n *\n * // Upload with checksum validation\n * const inputWithChecksum: InputFile = {\n * storageId: \"s3-production\",\n * size: 512000,\n * type: \"application/pdf\",\n * fileName: \"document.pdf\",\n * checksum: \"5d41402abc4b2a76b9719d911017c592\",\n * checksumAlgorithm: \"md5\"\n * };\n *\n * // Upload that triggers a flow\n * const inputWithFlow: InputFile = {\n * storageId: 
\"s3-temp\",\n * size: 4096000,\n * type: \"image/jpeg\",\n * fileName: \"large-image.jpg\",\n * flow: {\n * flowId: \"resize-and-optimize\",\n * nodeId: \"input_1\",\n * jobId: \"job_789\"\n * }\n * };\n *\n * // Streaming upload (size unknown) - size can be omitted\n * const streamingInput: InputFile = {\n * storageId: \"s3-production\",\n * type: \"video/mp4\",\n * uploadLengthDeferred: true,\n * fileName: \"live-stream.mp4\"\n * };\n *\n * // Streaming upload with size hint for optimization\n * const streamingWithHint: InputFile = {\n * storageId: \"s3-production\",\n * type: \"image/webp\",\n * uploadLengthDeferred: true,\n * sizeHint: 5_000_000, // ~5MB expected\n * fileName: \"optimized-image.webp\"\n * };\n * ```\n */\nexport type InputFile = z.infer<typeof inputFileSchema>;\n","import { Context, Effect, Layer } from \"effect\";\nimport type { UploadistaError } from \"../errors\";\n\nexport type MiddlewareContext = {\n request: Request;\n uploadId?: string;\n metadata?: Record<string, string>;\n};\n\nexport type MiddlewareNext = () => Promise<Response>;\n\nexport type Middleware = (\n context: MiddlewareContext,\n next: MiddlewareNext,\n) => Promise<Response>;\n\n// Effect-based Middleware service\nexport class MiddlewareService extends Context.Tag(\"MiddlewareService\")<\n MiddlewareService,\n {\n readonly execute: (\n middlewares: Middleware[],\n context: MiddlewareContext,\n handler: MiddlewareNext,\n ) => Effect.Effect<Response, UploadistaError>;\n }\n>() {}\n\nexport const MiddlewareServiceLive = Layer.succeed(\n MiddlewareService,\n MiddlewareService.of({\n execute: (middlewares, context, handler) =>\n Effect.gen(function* () {\n if (middlewares.length === 0) {\n return yield* Effect.tryPromise({\n try: () => handler(),\n catch: (error) => error as UploadistaError,\n });\n }\n\n const chain = middlewares.reduceRight(\n (next: MiddlewareNext, middleware: Middleware) => {\n return () => middleware(context, next);\n },\n handler,\n );\n\n return yield* 
Effect.tryPromise({\n try: () => chain(),\n catch: (error) => error as UploadistaError,\n });\n }),\n }),\n);\n","import { z } from \"zod\";\nimport { uploadFileSchema } from \"./upload-file\";\n\nexport enum UploadEventType {\n UPLOAD_STARTED = \"upload-started\",\n UPLOAD_PROGRESS = \"upload-progress\",\n UPLOAD_COMPLETE = \"upload-complete\",\n UPLOAD_FAILED = \"upload-failed\",\n UPLOAD_VALIDATION_SUCCESS = \"upload-validation-success\",\n UPLOAD_VALIDATION_FAILED = \"upload-validation-failed\",\n UPLOAD_VALIDATION_WARNING = \"upload-validation-warning\",\n}\n\nconst flowContextSchema = z\n .object({\n flowId: z.string(),\n nodeId: z.string(),\n jobId: z.string(),\n })\n .optional();\n\nexport const uploadEventSchema = z.union([\n z.object({\n type: z.union([\n z.literal(UploadEventType.UPLOAD_STARTED),\n z.literal(UploadEventType.UPLOAD_COMPLETE),\n ]),\n data: uploadFileSchema,\n flow: flowContextSchema,\n }),\n z.object({\n type: z.literal(UploadEventType.UPLOAD_PROGRESS),\n data: z.object({\n id: z.string(),\n progress: z.number(),\n total: z.number(),\n }),\n flow: flowContextSchema,\n }),\n z.object({\n type: z.literal(UploadEventType.UPLOAD_FAILED),\n data: z.object({\n id: z.string(),\n error: z.string(),\n }),\n flow: flowContextSchema,\n }),\n z.object({\n type: z.literal(UploadEventType.UPLOAD_VALIDATION_SUCCESS),\n data: z.object({\n id: z.string(),\n validationType: z.enum([\"checksum\", \"mimetype\"]),\n algorithm: z.string().optional(),\n }),\n flow: flowContextSchema,\n }),\n z.object({\n type: z.literal(UploadEventType.UPLOAD_VALIDATION_FAILED),\n data: z.object({\n id: z.string(),\n reason: z.string(),\n expected: z.string(),\n actual: z.string(),\n }),\n flow: flowContextSchema,\n }),\n z.object({\n type: z.literal(UploadEventType.UPLOAD_VALIDATION_WARNING),\n data: z.object({\n id: z.string(),\n message: z.string(),\n }),\n flow: flowContextSchema,\n }),\n]);\n\nexport type UploadEvent = z.infer<typeof uploadEventSchema>;\n","import z from 
\"zod\";\nimport { uploadEventSchema } from \"./upload-event\";\n\n/**\n * Platform-agnostic WebSocket connection interface\n */\nexport interface WebSocketConnection {\n send(data: string): void;\n close(code?: number, reason?: string): void;\n readonly readyState: number;\n readonly id: string;\n}\n\n/**\n * WebSocket message that can be sent/received\n */\n\nexport const webSocketMessageSchema = z.union([\n z.object({\n type: z.literal(\"upload_event\"),\n payload: uploadEventSchema,\n timestamp: z.string().optional(),\n }),\n z.object({\n type: z.literal(\"flow_event\"),\n payload: z.any(), // FlowEvent doesn't have a zod schema, using z.any() for now\n timestamp: z.string().optional(),\n }),\n z.object({\n type: z.literal(\"subscribed\"),\n payload: z.object({ eventKey: z.string() }),\n timestamp: z.string().optional(),\n }),\n z.object({\n type: z.literal(\"error\"),\n message: z.string().optional(),\n }),\n z.object({\n type: z.literal(\"pong\"),\n timestamp: z.string().optional(),\n }),\n z.object({\n type: z.literal(\"ping\"),\n timestamp: z.string().optional(),\n }),\n z.object({\n type: z.literal(\"connection\"),\n message: z.string().optional(),\n uploadId: z.string().optional(),\n timestamp: z.string().optional(),\n }),\n]);\n\nexport type WebSocketMessage<TEvent = unknown> =\n | z.infer<typeof webSocketMessageSchema>\n | {\n type: \"upload_event\";\n payload: TEvent;\n timestamp?: string;\n }\n | {\n type: \"flow_event\";\n payload: TEvent;\n timestamp?: string;\n 
};\n"],"mappings":"uIAgMA,IAAa,EAAb,cAAgD,EAAQ,IACtD,6BACD,EAAmD,AAAC,GAYrD,SAAgB,EAAiC,EAKrB,CAC1B,IAAM,EAAM,KAAK,KAAK,CACtB,MAAO,CACL,MAAO,SACP,aAAc,EACd,gBAAiB,EACjB,kBAAmB,EACnB,YAAa,EACb,SACD,CC7EH,IAAa,EAAb,KAA2D,CACzD,YACE,EACA,EACA,EACA,EACA,CAJQ,KAAA,UAAA,EACA,KAAA,UAAA,EACA,KAAA,UAAA,EACA,KAAA,YAAA,EAGV,IAAO,GACL,KAAK,UAAU,IAAI,KAAK,UAAY,EAAI,CAAC,KACvC,EAAO,QAAS,GAAU,CACxB,GAAI,IAAU,KACZ,OAAO,EAAO,KACZ,EAAgB,SAAS,iBAAkB,CACzC,MAAO,QAAQ,EAAI,aACpB,CAAC,CACH,CAEH,GAAI,CACF,OAAO,EAAO,QAAQ,KAAK,YAAY,EAAM,CAAC,OACvC,EAAO,CACd,OAAO,EAAO,KACZ,IAAI,EAAgB,CAClB,KAAM,mBACN,OAAQ,IACR,KAAM,wCAAwC,EAAI,GAClD,MAAO,EACR,CAAC,CACH,GAEH,CACH,CAEH,KAAO,EAAa,IAAuD,CACzE,GAAI,CACF,IAAM,EAAa,KAAK,UAAU,EAAM,CACxC,OAAO,KAAK,UAAU,IAAI,KAAK,UAAY,EAAK,EAAW,OACpD,EAAO,CACd,OAAO,EAAO,KACZ,IAAI,EAAgB,CAClB,KAAM,mBACN,OAAQ,IACR,KAAM,sCAAsC,EAAI,GAChD,MAAO,EACR,CAAC,CACH,GAIL,OAAU,GACR,KAAK,UAAU,OAAO,KAAK,UAAY,EAAI,CAE7C,SACM,KAAK,UAAU,KAEV,KAAK,UACT,KAAK,KAAK,UAAU,CACpB,KACC,EAAO,IAAK,GACV,EAAK,IAAK,GACR,EAAI,WAAW,KAAK,UAAU,CAC1B,EAAI,MAAM,KAAK,UAAU,OAAO,CAChC,EACL,CACF,CACF,CAEE,EAAO,KACZ,IAAI,EAAgB,CAClB,KAAM,gBACN,OAAQ,IACR,KAAM,6CACP,CAAC,CACH,EAoBL,MAAa,EAAiB,CAC5B,UAAe,GAAoB,KAAK,UAAU,EAAK,CACvD,YAAiB,GAAmB,KAAK,MAAM,EAAI,CACpD,CAoBD,IAAa,EAAb,cAAwC,EAAQ,IAAI,cAAc,EAG/D,AAAC,GA2BS,EAAb,cAAuC,EAAQ,IAAI,oBAAoB,EAGpE,AAAC,GAmBJ,MAAa,EAAoB,EAAM,OACrC,EACA,EAAO,IAAI,WAAa,CAEtB,OAAO,IAAI,EADO,MAAO,EAGvB,0BACA,EAAe,UACf,EAAe,YAChB,EACD,CACH,CA8BD,IAAa,EAAb,cAAoC,EAAQ,IAAI,iBAAiB,EAG9D,AAAC,GAmBJ,MAAa,EAAiB,EAAM,OAClC,EACA,EAAO,IAAI,WAAa,CAEtB,OAAO,IAAI,EADO,MAAO,EAGvB,uBACA,EAAe,UACf,EAAe,YAChB,EACD,CACH,CAsCD,IAAa,EAAb,cAA4C,EAAQ,IAClD,yBACD,EAAmD,AAAC,GAmBrD,MAAa,EAAyB,EAAM,OAC1C,EACA,EAAO,IAAI,WAAa,CAEtB,OAAO,IAAI,EADO,MAAO,EAGvB,kBACA,EAAe,UACf,EAAe,YAChB,EACD,CACH,CC1bY,EAAqB,EAAE,OAAO,CACzC,QAAS,EAAE,QAAQ,CACnB,OAAQ,EAAE,QAAQ,CAClB,WAAY,EAAE,QAAQ,CACvB,CAAC,CAkBW,EAAwC,EAAE,SACrD,EAAE,MAAM,CACN,EAAE,QAAQ,CACV,EAAE,QAAQ,CACV,EAAE,SAAS,CACX,EAAE,MAAM,CACR,EAAE,MAAM,EAAgB,CACxB,EAAE
,OAAO,EAAE,QAAQ,CAAE,EAAgB,CACtC,CAAC,CACH,CAEY,EAAmB,EAAE,OAAO,CACvC,GAAI,EAAE,QAAQ,CACd,KAAM,EAAE,QAAQ,CAAC,UAAU,CAC3B,OAAQ,EAAE,QAAQ,CAClB,SAAU,EAAE,OAAO,EAAE,QAAQ,CAAE,EAAgB,CAAC,UAAU,CAC1D,aAAc,EAAE,QAAQ,CAAC,UAAU,CACnC,IAAK,EAAE,QAAQ,CAAC,UAAU,CAC1B,eAAgB,EAAE,SAAS,CAAC,UAAU,CACtC,SAAU,EAAE,QAAQ,CAAC,UAAU,CAC/B,kBAAmB,EAAE,QAAQ,CAAC,UAAU,CACxC,QAAS,EAAE,OAAO,CAChB,GAAI,EAAE,QAAQ,CACd,KAAM,EAAE,QAAQ,CAChB,KAAM,EAAE,QAAQ,CAAC,UAAU,CAC3B,SAAU,EAAE,QAAQ,CAAC,UAAU,CAC/B,OAAQ,EAAE,QAAQ,CAAC,UAAU,CAC7B,MAAO,EACJ,MACC,EAAE,OAAO,CACP,WAAY,EAAE,QAAQ,CACtB,KAAM,EAAE,QAAQ,CAChB,KAAM,EAAE,QAAQ,CACjB,CAAC,CACH,CACA,UAAU,CACd,CAAC,CACF,KAAM,EACH,OAAO,CACN,OAAQ,EAAE,QAAQ,CAClB,OAAQ,EAAE,QAAQ,CAClB,MAAO,EAAE,QAAQ,CAClB,CAAC,CACD,UAAU,CACb,aAAc,EAAmB,UAAU,CAC5C,CAAC,CC5BW,EAAsD,CACjE,kBAAmB,QACnB,UAAW,MACZ,CAMY,EAA8B,EAAI,KAAO,KAyRtD,IAAa,EAAb,cAAyC,EAAQ,IAAI,sBAAsB,EAGxE,AAAC,GAkBS,EAAb,cAAiD,EAAQ,IACvD,8BACD,EAAsD,AAAC,GAsC3C,EAAb,cAA0C,EAAQ,IAAI,uBAAuB,EAG1E,AAAC,GA8DJ,MAAa,EACX,GAEO,WAAY,GAAU,UAAW,EAwC7B,EAAuB,KAClC,IACyE,CAEzE,GAAI,EAAM,QAAQ,EAAO,CACvB,OAAO,EAQT,GAAI,EAAO,SAAS,EAAO,CACzB,OAAO,EAAM,OACX,EACA,EAAO,IAAI,WAAa,CAMtB,IAAM,EAAgB,MALJ,EAMlB,MAAO,CACL,aAAe,GAAuB,EAAO,QAAQ,EAAc,CACnE,kBAAmB,EAAO,QAAQ,IAAA,GAAU,CAC7C,EACD,CACH,CAIH,GAAI,EAAY,EAAO,CAAE,CACvB,IAAM,EAAQ,EACd,OAAO,EAAM,QAAQ,EAAsB,CACzC,aAAe,GAAuB,EAAO,QAAQ,EAAM,CAC3D,kBAAmB,EAAO,QAAQ,IAAA,GAAU,CAC7C,CAAC,CAIJ,IAAM,EAAc,EAQd,EAAa,EAAY,SAAW,OAAO,KAAK,EAAY,OAAO,CAAC,GAGpE,EAAwD,EAAE,CAChE,IAAK,GAAM,CAAC,EAAK,KAAkB,OAAO,QAAQ,EAAY,OAAO,CAC/D,SAAU,GAAiB,EAAE,WAAY,GAC3C,EAAe,GAAO,MAAM,EAAO,WACjC,EACD,CAED,EAAe,GAAO,EAI1B,OAAO,EAAM,QAAQ,EAAsB,CACzC,aAAe,GAAsB,CACnC,IAAM,EACJ,EAAe,KACd,EAAa,EAAe,GAAc,IAAA,IAI7C,OAHI,EACK,EAAO,QAAQ,EAAM,CAEvB,EAAO,KAAK,EAAgB,SAAS,iBAAiB,CAAC,EAEhE,kBAAmB,EAAO,QAAQ,IAAA,GAAU,CAC7C,CAAC,ECtiBJ,IAAa,EAAb,cAA6C,EAAQ,IAAI,mBAAmB,EAGzE,AAAC,GCqGS,EAAb,KAAuE,CACrE,YACE,EACA,EACA,CAFQ,KAAA,YAAA,EACA,KAAA,eAAA,EAGV,WACE,EACA,IAEA,KAAK,YAAY,UAAU,EAAK,EAAW,CAE7C,YAAe,GACb,KAAK,YAAY,YAAY,EAAI
,CAEnC,MAAQ,EAAa,IAAwD,CAC3E,IAAM,EAAU,KAAK,eAAe,EAAM,CAC1C,OAAO,KAAK,YAAY,KAAK,EAAK,EAAQ,GA6B9C,MAAa,EACX,IACI,CACJ,eAAoB,GAClB,KAAK,UAAU,CACb,KAAM,EACN,QAAS,EACT,UAAW,IAAI,MAAM,CAAC,aAAa,CACpC,CAAC,CACL,EAoBD,IAAa,EAAb,cAA6C,EAAQ,IAAI,mBAAmB,EAGzE,AAAC,GAkCS,EAAb,cAAwC,EAAQ,IAAI,qBAAqB,EAGtE,AAAC,GAmBJ,MAAa,EAAqB,EAAM,OACtC,EACA,EAAO,IAAI,WAAa,CAEtB,OAAO,IAAI,EADS,MAAO,EAGzB,EAAyB,eAAe,CAAC,eAC1C,EACD,CACH,CAmCD,IAAa,EAAb,cAAsC,EAAQ,IAAI,mBAAmB,EAGlE,AAAC,GAmBJ,MAAa,EAAmB,EAAM,OACpC,EACA,EAAO,IAAI,WAAa,CAEtB,OAAO,IAAI,EADS,MAAO,EAGzB,EAAyB,aAAa,CAAC,eACxC,EACD,CACH,CCjMY,EAET,CACF,QAAS,IACT,aAAc,GACd,aAAc,GACd,sBAAuB,GACxB,CAiBD,SAAgB,EACd,EACsB,CAItB,OAHI,GAAc,SAAS,aAAa,CAC/B,OAEF,OAST,SAAgB,EAAmB,EAA8B,CAC/D,OAAQ,EAAR,CACE,IAAK,UACH,MAAO,KACT,IAAK,WACH,MAAO,KACT,IAAK,YACH,MAAO,uBC/Lb,MAAa,EAAkB,EAC5B,OAAO,CACN,qBAAsB,EAAE,SAAS,CAAC,UAAU,CAC5C,UAAW,EAAE,QAAQ,CAErB,KAAM,EAAE,QAAQ,CAAC,UAAU,CAE3B,SAAU,EAAE,QAAQ,CAAC,UAAU,CAC/B,KAAM,EAAE,QAAQ,CAChB,SAAU,EAAE,QAAQ,CAAC,UAAU,CAC/B,aAAc,EAAE,QAAQ,CAAC,UAAU,CACnC,SAAU,EAAE,QAAQ,CAAC,UAAU,CAC/B,SAAU,EAAE,QAAQ,CAAC,UAAU,CAC/B,kBAAmB,EAAE,QAAQ,CAAC,UAAU,CACxC,KAAM,EACH,OAAO,CACN,OAAQ,EAAE,QAAQ,CAClB,OAAQ,EAAE,QAAQ,CAClB,MAAO,EAAE,QAAQ,CAClB,CAAC,CACD,UAAU,CACd,CAAC,CACD,OACE,GAEK,EAAK,uBAAyB,GACzB,GAEF,EAAK,OAAS,IAAA,IAAa,EAAK,MAAQ,EAEjD,CACE,QAAS,yDACT,KAAM,CAAC,OAAO,CACf,CACF,CC3BH,IAAa,EAAb,cAAuC,EAAQ,IAAI,oBAAoB,EASpE,AAAC,GAEJ,MAAa,EAAwB,EAAM,QACzC,EACA,EAAkB,GAAG,CACnB,SAAU,EAAa,EAAS,IAC9B,EAAO,IAAI,WAAa,CACtB,GAAI,EAAY,SAAW,EACzB,OAAO,MAAO,EAAO,WAAW,CAC9B,QAAW,GAAS,CACpB,MAAQ,GAAU,EACnB,CAAC,CAGJ,IAAM,EAAQ,EAAY,aACvB,EAAsB,QACR,EAAW,EAAS,EAAK,CAExC,EACD,CAED,OAAO,MAAO,EAAO,WAAW,CAC9B,QAAW,GAAO,CAClB,MAAQ,GAAU,EACnB,CAAC,EACF,CACL,CAAC,CACH,CClDD,IAAY,EAAA,SAAA,EAAL,OACL,GAAA,eAAA,iBACA,EAAA,gBAAA,kBACA,EAAA,gBAAA,kBACA,EAAA,cAAA,gBACA,EAAA,0BAAA,4BACA,EAAA,yBAAA,2BACA,EAAA,0BAAA,mCAGF,MAAM,EAAoB,EACvB,OAAO,CACN,OAAQ,EAAE,QAAQ,CAClB,OAAQ,EAAE,QAAQ,CAClB,MAAO,EAAE,QAAQ,CAClB,CAAC,CACD,UAAU,CAEA,EAAoB,EAAE,MAAM,CACvC,E
AAE,OAAO,CACP,KAAM,EAAE,MAAM,CACZ,EAAE,QAAQ,EAAgB,eAAe,CACzC,EAAE,QAAQ,EAAgB,gBAAgB,CAC3C,CAAC,CACF,KAAM,EACN,KAAM,EACP,CAAC,CACF,EAAE,OAAO,CACP,KAAM,EAAE,QAAQ,EAAgB,gBAAgB,CAChD,KAAM,EAAE,OAAO,CACb,GAAI,EAAE,QAAQ,CACd,SAAU,EAAE,QAAQ,CACpB,MAAO,EAAE,QAAQ,CAClB,CAAC,CACF,KAAM,EACP,CAAC,CACF,EAAE,OAAO,CACP,KAAM,EAAE,QAAQ,EAAgB,cAAc,CAC9C,KAAM,EAAE,OAAO,CACb,GAAI,EAAE,QAAQ,CACd,MAAO,EAAE,QAAQ,CAClB,CAAC,CACF,KAAM,EACP,CAAC,CACF,EAAE,OAAO,CACP,KAAM,EAAE,QAAQ,EAAgB,0BAA0B,CAC1D,KAAM,EAAE,OAAO,CACb,GAAI,EAAE,QAAQ,CACd,eAAgB,EAAE,KAAK,CAAC,WAAY,WAAW,CAAC,CAChD,UAAW,EAAE,QAAQ,CAAC,UAAU,CACjC,CAAC,CACF,KAAM,EACP,CAAC,CACF,EAAE,OAAO,CACP,KAAM,EAAE,QAAQ,EAAgB,yBAAyB,CACzD,KAAM,EAAE,OAAO,CACb,GAAI,EAAE,QAAQ,CACd,OAAQ,EAAE,QAAQ,CAClB,SAAU,EAAE,QAAQ,CACpB,OAAQ,EAAE,QAAQ,CACnB,CAAC,CACF,KAAM,EACP,CAAC,CACF,EAAE,OAAO,CACP,KAAM,EAAE,QAAQ,EAAgB,0BAA0B,CAC1D,KAAM,EAAE,OAAO,CACb,GAAI,EAAE,QAAQ,CACd,QAAS,EAAE,QAAQ,CACpB,CAAC,CACF,KAAM,EACP,CAAC,CACH,CAAC,CCzDW,EAAyBA,EAAE,MAAM,CAC5CA,EAAE,OAAO,CACP,KAAMA,EAAE,QAAQ,eAAe,CAC/B,QAAS,EACT,UAAWA,EAAE,QAAQ,CAAC,UAAU,CACjC,CAAC,CACFA,EAAE,OAAO,CACP,KAAMA,EAAE,QAAQ,aAAa,CAC7B,QAASA,EAAE,KAAK,CAChB,UAAWA,EAAE,QAAQ,CAAC,UAAU,CACjC,CAAC,CACFA,EAAE,OAAO,CACP,KAAMA,EAAE,QAAQ,aAAa,CAC7B,QAASA,EAAE,OAAO,CAAE,SAAUA,EAAE,QAAQ,CAAE,CAAC,CAC3C,UAAWA,EAAE,QAAQ,CAAC,UAAU,CACjC,CAAC,CACFA,EAAE,OAAO,CACP,KAAMA,EAAE,QAAQ,QAAQ,CACxB,QAASA,EAAE,QAAQ,CAAC,UAAU,CAC/B,CAAC,CACFA,EAAE,OAAO,CACP,KAAMA,EAAE,QAAQ,OAAO,CACvB,UAAWA,EAAE,QAAQ,CAAC,UAAU,CACjC,CAAC,CACFA,EAAE,OAAO,CACP,KAAMA,EAAE,QAAQ,OAAO,CACvB,UAAWA,EAAE,QAAQ,CAAC,UAAU,CACjC,CAAC,CACFA,EAAE,OAAO,CACP,KAAMA,EAAE,QAAQ,aAAa,CAC7B,QAASA,EAAE,QAAQ,CAAC,UAAU,CAC9B,SAAUA,EAAE,QAAQ,CAAC,UAAU,CAC/B,UAAWA,EAAE,QAAQ,CAAC,UAAU,CACjC,CAAC,CACH,CAAC"}