@uploadista/core 0.0.20-beta.7 → 0.0.20-beta.8

Files changed (47)
  1. package/dist/flow/index.cjs +1 -1
  2. package/dist/flow/index.d.cts +2 -2
  3. package/dist/flow/index.d.mts +2 -2
  4. package/dist/flow/index.mjs +1 -1
  5. package/dist/flow-BHVkk_6W.cjs +1 -0
  6. package/dist/{flow-_OmguvZm.mjs → flow-DlhHOlMk.mjs} +2 -2
  7. package/dist/flow-DlhHOlMk.mjs.map +1 -0
  8. package/dist/index-9gyMMEIB.d.cts.map +1 -1
  9. package/dist/index-B9V5SSxl.d.mts.map +1 -1
  10. package/dist/{index-DjhpgW08.d.cts → index-BtDyiX5-.d.cts} +1483 -1483
  11. package/dist/index-BtDyiX5-.d.cts.map +1 -0
  12. package/dist/{index-BY620LiC.d.mts → index-XXHmCYAu.d.mts} +1483 -1483
  13. package/dist/index-XXHmCYAu.d.mts.map +1 -0
  14. package/dist/index.cjs +1 -1
  15. package/dist/index.d.cts +2 -2
  16. package/dist/index.d.mts +2 -2
  17. package/dist/index.mjs +1 -1
  18. package/dist/testing/index.cjs +2 -2
  19. package/dist/testing/index.d.cts +9 -9
  20. package/dist/testing/index.d.cts.map +1 -1
  21. package/dist/testing/index.d.mts +9 -9
  22. package/dist/testing/index.d.mts.map +1 -1
  23. package/dist/testing/index.mjs +2 -2
  24. package/dist/testing/index.mjs.map +1 -1
  25. package/dist/types/index.d.cts +1 -1
  26. package/dist/types/index.d.mts +1 -1
  27. package/dist/upload/index.cjs +1 -1
  28. package/dist/upload/index.d.cts +2 -2
  29. package/dist/upload/index.d.mts +2 -2
  30. package/dist/upload/index.mjs +1 -1
  31. package/dist/{upload-tLC7uR9U.mjs → upload-C-C7hn1-.mjs} +2 -2
  32. package/dist/{upload-tLC7uR9U.mjs.map → upload-C-C7hn1-.mjs.map} +1 -1
  33. package/dist/{upload-BHDuuJ80.cjs → upload-DWBlRXHh.cjs} +1 -1
  34. package/package.json +7 -7
  35. package/src/flow/{flow-server.ts → flow-engine.ts} +106 -106
  36. package/src/flow/index.ts +10 -10
  37. package/src/flow/nodes/input-node.ts +5 -5
  38. package/src/flow/nodes/transform-node.ts +11 -14
  39. package/src/flow/typed-flow.ts +22 -20
  40. package/src/testing/index.ts +1 -1
  41. package/src/testing/{mock-upload-server.ts → mock-upload-engine.ts} +10 -10
  42. package/src/upload/index.ts +1 -1
  43. package/src/upload/{upload-server.ts → upload-engine.ts} +44 -40
  44. package/dist/flow-Cv8vCBQ2.cjs +0 -1
  45. package/dist/flow-_OmguvZm.mjs.map +0 -1
  46. package/dist/index-BY620LiC.d.mts.map +0 -1
  47. package/dist/index-DjhpgW08.d.cts.map +0 -1
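
The headline change in this release is a rename of the public service names from *Server to *Engine: UploadServer → UploadEngine, FlowServer → FlowEngine, createFlowServer → createFlowEngine, and the flowServer layer → flowEngine, as the renamed source files above and the diffs below show. A minimal consumer-side migration sketch; the diffs only show the package-internal modules, so the entry-point import paths here are assumptions inferred from the dist layout (adjust to your setup):

```typescript
import { Effect } from "effect";
// Assumed entry points, inferred from dist/flow and dist/upload; adjust as needed.
import { FlowEngine } from "@uploadista/core/flow";
import { UploadEngine } from "@uploadista/core/upload";

// Before 0.0.20-beta.8 these tags were named FlowServer and UploadServer;
// the method surfaces themselves are unchanged.
const program = Effect.gen(function* () {
  const flows = yield* FlowEngine; // was: yield* FlowServer
  const uploads = yield* UploadEngine; // was: yield* UploadServer
  return yield* uploads.getUpload("some-upload-id"); // placeholder id
});
```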
package/dist/{upload-BHDuuJ80.cjs → upload-DWBlRXHh.cjs} RENAMED
@@ -1 +1 @@
- const e=require(`./types-Cws60JHC.cjs`),t=require(`./uploadista-error-BgQU45we.cjs`),n=require(`./checksum-DVPe3Db4.cjs`),r=require(`./stream-limiter-BvkaZXcz.cjs`);let i=require(`effect`);function a(e,t,n=0){return e.length<n+t.length?!1:t.every((t,r)=>e[n+r]===t)}function o(e,t,n=0){if(e.length<n+t.length)return!1;for(let r=0;r<t.length;r++)if(e[n+r]!==t.charCodeAt(r))return!1;return!0}const s=(e,t)=>{if(e.length===0)return`application/octet-stream`;if(a(e,[137,80,78,71,13,10,26,10]))return`image/png`;if(a(e,[255,216,255]))return`image/jpeg`;if(o(e,`GIF87a`)||o(e,`GIF89a`))return`image/gif`;if(a(e,[82,73,70,70])&&e.length>=12&&o(e,`WEBP`,8))return`image/webp`;if(e.length>=12&&a(e,[0,0,0],0)&&o(e,`ftyp`,4)&&(o(e,`avif`,8)||o(e,`avis`,8)))return`image/avif`;if(e.length>=12&&o(e,`ftyp`,4)&&(o(e,`heic`,8)||o(e,`heif`,8)||o(e,`mif1`,8)))return`image/heic`;if(a(e,[66,77]))return`image/bmp`;if(a(e,[73,73,42,0])||a(e,[77,77,0,42]))return`image/tiff`;if(a(e,[0,0,1,0]))return`image/x-icon`;if(e.length>=5){let t=new TextDecoder(`utf-8`,{fatal:!1}).decode(e.slice(0,Math.min(1024,e.length)));if(t.includes(`<svg`)||t.includes(`<?xml`)&&t.includes(`<svg`))return`image/svg+xml`}if(e.length>=12&&o(e,`ftyp`,4)){let t=new TextDecoder().decode(e.slice(8,12));if(t.startsWith(`mp4`)||t.startsWith(`M4`)||t.startsWith(`isom`))return`video/mp4`}if(a(e,[26,69,223,163]))return`video/webm`;if(a(e,[82,73,70,70])&&e.length>=12&&o(e,`AVI `,8))return`video/x-msvideo`;if(e.length>=8&&(o(e,`moov`,4)||o(e,`mdat`,4)||o(e,`free`,4)))return`video/quicktime`;if(a(e,[26,69,223,163])&&e.length>=100&&new TextDecoder(`utf-8`,{fatal:!1}).decode(e.slice(0,100)).includes(`matroska`))return`video/x-matroska`;if(a(e,[255,251])||a(e,[255,243])||a(e,[255,242])||o(e,`ID3`))return`audio/mpeg`;if(a(e,[82,73,70,70])&&e.length>=12&&o(e,`WAVE`,8))return`audio/wav`;if(o(e,`fLaC`))return`audio/flac`;if(o(e,`OggS`))return`audio/ogg`;if(e.length>=12&&o(e,`ftyp`,4)&&o(e,`M4A`,8))return`audio/mp4`;if(o(e,`%PDF`))return`application/pdf`;if(a(e,[80,75,3,4])||a(e,[80,75,5,6])||a(e,[80,75,7,8])){if(e.length>=1024){let t=new TextDecoder(`utf-8`,{fatal:!1}).decode(e);if(t.includes(`word/`))return`application/vnd.openxmlformats-officedocument.wordprocessingml.document`;if(t.includes(`xl/`))return`application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`;if(t.includes(`ppt/`))return`application/vnd.openxmlformats-officedocument.presentationml.presentation`}return`application/zip`}if(a(e,[82,97,114,33,26,7]))return`application/x-rar-compressed`;if(a(e,[55,122,188,175,39,28]))return`application/x-7z-compressed`;if(a(e,[31,139]))return`application/gzip`;if(e.length>=262&&o(e,`ustar`,257))return`application/x-tar`;if(o(e,`wOFF`))return`font/woff`;if(o(e,`wOF2`))return`font/woff2`;if(a(e,[0,1,0,0,0]))return`font/ttf`;if(o(e,`OTTO`))return`font/otf`;if(e.length>=1){let t=e[0];if(t===123||t===91)try{let t=new TextDecoder(`utf-8`).decode(e.slice(0,Math.min(1024,e.length)));return JSON.parse(t.trim()),`application/json`}catch{}}if(t)switch(t.split(`.`).pop()?.toLowerCase()){case`jpg`:case`jpeg`:return`image/jpeg`;case`png`:return`image/png`;case`gif`:return`image/gif`;case`webp`:return`image/webp`;case`avif`:return`image/avif`;case`heic`:case`heif`:return`image/heic`;case`bmp`:return`image/bmp`;case`tiff`:case`tif`:return`image/tiff`;case`ico`:return`image/x-icon`;case`svg`:return`image/svg+xml`;case`mp4`:case`m4v`:return`video/mp4`;case`webm`:return`video/webm`;case`avi`:return`video/x-msvideo`;case`mov`:return`video/quicktime`;case`mkv`:return`video/x-matroska`;case`mp3`:return`audio/mpeg`;case`wav`:return`audio/wav`;case`flac`:return`audio/flac`;case`ogg`:return`audio/ogg`;case`m4a`:return`audio/mp4`;case`pdf`:return`application/pdf`;case`docx`:return`application/vnd.openxmlformats-officedocument.wordprocessingml.document`;case`xlsx`:return`application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`;case`pptx`:return`application/vnd.openxmlformats-officedocument.presentationml.presentation`;case`zip`:return`application/zip`;case`rar`:return`application/x-rar-compressed`;case`7z`:return`application/x-7z-compressed`;case`gz`:case`gzip`:return`application/gzip`;case`tar`:return`application/x-tar`;case`woff`:return`font/woff`;case`woff2`:return`font/woff2`;case`ttf`:return`font/ttf`;case`otf`:return`font/otf`;case`txt`:return`text/plain`;case`json`:return`application/json`;case`xml`:return`application/xml`;case`html`:case`htm`:return`text/html`;case`css`:return`text/css`;case`js`:return`application/javascript`;case`csv`:return`text/csv`;default:return`application/octet-stream`}return`application/octet-stream`};function c(e,t){return e===t?!0:e.split(`/`)[0]===t.split(`/`)[0]}const l=i.Effect.gen(function*(){let e=yield*i.Effect.currentSpan.pipe(i.Effect.option);return i.Option.match(e,{onNone:()=>void 0,onSome:e=>({traceId:e.traceId,spanId:e.spanId,traceFlags:e.sampled?1:0})})}),u=(t,n,{dataStoreService:r,kvStore:a,eventEmitter:o,generateId:s})=>i.Effect.gen(function*(){let c=yield*l,u=new Date().toISOString();return yield*i.Effect.gen(function*(){let i=yield*r.getDataStore(t.storageId,n),l=yield*s.generateId(),{size:d,type:f,fileName:p,lastModified:m,metadata:h,flow:g}=t,_={};if(h)try{_=JSON.parse(h)}catch{_={}}let v={..._,type:f,fileName:p??``};m&&(v.lastModified=m.toString());let y={id:l,size:d,metadata:v,offset:0,creationDate:u,storage:{id:t.storageId,type:f,path:``,bucket:i.bucket},flow:g,traceContext:c},b=yield*i.create(y);return yield*a.set(l,b),yield*o.emit(l,{type:e.n.UPLOAD_STARTED,data:b,flow:b.flow}),b}).pipe(i.Effect.withSpan(`upload-create`,{attributes:{"upload.file_name":t.fileName??`unknown`,"upload.file_size":t.size?.toString()??`0`,"upload.storage_id":t.storageId,"upload.mime_type":t.type,"upload.has_flow":t.flow?`true`:`false`}}))}).pipe(i.Effect.withSpan(`upload`,{attributes:{"upload.file_name":t.fileName??`unknown`,"upload.file_size":t.size?.toString()??`0`,"upload.storage_id":t.storageId,"upload.mime_type":t.type,"upload.has_flow":t.flow?`true`:`false`}}),i.Effect.tap(e=>i.Effect.gen(function*(){if(yield*i.Metric.increment(i.Metric.counter(`upload_created_total`,{description:`Total number of uploads created`})),e.size){let t=i.Metric.histogram(`upload_file_size_bytes`,i.MetricBoundaries.exponential({start:1024,factor:2,count:25}));yield*i.Metric.update(t,e.size)}let t=i.Metric.gauge(`active_uploads`);yield*i.Metric.increment(t)})),i.Effect.tap(e=>i.Effect.logInfo(`Upload created`).pipe(i.Effect.annotateLogs({"upload.id":e.id,"upload.file_name":t.fileName??`unknown`,"upload.file_size":t.size?.toString()??`0`,"upload.storage_id":t.storageId}))),i.Effect.tapError(e=>i.Effect.gen(function*(){yield*i.Effect.logError(`Upload creation failed`).pipe(i.Effect.annotateLogs({"upload.file_name":t.fileName??`unknown`,"upload.storage_id":t.storageId,error:String(e)})),yield*i.Metric.increment(i.Metric.counter(`upload_failed_total`,{description:`Total number of uploads that failed`}))})));function d(e){return i.Stream.fromReadableStream(()=>e,e=>new t.n({code:`UNKNOWN_ERROR`,status:500,body:String(e)}))}function f({data:n,upload:a,dataStore:o,maxFileSize:s,controller:c,eventEmitter:l,uploadProgressInterval:u=200}){return i.Effect.gen(function*(){let f=d(n);if(c.signal.aborted)return yield*i.Effect.fail(t.n.fromCode(`ABORTED`));let p=new AbortController,{signal:m}=p,h=()=>{p.abort()};return c.signal.addEventListener(`abort`,h,{once:!0}),yield*i.Effect.acquireUseRelease(i.Effect.sync(()=>({signal:m,onAbort:h})),({signal:n})=>i.Effect.gen(function*(){let t=yield*i.Ref.make(0),n=r.t.limit({maxSize:s})(f);return yield*o.write({stream:n,file_id:a.id,offset:a.offset},{onProgress:n=>{let r=Date.now();i.Ref.get(t).pipe(i.Effect.flatMap(o=>r-o>=u?i.Effect.gen(function*(){yield*i.Ref.set(t,r),yield*l.emit(a.id,{type:e.n.UPLOAD_PROGRESS,data:{id:a.id,progress:n,total:a.size??0},flow:a.flow})}):i.Effect.void),i.Effect.runPromise).catch(()=>{})}})}).pipe(i.Effect.catchAll(e=>e instanceof Error&&e.name===`AbortError`?i.Effect.fail(t.n.fromCode(`ABORTED`)):e instanceof t.n?i.Effect.fail(e):i.Effect.fail(t.n.fromCode(`FILE_WRITE_ERROR`,{cause:e})))),({onAbort:e})=>i.Effect.sync(()=>{c.signal.removeEventListener(`abort`,e)}))}).pipe(i.Effect.withSpan(`upload-write-to-store`,{attributes:{"upload.id":a.id,"upload.offset":a.offset.toString(),"upload.max_file_size":s.toString(),"upload.file_size":a.size?.toString()??`0`}}),i.Effect.tap(e=>i.Effect.logDebug(`Data written to store`).pipe(i.Effect.annotateLogs({"upload.id":a.id,"write.offset":e.toString(),"write.bytes_written":(e-a.offset).toString()}))),i.Effect.tapError(e=>i.Effect.logError(`Failed to write to store`).pipe(i.Effect.annotateLogs({"upload.id":a.id,"upload.offset":a.offset.toString(),error:e instanceof t.n?e.code:String(e)}))))}function p(e){return i.Tracer.externalSpan({traceId:e.traceId,spanId:e.spanId,sampled:e.traceFlags===1})}const m=(e,t)=>{let n=new Date(e.creationDate).getTime(),r=Date.now()-n;return i.Effect.void.pipe(i.Effect.withSpan(`upload-complete`,{attributes:{"upload.id":e.id,"upload.size":e.size??0,"upload.total_duration_ms":r,"upload.storage_id":e.storage.id,"upload.file_name":e.metadata?.fileName??`unknown`,"upload.creation_date":e.creationDate,"upload.completion_date":new Date().toISOString()},parent:t}))},h=(t,n,r,{dataStoreService:a,kvStore:o,eventEmitter:s})=>i.Effect.gen(function*(){let c=yield*o.get(t),l=c.traceContext?p(c.traceContext):void 0;return yield*i.Effect.gen(function*(){let i=yield*a.getDataStore(c.storage.id,n);return c.offset=yield*f({dataStore:i,data:r,upload:c,maxFileSize:1e8,controller:new AbortController,uploadProgressInterval:200,eventEmitter:s}),yield*o.set(t,c),yield*s.emit(c.id,{type:e.n.UPLOAD_PROGRESS,data:{id:c.id,progress:c.offset,total:c.size??0},flow:c.flow}),c.size&&c.offset===c.size&&(yield*g({file:c,dataStore:i,eventEmitter:s}),c.traceContext&&(yield*m(c,p(c.traceContext)))),c}).pipe(i.Effect.withSpan(`upload-chunk`,{attributes:{"upload.id":t,"chunk.upload_id":t,"upload.has_trace_context":c.traceContext?`true`:`false`},parent:l}))}).pipe(i.Effect.tap(e=>i.Effect.gen(function*(){yield*i.Metric.increment(i.Metric.counter(`chunk_uploaded_total`,{description:`Total number of chunks uploaded`}));let t=e.offset,n=i.Metric.histogram(`chunk_size_bytes`,i.MetricBoundaries.linear({start:262144,width:262144,count:20}));if(yield*i.Metric.update(n,t),e.size&&e.size>0){let e=t,n=i.Metric.gauge(`upload_throughput_bytes_per_second`);yield*i.Metric.set(n,e)}})),i.Effect.tap(e=>i.Effect.logDebug(`Chunk uploaded`).pipe(i.Effect.annotateLogs({"upload.id":e.id,"chunk.size":e.offset.toString(),"chunk.progress":e.size&&e.size>0?(e.offset/e.size*100).toFixed(2):`0`,"upload.total_size":e.size?.toString()??`0`}))),i.Effect.tapError(e=>i.Effect.logError(`Chunk upload failed`).pipe(i.Effect.annotateLogs({"upload.id":t,error:String(e)})))),g=({file:r,dataStore:a,eventEmitter:o})=>i.Effect.gen(function*(){let i=a.getCapabilities();if(i.maxValidationSize&&r.size&&r.size>i.maxValidationSize){yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_WARNING,data:{id:r.id,message:`File size (${r.size} bytes) exceeds max validation size (${i.maxValidationSize} bytes). Validation skipped.`},flow:r.flow});return}let l=yield*a.read(r.id);if(r.checksum&&r.checksumAlgorithm){let i=yield*n.t(l,r.checksumAlgorithm);if(i!==r.checksum)return yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_FAILED,data:{id:r.id,reason:`checksum_mismatch`,expected:r.checksum,actual:i},flow:r.flow}),yield*a.remove(r.id),yield*t.n.fromCode(`CHECKSUM_MISMATCH`,{body:`Checksum validation failed. Expected: ${r.checksum}, Got: ${i}`,details:{uploadId:r.id,expected:r.checksum,actual:i,algorithm:r.checksumAlgorithm}}).toEffect();yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_SUCCESS,data:{id:r.id,validationType:`checksum`,algorithm:r.checksumAlgorithm},flow:r.flow})}if(i.requiresMimeTypeValidation){let n=s(l),i=r.metadata?.type;if(i&&!c(i,n))return yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_FAILED,data:{id:r.id,reason:`mimetype_mismatch`,expected:i,actual:n},flow:r.flow}),yield*a.remove(r.id),yield*t.n.fromCode(`MIMETYPE_MISMATCH`,{body:`MIME type validation failed. Expected: ${i}, Detected: ${n}`,details:{uploadId:r.id,expected:i,actual:n}}).toEffect();yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_SUCCESS,data:{id:r.id,validationType:`mimetype`},flow:r.flow})}}).pipe(i.Effect.withSpan(`validate-upload`,{attributes:{"upload.id":r.id,"validation.checksum_provided":r.checksum?`true`:`false`,"validation.mime_required":a.getCapabilities().requiresMimeTypeValidation?`true`:`false`}})),_=e=>i.Effect.tryPromise({try:async()=>await fetch(e),catch:e=>t.n.fromCode(`UNKNOWN_ERROR`,{cause:e})}).pipe(i.Effect.withSpan(`upload-fetch-url`,{attributes:{"upload.url":e,"upload.operation":`fetch`}}),i.Effect.tap(e=>i.Effect.gen(function*(){yield*i.Metric.increment(i.Metric.counter(`upload_from_url_total`,{description:`Total number of URL-based uploads`})),e.ok&&(yield*i.Metric.increment(i.Metric.counter(`upload_from_url_success_total`,{description:`Total number of successful URL-based uploads`})))})),i.Effect.tap(t=>i.Effect.logInfo(`URL fetch completed`).pipe(i.Effect.annotateLogs({"upload.url":e,"response.status":t.status.toString(),"response.ok":t.ok.toString(),"response.content_length":t.headers.get(`content-length`)??`unknown`}))),i.Effect.tapError(t=>i.Effect.gen(function*(){yield*i.Metric.increment(i.Metric.counter(`upload_from_url_failed_total`,{description:`Total number of failed URL-based uploads`})),yield*i.Effect.logError(`URL fetch failed`).pipe(i.Effect.annotateLogs({"upload.url":e,error:String(t)}))}))),v=e=>i.Effect.tryPromise({try:async()=>await e.arrayBuffer(),catch:e=>t.n.fromCode(`UNKNOWN_ERROR`,{cause:e})}).pipe(i.Effect.withSpan(`upload-convert-to-buffer`,{attributes:{"upload.operation":`arrayBuffer`}}),i.Effect.tap(e=>i.Effect.logDebug(`Response converted to array buffer`).pipe(i.Effect.annotateLogs({"buffer.size":e.byteLength.toString()}))),i.Effect.tapError(e=>i.Effect.logError(`Failed to convert response to array buffer`).pipe(i.Effect.annotateLogs({error:String(e)}))));var y=class extends i.Context.Tag(`UploadServer`)(){};function b(){return i.Effect.gen(function*(){let t=yield*e.N,r=yield*e.p,a=yield*n.r,o=yield*e.S;return{upload:(e,n,s)=>i.Effect.gen(function*(){return yield*h((yield*u(e,n,{dataStoreService:o,kvStore:t,eventEmitter:r,generateId:a})).id,n,s,{dataStoreService:o,kvStore:t,eventEmitter:r})}),uploadFromUrl:(e,n,s)=>i.Effect.gen(function*(){let i=yield*v(yield*_(s)),c=new ReadableStream({start(e){e.enqueue(new Uint8Array(i)),e.close()}});return yield*h((yield*u({...e,size:i.byteLength},n,{dataStoreService:o,kvStore:t,eventEmitter:r,generateId:a})).id,n,c,{dataStoreService:o,kvStore:t,eventEmitter:r})}),createUpload:(e,n)=>i.Effect.gen(function*(){return yield*u(e,n,{dataStoreService:o,kvStore:t,eventEmitter:r,generateId:a})}),uploadChunk:(e,n,a)=>i.Effect.gen(function*(){return yield*h(e,n,a,{dataStoreService:o,kvStore:t,eventEmitter:r})}),getUpload:e=>i.Effect.gen(function*(){return yield*t.get(e)}),read:(e,n)=>i.Effect.gen(function*(){let r=yield*t.get(e);return yield*(yield*o.getDataStore(r.storage.id,n)).read(e)}),readStream:(e,n,r)=>i.Effect.gen(function*(){let a=yield*t.get(e),s=yield*o.getDataStore(a.storage.id,n);if(s.getCapabilities().supportsStreamingRead&&s.readStream)return yield*i.Effect.logDebug(`Using streaming read for file ${e}`),yield*s.readStream(e,r);yield*i.Effect.logDebug(`Falling back to buffered read for file ${e} (streaming not supported)`);let c=yield*s.read(e);return i.Stream.succeed(c)}),uploadStream:(n,s,c)=>i.Effect.gen(function*(){let l=yield*o.getDataStore(n.storageId,s),d=l.getCapabilities(),f=yield*a.generateId();if(d.supportsStreamingWrite&&l.writeStream){yield*i.Effect.logDebug(`Using streaming write for file ${f}`);let a=typeof n.metadata==`string`?JSON.parse(n.metadata):n.metadata||{},o=Object.fromEntries(Object.entries(a).map(([e,t])=>[e,String(t)])),s={id:f,offset:0,size:n.size??0,storage:{id:n.storageId,type:l.getCapabilities().supportsStreamingWrite?`streaming`:`default`},metadata:a,creationDate:new Date().toISOString()};yield*t.set(f,s),yield*r.emit(f,{type:e.n.UPLOAD_STARTED,data:s});let u=yield*l.writeStream(f,{stream:c,contentType:n.type,sizeHint:n.sizeHint,metadata:o}),d={...s,size:u.size,offset:u.size,storage:{...s.storage,path:u.path},...u.url&&{url:u.url}};return yield*t.set(f,d),yield*r.emit(f,{type:e.n.UPLOAD_COMPLETE,data:d}),d}yield*i.Effect.logWarning(`Falling back to buffered upload for file ${f} (streaming write not supported)`);let p=[];yield*i.Stream.runForEach(c,e=>i.Effect.sync(()=>{p.push(e)}));let m=p.reduce((e,t)=>e+t.length,0),g=new Uint8Array(m),_=0;for(let e of p)g.set(e,_),_+=e.length;let v=new ReadableStream({start(e){e.enqueue(g),e.close()}});return yield*u({...n,size:m},s,{dataStoreService:o,kvStore:t,eventEmitter:r,generateId:{generateId:()=>i.Effect.succeed(f)}}),yield*h(f,s,v,{dataStoreService:o,kvStore:t,eventEmitter:r})}),delete:(e,n)=>i.Effect.gen(function*(){let r=yield*t.get(e);yield*(yield*o.getDataStore(r.storage.id,n)).remove(e),yield*t.delete(e)}),getCapabilities:(e,t)=>i.Effect.gen(function*(){return(yield*o.getDataStore(e,t)).getCapabilities()}),subscribeToUploadEvents:(e,t)=>i.Effect.gen(function*(){yield*r.subscribe(e,t)}),unsubscribeFromUploadEvents:e=>i.Effect.gen(function*(){yield*r.unsubscribe(e)})}})}const x=i.Layer.effect(y,b());var S=class{constructor(e,t){this.capabilities=e,this.validateUploadStrategy=t}negotiateStrategy(e){let t=[],n=[],r=`single`,i=e.preferredChunkSize??this.capabilities.optimalChunkSize??1024*1024,a=e.parallelUploads??1;if(e.preferredStrategy&&(this.validateUploadStrategy(e.preferredStrategy)?(r=e.preferredStrategy,t.push(`Using preferred strategy: ${r}`)):n.push(`Preferred strategy '${e.preferredStrategy}' not supported by data store, falling back`)),(!e.preferredStrategy||!this.validateUploadStrategy(e.preferredStrategy))&&(this.capabilities.supportsParallelUploads&&e.fileSize>(e.minChunkSizeForParallel??10*1024*1024)?(r=`parallel`,t.push(`Selected parallel upload for large file (${e.fileSize} bytes)`)):(r=`single`,t.push(this.capabilities.supportsParallelUploads?`Selected single upload for small file (${e.fileSize} bytes)`:`Selected single upload (parallel not supported by data store)`))),this.capabilities.minChunkSize&&i<this.capabilities.minChunkSize&&(n.push(`Chunk size ${i} below minimum ${this.capabilities.minChunkSize}, adjusting`),i=this.capabilities.minChunkSize),this.capabilities.maxChunkSize&&i>this.capabilities.maxChunkSize&&(n.push(`Chunk size ${i} above maximum ${this.capabilities.maxChunkSize}, adjusting`),i=this.capabilities.maxChunkSize),r===`parallel`&&(this.capabilities.maxConcurrentUploads&&a>this.capabilities.maxConcurrentUploads&&(n.push(`Parallel uploads ${a} exceeds maximum ${this.capabilities.maxConcurrentUploads}, adjusting`),a=this.capabilities.maxConcurrentUploads),this.capabilities.maxParts)){let t=Math.ceil(e.fileSize/i);if(t>this.capabilities.maxParts){let r=Math.ceil(e.fileSize/this.capabilities.maxParts);n.push(`Estimated parts ${t} exceeds maximum ${this.capabilities.maxParts}, increasing chunk size`),i=Math.max(i,r)}}return this.validateUploadStrategy(r)||(n.push(`Final strategy validation failed, falling back to single upload`),r=`single`,a=1),t.push(`Data store capabilities: parallel=${this.capabilities.supportsParallelUploads}, concatenation=${this.capabilities.supportsConcatenation}, resumable=${this.capabilities.supportsResumableUploads}`),{strategy:r,chunkSize:i,parallelUploads:r===`parallel`?a:1,reasoning:t,warnings:n}}getDataStoreCapabilities(){return this.capabilities}validateConfiguration(e){let t=[];return e.preferredStrategy&&!this.validateUploadStrategy(e.preferredStrategy)&&t.push(`Preferred strategy '${e.preferredStrategy}' not supported by data store`),e.preferredChunkSize&&(this.capabilities.minChunkSize&&e.preferredChunkSize<this.capabilities.minChunkSize&&t.push(`Chunk size ${e.preferredChunkSize} below data store minimum ${this.capabilities.minChunkSize}`),this.capabilities.maxChunkSize&&e.preferredChunkSize>this.capabilities.maxChunkSize&&t.push(`Chunk size ${e.preferredChunkSize} above data store maximum ${this.capabilities.maxChunkSize}`)),e.parallelUploads&&this.capabilities.maxConcurrentUploads&&e.parallelUploads>this.capabilities.maxConcurrentUploads&&t.push(`Parallel uploads ${e.parallelUploads} exceeds data store maximum ${this.capabilities.maxConcurrentUploads}`),{valid:t.length===0,errors:t}}};Object.defineProperty(exports,`a`,{enumerable:!0,get:function(){return v}}),Object.defineProperty(exports,`c`,{enumerable:!0,get:function(){return s}}),Object.defineProperty(exports,`i`,{enumerable:!0,get:function(){return x}}),Object.defineProperty(exports,`n`,{enumerable:!0,get:function(){return y}}),Object.defineProperty(exports,`o`,{enumerable:!0,get:function(){return _}}),Object.defineProperty(exports,`r`,{enumerable:!0,get:function(){return b}}),Object.defineProperty(exports,`s`,{enumerable:!0,get:function(){return c}}),Object.defineProperty(exports,`t`,{enumerable:!0,get:function(){return S}});
+ const e=require(`./types-Cws60JHC.cjs`),t=require(`./uploadista-error-BgQU45we.cjs`),n=require(`./checksum-DVPe3Db4.cjs`),r=require(`./stream-limiter-BvkaZXcz.cjs`);let i=require(`effect`);function a(e,t,n=0){return e.length<n+t.length?!1:t.every((t,r)=>e[n+r]===t)}function o(e,t,n=0){if(e.length<n+t.length)return!1;for(let r=0;r<t.length;r++)if(e[n+r]!==t.charCodeAt(r))return!1;return!0}const s=(e,t)=>{if(e.length===0)return`application/octet-stream`;if(a(e,[137,80,78,71,13,10,26,10]))return`image/png`;if(a(e,[255,216,255]))return`image/jpeg`;if(o(e,`GIF87a`)||o(e,`GIF89a`))return`image/gif`;if(a(e,[82,73,70,70])&&e.length>=12&&o(e,`WEBP`,8))return`image/webp`;if(e.length>=12&&a(e,[0,0,0],0)&&o(e,`ftyp`,4)&&(o(e,`avif`,8)||o(e,`avis`,8)))return`image/avif`;if(e.length>=12&&o(e,`ftyp`,4)&&(o(e,`heic`,8)||o(e,`heif`,8)||o(e,`mif1`,8)))return`image/heic`;if(a(e,[66,77]))return`image/bmp`;if(a(e,[73,73,42,0])||a(e,[77,77,0,42]))return`image/tiff`;if(a(e,[0,0,1,0]))return`image/x-icon`;if(e.length>=5){let t=new TextDecoder(`utf-8`,{fatal:!1}).decode(e.slice(0,Math.min(1024,e.length)));if(t.includes(`<svg`)||t.includes(`<?xml`)&&t.includes(`<svg`))return`image/svg+xml`}if(e.length>=12&&o(e,`ftyp`,4)){let t=new TextDecoder().decode(e.slice(8,12));if(t.startsWith(`mp4`)||t.startsWith(`M4`)||t.startsWith(`isom`))return`video/mp4`}if(a(e,[26,69,223,163]))return`video/webm`;if(a(e,[82,73,70,70])&&e.length>=12&&o(e,`AVI `,8))return`video/x-msvideo`;if(e.length>=8&&(o(e,`moov`,4)||o(e,`mdat`,4)||o(e,`free`,4)))return`video/quicktime`;if(a(e,[26,69,223,163])&&e.length>=100&&new TextDecoder(`utf-8`,{fatal:!1}).decode(e.slice(0,100)).includes(`matroska`))return`video/x-matroska`;if(a(e,[255,251])||a(e,[255,243])||a(e,[255,242])||o(e,`ID3`))return`audio/mpeg`;if(a(e,[82,73,70,70])&&e.length>=12&&o(e,`WAVE`,8))return`audio/wav`;if(o(e,`fLaC`))return`audio/flac`;if(o(e,`OggS`))return`audio/ogg`;if(e.length>=12&&o(e,`ftyp`,4)&&o(e,`M4A`,8))return`audio/mp4`;if(o(e,`%PDF`))return`application/pdf`;if(a(e,[80,75,3,4])||a(e,[80,75,5,6])||a(e,[80,75,7,8])){if(e.length>=1024){let t=new TextDecoder(`utf-8`,{fatal:!1}).decode(e);if(t.includes(`word/`))return`application/vnd.openxmlformats-officedocument.wordprocessingml.document`;if(t.includes(`xl/`))return`application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`;if(t.includes(`ppt/`))return`application/vnd.openxmlformats-officedocument.presentationml.presentation`}return`application/zip`}if(a(e,[82,97,114,33,26,7]))return`application/x-rar-compressed`;if(a(e,[55,122,188,175,39,28]))return`application/x-7z-compressed`;if(a(e,[31,139]))return`application/gzip`;if(e.length>=262&&o(e,`ustar`,257))return`application/x-tar`;if(o(e,`wOFF`))return`font/woff`;if(o(e,`wOF2`))return`font/woff2`;if(a(e,[0,1,0,0,0]))return`font/ttf`;if(o(e,`OTTO`))return`font/otf`;if(e.length>=1){let t=e[0];if(t===123||t===91)try{let t=new TextDecoder(`utf-8`).decode(e.slice(0,Math.min(1024,e.length)));return JSON.parse(t.trim()),`application/json`}catch{}}if(t)switch(t.split(`.`).pop()?.toLowerCase()){case`jpg`:case`jpeg`:return`image/jpeg`;case`png`:return`image/png`;case`gif`:return`image/gif`;case`webp`:return`image/webp`;case`avif`:return`image/avif`;case`heic`:case`heif`:return`image/heic`;case`bmp`:return`image/bmp`;case`tiff`:case`tif`:return`image/tiff`;case`ico`:return`image/x-icon`;case`svg`:return`image/svg+xml`;case`mp4`:case`m4v`:return`video/mp4`;case`webm`:return`video/webm`;case`avi`:return`video/x-msvideo`;case`mov`:return`video/quicktime`;case`mkv`:return`video/x-matroska`;case`mp3`:return`audio/mpeg`;case`wav`:return`audio/wav`;case`flac`:return`audio/flac`;case`ogg`:return`audio/ogg`;case`m4a`:return`audio/mp4`;case`pdf`:return`application/pdf`;case`docx`:return`application/vnd.openxmlformats-officedocument.wordprocessingml.document`;case`xlsx`:return`application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`;case`pptx`:return`application/vnd.openxmlformats-officedocument.presentationml.presentation`;case`zip`:return`application/zip`;case`rar`:return`application/x-rar-compressed`;case`7z`:return`application/x-7z-compressed`;case`gz`:case`gzip`:return`application/gzip`;case`tar`:return`application/x-tar`;case`woff`:return`font/woff`;case`woff2`:return`font/woff2`;case`ttf`:return`font/ttf`;case`otf`:return`font/otf`;case`txt`:return`text/plain`;case`json`:return`application/json`;case`xml`:return`application/xml`;case`html`:case`htm`:return`text/html`;case`css`:return`text/css`;case`js`:return`application/javascript`;case`csv`:return`text/csv`;default:return`application/octet-stream`}return`application/octet-stream`};function c(e,t){return e===t?!0:e.split(`/`)[0]===t.split(`/`)[0]}const l=i.Effect.gen(function*(){let e=yield*i.Effect.currentSpan.pipe(i.Effect.option);return i.Option.match(e,{onNone:()=>void 0,onSome:e=>({traceId:e.traceId,spanId:e.spanId,traceFlags:e.sampled?1:0})})}),u=(t,n,{dataStoreService:r,kvStore:a,eventEmitter:o,generateId:s})=>i.Effect.gen(function*(){let c=yield*l,u=new Date().toISOString();return yield*i.Effect.gen(function*(){let i=yield*r.getDataStore(t.storageId,n),l=yield*s.generateId(),{size:d,type:f,fileName:p,lastModified:m,metadata:h,flow:g}=t,_={};if(h)try{_=JSON.parse(h)}catch{_={}}let v={..._,type:f,fileName:p??``};m&&(v.lastModified=m.toString());let y={id:l,size:d,metadata:v,offset:0,creationDate:u,storage:{id:t.storageId,type:f,path:``,bucket:i.bucket},flow:g,traceContext:c},b=yield*i.create(y);return yield*a.set(l,b),yield*o.emit(l,{type:e.n.UPLOAD_STARTED,data:b,flow:b.flow}),b}).pipe(i.Effect.withSpan(`upload-create`,{attributes:{"upload.file_name":t.fileName??`unknown`,"upload.file_size":t.size?.toString()??`0`,"upload.storage_id":t.storageId,"upload.mime_type":t.type,"upload.has_flow":t.flow?`true`:`false`}}))}).pipe(i.Effect.withSpan(`upload`,{attributes:{"upload.file_name":t.fileName??`unknown`,"upload.file_size":t.size?.toString()??`0`,"upload.storage_id":t.storageId,"upload.mime_type":t.type,"upload.has_flow":t.flow?`true`:`false`}}),i.Effect.tap(e=>i.Effect.gen(function*(){if(yield*i.Metric.increment(i.Metric.counter(`upload_created_total`,{description:`Total number of uploads created`})),e.size){let t=i.Metric.histogram(`upload_file_size_bytes`,i.MetricBoundaries.exponential({start:1024,factor:2,count:25}));yield*i.Metric.update(t,e.size)}let t=i.Metric.gauge(`active_uploads`);yield*i.Metric.increment(t)})),i.Effect.tap(e=>i.Effect.logInfo(`Upload created`).pipe(i.Effect.annotateLogs({"upload.id":e.id,"upload.file_name":t.fileName??`unknown`,"upload.file_size":t.size?.toString()??`0`,"upload.storage_id":t.storageId}))),i.Effect.tapError(e=>i.Effect.gen(function*(){yield*i.Effect.logError(`Upload creation failed`).pipe(i.Effect.annotateLogs({"upload.file_name":t.fileName??`unknown`,"upload.storage_id":t.storageId,error:String(e)})),yield*i.Metric.increment(i.Metric.counter(`upload_failed_total`,{description:`Total number of uploads that failed`}))})));function d(e){return i.Stream.fromReadableStream(()=>e,e=>new t.n({code:`UNKNOWN_ERROR`,status:500,body:String(e)}))}function f({data:n,upload:a,dataStore:o,maxFileSize:s,controller:c,eventEmitter:l,uploadProgressInterval:u=200}){return i.Effect.gen(function*(){let f=d(n);if(c.signal.aborted)return yield*i.Effect.fail(t.n.fromCode(`ABORTED`));let p=new AbortController,{signal:m}=p,h=()=>{p.abort()};return c.signal.addEventListener(`abort`,h,{once:!0}),yield*i.Effect.acquireUseRelease(i.Effect.sync(()=>({signal:m,onAbort:h})),({signal:n})=>i.Effect.gen(function*(){let t=yield*i.Ref.make(0),n=r.t.limit({maxSize:s})(f);return yield*o.write({stream:n,file_id:a.id,offset:a.offset},{onProgress:n=>{let r=Date.now();i.Ref.get(t).pipe(i.Effect.flatMap(o=>r-o>=u?i.Effect.gen(function*(){yield*i.Ref.set(t,r),yield*l.emit(a.id,{type:e.n.UPLOAD_PROGRESS,data:{id:a.id,progress:n,total:a.size??0},flow:a.flow})}):i.Effect.void),i.Effect.runPromise).catch(()=>{})}})}).pipe(i.Effect.catchAll(e=>e instanceof Error&&e.name===`AbortError`?i.Effect.fail(t.n.fromCode(`ABORTED`)):e instanceof t.n?i.Effect.fail(e):i.Effect.fail(t.n.fromCode(`FILE_WRITE_ERROR`,{cause:e})))),({onAbort:e})=>i.Effect.sync(()=>{c.signal.removeEventListener(`abort`,e)}))}).pipe(i.Effect.withSpan(`upload-write-to-store`,{attributes:{"upload.id":a.id,"upload.offset":a.offset.toString(),"upload.max_file_size":s.toString(),"upload.file_size":a.size?.toString()??`0`}}),i.Effect.tap(e=>i.Effect.logDebug(`Data written to store`).pipe(i.Effect.annotateLogs({"upload.id":a.id,"write.offset":e.toString(),"write.bytes_written":(e-a.offset).toString()}))),i.Effect.tapError(e=>i.Effect.logError(`Failed to write to store`).pipe(i.Effect.annotateLogs({"upload.id":a.id,"upload.offset":a.offset.toString(),error:e instanceof t.n?e.code:String(e)}))))}function p(e){return i.Tracer.externalSpan({traceId:e.traceId,spanId:e.spanId,sampled:e.traceFlags===1})}const m=(e,t)=>{let n=new Date(e.creationDate).getTime(),r=Date.now()-n;return i.Effect.void.pipe(i.Effect.withSpan(`upload-complete`,{attributes:{"upload.id":e.id,"upload.size":e.size??0,"upload.total_duration_ms":r,"upload.storage_id":e.storage.id,"upload.file_name":e.metadata?.fileName??`unknown`,"upload.creation_date":e.creationDate,"upload.completion_date":new Date().toISOString()},parent:t}))},h=(t,n,r,{dataStoreService:a,kvStore:o,eventEmitter:s})=>i.Effect.gen(function*(){let c=yield*o.get(t),l=c.traceContext?p(c.traceContext):void 0;return yield*i.Effect.gen(function*(){let i=yield*a.getDataStore(c.storage.id,n);return c.offset=yield*f({dataStore:i,data:r,upload:c,maxFileSize:1e8,controller:new AbortController,uploadProgressInterval:200,eventEmitter:s}),yield*o.set(t,c),yield*s.emit(c.id,{type:e.n.UPLOAD_PROGRESS,data:{id:c.id,progress:c.offset,total:c.size??0},flow:c.flow}),c.size&&c.offset===c.size&&(yield*g({file:c,dataStore:i,eventEmitter:s}),c.traceContext&&(yield*m(c,p(c.traceContext)))),c}).pipe(i.Effect.withSpan(`upload-chunk`,{attributes:{"upload.id":t,"chunk.upload_id":t,"upload.has_trace_context":c.traceContext?`true`:`false`},parent:l}))}).pipe(i.Effect.tap(e=>i.Effect.gen(function*(){yield*i.Metric.increment(i.Metric.counter(`chunk_uploaded_total`,{description:`Total number of chunks uploaded`}));let t=e.offset,n=i.Metric.histogram(`chunk_size_bytes`,i.MetricBoundaries.linear({start:262144,width:262144,count:20}));if(yield*i.Metric.update(n,t),e.size&&e.size>0){let e=t,n=i.Metric.gauge(`upload_throughput_bytes_per_second`);yield*i.Metric.set(n,e)}})),i.Effect.tap(e=>i.Effect.logDebug(`Chunk uploaded`).pipe(i.Effect.annotateLogs({"upload.id":e.id,"chunk.size":e.offset.toString(),"chunk.progress":e.size&&e.size>0?(e.offset/e.size*100).toFixed(2):`0`,"upload.total_size":e.size?.toString()??`0`}))),i.Effect.tapError(e=>i.Effect.logError(`Chunk upload failed`).pipe(i.Effect.annotateLogs({"upload.id":t,error:String(e)})))),g=({file:r,dataStore:a,eventEmitter:o})=>i.Effect.gen(function*(){let i=a.getCapabilities();if(i.maxValidationSize&&r.size&&r.size>i.maxValidationSize){yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_WARNING,data:{id:r.id,message:`File size (${r.size} bytes) exceeds max validation size (${i.maxValidationSize} bytes). Validation skipped.`},flow:r.flow});return}let l=yield*a.read(r.id);if(r.checksum&&r.checksumAlgorithm){let i=yield*n.t(l,r.checksumAlgorithm);if(i!==r.checksum)return yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_FAILED,data:{id:r.id,reason:`checksum_mismatch`,expected:r.checksum,actual:i},flow:r.flow}),yield*a.remove(r.id),yield*t.n.fromCode(`CHECKSUM_MISMATCH`,{body:`Checksum validation failed. Expected: ${r.checksum}, Got: ${i}`,details:{uploadId:r.id,expected:r.checksum,actual:i,algorithm:r.checksumAlgorithm}}).toEffect();yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_SUCCESS,data:{id:r.id,validationType:`checksum`,algorithm:r.checksumAlgorithm},flow:r.flow})}if(i.requiresMimeTypeValidation){let n=s(l),i=r.metadata?.type;if(i&&!c(i,n))return yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_FAILED,data:{id:r.id,reason:`mimetype_mismatch`,expected:i,actual:n},flow:r.flow}),yield*a.remove(r.id),yield*t.n.fromCode(`MIMETYPE_MISMATCH`,{body:`MIME type validation failed. Expected: ${i}, Detected: ${n}`,details:{uploadId:r.id,expected:i,actual:n}}).toEffect();yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_SUCCESS,data:{id:r.id,validationType:`mimetype`},flow:r.flow})}}).pipe(i.Effect.withSpan(`validate-upload`,{attributes:{"upload.id":r.id,"validation.checksum_provided":r.checksum?`true`:`false`,"validation.mime_required":a.getCapabilities().requiresMimeTypeValidation?`true`:`false`}})),_=e=>i.Effect.tryPromise({try:async()=>await fetch(e),catch:e=>t.n.fromCode(`UNKNOWN_ERROR`,{cause:e})}).pipe(i.Effect.withSpan(`upload-fetch-url`,{attributes:{"upload.url":e,"upload.operation":`fetch`}}),i.Effect.tap(e=>i.Effect.gen(function*(){yield*i.Metric.increment(i.Metric.counter(`upload_from_url_total`,{description:`Total number of URL-based uploads`})),e.ok&&(yield*i.Metric.increment(i.Metric.counter(`upload_from_url_success_total`,{description:`Total number of successful URL-based uploads`})))})),i.Effect.tap(t=>i.Effect.logInfo(`URL fetch completed`).pipe(i.Effect.annotateLogs({"upload.url":e,"response.status":t.status.toString(),"response.ok":t.ok.toString(),"response.content_length":t.headers.get(`content-length`)??`unknown`}))),i.Effect.tapError(t=>i.Effect.gen(function*(){yield*i.Metric.increment(i.Metric.counter(`upload_from_url_failed_total`,{description:`Total number of failed URL-based uploads`})),yield*i.Effect.logError(`URL fetch failed`).pipe(i.Effect.annotateLogs({"upload.url":e,error:String(t)}))}))),v=e=>i.Effect.tryPromise({try:async()=>await e.arrayBuffer(),catch:e=>t.n.fromCode(`UNKNOWN_ERROR`,{cause:e})}).pipe(i.Effect.withSpan(`upload-convert-to-buffer`,{attributes:{"upload.operation":`arrayBuffer`}}),i.Effect.tap(e=>i.Effect.logDebug(`Response converted to array buffer`).pipe(i.Effect.annotateLogs({"buffer.size":e.byteLength.toString()}))),i.Effect.tapError(e=>i.Effect.logError(`Failed to convert response to array buffer`).pipe(i.Effect.annotateLogs({error:String(e)}))));var y=class extends i.Context.Tag(`UploadEngine`)(){};function b(){return i.Effect.gen(function*(){let t=yield*e.N,r=yield*e.p,a=yield*n.r,o=yield*e.S;return{upload:(e,n,s)=>i.Effect.gen(function*(){return yield*h((yield*u(e,n,{dataStoreService:o,kvStore:t,eventEmitter:r,generateId:a})).id,n,s,{dataStoreService:o,kvStore:t,eventEmitter:r})}),uploadFromUrl:(e,n,s)=>i.Effect.gen(function*(){let i=yield*v(yield*_(s)),c=new ReadableStream({start(e){e.enqueue(new Uint8Array(i)),e.close()}});return yield*h((yield*u({...e,size:i.byteLength},n,{dataStoreService:o,kvStore:t,eventEmitter:r,generateId:a})).id,n,c,{dataStoreService:o,kvStore:t,eventEmitter:r})}),createUpload:(e,n)=>i.Effect.gen(function*(){return yield*u(e,n,{dataStoreService:o,kvStore:t,eventEmitter:r,generateId:a})}),uploadChunk:(e,n,a)=>i.Effect.gen(function*(){return yield*h(e,n,a,{dataStoreService:o,kvStore:t,eventEmitter:r})}),getUpload:e=>i.Effect.gen(function*(){return yield*t.get(e)}),read:(e,n)=>i.Effect.gen(function*(){let r=yield*t.get(e);return yield*(yield*o.getDataStore(r.storage.id,n)).read(e)}),readStream:(e,n,r)=>i.Effect.gen(function*(){let a=yield*t.get(e),s=yield*o.getDataStore(a.storage.id,n);if(s.getCapabilities().supportsStreamingRead&&s.readStream)return yield*i.Effect.logDebug(`Using streaming read for file ${e}`),yield*s.readStream(e,r);yield*i.Effect.logDebug(`Falling back to buffered read for file ${e} (streaming not supported)`);let c=yield*s.read(e);return i.Stream.succeed(c)}),uploadStream:(n,s,c)=>i.Effect.gen(function*(){let l=yield*o.getDataStore(n.storageId,s),d=l.getCapabilities(),f=yield*a.generateId();if(d.supportsStreamingWrite&&l.writeStream){yield*i.Effect.logDebug(`Using streaming write for file ${f}`);let a=typeof n.metadata==`string`?JSON.parse(n.metadata):n.metadata||{},o=Object.fromEntries(Object.entries(a).map(([e,t])=>[e,String(t)])),s={id:f,offset:0,size:n.size??0,storage:{id:n.storageId,type:l.getCapabilities().supportsStreamingWrite?`streaming`:`default`},metadata:a,creationDate:new Date().toISOString()};yield*t.set(f,s),yield*r.emit(f,{type:e.n.UPLOAD_STARTED,data:s});let u=yield*l.writeStream(f,{stream:c,contentType:n.type,sizeHint:n.sizeHint,metadata:o}),d={...s,size:u.size,offset:u.size,storage:{...s.storage,path:u.path},...u.url&&{url:u.url}};return yield*t.set(f,d),yield*r.emit(f,{type:e.n.UPLOAD_COMPLETE,data:d}),d}yield*i.Effect.logWarning(`Falling back to buffered upload for file ${f} (streaming write not supported)`);let p=[];yield*i.Stream.runForEach(c,e=>i.Effect.sync(()=>{p.push(e)}));let m=p.reduce((e,t)=>e+t.length,0),g=new Uint8Array(m),_=0;for(let e of p)g.set(e,_),_+=e.length;let v=new ReadableStream({start(e){e.enqueue(g),e.close()}});return yield*u({...n,size:m},s,{dataStoreService:o,kvStore:t,eventEmitter:r,generateId:{generateId:()=>i.Effect.succeed(f)}}),yield*h(f,s,v,{dataStoreService:o,kvStore:t,eventEmitter:r})}),delete:(e,n)=>i.Effect.gen(function*(){let r=yield*t.get(e);yield*(yield*o.getDataStore(r.storage.id,n)).remove(e),yield*t.delete(e)}),getCapabilities:(e,t)=>i.Effect.gen(function*(){return(yield*o.getDataStore(e,t)).getCapabilities()}),subscribeToUploadEvents:(e,t)=>i.Effect.gen(function*(){yield*r.subscribe(e,t)}),unsubscribeFromUploadEvents:e=>i.Effect.gen(function*(){yield*r.unsubscribe(e)})}})}const x=i.Layer.effect(y,b());var S=class{constructor(e,t){this.capabilities=e,this.validateUploadStrategy=t}negotiateStrategy(e){let t=[],n=[],r=`single`,i=e.preferredChunkSize??this.capabilities.optimalChunkSize??1024*1024,a=e.parallelUploads??1;if(e.preferredStrategy&&(this.validateUploadStrategy(e.preferredStrategy)?(r=e.preferredStrategy,t.push(`Using preferred strategy: ${r}`)):n.push(`Preferred strategy '${e.preferredStrategy}' not supported by data store, falling back`)),(!e.preferredStrategy||!this.validateUploadStrategy(e.preferredStrategy))&&(this.capabilities.supportsParallelUploads&&e.fileSize>(e.minChunkSizeForParallel??10*1024*1024)?(r=`parallel`,t.push(`Selected parallel upload for large file (${e.fileSize} bytes)`)):(r=`single`,t.push(this.capabilities.supportsParallelUploads?`Selected single upload for small file (${e.fileSize} bytes)`:`Selected single upload (parallel not supported by data store)`))),this.capabilities.minChunkSize&&i<this.capabilities.minChunkSize&&(n.push(`Chunk size ${i} below minimum ${this.capabilities.minChunkSize}, adjusting`),i=this.capabilities.minChunkSize),this.capabilities.maxChunkSize&&i>this.capabilities.maxChunkSize&&(n.push(`Chunk size ${i} above maximum ${this.capabilities.maxChunkSize}, adjusting`),i=this.capabilities.maxChunkSize),r===`parallel`&&(this.capabilities.maxConcurrentUploads&&a>this.capabilities.maxConcurrentUploads&&(n.push(`Parallel uploads ${a} exceeds maximum ${this.capabilities.maxConcurrentUploads}, adjusting`),a=this.capabilities.maxConcurrentUploads),this.capabilities.maxParts)){let t=Math.ceil(e.fileSize/i);if(t>this.capabilities.maxParts){let r=Math.ceil(e.fileSize/this.capabilities.maxParts);n.push(`Estimated parts ${t} exceeds maximum ${this.capabilities.maxParts}, increasing chunk size`),i=Math.max(i,r)}}return this.validateUploadStrategy(r)||(n.push(`Final strategy validation failed, falling back to single upload`),r=`single`,a=1),t.push(`Data store capabilities: parallel=${this.capabilities.supportsParallelUploads}, concatenation=${this.capabilities.supportsConcatenation}, resumable=${this.capabilities.supportsResumableUploads}`),{strategy:r,chunkSize:i,parallelUploads:r===`parallel`?a:1,reasoning:t,warnings:n}}getDataStoreCapabilities(){return this.capabilities}validateConfiguration(e){let t=[];return e.preferredStrategy&&!this.validateUploadStrategy(e.preferredStrategy)&&t.push(`Preferred strategy '${e.preferredStrategy}' not supported by data store`),e.preferredChunkSize&&(this.capabilities.minChunkSize&&e.preferredChunkSize<this.capabilities.minChunkSize&&t.push(`Chunk size ${e.preferredChunkSize} below data store minimum ${this.capabilities.minChunkSize}`),this.capabilities.maxChunkSize&&e.preferredChunkSize>this.capabilities.maxChunkSize&&t.push(`Chunk size ${e.preferredChunkSize} above data store maximum ${this.capabilities.maxChunkSize}`)),e.parallelUploads&&this.capabilities.maxConcurrentUploads&&e.parallelUploads>this.capabilities.maxConcurrentUploads&&t.push(`Parallel uploads ${e.parallelUploads} exceeds data store maximum ${this.capabilities.maxConcurrentUploads}`),{valid:t.length===0,errors:t}}};Object.defineProperty(exports,`a`,{enumerable:!0,get:function(){return v}}),Object.defineProperty(exports,`c`,{enumerable:!0,get:function(){return s}}),Object.defineProperty(exports,`i`,{enumerable:!0,get:function(){return x}}),Object.defineProperty(exports,`n`,{enumerable:!0,get:function(){return y}}),Object.defineProperty(exports,`o`,{enumerable:!0,get:function(){return _}}),Object.defineProperty(exports,`r`,{enumerable:!0,get:function(){return b}}),Object.defineProperty(exports,`s`,{enumerable:!0,get:function(){return c}}),Object.defineProperty(exports,`t`,{enumerable:!0,get:function(){return S}});
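
Aside from the renamed UploadEngine tag, the bulk of this chunk is a MIME sniffer: it checks magic bytes first (PNG, JPEG, GIF, WEBP, AVIF, media containers, PDF, ZIP-based Office formats, archives, fonts, JSON) and only falls back to the file extension afterwards, defaulting to application/octet-stream. A standalone sketch of that magic-bytes-then-extension technique, for illustration only (not the package's exported API):

```typescript
// Illustration of the detection order used in this chunk; not the real export.
function sniffMime(bytes: Uint8Array, fileName?: string): string {
  const matches = (sig: number[], offset = 0) =>
    bytes.length >= offset + sig.length &&
    sig.every((b, i) => bytes[offset + i] === b);

  // 1. Magic bytes win over everything else.
  if (matches([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a])) return "image/png";
  if (matches([0xff, 0xd8, 0xff])) return "image/jpeg";
  if (matches([0x47, 0x49, 0x46, 0x38])) return "image/gif"; // GIF87a / GIF89a
  if (matches([0x52, 0x49, 0x46, 0x46]) && matches([0x57, 0x45, 0x42, 0x50], 8))
    return "image/webp"; // "RIFF" container tagged "WEBP" at offset 8
  if (matches([0x25, 0x50, 0x44, 0x46])) return "application/pdf"; // "%PDF"

  // 2. Extension fallback, mirroring the chunk's trailing switch.
  switch (fileName?.split(".").pop()?.toLowerCase()) {
    case "json":
      return "application/json";
    case "csv":
      return "text/csv";
    default:
      return "application/octet-stream";
  }
}
```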
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@uploadista/core",
- "version": "0.0.20-beta.7",
+ "version": "0.0.20-beta.8",
  "description": "Core package of Uploadista",
  "license": "MIT",
  "author": "Uploadista",
@@ -71,20 +71,20 @@
  },
  "devDependencies": {
  "@effect/vitest": "0.27.0",
- "@types/node": "24.10.3",
- "effect": "3.19.11",
+ "@types/node": "24.10.4",
+ "effect": "3.19.12",
  "tsd": "0.33.0",
- "tsdown": "0.17.2",
+ "tsdown": "0.18.0",
  "vitest": "4.0.15",
- "zod": "4.1.13",
- "@uploadista/typescript-config": "0.0.20-beta.7"
+ "zod": "4.2.0",
+ "@uploadista/typescript-config": "0.0.20-beta.8"
  },
  "publishConfig": {
  "access": "public",
  "registry": "https://registry.npmjs.org/"
  },
  "scripts": {
- "build": "tsdown",
+ "build": "tsc --noEmit && tsdown",
  "dev": "tsc -b",
  "release": "tsdown && bumpp && pnpm publish --access restricted --no-git-checks",
  "format": "biome format --write ./src",
package/src/flow/{flow-server.ts → flow-engine.ts} RENAMED
@@ -53,7 +53,7 @@ export class FlowWaitUntil extends Context.Tag("FlowWaitUntil")<
  }

  import { FlowEventEmitter, FlowJobKVStore } from "../types";
- import { UploadServer } from "../upload";
+ import { UploadEngine } from "../upload";
  import { DeadLetterQueueService } from "./dead-letter-queue";
  import type { FlowEvent } from "./event";
  import type { FlowJob } from "./types/flow-job";
@@ -61,7 +61,7 @@ import type { FlowJob } from "./types/flow-job";
  /**
  * Flow provider interface that applications must implement.
  *
- * This interface defines how the FlowServer retrieves flow definitions.
+ * This interface defines how the FlowEngine retrieves flow definitions.
  * Applications provide their own implementation to load flows from a database,
  * configuration files, or any other source.
  *
@@ -89,7 +89,7 @@ import type { FlowJob } from "./types/flow-job";
  * })
  * };
  *
- * // Provide to FlowServer
+ * // Provide to FlowEngine
  * const flowProviderLayer = Layer.succeed(FlowProvider, dbFlowProvider);
  * ```
  */
@@ -104,7 +104,7 @@ export type FlowProviderShape<TRequirements = any> = {
  * Effect-TS context tag for the FlowProvider service.
  *
  * Applications must provide an implementation of FlowProviderShape
- * to enable the FlowServer to retrieve flow definitions.
+ * to enable the FlowEngine to retrieve flow definitions.
  *
  * @example
  * ```typescript
@@ -144,7 +144,7 @@ export class FlowProvider extends Context.Tag("FlowProvider")<
  * ```typescript
  * // Execute a flow
  * const program = Effect.gen(function* () {
- * const server = yield* FlowServer;
+ * const server = yield* FlowEngine;
  *
  * // Start flow execution (returns immediately)
  * const job = yield* server.runFlow({
@@ -171,7 +171,7 @@ export class FlowProvider extends Context.Tag("FlowProvider")<
  *
  * // Resume a paused flow
  * const resume = Effect.gen(function* () {
- * const server = yield* FlowServer;
+ * const server = yield* FlowEngine;
  *
  * // Flow paused waiting for user input at node "approval_1"
  * const job = yield* server.resumeFlow({
@@ -186,7 +186,7 @@ export class FlowProvider extends Context.Tag("FlowProvider")<
  *
  * // Cancel a flow
  * const cancel = Effect.gen(function* () {
- * const server = yield* FlowServer;
+ * const server = yield* FlowEngine;
  *
  * // Cancel flow and cleanup intermediate files
  * const job = yield* server.cancelFlow("job123", "client123");
@@ -196,7 +196,7 @@ export class FlowProvider extends Context.Tag("FlowProvider")<
  *
  * // Check flow structure before execution
  * const inspect = Effect.gen(function* () {
- * const server = yield* FlowServer;
+ * const server = yield* FlowEngine;
  *
  * const flowData = yield* server.getFlowData("resize-optimize", "client123");
  * console.log("Nodes:", flowData.nodes);
@@ -206,7 +206,7 @@ export class FlowProvider extends Context.Tag("FlowProvider")<
  * });
  * ```
  */
- export type FlowServerShape = {
+ export type FlowEngineShape = {
  getFlow: <TRequirements>(
  flowId: string,
  clientId: string | null,
@@ -264,16 +264,16 @@ export type FlowServerShape = {
  };

  /**
- * Effect-TS context tag for the FlowServer service.
+ * Effect-TS context tag for the FlowEngine service.
  *
- * Use this tag to access the FlowServer in an Effect context.
+ * Use this tag to access the FlowEngine in an Effect context.
  * The server must be provided via a Layer or dependency injection.
  *
  * @example
  * ```typescript
- * // Access FlowServer in an Effect
+ * // Access FlowEngine in an Effect
  * const flowEffect = Effect.gen(function* () {
- * const server = yield* FlowServer;
+ * const server = yield* FlowEngine;
  * const job = yield* server.runFlow({
  * flowId: "my-flow",
  * storageId: "s3",
@@ -283,7 +283,7 @@ export type FlowServerShape = {
  * return job;
  * });
  *
- * // Provide FlowServer layer
+ * // Provide FlowEngine layer
  * const program = flowEffect.pipe(
  * Effect.provide(flowServer),
  * Effect.provide(flowProviderLayer),
@@ -291,13 +291,13 @@ export type FlowServerShape = {
  * );
  * ```
  */
- export class FlowServer extends Context.Tag("FlowServer")<
- FlowServer,
- FlowServerShape
+ export class FlowEngine extends Context.Tag("FlowEngine")<
+ FlowEngine,
+ FlowEngineShape
  >() {}

  /**
- * Legacy configuration options for FlowServer.
+ * Legacy configuration options for FlowEngine.
  *
  * @deprecated Use Effect Layers and FlowProvider instead.
  * This type is kept for backward compatibility.
@@ -305,7 +305,7 @@ export class FlowServer extends Context.Tag("FlowServer")<
  * @property getFlow - Function to retrieve flow definitions
  * @property kvStore - KV store for flow job metadata
  */
- export type FlowServerOptions = {
+ export type FlowEngineOptions = {
  getFlow: <TRequirements>({
  flowId,
  storageId,
@@ -706,12 +706,12 @@ function withFlowEvents<
  }

  // Core FlowServer implementation
- export function createFlowServer() {
+ export function createFlowEngine() {
  return Effect.gen(function* () {
  const flowProvider = yield* FlowProvider;
  const eventEmitter = yield* FlowEventEmitter;
  const kvStore = yield* FlowJobKVStore;
- const uploadServer = yield* UploadServer;
+ const uploadEngine = yield* UploadEngine;
  const dlqOption = yield* DeadLetterQueueService.optional;

  const updateJob = (jobId: string, updates: Partial<FlowJob>) =>
@@ -747,7 +747,7 @@ export function createFlowServer() {
  yield* Effect.all(
  job.intermediateFiles.map((fileId) =>
  Effect.gen(function* () {
- yield* uploadServer.delete(fileId, clientId);
+ yield* uploadEngine.delete(fileId, clientId);
  yield* Effect.logDebug(`Deleted intermediate file ${fileId}`);
  }).pipe(
  Effect.catchAll((error) =>
@@ -770,10 +770,7 @@ export function createFlowServer() {
  });

  // Helper function to add failed job to Dead Letter Queue
- const addToDeadLetterQueue = (
- jobId: string,
- error: UploadistaError,
- ) =>
+ const addToDeadLetterQueue = (jobId: string, error: UploadistaError) =>
  Effect.gen(function* () {
  if (Option.isNone(dlqOption)) {
  // DLQ not configured, skip
@@ -1307,88 +1304,91 @@ export function createFlowServer() {
  }),
  );

- const resumeFlowInBackgroundWithErrorHandling = resumeFlowInBackground.pipe(
- Effect.catchAll((error) =>
- Effect.gen(function* () {
- yield* Effect.logError("Flow resume failed", error);
-
- // Convert error to a proper message
- const errorMessage =
- error instanceof UploadistaError ? error.body : String(error);
+ const resumeFlowInBackgroundWithErrorHandling =
+ resumeFlowInBackground.pipe(
+ Effect.catchAll((error) =>
+ Effect.gen(function* () {
+ yield* Effect.logError("Flow resume failed", error);

- yield* Effect.logInfo(
- `Updating job ${jobId} to failed status with error: ${errorMessage}`,
- );
+ // Convert error to a proper message
+ const errorMessage =
+ error instanceof UploadistaError
+ ? error.body
+ : String(error);

- // Update job as failed - do this FIRST before cleanup
- yield* updateJob(jobId, {
- status: "failed",
- error: errorMessage,
- updatedAt: new Date(),
- }).pipe(
- Effect.catchAll((updateError) =>
- Effect.gen(function* () {
- yield* Effect.logError(
- `Failed to update job ${jobId}`,
- updateError,
- );
- return Effect.succeed(undefined);
- }),
- ),
- );
+ yield* Effect.logInfo(
+ `Updating job ${jobId} to failed status with error: ${errorMessage}`,
+ );

- // Emit FlowError event to notify client
- const currentJob = yield* kvStore.get(jobId);
- if (currentJob) {
- yield* eventEmitter
- .emit(jobId, {
- jobId,
- eventType: EventType.FlowError,
- flowId: currentJob.flowId,
- error: errorMessage,
- })
- .pipe(
- Effect.catchAll((emitError) =>
- Effect.gen(function* () {
- yield* Effect.logError(
- `Failed to emit FlowError event for job ${jobId}`,
- emitError,
- );
- return Effect.succeed(undefined);
- }),
- ),
- );
- }
+ // Update job as failed - do this FIRST before cleanup
+ yield* updateJob(jobId, {
+ status: "failed",
+ error: errorMessage,
+ updatedAt: new Date(),
+ }).pipe(
+ Effect.catchAll((updateError) =>
+ Effect.gen(function* () {
+ yield* Effect.logError(
+ `Failed to update job ${jobId}`,
+ updateError,
+ );
+ return Effect.succeed(undefined);
+ }),
+ ),
+ );

- // Cleanup intermediate files even on failure (don't let this fail the error handling)
- yield* cleanupIntermediateFiles(jobId, clientId).pipe(
- Effect.catchAll((cleanupError) =>
- Effect.gen(function* () {
- yield* Effect.logWarning(
- `Failed to cleanup intermediate files for job ${jobId}`,
- cleanupError,
+ // Emit FlowError event to notify client
+ const currentJob = yield* kvStore.get(jobId);
+ if (currentJob) {
+ yield* eventEmitter
+ .emit(jobId, {
+ jobId,
+ eventType: EventType.FlowError,
+ flowId: currentJob.flowId,
+ error: errorMessage,
+ })
+ .pipe(
+ Effect.catchAll((emitError) =>
+ Effect.gen(function* () {
+ yield* Effect.logError(
+ `Failed to emit FlowError event for job ${jobId}`,
+ emitError,
+ );
+ return Effect.succeed(undefined);
+ }),
+ ),
  );
- return Effect.succeed(undefined);
- }),
- ),
- );
+ }

- // Add failed job to Dead Letter Queue for retry/debugging
- const uploadistaError =
- error instanceof UploadistaError
- ? error
- : new UploadistaError({
- code: "UNKNOWN_ERROR",
- status: 500,
- body: String(error),
- cause: error,
- });
- yield* addToDeadLetterQueue(jobId, uploadistaError);
-
- throw error;
- }),
- ),
- );
+ // Cleanup intermediate files even on failure (don't let this fail the error handling)
+ yield* cleanupIntermediateFiles(jobId, clientId).pipe(
+ Effect.catchAll((cleanupError) =>
+ Effect.gen(function* () {
+ yield* Effect.logWarning(
+ `Failed to cleanup intermediate files for job ${jobId}`,
+ cleanupError,
+ );
+ return Effect.succeed(undefined);
+ }),
+ ),
+ );
+
+ // Add failed job to Dead Letter Queue for retry/debugging
+ const uploadistaError =
+ error instanceof UploadistaError
+ ? error
+ : new UploadistaError({
+ code: "UNKNOWN_ERROR",
+ status: 500,
+ body: String(error),
+ cause: error,
+ });
+ yield* addToDeadLetterQueue(jobId, uploadistaError);
+
+ throw error;
+ }),
+ ),
+ );

  // Fork the resume execution to run in background
  // Use waitUntil if available (Cloudflare Workers), otherwise fork normally
@@ -1564,10 +1564,10 @@ export function createFlowServer() {
  Effect.gen(function* () {
  yield* eventEmitter.unsubscribe(jobId);
  }),
- } satisfies FlowServerShape;
+ } satisfies FlowEngineShape;
  });
  }

- // Export the FlowServer layer with job store dependency
- export const flowServer = Layer.effect(FlowServer, createFlowServer());
- export type FlowServerLayer = typeof flowServer;
+ // Export the FlowEngine layer with job store dependency
+ export const flowEngine = Layer.effect(FlowEngine, createFlowEngine());
+ export type FlowEngineLayer = typeof flowEngine;
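
The exported layer follows the rename as well (flowServer → flowEngine, FlowServerLayer → FlowEngineLayer). A wiring sketch under the same assumed import path as above; per createFlowEngine, the engine still needs FlowProvider, FlowEventEmitter, FlowJobKVStore, and UploadEngine provided alongside it:

```typescript
import { Effect } from "effect";
// Assumed entry point; the diff defines these in src/flow/flow-engine.ts.
import { FlowEngine, flowEngine } from "@uploadista/core/flow";

const inspect = Effect.gen(function* () {
  const engine = yield* FlowEngine; // was: yield* FlowServer
  return yield* engine.getFlowData("resize-optimize", "client123");
}).pipe(
  Effect.provide(flowEngine), // was: Effect.provide(flowServer)
  // ...plus your FlowProvider, event-emitter, KV-store, and UploadEngine layers.
);
```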
package/src/flow/index.ts CHANGED
@@ -15,23 +15,24 @@ export * from "./output-type-registry";
  // Built-in node types (auto-registers on import)
  import "./node-types";

+ // Re-export streaming config from types for transform node usage
+ export type { StreamingConfig } from "../types/data-store";
+ export { DEFAULT_STREAMING_CONFIG } from "../types/data-store";
+ export * from "./dead-letter-queue";
  export * from "./flow";
  // Core flow engine
  export { createFlowWithSchema } from "./flow";
- export * from "./flow-server";
+ export * from "./flow-engine";
  export * from "./node";
  // Node types and interfaces
  export { createFlowNode, NodeType } from "./node";
  export * from "./node-types";
  export * from "./nodes";
- // Re-export streaming config from types for transform node usage
- export type { StreamingConfig } from "../types/data-store";
- export { DEFAULT_STREAMING_CONFIG } from "../types/data-store";
  // Re-export transform node types
  export type {
- TransformMode,
  StreamingTransformFn,
  StreamingTransformResult,
+ TransformMode,
  TransformNodeConfig,
  } from "./nodes/transform-node";
  // Parallel execution
@@ -50,15 +51,14 @@ export * from "./plugins/zip-plugin";
  export * from "./type-guards";
  export * from "./typed-flow";
  export { createFlow } from "./typed-flow";
+ // Dead Letter Queue types and service
+ export * from "./types/dead-letter-item";
  export * from "./types/flow-file";
  export * from "./types/flow-job";
  export * from "./types/flow-types";
- export * from "./types/run-args";
- // Dead Letter Queue types and service
- export * from "./types/dead-letter-item";
  export * from "./types/retry-policy";
- export * from "./dead-letter-queue";
+ export * from "./types/run-args";
  export * from "./types/type-utils";
- export * from "./utils/resolve-upload-metadata";
  // File naming utilities
  export * from "./utils/file-naming";
+ export * from "./utils/resolve-upload-metadata";
package/src/flow/nodes/input-node.ts CHANGED
@@ -3,7 +3,7 @@ import { z } from "zod";
  import { UploadistaError } from "../../errors";
  import type { InputFile } from "../../types";
  import { uploadFileSchema } from "../../types";
- import { UploadServer } from "../../upload";
+ import { UploadEngine } from "../../upload";
  import { arrayBuffer, fetchFile } from "../../upload/upload-url";
  import { createFlowNode, NodeType } from "../node";
  import { STORAGE_OUTPUT_TYPE_ID, STREAMING_INPUT_TYPE_ID } from "../node-types";
@@ -174,7 +174,7 @@ export function createInputNode(
  ) {
  const keepOutput = options?.keepOutput ?? false;
  return Effect.gen(function* () {
- const uploadServer = yield* UploadServer;
+ const uploadEngine = yield* UploadEngine;
  return yield* createFlowNode({
  id,
  name: "Input",
@@ -208,7 +208,7 @@ export function createInputNode(
  },
  };

- const uploadFile = yield* uploadServer.createUpload(
+ const uploadFile = yield* uploadEngine.createUpload(
  inputFile,
  clientId,
  );
@@ -220,7 +220,7 @@ export function createInputNode(

  case "finalize": {
  // Get final upload file from upload server's KV store
- const finalUploadFile = yield* uploadServer.getUpload(
+ const finalUploadFile = yield* uploadEngine.getUpload(
  data.uploadId,
  );
@@ -277,7 +277,7 @@ export function createInputNode(
  : undefined,
  };

- const uploadFile = yield* uploadServer.upload(
+ const uploadFile = yield* uploadEngine.upload(
  inputFile,
  clientId,
  stream,