@uploadista/core 0.0.20 → 0.1.0
This diff shows the content changes between publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
- package/dist/{checksum-DVPe3Db4.cjs → checksum-CTpNXWEL.cjs} +1 -1
- package/dist/errors/index.cjs +1 -1
- package/dist/errors/index.d.mts +2 -2
- package/dist/flow/index.cjs +1 -1
- package/dist/flow/index.d.cts +2 -2
- package/dist/flow/index.d.mts +6 -6
- package/dist/flow/index.mjs +1 -1
- package/dist/flow-CA8xO6wP.mjs +2 -0
- package/dist/flow-CA8xO6wP.mjs.map +1 -0
- package/dist/flow-DKJaCPxL.cjs +1 -0
- package/dist/index-9gyMMEIB.d.cts.map +1 -1
- package/dist/{index-RuQUCROH.d.mts → index-BKY0VjsL.d.mts} +230 -169
- package/dist/index-BKY0VjsL.d.mts.map +1 -0
- package/dist/{index-B9V5SSxl.d.mts → index-D8MZ6P3o.d.mts} +2 -2
- package/dist/{index-B9V5SSxl.d.mts.map → index-D8MZ6P3o.d.mts.map} +1 -1
- package/dist/{index-BFSHumky.d.mts → index-DQuMQssI.d.mts} +2 -2
- package/dist/{index-BFSHumky.d.mts.map → index-DQuMQssI.d.mts.map} +1 -1
- package/dist/{index-DMqaf28W.d.cts → index-j_n72QK0.d.cts} +228 -167
- package/dist/index-j_n72QK0.d.cts.map +1 -0
- package/dist/index.cjs +1 -1
- package/dist/index.d.cts +2 -2
- package/dist/index.d.mts +6 -6
- package/dist/index.mjs +1 -1
- package/dist/{stream-limiter-BvkaZXcz.cjs → stream-limiter-DH0vv46_.cjs} +1 -1
- package/dist/streams/index.cjs +1 -1
- package/dist/streams/index.d.mts +2 -2
- package/dist/streams/index.mjs +1 -1
- package/dist/testing/index.cjs +2 -2
- package/dist/testing/index.d.cts +1 -1
- package/dist/testing/index.d.cts.map +1 -1
- package/dist/testing/index.d.mts +5 -5
- package/dist/testing/index.d.mts.map +1 -1
- package/dist/testing/index.mjs +2 -2
- package/dist/testing/index.mjs.map +1 -1
- package/dist/types/index.cjs +1 -1
- package/dist/types/index.d.cts +2 -2
- package/dist/types/index.d.mts +6 -6
- package/dist/types/index.mjs +1 -1
- package/dist/types-BF_tvkRh.cjs +1 -0
- package/dist/types-BRnwrJDg.mjs +2 -0
- package/dist/types-BRnwrJDg.mjs.map +1 -0
- package/dist/upload/index.cjs +1 -1
- package/dist/upload/index.d.cts +1 -1
- package/dist/upload/index.d.mts +5 -5
- package/dist/upload/index.mjs +1 -1
- package/dist/upload-CLHJ1SFS.cjs +1 -0
- package/dist/upload-CpsShjP3.mjs +2 -0
- package/dist/upload-CpsShjP3.mjs.map +1 -0
- package/dist/{uploadista-error-DR0XimpE.d.mts → uploadista-error-B1qbOy9N.d.mts} +1 -1
- package/dist/{uploadista-error-DR0XimpE.d.mts.map → uploadista-error-B1qbOy9N.d.mts.map} +1 -1
- package/dist/{uploadista-error-BgQU45we.cjs → uploadista-error-CLWoRAAr.cjs} +1 -1
- package/dist/uploadista-error-CkSxSyNo.mjs.map +1 -1
- package/dist/utils/index.cjs +1 -1
- package/dist/utils/index.d.mts +2 -2
- package/dist/utils/index.mjs +1 -1
- package/dist/{utils-UUJt8ILJ.cjs → utils-CvZJUNEo.cjs} +1 -1
- package/dist/{utils-B-ZhQ6b0.mjs → utils-DVwfrVBJ.mjs} +1 -1
- package/dist/utils-DVwfrVBJ.mjs.map +1 -0
- package/package.json +8 -8
- package/src/flow/circuit-breaker-store.ts +7 -8
- package/src/flow/flow.ts +6 -5
- package/src/flow/nodes/transform-node.ts +15 -1
- package/src/flow/plugins/image-plugin.ts +12 -3
- package/src/flow/plugins/video-plugin.ts +12 -3
- package/src/flow/types/flow-types.ts +75 -6
- package/src/flow/types/retry-policy.ts +5 -2
- package/src/flow/types/type-utils.ts +4 -6
- package/src/flow/utils/file-naming.ts +36 -11
- package/src/testing/mock-upload-engine.ts +18 -1
- package/src/types/circuit-breaker-store.ts +2 -2
- package/src/types/data-store.ts +4 -1
- package/src/types/kv-store.ts +13 -12
- package/src/types/upload-file.ts +29 -4
- package/src/upload/upload-chunk.ts +1 -1
- package/dist/flow-BHVkk_6W.cjs +0 -1
- package/dist/flow-DlhHOlMk.mjs +0 -2
- package/dist/flow-DlhHOlMk.mjs.map +0 -1
- package/dist/index-DMqaf28W.d.cts.map +0 -1
- package/dist/index-RuQUCROH.d.mts.map +0 -1
- package/dist/streams-BiD_pOPH.cjs +0 -0
- package/dist/streams-Cqjxk2rI.mjs +0 -1
- package/dist/types-Cws60JHC.cjs +0 -1
- package/dist/types-DKGQJIEr.mjs +0 -2
- package/dist/types-DKGQJIEr.mjs.map +0 -1
- package/dist/upload-C-C7hn1-.mjs +0 -2
- package/dist/upload-C-C7hn1-.mjs.map +0 -1
- package/dist/upload-DWBlRXHh.cjs +0 -1
- package/dist/utils-B-ZhQ6b0.mjs.map +0 -1
- /package/dist/{index-C-svZlpj.d.mts → index-DWe68pTi.d.mts} +0 -0
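The hunks that follow are the new upload bundles. Their uploadStream path negotiates against the data store's capabilities: when the store advertises supportsStreamingWrite and implements writeStream, bytes are piped through directly; otherwise the engine buffers the whole stream and reuses the chunked upload path. A minimal TypeScript sketch of that negotiation, with hypothetical simplified types standing in for the package's real DataStore interface:

// Hypothetical, simplified shapes; the package's real DataStore interface is richer.
type Capabilities = { supportsStreamingWrite?: boolean };

interface DataStore {
  getCapabilities(): Capabilities;
  // Optional streaming write, mirroring the capability flag.
  writeStream?(
    id: string,
    opts: { stream: ReadableStream<Uint8Array> },
  ): Promise<{ size: number; path: string; url?: string }>;
}

// Stream straight through when supported; otherwise buffer and fall back.
async function uploadStream(
  store: DataStore,
  id: string,
  stream: ReadableStream<Uint8Array>,
  uploadBuffered: (id: string, data: Uint8Array) => Promise<void>,
): Promise<void> {
  if (store.getCapabilities().supportsStreamingWrite && store.writeStream) {
    await store.writeStream(id, { stream });
    return;
  }
  // Fallback: collect all chunks into one contiguous buffer.
  const chunks: Uint8Array[] = [];
  const reader = stream.getReader();
  for (let r = await reader.read(); !r.done; r = await reader.read()) {
    chunks.push(r.value);
  }
  const buffer = new Uint8Array(chunks.reduce((n, c) => n + c.length, 0));
  let offset = 0;
  for (const c of chunks) {
    buffer.set(c, offset);
    offset += c.length;
  }
  await uploadBuffered(id, buffer);
}

The bundled engine performs the same buffering with Effect streams (Stream.runForEach) rather than a raw reader, but the control flow is the same.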
package/dist/upload-CLHJ1SFS.cjs
@@ -0,0 +1 @@
const e=require(`./types-BF_tvkRh.cjs`),t=require(`./uploadista-error-CLWoRAAr.cjs`),n=require(`./checksum-CTpNXWEL.cjs`),r=require(`./stream-limiter-DH0vv46_.cjs`);let i=require(`effect`);function a(e,t,n=0){return e.length<n+t.length?!1:t.every((t,r)=>e[n+r]===t)}function o(e,t,n=0){if(e.length<n+t.length)return!1;for(let r=0;r<t.length;r++)if(e[n+r]!==t.charCodeAt(r))return!1;return!0}const s=(e,t)=>{if(e.length===0)return`application/octet-stream`;if(a(e,[137,80,78,71,13,10,26,10]))return`image/png`;if(a(e,[255,216,255]))return`image/jpeg`;if(o(e,`GIF87a`)||o(e,`GIF89a`))return`image/gif`;if(a(e,[82,73,70,70])&&e.length>=12&&o(e,`WEBP`,8))return`image/webp`;if(e.length>=12&&a(e,[0,0,0],0)&&o(e,`ftyp`,4)&&(o(e,`avif`,8)||o(e,`avis`,8)))return`image/avif`;if(e.length>=12&&o(e,`ftyp`,4)&&(o(e,`heic`,8)||o(e,`heif`,8)||o(e,`mif1`,8)))return`image/heic`;if(a(e,[66,77]))return`image/bmp`;if(a(e,[73,73,42,0])||a(e,[77,77,0,42]))return`image/tiff`;if(a(e,[0,0,1,0]))return`image/x-icon`;if(e.length>=5){let t=new TextDecoder(`utf-8`,{fatal:!1}).decode(e.slice(0,Math.min(1024,e.length)));if(t.includes(`<svg`)||t.includes(`<?xml`)&&t.includes(`<svg`))return`image/svg+xml`}if(e.length>=12&&o(e,`ftyp`,4)){let t=new TextDecoder().decode(e.slice(8,12));if(t.startsWith(`mp4`)||t.startsWith(`M4`)||t.startsWith(`isom`))return`video/mp4`}if(a(e,[26,69,223,163]))return`video/webm`;if(a(e,[82,73,70,70])&&e.length>=12&&o(e,`AVI `,8))return`video/x-msvideo`;if(e.length>=8&&(o(e,`moov`,4)||o(e,`mdat`,4)||o(e,`free`,4)))return`video/quicktime`;if(a(e,[26,69,223,163])&&e.length>=100&&new TextDecoder(`utf-8`,{fatal:!1}).decode(e.slice(0,100)).includes(`matroska`))return`video/x-matroska`;if(a(e,[255,251])||a(e,[255,243])||a(e,[255,242])||o(e,`ID3`))return`audio/mpeg`;if(a(e,[82,73,70,70])&&e.length>=12&&o(e,`WAVE`,8))return`audio/wav`;if(o(e,`fLaC`))return`audio/flac`;if(o(e,`OggS`))return`audio/ogg`;if(e.length>=12&&o(e,`ftyp`,4)&&o(e,`M4A`,8))return`audio/mp4`;if(o(e,`%PDF`))return`application/pdf`;if(a(e,[80,75,3,4])||a(e,[80,75,5,6])||a(e,[80,75,7,8])){if(e.length>=1024){let t=new TextDecoder(`utf-8`,{fatal:!1}).decode(e);if(t.includes(`word/`))return`application/vnd.openxmlformats-officedocument.wordprocessingml.document`;if(t.includes(`xl/`))return`application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`;if(t.includes(`ppt/`))return`application/vnd.openxmlformats-officedocument.presentationml.presentation`}return`application/zip`}if(a(e,[82,97,114,33,26,7]))return`application/x-rar-compressed`;if(a(e,[55,122,188,175,39,28]))return`application/x-7z-compressed`;if(a(e,[31,139]))return`application/gzip`;if(e.length>=262&&o(e,`ustar`,257))return`application/x-tar`;if(o(e,`wOFF`))return`font/woff`;if(o(e,`wOF2`))return`font/woff2`;if(a(e,[0,1,0,0,0]))return`font/ttf`;if(o(e,`OTTO`))return`font/otf`;if(e.length>=1){let t=e[0];if(t===123||t===91)try{let t=new TextDecoder(`utf-8`).decode(e.slice(0,Math.min(1024,e.length)));return 
JSON.parse(t.trim()),`application/json`}catch{}}if(t)switch(t.split(`.`).pop()?.toLowerCase()){case`jpg`:case`jpeg`:return`image/jpeg`;case`png`:return`image/png`;case`gif`:return`image/gif`;case`webp`:return`image/webp`;case`avif`:return`image/avif`;case`heic`:case`heif`:return`image/heic`;case`bmp`:return`image/bmp`;case`tiff`:case`tif`:return`image/tiff`;case`ico`:return`image/x-icon`;case`svg`:return`image/svg+xml`;case`mp4`:case`m4v`:return`video/mp4`;case`webm`:return`video/webm`;case`avi`:return`video/x-msvideo`;case`mov`:return`video/quicktime`;case`mkv`:return`video/x-matroska`;case`mp3`:return`audio/mpeg`;case`wav`:return`audio/wav`;case`flac`:return`audio/flac`;case`ogg`:return`audio/ogg`;case`m4a`:return`audio/mp4`;case`pdf`:return`application/pdf`;case`docx`:return`application/vnd.openxmlformats-officedocument.wordprocessingml.document`;case`xlsx`:return`application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`;case`pptx`:return`application/vnd.openxmlformats-officedocument.presentationml.presentation`;case`zip`:return`application/zip`;case`rar`:return`application/x-rar-compressed`;case`7z`:return`application/x-7z-compressed`;case`gz`:case`gzip`:return`application/gzip`;case`tar`:return`application/x-tar`;case`woff`:return`font/woff`;case`woff2`:return`font/woff2`;case`ttf`:return`font/ttf`;case`otf`:return`font/otf`;case`txt`:return`text/plain`;case`json`:return`application/json`;case`xml`:return`application/xml`;case`html`:case`htm`:return`text/html`;case`css`:return`text/css`;case`js`:return`application/javascript`;case`csv`:return`text/csv`;default:return`application/octet-stream`}return`application/octet-stream`};function c(e,t){return e===t?!0:e.split(`/`)[0]===t.split(`/`)[0]}const l=i.Effect.gen(function*(){let e=yield*i.Effect.currentSpan.pipe(i.Effect.option);return i.Option.match(e,{onNone:()=>void 0,onSome:e=>({traceId:e.traceId,spanId:e.spanId,traceFlags:e.sampled?1:0})})}),u=(t,n,{dataStoreService:r,kvStore:a,eventEmitter:o,generateId:s})=>i.Effect.gen(function*(){let c=yield*l,u=new Date().toISOString();return yield*i.Effect.gen(function*(){let i=yield*r.getDataStore(t.storageId,n),l=yield*s.generateId(),{size:d,type:f,fileName:p,lastModified:m,metadata:h,flow:g}=t,_={};if(h)try{_=JSON.parse(h)}catch{_={}}let v={..._,type:f,fileName:p??``};m&&(v.lastModified=m.toString());let y={id:l,size:d,metadata:v,offset:0,creationDate:u,storage:{id:t.storageId,type:f,path:``,bucket:i.bucket},flow:g,traceContext:c},b=yield*i.create(y);return yield*a.set(l,b),yield*o.emit(l,{type:e.n.UPLOAD_STARTED,data:b,flow:b.flow}),b}).pipe(i.Effect.withSpan(`upload-create`,{attributes:{"upload.file_name":t.fileName??`unknown`,"upload.file_size":t.size?.toString()??`0`,"upload.storage_id":t.storageId,"upload.mime_type":t.type,"upload.has_flow":t.flow?`true`:`false`}}))}).pipe(i.Effect.withSpan(`upload`,{attributes:{"upload.file_name":t.fileName??`unknown`,"upload.file_size":t.size?.toString()??`0`,"upload.storage_id":t.storageId,"upload.mime_type":t.type,"upload.has_flow":t.flow?`true`:`false`}}),i.Effect.tap(e=>i.Effect.gen(function*(){if(yield*i.Metric.increment(i.Metric.counter(`upload_created_total`,{description:`Total number of uploads created`})),e.size){let t=i.Metric.histogram(`upload_file_size_bytes`,i.MetricBoundaries.exponential({start:1024,factor:2,count:25}));yield*i.Metric.update(t,e.size)}let t=i.Metric.gauge(`active_uploads`);yield*i.Metric.increment(t)})),i.Effect.tap(e=>i.Effect.logInfo(`Upload 
created`).pipe(i.Effect.annotateLogs({"upload.id":e.id,"upload.file_name":t.fileName??`unknown`,"upload.file_size":t.size?.toString()??`0`,"upload.storage_id":t.storageId}))),i.Effect.tapError(e=>i.Effect.gen(function*(){yield*i.Effect.logError(`Upload creation failed`).pipe(i.Effect.annotateLogs({"upload.file_name":t.fileName??`unknown`,"upload.storage_id":t.storageId,error:String(e)})),yield*i.Metric.increment(i.Metric.counter(`upload_failed_total`,{description:`Total number of uploads that failed`}))})));function d(e){return i.Stream.fromReadableStream(()=>e,e=>new t.n({code:`UNKNOWN_ERROR`,status:500,body:String(e)}))}function f({data:n,upload:a,dataStore:o,maxFileSize:s,controller:c,eventEmitter:l,uploadProgressInterval:u=200}){return i.Effect.gen(function*(){let f=d(n);if(c.signal.aborted)return yield*i.Effect.fail(t.n.fromCode(`ABORTED`));let p=new AbortController,{signal:m}=p,h=()=>{p.abort()};return c.signal.addEventListener(`abort`,h,{once:!0}),yield*i.Effect.acquireUseRelease(i.Effect.sync(()=>({signal:m,onAbort:h})),({signal:n})=>i.Effect.gen(function*(){let t=yield*i.Ref.make(0),n=r.t.limit({maxSize:s})(f);return yield*o.write({stream:n,file_id:a.id,offset:a.offset},{onProgress:n=>{let r=Date.now();i.Ref.get(t).pipe(i.Effect.flatMap(o=>r-o>=u?i.Effect.gen(function*(){yield*i.Ref.set(t,r),yield*l.emit(a.id,{type:e.n.UPLOAD_PROGRESS,data:{id:a.id,progress:n,total:a.size??0},flow:a.flow})}):i.Effect.void),i.Effect.runPromise).catch(()=>{})}})}).pipe(i.Effect.catchAll(e=>e instanceof Error&&e.name===`AbortError`?i.Effect.fail(t.n.fromCode(`ABORTED`)):e instanceof t.n?i.Effect.fail(e):i.Effect.fail(t.n.fromCode(`FILE_WRITE_ERROR`,{cause:e})))),({onAbort:e})=>i.Effect.sync(()=>{c.signal.removeEventListener(`abort`,e)}))}).pipe(i.Effect.withSpan(`upload-write-to-store`,{attributes:{"upload.id":a.id,"upload.offset":a.offset.toString(),"upload.max_file_size":s.toString(),"upload.file_size":a.size?.toString()??`0`}}),i.Effect.tap(e=>i.Effect.logDebug(`Data written to store`).pipe(i.Effect.annotateLogs({"upload.id":a.id,"write.offset":e.toString(),"write.bytes_written":(e-a.offset).toString()}))),i.Effect.tapError(e=>i.Effect.logError(`Failed to write to store`).pipe(i.Effect.annotateLogs({"upload.id":a.id,"upload.offset":a.offset.toString(),error:e instanceof t.n?e.code:String(e)}))))}function p(e){return i.Tracer.externalSpan({traceId:e.traceId,spanId:e.spanId,sampled:e.traceFlags===1})}const m=(e,t)=>{let n=new Date(e.creationDate).getTime(),r=Date.now()-n;return i.Effect.void.pipe(i.Effect.withSpan(`upload-complete`,{attributes:{"upload.id":e.id,"upload.size":e.size??0,"upload.total_duration_ms":r,"upload.storage_id":e.storage.id,"upload.file_name":e.metadata?.fileName?.toString()??`unknown`,"upload.creation_date":e.creationDate,"upload.completion_date":new Date().toISOString()},parent:t}))},h=(t,n,r,{dataStoreService:a,kvStore:o,eventEmitter:s})=>i.Effect.gen(function*(){let c=yield*o.get(t),l=c.traceContext?p(c.traceContext):void 0;return yield*i.Effect.gen(function*(){let i=yield*a.getDataStore(c.storage.id,n);return c.offset=yield*f({dataStore:i,data:r,upload:c,maxFileSize:1e8,controller:new 
AbortController,uploadProgressInterval:200,eventEmitter:s}),yield*o.set(t,c),yield*s.emit(c.id,{type:e.n.UPLOAD_PROGRESS,data:{id:c.id,progress:c.offset,total:c.size??0},flow:c.flow}),c.size&&c.offset===c.size&&(yield*g({file:c,dataStore:i,eventEmitter:s}),c.traceContext&&(yield*m(c,p(c.traceContext)))),c}).pipe(i.Effect.withSpan(`upload-chunk`,{attributes:{"upload.id":t,"chunk.upload_id":t,"upload.has_trace_context":c.traceContext?`true`:`false`},parent:l}))}).pipe(i.Effect.tap(e=>i.Effect.gen(function*(){yield*i.Metric.increment(i.Metric.counter(`chunk_uploaded_total`,{description:`Total number of chunks uploaded`}));let t=e.offset,n=i.Metric.histogram(`chunk_size_bytes`,i.MetricBoundaries.linear({start:262144,width:262144,count:20}));if(yield*i.Metric.update(n,t),e.size&&e.size>0){let e=t,n=i.Metric.gauge(`upload_throughput_bytes_per_second`);yield*i.Metric.set(n,e)}})),i.Effect.tap(e=>i.Effect.logDebug(`Chunk uploaded`).pipe(i.Effect.annotateLogs({"upload.id":e.id,"chunk.size":e.offset.toString(),"chunk.progress":e.size&&e.size>0?(e.offset/e.size*100).toFixed(2):`0`,"upload.total_size":e.size?.toString()??`0`}))),i.Effect.tapError(e=>i.Effect.logError(`Chunk upload failed`).pipe(i.Effect.annotateLogs({"upload.id":t,error:String(e)})))),g=({file:r,dataStore:a,eventEmitter:o})=>i.Effect.gen(function*(){let i=a.getCapabilities();if(i.maxValidationSize&&r.size&&r.size>i.maxValidationSize){yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_WARNING,data:{id:r.id,message:`File size (${r.size} bytes) exceeds max validation size (${i.maxValidationSize} bytes). Validation skipped.`},flow:r.flow});return}let l=yield*a.read(r.id);if(r.checksum&&r.checksumAlgorithm){let i=yield*n.t(l,r.checksumAlgorithm);if(i!==r.checksum)return yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_FAILED,data:{id:r.id,reason:`checksum_mismatch`,expected:r.checksum,actual:i},flow:r.flow}),yield*a.remove(r.id),yield*t.n.fromCode(`CHECKSUM_MISMATCH`,{body:`Checksum validation failed. Expected: ${r.checksum}, Got: ${i}`,details:{uploadId:r.id,expected:r.checksum,actual:i,algorithm:r.checksumAlgorithm}}).toEffect();yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_SUCCESS,data:{id:r.id,validationType:`checksum`,algorithm:r.checksumAlgorithm},flow:r.flow})}if(i.requiresMimeTypeValidation){let n=s(l),i=r.metadata?.type;if(i&&!c(i,n))return yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_FAILED,data:{id:r.id,reason:`mimetype_mismatch`,expected:i,actual:n},flow:r.flow}),yield*a.remove(r.id),yield*t.n.fromCode(`MIMETYPE_MISMATCH`,{body:`MIME type validation failed. 
Expected: ${i}, Detected: ${n}`,details:{uploadId:r.id,expected:i,actual:n}}).toEffect();yield*o.emit(r.id,{type:e.n.UPLOAD_VALIDATION_SUCCESS,data:{id:r.id,validationType:`mimetype`},flow:r.flow})}}).pipe(i.Effect.withSpan(`validate-upload`,{attributes:{"upload.id":r.id,"validation.checksum_provided":r.checksum?`true`:`false`,"validation.mime_required":a.getCapabilities().requiresMimeTypeValidation?`true`:`false`}})),_=e=>i.Effect.tryPromise({try:async()=>await fetch(e),catch:e=>t.n.fromCode(`UNKNOWN_ERROR`,{cause:e})}).pipe(i.Effect.withSpan(`upload-fetch-url`,{attributes:{"upload.url":e,"upload.operation":`fetch`}}),i.Effect.tap(e=>i.Effect.gen(function*(){yield*i.Metric.increment(i.Metric.counter(`upload_from_url_total`,{description:`Total number of URL-based uploads`})),e.ok&&(yield*i.Metric.increment(i.Metric.counter(`upload_from_url_success_total`,{description:`Total number of successful URL-based uploads`})))})),i.Effect.tap(t=>i.Effect.logInfo(`URL fetch completed`).pipe(i.Effect.annotateLogs({"upload.url":e,"response.status":t.status.toString(),"response.ok":t.ok.toString(),"response.content_length":t.headers.get(`content-length`)??`unknown`}))),i.Effect.tapError(t=>i.Effect.gen(function*(){yield*i.Metric.increment(i.Metric.counter(`upload_from_url_failed_total`,{description:`Total number of failed URL-based uploads`})),yield*i.Effect.logError(`URL fetch failed`).pipe(i.Effect.annotateLogs({"upload.url":e,error:String(t)}))}))),v=e=>i.Effect.tryPromise({try:async()=>await e.arrayBuffer(),catch:e=>t.n.fromCode(`UNKNOWN_ERROR`,{cause:e})}).pipe(i.Effect.withSpan(`upload-convert-to-buffer`,{attributes:{"upload.operation":`arrayBuffer`}}),i.Effect.tap(e=>i.Effect.logDebug(`Response converted to array buffer`).pipe(i.Effect.annotateLogs({"buffer.size":e.byteLength.toString()}))),i.Effect.tapError(e=>i.Effect.logError(`Failed to convert response to array buffer`).pipe(i.Effect.annotateLogs({error:String(e)}))));var y=class extends i.Context.Tag(`UploadEngine`)(){};function b(){return i.Effect.gen(function*(){let t=yield*e.j,r=yield*e.p,a=yield*n.r,o=yield*e.S;return{upload:(e,n,s)=>i.Effect.gen(function*(){return yield*h((yield*u(e,n,{dataStoreService:o,kvStore:t,eventEmitter:r,generateId:a})).id,n,s,{dataStoreService:o,kvStore:t,eventEmitter:r})}),uploadFromUrl:(e,n,s)=>i.Effect.gen(function*(){let i=yield*v(yield*_(s)),c=new ReadableStream({start(e){e.enqueue(new Uint8Array(i)),e.close()}});return yield*h((yield*u({...e,size:i.byteLength},n,{dataStoreService:o,kvStore:t,eventEmitter:r,generateId:a})).id,n,c,{dataStoreService:o,kvStore:t,eventEmitter:r})}),createUpload:(e,n)=>i.Effect.gen(function*(){return yield*u(e,n,{dataStoreService:o,kvStore:t,eventEmitter:r,generateId:a})}),uploadChunk:(e,n,a)=>i.Effect.gen(function*(){return yield*h(e,n,a,{dataStoreService:o,kvStore:t,eventEmitter:r})}),getUpload:e=>i.Effect.gen(function*(){return yield*t.get(e)}),read:(e,n)=>i.Effect.gen(function*(){let r=yield*t.get(e);return yield*(yield*o.getDataStore(r.storage.id,n)).read(e)}),readStream:(e,n,r)=>i.Effect.gen(function*(){let a=yield*t.get(e),s=yield*o.getDataStore(a.storage.id,n);if(s.getCapabilities().supportsStreamingRead&&s.readStream)return yield*i.Effect.logDebug(`Using streaming read for file ${e}`),yield*s.readStream(e,r);yield*i.Effect.logDebug(`Falling back to buffered read for file ${e} (streaming not supported)`);let c=yield*s.read(e);return i.Stream.succeed(c)}),uploadStream:(n,s,c)=>i.Effect.gen(function*(){let 
l=yield*o.getDataStore(n.storageId,s),d=l.getCapabilities(),f=yield*a.generateId();if(d.supportsStreamingWrite&&l.writeStream){yield*i.Effect.logDebug(`Using streaming write for file ${f}`);let a=typeof n.metadata==`string`?JSON.parse(n.metadata):n.metadata||{},o=Object.fromEntries(Object.entries(a).map(([e,t])=>[e,String(t)])),s={id:f,offset:0,size:n.size??0,storage:{id:n.storageId,type:l.getCapabilities().supportsStreamingWrite?`streaming`:`default`},metadata:a,creationDate:new Date().toISOString()};yield*t.set(f,s),yield*r.emit(f,{type:e.n.UPLOAD_STARTED,data:s});let u=yield*l.writeStream(f,{stream:c,contentType:n.type,sizeHint:n.sizeHint,metadata:o}),d={...s,size:u.size,offset:u.size,storage:{...s.storage,path:u.path},...u.url&&{url:u.url}};return yield*t.set(f,d),yield*r.emit(f,{type:e.n.UPLOAD_COMPLETE,data:d}),d}yield*i.Effect.logWarning(`Falling back to buffered upload for file ${f} (streaming write not supported)`);let p=[];yield*i.Stream.runForEach(c,e=>i.Effect.sync(()=>{p.push(e)}));let m=p.reduce((e,t)=>e+t.length,0),g=new Uint8Array(m),_=0;for(let e of p)g.set(e,_),_+=e.length;let v=new ReadableStream({start(e){e.enqueue(g),e.close()}});return yield*u({...n,size:m},s,{dataStoreService:o,kvStore:t,eventEmitter:r,generateId:{generateId:()=>i.Effect.succeed(f)}}),yield*h(f,s,v,{dataStoreService:o,kvStore:t,eventEmitter:r})}),delete:(e,n)=>i.Effect.gen(function*(){let r=yield*t.get(e);yield*(yield*o.getDataStore(r.storage.id,n)).remove(e),yield*t.delete(e)}),getCapabilities:(e,t)=>i.Effect.gen(function*(){return(yield*o.getDataStore(e,t)).getCapabilities()}),subscribeToUploadEvents:(e,t)=>i.Effect.gen(function*(){yield*r.subscribe(e,t)}),unsubscribeFromUploadEvents:e=>i.Effect.gen(function*(){yield*r.unsubscribe(e)})}})}const x=i.Layer.effect(y,b());var S=class{constructor(e,t){this.capabilities=e,this.validateUploadStrategy=t}negotiateStrategy(e){let t=[],n=[],r=`single`,i=e.preferredChunkSize??this.capabilities.optimalChunkSize??1024*1024,a=e.parallelUploads??1;if(e.preferredStrategy&&(this.validateUploadStrategy(e.preferredStrategy)?(r=e.preferredStrategy,t.push(`Using preferred strategy: ${r}`)):n.push(`Preferred strategy '${e.preferredStrategy}' not supported by data store, falling back`)),(!e.preferredStrategy||!this.validateUploadStrategy(e.preferredStrategy))&&(this.capabilities.supportsParallelUploads&&e.fileSize>(e.minChunkSizeForParallel??10*1024*1024)?(r=`parallel`,t.push(`Selected parallel upload for large file (${e.fileSize} bytes)`)):(r=`single`,t.push(this.capabilities.supportsParallelUploads?`Selected single upload for small file (${e.fileSize} bytes)`:`Selected single upload (parallel not supported by data store)`))),this.capabilities.minChunkSize&&i<this.capabilities.minChunkSize&&(n.push(`Chunk size ${i} below minimum ${this.capabilities.minChunkSize}, adjusting`),i=this.capabilities.minChunkSize),this.capabilities.maxChunkSize&&i>this.capabilities.maxChunkSize&&(n.push(`Chunk size ${i} above maximum ${this.capabilities.maxChunkSize}, adjusting`),i=this.capabilities.maxChunkSize),r===`parallel`&&(this.capabilities.maxConcurrentUploads&&a>this.capabilities.maxConcurrentUploads&&(n.push(`Parallel uploads ${a} exceeds maximum ${this.capabilities.maxConcurrentUploads}, adjusting`),a=this.capabilities.maxConcurrentUploads),this.capabilities.maxParts)){let t=Math.ceil(e.fileSize/i);if(t>this.capabilities.maxParts){let r=Math.ceil(e.fileSize/this.capabilities.maxParts);n.push(`Estimated parts ${t} exceeds maximum ${this.capabilities.maxParts}, increasing chunk 
size`),i=Math.max(i,r)}}return this.validateUploadStrategy(r)||(n.push(`Final strategy validation failed, falling back to single upload`),r=`single`,a=1),t.push(`Data store capabilities: parallel=${this.capabilities.supportsParallelUploads}, concatenation=${this.capabilities.supportsConcatenation}, resumable=${this.capabilities.supportsResumableUploads}`),{strategy:r,chunkSize:i,parallelUploads:r===`parallel`?a:1,reasoning:t,warnings:n}}getDataStoreCapabilities(){return this.capabilities}validateConfiguration(e){let t=[];return e.preferredStrategy&&!this.validateUploadStrategy(e.preferredStrategy)&&t.push(`Preferred strategy '${e.preferredStrategy}' not supported by data store`),e.preferredChunkSize&&(this.capabilities.minChunkSize&&e.preferredChunkSize<this.capabilities.minChunkSize&&t.push(`Chunk size ${e.preferredChunkSize} below data store minimum ${this.capabilities.minChunkSize}`),this.capabilities.maxChunkSize&&e.preferredChunkSize>this.capabilities.maxChunkSize&&t.push(`Chunk size ${e.preferredChunkSize} above data store maximum ${this.capabilities.maxChunkSize}`)),e.parallelUploads&&this.capabilities.maxConcurrentUploads&&e.parallelUploads>this.capabilities.maxConcurrentUploads&&t.push(`Parallel uploads ${e.parallelUploads} exceeds data store maximum ${this.capabilities.maxConcurrentUploads}`),{valid:t.length===0,errors:t}}};Object.defineProperty(exports,`a`,{enumerable:!0,get:function(){return v}}),Object.defineProperty(exports,`c`,{enumerable:!0,get:function(){return s}}),Object.defineProperty(exports,`i`,{enumerable:!0,get:function(){return x}}),Object.defineProperty(exports,`n`,{enumerable:!0,get:function(){return y}}),Object.defineProperty(exports,`o`,{enumerable:!0,get:function(){return _}}),Object.defineProperty(exports,`r`,{enumerable:!0,get:function(){return b}}),Object.defineProperty(exports,`s`,{enumerable:!0,get:function(){return c}}),Object.defineProperty(exports,`t`,{enumerable:!0,get:function(){return S}});
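Both bundles inline the same magic-byte MIME sniffer (detectMimeType in the un-minified sources embedded in the sourcemap further below): it matches the leading bytes of an upload against known file signatures and only falls back to the filename extension when no signature matches. A trimmed sketch using the helper names from those sources; only a handful of the many signatures are shown:

// Matches the checkBytes/checkString helpers in the sourcemap's sources below.
function checkBytes(buffer: Uint8Array, pattern: number[], offset = 0): boolean {
  if (buffer.length < offset + pattern.length) return false;
  return pattern.every((byte, i) => buffer[offset + i] === byte);
}

function checkString(buffer: Uint8Array, str: string, offset = 0): boolean {
  if (buffer.length < offset + str.length) return false;
  for (let i = 0; i < str.length; i++) {
    if (buffer[offset + i] !== str.charCodeAt(i)) return false;
  }
  return true;
}

// Abbreviated detector: four signatures out of the full table.
function sniffMimeType(buffer: Uint8Array): string {
  if (checkBytes(buffer, [0x89, 0x50, 0x4e, 0x47])) return "image/png";
  if (checkBytes(buffer, [0xff, 0xd8, 0xff])) return "image/jpeg";
  if (checkString(buffer, "%PDF")) return "application/pdf";
  // RIFF container whose format tag at offset 8 is WEBP
  if (checkBytes(buffer, [0x52, 0x49, 0x46, 0x46]) && checkString(buffer, "WEBP", 8)) {
    return "image/webp";
  }
  return "application/octet-stream";
}

Validation then compares the detected type against the client-declared one leniently, on the major type only, so image/png vs. image/apng passes while image/png vs. application/pdf fails.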
package/dist/upload-CpsShjP3.mjs
@@ -0,0 +1,2 @@
import{n as e}from"./uploadista-error-CkSxSyNo.mjs";import{S as t,j as n,n as r,p as i}from"./types-BRnwrJDg.mjs";import{r as a,t as o}from"./checksum-COoD-F1l.mjs";import{t as s}from"./stream-limiter-B9nsn2gb.mjs";import{Context as c,Effect as l,Layer as u,Metric as d,MetricBoundaries as f,Option as p,Ref as m,Stream as h,Tracer as g}from"effect";function _(e,t,n=0){return e.length<n+t.length?!1:t.every((t,r)=>e[n+r]===t)}function v(e,t,n=0){if(e.length<n+t.length)return!1;for(let r=0;r<t.length;r++)if(e[n+r]!==t.charCodeAt(r))return!1;return!0}const y=(e,t)=>{if(e.length===0)return`application/octet-stream`;if(_(e,[137,80,78,71,13,10,26,10]))return`image/png`;if(_(e,[255,216,255]))return`image/jpeg`;if(v(e,`GIF87a`)||v(e,`GIF89a`))return`image/gif`;if(_(e,[82,73,70,70])&&e.length>=12&&v(e,`WEBP`,8))return`image/webp`;if(e.length>=12&&_(e,[0,0,0],0)&&v(e,`ftyp`,4)&&(v(e,`avif`,8)||v(e,`avis`,8)))return`image/avif`;if(e.length>=12&&v(e,`ftyp`,4)&&(v(e,`heic`,8)||v(e,`heif`,8)||v(e,`mif1`,8)))return`image/heic`;if(_(e,[66,77]))return`image/bmp`;if(_(e,[73,73,42,0])||_(e,[77,77,0,42]))return`image/tiff`;if(_(e,[0,0,1,0]))return`image/x-icon`;if(e.length>=5){let t=new TextDecoder(`utf-8`,{fatal:!1}).decode(e.slice(0,Math.min(1024,e.length)));if(t.includes(`<svg`)||t.includes(`<?xml`)&&t.includes(`<svg`))return`image/svg+xml`}if(e.length>=12&&v(e,`ftyp`,4)){let t=new TextDecoder().decode(e.slice(8,12));if(t.startsWith(`mp4`)||t.startsWith(`M4`)||t.startsWith(`isom`))return`video/mp4`}if(_(e,[26,69,223,163]))return`video/webm`;if(_(e,[82,73,70,70])&&e.length>=12&&v(e,`AVI `,8))return`video/x-msvideo`;if(e.length>=8&&(v(e,`moov`,4)||v(e,`mdat`,4)||v(e,`free`,4)))return`video/quicktime`;if(_(e,[26,69,223,163])&&e.length>=100&&new TextDecoder(`utf-8`,{fatal:!1}).decode(e.slice(0,100)).includes(`matroska`))return`video/x-matroska`;if(_(e,[255,251])||_(e,[255,243])||_(e,[255,242])||v(e,`ID3`))return`audio/mpeg`;if(_(e,[82,73,70,70])&&e.length>=12&&v(e,`WAVE`,8))return`audio/wav`;if(v(e,`fLaC`))return`audio/flac`;if(v(e,`OggS`))return`audio/ogg`;if(e.length>=12&&v(e,`ftyp`,4)&&v(e,`M4A`,8))return`audio/mp4`;if(v(e,`%PDF`))return`application/pdf`;if(_(e,[80,75,3,4])||_(e,[80,75,5,6])||_(e,[80,75,7,8])){if(e.length>=1024){let t=new TextDecoder(`utf-8`,{fatal:!1}).decode(e);if(t.includes(`word/`))return`application/vnd.openxmlformats-officedocument.wordprocessingml.document`;if(t.includes(`xl/`))return`application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`;if(t.includes(`ppt/`))return`application/vnd.openxmlformats-officedocument.presentationml.presentation`}return`application/zip`}if(_(e,[82,97,114,33,26,7]))return`application/x-rar-compressed`;if(_(e,[55,122,188,175,39,28]))return`application/x-7z-compressed`;if(_(e,[31,139]))return`application/gzip`;if(e.length>=262&&v(e,`ustar`,257))return`application/x-tar`;if(v(e,`wOFF`))return`font/woff`;if(v(e,`wOF2`))return`font/woff2`;if(_(e,[0,1,0,0,0]))return`font/ttf`;if(v(e,`OTTO`))return`font/otf`;if(e.length>=1){let t=e[0];if(t===123||t===91)try{let t=new TextDecoder(`utf-8`).decode(e.slice(0,Math.min(1024,e.length)));return 
JSON.parse(t.trim()),`application/json`}catch{}}if(t)switch(t.split(`.`).pop()?.toLowerCase()){case`jpg`:case`jpeg`:return`image/jpeg`;case`png`:return`image/png`;case`gif`:return`image/gif`;case`webp`:return`image/webp`;case`avif`:return`image/avif`;case`heic`:case`heif`:return`image/heic`;case`bmp`:return`image/bmp`;case`tiff`:case`tif`:return`image/tiff`;case`ico`:return`image/x-icon`;case`svg`:return`image/svg+xml`;case`mp4`:case`m4v`:return`video/mp4`;case`webm`:return`video/webm`;case`avi`:return`video/x-msvideo`;case`mov`:return`video/quicktime`;case`mkv`:return`video/x-matroska`;case`mp3`:return`audio/mpeg`;case`wav`:return`audio/wav`;case`flac`:return`audio/flac`;case`ogg`:return`audio/ogg`;case`m4a`:return`audio/mp4`;case`pdf`:return`application/pdf`;case`docx`:return`application/vnd.openxmlformats-officedocument.wordprocessingml.document`;case`xlsx`:return`application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`;case`pptx`:return`application/vnd.openxmlformats-officedocument.presentationml.presentation`;case`zip`:return`application/zip`;case`rar`:return`application/x-rar-compressed`;case`7z`:return`application/x-7z-compressed`;case`gz`:case`gzip`:return`application/gzip`;case`tar`:return`application/x-tar`;case`woff`:return`font/woff`;case`woff2`:return`font/woff2`;case`ttf`:return`font/ttf`;case`otf`:return`font/otf`;case`txt`:return`text/plain`;case`json`:return`application/json`;case`xml`:return`application/xml`;case`html`:case`htm`:return`text/html`;case`css`:return`text/css`;case`js`:return`application/javascript`;case`csv`:return`text/csv`;default:return`application/octet-stream`}return`application/octet-stream`};function b(e,t){return e===t?!0:e.split(`/`)[0]===t.split(`/`)[0]}const x=l.gen(function*(){let e=yield*l.currentSpan.pipe(l.option);return p.match(e,{onNone:()=>void 0,onSome:e=>({traceId:e.traceId,spanId:e.spanId,traceFlags:e.sampled?1:0})})}),S=(e,t,{dataStoreService:n,kvStore:i,eventEmitter:a,generateId:o})=>l.gen(function*(){let s=yield*x,c=new Date().toISOString();return yield*l.gen(function*(){let l=yield*n.getDataStore(e.storageId,t),u=yield*o.generateId(),{size:d,type:f,fileName:p,lastModified:m,metadata:h,flow:g}=e,_={};if(h)try{_=JSON.parse(h)}catch{_={}}let v={..._,type:f,fileName:p??``};m&&(v.lastModified=m.toString());let y={id:u,size:d,metadata:v,offset:0,creationDate:c,storage:{id:e.storageId,type:f,path:``,bucket:l.bucket},flow:g,traceContext:s},b=yield*l.create(y);return yield*i.set(u,b),yield*a.emit(u,{type:r.UPLOAD_STARTED,data:b,flow:b.flow}),b}).pipe(l.withSpan(`upload-create`,{attributes:{"upload.file_name":e.fileName??`unknown`,"upload.file_size":e.size?.toString()??`0`,"upload.storage_id":e.storageId,"upload.mime_type":e.type,"upload.has_flow":e.flow?`true`:`false`}}))}).pipe(l.withSpan(`upload`,{attributes:{"upload.file_name":e.fileName??`unknown`,"upload.file_size":e.size?.toString()??`0`,"upload.storage_id":e.storageId,"upload.mime_type":e.type,"upload.has_flow":e.flow?`true`:`false`}}),l.tap(e=>l.gen(function*(){if(yield*d.increment(d.counter(`upload_created_total`,{description:`Total number of uploads created`})),e.size){let t=d.histogram(`upload_file_size_bytes`,f.exponential({start:1024,factor:2,count:25}));yield*d.update(t,e.size)}let t=d.gauge(`active_uploads`);yield*d.increment(t)})),l.tap(t=>l.logInfo(`Upload 
created`).pipe(l.annotateLogs({"upload.id":t.id,"upload.file_name":e.fileName??`unknown`,"upload.file_size":e.size?.toString()??`0`,"upload.storage_id":e.storageId}))),l.tapError(t=>l.gen(function*(){yield*l.logError(`Upload creation failed`).pipe(l.annotateLogs({"upload.file_name":e.fileName??`unknown`,"upload.storage_id":e.storageId,error:String(t)})),yield*d.increment(d.counter(`upload_failed_total`,{description:`Total number of uploads that failed`}))})));function C(t){return h.fromReadableStream(()=>t,t=>new e({code:`UNKNOWN_ERROR`,status:500,body:String(t)}))}function w({data:t,upload:n,dataStore:i,maxFileSize:a,controller:o,eventEmitter:c,uploadProgressInterval:u=200}){return l.gen(function*(){let d=C(t);if(o.signal.aborted)return yield*l.fail(e.fromCode(`ABORTED`));let f=new AbortController,{signal:p}=f,h=()=>{f.abort()};return o.signal.addEventListener(`abort`,h,{once:!0}),yield*l.acquireUseRelease(l.sync(()=>({signal:p,onAbort:h})),({signal:t})=>l.gen(function*(){let e=yield*m.make(0),t=s.limit({maxSize:a})(d);return yield*i.write({stream:t,file_id:n.id,offset:n.offset},{onProgress:t=>{let i=Date.now();m.get(e).pipe(l.flatMap(a=>i-a>=u?l.gen(function*(){yield*m.set(e,i),yield*c.emit(n.id,{type:r.UPLOAD_PROGRESS,data:{id:n.id,progress:t,total:n.size??0},flow:n.flow})}):l.void),l.runPromise).catch(()=>{})}})}).pipe(l.catchAll(t=>t instanceof Error&&t.name===`AbortError`?l.fail(e.fromCode(`ABORTED`)):t instanceof e?l.fail(t):l.fail(e.fromCode(`FILE_WRITE_ERROR`,{cause:t})))),({onAbort:e})=>l.sync(()=>{o.signal.removeEventListener(`abort`,e)}))}).pipe(l.withSpan(`upload-write-to-store`,{attributes:{"upload.id":n.id,"upload.offset":n.offset.toString(),"upload.max_file_size":a.toString(),"upload.file_size":n.size?.toString()??`0`}}),l.tap(e=>l.logDebug(`Data written to store`).pipe(l.annotateLogs({"upload.id":n.id,"write.offset":e.toString(),"write.bytes_written":(e-n.offset).toString()}))),l.tapError(t=>l.logError(`Failed to write to store`).pipe(l.annotateLogs({"upload.id":n.id,"upload.offset":n.offset.toString(),error:t instanceof e?t.code:String(t)}))))}function T(e){return g.externalSpan({traceId:e.traceId,spanId:e.spanId,sampled:e.traceFlags===1})}const E=(e,t)=>{let n=new Date(e.creationDate).getTime(),r=Date.now()-n;return l.void.pipe(l.withSpan(`upload-complete`,{attributes:{"upload.id":e.id,"upload.size":e.size??0,"upload.total_duration_ms":r,"upload.storage_id":e.storage.id,"upload.file_name":e.metadata?.fileName?.toString()??`unknown`,"upload.creation_date":e.creationDate,"upload.completion_date":new Date().toISOString()},parent:t}))},D=(e,t,n,{dataStoreService:i,kvStore:a,eventEmitter:o})=>l.gen(function*(){let s=yield*a.get(e),c=s.traceContext?T(s.traceContext):void 0;return yield*l.gen(function*(){let c=yield*i.getDataStore(s.storage.id,t);return s.offset=yield*w({dataStore:c,data:n,upload:s,maxFileSize:1e8,controller:new AbortController,uploadProgressInterval:200,eventEmitter:o}),yield*a.set(e,s),yield*o.emit(s.id,{type:r.UPLOAD_PROGRESS,data:{id:s.id,progress:s.offset,total:s.size??0},flow:s.flow}),s.size&&s.offset===s.size&&(yield*O({file:s,dataStore:c,eventEmitter:o}),s.traceContext&&(yield*E(s,T(s.traceContext)))),s}).pipe(l.withSpan(`upload-chunk`,{attributes:{"upload.id":e,"chunk.upload_id":e,"upload.has_trace_context":s.traceContext?`true`:`false`},parent:c}))}).pipe(l.tap(e=>l.gen(function*(){yield*d.increment(d.counter(`chunk_uploaded_total`,{description:`Total number of chunks uploaded`}));let 
t=e.offset,n=d.histogram(`chunk_size_bytes`,f.linear({start:262144,width:262144,count:20}));if(yield*d.update(n,t),e.size&&e.size>0){let e=t,n=d.gauge(`upload_throughput_bytes_per_second`);yield*d.set(n,e)}})),l.tap(e=>l.logDebug(`Chunk uploaded`).pipe(l.annotateLogs({"upload.id":e.id,"chunk.size":e.offset.toString(),"chunk.progress":e.size&&e.size>0?(e.offset/e.size*100).toFixed(2):`0`,"upload.total_size":e.size?.toString()??`0`}))),l.tapError(t=>l.logError(`Chunk upload failed`).pipe(l.annotateLogs({"upload.id":e,error:String(t)})))),O=({file:t,dataStore:n,eventEmitter:i})=>l.gen(function*(){let a=n.getCapabilities();if(a.maxValidationSize&&t.size&&t.size>a.maxValidationSize){yield*i.emit(t.id,{type:r.UPLOAD_VALIDATION_WARNING,data:{id:t.id,message:`File size (${t.size} bytes) exceeds max validation size (${a.maxValidationSize} bytes). Validation skipped.`},flow:t.flow});return}let s=yield*n.read(t.id);if(t.checksum&&t.checksumAlgorithm){let a=yield*o(s,t.checksumAlgorithm);if(a!==t.checksum)return yield*i.emit(t.id,{type:r.UPLOAD_VALIDATION_FAILED,data:{id:t.id,reason:`checksum_mismatch`,expected:t.checksum,actual:a},flow:t.flow}),yield*n.remove(t.id),yield*e.fromCode(`CHECKSUM_MISMATCH`,{body:`Checksum validation failed. Expected: ${t.checksum}, Got: ${a}`,details:{uploadId:t.id,expected:t.checksum,actual:a,algorithm:t.checksumAlgorithm}}).toEffect();yield*i.emit(t.id,{type:r.UPLOAD_VALIDATION_SUCCESS,data:{id:t.id,validationType:`checksum`,algorithm:t.checksumAlgorithm},flow:t.flow})}if(a.requiresMimeTypeValidation){let a=y(s),o=t.metadata?.type;if(o&&!b(o,a))return yield*i.emit(t.id,{type:r.UPLOAD_VALIDATION_FAILED,data:{id:t.id,reason:`mimetype_mismatch`,expected:o,actual:a},flow:t.flow}),yield*n.remove(t.id),yield*e.fromCode(`MIMETYPE_MISMATCH`,{body:`MIME type validation failed. 
Expected: ${o}, Detected: ${a}`,details:{uploadId:t.id,expected:o,actual:a}}).toEffect();yield*i.emit(t.id,{type:r.UPLOAD_VALIDATION_SUCCESS,data:{id:t.id,validationType:`mimetype`},flow:t.flow})}}).pipe(l.withSpan(`validate-upload`,{attributes:{"upload.id":t.id,"validation.checksum_provided":t.checksum?`true`:`false`,"validation.mime_required":n.getCapabilities().requiresMimeTypeValidation?`true`:`false`}})),k=t=>l.tryPromise({try:async()=>await fetch(t),catch:t=>e.fromCode(`UNKNOWN_ERROR`,{cause:t})}).pipe(l.withSpan(`upload-fetch-url`,{attributes:{"upload.url":t,"upload.operation":`fetch`}}),l.tap(e=>l.gen(function*(){yield*d.increment(d.counter(`upload_from_url_total`,{description:`Total number of URL-based uploads`})),e.ok&&(yield*d.increment(d.counter(`upload_from_url_success_total`,{description:`Total number of successful URL-based uploads`})))})),l.tap(e=>l.logInfo(`URL fetch completed`).pipe(l.annotateLogs({"upload.url":t,"response.status":e.status.toString(),"response.ok":e.ok.toString(),"response.content_length":e.headers.get(`content-length`)??`unknown`}))),l.tapError(e=>l.gen(function*(){yield*d.increment(d.counter(`upload_from_url_failed_total`,{description:`Total number of failed URL-based uploads`})),yield*l.logError(`URL fetch failed`).pipe(l.annotateLogs({"upload.url":t,error:String(e)}))}))),A=t=>l.tryPromise({try:async()=>await t.arrayBuffer(),catch:t=>e.fromCode(`UNKNOWN_ERROR`,{cause:t})}).pipe(l.withSpan(`upload-convert-to-buffer`,{attributes:{"upload.operation":`arrayBuffer`}}),l.tap(e=>l.logDebug(`Response converted to array buffer`).pipe(l.annotateLogs({"buffer.size":e.byteLength.toString()}))),l.tapError(e=>l.logError(`Failed to convert response to array buffer`).pipe(l.annotateLogs({error:String(e)}))));var j=class extends c.Tag(`UploadEngine`)(){};function M(){return l.gen(function*(){let e=yield*n,o=yield*i,s=yield*a,c=yield*t;return{upload:(t,n,r)=>l.gen(function*(){return yield*D((yield*S(t,n,{dataStoreService:c,kvStore:e,eventEmitter:o,generateId:s})).id,n,r,{dataStoreService:c,kvStore:e,eventEmitter:o})}),uploadFromUrl:(t,n,r)=>l.gen(function*(){let i=yield*A(yield*k(r)),a=new ReadableStream({start(e){e.enqueue(new Uint8Array(i)),e.close()}});return yield*D((yield*S({...t,size:i.byteLength},n,{dataStoreService:c,kvStore:e,eventEmitter:o,generateId:s})).id,n,a,{dataStoreService:c,kvStore:e,eventEmitter:o})}),createUpload:(t,n)=>l.gen(function*(){return yield*S(t,n,{dataStoreService:c,kvStore:e,eventEmitter:o,generateId:s})}),uploadChunk:(t,n,r)=>l.gen(function*(){return yield*D(t,n,r,{dataStoreService:c,kvStore:e,eventEmitter:o})}),getUpload:t=>l.gen(function*(){return yield*e.get(t)}),read:(t,n)=>l.gen(function*(){let r=yield*e.get(t);return yield*(yield*c.getDataStore(r.storage.id,n)).read(t)}),readStream:(t,n,r)=>l.gen(function*(){let i=yield*e.get(t),a=yield*c.getDataStore(i.storage.id,n);if(a.getCapabilities().supportsStreamingRead&&a.readStream)return yield*l.logDebug(`Using streaming read for file ${t}`),yield*a.readStream(t,r);yield*l.logDebug(`Falling back to buffered read for file ${t} (streaming not supported)`);let o=yield*a.read(t);return h.succeed(o)}),uploadStream:(t,n,i)=>l.gen(function*(){let a=yield*c.getDataStore(t.storageId,n),u=a.getCapabilities(),d=yield*s.generateId();if(u.supportsStreamingWrite&&a.writeStream){yield*l.logDebug(`Using streaming write for file ${d}`);let n=typeof 
t.metadata==`string`?JSON.parse(t.metadata):t.metadata||{},s=Object.fromEntries(Object.entries(n).map(([e,t])=>[e,String(t)])),c={id:d,offset:0,size:t.size??0,storage:{id:t.storageId,type:a.getCapabilities().supportsStreamingWrite?`streaming`:`default`},metadata:n,creationDate:new Date().toISOString()};yield*e.set(d,c),yield*o.emit(d,{type:r.UPLOAD_STARTED,data:c});let u=yield*a.writeStream(d,{stream:i,contentType:t.type,sizeHint:t.sizeHint,metadata:s}),f={...c,size:u.size,offset:u.size,storage:{...c.storage,path:u.path},...u.url&&{url:u.url}};return yield*e.set(d,f),yield*o.emit(d,{type:r.UPLOAD_COMPLETE,data:f}),f}yield*l.logWarning(`Falling back to buffered upload for file ${d} (streaming write not supported)`);let f=[];yield*h.runForEach(i,e=>l.sync(()=>{f.push(e)}));let p=f.reduce((e,t)=>e+t.length,0),m=new Uint8Array(p),g=0;for(let e of f)m.set(e,g),g+=e.length;let _=new ReadableStream({start(e){e.enqueue(m),e.close()}});return yield*S({...t,size:p},n,{dataStoreService:c,kvStore:e,eventEmitter:o,generateId:{generateId:()=>l.succeed(d)}}),yield*D(d,n,_,{dataStoreService:c,kvStore:e,eventEmitter:o})}),delete:(t,n)=>l.gen(function*(){let r=yield*e.get(t);yield*(yield*c.getDataStore(r.storage.id,n)).remove(t),yield*e.delete(t)}),getCapabilities:(e,t)=>l.gen(function*(){return(yield*c.getDataStore(e,t)).getCapabilities()}),subscribeToUploadEvents:(e,t)=>l.gen(function*(){yield*o.subscribe(e,t)}),unsubscribeFromUploadEvents:e=>l.gen(function*(){yield*o.unsubscribe(e)})}})}const N=u.effect(j,M());var P=class{constructor(e,t){this.capabilities=e,this.validateUploadStrategy=t}negotiateStrategy(e){let t=[],n=[],r=`single`,i=e.preferredChunkSize??this.capabilities.optimalChunkSize??1024*1024,a=e.parallelUploads??1;if(e.preferredStrategy&&(this.validateUploadStrategy(e.preferredStrategy)?(r=e.preferredStrategy,t.push(`Using preferred strategy: ${r}`)):n.push(`Preferred strategy '${e.preferredStrategy}' not supported by data store, falling back`)),(!e.preferredStrategy||!this.validateUploadStrategy(e.preferredStrategy))&&(this.capabilities.supportsParallelUploads&&e.fileSize>(e.minChunkSizeForParallel??10*1024*1024)?(r=`parallel`,t.push(`Selected parallel upload for large file (${e.fileSize} bytes)`)):(r=`single`,t.push(this.capabilities.supportsParallelUploads?`Selected single upload for small file (${e.fileSize} bytes)`:`Selected single upload (parallel not supported by data store)`))),this.capabilities.minChunkSize&&i<this.capabilities.minChunkSize&&(n.push(`Chunk size ${i} below minimum ${this.capabilities.minChunkSize}, adjusting`),i=this.capabilities.minChunkSize),this.capabilities.maxChunkSize&&i>this.capabilities.maxChunkSize&&(n.push(`Chunk size ${i} above maximum ${this.capabilities.maxChunkSize}, adjusting`),i=this.capabilities.maxChunkSize),r===`parallel`&&(this.capabilities.maxConcurrentUploads&&a>this.capabilities.maxConcurrentUploads&&(n.push(`Parallel uploads ${a} exceeds maximum ${this.capabilities.maxConcurrentUploads}, adjusting`),a=this.capabilities.maxConcurrentUploads),this.capabilities.maxParts)){let t=Math.ceil(e.fileSize/i);if(t>this.capabilities.maxParts){let r=Math.ceil(e.fileSize/this.capabilities.maxParts);n.push(`Estimated parts ${t} exceeds maximum ${this.capabilities.maxParts}, increasing chunk size`),i=Math.max(i,r)}}return this.validateUploadStrategy(r)||(n.push(`Final strategy validation failed, falling back to single upload`),r=`single`,a=1),t.push(`Data store capabilities: parallel=${this.capabilities.supportsParallelUploads}, 
concatenation=${this.capabilities.supportsConcatenation}, resumable=${this.capabilities.supportsResumableUploads}`),{strategy:r,chunkSize:i,parallelUploads:r===`parallel`?a:1,reasoning:t,warnings:n}}getDataStoreCapabilities(){return this.capabilities}validateConfiguration(e){let t=[];return e.preferredStrategy&&!this.validateUploadStrategy(e.preferredStrategy)&&t.push(`Preferred strategy '${e.preferredStrategy}' not supported by data store`),e.preferredChunkSize&&(this.capabilities.minChunkSize&&e.preferredChunkSize<this.capabilities.minChunkSize&&t.push(`Chunk size ${e.preferredChunkSize} below data store minimum ${this.capabilities.minChunkSize}`),this.capabilities.maxChunkSize&&e.preferredChunkSize>this.capabilities.maxChunkSize&&t.push(`Chunk size ${e.preferredChunkSize} above data store maximum ${this.capabilities.maxChunkSize}`)),e.parallelUploads&&this.capabilities.maxConcurrentUploads&&e.parallelUploads>this.capabilities.maxConcurrentUploads&&t.push(`Parallel uploads ${e.parallelUploads} exceeds data store maximum ${this.capabilities.maxConcurrentUploads}`),{valid:t.length===0,errors:t}}};export{A as a,y as c,N as i,j as n,k as o,M as r,b as s,P as t};
//# sourceMappingURL=upload-CpsShjP3.mjs.map
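The UploadStrategyNegotiator class that closes both bundles chooses between single and parallel uploads and clamps the chunk size to the store's published limits; when the estimated part count would exceed maxParts, it grows the chunk size instead of failing. A standalone sketch of that clamping arithmetic (simplified hypothetical types; the real class also records reasoning strings and re-validates the final strategy):

// Simplified, hypothetical capability shape; the bundled class carries more fields.
interface StoreCapabilities {
  optimalChunkSize?: number;
  minChunkSize?: number;
  maxChunkSize?: number;
  maxParts?: number;
}

// Clamp a requested chunk size into the store's limits, growing it when the
// resulting part count would exceed maxParts (mirrors negotiateStrategy).
function clampChunkSize(
  fileSize: number,
  preferred: number | undefined,
  caps: StoreCapabilities,
): { chunkSize: number; warnings: string[] } {
  const warnings: string[] = [];
  let chunkSize = preferred ?? caps.optimalChunkSize ?? 1024 * 1024;

  if (caps.minChunkSize && chunkSize < caps.minChunkSize) {
    warnings.push(`Chunk size ${chunkSize} below minimum ${caps.minChunkSize}, adjusting`);
    chunkSize = caps.minChunkSize;
  }
  if (caps.maxChunkSize && chunkSize > caps.maxChunkSize) {
    warnings.push(`Chunk size ${chunkSize} above maximum ${caps.maxChunkSize}, adjusting`);
    chunkSize = caps.maxChunkSize;
  }
  if (caps.maxParts) {
    const parts = Math.ceil(fileSize / chunkSize);
    if (parts > caps.maxParts) {
      warnings.push(`Estimated parts ${parts} exceeds maximum ${caps.maxParts}, increasing chunk size`);
      chunkSize = Math.max(chunkSize, Math.ceil(fileSize / caps.maxParts));
    }
  }
  return { chunkSize, warnings };
}

For example, a 100 GiB file against a store that allows at most 10000 parts forces the chunk size up to at least fileSize / maxParts, roughly 10 MiB, regardless of the preferred value.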
package/dist/upload-CpsShjP3.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"upload-CpsShjP3.mjs","names":["onAbort"],"sources":["../src/upload/mime.ts","../src/upload/create-upload.ts","../src/upload/convert-to-stream.ts","../src/upload/write-to-store.ts","../src/upload/upload-chunk.ts","../src/upload/upload-url.ts","../src/upload/upload-engine.ts","../src/upload/upload-strategy-negotiator.ts"],"sourcesContent":["/**\n * Helper to check if buffer matches a byte pattern at given offset\n */\nfunction checkBytes(\n buffer: Uint8Array,\n pattern: number[],\n offset = 0,\n): boolean {\n if (buffer.length < offset + pattern.length) return false;\n return pattern.every((byte, i) => buffer[offset + i] === byte);\n}\n\n/**\n * Helper to check if buffer matches a string pattern at given offset\n */\nfunction checkString(buffer: Uint8Array, str: string, offset = 0): boolean {\n if (buffer.length < offset + str.length) return false;\n for (let i = 0; i < str.length; i++) {\n if (buffer[offset + i] !== str.charCodeAt(i)) return false;\n }\n return true;\n}\n\n/**\n * Detect MIME type from buffer using magic bytes (file signatures).\n * Supports a wide range of common file types including images, videos, audio, documents, and archives.\n *\n * @param buffer - File content as Uint8Array\n * @param filename - Optional filename for extension-based fallback\n * @returns Detected MIME type or \"application/octet-stream\" if unknown\n */\nexport const detectMimeType = (\n buffer: Uint8Array,\n filename?: string,\n): string => {\n if (buffer.length === 0) {\n return \"application/octet-stream\";\n }\n\n // ===== IMAGES =====\n\n // PNG: 89 50 4E 47 0D 0A 1A 0A\n if (checkBytes(buffer, [0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a])) {\n return \"image/png\";\n }\n\n // JPEG: FF D8 FF\n if (checkBytes(buffer, [0xff, 0xd8, 0xff])) {\n return \"image/jpeg\";\n }\n\n // GIF87a or GIF89a\n if (checkString(buffer, \"GIF87a\") || checkString(buffer, \"GIF89a\")) {\n return \"image/gif\";\n }\n\n // WebP: RIFF....WEBP\n if (\n checkBytes(buffer, [0x52, 0x49, 0x46, 0x46]) &&\n buffer.length >= 12 &&\n checkString(buffer, \"WEBP\", 8)\n ) {\n return \"image/webp\";\n }\n\n // AVIF: ....ftypavif or ....ftypavis\n if (\n buffer.length >= 12 &&\n checkBytes(buffer, [0x00, 0x00, 0x00], 0) &&\n checkString(buffer, \"ftyp\", 4) &&\n (checkString(buffer, \"avif\", 8) || checkString(buffer, \"avis\", 8))\n ) {\n return \"image/avif\";\n }\n\n // HEIC/HEIF: ....ftypheic or ....ftypheif or ....ftypmif1\n if (\n buffer.length >= 12 &&\n checkString(buffer, \"ftyp\", 4) &&\n (checkString(buffer, \"heic\", 8) ||\n checkString(buffer, \"heif\", 8) ||\n checkString(buffer, \"mif1\", 8))\n ) {\n return \"image/heic\";\n }\n\n // BMP: 42 4D\n if (checkBytes(buffer, [0x42, 0x4d])) {\n return \"image/bmp\";\n }\n\n // TIFF (little-endian): 49 49 2A 00\n if (checkBytes(buffer, [0x49, 0x49, 0x2a, 0x00])) {\n return \"image/tiff\";\n }\n\n // TIFF (big-endian): 4D 4D 00 2A\n if (checkBytes(buffer, [0x4d, 0x4d, 0x00, 0x2a])) {\n return \"image/tiff\";\n }\n\n // ICO: 00 00 01 00\n if (checkBytes(buffer, [0x00, 0x00, 0x01, 0x00])) {\n return \"image/x-icon\";\n }\n\n // SVG (XML-based, check for <svg or <?xml)\n if (buffer.length >= 5) {\n const text = new TextDecoder(\"utf-8\", { fatal: false }).decode(\n buffer.slice(0, Math.min(1024, buffer.length)),\n );\n if (\n text.includes(\"<svg\") ||\n (text.includes(\"<?xml\") && text.includes(\"<svg\"))\n ) {\n return \"image/svg+xml\";\n }\n }\n\n // ===== VIDEOS =====\n\n // MP4/M4V/M4A: ....ftyp\n if (buffer.length >= 12 && checkString(buffer, 
\"ftyp\", 4)) {\n const subtype = new TextDecoder().decode(buffer.slice(8, 12));\n if (\n subtype.startsWith(\"mp4\") ||\n subtype.startsWith(\"M4\") ||\n subtype.startsWith(\"isom\")\n ) {\n return \"video/mp4\";\n }\n }\n\n // WebM: 1A 45 DF A3\n if (checkBytes(buffer, [0x1a, 0x45, 0xdf, 0xa3])) {\n return \"video/webm\";\n }\n\n // AVI: RIFF....AVI\n if (\n checkBytes(buffer, [0x52, 0x49, 0x46, 0x46]) &&\n buffer.length >= 12 &&\n checkString(buffer, \"AVI \", 8)\n ) {\n return \"video/x-msvideo\";\n }\n\n // MOV (QuickTime): ....moov or ....mdat or ....free\n if (\n buffer.length >= 8 &&\n (checkString(buffer, \"moov\", 4) ||\n checkString(buffer, \"mdat\", 4) ||\n checkString(buffer, \"free\", 4))\n ) {\n return \"video/quicktime\";\n }\n\n // MKV: 1A 45 DF A3 (same as WebM but check for Matroska)\n if (checkBytes(buffer, [0x1a, 0x45, 0xdf, 0xa3]) && buffer.length >= 100) {\n const text = new TextDecoder(\"utf-8\", { fatal: false }).decode(\n buffer.slice(0, 100),\n );\n if (text.includes(\"matroska\")) {\n return \"video/x-matroska\";\n }\n }\n\n // ===== AUDIO =====\n\n // MP3: FF FB or FF F3 or FF F2 or ID3\n if (\n checkBytes(buffer, [0xff, 0xfb]) ||\n checkBytes(buffer, [0xff, 0xf3]) ||\n checkBytes(buffer, [0xff, 0xf2]) ||\n checkString(buffer, \"ID3\")\n ) {\n return \"audio/mpeg\";\n }\n\n // WAV: RIFF....WAVE\n if (\n checkBytes(buffer, [0x52, 0x49, 0x46, 0x46]) &&\n buffer.length >= 12 &&\n checkString(buffer, \"WAVE\", 8)\n ) {\n return \"audio/wav\";\n }\n\n // FLAC: 66 4C 61 43 (fLaC)\n if (checkString(buffer, \"fLaC\")) {\n return \"audio/flac\";\n }\n\n // OGG: 4F 67 67 53 (OggS)\n if (checkString(buffer, \"OggS\")) {\n return \"audio/ogg\";\n }\n\n // M4A: ....ftypM4A\n if (\n buffer.length >= 12 &&\n checkString(buffer, \"ftyp\", 4) &&\n checkString(buffer, \"M4A\", 8)\n ) {\n return \"audio/mp4\";\n }\n\n // ===== DOCUMENTS =====\n\n // PDF: 25 50 44 46 (%PDF)\n if (checkString(buffer, \"%PDF\")) {\n return \"application/pdf\";\n }\n\n // ===== ARCHIVES =====\n\n // ZIP: 50 4B 03 04 or 50 4B 05 06 (empty archive) or 50 4B 07 08 (spanned archive)\n if (\n checkBytes(buffer, [0x50, 0x4b, 0x03, 0x04]) ||\n checkBytes(buffer, [0x50, 0x4b, 0x05, 0x06]) ||\n checkBytes(buffer, [0x50, 0x4b, 0x07, 0x08])\n ) {\n // Could be ZIP, DOCX, XLSX, PPTX, JAR, APK, etc.\n // Check for Office formats\n if (buffer.length >= 1024) {\n const text = new TextDecoder(\"utf-8\", { fatal: false }).decode(buffer);\n if (text.includes(\"word/\"))\n return \"application/vnd.openxmlformats-officedocument.wordprocessingml.document\";\n if (text.includes(\"xl/\"))\n return \"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\";\n if (text.includes(\"ppt/\"))\n return \"application/vnd.openxmlformats-officedocument.presentationml.presentation\";\n }\n return \"application/zip\";\n }\n\n // RAR: 52 61 72 21 1A 07 (Rar!)\n if (checkBytes(buffer, [0x52, 0x61, 0x72, 0x21, 0x1a, 0x07])) {\n return \"application/x-rar-compressed\";\n }\n\n // 7Z: 37 7A BC AF 27 1C\n if (checkBytes(buffer, [0x37, 0x7a, 0xbc, 0xaf, 0x27, 0x1c])) {\n return \"application/x-7z-compressed\";\n }\n\n // GZIP: 1F 8B\n if (checkBytes(buffer, [0x1f, 0x8b])) {\n return \"application/gzip\";\n }\n\n // TAR (ustar): \"ustar\" at offset 257\n if (buffer.length >= 262 && checkString(buffer, \"ustar\", 257)) {\n return \"application/x-tar\";\n }\n\n // ===== FONTS =====\n\n // WOFF: 77 4F 46 46 (wOFF)\n if (checkString(buffer, \"wOFF\")) {\n return \"font/woff\";\n }\n\n // WOFF2: 77 4F 46 32 (wOF2)\n if 
(checkString(buffer, \"wOF2\")) {\n return \"font/woff2\";\n }\n\n // TTF: 00 01 00 00 00\n if (checkBytes(buffer, [0x00, 0x01, 0x00, 0x00, 0x00])) {\n return \"font/ttf\";\n }\n\n // OTF: 4F 54 54 4F (OTTO)\n if (checkString(buffer, \"OTTO\")) {\n return \"font/otf\";\n }\n\n // ===== TEXT =====\n\n // JSON (basic check for { or [)\n if (buffer.length >= 1) {\n const firstByte = buffer[0];\n if (firstByte === 0x7b || firstByte === 0x5b) {\n // { or [\n try {\n const text = new TextDecoder(\"utf-8\").decode(\n buffer.slice(0, Math.min(1024, buffer.length)),\n );\n JSON.parse(text.trim());\n return \"application/json\";\n } catch {\n // Not valid JSON\n }\n }\n }\n\n // Fallback to extension-based detection\n if (filename) {\n const ext = filename.split(\".\").pop()?.toLowerCase();\n switch (ext) {\n // Images\n case \"jpg\":\n case \"jpeg\":\n return \"image/jpeg\";\n case \"png\":\n return \"image/png\";\n case \"gif\":\n return \"image/gif\";\n case \"webp\":\n return \"image/webp\";\n case \"avif\":\n return \"image/avif\";\n case \"heic\":\n case \"heif\":\n return \"image/heic\";\n case \"bmp\":\n return \"image/bmp\";\n case \"tiff\":\n case \"tif\":\n return \"image/tiff\";\n case \"ico\":\n return \"image/x-icon\";\n case \"svg\":\n return \"image/svg+xml\";\n\n // Videos\n case \"mp4\":\n case \"m4v\":\n return \"video/mp4\";\n case \"webm\":\n return \"video/webm\";\n case \"avi\":\n return \"video/x-msvideo\";\n case \"mov\":\n return \"video/quicktime\";\n case \"mkv\":\n return \"video/x-matroska\";\n\n // Audio\n case \"mp3\":\n return \"audio/mpeg\";\n case \"wav\":\n return \"audio/wav\";\n case \"flac\":\n return \"audio/flac\";\n case \"ogg\":\n return \"audio/ogg\";\n case \"m4a\":\n return \"audio/mp4\";\n\n // Documents\n case \"pdf\":\n return \"application/pdf\";\n case \"docx\":\n return \"application/vnd.openxmlformats-officedocument.wordprocessingml.document\";\n case \"xlsx\":\n return \"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\";\n case \"pptx\":\n return \"application/vnd.openxmlformats-officedocument.presentationml.presentation\";\n\n // Archives\n case \"zip\":\n return \"application/zip\";\n case \"rar\":\n return \"application/x-rar-compressed\";\n case \"7z\":\n return \"application/x-7z-compressed\";\n case \"gz\":\n case \"gzip\":\n return \"application/gzip\";\n case \"tar\":\n return \"application/x-tar\";\n\n // Fonts\n case \"woff\":\n return \"font/woff\";\n case \"woff2\":\n return \"font/woff2\";\n case \"ttf\":\n return \"font/ttf\";\n case \"otf\":\n return \"font/otf\";\n\n // Text\n case \"txt\":\n return \"text/plain\";\n case \"json\":\n return \"application/json\";\n case \"xml\":\n return \"application/xml\";\n case \"html\":\n case \"htm\":\n return \"text/html\";\n case \"css\":\n return \"text/css\";\n case \"js\":\n return \"application/javascript\";\n case \"csv\":\n return \"text/csv\";\n\n default:\n return \"application/octet-stream\";\n }\n }\n\n return \"application/octet-stream\";\n};\n\n/**\n * Compare two MIME types with lenient matching.\n * Matches on major type (e.g., \"image/*\") to allow for minor variations.\n *\n * @param declared - MIME type provided by client\n * @param detected - MIME type detected from file content\n * @returns true if MIME types are compatible\n *\n * @example\n * compareMimeTypes(\"image/png\", \"image/apng\") // true\n * compareMimeTypes(\"image/jpeg\", \"image/png\") // true (both images)\n * compareMimeTypes(\"image/png\", \"application/pdf\") // false\n */\nexport 
function compareMimeTypes(declared: string, detected: string): boolean {\n // Exact match\n if (declared === detected) {\n return true;\n }\n\n // Extract major types (e.g., \"image\" from \"image/png\")\n const declaredMajor = declared.split(\"/\")[0];\n const detectedMajor = detected.split(\"/\")[0];\n\n // Compare major types for lenient matching\n return declaredMajor === detectedMajor;\n}\n","import { Effect, Metric, MetricBoundaries, Option } from \"effect\";\nimport {\n type EventEmitter,\n type InputFile,\n type KvStore,\n type UploadEvent,\n UploadEventType,\n type UploadFile,\n type UploadFileDataStoresShape,\n type UploadFileTraceContext,\n} from \"../types\";\nimport type { GenerateIdShape } from \"../utils/generate-id\";\n\n/**\n * Captures the current Effect trace context for distributed tracing.\n *\n * Uses Effect's `currentSpan` to get the active span, which is more reliable\n * than OpenTelemetry's `trace.getActiveSpan()` when using @effect/opentelemetry\n * because Effect manages its own span context that may not be synchronized\n * with OpenTelemetry's global context.\n *\n * @returns Effect that yields TraceContext if there's an active span, undefined otherwise\n */\nconst captureTraceContextEffect: Effect.Effect<\n UploadFileTraceContext | undefined\n> = Effect.gen(function* () {\n const spanOption = yield* Effect.currentSpan.pipe(Effect.option);\n return Option.match(spanOption, {\n onNone: () => undefined,\n onSome: (span) => ({\n traceId: span.traceId,\n spanId: span.spanId,\n traceFlags: span.sampled ? 1 : 0,\n }),\n });\n});\n\n/**\n * Creates a new upload and initializes it in the storage system.\n *\n * This function handles the initial upload creation process including:\n * - Generating a unique upload ID\n * - Routing to appropriate data store based on storage ID\n * - Creating the upload record in the data store\n * - Storing upload metadata in KV store\n * - Emitting upload started events\n * - Parsing and validating metadata\n *\n * The function includes comprehensive observability with:\n * - Effect tracing spans for performance monitoring\n * - Metrics tracking for upload creation, file sizes, and success rates\n * - Structured logging for debugging and monitoring\n * - Error handling with proper UploadistaError types\n *\n * @param inputFile - Input file configuration including storage, size, type, etc.\n * @param clientId - Client identifier (null for anonymous uploads)\n * @param dataStoreService - Service for routing to appropriate data stores\n * @param kvStore - KV store for upload metadata persistence\n * @param eventEmitter - Event emitter for upload lifecycle events\n * @param generateId - ID generator for creating unique upload identifiers\n * @returns Effect that yields the created UploadFile\n *\n * @example\n * ```typescript\n * // Create a new upload\n * const inputFile: InputFile = {\n * storageId: \"s3-production\",\n * size: 1024000,\n * type: \"image/jpeg\",\n * fileName: \"photo.jpg\",\n * metadata: JSON.stringify({ category: \"photos\" })\n * };\n *\n * const createEffect = createUpload(\n * inputFile,\n * \"client-123\",\n * {\n * dataStoreService,\n * kvStore,\n * eventEmitter,\n * generateId\n * }\n * );\n *\n * // Run with dependencies\n * const upload = await Effect.runPromise(\n * createEffect.pipe(\n * Effect.provide(dataStoreLayer),\n * Effect.provide(kvStoreLayer),\n * Effect.provide(eventEmitterLayer),\n * Effect.provide(generateIdLayer)\n * )\n * );\n * ```\n */\nexport const createUpload = (\n inputFile: InputFile,\n 
clientId: string | null,\n {\n dataStoreService,\n kvStore,\n eventEmitter,\n generateId,\n }: {\n dataStoreService: UploadFileDataStoresShape;\n kvStore: KvStore<UploadFile>;\n eventEmitter: EventEmitter<UploadEvent>;\n generateId: GenerateIdShape;\n },\n) =>\n Effect.gen(function* () {\n // Capture the parent \"upload\" span's trace context FIRST\n // This allows subsequent chunk uploads to be siblings of upload-create\n // under the same parent \"upload\" span\n const traceContext = yield* captureTraceContextEffect;\n const creationDate = new Date().toISOString();\n\n // Now run the actual upload creation inside a child span\n const fileCreated = yield* Effect.gen(function* () {\n // Get datastore using Effect\n const dataStore = yield* dataStoreService.getDataStore(\n inputFile.storageId,\n clientId,\n );\n\n const id = yield* generateId.generateId();\n const { size, type, fileName, lastModified, metadata, flow } = inputFile;\n\n let parsedMetadata: Record<string, string> = {};\n if (metadata) {\n try {\n parsedMetadata = JSON.parse(metadata) as Record<string, string>;\n } catch {\n parsedMetadata = {};\n }\n }\n\n const metadataObject: Record<string, string> = {\n ...parsedMetadata,\n type,\n fileName: fileName ?? \"\",\n };\n if (lastModified) {\n metadataObject.lastModified = lastModified.toString();\n }\n\n const file: UploadFile = {\n id,\n size,\n metadata: metadataObject,\n offset: 0,\n creationDate,\n storage: {\n id: inputFile.storageId,\n type,\n path: \"\",\n bucket: dataStore.bucket,\n },\n flow,\n traceContext,\n };\n\n // Create file using Effect\n const created = yield* dataStore.create(file);\n\n // Store in KV store\n yield* kvStore.set(id, created);\n\n // Emit event\n yield* eventEmitter.emit(id, {\n type: UploadEventType.UPLOAD_STARTED,\n data: created,\n flow: created.flow,\n });\n\n return created;\n }).pipe(\n // upload-create is a CHILD span of the parent \"upload\" span\n Effect.withSpan(\"upload-create\", {\n attributes: {\n \"upload.file_name\": inputFile.fileName ?? \"unknown\",\n \"upload.file_size\": inputFile.size?.toString() ?? \"0\",\n \"upload.storage_id\": inputFile.storageId,\n \"upload.mime_type\": inputFile.type,\n \"upload.has_flow\": inputFile.flow ? \"true\" : \"false\",\n },\n }),\n );\n\n return fileCreated;\n }).pipe(\n // Parent \"upload\" span wraps the entire upload lifecycle\n // upload-create and upload-chunk will be children of this span\n Effect.withSpan(\"upload\", {\n attributes: {\n \"upload.file_name\": inputFile.fileName ?? \"unknown\",\n \"upload.file_size\": inputFile.size?.toString() ?? \"0\",\n \"upload.storage_id\": inputFile.storageId,\n \"upload.mime_type\": inputFile.type,\n \"upload.has_flow\": inputFile.flow ? 
\"true\" : \"false\",\n },\n }),\n // Track upload creation metrics\n Effect.tap((file) =>\n Effect.gen(function* () {\n // Increment upload created counter\n yield* Metric.increment(\n Metric.counter(\"upload_created_total\", {\n description: \"Total number of uploads created\",\n }),\n );\n\n // Record file size\n if (file.size) {\n const fileSizeHistogram = Metric.histogram(\n \"upload_file_size_bytes\",\n MetricBoundaries.exponential({\n start: 1024,\n factor: 2,\n count: 25,\n }),\n );\n yield* Metric.update(fileSizeHistogram, file.size);\n }\n\n // Track active uploads gauge\n const activeUploadsGauge = Metric.gauge(\"active_uploads\");\n yield* Metric.increment(activeUploadsGauge);\n }),\n ),\n // Add structured logging\n Effect.tap((file) =>\n Effect.logInfo(\"Upload created\").pipe(\n Effect.annotateLogs({\n \"upload.id\": file.id,\n \"upload.file_name\": inputFile.fileName ?? \"unknown\",\n \"upload.file_size\": inputFile.size?.toString() ?? \"0\",\n \"upload.storage_id\": inputFile.storageId,\n }),\n ),\n ),\n // Handle errors with logging and metrics\n Effect.tapError((error) =>\n Effect.gen(function* () {\n // Log error\n yield* Effect.logError(\"Upload creation failed\").pipe(\n Effect.annotateLogs({\n \"upload.file_name\": inputFile.fileName ?? \"unknown\",\n \"upload.storage_id\": inputFile.storageId,\n error: String(error),\n }),\n );\n\n // Track failed upload metric\n yield* Metric.increment(\n Metric.counter(\"upload_failed_total\", {\n description: \"Total number of uploads that failed\",\n }),\n );\n }),\n ),\n );\n","import { Stream } from \"effect\";\nimport { UploadistaError } from \"../errors\";\n\n/**\n * Converts a ReadableStream to an Effect Stream.\n *\n * This utility function wraps a ReadableStream in an Effect Stream, providing\n * proper error handling and integration with the Effect ecosystem. 
It's used\n * throughout the upload system to convert raw streams into Effect-compatible\n * streams for processing.\n *\n * The function handles:\n * - Stream conversion with proper error mapping\n * - UploadistaError creation for stream errors\n * - Integration with Effect Stream processing\n *\n * @param data - The ReadableStream to convert\n * @returns Effect Stream that can be processed with Effect operations\n *\n * @example\n * ```typescript\n * // Convert a file stream to Effect Stream\n * const fileStream = new ReadableStream(...);\n * const effectStream = convertToStream(fileStream);\n *\n * // Process with Effect operations\n * const processedStream = effectStream.pipe(\n * Stream.map((chunk) => processChunk(chunk)),\n * Stream.filter((chunk) => chunk.length > 0)\n * );\n *\n * // Run the stream\n * await Stream.runForEach(processedStream, (chunk) =>\n * Effect.logInfo(`Processed chunk: ${chunk.length} bytes`)\n * );\n * ```\n */\nexport function convertToStream<T>(data: ReadableStream<T>) {\n return Stream.fromReadableStream(\n () => data,\n (error) =>\n new UploadistaError({\n code: \"UNKNOWN_ERROR\",\n status: 500,\n body: String(error),\n }),\n );\n}\n","import { Effect, Ref } from \"effect\";\nimport { UploadistaError } from \"../errors\";\nimport { StreamLimiterEffect } from \"../streams/stream-limiter\";\nimport type { DataStore, UploadEvent, UploadFile } from \"../types\";\nimport { type EventEmitter, UploadEventType } from \"../types\";\nimport { convertToStream } from \"./convert-to-stream\";\n\n/**\n * Configuration options for writing data to a data store.\n *\n * @property data - The stream of data to write\n * @property upload - Upload file metadata\n * @property dataStore - Target data store for writing\n * @property maxFileSize - Maximum allowed file size in bytes\n * @property controller - AbortController for cancellation\n * @property eventEmitter - Event emitter for progress tracking\n * @property uploadProgressInterval - Progress emission interval in milliseconds (default: 200)\n */\ntype WriteToStoreOptions = {\n data: ReadableStream<Uint8Array>;\n upload: UploadFile;\n dataStore: DataStore<UploadFile>;\n maxFileSize: number;\n controller: AbortController;\n eventEmitter: EventEmitter<UploadEvent>;\n uploadProgressInterval?: number;\n};\n\n/**\n * Writes data stream to a data store with progress tracking and size limits.\n *\n * This function handles the core data writing logic including:\n * - Stream conversion and processing\n * - File size validation and limiting\n * - Progress tracking with throttled events\n * - Abort signal handling for cancellation\n * - Error handling and cleanup\n *\n * The function includes comprehensive observability with:\n * - Effect tracing spans for performance monitoring\n * - Structured logging for debugging and monitoring\n * - Progress event emission with throttling\n * - Error handling with proper UploadistaError types\n *\n * @param data - The stream of data to write to storage\n * @param upload - Upload file metadata containing ID, offset, etc.\n * @param dataStore - Target data store for writing the data\n * @param maxFileSize - Maximum allowed file size in bytes\n * @param controller - AbortController for handling cancellation\n * @param eventEmitter - Event emitter for progress tracking\n * @param uploadProgressInterval - Progress emission interval in milliseconds (default: 200)\n * @returns Effect that yields the number of bytes written\n *\n * @example\n * ```typescript\n * // Write data to store with progress 
tracking\n * const writeEffect = writeToStore({\n * data: fileStream,\n * upload: uploadMetadata,\n * dataStore: s3DataStore,\n * maxFileSize: 100_000_000, // 100MB\n * controller: abortController,\n * eventEmitter: progressEmitter,\n * uploadProgressInterval: 500 // Emit progress every 500ms\n * });\n *\n * // Run with error handling\n * const bytesWritten = await Effect.runPromise(\n * writeEffect.pipe(\n * Effect.catchAll((error) =>\n * Effect.logError(\"Failed to write to store\").pipe(\n * Effect.andThen(Effect.fail(error))\n * )\n * )\n * )\n * );\n * ```\n */\nexport function writeToStore({\n data,\n upload,\n dataStore,\n maxFileSize,\n controller,\n eventEmitter,\n uploadProgressInterval = 200,\n}: WriteToStoreOptions) {\n return Effect.gen(function* () {\n const stream = convertToStream(data);\n // Check if already aborted\n if (controller.signal.aborted) {\n return yield* Effect.fail(UploadistaError.fromCode(\"ABORTED\"));\n }\n\n // Create an AbortController to manage the stream pipeline\n const abortController = new AbortController();\n const { signal } = abortController;\n\n // Set up abort handling\n const onAbort = () => {\n // stream.cancel();\n abortController.abort();\n };\n\n controller.signal.addEventListener(\"abort\", onAbort, { once: true });\n\n return yield* Effect.acquireUseRelease(\n Effect.sync(() => ({ signal, onAbort })),\n ({ signal: _signal }) =>\n Effect.gen(function* () {\n // Create a ref to track the last progress emission time for throttling\n const lastEmitTime = yield* Ref.make(0);\n\n // Create the stream limiter\n const limiter = StreamLimiterEffect.limit({\n maxSize: maxFileSize,\n });\n\n // Pipe the data through the limiter\n const limitedStream = limiter(stream);\n\n // Write to the data store with progress tracking\n const offset = yield* dataStore.write(\n {\n stream: limitedStream,\n file_id: upload.id,\n offset: upload.offset,\n },\n {\n onProgress: (newOffset: number) => {\n // Simple throttling using timestamp check\n const now = Date.now();\n Ref.get(lastEmitTime)\n .pipe(\n Effect.flatMap((lastTime) => {\n if (now - lastTime >= uploadProgressInterval) {\n return Effect.gen(function* () {\n yield* Ref.set(lastEmitTime, now);\n yield* eventEmitter.emit(upload.id, {\n type: UploadEventType.UPLOAD_PROGRESS,\n data: {\n id: upload.id,\n progress: newOffset,\n total: upload.size ?? 0,\n },\n flow: upload.flow,\n });\n });\n }\n return Effect.void;\n }),\n Effect.runPromise,\n )\n .catch(() => {\n // Ignore errors during progress emission\n });\n },\n },\n );\n\n return offset;\n }).pipe(\n Effect.catchAll((error) => {\n if (error instanceof Error && error.name === \"AbortError\") {\n return Effect.fail(UploadistaError.fromCode(\"ABORTED\"));\n }\n if (error instanceof UploadistaError) {\n return Effect.fail(error);\n }\n return Effect.fail(\n UploadistaError.fromCode(\"FILE_WRITE_ERROR\", { cause: error }),\n );\n }),\n ),\n ({ onAbort }) =>\n Effect.sync(() => {\n controller.signal.removeEventListener(\"abort\", onAbort);\n }),\n );\n }).pipe(\n // Add tracing span for write operation\n Effect.withSpan(\"upload-write-to-store\", {\n attributes: {\n \"upload.id\": upload.id,\n \"upload.offset\": upload.offset.toString(),\n \"upload.max_file_size\": maxFileSize.toString(),\n \"upload.file_size\": upload.size?.toString() ?? 
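The progress callback above throttles by timestamp rather than by scheduling: each call compares `Date.now()` against a `Ref`-held last-emit time. The same pattern in isolation, as a hypothetical helper:

```typescript
import { Effect, Ref } from "effect";

// Wraps an action so it runs at most once per `intervalMs`.
const makeThrottled = (intervalMs: number, action: Effect.Effect<void>) =>
  Effect.gen(function* () {
    const last = yield* Ref.make(0);
    return Effect.gen(function* () {
      const now = Date.now();
      if (now - (yield* Ref.get(last)) >= intervalMs) {
        yield* Ref.set(last, now);
        yield* action;
      }
    });
  });
```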
\"0\",\n },\n }),\n // Add structured logging for write operation\n Effect.tap((offset) =>\n Effect.logDebug(\"Data written to store\").pipe(\n Effect.annotateLogs({\n \"upload.id\": upload.id,\n \"write.offset\": offset.toString(),\n \"write.bytes_written\": (offset - upload.offset).toString(),\n }),\n ),\n ),\n // Handle errors with logging\n Effect.tapError((error) =>\n Effect.logError(\"Failed to write to store\").pipe(\n Effect.annotateLogs({\n \"upload.id\": upload.id,\n \"upload.offset\": upload.offset.toString(),\n error: error instanceof UploadistaError ? error.code : String(error),\n }),\n ),\n ),\n );\n}\n","import { Effect, Metric, MetricBoundaries, Tracer } from \"effect\";\nimport { UploadistaError } from \"../errors/uploadista-error\";\nimport {\n type DataStore,\n type EventEmitter,\n type KvStore,\n type UploadEvent,\n UploadEventType,\n type UploadFile,\n type UploadFileDataStoresShape,\n type UploadFileTraceContext,\n} from \"../types\";\nimport { computeChecksum } from \"../utils/checksum\";\nimport { compareMimeTypes, detectMimeType } from \"./mime\";\nimport { writeToStore } from \"./write-to-store\";\n\n/**\n * Creates an ExternalSpan from stored trace context.\n * Used for linking chunk uploads to the original upload trace.\n */\nfunction createExternalSpan(traceContext: UploadFileTraceContext) {\n return Tracer.externalSpan({\n traceId: traceContext.traceId,\n spanId: traceContext.spanId,\n sampled: traceContext.traceFlags === 1,\n });\n}\n\n/**\n * Creates an \"upload-complete\" span Effect that captures the full upload duration.\n * This span is a sibling of upload-create and upload-chunk under the parent \"upload\" span.\n *\n * Note: The span's visual duration in tracing UIs will be short (instant), but the\n * actual upload duration is captured in the \"upload.total_duration_ms\" attribute.\n *\n * @param file - The completed upload file\n * @param parentSpan - The parent span to link to\n * @returns Effect that creates and completes the span\n */\nconst createUploadCompleteSpanEffect = (\n file: UploadFile,\n parentSpan: Tracer.ExternalSpan,\n): Effect.Effect<void> => {\n const creationTime = new Date(file.creationDate as string).getTime();\n const totalDurationMs = Date.now() - creationTime;\n\n return Effect.void.pipe(\n Effect.withSpan(\"upload-complete\", {\n attributes: {\n \"upload.id\": file.id,\n \"upload.size\": file.size ?? 0,\n \"upload.total_duration_ms\": totalDurationMs,\n \"upload.storage_id\": file.storage.id,\n \"upload.file_name\": file.metadata?.fileName?.toString() ?? 
\"unknown\",\n \"upload.creation_date\": file.creationDate as string,\n \"upload.completion_date\": new Date().toISOString(),\n },\n parent: parentSpan,\n }),\n );\n};\n\n/**\n * Uploads a chunk of data for an existing upload.\n *\n * This function handles the core chunk upload logic including:\n * - Retrieving upload metadata from KV store\n * - Routing to appropriate data store based on storage ID\n * - Writing chunk data to storage with progress tracking\n * - Updating upload offset and metadata\n * - Emitting progress events\n * - Validating upload completion (checksum, MIME type)\n *\n * The function includes comprehensive observability with:\n * - Effect tracing spans for performance monitoring\n * - Metrics tracking for chunk size, throughput, and success rates\n * - Structured logging for debugging and monitoring\n * - Error handling with proper UploadistaError types\n *\n * @param uploadId - Unique identifier for the upload\n * @param clientId - Client identifier (null for anonymous uploads)\n * @param chunk - ReadableStream containing the chunk data to upload\n * @param dataStoreService - Service for routing to appropriate data stores\n * @param kvStore - KV store for upload metadata persistence\n * @param eventEmitter - Event emitter for progress and validation events\n * @returns Effect that yields the updated UploadFile with new offset\n *\n * @example\n * ```typescript\n * // Upload a chunk for an existing upload\n * const uploadChunkEffect = uploadChunk(\n * \"upload-123\",\n * \"client-456\",\n * chunkStream,\n * {\n * dataStoreService,\n * kvStore,\n * eventEmitter\n * }\n * );\n *\n * // Run with dependencies\n * const result = await Effect.runPromise(\n * uploadChunkEffect.pipe(\n * Effect.provide(dataStoreLayer),\n * Effect.provide(kvStoreLayer),\n * Effect.provide(eventEmitterLayer)\n * )\n * );\n * ```\n */\nexport const uploadChunk = (\n uploadId: string,\n clientId: string | null,\n chunk: ReadableStream,\n {\n dataStoreService,\n kvStore,\n eventEmitter,\n }: {\n dataStoreService: UploadFileDataStoresShape;\n kvStore: KvStore<UploadFile>;\n eventEmitter: EventEmitter<UploadEvent>;\n },\n) =>\n Effect.gen(function* () {\n // Get file from KV store first to check for trace context\n const file = yield* kvStore.get(uploadId);\n\n // Create external span from stored trace context if available\n // This links chunk uploads to the original upload trace\n const parentSpan = file.traceContext\n ? createExternalSpan(file.traceContext)\n : undefined;\n\n // Core chunk processing logic\n const processChunk = Effect.gen(function* () {\n // Get datastore\n const dataStore = yield* dataStoreService.getDataStore(\n file.storage.id,\n clientId,\n );\n\n // Note: AbortController could be used for cancellation if needed\n\n // Write to store using writeToStore Effect\n const controller = new AbortController();\n\n const chunkSize = yield* writeToStore({\n dataStore,\n data: chunk,\n upload: file,\n maxFileSize: 100_000_000,\n controller,\n uploadProgressInterval: 200,\n eventEmitter,\n });\n\n file.offset = chunkSize;\n\n // Update KV store\n yield* kvStore.set(uploadId, file);\n\n // Emit progress event\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_PROGRESS,\n data: {\n id: file.id,\n progress: file.offset,\n total: file.size ?? 
0,\n },\n flow: file.flow,\n });\n\n // Check if upload is complete and run validation\n if (file.size && file.offset === file.size) {\n yield* validateUpload({\n file,\n dataStore,\n eventEmitter,\n });\n\n // Create \"upload-complete\" span that captures the full upload duration\n // This span shows the total time from upload creation to completion\n if (file.traceContext) {\n const completeParentSpan = createExternalSpan(file.traceContext);\n yield* createUploadCompleteSpanEffect(file, completeParentSpan);\n }\n }\n\n return file;\n }).pipe(\n // Add tracing span for chunk upload with parent from stored trace context\n Effect.withSpan(\"upload-chunk\", {\n attributes: {\n \"upload.id\": uploadId,\n \"chunk.upload_id\": uploadId,\n \"upload.has_trace_context\": file.traceContext ? \"true\" : \"false\",\n },\n parent: parentSpan,\n }),\n );\n\n return yield* processChunk;\n }).pipe(\n // Track chunk upload metrics\n Effect.tap((file) =>\n Effect.gen(function* () {\n // Increment chunk uploaded counter\n yield* Metric.increment(\n Metric.counter(\"chunk_uploaded_total\", {\n description: \"Total number of chunks uploaded\",\n }),\n );\n\n // Record chunk size\n const chunkSize = file.offset;\n const chunkSizeHistogram = Metric.histogram(\n \"chunk_size_bytes\",\n MetricBoundaries.linear({\n start: 262_144,\n width: 262_144,\n count: 20,\n }),\n );\n yield* Metric.update(chunkSizeHistogram, chunkSize);\n\n // Update throughput gauge\n if (file.size && file.size > 0) {\n const throughput = chunkSize; // bytes processed\n const throughputGauge = Metric.gauge(\n \"upload_throughput_bytes_per_second\",\n );\n yield* Metric.set(throughputGauge, throughput);\n }\n }),\n ),\n // Add structured logging for chunk progress\n Effect.tap((file) =>\n Effect.logDebug(\"Chunk uploaded\").pipe(\n Effect.annotateLogs({\n \"upload.id\": file.id,\n \"chunk.size\": file.offset.toString(),\n \"chunk.progress\":\n file.size && file.size > 0\n ? ((file.offset / file.size) * 100).toFixed(2)\n : \"0\",\n \"upload.total_size\": file.size?.toString() ?? 
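The `chunk_size_bytes` histogram steps linearly in 256 KiB increments up to 5 MiB, assuming `MetricBoundaries.linear` produces boundaries of the form start + width * k:

```typescript
// MetricBoundaries.linear({ start: 262_144, width: 262_144, count: 20 })
// corresponds to 262_144 * (k + 1) for k = 0..19.
const chunkBoundaries = Array.from({ length: 20 }, (_, k) => 262_144 * (k + 1));
chunkBoundaries[0];  //   262_144 (256 KiB)
chunkBoundaries[19]; // 5_242_880 (5 MiB)
```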
\"0\",\n }),\n ),\n ),\n // Handle errors with logging\n Effect.tapError((error) =>\n Effect.logError(\"Chunk upload failed\").pipe(\n Effect.annotateLogs({\n \"upload.id\": uploadId,\n error: String(error),\n }),\n ),\n ),\n );\n\n/**\n * Validates an upload after completion.\n *\n * Performs comprehensive validation including:\n * - Checksum validation (if provided) using the specified algorithm\n * - MIME type validation (if required by data store capabilities)\n * - File size validation against data store limits\n *\n * Validation results are emitted as events and failures result in:\n * - Cleanup of uploaded data from storage\n * - Removal of metadata from KV store\n * - Appropriate error responses\n *\n * The function respects data store capabilities for validation limits\n * and provides detailed error information for debugging.\n *\n * @param file - The upload file to validate\n * @param dataStore - Data store containing the uploaded file\n * @param eventEmitter - Event emitter for validation events\n * @returns Effect that completes validation or fails with UploadistaError\n *\n * @example\n * ```typescript\n * // Validate upload after completion\n * const validationEffect = validateUpload({\n * file: completedUpload,\n * dataStore: s3DataStore,\n * eventEmitter: progressEmitter\n * });\n *\n * // Run validation\n * await Effect.runPromise(validationEffect);\n * ```\n */\nconst validateUpload = ({\n file,\n dataStore,\n eventEmitter,\n}: {\n file: UploadFile;\n dataStore: DataStore<UploadFile>;\n eventEmitter: EventEmitter<UploadEvent>;\n}): Effect.Effect<void, UploadistaError, never> =>\n Effect.gen(function* () {\n const capabilities = dataStore.getCapabilities();\n\n // Check if file exceeds max validation size\n if (\n capabilities.maxValidationSize &&\n file.size &&\n file.size > capabilities.maxValidationSize\n ) {\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_WARNING,\n data: {\n id: file.id,\n message: `File size (${file.size} bytes) exceeds max validation size (${capabilities.maxValidationSize} bytes). Validation skipped.`,\n },\n flow: file.flow,\n });\n return;\n }\n\n // Read file from datastore for validation\n const fileBytes = yield* dataStore.read(file.id);\n\n // Validate checksum if provided\n if (file.checksum && file.checksumAlgorithm) {\n const computedChecksum = yield* computeChecksum(\n fileBytes,\n file.checksumAlgorithm,\n );\n\n if (computedChecksum !== file.checksum) {\n // Emit validation failure event\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_FAILED,\n data: {\n id: file.id,\n reason: \"checksum_mismatch\",\n expected: file.checksum,\n actual: computedChecksum,\n },\n flow: file.flow,\n });\n\n // Clean up file and remove from KV store\n yield* dataStore.remove(file.id);\n\n // Fail with checksum mismatch error\n return yield* UploadistaError.fromCode(\"CHECKSUM_MISMATCH\", {\n body: `Checksum validation failed. 
Expected: ${file.checksum}, Got: ${computedChecksum}`,\n details: {\n uploadId: file.id,\n expected: file.checksum,\n actual: computedChecksum,\n algorithm: file.checksumAlgorithm,\n },\n }).toEffect();\n }\n\n // Emit checksum validation success\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_SUCCESS,\n data: {\n id: file.id,\n validationType: \"checksum\",\n algorithm: file.checksumAlgorithm,\n },\n flow: file.flow,\n });\n }\n\n // Validate MIME type if required by capabilities\n if (capabilities.requiresMimeTypeValidation) {\n const detectedMimeType = detectMimeType(fileBytes);\n const declaredMimeType = file.metadata?.type as string | undefined;\n\n if (\n declaredMimeType &&\n !compareMimeTypes(declaredMimeType, detectedMimeType)\n ) {\n // Emit validation failure event\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_FAILED,\n data: {\n id: file.id,\n reason: \"mimetype_mismatch\",\n expected: declaredMimeType,\n actual: detectedMimeType,\n },\n flow: file.flow,\n });\n\n // Clean up file and remove from KV store\n yield* dataStore.remove(file.id);\n\n // Fail with MIME type mismatch error\n return yield* UploadistaError.fromCode(\"MIMETYPE_MISMATCH\", {\n body: `MIME type validation failed. Expected: ${declaredMimeType}, Detected: ${detectedMimeType}`,\n details: {\n uploadId: file.id,\n expected: declaredMimeType,\n actual: detectedMimeType,\n },\n }).toEffect();\n }\n\n // Emit MIME type validation success\n yield* eventEmitter.emit(file.id, {\n type: UploadEventType.UPLOAD_VALIDATION_SUCCESS,\n data: {\n id: file.id,\n validationType: \"mimetype\",\n },\n flow: file.flow,\n });\n }\n }).pipe(\n Effect.withSpan(\"validate-upload\", {\n attributes: {\n \"upload.id\": file.id,\n \"validation.checksum_provided\": file.checksum ? \"true\" : \"false\",\n \"validation.mime_required\": dataStore.getCapabilities()\n .requiresMimeTypeValidation\n ? 
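The checksum branch can be read in isolation, assuming `computeChecksum(bytes, algorithm)` yields a digest string as it does above:

```typescript
import { Effect } from "effect";
import { computeChecksum } from "../utils/checksum";

// algorithm is typed as string here for illustration; the real
// parameter may be a narrower union of supported algorithms.
const checksumMatches = (
  bytes: Uint8Array,
  expected: string,
  algorithm: string,
) =>
  Effect.map(computeChecksum(bytes, algorithm), (actual) => actual === expected);
```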
\"true\"\n : \"false\",\n },\n }),\n );\n","import { Effect, Metric } from \"effect\";\nimport { UploadistaError } from \"../errors\";\n\n/**\n * Fetches a file from a remote URL.\n *\n * This function handles HTTP requests to remote URLs for file uploads,\n * including proper error handling, metrics tracking, and observability.\n *\n * Features:\n * - HTTP request with proper error handling\n * - Effect tracing for performance monitoring\n * - Metrics tracking for URL-based uploads\n * - Structured logging for debugging\n * - Response validation and error reporting\n *\n * @param url - The remote URL to fetch the file from\n * @returns Effect that yields the Response object\n *\n * @example\n * ```typescript\n * // Fetch a file from URL\n * const fetchEffect = fetchFile(\"https://example.com/image.jpg\");\n *\n * // Run with error handling\n * const response = await Effect.runPromise(\n * fetchEffect.pipe(\n * Effect.catchAll((error) =>\n * Effect.logError(\"Failed to fetch file\").pipe(\n * Effect.andThen(Effect.fail(error))\n * )\n * )\n * )\n * );\n * ```\n */\nexport const fetchFile = (url: string) => {\n return Effect.tryPromise({\n try: async () => {\n return await fetch(url);\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", {\n cause: error,\n });\n },\n }).pipe(\n // Add tracing span for URL fetch\n Effect.withSpan(\"upload-fetch-url\", {\n attributes: {\n \"upload.url\": url,\n \"upload.operation\": \"fetch\",\n },\n }),\n // Track URL fetch metrics\n Effect.tap((response) =>\n Effect.gen(function* () {\n // Increment URL upload counter\n yield* Metric.increment(\n Metric.counter(\"upload_from_url_total\", {\n description: \"Total number of URL-based uploads\",\n }),\n );\n\n // Track success/failure\n if (response.ok) {\n yield* Metric.increment(\n Metric.counter(\"upload_from_url_success_total\", {\n description: \"Total number of successful URL-based uploads\",\n }),\n );\n }\n }),\n ),\n // Add structured logging\n Effect.tap((response) =>\n Effect.logInfo(\"URL fetch completed\").pipe(\n Effect.annotateLogs({\n \"upload.url\": url,\n \"response.status\": response.status.toString(),\n \"response.ok\": response.ok.toString(),\n \"response.content_length\":\n response.headers.get(\"content-length\") ?? 
\"unknown\",\n }),\n ),\n ),\n // Handle errors with logging and metrics\n Effect.tapError((error) =>\n Effect.gen(function* () {\n // Track failed URL upload\n yield* Metric.increment(\n Metric.counter(\"upload_from_url_failed_total\", {\n description: \"Total number of failed URL-based uploads\",\n }),\n );\n\n // Log error\n yield* Effect.logError(\"URL fetch failed\").pipe(\n Effect.annotateLogs({\n \"upload.url\": url,\n error: String(error),\n }),\n );\n }),\n ),\n );\n};\n\n/**\n * Converts a Response object to an ArrayBuffer.\n *\n * This function safely converts HTTP response data to binary format\n * for processing and storage, with proper error handling and observability.\n *\n * Features:\n * - Safe conversion from Response to ArrayBuffer\n * - Effect tracing for performance monitoring\n * - Structured logging for debugging\n * - Error handling with proper UploadistaError types\n *\n * @param response - The HTTP Response object to convert\n * @returns Effect that yields the ArrayBuffer data\n *\n * @example\n * ```typescript\n * // Convert response to buffer\n * const bufferEffect = arrayBuffer(response);\n *\n * // Use in upload pipeline\n * const buffer = await Effect.runPromise(\n * bufferEffect.pipe(\n * Effect.tap((buffer) =>\n * Effect.logInfo(`Buffer size: ${buffer.byteLength} bytes`)\n * )\n * )\n * );\n * ```\n */\nexport const arrayBuffer = (response: Response) => {\n return Effect.tryPromise({\n try: async () => {\n return await response.arrayBuffer();\n },\n catch: (error) => {\n return UploadistaError.fromCode(\"UNKNOWN_ERROR\", {\n cause: error,\n });\n },\n }).pipe(\n // Add tracing span for buffer conversion\n Effect.withSpan(\"upload-convert-to-buffer\", {\n attributes: {\n \"upload.operation\": \"arrayBuffer\",\n },\n }),\n // Add structured logging\n Effect.tap((buffer) =>\n Effect.logDebug(\"Response converted to array buffer\").pipe(\n Effect.annotateLogs({\n \"buffer.size\": buffer.byteLength.toString(),\n }),\n ),\n ),\n // Handle errors with logging\n Effect.tapError((error) =>\n Effect.logError(\"Failed to convert response to array buffer\").pipe(\n Effect.annotateLogs({\n error: String(error),\n }),\n ),\n ),\n );\n};\n","import { Context, Effect, Layer, Stream } from \"effect\";\nimport type { UploadistaError } from \"../errors\";\nimport type {\n DataStore,\n DataStoreCapabilities,\n EventEmitter,\n InputFile,\n KvStore,\n Middleware,\n StreamingConfig,\n UploadEvent,\n UploadFile,\n WebSocketConnection,\n} from \"../types\";\nimport {\n UploadEventEmitter,\n UploadEventType,\n UploadFileDataStores,\n UploadFileKVStore,\n} from \"../types\";\nimport { GenerateId, type GenerateIdShape } from \"../utils/generate-id\";\nimport { createUpload } from \"./create-upload\";\nimport { uploadChunk } from \"./upload-chunk\";\nimport { arrayBuffer, fetchFile } from \"./upload-url\";\n\n/**\n * Legacy configuration options for UploadEngine.\n *\n * @deprecated Use Effect Layers instead of this configuration object.\n * This type is kept for backward compatibility.\n *\n * @property dataStore - DataStore instance or factory function\n * @property kvStore - KV store for upload metadata\n * @property eventEmitter - Event emitter for upload progress\n * @property generateId - Optional ID generator (defaults to UUID)\n * @property middlewares - Optional request middlewares\n * @property withTracing - Enable Effect tracing for debugging\n */\nexport type UploadEngineOptions = {\n dataStore:\n | ((storageId: string) => Promise<DataStore<UploadFile>>)\n | 
DataStore<UploadFile>;\n kvStore: KvStore<UploadFile>;\n eventEmitter: EventEmitter<UploadEvent>;\n generateId?: GenerateIdShape;\n middlewares?: Middleware[];\n withTracing?: boolean;\n};\n\n/**\n * UploadEngine service interface.\n *\n * This is the core upload handling service that provides all file upload operations.\n * It manages upload lifecycle, resumable uploads, progress tracking, and storage integration.\n *\n * All operations return Effect types for composable, type-safe error handling.\n *\n * @property createUpload - Initiates a new upload and returns metadata\n * @property uploadChunk - Uploads a chunk of data for an existing upload\n * @property getCapabilities - Returns storage backend capabilities\n * @property upload - Complete upload in one operation (create + upload data)\n * @property uploadFromUrl - Uploads a file from a remote URL\n * @property getUpload - Retrieves upload metadata by ID\n * @property read - Reads the complete uploaded file data\n * @property delete - Deletes an upload and its data\n * @property subscribeToUploadEvents - Subscribes WebSocket to upload progress events\n * @property unsubscribeFromUploadEvents - Unsubscribes from upload events\n *\n * @example\n * ```typescript\n * // Basic upload flow\n * const program = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n *\n * // 1. Create upload\n * const inputFile: InputFile = {\n * storageId: \"s3-production\",\n * size: 1024000,\n * type: \"image/jpeg\",\n * fileName: \"photo.jpg\"\n * };\n * const upload = yield* server.createUpload(inputFile, \"client123\");\n *\n * // 2. Upload chunks\n * const chunk = new ReadableStream(...);\n * const updated = yield* server.uploadChunk(upload.id, \"client123\", chunk);\n *\n * // 3. Read the uploaded file\n * const data = yield* server.read(upload.id, \"client123\");\n *\n * return upload;\n * });\n *\n * // Upload with WebSocket progress tracking\n * const uploadWithProgress = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n *\n * // Subscribe to progress events\n * yield* server.subscribeToUploadEvents(uploadId, websocket);\n *\n * // Upload (events will be emitted automatically)\n * const result = yield* server.upload(inputFile, clientId, stream);\n *\n * // Unsubscribe when done\n * yield* server.unsubscribeFromUploadEvents(uploadId);\n *\n * return result;\n * });\n *\n * // Upload from URL\n * const urlUpload = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n *\n * const inputFile: InputFile = {\n * storageId: \"s3-production\",\n * size: 0, // Unknown initially\n * type: \"image/png\",\n * fileName: \"remote-image.png\"\n * };\n *\n * const upload = yield* server.uploadFromUrl(\n * inputFile,\n * \"client123\",\n * \"https://example.com/image.png\"\n * );\n *\n * return upload;\n * });\n * ```\n */\nexport type UploadEngineShape = {\n createUpload: (\n inputFile: InputFile,\n clientId: string | null,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n uploadChunk: (\n uploadId: string,\n clientId: string | null,\n chunk: ReadableStream,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n getCapabilities: (\n storageId: string,\n clientId: string | null,\n ) => Effect.Effect<DataStoreCapabilities, UploadistaError>;\n upload: (\n file: InputFile,\n clientId: string | null,\n stream: ReadableStream,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n uploadFromUrl: (\n inputFile: InputFile,\n clientId: string | null,\n url: string,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n getUpload: 
(uploadId: string) => Effect.Effect<UploadFile, UploadistaError>;\n /**\n * Reads the complete uploaded file data as bytes (buffered mode).\n * For large files, consider using readStream() for memory efficiency.\n */\n read: (\n uploadId: string,\n clientId: string | null,\n ) => Effect.Effect<Uint8Array, UploadistaError>;\n /**\n * Reads file content as a stream of chunks for memory-efficient processing.\n * Falls back to buffered read if the underlying DataStore doesn't support streaming.\n *\n * @param uploadId - The unique identifier of the upload to read\n * @param clientId - Client identifier for multi-tenant routing\n * @param config - Optional streaming configuration (chunk size)\n * @returns An Effect that resolves to a Stream of byte chunks\n *\n * @example\n * ```typescript\n * const server = yield* UploadEngine;\n * const stream = yield* server.readStream(uploadId, clientId, { chunkSize: 65536 });\n * // Process stream chunk by chunk with bounded memory\n * yield* Stream.runForEach(stream, (chunk) => processChunk(chunk));\n * ```\n */\n readStream: (\n uploadId: string,\n clientId: string | null,\n config?: StreamingConfig,\n ) => Effect.Effect<\n Stream.Stream<Uint8Array, UploadistaError>,\n UploadistaError\n >;\n /**\n * Uploads file content from a stream with unknown final size.\n * Creates upload with deferred length, streams content to storage,\n * and updates the upload record with final size when complete.\n *\n * Falls back to buffered upload if the underlying DataStore\n * doesn't support streaming writes.\n *\n * @param file - Input file configuration (size is optional)\n * @param clientId - Client identifier for multi-tenant routing\n * @param stream - Effect Stream of byte chunks to upload\n * @returns The completed UploadFile with final size\n *\n * @example\n * ```typescript\n * const server = yield* UploadEngine;\n * const result = yield* server.uploadStream(\n * {\n * storageId: \"s3-production\",\n * type: \"image/webp\",\n * uploadLengthDeferred: true,\n * fileName: \"optimized.webp\",\n * },\n * clientId,\n * transformedStream,\n * );\n * console.log(`Uploaded ${result.size} bytes`);\n * ```\n */\n uploadStream: (\n file: Omit<InputFile, \"size\"> & { size?: number; sizeHint?: number },\n clientId: string | null,\n stream: Stream.Stream<Uint8Array, UploadistaError>,\n ) => Effect.Effect<UploadFile, UploadistaError>;\n delete: (\n uploadId: string,\n clientId: string | null,\n ) => Effect.Effect<void, UploadistaError>;\n subscribeToUploadEvents: (\n uploadId: string,\n connection: WebSocketConnection,\n ) => Effect.Effect<void, UploadistaError>;\n unsubscribeFromUploadEvents: (\n uploadId: string,\n ) => Effect.Effect<void, UploadistaError>;\n};\n\n/**\n * Effect-TS context tag for the UploadEngine service.\n *\n * Use this tag to access the UploadEngine in an Effect context.\n * The server must be provided via a Layer or dependency injection.\n *\n * @example\n * ```typescript\n * // Access UploadEngine in an Effect\n * const uploadEffect = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n * const upload = yield* server.createUpload(inputFile, clientId);\n * return upload;\n * });\n *\n * // Provide UploadEngine layer\n * const program = uploadEffect.pipe(\n * Effect.provide(uploadEngine),\n * Effect.provide(uploadFileKvStore),\n * Effect.provide(dataStoreLayer),\n * Effect.provide(eventEmitterLayer)\n * );\n * ```\n */\nexport class UploadEngine extends Context.Tag(\"UploadEngine\")<\n UploadEngine,\n UploadEngineShape\n>() {}\n\n/**\n * 
Creates the UploadEngine implementation.\n *\n * This function constructs the UploadEngine service by composing all required\n * dependencies (KV store, data stores, event emitter, ID generator). It implements\n * all upload operations defined in UploadEngineShape.\n *\n * The server automatically handles:\n * - Upload lifecycle management (create, resume, complete)\n * - Progress tracking and event emission\n * - Storage backend routing based on storageId\n * - Error handling with proper UploadistaError types\n *\n * @returns An Effect that yields the UploadEngineShape implementation\n *\n * @example\n * ```typescript\n * // Create a custom UploadEngine layer\n * const myUploadEngine = Layer.effect(\n * UploadEngine,\n * createUploadEngine()\n * );\n *\n * // Use in a program\n * const program = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n * // Use server operations...\n * }).pipe(Effect.provide(myUploadEngine));\n * ```\n */\nexport function createUploadEngine() {\n return Effect.gen(function* () {\n const kvStore = yield* UploadFileKVStore;\n const eventEmitter = yield* UploadEventEmitter;\n const generateId = yield* GenerateId;\n const dataStoreService = yield* UploadFileDataStores;\n\n return {\n upload: (\n inputFile: InputFile,\n clientId: string | null,\n stream: ReadableStream,\n ) =>\n Effect.gen(function* () {\n const fileCreated = yield* createUpload(inputFile, clientId, {\n dataStoreService,\n kvStore,\n eventEmitter,\n generateId,\n });\n return yield* uploadChunk(fileCreated.id, clientId, stream, {\n dataStoreService,\n kvStore,\n eventEmitter,\n });\n }),\n uploadFromUrl: (\n inputFile: InputFile,\n clientId: string | null,\n url: string,\n ) =>\n Effect.gen(function* () {\n const response = yield* fetchFile(url);\n const buffer = yield* arrayBuffer(response);\n\n // Create a readable stream from the buffer\n const stream = new ReadableStream({\n start(controller) {\n controller.enqueue(new Uint8Array(buffer));\n controller.close();\n },\n });\n\n const fileCreated = yield* createUpload(\n { ...inputFile, size: buffer.byteLength },\n clientId,\n {\n dataStoreService,\n kvStore,\n eventEmitter,\n generateId,\n },\n );\n return yield* uploadChunk(fileCreated.id, clientId, stream, {\n dataStoreService,\n kvStore,\n eventEmitter,\n });\n }),\n createUpload: (inputFile: InputFile, clientId: string | null) =>\n Effect.gen(function* () {\n const fileCreated = yield* createUpload(inputFile, clientId, {\n dataStoreService,\n kvStore,\n eventEmitter,\n generateId,\n });\n return fileCreated;\n }),\n uploadChunk: (\n uploadId: string,\n clientId: string | null,\n chunk: ReadableStream,\n ) =>\n Effect.gen(function* () {\n const file = yield* uploadChunk(uploadId, clientId, chunk, {\n dataStoreService,\n kvStore,\n eventEmitter,\n });\n return file;\n }),\n getUpload: (uploadId: string) =>\n Effect.gen(function* () {\n const file = yield* kvStore.get(uploadId);\n return file;\n }),\n read: (uploadId: string, clientId: string | null) =>\n Effect.gen(function* () {\n const upload = yield* kvStore.get(uploadId);\n const dataStore = yield* dataStoreService.getDataStore(\n upload.storage.id,\n clientId,\n );\n return yield* dataStore.read(uploadId);\n }),\n readStream: (\n uploadId: string,\n clientId: string | null,\n config?: StreamingConfig,\n ) =>\n Effect.gen(function* () {\n const upload = yield* kvStore.get(uploadId);\n const dataStore = yield* dataStoreService.getDataStore(\n upload.storage.id,\n clientId,\n );\n\n // Check if the DataStore supports streaming 
reads\n const capabilities = dataStore.getCapabilities();\n if (capabilities.supportsStreamingRead && dataStore.readStream) {\n // Use native streaming\n yield* Effect.logDebug(`Using streaming read for file ${uploadId}`);\n return yield* dataStore.readStream(uploadId, config);\n }\n\n // Fallback: read entire file and convert to stream\n yield* Effect.logDebug(\n `Falling back to buffered read for file ${uploadId} (streaming not supported)`,\n );\n const bytes = yield* dataStore.read(uploadId);\n\n // Convert buffered bytes to a single-chunk stream\n return Stream.succeed(bytes);\n }),\n uploadStream: (\n file: Omit<InputFile, \"size\"> & { size?: number; sizeHint?: number },\n clientId: string | null,\n stream: Stream.Stream<Uint8Array, UploadistaError>,\n ) =>\n Effect.gen(function* () {\n // Get the data store for this storage\n const dataStore = yield* dataStoreService.getDataStore(\n file.storageId,\n clientId,\n );\n\n // Check if the DataStore supports streaming writes\n const capabilities = dataStore.getCapabilities();\n\n // Generate upload ID\n const uploadId = yield* generateId.generateId();\n\n if (capabilities.supportsStreamingWrite && dataStore.writeStream) {\n // Use native streaming write - DO NOT call createUpload as it would\n // create an S3 multipart upload that we won't use (writeStream creates its own)\n yield* Effect.logDebug(\n `Using streaming write for file ${uploadId}`,\n );\n\n // Parse metadata\n const metadata =\n typeof file.metadata === \"string\"\n ? JSON.parse(file.metadata)\n : file.metadata || {};\n\n // Convert metadata to Record<string, string> if present\n const stringMetadata = Object.fromEntries(\n Object.entries(metadata).map(([k, v]) => [k, String(v)]),\n );\n\n // Create initial upload record in KV store (without creating S3 multipart upload)\n const initialUpload: UploadFile = {\n id: uploadId,\n offset: 0,\n size: file.size ?? 0,\n storage: {\n id: file.storageId,\n type: dataStore.getCapabilities().supportsStreamingWrite\n ? 
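Either branch of `readStream` yields the same Stream type, so consumers need not know whether the store streamed natively or fell back to a single `Stream.succeed(bytes)` chunk. A sketch of one such uniform consumer:

```typescript
import { Stream } from "effect";

// Total byte count works identically for a native multi-chunk stream
// and for the single-chunk buffered fallback.
const byteLength = <E>(s: Stream.Stream<Uint8Array, E>) =>
  Stream.runFold(s, 0, (n, chunk) => n + chunk.length);
```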
\"streaming\"\n : \"default\",\n },\n metadata,\n creationDate: new Date().toISOString(),\n };\n yield* kvStore.set(uploadId, initialUpload);\n\n // Emit started event\n yield* eventEmitter.emit(uploadId, {\n type: UploadEventType.UPLOAD_STARTED,\n data: initialUpload,\n });\n\n const result = yield* dataStore.writeStream(uploadId, {\n stream,\n contentType: file.type,\n sizeHint: file.sizeHint,\n metadata: stringMetadata,\n });\n\n // Update the upload record with the final size and URL\n const completedUpload: UploadFile = {\n ...initialUpload,\n size: result.size,\n offset: result.size,\n storage: {\n ...initialUpload.storage,\n path: result.path,\n },\n ...(result.url && { url: result.url }),\n };\n\n yield* kvStore.set(uploadId, completedUpload);\n\n // Emit completion event\n yield* eventEmitter.emit(uploadId, {\n type: UploadEventType.UPLOAD_COMPLETE,\n data: completedUpload,\n });\n\n return completedUpload;\n }\n\n // Fallback: buffer the stream and use regular upload (which calls createUpload + uploadChunk)\n yield* Effect.logWarning(\n `Falling back to buffered upload for file ${uploadId} (streaming write not supported)`,\n );\n\n // Collect stream into a buffer\n const chunks: Uint8Array[] = [];\n yield* Stream.runForEach(stream, (chunk) =>\n Effect.sync(() => {\n chunks.push(chunk);\n }),\n );\n\n // Calculate total size\n const totalSize = chunks.reduce(\n (acc, chunk) => acc + chunk.length,\n 0,\n );\n\n // Create a combined buffer\n const buffer = new Uint8Array(totalSize);\n let offset = 0;\n for (const chunk of chunks) {\n buffer.set(chunk, offset);\n offset += chunk.length;\n }\n\n // Create a readable stream from the buffer\n const readableStream = new ReadableStream({\n start(controller) {\n controller.enqueue(buffer);\n controller.close();\n },\n });\n\n // For fallback, use the regular flow with createUpload + uploadChunk\n const inputFile: InputFile = {\n ...file,\n size: totalSize,\n };\n\n const uploadFile = yield* createUpload(inputFile, clientId, {\n dataStoreService,\n kvStore,\n eventEmitter,\n generateId: { generateId: () => Effect.succeed(uploadId) },\n });\n\n // Use regular uploadChunk\n return yield* uploadChunk(uploadId, clientId, readableStream, {\n dataStoreService,\n kvStore,\n eventEmitter,\n });\n }),\n delete: (uploadId: string, clientId: string | null) =>\n Effect.gen(function* () {\n const upload = yield* kvStore.get(uploadId);\n const dataStore = yield* dataStoreService.getDataStore(\n upload.storage.id,\n clientId,\n );\n yield* dataStore.remove(uploadId);\n yield* kvStore.delete(uploadId);\n return;\n }),\n getCapabilities: (storageId: string, clientId: string | null) =>\n Effect.gen(function* () {\n const dataStore = yield* dataStoreService.getDataStore(\n storageId,\n clientId,\n );\n return dataStore.getCapabilities();\n }),\n subscribeToUploadEvents: (\n uploadId: string,\n connection: WebSocketConnection,\n ) =>\n Effect.gen(function* () {\n yield* eventEmitter.subscribe(uploadId, connection);\n }),\n unsubscribeFromUploadEvents: (uploadId: string) =>\n Effect.gen(function* () {\n yield* eventEmitter.unsubscribe(uploadId);\n }),\n } satisfies UploadEngineShape;\n });\n}\n\n/**\n * Pre-built UploadEngine Effect Layer.\n *\n * This layer provides a ready-to-use UploadEngine implementation that can be\n * composed with other layers to build a complete upload system.\n *\n * Required dependencies:\n * - UploadFileKVStore: For storing upload metadata\n * - UploadFileDataStores: For routing to storage backends\n * - UploadEventEmitter: For 
progress events\n * - GenerateId: For creating upload IDs\n *\n * @example\n * ```typescript\n * // Compose a complete upload system\n * const fullUploadSystem = Layer.mergeAll(\n * uploadEngine,\n * uploadFileKvStore,\n * dataStoreLayer,\n * uploadEventEmitter,\n * generateIdLayer\n * );\n *\n * // Use in application\n * const app = Effect.gen(function* () {\n * const server = yield* UploadEngine;\n * // Perform uploads...\n * }).pipe(Effect.provide(fullUploadSystem));\n * ```\n */\nexport const uploadEngine = Layer.effect(UploadEngine, createUploadEngine());\n","import type { DataStoreCapabilities, UploadStrategy } from \"../types\";\n\n/**\n * Configuration options for upload strategy negotiation.\n *\n * @property fileSize - Size of the file to be uploaded in bytes\n * @property preferredStrategy - Preferred upload strategy (single, parallel, resumable)\n * @property preferredChunkSize - Preferred chunk size in bytes\n * @property parallelUploads - Number of parallel upload connections\n * @property minChunkSizeForParallel - Minimum file size to consider parallel uploads\n */\nexport type UploadStrategyOptions = {\n fileSize: number;\n preferredStrategy?: UploadStrategy;\n preferredChunkSize?: number;\n parallelUploads?: number;\n minChunkSizeForParallel?: number;\n};\n\n/**\n * Result of upload strategy negotiation.\n *\n * @property strategy - The negotiated upload strategy\n * @property chunkSize - The negotiated chunk size in bytes\n * @property parallelUploads - The negotiated number of parallel uploads\n * @property reasoning - Array of reasoning strings explaining the decisions\n * @property warnings - Array of warning messages about adjustments made\n */\nexport type NegotiatedStrategy = {\n strategy: UploadStrategy;\n chunkSize: number;\n parallelUploads: number;\n reasoning: string[];\n warnings: string[];\n};\n\n/**\n * Negotiates the optimal upload strategy based on data store capabilities and file characteristics.\n *\n * This class analyzes data store capabilities, file size, and user preferences to determine\n * the best upload strategy (single, parallel, resumable) and optimal parameters like chunk size\n * and parallel connection count.\n *\n * The negotiator considers:\n * - Data store capabilities (parallel uploads, resumable uploads, concatenation)\n * - File size and chunk size constraints\n * - User preferences and requirements\n * - Performance optimization opportunities\n *\n * @example\n * ```typescript\n * // Create negotiator for S3 data store\n * const negotiator = new UploadStrategyNegotiator(\n * s3Capabilities,\n * (strategy) => s3Capabilities.supportsStrategy(strategy)\n * );\n *\n * // Negotiate strategy for large file\n * const result = negotiator.negotiateStrategy({\n * fileSize: 100_000_000, // 100MB\n * preferredStrategy: \"parallel\",\n * preferredChunkSize: 5_000_000, // 5MB chunks\n * parallelUploads: 4\n * });\n *\n * console.log(result.strategy); // \"parallel\"\n * console.log(result.chunkSize); // 5_000_000\n * console.log(result.reasoning); // [\"Using preferred strategy: parallel\", ...]\n * ```\n */\nexport class UploadStrategyNegotiator {\n /**\n * Creates a new upload strategy negotiator.\n *\n * @param capabilities - Data store capabilities and constraints\n * @param validateUploadStrategy - Function to validate if a strategy is supported\n */\n constructor(\n private capabilities: DataStoreCapabilities,\n private validateUploadStrategy: (strategy: UploadStrategy) => boolean,\n ) {}\n\n /**\n * Negotiates the optimal upload strategy 
based on options and data store capabilities.\n *\n * This method analyzes the provided options and data store capabilities to determine\n * the best upload strategy, chunk size, and parallel upload settings. It considers\n * user preferences, file size, and data store constraints to make optimal decisions.\n *\n * The negotiation process:\n * 1. Validates preferred strategy against data store capabilities\n * 2. Automatically selects strategy based on file size and capabilities\n * 3. Adjusts chunk size to fit within data store constraints\n * 4. Validates parallel upload settings\n * 5. Ensures final strategy is supported by the data store\n *\n * @param options - Upload strategy options including file size and preferences\n * @returns Negotiated strategy with reasoning and warnings\n *\n * @example\n * ```typescript\n * const result = negotiator.negotiateStrategy({\n * fileSize: 50_000_000, // 50MB\n * preferredStrategy: \"parallel\",\n * preferredChunkSize: 5_000_000, // 5MB\n * parallelUploads: 3\n * });\n *\n * console.log(result.strategy); // \"parallel\"\n * console.log(result.chunkSize); // 5_000_000\n * console.log(result.parallelUploads); // 3\n * console.log(result.reasoning); // [\"Using preferred strategy: parallel\", ...]\n * console.log(result.warnings); // [] (no warnings)\n * ```\n */\n negotiateStrategy(options: UploadStrategyOptions): NegotiatedStrategy {\n const reasoning: string[] = [];\n const warnings: string[] = [];\n\n let strategy: UploadStrategy = \"single\";\n let chunkSize =\n options.preferredChunkSize ??\n this.capabilities.optimalChunkSize ??\n 1024 * 1024;\n let parallelUploads = options.parallelUploads ?? 1;\n\n // Check if data store supports the preferred strategy\n if (options.preferredStrategy) {\n if (!this.validateUploadStrategy(options.preferredStrategy)) {\n warnings.push(\n `Preferred strategy '${options.preferredStrategy}' not supported by data store, falling back`,\n );\n } else {\n strategy = options.preferredStrategy;\n reasoning.push(`Using preferred strategy: ${strategy}`);\n }\n }\n\n // Automatic strategy selection based on capabilities and file size\n if (\n !options.preferredStrategy ||\n !this.validateUploadStrategy(options.preferredStrategy)\n ) {\n if (\n this.capabilities.supportsParallelUploads &&\n options.fileSize > (options.minChunkSizeForParallel ?? 10 * 1024 * 1024)\n ) {\n strategy = \"parallel\";\n reasoning.push(\n `Selected parallel upload for large file (${options.fileSize} bytes)`,\n );\n } else {\n strategy = \"single\";\n reasoning.push(\n this.capabilities.supportsParallelUploads\n ? 
`Selected single upload for small file (${options.fileSize} bytes)`\n : \"Selected single upload (parallel not supported by data store)\",\n );\n }\n }\n\n // Validate and adjust chunk size based on data store constraints\n if (\n this.capabilities.minChunkSize &&\n chunkSize < this.capabilities.minChunkSize\n ) {\n warnings.push(\n `Chunk size ${chunkSize} below minimum ${this.capabilities.minChunkSize}, adjusting`,\n );\n chunkSize = this.capabilities.minChunkSize;\n }\n\n if (\n this.capabilities.maxChunkSize &&\n chunkSize > this.capabilities.maxChunkSize\n ) {\n warnings.push(\n `Chunk size ${chunkSize} above maximum ${this.capabilities.maxChunkSize}, adjusting`,\n );\n chunkSize = this.capabilities.maxChunkSize;\n }\n\n // Validate parallel upload settings\n if (strategy === \"parallel\") {\n if (\n this.capabilities.maxConcurrentUploads &&\n parallelUploads > this.capabilities.maxConcurrentUploads\n ) {\n warnings.push(\n `Parallel uploads ${parallelUploads} exceeds maximum ${this.capabilities.maxConcurrentUploads}, adjusting`,\n );\n parallelUploads = this.capabilities.maxConcurrentUploads;\n }\n\n // Check if file would exceed max parts limit\n if (this.capabilities.maxParts) {\n const estimatedParts = Math.ceil(options.fileSize / chunkSize);\n if (estimatedParts > this.capabilities.maxParts) {\n const minChunkForParts = Math.ceil(\n options.fileSize / this.capabilities.maxParts,\n );\n warnings.push(\n `Estimated parts ${estimatedParts} exceeds maximum ${this.capabilities.maxParts}, increasing chunk size`,\n );\n chunkSize = Math.max(chunkSize, minChunkForParts);\n }\n }\n }\n\n // Final validation - ensure strategy is still valid after adjustments\n if (!this.validateUploadStrategy(strategy)) {\n warnings.push(\n `Final strategy validation failed, falling back to single upload`,\n );\n strategy = \"single\";\n parallelUploads = 1;\n }\n\n // Add capability information to reasoning\n reasoning.push(\n `Data store capabilities: parallel=${this.capabilities.supportsParallelUploads}, concatenation=${this.capabilities.supportsConcatenation}, resumable=${this.capabilities.supportsResumableUploads}`,\n );\n\n return {\n strategy,\n chunkSize,\n parallelUploads: strategy === \"parallel\" ? parallelUploads : 1,\n reasoning,\n warnings,\n };\n }\n\n /**\n * Gets the data store capabilities used by this negotiator.\n *\n * @returns The data store capabilities and constraints\n */\n getDataStoreCapabilities(): DataStoreCapabilities {\n return this.capabilities;\n }\n\n /**\n * Validates upload strategy configuration against data store capabilities.\n *\n * This method checks if the provided configuration is valid for the current\n * data store capabilities without performing the actual negotiation. 
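The max-parts adjustment is easiest to see with numbers, assuming an S3-like limit of 10,000 parts:

```typescript
const fileSize = 100_000_000_000; // 100 GB
const chunkSize = 5_000_000; // 5 MB
const maxParts = 10_000;

Math.ceil(fileSize / chunkSize); // 20_000 estimated parts, over the limit
Math.ceil(fileSize / maxParts);  // 10_000_000, so chunk size is raised to 10 MB
```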
It's\n * useful for pre-validation before attempting to negotiate a strategy.\n *\n * @param options - Upload strategy options to validate\n * @returns Validation result with validity flag and error messages\n *\n * @example\n * ```typescript\n * const validation = negotiator.validateConfiguration({\n * fileSize: 10_000_000,\n * preferredStrategy: \"parallel\",\n * preferredChunkSize: 1_000_000,\n * parallelUploads: 5\n * });\n *\n * if (!validation.valid) {\n * console.log(\"Configuration errors:\", validation.errors);\n * // Handle validation errors\n * }\n * ```\n */\n validateConfiguration(options: UploadStrategyOptions): {\n valid: boolean;\n errors: string[];\n } {\n const errors: string[] = [];\n\n if (\n options.preferredStrategy &&\n !this.validateUploadStrategy(options.preferredStrategy)\n ) {\n errors.push(\n `Preferred strategy '${options.preferredStrategy}' not supported by data store`,\n );\n }\n\n if (options.preferredChunkSize) {\n if (\n this.capabilities.minChunkSize &&\n options.preferredChunkSize < this.capabilities.minChunkSize\n ) {\n errors.push(\n `Chunk size ${options.preferredChunkSize} below data store minimum ${this.capabilities.minChunkSize}`,\n );\n }\n if (\n this.capabilities.maxChunkSize &&\n options.preferredChunkSize > this.capabilities.maxChunkSize\n ) {\n errors.push(\n `Chunk size ${options.preferredChunkSize} above data store maximum ${this.capabilities.maxChunkSize}`,\n );\n }\n }\n\n if (\n options.parallelUploads &&\n this.capabilities.maxConcurrentUploads &&\n options.parallelUploads > this.capabilities.maxConcurrentUploads\n ) {\n errors.push(\n `Parallel uploads ${options.parallelUploads} exceeds data store maximum ${this.capabilities.maxConcurrentUploads}`,\n );\n }\n\n return {\n valid: errors.length === 0,\n errors,\n };\n 
}\n}\n"],"mappings":"8VAGA,SAAS,EACP,EACA,EACA,EAAS,EACA,CAET,OADI,EAAO,OAAS,EAAS,EAAQ,OAAe,GAC7C,EAAQ,OAAO,EAAM,IAAM,EAAO,EAAS,KAAO,EAAK,CAMhE,SAAS,EAAY,EAAoB,EAAa,EAAS,EAAY,CACzE,GAAI,EAAO,OAAS,EAAS,EAAI,OAAQ,MAAO,GAChD,IAAK,IAAI,EAAI,EAAG,EAAI,EAAI,OAAQ,IAC9B,GAAI,EAAO,EAAS,KAAO,EAAI,WAAW,EAAE,CAAE,MAAO,GAEvD,MAAO,GAWT,MAAa,GACX,EACA,IACW,CACX,GAAI,EAAO,SAAW,EACpB,MAAO,2BAMT,GAAI,EAAW,EAAQ,CAAC,IAAM,GAAM,GAAM,GAAM,GAAM,GAAM,GAAM,GAAK,CAAC,CACtE,MAAO,YAIT,GAAI,EAAW,EAAQ,CAAC,IAAM,IAAM,IAAK,CAAC,CACxC,MAAO,aAIT,GAAI,EAAY,EAAQ,SAAS,EAAI,EAAY,EAAQ,SAAS,CAChE,MAAO,YAIT,GACE,EAAW,EAAQ,CAAC,GAAM,GAAM,GAAM,GAAK,CAAC,EAC5C,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,CAE9B,MAAO,aAIT,GACE,EAAO,QAAU,IACjB,EAAW,EAAQ,CAAC,EAAM,EAAM,EAAK,CAAE,EAAE,EACzC,EAAY,EAAQ,OAAQ,EAAE,GAC7B,EAAY,EAAQ,OAAQ,EAAE,EAAI,EAAY,EAAQ,OAAQ,EAAE,EAEjE,MAAO,aAIT,GACE,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,GAC7B,EAAY,EAAQ,OAAQ,EAAE,EAC7B,EAAY,EAAQ,OAAQ,EAAE,EAC9B,EAAY,EAAQ,OAAQ,EAAE,EAEhC,MAAO,aAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,GAAK,CAAC,CAClC,MAAO,YAST,GALI,EAAW,EAAQ,CAAC,GAAM,GAAM,GAAM,EAAK,CAAC,EAK5C,EAAW,EAAQ,CAAC,GAAM,GAAM,EAAM,GAAK,CAAC,CAC9C,MAAO,aAIT,GAAI,EAAW,EAAQ,CAAC,EAAM,EAAM,EAAM,EAAK,CAAC,CAC9C,MAAO,eAIT,GAAI,EAAO,QAAU,EAAG,CACtB,IAAM,EAAO,IAAI,YAAY,QAAS,CAAE,MAAO,GAAO,CAAC,CAAC,OACtD,EAAO,MAAM,EAAG,KAAK,IAAI,KAAM,EAAO,OAAO,CAAC,CAC/C,CACD,GACE,EAAK,SAAS,OAAO,EACpB,EAAK,SAAS,QAAQ,EAAI,EAAK,SAAS,OAAO,CAEhD,MAAO,gBAOX,GAAI,EAAO,QAAU,IAAM,EAAY,EAAQ,OAAQ,EAAE,CAAE,CACzD,IAAM,EAAU,IAAI,aAAa,CAAC,OAAO,EAAO,MAAM,EAAG,GAAG,CAAC,CAC7D,GACE,EAAQ,WAAW,MAAM,EACzB,EAAQ,WAAW,KAAK,EACxB,EAAQ,WAAW,OAAO,CAE1B,MAAO,YAKX,GAAI,EAAW,EAAQ,CAAC,GAAM,GAAM,IAAM,IAAK,CAAC,CAC9C,MAAO,aAIT,GACE,EAAW,EAAQ,CAAC,GAAM,GAAM,GAAM,GAAK,CAAC,EAC5C,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,CAE9B,MAAO,kBAIT,GACE,EAAO,QAAU,IAChB,EAAY,EAAQ,OAAQ,EAAE,EAC7B,EAAY,EAAQ,OAAQ,EAAE,EAC9B,EAAY,EAAQ,OAAQ,EAAE,EAEhC,MAAO,kBAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,GAAM,IAAM,IAAK,CAAC,EAAI,EAAO,QAAU,KACtD,IAAI,YAAY,QAAS,CAAE,MAAO,GAAO,CAAC,CAAC,OACtD,EAAO,MAAM,EAAG,IAAI,CACrB,CACQ,SAAS,WAAW,CAC3B,MAAO,mBAOX,GACE,EAAW,EAAQ,CAAC,IAAM,IAAK,CAAC,EAChC,EAAW,EAAQ,CAAC,IAAM,IAAK,CAAC,EAChC,EAAW,EAAQ,CAAC,IAAM,IAAK,CAAC,EAChC,EAAY,EAAQ,MAAM,CAE1B,MAAO,aAIT,GACE,EAAW,EAAQ,CAAC,GAAM,GAAM,GAAM,GAAK,CAAC,EAC5C,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,CAE9B,MAAO,YAIT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,aAIT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,YAIT,GACE,EAAO,QAAU,IACjB,EAAY,EAAQ,OAAQ,EAAE,EAC9B,EAAY,EAAQ,MAAO,EAAE,CAE7B,MAAO,YAMT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,kBAMT,GACE,EAAW,EAAQ,CAAC,GAAM,GAAM,EAAM,EAAK,CAAC,EAC5C,EAAW,EAAQ,CAAC,GAAM,GAAM,EAAM,EAAK,CAAC,EAC5C,EAAW,EAAQ,CAAC,GAAM,GAAM,EAAM,EAAK,CAAC,CAC5C,CAGA,GAAI,EAAO,QAAU,KAAM,CACzB,IAAM,EAAO,IAAI,YAAY,QAAS,CAAE,MAAO,GAAO,CAAC,CAAC,OAAO,EAAO,CACtE,GAAI,EAAK,SAAS,QAAQ,CACxB,MAAO,0EACT,GAAI,EAAK,SAAS,MAAM,CACtB,MAAO,oEACT,GAAI,EAAK,SAAS,OAAO,CACvB,MAAO,4EAEX,MAAO,kBAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,GAAM,IAAM,GAAM,GAAM,EAAK,CAAC,CAC1D,MAAO,+BAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,IAAM,IAAM,IAAM,GAAM,GAAK,CAAC,CAC1D,MAAO,8BAIT,GAAI,EAAW,EAAQ,CAAC,GAAM,IAAK,CAAC,CAClC,MAAO,mBAIT,GAAI,EAAO,QAAU,KAAO,EAAY,EAAQ,QAAS,IAAI,CAC3D,MAAO,oBAMT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,YAIT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,aAIT,GAAI,EAAW,EAAQ,CAAC,EAAM,EAAM,EAAM,EAAM,EAAK,CAAC,CACpD,MAAO,WAIT,GAAI,EAAY,EAAQ,OAAO,CAC7B,MAAO,WAMT,GAAI,EAAO,QAAU,EAAG,CACtB,IAAM,EAAY,EAAO,GACzB,GAAI,IAAc,KAAQ,IAAc,GAEtC,GAAI,CACF,IAAM,EAAO,IAAI,YAAY,QAAQ,CAAC,OACpC,EAAO,MAAM,EAAG,KAAK,IAAI,KAAM,EAAO,OAAO,CAAC,CAC/C,CAED,OADA,KAAK,MAAM,EAAK,MAAM,CAAC,CAChB,wBACD,GAOZ,GAAI,EAEF,OADY,EAAS,MAAM,IAAI,CAAC,KAAK,EAAE,aAAa,CACpD,CAEE,IAAK,MAC
L,IAAK,OACH,MAAO,aACT,IAAK,MACH,MAAO,YACT,IAAK,MACH,MAAO,YACT,IAAK,OACH,MAAO,aACT,IAAK,OACH,MAAO,aACT,IAAK,OACL,IAAK,OACH,MAAO,aACT,IAAK,MACH,MAAO,YACT,IAAK,OACL,IAAK,MACH,MAAO,aACT,IAAK,MACH,MAAO,eACT,IAAK,MACH,MAAO,gBAGT,IAAK,MACL,IAAK,MACH,MAAO,YACT,IAAK,OACH,MAAO,aACT,IAAK,MACH,MAAO,kBACT,IAAK,MACH,MAAO,kBACT,IAAK,MACH,MAAO,mBAGT,IAAK,MACH,MAAO,aACT,IAAK,MACH,MAAO,YACT,IAAK,OACH,MAAO,aACT,IAAK,MACH,MAAO,YACT,IAAK,MACH,MAAO,YAGT,IAAK,MACH,MAAO,kBACT,IAAK,OACH,MAAO,0EACT,IAAK,OACH,MAAO,oEACT,IAAK,OACH,MAAO,4EAGT,IAAK,MACH,MAAO,kBACT,IAAK,MACH,MAAO,+BACT,IAAK,KACH,MAAO,8BACT,IAAK,KACL,IAAK,OACH,MAAO,mBACT,IAAK,MACH,MAAO,oBAGT,IAAK,OACH,MAAO,YACT,IAAK,QACH,MAAO,aACT,IAAK,MACH,MAAO,WACT,IAAK,MACH,MAAO,WAGT,IAAK,MACH,MAAO,aACT,IAAK,OACH,MAAO,mBACT,IAAK,MACH,MAAO,kBACT,IAAK,OACL,IAAK,MACH,MAAO,YACT,IAAK,MACH,MAAO,WACT,IAAK,KACH,MAAO,yBACT,IAAK,MACH,MAAO,WAET,QACE,MAAO,2BAIb,MAAO,4BAgBT,SAAgB,EAAiB,EAAkB,EAA2B,CAW5E,OATI,IAAa,EACR,GAIa,EAAS,MAAM,IAAI,CAAC,KACpB,EAAS,MAAM,IAAI,CAAC,GCvZ5C,MAAM,EAEF,EAAO,IAAI,WAAa,CAC1B,IAAM,EAAa,MAAO,EAAO,YAAY,KAAK,EAAO,OAAO,CAChE,OAAO,EAAO,MAAM,EAAY,CAC9B,WAAc,IAAA,GACd,OAAS,IAAU,CACjB,QAAS,EAAK,QACd,OAAQ,EAAK,OACb,WAAY,EAAK,QAAU,EAAI,EAChC,EACF,CAAC,EACF,CA4DW,GACX,EACA,EACA,CACE,mBACA,UACA,eACA,gBAQF,EAAO,IAAI,WAAa,CAItB,IAAM,EAAe,MAAO,EACtB,EAAe,IAAI,MAAM,CAAC,aAAa,CA0E7C,OAvEoB,MAAO,EAAO,IAAI,WAAa,CAEjD,IAAM,EAAY,MAAO,EAAiB,aACxC,EAAU,UACV,EACD,CAEK,EAAK,MAAO,EAAW,YAAY,CACnC,CAAE,OAAM,OAAM,WAAU,eAAc,WAAU,QAAS,EAE3D,EAAyC,EAAE,CAC/C,GAAI,EACF,GAAI,CACF,EAAiB,KAAK,MAAM,EAAS,MAC/B,CACN,EAAiB,EAAE,CAIvB,IAAM,EAAyC,CAC7C,GAAG,EACH,OACA,SAAU,GAAY,GACvB,CACG,IACF,EAAe,aAAe,EAAa,UAAU,EAGvD,IAAM,EAAmB,CACvB,KACA,OACA,SAAU,EACV,OAAQ,EACR,eACA,QAAS,CACP,GAAI,EAAU,UACd,OACA,KAAM,GACN,OAAQ,EAAU,OACnB,CACD,OACA,eACD,CAGK,EAAU,MAAO,EAAU,OAAO,EAAK,CAY7C,OATA,MAAO,EAAQ,IAAI,EAAI,EAAQ,CAG/B,MAAO,EAAa,KAAK,EAAI,CAC3B,KAAM,EAAgB,eACtB,KAAM,EACN,KAAM,EAAQ,KACf,CAAC,CAEK,GACP,CAAC,KAED,EAAO,SAAS,gBAAiB,CAC/B,WAAY,CACV,mBAAoB,EAAU,UAAY,UAC1C,mBAAoB,EAAU,MAAM,UAAU,EAAI,IAClD,oBAAqB,EAAU,UAC/B,mBAAoB,EAAU,KAC9B,kBAAmB,EAAU,KAAO,OAAS,QAC9C,CACF,CAAC,CACH,EAGD,CAAC,KAGD,EAAO,SAAS,SAAU,CACxB,WAAY,CACV,mBAAoB,EAAU,UAAY,UAC1C,mBAAoB,EAAU,MAAM,UAAU,EAAI,IAClD,oBAAqB,EAAU,UAC/B,mBAAoB,EAAU,KAC9B,kBAAmB,EAAU,KAAO,OAAS,QAC9C,CACF,CAAC,CAEF,EAAO,IAAK,GACV,EAAO,IAAI,WAAa,CAStB,GAPA,MAAO,EAAO,UACZ,EAAO,QAAQ,uBAAwB,CACrC,YAAa,kCACd,CAAC,CACH,CAGG,EAAK,KAAM,CACb,IAAM,EAAoB,EAAO,UAC/B,yBACA,EAAiB,YAAY,CAC3B,MAAO,KACP,OAAQ,EACR,MAAO,GACR,CAAC,CACH,CACD,MAAO,EAAO,OAAO,EAAmB,EAAK,KAAK,CAIpD,IAAM,EAAqB,EAAO,MAAM,iBAAiB,CACzD,MAAO,EAAO,UAAU,EAAmB,EAC3C,CACH,CAED,EAAO,IAAK,GACV,EAAO,QAAQ,iBAAiB,CAAC,KAC/B,EAAO,aAAa,CAClB,YAAa,EAAK,GAClB,mBAAoB,EAAU,UAAY,UAC1C,mBAAoB,EAAU,MAAM,UAAU,EAAI,IAClD,oBAAqB,EAAU,UAChC,CAAC,CACH,CACF,CAED,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAEtB,MAAO,EAAO,SAAS,yBAAyB,CAAC,KAC/C,EAAO,aAAa,CAClB,mBAAoB,EAAU,UAAY,UAC1C,oBAAqB,EAAU,UAC/B,MAAO,OAAO,EAAM,CACrB,CAAC,CACH,CAGD,MAAO,EAAO,UACZ,EAAO,QAAQ,sBAAuB,CACpC,YAAa,sCACd,CAAC,CACH,EACD,CACH,CACF,CChOH,SAAgB,EAAmB,EAAyB,CAC1D,OAAO,EAAO,uBACN,EACL,GACC,IAAI,EAAgB,CAClB,KAAM,gBACN,OAAQ,IACR,KAAM,OAAO,EAAM,CACpB,CAAC,CACL,CCgCH,SAAgB,EAAa,CAC3B,OACA,SACA,YACA,cACA,aACA,eACA,yBAAyB,KACH,CACtB,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAS,EAAgB,EAAK,CAEpC,GAAI,EAAW,OAAO,QACpB,OAAO,MAAO,EAAO,KAAK,EAAgB,SAAS,UAAU,CAAC,CAIhE,IAAM,EAAkB,IAAI,gBACtB,CAAE,UAAW,EAGb,MAAgB,CAEpB,EAAgB,OAAO,EAKzB,OAFA,EAAW,OAAO,iBAAiB,QAAS,EAAS,CAAE,KAAM,GAAM,CAAC,CAE7D,MAAO,EAAO,kBACnB,EAAO,UAAY,CAAE,SAAQ,UAAS,EAAE,EACvC,CAAE,OAAQ,KACT,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAe,MAAO,EAAI,K
AAK,EAAE,CAQjC,EALU,EAAoB,MAAM,CACxC,QAAS,EACV,CAAC,CAG4B,EAAO,CAyCrC,OAtCe,MAAO,EAAU,MAC9B,CACE,OAAQ,EACR,QAAS,EAAO,GAChB,OAAQ,EAAO,OAChB,CACD,CACE,WAAa,GAAsB,CAEjC,IAAM,EAAM,KAAK,KAAK,CACtB,EAAI,IAAI,EAAa,CAClB,KACC,EAAO,QAAS,GACV,EAAM,GAAY,EACb,EAAO,IAAI,WAAa,CAC7B,MAAO,EAAI,IAAI,EAAc,EAAI,CACjC,MAAO,EAAa,KAAK,EAAO,GAAI,CAClC,KAAM,EAAgB,gBACtB,KAAM,CACJ,GAAI,EAAO,GACX,SAAU,EACV,MAAO,EAAO,MAAQ,EACvB,CACD,KAAM,EAAO,KACd,CAAC,EACF,CAEG,EAAO,KACd,CACF,EAAO,WACR,CACA,UAAY,GAEX,EAEP,CACF,EAGD,CAAC,KACD,EAAO,SAAU,GACX,aAAiB,OAAS,EAAM,OAAS,aACpC,EAAO,KAAK,EAAgB,SAAS,UAAU,CAAC,CAErD,aAAiB,EACZ,EAAO,KAAK,EAAM,CAEpB,EAAO,KACZ,EAAgB,SAAS,mBAAoB,CAAE,MAAO,EAAO,CAAC,CAC/D,CACD,CACH,EACF,CAAE,QAAA,KACD,EAAO,SAAW,CAChB,EAAW,OAAO,oBAAoB,QAASA,EAAQ,EACvD,CACL,EACD,CAAC,KAED,EAAO,SAAS,wBAAyB,CACvC,WAAY,CACV,YAAa,EAAO,GACpB,gBAAiB,EAAO,OAAO,UAAU,CACzC,uBAAwB,EAAY,UAAU,CAC9C,mBAAoB,EAAO,MAAM,UAAU,EAAI,IAChD,CACF,CAAC,CAEF,EAAO,IAAK,GACV,EAAO,SAAS,wBAAwB,CAAC,KACvC,EAAO,aAAa,CAClB,YAAa,EAAO,GACpB,eAAgB,EAAO,UAAU,CACjC,uBAAwB,EAAS,EAAO,QAAQ,UAAU,CAC3D,CAAC,CACH,CACF,CAED,EAAO,SAAU,GACf,EAAO,SAAS,2BAA2B,CAAC,KAC1C,EAAO,aAAa,CAClB,YAAa,EAAO,GACpB,gBAAiB,EAAO,OAAO,UAAU,CACzC,MAAO,aAAiB,EAAkB,EAAM,KAAO,OAAO,EAAM,CACrE,CAAC,CACH,CACF,CACF,CC7LH,SAAS,EAAmB,EAAsC,CAChE,OAAO,EAAO,aAAa,CACzB,QAAS,EAAa,QACtB,OAAQ,EAAa,OACrB,QAAS,EAAa,aAAe,EACtC,CAAC,CAcJ,MAAM,GACJ,EACA,IACwB,CACxB,IAAM,EAAe,IAAI,KAAK,EAAK,aAAuB,CAAC,SAAS,CAC9D,EAAkB,KAAK,KAAK,CAAG,EAErC,OAAO,EAAO,KAAK,KACjB,EAAO,SAAS,kBAAmB,CACjC,WAAY,CACV,YAAa,EAAK,GAClB,cAAe,EAAK,MAAQ,EAC5B,2BAA4B,EAC5B,oBAAqB,EAAK,QAAQ,GAClC,mBAAoB,EAAK,UAAU,UAAU,UAAU,EAAI,UAC3D,uBAAwB,EAAK,aAC7B,yBAA0B,IAAI,MAAM,CAAC,aAAa,CACnD,CACD,OAAQ,EACT,CAAC,CACH,EAoDU,GACX,EACA,EACA,EACA,CACE,mBACA,UACA,kBAOF,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAO,MAAO,EAAQ,IAAI,EAAS,CAInC,EAAa,EAAK,aACpB,EAAmB,EAAK,aAAa,CACrC,IAAA,GAsEJ,OAAO,MAnEc,EAAO,IAAI,WAAa,CAE3C,IAAM,EAAY,MAAO,EAAiB,aACxC,EAAK,QAAQ,GACb,EACD,CAiDD,MAhCA,GAAK,OAVa,MAAO,EAAa,CACpC,YACA,KAAM,EACN,OAAQ,EACR,YAAa,IACb,WAPiB,IAAI,gBAQrB,uBAAwB,IACxB,eACD,CAAC,CAKF,MAAO,EAAQ,IAAI,EAAU,EAAK,CAGlC,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,gBACtB,KAAM,CACJ,GAAI,EAAK,GACT,SAAU,EAAK,OACf,MAAO,EAAK,MAAQ,EACrB,CACD,KAAM,EAAK,KACZ,CAAC,CAGE,EAAK,MAAQ,EAAK,SAAW,EAAK,OACpC,MAAO,EAAe,CACpB,OACA,YACA,eACD,CAAC,CAIE,EAAK,eAEP,MAAO,EAA+B,EADX,EAAmB,EAAK,aAAa,CACD,GAI5D,GACP,CAAC,KAED,EAAO,SAAS,eAAgB,CAC9B,WAAY,CACV,YAAa,EACb,kBAAmB,EACnB,2BAA4B,EAAK,aAAe,OAAS,QAC1D,CACD,OAAQ,EACT,CAAC,CACH,EAGD,CAAC,KAED,EAAO,IAAK,GACV,EAAO,IAAI,WAAa,CAEtB,MAAO,EAAO,UACZ,EAAO,QAAQ,uBAAwB,CACrC,YAAa,kCACd,CAAC,CACH,CAGD,IAAM,EAAY,EAAK,OACjB,EAAqB,EAAO,UAChC,mBACA,EAAiB,OAAO,CACtB,MAAO,OACP,MAAO,OACP,MAAO,GACR,CAAC,CACH,CAID,GAHA,MAAO,EAAO,OAAO,EAAoB,EAAU,CAG/C,EAAK,MAAQ,EAAK,KAAO,EAAG,CAC9B,IAAM,EAAa,EACb,EAAkB,EAAO,MAC7B,qCACD,CACD,MAAO,EAAO,IAAI,EAAiB,EAAW,GAEhD,CACH,CAED,EAAO,IAAK,GACV,EAAO,SAAS,iBAAiB,CAAC,KAChC,EAAO,aAAa,CAClB,YAAa,EAAK,GAClB,aAAc,EAAK,OAAO,UAAU,CACpC,iBACE,EAAK,MAAQ,EAAK,KAAO,GACnB,EAAK,OAAS,EAAK,KAAQ,KAAK,QAAQ,EAAE,CAC5C,IACN,oBAAqB,EAAK,MAAM,UAAU,EAAI,IAC/C,CAAC,CACH,CACF,CAED,EAAO,SAAU,GACf,EAAO,SAAS,sBAAsB,CAAC,KACrC,EAAO,aAAa,CAClB,YAAa,EACb,MAAO,OAAO,EAAM,CACrB,CAAC,CACH,CACF,CACF,CAoCG,GAAkB,CACtB,OACA,YACA,kBAMA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAe,EAAU,iBAAiB,CAGhD,GACE,EAAa,mBACb,EAAK,MACL,EAAK,KAAO,EAAa,kBACzB,CACA,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,0BACtB,KAAM,CACJ,GAAI,EAAK,GACT,QAAS,cAAc,EAAK,KAAK,uCAAuC,EAAa,kBAAkB,8BACxG,CACD,KAAM,EAAK,KACZ,CAAC,CACF,OAIF,IAAM,EAAY,MAAO,EAAU,KAAK,EAAK,GAAG,CAGhD,GAAI,EAAK,UAAY,EAAK,kBAAmB,CAC3C,IAAM,EAAmB,MAA
O,EAC9B,EACA,EAAK,kBACN,CAED,GAAI,IAAqB,EAAK,SAiB5B,OAfA,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,yBACtB,KAAM,CACJ,GAAI,EAAK,GACT,OAAQ,oBACR,SAAU,EAAK,SACf,OAAQ,EACT,CACD,KAAM,EAAK,KACZ,CAAC,CAGF,MAAO,EAAU,OAAO,EAAK,GAAG,CAGzB,MAAO,EAAgB,SAAS,oBAAqB,CAC1D,KAAM,yCAAyC,EAAK,SAAS,SAAS,IACtE,QAAS,CACP,SAAU,EAAK,GACf,SAAU,EAAK,SACf,OAAQ,EACR,UAAW,EAAK,kBACjB,CACF,CAAC,CAAC,UAAU,CAIf,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,0BACtB,KAAM,CACJ,GAAI,EAAK,GACT,eAAgB,WAChB,UAAW,EAAK,kBACjB,CACD,KAAM,EAAK,KACZ,CAAC,CAIJ,GAAI,EAAa,2BAA4B,CAC3C,IAAM,EAAmB,EAAe,EAAU,CAC5C,EAAmB,EAAK,UAAU,KAExC,GACE,GACA,CAAC,EAAiB,EAAkB,EAAiB,CAkBrD,OAfA,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,yBACtB,KAAM,CACJ,GAAI,EAAK,GACT,OAAQ,oBACR,SAAU,EACV,OAAQ,EACT,CACD,KAAM,EAAK,KACZ,CAAC,CAGF,MAAO,EAAU,OAAO,EAAK,GAAG,CAGzB,MAAO,EAAgB,SAAS,oBAAqB,CAC1D,KAAM,0CAA0C,EAAiB,cAAc,IAC/E,QAAS,CACP,SAAU,EAAK,GACf,SAAU,EACV,OAAQ,EACT,CACF,CAAC,CAAC,UAAU,CAIf,MAAO,EAAa,KAAK,EAAK,GAAI,CAChC,KAAM,EAAgB,0BACtB,KAAM,CACJ,GAAI,EAAK,GACT,eAAgB,WACjB,CACD,KAAM,EAAK,KACZ,CAAC,GAEJ,CAAC,KACD,EAAO,SAAS,kBAAmB,CACjC,WAAY,CACV,YAAa,EAAK,GAClB,+BAAgC,EAAK,SAAW,OAAS,QACzD,2BAA4B,EAAU,iBAAiB,CACpD,2BACC,OACA,QACL,CACF,CAAC,CACH,CC3YU,EAAa,GACjB,EAAO,WAAW,CACvB,IAAK,SACI,MAAM,MAAM,EAAI,CAEzB,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAC/C,MAAO,EACR,CAAC,CAEL,CAAC,CAAC,KAED,EAAO,SAAS,mBAAoB,CAClC,WAAY,CACV,aAAc,EACd,mBAAoB,QACrB,CACF,CAAC,CAEF,EAAO,IAAK,GACV,EAAO,IAAI,WAAa,CAEtB,MAAO,EAAO,UACZ,EAAO,QAAQ,wBAAyB,CACtC,YAAa,oCACd,CAAC,CACH,CAGG,EAAS,KACX,MAAO,EAAO,UACZ,EAAO,QAAQ,gCAAiC,CAC9C,YAAa,+CACd,CAAC,CACH,GAEH,CACH,CAED,EAAO,IAAK,GACV,EAAO,QAAQ,sBAAsB,CAAC,KACpC,EAAO,aAAa,CAClB,aAAc,EACd,kBAAmB,EAAS,OAAO,UAAU,CAC7C,cAAe,EAAS,GAAG,UAAU,CACrC,0BACE,EAAS,QAAQ,IAAI,iBAAiB,EAAI,UAC7C,CAAC,CACH,CACF,CAED,EAAO,SAAU,GACf,EAAO,IAAI,WAAa,CAEtB,MAAO,EAAO,UACZ,EAAO,QAAQ,+BAAgC,CAC7C,YAAa,2CACd,CAAC,CACH,CAGD,MAAO,EAAO,SAAS,mBAAmB,CAAC,KACzC,EAAO,aAAa,CAClB,aAAc,EACd,MAAO,OAAO,EAAM,CACrB,CAAC,CACH,EACD,CACH,CACF,CAiCU,EAAe,GACnB,EAAO,WAAW,CACvB,IAAK,SACI,MAAM,EAAS,aAAa,CAErC,MAAQ,GACC,EAAgB,SAAS,gBAAiB,CAC/C,MAAO,EACR,CAAC,CAEL,CAAC,CAAC,KAED,EAAO,SAAS,2BAA4B,CAC1C,WAAY,CACV,mBAAoB,cACrB,CACF,CAAC,CAEF,EAAO,IAAK,GACV,EAAO,SAAS,qCAAqC,CAAC,KACpD,EAAO,aAAa,CAClB,cAAe,EAAO,WAAW,UAAU,CAC5C,CAAC,CACH,CACF,CAED,EAAO,SAAU,GACf,EAAO,SAAS,6CAA6C,CAAC,KAC5D,EAAO,aAAa,CAClB,MAAO,OAAO,EAAM,CACrB,CAAC,CACH,CACF,CACF,CCwFH,IAAa,EAAb,cAAkC,EAAQ,IAAI,eAAe,EAG1D,AAAC,GAgCJ,SAAgB,GAAqB,CACnC,OAAO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAU,MAAO,EACjB,EAAe,MAAO,EACtB,EAAa,MAAO,EACpB,EAAmB,MAAO,EAEhC,MAAO,CACL,QACE,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CAOtB,OAAO,MAAO,GANM,MAAO,EAAa,EAAW,EAAU,CAC3D,mBACA,UACA,eACA,aACD,CAAC,EACoC,GAAI,EAAU,EAAQ,CAC1D,mBACA,UACA,eACD,CAAC,EACF,CACJ,eACE,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAS,MAAO,EADL,MAAO,EAAU,EAAI,CACK,CAGrC,EAAS,IAAI,eAAe,CAChC,MAAM,EAAY,CAChB,EAAW,QAAQ,IAAI,WAAW,EAAO,CAAC,CAC1C,EAAW,OAAO,EAErB,CAAC,CAYF,OAAO,MAAO,GAVM,MAAO,EACzB,CAAE,GAAG,EAAW,KAAM,EAAO,WAAY,CACzC,EACA,CACE,mBACA,UACA,eACA,aACD,CACF,EACqC,GAAI,EAAU,EAAQ,CAC1D,mBACA,UACA,eACD,CAAC,EACF,CACJ,cAAe,EAAsB,IACnC,EAAO,IAAI,WAAa,CAOtB,OANoB,MAAO,EAAa,EAAW,EAAU,CAC3D,mBACA,UACA,eACA,aACD,CAAC,EAEF,CACJ,aACE,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CAMtB,OALa,MAAO,EAAY,EAAU,EAAU,EAAO,CACzD,mBACA,UACA,eACD,CAAC,EAEF,CACJ,UAAY,GACV,EAAO,IAAI,WAAa,CAEtB,OADa,MAAO,EAAQ,IAAI,EAAS,EAEzC,CACJ,MAAO,EAAkB,IACvB,EAAO,IAAI,WAAa,CACtB,IAAM,EAAS,MAAO,EAAQ,IAAI,EAAS,CAK3C,OAAO,OAJW,MAAO,EAAiB,aACxC,EAAO,QAAQ,GACf,EACD,EACuB,KAAK,EAAS,EACtC,CACJ,YACE,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAS,MAAO,EAAQ,IAAI,EAAS,CACrC,EAAY
,MAAO,EAAiB,aACxC,EAAO,QAAQ,GACf,EACD,CAID,GADqB,EAAU,iBAAiB,CAC/B,uBAAyB,EAAU,WAGlD,OADA,MAAO,EAAO,SAAS,iCAAiC,IAAW,CAC5D,MAAO,EAAU,WAAW,EAAU,EAAO,CAItD,MAAO,EAAO,SACZ,0CAA0C,EAAS,4BACpD,CACD,IAAM,EAAQ,MAAO,EAAU,KAAK,EAAS,CAG7C,OAAO,EAAO,QAAQ,EAAM,EAC5B,CACJ,cACE,EACA,EACA,IAEA,EAAO,IAAI,WAAa,CAEtB,IAAM,EAAY,MAAO,EAAiB,aACxC,EAAK,UACL,EACD,CAGK,EAAe,EAAU,iBAAiB,CAG1C,EAAW,MAAO,EAAW,YAAY,CAE/C,GAAI,EAAa,wBAA0B,EAAU,YAAa,CAGhE,MAAO,EAAO,SACZ,kCAAkC,IACnC,CAGD,IAAM,EACJ,OAAO,EAAK,UAAa,SACrB,KAAK,MAAM,EAAK,SAAS,CACzB,EAAK,UAAY,EAAE,CAGnB,EAAiB,OAAO,YAC5B,OAAO,QAAQ,EAAS,CAAC,KAAK,CAAC,EAAG,KAAO,CAAC,EAAG,OAAO,EAAE,CAAC,CAAC,CACzD,CAGK,EAA4B,CAChC,GAAI,EACJ,OAAQ,EACR,KAAM,EAAK,MAAQ,EACnB,QAAS,CACP,GAAI,EAAK,UACT,KAAM,EAAU,iBAAiB,CAAC,uBAC9B,YACA,UACL,CACD,WACA,aAAc,IAAI,MAAM,CAAC,aAAa,CACvC,CACD,MAAO,EAAQ,IAAI,EAAU,EAAc,CAG3C,MAAO,EAAa,KAAK,EAAU,CACjC,KAAM,EAAgB,eACtB,KAAM,EACP,CAAC,CAEF,IAAM,EAAS,MAAO,EAAU,YAAY,EAAU,CACpD,SACA,YAAa,EAAK,KAClB,SAAU,EAAK,SACf,SAAU,EACX,CAAC,CAGI,EAA8B,CAClC,GAAG,EACH,KAAM,EAAO,KACb,OAAQ,EAAO,KACf,QAAS,CACP,GAAG,EAAc,QACjB,KAAM,EAAO,KACd,CACD,GAAI,EAAO,KAAO,CAAE,IAAK,EAAO,IAAK,CACtC,CAUD,OARA,MAAO,EAAQ,IAAI,EAAU,EAAgB,CAG7C,MAAO,EAAa,KAAK,EAAU,CACjC,KAAM,EAAgB,gBACtB,KAAM,EACP,CAAC,CAEK,EAIT,MAAO,EAAO,WACZ,4CAA4C,EAAS,kCACtD,CAGD,IAAM,EAAuB,EAAE,CAC/B,MAAO,EAAO,WAAW,EAAS,GAChC,EAAO,SAAW,CAChB,EAAO,KAAK,EAAM,EAClB,CACH,CAGD,IAAM,EAAY,EAAO,QACtB,EAAK,IAAU,EAAM,EAAM,OAC5B,EACD,CAGK,EAAS,IAAI,WAAW,EAAU,CACpC,EAAS,EACb,IAAK,IAAM,KAAS,EAClB,EAAO,IAAI,EAAO,EAAO,CACzB,GAAU,EAAM,OAIlB,IAAM,EAAiB,IAAI,eAAe,CACxC,MAAM,EAAY,CAChB,EAAW,QAAQ,EAAO,CAC1B,EAAW,OAAO,EAErB,CAAC,CAgBF,OARmB,MAAO,EALG,CAC3B,GAAG,EACH,KAAM,EACP,CAEiD,EAAU,CAC1D,mBACA,UACA,eACA,WAAY,CAAE,eAAkB,EAAO,QAAQ,EAAS,CAAE,CAC3D,CAAC,CAGK,MAAO,EAAY,EAAU,EAAU,EAAgB,CAC5D,mBACA,UACA,eACD,CAAC,EACF,CACJ,QAAS,EAAkB,IACzB,EAAO,IAAI,WAAa,CACtB,IAAM,EAAS,MAAO,EAAQ,IAAI,EAAS,CAK3C,OAJkB,MAAO,EAAiB,aACxC,EAAO,QAAQ,GACf,EACD,EACgB,OAAO,EAAS,CACjC,MAAO,EAAQ,OAAO,EAAS,EAE/B,CACJ,iBAAkB,EAAmB,IACnC,EAAO,IAAI,WAAa,CAKtB,OAJkB,MAAO,EAAiB,aACxC,EACA,EACD,EACgB,iBAAiB,EAClC,CACJ,yBACE,EACA,IAEA,EAAO,IAAI,WAAa,CACtB,MAAO,EAAa,UAAU,EAAU,EAAW,EACnD,CACJ,4BAA8B,GAC5B,EAAO,IAAI,WAAa,CACtB,MAAO,EAAa,YAAY,EAAS,EACzC,CACL,EACD,CAiCJ,MAAa,EAAe,EAAM,OAAO,EAAc,GAAoB,CAAC,CC5iB5E,IAAa,EAAb,KAAsC,CAOpC,YACE,EACA,EACA,CAFQ,KAAA,aAAA,EACA,KAAA,uBAAA,EAoCV,kBAAkB,EAAoD,CACpE,IAAM,EAAsB,EAAE,CACxB,EAAqB,EAAE,CAEzB,EAA2B,SAC3B,EACF,EAAQ,oBACR,KAAK,aAAa,kBAClB,KAAO,KACL,EAAkB,EAAQ,iBAAmB,EA2DjD,GAxDI,EAAQ,oBACL,KAAK,uBAAuB,EAAQ,kBAAkB,EAKzD,EAAW,EAAQ,kBACnB,EAAU,KAAK,6BAA6B,IAAW,EALvD,EAAS,KACP,uBAAuB,EAAQ,kBAAkB,6CAClD,GASH,CAAC,EAAQ,mBACT,CAAC,KAAK,uBAAuB,EAAQ,kBAAkB,IAGrD,KAAK,aAAa,yBAClB,EAAQ,UAAY,EAAQ,yBAA2B,GAAK,KAAO,OAEnE,EAAW,WACX,EAAU,KACR,4CAA4C,EAAQ,SAAS,SAC9D,GAED,EAAW,SACX,EAAU,KACR,KAAK,aAAa,wBACd,0CAA0C,EAAQ,SAAS,SAC3D,gEACL,GAMH,KAAK,aAAa,cAClB,EAAY,KAAK,aAAa,eAE9B,EAAS,KACP,cAAc,EAAU,iBAAiB,KAAK,aAAa,aAAa,aACzE,CACD,EAAY,KAAK,aAAa,cAI9B,KAAK,aAAa,cAClB,EAAY,KAAK,aAAa,eAE9B,EAAS,KACP,cAAc,EAAU,iBAAiB,KAAK,aAAa,aAAa,aACzE,CACD,EAAY,KAAK,aAAa,cAI5B,IAAa,aAEb,KAAK,aAAa,sBAClB,EAAkB,KAAK,aAAa,uBAEpC,EAAS,KACP,oBAAoB,EAAgB,mBAAmB,KAAK,aAAa,qBAAqB,aAC/F,CACD,EAAkB,KAAK,aAAa,sBAIlC,KAAK,aAAa,UAAU,CAC9B,IAAM,EAAiB,KAAK,KAAK,EAAQ,SAAW,EAAU,CAC9D,GAAI,EAAiB,KAAK,aAAa,SAAU,CAC/C,IAAM,EAAmB,KAAK,KAC5B,EAAQ,SAAW,KAAK,aAAa,SACtC,CACD,EAAS,KACP,mBAAmB,EAAe,mBAAmB,KAAK,aAAa,SAAS,yBACjF,CACD,EAAY,KAAK,IAAI,EAAW,EAAiB,EAmBvD,OAbK,KAAK,uBAAuB,EAAS,GACxC,EAAS,KACP,kEACD,CACD,EAAW,SACX,EAAkB,GAIpB,EAAU,KACR,qCAAqC,KAAK,aAAa,wBAAwB,kBAAkB,KAAK,aAAa,sBAAs
B,cAAc,KAAK,aAAa,2BAC1K,CAEM,CACL,WACA,YACA,gBAAiB,IAAa,WAAa,EAAkB,EAC7D,YACA,WACD,CAQH,0BAAkD,CAChD,OAAO,KAAK,aA4Bd,sBAAsB,EAGpB,CACA,IAAM,EAAmB,EAAE,CAwC3B,OArCE,EAAQ,mBACR,CAAC,KAAK,uBAAuB,EAAQ,kBAAkB,EAEvD,EAAO,KACL,uBAAuB,EAAQ,kBAAkB,+BAClD,CAGC,EAAQ,qBAER,KAAK,aAAa,cAClB,EAAQ,mBAAqB,KAAK,aAAa,cAE/C,EAAO,KACL,cAAc,EAAQ,mBAAmB,4BAA4B,KAAK,aAAa,eACxF,CAGD,KAAK,aAAa,cAClB,EAAQ,mBAAqB,KAAK,aAAa,cAE/C,EAAO,KACL,cAAc,EAAQ,mBAAmB,4BAA4B,KAAK,aAAa,eACxF,EAKH,EAAQ,iBACR,KAAK,aAAa,sBAClB,EAAQ,gBAAkB,KAAK,aAAa,sBAE5C,EAAO,KACL,oBAAoB,EAAQ,gBAAgB,8BAA8B,KAAK,aAAa,uBAC7F,CAGI,CACL,MAAO,EAAO,SAAW,EACzB,SACD"}
@@ -218,4 +218,4 @@ declare function httpFailure(code: UploadistaErrorCode, overrides?: Partial<Pick
 }): Effect.Effect<never, UploadistaError>;
 //#endregion
 export { isUploadistaError as a, httpFailure as i, UploadistaError as n, UploadistaErrorCode as r, ERROR_CATALOG as t };
-//# sourceMappingURL=uploadista-error-
+//# sourceMappingURL=uploadista-error-B1qbOy9N.d.mts.map
@@ -1 +1 @@
-{"version":3,"file":"uploadista-error-
+
{"version":3,"file":"uploadista-error-B1qbOy9N.d.mts","names":[],"sources":["../src/errors/uploadista-error.ts"],"sourcesContent":[],"mappings":";;;;;;;;;;AAQA;AAiFA;AACS,KAlFG,mBAAA,GAkFH,gBAAA,GAAA,SAAA,GAAA,qBAAA,GAAA,kBAAA,GAAA,sBAAA,GAAA,sBAAA,GAAA,kBAAA,GAAA,qBAAA,GAAA,iBAAA,GAAA,qBAAA,GAAA,gBAAA,GAAA,aAAA,GAAA,gBAAA,GAAA,iBAAA,GAAA,oBAAA,GAAA,gBAAA,GAAA,qBAAA,GAAA,gBAAA,GAAA,gBAAA,GAAA,uBAAA,GAAA,mBAAA,GAAA,uBAAA,GAAA,gBAAA,GAAA,kBAAA,GAAA,kBAAA,GAAA,wBAAA,GAAA,eAAA,GAAA,kBAAA,GAAA,qBAAA,GAAA,8BAAA,GAAA,6BAAA,GAAA,mBAAA,GAAA,mBAAA,GAAA,gCAAA,GAAA,yBAAA,GAAA,sBAAA,GAAA,qBAAA,GAAA,kCAAA,GAAA,sBAAA,GAAA,mBAAA,GAAA,wBAAA,GAAA,oBAAA,GAAA,qBAAA,GAAA,kBAAA,GAAA,wBAAA,GAAA,mBAAA,GAAA,gBAAA,GAAA,sBAAA,GAAA,4BAAA,GAAA,cAAA,GAAA,4BAAA,GAAA,yBAAA,GAAA,YAAA,GAAA,eAAA,GAAA,eAAA,GAAA,oBAAA,GAAA,sBAAA;;;;AAuNE;;;;;;;;;;;;;AAwCX;;;;;;AAeU,cA/QG,aA+QH,EA/QkB,QA+QlB,CA9QR,MA8QQ,CA9QD,mBA8QC,EAAA;EAkDA,MAAA,EAAA,MAAA;EACmB,IAAA,EAAA,MAAA;CAAL,CAAA,CAAA;cA1Gb,oBA0GK,EAAA,IAAA,CAAA,gBAAA,CAAA,MAAA,EAAA,GAAA,CAAA,GAAA,CAAA,CAAA,CAAA,CAAA,IAAA,sBAAA,EAAA,EAAA,CAAA,CAAA,CAAA,SAAA,IAAA,GAAA,IAAA,GAAA,iBAAA,OAAA,KAAA,SAAA,MAAA,GAAA,KAAA,IAAA,IAAA,EAAA,CAAA,EAAA,EAAA,kCAAA;EAIX,SAAA,IAAA,EAAA,iBAAA;CAkCkC,WAAA,EAAA,CAAA;;;;;AA4BvC;AAiCA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;cArKa,eAAA,SAAwB,oBAAA;;;;;;;;;;;;;UAe3B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;wBAkDA,iCACM,QAAQ,KAAK;;;MAIxB;;;;;;;;;;;;;;;;;;;;;;;;yBAkCoB,MAAA,CAAO,OAAO,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;iBA4B1B,iBAAA,2BAA4C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;iBAiC5C,WAAA,OACR,iCACM,QAAQ,KAAK;;;IAIxB,MAAA,CAAO,cAAc"}
@@ -1,4 +1,4 @@
-const e=require(`./types-
+const e=require(`./types-BF_tvkRh.cjs`);let t=require(`effect`);const n={MISSING_OFFSET:{status:403,body:`Upload-Offset header required
 `},ABORTED:{status:400,body:`Request aborted due to lock acquired`},INVALID_TERMINATION:{status:400,body:`Cannot terminate an already completed upload`},ERR_LOCK_TIMEOUT:{status:500,body:`failed to acquire lock before timeout`},INVALID_CONTENT_TYPE:{status:403,body:`Content-Type header required
 `},DATASTORE_NOT_FOUND:{status:500,body:`The datastore was not found
 `},UPLOAD_ID_NOT_FOUND:{status:500,body:`The upload id was not found
@@ -1 +1 @@
-
{"version":3,"file":"uploadista-error-CkSxSyNo.mjs","names":["ERROR_CATALOG: Readonly<\n Record<UploadistaErrorCode, { status: number; body: string }>\n>"],"sources":["../src/errors/uploadista-error.ts"],"sourcesContent":["import { Data, Effect } from \"effect\";\n\n/**\n * Union type of all possible error codes in the Uploadista system.\n *\n * Each error code corresponds to a specific error condition with predefined\n * HTTP status codes and messages in the ERROR_CATALOG.\n */\nexport type UploadistaErrorCode =\n | \"MISSING_OFFSET\"\n | \"ABORTED\"\n | \"INVALID_TERMINATION\"\n | \"ERR_LOCK_TIMEOUT\"\n | \"INVALID_CONTENT_TYPE\"\n | \"FLOW_STRUCTURE_ERROR\"\n | \"FLOW_CYCLE_ERROR\"\n | \"FLOW_NODE_NOT_FOUND\"\n | \"FLOW_NODE_ERROR\"\n | \"FLOW_NOT_AUTHORIZED\"\n | \"FLOW_NOT_FOUND\"\n | \"FLOW_PAUSED\"\n | \"FLOW_CANCELLED\"\n | \"FILE_READ_ERROR\"\n | \"FLOW_JOB_NOT_FOUND\"\n | \"FLOW_JOB_ERROR\"\n | \"DATASTORE_NOT_FOUND\"\n | \"FILE_NOT_FOUND\"\n | \"INVALID_OFFSET\"\n | \"FILE_NO_LONGER_EXISTS\"\n | \"ERR_SIZE_EXCEEDED\"\n | \"ERR_MAX_SIZE_EXCEEDED\"\n | \"INVALID_LENGTH\"\n | \"INVALID_METADATA\"\n | \"VALIDATION_ERROR\"\n | \"STORAGE_NOT_AUTHORIZED\"\n | \"UNKNOWN_ERROR\"\n | \"FILE_WRITE_ERROR\"\n | \"UPLOAD_ID_NOT_FOUND\"\n | \"FLOW_OUTPUT_VALIDATION_ERROR\"\n | \"FLOW_INPUT_VALIDATION_ERROR\"\n | \"CHECKSUM_MISMATCH\"\n | \"MIMETYPE_MISMATCH\"\n | \"UNSUPPORTED_CHECKSUM_ALGORITHM\"\n | \"VIDEO_PROCESSING_FAILED\"\n | \"INVALID_VIDEO_FORMAT\"\n | \"CODEC_NOT_SUPPORTED\"\n | \"VIDEO_METADATA_EXTRACTION_FAILED\"\n | \"FFMPEG_NOT_INSTALLED\"\n | \"INVALID_NODE_TYPE\"\n | \"TYPE_CATEGORY_MISMATCH\"\n | \"INVALID_INPUT_TYPE\"\n | \"INVALID_OUTPUT_TYPE\"\n | \"OUTPUT_NOT_FOUND\"\n | \"MULTIPLE_OUTPUTS_FOUND\"\n | \"VIRUS_SCAN_FAILED\"\n | \"VIRUS_DETECTED\"\n | \"CLAMAV_NOT_INSTALLED\"\n | \"VIRUS_DEFINITIONS_OUTDATED\"\n | \"SCAN_TIMEOUT\"\n | \"DOCUMENT_PROCESSING_FAILED\"\n | \"INVALID_DOCUMENT_FORMAT\"\n | \"OCR_FAILED\"\n | \"PDF_ENCRYPTED\"\n | \"PDF_CORRUPTED\"\n | \"PAGE_RANGE_INVALID\"\n | \"CIRCUIT_BREAKER_OPEN\";\n\n/**\n * Catalog of all predefined errors in the Uploadista system.\n *\n * Maps error codes to their HTTP status codes and default error messages.\n * This centralized catalog ensures consistent error handling across all\n * Uploadista packages and adapters.\n *\n * Each error entry contains:\n * - `status`: HTTP status code (400-500 range)\n * - `body`: Human-readable error message\n *\n * @example\n * ```typescript\n * // Access a specific error definition\n * const fileNotFound = ERROR_CATALOG.FILE_NOT_FOUND;\n * console.log(fileNotFound.status); // 404\n * console.log(fileNotFound.body); // \"The file for this url was not found\\n\"\n *\n * // Use with UploadistaError\n * const error = UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n * ```\n */\nexport const ERROR_CATALOG: Readonly<\n Record<UploadistaErrorCode, { status: number; body: string }>\n> = {\n MISSING_OFFSET: { status: 403, body: \"Upload-Offset header required\\n\" },\n ABORTED: { status: 400, body: \"Request aborted due to lock acquired\" },\n INVALID_TERMINATION: {\n status: 400,\n body: \"Cannot terminate an already completed upload\",\n },\n ERR_LOCK_TIMEOUT: {\n status: 500,\n body: \"failed to acquire lock before timeout\",\n },\n INVALID_CONTENT_TYPE: {\n status: 403,\n body: \"Content-Type header required\\n\",\n },\n DATASTORE_NOT_FOUND: {\n status: 500,\n body: \"The datastore was not found\\n\",\n },\n UPLOAD_ID_NOT_FOUND: {\n status: 500,\n body: \"The upload id was not found\\n\",\n 
},\n FILE_NOT_FOUND: {\n status: 404,\n body: \"The file for this url was not found\\n\",\n },\n FLOW_NOT_AUTHORIZED: {\n status: 401,\n body: \"The flow is not authorized\\n\",\n },\n FLOW_NOT_FOUND: {\n status: 404,\n body: \"The flow was not found\\n\",\n },\n FLOW_PAUSED: {\n status: 409,\n body: \"The flow execution was paused by user\\n\",\n },\n FLOW_CANCELLED: {\n status: 409,\n body: \"The flow execution was cancelled by user\\n\",\n },\n FLOW_STRUCTURE_ERROR: {\n status: 500,\n body: \"The flow structure is invalid\\n\",\n },\n FLOW_CYCLE_ERROR: {\n status: 500,\n body: \"The flow contains a cycle\\n\",\n },\n FLOW_NODE_NOT_FOUND: {\n status: 500,\n body: \"The flow node was not found\\n\",\n },\n FLOW_NODE_ERROR: {\n status: 500,\n body: \"The flow node failed\\n\",\n },\n FLOW_JOB_NOT_FOUND: {\n status: 404,\n body: \"The flow job was not found\\n\",\n },\n FLOW_JOB_ERROR: {\n status: 500,\n body: \"The flow job failed\\n\",\n },\n FLOW_INPUT_VALIDATION_ERROR: {\n status: 500,\n body: \"The flow input validation failed\\n\",\n },\n FLOW_OUTPUT_VALIDATION_ERROR: {\n status: 500,\n body: \"The flow output validation failed\\n\",\n },\n INVALID_OFFSET: { status: 409, body: \"Upload-Offset conflict\\n\" },\n FILE_NO_LONGER_EXISTS: {\n status: 410,\n body: \"The file for this url no longer exists\\n\",\n },\n FILE_READ_ERROR: {\n status: 500,\n body: \"Something went wrong reading the file\\n\",\n },\n ERR_SIZE_EXCEEDED: { status: 413, body: \"upload's size exceeded\\n\" },\n ERR_MAX_SIZE_EXCEEDED: { status: 413, body: \"Maximum size exceeded\\n\" },\n INVALID_LENGTH: {\n status: 400,\n body: \"Upload-Length or Upload-Defer-Length header required\\n\",\n },\n INVALID_METADATA: {\n status: 400,\n body: \"Upload-Metadata is invalid. It MUST consist of one or more comma-separated key-value pairs. The key and value MUST be separated by a space. The key MUST NOT contain spaces and commas and MUST NOT be empty. The key SHOULD be ASCII encoded and the value MUST be Base64 encoded. 
All keys MUST be unique\",\n },\n VALIDATION_ERROR: {\n status: 400,\n body: \"Validation failed\\n\",\n },\n STORAGE_NOT_AUTHORIZED: {\n status: 401,\n body: \"The storage is not authorized\\n\",\n },\n UNKNOWN_ERROR: {\n status: 500,\n body: \"Something went wrong with that request\\n\",\n },\n FILE_WRITE_ERROR: {\n status: 500,\n body: \"Something went wrong receiving the file\\n\",\n },\n CHECKSUM_MISMATCH: {\n status: 400,\n body: \"The file checksum does not match the provided checksum\\n\",\n },\n MIMETYPE_MISMATCH: {\n status: 400,\n body: \"The file MIME type does not match the declared type\\n\",\n },\n UNSUPPORTED_CHECKSUM_ALGORITHM: {\n status: 400,\n body: \"The specified checksum algorithm is not supported\\n\",\n },\n VIDEO_PROCESSING_FAILED: {\n status: 500,\n body: \"Video processing operation failed\\n\",\n },\n INVALID_VIDEO_FORMAT: {\n status: 400,\n body: \"The video format is not supported\\n\",\n },\n CODEC_NOT_SUPPORTED: {\n status: 400,\n body: \"The specified video codec is not supported\\n\",\n },\n VIDEO_METADATA_EXTRACTION_FAILED: {\n status: 500,\n body: \"Failed to extract video metadata\\n\",\n },\n FFMPEG_NOT_INSTALLED: {\n status: 500,\n body: \"FFmpeg is not installed or not available in PATH\\n\",\n },\n INVALID_NODE_TYPE: {\n status: 500,\n body: \"The specified node type is not registered\\n\",\n },\n TYPE_CATEGORY_MISMATCH: {\n status: 500,\n body: \"Node type category does not match the node configuration\\n\",\n },\n INVALID_INPUT_TYPE: {\n status: 500,\n body: \"The input type is not registered\\n\",\n },\n INVALID_OUTPUT_TYPE: {\n status: 500,\n body: \"The output type is not registered\\n\",\n },\n OUTPUT_NOT_FOUND: {\n status: 404,\n body: \"No output of the specified type was found\\n\",\n },\n MULTIPLE_OUTPUTS_FOUND: {\n status: 409,\n body: \"Multiple outputs of the specified type found, expected single output\\n\",\n },\n VIRUS_SCAN_FAILED: {\n status: 500,\n body: \"Virus scanning operation failed\\n\",\n },\n VIRUS_DETECTED: {\n status: 400,\n body: \"Virus or malware detected in file\\n\",\n },\n CLAMAV_NOT_INSTALLED: {\n status: 500,\n body: \"ClamAV is not installed or not available\\n\",\n },\n VIRUS_DEFINITIONS_OUTDATED: {\n status: 500,\n body: \"Virus definitions are outdated and should be updated\\n\",\n },\n SCAN_TIMEOUT: {\n status: 500,\n body: \"Virus scan exceeded timeout limit\\n\",\n },\n DOCUMENT_PROCESSING_FAILED: {\n status: 500,\n body: \"Document processing operation failed\\n\",\n },\n INVALID_DOCUMENT_FORMAT: {\n status: 400,\n body: \"The document format is not supported\\n\",\n },\n OCR_FAILED: {\n status: 500,\n body: \"OCR operation failed\\n\",\n },\n PDF_ENCRYPTED: {\n status: 400,\n body: \"The PDF is password-protected and cannot be processed\\n\",\n },\n PDF_CORRUPTED: {\n status: 400,\n body: \"The PDF file is corrupted or malformed\\n\",\n },\n PAGE_RANGE_INVALID: {\n status: 400,\n body: \"The specified page range is invalid\\n\",\n },\n CIRCUIT_BREAKER_OPEN: {\n status: 503,\n body: \"Circuit breaker is open - service temporarily unavailable\\n\",\n },\n} as const;\n\n/**\n * Standard error class for all Uploadista operations.\n *\n * UploadistaError provides a consistent error handling approach across the entire\n * Uploadista ecosystem. 
Each error has:\n * - A typed error code from the ERROR_CATALOG\n * - An HTTP-compatible status code\n * - A human-readable error message (body)\n * - Optional additional details and cause information\n *\n * This class integrates with Effect-TS for functional error handling and can be\n * easily converted to an Effect that fails.\n *\n * @example\n * ```typescript\n * // Create from error code\n * const error = UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n *\n * // Create with custom details\n * const customError = UploadistaError.fromCode(\"FLOW_NODE_ERROR\", {\n * body: \"Failed to process image\",\n * cause: originalError,\n * details: { nodeId: \"resize-1\", fileId: \"abc123\" }\n * });\n *\n * // Use with Effect\n * const effect = customError.toEffect<void>();\n *\n * // In an Effect pipeline\n * return Effect.gen(function* () {\n * const file = yield* getFile(id);\n * if (!file) {\n * return yield* UploadistaError.fromCode(\"FILE_NOT_FOUND\").toEffect();\n * }\n * return file;\n * });\n * ```\n */\nexport class UploadistaError extends Data.TaggedError(\"UploadistaError\") {\n readonly code: string;\n readonly status: number;\n // Keep legacy property names for backward compatibility\n readonly status_code: number;\n readonly body: string;\n readonly details?: unknown;\n\n constructor({\n code,\n status,\n body,\n cause,\n details,\n }: {\n code: UploadistaErrorCode | string;\n status: number;\n body: string;\n cause?: unknown;\n details?: unknown;\n }) {\n super();\n this.name = \"UploadistaError\";\n this.code = code;\n this.status = status;\n this.status_code = status; // legacy alias\n this.body = body;\n this.details = details;\n if (cause) (this as unknown as { cause?: unknown }).cause = cause;\n }\n\n /**\n * Creates an UploadistaError from a predefined error code.\n *\n * This is the primary way to create errors in the Uploadista system. Each error code\n * has a default status and message defined in ERROR_CATALOG, but these can be overridden\n * for specific use cases.\n *\n * @param code - One of the predefined error codes from UploadistaErrorCode\n * @param overrides - Optional overrides for the default error properties\n * @param overrides.status - Custom HTTP status code (overrides the default)\n * @param overrides.body - Custom error message (overrides the default)\n * @param overrides.details - Additional structured data about the error\n * @param overrides.cause - The underlying error that caused this error (for error chaining)\n *\n * @returns A new UploadistaError instance\n *\n * @example\n * ```typescript\n * // Use default error\n * const error = UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n *\n * // Override message\n * const customError = UploadistaError.fromCode(\"FILE_NOT_FOUND\", {\n * body: `File with ID ${fileId} was not found in storage`\n * });\n *\n * // Include cause and details\n * const detailedError = UploadistaError.fromCode(\"DATASTORE_NOT_FOUND\", {\n * cause: storageException,\n * details: { storageId: \"s3-prod\", region: \"us-east-1\" }\n * });\n * ```\n */\n static fromCode(\n code: UploadistaErrorCode,\n overrides?: Partial<Pick<UploadistaError, \"status\" | \"body\">> & {\n details?: unknown;\n cause?: unknown;\n },\n ): UploadistaError {\n const base = ERROR_CATALOG[code];\n return new UploadistaError({\n code,\n status: overrides?.status ?? base.status,\n body: overrides?.body ?? 
base.body,\n details: overrides?.details,\n cause: overrides?.cause,\n });\n }\n\n /**\n * Converts this error to an Effect that immediately fails.\n *\n * This method integrates UploadistaError with Effect-TS's error handling system,\n * allowing errors to be used in Effect pipelines with proper type checking.\n *\n * @template T - The success type of the Effect (defaults to never since it always fails)\n * @returns An Effect that fails with this UploadistaError\n *\n * @example\n * ```typescript\n * const error = UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n *\n * // Use in an Effect pipeline\n * return Effect.gen(function* () {\n * const file = yield* kvStore.get(fileId);\n * if (!file) {\n * return yield* error.toEffect();\n * }\n * return file;\n * });\n * ```\n */\n toEffect<T = never>(): Effect.Effect<T, UploadistaError> {\n return Effect.fail(this);\n }\n}\n\n/**\n * Type guard to check if an unknown value is an UploadistaError.\n *\n * Useful for error handling when catching errors that might be from\n * different sources or libraries.\n *\n * @param error - The value to check\n * @returns True if the value is an UploadistaError instance\n *\n * @example\n * ```typescript\n * try {\n * await someOperation();\n * } catch (error) {\n * if (isUploadistaError(error)) {\n * console.log(`Uploadista error: ${error.code} (${error.status})`);\n * console.log(error.body);\n * } else {\n * console.error(\"Unknown error:\", error);\n * }\n * }\n * ```\n */\nexport function isUploadistaError(error: unknown): error is UploadistaError {\n return error instanceof UploadistaError;\n}\n\n/**\n * Creates an Effect that immediately fails with an UploadistaError.\n *\n * This is a convenience function that combines error creation with Effect conversion.\n * It's equivalent to calling `UploadistaError.fromCode(code, overrides).toEffect()`.\n *\n * @param code - One of the predefined error codes from UploadistaErrorCode\n * @param overrides - Optional overrides for the default error properties\n * @param overrides.status - Custom HTTP status code\n * @param overrides.body - Custom error message\n * @param overrides.details - Additional structured data about the error\n * @param overrides.cause - The underlying error that caused this error\n *\n * @returns An Effect that immediately fails with the created UploadistaError\n *\n * @example\n * ```typescript\n * // In an Effect pipeline\n * return Effect.gen(function* () {\n * const file = yield* kvStore.get(fileId);\n * if (!file) {\n * return yield* httpFailure(\"FILE_NOT_FOUND\", {\n * details: { fileId }\n * });\n * }\n * return file;\n * });\n * ```\n */\nexport function httpFailure(\n code: UploadistaErrorCode,\n overrides?: Partial<Pick<UploadistaError, \"status\" | \"body\">> & {\n details?: unknown;\n cause?: unknown;\n },\n): Effect.Effect<never, UploadistaError> {\n return UploadistaError.fromCode(code, 
overrides).toEffect();\n}\n"],"mappings":"0CAyFA,MAAaA,EAET,CACF,eAAgB,CAAE,OAAQ,IAAK,KAAM;EAAmC,CACxE,QAAS,CAAE,OAAQ,IAAK,KAAM,uCAAwC,CACtE,oBAAqB,CACnB,OAAQ,IACR,KAAM,+CACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM,wCACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,YAAa,CACX,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,gBAAiB,CACf,OAAQ,IACR,KAAM;EACP,CACD,mBAAoB,CAClB,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,4BAA6B,CAC3B,OAAQ,IACR,KAAM;EACP,CACD,6BAA8B,CAC5B,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CAAE,OAAQ,IAAK,KAAM;EAA4B,CACjE,sBAAuB,CACrB,OAAQ,IACR,KAAM;EACP,CACD,gBAAiB,CACf,OAAQ,IACR,KAAM;EACP,CACD,kBAAmB,CAAE,OAAQ,IAAK,KAAM;EAA4B,CACpE,sBAAuB,CAAE,OAAQ,IAAK,KAAM;EAA2B,CACvE,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM,8SACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM;EACP,CACD,uBAAwB,CACtB,OAAQ,IACR,KAAM;EACP,CACD,cAAe,CACb,OAAQ,IACR,KAAM;EACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM;EACP,CACD,kBAAmB,CACjB,OAAQ,IACR,KAAM;EACP,CACD,kBAAmB,CACjB,OAAQ,IACR,KAAM;EACP,CACD,+BAAgC,CAC9B,OAAQ,IACR,KAAM;EACP,CACD,wBAAyB,CACvB,OAAQ,IACR,KAAM;EACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,iCAAkC,CAChC,OAAQ,IACR,KAAM;EACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACD,kBAAmB,CACjB,OAAQ,IACR,KAAM;EACP,CACD,uBAAwB,CACtB,OAAQ,IACR,KAAM;EACP,CACD,mBAAoB,CAClB,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM;EACP,CACD,uBAAwB,CACtB,OAAQ,IACR,KAAM;EACP,CACD,kBAAmB,CACjB,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACD,2BAA4B,CAC1B,OAAQ,IACR,KAAM;EACP,CACD,aAAc,CACZ,OAAQ,IACR,KAAM;EACP,CACD,2BAA4B,CAC1B,OAAQ,IACR,KAAM;EACP,CACD,wBAAyB,CACvB,OAAQ,IACR,KAAM;EACP,CACD,WAAY,CACV,OAAQ,IACR,KAAM;EACP,CACD,cAAe,CACb,OAAQ,IACR,KAAM;EACP,CACD,cAAe,CACb,OAAQ,IACR,KAAM;EACP,CACD,mBAAoB,CAClB,OAAQ,IACR,KAAM;EACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACF,CAwCD,IAAa,EAAb,MAAa,UAAwB,EAAK,YAAY,kBAAkB,AAAC,CACvE,KACA,OAEA,YACA,KACA,QAEA,YAAY,CACV,OACA,SACA,OACA,QACA,WAOC,CACD,OAAO,CACP,KAAK,KAAO,kBACZ,KAAK,KAAO,EACZ,KAAK,OAAS,EACd,KAAK,YAAc,EACnB,KAAK,KAAO,EACZ,KAAK,QAAU,EACX,IAAQ,KAAwC,MAAQ,GAoC9D,OAAO,SACL,EACA,EAIiB,CACjB,IAAM,EAAO,EAAc,GAC3B,OAAO,IAAI,EAAgB,CACzB,OACA,OAAQ,GAAW,QAAU,EAAK,OAClC,KAAM,GAAW,MAAQ,EAAK,KAC9B,QAAS,GAAW,QACpB,MAAO,GAAW,MACnB,CAAC,CA0BJ,UAAyD,CACvD,OAAO,EAAO,KAAK,KAAK,GA2B5B,SAAgB,EAAkB,EAA0C,CAC1E,OAAO,aAAiB,EAgC1B,SAAgB,EACd,EACA,EAIuC,CACvC,OAAO,EAAgB,SAAS,EAAM,EAAU,CAAC,UAAU"}
+
{"version":3,"file":"uploadista-error-CkSxSyNo.mjs","names":[],"sources":["../src/errors/uploadista-error.ts"],"sourcesContent":["import { Data, Effect } from \"effect\";\n\n/**\n * Union type of all possible error codes in the Uploadista system.\n *\n * Each error code corresponds to a specific error condition with predefined\n * HTTP status codes and messages in the ERROR_CATALOG.\n */\nexport type UploadistaErrorCode =\n | \"MISSING_OFFSET\"\n | \"ABORTED\"\n | \"INVALID_TERMINATION\"\n | \"ERR_LOCK_TIMEOUT\"\n | \"INVALID_CONTENT_TYPE\"\n | \"FLOW_STRUCTURE_ERROR\"\n | \"FLOW_CYCLE_ERROR\"\n | \"FLOW_NODE_NOT_FOUND\"\n | \"FLOW_NODE_ERROR\"\n | \"FLOW_NOT_AUTHORIZED\"\n | \"FLOW_NOT_FOUND\"\n | \"FLOW_PAUSED\"\n | \"FLOW_CANCELLED\"\n | \"FILE_READ_ERROR\"\n | \"FLOW_JOB_NOT_FOUND\"\n | \"FLOW_JOB_ERROR\"\n | \"DATASTORE_NOT_FOUND\"\n | \"FILE_NOT_FOUND\"\n | \"INVALID_OFFSET\"\n | \"FILE_NO_LONGER_EXISTS\"\n | \"ERR_SIZE_EXCEEDED\"\n | \"ERR_MAX_SIZE_EXCEEDED\"\n | \"INVALID_LENGTH\"\n | \"INVALID_METADATA\"\n | \"VALIDATION_ERROR\"\n | \"STORAGE_NOT_AUTHORIZED\"\n | \"UNKNOWN_ERROR\"\n | \"FILE_WRITE_ERROR\"\n | \"UPLOAD_ID_NOT_FOUND\"\n | \"FLOW_OUTPUT_VALIDATION_ERROR\"\n | \"FLOW_INPUT_VALIDATION_ERROR\"\n | \"CHECKSUM_MISMATCH\"\n | \"MIMETYPE_MISMATCH\"\n | \"UNSUPPORTED_CHECKSUM_ALGORITHM\"\n | \"VIDEO_PROCESSING_FAILED\"\n | \"INVALID_VIDEO_FORMAT\"\n | \"CODEC_NOT_SUPPORTED\"\n | \"VIDEO_METADATA_EXTRACTION_FAILED\"\n | \"FFMPEG_NOT_INSTALLED\"\n | \"INVALID_NODE_TYPE\"\n | \"TYPE_CATEGORY_MISMATCH\"\n | \"INVALID_INPUT_TYPE\"\n | \"INVALID_OUTPUT_TYPE\"\n | \"OUTPUT_NOT_FOUND\"\n | \"MULTIPLE_OUTPUTS_FOUND\"\n | \"VIRUS_SCAN_FAILED\"\n | \"VIRUS_DETECTED\"\n | \"CLAMAV_NOT_INSTALLED\"\n | \"VIRUS_DEFINITIONS_OUTDATED\"\n | \"SCAN_TIMEOUT\"\n | \"DOCUMENT_PROCESSING_FAILED\"\n | \"INVALID_DOCUMENT_FORMAT\"\n | \"OCR_FAILED\"\n | \"PDF_ENCRYPTED\"\n | \"PDF_CORRUPTED\"\n | \"PAGE_RANGE_INVALID\"\n | \"CIRCUIT_BREAKER_OPEN\";\n\n/**\n * Catalog of all predefined errors in the Uploadista system.\n *\n * Maps error codes to their HTTP status codes and default error messages.\n * This centralized catalog ensures consistent error handling across all\n * Uploadista packages and adapters.\n *\n * Each error entry contains:\n * - `status`: HTTP status code (400-500 range)\n * - `body`: Human-readable error message\n *\n * @example\n * ```typescript\n * // Access a specific error definition\n * const fileNotFound = ERROR_CATALOG.FILE_NOT_FOUND;\n * console.log(fileNotFound.status); // 404\n * console.log(fileNotFound.body); // \"The file for this url was not found\\n\"\n *\n * // Use with UploadistaError\n * const error = UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n * ```\n */\nexport const ERROR_CATALOG: Readonly<\n Record<UploadistaErrorCode, { status: number; body: string }>\n> = {\n MISSING_OFFSET: { status: 403, body: \"Upload-Offset header required\\n\" },\n ABORTED: { status: 400, body: \"Request aborted due to lock acquired\" },\n INVALID_TERMINATION: {\n status: 400,\n body: \"Cannot terminate an already completed upload\",\n },\n ERR_LOCK_TIMEOUT: {\n status: 500,\n body: \"failed to acquire lock before timeout\",\n },\n INVALID_CONTENT_TYPE: {\n status: 403,\n body: \"Content-Type header required\\n\",\n },\n DATASTORE_NOT_FOUND: {\n status: 500,\n body: \"The datastore was not found\\n\",\n },\n UPLOAD_ID_NOT_FOUND: {\n status: 500,\n body: \"The upload id was not found\\n\",\n },\n FILE_NOT_FOUND: {\n status: 404,\n body: \"The file for this url was not found\\n\",\n 
},\n FLOW_NOT_AUTHORIZED: {\n status: 401,\n body: \"The flow is not authorized\\n\",\n },\n FLOW_NOT_FOUND: {\n status: 404,\n body: \"The flow was not found\\n\",\n },\n FLOW_PAUSED: {\n status: 409,\n body: \"The flow execution was paused by user\\n\",\n },\n FLOW_CANCELLED: {\n status: 409,\n body: \"The flow execution was cancelled by user\\n\",\n },\n FLOW_STRUCTURE_ERROR: {\n status: 500,\n body: \"The flow structure is invalid\\n\",\n },\n FLOW_CYCLE_ERROR: {\n status: 500,\n body: \"The flow contains a cycle\\n\",\n },\n FLOW_NODE_NOT_FOUND: {\n status: 500,\n body: \"The flow node was not found\\n\",\n },\n FLOW_NODE_ERROR: {\n status: 500,\n body: \"The flow node failed\\n\",\n },\n FLOW_JOB_NOT_FOUND: {\n status: 404,\n body: \"The flow job was not found\\n\",\n },\n FLOW_JOB_ERROR: {\n status: 500,\n body: \"The flow job failed\\n\",\n },\n FLOW_INPUT_VALIDATION_ERROR: {\n status: 500,\n body: \"The flow input validation failed\\n\",\n },\n FLOW_OUTPUT_VALIDATION_ERROR: {\n status: 500,\n body: \"The flow output validation failed\\n\",\n },\n INVALID_OFFSET: { status: 409, body: \"Upload-Offset conflict\\n\" },\n FILE_NO_LONGER_EXISTS: {\n status: 410,\n body: \"The file for this url no longer exists\\n\",\n },\n FILE_READ_ERROR: {\n status: 500,\n body: \"Something went wrong reading the file\\n\",\n },\n ERR_SIZE_EXCEEDED: { status: 413, body: \"upload's size exceeded\\n\" },\n ERR_MAX_SIZE_EXCEEDED: { status: 413, body: \"Maximum size exceeded\\n\" },\n INVALID_LENGTH: {\n status: 400,\n body: \"Upload-Length or Upload-Defer-Length header required\\n\",\n },\n INVALID_METADATA: {\n status: 400,\n body: \"Upload-Metadata is invalid. It MUST consist of one or more comma-separated key-value pairs. The key and value MUST be separated by a space. The key MUST NOT contain spaces and commas and MUST NOT be empty. The key SHOULD be ASCII encoded and the value MUST be Base64 encoded. 
All keys MUST be unique\",\n },\n VALIDATION_ERROR: {\n status: 400,\n body: \"Validation failed\\n\",\n },\n STORAGE_NOT_AUTHORIZED: {\n status: 401,\n body: \"The storage is not authorized\\n\",\n },\n UNKNOWN_ERROR: {\n status: 500,\n body: \"Something went wrong with that request\\n\",\n },\n FILE_WRITE_ERROR: {\n status: 500,\n body: \"Something went wrong receiving the file\\n\",\n },\n CHECKSUM_MISMATCH: {\n status: 400,\n body: \"The file checksum does not match the provided checksum\\n\",\n },\n MIMETYPE_MISMATCH: {\n status: 400,\n body: \"The file MIME type does not match the declared type\\n\",\n },\n UNSUPPORTED_CHECKSUM_ALGORITHM: {\n status: 400,\n body: \"The specified checksum algorithm is not supported\\n\",\n },\n VIDEO_PROCESSING_FAILED: {\n status: 500,\n body: \"Video processing operation failed\\n\",\n },\n INVALID_VIDEO_FORMAT: {\n status: 400,\n body: \"The video format is not supported\\n\",\n },\n CODEC_NOT_SUPPORTED: {\n status: 400,\n body: \"The specified video codec is not supported\\n\",\n },\n VIDEO_METADATA_EXTRACTION_FAILED: {\n status: 500,\n body: \"Failed to extract video metadata\\n\",\n },\n FFMPEG_NOT_INSTALLED: {\n status: 500,\n body: \"FFmpeg is not installed or not available in PATH\\n\",\n },\n INVALID_NODE_TYPE: {\n status: 500,\n body: \"The specified node type is not registered\\n\",\n },\n TYPE_CATEGORY_MISMATCH: {\n status: 500,\n body: \"Node type category does not match the node configuration\\n\",\n },\n INVALID_INPUT_TYPE: {\n status: 500,\n body: \"The input type is not registered\\n\",\n },\n INVALID_OUTPUT_TYPE: {\n status: 500,\n body: \"The output type is not registered\\n\",\n },\n OUTPUT_NOT_FOUND: {\n status: 404,\n body: \"No output of the specified type was found\\n\",\n },\n MULTIPLE_OUTPUTS_FOUND: {\n status: 409,\n body: \"Multiple outputs of the specified type found, expected single output\\n\",\n },\n VIRUS_SCAN_FAILED: {\n status: 500,\n body: \"Virus scanning operation failed\\n\",\n },\n VIRUS_DETECTED: {\n status: 400,\n body: \"Virus or malware detected in file\\n\",\n },\n CLAMAV_NOT_INSTALLED: {\n status: 500,\n body: \"ClamAV is not installed or not available\\n\",\n },\n VIRUS_DEFINITIONS_OUTDATED: {\n status: 500,\n body: \"Virus definitions are outdated and should be updated\\n\",\n },\n SCAN_TIMEOUT: {\n status: 500,\n body: \"Virus scan exceeded timeout limit\\n\",\n },\n DOCUMENT_PROCESSING_FAILED: {\n status: 500,\n body: \"Document processing operation failed\\n\",\n },\n INVALID_DOCUMENT_FORMAT: {\n status: 400,\n body: \"The document format is not supported\\n\",\n },\n OCR_FAILED: {\n status: 500,\n body: \"OCR operation failed\\n\",\n },\n PDF_ENCRYPTED: {\n status: 400,\n body: \"The PDF is password-protected and cannot be processed\\n\",\n },\n PDF_CORRUPTED: {\n status: 400,\n body: \"The PDF file is corrupted or malformed\\n\",\n },\n PAGE_RANGE_INVALID: {\n status: 400,\n body: \"The specified page range is invalid\\n\",\n },\n CIRCUIT_BREAKER_OPEN: {\n status: 503,\n body: \"Circuit breaker is open - service temporarily unavailable\\n\",\n },\n} as const;\n\n/**\n * Standard error class for all Uploadista operations.\n *\n * UploadistaError provides a consistent error handling approach across the entire\n * Uploadista ecosystem. 
Each error has:\n * - A typed error code from the ERROR_CATALOG\n * - An HTTP-compatible status code\n * - A human-readable error message (body)\n * - Optional additional details and cause information\n *\n * This class integrates with Effect-TS for functional error handling and can be\n * easily converted to an Effect that fails.\n *\n * @example\n * ```typescript\n * // Create from error code\n * const error = UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n *\n * // Create with custom details\n * const customError = UploadistaError.fromCode(\"FLOW_NODE_ERROR\", {\n * body: \"Failed to process image\",\n * cause: originalError,\n * details: { nodeId: \"resize-1\", fileId: \"abc123\" }\n * });\n *\n * // Use with Effect\n * const effect = customError.toEffect<void>();\n *\n * // In an Effect pipeline\n * return Effect.gen(function* () {\n * const file = yield* getFile(id);\n * if (!file) {\n * return yield* UploadistaError.fromCode(\"FILE_NOT_FOUND\").toEffect();\n * }\n * return file;\n * });\n * ```\n */\nexport class UploadistaError extends Data.TaggedError(\"UploadistaError\") {\n readonly code: string;\n readonly status: number;\n // Keep legacy property names for backward compatibility\n readonly status_code: number;\n readonly body: string;\n readonly details?: unknown;\n\n constructor({\n code,\n status,\n body,\n cause,\n details,\n }: {\n code: UploadistaErrorCode | string;\n status: number;\n body: string;\n cause?: unknown;\n details?: unknown;\n }) {\n super();\n this.name = \"UploadistaError\";\n this.code = code;\n this.status = status;\n this.status_code = status; // legacy alias\n this.body = body;\n this.details = details;\n if (cause) (this as unknown as { cause?: unknown }).cause = cause;\n }\n\n /**\n * Creates an UploadistaError from a predefined error code.\n *\n * This is the primary way to create errors in the Uploadista system. Each error code\n * has a default status and message defined in ERROR_CATALOG, but these can be overridden\n * for specific use cases.\n *\n * @param code - One of the predefined error codes from UploadistaErrorCode\n * @param overrides - Optional overrides for the default error properties\n * @param overrides.status - Custom HTTP status code (overrides the default)\n * @param overrides.body - Custom error message (overrides the default)\n * @param overrides.details - Additional structured data about the error\n * @param overrides.cause - The underlying error that caused this error (for error chaining)\n *\n * @returns A new UploadistaError instance\n *\n * @example\n * ```typescript\n * // Use default error\n * const error = UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n *\n * // Override message\n * const customError = UploadistaError.fromCode(\"FILE_NOT_FOUND\", {\n * body: `File with ID ${fileId} was not found in storage`\n * });\n *\n * // Include cause and details\n * const detailedError = UploadistaError.fromCode(\"DATASTORE_NOT_FOUND\", {\n * cause: storageException,\n * details: { storageId: \"s3-prod\", region: \"us-east-1\" }\n * });\n * ```\n */\n static fromCode(\n code: UploadistaErrorCode,\n overrides?: Partial<Pick<UploadistaError, \"status\" | \"body\">> & {\n details?: unknown;\n cause?: unknown;\n },\n ): UploadistaError {\n const base = ERROR_CATALOG[code];\n return new UploadistaError({\n code,\n status: overrides?.status ?? base.status,\n body: overrides?.body ?? 
base.body,\n details: overrides?.details,\n cause: overrides?.cause,\n });\n }\n\n /**\n * Converts this error to an Effect that immediately fails.\n *\n * This method integrates UploadistaError with Effect-TS's error handling system,\n * allowing errors to be used in Effect pipelines with proper type checking.\n *\n * @template T - The success type of the Effect (defaults to never since it always fails)\n * @returns An Effect that fails with this UploadistaError\n *\n * @example\n * ```typescript\n * const error = UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n *\n * // Use in an Effect pipeline\n * return Effect.gen(function* () {\n * const file = yield* kvStore.get(fileId);\n * if (!file) {\n * return yield* error.toEffect();\n * }\n * return file;\n * });\n * ```\n */\n toEffect<T = never>(): Effect.Effect<T, UploadistaError> {\n return Effect.fail(this);\n }\n}\n\n/**\n * Type guard to check if an unknown value is an UploadistaError.\n *\n * Useful for error handling when catching errors that might be from\n * different sources or libraries.\n *\n * @param error - The value to check\n * @returns True if the value is an UploadistaError instance\n *\n * @example\n * ```typescript\n * try {\n * await someOperation();\n * } catch (error) {\n * if (isUploadistaError(error)) {\n * console.log(`Uploadista error: ${error.code} (${error.status})`);\n * console.log(error.body);\n * } else {\n * console.error(\"Unknown error:\", error);\n * }\n * }\n * ```\n */\nexport function isUploadistaError(error: unknown): error is UploadistaError {\n return error instanceof UploadistaError;\n}\n\n/**\n * Creates an Effect that immediately fails with an UploadistaError.\n *\n * This is a convenience function that combines error creation with Effect conversion.\n * It's equivalent to calling `UploadistaError.fromCode(code, overrides).toEffect()`.\n *\n * @param code - One of the predefined error codes from UploadistaErrorCode\n * @param overrides - Optional overrides for the default error properties\n * @param overrides.status - Custom HTTP status code\n * @param overrides.body - Custom error message\n * @param overrides.details - Additional structured data about the error\n * @param overrides.cause - The underlying error that caused this error\n *\n * @returns An Effect that immediately fails with the created UploadistaError\n *\n * @example\n * ```typescript\n * // In an Effect pipeline\n * return Effect.gen(function* () {\n * const file = yield* kvStore.get(fileId);\n * if (!file) {\n * return yield* httpFailure(\"FILE_NOT_FOUND\", {\n * details: { fileId }\n * });\n * }\n * return file;\n * });\n * ```\n */\nexport function httpFailure(\n code: UploadistaErrorCode,\n overrides?: Partial<Pick<UploadistaError, \"status\" | \"body\">> & {\n details?: unknown;\n cause?: unknown;\n },\n): Effect.Effect<never, UploadistaError> {\n return UploadistaError.fromCode(code, 
overrides).toEffect();\n}\n"],"mappings":"0CAyFA,MAAa,EAET,CACF,eAAgB,CAAE,OAAQ,IAAK,KAAM;EAAmC,CACxE,QAAS,CAAE,OAAQ,IAAK,KAAM,uCAAwC,CACtE,oBAAqB,CACnB,OAAQ,IACR,KAAM,+CACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM,wCACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,YAAa,CACX,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,gBAAiB,CACf,OAAQ,IACR,KAAM;EACP,CACD,mBAAoB,CAClB,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,4BAA6B,CAC3B,OAAQ,IACR,KAAM;EACP,CACD,6BAA8B,CAC5B,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CAAE,OAAQ,IAAK,KAAM;EAA4B,CACjE,sBAAuB,CACrB,OAAQ,IACR,KAAM;EACP,CACD,gBAAiB,CACf,OAAQ,IACR,KAAM;EACP,CACD,kBAAmB,CAAE,OAAQ,IAAK,KAAM;EAA4B,CACpE,sBAAuB,CAAE,OAAQ,IAAK,KAAM;EAA2B,CACvE,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM,8SACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM;EACP,CACD,uBAAwB,CACtB,OAAQ,IACR,KAAM;EACP,CACD,cAAe,CACb,OAAQ,IACR,KAAM;EACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM;EACP,CACD,kBAAmB,CACjB,OAAQ,IACR,KAAM;EACP,CACD,kBAAmB,CACjB,OAAQ,IACR,KAAM;EACP,CACD,+BAAgC,CAC9B,OAAQ,IACR,KAAM;EACP,CACD,wBAAyB,CACvB,OAAQ,IACR,KAAM;EACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,iCAAkC,CAChC,OAAQ,IACR,KAAM;EACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACD,kBAAmB,CACjB,OAAQ,IACR,KAAM;EACP,CACD,uBAAwB,CACtB,OAAQ,IACR,KAAM;EACP,CACD,mBAAoB,CAClB,OAAQ,IACR,KAAM;EACP,CACD,oBAAqB,CACnB,OAAQ,IACR,KAAM;EACP,CACD,iBAAkB,CAChB,OAAQ,IACR,KAAM;EACP,CACD,uBAAwB,CACtB,OAAQ,IACR,KAAM;EACP,CACD,kBAAmB,CACjB,OAAQ,IACR,KAAM;EACP,CACD,eAAgB,CACd,OAAQ,IACR,KAAM;EACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACD,2BAA4B,CAC1B,OAAQ,IACR,KAAM;EACP,CACD,aAAc,CACZ,OAAQ,IACR,KAAM;EACP,CACD,2BAA4B,CAC1B,OAAQ,IACR,KAAM;EACP,CACD,wBAAyB,CACvB,OAAQ,IACR,KAAM;EACP,CACD,WAAY,CACV,OAAQ,IACR,KAAM;EACP,CACD,cAAe,CACb,OAAQ,IACR,KAAM;EACP,CACD,cAAe,CACb,OAAQ,IACR,KAAM;EACP,CACD,mBAAoB,CAClB,OAAQ,IACR,KAAM;EACP,CACD,qBAAsB,CACpB,OAAQ,IACR,KAAM;EACP,CACF,CAwCD,IAAa,EAAb,MAAa,UAAwB,EAAK,YAAY,kBAAkB,AAAC,CACvE,KACA,OAEA,YACA,KACA,QAEA,YAAY,CACV,OACA,SACA,OACA,QACA,WAOC,CACD,OAAO,CACP,KAAK,KAAO,kBACZ,KAAK,KAAO,EACZ,KAAK,OAAS,EACd,KAAK,YAAc,EACnB,KAAK,KAAO,EACZ,KAAK,QAAU,EACX,IAAQ,KAAwC,MAAQ,GAoC9D,OAAO,SACL,EACA,EAIiB,CACjB,IAAM,EAAO,EAAc,GAC3B,OAAO,IAAI,EAAgB,CACzB,OACA,OAAQ,GAAW,QAAU,EAAK,OAClC,KAAM,GAAW,MAAQ,EAAK,KAC9B,QAAS,GAAW,QACpB,MAAO,GAAW,MACnB,CAAC,CA0BJ,UAAyD,CACvD,OAAO,EAAO,KAAK,KAAK,GA2B5B,SAAgB,EAAkB,EAA0C,CAC1E,OAAO,aAAiB,EAgC1B,SAAgB,EACd,EACA,EAIuC,CACvC,OAAO,EAAgB,SAAS,EAAM,EAAU,CAAC,UAAU"}
package/dist/utils/index.cjs
CHANGED
@@ -1 +1 @@
- require(`../uploadista-error-…
+ require(`../uploadista-error-CLWoRAAr.cjs`);const e=require(`../checksum-CTpNXWEL.cjs`),t=require(`../utils-CvZJUNEo.cjs`);exports.GenerateId=e.r,exports.GenerateIdLive=e.i,exports.GenerateIdRandom=e.a,exports.GenerateIdService=e.o,exports.GenerateIdTimestamp=e.s,exports.Md5=t.a,exports.OnceEffect=t.r,exports.ThrottleEffect=t.t,exports.computeChecksum=e.t,exports.isSupportedAlgorithm=e.n,exports.once=t.i,exports.throttle=t.n;
package/dist/utils/index.d.mts
CHANGED
@@ -1,3 +1,3 @@
- import "../uploadista-error-…
- import { a as Md5, c as isSupportedAlgorithm, d as GenerateIdRandom, f as GenerateIdService, i as once, l as GenerateId, m as GenerateIdTimestamp, n as throttle, o as ChecksumAlgorithm, p as GenerateIdShape, r as OnceEffect, s as computeChecksum, t as ThrottleEffect, u as GenerateIdLive } from "../index-…
+ import "../uploadista-error-B1qbOy9N.mjs";
+ import { a as Md5, c as isSupportedAlgorithm, d as GenerateIdRandom, f as GenerateIdService, i as once, l as GenerateId, m as GenerateIdTimestamp, n as throttle, o as ChecksumAlgorithm, p as GenerateIdShape, r as OnceEffect, s as computeChecksum, t as ThrottleEffect, u as GenerateIdLive } from "../index-D8MZ6P3o.mjs";
  export { ChecksumAlgorithm, GenerateId, GenerateIdLive, GenerateIdRandom, GenerateIdService, GenerateIdShape, GenerateIdTimestamp, Md5, OnceEffect, ThrottleEffect, computeChecksum, isSupportedAlgorithm, once, throttle };
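The declaration diff above only re-points chunk hashes; the exported utils surface (Md5, computeChecksum, isSupportedAlgorithm, once, OnceEffect, throttle, ThrottleEffect, and the GenerateId family) is unchanged. A short usage sketch of once, whose semantics are recoverable from the minified chunk further down: the wrapped function runs once, and a repeat call returns the cached result when it is truthy but throws "Function called more than once" otherwise. The subpath import is an assumption:

```typescript
// Sketch assuming the utils are exposed at "@uploadista/core/utils".
import { once } from "@uploadista/core/utils";

const loadConfig = once(() => {
  console.log("reading config"); // runs only on the first call
  return { chunkSize: 8 * 1024 * 1024 };
});

const a = loadConfig(); // logs, returns the config object
const b = loadConfig(); // returns the cached object (truthy), does not re-run
console.log(a === b);   // true
```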
package/dist/utils/index.mjs
CHANGED
@@ -1 +1 @@
- import"../uploadista-error-CkSxSyNo.mjs";import{a as e,i as t,n,o as r,r as i,s as a,t as o}from"../checksum-COoD-F1l.mjs";import{a as s,i as c,n as l,r as u,t as d}from"../utils-…
+ import"../uploadista-error-CkSxSyNo.mjs";import{a as e,i as t,n,o as r,r as i,s as a,t as o}from"../checksum-COoD-F1l.mjs";import{a as s,i as c,n as l,r as u,t as d}from"../utils-DVwfrVBJ.mjs";export{i as GenerateId,t as GenerateIdLive,e as GenerateIdRandom,r as GenerateIdService,a as GenerateIdTimestamp,s as Md5,u as OnceEffect,d as ThrottleEffect,o as computeChecksum,n as isSupportedAlgorithm,c as once,l as throttle};
package/dist/utils-CvZJUNEo.cjs
CHANGED
@@ -1 +1 @@
- const e=require(`./types-…
+ const e=require(`./types-BF_tvkRh.cjs`),t=require(`./uploadista-error-CLWoRAAr.cjs`);let n=require(`effect`);var r=class e{static hashStr(t,n=!1){return e.onePassHasher.start().appendStr(t).end(n)}static hashAsciiStr(t,n=!1){return e.onePassHasher.start().appendAsciiStr(t).end(n)}static stateIdentity=new Int32Array([1732584193,-271733879,-1732584194,271733878]);static buffer32Identity=new Int32Array([0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]);static hexChars=`0123456789abcdef`;static hexOut=[];static onePassHasher=new e;static _hex(t){let n=e.hexChars,r=e.hexOut,i,a,o,s;for(s=0;s<4;s+=1)for(a=s*8,i=t[s]??0,o=0;o<8;o+=2)r[a+1+o]=n.charAt(i&15),i>>>=4,r[a+0+o]=n.charAt(i&15),i>>>=4;return r.join(``)}static _md5cycle(e,t){let n=e[0]??0,r=e[1]??0,i=e[2]??0,a=e[3]??0;n+=(r&i|~r&a)+(t[0]??0)-680876936|0,n=(n<<7|n>>>25)+r|0,a+=(n&r|~n&i)+(t[1]??0)-389564586|0,a=(a<<12|a>>>20)+n|0,i+=(a&n|~a&r)+(t[2]??0)+606105819|0,i=(i<<17|i>>>15)+a|0,r+=(i&a|~i&n)+(t[3]??0)-1044525330|0,r=(r<<22|r>>>10)+i|0,n+=(r&i|~r&a)+(t[4]??0)-176418897|0,n=(n<<7|n>>>25)+r|0,a+=(n&r|~n&i)+(t[5]??0)+1200080426|0,a=(a<<12|a>>>20)+n|0,i+=(a&n|~a&r)+(t[6]??0)-1473231341|0,i=(i<<17|i>>>15)+a|0,r+=(i&a|~i&n)+(t[7]??0)-45705983|0,r=(r<<22|r>>>10)+i|0,n+=(r&i|~r&a)+(t[8]??0)+1770035416|0,n=(n<<7|n>>>25)+r|0,a+=(n&r|~n&i)+(t[9]??0)-1958414417|0,a=(a<<12|a>>>20)+n|0,i+=(a&n|~a&r)+(t[10]??0)-42063|0,i=(i<<17|i>>>15)+a|0,r+=(i&a|~i&n)+(t[11]??0)-1990404162|0,r=(r<<22|r>>>10)+i|0,n+=(r&i|~r&a)+(t[12]??0)+1804603682|0,n=(n<<7|n>>>25)+r|0,a+=(n&r|~n&i)+(t[13]??0)-40341101|0,a=(a<<12|a>>>20)+n|0,i+=(a&n|~a&r)+(t[14]??0)-1502002290|0,i=(i<<17|i>>>15)+a|0,r+=(i&a|~i&n)+(t[15]??0)+1236535329|0,r=(r<<22|r>>>10)+i|0,n+=(r&a|i&~a)+(t[1]??0)-165796510|0,n=(n<<5|n>>>27)+r|0,a+=(n&i|r&~i)+(t[6]??0)-1069501632|0,a=(a<<9|a>>>23)+n|0,i+=(a&r|n&~r)+(t[11]??0)+643717713|0,i=(i<<14|i>>>18)+a|0,r+=(i&n|a&~n)+(t[0]??0)-373897302|0,r=(r<<20|r>>>12)+i|0,n+=(r&a|i&~a)+(t[5]??0)-701558691|0,n=(n<<5|n>>>27)+r|0,a+=(n&i|r&~i)+(t[10]??0)+38016083|0,a=(a<<9|a>>>23)+n|0,i+=(a&r|n&~r)+(t[15]??0)-660478335|0,i=(i<<14|i>>>18)+a|0,r+=(i&n|a&~n)+(t[4]??0)-405537848|0,r=(r<<20|r>>>12)+i|0,n+=(r&a|i&~a)+(t[9]??0)+568446438|0,n=(n<<5|n>>>27)+r|0,a+=(n&i|r&~i)+(t[14]??0)-1019803690|0,a=(a<<9|a>>>23)+n|0,i+=(a&r|n&~r)+(t[3]??0)-187363961|0,i=(i<<14|i>>>18)+a|0,r+=(i&n|a&~n)+(t[8]??0)+1163531501|0,r=(r<<20|r>>>12)+i|0,n+=(r&a|i&~a)+(t[13]??0)-1444681467|0,n=(n<<5|n>>>27)+r|0,a+=(n&i|r&~i)+(t[2]??0)-51403784|0,a=(a<<9|a>>>23)+n|0,i+=(a&r|n&~r)+(t[7]??0)+1735328473|0,i=(i<<14|i>>>18)+a|0,r+=(i&n|a&~n)+(t[12]??0)-1926607734|0,r=(r<<20|r>>>12)+i|0,n+=(r^i^a)+(t[5]??0)-378558|0,n=(n<<4|n>>>28)+r|0,a+=(n^r^i)+(t[8]??0)-2022574463|0,a=(a<<11|a>>>21)+n|0,i+=(a^n^r)+(t[11]??0)+1839030562|0,i=(i<<16|i>>>16)+a|0,r+=(i^a^n)+(t[14]??0)-35309556|0,r=(r<<23|r>>>9)+i|0,n+=(r^i^a)+(t[1]??0)-1530992060|0,n=(n<<4|n>>>28)+r|0,a+=(n^r^i)+(t[4]??0)+1272893353|0,a=(a<<11|a>>>21)+n|0,i+=(a^n^r)+(t[7]??0)-155497632|0,i=(i<<16|i>>>16)+a|0,r+=(i^a^n)+(t[10]??0)-1094730640|0,r=(r<<23|r>>>9)+i|0,n+=(r^i^a)+(t[13]??0)+681279174|0,n=(n<<4|n>>>28)+r|0,a+=(n^r^i)+(t[0]??0)-358537222|0,a=(a<<11|a>>>21)+n|0,i+=(a^n^r)+(t[3]??0)-722521979|0,i=(i<<16|i>>>16)+a|0,r+=(i^a^n)+(t[6]??0)+76029189|0,r=(r<<23|r>>>9)+i|0,n+=(r^i^a)+(t[9]??0)-640364487|0,n=(n<<4|n>>>28)+r|0,a+=(n^r^i)+(t[12]??0)-421815835|0,a=(a<<11|a>>>21)+n|0,i+=(a^n^r)+(t[15]??0)+530742520|0,i=(i<<16|i>>>16)+a|0,r+=(i^a^n)+(t[2]??0)-995338651|0,r=(r<<23|r>>>9)+i|0,n+=(i^(r|~a))+(t[0]??0)-198630844|0,n=(n<<6|n>>>26)+r|0,a+=(r^(n|~i))+(t[7]??0)+1126891415|0,a=(a<<10|a>>>22)+n|0,i+=(n^(a|~r))+(t[14]??0)-1416354905|0,i=(i<<15|i>>>17)+a|0,r+=(a^(i|~n))+(t[5]??0)-57434055|0,r=(r<<21|r>>>11)+i|0,n+=(i^(r|~a))+(t[12]??0)+1700485571|0,n=(n<<6|n>>>26)+r|0,a+=(r^(n|~i))+(t[3]??0)-1894986606|0,a=(a<<10|a>>>22)+n|0,i+=(n^(a|~r))+(t[10]??0)-1051523|0,i=(i<<15|i>>>17)+a|0,r+=(a^(i|~n))+(t[1]??0)-2054922799|0,r=(r<<21|r>>>11)+i|0,n+=(i^(r|~a))+(t[8]??0)+1873313359|0,n=(n<<6|n>>>26)+r|0,a+=(r^(n|~i))+(t[15]??0)-30611744|0,a=(a<<10|a>>>22)+n|0,i+=(n^(a|~r))+(t[6]??0)-1560198380|0,i=(i<<15|i>>>17)+a|0,r+=(a^(i|~n))+(t[13]??0)+1309151649|0,r=(r<<21|r>>>11)+i|0,n+=(i^(r|~a))+(t[4]??0)-145523070|0,n=(n<<6|n>>>26)+r|0,a+=(r^(n|~i))+(t[11]??0)-1120210379|0,a=(a<<10|a>>>22)+n|0,i+=(n^(a|~r))+(t[2]??0)+718787259|0,i=(i<<15|i>>>17)+a|0,r+=(a^(i|~n))+(t[9]??0)-343485551|0,r=(r<<21|r>>>11)+i|0,e[0]=n+(e[0]??0)|0,e[1]=r+(e[1]??0)|0,e[2]=i+(e[2]??0)|0,e[3]=a+(e[3]??0)|0}_dataLength=0;_bufferLength=0;_state=new Int32Array(4);_buffer=new ArrayBuffer(68);_buffer8;_buffer32;constructor(){this._buffer8=new Uint8Array(this._buffer,0,68),this._buffer32=new Uint32Array(this._buffer,0,17),this.start()}start(){return this._dataLength=0,this._bufferLength=0,this._state.set(e.stateIdentity),this}appendStr(t){let n=this._buffer8,r=this._buffer32,i=this._bufferLength,a,o;for(o=0;o<t.length;o+=1){if(a=t.charCodeAt(o),a<128)n[i++]=a;else if(a<2048)n[i++]=(a>>>6)+192,n[i++]=a&63|128;else if(a<55296||a>56319)n[i++]=(a>>>12)+224,n[i++]=a>>>6&63|128,n[i++]=a&63|128;else{if(a=(a-55296)*1024+(t.charCodeAt(++o)-56320)+65536,a>1114111)throw Error(`Unicode standard supports code points up to U+10FFFF`);n[i++]=(a>>>18)+240,n[i++]=a>>>12&63|128,n[i++]=a>>>6&63|128,n[i++]=a&63|128}i>=64&&(this._dataLength+=64,e._md5cycle(this._state,r),i-=64,r[0]=r[16]??0)}return this._bufferLength=i,this}appendAsciiStr(t){let n=this._buffer8,r=this._buffer32,i=this._bufferLength,a,o=0;for(;;){for(a=Math.min(t.length-o,64-i);a--;)n[i++]=t.charCodeAt(o++);if(i<64)break;this._dataLength+=64,e._md5cycle(this._state,r),i=0}return this._bufferLength=i,this}appendByteArray(t){let n=this._buffer8,r=this._buffer32,i=this._bufferLength,a,o=0;for(;;){for(a=Math.min(t.length-o,64-i);a--;)n[i++]=t[o++]??0;if(i<64)break;this._dataLength+=64,e._md5cycle(this._state,r),i=0}return this._bufferLength=i,this}getState(){let e=this._state;return{buffer:String.fromCharCode.apply(null,Array.from(this._buffer8)),buflen:this._bufferLength,length:this._dataLength,state:[e[0]??0,e[1]??0,e[2]??0,e[3]??0]}}setState(e){let t=e.buffer,n=e.state,r=this._state,i;for(this._dataLength=e.length,this._bufferLength=e.buflen,r[0]=n[0]??0,r[1]=n[1]??0,r[2]=n[2]??0,r[3]=n[3]??0,i=0;i<t.length;i+=1)this._buffer8[i]=t.charCodeAt(i)}end(t=!1){let n=this._bufferLength,r=this._buffer8,i=this._buffer32,a=(n>>2)+1;this._dataLength+=n;let o=this._dataLength*8;if(r[n]=128,r[n+1]=r[n+2]=r[n+3]=0,i.set(e.buffer32Identity.subarray(a),a),n>55&&(e._md5cycle(this._state,i),i.set(e.buffer32Identity)),o<=4294967295)i[14]=o;else{let e=o.toString(16).match(/(.*?)(.{0,8})$/);if(e===null)return;let t=Number.parseInt(e[2]??``,16),n=Number.parseInt(e[1]??``,16)||0;i[14]=t,i[15]=n}return e._md5cycle(this._state,i),t?this._state:e._hex(this._state)}};if(r.hashStr(`hello`)!==`5d41402abc4b2a76b9719d911017c592`)throw Error(`Md5 self test failed.`);function i(e){let t=!1,n;return function(...r){if(t){if(n)return n;throw Error(`Function called more than once`)}return t=!0,n=e.apply(this,r),n}}const a={make:e=>{let r,i=!1;return n.Effect.gen(function*(){if(i){if(r!==void 0)return r;yield*new t.n({code:`UNKNOWN_ERROR`,status:500,body:`Effect called more than once with undefined result`}).toEffect()}return i=!0,r=yield*e,r})},legacy:i},o={leading:!1,trailing:!0};function s(e,t,n={}){let r={...o,...n},i;return function(...n){r.leading&&!i?(console.log(`leading`),e.apply(this,n),i=setTimeout(()=>{i=void 0},t)):(clearTimeout(i),i=setTimeout(()=>{r.trailing?(e.apply(this,n),r.leading&&setTimeout(()=>{i=void 0},t)):i=void 0},t))}}function c(e,t,{leading:n=!0,trailing:r=!0}={}){return s(e,t,{leading:n,trailing:r})}const l={legacy:c};Object.defineProperty(exports,`a`,{enumerable:!0,get:function(){return r}}),Object.defineProperty(exports,`i`,{enumerable:!0,get:function(){return i}}),Object.defineProperty(exports,`n`,{enumerable:!0,get:function(){return c}}),Object.defineProperty(exports,`r`,{enumerable:!0,get:function(){return a}}),Object.defineProperty(exports,`t`,{enumerable:!0,get:function(){return l}});
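De-minified, the OnceEffect.make helper in the chunk above wraps an Effect so it executes at most once: later runs return the cached value, and a cached undefined fails with UNKNOWN_ERROR ("Effect called more than once with undefined result"). A readable sketch; the import subpath is an assumption:

```typescript
import { Effect } from "effect";
import { OnceEffect } from "@uploadista/core/utils"; // subpath assumed

// Placeholder effect standing in for real one-time work.
const fetchSettings = Effect.sync(() => ({ maxRetries: 3 }));

const program = Effect.gen(function* () {
  const memoized = OnceEffect.make(fetchSettings);
  const first = yield* memoized;  // runs fetchSettings
  const second = yield* memoized; // served from the closure's cache
  return first === second;        // true: the same cached object
});
```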
package/dist/utils-DVwfrVBJ.mjs
CHANGED
@@ -1,2 +1,2 @@
  import{n as e}from"./uploadista-error-CkSxSyNo.mjs";import{Effect as t}from"effect";var n=class e{static hashStr(t,n=!1){return e.onePassHasher.start().appendStr(t).end(n)}static hashAsciiStr(t,n=!1){return e.onePassHasher.start().appendAsciiStr(t).end(n)}static stateIdentity=new Int32Array([1732584193,-271733879,-1732584194,271733878]);static buffer32Identity=new Int32Array([0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]);static hexChars=`0123456789abcdef`;static hexOut=[];static onePassHasher=new e;static _hex(t){let n=e.hexChars,r=e.hexOut,i,a,o,s;for(s=0;s<4;s+=1)for(a=s*8,i=t[s]??0,o=0;o<8;o+=2)r[a+1+o]=n.charAt(i&15),i>>>=4,r[a+0+o]=n.charAt(i&15),i>>>=4;return r.join(``)}static _md5cycle(e,t){let n=e[0]??0,r=e[1]??0,i=e[2]??0,a=e[3]??0;n+=(r&i|~r&a)+(t[0]??0)-680876936|0,n=(n<<7|n>>>25)+r|0,a+=(n&r|~n&i)+(t[1]??0)-389564586|0,a=(a<<12|a>>>20)+n|0,i+=(a&n|~a&r)+(t[2]??0)+606105819|0,i=(i<<17|i>>>15)+a|0,r+=(i&a|~i&n)+(t[3]??0)-1044525330|0,r=(r<<22|r>>>10)+i|0,n+=(r&i|~r&a)+(t[4]??0)-176418897|0,n=(n<<7|n>>>25)+r|0,a+=(n&r|~n&i)+(t[5]??0)+1200080426|0,a=(a<<12|a>>>20)+n|0,i+=(a&n|~a&r)+(t[6]??0)-1473231341|0,i=(i<<17|i>>>15)+a|0,r+=(i&a|~i&n)+(t[7]??0)-45705983|0,r=(r<<22|r>>>10)+i|0,n+=(r&i|~r&a)+(t[8]??0)+1770035416|0,n=(n<<7|n>>>25)+r|0,a+=(n&r|~n&i)+(t[9]??0)-1958414417|0,a=(a<<12|a>>>20)+n|0,i+=(a&n|~a&r)+(t[10]??0)-42063|0,i=(i<<17|i>>>15)+a|0,r+=(i&a|~i&n)+(t[11]??0)-1990404162|0,r=(r<<22|r>>>10)+i|0,n+=(r&i|~r&a)+(t[12]??0)+1804603682|0,n=(n<<7|n>>>25)+r|0,a+=(n&r|~n&i)+(t[13]??0)-40341101|0,a=(a<<12|a>>>20)+n|0,i+=(a&n|~a&r)+(t[14]??0)-1502002290|0,i=(i<<17|i>>>15)+a|0,r+=(i&a|~i&n)+(t[15]??0)+1236535329|0,r=(r<<22|r>>>10)+i|0,n+=(r&a|i&~a)+(t[1]??0)-165796510|0,n=(n<<5|n>>>27)+r|0,a+=(n&i|r&~i)+(t[6]??0)-1069501632|0,a=(a<<9|a>>>23)+n|0,i+=(a&r|n&~r)+(t[11]??0)+643717713|0,i=(i<<14|i>>>18)+a|0,r+=(i&n|a&~n)+(t[0]??0)-373897302|0,r=(r<<20|r>>>12)+i|0,n+=(r&a|i&~a)+(t[5]??0)-701558691|0,n=(n<<5|n>>>27)+r|0,a+=(n&i|r&~i)+(t[10]??0)+38016083|0,a=(a<<9|a>>>23)+n|0,i+=(a&r|n&~r)+(t[15]??0)-660478335|0,i=(i<<14|i>>>18)+a|0,r+=(i&n|a&~n)+(t[4]??0)-405537848|0,r=(r<<20|r>>>12)+i|0,n+=(r&a|i&~a)+(t[9]??0)+568446438|0,n=(n<<5|n>>>27)+r|0,a+=(n&i|r&~i)+(t[14]??0)-1019803690|0,a=(a<<9|a>>>23)+n|0,i+=(a&r|n&~r)+(t[3]??0)-187363961|0,i=(i<<14|i>>>18)+a|0,r+=(i&n|a&~n)+(t[8]??0)+1163531501|0,r=(r<<20|r>>>12)+i|0,n+=(r&a|i&~a)+(t[13]??0)-1444681467|0,n=(n<<5|n>>>27)+r|0,a+=(n&i|r&~i)+(t[2]??0)-51403784|0,a=(a<<9|a>>>23)+n|0,i+=(a&r|n&~r)+(t[7]??0)+1735328473|0,i=(i<<14|i>>>18)+a|0,r+=(i&n|a&~n)+(t[12]??0)-1926607734|0,r=(r<<20|r>>>12)+i|0,n+=(r^i^a)+(t[5]??0)-378558|0,n=(n<<4|n>>>28)+r|0,a+=(n^r^i)+(t[8]??0)-2022574463|0,a=(a<<11|a>>>21)+n|0,i+=(a^n^r)+(t[11]??0)+1839030562|0,i=(i<<16|i>>>16)+a|0,r+=(i^a^n)+(t[14]??0)-35309556|0,r=(r<<23|r>>>9)+i|0,n+=(r^i^a)+(t[1]??0)-1530992060|0,n=(n<<4|n>>>28)+r|0,a+=(n^r^i)+(t[4]??0)+1272893353|0,a=(a<<11|a>>>21)+n|0,i+=(a^n^r)+(t[7]??0)-155497632|0,i=(i<<16|i>>>16)+a|0,r+=(i^a^n)+(t[10]??0)-1094730640|0,r=(r<<23|r>>>9)+i|0,n+=(r^i^a)+(t[13]??0)+681279174|0,n=(n<<4|n>>>28)+r|0,a+=(n^r^i)+(t[0]??0)-358537222|0,a=(a<<11|a>>>21)+n|0,i+=(a^n^r)+(t[3]??0)-722521979|0,i=(i<<16|i>>>16)+a|0,r+=(i^a^n)+(t[6]??0)+76029189|0,r=(r<<23|r>>>9)+i|0,n+=(r^i^a)+(t[9]??0)-640364487|0,n=(n<<4|n>>>28)+r|0,a+=(n^r^i)+(t[12]??0)-421815835|0,a=(a<<11|a>>>21)+n|0,i+=(a^n^r)+(t[15]??0)+530742520|0,i=(i<<16|i>>>16)+a|0,r+=(i^a^n)+(t[2]??0)-995338651|0,r=(r<<23|r>>>9)+i|0,n+=(i^(r|~a))+(t[0]??0)-198630844|0,n=(n<<6|n>>>26)+r|0,a+=(r^(n|~i))+(t[7]??0)+1126891415|0,a=(a<<10|a>>>22)+n|0,i+=(n^(a|~r))+(t[14]??0)-1416354905|0,i=(i<<15|i>>>17)+a|0,r+=(a^(i|~n))+(t[5]??0)-57434055|0,r=(r<<21|r>>>11)+i|0,n+=(i^(r|~a))+(t[12]??0)+1700485571|0,n=(n<<6|n>>>26)+r|0,a+=(r^(n|~i))+(t[3]??0)-1894986606|0,a=(a<<10|a>>>22)+n|0,i+=(n^(a|~r))+(t[10]??0)-1051523|0,i=(i<<15|i>>>17)+a|0,r+=(a^(i|~n))+(t[1]??0)-2054922799|0,r=(r<<21|r>>>11)+i|0,n+=(i^(r|~a))+(t[8]??0)+1873313359|0,n=(n<<6|n>>>26)+r|0,a+=(r^(n|~i))+(t[15]??0)-30611744|0,a=(a<<10|a>>>22)+n|0,i+=(n^(a|~r))+(t[6]??0)-1560198380|0,i=(i<<15|i>>>17)+a|0,r+=(a^(i|~n))+(t[13]??0)+1309151649|0,r=(r<<21|r>>>11)+i|0,n+=(i^(r|~a))+(t[4]??0)-145523070|0,n=(n<<6|n>>>26)+r|0,a+=(r^(n|~i))+(t[11]??0)-1120210379|0,a=(a<<10|a>>>22)+n|0,i+=(n^(a|~r))+(t[2]??0)+718787259|0,i=(i<<15|i>>>17)+a|0,r+=(a^(i|~n))+(t[9]??0)-343485551|0,r=(r<<21|r>>>11)+i|0,e[0]=n+(e[0]??0)|0,e[1]=r+(e[1]??0)|0,e[2]=i+(e[2]??0)|0,e[3]=a+(e[3]??0)|0}_dataLength=0;_bufferLength=0;_state=new Int32Array(4);_buffer=new ArrayBuffer(68);_buffer8;_buffer32;constructor(){this._buffer8=new Uint8Array(this._buffer,0,68),this._buffer32=new Uint32Array(this._buffer,0,17),this.start()}start(){return this._dataLength=0,this._bufferLength=0,this._state.set(e.stateIdentity),this}appendStr(t){let n=this._buffer8,r=this._buffer32,i=this._bufferLength,a,o;for(o=0;o<t.length;o+=1){if(a=t.charCodeAt(o),a<128)n[i++]=a;else if(a<2048)n[i++]=(a>>>6)+192,n[i++]=a&63|128;else if(a<55296||a>56319)n[i++]=(a>>>12)+224,n[i++]=a>>>6&63|128,n[i++]=a&63|128;else{if(a=(a-55296)*1024+(t.charCodeAt(++o)-56320)+65536,a>1114111)throw Error(`Unicode standard supports code points up to U+10FFFF`);n[i++]=(a>>>18)+240,n[i++]=a>>>12&63|128,n[i++]=a>>>6&63|128,n[i++]=a&63|128}i>=64&&(this._dataLength+=64,e._md5cycle(this._state,r),i-=64,r[0]=r[16]??0)}return this._bufferLength=i,this}appendAsciiStr(t){let n=this._buffer8,r=this._buffer32,i=this._bufferLength,a,o=0;for(;;){for(a=Math.min(t.length-o,64-i);a--;)n[i++]=t.charCodeAt(o++);if(i<64)break;this._dataLength+=64,e._md5cycle(this._state,r),i=0}return this._bufferLength=i,this}appendByteArray(t){let n=this._buffer8,r=this._buffer32,i=this._bufferLength,a,o=0;for(;;){for(a=Math.min(t.length-o,64-i);a--;)n[i++]=t[o++]??0;if(i<64)break;this._dataLength+=64,e._md5cycle(this._state,r),i=0}return this._bufferLength=i,this}getState(){let e=this._state;return{buffer:String.fromCharCode.apply(null,Array.from(this._buffer8)),buflen:this._bufferLength,length:this._dataLength,state:[e[0]??0,e[1]??0,e[2]??0,e[3]??0]}}setState(e){let t=e.buffer,n=e.state,r=this._state,i;for(this._dataLength=e.length,this._bufferLength=e.buflen,r[0]=n[0]??0,r[1]=n[1]??0,r[2]=n[2]??0,r[3]=n[3]??0,i=0;i<t.length;i+=1)this._buffer8[i]=t.charCodeAt(i)}end(t=!1){let n=this._bufferLength,r=this._buffer8,i=this._buffer32,a=(n>>2)+1;this._dataLength+=n;let o=this._dataLength*8;if(r[n]=128,r[n+1]=r[n+2]=r[n+3]=0,i.set(e.buffer32Identity.subarray(a),a),n>55&&(e._md5cycle(this._state,i),i.set(e.buffer32Identity)),o<=4294967295)i[14]=o;else{let e=o.toString(16).match(/(.*?)(.{0,8})$/);if(e===null)return;let t=Number.parseInt(e[2]??``,16),n=Number.parseInt(e[1]??``,16)||0;i[14]=t,i[15]=n}return e._md5cycle(this._state,i),t?this._state:e._hex(this._state)}};if(n.hashStr(`hello`)!==`5d41402abc4b2a76b9719d911017c592`)throw Error(`Md5 self test failed.`);function r(e){let t=!1,n;return function(...r){if(t){if(n)return n;throw Error(`Function called more than once`)}return t=!0,n=e.apply(this,r),n}}const i={make:n=>{let r,i=!1;return t.gen(function*(){if(i){if(r!==void 0)return r;yield*new e({code:`UNKNOWN_ERROR`,status:500,body:`Effect called more than once with undefined result`}).toEffect()}return i=!0,r=yield*n,r})},legacy:r},a={leading:!1,trailing:!0};function o(e,t,n={}){let r={...a,...n},i;return function(...n){r.leading&&!i?(console.log(`leading`),e.apply(this,n),i=setTimeout(()=>{i=void 0},t)):(clearTimeout(i),i=setTimeout(()=>{r.trailing?(e.apply(this,n),r.leading&&setTimeout(()=>{i=void 0},t)):i=void 0},t))}}function s(e,t,{leading:n=!0,trailing:r=!0}={}){return o(e,t,{leading:n,trailing:r})}const c={legacy:s};export{n as a,r as i,s as n,i as r,c as t};
- //# sourceMappingURL=utils-…
+ //# sourceMappingURL=utils-DVwfrVBJ.mjs.map
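Last, the throttle helper in this renamed ESM chunk: the inner implementation defaults to { leading: false, trailing: true } and resets its timer on every call (debounce-style trailing), while the exported throttle defaults both options to true; note the shipped leading branch still contains a console.log(`leading`) debug statement. A hedged usage sketch, with the import subpath again assumed:

```typescript
import { throttle } from "@uploadista/core/utils"; // subpath assumed

// Rate-limit progress reporting during an upload: the leading call fires
// immediately, and the trailing call fires after ~250 ms of quiet.
const reportProgress = throttle(
  (bytes: number) => console.log(`uploaded ${bytes} bytes`),
  250,
  { leading: true, trailing: true },
);

for (let i = 1; i <= 100; i++) reportProgress(i * 1024);
```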