@uploadista/data-store-filesystem 0.0.3 → 0.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,22 @@
 
 
- > @uploadista/data-store-filesystem@0.0.2 build /Users/denislaboureyras/Documents/uploadista/dev/uploadista-workspace/uploadista-sdk/packages/data-stores/filesystem
- > tsc -b
+ > @uploadista/data-store-filesystem@0.0.3 build /Users/denislaboureyras/Documents/uploadista/dev/uploadista-workspace/uploadista-sdk/packages/data-stores/filesystem
+ > tsdown
 
+ ℹ tsdown v0.15.9 powered by rolldown v1.0.0-beta.44
+ ℹ Using tsdown config: /Users/denislaboureyras/Documents/uploadista/dev/uploadista-workspace/uploadista-sdk/packages/data-stores/filesystem/tsdown.config.ts
+ ℹ entry: src/index.ts
+ ℹ tsconfig: tsconfig.json
+ ℹ Build start
+ ℹ Cleaning 7 files
+ ℹ [CJS] dist/index.cjs 5.69 kB │ gzip: 1.99 kB
+ ℹ [CJS] 1 files, total: 5.69 kB
+ ℹ [CJS] dist/index.d.cts.map 0.47 kB │ gzip: 0.26 kB
+ ℹ [CJS] dist/index.d.cts 0.77 kB │ gzip: 0.35 kB
+ ℹ [CJS] 2 files, total: 1.24 kB
+ ℹ [ESM] dist/index.js  4.54 kB │ gzip: 1.75 kB
+ ℹ [ESM] dist/index.js.map 17.47 kB │ gzip: 4.55 kB
+ ℹ [ESM] dist/index.d.ts.map  0.47 kB │ gzip: 0.26 kB
+ ℹ [ESM] dist/index.d.ts  0.77 kB │ gzip: 0.34 kB
+ ℹ [ESM] 4 files, total: 23.25 kB
+ ✔ Build complete in 10893ms
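The log above shows the build now emitting both module formats from a single entry. A minimal consumption sketch, assuming the package's entry points resolve dist/index.js for ESM and dist/index.cjs for CJS (the exports wiring itself is not part of the diffed hunks):

// ESM consumers (resolved to dist/index.js)
import { createFileStore, fileStore } from "@uploadista/data-store-filesystem";

// CJS consumers get the same named exports from dist/index.cjs, e.g.
// const { fileStore } = require("@uploadista/data-store-filesystem");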
package/dist/index.cjs ADDED
@@ -0,0 +1 @@
+ var e=Object.create,t=Object.defineProperty,n=Object.getOwnPropertyDescriptor,r=Object.getOwnPropertyNames,i=Object.getPrototypeOf,a=Object.prototype.hasOwnProperty,o=(e,i,o,s)=>{if(i&&typeof i==`object`||typeof i==`function`)for(var c=r(i),l=0,u=c.length,d;l<u;l++)d=c[l],!a.call(e,d)&&d!==o&&t(e,d,{get:(e=>i[e]).bind(null,d),enumerable:!(s=n(i,d))||s.enumerable});return e},s=(n,r,a)=>(a=n==null?{}:e(i(n)),o(r||!n||!n.__esModule?t(a,`default`,{value:n,enumerable:!0}):a,n));let c=require(`node:fs`);c=s(c);let l=require(`node:fs/promises`);l=s(l);let u=require(`node:path`);u=s(u);let d=require(`@uploadista/core/errors`);d=s(d);let f=require(`@uploadista/core/types`);f=s(f);let p=require(`@uploadista/observability`);p=s(p);let m=require(`effect`);m=s(m);const h=`0777`,g=`EEXIST`,_=e=>m.Effect.tryPromise({try:()=>l.default.mkdir(e,{mode:`0777`,recursive:!0}),catch:e=>e instanceof Error&&`code`in e&&e.code===`EEXIST`?new d.UploadistaError({code:`UNKNOWN_ERROR`,status:200,body:`Directory already exists`,details:`Directory already exists`}):new d.UploadistaError({code:`UNKNOWN_ERROR`,status:500,body:`Failed to create directory`,details:`Directory creation failed: ${String(e)}`})}).pipe(m.Effect.orElse(()=>m.Effect.void)),v=(e,t)=>m.Effect.sync(()=>c.default.createWriteStream(e,{flags:`r+`,start:t})),y=({writeStream:e,bytesReceived:t,onProgress:n})=>r=>m.Effect.gen(function*(){yield*m.Effect.async(t=>{e.write(r,e=>{t(e?m.Effect.fail(new d.UploadistaError({code:`FILE_WRITE_ERROR`,status:500,body:`Failed to write chunk`,details:`Chunk write failed: ${String(e)}`})):m.Effect.succeed(void 0))})}),yield*m.Ref.update(t,e=>e+r.length),n?.(r.length)}),b=e=>m.Effect.async(t=>{e.end(e=>{t(e?m.Effect.fail(new d.UploadistaError({code:`FILE_WRITE_ERROR`,status:500,body:`Failed to close write stream`,details:`Stream close failed: ${String(e)}`})):m.Effect.succeed(void 0))})}),x=e=>m.Effect.sync(()=>{e.destroyed||e.destroy()}),S=({directory:e,deliveryUrl:t})=>m.Effect.gen(function*(){yield*_(e);let n=yield*f.UploadFileKVStore,r=()=>({supportsParallelUploads:!1,supportsConcatenation:!1,supportsDeferredLength:!1,supportsResumableUploads:!0,supportsTransactionalUploads:!1,maxConcurrentUploads:1,minChunkSize:void 0,maxChunkSize:void 0,maxParts:void 0,optimalChunkSize:1024*1024,requiresOrderedChunks:!0,requiresMimeTypeValidation:!0,maxValidationSize:void 0});return{bucket:e,create:r=>{let i=(r.metadata?.fileName?.toString())?.split(`.`).pop(),a=r.id.split(`/`).slice(0,-1),o=u.default.join(e,i?`${r.id}.${i}`:r.id);return m.Effect.gen(function*(){yield*(0,p.filesystemUploadRequestsTotal)(m.Effect.succeed(1)),yield*(0,p.filesystemActiveUploadsGauge)(m.Effect.succeed(1)),yield*(0,p.filesystemFileSizeHistogram)(m.Effect.succeed(r.size||0)),yield*m.Effect.tryPromise({try:()=>l.default.mkdir(u.default.join(e,...a),{recursive:!0}),catch:e=>(m.Effect.runSync((0,p.trackFilesystemError)(`create`,e,{upload_id:r.id,path:o})),new d.UploadistaError({code:`UNKNOWN_ERROR`,status:500,body:`Failed to create file directory`,details:`Directory creation failed: ${String(e)}`}))}),yield*m.Effect.tryPromise({try:()=>l.default.writeFile(o,``),catch:e=>(m.Effect.runSync((0,p.trackFilesystemError)(`create`,e,{upload_id:r.id,path:o})),new d.UploadistaError({code:`UNKNOWN_ERROR`,status:500,body:`Failed to create file`,details:`File creation failed: ${String(e)}`}))});let s=i?`${r.id}.${i}`:r.id;return r.storage={id:s,type:r.storage.type,path:o,bucket:e},r.url=`${t}/${s}`,yield*n.set(r.id,r),r})},remove:t=>m.Effect.gen(function*(){let 
r=(yield*n.get(t)).storage.path||u.default.join(e,t);yield*m.Effect.tryPromise({try:()=>l.default.unlink(r),catch:e=>(m.Effect.runSync((0,p.trackFilesystemError)(`remove`,e,{upload_id:t,path:r})),d.UploadistaError.fromCode(`FILE_NOT_FOUND`))}),yield*n.delete(t),yield*(0,p.filesystemActiveUploadsGauge)(m.Effect.succeed(-1))}),write:({file_id:t,stream:r,offset:i},{onProgress:a})=>(0,p.withFilesystemUploadMetrics)(t,(0,p.withFilesystemTimingMetrics)(p.filesystemUploadDurationHistogram,m.Effect.gen(function*(){let o=Date.now(),s=yield*n.get(t),c=s.storage.path||u.default.join(e,t),l=yield*m.Ref.make(0);try{let e=yield*m.Effect.acquireUseRelease(v(c,i),e=>m.Effect.gen(function*(){let t=m.Sink.forEach(y({writeStream:e,bytesReceived:l,onProgress:a}));yield*(0,p.filesystemUploadPartsTotal)(m.Effect.succeed(1)),yield*m.Stream.run(r,t),yield*b(e);let n=yield*m.Ref.get(l);return yield*(0,p.filesystemPartSizeHistogram)(m.Effect.succeed(n)),i+n}),x);return s.size&&e===s.size&&(yield*(0,p.logFilesystemUploadCompletion)(t,{fileSize:s.size,totalDurationMs:Date.now()-o,partsCount:1,averagePartSize:s.size,throughputBps:s.size/(Date.now()-o),retryCount:0}),yield*(0,p.filesystemUploadSuccessTotal)(m.Effect.succeed(1)),yield*(0,p.filesystemActiveUploadsGauge)(m.Effect.succeed(-1))),e}catch(e){throw m.Effect.runSync((0,p.trackFilesystemError)(`write`,e,{upload_id:t,path:c,offset:i})),e}}))),getUpload:t=>m.Effect.gen(function*(){let r=yield*n.get(t),i=r.storage.path||u.default.join(e,t),a=yield*m.Effect.tryPromise({try:()=>l.default.stat(i),catch:()=>d.UploadistaError.fromCode(`FILE_NOT_FOUND`)});return{...r,offset:a.size,size:r.size}}),read:t=>m.Effect.gen(function*(){let r=(yield*n.get(t)).storage.path||u.default.join(e,t),i=yield*m.Effect.tryPromise({try:()=>l.default.readFile(r),catch:()=>d.UploadistaError.fromCode(`FILE_READ_ERROR`)});return new Uint8Array(i)}),getCapabilities:r,validateUploadStrategy:e=>{let t=r();switch(e){case`parallel`:return m.Effect.succeed(t.supportsParallelUploads);case`single`:return m.Effect.succeed(!0);default:return m.Effect.succeed(!1)}}}}),C=e=>S(e);exports.createFileStore=S,exports.fileStore=C;
package/dist/index.d.cts ADDED
@@ -0,0 +1,21 @@
+ import { DataStore, UploadFile, UploadFileKVStore } from "@uploadista/core/types";
+ import { Effect } from "effect";
+
+ //#region src/file-store.d.ts
+ type FileStoreOptions = {
+ directory: string;
+ deliveryUrl: string;
+ };
+ /**
+ * A data store that stores files in the filesystem.
+ * @param options - The options for the file store.
+ * @returns A data store that stores files in the filesystem.
+ */
+ declare const createFileStore: ({
+ directory,
+ deliveryUrl
+ }: FileStoreOptions) => Effect.Effect<DataStore<UploadFile>, never, UploadFileKVStore>;
+ declare const fileStore: (options: FileStoreOptions) => Effect.Effect<DataStore<UploadFile>, never, UploadFileKVStore>;
+ //#endregion
+ export { FileStoreOptions, createFileStore, fileStore };
+ //# sourceMappingURL=index.d.cts.map
package/dist/index.d.cts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.cts","names":[],"sources":["../src/file-store.ts"],"sourcesContent":[],"mappings":";;;;KA2BY,gBAAA;EAAA,SAAA,EAAA,MAAA;EA6GC,WAAA,EAAA,MA0OT;CA1O4B;;;;;;AAA4C,cAA/D,eAA+D,EAAA,CAAA;EAAA,SAAA;EAAA;AAAA,CAAA,EAAhB,gBAAgB,EAAA,GAAA,MAAA,CAAA,MAAA,CAAA,SAAA,CAAA,UAAA,CAAA,EAAA,KAAA,EAAA,iBAAA,CAAA;AAAA,cA4O/D,SA5O+D,EAAA,CAAA,OAAA,EA4OzC,gBA5OyC,EAAA,GA4OzB,MAAA,CAAA,MA5OyB,CA4OzB,SA5OyB,CA4OzB,UA5OyB,CAAA,EAAA,KAAA,EA4OzB,iBA5OyB,CAAA"}
package/dist/index.d.ts CHANGED
@@ -1,2 +1,21 @@
- export * from "./file-store";
+ import { DataStore, UploadFile, UploadFileKVStore } from "@uploadista/core/types";
+ import { Effect } from "effect";
+
+ //#region src/file-store.d.ts
+ type FileStoreOptions = {
+ directory: string;
+ deliveryUrl: string;
+ };
+ /**
+ * A data store that stores files in the filesystem.
+ * @param options - The options for the file store.
+ * @returns A data store that stores files in the filesystem.
+ */
+ declare const createFileStore: ({
+ directory,
+ deliveryUrl
+ }: FileStoreOptions) => Effect.Effect<DataStore<UploadFile>, never, UploadFileKVStore>;
+ declare const fileStore: (options: FileStoreOptions) => Effect.Effect<DataStore<UploadFile>, never, UploadFileKVStore>;
+ //#endregion
+ export { FileStoreOptions, createFileStore, fileStore };
  //# sourceMappingURL=index.d.ts.map
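Per the declarations above, both factories return an Effect that still requires an UploadFileKVStore service. A usage sketch with hypothetical directory and delivery URL values (the KV-store implementation comes from outside this package and is not shown in the diff):

import { Effect } from "effect";
import { fileStore } from "@uploadista/data-store-filesystem";

// Effect.Effect<DataStore<UploadFile>, never, UploadFileKVStore>
const makeStore = fileStore({
  directory: "/var/data/uploads",           // hypothetical local path
  deliveryUrl: "https://files.example.com", // hypothetical public base URL
});

// Running it requires providing an UploadFileKVStore implementation, e.g.
// Effect.runPromise(makeStore.pipe(Effect.provide(someUploadFileKVStoreLayer)));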
package/dist/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,cAAc,CAAC"}
+ {"version":3,"file":"index.d.ts","names":[],"sources":["../src/file-store.ts"],"sourcesContent":[],"mappings":";;;;KA2BY,gBAAA;EAAA,SAAA,EAAA,MAAA;EA6GC,WAAA,EAAA,MA0OT;CA1O4B;;;;;;AAA4C,cAA/D,eAA+D,EAAA,CAAA;EAAA,SAAA;EAAA;AAAA,CAAA,EAAhB,gBAAgB,EAAA,GAAA,MAAA,CAAA,MAAA,CAAA,SAAA,CAAA,UAAA,CAAA,EAAA,KAAA,EAAA,iBAAA,CAAA;AAAA,cA4O/D,SA5O+D,EAAA,CAAA,OAAA,EA4OzC,gBA5OyC,EAAA,GA4OzB,MAAA,CAAA,MA5OyB,CA4OzB,SA5OyB,CA4OzB,UA5OyB,CAAA,EAAA,KAAA,EA4OzB,iBA5OyB,CAAA"}
package/dist/index.js CHANGED
@@ -1 +1,2 @@
- export * from "./file-store";
+ import e from"node:fs";import t from"node:fs/promises";import n from"node:path";import{UploadistaError as r}from"@uploadista/core/errors";import{UploadFileKVStore as i}from"@uploadista/core/types";import{filesystemActiveUploadsGauge as a,filesystemFileSizeHistogram as o,filesystemPartSizeHistogram as s,filesystemUploadDurationHistogram as c,filesystemUploadPartsTotal as l,filesystemUploadRequestsTotal as u,filesystemUploadSuccessTotal as d,logFilesystemUploadCompletion as f,trackFilesystemError as p,withFilesystemTimingMetrics as m,withFilesystemUploadMetrics as h}from"@uploadista/observability";import{Effect as g,Ref as _,Sink as v,Stream as y}from"effect";const b=e=>g.tryPromise({try:()=>t.mkdir(e,{mode:`0777`,recursive:!0}),catch:e=>e instanceof Error&&`code`in e&&e.code===`EEXIST`?new r({code:`UNKNOWN_ERROR`,status:200,body:`Directory already exists`,details:`Directory already exists`}):new r({code:`UNKNOWN_ERROR`,status:500,body:`Failed to create directory`,details:`Directory creation failed: ${String(e)}`})}).pipe(g.orElse(()=>g.void)),x=(t,n)=>g.sync(()=>e.createWriteStream(t,{flags:`r+`,start:n})),S=({writeStream:e,bytesReceived:t,onProgress:n})=>i=>g.gen(function*(){yield*g.async(t=>{e.write(i,e=>{t(e?g.fail(new r({code:`FILE_WRITE_ERROR`,status:500,body:`Failed to write chunk`,details:`Chunk write failed: ${String(e)}`})):g.succeed(void 0))})}),yield*_.update(t,e=>e+i.length),n?.(i.length)}),C=e=>g.async(t=>{e.end(e=>{t(e?g.fail(new r({code:`FILE_WRITE_ERROR`,status:500,body:`Failed to close write stream`,details:`Stream close failed: ${String(e)}`})):g.succeed(void 0))})}),w=e=>g.sync(()=>{e.destroyed||e.destroy()}),T=({directory:e,deliveryUrl:T})=>g.gen(function*(){yield*b(e);let E=yield*i,D=()=>({supportsParallelUploads:!1,supportsConcatenation:!1,supportsDeferredLength:!1,supportsResumableUploads:!0,supportsTransactionalUploads:!1,maxConcurrentUploads:1,minChunkSize:void 0,maxChunkSize:void 0,maxParts:void 0,optimalChunkSize:1024*1024,requiresOrderedChunks:!0,requiresMimeTypeValidation:!0,maxValidationSize:void 0});return{bucket:e,create:i=>{let s=(i.metadata?.fileName?.toString())?.split(`.`).pop(),c=i.id.split(`/`).slice(0,-1),l=n.join(e,s?`${i.id}.${s}`:i.id);return g.gen(function*(){yield*u(g.succeed(1)),yield*a(g.succeed(1)),yield*o(g.succeed(i.size||0)),yield*g.tryPromise({try:()=>t.mkdir(n.join(e,...c),{recursive:!0}),catch:e=>(g.runSync(p(`create`,e,{upload_id:i.id,path:l})),new r({code:`UNKNOWN_ERROR`,status:500,body:`Failed to create file directory`,details:`Directory creation failed: ${String(e)}`}))}),yield*g.tryPromise({try:()=>t.writeFile(l,``),catch:e=>(g.runSync(p(`create`,e,{upload_id:i.id,path:l})),new r({code:`UNKNOWN_ERROR`,status:500,body:`Failed to create file`,details:`File creation failed: ${String(e)}`}))});let d=s?`${i.id}.${s}`:i.id;return i.storage={id:d,type:i.storage.type,path:l,bucket:e},i.url=`${T}/${d}`,yield*E.set(i.id,i),i})},remove:i=>g.gen(function*(){let o=(yield*E.get(i)).storage.path||n.join(e,i);yield*g.tryPromise({try:()=>t.unlink(o),catch:e=>(g.runSync(p(`remove`,e,{upload_id:i,path:o})),r.fromCode(`FILE_NOT_FOUND`))}),yield*E.delete(i),yield*a(g.succeed(-1))}),write:({file_id:t,stream:r,offset:i},{onProgress:o})=>h(t,m(c,g.gen(function*(){let c=Date.now(),u=yield*E.get(t),m=u.storage.path||n.join(e,t),h=yield*_.make(0);try{let e=yield*g.acquireUseRelease(x(m,i),e=>g.gen(function*(){let t=v.forEach(S({writeStream:e,bytesReceived:h,onProgress:o}));yield*l(g.succeed(1)),yield*y.run(r,t),yield*C(e);let n=yield*_.get(h);return 
yield*s(g.succeed(n)),i+n}),w);return u.size&&e===u.size&&(yield*f(t,{fileSize:u.size,totalDurationMs:Date.now()-c,partsCount:1,averagePartSize:u.size,throughputBps:u.size/(Date.now()-c),retryCount:0}),yield*d(g.succeed(1)),yield*a(g.succeed(-1))),e}catch(e){throw g.runSync(p(`write`,e,{upload_id:t,path:m,offset:i})),e}}))),getUpload:i=>g.gen(function*(){let a=yield*E.get(i),o=a.storage.path||n.join(e,i),s=yield*g.tryPromise({try:()=>t.stat(o),catch:()=>r.fromCode(`FILE_NOT_FOUND`)});return{...a,offset:s.size,size:a.size}}),read:i=>g.gen(function*(){let a=(yield*E.get(i)).storage.path||n.join(e,i),o=yield*g.tryPromise({try:()=>t.readFile(a),catch:()=>r.fromCode(`FILE_READ_ERROR`)});return new Uint8Array(o)}),getCapabilities:D,validateUploadStrategy:e=>{let t=D();switch(e){case`parallel`:return g.succeed(t.supportsParallelUploads);case`single`:return g.succeed(!0);default:return g.succeed(!1)}}}}),E=e=>T(e);export{T as createFileStore,E as fileStore};
+ //# sourceMappingURL=index.js.map
package/dist/index.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.js","names":["uploadRequestsTotal","activeUploadsGauge","fileSizeHistogram","withUploadMetrics","withTimingMetrics","uploadDurationHistogram","uploadPartsTotal","partSizeHistogram","uploadSuccessTotal"],"sources":["../src/file-store.ts"],"sourcesContent":["import fs from \"node:fs\";\nimport fsProm from \"node:fs/promises\";\nimport path from \"node:path\";\nimport { UploadistaError } from \"@uploadista/core/errors\";\nimport type {\n DataStore,\n DataStoreCapabilities,\n DataStoreWriteOptions,\n UploadFile,\n UploadStrategy,\n} from \"@uploadista/core/types\";\nimport { UploadFileKVStore } from \"@uploadista/core/types\";\nimport {\n filesystemActiveUploadsGauge as activeUploadsGauge,\n filesystemFileSizeHistogram as fileSizeHistogram,\n logFilesystemUploadCompletion,\n filesystemPartSizeHistogram as partSizeHistogram,\n trackFilesystemError,\n filesystemUploadDurationHistogram as uploadDurationHistogram,\n filesystemUploadPartsTotal as uploadPartsTotal,\n filesystemUploadRequestsTotal as uploadRequestsTotal,\n filesystemUploadSuccessTotal as uploadSuccessTotal,\n withFilesystemTimingMetrics as withTimingMetrics,\n withFilesystemUploadMetrics as withUploadMetrics,\n} from \"@uploadista/observability\";\nimport { Effect, Ref, Sink, Stream } from \"effect\";\n\nexport type FileStoreOptions = {\n directory: string;\n deliveryUrl: string;\n};\n\nconst MASK = \"0777\";\nconst IGNORED_MKDIR_ERROR = \"EEXIST\";\n// const FILE_DOESNT_EXIST = \"ENOENT\";\n\nconst checkOrCreateDirectory = (directory: string) =>\n Effect.tryPromise({\n try: () => fsProm.mkdir(directory, { mode: MASK, recursive: true }),\n catch: (error) => {\n if (\n error instanceof Error &&\n \"code\" in error &&\n error.code === IGNORED_MKDIR_ERROR\n ) {\n // Directory already exists, not an error\n return new UploadistaError({\n code: \"UNKNOWN_ERROR\",\n status: 200,\n body: \"Directory already exists\",\n details: \"Directory already exists\",\n });\n }\n return new UploadistaError({\n code: \"UNKNOWN_ERROR\",\n status: 500,\n body: \"Failed to create directory\",\n details: `Directory creation failed: ${String(error)}`,\n });\n },\n }).pipe(Effect.orElse(() => Effect.void));\n\nconst createWriteStream = (file_path: string, offset: number) =>\n Effect.sync(() =>\n fs.createWriteStream(file_path, {\n flags: \"r+\",\n start: offset,\n }),\n );\n\nconst writeChunk =\n ({\n writeStream,\n bytesReceived,\n onProgress,\n }: {\n writeStream: fs.WriteStream;\n bytesReceived: Ref.Ref<number>;\n onProgress?: (chunkSize: number) => void;\n }) =>\n (chunk: Uint8Array) =>\n Effect.gen(function* () {\n yield* Effect.async<void, UploadistaError>((resume) => {\n writeStream.write(chunk, (err) => {\n if (err) {\n resume(\n Effect.fail(\n new UploadistaError({\n code: \"FILE_WRITE_ERROR\",\n status: 500,\n body: \"Failed to write chunk\",\n details: `Chunk write failed: ${String(err)}`,\n }),\n ),\n );\n } else {\n resume(Effect.succeed(void 0));\n }\n });\n });\n\n yield* Ref.update(bytesReceived, (size) => size + chunk.length);\n onProgress?.(chunk.length);\n });\n\nconst endWriteStream = (writeStream: fs.WriteStream) =>\n Effect.async<void, UploadistaError>((resume) => {\n writeStream.end((err: Error | null | undefined) => {\n if (err) {\n resume(\n Effect.fail(\n new UploadistaError({\n code: \"FILE_WRITE_ERROR\",\n status: 500,\n body: \"Failed to close write stream\",\n details: `Stream close failed: ${String(err)}`,\n }),\n ),\n );\n } else {\n resume(Effect.succeed(void 0));\n }\n });\n });\n\nconst 
destroyWriteStream = (writeStream: fs.WriteStream) =>\n Effect.sync(() => {\n if (!writeStream.destroyed) {\n writeStream.destroy();\n }\n });\n/**\n * A data store that stores files in the filesystem.\n * @param options - The options for the file store.\n * @returns A data store that stores files in the filesystem.\n */\nexport const createFileStore = ({ directory, deliveryUrl }: FileStoreOptions) =>\n Effect.gen(function* () {\n yield* checkOrCreateDirectory(directory);\n const kvStore = yield* UploadFileKVStore;\n\n const getCapabilities = (): DataStoreCapabilities => {\n return {\n supportsParallelUploads: false, // Filesystem operations are sequential\n supportsConcatenation: false, // No native concatenation support\n supportsDeferredLength: false,\n supportsResumableUploads: true, // Can write at specific offsets\n supportsTransactionalUploads: false,\n maxConcurrentUploads: 1, // Sequential writes only\n minChunkSize: undefined,\n maxChunkSize: undefined,\n maxParts: undefined,\n optimalChunkSize: 1024 * 1024, // 1MB default\n requiresOrderedChunks: true, // Sequential offset-based writes\n requiresMimeTypeValidation: true,\n maxValidationSize: undefined, // no size limit\n };\n };\n\n const validateUploadStrategy = (\n strategy: UploadStrategy,\n ): Effect.Effect<boolean, never> => {\n const capabilities = getCapabilities();\n\n switch (strategy) {\n case \"parallel\":\n return Effect.succeed(capabilities.supportsParallelUploads);\n case \"single\":\n return Effect.succeed(true);\n default:\n return Effect.succeed(false);\n }\n };\n\n return {\n bucket: directory,\n create: (\n file: UploadFile,\n ): Effect.Effect<UploadFile, UploadistaError> => {\n const fileName = file.metadata?.fileName?.toString();\n const fileExtension = fileName?.split(\".\").pop();\n\n const dirs = file.id.split(\"/\").slice(0, -1);\n const filePath = path.join(\n directory,\n fileExtension ? `${file.id}.${fileExtension}` : file.id,\n );\n\n return Effect.gen(function* () {\n yield* uploadRequestsTotal(Effect.succeed(1));\n yield* activeUploadsGauge(Effect.succeed(1));\n yield* fileSizeHistogram(Effect.succeed(file.size || 0));\n\n yield* Effect.tryPromise({\n try: () =>\n fsProm.mkdir(path.join(directory, ...dirs), {\n recursive: true,\n }),\n catch: (error) => {\n Effect.runSync(\n trackFilesystemError(\"create\", error, {\n upload_id: file.id,\n path: filePath,\n }),\n );\n return new UploadistaError({\n code: \"UNKNOWN_ERROR\",\n status: 500,\n body: \"Failed to create file directory\",\n details: `Directory creation failed: ${String(error)}`,\n });\n },\n });\n\n yield* Effect.tryPromise({\n try: () => fsProm.writeFile(filePath, \"\"),\n catch: (error) => {\n Effect.runSync(\n trackFilesystemError(\"create\", error, {\n upload_id: file.id,\n path: filePath,\n }),\n );\n return new UploadistaError({\n code: \"UNKNOWN_ERROR\",\n status: 500,\n body: \"Failed to create file\",\n details: `File creation failed: ${String(error)}`,\n });\n },\n });\n\n const fileId = fileExtension\n ? 
`${file.id}.${fileExtension}`\n : file.id;\n file.storage = {\n id: fileId,\n type: file.storage.type,\n path: filePath,\n bucket: directory,\n };\n file.url = `${deliveryUrl}/${fileId}`;\n\n // Store file metadata in KV store\n yield* kvStore.set(file.id, file);\n\n return file;\n });\n },\n remove: (file_id: string): Effect.Effect<void, UploadistaError> => {\n return Effect.gen(function* () {\n const uploadFile = yield* kvStore.get(file_id);\n const file_path =\n uploadFile.storage.path || path.join(directory, file_id);\n\n yield* Effect.tryPromise({\n try: () => fsProm.unlink(file_path),\n catch: (error) => {\n Effect.runSync(\n trackFilesystemError(\"remove\", error, {\n upload_id: file_id,\n path: file_path,\n }),\n );\n return UploadistaError.fromCode(\"FILE_NOT_FOUND\");\n },\n });\n\n yield* kvStore.delete(file_id);\n yield* activeUploadsGauge(Effect.succeed(-1));\n });\n },\n write: (\n { file_id, stream, offset }: DataStoreWriteOptions,\n { onProgress }: { onProgress?: (chunkSize: number) => void },\n ): Effect.Effect<number, UploadistaError> => {\n return withUploadMetrics(\n file_id,\n withTimingMetrics(\n uploadDurationHistogram,\n Effect.gen(function* () {\n const startTime = Date.now();\n // Get the upload file from KV store to retrieve the actual file path\n const uploadFile = yield* kvStore.get(file_id);\n const file_path =\n uploadFile.storage.path || path.join(directory, file_id);\n\n const bytesReceived = yield* Ref.make(0);\n\n try {\n const result = yield* Effect.acquireUseRelease(\n createWriteStream(file_path, offset),\n (writeStream) =>\n Effect.gen(function* () {\n const sink = Sink.forEach(\n writeChunk({ writeStream, bytesReceived, onProgress }),\n );\n\n yield* uploadPartsTotal(Effect.succeed(1));\n yield* Stream.run(stream, sink);\n yield* endWriteStream(writeStream);\n\n const totalBytes = yield* Ref.get(bytesReceived);\n yield* partSizeHistogram(Effect.succeed(totalBytes));\n return offset + totalBytes;\n }),\n destroyWriteStream,\n );\n\n // Check if upload is complete\n if (uploadFile.size && result === uploadFile.size) {\n yield* logFilesystemUploadCompletion(file_id, {\n fileSize: uploadFile.size,\n totalDurationMs: Date.now() - startTime,\n partsCount: 1,\n averagePartSize: uploadFile.size,\n throughputBps: uploadFile.size / (Date.now() - startTime),\n retryCount: 0,\n });\n yield* uploadSuccessTotal(Effect.succeed(1));\n yield* activeUploadsGauge(Effect.succeed(-1));\n }\n\n return result;\n } catch (error) {\n Effect.runSync(\n trackFilesystemError(\"write\", error, {\n upload_id: file_id,\n path: file_path,\n offset,\n }),\n );\n throw error;\n }\n }),\n ),\n );\n },\n getUpload: (id: string) =>\n Effect.gen(function* () {\n const uploadFile = yield* kvStore.get(id);\n\n // For filesystem, get the actual file size from disk\n const file_path = uploadFile.storage.path || path.join(directory, id);\n const stats = yield* Effect.tryPromise({\n try: () => fsProm.stat(file_path),\n catch: () => UploadistaError.fromCode(\"FILE_NOT_FOUND\"),\n });\n\n return {\n ...uploadFile,\n offset: stats.size,\n size: uploadFile.size,\n };\n }),\n read: (id: string) =>\n Effect.gen(function* () {\n const uploadFile = yield* kvStore.get(id);\n const file_path = uploadFile.storage.path || path.join(directory, id);\n\n const buffer = yield* Effect.tryPromise({\n try: () => fsProm.readFile(file_path),\n catch: () => UploadistaError.fromCode(\"FILE_READ_ERROR\"),\n });\n\n return new Uint8Array(buffer);\n }),\n getCapabilities,\n validateUploadStrategy,\n } as 
DataStore<UploadFile>;\n });\n\nexport const fileStore = (options: FileStoreOptions) =>\n createFileStore(options);\n"],"mappings":"0pBAgCA,MAIM,EAA0B,GAC9B,EAAO,WAAW,CAChB,QAAW,EAAO,MAAM,EAAW,CAAE,KAAM,OAAM,UAAW,GAAM,CAAC,CACnE,MAAQ,GAEJ,aAAiB,OACjB,SAAU,GACV,EAAM,OAAS,SAGR,IAAI,EAAgB,CACzB,KAAM,gBACN,OAAQ,IACR,KAAM,2BACN,QAAS,2BACV,CAAC,CAEG,IAAI,EAAgB,CACzB,KAAM,gBACN,OAAQ,IACR,KAAM,6BACN,QAAS,8BAA8B,OAAO,EAAM,GACrD,CAAC,CAEL,CAAC,CAAC,KAAK,EAAO,WAAa,EAAO,KAAK,CAAC,CAErC,GAAqB,EAAmB,IAC5C,EAAO,SACL,EAAG,kBAAkB,EAAW,CAC9B,MAAO,KACP,MAAO,EACR,CAAC,CACH,CAEG,GACH,CACC,cACA,gBACA,gBAMD,GACC,EAAO,IAAI,WAAa,CACtB,MAAO,EAAO,MAA8B,GAAW,CACrD,EAAY,MAAM,EAAQ,GAAQ,CAE9B,EADE,EAEA,EAAO,KACL,IAAI,EAAgB,CAClB,KAAM,mBACN,OAAQ,IACR,KAAM,wBACN,QAAS,uBAAuB,OAAO,EAAI,GAC5C,CAAC,CACH,CAGI,EAAO,QAAQ,IAAK,GAAE,CAAC,EAEhC,EACF,CAEF,MAAO,EAAI,OAAO,EAAgB,GAAS,EAAO,EAAM,OAAO,CAC/D,IAAa,EAAM,OAAO,EAC1B,CAEA,EAAkB,GACtB,EAAO,MAA8B,GAAW,CAC9C,EAAY,IAAK,GAAkC,CAE/C,EADE,EAEA,EAAO,KACL,IAAI,EAAgB,CAClB,KAAM,mBACN,OAAQ,IACR,KAAM,+BACN,QAAS,wBAAwB,OAAO,EAAI,GAC7C,CAAC,CACH,CAGI,EAAO,QAAQ,IAAK,GAAE,CAAC,EAEhC,EACF,CAEE,EAAsB,GAC1B,EAAO,SAAW,CACX,EAAY,WACf,EAAY,SAAS,EAEvB,CAMS,GAAmB,CAAE,YAAW,iBAC3C,EAAO,IAAI,WAAa,CACtB,MAAO,EAAuB,EAAU,CACxC,IAAM,EAAU,MAAO,EAEjB,OACG,CACL,wBAAyB,GACzB,sBAAuB,GACvB,uBAAwB,GACxB,yBAA0B,GAC1B,6BAA8B,GAC9B,qBAAsB,EACtB,aAAc,IAAA,GACd,aAAc,IAAA,GACd,SAAU,IAAA,GACV,iBAAkB,KAAO,KACzB,sBAAuB,GACvB,2BAA4B,GAC5B,kBAAmB,IAAA,GACpB,EAkBH,MAAO,CACL,OAAQ,EACR,OACE,GAC+C,CAE/C,IAAM,GADW,EAAK,UAAU,UAAU,UAAU,GACpB,MAAM,IAAI,CAAC,KAAK,CAE1C,EAAO,EAAK,GAAG,MAAM,IAAI,CAAC,MAAM,EAAG,GAAG,CACtC,EAAW,EAAK,KACpB,EACA,EAAgB,GAAG,EAAK,GAAG,GAAG,IAAkB,EAAK,GACtD,CAED,OAAO,EAAO,IAAI,WAAa,CAC7B,MAAOA,EAAoB,EAAO,QAAQ,EAAE,CAAC,CAC7C,MAAOC,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAC5C,MAAOC,EAAkB,EAAO,QAAQ,EAAK,MAAQ,EAAE,CAAC,CAExD,MAAO,EAAO,WAAW,CACvB,QACE,EAAO,MAAM,EAAK,KAAK,EAAW,GAAG,EAAK,CAAE,CAC1C,UAAW,GACZ,CAAC,CACJ,MAAQ,IACN,EAAO,QACL,EAAqB,SAAU,EAAO,CACpC,UAAW,EAAK,GAChB,KAAM,EACP,CAAC,CACH,CACM,IAAI,EAAgB,CACzB,KAAM,gBACN,OAAQ,IACR,KAAM,kCACN,QAAS,8BAA8B,OAAO,EAAM,GACrD,CAAC,EAEL,CAAC,CAEF,MAAO,EAAO,WAAW,CACvB,QAAW,EAAO,UAAU,EAAU,GAAG,CACzC,MAAQ,IACN,EAAO,QACL,EAAqB,SAAU,EAAO,CACpC,UAAW,EAAK,GAChB,KAAM,EACP,CAAC,CACH,CACM,IAAI,EAAgB,CACzB,KAAM,gBACN,OAAQ,IACR,KAAM,wBACN,QAAS,yBAAyB,OAAO,EAAM,GAChD,CAAC,EAEL,CAAC,CAEF,IAAM,EAAS,EACX,GAAG,EAAK,GAAG,GAAG,IACd,EAAK,GAYT,MAXA,GAAK,QAAU,CACb,GAAI,EACJ,KAAM,EAAK,QAAQ,KACnB,KAAM,EACN,OAAQ,EACT,CACD,EAAK,IAAM,GAAG,EAAY,GAAG,IAG7B,MAAO,EAAQ,IAAI,EAAK,GAAI,EAAK,CAE1B,GACP,EAEJ,OAAS,GACA,EAAO,IAAI,WAAa,CAE7B,IAAM,GADa,MAAO,EAAQ,IAAI,EAAQ,EAEjC,QAAQ,MAAQ,EAAK,KAAK,EAAW,EAAQ,CAE1D,MAAO,EAAO,WAAW,CACvB,QAAW,EAAO,OAAO,EAAU,CACnC,MAAQ,IACN,EAAO,QACL,EAAqB,SAAU,EAAO,CACpC,UAAW,EACX,KAAM,EACP,CAAC,CACH,CACM,EAAgB,SAAS,iBAAiB,EAEpD,CAAC,CAEF,MAAO,EAAQ,OAAO,EAAQ,CAC9B,MAAOD,EAAmB,EAAO,QAAQ,GAAG,CAAC,EAC7C,CAEJ,OACE,CAAE,UAAS,SAAQ,UACnB,CAAE,gBAEKE,EACL,EACAC,EACEC,EACA,EAAO,IAAI,WAAa,CACtB,IAAM,EAAY,KAAK,KAAK,CAEtB,EAAa,MAAO,EAAQ,IAAI,EAAQ,CACxC,EACJ,EAAW,QAAQ,MAAQ,EAAK,KAAK,EAAW,EAAQ,CAEpD,EAAgB,MAAO,EAAI,KAAK,EAAE,CAExC,GAAI,CACF,IAAM,EAAS,MAAO,EAAO,kBAC3B,EAAkB,EAAW,EAAO,CACnC,GACC,EAAO,IAAI,WAAa,CACtB,IAAM,EAAO,EAAK,QAChB,EAAW,CAAE,cAAa,gBAAe,aAAY,CAAC,CACvD,CAED,MAAOC,EAAiB,EAAO,QAAQ,EAAE,CAAC,CAC1C,MAAO,EAAO,IAAI,EAAQ,EAAK,CAC/B,MAAO,EAAe,EAAY,CAElC,IAAM,EAAa,MAAO,EAAI,IAAI,EAAc,CAEhD,OADA,MAAOC,EAAkB,EAAO,QAAQ,EAAW,CAAC,CAC7C,EAAS,GAChB,CACJ,EACD,CAgBD,OAbI,EAAW,MAAQ,IAAW,EAAW,OAC3C,MAAO,EAA8B,EAAS,CAC5C,SAAU,EAAW,KACrB,gBAAiB,KAAK,KAAK,CAAG,EAC9B,WAAY,EACZ,gBAAiB,EAAW,KAC5B,cAAe,EAAW,MA
AQ,KAAK,KAAK,CAAG,GAC/C,WAAY,EACb,CAAC,CACF,MAAOC,EAAmB,EAAO,QAAQ,EAAE,CAAC,CAC5C,MAAOP,EAAmB,EAAO,QAAQ,GAAG,CAAC,EAGxC,QACA,EAAO,CAQd,MAPA,EAAO,QACL,EAAqB,QAAS,EAAO,CACnC,UAAW,EACX,KAAM,EACN,SACD,CAAC,CACH,CACK,IAER,CACH,CACF,CAEH,UAAY,GACV,EAAO,IAAI,WAAa,CACtB,IAAM,EAAa,MAAO,EAAQ,IAAI,EAAG,CAGnC,EAAY,EAAW,QAAQ,MAAQ,EAAK,KAAK,EAAW,EAAG,CAC/D,EAAQ,MAAO,EAAO,WAAW,CACrC,QAAW,EAAO,KAAK,EAAU,CACjC,UAAa,EAAgB,SAAS,iBAAiB,CACxD,CAAC,CAEF,MAAO,CACL,GAAG,EACH,OAAQ,EAAM,KACd,KAAM,EAAW,KAClB,EACD,CACJ,KAAO,GACL,EAAO,IAAI,WAAa,CAEtB,IAAM,GADa,MAAO,EAAQ,IAAI,EAAG,EACZ,QAAQ,MAAQ,EAAK,KAAK,EAAW,EAAG,CAE/D,EAAS,MAAO,EAAO,WAAW,CACtC,QAAW,EAAO,SAAS,EAAU,CACrC,UAAa,EAAgB,SAAS,kBAAkB,CACzD,CAAC,CAEF,OAAO,IAAI,WAAW,EAAO,EAC7B,CACJ,kBACA,uBAhNA,GACkC,CAClC,IAAM,EAAe,GAAiB,CAEtC,OAAQ,EAAR,CACE,IAAK,WACH,OAAO,EAAO,QAAQ,EAAa,wBAAwB,CAC7D,IAAK,SACH,OAAO,EAAO,QAAQ,GAAK,CAC7B,QACE,OAAO,EAAO,QAAQ,GAAM,GAuMjC,EACD,CAES,EAAa,GACxB,EAAgB,EAAQ"}
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@uploadista/data-store-filesystem",
  "type": "module",
- "version": "0.0.3",
+ "version": "0.0.4",
  "description": "File system data store for Uploadista",
  "license": "MIT",
  "author": "Uploadista",
@@ -14,16 +14,17 @@
  },
  "dependencies": {
  "effect": "3.18.4",
- "@uploadista/observability": "0.0.3",
- "@uploadista/core": "0.0.3"
+ "@uploadista/observability": "0.0.4",
+ "@uploadista/core": "0.0.4"
  },
  "devDependencies": {
  "@types/node": "24.8.1",
- "@uploadista/typescript-config": "0.0.3"
+ "tsdown": "0.15.9",
+ "@uploadista/typescript-config": "0.0.4"
  },
  "scripts": {
  "dev": "tsc -b",
- "build": "tsc -b",
+ "build": "tsdown",
  "format": "biome format --write ./src",
  "lint": "biome lint --write ./src",
  "check": "biome check --write ./src"
package/tsdown.config.ts ADDED
@@ -0,0 +1,11 @@
+ import { defineConfig } from "tsdown";
+
+ export default defineConfig({
+ entry: {
+ index: "src/index.ts",
+ },
+ minify: true,
+ format: ["esm", "cjs"],
+ dts: true,
+ outDir: "dist",
+ });
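The options in this new config correspond to the build output logged at the top of the diff; the annotations below are editorial and not part of the published file:

import { defineConfig } from "tsdown";

export default defineConfig({
  entry: { index: "src/index.ts" }, // single "index" entry -> dist/index.*
  minify: true,                     // yields the single-line bundles in dist/
  format: ["esm", "cjs"],           // dist/index.js (ESM) and dist/index.cjs (CJS)
  dts: true,                        // dist/index.d.ts and dist/index.d.cts declarations
  outDir: "dist",                   // matches the paths reported by the build log
});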
@@ -1,15 +0,0 @@
- import type { DataStore, UploadFile } from "@uploadista/core/types";
- import { UploadFileKVStore } from "@uploadista/core/types";
- import { Effect } from "effect";
- export type FileStoreOptions = {
- directory: string;
- deliveryUrl: string;
- };
- /**
- * A data store that stores files in the filesystem.
- * @param options - The options for the file store.
- * @returns A data store that stores files in the filesystem.
- */
- export declare const createFileStore: ({ directory, deliveryUrl }: FileStoreOptions) => Effect.Effect<DataStore<UploadFile>, never, UploadFileKVStore>;
- export declare const fileStore: (options: FileStoreOptions) => Effect.Effect<DataStore<UploadFile>, never, UploadFileKVStore>;
- //# sourceMappingURL=file-store.d.ts.map
@@ -1 +0,0 @@
- {"version":3,"file":"file-store.d.ts","sourceRoot":"","sources":["../src/file-store.ts"],"names":[],"mappings":"AAIA,OAAO,KAAK,EACV,SAAS,EAGT,UAAU,EAEX,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAE,iBAAiB,EAAE,MAAM,wBAAwB,CAAC;AAc3D,OAAO,EAAE,MAAM,EAAqB,MAAM,QAAQ,CAAC;AAEnD,MAAM,MAAM,gBAAgB,GAAG;IAC7B,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,MAAM,CAAC;CACrB,CAAC;AAqGF;;;;GAIG;AACH,eAAO,MAAM,eAAe,GAAI,4BAA4B,gBAAgB,mEA0OxE,CAAC;AAEL,eAAO,MAAM,SAAS,GAAI,SAAS,gBAAgB,mEACzB,CAAC"}
@@ -1,257 +0,0 @@
- import fs from "node:fs";
- import fsProm from "node:fs/promises";
- import path from "node:path";
- import { UploadistaError } from "@uploadista/core/errors";
- import { UploadFileKVStore } from "@uploadista/core/types";
- import { filesystemActiveUploadsGauge as activeUploadsGauge, filesystemFileSizeHistogram as fileSizeHistogram, logFilesystemUploadCompletion, filesystemPartSizeHistogram as partSizeHistogram, trackFilesystemError, filesystemUploadDurationHistogram as uploadDurationHistogram, filesystemUploadPartsTotal as uploadPartsTotal, filesystemUploadRequestsTotal as uploadRequestsTotal, filesystemUploadSuccessTotal as uploadSuccessTotal, withFilesystemTimingMetrics as withTimingMetrics, withFilesystemUploadMetrics as withUploadMetrics, } from "@uploadista/observability";
- import { Effect, Ref, Sink, Stream } from "effect";
- const MASK = "0777";
- const IGNORED_MKDIR_ERROR = "EEXIST";
- // const FILE_DOESNT_EXIST = "ENOENT";
- const checkOrCreateDirectory = (directory) => Effect.tryPromise({
- try: () => fsProm.mkdir(directory, { mode: MASK, recursive: true }),
- catch: (error) => {
- if (error instanceof Error &&
- "code" in error &&
- error.code === IGNORED_MKDIR_ERROR) {
- // Directory already exists, not an error
- return new UploadistaError({
- code: "UNKNOWN_ERROR",
- status: 200,
- body: "Directory already exists",
- details: "Directory already exists",
- });
- }
- return new UploadistaError({
- code: "UNKNOWN_ERROR",
- status: 500,
- body: "Failed to create directory",
- details: `Directory creation failed: ${String(error)}`,
- });
- },
- }).pipe(Effect.orElse(() => Effect.void));
- const createWriteStream = (file_path, offset) => Effect.sync(() => fs.createWriteStream(file_path, {
- flags: "r+",
- start: offset,
- }));
- const writeChunk = ({ writeStream, bytesReceived, onProgress, }) => (chunk) => Effect.gen(function* () {
- yield* Effect.async((resume) => {
- writeStream.write(chunk, (err) => {
- if (err) {
- resume(Effect.fail(new UploadistaError({
- code: "FILE_WRITE_ERROR",
- status: 500,
- body: "Failed to write chunk",
- details: `Chunk write failed: ${String(err)}`,
- })));
- }
- else {
- resume(Effect.succeed(void 0));
- }
- });
- });
- yield* Ref.update(bytesReceived, (size) => size + chunk.length);
- onProgress?.(chunk.length);
- });
- const endWriteStream = (writeStream) => Effect.async((resume) => {
- writeStream.end((err) => {
- if (err) {
- resume(Effect.fail(new UploadistaError({
- code: "FILE_WRITE_ERROR",
- status: 500,
- body: "Failed to close write stream",
- details: `Stream close failed: ${String(err)}`,
- })));
- }
- else {
- resume(Effect.succeed(void 0));
- }
- });
- });
- const destroyWriteStream = (writeStream) => Effect.sync(() => {
- if (!writeStream.destroyed) {
- writeStream.destroy();
- }
- });
- /**
- * A data store that stores files in the filesystem.
- * @param options - The options for the file store.
- * @returns A data store that stores files in the filesystem.
- */
- export const createFileStore = ({ directory, deliveryUrl }) => Effect.gen(function* () {
- yield* checkOrCreateDirectory(directory);
- const kvStore = yield* UploadFileKVStore;
- const getCapabilities = () => {
- return {
- supportsParallelUploads: false, // Filesystem operations are sequential
- supportsConcatenation: false, // No native concatenation support
- supportsDeferredLength: false,
- supportsResumableUploads: true, // Can write at specific offsets
- supportsTransactionalUploads: false,
- maxConcurrentUploads: 1, // Sequential writes only
- minChunkSize: undefined,
- maxChunkSize: undefined,
- maxParts: undefined,
- optimalChunkSize: 1024 * 1024, // 1MB default
- requiresOrderedChunks: true, // Sequential offset-based writes
- requiresMimeTypeValidation: true,
- maxValidationSize: undefined, // no size limit
- };
- };
- const validateUploadStrategy = (strategy) => {
- const capabilities = getCapabilities();
- switch (strategy) {
- case "parallel":
- return Effect.succeed(capabilities.supportsParallelUploads);
- case "single":
- return Effect.succeed(true);
- default:
- return Effect.succeed(false);
- }
- };
- return {
- bucket: directory,
- create: (file) => {
- const fileName = file.metadata?.fileName?.toString();
- const fileExtension = fileName?.split(".").pop();
- const dirs = file.id.split("/").slice(0, -1);
- const filePath = path.join(directory, fileExtension ? `${file.id}.${fileExtension}` : file.id);
- return Effect.gen(function* () {
- yield* uploadRequestsTotal(Effect.succeed(1));
- yield* activeUploadsGauge(Effect.succeed(1));
- yield* fileSizeHistogram(Effect.succeed(file.size || 0));
- yield* Effect.tryPromise({
- try: () => fsProm.mkdir(path.join(directory, ...dirs), {
- recursive: true,
- }),
- catch: (error) => {
- Effect.runSync(trackFilesystemError("create", error, {
- upload_id: file.id,
- path: filePath,
- }));
- return new UploadistaError({
- code: "UNKNOWN_ERROR",
- status: 500,
- body: "Failed to create file directory",
- details: `Directory creation failed: ${String(error)}`,
- });
- },
- });
- yield* Effect.tryPromise({
- try: () => fsProm.writeFile(filePath, ""),
- catch: (error) => {
- Effect.runSync(trackFilesystemError("create", error, {
- upload_id: file.id,
- path: filePath,
- }));
- return new UploadistaError({
- code: "UNKNOWN_ERROR",
- status: 500,
- body: "Failed to create file",
- details: `File creation failed: ${String(error)}`,
- });
- },
- });
- const fileId = fileExtension
- ? `${file.id}.${fileExtension}`
- : file.id;
- file.storage = {
- id: fileId,
- type: file.storage.type,
- path: filePath,
- bucket: directory,
- };
- file.url = `${deliveryUrl}/${fileId}`;
- // Store file metadata in KV store
- yield* kvStore.set(file.id, file);
- return file;
- });
- },
- remove: (file_id) => {
- return Effect.gen(function* () {
- const uploadFile = yield* kvStore.get(file_id);
- const file_path = uploadFile.storage.path || path.join(directory, file_id);
- yield* Effect.tryPromise({
- try: () => fsProm.unlink(file_path),
- catch: (error) => {
- Effect.runSync(trackFilesystemError("remove", error, {
- upload_id: file_id,
- path: file_path,
- }));
- return UploadistaError.fromCode("FILE_NOT_FOUND");
- },
- });
- yield* kvStore.delete(file_id);
- yield* activeUploadsGauge(Effect.succeed(-1));
- });
- },
- write: ({ file_id, stream, offset }, { onProgress }) => {
- return withUploadMetrics(file_id, withTimingMetrics(uploadDurationHistogram, Effect.gen(function* () {
- const startTime = Date.now();
- // Get the upload file from KV store to retrieve the actual file path
- const uploadFile = yield* kvStore.get(file_id);
- const file_path = uploadFile.storage.path || path.join(directory, file_id);
- const bytesReceived = yield* Ref.make(0);
- try {
- const result = yield* Effect.acquireUseRelease(createWriteStream(file_path, offset), (writeStream) => Effect.gen(function* () {
- const sink = Sink.forEach(writeChunk({ writeStream, bytesReceived, onProgress }));
- yield* uploadPartsTotal(Effect.succeed(1));
- yield* Stream.run(stream, sink);
- yield* endWriteStream(writeStream);
- const totalBytes = yield* Ref.get(bytesReceived);
- yield* partSizeHistogram(Effect.succeed(totalBytes));
- return offset + totalBytes;
- }), destroyWriteStream);
- // Check if upload is complete
- if (uploadFile.size && result === uploadFile.size) {
- yield* logFilesystemUploadCompletion(file_id, {
- fileSize: uploadFile.size,
- totalDurationMs: Date.now() - startTime,
- partsCount: 1,
- averagePartSize: uploadFile.size,
- throughputBps: uploadFile.size / (Date.now() - startTime),
- retryCount: 0,
- });
- yield* uploadSuccessTotal(Effect.succeed(1));
- yield* activeUploadsGauge(Effect.succeed(-1));
- }
- return result;
- }
- catch (error) {
- Effect.runSync(trackFilesystemError("write", error, {
- upload_id: file_id,
- path: file_path,
- offset,
- }));
- throw error;
- }
- })));
- },
- getUpload: (id) => Effect.gen(function* () {
- const uploadFile = yield* kvStore.get(id);
- // For filesystem, get the actual file size from disk
- const file_path = uploadFile.storage.path || path.join(directory, id);
- const stats = yield* Effect.tryPromise({
- try: () => fsProm.stat(file_path),
- catch: () => UploadistaError.fromCode("FILE_NOT_FOUND"),
- });
- return {
- ...uploadFile,
- offset: stats.size,
- size: uploadFile.size,
- };
- }),
- read: (id) => Effect.gen(function* () {
- const uploadFile = yield* kvStore.get(id);
- const file_path = uploadFile.storage.path || path.join(directory, id);
- const buffer = yield* Effect.tryPromise({
- try: () => fsProm.readFile(file_path),
- catch: () => UploadistaError.fromCode("FILE_READ_ERROR"),
- });
- return new Uint8Array(buffer);
- }),
- getCapabilities,
- validateUploadStrategy,
- };
- });
- export const fileStore = (options) => createFileStore(options);