@uploadista/core 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +5 -0
- package/.turbo/turbo-check.log +231 -0
- package/.turbo/turbo-format.log +5 -0
- package/LICENSE +21 -0
- package/README.md +1120 -0
- package/dist/chunk-CUT6urMc.cjs +1 -0
- package/dist/debounce-C2SeqcxD.js +2 -0
- package/dist/debounce-C2SeqcxD.js.map +1 -0
- package/dist/debounce-LZK7yS7Z.cjs +1 -0
- package/dist/errors/index.cjs +1 -0
- package/dist/errors/index.d.cts +3 -0
- package/dist/errors/index.d.ts +3 -0
- package/dist/errors/index.d.ts.map +1 -0
- package/dist/errors/index.js +2 -0
- package/dist/errors/uploadista-error.d.ts +209 -0
- package/dist/errors/uploadista-error.d.ts.map +1 -0
- package/dist/errors/uploadista-error.js +322 -0
- package/dist/errors-8i_aMxOE.js +1 -0
- package/dist/errors-CRm1FHHT.cjs +0 -0
- package/dist/flow/edge.d.ts +47 -0
- package/dist/flow/edge.d.ts.map +1 -0
- package/dist/flow/edge.js +40 -0
- package/dist/flow/event.d.ts +206 -0
- package/dist/flow/event.d.ts.map +1 -0
- package/dist/flow/event.js +53 -0
- package/dist/flow/flow-server.d.ts +223 -0
- package/dist/flow/flow-server.d.ts.map +1 -0
- package/dist/flow/flow-server.js +614 -0
- package/dist/flow/flow.d.ts +238 -0
- package/dist/flow/flow.d.ts.map +1 -0
- package/dist/flow/flow.js +629 -0
- package/dist/flow/index.cjs +1 -0
- package/dist/flow/index.d.cts +6 -0
- package/dist/flow/index.d.ts +24 -0
- package/dist/flow/index.d.ts.map +1 -0
- package/dist/flow/index.js +24 -0
- package/dist/flow/node.d.ts +136 -0
- package/dist/flow/node.d.ts.map +1 -0
- package/dist/flow/node.js +153 -0
- package/dist/flow/nodes/index.d.ts +8 -0
- package/dist/flow/nodes/index.d.ts.map +1 -0
- package/dist/flow/nodes/index.js +7 -0
- package/dist/flow/nodes/input-node.d.ts +78 -0
- package/dist/flow/nodes/input-node.d.ts.map +1 -0
- package/dist/flow/nodes/input-node.js +233 -0
- package/dist/flow/nodes/storage-node.d.ts +67 -0
- package/dist/flow/nodes/storage-node.d.ts.map +1 -0
- package/dist/flow/nodes/storage-node.js +94 -0
- package/dist/flow/nodes/streaming-input-node.d.ts +69 -0
- package/dist/flow/nodes/streaming-input-node.d.ts.map +1 -0
- package/dist/flow/nodes/streaming-input-node.js +156 -0
- package/dist/flow/nodes/transform-node.d.ts +85 -0
- package/dist/flow/nodes/transform-node.d.ts.map +1 -0
- package/dist/flow/nodes/transform-node.js +107 -0
- package/dist/flow/parallel-scheduler.d.ts +175 -0
- package/dist/flow/parallel-scheduler.d.ts.map +1 -0
- package/dist/flow/parallel-scheduler.js +193 -0
- package/dist/flow/plugins/credential-provider.d.ts +47 -0
- package/dist/flow/plugins/credential-provider.d.ts.map +1 -0
- package/dist/flow/plugins/credential-provider.js +24 -0
- package/dist/flow/plugins/image-ai-plugin.d.ts +61 -0
- package/dist/flow/plugins/image-ai-plugin.d.ts.map +1 -0
- package/dist/flow/plugins/image-ai-plugin.js +21 -0
- package/dist/flow/plugins/image-plugin.d.ts +52 -0
- package/dist/flow/plugins/image-plugin.d.ts.map +1 -0
- package/dist/flow/plugins/image-plugin.js +22 -0
- package/dist/flow/plugins/types/describe-image-node.d.ts +16 -0
- package/dist/flow/plugins/types/describe-image-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/describe-image-node.js +9 -0
- package/dist/flow/plugins/types/index.d.ts +9 -0
- package/dist/flow/plugins/types/index.d.ts.map +1 -0
- package/dist/flow/plugins/types/index.js +8 -0
- package/dist/flow/plugins/types/optimize-node.d.ts +20 -0
- package/dist/flow/plugins/types/optimize-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/optimize-node.js +11 -0
- package/dist/flow/plugins/types/remove-background-node.d.ts +16 -0
- package/dist/flow/plugins/types/remove-background-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/remove-background-node.js +9 -0
- package/dist/flow/plugins/types/resize-node.d.ts +21 -0
- package/dist/flow/plugins/types/resize-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/resize-node.js +16 -0
- package/dist/flow/plugins/zip-plugin.d.ts +62 -0
- package/dist/flow/plugins/zip-plugin.d.ts.map +1 -0
- package/dist/flow/plugins/zip-plugin.js +21 -0
- package/dist/flow/typed-flow.d.ts +90 -0
- package/dist/flow/typed-flow.d.ts.map +1 -0
- package/dist/flow/typed-flow.js +59 -0
- package/dist/flow/types/flow-file.d.ts +45 -0
- package/dist/flow/types/flow-file.d.ts.map +1 -0
- package/dist/flow/types/flow-file.js +27 -0
- package/dist/flow/types/flow-job.d.ts +118 -0
- package/dist/flow/types/flow-job.d.ts.map +1 -0
- package/dist/flow/types/flow-job.js +11 -0
- package/dist/flow/types/flow-types.d.ts +321 -0
- package/dist/flow/types/flow-types.d.ts.map +1 -0
- package/dist/flow/types/flow-types.js +52 -0
- package/dist/flow/types/index.d.ts +4 -0
- package/dist/flow/types/index.d.ts.map +1 -0
- package/dist/flow/types/index.js +3 -0
- package/dist/flow/types/run-args.d.ts +38 -0
- package/dist/flow/types/run-args.d.ts.map +1 -0
- package/dist/flow/types/run-args.js +30 -0
- package/dist/flow/types/type-validator.d.ts +26 -0
- package/dist/flow/types/type-validator.d.ts.map +1 -0
- package/dist/flow/types/type-validator.js +134 -0
- package/dist/flow/utils/resolve-upload-metadata.d.ts +11 -0
- package/dist/flow/utils/resolve-upload-metadata.d.ts.map +1 -0
- package/dist/flow/utils/resolve-upload-metadata.js +28 -0
- package/dist/flow-2zXnEiWL.cjs +1 -0
- package/dist/flow-CRaKy7Vj.js +2 -0
- package/dist/flow-CRaKy7Vj.js.map +1 -0
- package/dist/generate-id-Dm-Vboxq.d.ts +34 -0
- package/dist/generate-id-Dm-Vboxq.d.ts.map +1 -0
- package/dist/generate-id-LjJRLD6N.d.cts +34 -0
- package/dist/generate-id-LjJRLD6N.d.cts.map +1 -0
- package/dist/generate-id-xHp_Z7Cl.cjs +1 -0
- package/dist/generate-id-yohS1ZDk.js +2 -0
- package/dist/generate-id-yohS1ZDk.js.map +1 -0
- package/dist/index-BO8GZlbD.d.cts +1040 -0
- package/dist/index-BO8GZlbD.d.cts.map +1 -0
- package/dist/index-BoGG5KAY.d.ts +1 -0
- package/dist/index-BtBZHVmz.d.cts +1 -0
- package/dist/index-D-CoVpkZ.d.ts +1004 -0
- package/dist/index-D-CoVpkZ.d.ts.map +1 -0
- package/dist/index.cjs +1 -0
- package/dist/index.d.cts +6 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +5 -0
- package/dist/logger/logger.cjs +1 -0
- package/dist/logger/logger.d.cts +8 -0
- package/dist/logger/logger.d.cts.map +1 -0
- package/dist/logger/logger.d.ts +5 -0
- package/dist/logger/logger.d.ts.map +1 -0
- package/dist/logger/logger.js +10 -0
- package/dist/logger/logger.js.map +1 -0
- package/dist/semaphore-0ZwjVpyF.js +2 -0
- package/dist/semaphore-0ZwjVpyF.js.map +1 -0
- package/dist/semaphore-BHprIjFI.d.cts +37 -0
- package/dist/semaphore-BHprIjFI.d.cts.map +1 -0
- package/dist/semaphore-DThupBkc.d.ts +37 -0
- package/dist/semaphore-DThupBkc.d.ts.map +1 -0
- package/dist/semaphore-DVrONiAV.cjs +1 -0
- package/dist/stream-limiter-CoWKv39w.js +2 -0
- package/dist/stream-limiter-CoWKv39w.js.map +1 -0
- package/dist/stream-limiter-JgOwmkMa.cjs +1 -0
- package/dist/streams/multi-stream.cjs +1 -0
- package/dist/streams/multi-stream.d.cts +91 -0
- package/dist/streams/multi-stream.d.cts.map +1 -0
- package/dist/streams/multi-stream.d.ts +86 -0
- package/dist/streams/multi-stream.d.ts.map +1 -0
- package/dist/streams/multi-stream.js +149 -0
- package/dist/streams/multi-stream.js.map +1 -0
- package/dist/streams/stream-limiter.cjs +1 -0
- package/dist/streams/stream-limiter.d.cts +36 -0
- package/dist/streams/stream-limiter.d.cts.map +1 -0
- package/dist/streams/stream-limiter.d.ts +27 -0
- package/dist/streams/stream-limiter.d.ts.map +1 -0
- package/dist/streams/stream-limiter.js +49 -0
- package/dist/streams/stream-splitter.cjs +1 -0
- package/dist/streams/stream-splitter.d.cts +68 -0
- package/dist/streams/stream-splitter.d.cts.map +1 -0
- package/dist/streams/stream-splitter.d.ts +51 -0
- package/dist/streams/stream-splitter.d.ts.map +1 -0
- package/dist/streams/stream-splitter.js +175 -0
- package/dist/streams/stream-splitter.js.map +1 -0
- package/dist/types/data-store-registry.d.ts +13 -0
- package/dist/types/data-store-registry.d.ts.map +1 -0
- package/dist/types/data-store-registry.js +4 -0
- package/dist/types/data-store.d.ts +316 -0
- package/dist/types/data-store.d.ts.map +1 -0
- package/dist/types/data-store.js +157 -0
- package/dist/types/event-broadcaster.d.ts +28 -0
- package/dist/types/event-broadcaster.d.ts.map +1 -0
- package/dist/types/event-broadcaster.js +6 -0
- package/dist/types/event-emitter.d.ts +378 -0
- package/dist/types/event-emitter.d.ts.map +1 -0
- package/dist/types/event-emitter.js +223 -0
- package/dist/types/index.cjs +1 -0
- package/dist/types/index.d.cts +6 -0
- package/dist/types/index.d.ts +10 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +9 -0
- package/dist/types/input-file.d.ts +104 -0
- package/dist/types/input-file.d.ts.map +1 -0
- package/dist/types/input-file.js +27 -0
- package/dist/types/kv-store.d.ts +281 -0
- package/dist/types/kv-store.d.ts.map +1 -0
- package/dist/types/kv-store.js +234 -0
- package/dist/types/middleware.d.ts +17 -0
- package/dist/types/middleware.d.ts.map +1 -0
- package/dist/types/middleware.js +21 -0
- package/dist/types/upload-event.d.ts +105 -0
- package/dist/types/upload-event.d.ts.map +1 -0
- package/dist/types/upload-event.js +71 -0
- package/dist/types/upload-file.d.ts +136 -0
- package/dist/types/upload-file.d.ts.map +1 -0
- package/dist/types/upload-file.js +34 -0
- package/dist/types/websocket.d.ts +144 -0
- package/dist/types/websocket.d.ts.map +1 -0
- package/dist/types/websocket.js +40 -0
- package/dist/types-BT-cvi7T.cjs +1 -0
- package/dist/types-DhU2j-XF.js +2 -0
- package/dist/types-DhU2j-XF.js.map +1 -0
- package/dist/upload/convert-to-stream.d.ts +38 -0
- package/dist/upload/convert-to-stream.d.ts.map +1 -0
- package/dist/upload/convert-to-stream.js +43 -0
- package/dist/upload/convert-upload-to-flow-file.d.ts +14 -0
- package/dist/upload/convert-upload-to-flow-file.d.ts.map +1 -0
- package/dist/upload/convert-upload-to-flow-file.js +21 -0
- package/dist/upload/create-upload.d.ts +68 -0
- package/dist/upload/create-upload.d.ts.map +1 -0
- package/dist/upload/create-upload.js +157 -0
- package/dist/upload/index.cjs +1 -0
- package/dist/upload/index.d.cts +6 -0
- package/dist/upload/index.d.ts +4 -0
- package/dist/upload/index.d.ts.map +1 -0
- package/dist/upload/index.js +3 -0
- package/dist/upload/mime.d.ts +24 -0
- package/dist/upload/mime.d.ts.map +1 -0
- package/dist/upload/mime.js +351 -0
- package/dist/upload/upload-chunk.d.ts +58 -0
- package/dist/upload/upload-chunk.d.ts.map +1 -0
- package/dist/upload/upload-chunk.js +277 -0
- package/dist/upload/upload-server.d.ts +221 -0
- package/dist/upload/upload-server.d.ts.map +1 -0
- package/dist/upload/upload-server.js +181 -0
- package/dist/upload/upload-strategy-negotiator.d.ts +148 -0
- package/dist/upload/upload-strategy-negotiator.d.ts.map +1 -0
- package/dist/upload/upload-strategy-negotiator.js +217 -0
- package/dist/upload/upload-url.d.ts +68 -0
- package/dist/upload/upload-url.d.ts.map +1 -0
- package/dist/upload/upload-url.js +142 -0
- package/dist/upload/write-to-store.d.ts +77 -0
- package/dist/upload/write-to-store.d.ts.map +1 -0
- package/dist/upload/write-to-store.js +147 -0
- package/dist/upload-DLuICjpP.cjs +1 -0
- package/dist/upload-DaXO34dE.js +2 -0
- package/dist/upload-DaXO34dE.js.map +1 -0
- package/dist/uploadista-error-BB-Wdiz9.cjs +22 -0
- package/dist/uploadista-error-BVsVxqvz.js +23 -0
- package/dist/uploadista-error-BVsVxqvz.js.map +1 -0
- package/dist/uploadista-error-CwxYs4EB.d.ts +52 -0
- package/dist/uploadista-error-CwxYs4EB.d.ts.map +1 -0
- package/dist/uploadista-error-kKlhLRhY.d.cts +52 -0
- package/dist/uploadista-error-kKlhLRhY.d.cts.map +1 -0
- package/dist/utils/checksum.d.ts +22 -0
- package/dist/utils/checksum.d.ts.map +1 -0
- package/dist/utils/checksum.js +49 -0
- package/dist/utils/debounce.cjs +1 -0
- package/dist/utils/debounce.d.cts +38 -0
- package/dist/utils/debounce.d.cts.map +1 -0
- package/dist/utils/debounce.d.ts +36 -0
- package/dist/utils/debounce.d.ts.map +1 -0
- package/dist/utils/debounce.js +73 -0
- package/dist/utils/generate-id.cjs +1 -0
- package/dist/utils/generate-id.d.cts +2 -0
- package/dist/utils/generate-id.d.ts +32 -0
- package/dist/utils/generate-id.d.ts.map +1 -0
- package/dist/utils/generate-id.js +23 -0
- package/dist/utils/md5.cjs +1 -0
- package/dist/utils/md5.d.cts +73 -0
- package/dist/utils/md5.d.cts.map +1 -0
- package/dist/utils/md5.d.ts +71 -0
- package/dist/utils/md5.d.ts.map +1 -0
- package/dist/utils/md5.js +417 -0
- package/dist/utils/md5.js.map +1 -0
- package/dist/utils/once.cjs +1 -0
- package/dist/utils/once.d.cts +25 -0
- package/dist/utils/once.d.cts.map +1 -0
- package/dist/utils/once.d.ts +21 -0
- package/dist/utils/once.d.ts.map +1 -0
- package/dist/utils/once.js +54 -0
- package/dist/utils/once.js.map +1 -0
- package/dist/utils/semaphore.cjs +1 -0
- package/dist/utils/semaphore.d.cts +3 -0
- package/dist/utils/semaphore.d.ts +78 -0
- package/dist/utils/semaphore.d.ts.map +1 -0
- package/dist/utils/semaphore.js +134 -0
- package/dist/utils/throttle.cjs +1 -0
- package/dist/utils/throttle.d.cts +24 -0
- package/dist/utils/throttle.d.cts.map +1 -0
- package/dist/utils/throttle.d.ts +18 -0
- package/dist/utils/throttle.d.ts.map +1 -0
- package/dist/utils/throttle.js +20 -0
- package/dist/utils/throttle.js.map +1 -0
- package/docs/PARALLEL_EXECUTION.md +206 -0
- package/docs/PARALLEL_EXECUTION_QUICKSTART.md +142 -0
- package/docs/PARALLEL_EXECUTION_REFACTOR.md +184 -0
- package/package.json +80 -0
- package/src/errors/__tests__/uploadista-error.test.ts +251 -0
- package/src/errors/index.ts +2 -0
- package/src/errors/uploadista-error.ts +394 -0
- package/src/flow/README.md +352 -0
- package/src/flow/edge.test.ts +146 -0
- package/src/flow/edge.ts +60 -0
- package/src/flow/event.ts +229 -0
- package/src/flow/flow-server.ts +1089 -0
- package/src/flow/flow.ts +1050 -0
- package/src/flow/index.ts +28 -0
- package/src/flow/node.ts +249 -0
- package/src/flow/nodes/index.ts +8 -0
- package/src/flow/nodes/input-node.ts +296 -0
- package/src/flow/nodes/storage-node.ts +128 -0
- package/src/flow/nodes/transform-node.ts +154 -0
- package/src/flow/parallel-scheduler.ts +259 -0
- package/src/flow/plugins/credential-provider.ts +48 -0
- package/src/flow/plugins/image-ai-plugin.ts +66 -0
- package/src/flow/plugins/image-plugin.ts +60 -0
- package/src/flow/plugins/types/describe-image-node.ts +16 -0
- package/src/flow/plugins/types/index.ts +9 -0
- package/src/flow/plugins/types/optimize-node.ts +18 -0
- package/src/flow/plugins/types/remove-background-node.ts +18 -0
- package/src/flow/plugins/types/resize-node.ts +26 -0
- package/src/flow/plugins/zip-plugin.ts +69 -0
- package/src/flow/typed-flow.ts +279 -0
- package/src/flow/types/flow-file.ts +51 -0
- package/src/flow/types/flow-job.ts +138 -0
- package/src/flow/types/flow-types.ts +353 -0
- package/src/flow/types/index.ts +6 -0
- package/src/flow/types/run-args.ts +40 -0
- package/src/flow/types/type-validator.ts +204 -0
- package/src/flow/utils/resolve-upload-metadata.ts +48 -0
- package/src/index.ts +5 -0
- package/src/logger/logger.ts +14 -0
- package/src/streams/stream-limiter.test.ts +150 -0
- package/src/streams/stream-limiter.ts +75 -0
- package/src/types/data-store.ts +427 -0
- package/src/types/event-broadcaster.ts +39 -0
- package/src/types/event-emitter.ts +349 -0
- package/src/types/index.ts +9 -0
- package/src/types/input-file.ts +107 -0
- package/src/types/kv-store.ts +375 -0
- package/src/types/middleware.ts +54 -0
- package/src/types/upload-event.ts +75 -0
- package/src/types/upload-file.ts +139 -0
- package/src/types/websocket.ts +65 -0
- package/src/upload/convert-to-stream.ts +48 -0
- package/src/upload/create-upload.ts +214 -0
- package/src/upload/index.ts +3 -0
- package/src/upload/mime.ts +436 -0
- package/src/upload/upload-chunk.ts +364 -0
- package/src/upload/upload-server.ts +390 -0
- package/src/upload/upload-strategy-negotiator.ts +316 -0
- package/src/upload/upload-url.ts +173 -0
- package/src/upload/write-to-store.ts +211 -0
- package/src/utils/checksum.ts +61 -0
- package/src/utils/debounce.test.ts +126 -0
- package/src/utils/debounce.ts +89 -0
- package/src/utils/generate-id.ts +35 -0
- package/src/utils/md5.ts +475 -0
- package/src/utils/once.test.ts +83 -0
- package/src/utils/once.ts +63 -0
- package/src/utils/throttle.test.ts +101 -0
- package/src/utils/throttle.ts +29 -0
- package/tsconfig.json +20 -0
- package/tsconfig.tsbuildinfo +1 -0
- package/tsdown.config.ts +25 -0
- package/vitest.config.ts +15 -0
|
@@ -0,0 +1,217 @@
|
|
|
1
|
+
/**
 * Picks the best upload strategy for a file given what the target data store
 * can actually do.
 *
 * Inputs considered during negotiation:
 * - store capabilities (parallel, resumable, concatenation support)
 * - chunk-size limits (`minChunkSize` / `maxChunkSize` / `maxParts`)
 * - caller preferences (strategy, chunk size, connection count)
 *
 * The result always includes human-readable `reasoning` entries describing
 * each decision and `warnings` for every adjustment that had to be made.
 *
 * @example
 * ```typescript
 * const negotiator = new UploadStrategyNegotiator(
 *   s3Capabilities,
 *   (strategy) => s3Capabilities.supportsStrategy(strategy)
 * );
 *
 * const result = negotiator.negotiateStrategy({
 *   fileSize: 100_000_000,          // 100MB
 *   preferredStrategy: "parallel",
 *   preferredChunkSize: 5_000_000,  // 5MB chunks
 *   parallelUploads: 4
 * });
 *
 * console.log(result.strategy);  // "parallel"
 * console.log(result.chunkSize); // 5_000_000
 * console.log(result.reasoning); // ["Using preferred strategy: parallel", ...]
 * ```
 */
export class UploadStrategyNegotiator {
    capabilities;
    validateUploadStrategy;
    /**
     * @param capabilities - Data store capability flags and size constraints.
     * @param validateUploadStrategy - Predicate answering whether the store
     *   supports a given strategy name.
     */
    constructor(capabilities, validateUploadStrategy) {
        this.capabilities = capabilities;
        this.validateUploadStrategy = validateUploadStrategy;
    }
    /**
     * Negotiates strategy, chunk size, and parallelism for one upload.
     *
     * Decision pipeline:
     * 1. honor the preferred strategy if the store supports it;
     * 2. otherwise auto-select (parallel for large files, single otherwise);
     * 3. clamp chunk size into the store's [min, max] window;
     * 4. for parallel uploads, cap connections and grow the chunk size so the
     *    part count stays within `maxParts`;
     * 5. re-validate the chosen strategy and fall back to "single" if needed.
     *
     * @param options - File size plus optional caller preferences.
     * @returns `{ strategy, chunkSize, parallelUploads, reasoning, warnings }`.
     *
     * @example
     * ```typescript
     * const result = negotiator.negotiateStrategy({
     *   fileSize: 50_000_000,
     *   preferredStrategy: "parallel",
     *   preferredChunkSize: 5_000_000,
     *   parallelUploads: 3
     * });
     * console.log(result.strategy, result.chunkSize, result.warnings);
     * ```
     */
    negotiateStrategy(options) {
        const reasoning = [];
        const warnings = [];
        let strategy = "single";
        // Chunk-size preference order: caller > store optimum > 1MB default.
        let chunkSize = options.preferredChunkSize ??
            this.capabilities.optimalChunkSize ??
            1024 * 1024;
        let parallelUploads = options.parallelUploads ?? 1;
        // Step 1: try to honor the caller's preferred strategy.
        if (options.preferredStrategy) {
            if (this.validateUploadStrategy(options.preferredStrategy)) {
                strategy = options.preferredStrategy;
                reasoning.push(`Using preferred strategy: ${strategy}`);
            }
            else {
                warnings.push(`Preferred strategy '${options.preferredStrategy}' not supported by data store, falling back`);
            }
        }
        // Step 2: auto-select when no usable preference was given.
        if (!options.preferredStrategy ||
            !this.validateUploadStrategy(options.preferredStrategy)) {
            const parallelThreshold = options.minChunkSizeForParallel ?? 10 * 1024 * 1024;
            const useParallel = this.capabilities.supportsParallelUploads &&
                options.fileSize > parallelThreshold;
            if (useParallel) {
                strategy = "parallel";
                reasoning.push(`Selected parallel upload for large file (${options.fileSize} bytes)`);
            }
            else {
                strategy = "single";
                reasoning.push(this.capabilities.supportsParallelUploads
                    ? `Selected single upload for small file (${options.fileSize} bytes)`
                    : "Selected single upload (parallel not supported by data store)");
            }
        }
        // Step 3: clamp chunk size into the store's allowed window.
        const { minChunkSize, maxChunkSize } = this.capabilities;
        if (minChunkSize && chunkSize < minChunkSize) {
            warnings.push(`Chunk size ${chunkSize} below minimum ${minChunkSize}, adjusting`);
            chunkSize = minChunkSize;
        }
        if (maxChunkSize && chunkSize > maxChunkSize) {
            warnings.push(`Chunk size ${chunkSize} above maximum ${maxChunkSize}, adjusting`);
            chunkSize = maxChunkSize;
        }
        // Step 4: parallel-specific constraints.
        if (strategy === "parallel") {
            const { maxConcurrentUploads, maxParts } = this.capabilities;
            if (maxConcurrentUploads && parallelUploads > maxConcurrentUploads) {
                warnings.push(`Parallel uploads ${parallelUploads} exceeds maximum ${maxConcurrentUploads}, adjusting`);
                parallelUploads = maxConcurrentUploads;
            }
            // Grow chunk size if the part count would exceed the store's cap.
            if (maxParts) {
                const estimatedParts = Math.ceil(options.fileSize / chunkSize);
                if (estimatedParts > maxParts) {
                    const minChunkForParts = Math.ceil(options.fileSize / maxParts);
                    warnings.push(`Estimated parts ${estimatedParts} exceeds maximum ${maxParts}, increasing chunk size`);
                    chunkSize = Math.max(chunkSize, minChunkForParts);
                }
            }
        }
        // Step 5: last-chance validation of the chosen strategy.
        if (!this.validateUploadStrategy(strategy)) {
            warnings.push(`Final strategy validation failed, falling back to single upload`);
            strategy = "single";
            parallelUploads = 1;
        }
        reasoning.push(`Data store capabilities: parallel=${this.capabilities.supportsParallelUploads}, concatenation=${this.capabilities.supportsConcatenation}, resumable=${this.capabilities.supportsResumableUploads}`);
        return {
            strategy,
            chunkSize,
            // A non-parallel strategy always reports a single connection.
            parallelUploads: strategy === "parallel" ? parallelUploads : 1,
            reasoning,
            warnings,
        };
    }
    /**
     * Returns the capability object this negotiator was constructed with.
     *
     * @returns The data store capabilities and constraints.
     */
    getDataStoreCapabilities() {
        return this.capabilities;
    }
    /**
     * Checks caller options against store constraints without negotiating.
     *
     * Useful as a pre-flight check: collects every violation instead of
     * stopping at the first one.
     *
     * @param options - Upload strategy options to validate.
     * @returns `{ valid, errors }` where `errors` lists each violation.
     *
     * @example
     * ```typescript
     * const validation = negotiator.validateConfiguration({
     *   fileSize: 10_000_000,
     *   preferredStrategy: "parallel",
     *   preferredChunkSize: 1_000_000,
     *   parallelUploads: 5
     * });
     * if (!validation.valid) {
     *   console.log("Configuration errors:", validation.errors);
     * }
     * ```
     */
    validateConfiguration(options) {
        const errors = [];
        const { minChunkSize, maxChunkSize, maxConcurrentUploads } = this.capabilities;
        if (options.preferredStrategy &&
            !this.validateUploadStrategy(options.preferredStrategy)) {
            errors.push(`Preferred strategy '${options.preferredStrategy}' not supported by data store`);
        }
        if (options.preferredChunkSize) {
            if (minChunkSize && options.preferredChunkSize < minChunkSize) {
                errors.push(`Chunk size ${options.preferredChunkSize} below data store minimum ${minChunkSize}`);
            }
            if (maxChunkSize && options.preferredChunkSize > maxChunkSize) {
                errors.push(`Chunk size ${options.preferredChunkSize} above data store maximum ${maxChunkSize}`);
            }
        }
        if (options.parallelUploads &&
            maxConcurrentUploads &&
            options.parallelUploads > maxConcurrentUploads) {
            errors.push(`Parallel uploads ${options.parallelUploads} exceeds data store maximum ${maxConcurrentUploads}`);
        }
        return {
            valid: errors.length === 0,
            errors,
        };
    }
}
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
import { Effect } from "effect";
import { UploadistaError } from "../errors";
/**
 * Fetches a file from a remote URL.
 *
 * This function handles HTTP requests to remote URLs for file uploads,
 * including proper error handling, metrics tracking, and observability.
 *
 * Features:
 * - HTTP request with proper error handling
 * - Effect tracing for performance monitoring
 * - Metrics tracking for URL-based uploads
 * - Structured logging for debugging
 * - Response validation and error reporting
 *
 * Failure mode: a rejected `fetch` (network failure, invalid URL) is mapped
 * to an `UploadistaError`.
 * NOTE(review): the implementation appears to yield non-2xx responses as
 * successes rather than failures — callers should check `response.ok`;
 * confirm against the full implementation.
 *
 * @param url - The remote URL to fetch the file from
 * @returns Effect that yields the Response object
 *
 * @example
 * ```typescript
 * // Fetch a file from URL
 * const fetchEffect = fetchFile("https://example.com/image.jpg");
 *
 * // Run with error handling
 * const response = await Effect.runPromise(
 *   fetchEffect.pipe(
 *     Effect.catchAll((error) =>
 *       Effect.logError("Failed to fetch file").pipe(
 *         Effect.andThen(Effect.fail(error))
 *       )
 *     )
 *   )
 * );
 * ```
 */
export declare const fetchFile: (url: string) => Effect.Effect<Response, UploadistaError, never>;
/**
 * Converts a Response object to an ArrayBuffer.
 *
 * This function safely converts HTTP response data to binary format
 * for processing and storage, with proper error handling and observability.
 *
 * Features:
 * - Safe conversion from Response to ArrayBuffer
 * - Effect tracing for performance monitoring
 * - Structured logging for debugging
 * - Error handling with proper UploadistaError types
 *
 * NOTE(review): a Response body can be consumed only once — do not call this
 * on a response whose body has already been read.
 *
 * @param response - The HTTP Response object to convert
 * @returns Effect that yields the ArrayBuffer data
 *
 * @example
 * ```typescript
 * // Convert response to buffer
 * const bufferEffect = arrayBuffer(response);
 *
 * // Use in upload pipeline
 * const buffer = await Effect.runPromise(
 *   bufferEffect.pipe(
 *     Effect.tap((buffer) =>
 *       Effect.logInfo(`Buffer size: ${buffer.byteLength} bytes`)
 *     )
 *   )
 * );
 * ```
 */
export declare const arrayBuffer: (response: Response) => Effect.Effect<ArrayBuffer, UploadistaError, never>;
//# sourceMappingURL=upload-url.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"upload-url.d.ts","sourceRoot":"","sources":["../../src/upload/upload-url.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAU,MAAM,QAAQ,CAAC;AACxC,OAAO,EAAE,eAAe,EAAE,MAAM,WAAW,CAAC;AAE5C;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAgCG;AACH,eAAO,MAAM,SAAS,GAAI,KAAK,MAAM,oDAsEpC,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6BG;AACH,eAAO,MAAM,WAAW,GAAI,UAAU,QAAQ,uDAkC7C,CAAC"}
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
import { Effect, Metric } from "effect";
|
|
2
|
+
import { UploadistaError } from "../errors";
|
|
3
|
+
/**
|
|
4
|
+
* Fetches a file from a remote URL.
|
|
5
|
+
*
|
|
6
|
+
* This function handles HTTP requests to remote URLs for file uploads,
|
|
7
|
+
* including proper error handling, metrics tracking, and observability.
|
|
8
|
+
*
|
|
9
|
+
* Features:
|
|
10
|
+
* - HTTP request with proper error handling
|
|
11
|
+
* - Effect tracing for performance monitoring
|
|
12
|
+
* - Metrics tracking for URL-based uploads
|
|
13
|
+
* - Structured logging for debugging
|
|
14
|
+
* - Response validation and error reporting
|
|
15
|
+
*
|
|
16
|
+
* @param url - The remote URL to fetch the file from
|
|
17
|
+
* @returns Effect that yields the Response object
|
|
18
|
+
*
|
|
19
|
+
* @example
|
|
20
|
+
* ```typescript
|
|
21
|
+
* // Fetch a file from URL
|
|
22
|
+
* const fetchEffect = fetchFile("https://example.com/image.jpg");
|
|
23
|
+
*
|
|
24
|
+
* // Run with error handling
|
|
25
|
+
* const response = await Effect.runPromise(
|
|
26
|
+
* fetchEffect.pipe(
|
|
27
|
+
* Effect.catchAll((error) =>
|
|
28
|
+
* Effect.logError("Failed to fetch file").pipe(
|
|
29
|
+
* Effect.andThen(Effect.fail(error))
|
|
30
|
+
* )
|
|
31
|
+
* )
|
|
32
|
+
* )
|
|
33
|
+
* );
|
|
34
|
+
* ```
|
|
35
|
+
*/
|
|
36
|
+
export const fetchFile = (url) => {
|
|
37
|
+
return Effect.tryPromise({
|
|
38
|
+
try: async () => {
|
|
39
|
+
return await fetch(url);
|
|
40
|
+
},
|
|
41
|
+
catch: (error) => {
|
|
42
|
+
return UploadistaError.fromCode("UNKNOWN_ERROR", {
|
|
43
|
+
cause: error,
|
|
44
|
+
});
|
|
45
|
+
},
|
|
46
|
+
}).pipe(
|
|
47
|
+
// Add tracing span for URL fetch
|
|
48
|
+
Effect.withSpan("upload-fetch-url", {
|
|
49
|
+
attributes: {
|
|
50
|
+
"upload.url": url,
|
|
51
|
+
"upload.operation": "fetch",
|
|
52
|
+
},
|
|
53
|
+
}),
|
|
54
|
+
// Track URL fetch metrics
|
|
55
|
+
Effect.tap((response) => Effect.gen(function* () {
|
|
56
|
+
// Increment URL upload counter
|
|
57
|
+
yield* Metric.increment(Metric.counter("upload_from_url_total", {
|
|
58
|
+
description: "Total number of URL-based uploads",
|
|
59
|
+
}));
|
|
60
|
+
// Track success/failure
|
|
61
|
+
if (response.ok) {
|
|
62
|
+
yield* Metric.increment(Metric.counter("upload_from_url_success_total", {
|
|
63
|
+
description: "Total number of successful URL-based uploads",
|
|
64
|
+
}));
|
|
65
|
+
}
|
|
66
|
+
})),
|
|
67
|
+
// Add structured logging
|
|
68
|
+
Effect.tap((response) => Effect.logInfo("URL fetch completed").pipe(Effect.annotateLogs({
|
|
69
|
+
"upload.url": url,
|
|
70
|
+
"response.status": response.status.toString(),
|
|
71
|
+
"response.ok": response.ok.toString(),
|
|
72
|
+
"response.content_length": response.headers.get("content-length") ?? "unknown",
|
|
73
|
+
}))),
|
|
74
|
+
// Handle errors with logging and metrics
|
|
75
|
+
Effect.tapError((error) => Effect.gen(function* () {
|
|
76
|
+
// Track failed URL upload
|
|
77
|
+
yield* Metric.increment(Metric.counter("upload_from_url_failed_total", {
|
|
78
|
+
description: "Total number of failed URL-based uploads",
|
|
79
|
+
}));
|
|
80
|
+
// Log error
|
|
81
|
+
yield* Effect.logError("URL fetch failed").pipe(Effect.annotateLogs({
|
|
82
|
+
"upload.url": url,
|
|
83
|
+
error: String(error),
|
|
84
|
+
}));
|
|
85
|
+
})));
|
|
86
|
+
};
|
|
87
|
+
/**
|
|
88
|
+
* Converts a Response object to an ArrayBuffer.
|
|
89
|
+
*
|
|
90
|
+
* This function safely converts HTTP response data to binary format
|
|
91
|
+
* for processing and storage, with proper error handling and observability.
|
|
92
|
+
*
|
|
93
|
+
* Features:
|
|
94
|
+
* - Safe conversion from Response to ArrayBuffer
|
|
95
|
+
* - Effect tracing for performance monitoring
|
|
96
|
+
* - Structured logging for debugging
|
|
97
|
+
* - Error handling with proper UploadistaError types
|
|
98
|
+
*
|
|
99
|
+
* @param response - The HTTP Response object to convert
|
|
100
|
+
* @returns Effect that yields the ArrayBuffer data
|
|
101
|
+
*
|
|
102
|
+
* @example
|
|
103
|
+
* ```typescript
|
|
104
|
+
* // Convert response to buffer
|
|
105
|
+
* const bufferEffect = arrayBuffer(response);
|
|
106
|
+
*
|
|
107
|
+
* // Use in upload pipeline
|
|
108
|
+
* const buffer = await Effect.runPromise(
|
|
109
|
+
* bufferEffect.pipe(
|
|
110
|
+
* Effect.tap((buffer) =>
|
|
111
|
+
* Effect.logInfo(`Buffer size: ${buffer.byteLength} bytes`)
|
|
112
|
+
* )
|
|
113
|
+
* )
|
|
114
|
+
* );
|
|
115
|
+
* ```
|
|
116
|
+
*/
|
|
117
|
+
export const arrayBuffer = (response) => {
|
|
118
|
+
return Effect.tryPromise({
|
|
119
|
+
try: async () => {
|
|
120
|
+
return await response.arrayBuffer();
|
|
121
|
+
},
|
|
122
|
+
catch: (error) => {
|
|
123
|
+
return UploadistaError.fromCode("UNKNOWN_ERROR", {
|
|
124
|
+
cause: error,
|
|
125
|
+
});
|
|
126
|
+
},
|
|
127
|
+
}).pipe(
|
|
128
|
+
// Add tracing span for buffer conversion
|
|
129
|
+
Effect.withSpan("upload-convert-to-buffer", {
|
|
130
|
+
attributes: {
|
|
131
|
+
"upload.operation": "arrayBuffer",
|
|
132
|
+
},
|
|
133
|
+
}),
|
|
134
|
+
// Add structured logging
|
|
135
|
+
Effect.tap((buffer) => Effect.logDebug("Response converted to array buffer").pipe(Effect.annotateLogs({
|
|
136
|
+
"buffer.size": buffer.byteLength.toString(),
|
|
137
|
+
}))),
|
|
138
|
+
// Handle errors with logging
|
|
139
|
+
Effect.tapError((error) => Effect.logError("Failed to convert response to array buffer").pipe(Effect.annotateLogs({
|
|
140
|
+
error: String(error),
|
|
141
|
+
}))));
|
|
142
|
+
};
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
import { Effect } from "effect";
|
|
2
|
+
import { UploadistaError } from "../errors";
|
|
3
|
+
import type { DataStore, UploadEvent, UploadFile } from "../types";
|
|
4
|
+
import { type EventEmitter } from "../types";
|
|
5
|
+
/**
|
|
6
|
+
* Configuration options for writing data to a data store.
|
|
7
|
+
*
|
|
8
|
+
* @property data - The stream of data to write
|
|
9
|
+
* @property upload - Upload file metadata
|
|
10
|
+
* @property dataStore - Target data store for writing
|
|
11
|
+
* @property maxFileSize - Maximum allowed file size in bytes
|
|
12
|
+
* @property controller - AbortController for cancellation
|
|
13
|
+
* @property eventEmitter - Event emitter for progress tracking
|
|
14
|
+
* @property uploadProgressInterval - Progress emission interval in milliseconds (default: 200)
|
|
15
|
+
*/
|
|
16
|
+
type WriteToStoreOptions = {
|
|
17
|
+
data: ReadableStream<Uint8Array>;
|
|
18
|
+
upload: UploadFile;
|
|
19
|
+
dataStore: DataStore<UploadFile>;
|
|
20
|
+
maxFileSize: number;
|
|
21
|
+
controller: AbortController;
|
|
22
|
+
eventEmitter: EventEmitter<UploadEvent>;
|
|
23
|
+
uploadProgressInterval?: number;
|
|
24
|
+
};
|
|
25
|
+
/**
|
|
26
|
+
* Writes data stream to a data store with progress tracking and size limits.
|
|
27
|
+
*
|
|
28
|
+
* This function handles the core data writing logic including:
|
|
29
|
+
* - Stream conversion and processing
|
|
30
|
+
* - File size validation and limiting
|
|
31
|
+
* - Progress tracking with throttled events
|
|
32
|
+
* - Abort signal handling for cancellation
|
|
33
|
+
* - Error handling and cleanup
|
|
34
|
+
*
|
|
35
|
+
* The function includes comprehensive observability with:
|
|
36
|
+
* - Effect tracing spans for performance monitoring
|
|
37
|
+
* - Structured logging for debugging and monitoring
|
|
38
|
+
* - Progress event emission with throttling
|
|
39
|
+
* - Error handling with proper UploadistaError types
|
|
40
|
+
*
|
|
41
|
+
* @param data - The stream of data to write to storage
|
|
42
|
+
* @param upload - Upload file metadata containing ID, offset, etc.
|
|
43
|
+
* @param dataStore - Target data store for writing the data
|
|
44
|
+
* @param maxFileSize - Maximum allowed file size in bytes
|
|
45
|
+
* @param controller - AbortController for handling cancellation
|
|
46
|
+
* @param eventEmitter - Event emitter for progress tracking
|
|
47
|
+
* @param uploadProgressInterval - Progress emission interval in milliseconds (default: 200)
|
|
48
|
+
* @returns Effect that yields the number of bytes written
|
|
49
|
+
*
|
|
50
|
+
* @example
|
|
51
|
+
* ```typescript
|
|
52
|
+
* // Write data to store with progress tracking
|
|
53
|
+
* const writeEffect = writeToStore({
|
|
54
|
+
* data: fileStream,
|
|
55
|
+
* upload: uploadMetadata,
|
|
56
|
+
* dataStore: s3DataStore,
|
|
57
|
+
* maxFileSize: 100_000_000, // 100MB
|
|
58
|
+
* controller: abortController,
|
|
59
|
+
* eventEmitter: progressEmitter,
|
|
60
|
+
* uploadProgressInterval: 500 // Emit progress every 500ms
|
|
61
|
+
* });
|
|
62
|
+
*
|
|
63
|
+
* // Run with error handling
|
|
64
|
+
* const bytesWritten = await Effect.runPromise(
|
|
65
|
+
* writeEffect.pipe(
|
|
66
|
+
* Effect.catchAll((error) =>
|
|
67
|
+
* Effect.logError("Failed to write to store").pipe(
|
|
68
|
+
* Effect.andThen(Effect.fail(error))
|
|
69
|
+
* )
|
|
70
|
+
* )
|
|
71
|
+
* )
|
|
72
|
+
* );
|
|
73
|
+
* ```
|
|
74
|
+
*/
|
|
75
|
+
export declare function writeToStore({ data, upload, dataStore, maxFileSize, controller, eventEmitter, uploadProgressInterval, }: WriteToStoreOptions): Effect.Effect<number, UploadistaError, never>;
|
|
76
|
+
export {};
|
|
77
|
+
//# sourceMappingURL=write-to-store.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"write-to-store.d.ts","sourceRoot":"","sources":["../../src/upload/write-to-store.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAO,MAAM,QAAQ,CAAC;AACrC,OAAO,EAAE,eAAe,EAAE,MAAM,WAAW,CAAC;AAE5C,OAAO,KAAK,EAAE,SAAS,EAAE,WAAW,EAAE,UAAU,EAAE,MAAM,UAAU,CAAC;AACnE,OAAO,EAAE,KAAK,YAAY,EAAmB,MAAM,UAAU,CAAC;AAG9D;;;;;;;;;;GAUG;AACH,KAAK,mBAAmB,GAAG;IACzB,IAAI,EAAE,cAAc,CAAC,UAAU,CAAC,CAAC;IACjC,MAAM,EAAE,UAAU,CAAC;IACnB,SAAS,EAAE,SAAS,CAAC,UAAU,CAAC,CAAC;IACjC,WAAW,EAAE,MAAM,CAAC;IACpB,UAAU,EAAE,eAAe,CAAC;IAC5B,YAAY,EAAE,YAAY,CAAC,WAAW,CAAC,CAAC;IACxC,sBAAsB,CAAC,EAAE,MAAM,CAAC;CACjC,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAiDG;AACH,wBAAgB,YAAY,CAAC,EAC3B,IAAI,EACJ,MAAM,EACN,SAAS,EACT,WAAW,EACX,UAAU,EACV,YAAY,EACZ,sBAA4B,GAC7B,EAAE,mBAAmB,iDA4HrB"}
|
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
import { Effect, Ref } from "effect";
|
|
2
|
+
import { UploadistaError } from "../errors";
|
|
3
|
+
import { StreamLimiterEffect } from "../streams/stream-limiter";
|
|
4
|
+
import { UploadEventType } from "../types";
|
|
5
|
+
import { convertToStream } from "./convert-to-stream";
|
|
6
|
+
/**
|
|
7
|
+
* Writes data stream to a data store with progress tracking and size limits.
|
|
8
|
+
*
|
|
9
|
+
* This function handles the core data writing logic including:
|
|
10
|
+
* - Stream conversion and processing
|
|
11
|
+
* - File size validation and limiting
|
|
12
|
+
* - Progress tracking with throttled events
|
|
13
|
+
* - Abort signal handling for cancellation
|
|
14
|
+
* - Error handling and cleanup
|
|
15
|
+
*
|
|
16
|
+
* The function includes comprehensive observability with:
|
|
17
|
+
* - Effect tracing spans for performance monitoring
|
|
18
|
+
* - Structured logging for debugging and monitoring
|
|
19
|
+
* - Progress event emission with throttling
|
|
20
|
+
* - Error handling with proper UploadistaError types
|
|
21
|
+
*
|
|
22
|
+
* @param data - The stream of data to write to storage
|
|
23
|
+
* @param upload - Upload file metadata containing ID, offset, etc.
|
|
24
|
+
* @param dataStore - Target data store for writing the data
|
|
25
|
+
* @param maxFileSize - Maximum allowed file size in bytes
|
|
26
|
+
* @param controller - AbortController for handling cancellation
|
|
27
|
+
* @param eventEmitter - Event emitter for progress tracking
|
|
28
|
+
* @param uploadProgressInterval - Progress emission interval in milliseconds (default: 200)
|
|
29
|
+
* @returns Effect that yields the number of bytes written
|
|
30
|
+
*
|
|
31
|
+
* @example
|
|
32
|
+
* ```typescript
|
|
33
|
+
* // Write data to store with progress tracking
|
|
34
|
+
* const writeEffect = writeToStore({
|
|
35
|
+
* data: fileStream,
|
|
36
|
+
* upload: uploadMetadata,
|
|
37
|
+
* dataStore: s3DataStore,
|
|
38
|
+
* maxFileSize: 100_000_000, // 100MB
|
|
39
|
+
* controller: abortController,
|
|
40
|
+
* eventEmitter: progressEmitter,
|
|
41
|
+
* uploadProgressInterval: 500 // Emit progress every 500ms
|
|
42
|
+
* });
|
|
43
|
+
*
|
|
44
|
+
* // Run with error handling
|
|
45
|
+
* const bytesWritten = await Effect.runPromise(
|
|
46
|
+
* writeEffect.pipe(
|
|
47
|
+
* Effect.catchAll((error) =>
|
|
48
|
+
* Effect.logError("Failed to write to store").pipe(
|
|
49
|
+
* Effect.andThen(Effect.fail(error))
|
|
50
|
+
* )
|
|
51
|
+
* )
|
|
52
|
+
* )
|
|
53
|
+
* );
|
|
54
|
+
* ```
|
|
55
|
+
*/
|
|
56
|
+
export function writeToStore({ data, upload, dataStore, maxFileSize, controller, eventEmitter, uploadProgressInterval = 200, }) {
|
|
57
|
+
return Effect.gen(function* () {
|
|
58
|
+
const stream = convertToStream(data);
|
|
59
|
+
// Check if already aborted
|
|
60
|
+
if (controller.signal.aborted) {
|
|
61
|
+
return yield* Effect.fail(UploadistaError.fromCode("ABORTED"));
|
|
62
|
+
}
|
|
63
|
+
// Create an AbortController to manage the stream pipeline
|
|
64
|
+
const abortController = new AbortController();
|
|
65
|
+
const { signal } = abortController;
|
|
66
|
+
// Set up abort handling
|
|
67
|
+
const onAbort = () => {
|
|
68
|
+
// stream.cancel();
|
|
69
|
+
abortController.abort();
|
|
70
|
+
};
|
|
71
|
+
controller.signal.addEventListener("abort", onAbort, { once: true });
|
|
72
|
+
return yield* Effect.acquireUseRelease(Effect.sync(() => ({ signal, onAbort })), ({ signal: _signal }) => Effect.gen(function* () {
|
|
73
|
+
// Create a ref to track the last progress emission time for throttling
|
|
74
|
+
const lastEmitTime = yield* Ref.make(0);
|
|
75
|
+
// Create the stream limiter
|
|
76
|
+
const limiter = StreamLimiterEffect.limit({
|
|
77
|
+
maxSize: maxFileSize,
|
|
78
|
+
});
|
|
79
|
+
// Pipe the data through the limiter
|
|
80
|
+
const limitedStream = limiter(stream);
|
|
81
|
+
// Write to the data store with progress tracking
|
|
82
|
+
const offset = yield* dataStore.write({
|
|
83
|
+
stream: limitedStream,
|
|
84
|
+
file_id: upload.id,
|
|
85
|
+
offset: upload.offset,
|
|
86
|
+
}, {
|
|
87
|
+
onProgress: (newOffset) => {
|
|
88
|
+
// Simple throttling using timestamp check
|
|
89
|
+
const now = Date.now();
|
|
90
|
+
Ref.get(lastEmitTime)
|
|
91
|
+
.pipe(Effect.flatMap((lastTime) => {
|
|
92
|
+
if (now - lastTime >= uploadProgressInterval) {
|
|
93
|
+
return Effect.gen(function* () {
|
|
94
|
+
yield* Ref.set(lastEmitTime, now);
|
|
95
|
+
yield* eventEmitter.emit(upload.id, {
|
|
96
|
+
type: UploadEventType.UPLOAD_PROGRESS,
|
|
97
|
+
data: {
|
|
98
|
+
id: upload.id,
|
|
99
|
+
progress: newOffset,
|
|
100
|
+
total: upload.size ?? 0,
|
|
101
|
+
},
|
|
102
|
+
flow: upload.flow,
|
|
103
|
+
});
|
|
104
|
+
});
|
|
105
|
+
}
|
|
106
|
+
return Effect.void;
|
|
107
|
+
}), Effect.runPromise)
|
|
108
|
+
.catch(() => {
|
|
109
|
+
// Ignore errors during progress emission
|
|
110
|
+
});
|
|
111
|
+
},
|
|
112
|
+
});
|
|
113
|
+
return offset;
|
|
114
|
+
}).pipe(Effect.catchAll((error) => {
|
|
115
|
+
if (error instanceof Error && error.name === "AbortError") {
|
|
116
|
+
return Effect.fail(UploadistaError.fromCode("ABORTED"));
|
|
117
|
+
}
|
|
118
|
+
if (error instanceof UploadistaError) {
|
|
119
|
+
return Effect.fail(error);
|
|
120
|
+
}
|
|
121
|
+
return Effect.fail(UploadistaError.fromCode("FILE_WRITE_ERROR", { cause: error }));
|
|
122
|
+
})), ({ onAbort }) => Effect.sync(() => {
|
|
123
|
+
controller.signal.removeEventListener("abort", onAbort);
|
|
124
|
+
}));
|
|
125
|
+
}).pipe(
|
|
126
|
+
// Add tracing span for write operation
|
|
127
|
+
Effect.withSpan("upload-write-to-store", {
|
|
128
|
+
attributes: {
|
|
129
|
+
"upload.id": upload.id,
|
|
130
|
+
"upload.offset": upload.offset.toString(),
|
|
131
|
+
"upload.max_file_size": maxFileSize.toString(),
|
|
132
|
+
"upload.file_size": upload.size?.toString() ?? "0",
|
|
133
|
+
},
|
|
134
|
+
}),
|
|
135
|
+
// Add structured logging for write operation
|
|
136
|
+
Effect.tap((offset) => Effect.logDebug("Data written to store").pipe(Effect.annotateLogs({
|
|
137
|
+
"upload.id": upload.id,
|
|
138
|
+
"write.offset": offset.toString(),
|
|
139
|
+
"write.bytes_written": (offset - upload.offset).toString(),
|
|
140
|
+
}))),
|
|
141
|
+
// Handle errors with logging
|
|
142
|
+
Effect.tapError((error) => Effect.logError("Failed to write to store").pipe(Effect.annotateLogs({
|
|
143
|
+
"upload.id": upload.id,
|
|
144
|
+
"upload.offset": upload.offset.toString(),
|
|
145
|
+
error: error instanceof UploadistaError ? error.code : String(error),
|
|
146
|
+
}))));
|
|
147
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
const e=require(`./chunk-CUT6urMc.cjs`),t=require(`./uploadista-error-BB-Wdiz9.cjs`),n=require(`./types-BT-cvi7T.cjs`),r=require(`./generate-id-xHp_Z7Cl.cjs`),i=require(`./stream-limiter-JgOwmkMa.cjs`);let a=require(`effect`);a=e.__toESM(a);const o=(e,t)=>{if(e.length>=4){let t=e.slice(0,4);if(t[0]===137&&t[1]===80&&t[2]===78&&t[3]===71)return`image/png`;if(t[0]===255&&t[1]===216)return`image/jpeg`;if(t[0]===71&&t[1]===73&&t[2]===70)return`image/gif`;if(t[0]===82&&t[1]===73&&t[2]===70&&t[3]===70)return`image/webp`}if(t)switch(t.split(`.`).pop()?.toLowerCase()){case`jpg`:case`jpeg`:return`image/jpeg`;case`png`:return`image/png`;case`gif`:return`image/gif`;case`webp`:return`image/webp`;case`pdf`:return`application/pdf`;case`txt`:return`text/plain`;case`json`:return`application/json`;default:return`application/octet-stream`}return`application/octet-stream`},s=(e,{dataStoreService:t,kvStore:r,eventEmitter:i,generateId:o,isBuffer:s})=>a.Effect.gen(function*(){let a=yield*t.getDataStore(e.storageId,s),c=yield*o.generateId(),{size:l,type:u,fileName:d,lastModified:f}=e,p={type:u,fileName:d??``};f&&(p.lastModified=f.toString());let m={id:c,size:l,metadata:p,offset:0,creationDate:new Date().toISOString(),storage:{id:e.storageId,isBuffer:s,type:u,path:``,bucket:a.bucket}},h=yield*a.create(m);return yield*r.set(c,h),yield*i.emit(c,{type:n.UploadEventType.UPLOAD_STARTED,data:h}),h});function c(e){return a.Stream.fromReadableStream(()=>e,e=>new t.UploadistaError({code:`UNKNOWN_ERROR`,status:500,body:String(e)}))}function l({data:e,upload:r,dataStore:o,maxFileSize:s,controller:l,eventEmitter:u,uploadProgressInterval:d=200}){return a.Effect.gen(function*(){let f=c(e);if(l.signal.aborted)return yield*a.Effect.fail(t.UploadistaError.fromCode(`ABORTED`));let p=new AbortController,{signal:m}=p,h=()=>{p.abort()};return l.signal.addEventListener(`abort`,h,{once:!0}),yield*a.Effect.acquireUseRelease(a.Effect.sync(()=>({signal:m,onAbort:h})),({signal:e})=>a.Effect.gen(function*(){let 
e=yield*a.Ref.make(0),t=i.StreamLimiterEffect.limit({maxSize:s})(f);return yield*o.write({stream:t,file_id:r.id,offset:r.offset},{onProgress:t=>{let i=Date.now();a.Ref.get(e).pipe(a.Effect.flatMap(o=>i-o>=d?a.Effect.gen(function*(){yield*a.Ref.set(e,i),yield*u.emit(r.id,{type:n.UploadEventType.UPLOAD_PROGRESS,data:{id:r.id,progress:t,total:r.size??0}})}):a.Effect.void),a.Effect.runPromise).catch(()=>{})}})}).pipe(a.Effect.catchAll(e=>e instanceof Error&&e.name===`AbortError`?a.Effect.fail(t.UploadistaError.fromCode(`ABORTED`)):e instanceof t.UploadistaError?a.Effect.fail(e):a.Effect.fail(t.UploadistaError.fromCode(`FILE_WRITE_ERROR`,{cause:e})))),({onAbort:e})=>a.Effect.sync(()=>{l.signal.removeEventListener(`abort`,e)}))})}const u=(e,t,{dataStoreService:r,kvStore:i,eventEmitter:o})=>a.Effect.gen(function*(){let a=yield*i.get(e),s=yield*r.getDataStore(a.storage.id,a.storage.isBuffer),c=new AbortController;return a.offset=yield*l({dataStore:s,data:t,upload:a,maxFileSize:1e8,controller:c,uploadProgressInterval:200,eventEmitter:o}),yield*i.set(e,a),yield*o.emit(a.id,{type:n.UploadEventType.UPLOAD_PROGRESS,data:{id:a.id,progress:a.offset,total:a.size??0}}),a});var d=class extends a.Context.Tag(`UploadServer`)(){};function f(){return a.Effect.gen(function*(){let e=yield*n.UploadFileKVStore,t=yield*n.UploadEventEmitter,i=yield*r.GenerateId,o=yield*n.UploadFileDataStores;return{upload:(n,r)=>a.Effect.gen(function*(){let a=yield*s(n,{dataStoreService:o,kvStore:e,eventEmitter:t,generateId:i});return yield*u(a.id,r,{dataStoreService:o,kvStore:e,eventEmitter:t})}),createUpload:n=>a.Effect.gen(function*(){return yield*s(n,{dataStoreService:o,kvStore:e,eventEmitter:t,generateId:i})}),uploadChunk:(n,r)=>a.Effect.gen(function*(){return yield*u(n,r,{dataStoreService:o,kvStore:e,eventEmitter:t})}),getUpload:t=>a.Effect.gen(function*(){return yield*e.get(t)}),read:e=>a.Effect.gen(function*(){return 
yield*(yield*o.getDataStore(e)).read(e)}),getCapabilities:e=>a.Effect.gen(function*(){return(yield*o.getDataStore(e)).getCapabilities()}),subscribeToUploadEvents:(e,n)=>a.Effect.gen(function*(){yield*t.subscribe(e,n)}),unsubscribeFromUploadEvents:e=>a.Effect.gen(function*(){yield*t.unsubscribe(e)})}})}const p=a.Layer.effect(d,f());var m=class{constructor(e,t){this.capabilities=e,this.validateUploadStrategy=t}negotiateStrategy(e){let t=[],n=[],r=`single`,i=e.preferredChunkSize??this.capabilities.optimalChunkSize??1024*1024,a=e.parallelUploads??1;if(e.preferredStrategy&&(this.validateUploadStrategy(e.preferredStrategy)?(r=e.preferredStrategy,t.push(`Using preferred strategy: ${r}`)):n.push(`Preferred strategy '${e.preferredStrategy}' not supported by data store, falling back`)),(!e.preferredStrategy||!this.validateUploadStrategy(e.preferredStrategy))&&(this.capabilities.supportsParallelUploads&&e.fileSize>(e.minChunkSizeForParallel??10*1024*1024)?(r=`parallel`,t.push(`Selected parallel upload for large file (${e.fileSize} bytes)`)):(r=`single`,t.push(this.capabilities.supportsParallelUploads?`Selected single upload for small file (${e.fileSize} bytes)`:`Selected single upload (parallel not supported by data store)`))),this.capabilities.minChunkSize&&i<this.capabilities.minChunkSize&&(n.push(`Chunk size ${i} below minimum ${this.capabilities.minChunkSize}, adjusting`),i=this.capabilities.minChunkSize),this.capabilities.maxChunkSize&&i>this.capabilities.maxChunkSize&&(n.push(`Chunk size ${i} above maximum ${this.capabilities.maxChunkSize}, adjusting`),i=this.capabilities.maxChunkSize),r===`parallel`&&(this.capabilities.maxConcurrentUploads&&a>this.capabilities.maxConcurrentUploads&&(n.push(`Parallel uploads ${a} exceeds maximum ${this.capabilities.maxConcurrentUploads}, adjusting`),a=this.capabilities.maxConcurrentUploads),this.capabilities.maxParts)){let t=Math.ceil(e.fileSize/i);if(t>this.capabilities.maxParts){let 
r=Math.ceil(e.fileSize/this.capabilities.maxParts);n.push(`Estimated parts ${t} exceeds maximum ${this.capabilities.maxParts}, increasing chunk size`),i=Math.max(i,r)}}return this.validateUploadStrategy(r)||(n.push(`Final strategy validation failed, falling back to single upload`),r=`single`,a=1),t.push(`Data store capabilities: parallel=${this.capabilities.supportsParallelUploads}, concatenation=${this.capabilities.supportsConcatenation}, resumable=${this.capabilities.supportsResumableUploads}`),{strategy:r,chunkSize:i,parallelUploads:r===`parallel`?a:1,reasoning:t,warnings:n}}getDataStoreCapabilities(){return this.capabilities}validateConfiguration(e){let t=[];return e.preferredStrategy&&!this.validateUploadStrategy(e.preferredStrategy)&&t.push(`Preferred strategy '${e.preferredStrategy}' not supported by data store`),e.preferredChunkSize&&(this.capabilities.minChunkSize&&e.preferredChunkSize<this.capabilities.minChunkSize&&t.push(`Chunk size ${e.preferredChunkSize} below data store minimum ${this.capabilities.minChunkSize}`),this.capabilities.maxChunkSize&&e.preferredChunkSize>this.capabilities.maxChunkSize&&t.push(`Chunk size ${e.preferredChunkSize} above data store maximum ${this.capabilities.maxChunkSize}`)),e.parallelUploads&&this.capabilities.maxConcurrentUploads&&e.parallelUploads>this.capabilities.maxConcurrentUploads&&t.push(`Parallel uploads ${e.parallelUploads} exceeds data store maximum ${this.capabilities.maxConcurrentUploads}`),{valid:t.length===0,errors:t}}};Object.defineProperty(exports,`UploadServer`,{enumerable:!0,get:function(){return d}}),Object.defineProperty(exports,`UploadStrategyNegotiator`,{enumerable:!0,get:function(){return m}}),Object.defineProperty(exports,`createUploadServer`,{enumerable:!0,get:function(){return f}}),Object.defineProperty(exports,`detectMimeType`,{enumerable:!0,get:function(){return o}}),Object.defineProperty(exports,`uploadServer`,{enumerable:!0,get:function(){return p}});
|