@uploadista/core 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +5 -0
- package/.turbo/turbo-check.log +231 -0
- package/.turbo/turbo-format.log +5 -0
- package/LICENSE +21 -0
- package/README.md +1120 -0
- package/dist/chunk-CUT6urMc.cjs +1 -0
- package/dist/debounce-C2SeqcxD.js +2 -0
- package/dist/debounce-C2SeqcxD.js.map +1 -0
- package/dist/debounce-LZK7yS7Z.cjs +1 -0
- package/dist/errors/index.cjs +1 -0
- package/dist/errors/index.d.cts +3 -0
- package/dist/errors/index.d.ts +3 -0
- package/dist/errors/index.d.ts.map +1 -0
- package/dist/errors/index.js +2 -0
- package/dist/errors/uploadista-error.d.ts +209 -0
- package/dist/errors/uploadista-error.d.ts.map +1 -0
- package/dist/errors/uploadista-error.js +322 -0
- package/dist/errors-8i_aMxOE.js +1 -0
- package/dist/errors-CRm1FHHT.cjs +0 -0
- package/dist/flow/edge.d.ts +47 -0
- package/dist/flow/edge.d.ts.map +1 -0
- package/dist/flow/edge.js +40 -0
- package/dist/flow/event.d.ts +206 -0
- package/dist/flow/event.d.ts.map +1 -0
- package/dist/flow/event.js +53 -0
- package/dist/flow/flow-server.d.ts +223 -0
- package/dist/flow/flow-server.d.ts.map +1 -0
- package/dist/flow/flow-server.js +614 -0
- package/dist/flow/flow.d.ts +238 -0
- package/dist/flow/flow.d.ts.map +1 -0
- package/dist/flow/flow.js +629 -0
- package/dist/flow/index.cjs +1 -0
- package/dist/flow/index.d.cts +6 -0
- package/dist/flow/index.d.ts +24 -0
- package/dist/flow/index.d.ts.map +1 -0
- package/dist/flow/index.js +24 -0
- package/dist/flow/node.d.ts +136 -0
- package/dist/flow/node.d.ts.map +1 -0
- package/dist/flow/node.js +153 -0
- package/dist/flow/nodes/index.d.ts +8 -0
- package/dist/flow/nodes/index.d.ts.map +1 -0
- package/dist/flow/nodes/index.js +7 -0
- package/dist/flow/nodes/input-node.d.ts +78 -0
- package/dist/flow/nodes/input-node.d.ts.map +1 -0
- package/dist/flow/nodes/input-node.js +233 -0
- package/dist/flow/nodes/storage-node.d.ts +67 -0
- package/dist/flow/nodes/storage-node.d.ts.map +1 -0
- package/dist/flow/nodes/storage-node.js +94 -0
- package/dist/flow/nodes/streaming-input-node.d.ts +69 -0
- package/dist/flow/nodes/streaming-input-node.d.ts.map +1 -0
- package/dist/flow/nodes/streaming-input-node.js +156 -0
- package/dist/flow/nodes/transform-node.d.ts +85 -0
- package/dist/flow/nodes/transform-node.d.ts.map +1 -0
- package/dist/flow/nodes/transform-node.js +107 -0
- package/dist/flow/parallel-scheduler.d.ts +175 -0
- package/dist/flow/parallel-scheduler.d.ts.map +1 -0
- package/dist/flow/parallel-scheduler.js +193 -0
- package/dist/flow/plugins/credential-provider.d.ts +47 -0
- package/dist/flow/plugins/credential-provider.d.ts.map +1 -0
- package/dist/flow/plugins/credential-provider.js +24 -0
- package/dist/flow/plugins/image-ai-plugin.d.ts +61 -0
- package/dist/flow/plugins/image-ai-plugin.d.ts.map +1 -0
- package/dist/flow/plugins/image-ai-plugin.js +21 -0
- package/dist/flow/plugins/image-plugin.d.ts +52 -0
- package/dist/flow/plugins/image-plugin.d.ts.map +1 -0
- package/dist/flow/plugins/image-plugin.js +22 -0
- package/dist/flow/plugins/types/describe-image-node.d.ts +16 -0
- package/dist/flow/plugins/types/describe-image-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/describe-image-node.js +9 -0
- package/dist/flow/plugins/types/index.d.ts +9 -0
- package/dist/flow/plugins/types/index.d.ts.map +1 -0
- package/dist/flow/plugins/types/index.js +8 -0
- package/dist/flow/plugins/types/optimize-node.d.ts +20 -0
- package/dist/flow/plugins/types/optimize-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/optimize-node.js +11 -0
- package/dist/flow/plugins/types/remove-background-node.d.ts +16 -0
- package/dist/flow/plugins/types/remove-background-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/remove-background-node.js +9 -0
- package/dist/flow/plugins/types/resize-node.d.ts +21 -0
- package/dist/flow/plugins/types/resize-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/resize-node.js +16 -0
- package/dist/flow/plugins/zip-plugin.d.ts +62 -0
- package/dist/flow/plugins/zip-plugin.d.ts.map +1 -0
- package/dist/flow/plugins/zip-plugin.js +21 -0
- package/dist/flow/typed-flow.d.ts +90 -0
- package/dist/flow/typed-flow.d.ts.map +1 -0
- package/dist/flow/typed-flow.js +59 -0
- package/dist/flow/types/flow-file.d.ts +45 -0
- package/dist/flow/types/flow-file.d.ts.map +1 -0
- package/dist/flow/types/flow-file.js +27 -0
- package/dist/flow/types/flow-job.d.ts +118 -0
- package/dist/flow/types/flow-job.d.ts.map +1 -0
- package/dist/flow/types/flow-job.js +11 -0
- package/dist/flow/types/flow-types.d.ts +321 -0
- package/dist/flow/types/flow-types.d.ts.map +1 -0
- package/dist/flow/types/flow-types.js +52 -0
- package/dist/flow/types/index.d.ts +4 -0
- package/dist/flow/types/index.d.ts.map +1 -0
- package/dist/flow/types/index.js +3 -0
- package/dist/flow/types/run-args.d.ts +38 -0
- package/dist/flow/types/run-args.d.ts.map +1 -0
- package/dist/flow/types/run-args.js +30 -0
- package/dist/flow/types/type-validator.d.ts +26 -0
- package/dist/flow/types/type-validator.d.ts.map +1 -0
- package/dist/flow/types/type-validator.js +134 -0
- package/dist/flow/utils/resolve-upload-metadata.d.ts +11 -0
- package/dist/flow/utils/resolve-upload-metadata.d.ts.map +1 -0
- package/dist/flow/utils/resolve-upload-metadata.js +28 -0
- package/dist/flow-2zXnEiWL.cjs +1 -0
- package/dist/flow-CRaKy7Vj.js +2 -0
- package/dist/flow-CRaKy7Vj.js.map +1 -0
- package/dist/generate-id-Dm-Vboxq.d.ts +34 -0
- package/dist/generate-id-Dm-Vboxq.d.ts.map +1 -0
- package/dist/generate-id-LjJRLD6N.d.cts +34 -0
- package/dist/generate-id-LjJRLD6N.d.cts.map +1 -0
- package/dist/generate-id-xHp_Z7Cl.cjs +1 -0
- package/dist/generate-id-yohS1ZDk.js +2 -0
- package/dist/generate-id-yohS1ZDk.js.map +1 -0
- package/dist/index-BO8GZlbD.d.cts +1040 -0
- package/dist/index-BO8GZlbD.d.cts.map +1 -0
- package/dist/index-BoGG5KAY.d.ts +1 -0
- package/dist/index-BtBZHVmz.d.cts +1 -0
- package/dist/index-D-CoVpkZ.d.ts +1004 -0
- package/dist/index-D-CoVpkZ.d.ts.map +1 -0
- package/dist/index.cjs +1 -0
- package/dist/index.d.cts +6 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +5 -0
- package/dist/logger/logger.cjs +1 -0
- package/dist/logger/logger.d.cts +8 -0
- package/dist/logger/logger.d.cts.map +1 -0
- package/dist/logger/logger.d.ts +5 -0
- package/dist/logger/logger.d.ts.map +1 -0
- package/dist/logger/logger.js +10 -0
- package/dist/logger/logger.js.map +1 -0
- package/dist/semaphore-0ZwjVpyF.js +2 -0
- package/dist/semaphore-0ZwjVpyF.js.map +1 -0
- package/dist/semaphore-BHprIjFI.d.cts +37 -0
- package/dist/semaphore-BHprIjFI.d.cts.map +1 -0
- package/dist/semaphore-DThupBkc.d.ts +37 -0
- package/dist/semaphore-DThupBkc.d.ts.map +1 -0
- package/dist/semaphore-DVrONiAV.cjs +1 -0
- package/dist/stream-limiter-CoWKv39w.js +2 -0
- package/dist/stream-limiter-CoWKv39w.js.map +1 -0
- package/dist/stream-limiter-JgOwmkMa.cjs +1 -0
- package/dist/streams/multi-stream.cjs +1 -0
- package/dist/streams/multi-stream.d.cts +91 -0
- package/dist/streams/multi-stream.d.cts.map +1 -0
- package/dist/streams/multi-stream.d.ts +86 -0
- package/dist/streams/multi-stream.d.ts.map +1 -0
- package/dist/streams/multi-stream.js +149 -0
- package/dist/streams/multi-stream.js.map +1 -0
- package/dist/streams/stream-limiter.cjs +1 -0
- package/dist/streams/stream-limiter.d.cts +36 -0
- package/dist/streams/stream-limiter.d.cts.map +1 -0
- package/dist/streams/stream-limiter.d.ts +27 -0
- package/dist/streams/stream-limiter.d.ts.map +1 -0
- package/dist/streams/stream-limiter.js +49 -0
- package/dist/streams/stream-splitter.cjs +1 -0
- package/dist/streams/stream-splitter.d.cts +68 -0
- package/dist/streams/stream-splitter.d.cts.map +1 -0
- package/dist/streams/stream-splitter.d.ts +51 -0
- package/dist/streams/stream-splitter.d.ts.map +1 -0
- package/dist/streams/stream-splitter.js +175 -0
- package/dist/streams/stream-splitter.js.map +1 -0
- package/dist/types/data-store-registry.d.ts +13 -0
- package/dist/types/data-store-registry.d.ts.map +1 -0
- package/dist/types/data-store-registry.js +4 -0
- package/dist/types/data-store.d.ts +316 -0
- package/dist/types/data-store.d.ts.map +1 -0
- package/dist/types/data-store.js +157 -0
- package/dist/types/event-broadcaster.d.ts +28 -0
- package/dist/types/event-broadcaster.d.ts.map +1 -0
- package/dist/types/event-broadcaster.js +6 -0
- package/dist/types/event-emitter.d.ts +378 -0
- package/dist/types/event-emitter.d.ts.map +1 -0
- package/dist/types/event-emitter.js +223 -0
- package/dist/types/index.cjs +1 -0
- package/dist/types/index.d.cts +6 -0
- package/dist/types/index.d.ts +10 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +9 -0
- package/dist/types/input-file.d.ts +104 -0
- package/dist/types/input-file.d.ts.map +1 -0
- package/dist/types/input-file.js +27 -0
- package/dist/types/kv-store.d.ts +281 -0
- package/dist/types/kv-store.d.ts.map +1 -0
- package/dist/types/kv-store.js +234 -0
- package/dist/types/middleware.d.ts +17 -0
- package/dist/types/middleware.d.ts.map +1 -0
- package/dist/types/middleware.js +21 -0
- package/dist/types/upload-event.d.ts +105 -0
- package/dist/types/upload-event.d.ts.map +1 -0
- package/dist/types/upload-event.js +71 -0
- package/dist/types/upload-file.d.ts +136 -0
- package/dist/types/upload-file.d.ts.map +1 -0
- package/dist/types/upload-file.js +34 -0
- package/dist/types/websocket.d.ts +144 -0
- package/dist/types/websocket.d.ts.map +1 -0
- package/dist/types/websocket.js +40 -0
- package/dist/types-BT-cvi7T.cjs +1 -0
- package/dist/types-DhU2j-XF.js +2 -0
- package/dist/types-DhU2j-XF.js.map +1 -0
- package/dist/upload/convert-to-stream.d.ts +38 -0
- package/dist/upload/convert-to-stream.d.ts.map +1 -0
- package/dist/upload/convert-to-stream.js +43 -0
- package/dist/upload/convert-upload-to-flow-file.d.ts +14 -0
- package/dist/upload/convert-upload-to-flow-file.d.ts.map +1 -0
- package/dist/upload/convert-upload-to-flow-file.js +21 -0
- package/dist/upload/create-upload.d.ts +68 -0
- package/dist/upload/create-upload.d.ts.map +1 -0
- package/dist/upload/create-upload.js +157 -0
- package/dist/upload/index.cjs +1 -0
- package/dist/upload/index.d.cts +6 -0
- package/dist/upload/index.d.ts +4 -0
- package/dist/upload/index.d.ts.map +1 -0
- package/dist/upload/index.js +3 -0
- package/dist/upload/mime.d.ts +24 -0
- package/dist/upload/mime.d.ts.map +1 -0
- package/dist/upload/mime.js +351 -0
- package/dist/upload/upload-chunk.d.ts +58 -0
- package/dist/upload/upload-chunk.d.ts.map +1 -0
- package/dist/upload/upload-chunk.js +277 -0
- package/dist/upload/upload-server.d.ts +221 -0
- package/dist/upload/upload-server.d.ts.map +1 -0
- package/dist/upload/upload-server.js +181 -0
- package/dist/upload/upload-strategy-negotiator.d.ts +148 -0
- package/dist/upload/upload-strategy-negotiator.d.ts.map +1 -0
- package/dist/upload/upload-strategy-negotiator.js +217 -0
- package/dist/upload/upload-url.d.ts +68 -0
- package/dist/upload/upload-url.d.ts.map +1 -0
- package/dist/upload/upload-url.js +142 -0
- package/dist/upload/write-to-store.d.ts +77 -0
- package/dist/upload/write-to-store.d.ts.map +1 -0
- package/dist/upload/write-to-store.js +147 -0
- package/dist/upload-DLuICjpP.cjs +1 -0
- package/dist/upload-DaXO34dE.js +2 -0
- package/dist/upload-DaXO34dE.js.map +1 -0
- package/dist/uploadista-error-BB-Wdiz9.cjs +22 -0
- package/dist/uploadista-error-BVsVxqvz.js +23 -0
- package/dist/uploadista-error-BVsVxqvz.js.map +1 -0
- package/dist/uploadista-error-CwxYs4EB.d.ts +52 -0
- package/dist/uploadista-error-CwxYs4EB.d.ts.map +1 -0
- package/dist/uploadista-error-kKlhLRhY.d.cts +52 -0
- package/dist/uploadista-error-kKlhLRhY.d.cts.map +1 -0
- package/dist/utils/checksum.d.ts +22 -0
- package/dist/utils/checksum.d.ts.map +1 -0
- package/dist/utils/checksum.js +49 -0
- package/dist/utils/debounce.cjs +1 -0
- package/dist/utils/debounce.d.cts +38 -0
- package/dist/utils/debounce.d.cts.map +1 -0
- package/dist/utils/debounce.d.ts +36 -0
- package/dist/utils/debounce.d.ts.map +1 -0
- package/dist/utils/debounce.js +73 -0
- package/dist/utils/generate-id.cjs +1 -0
- package/dist/utils/generate-id.d.cts +2 -0
- package/dist/utils/generate-id.d.ts +32 -0
- package/dist/utils/generate-id.d.ts.map +1 -0
- package/dist/utils/generate-id.js +23 -0
- package/dist/utils/md5.cjs +1 -0
- package/dist/utils/md5.d.cts +73 -0
- package/dist/utils/md5.d.cts.map +1 -0
- package/dist/utils/md5.d.ts +71 -0
- package/dist/utils/md5.d.ts.map +1 -0
- package/dist/utils/md5.js +417 -0
- package/dist/utils/md5.js.map +1 -0
- package/dist/utils/once.cjs +1 -0
- package/dist/utils/once.d.cts +25 -0
- package/dist/utils/once.d.cts.map +1 -0
- package/dist/utils/once.d.ts +21 -0
- package/dist/utils/once.d.ts.map +1 -0
- package/dist/utils/once.js +54 -0
- package/dist/utils/once.js.map +1 -0
- package/dist/utils/semaphore.cjs +1 -0
- package/dist/utils/semaphore.d.cts +3 -0
- package/dist/utils/semaphore.d.ts +78 -0
- package/dist/utils/semaphore.d.ts.map +1 -0
- package/dist/utils/semaphore.js +134 -0
- package/dist/utils/throttle.cjs +1 -0
- package/dist/utils/throttle.d.cts +24 -0
- package/dist/utils/throttle.d.cts.map +1 -0
- package/dist/utils/throttle.d.ts +18 -0
- package/dist/utils/throttle.d.ts.map +1 -0
- package/dist/utils/throttle.js +20 -0
- package/dist/utils/throttle.js.map +1 -0
- package/docs/PARALLEL_EXECUTION.md +206 -0
- package/docs/PARALLEL_EXECUTION_QUICKSTART.md +142 -0
- package/docs/PARALLEL_EXECUTION_REFACTOR.md +184 -0
- package/package.json +80 -0
- package/src/errors/__tests__/uploadista-error.test.ts +251 -0
- package/src/errors/index.ts +2 -0
- package/src/errors/uploadista-error.ts +394 -0
- package/src/flow/README.md +352 -0
- package/src/flow/edge.test.ts +146 -0
- package/src/flow/edge.ts +60 -0
- package/src/flow/event.ts +229 -0
- package/src/flow/flow-server.ts +1089 -0
- package/src/flow/flow.ts +1050 -0
- package/src/flow/index.ts +28 -0
- package/src/flow/node.ts +249 -0
- package/src/flow/nodes/index.ts +8 -0
- package/src/flow/nodes/input-node.ts +296 -0
- package/src/flow/nodes/storage-node.ts +128 -0
- package/src/flow/nodes/transform-node.ts +154 -0
- package/src/flow/parallel-scheduler.ts +259 -0
- package/src/flow/plugins/credential-provider.ts +48 -0
- package/src/flow/plugins/image-ai-plugin.ts +66 -0
- package/src/flow/plugins/image-plugin.ts +60 -0
- package/src/flow/plugins/types/describe-image-node.ts +16 -0
- package/src/flow/plugins/types/index.ts +9 -0
- package/src/flow/plugins/types/optimize-node.ts +18 -0
- package/src/flow/plugins/types/remove-background-node.ts +18 -0
- package/src/flow/plugins/types/resize-node.ts +26 -0
- package/src/flow/plugins/zip-plugin.ts +69 -0
- package/src/flow/typed-flow.ts +279 -0
- package/src/flow/types/flow-file.ts +51 -0
- package/src/flow/types/flow-job.ts +138 -0
- package/src/flow/types/flow-types.ts +353 -0
- package/src/flow/types/index.ts +6 -0
- package/src/flow/types/run-args.ts +40 -0
- package/src/flow/types/type-validator.ts +204 -0
- package/src/flow/utils/resolve-upload-metadata.ts +48 -0
- package/src/index.ts +5 -0
- package/src/logger/logger.ts +14 -0
- package/src/streams/stream-limiter.test.ts +150 -0
- package/src/streams/stream-limiter.ts +75 -0
- package/src/types/data-store.ts +427 -0
- package/src/types/event-broadcaster.ts +39 -0
- package/src/types/event-emitter.ts +349 -0
- package/src/types/index.ts +9 -0
- package/src/types/input-file.ts +107 -0
- package/src/types/kv-store.ts +375 -0
- package/src/types/middleware.ts +54 -0
- package/src/types/upload-event.ts +75 -0
- package/src/types/upload-file.ts +139 -0
- package/src/types/websocket.ts +65 -0
- package/src/upload/convert-to-stream.ts +48 -0
- package/src/upload/create-upload.ts +214 -0
- package/src/upload/index.ts +3 -0
- package/src/upload/mime.ts +436 -0
- package/src/upload/upload-chunk.ts +364 -0
- package/src/upload/upload-server.ts +390 -0
- package/src/upload/upload-strategy-negotiator.ts +316 -0
- package/src/upload/upload-url.ts +173 -0
- package/src/upload/write-to-store.ts +211 -0
- package/src/utils/checksum.ts +61 -0
- package/src/utils/debounce.test.ts +126 -0
- package/src/utils/debounce.ts +89 -0
- package/src/utils/generate-id.ts +35 -0
- package/src/utils/md5.ts +475 -0
- package/src/utils/once.test.ts +83 -0
- package/src/utils/once.ts +63 -0
- package/src/utils/throttle.test.ts +101 -0
- package/src/utils/throttle.ts +29 -0
- package/tsconfig.json +20 -0
- package/tsconfig.tsbuildinfo +1 -0
- package/tsdown.config.ts +25 -0
- package/vitest.config.ts +15 -0

package/src/streams/stream-limiter.test.ts
@@ -0,0 +1,150 @@
+import { describe, expect, it, vi } from "vitest";
+import { UploadistaError } from "../errors/uploadista-error";
+import { streamLimiter } from "./stream-limiter";
+
+// Helper function to convert stream to array
+async function streamToArray(
+  stream: ReadableStream<Uint8Array>,
+): Promise<Uint8Array[]> {
+  const reader = stream.getReader();
+  const chunks: Uint8Array[] = [];
+
+  try {
+    while (true) {
+      const { done, value } = await reader.read();
+      if (done) break;
+      chunks.push(value);
+    }
+  } finally {
+    reader.releaseLock();
+  }
+
+  return chunks;
+}
+
+// Helper function to write data to stream
+async function writeToStream(
+  stream: WritableStream<Uint8Array>,
+  data: Uint8Array[],
+): Promise<void> {
+  const writer = stream.getWriter();
+
+  try {
+    for (const chunk of data) {
+      await writer.write(chunk);
+    }
+  } finally {
+    await writer.close();
+  }
+}
+
+describe("streamLimiter", () => {
+  it("should create a TransformStream", () => {
+    const limiter = streamLimiter({ maxSize: 1024 });
+    expect(limiter).toBeInstanceOf(TransformStream);
+  });
+
+  it("should pass through data within size limit", async () => {
+    const limiter = streamLimiter({ maxSize: 1024 });
+    const testData = [new Uint8Array([1, 2, 3, 4, 5])];
+
+    const writePromise = writeToStream(limiter.writable, testData);
+    const readPromise = streamToArray(limiter.readable);
+
+    const [, result] = await Promise.all([writePromise, readPromise]);
+
+    expect(result).toHaveLength(1);
+    expect(result[0]).toEqual(testData[0]);
+  });
+
+  it("should call onData callback for each chunk", async () => {
+    const onDataCallback = vi.fn();
+    const limiter = streamLimiter({ maxSize: 1024, onData: onDataCallback });
+
+    const testData = [new Uint8Array([1, 2, 3])];
+
+    const writePromise = writeToStream(limiter.writable, testData);
+    const readPromise = streamToArray(limiter.readable);
+
+    await Promise.all([writePromise, readPromise]);
+
+    expect(onDataCallback).toHaveBeenCalledOnce();
+    expect(onDataCallback).toHaveBeenCalledWith(testData[0].byteLength);
+  });
+
+  it("should work without onData callback", async () => {
+    const limiter = streamLimiter({ maxSize: 1024 });
+    const testData = [new Uint8Array([1, 2, 3])];
+
+    const writePromise = writeToStream(limiter.writable, testData);
+    const readPromise = streamToArray(limiter.readable);
+
+    const [, result] = await Promise.all([writePromise, readPromise]);
+
+    expect(result).toHaveLength(1);
+    expect(result[0]).toEqual(testData[0]);
+  });
+
+  it("should handle zero-sized chunks", async () => {
+    const onDataCallback = vi.fn();
+    const limiter = streamLimiter({ maxSize: 1024, onData: onDataCallback });
+
+    const testData = [new Uint8Array(0)];
+
+    const writePromise = writeToStream(limiter.writable, testData);
+    const readPromise = streamToArray(limiter.readable);
+
+    const [, result] = await Promise.all([writePromise, readPromise]);
+
+    expect(onDataCallback).toHaveBeenCalledWith(testData[0].byteLength);
+    expect(result).toHaveLength(1);
+    expect(result[0]).toEqual(testData[0]);
+  });
+
+  it("should error when size limit is exceeded", async () => {
+    const limiter = streamLimiter({ maxSize: 10 });
+    const testData = [new Uint8Array(15).fill(1)]; // Exceeds limit of 10
+
+    const writePromise = writeToStream(limiter.writable, testData);
+    const readPromise = streamToArray(limiter.readable);
+
+    await expect(
+      Promise.all([writePromise, readPromise]),
+    ).rejects.toBeInstanceOf(UploadistaError);
+  });
+
+  it("should track cumulative size across multiple chunks", async () => {
+    const limiter = streamLimiter({ maxSize: 10 });
+
+    // First chunk (5 bytes) + second chunk (3 bytes) = 8 bytes (within limit)
+    // Third chunk (3 bytes) would make total 11 bytes (exceeds limit)
+    const testData = [
+      new Uint8Array(5).fill(1),
+      new Uint8Array(3).fill(2),
+      new Uint8Array(3).fill(3),
+    ];
+
+    const writePromise = writeToStream(limiter.writable, testData);
+    const readPromise = streamToArray(limiter.readable);
+
+    await expect(
+      Promise.all([writePromise, readPromise]),
+    ).rejects.toBeInstanceOf(UploadistaError);
+  });
+
+  it("should error with correct UploadistaError code", async () => {
+    const limiter = streamLimiter({ maxSize: 5 });
+    const testData = [new Uint8Array(10)];
+
+    try {
+      await Promise.all([
+        writeToStream(limiter.writable, testData),
+        streamToArray(limiter.readable),
+      ]);
+      expect.fail("Should have thrown an error");
+    } catch (error) {
+      expect(error).toBeInstanceOf(UploadistaError);
+      expect((error as UploadistaError).code).toBe("ERR_MAX_SIZE_EXCEEDED");
+    }
+  });
+});

package/src/streams/stream-limiter.ts
@@ -0,0 +1,75 @@
+import { Effect, Ref, Stream } from "effect";
+import { UploadistaError } from "../errors/uploadista-error";
+
+type StreamLimiterOptions = {
+  maxSize: number;
+  onData?: (chunkSize: number) => void;
+};
+
+export function streamLimiter({
+  maxSize,
+  onData,
+}: StreamLimiterOptions): TransformStream {
+  let currentSize = 0;
+
+  return new TransformStream({
+    transform(chunk, controller) {
+      currentSize += chunk.length;
+
+      onData?.(chunk.byteLength);
+
+      if (currentSize > maxSize) {
+        controller.error(UploadistaError.fromCode("ERR_MAX_SIZE_EXCEEDED"));
+      } else {
+        controller.enqueue(chunk);
+      }
+    },
+  });
+}
+
+/**
+ * Effect-based stream limiter that restricts stream size
+ */
+export const StreamLimiterEffect = {
+  /**
+   * Creates an Effect-based stream limiter
+   * @param maxSize - Maximum allowed stream size in bytes
+   * @param onData - Optional callback for data progress tracking
+   * @returns Effect stream transformation that enforces size limits
+   */
+  limit:
+    ({ maxSize, onData }: StreamLimiterOptions) =>
+    <A>(stream: Stream.Stream<A, UploadistaError>) => {
+      return Effect.gen(function* () {
+        const currentSize = yield* Ref.make(0);
+
+        return stream.pipe(
+          Stream.mapEffect((chunk) =>
+            Effect.gen(function* () {
+              const chunkSize =
+                chunk instanceof Uint8Array ? chunk.byteLength : 0;
+              yield* Ref.update(currentSize, (size) => size + chunkSize);
+
+              onData?.(chunkSize);
+              const size = yield* Ref.get(currentSize);
+              if (size > maxSize) {
+                yield* UploadistaError.fromCode(
+                  "ERR_MAX_SIZE_EXCEEDED",
+                ).toEffect();
+              }
+
+              return chunk;
+            }),
+          ),
+        );
+      }).pipe(Stream.unwrap);
+    },
+
+  /**
+   * Creates a legacy TransformStream-based limiter
+   * @param options - Stream limiter options
+   * @returns TransformStream that enforces size limits
+   */
+  createTransformStream: (options: StreamLimiterOptions): TransformStream =>
+    streamLimiter(options),
+};
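
This file exposes the same size guard in two shapes: `streamLimiter` for Web Streams pipelines and `StreamLimiterEffect.limit` for Effect `Stream` pipelines. The following sketch shows how a caller might wire both up; it is illustrative only and not part of the published diff, and the `@uploadista/core` import path is an assumption rather than something this diff confirms.

```typescript
import { Effect, Stream } from "effect";
// Assumed import path; the package's exports map may expose these under a subpath.
import { streamLimiter, StreamLimiterEffect } from "@uploadista/core";

// Web Streams variant: cap a body at 1 MiB and report per-chunk progress.
const source = new Blob([new Uint8Array(2048)]).stream();
const limited = source.pipeThrough(
  streamLimiter({ maxSize: 1024 * 1024, onData: (n) => console.log(`+${n} bytes`) }),
);

// Effect variant: the stream fails with ERR_MAX_SIZE_EXCEEDED once the
// cumulative byte count exceeds maxSize (24 > 16 here).
const collected = Stream.fromIterable([new Uint8Array(8), new Uint8Array(16)]).pipe(
  StreamLimiterEffect.limit({ maxSize: 16 }),
  Stream.runCollect,
);
Effect.runPromise(collected).then(console.log, console.error);
```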

package/src/types/data-store.ts
@@ -0,0 +1,427 @@
+import { Context, Effect, Layer, type Stream } from "effect";
+import { UploadistaError } from "../errors/uploadista-error";
+import type { UploadFileKVStore } from "./kv-store";
+import type { UploadFile } from "./upload-file";
+
+/**
+ * Options for writing data to a DataStore.
+ *
+ * @property file_id - Unique identifier for the file being written
+ * @property stream - Stream of byte chunks to write to storage
+ * @property offset - Byte offset where writing should begin (for resumable uploads)
+ */
+export type DataStoreWriteOptions = {
+  file_id: string;
+  stream: Stream.Stream<Uint8Array, UploadistaError>;
+  offset: number;
+};
+
+/**
+ * Upload strategy type indicating how chunks are uploaded.
+ *
+ * - `single`: Upload file in a single request (traditional upload)
+ * - `parallel`: Upload file chunks in parallel (for large files)
+ */
+export type UploadStrategy = "single" | "parallel";
+
+/**
+ * Capabilities and constraints of a DataStore implementation.
+ *
+ * This type describes what features a storage backend supports and what
+ * limitations it has. Use this to determine the optimal upload strategy
+ * and validate client requests.
+ *
+ * @property supportsParallelUploads - Can upload chunks in parallel (e.g., S3 multipart)
+ * @property supportsConcatenation - Can concatenate multiple uploads into one file
+ * @property supportsDeferredLength - Can start upload without knowing final size
+ * @property supportsResumableUploads - Can resume interrupted uploads from last offset
+ * @property supportsTransactionalUploads - Guarantees atomic upload success/failure
+ * @property maxConcurrentUploads - Maximum parallel upload parts (if parallel supported)
+ * @property minChunkSize - Minimum size in bytes for each chunk (except last)
+ * @property maxChunkSize - Maximum size in bytes for each chunk
+ * @property maxParts - Maximum number of parts in a multipart upload
+ * @property optimalChunkSize - Recommended chunk size for best performance
+ * @property requiresOrderedChunks - Must receive chunks in sequential order
+ * @property requiresMimeTypeValidation - Validates file MIME type matches declaration
+ * @property maxValidationSize - Maximum file size for MIME type validation
+ *
+ * @example
+ * ```typescript
+ * const capabilities = dataStore.getCapabilities();
+ *
+ * if (capabilities.supportsParallelUploads && fileSize > 10_000_000) {
+ *   // Use parallel upload for large files
+ *   const chunkSize = capabilities.optimalChunkSize || 5_242_880; // 5MB default
+ *   uploadInParallel(file, chunkSize);
+ * } else {
+ *   // Use single upload
+ *   uploadAsSingleChunk(file);
+ * }
+ * ```
+ */
+export type DataStoreCapabilities = {
+  supportsParallelUploads: boolean;
+  supportsConcatenation: boolean;
+  supportsDeferredLength: boolean;
+  supportsResumableUploads: boolean;
+  supportsTransactionalUploads: boolean;
+  maxConcurrentUploads?: number;
+  minChunkSize?: number;
+  maxChunkSize?: number;
+  maxParts?: number;
+  optimalChunkSize?: number;
+  requiresOrderedChunks: boolean;
+  requiresMimeTypeValidation?: boolean;
+  maxValidationSize?: number;
+};
+
+/**
+ * Core interface for all storage backend implementations.
+ *
+ * DataStore abstracts file storage operations across different backends
+ * (S3, Azure Blob, GCS, local filesystem, etc.). All storage adapters
+ * must implement this interface.
+ *
+ * @template TData - The data type stored (typically UploadFile)
+ *
+ * @property bucket - Optional storage bucket or container name
+ * @property path - Optional base path prefix for all stored files
+ * @property create - Creates a new file record in storage
+ * @property remove - Deletes a file from storage
+ * @property read - Reads complete file contents as bytes
+ * @property write - Writes data stream to storage at specified offset
+ * @property deleteExpired - Optional cleanup of expired files
+ * @property getCapabilities - Returns storage backend capabilities
+ * @property validateUploadStrategy - Validates if strategy is supported
+ *
+ * @example
+ * ```typescript
+ * // Implement a custom DataStore
+ * const myDataStore: DataStore<UploadFile> = {
+ *   bucket: "my-uploads",
+ *   path: "files/",
+ *
+ *   create: (file) => Effect.gen(function* () {
+ *     // Store file metadata
+ *     yield* saveMetadata(file);
+ *     return file;
+ *   }),
+ *
+ *   write: ({ file_id, stream, offset }, { onProgress }) => Effect.gen(function* () {
+ *     // Write chunks to storage
+ *     let bytesWritten = offset;
+ *     yield* Stream.runForEach(stream, (chunk) => Effect.sync(() => {
+ *       writeChunk(file_id, chunk, bytesWritten);
+ *       bytesWritten += chunk.byteLength;
+ *       onProgress?.(chunk.byteLength);
+ *     }));
+ *     return bytesWritten;
+ *   }),
+ *
+ *   read: (file_id) => Effect.gen(function* () {
+ *     // Read complete file
+ *     const data = yield* readFromStorage(file_id);
+ *     return data;
+ *   }),
+ *
+ *   remove: (file_id) => Effect.gen(function* () {
+ *     yield* deleteFromStorage(file_id);
+ *   }),
+ *
+ *   getCapabilities: () => ({
+ *     supportsParallelUploads: true,
+ *     supportsConcatenation: false,
+ *     supportsDeferredLength: true,
+ *     supportsResumableUploads: true,
+ *     supportsTransactionalUploads: false,
+ *     maxConcurrentUploads: 10,
+ *     optimalChunkSize: 5_242_880, // 5MB
+ *     requiresOrderedChunks: false,
+ *   }),
+ *
+ *   validateUploadStrategy: (strategy) =>
+ *     Effect.succeed(strategy === "parallel" || strategy === "single"),
+ * };
+ * ```
+ */
+export type DataStore<TData = unknown> = {
+  readonly bucket?: string;
+  readonly path?: string;
+  readonly create: (file: TData) => Effect.Effect<TData, UploadistaError>;
+  readonly remove: (file_id: string) => Effect.Effect<void, UploadistaError>;
+  readonly read: (
+    file_id: string,
+  ) => Effect.Effect<Uint8Array, UploadistaError>;
+  readonly write: (
+    options: DataStoreWriteOptions,
+    dependencies: {
+      onProgress?: (chunkSize: number) => void;
+    },
+  ) => Effect.Effect<number, UploadistaError>;
+  readonly deleteExpired?: Effect.Effect<number, UploadistaError>;
+  readonly getCapabilities: () => DataStoreCapabilities;
+  readonly validateUploadStrategy: (
+    strategy: UploadStrategy,
+  ) => Effect.Effect<boolean, never>;
+};
+
+/**
+ * Effect-TS context tag for UploadFile DataStore.
+ *
+ * Use this tag to access the primary DataStore in an Effect context.
+ * This is the standard storage backend for uploaded files.
+ *
+ * @example
+ * ```typescript
+ * const uploadEffect = Effect.gen(function* () {
+ *   const dataStore = yield* UploadFileDataStore;
+ *   const file = yield* dataStore.create(uploadFile);
+ *   return file;
+ * });
+ * ```
+ */
+export class UploadFileDataStore extends Context.Tag("UploadFileDataStore")<
+  UploadFileDataStore,
+  DataStore<UploadFile>
+>() {}
+
+/**
+ * Effect-TS context tag for buffered/temporary DataStore.
+ *
+ * This is an optional storage backend used for temporary or intermediate files
+ * during flow processing. Not all implementations provide a buffered store.
+ *
+ * @example
+ * ```typescript
+ * const processEffect = Effect.gen(function* () {
+ *   const bufferedStore = yield* BufferedUploadFileDataStore;
+ *   // Store intermediate processing results
+ *   const tempFile = yield* bufferedStore.create(intermediateFile);
+ *   return tempFile;
+ * });
+ * ```
+ */
+export class BufferedUploadFileDataStore extends Context.Tag(
+  "BufferedUploadFileDataStore",
+)<BufferedUploadFileDataStore, DataStore<UploadFile>>() {}
+
+/**
+ * Service interface for managing multiple DataStore instances.
+ *
+ * This allows routing files to different storage backends based on
+ * storageId (e.g., different S3 buckets, Azure containers, or storage tiers).
+ *
+ * @property getDataStore - Retrieves the appropriate DataStore for a given storage ID
+ * @property bufferedDataStore - Optional temporary storage for intermediate files
+ */
+export type UploadFileDataStoresShape = {
+  getDataStore: (
+    storageId: string,
+    clientId: string | null,
+  ) => Effect.Effect<DataStore<UploadFile>, UploadistaError>;
+  bufferedDataStore: Effect.Effect<
+    DataStore<UploadFile> | undefined,
+    UploadistaError
+  >;
+};
+
+/**
+ * Effect-TS context tag for the DataStore routing service.
+ *
+ * Provides access to multiple DataStore instances with routing logic.
+ *
+ * @example
+ * ```typescript
+ * const uploadEffect = Effect.gen(function* () {
+ *   const dataStores = yield* UploadFileDataStores;
+ *   // Route to specific storage based on storageId
+ *   const dataStore = yield* dataStores.getDataStore("s3-production", clientId);
+ *   const file = yield* dataStore.create(uploadFile);
+ *   return file;
+ * });
+ * ```
+ */
+export class UploadFileDataStores extends Context.Tag("UploadFileDataStores")<
+  UploadFileDataStores,
+  UploadFileDataStoresShape
+>() {}
+
+/**
+ * Simplified DataStore configuration for easy setup.
+ *
+ * This type allows flexible configuration:
+ * - Single DataStore instance
+ * - Multiple named stores with routing
+ * - Effect that resolves to a DataStore
+ * - Pre-built Effect Layer
+ *
+ * @example
+ * ```typescript
+ * // Single store
+ * const config: DataStoreConfig = s3DataStore;
+ *
+ * // Multiple stores with routing
+ * const config: DataStoreConfig = {
+ *   stores: {
+ *     "s3-prod": s3ProdStore,
+ *     "s3-dev": s3DevStore,
+ *     "local": localFileStore,
+ *   },
+ *   default: "s3-prod"
+ * };
+ *
+ * // Effect that creates a store
+ * const config: DataStoreConfig = Effect.gen(function* () {
+ *   const kvStore = yield* UploadFileKVStore;
+ *   return createS3Store(kvStore);
+ * });
+ *
+ * // Pre-built Layer
+ * const config: DataStoreConfig = Layer.succeed(UploadFileDataStores, {...});
+ * ```
+ */
+export type DataStoreConfig =
+  | DataStore<UploadFile>
+  | Effect.Effect<DataStore<UploadFile>, never, UploadFileKVStore>
+  | {
+      stores: Record<
+        string,
+        | DataStore<UploadFile>
+        | Effect.Effect<DataStore<UploadFile>, never, UploadFileKVStore>
+      >;
+      default?: string;
+    }
+  | Layer.Layer<UploadFileDataStores, never, UploadFileKVStore>;
+
+/**
+ * Type guard to check if a value is a DataStore instance.
+ *
+ * @param config - The value to check
+ * @returns True if the value is a DataStore
+ *
+ * @example
+ * ```typescript
+ * if (isDataStore(config)) {
+ *   const capabilities = config.getCapabilities();
+ * }
+ * ```
+ */
+export const isDataStore = (
+  config: DataStoreConfig,
+): config is DataStore<UploadFile> => {
+  return "create" in config && "write" in config;
+};
+
+/**
+ * Creates an Effect Layer from simplified DataStoreConfig.
+ *
+ * This function converts any DataStoreConfig format into a proper Effect Layer
+ * that can be provided to the UploadFileDataStores context tag.
+ *
+ * It handles:
+ * - Single DataStore: Wraps in a Layer that always returns that store
+ * - Multiple stores: Creates routing logic with optional default
+ * - Effect<DataStore>: Executes the Effect and wraps the result
+ * - Layer: Returns as-is
+ *
+ * @param config - The DataStore configuration
+ * @returns A Layer that provides UploadFileDataStores service
+ *
+ * @example
+ * ```typescript
+ * // Create from single store
+ * const layer = await createDataStoreLayer(s3DataStore);
+ *
+ * // Create from multiple stores
+ * const layer = await createDataStoreLayer({
+ *   stores: {
+ *     "production": s3Store,
+ *     "development": localStore,
+ *   },
+ *   default: "development"
+ * });
+ *
+ * // Use the layer
+ * const program = Effect.gen(function* () {
+ *   const stores = yield* UploadFileDataStores;
+ *   const store = yield* stores.getDataStore("production", null);
+ *   return store;
+ * }).pipe(Effect.provide(layer));
+ * ```
+ */
+export const createDataStoreLayer = async (
+  config: DataStoreConfig,
+): Promise<Layer.Layer<UploadFileDataStores, never, UploadFileKVStore>> => {
+  // Already a Layer, return as-is
+  if (Layer.isLayer(config)) {
+    return config as Layer.Layer<
+      UploadFileDataStores,
+      never,
+      UploadFileKVStore
+    >;
+  }
+
+  // Check if it's an Effect
+  if (Effect.isEffect(config)) {
+    return Layer.effect(
+      UploadFileDataStores,
+      Effect.gen(function* () {
+        const dataStore = config as Effect.Effect<
+          DataStore<UploadFile>,
+          never,
+          UploadFileKVStore
+        >;
+        const resolvedStore = yield* dataStore;
+        return {
+          getDataStore: (_storageId: string) => Effect.succeed(resolvedStore),
+          bufferedDataStore: Effect.succeed(undefined),
+        };
+      }),
+    );
+  }
+
+  // Single store (most common case)
+  if (isDataStore(config)) {
+    const store = config as DataStore<UploadFile>;
+    return Layer.succeed(UploadFileDataStores, {
+      getDataStore: (_storageId: string) => Effect.succeed(store),
+      bufferedDataStore: Effect.succeed(undefined),
+    });
+  }
+
+  // Multiple stores with routing
+  const multiConfig = config as {
+    stores: Record<
+      string,
+      DataStore<UploadFile> | Effect.Effect<DataStore<UploadFile>>
+    >;
+    default?: string;
+  };
+
+  const defaultKey = multiConfig.default || Object.keys(multiConfig.stores)[0];
+
+  // Resolve any Effects in the stores
+  const resolvedStores: Record<string, DataStore<UploadFile>> = {};
+  for (const [key, storeOrEffect] of Object.entries(multiConfig.stores)) {
+    if ("pipe" in storeOrEffect && !("create" in storeOrEffect)) {
+      resolvedStores[key] = await Effect.runPromise(
+        storeOrEffect as Effect.Effect<DataStore<UploadFile>>,
+      );
+    } else {
+      resolvedStores[key] = storeOrEffect as DataStore<UploadFile>;
+    }
+  }
+
+  return Layer.succeed(UploadFileDataStores, {
+    getDataStore: (storageId: string) => {
+      const store =
+        resolvedStores[storageId] ||
+        (defaultKey ? resolvedStores[defaultKey] : undefined);
+      if (store) {
+        return Effect.succeed(store);
+      }
+      return Effect.fail(UploadistaError.fromCode("FILE_NOT_FOUND"));
+    },
+    bufferedDataStore: Effect.succeed(undefined),
+  });
+};
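
To make the `DataStore` contract above concrete, here is a minimal in-memory implementation sketch that type-checks against the interface as published. It is illustrative only and not part of this diff: the import path and root re-exports are assumptions, and a real adapter would persist bytes instead of buffering them in a Map.

```typescript
import { Effect, Stream } from "effect";
// Assumed import path; the diff only shows the type definitions themselves.
import type { DataStore, UploadFile } from "@uploadista/core";

const chunks = new Map<string, Uint8Array[]>();

export const memoryDataStore: DataStore<UploadFile> = {
  create: (file) => Effect.succeed(file),
  remove: (file_id) => Effect.sync(() => void chunks.delete(file_id)),
  read: (file_id) =>
    Effect.sync(() => {
      // Concatenate all buffered chunks for the file.
      const parts = chunks.get(file_id) ?? [];
      const out = new Uint8Array(parts.reduce((n, c) => n + c.byteLength, 0));
      let pos = 0;
      for (const part of parts) {
        out.set(part, pos);
        pos += part.byteLength;
      }
      return out;
    }),
  write: ({ file_id, stream, offset }, { onProgress }) =>
    Effect.gen(function* () {
      let written = offset;
      yield* Stream.runForEach(stream, (chunk) =>
        Effect.sync(() => {
          chunks.set(file_id, [...(chunks.get(file_id) ?? []), chunk]);
          written += chunk.byteLength;
          onProgress?.(chunk.byteLength);
        }),
      );
      return written;
    }),
  getCapabilities: () => ({
    supportsParallelUploads: false,
    supportsConcatenation: false,
    supportsDeferredLength: true,
    supportsResumableUploads: true,
    supportsTransactionalUploads: false,
    requiresOrderedChunks: true,
  }),
  validateUploadStrategy: (strategy) => Effect.succeed(strategy === "single"),
};
```

Passing such a store (or a `{ stores, default }` record of stores) to `createDataStoreLayer` would then produce the `UploadFileDataStores` layer with the routing behavior documented above.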

package/src/types/event-broadcaster.ts
@@ -0,0 +1,39 @@
+import { Context, type Effect } from "effect";
+import type { UploadistaError } from "../errors";
+
+/**
+ * Event broadcaster interface for pub/sub messaging across distributed instances.
+ * Used by WebSocketManager to broadcast upload events to all connected instances.
+ */
+export interface EventBroadcaster {
+  /**
+   * Publish a message to a channel
+   */
+  readonly publish: (
+    channel: string,
+    message: string,
+  ) => Effect.Effect<void, UploadistaError>;
+
+  /**
+   * Subscribe to messages on a channel
+   */
+  readonly subscribe: (
+    channel: string,
+    handler: (message: string) => void,
+  ) => Effect.Effect<void, UploadistaError>;
+
+  /**
+   * Unsubscribe from a channel (optional - not all implementations may support)
+   */
+  readonly unsubscribe?: (
+    channel: string,
+  ) => Effect.Effect<void, UploadistaError>;
+}
+
+/**
+ * Context tag for EventBroadcaster service
+ */
+export class EventBroadcasterService extends Context.Tag("EventBroadcaster")<
+  EventBroadcasterService,
+  EventBroadcaster
+>() {}