@uploadista/core 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +5 -0
- package/.turbo/turbo-check.log +231 -0
- package/.turbo/turbo-format.log +5 -0
- package/LICENSE +21 -0
- package/README.md +1120 -0
- package/dist/chunk-CUT6urMc.cjs +1 -0
- package/dist/debounce-C2SeqcxD.js +2 -0
- package/dist/debounce-C2SeqcxD.js.map +1 -0
- package/dist/debounce-LZK7yS7Z.cjs +1 -0
- package/dist/errors/index.cjs +1 -0
- package/dist/errors/index.d.cts +3 -0
- package/dist/errors/index.d.ts +3 -0
- package/dist/errors/index.d.ts.map +1 -0
- package/dist/errors/index.js +2 -0
- package/dist/errors/uploadista-error.d.ts +209 -0
- package/dist/errors/uploadista-error.d.ts.map +1 -0
- package/dist/errors/uploadista-error.js +322 -0
- package/dist/errors-8i_aMxOE.js +1 -0
- package/dist/errors-CRm1FHHT.cjs +0 -0
- package/dist/flow/edge.d.ts +47 -0
- package/dist/flow/edge.d.ts.map +1 -0
- package/dist/flow/edge.js +40 -0
- package/dist/flow/event.d.ts +206 -0
- package/dist/flow/event.d.ts.map +1 -0
- package/dist/flow/event.js +53 -0
- package/dist/flow/flow-server.d.ts +223 -0
- package/dist/flow/flow-server.d.ts.map +1 -0
- package/dist/flow/flow-server.js +614 -0
- package/dist/flow/flow.d.ts +238 -0
- package/dist/flow/flow.d.ts.map +1 -0
- package/dist/flow/flow.js +629 -0
- package/dist/flow/index.cjs +1 -0
- package/dist/flow/index.d.cts +6 -0
- package/dist/flow/index.d.ts +24 -0
- package/dist/flow/index.d.ts.map +1 -0
- package/dist/flow/index.js +24 -0
- package/dist/flow/node.d.ts +136 -0
- package/dist/flow/node.d.ts.map +1 -0
- package/dist/flow/node.js +153 -0
- package/dist/flow/nodes/index.d.ts +8 -0
- package/dist/flow/nodes/index.d.ts.map +1 -0
- package/dist/flow/nodes/index.js +7 -0
- package/dist/flow/nodes/input-node.d.ts +78 -0
- package/dist/flow/nodes/input-node.d.ts.map +1 -0
- package/dist/flow/nodes/input-node.js +233 -0
- package/dist/flow/nodes/storage-node.d.ts +67 -0
- package/dist/flow/nodes/storage-node.d.ts.map +1 -0
- package/dist/flow/nodes/storage-node.js +94 -0
- package/dist/flow/nodes/streaming-input-node.d.ts +69 -0
- package/dist/flow/nodes/streaming-input-node.d.ts.map +1 -0
- package/dist/flow/nodes/streaming-input-node.js +156 -0
- package/dist/flow/nodes/transform-node.d.ts +85 -0
- package/dist/flow/nodes/transform-node.d.ts.map +1 -0
- package/dist/flow/nodes/transform-node.js +107 -0
- package/dist/flow/parallel-scheduler.d.ts +175 -0
- package/dist/flow/parallel-scheduler.d.ts.map +1 -0
- package/dist/flow/parallel-scheduler.js +193 -0
- package/dist/flow/plugins/credential-provider.d.ts +47 -0
- package/dist/flow/plugins/credential-provider.d.ts.map +1 -0
- package/dist/flow/plugins/credential-provider.js +24 -0
- package/dist/flow/plugins/image-ai-plugin.d.ts +61 -0
- package/dist/flow/plugins/image-ai-plugin.d.ts.map +1 -0
- package/dist/flow/plugins/image-ai-plugin.js +21 -0
- package/dist/flow/plugins/image-plugin.d.ts +52 -0
- package/dist/flow/plugins/image-plugin.d.ts.map +1 -0
- package/dist/flow/plugins/image-plugin.js +22 -0
- package/dist/flow/plugins/types/describe-image-node.d.ts +16 -0
- package/dist/flow/plugins/types/describe-image-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/describe-image-node.js +9 -0
- package/dist/flow/plugins/types/index.d.ts +9 -0
- package/dist/flow/plugins/types/index.d.ts.map +1 -0
- package/dist/flow/plugins/types/index.js +8 -0
- package/dist/flow/plugins/types/optimize-node.d.ts +20 -0
- package/dist/flow/plugins/types/optimize-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/optimize-node.js +11 -0
- package/dist/flow/plugins/types/remove-background-node.d.ts +16 -0
- package/dist/flow/plugins/types/remove-background-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/remove-background-node.js +9 -0
- package/dist/flow/plugins/types/resize-node.d.ts +21 -0
- package/dist/flow/plugins/types/resize-node.d.ts.map +1 -0
- package/dist/flow/plugins/types/resize-node.js +16 -0
- package/dist/flow/plugins/zip-plugin.d.ts +62 -0
- package/dist/flow/plugins/zip-plugin.d.ts.map +1 -0
- package/dist/flow/plugins/zip-plugin.js +21 -0
- package/dist/flow/typed-flow.d.ts +90 -0
- package/dist/flow/typed-flow.d.ts.map +1 -0
- package/dist/flow/typed-flow.js +59 -0
- package/dist/flow/types/flow-file.d.ts +45 -0
- package/dist/flow/types/flow-file.d.ts.map +1 -0
- package/dist/flow/types/flow-file.js +27 -0
- package/dist/flow/types/flow-job.d.ts +118 -0
- package/dist/flow/types/flow-job.d.ts.map +1 -0
- package/dist/flow/types/flow-job.js +11 -0
- package/dist/flow/types/flow-types.d.ts +321 -0
- package/dist/flow/types/flow-types.d.ts.map +1 -0
- package/dist/flow/types/flow-types.js +52 -0
- package/dist/flow/types/index.d.ts +4 -0
- package/dist/flow/types/index.d.ts.map +1 -0
- package/dist/flow/types/index.js +3 -0
- package/dist/flow/types/run-args.d.ts +38 -0
- package/dist/flow/types/run-args.d.ts.map +1 -0
- package/dist/flow/types/run-args.js +30 -0
- package/dist/flow/types/type-validator.d.ts +26 -0
- package/dist/flow/types/type-validator.d.ts.map +1 -0
- package/dist/flow/types/type-validator.js +134 -0
- package/dist/flow/utils/resolve-upload-metadata.d.ts +11 -0
- package/dist/flow/utils/resolve-upload-metadata.d.ts.map +1 -0
- package/dist/flow/utils/resolve-upload-metadata.js +28 -0
- package/dist/flow-2zXnEiWL.cjs +1 -0
- package/dist/flow-CRaKy7Vj.js +2 -0
- package/dist/flow-CRaKy7Vj.js.map +1 -0
- package/dist/generate-id-Dm-Vboxq.d.ts +34 -0
- package/dist/generate-id-Dm-Vboxq.d.ts.map +1 -0
- package/dist/generate-id-LjJRLD6N.d.cts +34 -0
- package/dist/generate-id-LjJRLD6N.d.cts.map +1 -0
- package/dist/generate-id-xHp_Z7Cl.cjs +1 -0
- package/dist/generate-id-yohS1ZDk.js +2 -0
- package/dist/generate-id-yohS1ZDk.js.map +1 -0
- package/dist/index-BO8GZlbD.d.cts +1040 -0
- package/dist/index-BO8GZlbD.d.cts.map +1 -0
- package/dist/index-BoGG5KAY.d.ts +1 -0
- package/dist/index-BtBZHVmz.d.cts +1 -0
- package/dist/index-D-CoVpkZ.d.ts +1004 -0
- package/dist/index-D-CoVpkZ.d.ts.map +1 -0
- package/dist/index.cjs +1 -0
- package/dist/index.d.cts +6 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +5 -0
- package/dist/logger/logger.cjs +1 -0
- package/dist/logger/logger.d.cts +8 -0
- package/dist/logger/logger.d.cts.map +1 -0
- package/dist/logger/logger.d.ts +5 -0
- package/dist/logger/logger.d.ts.map +1 -0
- package/dist/logger/logger.js +10 -0
- package/dist/logger/logger.js.map +1 -0
- package/dist/semaphore-0ZwjVpyF.js +2 -0
- package/dist/semaphore-0ZwjVpyF.js.map +1 -0
- package/dist/semaphore-BHprIjFI.d.cts +37 -0
- package/dist/semaphore-BHprIjFI.d.cts.map +1 -0
- package/dist/semaphore-DThupBkc.d.ts +37 -0
- package/dist/semaphore-DThupBkc.d.ts.map +1 -0
- package/dist/semaphore-DVrONiAV.cjs +1 -0
- package/dist/stream-limiter-CoWKv39w.js +2 -0
- package/dist/stream-limiter-CoWKv39w.js.map +1 -0
- package/dist/stream-limiter-JgOwmkMa.cjs +1 -0
- package/dist/streams/multi-stream.cjs +1 -0
- package/dist/streams/multi-stream.d.cts +91 -0
- package/dist/streams/multi-stream.d.cts.map +1 -0
- package/dist/streams/multi-stream.d.ts +86 -0
- package/dist/streams/multi-stream.d.ts.map +1 -0
- package/dist/streams/multi-stream.js +149 -0
- package/dist/streams/multi-stream.js.map +1 -0
- package/dist/streams/stream-limiter.cjs +1 -0
- package/dist/streams/stream-limiter.d.cts +36 -0
- package/dist/streams/stream-limiter.d.cts.map +1 -0
- package/dist/streams/stream-limiter.d.ts +27 -0
- package/dist/streams/stream-limiter.d.ts.map +1 -0
- package/dist/streams/stream-limiter.js +49 -0
- package/dist/streams/stream-splitter.cjs +1 -0
- package/dist/streams/stream-splitter.d.cts +68 -0
- package/dist/streams/stream-splitter.d.cts.map +1 -0
- package/dist/streams/stream-splitter.d.ts +51 -0
- package/dist/streams/stream-splitter.d.ts.map +1 -0
- package/dist/streams/stream-splitter.js +175 -0
- package/dist/streams/stream-splitter.js.map +1 -0
- package/dist/types/data-store-registry.d.ts +13 -0
- package/dist/types/data-store-registry.d.ts.map +1 -0
- package/dist/types/data-store-registry.js +4 -0
- package/dist/types/data-store.d.ts +316 -0
- package/dist/types/data-store.d.ts.map +1 -0
- package/dist/types/data-store.js +157 -0
- package/dist/types/event-broadcaster.d.ts +28 -0
- package/dist/types/event-broadcaster.d.ts.map +1 -0
- package/dist/types/event-broadcaster.js +6 -0
- package/dist/types/event-emitter.d.ts +378 -0
- package/dist/types/event-emitter.d.ts.map +1 -0
- package/dist/types/event-emitter.js +223 -0
- package/dist/types/index.cjs +1 -0
- package/dist/types/index.d.cts +6 -0
- package/dist/types/index.d.ts +10 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +9 -0
- package/dist/types/input-file.d.ts +104 -0
- package/dist/types/input-file.d.ts.map +1 -0
- package/dist/types/input-file.js +27 -0
- package/dist/types/kv-store.d.ts +281 -0
- package/dist/types/kv-store.d.ts.map +1 -0
- package/dist/types/kv-store.js +234 -0
- package/dist/types/middleware.d.ts +17 -0
- package/dist/types/middleware.d.ts.map +1 -0
- package/dist/types/middleware.js +21 -0
- package/dist/types/upload-event.d.ts +105 -0
- package/dist/types/upload-event.d.ts.map +1 -0
- package/dist/types/upload-event.js +71 -0
- package/dist/types/upload-file.d.ts +136 -0
- package/dist/types/upload-file.d.ts.map +1 -0
- package/dist/types/upload-file.js +34 -0
- package/dist/types/websocket.d.ts +144 -0
- package/dist/types/websocket.d.ts.map +1 -0
- package/dist/types/websocket.js +40 -0
- package/dist/types-BT-cvi7T.cjs +1 -0
- package/dist/types-DhU2j-XF.js +2 -0
- package/dist/types-DhU2j-XF.js.map +1 -0
- package/dist/upload/convert-to-stream.d.ts +38 -0
- package/dist/upload/convert-to-stream.d.ts.map +1 -0
- package/dist/upload/convert-to-stream.js +43 -0
- package/dist/upload/convert-upload-to-flow-file.d.ts +14 -0
- package/dist/upload/convert-upload-to-flow-file.d.ts.map +1 -0
- package/dist/upload/convert-upload-to-flow-file.js +21 -0
- package/dist/upload/create-upload.d.ts +68 -0
- package/dist/upload/create-upload.d.ts.map +1 -0
- package/dist/upload/create-upload.js +157 -0
- package/dist/upload/index.cjs +1 -0
- package/dist/upload/index.d.cts +6 -0
- package/dist/upload/index.d.ts +4 -0
- package/dist/upload/index.d.ts.map +1 -0
- package/dist/upload/index.js +3 -0
- package/dist/upload/mime.d.ts +24 -0
- package/dist/upload/mime.d.ts.map +1 -0
- package/dist/upload/mime.js +351 -0
- package/dist/upload/upload-chunk.d.ts +58 -0
- package/dist/upload/upload-chunk.d.ts.map +1 -0
- package/dist/upload/upload-chunk.js +277 -0
- package/dist/upload/upload-server.d.ts +221 -0
- package/dist/upload/upload-server.d.ts.map +1 -0
- package/dist/upload/upload-server.js +181 -0
- package/dist/upload/upload-strategy-negotiator.d.ts +148 -0
- package/dist/upload/upload-strategy-negotiator.d.ts.map +1 -0
- package/dist/upload/upload-strategy-negotiator.js +217 -0
- package/dist/upload/upload-url.d.ts +68 -0
- package/dist/upload/upload-url.d.ts.map +1 -0
- package/dist/upload/upload-url.js +142 -0
- package/dist/upload/write-to-store.d.ts +77 -0
- package/dist/upload/write-to-store.d.ts.map +1 -0
- package/dist/upload/write-to-store.js +147 -0
- package/dist/upload-DLuICjpP.cjs +1 -0
- package/dist/upload-DaXO34dE.js +2 -0
- package/dist/upload-DaXO34dE.js.map +1 -0
- package/dist/uploadista-error-BB-Wdiz9.cjs +22 -0
- package/dist/uploadista-error-BVsVxqvz.js +23 -0
- package/dist/uploadista-error-BVsVxqvz.js.map +1 -0
- package/dist/uploadista-error-CwxYs4EB.d.ts +52 -0
- package/dist/uploadista-error-CwxYs4EB.d.ts.map +1 -0
- package/dist/uploadista-error-kKlhLRhY.d.cts +52 -0
- package/dist/uploadista-error-kKlhLRhY.d.cts.map +1 -0
- package/dist/utils/checksum.d.ts +22 -0
- package/dist/utils/checksum.d.ts.map +1 -0
- package/dist/utils/checksum.js +49 -0
- package/dist/utils/debounce.cjs +1 -0
- package/dist/utils/debounce.d.cts +38 -0
- package/dist/utils/debounce.d.cts.map +1 -0
- package/dist/utils/debounce.d.ts +36 -0
- package/dist/utils/debounce.d.ts.map +1 -0
- package/dist/utils/debounce.js +73 -0
- package/dist/utils/generate-id.cjs +1 -0
- package/dist/utils/generate-id.d.cts +2 -0
- package/dist/utils/generate-id.d.ts +32 -0
- package/dist/utils/generate-id.d.ts.map +1 -0
- package/dist/utils/generate-id.js +23 -0
- package/dist/utils/md5.cjs +1 -0
- package/dist/utils/md5.d.cts +73 -0
- package/dist/utils/md5.d.cts.map +1 -0
- package/dist/utils/md5.d.ts +71 -0
- package/dist/utils/md5.d.ts.map +1 -0
- package/dist/utils/md5.js +417 -0
- package/dist/utils/md5.js.map +1 -0
- package/dist/utils/once.cjs +1 -0
- package/dist/utils/once.d.cts +25 -0
- package/dist/utils/once.d.cts.map +1 -0
- package/dist/utils/once.d.ts +21 -0
- package/dist/utils/once.d.ts.map +1 -0
- package/dist/utils/once.js +54 -0
- package/dist/utils/once.js.map +1 -0
- package/dist/utils/semaphore.cjs +1 -0
- package/dist/utils/semaphore.d.cts +3 -0
- package/dist/utils/semaphore.d.ts +78 -0
- package/dist/utils/semaphore.d.ts.map +1 -0
- package/dist/utils/semaphore.js +134 -0
- package/dist/utils/throttle.cjs +1 -0
- package/dist/utils/throttle.d.cts +24 -0
- package/dist/utils/throttle.d.cts.map +1 -0
- package/dist/utils/throttle.d.ts +18 -0
- package/dist/utils/throttle.d.ts.map +1 -0
- package/dist/utils/throttle.js +20 -0
- package/dist/utils/throttle.js.map +1 -0
- package/docs/PARALLEL_EXECUTION.md +206 -0
- package/docs/PARALLEL_EXECUTION_QUICKSTART.md +142 -0
- package/docs/PARALLEL_EXECUTION_REFACTOR.md +184 -0
- package/package.json +80 -0
- package/src/errors/__tests__/uploadista-error.test.ts +251 -0
- package/src/errors/index.ts +2 -0
- package/src/errors/uploadista-error.ts +394 -0
- package/src/flow/README.md +352 -0
- package/src/flow/edge.test.ts +146 -0
- package/src/flow/edge.ts +60 -0
- package/src/flow/event.ts +229 -0
- package/src/flow/flow-server.ts +1089 -0
- package/src/flow/flow.ts +1050 -0
- package/src/flow/index.ts +28 -0
- package/src/flow/node.ts +249 -0
- package/src/flow/nodes/index.ts +8 -0
- package/src/flow/nodes/input-node.ts +296 -0
- package/src/flow/nodes/storage-node.ts +128 -0
- package/src/flow/nodes/transform-node.ts +154 -0
- package/src/flow/parallel-scheduler.ts +259 -0
- package/src/flow/plugins/credential-provider.ts +48 -0
- package/src/flow/plugins/image-ai-plugin.ts +66 -0
- package/src/flow/plugins/image-plugin.ts +60 -0
- package/src/flow/plugins/types/describe-image-node.ts +16 -0
- package/src/flow/plugins/types/index.ts +9 -0
- package/src/flow/plugins/types/optimize-node.ts +18 -0
- package/src/flow/plugins/types/remove-background-node.ts +18 -0
- package/src/flow/plugins/types/resize-node.ts +26 -0
- package/src/flow/plugins/zip-plugin.ts +69 -0
- package/src/flow/typed-flow.ts +279 -0
- package/src/flow/types/flow-file.ts +51 -0
- package/src/flow/types/flow-job.ts +138 -0
- package/src/flow/types/flow-types.ts +353 -0
- package/src/flow/types/index.ts +6 -0
- package/src/flow/types/run-args.ts +40 -0
- package/src/flow/types/type-validator.ts +204 -0
- package/src/flow/utils/resolve-upload-metadata.ts +48 -0
- package/src/index.ts +5 -0
- package/src/logger/logger.ts +14 -0
- package/src/streams/stream-limiter.test.ts +150 -0
- package/src/streams/stream-limiter.ts +75 -0
- package/src/types/data-store.ts +427 -0
- package/src/types/event-broadcaster.ts +39 -0
- package/src/types/event-emitter.ts +349 -0
- package/src/types/index.ts +9 -0
- package/src/types/input-file.ts +107 -0
- package/src/types/kv-store.ts +375 -0
- package/src/types/middleware.ts +54 -0
- package/src/types/upload-event.ts +75 -0
- package/src/types/upload-file.ts +139 -0
- package/src/types/websocket.ts +65 -0
- package/src/upload/convert-to-stream.ts +48 -0
- package/src/upload/create-upload.ts +214 -0
- package/src/upload/index.ts +3 -0
- package/src/upload/mime.ts +436 -0
- package/src/upload/upload-chunk.ts +364 -0
- package/src/upload/upload-server.ts +390 -0
- package/src/upload/upload-strategy-negotiator.ts +316 -0
- package/src/upload/upload-url.ts +173 -0
- package/src/upload/write-to-store.ts +211 -0
- package/src/utils/checksum.ts +61 -0
- package/src/utils/debounce.test.ts +126 -0
- package/src/utils/debounce.ts +89 -0
- package/src/utils/generate-id.ts +35 -0
- package/src/utils/md5.ts +475 -0
- package/src/utils/once.test.ts +83 -0
- package/src/utils/once.ts +63 -0
- package/src/utils/throttle.test.ts +101 -0
- package/src/utils/throttle.ts +29 -0
- package/tsconfig.json +20 -0
- package/tsconfig.tsbuildinfo +1 -0
- package/tsdown.config.ts +25 -0
- package/vitest.config.ts +15 -0
package/dist/streams/stream-limiter.cjs
@@ -0,0 +1 @@
+require(`../uploadista-error-BB-Wdiz9.cjs`);const e=require(`../stream-limiter-JgOwmkMa.cjs`);exports.StreamLimiterEffect=e.StreamLimiterEffect,exports.streamLimiter=e.streamLimiter;
package/dist/streams/stream-limiter.d.cts
@@ -0,0 +1,36 @@
+import { UploadistaError } from "../uploadista-error-kKlhLRhY.cjs";
+import { Stream } from "effect";
+
+//#region src/streams/stream-limiter.d.ts
+type StreamLimiterOptions = {
+  maxSize: number;
+  onData?: (chunkSize: number) => void;
+};
+declare function streamLimiter({
+  maxSize,
+  onData
+}: StreamLimiterOptions): TransformStream;
+/**
+ * Effect-based stream limiter that restricts stream size
+ */
+declare const StreamLimiterEffect: {
+  /**
+   * Creates an Effect-based stream limiter
+   * @param maxSize - Maximum allowed stream size in bytes
+   * @param onData - Optional callback for data progress tracking
+   * @returns Effect stream transformation that enforces size limits
+   */
+  limit: ({
+    maxSize,
+    onData
+  }: StreamLimiterOptions) => <A>(stream: Stream.Stream<A, UploadistaError>) => Stream.Stream<A, UploadistaError, never>;
+  /**
+   * Creates a legacy TransformStream-based limiter
+   * @param options - Stream limiter options
+   * @returns TransformStream that enforces size limits
+   */
+  createTransformStream: (options: StreamLimiterOptions) => TransformStream;
+};
+//#endregion
+export { StreamLimiterEffect, streamLimiter };
+//# sourceMappingURL=stream-limiter.d.cts.map
package/dist/streams/stream-limiter.d.cts.map
@@ -0,0 +1 @@
+{"version":3,"file":"stream-limiter.d.cts","names":[],"sources":["../../src/streams/stream-limiter.ts"],"sourcesContent":[],"mappings":";;;;KAGK,oBAAA;;EAAA,MAAA,CAAA,EAAA,CAAA,SAAA,EAAA,MAAoB,EAAA,GAAA,IAAA;AAKzB,CAAA;AAA6B,iBAAb,aAAA,CAAa;EAAA,OAAA;EAAA;AAAA,CAAA,EAG1B,oBAH0B,CAAA,EAGH,eAHG;;;;AAGH,cAqBb,mBArBa,EAAA;EAAe;AAqBzC;;;;;OAS8B,EAAA,CAAA;IAAA,OAAA;IAAA;EAAA,CAAA,EADJ,oBACI,EAAA,GAAA,CAAA,CAAA,CAAA,CAAA,MAAA,EAAd,MAAA,CAAO,MAAO,CAAA,CAAA,EAAG,eAAH,CAAA,EAAA,GAAmB,MAAA,CAAA,MAAnB,CAAmB,CAAnB,EAAmB,eAAnB,EAAA,KAAA,CAAA;;;;;;uBA+BK,EAAA,CAAA,OAAA,EAAA,oBAAA,EAAA,GAAuB,eAAvB"}
package/dist/streams/stream-limiter.d.ts
@@ -0,0 +1,27 @@
+import { Stream } from "effect";
+import { UploadistaError } from "../errors/uploadista-error";
+type StreamLimiterOptions = {
+    maxSize: number;
+    onData?: (chunkSize: number) => void;
+};
+export declare function streamLimiter({ maxSize, onData, }: StreamLimiterOptions): TransformStream;
+/**
+ * Effect-based stream limiter that restricts stream size
+ */
+export declare const StreamLimiterEffect: {
+    /**
+     * Creates an Effect-based stream limiter
+     * @param maxSize - Maximum allowed stream size in bytes
+     * @param onData - Optional callback for data progress tracking
+     * @returns Effect stream transformation that enforces size limits
+     */
+    limit: ({ maxSize, onData }: StreamLimiterOptions) => <A>(stream: Stream.Stream<A, UploadistaError>) => Stream.Stream<A, UploadistaError, never>;
+    /**
+     * Creates a legacy TransformStream-based limiter
+     * @param options - Stream limiter options
+     * @returns TransformStream that enforces size limits
+     */
+    createTransformStream: (options: StreamLimiterOptions) => TransformStream;
+};
+export {};
+//# sourceMappingURL=stream-limiter.d.ts.map
package/dist/streams/stream-limiter.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"stream-limiter.d.ts","sourceRoot":"","sources":["../../src/streams/stream-limiter.ts"],"names":[],"mappings":"AAAA,OAAO,EAAe,MAAM,EAAE,MAAM,QAAQ,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,4BAA4B,CAAC;AAE7D,KAAK,oBAAoB,GAAG;IAC1B,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,CAAC,EAAE,CAAC,SAAS,EAAE,MAAM,KAAK,IAAI,CAAC;CACtC,CAAC;AAEF,wBAAgB,aAAa,CAAC,EAC5B,OAAO,EACP,MAAM,GACP,EAAE,oBAAoB,GAAG,eAAe,CAgBxC;AAED;;GAEG;AACH,eAAO,MAAM,mBAAmB;IAC9B;;;;;OAKG;iCAEqB,oBAAoB,MACzC,CAAC,EAAE,QAAQ,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE,eAAe,CAAC;IA0B/C;;;;OAIG;qCAC8B,oBAAoB,KAAG,eAAe;CAExE,CAAC"}
package/dist/streams/stream-limiter.js
@@ -0,0 +1,49 @@
+import { Effect, Ref, Stream } from "effect";
+import { UploadistaError } from "../errors/uploadista-error";
+export function streamLimiter({ maxSize, onData, }) {
+    let currentSize = 0;
+    return new TransformStream({
+        transform(chunk, controller) {
+            currentSize += chunk.length;
+            onData?.(chunk.byteLength);
+            if (currentSize > maxSize) {
+                controller.error(UploadistaError.fromCode("ERR_MAX_SIZE_EXCEEDED"));
+            }
+            else {
+                controller.enqueue(chunk);
+            }
+        },
+    });
+}
+/**
+ * Effect-based stream limiter that restricts stream size
+ */
+export const StreamLimiterEffect = {
+    /**
+     * Creates an Effect-based stream limiter
+     * @param maxSize - Maximum allowed stream size in bytes
+     * @param onData - Optional callback for data progress tracking
+     * @returns Effect stream transformation that enforces size limits
+     */
+    limit: ({ maxSize, onData }) => (stream) => {
+        return Effect.gen(function* () {
+            const currentSize = yield* Ref.make(0);
+            return stream.pipe(Stream.mapEffect((chunk) => Effect.gen(function* () {
+                const chunkSize = chunk instanceof Uint8Array ? chunk.byteLength : 0;
+                yield* Ref.update(currentSize, (size) => size + chunkSize);
+                onData?.(chunkSize);
+                const size = yield* Ref.get(currentSize);
+                if (size > maxSize) {
+                    yield* UploadistaError.fromCode("ERR_MAX_SIZE_EXCEEDED").toEffect();
+                }
+                return chunk;
+            })));
+        }).pipe(Stream.unwrap);
+    },
+    /**
+     * Creates a legacy TransformStream-based limiter
+     * @param options - Stream limiter options
+     * @returns TransformStream that enforces size limits
+     */
+    createTransformStream: (options) => streamLimiter(options),
+};
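For orientation, here is a minimal usage sketch of the two limiter entry points above. It is not part of the package diff: the `@uploadista/core` import specifier is an assumption (the export map is not shown in this diff), and the byte sizes are illustrative.

```ts
import { Effect, Stream } from "effect";
// Assumed import path; this diff does not show the package's export map.
import { StreamLimiterEffect, streamLimiter } from "@uploadista/core";

// TransformStream path: chunks pass through until the running total exceeds
// maxSize, at which point the pipeline errors with ERR_MAX_SIZE_EXCEEDED.
const limited = new Blob([new Uint8Array(2048)])
  .stream()
  .pipeThrough(streamLimiter({ maxSize: 1024 * 1024 }));

// Effect path: `limit` is a Stream combinator that keeps the running total in
// a Ref and fails the stream with the same error code on overflow.
const program = Stream.fromIterable([new Uint8Array(512), new Uint8Array(700)]).pipe(
  StreamLimiterEffect.limit({
    maxSize: 1024, // 512 + 700 > 1024, so this stream fails
    onData: (chunkSize) => console.log(`received ${chunkSize} bytes`),
  }),
  Stream.runDrain,
);

Effect.runPromise(program).catch(console.error);
```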
package/dist/streams/stream-splitter.cjs
@@ -0,0 +1 @@
+const e=require(`../chunk-CUT6urMc.cjs`),t=require(`../uploadista-error-BB-Wdiz9.cjs`);let n=require(`effect`);n=e.__toESM(n);function r(e){let t=new Uint8Array(e.reduce((e,t)=>e+t.length,0)),n=0;for(let r of e)t.set(r,n),n+=r.length;return t}async function i(e,{onData:t,onChunkError:n,onChunkStarted:i,onChunkCompleted:a,options:{chunkSize:o}}){let s=e.getReader(),c=1,l=[],u=0;try{for(;;){let{done:e,value:n}=await s.read();if(t?.(n?.byteLength??0),e){if(u>0){i(c);let e=r(l);a({partNumber:c,stream:e,size:e.length})}break}let d=n;for(;d.length>0;){let e=o-u;if(d.length<=e){l.push(d),u+=d.length;break}else{let t=d.slice(0,e);l.push(t),u+=t.length,i(c);let n=r(l);a({partNumber:c,stream:n,size:n.length}),c+=1,l=[],u=0,d=d.slice(e)}}}}catch(e){throw n(c,e),e}finally{s.releaseLock()}}const a={split:({chunkSize:e,onChunkStarted:i,onChunkCompleted:a,onChunkError:o,onData:s})=>c=>n.Effect.gen(function*(){let l=yield*n.Ref.make({partNumber:1,buffer:[],bufferSize:0});return c.pipe(n.Stream.mapEffect(t=>n.Effect.gen(function*(){s&&(yield*n.Effect.sync(()=>s(t.byteLength)));let o=t,c=[];for(;o.length>0;){let t=yield*n.Ref.get(l),s=e-t.bufferSize;if(o.length<=s){yield*n.Ref.update(l,e=>({...e,buffer:[...e.buffer,o],bufferSize:e.bufferSize+o.length}));break}else{let e=o.slice(0,s),u=[...t.buffer,e];yield*n.Effect.sync(()=>i(t.partNumber));let d=r(u),f={partNumber:t.partNumber,stream:d,size:d.length};yield*n.Effect.sync(()=>a(f)),c.push(f),yield*n.Ref.set(l,{partNumber:t.partNumber+1,buffer:[],bufferSize:0}),o=o.slice(s)}}return c})),n.Stream.flatMap(e=>n.Stream.fromIterable(e)),n.Stream.concat(n.Stream.fromEffect((()=>n.Effect.gen(function*(){let e=yield*n.Ref.get(l);if(e.bufferSize===0)return[];yield*n.Effect.sync(()=>i(e.partNumber));let t=r(e.buffer),o={partNumber:e.partNumber,stream:t,size:t.length};return yield*n.Effect.sync(()=>a(o)),[o]}))()).pipe(n.Stream.flatMap(e=>n.Stream.fromIterable(e)))),n.Stream.tapError(e=>n.Effect.sync(()=>o(1,e))),n.Stream.mapError(e=>new t.UploadistaError({code:`UNKNOWN_ERROR`,status:500,body:`Stream splitting failed`,details:`Stream splitting failed: ${String(e)}`})))}).pipe(n.Stream.unwrap),fromReadableStream:(e,r)=>n.Effect.tryPromise({try:()=>i(e,r),catch:e=>new t.UploadistaError({code:`UNKNOWN_ERROR`,status:500,body:`Stream splitter failed`,details:`Stream splitter failed: ${String(e)}`})})};exports.StreamSplitterEffect=a,exports.streamSplitter=i;
package/dist/streams/stream-splitter.d.cts
@@ -0,0 +1,68 @@
+import { UploadistaError } from "../uploadista-error-kKlhLRhY.cjs";
+import { Effect, Stream } from "effect";
+
+//#region src/streams/stream-splitter.d.ts
+type Options = {
+  chunkSize: number;
+};
+type ChunkInfo = {
+  partNumber: number;
+  stream: Uint8Array;
+  size: number;
+};
+type EventHandlers = {
+  onData?: (chunkSize: number) => void;
+  onChunkStarted: (partNumber: number) => void;
+  onChunkCompleted: (chunkInfo: ChunkInfo) => void;
+  onChunkError: (partNumber: number, error: unknown) => void;
+};
+declare function streamSplitter(readStream: ReadableStream<Uint8Array>, {
+  onData,
+  onChunkError,
+  onChunkStarted,
+  onChunkCompleted,
+  options: {
+    chunkSize
+  }
+}: EventHandlers & {
+  options: Options;
+}): Promise<void>;
+/**
+ * Effect-based stream splitter that splits a stream into chunks of specified size
+ */
+declare const StreamSplitterEffect: {
+  /**
+   * Splits an Effect stream into chunks of specified size using idiomatic Effect-TS patterns
+   * @param chunkSize - Maximum size for each chunk in bytes
+   * @param onChunkStarted - Callback when a new chunk starts processing
+   * @param onChunkCompleted - Callback when a chunk is completed
+   * @param onChunkError - Callback when a chunk encounters an error
+   * @param onData - Optional callback for data progress
+   * @returns Effect that splits the input stream
+   */
+  split: ({
+    chunkSize,
+    onChunkStarted,
+    onChunkCompleted,
+    onChunkError,
+    onData
+  }: {
+    chunkSize: number;
+    onChunkStarted: (partNumber: number) => void;
+    onChunkCompleted: (chunkInfo: ChunkInfo) => void;
+    onChunkError: (partNumber: number, error: unknown) => void;
+    onData?: (chunkSize: number) => void;
+  }) => <E>(stream: Stream.Stream<Uint8Array, E>) => Stream.Stream<ChunkInfo, E | UploadistaError>;
+  /**
+   * Creates a legacy stream splitter effect from ReadableStream
+   * @param readStream - Input ReadableStream
+   * @param handlers - Event handlers for chunk processing
+   * @returns Effect that processes the stream
+   */
+  fromReadableStream: (readStream: ReadableStream<Uint8Array>, handlers: EventHandlers & {
+    options: Options;
+  }) => Effect.Effect<void, UploadistaError>;
+};
+//#endregion
+export { ChunkInfo, StreamSplitterEffect, streamSplitter };
+//# sourceMappingURL=stream-splitter.d.cts.map
package/dist/streams/stream-splitter.d.cts.map
@@ -0,0 +1 @@
+{"version":3,"file":"stream-splitter.d.cts","names":[],"sources":["../../src/streams/stream-splitter.ts"],"sourcesContent":[],"mappings":";;;;KAGK,OAAA;;AAFwD,CAAA;AAMjD,KAAA,SAAA,GAAS;EAMhB,UAAA,EAAA,MAAa;EAiBI,MAAA,EArBZ,UAqB0B;EAAA,IAAA,EAAA,MAAA;;KAjB/B,aAAA,GAkBS;QAEV,CAAA,EAAA,CAAA,SAAA,EAAA,MAAA,EAAA,GAAA,IAAA;gBACA,EAAA,CAAA,UAAA,EAAA,MAAA,EAAA,GAAA,IAAA;kBACA,EAAA,CAAA,SAAA,EAnB4B,SAmB5B,EAAA,GAAA,IAAA;cACA,EAAA,CAAA,UAAA,EAAA,MAAA,EAAA,KAAA,EAAA,OAAA,EAAA,GAAA,IAAA;;AAEC,iBARiB,cAAA,CAQjB,UAAA,EAPS,cAOT,CAPwB,UAOxB,CAAA,EAAA;EAAA,MAAA;EAAA,YAAA;EAAA,cAAA;EAAA,gBAAA;EAAA,OAAA,EAAA;IAAA;EAAA;CAAA,EAAA,aAAA,GAAA;SAA2B,EAAA,OAAA;IAC7B,OAAA,CAAA,IAAA,CAAA;;AAyEH;;cAAa;;;;;;;;;;OA0BmB,EAAA,CAAA;IAAA,SAAA;IAAA,cAAA;IAAA,gBAAA;IAAA,YAAA;IAAA;GAAA,EAAA;IAAI,SAAA,EAAA,MAAA;IAA7B,cAAO,EAAA,CAAA,UAAA,EAAA,MAAA,EAAA,GAAA,IAAA;IAmHiB,gBAAA,EAAA,CAAA,SAAA,EAzHK,SAyHL,EAAA,GAAA,IAAA;IAAf,YAAA,EAAA,CAAA,UAAA,EAAA,MAAA,EAAA,KAAA,EAAA,OAAA,EAAA,GAAA,IAAA;IACF,MAAA,CAAA,EAAA,CAAA,SAAA,EAAA,MAAA,EAAA,GAAA,IAAA;KAA2B,GAAA,CAAA,CAAA,CAAA,CAAA,MAAA,EArH3B,MAAA,CAAO,MAqHoB,CArHb,UAqHa,EArHD,CAqHC,CAAA,EAAA,GApHlC,MAAA,CAAO,MAoH2B,CApHpB,SAoHoB,EApHT,CAoHS,GApHL,eAoHK,CAAA;;;;;;;mCADzB,eAAe,uBACjB;aAA2B;QACpC,MAAA,CAAO,aAAa"}
package/dist/streams/stream-splitter.d.ts
@@ -0,0 +1,51 @@
+import { Effect, Stream } from "effect";
+import { UploadistaError } from "../errors/uploadista-error";
+type Options = {
+    chunkSize: number;
+};
+export type ChunkInfo = {
+    partNumber: number;
+    stream: Uint8Array;
+    size: number;
+};
+type EventHandlers = {
+    onData?: (chunkSize: number) => void;
+    onChunkStarted: (partNumber: number) => void;
+    onChunkCompleted: (chunkInfo: ChunkInfo) => void;
+    onChunkError: (partNumber: number, error: unknown) => void;
+};
+export declare function streamSplitter(readStream: ReadableStream<Uint8Array>, { onData, onChunkError, onChunkStarted, onChunkCompleted, options: { chunkSize }, }: EventHandlers & {
+    options: Options;
+}): Promise<void>;
+/**
+ * Effect-based stream splitter that splits a stream into chunks of specified size
+ */
+export declare const StreamSplitterEffect: {
+    /**
+     * Splits an Effect stream into chunks of specified size using idiomatic Effect-TS patterns
+     * @param chunkSize - Maximum size for each chunk in bytes
+     * @param onChunkStarted - Callback when a new chunk starts processing
+     * @param onChunkCompleted - Callback when a chunk is completed
+     * @param onChunkError - Callback when a chunk encounters an error
+     * @param onData - Optional callback for data progress
+     * @returns Effect that splits the input stream
+     */
+    split: ({ chunkSize, onChunkStarted, onChunkCompleted, onChunkError, onData, }: {
+        chunkSize: number;
+        onChunkStarted: (partNumber: number) => void;
+        onChunkCompleted: (chunkInfo: ChunkInfo) => void;
+        onChunkError: (partNumber: number, error: unknown) => void;
+        onData?: (chunkSize: number) => void;
+    }) => <E>(stream: Stream.Stream<Uint8Array, E>) => Stream.Stream<ChunkInfo, E | UploadistaError>;
+    /**
+     * Creates a legacy stream splitter effect from ReadableStream
+     * @param readStream - Input ReadableStream
+     * @param handlers - Event handlers for chunk processing
+     * @returns Effect that processes the stream
+     */
+    fromReadableStream: (readStream: ReadableStream<Uint8Array>, handlers: EventHandlers & {
+        options: Options;
+    }) => Effect.Effect<void, UploadistaError>;
+};
+export {};
+//# sourceMappingURL=stream-splitter.d.ts.map
package/dist/streams/stream-splitter.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"stream-splitter.d.ts","sourceRoot":"","sources":["../../src/streams/stream-splitter.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAO,MAAM,EAAE,MAAM,QAAQ,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,4BAA4B,CAAC;AAE7D,KAAK,OAAO,GAAG;IACb,SAAS,EAAE,MAAM,CAAC;CACnB,CAAC;AAEF,MAAM,MAAM,SAAS,GAAG;IACtB,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,UAAU,CAAC;IACnB,IAAI,EAAE,MAAM,CAAC;CACd,CAAC;AAEF,KAAK,aAAa,GAAG;IACnB,MAAM,CAAC,EAAE,CAAC,SAAS,EAAE,MAAM,KAAK,IAAI,CAAC;IACrC,cAAc,EAAE,CAAC,UAAU,EAAE,MAAM,KAAK,IAAI,CAAC;IAC7C,gBAAgB,EAAE,CAAC,SAAS,EAAE,SAAS,KAAK,IAAI,CAAC;IACjD,YAAY,EAAE,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,KAAK,IAAI,CAAC;CAC5D,CAAC;AAYF,wBAAsB,cAAc,CAClC,UAAU,EAAE,cAAc,CAAC,UAAU,CAAC,EACtC,EACE,MAAM,EACN,YAAY,EACZ,cAAc,EACd,gBAAgB,EAChB,OAAO,EAAE,EAAE,SAAS,EAAE,GACvB,EAAE,aAAa,GAAG;IAAE,OAAO,EAAE,OAAO,CAAA;CAAE,GACtC,OAAO,CAAC,IAAI,CAAC,CAoEf;AAED;;GAEG;AACH,eAAO,MAAM,oBAAoB;IAC/B;;;;;;;;OAQG;oFAQE;QACD,SAAS,EAAE,MAAM,CAAC;QAClB,cAAc,EAAE,CAAC,UAAU,EAAE,MAAM,KAAK,IAAI,CAAC;QAC7C,gBAAgB,EAAE,CAAC,SAAS,EAAE,SAAS,KAAK,IAAI,CAAC;QACjD,YAAY,EAAE,CAAC,UAAU,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,KAAK,IAAI,CAAC;QAC3D,MAAM,CAAC,EAAE,CAAC,SAAS,EAAE,MAAM,KAAK,IAAI,CAAC;KACtC,MACA,CAAC,EACA,QAAQ,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE,CAAC,CAAC,KACnC,MAAM,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC,GAAG,eAAe,CAAC;IA4GlD;;;;;OAKG;qCAEW,cAAc,CAAC,UAAU,CAAC,YAC5B,aAAa,GAAG;QAAE,OAAO,EAAE,OAAO,CAAA;KAAE,KAC7C,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,eAAe,CAAC;CAWxC,CAAC"}
package/dist/streams/stream-splitter.js
@@ -0,0 +1,175 @@
+import { Effect, Ref, Stream } from "effect";
+import { UploadistaError } from "../errors/uploadista-error";
+function concatArrayBuffers(chunks) {
+    const result = new Uint8Array(chunks.reduce((a, c) => a + c.length, 0));
+    let offset = 0;
+    for (const chunk of chunks) {
+        result.set(chunk, offset);
+        offset += chunk.length;
+    }
+    return result;
+}
+export async function streamSplitter(readStream, { onData, onChunkError, onChunkStarted, onChunkCompleted, options: { chunkSize }, }) {
+    const reader = readStream.getReader();
+    let part = 1;
+    let currentPartChunks = [];
+    let currentPartSize = 0;
+    try {
+        while (true) {
+            const { done, value } = await reader.read();
+            onData?.(value?.byteLength ?? 0);
+            if (done) {
+                // Process any remaining data
+                if (currentPartSize > 0) {
+                    onChunkStarted(part);
+                    const stream = concatArrayBuffers(currentPartChunks);
+                    onChunkCompleted({
+                        partNumber: part,
+                        stream,
+                        size: stream.length,
+                    });
+                }
+                break;
+            }
+            let remaining = value;
+            while (remaining.length > 0) {
+                const spaceInCurrentPart = chunkSize - currentPartSize;
+                if (remaining.length <= spaceInCurrentPart) {
+                    // All remaining data fits in current part
+                    currentPartChunks.push(remaining);
+                    currentPartSize += remaining.length;
+                    break;
+                }
+                else {
+                    // Need to split the data
+                    const partToTake = remaining.slice(0, spaceInCurrentPart);
+                    currentPartChunks.push(partToTake);
+                    currentPartSize += partToTake.length;
+                    // Complete current part
+                    onChunkStarted(part);
+                    const stream = concatArrayBuffers(currentPartChunks);
+                    onChunkCompleted({
+                        partNumber: part,
+                        stream,
+                        size: stream.length,
+                    });
+                    // Start new part
+                    part += 1;
+                    currentPartChunks = [];
+                    currentPartSize = 0;
+                    remaining = remaining.slice(spaceInCurrentPart);
+                }
+            }
+        }
+    }
+    catch (error) {
+        onChunkError(part, error);
+        throw error;
+    }
+    finally {
+        reader.releaseLock();
+    }
+}
+/**
+ * Effect-based stream splitter that splits a stream into chunks of specified size
+ */
+export const StreamSplitterEffect = {
+    /**
+     * Splits an Effect stream into chunks of specified size using idiomatic Effect-TS patterns
+     * @param chunkSize - Maximum size for each chunk in bytes
+     * @param onChunkStarted - Callback when a new chunk starts processing
+     * @param onChunkCompleted - Callback when a chunk is completed
+     * @param onChunkError - Callback when a chunk encounters an error
+     * @param onData - Optional callback for data progress
+     * @returns Effect that splits the input stream
+     */
+    split: ({ chunkSize, onChunkStarted, onChunkCompleted, onChunkError, onData, }) => (stream) => {
+        return Effect.gen(function* () {
+            const stateRef = yield* Ref.make({
+                partNumber: 1,
+                buffer: [],
+                bufferSize: 0,
+            });
+            const processChunkAndEmitCompleted = (chunk) => Effect.gen(function* () {
+                // Report data progress
+                if (onData) {
+                    yield* Effect.sync(() => onData(chunk.byteLength));
+                }
+                let remaining = chunk;
+                const results = [];
+                while (remaining.length > 0) {
+                    const state = yield* Ref.get(stateRef);
+                    const spaceAvailable = chunkSize - state.bufferSize;
+                    if (remaining.length <= spaceAvailable) {
+                        // All remaining data fits in current part
+                        yield* Ref.update(stateRef, (s) => ({
+                            ...s,
+                            buffer: [...s.buffer, remaining],
+                            bufferSize: s.bufferSize + remaining.length,
+                        }));
+                        break;
+                    }
+                    else {
+                        // Need to split the data - complete current part
+                        const partToTake = remaining.slice(0, spaceAvailable);
+                        const completeBuffer = [...state.buffer, partToTake];
+                        // Execute side effects in Effect context
+                        yield* Effect.sync(() => onChunkStarted(state.partNumber));
+                        const concatenatedStream = concatArrayBuffers(completeBuffer);
+                        const chunkInfo = {
+                            partNumber: state.partNumber,
+                            stream: concatenatedStream,
+                            size: concatenatedStream.length,
+                        };
+                        yield* Effect.sync(() => onChunkCompleted(chunkInfo));
+                        results.push(chunkInfo);
+                        // Start new part
+                        yield* Ref.set(stateRef, {
+                            partNumber: state.partNumber + 1,
+                            buffer: [],
+                            bufferSize: 0,
+                        });
+                        remaining = remaining.slice(spaceAvailable);
+                    }
+                }
+                return results;
+            });
+            const emitFinalChunk = () => Effect.gen(function* () {
+                const finalState = yield* Ref.get(stateRef);
+                if (finalState.bufferSize === 0) {
+                    return [];
+                }
+                yield* Effect.sync(() => onChunkStarted(finalState.partNumber));
+                const concatenatedStream = concatArrayBuffers(finalState.buffer);
+                const chunkInfo = {
+                    partNumber: finalState.partNumber,
+                    stream: concatenatedStream,
+                    size: concatenatedStream.length,
+                };
+                yield* Effect.sync(() => onChunkCompleted(chunkInfo));
+                return [chunkInfo];
+            });
+            return stream.pipe(Stream.mapEffect(processChunkAndEmitCompleted), Stream.flatMap((chunkInfos) => Stream.fromIterable(chunkInfos)), Stream.concat(Stream.fromEffect(emitFinalChunk()).pipe(Stream.flatMap((chunkInfos) => Stream.fromIterable(chunkInfos)))), Stream.tapError((error) => Effect.sync(() => onChunkError(1, error))), Stream.mapError((error) => new UploadistaError({
+                code: "UNKNOWN_ERROR",
+                status: 500,
+                body: "Stream splitting failed",
+                details: `Stream splitting failed: ${String(error)}`,
+            })));
+        }).pipe(Stream.unwrap);
+    },
+    /**
+     * Creates a legacy stream splitter effect from ReadableStream
+     * @param readStream - Input ReadableStream
+     * @param handlers - Event handlers for chunk processing
+     * @returns Effect that processes the stream
+     */
+    fromReadableStream: (readStream, handlers) => Effect.tryPromise({
+        try: () => streamSplitter(readStream, handlers),
+        catch: (error) => new UploadistaError({
+            code: "UNKNOWN_ERROR",
+            status: 500,
+            body: "Stream splitter failed",
+            details: `Stream splitter failed: ${String(error)}`,
+        }),
+    }),
+};
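A companion sketch for the splitter above, re-chunking a small byte stream into fixed-size parts. This is not from the package: the root import is an assumption, and the 5-byte chunk size and handlers are purely illustrative.

```ts
import { Effect, Stream } from "effect";
// Assumed import path and re-exports; only the module itself appears in this diff.
import { StreamSplitterEffect, type ChunkInfo } from "@uploadista/core";

const parts: ChunkInfo[] = [];

// Two incoming buffers (3 bytes + 4 bytes) re-chunked into 5-byte parts:
// part 1 fills to exactly 5 bytes, and the 2-byte remainder is flushed as
// part 2 by the final-chunk concat stage once the input stream ends.
const program = Stream.fromIterable([
  new Uint8Array([1, 2, 3]),
  new Uint8Array([4, 5, 6, 7]),
]).pipe(
  StreamSplitterEffect.split({
    chunkSize: 5,
    onChunkStarted: (partNumber) => console.log(`part ${partNumber} started`),
    onChunkCompleted: (chunk) => parts.push(chunk),
    onChunkError: (partNumber, error) => console.error(partNumber, error),
  }),
  Stream.runDrain,
);

await Effect.runPromise(program);
console.log(parts.map((p) => ({ part: p.partNumber, size: p.size })));
// -> [{ part: 1, size: 5 }, { part: 2, size: 2 }]
```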
package/dist/streams/stream-splitter.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"stream-splitter.js","names":["currentPartChunks: Uint8Array[]","results: ChunkInfo[]","chunkInfo: ChunkInfo"],"sources":["../../src/streams/stream-splitter.ts"],"sourcesContent":["import { Effect, Ref, Stream } from \"effect\";\nimport { UploadistaError } from \"../errors/uploadista-error\";\n\ntype Options = {\n chunkSize: number;\n};\n\nexport type ChunkInfo = {\n partNumber: number;\n stream: Uint8Array;\n size: number;\n};\n\ntype EventHandlers = {\n onData?: (chunkSize: number) => void;\n onChunkStarted: (partNumber: number) => void;\n onChunkCompleted: (chunkInfo: ChunkInfo) => void;\n onChunkError: (partNumber: number, error: unknown) => void;\n};\n\nfunction concatArrayBuffers(chunks: Uint8Array[]): Uint8Array {\n const result = new Uint8Array(chunks.reduce((a, c) => a + c.length, 0));\n let offset = 0;\n for (const chunk of chunks) {\n result.set(chunk, offset);\n offset += chunk.length;\n }\n return result;\n}\n\nexport async function streamSplitter(\n readStream: ReadableStream<Uint8Array>,\n {\n onData,\n onChunkError,\n onChunkStarted,\n onChunkCompleted,\n options: { chunkSize },\n }: EventHandlers & { options: Options },\n): Promise<void> {\n const reader = readStream.getReader();\n\n let part = 1;\n let currentPartChunks: Uint8Array[] = [];\n let currentPartSize = 0;\n\n try {\n while (true) {\n const { done, value } = await reader.read();\n\n onData?.(value?.byteLength ?? 0);\n\n if (done) {\n // Process any remaining data\n if (currentPartSize > 0) {\n onChunkStarted(part);\n\n const stream = concatArrayBuffers(currentPartChunks);\n\n onChunkCompleted({\n partNumber: part,\n stream,\n size: stream.length,\n });\n }\n break;\n }\n\n let remaining = value;\n\n while (remaining.length > 0) {\n const spaceInCurrentPart = chunkSize - currentPartSize;\n\n if (remaining.length <= spaceInCurrentPart) {\n // All remaining data fits in current part\n currentPartChunks.push(remaining);\n currentPartSize += remaining.length;\n break;\n } else {\n // Need to split the data\n const partToTake = remaining.slice(0, spaceInCurrentPart);\n currentPartChunks.push(partToTake);\n currentPartSize += partToTake.length;\n\n // Complete current part\n onChunkStarted(part);\n const stream = concatArrayBuffers(currentPartChunks);\n onChunkCompleted({\n partNumber: part,\n stream,\n size: stream.length,\n });\n\n // Start new part\n part += 1;\n currentPartChunks = [];\n currentPartSize = 0;\n remaining = remaining.slice(spaceInCurrentPart);\n }\n }\n }\n } catch (error) {\n onChunkError(part, error);\n throw error;\n } finally {\n reader.releaseLock();\n }\n}\n\n/**\n * Effect-based stream splitter that splits a stream into chunks of specified size\n */\nexport const StreamSplitterEffect = {\n /**\n * Splits an Effect stream into chunks of specified size using idiomatic Effect-TS patterns\n * @param chunkSize - Maximum size for each chunk in bytes\n * @param onChunkStarted - Callback when a new chunk starts processing\n * @param onChunkCompleted - Callback when a chunk is completed\n * @param onChunkError - Callback when a chunk encounters an error\n * @param onData - Optional callback for data progress\n * @returns Effect that splits the input stream\n */\n split:\n ({\n chunkSize,\n onChunkStarted,\n onChunkCompleted,\n onChunkError,\n onData,\n }: {\n chunkSize: number;\n onChunkStarted: (partNumber: number) => void;\n onChunkCompleted: (chunkInfo: ChunkInfo) => void;\n onChunkError: (partNumber: number, error: unknown) => void;\n onData?: (chunkSize: number) => void;\n }) =>\n <E>(\n stream: Stream.Stream<Uint8Array, E>,\n ): Stream.Stream<ChunkInfo, E | UploadistaError> => {\n return Effect.gen(function* () {\n const stateRef = yield* Ref.make({\n partNumber: 1,\n buffer: [] as Uint8Array[],\n bufferSize: 0,\n });\n\n const processChunkAndEmitCompleted = (\n chunk: Uint8Array,\n ): Effect.Effect<ChunkInfo[], UploadistaError> =>\n Effect.gen(function* () {\n // Report data progress\n if (onData) {\n yield* Effect.sync(() => onData(chunk.byteLength));\n }\n\n let remaining = chunk;\n const results: ChunkInfo[] = [];\n\n while (remaining.length > 0) {\n const state = yield* Ref.get(stateRef);\n const spaceAvailable = chunkSize - state.bufferSize;\n\n if (remaining.length <= spaceAvailable) {\n // All remaining data fits in current part\n yield* Ref.update(stateRef, (s) => ({\n ...s,\n buffer: [...s.buffer, remaining],\n bufferSize: s.bufferSize + remaining.length,\n }));\n break;\n } else {\n // Need to split the data - complete current part\n const partToTake = remaining.slice(0, spaceAvailable);\n const completeBuffer = [...state.buffer, partToTake];\n\n // Execute side effects in Effect context\n yield* Effect.sync(() => onChunkStarted(state.partNumber));\n\n const concatenatedStream = concatArrayBuffers(completeBuffer);\n const chunkInfo: ChunkInfo = {\n partNumber: state.partNumber,\n stream: concatenatedStream,\n size: concatenatedStream.length,\n };\n\n yield* Effect.sync(() => onChunkCompleted(chunkInfo));\n results.push(chunkInfo);\n\n // Start new part\n yield* Ref.set(stateRef, {\n partNumber: state.partNumber + 1,\n buffer: [],\n bufferSize: 0,\n });\n remaining = remaining.slice(spaceAvailable);\n }\n }\n\n return results;\n });\n\n const emitFinalChunk = (): Effect.Effect<\n ChunkInfo[],\n UploadistaError\n > =>\n Effect.gen(function* () {\n const finalState = yield* Ref.get(stateRef);\n if (finalState.bufferSize === 0) {\n return [];\n }\n\n yield* Effect.sync(() => onChunkStarted(finalState.partNumber));\n\n const concatenatedStream = concatArrayBuffers(finalState.buffer);\n const chunkInfo: ChunkInfo = {\n partNumber: finalState.partNumber,\n stream: concatenatedStream,\n size: concatenatedStream.length,\n };\n\n yield* Effect.sync(() => onChunkCompleted(chunkInfo));\n return [chunkInfo];\n });\n\n return stream.pipe(\n Stream.mapEffect(processChunkAndEmitCompleted),\n Stream.flatMap((chunkInfos) => Stream.fromIterable(chunkInfos)),\n Stream.concat(\n Stream.fromEffect(emitFinalChunk()).pipe(\n Stream.flatMap((chunkInfos) => Stream.fromIterable(chunkInfos)),\n ),\n ),\n Stream.tapError((error) => Effect.sync(() => onChunkError(1, error))),\n Stream.mapError(\n (error) =>\n new UploadistaError({\n code: \"UNKNOWN_ERROR\",\n status: 500,\n body: \"Stream splitting failed\",\n details: `Stream splitting failed: ${String(error)}`,\n }),\n ),\n );\n }).pipe(Stream.unwrap);\n },\n\n /**\n * Creates a legacy stream splitter effect from ReadableStream\n * @param readStream - Input ReadableStream\n * @param handlers - Event handlers for chunk processing\n * @returns Effect that processes the stream\n */\n fromReadableStream: (\n readStream: ReadableStream<Uint8Array>,\n handlers: EventHandlers & { options: Options },\n ): Effect.Effect<void, UploadistaError> =>\n Effect.tryPromise({\n try: () => streamSplitter(readStream, handlers),\n catch: (error) =>\n new UploadistaError({\n code: \"UNKNOWN_ERROR\",\n status: 500,\n body: \"Stream splitter failed\",\n details: `Stream splitter failed: ${String(error)}`,\n }),\n }),\n};\n"],"mappings":"uHAoBA,SAAS,EAAmB,EAAkC,CAC5D,IAAM,EAAS,IAAI,WAAW,EAAO,QAAQ,EAAG,IAAM,EAAI,EAAE,OAAQ,EAAE,CAAC,CACnE,EAAS,EACb,IAAK,IAAM,KAAS,EAClB,EAAO,IAAI,EAAO,EAAO,CACzB,GAAU,EAAM,OAElB,OAAO,EAGT,eAAsB,EACpB,EACA,CACE,SACA,eACA,iBACA,mBACA,QAAS,CAAE,cAEE,CACf,IAAM,EAAS,EAAW,WAAW,CAEjC,EAAO,EACPA,EAAkC,EAAE,CACpC,EAAkB,EAEtB,GAAI,CACF,OAAa,CACX,GAAM,CAAE,OAAM,SAAU,MAAM,EAAO,MAAM,CAI3C,GAFA,IAAS,GAAO,YAAc,EAAE,CAE5B,EAAM,CAER,GAAI,EAAkB,EAAG,CACvB,EAAe,EAAK,CAEpB,IAAM,EAAS,EAAmB,EAAkB,CAEpD,EAAiB,CACf,WAAY,EACZ,SACA,KAAM,EAAO,OACd,CAAC,CAEJ,MAGF,IAAI,EAAY,EAEhB,KAAO,EAAU,OAAS,GAAG,CAC3B,IAAM,EAAqB,EAAY,EAEvC,GAAI,EAAU,QAAU,EAAoB,CAE1C,EAAkB,KAAK,EAAU,CACjC,GAAmB,EAAU,OAC7B,UACK,CAEL,IAAM,EAAa,EAAU,MAAM,EAAG,EAAmB,CACzD,EAAkB,KAAK,EAAW,CAClC,GAAmB,EAAW,OAG9B,EAAe,EAAK,CACpB,IAAM,EAAS,EAAmB,EAAkB,CACpD,EAAiB,CACf,WAAY,EACZ,SACA,KAAM,EAAO,OACd,CAAC,CAGF,GAAQ,EACR,EAAoB,EAAE,CACtB,EAAkB,EAClB,EAAY,EAAU,MAAM,EAAmB,UAI9C,EAAO,CAEd,MADA,EAAa,EAAM,EAAM,CACnB,SACE,CACR,EAAO,aAAa,EAOxB,MAAa,EAAuB,CAUlC,OACG,CACC,YACA,iBACA,mBACA,eACA,YASA,GAEO,EAAO,IAAI,WAAa,CAC7B,IAAM,EAAW,MAAO,EAAI,KAAK,CAC/B,WAAY,EACZ,OAAQ,EAAE,CACV,WAAY,EACb,CAAC,CAgFF,OAAO,EAAO,KACZ,EAAO,UA9EP,GAEA,EAAO,IAAI,WAAa,CAElB,IACF,MAAO,EAAO,SAAW,EAAO,EAAM,WAAW,CAAC,EAGpD,IAAI,EAAY,EACVC,EAAuB,EAAE,CAE/B,KAAO,EAAU,OAAS,GAAG,CAC3B,IAAM,EAAQ,MAAO,EAAI,IAAI,EAAS,CAChC,EAAiB,EAAY,EAAM,WAEzC,GAAI,EAAU,QAAU,EAAgB,CAEtC,MAAO,EAAI,OAAO,EAAW,IAAO,CAClC,GAAG,EACH,OAAQ,CAAC,GAAG,EAAE,OAAQ,EAAU,CAChC,WAAY,EAAE,WAAa,EAAU,OACtC,EAAE,CACH,UACK,CAEL,IAAM,EAAa,EAAU,MAAM,EAAG,EAAe,CAC/C,EAAiB,CAAC,GAAG,EAAM,OAAQ,EAAW,CAGpD,MAAO,EAAO,SAAW,EAAe,EAAM,WAAW,CAAC,CAE1D,IAAM,EAAqB,EAAmB,EAAe,CACvDC,EAAuB,CAC3B,WAAY,EAAM,WAClB,OAAQ,EACR,KAAM,EAAmB,OAC1B,CAED,MAAO,EAAO,SAAW,EAAiB,EAAU,CAAC,CACrD,EAAQ,KAAK,EAAU,CAGvB,MAAO,EAAI,IAAI,EAAU,CACvB,WAAY,EAAM,WAAa,EAC/B,OAAQ,EAAE,CACV,WAAY,EACb,CAAC,CACF,EAAY,EAAU,MAAM,EAAe,EAI/C,OAAO,GACP,CA0B4C,CAC9C,EAAO,QAAS,GAAe,EAAO,aAAa,EAAW,CAAC,CAC/D,EAAO,OACL,EAAO,gBAvBT,EAAO,IAAI,WAAa,CACtB,IAAM,EAAa,MAAO,EAAI,IAAI,EAAS,CAC3C,GAAI,EAAW,aAAe,EAC5B,MAAO,EAAE,CAGX,MAAO,EAAO,SAAW,EAAe,EAAW,WAAW,CAAC,CAE/D,IAAM,EAAqB,EAAmB,EAAW,OAAO,CAC1DA,EAAuB,CAC3B,WAAY,EAAW,WACvB,OAAQ,EACR,KAAM,EAAmB,OAC1B,CAGD,OADA,MAAO,EAAO,SAAW,EAAiB,EAAU,CAAC,CAC9C,CAAC,EAAU,EAClB,GAMkC,CAAC,CAAC,KAClC,EAAO,QAAS,GAAe,EAAO,aAAa,EAAW,CAAC,CAChE,CACF,CACD,EAAO,SAAU,GAAU,EAAO,SAAW,EAAa,EAAG,EAAM,CAAC,CAAC,CACrE,EAAO,SACJ,GACC,IAAI,EAAgB,CAClB,KAAM,gBACN,OAAQ,IACR,KAAM,0BACN,QAAS,4BAA4B,OAAO,EAAM,GACnD,CAAC,CACL,CACF,EACD,CAAC,KAAK,EAAO,OAAO,CAS1B,oBACE,EACA,IAEA,EAAO,WAAW,CAChB,QAAW,EAAe,EAAY,EAAS,CAC/C,MAAQ,GACN,IAAI,EAAgB,CAClB,KAAM,gBACN,OAAQ,IACR,KAAM,yBACN,QAAS,2BAA2B,OAAO,EAAM,GAClD,CAAC,CACL,CAAC,CACL"}
package/dist/types/data-store-registry.d.ts
@@ -0,0 +1,13 @@
+import { Context, type Effect } from "effect";
+import type { UploadistaError } from "../errors";
+import type { DataStore } from "./data-store";
+import type { UploadFile } from "./upload-file";
+declare const DataStoreRegistry_base: Context.TagClass<DataStoreRegistry, "DataStoreRegistry", {
+    readonly register: <T extends DataStore<UploadFile>>(id: string, store: Effect.Effect<T, UploadistaError>) => Effect.Effect<void, never>;
+    readonly get: (id: string) => Effect.Effect<DataStore<UploadFile>, UploadistaError>;
+    readonly list: () => Effect.Effect<Array<string>, never>;
+}>;
+export declare class DataStoreRegistry extends DataStoreRegistry_base {
+}
+export {};
+//# sourceMappingURL=data-store-registry.d.ts.map
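`DataStoreRegistry` is an Effect `Context.Tag` service, so consumers resolve it from the environment rather than importing an instance. A minimal consumption sketch, assuming the class is re-exported from the package root and that a store with the hypothetical id "s3" was registered elsewhere (both assumptions, not shown in this diff):

```ts
import { Effect } from "effect";
// Assumed import path; the tag class is what the .d.ts above declares.
import { DataStoreRegistry } from "@uploadista/core";

const program = Effect.gen(function* () {
  // Tags are yieldable inside Effect.gen; this resolves the service from context.
  const registry = yield* DataStoreRegistry;
  const ids = yield* registry.list();
  // Fails with an UploadistaError when the id was never registered ("s3" is hypothetical).
  const store = yield* registry.get("s3");
  return { ids, store };
});

// Running `program` requires supplying an implementation, e.g. via
// Effect.provideService(DataStoreRegistry, { register, get, list }).
```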
package/dist/types/data-store-registry.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"data-store-registry.d.ts","sourceRoot":"","sources":["../../src/types/data-store-registry.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,KAAK,MAAM,EAAE,MAAM,QAAQ,CAAC;AAC9C,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,WAAW,CAAC;AACjD,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;;uBAMzB,CAAC,CAAC,SAAS,SAAS,CAAC,UAAU,CAAC,EACjD,EAAE,EAAE,MAAM,EACV,KAAK,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC,EAAE,eAAe,CAAC,KACrC,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,CAAC;kBACjB,CACZ,EAAE,EAAE,MAAM,KACP,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,UAAU,CAAC,EAAE,eAAe,CAAC;mBAC3C,MAAM,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,KAAK,CAAC;;AAV5D,qBAAa,iBAAkB,SAAQ,sBAYpC;CAAG"}