@milaboratories/pl-model-common 1.21.3 → 1.21.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
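The substantive change in this release is a new `ChunkedStreamReader` class, added as `src/drivers/ChunkedStreamReader.ts` (together with its compiled CJS/ESM outputs, type declarations, and source maps under `dist/`) and re-exported from the package root. A minimal consumption sketch, assuming a runtime with WHATWG `ReadableStream` support; the `/api/blob` endpoint and the byte sizes are illustrative, not part of the package:

```typescript
import { ChunkedStreamReader } from '@milaboratories/pl-model-common';

// Build a stream that pulls a 10 MiB blob in 1 MiB ranges.
const stream = ChunkedStreamReader.create({
  // range is { from, to } in bytes; signal aborts the in-flight fetch
  // if the stream errors or is cancelled.
  fetchChunk: async (range, signal) => {
    const response = await fetch(`/api/blob?from=${range.from}&to=${range.to}`, { signal });
    return new Uint8Array(await response.arrayBuffer());
  },
  totalSize: 10 * 1024 * 1024,
  chunkSize: 1024 * 1024, // optional; defaults to 16 MiB
  onError: async () => 'cancel', // close gracefully instead of erroring the stream
});

// Drain the stream chunk by chunk.
const reader = stream.getReader();
for (;;) {
  const { done, value } = await reader.read();
  if (done) break;
  console.log('received', value.byteLength, 'bytes');
}
```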
package/dist/drivers/ChunkedStreamReader.cjs ADDED
@@ -0,0 +1,206 @@
+ 'use strict';
+
+ /**
+  * ChunkedStreamReader creates a ReadableStream that reads data from a blob driver
+  * in fixed-size chunks. This is useful for streaming large files without loading
+  * them entirely into memory.
+  */
+ class ChunkedStreamReader {
+     currentPosition = 0;
+     _read = true;
+     _canceled = false;
+     _errored = false;
+     abortController = null;
+     options;
+     /**
+      * Creates a new ChunkedStreamReader instance.
+      * Use the static `create` method instead.
+      */
+     constructor(options) {
+         // Normalize options with defaults
+         this.options = {
+             ...options,
+             chunkSize: options.chunkSize ?? 16 * 1024 * 1024,
+             onError: options.onError ?? (async () => {
+                 // Default behavior: error (will automatically call controller.error)
+                 return 'error';
+             }),
+         };
+         if (this.totalSize < 0) {
+             throw new Error('Total size must be non-negative');
+         }
+         if (this.chunkSize <= 0) {
+             throw new Error('Chunk size must be positive');
+         }
+     }
+     /**
+      * Gets the fetchChunk function from options
+      */
+     get fetchChunk() {
+         return this.options.fetchChunk;
+     }
+     /**
+      * Gets the total size from options
+      */
+     get totalSize() {
+         return this.options.totalSize;
+     }
+     /**
+      * Gets the chunk size from options
+      */
+     get chunkSize() {
+         return this.options.chunkSize;
+     }
+     /**
+      * Gets the onError callback from options
+      */
+     get onError() {
+         return this.options.onError;
+     }
+     /**
+      * Creates and returns a ReadableStream that reads data in chunks.
+      *
+      * @param options - Configuration options for the chunked stream reader
+      * @returns ReadableStream that can be consumed by zip.add or other stream consumers
+      *
+      * @example
+      * ```typescript
+      * const stream = ChunkedStreamReader.create({
+      *   fetchChunk: async (range, signal) => {
+      *     const response = await fetch(`/api/data?from=${range.from}&to=${range.to}`, { signal });
+      *     return new Uint8Array(await response.arrayBuffer());
+      *   },
+      *   totalSize: 1024 * 1024, // 1MB
+      *   chunkSize: 64 * 1024, // 64KB chunks
+      * });
+      * ```
+      */
+     static create(options) {
+         const reader = new ChunkedStreamReader(options);
+         return reader.createStream();
+     }
+     readStart() {
+         this._read = true;
+     }
+     readStop() {
+         this._read = false;
+     }
+     async tryRead(controller) {
+         if (this._canceled) {
+             return true;
+         }
+         // Check if we've read all data
+         if (this.isComplete()) {
+             controller.close();
+             return true;
+         }
+         try {
+             // Calculate the end position for this chunk
+             // Ensure we don't read beyond the total size
+             const endPosition = Math.min(this.currentPosition + this.chunkSize, this.totalSize);
+             // Fetch the chunk from the blob driver, passing the abort signal if available
+             const data = await this.fetchChunk({ from: this.currentPosition, to: endPosition }, this.abortController?.signal);
+             // Check if stream was cancelled during the fetch
+             if (this._canceled) {
+                 return true;
+             }
+             // Enqueue the data into the stream
+             controller.enqueue(data);
+             // Update the current position for the next chunk
+             this.currentPosition = endPosition;
+             if (!controller.desiredSize || controller.desiredSize <= 0) {
+                 // The internal queue is full, so propagate
+                 // the backpressure signal to the underlying source.
+                 this.readStop();
+             }
+         }
+         catch (error) {
+             // If any error occurs during chunk reading, call the error handler
+             const status = await this.onError(error);
+             if (status === 'error') {
+                 this._errored = true;
+                 // Error the stream and abort any ongoing fetch operations
+                 controller.error(error);
+                 this.abortController?.abort('Stream errored');
+                 return true; // Stop reading
+             }
+             if (status === 'cancel') {
+                 this._canceled = true;
+                 // Close the stream gracefully and abort any ongoing fetch operations
+                 controller.close();
+                 this.abortController?.abort('Stream cancelled');
+                 console.debug('ChunkedStreamReader cancelled due to error');
+                 return true; // Stop reading
+             }
+         }
+         return false;
+     }
+     /**
+      * Creates and returns a ReadableStream that reads data in chunks.
+      * The stream will automatically close when all data has been read.
+      *
+      * @private - Use the static `create` method instead
+      * @returns ReadableStream that can be consumed by zip.add or other stream consumers
+      */
+     createStream() {
+         // Create an AbortController for this stream
+         this.abortController = new AbortController();
+         return new ReadableStream({
+             start: async (controller) => {
+                 while (true) {
+                     if (this._canceled || this._errored) {
+                         return;
+                     }
+                     if (!this._read) {
+                         await new Promise((r) => setTimeout(r, 0));
+                         if (controller.desiredSize) {
+                             this.readStart();
+                         }
+                     }
+                     else {
+                         const isDone = await this.tryRead(controller);
+                         if (isDone) {
+                             return;
+                         }
+                     }
+                 }
+             },
+             pull: () => {
+                 this.readStart();
+             },
+             cancel: (reason) => {
+                 this._canceled = true;
+                 // Abort any ongoing fetch operations
+                 this.abortController?.abort(reason);
+                 console.debug('ChunkedStreamReader cancelled:', reason);
+             },
+         });
+     }
+     /**
+      * Gets the current reading position in bytes.
+      *
+      * @returns Current position as number of bytes read
+      */
+     getCurrentPosition() {
+         return this.currentPosition;
+     }
+     /**
+      * Gets the remaining bytes to be read.
+      *
+      * @returns Number of bytes remaining
+      */
+     getRemainingBytes() {
+         return Math.max(0, this.totalSize - this.currentPosition);
+     }
+     /**
+      * Checks if the entire blob has been read.
+      *
+      * @returns True if all data has been read
+      */
+     isComplete() {
+         return this.currentPosition >= this.totalSize;
+     }
+ }
+
+ exports.ChunkedStreamReader = ChunkedStreamReader;
+ //# sourceMappingURL=ChunkedStreamReader.cjs.map
package/dist/drivers/ChunkedStreamReader.cjs.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"ChunkedStreamReader.cjs","sources":["../../src/drivers/ChunkedStreamReader.ts"],"sourcesContent":["import type { RangeBytes } from './blob';\n\n/**\n * Status returned by onError handler to indicate what action to take\n * - 'continue': Retry the failed operation\n * - 'error': Error the stream (calls controller.error, aborts ongoing fetches)\n * - 'cancel': Cancel the stream gracefully (calls controller.close, aborts ongoing fetches)\n */\nexport type ErrorHandlerStatus = 'continue' | 'error' | 'cancel';\n\n/**\n * Options for creating a ChunkedStreamReader\n */\nexport interface ChunkedStreamReaderOptions {\n  /**\n   * Function to fetch a chunk of data. Optionally accepts an AbortSignal to cancel the fetch.\n   */\n  fetchChunk: (range: RangeBytes, signal?: AbortSignal) => Promise<Uint8Array>;\n\n  /**\n   * Total size of the blob in bytes\n   */\n  totalSize: number;\n\n  /**\n   * Size of each chunk to read in bytes (default: 16MB)\n   */\n  chunkSize?: number;\n\n  /**\n   * Error handler callback. Called when an error occurs during chunk fetching.\n   * Should return:\n   * - 'continue' to retry the operation\n   * - 'error' to error the stream (will call controller.error and abort ongoing fetches)\n   * - 'cancel' to cancel gracefully (will call controller.close and abort ongoing fetches)\n   * Default behavior: returns 'error'.\n   */\n  onError?: (error: unknown) => Promise<ErrorHandlerStatus>;\n}\n\n/**\n * ChunkedStreamReader creates a ReadableStream that reads data from a blob driver\n * in fixed-size chunks. This is useful for streaming large files without loading\n * them entirely into memory.\n */\nexport class ChunkedStreamReader {\n  private currentPosition: number = 0;\n  private _read = true;\n  private _canceled = false;\n  private _errored = false;\n  private abortController: AbortController | null = null;\n  private readonly options: Required<ChunkedStreamReaderOptions>;\n\n  /**\n   * Creates a new ChunkedStreamReader instance.\n   * Use the static `create` method instead.\n   */\n  private constructor(options: ChunkedStreamReaderOptions) {\n    // Normalize options with defaults\n    this.options = {\n      ...options,\n      chunkSize: options.chunkSize ?? 16 * 1024 * 1024,\n      onError: options.onError ?? (async () => {\n        // Default behavior: error (will automatically call controller.error)\n        return 'error';\n      }),\n    };\n\n    if (this.totalSize < 0) {\n      throw new Error('Total size must be non-negative');\n    }\n    if (this.chunkSize <= 0) {\n      throw new Error('Chunk size must be positive');\n    }\n  }\n\n  /**\n   * Gets the fetchChunk function from options\n   */\n  private get fetchChunk() {\n    return this.options.fetchChunk;\n  }\n\n  /**\n   * Gets the total size from options\n   */\n  private get totalSize() {\n    return this.options.totalSize;\n  }\n\n  /**\n   * Gets the chunk size from options\n   */\n  private get chunkSize() {\n    return this.options.chunkSize;\n  }\n\n  /**\n   * Gets the onError callback from options\n   */\n  private get onError() {\n    return this.options.onError;\n  }\n\n  /**\n   * Creates and returns a ReadableStream that reads data in chunks.\n   *\n   * @param options - Configuration options for the chunked stream reader\n   * @returns ReadableStream that can be consumed by zip.add or other stream consumers\n   *\n   * @example\n   * ```typescript\n   * const stream = ChunkedStreamReader.create({\n   *   fetchChunk: async (range, signal) => {\n   *     const response = await fetch(`/api/data?from=${range.from}&to=${range.to}`, { signal });\n   *     return new Uint8Array(await response.arrayBuffer());\n   *   },\n   *   totalSize: 1024 * 1024, // 1MB\n   *   chunkSize: 64 * 1024, // 64KB chunks\n   * });\n   * ```\n   */\n  static create(options: ChunkedStreamReaderOptions): ReadableStream<Uint8Array> {\n    const reader = new ChunkedStreamReader(options);\n    return reader.createStream();\n  }\n\n  private readStart() {\n    this._read = true;\n  }\n\n  private readStop() {\n    this._read = false;\n  }\n\n  private async tryRead(controller: ReadableStreamDefaultController<Uint8Array>): Promise<boolean> {\n    if (this._canceled) {\n      return true;\n    }\n\n    // Check if we've read all data\n    if (this.isComplete()) {\n      controller.close();\n      return true;\n    }\n\n    try {\n      // Calculate the end position for this chunk\n      // Ensure we don't read beyond the total size\n      const endPosition = Math.min(this.currentPosition + this.chunkSize, this.totalSize);\n\n      // Fetch the chunk from the blob driver, passing the abort signal if available\n      const data = await this.fetchChunk(\n        { from: this.currentPosition, to: endPosition },\n        this.abortController?.signal,\n      );\n\n      // Check if stream was cancelled during the fetch\n      if (this._canceled) {\n        return true;\n      }\n\n      // Enqueue the data into the stream\n      controller.enqueue(data);\n\n      // Update the current position for the next chunk\n      this.currentPosition = endPosition;\n\n      if (!controller.desiredSize || controller.desiredSize <= 0) {\n        // The internal queue is full, so propagate\n        // the backpressure signal to the underlying source.\n        this.readStop();\n      }\n    } catch (error) {\n      // If any error occurs during chunk reading, call the error handler\n      const status = await this.onError(error);\n\n      if (status === 'error') {\n        this._errored = true;\n        // Error the stream and abort any ongoing fetch operations\n        controller.error(error);\n        this.abortController?.abort('Stream errored');\n        return true; // Stop reading\n      }\n\n      if (status === 'cancel') {\n        this._canceled = true;\n        // Close the stream gracefully and abort any ongoing fetch operations\n        controller.close();\n        this.abortController?.abort('Stream cancelled');\n        console.debug('ChunkedStreamReader cancelled due to error');\n        return true; // Stop reading\n      }\n    }\n\n    return false;\n  }\n\n  /**\n   * Creates and returns a ReadableStream that reads data in chunks.\n   * The stream will automatically close when all data has been read.\n   *\n   * @private - Use the static `create` method instead\n   * @returns ReadableStream that can be consumed by zip.add or other stream consumers\n   */\n  private createStream(): ReadableStream<Uint8Array> {\n    // Create an AbortController for this stream\n    this.abortController = new AbortController();\n\n    return new ReadableStream({\n      start: async (controller) => {\n        while (true) {\n          if (this._canceled || this._errored) {\n            return;\n          }\n\n          if (!this._read) {\n            await new Promise((r) => setTimeout(r, 0));\n            if (controller.desiredSize) {\n              this.readStart();\n            }\n          } else {\n            const isDone = await this.tryRead(controller);\n            if (isDone) {\n              return;\n            }\n          }\n        }\n      },\n\n      pull: () => {\n        this.readStart();\n      },\n\n      cancel: (reason) => {\n        this._canceled = true;\n        // Abort any ongoing fetch operations\n        this.abortController?.abort(reason);\n        console.debug('ChunkedStreamReader cancelled:', reason);\n      },\n    });\n  }\n\n  /**\n   * Gets the current reading position in bytes.\n   *\n   * @returns Current position as number of bytes read\n   */\n  getCurrentPosition(): number {\n    return this.currentPosition;\n  }\n\n  /**\n   * Gets the remaining bytes to be read.\n   *\n   * @returns Number of bytes remaining\n   */\n  getRemainingBytes(): number {\n    return Math.max(0, this.totalSize - this.currentPosition);\n  }\n\n  /**\n   * Checks if the entire blob has been read.\n   *\n   * @returns True if all data has been read\n   */\n  isComplete(): boolean {\n    return this.currentPosition >= this.totalSize;\n  }\n}\n"],"names":[],"mappings":";;AAwCA;;;;AAIG;MACU,mBAAmB,CAAA;IACtB,eAAe,GAAW,CAAC;IAC3B,KAAK,GAAG,IAAI;IACZ,SAAS,GAAG,KAAK;IACjB,QAAQ,GAAG,KAAK;IAChB,eAAe,GAA2B,IAAI;AACrC,IAAA,OAAO;AAExB;;;AAGG;AACH,IAAA,WAAA,CAAoB,OAAmC,EAAA;;QAErD,IAAI,CAAC,OAAO,GAAG;AACb,YAAA,GAAG,OAAO;YACV,SAAS,EAAE,OAAO,CAAC,SAAS,IAAI,EAAE,GAAG,IAAI,GAAG,IAAI;YAChD,OAAO,EAAE,OAAO,CAAC,OAAO,KAAK,YAAW;;AAEtC,gBAAA,OAAO,OAAO;AAChB,YAAA,CAAC,CAAC;SACH;AAED,QAAA,IAAI,IAAI,CAAC,SAAS,GAAG,CAAC,EAAE;AACtB,YAAA,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC;QACpD;AACA,QAAA,IAAI,IAAI,CAAC,SAAS,IAAI,CAAC,EAAE;AACvB,YAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,CAAC;QAChD;IACF;AAEA;;AAEG;AACH,IAAA,IAAY,UAAU,GAAA;AACpB,QAAA,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU;IAChC;AAEA;;AAEG;AACH,IAAA,IAAY,SAAS,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS;IAC/B;AAEA;;AAEG;AACH,IAAA,IAAY,SAAS,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS;IAC/B;AAEA;;AAEG;AACH,IAAA,IAAY,OAAO,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,OAAO,CAAC,OAAO;IAC7B;AAEA;;;;;;;;;;;;;;;;;AAiBG;IACH,OAAO,MAAM,CAAC,OAAmC,EAAA;AAC/C,QAAA,MAAM,MAAM,GAAG,IAAI,mBAAmB,CAAC,OAAO,CAAC;AAC/C,QAAA,OAAO,MAAM,CAAC,YAAY,EAAE;IAC9B;IAEQ,SAAS,GAAA;AACf,QAAA,IAAI,CAAC,KAAK,GAAG,IAAI;IACnB;IAEQ,QAAQ,GAAA;AACd,QAAA,IAAI,CAAC,KAAK,GAAG,KAAK;IACpB;IAEQ,MAAM,OAAO,CAAC,UAAuD,EAAA;AAC3E,QAAA,IAAI,IAAI,CAAC,SAAS,EAAE;AAClB,YAAA,OAAO,IAAI;QACb;;AAGA,QAAA,IAAI,IAAI,CAAC,UAAU,EAAE,EAAE;YACrB,UAAU,CAAC,KAAK,EAAE;AAClB,YAAA,OAAO,IAAI;QACb;AAEA,QAAA,IAAI;;;AAGF,YAAA,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC;;YAGnF,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,UAAU,CAChC,EAAE,IAAI,EAAE,IAAI,CAAC,eAAe,EAAE,EAAE,EAAE,WAAW,EAAE,EAC/C,IAAI,CAAC,eAAe,EAAE,MAAM,CAC7B;;AAGD,YAAA,IAAI,IAAI,CAAC,SAAS,EAAE;AAClB,gBAAA,OAAO,IAAI;YACb;;AAGA,YAAA,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC;;AAGxB,YAAA,IAAI,CAAC,eAAe,GAAG,WAAW;YAElC,IAAI,CAAC,UAAU,CAAC,WAAW,IAAI,UAAU,CAAC,WAAW,IAAI,CAAC,EAAE;;;gBAG1D,IAAI,CAAC,QAAQ,EAAE;YACjB;QACF;QAAE,OAAO,KAAK,EAAE;;YAEd,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC;AAExC,YAAA,IAAI,MAAM,KAAK,OAAO,EAAE;AACtB,gBAAA,IAAI,CAAC,QAAQ,GAAG,IAAI;;AAEpB,gBAAA,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC;AACvB,gBAAA,IAAI,CAAC,eAAe,EAAE,KAAK,CAAC,gBAAgB,CAAC;gBAC7C,OAAO,IAAI,CAAC;YACd;AAEA,YAAA,IAAI,MAAM,KAAK,QAAQ,EAAE;AACvB,gBAAA,IAAI,CAAC,SAAS,GAAG,IAAI;;gBAErB,UAAU,CAAC,KAAK,EAAE;AAClB,gBAAA,IAAI,CAAC,eAAe,EAAE,KAAK,CAAC,kBAAkB,CAAC;AAC/C,gBAAA,OAAO,CAAC,KAAK,CAAC,4CAA4C,CAAC;gBAC3D,OAAO,IAAI,CAAC;YACd;QACF;AAEA,QAAA,OAAO,KAAK;IACd;AAEA;;;;;;AAMG;IACK,YAAY,GAAA;;AAElB,QAAA,IAAI,CAAC,eAAe,GAAG,IAAI,eAAe,EAAE;QAE5C,OAAO,IAAI,cAAc,CAAC;AACxB,YAAA,KAAK,EAAE,OAAO,UAAU,KAAI;gBAC1B,OAAO,IAAI,EAAE;oBACX,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,CAAC,QAAQ,EAAE;wBACnC;oBACF;AAEA,oBAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACf,wBAAA,MAAM,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,UAAU,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AAC1C,wBAAA,IAAI,UAAU,CAAC,WAAW,EAAE;4BAC1B,IAAI,CAAC,SAAS,EAAE;wBAClB;oBACF;yBAAO;wBACL,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC;wBAC7C,IAAI,MAAM,EAAE;4BACV;wBACF;oBACF;gBACF;YACF,CAAC;YAED,IAAI,EAAE,MAAK;gBACT,IAAI,CAAC,SAAS,EAAE;YAClB,CAAC;AAED,YAAA,MAAM,EAAE,CAAC,MAAM,KAAI;AACjB,gBAAA,IAAI,CAAC,SAAS,GAAG,IAAI;;AAErB,gBAAA,IAAI,CAAC,eAAe,EAAE,KAAK,CAAC,MAAM,CAAC;AACnC,gBAAA,OAAO,CAAC,KAAK,CAAC,gCAAgC,EAAE,MAAM,CAAC;YACzD,CAAC;AACF,SAAA,CAAC;IACJ;AAEA;;;;AAIG;IACH,kBAAkB,GAAA;QAChB,OAAO,IAAI,CAAC,eAAe;IAC7B;AAEA;;;;AAIG;IACH,iBAAiB,GAAA;AACf,QAAA,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,eAAe,CAAC;IAC3D;AAEA;;;;AAIG;IACH,UAAU,GAAA;AACR,QAAA,OAAO,IAAI,CAAC,eAAe,IAAI,IAAI,CAAC,SAAS;IAC/C;AACD;;;;"}
package/dist/drivers/ChunkedStreamReader.d.ts ADDED
@@ -0,0 +1,117 @@
+ import type { RangeBytes } from './blob';
+ /**
+  * Status returned by onError handler to indicate what action to take
+  * - 'continue': Retry the failed operation
+  * - 'error': Error the stream (calls controller.error, aborts ongoing fetches)
+  * - 'cancel': Cancel the stream gracefully (calls controller.close, aborts ongoing fetches)
+  */
+ export type ErrorHandlerStatus = 'continue' | 'error' | 'cancel';
+ /**
+  * Options for creating a ChunkedStreamReader
+  */
+ export interface ChunkedStreamReaderOptions {
+     /**
+      * Function to fetch a chunk of data. Optionally accepts an AbortSignal to cancel the fetch.
+      */
+     fetchChunk: (range: RangeBytes, signal?: AbortSignal) => Promise<Uint8Array>;
+     /**
+      * Total size of the blob in bytes
+      */
+     totalSize: number;
+     /**
+      * Size of each chunk to read in bytes (default: 16MB)
+      */
+     chunkSize?: number;
+     /**
+      * Error handler callback. Called when an error occurs during chunk fetching.
+      * Should return:
+      * - 'continue' to retry the operation
+      * - 'error' to error the stream (will call controller.error and abort ongoing fetches)
+      * - 'cancel' to cancel gracefully (will call controller.close and abort ongoing fetches)
+      * Default behavior: returns 'error'.
+      */
+     onError?: (error: unknown) => Promise<ErrorHandlerStatus>;
+ }
+ /**
+  * ChunkedStreamReader creates a ReadableStream that reads data from a blob driver
+  * in fixed-size chunks. This is useful for streaming large files without loading
+  * them entirely into memory.
+  */
+ export declare class ChunkedStreamReader {
+     private currentPosition;
+     private _read;
+     private _canceled;
+     private _errored;
+     private abortController;
+     private readonly options;
+     /**
+      * Creates a new ChunkedStreamReader instance.
+      * Use the static `create` method instead.
+      */
+     private constructor();
+     /**
+      * Gets the fetchChunk function from options
+      */
+     private get fetchChunk();
+     /**
+      * Gets the total size from options
+      */
+     private get totalSize();
+     /**
+      * Gets the chunk size from options
+      */
+     private get chunkSize();
+     /**
+      * Gets the onError callback from options
+      */
+     private get onError();
+     /**
+      * Creates and returns a ReadableStream that reads data in chunks.
+      *
+      * @param options - Configuration options for the chunked stream reader
+      * @returns ReadableStream that can be consumed by zip.add or other stream consumers
+      *
+      * @example
+      * ```typescript
+      * const stream = ChunkedStreamReader.create({
+      *   fetchChunk: async (range, signal) => {
+      *     const response = await fetch(`/api/data?from=${range.from}&to=${range.to}`, { signal });
+      *     return new Uint8Array(await response.arrayBuffer());
+      *   },
+      *   totalSize: 1024 * 1024, // 1MB
+      *   chunkSize: 64 * 1024, // 64KB chunks
+      * });
+      * ```
+      */
+     static create(options: ChunkedStreamReaderOptions): ReadableStream<Uint8Array>;
+     private readStart;
+     private readStop;
+     private tryRead;
+     /**
+      * Creates and returns a ReadableStream that reads data in chunks.
+      * The stream will automatically close when all data has been read.
+      *
+      * @private - Use the static `create` method instead
+      * @returns ReadableStream that can be consumed by zip.add or other stream consumers
+      */
+     private createStream;
+     /**
+      * Gets the current reading position in bytes.
+      *
+      * @returns Current position as number of bytes read
+      */
+     getCurrentPosition(): number;
+     /**
+      * Gets the remaining bytes to be read.
+      *
+      * @returns Number of bytes remaining
+      */
+     getRemainingBytes(): number;
+     /**
+      * Checks if the entire blob has been read.
+      *
+      * @returns True if all data has been read
+      */
+     isComplete(): boolean;
+ }
+ //# sourceMappingURL=ChunkedStreamReader.d.ts.map
package/dist/drivers/ChunkedStreamReader.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"ChunkedStreamReader.d.ts","sourceRoot":"","sources":["../../src/drivers/ChunkedStreamReader.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAEzC;;;;;GAKG;AACH,MAAM,MAAM,kBAAkB,GAAG,UAAU,GAAG,OAAO,GAAG,QAAQ,CAAC;AAEjE;;GAEG;AACH,MAAM,WAAW,0BAA0B;IACzC;;OAEG;IACH,UAAU,EAAE,CAAC,KAAK,EAAE,UAAU,EAAE,MAAM,CAAC,EAAE,WAAW,KAAK,OAAO,CAAC,UAAU,CAAC,CAAC;IAE7E;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;IAElB;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IAEnB;;;;;;;OAOG;IACH,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,KAAK,OAAO,CAAC,kBAAkB,CAAC,CAAC;CAC3D;AAED;;;;GAIG;AACH,qBAAa,mBAAmB;IAC9B,OAAO,CAAC,eAAe,CAAa;IACpC,OAAO,CAAC,KAAK,CAAQ;IACrB,OAAO,CAAC,SAAS,CAAS;IAC1B,OAAO,CAAC,QAAQ,CAAS;IACzB,OAAO,CAAC,eAAe,CAAgC;IACvD,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAuC;IAE/D;;;OAGG;IACH,OAAO;IAmBP;;OAEG;IACH,OAAO,KAAK,UAAU,GAErB;IAED;;OAEG;IACH,OAAO,KAAK,SAAS,GAEpB;IAED;;OAEG;IACH,OAAO,KAAK,SAAS,GAEpB;IAED;;OAEG;IACH,OAAO,KAAK,OAAO,GAElB;IAED;;;;;;;;;;;;;;;;;OAiBG;IACH,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE,0BAA0B,GAAG,cAAc,CAAC,UAAU,CAAC;IAK9E,OAAO,CAAC,SAAS;IAIjB,OAAO,CAAC,QAAQ;YAIF,OAAO;IA+DrB;;;;;;OAMG;IACH,OAAO,CAAC,YAAY;IAsCpB;;;;OAIG;IACH,kBAAkB,IAAI,MAAM;IAI5B;;;;OAIG;IACH,iBAAiB,IAAI,MAAM;IAI3B;;;;OAIG;IACH,UAAU,IAAI,OAAO;CAGtB"}
package/dist/drivers/ChunkedStreamReader.js ADDED
@@ -0,0 +1,204 @@
+ /**
+  * ChunkedStreamReader creates a ReadableStream that reads data from a blob driver
+  * in fixed-size chunks. This is useful for streaming large files without loading
+  * them entirely into memory.
+  */
+ class ChunkedStreamReader {
+     currentPosition = 0;
+     _read = true;
+     _canceled = false;
+     _errored = false;
+     abortController = null;
+     options;
+     /**
+      * Creates a new ChunkedStreamReader instance.
+      * Use the static `create` method instead.
+      */
+     constructor(options) {
+         // Normalize options with defaults
+         this.options = {
+             ...options,
+             chunkSize: options.chunkSize ?? 16 * 1024 * 1024,
+             onError: options.onError ?? (async () => {
+                 // Default behavior: error (will automatically call controller.error)
+                 return 'error';
+             }),
+         };
+         if (this.totalSize < 0) {
+             throw new Error('Total size must be non-negative');
+         }
+         if (this.chunkSize <= 0) {
+             throw new Error('Chunk size must be positive');
+         }
+     }
+     /**
+      * Gets the fetchChunk function from options
+      */
+     get fetchChunk() {
+         return this.options.fetchChunk;
+     }
+     /**
+      * Gets the total size from options
+      */
+     get totalSize() {
+         return this.options.totalSize;
+     }
+     /**
+      * Gets the chunk size from options
+      */
+     get chunkSize() {
+         return this.options.chunkSize;
+     }
+     /**
+      * Gets the onError callback from options
+      */
+     get onError() {
+         return this.options.onError;
+     }
+     /**
+      * Creates and returns a ReadableStream that reads data in chunks.
+      *
+      * @param options - Configuration options for the chunked stream reader
+      * @returns ReadableStream that can be consumed by zip.add or other stream consumers
+      *
+      * @example
+      * ```typescript
+      * const stream = ChunkedStreamReader.create({
+      *   fetchChunk: async (range, signal) => {
+      *     const response = await fetch(`/api/data?from=${range.from}&to=${range.to}`, { signal });
+      *     return new Uint8Array(await response.arrayBuffer());
+      *   },
+      *   totalSize: 1024 * 1024, // 1MB
+      *   chunkSize: 64 * 1024, // 64KB chunks
+      * });
+      * ```
+      */
+     static create(options) {
+         const reader = new ChunkedStreamReader(options);
+         return reader.createStream();
+     }
+     readStart() {
+         this._read = true;
+     }
+     readStop() {
+         this._read = false;
+     }
+     async tryRead(controller) {
+         if (this._canceled) {
+             return true;
+         }
+         // Check if we've read all data
+         if (this.isComplete()) {
+             controller.close();
+             return true;
+         }
+         try {
+             // Calculate the end position for this chunk
+             // Ensure we don't read beyond the total size
+             const endPosition = Math.min(this.currentPosition + this.chunkSize, this.totalSize);
+             // Fetch the chunk from the blob driver, passing the abort signal if available
+             const data = await this.fetchChunk({ from: this.currentPosition, to: endPosition }, this.abortController?.signal);
+             // Check if stream was cancelled during the fetch
+             if (this._canceled) {
+                 return true;
+             }
+             // Enqueue the data into the stream
+             controller.enqueue(data);
+             // Update the current position for the next chunk
+             this.currentPosition = endPosition;
+             if (!controller.desiredSize || controller.desiredSize <= 0) {
+                 // The internal queue is full, so propagate
+                 // the backpressure signal to the underlying source.
+                 this.readStop();
+             }
+         }
+         catch (error) {
+             // If any error occurs during chunk reading, call the error handler
+             const status = await this.onError(error);
+             if (status === 'error') {
+                 this._errored = true;
+                 // Error the stream and abort any ongoing fetch operations
+                 controller.error(error);
+                 this.abortController?.abort('Stream errored');
+                 return true; // Stop reading
+             }
+             if (status === 'cancel') {
+                 this._canceled = true;
+                 // Close the stream gracefully and abort any ongoing fetch operations
+                 controller.close();
+                 this.abortController?.abort('Stream cancelled');
+                 console.debug('ChunkedStreamReader cancelled due to error');
+                 return true; // Stop reading
+             }
+         }
+         return false;
+     }
+     /**
+      * Creates and returns a ReadableStream that reads data in chunks.
+      * The stream will automatically close when all data has been read.
+      *
+      * @private - Use the static `create` method instead
+      * @returns ReadableStream that can be consumed by zip.add or other stream consumers
+      */
+     createStream() {
+         // Create an AbortController for this stream
+         this.abortController = new AbortController();
+         return new ReadableStream({
+             start: async (controller) => {
+                 while (true) {
+                     if (this._canceled || this._errored) {
+                         return;
+                     }
+                     if (!this._read) {
+                         await new Promise((r) => setTimeout(r, 0));
+                         if (controller.desiredSize) {
+                             this.readStart();
+                         }
+                     }
+                     else {
+                         const isDone = await this.tryRead(controller);
+                         if (isDone) {
+                             return;
+                         }
+                     }
+                 }
+             },
+             pull: () => {
+                 this.readStart();
+             },
+             cancel: (reason) => {
+                 this._canceled = true;
+                 // Abort any ongoing fetch operations
+                 this.abortController?.abort(reason);
+                 console.debug('ChunkedStreamReader cancelled:', reason);
+             },
+         });
+     }
+     /**
+      * Gets the current reading position in bytes.
+      *
+      * @returns Current position as number of bytes read
+      */
+     getCurrentPosition() {
+         return this.currentPosition;
+     }
+     /**
+      * Gets the remaining bytes to be read.
+      *
+      * @returns Number of bytes remaining
+      */
+     getRemainingBytes() {
+         return Math.max(0, this.totalSize - this.currentPosition);
+     }
+     /**
+      * Checks if the entire blob has been read.
+      *
+      * @returns True if all data has been read
+      */
+     isComplete() {
+         return this.currentPosition >= this.totalSize;
+     }
+ }
+
+ export { ChunkedStreamReader };
+ //# sourceMappingURL=ChunkedStreamReader.js.map
package/dist/drivers/ChunkedStreamReader.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"ChunkedStreamReader.js","sources":["../../src/drivers/ChunkedStreamReader.ts"],"sourcesContent":["import type { RangeBytes } from './blob';\n\n/**\n * Status returned by onError handler to indicate what action to take\n * - 'continue': Retry the failed operation\n * - 'error': Error the stream (calls controller.error, aborts ongoing fetches)\n * - 'cancel': Cancel the stream gracefully (calls controller.close, aborts ongoing fetches)\n */\nexport type ErrorHandlerStatus = 'continue' | 'error' | 'cancel';\n\n/**\n * Options for creating a ChunkedStreamReader\n */\nexport interface ChunkedStreamReaderOptions {\n  /**\n   * Function to fetch a chunk of data. Optionally accepts an AbortSignal to cancel the fetch.\n   */\n  fetchChunk: (range: RangeBytes, signal?: AbortSignal) => Promise<Uint8Array>;\n\n  /**\n   * Total size of the blob in bytes\n   */\n  totalSize: number;\n\n  /**\n   * Size of each chunk to read in bytes (default: 16MB)\n   */\n  chunkSize?: number;\n\n  /**\n   * Error handler callback. Called when an error occurs during chunk fetching.\n   * Should return:\n   * - 'continue' to retry the operation\n   * - 'error' to error the stream (will call controller.error and abort ongoing fetches)\n   * - 'cancel' to cancel gracefully (will call controller.close and abort ongoing fetches)\n   * Default behavior: returns 'error'.\n   */\n  onError?: (error: unknown) => Promise<ErrorHandlerStatus>;\n}\n\n/**\n * ChunkedStreamReader creates a ReadableStream that reads data from a blob driver\n * in fixed-size chunks. This is useful for streaming large files without loading\n * them entirely into memory.\n */\nexport class ChunkedStreamReader {\n  private currentPosition: number = 0;\n  private _read = true;\n  private _canceled = false;\n  private _errored = false;\n  private abortController: AbortController | null = null;\n  private readonly options: Required<ChunkedStreamReaderOptions>;\n\n  /**\n   * Creates a new ChunkedStreamReader instance.\n   * Use the static `create` method instead.\n   */\n  private constructor(options: ChunkedStreamReaderOptions) {\n    // Normalize options with defaults\n    this.options = {\n      ...options,\n      chunkSize: options.chunkSize ?? 16 * 1024 * 1024,\n      onError: options.onError ?? (async () => {\n        // Default behavior: error (will automatically call controller.error)\n        return 'error';\n      }),\n    };\n\n    if (this.totalSize < 0) {\n      throw new Error('Total size must be non-negative');\n    }\n    if (this.chunkSize <= 0) {\n      throw new Error('Chunk size must be positive');\n    }\n  }\n\n  /**\n   * Gets the fetchChunk function from options\n   */\n  private get fetchChunk() {\n    return this.options.fetchChunk;\n  }\n\n  /**\n   * Gets the total size from options\n   */\n  private get totalSize() {\n    return this.options.totalSize;\n  }\n\n  /**\n   * Gets the chunk size from options\n   */\n  private get chunkSize() {\n    return this.options.chunkSize;\n  }\n\n  /**\n   * Gets the onError callback from options\n   */\n  private get onError() {\n    return this.options.onError;\n  }\n\n  /**\n   * Creates and returns a ReadableStream that reads data in chunks.\n   *\n   * @param options - Configuration options for the chunked stream reader\n   * @returns ReadableStream that can be consumed by zip.add or other stream consumers\n   *\n   * @example\n   * ```typescript\n   * const stream = ChunkedStreamReader.create({\n   *   fetchChunk: async (range, signal) => {\n   *     const response = await fetch(`/api/data?from=${range.from}&to=${range.to}`, { signal });\n   *     return new Uint8Array(await response.arrayBuffer());\n   *   },\n   *   totalSize: 1024 * 1024, // 1MB\n   *   chunkSize: 64 * 1024, // 64KB chunks\n   * });\n   * ```\n   */\n  static create(options: ChunkedStreamReaderOptions): ReadableStream<Uint8Array> {\n    const reader = new ChunkedStreamReader(options);\n    return reader.createStream();\n  }\n\n  private readStart() {\n    this._read = true;\n  }\n\n  private readStop() {\n    this._read = false;\n  }\n\n  private async tryRead(controller: ReadableStreamDefaultController<Uint8Array>): Promise<boolean> {\n    if (this._canceled) {\n      return true;\n    }\n\n    // Check if we've read all data\n    if (this.isComplete()) {\n      controller.close();\n      return true;\n    }\n\n    try {\n      // Calculate the end position for this chunk\n      // Ensure we don't read beyond the total size\n      const endPosition = Math.min(this.currentPosition + this.chunkSize, this.totalSize);\n\n      // Fetch the chunk from the blob driver, passing the abort signal if available\n      const data = await this.fetchChunk(\n        { from: this.currentPosition, to: endPosition },\n        this.abortController?.signal,\n      );\n\n      // Check if stream was cancelled during the fetch\n      if (this._canceled) {\n        return true;\n      }\n\n      // Enqueue the data into the stream\n      controller.enqueue(data);\n\n      // Update the current position for the next chunk\n      this.currentPosition = endPosition;\n\n      if (!controller.desiredSize || controller.desiredSize <= 0) {\n        // The internal queue is full, so propagate\n        // the backpressure signal to the underlying source.\n        this.readStop();\n      }\n    } catch (error) {\n      // If any error occurs during chunk reading, call the error handler\n      const status = await this.onError(error);\n\n      if (status === 'error') {\n        this._errored = true;\n        // Error the stream and abort any ongoing fetch operations\n        controller.error(error);\n        this.abortController?.abort('Stream errored');\n        return true; // Stop reading\n      }\n\n      if (status === 'cancel') {\n        this._canceled = true;\n        // Close the stream gracefully and abort any ongoing fetch operations\n        controller.close();\n        this.abortController?.abort('Stream cancelled');\n        console.debug('ChunkedStreamReader cancelled due to error');\n        return true; // Stop reading\n      }\n    }\n\n    return false;\n  }\n\n  /**\n   * Creates and returns a ReadableStream that reads data in chunks.\n   * The stream will automatically close when all data has been read.\n   *\n   * @private - Use the static `create` method instead\n   * @returns ReadableStream that can be consumed by zip.add or other stream consumers\n   */\n  private createStream(): ReadableStream<Uint8Array> {\n    // Create an AbortController for this stream\n    this.abortController = new AbortController();\n\n    return new ReadableStream({\n      start: async (controller) => {\n        while (true) {\n          if (this._canceled || this._errored) {\n            return;\n          }\n\n          if (!this._read) {\n            await new Promise((r) => setTimeout(r, 0));\n            if (controller.desiredSize) {\n              this.readStart();\n            }\n          } else {\n            const isDone = await this.tryRead(controller);\n            if (isDone) {\n              return;\n            }\n          }\n        }\n      },\n\n      pull: () => {\n        this.readStart();\n      },\n\n      cancel: (reason) => {\n        this._canceled = true;\n        // Abort any ongoing fetch operations\n        this.abortController?.abort(reason);\n        console.debug('ChunkedStreamReader cancelled:', reason);\n      },\n    });\n  }\n\n  /**\n   * Gets the current reading position in bytes.\n   *\n   * @returns Current position as number of bytes read\n   */\n  getCurrentPosition(): number {\n    return this.currentPosition;\n  }\n\n  /**\n   * Gets the remaining bytes to be read.\n   *\n   * @returns Number of bytes remaining\n   */\n  getRemainingBytes(): number {\n    return Math.max(0, this.totalSize - this.currentPosition);\n  }\n\n  /**\n   * Checks if the entire blob has been read.\n   *\n   * @returns True if all data has been read\n   */\n  isComplete(): boolean {\n    return this.currentPosition >= this.totalSize;\n  }\n}\n"],"names":[],"mappings":"AAwCA;;;;AAIG;MACU,mBAAmB,CAAA;IACtB,eAAe,GAAW,CAAC;IAC3B,KAAK,GAAG,IAAI;IACZ,SAAS,GAAG,KAAK;IACjB,QAAQ,GAAG,KAAK;IAChB,eAAe,GAA2B,IAAI;AACrC,IAAA,OAAO;AAExB;;;AAGG;AACH,IAAA,WAAA,CAAoB,OAAmC,EAAA;;QAErD,IAAI,CAAC,OAAO,GAAG;AACb,YAAA,GAAG,OAAO;YACV,SAAS,EAAE,OAAO,CAAC,SAAS,IAAI,EAAE,GAAG,IAAI,GAAG,IAAI;YAChD,OAAO,EAAE,OAAO,CAAC,OAAO,KAAK,YAAW;;AAEtC,gBAAA,OAAO,OAAO;AAChB,YAAA,CAAC,CAAC;SACH;AAED,QAAA,IAAI,IAAI,CAAC,SAAS,GAAG,CAAC,EAAE;AACtB,YAAA,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC;QACpD;AACA,QAAA,IAAI,IAAI,CAAC,SAAS,IAAI,CAAC,EAAE;AACvB,YAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,CAAC;QAChD;IACF;AAEA;;AAEG;AACH,IAAA,IAAY,UAAU,GAAA;AACpB,QAAA,OAAO,IAAI,CAAC,OAAO,CAAC,UAAU;IAChC;AAEA;;AAEG;AACH,IAAA,IAAY,SAAS,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS;IAC/B;AAEA;;AAEG;AACH,IAAA,IAAY,SAAS,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,OAAO,CAAC,SAAS;IAC/B;AAEA;;AAEG;AACH,IAAA,IAAY,OAAO,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,OAAO,CAAC,OAAO;IAC7B;AAEA;;;;;;;;;;;;;;;;;AAiBG;IACH,OAAO,MAAM,CAAC,OAAmC,EAAA;AAC/C,QAAA,MAAM,MAAM,GAAG,IAAI,mBAAmB,CAAC,OAAO,CAAC;AAC/C,QAAA,OAAO,MAAM,CAAC,YAAY,EAAE;IAC9B;IAEQ,SAAS,GAAA;AACf,QAAA,IAAI,CAAC,KAAK,GAAG,IAAI;IACnB;IAEQ,QAAQ,GAAA;AACd,QAAA,IAAI,CAAC,KAAK,GAAG,KAAK;IACpB;IAEQ,MAAM,OAAO,CAAC,UAAuD,EAAA;AAC3E,QAAA,IAAI,IAAI,CAAC,SAAS,EAAE;AAClB,YAAA,OAAO,IAAI;QACb;;AAGA,QAAA,IAAI,IAAI,CAAC,UAAU,EAAE,EAAE;YACrB,UAAU,CAAC,KAAK,EAAE;AAClB,YAAA,OAAO,IAAI;QACb;AAEA,QAAA,IAAI;;;AAGF,YAAA,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,SAAS,CAAC;;YAGnF,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,UAAU,CAChC,EAAE,IAAI,EAAE,IAAI,CAAC,eAAe,EAAE,EAAE,EAAE,WAAW,EAAE,EAC/C,IAAI,CAAC,eAAe,EAAE,MAAM,CAC7B;;AAGD,YAAA,IAAI,IAAI,CAAC,SAAS,EAAE;AAClB,gBAAA,OAAO,IAAI;YACb;;AAGA,YAAA,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC;;AAGxB,YAAA,IAAI,CAAC,eAAe,GAAG,WAAW;YAElC,IAAI,CAAC,UAAU,CAAC,WAAW,IAAI,UAAU,CAAC,WAAW,IAAI,CAAC,EAAE;;;gBAG1D,IAAI,CAAC,QAAQ,EAAE;YACjB;QACF;QAAE,OAAO,KAAK,EAAE;;YAEd,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC;AAExC,YAAA,IAAI,MAAM,KAAK,OAAO,EAAE;AACtB,gBAAA,IAAI,CAAC,QAAQ,GAAG,IAAI;;AAEpB,gBAAA,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC;AACvB,gBAAA,IAAI,CAAC,eAAe,EAAE,KAAK,CAAC,gBAAgB,CAAC;gBAC7C,OAAO,IAAI,CAAC;YACd;AAEA,YAAA,IAAI,MAAM,KAAK,QAAQ,EAAE;AACvB,gBAAA,IAAI,CAAC,SAAS,GAAG,IAAI;;gBAErB,UAAU,CAAC,KAAK,EAAE;AAClB,gBAAA,IAAI,CAAC,eAAe,EAAE,KAAK,CAAC,kBAAkB,CAAC;AAC/C,gBAAA,OAAO,CAAC,KAAK,CAAC,4CAA4C,CAAC;gBAC3D,OAAO,IAAI,CAAC;YACd;QACF;AAEA,QAAA,OAAO,KAAK;IACd;AAEA;;;;;;AAMG;IACK,YAAY,GAAA;;AAElB,QAAA,IAAI,CAAC,eAAe,GAAG,IAAI,eAAe,EAAE;QAE5C,OAAO,IAAI,cAAc,CAAC;AACxB,YAAA,KAAK,EAAE,OAAO,UAAU,KAAI;gBAC1B,OAAO,IAAI,EAAE;oBACX,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,CAAC,QAAQ,EAAE;wBACnC;oBACF;AAEA,oBAAA,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;AACf,wBAAA,MAAM,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,UAAU,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AAC1C,wBAAA,IAAI,UAAU,CAAC,WAAW,EAAE;4BAC1B,IAAI,CAAC,SAAS,EAAE;wBAClB;oBACF;yBAAO;wBACL,MAAM,MAAM,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC;wBAC7C,IAAI,MAAM,EAAE;4BACV;wBACF;oBACF;gBACF;YACF,CAAC;YAED,IAAI,EAAE,MAAK;gBACT,IAAI,CAAC,SAAS,EAAE;YAClB,CAAC;AAED,YAAA,MAAM,EAAE,CAAC,MAAM,KAAI;AACjB,gBAAA,IAAI,CAAC,SAAS,GAAG,IAAI;;AAErB,gBAAA,IAAI,CAAC,eAAe,EAAE,KAAK,CAAC,MAAM,CAAC;AACnC,gBAAA,OAAO,CAAC,KAAK,CAAC,gCAAgC,EAAE,MAAM,CAAC;YACzD,CAAC;AACF,SAAA,CAAC;IACJ;AAEA;;;;AAIG;IACH,kBAAkB,GAAA;QAChB,OAAO,IAAI,CAAC,eAAe;IAC7B;AAEA;;;;AAIG;IACH,iBAAiB,GAAA;AACf,QAAA,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,eAAe,CAAC;IAC3D;AAEA;;;;AAIG;IACH,UAAU,GAAA;AACR,QAAA,OAAO,IAAI,CAAC,eAAe,IAAI,IAAI,CAAC,SAAS;IAC/C;AACD;;;;"}
package/dist/drivers/index.d.ts CHANGED
@@ -5,4 +5,5 @@ export * from './upload';
  export * from './log';
  export * from './ls';
  export * from './pframe';
+ export * from './ChunkedStreamReader';
  //# sourceMappingURL=index.d.ts.map
package/dist/drivers/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/drivers/index.ts"],"names":[],"mappings":"AAAA,cAAc,cAAc,CAAC;AAE7B,cAAc,QAAQ,CAAC;AACvB,cAAc,QAAQ,CAAC;AACvB,cAAc,UAAU,CAAC;AACzB,cAAc,OAAO,CAAC;AACtB,cAAc,MAAM,CAAC;AAErB,cAAc,UAAU,CAAC"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/drivers/index.ts"],"names":[],"mappings":"AAAA,cAAc,cAAc,CAAC;AAE7B,cAAc,QAAQ,CAAC;AACvB,cAAc,QAAQ,CAAC;AACvB,cAAc,UAAU,CAAC;AACzB,cAAc,OAAO,CAAC;AACtB,cAAc,MAAM,CAAC;AAErB,cAAc,UAAU,CAAC;AACzB,cAAc,uBAAuB,CAAC"}
package/dist/index.cjs CHANGED
@@ -18,6 +18,7 @@ var filtered_column = require('./drivers/pframe/spec/filtered_column.cjs');
  var selectors = require('./drivers/pframe/spec/selectors.cjs');
  var native_id = require('./drivers/pframe/spec/native_id.cjs');
  var linker_columns = require('./drivers/pframe/linker_columns.cjs');
+ var ChunkedStreamReader = require('./drivers/ChunkedStreamReader.cjs');
  var errors = require('./errors.cjs');
  var block_flags = require('./flags/block_flags.cjs');
  var flag_utils = require('./flags/flag_utils.cjs');
@@ -102,6 +103,7 @@ exports.matchPColumn = selectors.matchPColumn;
  exports.selectorsToPredicate = selectors.selectorsToPredicate;
  exports.deriveNativeId = native_id.deriveNativeId;
  exports.LinkerMap = linker_columns.LinkerMap;
+ exports.ChunkedStreamReader = ChunkedStreamReader.ChunkedStreamReader;
  exports.AbortError = errors.AbortError;
  exports.PFrameDriverError = errors.PFrameDriverError;
  exports.PFrameError = errors.PFrameError;
package/dist/index.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.cjs","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"}
+ {"version":3,"file":"index.cjs","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"}
package/dist/index.js CHANGED
@@ -16,6 +16,7 @@ export { isFilteredPColumn } from './drivers/pframe/spec/filtered_column.js';
  export { matchAxis, matchPColumn, selectorsToPredicate } from './drivers/pframe/spec/selectors.js';
  export { deriveNativeId } from './drivers/pframe/spec/native_id.js';
  export { LinkerMap } from './drivers/pframe/linker_columns.js';
+ export { ChunkedStreamReader } from './drivers/ChunkedStreamReader.js';
  export { AbortError, PFrameDriverError, PFrameError, UiError, deserializeError, deserializeResult, ensureError, hasAbortError, isAbortError, isAggregateError, isPFrameDriverError, isPFrameError, serializeError, serializeResult, unwrapResult, wrapAndSerialize, wrapAndSerializeAsync, wrapAsyncCallback, wrapCallback } from './errors.js';
  export { AllRequiresFeatureFlags, AllSupportsFeatureFlags } from './flags/block_flags.js';
  export { IncompatibleFlagsError, RuntimeCapabilities, checkBlockFlag, extractAllRequirements, extractAllSupports } from './flags/flag_utils.js';
package/dist/index.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"}
+ {"version":3,"file":"index.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@milaboratories/pl-model-common",
-   "version": "1.21.3",
+   "version": "1.21.4",
    "description": "Platforma SDK Model",
    "type": "module",
    "types": "./dist/index.d.ts",
@@ -25,8 +25,8 @@
      "eslint": "^9.25.1",
      "typescript": "~5.6.3",
      "vitest": "^2.1.9",
-     "@milaboratories/ts-builder": "1.0.5",
      "@milaboratories/build-configs": "1.0.8",
+     "@milaboratories/ts-builder": "1.0.5",
      "@platforma-sdk/eslint-config": "1.1.0",
      "@milaboratories/ts-configs": "1.0.6"
    },
package/src/drivers/ChunkedStreamReader.ts ADDED
@@ -0,0 +1,270 @@
+ import type { RangeBytes } from './blob';
+
+ /**
+  * Status returned by onError handler to indicate what action to take
+  * - 'continue': Retry the failed operation
+  * - 'error': Error the stream (calls controller.error, aborts ongoing fetches)
+  * - 'cancel': Cancel the stream gracefully (calls controller.close, aborts ongoing fetches)
+  */
+ export type ErrorHandlerStatus = 'continue' | 'error' | 'cancel';
+
+ /**
+  * Options for creating a ChunkedStreamReader
+  */
+ export interface ChunkedStreamReaderOptions {
+   /**
+    * Function to fetch a chunk of data. Optionally accepts an AbortSignal to cancel the fetch.
+    */
+   fetchChunk: (range: RangeBytes, signal?: AbortSignal) => Promise<Uint8Array>;
+
+   /**
+    * Total size of the blob in bytes
+    */
+   totalSize: number;
+
+   /**
+    * Size of each chunk to read in bytes (default: 16MB)
+    */
+   chunkSize?: number;
+
+   /**
+    * Error handler callback. Called when an error occurs during chunk fetching.
+    * Should return:
+    * - 'continue' to retry the operation
+    * - 'error' to error the stream (will call controller.error and abort ongoing fetches)
+    * - 'cancel' to cancel gracefully (will call controller.close and abort ongoing fetches)
+    * Default behavior: returns 'error'.
+    */
+   onError?: (error: unknown) => Promise<ErrorHandlerStatus>;
+ }
+
+ /**
+  * ChunkedStreamReader creates a ReadableStream that reads data from a blob driver
+  * in fixed-size chunks. This is useful for streaming large files without loading
+  * them entirely into memory.
+  */
+ export class ChunkedStreamReader {
+   private currentPosition: number = 0;
+   private _read = true;
+   private _canceled = false;
+   private _errored = false;
+   private abortController: AbortController | null = null;
+   private readonly options: Required<ChunkedStreamReaderOptions>;
+
+   /**
+    * Creates a new ChunkedStreamReader instance.
+    * Use the static `create` method instead.
+    */
+   private constructor(options: ChunkedStreamReaderOptions) {
+     // Normalize options with defaults
+     this.options = {
+       ...options,
+       chunkSize: options.chunkSize ?? 16 * 1024 * 1024,
+       onError: options.onError ?? (async () => {
+         // Default behavior: error (will automatically call controller.error)
+         return 'error';
+       }),
+     };
+
+     if (this.totalSize < 0) {
+       throw new Error('Total size must be non-negative');
+     }
+     if (this.chunkSize <= 0) {
+       throw new Error('Chunk size must be positive');
+     }
+   }
+
+   /**
+    * Gets the fetchChunk function from options
+    */
+   private get fetchChunk() {
+     return this.options.fetchChunk;
+   }
+
+   /**
+    * Gets the total size from options
+    */
+   private get totalSize() {
+     return this.options.totalSize;
+   }
+
+   /**
+    * Gets the chunk size from options
+    */
+   private get chunkSize() {
+     return this.options.chunkSize;
+   }
+
+   /**
+    * Gets the onError callback from options
+    */
+   private get onError() {
+     return this.options.onError;
+   }
+
+   /**
+    * Creates and returns a ReadableStream that reads data in chunks.
+    *
+    * @param options - Configuration options for the chunked stream reader
+    * @returns ReadableStream that can be consumed by zip.add or other stream consumers
+    *
+    * @example
+    * ```typescript
+    * const stream = ChunkedStreamReader.create({
+    *   fetchChunk: async (range, signal) => {
+    *     const response = await fetch(`/api/data?from=${range.from}&to=${range.to}`, { signal });
+    *     return new Uint8Array(await response.arrayBuffer());
+    *   },
+    *   totalSize: 1024 * 1024, // 1MB
+    *   chunkSize: 64 * 1024, // 64KB chunks
+    * });
+    * ```
+    */
+   static create(options: ChunkedStreamReaderOptions): ReadableStream<Uint8Array> {
+     const reader = new ChunkedStreamReader(options);
+     return reader.createStream();
+   }
+
+   private readStart() {
+     this._read = true;
+   }
+
+   private readStop() {
+     this._read = false;
+   }
+
+   private async tryRead(controller: ReadableStreamDefaultController<Uint8Array>): Promise<boolean> {
+     if (this._canceled) {
+       return true;
+     }
+
+     // Check if we've read all data
+     if (this.isComplete()) {
+       controller.close();
+       return true;
+     }
+
+     try {
+       // Calculate the end position for this chunk
+       // Ensure we don't read beyond the total size
+       const endPosition = Math.min(this.currentPosition + this.chunkSize, this.totalSize);
+
+       // Fetch the chunk from the blob driver, passing the abort signal if available
+       const data = await this.fetchChunk(
+         { from: this.currentPosition, to: endPosition },
+         this.abortController?.signal,
+       );
+
+       // Check if stream was cancelled during the fetch
+       if (this._canceled) {
+         return true;
+       }
+
+       // Enqueue the data into the stream
+       controller.enqueue(data);
+
+       // Update the current position for the next chunk
+       this.currentPosition = endPosition;
+
+       if (!controller.desiredSize || controller.desiredSize <= 0) {
+         // The internal queue is full, so propagate
+         // the backpressure signal to the underlying source.
+         this.readStop();
+       }
+     } catch (error) {
+       // If any error occurs during chunk reading, call the error handler
+       const status = await this.onError(error);
+
+       if (status === 'error') {
+         this._errored = true;
+         // Error the stream and abort any ongoing fetch operations
+         controller.error(error);
+         this.abortController?.abort('Stream errored');
+         return true; // Stop reading
+       }
+
+       if (status === 'cancel') {
+         this._canceled = true;
+         // Close the stream gracefully and abort any ongoing fetch operations
+         controller.close();
+         this.abortController?.abort('Stream cancelled');
+         console.debug('ChunkedStreamReader cancelled due to error');
+         return true; // Stop reading
+       }
+     }
+
+     return false;
+   }
+
+   /**
+    * Creates and returns a ReadableStream that reads data in chunks.
+    * The stream will automatically close when all data has been read.
+    *
+    * @private - Use the static `create` method instead
+    * @returns ReadableStream that can be consumed by zip.add or other stream consumers
+    */
+   private createStream(): ReadableStream<Uint8Array> {
+     // Create an AbortController for this stream
+     this.abortController = new AbortController();
+
+     return new ReadableStream({
+       start: async (controller) => {
+         while (true) {
+           if (this._canceled || this._errored) {
+             return;
+           }
+
+           if (!this._read) {
+             await new Promise((r) => setTimeout(r, 0));
+             if (controller.desiredSize) {
+               this.readStart();
+             }
+           } else {
+             const isDone = await this.tryRead(controller);
+             if (isDone) {
+               return;
+             }
+           }
+         }
+       },
+
+       pull: () => {
+         this.readStart();
+       },
+
+       cancel: (reason) => {
+         this._canceled = true;
+         // Abort any ongoing fetch operations
+         this.abortController?.abort(reason);
+         console.debug('ChunkedStreamReader cancelled:', reason);
+       },
+     });
+   }
+
+   /**
+    * Gets the current reading position in bytes.
+    *
+    * @returns Current position as number of bytes read
+    */
+   getCurrentPosition(): number {
+     return this.currentPosition;
+   }
+
+   /**
+    * Gets the remaining bytes to be read.
+    *
+    * @returns Number of bytes remaining
+    */
+   getRemainingBytes(): number {
+     return Math.max(0, this.totalSize - this.currentPosition);
+   }
+
+   /**
+    * Checks if the entire blob has been read.
+    *
+    * @returns True if all data has been read
+    */
+   isComplete(): boolean {
+     return this.currentPosition >= this.totalSize;
+   }
+ }
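Since a `'continue'` status makes `tryRead` fall through its catch block and re-enter the read loop at the same `currentPosition`, an `onError` handler can implement chunk-level retries. A sketch under that reading of the source; the retry limit and counter are illustrative, and the import path assumes the `ErrorHandlerStatus` type is re-exported from the package root alongside the class:

```typescript
import type { ErrorHandlerStatus } from '@milaboratories/pl-model-common';

// Returns an onError handler that allows up to maxRetries total failures,
// then returns 'error', which errors the stream and aborts the in-flight fetch.
// Note: the API does not notify the handler of successful reads, so the
// counter cannot be reset between chunks; it bounds failures per stream.
function retryingOnError(maxRetries: number) {
  let failures = 0;
  return async (error: unknown): Promise<ErrorHandlerStatus> => {
    failures += 1;
    if (failures <= maxRetries) {
      console.debug('fetchChunk failed, retrying', { attempt: failures, error });
      return 'continue';
    }
    return 'error';
  };
}

// Usage: ChunkedStreamReader.create({ fetchChunk, totalSize, onError: retryingOnError(3) });
```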
package/src/drivers/index.ts CHANGED
@@ -7,3 +7,4 @@ export * from './log';
  export * from './ls';

  export * from './pframe';
+ export * from './ChunkedStreamReader';