@milaboratories/pl-model-common 1.25.1 → 1.25.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (288)
  1. package/dist/_virtual/_rolldown/runtime.cjs +29 -0
  2. package/dist/author_marker.d.ts +9 -6
  3. package/dist/base32_encode.cjs +40 -46
  4. package/dist/base32_encode.cjs.map +1 -1
  5. package/dist/base32_encode.js +40 -45
  6. package/dist/base32_encode.js.map +1 -1
  7. package/dist/base64.cjs +5 -4
  8. package/dist/base64.cjs.map +1 -1
  9. package/dist/base64.d.ts +10 -6
  10. package/dist/base64.js +5 -3
  11. package/dist/base64.js.map +1 -1
  12. package/dist/block_state.d.ts +21 -23
  13. package/dist/bmodel/block_config.cjs +9 -10
  14. package/dist/bmodel/block_config.cjs.map +1 -1
  15. package/dist/bmodel/block_config.d.ts +53 -72
  16. package/dist/bmodel/block_config.js +9 -9
  17. package/dist/bmodel/block_config.js.map +1 -1
  18. package/dist/bmodel/code.cjs +7 -6
  19. package/dist/bmodel/code.cjs.map +1 -1
  20. package/dist/bmodel/code.d.ts +17 -13
  21. package/dist/bmodel/code.js +6 -4
  22. package/dist/bmodel/code.js.map +1 -1
  23. package/dist/bmodel/container.d.ts +46 -44
  24. package/dist/bmodel/index.cjs +3 -0
  25. package/dist/bmodel/index.d.ts +5 -6
  26. package/dist/bmodel/index.js +3 -0
  27. package/dist/bmodel/normalization.cjs +90 -106
  28. package/dist/bmodel/normalization.cjs.map +1 -1
  29. package/dist/bmodel/normalization.d.ts +7 -3
  30. package/dist/bmodel/normalization.js +90 -105
  31. package/dist/bmodel/normalization.js.map +1 -1
  32. package/dist/bmodel/types.d.ts +4 -1
  33. package/dist/branding.d.ts +5 -5
  34. package/dist/common_types.d.ts +19 -15
  35. package/dist/driver_kit.d.ts +25 -17
  36. package/dist/drivers/ChunkedStreamReader.cjs +168 -203
  37. package/dist/drivers/ChunkedStreamReader.cjs.map +1 -1
  38. package/dist/drivers/ChunkedStreamReader.d.ts +104 -100
  39. package/dist/drivers/ChunkedStreamReader.js +168 -202
  40. package/dist/drivers/ChunkedStreamReader.js.map +1 -1
  41. package/dist/drivers/blob.cjs +15 -17
  42. package/dist/drivers/blob.cjs.map +1 -1
  43. package/dist/drivers/blob.d.ts +35 -35
  44. package/dist/drivers/blob.js +14 -15
  45. package/dist/drivers/blob.js.map +1 -1
  46. package/dist/drivers/index.cjs +18 -0
  47. package/dist/drivers/index.d.ts +29 -9
  48. package/dist/drivers/index.js +18 -0
  49. package/dist/drivers/interfaces.d.ts +17 -14
  50. package/dist/drivers/log.cjs +4 -3
  51. package/dist/drivers/log.cjs.map +1 -1
  52. package/dist/drivers/log.d.ts +52 -49
  53. package/dist/drivers/log.js +4 -2
  54. package/dist/drivers/log.js.map +1 -1
  55. package/dist/drivers/ls.cjs +16 -19
  56. package/dist/drivers/ls.cjs.map +1 -1
  57. package/dist/drivers/ls.d.ts +73 -76
  58. package/dist/drivers/ls.js +16 -17
  59. package/dist/drivers/ls.js.map +1 -1
  60. package/dist/drivers/pframe/column_filter.d.ts +19 -15
  61. package/dist/drivers/pframe/data_info.cjs +201 -252
  62. package/dist/drivers/pframe/data_info.cjs.map +1 -1
  63. package/dist/drivers/pframe/data_info.d.ts +84 -109
  64. package/dist/drivers/pframe/data_info.js +201 -250
  65. package/dist/drivers/pframe/data_info.js.map +1 -1
  66. package/dist/drivers/pframe/data_types.cjs +38 -63
  67. package/dist/drivers/pframe/data_types.cjs.map +1 -1
  68. package/dist/drivers/pframe/data_types.d.ts +91 -93
  69. package/dist/drivers/pframe/data_types.js +38 -61
  70. package/dist/drivers/pframe/data_types.js.map +1 -1
  71. package/dist/drivers/pframe/driver.cjs +7 -0
  72. package/dist/drivers/pframe/driver.cjs.map +1 -0
  73. package/dist/drivers/pframe/driver.d.ts +50 -45
  74. package/dist/drivers/pframe/driver.js +6 -0
  75. package/dist/drivers/pframe/driver.js.map +1 -0
  76. package/dist/drivers/pframe/filter_spec.d.ts +127 -123
  77. package/dist/drivers/pframe/find_columns.d.ts +21 -17
  78. package/dist/drivers/pframe/index.cjs +15 -0
  79. package/dist/drivers/pframe/index.d.ts +23 -16
  80. package/dist/drivers/pframe/index.js +15 -0
  81. package/dist/drivers/pframe/linker_columns.cjs +184 -238
  82. package/dist/drivers/pframe/linker_columns.cjs.map +1 -1
  83. package/dist/drivers/pframe/linker_columns.d.ts +45 -38
  84. package/dist/drivers/pframe/linker_columns.js +184 -236
  85. package/dist/drivers/pframe/linker_columns.js.map +1 -1
  86. package/dist/drivers/pframe/pframe.d.ts +26 -21
  87. package/dist/drivers/pframe/query/index.cjs +1 -0
  88. package/dist/drivers/pframe/query/index.d.ts +3 -4
  89. package/dist/drivers/pframe/query/index.js +1 -0
  90. package/dist/drivers/pframe/query/query_common.d.ts +205 -247
  91. package/dist/drivers/pframe/query/query_data.d.ts +25 -23
  92. package/dist/drivers/pframe/query/query_spec.d.ts +30 -29
  93. package/dist/drivers/pframe/query/utils.cjs +164 -183
  94. package/dist/drivers/pframe/query/utils.cjs.map +1 -1
  95. package/dist/drivers/pframe/query/utils.d.ts +13 -12
  96. package/dist/drivers/pframe/query/utils.js +164 -181
  97. package/dist/drivers/pframe/query/utils.js.map +1 -1
  98. package/dist/drivers/pframe/spec/anchored.cjs +178 -216
  99. package/dist/drivers/pframe/spec/anchored.cjs.map +1 -1
  100. package/dist/drivers/pframe/spec/anchored.d.ts +53 -49
  101. package/dist/drivers/pframe/spec/anchored.js +176 -214
  102. package/dist/drivers/pframe/spec/anchored.js.map +1 -1
  103. package/dist/drivers/pframe/spec/filtered_column.cjs +8 -7
  104. package/dist/drivers/pframe/spec/filtered_column.cjs.map +1 -1
  105. package/dist/drivers/pframe/spec/filtered_column.d.ts +20 -17
  106. package/dist/drivers/pframe/spec/filtered_column.js +8 -6
  107. package/dist/drivers/pframe/spec/filtered_column.js.map +1 -1
  108. package/dist/drivers/pframe/spec/ids.cjs +16 -14
  109. package/dist/drivers/pframe/spec/ids.cjs.map +1 -1
  110. package/dist/drivers/pframe/spec/ids.d.ts +13 -8
  111. package/dist/drivers/pframe/spec/ids.js +14 -12
  112. package/dist/drivers/pframe/spec/ids.js.map +1 -1
  113. package/dist/drivers/pframe/spec/index.cjs +6 -0
  114. package/dist/drivers/pframe/spec/index.d.ts +6 -7
  115. package/dist/drivers/pframe/spec/index.js +6 -0
  116. package/dist/drivers/pframe/spec/native_id.cjs +16 -16
  117. package/dist/drivers/pframe/spec/native_id.cjs.map +1 -1
  118. package/dist/drivers/pframe/spec/native_id.d.ts +9 -4
  119. package/dist/drivers/pframe/spec/native_id.js +13 -13
  120. package/dist/drivers/pframe/spec/native_id.js.map +1 -1
  121. package/dist/drivers/pframe/spec/selectors.cjs +72 -112
  122. package/dist/drivers/pframe/spec/selectors.cjs.map +1 -1
  123. package/dist/drivers/pframe/spec/selectors.d.ts +99 -94
  124. package/dist/drivers/pframe/spec/selectors.js +72 -110
  125. package/dist/drivers/pframe/spec/selectors.js.map +1 -1
  126. package/dist/drivers/pframe/spec/spec.cjs +282 -324
  127. package/dist/drivers/pframe/spec/spec.cjs.map +1 -1
  128. package/dist/drivers/pframe/spec/spec.d.ts +263 -272
  129. package/dist/drivers/pframe/spec/spec.js +281 -322
  130. package/dist/drivers/pframe/spec/spec.js.map +1 -1
  131. package/dist/drivers/pframe/table.d.ts +28 -24
  132. package/dist/drivers/pframe/table_calculate.cjs +117 -147
  133. package/dist/drivers/pframe/table_calculate.cjs.map +1 -1
  134. package/dist/drivers/pframe/table_calculate.d.ts +219 -220
  135. package/dist/drivers/pframe/table_calculate.js +117 -145
  136. package/dist/drivers/pframe/table_calculate.js.map +1 -1
  137. package/dist/drivers/pframe/table_common.cjs +13 -14
  138. package/dist/drivers/pframe/table_common.cjs.map +1 -1
  139. package/dist/drivers/pframe/table_common.d.ts +24 -19
  140. package/dist/drivers/pframe/table_common.js +13 -13
  141. package/dist/drivers/pframe/table_common.js.map +1 -1
  142. package/dist/drivers/pframe/unique_values.d.ts +23 -18
  143. package/dist/drivers/upload.d.ts +21 -18
  144. package/dist/drivers/urls.cjs +5 -6
  145. package/dist/drivers/urls.cjs.map +1 -1
  146. package/dist/drivers/urls.d.ts +12 -9
  147. package/dist/drivers/urls.js +5 -5
  148. package/dist/drivers/urls.js.map +1 -1
  149. package/dist/errors.cjs +65 -86
  150. package/dist/errors.cjs.map +1 -1
  151. package/dist/errors.d.ts +36 -33
  152. package/dist/errors.js +65 -85
  153. package/dist/errors.js.map +1 -1
  154. package/dist/flags/block_flags.cjs +7 -6
  155. package/dist/flags/block_flags.cjs.map +1 -1
  156. package/dist/flags/block_flags.d.ts +12 -8
  157. package/dist/flags/block_flags.js +7 -5
  158. package/dist/flags/block_flags.js.map +1 -1
  159. package/dist/flags/flag_utils.cjs +69 -108
  160. package/dist/flags/flag_utils.cjs.map +1 -1
  161. package/dist/flags/flag_utils.d.ts +40 -42
  162. package/dist/flags/flag_utils.js +70 -107
  163. package/dist/flags/flag_utils.js.map +1 -1
  164. package/dist/flags/type_utils.d.ts +12 -11
  165. package/dist/httpAuth.cjs +16 -20
  166. package/dist/httpAuth.cjs.map +1 -1
  167. package/dist/httpAuth.d.ts +10 -7
  168. package/dist/httpAuth.js +16 -19
  169. package/dist/httpAuth.js.map +1 -1
  170. package/dist/index.cjs +176 -179
  171. package/dist/index.d.ts +58 -20
  172. package/dist/index.js +37 -34
  173. package/dist/json.cjs +10 -8
  174. package/dist/json.cjs.map +1 -1
  175. package/dist/json.d.ts +18 -18
  176. package/dist/json.js +8 -6
  177. package/dist/json.js.map +1 -1
  178. package/dist/navigation.cjs +3 -2
  179. package/dist/navigation.cjs.map +1 -1
  180. package/dist/navigation.d.ts +18 -21
  181. package/dist/navigation.js +3 -1
  182. package/dist/navigation.js.map +1 -1
  183. package/dist/plid.cjs +16 -20
  184. package/dist/plid.cjs.map +1 -1
  185. package/dist/plid.d.ts +11 -7
  186. package/dist/plid.js +15 -18
  187. package/dist/plid.js.map +1 -1
  188. package/dist/pool/entry.d.ts +11 -11
  189. package/dist/pool/index.d.ts +3 -4
  190. package/dist/pool/query.cjs +21 -43
  191. package/dist/pool/query.cjs.map +1 -1
  192. package/dist/pool/query.d.ts +25 -20
  193. package/dist/pool/query.js +20 -40
  194. package/dist/pool/query.js.map +1 -1
  195. package/dist/pool/spec.cjs +40 -47
  196. package/dist/pool/spec.cjs.map +1 -1
  197. package/dist/pool/spec.d.ts +33 -31
  198. package/dist/pool/spec.js +40 -45
  199. package/dist/pool/spec.js.map +1 -1
  200. package/dist/ref.cjs +51 -71
  201. package/dist/ref.cjs.map +1 -1
  202. package/dist/ref.d.ts +36 -33
  203. package/dist/ref.js +50 -69
  204. package/dist/ref.js.map +1 -1
  205. package/dist/utag.d.ts +18 -14
  206. package/dist/util.cjs +8 -7
  207. package/dist/util.cjs.map +1 -1
  208. package/dist/util.d.ts +5 -2
  209. package/dist/util.js +8 -6
  210. package/dist/util.js.map +1 -1
  211. package/dist/value_or_error.cjs +7 -3
  212. package/dist/value_or_error.cjs.map +1 -1
  213. package/dist/value_or_error.d.ts +9 -6
  214. package/dist/value_or_error.js +7 -2
  215. package/dist/value_or_error.js.map +1 -1
  216. package/package.json +6 -6
  217. package/src/flags/block_flags.ts +2 -1
  218. package/src/flags/flag_utils.ts +0 -22
  219. package/dist/author_marker.d.ts.map +0 -1
  220. package/dist/base32_encode.d.ts +0 -8
  221. package/dist/base32_encode.d.ts.map +0 -1
  222. package/dist/base64.d.ts.map +0 -1
  223. package/dist/block_state.d.ts.map +0 -1
  224. package/dist/bmodel/block_config.d.ts.map +0 -1
  225. package/dist/bmodel/code.d.ts.map +0 -1
  226. package/dist/bmodel/container.d.ts.map +0 -1
  227. package/dist/bmodel/index.d.ts.map +0 -1
  228. package/dist/bmodel/normalization.d.ts.map +0 -1
  229. package/dist/bmodel/types.d.ts.map +0 -1
  230. package/dist/branding.d.ts.map +0 -1
  231. package/dist/common_types.d.ts.map +0 -1
  232. package/dist/driver_kit.d.ts.map +0 -1
  233. package/dist/drivers/ChunkedStreamReader.d.ts.map +0 -1
  234. package/dist/drivers/blob.d.ts.map +0 -1
  235. package/dist/drivers/index.d.ts.map +0 -1
  236. package/dist/drivers/interfaces.d.ts.map +0 -1
  237. package/dist/drivers/log.d.ts.map +0 -1
  238. package/dist/drivers/ls.d.ts.map +0 -1
  239. package/dist/drivers/pframe/column_filter.d.ts.map +0 -1
  240. package/dist/drivers/pframe/data_info.d.ts.map +0 -1
  241. package/dist/drivers/pframe/data_types.d.ts.map +0 -1
  242. package/dist/drivers/pframe/driver.d.ts.map +0 -1
  243. package/dist/drivers/pframe/filter_spec.d.ts.map +0 -1
  244. package/dist/drivers/pframe/find_columns.d.ts.map +0 -1
  245. package/dist/drivers/pframe/index.d.ts.map +0 -1
  246. package/dist/drivers/pframe/linker_columns.d.ts.map +0 -1
  247. package/dist/drivers/pframe/pframe.d.ts.map +0 -1
  248. package/dist/drivers/pframe/query/index.d.ts.map +0 -1
  249. package/dist/drivers/pframe/query/query_common.d.ts.map +0 -1
  250. package/dist/drivers/pframe/query/query_data.d.ts.map +0 -1
  251. package/dist/drivers/pframe/query/query_spec.d.ts.map +0 -1
  252. package/dist/drivers/pframe/query/utils.d.ts.map +0 -1
  253. package/dist/drivers/pframe/spec/anchored.d.ts.map +0 -1
  254. package/dist/drivers/pframe/spec/filtered_column.d.ts.map +0 -1
  255. package/dist/drivers/pframe/spec/ids.d.ts.map +0 -1
  256. package/dist/drivers/pframe/spec/index.d.ts.map +0 -1
  257. package/dist/drivers/pframe/spec/native_id.d.ts.map +0 -1
  258. package/dist/drivers/pframe/spec/selectors.d.ts.map +0 -1
  259. package/dist/drivers/pframe/spec/spec.d.ts.map +0 -1
  260. package/dist/drivers/pframe/table.d.ts.map +0 -1
  261. package/dist/drivers/pframe/table_calculate.d.ts.map +0 -1
  262. package/dist/drivers/pframe/table_common.d.ts.map +0 -1
  263. package/dist/drivers/pframe/type_util.d.ts +0 -5
  264. package/dist/drivers/pframe/type_util.d.ts.map +0 -1
  265. package/dist/drivers/pframe/unique_values.d.ts.map +0 -1
  266. package/dist/drivers/upload.d.ts.map +0 -1
  267. package/dist/drivers/urls.d.ts.map +0 -1
  268. package/dist/errors.d.ts.map +0 -1
  269. package/dist/flags/block_flags.d.ts.map +0 -1
  270. package/dist/flags/flag_utils.d.ts.map +0 -1
  271. package/dist/flags/index.d.ts +0 -4
  272. package/dist/flags/index.d.ts.map +0 -1
  273. package/dist/flags/type_utils.d.ts.map +0 -1
  274. package/dist/httpAuth.d.ts.map +0 -1
  275. package/dist/index.cjs.map +0 -1
  276. package/dist/index.d.ts.map +0 -1
  277. package/dist/index.js.map +0 -1
  278. package/dist/json.d.ts.map +0 -1
  279. package/dist/navigation.d.ts.map +0 -1
  280. package/dist/plid.d.ts.map +0 -1
  281. package/dist/pool/entry.d.ts.map +0 -1
  282. package/dist/pool/index.d.ts.map +0 -1
  283. package/dist/pool/query.d.ts.map +0 -1
  284. package/dist/pool/spec.d.ts.map +0 -1
  285. package/dist/ref.d.ts.map +0 -1
  286. package/dist/utag.d.ts.map +0 -1
  287. package/dist/util.d.ts.map +0 -1
  288. package/dist/value_or_error.d.ts.map +0 -1
@@ -1,117 +1,121 @@
1
- import type { RangeBytes } from "./blob";
1
+ import { RangeBytes } from "./blob.js";
2
+
3
+ //#region src/drivers/ChunkedStreamReader.d.ts
2
4
  /**
3
5
  * Status returned by onError handler to indicate what action to take
4
6
  * - 'continue': Retry the failed operation
5
7
  * - 'error': Error the stream (calls controller.error, aborts ongoing fetches)
6
8
  * - 'cancel': Cancel the stream gracefully (calls controller.close, aborts ongoing fetches)
7
9
  */
8
- export type ErrorHandlerStatus = "continue" | "error" | "cancel";
10
+ type ErrorHandlerStatus = "continue" | "error" | "cancel";
9
11
  /**
10
12
  * Options for creating a ChunkedStreamReader
11
13
  */
12
- export interface ChunkedStreamReaderOptions {
13
- /**
14
- * Function to fetch a chunk of data. Optionally accepts an AbortSignal to cancel the fetch.
15
- */
16
- fetchChunk: (range: RangeBytes, signal?: AbortSignal) => Promise<Uint8Array>;
17
- /**
18
- * Total size of the blob in bytes
19
- */
20
- totalSize: number;
21
- /**
22
- * Size of each chunk to read in bytes (default: 16MB)
23
- */
24
- chunkSize?: number;
25
- /**
26
- * Error handler callback. Called when an error occurs during chunk fetching.
27
- * Should return:
28
- * - 'continue' to retry the operation
29
- * - 'error' to error the stream (will call controller.error and abort ongoing fetches)
30
- * - 'cancel' to cancel gracefully (will call controller.close and abort ongoing fetches)
31
- * Default behavior: returns 'error'.
32
- */
33
- onError?: (error: unknown) => Promise<ErrorHandlerStatus>;
14
+ interface ChunkedStreamReaderOptions {
15
+ /**
16
+ * Function to fetch a chunk of data. Optionally accepts an AbortSignal to cancel the fetch.
17
+ */
18
+ fetchChunk: (range: RangeBytes, signal?: AbortSignal) => Promise<Uint8Array>;
19
+ /**
20
+ * Total size of the blob in bytes
21
+ */
22
+ totalSize: number;
23
+ /**
24
+ * Size of each chunk to read in bytes (default: 16MB)
25
+ */
26
+ chunkSize?: number;
27
+ /**
28
+ * Error handler callback. Called when an error occurs during chunk fetching.
29
+ * Should return:
30
+ * - 'continue' to retry the operation
31
+ * - 'error' to error the stream (will call controller.error and abort ongoing fetches)
32
+ * - 'cancel' to cancel gracefully (will call controller.close and abort ongoing fetches)
33
+ * Default behavior: returns 'error'.
34
+ */
35
+ onError?: (error: unknown) => Promise<ErrorHandlerStatus>;
34
36
  }
35
37
  /**
36
38
  * ChunkedStreamReader creates a ReadableStream that reads data from a blob driver
37
39
  * in fixed-size chunks. This is useful for streaming large files without loading
38
40
  * them entirely into memory.
39
41
  */
40
- export declare class ChunkedStreamReader {
41
- private currentPosition;
42
- private _read;
43
- private _canceled;
44
- private _errored;
45
- private abortController;
46
- private readonly options;
47
- /**
48
- * Creates a new ChunkedStreamReader instance.
49
- * Use the static `create` method instead.
50
- */
51
- private constructor();
52
- /**
53
- * Gets the fetchChunk function from options
54
- */
55
- private get fetchChunk();
56
- /**
57
- * Gets the total size from options
58
- */
59
- private get totalSize();
60
- /**
61
- * Gets the chunk size from options
62
- */
63
- private get chunkSize();
64
- /**
65
- * Gets the onError callback from options
66
- */
67
- private get onError();
68
- /**
69
- * Creates and returns a ReadableStream that reads data in chunks.
70
- *
71
- * @param options - Configuration options for the chunked stream reader
72
- * @returns ReadableStream that can be consumed by zip.add or other stream consumers
73
- *
74
- * @example
75
- * ```typescript
76
- * const stream = ChunkedStreamReader.create({
77
- * fetchChunk: async (range, signal) => {
78
- * const response = await fetch(`/api/data?from=${range.from}&to=${range.to}`, { signal });
79
- * return new Uint8Array(await response.arrayBuffer());
80
- * },
81
- * totalSize: 1024 * 1024, // 1MB
82
- * chunkSize: 64 * 1024, // 64KB chunks
83
- * });
84
- * ```
85
- */
86
- static create(options: ChunkedStreamReaderOptions): ReadableStream<Uint8Array>;
87
- private readStart;
88
- private readStop;
89
- private tryRead;
90
- /**
91
- * Creates and returns a ReadableStream that reads data in chunks.
92
- * The stream will automatically close when all data has been read.
93
- *
94
- * @private - Use the static `create` method instead
95
- * @returns ReadableStream that can be consumed by zip.add or other stream consumers
96
- */
97
- private createStream;
98
- /**
99
- * Gets the current reading position in bytes.
100
- *
101
- * @returns Current position as number of bytes read
102
- */
103
- getCurrentPosition(): number;
104
- /**
105
- * Gets the remaining bytes to be read.
106
- *
107
- * @returns Number of bytes remaining
108
- */
109
- getRemainingBytes(): number;
110
- /**
111
- * Checks if the entire blob has been read.
112
- *
113
- * @returns True if all data has been read
114
- */
115
- isComplete(): boolean;
42
+ declare class ChunkedStreamReader {
43
+ private currentPosition;
44
+ private _read;
45
+ private _canceled;
46
+ private _errored;
47
+ private abortController;
48
+ private readonly options;
49
+ /**
50
+ * Creates a new ChunkedStreamReader instance.
51
+ * Use the static `create` method instead.
52
+ */
53
+ private constructor();
54
+ /**
55
+ * Gets the fetchChunk function from options
56
+ */
57
+ private get fetchChunk();
58
+ /**
59
+ * Gets the total size from options
60
+ */
61
+ private get totalSize();
62
+ /**
63
+ * Gets the chunk size from options
64
+ */
65
+ private get chunkSize();
66
+ /**
67
+ * Gets the onError callback from options
68
+ */
69
+ private get onError();
70
+ /**
71
+ * Creates and returns a ReadableStream that reads data in chunks.
72
+ *
73
+ * @param options - Configuration options for the chunked stream reader
74
+ * @returns ReadableStream that can be consumed by zip.add or other stream consumers
75
+ *
76
+ * @example
77
+ * ```typescript
78
+ * const stream = ChunkedStreamReader.create({
79
+ * fetchChunk: async (range, signal) => {
80
+ * const response = await fetch(`/api/data?from=${range.from}&to=${range.to}`, { signal });
81
+ * return new Uint8Array(await response.arrayBuffer());
82
+ * },
83
+ * totalSize: 1024 * 1024, // 1MB
84
+ * chunkSize: 64 * 1024, // 64KB chunks
85
+ * });
86
+ * ```
87
+ */
88
+ static create(options: ChunkedStreamReaderOptions): ReadableStream<Uint8Array>;
89
+ private readStart;
90
+ private readStop;
91
+ private tryRead;
92
+ /**
93
+ * Creates and returns a ReadableStream that reads data in chunks.
94
+ * The stream will automatically close when all data has been read.
95
+ *
96
+ * @private - Use the static `create` method instead
97
+ * @returns ReadableStream that can be consumed by zip.add or other stream consumers
98
+ */
99
+ private createStream;
100
+ /**
101
+ * Gets the current reading position in bytes.
102
+ *
103
+ * @returns Current position as number of bytes read
104
+ */
105
+ getCurrentPosition(): number;
106
+ /**
107
+ * Gets the remaining bytes to be read.
108
+ *
109
+ * @returns Number of bytes remaining
110
+ */
111
+ getRemainingBytes(): number;
112
+ /**
113
+ * Checks if the entire blob has been read.
114
+ *
115
+ * @returns True if all data has been read
116
+ */
117
+ isComplete(): boolean;
116
118
  }
119
+ //#endregion
120
+ export { ChunkedStreamReader, ChunkedStreamReaderOptions, ErrorHandlerStatus };
117
121
  //# sourceMappingURL=ChunkedStreamReader.d.ts.map
@@ -1,205 +1,171 @@
1
+ //#region src/drivers/ChunkedStreamReader.ts
1
2
  /**
2
- * ChunkedStreamReader creates a ReadableStream that reads data from a blob driver
3
- * in fixed-size chunks. This is useful for streaming large files without loading
4
- * them entirely into memory.
5
- */
6
- class ChunkedStreamReader {
7
- currentPosition = 0;
8
- _read = true;
9
- _canceled = false;
10
- _errored = false;
11
- abortController = null;
12
- options;
13
- /**
14
- * Creates a new ChunkedStreamReader instance.
15
- * Use the static `create` method instead.
16
- */
17
- constructor(options) {
18
- // Normalize options with defaults
19
- this.options = {
20
- ...options,
21
- chunkSize: options.chunkSize ?? 16 * 1024 * 1024,
22
- onError: options.onError ??
23
- (async () => {
24
- // Default behavior: error (will automatically call controller.error)
25
- return "error";
26
- }),
27
- };
28
- if (this.totalSize < 0) {
29
- throw new Error("Total size must be non-negative");
30
- }
31
- if (this.chunkSize <= 0) {
32
- throw new Error("Chunk size must be positive");
33
- }
34
- }
35
- /**
36
- * Gets the fetchChunk function from options
37
- */
38
- get fetchChunk() {
39
- return this.options.fetchChunk;
40
- }
41
- /**
42
- * Gets the total size from options
43
- */
44
- get totalSize() {
45
- return this.options.totalSize;
46
- }
47
- /**
48
- * Gets the chunk size from options
49
- */
50
- get chunkSize() {
51
- return this.options.chunkSize;
52
- }
53
- /**
54
- * Gets the onError callback from options
55
- */
56
- get onError() {
57
- return this.options.onError;
58
- }
59
- /**
60
- * Creates and returns a ReadableStream that reads data in chunks.
61
- *
62
- * @param options - Configuration options for the chunked stream reader
63
- * @returns ReadableStream that can be consumed by zip.add or other stream consumers
64
- *
65
- * @example
66
- * ```typescript
67
- * const stream = ChunkedStreamReader.create({
68
- * fetchChunk: async (range, signal) => {
69
- * const response = await fetch(`/api/data?from=${range.from}&to=${range.to}`, { signal });
70
- * return new Uint8Array(await response.arrayBuffer());
71
- * },
72
- * totalSize: 1024 * 1024, // 1MB
73
- * chunkSize: 64 * 1024, // 64KB chunks
74
- * });
75
- * ```
76
- */
77
- static create(options) {
78
- const reader = new ChunkedStreamReader(options);
79
- return reader.createStream();
80
- }
81
- readStart() {
82
- this._read = true;
83
- }
84
- readStop() {
85
- this._read = false;
86
- }
87
- async tryRead(controller) {
88
- if (this._canceled) {
89
- return true;
90
- }
91
- // Check if we've read all data
92
- if (this.isComplete()) {
93
- controller.close();
94
- return true;
95
- }
96
- try {
97
- // Calculate the end position for this chunk
98
- // Ensure we don't read beyond the total size
99
- const endPosition = Math.min(this.currentPosition + this.chunkSize, this.totalSize);
100
- // Fetch the chunk from the blob driver, passing the abort signal if available
101
- const data = await this.fetchChunk({ from: this.currentPosition, to: endPosition }, this.abortController?.signal);
102
- // Check if stream was cancelled during the fetch
103
- if (this._canceled) {
104
- return true;
105
- }
106
- // Enqueue the data into the stream
107
- controller.enqueue(data);
108
- // Update the current position for the next chunk
109
- this.currentPosition = endPosition;
110
- if (!controller.desiredSize || controller.desiredSize <= 0) {
111
- // The internal queue is full, so propagate
112
- // the backpressure signal to the underlying source.
113
- this.readStop();
114
- }
115
- }
116
- catch (error) {
117
- // If any error occurs during chunk reading, call the error handler
118
- const status = await this.onError(error);
119
- if (status === "error") {
120
- this._errored = true;
121
- // Error the stream and abort any ongoing fetch operations
122
- controller.error(error);
123
- this.abortController?.abort("Stream errored");
124
- return true; // Stop reading
125
- }
126
- if (status === "cancel") {
127
- this._canceled = true;
128
- // Close the stream gracefully and abort any ongoing fetch operations
129
- controller.close();
130
- this.abortController?.abort("Stream cancelled");
131
- console.debug("ChunkedStreamReader cancelled due to error");
132
- return true; // Stop reading
133
- }
134
- }
135
- return false;
136
- }
137
- /**
138
- * Creates and returns a ReadableStream that reads data in chunks.
139
- * The stream will automatically close when all data has been read.
140
- *
141
- * @private - Use the static `create` method instead
142
- * @returns ReadableStream that can be consumed by zip.add or other stream consumers
143
- */
144
- createStream() {
145
- // Create an AbortController for this stream
146
- this.abortController = new AbortController();
147
- return new ReadableStream({
148
- start: async (controller) => {
149
- while (true) {
150
- if (this._canceled || this._errored) {
151
- return;
152
- }
153
- if (!this._read) {
154
- await new Promise((r) => setTimeout(r, 0));
155
- if (controller.desiredSize) {
156
- this.readStart();
157
- }
158
- }
159
- else {
160
- const isDone = await this.tryRead(controller);
161
- if (isDone) {
162
- return;
163
- }
164
- }
165
- }
166
- },
167
- pull: () => {
168
- this.readStart();
169
- },
170
- cancel: (reason) => {
171
- this._canceled = true;
172
- // Abort any ongoing fetch operations
173
- this.abortController?.abort(reason);
174
- console.debug("ChunkedStreamReader cancelled:", reason);
175
- },
176
- });
177
- }
178
- /**
179
- * Gets the current reading position in bytes.
180
- *
181
- * @returns Current position as number of bytes read
182
- */
183
- getCurrentPosition() {
184
- return this.currentPosition;
185
- }
186
- /**
187
- * Gets the remaining bytes to be read.
188
- *
189
- * @returns Number of bytes remaining
190
- */
191
- getRemainingBytes() {
192
- return Math.max(0, this.totalSize - this.currentPosition);
193
- }
194
- /**
195
- * Checks if the entire blob has been read.
196
- *
197
- * @returns True if all data has been read
198
- */
199
- isComplete() {
200
- return this.currentPosition >= this.totalSize;
201
- }
202
- }
3
+ * ChunkedStreamReader creates a ReadableStream that reads data from a blob driver
4
+ * in fixed-size chunks. This is useful for streaming large files without loading
5
+ * them entirely into memory.
6
+ */
7
+ var ChunkedStreamReader = class ChunkedStreamReader {
8
+ currentPosition = 0;
9
+ _read = true;
10
+ _canceled = false;
11
+ _errored = false;
12
+ abortController = null;
13
+ options;
14
+ /**
15
+ * Creates a new ChunkedStreamReader instance.
16
+ * Use the static `create` method instead.
17
+ */
18
+ constructor(options) {
19
+ this.options = {
20
+ ...options,
21
+ chunkSize: options.chunkSize ?? 16 * 1024 * 1024,
22
+ onError: options.onError ?? (async () => {
23
+ return "error";
24
+ })
25
+ };
26
+ if (this.totalSize < 0) throw new Error("Total size must be non-negative");
27
+ if (this.chunkSize <= 0) throw new Error("Chunk size must be positive");
28
+ }
29
+ /**
30
+ * Gets the fetchChunk function from options
31
+ */
32
+ get fetchChunk() {
33
+ return this.options.fetchChunk;
34
+ }
35
+ /**
36
+ * Gets the total size from options
37
+ */
38
+ get totalSize() {
39
+ return this.options.totalSize;
40
+ }
41
+ /**
42
+ * Gets the chunk size from options
43
+ */
44
+ get chunkSize() {
45
+ return this.options.chunkSize;
46
+ }
47
+ /**
48
+ * Gets the onError callback from options
49
+ */
50
+ get onError() {
51
+ return this.options.onError;
52
+ }
53
+ /**
54
+ * Creates and returns a ReadableStream that reads data in chunks.
55
+ *
56
+ * @param options - Configuration options for the chunked stream reader
57
+ * @returns ReadableStream that can be consumed by zip.add or other stream consumers
58
+ *
59
+ * @example
60
+ * ```typescript
61
+ * const stream = ChunkedStreamReader.create({
62
+ * fetchChunk: async (range, signal) => {
63
+ * const response = await fetch(`/api/data?from=${range.from}&to=${range.to}`, { signal });
64
+ * return new Uint8Array(await response.arrayBuffer());
65
+ * },
66
+ * totalSize: 1024 * 1024, // 1MB
67
+ * chunkSize: 64 * 1024, // 64KB chunks
68
+ * });
69
+ * ```
70
+ */
71
+ static create(options) {
72
+ return new ChunkedStreamReader(options).createStream();
73
+ }
74
+ readStart() {
75
+ this._read = true;
76
+ }
77
+ readStop() {
78
+ this._read = false;
79
+ }
80
+ async tryRead(controller) {
81
+ if (this._canceled) return true;
82
+ if (this.isComplete()) {
83
+ controller.close();
84
+ return true;
85
+ }
86
+ try {
87
+ const endPosition = Math.min(this.currentPosition + this.chunkSize, this.totalSize);
88
+ const data = await this.fetchChunk({
89
+ from: this.currentPosition,
90
+ to: endPosition
91
+ }, this.abortController?.signal);
92
+ if (this._canceled) return true;
93
+ controller.enqueue(data);
94
+ this.currentPosition = endPosition;
95
+ if (!controller.desiredSize || controller.desiredSize <= 0) this.readStop();
96
+ } catch (error) {
97
+ const status = await this.onError(error);
98
+ if (status === "error") {
99
+ this._errored = true;
100
+ controller.error(error);
101
+ this.abortController?.abort("Stream errored");
102
+ return true;
103
+ }
104
+ if (status === "cancel") {
105
+ this._canceled = true;
106
+ controller.close();
107
+ this.abortController?.abort("Stream cancelled");
108
+ console.debug("ChunkedStreamReader cancelled due to error");
109
+ return true;
110
+ }
111
+ }
112
+ return false;
113
+ }
114
+ /**
115
+ * Creates and returns a ReadableStream that reads data in chunks.
116
+ * The stream will automatically close when all data has been read.
117
+ *
118
+ * @private - Use the static `create` method instead
119
+ * @returns ReadableStream that can be consumed by zip.add or other stream consumers
120
+ */
121
+ createStream() {
122
+ this.abortController = new AbortController();
123
+ return new ReadableStream({
124
+ start: async (controller) => {
125
+ while (true) {
126
+ if (this._canceled || this._errored) return;
127
+ if (!this._read) {
128
+ await new Promise((r) => setTimeout(r, 0));
129
+ if (controller.desiredSize) this.readStart();
130
+ } else if (await this.tryRead(controller)) return;
131
+ }
132
+ },
133
+ pull: () => {
134
+ this.readStart();
135
+ },
136
+ cancel: (reason) => {
137
+ this._canceled = true;
138
+ this.abortController?.abort(reason);
139
+ console.debug("ChunkedStreamReader cancelled:", reason);
140
+ }
141
+ });
142
+ }
143
+ /**
144
+ * Gets the current reading position in bytes.
145
+ *
146
+ * @returns Current position as number of bytes read
147
+ */
148
+ getCurrentPosition() {
149
+ return this.currentPosition;
150
+ }
151
+ /**
152
+ * Gets the remaining bytes to be read.
153
+ *
154
+ * @returns Number of bytes remaining
155
+ */
156
+ getRemainingBytes() {
157
+ return Math.max(0, this.totalSize - this.currentPosition);
158
+ }
159
+ /**
160
+ * Checks if the entire blob has been read.
161
+ *
162
+ * @returns True if all data has been read
163
+ */
164
+ isComplete() {
165
+ return this.currentPosition >= this.totalSize;
166
+ }
167
+ };
203
168
 
169
+ //#endregion
204
170
  export { ChunkedStreamReader };
205
- //# sourceMappingURL=ChunkedStreamReader.js.map
171
+ //# sourceMappingURL=ChunkedStreamReader.js.map