s3mini 0.9.0 → 0.9.1

package/src/S3.ts CHANGED
@@ -15,6 +15,8 @@ import {
  extractErrCode,
  S3NetworkError,
  S3ServiceError,
+ generateParts,
+ toUint8Array,
  } from './utils.js';
  import type * as IT from './types.js';

@@ -55,6 +57,7 @@ class S3mini {
  * @param {number} [config.requestAbortTimeout=undefined] - The timeout in milliseconds after which a request should be aborted (careful on streamed requests).
  * @param {Object} [config.logger=null] - A logger object with methods like info, warn, error.
  * @param {typeof fetch} [config.fetch=globalThis.fetch] - Custom fetch implementation to use for HTTP requests.
+ * @param {number} [config.minPartSize=8388608] - The minimum part size for multipart uploads in bytes (default is 8 MiB).
  * @throws {TypeError} Will throw an error if required parameters are missing or of incorrect type.
  */
  readonly #accessKeyId: string;
@@ -66,6 +69,7 @@ class S3mini {
  readonly requestAbortTimeout?: number;
  readonly logger?: IT.Logger;
  readonly _fetch: typeof fetch;
+ readonly minPartSize: number;
  private signingKeyDate?: string;
  private signingKey?: ArrayBuffer;

@@ -78,6 +82,7 @@ class S3mini {
  requestAbortTimeout = undefined,
  logger = undefined,
  fetch = globalThis.fetch,
+ minPartSize = C.MIN_PART_SIZE,
  }: IT.S3Config) {
  this._validateConstructorParams(accessKeyId, secretAccessKey, endpoint);
  this.#accessKeyId = accessKeyId;
@@ -89,6 +94,7 @@ class S3mini {
  this.requestAbortTimeout = requestAbortTimeout;
  this.logger = logger;
  this._fetch = (input: RequestInfo | URL, init?: RequestInit): Promise<Response> => fetch(input, init);
+ this.minPartSize = minPartSize;
  }

  private _sanitize(obj: unknown): unknown {
@@ -241,18 +247,24 @@ class S3mini {
  return { filteredOpts, conditionalHeaders };
  }

- private _validateData(data: unknown): BodyInit {
- if (!((globalThis.Buffer && data instanceof globalThis.Buffer) || typeof data === 'string')) {
- this._log('error', C.ERROR_DATA_BUFFER_REQUIRED);
- throw new TypeError(C.ERROR_DATA_BUFFER_REQUIRED);
- }
- return data;
- }
+ // private _validateData(data: unknown): BodyInit {
+ // if (data instanceof ArrayBuffer) {
+ // return data;
+ // }
+ // if (data instanceof Uint8Array) {
+ // return data as unknown as BodyInit;
+ // }
+ // if ((globalThis.Buffer && data instanceof globalThis.Buffer) || typeof data === 'string') {
+ // return data as BodyInit;
+ // }
+ // this._log('error', C.ERROR_DATA_BUFFER_REQUIRED);
+ // throw new TypeError(C.ERROR_DATA_BUFFER_REQUIRED);
+ // }

  private _validateUploadPartParams(
  key: string,
  uploadId: string,
- data: IT.MaybeBuffer | string,
+ data: IT.DataInput,
  partNumber: number,
  opts: object,
  ): BodyInit {
@@ -266,7 +278,7 @@ class S3mini {
  throw new TypeError(`${C.ERROR_PREFIX}partNumber must be a positive integer`);
  }
  this._checkOpts(opts);
- return this._validateData(data);
+ return data as BodyInit;
  }

  private async _sign(
@@ -969,7 +981,7 @@ class S3mini {
  /**
  * Uploads an object to the S3-compatible service.
  * @param {string} key - The key/path where the object will be stored.
- * @param {string | Buffer} data - The data to upload (string or Buffer).
+ * @param {string | IT.MaybeBuffer | ReadableStream | File | Blob} data - The data to upload (string, Buffer, typed array, Blob/File, or ReadableStream).
  * @param {string} [fileType='application/octet-stream'] - The MIME type of the file.
  * @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
  * @param {IT.AWSHeaders} [additionalHeaders] - Additional x-amz-* headers specific to this request, if any.
@@ -985,15 +997,17 @@ class S3mini {
  */
  public async putObject(
  key: string,
- data: string | IT.MaybeBuffer,
+ data: string | IT.DataInput | ReadableStream | File | Blob,
  fileType: string = C.DEFAULT_STREAM_CONTENT_TYPE,
  ssecHeaders?: IT.SSECHeaders,
  additionalHeaders?: IT.AWSHeaders,
+ contentLength?: number,
  ): Promise<Response> {
+ const size = contentLength ?? getByteSize(data);
  return this._signedRequest('PUT', key, {
- body: this._validateData(data),
+ body: data as BodyInit,
  headers: {
- [C.HEADER_CONTENT_LENGTH]: getByteSize(data),
+ ...(size && { [C.HEADER_CONTENT_LENGTH]: size }),
  [C.HEADER_CONTENT_TYPE]: fileType,
  ...additionalHeaders,
  ...ssecHeaders,
@@ -1002,6 +1016,235 @@ class S3mini {
  });
  }

+ /**
+ * Puts an object, automatically choosing between a single PUT and a multipart upload.
+ * Same signature/shape as putObject so callers don't need to change.
+ * @param {string} key - The key/path where the object will be stored.
+ * @param {string | IT.MaybeBuffer | ReadableStream | File | Blob} data - The data to upload (string, Buffer, typed array, Blob/File, or ReadableStream).
+ * @param {string} [fileType='application/octet-stream'] - The MIME type of the file.
+ * @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
+ * @param {IT.AWSHeaders} [additionalHeaders] - Additional x-amz-* headers specific to this request, if any.
+ * @param {number} [contentLength] - Optional known content length of the data.
+ * @returns {Promise<Response | { ok: boolean; status: number; headers: Map<string, string> }>} A promise that resolves to the Response object from the upload request.
+ * @throws {TypeError} If data is not a supported input type.
+ * @example
+ * // Upload text file
+ * await s3.putAnyObject('hello.txt', 'Hello, World!', 'text/plain');
+ *
+ * // Upload binary data
+ * const buffer = Buffer.from([0x89, 0x50, 0x4e, 0x47]);
+ * await s3.putAnyObject('image.png', buffer, 'image/png');
+ */
+ public async putAnyObject(
+ key: string,
+ data: IT.DataInput,
+ fileType: string = C.DEFAULT_STREAM_CONTENT_TYPE,
+ ssecHeaders?: IT.SSECHeaders,
+ additionalHeaders?: IT.AWSHeaders,
+ contentLength?: number,
+ ): Promise<Response | { ok: boolean; status: number; headers: Map<string, string> }> {
+ const size = contentLength ?? getByteSize(data);
+
+ // Single PUT for small files
+ if (!Number.isNaN(size) && size <= this.minPartSize) {
+ return this.putObject(key, data, fileType, ssecHeaders, additionalHeaders, contentLength);
+ }
+
+ this._checkKey(key);
+ return this._multipartUpload(key, data, fileType, ssecHeaders, additionalHeaders);
+ }
+
+ private async _multipartUpload(
+ key: string,
+ data: IT.DataInput,
+ fileType: string,
+ ssecHeaders?: IT.SSECHeaders,
+ additionalHeaders?: IT.AWSHeaders,
+ ): Promise<Response | { ok: boolean; status: number; headers: Map<string, string> }> {
+ const uploadId = await this.getMultipartUploadId(key, fileType, ssecHeaders, additionalHeaders);
+
+ try {
+ const parts = await this._uploadPartsOptimized(key, uploadId, data, ssecHeaders, additionalHeaders);
+ parts.sort((a, b) => a.partNumber - b.partNumber);
+ const result = await this.completeMultipartUpload(key, uploadId, parts);
+ return this._createSuccessResponse(result.etag || '');
+ } catch (err) {
+ await this._safeAbortUpload(key, uploadId);
+ throw err;
+ }
+ }
+
+ private async _uploadKnownSizePartsParallel(
+ key: string,
+ uploadId: string,
+ data: Uint8Array | Blob,
+ ssecHeaders?: IT.SSECHeaders,
+ additionalHeaders?: IT.AWSHeaders,
+ concurrency: number = 4,
+ maxRetries: number = 3,
+ ): Promise<IT.UploadPart[]> {
+ const partSize = this.minPartSize;
+ const totalSize = data instanceof Blob ? data.size : data.byteLength;
+ const totalParts = Math.ceil(totalSize / partSize);
+ const results: IT.UploadPart[] = new Array(totalParts) as IT.UploadPart[];
+ let nextIndex = 0;
+
+ const worker = async (): Promise<void> => {
+ while (true) {
+ const index = nextIndex++;
+ if (index >= totalParts) {
+ return;
+ }
+
+ const start = index * partSize;
+ const end = Math.min(start + partSize, totalSize);
+ const part =
+ data instanceof Blob
+ ? await data.slice(start, end).arrayBuffer() // Must await - R2 needs actual bytes
+ : data.subarray(start, end);
+
+ results[index] = await this._uploadPartWithRetry(
+ key,
+ uploadId,
+ part,
+ index + 1,
+ ssecHeaders,
+ additionalHeaders,
+ maxRetries,
+ );
+ }
+ };
+
+ await Promise.all(Array.from({ length: Math.min(concurrency, totalParts) }, () => worker()));
+ return results;
+ }
+
+ private async _uploadPartsOptimized(
+ key: string,
+ uploadId: string,
+ data: IT.DataInput,
+ ssecHeaders?: IT.SSECHeaders,
+ additionalHeaders?: IT.AWSHeaders,
+ concurrency: number = 4,
+ maxRetries: number = 3,
+ ): Promise<IT.UploadPart[]> {
+ const bytes = toUint8Array(data);
+ if (bytes) {
+ return this._uploadKnownSizePartsParallel(
+ key,
+ uploadId,
+ bytes,
+ ssecHeaders,
+ additionalHeaders,
+ concurrency,
+ maxRetries,
+ );
+ }
+ if (data instanceof Blob) {
+ return this._uploadKnownSizePartsParallel(
+ key,
+ uploadId,
+ data,
+ ssecHeaders,
+ additionalHeaders,
+ concurrency,
+ maxRetries,
+ );
+ }
+ return this._uploadStreamingParts(
+ key,
+ uploadId,
+ data as ReadableStream,
+ ssecHeaders,
+ additionalHeaders,
+ concurrency,
+ maxRetries,
+ );
+ }
+
+ private async _uploadStreamingParts(
+ key: string,
+ uploadId: string,
+ stream: ReadableStream,
+ ssecHeaders?: IT.SSECHeaders,
+ additionalHeaders?: IT.AWSHeaders,
+ concurrency: number = 4,
+ maxRetries: number = 3,
+ ): Promise<IT.UploadPart[]> {
+ const parts: IT.UploadPart[] = [];
+ const active = new Set<Promise<void>>();
+ let partNumber = 0;
+
+ for await (const partData of generateParts(stream, this.minPartSize)) {
+ const currentPartNumber = ++partNumber;
+
+ while (active.size >= concurrency) {
+ await Promise.race(active);
+ }
+
+ const p = this._uploadPartWithRetry(
+ key,
+ uploadId,
+ partData,
+ currentPartNumber,
+ ssecHeaders,
+ additionalHeaders,
+ maxRetries,
+ ).then(part => {
+ parts.push(part);
+ active.delete(p);
+ });
+
+ active.add(p);
+ }
+
+ await Promise.all(active);
+ return parts;
+ }
+
+ private async _uploadPartWithRetry(
+ key: string,
+ uploadId: string,
+ data: IT.PartData,
+ partNumber: number,
+ ssecHeaders?: IT.SSECHeaders,
+ additionalHeaders?: IT.AWSHeaders,
+ maxRetries: number = 3,
+ ): Promise<IT.UploadPart> {
+ for (let attempt = 0; attempt <= maxRetries; attempt++) {
+ try {
+ return await this.uploadPart(key, uploadId, data, partNumber, {}, ssecHeaders, additionalHeaders);
+ } catch (err) {
+ if (attempt === maxRetries) {
+ throw err;
+ }
+ await new Promise(r => setTimeout(r, Math.min(1000 * 2 ** attempt, 10000)));
+ }
+ }
+ throw new Error('Unreachable');
+ }
+
+ private async _safeAbortUpload(key: string, uploadId: string): Promise<void> {
+ try {
+ await this.abortMultipartUpload(key, uploadId);
+ } catch (err) {
+ this._log('warn', `Failed to abort multipart upload: ${String(err)}`);
+ }
+ }
+
+ private _createSuccessResponse(
+ etag: string,
+ ): Response | { ok: boolean; status: number; headers: Map<string, string> } {
+ if (typeof Response !== 'undefined') {
+ const headers = new Headers();
+ if (etag) {
+ headers.set('ETag', etag);
+ }
+ return new Response('', { status: 200, headers });
+ }
+ return { ok: true, status: 200, headers: new Map([['ETag', etag]]) };
+ }
+
  /**
  * Initiates a multipart upload and returns the upload ID.
  * @param {string} key - The key/path where the object will be stored.
@@ -1018,13 +1261,14 @@ class S3mini {
  key: string,
  fileType: string = C.DEFAULT_STREAM_CONTENT_TYPE,
  ssecHeaders?: IT.SSECHeaders,
+ additionalHeaders?: IT.AWSHeaders,
  ): Promise<string> {
  this._checkKey(key);
  if (typeof fileType !== 'string') {
  throw new TypeError(`${C.ERROR_PREFIX}fileType must be a string`);
  }
  const query = { uploads: '' };
- const headers = { [C.HEADER_CONTENT_TYPE]: fileType, ...ssecHeaders };
+ const headers = { [C.HEADER_CONTENT_TYPE]: fileType, ...ssecHeaders, ...additionalHeaders };

  const res = await this._signedRequest('POST', key, {
  query,
@@ -1056,7 +1300,7 @@ class S3mini {
  * Uploads a part in a multipart upload.
  * @param {string} key - The key of the object being uploaded.
  * @param {string} uploadId - The upload ID from getMultipartUploadId.
- * @param {Buffer | string} data - The data for this part.
+ * @param {string | IT.MaybeBuffer | ReadableStream | File | Blob} data - The data for this part.
  * @param {number} partNumber - The part number (must be between 1 and 10,000).
  * @param {Record<string, unknown>} [opts={}] - Additional options for the request.
  * @param {IT.SSECHeaders} [ssecHeaders] - Server-Side Encryption headers, if any.
@@ -1074,20 +1318,23 @@ class S3mini {
  public async uploadPart(
  key: string,
  uploadId: string,
- data: IT.MaybeBuffer | string,
+ data: IT.DataInput,
  partNumber: number,
  opts: Record<string, unknown> = {},
  ssecHeaders?: IT.SSECHeaders,
+ additionalHeaders?: IT.AWSHeaders,
  ): Promise<IT.UploadPart> {
  const body = this._validateUploadPartParams(key, uploadId, data, partNumber, opts);

  const query = { uploadId, partNumber, ...opts };
+ const size = getByteSize(data);
  const res = await this._signedRequest('PUT', key, {
  query,
  body,
  headers: {
- [C.HEADER_CONTENT_LENGTH]: getByteSize(data),
+ ...(size && !Number.isNaN(size) && { [C.HEADER_CONTENT_LENGTH]: size }),
  ...ssecHeaders,
+ ...additionalHeaders,
  },
  });
package/src/consts.ts CHANGED
@@ -11,6 +11,7 @@ export const JSON_CONTENT_TYPE = 'application/json';
  export const SENSITIVE_KEYS_REDACTED = new Set(['accesskeyid', 'secretaccesskey', 'sessiontoken', 'password', 'token']);
  export const IFHEADERS = new Set(['if-match', 'if-none-match', 'if-modified-since', 'if-unmodified-since']);
  export const DEFAULT_REQUEST_SIZE_IN_BYTES = 8 * 1024 * 1024;
+ export const MIN_PART_SIZE = 8 * 1024 * 1024;

  // Headers
  export const HEADER_AMZ_CONTENT_SHA256 = 'x-amz-content-sha256';
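
The new `MIN_PART_SIZE` constant drives both the single-PUT threshold and part slicing. As a sanity check of the `Math.ceil(totalSize / partSize)` arithmetic used by `_uploadKnownSizePartsParallel` and `calculateParts`, here is a standalone sketch (the `partCount` helper is hypothetical, not part of the package):

```ts
// Hypothetical helper mirroring the part-count arithmetic in this diff.
const MIN_PART_SIZE = 8 * 1024 * 1024; // 8 MiB, as in consts.ts

const partCount = (totalSize: number, partSize: number = MIN_PART_SIZE): number =>
  Math.ceil(totalSize / partSize);

console.log(partCount(8 * 1024 * 1024));   // 1  -> within the single-PUT threshold
console.log(partCount(100 * 1024 * 1024)); // 13 -> 12 full 8 MiB parts plus a 4 MiB remainder
```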
package/src/types.ts CHANGED
@@ -7,8 +7,11 @@ export interface S3Config {
  requestAbortTimeout?: number;
  logger?: Logger;
  fetch?: typeof fetch;
+ minPartSize?: number;
  }

+ export type PartData = Uint8Array | Blob | ArrayBuffer;
+
  export interface SSECHeaders {
  'x-amz-server-side-encryption-customer-algorithm': string;
  'x-amz-server-side-encryption-customer-key': string;
@@ -151,12 +154,12 @@ export interface CopyObjectResult {
  lastModified?: Date;
  }

- /**
- * Where Buffer is available, e.g. when @types/node is loaded, we want to use it.
- * But it should be excluded in other environments (e.g. Cloudflare).
- */
- export type MaybeBuffer = typeof globalThis extends { Buffer?: infer B }
+ type BinaryData = ArrayBuffer | Uint8Array;
+
+ type MaybeBuffer = typeof globalThis extends { Buffer?: infer B }
  ? B extends new (...a: unknown[]) => unknown
- ? InstanceType<B>
- : ArrayBuffer | Uint8Array
- : ArrayBuffer | Uint8Array;
+ ? InstanceType<B> | BinaryData
+ : BinaryData
+ : BinaryData;
+
+ export type DataInput = string | MaybeBuffer | ReadableStream | File | Blob;
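
The reworked `MaybeBuffer` conditional type is easiest to read by example. A type-level sketch (standalone, mirroring the definitions above rather than importing them) of what `DataInput` now admits:

```ts
// Mirrors the types.ts definitions in this diff; compile-time demonstration only.
type BinaryData = ArrayBuffer | Uint8Array;
type MaybeBuffer = typeof globalThis extends { Buffer?: infer B }
  ? B extends new (...a: unknown[]) => unknown
    ? InstanceType<B> | BinaryData
    : BinaryData
  : BinaryData;
type DataInput = string | MaybeBuffer | ReadableStream | File | Blob;

// All of these type-check as upload inputs in any environment:
const a: DataInput = 'plain string';
const b: DataInput = new Uint8Array([1, 2, 3]);
const c: DataInput = new ArrayBuffer(16);
const d: DataInput = new Blob([b]);
// In Node with @types/node loaded, Buffer also satisfies MaybeBuffer:
// const e: DataInput = Buffer.from('bytes');
```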
package/src/utils.ts CHANGED
@@ -1,23 +1,45 @@
  'use strict';
- import type { XmlValue, XmlMap, ListBucketResponse, ErrorWithCode } from './types.js';
+
+ import type { DataInput, XmlValue, XmlMap, ListBucketResponse, ErrorWithCode, PartData } from './types.js';
+ import { ERROR_PREFIX } from './consts.js';

  const ENCODR = new TextEncoder();
  const chunkSize = 0x8000; // 32KB chunks
  const HEX_CHARS = new Uint8Array([48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 97, 98, 99, 100, 101, 102]);

- export const getByteSize = (data: unknown): number => {
+ export const getByteSize = (data: DataInput): number => {
  if (typeof data === 'string') {
  return ENCODR.encode(data).byteLength;
  }
  if (data instanceof ArrayBuffer || data instanceof Uint8Array) {
  return data.byteLength;
  }
- if (data instanceof Blob) {
+ if (data instanceof Blob || data instanceof File) {
  return data.size;
  }
+ if (data instanceof ReadableStream) {
+ return Number.NaN; // size unknown
+ }
  throw new Error('Unsupported data type');
  };

+ export const toUint8Array = (data: DataInput): Uint8Array | null => {
+ if (typeof data === 'string') {
+ return ENCODR.encode(data);
+ }
+ if (data instanceof ArrayBuffer) {
+ return new Uint8Array(data);
+ }
+ if (data instanceof Uint8Array) {
+ return data;
+ }
+ // Node Buffer
+ if (typeof Buffer !== 'undefined' && Buffer.isBuffer(data)) {
+ return new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
+ }
+ return null;
+ };
+
  /**
  * Turn a raw ArrayBuffer into its hexadecimal representation.
  * @param {ArrayBuffer} buffer The raw bytes.
@@ -264,3 +286,143 @@ export const runInBatches = async <T = unknown>(
  }
  }
  };
+
+ export const generateParts = async function* (data: DataInput, partSize: number): AsyncGenerator<PartData> {
+ const bytes = toUint8Array(data);
+
+ if (bytes) {
+ yield* generateBufferParts(bytes, partSize);
+ } else if (data instanceof Blob) {
+ yield* generateBlobParts(data, partSize);
+ } else if (data instanceof ReadableStream) {
+ yield* generateStreamParts(data as ReadableStream<Uint8Array>, partSize);
+ } else {
+ throw new TypeError(`${ERROR_PREFIX}Unsupported data type for multipart upload`);
+ }
+ };
+
+ export function* generateBufferParts(bytes: Uint8Array, partSize: number): Generator<Uint8Array> {
+ for (let offset = 0; offset < bytes.byteLength; offset += partSize) {
+ yield bytes.subarray(offset, Math.min(offset + partSize, bytes.byteLength));
+ }
+ }
+
+ /**
+ * Zero-copy: yields Blob slices. Data is only read when fetch consumes it.
+ */
+ const generateBlobParts = function* (blob: Blob, partSize: number): Generator<Blob> {
+ for (let offset = 0; offset < blob.size; offset += partSize) {
+ yield blob.slice(offset, Math.min(offset + partSize, blob.size));
+ }
+ };
+
+ const generateStreamParts = async function* (
+ stream: ReadableStream<Uint8Array>,
+ partSize: number,
+ ): AsyncGenerator<ArrayBuffer> {
+ const reader = stream.getReader();
+ const chunks: Uint8Array[] = [];
+ let buffered = 0;
+
+ try {
+ while (true) {
+ const { done, value } = await reader.read();
+
+ if (value) {
+ chunks.push(value);
+ buffered += value.byteLength;
+
+ while (buffered >= partSize) {
+ yield extractPart(chunks, partSize);
+ buffered -= partSize;
+ }
+ }
+
+ if (done) {
+ break;
+ }
+ }
+
+ // Yield remaining
+ if (buffered > 0) {
+ yield extractPart(chunks, buffered);
+ }
+ } finally {
+ reader.releaseLock();
+ }
+ };
+
+ const extractPart = (chunks: Uint8Array[], size: number): ArrayBuffer => {
+ const part = new Uint8Array(size);
+ let offset = 0;
+
+ while (offset < size && chunks.length > 0) {
+ const chunk = chunks[0]!;
+ const needed = size - offset;
+
+ if (chunk.byteLength <= needed) {
+ part.set(chunk, offset);
+ offset += chunk.byteLength;
+ chunks.shift();
+ } else {
+ part.set(chunk.subarray(0, needed), offset);
+ chunks[0] = chunk.subarray(needed);
+ offset = size;
+ }
+ }
+
+ return part.buffer;
+ };
+
+ export interface PartDescriptor {
+ partNumber: number;
+ data: PartData;
+ }
+
+ /**
+ * Pre-calculate all parts for known-size data.
+ * Returns array of part descriptors for parallel upload.
+ */
+ export const calculateParts = (data: DataInput, partSize: number): PartDescriptor[] => {
+ const bytes = toUint8Array(data);
+
+ if (bytes) {
+ return calculateBufferParts(bytes, partSize);
+ }
+
+ if (data instanceof Blob) {
+ return calculateBlobParts(data, partSize);
+ }
+
+ throw new TypeError(`${ERROR_PREFIX}Unsupported data type for part calculation`);
+ };
+
+ function calculateBufferParts(bytes: Uint8Array, partSize: number): PartDescriptor[] {
+ const totalParts = Math.ceil(bytes.byteLength / partSize);
+ const parts: PartDescriptor[] = new Array(totalParts) as PartDescriptor[];
+
+ for (let i = 0; i < totalParts; i++) {
+ const start = i * partSize;
+ parts[i] = {
+ partNumber: i + 1,
+ data: bytes.subarray(start, Math.min(start + partSize, bytes.byteLength)),
+ };
+ }
+
+ return parts;
+ }
+
+ function calculateBlobParts(blob: Blob, partSize: number): PartDescriptor[] {
+ const totalParts = Math.ceil(blob.size / partSize);
+ const parts: PartDescriptor[] = new Array(totalParts) as PartDescriptor[];
+
+ for (let i = 0; i < totalParts; i++) {
+ const start = i * partSize;
+ parts[i] = {
+ partNumber: i + 1,
+ data: blob.slice(start, Math.min(start + partSize, blob.size)),
+ };
+ }
+
+ return parts;
+ }
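
To see the streaming path end to end, here is a standalone sketch driving `generateParts` with a stream whose chunk boundaries do not align with the part size; the buffering and re-slicing behavior follows from `generateStreamParts` and `extractPart` above. The import path is an assumption (adjust to the package's actual build layout), and the tiny part size is purely illustrative, since real uploads use `minPartSize`.

```ts
// Import path assumed; generateParts is exported from the package's utils module.
import { generateParts } from 's3mini/utils';

// A stream of 10 chunks of 700 bytes each: 7000 bytes total.
const stream = new ReadableStream<Uint8Array>({
  start(controller) {
    for (let i = 0; i < 10; i++) {
      controller.enqueue(new Uint8Array(700).fill(i));
    }
    controller.close();
  },
});

// With partSize = 2048, incoming chunks are buffered and re-sliced into
// exact 2048-byte parts, and the 856-byte remainder is yielded last.
for await (const part of generateParts(stream, 2048)) {
  const size = part instanceof Blob ? part.size : part.byteLength;
  console.log(size); // 2048, 2048, 2048, 856
}
```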