s3mini 0.8.1 → 0.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/utils.ts CHANGED
@@ -1,23 +1,45 @@
1
1
  'use strict';
2
- import type { XmlValue, XmlMap, ListBucketResponse, ErrorWithCode } from './types.js';
2
+
3
+ import type { DataInput, XmlValue, XmlMap, ListBucketResponse, ErrorWithCode, PartData } from './types.js';
4
+ import { ERROR_PREFIX } from './consts.js';
3
5
 
4
6
// Shared UTF-8 encoder, reused to avoid a per-call TextEncoder allocation.
const ENCODR = new TextEncoder();
const chunkSize = 0x8000; // 32KB chunks
// Byte values of the ASCII characters '0'-'9' then 'a'-'f', for fast hex formatting.
const HEX_CHARS = Uint8Array.from('0123456789abcdef', (ch) => ch.charCodeAt(0));
7
9
 
8
- export const getByteSize = (data: unknown): number => {
10
+ export const getByteSize = (data: DataInput): number => {
9
11
  if (typeof data === 'string') {
10
12
  return ENCODR.encode(data).byteLength;
11
13
  }
12
14
  if (data instanceof ArrayBuffer || data instanceof Uint8Array) {
13
15
  return data.byteLength;
14
16
  }
15
- if (data instanceof Blob) {
17
+ if (data instanceof Blob || data instanceof File) {
16
18
  return data.size;
17
19
  }
20
+ if (data instanceof ReadableStream) {
21
+ return Number.NaN; // size unknown
22
+ }
18
23
  throw new Error('Unsupported data type');
19
24
  };
20
25
 
26
+ export const toUint8Array = (data: DataInput): Uint8Array | null => {
27
+ if (typeof data === 'string') {
28
+ return ENCODR.encode(data);
29
+ }
30
+ if (data instanceof ArrayBuffer) {
31
+ return new Uint8Array(data);
32
+ }
33
+ if (data instanceof Uint8Array) {
34
+ return data;
35
+ }
36
+ // Node Buffer
37
+ if (typeof Buffer !== 'undefined' && Buffer.isBuffer(data)) {
38
+ return new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
39
+ }
40
+ return null;
41
+ };
42
+
21
43
  /**
22
44
  * Turn a raw ArrayBuffer into its hexadecimal representation.
23
45
  * @param {ArrayBuffer} buffer The raw bytes.
@@ -264,3 +286,143 @@ export const runInBatches = async <T = unknown>(
264
286
  }
265
287
  }
266
288
  };
289
+
290
+ export const generateParts = async function* (data: DataInput, partSize: number): AsyncGenerator<PartData> {
291
+ const bytes = toUint8Array(data);
292
+
293
+ if (bytes) {
294
+ yield* generateBufferParts(bytes, partSize);
295
+ } else if (data instanceof Blob) {
296
+ yield* generateBlobParts(data, partSize);
297
+ } else if (data instanceof ReadableStream) {
298
+ yield* generateStreamParts(data as ReadableStream<Uint8Array>, partSize);
299
+ } else {
300
+ throw new TypeError(`${ERROR_PREFIX}Unsupported data type for multipart upload`);
301
+ }
302
+ };
303
+
304
+ export function* generateBufferParts(bytes: Uint8Array, partSize: number): Generator<Uint8Array> {
305
+ for (let offset = 0; offset < bytes.byteLength; offset += partSize) {
306
+ yield bytes.subarray(offset, Math.min(offset + partSize, bytes.byteLength));
307
+ }
308
+ }
309
+
310
+ /**
311
+ * Zero-copy: yields Blob slices. Data is only read when fetch consumes it.
312
+ */
313
+ const generateBlobParts = function* (blob: Blob, partSize: number): Generator<Blob> {
314
+ for (let offset = 0; offset < blob.size; offset += partSize) {
315
+ yield blob.slice(offset, Math.min(offset + partSize, blob.size));
316
+ }
317
+ };
318
+
319
+ const generateStreamParts = async function* (
320
+ stream: ReadableStream<Uint8Array>,
321
+ partSize: number,
322
+ ): AsyncGenerator<ArrayBuffer> {
323
+ const reader = stream.getReader();
324
+ const chunks: Uint8Array[] = [];
325
+ let buffered = 0;
326
+
327
+ try {
328
+ while (true) {
329
+ const { done, value } = await reader.read();
330
+
331
+ if (value) {
332
+ chunks.push(value);
333
+ buffered += value.byteLength;
334
+
335
+ while (buffered >= partSize) {
336
+ yield extractPart(chunks, partSize);
337
+ buffered -= partSize;
338
+ }
339
+ }
340
+
341
+ if (done) {
342
+ break;
343
+ }
344
+ }
345
+
346
+ // Yield remaining
347
+ if (buffered > 0) {
348
+ yield extractPart(chunks, buffered);
349
+ }
350
+ } finally {
351
+ reader.releaseLock();
352
+ }
353
+ };
354
+
355
+ const extractPart = (chunks: Uint8Array[], size: number): ArrayBuffer => {
356
+ const part = new Uint8Array(size);
357
+ let offset = 0;
358
+
359
+ while (offset < size && chunks.length > 0) {
360
+ const chunk = chunks[0]!;
361
+ const needed = size - offset;
362
+
363
+ if (chunk.byteLength <= needed) {
364
+ part.set(chunk, offset);
365
+ offset += chunk.byteLength;
366
+ chunks.shift();
367
+ } else {
368
+ part.set(chunk.subarray(0, needed), offset);
369
+ chunks[0] = chunk.subarray(needed);
370
+ offset = size;
371
+ }
372
+ }
373
+
374
+ return part.buffer;
375
+ };
376
+
377
+ export interface PartDescriptor {
378
+ partNumber: number;
379
+ data: PartData;
380
+ }
381
+
382
+ /**
383
+ * Pre-calculate all parts for known-size data.
384
+ * Returns array of part descriptors for parallel upload.
385
+ */
386
+ export const calculateParts = (data: DataInput, partSize: number): PartDescriptor[] => {
387
+ const bytes = toUint8Array(data);
388
+
389
+ if (bytes) {
390
+ return calculateBufferParts(bytes, partSize);
391
+ }
392
+
393
+ if (data instanceof Blob) {
394
+ return calculateBlobParts(data, partSize);
395
+ }
396
+
397
+ throw new TypeError(`${ERROR_PREFIX}Unsupported data type for part calculation`);
398
+ };
399
+
400
+ function calculateBufferParts(bytes: Uint8Array, partSize: number): PartDescriptor[] {
401
+ const totalParts = Math.ceil(bytes.byteLength / partSize);
402
+ const parts: PartDescriptor[] = new Array(totalParts) as PartDescriptor[];
403
+
404
+ for (let i = 0; i < totalParts; i++) {
405
+ const start = i * partSize;
406
+ parts[i] = {
407
+ partNumber: i + 1,
408
+ data: bytes.subarray(start, Math.min(start + partSize, bytes.byteLength)),
409
+ };
410
+ }
411
+
412
+ return parts;
413
+ }
414
+
415
+ function calculateBlobParts(blob: Blob, partSize: number): PartDescriptor[] {
416
+ const totalParts = Math.ceil(blob.size / partSize);
417
+ const parts: PartDescriptor[] = new Array(totalParts) as PartDescriptor[];
418
+
419
+ for (let i = 0; i < totalParts; i++) {
420
+ const start = i * partSize;
421
+ parts[i] = {
422
+ partNumber: i + 1,
423
+ data: blob.slice(start, Math.min(start + partSize, blob.size)),
424
+ };
425
+ }
426
+
427
+ return parts;
428
+ }