@loaders.gl/zip 4.4.0-alpha.1 → 4.4.0-alpha.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. package/dist/dist.dev.js +275 -248
  2. package/dist/dist.min.js +2 -2
  3. package/dist/filesystems/IndexedArchive.d.ts +4 -4
  4. package/dist/filesystems/IndexedArchive.d.ts.map +1 -1
  5. package/dist/filesystems/IndexedArchive.js +6 -5
  6. package/dist/filesystems/IndexedArchive.js.map +1 -0
  7. package/dist/filesystems/zip-filesystem.d.ts +7 -6
  8. package/dist/filesystems/zip-filesystem.d.ts.map +1 -1
  9. package/dist/filesystems/zip-filesystem.js +25 -22
  10. package/dist/filesystems/zip-filesystem.js.map +1 -0
  11. package/dist/hash-file-utility.d.ts +3 -3
  12. package/dist/hash-file-utility.d.ts.map +1 -1
  13. package/dist/hash-file-utility.js +2 -1
  14. package/dist/hash-file-utility.js.map +1 -0
  15. package/dist/index.cjs +181 -82
  16. package/dist/index.cjs.map +4 -4
  17. package/dist/index.d.ts +1 -0
  18. package/dist/index.d.ts.map +1 -1
  19. package/dist/index.js +2 -0
  20. package/dist/index.js.map +1 -0
  21. package/dist/lib/tar/header.js +1 -0
  22. package/dist/lib/tar/header.js.map +1 -0
  23. package/dist/lib/tar/tar.js +1 -0
  24. package/dist/lib/tar/tar.js.map +1 -0
  25. package/dist/lib/tar/types.js +1 -0
  26. package/dist/lib/tar/types.js.map +1 -0
  27. package/dist/lib/tar/utils.js +1 -0
  28. package/dist/lib/tar/utils.js.map +1 -0
  29. package/dist/parse-zip/cd-file-header.d.ts +4 -4
  30. package/dist/parse-zip/cd-file-header.d.ts.map +1 -1
  31. package/dist/parse-zip/cd-file-header.js +9 -6
  32. package/dist/parse-zip/cd-file-header.js.map +1 -0
  33. package/dist/parse-zip/end-of-central-directory.d.ts +3 -3
  34. package/dist/parse-zip/end-of-central-directory.d.ts.map +1 -1
  35. package/dist/parse-zip/end-of-central-directory.js +13 -11
  36. package/dist/parse-zip/end-of-central-directory.js.map +1 -0
  37. package/dist/parse-zip/local-file-header.d.ts +2 -2
  38. package/dist/parse-zip/local-file-header.d.ts.map +1 -1
  39. package/dist/parse-zip/local-file-header.js +5 -3
  40. package/dist/parse-zip/local-file-header.js.map +1 -0
  41. package/dist/parse-zip/readable-file-utils.d.ts +34 -0
  42. package/dist/parse-zip/readable-file-utils.d.ts.map +1 -0
  43. package/dist/parse-zip/readable-file-utils.js +111 -0
  44. package/dist/parse-zip/readable-file-utils.js.map +1 -0
  45. package/dist/parse-zip/search-from-the-end.d.ts +2 -2
  46. package/dist/parse-zip/search-from-the-end.d.ts.map +1 -1
  47. package/dist/parse-zip/search-from-the-end.js +7 -8
  48. package/dist/parse-zip/search-from-the-end.js.map +1 -0
  49. package/dist/parse-zip/zip-composition.d.ts.map +1 -1
  50. package/dist/parse-zip/zip-composition.js +16 -8
  51. package/dist/parse-zip/zip-composition.js.map +1 -0
  52. package/dist/parse-zip/zip64-info-generation.js +1 -0
  53. package/dist/parse-zip/zip64-info-generation.js.map +1 -0
  54. package/dist/tar-builder.js +1 -0
  55. package/dist/tar-builder.js.map +1 -0
  56. package/dist/zip-loader.js +2 -1
  57. package/dist/zip-loader.js.map +1 -0
  58. package/dist/zip-writer.js +3 -2
  59. package/dist/zip-writer.js.map +1 -0
  60. package/package.json +6 -6
  61. package/src/filesystems/IndexedArchive.ts +6 -10
  62. package/src/filesystems/zip-filesystem.ts +26 -28
  63. package/src/hash-file-utility.ts +4 -7
  64. package/src/index.ts +5 -0
  65. package/src/parse-zip/cd-file-header.ts +18 -16
  66. package/src/parse-zip/end-of-central-directory.ts +16 -17
  67. package/src/parse-zip/local-file-header.ts +8 -9
  68. package/src/parse-zip/readable-file-utils.ts +134 -0
  69. package/src/parse-zip/search-from-the-end.ts +8 -10
  70. package/src/parse-zip/zip-composition.ts +25 -18
  71. package/src/zip-writer.ts +1 -1
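
The headline change in this range is the new parse-zip/readable-file-utils module: ZIP parsing now reads through loader-utils ReadableFile objects (readRange, getReadableFileSize, and the DataViewReadableFile wrapper, all newly exported from @loaders.gl/zip) instead of the previous FileProvider interface. A minimal usage sketch based on the dist output in the diff below; the helper name peekAtZipTail is illustrative and the published TypeScript typings may differ:

// Sketch only: wraps an in-memory ArrayBuffer with the new DataViewReadableFile
// and reads its last four bytes through readRange. Inferred from the dist code
// in this diff; exact signatures in the published typings may differ.
import {DataViewReadableFile, getReadableFileSize, readRange} from '@loaders.gl/zip';

async function peekAtZipTail(buffer: ArrayBuffer): Promise<Uint8Array> {
  const file = new DataViewReadableFile(new DataView(buffer));
  const size = await getReadableFileSize(file); // resolves to a bigint byte length
  // readRange(file, start, end) returns an ArrayBuffer for the byte range [start, end)
  const tail = await readRange(file, size - 4n, size);
  return new Uint8Array(tail); // last four bytes of the archive
}
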
package/dist/index.cjs CHANGED
@@ -31,6 +31,7 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  var dist_exports = {};
  __export(dist_exports, {
  CD_HEADER_SIGNATURE: () => signature2,
+ DataViewReadableFile: () => DataViewReadableFile,
  IndexedArchive: () => IndexedArchive,
  TarBuilder: () => TarBuilder,
  ZIP_COMPRESSION_HANDLERS: () => ZIP_COMPRESSION_HANDLERS,
@@ -42,6 +43,7 @@ __export(dist_exports, {
  createZip: () => createZip,
  generateCDHeader: () => generateCDHeader,
  generateLocalHeader: () => generateLocalHeader,
+ getReadableFileSize: () => getReadableFileSize,
  localHeaderSignature: () => signature3,
  makeHashTableFromZipHeaders: () => makeHashTableFromZipHeaders,
  makeZipCDHeaderIterator: () => makeZipCDHeaderIterator,
@@ -49,13 +51,14 @@ __export(dist_exports, {
  parseHashTable: () => parseHashTable,
  parseZipCDFileHeader: () => parseZipCDFileHeader,
  parseZipLocalFileHeader: () => parseZipLocalFileHeader,
+ readRange: () => readRange,
  searchFromTheEnd: () => searchFromTheEnd
  });
  module.exports = __toCommonJS(dist_exports);

  // dist/zip-loader.js
  var import_jszip = __toESM(require("jszip"), 1);
- var VERSION = true ? "4.4.0-alpha.0" : "latest";
+ var VERSION = true ? "4.4.0-alpha.9" : "latest";
  var ZipLoader = {
  dataType: null,
  batchType: null,
@@ -102,7 +105,7 @@ async function loadZipEntry(jsZip, subFilename, options = {}) {

  // dist/zip-writer.js
  var import_jszip2 = __toESM(require("jszip"), 1);
- var VERSION2 = true ? "4.4.0-alpha.0" : "latest";
+ var VERSION2 = true ? "4.4.0-alpha.9" : "latest";
  var ZipWriter = {
  name: "Zip Archive",
  id: "zip",
@@ -121,7 +124,7 @@ var ZipWriter = {
  encode: encodeZipAsync
  };
  async function encodeZipAsync(fileMap, options = {}) {
- var _a;
+ var _a, _b, _c;
  const jsZip = new import_jszip2.default();
  for (const subFileName in fileMap) {
  const subFileData = fileMap[subFileName];
@@ -136,7 +139,7 @@ async function encodeZipAsync(fileMap, options = {}) {
  zipOptions.onUpdate
  );
  } catch (error) {
- options.log.error(`Unable to encode zip archive: ${error}`);
+ (_c = (_b = options.core) == null ? void 0 : _b.log) == null ? void 0 : _c.error(`Unable to encode zip archive: ${error}`);
  throw error;
  }
  }
@@ -360,27 +363,115 @@ var TarBuilder = class {
  };

  // dist/parse-zip/cd-file-header.js
- var import_loader_utils3 = require("@loaders.gl/loader-utils");
+ var import_loader_utils4 = require("@loaders.gl/loader-utils");

  // dist/parse-zip/end-of-central-directory.js
- var import_loader_utils2 = require("@loaders.gl/loader-utils");
+ var import_loader_utils3 = require("@loaders.gl/loader-utils");
+
+ // dist/parse-zip/readable-file-utils.js
+ var import_loader_utils = require("@loaders.gl/loader-utils");
+ function toBigInt(value) {
+ return typeof value === "bigint" ? value : BigInt(value);
+ }
+ function toNumber(value) {
+ const numberValue = Number(value);
+ if (!Number.isFinite(numberValue)) {
+ throw new Error("Offset is out of bounds");
+ }
+ return numberValue;
+ }
+ function normalizeOffset(offset, size) {
+ if (offset < 0) {
+ return Math.max(size + offset, 0);
+ }
+ return Math.min(offset, size);
+ }
+ async function readRange(file, start, end) {
+ const startOffset = toBigInt(start);
+ const endOffset = toBigInt(end);
+ const length = endOffset - startOffset;
+ if (length < 0) {
+ throw new Error("Invalid range requested");
+ }
+ return await file.read(startOffset, toNumber(length));
+ }
+ async function readDataView(file, start, end) {
+ const arrayBuffer = await readRange(file, start, end);
+ return new DataView(arrayBuffer);
+ }
+ async function readUint16(file, offset) {
+ const dataView = await readDataView(file, offset, toBigInt(offset) + 2n);
+ return dataView.getUint16(0, true);
+ }
+ async function readUint32(file, offset) {
+ const dataView = await readDataView(file, offset, toBigInt(offset) + 4n);
+ return dataView.getUint32(0, true);
+ }
+ async function readBigUint64(file, offset) {
+ const dataView = await readDataView(file, offset, toBigInt(offset) + 8n);
+ return dataView.getBigUint64(0, true);
+ }
+ async function getReadableFileSize(file) {
+ if (file.bigsize > 0n) {
+ return file.bigsize;
+ }
+ if (file.size > 0) {
+ return BigInt(file.size);
+ }
+ if (file.stat) {
+ const stats = await file.stat();
+ if ((stats == null ? void 0 : stats.bigsize) !== void 0) {
+ return stats.bigsize;
+ }
+ if ((stats == null ? void 0 : stats.size) !== void 0) {
+ return BigInt(stats.size);
+ }
+ }
+ return 0n;
+ }
+ var DataViewReadableFile = class {
+ handle;
+ size;
+ bigsize;
+ url;
+ constructor(dataView, url = "") {
+ this.handle = dataView;
+ this.size = dataView.byteLength;
+ this.bigsize = BigInt(dataView.byteLength);
+ this.url = url;
+ }
+ async close() {
+ }
+ async stat() {
+ return { size: this.size, bigsize: this.bigsize, isDirectory: false };
+ }
+ async read(start = 0, length) {
+ const offset = toNumber(start);
+ const end = length ? offset + length : this.size;
+ const normalizedStart = normalizeOffset(offset, this.size);
+ const normalizedEnd = normalizeOffset(end, this.size);
+ const clampedEnd = Math.max(normalizedEnd, normalizedStart);
+ const lengthToRead = clampedEnd - normalizedStart;
+ if (lengthToRead <= 0) {
+ return new ArrayBuffer(0);
+ }
+ return (0, import_loader_utils.copyToArrayBuffer)(this.handle.buffer, normalizedStart, lengthToRead);
+ }
+ };

  // dist/parse-zip/search-from-the-end.js
  var buffLength = 1024;
  var searchFromTheEnd = async (file, target) => {
- const searchWindow = [
- await file.getUint8(file.length - 1n),
- await file.getUint8(file.length - 2n),
- await file.getUint8(file.length - 3n),
- void 0
- ];
+ const fileLength = await getReadableFileSize(file);
+ const lastBytes = new Uint8Array(await readRange(file, fileLength - 3n, fileLength + 1n));
+ const searchWindow = [lastBytes[3], lastBytes[2], lastBytes[1], void 0];
  let targetOffset = -1;
- let point = file.length - 4n;
+ let point = fileLength - 4n;
  do {
  const prevPoint = point;
  point -= BigInt(buffLength);
  point = point >= 0n ? point : 0n;
- const buff = new Uint8Array(await file.slice(point, prevPoint));
+ const buff = new Uint8Array(await readRange(file, point, prevPoint));
  for (let i = buff.length - 1; i > -1; i--) {
  searchWindow[3] = searchWindow[2];
  searchWindow[2] = searchWindow[1];
@@ -396,7 +487,7 @@ var searchFromTheEnd = async (file, target) => {
  };

  // dist/parse-zip/zip64-info-generation.js
- var import_loader_utils = require("@loaders.gl/loader-utils");
+ var import_loader_utils2 = require("@loaders.gl/loader-utils");
  var signature = new Uint8Array([1, 0]);
  function createZip64Info(options) {
  const optionsToUse = {
@@ -412,7 +503,7 @@ function createZip64Info(options) {
  NUMBER_SETTERS[field.size](newValue, 0, optionsToUse[field.name ?? ""] ?? field.default);
  arraysToConcat.push(newValue.buffer);
  }
- return (0, import_loader_utils.concatenateArrayBuffers)(...arraysToConcat);
+ return (0, import_loader_utils2.concatenateArrayBuffers)(...arraysToConcat);
  }
  function setFieldToNumber(header, fieldSize, fieldOffset, value) {
  NUMBER_SETTERS[fieldSize](header, Number(fieldOffset), value);
@@ -473,21 +564,21 @@ var ZIP64_CD_START_OFFSET_OFFSET = 48n;
  var ZIP64_COMMENT_OFFSET = 56n;
  var parseEoCDRecord = async (file) => {
  const zipEoCDOffset = await searchFromTheEnd(file, eoCDSignature);
- let cdRecordsNumber = BigInt(await file.getUint16(zipEoCDOffset + CD_RECORDS_NUMBER_OFFSET));
- let cdByteSize = BigInt(await file.getUint32(zipEoCDOffset + CD_CD_BYTE_SIZE_OFFSET));
- let cdStartOffset = BigInt(await file.getUint32(zipEoCDOffset + CD_START_OFFSET_OFFSET));
+ let cdRecordsNumber = BigInt(await readUint16(file, zipEoCDOffset + CD_RECORDS_NUMBER_OFFSET));
+ let cdByteSize = BigInt(await readUint32(file, zipEoCDOffset + CD_CD_BYTE_SIZE_OFFSET));
+ let cdStartOffset = BigInt(await readUint32(file, zipEoCDOffset + CD_START_OFFSET_OFFSET));
  let zip64EoCDLocatorOffset = zipEoCDOffset - 20n;
  let zip64EoCDOffset = 0n;
- const magicBytes = await file.slice(zip64EoCDLocatorOffset, zip64EoCDLocatorOffset + 4n);
- if ((0, import_loader_utils2.compareArrayBuffers)(magicBytes, zip64EoCDLocatorSignature)) {
- zip64EoCDOffset = await file.getBigUint64(zip64EoCDLocatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET);
- const endOfCDMagicBytes = await file.slice(zip64EoCDOffset, zip64EoCDOffset + 4n);
- if (!(0, import_loader_utils2.compareArrayBuffers)(endOfCDMagicBytes, zip64EoCDSignature.buffer)) {
+ const magicBytes = await readRange(file, zip64EoCDLocatorOffset, zip64EoCDLocatorOffset + 4n);
+ if ((0, import_loader_utils3.compareArrayBuffers)(magicBytes, zip64EoCDLocatorSignature.buffer)) {
+ zip64EoCDOffset = await readBigUint64(file, zip64EoCDLocatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET);
+ const endOfCDMagicBytes = await readRange(file, zip64EoCDOffset, zip64EoCDOffset + 4n);
+ if (!(0, import_loader_utils3.compareArrayBuffers)(endOfCDMagicBytes, zip64EoCDSignature.buffer)) {
  throw new Error("zip64 EoCD not found");
  }
- cdRecordsNumber = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_RECORDS_NUMBER_OFFSET);
- cdByteSize = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_CD_BYTE_SIZE_OFFSET);
- cdStartOffset = await file.getBigUint64(zip64EoCDOffset + ZIP64_CD_START_OFFSET_OFFSET);
+ cdRecordsNumber = await readBigUint64(file, zip64EoCDOffset + ZIP64_CD_RECORDS_NUMBER_OFFSET);
+ cdByteSize = await readBigUint64(file, zip64EoCDOffset + ZIP64_CD_CD_BYTE_SIZE_OFFSET);
+ cdStartOffset = await readBigUint64(file, zip64EoCDOffset + ZIP64_CD_START_OFFSET_OFFSET);
  } else {
  zip64EoCDLocatorOffset = 0n;
  }
@@ -532,7 +623,7 @@ function generateEoCD(options) {
  }
  const locator = generateZip64InfoLocator(options);
  const zip64Record = generateZip64Info(options);
- return (0, import_loader_utils2.concatenateArrayBuffers)(zip64Record, locator, header.buffer);
+ return (0, import_loader_utils3.concatenateArrayBuffers)(zip64Record, locator, header.buffer);
  }
  var EOCD_FIELDS = [
  // End of central directory signature = 0x06054b50
@@ -697,12 +788,13 @@ var CD_LOCAL_HEADER_OFFSET_OFFSET = 42;
  var CD_FILE_NAME_OFFSET = 46n;
  var signature2 = new Uint8Array([80, 75, 1, 2]);
  var parseZipCDFileHeader = async (headerOffset, file) => {
- if (headerOffset >= file.length) {
+ const fileLength = await getReadableFileSize(file);
+ if (headerOffset >= fileLength) {
  return null;
  }
- const mainHeader = new DataView(await file.slice(headerOffset, headerOffset + CD_FILE_NAME_OFFSET));
+ const mainHeader = await readDataView(file, headerOffset, headerOffset + CD_FILE_NAME_OFFSET);
  const magicBytes = mainHeader.buffer.slice(0, 4);
- if (!(0, import_loader_utils3.compareArrayBuffers)(magicBytes, signature2.buffer)) {
+ if (!(0, import_loader_utils4.compareArrayBuffers)(magicBytes, signature2.buffer)) {
  return null;
  }
  const compressedSize = BigInt(mainHeader.getUint32(CD_COMPRESSED_SIZE_OFFSET, true));
@@ -710,7 +802,7 @@ var parseZipCDFileHeader = async (headerOffset, file) => {
  const extraFieldLength = mainHeader.getUint16(CD_EXTRA_FIELD_LENGTH_OFFSET, true);
  const startDisk = BigInt(mainHeader.getUint16(CD_START_DISK_OFFSET, true));
  const fileNameLength = mainHeader.getUint16(CD_FILE_NAME_LENGTH_OFFSET, true);
- const additionalHeader = await file.slice(headerOffset + CD_FILE_NAME_OFFSET, headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength));
+ const additionalHeader = await readRange(file, headerOffset + CD_FILE_NAME_OFFSET, headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength));
  const filenameBytes = additionalHeader.slice(0, fileNameLength);
  const fileName = new TextDecoder().decode(filenameBytes);
  const extraOffset = headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength);
@@ -735,7 +827,7 @@ var parseZipCDFileHeader = async (headerOffset, file) => {
  };
  async function* makeZipCDHeaderIterator(fileProvider) {
  const { cdStartOffset, cdByteSize } = await parseEoCDRecord(fileProvider);
- const centralDirectory = new import_loader_utils3.DataViewFile(new DataView(await fileProvider.slice(cdStartOffset, cdStartOffset + cdByteSize)));
+ const centralDirectory = new DataViewReadableFile(new DataView(await readRange(fileProvider, cdStartOffset, cdStartOffset + cdByteSize)));
  let cdHeader = await parseZipCDFileHeader(0n, centralDirectory);
  while (cdHeader) {
  yield cdHeader;
@@ -801,7 +893,7 @@ function generateCDHeader(options) {
  setFieldToNumber(header, field.size, field.offset, optionsToUse[field.name ?? ""] ?? field.default ?? 0);
  }
  const encodedName = new TextEncoder().encode(optionsToUse.fileName);
- const resHeader = (0, import_loader_utils3.concatenateArrayBuffers)(header.buffer, encodedName, zip64header);
+ const resHeader = (0, import_loader_utils4.concatenateArrayBuffers)(header.buffer, encodedName, zip64header);
  return resHeader;
  }
  var ZIP_HEADER_FIELDS = [
@@ -911,7 +1003,7 @@ var ZIP_HEADER_FIELDS = [
  ];

  // dist/parse-zip/local-file-header.js
- var import_loader_utils4 = require("@loaders.gl/loader-utils");
+ var import_loader_utils5 = require("@loaders.gl/loader-utils");
  var COMPRESSION_METHOD_OFFSET = 8;
  var COMPRESSED_SIZE_OFFSET = 18;
  var UNCOMPRESSED_SIZE_OFFSET = 22;
@@ -920,14 +1012,14 @@ var EXTRA_FIELD_LENGTH_OFFSET = 28;
  var FILE_NAME_OFFSET = 30n;
  var signature3 = new Uint8Array([80, 75, 3, 4]);
  var parseZipLocalFileHeader = async (headerOffset, file) => {
- const mainHeader = new DataView(await file.slice(headerOffset, headerOffset + FILE_NAME_OFFSET));
+ const mainHeader = await readDataView(file, headerOffset, headerOffset + FILE_NAME_OFFSET);
  const magicBytes = mainHeader.buffer.slice(0, 4);
- if (!(0, import_loader_utils4.compareArrayBuffers)(magicBytes, signature3)) {
+ if (!(0, import_loader_utils5.compareArrayBuffers)(magicBytes, signature3.buffer)) {
  return null;
  }
  const fileNameLength = mainHeader.getUint16(FILE_NAME_LENGTH_OFFSET, true);
  const extraFieldLength = mainHeader.getUint16(EXTRA_FIELD_LENGTH_OFFSET, true);
- const additionalHeader = await file.slice(headerOffset + FILE_NAME_OFFSET, headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength));
+ const additionalHeader = await readRange(file, headerOffset + FILE_NAME_OFFSET, headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength));
  const fileNameBuffer = additionalHeader.slice(0, fileNameLength);
  const extraDataBuffer = new DataView(additionalHeader.slice(fileNameLength, additionalHeader.byteLength));
  const fileName = new TextDecoder().decode(fileNameBuffer).split("\\").join("/");
@@ -977,7 +1069,7 @@ function generateLocalHeader(options) {
  setFieldToNumber(header, field.size, field.offset, optionsToUse[field.name ?? ""] ?? field.default ?? 0);
  }
  const encodedName = new TextEncoder().encode(optionsToUse.fileName);
- const resHeader = (0, import_loader_utils4.concatenateArrayBuffers)(header.buffer, encodedName, zip64header);
+ const resHeader = (0, import_loader_utils5.concatenateArrayBuffers)(header.buffer, encodedName, zip64header);
  return resHeader;
  }
  var ZIP_HEADER_FIELDS2 = [
@@ -1051,14 +1143,15 @@ var ZIP_HEADER_FIELDS2 = [
  ];

  // dist/parse-zip/zip-composition.js
- var import_loader_utils5 = require("@loaders.gl/loader-utils");
+ var import_loader_utils6 = require("@loaders.gl/loader-utils");
  var import_crypto = require("@loaders.gl/crypto");
  var import_core = require("@loaders.gl/core");
  async function cutTheTailOff(provider) {
  const oldEoCDinfo = await parseEoCDRecord(provider);
  const oldCDStartOffset = oldEoCDinfo.cdStartOffset;
+ const providerSize = (await provider.stat()).bigsize;
  const oldCDLength = Number(oldEoCDinfo.offsets.zip64EoCDOffset ? oldEoCDinfo.offsets.zip64EoCDOffset - oldCDStartOffset : oldEoCDinfo.offsets.zipEoCDOffset - oldCDStartOffset);
- const zipEnding = await provider.slice(oldCDStartOffset, provider.length);
+ const zipEnding = await readRange(provider, oldCDStartOffset, providerSize);
  await provider.truncate(Number(oldCDStartOffset));
  const oldCDBody = zipEnding.slice(0, oldCDLength);
  const eocdBody = zipEnding.slice(oldCDLength, zipEnding.byteLength);
@@ -1078,25 +1171,30 @@ async function generateFileHeaders(fileName, fileToAdd, localFileHeaderOffset) {
  length: fileToAdd.byteLength
  });
  return [
- new Uint8Array((0, import_loader_utils5.concatenateArrayBuffers)(newFileLocalHeader, fileToAdd)),
+ new Uint8Array((0, import_loader_utils6.concatenateArrayBuffers)(newFileLocalHeader, fileToAdd)),
  new Uint8Array(newFileCDHeader)
  ];
  }
  async function addOneFile(zipUrl, fileToAdd, fileName) {
- const provider = new import_loader_utils5.FileHandleFile(zipUrl, true);
+ const provider = new import_loader_utils6.NodeFile(zipUrl, "a+");
  const [oldCDBody, eocdBody, oldEoCDinfo] = await cutTheTailOff(provider);
- const newFileOffset = provider.length;
+ let currentOffset = (await provider.stat()).bigsize;
+ const newFileOffset = currentOffset;
  const [localPart, cdHeaderPart] = await generateFileHeaders(fileName, fileToAdd, newFileOffset);
  await provider.append(localPart);
- const newCDBody = (0, import_loader_utils5.concatenateArrayBuffers)(oldCDBody, cdHeaderPart);
- const newCDStartOffset = provider.length;
+ currentOffset += BigInt(localPart.byteLength);
+ const newCDBody = (0, import_loader_utils6.concatenateArrayBuffers)(oldCDBody, cdHeaderPart);
+ const newCDStartOffset = currentOffset;
  await provider.append(new Uint8Array(newCDBody));
- const eocdOffset = provider.length;
- await provider.append(updateEoCD(eocdBody, oldEoCDinfo.offsets, newCDStartOffset, eocdOffset, oldEoCDinfo.cdRecordsNumber + 1n));
+ currentOffset += BigInt(newCDBody.byteLength);
+ const eocdOffset = currentOffset;
+ const updatedEoCD = updateEoCD(eocdBody, oldEoCDinfo.offsets, newCDStartOffset, eocdOffset, oldEoCDinfo.cdRecordsNumber + 1n);
+ await provider.append(updatedEoCD);
+ currentOffset += BigInt(updatedEoCD.byteLength);
  }
  async function createZip(inputPath, outputPath, createAdditionalData) {
  const fileIterator = getFileIterator(inputPath);
- const resFile = new import_loader_utils5.NodeFile(outputPath, "w");
+ const resFile = new import_loader_utils6.NodeFile(outputPath, "w");
  const fileList = [];
  const cdArray = [];
  for await (const file of fileIterator) {
@@ -1107,7 +1205,7 @@ async function createZip(inputPath, outputPath, createAdditionalData) {
  await addFile(additionaldata, resFile, cdArray);
  }
  const cdOffset = (await resFile.stat()).bigsize;
- const cd = (0, import_loader_utils5.concatenateArrayBuffers)(...cdArray);
+ const cd = (0, import_loader_utils6.concatenateArrayBuffers)(...cdArray);
  await resFile.append(new Uint8Array(cd));
  const eoCDStart = (await resFile.stat()).bigsize;
  await resFile.append(new Uint8Array(generateEoCD({ recordsNumber: cdArray.length, cdSize: cd.byteLength, cdOffset, eoCDStart })));
@@ -1117,20 +1215,20 @@ async function addFile(file, resFile, cdArray, fileList) {
  fileList == null ? void 0 : fileList.push({ fileName: file.path, localHeaderOffset: size });
  const [localPart, cdHeaderPart] = await generateFileHeaders(file.path, file.file, size);
  await resFile.append(localPart);
- cdArray.push(cdHeaderPart);
+ cdArray.push((0, import_loader_utils6.toArrayBuffer)(cdHeaderPart));
  }
  function getFileIterator(inputPath) {
  async function* iterable() {
  const fileList = await getAllFiles(inputPath);
  for (const filePath of fileList) {
- const file = await (await (0, import_core.fetchFile)(import_loader_utils5.path.join(inputPath, filePath))).arrayBuffer();
+ const file = await (await (0, import_core.fetchFile)(import_loader_utils6.path.join(inputPath, filePath))).arrayBuffer();
  yield { path: filePath, file };
  }
  }
  return iterable();
  }
  async function getAllFiles(basePath, subfolder = "", fsPassed) {
- const fs = fsPassed ? fsPassed : new import_loader_utils5.NodeFilesystem({});
+ const fs = fsPassed ? fsPassed : new import_loader_utils6.NodeFilesystem({});
  const files = await fs.readdir(pathJoin(basePath, subfolder));
  const arrayOfFiles = [];
  for (const file of files) {
@@ -1146,11 +1244,11 @@ async function getAllFiles(basePath, subfolder = "", fsPassed) {
  }
  }
  function pathJoin(...paths) {
  const resPaths = paths.filter((val) => val.length);
- return import_loader_utils5.path.join(...resPaths);
+ return import_loader_utils6.path.join(...resPaths);
  }

  // dist/filesystems/zip-filesystem.js
- var import_loader_utils6 = require("@loaders.gl/loader-utils");
+ var import_loader_utils7 = require("@loaders.gl/loader-utils");
  var import_compression = require("@loaders.gl/compression");
  var ZIP_COMPRESSION_HANDLERS = {
  /** No compression */
@@ -1163,34 +1261,35 @@ var ZIP_COMPRESSION_HANDLERS = {
  }
  };
  var ZipFileSystem = class {
- /** FileProvider instance promise */
- fileProvider = null;
+ /** File instance */
+ file = null;
  fileName;
  archive = null;
  /**
  * Constructor
- * @param file - instance of FileProvider or file path string
+ * @param file - instance of ReadableFile or file path string
  */
  constructor(file) {
  if (typeof file === "string") {
  this.fileName = file;
- if (!import_loader_utils6.isBrowser) {
- this.fileProvider = new import_loader_utils6.FileHandleFile(file);
- } else {
- throw new Error("Cannot open file for random access in a WEB browser");
+ if (import_loader_utils7.isBrowser) {
+ throw new Error("ZipFileSystem cannot open file paths in browser environments");
  }
+ this.file = new import_loader_utils7.NodeFile(file);
+ } else if (file instanceof Blob || file instanceof ArrayBuffer) {
+ this.file = new import_loader_utils7.BlobFile(file);
  } else if (file instanceof IndexedArchive) {
- this.fileProvider = file.fileProvider;
+ this.file = file.file;
  this.archive = file;
  this.fileName = file.fileName;
- } else if ((0, import_loader_utils6.isFileProvider)(file)) {
- this.fileProvider = file;
+ } else {
+ this.file = file;
  }
  }
  /** Clean up resources */
  async destroy() {
- if (this.fileProvider) {
- await this.fileProvider.destroy();
+ if (this.file) {
+ await this.file.close();
  }
  }
  /**
@@ -1198,11 +1297,11 @@ var ZipFileSystem = class {
  * @returns array of file names
  */
  async readdir() {
- if (!this.fileProvider) {
+ if (!this.file) {
  throw new Error("No data detected in the zip archive");
  }
  const fileNames = [];
- const zipCDIterator = makeZipCDHeaderIterator(this.fileProvider);
+ const zipCDIterator = makeZipCDHeaderIterator(this.file);
  for await (const cdHeader of zipCDIterator) {
  fileNames.push(cdHeader.fileName);
  }
@@ -1230,11 +1329,11 @@ var ZipFileSystem = class {
  if (this.archive) {
  uncompressedFile = await this.archive.getFile(filename, "http");
  } else {
- if (!this.fileProvider) {
+ if (!this.file) {
  throw new Error("No data detected in the zip archive");
  }
  const cdFileHeader = await this.getCDFileHeader(filename);
- const localFileHeader = await parseZipLocalFileHeader(cdFileHeader.localHeaderOffset, this.fileProvider);
+ const localFileHeader = await parseZipLocalFileHeader(cdFileHeader.localHeaderOffset, this.file);
  if (!localFileHeader) {
  throw new Error("Local file header has not been found in the zip archive`");
  }
@@ -1242,7 +1341,7 @@ var ZipFileSystem = class {
  if (!compressionHandler) {
  throw Error("Only Deflation compression is supported");
  }
- const compressedFile = await this.fileProvider.slice(localFileHeader.fileDataOffset, localFileHeader.fileDataOffset + localFileHeader.compressedSize);
+ const compressedFile = await readRange(this.file, localFileHeader.fileDataOffset, localFileHeader.fileDataOffset + localFileHeader.compressedSize);
  uncompressedFile = await compressionHandler(compressedFile);
  }
  const response = new Response(uncompressedFile);
@@ -1257,10 +1356,10 @@ var ZipFileSystem = class {
  * @returns central directory file header
  */
  async getCDFileHeader(filename) {
- if (!this.fileProvider) {
+ if (!this.file) {
  throw new Error("No data detected in the zip archive");
  }
- const zipCDIterator = makeZipCDHeaderIterator(this.fileProvider);
+ const zipCDIterator = makeZipCDHeaderIterator(this.file);
  let result = null;
  for await (const cdHeader of zipCDIterator) {
  if (cdHeader.fileName === filename) {
@@ -1277,16 +1376,16 @@ var ZipFileSystem = class {

  // dist/filesystems/IndexedArchive.js
  var IndexedArchive = class {
- fileProvider;
+ file;
  fileName;
  /**
  * Constructor
- * @param fileProvider - instance of a binary data reader
+ * @param fileProvider - readable file instance for random access
  * @param hashTable - pre-loaded hashTable. If presented, getFile will skip reading the hash file
  * @param fileName - name of the archive. It is used to add to an URL of a loader context
  */
- constructor(fileProvider, hashTable, fileName) {
- this.fileProvider = fileProvider;
+ constructor(file, hashTable, fileName) {
+ this.file = file;
  this.fileName = fileName;
  }
  /**
@@ -1295,7 +1394,7 @@ var IndexedArchive = class {
  * @returns
  */
  async getFileWithoutHash(filename) {
- const zipFS = new ZipFileSystem(this.fileProvider);
+ const zipFS = new ZipFileSystem(this.file);
  const response = await zipFS.fetch(filename);
  return await response.arrayBuffer();
  }
@@ -1303,7 +1402,7 @@ var IndexedArchive = class {

  // dist/hash-file-utility.js
  var import_crypto2 = require("@loaders.gl/crypto");
- var import_loader_utils7 = require("@loaders.gl/loader-utils");
+ var import_loader_utils8 = require("@loaders.gl/loader-utils");
  function parseHashTable(arrayBuffer) {
  const dataView = new DataView(arrayBuffer);
  const hashMap = {};
@@ -1344,10 +1443,10 @@ async function composeHashFile(zipCDIterator) {
  }
  const arrayBuffer = textEncoder.encode(filename).buffer;
  const md5 = await md5Hash.hash(arrayBuffer, "hex");
- hashArray.push((0, import_loader_utils7.concatenateArrayBuffers)(hexStringToBuffer(md5), bigintToBuffer(cdHeader.localHeaderOffset)));
+ hashArray.push((0, import_loader_utils8.concatenateArrayBuffers)(hexStringToBuffer(md5), bigintToBuffer(cdHeader.localHeaderOffset)));
  }
  const bufferArray = hashArray.sort(compareHashes);
- return (0, import_loader_utils7.concatenateArrayBuffersFromArray)(bufferArray);
+ return (0, import_loader_utils8.concatenateArrayBuffersFromArray)(bufferArray);
  }
  function compareHashes(arrA, arrB) {
  const a = new BigUint64Array(arrA);