@loaders.gl/zip 4.2.0-alpha.4 → 4.2.0-alpha.6

This diff shows the contents of publicly released package versions as they appear in their public registries. It is provided for informational purposes only and reflects the changes between the two versions.
Files changed (56)
  1. package/dist/dist.dev.js +919 -523
  2. package/dist/dist.min.js +25 -0
  3. package/dist/filesystems/IndexedArchive.js +26 -12
  4. package/dist/filesystems/zip-filesystem.d.ts +2 -2
  5. package/dist/filesystems/zip-filesystem.d.ts.map +1 -1
  6. package/dist/filesystems/zip-filesystem.js +125 -88
  7. package/dist/hash-file-utility.d.ts +1 -1
  8. package/dist/hash-file-utility.d.ts.map +1 -1
  9. package/dist/hash-file-utility.js +85 -42
  10. package/dist/index.cjs +64 -128
  11. package/dist/index.cjs.map +7 -0
  12. package/dist/index.d.ts +12 -12
  13. package/dist/index.d.ts.map +1 -1
  14. package/dist/index.js +4 -1
  15. package/dist/lib/tar/header.d.ts +1 -1
  16. package/dist/lib/tar/header.d.ts.map +1 -1
  17. package/dist/lib/tar/header.js +69 -33
  18. package/dist/lib/tar/tar.d.ts +1 -1
  19. package/dist/lib/tar/tar.d.ts.map +1 -1
  20. package/dist/lib/tar/tar.js +124 -106
  21. package/dist/lib/tar/types.js +3 -1
  22. package/dist/lib/tar/utils.js +45 -18
  23. package/dist/parse-zip/cd-file-header.d.ts +1 -1
  24. package/dist/parse-zip/cd-file-header.d.ts.map +1 -1
  25. package/dist/parse-zip/cd-file-header.js +239 -177
  26. package/dist/parse-zip/end-of-central-directory.js +247 -158
  27. package/dist/parse-zip/local-file-header.d.ts +1 -1
  28. package/dist/parse-zip/local-file-header.d.ts.map +1 -1
  29. package/dist/parse-zip/local-file-header.js +143 -102
  30. package/dist/parse-zip/search-from-the-end.js +27 -13
  31. package/dist/parse-zip/zip-composition.js +142 -92
  32. package/dist/parse-zip/zip64-info-generation.js +64 -41
  33. package/dist/tar-builder.d.ts +1 -1
  34. package/dist/tar-builder.d.ts.map +1 -1
  35. package/dist/tar-builder.js +32 -29
  36. package/dist/zip-loader.js +52 -41
  37. package/dist/zip-writer.js +40 -40
  38. package/package.json +11 -7
  39. package/src/filesystems/zip-filesystem.ts +4 -0
  40. package/dist/filesystems/IndexedArchive.js.map +0 -1
  41. package/dist/filesystems/zip-filesystem.js.map +0 -1
  42. package/dist/hash-file-utility.js.map +0 -1
  43. package/dist/index.js.map +0 -1
  44. package/dist/lib/tar/header.js.map +0 -1
  45. package/dist/lib/tar/tar.js.map +0 -1
  46. package/dist/lib/tar/types.js.map +0 -1
  47. package/dist/lib/tar/utils.js.map +0 -1
  48. package/dist/parse-zip/cd-file-header.js.map +0 -1
  49. package/dist/parse-zip/end-of-central-directory.js.map +0 -1
  50. package/dist/parse-zip/local-file-header.js.map +0 -1
  51. package/dist/parse-zip/search-from-the-end.js.map +0 -1
  52. package/dist/parse-zip/zip-composition.js.map +0 -1
  53. package/dist/parse-zip/zip64-info-generation.js.map +0 -1
  54. package/dist/tar-builder.js.map +0 -1
  55. package/dist/zip-loader.js.map +0 -1
  56. package/dist/zip-writer.js.map +0 -1
package/dist/index.cjs CHANGED
@@ -27,9 +27,9 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
  ));
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 
- // src/index.ts
- var src_exports = {};
- __export(src_exports, {
+ // dist/index.js
+ var dist_exports = {};
+ __export(dist_exports, {
  CD_HEADER_SIGNATURE: () => signature2,
  IndexedArchive: () => IndexedArchive,
  TarBuilder: () => TarBuilder,
@@ -51,11 +51,11 @@ __export(src_exports, {
  parseZipLocalFileHeader: () => parseZipLocalFileHeader,
  searchFromTheEnd: () => searchFromTheEnd
  });
- module.exports = __toCommonJS(src_exports);
+ module.exports = __toCommonJS(dist_exports);
 
- // src/zip-loader.ts
+ // dist/zip-loader.js
  var import_jszip = __toESM(require("jszip"), 1);
- var VERSION = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
+ var VERSION = true ? "4.2.0-alpha.5" : "latest";
  var ZipLoader = {
  id: "zip",
  module: "zip",
@@ -98,9 +98,9 @@ async function loadZipEntry(jsZip, subFilename, options = {}) {
  }
  }
 
- // src/zip-writer.ts
+ // dist/zip-writer.js
  var import_jszip2 = __toESM(require("jszip"), 1);
- var VERSION2 = typeof __VERSION__ !== "undefined" ? __VERSION__ : "latest";
+ var VERSION2 = true ? "4.2.0-alpha.5" : "latest";
  var ZipWriter = {
  name: "Zip Archive",
  id: "zip",
@@ -139,7 +139,7 @@ async function encodeZipAsync(fileMap, options = {}) {
  }
  }
 
- // src/lib/tar/utils.ts
+ // dist/lib/tar/utils.js
  function clean(length) {
  let i;
  const buffer = new Uint8Array(length);
@@ -164,7 +164,7 @@ function stringToUint8(input, out, offset) {
  return out;
  }
 
- // src/lib/tar/header.ts
+ // dist/lib/tar/header.js
  var structure = {
  fileName: 100,
  fileMode: 8,
@@ -202,17 +202,20 @@ function format(data, cb) {
  return buffer;
  }
 
- // src/lib/tar/tar.ts
+ // dist/lib/tar/tar.js
  var blockSize;
  var headerLength;
  var inputLength;
  var recordSize = 512;
  var Tar = class {
+ written;
+ out;
+ blocks = [];
+ length;
  /**
  * @param [recordsPerBlock]
  */
  constructor(recordsPerBlock) {
- this.blocks = [];
  this.written = 0;
  blockSize = (recordsPerBlock || 20) * recordSize;
  this.out = clean(blockSize);
@@ -234,9 +237,7 @@ var Tar = class {
  if (typeof input === "string") {
  input = stringToUint8(input);
  } else if (input.constructor && input.constructor !== Uint8Array.prototype.constructor) {
- const errorInputMatch = /function\s*([$A-Za-z_][0-9A-Za-z_]*)\s*\(/.exec(
- input.constructor.toString()
- );
+ const errorInputMatch = /function\s*([$A-Za-z_][0-9A-Za-z_]*)\s*\(/.exec(input.constructor.toString());
  const errorInput = errorInputMatch && errorInputMatch[1];
  const errorMessage = `Invalid input type. You gave me: ${errorInput}`;
  throw errorMessage;
@@ -324,16 +325,11 @@ var Tar = class {
  };
  var tar_default = Tar;
 
- // src/tar-builder.ts
+ // dist/tar-builder.js
  var TAR_BUILDER_OPTIONS = {
  recordsPerBlock: 20
  };
  var TarBuilder = class {
- constructor(options) {
- this.count = 0;
- this.options = { ...TAR_BUILDER_OPTIONS, ...options };
- this.tape = new tar_default(this.options.recordsPerBlock);
- }
  static get properties() {
  return {
  id: "tar",
@@ -344,6 +340,13 @@ var TarBuilder = class {
  options: TAR_BUILDER_OPTIONS
  };
  }
+ options;
+ tape;
+ count = 0;
+ constructor(options) {
+ this.options = { ...TAR_BUILDER_OPTIONS, ...options };
+ this.tape = new tar_default(this.options.recordsPerBlock);
+ }
  /** Adds a file to the archive. */
  addFile(filename, buffer) {
  this.tape.append(filename, new Uint8Array(buffer));
@@ -354,13 +357,13 @@ var TarBuilder = class {
  }
  };
 
- // src/parse-zip/cd-file-header.ts
+ // dist/parse-zip/cd-file-header.js
  var import_loader_utils3 = require("@loaders.gl/loader-utils");
 
- // src/parse-zip/end-of-central-directory.ts
+ // dist/parse-zip/end-of-central-directory.js
  var import_loader_utils2 = require("@loaders.gl/loader-utils");
 
- // src/parse-zip/search-from-the-end.ts
+ // dist/parse-zip/search-from-the-end.js
  var searchFromTheEnd = async (file, target) => {
  const searchWindow = [
  await file.getUint8(file.length - 1n),
@@ -382,7 +385,7 @@ var searchFromTheEnd = async (file, target) => {
  return targetOffset;
  };
 
- // src/parse-zip/zip64-info-generation.ts
+ // dist/parse-zip/zip64-info-generation.js
  var import_loader_utils = require("@loaders.gl/loader-utils");
  var signature = new Uint8Array([1, 0]);
  function createZip64Info(options) {
@@ -443,7 +446,7 @@ var ZIP64_FIELDS = [
  }
  ];
 
- // src/parse-zip/end-of-central-directory.ts
+ // dist/parse-zip/end-of-central-directory.js
  var eoCDSignature = new Uint8Array([80, 75, 5, 6]);
  var zip64EoCDLocatorSignature = new Uint8Array([80, 75, 6, 7]);
  var zip64EoCDSignature = new Uint8Array([80, 75, 6, 6]);
@@ -466,9 +469,7 @@ var parseEoCDRecord = async (file) => {
  let zip64EoCDOffset = 0n;
  const magicBytes = await file.slice(zip64EoCDLocatorOffset, zip64EoCDLocatorOffset + 4n);
  if ((0, import_loader_utils2.compareArrayBuffers)(magicBytes, zip64EoCDLocatorSignature)) {
- zip64EoCDOffset = await file.getBigUint64(
- zip64EoCDLocatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET
- );
+ zip64EoCDOffset = await file.getBigUint64(zip64EoCDLocatorOffset + ZIP64_EOCD_START_OFFSET_OFFSET);
  const endOfCDMagicBytes = await file.slice(zip64EoCDOffset, zip64EoCDOffset + 4n);
  if (!(0, import_loader_utils2.compareArrayBuffers)(endOfCDMagicBytes, zip64EoCDSignature.buffer)) {
  throw new Error("zip64 EoCD not found");
@@ -493,20 +494,10 @@ function updateEoCD(eocdBody, oldEoCDOffsets, newCDStartOffset, eocdStartOffset,
  const classicEoCDOffset = oldEoCDOffsets.zip64EoCDOffset ? oldEoCDOffsets.zipEoCDOffset - oldEoCDOffsets.zip64EoCDOffset : 0n;
  if (Number(newCDRecordsNumber) <= 65535) {
  setFieldToNumber(eocd, 2, classicEoCDOffset + CD_RECORDS_NUMBER_OFFSET, newCDRecordsNumber);
- setFieldToNumber(
- eocd,
- 2,
- classicEoCDOffset + CD_RECORDS_NUMBER_ON_DISC_OFFSET,
- newCDRecordsNumber
- );
+ setFieldToNumber(eocd, 2, classicEoCDOffset + CD_RECORDS_NUMBER_ON_DISC_OFFSET, newCDRecordsNumber);
  }
  if (eocdStartOffset - newCDStartOffset <= 4294967295) {
- setFieldToNumber(
- eocd,
- 4,
- classicEoCDOffset + CD_CD_BYTE_SIZE_OFFSET,
- eocdStartOffset - newCDStartOffset
- );
+ setFieldToNumber(eocd, 4, classicEoCDOffset + CD_CD_BYTE_SIZE_OFFSET, eocdStartOffset - newCDStartOffset);
  }
  if (newCDStartOffset < 4294967295) {
  setFieldToNumber(eocd, 4, classicEoCDOffset + CD_START_OFFSET_OFFSET, newCDStartOffset);
@@ -524,12 +515,7 @@ function updateEoCD(eocdBody, oldEoCDOffsets, newCDStartOffset, eocdStartOffset,
  function generateEoCD(options) {
  const header = new DataView(new ArrayBuffer(Number(CD_COMMENT_OFFSET)));
  for (const field of EOCD_FIELDS) {
- setFieldToNumber(
- header,
- field.size,
- field.offset,
- options[field.name ?? ""] ?? field.default ?? 0
- );
+ setFieldToNumber(header, field.size, field.offset, options[field.name ?? ""] ?? field.default ?? 0);
  }
  const locator = generateZip64InfoLocator(options);
  const zip64Record = generateZip64Info(options);
@@ -588,24 +574,14 @@ var EOCD_FIELDS = [
  function generateZip64Info(options) {
  const record = new DataView(new ArrayBuffer(Number(ZIP64_COMMENT_OFFSET)));
  for (const field of ZIP64_EOCD_FIELDS) {
- setFieldToNumber(
- record,
- field.size,
- field.offset,
- options[field.name ?? ""] ?? field.default ?? 0
- );
+ setFieldToNumber(record, field.size, field.offset, options[field.name ?? ""] ?? field.default ?? 0);
  }
  return record.buffer;
  }
  function generateZip64InfoLocator(options) {
  const locator = new DataView(new ArrayBuffer(Number(20)));
  for (const field of ZIP64_EOCD_LOCATOR_FIELDS) {
- setFieldToNumber(
- locator,
- field.size,
- field.offset,
- options[field.name ?? ""] ?? field.default ?? 0
- );
+ setFieldToNumber(locator, field.size, field.offset, options[field.name ?? ""] ?? field.default ?? 0);
  }
  return locator.buffer;
  }
@@ -698,7 +674,7 @@ var ZIP64_EOCD_FIELDS = [
  }
  ];
 
- // src/parse-zip/cd-file-header.ts
+ // dist/parse-zip/cd-file-header.js
  var CD_COMPRESSED_SIZE_OFFSET = 20n;
  var CD_UNCOMPRESSED_SIZE_OFFSET = 24n;
  var CD_FILE_NAME_LENGTH_OFFSET = 28n;
@@ -717,17 +693,12 @@ var parseZipCDFileHeader = async (headerOffset, file) => {
  const extraFieldLength = await file.getUint16(headerOffset + CD_EXTRA_FIELD_LENGTH_OFFSET);
  const startDisk = BigInt(await file.getUint16(headerOffset + CD_START_DISK_OFFSET));
  const fileNameLength = await file.getUint16(headerOffset + CD_FILE_NAME_LENGTH_OFFSET);
- const filenameBytes = await file.slice(
- headerOffset + CD_FILE_NAME_OFFSET,
- headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength)
- );
+ const filenameBytes = await file.slice(headerOffset + CD_FILE_NAME_OFFSET, headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength));
  const fileName = new TextDecoder().decode(filenameBytes);
  const extraOffset = headerOffset + CD_FILE_NAME_OFFSET + BigInt(fileNameLength);
  const oldFormatOffset = await file.getUint32(headerOffset + CD_LOCAL_HEADER_OFFSET_OFFSET);
  const localHeaderOffset = BigInt(oldFormatOffset);
- const extraField = new DataView(
- await file.slice(extraOffset, extraOffset + BigInt(extraFieldLength))
- );
+ const extraField = new DataView(await file.slice(extraOffset, extraOffset + BigInt(extraFieldLength)));
  const zip64data = {
  uncompressedSize,
  compressedSize,
@@ -749,10 +720,7 @@ async function* makeZipCDHeaderIterator(fileProvider) {
  let cdHeader = await parseZipCDFileHeader(cdStartOffset, fileProvider);
  while (cdHeader) {
  yield cdHeader;
- cdHeader = await parseZipCDFileHeader(
- cdHeader.extraOffset + BigInt(cdHeader.extraFieldLength),
- fileProvider
- );
+ cdHeader = await parseZipCDFileHeader(cdHeader.extraOffset + BigInt(cdHeader.extraFieldLength), fileProvider);
  }
  }
  var getUint16 = (...bytes) => {
@@ -763,9 +731,7 @@ var findZip64DataInExtra = (zip64data, extraField) => {
  const zip64DataRes = {};
  if (zip64dataList.length > 0) {
  const zip64chunkSize = zip64dataList.reduce((sum, curr) => sum + curr.length, 0);
- const offsetInExtraData = new Uint8Array(extraField.buffer).findIndex(
- (_val, i, arr) => getUint16(arr[i], arr[i + 1]) === 1 && getUint16(arr[i + 2], arr[i + 3]) === zip64chunkSize
- );
+ const offsetInExtraData = new Uint8Array(extraField.buffer).findIndex((_val, i, arr) => getUint16(arr[i], arr[i + 1]) === 1 && getUint16(arr[i + 2], arr[i + 3]) === zip64chunkSize);
  let bytesRead = 0;
  for (const note of zip64dataList) {
  const offset = bytesRead;
@@ -813,12 +779,7 @@ function generateCDHeader(options) {
  }
  const header = new DataView(new ArrayBuffer(Number(CD_FILE_NAME_OFFSET)));
  for (const field of ZIP_HEADER_FIELDS) {
- setFieldToNumber(
- header,
- field.size,
- field.offset,
- optionsToUse[field.name ?? ""] ?? field.default ?? 0
- );
+ setFieldToNumber(header, field.size, field.offset, optionsToUse[field.name ?? ""] ?? field.default ?? 0);
  }
  const encodedName = new TextEncoder().encode(optionsToUse.fileName);
  const resHeader = (0, import_loader_utils3.concatenateArrayBuffers)(header.buffer, encodedName, zip64header);
@@ -930,7 +891,7 @@ var ZIP_HEADER_FIELDS = [
  }
  ];
 
- // src/parse-zip/local-file-header.ts
+ // dist/parse-zip/local-file-header.js
  var import_loader_utils4 = require("@loaders.gl/loader-utils");
  var COMPRESSION_METHOD_OFFSET = 8n;
  var COMPRESSED_SIZE_OFFSET = 18n;
@@ -945,12 +906,7 @@ var parseZipLocalFileHeader = async (headerOffset, buffer) => {
  return null;
  }
  const fileNameLength = await buffer.getUint16(headerOffset + FILE_NAME_LENGTH_OFFSET);
- const fileName = new TextDecoder().decode(
- await buffer.slice(
- headerOffset + FILE_NAME_OFFSET,
- headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength)
- )
- ).split("\\").join("/");
+ const fileName = new TextDecoder().decode(await buffer.slice(headerOffset + FILE_NAME_OFFSET, headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength))).split("\\").join("/");
  const extraFieldLength = await buffer.getUint16(headerOffset + EXTRA_FIELD_LENGTH_OFFSET);
  let fileDataOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength);
  const compressionMethod = await buffer.getUint16(headerOffset + COMPRESSION_METHOD_OFFSET);
@@ -996,12 +952,7 @@ function generateLocalHeader(options) {
  }
  const header = new DataView(new ArrayBuffer(Number(FILE_NAME_OFFSET)));
  for (const field of ZIP_HEADER_FIELDS2) {
- setFieldToNumber(
- header,
- field.size,
- field.offset,
- optionsToUse[field.name ?? ""] ?? field.default ?? 0
- );
+ setFieldToNumber(header, field.size, field.offset, optionsToUse[field.name ?? ""] ?? field.default ?? 0);
  }
  const encodedName = new TextEncoder().encode(optionsToUse.fileName);
  const resHeader = (0, import_loader_utils4.concatenateArrayBuffers)(header.buffer, encodedName, zip64header);
@@ -1077,16 +1028,14 @@ var ZIP_HEADER_FIELDS2 = [
  }
  ];
 
- // src/parse-zip/zip-composition.ts
+ // dist/parse-zip/zip-composition.js
  var import_loader_utils5 = require("@loaders.gl/loader-utils");
  var import_crypto = require("@loaders.gl/crypto");
  var import_core = require("@loaders.gl/core");
  async function cutTheTailOff(provider) {
  const oldEoCDinfo = await parseEoCDRecord(provider);
  const oldCDStartOffset = oldEoCDinfo.cdStartOffset;
- const oldCDLength = Number(
- oldEoCDinfo.offsets.zip64EoCDOffset ? oldEoCDinfo.offsets.zip64EoCDOffset - oldCDStartOffset : oldEoCDinfo.offsets.zipEoCDOffset - oldCDStartOffset
- );
+ const oldCDLength = Number(oldEoCDinfo.offsets.zip64EoCDOffset ? oldEoCDinfo.offsets.zip64EoCDOffset - oldCDStartOffset : oldEoCDinfo.offsets.zipEoCDOffset - oldCDStartOffset);
  const zipEnding = await provider.slice(oldCDStartOffset, provider.length);
  await provider.truncate(Number(oldCDStartOffset));
  const oldCDBody = zipEnding.slice(0, oldCDLength);
@@ -1121,15 +1070,7 @@ async function addOneFile(zipUrl, fileToAdd, fileName) {
  const newCDStartOffset = provider.length;
  await provider.append(new Uint8Array(newCDBody));
  const eocdOffset = provider.length;
- await provider.append(
- updateEoCD(
- eocdBody,
- oldEoCDinfo.offsets,
- newCDStartOffset,
- eocdOffset,
- oldEoCDinfo.cdRecordsNumber + 1n
- )
- );
+ await provider.append(updateEoCD(eocdBody, oldEoCDinfo.offsets, newCDStartOffset, eocdOffset, oldEoCDinfo.cdRecordsNumber + 1n));
  }
  async function createZip(inputPath, outputPath, createAdditionalData) {
  const fileIterator = getFileIterator(inputPath);
@@ -1147,11 +1088,7 @@ async function createZip(inputPath, outputPath, createAdditionalData) {
  const cd = (0, import_loader_utils5.concatenateArrayBuffers)(...cdArray);
  await resFile.append(new Uint8Array(cd));
  const eoCDStart = (await resFile.stat()).bigsize;
- await resFile.append(
- new Uint8Array(
- generateEoCD({ recordsNumber: cdArray.length, cdSize: cd.byteLength, cdOffset, eoCDStart })
- )
- );
+ await resFile.append(new Uint8Array(generateEoCD({ recordsNumber: cdArray.length, cdSize: cd.byteLength, cdOffset, eoCDStart })));
  }
  async function addFile(file, resFile, cdArray, fileList) {
  const size = (await resFile.stat()).bigsize;
@@ -1190,7 +1127,7 @@ function pathJoin(...paths) {
  return import_loader_utils5.path.join(...resPaths);
  }
 
- // src/filesystems/zip-filesystem.ts
+ // dist/filesystems/zip-filesystem.js
  var import_loader_utils6 = require("@loaders.gl/loader-utils");
  var import_loader_utils7 = require("@loaders.gl/loader-utils");
  var import_loader_utils8 = require("@loaders.gl/loader-utils");
@@ -1206,14 +1143,15 @@ var ZIP_COMPRESSION_HANDLERS = {
  }
  };
  var ZipFileSystem = class {
+ /** FileProvider instance promise */
+ fileProvider = null;
+ fileName;
+ archive = null;
  /**
  * Constructor
  * @param file - instance of FileProvider or file path string
  */
  constructor(file) {
- /** FileProvider instance promise */
- this.fileProvider = null;
- this.archive = null;
  if (typeof file === "string") {
  this.fileName = file;
  if (!import_loader_utils6.isBrowser) {
@@ -1265,6 +1203,9 @@ var ZipFileSystem = class {
  * @returns - Response with file data
  */
  async fetch(filename) {
+ if (this.fileName && filename.indexOf(this.fileName) === 0) {
+ filename = filename.substring(this.fileName.length + 1);
+ }
  let uncompressedFile;
  if (this.archive) {
  uncompressedFile = await this.archive.getFile(filename, "http");
@@ -1273,10 +1214,7 @@ var ZipFileSystem = class {
  throw new Error("No data detected in the zip archive");
  }
  const cdFileHeader = await this.getCDFileHeader(filename);
- const localFileHeader = await parseZipLocalFileHeader(
- cdFileHeader.localHeaderOffset,
- this.fileProvider
- );
+ const localFileHeader = await parseZipLocalFileHeader(cdFileHeader.localHeaderOffset, this.fileProvider);
  if (!localFileHeader) {
  throw new Error("Local file header has not been found in the zip archive`");
  }
@@ -1284,10 +1222,7 @@ var ZipFileSystem = class {
  if (!compressionHandler) {
  throw Error("Only Deflation compression is supported");
  }
- const compressedFile = await this.fileProvider.slice(
- localFileHeader.fileDataOffset,
- localFileHeader.fileDataOffset + localFileHeader.compressedSize
- );
+ const compressedFile = await this.fileProvider.slice(localFileHeader.fileDataOffset, localFileHeader.fileDataOffset + localFileHeader.compressedSize);
  uncompressedFile = await compressionHandler(compressedFile);
  }
  const response = new Response(uncompressedFile);
@@ -1320,8 +1255,10 @@ var ZipFileSystem = class {
  }
  };
 
- // src/filesystems/IndexedArchive.ts
+ // dist/filesystems/IndexedArchive.js
  var IndexedArchive = class {
+ fileProvider;
+ fileName;
  /**
  * Constructor
  * @param fileProvider - instance of a binary data reader
@@ -1344,7 +1281,7 @@ var IndexedArchive = class {
  }
  };
 
- // src/hash-file-utility.ts
+ // dist/hash-file-utility.js
  var import_crypto2 = require("@loaders.gl/crypto");
  var import_loader_utils9 = require("@loaders.gl/loader-utils");
  function parseHashTable(arrayBuffer) {
@@ -1387,9 +1324,7 @@ async function composeHashFile(zipCDIterator) {
  }
  const arrayBuffer = textEncoder.encode(filename).buffer;
  const md5 = await md5Hash.hash(arrayBuffer, "hex");
- hashArray.push(
- (0, import_loader_utils9.concatenateArrayBuffers)(hexStringToBuffer(md5), bigintToBuffer(cdHeader.localHeaderOffset))
- );
+ hashArray.push((0, import_loader_utils9.concatenateArrayBuffers)(hexStringToBuffer(md5), bigintToBuffer(cdHeader.localHeaderOffset)));
  }
  const bufferArray = hashArray.sort(compareHashes);
  return (0, import_loader_utils9.concatenateArrayBuffersFromArray)(bufferArray);
@@ -1407,3 +1342,4 @@ function hexStringToBuffer(str) {
  function bigintToBuffer(n) {
  return new BigUint64Array([n]).buffer;
  }
+ //# sourceMappingURL=index.cjs.map
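
Note: the only hand-written source change in this file list is package/src/filesystems/zip-filesystem.ts (+4 -0), visible in the dist/index.cjs hunk above as the new prefix check in ZipFileSystem.fetch. The sketch below is a minimal usage example, not taken from the package's documentation; it assumes ZipFileSystem is exported from @loaders.gl/zip, and the archive path and entry name are placeholders.

  import {ZipFileSystem} from '@loaders.gl/zip';

  async function readEntry(): Promise<void> {
    // Placeholder archive path; a FileProvider instance can be passed instead.
    const zipFS = new ZipFileSystem('data/archive.zip');

    // Plain entry path, as before this release.
    const direct = await zipFS.fetch('content/file.json');

    // With the added lines, a path prefixed with the archive's own file name
    // is stripped back to the entry name, so both calls resolve the same entry.
    const prefixed = await zipFS.fetch('data/archive.zip/content/file.json');

    console.log(await direct.text(), await prefixed.text());
  }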