@loaders.gl/zip 4.2.0-alpha.4 → 4.2.0-alpha.5

This diff shows the changes between two publicly released versions of the package, as they appear in the public registry. It is provided for informational purposes only.
Files changed (55)
  1. package/dist/dist.dev.js +920 -542
  2. package/dist/dist.min.js +25 -0
  3. package/dist/filesystems/IndexedArchive.js +24 -12
  4. package/dist/filesystems/zip-filesystem.d.ts +2 -2
  5. package/dist/filesystems/zip-filesystem.d.ts.map +1 -1
  6. package/dist/filesystems/zip-filesystem.js +121 -88
  7. package/dist/hash-file-utility.d.ts +1 -1
  8. package/dist/hash-file-utility.d.ts.map +1 -1
  9. package/dist/hash-file-utility.js +85 -42
  10. package/dist/index.cjs +49 -125
  11. package/dist/index.cjs.map +7 -0
  12. package/dist/index.d.ts +12 -12
  13. package/dist/index.d.ts.map +1 -1
  14. package/dist/index.js +4 -1
  15. package/dist/lib/tar/header.d.ts +1 -1
  16. package/dist/lib/tar/header.d.ts.map +1 -1
  17. package/dist/lib/tar/header.js +69 -33
  18. package/dist/lib/tar/tar.d.ts +1 -1
  19. package/dist/lib/tar/tar.d.ts.map +1 -1
  20. package/dist/lib/tar/tar.js +121 -106
  21. package/dist/lib/tar/types.js +3 -1
  22. package/dist/lib/tar/utils.js +45 -18
  23. package/dist/parse-zip/cd-file-header.d.ts +1 -1
  24. package/dist/parse-zip/cd-file-header.d.ts.map +1 -1
  25. package/dist/parse-zip/cd-file-header.js +239 -177
  26. package/dist/parse-zip/end-of-central-directory.js +247 -158
  27. package/dist/parse-zip/local-file-header.d.ts +1 -1
  28. package/dist/parse-zip/local-file-header.d.ts.map +1 -1
  29. package/dist/parse-zip/local-file-header.js +143 -102
  30. package/dist/parse-zip/search-from-the-end.js +27 -13
  31. package/dist/parse-zip/zip-composition.js +142 -92
  32. package/dist/parse-zip/zip64-info-generation.js +64 -41
  33. package/dist/tar-builder.d.ts +1 -1
  34. package/dist/tar-builder.d.ts.map +1 -1
  35. package/dist/tar-builder.js +30 -29
  36. package/dist/zip-loader.js +51 -40
  37. package/dist/zip-writer.js +39 -39
  38. package/package.json +11 -7
  39. package/dist/filesystems/IndexedArchive.js.map +0 -1
  40. package/dist/filesystems/zip-filesystem.js.map +0 -1
  41. package/dist/hash-file-utility.js.map +0 -1
  42. package/dist/index.js.map +0 -1
  43. package/dist/lib/tar/header.js.map +0 -1
  44. package/dist/lib/tar/tar.js.map +0 -1
  45. package/dist/lib/tar/types.js.map +0 -1
  46. package/dist/lib/tar/utils.js.map +0 -1
  47. package/dist/parse-zip/cd-file-header.js.map +0 -1
  48. package/dist/parse-zip/end-of-central-directory.js.map +0 -1
  49. package/dist/parse-zip/local-file-header.js.map +0 -1
  50. package/dist/parse-zip/search-from-the-end.js.map +0 -1
  51. package/dist/parse-zip/zip-composition.js.map +0 -1
  52. package/dist/parse-zip/zip64-info-generation.js.map +0 -1
  53. package/dist/tar-builder.js.map +0 -1
  54. package/dist/zip-loader.js.map +0 -1
  55. package/dist/zip-writer.js.map +0 -1

package/dist/parse-zip/local-file-header.js
@@ -1,5 +1,9 @@
+ // loaders.gl
+ // SPDX-License-Identifier: MIT
+ // Copyright (c) vis.gl contributors
  import { compareArrayBuffers, concatenateArrayBuffers } from '@loaders.gl/loader-utils';
  import { createZip64Info, setFieldToNumber } from "./zip64-info-generation.js";
+ // offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)
  const COMPRESSION_METHOD_OFFSET = 8n;
  const COMPRESSED_SIZE_OFFSET = 18n;
  const UNCOMPRESSED_SIZE_OFFSET = 22n;
@@ -7,109 +11,146 @@ const FILE_NAME_LENGTH_OFFSET = 26n;
  const EXTRA_FIELD_LENGTH_OFFSET = 28n;
  const FILE_NAME_OFFSET = 30n;
  export const signature = new Uint8Array([0x50, 0x4b, 0x03, 0x04]);
+ /**
+ * Parses local file header of zip file
+ * @param headerOffset - offset in the archive where header starts
+ * @param buffer - buffer containing whole array
+ * @returns Info from the header
+ */
  export const parseZipLocalFileHeader = async (headerOffset, buffer) => {
- const magicBytes = await buffer.slice(headerOffset, headerOffset + 4n);
- if (!compareArrayBuffers(magicBytes, signature)) {
- return null;
- }
- const fileNameLength = await buffer.getUint16(headerOffset + FILE_NAME_LENGTH_OFFSET);
- const fileName = new TextDecoder().decode(await buffer.slice(headerOffset + FILE_NAME_OFFSET, headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength))).split('\\').join('/');
- const extraFieldLength = await buffer.getUint16(headerOffset + EXTRA_FIELD_LENGTH_OFFSET);
- let fileDataOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength);
- const compressionMethod = await buffer.getUint16(headerOffset + COMPRESSION_METHOD_OFFSET);
- let compressedSize = BigInt(await buffer.getUint32(headerOffset + COMPRESSED_SIZE_OFFSET));
- let uncompressedSize = BigInt(await buffer.getUint32(headerOffset + UNCOMPRESSED_SIZE_OFFSET));
- const extraOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength);
- let offsetInZip64Data = 4n;
- if (uncompressedSize === BigInt(0xffffffff)) {
- uncompressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
- offsetInZip64Data += 8n;
- }
- if (compressedSize === BigInt(0xffffffff)) {
- compressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
- offsetInZip64Data += 8n;
- }
- if (fileDataOffset === BigInt(0xffffffff)) {
- fileDataOffset = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
- }
- return {
- fileNameLength,
- fileName,
- extraFieldLength,
- fileDataOffset,
- compressedSize,
- compressionMethod
- };
+ const magicBytes = await buffer.slice(headerOffset, headerOffset + 4n);
+ if (!compareArrayBuffers(magicBytes, signature)) {
+ return null;
+ }
+ const fileNameLength = await buffer.getUint16(headerOffset + FILE_NAME_LENGTH_OFFSET);
+ const fileName = new TextDecoder()
+ .decode(await buffer.slice(headerOffset + FILE_NAME_OFFSET, headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength)))
+ .split('\\')
+ .join('/');
+ const extraFieldLength = await buffer.getUint16(headerOffset + EXTRA_FIELD_LENGTH_OFFSET);
+ let fileDataOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength);
+ const compressionMethod = await buffer.getUint16(headerOffset + COMPRESSION_METHOD_OFFSET);
+ let compressedSize = BigInt(await buffer.getUint32(headerOffset + COMPRESSED_SIZE_OFFSET)); // add zip 64 logic
+ let uncompressedSize = BigInt(await buffer.getUint32(headerOffset + UNCOMPRESSED_SIZE_OFFSET)); // add zip 64 logic
+ const extraOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength);
+ let offsetInZip64Data = 4n;
+ // looking for info that might be also be in zip64 extra field
+ if (uncompressedSize === BigInt(0xffffffff)) {
+ uncompressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
+ offsetInZip64Data += 8n;
+ }
+ if (compressedSize === BigInt(0xffffffff)) {
+ compressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
+ offsetInZip64Data += 8n;
+ }
+ if (fileDataOffset === BigInt(0xffffffff)) {
+ fileDataOffset = await buffer.getBigUint64(extraOffset + offsetInZip64Data); // setting it to the one from zip64
+ }
+ return {
+ fileNameLength,
+ fileName,
+ extraFieldLength,
+ fileDataOffset,
+ compressedSize,
+ compressionMethod
+ };
  };
+ /**
+ * generates local header for the file
+ * @param options info that can be placed into local header
+ * @returns buffer with header
+ */
  export function generateLocalHeader(options) {
- const optionsToUse = {
- ...options,
- extraLength: 0,
- fnlength: options.fileName.length
- };
- let zip64header = new ArrayBuffer(0);
- const optionsToZip64 = {};
- if (optionsToUse.length >= 0xffffffff) {
- optionsToZip64.size = optionsToUse.length;
- optionsToUse.length = 0xffffffff;
- }
- if (Object.keys(optionsToZip64).length) {
- zip64header = createZip64Info(optionsToZip64);
- optionsToUse.extraLength = zip64header.byteLength;
- }
- const header = new DataView(new ArrayBuffer(Number(FILE_NAME_OFFSET)));
- for (const field of ZIP_HEADER_FIELDS) {
- var _ref, _optionsToUse, _field$name;
- setFieldToNumber(header, field.size, field.offset, (_ref = (_optionsToUse = optionsToUse[(_field$name = field.name) !== null && _field$name !== void 0 ? _field$name : '']) !== null && _optionsToUse !== void 0 ? _optionsToUse : field.default) !== null && _ref !== void 0 ? _ref : 0);
- }
- const encodedName = new TextEncoder().encode(optionsToUse.fileName);
- const resHeader = concatenateArrayBuffers(header.buffer, encodedName, zip64header);
- return resHeader;
+ const optionsToUse = {
+ ...options,
+ extraLength: 0,
+ fnlength: options.fileName.length
+ };
+ let zip64header = new ArrayBuffer(0);
+ const optionsToZip64 = {};
+ if (optionsToUse.length >= 0xffffffff) {
+ optionsToZip64.size = optionsToUse.length;
+ optionsToUse.length = 0xffffffff;
+ }
+ if (Object.keys(optionsToZip64).length) {
+ zip64header = createZip64Info(optionsToZip64);
+ optionsToUse.extraLength = zip64header.byteLength;
+ }
+ // base length without file name and extra info is static
+ const header = new DataView(new ArrayBuffer(Number(FILE_NAME_OFFSET)));
+ for (const field of ZIP_HEADER_FIELDS) {
+ setFieldToNumber(header, field.size, field.offset, optionsToUse[field.name ?? ''] ?? field.default ?? 0);
+ }
+ const encodedName = new TextEncoder().encode(optionsToUse.fileName);
+ const resHeader = concatenateArrayBuffers(header.buffer, encodedName, zip64header);
+ return resHeader;
  }
- const ZIP_HEADER_FIELDS = [{
- offset: 0,
- size: 4,
- default: new DataView(signature.buffer).getUint32(0, true)
- }, {
- offset: 4,
- size: 2,
- default: 45
- }, {
- offset: 6,
- size: 2,
- default: 0
- }, {
- offset: 8,
- size: 2,
- default: 0
- }, {
- offset: 10,
- size: 2,
- default: 0
- }, {
- offset: 12,
- size: 2,
- default: 0
- }, {
- offset: 14,
- size: 4,
- name: 'crc32'
- }, {
- offset: 18,
- size: 4,
- name: 'length'
- }, {
- offset: 22,
- size: 4,
- name: 'length'
- }, {
- offset: 26,
- size: 2,
- name: 'fnlength'
- }, {
- offset: 28,
- size: 2,
- default: 0,
- name: 'extraLength'
- }];
- //# sourceMappingURL=local-file-header.js.map
+ const ZIP_HEADER_FIELDS = [
+ // Local file header signature = 0x04034b50
+ {
+ offset: 0,
+ size: 4,
+ default: new DataView(signature.buffer).getUint32(0, true)
+ },
+ // Version needed to extract (minimum)
+ {
+ offset: 4,
+ size: 2,
+ default: 45
+ },
+ // General purpose bit flag
+ {
+ offset: 6,
+ size: 2,
+ default: 0
+ },
+ // Compression method
+ {
+ offset: 8,
+ size: 2,
+ default: 0
+ },
+ // File last modification time
+ {
+ offset: 10,
+ size: 2,
+ default: 0
+ },
+ // File last modification date
+ {
+ offset: 12,
+ size: 2,
+ default: 0
+ },
+ // CRC-32 of uncompressed data
+ {
+ offset: 14,
+ size: 4,
+ name: 'crc32'
+ },
+ // Compressed size (or 0xffffffff for ZIP64)
+ {
+ offset: 18,
+ size: 4,
+ name: 'length'
+ },
+ // Uncompressed size (or 0xffffffff for ZIP64)
+ {
+ offset: 22,
+ size: 4,
+ name: 'length'
+ },
+ // File name length (n)
+ {
+ offset: 26,
+ size: 2,
+ name: 'fnlength'
+ },
+ // Extra field length (m)
+ {
+ offset: 28,
+ size: 2,
+ default: 0,
+ name: 'extraLength'
+ }
+ ];
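
Note on usage: the rewritten parseZipLocalFileHeader reads every field through an asynchronous byte provider (slice, getUint16, getUint32, getBigUint64, all taking bigint offsets) and falls back to the ZIP64 extra field when a 32-bit size is 0xffffffff. Below is a minimal sketch of calling it over an in-memory archive; it assumes the function is re-exported from the package entry point, and InMemoryProvider is a hypothetical helper written here for illustration, not part of the package.

    import {parseZipLocalFileHeader} from '@loaders.gl/zip';

    // Hypothetical provider exposing the async, bigint-offset reads the parser expects.
    // ZIP header fields are little-endian.
    class InMemoryProvider {
      private view: DataView;
      constructor(buffer: ArrayBuffer) {
        this.view = new DataView(buffer);
      }
      get length(): bigint {
        return BigInt(this.view.byteLength);
      }
      async slice(start: bigint, end: bigint): Promise<ArrayBuffer> {
        return this.view.buffer.slice(Number(start), Number(end));
      }
      async getUint16(offset: bigint): Promise<number> {
        return this.view.getUint16(Number(offset), true);
      }
      async getUint32(offset: bigint): Promise<number> {
        return this.view.getUint32(Number(offset), true);
      }
      async getBigUint64(offset: bigint): Promise<bigint> {
        return this.view.getBigUint64(Number(offset), true);
      }
    }

    // Parse the local file header that starts at byte 0 of the archive.
    async function readFirstEntry(zipBuffer: ArrayBuffer): Promise<void> {
      const header = await parseZipLocalFileHeader(0n, new InMemoryProvider(zipBuffer));
      if (header) {
        console.log(header.fileName, header.compressedSize, header.compressionMethod);
      }
    }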

package/dist/parse-zip/search-from-the-end.js
@@ -1,16 +1,30 @@
+ // loaders.gl
+ // SPDX-License-Identifier: MIT
+ // Copyright (c) vis.gl contributors
+ /**
+ * looking for the last occurrence of the provided
+ * @param file
+ * @param target
+ * @returns
+ */
  export const searchFromTheEnd = async (file, target) => {
- const searchWindow = [await file.getUint8(file.length - 1n), await file.getUint8(file.length - 2n), await file.getUint8(file.length - 3n), undefined];
- let targetOffset = 0n;
- for (let i = file.length - 4n; i > -1; i--) {
- searchWindow[3] = searchWindow[2];
- searchWindow[2] = searchWindow[1];
- searchWindow[1] = searchWindow[0];
- searchWindow[0] = await file.getUint8(i);
- if (searchWindow.every((val, index) => val === target[index])) {
- targetOffset = i;
- break;
+ const searchWindow = [
+ await file.getUint8(file.length - 1n),
+ await file.getUint8(file.length - 2n),
+ await file.getUint8(file.length - 3n),
+ undefined
+ ];
+ let targetOffset = 0n;
+ // looking for the last record in the central directory
+ for (let i = file.length - 4n; i > -1; i--) {
+ searchWindow[3] = searchWindow[2];
+ searchWindow[2] = searchWindow[1];
+ searchWindow[1] = searchWindow[0];
+ searchWindow[0] = await file.getUint8(i);
+ if (searchWindow.every((val, index) => val === target[index])) {
+ targetOffset = i;
+ break;
+ }
  }
- }
- return targetOffset;
+ return targetOffset;
  };
- //# sourceMappingURL=search-from-the-end.js.map
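
searchFromTheEnd scans the archive backwards, byte by byte, and returns the offset of the last occurrence of a 4-byte signature as a bigint (0n if nothing matches); the zip filesystem uses it to locate the end-of-central-directory record. A hedged sketch follows, assuming searchFromTheEnd is re-exported from the package entry point and that FileHandleFile (the Node-only provider this package uses in zip-composition.js) comes from @loaders.gl/loader-utils; the eoCDSignature constant is written out here for illustration.

    import {searchFromTheEnd} from '@loaders.gl/zip';
    import {FileHandleFile} from '@loaders.gl/loader-utils';

    // Classic end-of-central-directory signature, PK\x05\x06, in forward byte order.
    const eoCDSignature = new Uint8Array([0x50, 0x4b, 0x05, 0x06]);

    // Returns the offset of the last EoCD signature in the file, or 0n if none was found.
    async function findEoCDOffset(zipPath: string): Promise<bigint> {
      const file = new FileHandleFile(zipPath);
      return searchFromTheEnd(file, eoCDSignature);
    }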

package/dist/parse-zip/zip-composition.js
@@ -4,111 +4,161 @@ import { CRC32Hash } from '@loaders.gl/crypto';
  import { generateLocalHeader } from "./local-file-header.js";
  import { generateCDHeader } from "./cd-file-header.js";
  import { fetchFile } from '@loaders.gl/core';
+ /**
+ * cut off CD and EoCD records from zip file
+ * @param provider zip file
+ * @returns tuple with three values: CD, EoCD record, EoCD information
+ */
  async function cutTheTailOff(provider) {
- const oldEoCDinfo = await parseEoCDRecord(provider);
- const oldCDStartOffset = oldEoCDinfo.cdStartOffset;
- const oldCDLength = Number(oldEoCDinfo.offsets.zip64EoCDOffset ? oldEoCDinfo.offsets.zip64EoCDOffset - oldCDStartOffset : oldEoCDinfo.offsets.zipEoCDOffset - oldCDStartOffset);
- const zipEnding = await provider.slice(oldCDStartOffset, provider.length);
- await provider.truncate(Number(oldCDStartOffset));
- const oldCDBody = zipEnding.slice(0, oldCDLength);
- const eocdBody = zipEnding.slice(oldCDLength, zipEnding.byteLength);
- return [oldCDBody, eocdBody, oldEoCDinfo];
+ // define where the body ends
+ const oldEoCDinfo = await parseEoCDRecord(provider);
+ const oldCDStartOffset = oldEoCDinfo.cdStartOffset;
+ // define cd length
+ const oldCDLength = Number(oldEoCDinfo.offsets.zip64EoCDOffset
+ ? oldEoCDinfo.offsets.zip64EoCDOffset - oldCDStartOffset
+ : oldEoCDinfo.offsets.zipEoCDOffset - oldCDStartOffset);
+ // cut off everything except of archieve body
+ const zipEnding = await provider.slice(oldCDStartOffset, provider.length);
+ await provider.truncate(Number(oldCDStartOffset));
+ // divide cd body and eocd record
+ const oldCDBody = zipEnding.slice(0, oldCDLength);
+ const eocdBody = zipEnding.slice(oldCDLength, zipEnding.byteLength);
+ return [oldCDBody, eocdBody, oldEoCDinfo];
  }
+ /**
+ * generates CD and local headers for the file
+ * @param fileName name of the file
+ * @param fileToAdd buffer with the file
+ * @param localFileHeaderOffset offset of the file local header
+ * @returns tuple with two values: local header and file body, cd header
+ */
  async function generateFileHeaders(fileName, fileToAdd, localFileHeaderOffset) {
- const newFileCRC322 = parseInt(await new CRC32Hash().hash(fileToAdd, 'hex'), 16);
- const newFileLocalHeader = generateLocalHeader({
- crc32: newFileCRC322,
- fileName,
- length: fileToAdd.byteLength
- });
- const newFileCDHeader = generateCDHeader({
- crc32: newFileCRC322,
- fileName,
- offset: localFileHeaderOffset,
- length: fileToAdd.byteLength
- });
- return [new Uint8Array(concatenateArrayBuffers(newFileLocalHeader, fileToAdd)), new Uint8Array(newFileCDHeader)];
+ // generating CRC32 of the content
+ const newFileCRC322 = parseInt(await new CRC32Hash().hash(fileToAdd, 'hex'), 16);
+ // generate local header for the file
+ const newFileLocalHeader = generateLocalHeader({
+ crc32: newFileCRC322,
+ fileName,
+ length: fileToAdd.byteLength
+ });
+ // generate hash file cd header
+ const newFileCDHeader = generateCDHeader({
+ crc32: newFileCRC322,
+ fileName,
+ offset: localFileHeaderOffset,
+ length: fileToAdd.byteLength
+ });
+ return [
+ new Uint8Array(concatenateArrayBuffers(newFileLocalHeader, fileToAdd)),
+ new Uint8Array(newFileCDHeader)
+ ];
  }
+ /**
+ * adds one file in the end of the archieve
+ * @param zipUrl path to the file
+ * @param fileToAdd new file body
+ * @param fileName new file name
+ */
  export async function addOneFile(zipUrl, fileToAdd, fileName) {
- const provider = new FileHandleFile(zipUrl, true);
- const [oldCDBody, eocdBody, oldEoCDinfo] = await cutTheTailOff(provider);
- const newFileOffset = provider.length;
- const [localPart, cdHeaderPart] = await generateFileHeaders(fileName, fileToAdd, newFileOffset);
- await provider.append(localPart);
- const newCDBody = concatenateArrayBuffers(oldCDBody, cdHeaderPart);
- const newCDStartOffset = provider.length;
- await provider.append(new Uint8Array(newCDBody));
- const eocdOffset = provider.length;
- await provider.append(updateEoCD(eocdBody, oldEoCDinfo.offsets, newCDStartOffset, eocdOffset, oldEoCDinfo.cdRecordsNumber + 1n));
+ // init file handler
+ const provider = new FileHandleFile(zipUrl, true);
+ const [oldCDBody, eocdBody, oldEoCDinfo] = await cutTheTailOff(provider);
+ // remember the new file local header start offset
+ const newFileOffset = provider.length;
+ const [localPart, cdHeaderPart] = await generateFileHeaders(fileName, fileToAdd, newFileOffset);
+ // write down the file local header
+ await provider.append(localPart);
+ // add the file CD header to the CD
+ const newCDBody = concatenateArrayBuffers(oldCDBody, cdHeaderPart);
+ // remember the CD start offset
+ const newCDStartOffset = provider.length;
+ // write down new CD
+ await provider.append(new Uint8Array(newCDBody));
+ // remember where eocd starts
+ const eocdOffset = provider.length;
+ await provider.append(updateEoCD(eocdBody, oldEoCDinfo.offsets, newCDStartOffset, eocdOffset, oldEoCDinfo.cdRecordsNumber + 1n));
  }
+ /**
+ * creates zip archive with no compression
+ * @note This is a node specific function that works on files
+ * @param inputPath path where files for the achive are stored
+ * @param outputPath path where zip archive will be placed
+ */
  export async function createZip(inputPath, outputPath, createAdditionalData) {
- const fileIterator = getFileIterator(inputPath);
- const resFile = new NodeFile(outputPath, 'w');
- const fileList = [];
- const cdArray = [];
- for await (const file of fileIterator) {
- await addFile(file, resFile, cdArray, fileList);
- }
- if (createAdditionalData) {
- const additionaldata = await createAdditionalData(fileList);
- await addFile(additionaldata, resFile, cdArray);
- }
- const cdOffset = (await resFile.stat()).bigsize;
- const cd = concatenateArrayBuffers(...cdArray);
- await resFile.append(new Uint8Array(cd));
- const eoCDStart = (await resFile.stat()).bigsize;
- await resFile.append(new Uint8Array(generateEoCD({
- recordsNumber: cdArray.length,
- cdSize: cd.byteLength,
- cdOffset,
- eoCDStart
- })));
+ const fileIterator = getFileIterator(inputPath);
+ const resFile = new NodeFile(outputPath, 'w');
+ const fileList = [];
+ const cdArray = [];
+ for await (const file of fileIterator) {
+ await addFile(file, resFile, cdArray, fileList);
+ }
+ if (createAdditionalData) {
+ const additionaldata = await createAdditionalData(fileList);
+ await addFile(additionaldata, resFile, cdArray);
+ }
+ const cdOffset = (await resFile.stat()).bigsize;
+ const cd = concatenateArrayBuffers(...cdArray);
+ await resFile.append(new Uint8Array(cd));
+ const eoCDStart = (await resFile.stat()).bigsize;
+ await resFile.append(new Uint8Array(generateEoCD({ recordsNumber: cdArray.length, cdSize: cd.byteLength, cdOffset, eoCDStart })));
  }
+ /**
+ * Adds file to zip parts
+ * @param file file to add
+ * @param resFile zip file body
+ * @param cdArray zip file central directory
+ * @param fileList list of file offsets
+ */
  async function addFile(file, resFile, cdArray, fileList) {
- const size = (await resFile.stat()).bigsize;
- fileList === null || fileList === void 0 ? void 0 : fileList.push({
- fileName: file.path,
- localHeaderOffset: size
- });
- const [localPart, cdHeaderPart] = await generateFileHeaders(file.path, file.file, size);
- await resFile.append(localPart);
- cdArray.push(cdHeaderPart);
+ const size = (await resFile.stat()).bigsize;
+ fileList?.push({ fileName: file.path, localHeaderOffset: size });
+ const [localPart, cdHeaderPart] = await generateFileHeaders(file.path, file.file, size);
+ await resFile.append(localPart);
+ cdArray.push(cdHeaderPart);
  }
+ /**
+ * creates iterator providing buffer with file content and path to every file in the input folder
+ * @param inputPath path to the input folder
+ * @returns iterator
+ */
  export function getFileIterator(inputPath) {
- async function* iterable() {
- const fileList = await getAllFiles(inputPath);
- for (const filePath of fileList) {
- const file = await (await fetchFile(path.join(inputPath, filePath))).arrayBuffer();
- yield {
- path: filePath,
- file
- };
+ async function* iterable() {
+ const fileList = await getAllFiles(inputPath);
+ for (const filePath of fileList) {
+ const file = await (await fetchFile(path.join(inputPath, filePath))).arrayBuffer();
+ yield { path: filePath, file };
+ }
  }
- }
- return iterable();
+ return iterable();
  }
- export async function getAllFiles(basePath) {
- let subfolder = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : '';
- let fsPassed = arguments.length > 2 ? arguments[2] : undefined;
- const fs = fsPassed ? fsPassed : new NodeFilesystem({});
- const files = await fs.readdir(pathJoin(basePath, subfolder));
- const arrayOfFiles = [];
- for (const file of files) {
- const fullPath = pathJoin(basePath, subfolder, file);
- if ((await fs.stat(fullPath)).isDirectory) {
- const files = await getAllFiles(basePath, pathJoin(subfolder, file));
- arrayOfFiles.push(...files);
- } else {
- arrayOfFiles.push(pathJoin(subfolder, file));
+ /**
+ * creates a list of relative paths to all files in the provided folder
+ * @param basePath path of the root folder
+ * @param subfolder relative path from the root folder.
+ * @returns list of paths
+ */
+ export async function getAllFiles(basePath, subfolder = '', fsPassed) {
+ const fs = fsPassed ? fsPassed : new NodeFilesystem({});
+ const files = await fs.readdir(pathJoin(basePath, subfolder));
+ const arrayOfFiles = [];
+ for (const file of files) {
+ const fullPath = pathJoin(basePath, subfolder, file);
+ if ((await fs.stat(fullPath)).isDirectory) {
+ const files = await getAllFiles(basePath, pathJoin(subfolder, file));
+ arrayOfFiles.push(...files);
+ }
+ else {
+ arrayOfFiles.push(pathJoin(subfolder, file));
+ }
  }
- }
- return arrayOfFiles;
+ return arrayOfFiles;
  }
- function pathJoin() {
- for (var _len = arguments.length, paths = new Array(_len), _key = 0; _key < _len; _key++) {
- paths[_key] = arguments[_key];
- }
- const resPaths = paths.filter(val => val.length);
- return path.join(...resPaths);
+ /**
+ * removes empty parts from path array and joins it
+ * @param paths paths to join
+ * @returns joined path
+ */
+ function pathJoin(...paths) {
+ const resPaths = paths.filter((val) => val.length);
+ return path.join(...resPaths);
  }
- //# sourceMappingURL=zip-composition.js.map
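
The composition helpers above are Node-only: createZip walks a folder and writes an uncompressed archive, and addOneFile appends a single file in place by rewriting the central directory and EoCD records after the new entry. A usage sketch follows, assuming both functions are re-exported from the package entry point; the paths and the file body are placeholders for illustration.

    import {createZip, addOneFile} from '@loaders.gl/zip';

    async function repackage(): Promise<void> {
      // Store every file found under ./input into ./output.zip (no compression).
      await createZip('./input', './output.zip');

      // Append one more file in place: its local header and body are written first,
      // then the central directory and EoCD records are regenerated after them.
      const body = new TextEncoder().encode('hello world').buffer;
      await addOneFile('./output.zip', body, 'extra/hello.txt');
    }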