@loaders.gl/zip 4.2.0-alpha.4 → 4.2.0-alpha.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.dev.js +920 -542
- package/dist/dist.min.js +25 -0
- package/dist/filesystems/IndexedArchive.js +24 -12
- package/dist/filesystems/zip-filesystem.d.ts +2 -2
- package/dist/filesystems/zip-filesystem.d.ts.map +1 -1
- package/dist/filesystems/zip-filesystem.js +121 -88
- package/dist/hash-file-utility.d.ts +1 -1
- package/dist/hash-file-utility.d.ts.map +1 -1
- package/dist/hash-file-utility.js +85 -42
- package/dist/index.cjs +49 -125
- package/dist/index.cjs.map +7 -0
- package/dist/index.d.ts +12 -12
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +4 -1
- package/dist/lib/tar/header.d.ts +1 -1
- package/dist/lib/tar/header.d.ts.map +1 -1
- package/dist/lib/tar/header.js +69 -33
- package/dist/lib/tar/tar.d.ts +1 -1
- package/dist/lib/tar/tar.d.ts.map +1 -1
- package/dist/lib/tar/tar.js +121 -106
- package/dist/lib/tar/types.js +3 -1
- package/dist/lib/tar/utils.js +45 -18
- package/dist/parse-zip/cd-file-header.d.ts +1 -1
- package/dist/parse-zip/cd-file-header.d.ts.map +1 -1
- package/dist/parse-zip/cd-file-header.js +239 -177
- package/dist/parse-zip/end-of-central-directory.js +247 -158
- package/dist/parse-zip/local-file-header.d.ts +1 -1
- package/dist/parse-zip/local-file-header.d.ts.map +1 -1
- package/dist/parse-zip/local-file-header.js +143 -102
- package/dist/parse-zip/search-from-the-end.js +27 -13
- package/dist/parse-zip/zip-composition.js +142 -92
- package/dist/parse-zip/zip64-info-generation.js +64 -41
- package/dist/tar-builder.d.ts +1 -1
- package/dist/tar-builder.d.ts.map +1 -1
- package/dist/tar-builder.js +30 -29
- package/dist/zip-loader.js +51 -40
- package/dist/zip-writer.js +39 -39
- package/package.json +11 -7
- package/dist/filesystems/IndexedArchive.js.map +0 -1
- package/dist/filesystems/zip-filesystem.js.map +0 -1
- package/dist/hash-file-utility.js.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/lib/tar/header.js.map +0 -1
- package/dist/lib/tar/tar.js.map +0 -1
- package/dist/lib/tar/types.js.map +0 -1
- package/dist/lib/tar/utils.js.map +0 -1
- package/dist/parse-zip/cd-file-header.js.map +0 -1
- package/dist/parse-zip/end-of-central-directory.js.map +0 -1
- package/dist/parse-zip/local-file-header.js.map +0 -1
- package/dist/parse-zip/search-from-the-end.js.map +0 -1
- package/dist/parse-zip/zip-composition.js.map +0 -1
- package/dist/parse-zip/zip64-info-generation.js.map +0 -1
- package/dist/tar-builder.js.map +0 -1
- package/dist/zip-loader.js.map +0 -1
- package/dist/zip-writer.js.map +0 -1
package/dist/parse-zip/local-file-header.js

@@ -1,5 +1,9 @@
+// loaders.gl
+// SPDX-License-Identifier: MIT
+// Copyright (c) vis.gl contributors
 import { compareArrayBuffers, concatenateArrayBuffers } from '@loaders.gl/loader-utils';
 import { createZip64Info, setFieldToNumber } from "./zip64-info-generation.js";
+// offsets accroding to https://en.wikipedia.org/wiki/ZIP_(file_format)
 const COMPRESSION_METHOD_OFFSET = 8n;
 const COMPRESSED_SIZE_OFFSET = 18n;
 const UNCOMPRESSED_SIZE_OFFSET = 22n;
@@ -7,109 +11,146 @@ const FILE_NAME_LENGTH_OFFSET = 26n;
 const EXTRA_FIELD_LENGTH_OFFSET = 28n;
 const FILE_NAME_OFFSET = 30n;
 export const signature = new Uint8Array([0x50, 0x4b, 0x03, 0x04]);
+/**
+ * Parses local file header of zip file
+ * @param headerOffset - offset in the archive where header starts
+ * @param buffer - buffer containing whole array
+ * @returns Info from the header
+ */
 export const parseZipLocalFileHeader = async (headerOffset, buffer) => {
-… (previous lines not shown)
+    const magicBytes = await buffer.slice(headerOffset, headerOffset + 4n);
+    if (!compareArrayBuffers(magicBytes, signature)) {
+        return null;
+    }
+    const fileNameLength = await buffer.getUint16(headerOffset + FILE_NAME_LENGTH_OFFSET);
+    const fileName = new TextDecoder()
+        .decode(await buffer.slice(headerOffset + FILE_NAME_OFFSET, headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength)))
+        .split('\\')
+        .join('/');
+    const extraFieldLength = await buffer.getUint16(headerOffset + EXTRA_FIELD_LENGTH_OFFSET);
+    let fileDataOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength + extraFieldLength);
+    const compressionMethod = await buffer.getUint16(headerOffset + COMPRESSION_METHOD_OFFSET);
+    let compressedSize = BigInt(await buffer.getUint32(headerOffset + COMPRESSED_SIZE_OFFSET)); // add zip 64 logic
+    let uncompressedSize = BigInt(await buffer.getUint32(headerOffset + UNCOMPRESSED_SIZE_OFFSET)); // add zip 64 logic
+    const extraOffset = headerOffset + FILE_NAME_OFFSET + BigInt(fileNameLength);
+    let offsetInZip64Data = 4n;
+    // looking for info that might be also be in zip64 extra field
+    if (uncompressedSize === BigInt(0xffffffff)) {
+        uncompressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
+        offsetInZip64Data += 8n;
+    }
+    if (compressedSize === BigInt(0xffffffff)) {
+        compressedSize = await buffer.getBigUint64(extraOffset + offsetInZip64Data);
+        offsetInZip64Data += 8n;
+    }
+    if (fileDataOffset === BigInt(0xffffffff)) {
+        fileDataOffset = await buffer.getBigUint64(extraOffset + offsetInZip64Data); // setting it to the one from zip64
+    }
+    return {
+        fileNameLength,
+        fileName,
+        extraFieldLength,
+        fileDataOffset,
+        compressedSize,
+        compressionMethod
+    };
 };
+/**
+ * generates local header for the file
+ * @param options info that can be placed into local header
+ * @returns buffer with header
+ */
 export function generateLocalHeader(options) {
-… (previous lines not shown)
+    const optionsToUse = {
+        ...options,
+        extraLength: 0,
+        fnlength: options.fileName.length
+    };
+    let zip64header = new ArrayBuffer(0);
+    const optionsToZip64 = {};
+    if (optionsToUse.length >= 0xffffffff) {
+        optionsToZip64.size = optionsToUse.length;
+        optionsToUse.length = 0xffffffff;
+    }
+    if (Object.keys(optionsToZip64).length) {
+        zip64header = createZip64Info(optionsToZip64);
+        optionsToUse.extraLength = zip64header.byteLength;
+    }
+    // base length without file name and extra info is static
+    const header = new DataView(new ArrayBuffer(Number(FILE_NAME_OFFSET)));
+    for (const field of ZIP_HEADER_FIELDS) {
+        setFieldToNumber(header, field.size, field.offset, optionsToUse[field.name ?? ''] ?? field.default ?? 0);
+    }
+    const encodedName = new TextEncoder().encode(optionsToUse.fileName);
+    const resHeader = concatenateArrayBuffers(header.buffer, encodedName, zip64header);
+    return resHeader;
 }
-const ZIP_HEADER_FIELDS = [
-… (previous lines not shown)
+const ZIP_HEADER_FIELDS = [
+    // Local file header signature = 0x04034b50
+    {
+        offset: 0,
+        size: 4,
+        default: new DataView(signature.buffer).getUint32(0, true)
+    },
+    // Version needed to extract (minimum)
+    {
+        offset: 4,
+        size: 2,
+        default: 45
+    },
+    // General purpose bit flag
+    {
+        offset: 6,
+        size: 2,
+        default: 0
+    },
+    // Compression method
+    {
+        offset: 8,
+        size: 2,
+        default: 0
+    },
+    // File last modification time
+    {
+        offset: 10,
+        size: 2,
+        default: 0
+    },
+    // File last modification date
+    {
+        offset: 12,
+        size: 2,
+        default: 0
+    },
+    // CRC-32 of uncompressed data
+    {
+        offset: 14,
+        size: 4,
+        name: 'crc32'
+    },
+    // Compressed size (or 0xffffffff for ZIP64)
+    {
+        offset: 18,
+        size: 4,
+        name: 'length'
+    },
+    // Uncompressed size (or 0xffffffff for ZIP64)
+    {
+        offset: 22,
+        size: 4,
+        name: 'length'
+    },
+    // File name length (n)
+    {
+        offset: 26,
+        size: 2,
+        name: 'fnlength'
+    },
+    // Extra field length (m)
+    {
+        offset: 28,
+        size: 2,
+        default: 0,
+        name: 'extraLength'
+    }
+];
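The rewritten parseZipLocalFileHeader above reads the fixed-offset header fields and, whenever a 32-bit size field is saturated at 0xffffffff, falls back to the 64-bit values in the ZIP64 extra field. Below is a minimal standalone TypeScript sketch of the same layout over a plain ArrayBuffer, shown only for orientation: readLocalFileHeader is a hypothetical name, not part of the package, and its extra-field scan is slightly more general than the code above (which assumes the ZIP64 record is the first entry in the extra field).

// Minimal standalone sketch (hypothetical helper, not the loaders.gl API):
// reads one ZIP local file header from an ArrayBuffer, including the ZIP64
// fallback for sizes saturated at 0xffffffff. Offsets follow the ZIP spec
// and match the *_OFFSET constants in the diff above.
function readLocalFileHeader(buffer: ArrayBuffer, headerOffset: number) {
  const view = new DataView(buffer);
  if (view.getUint32(headerOffset, true) !== 0x04034b50) {
    return null; // not a local file header signature ("PK\x03\x04")
  }
  const compressionMethod = view.getUint16(headerOffset + 8, true);
  let compressedSize: number | bigint = view.getUint32(headerOffset + 18, true);
  let uncompressedSize: number | bigint = view.getUint32(headerOffset + 22, true);
  const fileNameLength = view.getUint16(headerOffset + 26, true);
  const extraFieldLength = view.getUint16(headerOffset + 28, true);
  const fileName = new TextDecoder().decode(
    new Uint8Array(buffer, headerOffset + 30, fileNameLength)
  );
  // Scan the extra field for the ZIP64 extended-information record (id 0x0001),
  // which carries 64-bit sizes when the 32-bit fields above are 0xffffffff.
  let extraOffset = headerOffset + 30 + fileNameLength;
  const extraEnd = extraOffset + extraFieldLength;
  while (extraOffset + 4 <= extraEnd) {
    const id = view.getUint16(extraOffset, true);
    const size = view.getUint16(extraOffset + 2, true);
    if (id === 0x0001) {
      let cursor = extraOffset + 4;
      if (uncompressedSize === 0xffffffff) {
        uncompressedSize = view.getBigUint64(cursor, true);
        cursor += 8;
      }
      if (compressedSize === 0xffffffff) {
        compressedSize = view.getBigUint64(cursor, true);
      }
      break;
    }
    extraOffset += 4 + size;
  }
  const fileDataOffset = headerOffset + 30 + fileNameLength + extraFieldLength;
  return {fileName, compressionMethod, compressedSize, uncompressedSize, fileDataOffset};
}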
package/dist/parse-zip/search-from-the-end.js

@@ -1,16 +1,30 @@
+// loaders.gl
+// SPDX-License-Identifier: MIT
+// Copyright (c) vis.gl contributors
+/**
+ * looking for the last occurrence of the provided
+ * @param file
+ * @param target
+ * @returns
+ */
 export const searchFromTheEnd = async (file, target) => {
-… (previous lines not shown)
+    const searchWindow = [
+        await file.getUint8(file.length - 1n),
+        await file.getUint8(file.length - 2n),
+        await file.getUint8(file.length - 3n),
+        undefined
+    ];
+    let targetOffset = 0n;
+    // looking for the last record in the central directory
+    for (let i = file.length - 4n; i > -1; i--) {
+        searchWindow[3] = searchWindow[2];
+        searchWindow[2] = searchWindow[1];
+        searchWindow[1] = searchWindow[0];
+        searchWindow[0] = await file.getUint8(i);
+        if (searchWindow.every((val, index) => val === target[index])) {
+            targetOffset = i;
+            break;
+        }
     }
-    return targetOffset;
+    return targetOffset;
 };
-//# sourceMappingURL=search-from-the-end.js.map
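searchFromTheEnd above walks backwards through the file with a 4-byte sliding window; in a ZIP reader this is how the last end-of-central-directory signature (bytes 0x50 0x4b 0x05 0x06 in file order) is located before the central directory can be parsed. A standalone sketch of the same technique over an in-memory Uint8Array; findSignatureFromEnd is a hypothetical helper, not a package export, and it returns -1 instead of 0n when nothing is found.

// Find the last occurrence of a 4-byte signature by scanning backwards,
// mirroring the sliding-window loop in searchFromTheEnd above.
function findSignatureFromEnd(data: Uint8Array, target: [number, number, number, number]): number {
  for (let i = data.length - 4; i >= 0; i--) {
    if (
      data[i] === target[0] &&
      data[i + 1] === target[1] &&
      data[i + 2] === target[2] &&
      data[i + 3] === target[3]
    ) {
      return i;
    }
  }
  return -1; // signature not present
}

// Example: locate the end-of-central-directory record of an in-memory zip.
// const eocdOffset = findSignatureFromEnd(zipBytes, [0x50, 0x4b, 0x05, 0x06]);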
package/dist/parse-zip/zip-composition.js

@@ -4,111 +4,161 @@ import { CRC32Hash } from '@loaders.gl/crypto';
 import { generateLocalHeader } from "./local-file-header.js";
 import { generateCDHeader } from "./cd-file-header.js";
 import { fetchFile } from '@loaders.gl/core';
+/**
+ * cut off CD and EoCD records from zip file
+ * @param provider zip file
+ * @returns tuple with three values: CD, EoCD record, EoCD information
+ */
 async function cutTheTailOff(provider) {
-… (previous lines not shown)
+    // define where the body ends
+    const oldEoCDinfo = await parseEoCDRecord(provider);
+    const oldCDStartOffset = oldEoCDinfo.cdStartOffset;
+    // define cd length
+    const oldCDLength = Number(oldEoCDinfo.offsets.zip64EoCDOffset
+        ? oldEoCDinfo.offsets.zip64EoCDOffset - oldCDStartOffset
+        : oldEoCDinfo.offsets.zipEoCDOffset - oldCDStartOffset);
+    // cut off everything except of archieve body
+    const zipEnding = await provider.slice(oldCDStartOffset, provider.length);
+    await provider.truncate(Number(oldCDStartOffset));
+    // divide cd body and eocd record
+    const oldCDBody = zipEnding.slice(0, oldCDLength);
+    const eocdBody = zipEnding.slice(oldCDLength, zipEnding.byteLength);
+    return [oldCDBody, eocdBody, oldEoCDinfo];
 }
+/**
+ * generates CD and local headers for the file
+ * @param fileName name of the file
+ * @param fileToAdd buffer with the file
+ * @param localFileHeaderOffset offset of the file local header
+ * @returns tuple with two values: local header and file body, cd header
+ */
 async function generateFileHeaders(fileName, fileToAdd, localFileHeaderOffset) {
-… (previous lines not shown)
+    // generating CRC32 of the content
+    const newFileCRC322 = parseInt(await new CRC32Hash().hash(fileToAdd, 'hex'), 16);
+    // generate local header for the file
+    const newFileLocalHeader = generateLocalHeader({
+        crc32: newFileCRC322,
+        fileName,
+        length: fileToAdd.byteLength
+    });
+    // generate hash file cd header
+    const newFileCDHeader = generateCDHeader({
+        crc32: newFileCRC322,
+        fileName,
+        offset: localFileHeaderOffset,
+        length: fileToAdd.byteLength
+    });
+    return [
+        new Uint8Array(concatenateArrayBuffers(newFileLocalHeader, fileToAdd)),
+        new Uint8Array(newFileCDHeader)
+    ];
 }
+/**
+ * adds one file in the end of the archieve
+ * @param zipUrl path to the file
+ * @param fileToAdd new file body
+ * @param fileName new file name
+ */
 export async function addOneFile(zipUrl, fileToAdd, fileName) {
-… (previous lines not shown)
+    // init file handler
+    const provider = new FileHandleFile(zipUrl, true);
+    const [oldCDBody, eocdBody, oldEoCDinfo] = await cutTheTailOff(provider);
+    // remember the new file local header start offset
+    const newFileOffset = provider.length;
+    const [localPart, cdHeaderPart] = await generateFileHeaders(fileName, fileToAdd, newFileOffset);
+    // write down the file local header
+    await provider.append(localPart);
+    // add the file CD header to the CD
+    const newCDBody = concatenateArrayBuffers(oldCDBody, cdHeaderPart);
+    // remember the CD start offset
+    const newCDStartOffset = provider.length;
+    // write down new CD
+    await provider.append(new Uint8Array(newCDBody));
+    // remember where eocd starts
+    const eocdOffset = provider.length;
+    await provider.append(updateEoCD(eocdBody, oldEoCDinfo.offsets, newCDStartOffset, eocdOffset, oldEoCDinfo.cdRecordsNumber + 1n));
 }
+/**
+ * creates zip archive with no compression
+ * @note This is a node specific function that works on files
+ * @param inputPath path where files for the achive are stored
+ * @param outputPath path where zip archive will be placed
+ */
 export async function createZip(inputPath, outputPath, createAdditionalData) {
-… (previous lines not shown)
-        recordsNumber: cdArray.length,
-        cdSize: cd.byteLength,
-        cdOffset,
-        eoCDStart
-    })));
+    const fileIterator = getFileIterator(inputPath);
+    const resFile = new NodeFile(outputPath, 'w');
+    const fileList = [];
+    const cdArray = [];
+    for await (const file of fileIterator) {
+        await addFile(file, resFile, cdArray, fileList);
+    }
+    if (createAdditionalData) {
+        const additionaldata = await createAdditionalData(fileList);
+        await addFile(additionaldata, resFile, cdArray);
+    }
+    const cdOffset = (await resFile.stat()).bigsize;
+    const cd = concatenateArrayBuffers(...cdArray);
+    await resFile.append(new Uint8Array(cd));
+    const eoCDStart = (await resFile.stat()).bigsize;
+    await resFile.append(new Uint8Array(generateEoCD({ recordsNumber: cdArray.length, cdSize: cd.byteLength, cdOffset, eoCDStart })));
 }
+/**
+ * Adds file to zip parts
+ * @param file file to add
+ * @param resFile zip file body
+ * @param cdArray zip file central directory
+ * @param fileList list of file offsets
+ */
 async function addFile(file, resFile, cdArray, fileList) {
-… (previous lines not shown)
-    const [localPart, cdHeaderPart] = await generateFileHeaders(file.path, file.file, size);
-    await resFile.append(localPart);
-    cdArray.push(cdHeaderPart);
+    const size = (await resFile.stat()).bigsize;
+    fileList?.push({ fileName: file.path, localHeaderOffset: size });
+    const [localPart, cdHeaderPart] = await generateFileHeaders(file.path, file.file, size);
+    await resFile.append(localPart);
+    cdArray.push(cdHeaderPart);
 }
+/**
+ * creates iterator providing buffer with file content and path to every file in the input folder
+ * @param inputPath path to the input folder
+ * @returns iterator
+ */
 export function getFileIterator(inputPath) {
-… (previous lines not shown)
+    async function* iterable() {
+        const fileList = await getAllFiles(inputPath);
+        for (const filePath of fileList) {
+            const file = await (await fetchFile(path.join(inputPath, filePath))).arrayBuffer();
+            yield { path: filePath, file };
+        }
     }
-    return iterable();
+    return iterable();
 }
-… (previous lines not shown)
+/**
+ * creates a list of relative paths to all files in the provided folder
+ * @param basePath path of the root folder
+ * @param subfolder relative path from the root folder.
+ * @returns list of paths
+ */
+export async function getAllFiles(basePath, subfolder = '', fsPassed) {
+    const fs = fsPassed ? fsPassed : new NodeFilesystem({});
+    const files = await fs.readdir(pathJoin(basePath, subfolder));
+    const arrayOfFiles = [];
+    for (const file of files) {
+        const fullPath = pathJoin(basePath, subfolder, file);
+        if ((await fs.stat(fullPath)).isDirectory) {
+            const files = await getAllFiles(basePath, pathJoin(subfolder, file));
+            arrayOfFiles.push(...files);
+        }
+        else {
+            arrayOfFiles.push(pathJoin(subfolder, file));
+        }
     }
-    return arrayOfFiles;
+    return arrayOfFiles;
 }
-… (previous lines not shown)
+/**
+ * removes empty parts from path array and joins it
+ * @param paths paths to join
+ * @returns joined path
+ */
+function pathJoin(...paths) {
+    const resPaths = paths.filter((val) => val.length);
+    return path.join(...resPaths);
 }
-//# sourceMappingURL=zip-composition.js.map
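addOneFile above is append-only: it truncates the archive at the start of the old central directory, appends the new local header plus file body, re-appends the central directory with one extra record, and finally writes an updated end-of-central-directory record; createZip builds a STORE-only (uncompressed) archive the same way, and both are Node-oriented (they rely on FileHandleFile / NodeFile). A hedged usage sketch follows; the import from the package root is an assumption, since these helpers are defined in parse-zip/zip-composition and may not be re-exported by the index in this version, and the optional createAdditionalData callback of createZip is omitted.

// Usage sketch under the assumptions stated above.
import {createZip, addOneFile} from '@loaders.gl/zip';

async function main(): Promise<void> {
  // Pack every file found under ./input into ./out.zip (no compression).
  await createZip('./input', './out.zip');

  // Append one more entry; the central directory and EoCD are rewritten
  // after the new local file header and body, as in the diff above.
  const body = new TextEncoder().encode('hello world').buffer;
  await addOneFile('./out.zip', body, 'notes/hello.txt');
}

void main();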