@cj-tech-master/excelts 4.2.1-canary.20260111102127.f808a37 → 4.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/THIRD_PARTY_NOTICES.md +31 -0
- package/dist/browser/index.browser.d.ts +0 -1
- package/dist/browser/index.browser.js +0 -12
- package/dist/browser/modules/archive/byte-queue.d.ts +18 -0
- package/dist/browser/modules/archive/byte-queue.js +125 -0
- package/dist/browser/modules/archive/{compression/compress.base.js → compress.base.js} +1 -1
- package/dist/browser/modules/archive/{compression/compress.browser.d.ts → compress.browser.d.ts} +8 -2
- package/dist/{esm/modules/archive/compression → browser/modules/archive}/compress.browser.js +11 -3
- package/dist/browser/modules/archive/{compression/compress.d.ts → compress.d.ts} +2 -2
- package/dist/{esm/modules/archive/compression → browser/modules/archive}/compress.js +1 -1
- package/dist/browser/modules/archive/{compression/crc32.browser.d.ts → crc32.browser.d.ts} +1 -1
- package/dist/browser/modules/archive/{compression/crc32.d.ts → crc32.d.ts} +1 -1
- package/dist/browser/modules/archive/{compression/crc32.js → crc32.js} +1 -1
- package/dist/browser/modules/archive/defaults.d.ts +0 -1
- package/dist/browser/modules/archive/defaults.js +3 -6
- package/dist/browser/modules/archive/{compression/deflate-fallback.js → deflate-fallback.js} +1 -1
- package/dist/browser/modules/archive/{unzip/extract.d.ts → extract.d.ts} +2 -2
- package/dist/browser/modules/archive/index.base.d.ts +4 -4
- package/dist/browser/modules/archive/index.base.js +6 -3
- package/dist/browser/modules/archive/index.browser.d.ts +4 -3
- package/dist/browser/modules/archive/index.browser.js +7 -3
- package/dist/browser/modules/archive/index.d.ts +4 -3
- package/dist/browser/modules/archive/index.js +5 -3
- package/dist/browser/modules/archive/{unzip/stream.base.d.ts → parse.base.d.ts} +2 -36
- package/dist/browser/modules/archive/parse.base.js +644 -0
- package/dist/browser/modules/archive/{unzip/stream.browser.d.ts → parse.browser.d.ts} +1 -1
- package/dist/{esm/modules/archive/unzip/stream.browser.js → browser/modules/archive/parse.browser.js} +110 -371
- package/dist/browser/modules/archive/{unzip/stream.d.ts → parse.d.ts} +2 -2
- package/dist/{esm/modules/archive/unzip/stream.js → browser/modules/archive/parse.js} +5 -6
- package/dist/browser/modules/archive/{compression/streaming-compress.browser.d.ts → streaming-compress.browser.d.ts} +2 -2
- package/dist/browser/modules/archive/{compression/streaming-compress.browser.js → streaming-compress.browser.js} +3 -3
- package/dist/browser/modules/archive/{compression/streaming-compress.d.ts → streaming-compress.d.ts} +2 -2
- package/dist/browser/modules/archive/{compression/streaming-compress.js → streaming-compress.js} +2 -2
- package/dist/browser/modules/archive/{zip/stream.d.ts → streaming-zip.d.ts} +5 -28
- package/dist/{esm/modules/archive/zip/stream.js → browser/modules/archive/streaming-zip.js} +48 -192
- package/dist/browser/modules/archive/utils/bytes.js +16 -16
- package/dist/browser/modules/archive/utils/parse-buffer.js +23 -21
- package/dist/browser/modules/archive/utils/timestamps.js +1 -62
- package/dist/browser/modules/archive/utils/zip-extra-fields.d.ts +1 -1
- package/dist/browser/modules/archive/utils/zip-extra-fields.js +14 -26
- package/dist/browser/modules/archive/utils/zip-extra.d.ts +18 -0
- package/dist/browser/modules/archive/utils/zip-extra.js +68 -0
- package/dist/browser/modules/archive/zip-builder.d.ts +117 -0
- package/dist/browser/modules/archive/zip-builder.js +292 -0
- package/dist/browser/modules/archive/zip-constants.d.ts +18 -0
- package/dist/browser/modules/archive/zip-constants.js +23 -0
- package/dist/{esm/modules/archive/zip → browser/modules/archive}/zip-entry-metadata.js +3 -3
- package/dist/{types/modules/archive/unzip → browser/modules/archive}/zip-parser.d.ts +1 -1
- package/dist/{esm/modules/archive/unzip → browser/modules/archive}/zip-parser.js +24 -38
- package/dist/browser/modules/archive/{zip-spec/zip-records.d.ts → zip-records.d.ts} +0 -20
- package/dist/browser/modules/archive/zip-records.js +84 -0
- package/dist/browser/modules/excel/stream/workbook-reader.browser.js +1 -1
- package/dist/browser/modules/excel/stream/workbook-writer.browser.d.ts +1 -1
- package/dist/browser/modules/excel/stream/workbook-writer.browser.js +1 -1
- package/dist/browser/modules/excel/xlsx/xlsx.browser.js +6 -3
- package/dist/browser/modules/excel/xlsx/xlsx.js +1 -1
- package/dist/browser/modules/stream/streams.browser.d.ts +30 -28
- package/dist/browser/modules/stream/streams.browser.js +710 -830
- package/dist/browser/modules/stream/streams.js +58 -140
- package/dist/cjs/modules/archive/byte-queue.js +129 -0
- package/dist/cjs/modules/archive/{compression/compress.base.js → compress.base.js} +1 -1
- package/dist/cjs/modules/archive/{compression/compress.browser.js → compress.browser.js} +11 -3
- package/dist/cjs/modules/archive/{compression/compress.js → compress.js} +1 -1
- package/dist/cjs/modules/archive/{compression/crc32.js → crc32.js} +1 -1
- package/dist/cjs/modules/archive/defaults.js +4 -7
- package/dist/cjs/modules/archive/{compression/deflate-fallback.js → deflate-fallback.js} +1 -1
- package/dist/cjs/modules/archive/index.base.js +19 -9
- package/dist/cjs/modules/archive/index.browser.js +10 -4
- package/dist/cjs/modules/archive/index.js +8 -4
- package/dist/cjs/modules/archive/parse.base.js +666 -0
- package/dist/cjs/modules/archive/{unzip/stream.browser.js → parse.browser.js} +111 -372
- package/dist/cjs/modules/archive/{unzip/stream.js → parse.js} +8 -9
- package/dist/cjs/modules/archive/{compression/streaming-compress.browser.js → streaming-compress.browser.js} +3 -3
- package/dist/cjs/modules/archive/{compression/streaming-compress.js → streaming-compress.js} +2 -2
- package/dist/cjs/modules/archive/{zip/stream.js → streaming-zip.js} +50 -194
- package/dist/cjs/modules/archive/utils/bytes.js +16 -16
- package/dist/cjs/modules/archive/utils/parse-buffer.js +23 -21
- package/dist/cjs/modules/archive/utils/timestamps.js +3 -64
- package/dist/cjs/modules/archive/utils/zip-extra-fields.js +14 -26
- package/dist/cjs/modules/archive/utils/zip-extra.js +74 -0
- package/dist/cjs/modules/archive/zip-builder.js +297 -0
- package/dist/cjs/modules/archive/zip-constants.js +26 -0
- package/dist/cjs/modules/archive/{zip/zip-entry-metadata.js → zip-entry-metadata.js} +5 -5
- package/dist/cjs/modules/archive/{unzip/zip-parser.js → zip-parser.js} +33 -47
- package/dist/cjs/modules/archive/zip-records.js +90 -0
- package/dist/cjs/modules/excel/stream/workbook-reader.browser.js +2 -2
- package/dist/cjs/modules/excel/stream/workbook-writer.browser.js +4 -4
- package/dist/cjs/modules/excel/xlsx/xlsx.browser.js +9 -6
- package/dist/cjs/modules/excel/xlsx/xlsx.js +2 -2
- package/dist/cjs/modules/stream/streams.browser.js +710 -830
- package/dist/cjs/modules/stream/streams.js +58 -140
- package/dist/esm/index.browser.js +0 -12
- package/dist/esm/modules/archive/byte-queue.js +125 -0
- package/dist/esm/modules/archive/{compression/compress.base.js → compress.base.js} +1 -1
- package/dist/{browser/modules/archive/compression → esm/modules/archive}/compress.browser.js +11 -3
- package/dist/{browser/modules/archive/compression → esm/modules/archive}/compress.js +1 -1
- package/dist/esm/modules/archive/{compression/crc32.js → crc32.js} +1 -1
- package/dist/esm/modules/archive/defaults.js +3 -6
- package/dist/esm/modules/archive/{compression/deflate-fallback.js → deflate-fallback.js} +1 -1
- package/dist/esm/modules/archive/index.base.js +6 -3
- package/dist/esm/modules/archive/index.browser.js +7 -3
- package/dist/esm/modules/archive/index.js +5 -3
- package/dist/esm/modules/archive/parse.base.js +644 -0
- package/dist/{browser/modules/archive/unzip/stream.browser.js → esm/modules/archive/parse.browser.js} +110 -371
- package/dist/{browser/modules/archive/unzip/stream.js → esm/modules/archive/parse.js} +5 -6
- package/dist/esm/modules/archive/{compression/streaming-compress.browser.js → streaming-compress.browser.js} +3 -3
- package/dist/esm/modules/archive/{compression/streaming-compress.js → streaming-compress.js} +2 -2
- package/dist/{browser/modules/archive/zip/stream.js → esm/modules/archive/streaming-zip.js} +48 -192
- package/dist/esm/modules/archive/utils/bytes.js +16 -16
- package/dist/esm/modules/archive/utils/parse-buffer.js +23 -21
- package/dist/esm/modules/archive/utils/timestamps.js +1 -62
- package/dist/esm/modules/archive/utils/zip-extra-fields.js +14 -26
- package/dist/esm/modules/archive/utils/zip-extra.js +68 -0
- package/dist/esm/modules/archive/zip-builder.js +292 -0
- package/dist/esm/modules/archive/zip-constants.js +23 -0
- package/dist/{browser/modules/archive/zip → esm/modules/archive}/zip-entry-metadata.js +3 -3
- package/dist/{browser/modules/archive/unzip → esm/modules/archive}/zip-parser.js +24 -38
- package/dist/esm/modules/archive/zip-records.js +84 -0
- package/dist/esm/modules/excel/stream/workbook-reader.browser.js +1 -1
- package/dist/esm/modules/excel/stream/workbook-writer.browser.js +1 -1
- package/dist/esm/modules/excel/xlsx/xlsx.browser.js +6 -3
- package/dist/esm/modules/excel/xlsx/xlsx.js +1 -1
- package/dist/esm/modules/stream/streams.browser.js +710 -830
- package/dist/esm/modules/stream/streams.js +58 -140
- package/dist/iife/THIRD_PARTY_NOTICES.md +31 -0
- package/dist/iife/excelts.iife.js +4425 -6215
- package/dist/iife/excelts.iife.js.map +1 -1
- package/dist/iife/excelts.iife.min.js +31 -103
- package/dist/types/index.browser.d.ts +0 -1
- package/dist/types/modules/archive/byte-queue.d.ts +18 -0
- package/dist/types/modules/archive/{compression/compress.browser.d.ts → compress.browser.d.ts} +8 -2
- package/dist/types/modules/archive/defaults.d.ts +0 -1
- package/dist/types/modules/archive/index.base.d.ts +4 -4
- package/dist/types/modules/archive/index.browser.d.ts +4 -3
- package/dist/types/modules/archive/index.d.ts +4 -3
- package/dist/types/modules/archive/{unzip/stream.base.d.ts → parse.base.d.ts} +4 -38
- package/dist/types/modules/archive/{unzip/stream.browser.d.ts → parse.browser.d.ts} +2 -2
- package/dist/types/modules/archive/{unzip/stream.d.ts → parse.d.ts} +3 -3
- package/dist/types/modules/archive/{compression/streaming-compress.browser.d.ts → streaming-compress.browser.d.ts} +1 -1
- package/dist/types/modules/archive/{zip/stream.d.ts → streaming-zip.d.ts} +6 -29
- package/dist/types/modules/archive/utils/zip-extra-fields.d.ts +1 -1
- package/dist/types/modules/archive/utils/zip-extra.d.ts +18 -0
- package/dist/types/modules/archive/zip-builder.d.ts +117 -0
- package/dist/types/modules/archive/zip-constants.d.ts +18 -0
- package/dist/types/modules/archive/{zip/zip-entry-metadata.d.ts → zip-entry-metadata.d.ts} +1 -1
- package/dist/{browser/modules/archive/unzip → types/modules/archive}/zip-parser.d.ts +1 -1
- package/dist/types/modules/archive/{zip-spec/zip-records.d.ts → zip-records.d.ts} +0 -20
- package/dist/types/modules/excel/stream/workbook-writer.browser.d.ts +1 -1
- package/dist/types/modules/stream/streams.browser.d.ts +30 -28
- package/package.json +1 -5
- package/dist/browser/modules/archive/internal/byte-queue.d.ts +0 -33
- package/dist/browser/modules/archive/internal/byte-queue.js +0 -407
- package/dist/browser/modules/archive/io/archive-sink.d.ts +0 -9
- package/dist/browser/modules/archive/io/archive-sink.js +0 -77
- package/dist/browser/modules/archive/io/archive-source.d.ts +0 -8
- package/dist/browser/modules/archive/io/archive-source.js +0 -107
- package/dist/browser/modules/archive/unzip/index.d.ts +0 -40
- package/dist/browser/modules/archive/unzip/index.js +0 -164
- package/dist/browser/modules/archive/unzip/stream.base.js +0 -1022
- package/dist/browser/modules/archive/utils/async-queue.d.ts +0 -7
- package/dist/browser/modules/archive/utils/async-queue.js +0 -103
- package/dist/browser/modules/archive/utils/compressibility.d.ts +0 -10
- package/dist/browser/modules/archive/utils/compressibility.js +0 -57
- package/dist/browser/modules/archive/utils/pattern-scanner.d.ts +0 -21
- package/dist/browser/modules/archive/utils/pattern-scanner.js +0 -27
- package/dist/browser/modules/archive/zip/index.d.ts +0 -42
- package/dist/browser/modules/archive/zip/index.js +0 -157
- package/dist/browser/modules/archive/zip/zip-bytes.d.ts +0 -73
- package/dist/browser/modules/archive/zip/zip-bytes.js +0 -239
- package/dist/browser/modules/archive/zip-spec/zip-records.js +0 -126
- package/dist/cjs/modules/archive/internal/byte-queue.js +0 -411
- package/dist/cjs/modules/archive/io/archive-sink.js +0 -82
- package/dist/cjs/modules/archive/io/archive-source.js +0 -114
- package/dist/cjs/modules/archive/unzip/index.js +0 -170
- package/dist/cjs/modules/archive/unzip/stream.base.js +0 -1044
- package/dist/cjs/modules/archive/utils/async-queue.js +0 -106
- package/dist/cjs/modules/archive/utils/compressibility.js +0 -60
- package/dist/cjs/modules/archive/utils/pattern-scanner.js +0 -31
- package/dist/cjs/modules/archive/zip/index.js +0 -162
- package/dist/cjs/modules/archive/zip/zip-bytes.js +0 -242
- package/dist/cjs/modules/archive/zip-spec/zip-records.js +0 -136
- package/dist/esm/modules/archive/internal/byte-queue.js +0 -407
- package/dist/esm/modules/archive/io/archive-sink.js +0 -77
- package/dist/esm/modules/archive/io/archive-source.js +0 -107
- package/dist/esm/modules/archive/unzip/index.js +0 -164
- package/dist/esm/modules/archive/unzip/stream.base.js +0 -1022
- package/dist/esm/modules/archive/utils/async-queue.js +0 -103
- package/dist/esm/modules/archive/utils/compressibility.js +0 -57
- package/dist/esm/modules/archive/utils/pattern-scanner.js +0 -27
- package/dist/esm/modules/archive/zip/index.js +0 -157
- package/dist/esm/modules/archive/zip/zip-bytes.js +0 -239
- package/dist/esm/modules/archive/zip-spec/zip-records.js +0 -126
- package/dist/types/modules/archive/internal/byte-queue.d.ts +0 -33
- package/dist/types/modules/archive/io/archive-sink.d.ts +0 -9
- package/dist/types/modules/archive/io/archive-source.d.ts +0 -8
- package/dist/types/modules/archive/unzip/index.d.ts +0 -40
- package/dist/types/modules/archive/utils/async-queue.d.ts +0 -7
- package/dist/types/modules/archive/utils/compressibility.d.ts +0 -10
- package/dist/types/modules/archive/utils/pattern-scanner.d.ts +0 -21
- package/dist/types/modules/archive/zip/index.d.ts +0 -42
- package/dist/types/modules/archive/zip/zip-bytes.d.ts +0 -73
- /package/dist/browser/modules/archive/{compression/compress.base.d.ts → compress.base.d.ts} +0 -0
- /package/dist/browser/modules/archive/{compression/crc32.base.d.ts → crc32.base.d.ts} +0 -0
- /package/dist/browser/modules/archive/{compression/crc32.base.js → crc32.base.js} +0 -0
- /package/dist/browser/modules/archive/{compression/crc32.browser.js → crc32.browser.js} +0 -0
- /package/dist/browser/modules/archive/{compression/deflate-fallback.d.ts → deflate-fallback.d.ts} +0 -0
- /package/dist/browser/modules/archive/{unzip/extract.js → extract.js} +0 -0
- /package/dist/browser/modules/archive/{compression/streaming-compress.base.d.ts → streaming-compress.base.d.ts} +0 -0
- /package/dist/browser/modules/archive/{compression/streaming-compress.base.js → streaming-compress.base.js} +0 -0
- /package/dist/browser/modules/archive/{zip-spec/zip-entry-info.d.ts → zip-entry-info.d.ts} +0 -0
- /package/dist/browser/modules/archive/{zip-spec/zip-entry-info.js → zip-entry-info.js} +0 -0
- /package/dist/browser/modules/archive/{zip/zip-entry-metadata.d.ts → zip-entry-metadata.d.ts} +0 -0
- /package/dist/cjs/modules/archive/{compression/crc32.base.js → crc32.base.js} +0 -0
- /package/dist/cjs/modules/archive/{compression/crc32.browser.js → crc32.browser.js} +0 -0
- /package/dist/cjs/modules/archive/{unzip/extract.js → extract.js} +0 -0
- /package/dist/cjs/modules/archive/{compression/streaming-compress.base.js → streaming-compress.base.js} +0 -0
- /package/dist/cjs/modules/archive/{zip-spec/zip-entry-info.js → zip-entry-info.js} +0 -0
- /package/dist/esm/modules/archive/{compression/crc32.base.js → crc32.base.js} +0 -0
- /package/dist/esm/modules/archive/{compression/crc32.browser.js → crc32.browser.js} +0 -0
- /package/dist/esm/modules/archive/{unzip/extract.js → extract.js} +0 -0
- /package/dist/esm/modules/archive/{compression/streaming-compress.base.js → streaming-compress.base.js} +0 -0
- /package/dist/esm/modules/archive/{zip-spec/zip-entry-info.js → zip-entry-info.js} +0 -0
- /package/dist/types/modules/archive/{compression/compress.base.d.ts → compress.base.d.ts} +0 -0
- /package/dist/types/modules/archive/{compression/compress.d.ts → compress.d.ts} +0 -0
- /package/dist/types/modules/archive/{compression/crc32.base.d.ts → crc32.base.d.ts} +0 -0
- /package/dist/types/modules/archive/{compression/crc32.browser.d.ts → crc32.browser.d.ts} +0 -0
- /package/dist/types/modules/archive/{compression/crc32.d.ts → crc32.d.ts} +0 -0
- /package/dist/types/modules/archive/{compression/deflate-fallback.d.ts → deflate-fallback.d.ts} +0 -0
- /package/dist/types/modules/archive/{unzip/extract.d.ts → extract.d.ts} +0 -0
- /package/dist/types/modules/archive/{compression/streaming-compress.base.d.ts → streaming-compress.base.d.ts} +0 -0
- /package/dist/types/modules/archive/{compression/streaming-compress.d.ts → streaming-compress.d.ts} +0 -0
- /package/dist/types/modules/archive/{zip-spec/zip-entry-info.d.ts → zip-entry-info.d.ts} +0 -0
|
@@ -0,0 +1,666 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.DEFAULT_PARSE_THRESHOLD_BYTES = exports.PullStream = exports.parseExtraField = exports.END_OF_CENTRAL_DIRECTORY_FORMAT = exports.CENTRAL_DIRECTORY_FILE_HEADER_FORMAT = exports.DATA_DESCRIPTOR_FORMAT = exports.LOCAL_FILE_HEADER_FORMAT = exports.CRX_HEADER_FORMAT = void 0;
|
|
4
|
+
exports.decodeZipEntryPath = decodeZipEntryPath;
|
|
5
|
+
exports.isZipUnicodeFlag = isZipUnicodeFlag;
|
|
6
|
+
exports.isZipDirectoryPath = isZipDirectoryPath;
|
|
7
|
+
exports.getZipEntryType = getZipEntryType;
|
|
8
|
+
exports.buildZipEntryProps = buildZipEntryProps;
|
|
9
|
+
exports.resolveZipEntryLastModifiedDateTime = resolveZipEntryLastModifiedDateTime;
|
|
10
|
+
exports.hasDataDescriptorFlag = hasDataDescriptorFlag;
|
|
11
|
+
exports.isFileSizeKnown = isFileSizeKnown;
|
|
12
|
+
exports.autodrain = autodrain;
|
|
13
|
+
exports.bufferStream = bufferStream;
|
|
14
|
+
exports.readCrxHeader = readCrxHeader;
|
|
15
|
+
exports.readLocalFileHeader = readLocalFileHeader;
|
|
16
|
+
exports.readDataDescriptor = readDataDescriptor;
|
|
17
|
+
exports.consumeCentralDirectoryFileHeader = consumeCentralDirectoryFileHeader;
|
|
18
|
+
exports.consumeEndOfCentralDirectoryRecord = consumeEndOfCentralDirectoryRecord;
|
|
19
|
+
exports.scanValidatedDataDescriptor = scanValidatedDataDescriptor;
|
|
20
|
+
exports.streamUntilValidatedDataDescriptor = streamUntilValidatedDataDescriptor;
|
|
21
|
+
exports.runParseLoop = runParseLoop;
|
|
22
|
+
const timestamps_1 = require("./utils/timestamps.js");
|
|
23
|
+
const _stream_1 = require("../stream/index.js");
|
|
24
|
+
const parse_buffer_1 = require("./utils/parse-buffer.js");
|
|
25
|
+
const byte_queue_1 = require("./byte-queue.js");
|
|
26
|
+
const bytes_1 = require("./utils/bytes.js");
|
|
27
|
+
const binary_1 = require("./utils/binary.js");
|
|
28
|
+
const zip_extra_fields_1 = require("./utils/zip-extra-fields.js");
|
|
29
|
+
const zip_constants_1 = require("./zip-constants.js");
|
|
30
|
+
// Shared parseBuffer() formats
// Each format is an ordered list of [fieldName, byteWidth] pairs that is fed
// to parse_buffer's parseTyped() to decode a fixed-size ZIP/CRX record.
// Widths follow the ZIP APPNOTE record layouts (signatures excluded: the
// 4-byte signature is consumed separately before these formats are applied).
// CRX (Chrome extension) header fields that follow the "Cr24" magic.
// Total: 12 bytes — matches the pull(12) in readCrxHeader().
exports.CRX_HEADER_FORMAT = [
    ["version", 4],
    ["pubKeyLength", 4],
    ["signatureLength", 4]
];
// Local file header fields after the PK\x03\x04 signature.
// Total: 26 bytes — matches the pull(26) in readLocalFileHeader().
exports.LOCAL_FILE_HEADER_FORMAT = [
    ["versionsNeededToExtract", 2],
    ["flags", 2],
    ["compressionMethod", 2],
    ["lastModifiedTime", 2],
    ["lastModifiedDate", 2],
    ["crc32", 4],
    ["compressedSize", 4],
    ["uncompressedSize", 4],
    ["fileNameLength", 2],
    ["extraFieldLength", 2]
];
// Data descriptor record, including its (optional-in-spec) signature word.
// Total: 16 bytes — matches the pull(16) in readDataDescriptor().
exports.DATA_DESCRIPTOR_FORMAT = [
    ["dataDescriptorSignature", 4],
    ["crc32", 4],
    ["compressedSize", 4],
    ["uncompressedSize", 4]
];
// Central directory file header fields after the PK\x01\x02 signature.
// Total: 42 bytes — matches the pull(42) in consumeCentralDirectoryFileHeader().
exports.CENTRAL_DIRECTORY_FILE_HEADER_FORMAT = [
    ["versionMadeBy", 2],
    ["versionsNeededToExtract", 2],
    ["flags", 2],
    ["compressionMethod", 2],
    ["lastModifiedTime", 2],
    ["lastModifiedDate", 2],
    ["crc32", 4],
    ["compressedSize", 4],
    ["uncompressedSize", 4],
    ["fileNameLength", 2],
    ["extraFieldLength", 2],
    ["fileCommentLength", 2],
    ["diskNumber", 2],
    ["internalFileAttributes", 2],
    ["externalFileAttributes", 4],
    ["offsetToLocalFileHeader", 4]
];
// End-of-central-directory record fields after the PK\x05\x06 signature.
// Total: 18 bytes — matches the pull(18) in consumeEndOfCentralDirectoryRecord().
exports.END_OF_CENTRAL_DIRECTORY_FORMAT = [
    ["diskNumber", 2],
    ["diskStart", 2],
    ["numberOfRecordsOnDisk", 2],
    ["numberOfRecords", 2],
    ["sizeOfCentralDirectory", 4],
    ["offsetToStartOfCentralDirectory", 4],
    ["commentLength", 2]
];
// Module-level codec singletons (default encoding: UTF-8).
const textDecoder = new TextDecoder();
const textEncoder = new TextEncoder();
|
|
83
|
+
/**
 * Decodes a ZIP entry path from its raw bytes to a string (UTF-8, the
 * TextDecoder default).
 */
function decodeZipEntryPath(pathBuffer) {
    const decoder = new TextDecoder();
    return decoder.decode(pathBuffer);
}
|
|
86
|
+
/**
 * True when general-purpose bit 11 (0x800, "language encoding" / UTF-8 names)
 * is set. A missing flags value is treated as 0.
 */
function isZipUnicodeFlag(flags) {
    const bits = flags || 0;
    return (bits & 0x800) === 0x800;
}
|
|
89
|
+
/**
 * True when the path ends with a slash or backslash separator, which is how
 * ZIP archives conventionally mark directory entries. Empty paths are not
 * directories.
 */
function isZipDirectoryPath(path) {
    // charAt(-1) yields "" for the empty string, so no explicit length guard
    // is needed to return false there.
    const lastChar = path.charAt(path.length - 1);
    return lastChar === "/" || lastChar === "\\";
}
|
|
96
|
+
/**
 * Classifies an entry: "Directory" only when it is zero-length AND the path
 * ends in a separator; everything else is a "File".
 */
function getZipEntryType(path, uncompressedSize) {
    if (uncompressedSize !== 0) {
        return "File";
    }
    const lastChar = path.charAt(path.length - 1);
    return lastChar === "/" || lastChar === "\\" ? "Directory" : "File";
}
|
|
99
|
+
/**
 * Assembles the common entry props object: decoded path, its raw bytes, and
 * a flags sub-object carrying the UTF-8 ("unicode") bit-11 indicator.
 */
function buildZipEntryProps(path, pathBuffer, flags) {
    const isUnicode = ((flags || 0) & 0x800) !== 0;
    return { path, pathBuffer, flags: { isUnicode } };
}
|
|
108
|
+
/**
 * Resolves an entry's last-modified timestamp. The DOS date/time from the
 * header is the baseline; when the extra fields carry a Unix mtime
 * (extraFields.mtimeUnixSeconds), it is reconciled via
 * resolveZipLastModifiedDateFromUnixSeconds instead.
 */
function resolveZipEntryLastModifiedDateTime(vars, extraFields) {
    const dosDate = vars.lastModifiedDate || 0;
    const dosTime = vars.lastModifiedTime || 0;
    const unixSecondsMtime = extraFields.mtimeUnixSeconds;
    if (unixSecondsMtime !== undefined) {
        return (0, timestamps_1.resolveZipLastModifiedDateFromUnixSeconds)(dosDate, dosTime, unixSecondsMtime);
    }
    return (0, timestamps_1.parseDosDateTimeUTC)(dosDate, dosTime);
}
// Re-export the extra-field parser under its historical public name.
exports.parseExtraField = zip_extra_fields_1.parseZipExtraFields;
|
|
119
|
+
/**
 * True when general-purpose bit 3 (0x08) is set, meaning sizes/CRC arrive in
 * a trailing data descriptor rather than in the local header.
 */
function hasDataDescriptorFlag(flags) {
    return Boolean((flags || 0) & 0x08);
}
|
|
122
|
+
/**
 * The compressed size is known up front either when the data-descriptor flag
 * (bit 3) is clear, or when the header nonetheless recorded a non-zero size.
 */
function isFileSizeKnown(flags, compressedSize) {
    if ((compressedSize || 0) > 0) {
        return true;
    }
    return ((flags || 0) & 0x08) === 0;
}
|
|
125
|
+
/**
 * Pipes `stream` through a discarding Transform so its data is consumed and
 * dropped. The returned stream is augmented with a `promise()` method that
 * resolves on "finish" and rejects on "error".
 */
function autodrain(stream) {
    const discardSink = new _stream_1.Transform({
        transform(_chunk, _encoding, done) {
            // Swallow the chunk; just signal completion.
            done();
        }
    });
    const draining = stream.pipe(discardSink);
    draining.promise = () =>
        new Promise((resolve, reject) => {
            draining.on("finish", resolve);
            draining.on("error", reject);
        });
    return draining;
}
|
|
137
|
+
/**
 * Collects all data from a readable stream into a single Uint8Array.
 * Rejects if either the source entry or the collecting stream errors.
 */
function bufferStream(entry) {
    return new Promise((resolve, reject) => {
        const collected = [];
        const collector = new _stream_1.Transform({
            transform(chunk, _encoding, done) {
                collected.push(chunk);
                done();
            }
        });
        collector.on("finish", () => {
            // Single-chunk fast path avoids an unnecessary concat/copy.
            if (collected.length === 1) {
                resolve(collected[0]);
            }
            else {
                resolve((0, _stream_1.concatUint8Arrays)(collected));
            }
        });
        collector.on("error", reject);
        entry.on("error", reject).pipe(collector);
    });
}
|
|
156
|
+
// Cached typeof result used to test whether a write callback is pending.
const STR_FUNCTION = "function";
/**
 * Duplex stream that buffers written bytes in a ByteQueue and lets a consumer
 * pull exact byte counts (pull/stream with a number) or read up to a byte
 * pattern (pull/stream with a Uint8Array), as needed for sequential ZIP
 * parsing.
 *
 * Backpressure contract: _write() stores its callback in `this.cb` and emits
 * "chunk"; the callback is only invoked (via _maybeReleaseWriteCallback) once
 * the buffered data has been consumed, which keeps the upstream writer paused
 * until the consumer catches up.
 */
class PullStream extends _stream_1.Duplex {
    /** All currently buffered, unconsumed bytes (delegates to ByteQueue.view()). */
    get buffer() {
        return this._queue.view();
    }
    /** Replaces the buffered bytes wholesale (delegates to ByteQueue.reset()). */
    set buffer(value) {
        this._queue.reset(value);
    }
    constructor() {
        // objectMode + decodeStrings:false so chunks arrive untouched.
        super({ decodeStrings: false, objectMode: true });
        this._queue = new byte_queue_1.ByteQueue();
        this.finished = false;
        this.on("finish", () => {
            // Remember EOF and wake any in-progress pull loop so it can
            // terminate (the `false` argument distinguishes this wake-up from
            // a data arrival, though pull() ignores it).
            this.finished = true;
            this.emit("chunk", false);
        });
    }
    _write(chunk, _encoding, callback) {
        // Strings are encoded to UTF-8 bytes; Uint8Array chunks pass through.
        const data = typeof chunk === "string" ? textEncoder.encode(chunk) : chunk;
        this._queue.append(data);
        // Hold the callback: releasing it later is how backpressure is applied.
        this.cb = callback;
        this.emit("chunk");
    }
    // Reading is driven entirely by pull()/stream(); nothing to do here.
    _read() { }
    /** Invokes and clears the pending _write callback, if any, letting the upstream writer continue. */
    _maybeReleaseWriteCallback() {
        if (typeof this.cb === STR_FUNCTION) {
            const callback = this.cb;
            this.cb = undefined;
            callback();
        }
    }
    /**
     * The `eof` parameter is interpreted as `file_length` if the type is number
     * otherwise (i.e. Uint8Array) it is interpreted as a pattern signaling end of stream
     */
    stream(eof, includeEof) {
        const p = new _stream_1.PassThrough();
        let done = false;
        const cb = () => {
            this._maybeReleaseWriteCallback();
        };
        // One drain step; re-invoked on every "chunk" event and after each
        // successful PassThrough write until `done`.
        const pull = () => {
            let packet;
            const available = this._queue.length;
            if (available) {
                if (typeof eof === "number") {
                    // Byte-count mode: emit up to `eof` remaining bytes.
                    const toRead = Math.min(eof, available);
                    if (toRead > 0) {
                        packet = this._queue.read(toRead);
                        eof -= toRead;
                    }
                    done = done || eof === 0;
                }
                else {
                    // Pattern mode: emit bytes up to (optionally including) the
                    // first occurrence of the `eof` byte pattern.
                    const view = this._queue.view();
                    let match = (0, bytes_1.indexOfUint8ArrayPattern)(view, eof);
                    if (match !== -1) {
                        // store signature match byte offset to allow us to reference
                        // this for zip64 offset
                        this.match = match;
                        if (includeEof) {
                            match = match + eof.length;
                        }
                        if (match > 0) {
                            packet = this._queue.read(match);
                        }
                        done = true;
                    }
                    else {
                        // No match yet: emit everything except a pattern-length
                        // tail, which might contain the start of a match that
                        // completes in the next chunk.
                        const len = view.length - eof.length;
                        if (len <= 0) {
                            // Not enough data to emit anything; ask for more.
                            cb();
                        }
                        else {
                            packet = this._queue.read(len);
                        }
                    }
                }
                if (packet) {
                    p.write(packet, () => {
                        // Release the writer when the queue is (effectively) drained;
                        // in pattern mode a pattern-length tail may legitimately remain.
                        if (this._queue.length === 0 ||
                            (typeof eof !== "number" && eof.length && this._queue.length <= eof.length)) {
                            cb();
                        }
                        if (done) {
                            cb();
                            this.removeListener("chunk", pull);
                            p.end();
                            return;
                        }
                        // Continue draining regardless of downstream read timing.
                        queueMicrotask(pull);
                    });
                    return;
                }
            }
            if (!done) {
                if (this.finished) {
                    // Source ended before the requested bytes/pattern arrived.
                    this.removeListener("chunk", pull);
                    cb();
                    p.destroy(new Error("FILE_ENDED"));
                    return;
                }
            }
            else {
                this.removeListener("chunk", pull);
                cb();
                p.end();
            }
        };
        this.on("chunk", pull);
        pull();
        return p;
    }
    /**
     * Promise variant of stream(): resolves with all pulled bytes as a single
     * Uint8Array. Rejects with FILE_ENDED if the source has already finished,
     * or with any error emitted by this stream while pulling.
     */
    pull(eof, includeEof) {
        if (eof === 0) {
            return Promise.resolve(new Uint8Array(0));
        }
        // If we already have the required data in buffer
        // we can resolve the request immediately
        if (typeof eof === "number" && this._queue.length >= eof) {
            const data = this._queue.read(eof);
            // If we drained the internal buffer, allow the upstream writer to continue.
            if (this._queue.length === 0) {
                this._maybeReleaseWriteCallback();
            }
            return Promise.resolve(data);
        }
        // Otherwise we stream until we have it
        const chunks = [];
        const concatStream = new _stream_1.Transform({
            transform(d, _encoding, cb) {
                chunks.push(d);
                cb();
            }
        });
        let pullStreamRejectHandler;
        return new Promise((resolve, reject) => {
            pullStreamRejectHandler = (e) => {
                // Stash the error so later callers can observe it on the instance.
                this.__emittedError = e;
                reject(e);
            };
            if (this.finished) {
                return reject(new Error("FILE_ENDED"));
            }
            this.once("error", pullStreamRejectHandler); // reject any errors from pullstream itself
            this.stream(eof, includeEof)
                .on("error", reject)
                .pipe(concatStream)
                .on("finish", () => {
                resolve(chunks.length === 1 ? chunks[0] : (0, _stream_1.concatUint8Arrays)(chunks));
            })
                .on("error", reject);
        }).finally(() => {
            this.removeListener("error", pullStreamRejectHandler);
        });
    }
    /** Convenience alias: pull until `pattern` is found (same semantics as pull with a Uint8Array). */
    pullUntil(pattern, includeEof) {
        return this.pull(pattern, includeEof);
    }
}
exports.PullStream = PullStream;
|
|
318
|
+
/**
 * Reads and parses a CRX (Chrome extension) header: the 12-byte fixed part
 * followed by the public key and signature blobs, which are attached to the
 * returned header object as `publicKey` and `signature`.
 */
async function readCrxHeader(pull) {
    const { parseTyped } = parse_buffer_1;
    const header = parseTyped(await pull(12), exports.CRX_HEADER_FORMAT);
    const pubKeyLength = header.pubKeyLength || 0;
    const signatureLength = header.signatureLength || 0;
    // Both variable-length blobs are pulled in one read, then sliced apart.
    const keyAndSig = await pull(pubKeyLength + signatureLength);
    header.publicKey = keyAndSig.subarray(0, pubKeyLength);
    header.signature = keyAndSig.subarray(pubKeyLength);
    return header;
}
|
|
328
|
+
/**
 * Reads the 26-byte local file header body (signature already consumed),
 * then its variable-length file name and extra field blobs.
 * Returns { vars, fileNameBuffer, extraFieldData }.
 */
async function readLocalFileHeader(pull) {
    const { parseTyped } = parse_buffer_1;
    const vars = parseTyped(await pull(26), exports.LOCAL_FILE_HEADER_FORMAT);
    const fileNameBuffer = await pull(vars.fileNameLength || 0);
    const extraFieldData = await pull(vars.extraFieldLength || 0);
    return { vars, fileNameBuffer, extraFieldData };
}
|
|
335
|
+
/**
 * Reads and parses a 16-byte data descriptor record
 * (signature + crc32 + compressedSize + uncompressedSize).
 */
async function readDataDescriptor(pull) {
    const { parseTyped } = parse_buffer_1;
    const raw = await pull(16);
    return parseTyped(raw, exports.DATA_DESCRIPTOR_FORMAT);
}
|
|
339
|
+
/**
 * Consumes (reads and discards) one central directory file header: the
 * 42-byte fixed body plus its file name, extra field, and comment blobs.
 */
async function consumeCentralDirectoryFileHeader(pull) {
    const { parseTyped } = parse_buffer_1;
    const vars = parseTyped(await pull(42), exports.CENTRAL_DIRECTORY_FILE_HEADER_FORMAT);
    // The three trailing variable-length sections are skipped in order.
    for (const length of [vars.fileNameLength, vars.extraFieldLength, vars.fileCommentLength]) {
        await pull(length || 0);
    }
}
|
|
346
|
+
/**
 * Consumes (reads and discards) the end-of-central-directory record: the
 * 18-byte fixed body plus the trailing archive comment.
 */
async function consumeEndOfCentralDirectoryRecord(pull) {
    const { parseTyped } = parse_buffer_1;
    const vars = parseTyped(await pull(18), exports.END_OF_CENTRAL_DIRECTORY_FORMAT);
    await pull(vars.commentLength || 0);
}
|
|
351
|
+
// =============================================================================
|
|
352
|
+
// Validated Data Descriptor Scan (shared by Node + Browser)
|
|
353
|
+
// =============================================================================
|
|
354
|
+
/**
 * Check whether `sig` is one of the known ZIP record signatures that can
 * legitimately follow a data descriptor (local header, central directory
 * header, EOCD, or the two ZIP64 EOCD records).
 *
 * @param {number} sig Unsigned 32-bit signature value.
 * @returns {boolean} True when `sig` matches a known record signature.
 */
function isValidZipRecordSignature(sig) {
    return (sig === zip_constants_1.LOCAL_FILE_HEADER_SIG ||
        sig === zip_constants_1.CENTRAL_DIR_HEADER_SIG ||
        sig === zip_constants_1.END_OF_CENTRAL_DIR_SIG ||
        sig === zip_constants_1.ZIP64_END_OF_CENTRAL_DIR_SIG ||
        sig === zip_constants_1.ZIP64_END_OF_CENTRAL_DIR_LOCATOR_SIG);
}
|
|
366
|
+
/**
 * Read an unsigned 32-bit little-endian integer from `view` at `offset`.
 *
 * The bitwise ORs produce a signed 32-bit intermediate; the final `>>> 0`
 * coerces it back to an unsigned value in [0, 2^32).
 *
 * @param {Uint8Array} view Byte container to read from.
 * @param {number} offset Index of the least-significant byte.
 * @returns {number} Unsigned 32-bit value.
 */
function readUint32LEFromBytes(view, offset) {
    let value = view[offset] | 0;
    value |= (view[offset + 1] | 0) << 8;
    value |= (view[offset + 2] | 0) << 16;
    value |= (view[offset + 3] | 0) << 24;
    return value >>> 0;
}
|
|
374
|
+
/**
 * Locate the first occurrence of `pattern` in `buffer` at or after
 * `startIndex`.
 *
 * Specialized for the common 4-byte ZIP signature case so the hot scan loop
 * avoids a generic sub-array search; any other pattern length delegates to
 * the shared helper.
 *
 * @param {Uint8Array} buffer Haystack bytes to scan.
 * @param {Uint8Array} pattern Needle bytes (fast path when length === 4).
 * @param {number} startIndex Index to begin scanning from (clamped to 0).
 * @returns {number} Index of the first match, or -1 when absent.
 */
function indexOf4BytesPattern(buffer, pattern, startIndex) {
    if (pattern.length !== 4) {
        // Generic fallback for patterns of any other length.
        return (0, bytes_1.indexOfUint8ArrayPattern)(buffer, pattern, startIndex);
    }
    const first = pattern[0];
    const second = pattern[1];
    const third = pattern[2];
    const fourth = pattern[3];
    const lastPossible = buffer.length - 4;
    let cursor = startIndex | 0;
    if (cursor < 0) {
        cursor = 0;
    }
    if (cursor > lastPossible) {
        // Not enough room left for a 4-byte match (also covers tiny buffers).
        return -1;
    }
    for (; cursor <= lastPossible; cursor++) {
        if (buffer[cursor] === first &&
            buffer[cursor + 1] === second &&
            buffer[cursor + 2] === third &&
            buffer[cursor + 3] === fourth) {
            return cursor;
        }
    }
    return -1;
}
|
|
397
|
+
/**
 * Return a scan-result holder, reusing `out` when the caller supplied one.
 *
 * Reuse lets the hot scan loop avoid allocating a fresh result object per
 * chunk; a fresh object with sentinel values is created otherwise.
 *
 * @param {{foundIndex: number, nextSearchFrom: number}} [out] Optional object to reuse.
 * @returns {{foundIndex: number, nextSearchFrom: number}} The reused or fresh holder.
 */
function initScanResult(out) {
    // `||` (not `??`) matches the original truthiness check on `out`.
    return out || { foundIndex: -1, nextSearchFrom: 0 };
}
|
|
403
|
+
/**
 * Scan for a validated DATA_DESCRIPTOR record boundary.
 *
 * Scanning for the 4-byte signature alone is unsafe because it can appear inside
 * compressed data. We validate a candidate by requiring:
 * - the next 4 bytes after the 16-byte descriptor form a known ZIP record signature, and
 * - the descriptor's compressedSize matches the number of compressed bytes emitted so far.
 *
 * @param {Uint8Array} view Currently buffered (unread) bytes.
 * @param {Uint8Array} dataDescriptorSignature Byte pattern of the descriptor signature.
 * @param {number} bytesEmitted Compressed bytes already flushed downstream for
 *   this entry; bytes still in `view` before a candidate add to this count.
 * @param {number} [startIndex=0] Offset in `view` to resume scanning from.
 * @param {{foundIndex: number, nextSearchFrom: number}} [out] Optional result
 *   object to reuse (avoids per-chunk allocation).
 * @returns {{foundIndex: number, nextSearchFrom: number}} `foundIndex` is the
 *   descriptor's offset in `view`, or -1; `nextSearchFrom` is where the next
 *   scan should resume once more bytes arrive.
 */
function scanValidatedDataDescriptor(view, dataDescriptorSignature, bytesEmitted, startIndex = 0, out) {
    const result = initScanResult(out);
    // Clamp the resume point into [0, view.length].
    let searchFrom = startIndex | 0;
    if (searchFrom < 0) {
        searchFrom = 0;
    }
    if (searchFrom > view.length) {
        searchFrom = view.length;
    }
    // To avoid missing a signature split across chunk boundaries, we may need
    // to re-check the last (sigLen - 1) bytes on the next scan.
    const sigLen = dataDescriptorSignature.length | 0;
    const overlap = sigLen > 0 ? sigLen - 1 : 0;
    while (searchFrom < view.length) {
        const match = indexOf4BytesPattern(view, dataDescriptorSignature, searchFrom);
        if (match === -1) {
            // No candidate at all: next scan may resume near the tail, keeping
            // `overlap` bytes re-checkable for a split signature.
            result.foundIndex = -1;
            result.nextSearchFrom = Math.max(searchFrom, Math.max(0, view.length - overlap));
            return result;
        }
        const idx = match;
        // Need 16 bytes for descriptor + 4 bytes for next record signature.
        const nextSigOffset = idx + 16;
        if (nextSigOffset + 4 <= view.length) {
            const nextSig = readUint32LEFromBytes(view, nextSigOffset);
            // idx + 8: the descriptor's compressedSize field (after the 4-byte
            // signature and 4-byte CRC per the 16-byte descriptor layout).
            const descriptorCompressedSize = readUint32LEFromBytes(view, idx + 8);
            // `>>> 0` truncates to 32 bits, matching the width of the field we
            // compare against.
            const expectedCompressedSize = (bytesEmitted + idx) >>> 0;
            if (isValidZipRecordSignature(nextSig) &&
                descriptorCompressedSize === expectedCompressedSize) {
                result.foundIndex = idx;
                result.nextSearchFrom = idx;
                return result;
            }
            // False positive inside compressed data: keep scanning past it.
            searchFrom = idx + 1;
            continue;
        }
        // Not enough bytes to validate yet. Re-check this candidate once more bytes arrive.
        result.foundIndex = -1;
        result.nextSearchFrom = idx;
        return result;
    }
    // Scanned everything without a candidate; keep the overlap tail re-checkable.
    result.foundIndex = -1;
    result.nextSearchFrom = Math.max(searchFrom, Math.max(0, view.length - overlap));
    return result;
}
|
|
456
|
+
/**
 * Stream compressed file data until we reach a validated DATA_DESCRIPTOR boundary.
 *
 * This encapsulates the shared logic used by both Node and browser parsers.
 *
 * @param options.source Byte source exposing getLength()/getView()/read(n)/
 *   isFinished()/onDataAvailable(cb) and optionally maybeReleaseWriteCallback().
 * @param options.dataDescriptorSignature Byte pattern of the descriptor signature.
 * @param options.keepTailBytes Bytes to retain unbuffered-downstream so a
 *   signature split across chunks stays detectable (default 20: 16-byte
 *   descriptor + 4-byte following signature).
 * @param options.errorMessage Error used when the source ends without a
 *   validated descriptor.
 * @returns A PassThrough emitting exactly the entry's compressed bytes; it is
 *   destroyed with an error if the descriptor is never found.
 */
function streamUntilValidatedDataDescriptor(options) {
    const { source, dataDescriptorSignature } = options;
    const keepTailBytes = options.keepTailBytes ?? 20;
    const errorMessage = options.errorMessage ?? "FILE_ENDED: Data descriptor not found";
    const output = new _stream_1.PassThrough();
    let done = false;
    // Total number of compressed bytes already emitted for this entry.
    let bytesEmitted = 0;
    // Offset within the CURRENT buffered view to resume scanning from; rebased
    // every time bytes are flushed out of the source.
    let searchFrom = 0;
    // Reused across calls to avoid a per-chunk allocation.
    const scanResult = { foundIndex: -1, nextSearchFrom: 0 };
    let unsubscribe;
    const cleanup = () => {
        if (unsubscribe) {
            unsubscribe();
            unsubscribe = undefined;
        }
    };
    // Invoked both on new-data notifications and once via microtask below.
    const pull = () => {
        if (done) {
            return;
        }
        while (source.getLength() > 0) {
            const view = source.getView();
            scanValidatedDataDescriptor(view, dataDescriptorSignature, bytesEmitted, searchFrom, scanResult);
            const foundIndex = scanResult.foundIndex;
            searchFrom = scanResult.nextSearchFrom;
            if (foundIndex !== -1) {
                // Descriptor boundary found: flush the bytes before it, then end.
                if (foundIndex > 0) {
                    output.write(source.read(foundIndex));
                    bytesEmitted += foundIndex;
                    // Rebase the resume offset against the bytes just consumed.
                    searchFrom = Math.max(0, searchFrom - foundIndex);
                }
                done = true;
                source.maybeReleaseWriteCallback?.();
                cleanup();
                output.end();
                return;
            }
            // Flush most of the buffered data but keep a tail so a potential signature
            // split across chunks can still be detected/validated.
            const flushLen = Math.max(0, view.length - keepTailBytes);
            if (flushLen > 0) {
                output.write(source.read(flushLen));
                bytesEmitted += flushLen;
                searchFrom = Math.max(0, searchFrom - flushLen);
                if (source.getLength() <= keepTailBytes) {
                    source.maybeReleaseWriteCallback?.();
                }
                return;
            }
            // Need more data.
            break;
        }
        if (!done && source.isFinished()) {
            // Source ended without ever validating a descriptor: fail the stream.
            done = true;
            cleanup();
            output.destroy(new Error(errorMessage));
        }
    };
    unsubscribe = source.onDataAvailable(pull);
    // Kick off an initial scan for data that was already buffered.
    queueMicrotask(pull);
    return output;
}
|
|
524
|
+
/**
 * Default threshold for small file optimization (5MB).
 *
 * Entries whose compressed AND uncompressed sizes are both at or below this
 * limit may be inflated synchronously in memory (see readFileRecord).
 */
exports.DEFAULT_PARSE_THRESHOLD_BYTES = 5 * 1024 * 1024;
// Little-endian byte pattern of the end-of-central-directory signature,
// precomputed once so the resync path in runParseLoop can reuse it.
const endDirectorySignature = (0, binary_1.writeUint32LE)(zip_constants_1.END_OF_CENTRAL_DIR_SIG);
|
|
529
|
+
/**
 * Main parse loop: repeatedly read a 4-byte record signature and dispatch.
 *
 * Handles, in order: CRX wrapper header, local file records (the actual
 * entries), central directory headers, and the end-of-central-directory
 * record, which terminates the loop. An unknown signature after the central
 * directory triggers a resync scan for EOCD; before it, it is a hard error.
 *
 * @param opts Parser options (thresholdBytes, verbose, forceStream, ...).
 * @param io Pull-based byte source (pull / pullUntil / stream / setDone / ...).
 * @param emitter Event sink (emitEntry / emitError / emitClose / ...).
 * @param inflateFactory Creates a streaming inflater for compressed entries.
 * @param state Shared mutable parse state (crxHeader, reachedCD).
 * @param inflateRawSync Optional sync inflate used by the small-file path.
 */
async function runParseLoop(opts, io, emitter, inflateFactory, state, inflateRawSync) {
    const thresholdBytes = opts.thresholdBytes ?? exports.DEFAULT_PARSE_THRESHOLD_BYTES;
    while (true) {
        const sigBytes = await io.pull(4);
        if (sigBytes.length === 0) {
            // Clean end of input between records.
            emitter.emitClose();
            return;
        }
        const signature = (0, binary_1.readUint32LE)(sigBytes, 0);
        // 0x34327243 is ASCII "Cr24" read little-endian — the CRX (Chrome
        // extension) wrapper magic; the archive payload follows the header.
        if (signature === 0x34327243) {
            state.crxHeader = await readCrxHeader(async (length) => io.pull(length));
            emitter.emitCrxHeader(state.crxHeader);
            continue;
        }
        if (signature === zip_constants_1.LOCAL_FILE_HEADER_SIG) {
            await readFileRecord(opts, io, emitter, inflateFactory, state, thresholdBytes, inflateRawSync);
            continue;
        }
        if (signature === zip_constants_1.CENTRAL_DIR_HEADER_SIG) {
            // From here on, unknown signatures are treated as trailing data.
            state.reachedCD = true;
            await consumeCentralDirectoryFileHeader(async (length) => io.pull(length));
            continue;
        }
        if (signature === zip_constants_1.END_OF_CENTRAL_DIR_SIG) {
            await consumeEndOfCentralDirectoryRecord(async (length) => io.pull(length));
            io.setDone();
            emitter.emitClose();
            return;
        }
        if (state.reachedCD) {
            // We are in central directory trailing data; resync by scanning for EOCD signature.
            // consumeEndOfCentralDirectoryRecord expects the EOCD signature to be consumed, so includeEof=true.
            const includeEof = true;
            await io.pullUntil(endDirectorySignature, includeEof);
            await consumeEndOfCentralDirectoryRecord(async (length) => io.pull(length));
            io.setDone();
            emitter.emitClose();
            return;
        }
        // Unknown signature before the central directory: corrupt/unsupported input.
        emitter.emitError(new Error("invalid signature: 0x" + signature.toString(16)));
        emitter.emitClose();
        return;
    }
}
|
|
573
|
+
/**
 * Parse one local file record and stream (or synchronously inflate) its data.
 *
 * Builds the entry object (a PassThrough decorated with path/props/vars and
 * autodrain/buffer helpers), emits it to consumers BEFORE the data is pumped,
 * then either:
 *  - inflates small trusted entries synchronously in memory,
 *  - pipes a known-length byte stream through an inflater, or
 *  - streams until a validated data descriptor when sizes are unknown.
 *
 * @param opts Parser options (verbose, forceStream, ...).
 * @param io Pull-based byte source for this archive.
 * @param emitter Event sink used to surface the entry.
 * @param inflateFactory Creates a streaming inflater for deflated entries.
 * @param state Shared parse state (carries a pending crxHeader, if any).
 * @param thresholdBytes Small-file optimization size limit.
 * @param inflateRawSync Optional synchronous inflate (Node only); when absent
 *   the streaming path is always used.
 */
async function readFileRecord(opts, io, emitter, inflateFactory, state, thresholdBytes, inflateRawSync) {
    const { vars: headerVars, fileNameBuffer, extraFieldData } = await readLocalFileHeader(async (l) => io.pull(l));
    const vars = headerVars;
    // Attach the CRX wrapper header (if one was seen) to every entry.
    if (state.crxHeader) {
        vars.crxHeader = state.crxHeader;
    }
    const fileName = decodeZipEntryPath(fileNameBuffer);
    const entry = new _stream_1.PassThrough();
    // Consumers may call entry.autodrain() synchronously from the 'entry'
    // event; the flag is read again below when choosing the data path.
    let autodraining = false;
    entry.autodrain = function () {
        autodraining = true;
        entry.__autodraining = true;
        return autodrain(entry);
    };
    entry.buffer = function () {
        return bufferStream(entry);
    };
    entry.path = fileName;
    entry.props = buildZipEntryProps(fileName, fileNameBuffer, vars.flags);
    entry.type = getZipEntryType(fileName, vars.uncompressedSize || 0);
    if (opts.verbose) {
        if (entry.type === "Directory") {
            console.log(" creating:", fileName);
        }
        else if (entry.type === "File") {
            if (vars.compressionMethod === 0) {
                console.log(" extracting:", fileName);
            }
            else {
                console.log(" inflating:", fileName);
            }
        }
    }
    const extra = (0, exports.parseExtraField)(extraFieldData, vars);
    vars.lastModifiedDateTime = resolveZipEntryLastModifiedDateTime(vars, extra);
    entry.vars = vars;
    entry.extraFields = extra;
    entry.__autodraining = autodraining;
    const fileSizeKnown = isFileSizeKnown(vars.flags, vars.compressedSize);
    if (fileSizeKnown) {
        entry.size = vars.uncompressedSize || 0;
    }
    // Surface the entry to the consumer before any data is pumped into it.
    if (opts.forceStream) {
        emitter.pushEntry(entry);
    }
    else {
        emitter.emitEntry(entry);
        emitter.pushEntryIfPiped(entry);
    }
    if (opts.verbose) {
        console.log({
            filename: fileName,
            vars: vars,
            extraFields: entry.extraFields
        });
    }
    // Small file optimization: use sync decompression if:
    // 1. Entry sizes are trusted (no data descriptor)
    // 2. File size is known and below threshold
    // 3. inflateRawSync is provided
    // 4. File needs decompression (compressionMethod != 0)
    // 5. Not autodraining
    //
    // We require BOTH compressedSize and uncompressedSize <= thresholdBytes.
    // This prevents materializing large highly-compressible files in memory,
    // which can cause massive peak RSS and negate streaming backpressure.
    const sizesTrusted = !hasDataDescriptorFlag(vars.flags);
    const compressedSize = vars.compressedSize || 0;
    const uncompressedSize = vars.uncompressedSize || 0;
    const useSmallFileOptimization = sizesTrusted &&
        fileSizeKnown &&
        inflateRawSync &&
        vars.compressionMethod !== 0 &&
        !autodraining &&
        compressedSize <= thresholdBytes &&
        uncompressedSize <= thresholdBytes;
    if (useSmallFileOptimization) {
        // Read compressed data directly and decompress synchronously
        const compressedData = await io.pull(compressedSize);
        const decompressedData = inflateRawSync(compressedData);
        entry.end(decompressedData);
        // Wait for entry stream write to complete (not for read/consume)
        await (0, _stream_1.finished)(entry, { readable: false });
        return;
    }
    // Stored (method 0) or autodrained entries pass bytes through unchanged.
    const inflater = vars.compressionMethod && !autodraining ? inflateFactory() : new _stream_1.PassThrough();
    if (fileSizeKnown) {
        await (0, _stream_1.pipeline)(io.stream(vars.compressedSize || 0), inflater, entry);
        return;
    }
    // Sizes unknown: stream until a validated data descriptor, then read it
    // to learn the real uncompressed size.
    await (0, _stream_1.pipeline)(io.streamUntilDataDescriptor(), inflater, entry);
    const dd = await readDataDescriptor(async (l) => io.pull(l));
    entry.size = dd.uncompressedSize || 0;
}
|