@cj-tech-master/excelts 1.0.0 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browser/excelts.iife.js +2568 -1188
- package/dist/browser/excelts.iife.js.map +1 -1
- package/dist/browser/excelts.iife.min.js +21 -19
- package/dist/cjs/index.js +1 -0
- package/dist/cjs/stream/xlsx/workbook-reader.js +2 -2
- package/dist/cjs/stream/xlsx/workbook-writer.js +8 -4
- package/dist/cjs/utils/cell-format.js +815 -0
- package/dist/cjs/utils/cell-matrix.js +37 -2
- package/dist/cjs/utils/parse-sax.js +2 -2
- package/dist/cjs/utils/sheet-utils.js +615 -0
- package/dist/cjs/utils/stream-buf.js +15 -4
- package/dist/cjs/utils/unzip/buffer-stream.js +27 -0
- package/dist/cjs/utils/unzip/index.js +23 -0
- package/dist/cjs/utils/unzip/noop-stream.js +20 -0
- package/dist/cjs/utils/unzip/parse-buffer.js +60 -0
- package/dist/cjs/utils/unzip/parse-datetime.js +23 -0
- package/dist/cjs/utils/unzip/parse-extra-field.js +52 -0
- package/dist/cjs/utils/unzip/parse.js +340 -0
- package/dist/cjs/utils/unzip/pull-stream.js +145 -0
- package/dist/cjs/utils/utils.js +13 -17
- package/dist/cjs/utils/zip-stream.js +29 -33
- package/dist/cjs/xlsx/xlsx.js +1 -2
- package/dist/esm/index.browser.js +1 -0
- package/dist/esm/index.js +1 -0
- package/dist/esm/stream/xlsx/workbook-reader.js +2 -2
- package/dist/esm/stream/xlsx/workbook-writer.js +9 -5
- package/dist/esm/utils/cell-format.js +810 -0
- package/dist/esm/utils/cell-matrix.js +37 -2
- package/dist/esm/utils/parse-sax.js +1 -1
- package/dist/esm/utils/sheet-utils.js +595 -0
- package/dist/esm/utils/stream-buf.js +15 -4
- package/dist/esm/utils/unzip/buffer-stream.js +24 -0
- package/dist/esm/utils/unzip/index.js +12 -0
- package/dist/esm/utils/unzip/noop-stream.js +16 -0
- package/dist/esm/utils/unzip/parse-buffer.js +57 -0
- package/dist/esm/utils/unzip/parse-datetime.js +20 -0
- package/dist/esm/utils/unzip/parse-extra-field.js +49 -0
- package/dist/esm/utils/unzip/parse.js +332 -0
- package/dist/esm/utils/unzip/pull-stream.js +141 -0
- package/dist/esm/utils/utils.js +12 -16
- package/dist/esm/utils/zip-stream.js +30 -34
- package/dist/esm/xlsx/xlsx.js +1 -2
- package/dist/types/doc/column.d.ts +1 -1
- package/dist/types/doc/worksheet.d.ts +2 -2
- package/dist/types/index.browser.d.ts +1 -0
- package/dist/types/index.d.ts +1 -0
- package/dist/types/stream/xlsx/workbook-writer.d.ts +1 -0
- package/dist/types/utils/cell-format.d.ts +32 -0
- package/dist/types/utils/sheet-utils.d.ts +203 -0
- package/dist/types/utils/unzip/buffer-stream.d.ts +9 -0
- package/dist/types/utils/unzip/index.d.ts +12 -0
- package/dist/types/utils/unzip/noop-stream.d.ts +13 -0
- package/dist/types/utils/unzip/parse-buffer.d.ts +24 -0
- package/dist/types/utils/unzip/parse-datetime.d.ts +12 -0
- package/dist/types/utils/unzip/parse-extra-field.d.ts +18 -0
- package/dist/types/utils/unzip/parse.d.ts +70 -0
- package/dist/types/utils/unzip/pull-stream.d.ts +24 -0
- package/dist/types/utils/utils.d.ts +5 -2
- package/dist/types/utils/zip-stream.d.ts +5 -1
- package/package.json +35 -32
- package/dist/cjs/utils/browser-buffer-decode.js +0 -13
- package/dist/cjs/utils/browser-buffer-encode.js +0 -13
- package/dist/cjs/utils/browser.js +0 -6
- package/dist/esm/utils/browser-buffer-decode.js +0 -11
- package/dist/esm/utils/browser-buffer-encode.js +0 -11
- package/dist/esm/utils/browser.js +0 -3
- package/dist/types/utils/browser-buffer-decode.d.ts +0 -2
- package/dist/types/utils/browser-buffer-encode.d.ts +0 -2
- package/dist/types/utils/browser.d.ts +0 -1
package/dist/esm/utils/stream-buf.js
@@ -198,14 +198,24 @@ inherits(StreamBuf, Duplex, {
     if (data instanceof StringBuf || (data && data.constructor?.name === "StringBuf")) {
       chunk = new StringBufChunk(data);
     }
-    else if (data instanceof Buffer) {
+    else if (Buffer.isBuffer(data)) {
+      // Use Buffer.isBuffer() instead of instanceof for cross-realm compatibility
+      // (e.g., Web Workers where Buffer polyfill instances may differ)
       chunk = new BufferChunk(data);
     }
-    else if (
+    else if (ArrayBuffer.isView(data)) {
+      // Handle typed arrays (Uint8Array, Int8Array, etc.) - cross-realm safe
+      chunk = new BufferChunk(Buffer.from(data.buffer, data.byteOffset, data.byteLength));
+    }
+    else if (data instanceof ArrayBuffer) {
+      // Handle ArrayBuffer - convert to Buffer
+      chunk = new BufferChunk(Buffer.from(data));
+    }
+    else if (typeof data === "string" || data instanceof String) {
       chunk = new StringChunk(String(data), encoding);
     }
     else {
-      throw new Error("Chunk must be one of type String, Buffer or StringBuf.");
+      throw new Error("Chunk must be one of type String, Buffer, Uint8Array, ArrayBuffer or StringBuf.");
     }
     // now, do something with the chunk
     if (this.pipes.length) {
@@ -221,7 +231,8 @@ inherits(StreamBuf, Duplex, {
       }
       else {
        this._writeToBuffers(chunk);
-
+        // Use queueMicrotask for cross-platform compatibility (ES2020+)
+        queueMicrotask(() => callback());
       }
     }
     else {
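The stream-buf change swaps `instanceof` checks for `Buffer.isBuffer()` and `ArrayBuffer.isView()`, which match by brand rather than by constructor identity and therefore keep working when a chunk was created by a different copy of the Buffer class (another realm, a Web Worker bundle, a second polyfill instance). A minimal standalone sketch of the same normalization; `toBuffer` is a hypothetical helper for illustration, not part of the package:

```js
// Normalize any supported binary chunk type to a Buffer, cross-realm safe.
function toBuffer(data) {
  if (Buffer.isBuffer(data)) {
    return data; // unlike `data instanceof Buffer`, works across realms
  }
  if (ArrayBuffer.isView(data)) {
    // Wrap the typed array's underlying memory without copying
    return Buffer.from(data.buffer, data.byteOffset, data.byteLength);
  }
  if (data instanceof ArrayBuffer) {
    return Buffer.from(data); // creates a view over the ArrayBuffer's memory
  }
  throw new Error("Chunk must be a Buffer, typed array or ArrayBuffer.");
}

console.log(toBuffer(new Uint8Array([1, 2, 3]))); // <Buffer 01 02 03>
```

The second hunk's `queueMicrotask(() => callback())` serves a similar portability goal: `queueMicrotask` is standard in both modern Node.js and browsers, so the deferred callback behaves the same in either environment.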
package/dist/esm/utils/unzip/buffer-stream.js
@@ -0,0 +1,24 @@
+/**
+ * Unzipper buffer-stream module
+ * Original source: https://github.com/ZJONSSON/node-unzipper
+ * License: MIT
+ * Copyright (c) 2012 - 2013 Near Infinity Corporation
+ * Commits in this fork are (c) Ziggy Jonsson (ziggy.jonsson.nyc@gmail.com)
+ */
+import { Transform } from "stream";
+export function bufferStream(entry) {
+  return new Promise((resolve, reject) => {
+    const chunks = [];
+    const stream = new Transform({
+      transform(d, _encoding, cb) {
+        chunks.push(d);
+        cb();
+      }
+    });
+    stream.on("finish", () => {
+      resolve(Buffer.concat(chunks));
+    });
+    stream.on("error", reject);
+    entry.on("error", reject).pipe(stream);
+  });
+}
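`bufferStream` is the helper behind `entry.buffer()` further down in `parse.js`: it drains a readable stream into memory and resolves with one concatenated Buffer. A hypothetical usage sketch (the dist-internal import path is an assumption, not public API):

```js
import { Readable } from "stream";
import { bufferStream } from "./buffer-stream.js";

// Stand-in for a zip entry stream
const entry = Readable.from([Buffer.from("hello, "), Buffer.from("world")]);
const buf = await bufferStream(entry);
console.log(buf.toString()); // "hello, world"
```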
package/dist/esm/utils/unzip/index.js
@@ -0,0 +1,12 @@
+/**
+ * Unzip utilities for parsing ZIP archives
+ * Original source: https://github.com/ZJONSSON/node-unzipper
+ * License: MIT
+ */
+export { Parse, createParse } from "./parse.js";
+export { PullStream } from "./pull-stream.js";
+export { NoopStream } from "./noop-stream.js";
+export { bufferStream } from "./buffer-stream.js";
+export { parse as parseBuffer } from "./parse-buffer.js";
+export { parseDateTime } from "./parse-datetime.js";
+export { parseExtraField } from "./parse-extra-field.js";
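A hypothetical consumer of this barrel module (whether these utilities are also re-exported from the package's public entry point is not shown in this diff):

```js
// Internal import path assumed for illustration
import { createParse, parseBuffer, parseDateTime } from "./index.js";
```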
package/dist/esm/utils/unzip/noop-stream.js
@@ -0,0 +1,16 @@
+/**
+ * Unzipper noop-stream module
+ * Original source: https://github.com/ZJONSSON/node-unzipper
+ * License: MIT
+ * Copyright (c) 2012 - 2013 Near Infinity Corporation
+ * Commits in this fork are (c) Ziggy Jonsson (ziggy.jonsson.nyc@gmail.com)
+ */
+import { Transform } from "stream";
+export class NoopStream extends Transform {
+  constructor() {
+    super();
+  }
+  _transform(_chunk, _encoding, cb) {
+    cb();
+  }
+}
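`NoopStream` consumes and discards whatever is written to it; `parse.js` pipes an entry into it for `entry.autodrain()`, so skipped entries still get read off the wire without being buffered. A small sketch, under the same assumption about the internal import path:

```js
import { Readable } from "stream";
import { NoopStream } from "./noop-stream.js";

const source = Readable.from([Buffer.alloc(1024), Buffer.alloc(1024)]);
source.pipe(new NoopStream()).on("finish", () => {
  console.log("drained"); // data was consumed but never retained
});
```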
package/dist/esm/utils/unzip/parse-buffer.js
@@ -0,0 +1,57 @@
+/**
+ * Unzipper parse-buffer module
+ * Original source: https://github.com/ZJONSSON/node-unzipper
+ * License: MIT
+ * Copyright (c) 2012 - 2013 Near Infinity Corporation
+ * Commits in this fork are (c) Ziggy Jonsson (ziggy.jonsson.nyc@gmail.com)
+ */
+function parseUIntLE(buffer, offset, size) {
+  let result;
+  switch (size) {
+    case 1:
+      result = buffer.readUInt8(offset);
+      break;
+    case 2:
+      result = buffer.readUInt16LE(offset);
+      break;
+    case 4:
+      result = buffer.readUInt32LE(offset);
+      break;
+    case 8:
+      result = Number(buffer.readBigUInt64LE(offset));
+      break;
+    default:
+      throw new Error("Unsupported UInt LE size!");
+  }
+  return result;
+}
+/**
+ * Parses sequential unsigned little-endian numbers from the head of the passed buffer according to
+ * the specified format. If the buffer is not large enough to satisfy the full format,
+ * null values will be assigned to the remaining keys.
+ * @param buffer The buffer to sequentially extract numbers from.
+ * @param format Expected format to follow when extracting values from the buffer. A list of list entries
+ * with the following structure:
+ * [
+ *   [
+ *     <key>, // Name of the key to assign the extracted number to.
+ *     <size> // The size in bytes of the number to extract. Possible values are 1, 2, 4, 8.
+ *   ],
+ *   ...
+ * ]
+ * @returns An object with keys set to their associated extracted values.
+ */
+export function parse(buffer, format) {
+  const result = {};
+  let offset = 0;
+  for (const [key, size] of format) {
+    if (buffer.length >= offset + size) {
+      result[key] = parseUIntLE(buffer, offset, size);
+    }
+    else {
+      result[key] = null;
+    }
+    offset += size;
+  }
+  return result;
+}
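The `[key, size]` format in the doc comment is easiest to see on a concrete buffer. A worked example (import path assumed):

```js
import { parse } from "./parse-buffer.js";

const buf = Buffer.from([0x50, 0x4b, 0x03, 0x04, 0x14, 0x00]);
const vars = parse(buf, [
  ["signature", 4], // little-endian: 0x04034b50
  ["version", 2],   // 0x0014 = 20
  ["flags", 2]      // would need bytes 6..7, buffer ends at 6 -> null
]);
console.log(vars); // { signature: 67324752, version: 20, flags: null }
```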
package/dist/esm/utils/unzip/parse-datetime.js
@@ -0,0 +1,20 @@
+/**
+ * Unzipper parse-datetime module
+ * Original source: https://github.com/ZJONSSON/node-unzipper
+ * License: MIT
+ * Copyright (c) 2012 - 2013 Near Infinity Corporation
+ * Commits in this fork are (c) Ziggy Jonsson (ziggy.jonsson.nyc@gmail.com)
+ */
+/**
+ * Dates in zip file entries are stored as DosDateTime
+ * Spec is here: https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-dosdatetimetofiletime
+ */
+export function parseDateTime(date, time) {
+  const day = date & 0x1f;
+  const month = (date >> 5) & 0x0f;
+  const year = ((date >> 9) & 0x7f) + 1980;
+  const seconds = time ? (time & 0x1f) * 2 : 0;
+  const minutes = time ? (time >> 5) & 0x3f : 0;
+  const hours = time ? time >> 11 : 0;
+  return new Date(Date.UTC(year, month - 1, day, hours, minutes, seconds));
+}
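A worked example of the DOS bit layout (date: bits 0-4 day, 5-8 month, 9-15 years since 1980; time: bits 0-4 halved seconds, 5-10 minutes, 11-15 hours), with hand-packed values:

```js
import { parseDateTime } from "./parse-datetime.js";

// date 0x58cf: year = 44 + 1980 = 2024, month = 6, day = 15
// time 0x63c5: hours = 12, minutes = 30, seconds = 5 * 2 = 10
console.log(parseDateTime(0x58cf, 0x63c5).toISOString());
// "2024-06-15T12:30:10.000Z"
```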
package/dist/esm/utils/unzip/parse-extra-field.js
@@ -0,0 +1,49 @@
+/**
+ * Unzipper parse-extra-field module
+ * Original source: https://github.com/ZJONSSON/node-unzipper
+ * License: MIT
+ * Copyright (c) 2012 - 2013 Near Infinity Corporation
+ * Commits in this fork are (c) Ziggy Jonsson (ziggy.jonsson.nyc@gmail.com)
+ */
+import { parse } from "./parse-buffer.js";
+export function parseExtraField(extraField, vars) {
+  let extra;
+  // Find the ZIP64 header, if present.
+  while (!extra && extraField && extraField.length) {
+    const candidateExtra = parse(extraField, [
+      ["signature", 2],
+      ["partSize", 2]
+    ]);
+    if (candidateExtra.signature === 0x0001) {
+      // parse buffer based on data in ZIP64 central directory; order is important!
+      const fieldsToExpect = [];
+      if (vars.uncompressedSize === 0xffffffff) {
+        fieldsToExpect.push(["uncompressedSize", 8]);
+      }
+      if (vars.compressedSize === 0xffffffff) {
+        fieldsToExpect.push(["compressedSize", 8]);
+      }
+      if (vars.offsetToLocalFileHeader === 0xffffffff) {
+        fieldsToExpect.push(["offsetToLocalFileHeader", 8]);
+      }
+      // slice off the 4 bytes for signature and partSize
+      extra = parse(extraField.slice(4), fieldsToExpect);
+    }
+    else {
+      // Advance the buffer to the next part.
+      // The total size of this part is the 4 byte header + partsize.
+      extraField = extraField.slice((candidateExtra.partSize || 0) + 4);
+    }
+  }
+  extra = extra || {};
+  if (vars.compressedSize === 0xffffffff) {
+    vars.compressedSize = extra.compressedSize;
+  }
+  if (vars.uncompressedSize === 0xffffffff) {
+    vars.uncompressedSize = extra.uncompressedSize;
+  }
+  if (vars.offsetToLocalFileHeader === 0xffffffff) {
+    vars.offsetToLocalFileHeader = extra.offsetToLocalFileHeader;
+  }
+  return extra;
+}
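ZIP64 extra-field values appear in spec order and only for the 32-bit fields that overflowed to the 0xffffffff sentinel, which is why `fieldsToExpect` is built conditionally. A sketch building such an extra field by hand (sizes are made up for illustration):

```js
import { parseExtraField } from "./parse-extra-field.js";

const vars = {
  uncompressedSize: 0xffffffff, // overflowed -> real value lives in ZIP64 field
  compressedSize: 0xffffffff,
  offsetToLocalFileHeader: 0    // fits in 32 bits -> absent from ZIP64 field
};
const extraField = Buffer.alloc(20);
extraField.writeUInt16LE(0x0001, 0);             // ZIP64 signature
extraField.writeUInt16LE(16, 2);                 // partSize: two 8-byte values
extraField.writeBigUInt64LE(5_000_000_000n, 4);  // uncompressedSize
extraField.writeBigUInt64LE(4_900_000_000n, 12); // compressedSize
parseExtraField(extraField, vars);
console.log(vars.uncompressedSize, vars.compressedSize);
// 5000000000 4900000000
```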
package/dist/esm/utils/unzip/parse.js
@@ -0,0 +1,332 @@
+/**
+ * Unzipper parse module
+ * Original source: https://github.com/ZJONSSON/node-unzipper
+ * License: MIT
+ * Copyright (c) 2012 - 2013 Near Infinity Corporation
+ * Commits in this fork are (c) Ziggy Jonsson (ziggy.jonsson.nyc@gmail.com)
+ */
+import zlib from "zlib";
+import { PassThrough, Transform, pipeline } from "stream";
+import { PullStream } from "./pull-stream.js";
+import { NoopStream } from "./noop-stream.js";
+import { bufferStream } from "./buffer-stream.js";
+import { parseExtraField } from "./parse-extra-field.js";
+import { parseDateTime } from "./parse-datetime.js";
+import { parse as parseBuffer } from "./parse-buffer.js";
+// Check if native zlib is available (Node.js environment)
+// In browser with polyfill, createInflateRaw may not exist or may not work properly
+const hasNativeZlib = typeof zlib?.createInflateRaw === "function" &&
+  typeof process !== "undefined" &&
+  process.versions?.node;
+/**
+ * A Transform stream that wraps browser's native DecompressionStream.
+ * Used when native zlib is not available (browser environment).
+ */
+class BrowserInflateRawStream extends Transform {
+  constructor() {
+    super();
+    this.chunks = [];
+    this.totalLength = 0;
+  }
+  _transform(chunk, _encoding, callback) {
+    // Avoid unnecessary copy - Buffer extends Uint8Array
+    this.chunks.push(chunk);
+    this.totalLength += chunk.length;
+    callback();
+  }
+  _flush(callback) {
+    try {
+      // Use pre-calculated totalLength for better performance
+      const combined = new Uint8Array(this.totalLength);
+      let offset = 0;
+      for (const chunk of this.chunks) {
+        combined.set(chunk, offset);
+        offset += chunk.length;
+      }
+      // Clear chunks to free memory
+      this.chunks = [];
+      // Use native DecompressionStream
+      const ds = new DecompressionStream("deflate-raw");
+      const writer = ds.writable.getWriter();
+      const reader = ds.readable.getReader();
+      // Optimized read loop - collect chunks and concatenate at the end
+      const readAll = async () => {
+        const results = [];
+        let total = 0;
+        while (true) {
+          const { done, value } = await reader.read();
+          if (done) {
+            break;
+          }
+          results.push(value);
+          total += value.length;
+        }
+        // Single allocation for final result
+        const result = Buffer.allocUnsafe(total);
+        let off = 0;
+        for (const r of results) {
+          result.set(r, off);
+          off += r.length;
+        }
+        return result;
+      };
+      writer.write(combined);
+      writer.close();
+      readAll()
+        .then(decompressed => {
+          this.push(decompressed);
+          callback();
+        })
+        .catch(callback);
+    }
+    catch (err) {
+      callback(err);
+    }
+  }
+}
+/**
+ * Creates an InflateRaw stream.
+ * Uses native zlib in Node.js for best performance, falls back to DecompressionStream in browser.
+ */
+function createInflateRaw() {
+  if (hasNativeZlib) {
+    return zlib.createInflateRaw();
+  }
+  return new BrowserInflateRawStream();
+}
+const endDirectorySignature = Buffer.alloc(4);
+endDirectorySignature.writeUInt32LE(0x06054b50, 0);
+export class Parse extends PullStream {
+  constructor(opts = {}) {
+    super();
+    this._opts = opts;
+    this.on("finish", () => {
+      this.emit("end");
+      this.emit("close");
+    });
+    this._readRecord().catch((e) => {
+      if (!this.__emittedError || this.__emittedError !== e) {
+        this.emit("error", e);
+      }
+    });
+  }
+  async _readRecord() {
+    const data = await this.pull(4);
+    if (data.length === 0) {
+      return;
+    }
+    const signature = data.readUInt32LE(0);
+    if (signature === 0x34327243) {
+      const shouldLoop = await this._readCrxHeader();
+      if (shouldLoop) {
+        return this._readRecord();
+      }
+      return;
+    }
+    if (signature === 0x04034b50) {
+      const shouldLoop = await this._readFile();
+      if (shouldLoop) {
+        return this._readRecord();
+      }
+      return;
+    }
+    else if (signature === 0x02014b50) {
+      this.reachedCD = true;
+      const shouldLoop = await this._readCentralDirectoryFileHeader();
+      if (shouldLoop) {
+        return this._readRecord();
+      }
+      return;
+    }
+    else if (signature === 0x06054b50) {
+      await this._readEndOfCentralDirectoryRecord();
+      return;
+    }
+    else if (this.reachedCD) {
+      // _readEndOfCentralDirectoryRecord expects the EOCD
+      // signature to be consumed so set includeEof=true
+      const includeEof = true;
+      await this.pull(endDirectorySignature, includeEof);
+      await this._readEndOfCentralDirectoryRecord();
+      return;
+    }
+    else {
+      this.emit("error", new Error("invalid signature: 0x" + signature.toString(16)));
+    }
+  }
+  async _readCrxHeader() {
+    const data = await this.pull(12);
+    this.crxHeader = parseBuffer(data, [
+      ["version", 4],
+      ["pubKeyLength", 4],
+      ["signatureLength", 4]
+    ]);
+    const keyAndSig = await this.pull((this.crxHeader.pubKeyLength || 0) + (this.crxHeader.signatureLength || 0));
+    this.crxHeader.publicKey = keyAndSig.slice(0, this.crxHeader.pubKeyLength || 0);
+    this.crxHeader.signature = keyAndSig.slice(this.crxHeader.pubKeyLength || 0);
+    this.emit("crx-header", this.crxHeader);
+    return true;
+  }
+  async _readFile() {
+    const data = await this.pull(26);
+    const vars = parseBuffer(data, [
+      ["versionsNeededToExtract", 2],
+      ["flags", 2],
+      ["compressionMethod", 2],
+      ["lastModifiedTime", 2],
+      ["lastModifiedDate", 2],
+      ["crc32", 4],
+      ["compressedSize", 4],
+      ["uncompressedSize", 4],
+      ["fileNameLength", 2],
+      ["extraFieldLength", 2]
+    ]);
+    vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate || 0, vars.lastModifiedTime || 0);
+    if (this.crxHeader) {
+      vars.crxHeader = this.crxHeader;
+    }
+    const fileNameBuffer = await this.pull(vars.fileNameLength || 0);
+    const fileName = fileNameBuffer.toString("utf8");
+    const entry = new PassThrough();
+    let __autodraining = false;
+    entry.autodrain = function () {
+      __autodraining = true;
+      const draining = entry.pipe(new NoopStream());
+      draining.promise = function () {
+        return new Promise((resolve, reject) => {
+          draining.on("finish", resolve);
+          draining.on("error", reject);
+        });
+      };
+      return draining;
+    };
+    entry.buffer = function () {
+      return bufferStream(entry);
+    };
+    entry.path = fileName;
+    entry.props = {
+      path: fileName,
+      pathBuffer: fileNameBuffer,
+      flags: {
+        isUnicode: ((vars.flags || 0) & 0x800) !== 0
+      }
+    };
+    entry.type = vars.uncompressedSize === 0 && /[/\\]$/.test(fileName) ? "Directory" : "File";
+    if (this._opts.verbose) {
+      if (entry.type === "Directory") {
+        console.log(" creating:", fileName);
+      }
+      else if (entry.type === "File") {
+        if (vars.compressionMethod === 0) {
+          console.log(" extracting:", fileName);
+        }
+        else {
+          console.log(" inflating:", fileName);
+        }
+      }
+    }
+    const extraFieldData = await this.pull(vars.extraFieldLength || 0);
+    const extra = parseExtraField(extraFieldData, vars);
+    entry.vars = vars;
+    entry.extra = extra;
+    if (this._opts.forceStream) {
+      this.push(entry);
+    }
+    else {
+      this.emit("entry", entry);
+      const state = this._readableState;
+      if (state.pipesCount || (state.pipes && state.pipes.length)) {
+        this.push(entry);
+      }
+    }
+    if (this._opts.verbose) {
+      console.log({
+        filename: fileName,
+        vars: vars,
+        extra: extra
+      });
+    }
+    const fileSizeKnown = !((vars.flags || 0) & 0x08) || vars.compressedSize > 0;
+    let eof;
+    entry.__autodraining = __autodraining; // expose __autodraining for test purposes
+    const inflater = vars.compressionMethod && !__autodraining ? createInflateRaw() : new PassThrough();
+    if (fileSizeKnown) {
+      entry.size = vars.uncompressedSize;
+      eof = vars.compressedSize;
+    }
+    else {
+      eof = Buffer.alloc(4);
+      eof.writeUInt32LE(0x08074b50, 0);
+    }
+    return new Promise((resolve, reject) => {
+      pipeline(this.stream(eof), inflater, entry, err => {
+        if (err) {
+          return reject(err);
+        }
+        return fileSizeKnown
+          ? resolve(fileSizeKnown)
+          : this._processDataDescriptor(entry).then(resolve).catch(reject);
+      });
+    });
+  }
+  async _processDataDescriptor(entry) {
+    const data = await this.pull(16);
+    const vars = parseBuffer(data, [
+      ["dataDescriptorSignature", 4],
+      ["crc32", 4],
+      ["compressedSize", 4],
+      ["uncompressedSize", 4]
+    ]);
+    entry.size = vars.uncompressedSize || 0;
+    return true;
+  }
+  async _readCentralDirectoryFileHeader() {
+    const data = await this.pull(42);
+    const vars = parseBuffer(data, [
+      ["versionMadeBy", 2],
+      ["versionsNeededToExtract", 2],
+      ["flags", 2],
+      ["compressionMethod", 2],
+      ["lastModifiedTime", 2],
+      ["lastModifiedDate", 2],
+      ["crc32", 4],
+      ["compressedSize", 4],
+      ["uncompressedSize", 4],
+      ["fileNameLength", 2],
+      ["extraFieldLength", 2],
+      ["fileCommentLength", 2],
+      ["diskNumber", 2],
+      ["internalFileAttributes", 2],
+      ["externalFileAttributes", 4],
+      ["offsetToLocalFileHeader", 4]
+    ]);
+    await this.pull(vars.fileNameLength || 0);
+    await this.pull(vars.extraFieldLength || 0);
+    await this.pull(vars.fileCommentLength || 0);
+    return true;
+  }
+  async _readEndOfCentralDirectoryRecord() {
+    const data = await this.pull(18);
+    const vars = parseBuffer(data, [
+      ["diskNumber", 2],
+      ["diskStart", 2],
+      ["numberOfRecordsOnDisk", 2],
+      ["numberOfRecords", 2],
+      ["sizeOfCentralDirectory", 4],
+      ["offsetToStartOfCentralDirectory", 4],
+      ["commentLength", 2]
+    ]);
+    await this.pull(vars.commentLength || 0);
+    this.end();
+    this.push(null);
+  }
+  promise() {
+    return new Promise((resolve, reject) => {
+      this.on("finish", resolve);
+      this.on("error", reject);
+    });
+  }
+}
+export function createParse(opts) {
+  return new Parse(opts);
+}
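Taken together, a hypothetical end-to-end use of the vendored parser (the file name and dist-internal import path are assumptions; within the package this module is presumably consumed by the stream workbook reader rather than directly):

```js
import { createReadStream } from "fs";
import { createParse } from "./parse.js";

const parser = createParse();
parser.on("entry", async entry => {
  if (entry.type === "File") {
    const content = await entry.buffer(); // collect this entry into memory
    console.log(entry.path, content.length, "bytes");
  } else {
    entry.autodrain(); // every entry must be consumed or drained to keep parsing
  }
});
createReadStream("archive.zip").pipe(parser);
await parser.promise(); // resolves on "finish", rejects on "error"
```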