@cj-tech-master/excelts 1.0.0 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. package/dist/browser/excelts.iife.js +2568 -1188
  2. package/dist/browser/excelts.iife.js.map +1 -1
  3. package/dist/browser/excelts.iife.min.js +21 -19
  4. package/dist/cjs/index.js +1 -0
  5. package/dist/cjs/stream/xlsx/workbook-reader.js +2 -2
  6. package/dist/cjs/stream/xlsx/workbook-writer.js +8 -4
  7. package/dist/cjs/utils/cell-format.js +815 -0
  8. package/dist/cjs/utils/cell-matrix.js +37 -2
  9. package/dist/cjs/utils/parse-sax.js +2 -2
  10. package/dist/cjs/utils/sheet-utils.js +615 -0
  11. package/dist/cjs/utils/stream-buf.js +15 -4
  12. package/dist/cjs/utils/unzip/buffer-stream.js +27 -0
  13. package/dist/cjs/utils/unzip/index.js +23 -0
  14. package/dist/cjs/utils/unzip/noop-stream.js +20 -0
  15. package/dist/cjs/utils/unzip/parse-buffer.js +60 -0
  16. package/dist/cjs/utils/unzip/parse-datetime.js +23 -0
  17. package/dist/cjs/utils/unzip/parse-extra-field.js +52 -0
  18. package/dist/cjs/utils/unzip/parse.js +340 -0
  19. package/dist/cjs/utils/unzip/pull-stream.js +145 -0
  20. package/dist/cjs/utils/utils.js +13 -17
  21. package/dist/cjs/utils/zip-stream.js +29 -33
  22. package/dist/cjs/xlsx/xlsx.js +1 -2
  23. package/dist/esm/index.browser.js +1 -0
  24. package/dist/esm/index.js +1 -0
  25. package/dist/esm/stream/xlsx/workbook-reader.js +2 -2
  26. package/dist/esm/stream/xlsx/workbook-writer.js +9 -5
  27. package/dist/esm/utils/cell-format.js +810 -0
  28. package/dist/esm/utils/cell-matrix.js +37 -2
  29. package/dist/esm/utils/parse-sax.js +1 -1
  30. package/dist/esm/utils/sheet-utils.js +595 -0
  31. package/dist/esm/utils/stream-buf.js +15 -4
  32. package/dist/esm/utils/unzip/buffer-stream.js +24 -0
  33. package/dist/esm/utils/unzip/index.js +12 -0
  34. package/dist/esm/utils/unzip/noop-stream.js +16 -0
  35. package/dist/esm/utils/unzip/parse-buffer.js +57 -0
  36. package/dist/esm/utils/unzip/parse-datetime.js +20 -0
  37. package/dist/esm/utils/unzip/parse-extra-field.js +49 -0
  38. package/dist/esm/utils/unzip/parse.js +332 -0
  39. package/dist/esm/utils/unzip/pull-stream.js +141 -0
  40. package/dist/esm/utils/utils.js +12 -16
  41. package/dist/esm/utils/zip-stream.js +30 -34
  42. package/dist/esm/xlsx/xlsx.js +1 -2
  43. package/dist/types/doc/column.d.ts +1 -1
  44. package/dist/types/doc/worksheet.d.ts +2 -2
  45. package/dist/types/index.browser.d.ts +1 -0
  46. package/dist/types/index.d.ts +1 -0
  47. package/dist/types/stream/xlsx/workbook-writer.d.ts +1 -0
  48. package/dist/types/utils/cell-format.d.ts +32 -0
  49. package/dist/types/utils/sheet-utils.d.ts +203 -0
  50. package/dist/types/utils/unzip/buffer-stream.d.ts +9 -0
  51. package/dist/types/utils/unzip/index.d.ts +12 -0
  52. package/dist/types/utils/unzip/noop-stream.d.ts +13 -0
  53. package/dist/types/utils/unzip/parse-buffer.d.ts +24 -0
  54. package/dist/types/utils/unzip/parse-datetime.d.ts +12 -0
  55. package/dist/types/utils/unzip/parse-extra-field.d.ts +18 -0
  56. package/dist/types/utils/unzip/parse.d.ts +70 -0
  57. package/dist/types/utils/unzip/pull-stream.d.ts +24 -0
  58. package/dist/types/utils/utils.d.ts +5 -2
  59. package/dist/types/utils/zip-stream.d.ts +5 -1
  60. package/package.json +35 -32
  61. package/dist/cjs/utils/browser-buffer-decode.js +0 -13
  62. package/dist/cjs/utils/browser-buffer-encode.js +0 -13
  63. package/dist/cjs/utils/browser.js +0 -6
  64. package/dist/esm/utils/browser-buffer-decode.js +0 -11
  65. package/dist/esm/utils/browser-buffer-encode.js +0 -11
  66. package/dist/esm/utils/browser.js +0 -3
  67. package/dist/types/utils/browser-buffer-decode.d.ts +0 -2
  68. package/dist/types/utils/browser-buffer-encode.d.ts +0 -2
  69. package/dist/types/utils/browser.d.ts +0 -1
@@ -202,14 +202,24 @@ exports.StreamBuf = StreamBuf;
202
202
  if (data instanceof string_buf_js_1.StringBuf || (data && data.constructor?.name === "StringBuf")) {
203
203
  chunk = new StringBufChunk(data);
204
204
  }
205
- else if (data instanceof Buffer) {
205
+ else if (Buffer.isBuffer(data)) {
206
+ // Use Buffer.isBuffer() instead of instanceof for cross-realm compatibility
207
+ // (e.g., Web Workers where Buffer polyfill instances may differ)
206
208
  chunk = new BufferChunk(data);
207
209
  }
208
- else if (typeof data === "string" || data instanceof String || data instanceof ArrayBuffer) {
210
+ else if (ArrayBuffer.isView(data)) {
211
+ // Handle typed arrays (Uint8Array, Int8Array, etc.) - cross-realm safe
212
+ chunk = new BufferChunk(Buffer.from(data.buffer, data.byteOffset, data.byteLength));
213
+ }
214
+ else if (data instanceof ArrayBuffer) {
215
+ // Handle ArrayBuffer - convert to Buffer
216
+ chunk = new BufferChunk(Buffer.from(data));
217
+ }
218
+ else if (typeof data === "string" || data instanceof String) {
209
219
  chunk = new StringChunk(String(data), encoding);
210
220
  }
211
221
  else {
212
- throw new Error("Chunk must be one of type String, Buffer or StringBuf.");
222
+ throw new Error("Chunk must be one of type String, Buffer, Uint8Array, ArrayBuffer or StringBuf.");
213
223
  }
214
224
  // now, do something with the chunk
215
225
  if (this.pipes.length) {
@@ -225,7 +235,8 @@ exports.StreamBuf = StreamBuf;
225
235
  }
226
236
  else {
227
237
  this._writeToBuffers(chunk);
228
- process.nextTick(callback);
238
+ // Use queueMicrotask for cross-platform compatibility (ES2020+)
239
+ queueMicrotask(() => callback());
229
240
  }
230
241
  }
231
242
  else {
@@ -0,0 +1,27 @@
1
+ "use strict";
2
+ /**
3
+ * Unzipper buffer-stream module
4
+ * Original source: https://github.com/ZJONSSON/node-unzipper
5
+ * License: MIT
6
+ * Copyright (c) 2012 - 2013 Near Infinity Corporation
7
+ * Commits in this fork are (c) Ziggy Jonsson (ziggy.jonsson.nyc@gmail.com)
8
+ */
9
+ Object.defineProperty(exports, "__esModule", { value: true });
10
+ exports.bufferStream = bufferStream;
11
+ const stream_1 = require("stream");
12
/**
 * Collects every chunk of an entry stream and resolves with the whole
 * contents as a single Buffer.
 * Rejects if either the source entry or the collecting stream errors.
 * @param entry Readable stream for a single zip entry.
 * @returns Promise resolving to the concatenated entry contents.
 */
function bufferStream(entry) {
    return new Promise((resolve, reject) => {
        const collected = [];
        const collector = new stream_1.Transform({
            transform(piece, _encoding, done) {
                collected.push(piece);
                done();
            }
        });
        collector.on("error", reject);
        collector.on("finish", () => resolve(Buffer.concat(collected)));
        entry.on("error", reject).pipe(collector);
    });
}
@@ -0,0 +1,23 @@
1
+ "use strict";
2
+ /**
3
+ * Unzip utilities for parsing ZIP archives
4
+ * Original source: https://github.com/ZJONSSON/node-unzipper
5
+ * License: MIT
6
+ */
7
+ Object.defineProperty(exports, "__esModule", { value: true });
8
+ exports.parseExtraField = exports.parseDateTime = exports.parseBuffer = exports.bufferStream = exports.NoopStream = exports.PullStream = exports.createParse = exports.Parse = void 0;
9
+ var parse_js_1 = require("./parse");
10
+ Object.defineProperty(exports, "Parse", { enumerable: true, get: function () { return parse_js_1.Parse; } });
11
+ Object.defineProperty(exports, "createParse", { enumerable: true, get: function () { return parse_js_1.createParse; } });
12
+ var pull_stream_js_1 = require("./pull-stream");
13
+ Object.defineProperty(exports, "PullStream", { enumerable: true, get: function () { return pull_stream_js_1.PullStream; } });
14
+ var noop_stream_js_1 = require("./noop-stream");
15
+ Object.defineProperty(exports, "NoopStream", { enumerable: true, get: function () { return noop_stream_js_1.NoopStream; } });
16
+ var buffer_stream_js_1 = require("./buffer-stream");
17
+ Object.defineProperty(exports, "bufferStream", { enumerable: true, get: function () { return buffer_stream_js_1.bufferStream; } });
18
+ var parse_buffer_js_1 = require("./parse-buffer");
19
+ Object.defineProperty(exports, "parseBuffer", { enumerable: true, get: function () { return parse_buffer_js_1.parse; } });
20
+ var parse_datetime_js_1 = require("./parse-datetime");
21
+ Object.defineProperty(exports, "parseDateTime", { enumerable: true, get: function () { return parse_datetime_js_1.parseDateTime; } });
22
+ var parse_extra_field_js_1 = require("./parse-extra-field");
23
+ Object.defineProperty(exports, "parseExtraField", { enumerable: true, get: function () { return parse_extra_field_js_1.parseExtraField; } });
@@ -0,0 +1,20 @@
1
+ "use strict";
2
+ /**
3
+ * Unzipper noop-stream module
4
+ * Original source: https://github.com/ZJONSSON/node-unzipper
5
+ * License: MIT
6
+ * Copyright (c) 2012 - 2013 Near Infinity Corporation
7
+ * Commits in this fork are (c) Ziggy Jonsson (ziggy.jonsson.nyc@gmail.com)
8
+ */
9
+ Object.defineProperty(exports, "__esModule", { value: true });
10
+ exports.NoopStream = void 0;
11
+ const stream_1 = require("stream");
12
/**
 * A writable sink that discards every chunk it receives.
 * Used to "autodrain" zip entries the caller does not consume, so the
 * parser can keep advancing through the archive.
 */
class NoopStream extends stream_1.Transform {
    // FIX: removed the redundant empty `constructor() { super(); }` — a class
    // without a constructor inherits the base one (no-useless-constructor).
    _transform(_chunk, _encoding, cb) {
        // Drop the chunk; just signal completion.
        cb();
    }
}
20
+ exports.NoopStream = NoopStream;
@@ -0,0 +1,60 @@
1
+ "use strict";
2
+ /**
3
+ * Unzipper parse-buffer module
4
+ * Original source: https://github.com/ZJONSSON/node-unzipper
5
+ * License: MIT
6
+ * Copyright (c) 2012 - 2013 Near Infinity Corporation
7
+ * Commits in this fork are (c) Ziggy Jonsson (ziggy.jonsson.nyc@gmail.com)
8
+ */
9
+ Object.defineProperty(exports, "__esModule", { value: true });
10
+ exports.parse = parse;
11
/**
 * Reads one unsigned little-endian integer of the given byte width.
 * @param buffer Source buffer.
 * @param offset Byte offset to read at.
 * @param size Width in bytes: 1, 2, 4 or 8.
 * @returns The decoded number (64-bit values are coerced via Number()).
 * @throws When size is not one of the supported widths.
 */
function parseUIntLE(buffer, offset, size) {
    if (size === 1) {
        return buffer.readUInt8(offset);
    }
    if (size === 2) {
        return buffer.readUInt16LE(offset);
    }
    if (size === 4) {
        return buffer.readUInt32LE(offset);
    }
    if (size === 8) {
        return Number(buffer.readBigUInt64LE(offset));
    }
    throw new Error("Unsupported UInt LE size!");
}
/**
 * Sequentially decodes unsigned little-endian numbers from the head of the
 * passed buffer according to the specified format. Keys whose fields extend
 * past the end of the buffer are assigned null.
 * @param buffer The buffer to sequentially extract numbers from.
 * @param format List of [key, size] pairs, where `key` names the result
 *               property and `size` is the byte width (1, 2, 4 or 8).
 * @returns An object with keys set to their associated extracted values.
 */
function parse(buffer, format) {
    let cursor = 0;
    const decoded = {};
    for (const [key, size] of format) {
        decoded[key] = buffer.length >= cursor + size
            ? parseUIntLE(buffer, cursor, size)
            : null;
        cursor += size;
    }
    return decoded;
}
@@ -0,0 +1,23 @@
1
+ "use strict";
2
+ /**
3
+ * Unzipper parse-datetime module
4
+ * Original source: https://github.com/ZJONSSON/node-unzipper
5
+ * License: MIT
6
+ * Copyright (c) 2012 - 2013 Near Infinity Corporation
7
+ * Commits in this fork are (c) Ziggy Jonsson (ziggy.jonsson.nyc@gmail.com)
8
+ */
9
+ Object.defineProperty(exports, "__esModule", { value: true });
10
+ exports.parseDateTime = parseDateTime;
11
/**
 * Decodes a zip entry's DosDateTime into a UTC Date.
 * Layout per the Windows DosDateTimeToFileTime spec:
 * date bits: 0-4 day, 5-8 month, 9-15 years since 1980;
 * time bits: 0-4 seconds/2, 5-10 minutes, 11-15 hours.
 * A falsy time yields midnight.
 */
function parseDateTime(date, time) {
    const year = 1980 + ((date >> 9) & 0x7f);
    const monthIndex = ((date >> 5) & 0x0f) - 1;
    const day = date & 0x1f;
    let hours = 0;
    let minutes = 0;
    let seconds = 0;
    if (time) {
        hours = time >> 11;
        minutes = (time >> 5) & 0x3f;
        seconds = (time & 0x1f) * 2;
    }
    return new Date(Date.UTC(year, monthIndex, day, hours, minutes, seconds));
}
@@ -0,0 +1,52 @@
1
+ "use strict";
2
+ /**
3
+ * Unzipper parse-extra-field module
4
+ * Original source: https://github.com/ZJONSSON/node-unzipper
5
+ * License: MIT
6
+ * Copyright (c) 2012 - 2013 Near Infinity Corporation
7
+ * Commits in this fork are (c) Ziggy Jonsson (ziggy.jonsson.nyc@gmail.com)
8
+ */
9
+ Object.defineProperty(exports, "__esModule", { value: true });
10
+ exports.parseExtraField = parseExtraField;
11
+ const parse_buffer_js_1 = require("./parse-buffer");
12
/**
 * Scans a zip entry's extra-field blocks for the ZIP64 record (signature
 * 0x0001) and, when found, patches the saturated 32-bit header values in
 * `vars` (0xffffffff markers) with their 64-bit replacements.
 * @param extraField Buffer containing the concatenated extra-field parts.
 * @param vars Parsed header values; mutated in place for saturated fields.
 * @returns The parsed ZIP64 values (empty object when no ZIP64 record exists).
 */
function parseExtraField(extraField, vars) {
    let extra;
    // Find the ZIP64 header, if present.
    while (!extra && extraField && extraField.length) {
        const candidateExtra = (0, parse_buffer_js_1.parse)(extraField, [
            ["signature", 2],
            ["partSize", 2]
        ]);
        if (candidateExtra.signature === 0x0001) {
            // Parse based on data in the ZIP64 central directory; order is
            // important — fields are present only when the corresponding
            // 32-bit value is saturated (0xffffffff).
            const fieldsToExpect = [];
            if (vars.uncompressedSize === 0xffffffff) {
                fieldsToExpect.push(["uncompressedSize", 8]);
            }
            if (vars.compressedSize === 0xffffffff) {
                fieldsToExpect.push(["compressedSize", 8]);
            }
            if (vars.offsetToLocalFileHeader === 0xffffffff) {
                fieldsToExpect.push(["offsetToLocalFileHeader", 8]);
            }
            // Skip the 4 bytes for signature and partSize.
            // FIX: subarray() replaces the deprecated Buffer#slice(); both
            // return a view over the same memory, so behavior is unchanged.
            extra = (0, parse_buffer_js_1.parse)(extraField.subarray(4), fieldsToExpect);
        }
        else {
            // Advance the buffer to the next part.
            // The total size of this part is the 4 byte header + partsize.
            extraField = extraField.subarray((candidateExtra.partSize || 0) + 4);
        }
    }
    extra = extra || {};
    if (vars.compressedSize === 0xffffffff) {
        vars.compressedSize = extra.compressedSize;
    }
    if (vars.uncompressedSize === 0xffffffff) {
        vars.uncompressedSize = extra.uncompressedSize;
    }
    if (vars.offsetToLocalFileHeader === 0xffffffff) {
        vars.offsetToLocalFileHeader = extra.offsetToLocalFileHeader;
    }
    return extra;
}
@@ -0,0 +1,340 @@
1
+ "use strict";
2
+ /**
3
+ * Unzipper parse module
4
+ * Original source: https://github.com/ZJONSSON/node-unzipper
5
+ * License: MIT
6
+ * Copyright (c) 2012 - 2013 Near Infinity Corporation
7
+ * Commits in this fork are (c) Ziggy Jonsson (ziggy.jonsson.nyc@gmail.com)
8
+ */
9
+ var __importDefault = (this && this.__importDefault) || function (mod) {
10
+ return (mod && mod.__esModule) ? mod : { "default": mod };
11
+ };
12
+ Object.defineProperty(exports, "__esModule", { value: true });
13
+ exports.Parse = void 0;
14
+ exports.createParse = createParse;
15
+ const zlib_1 = __importDefault(require("zlib"));
16
+ const stream_1 = require("stream");
17
+ const pull_stream_js_1 = require("./pull-stream");
18
+ const noop_stream_js_1 = require("./noop-stream");
19
+ const buffer_stream_js_1 = require("./buffer-stream");
20
+ const parse_extra_field_js_1 = require("./parse-extra-field");
21
+ const parse_datetime_js_1 = require("./parse-datetime");
22
+ const parse_buffer_js_1 = require("./parse-buffer");
23
+ // Check if native zlib is available (Node.js environment)
24
+ // In browser with polyfill, createInflateRaw may not exist or may not work properly
25
+ const hasNativeZlib = typeof zlib_1.default?.createInflateRaw === "function" &&
26
+ typeof process !== "undefined" &&
27
+ process.versions?.node;
28
+ /**
29
+ * A Transform stream that wraps browser's native DecompressionStream.
30
+ * Used when native zlib is not available (browser environment).
31
+ */
32
+ class BrowserInflateRawStream extends stream_1.Transform {
33
+ constructor() {
34
+ super();
35
+ this.chunks = [];
36
+ this.totalLength = 0;
37
+ }
38
+ _transform(chunk, _encoding, callback) {
39
+ // Avoid unnecessary copy - Buffer extends Uint8Array
40
+ this.chunks.push(chunk);
41
+ this.totalLength += chunk.length;
42
+ callback();
43
+ }
44
+ _flush(callback) {
45
+ try {
46
+ // Use pre-calculated totalLength for better performance
47
+ const combined = new Uint8Array(this.totalLength);
48
+ let offset = 0;
49
+ for (const chunk of this.chunks) {
50
+ combined.set(chunk, offset);
51
+ offset += chunk.length;
52
+ }
53
+ // Clear chunks to free memory
54
+ this.chunks = [];
55
+ // Use native DecompressionStream
56
+ const ds = new DecompressionStream("deflate-raw");
57
+ const writer = ds.writable.getWriter();
58
+ const reader = ds.readable.getReader();
59
+ // Optimized read loop - collect chunks and concatenate at the end
60
+ const readAll = async () => {
61
+ const results = [];
62
+ let total = 0;
63
+ while (true) {
64
+ const { done, value } = await reader.read();
65
+ if (done) {
66
+ break;
67
+ }
68
+ results.push(value);
69
+ total += value.length;
70
+ }
71
+ // Single allocation for final result
72
+ const result = Buffer.allocUnsafe(total);
73
+ let off = 0;
74
+ for (const r of results) {
75
+ result.set(r, off);
76
+ off += r.length;
77
+ }
78
+ return result;
79
+ };
80
+ writer.write(combined);
81
+ writer.close();
82
+ readAll()
83
+ .then(decompressed => {
84
+ this.push(decompressed);
85
+ callback();
86
+ })
87
+ .catch(callback);
88
+ }
89
+ catch (err) {
90
+ callback(err);
91
+ }
92
+ }
93
+ }
94
/**
 * Creates a raw-inflate decompression stream.
 * Prefers Node's native zlib implementation for best performance; when no
 * native zlib is available (browser environment) it falls back to the
 * DecompressionStream-backed shim.
 */
function createInflateRaw() {
    return hasNativeZlib
        ? zlib_1.default.createInflateRaw()
        : new BrowserInflateRawStream();
}
104
+ const endDirectorySignature = Buffer.alloc(4);
105
+ endDirectorySignature.writeUInt32LE(0x06054b50, 0);
106
/**
 * Streaming ZIP parser. Pulls bytes from the underlying PullStream, walks
 * the archive record-by-record, and emits one PassThrough "entry" stream
 * per stored file. Ported from node-unzipper (see module header).
 */
class Parse extends pull_stream_js_1.PullStream {
    /**
     * @param opts Parser options: `verbose` logs progress to the console,
     *             `forceStream` pushes every entry downstream instead of
     *             relying only on "entry" listeners.
     */
    constructor(opts = {}) {
        super();
        this._opts = opts;
        // Mirror writable "finish" as readable "end"/"close" for consumers.
        this.on("finish", () => {
            this.emit("end");
            this.emit("close");
        });
        // Start the record loop immediately; surface failures as "error"
        // unless the identical error object was already emitted.
        this._readRecord().catch((e) => {
            if (!this.__emittedError || this.__emittedError !== e) {
                this.emit("error", e);
            }
        });
    }
    /**
     * Reads the next 4-byte record signature and dispatches to the matching
     * record reader, recursing until end-of-archive, EOCD, or an unknown
     * signature.
     */
    async _readRecord() {
        const data = await this.pull(4);
        if (data.length === 0) {
            // Source exhausted — nothing more to parse.
            return;
        }
        const signature = data.readUInt32LE(0);
        if (signature === 0x34327243) {
            // "Cr24": Chrome extension (CRX) wrapper preceding the zip data.
            const shouldLoop = await this._readCrxHeader();
            if (shouldLoop) {
                return this._readRecord();
            }
            return;
        }
        if (signature === 0x04034b50) {
            // Local file header.
            const shouldLoop = await this._readFile();
            if (shouldLoop) {
                return this._readRecord();
            }
            return;
        }
        else if (signature === 0x02014b50) {
            // Central directory file header.
            this.reachedCD = true;
            const shouldLoop = await this._readCentralDirectoryFileHeader();
            if (shouldLoop) {
                return this._readRecord();
            }
            return;
        }
        else if (signature === 0x06054b50) {
            // End of central directory (EOCD) record.
            await this._readEndOfCentralDirectoryRecord();
            return;
        }
        else if (this.reachedCD) {
            // Unknown bytes after the central directory started: scan forward
            // to the EOCD signature instead of failing.
            // _readEndOfCentralDirectoryRecord expects the EOCD
            // signature to be consumed so set includeEof=true
            const includeEof = true;
            await this.pull(endDirectorySignature, includeEof);
            await this._readEndOfCentralDirectoryRecord();
            return;
        }
        else {
            this.emit("error", new Error("invalid signature: 0x" + signature.toString(16)));
        }
    }
    /**
     * Reads the CRX wrapper header and its public key + signature payload,
     * emitting "crx-header". Returns true so the record loop continues.
     */
    async _readCrxHeader() {
        const data = await this.pull(12);
        this.crxHeader = (0, parse_buffer_js_1.parse)(data, [
            ["version", 4],
            ["pubKeyLength", 4],
            ["signatureLength", 4]
        ]);
        const keyAndSig = await this.pull((this.crxHeader.pubKeyLength || 0) + (this.crxHeader.signatureLength || 0));
        this.crxHeader.publicKey = keyAndSig.slice(0, this.crxHeader.pubKeyLength || 0);
        this.crxHeader.signature = keyAndSig.slice(this.crxHeader.pubKeyLength || 0);
        this.emit("crx-header", this.crxHeader);
        return true;
    }
    /**
     * Reads one local file header plus its (possibly compressed) contents,
     * builds the entry PassThrough, and pipes the file bytes through the
     * appropriate inflater into it. Resolves true when size was known from
     * the header, otherwise after consuming the trailing data descriptor.
     */
    async _readFile() {
        // 26 bytes: the local file header minus the already-consumed signature.
        const data = await this.pull(26);
        const vars = (0, parse_buffer_js_1.parse)(data, [
            ["versionsNeededToExtract", 2],
            ["flags", 2],
            ["compressionMethod", 2],
            ["lastModifiedTime", 2],
            ["lastModifiedDate", 2],
            ["crc32", 4],
            ["compressedSize", 4],
            ["uncompressedSize", 4],
            ["fileNameLength", 2],
            ["extraFieldLength", 2]
        ]);
        vars.lastModifiedDateTime = (0, parse_datetime_js_1.parseDateTime)(vars.lastModifiedDate || 0, vars.lastModifiedTime || 0);
        if (this.crxHeader) {
            vars.crxHeader = this.crxHeader;
        }
        const fileNameBuffer = await this.pull(vars.fileNameLength || 0);
        const fileName = fileNameBuffer.toString("utf8");
        const entry = new stream_1.PassThrough();
        let __autodraining = false;
        // Lets a consumer discard this entry while still advancing the parser.
        entry.autodrain = function () {
            __autodraining = true;
            const draining = entry.pipe(new noop_stream_js_1.NoopStream());
            draining.promise = function () {
                return new Promise((resolve, reject) => {
                    draining.on("finish", resolve);
                    draining.on("error", reject);
                });
            };
            return draining;
        };
        // Convenience: collect the whole entry into one Buffer.
        entry.buffer = function () {
            return (0, buffer_stream_js_1.bufferStream)(entry);
        };
        entry.path = fileName;
        entry.props = {
            path: fileName,
            pathBuffer: fileNameBuffer,
            flags: {
                // Bit 11 of the general-purpose flags marks a UTF-8 file name.
                isUnicode: ((vars.flags || 0) & 0x800) !== 0
            }
        };
        // Directories are zero-length entries whose name ends in a separator.
        entry.type = vars.uncompressedSize === 0 && /[/\\]$/.test(fileName) ? "Directory" : "File";
        if (this._opts.verbose) {
            if (entry.type === "Directory") {
                console.log(" creating:", fileName);
            }
            else if (entry.type === "File") {
                if (vars.compressionMethod === 0) {
                    console.log(" extracting:", fileName);
                }
                else {
                    console.log(" inflating:", fileName);
                }
            }
        }
        const extraFieldData = await this.pull(vars.extraFieldLength || 0);
        // May mutate vars in place to install ZIP64 64-bit sizes/offsets.
        const extra = (0, parse_extra_field_js_1.parseExtraField)(extraFieldData, vars);
        entry.vars = vars;
        entry.extra = extra;
        if (this._opts.forceStream) {
            this.push(entry);
        }
        else {
            this.emit("entry", entry);
            // Only push the entry downstream when something is actually piped,
            // otherwise unconsumed entries would stall the readable buffer.
            const state = this._readableState;
            if (state.pipesCount || (state.pipes && state.pipes.length)) {
                this.push(entry);
            }
        }
        if (this._opts.verbose) {
            console.log({
                filename: fileName,
                vars: vars,
                extra: extra
            });
        }
        // Flag bit 3 means sizes live in a trailing data descriptor; if the
        // header still carries a positive compressedSize, trust the header.
        const fileSizeKnown = !((vars.flags || 0) & 0x08) || vars.compressedSize > 0;
        let eof;
        entry.__autodraining = __autodraining; // expose __autodraining for test purposes
        // method 0 (stored) or autodraining skips decompression entirely.
        const inflater = vars.compressionMethod && !__autodraining ? createInflateRaw() : new stream_1.PassThrough();
        if (fileSizeKnown) {
            entry.size = vars.uncompressedSize;
            eof = vars.compressedSize;
        }
        else {
            // Unknown size: stream until the data-descriptor signature appears.
            eof = Buffer.alloc(4);
            eof.writeUInt32LE(0x08074b50, 0);
        }
        return new Promise((resolve, reject) => {
            (0, stream_1.pipeline)(this.stream(eof), inflater, entry, err => {
                if (err) {
                    return reject(err);
                }
                return fileSizeKnown
                    ? resolve(fileSizeKnown)
                    : this._processDataDescriptor(entry).then(resolve).catch(reject);
            });
        });
    }
    /**
     * Consumes the 16-byte data descriptor that follows an entry of unknown
     * size and records the uncompressed size on the entry.
     */
    async _processDataDescriptor(entry) {
        const data = await this.pull(16);
        const vars = (0, parse_buffer_js_1.parse)(data, [
            ["dataDescriptorSignature", 4],
            ["crc32", 4],
            ["compressedSize", 4],
            ["uncompressedSize", 4]
        ]);
        entry.size = vars.uncompressedSize || 0;
        return true;
    }
    /**
     * Consumes one central directory file header (fixed part plus variable
     * name/extra/comment fields). The values are not surfaced; this exists
     * to advance the stream. Returns true so the record loop continues.
     */
    async _readCentralDirectoryFileHeader() {
        const data = await this.pull(42);
        const vars = (0, parse_buffer_js_1.parse)(data, [
            ["versionMadeBy", 2],
            ["versionsNeededToExtract", 2],
            ["flags", 2],
            ["compressionMethod", 2],
            ["lastModifiedTime", 2],
            ["lastModifiedDate", 2],
            ["crc32", 4],
            ["compressedSize", 4],
            ["uncompressedSize", 4],
            ["fileNameLength", 2],
            ["extraFieldLength", 2],
            ["fileCommentLength", 2],
            ["diskNumber", 2],
            ["internalFileAttributes", 2],
            ["externalFileAttributes", 4],
            ["offsetToLocalFileHeader", 4]
        ]);
        await this.pull(vars.fileNameLength || 0);
        await this.pull(vars.extraFieldLength || 0);
        await this.pull(vars.fileCommentLength || 0);
        return true;
    }
    /**
     * Consumes the EOCD record (and its comment) and terminates both the
     * writable and readable sides of the parser.
     */
    async _readEndOfCentralDirectoryRecord() {
        const data = await this.pull(18);
        const vars = (0, parse_buffer_js_1.parse)(data, [
            ["diskNumber", 2],
            ["diskStart", 2],
            ["numberOfRecordsOnDisk", 2],
            ["numberOfRecords", 2],
            ["sizeOfCentralDirectory", 4],
            ["offsetToStartOfCentralDirectory", 4],
            ["commentLength", 2]
        ]);
        await this.pull(vars.commentLength || 0);
        this.end();
        this.push(null);
    }
    /**
     * Promise that settles when parsing finishes ("finish") or fails ("error").
     */
    promise() {
        return new Promise((resolve, reject) => {
            this.on("finish", resolve);
            this.on("error", reject);
        });
    }
}
exports.Parse = Parse;
338
/**
 * Factory helper mirroring `new Parse(opts)`.
 * @param opts Optional parser options (verbose, forceStream).
 * @returns A freshly constructed Parse stream.
 */
function createParse(opts) {
    const parser = new Parse(opts);
    return parser;
}