@cj-tech-master/excelts 1.6.3-canary.20251224193747.e89b618 → 1.6.3-canary.20251226035947.ef0b4f2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. package/README.md +5 -4
  2. package/README_zh.md +5 -4
  3. package/dist/browser/excelts.esm.js +2447 -3094
  4. package/dist/browser/excelts.esm.js.map +1 -1
  5. package/dist/browser/excelts.esm.min.js +23 -25
  6. package/dist/browser/excelts.iife.js +2448 -3095
  7. package/dist/browser/excelts.iife.js.map +1 -1
  8. package/dist/browser/excelts.iife.min.js +23 -25
  9. package/dist/cjs/csv/csv-core.js +2 -2
  10. package/dist/cjs/csv/csv-stream.js +1 -1
  11. package/dist/cjs/csv/csv.base.js +10 -27
  12. package/dist/cjs/csv/csv.js +4 -12
  13. package/dist/cjs/doc/column.js +21 -0
  14. package/dist/cjs/doc/worksheet.js +4 -0
  15. package/dist/cjs/stream/xlsx/workbook-writer.js +4 -4
  16. package/dist/cjs/stream/xlsx/worksheet-writer.js +8 -1
  17. package/dist/cjs/utils/datetime.js +648 -0
  18. package/dist/cjs/utils/parse-sax.js +1190 -12
  19. package/dist/cjs/utils/unzip/zip-parser.js +11 -0
  20. package/dist/cjs/utils/zip/compress.base.js +3 -0
  21. package/dist/cjs/utils/zip/compress.browser.js +74 -30
  22. package/dist/cjs/utils/zip/deflate-fallback.js +575 -0
  23. package/dist/cjs/utils/zip/streaming-zip.js +264 -0
  24. package/dist/cjs/xlsx/xform/sheet/col-breaks-xform.js +38 -0
  25. package/dist/cjs/xlsx/xform/sheet/page-breaks-xform.js +13 -1
  26. package/dist/cjs/xlsx/xform/sheet/row-breaks-xform.js +11 -13
  27. package/dist/cjs/xlsx/xform/sheet/worksheet-xform.js +7 -2
  28. package/dist/cjs/xlsx/xlsx.browser.js +10 -53
  29. package/dist/cjs/xlsx/xlsx.js +27 -59
  30. package/dist/esm/csv/csv-core.js +2 -2
  31. package/dist/esm/csv/csv-stream.js +1 -1
  32. package/dist/esm/csv/csv.base.js +10 -24
  33. package/dist/esm/csv/csv.js +4 -12
  34. package/dist/esm/doc/column.js +21 -0
  35. package/dist/esm/doc/worksheet.js +4 -0
  36. package/dist/esm/stream/xlsx/workbook-writer.js +1 -1
  37. package/dist/esm/stream/xlsx/worksheet-writer.js +8 -1
  38. package/dist/esm/utils/datetime.js +639 -0
  39. package/dist/esm/utils/parse-sax.js +1188 -12
  40. package/dist/esm/utils/unzip/zip-parser.js +11 -0
  41. package/dist/esm/utils/zip/compress.base.js +3 -0
  42. package/dist/esm/utils/zip/compress.browser.js +76 -31
  43. package/dist/esm/utils/zip/deflate-fallback.js +570 -0
  44. package/dist/esm/utils/zip/streaming-zip.js +259 -0
  45. package/dist/esm/xlsx/xform/sheet/col-breaks-xform.js +35 -0
  46. package/dist/esm/xlsx/xform/sheet/page-breaks-xform.js +13 -1
  47. package/dist/esm/xlsx/xform/sheet/row-breaks-xform.js +11 -13
  48. package/dist/esm/xlsx/xform/sheet/worksheet-xform.js +7 -2
  49. package/dist/esm/xlsx/xlsx.browser.js +10 -53
  50. package/dist/esm/xlsx/xlsx.js +27 -59
  51. package/dist/types/csv/csv-core.d.ts +6 -6
  52. package/dist/types/csv/csv.base.d.ts +4 -3
  53. package/dist/types/doc/column.d.ts +6 -0
  54. package/dist/types/doc/worksheet.d.ts +3 -1
  55. package/dist/types/stream/xlsx/workbook-writer.d.ts +1 -1
  56. package/dist/types/stream/xlsx/worksheet-writer.d.ts +3 -1
  57. package/dist/types/types.d.ts +6 -0
  58. package/dist/types/utils/datetime.d.ts +85 -0
  59. package/dist/types/utils/parse-sax.d.ts +108 -1
  60. package/dist/types/utils/unzip/zip-parser.d.ts +5 -0
  61. package/dist/types/utils/zip/compress.base.d.ts +3 -0
  62. package/dist/types/utils/zip/compress.browser.d.ts +27 -18
  63. package/dist/types/utils/zip/deflate-fallback.d.ts +39 -0
  64. package/dist/types/utils/zip/streaming-zip.d.ts +96 -0
  65. package/dist/types/xlsx/xform/sheet/col-breaks-xform.d.ts +16 -0
  66. package/dist/types/xlsx/xform/sheet/page-breaks-xform.d.ts +4 -0
  67. package/dist/types/xlsx/xform/sheet/row-breaks-xform.d.ts +4 -0
  68. package/package.json +7 -28
@@ -0,0 +1,264 @@
+ "use strict";
+ /**
+  * Streaming ZIP creator - fflate-compatible API
+  *
+  * This module provides a streaming ZIP API compatible with fflate's Zip/ZipDeflate,
+  * but uses native zlib compression for better performance.
+  *
+  * Usage:
+  * ```ts
+  * const zip = new StreamingZip((err, data, final) => {
+  *   if (err) handleError(err);
+  *   else {
+  *     stream.write(data);
+  *     if (final) stream.end();
+  *   }
+  * });
+  *
+  * const file = new ZipDeflateFile("path/file.txt", { level: 6 });
+  * zip.add(file);
+  * file.push(data1);
+  * file.push(data2, true); // true = final chunk
+  *
+  * zip.end();
+  * ```
+  */
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.ZipDeflate = exports.Zip = exports.StreamingZip = exports.ZipDeflateFile = void 0;
+ const crc32_1 = require("./crc32");
+ const compress_1 = require("./compress");
+ // ZIP signature constants
+ const LOCAL_FILE_HEADER_SIG = 0x04034b50;
+ const CENTRAL_DIR_HEADER_SIG = 0x02014b50;
+ const END_OF_CENTRAL_DIR_SIG = 0x06054b50;
+ // ZIP version constants
+ const VERSION_NEEDED = 20; // 2.0 - supports DEFLATE
+ const VERSION_MADE_BY = 20; // 2.0
+ // Compression methods
+ const COMPRESSION_STORE = 0;
+ const COMPRESSION_DEFLATE = 8;
+ const encoder = new TextEncoder();
+ /**
+  * Convert Date to DOS time format
+  */
+ function dateToDos(date) {
+     const dosTime = ((date.getHours() & 0x1f) << 11) |
+         ((date.getMinutes() & 0x3f) << 5) |
+         ((date.getSeconds() >> 1) & 0x1f);
+     const dosDate = (((date.getFullYear() - 1980) & 0x7f) << 9) |
+         (((date.getMonth() + 1) & 0x0f) << 5) |
+         (date.getDate() & 0x1f);
+     return [dosTime, dosDate];
+ }
+ /**
+  * ZipDeflate-compatible file stream
+  * Collects data chunks, compresses on finalization
+  */
+ class ZipDeflateFile {
+     constructor(name, options) {
+         this.chunks = [];
+         this.totalSize = 0;
+         this.finalized = false;
+         this._ondata = null;
+         this.name = name;
+         this.level = options?.level ?? 6;
+     }
+     /**
+      * Set data callback (called by StreamingZip)
+      */
+     set ondata(cb) {
+         this._ondata = cb;
+     }
+     /**
+      * Push data to the file
+      * @param data - Data chunk
+      * @param final - Whether this is the final chunk
+      */
+     push(data, final = false) {
+         if (this.finalized) {
+             throw new Error("Cannot push to finalized ZipDeflateFile");
+         }
+         if (data.length > 0) {
+             this.chunks.push(data);
+             this.totalSize += data.length;
+         }
+         if (final) {
+             this.finalized = true;
+             this._flush();
+         }
+     }
+     /**
+      * Flush collected data through compression and emit
+      */
+     _flush() {
+         if (!this._ondata) {
+             return;
+         }
+         // Combine chunks
+         let uncompressed;
+         if (this.chunks.length === 0) {
+             uncompressed = new Uint8Array(0);
+         }
+         else if (this.chunks.length === 1) {
+             uncompressed = this.chunks[0];
+         }
+         else {
+             uncompressed = new Uint8Array(this.totalSize);
+             let offset = 0;
+             for (const chunk of this.chunks) {
+                 uncompressed.set(chunk, offset);
+                 offset += chunk.length;
+             }
+         }
+         // Compress if level > 0 and data is not empty
+         const shouldCompress = this.level > 0 && uncompressed.length > 0;
+         const compressed = shouldCompress
+             ? (0, compress_1.compressSync)(uncompressed, { level: this.level })
+             : uncompressed;
+         // Build local file header + data
+         const nameBytes = encoder.encode(this.name);
+         const crcValue = (0, crc32_1.crc32)(uncompressed);
+         const [dosTime, dosDate] = dateToDos(new Date());
+         // Local file header (30 bytes + filename)
+         const header = new Uint8Array(30 + nameBytes.length);
+         const view = new DataView(header.buffer);
+         view.setUint32(0, LOCAL_FILE_HEADER_SIG, true);
+         view.setUint16(4, VERSION_NEEDED, true);
+         view.setUint16(6, 0x0800, true); // UTF-8 flag
+         view.setUint16(8, shouldCompress ? COMPRESSION_DEFLATE : COMPRESSION_STORE, true);
+         view.setUint16(10, dosTime, true);
+         view.setUint16(12, dosDate, true);
+         view.setUint32(14, crcValue, true);
+         view.setUint32(18, compressed.length, true);
+         view.setUint32(22, uncompressed.length, true);
+         view.setUint16(26, nameBytes.length, true);
+         view.setUint16(28, 0, true); // Extra field length
+         header.set(nameBytes, 30);
+         // Store info for central directory BEFORE emitting data
+         // (StreamingZip reads this in the final callback)
+         this._entryInfo = {
+             name: nameBytes,
+             crc: crcValue,
+             compressedSize: compressed.length,
+             uncompressedSize: uncompressed.length,
+             compressionMethod: shouldCompress ? COMPRESSION_DEFLATE : COMPRESSION_STORE,
+             dosTime,
+             dosDate,
+             offset: -1 // Will be set by StreamingZip
+         };
+         // Emit header
+         this._ondata(header, false);
+         // Emit compressed data (final chunk)
+         this._ondata(compressed, true);
+         // Clear chunks for GC
+         this.chunks.length = 0;
+     }
+     /**
+      * Get entry info (called by StreamingZip after finalization)
+      */
+     getEntryInfo() {
+         return this._entryInfo || null;
+     }
+ }
+ exports.ZipDeflateFile = ZipDeflateFile;
+ exports.ZipDeflate = ZipDeflateFile;
+ /**
+  * Streaming ZIP - fflate Zip-compatible API
+  * Creates ZIP files in a streaming manner
+  */
+ class StreamingZip {
+     /**
+      * Create a streaming ZIP
+      * @param callback - Called with (err, data, final) as data becomes available
+      */
+     constructor(callback) {
+         this.entries = [];
+         this.currentOffset = 0;
+         this.ended = false;
+         this.callback = callback;
+     }
+     /**
+      * Add a file to the ZIP
+      * @param file - ZipDeflateFile instance
+      */
+     add(file) {
+         if (this.ended) {
+             throw new Error("Cannot add files after calling end()");
+         }
+         // Capture offset when first data is written, not when add() is called
+         // This is important because streaming files may have data pushed later
+         let startOffset = -1;
+         file.ondata = (data, final) => {
+             // Capture offset on first data chunk
+             if (startOffset === -1) {
+                 startOffset = this.currentOffset;
+             }
+             this.currentOffset += data.length;
+             this.callback(null, data, false);
+             if (final) {
+                 // Get entry info and set offset
+                 const entryInfo = file.getEntryInfo();
+                 if (entryInfo) {
+                     entryInfo.offset = startOffset;
+                     this.entries.push(entryInfo);
+                 }
+             }
+         };
+     }
+     /**
+      * Finalize the ZIP
+      * Writes central directory and end-of-central-directory record
+      */
+     end() {
+         if (this.ended) {
+             return;
+         }
+         this.ended = true;
+         const centralDirOffset = this.currentOffset;
+         const centralDirChunks = [];
+         // Build central directory headers
+         for (const entry of this.entries) {
+             const header = new Uint8Array(46 + entry.name.length);
+             const view = new DataView(header.buffer);
+             view.setUint32(0, CENTRAL_DIR_HEADER_SIG, true);
+             view.setUint16(4, VERSION_MADE_BY, true);
+             view.setUint16(6, VERSION_NEEDED, true);
+             view.setUint16(8, 0x0800, true); // UTF-8 flag
+             view.setUint16(10, entry.compressionMethod, true);
+             view.setUint16(12, entry.dosTime, true);
+             view.setUint16(14, entry.dosDate, true);
+             view.setUint32(16, entry.crc, true);
+             view.setUint32(20, entry.compressedSize, true);
+             view.setUint32(24, entry.uncompressedSize, true);
+             view.setUint16(28, entry.name.length, true);
+             view.setUint16(30, 0, true); // Extra field length
+             view.setUint16(32, 0, true); // Comment length
+             view.setUint16(34, 0, true); // Disk number start
+             view.setUint16(36, 0, true); // Internal file attributes
+             view.setUint32(38, 0, true); // External file attributes
+             view.setUint32(42, entry.offset, true);
+             header.set(entry.name, 46);
+             centralDirChunks.push(header);
+         }
+         // Emit central directory
+         for (const chunk of centralDirChunks) {
+             this.callback(null, chunk, false);
+         }
+         const centralDirSize = centralDirChunks.reduce((sum, c) => sum + c.length, 0);
+         // Build end of central directory
+         const eocd = new Uint8Array(22);
+         const eocdView = new DataView(eocd.buffer);
+         eocdView.setUint32(0, END_OF_CENTRAL_DIR_SIG, true);
+         eocdView.setUint16(4, 0, true); // Disk number
+         eocdView.setUint16(6, 0, true); // Disk with central dir
+         eocdView.setUint16(8, this.entries.length, true);
+         eocdView.setUint16(10, this.entries.length, true);
+         eocdView.setUint32(12, centralDirSize, true);
+         eocdView.setUint32(16, centralDirOffset, true);
+         eocdView.setUint16(20, 0, true); // Comment length
+         // Emit end of central directory (final chunk)
+         this.callback(null, eocd, true);
+     }
+ }
+ exports.StreamingZip = StreamingZip;
+ exports.Zip = StreamingZip;
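For orientation, here is a minimal sketch that exercises the call pattern documented in the new module's own header comment. The relative import path, the entry name, and the in-memory collection of emitted chunks are illustrative assumptions; only the StreamingZip/ZipDeflateFile API itself comes from the code above.

```ts
// Sketch only: drives the new streaming ZIP writer exactly as its header comment
// describes. Import path and entry name are illustrative, not taken from the diff.
import { StreamingZip, ZipDeflateFile } from "./streaming-zip";

const parts: Uint8Array[] = [];
const zip = new StreamingZip((err: Error | null, data: Uint8Array, final: boolean) => {
  if (err) throw err;
  parts.push(data); // local header, deflated data, central directory, EOCD
  if (final) {
    // final === true only for the end-of-central-directory record written by zip.end()
    const size = parts.reduce((n, p) => n + p.length, 0);
    console.log(`archive complete: ${size} bytes`);
  }
});

const entry = new ZipDeflateFile("xl/worksheets/sheet1.xml", { level: 6 });
zip.add(entry); // wires entry.ondata into the archive and tracks its offset
entry.push(new TextEncoder().encode("<worksheet/>"), true); // final chunk: compress + emit
zip.end(); // appends the central directory and end-of-central-directory record
```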
@@ -0,0 +1,38 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.ColBreaksXform = void 0;
+ const page_breaks_xform_1 = require("./page-breaks-xform");
+ const list_xform_1 = require("../list-xform");
+ /**
+  * Xform for column page breaks (colBreaks element in worksheet XML)
+  * Used to define manual page breaks between columns when printing.
+  *
+  * XML structure:
+  *   <colBreaks count="3" manualBreakCount="3">
+  *     <brk id="3" max="1048575" man="1"/>
+  *     <brk id="6" max="1048575" man="1"/>
+  *   </colBreaks>
+  */
+ class ColBreaksXform extends list_xform_1.ListXform {
+     constructor() {
+         super({
+             tag: "colBreaks",
+             count: true,
+             childXform: new page_breaks_xform_1.PageBreaksXform()
+         });
+     }
+     // Override to add manualBreakCount attribute required by Excel
+     render(xmlStream, model) {
+         if (model && model.length) {
+             xmlStream.openNode(this.tag, this.$);
+             xmlStream.addAttribute(this.$count, model.length);
+             xmlStream.addAttribute("manualBreakCount", model.length);
+             const { childXform } = this;
+             for (const childModel of model) {
+                 childXform.render(xmlStream, childModel);
+             }
+             xmlStream.closeNode();
+         }
+     }
+ }
+ exports.ColBreaksXform = ColBreaksXform;
@@ -2,6 +2,10 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.PageBreaksXform = void 0;
  const base_xform_1 = require("../base-xform");
+ /**
+  * Xform for individual page break (brk element)
+  * Used by both RowBreaksXform and ColBreaksXform
+  */
  class PageBreaksXform extends base_xform_1.BaseXform {
      get tag() {
          return "brk";
@@ -11,7 +15,15 @@ class PageBreaksXform extends base_xform_1.BaseXform {
      }
      parseOpen(node) {
          if (node.name === "brk") {
-             this.model = node.attributes.ref;
+             const { id, max, man, min } = node.attributes;
+             this.model = {
+                 id: +id,
+                 max: +max,
+                 man: +man
+             };
+             if (min !== undefined) {
+                 this.model.min = +min;
+             }
              return true;
          }
          return false;
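The brk parser now returns a structured object rather than the raw ref attribute. A rough TypeScript shape for that model, inferred from parseOpen above (the interface name is mine, not the package's):

```ts
// Inferred from PageBreaksXform.parseOpen in this diff; the interface name is
// illustrative and is not declared anywhere in the package.
interface PageBreakModel {
  id: number;   // brk element's id attribute, coerced with unary +
  max: number;  // max attribute (e.g. 1048575 in the colBreaks example above)
  man: number;  // man attribute, 1 for manual breaks
  min?: number; // set only when the brk element carries a min attribute
}

// Matches <brk id="3" max="1048575" man="1"/> from the ColBreaksXform doc comment.
const brk: PageBreakModel = { id: 3, max: 1048575, man: 1 };
```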
@@ -3,32 +3,30 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.RowBreaksXform = void 0;
  const page_breaks_xform_1 = require("./page-breaks-xform");
  const list_xform_1 = require("../list-xform");
+ /**
+  * Xform for row page breaks (rowBreaks element in worksheet XML)
+  * Used to define manual page breaks between rows when printing.
+  */
  class RowBreaksXform extends list_xform_1.ListXform {
      constructor() {
-         const options = {
+         super({
              tag: "rowBreaks",
              count: true,
              childXform: new page_breaks_xform_1.PageBreaksXform()
-         };
-         super(options);
+         });
      }
-     // get tag() { return 'rowBreaks'; }
+     // Override to add manualBreakCount attribute required by Excel
      render(xmlStream, model) {
          if (model && model.length) {
              xmlStream.openNode(this.tag, this.$);
-             if (this.count) {
-                 xmlStream.addAttribute(this.$count, model.length);
-                 xmlStream.addAttribute("manualBreakCount", model.length);
-             }
+             xmlStream.addAttribute(this.$count, model.length);
+             xmlStream.addAttribute("manualBreakCount", model.length);
              const { childXform } = this;
-             model.forEach((childModel) => {
+             for (const childModel of model) {
                  childXform.render(xmlStream, childModel);
-             });
+             }
              xmlStream.closeNode();
          }
-         else if (this.empty) {
-             xmlStream.leafNode(this.tag);
-         }
      }
  }
  exports.RowBreaksXform = RowBreaksXform;
@@ -25,6 +25,7 @@ const picture_xform_1 = require("./picture-xform");
  const drawing_xform_1 = require("./drawing-xform");
  const table_part_xform_1 = require("./table-part-xform");
  const row_breaks_xform_1 = require("./row-breaks-xform");
+ const col_breaks_xform_1 = require("./col-breaks-xform");
  const header_footer_xform_1 = require("./header-footer-xform");
  const conditional_formattings_xform_1 = require("./cf/conditional-formattings-xform");
  const ext_lst_xform_1 = require("./ext-lst-xform");
@@ -111,6 +112,7 @@ class WorkSheetXform extends base_xform_1.BaseXform {
                  childXform: new merge_cell_xform_1.MergeCellXform()
              }),
              rowBreaks: new row_breaks_xform_1.RowBreaksXform(),
+             colBreaks: new col_breaks_xform_1.ColBreaksXform(),
              hyperlinks: new list_xform_1.ListXform({
                  tag: "hyperlinks",
                  count: false,
@@ -327,7 +329,8 @@
          this.map.pageSetup.render(xmlStream, model.pageSetup);
          this.map.headerFooter.render(xmlStream, model.headerFooter);
          this.map.rowBreaks.render(xmlStream, model.rowBreaks);
-         this.map.drawing.render(xmlStream, model.drawing); // Note: must be after rowBreaks
+         this.map.colBreaks.render(xmlStream, model.colBreaks);
+         this.map.drawing.render(xmlStream, model.drawing); // Note: must be after rowBreaks/colBreaks
          this.map.picture.render(xmlStream, model.background); // Note: must be after drawing
          this.map.tableParts.render(xmlStream, model.tables);
          this.map.extLst.render(xmlStream, model);
@@ -402,7 +405,9 @@
              background: this.map.picture.model,
              drawing: this.map.drawing.model,
              tables: this.map.tableParts.model,
-             conditionalFormattings
+             conditionalFormattings,
+             rowBreaks: this.map.rowBreaks.model || [],
+             colBreaks: this.map.colBreaks.model || []
          };
          if (this.map.autoFilter.model) {
              this.model.autoFilter = this.map.autoFilter.model;
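With the reconcile change above, the parsed worksheet model always carries both break lists, defaulting to empty arrays. A minimal sketch of just that slice of the model (every other field elided; the type alias is mine):

```ts
// Only the two properties touched by this diff are shown; the real reconciled
// model also carries drawing, tables, conditionalFormattings, and more.
type PageBreak = { id: number; max: number; man: number; min?: number };

const breaksSlice: { rowBreaks: PageBreak[]; colBreaks: PageBreak[] } = {
  rowBreaks: [], // this.map.rowBreaks.model || []
  colBreaks: [], // this.map.colBreaks.model || []
};
```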
@@ -12,7 +12,7 @@
   */
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.XLSX = void 0;
- const fflate_1 = require("fflate");
+ const zip_parser_1 = require("../utils/unzip/zip-parser");
  const zip_stream_browser_1 = require("../utils/zip-stream.browser");
  const stream_buf_browser_1 = require("../utils/stream-buf.browser");
  const utils_1 = require("../utils/utils");
@@ -102,58 +102,15 @@ class XLSX extends xlsx_base_1.XLSXBase {
          else {
              buffer = new Uint8Array(data);
          }
-         const allFiles = {};
-         await new Promise((resolve, reject) => {
-             let filesProcessed = 0;
-             let zipEnded = false;
-             let filesStarted = 0;
-             const checkCompletion = () => {
-                 if (zipEnded && filesProcessed === filesStarted) {
-                     resolve();
-                 }
-             };
-             const unzipper = new fflate_1.Unzip((file) => {
-                 filesStarted++;
-                 const fileChunks = [];
-                 let totalLength = 0;
-                 file.ondata = (err, fileData, final) => {
-                     if (err) {
-                         reject(err);
-                         return;
-                     }
-                     if (fileData) {
-                         fileChunks.push(fileData);
-                         totalLength += fileData.length;
-                     }
-                     if (final) {
-                         if (fileChunks.length === 1) {
-                             allFiles[file.name] = fileChunks[0];
-                         }
-                         else if (fileChunks.length > 1) {
-                             const fullData = new Uint8Array(totalLength);
-                             let offset = 0;
-                             for (const chunk of fileChunks) {
-                                 fullData.set(chunk, offset);
-                                 offset += chunk.length;
-                             }
-                             allFiles[file.name] = fullData;
-                         }
-                         else {
-                             allFiles[file.name] = new Uint8Array(0);
-                         }
-                         filesProcessed++;
-                         fileChunks.length = 0;
-                         checkCompletion();
-                     }
-                 };
-                 file.start();
-             });
-             unzipper.register(fflate_1.UnzipInflate);
-             unzipper.push(buffer, true);
-             zipEnded = true;
-             checkCompletion();
-         });
-         return this.loadFromFiles(allFiles, options);
+         // Use native ZipParser for extraction
+         const parser = new zip_parser_1.ZipParser(buffer);
+         const allFiles = await parser.extractAll();
+         // Convert Map to Record for loadFromFiles
+         const filesRecord = {};
+         for (const [path, content] of allFiles) {
+             filesRecord[path] = content;
+         }
+         return this.loadFromFiles(filesRecord, options);
      }
      /**
       * Write workbook to buffer
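Both load paths now hand the whole archive to the new ZipParser. The browser version reduces to roughly the steps below; ZipParser and extractAll() returning a Map come from the code above, while the standalone wrapper function and its name are illustrative.

```ts
// Condensed restatement of the new browser load path; the wrapper function is
// illustrative, only ZipParser/extractAll() come from the diff above.
import { ZipParser } from "./utils/unzip/zip-parser";

async function unzipToRecord(buffer: Uint8Array): Promise<Record<string, Uint8Array>> {
  const parser = new ZipParser(buffer);
  const files = await parser.extractAll(); // Map<path, Uint8Array>
  const record: Record<string, Uint8Array> = {};
  for (const [path, content] of files) {
    record[path] = content; // loadFromFiles expects a plain object keyed by path
  }
  return record;
}
```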
@@ -16,8 +16,8 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.XLSX = void 0;
  const fs_1 = __importDefault(require("fs"));
- const fflate_1 = require("fflate");
  const stream_1 = require("stream");
+ const zip_parser_1 = require("../utils/unzip/zip-parser");
  const zip_stream_1 = require("../utils/zip-stream");
  const stream_buf_1 = require("../utils/stream-buf");
  const utils_1 = require("../utils/utils");
@@ -106,76 +106,44 @@ class XLSX extends xlsx_base_1.XLSXBase {
      // Node.js specific: Stream operations
      // ===========================================================================
      async read(stream, options) {
-         const allFiles = {};
+         // Collect all stream data into a single buffer
+         const chunks = [];
          await new Promise((resolve, reject) => {
-             let filesProcessed = 0;
-             let zipEnded = false;
-             let filesStarted = 0;
-             const cleanup = () => {
-                 stream.removeListener("data", onData);
-                 stream.removeListener("end", onEnd);
-                 stream.removeListener("error", onError);
-             };
-             const checkCompletion = () => {
-                 if (zipEnded && filesProcessed === filesStarted) {
-                     cleanup();
-                     resolve();
-                 }
-             };
-             const unzipper = new fflate_1.Unzip((file) => {
-                 filesStarted++;
-                 const fileChunks = [];
-                 let totalLength = 0;
-                 file.ondata = (err, data, final) => {
-                     if (err) {
-                         cleanup();
-                         reject(err);
-                         return;
-                     }
-                     if (data) {
-                         fileChunks.push(data);
-                         totalLength += data.length;
-                     }
-                     if (final) {
-                         if (fileChunks.length === 1) {
-                             allFiles[file.name] = fileChunks[0];
-                         }
-                         else if (fileChunks.length > 1) {
-                             const fullData = new Uint8Array(totalLength);
-                             let offset = 0;
-                             for (const chunk of fileChunks) {
-                                 fullData.set(chunk, offset);
-                                 offset += chunk.length;
-                             }
-                             allFiles[file.name] = fullData;
-                         }
-                         else {
-                             allFiles[file.name] = new Uint8Array(0);
-                         }
-                         filesProcessed++;
-                         fileChunks.length = 0;
-                         checkCompletion();
-                     }
-                 };
-                 file.start();
-             });
-             unzipper.register(fflate_1.UnzipInflate);
              const onData = (chunk) => {
-                 unzipper.push(chunk);
+                 chunks.push(chunk);
              };
              const onEnd = () => {
-                 unzipper.push(new Uint8Array(0), true);
-                 zipEnded = true;
-                 checkCompletion();
+                 stream.removeListener("data", onData);
+                 stream.removeListener("end", onEnd);
+                 stream.removeListener("error", onError);
+                 resolve();
              };
              const onError = (err) => {
-                 cleanup();
+                 stream.removeListener("data", onData);
+                 stream.removeListener("end", onEnd);
+                 stream.removeListener("error", onError);
                  reject(err);
              };
              stream.on("data", onData);
              stream.on("end", onEnd);
              stream.on("error", onError);
          });
+         // Combine chunks into a single buffer
+         const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
+         const buffer = new Uint8Array(totalLength);
+         let offset = 0;
+         for (const chunk of chunks) {
+             buffer.set(chunk, offset);
+             offset += chunk.length;
+         }
+         // Use native ZipParser for extraction
+         const parser = new zip_parser_1.ZipParser(buffer);
+         const filesMap = await parser.extractAll();
+         // Convert Map to Record for loadFromFiles
+         const allFiles = {};
+         for (const [path, content] of filesMap) {
+             allFiles[path] = content;
+         }
          return this.loadFromFiles(allFiles, options);
      }
      async write(stream, options) {
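The Node reader now buffers the whole incoming stream before parsing instead of feeding chunks to fflate incrementally, so peak memory is proportional to the archive size. The chunk-concatenation step used above is restated below as a standalone helper (the helper name is mine).

```ts
// Same concatenation logic as the new read() implementation above, lifted into a
// standalone helper for clarity; the name concatChunks is illustrative.
function concatChunks(chunks: Uint8Array[]): Uint8Array {
  const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
  const buffer = new Uint8Array(totalLength);
  let offset = 0;
  for (const chunk of chunks) {
    buffer.set(chunk, offset); // copy each chunk at its running offset
    offset += chunk.length;
  }
  return buffer;
}
```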
@@ -4,7 +4,7 @@
   * A lightweight, cross-platform CSV implementation that works in both
   * Node.js and Browser environments with zero dependencies.
   *
-  * Compatible with fast-csv API for drop-in replacement.
+  * High-performance RFC 4180 compliant CSV parser and formatter.
   *
   * @see https://tools.ietf.org/html/rfc4180
   */
@@ -153,7 +153,7 @@ export function parseCsv(input, options = {}) {
                  return false;
              }
              else {
-                 // Default: trim extra columns (matches fast-csv default behavior)
+                 // Default: trim extra columns
                  row.length = headerRow.length;
              }
          }
@@ -559,7 +559,7 @@ export class CsvFormatterStream extends Transform {
          });
          const formattedRow = fields.join(this.delimiter);
          // Use row delimiter as prefix (except for first row)
-         // This matches fast-csv behavior where rowDelimiter separates rows
+         // rowDelimiter separates rows, no trailing delimiter by default
          if (this.rowCount === 0) {
              this.rowCount++;
              return formattedRow;
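The formatter comment rewritten above describes prefixing each row after the first with the row delimiter rather than appending one, which keeps the output free of a trailing newline. A standalone restatement of that decision follows (field quoting omitted; this is not the package's class):

```ts
// Restates the prefix-not-suffix row-delimiter logic described in the
// CsvFormatterStream comment above; not the package's implementation.
function joinRows(rows: string[][], delimiter = ",", rowDelimiter = "\n"): string {
  let out = "";
  rows.forEach((fields, index) => {
    const formattedRow = fields.join(delimiter);
    // First row gets no prefix; every later row is preceded by rowDelimiter,
    // so the result never ends with a trailing row delimiter.
    out += index === 0 ? formattedRow : rowDelimiter + formattedRow;
  });
  return out;
}

// joinRows([["a", "b"], ["1", "2"]]) === "a,b\n1,2"
```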