@cj-tech-master/excelts 1.6.3-canary.20251224193747.e89b618 → 1.6.3-canary.20251226035947.ef0b4f2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -4
- package/README_zh.md +5 -4
- package/dist/browser/excelts.esm.js +2447 -3094
- package/dist/browser/excelts.esm.js.map +1 -1
- package/dist/browser/excelts.esm.min.js +23 -25
- package/dist/browser/excelts.iife.js +2448 -3095
- package/dist/browser/excelts.iife.js.map +1 -1
- package/dist/browser/excelts.iife.min.js +23 -25
- package/dist/cjs/csv/csv-core.js +2 -2
- package/dist/cjs/csv/csv-stream.js +1 -1
- package/dist/cjs/csv/csv.base.js +10 -27
- package/dist/cjs/csv/csv.js +4 -12
- package/dist/cjs/doc/column.js +21 -0
- package/dist/cjs/doc/worksheet.js +4 -0
- package/dist/cjs/stream/xlsx/workbook-writer.js +4 -4
- package/dist/cjs/stream/xlsx/worksheet-writer.js +8 -1
- package/dist/cjs/utils/datetime.js +648 -0
- package/dist/cjs/utils/parse-sax.js +1190 -12
- package/dist/cjs/utils/unzip/zip-parser.js +11 -0
- package/dist/cjs/utils/zip/compress.base.js +3 -0
- package/dist/cjs/utils/zip/compress.browser.js +74 -30
- package/dist/cjs/utils/zip/deflate-fallback.js +575 -0
- package/dist/cjs/utils/zip/streaming-zip.js +264 -0
- package/dist/cjs/xlsx/xform/sheet/col-breaks-xform.js +38 -0
- package/dist/cjs/xlsx/xform/sheet/page-breaks-xform.js +13 -1
- package/dist/cjs/xlsx/xform/sheet/row-breaks-xform.js +11 -13
- package/dist/cjs/xlsx/xform/sheet/worksheet-xform.js +7 -2
- package/dist/cjs/xlsx/xlsx.browser.js +10 -53
- package/dist/cjs/xlsx/xlsx.js +27 -59
- package/dist/esm/csv/csv-core.js +2 -2
- package/dist/esm/csv/csv-stream.js +1 -1
- package/dist/esm/csv/csv.base.js +10 -24
- package/dist/esm/csv/csv.js +4 -12
- package/dist/esm/doc/column.js +21 -0
- package/dist/esm/doc/worksheet.js +4 -0
- package/dist/esm/stream/xlsx/workbook-writer.js +1 -1
- package/dist/esm/stream/xlsx/worksheet-writer.js +8 -1
- package/dist/esm/utils/datetime.js +639 -0
- package/dist/esm/utils/parse-sax.js +1188 -12
- package/dist/esm/utils/unzip/zip-parser.js +11 -0
- package/dist/esm/utils/zip/compress.base.js +3 -0
- package/dist/esm/utils/zip/compress.browser.js +76 -31
- package/dist/esm/utils/zip/deflate-fallback.js +570 -0
- package/dist/esm/utils/zip/streaming-zip.js +259 -0
- package/dist/esm/xlsx/xform/sheet/col-breaks-xform.js +35 -0
- package/dist/esm/xlsx/xform/sheet/page-breaks-xform.js +13 -1
- package/dist/esm/xlsx/xform/sheet/row-breaks-xform.js +11 -13
- package/dist/esm/xlsx/xform/sheet/worksheet-xform.js +7 -2
- package/dist/esm/xlsx/xlsx.browser.js +10 -53
- package/dist/esm/xlsx/xlsx.js +27 -59
- package/dist/types/csv/csv-core.d.ts +6 -6
- package/dist/types/csv/csv.base.d.ts +4 -3
- package/dist/types/doc/column.d.ts +6 -0
- package/dist/types/doc/worksheet.d.ts +3 -1
- package/dist/types/stream/xlsx/workbook-writer.d.ts +1 -1
- package/dist/types/stream/xlsx/worksheet-writer.d.ts +3 -1
- package/dist/types/types.d.ts +6 -0
- package/dist/types/utils/datetime.d.ts +85 -0
- package/dist/types/utils/parse-sax.d.ts +108 -1
- package/dist/types/utils/unzip/zip-parser.d.ts +5 -0
- package/dist/types/utils/zip/compress.base.d.ts +3 -0
- package/dist/types/utils/zip/compress.browser.d.ts +27 -18
- package/dist/types/utils/zip/deflate-fallback.d.ts +39 -0
- package/dist/types/utils/zip/streaming-zip.d.ts +96 -0
- package/dist/types/xlsx/xform/sheet/col-breaks-xform.d.ts +16 -0
- package/dist/types/xlsx/xform/sheet/page-breaks-xform.d.ts +4 -0
- package/dist/types/xlsx/xform/sheet/row-breaks-xform.d.ts +4 -0
- package/package.json +7 -28
|
@@ -0,0 +1,259 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Streaming ZIP creator - fflate-compatible API
|
|
3
|
+
*
|
|
4
|
+
* This module provides a streaming ZIP API compatible with fflate's Zip/ZipDeflate,
|
|
5
|
+
* but uses native zlib compression for better performance.
|
|
6
|
+
*
|
|
7
|
+
* Usage:
|
|
8
|
+
* ```ts
|
|
9
|
+
* const zip = new StreamingZip((err, data, final) => {
|
|
10
|
+
* if (err) handleError(err);
|
|
11
|
+
* else {
|
|
12
|
+
* stream.write(data);
|
|
13
|
+
* if (final) stream.end();
|
|
14
|
+
* }
|
|
15
|
+
* });
|
|
16
|
+
*
|
|
17
|
+
* const file = new ZipDeflateFile("path/file.txt", { level: 6 });
|
|
18
|
+
* zip.add(file);
|
|
19
|
+
* file.push(data1);
|
|
20
|
+
* file.push(data2, true); // true = final chunk
|
|
21
|
+
*
|
|
22
|
+
* zip.end();
|
|
23
|
+
* ```
|
|
24
|
+
*/
|
|
25
|
+
import { crc32 } from "./crc32.js";
|
|
26
|
+
import { compressSync } from "./compress.js";
|
|
27
|
+
// ZIP signature constants
|
|
28
|
+
const LOCAL_FILE_HEADER_SIG = 0x04034b50;
|
|
29
|
+
const CENTRAL_DIR_HEADER_SIG = 0x02014b50;
|
|
30
|
+
const END_OF_CENTRAL_DIR_SIG = 0x06054b50;
|
|
31
|
+
// ZIP version constants
|
|
32
|
+
const VERSION_NEEDED = 20; // 2.0 - supports DEFLATE
|
|
33
|
+
const VERSION_MADE_BY = 20; // 2.0
|
|
34
|
+
// Compression methods
|
|
35
|
+
const COMPRESSION_STORE = 0;
|
|
36
|
+
const COMPRESSION_DEFLATE = 8;
|
|
37
|
+
const encoder = new TextEncoder();
|
|
38
|
+
/**
|
|
39
|
+
* Convert Date to DOS time format
|
|
40
|
+
*/
|
|
41
|
+
function dateToDos(date) {
|
|
42
|
+
const dosTime = ((date.getHours() & 0x1f) << 11) |
|
|
43
|
+
((date.getMinutes() & 0x3f) << 5) |
|
|
44
|
+
((date.getSeconds() >> 1) & 0x1f);
|
|
45
|
+
const dosDate = (((date.getFullYear() - 1980) & 0x7f) << 9) |
|
|
46
|
+
(((date.getMonth() + 1) & 0x0f) << 5) |
|
|
47
|
+
(date.getDate() & 0x1f);
|
|
48
|
+
return [dosTime, dosDate];
|
|
49
|
+
}
|
|
50
|
+
/**
 * fflate ZipDeflate-compatible file entry for StreamingZip.
 *
 * Buffers pushed chunks until the final chunk arrives, then compresses the
 * whole payload at once (DEFLATE when level > 0, STORE otherwise) and emits
 * the ZIP local file header followed by the payload via the ondata callback.
 */
export class ZipDeflateFile {
    /**
     * @param name - Path of the entry inside the archive
     * @param options - Optional settings; `level` is the compression level
     *   (default 6, 0 disables compression)
     */
    constructor(name, options) {
        this.name = name;
        this.level = options?.level ?? 6;
        this.chunks = [];
        this.totalSize = 0;
        this.finalized = false;
        this._ondata = null;
    }
    /** Output callback, assigned by StreamingZip when the file is added. */
    set ondata(cb) {
        this._ondata = cb;
    }
    /**
     * Append a chunk of file content.
     * @param data - Content bytes
     * @param final - Pass true with the last chunk to finalize the entry
     * @throws Error if the entry has already been finalized
     */
    push(data, final = false) {
        if (this.finalized) {
            throw new Error("Cannot push to finalized ZipDeflateFile");
        }
        if (data.length > 0) {
            this.chunks.push(data);
            this.totalSize += data.length;
        }
        if (final) {
            this.finalized = true;
            this._flush();
        }
    }
    /**
     * Concatenate buffered chunks, compress, and emit the local file header
     * followed by the payload. No-op when no ondata callback is attached.
     */
    _flush() {
        if (!this._ondata) {
            return;
        }
        const uncompressed = this._concatChunks();
        // Empty payloads are stored uncompressed; DEFLATE only when level > 0.
        const useDeflate = this.level > 0 && uncompressed.length > 0;
        const payload = useDeflate
            ? compressSync(uncompressed, { level: this.level })
            : uncompressed;
        const nameBytes = encoder.encode(this.name);
        const checksum = crc32(uncompressed);
        const [dosTime, dosDate] = dateToDos(new Date());
        const method = useDeflate ? COMPRESSION_DEFLATE : COMPRESSION_STORE;
        // Local file header: 30 fixed bytes followed by the file name.
        const header = new Uint8Array(30 + nameBytes.length);
        const view = new DataView(header.buffer);
        view.setUint32(0, LOCAL_FILE_HEADER_SIG, true);
        view.setUint16(4, VERSION_NEEDED, true);
        view.setUint16(6, 0x0800, true); // General purpose flag: UTF-8 names
        view.setUint16(8, method, true);
        view.setUint16(10, dosTime, true);
        view.setUint16(12, dosDate, true);
        view.setUint32(14, checksum, true);
        view.setUint32(18, payload.length, true);
        view.setUint32(22, uncompressed.length, true);
        view.setUint16(26, nameBytes.length, true);
        view.setUint16(28, 0, true); // No extra field
        header.set(nameBytes, 30);
        // Record the central-directory info before emitting: StreamingZip
        // reads it from inside the final ondata callback.
        this._entryInfo = {
            name: nameBytes,
            crc: checksum,
            compressedSize: payload.length,
            uncompressedSize: uncompressed.length,
            compressionMethod: method,
            dosTime,
            dosDate,
            offset: -1 // Filled in by StreamingZip
        };
        this._ondata(header, false);
        this._ondata(payload, true);
        this.chunks.length = 0; // Release buffered input for GC
    }
    /** Join the buffered chunks into one contiguous Uint8Array. */
    _concatChunks() {
        if (this.chunks.length === 0) {
            return new Uint8Array(0);
        }
        if (this.chunks.length === 1) {
            return this.chunks[0];
        }
        const joined = new Uint8Array(this.totalSize);
        let cursor = 0;
        for (const part of this.chunks) {
            joined.set(part, cursor);
            cursor += part.length;
        }
        return joined;
    }
    /**
     * Entry metadata for the central directory
     * (null until the entry has been finalized).
     */
    getEntryInfo() {
        return this._entryInfo || null;
    }
}
|
|
160
|
+
/**
 * Streaming ZIP writer with an fflate Zip-compatible API.
 *
 * Files are added via add(); as each file emits its header and payload the
 * bytes are forwarded to the output callback. end() appends the central
 * directory and the end-of-central-directory record, then signals the
 * final chunk.
 */
export class StreamingZip {
    /**
     * @param callback - Invoked as (err, data, final) whenever output bytes
     *   are available; final is true only for the very last chunk.
     */
    constructor(callback) {
        this.callback = callback;
        this.entries = [];
        this.currentOffset = 0;
        this.ended = false;
    }
    /**
     * Register a file entry. Bytes the file emits are forwarded to the
     * output callback as they are produced.
     * @param file - ZipDeflateFile instance
     * @throws Error if end() has already been called
     */
    add(file) {
        if (this.ended) {
            throw new Error("Cannot add files after calling end()");
        }
        // The local-header offset is captured lazily, on the first emitted
        // chunk, because a streaming file may receive data long after add().
        let headerOffset = -1;
        file.ondata = (data, final) => {
            if (headerOffset === -1) {
                headerOffset = this.currentOffset;
            }
            this.currentOffset += data.length;
            this.callback(null, data, false);
            if (final) {
                const info = file.getEntryInfo();
                if (info) {
                    info.offset = headerOffset;
                    this.entries.push(info);
                }
            }
        };
    }
    /**
     * Finalize the archive: emit one central-directory record per entry,
     * then the end-of-central-directory record as the final chunk.
     * Calling end() more than once is a no-op.
     */
    end() {
        if (this.ended) {
            return;
        }
        this.ended = true;
        const dirStart = this.currentOffset;
        let dirSize = 0;
        for (const entry of this.entries) {
            const record = this._buildCentralHeader(entry);
            dirSize += record.length;
            this.callback(null, record, false);
        }
        this.callback(null, this._buildEocd(dirStart, dirSize), true);
    }
    /** Build a 46-byte central directory record plus file name for one entry. */
    _buildCentralHeader(entry) {
        const record = new Uint8Array(46 + entry.name.length);
        const view = new DataView(record.buffer);
        view.setUint32(0, CENTRAL_DIR_HEADER_SIG, true);
        view.setUint16(4, VERSION_MADE_BY, true);
        view.setUint16(6, VERSION_NEEDED, true);
        view.setUint16(8, 0x0800, true); // UTF-8 file name flag
        view.setUint16(10, entry.compressionMethod, true);
        view.setUint16(12, entry.dosTime, true);
        view.setUint16(14, entry.dosDate, true);
        view.setUint32(16, entry.crc, true);
        view.setUint32(20, entry.compressedSize, true);
        view.setUint32(24, entry.uncompressedSize, true);
        view.setUint16(28, entry.name.length, true);
        view.setUint16(30, 0, true); // Extra field length
        view.setUint16(32, 0, true); // Comment length
        view.setUint16(34, 0, true); // Disk number start
        view.setUint16(36, 0, true); // Internal file attributes
        view.setUint32(38, 0, true); // External file attributes
        view.setUint32(42, entry.offset, true);
        record.set(entry.name, 46);
        return record;
    }
    /** Build the 22-byte end-of-central-directory record. */
    _buildEocd(dirStart, dirSize) {
        const eocd = new Uint8Array(22);
        const view = new DataView(eocd.buffer);
        view.setUint32(0, END_OF_CENTRAL_DIR_SIG, true);
        view.setUint16(4, 0, true); // This disk number
        view.setUint16(6, 0, true); // Disk holding the central directory
        view.setUint16(8, this.entries.length, true);
        view.setUint16(10, this.entries.length, true);
        view.setUint32(12, dirSize, true);
        view.setUint32(16, dirStart, true);
        view.setUint16(20, 0, true); // Comment length
        return eocd;
    }
}
|
|
258
|
+
// Export aliases for fflate compatibility
|
|
259
|
+
export { StreamingZip as Zip, ZipDeflateFile as ZipDeflate };
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
import { PageBreaksXform } from "./page-breaks-xform.js";
|
|
2
|
+
import { ListXform } from "../list-xform.js";
|
|
3
|
+
/**
 * Xform for column page breaks (<colBreaks> in worksheet XML), defining
 * manual print page breaks between columns.
 *
 * XML structure:
 *   <colBreaks count="3" manualBreakCount="3">
 *     <brk id="3" max="1048575" man="1"/>
 *     <brk id="6" max="1048575" man="1"/>
 *   </colBreaks>
 */
class ColBreaksXform extends ListXform {
    constructor() {
        super({
            tag: "colBreaks",
            count: true,
            childXform: new PageBreaksXform()
        });
    }
    /**
     * Serialize the break list. Overrides ListXform.render to also emit the
     * manualBreakCount attribute, which Excel requires on break collections.
     * Emits nothing when the model is absent or empty.
     */
    render(xmlStream, model) {
        if (!model || !model.length) {
            return;
        }
        xmlStream.openNode(this.tag, this.$);
        xmlStream.addAttribute(this.$count, model.length);
        xmlStream.addAttribute("manualBreakCount", model.length);
        for (const breakModel of model) {
            this.childXform.render(xmlStream, breakModel);
        }
        xmlStream.closeNode();
    }
}
|
|
35
|
+
export { ColBreaksXform };
|
|
@@ -1,4 +1,8 @@
|
|
|
1
1
|
import { BaseXform } from "../base-xform.js";
|
|
2
|
+
/**
|
|
3
|
+
* Xform for individual page break (brk element)
|
|
4
|
+
* Used by both RowBreaksXform and ColBreaksXform
|
|
5
|
+
*/
|
|
2
6
|
class PageBreaksXform extends BaseXform {
|
|
3
7
|
get tag() {
|
|
4
8
|
return "brk";
|
|
@@ -8,7 +12,15 @@ class PageBreaksXform extends BaseXform {
|
|
|
8
12
|
}
|
|
9
13
|
parseOpen(node) {
|
|
10
14
|
if (node.name === "brk") {
|
|
11
|
-
|
|
15
|
+
const { id, max, man, min } = node.attributes;
|
|
16
|
+
this.model = {
|
|
17
|
+
id: +id,
|
|
18
|
+
max: +max,
|
|
19
|
+
man: +man
|
|
20
|
+
};
|
|
21
|
+
if (min !== undefined) {
|
|
22
|
+
this.model.min = +min;
|
|
23
|
+
}
|
|
12
24
|
return true;
|
|
13
25
|
}
|
|
14
26
|
return false;
|
|
@@ -1,31 +1,29 @@
|
|
|
1
1
|
import { PageBreaksXform } from "./page-breaks-xform.js";
|
|
2
2
|
import { ListXform } from "../list-xform.js";
|
|
3
|
+
/**
 * Xform for row page breaks (<rowBreaks> in worksheet XML), defining
 * manual print page breaks between rows.
 */
class RowBreaksXform extends ListXform {
    constructor() {
        super({
            tag: "rowBreaks",
            count: true,
            childXform: new PageBreaksXform()
        });
    }
    /**
     * Serialize the break list. Overrides ListXform.render to also emit the
     * manualBreakCount attribute, which Excel requires on break collections.
     * Emits nothing when the model is absent or empty.
     */
    render(xmlStream, model) {
        if (!model || !model.length) {
            return;
        }
        xmlStream.openNode(this.tag, this.$);
        xmlStream.addAttribute(this.$count, model.length);
        xmlStream.addAttribute("manualBreakCount", model.length);
        for (const breakModel of model) {
            this.childXform.render(xmlStream, breakModel);
        }
        xmlStream.closeNode();
    }
}
|
|
31
29
|
export { RowBreaksXform };
|
|
@@ -22,6 +22,7 @@ import { PictureXform } from "./picture-xform.js";
|
|
|
22
22
|
import { DrawingXform } from "./drawing-xform.js";
|
|
23
23
|
import { TablePartXform } from "./table-part-xform.js";
|
|
24
24
|
import { RowBreaksXform } from "./row-breaks-xform.js";
|
|
25
|
+
import { ColBreaksXform } from "./col-breaks-xform.js";
|
|
25
26
|
import { HeaderFooterXform } from "./header-footer-xform.js";
|
|
26
27
|
import { ConditionalFormattingsXform } from "./cf/conditional-formattings-xform.js";
|
|
27
28
|
import { ExtLstXform } from "./ext-lst-xform.js";
|
|
@@ -108,6 +109,7 @@ class WorkSheetXform extends BaseXform {
|
|
|
108
109
|
childXform: new MergeCellXform()
|
|
109
110
|
}),
|
|
110
111
|
rowBreaks: new RowBreaksXform(),
|
|
112
|
+
colBreaks: new ColBreaksXform(),
|
|
111
113
|
hyperlinks: new ListXform({
|
|
112
114
|
tag: "hyperlinks",
|
|
113
115
|
count: false,
|
|
@@ -324,7 +326,8 @@ class WorkSheetXform extends BaseXform {
|
|
|
324
326
|
this.map.pageSetup.render(xmlStream, model.pageSetup);
|
|
325
327
|
this.map.headerFooter.render(xmlStream, model.headerFooter);
|
|
326
328
|
this.map.rowBreaks.render(xmlStream, model.rowBreaks);
|
|
327
|
-
this.map.
|
|
329
|
+
this.map.colBreaks.render(xmlStream, model.colBreaks);
|
|
330
|
+
this.map.drawing.render(xmlStream, model.drawing); // Note: must be after rowBreaks/colBreaks
|
|
328
331
|
this.map.picture.render(xmlStream, model.background); // Note: must be after drawing
|
|
329
332
|
this.map.tableParts.render(xmlStream, model.tables);
|
|
330
333
|
this.map.extLst.render(xmlStream, model);
|
|
@@ -399,7 +402,9 @@ class WorkSheetXform extends BaseXform {
|
|
|
399
402
|
background: this.map.picture.model,
|
|
400
403
|
drawing: this.map.drawing.model,
|
|
401
404
|
tables: this.map.tableParts.model,
|
|
402
|
-
conditionalFormattings
|
|
405
|
+
conditionalFormattings,
|
|
406
|
+
rowBreaks: this.map.rowBreaks.model || [],
|
|
407
|
+
colBreaks: this.map.colBreaks.model || []
|
|
403
408
|
};
|
|
404
409
|
if (this.map.autoFilter.model) {
|
|
405
410
|
this.model.autoFilter = this.map.autoFilter.model;
|
|
@@ -9,7 +9,7 @@
|
|
|
9
9
|
* - readFile/writeFile (no file system)
|
|
10
10
|
* - read/write (no streams)
|
|
11
11
|
*/
|
|
12
|
-
import {
|
|
12
|
+
import { ZipParser } from "../utils/unzip/zip-parser.js";
|
|
13
13
|
import { ZipWriter } from "../utils/zip-stream.browser.js";
|
|
14
14
|
import { StreamBuf } from "../utils/stream-buf.browser.js";
|
|
15
15
|
import { bufferToString } from "../utils/utils.js";
|
|
@@ -99,58 +99,15 @@ class XLSX extends XLSXBase {
|
|
|
99
99
|
else {
|
|
100
100
|
buffer = new Uint8Array(data);
|
|
101
101
|
}
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
};
|
|
112
|
-
const unzipper = new Unzip((file) => {
|
|
113
|
-
filesStarted++;
|
|
114
|
-
const fileChunks = [];
|
|
115
|
-
let totalLength = 0;
|
|
116
|
-
file.ondata = (err, fileData, final) => {
|
|
117
|
-
if (err) {
|
|
118
|
-
reject(err);
|
|
119
|
-
return;
|
|
120
|
-
}
|
|
121
|
-
if (fileData) {
|
|
122
|
-
fileChunks.push(fileData);
|
|
123
|
-
totalLength += fileData.length;
|
|
124
|
-
}
|
|
125
|
-
if (final) {
|
|
126
|
-
if (fileChunks.length === 1) {
|
|
127
|
-
allFiles[file.name] = fileChunks[0];
|
|
128
|
-
}
|
|
129
|
-
else if (fileChunks.length > 1) {
|
|
130
|
-
const fullData = new Uint8Array(totalLength);
|
|
131
|
-
let offset = 0;
|
|
132
|
-
for (const chunk of fileChunks) {
|
|
133
|
-
fullData.set(chunk, offset);
|
|
134
|
-
offset += chunk.length;
|
|
135
|
-
}
|
|
136
|
-
allFiles[file.name] = fullData;
|
|
137
|
-
}
|
|
138
|
-
else {
|
|
139
|
-
allFiles[file.name] = new Uint8Array(0);
|
|
140
|
-
}
|
|
141
|
-
filesProcessed++;
|
|
142
|
-
fileChunks.length = 0;
|
|
143
|
-
checkCompletion();
|
|
144
|
-
}
|
|
145
|
-
};
|
|
146
|
-
file.start();
|
|
147
|
-
});
|
|
148
|
-
unzipper.register(UnzipInflate);
|
|
149
|
-
unzipper.push(buffer, true);
|
|
150
|
-
zipEnded = true;
|
|
151
|
-
checkCompletion();
|
|
152
|
-
});
|
|
153
|
-
return this.loadFromFiles(allFiles, options);
|
|
102
|
+
// Use native ZipParser for extraction
|
|
103
|
+
const parser = new ZipParser(buffer);
|
|
104
|
+
const allFiles = await parser.extractAll();
|
|
105
|
+
// Convert Map to Record for loadFromFiles
|
|
106
|
+
const filesRecord = {};
|
|
107
|
+
for (const [path, content] of allFiles) {
|
|
108
|
+
filesRecord[path] = content;
|
|
109
|
+
}
|
|
110
|
+
return this.loadFromFiles(filesRecord, options);
|
|
154
111
|
}
|
|
155
112
|
/**
|
|
156
113
|
* Write workbook to buffer
|
package/dist/esm/xlsx/xlsx.js
CHANGED
|
@@ -10,8 +10,8 @@
|
|
|
10
10
|
* - writeBuffer: Write to buffer
|
|
11
11
|
*/
|
|
12
12
|
import fs from "fs";
|
|
13
|
-
import { Unzip, UnzipInflate } from "fflate";
|
|
14
13
|
import { PassThrough } from "stream";
|
|
14
|
+
import { ZipParser } from "../utils/unzip/zip-parser.js";
|
|
15
15
|
import { ZipWriter } from "../utils/zip-stream.js";
|
|
16
16
|
import { StreamBuf } from "../utils/stream-buf.js";
|
|
17
17
|
import { fileExists, bufferToString } from "../utils/utils.js";
|
|
@@ -100,76 +100,44 @@ class XLSX extends XLSXBase {
|
|
|
100
100
|
// Node.js specific: Stream operations
|
|
101
101
|
// ===========================================================================
|
|
102
102
|
async read(stream, options) {
|
|
103
|
-
|
|
103
|
+
// Collect all stream data into a single buffer
|
|
104
|
+
const chunks = [];
|
|
104
105
|
await new Promise((resolve, reject) => {
|
|
105
|
-
let filesProcessed = 0;
|
|
106
|
-
let zipEnded = false;
|
|
107
|
-
let filesStarted = 0;
|
|
108
|
-
const cleanup = () => {
|
|
109
|
-
stream.removeListener("data", onData);
|
|
110
|
-
stream.removeListener("end", onEnd);
|
|
111
|
-
stream.removeListener("error", onError);
|
|
112
|
-
};
|
|
113
|
-
const checkCompletion = () => {
|
|
114
|
-
if (zipEnded && filesProcessed === filesStarted) {
|
|
115
|
-
cleanup();
|
|
116
|
-
resolve();
|
|
117
|
-
}
|
|
118
|
-
};
|
|
119
|
-
const unzipper = new Unzip((file) => {
|
|
120
|
-
filesStarted++;
|
|
121
|
-
const fileChunks = [];
|
|
122
|
-
let totalLength = 0;
|
|
123
|
-
file.ondata = (err, data, final) => {
|
|
124
|
-
if (err) {
|
|
125
|
-
cleanup();
|
|
126
|
-
reject(err);
|
|
127
|
-
return;
|
|
128
|
-
}
|
|
129
|
-
if (data) {
|
|
130
|
-
fileChunks.push(data);
|
|
131
|
-
totalLength += data.length;
|
|
132
|
-
}
|
|
133
|
-
if (final) {
|
|
134
|
-
if (fileChunks.length === 1) {
|
|
135
|
-
allFiles[file.name] = fileChunks[0];
|
|
136
|
-
}
|
|
137
|
-
else if (fileChunks.length > 1) {
|
|
138
|
-
const fullData = new Uint8Array(totalLength);
|
|
139
|
-
let offset = 0;
|
|
140
|
-
for (const chunk of fileChunks) {
|
|
141
|
-
fullData.set(chunk, offset);
|
|
142
|
-
offset += chunk.length;
|
|
143
|
-
}
|
|
144
|
-
allFiles[file.name] = fullData;
|
|
145
|
-
}
|
|
146
|
-
else {
|
|
147
|
-
allFiles[file.name] = new Uint8Array(0);
|
|
148
|
-
}
|
|
149
|
-
filesProcessed++;
|
|
150
|
-
fileChunks.length = 0;
|
|
151
|
-
checkCompletion();
|
|
152
|
-
}
|
|
153
|
-
};
|
|
154
|
-
file.start();
|
|
155
|
-
});
|
|
156
|
-
unzipper.register(UnzipInflate);
|
|
157
106
|
const onData = (chunk) => {
|
|
158
|
-
|
|
107
|
+
chunks.push(chunk);
|
|
159
108
|
};
|
|
160
109
|
const onEnd = () => {
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
110
|
+
stream.removeListener("data", onData);
|
|
111
|
+
stream.removeListener("end", onEnd);
|
|
112
|
+
stream.removeListener("error", onError);
|
|
113
|
+
resolve();
|
|
164
114
|
};
|
|
165
115
|
const onError = (err) => {
|
|
166
|
-
|
|
116
|
+
stream.removeListener("data", onData);
|
|
117
|
+
stream.removeListener("end", onEnd);
|
|
118
|
+
stream.removeListener("error", onError);
|
|
167
119
|
reject(err);
|
|
168
120
|
};
|
|
169
121
|
stream.on("data", onData);
|
|
170
122
|
stream.on("end", onEnd);
|
|
171
123
|
stream.on("error", onError);
|
|
172
124
|
});
|
|
125
|
+
// Combine chunks into a single buffer
|
|
126
|
+
const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
|
|
127
|
+
const buffer = new Uint8Array(totalLength);
|
|
128
|
+
let offset = 0;
|
|
129
|
+
for (const chunk of chunks) {
|
|
130
|
+
buffer.set(chunk, offset);
|
|
131
|
+
offset += chunk.length;
|
|
132
|
+
}
|
|
133
|
+
// Use native ZipParser for extraction
|
|
134
|
+
const parser = new ZipParser(buffer);
|
|
135
|
+
const filesMap = await parser.extractAll();
|
|
136
|
+
// Convert Map to Record for loadFromFiles
|
|
137
|
+
const allFiles = {};
|
|
138
|
+
for (const [path, content] of filesMap) {
|
|
139
|
+
allFiles[path] = content;
|
|
140
|
+
}
|
|
173
141
|
return this.loadFromFiles(allFiles, options);
|
|
174
142
|
}
|
|
175
143
|
async write(stream, options) {
|
|
@@ -4,7 +4,7 @@
|
|
|
4
4
|
* A lightweight, cross-platform CSV implementation that works in both
|
|
5
5
|
* Node.js and Browser environments with zero dependencies.
|
|
6
6
|
*
|
|
7
|
-
*
|
|
7
|
+
* High-performance RFC 4180 compliant CSV parser and formatter.
|
|
8
8
|
*
|
|
9
9
|
* @see https://tools.ietf.org/html/rfc4180
|
|
10
10
|
*/
|
|
@@ -31,7 +31,7 @@ export interface RowValidationResult<T = Row> {
|
|
|
31
31
|
reason?: string;
|
|
32
32
|
}
|
|
33
33
|
/**
|
|
34
|
-
* CSV parsing options
|
|
34
|
+
* CSV parsing options
|
|
35
35
|
*/
|
|
36
36
|
export interface CsvParseOptions {
|
|
37
37
|
/** Field delimiter (default: ",") */
|
|
@@ -40,9 +40,9 @@ export interface CsvParseOptions {
|
|
|
40
40
|
quote?: string | false | null;
|
|
41
41
|
/** Escape character for quotes (default: '"'), set to false or null to disable */
|
|
42
42
|
escape?: string | false | null;
|
|
43
|
-
/** Skip empty lines (default: false)
|
|
43
|
+
/** Skip empty lines (default: false) */
|
|
44
44
|
skipEmptyLines?: boolean;
|
|
45
|
-
/** Alias for skipEmptyLines
|
|
45
|
+
/** Alias for skipEmptyLines */
|
|
46
46
|
ignoreEmpty?: boolean;
|
|
47
47
|
/** Trim whitespace from both sides of fields (default: false) */
|
|
48
48
|
trim?: boolean;
|
|
@@ -124,7 +124,7 @@ export interface CsvParseOptions {
|
|
|
124
124
|
};
|
|
125
125
|
}
|
|
126
126
|
/**
|
|
127
|
-
* CSV formatting options
|
|
127
|
+
* CSV formatting options
|
|
128
128
|
*/
|
|
129
129
|
export interface CsvFormatOptions {
|
|
130
130
|
/** Field delimiter (default: ",") */
|
|
@@ -133,7 +133,7 @@ export interface CsvFormatOptions {
|
|
|
133
133
|
quote?: string | false | null;
|
|
134
134
|
/** Escape character (default: same as quote) */
|
|
135
135
|
escape?: string | false | null;
|
|
136
|
-
/** Row delimiter (default: "\n"
|
|
136
|
+
/** Row delimiter (default: "\n") */
|
|
137
137
|
rowDelimiter?: string;
|
|
138
138
|
/** Always quote all fields (default: false, only quote when necessary) */
|
|
139
139
|
alwaysQuote?: boolean;
|
|
@@ -2,8 +2,9 @@
|
|
|
2
2
|
* CSV Base class - Shared functionality for Node.js and Browser
|
|
3
3
|
*
|
|
4
4
|
* Uses native CSV parser (RFC 4180 compliant) with zero external dependencies.
|
|
5
|
-
* Date parsing uses
|
|
5
|
+
* Date parsing uses native high-performance datetime utilities.
|
|
6
6
|
*/
|
|
7
|
+
import { type DateFormat } from "../utils/datetime";
|
|
7
8
|
import { type CsvParseOptions, type CsvFormatOptions } from "./csv-core";
|
|
8
9
|
import type { Workbook } from "../doc/workbook";
|
|
9
10
|
import type { Worksheet } from "../doc/worksheet";
|
|
@@ -12,7 +13,7 @@ import type { Worksheet } from "../doc/worksheet";
|
|
|
12
13
|
*/
|
|
13
14
|
export interface CsvReadOptions {
|
|
14
15
|
/** Date format strings to try when parsing (default: ISO formats) */
|
|
15
|
-
dateFormats?:
|
|
16
|
+
dateFormats?: readonly DateFormat[];
|
|
16
17
|
/** Custom value mapper function */
|
|
17
18
|
map?(value: any, index: number): any;
|
|
18
19
|
/** Worksheet name to create (default: "Sheet1") */
|
|
@@ -44,7 +45,7 @@ export interface CsvWriteOptions {
|
|
|
44
45
|
/**
|
|
45
46
|
* Create the default value mapper for CSV parsing
|
|
46
47
|
*/
|
|
47
|
-
export declare function createDefaultValueMapper(dateFormats:
|
|
48
|
+
export declare function createDefaultValueMapper(dateFormats: readonly DateFormat[]): (datum: any) => any;
|
|
48
49
|
/**
|
|
49
50
|
* Create the default value mapper for CSV writing
|
|
50
51
|
*/
|
|
@@ -97,6 +97,12 @@ declare class Column {
|
|
|
97
97
|
*/
|
|
98
98
|
get values(): CellValueType[];
|
|
99
99
|
set values(v: CellValueType[]);
|
|
100
|
+
/**
|
|
101
|
+
* Add a page break after this column.
|
|
102
|
+
* @param top - Optional top row limit for the page break (1-indexed)
|
|
103
|
+
* @param bottom - Optional bottom row limit for the page break (1-indexed)
|
|
104
|
+
*/
|
|
105
|
+
addPageBreak(top?: number, bottom?: number): void;
|
|
100
106
|
get numFmt(): string | NumFmt | undefined;
|
|
101
107
|
set numFmt(value: string | undefined);
|
|
102
108
|
get font(): Partial<Font> | undefined;
|