@cj-tech-master/excelts 4.2.0-canary.20260110080706.375ff37 → 4.2.0-canary.20260110111632.c88c61c
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browser/modules/archive/compress.base.d.ts +1 -0
- package/dist/browser/modules/archive/compress.base.js +1 -0
- package/dist/browser/modules/archive/compress.browser.d.ts +8 -0
- package/dist/browser/modules/archive/compress.browser.js +16 -9
- package/dist/browser/modules/archive/parse.base.d.ts +22 -1
- package/dist/browser/modules/archive/parse.base.js +38 -4
- package/dist/browser/modules/archive/parse.browser.js +6 -1
- package/dist/browser/modules/archive/parse.js +1 -1
- package/dist/browser/modules/excel/form-control.d.ts +2 -0
- package/dist/browser/modules/excel/form-control.js +54 -16
- package/dist/browser/modules/excel/xlsx/xform/sheet/worksheet-xform.js +17 -3
- package/dist/browser/modules/stream/streams.browser.d.ts +2 -0
- package/dist/browser/modules/stream/streams.browser.js +58 -25
- package/dist/cjs/modules/archive/compress.base.js +1 -0
- package/dist/cjs/modules/archive/compress.browser.js +15 -8
- package/dist/cjs/modules/archive/parse.base.js +38 -4
- package/dist/cjs/modules/archive/parse.browser.js +6 -1
- package/dist/cjs/modules/archive/parse.js +1 -1
- package/dist/cjs/modules/excel/form-control.js +54 -16
- package/dist/cjs/modules/excel/xlsx/xform/sheet/worksheet-xform.js +17 -3
- package/dist/cjs/modules/stream/streams.browser.js +58 -25
- package/dist/esm/modules/archive/compress.base.js +1 -0
- package/dist/esm/modules/archive/compress.browser.js +16 -9
- package/dist/esm/modules/archive/parse.base.js +38 -4
- package/dist/esm/modules/archive/parse.browser.js +6 -1
- package/dist/esm/modules/archive/parse.js +1 -1
- package/dist/esm/modules/excel/form-control.js +54 -16
- package/dist/esm/modules/excel/xlsx/xform/sheet/worksheet-xform.js +17 -3
- package/dist/esm/modules/stream/streams.browser.js +58 -25
- package/dist/iife/excelts.iife.js +162 -38
- package/dist/iife/excelts.iife.js.map +1 -1
- package/dist/iife/excelts.iife.min.js +19 -19
- package/dist/types/modules/archive/compress.base.d.ts +1 -0
- package/dist/types/modules/archive/compress.browser.d.ts +8 -0
- package/dist/types/modules/archive/parse.base.d.ts +22 -1
- package/dist/types/modules/excel/form-control.d.ts +2 -0
- package/dist/types/modules/stream/streams.browser.d.ts +2 -0
- package/package.json +1 -1
package/dist/browser/modules/archive/compress.base.d.ts

@@ -35,6 +35,7 @@ export interface CompressOptions {
  * Default threshold (in bytes) to choose the lower-overhead path.
  *
  * This is a performance knob, not a correctness requirement.
+ * Default: 8MB.
  */
 export declare const DEFAULT_COMPRESS_THRESHOLD_BYTES: number;
 /**
package/dist/browser/modules/archive/compress.base.js

@@ -13,6 +13,7 @@ import { ByteQueue } from "./byte-queue.js";
  * Default threshold (in bytes) to choose the lower-overhead path.
  *
  * This is a performance knob, not a correctness requirement.
+ * Default: 8MB.
  */
 export const DEFAULT_COMPRESS_THRESHOLD_BYTES = 8 * 1024 * 1024;
 /**
package/dist/browser/modules/archive/compress.browser.d.ts

@@ -22,6 +22,9 @@ export declare function hasCompressionStream(): boolean;
 /**
  * Compress data using browser's native CompressionStream or JS fallback
  *
+ * Note: We always prefer native CompressionStream when available because
+ * it's significantly faster than pure JS implementation.
+ *
  * @param data - Data to compress
  * @param options - Compression options
  * @returns Compressed data
@@ -44,7 +47,12 @@ export declare function compressSync(data: Uint8Array, options?: CompressOptions
 /**
  * Decompress data using browser's native DecompressionStream or JS fallback
  *
+ * Note: We always prefer native DecompressionStream when available because
+ * it's significantly faster than pure JS implementation, regardless of data size.
+ * The threshold is only useful for compression where the overhead matters more.
+ *
  * @param data - Compressed data (deflate-raw format)
+ * @param options - Decompression options (kept for API parity; currently unused in browser)
  * @returns Decompressed data
  */
 export declare function decompress(data: Uint8Array, options?: CompressOptions): Promise<Uint8Array>;
package/dist/browser/modules/archive/compress.browser.js

@@ -10,7 +10,7 @@
  * - Safari >= 14.1
  * - Edge >= 89
  */
-import { compressWithStream, decompressWithStream, hasDeflateRawCompressionStream, hasDeflateRawDecompressionStream
+import { compressWithStream, decompressWithStream, hasDeflateRawCompressionStream, hasDeflateRawDecompressionStream } from "./compress.base.js";
 import { inflateRaw, deflateRawCompressed } from "./deflate-fallback.js";
 import { DEFAULT_COMPRESS_LEVEL } from "./defaults.js";
 // Re-export shared types
@@ -27,6 +27,9 @@ export function hasCompressionStream() {
 /**
  * Compress data using browser's native CompressionStream or JS fallback
  *
+ * Note: We always prefer native CompressionStream when available because
+ * it's significantly faster than pure JS implementation.
+ *
  * @param data - Data to compress
  * @param options - Compression options
  * @returns Compressed data
@@ -39,16 +42,15 @@ export function hasCompressionStream() {
  */
 export async function compress(data, options = {}) {
     const level = options.level ?? DEFAULT_COMPRESS_LEVEL;
-    const thresholdBytes = resolveCompressThresholdBytes(options);
     // Level 0 means no compression
     if (level === 0) {
         return data;
     }
-    //
-    if (hasDeflateRawCompressionStream()
+    // Always use native CompressionStream when available - it's much faster than JS
+    if (hasDeflateRawCompressionStream()) {
         return compressWithStream(data);
     }
-    // Fallback to pure JS implementation
+    // Fallback to pure JS implementation only when native is unavailable
     return deflateRawCompressed(data);
 }
 /**
@@ -70,16 +72,21 @@ export function compressSync(data, options = {}) {
 /**
  * Decompress data using browser's native DecompressionStream or JS fallback
  *
+ * Note: We always prefer native DecompressionStream when available because
+ * it's significantly faster than pure JS implementation, regardless of data size.
+ * The threshold is only useful for compression where the overhead matters more.
+ *
  * @param data - Compressed data (deflate-raw format)
+ * @param options - Decompression options (kept for API parity; currently unused in browser)
  * @returns Decompressed data
  */
 export async function decompress(data, options = {}) {
-
-    //
-    if (hasDeflateRawDecompressionStream()
+    void options;
+    // Always use native DecompressionStream when available - it's much faster than JS
+    if (hasDeflateRawDecompressionStream()) {
         return decompressWithStream(data);
     }
-    // Fallback to pure JS implementation
+    // Fallback to pure JS implementation only when native is unavailable
     return inflateRaw(data);
 }
 /**
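Context for the compress.browser hunks above: the native-vs-fallback split rests on feature-detecting a "deflate-raw" DecompressionStream. A minimal TypeScript sketch of that detection and of native decompression, using only standard web APIs (the helper names here are illustrative, not the package's internals):

    // Constructing with an unsupported format throws, so the constructor
    // itself doubles as the feature probe.
    function canDecompressDeflateRaw(): boolean {
      try {
        new DecompressionStream("deflate-raw");
        return true;
      } catch {
        return false;
      }
    }

    // Native path: pipe the bytes through the stream and drain it fully.
    async function nativeInflateRaw(data: Uint8Array): Promise<Uint8Array> {
      const stream = new Blob([data]).stream().pipeThrough(new DecompressionStream("deflate-raw"));
      return new Uint8Array(await new Response(stream).arrayBuffer());
    }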
package/dist/browser/modules/archive/parse.base.d.ts

@@ -142,6 +142,17 @@ export declare function streamUntilValidatedDataDescriptor(options: StreamUntilV
 export interface ParseOptions {
     verbose?: boolean;
     forceStream?: boolean;
+    /**
+     * Threshold (in bytes) for small file optimization.
+     * Files smaller than this will use sync decompression (no stream overhead).
+     *
+     * Note: the optimization is only applied when the entry sizes are trusted
+     * (i.e. no data descriptor) and BOTH compressedSize and uncompressedSize
+     * are below this threshold. This avoids buffering huge highly-compressible
+     * files (e.g. large XML) in memory, which would defeat streaming.
+     * Default: 5MB
+     */
+    thresholdBytes?: number;
 }
 export interface EntryVars {
     versionsNeededToExtract: number | null;
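The new thresholdBytes option is a per-parse knob; a short usage sketch against the declaration above (the surrounding setup is illustrative):

    import type { ParseOptions } from "./parse.base.js";

    // Raise the sync-decompression cutoff to 16MB; entries whose compressed
    // or uncompressed size exceeds it still take the streaming path.
    const opts: ParseOptions = {
      forceStream: false,
      thresholdBytes: 16 * 1024 * 1024,
    };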
@@ -195,4 +206,14 @@ export interface ParseEmitter {
     emitClose(): void;
 }
 export type InflateFactory = () => Transform | Duplex | PassThrough;
-
+/**
+ * Synchronous inflate function type for small file optimization.
+ * When provided and file size is below threshold, this will be used
+ * instead of streaming decompression for better performance.
+ */
+export type InflateRawSync = (data: Uint8Array) => Uint8Array;
+/**
+ * Default threshold for small file optimization (5MB).
+ */
+export declare const DEFAULT_PARSE_THRESHOLD_BYTES: number;
+export declare function runParseLoop(opts: ParseOptions, io: ParseIO, emitter: ParseEmitter, inflateFactory: InflateFactory, state: ParseDriverState, inflateRawSync?: InflateRawSync): Promise<void>;
package/dist/browser/modules/archive/parse.base.js

@@ -1,5 +1,5 @@
 import { parseDosDateTimeUTC, resolveZipLastModifiedDateFromUnixSeconds } from "./utils/timestamps.js";
-import { Duplex, PassThrough, Transform, concatUint8Arrays, pipeline } from "../stream/index.browser.js";
+import { Duplex, PassThrough, Transform, concatUint8Arrays, pipeline, finished } from "../stream/index.browser.js";
 import { parseTyped as parseBuffer } from "./utils/parse-buffer.js";
 import { ByteQueue } from "./byte-queue.js";
 import { indexOfUint8ArrayPattern } from "./utils/bytes.js";
@@ -499,8 +499,13 @@ export function streamUntilValidatedDataDescriptor(options) {
     queueMicrotask(pull);
     return output;
 }
+/**
+ * Default threshold for small file optimization (5MB).
+ */
+export const DEFAULT_PARSE_THRESHOLD_BYTES = 5 * 1024 * 1024;
 const endDirectorySignature = writeUint32LE(END_OF_CENTRAL_DIR_SIG);
-export async function runParseLoop(opts, io, emitter, inflateFactory, state) {
+export async function runParseLoop(opts, io, emitter, inflateFactory, state, inflateRawSync) {
+    const thresholdBytes = opts.thresholdBytes ?? DEFAULT_PARSE_THRESHOLD_BYTES;
     while (true) {
         const sigBytes = await io.pull(4);
         if (sigBytes.length === 0) {
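For reference, the signature dispatch in this loop keys off the standard ZIP record markers; the values below come from the ZIP APPNOTE (constant names mirror the hunk, the reader helper is an illustrative sketch):

    // Little-endian 4-byte record signatures from the ZIP specification.
    const LOCAL_FILE_HEADER_SIG = 0x04034b50;  // "PK\x03\x04" - precedes each entry
    const CENTRAL_DIR_HEADER_SIG = 0x02014b50; // "PK\x01\x02" - central directory
    const END_OF_CENTRAL_DIR_SIG = 0x06054b50; // "PK\x05\x06" - archive trailer

    // The parse loop pulls 4 bytes and interprets them the same way:
    function readSignature(bytes: Uint8Array): number {
      return new DataView(bytes.buffer, bytes.byteOffset, 4).getUint32(0, true);
    }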
@@ -514,7 +519,7 @@ export async function runParseLoop(opts, io, emitter, inflateFactory, state) {
         continue;
     }
     if (signature === LOCAL_FILE_HEADER_SIG) {
-        await readFileRecord(opts, io, emitter, inflateFactory, state);
+        await readFileRecord(opts, io, emitter, inflateFactory, state, thresholdBytes, inflateRawSync);
         continue;
     }
     if (signature === CENTRAL_DIR_HEADER_SIG) {
@@ -543,7 +548,7 @@ export async function runParseLoop(opts, io, emitter, inflateFactory, state) {
         return;
     }
 }
-async function readFileRecord(opts, io, emitter, inflateFactory, state) {
+async function readFileRecord(opts, io, emitter, inflateFactory, state, thresholdBytes, inflateRawSync) {
     const { vars: headerVars, fileNameBuffer, extraFieldData } = await readLocalFileHeader(async (l) => io.pull(l));
     const vars = headerVars;
     if (state.crxHeader) {
@@ -599,6 +604,35 @@ async function readFileRecord(opts, io, emitter, inflateFactory, state) {
             extraFields: entry.extraFields
         });
     }
+    // Small file optimization: use sync decompression if:
+    // 1. Entry sizes are trusted (no data descriptor)
+    // 2. File size is known and below threshold
+    // 3. inflateRawSync is provided
+    // 4. File needs decompression (compressionMethod != 0)
+    // 5. Not autodraining
+    //
+    // We require BOTH compressedSize and uncompressedSize <= thresholdBytes.
+    // This prevents materializing large highly-compressible files in memory,
+    // which can cause massive peak RSS and negate streaming backpressure.
+    const sizesTrusted = !hasDataDescriptorFlag(vars.flags);
+    const compressedSize = vars.compressedSize || 0;
+    const uncompressedSize = vars.uncompressedSize || 0;
+    const useSmallFileOptimization = sizesTrusted &&
+        fileSizeKnown &&
+        inflateRawSync &&
+        vars.compressionMethod !== 0 &&
+        !autodraining &&
+        compressedSize <= thresholdBytes &&
+        uncompressedSize <= thresholdBytes;
+    if (useSmallFileOptimization) {
+        // Read compressed data directly and decompress synchronously
+        const compressedData = await io.pull(compressedSize);
+        const decompressedData = inflateRawSync(compressedData);
+        entry.end(decompressedData);
+        // Wait for entry stream write to complete (not for read/consume)
+        await finished(entry, { readable: false });
+        return;
+    }
     const inflater = vars.compressionMethod && !autodraining ? inflateFactory() : new PassThrough();
     if (fileSizeKnown) {
         await pipeline(io.stream(vars.compressedSize || 0), inflater, entry);
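The sizesTrusted guard above keys off ZIP's general-purpose bit 3: when it is set, the local header's size fields are zeros and the real sizes only arrive in a data descriptor after the file data, so the sync path must be skipped. A sketch of what a hasDataDescriptorFlag-style check amounts to (the package's actual helper is not shown in this diff):

    // ZIP general-purpose bit flag, bit 3 (0x0008): sizes and CRC are deferred
    // to a trailing data descriptor, so local-header sizes are untrusted.
    function hasDataDescriptorFlag(flags: number): boolean {
      return (flags & 0x0008) !== 0;
    }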
package/dist/browser/modules/archive/parse.browser.js

@@ -341,7 +341,12 @@ export function createParseClass(createInflateRawFn) {
         }
     };
     queueMicrotask(() => {
-
+        // NOTE: We intentionally do NOT pass inflateRawSync to runParseLoop in browser.
+        // Browser's native DecompressionStream is faster than our pure-JS fallback,
+        // so we always use the streaming path for decompression in browsers.
+        this._parsingDone = runParseLoop(this._opts, io, emitter, () => createInflateRawFn(), this._driverState
+        // No inflateRawSync - always use streaming DecompressionStream in browser
+        );
         this._parsingDone.catch((e) => {
             if (!this.__emittedError || this.__emittedError !== e) {
                 this.__emittedError = e;
package/dist/browser/modules/archive/parse.js

@@ -50,7 +50,7 @@ export function createParseClass(createInflateRawFn) {
         }
     };
     // Parse records as data arrives. Only emit `close` when parsing is complete.
-    runParseLoop(this._opts, io, emitter, createInflateRawFn, this._driverState).catch((e) => {
+    runParseLoop(this._opts, io, emitter, createInflateRawFn, this._driverState, (data) => zlib.inflateRawSync(data)).catch((e) => {
         if (!this.__emittedError || this.__emittedError !== e) {
             this.emit("error", e);
         }
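On Node, the injected sync inflater is plain zlib; a sketch of why the lambda above satisfies the InflateRawSync shape declared in parse.base.d.ts (the browser build in the previous hunk deliberately omits this argument):

    import * as zlib from "node:zlib";

    // zlib.inflateRawSync accepts a Uint8Array and returns a Buffer, which is
    // a Uint8Array subclass, so it matches:
    //   type InflateRawSync = (data: Uint8Array) => Uint8Array;
    const inflateRawSync = (data: Uint8Array): Uint8Array => zlib.inflateRawSync(data);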
package/dist/browser/modules/excel/form-control.d.ts

@@ -42,6 +42,8 @@ export interface FormCheckboxModel {
     shapeId: number;
     /** Control property ID (rId in relationships) */
     ctrlPropId: number;
+    /** Relationship id (e.g., rId5) in sheet rels for ctrlProp (set during XLSX prepare) */
+    ctrlPropRelId?: string;
     /** Top-left anchor */
     tl: FormControlAnchor;
     /** Bottom-right anchor */
package/dist/browser/modules/excel/form-control.js

@@ -106,25 +106,63 @@ class FormCheckbox {
         let tl;
         let br;
         if (typeof range === "string") {
-            // Parse cell reference like "B2" or "B2:D3"
-            const
-            if (
-
-
-
-
-
-
-
-
-
-
-
+            // Parse cell reference like "B2" or range like "B2:D3"
+            const isRange = range.includes(":");
+            if (isRange) {
+                const decoded = colCache.decode(range);
+                if ("top" in decoded) {
+                    // Treat 1-cell ranges (e.g., "J4:J4") as a single cell with default checkbox size.
+                    if (decoded.left === decoded.right && decoded.top === decoded.bottom) {
+                        const col = decoded.left - 1;
+                        const row = decoded.top - 1;
+                        tl = {
+                            col,
+                            colOff: DEFAULT_COL_OFF,
+                            row,
+                            rowOff: DEFAULT_ROW_OFF
+                        };
+                        br = {
+                            col: col + 2,
+                            colOff: DEFAULT_END_COL_OFF,
+                            row: row + 1,
+                            rowOff: DEFAULT_END_ROW_OFF
+                        };
+                    }
+                    else {
+                        // Regular range
+                        tl = {
+                            col: decoded.left - 1, // Convert to 0-based
+                            colOff: DEFAULT_COL_OFF,
+                            row: decoded.top - 1,
+                            rowOff: DEFAULT_ROW_OFF
+                        };
+                        br = {
+                            col: decoded.right - 1,
+                            colOff: DEFAULT_END_COL_OFF,
+                            row: decoded.bottom - 1,
+                            rowOff: DEFAULT_END_ROW_OFF
+                        };
+                    }
+                }
+                else {
+                    // Defensive fallback: if the cache returns an address, treat it like a single-cell ref.
+                    tl = {
+                        col: decoded.col - 1,
+                        colOff: DEFAULT_COL_OFF,
+                        row: decoded.row - 1,
+                        rowOff: DEFAULT_ROW_OFF
+                    };
+                    br = {
+                        col: decoded.col + 1,
+                        colOff: DEFAULT_END_COL_OFF,
+                        row: decoded.row,
+                        rowOff: DEFAULT_END_ROW_OFF
+                    };
+                }
+            }
             }
             else {
                 // Single cell reference - create default size checkbox
+                const decoded = colCache.decodeAddress(range);
                 tl = {
                     col: decoded.col - 1,
                     colOff: DEFAULT_COL_OFF,
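A worked example of the anchor math in this hunk (EMU offsets omitted; the decode shape follows the 1-based ranges used above):

    // "B2:D3" decodes (1-based) to { left: 2, top: 2, right: 4, bottom: 3 }.
    // Anchors are 0-based, so the checkbox spans B2 (tl) through D3 (br):
    const decoded = { left: 2, top: 2, right: 4, bottom: 3 };
    const tl = { col: decoded.left - 1, row: decoded.top - 1 };     // { col: 1, row: 1 }
    const br = { col: decoded.right - 1, row: decoded.bottom - 1 }; // { col: 3, row: 2 }
    // A 1-cell range like "J4:J4" instead gets the default size from the
    // single-cell branch: br.col = tl.col + 2 and br.row = tl.row + 1.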
package/dist/browser/modules/excel/xlsx/xform/sheet/worksheet-xform.js

@@ -290,8 +290,10 @@ class WorkSheetXform extends BaseXform {
         for (const control of model.formControls) {
             const globalCtrlPropId = options.formControlRefs.length + 1;
             control.ctrlPropId = globalCtrlPropId;
+            const relId = nextRid(rels);
+            control.ctrlPropRelId = relId;
             rels.push({
-                Id:
+                Id: relId,
                 Type: RelType.CtrlProp,
                 Target: ctrlPropRelTargetFromWorksheet(globalCtrlPropId)
             });
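The nextRid(rels) helper itself is not part of this diff; a plausible contract, offered purely as an assumption, is "return the first unused rId so ctrlProp relationships never collide with existing drawing or comment rels":

    // Hypothetical sketch of a nextRid-style helper (the package's actual
    // implementation is not shown in this diff).
    function nextRid(rels: Array<{ Id: string }>): string {
      let max = 0;
      for (const rel of rels) {
        const m = /^rId(\d+)$/.exec(rel.Id);
        if (m) max = Math.max(max, parseInt(m[1], 10));
      }
      return `rId${max + 1}`;
    }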
@@ -354,15 +356,27 @@ class WorkSheetXform extends BaseXform {
         this.map.drawing.render(xmlStream, model.drawing); // Note: must be after rowBreaks/colBreaks
         this.map.picture.render(xmlStream, model.background); // Note: must be after drawing
         this.map.tableParts.render(xmlStream, model.tables);
-
+        // Controls section for legacy form controls (checkboxes, etc.)
+        // Excel expects <controls> entries that reference ctrlProp relationships.
+        if (model.formControls && model.formControls.length > 0) {
+            xmlStream.openNode("controls");
+            for (const control of model.formControls) {
+                if (control.ctrlPropRelId) {
+                    xmlStream.leafNode("control", { shapeId: control.shapeId, "r:id": control.ctrlPropRelId });
+                }
+            }
+            xmlStream.closeNode();
+        }
         if (model.rels) {
-            //
+            // Add a <legacyDrawing /> node for each VML drawing relationship (comments and/or form controls).
             model.rels.forEach(rel => {
                 if (rel.Type === RelType.VmlDrawing) {
                     xmlStream.leafNode("legacyDrawing", { "r:id": rel.Id });
                 }
             });
         }
+        // extLst should be the last element in the worksheet.
+        this.map.extLst.render(xmlStream, model);
         xmlStream.closeNode();
     }
     parseOpen(node) {
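Taken together with the prepare() hunk above, rendering a sheet with checkboxes now emits a controls block between the tableParts and legacyDrawing output; an illustrative sketch of the model state and the resulting XML (ids and shapeIds invented for the example):

    // After prepare(), each checkbox carries a shapeId and a ctrlPropRelId:
    const model = {
      formControls: [
        { shapeId: 1025, ctrlPropRelId: "rId5" },
        { shapeId: 1026, ctrlPropRelId: "rId6" },
      ],
    };
    // render() then emits:
    //   <controls>
    //     <control shapeId="1025" r:id="rId5"/>
    //     <control shapeId="1026" r:id="rId6"/>
    //   </controls>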
package/dist/browser/modules/stream/streams.browser.d.ts

@@ -456,6 +456,8 @@ export declare class Transform<TInput = Uint8Array, TOutput = Uint8Array> extend
      * Read from the transform stream
      */
     read(size?: number): TOutput | null;
+    /** @internal - list of piped destinations for forwarding auto-consumed data */
+    private _pipeDestinations;
     /**
      * Pipe to another stream (writable, transform, or duplex)
      */
package/dist/browser/modules/stream/streams.browser.js

@@ -384,55 +384,58 @@ export class Readable extends EventEmitter {
         // causing `instanceof Transform/Writable/Duplex` to fail even when the object
         // is a valid destination.
         const dest = destination;
-        //
-        //
-
-
-        const
-        const
-        const
-        const
-
+        // For event handling (drain, once, off), we need the object that emits events.
+        // For write/end, we must call the destination's own write()/end() methods,
+        // NOT the internal _writable, because Transform.write() has important logic
+        // (like auto-consume) that _writable.write() bypasses.
+        const eventTarget = dest;
+        const hasWrite = typeof dest?.write === "function";
+        const hasEnd = typeof dest?.end === "function";
+        const hasOn = typeof eventTarget?.on === "function";
+        const hasOnce = typeof eventTarget?.once === "function";
+        const hasOff = typeof eventTarget?.off === "function";
+        if (!hasWrite || !hasEnd || (!hasOnce && !hasOn) || (!hasOff && !eventTarget?.removeListener)) {
             throw new Error("Readable.pipe: invalid destination");
         }
-
-        this._pipeTo.push(target);
+        this._pipeTo.push(dest);
         // Create listeners that we can later remove
        const dataListener = (chunk) => {
-
+            // Call destination's write() method (not internal _writable.write())
+            // This ensures Transform.write() logic runs properly
+            const canWrite = dest.write(chunk);
            if (!canWrite) {
                this.pause();
-                if (typeof
-
+                if (typeof eventTarget.once === "function") {
+                    eventTarget.once("drain", () => this.resume());
                }
                else {
                    const resumeOnce = () => {
-                        if (typeof
-
+                        if (typeof eventTarget.off === "function") {
+                            eventTarget.off("drain", resumeOnce);
                        }
-                        else if (typeof
-
+                        else if (typeof eventTarget.removeListener === "function") {
+                            eventTarget.removeListener("drain", resumeOnce);
                        }
                        this.resume();
                    };
-
+                    eventTarget.on("drain", resumeOnce);
                }
            }
        };
        const endListener = () => {
-
+            dest.end();
        };
        const errorListener = (err) => {
-            if (typeof
-
+            if (typeof dest.destroy === "function") {
+                dest.destroy(err);
            }
            else {
                // Best-effort: forward error to the destination if it supports events.
-
+                eventTarget.emit?.("error", err);
            }
        };
        // Store listeners for later removal in unpipe
-        this._pipeListeners.set(
+        this._pipeListeners.set(dest, {
            data: dataListener,
            end: endListener,
            error: errorListener
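The drain handling inside dataListener follows the usual write/drain backpressure contract; a condensed standalone illustration of that contract (types are declared inline rather than taken from the package):

    declare const source: { pause(): void; resume(): void };
    declare const dest: {
      write(chunk: Uint8Array): boolean;
      once(event: "drain", cb: () => void): void;
    };

    function forward(chunk: Uint8Array): void {
      if (!dest.write(chunk)) {
        source.pause(); // stop reading while the destination is saturated
        dest.once("drain", () => source.resume()); // resume exactly once per stall
      }
    }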
@@ -1358,6 +1361,8 @@ export class Transform extends EventEmitter {
         this._autoConsumeEnded = false;
         /** @internal - promise that resolves when auto-consume finishes */
         this._autoConsumePromise = null;
+        /** @internal - list of piped destinations for forwarding auto-consumed data */
+        this._pipeDestinations = [];
         this.objectMode = options?.objectMode ?? false;
         const userTransform = options?.transform;
         const userFlush = options?.flush;
@@ -1624,10 +1629,18 @@
         for await (const chunk of this._readable) {
             // Buffer the data for later retrieval
             this._autoConsumedBuffer.push(chunk);
+            // Forward to any piped destinations
+            for (const dest of this._pipeDestinations) {
+                dest.write(chunk);
+            }
             // Also emit data event for listeners
             this.emit("data", chunk);
         }
         this._autoConsumeEnded = true;
+        // End all piped destinations
+        for (const dest of this._pipeDestinations) {
+            dest.end();
+        }
         this.emit("end");
     }
     catch (err) {
@@ -1677,8 +1690,28 @@
      * Pipe to another stream (writable, transform, or duplex)
      */
     pipe(destination) {
-        // Mark as having consumer to prevent auto-consume
+        // Mark as having consumer to prevent new auto-consume from starting
         this._hasDataConsumer = true;
+        // Get the writable target - handle both Transform (with internal _writable) and plain Writable
+        const dest = destination;
+        const target = dest?._writable ?? dest;
+        // Register destination for forwarding
+        this._pipeDestinations.push(target);
+        // If auto-consume is running or has run, we need to handle buffered data ourselves
+        if (this._readableConsuming) {
+            // Forward any buffered data from auto-consume to the destination
+            for (let i = 0; i < this._autoConsumedBuffer.length; i++) {
+                target.write(this._autoConsumedBuffer[i]);
+            }
+            // If auto-consume has ended, end the destination too
+            if (this._autoConsumeEnded) {
+                target.end();
+            }
+            // Don't call _readable.pipe() - auto-consume already consumed _readable
+            // Future data will be forwarded via the 'data' event listener below
+            return destination;
+        }
+        // No auto-consume running - use normal pipe through _readable
         return this._readable.pipe(destination);
     }
     /**
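The net effect of this branch is that a pipe() attached after auto-consume has started no longer loses data; an illustrative scenario (the destination type is a stand-in, Transform is the class from streams.browser.js):

    import { Transform } from "./streams.browser.js";

    declare const dest: any; // Writable-like: { write(chunk): boolean; end(): void }

    const t = new Transform();
    t.write(new Uint8Array([1, 2, 3])); // no consumer yet: auto-consume buffers it
    t.end();
    // A "late" pipe now replays the buffered chunk into dest and then ends it
    // once auto-consume finishes, instead of silently dropping the data:
    t.pipe(dest);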
package/dist/cjs/modules/archive/compress.base.js

@@ -26,6 +26,7 @@ const byte_queue_1 = require("./byte-queue.js");
  * Default threshold (in bytes) to choose the lower-overhead path.
  *
  * This is a performance knob, not a correctness requirement.
+ * Default: 8MB.
  */
 exports.DEFAULT_COMPRESS_THRESHOLD_BYTES = 8 * 1024 * 1024;
 /**
package/dist/cjs/modules/archive/compress.browser.js

@@ -32,6 +32,9 @@ function hasCompressionStream() {
 /**
  * Compress data using browser's native CompressionStream or JS fallback
  *
+ * Note: We always prefer native CompressionStream when available because
+ * it's significantly faster than pure JS implementation.
+ *
  * @param data - Data to compress
  * @param options - Compression options
  * @returns Compressed data
@@ -44,16 +47,15 @@ function hasCompressionStream() {
  */
 async function compress(data, options = {}) {
     const level = options.level ?? defaults_1.DEFAULT_COMPRESS_LEVEL;
-    const thresholdBytes = (0, compress_base_1.resolveCompressThresholdBytes)(options);
     // Level 0 means no compression
     if (level === 0) {
         return data;
     }
-    //
-    if ((0, compress_base_1.hasDeflateRawCompressionStream)()
+    // Always use native CompressionStream when available - it's much faster than JS
+    if ((0, compress_base_1.hasDeflateRawCompressionStream)()) {
         return (0, compress_base_1.compressWithStream)(data);
     }
-    // Fallback to pure JS implementation
+    // Fallback to pure JS implementation only when native is unavailable
     return (0, deflate_fallback_1.deflateRawCompressed)(data);
 }
 /**
@@ -75,16 +77,21 @@ function compressSync(data, options = {}) {
 /**
  * Decompress data using browser's native DecompressionStream or JS fallback
  *
+ * Note: We always prefer native DecompressionStream when available because
+ * it's significantly faster than pure JS implementation, regardless of data size.
+ * The threshold is only useful for compression where the overhead matters more.
+ *
  * @param data - Compressed data (deflate-raw format)
+ * @param options - Decompression options (kept for API parity; currently unused in browser)
  * @returns Decompressed data
  */
 async function decompress(data, options = {}) {
-
-    //
-    if ((0, compress_base_1.hasDeflateRawDecompressionStream)()
+    void options;
+    // Always use native DecompressionStream when available - it's much faster than JS
+    if ((0, compress_base_1.hasDeflateRawDecompressionStream)()) {
         return (0, compress_base_1.decompressWithStream)(data);
     }
-    // Fallback to pure JS implementation
+    // Fallback to pure JS implementation only when native is unavailable
     return (0, deflate_fallback_1.inflateRaw)(data);
 }
 /**