@cj-tech-master/excelts 4.2.0-canary.20260110034516.0919d4d → 4.2.0-canary.20260110111632.c88c61c

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/dist/browser/modules/archive/compress.base.d.ts +1 -0
  2. package/dist/browser/modules/archive/compress.base.js +1 -0
  3. package/dist/browser/modules/archive/compress.browser.d.ts +8 -0
  4. package/dist/browser/modules/archive/compress.browser.js +16 -9
  5. package/dist/browser/modules/archive/parse.base.d.ts +22 -1
  6. package/dist/browser/modules/archive/parse.base.js +38 -4
  7. package/dist/browser/modules/archive/parse.browser.js +6 -1
  8. package/dist/browser/modules/archive/parse.js +1 -1
  9. package/dist/browser/modules/excel/form-control.d.ts +2 -0
  10. package/dist/browser/modules/excel/form-control.js +54 -16
  11. package/dist/browser/modules/excel/xlsx/xform/sheet/worksheet-xform.js +17 -3
  12. package/dist/browser/modules/stream/streams.browser.d.ts +2 -0
  13. package/dist/browser/modules/stream/streams.browser.js +58 -25
  14. package/dist/cjs/modules/archive/compress.base.js +1 -0
  15. package/dist/cjs/modules/archive/compress.browser.js +15 -8
  16. package/dist/cjs/modules/archive/parse.base.js +38 -4
  17. package/dist/cjs/modules/archive/parse.browser.js +6 -1
  18. package/dist/cjs/modules/archive/parse.js +1 -1
  19. package/dist/cjs/modules/excel/form-control.js +54 -16
  20. package/dist/cjs/modules/excel/xlsx/xform/sheet/worksheet-xform.js +17 -3
  21. package/dist/cjs/modules/stream/streams.browser.js +58 -25
  22. package/dist/esm/modules/archive/compress.base.js +1 -0
  23. package/dist/esm/modules/archive/compress.browser.js +16 -9
  24. package/dist/esm/modules/archive/parse.base.js +38 -4
  25. package/dist/esm/modules/archive/parse.browser.js +6 -1
  26. package/dist/esm/modules/archive/parse.js +1 -1
  27. package/dist/esm/modules/excel/form-control.js +54 -16
  28. package/dist/esm/modules/excel/xlsx/xform/sheet/worksheet-xform.js +17 -3
  29. package/dist/esm/modules/stream/streams.browser.js +58 -25
  30. package/dist/iife/excelts.iife.js +162 -38
  31. package/dist/iife/excelts.iife.js.map +1 -1
  32. package/dist/iife/excelts.iife.min.js +19 -19
  33. package/dist/types/modules/archive/compress.base.d.ts +1 -0
  34. package/dist/types/modules/archive/compress.browser.d.ts +8 -0
  35. package/dist/types/modules/archive/parse.base.d.ts +22 -1
  36. package/dist/types/modules/excel/form-control.d.ts +2 -0
  37. package/dist/types/modules/stream/streams.browser.d.ts +2 -0
  38. package/package.json +6 -6
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.PullStream = exports.parseExtraField = exports.END_OF_CENTRAL_DIRECTORY_FORMAT = exports.CENTRAL_DIRECTORY_FILE_HEADER_FORMAT = exports.DATA_DESCRIPTOR_FORMAT = exports.LOCAL_FILE_HEADER_FORMAT = exports.CRX_HEADER_FORMAT = void 0;
3
+ exports.DEFAULT_PARSE_THRESHOLD_BYTES = exports.PullStream = exports.parseExtraField = exports.END_OF_CENTRAL_DIRECTORY_FORMAT = exports.CENTRAL_DIRECTORY_FILE_HEADER_FORMAT = exports.DATA_DESCRIPTOR_FORMAT = exports.LOCAL_FILE_HEADER_FORMAT = exports.CRX_HEADER_FORMAT = void 0;
4
4
  exports.decodeZipEntryPath = decodeZipEntryPath;
5
5
  exports.isZipUnicodeFlag = isZipUnicodeFlag;
6
6
  exports.isZipDirectoryPath = isZipDirectoryPath;
@@ -521,8 +521,13 @@ function streamUntilValidatedDataDescriptor(options) {
521
521
  queueMicrotask(pull);
522
522
  return output;
523
523
  }
524
+ /**
525
+ * Default threshold for small file optimization (5MB).
526
+ */
527
+ exports.DEFAULT_PARSE_THRESHOLD_BYTES = 5 * 1024 * 1024;
524
528
  const endDirectorySignature = (0, binary_1.writeUint32LE)(zip_constants_1.END_OF_CENTRAL_DIR_SIG);
525
- async function runParseLoop(opts, io, emitter, inflateFactory, state) {
529
+ async function runParseLoop(opts, io, emitter, inflateFactory, state, inflateRawSync) {
530
+ const thresholdBytes = opts.thresholdBytes ?? exports.DEFAULT_PARSE_THRESHOLD_BYTES;
526
531
  while (true) {
527
532
  const sigBytes = await io.pull(4);
528
533
  if (sigBytes.length === 0) {
@@ -536,7 +541,7 @@ async function runParseLoop(opts, io, emitter, inflateFactory, state) {
536
541
  continue;
537
542
  }
538
543
  if (signature === zip_constants_1.LOCAL_FILE_HEADER_SIG) {
539
- await readFileRecord(opts, io, emitter, inflateFactory, state);
544
+ await readFileRecord(opts, io, emitter, inflateFactory, state, thresholdBytes, inflateRawSync);
540
545
  continue;
541
546
  }
542
547
  if (signature === zip_constants_1.CENTRAL_DIR_HEADER_SIG) {
@@ -565,7 +570,7 @@ async function runParseLoop(opts, io, emitter, inflateFactory, state) {
565
570
  return;
566
571
  }
567
572
  }
568
- async function readFileRecord(opts, io, emitter, inflateFactory, state) {
573
+ async function readFileRecord(opts, io, emitter, inflateFactory, state, thresholdBytes, inflateRawSync) {
569
574
  const { vars: headerVars, fileNameBuffer, extraFieldData } = await readLocalFileHeader(async (l) => io.pull(l));
570
575
  const vars = headerVars;
571
576
  if (state.crxHeader) {
@@ -621,6 +626,35 @@ async function readFileRecord(opts, io, emitter, inflateFactory, state) {
621
626
  extraFields: entry.extraFields
622
627
  });
623
628
  }
629
+ // Small file optimization: use sync decompression if:
630
+ // 1. Entry sizes are trusted (no data descriptor)
631
+ // 2. File size is known and below threshold
632
+ // 3. inflateRawSync is provided
633
+ // 4. File needs decompression (compressionMethod != 0)
634
+ // 5. Not autodraining
635
+ //
636
+ // We require BOTH compressedSize and uncompressedSize <= thresholdBytes.
637
+ // This prevents materializing large highly-compressible files in memory,
638
+ // which can cause massive peak RSS and negate streaming backpressure.
639
+ const sizesTrusted = !hasDataDescriptorFlag(vars.flags);
640
+ const compressedSize = vars.compressedSize || 0;
641
+ const uncompressedSize = vars.uncompressedSize || 0;
642
+ const useSmallFileOptimization = sizesTrusted &&
643
+ fileSizeKnown &&
644
+ inflateRawSync &&
645
+ vars.compressionMethod !== 0 &&
646
+ !autodraining &&
647
+ compressedSize <= thresholdBytes &&
648
+ uncompressedSize <= thresholdBytes;
649
+ if (useSmallFileOptimization) {
650
+ // Read compressed data directly and decompress synchronously
651
+ const compressedData = await io.pull(compressedSize);
652
+ const decompressedData = inflateRawSync(compressedData);
653
+ entry.end(decompressedData);
654
+ // Wait for entry stream write to complete (not for read/consume)
655
+ await (0, _stream_1.finished)(entry, { readable: false });
656
+ return;
657
+ }
624
658
  const inflater = vars.compressionMethod && !autodraining ? inflateFactory() : new _stream_1.PassThrough();
625
659
  if (fileSizeKnown) {
626
660
  await (0, _stream_1.pipeline)(io.stream(vars.compressedSize || 0), inflater, entry);
@@ -346,7 +346,12 @@ function createParseClass(createInflateRawFn) {
346
346
  }
347
347
  };
348
348
  queueMicrotask(() => {
349
- this._parsingDone = (0, parse_base_1.runParseLoop)(this._opts, io, emitter, () => createInflateRawFn(), this._driverState);
349
+ // NOTE: We intentionally do NOT pass inflateRawSync to runParseLoop in browser.
350
+ // Browser's native DecompressionStream is faster than our pure-JS fallback,
351
+ // so we always use the streaming path for decompression in browsers.
352
+ this._parsingDone = (0, parse_base_1.runParseLoop)(this._opts, io, emitter, () => createInflateRawFn(), this._driverState
353
+ // No inflateRawSync - always use streaming DecompressionStream in browser
354
+ );
350
355
  this._parsingDone.catch((e) => {
351
356
  if (!this.__emittedError || this.__emittedError !== e) {
352
357
  this.__emittedError = e;
@@ -58,7 +58,7 @@ function createParseClass(createInflateRawFn) {
58
58
  }
59
59
  };
60
60
  // Parse records as data arrives. Only emit `close` when parsing is complete.
61
- (0, parse_base_1.runParseLoop)(this._opts, io, emitter, createInflateRawFn, this._driverState).catch((e) => {
61
+ (0, parse_base_1.runParseLoop)(this._opts, io, emitter, createInflateRawFn, this._driverState, (data) => zlib_1.default.inflateRawSync(data)).catch((e) => {
62
62
  if (!this.__emittedError || this.__emittedError !== e) {
63
63
  this.emit("error", e);
64
64
  }
@@ -109,25 +109,63 @@ class FormCheckbox {
109
109
  let tl;
110
110
  let br;
111
111
  if (typeof range === "string") {
112
- // Parse cell reference like "B2" or "B2:D3"
113
- const decoded = col_cache_1.colCache.decode(range);
114
- if ("top" in decoded) {
115
- // It's a range like "B2:D3"
116
- tl = {
117
- col: decoded.left - 1, // Convert to 0-based
118
- colOff: DEFAULT_COL_OFF,
119
- row: decoded.top - 1,
120
- rowOff: DEFAULT_ROW_OFF
121
- };
122
- br = {
123
- col: decoded.right - 1,
124
- colOff: DEFAULT_END_COL_OFF,
125
- row: decoded.bottom - 1,
126
- rowOff: DEFAULT_END_ROW_OFF
127
- };
112
+ // Parse cell reference like "B2" or range like "B2:D3"
113
+ const isRange = range.includes(":");
114
+ if (isRange) {
115
+ const decoded = col_cache_1.colCache.decode(range);
116
+ if ("top" in decoded) {
117
+ // Treat 1-cell ranges (e.g., "J4:J4") as a single cell with default checkbox size.
118
+ if (decoded.left === decoded.right && decoded.top === decoded.bottom) {
119
+ const col = decoded.left - 1;
120
+ const row = decoded.top - 1;
121
+ tl = {
122
+ col,
123
+ colOff: DEFAULT_COL_OFF,
124
+ row,
125
+ rowOff: DEFAULT_ROW_OFF
126
+ };
127
+ br = {
128
+ col: col + 2,
129
+ colOff: DEFAULT_END_COL_OFF,
130
+ row: row + 1,
131
+ rowOff: DEFAULT_END_ROW_OFF
132
+ };
133
+ }
134
+ else {
135
+ // Regular range
136
+ tl = {
137
+ col: decoded.left - 1, // Convert to 0-based
138
+ colOff: DEFAULT_COL_OFF,
139
+ row: decoded.top - 1,
140
+ rowOff: DEFAULT_ROW_OFF
141
+ };
142
+ br = {
143
+ col: decoded.right - 1,
144
+ colOff: DEFAULT_END_COL_OFF,
145
+ row: decoded.bottom - 1,
146
+ rowOff: DEFAULT_END_ROW_OFF
147
+ };
148
+ }
149
+ }
150
+ else {
151
+ // Defensive fallback: if the cache returns an address, treat it like a single-cell ref.
152
+ tl = {
153
+ col: decoded.col - 1,
154
+ colOff: DEFAULT_COL_OFF,
155
+ row: decoded.row - 1,
156
+ rowOff: DEFAULT_ROW_OFF
157
+ };
158
+ br = {
159
+ col: decoded.col + 1,
160
+ colOff: DEFAULT_END_COL_OFF,
161
+ row: decoded.row,
162
+ rowOff: DEFAULT_END_ROW_OFF
163
+ };
164
+ }
128
165
  }
129
166
  else {
130
167
  // Single cell reference - create default size checkbox
168
+ const decoded = col_cache_1.colCache.decodeAddress(range);
131
169
  tl = {
132
170
  col: decoded.col - 1,
133
171
  colOff: DEFAULT_COL_OFF,
@@ -293,8 +293,10 @@ class WorkSheetXform extends base_xform_1.BaseXform {
293
293
  for (const control of model.formControls) {
294
294
  const globalCtrlPropId = options.formControlRefs.length + 1;
295
295
  control.ctrlPropId = globalCtrlPropId;
296
+ const relId = nextRid(rels);
297
+ control.ctrlPropRelId = relId;
296
298
  rels.push({
297
- Id: nextRid(rels),
299
+ Id: relId,
298
300
  Type: rel_type_1.RelType.CtrlProp,
299
301
  Target: (0, ooxml_paths_1.ctrlPropRelTargetFromWorksheet)(globalCtrlPropId)
300
302
  });
@@ -357,15 +359,27 @@ class WorkSheetXform extends base_xform_1.BaseXform {
357
359
  this.map.drawing.render(xmlStream, model.drawing); // Note: must be after rowBreaks/colBreaks
358
360
  this.map.picture.render(xmlStream, model.background); // Note: must be after drawing
359
361
  this.map.tableParts.render(xmlStream, model.tables);
360
- this.map.extLst.render(xmlStream, model);
362
+ // Controls section for legacy form controls (checkboxes, etc.)
363
+ // Excel expects <controls> entries that reference ctrlProp relationships.
364
+ if (model.formControls && model.formControls.length > 0) {
365
+ xmlStream.openNode("controls");
366
+ for (const control of model.formControls) {
367
+ if (control.ctrlPropRelId) {
368
+ xmlStream.leafNode("control", { shapeId: control.shapeId, "r:id": control.ctrlPropRelId });
369
+ }
370
+ }
371
+ xmlStream.closeNode();
372
+ }
361
373
  if (model.rels) {
362
- // add a <legacyDrawing /> node for each comment
374
+ // Add a <legacyDrawing /> node for each VML drawing relationship (comments and/or form controls).
363
375
  model.rels.forEach(rel => {
364
376
  if (rel.Type === rel_type_1.RelType.VmlDrawing) {
365
377
  xmlStream.leafNode("legacyDrawing", { "r:id": rel.Id });
366
378
  }
367
379
  });
368
380
  }
381
+ // extLst should be the last element in the worksheet.
382
+ this.map.extLst.render(xmlStream, model);
369
383
  xmlStream.closeNode();
370
384
  }
371
385
  parseOpen(node) {
@@ -425,55 +425,58 @@ class Readable extends event_emitter_1.EventEmitter {
425
425
  // causing `instanceof Transform/Writable/Duplex` to fail even when the object
426
426
  // is a valid destination.
427
427
  const dest = destination;
428
- // Get the actual writable target.
429
- // Prefer internal `_writable` (Transform/Duplex wrappers), else treat the destination as writable-like.
430
- const candidate = dest?._writable ?? dest;
431
- const hasWrite = typeof candidate?.write === "function";
432
- const hasEnd = typeof candidate?.end === "function";
433
- const hasOn = typeof candidate?.on === "function";
434
- const hasOnce = typeof candidate?.once === "function";
435
- const hasOff = typeof candidate?.off === "function";
436
- if (!hasWrite || !hasEnd || (!hasOnce && !hasOn) || (!hasOff && !candidate?.removeListener)) {
428
+ // For event handling (drain, once, off), we need the object that emits events.
429
+ // For write/end, we must call the destination's own write()/end() methods,
430
+ // NOT the internal _writable, because Transform.write() has important logic
431
+ // (like auto-consume) that _writable.write() bypasses.
432
+ const eventTarget = dest;
433
+ const hasWrite = typeof dest?.write === "function";
434
+ const hasEnd = typeof dest?.end === "function";
435
+ const hasOn = typeof eventTarget?.on === "function";
436
+ const hasOnce = typeof eventTarget?.once === "function";
437
+ const hasOff = typeof eventTarget?.off === "function";
438
+ if (!hasWrite || !hasEnd || (!hasOnce && !hasOn) || (!hasOff && !eventTarget?.removeListener)) {
437
439
  throw new Error("Readable.pipe: invalid destination");
438
440
  }
439
- const target = candidate;
440
- this._pipeTo.push(target);
441
+ this._pipeTo.push(dest);
441
442
  // Create listeners that we can later remove
442
443
  const dataListener = (chunk) => {
443
- const canWrite = target.write(chunk);
444
+ // Call destination's write() method (not internal _writable.write())
445
+ // This ensures Transform.write() logic runs properly
446
+ const canWrite = dest.write(chunk);
444
447
  if (!canWrite) {
445
448
  this.pause();
446
- if (typeof target.once === "function") {
447
- target.once("drain", () => this.resume());
449
+ if (typeof eventTarget.once === "function") {
450
+ eventTarget.once("drain", () => this.resume());
448
451
  }
449
452
  else {
450
453
  const resumeOnce = () => {
451
- if (typeof target.off === "function") {
452
- target.off("drain", resumeOnce);
454
+ if (typeof eventTarget.off === "function") {
455
+ eventTarget.off("drain", resumeOnce);
453
456
  }
454
- else if (typeof target.removeListener === "function") {
455
- target.removeListener("drain", resumeOnce);
457
+ else if (typeof eventTarget.removeListener === "function") {
458
+ eventTarget.removeListener("drain", resumeOnce);
456
459
  }
457
460
  this.resume();
458
461
  };
459
- target.on("drain", resumeOnce);
462
+ eventTarget.on("drain", resumeOnce);
460
463
  }
461
464
  }
462
465
  };
463
466
  const endListener = () => {
464
- target.end();
467
+ dest.end();
465
468
  };
466
469
  const errorListener = (err) => {
467
- if (typeof target.destroy === "function") {
468
- target.destroy(err);
470
+ if (typeof dest.destroy === "function") {
471
+ dest.destroy(err);
469
472
  }
470
473
  else {
471
474
  // Best-effort: forward error to the destination if it supports events.
472
- target.emit?.("error", err);
475
+ eventTarget.emit?.("error", err);
473
476
  }
474
477
  };
475
478
  // Store listeners for later removal in unpipe
476
- this._pipeListeners.set(target, {
479
+ this._pipeListeners.set(dest, {
477
480
  data: dataListener,
478
481
  end: endListener,
479
482
  error: errorListener
@@ -1401,6 +1404,8 @@ class Transform extends event_emitter_1.EventEmitter {
1401
1404
  this._autoConsumeEnded = false;
1402
1405
  /** @internal - promise that resolves when auto-consume finishes */
1403
1406
  this._autoConsumePromise = null;
1407
+ /** @internal - list of piped destinations for forwarding auto-consumed data */
1408
+ this._pipeDestinations = [];
1404
1409
  this.objectMode = options?.objectMode ?? false;
1405
1410
  const userTransform = options?.transform;
1406
1411
  const userFlush = options?.flush;
@@ -1667,10 +1672,18 @@ class Transform extends event_emitter_1.EventEmitter {
1667
1672
  for await (const chunk of this._readable) {
1668
1673
  // Buffer the data for later retrieval
1669
1674
  this._autoConsumedBuffer.push(chunk);
1675
+ // Forward to any piped destinations
1676
+ for (const dest of this._pipeDestinations) {
1677
+ dest.write(chunk);
1678
+ }
1670
1679
  // Also emit data event for listeners
1671
1680
  this.emit("data", chunk);
1672
1681
  }
1673
1682
  this._autoConsumeEnded = true;
1683
+ // End all piped destinations
1684
+ for (const dest of this._pipeDestinations) {
1685
+ dest.end();
1686
+ }
1674
1687
  this.emit("end");
1675
1688
  }
1676
1689
  catch (err) {
@@ -1720,8 +1733,28 @@ class Transform extends event_emitter_1.EventEmitter {
1720
1733
  * Pipe to another stream (writable, transform, or duplex)
1721
1734
  */
1722
1735
  pipe(destination) {
1723
- // Mark as having consumer to prevent auto-consume conflict
1736
+ // Mark as having consumer to prevent new auto-consume from starting
1724
1737
  this._hasDataConsumer = true;
1738
+ // Get the writable target - handle both Transform (with internal _writable) and plain Writable
1739
+ const dest = destination;
1740
+ const target = dest?._writable ?? dest;
1741
+ // Register destination for forwarding
1742
+ this._pipeDestinations.push(target);
1743
+ // If auto-consume is running or has run, we need to handle buffered data ourselves
1744
+ if (this._readableConsuming) {
1745
+ // Forward any buffered data from auto-consume to the destination
1746
+ for (let i = 0; i < this._autoConsumedBuffer.length; i++) {
1747
+ target.write(this._autoConsumedBuffer[i]);
1748
+ }
1749
+ // If auto-consume has ended, end the destination too
1750
+ if (this._autoConsumeEnded) {
1751
+ target.end();
1752
+ }
1753
+ // Don't call _readable.pipe() - auto-consume already consumed _readable
1754
+ // Future data will be forwarded via the 'data' event listener below
1755
+ return destination;
1756
+ }
1757
+ // No auto-consume running - use normal pipe through _readable
1725
1758
  return this._readable.pipe(destination);
1726
1759
  }
1727
1760
  /**
@@ -13,6 +13,7 @@ import { ByteQueue } from "./byte-queue.js";
13
13
  * Default threshold (in bytes) to choose the lower-overhead path.
14
14
  *
15
15
  * This is a performance knob, not a correctness requirement.
16
+ * Default: 8MB.
16
17
  */
17
18
  export const DEFAULT_COMPRESS_THRESHOLD_BYTES = 8 * 1024 * 1024;
18
19
  /**
@@ -10,7 +10,7 @@
10
10
  * - Safari >= 14.1
11
11
  * - Edge >= 89
12
12
  */
13
- import { compressWithStream, decompressWithStream, hasDeflateRawCompressionStream, hasDeflateRawDecompressionStream, resolveCompressThresholdBytes } from "./compress.base.js";
13
+ import { compressWithStream, decompressWithStream, hasDeflateRawCompressionStream, hasDeflateRawDecompressionStream } from "./compress.base.js";
14
14
  import { inflateRaw, deflateRawCompressed } from "./deflate-fallback.js";
15
15
  import { DEFAULT_COMPRESS_LEVEL } from "./defaults.js";
16
16
  // Re-export shared types
@@ -27,6 +27,9 @@ export function hasCompressionStream() {
27
27
  /**
28
28
  * Compress data using browser's native CompressionStream or JS fallback
29
29
  *
30
+ * Note: We always prefer native CompressionStream when available because
31
+ * it's significantly faster than pure JS implementation.
32
+ *
30
33
  * @param data - Data to compress
31
34
  * @param options - Compression options
32
35
  * @returns Compressed data
@@ -39,16 +42,15 @@ export function hasCompressionStream() {
39
42
  */
40
43
  export async function compress(data, options = {}) {
41
44
  const level = options.level ?? DEFAULT_COMPRESS_LEVEL;
42
- const thresholdBytes = resolveCompressThresholdBytes(options);
43
45
  // Level 0 means no compression
44
46
  if (level === 0) {
45
47
  return data;
46
48
  }
47
- // Use native CompressionStream only for larger inputs.
48
- if (hasDeflateRawCompressionStream() && data.byteLength > thresholdBytes) {
49
+ // Always use native CompressionStream when available - it's much faster than JS
50
+ if (hasDeflateRawCompressionStream()) {
49
51
  return compressWithStream(data);
50
52
  }
51
- // Fallback to pure JS implementation
53
+ // Fallback to pure JS implementation only when native is unavailable
52
54
  return deflateRawCompressed(data);
53
55
  }
54
56
  /**
@@ -70,16 +72,21 @@ export function compressSync(data, options = {}) {
70
72
  /**
71
73
  * Decompress data using browser's native DecompressionStream or JS fallback
72
74
  *
75
+ * Note: We always prefer native DecompressionStream when available because
76
+ * it's significantly faster than pure JS implementation, regardless of data size.
77
+ * The threshold is only useful for compression where the overhead matters more.
78
+ *
73
79
  * @param data - Compressed data (deflate-raw format)
80
+ * @param options - Decompression options (kept for API parity; currently unused in browser)
74
81
  * @returns Decompressed data
75
82
  */
76
83
  export async function decompress(data, options = {}) {
77
- const thresholdBytes = resolveCompressThresholdBytes(options);
78
- // Use native DecompressionStream only for larger inputs.
79
- if (hasDeflateRawDecompressionStream() && data.byteLength > thresholdBytes) {
84
+ void options;
85
+ // Always use native DecompressionStream when available - it's much faster than JS
86
+ if (hasDeflateRawDecompressionStream()) {
80
87
  return decompressWithStream(data);
81
88
  }
82
- // Fallback to pure JS implementation
89
+ // Fallback to pure JS implementation only when native is unavailable
83
90
  return inflateRaw(data);
84
91
  }
85
92
  /**
@@ -1,5 +1,5 @@
1
1
  import { parseDosDateTimeUTC, resolveZipLastModifiedDateFromUnixSeconds } from "./utils/timestamps.js";
2
- import { Duplex, PassThrough, Transform, concatUint8Arrays, pipeline } from "../stream/index.js";
2
+ import { Duplex, PassThrough, Transform, concatUint8Arrays, pipeline, finished } from "../stream/index.js";
3
3
  import { parseTyped as parseBuffer } from "./utils/parse-buffer.js";
4
4
  import { ByteQueue } from "./byte-queue.js";
5
5
  import { indexOfUint8ArrayPattern } from "./utils/bytes.js";
@@ -499,8 +499,13 @@ export function streamUntilValidatedDataDescriptor(options) {
499
499
  queueMicrotask(pull);
500
500
  return output;
501
501
  }
502
+ /**
503
+ * Default threshold for small file optimization (5MB).
504
+ */
505
+ export const DEFAULT_PARSE_THRESHOLD_BYTES = 5 * 1024 * 1024;
502
506
  const endDirectorySignature = writeUint32LE(END_OF_CENTRAL_DIR_SIG);
503
- export async function runParseLoop(opts, io, emitter, inflateFactory, state) {
507
+ export async function runParseLoop(opts, io, emitter, inflateFactory, state, inflateRawSync) {
508
+ const thresholdBytes = opts.thresholdBytes ?? DEFAULT_PARSE_THRESHOLD_BYTES;
504
509
  while (true) {
505
510
  const sigBytes = await io.pull(4);
506
511
  if (sigBytes.length === 0) {
@@ -514,7 +519,7 @@ export async function runParseLoop(opts, io, emitter, inflateFactory, state) {
514
519
  continue;
515
520
  }
516
521
  if (signature === LOCAL_FILE_HEADER_SIG) {
517
- await readFileRecord(opts, io, emitter, inflateFactory, state);
522
+ await readFileRecord(opts, io, emitter, inflateFactory, state, thresholdBytes, inflateRawSync);
518
523
  continue;
519
524
  }
520
525
  if (signature === CENTRAL_DIR_HEADER_SIG) {
@@ -543,7 +548,7 @@ export async function runParseLoop(opts, io, emitter, inflateFactory, state) {
543
548
  return;
544
549
  }
545
550
  }
546
- async function readFileRecord(opts, io, emitter, inflateFactory, state) {
551
+ async function readFileRecord(opts, io, emitter, inflateFactory, state, thresholdBytes, inflateRawSync) {
547
552
  const { vars: headerVars, fileNameBuffer, extraFieldData } = await readLocalFileHeader(async (l) => io.pull(l));
548
553
  const vars = headerVars;
549
554
  if (state.crxHeader) {
@@ -599,6 +604,35 @@ async function readFileRecord(opts, io, emitter, inflateFactory, state) {
599
604
  extraFields: entry.extraFields
600
605
  });
601
606
  }
607
+ // Small file optimization: use sync decompression if:
608
+ // 1. Entry sizes are trusted (no data descriptor)
609
+ // 2. File size is known and below threshold
610
+ // 3. inflateRawSync is provided
611
+ // 4. File needs decompression (compressionMethod != 0)
612
+ // 5. Not autodraining
613
+ //
614
+ // We require BOTH compressedSize and uncompressedSize <= thresholdBytes.
615
+ // This prevents materializing large highly-compressible files in memory,
616
+ // which can cause massive peak RSS and negate streaming backpressure.
617
+ const sizesTrusted = !hasDataDescriptorFlag(vars.flags);
618
+ const compressedSize = vars.compressedSize || 0;
619
+ const uncompressedSize = vars.uncompressedSize || 0;
620
+ const useSmallFileOptimization = sizesTrusted &&
621
+ fileSizeKnown &&
622
+ inflateRawSync &&
623
+ vars.compressionMethod !== 0 &&
624
+ !autodraining &&
625
+ compressedSize <= thresholdBytes &&
626
+ uncompressedSize <= thresholdBytes;
627
+ if (useSmallFileOptimization) {
628
+ // Read compressed data directly and decompress synchronously
629
+ const compressedData = await io.pull(compressedSize);
630
+ const decompressedData = inflateRawSync(compressedData);
631
+ entry.end(decompressedData);
632
+ // Wait for entry stream write to complete (not for read/consume)
633
+ await finished(entry, { readable: false });
634
+ return;
635
+ }
602
636
  const inflater = vars.compressionMethod && !autodraining ? inflateFactory() : new PassThrough();
603
637
  if (fileSizeKnown) {
604
638
  await pipeline(io.stream(vars.compressedSize || 0), inflater, entry);
@@ -341,7 +341,12 @@ export function createParseClass(createInflateRawFn) {
341
341
  }
342
342
  };
343
343
  queueMicrotask(() => {
344
- this._parsingDone = runParseLoop(this._opts, io, emitter, () => createInflateRawFn(), this._driverState);
344
+ // NOTE: We intentionally do NOT pass inflateRawSync to runParseLoop in browser.
345
+ // Browser's native DecompressionStream is faster than our pure-JS fallback,
346
+ // so we always use the streaming path for decompression in browsers.
347
+ this._parsingDone = runParseLoop(this._opts, io, emitter, () => createInflateRawFn(), this._driverState
348
+ // No inflateRawSync - always use streaming DecompressionStream in browser
349
+ );
345
350
  this._parsingDone.catch((e) => {
346
351
  if (!this.__emittedError || this.__emittedError !== e) {
347
352
  this.__emittedError = e;
@@ -50,7 +50,7 @@ export function createParseClass(createInflateRawFn) {
50
50
  }
51
51
  };
52
52
  // Parse records as data arrives. Only emit `close` when parsing is complete.
53
- runParseLoop(this._opts, io, emitter, createInflateRawFn, this._driverState).catch((e) => {
53
+ runParseLoop(this._opts, io, emitter, createInflateRawFn, this._driverState, (data) => zlib.inflateRawSync(data)).catch((e) => {
54
54
  if (!this.__emittedError || this.__emittedError !== e) {
55
55
  this.emit("error", e);
56
56
  }
@@ -106,25 +106,63 @@ class FormCheckbox {
106
106
  let tl;
107
107
  let br;
108
108
  if (typeof range === "string") {
109
- // Parse cell reference like "B2" or "B2:D3"
110
- const decoded = colCache.decode(range);
111
- if ("top" in decoded) {
112
- // It's a range like "B2:D3"
113
- tl = {
114
- col: decoded.left - 1, // Convert to 0-based
115
- colOff: DEFAULT_COL_OFF,
116
- row: decoded.top - 1,
117
- rowOff: DEFAULT_ROW_OFF
118
- };
119
- br = {
120
- col: decoded.right - 1,
121
- colOff: DEFAULT_END_COL_OFF,
122
- row: decoded.bottom - 1,
123
- rowOff: DEFAULT_END_ROW_OFF
124
- };
109
+ // Parse cell reference like "B2" or range like "B2:D3"
110
+ const isRange = range.includes(":");
111
+ if (isRange) {
112
+ const decoded = colCache.decode(range);
113
+ if ("top" in decoded) {
114
+ // Treat 1-cell ranges (e.g., "J4:J4") as a single cell with default checkbox size.
115
+ if (decoded.left === decoded.right && decoded.top === decoded.bottom) {
116
+ const col = decoded.left - 1;
117
+ const row = decoded.top - 1;
118
+ tl = {
119
+ col,
120
+ colOff: DEFAULT_COL_OFF,
121
+ row,
122
+ rowOff: DEFAULT_ROW_OFF
123
+ };
124
+ br = {
125
+ col: col + 2,
126
+ colOff: DEFAULT_END_COL_OFF,
127
+ row: row + 1,
128
+ rowOff: DEFAULT_END_ROW_OFF
129
+ };
130
+ }
131
+ else {
132
+ // Regular range
133
+ tl = {
134
+ col: decoded.left - 1, // Convert to 0-based
135
+ colOff: DEFAULT_COL_OFF,
136
+ row: decoded.top - 1,
137
+ rowOff: DEFAULT_ROW_OFF
138
+ };
139
+ br = {
140
+ col: decoded.right - 1,
141
+ colOff: DEFAULT_END_COL_OFF,
142
+ row: decoded.bottom - 1,
143
+ rowOff: DEFAULT_END_ROW_OFF
144
+ };
145
+ }
146
+ }
147
+ else {
148
+ // Defensive fallback: if the cache returns an address, treat it like a single-cell ref.
149
+ tl = {
150
+ col: decoded.col - 1,
151
+ colOff: DEFAULT_COL_OFF,
152
+ row: decoded.row - 1,
153
+ rowOff: DEFAULT_ROW_OFF
154
+ };
155
+ br = {
156
+ col: decoded.col + 1,
157
+ colOff: DEFAULT_END_COL_OFF,
158
+ row: decoded.row,
159
+ rowOff: DEFAULT_END_ROW_OFF
160
+ };
161
+ }
125
162
  }
126
163
  else {
127
164
  // Single cell reference - create default size checkbox
165
+ const decoded = colCache.decodeAddress(range);
128
166
  tl = {
129
167
  col: decoded.col - 1,
130
168
  colOff: DEFAULT_COL_OFF,