@cj-tech-master/excelts 7.0.0 → 7.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -11,6 +11,46 @@ const byte_queue_1 = require("../shared/byte-queue.js");
11
11
  const bytes_1 = require("../shared/bytes.js");
12
12
  const text_1 = require("../shared/text.js");
13
13
  const pattern_scanner_1 = require("./pattern-scanner.js");
14
+ /**
15
+ * Returns true when `err` is the Node.js ERR_STREAM_PREMATURE_CLOSE error.
16
+ *
17
+ * This error is emitted by `finished()` / `pipeline()` when a stream is
18
+ * destroyed before it has properly ended (e.g. a consumer breaks out of a
19
+ * `for await` loop, or the entry PassThrough is destroyed by an external
20
+ * consumer). In the context of ZIP parsing, a premature close on an *entry*
21
+ * stream is not a fatal error — the parse loop only needs to advance the ZIP
22
+ * cursor past the entry's compressed data.
23
+ */
24
+ function isPrematureCloseError(err) {
25
+ if (!(err instanceof Error)) {
26
+ return false;
27
+ }
28
+ return err.code === "ERR_STREAM_PREMATURE_CLOSE" || err.message === "Premature close";
29
+ }
30
+ /**
31
+ * Wait for an entry's writable side to finish, tolerating premature close.
32
+ *
33
+ * The parse loop calls this after pumping all compressed data into an entry.
34
+ * It ensures the decompressed data has been flushed through the inflater →
35
+ * entry pipeline before advancing the ZIP cursor.
36
+ *
37
+ * If the consumer has already destroyed the entry or is autodraining it (e.g. early
38
+ * break, external destroy, Readable.from() wrapper), `finished()` rejects
39
+ * with ERR_STREAM_PREMATURE_CLOSE. This is not an error for the parse loop
40
+ * — the compressed data has been fully read from the ZIP cursor, so we
41
+ * can safely continue.
42
+ */
43
+ async function awaitEntryCompletion(entry) {
44
+ try {
45
+ await (0, _stream_1.finished)(entry, { readable: false });
46
+ }
47
+ catch (err) {
48
+ if (!isPrematureCloseError(err)) {
49
+ throw err;
50
+ }
51
+ // Entry was destroyed or is autodraining — treat as normal completion.
52
+ }
53
+ }
14
54
  const parser_core_1 = require("./parser-core.js");
15
55
  exports.DEFAULT_UNZIP_STREAM_HIGH_WATER_MARK = 256 * 1024;
16
56
  function autodrain(stream) {
@@ -423,6 +463,7 @@ function streamUntilValidatedDataDescriptor(options) {
423
463
  }
424
464
  while (available > 0) {
425
465
  // Try to find and validate a descriptor candidate.
466
+ let pendingCandidate = false;
426
467
  while (true) {
427
468
  const idx = scanner.find(source);
428
469
  if (idx === -1) {
@@ -480,15 +521,67 @@ function streamUntilValidatedDataDescriptor(options) {
480
521
  scanner.searchFrom = idx + 1;
481
522
  continue;
482
523
  }
483
- // Not enough bytes to validate yet. Re-check this candidate once more bytes arrive.
524
+ // Not enough bytes to validate yet. Re-check this candidate once
525
+ // more bytes arrive. Mark as pending so we don't accidentally
526
+ // advance searchFrom past it via onNoMatch().
484
527
  scanner.searchFrom = idx;
528
+ pendingCandidate = true;
529
+ // If the source is finished (no more bytes will arrive), attempt a
530
+ // relaxed validation: accept the descriptor without checking the
531
+ // next-record signature. This handles the case where the descriptor
532
+ // is at the very end of the available data (e.g. the last entry in
533
+ // the ZIP, or the next-record header hasn't been fully buffered yet
534
+ // due to extreme input fragmentation).
535
+ if (source.isFinished() && idx + 16 <= available) {
536
+ const descriptorCompressedSize = source.peekUint32LE(idx + 8);
537
+ const expectedCompressedSize = (bytesEmitted + idx) >>> 0;
538
+ if (descriptorCompressedSize !== null &&
539
+ descriptorCompressedSize === expectedCompressedSize) {
540
+ // Descriptor compressed size matches — accept it.
541
+ if (idx > 0) {
542
+ if (source.peekChunks && source.discard) {
543
+ const parts = source.peekChunks(idx);
544
+ let written = 0;
545
+ for (const part of parts) {
546
+ output.write(part);
547
+ written += part.length;
548
+ }
549
+ if (written > 0) {
550
+ source.discard(written);
551
+ bytesEmitted += written;
552
+ scanner.onConsume(written);
553
+ }
554
+ }
555
+ else {
556
+ output.write(source.read(idx));
557
+ bytesEmitted += idx;
558
+ scanner.onConsume(idx);
559
+ }
560
+ }
561
+ done = true;
562
+ source.maybeReleaseWriteCallback?.();
563
+ cleanup();
564
+ output.end();
565
+ return;
566
+ }
567
+ }
485
568
  break;
486
569
  }
487
- // No validated match yet.
488
- scanner.onNoMatch(available);
570
+ // Only advance the scanner's search cursor when there is no pending
571
+ // candidate waiting for more bytes. Without this guard, onNoMatch()
572
+ // would move searchFrom past the candidate, causing it to be skipped
573
+ // and the entry data to be flushed — leading to FILE_ENDED.
574
+ if (!pendingCandidate) {
575
+ scanner.onNoMatch(available);
576
+ }
489
577
  // Flush most of the buffered data but keep a tail so a potential signature
490
578
  // split across chunks can still be detected/validated.
491
- const flushLen = Math.max(0, available - keepTailBytes);
579
+ // When a pending candidate exists, do NOT flush past it.
580
+ let maxFlush = available - keepTailBytes;
581
+ if (pendingCandidate) {
582
+ maxFlush = Math.min(maxFlush, scanner.searchFrom);
583
+ }
584
+ const flushLen = Math.max(0, maxFlush);
492
585
  if (flushLen > 0) {
493
586
  if (source.peekChunks && source.discard) {
494
587
  const parts = source.peekChunks(flushLen);
@@ -681,7 +774,7 @@ async function pumpKnownCompressedSizeToEntry(io, inflater, entry, compressedSiz
681
774
  inflater.end();
682
775
  }
683
776
  // Wait for all writes to complete (not for consumption).
684
- await (0, _stream_1.finished)(entry, { readable: false });
777
+ await awaitEntryCompletion(entry);
685
778
  }
686
779
  finally {
687
780
  inflater.removeListener("error", onError);
@@ -795,8 +888,8 @@ async function readFileRecord(opts, io, emitter, inflateFactory, state, threshol
795
888
  const compressedData = await io.pull(compressedSize);
796
889
  const decompressedData = inflateRawSync(compressedData);
797
890
  entry.end(decompressedData);
798
- // Wait for entry stream write to complete (not for read/consume)
799
- await (0, _stream_1.finished)(entry, { readable: false });
891
+ // Wait for entry stream write to complete (not for read/consume).
892
+ await awaitEntryCompletion(entry);
800
893
  return;
801
894
  }
802
895
  const inflater = needsInflate
@@ -815,7 +908,30 @@ async function readFileRecord(opts, io, emitter, inflateFactory, state, threshol
815
908
  }
816
909
  return;
817
910
  }
818
- await (0, _stream_1.pipeline)(io.streamUntilDataDescriptor(), inflater, entry);
911
+ // pipeline() destroys all streams if any stream errors or closes early.
912
+ // If the entry was destroyed by the consumer, pipeline rejects with
913
+ // ERR_STREAM_PREMATURE_CLOSE. This typically happens when the entry's
914
+ // writable side is force-destroyed and the entire parse operation is
915
+ // being torn down (abort/error).
916
+ try {
917
+ await (0, _stream_1.pipeline)(io.streamUntilDataDescriptor(), inflater, entry);
918
+ }
919
+ catch (pipelineErr) {
920
+ if (!isPrematureCloseError(pipelineErr)) {
921
+ throw pipelineErr;
922
+ }
923
+ // Entry was destroyed — attempt to read the data descriptor; if it
924
+ // fails (cursor misaligned), swallow the error since the entry was
925
+ // abandoned and the operation is ending.
926
+ try {
927
+ const dd = await (0, parser_core_1.readDataDescriptor)(async (l) => io.pull(l));
928
+ entry.size = dd.uncompressedSize ?? 0;
929
+ }
930
+ catch {
931
+ // Cursor misaligned — not recoverable but not worth surfacing.
932
+ }
933
+ return;
934
+ }
819
935
  const dd = await (0, parser_core_1.readDataDescriptor)(async (l) => io.pull(l));
820
936
  entry.size = dd.uncompressedSize ?? 0;
821
937
  }
@@ -493,7 +493,26 @@ function createParseClass(createInflateRawFn) {
493
493
  this.finished = false;
494
494
  this._driverState = {};
495
495
  this._parsingDone = Promise.resolve();
496
+ // ---------------------------------------------------------------
497
+ // Parser completion — explicit deferred, independent of stream
498
+ // lifecycle events. Mirrors the Node.js Parse implementation.
499
+ // ---------------------------------------------------------------
500
+ this._parserDoneFlag = false;
501
+ this._parserError = null;
502
+ this._parserDeferred = null;
503
+ this._parserDonePromise = null;
504
+ // ---------------------------------------------------------------
505
+ // Entry queue — custom [Symbol.asyncIterator] reads from here.
506
+ // ---------------------------------------------------------------
507
+ this._entryQueue = [];
508
+ this._entryWaiter = null;
509
+ this._entriesDone = false;
496
510
  this._opts = opts;
511
+ // Route error events to the parser deferred.
512
+ this.on("error", (err) => {
513
+ this._rejectParserDeferred(err);
514
+ this._closeEntryQueue(err);
515
+ });
497
516
  // Default values are intentionally conservative to avoid memory spikes
498
517
  // when parsing large archives under slow consumers.
499
518
  const hi = Math.max(64 * 1024, opts.inputHighWaterMarkBytes ?? 2 * 1024 * 1024);
@@ -515,10 +534,13 @@ function createParseClass(createInflateRawFn) {
515
534
  },
516
535
  pushEntry: (entry) => {
517
536
  this.push(entry);
537
+ this._enqueueEntry(entry);
518
538
  },
519
539
  // Browser version historically only pushed entries when forceStream=true.
520
540
  // Keep this behavior to avoid changing stream piping semantics.
521
541
  pushEntryIfPiped: (_entry) => {
542
+ // Always feed the entry queue regardless of pipe state.
543
+ this._enqueueEntry(_entry);
522
544
  return;
523
545
  },
524
546
  emitCrxHeader: (header) => {
@@ -527,12 +549,6 @@ function createParseClass(createInflateRawFn) {
527
549
  },
528
550
  emitError: (err) => {
529
551
  this.__emittedError = err;
530
- // Ensure upstream writers don't hang waiting for a deferred write callback.
531
- if (this._writeCb) {
532
- const cb = this._writeCb;
533
- this._writeCb = undefined;
534
- cb(err);
535
- }
536
552
  this.emit("error", err);
537
553
  },
538
554
  emitClose: () => {
@@ -561,11 +577,22 @@ function createParseClass(createInflateRawFn) {
561
577
  this._parsingDone = (0, stream_base_1.runParseLoop)(this._opts, io, emitter, inflateFactory, this._driverState
562
578
  // No inflateRawSync - always use streaming DecompressionStream in browser
563
579
  );
564
- this._parsingDone.catch((e) => {
580
+ this._parsingDone.then(() => {
581
+ if (this.__emittedError) {
582
+ this._rejectParserDeferred(this.__emittedError);
583
+ this._closeEntryQueue(this.__emittedError);
584
+ }
585
+ else {
586
+ this._resolveParserDeferred();
587
+ this._closeEntryQueue();
588
+ }
589
+ }, (e) => {
565
590
  if (!this.__emittedError || this.__emittedError !== e) {
566
591
  this.__emittedError = e;
567
592
  this.emit("error", e);
568
593
  }
594
+ this._rejectParserDeferred(e);
595
+ this._closeEntryQueue(e);
569
596
  this.emit("close");
570
597
  });
571
598
  });
@@ -869,11 +896,99 @@ function createParseClass(createInflateRawFn) {
869
896
  });
870
897
  }
871
898
  promise() {
872
- return new Promise((resolve, reject) => {
873
- this.on("finish", resolve);
874
- this.on("end", resolve);
875
- this.on("error", reject);
899
+ if (this._parserDoneFlag) {
900
+ return this._parserError ? Promise.reject(this._parserError) : Promise.resolve();
901
+ }
902
+ if (this._parserDonePromise) {
903
+ return this._parserDonePromise;
904
+ }
905
+ this._parserDonePromise = new Promise((resolve, reject) => {
906
+ this._parserDeferred = { resolve, reject };
876
907
  });
908
+ return this._parserDonePromise;
909
+ }
910
+ // ---------------------------------------------------------------
911
+ // Parser completion deferred
912
+ // ---------------------------------------------------------------
913
+ _resolveParserDeferred() {
914
+ if (this._parserDoneFlag) {
915
+ return;
916
+ }
917
+ this._parserDoneFlag = true;
918
+ if (this._parserDeferred) {
919
+ const { resolve } = this._parserDeferred;
920
+ this._parserDeferred = null;
921
+ resolve();
922
+ }
923
+ }
924
+ _rejectParserDeferred(err) {
925
+ if (this._parserDoneFlag) {
926
+ return;
927
+ }
928
+ this._parserDoneFlag = true;
929
+ this._parserError = err;
930
+ if (this._parserDeferred) {
931
+ const { reject } = this._parserDeferred;
932
+ this._parserDeferred = null;
933
+ reject(err);
934
+ }
935
+ }
936
+ // ---------------------------------------------------------------
937
+ // Entry queue management
938
+ // ---------------------------------------------------------------
939
+ _enqueueEntry(entry) {
940
+ if (this._entryWaiter) {
941
+ const { resolve } = this._entryWaiter;
942
+ this._entryWaiter = null;
943
+ resolve({ value: entry, done: false });
944
+ }
945
+ else {
946
+ this._entryQueue.push(entry);
947
+ }
948
+ }
949
+ _closeEntryQueue(err) {
950
+ this._entriesDone = true;
951
+ if (this._entryWaiter) {
952
+ const waiter = this._entryWaiter;
953
+ this._entryWaiter = null;
954
+ if (err) {
955
+ waiter.reject(err);
956
+ }
957
+ else {
958
+ waiter.resolve({ value: undefined, done: true });
959
+ }
960
+ }
961
+ }
962
+ // ---------------------------------------------------------------
963
+ // Custom async iterator
964
+ // ---------------------------------------------------------------
965
+ [Symbol.asyncIterator]() {
966
+ const iterator = {
967
+ next: () => {
968
+ if (this._entryQueue.length > 0) {
969
+ return Promise.resolve({ value: this._entryQueue.shift(), done: false });
970
+ }
971
+ if (this._entriesDone) {
972
+ if (this._parserError) {
973
+ return Promise.reject(this._parserError);
974
+ }
975
+ return Promise.resolve({ value: undefined, done: true });
976
+ }
977
+ return new Promise((resolve, reject) => {
978
+ this._entryWaiter = { resolve, reject };
979
+ });
980
+ },
981
+ return: () => {
982
+ this._entriesDone = true;
983
+ this._entryQueue.length = 0;
984
+ this._entryWaiter = null;
985
+ return Promise.resolve({ value: undefined, done: true });
986
+ },
987
+ [Symbol.asyncIterator]() {
988
+ return iterator;
989
+ }
990
+ };
991
+ return iterator;
877
992
  }
878
993
  };
879
994
  }
@@ -21,29 +21,38 @@ function createParseClass(createInflateRawFn) {
21
21
  constructor(opts = {}) {
22
22
  super(opts);
23
23
  this._driverState = {};
24
- this._done = false;
25
- this._doneError = null;
26
- this._donePromise = null;
27
- this._doneDeferred = null;
24
+ // ---------------------------------------------------------------
25
+ // Parser completion — explicit deferred, independent of stream
26
+ // lifecycle events (close / end). This avoids races between
27
+ // push(null) and close that cause ERR_STREAM_PREMATURE_CLOSE in
28
+ // Node.js's default Readable async iterator.
29
+ // ---------------------------------------------------------------
30
+ this._parserDone = false;
31
+ this._parserError = null;
32
+ this._parserDeferred = null;
33
+ this._parserDonePromise = null;
34
+ // ---------------------------------------------------------------
35
+ // Entry queue — custom [Symbol.asyncIterator] reads from here
36
+ // instead of relying on Readable's default objectMode iterator
37
+ // (which uses finished() internally and races with close).
38
+ // ---------------------------------------------------------------
39
+ this._entryQueue = [];
40
+ this._entryWaiter = null;
41
+ /** True once the parser has finished producing entries. */
42
+ this._entriesDone = false;
28
43
  this._opts = opts;
29
- // Latch completion early to avoid missing terminal events, but do NOT
30
- // create a Promise eagerly (it can reject unhandled in tests/consumers
31
- // that never call `promise()`).
32
- const onDone = () => this._latchDone();
33
- const onError = (err) => this._latchError(err);
34
- this.on("close", onDone);
35
- this.on("end", onDone);
36
- this.on("error", onError);
44
+ // Always listen for error events to prevent Node.js from treating
45
+ // them as uncaught exceptions. Route them to the parser deferred.
46
+ this.on("error", (err) => {
47
+ this._rejectParser(err);
48
+ this._closeEntryQueue(err);
49
+ });
37
50
  const io = {
38
51
  pull: async (length) => this.pull(length),
39
52
  pullUntil: async (pattern, includeEof) => this.pull(pattern, includeEof),
40
53
  stream: (length) => this.stream(length),
41
54
  streamUntilDataDescriptor: () => this._streamUntilValidatedDataDescriptor(),
42
55
  setDone: () => {
43
- // If the parser reaches EOF without consuming all buffered bytes,
44
- // there may still be an in-flight writable callback waiting on
45
- // `_maybeReleaseWriteCallback()`. Release it to avoid deadlocks in
46
- // callers that await `write(..., cb)`.
47
56
  this._maybeReleaseWriteCallback();
48
57
  this.end();
49
58
  this.push(null);
@@ -54,36 +63,119 @@ function createParseClass(createInflateRawFn) {
54
63
  this.emit("entry", entry);
55
64
  },
56
65
  pushEntry: (entry) => {
66
+ // Feed the legacy Readable objectMode side (for pipe / data consumers).
57
67
  this.push(entry);
68
+ // Also feed the custom entry queue (for our async iterator).
69
+ this._enqueueEntry(entry);
58
70
  },
59
71
  pushEntryIfPiped: (entry) => {
60
72
  const state = this._readableState;
61
73
  if (state.pipesCount || (state.pipes && state.pipes.length)) {
62
74
  this.push(entry);
63
75
  }
76
+ // Always feed the entry queue regardless of pipe state.
77
+ this._enqueueEntry(entry);
64
78
  },
65
79
  emitCrxHeader: header => {
66
80
  this.crxHeader = header;
67
81
  this.emit("crx-header", header);
68
82
  },
69
83
  emitError: err => {
84
+ this.__emittedError = err;
70
85
  this.emit("error", err);
71
86
  },
72
87
  emitClose: () => {
73
88
  this.emit("close");
74
89
  }
75
90
  };
76
- // Parse records as data arrives. Only emit `close` when parsing is complete.
77
- (0, stream_base_1.runParseLoop)(this._opts, io, emitter, createInflateRawFn, this._driverState, (data) => zlib_1.default.inflateRawSync(data)).catch((e) => {
91
+ // Parse records as data arrives.
92
+ (0, stream_base_1.runParseLoop)(this._opts, io, emitter, createInflateRawFn, this._driverState, (data) => zlib_1.default.inflateRawSync(data)).then(() => {
93
+ // If an error was emitted during parsing (e.g. invalid signature),
94
+ // the parse loop returns normally but we should reject.
95
+ if (this.__emittedError) {
96
+ this._rejectParser(this.__emittedError);
97
+ this._closeEntryQueue(this.__emittedError);
98
+ }
99
+ else {
100
+ this._resolveParser();
101
+ this._closeEntryQueue();
102
+ }
103
+ }, (e) => {
78
104
  if (!this.__emittedError || this.__emittedError !== e) {
79
105
  this.emit("error", e);
80
106
  }
81
- // Best-effort: ensure upstream writers don't hang waiting for a
82
- // deferred write callback if parsing terminates early.
83
107
  this._maybeReleaseWriteCallback();
108
+ this._rejectParser(e);
109
+ this._closeEntryQueue(e);
84
110
  this.emit("close");
85
111
  });
86
112
  }
113
+ // ---------------------------------------------------------------
114
+ // Entry queue management
115
+ // ---------------------------------------------------------------
116
+ _enqueueEntry(entry) {
117
+ if (this._entryWaiter) {
118
+ // A consumer is already waiting — deliver immediately.
119
+ const { resolve } = this._entryWaiter;
120
+ this._entryWaiter = null;
121
+ resolve({ value: entry, done: false });
122
+ }
123
+ else {
124
+ this._entryQueue.push(entry);
125
+ }
126
+ }
127
+ _closeEntryQueue(err) {
128
+ this._entriesDone = true;
129
+ if (this._entryWaiter) {
130
+ const waiter = this._entryWaiter;
131
+ this._entryWaiter = null;
132
+ if (err) {
133
+ waiter.reject(err);
134
+ }
135
+ else {
136
+ waiter.resolve({ value: undefined, done: true });
137
+ }
138
+ }
139
+ }
140
+ // ---------------------------------------------------------------
141
+ // Custom async iterator — bypasses Node Readable's default
142
+ // iterator which uses finished() and races with close.
143
+ // ---------------------------------------------------------------
144
+ // Override the default Readable async iterator with our custom entry-queue
145
+ // based iterator. This avoids Node.js's Readable default iterator which uses
146
+ // finished() internally and races with the close event.
147
+ //
148
+ // We cast through `any` because ES2024+ AsyncIterator requires
149
+ // [Symbol.asyncDispose] which AsyncIterableIterator doesn't include,
150
+ // and we don't need disposal semantics here.
151
+ [Symbol.asyncIterator]() {
152
+ const iterator = {
153
+ next: () => {
154
+ if (this._entryQueue.length > 0) {
155
+ return Promise.resolve({ value: this._entryQueue.shift(), done: false });
156
+ }
157
+ if (this._entriesDone) {
158
+ if (this._parserError) {
159
+ return Promise.reject(this._parserError);
160
+ }
161
+ return Promise.resolve({ value: undefined, done: true });
162
+ }
163
+ return new Promise((resolve, reject) => {
164
+ this._entryWaiter = { resolve, reject };
165
+ });
166
+ },
167
+ return: () => {
168
+ this._entriesDone = true;
169
+ this._entryQueue.length = 0;
170
+ this._entryWaiter = null;
171
+ return Promise.resolve({ value: undefined, done: true });
172
+ },
173
+ [Symbol.asyncIterator]() {
174
+ return iterator;
175
+ }
176
+ };
177
+ return iterator;
178
+ }
87
179
  /**
88
180
  * Stream file data until we reach a DATA_DESCRIPTOR record boundary.
89
181
  */
@@ -106,52 +198,52 @@ function createParseClass(createInflateRawFn) {
106
198
  dataDescriptorSignature
107
199
  });
108
200
  }
109
- promise() {
110
- if (this._done) {
111
- return this._doneError ? Promise.reject(this._doneError) : Promise.resolve();
112
- }
113
- if (this._donePromise) {
114
- return this._donePromise;
115
- }
116
- this._donePromise = new Promise((resolve, reject) => {
117
- this._doneDeferred = { resolve, reject };
118
- });
119
- return this._donePromise;
120
- }
121
- _latchDone() {
122
- if (this._done) {
123
- return;
124
- }
125
- this._done = true;
126
- const deferred = this._doneDeferred;
127
- this._doneDeferred = null;
128
- if (!deferred) {
201
+ // ---------------------------------------------------------------
202
+ // Parser completion deferred
203
+ // ---------------------------------------------------------------
204
+ _resolveParser() {
205
+ if (this._parserDone) {
129
206
  return;
130
207
  }
131
- try {
132
- deferred.resolve();
133
- }
134
- catch {
135
- // ignore
208
+ this._parserDone = true;
209
+ if (this._parserDeferred) {
210
+ const { resolve } = this._parserDeferred;
211
+ this._parserDeferred = null;
212
+ resolve();
136
213
  }
137
214
  }
138
- _latchError(err) {
139
- if (this._done) {
215
+ _rejectParser(err) {
216
+ if (this._parserDone) {
140
217
  return;
141
218
  }
142
- this._done = true;
143
- this._doneError = err;
144
- const deferred = this._doneDeferred;
145
- this._doneDeferred = null;
146
- if (!deferred) {
147
- return;
219
+ this._parserDone = true;
220
+ this._parserError = err;
221
+ if (this._parserDeferred) {
222
+ const { reject } = this._parserDeferred;
223
+ this._parserDeferred = null;
224
+ reject(err);
148
225
  }
149
- try {
150
- deferred.reject(err);
226
+ }
227
+ /**
228
+ * Returns a promise that resolves when the parser has finished
229
+ * processing all ZIP records, or rejects on parse error.
230
+ *
231
+ * This is driven by an internal deferred that is resolved/rejected
232
+ * directly by the parse loop — it does NOT depend on stream
233
+ * lifecycle events (close / end), avoiding the
234
+ * ERR_STREAM_PREMATURE_CLOSE race.
235
+ */
236
+ promise() {
237
+ if (this._parserDone) {
238
+ return this._parserError ? Promise.reject(this._parserError) : Promise.resolve();
151
239
  }
152
- catch {
153
- // ignore
240
+ if (this._parserDonePromise) {
241
+ return this._parserDonePromise;
154
242
  }
243
+ this._parserDonePromise = new Promise((resolve, reject) => {
244
+ this._parserDeferred = { resolve, reject };
245
+ });
246
+ return this._parserDonePromise;
155
247
  }
156
248
  };
157
249
  }