@cj-tech-master/excelts 7.0.0 → 7.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,6 +4,46 @@ import { ByteQueue } from "../shared/byte-queue.js";
4
4
  import { EMPTY_UINT8ARRAY } from "../shared/bytes.js";
5
5
  import { decodeZipPath, resolveZipStringCodec } from "../shared/text.js";
6
6
  import { PatternScanner } from "./pattern-scanner.js";
7
+ /**
8
+ * Returns true when `err` is the Node.js ERR_STREAM_PREMATURE_CLOSE error.
9
+ *
10
+ * This error is emitted by `finished()` / `pipeline()` when a stream is
11
+ * destroyed before it has properly ended (e.g. a consumer breaks out of a
12
+ * `for await` loop, or the entry PassThrough is destroyed by an external
13
+ * consumer). In the context of ZIP parsing, a premature close on an *entry*
14
+ * stream is not a fatal error — the parse loop only needs to advance the ZIP
15
+ * cursor past the entry's compressed data.
16
+ */
17
+ function isPrematureCloseError(err) {
18
+ if (!(err instanceof Error)) {
19
+ return false;
20
+ }
21
+ return err.code === "ERR_STREAM_PREMATURE_CLOSE" || err.message === "Premature close";
22
+ }
23
+ /**
24
+ * Wait for an entry's writable side to finish, tolerating premature close.
25
+ *
26
+ * The parse loop calls this after pumping all compressed data into an entry.
27
+ * It ensures the decompressed data has been flushed through the inflater →
28
+ * entry pipeline before advancing the ZIP cursor.
29
+ *
30
+ * If the consumer has already destroyed or is autodraining the entry (e.g. early
31
+ * break, external destroy, Readable.from() wrapper), `finished()` rejects
32
+ * with ERR_STREAM_PREMATURE_CLOSE. This is not an error for the parse loop
33
+ * — the compressed data has been fully read from the ZIP cursor, so we
34
+ * can safely continue.
35
+ */
36
+ async function awaitEntryCompletion(entry) {
37
+ try {
38
+ await finished(entry, { readable: false });
39
+ }
40
+ catch (err) {
41
+ if (!isPrematureCloseError(err)) {
42
+ throw err;
43
+ }
44
+ // Entry was destroyed or is autodraining — treat as normal completion.
45
+ }
46
+ }
7
47
  import { DEFAULT_PARSE_THRESHOLD_BYTES, buildZipEntryProps, getZipEntryType, hasDataDescriptorFlag, isFileSizeKnown, parseExtraField, readDataDescriptor, readLocalFileHeader, resolveZipEntryLastModifiedDateTime, runParseLoopCore, isValidZipRecordSignature } from "./parser-core.js";
8
48
  export const DEFAULT_UNZIP_STREAM_HIGH_WATER_MARK = 256 * 1024;
9
49
  export function autodrain(stream) {
@@ -415,6 +455,7 @@ export function streamUntilValidatedDataDescriptor(options) {
415
455
  }
416
456
  while (available > 0) {
417
457
  // Try to find and validate a descriptor candidate.
458
+ let pendingCandidate = false;
418
459
  while (true) {
419
460
  const idx = scanner.find(source);
420
461
  if (idx === -1) {
@@ -472,15 +513,67 @@ export function streamUntilValidatedDataDescriptor(options) {
472
513
  scanner.searchFrom = idx + 1;
473
514
  continue;
474
515
  }
475
- // Not enough bytes to validate yet. Re-check this candidate once more bytes arrive.
516
+ // Not enough bytes to validate yet. Re-check this candidate once
517
+ // more bytes arrive. Mark as pending so we don't accidentally
518
+ // advance searchFrom past it via onNoMatch().
476
519
  scanner.searchFrom = idx;
520
+ pendingCandidate = true;
521
+ // If the source is finished (no more bytes will arrive), attempt a
522
+ // relaxed validation: accept the descriptor without checking the
523
+ // next-record signature. This handles the case where the descriptor
524
+ // is at the very end of the available data (e.g. the last entry in
525
+ // the ZIP, or the next-record header hasn't been fully buffered yet
526
+ // due to extreme input fragmentation).
527
+ if (source.isFinished() && idx + 16 <= available) {
528
+ const descriptorCompressedSize = source.peekUint32LE(idx + 8);
529
+ const expectedCompressedSize = (bytesEmitted + idx) >>> 0;
530
+ if (descriptorCompressedSize !== null &&
531
+ descriptorCompressedSize === expectedCompressedSize) {
532
+ // Descriptor compressed size matches — accept it.
533
+ if (idx > 0) {
534
+ if (source.peekChunks && source.discard) {
535
+ const parts = source.peekChunks(idx);
536
+ let written = 0;
537
+ for (const part of parts) {
538
+ output.write(part);
539
+ written += part.length;
540
+ }
541
+ if (written > 0) {
542
+ source.discard(written);
543
+ bytesEmitted += written;
544
+ scanner.onConsume(written);
545
+ }
546
+ }
547
+ else {
548
+ output.write(source.read(idx));
549
+ bytesEmitted += idx;
550
+ scanner.onConsume(idx);
551
+ }
552
+ }
553
+ done = true;
554
+ source.maybeReleaseWriteCallback?.();
555
+ cleanup();
556
+ output.end();
557
+ return;
558
+ }
559
+ }
477
560
  break;
478
561
  }
479
- // No validated match yet.
480
- scanner.onNoMatch(available);
562
+ // Only advance the scanner's search cursor when there is no pending
563
+ // candidate waiting for more bytes. Without this guard, onNoMatch()
564
+ // would move searchFrom past the candidate, causing it to be skipped
565
+ // and the entry data to be flushed — leading to FILE_ENDED.
566
+ if (!pendingCandidate) {
567
+ scanner.onNoMatch(available);
568
+ }
481
569
  // Flush most of the buffered data but keep a tail so a potential signature
482
570
  // split across chunks can still be detected/validated.
483
- const flushLen = Math.max(0, available - keepTailBytes);
571
+ // When a pending candidate exists, do NOT flush past it.
572
+ let maxFlush = available - keepTailBytes;
573
+ if (pendingCandidate) {
574
+ maxFlush = Math.min(maxFlush, scanner.searchFrom);
575
+ }
576
+ const flushLen = Math.max(0, maxFlush);
484
577
  if (flushLen > 0) {
485
578
  if (source.peekChunks && source.discard) {
486
579
  const parts = source.peekChunks(flushLen);
@@ -673,7 +766,7 @@ async function pumpKnownCompressedSizeToEntry(io, inflater, entry, compressedSiz
673
766
  inflater.end();
674
767
  }
675
768
  // Wait for all writes to complete (not for consumption).
676
- await finished(entry, { readable: false });
769
+ await awaitEntryCompletion(entry);
677
770
  }
678
771
  finally {
679
772
  inflater.removeListener("error", onError);
@@ -787,8 +880,8 @@ async function readFileRecord(opts, io, emitter, inflateFactory, state, threshol
787
880
  const compressedData = await io.pull(compressedSize);
788
881
  const decompressedData = inflateRawSync(compressedData);
789
882
  entry.end(decompressedData);
790
- // Wait for entry stream write to complete (not for read/consume)
791
- await finished(entry, { readable: false });
883
+ // Wait for entry stream write to complete (not for read/consume).
884
+ await awaitEntryCompletion(entry);
792
885
  return;
793
886
  }
794
887
  const inflater = needsInflate
@@ -807,7 +900,30 @@ async function readFileRecord(opts, io, emitter, inflateFactory, state, threshol
807
900
  }
808
901
  return;
809
902
  }
810
- await pipeline(io.streamUntilDataDescriptor(), inflater, entry);
903
+ // pipeline() destroys all streams if any stream errors or closes early.
904
+ // If the entry was destroyed by the consumer, pipeline rejects with
905
+ // ERR_STREAM_PREMATURE_CLOSE. This typically happens when the entry's
906
+ // writable side is force-destroyed and the entire parse operation is
907
+ // being torn down (abort/error).
908
+ try {
909
+ await pipeline(io.streamUntilDataDescriptor(), inflater, entry);
910
+ }
911
+ catch (pipelineErr) {
912
+ if (!isPrematureCloseError(pipelineErr)) {
913
+ throw pipelineErr;
914
+ }
915
+ // Entry was destroyed — attempt to read the data descriptor; if it
916
+ // fails (cursor misaligned), swallow the error since the entry was
917
+ // abandoned and the operation is ending.
918
+ try {
919
+ const dd = await readDataDescriptor(async (l) => io.pull(l));
920
+ entry.size = dd.uncompressedSize ?? 0;
921
+ }
922
+ catch {
923
+ // Cursor misaligned — not recoverable but not worth surfacing.
924
+ }
925
+ return;
926
+ }
811
927
  const dd = await readDataDescriptor(async (l) => io.pull(l));
812
928
  entry.size = dd.uncompressedSize ?? 0;
813
929
  }
@@ -488,7 +488,26 @@ export function createParseClass(createInflateRawFn) {
488
488
  this.finished = false;
489
489
  this._driverState = {};
490
490
  this._parsingDone = Promise.resolve();
491
+ // ---------------------------------------------------------------
492
+ // Parser completion — explicit deferred, independent of stream
493
+ // lifecycle events. Mirrors the Node.js Parse implementation.
494
+ // ---------------------------------------------------------------
495
+ this._parserDoneFlag = false;
496
+ this._parserError = null;
497
+ this._parserDeferred = null;
498
+ this._parserDonePromise = null;
499
+ // ---------------------------------------------------------------
500
+ // Entry queue — custom [Symbol.asyncIterator] reads from here.
501
+ // ---------------------------------------------------------------
502
+ this._entryQueue = [];
503
+ this._entryWaiter = null;
504
+ this._entriesDone = false;
491
505
  this._opts = opts;
506
+ // Route error events to the parser deferred.
507
+ this.on("error", (err) => {
508
+ this._rejectParserDeferred(err);
509
+ this._closeEntryQueue(err);
510
+ });
492
511
  // Default values are intentionally conservative to avoid memory spikes
493
512
  // when parsing large archives under slow consumers.
494
513
  const hi = Math.max(64 * 1024, opts.inputHighWaterMarkBytes ?? 2 * 1024 * 1024);
@@ -510,10 +529,13 @@ export function createParseClass(createInflateRawFn) {
510
529
  },
511
530
  pushEntry: (entry) => {
512
531
  this.push(entry);
532
+ this._enqueueEntry(entry);
513
533
  },
514
534
  // Browser version historically only pushed entries when forceStream=true.
515
535
  // Keep this behavior to avoid changing stream piping semantics.
516
536
  pushEntryIfPiped: (_entry) => {
537
+ // Always feed the entry queue regardless of pipe state.
538
+ this._enqueueEntry(_entry);
517
539
  return;
518
540
  },
519
541
  emitCrxHeader: (header) => {
@@ -522,12 +544,6 @@ export function createParseClass(createInflateRawFn) {
522
544
  },
523
545
  emitError: (err) => {
524
546
  this.__emittedError = err;
525
- // Ensure upstream writers don't hang waiting for a deferred write callback.
526
- if (this._writeCb) {
527
- const cb = this._writeCb;
528
- this._writeCb = undefined;
529
- cb(err);
530
- }
531
547
  this.emit("error", err);
532
548
  },
533
549
  emitClose: () => {
@@ -556,11 +572,22 @@ export function createParseClass(createInflateRawFn) {
556
572
  this._parsingDone = runParseLoop(this._opts, io, emitter, inflateFactory, this._driverState
557
573
  // No inflateRawSync - always use streaming DecompressionStream in browser
558
574
  );
559
- this._parsingDone.catch((e) => {
575
+ this._parsingDone.then(() => {
576
+ if (this.__emittedError) {
577
+ this._rejectParserDeferred(this.__emittedError);
578
+ this._closeEntryQueue(this.__emittedError);
579
+ }
580
+ else {
581
+ this._resolveParserDeferred();
582
+ this._closeEntryQueue();
583
+ }
584
+ }, (e) => {
560
585
  if (!this.__emittedError || this.__emittedError !== e) {
561
586
  this.__emittedError = e;
562
587
  this.emit("error", e);
563
588
  }
589
+ this._rejectParserDeferred(e);
590
+ this._closeEntryQueue(e);
564
591
  this.emit("close");
565
592
  });
566
593
  });
@@ -864,11 +891,99 @@ export function createParseClass(createInflateRawFn) {
864
891
  });
865
892
  }
866
893
  promise() {
867
- return new Promise((resolve, reject) => {
868
- this.on("finish", resolve);
869
- this.on("end", resolve);
870
- this.on("error", reject);
894
+ if (this._parserDoneFlag) {
895
+ return this._parserError ? Promise.reject(this._parserError) : Promise.resolve();
896
+ }
897
+ if (this._parserDonePromise) {
898
+ return this._parserDonePromise;
899
+ }
900
+ this._parserDonePromise = new Promise((resolve, reject) => {
901
+ this._parserDeferred = { resolve, reject };
871
902
  });
903
+ return this._parserDonePromise;
904
+ }
905
+ // ---------------------------------------------------------------
906
+ // Parser completion deferred
907
+ // ---------------------------------------------------------------
908
+ _resolveParserDeferred() {
909
+ if (this._parserDoneFlag) {
910
+ return;
911
+ }
912
+ this._parserDoneFlag = true;
913
+ if (this._parserDeferred) {
914
+ const { resolve } = this._parserDeferred;
915
+ this._parserDeferred = null;
916
+ resolve();
917
+ }
918
+ }
919
+ _rejectParserDeferred(err) {
920
+ if (this._parserDoneFlag) {
921
+ return;
922
+ }
923
+ this._parserDoneFlag = true;
924
+ this._parserError = err;
925
+ if (this._parserDeferred) {
926
+ const { reject } = this._parserDeferred;
927
+ this._parserDeferred = null;
928
+ reject(err);
929
+ }
930
+ }
931
+ // ---------------------------------------------------------------
932
+ // Entry queue management
933
+ // ---------------------------------------------------------------
934
+ _enqueueEntry(entry) {
935
+ if (this._entryWaiter) {
936
+ const { resolve } = this._entryWaiter;
937
+ this._entryWaiter = null;
938
+ resolve({ value: entry, done: false });
939
+ }
940
+ else {
941
+ this._entryQueue.push(entry);
942
+ }
943
+ }
944
+ _closeEntryQueue(err) {
945
+ this._entriesDone = true;
946
+ if (this._entryWaiter) {
947
+ const waiter = this._entryWaiter;
948
+ this._entryWaiter = null;
949
+ if (err) {
950
+ waiter.reject(err);
951
+ }
952
+ else {
953
+ waiter.resolve({ value: undefined, done: true });
954
+ }
955
+ }
956
+ }
957
+ // ---------------------------------------------------------------
958
+ // Custom async iterator
959
+ // ---------------------------------------------------------------
960
+ [Symbol.asyncIterator]() {
961
+ const iterator = {
962
+ next: () => {
963
+ if (this._entryQueue.length > 0) {
964
+ return Promise.resolve({ value: this._entryQueue.shift(), done: false });
965
+ }
966
+ if (this._entriesDone) {
967
+ if (this._parserError) {
968
+ return Promise.reject(this._parserError);
969
+ }
970
+ return Promise.resolve({ value: undefined, done: true });
971
+ }
972
+ return new Promise((resolve, reject) => {
973
+ this._entryWaiter = { resolve, reject };
974
+ });
975
+ },
976
+ return: () => {
977
+ this._entriesDone = true;
978
+ this._entryQueue.length = 0;
979
+ this._entryWaiter = null;
980
+ return Promise.resolve({ value: undefined, done: true });
981
+ },
982
+ [Symbol.asyncIterator]() {
983
+ return iterator;
984
+ }
985
+ };
986
+ return iterator;
872
987
  }
873
988
  };
874
989
  }
@@ -13,29 +13,38 @@ export function createParseClass(createInflateRawFn) {
13
13
  constructor(opts = {}) {
14
14
  super(opts);
15
15
  this._driverState = {};
16
- this._done = false;
17
- this._doneError = null;
18
- this._donePromise = null;
19
- this._doneDeferred = null;
16
+ // ---------------------------------------------------------------
17
+ // Parser completion — explicit deferred, independent of stream
18
+ // lifecycle events (close / end). This avoids races between
19
+ // push(null) and close that cause ERR_STREAM_PREMATURE_CLOSE in
20
+ // Node.js's default Readable async iterator.
21
+ // ---------------------------------------------------------------
22
+ this._parserDone = false;
23
+ this._parserError = null;
24
+ this._parserDeferred = null;
25
+ this._parserDonePromise = null;
26
+ // ---------------------------------------------------------------
27
+ // Entry queue — custom [Symbol.asyncIterator] reads from here
28
+ // instead of relying on Readable's default objectMode iterator
29
+ // (which uses finished() internally and races with close).
30
+ // ---------------------------------------------------------------
31
+ this._entryQueue = [];
32
+ this._entryWaiter = null;
33
+ /** True once the parser has finished producing entries. */
34
+ this._entriesDone = false;
20
35
  this._opts = opts;
21
- // Latch completion early to avoid missing terminal events, but do NOT
22
- // create a Promise eagerly (it can reject unhandled in tests/consumers
23
- // that never call `promise()`).
24
- const onDone = () => this._latchDone();
25
- const onError = (err) => this._latchError(err);
26
- this.on("close", onDone);
27
- this.on("end", onDone);
28
- this.on("error", onError);
36
+ // Always listen for error events to prevent Node.js from treating
37
+ // them as uncaught exceptions. Route them to the parser deferred.
38
+ this.on("error", (err) => {
39
+ this._rejectParser(err);
40
+ this._closeEntryQueue(err);
41
+ });
29
42
  const io = {
30
43
  pull: async (length) => this.pull(length),
31
44
  pullUntil: async (pattern, includeEof) => this.pull(pattern, includeEof),
32
45
  stream: (length) => this.stream(length),
33
46
  streamUntilDataDescriptor: () => this._streamUntilValidatedDataDescriptor(),
34
47
  setDone: () => {
35
- // If the parser reaches EOF without consuming all buffered bytes,
36
- // there may still be an in-flight writable callback waiting on
37
- // `_maybeReleaseWriteCallback()`. Release it to avoid deadlocks in
38
- // callers that await `write(..., cb)`.
39
48
  this._maybeReleaseWriteCallback();
40
49
  this.end();
41
50
  this.push(null);
@@ -46,36 +55,119 @@ export function createParseClass(createInflateRawFn) {
46
55
  this.emit("entry", entry);
47
56
  },
48
57
  pushEntry: (entry) => {
58
+ // Feed the legacy Readable objectMode side (for pipe / data consumers).
49
59
  this.push(entry);
60
+ // Also feed the custom entry queue (for our async iterator).
61
+ this._enqueueEntry(entry);
50
62
  },
51
63
  pushEntryIfPiped: (entry) => {
52
64
  const state = this._readableState;
53
65
  if (state.pipesCount || (state.pipes && state.pipes.length)) {
54
66
  this.push(entry);
55
67
  }
68
+ // Always feed the entry queue regardless of pipe state.
69
+ this._enqueueEntry(entry);
56
70
  },
57
71
  emitCrxHeader: header => {
58
72
  this.crxHeader = header;
59
73
  this.emit("crx-header", header);
60
74
  },
61
75
  emitError: err => {
76
+ this.__emittedError = err;
62
77
  this.emit("error", err);
63
78
  },
64
79
  emitClose: () => {
65
80
  this.emit("close");
66
81
  }
67
82
  };
68
- // Parse records as data arrives. Only emit `close` when parsing is complete.
69
- runParseLoop(this._opts, io, emitter, createInflateRawFn, this._driverState, (data) => zlib.inflateRawSync(data)).catch((e) => {
83
+ // Parse records as data arrives.
84
+ runParseLoop(this._opts, io, emitter, createInflateRawFn, this._driverState, (data) => zlib.inflateRawSync(data)).then(() => {
85
+ // If an error was emitted during parsing (e.g. invalid signature),
86
+ // the parse loop returns normally but we should reject.
87
+ if (this.__emittedError) {
88
+ this._rejectParser(this.__emittedError);
89
+ this._closeEntryQueue(this.__emittedError);
90
+ }
91
+ else {
92
+ this._resolveParser();
93
+ this._closeEntryQueue();
94
+ }
95
+ }, (e) => {
70
96
  if (!this.__emittedError || this.__emittedError !== e) {
71
97
  this.emit("error", e);
72
98
  }
73
- // Best-effort: ensure upstream writers don't hang waiting for a
74
- // deferred write callback if parsing terminates early.
75
99
  this._maybeReleaseWriteCallback();
100
+ this._rejectParser(e);
101
+ this._closeEntryQueue(e);
76
102
  this.emit("close");
77
103
  });
78
104
  }
105
+ // ---------------------------------------------------------------
106
+ // Entry queue management
107
+ // ---------------------------------------------------------------
108
+ _enqueueEntry(entry) {
109
+ if (this._entryWaiter) {
110
+ // A consumer is already waiting — deliver immediately.
111
+ const { resolve } = this._entryWaiter;
112
+ this._entryWaiter = null;
113
+ resolve({ value: entry, done: false });
114
+ }
115
+ else {
116
+ this._entryQueue.push(entry);
117
+ }
118
+ }
119
+ _closeEntryQueue(err) {
120
+ this._entriesDone = true;
121
+ if (this._entryWaiter) {
122
+ const waiter = this._entryWaiter;
123
+ this._entryWaiter = null;
124
+ if (err) {
125
+ waiter.reject(err);
126
+ }
127
+ else {
128
+ waiter.resolve({ value: undefined, done: true });
129
+ }
130
+ }
131
+ }
132
+ // ---------------------------------------------------------------
133
+ // Custom async iterator — bypasses Node Readable's default
134
+ // iterator which uses finished() and races with close.
135
+ // ---------------------------------------------------------------
136
+ // Override the default Readable async iterator with our custom entry-queue
137
+ // based iterator. This avoids Node.js's Readable default iterator which uses
138
+ // finished() internally and races with the close event.
139
+ //
140
+ // We cast through `any` because ES2024+ AsyncIterator requires
141
+ // [Symbol.asyncDispose] which AsyncIterableIterator doesn't include,
142
+ // and we don't need disposal semantics here.
143
+ [Symbol.asyncIterator]() {
144
+ const iterator = {
145
+ next: () => {
146
+ if (this._entryQueue.length > 0) {
147
+ return Promise.resolve({ value: this._entryQueue.shift(), done: false });
148
+ }
149
+ if (this._entriesDone) {
150
+ if (this._parserError) {
151
+ return Promise.reject(this._parserError);
152
+ }
153
+ return Promise.resolve({ value: undefined, done: true });
154
+ }
155
+ return new Promise((resolve, reject) => {
156
+ this._entryWaiter = { resolve, reject };
157
+ });
158
+ },
159
+ return: () => {
160
+ this._entriesDone = true;
161
+ this._entryQueue.length = 0;
162
+ this._entryWaiter = null;
163
+ return Promise.resolve({ value: undefined, done: true });
164
+ },
165
+ [Symbol.asyncIterator]() {
166
+ return iterator;
167
+ }
168
+ };
169
+ return iterator;
170
+ }
79
171
  /**
80
172
  * Stream file data until we reach a DATA_DESCRIPTOR record boundary.
81
173
  */
@@ -98,52 +190,52 @@ export function createParseClass(createInflateRawFn) {
98
190
  dataDescriptorSignature
99
191
  });
100
192
  }
101
- promise() {
102
- if (this._done) {
103
- return this._doneError ? Promise.reject(this._doneError) : Promise.resolve();
104
- }
105
- if (this._donePromise) {
106
- return this._donePromise;
107
- }
108
- this._donePromise = new Promise((resolve, reject) => {
109
- this._doneDeferred = { resolve, reject };
110
- });
111
- return this._donePromise;
112
- }
113
- _latchDone() {
114
- if (this._done) {
115
- return;
116
- }
117
- this._done = true;
118
- const deferred = this._doneDeferred;
119
- this._doneDeferred = null;
120
- if (!deferred) {
193
+ // ---------------------------------------------------------------
194
+ // Parser completion deferred
195
+ // ---------------------------------------------------------------
196
+ _resolveParser() {
197
+ if (this._parserDone) {
121
198
  return;
122
199
  }
123
- try {
124
- deferred.resolve();
125
- }
126
- catch {
127
- // ignore
200
+ this._parserDone = true;
201
+ if (this._parserDeferred) {
202
+ const { resolve } = this._parserDeferred;
203
+ this._parserDeferred = null;
204
+ resolve();
128
205
  }
129
206
  }
130
- _latchError(err) {
131
- if (this._done) {
207
+ _rejectParser(err) {
208
+ if (this._parserDone) {
132
209
  return;
133
210
  }
134
- this._done = true;
135
- this._doneError = err;
136
- const deferred = this._doneDeferred;
137
- this._doneDeferred = null;
138
- if (!deferred) {
139
- return;
211
+ this._parserDone = true;
212
+ this._parserError = err;
213
+ if (this._parserDeferred) {
214
+ const { reject } = this._parserDeferred;
215
+ this._parserDeferred = null;
216
+ reject(err);
140
217
  }
141
- try {
142
- deferred.reject(err);
218
+ }
219
+ /**
220
+ * Returns a promise that resolves when the parser has finished
221
+ * processing all ZIP records, or rejects on parse error.
222
+ *
223
+ * This is driven by an internal deferred that is resolved/rejected
224
+ * directly by the parse loop — it does NOT depend on stream
225
+ * lifecycle events (close / end), avoiding the
226
+ * ERR_STREAM_PREMATURE_CLOSE race.
227
+ */
228
+ promise() {
229
+ if (this._parserDone) {
230
+ return this._parserError ? Promise.reject(this._parserError) : Promise.resolve();
143
231
  }
144
- catch {
145
- // ignore
232
+ if (this._parserDonePromise) {
233
+ return this._parserDonePromise;
146
234
  }
235
+ this._parserDonePromise = new Promise((resolve, reject) => {
236
+ this._parserDeferred = { resolve, reject };
237
+ });
238
+ return this._parserDonePromise;
147
239
  }
148
240
  };
149
241
  }