@takeshape/streams 10.38.0 → 10.39.4
This diff shows the contents of the two publicly released package versions as they appear in their public registry, and is provided for informational purposes only.
- package/dist/index.d.ts +1 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +11 -0
- package/dist/merge.d.ts +10 -0
- package/dist/merge.d.ts.map +1 -0
- package/dist/merge.js +283 -0
- package/dist/streams.d.ts +1 -2
- package/dist/streams.d.ts.map +1 -1
- package/dist/streams.js +0 -11
- package/es/index.js +2 -1
- package/es/merge.js +276 -0
- package/es/streams.js +0 -10
- package/package.json +1 -1
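The substantive change in this release, as the diffs below show: `mergeStreams` moves out of `streams.js` into a new `merge.js` module vendored from sindresorhus/merge-streams, and its signature changes from variadic (`mergeStreams(...streams)` returning a plain `PassThrough`) to array-based (`mergeStreams(streams)` returning a `MergedStream` with `add()` and `remove()`). A minimal sketch of the call-site migration — the `a` and `b` streams here are hypothetical, not from the package:

    import { Readable } from 'node:stream';
    import { mergeStreams } from '@takeshape/streams';

    const a = Readable.from(['foo']);
    const b = Readable.from(['bar']);

    // 10.38.0 (removed): const merged = mergeStreams(a, b);
    // 10.39.4: pass an array; the result also accepts late additions.
    const merged = mergeStreams([a, b]);
    merged.add(Readable.from(['baz']));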
package/dist/index.d.ts
CHANGED
package/dist/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,WAAW,CAAC"}
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,WAAW,CAAC;AAC1B,cAAc,SAAS,CAAC"}
package/dist/index.js
CHANGED
@@ -13,4 +13,15 @@ Object.keys(_streams).forEach(function (key) {
       return _streams[key];
     }
   });
+});
+var _merge = require("./merge");
+Object.keys(_merge).forEach(function (key) {
+  if (key === "default" || key === "__esModule") return;
+  if (key in exports && exports[key] === _merge[key]) return;
+  Object.defineProperty(exports, key, {
+    enumerable: true,
+    get: function () {
+      return _merge[key];
+    }
+  });
 });
package/dist/merge.d.ts
ADDED
@@ -0,0 +1,10 @@
+/// <reference types="node" />
+import { type Readable, PassThrough as PassThroughStream } from 'node:stream';
+export declare function mergeStreams(streams: Readable[]): MergedStream;
+declare class MergedStream extends PassThroughStream {
+    #private;
+    add(stream: Readable): void;
+    remove(stream: Readable): Promise<boolean>;
+}
+export {};
+//# sourceMappingURL=merge.d.ts.map
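Per the declaration above, `remove()` is async and resolves to a boolean. A small usage sketch (stream names hypothetical): `remove()` unpipes the source without ending the merged stream, and resolves `false` once the stream is no longer attached.

    import { Readable } from 'node:stream';
    import { mergeStreams } from '@takeshape/streams';

    const a = Readable.from(['foo']);
    const merged = mergeStreams([a]);
    const removed = await merged.remove(a); // true: `a` is unpiped; `merged` stays open
    const again = await merged.remove(a);   // false: `a` is no longer attached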
package/dist/merge.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"merge.d.ts","sourceRoot":"","sources":["../../src/merge.ts"],"names":[],"mappings":";AAOA,OAAO,EACL,KAAK,QAAQ,EACb,WAAW,IAAI,iBAAiB,EAGjC,MAAM,aAAa,CAAC;AAGrB,wBAAgB,YAAY,CAAC,OAAO,EAAE,QAAQ,EAAE,GAAG,YAAY,CAsB9D;AAaD,cAAM,YAAa,SAAQ,iBAAiB;;IAQ1C,GAAG,CAAC,MAAM,EAAE,QAAQ;IAwBd,MAAM,CAAC,MAAM,EAAE,QAAQ;CAkB9B"}
package/dist/merge.js
ADDED
@@ -0,0 +1,283 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+  value: true
+});
+exports.mergeStreams = mergeStreams;
+var _nodeEvents = require("node:events");
+var _nodeStream = require("node:stream");
+var _promises = require("node:stream/promises");
+/* istanbul ignore file */
+/**
+ * Borrowed code, due to ESM-only issues
+ *
+ * @link https://github.com/sindresorhus/merge-streams/blob/main/index.js
+ */
+
+function mergeStreams(streams) {
+  if (!Array.isArray(streams)) {
+    throw new TypeError(`Expected an array, got \`${typeof streams}\`.`);
+  }
+  for (const stream of streams) {
+    validateStream(stream);
+  }
+  const objectMode = streams.some(({
+    readableObjectMode
+  }) => readableObjectMode);
+  const highWaterMark = getHighWaterMark(streams, objectMode);
+  const passThroughStream = new MergedStream({
+    objectMode,
+    writableHighWaterMark: highWaterMark,
+    readableHighWaterMark: highWaterMark
+  });
+  for (const stream of streams) {
+    passThroughStream.add(stream);
+  }
+  return passThroughStream;
+}
+const getHighWaterMark = (streams, objectMode) => {
+  if (streams.length === 0) {
+    return (0, _nodeStream.getDefaultHighWaterMark)(objectMode);
+  }
+  const highWaterMarks = streams.filter(({
+    readableObjectMode
+  }) => readableObjectMode === objectMode).map(({
+    readableHighWaterMark
+  }) => readableHighWaterMark);
+  return Math.max(...highWaterMarks);
+};
+class MergedStream extends _nodeStream.PassThrough {
+  #streams = new Set([]);
+  #ended = new Set([]);
+  #aborted = new Set([]);
+  #onFinished;
+  #unpipeEvent = Symbol('unpipe');
+  #streamPromises = new WeakMap();
+  add(stream) {
+    validateStream(stream);
+    if (this.#streams.has(stream)) {
+      return;
+    }
+    this.#streams.add(stream);
+    this.#onFinished ??= onMergedStreamFinished(this, this.#streams, this.#unpipeEvent);
+    const streamPromise = endWhenStreamsDone({
+      passThroughStream: this,
+      stream,
+      streams: this.#streams,
+      ended: this.#ended,
+      aborted: this.#aborted,
+      onFinished: this.#onFinished,
+      unpipeEvent: this.#unpipeEvent
+    });
+    this.#streamPromises.set(stream, streamPromise);
+    stream.pipe(this, {
+      end: false
+    });
+  }
+  async remove(stream) {
+    validateStream(stream);
+    if (!this.#streams.has(stream)) {
+      return false;
+    }
+    const streamPromise = this.#streamPromises.get(stream);
+    if (streamPromise === undefined) {
+      return false;
+    }
+    this.#streamPromises.delete(stream);
+    stream.unpipe(this);
+    await streamPromise;
+    return true;
+  }
+}
+const onMergedStreamFinished = async (passThroughStream, streams, unpipeEvent) => {
+  updateMaxListeners(passThroughStream, PASSTHROUGH_LISTENERS_COUNT);
+  const controller = new AbortController();
+  try {
+    await Promise.race([onMergedStreamEnd(passThroughStream, controller), onInputStreamsUnpipe(passThroughStream, streams, unpipeEvent, controller)]);
+  } finally {
+    controller.abort();
+    updateMaxListeners(passThroughStream, -PASSTHROUGH_LISTENERS_COUNT);
+  }
+};
+const onMergedStreamEnd = async (passThroughStream, {
+  signal
+}) => {
+  try {
+    await (0, _promises.finished)(passThroughStream, {
+      signal,
+      cleanup: true
+    });
+  } catch (error) {
+    errorOrAbortStream(passThroughStream, error);
+    throw error;
+  }
+};
+const onInputStreamsUnpipe = async (passThroughStream, streams, unpipeEvent, {
+  signal
+}) => {
+  for await (const [unpipedStream] of (0, _nodeEvents.on)(passThroughStream, 'unpipe', {
+    signal
+  })) {
+    if (streams.has(unpipedStream)) {
+      unpipedStream.emit(unpipeEvent);
+    }
+  }
+};
+const validateStream = stream => {
+  if (typeof stream?.pipe !== 'function') {
+    throw new TypeError(`Expected a readable stream, got: \`${typeof stream}\`.`);
+  }
+};
+const endWhenStreamsDone = async ({
+  passThroughStream,
+  stream,
+  streams,
+  ended,
+  aborted,
+  onFinished,
+  unpipeEvent
+}) => {
+  updateMaxListeners(passThroughStream, PASSTHROUGH_LISTENERS_PER_STREAM);
+  const controller = new AbortController();
+  try {
+    await Promise.race([afterMergedStreamFinished(onFinished, stream, controller), onInputStreamEnd({
+      passThroughStream,
+      stream,
+      streams,
+      ended,
+      aborted,
+      controller
+    }), onInputStreamUnpipe({
+      stream,
+      streams,
+      ended,
+      aborted,
+      unpipeEvent,
+      controller
+    })]);
+  } finally {
+    controller.abort();
+    updateMaxListeners(passThroughStream, -PASSTHROUGH_LISTENERS_PER_STREAM);
+  }
+  if (streams.size > 0 && streams.size === ended.size + aborted.size) {
+    if (ended.size === 0 && aborted.size > 0) {
+      abortStream(passThroughStream);
+    } else {
+      endStream(passThroughStream);
+    }
+  }
+};
+const afterMergedStreamFinished = async (onFinished, stream, {
+  signal
+}) => {
+  try {
+    await onFinished;
+    if (!signal.aborted) {
+      abortStream(stream);
+    }
+  } catch (error) {
+    if (!signal.aborted) {
+      errorOrAbortStream(stream, error);
+    }
+  }
+};
+const onInputStreamEnd = async ({
+  passThroughStream,
+  stream,
+  streams,
+  ended,
+  aborted,
+  controller: {
+    signal
+  }
+}) => {
+  try {
+    await (0, _promises.finished)(stream, {
+      signal,
+      cleanup: true,
+      readable: true,
+      writable: false
+    });
+    if (streams.has(stream)) {
+      ended.add(stream);
+    }
+  } catch (error) {
+    if (signal.aborted || !streams.has(stream)) {
+      return;
+    }
+    if (isAbortError(error)) {
+      aborted.add(stream);
+    } else {
+      errorStream(passThroughStream, error);
+    }
+  }
+};
+const onInputStreamUnpipe = async ({
+  stream,
+  streams,
+  ended,
+  aborted,
+  unpipeEvent,
+  controller: {
+    signal
+  }
+}) => {
+  await (0, _nodeEvents.once)(stream, unpipeEvent, {
+    signal
+  });
+  if (!stream.readable) {
+    return (0, _nodeEvents.once)(signal, 'abort', {
+      signal
+    });
+  }
+  streams.delete(stream);
+  ended.delete(stream);
+  aborted.delete(stream);
+};
+const endStream = stream => {
+  if (stream.writable) {
+    stream.end();
+  }
+};
+const errorOrAbortStream = (stream, error) => {
+  if (isAbortError(error)) {
+    abortStream(stream);
+  } else {
+    errorStream(stream, error);
+  }
+};
+
+// This is the error thrown by `finished()` on `stream.destroy()`
+const isAbortError = error => error?.code === 'ERR_STREAM_PREMATURE_CLOSE';
+const abortStream = stream => {
+  if (stream.readable || stream.writable) {
+    stream.destroy();
+  }
+};
+
+// `stream.destroy(error)` crashes the process with `uncaughtException` if no `error` event listener exists on `stream`.
+// We take care of error handling on user behalf, so we do not want this to happen.
+const errorStream = (stream, error) => {
+  if (!stream.destroyed) {
+    stream.once('error', noop);
+    stream.destroy(error);
+  }
+};
+const noop = () => {
+  // empty
+};
+const updateMaxListeners = (passThroughStream, increment) => {
+  const maxListeners = passThroughStream.getMaxListeners();
+  if (maxListeners !== 0 && maxListeners !== Number.POSITIVE_INFINITY) {
+    passThroughStream.setMaxListeners(maxListeners + increment);
+  }
+};
+
+// Number of times `passThroughStream.on()` is called regardless of streams:
+// - once due to `finished(passThroughStream)`
+// - once due to `on(passThroughStream)`
+const PASSTHROUGH_LISTENERS_COUNT = 2;
+
+// Number of times `passThroughStream.on()` is called per stream:
+// - once due to `stream.pipe(passThroughStream)`
+const PASSTHROUGH_LISTENERS_PER_STREAM = 1;
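One behavior worth noting in the vendored code above: `endWhenStreamsDone` ends the passthrough only once every tracked input has settled (`streams.size === ended.size + aborted.size`), and destroys it instead when every input aborted. A rough sketch of the observable effect, with hypothetical sources:

    import { Readable } from 'node:stream';
    import { mergeStreams } from '@takeshape/streams';

    const a = Readable.from(['1']);
    const b = Readable.from(['2']);
    const merged = mergeStreams([a, b]);
    merged.resume(); // consume, so 'end' can fire
    merged.on('end', () => {
      // reached only after BOTH `a` and `b` have ended
    });
    // Had every input been destroyed before ending, `merged` would have been
    // destroyed as well ('close' without 'end') rather than ended.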
package/dist/streams.d.ts
CHANGED
@@ -2,7 +2,7 @@
 /// <reference types="node" />
 /// <reference types="node" />
 /// <reference types="node" />
-import {
+import { Readable, type ReadableOptions, Writable, type WritableOptions, Transform } from 'node:stream';
 import { pipeline } from 'node:stream/promises';
 export { compose } from 'node:stream';
 export declare const pump: typeof pipeline;
@@ -16,5 +16,4 @@ export declare function tee(...args: NodeJS.WritableStream[] | [NodeJS.WritableS
 export declare function readableFromArray(array: any[], options?: Partial<ReadableOptions>): Readable;
 export declare function collectStreamIntoArray(array?: any[], options?: Partial<WritableOptions>): Writable;
 export declare function readStreamToBuffer(stream: NodeJS.ReadableStream): Promise<Buffer>;
-export declare function mergeStreams(...streams: NodeJS.ReadableStream[]): PassThrough;
 //# sourceMappingURL=streams.d.ts.map
package/dist/streams.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"streams.d.ts","sourceRoot":"","sources":["../../src/streams.ts"],"names":[],"mappings":";;;;AAAA,OAAO,
+{"version":3,"file":"streams.d.ts","sourceRoot":"","sources":["../../src/streams.ts"],"names":[],"mappings":";;;;AAAA,OAAO,EAAc,QAAQ,EAAE,KAAK,eAAe,EAAE,QAAQ,EAAE,KAAK,eAAe,EAAE,SAAS,EAAC,MAAM,aAAa,CAAC;AACnH,OAAO,EAAC,QAAQ,EAAC,MAAM,sBAAsB,CAAC;AAK9C,OAAO,EAAC,OAAO,EAAC,MAAM,aAAa,CAAC;AAEpC,eAAO,MAAM,IAAI,iBAAW,CAAC;AAE7B,wBAAsB,eAAe,CAAC,MAAM,EAAE,MAAM,CAAC,cAAc,GAAG,MAAM,CAAC,cAAc,GAAG,OAAO,CAAC,IAAI,CAAC,CAK1G;AAED,wBAAgB,eAAe,CAAC,SAAS,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,GAAG,aAY5D;AAED,wBAAgB,oBAAoB,CAAC,SAAS,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,OAAO,CAAC,GAAG,CAAC,aAc1E;AAED,wBAAgB,YAAY,CAAC,SAAS,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,OAAO,aAe7D;AAED,wBAAgB,iBAAiB,CAAC,WAAW,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,aAQjE;AAED,wBAAgB,mBAAmB,CAAC,KAAK,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,OAAO,CAAC,IAAI,CAAC,EAAE,WAAW,SAAK,YAcxF;AAED,wBAAgB,GAAG,CAAC,GAAG,IAAI,EAAE,MAAM,CAAC,cAAc,EAAE,GAAG,CAAC,MAAM,CAAC,cAAc,EAAE,CAAC,YA0B/E;AAED,wBAAgB,iBAAiB,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,eAAe,CAAC,YAEjF;AAED,wBAAgB,sBAAsB,CAAC,KAAK,GAAE,GAAG,EAAO,EAAE,OAAO,GAAE,OAAO,CAAC,eAAe,CAAM,YAS/F;AAED,wBAAsB,kBAAkB,CAAC,MAAM,EAAE,MAAM,CAAC,cAAc,mBAMrE"}
package/dist/streams.js
CHANGED
@@ -15,7 +15,6 @@ exports.createAsyncTransform = createAsyncTransform;
 exports.createAsyncWritable = createAsyncWritable;
 exports.createFilter = createFilter;
 exports.createTransform = createTransform;
-exports.mergeStreams = mergeStreams;
 exports.pump = void 0;
 exports.readStreamToBuffer = readStreamToBuffer;
 exports.readableFromArray = readableFromArray;
@@ -146,14 +145,4 @@ async function readStreamToBuffer(stream) {
   writeStream.on('data', chunk => chunks.push(chunk));
   await pump(stream, writeStream);
   return Buffer.concat(chunks);
-}
-function mergeStreams(...streams) {
-  let passThrough = new _nodeStream.PassThrough();
-  for (const stream of streams) {
-    const end = stream === streams.at(-1);
-    passThrough = stream.pipe(passThrough, {
-      end
-    });
-  }
-  return passThrough;
 }
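For context on the deletion: the old helper piped every source into one `PassThrough` and set `end: true` only for the last argument, so the merged output could end as soon as the last-listed source finished, even while earlier sources were still writing — and since `pipe()` does not forward errors, a failing source never surfaced on the merged stream. A sketch of the failure mode being removed, re-creating the deleted logic with hypothetical sources (not the current package API):

    import { PassThrough, Readable } from 'node:stream';
    import { setTimeout as delay } from 'node:timers/promises';

    const slow = Readable.from((async function* () {
      await delay(50);
      yield 'late chunk';
    })());
    const fast = Readable.from(['early chunk']);
    // Removed 10.38.0 logic: end the PassThrough with the last stream.
    let merged = new PassThrough();
    for (const stream of [slow, fast]) {
      merged = stream.pipe(merged, { end: stream === fast });
    }
    merged.resume();
    // `fast` ends first and closes `merged`; the late chunk from `slow`
    // then hits an already-ended writable.
    merged.on('error', error => console.error(error.code)); // ERR_STREAM_WRITE_AFTER_END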
package/es/index.js
CHANGED
@@ -1 +1,2 @@
-export * from './streams';
+export * from './streams';
+export * from './merge';
package/es/merge.js
ADDED
@@ -0,0 +1,276 @@
+/* istanbul ignore file */
+/**
+ * Borrowed code, due to ESM-only issues
+ *
+ * @link https://github.com/sindresorhus/merge-streams/blob/main/index.js
+ */
+import { on, once } from 'node:events';
+import { PassThrough as PassThroughStream, getDefaultHighWaterMark } from 'node:stream';
+import { finished } from 'node:stream/promises';
+export function mergeStreams(streams) {
+  if (!Array.isArray(streams)) {
+    throw new TypeError(`Expected an array, got \`${typeof streams}\`.`);
+  }
+  for (const stream of streams) {
+    validateStream(stream);
+  }
+  const objectMode = streams.some(({
+    readableObjectMode
+  }) => readableObjectMode);
+  const highWaterMark = getHighWaterMark(streams, objectMode);
+  const passThroughStream = new MergedStream({
+    objectMode,
+    writableHighWaterMark: highWaterMark,
+    readableHighWaterMark: highWaterMark
+  });
+  for (const stream of streams) {
+    passThroughStream.add(stream);
+  }
+  return passThroughStream;
+}
+const getHighWaterMark = (streams, objectMode) => {
+  if (streams.length === 0) {
+    return getDefaultHighWaterMark(objectMode);
+  }
+  const highWaterMarks = streams.filter(({
+    readableObjectMode
+  }) => readableObjectMode === objectMode).map(({
+    readableHighWaterMark
+  }) => readableHighWaterMark);
+  return Math.max(...highWaterMarks);
+};
+class MergedStream extends PassThroughStream {
+  #streams = new Set([]);
+  #ended = new Set([]);
+  #aborted = new Set([]);
+  #onFinished;
+  #unpipeEvent = Symbol('unpipe');
+  #streamPromises = new WeakMap();
+  add(stream) {
+    validateStream(stream);
+    if (this.#streams.has(stream)) {
+      return;
+    }
+    this.#streams.add(stream);
+    this.#onFinished ??= onMergedStreamFinished(this, this.#streams, this.#unpipeEvent);
+    const streamPromise = endWhenStreamsDone({
+      passThroughStream: this,
+      stream,
+      streams: this.#streams,
+      ended: this.#ended,
+      aborted: this.#aborted,
+      onFinished: this.#onFinished,
+      unpipeEvent: this.#unpipeEvent
+    });
+    this.#streamPromises.set(stream, streamPromise);
+    stream.pipe(this, {
+      end: false
+    });
+  }
+  async remove(stream) {
+    validateStream(stream);
+    if (!this.#streams.has(stream)) {
+      return false;
+    }
+    const streamPromise = this.#streamPromises.get(stream);
+    if (streamPromise === undefined) {
+      return false;
+    }
+    this.#streamPromises.delete(stream);
+    stream.unpipe(this);
+    await streamPromise;
+    return true;
+  }
+}
+const onMergedStreamFinished = async (passThroughStream, streams, unpipeEvent) => {
+  updateMaxListeners(passThroughStream, PASSTHROUGH_LISTENERS_COUNT);
+  const controller = new AbortController();
+  try {
+    await Promise.race([onMergedStreamEnd(passThroughStream, controller), onInputStreamsUnpipe(passThroughStream, streams, unpipeEvent, controller)]);
+  } finally {
+    controller.abort();
+    updateMaxListeners(passThroughStream, -PASSTHROUGH_LISTENERS_COUNT);
+  }
+};
+const onMergedStreamEnd = async (passThroughStream, {
+  signal
+}) => {
+  try {
+    await finished(passThroughStream, {
+      signal,
+      cleanup: true
+    });
+  } catch (error) {
+    errorOrAbortStream(passThroughStream, error);
+    throw error;
+  }
+};
+const onInputStreamsUnpipe = async (passThroughStream, streams, unpipeEvent, {
+  signal
+}) => {
+  for await (const [unpipedStream] of on(passThroughStream, 'unpipe', {
+    signal
+  })) {
+    if (streams.has(unpipedStream)) {
+      unpipedStream.emit(unpipeEvent);
+    }
+  }
+};
+const validateStream = stream => {
+  if (typeof stream?.pipe !== 'function') {
+    throw new TypeError(`Expected a readable stream, got: \`${typeof stream}\`.`);
+  }
+};
+const endWhenStreamsDone = async ({
+  passThroughStream,
+  stream,
+  streams,
+  ended,
+  aborted,
+  onFinished,
+  unpipeEvent
+}) => {
+  updateMaxListeners(passThroughStream, PASSTHROUGH_LISTENERS_PER_STREAM);
+  const controller = new AbortController();
+  try {
+    await Promise.race([afterMergedStreamFinished(onFinished, stream, controller), onInputStreamEnd({
+      passThroughStream,
+      stream,
+      streams,
+      ended,
+      aborted,
+      controller
+    }), onInputStreamUnpipe({
+      stream,
+      streams,
+      ended,
+      aborted,
+      unpipeEvent,
+      controller
+    })]);
+  } finally {
+    controller.abort();
+    updateMaxListeners(passThroughStream, -PASSTHROUGH_LISTENERS_PER_STREAM);
+  }
+  if (streams.size > 0 && streams.size === ended.size + aborted.size) {
+    if (ended.size === 0 && aborted.size > 0) {
+      abortStream(passThroughStream);
+    } else {
+      endStream(passThroughStream);
+    }
+  }
+};
+const afterMergedStreamFinished = async (onFinished, stream, {
+  signal
+}) => {
+  try {
+    await onFinished;
+    if (!signal.aborted) {
+      abortStream(stream);
+    }
+  } catch (error) {
+    if (!signal.aborted) {
+      errorOrAbortStream(stream, error);
+    }
+  }
+};
+const onInputStreamEnd = async ({
+  passThroughStream,
+  stream,
+  streams,
+  ended,
+  aborted,
+  controller: {
+    signal
+  }
+}) => {
+  try {
+    await finished(stream, {
+      signal,
+      cleanup: true,
+      readable: true,
+      writable: false
+    });
+    if (streams.has(stream)) {
+      ended.add(stream);
+    }
+  } catch (error) {
+    if (signal.aborted || !streams.has(stream)) {
+      return;
+    }
+    if (isAbortError(error)) {
+      aborted.add(stream);
+    } else {
+      errorStream(passThroughStream, error);
+    }
+  }
+};
+const onInputStreamUnpipe = async ({
+  stream,
+  streams,
+  ended,
+  aborted,
+  unpipeEvent,
+  controller: {
+    signal
+  }
+}) => {
+  await once(stream, unpipeEvent, {
+    signal
+  });
+  if (!stream.readable) {
+    return once(signal, 'abort', {
+      signal
+    });
+  }
+  streams.delete(stream);
+  ended.delete(stream);
+  aborted.delete(stream);
+};
+const endStream = stream => {
+  if (stream.writable) {
+    stream.end();
+  }
+};
+const errorOrAbortStream = (stream, error) => {
+  if (isAbortError(error)) {
+    abortStream(stream);
+  } else {
+    errorStream(stream, error);
+  }
+};
+
+// This is the error thrown by `finished()` on `stream.destroy()`
+const isAbortError = error => error?.code === 'ERR_STREAM_PREMATURE_CLOSE';
+const abortStream = stream => {
+  if (stream.readable || stream.writable) {
+    stream.destroy();
+  }
+};
+
+// `stream.destroy(error)` crashes the process with `uncaughtException` if no `error` event listener exists on `stream`.
+// We take care of error handling on user behalf, so we do not want this to happen.
+const errorStream = (stream, error) => {
+  if (!stream.destroyed) {
+    stream.once('error', noop);
+    stream.destroy(error);
+  }
+};
+const noop = () => {
+  // empty
+};
+const updateMaxListeners = (passThroughStream, increment) => {
+  const maxListeners = passThroughStream.getMaxListeners();
+  if (maxListeners !== 0 && maxListeners !== Number.POSITIVE_INFINITY) {
+    passThroughStream.setMaxListeners(maxListeners + increment);
+  }
+};
+
+// Number of times `passThroughStream.on()` is called regardless of streams:
+// - once due to `finished(passThroughStream)`
+// - once due to `on(passThroughStream)`
+const PASSTHROUGH_LISTENERS_COUNT = 2;
+
+// Number of times `passThroughStream.on()` is called per stream:
+// - once due to `stream.pipe(passThroughStream)`
+const PASSTHROUGH_LISTENERS_PER_STREAM = 1;
package/es/streams.js
CHANGED
@@ -123,14 +123,4 @@ export async function readStreamToBuffer(stream) {
   writeStream.on('data', chunk => chunks.push(chunk));
   await pump(stream, writeStream);
   return Buffer.concat(chunks);
-}
-export function mergeStreams(...streams) {
-  let passThrough = new PassThrough();
-  for (const stream of streams) {
-    const end = stream === streams.at(-1);
-    passThrough = stream.pipe(passThrough, {
-      end
-    });
-  }
-  return passThrough;
 }