@takeshape/streams 11.143.2 → 11.154.1

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@takeshape/streams",
- "version": "11.143.2",
+ "version": "11.154.1",
  "description": "Stream helpers",
  "homepage": "https://www.takeshape.io",
  "repository": {
package/dist/index.d.ts DELETED
@@ -1,2 +0,0 @@
- export * from './merge.ts';
- export * from './streams.ts';
package/dist/index.js DELETED
@@ -1,2 +0,0 @@
- export * from "./merge.js";
- export * from "./streams.js";
package/dist/merge.d.ts DELETED
@@ -1,8 +0,0 @@
- import { PassThrough as PassThroughStream, type Readable } from 'node:stream';
- export declare function mergeStreams(streams: Readable[]): MergedStream;
- declare class MergedStream extends PassThroughStream {
- #private;
- add(stream: Readable): void;
- remove(stream: Readable): Promise<boolean>;
- }
- export {};
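For context, a minimal usage sketch of the removed mergeStreams helper, based only on the declaration above and on the 11.143.2 entry point (dist/index.js) that re-exported it; the stream contents are illustrative:

    import { Readable } from 'node:stream';
    import { mergeStreams } from '@takeshape/streams'; // re-exported by the removed dist/index.js

    const a = Readable.from(['a1', 'a2']);
    const b = Readable.from(['b1']);

    // Interleaves chunks from every input; the merged stream ends
    // once all inputs have ended.
    const merged = mergeStreams([a, b]);
    for await (const chunk of merged) {
      console.log(chunk);
    }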
package/dist/merge.js DELETED
@@ -1,246 +0,0 @@
- /* istanbul ignore file -- @preserve */
- /**
- * Borrowed code, due to ESM-only issues
- *
- * @link https://github.com/sindresorhus/merge-streams/blob/main/index.js
- */
- import { on, once } from 'node:events';
- import { getDefaultHighWaterMark, PassThrough as PassThroughStream } from 'node:stream';
- import { finished } from 'node:stream/promises';
- export function mergeStreams(streams) {
- if (!Array.isArray(streams)) {
- throw new TypeError(`Expected an array, got \`${typeof streams}\`.`);
- }
- for (const stream of streams) {
- validateStream(stream);
- }
- const objectMode = streams.some(({ readableObjectMode }) => readableObjectMode);
- const highWaterMark = getHighWaterMark(streams, objectMode);
- const passThroughStream = new MergedStream({
- objectMode,
- writableHighWaterMark: highWaterMark,
- readableHighWaterMark: highWaterMark
- });
- for (const stream of streams) {
- passThroughStream.add(stream);
- }
- return passThroughStream;
- }
- const getHighWaterMark = (streams, objectMode) => {
- if (streams.length === 0) {
- return getDefaultHighWaterMark(objectMode);
- }
- const highWaterMarks = streams
- .filter(({ readableObjectMode }) => readableObjectMode === objectMode)
- .map(({ readableHighWaterMark }) => readableHighWaterMark);
- return Math.max(...highWaterMarks);
- };
- class MergedStream extends PassThroughStream {
- #streams = new Set([]);
- #ended = new Set([]);
- #aborted = new Set([]);
- #onFinished;
- #unpipeEvent = Symbol('unpipe');
- #streamPromises = new WeakMap();
- add(stream) {
- validateStream(stream);
- if (this.#streams.has(stream)) {
- return;
- }
- this.#streams.add(stream);
- this.#onFinished ??= onMergedStreamFinished(this, this.#streams, this.#unpipeEvent);
- const streamPromise = endWhenStreamsDone({
- passThroughStream: this,
- stream,
- streams: this.#streams,
- ended: this.#ended,
- aborted: this.#aborted,
- onFinished: this.#onFinished,
- unpipeEvent: this.#unpipeEvent
- });
- this.#streamPromises.set(stream, streamPromise);
- stream.pipe(this, { end: false });
- }
- async remove(stream) {
- validateStream(stream);
- if (!this.#streams.has(stream)) {
- return false;
- }
- const streamPromise = this.#streamPromises.get(stream);
- if (streamPromise === undefined) {
- return false;
- }
- this.#streamPromises.delete(stream);
- stream.unpipe(this);
- await streamPromise;
- return true;
- }
- }
- const onMergedStreamFinished = async (passThroughStream, streams, unpipeEvent) => {
- updateMaxListeners(passThroughStream, PASSTHROUGH_LISTENERS_COUNT);
- const controller = new AbortController();
- try {
- await Promise.race([
- onMergedStreamEnd(passThroughStream, controller),
- onInputStreamsUnpipe(passThroughStream, streams, unpipeEvent, controller)
- ]);
- }
- finally {
- controller.abort();
- updateMaxListeners(passThroughStream, -PASSTHROUGH_LISTENERS_COUNT);
- }
- };
- const onMergedStreamEnd = async (passThroughStream, { signal }) => {
- try {
- await finished(passThroughStream, {
- signal,
- cleanup: true
- });
- }
- catch (error) {
- errorOrAbortStream(passThroughStream, error);
- throw error;
- }
- };
- const onInputStreamsUnpipe = async (passThroughStream, streams, unpipeEvent, { signal }) => {
- for await (const [unpipedStream] of on(passThroughStream, 'unpipe', {
- signal
- })) {
- if (streams.has(unpipedStream)) {
- unpipedStream.emit(unpipeEvent);
- }
- }
- };
- const validateStream = (stream) => {
- if (typeof stream?.pipe !== 'function') {
- throw new TypeError(`Expected a readable stream, got: \`${typeof stream}\`.`);
- }
- };
- const endWhenStreamsDone = async ({ passThroughStream, stream, streams, ended, aborted, onFinished, unpipeEvent }) => {
- updateMaxListeners(passThroughStream, PASSTHROUGH_LISTENERS_PER_STREAM);
- const controller = new AbortController();
- try {
- await Promise.race([
- afterMergedStreamFinished(onFinished, stream, controller),
- onInputStreamEnd({
- passThroughStream,
- stream,
- streams,
- ended,
- aborted,
- controller
- }),
- onInputStreamUnpipe({
- stream,
- streams,
- ended,
- aborted,
- unpipeEvent,
- controller
- })
- ]);
- }
- finally {
- controller.abort();
- updateMaxListeners(passThroughStream, -PASSTHROUGH_LISTENERS_PER_STREAM);
- }
- if (streams.size > 0 && streams.size === ended.size + aborted.size) {
- if (ended.size === 0 && aborted.size > 0) {
- abortStream(passThroughStream);
- }
- else {
- endStream(passThroughStream);
- }
- }
- };
- const afterMergedStreamFinished = async (onFinished, stream, { signal }) => {
- try {
- await onFinished;
- if (!signal.aborted) {
- abortStream(stream);
- }
- }
- catch (error) {
- if (!signal.aborted) {
- errorOrAbortStream(stream, error);
- }
- }
- };
- const onInputStreamEnd = async ({ passThroughStream, stream, streams, ended, aborted, controller: { signal } }) => {
- try {
- await finished(stream, {
- signal,
- cleanup: true,
- readable: true,
- writable: false
- });
- if (streams.has(stream)) {
- ended.add(stream);
- }
- }
- catch (error) {
- if (signal.aborted || !streams.has(stream)) {
- return;
- }
- if (isAbortError(error)) {
- aborted.add(stream);
- }
- else {
- errorStream(passThroughStream, error);
- }
- }
- };
- const onInputStreamUnpipe = async ({ stream, streams, ended, aborted, unpipeEvent, controller: { signal } }) => {
- await once(stream, unpipeEvent, { signal });
- if (!stream.readable) {
- return once(signal, 'abort', { signal });
- }
- streams.delete(stream);
- ended.delete(stream);
- aborted.delete(stream);
- return;
- };
- const endStream = (stream) => {
- if (stream.writable) {
- stream.end();
- }
- };
- const errorOrAbortStream = (stream, error) => {
- if (isAbortError(error)) {
- abortStream(stream);
- }
- else {
- errorStream(stream, error);
- }
- };
- // This is the error thrown by `finished()` on `stream.destroy()`
- const isAbortError = (error) => error?.code === 'ERR_STREAM_PREMATURE_CLOSE';
- const abortStream = (stream) => {
- if (stream.readable || stream.writable) {
- stream.destroy();
- }
- };
- // `stream.destroy(error)` crashes the process with `uncaughtException` if no `error` event listener exists on `stream`.
- // We take care of error handling on user behalf, so we do not want this to happen.
- const errorStream = (stream, error) => {
- if (!stream.destroyed) {
- stream.once('error', noop);
- stream.destroy(error);
- }
- };
- const noop = () => {
- // empty
- };
- const updateMaxListeners = (passThroughStream, increment) => {
- const maxListeners = passThroughStream.getMaxListeners();
- if (maxListeners !== 0 && maxListeners !== Number.POSITIVE_INFINITY) {
- passThroughStream.setMaxListeners(maxListeners + increment);
- }
- };
- // Number of times `passThroughStream.on()` is called regardless of streams:
- // - once due to `finished(passThroughStream)`
- // - once due to `on(passThroughStream)`
- const PASSTHROUGH_LISTENERS_COUNT = 2;
- // Number of times `passThroughStream.on()` is called per stream:
- // - once due to `stream.pipe(passThroughStream)`
- const PASSTHROUGH_LISTENERS_PER_STREAM = 1;
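The MergedStream class above also supports attaching and detaching inputs after construction. A hedged sketch of that dynamic behavior, derived from the add/remove implementations just shown (again assuming the 11.143.2 entry point):

    import { Readable } from 'node:stream';
    import { mergeStreams } from '@takeshape/streams';

    const live = mergeStreams([Readable.from(['first'])]);

    // add() pipes the new input with { end: false }, so the merged
    // stream stays open until every tracked input has ended.
    const extra = Readable.from(['second']);
    live.add(extra);

    // remove() unpipes an input and resolves true once it is detached.
    // (A tiny input like 'extra' may already have flowed through by now.)
    await live.remove(extra);

    for await (const chunk of live) {
      console.log(chunk); // 'first', and possibly 'second'
    }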
package/dist/streams.d.ts DELETED
@@ -1,29 +0,0 @@
- import { Readable, type ReadableOptions, Transform, Writable, type WritableOptions } from 'node:stream';
- import { pipeline } from 'node:stream/promises';
- declare module 'node:stream' {
- /**
- * Experimental!
- *
- * Provisional typing for experimental feature: https://nodejs.org/dist/latest-v20.x/docs/api/stream.html#streamcomposestreams
- *
- * Combines two or more streams into a Duplex stream that writes to the first stream and reads from the last. Each provided stream is piped into the next, using stream.pipeline. If any of the streams error then all are destroyed, including the outer Duplex stream.
- *
- * Because stream.compose returns a new stream that in turn can (and should) be piped into other streams, it enables composition. In contrast, when passing streams to stream.pipeline, typically the first stream is a readable stream and the last a writable stream, forming a closed circuit.
- *
- * If passed a Function it must be a factory method taking a source Iterable.
- */
- const compose: (stream: NodeJS.ReadableStream, ...streams: Array<NodeJS.WritableStream | NodeJS.ReadWriteStream | Iterable<unknown>>) => NodeJS.ReadWriteStream;
- }
- export { compose } from 'node:stream';
- export declare const pump: typeof pipeline;
- export declare function streamToPromise(stream: NodeJS.ReadableStream | NodeJS.WritableStream): Promise<void>;
- export declare function createTransform(transform: (data: any) => any): Transform;
- export declare function createAsyncTransform(transform: (data: any) => Promise<any>): Transform;
- export declare function createFilter(predicate: (data: any) => boolean): Transform;
- export declare function createAccumulator(accumulator: (data: any) => void): Transform;
- export declare function createAsyncWritable(write: (data: any) => Promise<void>, concurrency?: number): Writable;
- export declare function tee(...args: NodeJS.WritableStream[] | [NodeJS.WritableStream[]]): Writable;
- export declare function readableFromArray(array: any[], options?: Partial<ReadableOptions>): Readable;
- export declare function collectStreamIntoArray(array?: any[], options?: Partial<WritableOptions>): Writable;
- export declare function readStreamToBuffer(stream: NodeJS.ReadableStream): Promise<Buffer<ArrayBuffer>>;
- export declare function batch(size: number): Transform;
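Taken together, these declarations describe a small object-mode pipeline toolkit. A hedged example of how the helpers compose, using only the signatures declared above (the data values are made up):

    import {
      collectStreamIntoArray,
      createFilter,
      createTransform,
      pump,
      readableFromArray
    } from '@takeshape/streams';

    const out = [];
    await pump(
      readableFromArray([1, 2, 3, 4]),
      createFilter((n) => n % 2 === 0), // keep even numbers
      createTransform((n) => n * 10),   // then scale them
      collectStreamIntoArray(out)
    );
    // out is now [20, 40]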
package/dist/streams.js DELETED
@@ -1,147 +0,0 @@
- import { PassThrough, Readable, Transform, Writable } from 'node:stream';
- import { pipeline } from 'node:stream/promises';
- import { promisify } from 'node:util';
- import isStream from 'is-stream';
- import PQueue from 'p-queue';
- export { compose } from 'node:stream';
- export const pump = pipeline;
- export async function streamToPromise(stream) {
- return new Promise((resolve, reject) => {
- const doneEvent = isStream.writable(stream) ? 'finish' : 'end';
- stream.on('error', reject).on(doneEvent, resolve);
- });
- }
- export function createTransform(transform) {
- return new Transform({
- objectMode: true,
- transform(chunk, _, callback) {
- try {
- this.push(transform(chunk));
- callback();
- }
- catch (e) {
- callback(e);
- }
- }
- });
- }
- export function createAsyncTransform(transform) {
- return new Transform({
- objectMode: true,
- transform(chunk, _, callback) {
- try {
- transform(chunk).then((result) => {
- this.push(result);
- callback();
- }, callback);
- }
- catch (e) {
- callback(e);
- }
- }
- });
- }
- export function createFilter(predicate) {
- return new Transform({
- objectMode: true,
- transform(chunk, _, callback) {
- try {
- if (predicate(chunk)) {
- this.push(chunk);
- }
- callback();
- }
- catch (e) {
- callback(e);
- }
- }
- });
- }
- export function createAccumulator(accumulator) {
- return new Transform({
- objectMode: true,
- transform(chunk, _, callback) {
- accumulator(chunk);
- callback(null, chunk);
- }
- });
- }
- export function createAsyncWritable(write, concurrency = 16) {
- const queue = new PQueue({ concurrency });
- return new Writable({
- objectMode: true,
- write(chunk, _, callback) {
- queue.add(async () => write(chunk)).catch((error) => this.emit('error', error));
- callback();
- },
- final(callback) {
- void queue.onIdle().then(() => {
- callback();
- });
- }
- });
- }
- export function tee(...args) {
- const first = args[0];
- const streams = Array.isArray(first) ? first : args;
- const donePromises = streams.map(streamToPromise);
- return new Writable({
- objectMode: true,
- write(chunk, _, callback) {
- void Promise.all(streams.map((stream) => stream.write(chunk) || promisify((cb) => stream.once('drain', cb)) // If .write() => false buffer is full, start promise to wait for drain event
- )).then(() => {
- callback();
- });
- },
- final(callback) {
- for (const stream of streams) {
- stream.end();
- }
- void Promise.all(donePromises).then(() => {
- callback();
- });
- }
- });
- }
- export function readableFromArray(array, options) {
- return Readable.from(array, options);
- }
- export function collectStreamIntoArray(array = [], options = {}) {
- return new Writable({
- objectMode: true,
- writev(chunks, callback) {
- for (const { chunk } of chunks) {
- array.push(chunk);
- }
- callback(null);
- },
- ...options
- });
- }
- export async function readStreamToBuffer(stream) {
- const chunks = [];
- const writeStream = new PassThrough();
- writeStream.on('data', (chunk) => chunks.push(chunk));
- await pump(stream, writeStream);
- return Buffer.concat(chunks);
- }
- export function batch(size) {
- let buffer = [];
- return new Transform({
- objectMode: true,
- transform(obj, _, next) {
- buffer.push(obj);
- if (buffer.length === size) {
- this.push(buffer);
- buffer = [];
- }
- next();
- },
- flush(done) {
- if (buffer.length > 0) {
- this.push(buffer);
- }
- done();
- }
- });
- }
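Finally, a sketch combining batch with createAsyncWritable from the implementation above; save() is a hypothetical stand-in for a real persistence call, not part of the package:

    import { batch, createAsyncWritable, pump, readableFromArray } from '@takeshape/streams';

    // Stand-in for a real persistence call; purely illustrative.
    const save = async (group) => console.log('saving', group);

    await pump(
      readableFromArray([1, 2, 3, 4, 5]),
      batch(2), // emits [1, 2], [3, 4], then flushes [5] on end
      createAsyncWritable(async (group) => {
        await save(group);
      }, 4) // at most 4 saves in flight via the internal PQueue
    );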