@types/node 18.16.2 → 20.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- node/README.md +1 -1
- node/assert.d.ts +68 -73
- node/async_hooks.d.ts +62 -42
- node/buffer.d.ts +123 -95
- node/child_process.d.ts +50 -54
- node/cluster.d.ts +12 -12
- node/console.d.ts +5 -5
- node/crypto.d.ts +209 -220
- node/dgram.d.ts +15 -15
- node/diagnostics_channel.d.ts +25 -26
- node/dns/promises.d.ts +6 -6
- node/dns.d.ts +16 -16
- node/domain.d.ts +3 -3
- node/events.d.ts +60 -60
- node/fs/promises.d.ts +74 -48
- node/fs.d.ts +91 -81
- node/http.d.ts +147 -144
- node/http2.d.ts +42 -46
- node/https.d.ts +52 -153
- node/index.d.ts +1 -1
- node/inspector.d.ts +10 -3
- node/module.d.ts +5 -4
- node/net.d.ts +21 -18
- node/os.d.ts +22 -18
- node/package.json +2 -2
- node/path.d.ts +4 -4
- node/perf_hooks.d.ts +28 -15
- node/process.d.ts +43 -46
- node/punycode.d.ts +1 -1
- node/querystring.d.ts +5 -5
- node/readline/promises.d.ts +6 -4
- node/readline.d.ts +15 -15
- node/repl.d.ts +9 -9
- node/stream/consumers.d.ts +1 -1
- node/stream.d.ts +74 -136
- node/string_decoder.d.ts +6 -6
- node/test.d.ts +0 -76
- node/timers/promises.d.ts +3 -3
- node/timers.d.ts +2 -2
- node/tls.d.ts +29 -15
- node/trace_events.d.ts +20 -9
- node/ts4.8/assert.d.ts +68 -73
- node/ts4.8/async_hooks.d.ts +59 -31
- node/ts4.8/buffer.d.ts +123 -95
- node/ts4.8/child_process.d.ts +50 -54
- node/ts4.8/cluster.d.ts +12 -12
- node/ts4.8/console.d.ts +5 -5
- node/ts4.8/crypto.d.ts +209 -220
- node/ts4.8/dgram.d.ts +15 -15
- node/ts4.8/diagnostics_channel.d.ts +25 -26
- node/ts4.8/dns/promises.d.ts +6 -6
- node/ts4.8/dns.d.ts +16 -16
- node/ts4.8/domain.d.ts +3 -3
- node/ts4.8/events.d.ts +60 -60
- node/ts4.8/fs/promises.d.ts +72 -45
- node/ts4.8/fs.d.ts +81 -67
- node/ts4.8/http.d.ts +133 -126
- node/ts4.8/http2.d.ts +42 -46
- node/ts4.8/https.d.ts +52 -153
- node/ts4.8/inspector.d.ts +10 -3
- node/ts4.8/module.d.ts +5 -4
- node/ts4.8/net.d.ts +21 -18
- node/ts4.8/os.d.ts +22 -18
- node/ts4.8/path.d.ts +4 -4
- node/ts4.8/perf_hooks.d.ts +28 -15
- node/ts4.8/process.d.ts +43 -46
- node/ts4.8/punycode.d.ts +1 -1
- node/ts4.8/querystring.d.ts +5 -5
- node/ts4.8/readline/promises.d.ts +6 -4
- node/ts4.8/readline.d.ts +15 -15
- node/ts4.8/repl.d.ts +9 -9
- node/ts4.8/stream/consumers.d.ts +1 -1
- node/ts4.8/stream.d.ts +77 -139
- node/ts4.8/string_decoder.d.ts +6 -6
- node/ts4.8/test.d.ts +0 -75
- node/ts4.8/timers/promises.d.ts +3 -3
- node/ts4.8/timers.d.ts +2 -2
- node/ts4.8/tls.d.ts +29 -15
- node/ts4.8/trace_events.d.ts +20 -9
- node/ts4.8/tty.d.ts +4 -5
- node/ts4.8/url.d.ts +26 -36
- node/ts4.8/util.d.ts +143 -116
- node/ts4.8/v8.d.ts +107 -16
- node/ts4.8/vm.d.ts +292 -42
- node/ts4.8/wasi.d.ts +8 -14
- node/ts4.8/worker_threads.d.ts +32 -34
- node/ts4.8/zlib.d.ts +11 -11
- node/tty.d.ts +4 -5
- node/url.d.ts +26 -36
- node/util.d.ts +146 -111
- node/v8.d.ts +110 -16
- node/vm.d.ts +292 -42
- node/wasi.d.ts +8 -14
- node/worker_threads.d.ts +32 -34
- node/zlib.d.ts +11 -11
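
Most of the per-file changes below follow one pattern: JSDoc examples and prose switch from bare core-module specifiers (`require('repl')`, `require('fs')`) to `node:`-prefixed specifiers, and the `@see` source links are repointed at the v20.0.0 tree. As a rough illustration of the import style the updated docs describe (a sketch only, not code from the package; the file names are placeholders reused from the doc examples):

```ts
// Sketch: `node:`-prefixed specifiers, as used throughout the updated JSDoc below.
import { createReadStream, createWriteStream } from 'node:fs';
import { createGzip } from 'node:zlib';
import { pipeline } from 'node:stream/promises';

async function gzipFile(src: string, dest: string): Promise<void> {
  // pipeline() resolves when every stream in the chain has finished, and rejects on error.
  await pipeline(createReadStream(src), createGzip(), createWriteStream(dest));
}

gzipFile('archive.tar', 'archive.tar.gz').catch(console.error);
```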
node/repl.d.ts
CHANGED
@@ -1,12 +1,12 @@
 /**
- * The `repl` module provides a Read-Eval-Print-Loop (REPL) implementation
- * is available both as a standalone program or includible in other
- * It can be accessed using:
+ * The `node:repl` module provides a Read-Eval-Print-Loop (REPL) implementation
+ * that is available both as a standalone program or includible in other
+ * applications. It can be accessed using:
  *
  * ```js
- * const repl = require('repl');
+ * const repl = require('node:repl');
  * ```
- * @see [source](https://github.com/nodejs/node/blob/
+ * @see [source](https://github.com/nodejs/node/blob/v20.0.0/lib/repl.js)
  */
 declare module 'repl' {
 import { Interface, Completer, AsyncCompleter } from 'node:readline';
@@ -124,7 +124,7 @@ declare module 'repl' {
  * or directly using the JavaScript `new` keyword.
  *
  * ```js
- * const repl = require('repl');
+ * const repl = require('node:repl');
  *
  * const options = { useColors: true };
  *
@@ -251,7 +251,7 @@ declare module 'repl' {
  * The following example shows two new commands added to the REPL instance:
  *
  * ```js
- * const repl = require('repl');
+ * const repl = require('node:repl');
  *
  * const replServer = repl.start({ prompt: '> ' });
  * replServer.defineCommand('sayhello', {
@@ -260,7 +260,7 @@ declare module 'repl' {
  * this.clearBufferedCommand();
  * console.log(`Hello, ${name}!`);
  * this.displayPrompt();
- * }
+ * },
  * });
  * replServer.defineCommand('saybye', function saybye() {
  * console.log('Goodbye!');
@@ -401,7 +401,7 @@ declare module 'repl' {
  * If `options` is a string, then it specifies the input prompt:
  *
  * ```js
- * const repl = require('repl');
+ * const repl = require('node:repl');
  *
  * // a Unix style prompt
  * repl.start('$ ');
node/stream/consumers.d.ts
CHANGED
@@ -1,5 +1,5 @@
 declare module 'stream/consumers' {
-import { Blob as NodeBlob } from
+import { Blob as NodeBlob } from 'node:buffer';
 import { Readable } from 'node:stream';
 function buffer(stream: NodeJS.ReadableStream | Readable | AsyncIterator<any>): Promise<Buffer>;
 function text(stream: NodeJS.ReadableStream | Readable | AsyncIterator<any>): Promise<string>;
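
The only change here completes the truncated `node:buffer` import; the consumer helpers are otherwise untouched. A small usage sketch based on the signatures visible above (`buffer()` and `text()` accept a readable stream and return a promise); the sample input is made up:

```ts
import { Readable } from 'node:stream';
import { buffer, text } from 'node:stream/consumers';

async function demo(): Promise<void> {
  // Both helpers fully drain the stream and resolve with its collected contents.
  const asText = await text(Readable.from(['hello, ', 'world']));
  const asBuffer = await buffer(Readable.from([Buffer.from('hi')]));
  console.log(asText, asBuffer.byteLength);
}

demo().catch(console.error);
```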
node/stream.d.ts
CHANGED
@@ -1,24 +1,24 @@
 /**
  * A stream is an abstract interface for working with streaming data in Node.js.
- * The `stream` module provides an API for implementing the stream interface.
+ * The `node:stream` module provides an API for implementing the stream interface.
  *
  * There are many stream objects provided by Node.js. For instance, a `request to an HTTP server` and `process.stdout` are both stream instances.
  *
  * Streams can be readable, writable, or both. All streams are instances of `EventEmitter`.
  *
- * To access the `stream` module:
+ * To access the `node:stream` module:
  *
  * ```js
- * const stream = require('stream');
+ * const stream = require('node:stream');
  * ```
  *
- * The `stream` module is useful for creating new types of stream instances.
- * usually not necessary to use the `stream` module to consume streams.
- * @see [source](https://github.com/nodejs/node/blob/
+ * The `node:stream` module is useful for creating new types of stream instances.
+ * It is usually not necessary to use the `node:stream` module to consume streams.
+ * @see [source](https://github.com/nodejs/node/blob/v20.0.0/lib/stream.js)
  */
 declare module 'stream' {
 import { EventEmitter, Abortable } from 'node:events';
-import { Blob as NodeBlob } from
+import { Blob as NodeBlob } from 'node:buffer';
 import * as streamPromises from 'node:stream/promises';
 import * as streamConsumers from 'node:stream/consumers';
 import * as streamWeb from 'node:stream/web';
@@ -128,7 +128,7 @@ declare module 'stream' {
  */
 destroyed: boolean;
 /**
- * Is true after 'close' has been emitted.
+ * Is `true` after `'close'` has been emitted.
  * @since v18.0.0
  */
 readonly closed: boolean;
@@ -310,7 +310,7 @@ declare module 'stream' {
  * the method does nothing.
  *
  * ```js
- * const fs = require('fs');
+ * const fs = require('node:fs');
  * const readable = getReadableStreamSomehow();
  * const writable = fs.createWriteStream('file.txt');
  * // All the data from readable goes into 'file.txt',
@@ -348,7 +348,7 @@ declare module 'stream' {
  * // Pull off a header delimited by \n\n.
  * // Use unshift() if we get too much.
  * // Call the callback with (error, header, stream).
- * const { StringDecoder } = require('string_decoder');
+ * const { StringDecoder } = require('node:string_decoder');
  * function parseHeader(stream, callback) {
  * stream.on('error', callback);
  * stream.on('readable', onReadable);
@@ -388,14 +388,14 @@ declare module 'stream' {
  * however it is best to simply avoid calling `readable.unshift()` while in the
  * process of performing a read.
  * @since v0.9.11
- * @param chunk Chunk of data to unshift onto the read queue. For streams not operating in object mode, `chunk` must be a string, `Buffer`, `Uint8Array
+ * @param chunk Chunk of data to unshift onto the read queue. For streams not operating in object mode, `chunk` must be a string, `Buffer`, `Uint8Array`, or `null`. For object mode
  * streams, `chunk` may be any JavaScript value.
  * @param encoding Encoding of string chunks. Must be a valid `Buffer` encoding, such as `'utf8'` or `'ascii'`.
  */
 unshift(chunk: any, encoding?: BufferEncoding): void;
 /**
- * Prior to Node.js 0.10, streams did not implement the entire `stream`
- *
+ * Prior to Node.js 0.10, streams did not implement the entire `node:stream`module API as it is currently defined. (See `Compatibility` for more
+ * information.)
  *
  * When using an older Node.js library that emits `'data'` events and has a {@link pause} method that is advisory only, the`readable.wrap()` method can be used to create a `Readable`
  * stream that uses
@@ -407,7 +407,7 @@ declare module 'stream' {
  *
  * ```js
  * const { OldReader } = require('./old-api-module.js');
- * const { Readable } = require('stream');
+ * const { Readable } = require('node:stream');
  * const oreader = new OldReader();
  * const myReader = new Readable().wrap(oreader);
  *
@@ -534,7 +534,7 @@ declare module 'stream' {
 static toWeb(streamWritable: Writable): streamWeb.WritableStream;
 /**
  * Is `true` if it is safe to call `writable.write()`, which means
- * the stream has not been destroyed, errored or ended.
+ * the stream has not been destroyed, errored, or ended.
  * @since v11.4.0
  */
 readonly writable: boolean;
@@ -578,7 +578,7 @@ declare module 'stream' {
  */
 destroyed: boolean;
 /**
- * Is true after 'close' has been emitted.
+ * Is `true` after `'close'` has been emitted.
  * @since v18.0.0
  */
 readonly closed: boolean;
@@ -588,7 +588,7 @@ declare module 'stream' {
  */
 readonly errored: Error | null;
 /**
- * Is `true` if the stream's buffer has been full and stream will emit 'drain'
+ * Is `true` if the stream's buffer has been full and stream will emit `'drain'`.
  * @since v15.2.0, v14.17.0
  */
 readonly writableNeedDrain: boolean;
@@ -678,7 +678,7 @@ declare module 'stream' {
  *
  * ```js
  * // Write 'hello, ' and then end with 'world!'.
- * const fs = require('fs');
+ * const fs = require('node:fs');
  * const file = fs.createWriteStream('example.txt');
  * file.write('hello, ');
  * file.end('world!');
@@ -864,7 +864,7 @@ declare module 'stream' {
 /**
  * If `false` then the stream will automatically end the writable side when the
  * readable side ends. Set initially by the `allowHalfOpen` constructor option,
- * which defaults to `
+ * which defaults to `true`.
  *
  * This can be changed manually to change the half-open behavior of an existing`Duplex` stream instance, but must be changed before the `'end'` event is
  * emitted.
@@ -1052,18 +1052,21 @@ declare module 'stream' {
  */
 class PassThrough extends Transform {}
 /**
+ * A stream to attach a signal to.
+ *
  * Attaches an AbortSignal to a readable or writeable stream. This lets code
  * control stream destruction using an `AbortController`.
  *
- * Calling `abort` on the `AbortController` corresponding to the passed`AbortSignal` will behave the same way as calling `.destroy(new AbortError())`on the stream.
+ * Calling `abort` on the `AbortController` corresponding to the passed`AbortSignal` will behave the same way as calling `.destroy(new AbortError())`on the stream, and `controller.error(new
+ * AbortError())` for webstreams.
  *
  * ```js
- * const fs = require('fs');
+ * const fs = require('node:fs');
  *
  * const controller = new AbortController();
  * const read = addAbortSignal(
  * controller.signal,
- * fs.createReadStream(('object.json'))
+ * fs.createReadStream(('object.json')),
  * );
  * // Later, abort the operation closing the stream
  * controller.abort();
@@ -1076,7 +1079,7 @@ declare module 'stream' {
  * setTimeout(() => controller.abort(), 10_000); // set a timeout
  * const stream = addAbortSignal(
  * controller.signal,
- * fs.createReadStream(('object.json'))
+ * fs.createReadStream(('object.json')),
  * );
  * (async () => {
  * try {
@@ -1092,6 +1095,37 @@ declare module 'stream' {
  * }
  * })();
  * ```
+ *
+ * Or using an `AbortSignal` with a ReadableStream:
+ *
+ * ```js
+ * const controller = new AbortController();
+ * const rs = new ReadableStream({
+ * start(controller) {
+ * controller.enqueue('hello');
+ * controller.enqueue('world');
+ * controller.close();
+ * },
+ * });
+ *
+ * addAbortSignal(controller.signal, rs);
+ *
+ * finished(rs, (err) => {
+ * if (err) {
+ * if (err.name === 'AbortError') {
+ * // The operation was cancelled
+ * }
+ * }
+ * });
+ *
+ * const reader = rs.getReader();
+ *
+ * reader.read().then(({ value, done }) => {
+ * console.log(value); // hello
+ * console.log(done); // false
+ * controller.abort();
+ * });
+ * ```
  * @since v15.4.0
  * @param signal A signal representing possible cancellation
  * @param stream a stream to attach a signal to
@@ -1103,11 +1137,14 @@ declare module 'stream' {
 writable?: boolean | undefined;
 }
 /**
+ * A readable and/or writable stream/webstream.
+ *
  * A function to get notified when a stream is no longer readable, writable
  * or has experienced an error or a premature close event.
  *
  * ```js
- * const { finished } = require('stream');
+ * const { finished } = require('node:stream');
+ * const fs = require('node:fs');
  *
  * const rs = fs.createReadStream('archive.tar');
  *
@@ -1125,21 +1162,7 @@ declare module 'stream' {
  * Especially useful in error handling scenarios where a stream is destroyed
  * prematurely (like an aborted HTTP request), and will not emit `'end'`or `'finish'`.
  *
- * The `finished` API provides promise version
- *
- * ```js
- * const { finished } = require('stream/promises');
- *
- * const rs = fs.createReadStream('archive.tar');
- *
- * async function run() {
- * await finished(rs);
- * console.log('Stream is done reading.');
- * }
- *
- * run().catch(console.error);
- * rs.resume(); // Drain the stream.
- * ```
+ * The `finished` API provides `promise version`.
  *
  * `stream.finished()` leaves dangling event listeners (in particular`'error'`, `'end'`, `'finish'` and `'close'`) after `callback` has been
  * invoked. The reason for this is so that unexpected `'error'` events (due to
@@ -1187,9 +1210,9 @@ declare module 'stream' {
  * properly cleaning up and provide a callback when the pipeline is complete.
  *
  * ```js
- * const { pipeline } = require('stream');
- * const fs = require('fs');
- * const zlib = require('zlib');
+ * const { pipeline } = require('node:stream');
+ * const fs = require('node:fs');
+ * const zlib = require('node:zlib');
  *
  * // Use the pipeline API to easily pipe a series of streams
  * // together and get notified when the pipeline is fully done.
@@ -1206,95 +1229,11 @@ declare module 'stream' {
  * } else {
  * console.log('Pipeline succeeded.');
  * }
- * }
+ * },
  * );
  * ```
  *
- * The `pipeline` API provides a promise version
- * receive an options argument as the last parameter with a`signal` `AbortSignal` property. When the signal is aborted,`destroy` will be called on the underlying pipeline, with
- * an`AbortError`.
- *
- * ```js
- * const { pipeline } = require('stream/promises');
- *
- * async function run() {
- * await pipeline(
- * fs.createReadStream('archive.tar'),
- * zlib.createGzip(),
- * fs.createWriteStream('archive.tar.gz')
- * );
- * console.log('Pipeline succeeded.');
- * }
- *
- * run().catch(console.error);
- * ```
- *
- * To use an `AbortSignal`, pass it inside an options object,
- * as the last argument:
- *
- * ```js
- * const { pipeline } = require('stream/promises');
- *
- * async function run() {
- * const ac = new AbortController();
- * const signal = ac.signal;
- *
- * setTimeout(() => ac.abort(), 1);
- * await pipeline(
- * fs.createReadStream('archive.tar'),
- * zlib.createGzip(),
- * fs.createWriteStream('archive.tar.gz'),
- * { signal },
- * );
- * }
- *
- * run().catch(console.error); // AbortError
- * ```
- *
- * The `pipeline` API also supports async generators:
- *
- * ```js
- * const { pipeline } = require('stream/promises');
- * const fs = require('fs');
- *
- * async function run() {
- * await pipeline(
- * fs.createReadStream('lowercase.txt'),
- * async function* (source, { signal }) {
- * source.setEncoding('utf8'); // Work with strings rather than `Buffer`s.
- * for await (const chunk of source) {
- * yield await processChunk(chunk, { signal });
- * }
- * },
- * fs.createWriteStream('uppercase.txt')
- * );
- * console.log('Pipeline succeeded.');
- * }
- *
- * run().catch(console.error);
- * ```
- *
- * Remember to handle the `signal` argument passed into the async generator.
- * Especially in the case where the async generator is the source for the
- * pipeline (i.e. first argument) or the pipeline will never complete.
- *
- * ```js
- * const { pipeline } = require('stream/promises');
- * const fs = require('fs');
- *
- * async function run() {
- * await pipeline(
- * async function* ({ signal }) {
- * await someLongRunningfn({ signal });
- * yield 'asd';
- * },
- * fs.createWriteStream('uppercase.txt')
- * );
- * console.log('Pipeline succeeded.');
- * }
- *
- * run().catch(console.error);
- * ```
+ * The `pipeline` API provides a `promise version`.
  *
  * `stream.pipeline()` will call `stream.destroy(err)` on all streams except:
  *
@@ -1313,9 +1252,9 @@ declare module 'stream' {
  * See the example below:
  *
  * ```js
- * const fs = require('fs');
- * const http = require('http');
- * const { pipeline } = require('stream');
+ * const fs = require('node:fs');
+ * const http = require('node:http');
+ * const { pipeline } = require('node:stream');
  *
  * const server = http.createServer((req, res) => {
  * const fileStream = fs.createReadStream('./fileNotExist.txt');
@@ -1416,19 +1355,18 @@ declare module 'stream' {
 ref(): void;
 unref(): void;
 }
-
 /**
  * Returns whether the stream has encountered an error.
- * @since v17.3.0
+ * @since v17.3.0, v16.14.0
+ * @experimental
  */
 function isErrored(stream: Readable | Writable | NodeJS.ReadableStream | NodeJS.WritableStream): boolean;
-
 /**
  * Returns whether the stream is readable.
- * @since v17.4.0
+ * @since v17.4.0, v16.14.0
+ * @experimental
  */
 function isReadable(stream: Readable | NodeJS.ReadableStream): boolean;
-
 const promises: typeof streamPromises;
 const consumers: typeof streamConsumers;
 }
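
Note that the long `stream/promises` examples were dropped from the `finished` and `pipeline` doc comments; the text now only says the APIs provide a promise version. For reference, a minimal sketch of the promise-based `finished()` reconstructed from the removed example (same `archive.tar` input it used; illustrative only, not text from the package):

```ts
import { createReadStream } from 'node:fs';
import { finished } from 'node:stream/promises';

async function run(): Promise<void> {
  const rs = createReadStream('archive.tar');
  rs.resume(); // Drain the stream so it can reach 'end'.
  await finished(rs); // Resolves once the stream is done, rejects on error or premature close.
  console.log('Stream is done reading.');
}

run().catch(console.error);
```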
node/string_decoder.d.ts
CHANGED
@@ -1,16 +1,16 @@
 /**
- * The `string_decoder` module provides an API for decoding `Buffer` objects
- * strings in a manner that preserves encoded multi-byte UTF-8 and UTF-16
+ * The `node:string_decoder` module provides an API for decoding `Buffer` objects
+ * into strings in a manner that preserves encoded multi-byte UTF-8 and UTF-16
  * characters. It can be accessed using:
  *
  * ```js
- * const { StringDecoder } = require('string_decoder');
+ * const { StringDecoder } = require('node:string_decoder');
  * ```
  *
  * The following example shows the basic use of the `StringDecoder` class.
  *
  * ```js
- * const { StringDecoder } = require('string_decoder');
+ * const { StringDecoder } = require('node:string_decoder');
  * const decoder = new StringDecoder('utf8');
  *
  * const cent = Buffer.from([0xC2, 0xA2]);
@@ -29,14 +29,14 @@
  * symbol (`€`) are written over three separate operations:
  *
  * ```js
- * const { StringDecoder } = require('string_decoder');
+ * const { StringDecoder } = require('node:string_decoder');
  * const decoder = new StringDecoder('utf8');
  *
  * decoder.write(Buffer.from([0xE2]));
  * decoder.write(Buffer.from([0x82]));
  * console.log(decoder.end(Buffer.from([0xAC])));
  * ```
- * @see [source](https://github.com/nodejs/node/blob/
+ * @see [source](https://github.com/nodejs/node/blob/v20.0.0/lib/string_decoder.js)
  */
 declare module 'string_decoder' {
 class StringDecoder {
|