fsevents 1.0.8 → 1.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of fsevents might be problematic.
- package/node_modules/ansi-styles/index.js +72 -6
- package/node_modules/ansi-styles/package.json +19 -12
- package/node_modules/ansi-styles/readme.md +36 -8
- package/node_modules/are-we-there-yet/CHANGES.md +19 -0
- package/node_modules/are-we-there-yet/README.md +12 -2
- package/node_modules/are-we-there-yet/index.js +4 -132
- package/node_modules/are-we-there-yet/package.json +18 -13
- package/node_modules/are-we-there-yet/test/lib/test-event.js +29 -0
- package/node_modules/are-we-there-yet/test/tracker.js +44 -43
- package/node_modules/are-we-there-yet/test/trackergroup.js +73 -64
- package/node_modules/are-we-there-yet/test/trackerstream.js +26 -40
- package/node_modules/are-we-there-yet/tracker-base.js +11 -0
- package/node_modules/are-we-there-yet/tracker-group.js +107 -0
- package/node_modules/are-we-there-yet/tracker-stream.js +35 -0
- package/node_modules/are-we-there-yet/tracker.js +30 -0
- package/node_modules/asn1/package.json +1 -2
- package/node_modules/assert-plus/package.json +1 -3
- package/node_modules/async/package.json +1 -1
- package/node_modules/aws4/.npmignore +2 -1
- package/node_modules/aws4/README.md +4 -0
- package/node_modules/aws4/aws4.js +8 -3
- package/node_modules/aws4/node_modules/lru-cache/.npmignore +3 -0
- package/node_modules/aws4/node_modules/lru-cache/.travis.yml +3 -4
- package/node_modules/aws4/node_modules/lru-cache/README.md +29 -24
- package/node_modules/aws4/node_modules/lru-cache/benchmarks/insertion-time.js +32 -0
- package/node_modules/aws4/node_modules/lru-cache/lib/lru-cache.js +333 -199
- package/node_modules/aws4/node_modules/lru-cache/node_modules/pseudomap/LICENSE +15 -0
- package/node_modules/aws4/node_modules/lru-cache/node_modules/pseudomap/README.md +60 -0
- package/node_modules/aws4/node_modules/lru-cache/node_modules/pseudomap/map.js +9 -0
- package/node_modules/aws4/node_modules/lru-cache/node_modules/pseudomap/package.json +59 -0
- package/node_modules/aws4/node_modules/lru-cache/node_modules/pseudomap/pseudomap.js +113 -0
- package/node_modules/aws4/node_modules/lru-cache/node_modules/pseudomap/test/basic.js +86 -0
- package/node_modules/aws4/node_modules/lru-cache/node_modules/yallist/.npmignore +4 -0
- package/node_modules/aws4/node_modules/lru-cache/node_modules/yallist/.travis.yml +7 -0
- package/node_modules/aws4/node_modules/lru-cache/node_modules/yallist/CONTRIBUTING.md +4 -0
- package/node_modules/aws4/node_modules/lru-cache/node_modules/yallist/LICENSE +15 -0
- package/node_modules/aws4/node_modules/lru-cache/node_modules/yallist/README.md +204 -0
- package/node_modules/aws4/node_modules/lru-cache/node_modules/yallist/package.json +59 -0
- package/node_modules/aws4/node_modules/lru-cache/node_modules/yallist/test/basic.js +188 -0
- package/node_modules/aws4/node_modules/lru-cache/node_modules/yallist/yallist.js +360 -0
- package/node_modules/aws4/node_modules/lru-cache/package.json +16 -12
- package/node_modules/aws4/node_modules/lru-cache/test/basic.js +270 -146
- package/node_modules/aws4/node_modules/lru-cache/test/foreach.js +39 -25
- package/node_modules/aws4/node_modules/lru-cache/test/inspect.js +54 -0
- package/node_modules/aws4/node_modules/lru-cache/test/no-symbol.js +3 -0
- package/node_modules/aws4/node_modules/lru-cache/test/serialize.js +115 -104
- package/node_modules/aws4/package.json +17 -14
- package/node_modules/bl/bl.js +9 -3
- package/node_modules/bl/package.json +12 -12
- package/node_modules/bl/test/test.js +14 -0
- package/node_modules/color-convert/README.md +62 -0
- package/node_modules/color-convert/conversions.js +594 -0
- package/node_modules/color-convert/css-keywords.js +151 -0
- package/node_modules/color-convert/index.js +75 -0
- package/node_modules/color-convert/package.json +92 -0
- package/node_modules/color-convert/route.js +98 -0
- package/node_modules/dashdash/lib/dashdash.js +1 -1
- package/node_modules/dashdash/node_modules/assert-plus/AUTHORS +6 -0
- package/node_modules/dashdash/node_modules/assert-plus/CHANGES.md +14 -0
- package/node_modules/dashdash/node_modules/assert-plus/README.md +162 -0
- package/node_modules/dashdash/node_modules/assert-plus/assert.js +211 -0
- package/node_modules/dashdash/node_modules/assert-plus/package.json +89 -0
- package/node_modules/dashdash/package.json +19 -12
- package/node_modules/ecc-jsbn/package.json +1 -2
- package/node_modules/escape-string-regexp/package.json +15 -11
- package/node_modules/extsprintf/package.json +2 -3
- package/node_modules/form-data/.dockerignore +7 -0
- package/node_modules/form-data/.editorconfig +10 -0
- package/node_modules/form-data/.eslintignore +1 -0
- package/node_modules/form-data/.eslintrc +64 -0
- package/node_modules/form-data/{Readme.md → README.md} +9 -1
- package/node_modules/form-data/lib/browser.js +2 -1
- package/node_modules/form-data/lib/form_data.js +172 -135
- package/node_modules/form-data/lib/populate.js +9 -0
- package/node_modules/form-data/package.json +35 -16
- package/node_modules/form-data/wercker.yml +36 -0
- package/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/index.js +1 -1
- package/node_modules/fstream-ignore/node_modules/minimatch/node_modules/brace-expansion/package.json +14 -10
- package/node_modules/gauge/README.md +1 -1
- package/node_modules/gauge/package.json +18 -14
- package/node_modules/gauge/progress-bar.js +2 -2
- package/node_modules/http-signature/package.json +1 -2
- package/node_modules/is-my-json-valid/README.md +2 -2
- package/node_modules/is-my-json-valid/index.js +15 -6
- package/node_modules/is-my-json-valid/package.json +11 -7
- package/node_modules/is-my-json-valid/test/json-schema-draft4/multipleOf.json +36 -0
- package/node_modules/is-my-json-valid/test/misc.js +18 -0
- package/node_modules/is-typedarray/package.json +1 -2
- package/node_modules/isarray/.npmignore +1 -0
- package/node_modules/isarray/.travis.yml +4 -0
- package/node_modules/isarray/Makefile +6 -0
- package/node_modules/isarray/README.md +6 -0
- package/node_modules/isarray/index.js +3 -1
- package/node_modules/isarray/package.json +28 -10
- package/node_modules/isarray/test.js +20 -0
- package/node_modules/jodid25519/package.json +1 -2
- package/node_modules/jsbn/package.json +1 -2
- package/node_modules/json-schema/package.json +1 -1
- package/node_modules/jsprim/package.json +1 -2
- package/node_modules/lodash.pad/LICENSE +17 -16
- package/node_modules/lodash.pad/README.md +2 -2
- package/node_modules/lodash.pad/index.js +3 -94
- package/node_modules/lodash.pad/package.json +12 -12
- package/node_modules/lodash.padend/LICENSE +23 -0
- package/node_modules/lodash.padend/README.md +18 -0
- package/node_modules/lodash.padend/index.js +285 -0
- package/node_modules/lodash.padend/package.json +91 -0
- package/node_modules/lodash.padstart/LICENSE +23 -0
- package/node_modules/lodash.padstart/README.md +18 -0
- package/node_modules/lodash.padstart/index.js +285 -0
- package/node_modules/lodash.padstart/package.json +91 -0
- package/node_modules/lodash.repeat/LICENSE +17 -16
- package/node_modules/lodash.repeat/README.md +2 -2
- package/node_modules/lodash.repeat/index.js +3 -94
- package/node_modules/lodash.repeat/package.json +14 -13
- package/node_modules/lodash.tostring/LICENSE +23 -0
- package/node_modules/lodash.tostring/README.md +18 -0
- package/node_modules/lodash.tostring/index.js +164 -0
- package/node_modules/lodash.tostring/package.json +91 -0
- package/node_modules/mime-db/HISTORY.md +14 -0
- package/node_modules/mime-db/db.json +26 -3
- package/node_modules/mime-db/package.json +23 -18
- package/node_modules/mime-types/HISTORY.md +8 -0
- package/node_modules/mime-types/README.md +1 -1
- package/node_modules/mime-types/package.json +21 -16
- package/node_modules/node-pre-gyp/CHANGELOG.md +11 -0
- package/node_modules/node-pre-gyp/lib/util/abi_crosswalk.json +44 -4
- package/node_modules/node-pre-gyp/package.json +16 -16
- package/node_modules/npmlog/.nyc_output/64996.json +1 -0
- package/node_modules/npmlog/.nyc_output/64998.json +1 -0
- package/node_modules/npmlog/.nyc_output/65000.json +1 -0
- package/node_modules/npmlog/log.js +3 -2
- package/node_modules/npmlog/package.json +15 -11
- package/node_modules/npmlog/test/progress.js +25 -8
- package/node_modules/once/package.json +1 -1
- package/node_modules/qs/package.json +1 -2
- package/node_modules/readable-stream/.travis.yml +18 -16
- package/node_modules/readable-stream/README.md +1 -1
- package/node_modules/readable-stream/doc/stream.markdown +435 -405
- package/node_modules/readable-stream/lib/_stream_duplex.js +13 -20
- package/node_modules/readable-stream/lib/_stream_passthrough.js +3 -4
- package/node_modules/readable-stream/lib/_stream_readable.js +110 -205
- package/node_modules/readable-stream/lib/_stream_transform.js +21 -38
- package/node_modules/readable-stream/lib/_stream_writable.js +108 -121
- package/node_modules/readable-stream/package.json +19 -15
- package/node_modules/request/package.json +1 -2
- package/node_modules/rimraf/node_modules/glob/README.md +6 -0
- package/node_modules/rimraf/node_modules/glob/common.js +12 -3
- package/node_modules/rimraf/node_modules/glob/glob.js +20 -3
- package/node_modules/rimraf/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/index.js +1 -1
- package/node_modules/rimraf/node_modules/glob/node_modules/minimatch/node_modules/brace-expansion/package.json +14 -10
- package/node_modules/rimraf/node_modules/glob/package.json +15 -11
- package/node_modules/rimraf/node_modules/glob/sync.js +8 -1
- package/node_modules/rimraf/package.json +14 -10
- package/node_modules/semver/package.json +1 -2
- package/node_modules/sshpk/lib/formats/rfc4253.js +2 -1
- package/node_modules/sshpk/package.json +13 -10
- package/node_modules/strip-ansi/package.json +21 -16
- package/node_modules/strip-ansi/readme.md +5 -5
- package/node_modules/tough-cookie/lib/cookie.js +2 -2
- package/node_modules/tough-cookie/package.json +13 -9
- package/node_modules/tweetnacl/CHANGELOG.md +53 -0
- package/node_modules/tweetnacl/COPYING.txt +9 -0
- package/node_modules/tweetnacl/README.md +14 -40
- package/node_modules/tweetnacl/nacl-fast.js +21 -51
- package/node_modules/tweetnacl/nacl-fast.min.js +2 -2
- package/node_modules/tweetnacl/nacl.js +21 -51
- package/node_modules/tweetnacl/nacl.min.js +1 -1
- package/node_modules/tweetnacl/package.json +28 -38
- package/node_modules/verror/package.json +2 -3
- package/package.json +2 -2
- package/node_modules/aws4/example.js +0 -372
- package/node_modules/aws4/node_modules/lru-cache/test/memory-leak.js +0 -51
- package/node_modules/isarray/build/build.js +0 -209
- package/node_modules/lodash._basetostring/LICENSE +0 -22
- package/node_modules/lodash._basetostring/README.md +0 -20
- package/node_modules/lodash._basetostring/index.js +0 -22
- package/node_modules/lodash._basetostring/package.json +0 -99
- package/node_modules/lodash._createpadding/LICENSE +0 -22
- package/node_modules/lodash._createpadding/README.md +0 -20
- package/node_modules/lodash._createpadding/index.js +0 -37
- package/node_modules/lodash._createpadding/package.json +0 -101
- package/node_modules/lodash._root/LICENSE +0 -22
- package/node_modules/lodash._root/README.md +0 -18
- package/node_modules/lodash._root/index.js +0 -53
- package/node_modules/lodash._root/package.json +0 -77
- package/node_modules/lodash.padleft/LICENSE.txt +0 -22
- package/node_modules/lodash.padleft/README.md +0 -20
- package/node_modules/lodash.padleft/index.js +0 -50
- package/node_modules/lodash.padleft/package.json +0 -107
- package/node_modules/lodash.padright/LICENSE.txt +0 -22
- package/node_modules/lodash.padright/README.md +0 -20
- package/node_modules/lodash.padright/index.js +0 -50
- package/node_modules/lodash.padright/package.json +0 -107
@@ -3,27 +3,25 @@
|
|
3
3
|
Stability: 2 - Stable
|
4
4
|
|
5
5
|
A stream is an abstract interface implemented by various objects in
|
6
|
-
Node.js.
|
7
|
-
[stdout][]. Streams are readable, writable, or both. All
|
8
|
-
instances of [EventEmitter][]
|
6
|
+
Node.js. For example a [request to an HTTP server][http-incoming-message] is a
|
7
|
+
stream, as is [`process.stdout`][]. Streams are readable, writable, or both. All
|
8
|
+
streams are instances of [`EventEmitter`][].
|
9
9
|
|
10
10
|
You can load the Stream base classes by doing `require('stream')`.
|
11
11
|
There are base classes provided for [Readable][] streams, [Writable][]
|
12
12
|
streams, [Duplex][] streams, and [Transform][] streams.
|
13
13
|
|
14
|
-
This document is split up into 3 sections
|
15
|
-
parts of the API that you need to be aware of to use streams in your
|
16
|
-
programs. If you never implement a streaming API yourself, you can
|
17
|
-
stop there.
|
14
|
+
This document is split up into 3 sections:
|
18
15
|
|
19
|
-
The
|
20
|
-
|
21
|
-
|
22
|
-
|
23
|
-
|
24
|
-
|
25
|
-
|
26
|
-
|
16
|
+
1. The first section explains the parts of the API that you need to be
|
17
|
+
aware of to use streams in your programs.
|
18
|
+
2. The second section explains the parts of the API that you need to
|
19
|
+
use if you implement your own custom streams yourself. The API is designed to
|
20
|
+
make this easy for you to do.
|
21
|
+
3. The third section goes into more depth about how streams work,
|
22
|
+
including some of the internal mechanisms and functions that you
|
23
|
+
should probably not modify unless you definitely know what you are
|
24
|
+
doing.
|
27
25
|
|
28
26
|
|
29
27
|
## API for Stream Consumers
|
@@ -37,22 +35,22 @@ and properties depending on whether they are Readable, Writable, or
|
|
37
35
|
Duplex.
|
38
36
|
|
39
37
|
If a stream is both Readable and Writable, then it implements all of
|
40
|
-
the methods and events
|
38
|
+
the methods and events. So, a [Duplex][] or [Transform][] stream is
|
41
39
|
fully described by this API, though their implementation may be
|
42
40
|
somewhat different.
|
43
41
|
|
44
42
|
It is not necessary to implement Stream interfaces in order to consume
|
45
|
-
streams in your programs.
|
43
|
+
streams in your programs. If you **are** implementing streaming
|
46
44
|
interfaces in your own program, please also refer to
|
47
|
-
[API for Stream Implementors][]
|
45
|
+
[API for Stream Implementors][].
|
48
46
|
|
49
47
|
Almost all Node.js programs, no matter how simple, use Streams in some
|
50
48
|
way. Here is an example of using Streams in an Node.js program:
|
51
49
|
|
52
|
-
```
|
53
|
-
|
50
|
+
```js
|
51
|
+
const http = require('http');
|
54
52
|
|
55
|
-
var server = http.createServer(
|
53
|
+
var server = http.createServer( (req, res) => {
|
56
54
|
// req is an http.IncomingMessage, which is a Readable Stream
|
57
55
|
// res is an http.ServerResponse, which is a Writable Stream
|
58
56
|
|
@@ -62,18 +60,18 @@ var server = http.createServer(function (req, res) {
|
|
62
60
|
req.setEncoding('utf8');
|
63
61
|
|
64
62
|
// Readable streams emit 'data' events once a listener is added
|
65
|
-
req.on('data',
|
63
|
+
req.on('data', (chunk) => {
|
66
64
|
body += chunk;
|
67
65
|
});
|
68
66
|
|
69
67
|
// the end event tells you that you have entire body
|
70
|
-
req.on('end',
|
68
|
+
req.on('end', () => {
|
71
69
|
try {
|
72
70
|
var data = JSON.parse(body);
|
73
71
|
} catch (er) {
|
74
72
|
// uh oh! bad json!
|
75
73
|
res.statusCode = 400;
|
76
|
-
return res.end(
|
74
|
+
return res.end(`error: ${er.message}`);
|
77
75
|
}
|
78
76
|
|
79
77
|
// write back something interesting to the user:
|
@@ -95,65 +93,66 @@ server.listen(1337);
|
|
95
93
|
### Class: stream.Duplex
|
96
94
|
|
97
95
|
Duplex streams are streams that implement both the [Readable][] and
|
98
|
-
[Writable][] interfaces.
|
96
|
+
[Writable][] interfaces.
|
99
97
|
|
100
98
|
Examples of Duplex streams include:
|
101
99
|
|
102
|
-
* [
|
103
|
-
* [zlib streams][]
|
104
|
-
* [crypto streams][]
|
100
|
+
* [TCP sockets][]
|
101
|
+
* [zlib streams][zlib]
|
102
|
+
* [crypto streams][crypto]
|
105
103
|
|
106
104
|
### Class: stream.Readable
|
107
105
|
|
108
106
|
<!--type=class-->
|
109
107
|
|
110
108
|
The Readable stream interface is the abstraction for a *source* of
|
111
|
-
data that you are reading from.
|
109
|
+
data that you are reading from. In other words, data comes *out* of a
|
112
110
|
Readable stream.
|
113
111
|
|
114
112
|
A Readable stream will not start emitting data until you indicate that
|
115
113
|
you are ready to receive it.
|
116
114
|
|
117
115
|
Readable streams have two "modes": a **flowing mode** and a **paused
|
118
|
-
mode**.
|
119
|
-
and provided to your program as fast as possible.
|
120
|
-
must explicitly call `stream.read()` to get chunks of data out.
|
116
|
+
mode**. When in flowing mode, data is read from the underlying system
|
117
|
+
and provided to your program as fast as possible. In paused mode, you
|
118
|
+
must explicitly call [`stream.read()`][stream-read] to get chunks of data out.
|
121
119
|
Streams start out in paused mode.
|
122
120
|
|
123
121
|
**Note**: If no data event handlers are attached, and there are no
|
124
|
-
[`pipe()`][] destinations, and the stream is switched into flowing
|
122
|
+
[`stream.pipe()`][] destinations, and the stream is switched into flowing
|
125
123
|
mode, then data will be lost.
|
126
124
|
|
127
125
|
You can switch to flowing mode by doing any of the following:
|
128
126
|
|
129
|
-
* Adding a [`'data'`
|
130
|
-
* Calling the [`resume()`][] method to explicitly open the
|
131
|
-
|
127
|
+
* Adding a [`'data'`][] event handler to listen for data.
|
128
|
+
* Calling the [`stream.resume()`][stream-resume] method to explicitly open the
|
129
|
+
flow.
|
130
|
+
* Calling the [`stream.pipe()`][] method to send the data to a [Writable][].
|
132
131
|
|
133
132
|
You can switch back to paused mode by doing either of the following:
|
134
133
|
|
135
|
-
* If there are no pipe destinations, by calling the
|
136
|
-
method.
|
137
|
-
* If there are pipe destinations, by removing any [`'data'`
|
134
|
+
* If there are no pipe destinations, by calling the
|
135
|
+
[`stream.pause()`][stream-pause] method.
|
136
|
+
* If there are pipe destinations, by removing any [`'data'`][] event
|
138
137
|
handlers, and removing all pipe destinations by calling the
|
139
|
-
[`unpipe()`][] method.
|
138
|
+
[`stream.unpipe()`][] method.
|
140
139
|
|
141
|
-
Note that, for backwards compatibility reasons, removing `'data'`
|
142
|
-
event handlers will **not** automatically pause the stream.
|
143
|
-
there are piped destinations, then calling `pause()` will
|
144
|
-
guarantee that the stream will *remain* paused once those
|
140
|
+
Note that, for backwards compatibility reasons, removing [`'data'`][]
|
141
|
+
event handlers will **not** automatically pause the stream. Also, if
|
142
|
+
there are piped destinations, then calling [`stream.pause()`][stream-pause] will
|
143
|
+
not guarantee that the stream will *remain* paused once those
|
145
144
|
destinations drain and ask for more data.
|
146
145
|
|
147
146
|
Examples of readable streams include:
|
148
147
|
|
149
|
-
* [
|
150
|
-
* [
|
148
|
+
* [HTTP responses, on the client][http-incoming-message]
|
149
|
+
* [HTTP requests, on the server][http-incoming-message]
|
151
150
|
* [fs read streams][]
|
152
|
-
* [zlib streams][]
|
153
|
-
* [crypto streams][]
|
154
|
-
* [
|
151
|
+
* [zlib streams][zlib]
|
152
|
+
* [crypto streams][crypto]
|
153
|
+
* [TCP sockets][]
|
155
154
|
* [child process stdout and stderr][]
|
156
|
-
* [process.stdin][]
|
155
|
+
* [`process.stdin`][]
|
157
156
|
|
158
157
|
#### Event: 'close'
|
159
158
|
|
@@ -161,22 +160,22 @@ Emitted when the stream and any of its underlying resources (a file
|
|
161
160
|
descriptor, for example) have been closed. The event indicates that
|
162
161
|
no more events will be emitted, and no further computation will occur.
|
163
162
|
|
164
|
-
Not all streams will emit the 'close' event.
|
163
|
+
Not all streams will emit the `'close'` event.
|
165
164
|
|
166
165
|
#### Event: 'data'
|
167
166
|
|
168
|
-
* `chunk` {Buffer
|
167
|
+
* `chunk` {Buffer|String} The chunk of data.
|
169
168
|
|
170
|
-
Attaching a `data` event listener to a stream that has not been
|
169
|
+
Attaching a `'data'` event listener to a stream that has not been
|
171
170
|
explicitly paused will switch the stream into flowing mode. Data will
|
172
171
|
then be passed as soon as it is available.
|
173
172
|
|
174
173
|
If you just want to get all the data out of the stream as fast as
|
175
174
|
possible, this is the best way to do so.
|
176
175
|
|
177
|
-
```
|
176
|
+
```js
|
178
177
|
var readable = getReadableStreamSomehow();
|
179
|
-
readable.on('data',
|
178
|
+
readable.on('data', (chunk) => {
|
180
179
|
console.log('got %d bytes of data', chunk.length);
|
181
180
|
});
|
182
181
|
```
|
@@ -185,16 +184,17 @@ readable.on('data', function(chunk) {
|
|
185
184
|
|
186
185
|
This event fires when there will be no more data to read.
|
187
186
|
|
188
|
-
Note that the `end` event **will not fire** unless the data is
|
189
|
-
completely consumed.
|
190
|
-
or by calling `read()` repeatedly until you get to the
|
187
|
+
Note that the `'end'` event **will not fire** unless the data is
|
188
|
+
completely consumed. This can be done by switching into flowing mode,
|
189
|
+
or by calling [`stream.read()`][stream-read] repeatedly until you get to the
|
190
|
+
end.
|
191
191
|
|
192
|
-
```
|
192
|
+
```js
|
193
193
|
var readable = getReadableStreamSomehow();
|
194
|
-
readable.on('data',
|
194
|
+
readable.on('data', (chunk) => {
|
195
195
|
console.log('got %d bytes of data', chunk.length);
|
196
196
|
});
|
197
|
-
readable.on('end',
|
197
|
+
readable.on('end', () => {
|
198
198
|
console.log('there will be no more data.');
|
199
199
|
});
|
200
200
|
```
|
@@ -216,30 +216,30 @@ hadn't already.
|
|
216
216
|
|
217
217
|
```javascript
|
218
218
|
var readable = getReadableStreamSomehow();
|
219
|
-
readable.on('readable',
|
219
|
+
readable.on('readable', () => {
|
220
220
|
// there is some data to read now
|
221
221
|
});
|
222
222
|
```
|
223
223
|
|
224
|
-
Once the internal buffer is drained, a `readable` event will fire
|
224
|
+
Once the internal buffer is drained, a `'readable'` event will fire
|
225
225
|
again when more data is available.
|
226
226
|
|
227
|
-
The `readable` event is not emitted in the "flowing" mode with the
|
227
|
+
The `'readable'` event is not emitted in the "flowing" mode with the
|
228
228
|
sole exception of the last one, on end-of-stream.
|
229
229
|
|
230
|
-
The 'readable' event indicates that the stream has new information:
|
230
|
+
The `'readable'` event indicates that the stream has new information:
|
231
231
|
either new data is available or the end of the stream has been reached.
|
232
|
-
In the former case,
|
233
|
-
|
234
|
-
is an empty file:
|
232
|
+
In the former case, [`stream.read()`][stream-read] will return that data. In the
|
233
|
+
latter case, [`stream.read()`][stream-read] will return null. For instance, in
|
234
|
+
the following example, `foo.txt` is an empty file:
|
235
235
|
|
236
|
-
```
|
237
|
-
|
236
|
+
```js
|
237
|
+
const fs = require('fs');
|
238
238
|
var rr = fs.createReadStream('foo.txt');
|
239
|
-
rr.on('readable',
|
239
|
+
rr.on('readable', () => {
|
240
240
|
console.log('readable:', rr.read());
|
241
241
|
});
|
242
|
-
rr.on('end',
|
242
|
+
rr.on('end', () => {
|
243
243
|
console.log('end');
|
244
244
|
});
|
245
245
|
```
|
@@ -247,20 +247,20 @@ rr.on('end', function() {
|
|
247
247
|
The output of running this script is:
|
248
248
|
|
249
249
|
```
|
250
|
-
|
250
|
+
$ node test.js
|
251
251
|
readable: null
|
252
252
|
end
|
253
253
|
```
|
254
254
|
|
255
255
|
#### readable.isPaused()
|
256
256
|
|
257
|
-
* Return:
|
257
|
+
* Return: {Boolean}
|
258
258
|
|
259
259
|
This method returns whether or not the `readable` has been **explicitly**
|
260
|
-
paused by client code (using `
|
261
|
-
`
|
260
|
+
paused by client code (using [`stream.pause()`][stream-pause] without a
|
261
|
+
corresponding [`stream.resume()`][stream-resume]).
|
262
262
|
|
263
|
-
```
|
263
|
+
```js
|
264
264
|
var readable = new stream.Readable
|
265
265
|
|
266
266
|
readable.isPaused() // === false
|
@@ -275,16 +275,16 @@ readable.isPaused() // === false
|
|
275
275
|
* Return: `this`
|
276
276
|
|
277
277
|
This method will cause a stream in flowing mode to stop emitting
|
278
|
-
`data` events, switching out of flowing mode.
|
278
|
+
[`'data'`][] events, switching out of flowing mode. Any data that becomes
|
279
279
|
available will remain in the internal buffer.
|
280
280
|
|
281
|
-
```
|
281
|
+
```js
|
282
282
|
var readable = getReadableStreamSomehow();
|
283
|
-
readable.on('data',
|
283
|
+
readable.on('data', (chunk) => {
|
284
284
|
console.log('got %d bytes of data', chunk.length);
|
285
285
|
readable.pause();
|
286
286
|
console.log('there will be no more data for 1 second');
|
287
|
-
setTimeout(
|
287
|
+
setTimeout(() => {
|
288
288
|
console.log('now data will start flowing again');
|
289
289
|
readable.resume();
|
290
290
|
}, 1000);
|
@@ -293,7 +293,7 @@ readable.on('data', function(chunk) {
|
|
293
293
|
|
294
294
|
#### readable.pipe(destination[, options])
|
295
295
|
|
296
|
-
* `destination` {
|
296
|
+
* `destination` {stream.Writable} The destination for writing data
|
297
297
|
* `options` {Object} Pipe options
|
298
298
|
* `end` {Boolean} End the writer when the reader ends. Default = `true`
|
299
299
|
|
@@ -303,7 +303,7 @@ the destination is not overwhelmed by a fast readable stream.
|
|
303
303
|
|
304
304
|
Multiple destinations can be piped to safely.
|
305
305
|
|
306
|
-
```
|
306
|
+
```js
|
307
307
|
var readable = getReadableStreamSomehow();
|
308
308
|
var writable = fs.createWriteStream('file.txt');
|
309
309
|
// All the data from readable goes into 'file.txt'
|
@@ -313,7 +313,7 @@ readable.pipe(writable);
|
|
313
313
|
This function returns the destination stream, so you can set up pipe
|
314
314
|
chains like so:
|
315
315
|
|
316
|
-
```
|
316
|
+
```js
|
317
317
|
var r = fs.createReadStream('file.txt');
|
318
318
|
var z = zlib.createGzip();
|
319
319
|
var w = fs.createWriteStream('file.txt.gz');
|
@@ -322,51 +322,51 @@ r.pipe(z).pipe(w);
|
|
322
322
|
|
323
323
|
For example, emulating the Unix `cat` command:
|
324
324
|
|
325
|
-
```
|
325
|
+
```js
|
326
326
|
process.stdin.pipe(process.stdout);
|
327
327
|
```
|
328
328
|
|
329
|
-
By default [`end()`][] is called on the destination when the
|
330
|
-
emits `end
|
331
|
-
false }` as `options` to keep the destination stream open.
|
329
|
+
By default [`stream.end()`][stream-end] is called on the destination when the
|
330
|
+
source stream emits [`'end'`][], so that `destination` is no longer writable.
|
331
|
+
Pass `{ end: false }` as `options` to keep the destination stream open.
|
332
332
|
|
333
333
|
This keeps `writer` open so that "Goodbye" can be written at the
|
334
334
|
end.
|
335
335
|
|
336
|
-
```
|
336
|
+
```js
|
337
337
|
reader.pipe(writer, { end: false });
|
338
|
-
reader.on('end',
|
338
|
+
reader.on('end', () => {
|
339
339
|
writer.end('Goodbye\n');
|
340
340
|
});
|
341
341
|
```
|
342
342
|
|
343
|
-
Note that `process.stderr` and `process.stdout` are never closed until
|
343
|
+
Note that [`process.stderr`][] and [`process.stdout`][] are never closed until
|
344
344
|
the process exits, regardless of the specified options.
|
345
345
|
|
346
346
|
#### readable.read([size])
|
347
347
|
|
348
348
|
* `size` {Number} Optional argument to specify how much data to read.
|
349
|
-
* Return {String
|
349
|
+
* Return {String|Buffer|Null}
|
350
350
|
|
351
351
|
The `read()` method pulls some data out of the internal buffer and
|
352
|
-
returns it.
|
352
|
+
returns it. If there is no data available, then it will return
|
353
353
|
`null`.
|
354
354
|
|
355
355
|
If you pass in a `size` argument, then it will return that many
|
356
|
-
bytes.
|
356
|
+
bytes. If `size` bytes are not available, then it will return `null`,
|
357
357
|
unless we've ended, in which case it will return the data remaining
|
358
358
|
in the buffer.
|
359
359
|
|
360
360
|
If you do not specify a `size` argument, then it will return all the
|
361
361
|
data in the internal buffer.
|
362
362
|
|
363
|
-
This method should only be called in paused mode.
|
363
|
+
This method should only be called in paused mode. In flowing mode,
|
364
364
|
this method is called automatically until the internal buffer is
|
365
365
|
drained.
|
366
366
|
|
367
|
-
```
|
367
|
+
```js
|
368
368
|
var readable = getReadableStreamSomehow();
|
369
|
-
readable.on('readable',
|
369
|
+
readable.on('readable', () => {
|
370
370
|
var chunk;
|
371
371
|
while (null !== (chunk = readable.read())) {
|
372
372
|
console.log('got %d bytes of data', chunk.length);
|
@@ -375,27 +375,27 @@ readable.on('readable', function() {
|
|
375
375
|
```
|
376
376
|
|
377
377
|
If this method returns a data chunk, then it will also trigger the
|
378
|
-
emission of a [`'data'`
|
378
|
+
emission of a [`'data'`][] event.
|
379
379
|
|
380
|
-
Note that calling `
|
381
|
-
triggered will return `null`. No runtime error will be raised.
|
380
|
+
Note that calling [`stream.read([size])`][stream-read] after the [`'end'`][]
|
381
|
+
event has been triggered will return `null`. No runtime error will be raised.
|
382
382
|
|
383
383
|
#### readable.resume()
|
384
384
|
|
385
385
|
* Return: `this`
|
386
386
|
|
387
|
-
This method will cause the readable stream to resume emitting `data`
|
387
|
+
This method will cause the readable stream to resume emitting [`'data'`][]
|
388
388
|
events.
|
389
389
|
|
390
|
-
This method will switch the stream into flowing mode.
|
390
|
+
This method will switch the stream into flowing mode. If you do *not*
|
391
391
|
want to consume the data from a stream, but you *do* want to get to
|
392
|
-
its `end` event, you can call [`
|
393
|
-
data.
|
392
|
+
its [`'end'`][] event, you can call [`stream.resume()`][stream-resume] to open
|
393
|
+
the flow of data.
|
394
394
|
|
395
|
-
```
|
395
|
+
```js
|
396
396
|
var readable = getReadableStreamSomehow();
|
397
397
|
readable.resume();
|
398
|
-
readable.on('end',
|
398
|
+
readable.on('end', () => {
|
399
399
|
console.log('got to the end, but did not read anything');
|
400
400
|
});
|
401
401
|
```
|
@@ -405,22 +405,25 @@ readable.on('end', function() {
|
|
405
405
|
* `encoding` {String} The encoding to use.
|
406
406
|
* Return: `this`
|
407
407
|
|
408
|
-
Call this function to cause the stream to return strings of the
|
409
|
-
|
410
|
-
`readable.setEncoding('utf8')`, then the output data will be
|
411
|
-
|
412
|
-
|
413
|
-
hexadecimal string format.
|
408
|
+
Call this function to cause the stream to return strings of the specified
|
409
|
+
encoding instead of Buffer objects. For example, if you do
|
410
|
+
`readable.setEncoding('utf8')`, then the output data will be interpreted as
|
411
|
+
UTF-8 data, and returned as strings. If you do `readable.setEncoding('hex')`,
|
412
|
+
then the data will be encoded in hexadecimal string format.
|
414
413
|
|
415
414
|
This properly handles multi-byte characters that would otherwise be
|
416
415
|
potentially mangled if you simply pulled the Buffers directly and
|
417
|
-
called `buf.toString(encoding)` on them.
|
416
|
+
called [`buf.toString(encoding)`][] on them. If you want to read the data
|
418
417
|
as strings, always use this method.
|
419
418
|
|
420
|
-
|
419
|
+
Also you can disable any encoding at all with `readable.setEncoding(null)`.
|
420
|
+
This approach is very useful if you deal with binary data or with large
|
421
|
+
multi-byte strings spread out over multiple chunks.
|
422
|
+
|
423
|
+
```js
|
421
424
|
var readable = getReadableStreamSomehow();
|
422
425
|
readable.setEncoding('utf8');
|
423
|
-
readable.on('data',
|
426
|
+
readable.on('data', (chunk) => {
|
424
427
|
assert.equal(typeof chunk, 'string');
|
425
428
|
console.log('got %d characters of string data', chunk.length);
|
426
429
|
});
|
@@ -428,22 +431,23 @@ readable.on('data', function(chunk) {
|
|
428
431
|
|
429
432
|
#### readable.unpipe([destination])
|
430
433
|
|
431
|
-
* `destination` {
|
434
|
+
* `destination` {stream.Writable} Optional specific stream to unpipe
|
432
435
|
|
433
|
-
This method will remove the hooks set up for a previous `pipe()`
|
436
|
+
This method will remove the hooks set up for a previous [`stream.pipe()`][]
|
437
|
+
call.
|
434
438
|
|
435
439
|
If the destination is not specified, then all pipes are removed.
|
436
440
|
|
437
441
|
If the destination is specified, but no pipe is set up for it, then
|
438
442
|
this is a no-op.
|
439
443
|
|
440
|
-
```
|
444
|
+
```js
|
441
445
|
var readable = getReadableStreamSomehow();
|
442
446
|
var writable = fs.createWriteStream('file.txt');
|
443
447
|
// All the data from readable goes into 'file.txt',
|
444
448
|
// but only for the first second
|
445
449
|
readable.pipe(writable);
|
446
|
-
setTimeout(
|
450
|
+
setTimeout(() => {
|
447
451
|
console.log('stop writing to file.txt');
|
448
452
|
readable.unpipe(writable);
|
449
453
|
console.log('manually close the file stream');
|
@@ -453,25 +457,25 @@ setTimeout(function() {
|
|
453
457
|
|
454
458
|
#### readable.unshift(chunk)
|
455
459
|
|
456
|
-
* `chunk` {Buffer
|
460
|
+
* `chunk` {Buffer|String} Chunk of data to unshift onto the read queue
|
457
461
|
|
458
462
|
This is useful in certain cases where a stream is being consumed by a
|
459
463
|
parser, which needs to "un-consume" some data that it has
|
460
464
|
optimistically pulled out of the source, so that the stream can be
|
461
465
|
passed on to some other party.
|
462
466
|
|
463
|
-
Note that `stream.unshift(chunk)` cannot be called after the `end` event
|
467
|
+
Note that `stream.unshift(chunk)` cannot be called after the [`'end'`][] event
|
464
468
|
has been triggered; a runtime error will be raised.
|
465
469
|
|
466
470
|
If you find that you must often call `stream.unshift(chunk)` in your
|
467
|
-
programs, consider implementing a [Transform][] stream instead.
|
468
|
-
for Stream Implementors
|
471
|
+
programs, consider implementing a [Transform][] stream instead. (See [API
|
472
|
+
for Stream Implementors][].)
|
469
473
|
|
470
|
-
```
|
474
|
+
```js
|
471
475
|
// Pull off a header delimited by \n\n
|
472
476
|
// use unshift() if we get too much
|
473
477
|
// Call the callback with (error, header, stream)
|
474
|
-
|
478
|
+
const StringDecoder = require('string_decoder').StringDecoder;
|
475
479
|
function parseHeader(stream, callback) {
|
476
480
|
stream.on('error', callback);
|
477
481
|
stream.on('readable', onReadable);
|
@@ -501,39 +505,41 @@ function parseHeader(stream, callback) {
|
|
501
505
|
}
|
502
506
|
}
|
503
507
|
```
|
504
|
-
|
505
|
-
|
506
|
-
|
507
|
-
|
508
|
-
|
509
|
-
|
510
|
-
`
|
508
|
+
|
509
|
+
Note that, unlike [`stream.push(chunk)`][stream-push], `stream.unshift(chunk)`
|
510
|
+
will not end the reading process by resetting the internal reading state of the
|
511
|
+
stream. This can cause unexpected results if `unshift()` is called during a
|
512
|
+
read (i.e. from within a [`stream._read()`][stream-_read] implementation on a
|
513
|
+
custom stream). Following the call to `unshift()` with an immediate
|
514
|
+
[`stream.push('')`][stream-push] will reset the reading state appropriately,
|
515
|
+
however it is best to simply avoid calling `unshift()` while in the process of
|
516
|
+
performing a read.
|
511
517
|
|
512
518
|
#### readable.wrap(stream)
|
513
519
|
|
514
520
|
* `stream` {Stream} An "old style" readable stream
|
515
521
|
|
516
522
|
Versions of Node.js prior to v0.10 had streams that did not implement the
|
517
|
-
entire Streams API as it is today.
|
523
|
+
entire Streams API as it is today. (See [Compatibility][] for
|
518
524
|
more information.)
|
519
525
|
|
520
|
-
If you are using an older Node.js library that emits `'data'` events and
|
521
|
-
has a [`pause()`][] method that is advisory only, then you
|
522
|
-
`wrap()` method to create a [Readable][] stream that uses the old
|
523
|
-
as its data source.
|
526
|
+
If you are using an older Node.js library that emits [`'data'`][] events and
|
527
|
+
has a [`stream.pause()`][stream-pause] method that is advisory only, then you
|
528
|
+
can use the `wrap()` method to create a [Readable][] stream that uses the old
|
529
|
+
stream as its data source.
|
524
530
|
|
525
531
|
You will very rarely ever need to call this function, but it exists
|
526
532
|
as a convenience for interacting with old Node.js programs and libraries.
|
527
533
|
|
528
534
|
For example:
|
529
535
|
|
530
|
-
```
|
531
|
-
|
532
|
-
|
533
|
-
|
534
|
-
|
536
|
+
```js
|
537
|
+
const OldReader = require('./old-api-module.js').OldReader;
|
538
|
+
const Readable = require('stream').Readable;
|
539
|
+
const oreader = new OldReader;
|
540
|
+
const myReader = new Readable().wrap(oreader);
|
535
541
|
|
536
|
-
myReader.on('readable',
|
542
|
+
myReader.on('readable', () => {
|
537
543
|
myReader.read(); // etc.
|
538
544
|
});
|
539
545
|
```
|
@@ -541,13 +547,13 @@ myReader.on('readable', function() {
|
|
541
547
|
### Class: stream.Transform
|
542
548
|
|
543
549
|
Transform streams are [Duplex][] streams where the output is in some way
|
544
|
-
computed from the input.
|
545
|
-
[Writable][] interfaces.
|
550
|
+
computed from the input. They implement both the [Readable][] and
|
551
|
+
[Writable][] interfaces.
|
546
552
|
|
547
553
|
Examples of Transform streams include:
|
548
554
|
|
549
|
-
* [zlib streams][]
|
550
|
-
* [crypto streams][]
|
555
|
+
* [zlib streams][zlib]
|
556
|
+
* [crypto streams][crypto]
|
551
557
|
|
552
558
|
### Class: stream.Writable
|
553
559
|
|
@@ -558,22 +564,22 @@ that you are writing data *to*.
|
|
558
564
|
|
559
565
|
Examples of writable streams include:
|
560
566
|
|
561
|
-
* [
|
562
|
-
* [
|
567
|
+
* [HTTP requests, on the client][]
|
568
|
+
* [HTTP responses, on the server][]
|
563
569
|
* [fs write streams][]
|
564
|
-
* [zlib streams][]
|
565
|
-
* [crypto streams][]
|
566
|
-
* [
|
570
|
+
* [zlib streams][zlib]
|
571
|
+
* [crypto streams][crypto]
|
572
|
+
* [TCP sockets][]
|
567
573
|
* [child process stdin][]
|
568
|
-
* [process.stdout][], [process.stderr][]
|
574
|
+
* [`process.stdout`][], [`process.stderr`][]
|
569
575
|
|
570
576
|
#### Event: 'drain'
|
571
577
|
|
572
|
-
If a [`
|
573
|
-
event will indicate when it is appropriate to begin writing more data
|
578
|
+
If a [`stream.write(chunk)`][stream-write] call returns `false`, then the
|
579
|
+
`'drain'` event will indicate when it is appropriate to begin writing more data
|
574
580
|
to the stream.
|
575
581
|
|
576
|
-
```
|
582
|
+
```js
|
577
583
|
// Write the data to the supplied writable stream one million times.
|
578
584
|
// Be attentive to back-pressure.
|
579
585
|
function writeOneMillionTimes(writer, data, encoding, callback) {
|
@@ -603,37 +609,37 @@ function writeOneMillionTimes(writer, data, encoding, callback) {
|
|
603
609
|
|
604
610
|
#### Event: 'error'
|
605
611
|
|
606
|
-
* {Error
|
612
|
+
* {Error}
|
607
613
|
|
608
614
|
Emitted if there was an error when writing or piping data.
|
609
615
|
|
610
616
|
#### Event: 'finish'
|
611
617
|
|
612
|
-
When the [`end()`][] method has been called, and all data has
|
613
|
-
to the underlying system, this event is emitted.
|
618
|
+
When the [`stream.end()`][stream-end] method has been called, and all data has
|
619
|
+
been flushed to the underlying system, this event is emitted.
|
614
620
|
|
615
621
|
```javascript
|
616
622
|
var writer = getWritableStreamSomehow();
|
617
623
|
for (var i = 0; i < 100; i ++) {
|
618
|
-
writer.write('hello,
|
624
|
+
writer.write('hello, #${i}!\n');
|
619
625
|
}
|
620
626
|
writer.end('this is the end\n');
|
621
|
-
writer.on('finish',
|
627
|
+
writer.on('finish', () => {
|
622
628
|
console.error('all writes are now complete.');
|
623
629
|
});
|
624
630
|
```
|
625
631
|
|
626
632
|
#### Event: 'pipe'
|
627
633
|
|
628
|
-
* `src` {
|
634
|
+
* `src` {stream.Readable} source stream that is piping to this writable
|
629
635
|
|
630
|
-
This is emitted whenever the `pipe()` method is called on a readable
|
636
|
+
This is emitted whenever the [`stream.pipe()`][] method is called on a readable
|
631
637
|
stream, adding this writable to its set of destinations.
|
632
638
|
|
633
|
-
```
|
639
|
+
```js
|
634
640
|
var writer = getWritableStreamSomehow();
|
635
641
|
var reader = getReadableStreamSomehow();
|
636
|
-
writer.on('pipe',
|
642
|
+
writer.on('pipe', (src) => {
|
637
643
|
console.error('something is piping into the writer');
|
638
644
|
assert.equal(src, reader);
|
639
645
|
});
|
@@ -642,15 +648,16 @@ reader.pipe(writer);
|
|
642
648
|
|
643
649
|
#### Event: 'unpipe'
|
644
650
|
|
645
|
-
* `src` {[Readable][] Stream} The source stream that
|
651
|
+
* `src` {[Readable][] Stream} The source stream that
|
652
|
+
[unpiped][`stream.unpipe()`] this writable
|
646
653
|
|
647
|
-
This is emitted whenever the [`unpipe()`][] method is called on a
|
654
|
+
This is emitted whenever the [`stream.unpipe()`][] method is called on a
|
648
655
|
readable stream, removing this writable from its set of destinations.
|
649
656
|
|
650
|
-
```
|
657
|
+
```js
|
651
658
|
var writer = getWritableStreamSomehow();
|
652
659
|
var reader = getReadableStreamSomehow();
|
653
|
-
writer.on('unpipe',
|
660
|
+
writer.on('unpipe', (src) => {
|
654
661
|
console.error('something has stopped piping into the writer');
|
655
662
|
assert.equal(src, reader);
|
656
663
|
});
|
@@ -662,20 +669,22 @@ reader.unpipe(writer);
|
|
662
669
|
|
663
670
|
Forces buffering of all writes.
|
664
671
|
|
665
|
-
Buffered data will be flushed either at
|
672
|
+
Buffered data will be flushed either at [`stream.uncork()`][] or at
|
673
|
+
[`stream.end()`][stream-end] call.
|
666
674
|
|
667
675
|
#### writable.end([chunk][, encoding][, callback])
|
668
676
|
|
669
|
-
* `chunk` {String
|
677
|
+
* `chunk` {String|Buffer} Optional data to write
|
670
678
|
* `encoding` {String} The encoding, if `chunk` is a String
|
671
679
|
* `callback` {Function} Optional callback for when the stream is finished
|
672
680
|
|
673
|
-
Call this method when no more data will be written to the stream.
|
674
|
-
|
681
|
+
Call this method when no more data will be written to the stream. If supplied,
|
682
|
+
the callback is attached as a listener on the [`'finish'`][] event.
|
675
683
|
|
676
|
-
Calling [`write()`][] after calling
|
684
|
+
Calling [`stream.write()`][stream-write] after calling
|
685
|
+
[`stream.end()`][stream-end] will raise an error.
|
677
686
|
|
678
|
-
```
|
687
|
+
```js
|
679
688
|
// write 'hello, ' and then end with 'world!'
|
680
689
|
var file = fs.createWriteStream('example.txt');
|
681
690
|
file.write('hello, ');
|
@@ -691,26 +700,26 @@ Sets the default encoding for a writable stream.
|
|
691
700
|
|
692
701
|
#### writable.uncork()
|
693
702
|
|
694
|
-
Flush all data, buffered since
|
703
|
+
Flush all data, buffered since [`stream.cork()`][] call.
|
695
704
|
|
696
705
|
#### writable.write(chunk[, encoding][, callback])
|
697
706
|
|
698
|
-
* `chunk` {String
|
707
|
+
* `chunk` {String|Buffer} The data to write
|
699
708
|
* `encoding` {String} The encoding, if `chunk` is a String
|
700
709
|
* `callback` {Function} Callback for when this chunk of data is flushed
|
701
|
-
* Returns: {Boolean}
|
710
|
+
* Returns: {Boolean} `true` if the data was handled completely.
|
702
711
|
|
703
712
|
This method writes some data to the underlying system, and calls the
|
704
713
|
supplied callback once the data has been fully handled.
|
705
714
|
|
706
715
|
The return value indicates if you should continue writing right now.
|
707
716
|
If the data had to be buffered internally, then it will return
|
708
|
-
`false`.
|
717
|
+
`false`. Otherwise, it will return `true`.
|
709
718
|
|
710
|
-
This return value is strictly advisory.
|
711
|
-
even if it returns `false`.
|
712
|
-
memory, so it is best not to do this excessively.
|
713
|
-
the `drain` event before writing more data.
|
719
|
+
This return value is strictly advisory. You MAY continue to write,
|
720
|
+
even if it returns `false`. However, writes will be buffered in
|
721
|
+
memory, so it is best not to do this excessively. Instead, wait for
|
722
|
+
the [`'drain'`][] event before writing more data.
|
714
723
|
|
715
724
|
|
716
725
|
## API for Stream Implementors
|
@@ -719,11 +728,11 @@ the `drain` event before writing more data.
|
|
719
728
|
|
720
729
|
To implement any sort of stream, the pattern is the same:
|
721
730
|
|
722
|
-
1. Extend the appropriate parent class in your own subclass.
|
723
|
-
[`util.inherits`][] method is particularly helpful for this.)
|
731
|
+
1. Extend the appropriate parent class in your own subclass. (The
|
732
|
+
[`util.inherits()`][] method is particularly helpful for this.)
|
724
733
|
2. Call the appropriate parent class constructor in your constructor,
|
725
734
|
to be sure that the internal mechanisms are set up properly.
|
726
|
-
|
735
|
+
3. Implement one or more specific methods, as detailed below.
|
727
736
|
|
728
737
|
The class to extend and the method(s) to implement depend on the sort
|
729
738
|
of stream class you are writing:
|
@@ -750,7 +759,7 @@ of stream class you are writing:
|
|
750
759
|
<p>[Readable](#stream_class_stream_readable_1)</p>
|
751
760
|
</td>
|
752
761
|
<td>
|
753
|
-
<p><code>[_read][]</code></p>
|
762
|
+
<p><code>[_read][stream-_read]</code></p>
|
754
763
|
</td>
|
755
764
|
</tr>
|
756
765
|
<tr>
|
@@ -761,7 +770,7 @@ of stream class you are writing:
|
|
761
770
|
<p>[Writable](#stream_class_stream_writable_1)</p>
|
762
771
|
</td>
|
763
772
|
<td>
|
764
|
-
<p><code>[_write][]</code>, <code>_writev</code></p>
|
773
|
+
<p><code>[_write][stream-_write]</code>, <code>[_writev][stream-_writev]</code></p>
|
765
774
|
</td>
|
766
775
|
</tr>
|
767
776
|
<tr>
|
@@ -772,7 +781,7 @@ of stream class you are writing:
|
|
772
781
|
<p>[Duplex](#stream_class_stream_duplex_1)</p>
|
773
782
|
</td>
|
774
783
|
<td>
|
775
|
-
<p><code>[_read][]</code>, <code>[_write][]</code>, <code>_writev</code></p>
|
784
|
+
<p><code>[_read][stream-_read]</code>, <code>[_write][stream-_write]</code>, <code>[_writev][stream-_writev]</code></p>
|
776
785
|
</td>
|
777
786
|
</tr>
|
778
787
|
<tr>
|
@@ -783,45 +792,45 @@ of stream class you are writing:
|
|
783
792
|
<p>[Transform](#stream_class_stream_transform_1)</p>
|
784
793
|
</td>
|
785
794
|
<td>
|
786
|
-
<p><code>_transform</code>, <code>_flush</code></p>
|
795
|
+
<p><code>[_transform][stream-_transform]</code>, <code>[_flush][stream-_flush]</code></p>
|
787
796
|
</td>
|
788
797
|
</tr>
|
789
798
|
</table>
|
790
799
|
|
791
|
-
In your implementation code, it is very important to never call the
|
792
|
-
|
793
|
-
|
794
|
-
your streaming interfaces.
|
800
|
+
In your implementation code, it is very important to never call the methods
|
801
|
+
described in [API for Stream Consumers][]. Otherwise, you can potentially cause
|
802
|
+
adverse side effects in programs that consume your streaming interfaces.
|
795
803
|
|
796
804
|
### Class: stream.Duplex
|
797
805
|
|
798
806
|
<!--type=class-->
|
799
807
|
|
800
|
-
A "duplex" stream is one that is both Readable and Writable, such as a
|
801
|
-
|
808
|
+
A "duplex" stream is one that is both Readable and Writable, such as a TCP
|
809
|
+
socket connection.
|
802
810
|
|
803
811
|
Note that `stream.Duplex` is an abstract class designed to be extended
|
804
|
-
with an underlying implementation of the `_read(size)`
|
805
|
-
[`_write(chunk, encoding, callback)`][] methods as you
|
806
|
-
Readable or Writable stream class.
|
812
|
+
with an underlying implementation of the [`stream._read(size)`][stream-_read]
|
813
|
+
and [`stream._write(chunk, encoding, callback)`][stream-_write] methods as you
|
814
|
+
would with a Readable or Writable stream class.
|
807
815
|
|
808
|
-
Since JavaScript doesn't have multiple prototypal inheritance, this
|
809
|
-
|
810
|
-
|
811
|
-
`_read(n)` method as well as the
|
812
|
-
[`_write(chunk, encoding, callback)`][] method on extension
|
816
|
+
Since JavaScript doesn't have multiple prototypal inheritance, this class
|
817
|
+
prototypally inherits from Readable, and then parasitically from Writable. It is
|
818
|
+
thus up to the user to implement both the low-level
|
819
|
+
[`stream._read(n)`][stream-_read] method as well as the low-level
|
820
|
+
[`stream._write(chunk, encoding, callback)`][stream-_write] method on extension
|
821
|
+
duplex classes.
|
813
822
|
|
814
823
|
#### new stream.Duplex(options)
|
815
824
|
|
816
825
|
* `options` {Object} Passed to both Writable and Readable
|
817
826
|
constructors. Also has the following fields:
|
818
|
-
* `allowHalfOpen` {Boolean} Default=true
|
827
|
+
* `allowHalfOpen` {Boolean} Default = `true`. If set to `false`, then
|
819
828
|
the stream will automatically end the readable side when the
|
820
829
|
writable side ends and vice versa.
|
821
|
-
* `readableObjectMode` {Boolean} Default=false
|
830
|
+
* `readableObjectMode` {Boolean} Default = `false`. Sets `objectMode`
|
822
831
|
for readable side of the stream. Has no effect if `objectMode`
|
823
832
|
is `true`.
|
824
|
-
* `writableObjectMode` {Boolean} Default=false
|
833
|
+
* `writableObjectMode` {Boolean} Default = `false`. Sets `objectMode`
|
825
834
|
for writable side of the stream. Has no effect if `objectMode`
|
826
835
|
is `true`.
|
827
836
|
|
@@ -832,7 +841,7 @@ initialized.
|
|
832
841
|
### Class: stream.PassThrough
|
833
842
|
|
834
843
|
This is a trivial implementation of a [Transform][] stream that simply
|
835
|
-
passes the input bytes across to the output.
|
844
|
+
passes the input bytes across to the output. Its purpose is mainly
|
836
845
|
for examples and testing, but there are occasionally use cases where
|
837
846
|
it can come in handy as a building block for novel sorts of streams.
|
838
847
|
|
@@ -841,10 +850,10 @@ it can come in handy as a building block for novel sorts of streams.
|
|
841
850
|
<!--type=class-->
|
842
851
|
|
843
852
|
`stream.Readable` is an abstract class designed to be extended with an
|
844
|
-
underlying implementation of the [`_read(size)`][] method.
|
853
|
+
underlying implementation of the [`stream._read(size)`][stream-_read] method.
|
845
854
|
|
846
|
-
Please see
|
847
|
-
streams in your programs.
|
855
|
+
Please see [API for Stream Consumers][] for how to consume
|
856
|
+
streams in your programs. What follows is an explanation of how to
|
848
857
|
implement Readable streams in your programs.
|
849
858
|
|
850
859
|
#### new stream.Readable([options])
|
@@ -852,12 +861,14 @@ implement Readable streams in your programs.
|
|
852
861
|
* `options` {Object}
|
853
862
|
* `highWaterMark` {Number} The maximum number of bytes to store in
|
854
863
|
the internal buffer before ceasing to read from the underlying
|
855
|
-
resource.
|
864
|
+
resource. Default = `16384` (16kb), or `16` for `objectMode` streams
|
856
865
|
* `encoding` {String} If specified, then buffers will be decoded to
|
857
|
-
strings using the specified encoding.
|
866
|
+
strings using the specified encoding. Default = `null`
|
858
867
|
* `objectMode` {Boolean} Whether this stream should behave
|
859
|
-
as a stream of objects. Meaning that stream.read(n) returns
|
860
|
-
a single value instead of a Buffer of size n.
|
868
|
+
as a stream of objects. Meaning that [`stream.read(n)`][stream-read] returns
|
869
|
+
a single value instead of a Buffer of size n. Default = `false`
|
870
|
+
* `read` {Function} Implementation for the [`stream._read()`][stream-_read]
|
871
|
+
method.
|
861
872
|
|
862
873
|
In classes that extend the Readable class, make sure to call the
|
863
874
|
Readable constructor so that the buffering settings can be properly
|
@@ -871,29 +882,31 @@ Note: **Implement this method, but do NOT call it directly.**
|
|
871
882
|
|
872
883
|
This method is prefixed with an underscore because it is internal to the
|
873
884
|
class that defines it and should only be called by the internal Readable
|
874
|
-
class methods. All Readable stream implementations must provide a _read
|
885
|
+
class methods. All Readable stream implementations must provide a \_read
|
875
886
|
method to fetch data from the underlying resource.
|
876
887
|
|
877
|
-
When _read is called, if data is available from the resource, `_read`
|
878
|
-
start pushing that data into the read queue by calling
|
879
|
-
`_read` should continue reading from
|
880
|
-
|
881
|
-
|
882
|
-
more data from the resource and pushing
|
888
|
+
When `_read()` is called, if data is available from the resource, the `_read()`
|
889
|
+
implementation should start pushing that data into the read queue by calling
|
890
|
+
[`this.push(dataChunk)`][stream-push]. `_read()` should continue reading from
|
891
|
+
the resource and pushing data until push returns `false`, at which point it
|
892
|
+
should stop reading from the resource. Only when `_read()` is called again after
|
893
|
+
it has stopped should it start reading more data from the resource and pushing
|
894
|
+
that data onto the queue.
|
883
895
|
|
884
896
|
Note: once the `_read()` method is called, it will not be called again until
|
885
|
-
the `push` method is called.
|
897
|
+
the [`stream.push()`][stream-push] method is called.
|
886
898
|
|
887
|
-
The `size` argument is advisory.
|
899
|
+
The `size` argument is advisory. Implementations where a "read" is a
|
888
900
|
single call that returns data can use this to know how much data to
|
889
|
-
fetch.
|
901
|
+
fetch. Implementations where that is not relevant, such as TCP or
|
890
902
|
TLS, may ignore this argument, and simply provide data whenever it
|
891
|
-
becomes available.
|
892
|
-
`size` bytes are available before calling [`stream.push(chunk)`][].
|
903
|
+
becomes available. There is no need, for example to "wait" until
|
904
|
+
`size` bytes are available before calling [`stream.push(chunk)`][stream-push].
|
893
905
|
|
894
906
|
#### readable.push(chunk[, encoding])
|
895
907
|
|
896
|
-
|
908
|
+
|
909
|
+
* `chunk` {Buffer|Null|String} Chunk of data to push into the read queue
|
897
910
|
* `encoding` {String} Encoding of String chunks. Must be a valid
|
898
911
|
Buffer encoding, such as `'utf8'` or `'ascii'`
|
899
912
|
* return {Boolean} Whether or not more pushes should be performed
|
@@ -906,15 +919,15 @@ into the queue for subsequent stream processors to consume. If `null` is
|
|
906
919
|
passed, it signals the end of the stream (EOF), after which no more data
|
907
920
|
can be written.
|
908
921
|
|
909
|
-
The data added with `push` can be pulled out by calling the
|
910
|
-
when the `'readable'`event fires.
|
922
|
+
The data added with `push()` can be pulled out by calling the
|
923
|
+
[`stream.read()`][stream-read] method when the [`'readable'`][] event fires.
|
911
924
|
|
912
|
-
This API is designed to be as flexible as possible.
|
925
|
+
This API is designed to be as flexible as possible. For example,
|
913
926
|
you may be wrapping a lower-level source which has some sort of
|
914
|
-
pause/resume mechanism, and a data callback.
|
927
|
+
pause/resume mechanism, and a data callback. In those cases, you
|
915
928
|
could wrap the low-level source object by doing something like this:
|
916
929
|
|
917
|
-
```
|
930
|
+
```js
|
918
931
|
// source is an object with readStop() and readStart() methods,
|
919
932
|
// and an `ondata` member that gets called when it has data, and
|
920
933
|
// an `onend` member that gets called when the data is over.
|
@@ -925,18 +938,17 @@ function SourceWrapper(options) {
 Readable.call(this, options);
 
 this._source = getLowlevelSourceObject();
-var self = this;
 
 // Every time there's data, we push it into the internal buffer.
-this._source.ondata =
+this._source.ondata = (chunk) => {
 // if push() returns false, then we need to stop reading from source
-if (!
-
+if (!this.push(chunk))
+this._source.readStop();
 };
 
 // When the source ends, we push the EOF-signaling `null` chunk
-this._source.onend =
-
+this._source.onend = () => {
+this.push(null);
 };
 }
 
@@ -951,12 +963,12 @@ SourceWrapper.prototype._read = function(size) {
 
 <!--type=example-->
 
-This is a basic example of a Readable stream.
+This is a basic example of a Readable stream. It emits the numerals
 from 1 to 1,000,000 in ascending order, and then ends.
 
-```
-
-
+```js
+const Readable = require('stream').Readable;
+const util = require('util');
 util.inherits(Counter, Readable);
 
 function Counter(opt) {
@@ -979,24 +991,25 @@ Counter.prototype._read = function() {
 
 #### Example: SimpleProtocol v1 (Sub-optimal)
 
-This is similar to the `parseHeader` function described
-implemented as a custom stream.
-does not convert the incoming data to a
+This is similar to the `parseHeader` function described
+[here](#stream_readable_unshift_chunk), but implemented as a custom stream.
+Also, note that this implementation does not convert the incoming data to a
+string.
 
-However, this would be better implemented as a [Transform][] stream.
-
+However, this would be better implemented as a [Transform][] stream. See
+[SimpleProtocol v2][] for a better implementation.
 
-```
+```js
 // A parser for a simple data protocol.
 // The "header" is a JSON object, followed by 2 \n characters, and
 // then a message body.
 //
 // NOTE: This can be done more simply as a Transform stream!
-// Using Readable directly for this is sub-optimal.
+// Using Readable directly for this is sub-optimal. See the
 // alternative example below under the Transform section.
 
-
-
+const Readable = require('stream').Readable;
+const util = require('util');
 
 util.inherits(SimpleProtocol, Readable);
 
@@ -1012,13 +1025,13 @@ function SimpleProtocol(source, options) {
 this._source = source;
 
 var self = this;
-source.on('end',
+source.on('end', () => {
 self.push(null);
 });
 
 // give it a kick whenever the source is readable
 // read(0) will not consume any bytes
-source.on('readable',
+source.on('readable', () => {
 self.read(0);
 });
 
@@ -1098,19 +1111,24 @@ connected in some way to the input, such as a [zlib][] stream or a
 [crypto][] stream.
 
 There is no requirement that the output be the same size as the input,
-the same number of chunks, or arrive at the same time.
+the same number of chunks, or arrive at the same time. For example, a
 Hash stream will only ever have a single chunk of output which is
-provided when the input is ended.
+provided when the input is ended. A zlib stream will produce output
 that is either much smaller or much larger than its input.
 
-Rather than implement the [`_read()`][] and
-
-
+Rather than implement the [`stream._read()`][stream-_read] and
+[`stream._write()`][stream-_write] methods, Transform classes must implement the
+[`stream._transform()`][stream-_transform] method, and may optionally
+also implement the [`stream._flush()`][stream-_flush] method. (See below.)
 
 #### new stream.Transform([options])
 
 * `options` {Object} Passed to both Writable and Readable
-constructors.
+constructors. Also has the following fields:
+* `transform` {Function} Implementation for the
+[`stream._transform()`][stream-_transform] method.
+* `flush` {Function} Implementation for the [`stream._flush()`][stream-_flush]
+method.
 
 In classes that extend the Transform class, make sure to call the
 constructor so that the buffering settings can be properly
@@ -1118,11 +1136,12 @@ initialized.
 
 #### Events: 'finish' and 'end'
 
-The [`finish`][] and [`end`][] events are from the parent Writable
-and Readable classes respectively. The `finish` event is fired after
-
-`end` is fired after all data has
-in `_flush`
+The [`'finish'`][] and [`'end'`][] events are from the parent Writable
+and Readable classes respectively. The `'finish'` event is fired after
+[`stream.end()`][stream-end] is called and all chunks have been processed by
+[`stream._transform()`][stream-_transform], `'end'` is fired after all data has
+been output which is after the callback in [`stream._flush()`][stream-_flush]
+has been called.
 
 #### transform.\_flush(callback)
 
@@ -1134,26 +1153,26 @@ by child classes, and if so, will be called by the internal Transform
 class methods only.
 
 In some cases, your transform operation may need to emit a bit more
-data at the end of the stream.
+data at the end of the stream. For example, a `Zlib` compression
 stream will store up some internal state so that it can optimally
-compress the output.
+compress the output. At the end, however, it needs to do the best it
 can with what is left, so that the data will be complete.
 
-In those cases, you can implement a `_flush` method, which will be
+In those cases, you can implement a `_flush()` method, which will be
 called at the very end, after all the written data is consumed, but
-before emitting `end` to signal the end of the readable side.
-like with `_transform
-times, as appropriate, and call `callback`
-complete.
+before emitting [`'end'`][] to signal the end of the readable side. Just
+like with [`stream._transform()`][stream-_transform], call
+`transform.push(chunk)` zero or more times, as appropriate, and call `callback`
+when the flush operation is complete.
 
 This method is prefixed with an underscore because it is internal to
 the class that defines it, and should not be called directly by user
-programs.
+programs. However, you **are** expected to override this method in
 your own extension classes.
 
 #### transform.\_transform(chunk, encoding, callback)
 
-* `chunk` {Buffer
+* `chunk` {Buffer|String} The chunk to be transformed. Will **always**
 be a buffer unless the `decodeStrings` option was set to `false`.
 * `encoding` {String} If the chunk is a string, then this is the
 encoding type. If chunk is a buffer, then this is the special
@@ -1165,12 +1184,12 @@ Note: **This function MUST NOT be called directly.** It should be
 implemented by child classes, and called by the internal Transform
 class methods only.
 
-All Transform stream implementations must provide a `_transform`
+All Transform stream implementations must provide a `_transform()`
 method to accept input and produce output.
 
-`_transform` should do whatever has to be done in this specific
+`_transform()` should do whatever has to be done in this specific
 Transform class, to handle the bytes being written, and pass them off
-to the readable portion of the interface.
+to the readable portion of the interface. Do asynchronous I/O,
 process things, and so on.
 
 Call `transform.push(outputChunk)` 0 or more times to generate output
@@ -1178,12 +1197,12 @@ from this input chunk, depending on how much data you want to output
 as a result of this chunk.
 
 Call the callback function only when the current chunk is completely
-consumed.
+consumed. Note that there may or may not be output as a result of any
 particular input chunk. If you supply a second argument to the callback
 it will be passed to the push method. In other words the following are
 equivalent:
 
-```
+```js
 transform.prototype._transform = function (data, encoding, callback) {
 this.push(data);
 callback();
@@ -1196,22 +1215,23 @@ transform.prototype._transform = function (data, encoding, callback) {
 
 This method is prefixed with an underscore because it is internal to
 the class that defines it, and should not be called directly by user
-programs.
+programs. However, you **are** expected to override this method in
 your own extension classes.
 
 #### Example: `SimpleProtocol` parser v2
 
-The example
-simply by using the higher level
-the `parseHeader` and `SimpleProtocol
+The example [here](#stream_example_simpleprotocol_v1_sub_optimal) of a simple
+protocol parser can be implemented simply by using the higher level
+[Transform][] stream class, similar to the `parseHeader` and `SimpleProtocol
+v1` examples.
 
 In this example, rather than providing the input as an argument, it
 would be piped into the parser, which is a more idiomatic Node.js stream
 approach.
 
 ```javascript
-
-
+const util = require('util');
+const Transform = require('stream').Transform;
 util.inherits(SimpleProtocol, Transform);
 
 function SimpleProtocol(options) {
@@ -1282,22 +1302,30 @@ SimpleProtocol.prototype._transform = function(chunk, encoding, done) {
 <!--type=class-->
 
 `stream.Writable` is an abstract class designed to be extended with an
-underlying implementation of the
+underlying implementation of the
+[`stream._write(chunk, encoding, callback)`][stream-_write] method.
 
-Please see
-writable streams in your programs.
+Please see [API for Stream Consumers][] for how to consume
+writable streams in your programs. What follows is an explanation of
 how to implement Writable streams in your programs.
 
 #### new stream.Writable([options])
 
 * `options` {Object}
-* `highWaterMark` {Number} Buffer level when
-returning false
+* `highWaterMark` {Number} Buffer level when
+[`stream.write()`][stream-write] starts returning `false`. Default = `16384`
+(16kb), or `16` for `objectMode` streams.
 * `decodeStrings` {Boolean} Whether or not to decode strings into
-Buffers before passing them to [`_write()`][].
-
-
-
+Buffers before passing them to [`stream._write()`][stream-_write].
+Default = `true`
+* `objectMode` {Boolean} Whether or not the
+[`stream.write(anyObj)`][stream-write] is a valid operation. If set you can
+write arbitrary data instead of only `Buffer` / `String` data.
+Default = `false`
+* `write` {Function} Implementation for the
+[`stream._write()`][stream-_write] method.
+* `writev` {Function} Implementation for the
+[`stream._writev()`][stream-_writev] method.
 
 In classes that extend the Writable class, make sure to call the
 constructor so that the buffering settings can be properly
@@ -1305,7 +1333,7 @@ initialized.
 
 #### writable.\_write(chunk, encoding, callback)
 
-* `chunk` {Buffer
+* `chunk` {Buffer|String} The chunk to be written. Will **always**
 be a buffer unless the `decodeStrings` option was set to `false`.
 * `encoding` {String} If the chunk is a string, then this is the
 encoding type. If chunk is a buffer, then this is the special
@@ -1313,8 +1341,9 @@ initialized.
 * `callback` {Function} Call this function (optionally with an error
 argument) when you are done processing the supplied chunk.
 
-All Writable stream implementations must provide a
-method to send data to the underlying
+All Writable stream implementations must provide a
+[`stream._write()`][stream-_write] method to send data to the underlying
+resource.
 
 Note: **This function MUST NOT be called directly.** It should be
 implemented by child classes, and called by the internal Writable
@@ -1325,20 +1354,20 @@ signal that the write completed successfully or with an error.
 
 If the `decodeStrings` flag is set in the constructor options, then
 `chunk` may be a string rather than a Buffer, and `encoding` will
-indicate the sort of string that it is.
+indicate the sort of string that it is. This is to support
 implementations that have an optimized handling for certain string
-data encodings.
+data encodings. If you do not explicitly set the `decodeStrings`
 option to `false`, then you can safely ignore the `encoding` argument,
 and assume that `chunk` will always be a Buffer.
 
 This method is prefixed with an underscore because it is internal to
 the class that defines it, and should not be called directly by user
-programs.
+programs. However, you **are** expected to override this method in
 your own extension classes.
 
 #### writable.\_writev(chunks, callback)
 
-* `chunks` {Array} The chunks to be written.
+* `chunks` {Array} The chunks to be written. Each chunk has following
 format: `{ chunk: ..., encoding: ... }`.
 * `callback` {Function} Call this function (optionally with an error
 argument) when you are done processing the supplied chunks.
@@ -1348,7 +1377,7 @@ implemented by child classes, and called by the internal Writable
 class methods only.
 
 This function is completely optional to implement. In most cases it is
-unnecessary.
+unnecessary. If implemented, it will be called with all the chunks
 that are buffered in the write queue.
 
 
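As an aside, a minimal sketch of the `stream._write()` contract covered by the hunks above. It is illustrative only and not part of this package; the `LineLogger` name and its use of stdout as the "underlying resource" are made up:

```js
'use strict';
const Writable = require('stream').Writable;
const util = require('util');

util.inherits(LineLogger, Writable);

function LineLogger(options) {
  Writable.call(this, options);
}

// `chunk` is a Buffer unless `decodeStrings: false` was passed to the
// constructor. Call `callback` (optionally with an Error) once the chunk
// has been handed to the underlying resource, here simply stdout.
LineLogger.prototype._write = function(chunk, encoding, callback) {
  process.stdout.write('LOG: ' + chunk.toString());
  callback();
};

const logger = new LineLogger();
logger.write('hello\n');
logger.end('world\n');
```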
@@ -1356,14 +1385,16 @@ that are buffered in the write queue.
 
 <!--type=misc-->
 
-In simple cases there is now the added benefit of being able to construct a
+In simple cases there is now the added benefit of being able to construct a
+stream without inheritance.
 
 This can be done by passing the appropriate methods as constructor options:
 
 Examples:
 
 ### Duplex
-
+
+```js
 var duplex = new stream.Duplex({
 read: function(n) {
 // sets this._read under the hood
@@ -1400,7 +1431,8 @@ var duplex = new stream.Duplex({
 ```
 
 ### Readable
-
+
+```js
 var readable = new stream.Readable({
 read: function(n) {
 // sets this._read under the hood
@@ -1413,7 +1445,8 @@ var readable = new stream.Readable({
 ```
 
 ### Transform
-
+
+```js
 var transform = new stream.Transform({
 transform: function(chunk, encoding, next) {
 // sets this._transform under the hood
@@ -1436,7 +1469,8 @@ var transform = new stream.Transform({
 ```
 
 ### Writable
-
+
+```js
 var writable = new stream.Writable({
 write: function(chunk, encoding, next) {
 // sets this._write under the hood
@@ -1467,21 +1501,21 @@ var writable = new stream.Writable({
 <!--type=misc-->
 
 Both Writable and Readable streams will buffer data on an internal
-object which can be retrieved from `_writableState.getBuffer()` or
+object which can be retrieved from `_writableState.getBuffer()` or
 `_readableState.buffer`, respectively.
 
 The amount of data that will potentially be buffered depends on the
 `highWaterMark` option which is passed into the constructor.
 
 Buffering in Readable streams happens when the implementation calls
-[`stream.push(chunk)`][].
-`stream.read()
-is consumed.
+[`stream.push(chunk)`][stream-push]. If the consumer of the Stream does not
+call [`stream.read()`][stream-read], then the data will sit in the internal
+queue until it is consumed.
 
 Buffering in Writable streams happens when the user calls
-[`stream.write(chunk)`][] repeatedly, even when
+[`stream.write(chunk)`][stream-write] repeatedly, even when it returns `false`.
 
-The purpose of streams, especially with the `pipe()` method, is to
+The purpose of streams, especially with the [`stream.pipe()`][] method, is to
 limit the buffering of data to acceptable levels, so that sources and
 destinations of varying speed will not overwhelm the available memory.
 
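A small consumer-side sketch of the back-pressure behaviour described above: respect the `false` return value of `write()` and wait for `'drain'` instead of buffering without bound. Illustrative only; the output path is made up:

```js
'use strict';
const fs = require('fs');

const out = fs.createWriteStream('/tmp/drain-example.log'); // hypothetical path

var i = 0;
function writeSome() {
  var ok = true;
  while (i < 1000000 && ok) {
    // write() returns false once the internal buffer passes highWaterMark
    ok = out.write('line ' + i + '\n');
    i++;
  }
  if (i < 1000000)
    out.once('drain', writeSome); // resume once the buffer has flushed
  else
    out.end();
}
writeSome();
```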
@@ -1492,36 +1526,37 @@ destinations of varying speed will not overwhelm the available memory.
 In versions of Node.js prior to v0.10, the Readable stream interface was
 simpler, but also less powerful and less useful.
 
-* Rather than waiting for you to call the `read()` method,
-events would start emitting immediately.
-I/O to decide how to handle data, then you had to store the chunks
+* Rather than waiting for you to call the [`stream.read()`][stream-read] method,
+[`'data'`][] events would start emitting immediately. If you needed to do
+some I/O to decide how to handle data, then you had to store the chunks
 in some kind of buffer so that they would not be lost.
-* The [`pause()`][] method was advisory, rather than
-meant that you still had to be prepared to receive
-even when the stream was in a paused state.
+* The [`stream.pause()`][stream-pause] method was advisory, rather than
+guaranteed. This meant that you still had to be prepared to receive
+[`'data'`][] events even when the stream was in a paused state.
 
-In Node.js v0.10, the Readable class
+In Node.js v0.10, the [Readable][] class was added.
 For backwards compatibility with older Node.js programs, Readable streams
-switch into "flowing mode" when a `'data'` event handler is added, or
-when the [`resume()`][] method is called.
-you are not using the new `read()` method
-no longer have to worry about losing
+switch into "flowing mode" when a [`'data'`][] event handler is added, or
+when the [`stream.resume()`][stream-resume] method is called. The effect is
+that, even if you are not using the new [`stream.read()`][stream-read] method
+and [`'readable'`][] event, you no longer have to worry about losing
+[`'data'`][] chunks.
 
-Most programs will continue to function normally.
+Most programs will continue to function normally. However, this
 introduces an edge case in the following conditions:
 
-* No [`'data'`
-* The [`resume()`][] method is never called.
+* No [`'data'`][] event handler is added.
+* The [`stream.resume()`][stream-resume] method is never called.
 * The stream is not piped to any writable destination.
 
 For example, consider the following code:
 
-```
+```js
 // WARNING! BROKEN!
-net.createServer(
+net.createServer((socket) => {
 
 // we add an 'end' method, but never consume the data
-socket.on('end',
+socket.on('end', () => {
 // It will never get here.
 socket.end('I got your message (but didnt read it)\n');
 });
@@ -1530,17 +1565,17 @@ net.createServer(function(socket) {
 ```
 
 In versions of Node.js prior to v0.10, the incoming message data would be
-simply discarded.
+simply discarded. However, in Node.js v0.10 and beyond,
 the socket will remain paused forever.
 
-The workaround in this situation is to call the
-start the flow of data:
+The workaround in this situation is to call the
+[`stream.resume()`][stream-resume] method to start the flow of data:
 
-```
+```js
 // Workaround
-net.createServer(
+net.createServer((socket) => {
 
-socket.on('end',
+socket.on('end', () => {
 socket.end('I got your message (but didnt read it)\n');
 });
 
@@ -1552,7 +1587,7 @@ net.createServer(function(socket) {
 
 In addition to new Readable streams switching into flowing mode,
 pre-v0.10 style streams can be wrapped in a Readable class using the
-`wrap()` method.
+[`stream.wrap()`][] method.
 
 
 ### Object Mode
@@ -1565,33 +1600,33 @@ Streams that are in **object mode** can emit generic JavaScript values
 other than Buffers and Strings.
 
 A Readable stream in object mode will always return a single item from
-a call to `stream.read(size)
-is.
+a call to [`stream.read(size)`][stream-read], regardless of what the size
+argument is.
 
 A Writable stream in object mode will always ignore the `encoding`
-argument to `stream.write(data, encoding)
+argument to [`stream.write(data, encoding)`][stream-write].
 
 The special value `null` still retains its special value for object
-mode streams.
-return value from `stream.read()` indicates that there is no more
-data, and [`stream.push(null)`][] will signal the end of stream data
+mode streams. That is, for object mode readable streams, `null` as a
+return value from [`stream.read()`][stream-read] indicates that there is no more
+data, and [`stream.push(null)`][stream-push] will signal the end of stream data
 (`EOF`).
 
-No streams in Node.js core are object mode streams.
+No streams in Node.js core are object mode streams. This pattern is only
 used by userland streaming libraries.
 
 You should set `objectMode` in your stream child class constructor on
-the options object.
+the options object. Setting `objectMode` mid-stream is not safe.
 
 For Duplex streams `objectMode` can be set exclusively for readable or
 writable side with `readableObjectMode` and `writableObjectMode`
 respectively. These options can be used to implement parsers and
 serializers with Transform streams.
 
-```
-
-
-
+```js
+const util = require('util');
+const StringDecoder = require('string_decoder').StringDecoder;
+const Transform = require('stream').Transform;
 util.inherits(JSONParseStream, Transform);
 
 // Gets \n-delimited JSON string data, and emits the parsed objects
@@ -1646,12 +1681,12 @@ JSONParseStream.prototype._flush = function(cb) {
 
 There are some cases where you want to trigger a refresh of the
 underlying readable stream mechanisms, without actually consuming any
-data.
+data. In that case, you can call `stream.read(0)`, which will always
 return null.
 
 If the internal read buffer is below the `highWaterMark`, and the
-stream is not currently reading, then calling `read(0)` will trigger
-a low-level `_read` call.
+stream is not currently reading, then calling `stream.read(0)` will trigger
+a low-level [`stream._read()`][stream-_read] call.
 
 There is almost never a need to do this. However, you will see some
 cases in Node.js's internals where this is done, particularly in the
@@ -1660,71 +1695,66 @@ Readable stream class internals.
 ### `stream.push('')`
 
 Pushing a zero-byte string or Buffer (when not in [Object mode][]) has an
-interesting side effect.
-[`stream.push()`][], it will end the `reading` process.
+interesting side effect. Because it *is* a call to
+[`stream.push()`][stream-push], it will end the `reading` process. However, it
 does *not* add any data to the readable buffer, so there's nothing for
 a user to consume.
 
 Very rarely, there are cases where you have no data to provide now,
 but the consumer of your stream (or, perhaps, another bit of your own
-code) will know when to check again, by calling `stream.read(0)
-those cases, you *may* call `stream.push('')`.
+code) will know when to check again, by calling [`stream.read(0)`][stream-read].
+In those cases, you *may* call `stream.push('')`.
 
 So far, the only use case for this functionality is in the
-[tls.CryptoStream][] class, which is deprecated in Node.js/io.js v1.0.
+[`tls.CryptoStream`][] class, which is deprecated in Node.js/io.js v1.0. If you
 find that you have to use `stream.push('')`, please consider another
 approach, because it almost certainly indicates that something is
 horribly wrong.
 
-[
-[
-[
-[`
-[`
-[`
-[`
-[
-[
-[
-[
-[
-[
-[
-[
-[
-[
-[crypto streams]: crypto.html
-[crypto]: crypto.html
-[tls.CryptoStream]: https://nodejs.org/docs/v5.1.0/api/tls.html#tls_class_cryptostream
-[process.stdin]: https://nodejs.org/docs/v5.1.0/api/process.html#process_process_stdin
-[stdout]: https://nodejs.org/docs/v5.1.0/api/process.html#process_process_stdout
-[process.stdout]: https://nodejs.org/docs/v5.1.0/api/process.html#process_process_stdout
-[process.stderr]: https://nodejs.org/docs/v5.1.0/api/process.html#process_process_stderr
-[child process stdout and stderr]: https://nodejs.org/docs/v5.1.0/api/child_process.html#child_process_child_stdout
-[child process stdin]: https://nodejs.org/docs/v5.1.0/api/child_process.html#child_process_child_stdin
+[`'data'`]: #stream_event_data
+[`'drain'`]: #stream_event_drain
+[`'end'`]: #stream_event_end
+[`'finish'`]: #stream_event_finish
+[`'readable'`]: #stream_event_readable
+[`buf.toString(encoding)`]: https://nodejs.org/docs/v5.8.0/api/buffer.html#buffer_buf_tostring_encoding_start_end
+[`EventEmitter`]: https://nodejs.org/docs/v5.8.0/api/events.html#events_class_eventemitter
+[`process.stderr`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stderr
+[`process.stdin`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stdin
+[`process.stdout`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stdout
+[`stream.cork()`]: #stream_writable_cork
+[`stream.pipe()`]: #stream_readable_pipe_destination_options
+[`stream.uncork()`]: #stream_writable_uncork
+[`stream.unpipe()`]: #stream_readable_unpipe_destination
+[`stream.wrap()`]: #stream_readable_wrap_stream
+[`tls.CryptoStream`]: https://nodejs.org/docs/v5.8.0/api/tls.html#tls_class_cryptostream
+[`util.inherits()`]: https://nodejs.org/docs/v5.8.0/api/util.html#util_util_inherits_constructor_superconstructor
 [API for Stream Consumers]: #stream_api_for_stream_consumers
 [API for Stream Implementors]: #stream_api_for_stream_implementors
-[
-[
+[child process stdin]: https://nodejs.org/docs/v5.8.0/api/child_process.html#child_process_child_stdin
+[child process stdout and stderr]: https://nodejs.org/docs/v5.8.0/api/child_process.html#child_process_child_stdout
+[Compatibility]: #stream_compatibility_with_older_node_js_versions
+[crypto]: crypto.html
 [Duplex]: #stream_class_stream_duplex
+[fs read streams]: https://nodejs.org/docs/v5.8.0/api/fs.html#fs_class_fs_readstream
+[fs write streams]: https://nodejs.org/docs/v5.8.0/api/fs.html#fs_class_fs_writestream
+[HTTP requests, on the client]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_clientrequest
+[HTTP responses, on the server]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_serverresponse
+[http-incoming-message]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_incomingmessage
+[Object mode]: #stream_object_mode
+[Readable]: #stream_class_stream_readable
+[SimpleProtocol v2]: #stream_example_simpleprotocol_parser_v2
+[stream-_flush]: #stream_transform_flush_callback
+[stream-_read]: #stream_readable_read_size_1
+[stream-_transform]: #stream_transform_transform_chunk_encoding_callback
+[stream-_write]: #stream_writable_write_chunk_encoding_callback_1
+[stream-_writev]: #stream_writable_writev_chunks_callback
+[stream-end]: #stream_writable_end_chunk_encoding_callback
+[stream-pause]: #stream_readable_pause
+[stream-push]: #stream_readable_push_chunk_encoding
+[stream-read]: #stream_readable_read_size
+[stream-resume]: #stream_readable_resume
+[stream-write]: #stream_writable_write_chunk_encoding_callback
+[TCP sockets]: https://nodejs.org/docs/v5.8.0/api/net.html#net_class_net_socket
 [Transform]: #stream_class_stream_transform
-[
-[
-[`_read(size)`]: #stream_readable_read_size_1
-[`_read()`]: #stream_readable_read_size_1
-[_read]: #stream_readable_read_size_1
-[`writable.write(chunk)`]: #stream_writable_write_chunk_encoding_callback
-[`write(chunk, encoding, callback)`]: #stream_writable_write_chunk_encoding_callback
-[`write()`]: #stream_writable_write_chunk_encoding_callback
-[`stream.write(chunk)`]: #stream_writable_write_chunk_encoding_callback
-[`_write(chunk, encoding, callback)`]: #stream_writable_write_chunk_encoding_callback_1
-[`_write()`]: #stream_writable_write_chunk_encoding_callback_1
-[_write]: #stream_writable_write_chunk_encoding_callback_1
-[`util.inherits`]: https://nodejs.org/docs/v5.1.0/api/util.html#util_util_inherits_constructor_superconstructor
-[`end()`]: #stream_writable_end_chunk_encoding_callback
-[`'data'` event]: #stream_event_data
-[`resume()`]: #stream_readable_resume
-[`readable.resume()`]: #stream_readable_resume
-[`pause()`]: #stream_readable_pause
-[`unpipe()`]: #stream_readable_unpipe_destination
-[`pipe()`]: #stream_readable_pipe_destination_options
+[Writable]: #stream_class_stream_writable
+[zlib]: zlib.html