async-io 1.27.0 → 1.27.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/README.md +6 -0
- data/lib/async/io.rb +1 -3
- data/lib/async/io/stream.rb +33 -31
- data/lib/async/io/version.rb +1 -1
- data/spec/async/io/c10k_spec.rb +1 -1
- metadata +3 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: d7128e6848ad5bdee711dc7a8d1b2b13be4ced2b32cc18c3f067375a968ce59a
|
4
|
+
data.tar.gz: 3b2537d7467f536c65de6740042e59ae257dac49ebc07136167e239a47c2c356
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 80a3e758f2288cf0b01d9daad23914d2bf4b72a933d06483759bfdaf9fb38add65539dc4b4854a0bc0e1237baf72ab716f46c5e3401153e8bd6367f8c93b6379
|
7
|
+
data.tar.gz: 66221768e8c2b71f448868d769bdffe2faea1af137ebe7cbe1e223763bb92424633f02f6ada580eb0b6a4f3f5f777202b3342d0dc7509e585c41a3fcf7154e99
|
data/README.md
CHANGED
@@ -123,6 +123,12 @@ Async do |task|
|
|
123
123
|
end
|
124
124
|
```
|
125
125
|
|
126
|
+
### Deferred Buffering
|
127
|
+
|
128
|
+
`Async::IO::Stream.new(..., deferred:true)` creates a deferred stream which increases latency slightly, but reduces the number of total packets sent. It does this by combining all calls `Stream#flush` within a single iteration of the reactor. This is typically more useful on the client side, but can also be useful on the server side when individual packets have high latency. It should be preferable to send one 100 byte packet than 10x 10 byte packets.
|
129
|
+
|
130
|
+
Servers typically only deal with one request per iteration of the reactor so it's less useful. Clients which make multiple requests can benefit significantly e.g. HTTP/2 clients can merge many requests into a single packet. Because HTTP/2 recommends disabling Nagle's algorithm, this is often beneficial.
|
131
|
+
|
126
132
|
## Contributing
|
127
133
|
|
128
134
|
1. Fork it
|
data/lib/async/io.rb
CHANGED
@@ -29,10 +29,8 @@ require_relative "io/endpoint/each"
|
|
29
29
|
|
30
30
|
module Async
|
31
31
|
module IO
|
32
|
-
@file_descriptor_limit = nil
|
33
|
-
|
34
32
|
def self.file_descriptor_limit
|
35
|
-
|
33
|
+
Process.getrlimit(Process::RLIMIT_NOFILE).first
|
36
34
|
end
|
37
35
|
end
|
38
36
|
end
|
data/lib/async/io/stream.rb
CHANGED
@@ -59,6 +59,7 @@ module Async
|
|
59
59
|
end
|
60
60
|
|
61
61
|
attr :io
|
62
|
+
|
62
63
|
attr :block_size
|
63
64
|
|
64
65
|
# Reads `size` bytes from the stream. If size is not specified, read until end of file.
|
@@ -66,7 +67,7 @@ module Async
|
|
66
67
|
return '' if size == 0
|
67
68
|
|
68
69
|
if size
|
69
|
-
until @eof or @read_buffer.
|
70
|
+
until @eof or @read_buffer.bytesize >= size
|
70
71
|
# Compute the amount of data we need to read from the underlying stream:
|
71
72
|
read_size = size - @read_buffer.bytesize
|
72
73
|
|
@@ -115,7 +116,7 @@ module Async
|
|
115
116
|
split_offset = pattern.bytesize - 1
|
116
117
|
|
117
118
|
until index = @read_buffer.index(pattern, offset)
|
118
|
-
offset = @read_buffer.
|
119
|
+
offset = @read_buffer.bytesize - split_offset
|
119
120
|
|
120
121
|
offset = 0 if offset < 0
|
121
122
|
|
@@ -145,9 +146,8 @@ module Async
|
|
145
146
|
else
|
146
147
|
@write_buffer << string
|
147
148
|
|
148
|
-
if @write_buffer.
|
149
|
-
|
150
|
-
@write_buffer.clear
|
149
|
+
if @write_buffer.bytesize >= @block_size
|
150
|
+
drain_write_buffer
|
151
151
|
end
|
152
152
|
end
|
153
153
|
|
@@ -163,25 +163,16 @@ module Async
|
|
163
163
|
|
164
164
|
# Flushes buffered data to the stream.
|
165
165
|
def flush(deferred: @deferred)
|
166
|
-
|
167
|
-
|
168
|
-
|
169
|
-
|
170
|
-
|
171
|
-
|
172
|
-
@pending += 1
|
173
|
-
# task.yield
|
174
|
-
return
|
175
|
-
end
|
176
|
-
|
177
|
-
Async.logger.debug(self) {"Flushing #{@pending} writes (#{@write_buffer.bytesize} bytes)..."}
|
178
|
-
|
166
|
+
if deferred and task = Task.current?
|
167
|
+
@pending += 1
|
168
|
+
|
169
|
+
if @pending == 1
|
170
|
+
task.yield
|
171
|
+
drain_write_buffer unless @write_buffer.empty?
|
179
172
|
@pending = 0
|
180
|
-
else
|
181
|
-
Async.logger.debug(self) {"Flushing immediate write (#{@write_buffer.bytesize} bytes)..."}
|
182
173
|
end
|
183
|
-
|
184
|
-
drain_write_buffer
|
174
|
+
else
|
175
|
+
drain_write_buffer unless @write_buffer.empty?
|
185
176
|
end
|
186
177
|
end
|
187
178
|
|
@@ -189,9 +180,9 @@ module Async
|
|
189
180
|
read_until(separator, **options)
|
190
181
|
end
|
191
182
|
|
192
|
-
def puts(*
|
193
|
-
|
194
|
-
@write_buffer <<
|
183
|
+
def puts(*arguments, separator: $/)
|
184
|
+
arguments.each do |argument|
|
185
|
+
@write_buffer << argument << separator
|
195
186
|
end
|
196
187
|
|
197
188
|
flush
|
@@ -220,7 +211,7 @@ module Async
|
|
220
211
|
return if @io.closed?
|
221
212
|
|
222
213
|
begin
|
223
|
-
drain_write_buffer
|
214
|
+
drain_write_buffer unless @write_buffer.empty?
|
224
215
|
rescue
|
225
216
|
# We really can't do anything here unless we want #close to raise exceptions.
|
226
217
|
ensure
|
@@ -251,11 +242,17 @@ module Async
|
|
251
242
|
private
|
252
243
|
|
253
244
|
def drain_write_buffer
|
254
|
-
|
255
|
-
|
256
|
-
|
257
|
-
|
245
|
+
Async.logger.debug(self) do
|
246
|
+
if @pending > 0
|
247
|
+
"Draining #{@pending} writes (#{@write_buffer.bytesize} bytes)..."
|
248
|
+
else
|
249
|
+
"Draining immediate write (#{@write_buffer.bytesize} bytes)..."
|
250
|
+
end
|
258
251
|
end
|
252
|
+
|
253
|
+
# Async.logger.debug(self, name: "write") {@write_buffer.inspect}
|
254
|
+
@io.write(@write_buffer)
|
255
|
+
@write_buffer.clear
|
259
256
|
end
|
260
257
|
|
261
258
|
# Fills the buffer from the underlying stream.
|
@@ -265,6 +262,11 @@ module Async
|
|
265
262
|
size = @maximum_read_size
|
266
263
|
end
|
267
264
|
|
265
|
+
# This effectively ties the input and output stream together.
|
266
|
+
if @pending > 0
|
267
|
+
drain_write_buffer
|
268
|
+
end
|
269
|
+
|
268
270
|
if @read_buffer.empty?
|
269
271
|
if @io.read_nonblock(size, @read_buffer, exception: false)
|
270
272
|
# Async.logger.debug(self, name: "read") {@read_buffer.inspect}
|
@@ -291,7 +293,7 @@ module Async
|
|
291
293
|
|
292
294
|
result = nil
|
293
295
|
|
294
|
-
if size.nil? or size >= @read_buffer.
|
296
|
+
if size.nil? or size >= @read_buffer.bytesize
|
295
297
|
# Consume the entire read buffer:
|
296
298
|
result = @read_buffer
|
297
299
|
@read_buffer = Buffer.new
|
data/lib/async/io/version.rb
CHANGED
data/spec/async/io/c10k_spec.rb
CHANGED
@@ -24,7 +24,7 @@ require 'open3'
|
|
24
24
|
|
25
25
|
# require 'ruby-prof'
|
26
26
|
|
27
|
-
RSpec.describe "echo client/server", if: Process.respond_to?(:fork) do
|
27
|
+
RSpec.describe "c10k echo client/server", if: Process.respond_to?(:fork) do
|
28
28
|
# macOS has a rediculously hard time to do this.
|
29
29
|
# sudo sysctl -w net.inet.ip.portrange.first=10000
|
30
30
|
# sudo sysctl -w net.inet.ip.portrange.hifirst=10000
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: async-io
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 1.27.
|
4
|
+
version: 1.27.1
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Samuel Williams
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2019-
|
11
|
+
date: 2019-12-06 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: async
|
@@ -208,7 +208,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
208
208
|
- !ruby/object:Gem::Version
|
209
209
|
version: '0'
|
210
210
|
requirements: []
|
211
|
-
rubygems_version: 3.0.
|
211
|
+
rubygems_version: 3.0.6
|
212
212
|
signing_key:
|
213
213
|
specification_version: 4
|
214
214
|
summary: Provides support for asynchonous TCP, UDP, UNIX and SSL sockets.
|