async-io 1.28.0 → 1.31.0
- checksums.yaml +4 -4
- data/lib/async/io/host_endpoint.rb +1 -1
- data/lib/async/io/peer.rb +2 -2
- data/lib/async/io/socket.rb +2 -2
- data/lib/async/io/ssl_socket.rb +2 -4
- data/lib/async/io/stream.rb +2 -2
- data/lib/async/io/tcp_socket.rb +1 -1
- data/{spec/async/io/socket/udp_spec.rb → lib/async/io/threads.rb} +56 -36
- data/lib/async/io/trap.rb +8 -3
- data/lib/async/io/version.rb +1 -1
- metadata +45 -94
- data/.editorconfig +0 -6
- data/.gitignore +0 -13
- data/.rspec +0 -3
- data/.travis.yml +0 -26
- data/.yardopts +0 -2
- data/Gemfile +0 -13
- data/README.md +0 -171
- data/async-io.gemspec +0 -30
- data/examples/allocations/byteslice.rb +0 -29
- data/examples/allocations/memory.rb +0 -16
- data/examples/allocations/read_chunks.rb +0 -18
- data/examples/chat/client.rb +0 -58
- data/examples/chat/server.rb +0 -83
- data/examples/defer/worker.rb +0 -29
- data/examples/echo/client.rb +0 -23
- data/examples/echo/server.rb +0 -58
- data/examples/issues/broken_ssl.rb +0 -15
- data/examples/issues/pipes.rb +0 -34
- data/examples/millions/client.rb +0 -44
- data/examples/millions/server.rb +0 -41
- data/examples/udp/client.rb +0 -14
- data/examples/udp/server.rb +0 -16
- data/gems/nio4r-2.3.gemfile +0 -3
- data/spec/addrinfo.rb +0 -16
- data/spec/async/io/buffer_spec.rb +0 -48
- data/spec/async/io/c10k_spec.rb +0 -138
- data/spec/async/io/echo_spec.rb +0 -75
- data/spec/async/io/endpoint_spec.rb +0 -105
- data/spec/async/io/generic_examples.rb +0 -73
- data/spec/async/io/generic_spec.rb +0 -107
- data/spec/async/io/notification_spec.rb +0 -46
- data/spec/async/io/protocol/line_spec.rb +0 -81
- data/spec/async/io/shared_endpoint/server_spec.rb +0 -72
- data/spec/async/io/shared_endpoint_spec.rb +0 -65
- data/spec/async/io/socket/tcp_spec.rb +0 -101
- data/spec/async/io/socket_spec.rb +0 -149
- data/spec/async/io/ssl_server_spec.rb +0 -133
- data/spec/async/io/ssl_socket_spec.rb +0 -96
- data/spec/async/io/standard_spec.rb +0 -47
- data/spec/async/io/stream_context.rb +0 -30
- data/spec/async/io/stream_spec.rb +0 -337
- data/spec/async/io/tcp_socket_spec.rb +0 -84
- data/spec/async/io/trap_spec.rb +0 -52
- data/spec/async/io/udp_socket_spec.rb +0 -56
- data/spec/async/io/unix_endpoint_spec.rb +0 -106
- data/spec/async/io/unix_socket_spec.rb +0 -66
- data/spec/async/io/wrap/http_rb_spec.rb +0 -47
- data/spec/async/io/wrap/tcp_spec.rb +0 -79
- data/spec/spec_helper.rb +0 -15
data/.editorconfig
DELETED
data/.gitignore
DELETED
data/.rspec
DELETED
data/.travis.yml
DELETED
@@ -1,26 +0,0 @@
-language: ruby
-dist: xenial
-cache: bundler
-
-script: bundle exec rspec
-
-matrix:
-  include:
-    - rvm: 2.5
-    - rvm: 2.6
-    - rvm: 2.7
-    - rvm: 2.6
-      env: COVERAGE=PartialSummary,Coveralls
-    - rvm: truffleruby
-    - rvm: jruby-head
-      env: JRUBY_OPTS="--debug -X+O"
-    - rvm: ruby-head
-    - rvm: 2.6
-      os: osx
-    - rvm: 2.6
-      os: osx
-      gemfile: gems/nio4r-2.3.gemfile
-  allow_failures:
-    - rvm: ruby-head
-    - rvm: truffleruby
-    - rvm: jruby-head
data/.yardopts
DELETED
data/Gemfile
DELETED
data/README.md
DELETED
@@ -1,171 +0,0 @@
-# Async::IO
-
-Async::IO builds on [async] and provides asynchronous wrappers for `IO`, `Socket`, and related classes.
-
-[async]: https://github.com/socketry/async
-
-[Build Status](https://travis-ci.com/socketry/async-io)
-[Code Climate](https://codeclimate.com/github/socketry/async-io)
-[Coverage Status](https://coveralls.io/r/socketry/async-io)
-
-## Installation
-
-Add this line to your application's Gemfile:
-
-```ruby
-gem 'async-io'
-```
-
-And then execute:
-
-    $ bundle
-
-Or install it yourself as:
-
-    $ gem install async-io
-
-## Usage
-
-Basic echo server (from `spec/async/io/echo_spec.rb`):
-
-```ruby
-require 'async/io'
-
-def echo_server(endpoint)
-	Async do |task|
-		# This is a synchronous block within the current task:
-		endpoint.accept do |client|
-			# This is an asynchronous block within the current reactor:
-			data = client.read
-
-			# This produces out-of-order responses.
-			task.sleep(rand * 0.01)
-
-			client.write(data.reverse)
-			client.close_write
-		end
-	end
-end
-
-def echo_client(endpoint, data)
-	Async do |task|
-		endpoint.connect do |peer|
-			peer.write(data)
-			peer.close_write
-
-			message = peer.read
-
-			puts "Sent #{data}, got response: #{message}"
-		end
-	end
-end
-
-Async do
-	endpoint = Async::IO::Endpoint.tcp('0.0.0.0', 9000)
-
-	server = echo_server(endpoint)
-
-	5.times.collect do |i|
-		echo_client(endpoint, "Hello World #{i}")
-	end.each(&:wait)
-
-	server.stop
-end
-```
-
-### Timeouts
-
-Timeouts add a temporal limit to the execution of your code. If the IO doesn't respond in time, it will fail. Timeouts are high-level concerns and you generally shouldn't use them except at the very highest level of your program.
-
-```ruby
-message = task.with_timeout(5) do
-	begin
-		peer.read
-	rescue Async::TimeoutError
-		nil # The timeout was triggered.
-	end
-end
-```
-
-Any `yield` operation can cause a timeout to trigger. Non-`async` functions might not time out because they are outside the scope of `async`.
-
-#### Wrapper Timeouts
-
-Asynchronous operations may block forever. You can assign a per-wrapper operation timeout duration. All asynchronous operations will be bounded by this timeout.
-
-```ruby
-peer.timeout = 1
-peer.read # If this takes more than 1 second, Async::TimeoutError will be raised.
-```
-
-The benefit of this approach is that it applies to all operations. Typically, this would be configured by the user, and set to something fairly high, e.g. 120 seconds.
-
-### Reading Characters
-
-This example shows how to read one character at a time as the user presses it on the keyboard, and echoes it back out as uppercase:
-
-```ruby
-require 'async'
-require 'async/io/stream'
-require 'io/console'
-
-$stdin.raw!
-$stdin.echo = false
-
-Async do |task|
-	stdin = Async::IO::Stream.new(
-		Async::IO::Generic.new($stdin)
-	)
-
-	while character = stdin.read(1)
-		$stdout.write character.upcase
-	end
-end
-```
-
-### Deferred Buffering
-
-`Async::IO::Stream.new(..., deferred:true)` creates a deferred stream, which increases latency slightly but reduces the total number of packets sent. It does this by combining all calls to `Stream#flush` within a single iteration of the reactor. This is typically more useful on the client side, but can also be useful on the server side when individual packets have high latency. It is generally preferable to send one 100-byte packet rather than ten 10-byte packets.
-
-Servers typically only deal with one request per iteration of the reactor, so it's less useful there. Clients which make multiple requests can benefit significantly, e.g. HTTP/2 clients can merge many requests into a single packet. Because HTTP/2 recommends disabling Nagle's algorithm, this is often beneficial.
-
-## Contributing
-
-1. Fork it
-2. Create your feature branch (`git checkout -b my-new-feature`)
-3. Commit your changes (`git commit -am 'Add some feature'`)
-4. Push to the branch (`git push origin my-new-feature`)
-5. Create a new Pull Request
-
-## See Also
-
-- [async](https://github.com/socketry/async) — Asynchronous event-driven reactor.
-- [async-process](https://github.com/socketry/async-process) — Asynchronous process spawning/waiting.
-- [async-websocket](https://github.com/socketry/async-websocket) — Asynchronous client and server websockets.
-- [async-dns](https://github.com/socketry/async-dns) — Asynchronous DNS resolver and server.
-- [async-rspec](https://github.com/socketry/async-rspec) — Shared contexts for running async specs.
-- [rubydns](https://github.com/ioquatix/rubydns) — An easy-to-use Ruby DNS server.
-
-## License
-
-Released under the MIT license.
-
-Copyright, 2017, by [Samuel G. D. Williams](http://www.codeotaku.com/samuel-williams).
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
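The "Deferred Buffering" section of the removed README describes `deferred: true` without showing it in use. Below is a minimal, hypothetical sketch of the pattern it describes, assuming the `Async::IO::Stream` and `Async::IO::Endpoint` APIs shown in the README above; the host, port, and payloads are placeholders.

```ruby
require 'async'
require 'async/io'
require 'async/io/stream'

Async do
	# Illustrative endpoint; assumes something is listening on this port.
	endpoint = Async::IO::Endpoint.tcp('localhost', 9000)

	endpoint.connect do |peer|
		# deferred: true combines flushes within a single reactor iteration,
		# so several small writes can go out as one packet.
		stream = Async::IO::Stream.new(peer, deferred: true)

		stream.write("first line\n")
		stream.flush
		stream.write("second line\n")
		stream.flush # Both flushes may be coalesced before the data reaches the socket.
	end
end
```

With a regular stream, each `flush` writes to the socket immediately; with `deferred: true`, flushes within one reactor iteration are combined, which is the packet-reduction behaviour the README describes.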
data/async-io.gemspec
DELETED
@@ -1,30 +0,0 @@
-
-require_relative 'lib/async/io/version'
-
-Gem::Specification.new do |spec|
-	spec.name = "async-io"
-	spec.version = Async::IO::VERSION
-	spec.licenses = ["MIT"]
-	spec.authors = ["Samuel Williams"]
-	spec.email = ["samuel.williams@oriontransfer.co.nz"]
-
-	spec.summary = "Provides support for asynchronous TCP, UDP, UNIX and SSL sockets."
-	spec.homepage = "https://github.com/socketry/async-io"
-
-	spec.files = `git ls-files`.split($/)
-	spec.executables = spec.files.grep(%r{^bin/}).map{|f| File.basename(f)}
-	spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
-	spec.require_paths = ["lib"]
-
-	spec.add_dependency "async", "~> 1.14"
-	spec.add_development_dependency "async-rspec", "~> 1.10"
-
-	spec.required_ruby_version = '~> 2.5'
-
-	spec.add_development_dependency "async-container", "~> 0.15"
-
-	spec.add_development_dependency "covered"
-	spec.add_development_dependency "bundler"
-	spec.add_development_dependency "bake-bundler"
-	spec.add_development_dependency "rspec", "~> 3.0"
-end
data/examples/allocations/byteslice.rb
DELETED
@@ -1,29 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-require_relative 'memory'
-
-string = nil
-
-measure_memory("Initial allocation") do
-	string = "a" * 5*1024*1024
-	string.freeze
-end # => 5.0 MB
-
-measure_memory("Byteslice from start to middle") do
-	# Why does this need to allocate memory? Surely it can share the original allocation?
-	x = string.byteslice(0, string.bytesize / 2)
-end # => 2.5 MB
-
-measure_memory("Byteslice from middle to end") do
-	string.byteslice(string.bytesize / 2, string.bytesize)
-end # => 0.0 MB
-
-measure_memory("Slice! from start to middle") do
-	string.dup.slice!(0, string.bytesize / 2)
-end # => 7.5 MB
-
-measure_memory("Byte slice into two halves") do
-	head = string.byteslice(0, string.bytesize / 2) # 2.5 MB
-	remainder = string.byteslice(string.bytesize / 2, string.bytesize) # Shared
-end # 2.5 MB
data/examples/allocations/memory.rb
DELETED
@@ -1,16 +0,0 @@
-# frozen_string_literal: true
-
-def measure_memory(annotation = "Memory allocated")
-	GC.disable
-
-	start_memory = `ps -p #{Process::pid} -o rss`.split("\n")[1].chomp.to_i
-
-	yield
-
-ensure
-	end_memory = `ps -p #{Process::pid} -o rss`.split("\n")[1].chomp.to_i
-	memory_usage = (end_memory - start_memory).to_f / 1024
-
-	puts "#{memory_usage.round(1)} MB: #{annotation}"
-	GC.enable
-end
data/examples/allocations/read_chunks.rb
DELETED
@@ -1,18 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-require_relative 'memory'
-
-require_relative "../../lib/async/io/stream"
-require "stringio"
-
-measure_memory("Stream setup") do
-	@io = StringIO.new("a" * (50*1024*1024))
-	@stream = Async::IO::Stream.new(@io)
-end # 50.0 MB
-
-measure_memory("Read all chunks") do
-	while chunk = @stream.read_partial
-		chunk.clear
-	end
-end # 0.5 MB
data/examples/chat/client.rb
DELETED
@@ -1,58 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-$LOAD_PATH << File.expand_path("../../lib", __dir__)
-
-require 'async'
-require 'async/notification'
-require 'async/io/stream'
-require 'async/io/host_endpoint'
-require 'async/io/protocol/line'
-
-class User < Async::IO::Protocol::Line
-end
-
-endpoint = Async::IO::Endpoint.parse(ARGV.pop || "tcp://localhost:7138")
-
-input = Async::IO::Protocol::Line.new(
-	Async::IO::Stream.new(
-		Async::IO::Generic.new($stdin)
-	)
-)
-
-Async do |task|
-	socket = endpoint.connect
-	stream = Async::IO::Stream.new(socket)
-	user = User.new(stream)
-
-	# This is used to track whether either reading from stdin failed or reading from network failed.
-	finished = Async::Notification.new
-
-	# Read lines from stdin and write to network.
-	terminal = task.async do
-		while line = input.read_line
-			user.write_lines line
-		end
-	rescue EOFError
-		# It's okay, we are disconnecting, because stdin has closed.
-	ensure
-		finished.signal
-	end
-
-	# Read lines from network and write to stdout.
-	network = task.async do
-		while line = user.read_line
-			puts line
-		end
-	ensure
-		finished.signal
-	end
-
-	# Wait for any of the above processes to finish:
-	finished.wait
-ensure
-	# Stop all the nested tasks if we are exiting:
-	network.stop if network
-	terminal.stop if terminal
-	user.close if user
-end
data/examples/chat/server.rb
DELETED
@@ -1,83 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-$LOAD_PATH << File.expand_path("../../lib", __dir__)
-
-require 'set'
-
-require 'async'
-require 'async/io/host_endpoint'
-require 'async/io/protocol/line'
-
-class User < Async::IO::Protocol::Line
-	attr_accessor :name
-
-	def login!
-		self.write_lines "Tell me your name, traveller:"
-		self.name = self.read_line
-	end
-
-	def to_s
-		@name || "unknown"
-	end
-end
-
-class Server
-	def initialize
-		@users = Set.new
-	end
-
-	def broadcast(*message)
-		puts *message
-
-		@users.each do |user|
-			begin
-				user.write_lines(*message)
-			rescue EOFError
-				# In theory, it's possible this will fail if the remote end has disconnected. Each user has its own task running `#connected`, and eventually `user.read_line` will fail. When it does, the disconnection logic will be invoked. A better way to do this would be to have a message queue, but for the sake of keeping this example simple, this is by far the simpler option.
-			end
-		end
-	end
-
-	def connected(user)
-		user.login!
-
-		broadcast("#{user} has joined")
-
-		user.write_lines("currently connected: #{@users.map(&:to_s).join(', ')}")
-
-		while message = user.read_line
-			broadcast("#{user.name}: #{message}")
-		end
-	rescue EOFError
-		# It's okay, client has disconnected.
-	ensure
-		disconnected(user)
-	end
-
-	def disconnected(user, reason = "quit")
-		@users.delete(user)
-
-		broadcast("#{user} has disconnected: #{reason}")
-	end
-
-	def run(endpoint)
-		Async do |task|
-			endpoint.accept do |peer|
-				stream = Async::IO::Stream.new(peer)
-				user = User.new(stream)
-
-				@users << user
-
-				connected(user)
-			end
-		end
-	end
-end
-
-Async.logger.level = Logger::INFO
-Async.logger.info("Starting server...")
-server = Server.new
-
-endpoint = Async::IO::Endpoint.parse(ARGV.pop || "tcp://localhost:7138")
-server.run(endpoint)