ld-eventsource 1.0.3 → 2.0.0.pre.beta.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 8af7db395f6a4a500ea56ff3f742aab25b8ca416
- data.tar.gz: ee59c90efdf456e277a6f6ea7209a62941dbe96b
+ metadata.gz: abed7a7f61d7f731895a05d722273996e683fab0
+ data.tar.gz: aa41fa22d28b66bd6bb6585f4030b06cb3b0dc05
  SHA512:
- metadata.gz: 3c1c5e63d0f8bcff6c27a60f9a8cb04f5d1ffc3e4128bc5d9822f19663934b70ab3f90725ba747b4e13cc77c154c8d23605ed677c392dab980fd56c3d87d8cef
- data.tar.gz: eec5bfce676d858bca4d8c687c043d45f2e200d3fb8819d693e81812e5504f597fcbed9bbe011d29f4aba967ee17de1e1302b3b673fbb5d2b9af03ba6558a823
+ metadata.gz: 6597dfb8e3c7b1d4103ea18c44bb7d83a598b33565f79403fbec790d361c97ad1988b11027229e20cab4ec9e3895ad47faee37949eb1829a479d15b941f5d3e0
+ data.tar.gz: 910c3a810cc7534d316910dc06b95d311a3d22a659956a68d78c31a9f78b3210e9f019e94851e71b15e50271c74e1257735e1f57097d9c52f209acb82d837ac9
@@ -4,11 +4,10 @@ workflows:
  version: 2
  test:
  jobs:
- - test-misc-rubies
- - test-2.2
- - test-2.3
  - test-2.4
  - test-2.5
+ - test-2.6
+ - test-2.7
  - test-jruby-9.2

  ruby-docker-template: &ruby-docker-template
@@ -18,6 +17,7 @@ ruby-docker-template: &ruby-docker-template
  if [[ $CIRCLE_JOB == test-jruby* ]]; then
  gem install jruby-openssl; # required by bundler, no effect on Ruby MRI
  fi
+ - run: sudo apt-get update -y && sudo apt-get install -y build-essential
  - run: ruby -v
  - run: gem install bundler -v "~> 1.17"
  - run: bundle install
@@ -29,64 +29,23 @@ ruby-docker-template: &ruby-docker-template
  path: ./rspec

  jobs:
- test-2.2:
+ test-2.4:
  <<: *ruby-docker-template
  docker:
- - image: circleci/ruby:2.2.10-jessie
- test-2.3:
+ - image: circleci/ruby:2.4
+ test-2.5:
  <<: *ruby-docker-template
  docker:
- - image: circleci/ruby:2.3.7-jessie
- test-2.4:
+ - image: circleci/ruby:2.5
+ test-2.6:
  <<: *ruby-docker-template
  docker:
- - image: circleci/ruby:2.4.5-stretch
- test-2.5:
+ - image: circleci/ruby:2.6
+ test-2.7:
  <<: *ruby-docker-template
  docker:
- - image: circleci/ruby:2.5.3-stretch
+ - image: circleci/ruby:2.7
  test-jruby-9.2:
  <<: *ruby-docker-template
  docker:
- - image: circleci/jruby:9-jdk
-
- # The following very slow job uses an Ubuntu container to run the Ruby versions that
- # CircleCI doesn't provide Docker images for.
- test-misc-rubies:
- machine:
- image: circleci/classic:latest
- environment:
- - RUBIES: "jruby-9.1.17.0"
- steps:
- - checkout
- - run:
- name: install all Ruby versions
- command: "parallel rvm install ::: $RUBIES"
- - run:
- name: bundle install for all versions
- shell: /bin/bash -leo pipefail # need -l in order for "rvm use" to work
- command: |
- set -e;
- for i in $RUBIES;
- do
- rvm use $i;
- if [[ $i == jruby* ]]; then
- gem install jruby-openssl; # required by bundler, no effect on Ruby MRI
- fi;
- # bundler 2.0 may be preinstalled, we need to remove it if so
- yes | gem uninstall bundler --version '>=2.0' || true;
- gem install bundler -v "~> 1.17";
- bundle install;
- mv Gemfile.lock "Gemfile.lock.$i"
- done
- - run:
- name: run tests for all versions
- shell: /bin/bash -leo pipefail
- command: |
- set -e;
- for i in $RUBIES;
- do
- rvm use $i;
- cp "Gemfile.lock.$i" Gemfile.lock;
- bundle exec rspec spec;
- done
+ - image: circleci/jruby:9.2-jdk
data/.gitignore CHANGED
@@ -13,3 +13,4 @@
  mkmf.log
  *.gem
  .DS_Store
+ rspec
@@ -1,3 +1,5 @@
+ repo:
+ public: ruby-eventsource

  publications:
  - url: https://rubygems.org/gems/ld-eventsource
@@ -2,6 +2,10 @@

  All notable changes to the LaunchDarkly SSE Client for Ruby will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org).

+ ## [1.0.3] - 2020-03-17
+ ### Fixed:
+ - The backoff delay logic for reconnecting after a stream failure was broken so that if a failure occurred after a stream had been active for at least `reconnect_reset_interval` (default 60 seconds), retries would use _no_ delay, potentially causing a flood of requests and a spike in CPU usage.
+
  ## [1.0.2] - 2020-03-10
  ### Removed:
  - Removed an unused dependency on `rake`. There are no other changes in this release.
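
The 1.0.3 entry above describes the fix to the reconnection backoff. As a rough illustration of the intended behavior (not the gem's actual `Impl::Backoff` code; the class name and numbers below are assumptions), a backoff that resets after a sufficiently long successful connection might look like this:

    # Hypothetical sketch of backoff-with-reset; illustrative only.
    class IllustrativeBackoff
      def initialize(base_interval, max_interval, reconnect_reset_interval: 60)
        @base = base_interval
        @max = max_interval
        @reset_interval = reconnect_reset_interval
        @attempts = 0
        @last_success = nil
      end

      # Called when a stream connects successfully.
      def mark_success
        @last_success = Time.now
      end

      # Delay (in seconds) to wait before the next reconnection attempt.
      def next_interval
        # If the last connection stayed up for at least the reset interval,
        # restart the exponential ladder at the base delay rather than at zero.
        @attempts = 0 if @last_success && (Time.now - @last_success) >= @reset_interval
        @attempts += 1
        delay = [@base * (2 ** (@attempts - 1)), @max].min
        # Jitter, so many clients do not reconnect in lockstep.
        delay / 2.0 + rand(delay / 2.0)
      end
    end
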
data/Gemfile CHANGED
@@ -1,3 +1,3 @@
  source "https://rubygems.org"

- gemspec
+ gemspec
@@ -1,19 +1,36 @@
  PATH
  remote: .
  specs:
- ld-eventsource (1.0.3)
+ ld-eventsource (2.0.0.pre.beta.1)
  concurrent-ruby (~> 1.0)
- http_tools (~> 0.4.5)
- socketry (~> 0.5.1)
+ http (~> 4.4.1)

  GEM
  remote: https://rubygems.org/
  specs:
- concurrent-ruby (1.1.6)
+ addressable (2.7.0)
+ public_suffix (>= 2.0.2, < 5.0)
+ concurrent-ruby (1.1.8)
  diff-lcs (1.3)
- hitimes (1.3.1)
- hitimes (1.3.1-java)
- http_tools (0.4.5)
+ domain_name (0.5.20190701)
+ unf (>= 0.0.5, < 1.0.0)
+ ffi (1.14.2)
+ ffi (1.14.2-java)
+ ffi-compiler (1.0.1)
+ ffi (>= 1.0.0)
+ rake
+ http (4.4.1)
+ addressable (~> 2.3)
+ http-cookie (~> 1.0)
+ http-form_data (~> 2.2)
+ http-parser (~> 1.2.0)
+ http-cookie (1.0.3)
+ domain_name (~> 0.5)
+ http-form_data (2.3.0)
+ http-parser (1.2.3)
+ ffi-compiler (>= 1.0, < 2.0)
+ public_suffix (4.0.6)
+ rake (13.0.3)
  rspec (3.7.0)
  rspec-core (~> 3.7.0)
  rspec-expectations (~> 3.7.0)
@@ -29,8 +46,10 @@ GEM
  rspec-support (3.7.0)
  rspec_junit_formatter (0.3.0)
  rspec-core (>= 2, < 4, != 2.12.0)
- socketry (0.5.1)
- hitimes (~> 1.2)
+ unf (0.1.4)
+ unf_ext
+ unf (0.1.4-java)
+ unf_ext (0.0.7.7)

  PLATFORMS
  java
data/README.md CHANGED
@@ -3,14 +3,14 @@ LaunchDarkly SSE Client for Ruby

  [![Gem Version](https://badge.fury.io/rb/ld-eventsource.svg)](http://badge.fury.io/rb/ld-eventsource) [![Circle CI](https://circleci.com/gh/launchdarkly/ruby-eventsource/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-eventsource/tree/master)

- A client for the [Server-Sent Events](https://www.w3.org/TR/eventsource/) protocol. This implementation runs on a worker thread, and uses the [`socketry`](https://rubygems.org/gems/socketry) gem to manage a persistent connection. Its primary purpose is to support the [LaunchDarkly SDK for Ruby](https://github.com/launchdarkly/ruby-client), but it can be used independently.
+ A client for the [Server-Sent Events](https://www.w3.org/TR/eventsource/) protocol. This implementation runs on a worker thread, and uses the [`http`](https://rubygems.org/gems/http) gem to manage a persistent connection. Its primary purpose is to support the [LaunchDarkly SDK for Ruby](https://github.com/launchdarkly/ruby-client), but it can be used independently.

  Parts of this code are based on https://github.com/Tonkpils/celluloid-eventsource, but it does not use Celluloid.

  Supported Ruby versions
  -----------------------

- This gem has a minimum Ruby version of 2.2.6, or 9.1.6 for JRuby.
+ This gem has a minimum Ruby version of 2.4, or 9.2 for JRuby.

  Quick setup
  -----------
@@ -25,6 +25,5 @@ Gem::Specification.new do |spec|
  spec.add_development_dependency "rspec_junit_formatter", "~> 0.3.0"

  spec.add_runtime_dependency "concurrent-ruby", "~> 1.0"
- spec.add_runtime_dependency "http_tools", '~> 0.4.5'
- spec.add_runtime_dependency "socketry", "~> 0.5.1"
+ spec.add_runtime_dependency "http", "~> 4.4.1"
  end
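
To try the prerelease while it is still a beta, the gem has to be requested explicitly, since Bundler will not pick a `.pre` version for an open-ended requirement. A minimal Gemfile entry (version string taken from this diff) might be:

    # Gemfile
    source "https://rubygems.org"

    gem "ld-eventsource", "2.0.0.pre.beta.1"
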
@@ -1,6 +1,5 @@
  require "ld-eventsource/impl/backoff"
  require "ld-eventsource/impl/event_parser"
- require "ld-eventsource/impl/streaming_http"
  require "ld-eventsource/events"
  require "ld-eventsource/errors"

@@ -8,6 +7,7 @@ require "concurrent/atomics"
  require "logger"
  require "thread"
  require "uri"
+ require "http"

  module SSE
  #
@@ -80,6 +80,9 @@ module SSE
  # proxy with the `HTTP_PROXY` or `HTTPS_PROXY` environment variable)
  # @param logger [Logger] a Logger instance for the client to use for diagnostic output;
  # defaults to a logger with WARN level that goes to standard output
+ # @param socket_factory [#open] (nil) an optional factory object for creating sockets,
+ # if you want to use something other than the default `TCPSocket`; it must implement
+ # `open(uri, timeout)` to return a connected `Socket`
  # @yieldparam [Client] client the new client instance, before opening the connection
  #
  def initialize(uri,
@@ -90,7 +93,8 @@ module SSE
  reconnect_reset_interval: DEFAULT_RECONNECT_RESET_INTERVAL,
  last_event_id: nil,
  proxy: nil,
- logger: nil)
+ logger: nil,
+ socket_factory: nil)
  @uri = URI(uri)
  @stopped = Concurrent::AtomicBoolean.new(false)

@@ -98,7 +102,11 @@ module SSE
  @connect_timeout = connect_timeout
  @read_timeout = read_timeout
  @logger = logger || default_logger
-
+ http_client_options = {}
+ if socket_factory
+ http_client_options["socket_class"] = socket_factory
+ end
+
  if proxy
  @proxy = proxy
  else
@@ -108,6 +116,21 @@ module SSE
  end
  end

+ if @proxy
+ http_client_options["proxy"] = {
+ :proxy_address => @proxy.host,
+ :proxy_port => @proxy.port
+ }
+ end
+
+ @http_client = HTTP::Client.new(http_client_options)
+ .timeout({
+ read: read_timeout,
+ connect: connect_timeout
+ })
+ @buffer = ""
+ @lock = Mutex.new
+
  @backoff = Impl::Backoff.new(reconnect_time || DEFAULT_RECONNECT_TIME, MAX_RECONNECT_TIME,
  reconnect_reset_interval: reconnect_reset_interval)

@@ -163,12 +186,56 @@ module SSE
  #
  def close
  if @stopped.make_true
- @cxn.close if !@cxn.nil?
- @cxn = nil
+ reset_http
  end
  end

  private
+
+ def reset_http
+ @http_client.close if !@http_client.nil?
+ @cxn = nil
+ @buffer = ""
+ end
+
+ def read_lines
+ Enumerator.new do |gen|
+ loop do
+ line = read_line
+ break if line.nil?
+ gen.yield line
+ end
+ end
+ end
+
+ def read_line
+ loop do
+ @lock.synchronize do
+ i = @buffer.index(/[\r\n]/)
+ if !i.nil? && !(i == @buffer.length - 1 && @buffer[i] == "\r")
+ i += 1 if (@buffer[i] == "\r" && @buffer[i + 1] == "\n")
+ return @buffer.slice!(0, i + 1).force_encoding(Encoding::UTF_8)
+ end
+ end
+ return nil if !read_chunk_into_buffer
+ end
+ end
+
+ def read_chunk_into_buffer
+ # If @done is set, it means the Parser has signaled end of response body
+ @lock.synchronize { return false if @done }
+ begin
+ data = @cxn.readpartial
+ rescue HTTP::TimeoutError
+ # We rethrow this as our own type so the caller doesn't have to know the httprb API
+ raise Errors::ReadTimeoutError.new(@read_timeout)
+ end
+ return false if data == nil
+ @buffer << data
+ # We are piping the content through the parser so that it can handle things like chunked
+ # encoding for us. The content ends up being appended to @buffer via our callback.
+ true
+ end

  def default_logger
  log = ::Logger.new($stdout)
@@ -196,7 +263,7 @@ module SSE
  end
  end
  begin
- @cxn.close if !@cxn.nil?
+ reset_http
  rescue StandardError => e
  log_and_dispatch_error(e, "Unexpected error while closing stream")
  end
@@ -215,31 +282,28 @@ module SSE
  cxn = nil
  begin
  @logger.info { "Connecting to event stream at #{@uri}" }
- cxn = Impl::StreamingHTTPConnection.new(@uri,
- proxy: @proxy,
- headers: build_headers,
- connect_timeout: @connect_timeout,
- read_timeout: @read_timeout
- )
- if cxn.status == 200
+ cxn = @http_client.request("GET", @uri, {
+ headers: build_headers
+ })
+ if cxn.status.code == 200
  content_type = cxn.headers["content-type"]
  if content_type && content_type.start_with?("text/event-stream")
  return cxn # we're good to proceed
  else
- cxn.close
+ reset_http
  err = Errors::HTTPContentTypeError.new(cxn.headers["content-type"])
  @on[:error].call(err)
  @logger.warn { "Event source returned unexpected content type '#{cxn.headers["content-type"]}'" }
  end
  else
- body = cxn.read_all # grab the whole response body in case it has error details
- cxn.close
- @logger.info { "Server returned error status #{cxn.status}" }
- err = Errors::HTTPStatusError.new(cxn.status, body)
+ body = cxn.to_s # grab the whole response body in case it has error details
+ reset_http
+ @logger.info { "Server returned error status #{cxn.status.code}" }
+ err = Errors::HTTPStatusError.new(cxn.status.code, body)
  @on[:error].call(err)
  end
  rescue
- cxn.close if !cxn.nil?
+ reset_http
  raise # will be handled in run_stream
  end
  # if unsuccessful, continue the loop to connect again
@@ -253,7 +317,7 @@ module SSE
  # it can automatically reset itself if enough time passes between failures.
  @backoff.mark_success

- event_parser = Impl::EventParser.new(cxn.read_lines)
+ event_parser = Impl::EventParser.new(read_lines)
  event_parser.items.each do |item|
  return if @stopped.value
  case item
@@ -288,7 +352,8 @@ module SSE
  def build_headers
  h = {
  'Accept' => 'text/event-stream',
- 'Cache-Control' => 'no-cache'
+ 'Cache-Control' => 'no-cache',
+ 'User-Agent' => 'ruby-eventsource'
  }
  h['Last-Event-Id'] = @last_id if !@last_id.nil?
  h.merge(@headers)
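
The constructor now accepts `socket_factory:` alongside the existing options, per the `@param` documentation added above. The sketch below is illustrative, not taken from the gem's docs: the stream URL and `ExampleSocketFactory` are made up, and the `on_event`/`on_error` callbacks follow the `c.on_event` usage and `@on[:error]` dispatch that appear elsewhere in this diff.

    require "ld-eventsource"
    require "socket"

    # Anything with open(uri, timeout) returning a connected Socket will do.
    class ExampleSocketFactory
      def open(uri, timeout)
        Socket.tcp(uri.host, uri.port, connect_timeout: timeout)
      end
    end

    client = SSE::Client.new(
      "https://stream.example.com/events",          # hypothetical URL
      headers: { "Authorization" => "secret" },
      read_timeout: 300,
      socket_factory: ExampleSocketFactory.new
    ) do |c|
      c.on_event { |event| puts "#{event.type}: #{event.data}" }
      c.on_error { |error| warn "stream error: #{error}" }
    end

    # ...later, during shutdown:
    client.close
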
@@ -1,3 +1,3 @@
  module SSE
- VERSION = "1.0.3"
+ VERSION = "2.0.0-beta.1"
  end
@@ -1,5 +1,7 @@
  require "ld-eventsource"

+ require "http_stub"
+
  module SSE
  module Impl
  describe Backoff do
@@ -1,6 +1,4 @@
  require "ld-eventsource"
- require "socketry"
- require "http_stub"

  #
  # End-to-end tests of the SSE client against a real server
@@ -62,8 +60,11 @@ EOT
  expect(received_req.header).to eq({
  "accept" => ["text/event-stream"],
  "cache-control" => ["no-cache"],
- "host" => ["127.0.0.1"],
- "authorization" => ["secret"]
+ "host" => ["127.0.0.1:" + server.port.to_s],
+ "authorization" => ["secret"],
+ "user-agent" => ["ruby-eventsource"],
+ "content-length" => ["0"],
+ "connection" => ["close"]
  })
  end
  end
@@ -85,9 +86,12 @@ EOT
  expect(received_req.header).to eq({
  "accept" => ["text/event-stream"],
  "cache-control" => ["no-cache"],
- "host" => ["127.0.0.1"],
+ "host" => ["127.0.0.1:" + server.port.to_s],
  "authorization" => ["secret"],
- "last-event-id" => [id]
+ "last-event-id" => [id],
+ "user-agent" => ["ruby-eventsource"],
+ "content-length" => ["0"],
+ "connection" => ["close"]
  })
  end
  end
@@ -366,4 +370,60 @@ EOT
  end
  end
  end
+
+ it "connects to HTTP server through proxy" do
+ events_body = simple_event_1_text
+ with_server do |server|
+ server.setup_response("/") do |req,res|
+ send_stream_content(res, events_body, keep_open: false)
+ end
+ with_server(StubProxyServer.new) do |proxy|
+ event_sink = Queue.new
+ client = subject.new(server.base_uri, proxy: proxy.base_uri) do |c|
+ c.on_event { |event| event_sink << event }
+ end
+
+ with_client(client) do |client|
+ expect(event_sink.pop).to eq(simple_event_1)
+ expect(proxy.request_count).to eq(1)
+ end
+ end
+ end
+ end
+
+ it "resets read timeout between events" do
+ event_body = simple_event_1_text
+ with_server do |server|
+ attempt = 0
+ server.setup_response("/") do |req,res|
+ attempt += 1
+ if attempt == 1
+ stream = send_stream_content(res, event_body, keep_open: true)
+ Thread.new do
+ 2.times {
+ # write within timeout interval
+ sleep(0.75)
+ stream.write(event_body)
+ }
+ # cause timeout
+ sleep(1.25)
+ end
+ elsif attempt == 2
+ send_stream_content(res, event_body, keep_open: false)
+ end
+ end
+
+ event_sink = Queue.new
+ client = subject.new(server.base_uri, reconnect_time: reconnect_asap, read_timeout: 1) do |c|
+ c.on_event { |event| event_sink << event }
+ end
+
+ with_client(client) do |client|
+ 4.times {
+ expect(event_sink.pop).to eq(simple_event_1)
+ }
+ expect(attempt).to eq 2
+ end
+ end
+ end
  end
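
The new "connects to HTTP server through proxy" spec above exercises the `proxy:` option end to end. In application code the equivalent would look roughly like the following sketch (the proxy URL and stream URL are placeholders; per the documentation earlier in this diff, the `HTTP_PROXY`/`HTTPS_PROXY` environment variables can be used instead of passing the option):

    require "ld-eventsource"

    client = SSE::Client.new(
      "https://stream.example.com/events",               # hypothetical URL
      proxy: URI("http://proxy.internal.example:8080")   # hypothetical proxy
    ) do |c|
      c.on_event { |event| puts event.data }
    end
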
@@ -3,6 +3,8 @@ require "webrick/httpproxy"
  require "webrick/https"

  class StubHTTPServer
+ attr_reader :port
+
  def initialize
  @port = 50000
  begin
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: ld-eventsource
  version: !ruby/object:Gem::Version
- version: 1.0.3
+ version: 2.0.0.pre.beta.1
  platform: ruby
  authors:
  - LaunchDarkly
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-03-18 00:00:00.000000000 Z
+ date: 2021-01-22 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler
@@ -67,33 +67,19 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '1.0'
  - !ruby/object:Gem::Dependency
- name: http_tools
+ name: http
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 0.4.5
+ version: 4.4.1
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 0.4.5
- - !ruby/object:Gem::Dependency
- name: socketry
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: 0.5.1
- type: :runtime
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - "~>"
- - !ruby/object:Gem::Version
- version: 0.5.1
+ version: 4.4.1
  description: LaunchDarkly SSE client for Ruby
  email:
  - team@launchdarkly.com
@@ -116,7 +102,6 @@ files:
  - lib/ld-eventsource/events.rb
  - lib/ld-eventsource/impl/backoff.rb
  - lib/ld-eventsource/impl/event_parser.rb
- - lib/ld-eventsource/impl/streaming_http.rb
  - lib/ld-eventsource/version.rb
  - scripts/gendocs.sh
  - scripts/release.sh
@@ -124,7 +109,6 @@ files:
  - spec/client_spec.rb
  - spec/event_parser_spec.rb
  - spec/http_stub.rb
- - spec/streaming_http_spec.rb
  homepage: https://github.com/launchdarkly/ruby-eventsource
  licenses:
  - Apache-2.0
@@ -140,9 +124,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
  version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - ">"
  - !ruby/object:Gem::Version
- version: '0'
+ version: 1.3.1
  requirements: []
  rubyforge_project:
  rubygems_version: 2.5.2.3
@@ -154,4 +138,3 @@ test_files:
  - spec/client_spec.rb
  - spec/event_parser_spec.rb
  - spec/http_stub.rb
- - spec/streaming_http_spec.rb
@@ -1,222 +0,0 @@
- require "ld-eventsource/errors"
-
- require "concurrent/atomics"
- require "http_tools"
- require "socketry"
-
- module SSE
- module Impl
- #
- # Wrapper around a socket providing a simplified HTTP request-response cycle including streaming.
- # The socket is created and managed by Socketry, which we use so that we can have a read timeout.
- #
- class StreamingHTTPConnection
- attr_reader :status, :headers
-
- #
- # Opens a new connection.
- #
- # @param [String] uri the URI to connect o
- # @param [String] proxy the proxy server URI, if any
- # @param [Hash] headers request headers
- # @param [Float] connect_timeout connection timeout
- # @param [Float] read_timeout read timeout
- #
- def initialize(uri, proxy: nil, headers: {}, connect_timeout: nil, read_timeout: nil)
- @socket = HTTPConnectionFactory.connect(uri, proxy, connect_timeout, read_timeout)
- @socket.write(build_request(uri, headers))
- @reader = HTTPResponseReader.new(@socket, read_timeout)
- @status = @reader.status
- @headers = @reader.headers
- @closed = Concurrent::AtomicBoolean.new(false)
- end
-
- #
- # Closes the connection.
- #
- def close
- if @closed.make_true
- @socket.close if @socket
- @socket = nil
- end
- end
-
- #
- # Generator that returns one line of the response body at a time (delimited by \r, \n,
- # or \r\n) until the response is fully consumed or the socket is closed.
- #
- def read_lines
- @reader.read_lines
- end
-
- #
- # Consumes the entire response body and returns it.
- #
- # @return [String] the response body
- #
- def read_all
- @reader.read_all
- end
-
- private
-
- # Build an HTTP request line and headers.
- def build_request(uri, headers)
- ret = "GET #{uri.request_uri} HTTP/1.1\r\n"
- ret << "Host: #{uri.host}\r\n"
- headers.each { |k, v|
- ret << "#{k}: #{v}\r\n"
- }
- ret + "\r\n"
- end
- end
-
- #
- # Used internally to send the HTTP request, including the proxy dialogue if necessary.
- # @private
- #
- class HTTPConnectionFactory
- def self.connect(uri, proxy, connect_timeout, read_timeout)
- if !proxy
- return open_socket(uri, connect_timeout)
- end
-
- socket = open_socket(proxy, connect_timeout)
- socket.write(build_proxy_request(uri, proxy))
-
- # temporarily create a reader just for the proxy connect response
- proxy_reader = HTTPResponseReader.new(socket, read_timeout)
- if proxy_reader.status != 200
- raise Errors::HTTPProxyError.new(proxy_reader.status)
- end
-
- # start using TLS at this point if appropriate
- if uri.scheme.downcase == 'https'
- wrap_socket_in_ssl_socket(socket)
- else
- socket
- end
- end
-
- private
-
- def self.open_socket(uri, connect_timeout)
- if uri.scheme.downcase == 'https'
- Socketry::SSL::Socket.connect(uri.host, uri.port, timeout: connect_timeout)
- else
- Socketry::TCP::Socket.connect(uri.host, uri.port, timeout: connect_timeout)
- end
- end
-
- # Build a proxy connection header.
- def self.build_proxy_request(uri, proxy)
- ret = "CONNECT #{uri.host}:#{uri.port} HTTP/1.1\r\n"
- ret << "Host: #{uri.host}:#{uri.port}\r\n"
- if proxy.user || proxy.password
- encoded_credentials = Base64.strict_encode64([proxy.user || '', proxy.password || ''].join(":"))
- ret << "Proxy-Authorization: Basic #{encoded_credentials}\r\n"
- end
- ret << "\r\n"
- ret
- end
-
- def self.wrap_socket_in_ssl_socket(socket)
- io = IO.try_convert(socket)
- ssl_sock = OpenSSL::SSL::SSLSocket.new(io, OpenSSL::SSL::SSLContext.new)
- ssl_sock.connect
- Socketry::SSL::Socket.new.from_socket(ssl_sock)
- end
- end
-
- #
- # Used internally to read the HTTP response, either all at once or as a stream of text lines.
- # Incoming data is fed into an instance of HTTPTools::Parser, which gives us the header and
- # chunks of the body via callbacks.
- # @private
- #
- class HTTPResponseReader
- DEFAULT_CHUNK_SIZE = 10000
-
- attr_reader :status, :headers
-
- def initialize(socket, read_timeout)
- @socket = socket
- @read_timeout = read_timeout
- @parser = HTTPTools::Parser.new
- @buffer = ""
- @done = false
- @lock = Mutex.new
-
- # Provide callbacks for the Parser to give us the headers and body. This has to be done
- # before we start piping any data into the parser.
- have_headers = false
- @parser.on(:header) do
- have_headers = true
- end
- @parser.on(:stream) do |data|
- @lock.synchronize { @buffer << data } # synchronize because we're called from another thread in Socketry
- end
- @parser.on(:finish) do
- @lock.synchronize { @done = true }
- end
-
- # Block until the status code and headers have been successfully read.
- while !have_headers
- raise EOFError if !read_chunk_into_buffer
- end
- @headers = Hash[@parser.header.map { |k,v| [k.downcase, v] }]
- @status = @parser.status_code
- end
-
- def read_lines
- Enumerator.new do |gen|
- loop do
- line = read_line
- break if line.nil?
- gen.yield line
- end
- end
- end
-
- def read_all
- while read_chunk_into_buffer
- end
- @buffer
- end
-
- private
-
- # Attempt to read some more data from the socket. Return true if successful, false if EOF.
- # A read timeout will result in an exception from Socketry's readpartial method.
- def read_chunk_into_buffer
- # If @done is set, it means the Parser has signaled end of response body
- @lock.synchronize { return false if @done }
- begin
- data = @socket.readpartial(DEFAULT_CHUNK_SIZE, timeout: @read_timeout)
- rescue Socketry::TimeoutError
- # We rethrow this as our own type so the caller doesn't have to know the Socketry API
- raise Errors::ReadTimeoutError.new(@read_timeout)
- end
- return false if data == :eof
- @parser << data
- # We are piping the content through the parser so that it can handle things like chunked
- # encoding for us. The content ends up being appended to @buffer via our callback.
- true
- end
-
- # Extract the next line of text from the read buffer, refilling the buffer as needed.
- def read_line
- loop do
- @lock.synchronize do
- i = @buffer.index(/[\r\n]/)
- if !i.nil?
- i += 1 if (@buffer[i] == "\r" && i < @buffer.length - 1 && @buffer[i + 1] == "\n")
- return @buffer.slice!(0, i + 1).force_encoding(Encoding::UTF_8)
- end
- end
- return nil if !read_chunk_into_buffer
- end
- end
- end
- end
- end
@@ -1,263 +0,0 @@
- require "ld-eventsource/impl/streaming_http"
- require "socketry"
- require "http_stub"
-
- #
- # End-to-end tests of HTTP requests against a real server
- #
- describe SSE::Impl::StreamingHTTPConnection do
- subject { SSE::Impl::StreamingHTTPConnection }
-
- def with_connection(cxn)
- begin
- yield cxn
- ensure
- cxn.close
- end
- end
-
- it "makes HTTP connection and sends request" do
- with_server do |server|
- requests = Queue.new
- server.setup_response("/foo") do |req,res|
- requests << req
- res.status = 200
- end
- headers = {
- "Accept" => "text/plain"
- }
- with_connection(subject.new(server.base_uri.merge("/foo?bar"), headers: headers)) do
- received_req = requests.pop
- expect(received_req.unparsed_uri).to eq("/foo?bar")
- expect(received_req.header).to eq({
- "accept" => ["text/plain"],
- "host" => [server.base_uri.host]
- })
- end
- end
- end
-
- it "receives response status" do
- with_server do |server|
- server.setup_response("/foo") do |req,res|
- res.status = 204
- end
- with_connection(subject.new(server.base_uri.merge("/foo"))) do |cxn|
- expect(cxn.status).to eq(204)
- end
- end
- end
-
- it "receives response headers" do
- with_server do |server|
- server.setup_response("/foo") do |req,res|
- res["Content-Type"] = "application/json"
- end
- with_connection(subject.new(server.base_uri.merge("/foo"))) do |cxn|
- expect(cxn.headers["content-type"]).to eq("application/json")
- end
- end
- end
-
- it "can read response as lines" do
- body = <<-EOT
- This is
- a response
- EOT
- with_server do |server|
- server.setup_response("/foo") do |req,res|
- res.body = body
- end
- with_connection(subject.new(server.base_uri.merge("/foo"))) do |cxn|
- lines = cxn.read_lines
- expect(lines.next).to eq("This is\n")
- expect(lines.next).to eq("a response\n")
- end
- end
- end
-
- it "can read entire response body" do
- body = <<-EOT
- This is
- a response
- EOT
- with_server do |server|
- server.setup_response("/foo") do |req,res|
- res.body = body
- end
- with_connection(subject.new(server.base_uri.merge("/foo"))) do |cxn|
- read_body = cxn.read_all
- expect(read_body).to eq("This is\na response\n")
- end
- end
- end
-
- it "enforces read timeout" do
- with_server do |server|
- server.setup_response("/") do |req,res|
- sleep(2)
- res.status = 200
- end
- expect { subject.new(server.base_uri, read_timeout: 0.25) }.to raise_error(SSE::Errors::ReadTimeoutError)
- end
- end
-
- it "connects to HTTP server through proxy" do
- body = "hi"
- with_server do |server|
- server.setup_response("/") do |req,res|
- res.body = body
- end
- with_server(StubProxyServer.new) do |proxy|
- with_connection(subject.new(server.base_uri, proxy: proxy.base_uri)) do |cxn|
- read_body = cxn.read_all
- expect(read_body).to eq("hi")
- expect(proxy.request_count).to eq(1)
- end
- end
- end
- end
-
- it "throws error if proxy responds with error status" do
- with_server do |server|
- server.setup_response("/") do |req,res|
- res.body = body
- end
- with_server(StubProxyServer.new) do |proxy|
- proxy.connect_status = 403
- expect { subject.new(server.base_uri, proxy: proxy.base_uri) }.to raise_error(SSE::Errors::HTTPProxyError)
- end
- end
- end
-
- # The following 2 tests were originally written to connect to an embedded HTTPS server made with
- # WEBrick. Unfortunately, some unknown problem prevents WEBrick's self-signed certificate feature
- # from working in JRuby 9.1 (but not in any other Ruby version). Therefore these tests currently
- # hit an external URL.
-
- it "connects to HTTPS server" do
- with_connection(subject.new(URI("https://app.launchdarkly.com"))) do |cxn|
- expect(cxn.status).to eq 200
- end
- end
-
- it "connects to HTTPS server through proxy" do
- with_server(StubProxyServer.new) do |proxy|
- with_connection(subject.new(URI("https://app.launchdarkly.com"), proxy: proxy.base_uri)) do |cxn|
- expect(cxn.status).to eq 200
- expect(proxy.request_count).to eq(1)
- end
- end
- end
-
- #
- # Tests of response parsing functionality without a real HTTP request
- #
- describe SSE::Impl::HTTPResponseReader do
- subject { SSE::Impl::HTTPResponseReader }
-
- let(:simple_response) { <<-EOT
- HTTP/1.1 200 OK
- Cache-Control: no-cache
- Content-Type: text/event-stream
-
- line1\r
- line2
- \r
- EOT
- }
-
- def make_chunks(str)
- # arbitrarily split content into 5-character blocks
- str.scan(/.{1,5}/m).to_enum
- end
-
- def mock_socket_without_timeout(chunks)
- mock_socket(chunks) { :eof }
- end
-
- def mock_socket_with_timeout(chunks)
- mock_socket(chunks) { raise Socketry::TimeoutError }
- end
-
- def mock_socket(chunks)
- sock = double
- allow(sock).to receive(:readpartial) do
- begin
- chunks.next
- rescue StopIteration
- yield
- end
- end
- sock
- end
-
- it "parses status code" do
- socket = mock_socket_without_timeout(make_chunks(simple_response))
- reader = subject.new(socket, 0)
- expect(reader.status).to eq(200)
- end
-
- it "parses headers" do
- socket = mock_socket_without_timeout(make_chunks(simple_response))
- reader = subject.new(socket, 0)
- expect(reader.headers).to eq({
- 'cache-control' => 'no-cache',
- 'content-type' => 'text/event-stream'
- })
- end
-
- it "can read entire response body" do
- socket = mock_socket_without_timeout(make_chunks(simple_response))
- reader = subject.new(socket, 0)
- expect(reader.read_all).to eq("line1\r\nline2\n\r\n")
- end
-
- it "can read response body as lines" do
- socket = mock_socket_without_timeout(make_chunks(simple_response))
- reader = subject.new(socket, 0)
- expect(reader.read_lines.to_a).to eq([
- "line1\r\n",
- "line2\n",
- "\r\n"
- ])
- end
-
- it "handles chunked encoding" do
- chunked_response = <<-EOT
- HTTP/1.1 200 OK
- Content-Type: text/plain
- Transfer-Encoding: chunked
-
- 6\r
- things\r
- A\r
- and stuff\r
- 0\r
- \r
- EOT
- socket = mock_socket_without_timeout(make_chunks(chunked_response))
- reader = subject.new(socket, 0)
- expect(reader.read_all).to eq("things and stuff")
- end
-
- it "raises error if response ends without complete headers" do
- malformed_response = <<-EOT
- HTTP/1.1 200 OK
- Cache-Control: no-cache
- EOT
- socket = mock_socket_without_timeout(make_chunks(malformed_response))
- expect { subject.new(socket, 0) }.to raise_error(EOFError)
- end
-
- it "throws timeout if thrown by socket read" do
- socket = mock_socket_with_timeout(make_chunks(simple_response))
- reader = subject.new(socket, 0)
- lines = reader.read_lines
- lines.next
- lines.next
- lines.next
- expect { lines.next }.to raise_error(SSE::Errors::ReadTimeoutError)
- end
- end