ld-eventsource 2.0.0.pre.beta.1 → 2.1.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
- SHA1:
- metadata.gz: abed7a7f61d7f731895a05d722273996e683fab0
- data.tar.gz: aa41fa22d28b66bd6bb6585f4030b06cb3b0dc05
+ SHA256:
+ metadata.gz: 14623ca76eeb863804646a598dec0a4f368bdd4fbf8875b7d5bb1be62c1845bd
+ data.tar.gz: 9c9d521d4e10f01f93f04431acfbbe2821434e99dd2738c09726827a84a96ed2
  SHA512:
- metadata.gz: 6597dfb8e3c7b1d4103ea18c44bb7d83a598b33565f79403fbec790d361c97ad1988b11027229e20cab4ec9e3895ad47faee37949eb1829a479d15b941f5d3e0
- data.tar.gz: 910c3a810cc7534d316910dc06b95d311a3d22a659956a68d78c31a9f78b3210e9f019e94851e71b15e50271c74e1257735e1f57097d9c52f209acb82d837ac9
+ metadata.gz: 4fccea73f41283286656e7a8ad8d10c72e0175b68a34499008d38d897d9a8828000d3f9cdb582dca682e182d1d0045240d0522fd9e28e72401410aad668a603d
+ data.tar.gz: c0756a730b152c06e7a1497703d1acdc0216a9ad8938798914fda239011d34d4c5bf521d51f3cf4f06829e6830befb550d9a47bf6d9fa19bda9ee3e2ba08972b
data/README.md CHANGED
@@ -10,7 +10,7 @@ Parts of this code are based on https://github.com/Tonkpils/celluloid-eventsourc
  Supported Ruby versions
  -----------------------
 
- This gem has a minimum Ruby version of 2.4, or 9.2 for JRuby.
+ This gem has a minimum Ruby version of 2.5, or 9.2 for JRuby.
 
  Quick setup
  -----------
data/lib/ld-eventsource.rb CHANGED
@@ -1,4 +1,5 @@
  require "ld-eventsource/impl/backoff"
+ require "ld-eventsource/impl/buffered_line_reader"
  require "ld-eventsource/impl/event_parser"
  require "ld-eventsource/events"
  require "ld-eventsource/errors"
data/lib/ld-eventsource/client.rb CHANGED
@@ -128,11 +129,12 @@ module SSE
  read: read_timeout,
  connect: connect_timeout
  })
- @buffer = ""
+ @cxn = nil
  @lock = Mutex.new
 
  @backoff = Impl::Backoff.new(reconnect_time || DEFAULT_RECONNECT_TIME, MAX_RECONNECT_TIME,
  reconnect_reset_interval: reconnect_reset_interval)
+ @first_attempt = true
 
  @on = { event: ->(_) {}, error: ->(_) {} }
  @last_id = last_event_id
@@ -190,51 +192,27 @@ module SSE
  end
  end
 
+ #
+ # Tests whether the client has been shut down by a call to {Client#close}.
+ #
+ # @return [Boolean] true if the client has been shut down
+ #
+ def closed?
+ @stopped.value
+ end
+
  private
 
  def reset_http
  @http_client.close if !@http_client.nil?
- @cxn = nil
- @buffer = ""
- end
-
- def read_lines
- Enumerator.new do |gen|
- loop do
- line = read_line
- break if line.nil?
- gen.yield line
- end
- end
- end
-
- def read_line
- loop do
- @lock.synchronize do
- i = @buffer.index(/[\r\n]/)
- if !i.nil? && !(i == @buffer.length - 1 && @buffer[i] == "\r")
- i += 1 if (@buffer[i] == "\r" && @buffer[i + 1] == "\n")
- return @buffer.slice!(0, i + 1).force_encoding(Encoding::UTF_8)
- end
- end
- return nil if !read_chunk_into_buffer
- end
+ close_connection
  end
 
- def read_chunk_into_buffer
- # If @done is set, it means the Parser has signaled end of response body
- @lock.synchronize { return false if @done }
- begin
- data = @cxn.readpartial
- rescue HTTP::TimeoutError
- # We rethrow this as our own type so the caller doesn't have to know the httprb API
- raise Errors::ReadTimeoutError.new(@read_timeout)
+ def close_connection
+ @lock.synchronize do
+ @cxn.connection.close if !@cxn.nil?
+ @cxn = nil
  end
- return false if data == nil
- @buffer << data
- # We are piping the content through the parser so that it can handle things like chunked
- # encoding for us. The content ends up being appended to @buffer via our callback.
- true
  end
 
  def default_logger
@@ -246,13 +224,16 @@ module SSE
 
  def run_stream
  while !@stopped.value
- @cxn = nil
+ close_connection
  begin
- @cxn = connect
+ resp = connect
+ @lock.synchronize do
+ @cxn = resp
+ end
  # There's a potential race if close was called in the middle of the previous line, i.e. after we
  # connected but before @cxn was set. Checking the variable again is a bit clunky but avoids that.
  return if @stopped.value
- read_stream(@cxn) if !@cxn.nil?
+ read_stream(resp) if !resp.nil?
  rescue => e
  # When we deliberately close the connection, it will usually trigger an exception. The exact type
  # of exception depends on the specific Ruby runtime. But @stopped will always be set in this case.
@@ -274,7 +255,8 @@ module SSE
  def connect
  loop do
  return if @stopped.value
- interval = @backoff.next_interval
+ interval = @first_attempt ? 0 : @backoff.next_interval
+ @first_attempt = false
  if interval > 0
  @logger.info { "Will retry connection after #{'%.3f' % interval} seconds" }
  sleep(interval)
@@ -317,7 +299,24 @@ module SSE
  # it can automatically reset itself if enough time passes between failures.
  @backoff.mark_success
 
- event_parser = Impl::EventParser.new(read_lines)
+ chunks = Enumerator.new do |gen|
+ loop do
+ if @stopped.value
+ break
+ else
+ begin
+ data = cxn.readpartial
+ rescue HTTP::TimeoutError
+ # For historical reasons, we rethrow this as our own type
+ raise Errors::ReadTimeoutError.new(@read_timeout)
+ end
+ break if data.nil?
+ gen.yield data
+ end
+ end
+ end
+ event_parser = Impl::EventParser.new(Impl::BufferedLineReader.lines_from(chunks))
+
  event_parser.items.each do |item|
  return if @stopped.value
  case item
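
Taken together, the client.rb changes above move the connection handle under the mutex, skip the backoff delay on the very first attempt, and add a public closed? predicate. A minimal usage sketch follows; the endpoint URL is a placeholder, and the constructor options and callbacks are the ones exercised by the gem's own specs (which are no longer packaged as of this release):

    require "ld-eventsource"

    # "https://example.com/stream" is a hypothetical endpoint; headers,
    # reconnect_time, on_event and on_error are existing Client options.
    client = SSE::Client.new("https://example.com/stream", reconnect_time: 1) do |c|
      c.on_event { |event| puts "#{event.type}: #{event.data}" }
      c.on_error { |error| warn "stream error: #{error.inspect}" }
    end

    # Shut the stream down and observe the new predicate:
    client.close
    client.closed?  # => true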
data/lib/ld-eventsource/impl/buffered_line_reader.rb ADDED
@@ -0,0 +1,73 @@
+
+ module SSE
+ module Impl
+ class BufferedLineReader
+ #
+ # Reads a series of data chunks from an enumerator, and returns an enumerator that
+ # parses/aggregates these into text lines. The line terminator may be CR, LF, or
+ # CRLF for each line; terminators are not included in the returned lines. When the
+ # input data runs out, the output enumerator ends and does not include any partially
+ # completed line.
+ #
+ # @param [Enumerator] chunks an enumerator that will yield strings from a stream
+ # @return [Enumerator] an enumerator that will yield one line at a time
+ #
+ def self.lines_from(chunks)
+ buffer = ""
+ position = 0
+ line_start = 0
+ last_char_was_cr = false
+
+ Enumerator.new do |gen|
+ chunks.each do |chunk|
+ buffer << chunk
+
+ loop do
+ # Search for a line break in any part of the buffer that we haven't yet seen.
+ i = buffer.index(/[\r\n]/, position)
+ if i.nil?
+ # There isn't a line break yet, so we'll keep accumulating data in the buffer, using
+ # position to keep track of where we left off scanning. We can also discard any previously
+ # parsed lines from the buffer at this point.
+ if line_start > 0
+ buffer.slice!(0, line_start)
+ line_start = 0
+ end
+ position = buffer.length
+ break
+ end
+
+ ch = buffer[i]
+ if i == 0 && ch == "\n" && last_char_was_cr
+ # This is just the dangling LF of a CRLF pair
+ last_char_was_cr = false
+ i += 1
+ position = i
+ line_start = i
+ next
+ end
+
+ line = buffer[line_start, i - line_start]
+ last_char_was_cr = false
+ i += 1
+ if ch == "\r"
+ if i == buffer.length
+ last_char_was_cr = true # We'll break the line here, but be on watch for a dangling LF
+ elsif buffer[i] == "\n"
+ i += 1
+ end
+ end
+ if i == buffer.length
+ buffer = ""
+ i = 0
+ end
+ position = i
+ line_start = i
+ gen.yield line
+ end
+ end
+ end
+ end
+ end
+ end
+ end
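
A hypothetical usage sketch of the new helper (the chunk strings are invented): chunk boundaries can fall anywhere, and CR, LF, and CRLF terminators are all stripped from the yielded lines.

    require "ld-eventsource/impl/buffered_line_reader"

    # Chunks split at arbitrary points, as a network read might deliver them.
    chunks = ["event: go\r\nda", "ta: foo\r", "\nid: a\r\n\r\n"].each

    SSE::Impl::BufferedLineReader.lines_from(chunks).each do |line|
      p line
    end
    # Prints: "event: go", "data: foo", "id: a", ""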
data/lib/ld-eventsource/impl/event_parser.rb CHANGED
@@ -20,7 +20,8 @@ module SSE
  #
  # Constructs an instance of EventParser.
  #
- # @param [Enumerator] lines an enumerator that will yield one line of text at a time
+ # @param [Enumerator] lines an enumerator that will yield one line of text at a time;
+ # the lines should not include line terminators
  #
  def initialize(lines)
  @lines = lines
@@ -31,7 +32,6 @@ module SSE
  def items
  Enumerator.new do |gen|
  @lines.each do |line|
- line.chomp!
  if line.empty?
  event = maybe_create_event
  reset_buffers
@@ -53,6 +53,7 @@ module SSE
  @id = nil
  @type = nil
  @data = ""
+ @have_data = false
  end
 
  def process_field(name, value)
@@ -60,8 +61,9 @@ module SSE
  when "event"
  @type = value.to_sym
  when "data"
- @data << "\n" if !@data.empty?
+ @data << "\n" if @have_data
  @data << value
+ @have_data = true
  when "id"
  @id = value
  when "retry"
@@ -73,7 +75,7 @@ module SSE
  end
 
  def maybe_create_event
- return nil if @data.empty?
+ return nil if !@have_data
  StreamEvent.new(@type || :message, @data, @id)
  end
  end
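
With @have_data, an event whose data field is explicitly empty is no longer discarded, and terminator stripping is now BufferedLineReader's job rather than the parser's. An illustrative sketch, assuming the field-splitting code (not shown in this diff) treats a bare "data:" line as an empty data field per the SSE convention:

    require "ld-eventsource/impl/event_parser"

    # Lines are supplied without terminators, per the updated contract.
    lines = [
      "event: heartbeat",
      "data:",   # empty payload; previously this event would have been dropped
      ""         # blank line dispatches the event
    ]
    parser = SSE::Impl::EventParser.new(lines)
    parser.items.each { |item| p item }
    # => a StreamEvent with type :heartbeat, data "", id nil (representation approximate)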
data/lib/ld-eventsource/version.rb CHANGED
@@ -1,3 +1,3 @@
  module SSE
- VERSION = "2.0.0-beta.1"
+ VERSION = "2.1.1"
  end
metadata CHANGED
@@ -1,29 +1,29 @@
  --- !ruby/object:Gem::Specification
  name: ld-eventsource
  version: !ruby/object:Gem::Version
- version: 2.0.0.pre.beta.1
+ version: 2.1.1
  platform: ruby
  authors:
  - LaunchDarkly
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-01-22 00:00:00.000000000 Z
+ date: 2021-10-12 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - '='
  - !ruby/object:Gem::Version
- version: '1.7'
+ version: 2.2.10
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - '='
  - !ruby/object:Gem::Version
- version: '1.7'
+ version: 2.2.10
  - !ruby/object:Gem::Dependency
  name: rspec
  requirement: !ruby/object:Gem::Requirement
@@ -52,6 +52,20 @@ dependencies:
  - - "~>"
  - !ruby/object:Gem::Version
  version: 0.3.0
+ - !ruby/object:Gem::Dependency
+ name: webrick
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '1.7'
+ type: :development
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '1.7'
  - !ruby/object:Gem::Dependency
  name: concurrent-ruby
  requirement: !ruby/object:Gem::Requirement
@@ -70,16 +84,22 @@ dependencies:
  name: http
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - ">="
  - !ruby/object:Gem::Version
  version: 4.4.1
+ - - "<"
+ - !ruby/object:Gem::Version
+ version: 6.0.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - ">="
  - !ruby/object:Gem::Version
  version: 4.4.1
+ - - "<"
+ - !ruby/object:Gem::Version
+ version: 6.0.0
  description: LaunchDarkly SSE client for Ruby
  email:
  - team@launchdarkly.com
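
The http runtime dependency is relaxed from "~> 4.4.1" to an explicit range. For reference, the same constraint written out in Ruby dependency-declaration form (hypothetical consumer/gemspec code, not part of this gem):

    # Gemfile of a hypothetical consuming project; matches the metadata above.
    gem "http", ">= 4.4.1", "< 6.0.0"

    # Equivalent gemspec declaration:
    spec.add_runtime_dependency "http", ">= 4.4.1", "< 6.0.0"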
@@ -87,28 +107,16 @@ executables: []
  extensions: []
  extra_rdoc_files: []
  files:
- - ".circleci/config.yml"
- - ".gitignore"
- - ".ldrelease/config.yml"
- - CHANGELOG.md
- - Gemfile
- - Gemfile.lock
  - LICENSE
  - README.md
- - ld-eventsource.gemspec
  - lib/ld-eventsource.rb
  - lib/ld-eventsource/client.rb
  - lib/ld-eventsource/errors.rb
  - lib/ld-eventsource/events.rb
  - lib/ld-eventsource/impl/backoff.rb
+ - lib/ld-eventsource/impl/buffered_line_reader.rb
  - lib/ld-eventsource/impl/event_parser.rb
  - lib/ld-eventsource/version.rb
- - scripts/gendocs.sh
- - scripts/release.sh
- - spec/backoff_spec.rb
- - spec/client_spec.rb
- - spec/event_parser_spec.rb
- - spec/http_stub.rb
  homepage: https://github.com/launchdarkly/ruby-eventsource
  licenses:
  - Apache-2.0
@@ -124,17 +132,12 @@ required_ruby_version: !ruby/object:Gem::Requirement
  version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
- - - ">"
+ - - ">="
  - !ruby/object:Gem::Version
- version: 1.3.1
+ version: '0'
  requirements: []
- rubyforge_project:
- rubygems_version: 2.5.2.3
+ rubygems_version: 3.2.29
  signing_key:
  specification_version: 4
  summary: LaunchDarkly SSE client
- test_files:
- - spec/backoff_spec.rb
- - spec/client_spec.rb
- - spec/event_parser_spec.rb
- - spec/http_stub.rb
+ test_files: []
data/.circleci/config.yml DELETED
@@ -1,51 +0,0 @@
1
- version: 2
2
-
3
- workflows:
4
- version: 2
5
- test:
6
- jobs:
7
- - test-2.4
8
- - test-2.5
9
- - test-2.6
10
- - test-2.7
11
- - test-jruby-9.2
12
-
13
- ruby-docker-template: &ruby-docker-template
14
- steps:
15
- - checkout
16
- - run: |
17
- if [[ $CIRCLE_JOB == test-jruby* ]]; then
18
- gem install jruby-openssl; # required by bundler, no effect on Ruby MRI
19
- fi
20
- - run: sudo apt-get update -y && sudo apt-get install -y build-essential
21
- - run: ruby -v
22
- - run: gem install bundler -v "~> 1.17"
23
- - run: bundle install
24
- - run: mkdir ./rspec
25
- - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec
26
- - store_test_results:
27
- path: ./rspec
28
- - store_artifacts:
29
- path: ./rspec
30
-
31
- jobs:
32
- test-2.4:
33
- <<: *ruby-docker-template
34
- docker:
35
- - image: circleci/ruby:2.4
36
- test-2.5:
37
- <<: *ruby-docker-template
38
- docker:
39
- - image: circleci/ruby:2.5
40
- test-2.6:
41
- <<: *ruby-docker-template
42
- docker:
43
- - image: circleci/ruby:2.6
44
- test-2.7:
45
- <<: *ruby-docker-template
46
- docker:
47
- - image: circleci/ruby:2.7
48
- test-jruby-9.2:
49
- <<: *ruby-docker-template
50
- docker:
51
- - image: circleci/jruby:9.2-jdk
data/.gitignore DELETED
@@ -1,16 +0,0 @@
1
- /.bundle/
2
- /.yardoc
3
- /_yardoc/
4
- /coverage/
5
- /doc/
6
- /pkg/
7
- /spec/reports/
8
- /tmp/
9
- *.bundle
10
- *.so
11
- *.o
12
- *.a
13
- mkmf.log
14
- *.gem
15
- .DS_Store
16
- rspec
data/.ldrelease/config.yml DELETED
@@ -1,11 +0,0 @@
1
- repo:
2
- public: ruby-eventsource
3
-
4
- publications:
5
- - url: https://rubygems.org/gems/ld-eventsource
6
- description: RubyGems
7
- - url: https://www.rubydoc.info/gems/ld-eventsource
8
- description: documentation
9
-
10
- template:
11
- name: ruby
data/CHANGELOG.md DELETED
@@ -1,20 +0,0 @@
1
- # Change log
2
-
3
- All notable changes to the LaunchDarkly SSE Client for Ruby will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org).
4
-
5
- ## [1.0.3] - 2020-03-17
6
- ### Fixed:
7
- - The backoff delay logic for reconnecting after a stream failure was broken so that if a failure occurred after a stream had been active for at least `reconnect_reset_interval` (default 60 seconds), retries would use _no_ delay, potentially causing a flood of requests and a spike in CPU usage.
8
-
9
- ## [1.0.2] - 2020-03-10
10
- ### Removed:
11
- - Removed an unused dependency on `rake`. There are no other changes in this release.
12
-
13
-
14
- ## [1.0.1] - 2019-07-10
15
- ### Fixed:
16
- - Calling `close` on the client could cause a misleading warning message in the log, such as `Unexpected error from event source: #<IOError: stream closed in another thread>`.
17
-
18
- ## [1.0.0] - 2019-01-03
19
-
20
- Initial release.
data/Gemfile DELETED
@@ -1,3 +0,0 @@
1
- source "https://rubygems.org"
2
-
3
- gemspec
data/Gemfile.lock DELETED
@@ -1,65 +0,0 @@
1
- PATH
2
- remote: .
3
- specs:
4
- ld-eventsource (2.0.0.pre.beta.1)
5
- concurrent-ruby (~> 1.0)
6
- http (~> 4.4.1)
7
-
8
- GEM
9
- remote: https://rubygems.org/
10
- specs:
11
- addressable (2.7.0)
12
- public_suffix (>= 2.0.2, < 5.0)
13
- concurrent-ruby (1.1.8)
14
- diff-lcs (1.3)
15
- domain_name (0.5.20190701)
16
- unf (>= 0.0.5, < 1.0.0)
17
- ffi (1.14.2)
18
- ffi (1.14.2-java)
19
- ffi-compiler (1.0.1)
20
- ffi (>= 1.0.0)
21
- rake
22
- http (4.4.1)
23
- addressable (~> 2.3)
24
- http-cookie (~> 1.0)
25
- http-form_data (~> 2.2)
26
- http-parser (~> 1.2.0)
27
- http-cookie (1.0.3)
28
- domain_name (~> 0.5)
29
- http-form_data (2.3.0)
30
- http-parser (1.2.3)
31
- ffi-compiler (>= 1.0, < 2.0)
32
- public_suffix (4.0.6)
33
- rake (13.0.3)
34
- rspec (3.7.0)
35
- rspec-core (~> 3.7.0)
36
- rspec-expectations (~> 3.7.0)
37
- rspec-mocks (~> 3.7.0)
38
- rspec-core (3.7.1)
39
- rspec-support (~> 3.7.0)
40
- rspec-expectations (3.7.0)
41
- diff-lcs (>= 1.2.0, < 2.0)
42
- rspec-support (~> 3.7.0)
43
- rspec-mocks (3.7.0)
44
- diff-lcs (>= 1.2.0, < 2.0)
45
- rspec-support (~> 3.7.0)
46
- rspec-support (3.7.0)
47
- rspec_junit_formatter (0.3.0)
48
- rspec-core (>= 2, < 4, != 2.12.0)
49
- unf (0.1.4)
50
- unf_ext
51
- unf (0.1.4-java)
52
- unf_ext (0.0.7.7)
53
-
54
- PLATFORMS
55
- java
56
- ruby
57
-
58
- DEPENDENCIES
59
- bundler (~> 1.7)
60
- ld-eventsource!
61
- rspec (~> 3.2)
62
- rspec_junit_formatter (~> 0.3.0)
63
-
64
- BUNDLED WITH
65
- 1.17.3
data/ld-eventsource.gemspec DELETED
@@ -1,29 +0,0 @@
1
- # coding: utf-8
2
-
3
- lib = File.expand_path("../lib", __FILE__)
4
- $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
5
- require "ld-eventsource/version"
6
-
7
- # rubocop:disable Metrics/BlockLength
8
- Gem::Specification.new do |spec|
9
- spec.name = "ld-eventsource"
10
- spec.version = SSE::VERSION
11
- spec.authors = ["LaunchDarkly"]
12
- spec.email = ["team@launchdarkly.com"]
13
- spec.summary = "LaunchDarkly SSE client"
14
- spec.description = "LaunchDarkly SSE client for Ruby"
15
- spec.homepage = "https://github.com/launchdarkly/ruby-eventsource"
16
- spec.license = "Apache-2.0"
17
-
18
- spec.files = `git ls-files -z`.split("\x0")
19
- spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
20
- spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
21
- spec.require_paths = ["lib"]
22
-
23
- spec.add_development_dependency "bundler", "~> 1.7"
24
- spec.add_development_dependency "rspec", "~> 3.2"
25
- spec.add_development_dependency "rspec_junit_formatter", "~> 0.3.0"
26
-
27
- spec.add_runtime_dependency "concurrent-ruby", "~> 1.0"
28
- spec.add_runtime_dependency "http", "~> 4.4.1"
29
- end
data/scripts/gendocs.sh DELETED
@@ -1,12 +0,0 @@
1
- #!/bin/bash
2
-
3
- # Use this script to generate documentation locally in ./doc so it can be proofed before release.
4
- # After release, documentation will be visible at https://www.rubydoc.info/gems/ld-eventsource
5
-
6
- gem install --conservative yard
7
- gem install --conservative redcarpet # provides Markdown formatting
8
-
9
- # yard doesn't seem to do recursive directories, even though Ruby's Dir.glob supposedly recurses for "**"
10
- PATHS="lib/*.rb lib/**/*.rb lib/**/**/*.rb"
11
-
12
- yard doc --no-private --markup markdown --markup-provider redcarpet --embed-mixins $PATHS - README.md
data/scripts/release.sh DELETED
@@ -1,30 +0,0 @@
1
- #!/usr/bin/env bash
2
-
3
- # This script updates the version for the library and releases it to RubyGems
4
- # It will only work if you have the proper credentials set up in ~/.gem/credentials
5
-
6
- # It takes exactly one argument: the new version.
7
- # It should be run from the root of this git repo like this:
8
- # ./scripts/release.sh 4.0.9
9
-
10
- # When done you should commit and push the changes made.
11
-
12
- set -uxe
13
-
14
- VERSION=$1
15
- GEM_NAME=ld-eventsource
16
-
17
- echo "Starting $GEM_NAME release."
18
-
19
- # Update version in version.rb
20
- VERSION_RB_TEMP=./version.rb.tmp
21
- sed "s/VERSION =.*/VERSION = \"${VERSION}\"/g" lib/$GEM_NAME/version.rb > ${VERSION_RB_TEMP}
22
- mv ${VERSION_RB_TEMP} lib/$GEM_NAME/version.rb
23
-
24
- # Build Ruby gem
25
- gem build $GEM_NAME.gemspec
26
-
27
- # Publish Ruby gem
28
- gem push $GEM_NAME-${VERSION}.gem
29
-
30
- echo "Done with $GEM_NAME release"
data/spec/backoff_spec.rb DELETED
@@ -1,52 +0,0 @@
1
- require "ld-eventsource"
2
-
3
- require "http_stub"
4
-
5
- module SSE
6
- module Impl
7
- describe Backoff do
8
- it "increases exponentially with jitter" do
9
- initial = 1.5
10
- max = 60
11
- b = Backoff.new(initial, max)
12
- previous = 0
13
-
14
- for i in 1..6 do
15
- interval = b.next_interval
16
- expect(interval).to be > previous
17
- target = initial * (2 ** (i - 1))
18
- expect(interval).to be <= target
19
- expect(interval).to be >= target / 2
20
- previous = i
21
- end
22
-
23
- interval = b.next_interval
24
- expect(interval).to be >= previous
25
- expect(interval).to be <= max
26
- end
27
-
28
- it "resets to initial delay if reset threshold has elapsed" do
29
- initial = 1.5
30
- max = 60
31
- threshold = 2
32
- b = Backoff.new(initial, max, reconnect_reset_interval: threshold)
33
-
34
- for i in 1..6 do
35
- # just cause the backoff to increase quickly, don't actually do these delays
36
- b.next_interval
37
- end
38
-
39
- b.mark_success
40
- sleep(threshold + 0.001)
41
-
42
- interval = b.next_interval
43
- expect(interval).to be <= initial
44
- expect(interval).to be >= initial / 2
45
-
46
- interval = b.next_interval # make sure it continues increasing after that
47
- expect(interval).to be <= (initial * 2)
48
- expect(interval).to be >= initial
49
- end
50
- end
51
- end
52
- end
data/spec/client_spec.rb DELETED
@@ -1,429 +0,0 @@
1
- require "ld-eventsource"
2
-
3
- #
4
- # End-to-end tests of the SSE client against a real server
5
- #
6
- describe SSE::Client do
7
- subject { SSE::Client }
8
-
9
- let(:simple_event_1) { SSE::StreamEvent.new(:go, "foo", "a")}
10
- let(:simple_event_2) { SSE::StreamEvent.new(:stop, "bar", "b")}
11
- let(:simple_event_1_text) { <<-EOT
12
- event: go
13
- data: foo
14
- id: a
15
-
16
- EOT
17
- }
18
- let(:simple_event_2_text) { <<-EOT
19
- event: stop
20
- data: bar
21
- id: b
22
-
23
- EOT
24
- }
25
- let(:reconnect_asap) { 0.01 }
26
-
27
- def with_client(client)
28
- begin
29
- yield client
30
- ensure
31
- client.close
32
- end
33
- end
34
-
35
- def send_stream_content(res, content, keep_open:)
36
- res.content_type = "text/event-stream"
37
- res.status = 200
38
- res.chunked = true
39
- rd, wr = IO.pipe
40
- wr.write(content)
41
- res.body = rd
42
- if !keep_open
43
- wr.close
44
- end
45
- wr
46
- end
47
-
48
- it "sends expected headers" do
49
- with_server do |server|
50
- requests = Queue.new
51
- server.setup_response("/") do |req,res|
52
- requests << req
53
- send_stream_content(res, "", keep_open: true)
54
- end
55
-
56
- headers = { "Authorization" => "secret" }
57
-
58
- with_client(subject.new(server.base_uri, headers: headers)) do |client|
59
- received_req = requests.pop
60
- expect(received_req.header).to eq({
61
- "accept" => ["text/event-stream"],
62
- "cache-control" => ["no-cache"],
63
- "host" => ["127.0.0.1:" + server.port.to_s],
64
- "authorization" => ["secret"],
65
- "user-agent" => ["ruby-eventsource"],
66
- "content-length" => ["0"],
67
- "connection" => ["close"]
68
- })
69
- end
70
- end
71
- end
72
-
73
- it "sends initial Last-Event-Id if specified" do
74
- id = "xyz"
75
- with_server do |server|
76
- requests = Queue.new
77
- server.setup_response("/") do |req,res|
78
- requests << req
79
- send_stream_content(res, "", keep_open: true)
80
- end
81
-
82
- headers = { "Authorization" => "secret" }
83
-
84
- with_client(subject.new(server.base_uri, headers: headers, last_event_id: id)) do |client|
85
- received_req = requests.pop
86
- expect(received_req.header).to eq({
87
- "accept" => ["text/event-stream"],
88
- "cache-control" => ["no-cache"],
89
- "host" => ["127.0.0.1:" + server.port.to_s],
90
- "authorization" => ["secret"],
91
- "last-event-id" => [id],
92
- "user-agent" => ["ruby-eventsource"],
93
- "content-length" => ["0"],
94
- "connection" => ["close"]
95
- })
96
- end
97
- end
98
- end
99
-
100
- it "receives messages" do
101
- events_body = simple_event_1_text + simple_event_2_text
102
- with_server do |server|
103
- server.setup_response("/") do |req,res|
104
- send_stream_content(res, events_body, keep_open: true)
105
- end
106
-
107
- event_sink = Queue.new
108
- client = subject.new(server.base_uri) do |c|
109
- c.on_event { |event| event_sink << event }
110
- end
111
-
112
- with_client(client) do |client|
113
- expect(event_sink.pop).to eq(simple_event_1)
114
- expect(event_sink.pop).to eq(simple_event_2)
115
- end
116
- end
117
- end
118
-
119
- it "does not trigger an error when stream is closed" do
120
- events_body = simple_event_1_text + simple_event_2_text
121
- with_server do |server|
122
- server.setup_response("/") do |req,res|
123
- send_stream_content(res, events_body, keep_open: true)
124
- end
125
-
126
- event_sink = Queue.new
127
- error_sink = Queue.new
128
- client = subject.new(server.base_uri) do |c|
129
- c.on_event { |event| event_sink << event }
130
- c.on_error { |error| error_sink << error }
131
- end
132
-
133
- with_client(client) do |client|
134
- event_sink.pop # wait till we have definitely started reading the stream
135
- client.close
136
- sleep 0.25 # there's no way to really know when the stream thread has finished
137
- expect(error_sink.empty?).to be true
138
- end
139
- end
140
- end
141
-
142
- it "reconnects after error response" do
143
- events_body = simple_event_1_text
144
- with_server do |server|
145
- attempt = 0
146
- server.setup_response("/") do |req,res|
147
- attempt += 1
148
- if attempt == 1
149
- res.status = 500
150
- res.body = "sorry"
151
- res.keep_alive = false
152
- else
153
- send_stream_content(res, events_body, keep_open: true)
154
- end
155
- end
156
-
157
- event_sink = Queue.new
158
- error_sink = Queue.new
159
- client = subject.new(server.base_uri, reconnect_time: reconnect_asap) do |c|
160
- c.on_event { |event| event_sink << event }
161
- c.on_error { |error| error_sink << error }
162
- end
163
-
164
- with_client(client) do |client|
165
- expect(event_sink.pop).to eq(simple_event_1)
166
- expect(error_sink.pop).to eq(SSE::Errors::HTTPStatusError.new(500, "sorry"))
167
- expect(attempt).to eq 2
168
- end
169
- end
170
- end
171
-
172
- it "reconnects after invalid content type" do
173
- events_body = simple_event_1_text
174
- with_server do |server|
175
- attempt = 0
176
- server.setup_response("/") do |req,res|
177
- attempt += 1
178
- if attempt == 1
179
- res.status = 200
180
- res.content_type = "text/plain"
181
- res.body = "sorry"
182
- res.keep_alive = false
183
- else
184
- send_stream_content(res, events_body, keep_open: true)
185
- end
186
- end
187
-
188
- event_sink = Queue.new
189
- error_sink = Queue.new
190
- client = subject.new(server.base_uri, reconnect_time: reconnect_asap) do |c|
191
- c.on_event { |event| event_sink << event }
192
- c.on_error { |error| error_sink << error }
193
- end
194
-
195
- with_client(client) do |client|
196
- expect(event_sink.pop).to eq(simple_event_1)
197
- expect(error_sink.pop).to eq(SSE::Errors::HTTPContentTypeError.new("text/plain"))
198
- expect(attempt).to eq 2
199
- end
200
- end
201
- end
202
-
203
- it "reconnects after read timeout" do
204
- events_body = simple_event_1_text
205
- with_server do |server|
206
- attempt = 0
207
- server.setup_response("/") do |req,res|
208
- attempt += 1
209
- if attempt == 1
210
- sleep(1)
211
- end
212
- send_stream_content(res, events_body, keep_open: true)
213
- end
214
-
215
- event_sink = Queue.new
216
- client = subject.new(server.base_uri, reconnect_time: reconnect_asap, read_timeout: 0.25) do |c|
217
- c.on_event { |event| event_sink << event }
218
- end
219
-
220
- with_client(client) do |client|
221
- expect(event_sink.pop).to eq(simple_event_1)
222
- expect(attempt).to eq 2
223
- end
224
- end
225
- end
226
-
227
- it "reconnects if stream returns EOF" do
228
- with_server do |server|
229
- attempt = 0
230
- server.setup_response("/") do |req,res|
231
- attempt += 1
232
- send_stream_content(res, attempt == 1 ? simple_event_1_text : simple_event_2_text,
233
- keep_open: attempt == 2)
234
- end
235
-
236
- event_sink = Queue.new
237
- client = subject.new(server.base_uri, reconnect_time: reconnect_asap) do |c|
238
- c.on_event { |event| event_sink << event }
239
- end
240
-
241
- with_client(client) do |client|
242
- expect(event_sink.pop).to eq(simple_event_1)
243
- expect(event_sink.pop).to eq(simple_event_2)
244
- expect(attempt).to eq 2
245
- end
246
- end
247
- end
248
-
249
- it "sends ID of last received event, if any, when reconnecting" do
250
- with_server do |server|
251
- requests = Queue.new
252
- attempt = 0
253
- server.setup_response("/") do |req,res|
254
- requests << req
255
- attempt += 1
256
- send_stream_content(res, attempt == 1 ? simple_event_1_text : simple_event_2_text,
257
- keep_open: attempt == 2)
258
- end
259
-
260
- event_sink = Queue.new
261
- client = subject.new(server.base_uri, reconnect_time: reconnect_asap) do |c|
262
- c.on_event { |event| event_sink << event }
263
- end
264
-
265
- with_client(client) do |client|
266
- req1 = requests.pop
267
- req2 = requests.pop
268
- expect(req2.header["last-event-id"]).to eq([ simple_event_1.id ])
269
- end
270
- end
271
- end
272
-
273
- it "increases backoff delay if a failure happens within the reset threshold" do
274
- request_times = []
275
- max_requests = 5
276
- initial_interval = 0.25
277
-
278
- with_server do |server|
279
- attempt = 0
280
- server.setup_response("/") do |req,res|
281
- request_times << Time.now
282
- attempt += 1
283
- send_stream_content(res, simple_event_1_text, keep_open: attempt == max_requests)
284
- end
285
-
286
- event_sink = Queue.new
287
- client = subject.new(server.base_uri, reconnect_time: initial_interval) do |c|
288
- c.on_event { |event| event_sink << event }
289
- end
290
-
291
- with_client(client) do |client|
292
- last_interval = nil
293
- max_requests.times do |i|
294
- expect(event_sink.pop).to eq(simple_event_1)
295
- if i > 0
296
- interval = request_times[i] - request_times[i - 1]
297
- minimum_expected_interval = initial_interval * (2 ** (i - 1)) / 2
298
- expect(interval).to be >= minimum_expected_interval
299
- last_interval = interval
300
- end
301
- end
302
- end
303
- end
304
- end
305
-
306
- it "resets backoff delay if a failure happens after the reset threshold" do
307
- request_times = []
308
- request_end_times = []
309
- max_requests = 5
310
- threshold = 0.3
311
- initial_interval = 0.25
312
-
313
- with_server do |server|
314
- attempt = 0
315
- server.setup_response("/") do |req,res|
316
- request_times << Time.now
317
- attempt += 1
318
- stream = send_stream_content(res, simple_event_1_text, keep_open: true)
319
- Thread.new do
320
- sleep(threshold + 0.01)
321
- stream.close
322
- request_end_times << Time.now
323
- end
324
- end
325
-
326
- event_sink = Queue.new
327
- client = subject.new(server.base_uri, reconnect_time: initial_interval, reconnect_reset_interval: threshold) do |c|
328
- c.on_event { |event| event_sink << event }
329
- end
330
-
331
- with_client(client) do |client|
332
- last_interval = nil
333
- max_requests.times do |i|
334
- expect(event_sink.pop).to eq(simple_event_1)
335
- if i > 0
336
- interval = request_times[i] - request_end_times[i - 1]
337
- expect(interval).to be <= (initial_interval + 0.1)
338
- end
339
- end
340
- end
341
- end
342
- end
343
-
344
- it "can change initial reconnect delay based on directive from server" do
345
- request_times = []
346
- configured_interval = 1
347
- retry_ms = 100
348
-
349
- with_server do |server|
350
- attempt = 0
351
- server.setup_response("/") do |req,res|
352
- request_times << Time.now
353
- attempt += 1
354
- if attempt == 1
355
- send_stream_content(res, "retry: #{retry_ms}\n", keep_open: false)
356
- else
357
- send_stream_content(res, simple_event_1_text, keep_open: true)
358
- end
359
- end
360
-
361
- event_sink = Queue.new
362
- client = subject.new(server.base_uri, reconnect_time: configured_interval) do |c|
363
- c.on_event { |event| event_sink << event }
364
- end
365
-
366
- with_client(client) do |client|
367
- expect(event_sink.pop).to eq(simple_event_1)
368
- interval = request_times[1] - request_times[0]
369
- expect(interval).to be < 0.5
370
- end
371
- end
372
- end
373
-
374
- it "connects to HTTP server through proxy" do
375
- events_body = simple_event_1_text
376
- with_server do |server|
377
- server.setup_response("/") do |req,res|
378
- send_stream_content(res, events_body, keep_open: false)
379
- end
380
- with_server(StubProxyServer.new) do |proxy|
381
- event_sink = Queue.new
382
- client = subject.new(server.base_uri, proxy: proxy.base_uri) do |c|
383
- c.on_event { |event| event_sink << event }
384
- end
385
-
386
- with_client(client) do |client|
387
- expect(event_sink.pop).to eq(simple_event_1)
388
- expect(proxy.request_count).to eq(1)
389
- end
390
- end
391
- end
392
- end
393
-
394
- it "resets read timeout between events" do
395
- event_body = simple_event_1_text
396
- with_server do |server|
397
- attempt = 0
398
- server.setup_response("/") do |req,res|
399
- attempt += 1
400
- if attempt == 1
401
- stream = send_stream_content(res, event_body, keep_open: true)
402
- Thread.new do
403
- 2.times {
404
- # write within timeout interval
405
- sleep(0.75)
406
- stream.write(event_body)
407
- }
408
- # cause timeout
409
- sleep(1.25)
410
- end
411
- elsif attempt == 2
412
- send_stream_content(res, event_body, keep_open: false)
413
- end
414
- end
415
-
416
- event_sink = Queue.new
417
- client = subject.new(server.base_uri, reconnect_time: reconnect_asap, read_timeout: 1) do |c|
418
- c.on_event { |event| event_sink << event }
419
- end
420
-
421
- with_client(client) do |client|
422
- 4.times {
423
- expect(event_sink.pop).to eq(simple_event_1)
424
- }
425
- expect(attempt).to eq 2
426
- end
427
- end
428
- end
429
- end
data/spec/event_parser_spec.rb DELETED
@@ -1,100 +0,0 @@
1
- require "ld-eventsource/impl/event_parser"
2
-
3
- describe SSE::Impl::EventParser do
4
- subject { SSE::Impl::EventParser }
5
-
6
- it "parses an event with all fields" do
7
- lines = [
8
- "event: abc\r\n",
9
- "data: def\r\n",
10
- "id: 1\r\n",
11
- "\r\n"
12
- ]
13
- ep = subject.new(lines)
14
-
15
- expected_event = SSE::StreamEvent.new(:abc, "def", "1")
16
- output = ep.items.to_a
17
- expect(output).to eq([ expected_event ])
18
- end
19
-
20
- it "parses an event with only data" do
21
- lines = [
22
- "data: def\r\n",
23
- "\r\n"
24
- ]
25
- ep = subject.new(lines)
26
-
27
- expected_event = SSE::StreamEvent.new(:message, "def", nil)
28
- output = ep.items.to_a
29
- expect(output).to eq([ expected_event ])
30
- end
31
-
32
- it "parses an event with multi-line data" do
33
- lines = [
34
- "data: def\r\n",
35
- "data: ghi\r\n",
36
- "\r\n"
37
- ]
38
- ep = subject.new(lines)
39
-
40
- expected_event = SSE::StreamEvent.new(:message, "def\nghi", nil)
41
- output = ep.items.to_a
42
- expect(output).to eq([ expected_event ])
43
- end
44
-
45
- it "ignores comments" do
46
- lines = [
47
- ":",
48
- "data: def\r\n",
49
- ":",
50
- "\r\n"
51
- ]
52
- ep = subject.new(lines)
53
-
54
- expected_event = SSE::StreamEvent.new(:message, "def", nil)
55
- output = ep.items.to_a
56
- expect(output).to eq([ expected_event ])
57
- end
58
-
59
- it "parses reconnect interval" do
60
- lines = [
61
- "retry: 2500\r\n",
62
- "\r\n"
63
- ]
64
- ep = subject.new(lines)
65
-
66
- expected_item = SSE::Impl::SetRetryInterval.new(2500)
67
- output = ep.items.to_a
68
- expect(output).to eq([ expected_item ])
69
- end
70
-
71
- it "parses multiple events" do
72
- lines = [
73
- "event: abc\r\n",
74
- "data: def\r\n",
75
- "id: 1\r\n",
76
- "\r\n",
77
- "data: ghi\r\n",
78
- "\r\n"
79
- ]
80
- ep = subject.new(lines)
81
-
82
- expected_event_1 = SSE::StreamEvent.new(:abc, "def", "1")
83
- expected_event_2 = SSE::StreamEvent.new(:message, "ghi", nil)
84
- output = ep.items.to_a
85
- expect(output).to eq([ expected_event_1, expected_event_2 ])
86
- end
87
-
88
- it "ignores events with no data" do
89
- lines = [
90
- "event: nothing\r\n",
91
- "\r\n",
92
- "event: nada\r\n",
93
- "\r\n"
94
- ]
95
- ep = subject.new(lines)
96
-
97
- output = ep.items.to_a
98
- expect(output).to eq([])
99
- end
100
- end
data/spec/http_stub.rb DELETED
@@ -1,83 +0,0 @@
1
- require "webrick"
2
- require "webrick/httpproxy"
3
- require "webrick/https"
4
-
5
- class StubHTTPServer
6
- attr_reader :port
7
-
8
- def initialize
9
- @port = 50000
10
- begin
11
- @server = create_server(@port)
12
- rescue Errno::EADDRINUSE
13
- @port += 1
14
- retry
15
- end
16
- end
17
-
18
- def create_server(port)
19
- WEBrick::HTTPServer.new(
20
- BindAddress: '127.0.0.1',
21
- Port: port,
22
- AccessLog: [],
23
- Logger: NullLogger.new
24
- )
25
- end
26
-
27
- def start
28
- Thread.new { @server.start }
29
- end
30
-
31
- def stop
32
- @server.shutdown
33
- end
34
-
35
- def base_uri
36
- URI("http://127.0.0.1:#{@port}")
37
- end
38
-
39
- def setup_response(uri_path, &action)
40
- @server.mount_proc(uri_path, action)
41
- end
42
- end
43
-
44
- class StubProxyServer < StubHTTPServer
45
- attr_reader :request_count
46
- attr_accessor :connect_status
47
-
48
- def initialize
49
- super
50
- @request_count = 0
51
- end
52
-
53
- def create_server(port)
54
- WEBrick::HTTPProxyServer.new(
55
- BindAddress: '127.0.0.1',
56
- Port: port,
57
- AccessLog: [],
58
- Logger: NullLogger.new,
59
- ProxyContentHandler: proc do |req,res|
60
- if !@connect_status.nil?
61
- res.status = @connect_status
62
- end
63
- @request_count += 1
64
- end
65
- )
66
- end
67
- end
68
-
69
- class NullLogger
70
- def method_missing(*)
71
- self
72
- end
73
- end
74
-
75
- def with_server(server = nil)
76
- server = StubHTTPServer.new if server.nil?
77
- begin
78
- server.start
79
- yield server
80
- ensure
81
- server.stop
82
- end
83
- end