ld-eventsource 2.1.0 → 2.2.1

@@ -1,22 +0,0 @@
- repo:
-   public: ruby-eventsource
-
- publications:
-   - url: https://rubygems.org/gems/ld-eventsource
-     description: RubyGems
-   - url: https://www.rubydoc.info/gems/ld-eventsource
-     description: documentation
-
- releasableBranches:
-   - name: master
-     description: 2.x - based on the http gem
-   - name: 1.x
-     description: 1.x - based on the socketry gem
-
- circleci:
-   linux:
-     image: cimg/ruby:2.5
-     context: org-global
-
- template:
-   name: ruby
data/CHANGELOG.md DELETED
@@ -1,35 +0,0 @@
- # Change log
-
- All notable changes to the LaunchDarkly SSE Client for Ruby will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org).
-
- ## [2.0.1] - 2021-08-10
- ### Changed:
- - The dependency version constraint for the `http` gem is now looser: it allows 5.x versions as well as 4.x. The breaking changes in `http` v5.0.0 do not affect `ld-eventsource`.
- - The project's build now uses v2.2.10 of `bundler` due to known vulnerabilities in other versions.
- - `Gemfile.lock` has been removed from source control. As this is a library project, the lockfile never affected application code that used this gem, but only affected the gem's own CI build. It is preferable for the CI build to refer only to the gemspec so that it resolves dependencies the same way an application using this gem would, rather than using pinned dependencies that an application would not use.
-
- ## [2.0.0] - 2021-01-26
- ### Added:
- - Added a `socket_factory` configuration option which can be used for socket creation by the HTTP client if provided. The value of `socket_factory` must be an object providing an `open(uri, timeout)` method and returning a connected socket.
-
- ### Changed:
- - Switched to the `http` gem instead of `socketry` and a custom HTTP client.
- - Dropped support for Ruby < version 2.5
- - Dropped support for JRuby < version 9.2
-
- ## [1.0.3] - 2020-03-17
- ### Fixed:
- - The backoff delay logic for reconnecting after a stream failure was broken so that if a failure occurred after a stream had been active for at least `reconnect_reset_interval` (default 60 seconds), retries would use _no_ delay, potentially causing a flood of requests and a spike in CPU usage.
-
- ## [1.0.2] - 2020-03-10
- ### Removed:
- - Removed an unused dependency on `rake`. There are no other changes in this release.
-
-
- ## [1.0.1] - 2019-07-10
- ### Fixed:
- - Calling `close` on the client could cause a misleading warning message in the log, such as `Unexpected error from event source: #<IOError: stream closed in another thread>`.
-
- ## [1.0.0] - 2019-01-03
-
- Initial release.
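
The 2.0.0 changelog entry above describes the `socket_factory` option only by its contract: any object responding to `open(uri, timeout)` and returning a connected socket. A minimal sketch of such an object, assuming a plain TCP connection; the class name, the URI handling, and the example URL are illustrative, not part of the gem:

```ruby
require "socket"
require "uri"
require "ld-eventsource"

# Hypothetical factory satisfying the documented contract: respond to
# open(uri, timeout) and return a connected socket.
class PlainTCPSocketFactory
  def open(uri, timeout)
    uri = URI(uri) if uri.is_a?(String)  # assumption: uri may arrive as a String or URI
    Socket.tcp(uri.host, uri.port, connect_timeout: timeout)
  end
end

# Wiring it in through the option named in the changelog entry (placeholder URL).
client = SSE::Client.new("https://example.com/stream",
                         socket_factory: PlainTCPSocketFactory.new)
```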
data/Gemfile DELETED
@@ -1,3 +0,0 @@
- source "https://rubygems.org"
-
- gemspec
@@ -1,30 +0,0 @@
- # coding: utf-8
-
- lib = File.expand_path("../lib", __FILE__)
- $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
- require "ld-eventsource/version"
-
- # rubocop:disable Metrics/BlockLength
- Gem::Specification.new do |spec|
-   spec.name = "ld-eventsource"
-   spec.version = SSE::VERSION
-   spec.authors = ["LaunchDarkly"]
-   spec.email = ["team@launchdarkly.com"]
-   spec.summary = "LaunchDarkly SSE client"
-   spec.description = "LaunchDarkly SSE client for Ruby"
-   spec.homepage = "https://github.com/launchdarkly/ruby-eventsource"
-   spec.license = "Apache-2.0"
-
-   spec.files = `git ls-files -z`.split("\x0")
-   spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
-   spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
-   spec.require_paths = ["lib"]
-
-   spec.add_development_dependency "bundler", "2.2.10"
-   spec.add_development_dependency "rspec", "~> 3.2"
-   spec.add_development_dependency "rspec_junit_formatter", "~> 0.3.0"
-   spec.add_development_dependency "webrick", "~> 1.7"
-
-   spec.add_runtime_dependency "concurrent-ruby", "~> 1.0"
-   spec.add_runtime_dependency "http", ">= 4.4.1", "< 6.0.0"
- end
data/scripts/gendocs.sh DELETED
@@ -1,12 +0,0 @@
- #!/bin/bash
-
- # Use this script to generate documentation locally in ./doc so it can be proofed before release.
- # After release, documentation will be visible at https://www.rubydoc.info/gems/ld-eventsource
-
- gem install --conservative yard
- gem install --conservative redcarpet # provides Markdown formatting
-
- # yard doesn't seem to do recursive directories, even though Ruby's Dir.glob supposedly recurses for "**"
- PATHS="lib/*.rb lib/**/*.rb lib/**/**/*.rb"
-
- yard doc --no-private --markup markdown --markup-provider redcarpet --embed-mixins $PATHS - README.md
data/scripts/release.sh DELETED
@@ -1,30 +0,0 @@
- #!/usr/bin/env bash
-
- # This script updates the version for the library and releases it to RubyGems
- # It will only work if you have the proper credentials set up in ~/.gem/credentials
-
- # It takes exactly one argument: the new version.
- # It should be run from the root of this git repo like this:
- # ./scripts/release.sh 4.0.9
-
- # When done you should commit and push the changes made.
-
- set -uxe
-
- VERSION=$1
- GEM_NAME=ld-eventsource
-
- echo "Starting $GEM_NAME release."
-
- # Update version in version.rb
- VERSION_RB_TEMP=./version.rb.tmp
- sed "s/VERSION =.*/VERSION = \"${VERSION}\"/g" lib/$GEM_NAME/version.rb > ${VERSION_RB_TEMP}
- mv ${VERSION_RB_TEMP} lib/$GEM_NAME/version.rb
-
- # Build Ruby gem
- gem build $GEM_NAME.gemspec
-
- # Publish Ruby gem
- gem push $GEM_NAME-${VERSION}.gem
-
- echo "Done with $GEM_NAME release"
data/spec/backoff_spec.rb DELETED
@@ -1,52 +0,0 @@
- require "ld-eventsource"
-
- require "http_stub"
-
- module SSE
-   module Impl
-     describe Backoff do
-       it "increases exponentially with jitter" do
-         initial = 1.5
-         max = 60
-         b = Backoff.new(initial, max)
-         previous = 0
-
-         for i in 1..6 do
-           interval = b.next_interval
-           expect(interval).to be > previous
-           target = initial * (2 ** (i - 1))
-           expect(interval).to be <= target
-           expect(interval).to be >= target / 2
-           previous = i
-         end
-
-         interval = b.next_interval
-         expect(interval).to be >= previous
-         expect(interval).to be <= max
-       end
-
-       it "resets to initial delay if reset threshold has elapsed" do
-         initial = 1.5
-         max = 60
-         threshold = 2
-         b = Backoff.new(initial, max, reconnect_reset_interval: threshold)
-
-         for i in 1..6 do
-           # just cause the backoff to increase quickly, don't actually do these delays
-           b.next_interval
-         end
-
-         b.mark_success
-         sleep(threshold + 0.001)
-
-         interval = b.next_interval
-         expect(interval).to be <= initial
-         expect(interval).to be >= initial / 2
-
-         interval = b.next_interval # make sure it continues increasing after that
-         expect(interval).to be <= (initial * 2)
-         expect(interval).to be >= initial
-       end
-     end
-   end
- end
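
The spec above pins down the backoff contract: the target for attempt i is initial * 2^(i-1), capped at max, each actual delay is jittered to fall between half the target and the full target, and the schedule restarts after a connection has stayed up for `reconnect_reset_interval`. A short sketch of that schedule, written out only to make the asserted numbers concrete; it is not the gem's own `Backoff` implementation:

```ruby
# Sketch of the delay schedule the spec asserts, under the stated assumptions.
def jittered_backoff_intervals(initial, max, attempts)
  (1..attempts).map do |i|
    target = [initial * (2 ** (i - 1)), max].min
    # jitter: anywhere from half the target up to the full target
    target / 2 + rand * (target / 2)
  end
end

# With initial = 1.5 and max = 60, successive targets are
# 1.5, 3, 6, 12, 24, 48 seconds; each delay lands at 50-100% of its target.
p jittered_backoff_intervals(1.5, 60, 6)
```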
data/spec/client_spec.rb DELETED
@@ -1,444 +0,0 @@
- require "ld-eventsource"
-
- #
- # End-to-end tests of the SSE client against a real server
- #
- describe SSE::Client do
-   subject { SSE::Client }
-
-   let(:simple_event_1) { SSE::StreamEvent.new(:go, "foo", "a")}
-   let(:simple_event_2) { SSE::StreamEvent.new(:stop, "bar", "b")}
-   let(:simple_event_1_text) { <<-EOT
- event: go
- data: foo
- id: a
-
- EOT
-   }
-   let(:simple_event_2_text) { <<-EOT
- event: stop
- data: bar
- id: b
-
- EOT
-   }
-   let(:reconnect_asap) { 0.01 }
-
-   def with_client(client)
-     begin
-       yield client
-     ensure
-       client.close
-     end
-   end
-
-   def send_stream_content(res, content, keep_open:)
-     res.content_type = "text/event-stream"
-     res.status = 200
-     res.chunked = true
-     rd, wr = IO.pipe
-     wr.write(content)
-     res.body = rd
-     if !keep_open
-       wr.close
-     end
-     wr
-   end
-
-   it "sends expected headers" do
-     with_server do |server|
-       requests = Queue.new
-       server.setup_response("/") do |req,res|
-         requests << req
-         send_stream_content(res, "", keep_open: true)
-       end
-
-       headers = { "Authorization" => "secret" }
-
-       with_client(subject.new(server.base_uri, headers: headers)) do |client|
-         received_req = requests.pop
-         expect(received_req.header).to eq({
-           "accept" => ["text/event-stream"],
-           "cache-control" => ["no-cache"],
-           "host" => ["127.0.0.1:" + server.port.to_s],
-           "authorization" => ["secret"],
-           "user-agent" => ["ruby-eventsource"],
-           "content-length" => ["0"],
-           "connection" => ["close"]
-         })
-       end
-     end
-   end
-
-   it "sends initial Last-Event-Id if specified" do
-     id = "xyz"
-     with_server do |server|
-       requests = Queue.new
-       server.setup_response("/") do |req,res|
-         requests << req
-         send_stream_content(res, "", keep_open: true)
-       end
-
-       headers = { "Authorization" => "secret" }
-
-       with_client(subject.new(server.base_uri, headers: headers, last_event_id: id)) do |client|
-         received_req = requests.pop
-         expect(received_req.header).to eq({
-           "accept" => ["text/event-stream"],
-           "cache-control" => ["no-cache"],
-           "host" => ["127.0.0.1:" + server.port.to_s],
-           "authorization" => ["secret"],
-           "last-event-id" => [id],
-           "user-agent" => ["ruby-eventsource"],
-           "content-length" => ["0"],
-           "connection" => ["close"]
-         })
-       end
-     end
-   end
-
-   it "receives messages" do
-     events_body = simple_event_1_text + simple_event_2_text
-     with_server do |server|
-       server.setup_response("/") do |req,res|
-         send_stream_content(res, events_body, keep_open: true)
-       end
-
-       event_sink = Queue.new
-       client = subject.new(server.base_uri) do |c|
-         c.on_event { |event| event_sink << event }
-       end
-
-       with_client(client) do |client|
-         expect(event_sink.pop).to eq(simple_event_1)
-         expect(event_sink.pop).to eq(simple_event_2)
-       end
-     end
-   end
-
-   it "does not trigger an error when stream is closed" do
-     events_body = simple_event_1_text + simple_event_2_text
-     with_server do |server|
-       server.setup_response("/") do |req,res|
-         send_stream_content(res, events_body, keep_open: true)
-       end
-
-       event_sink = Queue.new
-       error_sink = Queue.new
-       client = subject.new(server.base_uri) do |c|
-         c.on_event { |event| event_sink << event }
-         c.on_error { |error| error_sink << error }
-       end
-
-       with_client(client) do |client|
-         event_sink.pop # wait till we have definitely started reading the stream
-         client.close
-         sleep 0.25 # there's no way to really know when the stream thread has finished
-         expect(error_sink.empty?).to be true
-       end
-     end
-   end
-
-   it "reconnects after error response" do
-     events_body = simple_event_1_text
-     with_server do |server|
-       attempt = 0
-       server.setup_response("/") do |req,res|
-         attempt += 1
-         if attempt == 1
-           res.status = 500
-           res.body = "sorry"
-           res.keep_alive = false
-         else
-           send_stream_content(res, events_body, keep_open: true)
-         end
-       end
-
-       event_sink = Queue.new
-       error_sink = Queue.new
-       client = subject.new(server.base_uri, reconnect_time: reconnect_asap) do |c|
-         c.on_event { |event| event_sink << event }
-         c.on_error { |error| error_sink << error }
-       end
-
-       with_client(client) do |client|
-         expect(event_sink.pop).to eq(simple_event_1)
-         expect(error_sink.pop).to eq(SSE::Errors::HTTPStatusError.new(500, "sorry"))
-         expect(attempt).to eq 2
-       end
-     end
-   end
-
-   it "reconnects after invalid content type" do
-     events_body = simple_event_1_text
-     with_server do |server|
-       attempt = 0
-       server.setup_response("/") do |req,res|
-         attempt += 1
-         if attempt == 1
-           res.status = 200
-           res.content_type = "text/plain"
-           res.body = "sorry"
-           res.keep_alive = false
-         else
-           send_stream_content(res, events_body, keep_open: true)
-         end
-       end
-
-       event_sink = Queue.new
-       error_sink = Queue.new
-       client = subject.new(server.base_uri, reconnect_time: reconnect_asap) do |c|
-         c.on_event { |event| event_sink << event }
-         c.on_error { |error| error_sink << error }
-       end
-
-       with_client(client) do |client|
-         expect(event_sink.pop).to eq(simple_event_1)
-         expect(error_sink.pop).to eq(SSE::Errors::HTTPContentTypeError.new("text/plain"))
-         expect(attempt).to eq 2
-       end
-     end
-   end
-
-   it "reconnects after read timeout" do
-     events_body = simple_event_1_text
-     with_server do |server|
-       attempt = 0
-       server.setup_response("/") do |req,res|
-         attempt += 1
-         if attempt == 1
-           sleep(1)
-         end
-         send_stream_content(res, events_body, keep_open: true)
-       end
-
-       event_sink = Queue.new
-       client = subject.new(server.base_uri, reconnect_time: reconnect_asap, read_timeout: 0.25) do |c|
-         c.on_event { |event| event_sink << event }
-       end
-
-       with_client(client) do |client|
-         expect(event_sink.pop).to eq(simple_event_1)
-         expect(attempt).to eq 2
-       end
-     end
-   end
-
-   it "reconnects if stream returns EOF" do
-     with_server do |server|
-       attempt = 0
-       server.setup_response("/") do |req,res|
-         attempt += 1
-         send_stream_content(res, attempt == 1 ? simple_event_1_text : simple_event_2_text,
-           keep_open: attempt == 2)
-       end
-
-       event_sink = Queue.new
-       client = subject.new(server.base_uri, reconnect_time: reconnect_asap) do |c|
-         c.on_event { |event| event_sink << event }
-       end
-
-       with_client(client) do |client|
-         expect(event_sink.pop).to eq(simple_event_1)
-         expect(event_sink.pop).to eq(simple_event_2)
-         expect(attempt).to eq 2
-       end
-     end
-   end
-
-   it "sends ID of last received event, if any, when reconnecting" do
-     with_server do |server|
-       requests = Queue.new
-       attempt = 0
-       server.setup_response("/") do |req,res|
-         requests << req
-         attempt += 1
-         send_stream_content(res, attempt == 1 ? simple_event_1_text : simple_event_2_text,
-           keep_open: attempt == 2)
-       end
-
-       event_sink = Queue.new
-       client = subject.new(server.base_uri, reconnect_time: reconnect_asap) do |c|
-         c.on_event { |event| event_sink << event }
-       end
-
-       with_client(client) do |client|
-         req1 = requests.pop
-         req2 = requests.pop
-         expect(req2.header["last-event-id"]).to eq([ simple_event_1.id ])
-       end
-     end
-   end
-
-   it "increases backoff delay if a failure happens within the reset threshold" do
-     request_times = []
-     max_requests = 5
-     initial_interval = 0.25
-
-     with_server do |server|
-       attempt = 0
-       server.setup_response("/") do |req,res|
-         request_times << Time.now
-         attempt += 1
-         send_stream_content(res, simple_event_1_text, keep_open: attempt == max_requests)
-       end
-
-       event_sink = Queue.new
-       client = subject.new(server.base_uri, reconnect_time: initial_interval) do |c|
-         c.on_event { |event| event_sink << event }
-       end
-
-       with_client(client) do |client|
-         last_interval = nil
-         max_requests.times do |i|
-           expect(event_sink.pop).to eq(simple_event_1)
-           if i > 0
-             interval = request_times[i] - request_times[i - 1]
-             minimum_expected_interval = initial_interval * (2 ** (i - 1)) / 2
-             expect(interval).to be >= minimum_expected_interval
-             last_interval = interval
-           end
-         end
-       end
-     end
-   end
-
-   it "resets backoff delay if a failure happens after the reset threshold" do
-     request_times = []
-     request_end_times = []
-     max_requests = 5
-     threshold = 0.3
-     initial_interval = 0.25
-
-     with_server do |server|
-       attempt = 0
-       server.setup_response("/") do |req,res|
-         request_times << Time.now
-         attempt += 1
-         stream = send_stream_content(res, simple_event_1_text, keep_open: true)
-         Thread.new do
-           sleep(threshold + 0.01)
-           stream.close
-           request_end_times << Time.now
-         end
-       end
-
-       event_sink = Queue.new
-       client = subject.new(server.base_uri, reconnect_time: initial_interval, reconnect_reset_interval: threshold) do |c|
-         c.on_event { |event| event_sink << event }
-       end
-
-       with_client(client) do |client|
-         last_interval = nil
-         max_requests.times do |i|
-           expect(event_sink.pop).to eq(simple_event_1)
-           if i > 0
-             interval = request_times[i] - request_end_times[i - 1]
-             expect(interval).to be <= (initial_interval + 0.1)
-           end
-         end
-       end
-     end
-   end
-
-   it "can change initial reconnect delay based on directive from server" do
-     request_times = []
-     configured_interval = 1
-     retry_ms = 100
-
-     with_server do |server|
-       attempt = 0
-       server.setup_response("/") do |req,res|
-         request_times << Time.now
-         attempt += 1
-         if attempt == 1
-           send_stream_content(res, "retry: #{retry_ms}\n", keep_open: false)
-         else
-           send_stream_content(res, simple_event_1_text, keep_open: true)
-         end
-       end
-
-       event_sink = Queue.new
-       client = subject.new(server.base_uri, reconnect_time: configured_interval) do |c|
-         c.on_event { |event| event_sink << event }
-       end
-
-       with_client(client) do |client|
-         expect(event_sink.pop).to eq(simple_event_1)
-         interval = request_times[1] - request_times[0]
-         expect(interval).to be < 0.5
-       end
-     end
-   end
-
-   it "connects to HTTP server through proxy" do
-     events_body = simple_event_1_text
-     with_server do |server|
-       server.setup_response("/") do |req,res|
-         send_stream_content(res, events_body, keep_open: false)
-       end
-       with_server(StubProxyServer.new) do |proxy|
-         event_sink = Queue.new
-         client = subject.new(server.base_uri, proxy: proxy.base_uri) do |c|
-           c.on_event { |event| event_sink << event }
-         end
-
-         with_client(client) do |client|
-           expect(event_sink.pop).to eq(simple_event_1)
-           expect(proxy.request_count).to eq(1)
-         end
-       end
-     end
-   end
-
-   it "resets read timeout between events" do
-     event_body = simple_event_1_text
-     with_server do |server|
-       attempt = 0
-       server.setup_response("/") do |req,res|
-         attempt += 1
-         if attempt == 1
-           stream = send_stream_content(res, event_body, keep_open: true)
-           Thread.new do
-             2.times {
-               # write within timeout interval
-               sleep(0.75)
-               stream.write(event_body)
-             }
-             # cause timeout
-             sleep(1.25)
-           end
-         elsif attempt == 2
-           send_stream_content(res, event_body, keep_open: false)
-         end
-       end
-
-       event_sink = Queue.new
-       client = subject.new(server.base_uri, reconnect_time: reconnect_asap, read_timeout: 1) do |c|
-         c.on_event { |event| event_sink << event }
-       end
-
-       with_client(client) do |client|
-         4.times {
-           expect(event_sink.pop).to eq(simple_event_1)
-         }
-         expect(attempt).to eq 2
-       end
-     end
-   end
-
-   it "returns true from closed? when closed" do
-     with_server do |server|
-       server.setup_response("/") do |req,res|
-         send_stream_content(res, "", keep_open: true)
-       end
-
-       with_client(subject.new(server.base_uri)) do |client|
-         expect(client.closed?).to be(false)
-
-         client.close
-         expect(client.closed?).to be(true)
-       end
-     end
-   end
- end
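
The spec above exercises the public API end to end. Distilled from it, a minimal usage sketch: the URL and header value are placeholders, and the `type`/`data`/`id` accessor names are assumed from the positional `SSE::StreamEvent.new(:go, "foo", "a")` ordering used in the tests.

```ruby
require "ld-eventsource"

# Minimal usage distilled from client_spec.rb; values are placeholders.
client = SSE::Client.new("https://example.com/stream",
                         headers: { "Authorization" => "secret" },
                         reconnect_time: 1,
                         read_timeout: 300) do |c|
  c.on_event { |event| puts "#{event.type}: #{event.data} (id=#{event.id})" }
  c.on_error { |error| warn "stream error: #{error.inspect}" }
end

# ... later, shut the stream down; closed? then returns true.
client.close
```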