ldclient-rb 5.4.3 → 5.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63) hide show
  1. checksums.yaml +4 -4
  2. data/.circleci/config.yml +33 -6
  3. data/CHANGELOG.md +19 -0
  4. data/CONTRIBUTING.md +0 -12
  5. data/Gemfile.lock +22 -3
  6. data/README.md +41 -35
  7. data/ldclient-rb.gemspec +4 -3
  8. data/lib/ldclient-rb.rb +9 -1
  9. data/lib/ldclient-rb/cache_store.rb +1 -0
  10. data/lib/ldclient-rb/config.rb +201 -90
  11. data/lib/ldclient-rb/evaluation.rb +56 -8
  12. data/lib/ldclient-rb/event_summarizer.rb +3 -0
  13. data/lib/ldclient-rb/events.rb +16 -0
  14. data/lib/ldclient-rb/expiring_cache.rb +1 -0
  15. data/lib/ldclient-rb/file_data_source.rb +18 -13
  16. data/lib/ldclient-rb/flags_state.rb +3 -2
  17. data/lib/ldclient-rb/impl.rb +13 -0
  18. data/lib/ldclient-rb/impl/integrations/consul_impl.rb +158 -0
  19. data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +228 -0
  20. data/lib/ldclient-rb/impl/integrations/redis_impl.rb +155 -0
  21. data/lib/ldclient-rb/impl/store_client_wrapper.rb +47 -0
  22. data/lib/ldclient-rb/impl/store_data_set_sorter.rb +55 -0
  23. data/lib/ldclient-rb/in_memory_store.rb +15 -4
  24. data/lib/ldclient-rb/integrations.rb +55 -0
  25. data/lib/ldclient-rb/integrations/consul.rb +38 -0
  26. data/lib/ldclient-rb/integrations/dynamodb.rb +47 -0
  27. data/lib/ldclient-rb/integrations/redis.rb +55 -0
  28. data/lib/ldclient-rb/integrations/util/store_wrapper.rb +230 -0
  29. data/lib/ldclient-rb/interfaces.rb +153 -0
  30. data/lib/ldclient-rb/ldclient.rb +135 -77
  31. data/lib/ldclient-rb/memoized_value.rb +2 -0
  32. data/lib/ldclient-rb/newrelic.rb +1 -0
  33. data/lib/ldclient-rb/non_blocking_thread_pool.rb +3 -3
  34. data/lib/ldclient-rb/polling.rb +1 -0
  35. data/lib/ldclient-rb/redis_store.rb +24 -190
  36. data/lib/ldclient-rb/requestor.rb +3 -2
  37. data/lib/ldclient-rb/simple_lru_cache.rb +1 -0
  38. data/lib/ldclient-rb/stream.rb +22 -10
  39. data/lib/ldclient-rb/user_filter.rb +1 -0
  40. data/lib/ldclient-rb/util.rb +1 -0
  41. data/lib/ldclient-rb/version.rb +1 -1
  42. data/scripts/gendocs.sh +12 -0
  43. data/spec/feature_store_spec_base.rb +173 -72
  44. data/spec/file_data_source_spec.rb +2 -2
  45. data/spec/http_util.rb +103 -0
  46. data/spec/in_memory_feature_store_spec.rb +1 -1
  47. data/spec/integrations/consul_feature_store_spec.rb +41 -0
  48. data/spec/integrations/dynamodb_feature_store_spec.rb +104 -0
  49. data/spec/integrations/store_wrapper_spec.rb +276 -0
  50. data/spec/ldclient_spec.rb +83 -4
  51. data/spec/redis_feature_store_spec.rb +25 -16
  52. data/spec/requestor_spec.rb +44 -38
  53. data/spec/stream_spec.rb +18 -18
  54. metadata +55 -33
  55. data/lib/sse_client.rb +0 -4
  56. data/lib/sse_client/backoff.rb +0 -38
  57. data/lib/sse_client/sse_client.rb +0 -171
  58. data/lib/sse_client/sse_events.rb +0 -67
  59. data/lib/sse_client/streaming_http.rb +0 -199
  60. data/spec/sse_client/sse_client_spec.rb +0 -177
  61. data/spec/sse_client/sse_events_spec.rb +0 -100
  62. data/spec/sse_client/sse_shared.rb +0 -82
  63. data/spec/sse_client/streaming_http_spec.rb +0 -263
data/lib/sse_client.rb DELETED
@@ -1,4 +0,0 @@
1
- require "sse_client/streaming_http"
2
- require "sse_client/sse_events"
3
- require "sse_client/backoff"
4
- require "sse_client/sse_client"
@@ -1,38 +0,0 @@
1
-
2
- module SSE
3
- #
4
- # A simple backoff algorithm that can be reset at any time, or reset itself after a given
5
- # interval has passed without errors.
6
- #
7
- class Backoff
8
- def initialize(base_interval, max_interval, auto_reset_interval = 60)
9
- @base_interval = base_interval
10
- @max_interval = max_interval
11
- @auto_reset_interval = auto_reset_interval
12
- @attempts = 0
13
- @last_good_time = nil
14
- @jitter_rand = Random.new
15
- end
16
-
17
- attr_accessor :base_interval
18
-
19
- def next_interval
20
- if !@last_good_time.nil? && (Time.now.to_i - @last_good_time) >= @auto_reset_interval
21
- @attempts = 0
22
- end
23
- @last_good_time = nil
24
- if @attempts == 0
25
- @attempts += 1
26
- return 0
27
- end
28
- @last_good_time = nil
29
- target = ([@base_interval * (2 ** @attempts), @max_interval].min).to_f
30
- @attempts += 1
31
- (target / 2) + @jitter_rand.rand(target / 2)
32
- end
33
-
34
- def mark_success
35
- @last_good_time = Time.now.to_i if @last_good_time.nil?
36
- end
37
- end
38
- end
@@ -1,171 +0,0 @@
1
- require "concurrent/atomics"
2
- require "logger"
3
- require "thread"
4
- require "uri"
5
-
6
- module SSE
7
- #
8
- # A lightweight Server-Sent Events implementation, relying on two gems: socketry for sockets with
9
- # read timeouts, and http_tools for HTTP response parsing. The overall logic is based on
10
- # [https://github.com/Tonkpils/celluloid-eventsource].
11
- #
12
- class SSEClient
13
- DEFAULT_CONNECT_TIMEOUT = 10
14
- DEFAULT_READ_TIMEOUT = 300
15
- DEFAULT_RECONNECT_TIME = 1
16
- MAX_RECONNECT_TIME = 30
17
-
18
- def initialize(uri, options = {})
19
- @uri = URI(uri)
20
- @stopped = Concurrent::AtomicBoolean.new(false)
21
-
22
- @headers = options[:headers] ? options[:headers].clone : {}
23
- @connect_timeout = options[:connect_timeout] || DEFAULT_CONNECT_TIMEOUT
24
- @read_timeout = options[:read_timeout] || DEFAULT_READ_TIMEOUT
25
- @logger = options[:logger] || default_logger
26
-
27
- if options[:proxy]
28
- @proxy = options[:proxy]
29
- else
30
- proxyUri = @uri.find_proxy
31
- if !proxyUri.nil? && (proxyUri.scheme == 'http' || proxyUri.scheme == 'https')
32
- @proxy = proxyUri
33
- end
34
- end
35
-
36
- reconnect_time = options[:reconnect_time] || DEFAULT_RECONNECT_TIME
37
- @backoff = Backoff.new(reconnect_time, MAX_RECONNECT_TIME)
38
-
39
- @on = { event: ->(_) {}, error: ->(_) {} }
40
- @last_id = nil
41
-
42
- yield self if block_given?
43
-
44
- Thread.new do
45
- run_stream
46
- end
47
- end
48
-
49
- def on(event_name, &action)
50
- @on[event_name.to_sym] = action
51
- end
52
-
53
- def on_event(&action)
54
- @on[:event] = action
55
- end
56
-
57
- def on_error(&action)
58
- @on[:error] = action
59
- end
60
-
61
- def close
62
- if @stopped.make_true
63
- @cxn.close if !@cxn.nil?
64
- @cxn = nil
65
- end
66
- end
67
-
68
- private
69
-
70
- def default_logger
71
- log = ::Logger.new($stdout)
72
- log.level = ::Logger::WARN
73
- log
74
- end
75
-
76
- def run_stream
77
- while !@stopped.value
78
- @cxn = nil
79
- begin
80
- @cxn = connect
81
- # There's a potential race if close was called in the middle of the previous line, i.e. after we
82
- # connected but before @cxn was set. Checking the variable again is a bit clunky but avoids that.
83
- return if @stopped.value
84
- read_stream(@cxn) if !@cxn.nil?
85
- rescue Errno::EBADF
86
- # don't log this - it probably means we closed our own connection deliberately
87
- rescue StandardError => e
88
- @logger.error { "Unexpected error from event source: #{e.inspect}" }
89
- @logger.debug { "Exception trace: #{e.backtrace}" }
90
- end
91
- begin
92
- @cxn.close if !@cxn.nil?
93
- rescue StandardError => e
94
- @logger.error { "Unexpected error while closing stream: #{e.inspect}" }
95
- @logger.debug { "Exception trace: #{e.backtrace}" }
96
- end
97
- end
98
- end
99
-
100
- # Try to establish a streaming connection. Returns the StreamingHTTPConnection object if successful.
101
- def connect
102
- loop do
103
- return if @stopped.value
104
- interval = @backoff.next_interval
105
- if interval > 0
106
- @logger.warn { "Will retry connection after #{'%.3f' % interval} seconds" }
107
- sleep(interval)
108
- end
109
- begin
110
- cxn = open_connection(build_headers)
111
- if cxn.status != 200
112
- body = cxn.read_all # grab the whole response body in case it has error details
113
- cxn.close
114
- @on[:error].call({status_code: cxn.status, body: body})
115
- next
116
- elsif cxn.headers["content-type"] && cxn.headers["content-type"].start_with?("text/event-stream")
117
- return cxn # we're good to proceed
118
- end
119
- @logger.error { "Event source returned unexpected content type '#{cxn.headers["content-type"]}'" }
120
- rescue Errno::EBADF
121
- raise
122
- rescue StandardError => e
123
- @logger.error { "Unexpected error from event source: #{e.inspect}" }
124
- @logger.debug { "Exception trace: #{e.backtrace}" }
125
- cxn.close if !cxn.nil?
126
- end
127
- # if unsuccessful, continue the loop to connect again
128
- end
129
- end
130
-
131
- # Just calls the StreamingHTTPConnection constructor - factored out for test purposes
132
- def open_connection(headers)
133
- StreamingHTTPConnection.new(@uri, @proxy, headers, @connect_timeout, @read_timeout)
134
- end
135
-
136
- # Pipe the output of the StreamingHTTPConnection into the EventParser, and dispatch events as
137
- # they arrive.
138
- def read_stream(cxn)
139
- event_parser = EventParser.new(cxn.read_lines)
140
- event_parser.items.each do |item|
141
- return if @stopped.value
142
- case item
143
- when SSEEvent
144
- dispatch_event(item)
145
- when SSESetRetryInterval
146
- @backoff.base_interval = event.milliseconds.to_f / 1000
147
- end
148
- end
149
- end
150
-
151
- def dispatch_event(event)
152
- @last_id = event.id
153
-
154
- # Tell the Backoff object that as of the current time, we have succeeded in getting some data. It
155
- # uses that information so it can automatically reset itself if enough time passes between failures.
156
- @backoff.mark_success
157
-
158
- # Pass the event to the caller
159
- @on[:event].call(event)
160
- end
161
-
162
- def build_headers
163
- h = {
164
- 'Accept' => 'text/event-stream',
165
- 'Cache-Control' => 'no-cache'
166
- }
167
- h['Last-Event-Id'] = @last_id if !@last_id.nil?
168
- h.merge(@headers)
169
- end
170
- end
171
- end
@@ -1,67 +0,0 @@
1
-
2
- module SSE
3
- # Server-Sent Event type used by SSEClient and EventParser.
4
- SSEEvent = Struct.new(:type, :data, :id)
5
-
6
- SSESetRetryInterval = Struct.new(:milliseconds)
7
-
8
- #
9
- # Accepts lines of text via an iterator, and parses them into SSE messages.
10
- #
11
- class EventParser
12
- def initialize(lines)
13
- @lines = lines
14
- reset_buffers
15
- end
16
-
17
- # Generator that parses the input iterator and returns instances of SSEEvent or SSESetRetryInterval.
18
- def items
19
- Enumerator.new do |gen|
20
- @lines.each do |line|
21
- line.chomp!
22
- if line.empty?
23
- event = maybe_create_event
24
- reset_buffers
25
- gen.yield event if !event.nil?
26
- else
27
- case line
28
- when /^(\w+): ?(.*)$/
29
- item = process_field($1, $2)
30
- gen.yield item if !item.nil?
31
- end
32
- end
33
- end
34
- end
35
- end
36
-
37
- private
38
-
39
- def reset_buffers
40
- @id = nil
41
- @type = nil
42
- @data = ""
43
- end
44
-
45
- def process_field(name, value)
46
- case name
47
- when "event"
48
- @type = value.to_sym
49
- when "data"
50
- @data << "\n" if !@data.empty?
51
- @data << value
52
- when "id"
53
- @id = value
54
- when "retry"
55
- if /^(?<num>\d+)$/ =~ value
56
- return SSESetRetryInterval.new(num.to_i)
57
- end
58
- end
59
- nil
60
- end
61
-
62
- def maybe_create_event
63
- return nil if @data.empty?
64
- SSEEvent.new(@type || :message, @data, @id)
65
- end
66
- end
67
- end
@@ -1,199 +0,0 @@
1
- require "concurrent/atomics"
2
- require "http_tools"
3
- require "socketry"
4
-
5
- module SSE
6
- #
7
- # Wrapper around a socket providing a simplified HTTP request-response cycle including streaming.
8
- # The socket is created and managed by Socketry, which we use so that we can have a read timeout.
9
- #
10
- class StreamingHTTPConnection
11
- attr_reader :status, :headers
12
-
13
- def initialize(uri, proxy, headers, connect_timeout, read_timeout)
14
- @socket = HTTPConnectionFactory.connect(uri, proxy, connect_timeout, read_timeout)
15
- @socket.write(build_request(uri, headers))
16
- @reader = HTTPResponseReader.new(@socket, read_timeout)
17
- @status = @reader.status
18
- @headers = @reader.headers
19
- @closed = Concurrent::AtomicBoolean.new(false)
20
- end
21
-
22
- def close
23
- if @closed.make_true
24
- @socket.close if @socket
25
- @socket = nil
26
- end
27
- end
28
-
29
- # Generator that returns one line of the response body at a time (delimited by \r, \n,
30
- # or \r\n) until the response is fully consumed or the socket is closed.
31
- def read_lines
32
- @reader.read_lines
33
- end
34
-
35
- # Consumes the entire response body and returns it.
36
- def read_all
37
- @reader.read_all
38
- end
39
-
40
- private
41
-
42
- # Build an HTTP request line and headers.
43
- def build_request(uri, headers)
44
- ret = "GET #{uri.request_uri} HTTP/1.1\r\n"
45
- ret << "Host: #{uri.host}\r\n"
46
- headers.each { |k, v|
47
- ret << "#{k}: #{v}\r\n"
48
- }
49
- ret + "\r\n"
50
- end
51
- end
52
-
53
- #
54
- # Used internally to send the HTTP request, including the proxy dialogue if necessary.
55
- #
56
- class HTTPConnectionFactory
57
- def self.connect(uri, proxy, connect_timeout, read_timeout)
58
- if !proxy
59
- return open_socket(uri, connect_timeout)
60
- end
61
-
62
- socket = open_socket(proxy, connect_timeout)
63
- socket.write(build_proxy_request(uri, proxy))
64
-
65
- # temporarily create a reader just for the proxy connect response
66
- proxy_reader = HTTPResponseReader.new(socket, read_timeout)
67
- if proxy_reader.status != 200
68
- raise ProxyError, "proxy connection refused, status #{proxy_reader.status}"
69
- end
70
-
71
- # start using TLS at this point if appropriate
72
- if uri.scheme.downcase == 'https'
73
- wrap_socket_in_ssl_socket(socket)
74
- else
75
- socket
76
- end
77
- end
78
-
79
- private
80
-
81
- def self.open_socket(uri, connect_timeout)
82
- if uri.scheme.downcase == 'https'
83
- Socketry::SSL::Socket.connect(uri.host, uri.port, timeout: connect_timeout)
84
- else
85
- Socketry::TCP::Socket.connect(uri.host, uri.port, timeout: connect_timeout)
86
- end
87
- end
88
-
89
- # Build a proxy connection header.
90
- def self.build_proxy_request(uri, proxy)
91
- ret = "CONNECT #{uri.host}:#{uri.port} HTTP/1.1\r\n"
92
- ret << "Host: #{uri.host}:#{uri.port}\r\n"
93
- if proxy.user || proxy.password
94
- encoded_credentials = Base64.strict_encode64([proxy.user || '', proxy.password || ''].join(":"))
95
- ret << "Proxy-Authorization: Basic #{encoded_credentials}\r\n"
96
- end
97
- ret << "\r\n"
98
- ret
99
- end
100
-
101
- def self.wrap_socket_in_ssl_socket(socket)
102
- io = IO.try_convert(socket)
103
- ssl_sock = OpenSSL::SSL::SSLSocket.new(io, OpenSSL::SSL::SSLContext.new)
104
- ssl_sock.connect
105
- Socketry::SSL::Socket.new.from_socket(ssl_sock)
106
- end
107
- end
108
-
109
- class ProxyError < StandardError
110
- def initialize(message)
111
- super
112
- end
113
- end
114
-
115
- #
116
- # Used internally to read the HTTP response, either all at once or as a stream of text lines.
117
- # Incoming data is fed into an instance of HTTPTools::Parser, which gives us the header and
118
- # chunks of the body via callbacks.
119
- #
120
- class HTTPResponseReader
121
- DEFAULT_CHUNK_SIZE = 10000
122
-
123
- attr_reader :status, :headers
124
-
125
- def initialize(socket, read_timeout)
126
- @socket = socket
127
- @read_timeout = read_timeout
128
- @parser = HTTPTools::Parser.new
129
- @buffer = ""
130
- @done = false
131
- @lock = Mutex.new
132
-
133
- # Provide callbacks for the Parser to give us the headers and body. This has to be done
134
- # before we start piping any data into the parser.
135
- have_headers = false
136
- @parser.on(:header) do
137
- have_headers = true
138
- end
139
- @parser.on(:stream) do |data|
140
- @lock.synchronize { @buffer << data } # synchronize because we're called from another thread in Socketry
141
- end
142
- @parser.on(:finish) do
143
- @lock.synchronize { @done = true }
144
- end
145
-
146
- # Block until the status code and headers have been successfully read.
147
- while !have_headers
148
- raise EOFError if !read_chunk_into_buffer
149
- end
150
- @headers = Hash[@parser.header.map { |k,v| [k.downcase, v] }]
151
- @status = @parser.status_code
152
- end
153
-
154
- def read_lines
155
- Enumerator.new do |gen|
156
- loop do
157
- line = read_line
158
- break if line.nil?
159
- gen.yield line
160
- end
161
- end
162
- end
163
-
164
- def read_all
165
- while read_chunk_into_buffer
166
- end
167
- @buffer
168
- end
169
-
170
- private
171
-
172
- # Attempt to read some more data from the socket. Return true if successful, false if EOF.
173
- # A read timeout will result in an exception from Socketry's readpartial method.
174
- def read_chunk_into_buffer
175
- # If @done is set, it means the Parser has signaled end of response body
176
- @lock.synchronize { return false if @done }
177
- data = @socket.readpartial(DEFAULT_CHUNK_SIZE, timeout: @read_timeout)
178
- return false if data == :eof
179
- @parser << data
180
- # We are piping the content through the parser so that it can handle things like chunked
181
- # encoding for us. The content ends up being appended to @buffer via our callback.
182
- true
183
- end
184
-
185
- # Extract the next line of text from the read buffer, refilling the buffer as needed.
186
- def read_line
187
- loop do
188
- @lock.synchronize do
189
- i = @buffer.index(/[\r\n]/)
190
- if !i.nil?
191
- i += 1 if (@buffer[i] == "\r" && i < @buffer.length - 1 && @buffer[i + 1] == "\n")
192
- return @buffer.slice!(0, i + 1).force_encoding(Encoding::UTF_8)
193
- end
194
- end
195
- return nil if !read_chunk_into_buffer
196
- end
197
- end
198
- end
199
- end