ldclient-rb 4.0.0 → 5.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,4 +1,5 @@
  require "json"
+ require "set"

  module LaunchDarkly
    class UserFilter
@@ -0,0 +1,18 @@
+
+ module LaunchDarkly
+   module Util
+     def self.http_error_recoverable?(status)
+       if status >= 400 && status < 500
+         status == 400 || status == 408 || status == 429
+       else
+         true
+       end
+     end
+
+     def self.http_error_message(status, context, recoverable_message)
+       desc = (status == 401 || status == 403) ? " (invalid SDK key)" : ""
+       message = Util.http_error_recoverable?(status) ? recoverable_message : "giving up permanently"
+       "HTTP error #{status}#{desc} for #{context} - #{message}"
+     end
+   end
+ end
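
For orientation (not part of the diff): a minimal sketch of how the new LaunchDarkly::Util helpers behave, assuming the module above is loaded. The context and message strings are arbitrary examples.

LaunchDarkly::Util.http_error_recoverable?(503)  # => true  (server errors are worth retrying)
LaunchDarkly::Util.http_error_recoverable?(429)  # => true  (rate limited; retry later)
LaunchDarkly::Util.http_error_recoverable?(401)  # => false (invalid SDK key; retrying won't help)

LaunchDarkly::Util.http_error_message(503, "streaming connection", "will retry")
# => "HTTP error 503 for streaming connection - will retry"
LaunchDarkly::Util.http_error_message(401, "streaming connection", "will retry")
# => "HTTP error 401 (invalid SDK key) for streaming connection - giving up permanently"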
@@ -1,3 +1,3 @@
  module LaunchDarkly
-   VERSION = "4.0.0"
+   VERSION = "5.0.0"
  end
data/lib/sse_client.rb ADDED
@@ -0,0 +1,4 @@
+ require "sse_client/streaming_http"
+ require "sse_client/sse_events"
+ require "sse_client/backoff"
+ require "sse_client/sse_client"
@@ -0,0 +1,38 @@
+
+ module SSE
+   #
+   # A simple backoff algorithm that can be reset at any time, or reset itself after a given
+   # interval has passed without errors.
+   #
+   class Backoff
+     def initialize(base_interval, max_interval, auto_reset_interval = 60)
+       @base_interval = base_interval
+       @max_interval = max_interval
+       @auto_reset_interval = auto_reset_interval
+       @attempts = 0
+       @last_good_time = nil
+       @jitter_rand = Random.new
+     end
+
+     attr_accessor :base_interval
+
+     def next_interval
+       if !@last_good_time.nil? && (Time.now.to_i - @last_good_time) >= @auto_reset_interval
+         @attempts = 0
+       end
+       @last_good_time = nil
+       if @attempts == 0
+         @attempts += 1
+         return 0
+       end
+       @last_good_time = nil
+       target = ([@base_interval * (2 ** @attempts), @max_interval].min).to_f
+       @attempts += 1
+       (target / 2) + @jitter_rand.rand(target / 2)
+     end
+
+     def mark_success
+       @last_good_time = Time.now.to_i if @last_good_time.nil?
+     end
+   end
+ end
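
For orientation (not part of the diff): a minimal sketch of how the Backoff class above is meant to be driven; the interval values shown are assumptions based on the defaults used by SSEClient below.

backoff = SSE::Backoff.new(1, 30)   # base interval 1s, capped at 30s
backoff.next_interval               # => 0 (first attempt connects immediately)
backoff.next_interval               # => a jittered value in [1.0, 2.0)
backoff.next_interval               # => a jittered value in [2.0, 4.0), and so on up to the cap
backoff.mark_success                # records that data arrived; if 60s (auto_reset_interval)
                                    # pass without another failure, the attempt count resets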
@@ -0,0 +1,162 @@
+ require "concurrent/atomics"
+ require "logger"
+ require "thread"
+ require "uri"
+
+ module SSE
+   #
+   # A lightweight Server-Sent Events implementation, relying on two gems: socketry for sockets with
+   # read timeouts, and http_tools for HTTP response parsing. The overall logic is based on
+   # [https://github.com/Tonkpils/celluloid-eventsource].
+   #
+   class SSEClient
+     DEFAULT_CONNECT_TIMEOUT = 10
+     DEFAULT_READ_TIMEOUT = 300
+     DEFAULT_RECONNECT_TIME = 1
+     MAX_RECONNECT_TIME = 30
+
+     def initialize(uri, options = {})
+       @uri = URI(uri)
+       @stopped = Concurrent::AtomicBoolean.new(false)
+
+       @headers = options[:headers] ? options[:headers].clone : {}
+       @connect_timeout = options[:connect_timeout] || DEFAULT_CONNECT_TIMEOUT
+       @read_timeout = options[:read_timeout] || DEFAULT_READ_TIMEOUT
+       @logger = options[:logger] || default_logger
+
+       if options[:proxy]
+         @proxy = options[:proxy]
+       else
+         proxyUri = @uri.find_proxy
+         if !proxyUri.nil? && (proxyUri.scheme == 'http' || proxyUri.scheme == 'https')
+           @proxy = proxyUri
+         end
+       end
+
+       reconnect_time = options[:reconnect_time] || DEFAULT_RECONNECT_TIME
+       @backoff = Backoff.new(reconnect_time, MAX_RECONNECT_TIME)
+
+       @on = { event: ->(_) {}, error: ->(_) {} }
+       @last_id = nil
+
+       yield self if block_given?
+
+       Thread.new do
+         run_stream
+       end
+     end
+
+     def on(event_name, &action)
+       @on[event_name.to_sym] = action
+     end
+
+     def on_event(&action)
+       @on[:event] = action
+     end
+
+     def on_error(&action)
+       @on[:error] = action
+     end
+
+     def close
+       if @stopped.make_true
+         @cxn.close if !@cxn.nil?
+       end
+     end
+
+     private
+
+     def default_logger
+       log = ::Logger.new($stdout)
+       log.level = ::Logger::WARN
+       log
+     end
+
+     def run_stream
+       while !@stopped.value
+         @cxn = nil
+         begin
+           @cxn = connect
+           read_stream(@cxn) if !@cxn.nil?
+         rescue Errno::EBADF
+           # don't log this - it probably means we closed our own connection deliberately
+         rescue StandardError => e
+           @logger.error { "Unexpected error from event source: #{e.inspect}" }
+           @logger.debug { "Exception trace: #{e.backtrace}" }
+         end
+         @cxn.close if !@cxn.nil?
+       end
+     end
+
+     # Try to establish a streaming connection. Returns the StreamingHTTPConnection object if successful.
+     def connect
+       loop do
+         return if @stopped.value
+         interval = @backoff.next_interval
+         if interval > 0
+           @logger.warn { "Will retry connection after #{'%.3f' % interval} seconds" }
+           sleep(interval)
+         end
+         begin
+           cxn = open_connection(build_headers)
+           if cxn.status != 200
+             body = cxn.read_all  # grab the whole response body in case it has error details
+             cxn.close
+             @on[:error].call({status_code: cxn.status, body: body})
+             next
+           elsif cxn.headers["content-type"] && cxn.headers["content-type"].start_with?("text/event-stream")
+             return cxn  # we're good to proceed
+           end
+           @logger.error { "Event source returned unexpected content type '#{cxn.headers["content-type"]}'" }
+         rescue Errno::EBADF
+           raise
+         rescue StandardError => e
+           @logger.error { "Unexpected error from event source: #{e.inspect}" }
+           @logger.debug { "Exception trace: #{e.backtrace}" }
+           cxn.close if !cxn.nil?
+         end
+         # if unsuccessful, continue the loop to connect again
+       end
+     end
+
+     # Just calls the StreamingHTTPConnection constructor - factored out for test purposes
+     def open_connection(headers)
+       StreamingHTTPConnection.new(@uri, @proxy, headers, @connect_timeout, @read_timeout)
+     end
+
+     # Pipe the output of the StreamingHTTPConnection into the EventParser, and dispatch events as
+     # they arrive.
+     def read_stream(cxn)
+       event_parser = EventParser.new(cxn.read_lines)
+       event_parser.items.each do |item|
+         return if @stopped.value
+         case item
+           when SSEEvent
+             dispatch_event(item)
+           when SSESetRetryInterval
+             @backoff.base_interval = item.milliseconds.to_f / 1000
+         end
+       end
+     end
+
+     def dispatch_event(event)
+       @last_id = event.id
+
+       # Tell the Backoff object that as of the current time, we have succeeded in getting some data. It
+       # uses that information so it can automatically reset itself if enough time passes between failures.
+       @backoff.mark_success
+
+       # Pass the event to the caller
+       @on[:event].call(event)
+     end
+
+     def build_headers
+       h = {
+         'Accept' => 'text/event-stream',
+         'Cache-Control' => 'no-cache'
+       }
+       h['Last-Event-Id'] = @last_id if !@last_id.nil?
+       h.merge(@headers)
+     end
+   end
+ end
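
For orientation (not part of the diff): a hedged usage sketch of the new SSEClient's public surface, assuming the SSE classes above are loaded. The stream URL and handler bodies are placeholders, not anything the SDK ships.

client = SSE::SSEClient.new("https://stream.example.com", read_timeout: 300) do |c|
  c.on_event { |event| puts "#{event.type}: #{event.data}" }            # SSEEvent struct
  c.on_error { |err| puts "HTTP #{err[:status_code]}: #{err[:body]}" }  # non-200 responses
end
# ... later, on shutdown:
client.close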
@@ -0,0 +1,67 @@
+
+ module SSE
+   # Server-Sent Event type used by SSEClient and EventParser.
+   SSEEvent = Struct.new(:type, :data, :id)
+
+   SSESetRetryInterval = Struct.new(:milliseconds)
+
+   #
+   # Accepts lines of text via an iterator, and parses them into SSE messages.
+   #
+   class EventParser
+     def initialize(lines)
+       @lines = lines
+       reset_buffers
+     end
+
+     # Generator that parses the input iterator and returns instances of SSEEvent or SSESetRetryInterval.
+     def items
+       Enumerator.new do |gen|
+         @lines.each do |line|
+           line.chomp!
+           if line.empty?
+             event = maybe_create_event
+             reset_buffers
+             gen.yield event if !event.nil?
+           else
+             case line
+               when /^(\w+): ?(.*)$/
+                 item = process_field($1, $2)
+                 gen.yield item if !item.nil?
+             end
+           end
+         end
+       end
+     end
+
+     private
+
+     def reset_buffers
+       @id = nil
+       @type = nil
+       @data = ""
+     end
+
+     def process_field(name, value)
+       case name
+         when "event"
+           @type = value.to_sym
+         when "data"
+           @data << "\n" if !@data.empty?
+           @data << value
+         when "id"
+           @id = value
+         when "retry"
+           if /^(?<num>\d+)$/ =~ value
+             return SSESetRetryInterval.new(num.to_i)
+           end
+       end
+       nil
+     end
+
+     def maybe_create_event
+       return nil if @data.empty?
+       SSEEvent.new(@type || :message, @data, @id)
+     end
+   end
+ end
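
For orientation (not part of the diff): a sketch of EventParser consuming raw SSE lines, assuming the structs above are loaded. The sample payload is made up.

lines = [
  "retry: 5000\n",
  "event: put\n",
  "data: {\"flags\":{}}\n",
  "\n"                      # blank line terminates the event
]
SSE::EventParser.new(lines).items.each do |item|
  case item
  when SSE::SSESetRetryInterval then puts "retry interval: #{item.milliseconds} ms"
  when SSE::SSEEvent            then puts "#{item.type} -> #{item.data}"
  end
end
# prints "retry interval: 5000 ms", then the :put event with its data payload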
@@ -0,0 +1,195 @@
+ require "http_tools"
+ require "socketry"
+
+ module SSE
+   #
+   # Wrapper around a socket providing a simplified HTTP request-response cycle including streaming.
+   # The socket is created and managed by Socketry, which we use so that we can have a read timeout.
+   #
+   class StreamingHTTPConnection
+     attr_reader :status, :headers
+
+     def initialize(uri, proxy, headers, connect_timeout, read_timeout)
+       @socket = HTTPConnectionFactory.connect(uri, proxy, connect_timeout, read_timeout)
+       @socket.write(build_request(uri, headers))
+       @reader = HTTPResponseReader.new(@socket, read_timeout)
+       @status = @reader.status
+       @headers = @reader.headers
+     end
+
+     def close
+       @socket.close if @socket
+       @socket = nil
+     end
+
+     # Generator that returns one line of the response body at a time (delimited by \r, \n,
+     # or \r\n) until the response is fully consumed or the socket is closed.
+     def read_lines
+       @reader.read_lines
+     end
+
+     # Consumes the entire response body and returns it.
+     def read_all
+       @reader.read_all
+     end
+
+     private
+
+     # Build an HTTP request line and headers.
+     def build_request(uri, headers)
+       ret = "GET #{uri.request_uri} HTTP/1.1\r\n"
+       ret << "Host: #{uri.host}\r\n"
+       headers.each { |k, v|
+         ret << "#{k}: #{v}\r\n"
+       }
+       ret + "\r\n"
+     end
+   end
+
+   #
+   # Used internally to send the HTTP request, including the proxy dialogue if necessary.
+   #
+   class HTTPConnectionFactory
+     def self.connect(uri, proxy, connect_timeout, read_timeout)
+       if !proxy
+         return open_socket(uri, connect_timeout)
+       end
+
+       socket = open_socket(proxy, connect_timeout)
+       socket.write(build_proxy_request(uri, proxy))
+
+       # temporarily create a reader just for the proxy connect response
+       proxy_reader = HTTPResponseReader.new(socket, read_timeout)
+       if proxy_reader.status != 200
+         raise ProxyError, "proxy connection refused, status #{proxy_reader.status}"
+       end
+
+       # start using TLS at this point if appropriate
+       if uri.scheme.downcase == 'https'
+         wrap_socket_in_ssl_socket(socket)
+       else
+         socket
+       end
+     end
+
+     private
+
+     def self.open_socket(uri, connect_timeout)
+       if uri.scheme.downcase == 'https'
+         Socketry::SSL::Socket.connect(uri.host, uri.port, timeout: connect_timeout)
+       else
+         Socketry::TCP::Socket.connect(uri.host, uri.port, timeout: connect_timeout)
+       end
+     end
+
+     # Build a proxy connection header.
+     def self.build_proxy_request(uri, proxy)
+       ret = "CONNECT #{uri.host}:#{uri.port} HTTP/1.1\r\n"
+       ret << "Host: #{uri.host}:#{uri.port}\r\n"
+       if proxy.user || proxy.password
+         encoded_credentials = Base64.strict_encode64([proxy.user || '', proxy.password || ''].join(":"))
+         ret << "Proxy-Authorization: Basic #{encoded_credentials}\r\n"
+       end
+       ret << "\r\n"
+       ret
+     end
+
+     def self.wrap_socket_in_ssl_socket(socket)
+       io = IO.try_convert(socket)
+       ssl_sock = OpenSSL::SSL::SSLSocket.new(io, OpenSSL::SSL::SSLContext.new)
+       ssl_sock.connect
+       Socketry::SSL::Socket.new.from_socket(ssl_sock)
+     end
+   end
+
+   class ProxyError < StandardError
+     def initialize(message)
+       super
+     end
+   end
+
+   #
+   # Used internally to read the HTTP response, either all at once or as a stream of text lines.
+   # Incoming data is fed into an instance of HTTPTools::Parser, which gives us the header and
+   # chunks of the body via callbacks.
+   #
+   class HTTPResponseReader
+     DEFAULT_CHUNK_SIZE = 10000
+
+     attr_reader :status, :headers
+
+     def initialize(socket, read_timeout)
+       @socket = socket
+       @read_timeout = read_timeout
+       @parser = HTTPTools::Parser.new
+       @buffer = ""
+       @done = false
+       @lock = Mutex.new
+
+       # Provide callbacks for the Parser to give us the headers and body. This has to be done
+       # before we start piping any data into the parser.
+       have_headers = false
+       @parser.on(:header) do
+         have_headers = true
+       end
+       @parser.on(:stream) do |data|
+         @lock.synchronize { @buffer << data }  # synchronize because we're called from another thread in Socketry
+       end
+       @parser.on(:finish) do
+         @lock.synchronize { @done = true }
+       end
+
+       # Block until the status code and headers have been successfully read.
+       while !have_headers
+         raise EOFError if !read_chunk_into_buffer
+       end
+       @headers = Hash[@parser.header.map { |k,v| [k.downcase, v] }]
+       @status = @parser.status_code
+     end
+
+     def read_lines
+       Enumerator.new do |gen|
+         loop do
+           line = read_line
+           break if line.nil?
+           gen.yield line
+         end
+       end
+     end
+
+     def read_all
+       while read_chunk_into_buffer
+       end
+       @buffer
+     end
+
+     private
+
+     # Attempt to read some more data from the socket. Return true if successful, false if EOF.
+     # A read timeout will result in an exception from Socketry's readpartial method.
+     def read_chunk_into_buffer
+       # If @done is set, it means the Parser has signaled end of response body
+       @lock.synchronize { return false if @done }
+       data = @socket.readpartial(DEFAULT_CHUNK_SIZE, timeout: @read_timeout)
+       return false if data == :eof
+       @parser << data
+       # We are piping the content through the parser so that it can handle things like chunked
+       # encoding for us. The content ends up being appended to @buffer via our callback.
+       true
+     end
+
+     # Extract the next line of text from the read buffer, refilling the buffer as needed.
+     def read_line
+       loop do
+         @lock.synchronize do
+           i = @buffer.index(/[\r\n]/)
+           if !i.nil?
+             i += 1 if (@buffer[i] == "\r" && i < @buffer.length - 1 && @buffer[i + 1] == "\n")
+             return @buffer.slice!(0, i + 1).force_encoding(Encoding::UTF_8)
+           end
+         end
+         return nil if !read_chunk_into_buffer
+       end
+     end
+   end
+ end
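
For orientation (not part of the diff): StreamingHTTPConnection is an internal helper, but a rough sketch of the request/response cycle it wraps, assuming the classes above are loaded. The URI is a placeholder and no proxy is used.

require "uri"

uri = URI("https://stream.example.com/events")
cxn = SSE::StreamingHTTPConnection.new(uri, nil, { "Accept" => "text/event-stream" }, 10, 300)
if cxn.status == 200
  cxn.read_lines.each { |line| puts line }  # yields lines as they arrive, subject to the read timeout
else
  puts cxn.read_all                         # drain the error body
end
cxn.close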