launchdarkly-server-sdk 8.11.2 → 8.11.3

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their respective public registries.
Files changed (59)
  1. checksums.yaml +4 -4
  2. data/lib/ldclient-rb/config.rb +66 -3
  3. data/lib/ldclient-rb/context.rb +1 -1
  4. data/lib/ldclient-rb/data_system.rb +243 -0
  5. data/lib/ldclient-rb/events.rb +34 -19
  6. data/lib/ldclient-rb/flags_state.rb +1 -1
  7. data/lib/ldclient-rb/impl/big_segments.rb +4 -4
  8. data/lib/ldclient-rb/impl/cache_store.rb +44 -0
  9. data/lib/ldclient-rb/impl/data_source/polling.rb +108 -0
  10. data/lib/ldclient-rb/impl/data_source/requestor.rb +106 -0
  11. data/lib/ldclient-rb/impl/data_source/status_provider.rb +78 -0
  12. data/lib/ldclient-rb/impl/data_source/stream.rb +198 -0
  13. data/lib/ldclient-rb/impl/data_source.rb +3 -3
  14. data/lib/ldclient-rb/impl/data_store/data_kind.rb +108 -0
  15. data/lib/ldclient-rb/impl/data_store/feature_store_client_wrapper.rb +187 -0
  16. data/lib/ldclient-rb/impl/data_store/in_memory_feature_store.rb +130 -0
  17. data/lib/ldclient-rb/impl/data_store/status_provider.rb +82 -0
  18. data/lib/ldclient-rb/impl/data_store/store.rb +371 -0
  19. data/lib/ldclient-rb/impl/data_store.rb +11 -97
  20. data/lib/ldclient-rb/impl/data_system/fdv1.rb +20 -7
  21. data/lib/ldclient-rb/impl/data_system/fdv2.rb +471 -0
  22. data/lib/ldclient-rb/impl/data_system/polling.rb +601 -0
  23. data/lib/ldclient-rb/impl/data_system/protocolv2.rb +264 -0
  24. data/lib/ldclient-rb/impl/dependency_tracker.rb +21 -9
  25. data/lib/ldclient-rb/impl/evaluator.rb +3 -2
  26. data/lib/ldclient-rb/impl/event_sender.rb +4 -3
  27. data/lib/ldclient-rb/impl/expiring_cache.rb +79 -0
  28. data/lib/ldclient-rb/impl/integrations/file_data_source.rb +8 -8
  29. data/lib/ldclient-rb/impl/integrations/test_data/test_data_source_v2.rb +288 -0
  30. data/lib/ldclient-rb/impl/memoized_value.rb +34 -0
  31. data/lib/ldclient-rb/impl/migrations/migrator.rb +2 -1
  32. data/lib/ldclient-rb/impl/migrations/tracker.rb +2 -1
  33. data/lib/ldclient-rb/impl/model/serialization.rb +6 -6
  34. data/lib/ldclient-rb/impl/non_blocking_thread_pool.rb +48 -0
  35. data/lib/ldclient-rb/impl/repeating_task.rb +2 -2
  36. data/lib/ldclient-rb/impl/simple_lru_cache.rb +27 -0
  37. data/lib/ldclient-rb/impl/util.rb +65 -0
  38. data/lib/ldclient-rb/impl.rb +1 -2
  39. data/lib/ldclient-rb/in_memory_store.rb +1 -18
  40. data/lib/ldclient-rb/integrations/test_data/flag_builder.rb +9 -9
  41. data/lib/ldclient-rb/integrations/test_data.rb +11 -11
  42. data/lib/ldclient-rb/integrations/test_data_v2/flag_builder_v2.rb +582 -0
  43. data/lib/ldclient-rb/integrations/test_data_v2.rb +248 -0
  44. data/lib/ldclient-rb/integrations/util/store_wrapper.rb +3 -2
  45. data/lib/ldclient-rb/interfaces/data_system.rb +755 -0
  46. data/lib/ldclient-rb/interfaces/feature_store.rb +3 -0
  47. data/lib/ldclient-rb/ldclient.rb +55 -131
  48. data/lib/ldclient-rb/util.rb +11 -70
  49. data/lib/ldclient-rb/version.rb +1 -1
  50. data/lib/ldclient-rb.rb +8 -17
  51. metadata +35 -17
  52. data/lib/ldclient-rb/cache_store.rb +0 -45
  53. data/lib/ldclient-rb/expiring_cache.rb +0 -77
  54. data/lib/ldclient-rb/memoized_value.rb +0 -32
  55. data/lib/ldclient-rb/non_blocking_thread_pool.rb +0 -46
  56. data/lib/ldclient-rb/polling.rb +0 -102
  57. data/lib/ldclient-rb/requestor.rb +0 -102
  58. data/lib/ldclient-rb/simple_lru_cache.rb +0 -25
  59. data/lib/ldclient-rb/stream.rb +0 -197
data/lib/ldclient-rb/cache_store.rb
@@ -1,45 +0,0 @@
- require "concurrent/map"
-
- module LaunchDarkly
-   #
-   # A thread-safe in-memory store that uses the same semantics that Faraday would expect, although we
-   # no longer use Faraday. This is used by Requestor, when we are not in a Rails environment.
-   #
-   # @private
-   #
-   class ThreadSafeMemoryStore
-     #
-     # Default constructor
-     #
-     # @return [ThreadSafeMemoryStore] a new store
-     def initialize
-       @cache = Concurrent::Map.new
-     end
-
-     #
-     # Read a value from the cache
-     # @param key [Object] the cache key
-     #
-     # @return [Object] the cache value
-     def read(key)
-       @cache[key]
-     end
-
-     #
-     # Store a value in the cache
-     # @param key [Object] the cache key
-     # @param value [Object] the value to associate with the key
-     #
-     # @return [Object] the value
-     def write(key, value)
-       @cache[key] = value
-     end
-
-     #
-     # Delete a value in the cache
-     # @param key [Object] the cache key
-     def delete(key)
-       @cache.delete(key)
-     end
-   end
- end
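The file list above adds data/lib/ldclient-rb/impl/cache_store.rb (+44), so this helper appears to move into the private Impl namespace rather than disappear. For reference, a minimal sketch of the semantics the removed class provided, based only on the code above; the key and value are illustrative:

    store = LaunchDarkly::ThreadSafeMemoryStore.new
    store.write("request-uri", "cached-body")  # stores and returns the value
    store.read("request-uri")                  # => "cached-body"
    store.delete("request-uri")                # removes the entry; a later read returns nil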
data/lib/ldclient-rb/expiring_cache.rb
@@ -1,77 +0,0 @@
-
- module LaunchDarkly
-   # A thread-safe cache with maximum number of entries and TTL.
-   # Adapted from https://github.com/SamSaffron/lru_redux/blob/master/lib/lru_redux/ttl/cache.rb
-   # under MIT license with the following changes:
-   # * made thread-safe
-   # * removed many unused methods
-   # * reading a key does not reset its expiration time, only writing
-   # @private
-   class ExpiringCache
-     def initialize(max_size, ttl)
-       @max_size = max_size
-       @ttl = ttl
-       @data_lru = {}
-       @data_ttl = {}
-       @lock = Mutex.new
-     end
-
-     def [](key)
-       @lock.synchronize do
-         ttl_evict
-         @data_lru[key]
-       end
-     end
-
-     def []=(key, val)
-       @lock.synchronize do
-         ttl_evict
-
-         @data_lru.delete(key)
-         @data_ttl.delete(key)
-
-         @data_lru[key] = val
-         @data_ttl[key] = Time.now.to_f
-
-         if @data_lru.size > @max_size
-           key, _ = @data_lru.first # hashes have a FIFO ordering in Ruby
-
-           @data_ttl.delete(key)
-           @data_lru.delete(key)
-         end
-
-         val
-       end
-     end
-
-     def delete(key)
-       @lock.synchronize do
-         ttl_evict
-
-         @data_lru.delete(key)
-         @data_ttl.delete(key)
-       end
-     end
-
-     def clear
-       @lock.synchronize do
-         @data_lru.clear
-         @data_ttl.clear
-       end
-     end
-
-     private
-
-     def ttl_evict
-       ttl_horizon = Time.now.to_f - @ttl
-       key, time = @data_ttl.first
-
-       until time.nil? || time > ttl_horizon
-         @data_ttl.delete(key)
-         @data_lru.delete(key)
-
-         key, time = @data_ttl.first
-       end
-     end
-   end
- end
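The file list adds data/lib/ldclient-rb/impl/expiring_cache.rb (+79), the Impl counterpart of this class. A minimal sketch of the behavior shown above, with an illustrative size cap and TTL; reads never refresh an entry's expiration, only writes do:

    cache = LaunchDarkly::ExpiringCache.new(100, 30)  # at most 100 entries, 30-second TTL
    cache["user-key"] = true
    cache["user-key"]   # => true while the entry is younger than 30 seconds
    sleep 31
    cache["user-key"]   # => nil; ttl_evict drops expired entries on the next access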
data/lib/ldclient-rb/memoized_value.rb
@@ -1,32 +0,0 @@
-
- module LaunchDarkly
-   # Simple implementation of a thread-safe memoized value whose generator function will never be
-   # run more than once, and whose value can be overridden by explicit assignment.
-   # Note that we no longer use this class and it will be removed in a future version.
-   # @private
-   class MemoizedValue
-     def initialize(&generator)
-       @generator = generator
-       @mutex = Mutex.new
-       @inited = false
-       @value = nil
-     end
-
-     def get
-       @mutex.synchronize do
-         unless @inited
-           @value = @generator.call
-           @inited = true
-         end
-       end
-       @value
-     end
-
-     def set(value)
-       @mutex.synchronize do
-         @value = value
-         @inited = true
-       end
-     end
-   end
- end
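The comment above already marks MemoizedValue as unused; the file list adds an Impl copy (data/lib/ldclient-rb/impl/memoized_value.rb, +34). A minimal sketch of its contract, with an illustrative generator block:

    value = LaunchDarkly::MemoizedValue.new { Time.now }  # generator runs at most once
    first = value.get       # runs the block and caches the result
    value.get == first      # => true; later calls reuse the cached value
    value.set(:override)    # explicit assignment replaces the cached value
    value.get               # => :override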
data/lib/ldclient-rb/non_blocking_thread_pool.rb
@@ -1,46 +0,0 @@
- require "concurrent"
- require "concurrent/atomics"
- require "concurrent/executors"
- require "thread"
-
- module LaunchDarkly
-   # Simple wrapper for a FixedThreadPool that rejects new jobs if all the threads are busy, rather
-   # than blocking. Also provides a way to wait for all jobs to finish without shutting down.
-   # @private
-   class NonBlockingThreadPool
-     def initialize(capacity, name = 'LD/NonBlockingThreadPool')
-       @capacity = capacity
-       @pool = Concurrent::FixedThreadPool.new(capacity, name: name)
-       @semaphore = Concurrent::Semaphore.new(capacity)
-     end
-
-     # Attempts to submit a job, but only if a worker is available. Unlike the regular post method,
-     # this returns a value: true if the job was submitted, false if all workers are busy.
-     def post
-       unless @semaphore.try_acquire(1)
-         return
-       end
-       @pool.post do
-         begin
-           yield
-         ensure
-           @semaphore.release(1)
-         end
-       end
-     end
-
-     # Waits until no jobs are executing, without shutting down the pool.
-     def wait_all
-       @semaphore.acquire(@capacity)
-       @semaphore.release(@capacity)
-     end
-
-     def shutdown
-       @pool.shutdown
-     end
-
-     def wait_for_termination
-       @pool.wait_for_termination
-     end
-   end
- end
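The file list adds data/lib/ldclient-rb/impl/non_blocking_thread_pool.rb (+48) as the Impl counterpart. A sketch of the contract described above: post hands the block to a worker only when one is free (and is falsy otherwise, rather than queueing), and wait_all blocks until in-flight jobs finish without shutting the pool down; the job body is illustrative:

    pool = LaunchDarkly::NonBlockingThreadPool.new(2)
    accepted = pool.post { sleep 0.1 }  # truthy if a worker was free, nil if both were busy
    pool.wait_all                       # returns once no jobs are executing
    pool.shutdown
    pool.wait_for_termination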
data/lib/ldclient-rb/polling.rb
@@ -1,102 +0,0 @@
- require "ldclient-rb/impl/repeating_task"
-
- require "concurrent/atomics"
- require "json"
- require "thread"
-
- module LaunchDarkly
-   # @private
-   class PollingProcessor
-     def initialize(config, requestor)
-       @config = config
-       @requestor = requestor
-       @initialized = Concurrent::AtomicBoolean.new(false)
-       @started = Concurrent::AtomicBoolean.new(false)
-       @ready = Concurrent::Event.new
-       @task = Impl::RepeatingTask.new(@config.poll_interval, 0, -> { self.poll }, @config.logger, 'LD/PollingDataSource')
-     end
-
-     def initialized?
-       @initialized.value
-     end
-
-     def start
-       return @ready unless @started.make_true
-       @config.logger.info { "[LDClient] Initializing polling connection" }
-       @task.start
-       @ready
-     end
-
-     def stop
-       stop_with_error_info
-     end
-
-     def poll
-       begin
-         all_data = @requestor.request_all_data
-         if all_data
-           update_sink_or_data_store.init(all_data)
-           if @initialized.make_true
-             @config.logger.info { "[LDClient] Polling connection initialized" }
-             @ready.set
-           end
-         end
-         @config.data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::VALID, nil)
-       rescue JSON::ParserError => e
-         @config.logger.error { "[LDClient] JSON parsing failed for polling response." }
-         error_info = LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(
-           LaunchDarkly::Interfaces::DataSource::ErrorInfo::INVALID_DATA,
-           0,
-           e.to_s,
-           Time.now
-         )
-         @config.data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, error_info)
-       rescue UnexpectedResponseError => e
-         error_info = LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(
-           LaunchDarkly::Interfaces::DataSource::ErrorInfo::ERROR_RESPONSE, e.status, nil, Time.now)
-         message = Util.http_error_message(e.status, "polling request", "will retry")
-         @config.logger.error { "[LDClient] #{message}" }
-
-         if Util.http_error_recoverable?(e.status)
-           @config.data_source_update_sink&.update_status(
-             LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED,
-             error_info
-           )
-         else
-           @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set
-           stop_with_error_info error_info
-         end
-       rescue StandardError => e
-         Util.log_exception(@config.logger, "Exception while polling", e)
-         @config.data_source_update_sink&.update_status(
-           LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED,
-           LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(LaunchDarkly::Interfaces::DataSource::ErrorInfo::UNKNOWN, 0, e.to_s, Time.now)
-         )
-       end
-     end
-
-     #
-     # The original implementation of this class relied on the feature store
-     # directly, which we are trying to move away from. Customers who might have
-     # instantiated this directly for some reason wouldn't know they have to set
-     # the config's sink manually, so we have to fall back to the store if the
-     # sink isn't present.
-     #
-     # The next major release should be able to simplify this structure and
-     # remove the need for fall back to the data store because the update sink
-     # should always be present.
-     #
-     private def update_sink_or_data_store
-       @config.data_source_update_sink || @config.feature_store
-     end
-
-     #
-     # @param [LaunchDarkly::Interfaces::DataSource::ErrorInfo, nil] error_info
-     #
-     private def stop_with_error_info(error_info = nil)
-       @task.stop
-       @config.logger.info { "[LDClient] Polling connection stopped" }
-       @config.data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::OFF, error_info)
-     end
-   end
- end
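The file list adds data/lib/ldclient-rb/impl/data_source/polling.rb (+108), which appears to supersede this class. For context, a sketch of how the removed PollingProcessor was driven, based on the code above; the config and requestor objects are normally constructed by the SDK itself:

    processor = LaunchDarkly::PollingProcessor.new(config, requestor)
    ready = processor.start   # starts the RepeatingTask and returns a Concurrent::Event
    ready.wait(10)            # block up to 10 seconds for the first successful poll
    processor.initialized?    # true once request_all_data has populated the store or update sink
    processor.stop            # stops the task and reports an OFF data source status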
data/lib/ldclient-rb/requestor.rb
@@ -1,102 +0,0 @@
- require "ldclient-rb/impl/model/serialization"
-
- require "concurrent/atomics"
- require "json"
- require "uri"
- require "http"
-
- module LaunchDarkly
-   # @private
-   class UnexpectedResponseError < StandardError
-     def initialize(status)
-       @status = status
-       super("HTTP error #{status}")
-     end
-
-     def status
-       @status
-     end
-   end
-
-   # @private
-   class Requestor
-     CacheEntry = Struct.new(:etag, :body)
-
-     def initialize(sdk_key, config)
-       @sdk_key = sdk_key
-       @config = config
-       @http_client = LaunchDarkly::Util.new_http_client(config.base_uri, config)
-         .use(:auto_inflate)
-         .headers("Accept-Encoding" => "gzip")
-       @cache = @config.cache_store
-     end
-
-     def request_all_data()
-       all_data = JSON.parse(make_request("/sdk/latest-all"), symbolize_names: true)
-       Impl::Model.make_all_store_data(all_data, @config.logger)
-     end
-
-     def stop
-       begin
-         @http_client.close
-       rescue
-       end
-     end
-
-     private
-
-     def make_request(path)
-       uri = URI(
-         Util.add_payload_filter_key(@config.base_uri + path, @config)
-       )
-       headers = {}
-       Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| headers[k] = v }
-       headers["Connection"] = "keep-alive"
-       cached = @cache.read(uri)
-       unless cached.nil?
-         headers["If-None-Match"] = cached.etag
-       end
-       response = @http_client.request("GET", uri, {
-         headers: headers,
-       })
-       status = response.status.code
-       # must fully read body for persistent connections
-       body = response.to_s
-       @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{status}\n\theaders: #{response.headers.to_h}\n\tbody: #{body}" }
-       if status == 304 && !cached.nil?
-         body = cached.body
-       else
-         @cache.delete(uri)
-         if status < 200 || status >= 300
-           raise UnexpectedResponseError.new(status)
-         end
-         body = fix_encoding(body, response.headers["content-type"])
-         etag = response.headers["etag"]
-         @cache.write(uri, CacheEntry.new(etag, body)) unless etag.nil?
-       end
-       body
-     end
-
-     def fix_encoding(body, content_type)
-       return body if content_type.nil?
-       media_type, charset = parse_content_type(content_type)
-       return body if charset.nil?
-       body.force_encoding(Encoding::find(charset)).encode(Encoding::UTF_8)
-     end
-
-     def parse_content_type(value)
-       return [nil, nil] if value.nil? || value == ''
-       parts = value.split(/; */)
-       return [value, nil] if parts.count < 2
-       charset = nil
-       parts.each do |part|
-         fields = part.split('=')
-         if fields.count >= 2 && fields[0] == 'charset'
-           charset = fields[1]
-           break
-         end
-       end
-       [parts[0], charset]
-     end
-   end
- end
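The file list adds data/lib/ldclient-rb/impl/data_source/requestor.rb (+106) as its apparent successor. A sketch of how the removed Requestor behaved, per make_request above; sdk_key and config come from the SDK's normal initialization:

    requestor = LaunchDarkly::Requestor.new(sdk_key, config)
    all_data = requestor.request_all_data  # GET /sdk/latest-all, parsed into store data
    # make_request sends If-None-Match when an ETag is cached; a 304 reuses the cached body,
    # any other non-2xx status raises UnexpectedResponseError, and a 2xx body is normalized
    # to UTF-8 and cached alongside its ETag for the next poll.
    requestor.stop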
data/lib/ldclient-rb/simple_lru_cache.rb
@@ -1,25 +0,0 @@
-
- module LaunchDarkly
-   # A non-thread-safe implementation of a LRU cache set with only add and reset methods.
-   # Based on https://github.com/SamSaffron/lru_redux/blob/master/lib/lru_redux/cache.rb
-   # @private
-   class SimpleLRUCacheSet
-     def initialize(capacity)
-       @values = {}
-       @capacity = capacity
-     end
-
-     # Adds a value to the cache or marks it recent if it was already there. Returns true if already there.
-     def add(value)
-       found = true
-       @values.delete(value) { found = false }
-       @values[value] = true
-       @values.shift if @values.length > @capacity
-       found
-     end
-
-     def clear
-       @values = {}
-     end
-   end
- end
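The file list adds data/lib/ldclient-rb/impl/simple_lru_cache.rb (+27) as the Impl counterpart. A minimal sketch of the removed set's behavior: add reports whether the value was already present and evicts the least recently added entry once capacity is exceeded; the keys are illustrative:

    lru = LaunchDarkly::SimpleLRUCacheSet.new(2)
    lru.add("key-a")   # => false (not seen before)
    lru.add("key-b")   # => false
    lru.add("key-a")   # => true; "key-a" is marked most recent
    lru.add("key-c")   # => false; over capacity, so "key-b" is evicted
    lru.clear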
data/lib/ldclient-rb/stream.rb
@@ -1,197 +0,0 @@
- require "ldclient-rb/impl/model/serialization"
-
- require "concurrent/atomics"
- require "json"
- require "ld-eventsource"
-
- module LaunchDarkly
-   # @private
-   PUT = :put
-   # @private
-   PATCH = :patch
-   # @private
-   DELETE = :delete
-   # @private
-   READ_TIMEOUT_SECONDS = 300  # 5 minutes; the stream should send a ping every 3 minutes
-
-   # @private
-   KEY_PATHS = {
-     FEATURES => "/flags/",
-     SEGMENTS => "/segments/",
-   }
-
-   # @private
-   class StreamProcessor
-     def initialize(sdk_key, config, diagnostic_accumulator = nil)
-       @sdk_key = sdk_key
-       @config = config
-       @diagnostic_accumulator = diagnostic_accumulator
-       @data_source_update_sink = config.data_source_update_sink
-       @feature_store = config.feature_store
-       @initialized = Concurrent::AtomicBoolean.new(false)
-       @started = Concurrent::AtomicBoolean.new(false)
-       @stopped = Concurrent::AtomicBoolean.new(false)
-       @ready = Concurrent::Event.new
-       @connection_attempt_start_time = 0
-     end
-
-     def initialized?
-       @initialized.value
-     end
-
-     def start
-       return @ready unless @started.make_true
-
-       @config.logger.info { "[LDClient] Initializing stream connection" }
-
-       headers = Impl::Util.default_http_headers(@sdk_key, @config)
-       opts = {
-         headers: headers,
-         read_timeout: READ_TIMEOUT_SECONDS,
-         logger: @config.logger,
-         socket_factory: @config.socket_factory,
-         reconnect_time: @config.initial_reconnect_delay,
-       }
-       log_connection_started
-
-       uri = Util.add_payload_filter_key(@config.stream_uri + "/all", @config)
-       @es = SSE::Client.new(uri, **opts) do |conn|
-         conn.on_event { |event| process_message(event) }
-         conn.on_error { |err|
-           log_connection_result(false)
-           case err
-           when SSE::Errors::HTTPStatusError
-             status = err.status
-             error_info = LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(
-               LaunchDarkly::Interfaces::DataSource::ErrorInfo::ERROR_RESPONSE, status, nil, Time.now)
-             message = Util.http_error_message(status, "streaming connection", "will retry")
-             @config.logger.error { "[LDClient] #{message}" }
-
-             if Util.http_error_recoverable?(status)
-               @data_source_update_sink&.update_status(
-                 LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED,
-                 error_info
-               )
-             else
-               @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set
-               stop_with_error_info error_info
-             end
-           when SSE::Errors::HTTPContentTypeError, SSE::Errors::HTTPProxyError, SSE::Errors::ReadTimeoutError
-             @data_source_update_sink&.update_status(
-               LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED,
-               LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(LaunchDarkly::Interfaces::DataSource::ErrorInfo::NETWORK_ERROR, 0, err.to_s, Time.now)
-             )
-
-           else
-             @data_source_update_sink&.update_status(
-               LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED,
-               LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(LaunchDarkly::Interfaces::DataSource::ErrorInfo::UNKNOWN, 0, err.to_s, Time.now)
-             )
-           end
-         }
-       end
-
-       @ready
-     end
-
-     def stop
-       stop_with_error_info
-     end
-
-     private
-
-     #
-     # @param [LaunchDarkly::Interfaces::DataSource::ErrorInfo, nil] error_info
-     #
-     def stop_with_error_info(error_info = nil)
-       if @stopped.make_true
-         @es.close
-         @data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::OFF, error_info)
-         @config.logger.info { "[LDClient] Stream connection stopped" }
-       end
-     end
-
-     #
-     # The original implementation of this class relied on the feature store
-     # directly, which we are trying to move away from. Customers who might have
-     # instantiated this directly for some reason wouldn't know they have to set
-     # the config's sink manually, so we have to fall back to the store if the
-     # sink isn't present.
-     #
-     # The next major release should be able to simplify this structure and
-     # remove the need for fall back to the data store because the update sink
-     # should always be present.
-     #
-     def update_sink_or_data_store
-       @data_source_update_sink || @feature_store
-     end
-
-     def process_message(message)
-       log_connection_result(true)
-       method = message.type
-       @config.logger.debug { "[LDClient] Stream received #{method} message: #{message.data}" }
-
-       begin
-         if method == PUT
-           message = JSON.parse(message.data, symbolize_names: true)
-           all_data = Impl::Model.make_all_store_data(message[:data], @config.logger)
-           update_sink_or_data_store.init(all_data)
-           @initialized.make_true
-           @config.logger.info { "[LDClient] Stream initialized" }
-           @ready.set
-         elsif method == PATCH
-           data = JSON.parse(message.data, symbolize_names: true)
-           for kind in [FEATURES, SEGMENTS]
-             key = key_for_path(kind, data[:path])
-             if key
-               item = Impl::Model.deserialize(kind, data[:data], @config.logger)
-               update_sink_or_data_store.upsert(kind, item)
-               break
-             end
-           end
-         elsif method == DELETE
-           data = JSON.parse(message.data, symbolize_names: true)
-           for kind in [FEATURES, SEGMENTS]
-             key = key_for_path(kind, data[:path])
-             if key
-               update_sink_or_data_store.delete(kind, key, data[:version])
-               break
-             end
-           end
-         else
-           @config.logger.warn { "[LDClient] Unknown message received: #{method}" }
-         end
-
-         @data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::VALID, nil)
-       rescue JSON::ParserError => e
-         @config.logger.error { "[LDClient] JSON parsing failed for method #{method}. Ignoring event." }
-         error_info = LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(
-           LaunchDarkly::Interfaces::DataSource::ErrorInfo::INVALID_DATA,
-           0,
-           e.to_s,
-           Time.now
-         )
-         @data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, error_info)
-
-         # Re-raise the exception so the SSE implementation can catch it and restart the stream.
-         raise
-       end
-     end
-
-     def key_for_path(kind, path)
-       path.start_with?(KEY_PATHS[kind]) ? path[KEY_PATHS[kind].length..-1] : nil
-     end
-
-     def log_connection_started
-       @connection_attempt_start_time = Impl::Util::current_time_millis
-     end
-
-     def log_connection_result(is_success)
-       if !@diagnostic_accumulator.nil? && @connection_attempt_start_time > 0
-         @diagnostic_accumulator.record_stream_init(@connection_attempt_start_time, !is_success,
-           Impl::Util::current_time_millis - @connection_attempt_start_time)
-         @connection_attempt_start_time = 0
-       end
-     end
-   end
- end
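The file list adds data/lib/ldclient-rb/impl/data_source/stream.rb (+198), which appears to supersede this class. For context, a sketch of the removed StreamProcessor's lifecycle, based on the code above; sdk_key and config come from the SDK's normal initialization:

    processor = LaunchDarkly::StreamProcessor.new(sdk_key, config)
    ready = processor.start  # opens an SSE connection to "#{config.stream_uri}/all"
    ready.wait(10)           # set once a put event has replaced all stored data
    # put replaces the full data set, patch upserts one flag or segment addressed by a
    # "/flags/..." or "/segments/..." path, and delete removes one item by version.
    processor.stop           # closes the SSE client and reports an OFF data source status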