launchdarkly-server-sdk 7.1.0 → 7.2.0

@@ -1,6 +1,10 @@
  require "ldclient-rb/impl/big_segments"
+ require "ldclient-rb/impl/broadcaster"
+ require "ldclient-rb/impl/data_source"
+ require "ldclient-rb/impl/data_store"
  require "ldclient-rb/impl/diagnostic_events"
  require "ldclient-rb/impl/evaluator"
+ require "ldclient-rb/impl/flag_tracker"
  require "ldclient-rb/impl/store_client_wrapper"
  require "concurrent/atomics"
  require "digest/sha1"
@@ -45,15 +49,22 @@ module LaunchDarkly

  @sdk_key = sdk_key

+ @shared_executor = Concurrent::SingleThreadExecutor.new
+
+ data_store_broadcaster = LaunchDarkly::Impl::Broadcaster.new(@shared_executor, config.logger)
+ store_sink = LaunchDarkly::Impl::DataStore::UpdateSink.new(data_store_broadcaster)
+
  # We need to wrap the feature store object with a FeatureStoreClientWrapper in order to add
  # some necessary logic around updates. Unfortunately, we have code elsewhere that accesses
  # the feature store through the Config object, so we need to make a new Config that uses
  # the wrapped store.
- @store = Impl::FeatureStoreClientWrapper.new(config.feature_store)
+ @store = Impl::FeatureStoreClientWrapper.new(config.feature_store, store_sink, config.logger)
  updated_config = config.clone
  updated_config.instance_variable_set(:@feature_store, @store)
  @config = updated_config

+ @data_store_status_provider = LaunchDarkly::Impl::DataStore::StatusProvider.new(@store, store_sink)
+
  @big_segment_store_manager = Impl::BigSegmentStoreManager.new(config.big_segments, @config.logger)
  @big_segment_store_status_provider = @big_segment_store_manager.status_provider
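The broadcaster and store sink created above are what feed the new data store status API. Below is a minimal usage sketch, not part of the diff: it assumes, per the SDK's listener model, that registered listeners expose an #update method, and that the status object exposes #available and #stale; check the 7.2.0 API docs for the exact names.

require "ldclient-rb"

# Hypothetical listener for persistent data store status changes. The
# broadcaster delivers each status event by calling #update on the listener.
class StoreStatusLogger
  def update(status)
    if status.available
      # "stale" means the store came back but may still hold outdated data.
      puts "data store is back online (stale: #{status.stale})"
    else
      puts "data store unavailable; the SDK keeps serving last-known flag data"
    end
  end
end

client = LaunchDarkly::LDClient.new("your-sdk-key")   # placeholder key
client.data_store_status_provider.add_listener(StoreStatusLogger.new)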
@@ -79,6 +90,16 @@ module LaunchDarkly
  return # requestor and update processor are not used in this mode
  end

+ flag_tracker_broadcaster = LaunchDarkly::Impl::Broadcaster.new(@shared_executor, @config.logger)
+ @flag_tracker = LaunchDarkly::Impl::FlagTracker.new(flag_tracker_broadcaster, lambda { |key, context| variation(key, context, nil) })
+
+ data_source_broadcaster = LaunchDarkly::Impl::Broadcaster.new(@shared_executor, @config.logger)
+
+ # Make the update sink available on the config so that our data source factory can access the sink with a shared executor.
+ @config.data_source_update_sink = LaunchDarkly::Impl::DataSource::UpdateSink.new(@store, data_source_broadcaster, flag_tracker_broadcaster)
+
+ @data_source_status_provider = LaunchDarkly::Impl::DataSource::StatusProvider.new(data_source_broadcaster, @config.data_source_update_sink)
+
  data_source_or_factory = @config.data_source || self.method(:create_default_data_source)
  if data_source_or_factory.respond_to? :call
  # Currently, data source factories take two parameters unless they need to be aware of diagnostic_accumulator, in
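Because the update sink is attached to the config before @config.data_source is resolved, a custom data source factory can push data through the sink and report status, falling back to the feature store when the sink is absent, which is the same fallback the built-in polling and streaming processors adopt later in this diff. A rough sketch; MyDataSource and its block-based delivery are hypothetical, and the returned object would still need to satisfy the SDK's data source protocol (e.g. #start and #stop):

# Hypothetical factory passed in via Config#data_source; the two-argument
# form matches the factory calling convention noted above.
my_factory = lambda do |sdk_key, config|
  sink_or_store = config.data_source_update_sink || config.feature_store

  MyDataSource.new do |all_data|                      # illustrative class, not part of the SDK
    sink_or_store.init(all_data)                      # push a full data set
    config.data_source_update_sink&.update_status(
      LaunchDarkly::Interfaces::DataSource::Status::VALID, nil
    )
  end
end

config = LaunchDarkly::Config.new(data_source: my_factory)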
@@ -345,16 +366,53 @@ module LaunchDarkly
  @event_processor.stop
  @big_segment_store_manager.stop
  @store.stop
+ @shared_executor.shutdown
  end

  #
  # Returns an interface for tracking the status of a Big Segment store.
  #
- # The {BigSegmentStoreStatusProvider} has methods for checking whether the Big Segment store
+ # The {Interfaces::BigSegmentStoreStatusProvider} has methods for checking whether the Big Segment store
  # is (as far as the SDK knows) currently operational and tracking changes in this status.
  #
  attr_reader :big_segment_store_status_provider

+ #
+ # Returns an interface for tracking the status of a persistent data store.
+ #
+ # The {LaunchDarkly::Interfaces::DataStore::StatusProvider} has methods for
+ # checking whether the data store is (as far as the SDK knows) currently
+ # operational, tracking changes in this status, and getting cache
+ # statistics. These are only relevant for a persistent data store; if you
+ # are using an in-memory data store, then this method will return a stub
+ # object that provides no information.
+ #
+ # @return [LaunchDarkly::Interfaces::DataStore::StatusProvider]
+ #
+ attr_reader :data_store_status_provider
+
+ #
+ # Returns an interface for tracking the status of the data source.
+ #
+ # The data source is the mechanism that the SDK uses to get feature flag
+ # configurations, such as a streaming connection (the default) or poll
+ # requests. The {LaunchDarkly::Interfaces::DataSource::StatusProvider} has
+ # methods for checking whether the data source is (as far as the SDK knows)
+ # currently operational and tracking changes in this status.
+ #
+ # @return [LaunchDarkly::Interfaces::DataSource::StatusProvider]
+ #
+ attr_reader :data_source_status_provider
+
+ #
+ # Returns an interface for tracking changes in feature flag configurations.
+ #
+ # The {LaunchDarkly::Interfaces::FlagTracker} contains methods for
+ # requesting notifications about feature flag changes using an event
+ # listener model.
+ #
+ attr_reader :flag_tracker
+
  private

  def create_default_data_source(sdk_key, config, diagnostic_accumulator)
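The three readers documented above (data_store_status_provider, data_source_status_provider, flag_tracker) are the main new public surface in 7.2.0. A brief usage sketch, assuming listeners respond to #update and that flag change events expose the changed flag's key; consult the published YARD docs for the exact event classes:

require "ldclient-rb"

client = LaunchDarkly::LDClient.new("your-sdk-key")   # placeholder key

# One-off read of the data source state (e.g. VALID, INTERRUPTED, OFF).
puts "data source state: #{client.data_source_status_provider.status.state}"

# Listener for flag configuration changes; #update receives an event
# identifying the flag that changed.
class FlagChangeLogger
  def update(event)
    puts "flag #{event.key} changed"
  end
end

client.flag_tracker.add_listener(FlagChangeLogger.new)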
@@ -403,7 +461,11 @@ module LaunchDarkly
  end
  end

- feature = @store.get(FEATURES, key)
+ begin
+ feature = @store.get(FEATURES, key)
+ rescue
+ # Ignored
+ end

  if feature.nil?
  @config.logger.info { "[LDClient] Unknown feature flag \"#{key}\". Returning default value" }
@@ -1,6 +1,7 @@
  require "ldclient-rb/impl/repeating_task"

  require "concurrent/atomics"
+ require "json"
  require "thread"

  module LaunchDarkly
@@ -27,30 +28,75 @@ module LaunchDarkly
  end

  def stop
- @task.stop
- @config.logger.info { "[LDClient] Polling connection stopped" }
+ stop_with_error_info
  end

  def poll
  begin
  all_data = @requestor.request_all_data
  if all_data
- @config.feature_store.init(all_data)
+ update_sink_or_data_store.init(all_data)
  if @initialized.make_true
  @config.logger.info { "[LDClient] Polling connection initialized" }
  @ready.set
  end
  end
+ @config.data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::VALID, nil)
+ rescue JSON::ParserError => e
+ @config.logger.error { "[LDClient] JSON parsing failed for polling response." }
+ error_info = LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(
+ LaunchDarkly::Interfaces::DataSource::ErrorInfo::INVALID_DATA,
+ 0,
+ e.to_s,
+ Time.now
+ )
+ @config.data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, error_info)
  rescue UnexpectedResponseError => e
+ error_info = LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(
+ LaunchDarkly::Interfaces::DataSource::ErrorInfo::ERROR_RESPONSE, e.status, nil, Time.now)
  message = Util.http_error_message(e.status, "polling request", "will retry")
  @config.logger.error { "[LDClient] #{message}" }
- unless Util.http_error_recoverable?(e.status)
+
+ if Util.http_error_recoverable?(e.status)
+ @config.data_source_update_sink&.update_status(
+ LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED,
+ error_info
+ )
+ else
  @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set
- stop
+ stop_with_error_info error_info
  end
  rescue StandardError => e
  Util.log_exception(@config.logger, "Exception while polling", e)
+ @config.data_source_update_sink&.update_status(
+ LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED,
+ LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(LaunchDarkly::Interfaces::DataSource::ErrorInfo::UNKNOWN, 0, e.to_s, Time.now)
+ )
  end
  end
+
+ #
+ # The original implementation of this class relied on the feature store
+ # directly, which we are trying to move away from. Customers who might have
+ # instantiated this directly for some reason wouldn't know they have to set
+ # the config's sink manually, so we have to fall back to the store if the
+ # sink isn't present.
+ #
+ # The next major release should be able to simplify this structure and
+ # remove the need for fall back to the data store because the update sink
+ # should always be present.
+ #
+ private def update_sink_or_data_store
+ @config.data_source_update_sink || @config.feature_store
+ end
+
+ #
+ # @param [LaunchDarkly::Interfaces::DataSource::ErrorInfo, nil] error_info
+ #
+ private def stop_with_error_info(error_info = nil)
+ @task.stop
+ @config.logger.info { "[LDClient] Polling connection stopped" }
+ @config.data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::OFF, error_info)
+ end
  end
  end
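Each failure branch above now publishes a DataSource::ErrorInfo (kind, HTTP status, message, timestamp) together with an INTERRUPTED or OFF state instead of only logging. A sketch of a listener that reacts to those transitions; the last_error, kind, status_code, and message accessors are assumed from the constructor arguments used in this diff:

# Hypothetical status listener, registered with
#   client.data_source_status_provider.add_listener(PollingHealthListener.new)
class PollingHealthListener
  def update(status)
    case status.state
    when LaunchDarkly::Interfaces::DataSource::Status::VALID
      puts "polling healthy"
    when LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED
      err = status.last_error
      puts "polling interrupted: kind=#{err&.kind} http=#{err&.status_code} #{err&.message}"
    when LaunchDarkly::Interfaces::DataSource::Status::OFF
      puts "data source shut down permanently (unrecoverable error)"
    end
  end
end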
@@ -25,6 +25,7 @@ module LaunchDarkly
  def initialize(sdk_key, config, diagnostic_accumulator = nil)
  @sdk_key = sdk_key
  @config = config
+ @data_source_update_sink = config.data_source_update_sink
  @feature_store = config.feature_store
  @initialized = Concurrent::AtomicBoolean.new(false)
  @started = Concurrent::AtomicBoolean.new(false)
@@ -60,12 +61,31 @@ module LaunchDarkly
  case err
  when SSE::Errors::HTTPStatusError
  status = err.status
+ error_info = LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(
+ LaunchDarkly::Interfaces::DataSource::ErrorInfo::ERROR_RESPONSE, status, nil, Time.now)
  message = Util.http_error_message(status, "streaming connection", "will retry")
  @config.logger.error { "[LDClient] #{message}" }
- unless Util.http_error_recoverable?(status)
+
+ if Util.http_error_recoverable?(status)
+ @data_source_update_sink&.update_status(
+ LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED,
+ error_info
+ )
+ else
  @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set
- stop
+ stop_with_error_info error_info
  end
+ when SSE::Errors::HTTPContentTypeError, SSE::Errors::HTTPProxyError, SSE::Errors::ReadTimeoutError
+ @data_source_update_sink&.update_status(
+ LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED,
+ LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(LaunchDarkly::Interfaces::DataSource::ErrorInfo::NETWORK_ERROR, 0, err.to_s, Time.now)
+ )
+
+ else
+ @data_source_update_sink&.update_status(
+ LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED,
+ LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(LaunchDarkly::Interfaces::DataSource::ErrorInfo::UNKNOWN, 0, err.to_s, Time.now)
+ )
  end
  }
  end
@@ -74,46 +94,86 @@ module LaunchDarkly
  end

  def stop
+ stop_with_error_info
+ end
+
+ private
+
+ #
+ # @param [LaunchDarkly::Interfaces::DataSource::ErrorInfo, nil] error_info
+ #
+ def stop_with_error_info(error_info = nil)
  if @stopped.make_true
  @es.close
+ @data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::OFF, error_info)
  @config.logger.info { "[LDClient] Stream connection stopped" }
  end
  end

- private
+ #
+ # The original implementation of this class relied on the feature store
+ # directly, which we are trying to move away from. Customers who might have
+ # instantiated this directly for some reason wouldn't know they have to set
+ # the config's sink manually, so we have to fall back to the store if the
+ # sink isn't present.
+ #
+ # The next major release should be able to simplify this structure and
+ # remove the need for fall back to the data store because the update sink
+ # should always be present.
+ #
+ def update_sink_or_data_store
+ @data_source_update_sink || @feature_store
+ end

  def process_message(message)
  log_connection_result(true)
  method = message.type
  @config.logger.debug { "[LDClient] Stream received #{method} message: #{message.data}" }
- if method == PUT
- message = JSON.parse(message.data, symbolize_names: true)
- all_data = Impl::Model.make_all_store_data(message[:data], @config.logger)
- @feature_store.init(all_data)
- @initialized.make_true
- @config.logger.info { "[LDClient] Stream initialized" }
- @ready.set
- elsif method == PATCH
- data = JSON.parse(message.data, symbolize_names: true)
- for kind in [FEATURES, SEGMENTS]
- key = key_for_path(kind, data[:path])
- if key
- item = Impl::Model.deserialize(kind, data[:data], @config.logger)
- @feature_store.upsert(kind, item)
- break
+
+ begin
+ if method == PUT
+ message = JSON.parse(message.data, symbolize_names: true)
+ all_data = Impl::Model.make_all_store_data(message[:data], @config.logger)
+ update_sink_or_data_store.init(all_data)
+ @initialized.make_true
+ @config.logger.info { "[LDClient] Stream initialized" }
+ @ready.set
+ elsif method == PATCH
+ data = JSON.parse(message.data, symbolize_names: true)
+ for kind in [FEATURES, SEGMENTS]
+ key = key_for_path(kind, data[:path])
+ if key
+ item = Impl::Model.deserialize(kind, data[:data], @config.logger)
+ update_sink_or_data_store.upsert(kind, item)
+ break
+ end
  end
- end
- elsif method == DELETE
- data = JSON.parse(message.data, symbolize_names: true)
- for kind in [FEATURES, SEGMENTS]
- key = key_for_path(kind, data[:path])
- if key
- @feature_store.delete(kind, key, data[:version])
- break
+ elsif method == DELETE
+ data = JSON.parse(message.data, symbolize_names: true)
+ for kind in [FEATURES, SEGMENTS]
+ key = key_for_path(kind, data[:path])
+ if key
+ update_sink_or_data_store.delete(kind, key, data[:version])
+ break
+ end
  end
+ else
+ @config.logger.warn { "[LDClient] Unknown message received: #{method}" }
  end
- else
- @config.logger.warn { "[LDClient] Unknown message received: #{method}" }
+
+ @data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::VALID, nil)
+ rescue JSON::ParserError => e
+ @config.logger.error { "[LDClient] JSON parsing failed for method #{method}. Ignoring event." }
+ error_info = LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(
+ LaunchDarkly::Interfaces::DataSource::ErrorInfo::INVALID_DATA,
+ 0,
+ e.to_s,
+ Time.now
+ )
+ @data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, error_info)
+
+ # Re-raise the exception so the SSE implementation can catch it and restart the stream.
+ raise
  end
  end
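The PUT/PATCH/DELETE handling above is what ultimately drives the flag change broadcasts, and together with the evaluation lambda passed to FlagTracker in the constructor it also enables per-flag value change notifications. A hedged sketch, reusing the client from the earlier example; the method name and the event's key/old_value/new_value accessors are assumed from the 7.2.0 interfaces:

# Hypothetical per-flag value listener; #update receives an event carrying
# the flag key plus the old and new evaluated values for the given context.
class CheckoutFlagWatcher
  def update(event)
    puts "#{event.key} changed from #{event.old_value} to #{event.new_value}"
  end
end

context = LaunchDarkly::LDContext.create({ key: "user-123", kind: "user" })
client.flag_tracker.add_flag_value_change_listener(
  "enable-new-checkout",      # illustrative flag key
  context,
  CheckoutFlagWatcher.new
)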
@@ -1,3 +1,3 @@
  module LaunchDarkly
- VERSION = "7.1.0"
+ VERSION = "7.2.0"
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: launchdarkly-server-sdk
  version: !ruby/object:Gem::Version
- version: 7.1.0
+ version: 7.2.0
  platform: ruby
  authors:
  - LaunchDarkly
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2023-04-13 00:00:00.000000000 Z
+ date: 2023-05-04 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: aws-sdk-dynamodb
@@ -38,6 +38,20 @@ dependencies:
  - - '='
  - !ruby/object:Gem::Version
  version: 2.2.33
+ - !ruby/object:Gem::Dependency
+ name: simplecov
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '0.21'
+ type: :development
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '0.21'
  - !ruby/object:Gem::Dependency
  name: rspec
  requirement: !ruby/object:Gem::Requirement
@@ -274,8 +288,12 @@ files:
  - lib/ldclient-rb/flags_state.rb
  - lib/ldclient-rb/impl.rb
  - lib/ldclient-rb/impl/big_segments.rb
+ - lib/ldclient-rb/impl/broadcaster.rb
  - lib/ldclient-rb/impl/context.rb
  - lib/ldclient-rb/impl/context_filter.rb
+ - lib/ldclient-rb/impl/data_source.rb
+ - lib/ldclient-rb/impl/data_store.rb
+ - lib/ldclient-rb/impl/dependency_tracker.rb
  - lib/ldclient-rb/impl/diagnostic_events.rb
  - lib/ldclient-rb/impl/evaluator.rb
  - lib/ldclient-rb/impl/evaluator_bucketing.rb
@@ -284,6 +302,7 @@ files:
  - lib/ldclient-rb/impl/event_sender.rb
  - lib/ldclient-rb/impl/event_summarizer.rb
  - lib/ldclient-rb/impl/event_types.rb
+ - lib/ldclient-rb/impl/flag_tracker.rb
  - lib/ldclient-rb/impl/integrations/consul_impl.rb
  - lib/ldclient-rb/impl/integrations/dynamodb_impl.rb
  - lib/ldclient-rb/impl/integrations/file_data_source.rb