launchdarkly-server-sdk 8.11.1 → 8.11.3

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. checksums.yaml +4 -4
  2. data/lib/ldclient-rb/config.rb +66 -3
  3. data/lib/ldclient-rb/context.rb +1 -1
  4. data/lib/ldclient-rb/data_system.rb +243 -0
  5. data/lib/ldclient-rb/events.rb +35 -20
  6. data/lib/ldclient-rb/flags_state.rb +1 -1
  7. data/lib/ldclient-rb/impl/big_segments.rb +4 -4
  8. data/lib/ldclient-rb/impl/cache_store.rb +44 -0
  9. data/lib/ldclient-rb/impl/data_source/null_processor.rb +52 -0
  10. data/lib/ldclient-rb/impl/data_source/polling.rb +108 -0
  11. data/lib/ldclient-rb/impl/data_source/requestor.rb +106 -0
  12. data/lib/ldclient-rb/impl/data_source/status_provider.rb +78 -0
  13. data/lib/ldclient-rb/impl/data_source/stream.rb +198 -0
  14. data/lib/ldclient-rb/impl/data_source.rb +3 -3
  15. data/lib/ldclient-rb/impl/data_store/data_kind.rb +108 -0
  16. data/lib/ldclient-rb/impl/data_store/feature_store_client_wrapper.rb +187 -0
  17. data/lib/ldclient-rb/impl/data_store/in_memory_feature_store.rb +130 -0
  18. data/lib/ldclient-rb/impl/data_store/status_provider.rb +82 -0
  19. data/lib/ldclient-rb/impl/data_store/store.rb +371 -0
  20. data/lib/ldclient-rb/impl/data_store.rb +11 -97
  21. data/lib/ldclient-rb/impl/data_system/fdv1.rb +178 -0
  22. data/lib/ldclient-rb/impl/data_system/fdv2.rb +471 -0
  23. data/lib/ldclient-rb/impl/data_system/polling.rb +601 -0
  24. data/lib/ldclient-rb/impl/data_system/protocolv2.rb +264 -0
  25. data/lib/ldclient-rb/impl/data_system.rb +298 -0
  26. data/lib/ldclient-rb/impl/dependency_tracker.rb +21 -9
  27. data/lib/ldclient-rb/impl/evaluator.rb +3 -2
  28. data/lib/ldclient-rb/impl/event_sender.rb +4 -3
  29. data/lib/ldclient-rb/impl/expiring_cache.rb +79 -0
  30. data/lib/ldclient-rb/impl/integrations/file_data_source.rb +9 -9
  31. data/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb +0 -1
  32. data/lib/ldclient-rb/impl/integrations/test_data/test_data_source_v2.rb +288 -0
  33. data/lib/ldclient-rb/impl/memoized_value.rb +34 -0
  34. data/lib/ldclient-rb/impl/migrations/migrator.rb +2 -1
  35. data/lib/ldclient-rb/impl/migrations/tracker.rb +2 -1
  36. data/lib/ldclient-rb/impl/model/serialization.rb +6 -6
  37. data/lib/ldclient-rb/impl/non_blocking_thread_pool.rb +48 -0
  38. data/lib/ldclient-rb/impl/repeating_task.rb +2 -2
  39. data/lib/ldclient-rb/impl/simple_lru_cache.rb +27 -0
  40. data/lib/ldclient-rb/impl/util.rb +65 -0
  41. data/lib/ldclient-rb/impl.rb +1 -2
  42. data/lib/ldclient-rb/in_memory_store.rb +1 -18
  43. data/lib/ldclient-rb/integrations/test_data/flag_builder.rb +9 -9
  44. data/lib/ldclient-rb/integrations/test_data.rb +11 -11
  45. data/lib/ldclient-rb/integrations/test_data_v2/flag_builder_v2.rb +582 -0
  46. data/lib/ldclient-rb/integrations/test_data_v2.rb +248 -0
  47. data/lib/ldclient-rb/integrations/util/store_wrapper.rb +3 -2
  48. data/lib/ldclient-rb/interfaces/data_system.rb +755 -0
  49. data/lib/ldclient-rb/interfaces/feature_store.rb +3 -0
  50. data/lib/ldclient-rb/ldclient.rb +55 -149
  51. data/lib/ldclient-rb/util.rb +11 -70
  52. data/lib/ldclient-rb/version.rb +1 -1
  53. data/lib/ldclient-rb.rb +8 -17
  54. metadata +52 -17
  55. data/lib/ldclient-rb/cache_store.rb +0 -45
  56. data/lib/ldclient-rb/expiring_cache.rb +0 -77
  57. data/lib/ldclient-rb/memoized_value.rb +0 -32
  58. data/lib/ldclient-rb/non_blocking_thread_pool.rb +0 -46
  59. data/lib/ldclient-rb/polling.rb +0 -102
  60. data/lib/ldclient-rb/requestor.rb +0 -102
  61. data/lib/ldclient-rb/simple_lru_cache.rb +0 -25
  62. data/lib/ldclient-rb/stream.rb +0 -196
data/lib/ldclient-rb/impl/data_store/store.rb
@@ -0,0 +1,371 @@
+ # frozen_string_literal: true
+
+ require "concurrent"
+ require "set"
+ require "ldclient-rb/impl/data_store"
+ require "ldclient-rb/impl/data_store/in_memory_feature_store"
+ require "ldclient-rb/impl/dependency_tracker"
+ require "ldclient-rb/interfaces/data_system"
+
+ module LaunchDarkly
+   module Impl
+     module DataStore
+       #
+       # Store is a dual-mode persistent/in-memory store that serves requests for
+       # data from the evaluation algorithm.
+       #
+       # At any given moment one of two stores is active: in-memory, or persistent.
+       # Once the in-memory store has data (either from initializers or a
+       # synchronizer), the persistent store is no longer read from. From that point
+       # forward, calls to get data will serve from the memory store.
+       #
+       class Store
+         include LaunchDarkly::Interfaces::DataSystem::SelectorStore
+
+         #
+         # Initialize a new Store.
+         #
+         # @param flag_change_broadcaster [LaunchDarkly::Impl::Broadcaster] Broadcaster for flag change events
+         # @param change_set_broadcaster [LaunchDarkly::Impl::Broadcaster] Broadcaster for changeset events
+         # @param logger [Logger] The logger instance
+         #
+         def initialize(flag_change_broadcaster, change_set_broadcaster, logger)
+           @logger = logger
+           @persistent_store = nil
+           @persistent_store_status_provider = nil
+           @persistent_store_writable = false
+
+           # Source of truth for flag evaluations once initialized
+           @memory_store = InMemoryFeatureStoreV2.new(logger)
+
+           # Used to track dependencies between items in the store
+           @dependency_tracker = LaunchDarkly::Impl::DependencyTracker.new(logger)
+
+           # Broadcasters for events
+           @flag_change_broadcaster = flag_change_broadcaster
+           @change_set_broadcaster = change_set_broadcaster
+
+           # True if the data in the memory store may be persisted to the persistent store
+           @persist = false
+
+           # Points to the active store. Swapped upon initialization.
+           @active_store = @memory_store
+
+           # Identifies the current data
+           @selector = LaunchDarkly::Interfaces::DataSystem::Selector.no_selector
+
+           # Thread synchronization
+           @lock = Mutex.new
+         end
+
+         #
+         # Configure the store with a persistent store for read-only or read-write access.
+         #
+         # @param persistent_store [LaunchDarkly::Interfaces::FeatureStore] The persistent store implementation
+         # @param writable [Boolean] Whether the persistent store should be written to
+         # @param status_provider [LaunchDarkly::Impl::DataStore::StatusProviderV2, nil] Optional status provider for the persistent store
+         # @return [Store] self for method chaining
+         #
+         def with_persistence(persistent_store, writable, status_provider = nil)
+           @lock.synchronize do
+             @persistent_store = persistent_store
+             @persistent_store_writable = writable
+             @persistent_store_status_provider = status_provider
+
+             # Initially use persistent store as active until memory store has data
+             @active_store = persistent_store
+           end
+
+           self
+         end
+
+         # (see LaunchDarkly::Interfaces::DataSystem::SelectorStore#selector)
+         def selector
+           @lock.synchronize do
+             @selector
+           end
+         end
+
+         #
+         # Close the store and any persistent store if configured.
+         #
+         # @return [Exception, nil] Exception if close failed, nil otherwise
+         #
+         def close
+           @lock.synchronize do
+             return nil if @persistent_store.nil?
+
+             begin
+               @persistent_store.stop if @persistent_store.respond_to?(:stop)
+             rescue => e
+               return e
+             end
+           end
+
+           nil
+         end
+
+         #
+         # Apply a changeset to the store.
+         #
+         # @param change_set [LaunchDarkly::Interfaces::DataSystem::ChangeSet] The changeset to apply
+         # @param persist [Boolean] Whether the changes should be persisted to the persistent store
+         # @return [void]
+         #
+         def apply(change_set, persist)
+           collections = changes_to_store_data(change_set.changes)
+
+           @lock.synchronize do
+             begin
+               case change_set.intent_code
+               when LaunchDarkly::Interfaces::DataSystem::IntentCode::TRANSFER_FULL
+                 set_basis(collections, change_set.selector, persist)
+               when LaunchDarkly::Interfaces::DataSystem::IntentCode::TRANSFER_CHANGES
+                 apply_delta(collections, change_set.selector, persist)
+               when LaunchDarkly::Interfaces::DataSystem::IntentCode::TRANSFER_NONE
+                 # No-op, no changes to apply
+                 return
+               end
+
+               # Notify changeset listeners
+               @change_set_broadcaster.broadcast(change_set)
+             rescue => e
+               @logger.error { "[LDClient] Couldn't apply changeset: #{e.message}" }
+             end
+           end
+         end
+
+         #
+         # Commit persists the data in the memory store to the persistent store, if configured.
+         #
+         # @return [Exception, nil] Exception if commit failed, nil otherwise
+         #
+         def commit
+           @lock.synchronize do
+             return nil unless should_persist?
+
+             begin
+               # Get all data from memory store and write to persistent store
+               all_data = {}
+               [FEATURES, SEGMENTS].each do |kind|
+                 all_data[kind] = @memory_store.all(kind)
+               end
+               @persistent_store.init(all_data)
+             rescue => e
+               return e
+             end
+           end
+
+           nil
+         end
+
+         #
+         # Get the currently active store for reading data.
+         #
+         # @return [LaunchDarkly::Interfaces::FeatureStore] The active store (memory or persistent)
+         #
+         def get_active_store
+           @lock.synchronize do
+             @active_store
+           end
+         end
+
+         #
+         # Check if the active store is initialized.
+         #
+         # @return [Boolean]
+         #
+         def initialized?
+           get_active_store.initialized?
+         end
+
+         #
+         # Get the data store status provider for the persistent store, if configured.
+         #
+         # @return [LaunchDarkly::Impl::DataStore::StatusProviderV2, nil] The data store status provider for the persistent store, if configured
+         #
+         def get_data_store_status_provider
+           @lock.synchronize do
+             @persistent_store_status_provider
+           end
+         end
+
+         #
+         # Set the basis of the store. Any existing data is discarded.
+         #
+         # @param collections [Hash{Object => Hash{String => Hash}}] Hash of data kinds to collections of items
+         # @param selector [LaunchDarkly::Interfaces::DataSystem::Selector, nil] The selector
+         # @param persist [Boolean] Whether to persist the data
+         # @return [void]
+         #
+         private def set_basis(collections, selector, persist)
+           # Take snapshot for change detection if we have flag listeners
+           old_data = nil
+           if @flag_change_broadcaster.has_listeners?
+             old_data = {}
+             [FEATURES, SEGMENTS].each do |kind|
+               old_data[kind] = @memory_store.all(kind)
+             end
+           end
+
+           ok = @memory_store.set_basis(collections)
+           return unless ok
+
+           # Update dependency tracker
+           reset_dependency_tracker(collections)
+
+           # Update state
+           @persist = persist
+           @selector = selector || LaunchDarkly::Interfaces::DataSystem::Selector.no_selector
+
+           # Switch to memory store as active
+           @active_store = @memory_store
+
+           # Persist to persistent store if configured and writable
+           @persistent_store.init(collections) if should_persist?
+
+           # Send change events if we had listeners
+           if old_data
+             affected_items = compute_changed_items_for_full_data_set(old_data, collections)
+             send_change_events(affected_items)
+           end
+         end
+
+         #
+         # Apply a delta update to the store.
+         #
+         # @param collections [Hash{Object => Hash{String => Hash}}] Hash of data kinds to collections with updates
+         # @param selector [LaunchDarkly::Interfaces::DataSystem::Selector, nil] The selector
+         # @param persist [Boolean] Whether to persist the changes
+         # @return [void]
+         #
+         private def apply_delta(collections, selector, persist)
+           ok = @memory_store.apply_delta(collections)
+           return unless ok
+
+           has_listeners = @flag_change_broadcaster.has_listeners?
+           affected_items = Set.new
+
+           collections.each do |kind, collection|
+             collection.each do |key, item|
+               @dependency_tracker.update_dependencies_from(kind, key, item)
+               if has_listeners
+                 @dependency_tracker.add_affected_items(affected_items, { kind: kind, key: key })
+               end
+             end
+           end
+
+           # Update state
+           @persist = persist
+           @selector = selector || LaunchDarkly::Interfaces::DataSystem::Selector.no_selector
+
+           if should_persist?
+             collections.each do |kind, kind_data|
+               kind_data.each do |_key, item|
+                 @persistent_store.upsert(kind, item)
+               end
+             end
+           end
+
+           # Send change events
+           send_change_events(affected_items) unless affected_items.empty?
+         end
+
+         #
+         # Returns whether data should be persisted to the persistent store.
+         #
+         # @return [Boolean]
+         #
+         private def should_persist?
+           @persist && !@persistent_store.nil? && @persistent_store_writable
+         end
+
+         #
+         # Convert a list of Changes to the pre-existing format used by FeatureStore.
+         #
+         # @param changes [Array<LaunchDarkly::Interfaces::DataSystem::Change>] List of changes
+         # @return [Hash{DataKind => Hash{String => Hash}}] Hash suitable for FeatureStore operations
+         #
+         private def changes_to_store_data(changes)
+           all_data = {
+             FEATURES => {},
+             SEGMENTS => {},
+           }
+
+           changes.each do |change|
+             kind = change.kind == LaunchDarkly::Interfaces::DataSystem::ObjectKind::FLAG ? FEATURES : SEGMENTS
+             if change.action == LaunchDarkly::Interfaces::DataSystem::ChangeType::PUT && !change.object.nil?
+               all_data[kind][change.key] = change.object
+             elsif change.action == LaunchDarkly::Interfaces::DataSystem::ChangeType::DELETE
+               all_data[kind][change.key] = { key: change.key, deleted: true, version: change.version }
+             end
+           end
+
+           all_data
+         end
+
+         #
+         # Reset dependency tracker with new full data set.
+         #
+         # @param all_data [Hash{DataKind => Hash{String => Hash}}] Hash of data kinds to items
+         # @return [void]
+         #
+         private def reset_dependency_tracker(all_data)
+           @dependency_tracker.reset
+           all_data.each do |kind, items|
+             items.each do |key, item|
+               @dependency_tracker.update_dependencies_from(kind, key, item)
+             end
+           end
+         end
+
+         #
+         # Send flag change events for affected items.
+         #
+         # @param affected_items [Set<Hash>] Set of {kind:, key:} hashes
+         # @return [void]
+         #
+         private def send_change_events(affected_items)
+           affected_items.each do |item|
+             if item[:kind] == FEATURES
+               @flag_change_broadcaster.broadcast(LaunchDarkly::Interfaces::FlagChange.new(item[:key]))
+             end
+           end
+         end
+
+         #
+         # Compute which items changed between old and new data sets.
+         #
+         # @param old_data [Hash{DataKind => Hash{String => Hash}}] Old data hash
+         # @param new_data [Hash{DataKind => Hash{String => Hash}}] New data hash
+         # @return [Set<Hash>] Set of {kind:, key:} hashes
+         #
+         private def compute_changed_items_for_full_data_set(old_data, new_data)
+           affected_items = Set.new
+
+           [FEATURES, SEGMENTS].each do |kind|
+             old_items = old_data[kind] || {}
+             new_items = new_data[kind] || {}
+
+             # Get all keys from both old and new data
+             all_keys = Set.new(old_items.keys) | Set.new(new_items.keys)
+
+             all_keys.each do |key|
+               old_item = old_items[key]
+               new_item = new_items[key]
+
+               # If either is missing or versions differ, it's a change
+               if old_item.nil? || new_item.nil?
+                 @dependency_tracker.add_affected_items(affected_items, { kind: kind, key: key })
+               elsif old_item[:version] != new_item[:version]
+                 @dependency_tracker.add_affected_items(affected_items, { kind: kind, key: key })
+               end
+             end
+           end
+
+           affected_items
+         end
+       end
+     end
+   end
+ end
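
The new Store keeps a single lock around an in-memory store, an optional persistent store, a dependency tracker, and two broadcasters; apply routes a changeset to set_basis or apply_delta based on its intent code, and reads go through whichever store is currently active. A minimal usage sketch in Ruby follows; the broadcaster and changeset wiring and the persistent_store and change_set placeholders are illustrative only, inferred from the calls visible in this diff rather than from a documented public API.

  require "logger"
  require "concurrent"

  logger = Logger.new($stdout)
  executor = Concurrent::SingleThreadExecutor.new

  # Broadcasters constructed as they are elsewhere in this diff (executor + logger).
  flag_changes = LaunchDarkly::Impl::Broadcaster.new(executor, logger)
  change_sets = LaunchDarkly::Impl::Broadcaster.new(executor, logger)

  store = LaunchDarkly::Impl::DataStore::Store.new(flag_changes, change_sets, logger)

  # Optionally attach a writable persistent store (persistent_store is a placeholder).
  store = store.with_persistence(persistent_store, true)

  # A synchronizer would hand over a ChangeSet (change_set is a placeholder); a
  # TRANSFER_FULL intent replaces all data and switches reads to the memory store.
  store.apply(change_set, true)

  flags = store.get_active_store.all(LaunchDarkly::Impl::DataStore::FEATURES)
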
data/lib/ldclient-rb/impl/data_store.rb
@@ -1,109 +1,23 @@
  require 'concurrent'
  require "ldclient-rb/interfaces"
+ require "ldclient-rb/impl/data_store/data_kind"

  module LaunchDarkly
    module Impl
      module DataStore
-
-       class DataKind
-         FEATURES = "features".freeze
-         SEGMENTS = "segments".freeze
-
-         FEATURE_PREREQ_FN = lambda { |flag| (flag[:prerequisites] || []).map { |p| p[:key] } }.freeze
-
-         attr_reader :namespace
-         attr_reader :priority
-
-         #
-         # @param namespace [String]
-         # @param priority [Integer]
-         #
-         def initialize(namespace:, priority:)
-           @namespace = namespace
-           @priority = priority
-         end
-
-         #
-         # Maintain the same behavior when these data kinds were standard ruby hashes.
-         #
-         # @param key [Symbol]
-         # @return [Object]
-         #
-         def [](key)
-           return priority if key == :priority
-           return namespace if key == :namespace
-           return get_dependency_keys_fn() if key == :get_dependency_keys
-           nil
-         end
-
-         #
-         # Retrieve the dependency keys for a particular data kind. Right now, this is only defined for flags.
+       # These constants denote the types of data that can be stored in the feature store. If
+       # we add another storable data type in the future, as long as it follows the same pattern
+       # (having "key", "version", and "deleted" properties), we only need to add a corresponding
+       # constant here and the existing store should be able to handle it.
        #
-         def get_dependency_keys_fn()
-           return nil unless @namespace == FEATURES
-
-           FEATURE_PREREQ_FN
-         end
-
-         def eql?(other)
-           other.is_a?(DataKind) && namespace == other.namespace && priority == other.priority
-         end
-
-         def hash
-           [namespace, priority].hash
-         end
-       end
-
-       class StatusProvider
-         include LaunchDarkly::Interfaces::DataStore::StatusProvider
-
-         def initialize(store, update_sink)
-           # @type [LaunchDarkly::Impl::FeatureStoreClientWrapper]
-           @store = store
-           # @type [UpdateSink]
-           @update_sink = update_sink
-         end
-
-         def status
-           @update_sink.last_status.get
-         end
-
-         def monitoring_enabled?
-           @store.monitoring_enabled?
-         end
-
-         def add_listener(listener)
-           @update_sink.broadcaster.add_listener(listener)
-         end
-
-         def remove_listener(listener)
-           @update_sink.broadcaster.remove_listener(listener)
-         end
-       end
-
-       class UpdateSink
-         include LaunchDarkly::Interfaces::DataStore::UpdateSink
-
-         # @return [LaunchDarkly::Impl::Broadcaster]
-         attr_reader :broadcaster
-
-         # @return [Concurrent::AtomicReference]
-         attr_reader :last_status
+       # The :priority and :get_dependency_keys properties are used by FeatureStoreDataSetSorter
+       # to ensure data consistency during non-atomic updates.

-         def initialize(broadcaster)
-           @broadcaster = broadcaster
-           @last_status = Concurrent::AtomicReference.new(
-             LaunchDarkly::Interfaces::DataStore::Status.new(true, false)
-           )
-         end
+       FEATURES = DataKind.new(namespace: "features", priority: 1).freeze

-         def update_status(status)
-           return if status.nil?
+       SEGMENTS = DataKind.new(namespace: "segments", priority: 0).freeze

-           old_status = @last_status.get_and_set(status)
-           @broadcaster.broadcast(status) unless old_status == status
-         end
-       end
+       ALL_KINDS = [FEATURES, SEGMENTS].freeze
      end
    end
- end
+ end
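
The DataKind class itself now lives in data/lib/ldclient-rb/impl/data_store/data_kind.rb; this file keeps only the frozen kind constants. A short sketch of how those constants behave, assuming the moved class preserves the hash-like [] accessor shown in the removed code above and that the default in-memory store keeps its existing FeatureStore interface:

  require "ldclient-rb"

  features = LaunchDarkly::Impl::DataStore::FEATURES
  segments = LaunchDarkly::Impl::DataStore::SEGMENTS

  features[:namespace]           # => "features"
  features[:priority]            # => 1
  segments[:get_dependency_keys] # => nil (only flags declare prerequisite dependencies)

  # The in-memory store still accepts these kinds as keys for init/all.
  store = LaunchDarkly::InMemoryFeatureStore.new
  store.init(features => {}, segments => {})
  store.all(features)            # => {}
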
data/lib/ldclient-rb/impl/data_system/fdv1.rb
@@ -0,0 +1,178 @@
+ require 'concurrent'
+ require 'ldclient-rb/impl/broadcaster'
+ require 'ldclient-rb/impl/data_source'
+ require 'ldclient-rb/impl/data_source/null_processor'
+ require 'ldclient-rb/impl/data_source/polling'
+ require 'ldclient-rb/impl/data_source/requestor'
+ require 'ldclient-rb/impl/data_source/stream'
+ require 'ldclient-rb/impl/data_store'
+ require 'ldclient-rb/impl/data_system'
+ require 'ldclient-rb/impl/store_client_wrapper'
+
+ module LaunchDarkly
+   module Impl
+     module DataSystem
+       #
+       # FDv1 wires the existing v1 data source and store behavior behind the
+       # generic DataSystem surface.
+       #
+       # @see DataSystem
+       #
+       class FDv1
+         include LaunchDarkly::Impl::DataSystem
+
+         #
+         # Creates a new FDv1 data system.
+         #
+         # @param sdk_key [String] The SDK key
+         # @param config [LaunchDarkly::Config] The SDK configuration
+         #
+         def initialize(sdk_key, config)
+           @sdk_key = sdk_key
+           @config = config
+           @shared_executor = Concurrent::SingleThreadExecutor.new
+
+           # Set up data store plumbing
+           @data_store_broadcaster = LaunchDarkly::Impl::Broadcaster.new(@shared_executor, @config.logger)
+           @data_store_update_sink = LaunchDarkly::Impl::DataStore::UpdateSink.new(
+             @data_store_broadcaster
+           )
+
+           # Preserve the original unwrapped store to avoid nested wrappers on postfork
+           original_store = @config.feature_store
+           if original_store.is_a?(LaunchDarkly::Impl::FeatureStoreClientWrapper)
+             original_store = original_store.instance_variable_get(:@store)
+           end
+
+           # Wrap the original data store with client wrapper (must be created before status provider)
+           @store_wrapper = LaunchDarkly::Impl::FeatureStoreClientWrapper.new(
+             original_store,
+             @data_store_update_sink,
+             @config.logger
+           )
+
+           # Update config to use wrapped store so data sources can access it
+           @config.instance_variable_set(:@feature_store, @store_wrapper)
+
+           # Create status provider with store wrapper
+           @data_store_status_provider = LaunchDarkly::Impl::DataStore::StatusProvider.new(
+             @store_wrapper,
+             @data_store_update_sink
+           )
+
+           # Set up data source plumbing
+           @data_source_broadcaster = LaunchDarkly::Impl::Broadcaster.new(@shared_executor, @config.logger)
+           @flag_change_broadcaster = LaunchDarkly::Impl::Broadcaster.new(@shared_executor, @config.logger)
+           @data_source_update_sink = LaunchDarkly::Impl::DataSource::UpdateSink.new(
+             @store_wrapper,
+             @data_source_broadcaster,
+             @flag_change_broadcaster
+           )
+           @data_source_status_provider = LaunchDarkly::Impl::DataSource::StatusProvider.new(
+             @data_source_broadcaster,
+             @data_source_update_sink
+           )
+
+           # Ensure v1 processors can find the sink via config for status updates
+           @config.data_source_update_sink = @data_source_update_sink
+
+           # Update processor created in start()
+           @update_processor = nil
+
+           # Diagnostic accumulator provided by client for streaming metrics
+           @diagnostic_accumulator = nil
+         end
+
+         # (see DataSystem#start)
+         def start
+           @update_processor ||= make_update_processor
+           @update_processor.start
+         end
+
+         # (see DataSystem#stop)
+         def stop
+           @update_processor&.stop
+           @store_wrapper.stop
+           @shared_executor.shutdown
+         end
+
+         # (see DataSystem#store)
+         def store
+           @store_wrapper
+         end
+
+         # (see DataSystem#set_diagnostic_accumulator)
+         def set_diagnostic_accumulator(diagnostic_accumulator)
+           @diagnostic_accumulator = diagnostic_accumulator
+         end
+
+         # (see DataSystem#data_source_status_provider)
+         def data_source_status_provider
+           @data_source_status_provider
+         end
+
+         # (see DataSystem#data_store_status_provider)
+         def data_store_status_provider
+           @data_store_status_provider
+         end
+
+         # (see DataSystem#flag_change_broadcaster)
+         def flag_change_broadcaster
+           @flag_change_broadcaster
+         end
+
+         #
+         # (see DataSystem#data_availability)
+         #
+         # In LDD mode, always returns CACHED for backwards compatibility,
+         # even if the store is empty.
+         #
+         def data_availability
+           return DataAvailability::DEFAULTS if @config.offline?
+           return DataAvailability::REFRESHED if @update_processor && @update_processor.initialized?
+           return DataAvailability::CACHED if @store_wrapper.initialized?
+
+           DataAvailability::DEFAULTS
+         end
+
+         # (see DataSystem#target_availability)
+         def target_availability
+           return DataAvailability::DEFAULTS if @config.offline?
+
+           DataAvailability::REFRESHED
+         end
+
+         #
+         # Creates the appropriate update processor based on the configuration.
+         #
+         # @return [Object] The update processor
+         #
+         private def make_update_processor
+           # Handle custom data source (factory or instance)
+           if @config.data_source
+             return @config.data_source unless @config.data_source.respond_to?(:call)
+
+             # Factory - call with appropriate arity
+             return @config.data_source.arity == 3 ?
+               @config.data_source.call(@sdk_key, @config, @diagnostic_accumulator) :
+               @config.data_source.call(@sdk_key, @config)
+           end
+
+           # Create default data source based on config
+           return LaunchDarkly::Impl::DataSource::NullUpdateProcessor.new if @config.offline? || @config.use_ldd?
+
+           if @config.stream?
+             return LaunchDarkly::Impl::DataSource::StreamProcessor.new(@sdk_key, @config, @diagnostic_accumulator)
+           end
+
+           # Polling processor
+           @config.logger.info { "Disabling streaming API" }
+           @config.logger.warn { "You should only disable the streaming API if instructed to do so by LaunchDarkly support" }
+           requestor = LaunchDarkly::Impl::DataSource::Requestor.new(@sdk_key, @config)
+           LaunchDarkly::Impl::DataSource::PollingProcessor.new(@config, requestor)
+         end
+       end
+     end
+   end
+ end
+
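
Taken together, FDv1 is the adapter the client instantiates to keep the pre-existing v1 behavior: make_update_processor builds the same streaming, polling, null (offline/LDD), or custom processor the client previously constructed itself, and the availability methods translate processor and store state into the new DataAvailability values. A rough sketch of driving it directly, assuming the v1 processors still return their readiness event from start (as in the current SDK) and using existing public Config options:

  require "logger"
  require "ldclient-rb"

  config = LaunchDarkly::Config.new(stream: false, logger: Logger.new($stdout))
  data_system = LaunchDarkly::Impl::DataSystem::FDv1.new("my-sdk-key", config)

  ready = data_system.start   # delegates to the underlying processor's start
  ready.wait(10)              # v1 processors signal readiness via this event

  puts data_system.data_availability                  # REFRESHED once the processor is initialized
  puts data_system.data_source_status_provider.status.state

  data_system.stop
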