launchdarkly-server-sdk 8.11.2 → 8.11.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. checksums.yaml +4 -4
  2. data/lib/ldclient-rb/config.rb +66 -3
  3. data/lib/ldclient-rb/context.rb +1 -1
  4. data/lib/ldclient-rb/data_system.rb +243 -0
  5. data/lib/ldclient-rb/events.rb +34 -19
  6. data/lib/ldclient-rb/flags_state.rb +1 -1
  7. data/lib/ldclient-rb/impl/big_segments.rb +4 -4
  8. data/lib/ldclient-rb/impl/cache_store.rb +44 -0
  9. data/lib/ldclient-rb/impl/data_source/polling.rb +108 -0
  10. data/lib/ldclient-rb/impl/data_source/requestor.rb +106 -0
  11. data/lib/ldclient-rb/impl/data_source/status_provider.rb +78 -0
  12. data/lib/ldclient-rb/impl/data_source/stream.rb +198 -0
  13. data/lib/ldclient-rb/impl/data_source.rb +3 -3
  14. data/lib/ldclient-rb/impl/data_store/data_kind.rb +108 -0
  15. data/lib/ldclient-rb/impl/data_store/feature_store_client_wrapper.rb +187 -0
  16. data/lib/ldclient-rb/impl/data_store/in_memory_feature_store.rb +130 -0
  17. data/lib/ldclient-rb/impl/data_store/status_provider.rb +82 -0
  18. data/lib/ldclient-rb/impl/data_store/store.rb +371 -0
  19. data/lib/ldclient-rb/impl/data_store.rb +11 -97
  20. data/lib/ldclient-rb/impl/data_system/fdv1.rb +20 -7
  21. data/lib/ldclient-rb/impl/data_system/fdv2.rb +471 -0
  22. data/lib/ldclient-rb/impl/data_system/polling.rb +601 -0
  23. data/lib/ldclient-rb/impl/data_system/protocolv2.rb +264 -0
  24. data/lib/ldclient-rb/impl/dependency_tracker.rb +21 -9
  25. data/lib/ldclient-rb/impl/evaluator.rb +3 -2
  26. data/lib/ldclient-rb/impl/event_sender.rb +4 -3
  27. data/lib/ldclient-rb/impl/expiring_cache.rb +79 -0
  28. data/lib/ldclient-rb/impl/integrations/file_data_source.rb +8 -8
  29. data/lib/ldclient-rb/impl/integrations/test_data/test_data_source_v2.rb +288 -0
  30. data/lib/ldclient-rb/impl/memoized_value.rb +34 -0
  31. data/lib/ldclient-rb/impl/migrations/migrator.rb +2 -1
  32. data/lib/ldclient-rb/impl/migrations/tracker.rb +2 -1
  33. data/lib/ldclient-rb/impl/model/serialization.rb +6 -6
  34. data/lib/ldclient-rb/impl/non_blocking_thread_pool.rb +48 -0
  35. data/lib/ldclient-rb/impl/repeating_task.rb +2 -2
  36. data/lib/ldclient-rb/impl/simple_lru_cache.rb +27 -0
  37. data/lib/ldclient-rb/impl/util.rb +65 -0
  38. data/lib/ldclient-rb/impl.rb +1 -2
  39. data/lib/ldclient-rb/in_memory_store.rb +1 -18
  40. data/lib/ldclient-rb/integrations/test_data/flag_builder.rb +9 -9
  41. data/lib/ldclient-rb/integrations/test_data.rb +11 -11
  42. data/lib/ldclient-rb/integrations/test_data_v2/flag_builder_v2.rb +582 -0
  43. data/lib/ldclient-rb/integrations/test_data_v2.rb +248 -0
  44. data/lib/ldclient-rb/integrations/util/store_wrapper.rb +3 -2
  45. data/lib/ldclient-rb/interfaces/data_system.rb +755 -0
  46. data/lib/ldclient-rb/interfaces/feature_store.rb +3 -0
  47. data/lib/ldclient-rb/ldclient.rb +55 -131
  48. data/lib/ldclient-rb/util.rb +11 -70
  49. data/lib/ldclient-rb/version.rb +1 -1
  50. data/lib/ldclient-rb.rb +8 -17
  51. metadata +35 -17
  52. data/lib/ldclient-rb/cache_store.rb +0 -45
  53. data/lib/ldclient-rb/expiring_cache.rb +0 -77
  54. data/lib/ldclient-rb/memoized_value.rb +0 -32
  55. data/lib/ldclient-rb/non_blocking_thread_pool.rb +0 -46
  56. data/lib/ldclient-rb/polling.rb +0 -102
  57. data/lib/ldclient-rb/requestor.rb +0 -102
  58. data/lib/ldclient-rb/simple_lru_cache.rb +0 -25
  59. data/lib/ldclient-rb/stream.rb +0 -197
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: e0821343a2375748e37f95a7a43016973a21c6027d9f0b3c402074ae9859c91b
- data.tar.gz: d285b359f653ed6ea74d9b6fedd8ff0388347e24d986f72866c226b24516ed38
+ metadata.gz: e1fa7eda2bd9b051a44200581ef0467e375d74fbeccd23f94838149b01bcebb3
+ data.tar.gz: 6d1d16653bdb132f4348cc3b97e22ea7dbf45af1721a96e89221d10d46e7384e
  SHA512:
- metadata.gz: e9937a4267ca69970169b66564e1fdfff11e7ceefa57462a0f4f11830aa6ef8cefc34dbcb83e62910759c14ba3120d53c20ba7436d87f21aff0a63ad2996ff8f
- data.tar.gz: 6726a6c125e3253c691d8c566e46ef17dc29cfb76407e79232f9b213580bfc53665bf1968fc60b48641a93276b8b4bd6c2bdabce77bda70f4575df2e5b61ec11
+ metadata.gz: f938280053627372c37235543a9204585e4c4550ec029c726d0fde1187b44d8c1351deb80722504de3f88f373ec7217c66f181169c58fb05609683c7dfbf6f77
+ data.tar.gz: bec0a20471ce7242ff8eeb6b0e535e9218492fb4caa83b7bc70d2769242ed96cb0dcf2fc56449c8a6e9380675e594ee898b3eb2aa0c896bb11c0fd1eb116219a
data/lib/ldclient-rb/config.rb CHANGED
@@ -1,4 +1,5 @@
  require "logger"
+ require "ldclient-rb/impl/cache_store"

  module LaunchDarkly
  #
@@ -44,6 +45,7 @@ module LaunchDarkly
  # @option opts [Hash] :application See {#application}
  # @option opts [String] :payload_filter_key See {#payload_filter_key}
  # @option opts [Boolean] :omit_anonymous_contexts See {#omit_anonymous_contexts}
+ # @option opts [DataSystemConfig] :datasystem_config See {#datasystem_config}
  # @option hooks [Array<Interfaces::Hooks::Hook]
  # @option plugins [Array<Interfaces::Plugins::Plugin]
  #
@@ -82,6 +84,7 @@ module LaunchDarkly
  @hooks = (opts[:hooks] || []).keep_if { |hook| hook.is_a? Interfaces::Hooks::Hook }
  @plugins = (opts[:plugins] || []).keep_if { |plugin| plugin.is_a? Interfaces::Plugins::Plugin }
  @omit_anonymous_contexts = opts.has_key?(:omit_anonymous_contexts) && opts[:omit_anonymous_contexts]
+ @datasystem_config = opts[:datasystem_config]
  @data_source_update_sink = nil
  @instance_id = nil
  end
@@ -96,7 +99,7 @@ module LaunchDarkly
  # Custom data source implementations should integrate with this sink if
  # they want to provide support for data source status listeners.
  #
- # @private
+ # @api private
  #
  attr_accessor :data_source_update_sink

@@ -108,7 +111,7 @@ module LaunchDarkly
  # property is not supported; it is temporarily being exposed to maintain
  # backwards compatibility while the SDK structure is updated.
  #
- # @private
+ # @api private
  #
  attr_accessor :instance_id

@@ -430,6 +433,15 @@ module LaunchDarkly
  #
  attr_reader :omit_anonymous_contexts

+ #
+ # Configuration for the upcoming enhanced data system design. This is
+ # experimental and should not be set without direction from LaunchDarkly
+ # support.
+ #
+ # @return [DataSystemConfig, nil]
+ #
+ attr_reader :datasystem_config
+

  #
  # The default LaunchDarkly client configuration. This configuration sets
@@ -477,7 +489,7 @@ module LaunchDarkly
  # @return [Object] the Rails cache if in Rails, or a simple in-memory implementation otherwise
  #
  def self.default_cache_store
- defined?(Rails) && Rails.respond_to?(:cache) ? Rails.cache : ThreadSafeMemoryStore.new
+ defined?(Rails) && Rails.respond_to?(:cache) ? Rails.cache : Impl::ThreadSafeMemoryStore.new
  end

  #
@@ -678,4 +690,55 @@ module LaunchDarkly
  # @return [Float]
  attr_reader :stale_after
  end
+
+ #
+ # Configuration for LaunchDarkly's data acquisition strategy.
+ #
+ # This is not stable and is not subject to any backwards compatibility guarantees
+ # or semantic versioning. It is not suitable for production usage.
+ #
+ class DataSystemConfig
+ #
+ # @param initializers [Array<Proc(Config) => LaunchDarkly::Interfaces::DataSystem::Initializer>, nil] The (optional) array of builder procs
+ # @param primary_synchronizer [Proc(Config) => LaunchDarkly::Interfaces::DataSystem::Synchronizer, nil] The (optional) builder proc for primary synchronizer
+ # @param secondary_synchronizer [Proc(Config) => LaunchDarkly::Interfaces::DataSystem::Synchronizer, nil] The (optional) builder proc for secondary synchronizer
+ # @param data_store_mode [Symbol] The (optional) data store mode
+ # @param data_store [LaunchDarkly::Interfaces::FeatureStore, nil] The (optional) data store
+ # @param fdv1_fallback_synchronizer [Proc(Config) => LaunchDarkly::Interfaces::DataSystem::Synchronizer, nil]
+ # The (optional) builder proc for FDv1-compatible fallback synchronizer
+ #
+ def initialize(initializers: nil, primary_synchronizer: nil, secondary_synchronizer: nil,
+ data_store_mode: LaunchDarkly::Interfaces::DataSystem::DataStoreMode::READ_ONLY, data_store: nil, fdv1_fallback_synchronizer: nil)
+ @initializers = initializers
+ @primary_synchronizer = primary_synchronizer
+ @secondary_synchronizer = secondary_synchronizer
+ @data_store_mode = data_store_mode
+ @data_store = data_store
+ @fdv1_fallback_synchronizer = fdv1_fallback_synchronizer
+ end
+
+ # The initializers for the data system. Each proc takes sdk_key and Config and returns an Initializer.
+ # @return [Array<Proc(String, Config) => LaunchDarkly::Interfaces::DataSystem::Initializer>, nil]
+ attr_reader :initializers
+
+ # The primary synchronizer builder. Takes sdk_key and Config and returns a Synchronizer.
+ # @return [Proc(String, Config) => LaunchDarkly::Interfaces::DataSystem::Synchronizer, nil]
+ attr_reader :primary_synchronizer
+
+ # The secondary synchronizer builder. Takes sdk_key and Config and returns a Synchronizer.
+ # @return [Proc(String, Config) => LaunchDarkly::Interfaces::DataSystem::Synchronizer, nil]
+ attr_reader :secondary_synchronizer
+
+ # The data store mode.
+ # @return [Symbol]
+ attr_reader :data_store_mode
+
+ # The data store.
+ # @return [LaunchDarkly::Interfaces::FeatureStore, nil]
+ attr_reader :data_store
+
+ # The FDv1-compatible fallback synchronizer builder. Takes sdk_key and Config and returns a Synchronizer.
+ # @return [Proc(String, Config) => LaunchDarkly::Interfaces::DataSystem::Synchronizer, nil]
+ attr_reader :fdv1_fallback_synchronizer
+ end

  end
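
For orientation, here is a minimal usage sketch (not part of the diff) showing how the new :datasystem_config option could be wired up, assuming the experimental APIs behave as documented above; it relies on the DataSystem factory module added later in this release, and the SDK key is a placeholder.

require "ldclient-rb"

# Experimental: build a DataSystemConfig that polls for flag data, then pass it
# to Config via the new :datasystem_config option. The option defaults to nil,
# in which case the SDK keeps its existing data source behavior.
data_system_config = LaunchDarkly::DataSystem.polling.build

config = LaunchDarkly::Config.new(datasystem_config: data_system_config)
client = LaunchDarkly::LDClient.new("your-sdk-key", config)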
data/lib/ldclient-rb/context.rb CHANGED
@@ -48,7 +48,7 @@ module LaunchDarkly
  attr_reader :error

  #
- # @private
+ # @api private
  # @param key [String, nil]
  # @param fully_qualified_key [String, nil]
  # @param kind [String, nil]
data/lib/ldclient-rb/data_system.rb ADDED
@@ -0,0 +1,243 @@
+ # frozen_string_literal: true
+
+ require 'ldclient-rb/interfaces/data_system'
+ require 'ldclient-rb/config'
+ require 'ldclient-rb/impl/data_system/polling'
+
+ module LaunchDarkly
+ #
+ # Configuration for LaunchDarkly's data acquisition strategy.
+ #
+ # This module provides factory methods for creating data system configurations.
+ #
+ module DataSystem
+ #
+ # Builder for the data system configuration.
+ #
+ class ConfigBuilder
+ def initialize
+ @initializers = nil
+ @primary_synchronizer = nil
+ @secondary_synchronizer = nil
+ @fdv1_fallback_synchronizer = nil
+ @data_store_mode = LaunchDarkly::Interfaces::DataSystem::DataStoreMode::READ_ONLY
+ @data_store = nil
+ end
+
+ #
+ # Sets the initializers for the data system.
+ #
+ # @param initializers [Array<Proc(String, Config) => LaunchDarkly::Interfaces::DataSystem::Initializer>]
+ # Array of builder procs that take sdk_key and Config and return an Initializer
+ # @return [ConfigBuilder] self for chaining
+ #
+ def initializers(initializers)
+ @initializers = initializers
+ self
+ end
+
+ #
+ # Sets the synchronizers for the data system.
+ #
+ # @param primary [Proc(String, Config) => LaunchDarkly::Interfaces::DataSystem::Synchronizer] Builder proc that takes sdk_key and Config and returns the primary Synchronizer
+ # @param secondary [Proc(String, Config) => LaunchDarkly::Interfaces::DataSystem::Synchronizer, nil]
+ # Builder proc that takes sdk_key and Config and returns the secondary Synchronizer
+ # @return [ConfigBuilder] self for chaining
+ #
+ def synchronizers(primary, secondary = nil)
+ @primary_synchronizer = primary
+ @secondary_synchronizer = secondary
+ self
+ end
+
+ #
+ # Configures the SDK with a fallback synchronizer that is compatible with
+ # the Flag Delivery v1 API.
+ #
+ # @param fallback [Proc(String, Config) => LaunchDarkly::Interfaces::DataSystem::Synchronizer]
+ # Builder proc that takes sdk_key and Config and returns the fallback Synchronizer
+ # @return [ConfigBuilder] self for chaining
+ #
+ def fdv1_compatible_synchronizer(fallback)
+ @fdv1_fallback_synchronizer = fallback
+ self
+ end
+
+ #
+ # Sets the data store configuration for the data system.
+ #
+ # @param data_store [LaunchDarkly::Interfaces::FeatureStore] The data store
+ # @param store_mode [Symbol] The store mode
+ # @return [ConfigBuilder] self for chaining
+ #
+ def data_store(data_store, store_mode)
+ @data_store = data_store
+ @data_store_mode = store_mode
+ self
+ end
+
+ #
+ # Builds the data system configuration.
+ #
+ # @return [DataSystemConfig]
+ # @raise [ArgumentError] if configuration is invalid
+ #
+ def build
+ if @secondary_synchronizer && @primary_synchronizer.nil?
+ raise ArgumentError, "Primary synchronizer must be set if secondary is set"
+ end
+
+ DataSystemConfig.new(
+ initializers: @initializers,
+ primary_synchronizer: @primary_synchronizer,
+ secondary_synchronizer: @secondary_synchronizer,
+ data_store_mode: @data_store_mode,
+ data_store: @data_store,
+ fdv1_fallback_synchronizer: @fdv1_fallback_synchronizer
+ )
+ end
+ end
+
+ #
+ # Returns a builder proc for creating a polling data source.
+ # This is a building block that can be used with {ConfigBuilder#initializers}
+ # or {ConfigBuilder#synchronizers} to create custom data system configurations.
+ #
+ # @return [Proc] A proc that takes (sdk_key, config) and returns a polling data source
+ #
+ def self.polling_ds_builder
+ lambda do |sdk_key, config|
+ LaunchDarkly::Impl::DataSystem::PollingDataSourceBuilder.new(sdk_key, config).build
+ end
+ end
+
+ #
+ # Returns a builder proc for creating an FDv1 fallback polling data source.
+ # This is a building block that can be used with {ConfigBuilder#fdv1_compatible_synchronizer}
+ # to provide FDv1 compatibility in custom data system configurations.
+ #
+ # @return [Proc] A proc that takes (sdk_key, config) and returns an FDv1 polling data source
+ #
+ def self.fdv1_fallback_ds_builder
+ lambda do |sdk_key, config|
+ LaunchDarkly::Impl::DataSystem::FDv1PollingDataSourceBuilder.new(sdk_key, config).build
+ end
+ end
+
+ #
+ # Returns a builder proc for creating a streaming data source.
+ # This is a building block that can be used with {ConfigBuilder#synchronizers}
+ # to create custom data system configurations.
+ #
+ # @return [Proc] A proc that takes (sdk_key, config) and returns a streaming data source
+ #
+ def self.streaming_ds_builder
+ # TODO(fdv2): Implement streaming data source builder
+ lambda do |_sdk_key, _config|
+ raise NotImplementedError, "Streaming data source not yet implemented for FDv2"
+ end
+ end
+
+ #
+ # Default is LaunchDarkly's recommended flag data acquisition strategy.
+ #
+ # Currently, it operates a two-phase method for obtaining data: first, it
+ # requests data from LaunchDarkly's global CDN. Then, it initiates a
+ # streaming connection to LaunchDarkly's Flag Delivery services to
+ # receive real-time updates.
+ #
+ # If the streaming connection is interrupted for an extended period of
+ # time, the SDK will automatically fall back to polling the global CDN
+ # for updates.
+ #
+ # @return [ConfigBuilder]
+ #
+ def self.default
+ polling_builder = polling_ds_builder
+ streaming_builder = streaming_ds_builder
+ fallback = fdv1_fallback_ds_builder
+
+ builder = ConfigBuilder.new
+ builder.initializers([polling_builder])
+ builder.synchronizers(streaming_builder, polling_builder)
+ builder.fdv1_compatible_synchronizer(fallback)
+
+ builder
+ end
+
+ #
+ # Streaming configures the SDK to efficiently stream flag/segment data
+ # in the background, allowing evaluations to operate on the latest data
+ # with no additional latency.
+ #
+ # @return [ConfigBuilder]
+ #
+ def self.streaming
+ streaming_builder = streaming_ds_builder
+ fallback = fdv1_fallback_ds_builder
+
+ builder = ConfigBuilder.new
+ builder.synchronizers(streaming_builder)
+ builder.fdv1_compatible_synchronizer(fallback)
+
+ builder
+ end
+
+ #
+ # Polling configures the SDK to regularly poll an endpoint for
+ # flag/segment data in the background. This is less efficient than
+ # streaming, but may be necessary in some network environments.
+ #
+ # @return [ConfigBuilder]
+ #
+ def self.polling
+ polling_builder = polling_ds_builder
+ fallback = fdv1_fallback_ds_builder
+
+ builder = ConfigBuilder.new
+ builder.synchronizers(polling_builder)
+ builder.fdv1_compatible_synchronizer(fallback)
+
+ builder
+ end
+
+ #
+ # Custom returns a builder suitable for creating a custom data
+ # acquisition strategy. You may configure how the SDK uses a Persistent
+ # Store, how the SDK obtains an initial set of data, and how the SDK
+ # keeps data up-to-date.
+ #
+ # @return [ConfigBuilder]
+ #
+ def self.custom
+ ConfigBuilder.new
+ end
+
+ #
+ # Daemon configures the SDK to read from a persistent store integration
+ # that is populated by Relay Proxy or other SDKs. The SDK will not connect
+ # to LaunchDarkly. In this mode, the SDK never writes to the data store.
+ #
+ # @param store [Object] The persistent store
+ # @return [ConfigBuilder]
+ #
+ def self.daemon(store)
+ custom.data_store(store, LaunchDarkly::Interfaces::DataSystem::DataStoreMode::READ_ONLY)
+ end
+
+ #
+ # PersistentStore is similar to default, with the addition of a persistent
+ # store integration. Before data has arrived from LaunchDarkly, the SDK is
+ # able to evaluate flags using data from the persistent store. Once fresh
+ # data is available, the SDK will no longer read from the persistent store,
+ # although it will keep it up-to-date.
+ #
+ # @param store [Object] The persistent store
+ # @return [ConfigBuilder]
+ #
+ def self.persistent_store(store)
+ default.data_store(store, LaunchDarkly::Interfaces::DataSystem::DataStoreMode::READ_WRITE)
+ end
+ end
+ end
+
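
As a rough illustration of the "custom" path above (again, not part of the diff): the builder methods chain because each returns self. The feature store below is a hypothetical stand-in for any LaunchDarkly::Interfaces::FeatureStore implementation.

# my_feature_store is hypothetical: substitute any object implementing
# LaunchDarkly::Interfaces::FeatureStore (for example, a persistent store integration).
data_system_config = LaunchDarkly::DataSystem.custom
  .initializers([LaunchDarkly::DataSystem.polling_ds_builder])
  .synchronizers(LaunchDarkly::DataSystem.polling_ds_builder)
  .fdv1_compatible_synchronizer(LaunchDarkly::DataSystem.fdv1_fallback_ds_builder)
  .data_store(my_feature_store, LaunchDarkly::Interfaces::DataSystem::DataStoreMode::READ_WRITE)
  .build

config = LaunchDarkly::Config.new(datasystem_config: data_system_config)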
data/lib/ldclient-rb/events.rb CHANGED
@@ -3,6 +3,8 @@ require "ldclient-rb/impl/diagnostic_events"
  require "ldclient-rb/impl/event_sender"
  require "ldclient-rb/impl/event_summarizer"
  require "ldclient-rb/impl/event_types"
+ require "ldclient-rb/impl/non_blocking_thread_pool"
+ require "ldclient-rb/impl/simple_lru_cache"
  require "ldclient-rb/impl/util"

  require "concurrent"
@@ -60,6 +62,19 @@
  def record_migration_op_event(event)
  end

+ #
+ # Tells the event processor that all pending analytics events should be delivered as soon as possible.
+ #
+ # When the LaunchDarkly client generates analytics events (from {LaunchDarkly::LDClient#variation},
+ # {LaunchDarkly::LDClient#variation_detail}, {LaunchDarkly::LDClient#identify}, or
+ # {LaunchDarkly::LDClient#track}), they are queued on a worker thread. The event thread normally
+ # sends all queued events to LaunchDarkly at regular intervals, controlled by the
+ # {LaunchDarkly::Config#flush_interval} option. Calling `flush` triggers a send without waiting
+ # for the next interval.
+ #
+ # Flushing is asynchronous, so this method will return before it is complete. However, if you
+ # call {LaunchDarkly::LDClient#close}, events are guaranteed to be sent before that method returns.
+ #
  def flush
  end

@@ -70,24 +85,24 @@
  MAX_FLUSH_WORKERS = 5
  private_constant :MAX_FLUSH_WORKERS

- # @private
+ # @api private
  class NullEventProcessor
  include EventProcessorMethods
  end

- # @private
+ # @api private
  class FlushMessage
  end

- # @private
+ # @api private
  class FlushContextsMessage
  end

- # @private
+ # @api private
  class DiagnosticEventMessage
  end

- # @private
+ # @api private
  class SynchronousMessage
  def initialize
  @reply = Concurrent::Semaphore.new(0)
@@ -102,15 +117,15 @@
  end
  end

- # @private
+ # @api private
  class TestSyncMessage < SynchronousMessage
  end

- # @private
+ # @api private
  class StopMessage < SynchronousMessage
  end

- # @private
+ # @api private
  class EventProcessor
  include EventProcessorMethods

@@ -141,7 +156,7 @@
  @inbox_full = Concurrent::AtomicBoolean.new(false)

  event_sender = (test_properties || {})[:event_sender] ||
- Impl::EventSender.new(sdk_key, config, client || Util.new_http_client(config.events_uri, config))
+ Impl::EventSender.new(sdk_key, config, client || Impl::Util.new_http_client(config.events_uri, config))

  @timestamp_fn = (test_properties || {})[:timestamp_fn] || proc { Impl::Util.current_time_millis }
  @omit_anonymous_contexts = config.omit_anonymous_contexts
@@ -226,7 +241,7 @@
  end
  end

- # @private
+ # @api private
  class EventDispatcher
  def initialize(inbox, sdk_key, config, diagnostic_accumulator, event_sender)
  @sdk_key = sdk_key
@@ -235,7 +250,7 @@
  @event_sender = event_sender
  @sampler = LaunchDarkly::Impl::Sampler.new(Random.new)

- @context_keys = SimpleLRUCacheSet.new(config.context_keys_capacity)
+ @context_keys = Impl::SimpleLRUCacheSet.new(config.context_keys_capacity)
  @formatter = EventOutputFormatter.new(config)
  @disabled = Concurrent::AtomicBoolean.new(false)
  @last_known_past_time = Concurrent::AtomicReference.new(0)
@@ -243,10 +258,10 @@
  @events_in_last_batch = 0

  outbox = EventBuffer.new(config.capacity, config.logger)
- flush_workers = NonBlockingThreadPool.new(MAX_FLUSH_WORKERS, 'LD/EventDispatcher/FlushWorkers')
+ flush_workers = Impl::NonBlockingThreadPool.new(MAX_FLUSH_WORKERS, 'LD/EventDispatcher/FlushWorkers')

  if !@diagnostic_accumulator.nil?
- diagnostic_event_workers = NonBlockingThreadPool.new(1, 'LD/EventDispatcher/DiagnosticEventWorkers')
+ diagnostic_event_workers = Impl::NonBlockingThreadPool.new(1, 'LD/EventDispatcher/DiagnosticEventWorkers')
  init_event = @diagnostic_accumulator.create_init_event(config)
  send_diagnostic_event(init_event, diagnostic_event_workers)
  else
@@ -281,7 +296,7 @@
  dispatch_event(message, outbox)
  end
  rescue => e
- Util.log_exception(@config.logger, "Unexpected error in event processor", e)
+ Impl::Util.log_exception(@config.logger, "Unexpected error in event processor", e)
  end
  end
  end
@@ -383,7 +398,7 @@
  @last_known_past_time.value = (result.time_from_server.to_f * 1000).to_i
  end
  rescue => e
- Util.log_exception(@config.logger, "Unexpected error in event processor", e)
+ Impl::Util.log_exception(@config.logger, "Unexpected error in event processor", e)
  end
  end
  outbox.clear if success # Reset our internal state, these events now belong to the flush worker
@@ -408,16 +423,16 @@
  begin
  @event_sender.send_event_data(event.to_json, "diagnostic event", true)
  rescue => e
- Util.log_exception(@config.logger, "Unexpected error in event processor", e)
+ Impl::Util.log_exception(@config.logger, "Unexpected error in event processor", e)
  end
  end
  end
  end

- # @private
+ # @api private
  FlushPayload = Struct.new(:events, :summary)

- # @private
+ # @api private
  class EventBuffer
  def initialize(capacity, logger)
  @capacity = capacity
@@ -461,7 +476,7 @@
  end
  end

- # @private
+ # @api private
  class EventOutputFormatter
  FEATURE_KIND = 'feature'
  IDENTIFY_KIND = 'identify'
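
The flush semantics documented above reduce to a short usage sketch (not part of the diff; client is an already-constructed LaunchDarkly::LDClient):

# Asynchronous: returns as soon as the flush has been signaled to the worker thread.
client.flush

# close delivers any remaining queued events before returning, so an explicit
# flush immediately before shutdown is optional.
client.close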
data/lib/ldclient-rb/flags_state.rb CHANGED
@@ -15,7 +15,7 @@
  end

  # Used internally to build the state map.
- # @private
+ # @api private
  def add_flag(flag_state, with_reasons, details_only_if_tracked)
  key = flag_state[:key]
  @flag_values[key] = flag_state[:value]
data/lib/ldclient-rb/impl/big_segments.rb CHANGED
@@ -1,8 +1,8 @@
  require "ldclient-rb/config"
- require "ldclient-rb/expiring_cache"
+ require "ldclient-rb/impl/expiring_cache"
  require "ldclient-rb/impl/repeating_task"
+ require "ldclient-rb/impl/util"
  require "ldclient-rb/interfaces"
- require "ldclient-rb/util"

  require "digest"

@@ -45,7 +45,7 @@
  membership = EMPTY_MEMBERSHIP if membership.nil?
  @cache[context_key] = membership
  rescue => e
- LaunchDarkly::Util.log_exception(@logger, "Big Segment store membership query returned error", e)
+ Impl::Util.log_exception(@logger, "Big Segment store membership query returned error", e)
  return BigSegmentMembershipResult.new(nil, BigSegmentsStatus::STORE_ERROR)
  end
  end
@@ -67,7 +67,7 @@
  metadata = @store.get_metadata
  new_status = Interfaces::BigSegmentStoreStatus.new(true, !metadata || stale?(metadata.last_up_to_date))
  rescue => e
- LaunchDarkly::Util.log_exception(@logger, "Big Segment store status query returned error", e)
+ Impl::Util.log_exception(@logger, "Big Segment store status query returned error", e)
  end
  end
  @last_status = new_status
data/lib/ldclient-rb/impl/cache_store.rb ADDED
@@ -0,0 +1,44 @@
+ require "concurrent/map"
+
+ module LaunchDarkly
+ module Impl
+ # A thread-safe in-memory store that uses the same semantics that Faraday would expect, although we
+ # no longer use Faraday. This is used by Requestor, when we are not in a Rails environment.
+ class ThreadSafeMemoryStore
+ #
+ # Default constructor
+ #
+ # @return [ThreadSafeMemoryStore] a new store
+ def initialize
+ @cache = Concurrent::Map.new
+ end
+
+ #
+ # Read a value from the cache
+ # @param key [Object] the cache key
+ #
+ # @return [Object] the cache value
+ def read(key)
+ @cache[key]
+ end
+
+ #
+ # Store a value in the cache
+ # @param key [Object] the cache key
+ # @param value [Object] the value to associate with the key
+ #
+ # @return [Object] the value
+ def write(key, value)
+ @cache[key] = value
+ end
+
+ #
+ # Delete a value in the cache
+ # @param key [Object] the cache key
+ def delete(key)
+ @cache.delete(key)
+ end
+ end
+ end
+ end
+
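
ThreadSafeMemoryStore is now an internal (Impl) class, so application code should not depend on it; purely for illustration, its behavior under the new namespace looks like this (a sketch, not part of the diff):

store = LaunchDarkly::Impl::ThreadSafeMemoryStore.new

store.write(:flags_etag, "abc123")  # returns "abc123"
store.read(:flags_etag)             # => "abc123"
store.delete(:flags_etag)
store.read(:flags_etag)             # => nil (Concurrent::Map returns nil for missing keys)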