launchdarkly-server-sdk 6.2.5 → 7.0.0

Files changed (59)
  1. checksums.yaml +4 -4
  2. data/README.md +1 -2
  3. data/lib/ldclient-rb/config.rb +203 -43
  4. data/lib/ldclient-rb/context.rb +487 -0
  5. data/lib/ldclient-rb/evaluation_detail.rb +85 -26
  6. data/lib/ldclient-rb/events.rb +185 -146
  7. data/lib/ldclient-rb/flags_state.rb +25 -14
  8. data/lib/ldclient-rb/impl/big_segments.rb +117 -0
  9. data/lib/ldclient-rb/impl/context.rb +96 -0
  10. data/lib/ldclient-rb/impl/context_filter.rb +145 -0
  11. data/lib/ldclient-rb/impl/diagnostic_events.rb +9 -10
  12. data/lib/ldclient-rb/impl/evaluator.rb +428 -132
  13. data/lib/ldclient-rb/impl/evaluator_bucketing.rb +40 -41
  14. data/lib/ldclient-rb/impl/evaluator_helpers.rb +50 -0
  15. data/lib/ldclient-rb/impl/evaluator_operators.rb +26 -55
  16. data/lib/ldclient-rb/impl/event_sender.rb +6 -6
  17. data/lib/ldclient-rb/impl/event_summarizer.rb +68 -0
  18. data/lib/ldclient-rb/impl/event_types.rb +78 -0
  19. data/lib/ldclient-rb/impl/integrations/consul_impl.rb +7 -7
  20. data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +92 -28
  21. data/lib/ldclient-rb/impl/integrations/file_data_source.rb +212 -0
  22. data/lib/ldclient-rb/impl/integrations/redis_impl.rb +165 -32
  23. data/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb +40 -0
  24. data/lib/ldclient-rb/impl/model/clause.rb +39 -0
  25. data/lib/ldclient-rb/impl/model/feature_flag.rb +213 -0
  26. data/lib/ldclient-rb/impl/model/preprocessed_data.rb +64 -0
  27. data/lib/ldclient-rb/impl/model/segment.rb +126 -0
  28. data/lib/ldclient-rb/impl/model/serialization.rb +54 -44
  29. data/lib/ldclient-rb/impl/repeating_task.rb +47 -0
  30. data/lib/ldclient-rb/impl/store_data_set_sorter.rb +2 -2
  31. data/lib/ldclient-rb/impl/unbounded_pool.rb +1 -1
  32. data/lib/ldclient-rb/impl/util.rb +62 -1
  33. data/lib/ldclient-rb/in_memory_store.rb +2 -2
  34. data/lib/ldclient-rb/integrations/consul.rb +9 -2
  35. data/lib/ldclient-rb/integrations/dynamodb.rb +47 -2
  36. data/lib/ldclient-rb/integrations/file_data.rb +108 -0
  37. data/lib/ldclient-rb/integrations/redis.rb +43 -3
  38. data/lib/ldclient-rb/integrations/test_data/flag_builder.rb +594 -0
  39. data/lib/ldclient-rb/integrations/test_data.rb +213 -0
  40. data/lib/ldclient-rb/integrations/util/store_wrapper.rb +14 -9
  41. data/lib/ldclient-rb/integrations.rb +2 -51
  42. data/lib/ldclient-rb/interfaces.rb +151 -1
  43. data/lib/ldclient-rb/ldclient.rb +175 -133
  44. data/lib/ldclient-rb/memoized_value.rb +1 -1
  45. data/lib/ldclient-rb/non_blocking_thread_pool.rb +1 -1
  46. data/lib/ldclient-rb/polling.rb +22 -41
  47. data/lib/ldclient-rb/reference.rb +274 -0
  48. data/lib/ldclient-rb/requestor.rb +7 -7
  49. data/lib/ldclient-rb/stream.rb +9 -9
  50. data/lib/ldclient-rb/util.rb +11 -17
  51. data/lib/ldclient-rb/version.rb +1 -1
  52. data/lib/ldclient-rb.rb +2 -4
  53. metadata +49 -23
  54. data/lib/ldclient-rb/event_summarizer.rb +0 -55
  55. data/lib/ldclient-rb/file_data_source.rb +0 -314
  56. data/lib/ldclient-rb/impl/event_factory.rb +0 -126
  57. data/lib/ldclient-rb/newrelic.rb +0 -17
  58. data/lib/ldclient-rb/redis_store.rb +0 -88
  59. data/lib/ldclient-rb/user_filter.rb +0 -52
@@ -4,10 +4,7 @@ module LaunchDarkly
  module Impl
  module Integrations
  module DynamoDB
- #
- # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper.
- #
- class DynamoDBFeatureStoreCore
+ class DynamoDBStoreImplBase
  begin
  require "aws-sdk-dynamodb"
  AWS_SDK_ENABLED = true
@@ -23,16 +20,13 @@ module LaunchDarkly
  PARTITION_KEY = "namespace"
  SORT_KEY = "key"

- VERSION_ATTRIBUTE = "version"
- ITEM_JSON_ATTRIBUTE = "item"
-
  def initialize(table_name, opts)
- if !AWS_SDK_ENABLED
- raise RuntimeError.new("can't use DynamoDB feature store without the aws-sdk or aws-sdk-dynamodb gem")
+ unless AWS_SDK_ENABLED
+ raise RuntimeError.new("can't use #{description} without the aws-sdk or aws-sdk-dynamodb gem")
  end

  @table_name = table_name
- @prefix = opts[:prefix]
+ @prefix = opts[:prefix] ? (opts[:prefix] + ":") : ""
  @logger = opts[:logger] || Config.default_logger

  if !opts[:existing_client].nil?
@@ -41,7 +35,31 @@ module LaunchDarkly
  @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts] || {})
  end

- @logger.info("DynamoDBFeatureStore: using DynamoDB table \"#{table_name}\"")
+ @logger.info("#{description}: using DynamoDB table \"#{table_name}\"")
+ end
+
+ def stop
+ # AWS client doesn't seem to have a close method
+ end
+
+ protected def description
+ "DynamoDB"
+ end
+ end
+
+ #
+ # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper.
+ #
+ class DynamoDBFeatureStoreCore < DynamoDBStoreImplBase
+ VERSION_ATTRIBUTE = "version"
+ ITEM_JSON_ATTRIBUTE = "item"
+
+ def initialize(table_name, opts)
+ super(table_name, opts)
+ end
+
+ def description
+ "DynamoDBFeatureStore"
  end

  def init_internal(all_data)
@@ -65,7 +83,7 @@ module LaunchDarkly
  del_item = make_keys_hash(tuple[0], tuple[1])
  requests.push({ delete_request: { key: del_item } })
  end
-
+
  # Now set the special key that we check in initialized_internal?
  inited_item = make_keys_hash(inited_key, inited_key)
  requests.push({ put_request: { item: inited_item } })
@@ -105,11 +123,11 @@ module LaunchDarkly
  expression_attribute_names: {
  "#namespace" => PARTITION_KEY,
  "#key" => SORT_KEY,
- "#version" => VERSION_ATTRIBUTE
+ "#version" => VERSION_ATTRIBUTE,
  },
  expression_attribute_values: {
- ":version" => new_item[:version]
- }
+ ":version" => new_item[:version],
+ },
  })
  new_item
  rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException
@@ -124,14 +142,10 @@ module LaunchDarkly
  !resp.item.nil? && resp.item.length > 0
  end

- def stop
- # AWS client doesn't seem to have a close method
- end
-
  private

  def prefixed_namespace(base_str)
- (@prefix.nil? || @prefix == "") ? base_str : "#{@prefix}:#{base_str}"
+ @prefix + base_str
  end

  def namespace_for_kind(kind)
@@ -145,7 +159,7 @@ module LaunchDarkly
  def make_keys_hash(namespace, key)
  {
  PARTITION_KEY => namespace,
- SORT_KEY => key
+ SORT_KEY => key,
  }
  end

@@ -156,16 +170,16 @@ module LaunchDarkly
  key_conditions: {
  PARTITION_KEY => {
  comparison_operator: "EQ",
- attribute_value_list: [ namespace_for_kind(kind) ]
- }
- }
+ attribute_value_list: [ namespace_for_kind(kind) ],
+ },
+ },
  }
  end

  def get_item_by_keys(namespace, key)
  @client.get_item({
  table_name: @table_name,
- key: make_keys_hash(namespace, key)
+ key: make_keys_hash(namespace, key),
  })
  end

@@ -176,8 +190,8 @@ module LaunchDarkly
  projection_expression: "#namespace, #key",
  expression_attribute_names: {
  "#namespace" => PARTITION_KEY,
- "#key" => SORT_KEY
- }
+ "#key" => SORT_KEY,
+ },
  })
  while true
  resp = @client.query(req)
@@ -196,7 +210,7 @@ module LaunchDarkly
  def marshal_item(kind, item)
  make_keys_hash(namespace_for_kind(kind), item[:key]).merge({
  VERSION_ATTRIBUTE => item[:version],
- ITEM_JSON_ATTRIBUTE => Model.serialize(kind, item)
+ ITEM_JSON_ATTRIBUTE => Model.serialize(kind, item),
  })
  end

@@ -208,6 +222,56 @@ module LaunchDarkly
  end
  end

+ class DynamoDBBigSegmentStore < DynamoDBStoreImplBase
+ KEY_METADATA = 'big_segments_metadata'
+ KEY_CONTEXT_DATA = 'big_segments_user'
+ ATTR_SYNC_TIME = 'synchronizedOn'
+ ATTR_INCLUDED = 'included'
+ ATTR_EXCLUDED = 'excluded'
+
+ def initialize(table_name, opts)
+ super(table_name, opts)
+ end
+
+ def description
+ "DynamoDBBigSegmentStore"
+ end
+
+ def get_metadata
+ key = @prefix + KEY_METADATA
+ data = @client.get_item(
+ table_name: @table_name,
+ key: {
+ PARTITION_KEY => key,
+ SORT_KEY => key,
+ }
+ )
+ timestamp = data.item && data.item[ATTR_SYNC_TIME] ?
+ data.item[ATTR_SYNC_TIME] : nil
+ LaunchDarkly::Interfaces::BigSegmentStoreMetadata.new(timestamp)
+ end
+
+ def get_membership(context_hash)
+ data = @client.get_item(
+ table_name: @table_name,
+ key: {
+ PARTITION_KEY => @prefix + KEY_CONTEXT_DATA,
+ SORT_KEY => context_hash,
+ })
+ return nil unless data.item
+ excluded_refs = data.item[ATTR_EXCLUDED] || []
+ included_refs = data.item[ATTR_INCLUDED] || []
+ if excluded_refs.empty? && included_refs.empty?
+ nil
+ else
+ membership = {}
+ excluded_refs.each { |ref| membership[ref] = false }
+ included_refs.each { |ref| membership[ref] = true }
+ membership
+ end
+ end
+ end
+
  class DynamoDBUtil
  #
  # Calls client.batch_write_item as many times as necessary to submit all of the given requests.
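The DynamoDB changes above factor the shared AWS client setup into a new DynamoDBStoreImplBase, which DynamoDBFeatureStoreCore and the new DynamoDBBigSegmentStore both extend; log and error messages now pick up the concrete store's name through an overridable description method. A minimal standalone sketch of that pattern (class names below are illustrative, not SDK code):

    class StoreImplBase
      def initialize(table_name, opts = {})
        @table_name = table_name
        # A nil prefix becomes "", so key-building code can always concatenate safely.
        @prefix = opts[:prefix] ? (opts[:prefix] + ":") : ""
      end

      def stop
        # no-op; mirrors the base class's stop (the AWS client has no close method)
      end

      protected def description
        "DynamoDB"
      end
    end

    class FeatureStoreCore < StoreImplBase
      def description
        "DynamoDBFeatureStore"
      end
    end

    class BigSegmentStore < StoreImplBase
      def description
        "DynamoDBBigSegmentStore"
      end
    end

    FeatureStoreCore.new("my-table").description  # => "DynamoDBFeatureStore"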
@@ -0,0 +1,212 @@
+ require 'ldclient-rb/in_memory_store'
+ require 'ldclient-rb/util'
+
+ require 'concurrent/atomics'
+ require 'json'
+ require 'yaml'
+ require 'pathname'
+
+ module LaunchDarkly
+ module Impl
+ module Integrations
+ class FileDataSourceImpl
+ # To avoid pulling in 'listen' and its transitive dependencies for people who aren't using the
+ # file data source or who don't need auto-updating, we only enable auto-update if the 'listen'
+ # gem has been provided by the host app.
+ @@have_listen = false
+ begin
+ require 'listen'
+ @@have_listen = true
+ rescue LoadError
+ end
+
+ def initialize(feature_store, logger, options={})
+ @feature_store = feature_store
+ @logger = logger
+ @paths = options[:paths] || []
+ if @paths.is_a? String
+ @paths = [ @paths ]
+ end
+ @auto_update = options[:auto_update]
+ if @auto_update && @@have_listen && !options[:force_polling] # force_polling is used only for tests
+ # We have seen unreliable behavior in the 'listen' gem in JRuby 9.1 (https://github.com/guard/listen/issues/449).
+ # Therefore, on that platform we'll fall back to file polling instead.
+ if defined?(JRUBY_VERSION) && JRUBY_VERSION.start_with?("9.1.")
+ @use_listen = false
+ else
+ @use_listen = true
+ end
+ end
+ @poll_interval = options[:poll_interval] || 1
+ @initialized = Concurrent::AtomicBoolean.new(false)
+ @ready = Concurrent::Event.new
+ end
+
+ def initialized?
+ @initialized.value
+ end
+
+ def start
+ ready = Concurrent::Event.new
+
+ # We will return immediately regardless of whether the file load succeeded or failed -
+ # the difference can be detected by checking "initialized?"
+ ready.set
+
+ load_all
+
+ if @auto_update
+ # If we're going to watch files, then the start event will be set the first time we get
+ # a successful load.
+ @listener = start_listener
+ end
+
+ ready
+ end
+
+ def stop
+ @listener.stop unless @listener.nil?
+ end
+
+ private
+
+ def load_all
+ all_data = {
+ FEATURES => {},
+ SEGMENTS => {},
+ }
+ @paths.each do |path|
+ begin
+ load_file(path, all_data)
+ rescue => exn
+ LaunchDarkly::Util.log_exception(@logger, "Unable to load flag data from \"#{path}\"", exn)
+ return
+ end
+ end
+ @feature_store.init(all_data)
+ @initialized.make_true
+ end
+
+ def load_file(path, all_data)
+ parsed = parse_content(IO.read(path))
+ (parsed[:flags] || {}).each do |key, flag|
+ add_item(all_data, FEATURES, flag)
+ end
+ (parsed[:flagValues] || {}).each do |key, value|
+ add_item(all_data, FEATURES, make_flag_with_value(key.to_s, value))
+ end
+ (parsed[:segments] || {}).each do |key, segment|
+ add_item(all_data, SEGMENTS, segment)
+ end
+ end
+
+ def parse_content(content)
+ # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while
+ # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least
+ # for all the samples of actual flag data that we've tested).
+ symbolize_all_keys(YAML.safe_load(content))
+ end
+
+ def symbolize_all_keys(value)
+ # This is necessary because YAML.load doesn't have an option for parsing keys as symbols, and
+ # the SDK expects all objects to be formatted that way.
+ if value.is_a?(Hash)
+ value.map{ |k, v| [k.to_sym, symbolize_all_keys(v)] }.to_h
+ elsif value.is_a?(Array)
+ value.map{ |v| symbolize_all_keys(v) }
+ else
+ value
+ end
+ end
+
+ def add_item(all_data, kind, item)
+ items = all_data[kind]
+ raise ArgumentError, "Received unknown item kind #{kind[:namespace]} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash
+ key = item[:key].to_sym
+ unless items[key].nil?
+ raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once"
+ end
+ items[key] = Model.deserialize(kind, item)
+ end
+
+ def make_flag_with_value(key, value)
+ {
+ key: key,
+ on: true,
+ fallthrough: { variation: 0 },
+ variations: [ value ],
+ }
+ end
+
+ def start_listener
+ resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s }
+ if @use_listen
+ start_listener_with_listen_gem(resolved_paths)
+ else
+ FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all), @logger)
+ end
+ end
+
+ def start_listener_with_listen_gem(resolved_paths)
+ path_set = resolved_paths.to_set
+ dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq
+ opts = { latency: @poll_interval }
+ l = Listen.to(*dir_paths, opts) do |modified, added, removed|
+ paths = modified + added + removed
+ if paths.any? { |p| path_set.include?(p) }
+ load_all
+ end
+ end
+ l.start
+ l
+ end
+
+ #
+ # Used internally by FileDataSource to track data file changes if the 'listen' gem is not available.
+ #
+ class FileDataSourcePoller
+ def initialize(resolved_paths, interval, reloader, logger)
+ @stopped = Concurrent::AtomicBoolean.new(false)
+ get_file_times = Proc.new do
+ ret = {}
+ resolved_paths.each do |path|
+ begin
+ ret[path] = File.mtime(path)
+ rescue Errno::ENOENT
+ ret[path] = nil
+ end
+ end
+ ret
+ end
+ last_times = get_file_times.call
+ @thread = Thread.new do
+ while true
+ sleep interval
+ break if @stopped.value
+ begin
+ new_times = get_file_times.call
+ changed = false
+ last_times.each do |path, old_time|
+ new_time = new_times[path]
+ if !new_time.nil? && new_time != old_time
+ changed = true
+ break
+ end
+ end
+ reloader.call if changed
+ rescue => exn
+ LaunchDarkly::Util.log_exception(logger, "Unexpected exception in FileDataSourcePoller", exn)
+ end
+ end
+ end
+ end
+
+ def stop
+ @stopped.make_true
+ @thread.run # wakes it up if it's sleeping
+ end
+ end
+ end
+ end
+ end
+ end
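FileDataSourceImpl above is internal; the public entry point in this release is the new integrations/file_data.rb listed under "Files changed" but not shown in this section. A hedged usage sketch, assuming that module exposes a data_source factory whose options match the ones read above (paths, auto_update, poll_interval):

    require 'ldclient-rb'

    # flags.yml may use either the full "flags:" schema or the simple "flagValues:" map,
    # since both are handled by load_file above; YAML and JSON are both accepted.
    source = LaunchDarkly::Integrations::FileData.data_source(
      paths: ["flags.yml"],
      auto_update: true   # uses the 'listen' gem when available, otherwise mtime polling
    )

    config = LaunchDarkly::Config.new(data_source: source, send_events: false)
    client = LaunchDarkly::LDClient.new("sdk-key-unused-with-file-data", config)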
@@ -6,9 +6,87 @@ module LaunchDarkly
  module Integrations
  module Redis
  #
- # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper.
+ # An implementation of the LaunchDarkly client's feature store that uses a Redis
+ # instance. This object holds feature flags and related data received from the
+ # streaming API. Feature data can also be further cached in memory to reduce overhead
+ # of calls to Redis.
+ #
+ # To use this class, you must first have the `redis` and `connection-pool` gems
+ # installed. Then, create an instance and store it in the `feature_store` property
+ # of your client configuration.
  #
- class RedisFeatureStoreCore
+ class RedisFeatureStore
+ include LaunchDarkly::Interfaces::FeatureStore
+
+ # Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating
+ # to RedisFeatureStoreCore where the actual database logic is. This class was retained for historical
+ # reasons, so that existing code can still call RedisFeatureStore.new. In the future, we will migrate
+ # away from exposing these concrete classes and use factory methods instead.
+
+ #
+ # Constructor for a RedisFeatureStore instance.
+ #
+ # @param opts [Hash] the configuration options
+ # @option opts [String] :redis_url URL of the Redis instance (shortcut for omitting redis_opts)
+ # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just redis_url)
+ # @option opts [String] :prefix namespace prefix to add to all hash keys used by LaunchDarkly
+ # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger`
+ # @option opts [Integer] :max_connections size of the Redis connection pool
+ # @option opts [Integer] :expiration expiration time for the in-memory cache, in seconds; 0 for no local caching
+ # @option opts [Integer] :capacity maximum number of feature flags (or related objects) to cache locally
+ # @option opts [Object] :pool custom connection pool, if desired
+ # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool.
+ #
+ def initialize(opts = {})
+ core = RedisFeatureStoreCore.new(opts)
+ @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts)
+ end
+
+ #
+ # Default value for the `redis_url` constructor parameter; points to an instance of Redis
+ # running at `localhost` with its default port.
+ #
+ def self.default_redis_url
+ LaunchDarkly::Integrations::Redis::default_redis_url
+ end
+
+ #
+ # Default value for the `prefix` constructor parameter.
+ #
+ def self.default_prefix
+ LaunchDarkly::Integrations::Redis::default_prefix
+ end
+
+ def get(kind, key)
+ @wrapper.get(kind, key)
+ end
+
+ def all(kind)
+ @wrapper.all(kind)
+ end
+
+ def delete(kind, key, version)
+ @wrapper.delete(kind, key, version)
+ end
+
+ def init(all_data)
+ @wrapper.init(all_data)
+ end
+
+ def upsert(kind, item)
+ @wrapper.upsert(kind, item)
+ end
+
+ def initialized?
+ @wrapper.initialized?
+ end
+
+ def stop
+ @wrapper.stop
+ end
+ end
+
+ class RedisStoreImplBase
  begin
  require "redis"
  require "connection_pool"
@@ -18,35 +96,68 @@ module LaunchDarkly
  end

  def initialize(opts)
- if !REDIS_ENABLED
- raise RuntimeError.new("can't use Redis feature store because one of these gems is missing: redis, connection_pool")
+ unless REDIS_ENABLED
+ raise RuntimeError.new("can't use #{description} because one of these gems is missing: redis, connection_pool")
  end

- @redis_opts = opts[:redis_opts] || Hash.new
- if opts[:redis_url]
- @redis_opts[:url] = opts[:redis_url]
- end
- if !@redis_opts.include?(:url)
- @redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url
- end
- max_connections = opts[:max_connections] || 16
- @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do
- ::Redis.new(@redis_opts)
- end
+ @pool = create_redis_pool(opts)
+
  # shutdown pool on close unless the client passed a custom pool and specified not to shutdown
  @pool_shutdown_on_close = (!opts[:pool] || opts.fetch(:pool_shutdown_on_close, true))
+
  @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix
  @logger = opts[:logger] || Config.default_logger
  @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented

- @stopped = Concurrent::AtomicBoolean.new(false)
+ @stopped = Concurrent::AtomicBoolean.new()

  with_connection do |redis|
- @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \
- and prefix: #{@prefix}")
+ @logger.info("#{description}: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} and prefix: #{@prefix}")
  end
  end

+ def stop
+ if @stopped.make_true
+ return unless @pool_shutdown_on_close
+ @pool.shutdown { |redis| redis.close }
+ end
+ end
+
+ protected def description
+ "Redis"
+ end
+
+ protected def with_connection
+ @pool.with { |redis| yield(redis) }
+ end
+
+ private def create_redis_pool(opts)
+ redis_opts = opts[:redis_opts] ? opts[:redis_opts].clone : Hash.new
+ if opts[:redis_url]
+ redis_opts[:url] = opts[:redis_url]
+ end
+ unless redis_opts.include?(:url)
+ redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url
+ end
+ max_connections = opts[:max_connections] || 16
+ opts[:pool] || ConnectionPool.new(size: max_connections) { ::Redis.new(redis_opts) }
+ end
+ end
+
+ #
+ # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper.
+ #
+ class RedisFeatureStoreCore < RedisStoreImplBase
+ def initialize(opts)
+ super(opts)
+
+ @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented
+ end
+
+ def description
+ "RedisFeatureStore"
+ end
+
  def init_internal(all_data)
  count = 0
  with_connection do |redis|
@@ -103,8 +214,8 @@ module LaunchDarkly
  else
  final_item = old_item
  action = new_item[:deleted] ? "delete" : "update"
- @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \
- in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" }
+ # rubocop:disable Layout/LineLength
+ @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" }
  end
  redis.unwatch
  end
@@ -117,17 +228,10 @@ module LaunchDarkly
  with_connection { |redis| redis.exists?(inited_key) }
  end

- def stop
- if @stopped.make_true
- return unless @pool_shutdown_on_close
- @pool.shutdown { |redis| redis.close }
- end
- end
-
  private

  def before_update_transaction(base_key, key)
- @test_hook.before_update_transaction(base_key, key) if !@test_hook.nil?
+ @test_hook.before_update_transaction(base_key, key) unless @test_hook.nil?
  end

  def items_key(kind)
@@ -142,14 +246,43 @@ module LaunchDarkly
  @prefix + ":$inited"
  end

- def with_connection
- @pool.with { |redis| yield(redis) }
- end
-
  def get_redis(redis, kind, key)
  Model.deserialize(kind, redis.hget(items_key(kind), key))
  end
  end
+
+ #
+ # Internal implementation of the Redis big segment store.
+ #
+ class RedisBigSegmentStore < RedisStoreImplBase
+ KEY_LAST_UP_TO_DATE = ':big_segments_synchronized_on'
+ KEY_CONTEXT_INCLUDE = ':big_segment_include:'
+ KEY_CONTEXT_EXCLUDE = ':big_segment_exclude:'
+
+ def description
+ "RedisBigSegmentStore"
+ end
+
+ def get_metadata
+ value = with_connection { |redis| redis.get(@prefix + KEY_LAST_UP_TO_DATE) }
+ Interfaces::BigSegmentStoreMetadata.new(value.nil? ? nil : value.to_i)
+ end
+
+ def get_membership(context_hash)
+ with_connection do |redis|
+ included_refs = redis.smembers(@prefix + KEY_CONTEXT_INCLUDE + context_hash)
+ excluded_refs = redis.smembers(@prefix + KEY_CONTEXT_EXCLUDE + context_hash)
+ if !included_refs && !excluded_refs
+ nil
+ else
+ membership = {}
+ excluded_refs.each { |ref| membership[ref] = false }
+ included_refs.each { |ref| membership[ref] = true }
+ membership
+ end
+ end
+ end
+ end
  end
  end
  end
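The new RedisFeatureStore docstring earlier in this file spells out the intended wiring: with the redis and connection_pool gems installed, create a store and assign it to the feature_store property of the client configuration. A sketch of that wiring, assuming the factory LaunchDarkly::Integrations::Redis.new_feature_store from integrations/redis.rb (listed in "Files changed" but not shown here); the option values are illustrative and mirror the documented constructor opts:

    require 'ldclient-rb'

    store = LaunchDarkly::Integrations::Redis.new_feature_store(
      redis_url: "redis://localhost:6379/0",  # shortcut for redis_opts[:url]
      prefix: "launchdarkly",                 # namespace prefix for all keys
      expiration: 30                          # seconds of local in-memory caching; 0 disables it
    )

    config = LaunchDarkly::Config.new(feature_store: store)
    client = LaunchDarkly::LDClient.new("your-sdk-key", config)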