launchdarkly-server-sdk 6.1.1 → 6.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +4 -5
- data/lib/ldclient-rb/config.rb +118 -4
- data/lib/ldclient-rb/evaluation_detail.rb +104 -14
- data/lib/ldclient-rb/events.rb +201 -107
- data/lib/ldclient-rb/file_data_source.rb +9 -300
- data/lib/ldclient-rb/flags_state.rb +23 -12
- data/lib/ldclient-rb/impl/big_segments.rb +117 -0
- data/lib/ldclient-rb/impl/diagnostic_events.rb +1 -1
- data/lib/ldclient-rb/impl/evaluator.rb +116 -62
- data/lib/ldclient-rb/impl/evaluator_bucketing.rb +22 -9
- data/lib/ldclient-rb/impl/evaluator_helpers.rb +53 -0
- data/lib/ldclient-rb/impl/evaluator_operators.rb +1 -1
- data/lib/ldclient-rb/impl/event_summarizer.rb +63 -0
- data/lib/ldclient-rb/impl/event_types.rb +90 -0
- data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +82 -18
- data/lib/ldclient-rb/impl/integrations/file_data_source.rb +212 -0
- data/lib/ldclient-rb/impl/integrations/redis_impl.rb +84 -31
- data/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb +40 -0
- data/lib/ldclient-rb/impl/model/preprocessed_data.rb +177 -0
- data/lib/ldclient-rb/impl/model/serialization.rb +7 -37
- data/lib/ldclient-rb/impl/repeating_task.rb +47 -0
- data/lib/ldclient-rb/impl/util.rb +62 -1
- data/lib/ldclient-rb/integrations/consul.rb +8 -1
- data/lib/ldclient-rb/integrations/dynamodb.rb +48 -3
- data/lib/ldclient-rb/integrations/file_data.rb +108 -0
- data/lib/ldclient-rb/integrations/redis.rb +42 -2
- data/lib/ldclient-rb/integrations/test_data/flag_builder.rb +438 -0
- data/lib/ldclient-rb/integrations/test_data.rb +209 -0
- data/lib/ldclient-rb/integrations/util/store_wrapper.rb +5 -0
- data/lib/ldclient-rb/integrations.rb +2 -51
- data/lib/ldclient-rb/interfaces.rb +152 -2
- data/lib/ldclient-rb/ldclient.rb +131 -33
- data/lib/ldclient-rb/polling.rb +22 -41
- data/lib/ldclient-rb/requestor.rb +3 -3
- data/lib/ldclient-rb/stream.rb +4 -3
- data/lib/ldclient-rb/util.rb +10 -1
- data/lib/ldclient-rb/version.rb +1 -1
- data/lib/ldclient-rb.rb +0 -1
- metadata +35 -132
- data/.circleci/config.yml +0 -40
- data/.github/ISSUE_TEMPLATE/bug_report.md +0 -37
- data/.github/ISSUE_TEMPLATE/config.yml +0 -5
- data/.github/ISSUE_TEMPLATE/feature_request.md +0 -20
- data/.github/pull_request_template.md +0 -21
- data/.gitignore +0 -16
- data/.hound.yml +0 -2
- data/.ldrelease/build-docs.sh +0 -18
- data/.ldrelease/circleci/linux/execute.sh +0 -18
- data/.ldrelease/circleci/mac/execute.sh +0 -18
- data/.ldrelease/circleci/template/build.sh +0 -29
- data/.ldrelease/circleci/template/publish.sh +0 -23
- data/.ldrelease/circleci/template/set-gem-home.sh +0 -7
- data/.ldrelease/circleci/template/test.sh +0 -10
- data/.ldrelease/circleci/template/update-version.sh +0 -8
- data/.ldrelease/circleci/windows/execute.ps1 +0 -19
- data/.ldrelease/config.yml +0 -29
- data/.rspec +0 -2
- data/.rubocop.yml +0 -600
- data/.simplecov +0 -4
- data/CHANGELOG.md +0 -351
- data/CODEOWNERS +0 -1
- data/CONTRIBUTING.md +0 -37
- data/Gemfile +0 -3
- data/azure-pipelines.yml +0 -51
- data/docs/Makefile +0 -26
- data/docs/index.md +0 -9
- data/launchdarkly-server-sdk.gemspec +0 -45
- data/lib/ldclient-rb/event_summarizer.rb +0 -55
- data/lib/ldclient-rb/impl/event_factory.rb +0 -120
- data/spec/config_spec.rb +0 -63
- data/spec/diagnostic_events_spec.rb +0 -163
- data/spec/evaluation_detail_spec.rb +0 -135
- data/spec/event_sender_spec.rb +0 -197
- data/spec/event_summarizer_spec.rb +0 -63
- data/spec/events_spec.rb +0 -607
- data/spec/expiring_cache_spec.rb +0 -76
- data/spec/feature_store_spec_base.rb +0 -213
- data/spec/file_data_source_spec.rb +0 -283
- data/spec/fixtures/feature.json +0 -37
- data/spec/fixtures/feature1.json +0 -36
- data/spec/fixtures/user.json +0 -9
- data/spec/flags_state_spec.rb +0 -81
- data/spec/http_util.rb +0 -132
- data/spec/impl/evaluator_bucketing_spec.rb +0 -111
- data/spec/impl/evaluator_clause_spec.rb +0 -55
- data/spec/impl/evaluator_operators_spec.rb +0 -141
- data/spec/impl/evaluator_rule_spec.rb +0 -96
- data/spec/impl/evaluator_segment_spec.rb +0 -125
- data/spec/impl/evaluator_spec.rb +0 -305
- data/spec/impl/evaluator_spec_base.rb +0 -75
- data/spec/impl/model/serialization_spec.rb +0 -41
- data/spec/in_memory_feature_store_spec.rb +0 -12
- data/spec/integrations/consul_feature_store_spec.rb +0 -40
- data/spec/integrations/dynamodb_feature_store_spec.rb +0 -103
- data/spec/integrations/store_wrapper_spec.rb +0 -276
- data/spec/launchdarkly-server-sdk_spec.rb +0 -13
- data/spec/launchdarkly-server-sdk_spec_autoloadtest.rb +0 -9
- data/spec/ldclient_end_to_end_spec.rb +0 -157
- data/spec/ldclient_spec.rb +0 -643
- data/spec/newrelic_spec.rb +0 -5
- data/spec/polling_spec.rb +0 -120
- data/spec/redis_feature_store_spec.rb +0 -121
- data/spec/requestor_spec.rb +0 -196
- data/spec/segment_store_spec_base.rb +0 -95
- data/spec/simple_lru_cache_spec.rb +0 -24
- data/spec/spec_helper.rb +0 -9
- data/spec/store_spec.rb +0 -10
- data/spec/stream_spec.rb +0 -45
- data/spec/user_filter_spec.rb +0 -91
- data/spec/util_spec.rb +0 -17
- data/spec/version_spec.rb +0 -7

data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb

@@ -4,10 +4,7 @@ module LaunchDarkly
   module Impl
     module Integrations
       module DynamoDB
-
-        # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper.
-        #
-        class DynamoDBFeatureStoreCore
+        class DynamoDBStoreImplBase
           begin
             require "aws-sdk-dynamodb"
             AWS_SDK_ENABLED = true
@@ -19,29 +16,50 @@ module LaunchDarkly
               AWS_SDK_ENABLED = false
             end
           end
-
+
           PARTITION_KEY = "namespace"
           SORT_KEY = "key"

-          VERSION_ATTRIBUTE = "version"
-          ITEM_JSON_ATTRIBUTE = "item"
-
           def initialize(table_name, opts)
             if !AWS_SDK_ENABLED
-              raise RuntimeError.new("can't use
+              raise RuntimeError.new("can't use #{description} without the aws-sdk or aws-sdk-dynamodb gem")
             end
-
+
             @table_name = table_name
-            @prefix = opts[:prefix]
+            @prefix = opts[:prefix] ? (opts[:prefix] + ":") : ""
             @logger = opts[:logger] || Config.default_logger
-
+
             if !opts[:existing_client].nil?
               @client = opts[:existing_client]
             else
               @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts] || {})
             end
+
+            @logger.info("#{description}: using DynamoDB table \"#{table_name}\"")
+          end
+
+          def stop
+            # AWS client doesn't seem to have a close method
+          end

-
+          protected def description
+            "DynamoDB"
+          end
+        end
+
+        #
+        # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper.
+        #
+        class DynamoDBFeatureStoreCore < DynamoDBStoreImplBase
+          VERSION_ATTRIBUTE = "version"
+          ITEM_JSON_ATTRIBUTE = "item"
+
+          def initialize(table_name, opts)
+            super(table_name, opts)
+          end
+
+          def description
+            "DynamoDBFeatureStore"
           end

           def init_internal(all_data)
@@ -124,14 +142,10 @@ module LaunchDarkly
             !resp.item.nil? && resp.item.length > 0
           end

-          def stop
-            # AWS client doesn't seem to have a close method
-          end
-
           private

           def prefixed_namespace(base_str)
-
+            @prefix + base_str
           end

           def namespace_for_kind(kind)
@@ -208,6 +222,56 @@ module LaunchDarkly
          end
        end

+        class DynamoDBBigSegmentStore < DynamoDBStoreImplBase
+          KEY_METADATA = 'big_segments_metadata';
+          KEY_USER_DATA = 'big_segments_user';
+          ATTR_SYNC_TIME = 'synchronizedOn';
+          ATTR_INCLUDED = 'included';
+          ATTR_EXCLUDED = 'excluded';
+
+          def initialize(table_name, opts)
+            super(table_name, opts)
+          end
+
+          def description
+            "DynamoDBBigSegmentStore"
+          end
+
+          def get_metadata
+            key = @prefix + KEY_METADATA
+            data = @client.get_item(
+              table_name: @table_name,
+              key: {
+                PARTITION_KEY => key,
+                SORT_KEY => key
+              }
+            )
+            timestamp = data.item && data.item[ATTR_SYNC_TIME] ?
+              data.item[ATTR_SYNC_TIME] : nil
+            LaunchDarkly::Interfaces::BigSegmentStoreMetadata.new(timestamp)
+          end
+
+          def get_membership(user_hash)
+            data = @client.get_item(
+              table_name: @table_name,
+              key: {
+                PARTITION_KEY => @prefix + KEY_USER_DATA,
+                SORT_KEY => user_hash
+              })
+            return nil if !data.item
+            excluded_refs = data.item[ATTR_EXCLUDED] || []
+            included_refs = data.item[ATTR_INCLUDED] || []
+            if excluded_refs.empty? && included_refs.empty?
+              nil
+            else
+              membership = {}
+              excluded_refs.each { |ref| membership[ref] = false }
+              included_refs.each { |ref| membership[ref] = true }
+              membership
+            end
+          end
+        end
+
         class DynamoDBUtil
           #
           # Calls client.batch_write_item as many times as necessary to submit all of the given requests.
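
The new DynamoDBBigSegmentStore reads Big Segment state from the same table as the feature store: a single item keyed by big_segments_metadata holds the synchronizedOn timestamp, and one item per user hash holds included/excluded lists that get_membership folds into a membership hash (exclusions first, so an explicit include wins). A minimal wiring sketch follows; it assumes the public helpers added elsewhere in this release (LaunchDarkly::Integrations::DynamoDB.new_big_segment_store in integrations/dynamodb.rb and LaunchDarkly::BigSegmentsConfig in config.rb) follow the names used in the SDK documentation, since they are not shown in these hunks.

    # Wiring sketch only - new_big_segment_store and BigSegmentsConfig are assumed
    # from the SDK docs; only the internal store class appears in the hunks above.
    require 'ldclient-rb'

    store = LaunchDarkly::Integrations::DynamoDB.new_big_segment_store(
      "my-ld-table",     # same table layout as the feature store
      prefix: "ld"       # keys become "ld:big_segments_metadata", "ld:big_segments_user"
    )

    config = LaunchDarkly::Config.new(
      big_segments: LaunchDarkly::BigSegmentsConfig.new(store: store)
    )
    client = LaunchDarkly::LDClient.new("sdk-key", config)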
data/lib/ldclient-rb/impl/integrations/file_data_source.rb

@@ -0,0 +1,212 @@
+require 'ldclient-rb/in_memory_store'
+require 'ldclient-rb/util'
+
+require 'concurrent/atomics'
+require 'json'
+require 'yaml'
+require 'pathname'
+
+module LaunchDarkly
+  module Impl
+    module Integrations
+      class FileDataSourceImpl
+        # To avoid pulling in 'listen' and its transitive dependencies for people who aren't using the
+        # file data source or who don't need auto-updating, we only enable auto-update if the 'listen'
+        # gem has been provided by the host app.
+        @@have_listen = false
+        begin
+          require 'listen'
+          @@have_listen = true
+        rescue LoadError
+        end
+
+        def initialize(feature_store, logger, options={})
+          @feature_store = feature_store
+          @logger = logger
+          @paths = options[:paths] || []
+          if @paths.is_a? String
+            @paths = [ @paths ]
+          end
+          @auto_update = options[:auto_update]
+          if @auto_update && @@have_listen && !options[:force_polling] # force_polling is used only for tests
+            # We have seen unreliable behavior in the 'listen' gem in JRuby 9.1 (https://github.com/guard/listen/issues/449).
+            # Therefore, on that platform we'll fall back to file polling instead.
+            if defined?(JRUBY_VERSION) && JRUBY_VERSION.start_with?("9.1.")
+              @use_listen = false
+            else
+              @use_listen = true
+            end
+          end
+          @poll_interval = options[:poll_interval] || 1
+          @initialized = Concurrent::AtomicBoolean.new(false)
+          @ready = Concurrent::Event.new
+        end
+
+        def initialized?
+          @initialized.value
+        end
+
+        def start
+          ready = Concurrent::Event.new
+
+          # We will return immediately regardless of whether the file load succeeded or failed -
+          # the difference can be detected by checking "initialized?"
+          ready.set
+
+          load_all
+
+          if @auto_update
+            # If we're going to watch files, then the start event will be set the first time we get
+            # a successful load.
+            @listener = start_listener
+          end
+
+          ready
+        end
+
+        def stop
+          @listener.stop if !@listener.nil?
+        end
+
+        private
+
+        def load_all
+          all_data = {
+            FEATURES => {},
+            SEGMENTS => {}
+          }
+          @paths.each do |path|
+            begin
+              load_file(path, all_data)
+            rescue => exn
+              LaunchDarkly::Util.log_exception(@logger, "Unable to load flag data from \"#{path}\"", exn)
+              return
+            end
+          end
+          @feature_store.init(all_data)
+          @initialized.make_true
+        end
+
+        def load_file(path, all_data)
+          parsed = parse_content(IO.read(path))
+          (parsed[:flags] || {}).each do |key, flag|
+            add_item(all_data, FEATURES, flag)
+          end
+          (parsed[:flagValues] || {}).each do |key, value|
+            add_item(all_data, FEATURES, make_flag_with_value(key.to_s, value))
+          end
+          (parsed[:segments] || {}).each do |key, segment|
+            add_item(all_data, SEGMENTS, segment)
+          end
+        end
+
+        def parse_content(content)
+          # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while
+          # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least
+          # for all the samples of actual flag data that we've tested).
+          symbolize_all_keys(YAML.safe_load(content))
+        end
+
+        def symbolize_all_keys(value)
+          # This is necessary because YAML.load doesn't have an option for parsing keys as symbols, and
+          # the SDK expects all objects to be formatted that way.
+          if value.is_a?(Hash)
+            value.map{ |k, v| [k.to_sym, symbolize_all_keys(v)] }.to_h
+          elsif value.is_a?(Array)
+            value.map{ |v| symbolize_all_keys(v) }
+          else
+            value
+          end
+        end
+
+        def add_item(all_data, kind, item)
+          items = all_data[kind]
+          raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash
+          key = item[:key].to_sym
+          if !items[key].nil?
+            raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once"
+          end
+          items[key] = item
+        end
+
+        def make_flag_with_value(key, value)
+          {
+            key: key,
+            on: true,
+            fallthrough: { variation: 0 },
+            variations: [ value ]
+          }
+        end
+
+        def start_listener
+          resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s }
+          if @use_listen
+            start_listener_with_listen_gem(resolved_paths)
+          else
+            FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all), @logger)
+          end
+        end
+
+        def start_listener_with_listen_gem(resolved_paths)
+          path_set = resolved_paths.to_set
+          dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq
+          opts = { latency: @poll_interval }
+          l = Listen.to(*dir_paths, opts) do |modified, added, removed|
+            paths = modified + added + removed
+            if paths.any? { |p| path_set.include?(p) }
+              load_all
+            end
+          end
+          l.start
+          l
+        end
+
+        #
+        # Used internally by FileDataSource to track data file changes if the 'listen' gem is not available.
+        #
+        class FileDataSourcePoller
+          def initialize(resolved_paths, interval, reloader, logger)
+            @stopped = Concurrent::AtomicBoolean.new(false)
+            get_file_times = Proc.new do
+              ret = {}
+              resolved_paths.each do |path|
+                begin
+                  ret[path] = File.mtime(path)
+                rescue Errno::ENOENT
+                  ret[path] = nil
+                end
+              end
+              ret
+            end
+            last_times = get_file_times.call
+            @thread = Thread.new do
+              while true
+                sleep interval
+                break if @stopped.value
+                begin
+                  new_times = get_file_times.call
+                  changed = false
+                  last_times.each do |path, old_time|
+                    new_time = new_times[path]
+                    if !new_time.nil? && new_time != old_time
+                      changed = true
+                      break
+                    end
+                  end
+                  reloader.call if changed
+                rescue => exn
+                  LaunchDarkly::Util.log_exception(logger, "Unexpected exception in FileDataSourcePoller", exn)
+                end
+              end
+            end
+          end
+
+          def stop
+            @stopped.make_true
+            @thread.run # wakes it up if it's sleeping
+          end
+        end
+      end
+    end
+  end
+end
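
FileDataSourceImpl is the internal half of the new file data source: it parses each file in :paths with the Ruby YAML parser (which also accepts JSON), accepts full flag definitions under flags, shorthand values under flagValues (turned into single-variation flags by make_flag_with_value), and segments under segments, and reloads on file changes either through the optional listen gem or the FileDataSourcePoller fallback. A usage sketch follows; it assumes the public wrapper in data/lib/ldclient-rb/integrations/file_data.rb (+108 lines in the file list above) exposes a data_source helper with the option names shown here, as in the SDK documentation.

    # Sketch only - LaunchDarkly::Integrations::FileData.data_source is assumed from
    # the SDK docs; only the internal implementation appears in this diff.
    require 'ldclient-rb'

    # flags.yml might contain, for example:
    #   flagValues:
    #     my-string-flag: "on"
    #     my-bool-flag: true
    source = LaunchDarkly::Integrations::FileData.data_source(
      paths: ["flags.yml"],
      auto_update: true,   # uses 'listen' if the host app bundles it, otherwise the poller
      poll_interval: 1
    )

    config = LaunchDarkly::Config.new(data_source: source, send_events: false)
    client = LaunchDarkly::LDClient.new("sdk-key", config)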
data/lib/ldclient-rb/impl/integrations/redis_impl.rb

@@ -5,10 +5,7 @@ module LaunchDarkly
   module Impl
     module Integrations
       module Redis
-
-        # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper.
-        #
-        class RedisFeatureStoreCore
+        class RedisStoreImplBase
           begin
             require "redis"
             require "connection_pool"
@@ -19,22 +16,14 @@ module LaunchDarkly

           def initialize(opts)
             if !REDIS_ENABLED
-              raise RuntimeError.new("can't use
+              raise RuntimeError.new("can't use #{description} because one of these gems is missing: redis, connection_pool")
             end

-            @
-
-              @redis_opts[:url] = opts[:redis_url]
-            end
-            if !@redis_opts.include?(:url)
-              @redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url
-            end
-            max_connections = opts[:max_connections] || 16
-            @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do
-              ::Redis.new(@redis_opts)
-            end
+            @pool = create_redis_pool(opts)
+
             # shutdown pool on close unless the client passed a custom pool and specified not to shutdown
             @pool_shutdown_on_close = (!opts[:pool] || opts.fetch(:pool_shutdown_on_close, true))
+
             @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix
             @logger = opts[:logger] || Config.default_logger
             @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented
@@ -42,10 +31,53 @@ module LaunchDarkly
             @stopped = Concurrent::AtomicBoolean.new(false)

             with_connection do |redis|
-              @logger.info("
-
+              @logger.info("#{description}: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} and prefix: #{@prefix}")
+            end
+          end
+
+          def stop
+            if @stopped.make_true
+              return unless @pool_shutdown_on_close
+              @pool.shutdown { |redis| redis.close }
+            end
+          end
+
+          protected def description
+            "Redis"
+          end
+
+          protected def with_connection
+            @pool.with { |redis| yield(redis) }
+          end
+
+          private def create_redis_pool(opts)
+            redis_opts = opts[:redis_opts] ? opts[:redis_opts].clone : Hash.new
+            if opts[:redis_url]
+              redis_opts[:url] = opts[:redis_url]
+            end
+            if !redis_opts.include?(:url)
+              redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url
+            end
+            max_connections = opts[:max_connections] || 16
+            return opts[:pool] || ConnectionPool.new(size: max_connections) do
+              ::Redis.new(redis_opts)
             end
           end
+        end
+
+        #
+        # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper.
+        #
+        class RedisFeatureStoreCore < RedisStoreImplBase
+          def initialize(opts)
+            super(opts)
+
+            @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented
+          end
+
+          def description
+            "RedisFeatureStore"
+          end

           def init_internal(all_data)
             count = 0
@@ -103,8 +135,7 @@ module LaunchDarkly
                 else
                   final_item = old_item
                   action = new_item[:deleted] ? "delete" : "update"
-                  @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]}
-                  in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" }
+                  @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" }
                 end
                 redis.unwatch
               end
@@ -117,13 +148,6 @@ module LaunchDarkly
             with_connection { |redis| redis.exists?(inited_key) }
           end

-          def stop
-            if @stopped.make_true
-              return unless @pool_shutdown_on_close
-              @pool.shutdown { |redis| redis.close }
-            end
-          end
-
           private

           def before_update_transaction(base_key, key)
@@ -142,14 +166,43 @@ module LaunchDarkly
             @prefix + ":$inited"
           end

-          def with_connection
-            @pool.with { |redis| yield(redis) }
-          end
-
           def get_redis(redis, kind, key)
             Model.deserialize(kind, redis.hget(items_key(kind), key))
           end
         end
+
+        #
+        # Internal implementation of the Redis big segment store.
+        #
+        class RedisBigSegmentStore < RedisStoreImplBase
+          KEY_LAST_UP_TO_DATE = ':big_segments_synchronized_on'
+          KEY_USER_INCLUDE = ':big_segment_include:'
+          KEY_USER_EXCLUDE = ':big_segment_exclude:'
+
+          def description
+            "RedisBigSegmentStore"
+          end
+
+          def get_metadata
+            value = with_connection { |redis| redis.get(@prefix + KEY_LAST_UP_TO_DATE) }
+            Interfaces::BigSegmentStoreMetadata.new(value.nil? ? nil : value.to_i)
+          end
+
+          def get_membership(user_hash)
+            with_connection do |redis|
+              included_refs = redis.smembers(@prefix + KEY_USER_INCLUDE + user_hash)
+              excluded_refs = redis.smembers(@prefix + KEY_USER_EXCLUDE + user_hash)
+              if !included_refs && !excluded_refs
+                nil
+              else
+                membership = {}
+                excluded_refs.each { |ref| membership[ref] = false }
+                included_refs.each { |ref| membership[ref] = true }
+                membership
+              end
+            end
+          end
+        end
       end
     end
   end
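
RedisBigSegmentStore reuses the shared connection pool from RedisStoreImplBase and keeps Big Segment state under three kinds of keys derived from the configured prefix: a string holding the last synchronization timestamp and two sets per user hash for inclusions and exclusions, which get_membership folds into a membership hash (exclusions first, then inclusions). The sketch below shows the resulting key layout; it assumes the public factory LaunchDarkly::Integrations::Redis.new_big_segment_store added in data/lib/ldclient-rb/integrations/redis.rb (+42 lines above) accepts the same options as the existing feature store factory.

    # Sketch only - new_big_segment_store is assumed from the SDK docs; this diff
    # shows only the internal RedisBigSegmentStore class it would construct.
    require 'ldclient-rb'

    store = LaunchDarkly::Integrations::Redis.new_big_segment_store(
      redis_url: "redis://localhost:6379/0",
      prefix: "launchdarkly"
    )
    # With that prefix the store reads:
    #   GET      launchdarkly:big_segments_synchronized_on       # metadata timestamp
    #   SMEMBERS launchdarkly:big_segment_include:<user_hash>    # refs mapped to true
    #   SMEMBERS launchdarkly:big_segment_exclude:<user_hash>    # refs mapped to false

    config = LaunchDarkly::Config.new(
      big_segments: LaunchDarkly::BigSegmentsConfig.new(store: store)
    )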
data/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb

@@ -0,0 +1,40 @@
+require 'concurrent/atomics'
+require 'ldclient-rb/interfaces'
+
+module LaunchDarkly
+  module Impl
+    module Integrations
+      module TestData
+        # @private
+        class TestDataSource
+          include LaunchDarkly::Interfaces::DataSource
+
+          def initialize(feature_store, test_data)
+            @feature_store = feature_store
+            @test_data = test_data
+          end
+
+          def initialized?
+            true
+          end
+
+          def start
+            ready = Concurrent::Event.new
+            ready.set
+            init_data = @test_data.make_init_data
+            @feature_store.init(init_data)
+            ready
+          end
+
+          def stop
+            @test_data.closed_instance(self)
+          end
+
+          def upsert(kind, item)
+            @feature_store.upsert(kind, item)
+          end
+        end
+      end
+    end
+  end
+end
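
TestDataSource is deliberately thin: start seeds the configured feature store from the owning TestData instance and reports ready immediately, and later flag changes reach every attached client through upsert. The builder API lives in data/lib/ldclient-rb/integrations/test_data.rb and test_data/flag_builder.rb (see the file list above); the sketch below assumes their documented method names (data_source, flag, variation_for_all_users, update), which are not part of this hunk.

    # Test-setup sketch only - TestData's builder methods are assumed from the SDK
    # docs; this diff contains only the TestDataSource plumbing shown above.
    require 'ldclient-rb'

    td = LaunchDarkly::Integrations::TestData.data_source
    td.update(td.flag("feature-a").variation_for_all_users(true))

    config = LaunchDarkly::Config.new(data_source: td, send_events: false)
    client = LaunchDarkly::LDClient.new("sdk-key", config)

    client.variation("feature-a", { key: "user-1" }, false)   # => true
    # Updating through the same TestData instance propagates to the attached client
    # via TestDataSource#upsert.
    td.update(td.flag("feature-a").variation_for_all_users(false))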