launchdarkly-server-sdk 6.2.5 → 7.0.0
- checksums.yaml +4 -4
- data/README.md +1 -2
- data/lib/ldclient-rb/config.rb +203 -43
- data/lib/ldclient-rb/context.rb +487 -0
- data/lib/ldclient-rb/evaluation_detail.rb +85 -26
- data/lib/ldclient-rb/events.rb +185 -146
- data/lib/ldclient-rb/flags_state.rb +25 -14
- data/lib/ldclient-rb/impl/big_segments.rb +117 -0
- data/lib/ldclient-rb/impl/context.rb +96 -0
- data/lib/ldclient-rb/impl/context_filter.rb +145 -0
- data/lib/ldclient-rb/impl/diagnostic_events.rb +9 -10
- data/lib/ldclient-rb/impl/evaluator.rb +428 -132
- data/lib/ldclient-rb/impl/evaluator_bucketing.rb +40 -41
- data/lib/ldclient-rb/impl/evaluator_helpers.rb +50 -0
- data/lib/ldclient-rb/impl/evaluator_operators.rb +26 -55
- data/lib/ldclient-rb/impl/event_sender.rb +6 -6
- data/lib/ldclient-rb/impl/event_summarizer.rb +68 -0
- data/lib/ldclient-rb/impl/event_types.rb +78 -0
- data/lib/ldclient-rb/impl/integrations/consul_impl.rb +7 -7
- data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +92 -28
- data/lib/ldclient-rb/impl/integrations/file_data_source.rb +212 -0
- data/lib/ldclient-rb/impl/integrations/redis_impl.rb +165 -32
- data/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb +40 -0
- data/lib/ldclient-rb/impl/model/clause.rb +39 -0
- data/lib/ldclient-rb/impl/model/feature_flag.rb +213 -0
- data/lib/ldclient-rb/impl/model/preprocessed_data.rb +64 -0
- data/lib/ldclient-rb/impl/model/segment.rb +126 -0
- data/lib/ldclient-rb/impl/model/serialization.rb +54 -44
- data/lib/ldclient-rb/impl/repeating_task.rb +47 -0
- data/lib/ldclient-rb/impl/store_data_set_sorter.rb +2 -2
- data/lib/ldclient-rb/impl/unbounded_pool.rb +1 -1
- data/lib/ldclient-rb/impl/util.rb +62 -1
- data/lib/ldclient-rb/in_memory_store.rb +2 -2
- data/lib/ldclient-rb/integrations/consul.rb +9 -2
- data/lib/ldclient-rb/integrations/dynamodb.rb +47 -2
- data/lib/ldclient-rb/integrations/file_data.rb +108 -0
- data/lib/ldclient-rb/integrations/redis.rb +43 -3
- data/lib/ldclient-rb/integrations/test_data/flag_builder.rb +594 -0
- data/lib/ldclient-rb/integrations/test_data.rb +213 -0
- data/lib/ldclient-rb/integrations/util/store_wrapper.rb +14 -9
- data/lib/ldclient-rb/integrations.rb +2 -51
- data/lib/ldclient-rb/interfaces.rb +151 -1
- data/lib/ldclient-rb/ldclient.rb +175 -133
- data/lib/ldclient-rb/memoized_value.rb +1 -1
- data/lib/ldclient-rb/non_blocking_thread_pool.rb +1 -1
- data/lib/ldclient-rb/polling.rb +22 -41
- data/lib/ldclient-rb/reference.rb +274 -0
- data/lib/ldclient-rb/requestor.rb +7 -7
- data/lib/ldclient-rb/stream.rb +9 -9
- data/lib/ldclient-rb/util.rb +11 -17
- data/lib/ldclient-rb/version.rb +1 -1
- data/lib/ldclient-rb.rb +2 -4
- metadata +49 -23
- data/lib/ldclient-rb/event_summarizer.rb +0 -55
- data/lib/ldclient-rb/file_data_source.rb +0 -314
- data/lib/ldclient-rb/impl/event_factory.rb +0 -126
- data/lib/ldclient-rb/newrelic.rb +0 -17
- data/lib/ldclient-rb/redis_store.rb +0 -88
- data/lib/ldclient-rb/user_filter.rb +0 -52
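The removals above line up with the additions: user_filter.rb and event_factory.rb give way to impl/context_filter.rb, impl/event_types.rb, and the new context.rb, while file_data_source.rb moves under integrations/file_data.rb. Below is a minimal sketch of what calling code might look like after the upgrade. The names used here (LDContext.create, Integrations::FileData.data_source) are assumptions inferred from the added file names, not something this diff documents, so verify them against the released 7.0.0 SDK docs before relying on them.

# Hypothetical 7.0.0-style usage; constructor and option names are assumptions
# drawn from the added files in this diff, not guaranteed by this page.
require "ldclient-rb"

# Local flag data instead of a LaunchDarkly connection (replaces FileDataSource.factory).
file_source = LaunchDarkly::Integrations::FileData.data_source(paths: ["testdata/flags.json"])
config = LaunchDarkly::Config.new(data_source: file_source, send_events: false)
client = LaunchDarkly::LDClient.new("sdk-key", config)

# Evaluation now takes an LDContext (replaces the old user hash).
context = LaunchDarkly::LDContext.create({ kind: "user", key: "user-key-1", name: "Sandy" })
value = client.variation("my-flag-key", context, false)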
data/lib/ldclient-rb/event_summarizer.rb
DELETED
@@ -1,55 +0,0 @@

module LaunchDarkly
  # @private
  EventSummary = Struct.new(:start_date, :end_date, :counters)

  # Manages the state of summarizable information for the EventProcessor, including the
  # event counters and user deduplication. Note that the methods of this class are
  # deliberately not thread-safe; the EventProcessor is responsible for enforcing
  # synchronization across both the summarizer and the event queue.
  #
  # @private
  class EventSummarizer
    def initialize
      clear
    end

    # Adds this event to our counters, if it is a type of event we need to count.
    def summarize_event(event)
      if event[:kind] == "feature"
        counter_key = {
          key: event[:key],
          version: event[:version],
          variation: event[:variation]
        }
        c = @counters[counter_key]
        if c.nil?
          @counters[counter_key] = {
            value: event[:value],
            default: event[:default],
            count: 1
          }
        else
          c[:count] = c[:count] + 1
        end
        time = event[:creationDate]
        if !time.nil?
          @start_date = time if @start_date == 0 || time < @start_date
          @end_date = time if time > @end_date
        end
      end
    end

    # Returns a snapshot of the current summarized event data, and resets this state.
    def snapshot
      ret = EventSummary.new(@start_date, @end_date, @counters)
      ret
    end

    def clear
      @start_date = 0
      @end_date = 0
      @counters = {}
    end
  end
end
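For reference, this is roughly how the EventProcessor drove the removed summarizer (its replacement lives in impl/event_summarizer.rb per the file list). A minimal sketch, assuming a feature-event hash shaped like the one summarize_event reads and epoch-millisecond timestamps; synchronization is the caller's job, per the class comment.

summarizer = LaunchDarkly::EventSummarizer.new

# One evaluation of "my-flag" (version 42, variation 1) resolved to true.
summarizer.summarize_event(
  kind: "feature",
  key: "my-flag",
  version: 42,
  variation: 1,
  value: true,
  default: false,
  creationDate: 1_600_000_000_000
)

summary = summarizer.snapshot
summary.counters.each do |counter_key, data|
  puts "#{counter_key[:key]} v#{counter_key[:version]} -> #{data[:count]} evaluations"
end
summarizer.clear  # reset the counters between flush intervals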
data/lib/ldclient-rb/file_data_source.rb
DELETED
@@ -1,314 +0,0 @@

require 'concurrent/atomics'
require 'json'
require 'yaml'
require 'pathname'

module LaunchDarkly
  # To avoid pulling in 'listen' and its transitive dependencies for people who aren't using the
  # file data source or who don't need auto-updating, we only enable auto-update if the 'listen'
  # gem has been provided by the host app.
  # @private
  @@have_listen = false
  begin
    require 'listen'
    @@have_listen = true
  rescue LoadError
  end

  # @private
  def self.have_listen?
    @@have_listen
  end

  #
  # Provides a way to use local files as a source of feature flag state. This allows using a
  # predetermined feature flag state without an actual LaunchDarkly connection.
  #
  # Reading flags from a file is only intended for pre-production environments. Production
  # environments should always be configured to receive flag updates from LaunchDarkly.
  #
  # To use this component, call {FileDataSource#factory}, and store its return value in the
  # {Config#data_source} property of your LaunchDarkly client configuration. In the options
  # to `factory`, set `paths` to the file path(s) of your data file(s):
  #
  #     file_source = FileDataSource.factory(paths: [ myFilePath ])
  #     config = LaunchDarkly::Config.new(data_source: file_source)
  #
  # This will cause the client not to connect to LaunchDarkly to get feature flags. The
  # client may still make network connections to send analytics events, unless you have disabled
  # this with {Config#send_events} or {Config#offline?}.
  #
  # Flag data files can be either JSON or YAML. They contain an object with three possible
  # properties:
  #
  # - `flags`: Feature flag definitions.
  # - `flagValues`: Simplified feature flags that contain only a value.
  # - `segments`: User segment definitions.
  #
  # The format of the data in `flags` and `segments` is defined by the LaunchDarkly application
  # and is subject to change. Rather than trying to construct these objects yourself, it is simpler
  # to request existing flags directly from the LaunchDarkly server in JSON format, and use this
  # output as the starting point for your file. In Linux you would do this:
  #
  # ```
  # curl -H "Authorization: YOUR_SDK_KEY" https://sdk.launchdarkly.com/sdk/latest-all
  # ```
  #
  # The output will look something like this (but with many more properties):
  #
  #     {
  #       "flags": {
  #         "flag-key-1": {
  #           "key": "flag-key-1",
  #           "on": true,
  #           "variations": [ "a", "b" ]
  #         }
  #       },
  #       "segments": {
  #         "segment-key-1": {
  #           "key": "segment-key-1",
  #           "includes": [ "user-key-1" ]
  #         }
  #       }
  #     }
  #
  # Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported
  # by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to
  # set specific flag keys to specific values. For that, you can use a much simpler format:
  #
  #     {
  #       "flagValues": {
  #         "my-string-flag-key": "value-1",
  #         "my-boolean-flag-key": true,
  #         "my-integer-flag-key": 3
  #       }
  #     }
  #
  # Or, in YAML:
  #
  #     flagValues:
  #       my-string-flag-key: "value-1"
  #       my-boolean-flag-key: true
  #       my-integer-flag-key: 1
  #
  # It is also possible to specify both "flags" and "flagValues", if you want some flags
  # to have simple values and others to have complex behavior. However, it is an error to use the
  # same flag key or segment key more than once, either in a single file or across multiple files.
  #
  # If the data source encounters any error in any file-- malformed content, a missing file, or a
  # duplicate key-- it will not load flags from any of the files.
  #
  class FileDataSource
    #
    # Returns a factory for the file data source component.
    #
    # @param options [Hash] the configuration options
    # @option options [Array] :paths The paths of the source files for loading flag data. These
    #   may be absolute paths or relative to the current working directory.
    # @option options [Boolean] :auto_update True if the data source should watch for changes to
    #   the source file(s) and reload flags whenever there is a change. Auto-updating will only
    #   work if all of the files you specified have valid directory paths at startup time.
    #   Note that the default implementation of this feature is based on polling the filesystem,
    #   which may not perform well. If you install the 'listen' gem (not included by default, to
    #   avoid adding unwanted dependencies to the SDK), its native file watching mechanism will be
    #   used instead. However, 'listen' will not be used in JRuby 9.1 due to a known instability.
    # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for
    #   file modifications - used only if auto_update is true, and if the native file-watching
    #   mechanism from 'listen' is not being used. The default value is 1 second.
    # @return an object that can be stored in {Config#data_source}
    #
    def self.factory(options={})
      return lambda { |sdk_key, config| FileDataSourceImpl.new(config.feature_store, config.logger, options) }
    end
  end

  # @private
  class FileDataSourceImpl
    def initialize(feature_store, logger, options={})
      @feature_store = feature_store
      @logger = logger
      @paths = options[:paths] || []
      if @paths.is_a? String
        @paths = [ @paths ]
      end
      @auto_update = options[:auto_update]
      if @auto_update && LaunchDarkly.have_listen? && !options[:force_polling] # force_polling is used only for tests
        # We have seen unreliable behavior in the 'listen' gem in JRuby 9.1 (https://github.com/guard/listen/issues/449).
        # Therefore, on that platform we'll fall back to file polling instead.
        if defined?(JRUBY_VERSION) && JRUBY_VERSION.start_with?("9.1.")
          @use_listen = false
        else
          @use_listen = true
        end
      end
      @poll_interval = options[:poll_interval] || 1
      @initialized = Concurrent::AtomicBoolean.new(false)
      @ready = Concurrent::Event.new
    end

    def initialized?
      @initialized.value
    end

    def start
      ready = Concurrent::Event.new

      # We will return immediately regardless of whether the file load succeeded or failed -
      # the difference can be detected by checking "initialized?"
      ready.set

      load_all

      if @auto_update
        # If we're going to watch files, then the start event will be set the first time we get
        # a successful load.
        @listener = start_listener
      end

      ready
    end

    def stop
      @listener.stop if !@listener.nil?
    end

    private

    def load_all
      all_data = {
        FEATURES => {},
        SEGMENTS => {}
      }
      @paths.each do |path|
        begin
          load_file(path, all_data)
        rescue => exn
          Util.log_exception(@logger, "Unable to load flag data from \"#{path}\"", exn)
          return
        end
      end
      @feature_store.init(all_data)
      @initialized.make_true
    end

    def load_file(path, all_data)
      parsed = parse_content(IO.read(path))
      (parsed[:flags] || {}).each do |key, flag|
        add_item(all_data, FEATURES, flag)
      end
      (parsed[:flagValues] || {}).each do |key, value|
        add_item(all_data, FEATURES, make_flag_with_value(key.to_s, value))
      end
      (parsed[:segments] || {}).each do |key, segment|
        add_item(all_data, SEGMENTS, segment)
      end
    end

    def parse_content(content)
      # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while
      # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least
      # for all the samples of actual flag data that we've tested).
      symbolize_all_keys(YAML.safe_load(content))
    end

    def symbolize_all_keys(value)
      # This is necessary because YAML.load doesn't have an option for parsing keys as symbols, and
      # the SDK expects all objects to be formatted that way.
      if value.is_a?(Hash)
        value.map{ |k, v| [k.to_sym, symbolize_all_keys(v)] }.to_h
      elsif value.is_a?(Array)
        value.map{ |v| symbolize_all_keys(v) }
      else
        value
      end
    end

    def add_item(all_data, kind, item)
      items = all_data[kind]
      raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash
      key = item[:key].to_sym
      if !items[key].nil?
        raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once"
      end
      items[key] = item
    end

    def make_flag_with_value(key, value)
      {
        key: key,
        on: true,
        fallthrough: { variation: 0 },
        variations: [ value ]
      }
    end

    def start_listener
      resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s }
      if @use_listen
        start_listener_with_listen_gem(resolved_paths)
      else
        FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all), @logger)
      end
    end

    def start_listener_with_listen_gem(resolved_paths)
      path_set = resolved_paths.to_set
      dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq
      opts = { latency: @poll_interval }
      l = Listen.to(*dir_paths, opts) do |modified, added, removed|
        paths = modified + added + removed
        if paths.any? { |p| path_set.include?(p) }
          load_all
        end
      end
      l.start
      l
    end

    #
    # Used internally by FileDataSource to track data file changes if the 'listen' gem is not available.
    #
    class FileDataSourcePoller
      def initialize(resolved_paths, interval, reloader, logger)
        @stopped = Concurrent::AtomicBoolean.new(false)
        get_file_times = Proc.new do
          ret = {}
          resolved_paths.each do |path|
            begin
              ret[path] = File.mtime(path)
            rescue Errno::ENOENT
              ret[path] = nil
            end
          end
          ret
        end
        last_times = get_file_times.call
        @thread = Thread.new do
          while true
            sleep interval
            break if @stopped.value
            begin
              new_times = get_file_times.call
              changed = false
              last_times.each do |path, old_time|
                new_time = new_times[path]
                if !new_time.nil? && new_time != old_time
                  changed = true
                  break
                end
              end
              reloader.call if changed
            rescue => exn
              Util.log_exception(logger, "Unexpected exception in FileDataSourcePoller", exn)
            end
          end
        end
      end

      def stop
        @stopped.make_true
        @thread.run # wakes it up if it's sleeping
      end
    end
  end
end
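A slightly fuller version of the example from the doc comment above, using the pre-7.0 API that this file provided. The data file path and flag key are placeholders.

# Expanded form of the doc-comment example (6.x API, removed in 7.0.0).
require "ldclient-rb"

file_source = LaunchDarkly::FileDataSource.factory(
  paths: ["flags.yml"],
  auto_update: true,   # reload when the file changes (polls unless the 'listen' gem is present)
  poll_interval: 1     # seconds between filesystem checks when polling
)

config = LaunchDarkly::Config.new(
  data_source: file_source,
  send_events: false   # also skip the analytics connection for a fully local setup
)
client = LaunchDarkly::LDClient.new("fake-sdk-key", config)

client.variation("my-flag-key", { key: "user-key-1" }, false)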
data/lib/ldclient-rb/impl/event_factory.rb
DELETED
@@ -1,126 +0,0 @@

module LaunchDarkly
  module Impl
    # Event constructors are centralized here to avoid mistakes and repetitive logic.
    # The LDClient owns two instances of EventFactory: one that always embeds evaluation reasons
    # in the events (for when variation_detail is called) and one that doesn't.
    #
    # Note that these methods do not set the "creationDate" property, because in the Ruby client,
    # that is done by EventProcessor.add_event().
    class EventFactory
      def initialize(with_reasons)
        @with_reasons = with_reasons
      end

      def new_eval_event(flag, user, detail, default_value, prereq_of_flag = nil)
        add_experiment_data = is_experiment(flag, detail.reason)
        e = {
          kind: 'feature',
          key: flag[:key],
          user: user,
          variation: detail.variation_index,
          value: detail.value,
          default: default_value,
          version: flag[:version]
        }
        # the following properties are handled separately so we don't waste bandwidth on unused keys
        e[:trackEvents] = true if add_experiment_data || flag[:trackEvents]
        e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate]
        e[:prereqOf] = prereq_of_flag[:key] if !prereq_of_flag.nil?
        e[:reason] = detail.reason if add_experiment_data || @with_reasons
        e[:contextKind] = context_to_context_kind(user) if !user.nil? && user[:anonymous]
        e
      end

      def new_default_event(flag, user, default_value, reason)
        e = {
          kind: 'feature',
          key: flag[:key],
          user: user,
          value: default_value,
          default: default_value,
          version: flag[:version]
        }
        e[:trackEvents] = true if flag[:trackEvents]
        e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate]
        e[:reason] = reason if @with_reasons
        e[:contextKind] = context_to_context_kind(user) if !user.nil? && user[:anonymous]
        e
      end

      def new_unknown_flag_event(key, user, default_value, reason)
        e = {
          kind: 'feature',
          key: key,
          user: user,
          value: default_value,
          default: default_value
        }
        e[:reason] = reason if @with_reasons
        e[:contextKind] = context_to_context_kind(user) if !user.nil? && user[:anonymous]
        e
      end

      def new_identify_event(user)
        {
          kind: 'identify',
          key: user[:key],
          user: user
        }
      end

      def new_alias_event(current_context, previous_context)
        {
          kind: 'alias',
          key: current_context[:key],
          contextKind: context_to_context_kind(current_context),
          previousKey: previous_context[:key],
          previousContextKind: context_to_context_kind(previous_context)
        }
      end

      def new_custom_event(event_name, user, data, metric_value)
        e = {
          kind: 'custom',
          key: event_name,
          user: user
        }
        e[:data] = data if !data.nil?
        e[:metricValue] = metric_value if !metric_value.nil?
        e[:contextKind] = context_to_context_kind(user) if !user.nil? && user[:anonymous]
        e
      end

      private

      def context_to_context_kind(user)
        if !user.nil? && user[:anonymous]
          return "anonymousUser"
        else
          return "user"
        end
      end

      def is_experiment(flag, reason)
        return false if !reason

        if reason.in_experiment
          return true
        end

        case reason[:kind]
        when 'RULE_MATCH'
          index = reason[:ruleIndex]
          if !index.nil?
            rules = flag[:rules] || []
            return index >= 0 && index < rules.length && rules[index][:trackEvents]
          end
        when 'FALLTHROUGH'
          return !!flag[:trackEventsFallthrough]
        end
        false
      end
    end
  end
end
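A sketch of the two factory instances the class comment describes, one that always embeds the evaluation reason and one that does not. This assumes EvaluationDetail.new(value, variation_index, reason) and EvaluationReason.fallthrough from the SDK's public evaluation types; the flag and user hashes are minimal stand-ins, and the "=>" comments only indicate the shape of the result.

# Two factories: with and without reasons embedded in every event.
factory_default      = LaunchDarkly::Impl::EventFactory.new(false)
factory_with_reasons = LaunchDarkly::Impl::EventFactory.new(true)

flag   = { key: "my-flag", version: 7, trackEvents: false }
user   = { key: "user-key-1", anonymous: false }
detail = LaunchDarkly::EvaluationDetail.new(true, 1, LaunchDarkly::EvaluationReason.fallthrough)

factory_default.new_eval_event(flag, user, detail, false)
# => { kind: 'feature', key: 'my-flag', ... }                  (no :reason key)
factory_with_reasons.new_eval_event(flag, user, detail, false)
# => { kind: 'feature', key: 'my-flag', ..., reason: <reason> } (reason included)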
data/lib/ldclient-rb/newrelic.rb
DELETED
@@ -1,17 +0,0 @@

module LaunchDarkly
  # @private
  class LDNewRelic
    begin
      require "newrelic_rpm"
      NR_ENABLED = defined?(::NewRelic::Agent.add_custom_parameters)
    rescue ScriptError, StandardError
      NR_ENABLED = false
    end

    def self.annotate_transaction(key, value)
      if NR_ENABLED
        ::NewRelic::Agent.add_custom_parameters(key.to_s => value.to_s)
      end
    end
  end
end
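The removed helper was a no-op unless newrelic_rpm was loaded; when the agent was present, a call like the one below tagged the current New Relic transaction with a custom parameter. The key and value here are placeholders.

# Annotate the current transaction (only has an effect if newrelic_rpm is loaded).
LaunchDarkly::LDNewRelic.annotate_transaction("my-flag-key", "true")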
data/lib/ldclient-rb/redis_store.rb
DELETED
@@ -1,88 +0,0 @@

require "ldclient-rb/interfaces"
require "ldclient-rb/impl/integrations/redis_impl"

module LaunchDarkly
  #
  # An implementation of the LaunchDarkly client's feature store that uses a Redis
  # instance. This object holds feature flags and related data received from the
  # streaming API. Feature data can also be further cached in memory to reduce overhead
  # of calls to Redis.
  #
  # To use this class, you must first have the `redis` and `connection-pool` gems
  # installed. Then, create an instance and store it in the `feature_store` property
  # of your client configuration.
  #
  # @deprecated Use the factory method in {LaunchDarkly::Integrations::Redis} instead. This specific
  #   implementation class may be changed or removed in the future.
  #
  class RedisFeatureStore
    include LaunchDarkly::Interfaces::FeatureStore

    # Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating
    # to RedisFeatureStoreCore where the actual database logic is. This class was retained for historical
    # reasons, so that existing code can still call RedisFeatureStore.new. In the future, we will migrate
    # away from exposing these concrete classes and use factory methods instead.

    #
    # Constructor for a RedisFeatureStore instance.
    #
    # @param opts [Hash] the configuration options
    # @option opts [String] :redis_url URL of the Redis instance (shortcut for omitting redis_opts)
    # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just redis_url)
    # @option opts [String] :prefix namespace prefix to add to all hash keys used by LaunchDarkly
    # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger`
    # @option opts [Integer] :max_connections size of the Redis connection pool
    # @option opts [Integer] :expiration expiration time for the in-memory cache, in seconds; 0 for no local caching
    # @option opts [Integer] :capacity maximum number of feature flags (or related objects) to cache locally
    # @option opts [Object] :pool custom connection pool, if desired
    # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool.
    #
    def initialize(opts = {})
      core = LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStoreCore.new(opts)
      @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts)
    end

    #
    # Default value for the `redis_url` constructor parameter; points to an instance of Redis
    # running at `localhost` with its default port.
    #
    def self.default_redis_url
      LaunchDarkly::Integrations::Redis::default_redis_url
    end

    #
    # Default value for the `prefix` constructor parameter.
    #
    def self.default_prefix
      LaunchDarkly::Integrations::Redis::default_prefix
    end

    def get(kind, key)
      @wrapper.get(kind, key)
    end

    def all(kind)
      @wrapper.all(kind)
    end

    def delete(kind, key, version)
      @wrapper.delete(kind, key, version)
    end

    def init(all_data)
      @wrapper.init(all_data)
    end

    def upsert(kind, item)
      @wrapper.upsert(kind, item)
    end

    def initialized?
      @wrapper.initialized?
    end

    def stop
      @wrapper.stop
    end
  end
end
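The doc comment above describes the intended wiring: build the store and pass it as the client's feature_store. A minimal sketch of that 6.x usage follows; the Redis URL, prefix, and cache settings are placeholders, and the `redis` and `connection_pool` gems must be installed. The deprecation note points to the Integrations::Redis factory as the forward path in 7.0.0.

# 6.x-style wiring for the now-removed class.
require "ldclient-rb"

store = LaunchDarkly::RedisFeatureStore.new(
  redis_url: "redis://localhost:6379/0",
  prefix: "launchdarkly",   # namespace for all LaunchDarkly keys in Redis
  expiration: 30            # seconds of local in-memory caching; 0 disables it
)

config = LaunchDarkly::Config.new(feature_store: store)
client = LaunchDarkly::LDClient.new("sdk-key", config)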
data/lib/ldclient-rb/user_filter.rb
DELETED
@@ -1,52 +0,0 @@

require "json"
require "set"

module LaunchDarkly
  # @private
  class UserFilter
    def initialize(config)
      @all_attributes_private = config.all_attributes_private
      @private_attribute_names = Set.new(config.private_attribute_names.map(&:to_sym))
    end

    def transform_user_props(user_props)
      return nil if user_props.nil?

      user_private_attrs = Set.new((user_props[:privateAttributeNames] || []).map(&:to_sym))

      filtered_user_props, removed = filter_values(user_props, user_private_attrs, ALLOWED_TOP_LEVEL_KEYS, IGNORED_TOP_LEVEL_KEYS)
      custom = user_props[:custom]
      if !custom.nil?
        filtered_user_props[:custom], removed_custom = filter_values(custom, user_private_attrs)
        removed.merge(removed_custom)
      end

      unless removed.empty?
        # note, :privateAttributeNames is what the developer sets; :privateAttrs is what we send to the server
        filtered_user_props[:privateAttrs] = removed.to_a.sort.map { |s| s.to_s }
      end
      return filtered_user_props
    end

    private

    ALLOWED_TOP_LEVEL_KEYS = Set.new([:key, :secondary, :ip, :country, :email,
      :firstName, :lastName, :avatar, :name, :anonymous, :custom])
    IGNORED_TOP_LEVEL_KEYS = Set.new([:custom, :key, :anonymous])

    def filter_values(props, user_private_attrs, allowed_keys = [], keys_to_leave_as_is = [])
      is_valid_key = lambda { |key| allowed_keys.empty? || allowed_keys.include?(key) }
      removed_keys = Set.new(props.keys.select { |key|
        # Note that if is_valid_key returns false, we don't explicitly *remove* the key (which would place
        # it in the privateAttrs list) - we just silently drop it when we calculate filtered_hash.
        is_valid_key.call(key) && !keys_to_leave_as_is.include?(key) && private_attr?(key, user_private_attrs)
      })
      filtered_hash = props.select { |key, value| !removed_keys.include?(key) && is_valid_key.call(key) }
      [filtered_hash, removed_keys]
    end

    def private_attr?(name, user_private_attrs)
      @all_attributes_private || @private_attribute_names.include?(name) || user_private_attrs.include?(name)
    end
  end
end
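To round out the picture, here is a sketch of the attribute scrubbing this removed class performed before events were serialized (its 7.0.0 counterpart is impl/context_filter.rb in the file list). It assumes the 6.x Config options the constructor reads (all_attributes_private, private_attribute_names); the user hash and its values are placeholders, and the "=>" comment only indicates the expected shape.

config = LaunchDarkly::Config.new(private_attribute_names: ["email"])
filter = LaunchDarkly::UserFilter.new(config)

user = {
  key: "user-key-1",
  email: "sandy@example.com",             # private via the config
  custom: { plan: "beta", ssn: "12345" },
  privateAttributeNames: ["ssn"]          # private via the user itself
}

filter.transform_user_props(user)
# => { key: "user-key-1", custom: { plan: "beta" }, privateAttrs: ["email", "ssn"] }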