launchdarkly-server-sdk 6.4.0 → 7.0.0
- checksums.yaml +4 -4
- data/lib/ldclient-rb/config.rb +102 -56
- data/lib/ldclient-rb/context.rb +487 -0
- data/lib/ldclient-rb/evaluation_detail.rb +20 -20
- data/lib/ldclient-rb/events.rb +77 -132
- data/lib/ldclient-rb/flags_state.rb +4 -4
- data/lib/ldclient-rb/impl/big_segments.rb +17 -17
- data/lib/ldclient-rb/impl/context.rb +96 -0
- data/lib/ldclient-rb/impl/context_filter.rb +145 -0
- data/lib/ldclient-rb/impl/diagnostic_events.rb +9 -10
- data/lib/ldclient-rb/impl/evaluator.rb +379 -131
- data/lib/ldclient-rb/impl/evaluator_bucketing.rb +40 -41
- data/lib/ldclient-rb/impl/evaluator_helpers.rb +28 -31
- data/lib/ldclient-rb/impl/evaluator_operators.rb +26 -55
- data/lib/ldclient-rb/impl/event_sender.rb +6 -6
- data/lib/ldclient-rb/impl/event_summarizer.rb +12 -7
- data/lib/ldclient-rb/impl/event_types.rb +18 -30
- data/lib/ldclient-rb/impl/integrations/consul_impl.rb +7 -7
- data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +29 -29
- data/lib/ldclient-rb/impl/integrations/file_data_source.rb +8 -8
- data/lib/ldclient-rb/impl/integrations/redis_impl.rb +92 -12
- data/lib/ldclient-rb/impl/model/clause.rb +39 -0
- data/lib/ldclient-rb/impl/model/feature_flag.rb +213 -0
- data/lib/ldclient-rb/impl/model/preprocessed_data.rb +8 -121
- data/lib/ldclient-rb/impl/model/segment.rb +126 -0
- data/lib/ldclient-rb/impl/model/serialization.rb +52 -12
- data/lib/ldclient-rb/impl/repeating_task.rb +1 -1
- data/lib/ldclient-rb/impl/store_data_set_sorter.rb +2 -2
- data/lib/ldclient-rb/impl/unbounded_pool.rb +1 -1
- data/lib/ldclient-rb/impl/util.rb +2 -2
- data/lib/ldclient-rb/in_memory_store.rb +2 -2
- data/lib/ldclient-rb/integrations/consul.rb +1 -1
- data/lib/ldclient-rb/integrations/dynamodb.rb +1 -1
- data/lib/ldclient-rb/integrations/file_data.rb +3 -3
- data/lib/ldclient-rb/integrations/redis.rb +4 -4
- data/lib/ldclient-rb/integrations/test_data/flag_builder.rb +218 -62
- data/lib/ldclient-rb/integrations/test_data.rb +16 -12
- data/lib/ldclient-rb/integrations/util/store_wrapper.rb +9 -9
- data/lib/ldclient-rb/interfaces.rb +14 -14
- data/lib/ldclient-rb/ldclient.rb +94 -144
- data/lib/ldclient-rb/memoized_value.rb +1 -1
- data/lib/ldclient-rb/non_blocking_thread_pool.rb +1 -1
- data/lib/ldclient-rb/polling.rb +2 -2
- data/lib/ldclient-rb/reference.rb +274 -0
- data/lib/ldclient-rb/requestor.rb +5 -5
- data/lib/ldclient-rb/stream.rb +7 -8
- data/lib/ldclient-rb/util.rb +4 -19
- data/lib/ldclient-rb/version.rb +1 -1
- data/lib/ldclient-rb.rb +2 -3
- metadata +34 -17
- data/lib/ldclient-rb/file_data_source.rb +0 -23
- data/lib/ldclient-rb/newrelic.rb +0 -17
- data/lib/ldclient-rb/redis_store.rb +0 -88
- data/lib/ldclient-rb/user_filter.rb +0 -52
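The headline change in 7.0.0 is the move from user hashes to evaluation contexts: `context.rb`, `reference.rb`, and `impl/context_filter.rb` are new, while `user_filter.rb` and the old top-level `file_data_source.rb`, `newrelic.rb`, and `redis_store.rb` entry points are removed. A minimal sketch of the context-based API, assuming the `LDContext.create` factory added in the new `context.rb`; the SDK key and flag key are placeholders:

```ruby
require "ldclient-rb"

client = LaunchDarkly::LDClient.new("sdk-key")

# A single-kind context takes the place of the old user hash.
context = LaunchDarkly::LDContext.create({
  key: "user-key-123",
  kind: "user",
  name: "Sandy",
})

# Evaluation calls now accept a context instead of a user.
show_feature = client.variation("my-flag-key", context, false)
```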
data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb

@@ -16,28 +16,28 @@ module LaunchDarkly
             AWS_SDK_ENABLED = false
           end
         end
-
+
         PARTITION_KEY = "namespace"
         SORT_KEY = "key"

         def initialize(table_name, opts)
-          if !AWS_SDK_ENABLED
+          unless AWS_SDK_ENABLED
            raise RuntimeError.new("can't use #{description} without the aws-sdk or aws-sdk-dynamodb gem")
          end
-
+
          @table_name = table_name
          @prefix = opts[:prefix] ? (opts[:prefix] + ":") : ""
          @logger = opts[:logger] || Config.default_logger
-
+
          if !opts[:existing_client].nil?
            @client = opts[:existing_client]
          else
            @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts] || {})
          end
-
+
          @logger.info("#{description}: using DynamoDB table \"#{table_name}\"")
        end
-
+
        def stop
          # AWS client doesn't seem to have a close method
        end
@@ -46,7 +46,7 @@ module LaunchDarkly
          "DynamoDB"
        end
      end
-
+
      #
      # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper.
      #
@@ -83,7 +83,7 @@ module LaunchDarkly
          del_item = make_keys_hash(tuple[0], tuple[1])
          requests.push({ delete_request: { key: del_item } })
        end
-
+
        # Now set the special key that we check in initialized_internal?
        inited_item = make_keys_hash(inited_key, inited_key)
        requests.push({ put_request: { item: inited_item } })
@@ -123,11 +123,11 @@ module LaunchDarkly
            expression_attribute_names: {
              "#namespace" => PARTITION_KEY,
              "#key" => SORT_KEY,
-             "#version" => VERSION_ATTRIBUTE
+             "#version" => VERSION_ATTRIBUTE,
            },
            expression_attribute_values: {
-             ":version" => new_item[:version]
-           }
+             ":version" => new_item[:version],
+           },
          })
          new_item
        rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException
@@ -159,7 +159,7 @@ module LaunchDarkly
        def make_keys_hash(namespace, key)
          {
            PARTITION_KEY => namespace,
-           SORT_KEY => key
+           SORT_KEY => key,
          }
        end

@@ -170,16 +170,16 @@ module LaunchDarkly
            key_conditions: {
              PARTITION_KEY => {
                comparison_operator: "EQ",
-               attribute_value_list: [ namespace_for_kind(kind) ]
-             }
-           }
+               attribute_value_list: [ namespace_for_kind(kind) ],
+             },
+           },
          }
        end

        def get_item_by_keys(namespace, key)
          @client.get_item({
            table_name: @table_name,
-           key: make_keys_hash(namespace, key)
+           key: make_keys_hash(namespace, key),
          })
        end

@@ -190,8 +190,8 @@ module LaunchDarkly
            projection_expression: "#namespace, #key",
            expression_attribute_names: {
              "#namespace" => PARTITION_KEY,
-             "#key" => SORT_KEY
-           }
+             "#key" => SORT_KEY,
+           },
          })
          while true
            resp = @client.query(req)
@@ -210,7 +210,7 @@ module LaunchDarkly
        def marshal_item(kind, item)
          make_keys_hash(namespace_for_kind(kind), item[:key]).merge({
            VERSION_ATTRIBUTE => item[:version],
-           ITEM_JSON_ATTRIBUTE => Model.serialize(kind, item)
+           ITEM_JSON_ATTRIBUTE => Model.serialize(kind, item),
          })
        end

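The hunks above are mostly style cleanups (trailing commas, `unless` instead of `if !`) inside the DynamoDB store internals. For orientation, a hedged sketch of how an application would wire this store in through the public factory; the table name and region are placeholders, and the option keys mirror the `opts` handled in `initialize` above:

```ruby
require "aws-sdk-dynamodb"
require "ldclient-rb"

# Builds the caching wrapper around the internal store shown in this diff.
store = LaunchDarkly::Integrations::DynamoDB.new_feature_store(
  "my-ld-table",                           # hypothetical table name
  prefix: "ld",                            # keys are stored as "ld:..."
  dynamodb_opts: { region: "us-east-1" }
)

config = LaunchDarkly::Config.new(feature_store: store)
client = LaunchDarkly::LDClient.new("sdk-key", config)
```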
@@ -223,11 +223,11 @@ module LaunchDarkly
        end

      class DynamoDBBigSegmentStore < DynamoDBStoreImplBase
-       KEY_METADATA = 'big_segments_metadata'
-
-       ATTR_SYNC_TIME = 'synchronizedOn'
-       ATTR_INCLUDED = 'included'
-       ATTR_EXCLUDED = 'excluded'
+       KEY_METADATA = 'big_segments_metadata'
+       KEY_CONTEXT_DATA = 'big_segments_user'
+       ATTR_SYNC_TIME = 'synchronizedOn'
+       ATTR_INCLUDED = 'included'
+       ATTR_EXCLUDED = 'excluded'

        def initialize(table_name, opts)
          super(table_name, opts)
@@ -243,7 +243,7 @@ module LaunchDarkly
          table_name: @table_name,
          key: {
            PARTITION_KEY => key,
-           SORT_KEY => key
+           SORT_KEY => key,
          }
        )
        timestamp = data.item && data.item[ATTR_SYNC_TIME] ?
@@ -251,14 +251,14 @@ module LaunchDarkly
          LaunchDarkly::Interfaces::BigSegmentStoreMetadata.new(timestamp)
        end

-       def get_membership(
+       def get_membership(context_hash)
          data = @client.get_item(
            table_name: @table_name,
            key: {
-             PARTITION_KEY => @prefix +
-             SORT_KEY =>
+             PARTITION_KEY => @prefix + KEY_CONTEXT_DATA,
+             SORT_KEY => context_hash,
          })
-         return nil
+         return nil unless data.item
          excluded_refs = data.item[ATTR_EXCLUDED] || []
          included_refs = data.item[ATTR_INCLUDED] || []
          if excluded_refs.empty? && included_refs.empty?
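`DynamoDBBigSegmentStore#get_membership` now takes a `context_hash` and reads the `KEY_CONTEXT_DATA` item, reflecting the user-to-context rename. The store is still plugged in through the big segments configuration; a hedged sketch, assuming `new_big_segment_store` and `BigSegmentsConfig` keep their 6.x shape:

```ruby
require "ldclient-rb"

segment_store = LaunchDarkly::Integrations::DynamoDB.new_big_segment_store(
  "my-ld-table",   # hypothetical table name
  prefix: "ld"
)

config = LaunchDarkly::Config.new(
  big_segments: LaunchDarkly::BigSegmentsConfig.new(store: segment_store)
)
client = LaunchDarkly::LDClient.new("sdk-key", config)
```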
data/lib/ldclient-rb/impl/integrations/file_data_source.rb

@@ -48,7 +48,7 @@ module LaunchDarkly

        def start
          ready = Concurrent::Event.new
-
+
          # We will return immediately regardless of whether the file load succeeded or failed -
          # the difference can be detected by checking "initialized?"
          ready.set
@@ -63,9 +63,9 @@ module LaunchDarkly

          ready
        end
-
+
        def stop
-         @listener.stop
+         @listener.stop unless @listener.nil?
        end

        private
@@ -73,7 +73,7 @@ module LaunchDarkly
        def load_all
          all_data = {
            FEATURES => {},
-           SEGMENTS => {}
+           SEGMENTS => {},
          }
          @paths.each do |path|
            begin
@@ -121,12 +121,12 @@ module LaunchDarkly

        def add_item(all_data, kind, item)
          items = all_data[kind]
-         raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash
+         raise ArgumentError, "Received unknown item kind #{kind[:namespace]} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash
          key = item[:key].to_sym
-         if !items[key].nil?
+         unless items[key].nil?
            raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once"
          end
-         items[key] = item
+         items[key] = Model.deserialize(kind, item)
        end

        def make_flag_with_value(key, value)
@@ -134,7 +134,7 @@ module LaunchDarkly
            key: key,
            on: true,
            fallthrough: { variation: 0 },
-           variations: [ value ]
+           variations: [ value ],
          }
        end

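`add_item` now stores `Model.deserialize(kind, item)` rather than the raw hash, so flags and segments loaded from files go through the same model classes as streamed data. Usage of the file data source itself looks unchanged; a hedged sketch with a placeholder path, assuming the `Integrations::FileData.data_source` factory:

```ruby
require "ldclient-rb"

source = LaunchDarkly::Integrations::FileData.data_source(
  paths: ["config/flags.json"],  # placeholder path
  auto_update: true              # reload when the file changes
)

config = LaunchDarkly::Config.new(data_source: source, send_events: false)
client = LaunchDarkly::LDClient.new("sdk-key", config)
```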
data/lib/ldclient-rb/impl/integrations/redis_impl.rb

@@ -5,6 +5,87 @@ module LaunchDarkly
   module Impl
     module Integrations
       module Redis
+        #
+        # An implementation of the LaunchDarkly client's feature store that uses a Redis
+        # instance. This object holds feature flags and related data received from the
+        # streaming API. Feature data can also be further cached in memory to reduce overhead
+        # of calls to Redis.
+        #
+        # To use this class, you must first have the `redis` and `connection-pool` gems
+        # installed. Then, create an instance and store it in the `feature_store` property
+        # of your client configuration.
+        #
+        class RedisFeatureStore
+          include LaunchDarkly::Interfaces::FeatureStore
+
+          # Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating
+          # to RedisFeatureStoreCore where the actual database logic is. This class was retained for historical
+          # reasons, so that existing code can still call RedisFeatureStore.new. In the future, we will migrate
+          # away from exposing these concrete classes and use factory methods instead.
+
+          #
+          # Constructor for a RedisFeatureStore instance.
+          #
+          # @param opts [Hash] the configuration options
+          # @option opts [String] :redis_url URL of the Redis instance (shortcut for omitting redis_opts)
+          # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just redis_url)
+          # @option opts [String] :prefix namespace prefix to add to all hash keys used by LaunchDarkly
+          # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger`
+          # @option opts [Integer] :max_connections size of the Redis connection pool
+          # @option opts [Integer] :expiration expiration time for the in-memory cache, in seconds; 0 for no local caching
+          # @option opts [Integer] :capacity maximum number of feature flags (or related objects) to cache locally
+          # @option opts [Object] :pool custom connection pool, if desired
+          # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool.
+          #
+          def initialize(opts = {})
+            core = RedisFeatureStoreCore.new(opts)
+            @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts)
+          end
+
+          #
+          # Default value for the `redis_url` constructor parameter; points to an instance of Redis
+          # running at `localhost` with its default port.
+          #
+          def self.default_redis_url
+            LaunchDarkly::Integrations::Redis::default_redis_url
+          end
+
+          #
+          # Default value for the `prefix` constructor parameter.
+          #
+          def self.default_prefix
+            LaunchDarkly::Integrations::Redis::default_prefix
+          end
+
+          def get(kind, key)
+            @wrapper.get(kind, key)
+          end
+
+          def all(kind)
+            @wrapper.all(kind)
+          end
+
+          def delete(kind, key, version)
+            @wrapper.delete(kind, key, version)
+          end
+
+          def init(all_data)
+            @wrapper.init(all_data)
+          end
+
+          def upsert(kind, item)
+            @wrapper.upsert(kind, item)
+          end
+
+          def initialized?
+            @wrapper.initialized?
+          end
+
+          def stop
+            @wrapper.stop
+          end
+        end
+
         class RedisStoreImplBase
           begin
             require "redis"
@@ -15,7 +96,7 @@ module LaunchDarkly
          end

          def initialize(opts)
-           if !REDIS_ENABLED
+           unless REDIS_ENABLED
              raise RuntimeError.new("can't use #{description} because one of these gems is missing: redis, connection_pool")
            end

@@ -28,7 +109,7 @@ module LaunchDarkly
            @logger = opts[:logger] || Config.default_logger
            @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented

-           @stopped = Concurrent::AtomicBoolean.new(
+           @stopped = Concurrent::AtomicBoolean.new()

            with_connection do |redis|
              @logger.info("#{description}: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} and prefix: #{@prefix}")
@@ -55,13 +136,11 @@ module LaunchDarkly
            if opts[:redis_url]
              redis_opts[:url] = opts[:redis_url]
            end
-           if !redis_opts.include?(:url)
+           unless redis_opts.include?(:url)
              redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url
            end
            max_connections = opts[:max_connections] || 16
-
-             ::Redis.new(redis_opts)
-           end
+           opts[:pool] || ConnectionPool.new(size: max_connections) { ::Redis.new(redis_opts) }
          end
        end

@@ -135,6 +214,7 @@ module LaunchDarkly
            else
              final_item = old_item
              action = new_item[:deleted] ? "delete" : "update"
+             # rubocop:disable Layout/LineLength
              @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" }
            end
            redis.unwatch
@@ -151,7 +231,7 @@ module LaunchDarkly
          private

          def before_update_transaction(base_key, key)
-           @test_hook.before_update_transaction(base_key, key)
+           @test_hook.before_update_transaction(base_key, key) unless @test_hook.nil?
          end

          def items_key(kind)
@@ -176,8 +256,8 @@ module LaunchDarkly
        #
        class RedisBigSegmentStore < RedisStoreImplBase
          KEY_LAST_UP_TO_DATE = ':big_segments_synchronized_on'
-
-
+         KEY_CONTEXT_INCLUDE = ':big_segment_include:'
+         KEY_CONTEXT_EXCLUDE = ':big_segment_exclude:'

          def description
            "RedisBigSegmentStore"
@@ -188,10 +268,10 @@ module LaunchDarkly
            Interfaces::BigSegmentStoreMetadata.new(value.nil? ? nil : value.to_i)
          end

-         def get_membership(
+         def get_membership(context_hash)
            with_connection do |redis|
-             included_refs = redis.smembers(@prefix +
-             excluded_refs = redis.smembers(@prefix +
+             included_refs = redis.smembers(@prefix + KEY_CONTEXT_INCLUDE + context_hash)
+             excluded_refs = redis.smembers(@prefix + KEY_CONTEXT_EXCLUDE + context_hash)
              if !included_refs && !excluded_refs
                nil
              else
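The re-added `RedisFeatureStore` facade documents the constructor options, and `RedisBigSegmentStore` follows the same user-to-context rename as the DynamoDB store. For typical applications the factory method is the entry point; a hedged sketch using the options listed in the doc comment above (URL and prefix are placeholders):

```ruby
require "ldclient-rb"

store = LaunchDarkly::Integrations::Redis.new_feature_store(
  redis_url: "redis://localhost:6379/0",
  prefix: "launchdarkly",
  expiration: 30   # seconds of in-memory caching; 0 disables the local cache
)

config = LaunchDarkly::Config.new(feature_store: store)
client = LaunchDarkly::LDClient.new("sdk-key", config)
```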
data/lib/ldclient-rb/impl/model/clause.rb (new file)

@@ -0,0 +1,39 @@
+
+# See serialization.rb for implementation notes on the data model classes.
+
+module LaunchDarkly
+  module Impl
+    module Model
+      class Clause
+        def initialize(data, logger)
+          @data = data
+          @context_kind = data[:contextKind]
+          @attribute = (@context_kind.nil? || @context_kind.empty?) ? Reference.create_literal(data[:attribute]) : Reference.create(data[:attribute])
+          unless logger.nil? || @attribute.error.nil?
+            logger.error("[LDClient] Data inconsistency in feature flag: #{@attribute.error}")
+          end
+          @op = data[:op].to_sym
+          @values = data[:values] || []
+          @negate = !!data[:negate]
+        end
+
+        # @return [Hash]
+        attr_reader :data
+        # @return [String|nil]
+        attr_reader :context_kind
+        # @return [LaunchDarkly::Reference]
+        attr_reader :attribute
+        # @return [Symbol]
+        attr_reader :op
+        # @return [Array]
+        attr_reader :values
+        # @return [Boolean]
+        attr_reader :negate
+
+        def as_json
+          @data
+        end
+      end
+    end
+  end
+end
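The interesting branch in `Clause#initialize` is the attribute handling: a clause with no `contextKind` (old-style user data) treats the attribute string as a literal name, while a context-aware clause parses it as an attribute reference path. A small illustration with the new `LaunchDarkly::Reference` class; the attribute string is a made-up example:

```ruby
require "ldclient-rb"

# Old-style clause data: the whole string is one attribute name.
literal = LaunchDarkly::Reference.create_literal("/address/city")

# Context-aware clause data: the same string is a slash-delimited path
# into the "address" object of the context.
path = LaunchDarkly::Reference.create("/address/city")

puts literal.error.inspect  # nil when the reference is valid
puts path.error.inspect     # nil when the reference is valid
```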
data/lib/ldclient-rb/impl/model/feature_flag.rb (new file)

@@ -0,0 +1,213 @@
+require "ldclient-rb/impl/evaluator_helpers"
+require "ldclient-rb/impl/model/clause"
+require "set"
+
+# See serialization.rb for implementation notes on the data model classes.
+
+module LaunchDarkly
+  module Impl
+    module Model
+      class FeatureFlag
+        # @param data [Hash]
+        # @param logger [Logger|nil]
+        def initialize(data, logger = nil)
+          raise ArgumentError, "expected hash but got #{data.class}" unless data.is_a?(Hash)
+          @data = data
+          @key = data[:key]
+          @version = data[:version]
+          @deleted = !!data[:deleted]
+          return if @deleted
+          @variations = data[:variations] || []
+          @on = !!data[:on]
+          fallthrough = data[:fallthrough] || {}
+          @fallthrough = VariationOrRollout.new(fallthrough[:variation], fallthrough[:rollout])
+          @off_variation = data[:offVariation]
+          @prerequisites = (data[:prerequisites] || []).map do |prereq_data|
+            Prerequisite.new(prereq_data, self, logger)
+          end
+          @targets = (data[:targets] || []).map do |target_data|
+            Target.new(target_data, self, logger)
+          end
+          @context_targets = (data[:contextTargets] || []).map do |target_data|
+            Target.new(target_data, self, logger)
+          end
+          @rules = (data[:rules] || []).map.with_index do |rule_data, index|
+            FlagRule.new(rule_data, index, self, logger)
+          end
+          @salt = data[:salt]
+          @off_result = EvaluatorHelpers.evaluation_detail_for_off_variation(self, EvaluationReason::off, logger)
+          @fallthrough_results = Preprocessor.precompute_multi_variation_results(self,
+            EvaluationReason::fallthrough(false), EvaluationReason::fallthrough(true))
+        end
+
+        # @return [Hash]
+        attr_reader :data
+        # @return [String]
+        attr_reader :key
+        # @return [Integer]
+        attr_reader :version
+        # @return [Boolean]
+        attr_reader :deleted
+        # @return [Array]
+        attr_reader :variations
+        # @return [Boolean]
+        attr_reader :on
+        # @return [Integer|nil]
+        attr_reader :off_variation
+        # @return [LaunchDarkly::Impl::Model::VariationOrRollout]
+        attr_reader :fallthrough
+        # @return [LaunchDarkly::EvaluationDetail]
+        attr_reader :off_result
+        # @return [LaunchDarkly::Impl::Model::EvalResultFactoryMultiVariations]
+        attr_reader :fallthrough_results
+        # @return [Array<LaunchDarkly::Impl::Model::Prerequisite>]
+        attr_reader :prerequisites
+        # @return [Array<LaunchDarkly::Impl::Model::Target>]
+        attr_reader :targets
+        # @return [Array<LaunchDarkly::Impl::Model::Target>]
+        attr_reader :context_targets
+        # @return [Array<LaunchDarkly::Impl::Model::FlagRule>]
+        attr_reader :rules
+        # @return [String]
+        attr_reader :salt
+
+        # This method allows us to read properties of the object as if it's just a hash. Currently this is
+        # necessary because some data store logic is still written to expect hashes; we can remove it once
+        # we migrate entirely to using attributes of the class.
+        def [](key)
+          @data[key]
+        end
+
+        def ==(other)
+          other.is_a?(FeatureFlag) && other.data == self.data
+        end
+
+        def as_json(*) # parameter is unused, but may be passed if we're using the json gem
+          @data
+        end
+
+        # Same as as_json, but converts the JSON structure into a string.
+        def to_json(*a)
+          as_json.to_json(a)
+        end
+      end
+
+      class Prerequisite
+        def initialize(data, flag, logger)
+          @data = data
+          @key = data[:key]
+          @variation = data[:variation]
+          @failure_result = EvaluatorHelpers.evaluation_detail_for_off_variation(flag,
+            EvaluationReason::prerequisite_failed(@key), logger)
+        end
+
+        # @return [Hash]
+        attr_reader :data
+        # @return [String]
+        attr_reader :key
+        # @return [Integer]
+        attr_reader :variation
+        # @return [LaunchDarkly::EvaluationDetail]
+        attr_reader :failure_result
+      end
+
+      class Target
+        def initialize(data, flag, logger)
+          @kind = data[:contextKind] || LDContext::KIND_DEFAULT
+          @data = data
+          @values = Set.new(data[:values] || [])
+          @variation = data[:variation]
+          @match_result = EvaluatorHelpers.evaluation_detail_for_variation(flag,
+            data[:variation], EvaluationReason::target_match, logger)
+        end
+
+        # @return [String]
+        attr_reader :kind
+        # @return [Hash]
+        attr_reader :data
+        # @return [Set]
+        attr_reader :values
+        # @return [Integer]
+        attr_reader :variation
+        # @return [LaunchDarkly::EvaluationDetail]
+        attr_reader :match_result
+      end
+
+      class FlagRule
+        def initialize(data, rule_index, flag, logger)
+          @data = data
+          @clauses = (data[:clauses] || []).map do |clause_data|
+            Clause.new(clause_data, logger)
+          end
+          @variation_or_rollout = VariationOrRollout.new(data[:variation], data[:rollout])
+          rule_id = data[:id]
+          match_reason = EvaluationReason::rule_match(rule_index, rule_id)
+          match_reason_in_experiment = EvaluationReason::rule_match(rule_index, rule_id, true)
+          @match_results = Preprocessor.precompute_multi_variation_results(flag, match_reason, match_reason_in_experiment)
+        end
+
+        # @return [Hash]
+        attr_reader :data
+        # @return [Array<LaunchDarkly::Impl::Model::Clause>]
+        attr_reader :clauses
+        # @return [LaunchDarkly::Impl::Model::EvalResultFactoryMultiVariations]
+        attr_reader :match_results
+        # @return [LaunchDarkly::Impl::Model::VariationOrRollout]
+        attr_reader :variation_or_rollout
+      end
+
+      class VariationOrRollout
+        def initialize(variation, rollout_data)
+          @variation = variation
+          @rollout = rollout_data.nil? ? nil : Rollout.new(rollout_data)
+        end
+
+        # @return [Integer|nil]
+        attr_reader :variation
+        # @return [Rollout|nil] currently we do not have a model class for the rollout
+        attr_reader :rollout
+      end
+
+      class Rollout
+        def initialize(data)
+          @context_kind = data[:contextKind]
+          @variations = (data[:variations] || []).map { |v| WeightedVariation.new(v) }
+          @bucket_by = data[:bucketBy]
+          @kind = data[:kind]
+          @is_experiment = @kind == "experiment"
+          @seed = data[:seed]
+        end
+
+        # @return [String|nil]
+        attr_reader :context_kind
+        # @return [Array<WeightedVariation>]
+        attr_reader :variations
+        # @return [String|nil]
+        attr_reader :bucket_by
+        # @return [String|nil]
+        attr_reader :kind
+        # @return [Boolean]
+        attr_reader :is_experiment
+        # @return [Integer|nil]
+        attr_reader :seed
+      end
+
+      class WeightedVariation
+        def initialize(data)
+          @variation = data[:variation]
+          @weight = data[:weight]
+          @untracked = !!data[:untracked]
+        end
+
+        # @return [Integer]
+        attr_reader :variation
+        # @return [Integer]
+        attr_reader :weight
+        # @return [Boolean]
+        attr_reader :untracked
+      end
+
+      # Clause is defined in its own file because clauses are used by both flags and segments
+    end
+  end
+end