launchdarkly-server-sdk 8.8.3-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE.txt +13 -0
- data/README.md +61 -0
- data/lib/launchdarkly-server-sdk.rb +1 -0
- data/lib/ldclient-rb/cache_store.rb +45 -0
- data/lib/ldclient-rb/config.rb +658 -0
- data/lib/ldclient-rb/context.rb +565 -0
- data/lib/ldclient-rb/evaluation_detail.rb +387 -0
- data/lib/ldclient-rb/events.rb +642 -0
- data/lib/ldclient-rb/expiring_cache.rb +77 -0
- data/lib/ldclient-rb/flags_state.rb +88 -0
- data/lib/ldclient-rb/impl/big_segments.rb +117 -0
- data/lib/ldclient-rb/impl/broadcaster.rb +78 -0
- data/lib/ldclient-rb/impl/context.rb +96 -0
- data/lib/ldclient-rb/impl/context_filter.rb +166 -0
- data/lib/ldclient-rb/impl/data_source.rb +188 -0
- data/lib/ldclient-rb/impl/data_store.rb +109 -0
- data/lib/ldclient-rb/impl/dependency_tracker.rb +102 -0
- data/lib/ldclient-rb/impl/diagnostic_events.rb +129 -0
- data/lib/ldclient-rb/impl/evaluation_with_hook_result.rb +34 -0
- data/lib/ldclient-rb/impl/evaluator.rb +539 -0
- data/lib/ldclient-rb/impl/evaluator_bucketing.rb +86 -0
- data/lib/ldclient-rb/impl/evaluator_helpers.rb +50 -0
- data/lib/ldclient-rb/impl/evaluator_operators.rb +131 -0
- data/lib/ldclient-rb/impl/event_sender.rb +100 -0
- data/lib/ldclient-rb/impl/event_summarizer.rb +68 -0
- data/lib/ldclient-rb/impl/event_types.rb +136 -0
- data/lib/ldclient-rb/impl/flag_tracker.rb +58 -0
- data/lib/ldclient-rb/impl/integrations/consul_impl.rb +170 -0
- data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +300 -0
- data/lib/ldclient-rb/impl/integrations/file_data_source.rb +229 -0
- data/lib/ldclient-rb/impl/integrations/redis_impl.rb +306 -0
- data/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb +40 -0
- data/lib/ldclient-rb/impl/migrations/migrator.rb +287 -0
- data/lib/ldclient-rb/impl/migrations/tracker.rb +136 -0
- data/lib/ldclient-rb/impl/model/clause.rb +45 -0
- data/lib/ldclient-rb/impl/model/feature_flag.rb +254 -0
- data/lib/ldclient-rb/impl/model/preprocessed_data.rb +64 -0
- data/lib/ldclient-rb/impl/model/segment.rb +132 -0
- data/lib/ldclient-rb/impl/model/serialization.rb +72 -0
- data/lib/ldclient-rb/impl/repeating_task.rb +46 -0
- data/lib/ldclient-rb/impl/sampler.rb +25 -0
- data/lib/ldclient-rb/impl/store_client_wrapper.rb +141 -0
- data/lib/ldclient-rb/impl/store_data_set_sorter.rb +55 -0
- data/lib/ldclient-rb/impl/unbounded_pool.rb +34 -0
- data/lib/ldclient-rb/impl/util.rb +95 -0
- data/lib/ldclient-rb/impl.rb +13 -0
- data/lib/ldclient-rb/in_memory_store.rb +100 -0
- data/lib/ldclient-rb/integrations/consul.rb +45 -0
- data/lib/ldclient-rb/integrations/dynamodb.rb +92 -0
- data/lib/ldclient-rb/integrations/file_data.rb +108 -0
- data/lib/ldclient-rb/integrations/redis.rb +98 -0
- data/lib/ldclient-rb/integrations/test_data/flag_builder.rb +663 -0
- data/lib/ldclient-rb/integrations/test_data.rb +213 -0
- data/lib/ldclient-rb/integrations/util/store_wrapper.rb +246 -0
- data/lib/ldclient-rb/integrations.rb +6 -0
- data/lib/ldclient-rb/interfaces.rb +974 -0
- data/lib/ldclient-rb/ldclient.rb +822 -0
- data/lib/ldclient-rb/memoized_value.rb +32 -0
- data/lib/ldclient-rb/migrations.rb +230 -0
- data/lib/ldclient-rb/non_blocking_thread_pool.rb +46 -0
- data/lib/ldclient-rb/polling.rb +102 -0
- data/lib/ldclient-rb/reference.rb +295 -0
- data/lib/ldclient-rb/requestor.rb +102 -0
- data/lib/ldclient-rb/simple_lru_cache.rb +25 -0
- data/lib/ldclient-rb/stream.rb +196 -0
- data/lib/ldclient-rb/util.rb +132 -0
- data/lib/ldclient-rb/version.rb +3 -0
- data/lib/ldclient-rb.rb +27 -0
- metadata +400 -0
data/lib/ldclient-rb/impl/integrations/redis_impl.rb
@@ -0,0 +1,306 @@
require "ldclient-rb/interfaces"
require "concurrent/atomics"
require "json"

module LaunchDarkly
  module Impl
    module Integrations
      module Redis
        #
        # An implementation of the LaunchDarkly client's feature store that uses a Redis
        # instance. This object holds feature flags and related data received from the
        # streaming API. Feature data can also be further cached in memory to reduce the
        # overhead of calls to Redis.
        #
        # To use this class, you must first have the `redis` and `connection_pool` gems
        # installed. Then, create an instance and store it in the `feature_store` property
        # of your client configuration.
        #
        class RedisFeatureStore
          include LaunchDarkly::Interfaces::FeatureStore

          # Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating
          # to RedisFeatureStoreCore where the actual database logic is. This class was retained for historical
          # reasons, so that existing code can still call RedisFeatureStore.new. In the future, we will migrate
          # away from exposing these concrete classes and use factory methods instead.

          #
          # Constructor for a RedisFeatureStore instance.
          #
          # @param opts [Hash] the configuration options
          # @option opts [String] :redis_url URL of the Redis instance (shortcut for omitting redis_opts)
          # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just redis_url)
          # @option opts [String] :prefix namespace prefix to add to all hash keys used by LaunchDarkly
          # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger`
          # @option opts [Integer] :max_connections size of the Redis connection pool
          # @option opts [Integer] :expiration expiration time for the in-memory cache, in seconds; 0 for no local caching
          # @option opts [Integer] :capacity maximum number of feature flags (or related objects) to cache locally
          # @option opts [Object] :pool custom connection pool, if desired
          # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shut down the custom connection pool
          #
          def initialize(opts = {})
            core = RedisFeatureStoreCore.new(opts)
            @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts)
          end

          def monitoring_enabled?
            true
          end

          def available?
            @wrapper.available?
          end

          #
          # Default value for the `redis_url` constructor parameter; points to an instance of Redis
          # running at `localhost` with its default port.
          #
          def self.default_redis_url
            LaunchDarkly::Integrations::Redis::default_redis_url
          end

          #
          # Default value for the `prefix` constructor parameter.
          #
          def self.default_prefix
            LaunchDarkly::Integrations::Redis::default_prefix
          end

          def get(kind, key)
            @wrapper.get(kind, key)
          end

          def all(kind)
            @wrapper.all(kind)
          end

          def delete(kind, key, version)
            @wrapper.delete(kind, key, version)
          end

          def init(all_data)
            @wrapper.init(all_data)
          end

          def upsert(kind, item)
            @wrapper.upsert(kind, item)
          end

          def initialized?
            @wrapper.initialized?
          end

          def stop
            @wrapper.stop
          end
        end
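For context, wiring the store above into a client happens through the `feature_store` property mentioned in the doc comment. The following is a minimal sketch, not part of the diff; it assumes the `redis` and `connection_pool` gems are installed and uses the `RedisFeatureStore.new` facade shown above (the comment notes that factory methods are the intended long-term entry point).

require "ldclient-rb"

# Build the store; all options are documented in the constructor above.
store = LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStore.new(
  redis_url: "redis://localhost:6379/0",
  prefix: "launchdarkly",
  expiration: 30  # cache items in memory for 30 seconds
)

# Store it in the `feature_store` property of the client configuration.
config = LaunchDarkly::Config.new(feature_store: store)
client = LaunchDarkly::LDClient.new("your-sdk-key", config)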
        class RedisStoreImplBase
          begin
            require "redis"
            require "connection_pool"
            REDIS_ENABLED = true
          rescue ScriptError, StandardError
            REDIS_ENABLED = false
          end

          def initialize(opts)
            unless REDIS_ENABLED
              raise RuntimeError.new("can't use #{description} because one of these gems is missing: redis, connection_pool")
            end

            @pool = create_redis_pool(opts)

            # shut down the pool on close unless the client passed a custom pool and specified not to shut it down
            @pool_shutdown_on_close = !opts[:pool] || opts.fetch(:pool_shutdown_on_close, true)

            @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix
            @logger = opts[:logger] || Config.default_logger
            @test_hook = opts[:test_hook]  # used for unit tests, deliberately undocumented

            @stopped = Concurrent::AtomicBoolean.new

            with_connection do |redis|
              @logger.info("#{description}: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} and prefix: #{@prefix}")
            end
          end

          def stop
            if @stopped.make_true
              return unless @pool_shutdown_on_close
              @pool.shutdown { |redis| redis.close }
            end
          end

          protected def description
            "Redis"
          end

          protected def with_connection
            @pool.with { |redis| yield(redis) }
          end

          private def create_redis_pool(opts)
            redis_opts = opts[:redis_opts] ? opts[:redis_opts].clone : Hash.new
            if opts[:redis_url]
              redis_opts[:url] = opts[:redis_url]
            end
            unless redis_opts.include?(:url)
              redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url
            end
            max_connections = opts[:max_connections] || 16
            opts[:pool] || ConnectionPool.new(size: max_connections) { ::Redis.new(redis_opts) }
          end
        end
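The `:pool` and `:pool_shutdown_on_close` options handled by `create_redis_pool` and `stop` above let an application share one connection pool across several components. A hedged sketch of that usage, assuming the `connection_pool` gem:

require "connection_pool"
require "redis"

# A pool owned by the application rather than by the store.
shared_pool = ConnectionPool.new(size: 8) { Redis.new(url: "redis://localhost:6379/0") }

store = LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStore.new(
  pool: shared_pool,
  pool_shutdown_on_close: false  # keep the pool alive after the client is closed
)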
        #
        # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper.
        #
        class RedisFeatureStoreCore < RedisStoreImplBase
          def initialize(opts)
            super(opts)

            @test_hook = opts[:test_hook]  # used for unit tests, deliberately undocumented
          end

          def available?
            # We don't care what the status is, only that we can connect
            initialized_internal?
            true
          rescue
            false
          end

          def description
            "RedisFeatureStore"
          end

          def init_internal(all_data)
            count = 0
            with_connection do |redis|
              redis.multi do |multi|
                all_data.each do |kind, items|
                  multi.del(items_key(kind))
                  count += items.count
                  items.each do |key, item|
                    multi.hset(items_key(kind), key, Model.serialize(kind, item))
                  end
                end
                multi.set(inited_key, inited_key)
              end
            end
            @logger.info { "RedisFeatureStore: initialized with #{count} items" }
          end

          def get_internal(kind, key)
            with_connection do |redis|
              get_redis(redis, kind, key)
            end
          end

          def get_all_internal(kind)
            fs = {}
            with_connection do |redis|
              hashfs = redis.hgetall(items_key(kind))
              hashfs.each do |k, json_item|
                fs[k.to_sym] = Model.deserialize(kind, json_item)
              end
            end
            fs
          end

          def upsert_internal(kind, new_item)
            base_key = items_key(kind)
            key = new_item[:key]
            try_again = true
            final_item = new_item
            while try_again
              try_again = false
              with_connection do |redis|
                redis.watch(base_key) do
                  old_item = get_redis(redis, kind, key)
                  before_update_transaction(base_key, key)
                  if old_item.nil? || old_item[:version] < new_item[:version]
                    result = redis.multi do |multi|
                      multi.hset(base_key, key, Model.serialize(kind, new_item))
                    end
                    if result.nil?
                      @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" }
                      try_again = true
                    end
                  else
                    final_item = old_item
                    action = new_item[:deleted] ? "delete" : "update"
                    # rubocop:disable Layout/LineLength
                    @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" }
                  end
                  redis.unwatch
                end
              end
            end
            final_item
          end

          def initialized_internal?
            with_connection { |redis| redis.exists?(inited_key) }
          end

          private

          def before_update_transaction(base_key, key)
            @test_hook.before_update_transaction(base_key, key) unless @test_hook.nil?
          end

          def items_key(kind)
            @prefix + ":" + kind[:namespace]
          end

          def cache_key(kind, key)
            kind[:namespace] + ":" + key.to_s
          end

          def inited_key
            @prefix + ":$inited"
          end

          def get_redis(redis, kind, key)
            Model.deserialize(kind, redis.hget(items_key(kind), key))
          end
        end
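`upsert_internal` above relies on redis-rb's optimistic locking: `WATCH` guards the hash for the kind, and `MULTI` returns `nil` when another client modified the watched key before `EXEC`, which is what drives the retry loop. A standalone sketch of the same pattern, assuming a local Redis and illustrative key names:

require "redis"

redis = Redis.new

loop do
  result = redis.watch("launchdarkly:features") do
    current = redis.hget("launchdarkly:features", "my-flag")
    # ...decide on a replacement value based on `current`...
    redis.multi do |multi|
      multi.hset("launchdarkly:features", "my-flag", "serialized-flag-json")
    end
  end
  break unless result.nil?  # nil means the transaction was discarded; retry
end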
        #
        # Internal implementation of the Redis big segment store.
        #
        class RedisBigSegmentStore < RedisStoreImplBase
          KEY_LAST_UP_TO_DATE = ':big_segments_synchronized_on'
          KEY_CONTEXT_INCLUDE = ':big_segment_include:'
          KEY_CONTEXT_EXCLUDE = ':big_segment_exclude:'

          def description
            "RedisBigSegmentStore"
          end

          def get_metadata
            value = with_connection { |redis| redis.get(@prefix + KEY_LAST_UP_TO_DATE) }
            Interfaces::BigSegmentStoreMetadata.new(value.nil? ? nil : value.to_i)
          end

          def get_membership(context_hash)
            with_connection do |redis|
              included_refs = redis.smembers(@prefix + KEY_CONTEXT_INCLUDE + context_hash)
              excluded_refs = redis.smembers(@prefix + KEY_CONTEXT_EXCLUDE + context_hash)
              if !included_refs && !excluded_refs
                nil
              else
                membership = {}
                excluded_refs.each { |ref| membership[ref] = false }
                included_refs.each { |ref| membership[ref] = true }
                membership
              end
            end
          end
        end
      end
    end
  end
end
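One subtlety in `get_membership` above: the exclude set is applied first and the include set second, so a context hash present in both sets ends up included. A minimal illustration of the resulting membership hash:

membership = {}
["user"].each { |ref| membership[ref] = false }        # excluded refs
["user", "org"].each { |ref| membership[ref] = true }  # included refs overwrite exclusions
membership  # => { "user" => true, "org" => true }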
data/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb
@@ -0,0 +1,40 @@
require 'concurrent/atomics'
require 'ldclient-rb/interfaces'

module LaunchDarkly
  module Impl
    module Integrations
      module TestData
        # @private
        class TestDataSource
          include LaunchDarkly::Interfaces::DataSource

          def initialize(feature_store, test_data)
            @feature_store = feature_store
            @test_data = test_data
          end

          def initialized?
            true
          end

          def start
            ready = Concurrent::Event.new
            ready.set
            init_data = @test_data.make_init_data
            @feature_store.init(init_data)
            ready
          end

          def stop
            @test_data.closed_instance(self)
          end

          def upsert(kind, item)
            @feature_store.upsert(kind, item)
          end
        end
      end
    end
  end
end
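`TestDataSource` is the private half of the test-data integration; applications reach it through `LaunchDarkly::Integrations::TestData` (see `integrations/test_data.rb` in the file list above). A rough sketch of typical usage, assuming the flag-builder methods documented for that class:

require "ldclient-rb"

td = LaunchDarkly::Integrations::TestData.data_source

# Seed a flag before the client starts; `make_init_data` picks this up in `start`.
td.update(td.flag("sample-flag").variation_for_all(true))

config = LaunchDarkly::Config.new(data_source: td, send_events: false)
client = LaunchDarkly::LDClient.new("fake-sdk-key", config)

# Later updates flow through TestDataSource#upsert into the feature store.
td.update(td.flag("sample-flag").variation_for_all(false))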
data/lib/ldclient-rb/impl/migrations/migrator.rb
@@ -0,0 +1,287 @@
require 'thread'

module LaunchDarkly
  module Impl
    module Migrations

      #
      # A migration config stores references to callable methods which execute customer-defined read or write
      # operations on old or new origins of information. For read operations, an optional comparison function can
      # also be defined.
      #
      class MigrationConfig
        #
        # @param old [#call] Refer to {#old}
        # @param new [#call] Refer to {#new}
        # @param comparison [#call, nil] Refer to {#comparison}
        #
        def initialize(old, new, comparison)
          @old = old
          @new = new
          @comparison = comparison
        end

        #
        # Callable which receives a nullable payload parameter and returns a {LaunchDarkly::Result}.
        #
        # This function call should affect the old migration origin when called.
        #
        # @return [#call]
        #
        attr_reader :old

        #
        # Callable which receives a nullable payload parameter and returns a {LaunchDarkly::Result}.
        #
        # This function call should affect the new migration origin when called.
        #
        # @return [#call]
        #
        attr_reader :new

        #
        # Optional callable which receives two objects of any kind and returns a boolean representing equality.
        #
        # The result of this comparison can be sent upstream to LaunchDarkly to enhance migration observability.
        #
        # @return [#call, nil]
        #
        attr_reader :comparison
      end
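A `MigrationConfig` is just a pair of callables plus the optional comparison. A hedged sketch of a customer-supplied read configuration; `legacy_db` and `new_service` are hypothetical stand-ins for application data-access objects, and `LaunchDarkly::Result.success`/`Result.fail` are assumed to behave as used elsewhere in this file:

# `legacy_db` and `new_service` are placeholders for your own objects.
read_old = lambda do |payload|
  begin
    LaunchDarkly::Result.success(legacy_db.read(payload))
  rescue => e
    LaunchDarkly::Result.fail("legacy read failed", e)
  end
end

read_new = ->(payload) { LaunchDarkly::Result.success(new_service.read(payload)) }

# Equality check used for consistency tracking during shadow/live stages.
compare = ->(old_value, new_value) { old_value == new_value }

read_config = LaunchDarkly::Impl::Migrations::MigrationConfig.new(read_old, read_new, compare)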
      #
      # An implementation of the {LaunchDarkly::Interfaces::Migrations::Migrator} interface, capable of supporting
      # feature-flag backed technology migrations.
      #
      class Migrator
        include LaunchDarkly::Interfaces::Migrations::Migrator

        #
        # @param client [LaunchDarkly::LDClient]
        # @param read_execution_order [Symbol]
        # @param read_config [MigrationConfig]
        # @param write_config [MigrationConfig]
        # @param measure_latency [Boolean]
        # @param measure_errors [Boolean]
        #
        def initialize(client, read_execution_order, read_config, write_config, measure_latency, measure_errors)
          @client = client
          @read_execution_order = read_execution_order
          @read_config = read_config
          @write_config = write_config
          @measure_latency = measure_latency
          @measure_errors = measure_errors
          @sampler = LaunchDarkly::Impl::Sampler.new(Random.new)
        end

        #
        # Perform the configured read operations against the appropriate old and/or new origins.
        #
        # @param key [String] The migration-based flag key to use for determining migration stages
        # @param context [LaunchDarkly::LDContext] The context to use for evaluating the migration flag
        # @param default_stage [Symbol] The stage to fall back to if one could not be determined for the requested flag
        # @param payload [String] An optional payload to pass through to the configured read operations.
        #
        # @return [LaunchDarkly::Migrations::OperationResult]
        #
        def read(key, context, default_stage, payload = nil)
          stage, tracker = @client.migration_variation(key, context, default_stage)
          tracker.operation(LaunchDarkly::Migrations::OP_READ)

          old = Executor.new(@client.logger, LaunchDarkly::Migrations::ORIGIN_OLD, @read_config.old, tracker, @measure_latency, @measure_errors, payload)
          new = Executor.new(@client.logger, LaunchDarkly::Migrations::ORIGIN_NEW, @read_config.new, tracker, @measure_latency, @measure_errors, payload)

          case stage
          when LaunchDarkly::Migrations::STAGE_OFF, LaunchDarkly::Migrations::STAGE_DUALWRITE
            result = old.run
          when LaunchDarkly::Migrations::STAGE_SHADOW
            result = read_both(old, new, @read_config.comparison, @read_execution_order, tracker)
          when LaunchDarkly::Migrations::STAGE_LIVE
            result = read_both(new, old, @read_config.comparison, @read_execution_order, tracker)
          when LaunchDarkly::Migrations::STAGE_RAMPDOWN, LaunchDarkly::Migrations::STAGE_COMPLETE
            result = new.run
          else
            result = LaunchDarkly::Migrations::OperationResult.new(
              LaunchDarkly::Migrations::ORIGIN_OLD,
              LaunchDarkly::Result.fail("invalid stage #{stage}; cannot execute read")
            )
          end

          @client.track_migration_op(tracker)

          result
        end

        #
        # Perform the configured write operations against the appropriate old and/or new origins.
        #
        # @param key [String] The migration-based flag key to use for determining migration stages
        # @param context [LaunchDarkly::LDContext] The context to use for evaluating the migration flag
        # @param default_stage [Symbol] The stage to fall back to if one could not be determined for the requested flag
        # @param payload [String] An optional payload to pass through to the configured write operations.
        #
        # @return [LaunchDarkly::Migrations::WriteResult]
        #
        def write(key, context, default_stage, payload = nil)
          stage, tracker = @client.migration_variation(key, context, default_stage)
          tracker.operation(LaunchDarkly::Migrations::OP_WRITE)

          old = Executor.new(@client.logger, LaunchDarkly::Migrations::ORIGIN_OLD, @write_config.old, tracker, @measure_latency, @measure_errors, payload)
          new = Executor.new(@client.logger, LaunchDarkly::Migrations::ORIGIN_NEW, @write_config.new, tracker, @measure_latency, @measure_errors, payload)

          case stage
          when LaunchDarkly::Migrations::STAGE_OFF
            result = old.run
            write_result = LaunchDarkly::Migrations::WriteResult.new(result)
          when LaunchDarkly::Migrations::STAGE_DUALWRITE, LaunchDarkly::Migrations::STAGE_SHADOW
            authoritative_result, nonauthoritative_result = write_both(old, new, tracker)
            write_result = LaunchDarkly::Migrations::WriteResult.new(authoritative_result, nonauthoritative_result)
          when LaunchDarkly::Migrations::STAGE_LIVE, LaunchDarkly::Migrations::STAGE_RAMPDOWN
            authoritative_result, nonauthoritative_result = write_both(new, old, tracker)
            write_result = LaunchDarkly::Migrations::WriteResult.new(authoritative_result, nonauthoritative_result)
          when LaunchDarkly::Migrations::STAGE_COMPLETE
            result = new.run
            write_result = LaunchDarkly::Migrations::WriteResult.new(result)
          else
            result = LaunchDarkly::Migrations::OperationResult.new(
              LaunchDarkly::Migrations::ORIGIN_OLD,
              LaunchDarkly::Result.fail("invalid stage #{stage}; cannot execute write")
            )
            write_result = LaunchDarkly::Migrations::WriteResult.new(result)
          end

          @client.track_migration_op(tracker)

          write_result
        end
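Applications normally obtain a `Migrator` through `LaunchDarkly::Migrations::MigratorBuilder` (defined in `ldclient-rb/migrations.rb`, also in this release) rather than constructing it directly. A rough sketch under that assumption, reusing the callables from the `MigrationConfig` example and a hypothetical `context`:

builder = LaunchDarkly::Migrations::MigratorBuilder.new(client)
builder.read(read_old, read_new, compare)
builder.write(write_old, write_new)  # write_old/write_new defined like the read callables

migrator = builder.build

# `read` evaluates the migration flag for `context` and dispatches per the stage logic above.
result = migrator.read("migration-flag-key", context, LaunchDarkly::Migrations::STAGE_OFF)
puts result.value if result.success?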
        #
        # Execute both read methods in accordance with the requested execution order.
        #
        # This method always returns the {LaunchDarkly::Migrations::OperationResult} from running the
        # authoritative read operation. The non-authoritative executor may fail, but it will not affect
        # the return value.
        #
        # @param authoritative [Executor]
        # @param nonauthoritative [Executor]
        # @param comparison [#call]
        # @param execution_order [Symbol]
        # @param tracker [LaunchDarkly::Interfaces::Migrations::OpTracker]
        #
        # @return [LaunchDarkly::Migrations::OperationResult]
        #
        private def read_both(authoritative, nonauthoritative, comparison, execution_order, tracker)
          authoritative_result = nil
          nonauthoritative_result = nil

          case execution_order
          when LaunchDarkly::Migrations::MigratorBuilder::EXECUTION_PARALLEL
            auth_handler = Thread.new { authoritative_result = authoritative.run }
            nonauth_handler = Thread.new { nonauthoritative_result = nonauthoritative.run }

            auth_handler.join
            nonauth_handler.join
          when LaunchDarkly::Migrations::MigratorBuilder::EXECUTION_RANDOM
            # Randomly decide which origin runs first.
            if @sampler.sample(2)
              nonauthoritative_result = nonauthoritative.run
              authoritative_result = authoritative.run
            else
              authoritative_result = authoritative.run
              nonauthoritative_result = nonauthoritative.run
            end
          else
            authoritative_result = authoritative.run
            nonauthoritative_result = nonauthoritative.run
          end

          return authoritative_result if comparison.nil?

          if authoritative_result.success? && nonauthoritative_result.success?
            tracker.consistent(-> { comparison.call(authoritative_result.value, nonauthoritative_result.value) })
          end

          authoritative_result
        end

        #
        # Execute both operations sequentially.
        #
        # If the authoritative executor fails, do not run the non-authoritative one. As a result, this method will
        # always return an authoritative {LaunchDarkly::Migrations::OperationResult} as the first value, and
        # optionally the non-authoritative {LaunchDarkly::Migrations::OperationResult} as the second value.
        #
        # @param authoritative [Executor]
        # @param nonauthoritative [Executor]
        # @param tracker [LaunchDarkly::Interfaces::Migrations::OpTracker]
        #
        # @return [Array(LaunchDarkly::Migrations::OperationResult, [LaunchDarkly::Migrations::OperationResult, nil])]
        #
        private def write_both(authoritative, nonauthoritative, tracker)
          authoritative_result = authoritative.run
          tracker.invoked(authoritative.origin)

          return authoritative_result, nil unless authoritative_result.success?

          nonauthoritative_result = nonauthoritative.run
          tracker.invoked(nonauthoritative.origin)

          [authoritative_result, nonauthoritative_result]
        end
      end
      #
      # Utility class for executing migration operations while also tracking our built-in migration measurements.
      #
      class Executor
        #
        # @return [Symbol]
        #
        attr_reader :origin

        #
        # @param logger [Logger]
        # @param origin [Symbol]
        # @param fn [#call]
        # @param tracker [LaunchDarkly::Interfaces::Migrations::OpTracker]
        # @param measure_latency [Boolean]
        # @param measure_errors [Boolean]
        # @param payload [Object, nil]
        #
        def initialize(logger, origin, fn, tracker, measure_latency, measure_errors, payload)
          @logger = logger
          @origin = origin
          @fn = fn
          @tracker = tracker
          @measure_latency = measure_latency
          @measure_errors = measure_errors
          @payload = payload
        end

        #
        # Execute the configured operation and track any available measurements.
        #
        # @return [LaunchDarkly::Migrations::OperationResult]
        #
        def run
          start = Time.now

          begin
            result = @fn.call(@payload)
          rescue => e
            LaunchDarkly::Util.log_exception(@logger, "Unexpected error running method for '#{origin}' origin", e)
            result = LaunchDarkly::Result.fail("'#{origin}' operation raised an exception", e)
          end

          @tracker.latency(@origin, (Time.now - start) * 1_000) if @measure_latency
          @tracker.error(@origin) if @measure_errors && !result.success?
          @tracker.invoked(@origin)

          LaunchDarkly::Migrations::OperationResult.new(@origin, result)
        end
      end
    end
  end
end