launchdarkly-server-sdk 8.8.3-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE.txt +13 -0
- data/README.md +61 -0
- data/lib/launchdarkly-server-sdk.rb +1 -0
- data/lib/ldclient-rb/cache_store.rb +45 -0
- data/lib/ldclient-rb/config.rb +658 -0
- data/lib/ldclient-rb/context.rb +565 -0
- data/lib/ldclient-rb/evaluation_detail.rb +387 -0
- data/lib/ldclient-rb/events.rb +642 -0
- data/lib/ldclient-rb/expiring_cache.rb +77 -0
- data/lib/ldclient-rb/flags_state.rb +88 -0
- data/lib/ldclient-rb/impl/big_segments.rb +117 -0
- data/lib/ldclient-rb/impl/broadcaster.rb +78 -0
- data/lib/ldclient-rb/impl/context.rb +96 -0
- data/lib/ldclient-rb/impl/context_filter.rb +166 -0
- data/lib/ldclient-rb/impl/data_source.rb +188 -0
- data/lib/ldclient-rb/impl/data_store.rb +109 -0
- data/lib/ldclient-rb/impl/dependency_tracker.rb +102 -0
- data/lib/ldclient-rb/impl/diagnostic_events.rb +129 -0
- data/lib/ldclient-rb/impl/evaluation_with_hook_result.rb +34 -0
- data/lib/ldclient-rb/impl/evaluator.rb +539 -0
- data/lib/ldclient-rb/impl/evaluator_bucketing.rb +86 -0
- data/lib/ldclient-rb/impl/evaluator_helpers.rb +50 -0
- data/lib/ldclient-rb/impl/evaluator_operators.rb +131 -0
- data/lib/ldclient-rb/impl/event_sender.rb +100 -0
- data/lib/ldclient-rb/impl/event_summarizer.rb +68 -0
- data/lib/ldclient-rb/impl/event_types.rb +136 -0
- data/lib/ldclient-rb/impl/flag_tracker.rb +58 -0
- data/lib/ldclient-rb/impl/integrations/consul_impl.rb +170 -0
- data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +300 -0
- data/lib/ldclient-rb/impl/integrations/file_data_source.rb +229 -0
- data/lib/ldclient-rb/impl/integrations/redis_impl.rb +306 -0
- data/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb +40 -0
- data/lib/ldclient-rb/impl/migrations/migrator.rb +287 -0
- data/lib/ldclient-rb/impl/migrations/tracker.rb +136 -0
- data/lib/ldclient-rb/impl/model/clause.rb +45 -0
- data/lib/ldclient-rb/impl/model/feature_flag.rb +254 -0
- data/lib/ldclient-rb/impl/model/preprocessed_data.rb +64 -0
- data/lib/ldclient-rb/impl/model/segment.rb +132 -0
- data/lib/ldclient-rb/impl/model/serialization.rb +72 -0
- data/lib/ldclient-rb/impl/repeating_task.rb +46 -0
- data/lib/ldclient-rb/impl/sampler.rb +25 -0
- data/lib/ldclient-rb/impl/store_client_wrapper.rb +141 -0
- data/lib/ldclient-rb/impl/store_data_set_sorter.rb +55 -0
- data/lib/ldclient-rb/impl/unbounded_pool.rb +34 -0
- data/lib/ldclient-rb/impl/util.rb +95 -0
- data/lib/ldclient-rb/impl.rb +13 -0
- data/lib/ldclient-rb/in_memory_store.rb +100 -0
- data/lib/ldclient-rb/integrations/consul.rb +45 -0
- data/lib/ldclient-rb/integrations/dynamodb.rb +92 -0
- data/lib/ldclient-rb/integrations/file_data.rb +108 -0
- data/lib/ldclient-rb/integrations/redis.rb +98 -0
- data/lib/ldclient-rb/integrations/test_data/flag_builder.rb +663 -0
- data/lib/ldclient-rb/integrations/test_data.rb +213 -0
- data/lib/ldclient-rb/integrations/util/store_wrapper.rb +246 -0
- data/lib/ldclient-rb/integrations.rb +6 -0
- data/lib/ldclient-rb/interfaces.rb +974 -0
- data/lib/ldclient-rb/ldclient.rb +822 -0
- data/lib/ldclient-rb/memoized_value.rb +32 -0
- data/lib/ldclient-rb/migrations.rb +230 -0
- data/lib/ldclient-rb/non_blocking_thread_pool.rb +46 -0
- data/lib/ldclient-rb/polling.rb +102 -0
- data/lib/ldclient-rb/reference.rb +295 -0
- data/lib/ldclient-rb/requestor.rb +102 -0
- data/lib/ldclient-rb/simple_lru_cache.rb +25 -0
- data/lib/ldclient-rb/stream.rb +196 -0
- data/lib/ldclient-rb/util.rb +132 -0
- data/lib/ldclient-rb/version.rb +3 -0
- data/lib/ldclient-rb.rb +27 -0
- metadata +400 -0

data/lib/ldclient-rb/impl/model/serialization.rb
@@ -0,0 +1,72 @@
+require "ldclient-rb/impl/model/feature_flag"
+require "ldclient-rb/impl/model/preprocessed_data"
+require "ldclient-rb/impl/model/segment"
+
+# General implementation notes about the data model classes in LaunchDarkly::Impl::Model--
+#
+# As soon as we receive flag/segment JSON data from LaunchDarkly (or, read it from a database), we
+# transform it into the model classes FeatureFlag, Segment, etc. The constructor of each of these
+# classes takes a hash (the parsed JSON), and transforms it into an internal representation that
+# is more efficient for evaluations.
+#
+# Validation works as follows:
+# - A property value that is of the correct type, but is invalid for other reasons (for example,
+#   if a flag rule refers to variation index 5, but there are only 2 variations in the flag), does
+#   not prevent the flag from being parsed and stored. It does cause a warning to be logged, if a
+#   logger was passed to the constructor.
+# - If a value is completely invalid for the schema, the constructor may throw an
+#   exception, causing the whole data set to be rejected. This is consistent with the behavior of
+#   the strongly-typed SDKs.
+#
+# Currently, the model classes also retain the original hash of the parsed JSON. This is because
+# we may need to re-serialize them to JSON, and building the JSON on the fly would be very
+# inefficient, so each model class has a to_json method that just returns the same Hash. If we
+# are able in the future to either use a custom streaming serializer, or pass the JSON data
+# straight through from LaunchDarkly to a database instead of re-serializing, we could stop
+# retaining this data.
+
+module LaunchDarkly
+  module Impl
+    module Model
+      # Abstraction of deserializing a feature flag or segment that was read from a data store or
+      # received from LaunchDarkly.
+      #
+      # SDK code outside of Impl::Model should use this method instead of calling the model class
+      # constructors directly, so as not to rely on implementation details.
+      #
+      # @param kind [Hash] normally either FEATURES or SEGMENTS
+      # @param input [object] a JSON string or a parsed hash (or a data model object, in which case
+      #   we'll just return the original object)
+      # @param logger [Logger|nil] logs errors if there are any data validation problems
+      # @return [Object] the flag or segment (or, for an unknown data kind, the data as a hash)
+      def self.deserialize(kind, input, logger = nil)
+        return nil if input.nil?
+        return input if !input.is_a?(String) && !input.is_a?(Hash)
+        data = input.is_a?(Hash) ? input : JSON.parse(input, symbolize_names: true)
+        case kind
+        when FEATURES
+          FeatureFlag.new(data, logger)
+        when SEGMENTS
+          Segment.new(data, logger)
+        else
+          data
+        end
+      end
+
+      # Abstraction of serializing a feature flag or segment that will be written to a data store.
+      # Currently we just call to_json, but SDK code outside of Impl::Model should use this method
+      # instead of to_json, so as not to rely on implementation details.
+      def self.serialize(kind, item)
+        item.to_json
+      end
+
+      # Translates a { flags: ..., segments: ... } object received from LaunchDarkly to the data store format.
+      def self.make_all_store_data(received_data, logger = nil)
+        {
+          FEATURES => (received_data[:flags] || {}).transform_values { |data| FeatureFlag.new(data, logger) },
+          SEGMENTS => (received_data[:segments] || {}).transform_values { |data| Segment.new(data, logger) },
+        }
+      end
+    end
+  end
+end
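
For orientation, here is a minimal sketch of how this module might be exercised directly, assuming the gem is installed. The flag hash is a made-up stand-in, and whether a payload this small passes the model validation cleanly depends on FeatureFlag's own checks (see the notes above); the `FEATURES` kind constant is defined in `in_memory_store.rb`, also shown in this diff.

```ruby
require "json"
require "logger"
require "ldclient-rb"

logger = Logger.new($stdout)

# Hypothetical, heavily simplified flag payload; real flags carry many more properties.
flag_json = { key: "example-flag", version: 1, on: false, variations: [true, false] }.to_json

# deserialize turns the JSON string (or an already-parsed hash) into a FeatureFlag model object.
flag = LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::FEATURES, flag_json, logger)

# serialize re-emits the retained data (see the notes above about to_json).
puts LaunchDarkly::Impl::Model.serialize(LaunchDarkly::FEATURES, flag)
```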

data/lib/ldclient-rb/impl/repeating_task.rb
@@ -0,0 +1,46 @@
+require "ldclient-rb/util"
+
+require "concurrent/atomics"
+
+module LaunchDarkly
+  module Impl
+    class RepeatingTask
+      def initialize(interval, start_delay, task, logger)
+        @interval = interval
+        @start_delay = start_delay
+        @task = task
+        @logger = logger
+        @stopped = Concurrent::AtomicBoolean.new(false)
+        @worker = nil
+      end
+
+      def start
+        @worker = Thread.new do
+          sleep(@start_delay) unless @start_delay.nil? || @start_delay == 0
+
+          until @stopped.value do
+            started_at = Time.now
+            begin
+              @task.call
+            rescue => e
+              LaunchDarkly::Util.log_exception(@logger, "Uncaught exception from repeating task", e)
+            end
+            delta = @interval - (Time.now - started_at)
+            if delta > 0
+              sleep(delta)
+            end
+          end
+        end
+      end
+
+      def stop
+        if @stopped.make_true
+          if @worker && @worker.alive? && @worker != Thread.current
+            @worker.run # causes the thread to wake up if it's currently in a sleep
+            @worker.join
+          end
+        end
+      end
+    end
+  end
+end
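
As an illustration only (RepeatingTask is an internal class, not a supported public API), a sketch of how it might be driven, assuming the gem is installed:

```ruby
require "logger"
require "ldclient-rb/impl/repeating_task"

logger = Logger.new($stdout)

# Run the given lambda every 2 seconds, after an initial 1-second delay.
task = LaunchDarkly::Impl::RepeatingTask.new(2, 1, -> { logger.info("tick") }, logger)
task.start

sleep(5)   # let it tick a couple of times
task.stop  # wakes the worker thread if it is sleeping, then joins it
```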

data/lib/ldclient-rb/impl/sampler.rb
@@ -0,0 +1,25 @@
+module LaunchDarkly
+  module Impl
+    class Sampler
+      #
+      # @param random [Random]
+      #
+      def initialize(random)
+        @random = random
+      end
+
+      #
+      # @param ratio [Int]
+      #
+      # @return [Boolean]
+      #
+      def sample(ratio)
+        return false unless ratio.is_a? Integer
+        return false if ratio <= 0
+        return true if ratio == 1
+
+        @random.rand(1.0) < 1.0 / ratio
+      end
+    end
+  end
+end
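
A small sketch of the 1-in-`ratio` sampling behavior this implements, assuming the gem is installed; the counts vary from run to run:

```ruby
require "ldclient-rb/impl/sampler"

sampler = LaunchDarkly::Impl::Sampler.new(Random.new)

# ratio 1 always samples; ratio 10 samples roughly 10% of calls; non-positive ratios never do.
always = (1..1000).count { sampler.sample(1) }
tenth  = (1..1000).count { sampler.sample(10) }
puts "ratio 1: #{always}/1000, ratio 10: #{tenth}/1000 (about 100 expected)"
```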

data/lib/ldclient-rb/impl/store_client_wrapper.rb
@@ -0,0 +1,141 @@
+require "concurrent"
+require "ldclient-rb/interfaces"
+require "ldclient-rb/impl/store_data_set_sorter"
+
+module LaunchDarkly
+  module Impl
+    #
+    # Provides additional behavior that the client requires before or after feature store operations.
+    # This just means sorting the data set for init() and dealing with data store status listeners.
+    #
+    class FeatureStoreClientWrapper
+      include Interfaces::FeatureStore
+
+      def initialize(store, store_update_sink, logger)
+        # @type [LaunchDarkly::Interfaces::FeatureStore]
+        @store = store
+
+        @monitoring_enabled = does_store_support_monitoring?
+
+        # @type [LaunchDarkly::Impl::DataStore::UpdateSink]
+        @store_update_sink = store_update_sink
+        @logger = logger
+
+        @mutex = Mutex.new # Covers the following variables
+        @last_available = true
+        # @type [LaunchDarkly::Impl::RepeatingTask, nil]
+        @poller = nil
+      end
+
+      def init(all_data)
+        wrapper { @store.init(FeatureStoreDataSetSorter.sort_all_collections(all_data)) }
+      end
+
+      def get(kind, key)
+        wrapper { @store.get(kind, key) }
+      end
+
+      def all(kind)
+        wrapper { @store.all(kind) }
+      end
+
+      def upsert(kind, item)
+        wrapper { @store.upsert(kind, item) }
+      end
+
+      def delete(kind, key, version)
+        wrapper { @store.delete(kind, key, version) }
+      end
+
+      def initialized?
+        @store.initialized?
+      end
+
+      def stop
+        @store.stop
+        @mutex.synchronize do
+          return if @poller.nil?
+
+          @poller.stop
+          @poller = nil
+        end
+      end
+
+      def monitoring_enabled?
+        @monitoring_enabled
+      end
+
+      private def wrapper()
+        begin
+          yield
+        rescue => e
+          update_availability(false) if @monitoring_enabled
+          raise
+        end
+      end
+
+      private def update_availability(available)
+        @mutex.synchronize do
+          return if available == @last_available
+          @last_available = available
+        end
+
+        status = LaunchDarkly::Interfaces::DataStore::Status.new(available, false)
+
+        @logger.warn("Persistent store is available again") if available
+
+        @store_update_sink.update_status(status)
+
+        if available
+          @mutex.synchronize do
+            return if @poller.nil?
+
+            @poller.stop
+            @poller = nil
+          end
+
+          return
+        end
+
+        @logger.warn("Detected persistent store unavailability; updates will be cached until it recovers.")
+
+        task = Impl::RepeatingTask.new(0.5, 0, -> { self.check_availability }, @logger)
+
+        @mutex.synchronize do
+          @poller = task
+          @poller.start
+        end
+      end
+
+      private def check_availability
+        begin
+          update_availability(true) if @store.available?
+        rescue => e
+          @logger.error("Unexpected error from data store status function: #{e}")
+        end
+      end
+
+      # This methods determines whether the wrapped store can support enabling monitoring.
+      #
+      # The wrapped store must provide a monitoring_enabled method, which must
+      # be true. But this alone is not sufficient.
+      #
+      # Because this class wraps all interactions with a provided store, it can
+      # technically "monitor" any store. However, monitoring also requires that
+      # we notify listeners when the store is available again.
+      #
+      # We determine this by checking the store's `available?` method, so this
+      # is also a requirement for monitoring support.
+      #
+      # These extra checks won't be necessary once `available` becomes a part
+      # of the core interface requirements and this class no longer wraps every
+      # feature store.
+      private def does_store_support_monitoring?
+        return false unless @store.respond_to? :monitoring_enabled?
+        return false unless @store.respond_to? :available?
+
+        @store.monitoring_enabled?
+      end
+    end
+  end
+end
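
To make the monitoring requirements described above concrete, this is a sketch (a hypothetical class, not part of the SDK) of the extra surface a custom persistent store would need before the wrapper reports `monitoring_enabled?` as true:

```ruby
# Hypothetical persistent store stub; only the monitoring-related methods are shown.
class MyMonitorablePersistentStore
  # The wrapper first checks respond_to?(:monitoring_enabled?) and then the returned value.
  def monitoring_enabled?
    true
  end

  # While the store is considered unavailable, the wrapper polls this (via the
  # RepeatingTask created in update_availability) so it can announce recovery.
  def available?
    true # e.g. issue a cheap query against the underlying database
  end

  # ...plus the usual FeatureStore methods: init, get, all, upsert, delete, initialized?, stop.
end
```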

data/lib/ldclient-rb/impl/store_data_set_sorter.rb
@@ -0,0 +1,55 @@
+
+module LaunchDarkly
+  module Impl
+    #
+    # Implements a dependency graph ordering for data to be stored in a feature store. We must use this
+    # on every data set that will be passed to the feature store's init() method.
+    #
+    class FeatureStoreDataSetSorter
+      #
+      # Returns a copy of the input hash that has the following guarantees: the iteration order of the outer
+      # hash will be in ascending order by the VersionDataKind's :priority property (if any), and for each
+      # data kind that has a :get_dependency_keys function, the inner hash will have an iteration order
+      # where B is before A if A has a dependency on B.
+      #
+      # This implementation relies on the fact that hashes in Ruby have an iteration order that is the same
+      # as the insertion order. Also, due to the way we deserialize JSON received from LaunchDarkly, the
+      # keys in the inner hash will always be symbols.
+      #
+      def self.sort_all_collections(all_data)
+        outer_hash = {}
+        kinds = all_data.keys.sort_by { |k|
+          k[:priority].nil? ? k[:namespace].length : k[:priority] # arbitrary order if priority is unknown
+        }
+        kinds.each do |kind|
+          items = all_data[kind]
+          outer_hash[kind] = self.sort_collection(kind, items)
+        end
+        outer_hash
+      end
+
+      def self.sort_collection(kind, input)
+        dependency_fn = kind[:get_dependency_keys]
+        return input if dependency_fn.nil? || input.empty?
+        remaining_items = input.clone
+        items_out = {}
+        until remaining_items.empty?
+          # pick a random item that hasn't been updated yet
+          key, item = remaining_items.first
+          self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out)
+        end
+        items_out
+      end
+
+      def self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out)
+        item_key = item[:key].to_sym
+        remaining_items.delete(item_key) # we won't need to visit this item again
+        dependency_fn.call(item).each do |dep_key|
+          dep_item = remaining_items[dep_key.to_sym]
+          self.add_with_dependencies_first(dep_item, dependency_fn, remaining_items, items_out) unless dep_item.nil?
+        end
+        items_out[item_key] = item
+      end
+    end
+  end
+end
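
For intuition about the ordering guarantee, a sketch using plain hashes shaped like the SDK's data kinds; the kind hashes below are stand-ins for the real FEATURES/SEGMENTS objects:

```ruby
require "ldclient-rb/impl/store_data_set_sorter"

# Stand-in data kinds: segments sort ahead of features because of the lower :priority,
# and flag prerequisites act as dependencies within the features collection.
segments_kind = { namespace: "segments", priority: 0 }
features_kind = {
  namespace: "features",
  priority: 1,
  get_dependency_keys: ->(flag) { (flag[:prerequisites] || []).map { |p| p[:key] } },
}

all_data = {
  features_kind => {
    a: { key: "a", prerequisites: [{ key: "b" }] },  # a depends on b
    b: { key: "b" },
  },
  segments_kind => { s: { key: "s" } },
}

sorted = LaunchDarkly::Impl::FeatureStoreDataSetSorter.sort_all_collections(all_data)
puts sorted.keys.map { |k| k[:namespace] }.inspect  # ["segments", "features"]
puts sorted[features_kind].keys.inspect             # [:b, :a] -- dependency first
```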

data/lib/ldclient-rb/impl/unbounded_pool.rb
@@ -0,0 +1,34 @@
+module LaunchDarkly
+  module Impl
+    # A simple thread safe generic unbounded resource pool abstraction
+    class UnboundedPool
+      def initialize(instance_creator, instance_destructor)
+        @pool = Array.new
+        @lock = Mutex.new
+        @instance_creator = instance_creator
+        @instance_destructor = instance_destructor
+      end
+
+      def acquire
+        @lock.synchronize {
+          if @pool.length == 0
+            @instance_creator.call()
+          else
+            @pool.pop()
+          end
+        }
+      end
+
+      def release(instance)
+        @lock.synchronize { @pool.push(instance) }
+      end
+
+      def dispose_all
+        @lock.synchronize {
+          @pool.map { |instance| @instance_destructor.call(instance) } unless @instance_destructor.nil?
+          @pool.clear()
+        }
+      end
+    end
+  end
+end
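
A minimal usage sketch for the pool, assuming the gem is installed; the pooled "connections" here are just strings:

```ruby
require "ldclient-rb/impl/unbounded_pool"

created = 0
pool = LaunchDarkly::Impl::UnboundedPool.new(
  -> { created += 1; "connection-#{created}" },  # instance_creator: runs when the pool is empty
  ->(instance) { puts "closing #{instance}" }    # instance_destructor: used by dispose_all
)

conn = pool.acquire  # pool is empty, so the creator runs ("connection-1")
pool.release(conn)   # released instances are handed back out by later acquires
conn = pool.acquire  # "connection-1" again; no new instance is created
pool.release(conn)

pool.dispose_all     # destructor runs for every instance still sitting in the pool
```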

data/lib/ldclient-rb/impl/util.rb
@@ -0,0 +1,95 @@
+module LaunchDarkly
+  module Impl
+    module Util
+      def self.bool?(aObject)
+        [true,false].include? aObject
+      end
+
+      def self.current_time_millis
+        (Time.now.to_f * 1000).to_i
+      end
+
+      def self.default_http_headers(sdk_key, config)
+        ret = { "Authorization" => sdk_key, "User-Agent" => "RubyClient/" + LaunchDarkly::VERSION }
+        if config.wrapper_name
+          ret["X-LaunchDarkly-Wrapper"] = config.wrapper_name +
+            (config.wrapper_version ? "/" + config.wrapper_version : "")
+        end
+
+        app_value = application_header_value config.application
+        ret["X-LaunchDarkly-Tags"] = app_value unless app_value.nil? || app_value.empty?
+
+        ret
+      end
+
+      #
+      # Generate an HTTP Header value containing the application meta information (@see #application).
+      #
+      # @return [String]
+      #
+      def self.application_header_value(application)
+        parts = []
+        unless application[:id].empty?
+          parts << "application-id/#{application[:id]}"
+        end
+
+        unless application[:version].empty?
+          parts << "application-version/#{application[:version]}"
+        end
+
+        parts.join(" ")
+      end
+
+      #
+      # @param value [String]
+      # @param name [Symbol]
+      # @param logger [Logger]
+      # @return [String]
+      #
+      def self.validate_application_value(value, name, logger)
+        value = value.to_s
+
+        return "" if value.empty?
+
+        if value.length > 64
+          logger.warn { "Value of application[#{name}] was longer than 64 characters and was discarded" }
+          return ""
+        end
+
+        if /[^a-zA-Z0-9._-]/.match?(value)
+          logger.warn { "Value of application[#{name}] contained invalid characters and was discarded" }
+          return ""
+        end
+
+        value
+      end
+
+      #
+      # @param app [Hash]
+      # @param logger [Logger]
+      # @return [Hash]
+      #
+      def self.validate_application_info(app, logger)
+        {
+          id: validate_application_value(app[:id], :id, logger),
+          version: validate_application_value(app[:version], :version, logger),
+        }
+      end
+
+      #
+      # @param value [String, nil]
+      # @param logger [Logger]
+      # @return [String, nil]
+      #
+      def self.validate_payload_filter_key(value, logger)
+        return nil if value.nil?
+        return value if value.is_a?(String) && /^[a-zA-Z0-9][._\-a-zA-Z0-9]*$/.match?(value)
+
+        logger.warn {
+          "Invalid payload filter configured, full environment will be fetched. Ensure the filter key is not empty and was copied correctly from LaunchDarkly settings."
+        }
+        nil
+      end
+    end
+  end
+end
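
To illustrate the tag header format, a sketch of what `validate_application_info` and `application_header_value` produce for a made-up application id and version, assuming the gem is installed:

```ruby
require "logger"
require "ldclient-rb/impl/util"

logger = Logger.new($stdout)

# Values longer than 64 characters, or containing characters outside [a-zA-Z0-9._-],
# are discarded with a warning and replaced by "".
app = LaunchDarkly::Impl::Util.validate_application_info({ id: "my-service", version: "1.2.3" }, logger)

puts LaunchDarkly::Impl::Util.application_header_value(app)
# => "application-id/my-service application-version/1.2.3"
```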

data/lib/ldclient-rb/in_memory_store.rb
@@ -0,0 +1,100 @@
+require "concurrent/atomics"
+
+module LaunchDarkly
+
+  # These constants denote the types of data that can be stored in the feature store. If
+  # we add another storable data type in the future, as long as it follows the same pattern
+  # (having "key", "version", and "deleted" properties), we only need to add a corresponding
+  # constant here and the existing store should be able to handle it.
+  #
+  # The :priority and :get_dependency_keys properties are used by FeatureStoreDataSetSorter
+  # to ensure data consistency during non-atomic updates.
+
+  # @private
+  FEATURES = Impl::DataStore::DataKind.new(namespace: "features", priority: 1).freeze
+
+  # @private
+  SEGMENTS = Impl::DataStore::DataKind.new(namespace: "segments", priority: 0).freeze
+
+  # @private
+  ALL_KINDS = [FEATURES, SEGMENTS].freeze
+
+  #
+  # Default implementation of the LaunchDarkly client's feature store, using an in-memory
+  # cache. This object holds feature flags and related data received from LaunchDarkly.
+  # Database-backed implementations are available in {LaunchDarkly::Integrations}.
+  #
+  class InMemoryFeatureStore
+    include LaunchDarkly::Interfaces::FeatureStore
+
+    def initialize
+      @items = Hash.new
+      @lock = Concurrent::ReadWriteLock.new
+      @initialized = Concurrent::AtomicBoolean.new(false)
+    end
+
+    def monitoring_enabled?
+      false
+    end
+
+    def get(kind, key)
+      @lock.with_read_lock do
+        coll = @items[kind]
+        f = coll.nil? ? nil : coll[key.to_sym]
+        (f.nil? || f[:deleted]) ? nil : f
+      end
+    end
+
+    def all(kind)
+      @lock.with_read_lock do
+        coll = @items[kind]
+        (coll.nil? ? Hash.new : coll).select { |_k, f| not f[:deleted] }
+      end
+    end
+
+    def delete(kind, key, version)
+      @lock.with_write_lock do
+        coll = @items[kind]
+        if coll.nil?
+          coll = Hash.new
+          @items[kind] = coll
+        end
+        old = coll[key.to_sym]
+
+        if old.nil? || old[:version] < version
+          coll[key.to_sym] = { deleted: true, version: version }
+        end
+      end
+    end
+
+    def init(all_data)
+      @lock.with_write_lock do
+        @items.replace(all_data)
+        @initialized.make_true
+      end
+    end
+
+    def upsert(kind, item)
+      @lock.with_write_lock do
+        coll = @items[kind]
+        if coll.nil?
+          coll = Hash.new
+          @items[kind] = coll
+        end
+        old = coll[item[:key].to_sym]
+
+        if old.nil? || old[:version] < item[:version]
+          coll[item[:key].to_sym] = item
+        end
+      end
+    end
+
+    def initialized?
+      @initialized.value
+    end
+
+    def stop
+      # nothing to do
+    end
+  end
+end
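
A brief sketch of driving the in-memory store directly, assuming the gem is installed; the flag hashes are simplified stand-ins for the real model objects the SDK normally stores:

```ruby
require "ldclient-rb"

store = LaunchDarkly::InMemoryFeatureStore.new

# init replaces everything at once; upsert/delete apply versioned updates.
store.init({ LaunchDarkly::FEATURES => { "my-flag": { key: "my-flag", version: 1 } } })
store.upsert(LaunchDarkly::FEATURES, { key: "my-flag", version: 2 })
store.upsert(LaunchDarkly::FEATURES, { key: "my-flag", version: 1 })  # stale version, ignored

puts store.get(LaunchDarkly::FEATURES, "my-flag")[:version]  # => 2
store.delete(LaunchDarkly::FEATURES, "my-flag", 3)
puts store.get(LaunchDarkly::FEATURES, "my-flag").inspect    # => nil (tombstoned)
```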

data/lib/ldclient-rb/integrations/consul.rb
@@ -0,0 +1,45 @@
+require "ldclient-rb/impl/integrations/consul_impl"
+require "ldclient-rb/integrations/util/store_wrapper"
+
+module LaunchDarkly
+  module Integrations
+    #
+    # Integration with [Consul](https://www.consul.io/).
+    #
+    # Note that in order to use this integration, you must first install the gem `diplomat`.
+    #
+    # @since 5.5.0
+    #
+    module Consul
+      #
+      # Default value for the `prefix` option for {new_feature_store}.
+      #
+      # @return [String] the default key prefix
+      #
+      def self.default_prefix
+        'launchdarkly'
+      end
+
+      #
+      # Creates a Consul-backed persistent feature store.
+      #
+      # To use this method, you must first install the gem `diplomat`. Then, put the object returned by
+      # this method into the `feature_store` property of your client configuration ({LaunchDarkly::Config}).
+      #
+      # @param opts [Hash] the configuration options
+      # @option opts [Hash] :consul_config an instance of `Diplomat::Configuration` to replace the default
+      #   Consul client configuration (note that this is exactly the same as modifying `Diplomat.configuration`)
+      # @option opts [String] :url shortcut for setting the `url` property of the Consul client configuration
+      # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly
+      # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger`
+      # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching
+      # @option opts [Integer] :capacity (1000) maximum number of items in the cache
+      # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object
+      #
+      def self.new_feature_store(opts = {})
+        core = LaunchDarkly::Impl::Integrations::Consul::ConsulFeatureStoreCore.new(opts)
+        LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts)
+      end
+    end
+  end
+end
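
Finally, a configuration sketch for the Consul store, assuming the `diplomat` and `launchdarkly-server-sdk` gems are installed and an SDK key is available in the environment; the URL and prefix values are placeholders:

```ruby
require "ldclient-rb"

# Build the Consul-backed store; omitted options fall back to the documented defaults
# (prefix "launchdarkly", 15-second cache expiration, capacity 1000).
store = LaunchDarkly::Integrations::Consul.new_feature_store(
  url: "http://localhost:8500",
  prefix: "my-app",
  expiration: 30
)

config = LaunchDarkly::Config.new(feature_store: store)
client = LaunchDarkly::LDClient.new(ENV["LAUNCHDARKLY_SDK_KEY"], config)
```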