launchdarkly-server-sdk 6.4.0 → 7.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/ldclient-rb/config.rb +102 -56
- data/lib/ldclient-rb/context.rb +487 -0
- data/lib/ldclient-rb/evaluation_detail.rb +20 -20
- data/lib/ldclient-rb/events.rb +77 -132
- data/lib/ldclient-rb/flags_state.rb +4 -4
- data/lib/ldclient-rb/impl/big_segments.rb +17 -17
- data/lib/ldclient-rb/impl/context.rb +96 -0
- data/lib/ldclient-rb/impl/context_filter.rb +145 -0
- data/lib/ldclient-rb/impl/diagnostic_events.rb +9 -10
- data/lib/ldclient-rb/impl/evaluator.rb +379 -131
- data/lib/ldclient-rb/impl/evaluator_bucketing.rb +40 -41
- data/lib/ldclient-rb/impl/evaluator_helpers.rb +28 -31
- data/lib/ldclient-rb/impl/evaluator_operators.rb +26 -55
- data/lib/ldclient-rb/impl/event_sender.rb +6 -6
- data/lib/ldclient-rb/impl/event_summarizer.rb +12 -7
- data/lib/ldclient-rb/impl/event_types.rb +18 -30
- data/lib/ldclient-rb/impl/integrations/consul_impl.rb +7 -7
- data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +29 -29
- data/lib/ldclient-rb/impl/integrations/file_data_source.rb +8 -8
- data/lib/ldclient-rb/impl/integrations/redis_impl.rb +92 -12
- data/lib/ldclient-rb/impl/model/clause.rb +39 -0
- data/lib/ldclient-rb/impl/model/feature_flag.rb +213 -0
- data/lib/ldclient-rb/impl/model/preprocessed_data.rb +8 -121
- data/lib/ldclient-rb/impl/model/segment.rb +126 -0
- data/lib/ldclient-rb/impl/model/serialization.rb +52 -12
- data/lib/ldclient-rb/impl/repeating_task.rb +1 -1
- data/lib/ldclient-rb/impl/store_data_set_sorter.rb +2 -2
- data/lib/ldclient-rb/impl/unbounded_pool.rb +1 -1
- data/lib/ldclient-rb/impl/util.rb +2 -2
- data/lib/ldclient-rb/in_memory_store.rb +2 -2
- data/lib/ldclient-rb/integrations/consul.rb +1 -1
- data/lib/ldclient-rb/integrations/dynamodb.rb +1 -1
- data/lib/ldclient-rb/integrations/file_data.rb +3 -3
- data/lib/ldclient-rb/integrations/redis.rb +4 -4
- data/lib/ldclient-rb/integrations/test_data/flag_builder.rb +218 -62
- data/lib/ldclient-rb/integrations/test_data.rb +16 -12
- data/lib/ldclient-rb/integrations/util/store_wrapper.rb +9 -9
- data/lib/ldclient-rb/interfaces.rb +14 -14
- data/lib/ldclient-rb/ldclient.rb +94 -144
- data/lib/ldclient-rb/memoized_value.rb +1 -1
- data/lib/ldclient-rb/non_blocking_thread_pool.rb +1 -1
- data/lib/ldclient-rb/polling.rb +2 -2
- data/lib/ldclient-rb/reference.rb +274 -0
- data/lib/ldclient-rb/requestor.rb +5 -5
- data/lib/ldclient-rb/stream.rb +7 -8
- data/lib/ldclient-rb/util.rb +4 -19
- data/lib/ldclient-rb/version.rb +1 -1
- data/lib/ldclient-rb.rb +2 -3
- metadata +34 -17
- data/lib/ldclient-rb/file_data_source.rb +0 -23
- data/lib/ldclient-rb/newrelic.rb +0 -17
- data/lib/ldclient-rb/redis_store.rb +0 -88
- data/lib/ldclient-rb/user_filter.rb +0 -52
@@ -2,7 +2,7 @@ require "ldclient-rb/impl/evaluator_helpers"
 
 module LaunchDarkly
   module Impl
-    module
+    module Model
       #
       # Container for a precomputed result that includes a specific variation index and value, an
       # evaluation reason, and optionally an alternate evaluation reason that corresponds to the
@@ -18,7 +18,7 @@ module LaunchDarkly
 
         # @param in_experiment [Boolean] indicates whether we want the result to include
         # "inExperiment: true" in the reason or not
-        # @return [EvaluationDetail]
+        # @return [LaunchDarkly::EvaluationDetail]
         def get_result(in_experiment = false)
           in_experiment ? @in_experiment_result : @regular_result
         end
@@ -35,135 +35,22 @@ module LaunchDarkly
         # @param index [Integer] the variation index
         # @param in_experiment [Boolean] indicates whether we want the result to include
         # "inExperiment: true" in the reason or not
+        # @return [LaunchDarkly::EvaluationDetail]
         def for_variation(index, in_experiment)
           if index < 0 || index >= @factories.length
             EvaluationDetail.new(nil, nil, EvaluationReason.error(EvaluationReason::ERROR_MALFORMED_FLAG))
           else
             @factories[index].get_result(in_experiment)
           end
-        end
-      end
-
-      # Base class for all of the preprocessed data classes we embed in our data model. Using this class
-      # ensures that none of its properties will be included in JSON representations. It also overrides
-      # == to say that it is always equal with another instance of the same class; equality tests on
-      # this class are only ever done in test code, and we want the contents of these classes to be
-      # ignored in test code unless we are looking at specific attributes.
-      class PreprocessedDataBase
-        def as_json(*)
-          nil
-        end
-
-        def to_json(*a)
-          "null"
-        end
-
-        def ==(other)
-          other.class == self.class
-        end
-      end
-
-      class FlagPreprocessed < PreprocessedDataBase
-        def initialize(off_result, fallthrough_factory)
-          @off_result = off_result
-          @fallthrough_factory = fallthrough_factory
-        end
-
-        # @return [EvalResultsForSingleVariation]
-        attr_reader :off_result
-        # @return [EvalResultFactoryMultiVariations]
-        attr_reader :fallthrough_factory
-      end
-
-      class PrerequisitePreprocessed < PreprocessedDataBase
-        def initialize(failed_result)
-          @failed_result = failed_result
-        end
-
-        # @return [EvalResultsForSingleVariation]
-        attr_reader :failed_result
-      end
-
-      class TargetPreprocessed < PreprocessedDataBase
-        def initialize(match_result)
-          @match_result = match_result
         end
-
-        # @return [EvalResultsForSingleVariation]
-        attr_reader :match_result
-      end
-
-      class FlagRulePreprocessed < PreprocessedDataBase
-        def initialize(all_match_results)
-          @all_match_results = all_match_results
-        end
-
-        # @return [EvalResultsForSingleVariation]
-        attr_reader :all_match_results
       end
 
       class Preprocessor
-
-
-
-
-        def
-          if kind.eql? FEATURES
-            preprocess_flag!(item)
-          elsif kind.eql? SEGMENTS
-            preprocess_segment!(item)
-          end
-        end
-
-        def preprocess_all_items!(kind, items_map)
-          return items_map if !items_map
-          items_map.each do |key, item|
-            preprocess_item!(kind, item)
-          end
-        end
-
-        def preprocess_flag!(flag)
-          flag[:_preprocessed] = FlagPreprocessed.new(
-            EvaluatorHelpers.off_result(flag),
-            precompute_multi_variation_results(flag, EvaluationReason::fallthrough(false), EvaluationReason::fallthrough(true))
-          )
-          (flag[:prerequisites] || []).each do |prereq|
-            preprocess_prerequisite!(prereq, flag)
-          end
-          (flag[:targets] || []).each do |target|
-            preprocess_target!(target, flag)
-          end
-          rules = flag[:rules]
-          (rules || []).each_index do |index|
-            preprocess_flag_rule!(rules[index], index, flag)
-          end
-        end
-
-        def preprocess_segment!(segment)
-          # nothing to do for segments currently
-        end
-
-        private def preprocess_prerequisite!(prereq, flag)
-          prereq[:_preprocessed] = PrerequisitePreprocessed.new(
-            EvaluatorHelpers.prerequisite_failed_result(prereq, flag, @logger)
-          )
-        end
-
-        private def preprocess_target!(target, flag)
-          target[:_preprocessed] = TargetPreprocessed.new(
-            EvaluatorHelpers.target_match_result(target, flag, @logger)
-          )
-        end
-
-        private def preprocess_flag_rule!(rule, index, flag)
-          match_reason = EvaluationReason::rule_match(index, rule[:id])
-          match_reason_in_experiment = EvaluationReason::rule_match(index, rule[:id], true)
-          rule[:_preprocessed] = FlagRulePreprocessed.new(
-            precompute_multi_variation_results(flag, match_reason, match_reason_in_experiment)
-          )
-        end
-
-        private def precompute_multi_variation_results(flag, regular_reason, in_experiment_reason)
+        # @param flag [LaunchDarkly::Impl::Model::FeatureFlag]
+        # @param regular_reason [LaunchDarkly::EvaluationReason]
+        # @param in_experiment_reason [LaunchDarkly::EvaluationReason]
+        # @return [EvalResultFactoryMultiVariations]
+        def self.precompute_multi_variation_results(flag, regular_reason, in_experiment_reason)
          factories = []
          vars = flag[:variations] || []
          vars.each_index do |index|
@@ -0,0 +1,126 @@
+require "ldclient-rb/impl/model/clause"
+require "ldclient-rb/impl/model/preprocessed_data"
+require "set"
+
+# See serialization.rb for implementation notes on the data model classes.
+
+module LaunchDarkly
+  module Impl
+    module Model
+      class Segment
+        # @param data [Hash]
+        # @param logger [Logger|nil]
+        def initialize(data, logger = nil)
+          raise ArgumentError, "expected hash but got #{data.class}" unless data.is_a?(Hash)
+          @data = data
+          @key = data[:key]
+          @version = data[:version]
+          @deleted = !!data[:deleted]
+          return if @deleted
+          @included = data[:included] || []
+          @excluded = data[:excluded] || []
+          @included_contexts = (data[:includedContexts] || []).map do |target_data|
+            SegmentTarget.new(target_data)
+          end
+          @excluded_contexts = (data[:excludedContexts] || []).map do |target_data|
+            SegmentTarget.new(target_data)
+          end
+          @rules = (data[:rules] || []).map do |rule_data|
+            SegmentRule.new(rule_data, logger)
+          end
+          @unbounded = !!data[:unbounded]
+          @unbounded_context_kind = data[:unboundedContextKind] || LDContext::KIND_DEFAULT
+          @generation = data[:generation]
+          @salt = data[:salt]
+        end
+
+        # @return [Hash]
+        attr_reader :data
+        # @return [String]
+        attr_reader :key
+        # @return [Integer]
+        attr_reader :version
+        # @return [Boolean]
+        attr_reader :deleted
+        # @return [Array<String>]
+        attr_reader :included
+        # @return [Array<String>]
+        attr_reader :excluded
+        # @return [Array<LaunchDarkly::Impl::Model::SegmentTarget>]
+        attr_reader :included_contexts
+        # @return [Array<LaunchDarkly::Impl::Model::SegmentTarget>]
+        attr_reader :excluded_contexts
+        # @return [Array<SegmentRule>]
+        attr_reader :rules
+        # @return [Boolean]
+        attr_reader :unbounded
+        # @return [String]
+        attr_reader :unbounded_context_kind
+        # @return [Integer|nil]
+        attr_reader :generation
+        # @return [String]
+        attr_reader :salt
+
+        # This method allows us to read properties of the object as if it's just a hash. Currently this is
+        # necessary because some data store logic is still written to expect hashes; we can remove it once
+        # we migrate entirely to using attributes of the class.
+        def [](key)
+          @data[key]
+        end
+
+        def ==(other)
+          other.is_a?(Segment) && other.data == self.data
+        end
+
+        def as_json(*) # parameter is unused, but may be passed if we're using the json gem
+          @data
+        end
+
+        # Same as as_json, but converts the JSON structure into a string.
+        def to_json(*a)
+          as_json.to_json(a)
+        end
+      end
+
+      class SegmentTarget
+        def initialize(data)
+          @data = data
+          @context_kind = data[:contextKind]
+          @values = Set.new(data[:values] || [])
+        end
+
+        # @return [Hash]
+        attr_reader :data
+        # @return [String]
+        attr_reader :context_kind
+        # @return [Set]
+        attr_reader :values
+      end
+
+      class SegmentRule
+        def initialize(data, logger)
+          @data = data
+          @clauses = (data[:clauses] || []).map do |clause_data|
+            Clause.new(clause_data, logger)
+          end
+          @weight = data[:weight]
+          @bucket_by = data[:bucketBy]
+          @rollout_context_kind = data[:rolloutContextKind]
+        end
+
+        # @return [Hash]
+        attr_reader :data
+        # @return [Array<LaunchDarkly::Impl::Model::Clause>]
+        attr_reader :clauses
+        # @return [Integer|nil]
+        attr_reader :weight
+        # @return [String|nil]
+        attr_reader :bucket_by
+        # @return [String|nil]
+        attr_reader :rollout_context_kind
+      end
+
+      # Clause is defined in its own file because clauses are used by both flags and segments
+    end
+  end
+end
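The `Segment` class above is internal (`Impl::Model`), but the constructor's behavior is visible in the hunk: it wraps the parsed JSON hash and exposes typed readers while still allowing hash-style access. A minimal sketch, assuming the gem's top-level require loads the model classes; the segment hash below is made up for illustration:

    require "ldclient-rb"

    # Illustrative segment data only -- real segment JSON comes from LaunchDarkly
    # or a data store, already parsed with symbolized keys.
    segment_data = {
      key: "beta-testers",
      version: 1,
      included: ["user-key-1"],
      includedContexts: [{ contextKind: "org", values: ["org-key-1"] }],
      salt: "abc123",
    }

    segment = LaunchDarkly::Impl::Model::Segment.new(segment_data)

    segment.key                          # => "beta-testers"
    segment.included_contexts[0].values  # => Set containing "org-key-1"
    segment[:version]                    # => 1 (hash-style access still supported)
    segment.to_json                      # re-serializes the retained hash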
@@ -1,31 +1,71 @@
+require "ldclient-rb/impl/model/feature_flag"
 require "ldclient-rb/impl/model/preprocessed_data"
+require "ldclient-rb/impl/model/segment"
+
+# General implementation notes about the data model classes in LaunchDarkly::Impl::Model--
+#
+# As soon as we receive flag/segment JSON data from LaunchDarkly (or, read it from a database), we
+# transform it into the model classes FeatureFlag, Segment, etc. The constructor of each of these
+# classes takes a hash (the parsed JSON), and transforms it into an internal representation that
+# is more efficient for evaluations.
+#
+# Validation works as follows:
+# - A property value that is of the correct type, but is invalid for other reasons (for example,
+# if a flag rule refers to variation index 5, but there are only 2 variations in the flag), does
+# not prevent the flag from being parsed and stored. It does cause a warning to be logged, if a
+# logger was passed to the constructor.
+# - If a value is completely invalid for the schema, the constructor may throw an
+# exception, causing the whole data set to be rejected. This is consistent with the behavior of
+# the strongly-typed SDKs.
+#
+# Currently, the model classes also retain the original hash of the parsed JSON. This is because
+# we may need to re-serialize them to JSON, and building the JSON on the fly would be very
+# inefficient, so each model class has a to_json method that just returns the same Hash. If we
+# are able in the future to either use a custom streaming serializer, or pass the JSON data
+# straight through from LaunchDarkly to a database instead of re-serializing, we could stop
+# retaining this data.
 
 module LaunchDarkly
   module Impl
     module Model
       # Abstraction of deserializing a feature flag or segment that was read from a data store or
       # received from LaunchDarkly.
-
-
-
-
-
+      #
+      # SDK code outside of Impl::Model should use this method instead of calling the model class
+      # constructors directly, so as not to rely on implementation details.
+      #
+      # @param kind [Hash] normally either FEATURES or SEGMENTS
+      # @param input [object] a JSON string or a parsed hash (or a data model object, in which case
+      # we'll just return the original object)
+      # @param logger [Logger|nil] logs warnings if there are any data validation problems
+      # @return [Object] the flag or segment (or, for an unknown data kind, the data as a hash)
+      def self.deserialize(kind, input, logger = nil)
+        return nil if input.nil?
+        return input if !input.is_a?(String) && !input.is_a?(Hash)
+        data = input.is_a?(Hash) ? input : JSON.parse(input, symbolize_names: true)
+        case kind
+        when FEATURES
+          FeatureFlag.new(data, logger)
+        when SEGMENTS
+          Segment.new(data, logger)
+        else
+          data
+        end
       end
 
       # Abstraction of serializing a feature flag or segment that will be written to a data store.
-      # Currently we just call to_json
+      # Currently we just call to_json, but SDK code outside of Impl::Model should use this method
+      # instead of to_json, so as not to rely on implementation details.
      def self.serialize(kind, item)
        item.to_json
      end
 
      # Translates a { flags: ..., segments: ... } object received from LaunchDarkly to the data store format.
      def self.make_all_store_data(received_data, logger = nil)
-
-
-
-
-        preprocessor.preprocess_all_items!(SEGMENTS, segments)
-        { FEATURES => flags, SEGMENTS => segments }
+        {
+          FEATURES => (received_data[:flags] || {}).transform_values { |data| FeatureFlag.new(data, logger) },
+          SEGMENTS => (received_data[:segments] || {}).transform_values { |data| Segment.new(data, logger) },
+        }
      end
    end
  end
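`Model.deserialize` above accepts either a JSON string or an already-parsed hash and routes it to the matching model constructor. A rough usage sketch of this internal entry point; the flag JSON is illustrative and relies on the constructor filling in defaults for omitted optional fields, as the notes in the hunk describe:

    require "json"
    require "ldclient-rb"

    # Illustrative flag definition; real ones come from LaunchDarkly or a data store.
    flag_json = '{"key": "my-flag", "version": 3, "on": false, "variations": [true, false]}'

    # FEATURES / SEGMENTS are the SDK's data-kind constants (the namespace/priority
    # hashes edited further down in this diff).
    flag = LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::FEATURES, flag_json)
    flag.class  # => LaunchDarkly::Impl::Model::FeatureFlag

    # A hash read from a database deserializes the same way, and serialize just
    # re-emits the retained JSON structure as a string.
    parsed = JSON.parse(flag_json, symbolize_names: true)
    flag2 = LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::FEATURES, parsed)
    LaunchDarkly::Impl::Model.serialize(LaunchDarkly::FEATURES, flag2)  # => JSON string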
@@ -33,7 +33,7 @@ module LaunchDarkly
        return input if dependency_fn.nil? || input.empty?
        remaining_items = input.clone
        items_out = {}
-
+        until remaining_items.empty?
          # pick a random item that hasn't been updated yet
          key, item = remaining_items.first
          self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out)
@@ -46,7 +46,7 @@ module LaunchDarkly
        remaining_items.delete(item_key)  # we won't need to visit this item again
        dependency_fn.call(item).each do |dep_key|
          dep_item = remaining_items[dep_key.to_sym]
-          self.add_with_dependencies_first(dep_item, dependency_fn, remaining_items, items_out)
+          self.add_with_dependencies_first(dep_item, dependency_fn, remaining_items, items_out) unless dep_item.nil?
        end
        items_out[item_key] = item
      end
@@ -25,7 +25,7 @@ module LaunchDarkly
 
      def dispose_all
        @lock.synchronize {
-          @pool.map { |instance| @instance_destructor.call(instance) }
+          @pool.map { |instance| @instance_destructor.call(instance) } unless @instance_destructor.nil?
          @pool.clear()
        }
      end
@@ -1,7 +1,7 @@
 module LaunchDarkly
   module Impl
     module Util
-      def self.
+      def self.bool?(aObject)
        [true,false].include? aObject
      end
 
@@ -56,7 +56,7 @@ module LaunchDarkly
          return ""
        end
 
-        if
+        if /[^a-zA-Z0-9._-]/.match?(value)
          logger.warn { "Value of application[#{name}] contained invalid characters and was discarded" }
          return ""
        end
@@ -14,13 +14,13 @@ module LaunchDarkly
   FEATURES = {
     namespace: "features",
     priority: 1, # that is, features should be stored after segments
-    get_dependency_keys: lambda { |flag| (flag[:prerequisites] || []).map { |p| p[:key] } }
+    get_dependency_keys: lambda { |flag| (flag[:prerequisites] || []).map { |p| p[:key] } },
   }.freeze
 
   # @private
   SEGMENTS = {
     namespace: "segments",
-    priority: 0
+    priority: 0,
   }.freeze
 
   #
@@ -38,7 +38,7 @@ module LaunchDarkly
      #
      def self.new_feature_store(opts = {})
        core = LaunchDarkly::Impl::Integrations::Consul::ConsulFeatureStoreCore.new(opts)
-
+        LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts)
      end
    end
  end
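As with the other persistent store integrations, the object returned by `new_feature_store` above plugs into the client configuration. A hedged wiring sketch; the `prefix` and `expiration` option names are assumptions about the Consul integration's options, not taken from this diff, and the Consul client gem must be installed separately:

    require "ldclient-rb"

    store = LaunchDarkly::Integrations::Consul.new_feature_store(
      prefix: "launchdarkly",  # assumed option: key prefix used in Consul
      expiration: 30           # assumed option: local cache TTL in seconds
    )

    config = LaunchDarkly::Config.new(feature_store: store)
    client = LaunchDarkly::LDClient.new("your-sdk-key", config)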
@@ -54,7 +54,7 @@ module LaunchDarkly
      #
      # Creates a DynamoDB-backed Big Segment store.
      #
-      # Big Segments are a specific type of
+      # Big Segments are a specific type of segments. For more information, read the LaunchDarkly
      # documentation: https://docs.launchdarkly.com/home/users/big-segments
      #
      # To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or
@@ -25,7 +25,7 @@ module LaunchDarkly
    #
    # - `flags`: Feature flag definitions.
    # - `flagValues`: Simplified feature flags that contain only a value.
-    # - `segments`:
+    # - `segments`: Context segment definitions.
    #
    # The format of the data in `flags` and `segments` is defined by the LaunchDarkly application
    # and is subject to change. Rather than trying to construct these objects yourself, it is simpler
@@ -78,7 +78,7 @@ module LaunchDarkly
    # same flag key or segment key more than once, either in a single file or across multiple files.
    #
    # If the data source encounters any error in any file-- malformed content, a missing file, or a
-    # duplicate key-- it will not load flags from any of the files.
+    # duplicate key-- it will not load flags from any of the files.
    #
    module FileData
      #
@@ -100,7 +100,7 @@ module LaunchDarkly
      # @return an object that can be stored in {Config#data_source}
      #
      def self.data_source(options={})
-
+        lambda { |sdk_key, config|
          Impl::Integrations::FileDataSourceImpl.new(config.feature_store, config.logger, options) }
      end
    end
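`FileData.data_source` returns a lambda that the client will call with the SDK key and config, as the hunk above shows. A short usage sketch; the `paths` option name and the file name are assumptions for illustration:

    require "ldclient-rb"

    data_source = LaunchDarkly::Integrations::FileData.data_source(
      paths: ["./flags.json"]  # assumed option: local flag file(s) for development/testing
    )

    config = LaunchDarkly::Config.new(
      data_source: data_source,
      send_events: false  # usually disabled when flags come from local files
    )
    client = LaunchDarkly::LDClient.new("fake-sdk-key", config)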
@@ -1,4 +1,4 @@
-require "ldclient-rb/
+require "ldclient-rb/impl/integrations/redis_impl"
 
 module LaunchDarkly
   module Integrations
@@ -59,13 +59,13 @@ module LaunchDarkly
      # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object
      #
      def self.new_feature_store(opts = {})
-
+        LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStore.new(opts)
      end
 
      #
      # Creates a Redis-backed Big Segment store.
      #
-      # Big Segments are a specific type of
+      # Big Segments are a specific type of segments. For more information, read the LaunchDarkly
      # documentation: https://docs.launchdarkly.com/home/users/big-segments
      #
      # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then,
@@ -91,7 +91,7 @@ module LaunchDarkly
      # @return [LaunchDarkly::Interfaces::BigSegmentStore] a Big Segment store object
      #
      def self.new_big_segment_store(opts)
-
+        LaunchDarkly::Impl::Integrations::Redis::RedisBigSegmentStore.new(opts)
      end
    end
  end
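Both Redis factories shown above take an options hash and return store objects that attach to the client configuration. A hedged sketch; the `redis_url` and `prefix` option names, and the `BigSegmentsConfig` wiring, are assumptions based on common SDK usage rather than anything confirmed by this diff:

    require "ldclient-rb"
    # The redis and connection_pool gems must be installed, as noted above.

    feature_store = LaunchDarkly::Integrations::Redis.new_feature_store(
      redis_url: "redis://localhost:6379/0",  # assumed option name
      prefix: "launchdarkly"                  # assumed option name
    )

    big_segment_store = LaunchDarkly::Integrations::Redis.new_big_segment_store(
      redis_url: "redis://localhost:6379/0"
    )

    config = LaunchDarkly::Config.new(
      feature_store: feature_store,
      big_segments: LaunchDarkly::BigSegmentsConfig.new(store: big_segment_store)
    )
    client = LaunchDarkly::LDClient.new("your-sdk-key", config)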