launchdarkly-server-sdk 6.1.1 → 6.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +4 -5
- data/lib/ldclient-rb/config.rb +118 -4
- data/lib/ldclient-rb/evaluation_detail.rb +104 -14
- data/lib/ldclient-rb/events.rb +201 -107
- data/lib/ldclient-rb/file_data_source.rb +9 -300
- data/lib/ldclient-rb/flags_state.rb +23 -12
- data/lib/ldclient-rb/impl/big_segments.rb +117 -0
- data/lib/ldclient-rb/impl/diagnostic_events.rb +1 -1
- data/lib/ldclient-rb/impl/evaluator.rb +116 -62
- data/lib/ldclient-rb/impl/evaluator_bucketing.rb +22 -9
- data/lib/ldclient-rb/impl/evaluator_helpers.rb +53 -0
- data/lib/ldclient-rb/impl/evaluator_operators.rb +1 -1
- data/lib/ldclient-rb/impl/event_summarizer.rb +63 -0
- data/lib/ldclient-rb/impl/event_types.rb +90 -0
- data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +82 -18
- data/lib/ldclient-rb/impl/integrations/file_data_source.rb +212 -0
- data/lib/ldclient-rb/impl/integrations/redis_impl.rb +84 -31
- data/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb +40 -0
- data/lib/ldclient-rb/impl/model/preprocessed_data.rb +177 -0
- data/lib/ldclient-rb/impl/model/serialization.rb +7 -37
- data/lib/ldclient-rb/impl/repeating_task.rb +47 -0
- data/lib/ldclient-rb/impl/util.rb +62 -1
- data/lib/ldclient-rb/integrations/consul.rb +8 -1
- data/lib/ldclient-rb/integrations/dynamodb.rb +48 -3
- data/lib/ldclient-rb/integrations/file_data.rb +108 -0
- data/lib/ldclient-rb/integrations/redis.rb +42 -2
- data/lib/ldclient-rb/integrations/test_data/flag_builder.rb +438 -0
- data/lib/ldclient-rb/integrations/test_data.rb +209 -0
- data/lib/ldclient-rb/integrations/util/store_wrapper.rb +5 -0
- data/lib/ldclient-rb/integrations.rb +2 -51
- data/lib/ldclient-rb/interfaces.rb +152 -2
- data/lib/ldclient-rb/ldclient.rb +131 -33
- data/lib/ldclient-rb/polling.rb +22 -41
- data/lib/ldclient-rb/requestor.rb +3 -3
- data/lib/ldclient-rb/stream.rb +4 -3
- data/lib/ldclient-rb/util.rb +10 -1
- data/lib/ldclient-rb/version.rb +1 -1
- data/lib/ldclient-rb.rb +0 -1
- metadata +35 -132
- data/.circleci/config.yml +0 -40
- data/.github/ISSUE_TEMPLATE/bug_report.md +0 -37
- data/.github/ISSUE_TEMPLATE/config.yml +0 -5
- data/.github/ISSUE_TEMPLATE/feature_request.md +0 -20
- data/.github/pull_request_template.md +0 -21
- data/.gitignore +0 -16
- data/.hound.yml +0 -2
- data/.ldrelease/build-docs.sh +0 -18
- data/.ldrelease/circleci/linux/execute.sh +0 -18
- data/.ldrelease/circleci/mac/execute.sh +0 -18
- data/.ldrelease/circleci/template/build.sh +0 -29
- data/.ldrelease/circleci/template/publish.sh +0 -23
- data/.ldrelease/circleci/template/set-gem-home.sh +0 -7
- data/.ldrelease/circleci/template/test.sh +0 -10
- data/.ldrelease/circleci/template/update-version.sh +0 -8
- data/.ldrelease/circleci/windows/execute.ps1 +0 -19
- data/.ldrelease/config.yml +0 -29
- data/.rspec +0 -2
- data/.rubocop.yml +0 -600
- data/.simplecov +0 -4
- data/CHANGELOG.md +0 -351
- data/CODEOWNERS +0 -1
- data/CONTRIBUTING.md +0 -37
- data/Gemfile +0 -3
- data/azure-pipelines.yml +0 -51
- data/docs/Makefile +0 -26
- data/docs/index.md +0 -9
- data/launchdarkly-server-sdk.gemspec +0 -45
- data/lib/ldclient-rb/event_summarizer.rb +0 -55
- data/lib/ldclient-rb/impl/event_factory.rb +0 -120
- data/spec/config_spec.rb +0 -63
- data/spec/diagnostic_events_spec.rb +0 -163
- data/spec/evaluation_detail_spec.rb +0 -135
- data/spec/event_sender_spec.rb +0 -197
- data/spec/event_summarizer_spec.rb +0 -63
- data/spec/events_spec.rb +0 -607
- data/spec/expiring_cache_spec.rb +0 -76
- data/spec/feature_store_spec_base.rb +0 -213
- data/spec/file_data_source_spec.rb +0 -283
- data/spec/fixtures/feature.json +0 -37
- data/spec/fixtures/feature1.json +0 -36
- data/spec/fixtures/user.json +0 -9
- data/spec/flags_state_spec.rb +0 -81
- data/spec/http_util.rb +0 -132
- data/spec/impl/evaluator_bucketing_spec.rb +0 -111
- data/spec/impl/evaluator_clause_spec.rb +0 -55
- data/spec/impl/evaluator_operators_spec.rb +0 -141
- data/spec/impl/evaluator_rule_spec.rb +0 -96
- data/spec/impl/evaluator_segment_spec.rb +0 -125
- data/spec/impl/evaluator_spec.rb +0 -305
- data/spec/impl/evaluator_spec_base.rb +0 -75
- data/spec/impl/model/serialization_spec.rb +0 -41
- data/spec/in_memory_feature_store_spec.rb +0 -12
- data/spec/integrations/consul_feature_store_spec.rb +0 -40
- data/spec/integrations/dynamodb_feature_store_spec.rb +0 -103
- data/spec/integrations/store_wrapper_spec.rb +0 -276
- data/spec/launchdarkly-server-sdk_spec.rb +0 -13
- data/spec/launchdarkly-server-sdk_spec_autoloadtest.rb +0 -9
- data/spec/ldclient_end_to_end_spec.rb +0 -157
- data/spec/ldclient_spec.rb +0 -643
- data/spec/newrelic_spec.rb +0 -5
- data/spec/polling_spec.rb +0 -120
- data/spec/redis_feature_store_spec.rb +0 -121
- data/spec/requestor_spec.rb +0 -196
- data/spec/segment_store_spec_base.rb +0 -95
- data/spec/simple_lru_cache_spec.rb +0 -24
- data/spec/spec_helper.rb +0 -9
- data/spec/store_spec.rb +0 -10
- data/spec/stream_spec.rb +0 -45
- data/spec/user_filter_spec.rb +0 -91
- data/spec/util_spec.rb +0 -17
- data/spec/version_spec.rb +0 -7
@@ -0,0 +1,177 @@
|
|
1
|
+
require "ldclient-rb/impl/evaluator_helpers"
|
2
|
+
|
3
|
+
module LaunchDarkly
|
4
|
+
module Impl
|
5
|
+
module DataModelPreprocessing
|
6
|
+
#
|
7
|
+
# Container for a precomputed result that includes a specific variation index and value, an
|
8
|
+
# evaluation reason, and optionally an alternate evaluation reason that corresponds to the
|
9
|
+
# "in experiment" state.
|
10
|
+
#
|
11
|
+
class EvalResultsForSingleVariation
  # Precomputes a pair of EvaluationDetail instances for a fixed value and variation
  # index: one carrying the regular evaluation reason, and one carrying the
  # "in experiment" variant of the reason. When no experiment reason is supplied,
  # both lookups yield the same regular result.
  #
  # @param value the variation value
  # @param variation_index [Integer] the variation index
  # @param regular_reason [EvaluationReason] reason used outside of experiments
  # @param in_experiment_reason [EvaluationReason, nil] reason used when in an experiment
  def initialize(value, variation_index, regular_reason, in_experiment_reason = nil)
    @regular_result = EvaluationDetail.new(value, variation_index, regular_reason)
    @in_experiment_result =
      if in_experiment_reason
        EvaluationDetail.new(value, variation_index, in_experiment_reason)
      else
        @regular_result
      end
  end

  # @param in_experiment [Boolean] indicates whether we want the result to include
  #   "inExperiment: true" in the reason or not
  # @return [EvaluationDetail] the appropriate precomputed result
  def get_result(in_experiment = false)
    if in_experiment
      @in_experiment_result
    else
      @regular_result
    end
  end
end
|
26
|
+
|
27
|
+
#
|
28
|
+
# Container for a set of precomputed results, one for each possible flag variation.
|
29
|
+
#
|
30
|
+
class EvalResultFactoryMultiVariations
  # Wraps one precomputed-result factory per possible flag variation.
  #
  # @param variation_factories [Array<EvalResultsForSingleVariation>] one entry per variation index
  def initialize(variation_factories)
    @factories = variation_factories
  end

  # Looks up the precomputed result for a variation index. An index outside the
  # valid range yields a MALFORMED_FLAG error result instead of raising.
  #
  # @param index [Integer] the variation index
  # @param in_experiment [Boolean] indicates whether we want the result to include
  #   "inExperiment: true" in the reason or not
  def for_variation(index, in_experiment)
    valid = index >= 0 && index < @factories.length
    if valid
      @factories[index].get_result(in_experiment)
    else
      EvaluationDetail.new(nil, nil, EvaluationReason.error(EvaluationReason::ERROR_MALFORMED_FLAG))
    end
  end
end
|
46
|
+
|
47
|
+
# Base class for all of the preprocessed data classes we embed in our data model. Using this class
|
48
|
+
# ensures that none of its properties will be included in JSON representations. It also overrides
|
49
|
+
# == to say that it is always equal with another instance of the same class; equality tests on
|
50
|
+
# this class are only ever done in test code, and we want the contents of these classes to be
|
51
|
+
# ignored in test code unless we are looking at specific attributes.
|
52
|
+
# Base class for all of the preprocessed data classes we embed in our data model.
# Serializing an instance always produces JSON null, so none of its properties can
# leak into JSON representations of the model. It also defines == so that any two
# instances of the same class compare equal; equality tests on this class are only
# ever done in test code, where the contents should be ignored unless a specific
# attribute is being inspected.
class PreprocessedDataBase
  # Always nil, so serializers that call as_json emit null.
  def as_json(*)
    nil
  end

  # Always the literal JSON null token.
  def to_json(*_args)
    "null"
  end

  # Equal whenever the other object is an instance of the exact same class.
  def ==(other)
    other.instance_of?(self.class)
  end
end
|
65
|
+
|
66
|
+
# Precomputed data attached to a feature flag: the result used when the flag is
# off, and the per-variation result factory used for fallthrough evaluation.
class FlagPreprocessed < PreprocessedDataBase
  # @return [EvalResultsForSingleVariation] result used when the flag is off
  attr_reader :off_result
  # @return [EvalResultFactoryMultiVariations] per-variation results for fallthrough
  attr_reader :fallthrough_factory

  def initialize(off_result, fallthrough_factory)
    @off_result = off_result
    @fallthrough_factory = fallthrough_factory
  end
end
|
77
|
+
|
78
|
+
# Precomputed data attached to a prerequisite entry: the result used when the
# prerequisite check fails.
class PrerequisitePreprocessed < PreprocessedDataBase
  # @return [EvalResultsForSingleVariation] result used when the prerequisite fails
  attr_reader :failed_result

  def initialize(failed_result)
    @failed_result = failed_result
  end
end
|
86
|
+
|
87
|
+
# Precomputed data attached to a user-target entry: the result used when the
# target matches.
class TargetPreprocessed < PreprocessedDataBase
  # @return [EvalResultsForSingleVariation] result used when the target matches
  attr_reader :match_result

  def initialize(match_result)
    @match_result = match_result
  end
end
|
95
|
+
|
96
|
+
# Precomputed data attached to a flag rule: the per-variation result factory used
# when the rule matches.
class FlagRulePreprocessed < PreprocessedDataBase
  def initialize(all_match_results)
    @all_match_results = all_match_results
  end

  # Fixed: the original @return tag claimed EvalResultsForSingleVariation, but this
  # attribute is always assigned the value of precompute_multi_variation_results,
  # which returns an EvalResultFactoryMultiVariations.
  # @return [EvalResultFactoryMultiVariations] per-variation results for this rule
  attr_reader :all_match_results
end
|
104
|
+
|
105
|
+
# Walks deserialized flag/segment data and attaches the precomputed
# "_preprocessed" objects that the evaluator consults at evaluation time.
class Preprocessor
  # @param logger [Logger, nil] passed through to the EvaluatorHelpers computations
  def initialize(logger = nil)
    @logger = logger
  end

  # Dispatches preprocessing by data kind; kinds other than FEATURES/SEGMENTS are ignored.
  def preprocess_item!(kind, item)
    if kind.eql?(FEATURES)
      preprocess_flag!(item)
    elsif kind.eql?(SEGMENTS)
      preprocess_segment!(item)
    end
  end

  # Preprocesses every item in a key-to-item map; returns the map (nil stays nil).
  def preprocess_all_items!(kind, items_map)
    return items_map unless items_map
    items_map.each do |_key, item|
      preprocess_item!(kind, item)
    end
  end

  # Attaches precomputed results to the flag itself and to each of its
  # prerequisites, targets, and rules.
  def preprocess_flag!(flag)
    flag[:_preprocessed] = FlagPreprocessed.new(
      EvaluatorHelpers.off_result(flag),
      precompute_multi_variation_results(flag, EvaluationReason::fallthrough(false), EvaluationReason::fallthrough(true))
    )
    (flag[:prerequisites] || []).each { |prereq| preprocess_prerequisite!(prereq, flag) }
    (flag[:targets] || []).each { |target| preprocess_target!(target, flag) }
    (flag[:rules] || []).each_with_index { |rule, i| preprocess_flag_rule!(rule, i, flag) }
  end

  def preprocess_segment!(segment)
    # Segments currently have no derived data to precompute.
  end

  # Precomputes the failure result for one prerequisite entry.
  private def preprocess_prerequisite!(prereq, flag)
    failed = EvaluatorHelpers.prerequisite_failed_result(prereq, flag, @logger)
    prereq[:_preprocessed] = PrerequisitePreprocessed.new(failed)
  end

  # Precomputes the match result for one target entry.
  private def preprocess_target!(target, flag)
    matched = EvaluatorHelpers.target_match_result(target, flag, @logger)
    target[:_preprocessed] = TargetPreprocessed.new(matched)
  end

  # Precomputes the regular and "in experiment" rule-match results for one rule.
  private def preprocess_flag_rule!(rule, index, flag)
    regular = EvaluationReason::rule_match(index, rule[:id])
    experimental = EvaluationReason::rule_match(index, rule[:id], true)
    rule[:_preprocessed] = FlagRulePreprocessed.new(
      precompute_multi_variation_results(flag, regular, experimental)
    )
  end

  # Builds one EvalResultsForSingleVariation per flag variation and wraps them.
  private def precompute_multi_variation_results(flag, regular_reason, in_experiment_reason)
    variations = flag[:variations] || []
    factories = variations.each_with_index.map do |value, i|
      EvalResultsForSingleVariation.new(value, i, regular_reason, in_experiment_reason)
    end
    EvalResultFactoryMultiVariations.new(factories)
  end
end
|
175
|
+
end
|
176
|
+
end
|
177
|
+
end
|
@@ -1,13 +1,14 @@
|
|
1
|
+
require "ldclient-rb/impl/model/preprocessed_data"
|
1
2
|
|
2
3
|
module LaunchDarkly
|
3
4
|
module Impl
|
4
5
|
module Model
|
5
6
|
# Abstraction of deserializing a feature flag or segment that was read from a data store or
|
6
7
|
# received from LaunchDarkly.
|
7
|
-
def self.deserialize(kind, json)
|
8
|
+
def self.deserialize(kind, json, logger = nil)
|
8
9
|
return nil if json.nil?
|
9
10
|
item = JSON.parse(json, symbolize_names: true)
|
10
|
-
|
11
|
+
DataModelPreprocessing::Preprocessor.new(logger).preprocess_item!(kind, item)
|
11
12
|
item
|
12
13
|
end
|
13
14
|
|
@@ -18,45 +19,14 @@ module LaunchDarkly
|
|
18
19
|
end
|
19
20
|
|
20
21
|
# Translates a { flags: ..., segments: ... } object received from LaunchDarkly to the data store format.
|
21
|
-
def self.make_all_store_data(received_data)
|
22
|
+
def self.make_all_store_data(received_data, logger = nil)
|
23
|
+
preprocessor = DataModelPreprocessing::Preprocessor.new(logger)
|
22
24
|
flags = received_data[:flags]
|
23
|
-
|
25
|
+
preprocessor.preprocess_all_items!(FEATURES, flags)
|
24
26
|
segments = received_data[:segments]
|
25
|
-
|
27
|
+
preprocessor.preprocess_all_items!(SEGMENTS, segments)
|
26
28
|
{ FEATURES => flags, SEGMENTS => segments }
|
27
29
|
end
|
28
|
-
|
29
|
-
# Called after we have deserialized a model item from JSON (because we received it from LaunchDarkly,
|
30
|
-
# or read it from a persistent data store). This allows us to precompute some derived attributes that
|
31
|
-
# will never change during the lifetime of that item.
|
32
|
-
def self.postprocess_item_after_deserializing!(kind, item)
|
33
|
-
return if !item
|
34
|
-
# Currently we are special-casing this for FEATURES; eventually it will be handled by delegating
|
35
|
-
# to the "kind" object or the item class.
|
36
|
-
if kind.eql? FEATURES
|
37
|
-
# For feature flags, we precompute all possible parameterized EvaluationReason instances.
|
38
|
-
prereqs = item[:prerequisites]
|
39
|
-
if !prereqs.nil?
|
40
|
-
prereqs.each do |prereq|
|
41
|
-
prereq[:_reason] = EvaluationReason::prerequisite_failed(prereq[:key])
|
42
|
-
end
|
43
|
-
end
|
44
|
-
rules = item[:rules]
|
45
|
-
if !rules.nil?
|
46
|
-
rules.each_index do |i|
|
47
|
-
rule = rules[i]
|
48
|
-
rule[:_reason] = EvaluationReason::rule_match(i, rule[:id])
|
49
|
-
end
|
50
|
-
end
|
51
|
-
end
|
52
|
-
end
|
53
|
-
|
54
|
-
def self.postprocess_items_after_deserializing!(kind, items_map)
|
55
|
-
return items_map if !items_map
|
56
|
-
items_map.each do |key, item|
|
57
|
-
postprocess_item_after_deserializing!(kind, item)
|
58
|
-
end
|
59
|
-
end
|
60
30
|
end
|
61
31
|
end
|
62
32
|
end
|
@@ -0,0 +1,47 @@
|
|
1
|
+
require "ldclient-rb/util"
|
2
|
+
|
3
|
+
require "concurrent/atomics"
|
4
|
+
|
5
|
+
module LaunchDarkly
|
6
|
+
module Impl
|
7
|
+
# Runs a task repeatedly on a dedicated worker thread, starting each iteration
# `interval` seconds after the previous iteration began (no sleep if an iteration
# overruns the interval). Exceptions raised by the task are logged and do not
# stop the loop.
class RepeatingTask
  # @param interval [Numeric] seconds between the starts of consecutive iterations
  # @param start_delay [Numeric, nil] optional delay before the first iteration
  # @param task [#call] the work to perform on each iteration
  # @param logger [Logger] receives uncaught exceptions from the task
  def initialize(interval, start_delay, task, logger)
    @interval = interval
    @start_delay = start_delay
    @task = task
    @logger = logger
    @stopped = Concurrent::AtomicBoolean.new(false)
    @worker = nil
  end

  # Spawns the worker thread and returns immediately.
  def start
    @worker = Thread.new do
      sleep(@start_delay) if @start_delay
      until @stopped.value
        iteration_start = Time.now
        begin
          @task.call
        rescue => e
          LaunchDarkly::Util.log_exception(@logger, "Uncaught exception from repeating task", e)
        end
        # Sleep only for whatever part of the interval the task did not consume.
        remaining = @interval - (Time.now - iteration_start)
        sleep(remaining) if remaining > 0
      end
    end
  end

  # Signals the loop to stop and waits for the worker to finish. Safe to call
  # multiple times; only the first call performs the shutdown. Joining is skipped
  # if stop is invoked from the worker thread itself.
  def stop
    return unless @stopped.make_true
    worker = @worker
    if worker && worker.alive? && worker != Thread.current
      worker.run # causes the thread to wake up if it's currently in a sleep
      worker.join
    end
  end
end
|
46
|
+
end
|
47
|
+
end
|
@@ -1,7 +1,10 @@
|
|
1
|
-
|
2
1
|
module LaunchDarkly
|
3
2
|
module Impl
|
4
3
|
module Util
|
4
|
+
# Returns true only when the argument is exactly the object true or false.
# (Identity comparison, matching the original `[true, false].include?` semantics:
# truthiness of other objects does not count.)
# @param value [Object] the value to check
# @return [Boolean]
def self.is_bool(value)
  true.equal?(value) || false.equal?(value)
end
|
7
|
+
|
5
8
|
def self.current_time_millis
|
6
9
|
(Time.now.to_f * 1000).to_i
|
7
10
|
end
|
@@ -12,8 +15,66 @@ module LaunchDarkly
|
|
12
15
|
ret["X-LaunchDarkly-Wrapper"] = config.wrapper_name +
|
13
16
|
(config.wrapper_version ? "/" + config.wrapper_version : "")
|
14
17
|
end
|
18
|
+
|
19
|
+
app_value = application_header_value config.application
|
20
|
+
ret["X-LaunchDarkly-Tags"] = app_value unless app_value.nil? || app_value.empty?
|
21
|
+
|
15
22
|
ret
|
16
23
|
end
|
24
|
+
|
25
|
+
#
|
26
|
+
# Generate an HTTP Header value containing the application meta information (@see #application).
|
27
|
+
#
|
28
|
+
# @return [String]
|
29
|
+
#
|
30
|
+
#
# Generate an HTTP Header value containing the application meta information (@see #application).
# Empty :id/:version values are omitted; when both are empty the result is "".
#
# @param application [Hash] hash with string :id and :version entries (assumed
#   already validated — TODO confirm callers always pass validated values)
# @return [String]
#
def self.application_header_value(application)
  tags = []
  tags << "application-id/#{application[:id]}" unless application[:id].empty?
  tags << "application-version/#{application[:version]}" unless application[:version].empty?
  tags.join(" ")
end
|
42
|
+
|
43
|
+
#
|
44
|
+
# @param value [String]
|
45
|
+
# @param name [Symbol]
|
46
|
+
# @param logger [Logger]
|
47
|
+
# @return [String]
|
48
|
+
#
|
49
|
+
#
# Coerces an application metadata value to a string and validates it. Values
# longer than 64 characters, or containing characters outside [a-zA-Z0-9._-],
# are discarded (replaced with "") with a logged warning.
#
# @param value [String]
# @param name [Symbol]
# @param logger [Logger]
# @return [String] the validated value, or "" if it was empty or invalid
#
def self.validate_application_value(value, name, logger)
  str = value.to_s
  return "" if str.empty?

  if str.length > 64
    logger.warn { "Value of application[#{name}] was longer than 64 characters and was discarded" }
    ""
  elsif str.match(/[^a-zA-Z0-9._-]/)
    logger.warn { "Value of application[#{name}] contained invalid characters and was discarded" }
    ""
  else
    str
  end
end
|
66
|
+
|
67
|
+
#
|
68
|
+
# @param app [Hash]
|
69
|
+
# @param logger [Logger]
|
70
|
+
# @return [Hash]
|
71
|
+
#
|
72
|
+
#
# Validates both fields of an application-info hash, discarding any invalid
# value (@see #validate_application_value).
#
# @param app [Hash]
# @param logger [Logger]
# @return [Hash] hash with validated :id and :version strings ("" when discarded)
#
def self.validate_application_info(app, logger)
  validated_id = validate_application_value(app[:id], :id, logger)
  validated_version = validate_application_value(app[:version], :version, logger)
  { id: validated_id, version: validated_version }
end
|
17
78
|
end
|
18
79
|
end
|
19
80
|
end
|
@@ -3,6 +3,13 @@ require "ldclient-rb/integrations/util/store_wrapper"
|
|
3
3
|
|
4
4
|
module LaunchDarkly
|
5
5
|
module Integrations
|
6
|
+
#
|
7
|
+
# Integration with [Consul](https://www.consul.io/).
|
8
|
+
#
|
9
|
+
# Note that in order to use this integration, you must first install the gem `diplomat`.
|
10
|
+
#
|
11
|
+
# @since 5.5.0
|
12
|
+
#
|
6
13
|
module Consul
|
7
14
|
#
|
8
15
|
# Default value for the `prefix` option for {new_feature_store}.
|
@@ -29,7 +36,7 @@ module LaunchDarkly
|
|
29
36
|
# @option opts [Integer] :capacity (1000) maximum number of items in the cache
|
30
37
|
# @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object
|
31
38
|
#
|
32
|
-
def self.new_feature_store(opts
|
39
|
+
def self.new_feature_store(opts = {})
|
33
40
|
core = LaunchDarkly::Impl::Integrations::Consul::ConsulFeatureStoreCore.new(opts)
|
34
41
|
return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts)
|
35
42
|
end
|
@@ -3,11 +3,19 @@ require "ldclient-rb/integrations/util/store_wrapper"
|
|
3
3
|
|
4
4
|
module LaunchDarkly
|
5
5
|
module Integrations
|
6
|
+
#
|
7
|
+
# Integration with [DynamoDB](https://aws.amazon.com/dynamodb/).
|
8
|
+
#
|
9
|
+
# Note that in order to use this integration, you must first install one of the AWS SDK gems: either
|
10
|
+
# `aws-sdk-dynamodb`, or the full `aws-sdk`.
|
11
|
+
#
|
12
|
+
# @since 5.5.0
|
13
|
+
#
|
6
14
|
module DynamoDB
|
7
15
|
#
|
8
16
|
# Creates a DynamoDB-backed persistent feature store. For more details about how and why you can
|
9
17
|
# use a persistent feature store, see the
|
10
|
-
# [SDK reference guide](https://docs.launchdarkly.com/
|
18
|
+
# [SDK reference guide](https://docs.launchdarkly.com/sdk/features/storing-data#ruby).
|
11
19
|
#
|
12
20
|
# To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or
|
13
21
|
# the full `aws-sdk`. Then, put the object returned by this method into the `feature_store` property
|
@@ -38,9 +46,46 @@ module LaunchDarkly
|
|
38
46
|
# @option opts [Integer] :capacity (1000) maximum number of items in the cache
|
39
47
|
# @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object
|
40
48
|
#
|
41
|
-
def self.new_feature_store(table_name, opts)
|
49
|
+
def self.new_feature_store(table_name, opts = {})
|
42
50
|
core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBFeatureStoreCore.new(table_name, opts)
|
43
|
-
|
51
|
+
LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts)
|
52
|
+
end
|
53
|
+
|
54
|
+
#
|
55
|
+
# Creates a DynamoDB-backed Big Segment store.
|
56
|
+
#
|
57
|
+
# Big Segments are a specific type of user segments. For more information, read the LaunchDarkly
|
58
|
+
# documentation: https://docs.launchdarkly.com/home/users/big-segments
|
59
|
+
#
|
60
|
+
# To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or
|
61
|
+
# the full `aws-sdk`. Then, put the object returned by this method into the `store` property of your
|
62
|
+
# Big Segments configuration (see `Config`).
|
63
|
+
#
|
64
|
+
# @example Configuring Big Segments
|
65
|
+
# store = LaunchDarkly::Integrations::DynamoDB::new_big_segment_store("my-table-name")
|
66
|
+
# config = LaunchDarkly::Config.new(big_segments: LaunchDarkly::BigSegmentsConfig.new(store: store))
|
67
|
+
# client = LaunchDarkly::LDClient.new(my_sdk_key, config)
|
68
|
+
#
|
69
|
+
# Note that the specified table must already exist in DynamoDB. It must have a partition key called
|
70
|
+
# "namespace", and a sort key called "key" (both strings). The SDK does not create the table
|
71
|
+
# automatically because it has no way of knowing what additional properties (such as permissions
|
72
|
+
# and throughput) you would want it to have.
|
73
|
+
#
|
74
|
+
# By default, the DynamoDB client will try to get your AWS credentials and region name from
|
75
|
+
# environment variables and/or local configuration files, as described in the AWS SDK documentation.
|
76
|
+
# You can also specify any supported AWS SDK options in `dynamodb_opts`-- or, provide an
|
77
|
+
# already-configured DynamoDB client in `existing_client`.
|
78
|
+
#
|
79
|
+
# @param opts [Hash] the configuration options (these are all the same as for `new_feature_store`,
|
80
|
+
# except that there are no caching parameters)
|
81
|
+
# @option opts [Hash] :dynamodb_opts options to pass to the DynamoDB client constructor (ignored if you specify `:existing_client`)
|
82
|
+
# @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use
|
83
|
+
# @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly
|
84
|
+
# @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger`
|
85
|
+
# @return [LaunchDarkly::Interfaces::BigSegmentStore] a Big Segment store object
|
86
|
+
#
|
87
|
+
def self.new_big_segment_store(table_name, opts)
|
88
|
+
LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBBigSegmentStore.new(table_name, opts)
|
44
89
|
end
|
45
90
|
end
|
46
91
|
end
|
@@ -0,0 +1,108 @@
|
|
1
|
+
require 'ldclient-rb/impl/integrations/file_data_source'
|
2
|
+
|
3
|
+
module LaunchDarkly
|
4
|
+
module Integrations
|
5
|
+
#
|
6
|
+
# Provides a way to use local files as a source of feature flag state. This allows using a
|
7
|
+
# predetermined feature flag state without an actual LaunchDarkly connection.
|
8
|
+
#
|
9
|
+
# Reading flags from a file is only intended for pre-production environments. Production
|
10
|
+
# environments should always be configured to receive flag updates from LaunchDarkly.
|
11
|
+
#
|
12
|
+
# To use this component, call {FileData#data_source}, and store its return value in the
|
13
|
+
# {Config#data_source} property of your LaunchDarkly client configuration. In the options
|
14
|
+
# to `data_source`, set `paths` to the file path(s) of your data file(s):
|
15
|
+
#
|
16
|
+
# file_source = LaunchDarkly::Integrations::FileData.data_source(paths: [ myFilePath ])
|
17
|
+
# config = LaunchDarkly::Config.new(data_source: file_source)
|
18
|
+
#
|
19
|
+
# This will cause the client not to connect to LaunchDarkly to get feature flags. The
|
20
|
+
# client may still make network connections to send analytics events, unless you have disabled
|
21
|
+
# this with {Config#send_events} or {Config#offline?}.
|
22
|
+
#
|
23
|
+
# Flag data files can be either JSON or YAML. They contain an object with three possible
|
24
|
+
# properties:
|
25
|
+
#
|
26
|
+
# - `flags`: Feature flag definitions.
|
27
|
+
# - `flagValues`: Simplified feature flags that contain only a value.
|
28
|
+
# - `segments`: User segment definitions.
|
29
|
+
#
|
30
|
+
# The format of the data in `flags` and `segments` is defined by the LaunchDarkly application
|
31
|
+
# and is subject to change. Rather than trying to construct these objects yourself, it is simpler
|
32
|
+
# to request existing flags directly from the LaunchDarkly server in JSON format, and use this
|
33
|
+
# output as the starting point for your file. In Linux you would do this:
|
34
|
+
#
|
35
|
+
# ```
|
36
|
+
# curl -H "Authorization: YOUR_SDK_KEY" https://sdk.launchdarkly.com/sdk/latest-all
|
37
|
+
# ```
|
38
|
+
#
|
39
|
+
# The output will look something like this (but with many more properties):
|
40
|
+
#
|
41
|
+
# {
|
42
|
+
# "flags": {
|
43
|
+
# "flag-key-1": {
|
44
|
+
# "key": "flag-key-1",
|
45
|
+
# "on": true,
|
46
|
+
# "variations": [ "a", "b" ]
|
47
|
+
# }
|
48
|
+
# },
|
49
|
+
# "segments": {
|
50
|
+
# "segment-key-1": {
|
51
|
+
# "key": "segment-key-1",
|
52
|
+
# "includes": [ "user-key-1" ]
|
53
|
+
# }
|
54
|
+
# }
|
55
|
+
# }
|
56
|
+
#
|
57
|
+
# Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported
|
58
|
+
# by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to
|
59
|
+
# set specific flag keys to specific values. For that, you can use a much simpler format:
|
60
|
+
#
|
61
|
+
# {
|
62
|
+
# "flagValues": {
|
63
|
+
# "my-string-flag-key": "value-1",
|
64
|
+
# "my-boolean-flag-key": true,
|
65
|
+
# "my-integer-flag-key": 3
|
66
|
+
# }
|
67
|
+
# }
|
68
|
+
#
|
69
|
+
# Or, in YAML:
|
70
|
+
#
|
71
|
+
# flagValues:
|
72
|
+
# my-string-flag-key: "value-1"
|
73
|
+
# my-boolean-flag-key: true
|
74
|
+
# my-integer-flag-key: 1
|
75
|
+
#
|
76
|
+
# It is also possible to specify both "flags" and "flagValues", if you want some flags
|
77
|
+
# to have simple values and others to have complex behavior. However, it is an error to use the
|
78
|
+
# same flag key or segment key more than once, either in a single file or across multiple files.
|
79
|
+
#
|
80
|
+
# If the data source encounters any error in any file-- malformed content, a missing file, or a
|
81
|
+
# duplicate key-- it will not load flags from any of the files.
|
82
|
+
#
|
83
|
+
module FileData
|
84
|
+
#
|
85
|
+
# Returns a factory for the file data source component.
|
86
|
+
#
|
87
|
+
# @param options [Hash] the configuration options
|
88
|
+
# @option options [Array] :paths The paths of the source files for loading flag data. These
|
89
|
+
# may be absolute paths or relative to the current working directory.
|
90
|
+
# @option options [Boolean] :auto_update True if the data source should watch for changes to
|
91
|
+
# the source file(s) and reload flags whenever there is a change. Auto-updating will only
|
92
|
+
# work if all of the files you specified have valid directory paths at startup time.
|
93
|
+
# Note that the default implementation of this feature is based on polling the filesystem,
|
94
|
+
# which may not perform well. If you install the 'listen' gem (not included by default, to
|
95
|
+
# avoid adding unwanted dependencies to the SDK), its native file watching mechanism will be
|
96
|
+
# used instead. However, 'listen' will not be used in JRuby 9.1 due to a known instability.
|
97
|
+
# @option options [Float] :poll_interval The minimum interval, in seconds, between checks for
|
98
|
+
# file modifications - used only if auto_update is true, and if the native file-watching
|
99
|
+
# mechanism from 'listen' is not being used. The default value is 1 second.
|
100
|
+
# @return an object that can be stored in {Config#data_source}
|
101
|
+
#
|
102
|
+
# Builds the file data source factory: a two-argument lambda that the SDK client
# invokes with (sdk_key, config) to construct the FileDataSourceImpl, wired to
# the configured feature store and logger along with the caller's options.
def self.data_source(options={})
  ->(sdk_key, config) do
    Impl::Integrations::FileDataSourceImpl.new(config.feature_store, config.logger, options)
  end
end
|
106
|
+
end
|
107
|
+
end
|
108
|
+
end
|
@@ -2,6 +2,14 @@ require "ldclient-rb/redis_store" # eventually we will just refer to impl/integ
|
|
2
2
|
|
3
3
|
module LaunchDarkly
|
4
4
|
module Integrations
|
5
|
+
#
|
6
|
+
# Integration with [Redis](https://redis.io/).
|
7
|
+
#
|
8
|
+
# Note that in order to use this integration, you must first install the `redis` and `connection-pool`
|
9
|
+
# gems.
|
10
|
+
#
|
11
|
+
# @since 5.5.0
|
12
|
+
#
|
5
13
|
module Redis
|
6
14
|
#
|
7
15
|
# Default value for the `redis_url` option for {new_feature_store}. This points to an instance of
|
@@ -25,7 +33,7 @@ module LaunchDarkly
|
|
25
33
|
#
|
26
34
|
# Creates a Redis-backed persistent feature store. For more details about how and why you can
|
27
35
|
# use a persistent feature store, see the
|
28
|
-
# [SDK reference guide](https://docs.launchdarkly.com/
|
36
|
+
# [SDK reference guide](https://docs.launchdarkly.com/sdk/features/storing-data#ruby).
|
29
37
|
#
|
30
38
|
# To use this method, you must first have the `redis` and `connection-pool` gems installed. Then,
|
31
39
|
# put the object returned by this method into the `feature_store` property of your
|
@@ -50,9 +58,41 @@ module LaunchDarkly
|
|
50
58
|
# lifecycle to be independent of the SDK client
|
51
59
|
# @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object
|
52
60
|
#
|
53
|
-
def self.new_feature_store(opts)
|
61
|
+
def self.new_feature_store(opts = {})
|
54
62
|
return RedisFeatureStore.new(opts)
|
55
63
|
end
|
64
|
+
|
65
|
+
#
|
66
|
+
# Creates a Redis-backed Big Segment store.
|
67
|
+
#
|
68
|
+
# Big Segments are a specific type of user segments. For more information, read the LaunchDarkly
|
69
|
+
# documentation: https://docs.launchdarkly.com/home/users/big-segments
|
70
|
+
#
|
71
|
+
# To use this method, you must first have the `redis` and `connection-pool` gems installed. Then,
|
72
|
+
# put the object returned by this method into the `store` property of your Big Segments configuration
|
73
|
+
# (see `Config`).
|
74
|
+
#
|
75
|
+
# @example Configuring Big Segments
|
76
|
+
# store = LaunchDarkly::Integrations::Redis::new_big_segment_store(redis_url: "redis://my-server")
|
77
|
+
#     config = LaunchDarkly::Config.new(big_segments: LaunchDarkly::BigSegmentsConfig.new(store: store))
|
78
|
+
# client = LaunchDarkly::LDClient.new(my_sdk_key, config)
|
79
|
+
#
|
80
|
+
# @param opts [Hash] the configuration options (these are all the same as for `new_feature_store`,
|
81
|
+
# except that there are no caching parameters)
|
82
|
+
# @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`)
|
83
|
+
# @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`)
|
84
|
+
# @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly
|
85
|
+
# @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger`
|
86
|
+
# @option opts [Integer] :max_connections size of the Redis connection pool
|
87
|
+
# @option opts [Object] :pool custom connection pool, if desired
|
88
|
+
# @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool;
|
89
|
+
# this is true by default, and should be set to false only if you are managing the pool yourself and want its
|
90
|
+
# lifecycle to be independent of the SDK client
|
91
|
+
# @return [LaunchDarkly::Interfaces::BigSegmentStore] a Big Segment store object
|
92
|
+
#
|
93
|
+
def self.new_big_segment_store(opts)
|
94
|
+
return LaunchDarkly::Impl::Integrations::Redis::RedisBigSegmentStore.new(opts)
|
95
|
+
end
|
56
96
|
end
|
57
97
|
end
|
58
98
|
end
|