launchdarkly-server-sdk 6.1.1 → 6.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +4 -5
- data/lib/ldclient-rb/config.rb +118 -4
- data/lib/ldclient-rb/evaluation_detail.rb +104 -14
- data/lib/ldclient-rb/events.rb +201 -107
- data/lib/ldclient-rb/file_data_source.rb +9 -300
- data/lib/ldclient-rb/flags_state.rb +23 -12
- data/lib/ldclient-rb/impl/big_segments.rb +117 -0
- data/lib/ldclient-rb/impl/diagnostic_events.rb +1 -1
- data/lib/ldclient-rb/impl/evaluator.rb +116 -62
- data/lib/ldclient-rb/impl/evaluator_bucketing.rb +22 -9
- data/lib/ldclient-rb/impl/evaluator_helpers.rb +53 -0
- data/lib/ldclient-rb/impl/evaluator_operators.rb +1 -1
- data/lib/ldclient-rb/impl/event_summarizer.rb +63 -0
- data/lib/ldclient-rb/impl/event_types.rb +90 -0
- data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +82 -18
- data/lib/ldclient-rb/impl/integrations/file_data_source.rb +212 -0
- data/lib/ldclient-rb/impl/integrations/redis_impl.rb +84 -31
- data/lib/ldclient-rb/impl/integrations/test_data/test_data_source.rb +40 -0
- data/lib/ldclient-rb/impl/model/preprocessed_data.rb +177 -0
- data/lib/ldclient-rb/impl/model/serialization.rb +7 -37
- data/lib/ldclient-rb/impl/repeating_task.rb +47 -0
- data/lib/ldclient-rb/impl/util.rb +62 -1
- data/lib/ldclient-rb/integrations/consul.rb +8 -1
- data/lib/ldclient-rb/integrations/dynamodb.rb +48 -3
- data/lib/ldclient-rb/integrations/file_data.rb +108 -0
- data/lib/ldclient-rb/integrations/redis.rb +42 -2
- data/lib/ldclient-rb/integrations/test_data/flag_builder.rb +438 -0
- data/lib/ldclient-rb/integrations/test_data.rb +209 -0
- data/lib/ldclient-rb/integrations/util/store_wrapper.rb +5 -0
- data/lib/ldclient-rb/integrations.rb +2 -51
- data/lib/ldclient-rb/interfaces.rb +152 -2
- data/lib/ldclient-rb/ldclient.rb +131 -33
- data/lib/ldclient-rb/polling.rb +22 -41
- data/lib/ldclient-rb/requestor.rb +3 -3
- data/lib/ldclient-rb/stream.rb +4 -3
- data/lib/ldclient-rb/util.rb +10 -1
- data/lib/ldclient-rb/version.rb +1 -1
- data/lib/ldclient-rb.rb +0 -1
- metadata +35 -132
- data/.circleci/config.yml +0 -40
- data/.github/ISSUE_TEMPLATE/bug_report.md +0 -37
- data/.github/ISSUE_TEMPLATE/config.yml +0 -5
- data/.github/ISSUE_TEMPLATE/feature_request.md +0 -20
- data/.github/pull_request_template.md +0 -21
- data/.gitignore +0 -16
- data/.hound.yml +0 -2
- data/.ldrelease/build-docs.sh +0 -18
- data/.ldrelease/circleci/linux/execute.sh +0 -18
- data/.ldrelease/circleci/mac/execute.sh +0 -18
- data/.ldrelease/circleci/template/build.sh +0 -29
- data/.ldrelease/circleci/template/publish.sh +0 -23
- data/.ldrelease/circleci/template/set-gem-home.sh +0 -7
- data/.ldrelease/circleci/template/test.sh +0 -10
- data/.ldrelease/circleci/template/update-version.sh +0 -8
- data/.ldrelease/circleci/windows/execute.ps1 +0 -19
- data/.ldrelease/config.yml +0 -29
- data/.rspec +0 -2
- data/.rubocop.yml +0 -600
- data/.simplecov +0 -4
- data/CHANGELOG.md +0 -351
- data/CODEOWNERS +0 -1
- data/CONTRIBUTING.md +0 -37
- data/Gemfile +0 -3
- data/azure-pipelines.yml +0 -51
- data/docs/Makefile +0 -26
- data/docs/index.md +0 -9
- data/launchdarkly-server-sdk.gemspec +0 -45
- data/lib/ldclient-rb/event_summarizer.rb +0 -55
- data/lib/ldclient-rb/impl/event_factory.rb +0 -120
- data/spec/config_spec.rb +0 -63
- data/spec/diagnostic_events_spec.rb +0 -163
- data/spec/evaluation_detail_spec.rb +0 -135
- data/spec/event_sender_spec.rb +0 -197
- data/spec/event_summarizer_spec.rb +0 -63
- data/spec/events_spec.rb +0 -607
- data/spec/expiring_cache_spec.rb +0 -76
- data/spec/feature_store_spec_base.rb +0 -213
- data/spec/file_data_source_spec.rb +0 -283
- data/spec/fixtures/feature.json +0 -37
- data/spec/fixtures/feature1.json +0 -36
- data/spec/fixtures/user.json +0 -9
- data/spec/flags_state_spec.rb +0 -81
- data/spec/http_util.rb +0 -132
- data/spec/impl/evaluator_bucketing_spec.rb +0 -111
- data/spec/impl/evaluator_clause_spec.rb +0 -55
- data/spec/impl/evaluator_operators_spec.rb +0 -141
- data/spec/impl/evaluator_rule_spec.rb +0 -96
- data/spec/impl/evaluator_segment_spec.rb +0 -125
- data/spec/impl/evaluator_spec.rb +0 -305
- data/spec/impl/evaluator_spec_base.rb +0 -75
- data/spec/impl/model/serialization_spec.rb +0 -41
- data/spec/in_memory_feature_store_spec.rb +0 -12
- data/spec/integrations/consul_feature_store_spec.rb +0 -40
- data/spec/integrations/dynamodb_feature_store_spec.rb +0 -103
- data/spec/integrations/store_wrapper_spec.rb +0 -276
- data/spec/launchdarkly-server-sdk_spec.rb +0 -13
- data/spec/launchdarkly-server-sdk_spec_autoloadtest.rb +0 -9
- data/spec/ldclient_end_to_end_spec.rb +0 -157
- data/spec/ldclient_spec.rb +0 -643
- data/spec/newrelic_spec.rb +0 -5
- data/spec/polling_spec.rb +0 -120
- data/spec/redis_feature_store_spec.rb +0 -121
- data/spec/requestor_spec.rb +0 -196
- data/spec/segment_store_spec_base.rb +0 -95
- data/spec/simple_lru_cache_spec.rb +0 -24
- data/spec/spec_helper.rb +0 -9
- data/spec/store_spec.rb +0 -10
- data/spec/stream_spec.rb +0 -45
- data/spec/user_filter_spec.rb +0 -91
- data/spec/util_spec.rb +0 -17
- data/spec/version_spec.rb +0 -7
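Among the additions listed above, data/lib/ldclient-rb/integrations/test_data.rb and test_data/flag_builder.rb introduce a programmatic test-fixture data source (new in the 6.3.x line). Below is a hedged sketch of how such a source is typically wired into a client; the `data_source`, `flag`, `variation_for_all_users`, and `update` names are assumed from the SDK documentation for this version range, not taken from this diff, and may differ.

```ruby
require "ldclient-rb"

# Assumed API of the new test_data integration; treat the names as illustrative.
td = LaunchDarkly::Integrations::TestData.data_source
td.update(td.flag("sample-flag").variation_for_all_users(true))

config = LaunchDarkly::Config.new(data_source: td, send_events: false)
client = LaunchDarkly::LDClient.new("sdk-key", config)

puts client.variation("sample-flag", { key: "example-user" }, false) # => true
client.close
```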
data/spec/expiring_cache_spec.rb
DELETED
@@ -1,76 +0,0 @@
-require 'timecop'
-
-describe LaunchDarkly::ExpiringCache do
-  subject { LaunchDarkly::ExpiringCache }
-
-  before(:each) do
-    Timecop.freeze(Time.now)
-  end
-
-  after(:each) do
-    Timecop.return
-  end
-
-  it "evicts entries based on TTL" do
-    c = subject.new(3, 300)
-    c[:a] = 1
-    c[:b] = 2
-
-    Timecop.freeze(Time.now + 330)
-
-    c[:c] = 3
-
-    expect(c[:a]).to be nil
-    expect(c[:b]).to be nil
-    expect(c[:c]).to eq 3
-  end
-
-  it "evicts entries based on max size" do
-    c = subject.new(2, 300)
-    c[:a] = 1
-    c[:b] = 2
-    c[:c] = 3
-
-    expect(c[:a]).to be nil
-    expect(c[:b]).to eq 2
-    expect(c[:c]).to eq 3
-  end
-
-  it "does not reset LRU on get" do
-    c = subject.new(2, 300)
-    c[:a] = 1
-    c[:b] = 2
-    c[:a]
-    c[:c] = 3
-
-    expect(c[:a]).to be nil
-    expect(c[:b]).to eq 2
-    expect(c[:c]).to eq 3
-  end
-
-  it "resets LRU on put" do
-    c = subject.new(2, 300)
-    c[:a] = 1
-    c[:b] = 2
-    c[:a] = 1
-    c[:c] = 3
-
-    expect(c[:a]).to eq 1
-    expect(c[:b]).to be nil
-    expect(c[:c]).to eq 3
-  end
-
-  it "resets TTL on put" do
-    c = subject.new(3, 300)
-    c[:a] = 1
-    c[:b] = 2
-
-    Timecop.freeze(Time.now + 330)
-    c[:a] = 1
-    c[:c] = 3
-
-    expect(c[:a]).to eq 1
-    expect(c[:b]).to be nil
-    expect(c[:c]).to eq 3
-  end
-end
data/spec/feature_store_spec_base.rb
DELETED
@@ -1,213 +0,0 @@
-require "spec_helper"
-
-shared_examples "feature_store" do |create_store_method, clear_data_method|
-
-  # Rather than testing with feature flag or segment data, we'll use this fake data kind
-  # to make it clear that feature stores need to be able to handle arbitrary data.
-  let(:things_kind) { { namespace: "things" } }
-
-  let(:key1) { "thing1" }
-  let(:thing1) {
-    {
-      key: key1,
-      name: "Thing 1",
-      version: 11,
-      deleted: false
-    }
-  }
-  let(:unused_key) { "no" }
-
-  let(:create_store) { create_store_method } # just to avoid a scope issue
-  let(:clear_data) { clear_data_method }
-
-  def with_store(opts = {})
-    s = create_store.call(opts)
-    begin
-      yield s
-    ensure
-      s.stop
-    end
-  end
-
-  def with_inited_store(things)
-    things_hash = {}
-    things.each { |thing| things_hash[thing[:key].to_sym] = thing }
-
-    with_store do |s|
-      s.init({ things_kind => things_hash })
-      yield s
-    end
-  end
-
-  def new_version_plus(f, deltaVersion, attrs = {})
-    f.clone.merge({ version: f[:version] + deltaVersion }).merge(attrs)
-  end
-
-  before(:each) do
-    clear_data.call if !clear_data.nil?
-  end
-
-  # This block of tests is only run if the clear_data method is defined, meaning that this is a persistent store
-  # that operates on a database that can be shared with other store instances (as opposed to the in-memory store,
-  # which has its own private storage).
-  if !clear_data_method.nil?
-    it "is not initialized by default" do
-      with_store do |store|
-        expect(store.initialized?).to eq false
-      end
-    end
-
-    it "can detect if another instance has initialized the store" do
-      with_store do |store1|
-        store1.init({})
-        with_store do |store2|
-          expect(store2.initialized?).to eq true
-        end
-      end
-    end
-
-    it "can read data written by another instance" do
-      with_store do |store1|
-        store1.init({ things_kind => { key1.to_sym => thing1 } })
-        with_store do |store2|
-          expect(store2.get(things_kind, key1)).to eq thing1
-        end
-      end
-    end
-
-    it "is independent from other stores with different prefixes" do
-      with_store({ prefix: "a" }) do |store_a|
-        store_a.init({ things_kind => { key1.to_sym => thing1 } })
-        with_store({ prefix: "b" }) do |store_b|
-          store_b.init({ things_kind => {} })
-        end
-        with_store({ prefix: "b" }) do |store_b1| # this ensures we're not just reading cached data
-          expect(store_b1.get(things_kind, key1)).to be_nil
-          expect(store_a.get(things_kind, key1)).to eq thing1
-        end
-      end
-    end
-  end
-
-  it "is initialized after calling init" do
-    with_inited_store([]) do |store|
-      expect(store.initialized?).to eq true
-    end
-  end
-
-  it "can get existing item with symbol key" do
-    with_inited_store([ thing1 ]) do |store|
-      expect(store.get(things_kind, key1.to_sym)).to eq thing1
-    end
-  end
-
-  it "can get existing item with string key" do
-    with_inited_store([ thing1 ]) do |store|
-      expect(store.get(things_kind, key1.to_s)).to eq thing1
-    end
-  end
-
-  it "gets nil for nonexisting item" do
-    with_inited_store([ thing1 ]) do |store|
-      expect(store.get(things_kind, unused_key)).to be_nil
-    end
-  end
-
-  it "returns nil for deleted item" do
-    deleted_thing = thing1.clone.merge({ deleted: true })
-    with_inited_store([ deleted_thing ]) do |store|
-      expect(store.get(things_kind, key1)).to be_nil
-    end
-  end
-
-  it "can get all items" do
-    key2 = "thing2"
-    thing2 = {
-      key: key2,
-      name: "Thing 2",
-      version: 22,
-      deleted: false
-    }
-    with_inited_store([ thing1, thing2 ]) do |store|
-      expect(store.all(things_kind)).to eq ({ key1.to_sym => thing1, key2.to_sym => thing2 })
-    end
-  end
-
-  it "filters out deleted items when getting all" do
-    key2 = "thing2"
-    thing2 = {
-      key: key2,
-      name: "Thing 2",
-      version: 22,
-      deleted: true
-    }
-    with_inited_store([ thing1, thing2 ]) do |store|
-      expect(store.all(things_kind)).to eq ({ key1.to_sym => thing1 })
-    end
-  end
-
-  it "can add new item" do
-    with_inited_store([]) do |store|
-      store.upsert(things_kind, thing1)
-      expect(store.get(things_kind, key1)).to eq thing1
-    end
-  end
-
-  it "can update item with newer version" do
-    with_inited_store([ thing1 ]) do |store|
-      thing1_mod = new_version_plus(thing1, 1, { name: thing1[:name] + ' updated' })
-      store.upsert(things_kind, thing1_mod)
-      expect(store.get(things_kind, key1)).to eq thing1_mod
-    end
-  end
-
-  it "cannot update item with same version" do
-    with_inited_store([ thing1 ]) do |store|
-      thing1_mod = thing1.clone.merge({ name: thing1[:name] + ' updated' })
-      store.upsert(things_kind, thing1_mod)
-      expect(store.get(things_kind, key1)).to eq thing1
-    end
-  end
-
-  it "cannot update feature with older version" do
-    with_inited_store([ thing1 ]) do |store|
-      thing1_mod = new_version_plus(thing1, -1, { name: thing1[:name] + ' updated' })
-      store.upsert(things_kind, thing1_mod)
-      expect(store.get(things_kind, key1)).to eq thing1
-    end
-  end
-
-  it "can delete item with newer version" do
-    with_inited_store([ thing1 ]) do |store|
-      store.delete(things_kind, key1, thing1[:version] + 1)
-      expect(store.get(things_kind, key1)).to be_nil
-    end
-  end
-
-  it "cannot delete item with same version" do
-    with_inited_store([ thing1 ]) do |store|
-      store.delete(things_kind, key1, thing1[:version])
-      expect(store.get(things_kind, key1)).to eq thing1
-    end
-  end
-
-  it "cannot delete item with older version" do
-    with_inited_store([ thing1 ]) do |store|
-      store.delete(things_kind, key1, thing1[:version] - 1)
-      expect(store.get(things_kind, key1)).to eq thing1
-    end
-  end
-
-  it "stores Unicode data correctly" do
-    flag = {
-      key: "my-fancy-flag",
-      name: "Tęst Feåtūre Flæg😺",
-      version: 1,
-      deleted: false
-    }
-    with_inited_store([]) do |store|
-      store.upsert(LaunchDarkly::FEATURES, flag)
-      expect(store.get(LaunchDarkly::FEATURES, flag[:key])).to eq flag
-    end
-  end
-end
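The deleted feature_store_spec_base.rb above defines its tests as RSpec shared examples, parameterized by a store factory and an optional hook for clearing shared external data. For orientation, a store-specific spec would consume them roughly as follows; the in-memory example is a hypothetical illustration, not code from this diff.

```ruby
require "spec_helper"

describe LaunchDarkly::InMemoryFeatureStore do
  # First argument builds a store instance; the second clears shared backing
  # storage between runs (nil here, since the in-memory store is private).
  include_examples "feature_store",
    lambda { |opts| LaunchDarkly::InMemoryFeatureStore.new },
    nil
end
```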
data/spec/file_data_source_spec.rb
DELETED
@@ -1,283 +0,0 @@
-require "spec_helper"
-require "tempfile"
-
-# see does not allow Ruby objects in YAML" for the purpose of the following two things
-$created_bad_class = false
-class BadClassWeShouldNotInstantiate < Hash
-  def []=(key, value)
-    $created_bad_class = true
-  end
-end
-
-describe LaunchDarkly::FileDataSource do
-  let(:full_flag_1_key) { "flag1" }
-  let(:full_flag_1_value) { "on" }
-  let(:flag_value_1_key) { "flag2" }
-  let(:flag_value_1) { "value2" }
-  let(:all_flag_keys) { [ full_flag_1_key.to_sym, flag_value_1_key.to_sym ] }
-  let(:full_segment_1_key) { "seg1" }
-  let(:all_segment_keys) { [ full_segment_1_key.to_sym ] }
-
-  let(:flag_only_json) { <<-EOF
-{
-  "flags": {
-    "flag1": {
-      "key": "flag1",
-      "on": true,
-      "fallthrough": {
-        "variation": 2
-      },
-      "variations": [ "fall", "off", "on" ]
-    }
-  }
-}
-  EOF
-  }
-
-  let(:segment_only_json) { <<-EOF
-{
-  "segments": {
-    "seg1": {
-      "key": "seg1",
-      "include": ["user1"]
-    }
-  }
-}
-  EOF
-  }
-
-  let(:all_properties_json) { <<-EOF
-{
-  "flags": {
-    "flag1": {
-      "key": "flag1",
-      "on": true,
-      "fallthrough": {
-        "variation": 2
-      },
-      "variations": [ "fall", "off", "on" ]
-    }
-  },
-  "flagValues": {
-    "flag2": "value2"
-  },
-  "segments": {
-    "seg1": {
-      "key": "seg1",
-      "include": ["user1"]
-    }
-  }
-}
-  EOF
-  }
-
-  let(:all_properties_yaml) { <<-EOF
----
-flags:
-  flag1:
-    key: flag1
-    "on": true
-flagValues:
-  flag2: value2
-segments:
-  seg1:
-    key: seg1
-    include: ["user1"]
-  EOF
-  }
-
-  let(:unsafe_yaml) { <<-EOF
---- !ruby/hash:BadClassWeShouldNotInstantiate
-foo: bar
-  EOF
-  }
-
-  let(:bad_file_path) { "no-such-file" }
-
-  before do
-    @config = LaunchDarkly::Config.new(logger: $null_log)
-    @store = @config.feature_store
-    @tmp_dir = Dir.mktmpdir
-  end
-
-  after do
-    FileUtils.rm_rf(@tmp_dir)
-  end
-
-  def make_temp_file(content)
-    # Note that we don't create our files in the default temp file directory, but rather in an empty directory
-    # that we made. That's because (depending on the platform) the temp file directory may contain huge numbers
-    # of files, which can make the file watcher perform poorly enough to break the tests.
-    file = Tempfile.new('flags', @tmp_dir)
-    IO.write(file, content)
-    file
-  end
-
-  def with_data_source(options)
-    factory = LaunchDarkly::FileDataSource.factory(options)
-    ds = factory.call('', @config)
-    begin
-      yield ds
-    ensure
-      ds.stop
-    end
-  end
-
-  it "doesn't load flags prior to start" do
-    file = make_temp_file('{"flagValues":{"key":"value"}}')
-    with_data_source({ paths: [ file.path ] }) do |ds|
-      expect(@store.initialized?).to eq(false)
-      expect(@store.all(LaunchDarkly::FEATURES)).to eq({})
-      expect(@store.all(LaunchDarkly::SEGMENTS)).to eq({})
-    end
-  end
-
-  it "loads flags on start - from JSON" do
-    file = make_temp_file(all_properties_json)
-    with_data_source({ paths: [ file.path ] }) do |ds|
-      ds.start
-      expect(@store.initialized?).to eq(true)
-      expect(@store.all(LaunchDarkly::FEATURES).keys).to eq(all_flag_keys)
-      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq(all_segment_keys)
-    end
-  end
-
-  it "loads flags on start - from YAML" do
-    file = make_temp_file(all_properties_yaml)
-    with_data_source({ paths: [ file.path ] }) do |ds|
-      ds.start
-      expect(@store.initialized?).to eq(true)
-      expect(@store.all(LaunchDarkly::FEATURES).keys).to eq(all_flag_keys)
-      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq(all_segment_keys)
-    end
-  end
-
-  it "does not allow Ruby objects in YAML" do
-    # This tests for the vulnerability described here: https://trailofbits.github.io/rubysec/yaml/index.html
-    # The file we're loading contains a hash with a custom Ruby class, BadClassWeShouldNotInstantiate (see top
-    # of file). If we're not loading in safe mode, it will create an instance of that class and call its []=
-    # method, which we've defined to set $created_bad_class to true. In safe mode, it refuses to parse this file.
-    file = make_temp_file(unsafe_yaml)
-    with_data_source({ paths: [file.path ] }) do |ds|
-      event = ds.start
-      expect(event.set?).to eq(true)
-      expect(ds.initialized?).to eq(false)
-      expect($created_bad_class).to eq(false)
-    end
-  end
-
-  it "sets start event and initialized on successful load" do
-    file = make_temp_file(all_properties_json)
-    with_data_source({ paths: [ file.path ] }) do |ds|
-      event = ds.start
-      expect(event.set?).to eq(true)
-      expect(ds.initialized?).to eq(true)
-    end
-  end
-
-  it "sets start event and does not set initialized on unsuccessful load" do
-    with_data_source({ paths: [ bad_file_path ] }) do |ds|
-      event = ds.start
-      expect(event.set?).to eq(true)
-      expect(ds.initialized?).to eq(false)
-    end
-  end
-
-  it "can load multiple files" do
-    file1 = make_temp_file(flag_only_json)
-    file2 = make_temp_file(segment_only_json)
-    with_data_source({ paths: [ file1.path, file2.path ] }) do |ds|
-      ds.start
-      expect(@store.initialized?).to eq(true)
-      expect(@store.all(LaunchDarkly::FEATURES).keys).to eq([ full_flag_1_key.to_sym ])
-      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([ full_segment_1_key.to_sym ])
-    end
-  end
-
-  it "does not allow duplicate keys" do
-    file1 = make_temp_file(flag_only_json)
-    file2 = make_temp_file(flag_only_json)
-    with_data_source({ paths: [ file1.path, file2.path ] }) do |ds|
-      ds.start
-      expect(@store.initialized?).to eq(false)
-      expect(@store.all(LaunchDarkly::FEATURES).keys).to eq([])
-    end
-  end
-
-  it "does not reload modified file if auto-update is off" do
-    file = make_temp_file(flag_only_json)
-
-    with_data_source({ paths: [ file.path ] }) do |ds|
-      event = ds.start
-      expect(event.set?).to eq(true)
-      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([])
-
-      IO.write(file, all_properties_json)
-      sleep(0.5)
-      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([])
-    end
-  end
-
-  def test_auto_reload(options)
-    file = make_temp_file(flag_only_json)
-    options[:paths] = [ file.path ]
-
-    with_data_source(options) do |ds|
-      event = ds.start
-      expect(event.set?).to eq(true)
-      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([])
-
-      sleep(1)
-      IO.write(file, all_properties_json)
-
-      max_time = 10
-      ok = wait_for_condition(10) { @store.all(LaunchDarkly::SEGMENTS).keys == all_segment_keys }
-      expect(ok).to eq(true), "Waited #{max_time}s after modifying file and it did not reload"
-    end
-  end
-
-  it "reloads modified file if auto-update is on" do
-    test_auto_reload({ auto_update: true })
-  end
-
-  it "reloads modified file in polling mode" do
-    test_auto_reload({ auto_update: true, force_polling: true, poll_interval: 0.1 })
-  end
-
-  it "evaluates simplified flag with client as expected" do
-    file = make_temp_file(all_properties_json)
-    factory = LaunchDarkly::FileDataSource.factory({ paths: file.path })
-    config = LaunchDarkly::Config.new(send_events: false, data_source: factory)
-    client = LaunchDarkly::LDClient.new('sdkKey', config)
-
-    begin
-      value = client.variation(flag_value_1_key, { key: 'user' }, '')
-      expect(value).to eq(flag_value_1)
-    ensure
-      client.close
-    end
-  end
-
-  it "evaluates full flag with client as expected" do
-    file = make_temp_file(all_properties_json)
-    factory = LaunchDarkly::FileDataSource.factory({ paths: file.path })
-    config = LaunchDarkly::Config.new(send_events: false, data_source: factory)
-    client = LaunchDarkly::LDClient.new('sdkKey', config)
-
-    begin
-      value = client.variation(full_flag_1_key, { key: 'user' }, '')
-      expect(value).to eq(full_flag_1_value)
-    ensure
-      client.close
-    end
-  end
-
-  def wait_for_condition(max_time)
-    deadline = Time.now + max_time
-    while Time.now < deadline
-      return true if yield
-      sleep(0.1)
-    end
-    false
-  end
-end
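This deleted spec drove the old LaunchDarkly::FileDataSource.factory entry point. Per the listing at the top, file_data_source.rb shrank by roughly 300 lines while integrations/file_data.rb and impl/integrations/file_data_source.rb were added, i.e. the file-based source moved under the integrations namespace. Below is a hedged sketch of the newer usage; the `FileData.data_source` name and its `paths`/`auto_update` options are assumed from the 6.2+ documentation, not taken from this diff.

```ruby
require "ldclient-rb"

# Assumed replacement for the deprecated LaunchDarkly::FileDataSource.factory.
source = LaunchDarkly::Integrations::FileData.data_source(
  paths: ["flags.yml"],  # hypothetical local flag file
  auto_update: true      # reload flag data when the file changes
)

config = LaunchDarkly::Config.new(data_source: source, send_events: false)
client = LaunchDarkly::LDClient.new("sdk-key", config)
```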
data/spec/fixtures/feature.json
DELETED
@@ -1,37 +0,0 @@
-{
-  "key":"test-feature-flag",
-  "version":11,
-  "on":true,
-  "prerequisites":[
-
-  ],
-  "salt":"718ea30a918a4eba8734b57ab1a93227",
-  "sel":"fe1244e5378c4f99976c9634e33667c6",
-  "targets":[
-    {
-      "values":[
-        "alice"
-      ],
-      "variation":0
-    },
-    {
-      "values":[
-        "bob"
-      ],
-      "variation":1
-    }
-  ],
-  "rules":[
-
-  ],
-  "fallthrough":{
-    "variation":0
-  },
-  "offVariation":1,
-  "variations":[
-    true,
-    false
-  ],
-  "trackEvents": true,
-  "deleted":false
-}
data/spec/fixtures/feature1.json
DELETED
@@ -1,36 +0,0 @@
-{
-  "key":"test-feature-flag1",
-  "version":5,
-  "on":false,
-  "prerequisites":[
-
-  ],
-  "salt":"718ea30a918a4eba8734b57ab1a93227",
-  "sel":"fe1244e5378c4f99976c9634e33667c6",
-  "targets":[
-    {
-      "values":[
-        "alice"
-      ],
-      "variation":0
-    },
-    {
-      "values":[
-        "bob"
-      ],
-      "variation":1
-    }
-  ],
-  "rules":[
-
-  ],
-  "fallthrough":{
-    "variation":0
-  },
-  "offVariation":1,
-  "variations":[
-    true,
-    false
-  ],
-  "deleted":false
-}