launchdarkly-server-sdk 5.5.7
- checksums.yaml +7 -0
- data/.circleci/config.yml +134 -0
- data/.github/ISSUE_TEMPLATE/bug_report.md +37 -0
- data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
- data/.gitignore +15 -0
- data/.hound.yml +2 -0
- data/.rspec +2 -0
- data/.rubocop.yml +600 -0
- data/.simplecov +4 -0
- data/.yardopts +9 -0
- data/CHANGELOG.md +261 -0
- data/CODEOWNERS +1 -0
- data/CONTRIBUTING.md +37 -0
- data/Gemfile +3 -0
- data/Gemfile.lock +102 -0
- data/LICENSE.txt +13 -0
- data/README.md +56 -0
- data/Rakefile +5 -0
- data/azure-pipelines.yml +51 -0
- data/ext/mkrf_conf.rb +11 -0
- data/launchdarkly-server-sdk.gemspec +40 -0
- data/lib/ldclient-rb.rb +29 -0
- data/lib/ldclient-rb/cache_store.rb +45 -0
- data/lib/ldclient-rb/config.rb +411 -0
- data/lib/ldclient-rb/evaluation.rb +455 -0
- data/lib/ldclient-rb/event_summarizer.rb +55 -0
- data/lib/ldclient-rb/events.rb +468 -0
- data/lib/ldclient-rb/expiring_cache.rb +77 -0
- data/lib/ldclient-rb/file_data_source.rb +312 -0
- data/lib/ldclient-rb/flags_state.rb +76 -0
- data/lib/ldclient-rb/impl.rb +13 -0
- data/lib/ldclient-rb/impl/integrations/consul_impl.rb +158 -0
- data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +228 -0
- data/lib/ldclient-rb/impl/integrations/redis_impl.rb +155 -0
- data/lib/ldclient-rb/impl/store_client_wrapper.rb +47 -0
- data/lib/ldclient-rb/impl/store_data_set_sorter.rb +55 -0
- data/lib/ldclient-rb/in_memory_store.rb +100 -0
- data/lib/ldclient-rb/integrations.rb +55 -0
- data/lib/ldclient-rb/integrations/consul.rb +38 -0
- data/lib/ldclient-rb/integrations/dynamodb.rb +47 -0
- data/lib/ldclient-rb/integrations/redis.rb +55 -0
- data/lib/ldclient-rb/integrations/util/store_wrapper.rb +230 -0
- data/lib/ldclient-rb/interfaces.rb +153 -0
- data/lib/ldclient-rb/ldclient.rb +424 -0
- data/lib/ldclient-rb/memoized_value.rb +32 -0
- data/lib/ldclient-rb/newrelic.rb +17 -0
- data/lib/ldclient-rb/non_blocking_thread_pool.rb +46 -0
- data/lib/ldclient-rb/polling.rb +78 -0
- data/lib/ldclient-rb/redis_store.rb +87 -0
- data/lib/ldclient-rb/requestor.rb +101 -0
- data/lib/ldclient-rb/simple_lru_cache.rb +25 -0
- data/lib/ldclient-rb/stream.rb +141 -0
- data/lib/ldclient-rb/user_filter.rb +51 -0
- data/lib/ldclient-rb/util.rb +50 -0
- data/lib/ldclient-rb/version.rb +3 -0
- data/scripts/gendocs.sh +11 -0
- data/scripts/release.sh +27 -0
- data/spec/config_spec.rb +63 -0
- data/spec/evaluation_spec.rb +739 -0
- data/spec/event_summarizer_spec.rb +63 -0
- data/spec/events_spec.rb +642 -0
- data/spec/expiring_cache_spec.rb +76 -0
- data/spec/feature_store_spec_base.rb +213 -0
- data/spec/file_data_source_spec.rb +255 -0
- data/spec/fixtures/feature.json +37 -0
- data/spec/fixtures/feature1.json +36 -0
- data/spec/fixtures/user.json +9 -0
- data/spec/flags_state_spec.rb +81 -0
- data/spec/http_util.rb +109 -0
- data/spec/in_memory_feature_store_spec.rb +12 -0
- data/spec/integrations/consul_feature_store_spec.rb +42 -0
- data/spec/integrations/dynamodb_feature_store_spec.rb +105 -0
- data/spec/integrations/store_wrapper_spec.rb +276 -0
- data/spec/ldclient_spec.rb +471 -0
- data/spec/newrelic_spec.rb +5 -0
- data/spec/polling_spec.rb +120 -0
- data/spec/redis_feature_store_spec.rb +95 -0
- data/spec/requestor_spec.rb +214 -0
- data/spec/segment_store_spec_base.rb +95 -0
- data/spec/simple_lru_cache_spec.rb +24 -0
- data/spec/spec_helper.rb +9 -0
- data/spec/store_spec.rb +10 -0
- data/spec/stream_spec.rb +60 -0
- data/spec/user_filter_spec.rb +91 -0
- data/spec/util_spec.rb +17 -0
- data/spec/version_spec.rb +7 -0
- metadata +375 -0
data/spec/expiring_cache_spec.rb
@@ -0,0 +1,76 @@
require 'timecop'

describe LaunchDarkly::ExpiringCache do
  subject { LaunchDarkly::ExpiringCache }

  before(:each) do
    Timecop.freeze(Time.now)
  end

  after(:each) do
    Timecop.return
  end

  it "evicts entries based on TTL" do
    c = subject.new(3, 300)
    c[:a] = 1
    c[:b] = 2

    Timecop.freeze(Time.now + 330)

    c[:c] = 3

    expect(c[:a]).to be nil
    expect(c[:b]).to be nil
    expect(c[:c]).to eq 3
  end

  it "evicts entries based on max size" do
    c = subject.new(2, 300)
    c[:a] = 1
    c[:b] = 2
    c[:c] = 3

    expect(c[:a]).to be nil
    expect(c[:b]).to eq 2
    expect(c[:c]).to eq 3
  end

  it "does not reset LRU on get" do
    c = subject.new(2, 300)
    c[:a] = 1
    c[:b] = 2
    c[:a]
    c[:c] = 3

    expect(c[:a]).to be nil
    expect(c[:b]).to eq 2
    expect(c[:c]).to eq 3
  end

  it "resets LRU on put" do
    c = subject.new(2, 300)
    c[:a] = 1
    c[:b] = 2
    c[:a] = 1
    c[:c] = 3

    expect(c[:a]).to eq 1
    expect(c[:b]).to be nil
    expect(c[:c]).to eq 3
  end

  it "resets TTL on put" do
    c = subject.new(3, 300)
    c[:a] = 1
    c[:b] = 2

    Timecop.freeze(Time.now + 330)
    c[:a] = 1
    c[:c] = 3

    expect(c[:a]).to eq 1
    expect(c[:b]).to be nil
    expect(c[:c]).to eq 3
  end
end
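Taken together, these examples pin down ExpiringCache's contract: a fixed capacity, a TTL in seconds, reads that return nil for expired entries without refreshing recency, and writes that reset both recency and TTL and evict whatever no longer fits. Below is a minimal sketch of a cache with that contract, written only to illustrate the behavior the spec exercises; the class name and internals are hypothetical and need not match the SDK's own lib/ldclient-rb/expiring_cache.rb.

# Illustrative only -- not the SDK's implementation.
class SimpleExpiringCache
  def initialize(max_size, ttl_seconds)
    @max_size = max_size
    @ttl = ttl_seconds
    @data = {} # Ruby hashes keep insertion order, which doubles as recency order
  end

  # Reads do not refresh recency; expired entries read as nil.
  def [](key)
    entry = @data[key]
    return nil if entry.nil? || entry[:expires_at] <= Time.now
    entry[:value]
  end

  # Writes reset both recency and TTL, then evict expired and excess entries.
  def []=(key, value)
    @data.delete(key) # re-inserting moves the key to the most-recent end
    @data[key] = { value: value, expires_at: Time.now + @ttl }
    @data.delete_if { |_, e| e[:expires_at] <= Time.now }
    @data.shift while @data.size > @max_size # drop the least recently written
  end
end

Running the five examples above against this sketch gives the same results, which is a useful sanity check when reasoning about the cache's eviction rules.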
data/spec/feature_store_spec_base.rb
@@ -0,0 +1,213 @@
require "spec_helper"

shared_examples "feature_store" do |create_store_method, clear_data_method|

  # Rather than testing with feature flag or segment data, we'll use this fake data kind
  # to make it clear that feature stores need to be able to handle arbitrary data.
  let(:things_kind) { { namespace: "things" } }

  let(:key1) { "thing1" }
  let(:thing1) {
    {
      key: key1,
      name: "Thing 1",
      version: 11,
      deleted: false
    }
  }
  let(:unused_key) { "no" }

  let(:create_store) { create_store_method } # just to avoid a scope issue
  let(:clear_data) { clear_data_method }

  def with_store(opts = {})
    s = create_store.call(opts)
    begin
      yield s
    ensure
      s.stop
    end
  end

  def with_inited_store(things)
    things_hash = {}
    things.each { |thing| things_hash[thing[:key].to_sym] = thing }

    with_store do |s|
      s.init({ things_kind => things_hash })
      yield s
    end
  end

  def new_version_plus(f, deltaVersion, attrs = {})
    f.clone.merge({ version: f[:version] + deltaVersion }).merge(attrs)
  end

  before(:each) do
    clear_data.call if !clear_data.nil?
  end

  # This block of tests is only run if the clear_data method is defined, meaning that this is a persistent store
  # that operates on a database that can be shared with other store instances (as opposed to the in-memory store,
  # which has its own private storage).
  if !clear_data_method.nil?
    it "is not initialized by default" do
      with_store do |store|
        expect(store.initialized?).to eq false
      end
    end

    it "can detect if another instance has initialized the store" do
      with_store do |store1|
        store1.init({})
        with_store do |store2|
          expect(store2.initialized?).to eq true
        end
      end
    end

    it "can read data written by another instance" do
      with_store do |store1|
        store1.init({ things_kind => { key1.to_sym => thing1 } })
        with_store do |store2|
          expect(store2.get(things_kind, key1)).to eq thing1
        end
      end
    end

    it "is independent from other stores with different prefixes" do
      with_store({ prefix: "a" }) do |store_a|
        store_a.init({ things_kind => { key1.to_sym => thing1 } })
        with_store({ prefix: "b" }) do |store_b|
          store_b.init({ things_kind => {} })
        end
        with_store({ prefix: "b" }) do |store_b1| # this ensures we're not just reading cached data
          expect(store_b1.get(things_kind, key1)).to be_nil
          expect(store_a.get(things_kind, key1)).to eq thing1
        end
      end
    end
  end

  it "is initialized after calling init" do
    with_inited_store([]) do |store|
      expect(store.initialized?).to eq true
    end
  end

  it "can get existing item with symbol key" do
    with_inited_store([ thing1 ]) do |store|
      expect(store.get(things_kind, key1.to_sym)).to eq thing1
    end
  end

  it "can get existing item with string key" do
    with_inited_store([ thing1 ]) do |store|
      expect(store.get(things_kind, key1.to_s)).to eq thing1
    end
  end

  it "gets nil for nonexisting item" do
    with_inited_store([ thing1 ]) do |store|
      expect(store.get(things_kind, unused_key)).to be_nil
    end
  end

  it "returns nil for deleted item" do
    deleted_thing = thing1.clone.merge({ deleted: true })
    with_inited_store([ deleted_thing ]) do |store|
      expect(store.get(things_kind, key1)).to be_nil
    end
  end

  it "can get all items" do
    key2 = "thing2"
    thing2 = {
      key: key2,
      name: "Thing 2",
      version: 22,
      deleted: false
    }
    with_inited_store([ thing1, thing2 ]) do |store|
      expect(store.all(things_kind)).to eq ({ key1.to_sym => thing1, key2.to_sym => thing2 })
    end
  end

  it "filters out deleted items when getting all" do
    key2 = "thing2"
    thing2 = {
      key: key2,
      name: "Thing 2",
      version: 22,
      deleted: true
    }
    with_inited_store([ thing1, thing2 ]) do |store|
      expect(store.all(things_kind)).to eq ({ key1.to_sym => thing1 })
    end
  end

  it "can add new item" do
    with_inited_store([]) do |store|
      store.upsert(things_kind, thing1)
      expect(store.get(things_kind, key1)).to eq thing1
    end
  end

  it "can update item with newer version" do
    with_inited_store([ thing1 ]) do |store|
      thing1_mod = new_version_plus(thing1, 1, { name: thing1[:name] + ' updated' })
      store.upsert(things_kind, thing1_mod)
      expect(store.get(things_kind, key1)).to eq thing1_mod
    end
  end

  it "cannot update item with same version" do
    with_inited_store([ thing1 ]) do |store|
      thing1_mod = thing1.clone.merge({ name: thing1[:name] + ' updated' })
      store.upsert(things_kind, thing1_mod)
      expect(store.get(things_kind, key1)).to eq thing1
    end
  end

  it "cannot update feature with older version" do
    with_inited_store([ thing1 ]) do |store|
      thing1_mod = new_version_plus(thing1, -1, { name: thing1[:name] + ' updated' })
      store.upsert(things_kind, thing1_mod)
      expect(store.get(things_kind, key1)).to eq thing1
    end
  end

  it "can delete item with newer version" do
    with_inited_store([ thing1 ]) do |store|
      store.delete(things_kind, key1, thing1[:version] + 1)
      expect(store.get(things_kind, key1)).to be_nil
    end
  end

  it "cannot delete item with same version" do
    with_inited_store([ thing1 ]) do |store|
      store.delete(things_kind, key1, thing1[:version])
      expect(store.get(things_kind, key1)).to eq thing1
    end
  end

  it "cannot delete item with older version" do
    with_inited_store([ thing1 ]) do |store|
      store.delete(things_kind, key1, thing1[:version] - 1)
      expect(store.get(things_kind, key1)).to eq thing1
    end
  end

  it "stores Unicode data correctly" do
    flag = {
      key: "my-fancy-flag",
      name: "Tęst Feåtūre Flæg😺",
      version: 1,
      deleted: false
    }
    with_inited_store([]) do |store|
      store.upsert(LaunchDarkly::FEATURES, flag)
      expect(store.get(LaunchDarkly::FEATURES, flag[:key])).to eq flag
    end
  end
end
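These shared examples define the contract every feature store in the gem is expected to satisfy: versioned upserts and deletes, tombstoning of deleted items, and (for persistent stores) visibility of data written by other instances under the same prefix. The concrete specs in the manifest (in_memory_feature_store_spec.rb, redis_feature_store_spec.rb, consul_feature_store_spec.rb, dynamodb_feature_store_spec.rb) include them with a store factory and, for persistent stores, a data-clearing hook. As a rough, hypothetical illustration of that wiring (the real spec files may differ in detail), an in-memory run might look like this:

require "spec_helper"
require "feature_store_spec_base"

# Hypothetical wiring -- see in_memory_feature_store_spec.rb for the actual version.
describe LaunchDarkly::InMemoryFeatureStore do
  # Passing nil for clear_data_method skips the shared-database tests, which
  # don't apply to a store with purely private in-process storage.
  include_examples "feature_store", lambda { |opts| LaunchDarkly::InMemoryFeatureStore.new }, nil
end

A Redis- or DynamoDB-backed spec would instead pass a second lambda that wipes the relevant keys or table items, so the "another instance" examples run against a genuinely shared database.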
data/spec/file_data_source_spec.rb
@@ -0,0 +1,255 @@
require "spec_helper"
require "tempfile"

describe LaunchDarkly::FileDataSource do
  let(:full_flag_1_key) { "flag1" }
  let(:full_flag_1_value) { "on" }
  let(:flag_value_1_key) { "flag2" }
  let(:flag_value_1) { "value2" }
  let(:all_flag_keys) { [ full_flag_1_key.to_sym, flag_value_1_key.to_sym ] }
  let(:full_segment_1_key) { "seg1" }
  let(:all_segment_keys) { [ full_segment_1_key.to_sym ] }

  let(:flag_only_json) { <<-EOF
    {
      "flags": {
        "flag1": {
          "key": "flag1",
          "on": true,
          "fallthrough": {
            "variation": 2
          },
          "variations": [ "fall", "off", "on" ]
        }
      }
    }
  EOF
  }

  let(:segment_only_json) { <<-EOF
    {
      "segments": {
        "seg1": {
          "key": "seg1",
          "include": ["user1"]
        }
      }
    }
  EOF
  }

  let(:all_properties_json) { <<-EOF
    {
      "flags": {
        "flag1": {
          "key": "flag1",
          "on": true,
          "fallthrough": {
            "variation": 2
          },
          "variations": [ "fall", "off", "on" ]
        }
      },
      "flagValues": {
        "flag2": "value2"
      },
      "segments": {
        "seg1": {
          "key": "seg1",
          "include": ["user1"]
        }
      }
    }
  EOF
  }

  let(:all_properties_yaml) { <<-EOF
---
flags:
  flag1:
    key: flag1
    "on": true
flagValues:
  flag2: value2
segments:
  seg1:
    key: seg1
    include: ["user1"]
  EOF
  }

  let(:bad_file_path) { "no-such-file" }

  before do
    @config = LaunchDarkly::Config.new
    @store = @config.feature_store
    @tmp_dir = Dir.mktmpdir
  end

  after do
    FileUtils.rm_rf(@tmp_dir)
  end

  def make_temp_file(content)
    # Note that we don't create our files in the default temp file directory, but rather in an empty directory
    # that we made. That's because (depending on the platform) the temp file directory may contain huge numbers
    # of files, which can make the file watcher perform poorly enough to break the tests.
    file = Tempfile.new('flags', @tmp_dir)
    IO.write(file, content)
    file
  end

  def with_data_source(options)
    factory = LaunchDarkly::FileDataSource.factory(options)
    ds = factory.call('', @config)
    begin
      yield ds
    ensure
      ds.stop
    end
  end

  it "doesn't load flags prior to start" do
    file = make_temp_file('{"flagValues":{"key":"value"}}')
    with_data_source({ paths: [ file.path ] }) do |ds|
      expect(@store.initialized?).to eq(false)
      expect(@store.all(LaunchDarkly::FEATURES)).to eq({})
      expect(@store.all(LaunchDarkly::SEGMENTS)).to eq({})
    end
  end

  it "loads flags on start - from JSON" do
    file = make_temp_file(all_properties_json)
    with_data_source({ paths: [ file.path ] }) do |ds|
      ds.start
      expect(@store.initialized?).to eq(true)
      expect(@store.all(LaunchDarkly::FEATURES).keys).to eq(all_flag_keys)
      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq(all_segment_keys)
    end
  end

  it "loads flags on start - from YAML" do
    file = make_temp_file(all_properties_yaml)
    with_data_source({ paths: [ file.path ] }) do |ds|
      ds.start
      expect(@store.initialized?).to eq(true)
      expect(@store.all(LaunchDarkly::FEATURES).keys).to eq(all_flag_keys)
      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq(all_segment_keys)
    end
  end

  it "sets start event and initialized on successful load" do
    file = make_temp_file(all_properties_json)
    with_data_source({ paths: [ file.path ] }) do |ds|
      event = ds.start
      expect(event.set?).to eq(true)
      expect(ds.initialized?).to eq(true)
    end
  end

  it "sets start event and does not set initialized on unsuccessful load" do
    with_data_source({ paths: [ bad_file_path ] }) do |ds|
      event = ds.start
      expect(event.set?).to eq(true)
      expect(ds.initialized?).to eq(false)
    end
  end

  it "can load multiple files" do
    file1 = make_temp_file(flag_only_json)
    file2 = make_temp_file(segment_only_json)
    with_data_source({ paths: [ file1.path, file2.path ] }) do |ds|
      ds.start
      expect(@store.initialized?).to eq(true)
      expect(@store.all(LaunchDarkly::FEATURES).keys).to eq([ full_flag_1_key.to_sym ])
      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([ full_segment_1_key.to_sym ])
    end
  end

  it "does not allow duplicate keys" do
    file1 = make_temp_file(flag_only_json)
    file2 = make_temp_file(flag_only_json)
    with_data_source({ paths: [ file1.path, file2.path ] }) do |ds|
      ds.start
      expect(@store.initialized?).to eq(false)
      expect(@store.all(LaunchDarkly::FEATURES).keys).to eq([])
    end
  end

  it "does not reload modified file if auto-update is off" do
    file = make_temp_file(flag_only_json)

    with_data_source({ paths: [ file.path ] }) do |ds|
      event = ds.start
      expect(event.set?).to eq(true)
      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([])

      IO.write(file, all_properties_json)
      sleep(0.5)
      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([])
    end
  end

  def test_auto_reload(options)
    file = make_temp_file(flag_only_json)
    options[:paths] = [ file.path ]

    with_data_source(options) do |ds|
      event = ds.start
      expect(event.set?).to eq(true)
      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([])

      sleep(1)
      IO.write(file, all_properties_json)

      max_time = 10
      ok = wait_for_condition(10) { @store.all(LaunchDarkly::SEGMENTS).keys == all_segment_keys }
      expect(ok).to eq(true), "Waited #{max_time}s after modifying file and it did not reload"
    end
  end

  it "reloads modified file if auto-update is on" do
    test_auto_reload({ auto_update: true })
  end

  it "reloads modified file in polling mode" do
    test_auto_reload({ auto_update: true, force_polling: true, poll_interval: 0.1 })
  end

  it "evaluates simplified flag with client as expected" do
    file = make_temp_file(all_properties_json)
    factory = LaunchDarkly::FileDataSource.factory({ paths: file.path })
    config = LaunchDarkly::Config.new(send_events: false, data_source: factory)
    client = LaunchDarkly::LDClient.new('sdkKey', config)

    begin
      value = client.variation(flag_value_1_key, { key: 'user' }, '')
      expect(value).to eq(flag_value_1)
    ensure
      client.close
    end
  end

  it "evaluates full flag with client as expected" do
    file = make_temp_file(all_properties_json)
    factory = LaunchDarkly::FileDataSource.factory({ paths: file.path })
    config = LaunchDarkly::Config.new(send_events: false, data_source: factory)
    client = LaunchDarkly::LDClient.new('sdkKey', config)

    begin
      value = client.variation(full_flag_1_key, { key: 'user' }, '')
      expect(value).to eq(full_flag_1_value)
    ensure
      client.close
    end
  end

  def wait_for_condition(max_time)
    deadline = Time.now + max_time
    while Time.now < deadline
      return true if yield
      sleep(0.1)
    end
    false
  end
end
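Outside the test suite, the same factory exercised by these examples is how an application points the SDK at a local flag file instead of LaunchDarkly's servers. A hedged usage sketch follows, using only calls that appear in the spec above; the file path and SDK key are placeholders.

require "ldclient-rb"

# Placeholder path and key; the file may be JSON or YAML shaped like the fixtures above.
factory = LaunchDarkly::FileDataSource.factory(
  paths: [ "./flags.json" ],
  auto_update: true # pick up edits to the file, as the auto-reload tests exercise
)
config = LaunchDarkly::Config.new(send_events: false, data_source: factory)
client = LaunchDarkly::LDClient.new("fake-sdk-key", config)

begin
  # With a "flagValues" entry like the fixture's, this returns the literal value from the file.
  puts client.variation("flag2", { key: "example-user" }, "default")
ensure
  client.close
end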