launchdarkly-server-sdk 5.5.7
- checksums.yaml +7 -0
- data/.circleci/config.yml +134 -0
- data/.github/ISSUE_TEMPLATE/bug_report.md +37 -0
- data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
- data/.gitignore +15 -0
- data/.hound.yml +2 -0
- data/.rspec +2 -0
- data/.rubocop.yml +600 -0
- data/.simplecov +4 -0
- data/.yardopts +9 -0
- data/CHANGELOG.md +261 -0
- data/CODEOWNERS +1 -0
- data/CONTRIBUTING.md +37 -0
- data/Gemfile +3 -0
- data/Gemfile.lock +102 -0
- data/LICENSE.txt +13 -0
- data/README.md +56 -0
- data/Rakefile +5 -0
- data/azure-pipelines.yml +51 -0
- data/ext/mkrf_conf.rb +11 -0
- data/launchdarkly-server-sdk.gemspec +40 -0
- data/lib/ldclient-rb.rb +29 -0
- data/lib/ldclient-rb/cache_store.rb +45 -0
- data/lib/ldclient-rb/config.rb +411 -0
- data/lib/ldclient-rb/evaluation.rb +455 -0
- data/lib/ldclient-rb/event_summarizer.rb +55 -0
- data/lib/ldclient-rb/events.rb +468 -0
- data/lib/ldclient-rb/expiring_cache.rb +77 -0
- data/lib/ldclient-rb/file_data_source.rb +312 -0
- data/lib/ldclient-rb/flags_state.rb +76 -0
- data/lib/ldclient-rb/impl.rb +13 -0
- data/lib/ldclient-rb/impl/integrations/consul_impl.rb +158 -0
- data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +228 -0
- data/lib/ldclient-rb/impl/integrations/redis_impl.rb +155 -0
- data/lib/ldclient-rb/impl/store_client_wrapper.rb +47 -0
- data/lib/ldclient-rb/impl/store_data_set_sorter.rb +55 -0
- data/lib/ldclient-rb/in_memory_store.rb +100 -0
- data/lib/ldclient-rb/integrations.rb +55 -0
- data/lib/ldclient-rb/integrations/consul.rb +38 -0
- data/lib/ldclient-rb/integrations/dynamodb.rb +47 -0
- data/lib/ldclient-rb/integrations/redis.rb +55 -0
- data/lib/ldclient-rb/integrations/util/store_wrapper.rb +230 -0
- data/lib/ldclient-rb/interfaces.rb +153 -0
- data/lib/ldclient-rb/ldclient.rb +424 -0
- data/lib/ldclient-rb/memoized_value.rb +32 -0
- data/lib/ldclient-rb/newrelic.rb +17 -0
- data/lib/ldclient-rb/non_blocking_thread_pool.rb +46 -0
- data/lib/ldclient-rb/polling.rb +78 -0
- data/lib/ldclient-rb/redis_store.rb +87 -0
- data/lib/ldclient-rb/requestor.rb +101 -0
- data/lib/ldclient-rb/simple_lru_cache.rb +25 -0
- data/lib/ldclient-rb/stream.rb +141 -0
- data/lib/ldclient-rb/user_filter.rb +51 -0
- data/lib/ldclient-rb/util.rb +50 -0
- data/lib/ldclient-rb/version.rb +3 -0
- data/scripts/gendocs.sh +11 -0
- data/scripts/release.sh +27 -0
- data/spec/config_spec.rb +63 -0
- data/spec/evaluation_spec.rb +739 -0
- data/spec/event_summarizer_spec.rb +63 -0
- data/spec/events_spec.rb +642 -0
- data/spec/expiring_cache_spec.rb +76 -0
- data/spec/feature_store_spec_base.rb +213 -0
- data/spec/file_data_source_spec.rb +255 -0
- data/spec/fixtures/feature.json +37 -0
- data/spec/fixtures/feature1.json +36 -0
- data/spec/fixtures/user.json +9 -0
- data/spec/flags_state_spec.rb +81 -0
- data/spec/http_util.rb +109 -0
- data/spec/in_memory_feature_store_spec.rb +12 -0
- data/spec/integrations/consul_feature_store_spec.rb +42 -0
- data/spec/integrations/dynamodb_feature_store_spec.rb +105 -0
- data/spec/integrations/store_wrapper_spec.rb +276 -0
- data/spec/ldclient_spec.rb +471 -0
- data/spec/newrelic_spec.rb +5 -0
- data/spec/polling_spec.rb +120 -0
- data/spec/redis_feature_store_spec.rb +95 -0
- data/spec/requestor_spec.rb +214 -0
- data/spec/segment_store_spec_base.rb +95 -0
- data/spec/simple_lru_cache_spec.rb +24 -0
- data/spec/spec_helper.rb +9 -0
- data/spec/store_spec.rb +10 -0
- data/spec/stream_spec.rb +60 -0
- data/spec/user_filter_spec.rb +91 -0
- data/spec/util_spec.rb +17 -0
- data/spec/version_spec.rb +7 -0
- metadata +375 -0
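The spec files and fixtures below exercise the SDK's public surface. As a point of reference, here is a minimal, hypothetical usage sketch of the client this gem ships; the SDK key and flag key are placeholders, not part of the release:

```ruby
require "ldclient-rb"

# Placeholder SDK key and flag key, for illustration only.
client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY")
user = { key: "alice" }

# variation returns the flag's value for this user, or the default on error.
value = client.variation("test-feature-flag", user, false)
puts "flag is #{value}"

client.close
```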
data/spec/fixtures/feature.json ADDED
@@ -0,0 +1,37 @@
```json
{
  "key":"test-feature-flag",
  "version":11,
  "on":true,
  "prerequisites":[

  ],
  "salt":"718ea30a918a4eba8734b57ab1a93227",
  "sel":"fe1244e5378c4f99976c9634e33667c6",
  "targets":[
    {
      "values":[
        "alice"
      ],
      "variation":0
    },
    {
      "values":[
        "bob"
      ],
      "variation":1
    }
  ],
  "rules":[

  ],
  "fallthrough":{
    "variation":0
  },
  "offVariation":1,
  "variations":[
    true,
    false
  ],
  "trackEvents": true,
  "deleted":false
}
```
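Read as flag data, this fixture targets the user key "alice" to variation 0 (true) and "bob" to variation 1 (false); with the flag on, everyone else falls through to variation 0, and with it off everyone gets the off variation (false). A small illustrative sketch (not the SDK's evaluator) that loads the fixture and reads those fields, assuming it is run from the gem root:

```ruby
require "json"

# Parse the fixture with symbol keys, as the specs in this gem do.
flag = JSON.parse(File.read("spec/fixtures/feature.json"), symbolize_names: true)

# Map each individually targeted user key to its variation value.
targeted = flag[:targets].flat_map do |t|
  t[:values].map { |user_key| [user_key, flag[:variations][t[:variation]]] }
end.to_h

targeted                                            # => {"alice"=>true, "bob"=>false}
flag[:variations][flag[:fallthrough][:variation]]   # => true  (everyone else, flag on)
flag[:variations][flag[:offVariation]]              # => false (everyone, flag off)
```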
data/spec/fixtures/feature1.json ADDED
@@ -0,0 +1,36 @@
```json
{
  "key":"test-feature-flag1",
  "version":5,
  "on":false,
  "prerequisites":[

  ],
  "salt":"718ea30a918a4eba8734b57ab1a93227",
  "sel":"fe1244e5378c4f99976c9634e33667c6",
  "targets":[
    {
      "values":[
        "alice"
      ],
      "variation":0
    },
    {
      "values":[
        "bob"
      ],
      "variation":1
    }
  ],
  "rules":[

  ],
  "fallthrough":{
    "variation":0
  },
  "offVariation":1,
  "variations":[
    true,
    false
  ],
  "deleted":false
}
```
data/spec/flags_state_spec.rb ADDED
@@ -0,0 +1,81 @@
```ruby
require "spec_helper"
require "json"

describe LaunchDarkly::FeatureFlagsState do
  subject { LaunchDarkly::FeatureFlagsState }

  it "can get flag value" do
    state = subject.new(true)
    flag = { key: 'key' }
    state.add_flag(flag, 'value', 1)

    expect(state.flag_value('key')).to eq 'value'
  end

  it "returns nil for unknown flag" do
    state = subject.new(true)

    expect(state.flag_value('key')).to be nil
  end

  it "can be converted to values map" do
    state = subject.new(true)
    flag1 = { key: 'key1' }
    flag2 = { key: 'key2' }
    state.add_flag(flag1, 'value1', 0)
    state.add_flag(flag2, 'value2', 1)

    expect(state.values_map).to eq({ 'key1' => 'value1', 'key2' => 'value2' })
  end

  it "can be converted to JSON structure" do
    state = subject.new(true)
    flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false }
    flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 }
    state.add_flag(flag1, 'value1', 0)
    state.add_flag(flag2, 'value2', 1)

    result = state.as_json
    expect(result).to eq({
      'key1' => 'value1',
      'key2' => 'value2',
      '$flagsState' => {
        'key1' => {
          :variation => 0,
          :version => 100
        },
        'key2' => {
          :variation => 1,
          :version => 200,
          :trackEvents => true,
          :debugEventsUntilDate => 1000
        }
      },
      '$valid' => true
    })
  end

  it "can be converted to JSON string" do
    state = subject.new(true)
    flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false }
    flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 }
    state.add_flag(flag1, 'value1', 0)
    state.add_flag(flag2, 'value2', 1)

    object = state.as_json
    str = state.to_json
    expect(object.to_json).to eq(str)
  end

  it "uses our custom serializer with JSON.generate" do
    state = subject.new(true)
    flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false }
    flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 }
    state.add_flag(flag1, 'value1', 0)
    state.add_flag(flag2, 'value2', 1)

    stringFromToJson = state.to_json
    stringFromGenerate = JSON.generate(state)
    expect(stringFromGenerate).to eq(stringFromToJson)
  end
end
```
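In normal use a FeatureFlagsState like the one exercised above is not built by hand; it comes from LDClient#all_flags_state, and its JSON form is what server-side bootstrapping of the JavaScript SDK consumes. A hedged sketch of that flow, with a placeholder SDK key:

```ruby
require "ldclient-rb"

client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY")  # placeholder key
user = { key: "alice" }

# all_flags_state evaluates every flag for this user and returns a FeatureFlagsState.
state = client.all_flags_state(user)

state.flag_value("key1")   # a single flag's value, or nil if unknown
state.values_map           # { flag_key => value } for all evaluated flags

# The serialized form includes the $flagsState/$valid metadata shown in the spec above.
json_for_bootstrap = state.to_json
```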
data/spec/http_util.rb ADDED
@@ -0,0 +1,109 @@
```ruby
require "webrick"
require "webrick/httpproxy"
require "webrick/https"

class StubHTTPServer
  attr_reader :requests

  @@next_port = 50000

  def initialize
    @port = StubHTTPServer.next_port
    begin
      base_opts = {
        BindAddress: '127.0.0.1',
        Port: @port,
        AccessLog: [],
        Logger: NullLogger.new,
        RequestCallback: method(:record_request)
      }
      @server = create_server(@port, base_opts)
    rescue Errno::EADDRINUSE
      @port = StubHTTPServer.next_port
      retry
    end
    @requests = []
    @requests_queue = Queue.new
  end

  def self.next_port
    p = @@next_port
    @@next_port = (p + 1 < 60000) ? p + 1 : 50000
    p
  end

  def create_server(port, base_opts)
    WEBrick::HTTPServer.new(base_opts)
  end

  def start
    Thread.new { @server.start }
  end

  def stop
    @server.shutdown
  end

  def base_uri
    URI("http://127.0.0.1:#{@port}")
  end

  def setup_response(uri_path, &action)
    @server.mount_proc(uri_path, action)
  end

  def setup_ok_response(uri_path, body, content_type=nil, headers={})
    setup_response(uri_path) do |req, res|
      res.status = 200
      res.content_type = content_type if !content_type.nil?
      res.body = body
      headers.each { |n, v| res[n] = v }
    end
  end

  def record_request(req, res)
    @requests.push(req)
    @requests_queue << req
  end

  def await_request
    @requests_queue.pop
  end
end

class StubProxyServer < StubHTTPServer
  attr_reader :request_count
  attr_accessor :connect_status

  def initialize
    super
    @request_count = 0
  end

  def create_server(port, base_opts)
    WEBrick::HTTPProxyServer.new(base_opts.merge({
      ProxyContentHandler: proc do |req,res|
        if !@connect_status.nil?
          res.status = @connect_status
        end
        @request_count += 1
      end
    }))
  end
end

class NullLogger
  def method_missing(*)
    self
  end
end

def with_server(server = nil)
  server = StubHTTPServer.new if server.nil?
  begin
    server.start
    yield server
  ensure
    server.stop
  end
end
```
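A sketch of how these helpers are typically driven from a spec: serve a canned response, issue a plain Net::HTTP request against the stub, and inspect what it recorded. Nothing here depends on the SDK itself; the require assumes the gem's spec directory is on the load path, as it is when the suite runs.

```ruby
require "net/http"
require "http_util"   # the helper file shown above

with_server do |server|
  server.setup_ok_response("/status", '{"ok":true}', "application/json")

  res = Net::HTTP.get_response(URI.join(server.base_uri.to_s, "/status"))

  res.code                    # => "200"
  res.body                    # => '{"ok":true}'
  server.await_request.path   # => "/status" (the request the stub recorded)
end
```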
data/spec/in_memory_feature_store_spec.rb ADDED
@@ -0,0 +1,12 @@
```ruby
require "feature_store_spec_base"
require "spec_helper"

def create_in_memory_store(opts = {})
  LaunchDarkly::InMemoryFeatureStore.new
end

describe LaunchDarkly::InMemoryFeatureStore do
  subject { LaunchDarkly::InMemoryFeatureStore }

  include_examples "feature_store", method(:create_in_memory_store)
end
```
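The in-memory store is the SDK's default feature store, so the production equivalent of this spec setup is just the default Config; passing it explicitly (placeholder SDK key) looks like this:

```ruby
require "ldclient-rb"

# Explicitly passing the in-memory store is equivalent to the default configuration.
store = LaunchDarkly::InMemoryFeatureStore.new
config = LaunchDarkly::Config.new(feature_store: store)
client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY", config)  # placeholder key
```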
data/spec/integrations/consul_feature_store_spec.rb ADDED
@@ -0,0 +1,42 @@
```ruby
require "feature_store_spec_base"
require "diplomat"
require "spec_helper"


$my_prefix = 'testprefix'
$null_log = ::Logger.new($stdout)
$null_log.level = ::Logger::FATAL

$consul_base_opts = {
  prefix: $my_prefix,
  logger: $null_log
}

def create_consul_store(opts = {})
  LaunchDarkly::Integrations::Consul::new_feature_store(
    $consul_base_opts.merge(opts).merge({ expiration: 60 }))
end

def create_consul_store_uncached(opts = {})
  LaunchDarkly::Integrations::Consul::new_feature_store(
    $consul_base_opts.merge(opts).merge({ expiration: 0 }))
end

def clear_all_data
  Diplomat::Kv.delete($my_prefix + '/', recurse: true)
end


describe "Consul feature store" do
  return if ENV['LD_SKIP_DATABASE_TESTS'] == '1'

  # These tests will all fail if there isn't a local Consul instance running.

  context "with local cache" do
    include_examples "feature_store", method(:create_consul_store), method(:clear_all_data)
  end

  context "without local cache" do
    include_examples "feature_store", method(:create_consul_store_uncached), method(:clear_all_data)
  end
end
```
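Outside of tests, the store exercised here would be handed to the client through Config. A sketch, assuming the diplomat gem is installed, a local Consul agent is reachable, and daemon mode (use_ldd) is wanted because another process keeps the store populated:

```ruby
require "ldclient-rb"

store = LaunchDarkly::Integrations::Consul.new_feature_store(
  prefix: "my-app",   # key prefix in the Consul KV store
  expiration: 30      # cache flag data locally for 30 seconds
)

config = LaunchDarkly::Config.new(
  feature_store: store,
  use_ldd: true       # daemon mode: read flags from the store instead of streaming
)

client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY", config)  # placeholder key
```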
data/spec/integrations/dynamodb_feature_store_spec.rb ADDED
@@ -0,0 +1,105 @@
```ruby
require "feature_store_spec_base"
require "aws-sdk-dynamodb"
require "spec_helper"


$table_name = 'LD_DYNAMODB_TEST_TABLE'
$endpoint = 'http://localhost:8000'
$my_prefix = 'testprefix'
$null_log = ::Logger.new($stdout)
$null_log.level = ::Logger::FATAL

$dynamodb_opts = {
  credentials: Aws::Credentials.new("key", "secret"),
  region: "us-east-1",
  endpoint: $endpoint
}

$ddb_base_opts = {
  dynamodb_opts: $dynamodb_opts,
  prefix: $my_prefix,
  logger: $null_log
}

def create_dynamodb_store(opts = {})
  LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name,
    $ddb_base_opts.merge(opts).merge({ expiration: 60 }))
end

def create_dynamodb_store_uncached(opts = {})
  LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name,
    $ddb_base_opts.merge(opts).merge({ expiration: 0 }))
end

def clear_all_data
  client = create_test_client
  items_to_delete = []
  req = {
    table_name: $table_name,
    projection_expression: '#namespace, #key',
    expression_attribute_names: {
      '#namespace' => 'namespace',
      '#key' => 'key'
    }
  }
  while true
    resp = client.scan(req)
    items_to_delete = items_to_delete + resp.items
    break if resp.last_evaluated_key.nil? || resp.last_evaluated_key.length == 0
    req[:exclusive_start_key] = resp.last_evaluated_key  # req is a Hash, so continue the scan via the key, not a method call
  end
  requests = items_to_delete.map do |item|
    { delete_request: { key: item } }
  end
  LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBUtil.batch_write_requests(client, $table_name, requests)
end

def create_table_if_necessary
  client = create_test_client
  begin
    client.describe_table({ table_name: $table_name })
    return # no error, table exists
  rescue Aws::DynamoDB::Errors::ResourceNotFoundException
    # fall through to code below - we'll create the table
  end

  req = {
    table_name: $table_name,
    key_schema: [
      { attribute_name: "namespace", key_type: "HASH" },
      { attribute_name: "key", key_type: "RANGE" }
    ],
    attribute_definitions: [
      { attribute_name: "namespace", attribute_type: "S" },
      { attribute_name: "key", attribute_type: "S" }
    ],
    provisioned_throughput: {
      read_capacity_units: 1,
      write_capacity_units: 1
    }
  }
  client.create_table(req)

  # When DynamoDB creates a table, it may not be ready to use immediately
end

def create_test_client
  Aws::DynamoDB::Client.new($dynamodb_opts)
end


describe "DynamoDB feature store" do
  return if ENV['LD_SKIP_DATABASE_TESTS'] == '1'

  # These tests will all fail if there isn't a local DynamoDB instance running.

  create_table_if_necessary

  context "with local cache" do
    include_examples "feature_store", method(:create_dynamodb_store), method(:clear_all_data)
  end

  context "without local cache" do
    include_examples "feature_store", method(:create_dynamodb_store_uncached), method(:clear_all_data)
  end
end
```
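The DynamoDB store expects an existing table with the key schema that create_table_if_necessary sets up: partition key "namespace" and sort key "key", both strings. Client wiring follows the same pattern as the Consul example, with the table name as the first argument; this sketch assumes the aws-sdk-dynamodb gem is installed and that credentials come from the usual AWS environment:

```ruby
require "ldclient-rb"

store = LaunchDarkly::Integrations::DynamoDB.new_feature_store(
  "my-ld-table",                            # existing table with the namespace/key schema
  dynamodb_opts: { region: "us-east-1" },   # passed through to Aws::DynamoDB::Client
  prefix: "my-app",
  expiration: 30
)

config = LaunchDarkly::Config.new(feature_store: store, use_ldd: true)
client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY", config)  # placeholder key
```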
data/spec/integrations/store_wrapper_spec.rb ADDED
@@ -0,0 +1,276 @@
```ruby
require "spec_helper"

describe LaunchDarkly::Integrations::Util::CachingStoreWrapper do
  subject { LaunchDarkly::Integrations::Util::CachingStoreWrapper }

  THINGS = { namespace: "things" }

  shared_examples "tests" do |cached|
    opts = cached ? { expiration: 30 } : { expiration: 0 }

    it "gets item" do
      core = MockCore.new
      wrapper = subject.new(core, opts)
      key = "flag"
      itemv1 = { key: key, version: 1 }
      itemv2 = { key: key, version: 2 }

      core.force_set(THINGS, itemv1)
      expect(wrapper.get(THINGS, key)).to eq itemv1

      core.force_set(THINGS, itemv2)
      expect(wrapper.get(THINGS, key)).to eq (cached ? itemv1 : itemv2) # if cached, we will not see the new underlying value yet
    end

    it "gets deleted item" do
      core = MockCore.new
      wrapper = subject.new(core, opts)
      key = "flag"
      itemv1 = { key: key, version: 1, deleted: true }
      itemv2 = { key: key, version: 2, deleted: false }

      core.force_set(THINGS, itemv1)
      expect(wrapper.get(THINGS, key)).to eq nil # item is filtered out because deleted is true

      core.force_set(THINGS, itemv2)
      expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv2) # if cached, we will not see the new underlying value yet
    end

    it "gets missing item" do
      core = MockCore.new
      wrapper = subject.new(core, opts)
      key = "flag"
      item = { key: key, version: 1 }

      expect(wrapper.get(THINGS, key)).to eq nil

      core.force_set(THINGS, item)
      expect(wrapper.get(THINGS, key)).to eq (cached ? nil : item) # the cache can retain a nil result
    end

    it "gets all items" do
      core = MockCore.new
      wrapper = subject.new(core, opts)
      item1 = { key: "flag1", version: 1 }
      item2 = { key: "flag2", version: 1 }

      core.force_set(THINGS, item1)
      core.force_set(THINGS, item2)
      expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1, item2[:key] => item2 })

      core.force_remove(THINGS, item2[:key])
      expect(wrapper.all(THINGS)).to eq (cached ?
        { item1[:key] => item1, item2[:key] => item2 } :
        { item1[:key] => item1 })
    end

    it "gets all items filtering out deleted items" do
      core = MockCore.new
      wrapper = subject.new(core, opts)
      item1 = { key: "flag1", version: 1 }
      item2 = { key: "flag2", version: 1, deleted: true }

      core.force_set(THINGS, item1)
      core.force_set(THINGS, item2)
      expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1 })
    end

    it "upserts item successfully" do
      core = MockCore.new
      wrapper = subject.new(core, opts)
      key = "flag"
      itemv1 = { key: key, version: 1 }
      itemv2 = { key: key, version: 2 }

      wrapper.upsert(THINGS, itemv1)
      expect(core.data[THINGS][key]).to eq itemv1

      wrapper.upsert(THINGS, itemv2)
      expect(core.data[THINGS][key]).to eq itemv2

      # if we have a cache, verify that the new item is now cached by writing a different value
      # to the underlying data - Get should still return the cached item
      if cached
        itemv3 = { key: key, version: 3 }
        core.force_set(THINGS, itemv3)
      end

      expect(wrapper.get(THINGS, key)).to eq itemv2
    end

    it "deletes item" do
      core = MockCore.new
      wrapper = subject.new(core, opts)
      key = "flag"
      itemv1 = { key: key, version: 1 }
      itemv2 = { key: key, version: 2, deleted: true }
      itemv3 = { key: key, version: 3 }

      core.force_set(THINGS, itemv1)
      expect(wrapper.get(THINGS, key)).to eq itemv1

      wrapper.delete(THINGS, key, 2)
      expect(core.data[THINGS][key]).to eq itemv2

      core.force_set(THINGS, itemv3) # make a change that bypasses the cache

      expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv3)
    end
  end

  context "cached" do
    include_examples "tests", true

    cached_opts = { expiration: 30 }

    it "get uses values from init" do
      core = MockCore.new
      wrapper = subject.new(core, cached_opts)
      item1 = { key: "flag1", version: 1 }
      item2 = { key: "flag2", version: 1 }

      wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } })
      core.force_remove(THINGS, item1[:key])

      expect(wrapper.get(THINGS, item1[:key])).to eq item1
    end

    it "get all uses values from init" do
      core = MockCore.new
      wrapper = subject.new(core, cached_opts)
      item1 = { key: "flag1", version: 1 }
      item2 = { key: "flag2", version: 1 }

      wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } })
      core.force_remove(THINGS, item1[:key])

      expect(wrapper.all(THINGS)).to eq ({ item1[:key] => item1, item2[:key] => item2 })
    end

    it "upsert doesn't update cache if unsuccessful" do
      # This is for an upsert where the data in the store has a higher version. In an uncached
      # store, this is just a no-op as far as the wrapper is concerned so there's nothing to
      # test here. In a cached store, we need to verify that the cache has been refreshed
      # using the data that was found in the store.
      core = MockCore.new
      wrapper = subject.new(core, cached_opts)
      key = "flag"
      itemv1 = { key: key, version: 1 }
      itemv2 = { key: key, version: 2 }

      wrapper.upsert(THINGS, itemv2)
      expect(core.data[THINGS][key]).to eq itemv2

      wrapper.upsert(THINGS, itemv1)
      expect(core.data[THINGS][key]).to eq itemv2 # value in store remains the same

      itemv3 = { key: key, version: 3 }
      core.force_set(THINGS, itemv3) # bypasses cache so we can verify that itemv2 is in the cache
      expect(wrapper.get(THINGS, key)).to eq itemv2
    end

    it "initialized? can cache false result" do
      core = MockCore.new
      wrapper = subject.new(core, { expiration: 0.2 }) # use a shorter cache TTL for this test

      expect(wrapper.initialized?).to eq false
      expect(core.inited_query_count).to eq 1

      core.inited = true
      expect(wrapper.initialized?).to eq false
      expect(core.inited_query_count).to eq 1

      sleep(0.5)

      expect(wrapper.initialized?).to eq true
      expect(core.inited_query_count).to eq 2

      # From this point on it should remain true and the method should not be called
      expect(wrapper.initialized?).to eq true
      expect(core.inited_query_count).to eq 2
    end
  end

  context "uncached" do
    include_examples "tests", false

    uncached_opts = { expiration: 0 }

    it "queries internal initialized state only if not already inited" do
      core = MockCore.new
      wrapper = subject.new(core, uncached_opts)

      expect(wrapper.initialized?).to eq false
      expect(core.inited_query_count).to eq 1

      core.inited = true
      expect(wrapper.initialized?).to eq true
      expect(core.inited_query_count).to eq 2

      core.inited = false
      expect(wrapper.initialized?).to eq true
      expect(core.inited_query_count).to eq 2
    end

    it "does not query internal initialized state if init has been called" do
      core = MockCore.new
      wrapper = subject.new(core, uncached_opts)

      expect(wrapper.initialized?).to eq false
      expect(core.inited_query_count).to eq 1

      wrapper.init({})

      expect(wrapper.initialized?).to eq true
      expect(core.inited_query_count).to eq 1
    end
  end

  class MockCore
    def initialize
      @data = {}
      @inited = false
      @inited_query_count = 0
    end

    attr_reader :data
    attr_reader :inited_query_count
    attr_accessor :inited

    def force_set(kind, item)
      @data[kind] = {} if !@data.has_key?(kind)
      @data[kind][item[:key]] = item
    end

    def force_remove(kind, key)
      @data[kind].delete(key) if @data.has_key?(kind)
    end

    def init_internal(all_data)
      @data = all_data
      @inited = true
    end

    def get_internal(kind, key)
      items = @data[kind]
      items.nil? ? nil : items[key]
    end

    def get_all_internal(kind)
      @data[kind]
    end

    def upsert_internal(kind, item)
      @data[kind] = {} if !@data.has_key?(kind)
      old_item = @data[kind][item[:key]]
      return old_item if !old_item.nil? && old_item[:version] >= item[:version]
      @data[kind][item[:key]] = item
      item
    end

    def initialized_internal?
      @inited_query_count = @inited_query_count + 1
      @inited
    end
  end
end
```
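MockCore above doubles as documentation of the "core" interface CachingStoreWrapper expects: init_internal, get_internal, get_all_internal, upsert_internal, and initialized_internal?. A sketch of wrapping a custom, hypothetical core the same way the gem's own Redis, Consul, and DynamoDB integrations do:

```ruby
require "ldclient-rb"

# A trivial hash-backed core, mirroring the MockCore interface used in the spec.
# Hypothetical and illustrative; not part of the SDK.
class HashCore
  def initialize
    @data = {}
    @inited = false
  end

  def init_internal(all_data)
    @data = all_data
    @inited = true
  end

  def get_internal(kind, key)
    (@data[kind] || {})[key]
  end

  def get_all_internal(kind)
    @data[kind] || {}
  end

  def upsert_internal(kind, item)
    @data[kind] ||= {}
    old = @data[kind][item[:key]]
    return old if old && old[:version] >= item[:version]  # only accept newer versions
    @data[kind][item[:key]] = item
  end

  def initialized_internal?
    @inited
  end
end

# The wrapper adds read-through caching and deleted-item filtering on top of the core.
wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(HashCore.new, expiration: 30)
wrapper.upsert(LaunchDarkly::FEATURES, { key: "my-flag", version: 1 })
wrapper.get(LaunchDarkly::FEATURES, "my-flag")   # => { key: "my-flag", version: 1 }
```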