ldclient-rb 5.4.3 → 5.5.0
- checksums.yaml +4 -4
- data/.circleci/config.yml +33 -6
- data/CHANGELOG.md +19 -0
- data/CONTRIBUTING.md +0 -12
- data/Gemfile.lock +22 -3
- data/README.md +41 -35
- data/ldclient-rb.gemspec +4 -3
- data/lib/ldclient-rb.rb +9 -1
- data/lib/ldclient-rb/cache_store.rb +1 -0
- data/lib/ldclient-rb/config.rb +201 -90
- data/lib/ldclient-rb/evaluation.rb +56 -8
- data/lib/ldclient-rb/event_summarizer.rb +3 -0
- data/lib/ldclient-rb/events.rb +16 -0
- data/lib/ldclient-rb/expiring_cache.rb +1 -0
- data/lib/ldclient-rb/file_data_source.rb +18 -13
- data/lib/ldclient-rb/flags_state.rb +3 -2
- data/lib/ldclient-rb/impl.rb +13 -0
- data/lib/ldclient-rb/impl/integrations/consul_impl.rb +158 -0
- data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +228 -0
- data/lib/ldclient-rb/impl/integrations/redis_impl.rb +155 -0
- data/lib/ldclient-rb/impl/store_client_wrapper.rb +47 -0
- data/lib/ldclient-rb/impl/store_data_set_sorter.rb +55 -0
- data/lib/ldclient-rb/in_memory_store.rb +15 -4
- data/lib/ldclient-rb/integrations.rb +55 -0
- data/lib/ldclient-rb/integrations/consul.rb +38 -0
- data/lib/ldclient-rb/integrations/dynamodb.rb +47 -0
- data/lib/ldclient-rb/integrations/redis.rb +55 -0
- data/lib/ldclient-rb/integrations/util/store_wrapper.rb +230 -0
- data/lib/ldclient-rb/interfaces.rb +153 -0
- data/lib/ldclient-rb/ldclient.rb +135 -77
- data/lib/ldclient-rb/memoized_value.rb +2 -0
- data/lib/ldclient-rb/newrelic.rb +1 -0
- data/lib/ldclient-rb/non_blocking_thread_pool.rb +3 -3
- data/lib/ldclient-rb/polling.rb +1 -0
- data/lib/ldclient-rb/redis_store.rb +24 -190
- data/lib/ldclient-rb/requestor.rb +3 -2
- data/lib/ldclient-rb/simple_lru_cache.rb +1 -0
- data/lib/ldclient-rb/stream.rb +22 -10
- data/lib/ldclient-rb/user_filter.rb +1 -0
- data/lib/ldclient-rb/util.rb +1 -0
- data/lib/ldclient-rb/version.rb +1 -1
- data/scripts/gendocs.sh +12 -0
- data/spec/feature_store_spec_base.rb +173 -72
- data/spec/file_data_source_spec.rb +2 -2
- data/spec/http_util.rb +103 -0
- data/spec/in_memory_feature_store_spec.rb +1 -1
- data/spec/integrations/consul_feature_store_spec.rb +41 -0
- data/spec/integrations/dynamodb_feature_store_spec.rb +104 -0
- data/spec/integrations/store_wrapper_spec.rb +276 -0
- data/spec/ldclient_spec.rb +83 -4
- data/spec/redis_feature_store_spec.rb +25 -16
- data/spec/requestor_spec.rb +44 -38
- data/spec/stream_spec.rb +18 -18
- metadata +55 -33
- data/lib/sse_client.rb +0 -4
- data/lib/sse_client/backoff.rb +0 -38
- data/lib/sse_client/sse_client.rb +0 -171
- data/lib/sse_client/sse_events.rb +0 -67
- data/lib/sse_client/streaming_http.rb +0 -199
- data/spec/sse_client/sse_client_spec.rb +0 -177
- data/spec/sse_client/sse_events_spec.rb +0 -100
- data/spec/sse_client/sse_shared.rb +0 -82
- data/spec/sse_client/streaming_http_spec.rb +0 -263
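The headline change in 5.5.0 is the new public integrations layer listed above: lib/ldclient-rb/integrations.rb plus the Consul, DynamoDB, and Redis feature store builders, all backed by the shared caching wrapper in integrations/util/store_wrapper.rb. As a rough, hedged sketch of how that API is wired into the client (the option names are only those exercised by the new specs below, and passing the store through Config's feature_store option is assumed rather than shown in this diff):

```ruby
# Sketch only, not taken from this diff. Assumes the Consul builder shown in the
# new specs below and Config's feature_store option; the diplomat gem must also
# be installed for the Consul integration to work.
require "ldclient-rb"

store = LaunchDarkly::Integrations::Consul.new_feature_store({
  prefix: 'my-app',    # key prefix, as in the spec's $consul_base_opts
  expiration: 60       # local cache TTL in seconds; 0 disables the cache
})

config = LaunchDarkly::Config.new(feature_store: store)
client = LaunchDarkly::LDClient.new('sdkKey', config)
```

Setting expiration: 0 turns the local read-through cache off entirely, which is exactly the distinction the new store specs exercise in their "with local cache" / "without local cache" contexts.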
data/spec/file_data_source_spec.rb
@@ -219,7 +219,7 @@ EOF
   it "evaluates simplified flag with client as expected" do
     file = make_temp_file(all_properties_json)
     factory = LaunchDarkly::FileDataSource.factory({ paths: file.path })
-    config = LaunchDarkly::Config.new(send_events: false,
+    config = LaunchDarkly::Config.new(send_events: false, data_source: factory)
     client = LaunchDarkly::LDClient.new('sdkKey', config)
 
     begin
@@ -233,7 +233,7 @@ EOF
   it "evaluates full flag with client as expected" do
     file = make_temp_file(all_properties_json)
     factory = LaunchDarkly::FileDataSource.factory({ paths: file.path })
-    config = LaunchDarkly::Config.new(send_events: false,
+    config = LaunchDarkly::Config.new(send_events: false, data_source: factory)
     client = LaunchDarkly::LDClient.new('sdkKey', config)
 
     begin
data/spec/http_util.rb
ADDED
@@ -0,0 +1,103 @@
+require "webrick"
+require "webrick/httpproxy"
+require "webrick/https"
+
+class StubHTTPServer
+  attr_reader :requests
+
+  @@next_port = 50000
+
+  def initialize
+    @port = StubHTTPServer.next_port
+    begin
+      base_opts = {
+        BindAddress: '127.0.0.1',
+        Port: @port,
+        AccessLog: [],
+        Logger: NullLogger.new,
+        RequestCallback: method(:record_request)
+      }
+      @server = create_server(@port, base_opts)
+    rescue Errno::EADDRINUSE
+      @port = StubHTTPServer.next_port
+      retry
+    end
+    @requests = []
+  end
+
+  def self.next_port
+    p = @@next_port
+    @@next_port = (p + 1 < 60000) ? p + 1 : 50000
+    p
+  end
+
+  def create_server(port, base_opts)
+    WEBrick::HTTPServer.new(base_opts)
+  end
+
+  def start
+    Thread.new { @server.start }
+  end
+
+  def stop
+    @server.shutdown
+  end
+
+  def base_uri
+    URI("http://127.0.0.1:#{@port}")
+  end
+
+  def setup_response(uri_path, &action)
+    @server.mount_proc(uri_path, action)
+  end
+
+  def setup_ok_response(uri_path, body, content_type=nil, headers={})
+    setup_response(uri_path) do |req, res|
+      res.status = 200
+      res.content_type = content_type if !content_type.nil?
+      res.body = body
+      headers.each { |n, v| res[n] = v }
+    end
+  end
+
+  def record_request(req, res)
+    @requests.push(req)
+  end
+end
+
+class StubProxyServer < StubHTTPServer
+  attr_reader :request_count
+  attr_accessor :connect_status
+
+  def initialize
+    super
+    @request_count = 0
+  end
+
+  def create_server(port, base_opts)
+    WEBrick::HTTPProxyServer.new(base_opts.merge({
+      ProxyContentHandler: proc do |req,res|
+        if !@connect_status.nil?
+          res.status = @connect_status
+        end
+        @request_count += 1
+      end
+    }))
+  end
+end
+
+class NullLogger
+  def method_missing(*)
+    self
+  end
+end
+
+def with_server(server = nil)
+  server = StubHTTPServer.new if server.nil?
+  begin
+    server.start
+    yield server
+  ensure
+    server.stop
+  end
+end
data/spec/integrations/consul_feature_store_spec.rb
ADDED
@@ -0,0 +1,41 @@
+require "feature_store_spec_base"
+require "diplomat"
+require "spec_helper"
+
+
+$my_prefix = 'testprefix'
+$null_log = ::Logger.new($stdout)
+$null_log.level = ::Logger::FATAL
+
+$consul_base_opts = {
+  prefix: $my_prefix,
+  logger: $null_log
+}
+
+def create_consul_store(opts = {})
+  LaunchDarkly::Integrations::Consul::new_feature_store(
+    $consul_base_opts.merge(opts).merge({ expiration: 60 }))
+end
+
+def create_consul_store_uncached(opts = {})
+  LaunchDarkly::Integrations::Consul::new_feature_store(
+    $consul_base_opts.merge(opts).merge({ expiration: 0 }))
+end
+
+def clear_all_data
+  Diplomat::Kv.delete($my_prefix + '/', recurse: true)
+end
+
+
+describe "Consul feature store" do
+
+  # These tests will all fail if there isn't a local Consul instance running.
+
+  context "with local cache" do
+    include_examples "feature_store", method(:create_consul_store), method(:clear_all_data)
+  end
+
+  context "without local cache" do
+    include_examples "feature_store", method(:create_consul_store_uncached), method(:clear_all_data)
+  end
+end
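As the comment in the spec notes, these examples need a Consul agent running locally. Diplomat, required at the top of the file, talks to http://localhost:8500 by default; if your agent runs elsewhere it can be re-pointed before the shared examples execute. A sketch using Diplomat's standard configure block (not part of this diff; the address is hypothetical):

```ruby
require "diplomat"

# Sketch only: point Diplomat at a non-default Consul agent before running the
# shared feature_store examples, e.g. from spec_helper.
Diplomat.configure do |config|
  config.url = "http://consul.test.local:8500"   # hypothetical agent address
end
```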
data/spec/integrations/dynamodb_feature_store_spec.rb
ADDED
@@ -0,0 +1,104 @@
+require "feature_store_spec_base"
+require "aws-sdk-dynamodb"
+require "spec_helper"
+
+
+$table_name = 'LD_DYNAMODB_TEST_TABLE'
+$endpoint = 'http://localhost:8000'
+$my_prefix = 'testprefix'
+$null_log = ::Logger.new($stdout)
+$null_log.level = ::Logger::FATAL
+
+$dynamodb_opts = {
+  credentials: Aws::Credentials.new("key", "secret"),
+  region: "us-east-1",
+  endpoint: $endpoint
+}
+
+$ddb_base_opts = {
+  dynamodb_opts: $dynamodb_opts,
+  prefix: $my_prefix,
+  logger: $null_log
+}
+
+def create_dynamodb_store(opts = {})
+  LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name,
+    $ddb_base_opts.merge(opts).merge({ expiration: 60 }))
+end
+
+def create_dynamodb_store_uncached(opts = {})
+  LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name,
+    $ddb_base_opts.merge(opts).merge({ expiration: 0 }))
+end
+
+def clear_all_data
+  client = create_test_client
+  items_to_delete = []
+  req = {
+    table_name: $table_name,
+    projection_expression: '#namespace, #key',
+    expression_attribute_names: {
+      '#namespace' => 'namespace',
+      '#key' => 'key'
+    }
+  }
+  while true
+    resp = client.scan(req)
+    items_to_delete = items_to_delete + resp.items
+    break if resp.last_evaluated_key.nil? || resp.last_evaluated_key.length == 0
+    req.exclusive_start_key = resp.last_evaluated_key
+  end
+  requests = items_to_delete.map do |item|
+    { delete_request: { key: item } }
+  end
+  LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBUtil.batch_write_requests(client, $table_name, requests)
+end
+
+def create_table_if_necessary
+  client = create_test_client
+  begin
+    client.describe_table({ table_name: $table_name })
+    return # no error, table exists
+  rescue Aws::DynamoDB::Errors::ResourceNotFoundException
+    # fall through to code below - we'll create the table
+  end
+
+  req = {
+    table_name: $table_name,
+    key_schema: [
+      { attribute_name: "namespace", key_type: "HASH" },
+      { attribute_name: "key", key_type: "RANGE" }
+    ],
+    attribute_definitions: [
+      { attribute_name: "namespace", attribute_type: "S" },
+      { attribute_name: "key", attribute_type: "S" }
+    ],
+    provisioned_throughput: {
+      read_capacity_units: 1,
+      write_capacity_units: 1
+    }
+  }
+  client.create_table(req)
+
+  # When DynamoDB creates a table, it may not be ready to use immediately
+end
+
+def create_test_client
+  Aws::DynamoDB::Client.new($dynamodb_opts)
+end
+
+
+describe "DynamoDB feature store" do
+
+  # These tests will all fail if there isn't a local DynamoDB instance running.
+
+  create_table_if_necessary
+
+  context "with local cache" do
+    include_examples "feature_store", method(:create_dynamodb_store), method(:clear_all_data)
+  end
+
+  context "without local cache" do
+    include_examples "feature_store", method(:create_dynamodb_store_uncached), method(:clear_all_data)
+  end
+end
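The trailing comment in create_table_if_necessary points out that a freshly created DynamoDB table may not be usable immediately. A sketch of one way to block until the table is ACTIVE, using the table_exists waiter built into aws-sdk-dynamodb (not part of this diff):

```ruby
# Sketch only: wait for the test table to become ACTIVE after create_table,
# using the standard waiter provided by aws-sdk-dynamodb.
def wait_for_table(client, table_name)
  client.wait_until(:table_exists, table_name: table_name) do |w|
    w.max_attempts = 25   # give local DynamoDB plenty of chances
    w.delay = 1           # poll every second instead of the default interval
  end
end

# e.g. called right after client.create_table(req):
#   wait_for_table(client, $table_name)
```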
data/spec/integrations/store_wrapper_spec.rb
ADDED
@@ -0,0 +1,276 @@
+require "spec_helper"
+
+describe LaunchDarkly::Integrations::Util::CachingStoreWrapper do
+  subject { LaunchDarkly::Integrations::Util::CachingStoreWrapper }
+
+  THINGS = { namespace: "things" }
+
+  shared_examples "tests" do |cached|
+    opts = cached ? { expiration: 30 } : { expiration: 0 }
+
+    it "gets item" do
+      core = MockCore.new
+      wrapper = subject.new(core, opts)
+      key = "flag"
+      itemv1 = { key: key, version: 1 }
+      itemv2 = { key: key, version: 2 }
+
+      core.force_set(THINGS, itemv1)
+      expect(wrapper.get(THINGS, key)).to eq itemv1
+
+      core.force_set(THINGS, itemv2)
+      expect(wrapper.get(THINGS, key)).to eq (cached ? itemv1 : itemv2) # if cached, we will not see the new underlying value yet
+    end
+
+    it "gets deleted item" do
+      core = MockCore.new
+      wrapper = subject.new(core, opts)
+      key = "flag"
+      itemv1 = { key: key, version: 1, deleted: true }
+      itemv2 = { key: key, version: 2, deleted: false }
+
+      core.force_set(THINGS, itemv1)
+      expect(wrapper.get(THINGS, key)).to eq nil # item is filtered out because deleted is true
+
+      core.force_set(THINGS, itemv2)
+      expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv2) # if cached, we will not see the new underlying value yet
+    end
+
+    it "gets missing item" do
+      core = MockCore.new
+      wrapper = subject.new(core, opts)
+      key = "flag"
+      item = { key: key, version: 1 }
+
+      expect(wrapper.get(THINGS, key)).to eq nil
+
+      core.force_set(THINGS, item)
+      expect(wrapper.get(THINGS, key)).to eq (cached ? nil : item) # the cache can retain a nil result
+    end
+
+    it "gets all items" do
+      core = MockCore.new
+      wrapper = subject.new(core, opts)
+      item1 = { key: "flag1", version: 1 }
+      item2 = { key: "flag2", version: 1 }
+
+      core.force_set(THINGS, item1)
+      core.force_set(THINGS, item2)
+      expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1, item2[:key] => item2 })
+
+      core.force_remove(THINGS, item2[:key])
+      expect(wrapper.all(THINGS)).to eq (cached ?
+        { item1[:key] => item1, item2[:key] => item2 } :
+        { item1[:key] => item1 })
+    end
+
+    it "gets all items filtering out deleted items" do
+      core = MockCore.new
+      wrapper = subject.new(core, opts)
+      item1 = { key: "flag1", version: 1 }
+      item2 = { key: "flag2", version: 1, deleted: true }
+
+      core.force_set(THINGS, item1)
+      core.force_set(THINGS, item2)
+      expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1 })
+    end
+
+    it "upserts item successfully" do
+      core = MockCore.new
+      wrapper = subject.new(core, opts)
+      key = "flag"
+      itemv1 = { key: key, version: 1 }
+      itemv2 = { key: key, version: 2 }
+
+      wrapper.upsert(THINGS, itemv1)
+      expect(core.data[THINGS][key]).to eq itemv1
+
+      wrapper.upsert(THINGS, itemv2)
+      expect(core.data[THINGS][key]).to eq itemv2
+
+      # if we have a cache, verify that the new item is now cached by writing a different value
+      # to the underlying data - Get should still return the cached item
+      if cached
+        itemv3 = { key: key, version: 3 }
+        core.force_set(THINGS, itemv3)
+      end
+
+      expect(wrapper.get(THINGS, key)).to eq itemv2
+    end
+
+    it "deletes item" do
+      core = MockCore.new
+      wrapper = subject.new(core, opts)
+      key = "flag"
+      itemv1 = { key: key, version: 1 }
+      itemv2 = { key: key, version: 2, deleted: true }
+      itemv3 = { key: key, version: 3 }
+
+      core.force_set(THINGS, itemv1)
+      expect(wrapper.get(THINGS, key)).to eq itemv1
+
+      wrapper.delete(THINGS, key, 2)
+      expect(core.data[THINGS][key]).to eq itemv2
+
+      core.force_set(THINGS, itemv3) # make a change that bypasses the cache
+
+      expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv3)
+    end
+  end
+
+  context "cached" do
+    include_examples "tests", true
+
+    cached_opts = { expiration: 30 }
+
+    it "get uses values from init" do
+      core = MockCore.new
+      wrapper = subject.new(core, cached_opts)
+      item1 = { key: "flag1", version: 1 }
+      item2 = { key: "flag2", version: 1 }
+
+      wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } })
+      core.force_remove(THINGS, item1[:key])
+
+      expect(wrapper.get(THINGS, item1[:key])).to eq item1
+    end
+
+    it "get all uses values from init" do
+      core = MockCore.new
+      wrapper = subject.new(core, cached_opts)
+      item1 = { key: "flag1", version: 1 }
+      item2 = { key: "flag2", version: 1 }
+
+      wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } })
+      core.force_remove(THINGS, item1[:key])
+
+      expect(wrapper.all(THINGS)).to eq ({ item1[:key] => item1, item2[:key] => item2 })
+    end
+
+    it "upsert doesn't update cache if unsuccessful" do
+      # This is for an upsert where the data in the store has a higher version. In an uncached
+      # store, this is just a no-op as far as the wrapper is concerned so there's nothing to
+      # test here. In a cached store, we need to verify that the cache has been refreshed
+      # using the data that was found in the store.
+      core = MockCore.new
+      wrapper = subject.new(core, cached_opts)
+      key = "flag"
+      itemv1 = { key: key, version: 1 }
+      itemv2 = { key: key, version: 2 }
+
+      wrapper.upsert(THINGS, itemv2)
+      expect(core.data[THINGS][key]).to eq itemv2
+
+      wrapper.upsert(THINGS, itemv1)
+      expect(core.data[THINGS][key]).to eq itemv2 # value in store remains the same
+
+      itemv3 = { key: key, version: 3 }
+      core.force_set(THINGS, itemv3) # bypasses cache so we can verify that itemv2 is in the cache
+      expect(wrapper.get(THINGS, key)).to eq itemv2
+    end
+
+    it "initialized? can cache false result" do
+      core = MockCore.new
+      wrapper = subject.new(core, { expiration: 0.2 }) # use a shorter cache TTL for this test
+
+      expect(wrapper.initialized?).to eq false
+      expect(core.inited_query_count).to eq 1
+
+      core.inited = true
+      expect(wrapper.initialized?).to eq false
+      expect(core.inited_query_count).to eq 1
+
+      sleep(0.5)
+
+      expect(wrapper.initialized?).to eq true
+      expect(core.inited_query_count).to eq 2
+
+      # From this point on it should remain true and the method should not be called
+      expect(wrapper.initialized?).to eq true
+      expect(core.inited_query_count).to eq 2
+    end
+  end
+
+  context "uncached" do
+    include_examples "tests", false
+
+    uncached_opts = { expiration: 0 }
+
+    it "queries internal initialized state only if not already inited" do
+      core = MockCore.new
+      wrapper = subject.new(core, uncached_opts)
+
+      expect(wrapper.initialized?).to eq false
+      expect(core.inited_query_count).to eq 1
+
+      core.inited = true
+      expect(wrapper.initialized?).to eq true
+      expect(core.inited_query_count).to eq 2
+
+      core.inited = false
+      expect(wrapper.initialized?).to eq true
+      expect(core.inited_query_count).to eq 2
+    end
+
+    it "does not query internal initialized state if init has been called" do
+      core = MockCore.new
+      wrapper = subject.new(core, uncached_opts)
+
+      expect(wrapper.initialized?).to eq false
+      expect(core.inited_query_count).to eq 1
+
+      wrapper.init({})
+
+      expect(wrapper.initialized?).to eq true
+      expect(core.inited_query_count).to eq 1
+    end
+  end
+
+  class MockCore
+    def initialize
+      @data = {}
+      @inited = false
+      @inited_query_count = 0
+    end
+
+    attr_reader :data
+    attr_reader :inited_query_count
+    attr_accessor :inited
+
+    def force_set(kind, item)
+      @data[kind] = {} if !@data.has_key?(kind)
+      @data[kind][item[:key]] = item
+    end
+
+    def force_remove(kind, key)
+      @data[kind].delete(key) if @data.has_key?(kind)
+    end
+
+    def init_internal(all_data)
+      @data = all_data
+      @inited = true
+    end
+
+    def get_internal(kind, key)
+      items = @data[kind]
+      items.nil? ? nil : items[key]
+    end
+
+    def get_all_internal(kind)
+      @data[kind]
+    end
+
+    def upsert_internal(kind, item)
+      @data[kind] = {} if !@data.has_key?(kind)
+      old_item = @data[kind][item[:key]]
+      return old_item if !old_item.nil? && old_item[:version] >= item[:version]
+      @data[kind][item[:key]] = item
+      item
+    end
+
+    def initialized_internal?
+      @inited_query_count = @inited_query_count + 1
+      @inited
+    end
+  end
+end
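MockCore above doubles as documentation of the contract a store "core" must satisfy for CachingStoreWrapper: init_internal, get_internal, get_all_internal, upsert_internal, and initialized_internal?. A self-contained sketch of wrapping such a core to get the caching and deleted-item filtering behavior these tests verify (TrivialCore and the flag data are invented for illustration):

```ruby
# Sketch only: a minimal in-memory core with the same method contract that
# MockCore exercises above, wrapped by CachingStoreWrapper.
require "ldclient-rb"

class TrivialCore
  def initialize; @data = {}; @inited = false; end
  def init_internal(all_data); @data = all_data; @inited = true; end
  def get_internal(kind, key); (@data[kind] || {})[key]; end
  def get_all_internal(kind); @data[kind] || {}; end
  def upsert_internal(kind, item)
    items = (@data[kind] ||= {})
    old = items[item[:key]]
    return old if old && old[:version] >= item[:version]
    items[item[:key]] = item
  end
  def initialized_internal?; @inited; end
  def stop; end
end

features = { namespace: "features" }   # same shape as THINGS in the spec
wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(TrivialCore.new, { expiration: 30 })

wrapper.init({ features => { "my-flag" => { key: "my-flag", version: 1 } } })
wrapper.get(features, "my-flag")   # served from the local cache for up to 30 seconds
```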