ldclient-rb 5.4.3 → 5.5.0
- checksums.yaml +4 -4
- data/.circleci/config.yml +33 -6
- data/CHANGELOG.md +19 -0
- data/CONTRIBUTING.md +0 -12
- data/Gemfile.lock +22 -3
- data/README.md +41 -35
- data/ldclient-rb.gemspec +4 -3
- data/lib/ldclient-rb.rb +9 -1
- data/lib/ldclient-rb/cache_store.rb +1 -0
- data/lib/ldclient-rb/config.rb +201 -90
- data/lib/ldclient-rb/evaluation.rb +56 -8
- data/lib/ldclient-rb/event_summarizer.rb +3 -0
- data/lib/ldclient-rb/events.rb +16 -0
- data/lib/ldclient-rb/expiring_cache.rb +1 -0
- data/lib/ldclient-rb/file_data_source.rb +18 -13
- data/lib/ldclient-rb/flags_state.rb +3 -2
- data/lib/ldclient-rb/impl.rb +13 -0
- data/lib/ldclient-rb/impl/integrations/consul_impl.rb +158 -0
- data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +228 -0
- data/lib/ldclient-rb/impl/integrations/redis_impl.rb +155 -0
- data/lib/ldclient-rb/impl/store_client_wrapper.rb +47 -0
- data/lib/ldclient-rb/impl/store_data_set_sorter.rb +55 -0
- data/lib/ldclient-rb/in_memory_store.rb +15 -4
- data/lib/ldclient-rb/integrations.rb +55 -0
- data/lib/ldclient-rb/integrations/consul.rb +38 -0
- data/lib/ldclient-rb/integrations/dynamodb.rb +47 -0
- data/lib/ldclient-rb/integrations/redis.rb +55 -0
- data/lib/ldclient-rb/integrations/util/store_wrapper.rb +230 -0
- data/lib/ldclient-rb/interfaces.rb +153 -0
- data/lib/ldclient-rb/ldclient.rb +135 -77
- data/lib/ldclient-rb/memoized_value.rb +2 -0
- data/lib/ldclient-rb/newrelic.rb +1 -0
- data/lib/ldclient-rb/non_blocking_thread_pool.rb +3 -3
- data/lib/ldclient-rb/polling.rb +1 -0
- data/lib/ldclient-rb/redis_store.rb +24 -190
- data/lib/ldclient-rb/requestor.rb +3 -2
- data/lib/ldclient-rb/simple_lru_cache.rb +1 -0
- data/lib/ldclient-rb/stream.rb +22 -10
- data/lib/ldclient-rb/user_filter.rb +1 -0
- data/lib/ldclient-rb/util.rb +1 -0
- data/lib/ldclient-rb/version.rb +1 -1
- data/scripts/gendocs.sh +12 -0
- data/spec/feature_store_spec_base.rb +173 -72
- data/spec/file_data_source_spec.rb +2 -2
- data/spec/http_util.rb +103 -0
- data/spec/in_memory_feature_store_spec.rb +1 -1
- data/spec/integrations/consul_feature_store_spec.rb +41 -0
- data/spec/integrations/dynamodb_feature_store_spec.rb +104 -0
- data/spec/integrations/store_wrapper_spec.rb +276 -0
- data/spec/ldclient_spec.rb +83 -4
- data/spec/redis_feature_store_spec.rb +25 -16
- data/spec/requestor_spec.rb +44 -38
- data/spec/stream_spec.rb +18 -18
- metadata +55 -33
- data/lib/sse_client.rb +0 -4
- data/lib/sse_client/backoff.rb +0 -38
- data/lib/sse_client/sse_client.rb +0 -171
- data/lib/sse_client/sse_events.rb +0 -67
- data/lib/sse_client/streaming_http.rb +0 -199
- data/spec/sse_client/sse_client_spec.rb +0 -177
- data/spec/sse_client/sse_events_spec.rb +0 -100
- data/spec/sse_client/sse_shared.rb +0 -82
- data/spec/sse_client/streaming_http_spec.rb +0 -263
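
The headline change in 5.5.0 is the new `LaunchDarkly::Integrations` module, which adds Consul and DynamoDB feature stores alongside Redis and steers configuration toward factory methods instead of concrete store classes (the bundled SSE client also moved out into the separate `ld-eventsource` gem, hence the deleted `lib/sse_client` files). As a rough sketch of where this is heading, wiring a store into the client might look like the following; the factory method name `new_feature_store` is an assumption, since the contents of `integrations/redis.rb` are not shown in this diff:

```ruby
require "ldclient-rb"

# Assumed factory name; the new integrations files are listed above but not shown here.
store = LaunchDarkly::Integrations::Redis.new_feature_store(
  redis_url: "redis://localhost:6379/0",
  prefix: "launchdarkly"
)

config = LaunchDarkly::Config.new(feature_store: store)
client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY", config)
```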
data/lib/ldclient-rb/memoized_value.rb
CHANGED
@@ -2,6 +2,8 @@
 module LaunchDarkly
   # Simple implementation of a thread-safe memoized value whose generator function will never be
   # run more than once, and whose value can be overridden by explicit assignment.
+  # Note that we no longer use this class and it will be removed in a future version.
+  # @private
   class MemoizedValue
     def initialize(&generator)
       @generator = generator
data/lib/ldclient-rb/newrelic.rb
CHANGED
data/lib/ldclient-rb/non_blocking_thread_pool.rb
CHANGED
@@ -3,10 +3,10 @@ require "concurrent/atomics"
 require "concurrent/executors"
 require "thread"
 
-# Simple wrapper for a FixedThreadPool that rejects new jobs if all the threads are busy, rather
-# than blocking. Also provides a way to wait for all jobs to finish without shutting down.
-
 module LaunchDarkly
+  # Simple wrapper for a FixedThreadPool that rejects new jobs if all the threads are busy, rather
+  # than blocking. Also provides a way to wait for all jobs to finish without shutting down.
+  # @private
   class NonBlockingThreadPool
     def initialize(capacity)
       @capacity = capacity
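
The relocated comment describes the pool's contract: jobs posted while every worker thread is busy are rejected rather than queued, and callers can wait for in-flight jobs to drain without shutting the pool down. A rough usage sketch follows; the method names `post` and `wait_all` are assumptions, and since the class is now `@private`, none of this is supported API:

```ruby
pool = LaunchDarkly::NonBlockingThreadPool.new(4)

20.times do |i|
  # returns immediately; the job is dropped if all 4 workers are busy
  pool.post { process_event(i) }  # process_event is a hypothetical job
end

pool.wait_all  # block until outstanding jobs finish, without shutting down
```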
data/lib/ldclient-rb/polling.rb
CHANGED
data/lib/ldclient-rb/redis_store.rb
CHANGED
@@ -1,5 +1,5 @@
-require "
-require "
+require "ldclient-rb/interfaces"
+require "ldclient-rb/impl/integrations/redis_impl"
 
 module LaunchDarkly
   #
@@ -12,14 +12,16 @@ module LaunchDarkly
   # installed. Then, create an instance and store it in the `feature_store` property
   # of your client configuration.
   #
+  # @deprecated Use the factory method in {LaunchDarkly::Integrations::Redis} instead. This specific
+  #   implementation class may be changed or removed in the future.
+  #
   class RedisFeatureStore
-
-
-
-
-
-
-    end
+    include LaunchDarkly::Interfaces::FeatureStore
+
+    # Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating
+    # to RedisFeatureStoreCore where the actual database logic is. This class was retained for historical
+    # reasons, so that existing code can still call RedisFeatureStore.new. In the future, we will migrate
+    # away from exposing these concrete classes and use factory methods instead.
 
     #
     # Constructor for a RedisFeatureStore instance.
@@ -30,45 +32,13 @@ module LaunchDarkly
     # @option opts [String] :prefix namespace prefix to add to all hash keys used by LaunchDarkly
     # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger`
     # @option opts [Integer] :max_connections size of the Redis connection pool
-    # @option opts [Integer] :
+    # @option opts [Integer] :expiration_seconds expiration time for the in-memory cache, in seconds; 0 for no local caching
     # @option opts [Integer] :capacity maximum number of feature flags (or related objects) to cache locally
-    # @option opts [Object] :pool custom connection pool,
+    # @option opts [Object] :pool custom connection pool, if desired
     #
     def initialize(opts = {})
-
-
-      end
-      @redis_opts = opts[:redis_opts] || Hash.new
-      if opts[:redis_url]
-        @redis_opts[:url] = opts[:redis_url]
-      end
-      if !@redis_opts.include?(:url)
-        @redis_opts[:url] = RedisFeatureStore.default_redis_url
-      end
-      max_connections = opts[:max_connections] || 16
-      @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do
-        Redis.new(@redis_opts)
-      end
-      @prefix = opts[:prefix] || RedisFeatureStore.default_prefix
-      @logger = opts[:logger] || Config.default_logger
-
-      expiration_seconds = opts[:expiration] || 15
-      capacity = opts[:capacity] || 1000
-      if expiration_seconds > 0
-        @cache = ExpiringCache.new(capacity, expiration_seconds)
-      else
-        @cache = nil
-      end
-
-      @stopped = Concurrent::AtomicBoolean.new(false)
-      @inited = MemoizedValue.new {
-        query_inited
-      }
-
-      with_connection do |redis|
-        @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \
-and prefix: #{@prefix}")
-      end
+      core = LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStoreCore.new(opts)
+      @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts)
     end
 
     #
@@ -76,178 +46,42 @@ and prefix: #{@prefix}")
     # running at `localhost` with its default port.
     #
     def self.default_redis_url
-
+      LaunchDarkly::Integrations::Redis::default_redis_url
     end
 
     #
     # Default value for the `prefix` constructor parameter.
     #
     def self.default_prefix
-
+      LaunchDarkly::Integrations::Redis::default_prefix
     end
 
     def get(kind, key)
-
-      if f.nil?
-        @logger.debug { "RedisFeatureStore: no cache hit for #{key} in '#{kind[:namespace]}', requesting from Redis" }
-        f = with_connection do |redis|
-          begin
-            get_redis(kind, redis, key.to_sym)
-          rescue => e
-            @logger.error { "RedisFeatureStore: could not retrieve #{key} from Redis in '#{kind[:namespace]}', with error: #{e}" }
-            nil
-          end
-        end
-      end
-      if f.nil?
-        @logger.debug { "RedisFeatureStore: #{key} not found in '#{kind[:namespace]}'" }
-        nil
-      elsif f[:deleted]
-        @logger.debug { "RedisFeatureStore: #{key} was deleted in '#{kind[:namespace]}', returning nil" }
-        nil
-      else
-        f
-      end
+      @wrapper.get(kind, key)
     end
 
     def all(kind)
-
-      with_connection do |redis|
-        begin
-          hashfs = redis.hgetall(items_key(kind))
-        rescue => e
-          @logger.error { "RedisFeatureStore: could not retrieve all '#{kind[:namespace]}' items from Redis with error: #{e}; returning none" }
-          hashfs = {}
-        end
-        hashfs.each do |k, jsonItem|
-          f = JSON.parse(jsonItem, symbolize_names: true)
-          if !f[:deleted]
-            fs[k.to_sym] = f
-          end
-        end
-      end
-      fs
+      @wrapper.all(kind)
     end
 
     def delete(kind, key, version)
-
+      @wrapper.delete(kind, key, version)
     end
 
     def init(all_data)
-      @
-      count = 0
-      with_connection do |redis|
-        all_data.each do |kind, items|
-          begin
-            redis.multi do |multi|
-              multi.del(items_key(kind))
-              count = count + items.count
-              items.each { |key, item|
-                redis.hset(items_key(kind), key, item.to_json)
-              }
-            end
-            items.each { |key, item|
-              put_cache(kind, key.to_sym, item)
-            }
-          rescue => e
-            @logger.error { "RedisFeatureStore: could not initialize '#{kind[:namespace]}' in Redis, error: #{e}" }
-          end
-        end
-      end
-      @inited.set(true)
-      @logger.info { "RedisFeatureStore: initialized with #{count} items" }
+      @wrapper.init(all_data)
     end
 
     def upsert(kind, item)
-
+      @wrapper.upsert(kind, item)
    end
 
     def initialized?
-      @
+      @wrapper.initialized?
     end
 
     def stop
-
-      @pool.shutdown { |redis| redis.close }
-      @cache.clear if !@cache.nil?
-      end
-    end
-
-    private
-
-    # exposed for testing
-    def before_update_transaction(base_key, key)
-    end
-
-    def items_key(kind)
-      @prefix + ":" + kind[:namespace]
-    end
-
-    def cache_key(kind, key)
-      kind[:namespace] + ":" + key.to_s
-    end
-
-    def with_connection
-      @pool.with { |redis| yield(redis) }
-    end
-
-    def get_redis(kind, redis, key)
-      begin
-        json_item = redis.hget(items_key(kind), key)
-        if json_item
-          item = JSON.parse(json_item, symbolize_names: true)
-          put_cache(kind, key, item)
-          item
-        else
-          nil
-        end
-      rescue => e
-        @logger.error { "RedisFeatureStore: could not retrieve #{key} from Redis, error: #{e}" }
-        nil
-      end
-    end
-
-    def put_cache(kind, key, value)
-      @cache[cache_key(kind, key)] = value if !@cache.nil?
-    end
-
-    def update_with_versioning(kind, new_item)
-      base_key = items_key(kind)
-      key = new_item[:key]
-      try_again = true
-      while try_again
-        try_again = false
-        with_connection do |redis|
-          redis.watch(base_key) do
-            old_item = get_redis(kind, redis, key)
-            before_update_transaction(base_key, key)
-            if old_item.nil? || old_item[:version] < new_item[:version]
-              begin
-                result = redis.multi do |multi|
-                  multi.hset(base_key, key, new_item.to_json)
-                end
-                if result.nil?
-                  @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" }
-                  try_again = true
-                else
-                  put_cache(kind, key.to_sym, new_item)
-                end
-              rescue => e
-                @logger.error { "RedisFeatureStore: could not store #{key} in Redis, error: #{e}" }
-              end
-            else
-              action = new_item[:deleted] ? "delete" : "update"
-              @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \
-in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" }
-            end
-            redis.unwatch
-          end
-        end
-      end
-    end
-
-    def query_inited
-      with_connection { |redis| redis.exists(items_key(FEATURES)) }
+      @wrapper.stop
     end
   end
 end
data/lib/ldclient-rb/requestor.rb
CHANGED
@@ -3,7 +3,7 @@ require "net/http/persistent"
 require "faraday/http_cache"
 
 module LaunchDarkly
-
+  # @private
   class UnexpectedResponseError < StandardError
     def initialize(status)
       @status = status
@@ -14,12 +14,13 @@ module LaunchDarkly
     end
   end
 
+  # @private
   class Requestor
     def initialize(sdk_key, config)
       @sdk_key = sdk_key
       @config = config
       @client = Faraday.new do |builder|
-        builder.use :http_cache, store: @config.cache_store
+        builder.use :http_cache, store: @config.cache_store, serializer: Marshal
 
         builder.adapter :net_http_persistent
       end
data/lib/ldclient-rb/simple_lru_cache.rb
CHANGED
@@ -2,6 +2,7 @@
 module LaunchDarkly
   # A non-thread-safe implementation of a LRU cache set with only add and reset methods.
   # Based on https://github.com/SamSaffron/lru_redux/blob/master/lib/lru_redux/cache.rb
+  # @private
   class SimpleLRUCacheSet
     def initialize(capacity)
       @values = {}
data/lib/ldclient-rb/stream.rb
CHANGED
@@ -1,20 +1,28 @@
 require "concurrent/atomics"
 require "json"
-require "
+require "ld-eventsource"
 
 module LaunchDarkly
+  # @private
   PUT = :put
+  # @private
   PATCH = :patch
+  # @private
   DELETE = :delete
+  # @private
   INDIRECT_PUT = :'indirect/put'
+  # @private
   INDIRECT_PATCH = :'indirect/patch'
+  # @private
   READ_TIMEOUT_SECONDS = 300  # 5 minutes; the stream should send a ping every 3 minutes
 
+  # @private
   KEY_PATHS = {
     FEATURES => "/flags/",
     SEGMENTS => "/segments/"
   }
 
+  # @private
   class StreamProcessor
     def initialize(sdk_key, config, requestor)
       @sdk_key = sdk_key
@@ -46,15 +54,18 @@ module LaunchDarkly
         read_timeout: READ_TIMEOUT_SECONDS,
         logger: @config.logger
       }
-      @es = SSE::
-        conn.on_event { |event| process_message(event
+      @es = SSE::Client.new(@config.stream_uri + "/all", **opts) do |conn|
+        conn.on_event { |event| process_message(event) }
         conn.on_error { |err|
-
-
-
-
-          @
-
+          case err
+          when SSE::Errors::HTTPStatusError
+            status = err.status
+            message = Util.http_error_message(status, "streaming connection", "will retry")
+            @config.logger.error { "[LDClient] #{message}" }
+            if !Util.http_error_recoverable?(status)
+              @ready.set  # if client was waiting on us, make it stop waiting - has no effect if already set
+              stop
+            end
+          end
         }
       end
@@ -71,7 +82,8 @@ module LaunchDarkly
 
     private
 
-    def process_message(message
+    def process_message(message)
+      method = message.type
       @config.logger.debug { "[LDClient] Stream received #{method} message: #{message.data}" }
       if method == PUT
         message = JSON.parse(message.data, symbolize_names: true)
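
The new error handler gives up permanently only when `Util.http_error_recoverable?` reports an unrecoverable status; otherwise the SSE client keeps retrying. The convention across LaunchDarkly SDKs is roughly that 4xx statuses are fatal (for example a 401 from an invalid SDK key) except for 400, 408, and 429, while 5xx responses are transient. A sketch of that predicate, offered as an approximation rather than a copy of `util.rb`:

```ruby
# Approximation of the recoverability rule used by the SDK; not a verbatim copy of util.rb.
def http_error_recoverable?(status)
  if status >= 400 && status < 500
    # bad request, request timeout, and rate limiting are worth retrying;
    # other 4xx errors (401/403 invalid SDK key, 404, ...) will never succeed
    [400, 408, 429].include?(status)
  else
    true  # 5xx responses are treated as transient server problems
  end
end
```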
data/lib/ldclient-rb/util.rb
CHANGED
data/lib/ldclient-rb/version.rb
CHANGED
data/scripts/gendocs.sh
ADDED
@@ -0,0 +1,12 @@
+#!/bin/bash
+
+# Use this script to generate documentation locally in ./doc so it can be proofed before release.
+# After release, documentation will be visible at https://www.rubydoc.info/gems/ldclient-rb
+
+gem install --conservative yard
+gem install --conservative redcarpet  # provides Markdown formatting
+
+# yard doesn't seem to do recursive directories, even though Ruby's Dir.glob supposedly recurses for "**"
+PATHS="lib/*.rb lib/**/*.rb lib/**/**/*.rb lib/**/**/**/*.rb"
+
+yard doc --no-private --markup markdown --markup-provider redcarpet --embed-mixins $PATHS - README.md
data/spec/feature_store_spec_base.rb
CHANGED
@@ -1,112 +1,213 @@
 require "spec_helper"
 
-
+shared_examples "feature_store" do |create_store_method, clear_data_method|
 
-
+  # Rather than testing with feature flag or segment data, we'll use this fake data kind
+  # to make it clear that feature stores need to be able to handle arbitrary data.
+  let(:things_kind) { { namespace: "things" } }
+
+  let(:key1) { "thing1" }
+  let(:thing1) {
     {
-      key:
+      key: key1,
+      name: "Thing 1",
       version: 11,
-      on: true,
-      prerequisites: [],
-      salt: "718ea30a918a4eba8734b57ab1a93227",
-      sel: "fe1244e5378c4f99976c9634e33667c6",
-      targets: [
-        {
-          values: [ "alice" ],
-          variation: 0
-        },
-        {
-          values: [ "bob" ],
-          variation: 1
-        }
-      ],
-      rules: [],
-      fallthrough: { variation: 0 },
-      offVariation: 1,
-      variations: [ true, false ],
       deleted: false
     }
   }
-  let(:
+  let(:unused_key) { "no" }
+
+  let(:create_store) { create_store_method }  # just to avoid a scope issue
+  let(:clear_data) { clear_data_method }
+
+  def with_store(opts = {})
+    s = create_store.call(opts)
+    begin
+      yield s
+    ensure
+      s.stop
+    end
+  end
+
+  def with_inited_store(things)
+    things_hash = {}
+    things.each { |thing| things_hash[thing[:key].to_sym] = thing }
 
-
-
-
-
+    with_store do |s|
+      s.init({ things_kind => things_hash })
+      yield s
+    end
   end
 
   def new_version_plus(f, deltaVersion, attrs = {})
-
-    f1[:version] = f[:version] + deltaVersion
-    f1.update(attrs)
-    f1
+    f.clone.merge({ version: f[:version] + deltaVersion }).merge(attrs)
   end
 
+  before(:each) do
+    clear_data.call if !clear_data.nil?
+  end
 
-
-
+  # This block of tests is only run if the clear_data method is defined, meaning that this is a persistent store
+  # that operates on a database that can be shared with other store instances (as opposed to the in-memory store,
+  # which has its own private storage).
+  if !clear_data_method.nil?
+    it "is not initialized by default" do
+      with_store do |store|
+        expect(store.initialized?).to eq false
+      end
+    end
+
+    it "can detect if another instance has initialized the store" do
+      with_store do |store1|
+        store1.init({})
+        with_store do |store2|
+          expect(store2.initialized?).to eq true
+        end
+      end
+    end
+
+    it "can read data written by another instance" do
+      with_store do |store1|
+        store1.init({ things_kind => { key1.to_sym => thing1 } })
+        with_store do |store2|
+          expect(store2.get(things_kind, key1)).to eq thing1
+        end
+      end
+    end
+
+    it "is independent from other stores with different prefixes" do
+      with_store({ prefix: "a" }) do |store_a|
+        store_a.init({ things_kind => { key1.to_sym => thing1 } })
+        with_store({ prefix: "b" }) do |store_b|
+          store_b.init({ things_kind => {} })
+        end
+        with_store({ prefix: "b" }) do |store_b1|  # this ensures we're not just reading cached data
+          expect(store_b1.get(things_kind, key1)).to be_nil
+          expect(store_a.get(things_kind, key1)).to eq thing1
+        end
+      end
+    end
   end
 
-  it "
-
+  it "is initialized after calling init" do
+    with_inited_store([]) do |store|
+      expect(store.initialized?).to eq true
+    end
   end
 
-  it "can get existing
-
+  it "can get existing item with symbol key" do
+    with_inited_store([ thing1 ]) do |store|
+      expect(store.get(things_kind, key1.to_sym)).to eq thing1
+    end
   end
 
-  it "
-
+  it "can get existing item with string key" do
+    with_inited_store([ thing1 ]) do |store|
+      expect(store.get(things_kind, key1.to_s)).to eq thing1
+    end
   end
 
-  it "
-
-
-
-      feature1[:on] = false
-      store.upsert(LaunchDarkly::FEATURES, feature1)
-      expect(store.all(LaunchDarkly::FEATURES)).to eq ({ key0 => feature0, :"test-feature-flag1" => feature1 })
+  it "gets nil for nonexisting item" do
+    with_inited_store([ thing1 ]) do |store|
+      expect(store.get(things_kind, unused_key)).to be_nil
+    end
   end
 
-  it "
-
-
-
-
-      store.upsert(LaunchDarkly::FEATURES, feature1)
-      expect(store.get(LaunchDarkly::FEATURES, :"test-feature-flag1")).to eq feature1
+  it "returns nil for deleted item" do
+    deleted_thing = thing1.clone.merge({ deleted: true })
+    with_inited_store([ deleted_thing ]) do |store|
+      expect(store.get(things_kind, key1)).to be_nil
+    end
   end
 
-  it "can
-
-
-
+  it "can get all items" do
+    key2 = "thing2"
+    thing2 = {
+      key: key2,
+      name: "Thing 2",
+      version: 22,
+      deleted: false
+    }
+    with_inited_store([ thing1, thing2 ]) do |store|
+      expect(store.all(things_kind)).to eq ({ key1.to_sym => thing1, key2.to_sym => thing2 })
+    end
+  end
+
+  it "filters out deleted items when getting all" do
+    key2 = "thing2"
+    thing2 = {
+      key: key2,
+      name: "Thing 2",
+      version: 22,
+      deleted: true
+    }
+    with_inited_store([ thing1, thing2 ]) do |store|
+      expect(store.all(things_kind)).to eq ({ key1.to_sym => thing1 })
+    end
   end
 
-  it "
-
-
-
+  it "can add new item" do
+    with_inited_store([]) do |store|
+      store.upsert(things_kind, thing1)
+      expect(store.get(things_kind, key1)).to eq thing1
+    end
+  end
+
+  it "can update item with newer version" do
+    with_inited_store([ thing1 ]) do |store|
+      thing1_mod = new_version_plus(thing1, 1, { name: thing1[:name] + ' updated' })
+      store.upsert(things_kind, thing1_mod)
+      expect(store.get(things_kind, key1)).to eq thing1_mod
+    end
+  end
+
+  it "cannot update item with same version" do
+    with_inited_store([ thing1 ]) do |store|
+      thing1_mod = thing1.clone.merge({ name: thing1[:name] + ' updated' })
+      store.upsert(things_kind, thing1_mod)
+      expect(store.get(things_kind, key1)).to eq thing1
+    end
   end
 
   it "cannot update feature with older version" do
-
-
-
+    with_inited_store([ thing1 ]) do |store|
+      thing1_mod = new_version_plus(thing1, -1, { name: thing1[:name] + ' updated' })
+      store.upsert(things_kind, thing1_mod)
+      expect(store.get(things_kind, key1)).to eq thing1
+    end
   end
 
-  it "can delete
-
-
+  it "can delete item with newer version" do
+    with_inited_store([ thing1 ]) do |store|
+      store.delete(things_kind, key1, thing1[:version] + 1)
+      expect(store.get(things_kind, key1)).to be_nil
+    end
   end
 
-  it "cannot delete
-
-
+  it "cannot delete item with same version" do
+    with_inited_store([ thing1 ]) do |store|
+      store.delete(things_kind, key1, thing1[:version])
+      expect(store.get(things_kind, key1)).to eq thing1
+    end
   end
 
-  it "cannot delete
-
-
+  it "cannot delete item with older version" do
+    with_inited_store([ thing1 ]) do |store|
+      store.delete(things_kind, key1, thing1[:version] - 1)
+      expect(store.get(things_kind, key1)).to eq thing1
+    end
+  end
+
+  it "stores Unicode data correctly" do
+    flag = {
+      key: "my-fancy-flag",
+      name: "Tęst Feåtūre Flæg😺",
+      version: 1,
+      deleted: false
+    }
+    with_inited_store([]) do |store|
+      store.upsert(LaunchDarkly::FEATURES, flag)
+      expect(store.get(LaunchDarkly::FEATURES, flag[:key])).to eq flag
+    end
   end
 end