ldclient-rb 2.5.0 → 3.0.0
- checksums.yaml +4 -4
- data/CHANGELOG.md +9 -2
- data/circle.yml +11 -9
- data/ldclient-rb.gemspec +7 -2
- data/lib/ldclient-rb.rb +2 -2
- data/lib/ldclient-rb/config.rb +2 -1
- data/lib/ldclient-rb/evaluation.rb +77 -39
- data/lib/ldclient-rb/in_memory_store.rb +89 -0
- data/lib/ldclient-rb/ldclient.rb +2 -2
- data/lib/ldclient-rb/polling.rb +6 -3
- data/lib/ldclient-rb/{redis_feature_store.rb → redis_store.rb} +54 -42
- data/lib/ldclient-rb/requestor.rb +12 -0
- data/lib/ldclient-rb/stream.rb +44 -8
- data/lib/ldclient-rb/version.rb +1 -1
- data/spec/evaluation_spec.rb +204 -7
- data/spec/feature_store_spec_base.rb +20 -20
- data/spec/segment_store_spec_base.rb +95 -0
- data/spec/stream_spec.rb +32 -17
- metadata +6 -4
- data/lib/ldclient-rb/feature_store.rb +0 -63
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5624d2c634cd8bc37ee54d2215e4150c754a2de7
+  data.tar.gz: da595a32a6ed8f80625318141639d1ddc26bed7f
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 79f7bff1fd78fdc74370ae3604a13da553cdc9052d4a3c294470c51ca7f4969c93b8f9a317bc906f5b90f0d7d2d997e7a64738e7f209a0267ede8eedfb74b952
+  data.tar.gz: f3288d3da6869ee47c2785d34d190d63391ba0d0df59cb3b135c939589f52e6cdc9645b7cd1503d51822214888b65ba0221e2d4341513633ee9ef03caa860fc9
data/CHANGELOG.md
CHANGED
@@ -2,7 +2,15 @@
 
 All notable changes to the LaunchDarkly Ruby SDK will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org).
 
-
+## [3.0.0] - 2018-02-22
+### Added
+- Support for a new LaunchDarkly feature: reusable user segments.
+
+### Changed
+- The feature store interface has been changed to support user segment data as well as feature flags. Existing code that uses `InMemoryFeatureStore` or `RedisFeatureStore` should work as before, but custom feature store implementations will need to be updated.
+
+
+## [2.5.0] - 2018-02-12
 
 ## Added
 - Adds support for a future LaunchDarkly feature, coming soon: semantic version user attributes.
@@ -10,7 +18,6 @@ All notable changes to the LaunchDarkly Ruby SDK will be documented in this file
 ## Changed
 - It is now possible to compute rollouts based on an integer attribute of a user, not just a string attribute.
 
-
 ## [2.4.1] - 2018-01-23
 ## Changed
 - Reduce logging level for missing flags
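The changelog note about custom feature store implementations corresponds to a concrete contract shown later in this diff: every store method now takes a "kind" argument, one of the `FEATURES` or `SEGMENTS` descriptors added in `in_memory_store.rb`. Below is a minimal sketch of that contract, mirroring `InMemoryFeatureStore`; the class name and comments are illustrative, not part of the SDK.

```ruby
# Sketch of the 3.0.0 feature store contract; storage details are up to the implementer.
class MyCustomFeatureStore
  # all_data is a hash of kind => { key => item }, e.g. { FEATURES => {...}, SEGMENTS => {...} }
  def init(all_data); end

  # Return the item of this kind, or nil if it is missing or marked deleted.
  def get(kind, key); end

  # Return a hash of all non-deleted items of this kind.
  def all(kind); end

  # Store the item only if its :version is newer than the stored one.
  def upsert(kind, item); end

  # Replace the item with a deleted placeholder carrying this version.
  def delete(kind, key, version); end

  # True once init has been called with a full data set.
  def initialized?; end

  # Release any held resources (a no-op for purely in-memory stores).
  def stop; end
end
```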
data/circle.yml
CHANGED
@@ -1,21 +1,23 @@
 machine:
   environment:
-    RUBIES: "ruby-2.4.
+    RUBIES: "ruby-2.4.2 ruby-2.2.7 ruby-2.1.9 ruby-2.0.0 ruby-1.9.3 jruby-1.7.22 jruby-9.0.5.0 jruby-9.1.13.0"
   services:
     - redis
 
 dependencies:
   cache_directories:
-    - '
+    - '/opt/circleci/.rvm/rubies'
 
   override:
-    -
-
-      for i in
+    - |
+      set -e
+      for i in $RUBIES;
       do
         rvm install $i;
         rvm use $i;
-
+        if [[ $i == jruby* ]]; then
+          gem install jruby-openssl;  # required by bundler, no effect on Ruby MRI
+        fi
         gem install bundler;
         bundle install;
         mv Gemfile.lock "Gemfile.lock.$i"
@@ -23,9 +25,9 @@ dependencies:
 
 test:
   override:
-    -
-
-      for i in
+    - |
+      set -e
+      for i in $RUBIES;
       do
         rvm use $i;
         cp "Gemfile.lock.$i" Gemfile.lock;
data/ldclient-rb.gemspec
CHANGED
@@ -30,8 +30,13 @@ Gem::Specification.new do |spec|
   spec.add_development_dependency "moneta", "~> 1.0.0"
 
   spec.add_runtime_dependency "json", [">= 1.8", "< 3"]
-
-
+  if RUBY_VERSION >= "2.1.0"
+    spec.add_runtime_dependency "faraday", [">= 0.9", "< 2"]
+    spec.add_runtime_dependency "faraday-http-cache", [">= 1.3.0", "< 3"]
+  else
+    spec.add_runtime_dependency "faraday", [">= 0.9", "< 0.14.0"]
+    spec.add_runtime_dependency "faraday-http-cache", [">= 1.3.0", "< 2"]
+  end
   spec.add_runtime_dependency "semantic", "~> 1.6.0"
   spec.add_runtime_dependency "thread_safe", "~> 0.3"
   spec.add_runtime_dependency "net-http-persistent", "~> 2.9"
data/lib/ldclient-rb.rb
CHANGED
@@ -3,12 +3,12 @@ require "ldclient-rb/evaluation"
 require "ldclient-rb/ldclient"
 require "ldclient-rb/cache_store"
 require "ldclient-rb/memoized_value"
+require "ldclient-rb/in_memory_store"
 require "ldclient-rb/config"
 require "ldclient-rb/newrelic"
 require "ldclient-rb/stream"
 require "ldclient-rb/polling"
 require "ldclient-rb/event_serializer"
 require "ldclient-rb/events"
-require "ldclient-rb/
-require "ldclient-rb/redis_feature_store"
+require "ldclient-rb/redis_store"
 require "ldclient-rb/requestor"
data/lib/ldclient-rb/config.rb
CHANGED
@@ -34,6 +34,8 @@ module LaunchDarkly
     # @option opts [Object] :cache_store A cache store for the Faraday HTTP caching
     #   library. Defaults to the Rails cache in a Rails environment, or a
     #   thread-safe in-memory store otherwise.
+    # @option opts [Object] :feature_store A store for feature flags and related data. Defaults to an in-memory
+    #   cache, or you can use RedisFeatureStore.
     # @option opts [Boolean] :use_ldd (false) Whether you are using the LaunchDarkly relay proxy in
     #   daemon mode. In this configuration, the client will not use a streaming connection to listen
     #   for updates, but instead will get feature state from a Redis instance. The `stream` and
@@ -171,7 +173,6 @@ module LaunchDarkly
     #
     attr_reader :feature_store
 
-
     # The proxy configuration string
     #
     attr_reader :proxy
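The new `:feature_store` option is how the Redis-backed store gets wired in. A hedged example follows; the SDK key and the `:expiration`/`:capacity` values are placeholders, and those two option names come from the `redis_store.rb` documentation further down in this diff.

```ruby
require "ldclient-rb"

# Illustrative only: cache items fetched from Redis locally for 30 seconds, up to 500 items.
store = LaunchDarkly::RedisFeatureStore.new(expiration: 30, capacity: 500)
config = LaunchDarkly::Config.new(feature_store: store)
client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY", config)
```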
data/lib/ldclient-rb/evaluation.rb
CHANGED
@@ -22,16 +22,18 @@ module LaunchDarkly
     end
 
     SEMVER_OPERAND = lambda do |v|
+      semver = nil
       if v.is_a? String
         for _ in 0..2 do
           begin
-
+            semver = Semantic::Version.new(v)
+            break  # Some versions of jruby cannot properly handle a return here and return from the method that calls this lambda
           rescue ArgumentError
             v = addZeroVersionComponent(v)
           end
         end
       end
-
+      semver
     end
 
     def self.addZeroVersionComponent(v)
@@ -98,7 +100,11 @@ module LaunchDarkly
       semVerLessThan:
         comparator(SEMVER_OPERAND) { |n| n < 0 },
       semVerGreaterThan:
-        comparator(SEMVER_OPERAND) { |n| n > 0 }
+        comparator(SEMVER_OPERAND) { |n| n > 0 },
+      segmentMatch:
+        lambda do |a, b|
+          false  # we should never reach this - instead we special-case this operator in clause_match_user
+        end
     }
 
     class EvaluationError < StandardError
@@ -136,54 +142,46 @@ module LaunchDarkly
     def eval_internal(flag, user, store, events)
       failed_prereq = false
       # Evaluate prerequisites, if any
-
-
-          prereq_flag = store.get(prerequisite[:key])
+      (flag[:prerequisites] || []).each do |prerequisite|
+        prereq_flag = store.get(FEATURES, prerequisite[:key])
 
-
-
-
-
-
-
-
-
-            failed_prereq = true
-          end
-        rescue => exn
-          @config.logger.error("[LDClient] Error evaluating prerequisite: #{exn.inspect}")
+        if prereq_flag.nil? || !prereq_flag[:on]
+          failed_prereq = true
+        else
+          begin
+            prereq_res = eval_internal(prereq_flag, user, store, events)
+            variation = get_variation(prereq_flag, prerequisite[:variation])
+            events.push(kind: "feature", key: prereq_flag[:key], value: prereq_res, version: prereq_flag[:version], prereqOf: flag[:key])
+            if prereq_res.nil? || prereq_res != variation
              failed_prereq = true
            end
+          rescue => exn
+            @config.logger.error("[LDClient] Error evaluating prerequisite: #{exn.inspect}")
+            failed_prereq = true
          end
        end
+      end
 
-
-
-      end
+      if failed_prereq
+        return nil
      end
       # The prerequisites were satisfied.
       # Now walk through the evaluation steps and get the correct
       # variation index
-      eval_rules(flag, user)
+      eval_rules(flag, user, store)
     end
 
-    def eval_rules(flag, user)
+    def eval_rules(flag, user, store)
       # Check user target matches
-
-
-
-        target[:values].each do |value|
-          return get_variation(flag, target[:variation]) if value == user[:key]
-        end
-      end
+      (flag[:targets] || []).each do |target|
+        (target[:values] || []).each do |value|
+          return get_variation(flag, target[:variation]) if value == user[:key]
        end
      end
-
+
       # Check custom rules
-
-      flag
-        return variation_for_user(rule, user, flag) if rule_match_user(rule, user)
-      end
+      (flag[:rules] || []).each do |rule|
+        return variation_for_user(rule, user, flag) if rule_match_user(rule, user, store)
      end
 
       # Check the fallthrough rule
@@ -202,17 +200,30 @@ module LaunchDarkly
       flag[:variations][index]
     end
 
-    def rule_match_user(rule, user)
+    def rule_match_user(rule, user, store)
       return false if !rule[:clauses]
 
-      rule[:clauses].each do |clause|
-        return false if !clause_match_user(clause, user)
+      (rule[:clauses] || []).each do |clause|
+        return false if !clause_match_user(clause, user, store)
       end
 
       return true
     end
 
-    def clause_match_user(clause, user)
+    def clause_match_user(clause, user, store)
+      # In the case of a segment match operator, we check if the user is in any of the segments,
+      # and possibly negate
+      if clause[:op].to_sym == :segmentMatch
+        (clause[:values] || []).each do |v|
+          segment = store.get(SEGMENTS, v)
+          return maybe_negate(clause, true) if !segment.nil? && segment_match_user(segment, user)
+        end
+        return maybe_negate(clause, false)
+      end
+      clause_match_user_no_segments(clause, user)
+    end
+
+    def clause_match_user_no_segments(clause, user)
       val = user_value(user, clause[:attribute])
       return false if val.nil?
 
@@ -250,6 +261,33 @@ module LaunchDarkly
       end
     end
 
+    def segment_match_user(segment, user)
+      return false unless user[:key]
+
+      return true if segment[:included].include?(user[:key])
+      return false if segment[:excluded].include?(user[:key])
+
+      (segment[:rules] || []).each do |r|
+        return true if segment_rule_match_user(r, user, segment[:key], segment[:salt])
+      end
+
+      return false
+    end
+
+    def segment_rule_match_user(rule, user, segment_key, salt)
+      (rule[:clauses] || []).each do |c|
+        return false unless clause_match_user_no_segments(c, user)
+      end
+
+      # If the weight is absent, this rule matches
+      return true if !rule[:weight]
+
+      # All of the clauses are met. See if the user buckets in
+      bucket = bucket_user(user, segment_key, rule[:bucketBy].nil? ? "key" : rule[:bucketBy], salt)
+      weight = rule[:weight].to_f / 100000.0
+      return bucket < weight
+    end
+
     def bucket_user(user, key, bucket_by, salt)
       return nil unless user[:key]
 
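For reference, `segment_match_user` and `segment_rule_match_user` above read a segment hash shaped roughly like the following. This is a hand-written illustration of the fields the code touches (`included`, `excluded`, `rules`, `clauses`, `weight`, `bucketBy`, `salt`), not an official schema.

```ruby
# Hypothetical segment, shaped the way the evaluation code reads it.
segment = {
  key: "beta-testers",
  salt: "abc123",                  # feeds bucket_user for percentage rollouts
  included: ["always-in-user"],    # short-circuits to a match
  excluded: ["never-in-user"],     # short-circuits to a non-match
  rules: [
    {
      clauses: [
        { attribute: :email, op: :in, values: ["tester@example.com"], negate: false }
      ],
      weight: 50000                # out of 100,000, i.e. 50% of users matching the clauses
      # bucketBy defaults to "key" when omitted
    }
  ]
}
```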
data/lib/ldclient-rb/in_memory_store.rb
ADDED
@@ -0,0 +1,89 @@
+require "concurrent/atomics"
+
+module LaunchDarkly
+
+  # These constants denote the types of data that can be stored in the feature store. If
+  # we add another storable data type in the future, as long as it follows the same pattern
+  # (having "key", "version", and "deleted" properties), we only need to add a corresponding
+  # constant here and the existing store should be able to handle it.
+  FEATURES = {
+    namespace: "features"
+  }.freeze
+
+  SEGMENTS = {
+    namespace: "segments"
+  }.freeze
+
+  #
+  # Default implementation of the LaunchDarkly client's feature store, using an in-memory
+  # cache. This object holds feature flags and related data received from the
+  # streaming API.
+  #
+  class InMemoryFeatureStore
+    def initialize
+      @items = Hash.new
+      @lock = Concurrent::ReadWriteLock.new
+      @initialized = Concurrent::AtomicBoolean.new(false)
+    end
+
+    def get(kind, key)
+      @lock.with_read_lock do
+        coll = @items[kind]
+        f = coll.nil? ? nil : coll[key.to_sym]
+        (f.nil? || f[:deleted]) ? nil : f
+      end
+    end
+
+    def all(kind)
+      @lock.with_read_lock do
+        coll = @items[kind]
+        (coll.nil? ? Hash.new : coll).select { |_k, f| not f[:deleted] }
+      end
+    end
+
+    def delete(kind, key, version)
+      @lock.with_write_lock do
+        coll = @items[kind]
+        if coll.nil?
+          coll = Hash.new
+          @items[kind] = coll
+        end
+        old = coll[key.to_sym]
+
+        if old.nil? || old[:version] < version
+          coll[key.to_sym] = { deleted: true, version: version }
+        end
+      end
+    end
+
+    def init(all_data)
+      @lock.with_write_lock do
+        @items.replace(all_data)
+        @initialized.make_true
+      end
+    end
+
+    def upsert(kind, item)
+      @lock.with_write_lock do
+        coll = @items[kind]
+        if coll.nil?
+          coll = Hash.new
+          @items[kind] = coll
+        end
+        old = coll[item[:key].to_sym]
+
+        if old.nil? || old[:version] < item[:version]
+          coll[item[:key].to_sym] = item
+        end
+      end
+    end
+
+    def initialized?
+      @initialized.value
+    end
+
+    def stop
+      # nothing to do
+    end
+  end
+end
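A quick illustration of how the kind descriptors drive this store; the flag data below is abbreviated and made up.

```ruby
store = LaunchDarkly::InMemoryFeatureStore.new

# Seed the store the way the polling/streaming components do: one hash per data kind.
store.init(
  LaunchDarkly::FEATURES => { "my-flag": { key: "my-flag", version: 1, on: true } },
  LaunchDarkly::SEGMENTS => {}
)

store.get(LaunchDarkly::FEATURES, "my-flag")   # => the flag hash
store.get(LaunchDarkly::SEGMENTS, "my-flag")   # => nil, different namespace

# upsert only replaces an item when the incoming :version is newer.
store.upsert(LaunchDarkly::FEATURES, { key: "my-flag", version: 2, on: false })
```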
data/lib/ldclient-rb/ldclient.rb
CHANGED
@@ -130,7 +130,7 @@ module LaunchDarkly
       end
 
       sanitize_user(user)
-      feature = @store.get(key)
+      feature = @store.get(FEATURES, key)
 
       if feature.nil?
         @config.logger.info("[LDClient] Unknown feature flag #{key}. Returning default value")
@@ -197,7 +197,7 @@ module LaunchDarkly
       end
 
       begin
-        features = @store.all
+        features = @store.all(FEATURES)
 
         # TODO rescue if necessary
         Hash[features.map{ |k, f| [k, evaluate(f, user, @store)[:value]] }]
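These two hunks sit inside the client's flag-evaluation and all-flags paths; the public calls are unchanged, only the store lookups now pass a kind. A hedged usage sketch, reusing the `client` from the configuration example earlier (the flag key and user are made up; `variation` and `all_flags` are the client methods these code paths back in this version of the SDK):

```ruby
user = { key: "user-123", email: "tester@example.com" }

# Internally reads the flag with @store.get(FEATURES, "my-flag").
value = client.variation("my-flag", user, false)

# Internally reads every flag with @store.all(FEATURES).
flags = client.all_flags(user)
```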
data/lib/ldclient-rb/polling.rb
CHANGED
@@ -31,9 +31,12 @@ module LaunchDarkly
     end
 
     def poll
-
-      if
-        @config.feature_store.init(
+      all_data = @requestor.request_all_data
+      if all_data
+        @config.feature_store.init({
+          FEATURES => all_data[:flags],
+          SEGMENTS => all_data[:segments]
+        })
         if @initialized.make_true
           @config.logger.info("[LDClient] Polling connection initialized")
         end
data/lib/ldclient-rb/redis_store.rb (renamed from redis_feature_store.rb)
CHANGED
@@ -5,7 +5,8 @@ require "thread_safe"
 module LaunchDarkly
   #
   # An implementation of the LaunchDarkly client's feature store that uses a Redis
-  # instance.
+  # instance. This object holds feature flags and related data received from the
+  # streaming API. Feature data can also be further cached in memory to reduce overhead
   # of calls to Redis.
   #
   # To use this class, you must first have the `redis`, `connection-pool`, and `moneta`
@@ -32,7 +33,7 @@ module LaunchDarkly
     # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger`
     # @option opts [Integer] :max_connections size of the Redis connection pool
     # @option opts [Integer] :expiration expiration time for the in-memory cache, in seconds; 0 for no local caching
-    # @option opts [Integer] :capacity maximum number of feature flags to cache locally
+    # @option opts [Integer] :capacity maximum number of feature flags (or related objects) to cache locally
     # @option opts [Object] :pool custom connection pool, used for testing only
     #
     def initialize(opts = {})
@@ -52,7 +53,6 @@ module LaunchDarkly
       end
       @prefix = opts[:prefix] || RedisFeatureStore.default_prefix
       @logger = opts[:logger] || Config.default_logger
-      @features_key = @prefix + ':features'
 
       @expiration_seconds = opts[:expiration] || 15
       @capacity = opts[:capacity] || 1000
@@ -91,44 +91,44 @@ and prefix: #{@prefix}")
       'launchdarkly'
     end
 
-    def get(key)
-      f = @cache[key
+    def get(kind, key)
+      f = @cache[cache_key(kind, key)]
       if f.nil?
-        @logger.debug("RedisFeatureStore: no cache hit for #{key}, requesting from Redis")
+        @logger.debug("RedisFeatureStore: no cache hit for #{key} in '#{kind[:namespace]}', requesting from Redis")
         f = with_connection do |redis|
           begin
-            get_redis(redis,key.to_sym)
+            get_redis(kind, redis, key.to_sym)
           rescue => e
-            @logger.error("RedisFeatureStore: could not retrieve
+            @logger.error("RedisFeatureStore: could not retrieve #{key} from Redis in '#{kind[:namespace]}', with error: #{e}")
             nil
           end
         end
         if !f.nil?
-          put_cache(key
+          put_cache(kind, key, f)
         end
       end
       if f.nil?
-        @logger.debug("RedisFeatureStore:
+        @logger.debug("RedisFeatureStore: #{key} not found in '#{kind[:namespace]}'")
         nil
       elsif f[:deleted]
-        @logger.debug("RedisFeatureStore:
+        @logger.debug("RedisFeatureStore: #{key} was deleted in '#{kind[:namespace]}', returning nil")
        nil
       else
         f
       end
     end
 
-    def all
+    def all(kind)
       fs = {}
       with_connection do |redis|
         begin
-          hashfs = redis.hgetall(
+          hashfs = redis.hgetall(items_key(kind))
         rescue => e
-          @logger.error("RedisFeatureStore: could not retrieve all
+          @logger.error("RedisFeatureStore: could not retrieve all '#{kind[:namespace]}' items from Redis with error: #{e}; returning none")
           hashfs = {}
         end
-        hashfs.each do |k,
-          f = JSON.parse(
+        hashfs.each do |k, jsonItem|
+          f = JSON.parse(jsonItem, symbolize_names: true)
           if !f[:deleted]
             fs[k.to_sym] = f
           end
@@ -137,43 +137,47 @@ and prefix: #{@prefix}")
       fs
     end
 
-    def delete(key, version)
+    def delete(kind, key, version)
       with_connection do |redis|
-        f = get_redis(redis, key)
+        f = get_redis(kind, redis, key)
         if f.nil?
-          put_redis_and_cache(redis, key, { deleted: true, version: version })
+          put_redis_and_cache(kind, redis, key, { deleted: true, version: version })
         else
           if f[:version] < version
             f1 = f.clone
             f1[:deleted] = true
             f1[:version] = version
-            put_redis_and_cache(redis, key, f1)
+            put_redis_and_cache(kind, redis, key, f1)
           else
-            @logger.warn("RedisFeatureStore: attempted to delete
-with a version that is the same or older: #{version}")
+            @logger.warn("RedisFeatureStore: attempted to delete #{key} version: #{f[:version]} \
+in '#{kind[:namespace]}' with a version that is the same or older: #{version}")
           end
         end
       end
     end
 
-    def init(
+    def init(all_data)
       @cache.clear
+      count = 0
       with_connection do |redis|
-
-        multi
-
+        all_data.each do |kind, items|
+          redis.multi do |multi|
+            multi.del(items_key(kind))
+            count = count + items.count
+            items.each { |k, v| put_redis_and_cache(kind, multi, k, v) }
+          end
        end
      end
      @inited.set(true)
-      @logger.info("RedisFeatureStore: initialized with #{
+      @logger.info("RedisFeatureStore: initialized with #{count} items")
    end
 
-    def upsert(
+    def upsert(kind, item)
      with_connection do |redis|
-        redis.watch(
-        old = get_redis(redis, key)
-        if old.nil? || (old[:version] <
-          put_redis_and_cache(redis, key,
+        redis.watch(items_key(kind)) do
+          old = get_redis(kind, redis, item[:key])
+          if old.nil? || (old[:version] < item[:version])
+            put_redis_and_cache(kind, redis, item[:key], item)
          end
          redis.unwatch
        end
@@ -198,35 +202,43 @@ and prefix: #{@prefix}")
 
    private
 
+    def items_key(kind)
+      @prefix + ":" + kind[:namespace]
+    end
+
+    def cache_key(kind, key)
+      kind[:namespace] + ":" + key.to_s
+    end
+
    def with_connection
      @pool.with { |redis| yield(redis) }
    end
 
-    def get_redis(redis, key)
+    def get_redis(kind, redis, key)
      begin
-
-        JSON.parse(
+        json_item = redis.hget(items_key(kind), key)
+        JSON.parse(json_item, symbolize_names: true) if json_item
      rescue => e
-        @logger.error("RedisFeatureStore: could not retrieve
+        @logger.error("RedisFeatureStore: could not retrieve #{key} from Redis, error: #{e}")
        nil
      end
    end
 
-    def put_cache(key, value)
-      @cache.store(key, value, expires: @expiration_seconds)
+    def put_cache(kind, key, value)
+      @cache.store(cache_key(kind, key), value, expires: @expiration_seconds)
    end
 
-    def put_redis_and_cache(redis, key,
+    def put_redis_and_cache(kind, redis, key, item)
      begin
-        redis.hset(
+        redis.hset(items_key(kind), key, item.to_json)
      rescue => e
        @logger.error("RedisFeatureStore: could not store #{key} in Redis, error: #{e}")
      end
-      put_cache(key.to_sym,
+      put_cache(kind, key.to_sym, item)
    end
 
    def query_inited
-      with_connection { |redis| redis.exists(
+      with_connection { |redis| redis.exists(items_key(FEATURES)) }
    end
  end
end
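Putting the new private helpers together: each data kind lives in its own Redis hash under the configured prefix, one field per item, serialized as JSON, while the local Moneta cache keys items as "namespace:key". A hedged peek at the resulting layout using the `redis` gem, assuming the default prefix 'launchdarkly' shown in the context above:

```ruby
require "redis"
require "json"

r = Redis.new
r.hkeys("launchdarkly:features")                        # => ["my-flag", ...]
JSON.parse(r.hget("launchdarkly:features", "my-flag"))  # => the flag, as written by hset(items_key(kind), key, item.to_json)
r.hkeys("launchdarkly:segments")                        # => segment keys, if any
```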