launchdarkly-server-sdk 5.5.7

Sign up to get free protection for your applications and to get access to all the features.
Files changed (87) hide show
  1. checksums.yaml +7 -0
  2. data/.circleci/config.yml +134 -0
  3. data/.github/ISSUE_TEMPLATE/bug_report.md +37 -0
  4. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  5. data/.gitignore +15 -0
  6. data/.hound.yml +2 -0
  7. data/.rspec +2 -0
  8. data/.rubocop.yml +600 -0
  9. data/.simplecov +4 -0
  10. data/.yardopts +9 -0
  11. data/CHANGELOG.md +261 -0
  12. data/CODEOWNERS +1 -0
  13. data/CONTRIBUTING.md +37 -0
  14. data/Gemfile +3 -0
  15. data/Gemfile.lock +102 -0
  16. data/LICENSE.txt +13 -0
  17. data/README.md +56 -0
  18. data/Rakefile +5 -0
  19. data/azure-pipelines.yml +51 -0
  20. data/ext/mkrf_conf.rb +11 -0
  21. data/launchdarkly-server-sdk.gemspec +40 -0
  22. data/lib/ldclient-rb.rb +29 -0
  23. data/lib/ldclient-rb/cache_store.rb +45 -0
  24. data/lib/ldclient-rb/config.rb +411 -0
  25. data/lib/ldclient-rb/evaluation.rb +455 -0
  26. data/lib/ldclient-rb/event_summarizer.rb +55 -0
  27. data/lib/ldclient-rb/events.rb +468 -0
  28. data/lib/ldclient-rb/expiring_cache.rb +77 -0
  29. data/lib/ldclient-rb/file_data_source.rb +312 -0
  30. data/lib/ldclient-rb/flags_state.rb +76 -0
  31. data/lib/ldclient-rb/impl.rb +13 -0
  32. data/lib/ldclient-rb/impl/integrations/consul_impl.rb +158 -0
  33. data/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +228 -0
  34. data/lib/ldclient-rb/impl/integrations/redis_impl.rb +155 -0
  35. data/lib/ldclient-rb/impl/store_client_wrapper.rb +47 -0
  36. data/lib/ldclient-rb/impl/store_data_set_sorter.rb +55 -0
  37. data/lib/ldclient-rb/in_memory_store.rb +100 -0
  38. data/lib/ldclient-rb/integrations.rb +55 -0
  39. data/lib/ldclient-rb/integrations/consul.rb +38 -0
  40. data/lib/ldclient-rb/integrations/dynamodb.rb +47 -0
  41. data/lib/ldclient-rb/integrations/redis.rb +55 -0
  42. data/lib/ldclient-rb/integrations/util/store_wrapper.rb +230 -0
  43. data/lib/ldclient-rb/interfaces.rb +153 -0
  44. data/lib/ldclient-rb/ldclient.rb +424 -0
  45. data/lib/ldclient-rb/memoized_value.rb +32 -0
  46. data/lib/ldclient-rb/newrelic.rb +17 -0
  47. data/lib/ldclient-rb/non_blocking_thread_pool.rb +46 -0
  48. data/lib/ldclient-rb/polling.rb +78 -0
  49. data/lib/ldclient-rb/redis_store.rb +87 -0
  50. data/lib/ldclient-rb/requestor.rb +101 -0
  51. data/lib/ldclient-rb/simple_lru_cache.rb +25 -0
  52. data/lib/ldclient-rb/stream.rb +141 -0
  53. data/lib/ldclient-rb/user_filter.rb +51 -0
  54. data/lib/ldclient-rb/util.rb +50 -0
  55. data/lib/ldclient-rb/version.rb +3 -0
  56. data/scripts/gendocs.sh +11 -0
  57. data/scripts/release.sh +27 -0
  58. data/spec/config_spec.rb +63 -0
  59. data/spec/evaluation_spec.rb +739 -0
  60. data/spec/event_summarizer_spec.rb +63 -0
  61. data/spec/events_spec.rb +642 -0
  62. data/spec/expiring_cache_spec.rb +76 -0
  63. data/spec/feature_store_spec_base.rb +213 -0
  64. data/spec/file_data_source_spec.rb +255 -0
  65. data/spec/fixtures/feature.json +37 -0
  66. data/spec/fixtures/feature1.json +36 -0
  67. data/spec/fixtures/user.json +9 -0
  68. data/spec/flags_state_spec.rb +81 -0
  69. data/spec/http_util.rb +109 -0
  70. data/spec/in_memory_feature_store_spec.rb +12 -0
  71. data/spec/integrations/consul_feature_store_spec.rb +42 -0
  72. data/spec/integrations/dynamodb_feature_store_spec.rb +105 -0
  73. data/spec/integrations/store_wrapper_spec.rb +276 -0
  74. data/spec/ldclient_spec.rb +471 -0
  75. data/spec/newrelic_spec.rb +5 -0
  76. data/spec/polling_spec.rb +120 -0
  77. data/spec/redis_feature_store_spec.rb +95 -0
  78. data/spec/requestor_spec.rb +214 -0
  79. data/spec/segment_store_spec_base.rb +95 -0
  80. data/spec/simple_lru_cache_spec.rb +24 -0
  81. data/spec/spec_helper.rb +9 -0
  82. data/spec/store_spec.rb +10 -0
  83. data/spec/stream_spec.rb +60 -0
  84. data/spec/user_filter_spec.rb +91 -0
  85. data/spec/util_spec.rb +17 -0
  86. data/spec/version_spec.rb +7 -0
  87. metadata +375 -0
module LaunchDarkly
  #
  # Namespace for the SDK's private implementation details. Nothing under this
  # module is part of the supported public API; it may change at any time
  # without notice.
  #
  # @since 5.5.0
  # @private
  #
  module Impl
    # The actual implementation files live under ldclient-rb/impl/.
  end
end
require "json"

module LaunchDarkly
  module Impl
    module Integrations
      module Consul
        #
        # Internal implementation of the Consul feature store, intended to be used with CachingStoreWrapper.
        # Items are stored as individual KV entries under "<prefix>/<namespace>/<key>", serialized as JSON.
        #
        class ConsulFeatureStoreCore
          begin
            require "diplomat"
            CONSUL_ENABLED = true
          rescue ScriptError, StandardError
            CONSUL_ENABLED = false
          end

          #
          # @param opts [Hash] options: :prefix, :logger, :consul_config, :url
          # @raise [RuntimeError] if the diplomat gem is not available
          #
          def initialize(opts)
            unless CONSUL_ENABLED
              raise RuntimeError.new("can't use Consul feature store without the 'diplomat' gem")
            end

            @prefix = (opts[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix) + '/'
            @logger = opts[:logger] || Config.default_logger
            # Diplomat keeps its configuration in module-level state; apply overrides if given.
            Diplomat.configuration = opts[:consul_config] unless opts[:consul_config].nil?
            Diplomat.configuration.url = opts[:url] unless opts[:url].nil?
            @logger.info("ConsulFeatureStore: using Consul host at #{Diplomat.configuration.url}")
          end

          # Replaces the entire store contents with all_data in one transaction batch,
          # deleting any previously stored keys that are no longer present.
          def init_internal(all_data)
            # Capture the keys that exist now; anything not overwritten below gets deleted.
            stale_keys = Set.new
            existing = Diplomat::Kv.get(@prefix, { keys: true, recurse: true }, :return)
            stale_keys.merge(existing) unless existing == ""

            operations = []
            item_count = 0

            # Queue a "set" for every item in the new data set.
            all_data.each do |kind, items|
              items.values.each do |item|
                key = item_key(kind, item[:key])
                operations.push({ 'KV' => { 'Verb' => 'set', 'Key' => key, 'Value' => item.to_json } })
                stale_keys.delete(key)
                item_count += 1
              end
            end

            # Queue a "delete" for every previously stored key that was not rewritten above.
            stale_keys.each do |key|
              operations.push({ 'KV' => { 'Verb' => 'delete', 'Key' => key } })
            end

            # Finally, write the marker key that initialized_internal? checks.
            operations.push({ 'KV' => { 'Verb' => 'set', 'Key' => inited_key, 'Value' => '' } })

            ConsulUtil.batch_operations(operations)

            @logger.info { "Initialized database with #{item_count} items" }
          end

          # Returns the deserialized item for (kind, key), or nil if not found.
          def get_internal(kind, key)
            raw = Diplomat::Kv.get(item_key(kind, key), {}, :return) # :return means "don't throw an error if not found"
            (raw.nil? || raw == "") ? nil : JSON.parse(raw, symbolize_names: true)
          end

          # Returns all items of the given kind, keyed by symbolized item key.
          def get_all_internal(kind)
            result = {}
            entries = Diplomat::Kv.get(kind_key(kind), { recurse: true }, :return)
            (entries == "" ? [] : entries).each do |entry|
              raw = entry[:value]
              next if raw.nil?
              item = JSON.parse(raw, symbolize_names: true)
              result[item[:key].to_sym] = item
            end
            result
          end

          # Writes new_item unless the stored version is newer, using Consul's
          # compare-and-set; returns whichever item ends up current.
          def upsert_internal(kind, new_item)
            key = item_key(kind, new_item[:key])
            json = new_item.to_json

            # Retry indefinitely until our compare-and-set write (or someone else's) wins.
            loop do
              old_value = Diplomat::Kv.get(key, { decode_values: true }, :return)
              if old_value.nil? || old_value == ""
                mod_index = 0
              else
                old_item = JSON.parse(old_value[0]["Value"], symbolize_names: true)
                # A stale update is a no-op; hand the existing item back so
                # FeatureStoreWrapper can cache it.
                return old_item if old_item[:version] >= new_item[:version]
                mod_index = old_value[0]["ModifyIndex"]
              end

              # Compare-and-set: the put succeeds only if the key's ModifyIndex is still
              # equal to mod_index. A mod_index of zero means the key must not exist yet.
              return new_item if Diplomat::Kv.put(key, json, cas: mod_index)

              # Someone else wrote in between; loop and try again.
              @logger.debug { "Concurrent modification detected, retrying" }
            end
          end

          # True once init_internal has completed (detected via the marker key).
          def initialized_internal?
            # The :return option can't distinguish a missing key from an empty string,
            # so rely on the exception instead.
            Diplomat::Kv.get(inited_key, {})
            true
          rescue Diplomat::KeyNotFound
            false
          end

          def stop
            # Diplomat keeps only module-level state; there is no client instance to dispose of.
          end

          private

          def item_key(kind, key)
            kind_key(kind) + key.to_s
          end

          def kind_key(kind)
            @prefix + kind[:namespace] + '/'
          end

          def inited_key
            @prefix + '$inited'
          end
        end
      end
    end
  end
end
module LaunchDarkly
  module Impl
    module Integrations
      module Consul
        #
        # Helper for submitting batched Consul KV transactions.
        #
        class ConsulUtil
          #
          # Splits the given operations into transaction-sized chunks and submits
          # each chunk, destructively draining the array as it goes.
          #
          def self.batch_operations(ops)
            max_per_txn = 64 # Consul can only do this many at a time
            loop do
              batch = ops.shift(max_per_txn)
              break if batch.empty?
              Diplomat::Kv.txn(batch)
            end
          end
        end
      end
    end
  end
end
require "json"

module LaunchDarkly
  module Impl
    module Integrations
      module DynamoDB
        #
        # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper.
        # Each item is stored as a row keyed by (namespace, key), with the item itself serialized to JSON
        # in a single attribute.
        #
        class DynamoDBFeatureStoreCore
          begin
            require "aws-sdk-dynamodb"
            AWS_SDK_ENABLED = true
          rescue ScriptError, StandardError
            # Fall back to the monolithic v2 SDK gem if the modular gem isn't present.
            begin
              require "aws-sdk"
              AWS_SDK_ENABLED = true
            rescue ScriptError, StandardError
              AWS_SDK_ENABLED = false
            end
          end

          # Names of the table's partition and sort key attributes.
          PARTITION_KEY = "namespace"
          SORT_KEY = "key"

          # Attribute holding the item's version number, used for optimistic concurrency.
          VERSION_ATTRIBUTE = "version"
          # Attribute holding the serialized JSON of the item.
          ITEM_JSON_ATTRIBUTE = "item"

          #
          # @param table_name [String] name of an existing DynamoDB table
          # @param opts [Hash] options: :prefix, :logger, :existing_client, :dynamodb_opts
          # @raise [RuntimeError] if no AWS SDK gem is available
          #
          def initialize(table_name, opts)
            if !AWS_SDK_ENABLED
              raise RuntimeError.new("can't use DynamoDB feature store without the aws-sdk or aws-sdk-dynamodb gem")
            end

            @table_name = table_name
            @prefix = opts[:prefix]
            @logger = opts[:logger] || Config.default_logger

            if !opts[:existing_client].nil?
              @client = opts[:existing_client]
            else
              @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts] || {})
            end

            @logger.info("DynamoDBFeatureStore: using DynamoDB table \"#{table_name}\"")
          end

          # Replaces the entire store contents with all_data, deleting any stale rows,
          # then writes the marker row checked by initialized_internal?.
          def init_internal(all_data)
            # Start by reading the existing keys; we will later delete any of these that weren't in all_data.
            unused_old_keys = read_existing_keys(all_data.keys)

            requests = []
            num_items = 0

            # Insert or update every provided item
            all_data.each do |kind, items|
              items.values.each do |item|
                requests.push({ put_request: { item: marshal_item(kind, item) } })
                unused_old_keys.delete([ namespace_for_kind(kind), item[:key] ])
                num_items = num_items + 1
              end
            end

            # Now delete any previously existing items whose keys were not in the current data
            unused_old_keys.each do |tuple|
              del_item = make_keys_hash(tuple[0], tuple[1])
              requests.push({ delete_request: { key: del_item } })
            end

            # Now set the special key that we check in initialized_internal?
            inited_item = make_keys_hash(inited_key, inited_key)
            requests.push({ put_request: { item: inited_item } })

            DynamoDBUtil.batch_write_requests(@client, @table_name, requests)

            @logger.info { "Initialized table #{@table_name} with #{num_items} items" }
          end

          # Returns the deserialized item for (kind, key), or nil if not found.
          def get_internal(kind, key)
            resp = get_item_by_keys(namespace_for_kind(kind), key)
            unmarshal_item(resp.item)
          end

          # Returns all items of the given kind, keyed by symbolized item key.
          # Follows DynamoDB pagination until the result set is exhausted.
          def get_all_internal(kind)
            items_out = {}
            req = make_query_for_kind(kind)
            while true
              resp = @client.query(req)
              resp.items.each do |item|
                item_out = unmarshal_item(item)
                items_out[item_out[:key].to_sym] = item_out
              end
              break if resp.last_evaluated_key.nil? || resp.last_evaluated_key.length == 0
              # BUGFIX: req is a plain Hash, which has no exclusive_start_key= writer.
              # The previous code called req.exclusive_start_key=, raising NoMethodError
              # as soon as a query result spanned more than one page.
              req[:exclusive_start_key] = resp.last_evaluated_key
            end
            items_out
          end

          # Writes new_item unless the stored version is newer; returns whichever item wins.
          def upsert_internal(kind, new_item)
            encoded_item = marshal_item(kind, new_item)
            begin
              # Conditional put: only succeeds if the row doesn't exist yet or has a lower version.
              @client.put_item({
                table_name: @table_name,
                item: encoded_item,
                condition_expression: "attribute_not_exists(#namespace) or attribute_not_exists(#key) or :version > #version",
                expression_attribute_names: {
                  "#namespace" => PARTITION_KEY,
                  "#key" => SORT_KEY,
                  "#version" => VERSION_ATTRIBUTE
                },
                expression_attribute_values: {
                  ":version" => new_item[:version]
                }
              })
              new_item
            rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException
              # The item was not updated because there's a newer item in the database.
              # We must now read the item that's in the database and return it, so CachingStoreWrapper can cache it.
              get_internal(kind, new_item[:key])
            end
          end

          # True once init_internal has completed (detected via the marker row).
          def initialized_internal?
            resp = get_item_by_keys(inited_key, inited_key)
            !resp.item.nil? && resp.item.length > 0
          end

          def stop
            # AWS client doesn't seem to have a close method
          end

          private

          # Prepends the configured prefix (if any) to a namespace string.
          def prefixed_namespace(base_str)
            (@prefix.nil? || @prefix == "") ? base_str : "#{@prefix}:#{base_str}"
          end

          def namespace_for_kind(kind)
            prefixed_namespace(kind[:namespace])
          end

          def inited_key
            prefixed_namespace("$inited")
          end

          def make_keys_hash(namespace, key)
            {
              PARTITION_KEY => namespace,
              SORT_KEY => key
            }
          end

          # Builds a query request for all rows in the given kind's namespace.
          def make_query_for_kind(kind)
            {
              table_name: @table_name,
              consistent_read: true,
              key_conditions: {
                PARTITION_KEY => {
                  comparison_operator: "EQ",
                  attribute_value_list: [ namespace_for_kind(kind) ]
                }
              }
            }
          end

          def get_item_by_keys(namespace, key)
            @client.get_item({
              table_name: @table_name,
              key: make_keys_hash(namespace, key)
            })
          end

          # Returns a Set of [namespace, key] tuples for every row belonging to the given kinds.
          def read_existing_keys(kinds)
            keys = Set.new
            kinds.each do |kind|
              req = make_query_for_kind(kind).merge({
                projection_expression: "#namespace, #key",
                expression_attribute_names: {
                  "#namespace" => PARTITION_KEY,
                  "#key" => SORT_KEY
                }
              })
              while true
                resp = @client.query(req)
                resp.items.each do |item|
                  namespace = item[PARTITION_KEY]
                  key = item[SORT_KEY]
                  keys.add([ namespace, key ])
                end
                break if resp.last_evaluated_key.nil? || resp.last_evaluated_key.length == 0
                # BUGFIX: same pagination issue as in get_all_internal — use Hash#[]=.
                req[:exclusive_start_key] = resp.last_evaluated_key
              end
            end
            keys
          end

          def marshal_item(kind, item)
            make_keys_hash(namespace_for_kind(kind), item[:key]).merge({
              VERSION_ATTRIBUTE => item[:version],
              ITEM_JSON_ATTRIBUTE => item.to_json
            })
          end

          def unmarshal_item(item)
            return nil if item.nil? || item.length == 0
            json_attr = item[ITEM_JSON_ATTRIBUTE]
            raise RuntimeError.new("DynamoDB map did not contain expected item string") if json_attr.nil?
            JSON.parse(json_attr, symbolize_names: true)
          end
        end
      end
    end
  end
end
module LaunchDarkly
  module Impl
    module Integrations
      module DynamoDB
        #
        # Helper for writing items to DynamoDB in batches.
        #
        class DynamoDBUtil
          #
          # Drains the requests array, sending the entries to client.batch_write_item
          # in groups no larger than DynamoDB's per-call limit.
          #
          def self.batch_write_requests(client, table, requests)
            max_batch = 25
            loop do
              group = requests.shift(max_batch)
              break if group.empty?
              client.batch_write_item({ request_items: { table => group } })
            end
          end
        end
      end
    end
  end
end
@@ -0,0 +1,155 @@
1
+ require "concurrent/atomics"
2
+ require "json"
3
+
4
module LaunchDarkly
  module Impl
    module Integrations
      module Redis
        #
        # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper.
        # Each kind of item lives in a Redis hash whose fields are item keys and whose values are the
        # items serialized as JSON.
        #
        class RedisFeatureStoreCore
          begin
            require "redis"
            require "connection_pool"
            REDIS_ENABLED = true
          rescue ScriptError, StandardError
            REDIS_ENABLED = false
          end

          #
          # @param opts [Hash] options: :redis_opts, :redis_url, :max_connections, :pool,
          #   :prefix, :logger, :test_hook
          # @raise [RuntimeError] if the redis or connection_pool gem is missing
          #
          def initialize(opts)
            if !REDIS_ENABLED
              raise RuntimeError.new("can't use Redis feature store because one of these gems is missing: redis, connection_pool")
            end

            @redis_opts = opts[:redis_opts] || Hash.new
            if opts[:redis_url]
              @redis_opts[:url] = opts[:redis_url]
            end
            if !@redis_opts.include?(:url)
              @redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url
            end
            max_connections = opts[:max_connections] || 16
            @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do
              ::Redis.new(@redis_opts)
            end
            @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix
            @logger = opts[:logger] || Config.default_logger
            @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented

            @stopped = Concurrent::AtomicBoolean.new(false)

            with_connection do |redis|
              @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \
      and prefix: #{@prefix}")
            end
          end

          # Atomically replaces all store contents with all_data and sets the initialized marker.
          def init_internal(all_data)
            count = 0
            with_connection do |redis|
              redis.multi do |multi|
                all_data.each do |kind, items|
                  multi.del(items_key(kind))
                  count = count + items.count
                  items.each do |key, item|
                    multi.hset(items_key(kind), key, item.to_json)
                  end
                end
                multi.set(inited_key, inited_key)
              end
            end
            @logger.info { "RedisFeatureStore: initialized with #{count} items" }
          end

          # Returns the deserialized item for (kind, key), or nil if not present.
          def get_internal(kind, key)
            with_connection do |redis|
              get_redis(redis, kind, key)
            end
          end

          # Returns all items of the given kind, keyed by symbolized item key.
          def get_all_internal(kind)
            fs = {}
            with_connection do |redis|
              hashfs = redis.hgetall(items_key(kind))
              hashfs.each do |k, json_item|
                f = JSON.parse(json_item, symbolize_names: true)
                fs[k.to_sym] = f
              end
            end
            fs
          end

          # Writes new_item unless the stored version is newer, using WATCH/MULTI for
          # optimistic concurrency; returns whichever item ends up current.
          def upsert_internal(kind, new_item)
            base_key = items_key(kind)
            key = new_item[:key]
            try_again = true
            final_item = new_item
            while try_again
              try_again = false
              with_connection do |redis|
                redis.watch(base_key) do
                  old_item = get_redis(redis, kind, key)
                  before_update_transaction(base_key, key)
                  if old_item.nil? || old_item[:version] < new_item[:version]
                    result = redis.multi do |multi|
                      multi.hset(base_key, key, new_item.to_json)
                    end
                    # A nil result means the watched key changed and the MULTI was aborted.
                    if result.nil?
                      @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" }
                      try_again = true
                    end
                  else
                    final_item = old_item
                    action = new_item[:deleted] ? "delete" : "update"
                    @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \
      in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" }
                  end
                  redis.unwatch
                end
              end
            end
            final_item
          end

          # True once init_internal has completed (detected via the marker key).
          def initialized_internal?
            with_connection do |redis|
              # BUGFIX: redis-rb >= 4.2 changed #exists to return an Integer count and
              # added #exists? for the boolean form. An Integer 0 is truthy in Ruby, so
              # calling #exists here on newer gems would always report the store as
              # initialized. Prefer #exists? when the client provides it, keeping the
              # old call for redis-rb < 4.2.
              if redis.respond_to?(:exists?)
                redis.exists?(inited_key)
              else
                redis.exists(inited_key)
              end
            end
          end

          # Shuts down the connection pool; safe to call more than once.
          def stop
            if @stopped.make_true
              @pool.shutdown { |redis| redis.close }
            end
          end

          private

          # Test instrumentation point, invoked between WATCH and MULTI during upserts.
          def before_update_transaction(base_key, key)
            @test_hook.before_update_transaction(base_key, key) if !@test_hook.nil?
          end

          # Redis hash that holds all items of the given kind.
          def items_key(kind)
            @prefix + ":" + kind[:namespace]
          end

          # NOTE(review): appears unused within this class; kept for compatibility.
          def cache_key(kind, key)
            kind[:namespace] + ":" + key.to_s
          end

          # Marker key written by init_internal.
          def inited_key
            @prefix + ":$inited"
          end

          def with_connection
            @pool.with { |redis| yield(redis) }
          end

          def get_redis(redis, kind, key)
            json_item = redis.hget(items_key(kind), key)
            json_item.nil? ? nil : JSON.parse(json_item, symbolize_names: true)
          end
        end
      end
    end
  end
end