network_resiliency 0.2.1 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: f19f34885c2da17fa7d1d7bb5777a4d8712918281708d0e04f743260859cbd90
-  data.tar.gz: 1ee75fdedacbb172ae51ba3c425d8808ba3ecfd4b6414cd8155195603dc49d94
+  metadata.gz: a9a1103993c635cbda0e6a3ba3ef523c8179680353fa67b93842f0bdfa44b197
+  data.tar.gz: aeb11a089950492bc6099933c137ccf79c27447a18ce6335e2a46c9982201591
 SHA512:
-  metadata.gz: 7e2f9eb75cddf2fee82f32a561df20180bced19639fb0326010c67e0cca4076d01114a5368648a8ce781220d0cb90db4a6c6256859d27669c7c94204ab69f628
-  data.tar.gz: 614a9aa8a59ce1918f9b800c098a1e0c41254df096ed698c64c3ae24b2c9275a214aa1f134ae6c6f9b5308a29fdcd87f0dc92dfe7af0fafc79cd466d2823265e
+  metadata.gz: 7ea2cedcf4e6044299111e69c211a92a3d7df5305e66b4d6be83d32b949425daee581af33142f73f4d618ec178bd10e19ea0599175420838f57b0b61f157a8f5
+  data.tar.gz: 772d5749b23a32e6be7542c10d18794f1905fd64dde7da5b702bba4b4fb6ceb62fa3b971ce120c550db2fbc89ccec11670e3029ee0649ec7c21266597b0cb637
data/CHANGELOG.md CHANGED
@@ -1,3 +1,16 @@
+### v0.3.1 (2023-11-02)
+- sync thread safety
+- order of magnitude stats
+- stats engine
+- improve stats aggregation
+- lua v1
+- thread safety
+- postgres adapter
+
+### v0.3.0 (2023-10-04)
+- mysql adapter
+- faraday version constraint
+
 ### v0.2.1 (2023-10-02)
 - prevent recursion
 - test coverage
data/Gemfile.lock CHANGED
@@ -1,18 +1,21 @@
 PATH
   remote: .
   specs:
-    network_resiliency (0.2.1)
+    network_resiliency (0.3.1)

 GEM
   remote: https://rubygems.org/
   specs:
     byebug (11.1.3)
-    ddtrace (1.14.0)
-      debase-ruby_core_source (= 3.2.1)
-      libdatadog (~> 3.0.0.1.0)
-      libddwaf (~> 1.9.0.0.0)
+    datadog-ci (0.2.0)
       msgpack
-    debase-ruby_core_source (3.2.1)
+    ddtrace (1.15.0)
+      datadog-ci (~> 0.2.0)
+      debase-ruby_core_source (= 3.2.2)
+      libdatadog (~> 5.0.0.1.0)
+      libddwaf (~> 1.14.0.0.0)
+      msgpack
+    debase-ruby_core_source (3.2.2)
     diff-lcs (1.5.0)
     docile (1.4.0)
     dogstatsd-ruby (4.8.3)
@@ -39,12 +42,14 @@ GEM
     faraday-patron (1.0.0)
     faraday-rack (1.0.0)
     faraday-retry (1.0.3)
-    ffi (1.15.5)
-    libdatadog (3.0.0.1.0)
-    libddwaf (1.9.0.0.1)
+    ffi (1.16.3)
+    libdatadog (5.0.0.1.0)
+    libddwaf (1.14.0.0.0)
       ffi (~> 1.0)
     msgpack (1.7.2)
     multipart-post (2.3.0)
+    mysql2 (0.5.5)
+    pg (1.5.4)
     rack (3.0.8)
     rack-test (2.1.0)
       rack (>= 1.3)
@@ -79,7 +84,9 @@ DEPENDENCIES
   dogstatsd-ruby (<= 4.8.3)
   faraday (~> 1)
   faraday-rack
+  mysql2 (>= 0.5)
   network_resiliency!
+  pg (~> 1.1)
   rack
   rack-test
   redis (~> 4)
@@ -87,4 +94,4 @@ DEPENDENCIES
   simplecov

 BUNDLED WITH
-   2.4.19
+   2.4.20
data/README.md CHANGED
@@ -3,13 +3,22 @@ NetworkResiliency
 ![Gem](https://img.shields.io/gem/dt/network_resiliency?style=plastic)
 [![codecov](https://codecov.io/gh/dpep/network_resiliency_rb/branch/main/graph/badge.svg)](https://codecov.io/gh/dpep/network_resiliency_rb)

-Making networks more resilient to errors.
-
-Resiliency: the ability to recover from adversity or adjust to change.
+Making network requests more resilient to error.
+- less errors, by retrying
+- less time, by setting granular timeouts


 ```ruby
 require "network_resiliency"
+
+NetworkResiliency.configure do |conf|
+  conf.statsd = Datadog::Statsd.new
+
+  # patch Redis instances
+  conf.patch :redis
+end
+
+Redis.new.connect
 ```


data/lib/network_resiliency/adapter/faraday.rb CHANGED
@@ -1,3 +1,4 @@
+gem "faraday", "~> 1.10"
 require "faraday"

 module NetworkResiliency
data/lib/network_resiliency/adapter/mysql.rb ADDED
@@ -0,0 +1,52 @@
+gem "mysql2", ">= 0.5"
+require "mysql2"
+
+module NetworkResiliency
+  module Adapter
+    module Mysql
+      extend self
+
+      def patch
+        return if patched?
+
+        Mysql2::Client.prepend(Instrumentation)
+      end
+
+      def patched?
+        Mysql2::Client.ancestors.include?(Instrumentation)
+      end
+
+      module Instrumentation
+        def connect(_, _, host, *args)
+          # timeout = query_options[:connect_timeout]
+
+          return super unless NetworkResiliency.enabled?(:mysql)
+
+          begin
+            ts = -NetworkResiliency.timestamp
+
+            super
+          rescue Mysql2::Error::TimeoutError => e
+            # capture error
+            raise
+          ensure
+            ts += NetworkResiliency.timestamp
+
+            NetworkResiliency.record(
+              adapter: "mysql",
+              action: "connect",
+              destination: host,
+              error: e&.class,
+              duration: ts,
+            )
+          end
+        end
+
+        # def query(sql, options = {})
+        #   puts "query"
+        #   super
+        # end
+      end
+    end
+  end
+end
data/lib/network_resiliency/adapter/postgres.rb ADDED
@@ -0,0 +1,47 @@
+gem "pg", "~> 1.1"
+require "pg"
+
+module NetworkResiliency
+  module Adapter
+    module Postgres
+      extend self
+
+      def patch
+        return if patched?
+
+        PG::Connection.singleton_class.prepend(Instrumentation)
+      end
+
+      def patched?
+        PG::Connection.singleton_class.ancestors.include?(Instrumentation)
+      end
+
+      module Instrumentation
+        def connect_start(opts)
+          host = opts[:host].split(",")[0] if opts[:host]
+
+          return super unless NetworkResiliency.enabled?(:postgres)
+
+          begin
+            ts = -NetworkResiliency.timestamp
+
+            super
+          rescue PG::Error => e
+            # capture error
+            raise
+          ensure
+            ts += NetworkResiliency.timestamp
+
+            NetworkResiliency.record(
+              adapter: "postgres",
+              action: "connect",
+              destination: host,
+              error: e&.class,
+              duration: ts,
+            )
+          end
+        end
+      end
+    end
+  end
+end
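Both new adapters follow the same pattern as the existing Redis/HTTP ones: prepend an `Instrumentation` module, time the connect call, and hand the duration to `NetworkResiliency.record`. A hedged sketch of enabling just the Postgres adapter; the connection parameters are placeholders, and whether a given call goes through the async `connect_start` path that the adapter hooks depends on the pg version and how the connection is opened:

```ruby
require "pg"
require "network_resiliency"

NetworkResiliency::Adapter::Postgres.patch
NetworkResiliency::Adapter::Postgres.patched?  # => true

# once patched, connection setup routed through PG::Connection.connect_start
# is timed and recorded under a "postgres:connect:<host>" key
PG.connect(host: "db-primary", dbname: "app")  # illustrative parameters
```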
data/lib/network_resiliency/refinements.rb ADDED
@@ -0,0 +1,9 @@
+module NetworkResiliency
+  module Refinements
+    refine Numeric do
+      def order_of_magnitude
+        self == 0 ? 0 : 10 ** Math.log10(self).ceil
+      end
+    end
+  end
+end
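The refinement above rounds a positive number up to the next power of ten; `NetworkResiliency.record` uses it for the new `*.magnitude` metric. A small illustrative sketch (the sample values are the editor's, not from the diff):

```ruby
require "network_resiliency/refinements"
using NetworkResiliency::Refinements

0.order_of_magnitude      # => 0
7.order_of_magnitude      # => 10     (10 ** Math.log10(7).ceil)
87.order_of_magnitude     # => 100
100.order_of_magnitude    # => 100
1500.order_of_magnitude   # => 10000
```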
data/lib/network_resiliency/stats.rb CHANGED
@@ -2,20 +2,33 @@ module NetworkResiliency
   class Stats
     attr_reader :n, :avg

-    def self.from(n, avg, sq_dist)
-      new.tap do |instance|
-        instance.instance_eval do
-          @n = n
-          @avg = avg
-          @sq_dist = sq_dist
+    class << self
+      def from(n:, avg:, sq_dist:)
+        new.tap do |instance|
+          instance.instance_eval do
+            @n = n.to_i
+            @avg = avg.to_f
+            @sq_dist = sq_dist.to_f
+          end
         end
       end
+
+      private
+
+      def synchronize(fn_name)
+        make_private = private_method_defined?(fn_name)
+        fn = instance_method(fn_name)
+
+        define_method(fn_name) do |*args|
+          @lock.synchronize { fn.bind(self).call(*args) }
+        end
+        private fn_name if make_private
+      end
     end

     def initialize(values = [])
-      @n = 0
-      @avg = 0.0
-      @sq_dist = 0.0 # sum of squared distance from mean
+      @lock = Thread::Mutex.new
+      reset

       values.each {|x| update(x) }
     end
@@ -34,7 +47,7 @@ module NetworkResiliency
     end

     def variance(sample: false)
-      @sq_dist / (sample ? (@n - 1) : @n)
+      @n == 0 ? 0 : @sq_dist / (sample ? (@n - 1) : @n)
     end

     def stdev
@@ -46,24 +59,161 @@ module NetworkResiliency
     end
     alias_method :+, :merge

-    def merge!(other)
+    synchronize def merge!(other)
       raise ArgumentError unless other.is_a?(self.class)

-      prev_n = n
-      @n += other.n
+      if @n == 0
+        @n = other.n
+        @avg = other.avg
+        @sq_dist = other.sq_dist
+      elsif other.n > 0
+        prev_n = @n
+        @n += other.n

-      delta = other.avg - avg
-      @avg += delta * other.n / n
+        delta = other.avg - avg
+        @avg += delta * other.n / @n

-      @sq_dist += other.instance_variable_get(:@sq_dist)
-      @sq_dist += (delta ** 2) * prev_n * other.n / n
+        @sq_dist += other.sq_dist
+        @sq_dist += (delta ** 2) * prev_n * other.n / @n
+      end

       self
     end

+    def ==(other)
+      return false unless other.is_a?(self.class)
+
+      @n == other.n &&
+        @avg == other.avg &&
+        @sq_dist == other.sq_dist
+    end
+
+    synchronize def reset
+      @n = 0
+      @avg = 0.0
+      @sq_dist = 0.0 # sum of squared distance from mean
+    end
+
+    MIN_SAMPLE_SIZE = 1000
+    MAX_WINDOW_LENGTH = 1000
+    STATS_TTL = 24 * 60 * 60 # 1 day
+    CACHE_TTL = 60 # seconds
+
+    LUA_SCRIPT = <<~LUA
+      local results = {}
+
+      for i = 0, #KEYS / 2 - 1 do
+        local state_key = KEYS[i * 2 + 1]
+        local cache_key = KEYS[i * 2 + 2]
+
+        local n = tonumber(ARGV[i * 3 + 1])
+        local avg = ARGV[i * 3 + 2]
+        local sq_dist = math.floor(ARGV[i * 3 + 3])
+
+        if n > 0 then
+          -- save new data
+          local window_len = redis.call(
+            'LPUSH',
+            state_key,
+            string.format('%d|%f|%d', n, avg, sq_dist)
+          )
+          redis.call('EXPIRE', state_key, #{STATS_TTL})
+
+          if window_len > #{MAX_WINDOW_LENGTH} then
+            -- trim stats to window length
+            redis.call('LTRIM', state_key, 0, #{MAX_WINDOW_LENGTH - 1})
+          end
+        end
+
+        -- retrieve aggregated stats
+
+        local cached_stats = redis.call('GET', cache_key)
+        if cached_stats then
+          -- use cached stats
+          n, avg, sq_dist = string.match(cached_stats, "(%d+)|([%d.]+)|(%d+)")
+          n = tonumber(n)
+        else
+          -- calculate aggregated stats
+          n = 0
+          avg = 0.0
+          sq_dist = 0
+
+          local stats = redis.call('LRANGE', state_key, 0, -1)
+          for _, entry in ipairs(stats) do
+            local other_n, other_avg, other_sq_dist = string.match(entry, "(%d+)|([%d.]+)|(%d+)")
+            other_n = tonumber(other_n)
+            other_avg = tonumber(other_avg) + 0.0
+            other_sq_dist = tonumber(other_sq_dist)
+
+            local prev_n = n
+            n = n + other_n
+
+            local delta = other_avg - avg
+            avg = avg + delta * other_n / n
+
+            sq_dist = sq_dist + other_sq_dist
+            sq_dist = sq_dist + (delta ^ 2) * prev_n * other_n / n
+          end
+        end
+
+        -- update cache
+        if n >= #{MIN_SAMPLE_SIZE} then
+          cached_stats = string.format('%d|%f|%d', n, avg, sq_dist)
+          redis.call('SET', cache_key, cached_stats, 'EX', #{CACHE_TTL})
+        end
+
+        -- accumulate results
+        table.insert(results, n)
+        table.insert(results, tostring(avg))
+        table.insert(results, sq_dist)
+      end
+
+      return results
+    LUA
+
+    def sync(redis, key)
+      self.class.sync(redis, key => self)[key]
+    end
+
+    def self.sync(redis, **data)
+      keys = []
+      args = []
+
+      data.each do |key, stats|
+        keys += [
+          "network_resiliency:stats:#{key}",
+          "network_resiliency:stats:cache:#{key}",
+        ]
+
+        args += [ stats.n, stats.avg, stats.send(:sq_dist) ]
+      end
+
+      res = redis.eval(LUA_SCRIPT, keys, args)
+      data.keys.zip(res.each_slice(3)).map do |key, stats|
+        n, avg, sq_dist = *stats
+
+        [ key, Stats.from(n: n, avg: avg, sq_dist: sq_dist) ]
+      end.to_h
+    end
+
+    def self.fetch(redis, keys)
+      data = Array(keys).map { |k| [ k, new ] }.to_h
+      res = sync(redis, **data)
+
+      keys.is_a?(Array) ? res : res[keys]
+    end
+
+    def to_s
+      "#<#{self.class.name}:#{object_id} n=#{n} avg=#{avg} sq_dist=#{sq_dist}>"
+    end
+
+    protected
+
+    attr_reader :sq_dist
+
     private

-    def update(value)
+    synchronize def update(value)
       raise ArgumentError unless value.is_a?(Numeric)

       @n += 1
@@ -72,19 +222,6 @@ module NetworkResiliency
       @avg += (value - @avg) / @n

       @sq_dist += (value - prev_avg) * (value - @avg)
-      # @sq_dist += (sq_dist - @sq_dist) / @n
-
-      # for x, w in data_weight_pairs:
-      #   w_sum = w_sum + w
-      #   mean_old = mean
-      #   mean = mean_old + (w / w_sum) * (x - mean_old)
-      #   S = S + w * (x - mean_old) * (x - mean)
-
-      # count += 1
-      # delta = newValue - mean
-      # mean += delta / count
-      # delta2 = newValue - mean
-      # M2 += delta * delta2
     end
   end
 end
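`Stats` keeps a running count/mean/sum-of-squared-distances (Welford-style), `merge!`/`+` combine two windows with the parallel-variance formula, and `Stats.sync` pushes a window into Redis via the Lua script and returns the cross-process aggregate. A hedged usage sketch; the sample values, key name, and Redis connection are illustrative and a reachable Redis server is assumed for the last step:

```ruby
require "redis"
require "network_resiliency"

a = NetworkResiliency::Stats.new([ 8, 12, 15 ])
b = NetworkResiliency::Stats.new([ 7, 14 ])

combined = a + b   # parallel merge of the two windows
combined.n         # => 5
combined.avg       # => 11.2
combined.stdev     # standard deviation over all five samples

# push this process's window to Redis and read back the aggregate
redis = Redis.new
aggregate = a.sync(redis, "mysql:connect:db-primary")
aggregate.n        # combined sample count across processes
```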
data/lib/network_resiliency/stats_engine.rb ADDED
@@ -0,0 +1,112 @@
+module NetworkResiliency
+  module StatsEngine
+    extend self
+
+    LOCK = Thread::Mutex.new
+    STATS = {}
+    SYNC_LIMIT = 100
+
+    def add(key, value)
+      local, _ = synchronize do
+        STATS[key] ||= [ Stats.new, Stats.new ]
+      end
+
+      local << value
+    end
+
+    def get(key)
+      local, remote = synchronize do
+        STATS[key] ||= [ Stats.new, Stats.new ]
+      end
+
+      local + remote
+    end
+
+    def reset
+      synchronize { STATS.clear }
+    end
+
+    def sync(redis)
+      dirty_keys = {}
+
+      # select data to be synced
+      data = synchronize do
+        # ensure sync is not run concurrently
+        return [] if @syncing
+        @syncing = Thread.current
+
+        dirty_keys = STATS.map do |key, (local, remote)|
+          # skip if no new local stats and remote already synced
+          next if local.n == 0 && remote.n > 0
+
+          [ key, local.n ]
+        end.compact.to_h
+
+        # select keys to sync, prioritizing most used
+        keys = dirty_keys.sort_by do |key, weight|
+          -weight
+        end.take(SYNC_LIMIT).map(&:first)
+
+        # update stats for keys being synced
+        keys.map do |key|
+          local, remote = STATS[key]
+
+          remote << local # update remote stats until sync completes
+          STATS[key][0] = Stats.new # reset local stats
+
+          [ key, local ]
+        end.to_h
+      end
+
+      NetworkResiliency.statsd&.distribution(
+        "network_resiliency.sync.keys",
+        data.size,
+        tags: {
+          empty: data.empty?,
+          truncated: data.size < dirty_keys.size,
+        }.select { |_, v| v },
+      )
+
+      NetworkResiliency.statsd&.distribution(
+        "network_resiliency.sync.keys.dirty",
+        dirty_keys.select { |_, n| n > 0 }.count,
+      )
+
+      return [] if data.empty?
+
+      # sync data to redis
+      remote_stats = if NetworkResiliency.statsd
+        NetworkResiliency.statsd&.time("network_resiliency.sync") do
+          Stats.sync(redis, **data)
+        end
+      else
+        Stats.sync(redis, **data)
+      end
+
+      # integrate new remote stats
+      synchronize do
+        remote_stats.each do |key, stats|
+          local, remote = STATS[key]
+
+          remote.reset
+          remote << stats
+        end
+      end
+
+      remote_stats.keys
+    ensure
+      # release sync lock
+      @syncing = nil if @syncing == Thread.current
+    end
+
+    def syncing?
+      !!@syncing
+    end
+
+    private
+
+    def synchronize
+      LOCK.synchronize { yield }
+    end
+  end
+end
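The engine keeps a `[local, remote]` pair of `Stats` per key: `add` updates the local window, `get` returns the combined view, and `sync` folds up to `SYNC_LIMIT` dirty keys into Redis and refreshes the remote side. A rough sketch of that flow; the key name and Redis connection are illustrative, and in normal use `NetworkResiliency.record` calls `add` while the background worker calls `sync`:

```ruby
require "redis"
require "network_resiliency"

key = "postgres:connect:db-primary"

# what NetworkResiliency.record does for each instrumented call
NetworkResiliency::StatsEngine.add(key, 12.5)
NetworkResiliency::StatsEngine.add(key, 9.8)

# local window merged with the last known remote aggregate
NetworkResiliency::StatsEngine.get(key).avg  # => 11.15

# normally run every few seconds by the sync worker started in configure
NetworkResiliency::StatsEngine.sync(Redis.new)
```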
data/lib/network_resiliency/version.rb CHANGED
@@ -1,3 +1,3 @@
 module NetworkResiliency
-  VERSION = "0.2.1"
+  VERSION = "0.3.1"
 end
data/lib/network_resiliency.rb CHANGED
@@ -1,18 +1,27 @@
+require "network_resiliency/refinements"
+require "network_resiliency/stats"
+require "network_resiliency/stats_engine"
 require "network_resiliency/version"

+using NetworkResiliency::Refinements
+
 module NetworkResiliency
   module Adapter
     autoload :HTTP, "network_resiliency/adapter/http"
     autoload :Faraday, "network_resiliency/adapter/faraday"
     autoload :Redis, "network_resiliency/adapter/redis"
+    autoload :Mysql, "network_resiliency/adapter/mysql"
+    autoload :Postgres, "network_resiliency/adapter/postgres"
   end

   extend self

-  attr_accessor :statsd
+  attr_accessor :statsd, :redis

   def configure
-    yield self
+    yield self if block_given?
+
+    start_syncing if redis
   end

   def patch(*adapters)
@@ -22,6 +31,10 @@ module NetworkResiliency
         Adapter::HTTP.patch
       when :redis
         Adapter::Redis.patch
+      when :mysql
+        Adapter::Mysql.patch
+      when :postgres
+        Adapter::Postgres.patch
       else
         raise NotImplementedError
       end
@@ -29,6 +42,7 @@ module NetworkResiliency
   end

   def enabled?(adapter)
+    return thread_state["enabled"] if thread_state.key?("enabled")
     return true if @enabled.nil?

     if @enabled.is_a?(Proc)
@@ -52,20 +66,20 @@ module NetworkResiliency

   def enable!
     original = @enabled
-    @enabled = true
+    thread_state["enabled"] = true

     yield if block_given?
   ensure
-    @enabled = original if block_given?
+    thread_state.delete("enabled") if block_given?
   end

   def disable!
     original = @enabled
-    @enabled = false
+    thread_state["enabled"] = false

     yield if block_given?
   ensure
-    @enabled = original if block_given?
+    thread_state.delete("enabled") if block_given?
   end

   def timestamp
@@ -90,9 +104,56 @@ module NetworkResiliency
         error: error,
       }.compact,
     )
+
+    NetworkResiliency.statsd&.distribution(
+      "network_resiliency.#{action}.magnitude",
+      duration.order_of_magnitude,
+      tags: {
+        adapter: adapter,
+        destination: destination,
+        error: error,
+      }.compact,
+    )
+
+    key = [ adapter, action, destination ].join(":")
+    StatsEngine.add(key, duration)
+  rescue => e
+    NetworkResiliency.statsd&.increment(
+      "network_resiliency.error",
+      tags: {
+        type: e.class,
+      },
+    )
+
+    warn "[ERROR] NetworkResiliency: #{e.class}: #{e.message}"
   end

   def reset
     @enabled = nil
+    Thread.current["network_resiliency"] = nil
+    StatsEngine.reset
+    @sync_worker.kill if @sync_worker
+  end
+
+  private
+
+  def thread_state
+    Thread.current["network_resiliency"] ||= {}
+  end
+
+  def start_syncing
+    @sync_worker.kill if @sync_worker
+
+    raise "Redis not configured" unless redis
+
+    @sync_worker = Thread.new do
+      while true do
+        StatsEngine.sync(redis)
+
+        sleep(3)
+      end
+    rescue Interrupt
+      # goodbye
+    end
   end
 end
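Pulling the pieces together: `configure` now accepts a Redis connection and starts the background sync thread, and `patch` knows about the two new adapters. A hedged sketch of wiring it up; the `Datadog::Statsd` and `Redis` instances are placeholders, and patching `:mysql`/`:postgres` assumes the `mysql2` and `pg` gems are installed:

```ruby
require "datadog/statsd"
require "redis"
require "network_resiliency"

NetworkResiliency.configure do |conf|
  conf.statsd = Datadog::Statsd.new  # metrics sink (optional)
  conf.redis  = Redis.new            # enables the background StatsEngine sync thread

  # patch the new adapters alongside the existing ones
  conf.patch :redis, :mysql, :postgres
end

# enable!/disable! with a block now toggle a per-thread flag instead of the
# global @enabled, so the override is scoped to the block and current thread
NetworkResiliency.disable! do
  # calls made here are not instrumented
end
```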
data/network_resiliency.gemspec CHANGED
@@ -18,6 +18,8 @@ Gem::Specification.new do |s|
   s.add_development_dependency "dogstatsd-ruby", "<= 4.8.3"
   s.add_development_dependency "faraday", "~> 1"
   s.add_development_dependency "faraday-rack"
+  s.add_development_dependency "mysql2", ">= 0.5"
+  s.add_development_dependency "pg", "~> 1.1"
   s.add_development_dependency "rack"
   s.add_development_dependency "rack-test"
   s.add_development_dependency "redis", "~> 4"
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: network_resiliency
 version: !ruby/object:Gem::Version
-  version: 0.2.1
+  version: 0.3.1
 platform: ruby
 authors:
 - Daniel Pepper
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-10-02 00:00:00.000000000 Z
+date: 2023-11-02 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: byebug
@@ -80,6 +80,34 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: mysql2
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0.5'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0.5'
+- !ruby/object:Gem::Dependency
+  name: pg
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.1'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.1'
 - !ruby/object:Gem::Dependency
   name: rack
   requirement: !ruby/object:Gem::Requirement
@@ -164,8 +192,12 @@ files:
 - lib/network_resiliency.rb
 - lib/network_resiliency/adapter/faraday.rb
 - lib/network_resiliency/adapter/http.rb
+- lib/network_resiliency/adapter/mysql.rb
+- lib/network_resiliency/adapter/postgres.rb
 - lib/network_resiliency/adapter/redis.rb
+- lib/network_resiliency/refinements.rb
 - lib/network_resiliency/stats.rb
+- lib/network_resiliency/stats_engine.rb
 - lib/network_resiliency/version.rb
 - network_resiliency.gemspec
 homepage: https://github.com/dpep/network_resiliency_rb
@@ -187,7 +219,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.3.7
+rubygems_version: 3.2.33
 signing_key:
 specification_version: 4
 summary: NetworkResiliency