lhc 12.2.0 → 13.2.0
- checksums.yaml +4 -4
- data/.github/workflows/rubocop.yml +27 -0
- data/.github/workflows/test.yml +27 -0
- data/.rubocop.yml +3 -0
- data/.ruby-version +1 -1
- data/Gemfile.activesupport5 +1 -1
- data/Gemfile.activesupport6 +1 -1
- data/README.md +67 -6
- data/Rakefile +3 -3
- data/lhc.gemspec +3 -2
- data/lib/lhc/error.rb +3 -1
- data/lib/lhc/interceptor.rb +4 -0
- data/lib/lhc/interceptors/auth.rb +0 -4
- data/lib/lhc/interceptors/caching.rb +65 -44
- data/lib/lhc/interceptors/monitoring.rb +39 -10
- data/lib/lhc/interceptors/throttle.rb +9 -8
- data/lib/lhc/railtie.rb +0 -1
- data/lib/lhc/request.rb +7 -3
- data/lib/lhc/rspec.rb +1 -2
- data/lib/lhc/version.rb +1 -1
- data/spec/error/to_s_spec.rb +7 -2
- data/spec/formats/multipart_spec.rb +1 -1
- data/spec/formats/plain_spec.rb +1 -1
- data/spec/interceptors/after_response_spec.rb +1 -1
- data/spec/interceptors/caching/main_spec.rb +2 -2
- data/spec/interceptors/caching/multilevel_cache_spec.rb +139 -0
- data/spec/interceptors/caching/options_spec.rb +0 -11
- data/spec/interceptors/monitoring/caching_spec.rb +66 -0
- data/spec/interceptors/response_competition_spec.rb +2 -2
- data/spec/interceptors/return_response_spec.rb +2 -2
- data/spec/interceptors/throttle/main_spec.rb +95 -21
- data/spec/spec_helper.rb +1 -0
- metadata +27 -20
- data/Gemfile.activesupport4 +0 -4
- data/cider-ci.yml +0 -6
- data/cider-ci/bin/bundle +0 -51
- data/cider-ci/bin/ruby_install +0 -8
- data/cider-ci/bin/ruby_version +0 -25
- data/cider-ci/jobs/rspec-activesupport-4.yml +0 -28
- data/cider-ci/jobs/rspec-activesupport-5.yml +0 -27
- data/cider-ci/jobs/rspec-activesupport-6.yml +0 -28
- data/cider-ci/jobs/rubocop.yml +0 -18
- data/cider-ci/task_components/bundle.yml +0 -22
- data/cider-ci/task_components/rspec.yml +0 -36
- data/cider-ci/task_components/rubocop.yml +0 -29
- data/cider-ci/task_components/ruby.yml +0 -15
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b7e042d800751c9fceb6d4f3e01513a1a849a3e00fb8bf7e0987bf1df73b5a45
+  data.tar.gz: 6fa4a578f6003a9ef46ca27ed81766ef2c4dbd07ad231d4201624274b7255153
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 80b0d65063c77912df0cf07bc8f25acbb083af34186248689131283cb07e90f9bc2677973b96170d0eeba788b7bcde19ce0f3efddefe349501e33b3825f39b53
+  data.tar.gz: 2f0b273133f8fe5311eeaa4c46d11170430c58099c8599f855e303659f299fbab0ababbc5091999a0fc7c433a7eccdd80fbc343e5cac267aeae3aab7595bbdcb
data/.github/workflows/rubocop.yml
ADDED
@@ -0,0 +1,27 @@
+name: Rubocop
+
+on: push
+
+jobs:
+  rubocop:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-ruby@v1
+        with:
+          ruby-version: 2.7.2
+      - name: Cache Ruby Gems
+        uses: actions/cache@v2
+        with:
+          path: /.tmp/vendor/bundle
+          key: ${{ runner.os }}-gems-latest-${{ hashFiles('**/Gemfile.lock') }}
+          restore-keys: |
+            ${{ runner.os }}-gems-latest-
+      - name: Bundle Install
+        run: |
+          bundle config path /.tmp/vendor/bundle
+          bundle install --jobs 4 --retry 3
+      - name: Run Rubocop
+        run: |
+          bundle exec rubocop
data/.github/workflows/test.yml
ADDED
@@ -0,0 +1,27 @@
+name: Test
+
+on: push
+
+jobs:
+  rspec:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-ruby@v1
+        with:
+          ruby-version: 2.7.2
+      - name: Cache Ruby Gems
+        uses: actions/cache@v2
+        with:
+          path: /.tmp/vendor/bundle
+          key: ${{ runner.os }}-gems-latest-${{ hashFiles('**/Gemfile.lock') }}
+          restore-keys: |
+            ${{ runner.os }}-gems-latest-
+      - name: Bundle Install
+        run: |
+          bundle config path /.tmp/vendor/bundle
+          bundle install --jobs 4 --retry 3
+      - name: Run Tests
+        run: |
+          bundle exec rspec
data/.rubocop.yml
CHANGED
data/.ruby-version
CHANGED
@@ -1 +1 @@
-ruby-2.
+ruby-2.7.2
data/Gemfile.activesupport5
CHANGED
data/Gemfile.activesupport6
CHANGED
data/README.md
CHANGED
@@ -73,6 +73,10 @@ use it like:
 * [Installation](#installation-1)
 * [Environment](#environment)
 * [What it tracks](#what-it-tracks)
+  * [Before and after request tracking](#before-and-after-request-tracking)
+  * [Response tracking](#response-tracking)
+  * [Timeout tracking](#timeout-tracking)
+  * [Caching tracking](#caching-tracking)
 * [Configure](#configure-1)
 * [Prometheus Interceptor](#prometheus-interceptor)
 * [Retry Interceptor](#retry-interceptor)
@@ -95,6 +99,7 @@ use it like:
 
 
 
+
 ## Basic methods
 
 Available are `get`, `post`, `put` & `delete`.
@@ -601,7 +606,6 @@ You can configure your own cache (default Rails.cache) and logger (default Rails
 
 ```ruby
 LHC::Caching.cache = ActiveSupport::Cache::MemoryStore.new
-LHC::Caching.logger = Logger.new(STDOUT)
 ```
 
 Caching is not enabled by default, although you added it to your basic set of interceptors.
@@ -632,6 +636,18 @@ Responses served from cache are marked as served from cache:
 response.from_cache? # true
 ```
 
+You can also use a central http cache to be used by the `LHC::Caching` interceptor.
+
+If you configure a local and a central cache, LHC will perform multi-level-caching.
+LHC will try to retrieve cached information first from the central, in case of a miss from the local cache, while writing back into both.
+
+```ruby
+LHC::Caching.central = {
+  read: 'redis://$PASSWORD@central-http-cache-replica.namespace:6379/0',
+  write: 'redis://$PASSWORD@central-http-cache-master.namespace:6379/0'
+}
+```
+
 ##### Options
 
 ```ruby
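Pulled together as an initializer, the local and central caches described above combine like this; a minimal sketch, assuming illustrative `ENV` variable names for the Redis URLs:

```ruby
# config/initializers/lhc.rb (sketch)
# With both levels configured, LHC reads from the central cache first,
# falls back to the local cache on a miss, and writes fresh responses
# back to both levels.
LHC::Caching.cache = Rails.cache # local level (the default)
LHC::Caching.central = {
  read: ENV['HTTP_CACHE_REPLICA_URL'],  # e.g. a Redis replica URL
  write: ENV['HTTP_CACHE_MASTER_URL']   # e.g. the Redis primary URL
}
```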
@@ -644,7 +660,7 @@ Responses served from cache are marked as served from cache:
 
 `race_condition_ttl` - very useful in situations where a cache entry is used very frequently and is under heavy load.
 If a cache expires and due to heavy load several different processes will try to read data natively and then they all will try to write to cache.
-To avoid that case the first process to find an expired cache entry will bump the cache expiration time by the value set in `
+To avoid that case the first process to find an expired cache entry will bump the cache expiration time by the value set in `race_condition_ttl`.
 
 `use` - Set an explicit cache to be used for this request. If this option is missing `LHC::Caching.cache` is used.
 
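These options are passed per request under the `cache` key; a short sketch with illustrative durations:

```ruby
LHC.get(
  'http://local.ch',
  cache: {
    expires_in: 5.minutes,          # forwarded to the cache store
    race_condition_ttl: 10.seconds, # grace period while one process refreshes the entry
    use: ActiveSupport::Cache::MemoryStore.new # explicit cache just for this request
  }
)
```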
@@ -733,11 +749,15 @@ It tracks request attempts with `before_request` and `after_request` (counts).
 In case your workers/processes are getting killed due limited time constraints,
 you are able to detect deltas with relying on "before_request", and "after_request" counts:
 
+###### Before and after request tracking
+
 ```ruby
 "lhc.<app_name>.<env>.<host>.<http_method>.before_request", 1
 "lhc.<app_name>.<env>.<host>.<http_method>.after_request", 1
 ```
 
+###### Response tracking
+
 In case of a successful response it reports the response code with a count and the response time with a gauge value.
 
 ```ruby
@@ -748,6 +768,17 @@ In case of a successful response it reports the response code with a count and t
 "lhc.<app_name>.<env>.<host>.<http_method>.time", 43
 ```
 
+In case of a unsuccessful response it reports the response code with a count but no time:
+
+```ruby
+LHC.get('http://local.ch')
+
+"lhc.<app_name>.<env>.<host>.<http_method>.count", 1
+"lhc.<app_name>.<env>.<host>.<http_method>.500", 1
+```
+
+###### Timeout tracking
+
 Timeouts are also reported:
 
 ```ruby
@@ -756,6 +787,30 @@ Timeouts are also reported:
 
 All the dots in the host are getting replaced with underscore, because dot is the default separator in graphite.
 
+###### Caching tracking
+
+When you want to track caching stats please make sure you have enabled the `LHC::Caching` and the `LHC::Monitoring` interceptor.
+
+Make sure that the `LHC::Caching` is listed before `LHC::Monitoring` interceptor when configuring interceptors:
+
+```ruby
+LHC.configure do |c|
+  c.interceptors = [LHC::Caching, LHC::Monitoring]
+end
+```
+
+If a response was served from cache it tracks:
+
+```ruby
+"lhc.<app_name>.<env>.<host>.<http_method>.cache.hit", 1
+```
+
+If a response was not served from cache it tracks:
+
+```ruby
+"lhc.<app_name>.<env>.<host>.<http_method>.cache.miss", 1
+```
+
 ##### Configure
 
 It is possible to set the key for Monitoring Interceptor on per request basis:
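Putting the key format together: a request from an app called `web` in `production` to `http://local.ch` would emit keys along these lines (app name and environment are illustrative; dots in the host become underscores):

```ruby
"lhc.web.production.local_ch.get.before_request", 1
"lhc.web.production.local_ch.get.after_request", 1
"lhc.web.production.local_ch.get.count", 1
"lhc.web.production.local_ch.get.200", 1
"lhc.web.production.local_ch.get.time", 43
"lhc.web.production.local_ch.get.cache.hit", 1   # only with LHC::Caching listed before LHC::Monitoring
```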
@@ -895,16 +950,22 @@ LHC.get('http://local.ch', options)
 LHC.get('http://local.ch', options)
 # raises LHC::Throttle::OutOfQuota: Reached predefined quota for local.ch
 ```
+
 **Options Description**
 * `track`: enables tracking of current limit/remaining requests of rate-limiting
 * `break`: quota in percent after which errors are raised. Percentage symbol is optional, values will be converted to integer (e.g. '23.5' will become 23)
 * `provider`: name of the provider under which throttling tracking is aggregated,
-* `limit`:
+* `limit`:
+  * a hard-coded integer
+  * a hash pointing at the response header containing the limit value
+  * a proc that receives the response as argument and returns the limit value
 * `remaining`:
   * a hash pointing at the response header containing the current amount of remaining requests
-  * a proc that receives the response as argument and returns the current amount
-
-  *
+  * a proc that receives the response as argument and returns the current amount of remaining requests
+* `expires`:
+  * a hash pointing at the response header containing the timestamp when the quota will reset
+  * a proc that receives the response as argument and returns the timestamp when the quota will reset
+
 
 #### Zipkin
 
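A sketch of how the throttle options above fit together on a request; the `Rate-Limit-*` header names are illustrative and depend on the provider's API:

```ruby
options = {
  throttle: {
    provider: 'local.ch',
    track: true,
    break: '80%',                                  # raise once 80% of the quota is used
    limit: { header: 'Rate-Limit-Limit' },         # or an integer, or a proc
    remaining: { header: 'Rate-Limit-Remaining' }, # or a proc
    expires: { header: 'Rate-Limit-Reset' }        # or a proc
  }
}

LHC.get('http://local.ch', options)
# raises LHC::Throttle::OutOfQuota once the configured share of the quota is used up
```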
data/Rakefile
CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 begin
   require 'bundler/setup'
 rescue LoadError
@@ -17,9 +19,7 @@ end
 begin
   require 'rspec/core/rake_task'
   RSpec::Core::RakeTask.new(:spec)
-  task :
-rescue LoadError
-  # no rspec available
+  task default: :spec
 end
 
 Bundler::GemHelper.install_tasks
data/lhc.gemspec
CHANGED
@@ -21,14 +21,15 @@ Gem::Specification.new do |s|
 
   s.requirements << 'Ruby >= 2.0.0'
 
-  s.add_dependency 'activesupport', '>=
+  s.add_dependency 'activesupport', '>= 5.2'
   s.add_dependency 'addressable'
   s.add_dependency 'typhoeus', '>= 0.11'
 
   s.add_development_dependency 'geminabox'
   s.add_development_dependency 'prometheus-client', '~> 0.7.1'
   s.add_development_dependency 'pry'
-  s.add_development_dependency 'rails', '>=
+  s.add_development_dependency 'rails', '>= 5.2'
+  s.add_development_dependency 'redis'
   s.add_development_dependency 'rspec-rails', '>= 3.0.0'
   s.add_development_dependency 'rubocop', '~> 0.57.1'
   s.add_development_dependency 'rubocop-rspec', '~> 1.26.0'
data/lib/lhc/error.rb
CHANGED
@@ -64,8 +64,10 @@ class LHC::Error < StandardError
   end
 
   def to_s
-    return response
+    return response.to_s unless response.is_a?(LHC::Response)
     request = response.request
+    return unless request.is_a?(LHC::Request)
+
     debug = []
     debug << [request.method, request.url].map { |str| self.class.fix_invalid_encoding(str) }.join(' ')
     debug << "Options: #{request.options}"
data/lib/lhc/interceptor.rb
CHANGED
data/lib/lhc/interceptors/auth.rb
CHANGED
@@ -75,10 +75,6 @@ class LHC::Auth < LHC::Interceptor
     @refresh_client_token_option ||= auth_options[:refresh_client_token] || refresh_client_token
   end
 
-  def all_interceptor_classes
-    @all_interceptors ||= LHC::Interceptors.new(request).all.map(&:class)
-  end
-
   def auth_options
     request.options[:auth] || {}
   end
data/lib/lhc/interceptors/caching.rb
CHANGED
@@ -3,69 +3,104 @@
 class LHC::Caching < LHC::Interceptor
   include ActiveSupport::Configurable
 
-  config_accessor :cache, :
+  config_accessor :cache, :central
 
+  # to control cache invalidation across all applications in case of
+  # breaking changes within this inteceptor
+  # that do not lead to cache invalidation otherwise
   CACHE_VERSION = '1'
 
   # Options forwarded to the cache
   FORWARDED_OPTIONS = [:expires_in, :race_condition_ttl]
 
+  class MultilevelCache
+
+    def initialize(central: nil, local: nil)
+      @central = central
+      @local = local
+    end
+
+    def fetch(key)
+      central_response = @central[:read].fetch(key) if @central && @central[:read].present?
+      if central_response
+        puts %Q{[LHC] served from central cache: "#{key}"}
+        return central_response
+      end
+      local_response = @local.fetch(key) if @local
+      if local_response
+        puts %Q{[LHC] served from local cache: "#{key}"}
+        return local_response
+      end
+    end
+
+    def write(key, content, options)
+      @central[:write].write(key, content, options) if @central && @central[:write].present?
+      @local.write(key, content, options) if @local.present?
+    end
+  end
+
   def before_request
     return unless cache?(request)
-
-    options = options(request.options)
-    key = key(request, options[:key])
-    response_data = cache_for(options).fetch(key)
-    return unless response_data
-    logger&.info "Served from cache: #{key}"
+    return if response_data.blank?
     from_cache(request, response_data)
   end
 
   def after_response
     return unless response.success?
-    request = response.request
     return unless cache?(request)
-
-
+    return if response_data.present?
+    multilevel_cache.write(
      key(request, options[:key]),
      to_cache(response),
-      cache_options
+      cache_options
    )
   end
 
   private
 
-  #
-  def
+  # from cache
+  def response_data
+    # stop calling multi-level cache if it already returned nil for this interceptor instance
+    return @response_data if defined? @response_data
+    @response_data ||= multilevel_cache.fetch(key(request, options[:key]))
+  end
+
+  # performs read/write (fetch/write) on all configured cache levels (e.g. local & central)
+  def multilevel_cache
+    MultilevelCache.new(
+      central: central_cache,
+      local: local_cache
+    )
+  end
+
+  # returns the local cache either configured for entire LHC
+  # or configured locally for that particular request
+  def local_cache
     options.fetch(:use, cache)
   end
 
+  def central_cache
+    return nil if central.blank? || (central[:read].blank? && central[:write].blank?)
+    {}.tap do |options|
+      options[:read] = ActiveSupport::Cache::RedisCacheStore.new(url: central[:read]) if central[:read].present?
+      options[:write] = ActiveSupport::Cache::RedisCacheStore.new(url: central[:write]) if central[:write].present?
+    end
+  end
+
   # do we even need to bother with this interceptor?
   # based on the options, this method will
   # return false if this interceptor cannot work
   def cache?(request)
     return false unless request.options[:cache]
-
-    cache_for(options) &&
+    (local_cache || central_cache) &&
       cached_method?(request.method, options[:methods])
   end
 
-
-
-  def options(request_options)
-    options = (request_options[:cache] == true) ? {} : request_options[:cache].dup
-    map_deprecated_options!(request_options, options)
+  def options
+    options = (request.options[:cache] == true) ? {} : request.options[:cache].dup
     options
   end
 
-  # maps `cache_key` -> `key`, `cache_expires_in` -> `expires_in` and so on
-  def map_deprecated_options!(request_options, options)
-    deprecated_keys(request_options).each do |deprecated_key|
-      new_key = deprecated_key.to_s.gsub(/^cache_/, '').to_sym
-      options[new_key] = request_options[deprecated_key]
-    end
-  end
-
   # converts json we read from the cache to an LHC::Response object
   def from_cache(request, data)
     raw = Typhoeus::Response.new(data)
@@ -104,24 +139,10 @@ class LHC::Caching < LHC::Interceptor
 
   # extracts the options that should be forwarded to
   # the cache
-  def cache_options
-
+  def cache_options
+    options.each_with_object({}) do |(key, value), result|
      result[key] = value if key.in? FORWARDED_OPTIONS
      result
    end
   end
-
-  # grabs the deprecated keys from the request options
-  def deprecated_keys(request_options)
-    request_options.keys.select { |k| k =~ /^cache_.*/ }.sort
-  end
-
-  # emits a deprecation warning if necessary
-  def deprecation_warning(request_options)
-    unless deprecated_keys(request_options).empty?
-      ActiveSupport::Deprecation.warn(
-        "Cache options have changed! #{deprecated_keys(request_options).join(', ')} are deprecated and will be removed in future versions."
-      )
-    end
-  end
 end