lhc 12.2.0 → 13.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.github/workflows/rubocop.yml +27 -0
- data/.github/workflows/test.yml +27 -0
- data/.rubocop.yml +3 -0
- data/.ruby-version +1 -1
- data/Gemfile.activesupport5 +1 -1
- data/Gemfile.activesupport6 +1 -1
- data/README.md +67 -6
- data/Rakefile +3 -3
- data/lhc.gemspec +3 -2
- data/lib/lhc/error.rb +3 -1
- data/lib/lhc/interceptor.rb +4 -0
- data/lib/lhc/interceptors/auth.rb +0 -4
- data/lib/lhc/interceptors/caching.rb +65 -44
- data/lib/lhc/interceptors/monitoring.rb +39 -10
- data/lib/lhc/interceptors/throttle.rb +9 -8
- data/lib/lhc/railtie.rb +0 -1
- data/lib/lhc/request.rb +7 -3
- data/lib/lhc/rspec.rb +1 -2
- data/lib/lhc/version.rb +1 -1
- data/spec/error/to_s_spec.rb +7 -2
- data/spec/formats/multipart_spec.rb +1 -1
- data/spec/formats/plain_spec.rb +1 -1
- data/spec/interceptors/after_response_spec.rb +1 -1
- data/spec/interceptors/caching/main_spec.rb +2 -2
- data/spec/interceptors/caching/multilevel_cache_spec.rb +139 -0
- data/spec/interceptors/caching/options_spec.rb +0 -11
- data/spec/interceptors/monitoring/caching_spec.rb +66 -0
- data/spec/interceptors/response_competition_spec.rb +2 -2
- data/spec/interceptors/return_response_spec.rb +2 -2
- data/spec/interceptors/throttle/main_spec.rb +95 -21
- data/spec/spec_helper.rb +1 -0
- metadata +27 -20
- data/Gemfile.activesupport4 +0 -4
- data/cider-ci.yml +0 -6
- data/cider-ci/bin/bundle +0 -51
- data/cider-ci/bin/ruby_install +0 -8
- data/cider-ci/bin/ruby_version +0 -25
- data/cider-ci/jobs/rspec-activesupport-4.yml +0 -28
- data/cider-ci/jobs/rspec-activesupport-5.yml +0 -27
- data/cider-ci/jobs/rspec-activesupport-6.yml +0 -28
- data/cider-ci/jobs/rubocop.yml +0 -18
- data/cider-ci/task_components/bundle.yml +0 -22
- data/cider-ci/task_components/rspec.yml +0 -36
- data/cider-ci/task_components/rubocop.yml +0 -29
- data/cider-ci/task_components/ruby.yml +0 -15
data/lib/lhc/interceptors/monitoring.rb
CHANGED
@@ -13,34 +13,59 @@ class LHC::Monitoring < LHC::Interceptor
 
   def before_request
     return unless statsd
-    LHC::Monitoring.statsd.count("#{key
+    LHC::Monitoring.statsd.count("#{key}.before_request", 1)
   end
 
   def after_request
     return unless statsd
-    LHC::Monitoring.statsd.count("#{key
-    LHC::Monitoring.statsd.count("#{key
+    LHC::Monitoring.statsd.count("#{key}.count", 1)
+    LHC::Monitoring.statsd.count("#{key}.after_request", 1)
   end
 
   def after_response
     return unless statsd
-
-
-
-    LHC::Monitoring.statsd.count(key, 1)
+    monitor_time!
+    monitor_cache!
+    monitor_response!
   end
 
   private
 
-  def
-
+  def monitor_time!
+    LHC::Monitoring.statsd.timing("#{key}.time", response.time) if response.success?
+  end
+
+  def monitor_cache!
+    return if request.options[:cache].blank?
+    return unless monitor_caching_configuration_check
+    if response.from_cache?
+      LHC::Monitoring.statsd.count("#{key}.cache.hit", 1)
+    else
+      LHC::Monitoring.statsd.count("#{key}.cache.miss", 1)
+    end
+  end
+
+  def monitor_caching_configuration_check
+    return true if all_interceptor_classes.include?(LHC::Caching) && all_interceptor_classes.index(self.class) > all_interceptor_classes.index(LHC::Caching)
+    warn("[WARNING] Your interceptors must include LHC::Caching and LHC::Monitoring and also in that order.")
+  end
+
+  def monitor_response!
+    if response.timeout?
+      LHC::Monitoring.statsd.count("#{key}.timeout", 1)
+    else
+      LHC::Monitoring.statsd.count("#{key}.#{response.code}", 1)
+    end
+  end
+
+  def key
     key = options(request.options)[:key]
     return key if key.present?
 
     url = sanitize_url(request.url)
     key = [
       'lhc',
-
+      module_parent_name.underscore,
       LHC::Monitoring.env || Rails.env,
       URI.parse(url).host.gsub(/\./, '_'),
       request.method
@@ -48,6 +73,10 @@ class LHC::Monitoring < LHC::Interceptor
     key.join('.')
   end
 
+  def module_parent_name
+    (ActiveSupport.gem_version >= Gem::Version.new('6.0.0')) ? Rails.application.class.module_parent_name : Rails.application.class.parent_name
+  end
+
   def sanitize_url(url)
     return url if url.match(%r{https?://})
     "http://#{url}"
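Taken together, the monitoring interceptor now reports request counts, response timing, cache hit/miss, and status/timeout under one key prefix. A minimal wiring sketch under assumed names (MyStatsd and the endpoint URL are placeholders; any object responding to count and timing should work):

# Sketch: hooking a statsd-like collector into LHC::Monitoring.
# MyStatsd and the URL below are illustrative, not part of the gem.
module MyStatsd
  def self.count(path, value); puts "count  #{path} #{value}"; end
  def self.timing(path, value); puts "timing #{path} #{value}"; end
end

LHC.config.interceptors = [LHC::Caching, LHC::Monitoring] # Caching must run before Monitoring
LHC::Monitoring.statsd = MyStatsd

# A cached GET would then emit keys shaped like:
#   lhc.<app_name>.<env>.local_ch.get.before_request
#   lhc.<app_name>.<env>.local_ch.get.count / .after_request
#   lhc.<app_name>.<env>.local_ch.get.cache.miss (or .cache.hit, only when cache: true is set)
#   lhc.<app_name>.<env>.local_ch.get.200 (or .timeout), plus a .time timing on success
LHC.get('http://local.ch', cache: true)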
data/lib/lhc/interceptors/throttle.rb
CHANGED
@@ -3,7 +3,6 @@
 require 'active_support/duration'
 
 class LHC::Throttle < LHC::Interceptor
-
   class OutOfQuota < StandardError
   end
 
@@ -50,12 +49,12 @@ class LHC::Throttle < LHC::Interceptor
 
   def limit(options:, response:)
     @limit ||=
-
-
-
-
-
-
+      if options.is_a?(Proc)
+        options.call(response)
+      elsif options.is_a?(Integer)
+        options
+      elsif options.is_a?(Hash) && options[:header]
+        response.headers[options[:header]]&.to_i
       end
   end
 
@@ -79,7 +78,9 @@ class LHC::Throttle < LHC::Interceptor
   end
 
   def convert_expires(value)
+    return if value.blank?
+    return value.call(response) if value.is_a?(Proc)
     return Time.parse(value) if value.match(/GMT/)
-    Time.zone.at(value.to_i).to_datetime
+    Time.zone.at(value.to_i).to_datetime
   end
 end
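With this change a throttle limit can be a plain Integer, a Proc receiving the response, or read from a response header, and expiry values may be blank, a Proc, a GMT string, or an epoch. A hedged sketch of the option shapes this resolves (the URL and header names are illustrative; the option keys follow the gem's documented throttle interface):

# Sketch: feeding the new limit/expires resolution. Values shown are assumptions.
LHC.config.interceptors = [LHC::Throttle]

LHC.get('http://local.ch',
  throttle: {
    provider: 'local.ch',
    track: true,
    break: '80%',
    limit: { header: 'Rate-Limit-Limit' },         # or an Integer, or ->(response) { ... }
    remaining: { header: 'Rate-Limit-Remaining' },
    expires: { header: 'Rate-Limit-Reset' }        # parsed via convert_expires: Proc, GMT string, or epoch seconds
  })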
data/lib/lhc/railtie.rb
CHANGED
data/lib/lhc/request.rb
CHANGED
@@ -25,7 +25,11 @@ class LHC::Request
     interceptors.intercept(:before_raw_request)
     self.raw = create_request
     interceptors.intercept(:before_request)
-
+    if self_executing && !response
+      run!
+    elsif response
+      on_complete(response)
+    end
   end
 
   def url
@@ -87,7 +91,7 @@ class LHC::Request
 
   def encode_url(url)
     return url if url.nil?
-    URI.escape(url)
+    Addressable::URI.escape(url)
   end
 
   def typhoeusize(options)
@@ -128,7 +132,7 @@ class LHC::Request
   end
 
   def on_complete(response)
-    self.response = LHC::Response.new(response, self)
+    self.response = response.is_a?(LHC::Response) ? response : LHC::Response.new(response, self)
     interceptors.intercept(:after_response)
     handle_error(self.response) unless self.response.success?
   end
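URI.escape was deprecated in Ruby 2.7 and removed in 3.0, which is why URL escaping now goes through the addressable gem. A small sketch of the replacement call (path is illustrative):

# Sketch: Addressable::URI.escape as the drop-in replacement for the removed URI.escape.
require 'addressable/uri'

Addressable::URI.escape('http://local.ch/docs/a file.pdf')
# => "http://local.ch/docs/a%20file.pdf"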
data/lib/lhc/rspec.rb
CHANGED
data/lib/lhc/version.rb
CHANGED
data/spec/error/to_s_spec.rb
CHANGED
@@ -45,7 +45,7 @@ describe LHC::Error do
 
   context 'some mocked response' do
     let(:request) do
-      double('
+      double('LHC::Request',
         method: 'GET',
         url: 'http://example.com/sessions',
         headers: { 'Bearer Token' => "aaaaaaaa-bbbb-cccc-dddd-eeee" },
@@ -55,7 +55,7 @@ describe LHC::Error do
     end
 
     let(:response) do
-      double('
+      double('LHC::Response',
         request: request,
         code: 500,
         options: { return_code: :internal_error, response_headers: "" },
@@ -64,6 +64,11 @@ describe LHC::Error do
 
     subject { LHC::Error.new('The error message', response) }
 
+    before do
+      allow(request).to receive(:is_a?).with(LHC::Request).and_return(true)
+      allow(response).to receive(:is_a?).with(LHC::Response).and_return(true)
+    end
+
     it 'produces correct debug output' do
       expect(subject.to_s.split("\n")).to eq(<<-MSG.strip_heredoc.split("\n"))
         GET http://example.com/sessions
data/spec/formats/multipart_spec.rb
CHANGED
@@ -6,7 +6,7 @@ describe LHC do
   include ActionDispatch::TestProcess
 
   context 'multipart' do
-    let(:file) {
+    let(:file) { Rack::Test::UploadedFile.new(Tempfile.new) }
     let(:body) { { size: 2231 }.to_json }
     let(:location) { 'http://local.ch/uploads/image.jpg' }
 
data/spec/formats/plain_spec.rb
CHANGED
@@ -6,7 +6,7 @@ describe LHC do
   include ActionDispatch::TestProcess
 
   context 'plain' do
-    let(:file) {
+    let(:file) { Rack::Test::UploadedFile.new(Tempfile.new) }
 
     it 'leaves plains requests unformatted' do
       stub_request(:post, 'http://local.ch/')
@@ -14,7 +14,7 @@ describe LHC do
       uri = URI.parse(response.request.url)
       path = [
         'web',
-        Rails.application.class.parent_name,
+        ((ActiveSupport.gem_version >= Gem::Version.new('6.0.0')) ? Rails.application.class.module_parent_name : Rails.application.class.parent_name).underscore,
         Rails.env,
         response.request.method,
         uri.scheme,
data/spec/interceptors/caching/main_spec.rb
CHANGED
@@ -47,7 +47,7 @@ describe LHC::Caching do
 
   it 'lets you configure the cache key that will be used' do
     LHC.config.endpoint(:local, 'http://local.ch', cache: { key: 'STATICKEY' })
-    expect(Rails.cache).to receive(:fetch).with("LHC_CACHE(v#{LHC::Caching::CACHE_VERSION}): STATICKEY").and_call_original
+    expect(Rails.cache).to receive(:fetch).at_least(:once).with("LHC_CACHE(v#{LHC::Caching::CACHE_VERSION}): STATICKEY").and_call_original
     expect(Rails.cache).to receive(:write).with("LHC_CACHE(v#{LHC::Caching::CACHE_VERSION}): STATICKEY", anything, anything).and_call_original
     stub
     LHC.get(:local)
@@ -66,8 +66,8 @@ describe LHC::Caching do
     stub
     LHC.config.endpoint(:local, 'http://local.ch', cache: true)
     original_response = LHC.get(:local)
-    cached_response = LHC.get(:local)
     expect(original_response.from_cache?).to eq false
+    cached_response = LHC.get(:local)
     expect(cached_response.from_cache?).to eq true
   end
 end
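The reordered expectations rely on the response itself reporting whether it was served from cache. A minimal usage sketch (endpoint URL is illustrative):

# Sketch: the second identical request is answered from cache.
LHC.config.interceptors = [LHC::Caching]
LHC::Caching.cache = Rails.cache

first  = LHC.get('http://local.ch', cache: true)
second = LHC.get('http://local.ch', cache: true)

first.from_cache?  # => false
second.from_cache? # => true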
data/spec/interceptors/caching/multilevel_cache_spec.rb
ADDED
@@ -0,0 +1,139 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+describe LHC::Caching do
+  let(:redis_url) { 'redis://localhost:6379/0' }
+  let(:redis_cache) do
+    spy('ActiveSupport::Cache::RedisCacheStore')
+  end
+
+  before do
+    Rails.cache.clear
+    LHC.config.interceptors = [LHC::Caching]
+    ActiveSupport::Cache::RedisCacheStore.new(url: redis_url).clear
+    allow(ActiveSupport::Cache::RedisCacheStore).to receive(:new).and_return(redis_cache)
+    allow(redis_cache).to receive(:present?).and_return(true)
+  end
+
+  let!(:request_stub) do
+    stub_request(:get, "http://local.ch/")
+      .to_return(body: '<h1>Hi there</h1>')
+  end
+
+  def request
+    LHC.get('http://local.ch', cache: true)
+  end
+
+  def response_has_been_cached_and_served_from_cache!
+    original_response = request
+    cached_response = request
+
+    expect(original_response.body).to eq cached_response.body
+    expect(original_response.code).to eq cached_response.code
+    expect(original_response.headers).to eq cached_response.headers
+    expect(original_response.options[:return_code]).to eq cached_response.options[:return_code]
+    expect(original_response.mock).to eq cached_response.mock
+
+    assert_requested request_stub, times: 1
+  end
+
+  context 'only local cache has been configured' do
+    before do
+      LHC::Caching.cache = Rails.cache
+    end
+
+    it 'serves a response from local cache without trying the central cache' do
+      expect(Rails.cache).to receive(:fetch).at_least(:once).and_call_original
+      expect(Rails.cache).to receive(:write).and_call_original
+      expect(-> { response_has_been_cached_and_served_from_cache! })
+        .to output(%Q{[LHC] served from local cache: "LHC_CACHE(v1): GET http://local.ch"\n}).to_stdout
+    end
+  end
+
+  context 'local and central cache have been configured' do
+    before do
+      LHC::Caching.cache = Rails.cache
+      LHC::Caching.central = {
+        read: redis_url,
+        write: redis_url
+      }
+    end
+
+    context 'found in central cache' do
+      it 'serves it from central cache if found there' do
+        expect(redis_cache).to receive(:fetch).and_return(nil,
+          body: '<h1>Hi there</h1>', code: 200, headers: nil, return_code: nil, mock: :webmock)
+        expect(redis_cache).to receive(:write).and_return(true)
+        expect(Rails.cache).to receive(:fetch).and_call_original
+        expect(Rails.cache).to receive(:write).and_call_original
+        expect(-> { response_has_been_cached_and_served_from_cache! })
+          .to output(%Q{[LHC] served from central cache: "LHC_CACHE(v1): GET http://local.ch"\n}).to_stdout
+      end
+    end
+
+    context 'not found in central cache' do
+      it 'serves it from local cache if found there' do
+        expect(redis_cache).to receive(:fetch).and_return(nil, nil)
+        expect(redis_cache).to receive(:write).and_return(true)
+        expect(Rails.cache).to receive(:fetch).at_least(:once).and_call_original
+        expect(Rails.cache).to receive(:write).and_call_original
+        expect(-> { response_has_been_cached_and_served_from_cache! })
+          .to output(%Q{[LHC] served from local cache: "LHC_CACHE(v1): GET http://local.ch"\n}).to_stdout
+      end
+    end
+  end
+
+  context 'only central read configured' do
+    before do
+      LHC::Caching.cache = Rails.cache
+      LHC::Caching.central = {
+        read: redis_url
+      }
+    end
+
+    it 'still serves responses from cache, but does not write them back' do
+      expect(redis_cache).to receive(:fetch).and_return(nil, body: '<h1>Hi there</h1>', code: 200, headers: nil, return_code: nil, mock: :webmock)
+      expect(redis_cache).not_to receive(:write)
+      expect(Rails.cache).to receive(:fetch).and_call_original
+      expect(Rails.cache).to receive(:write).and_call_original
+      expect(-> { response_has_been_cached_and_served_from_cache! })
+        .to output(%Q{[LHC] served from central cache: "LHC_CACHE(v1): GET http://local.ch"\n}).to_stdout
+    end
+  end
+
+  context 'only central write configured' do
+    before do
+      LHC::Caching.cache = Rails.cache
+      LHC::Caching.central = {
+        write: redis_url
+      }
+    end
+
+    it 'still writes responses to cache, but does not retrieve them from there' do
+      expect(redis_cache).not_to receive(:fetch)
+      expect(redis_cache).to receive(:write).and_return(true)
+      expect(Rails.cache).to receive(:fetch).at_least(:once).and_call_original
+      expect(Rails.cache).to receive(:write).and_call_original
+      expect(-> { response_has_been_cached_and_served_from_cache! })
+        .to output(%Q{[LHC] served from local cache: "LHC_CACHE(v1): GET http://local.ch"\n}).to_stdout
+    end
+  end
+
+  context 'central cache configured only' do
+    before do
+      LHC::Caching.cache = nil
+      LHC::Caching.central = {
+        read: redis_url,
+        write: redis_url
+      }
+    end
+
+    it 'does not inquire the local cache for information neither to write them' do
+      expect(redis_cache).to receive(:fetch).and_return(nil, body: '<h1>Hi there</h1>', code: 200, headers: nil, return_code: nil, mock: :webmock)
+      expect(redis_cache).to receive(:write).and_return(true)
+      expect(-> { response_has_been_cached_and_served_from_cache! })
+        .to output(%Q{[LHC] served from central cache: "LHC_CACHE(v1): GET http://local.ch"\n}).to_stdout
+    end
+  end
+end
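The new spec above covers a two-level cache: the local Rails cache plus a central Redis-backed store that can be enabled for read, write, or both. A configuration sketch, assuming a reachable Redis at an example URL:

# Sketch: local + central caching for LHC, mirroring the spec above.
# The Redis URLs are example values.
LHC.config.interceptors = [LHC::Caching]

LHC::Caching.cache = Rails.cache      # local level
LHC::Caching.central = {
  read: 'redis://localhost:6379/0',   # central level; omit :read or :write for a one-way setup
  write: 'redis://localhost:6379/0'
}

# Responses for requests made with cache: true are then fetched from and
# written to whichever of the two levels is configured.
LHC.get('http://local.ch', cache: true)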
data/spec/interceptors/caching/options_spec.rb
CHANGED
@@ -20,17 +20,6 @@ describe LHC::Caching do
     default_cache.clear
   end
 
-  it 'maps deprecated cache options' do
-    expected_options = { expires_in: 5.minutes, race_condition_ttl: 15.seconds }
-    expected_key = "LHC_CACHE(v1): key"
-    expect(default_cache).to receive(:write).with(expected_key, anything, expected_options)
-    expect(lambda {
-      LHC.get('http://local.ch', cache: true, cache_expires_in: 5.minutes, cache_key: 'key', cache_race_condition_ttl: 15.seconds)
-    }).to output(
-      /Cache options have changed! cache_expires_in, cache_key, cache_race_condition_ttl are deprecated and will be removed in future versions./
-    ).to_stderr
-  end
-
   it 'does cache' do
     expect(default_cache).to receive(:fetch)
     expect(default_cache).to receive(:write)
data/spec/interceptors/monitoring/caching_spec.rb
ADDED
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+describe LHC::Monitoring do
+  let(:stub) do
+    stub_request(:get, 'http://local.ch').to_return(status: 200, body: 'The Website')
+  end
+
+  module Statsd
+    def self.count(_path, _value); end
+
+    def self.timing(_path, _value); end
+  end
+
+  before(:each) do
+    LHC::Monitoring.statsd = Statsd
+    Rails.cache.clear
+    allow(Statsd).to receive(:count).with('lhc.dummy.test.local_ch.get.before_request', 1)
+    allow(Statsd).to receive(:count).with('lhc.dummy.test.local_ch.get.count', 1)
+    allow(Statsd).to receive(:count).with('lhc.dummy.test.local_ch.get.after_request', 1)
+    allow(Statsd).to receive(:count).with('lhc.dummy.test.local_ch.get.200', 1)
+  end
+
+  context 'interceptors configured correctly' do
+    before do
+      LHC.config.interceptors = [LHC::Caching, LHC::Monitoring]
+    end
+
+    context 'requesting with cache option' do
+      it 'monitors miss/hit for caching' do
+        stub
+        expect(Statsd).to receive(:count).with('lhc.dummy.test.local_ch.get.cache.miss', 1)
+        expect(Statsd).to receive(:count).with('lhc.dummy.test.local_ch.get.cache.hit', 1)
+        LHC.get('http://local.ch', cache: true)
+        LHC.get('http://local.ch', cache: true)
+      end
+    end
+
+    context 'request uncached' do
+      it 'requesting without cache option' do
+        stub
+        expect(Statsd).not_to receive(:count).with('lhc.dummy.test.local_ch.get.cache.miss', 1)
+        expect(Statsd).not_to receive(:count).with('lhc.dummy.test.local_ch.get.cache.hit', 1)
+        LHC.get('http://local.ch')
+        LHC.get('http://local.ch')
+      end
+    end
+  end
+
+  context 'wrong interceptor order' do
+    before(:each) do
+      LHC.config.interceptors = [LHC::Monitoring, LHC::Caching] # monitoring needs to be after Caching
+    end
+
+    it 'does monitors miss/hit for caching and warns about wrong order of interceptors' do
+      stub
+      expect(Statsd).not_to receive(:count).with('lhc.dummy.test.local_ch.get.cache.miss', 1)
+      expect(Statsd).not_to receive(:count).with('lhc.dummy.test.local_ch.get.cache.hit', 1)
+      expect(-> {
+        LHC.get('http://local.ch', cache: true)
+        LHC.get('http://local.ch', cache: true)
+      }).to output("[WARNING] Your interceptors must include LHC::Caching and LHC::Monitoring and also in that order.\n[WARNING] Your interceptors must include LHC::Caching and LHC::Monitoring and also in that order.\n").to_stderr
+    end
+  end
+end
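As the spec above documents, cache hit/miss metrics are only emitted when LHC::Monitoring runs after LHC::Caching; otherwise each cached request just prints the warning. A sketch of the intended ordering:

# Sketch: interceptor order required for cache hit/miss metrics.
LHC.config.interceptors = [LHC::Caching, LHC::Monitoring]   # correct: Caching before Monitoring
# LHC.config.interceptors = [LHC::Monitoring, LHC::Caching] # would skip cache.hit/cache.miss counters and log
# "[WARNING] Your interceptors must include LHC::Caching and LHC::Monitoring and also in that order."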