mamiya 0.0.1.alpha19 → 0.0.1.alpha20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +18 -0
- data/lib/mamiya/agent.rb +25 -30
- data/lib/mamiya/agent/actions.rb +9 -6
- data/lib/mamiya/agent/handlers/task.rb +13 -0
- data/lib/mamiya/agent/task_queue.rb +151 -0
- data/lib/mamiya/agent/tasks/abstract.rb +61 -0
- data/lib/mamiya/agent/tasks/clean.rb +44 -0
- data/lib/mamiya/agent/tasks/fetch.rb +60 -0
- data/lib/mamiya/agent/tasks/notifyable.rb +30 -0
- data/lib/mamiya/cli/client.rb +1 -1
- data/lib/mamiya/master.rb +4 -9
- data/lib/mamiya/master/agent_monitor_handlers.rb +42 -25
- data/lib/mamiya/master/web.rb +22 -7
- data/lib/mamiya/version.rb +1 -1
- data/mamiya.gemspec +1 -1
- data/spec/agent/actions_spec.rb +2 -8
- data/spec/agent/handlers/task_spec.rb +39 -0
- data/spec/agent/task_queue_spec.rb +246 -0
- data/spec/agent/tasks/abstract_spec.rb +58 -0
- data/spec/agent/tasks/clean_spec.rb +72 -0
- data/spec/agent/tasks/fetch_spec.rb +56 -0
- data/spec/agent/tasks/notifyable_spec.rb +37 -0
- data/spec/agent_spec.rb +33 -54
- data/spec/master/agent_monitor_spec.rb +155 -69
- data/spec/master/web_spec.rb +340 -1
- data/spec/master_spec.rb +0 -21
- metadata +22 -10
- data/lib/mamiya/agent/fetcher.rb +0 -165
- data/lib/mamiya/agent/handlers/fetch.rb +0 -78
- data/spec/agent/fetcher_spec.rb +0 -237
- data/spec/agent/handlers/fetch_spec.rb +0 -127
data/lib/mamiya/agent/fetcher.rb
DELETED
@@ -1,165 +0,0 @@
-require 'thread'
-require 'mamiya/steps/fetch'
-
-require 'mamiya/storages/abstract'
-
-module Mamiya
-  class Agent
-    ##
-    # This class has a queue for fetching packages.
-    class Fetcher
-      GRACEFUL_TIMEOUT = 60
-
-      def initialize(config, logger: Mamiya::Logger.new)
-        @worker_thread = nil
-        @queueing_thread = nil
-        @external_queue = Queue.new
-        @internal_queue = Queue.new
-
-        @config = config
-        @destination = config[:packages_dir]
-        @keep_packages = config[:keep_packages]
-        @current_job = nil
-        @pending_jobs = []
-
-        @logger = logger['fetcher']
-      end
-
-      attr_reader :worker_thread
-      attr_reader :queueing_thread
-      attr_reader :current_job
-      attr_reader :pending_jobs
-      attr_writer :cleanup_hook
-
-      def enqueue(app, package, before: nil, &callback)
-        @external_queue << [app, package, before, callback]
-      end
-
-      def queue_size
-        @internal_queue.size
-      end
-
-      def start!
-        stop!
-        @logger.info 'Starting...'
-
-        @worker_thread = Thread.new(&method(:main_loop))
-        @worker_thread.abort_on_exception = true
-
-        @queueing_thread = Thread.new(&method(:queueing_loop))
-        @queueing_thread.abort_on_exception = true
-      end
-
-      def stop!(graceful = false)
-        {@external_queue => @queueing_thread, @internal_queue => @worker_thread}.each do |q, th|
-          next unless th
-          if graceful
-            q << :suicide
-            th.join(GRACEFUL_TIMEOUT)
-          end
-
-          th.kill if th.alive?
-        end
-      ensure
-        @worker_thread = nil
-        @queueing_thread = nil
-      end
-
-      def running?
-        @worker_thread && @worker_thread.alive? && \
-          @queueing_thread && @queueing_thread.alive?
-      end
-
-      def working?
-        !!@current_job
-      end
-
-      def cleanup
-        Dir[File.join(@destination, '*')].each do |app|
-          packages = Dir[File.join(app, "*.tar.gz")]
-          packages.sort_by! { |_| [File.mtime(_), _] }
-          packages[0...-@keep_packages].each do |victim|
-            @logger.info "Cleaning up: remove #{victim}"
-            File.unlink(victim) if File.exist?(victim)
-
-            meta_victim = victim.sub(/\.tar\.gz\z/, '.json')
-            if File.exist?(meta_victim)
-              @logger.info "Cleaning up: remove #{meta_victim}"
-              File.unlink(meta_victim)
-            end
-
-            package_name = File.basename(victim, '.tar.gz')
-            if @cleanup_hook
-              @cleanup_hook.call(File.basename(app), package_name)
-            end
-          end
-        end
-      end
-
-      private
-
-      def main_loop
-        while order = @internal_queue.pop
-          break if order == :suicide
-          @pending_jobs.delete(order)
-          handle_order(*order)
-        end
-      end
-
-      def queueing_loop
-        while order = @external_queue.pop
-          break if order == :suicide
-          @pending_jobs << order
-          @internal_queue << order
-        end
-      end
-
-      def handle_order(app, package, before_hook = nil, callback = nil)
-        @current_job = [app, package]
-        @logger.info "fetching #{app}:#{package}"
-
-        if @config[:fetch_sleep]
-          wait = rand(@config[:fetch_sleep])
-          @logger.debug "Sleeping #{wait} before starting fetch"
-          sleep wait
-        end
-
-        # TODO: Limit apps by configuration
-
-        destination = File.join(@destination, app)
-
-        Dir.mkdir(destination) unless File.exist?(destination)
-
-        before_hook.call if before_hook
-
-        # TODO: before run hook for agent.update_tags!
-        Mamiya::Steps::Fetch.new(
-          application: app,
-          package: package,
-          destination: destination,
-          config: @config,
-        ).run!
-
-        @current_job = nil
-        callback.call if callback
-
-        @logger.info "fetched #{app}:#{package}"
-
-        cleanup
-
-      rescue Mamiya::Storages::Abstract::AlreadyFetched => e
-        @logger.info "skipped #{app}:#{package} (already fetched)"
-        callback.call(e) if callback
-      rescue Exception => e
-        @logger.fatal "fetch failed (#{app}:#{package}): #{e.inspect}"
-        e.backtrace.each do |line|
-          @logger.fatal "\t#{line}"
-        end
-
-        callback.call(e) if callback
-      ensure
-        @current_job = nil
-      end
-    end
-  end
-end
data/lib/mamiya/agent/handlers/fetch.rb
DELETED
@@ -1,78 +0,0 @@
-require 'mamiya/agent/handlers/abstract'
-require 'mamiya/storages/abstract'
-
-module Mamiya
-  class Agent
-    module Handlers
-      class Fetch < Abstract
-        FETCH_ACK_EVENT = 'mamiya:fetch-result:ack'
-        FETCH_START_EVENT = 'mamiya:fetch-result:start'
-        FETCH_SUCCESS_EVENT = 'mamiya:fetch-result:success'
-        FETCH_ERROR_EVENT = 'mamiya:fetch-result:error'
-
-        IGNORED_ERRORS = [
-          Mamiya::Storages::Abstract::AlreadyFetched.new(''),
-        ].freeze
-
-        def run!
-          # XXX: Why don't use agent#trigger ...
-          agent.serf.event(FETCH_ACK_EVENT,
-            {
-              name: agent.serf.name,
-              application: payload['application'],
-              package: payload['package'],
-              pending: agent.fetcher.queue_size.succ,
-            }.to_json,
-            coalesce: false,
-          )
-
-          agent.fetcher.enqueue(
-            payload['application'], payload['package'],
-            before: proc {
-              agent.serf.event(FETCH_START_EVENT,
-                {
-                  name: agent.serf.name,
-                  application: payload['application'],
-                  package: payload['package'],
-                  pending: agent.fetcher.queue_size.succ,
-                }.to_json,
-                coalesce: false,
-              )
-              agent.update_tags!
-            }
-          ) do |error|
-            if error && IGNORED_ERRORS.lazy.grep(error.class).none?
-              # FIXME: TODO: may exceed 256
-              begin
-                agent.serf.event(FETCH_ERROR_EVENT,
-                  {
-                    name: agent.serf.name,
-                    application: payload['application'],
-                    package: payload['package'],
-                    error: error.class,
-                    pending: agent.fetcher.queue_size,
-                  }.to_json,
-                  coalesce: false,
-                )
-              rescue Villein::Client::SerfError => e
-                agent.logger.error "error sending fetch error event: #{e.inspect}"
-              end
-            else
-              agent.serf.event(FETCH_SUCCESS_EVENT,
-                {
-                  name: agent.serf.name,
-                  application: payload['application'],
-                  package: payload['package'],
-                  pending: agent.fetcher.queue_size,
-                }.to_json,
-                coalesce: false,
-              )
-            end
-
-            agent.update_tags!
-          end
-        end
-      end
-    end
-  end
-end
data/spec/agent/fetcher_spec.rb
DELETED
@@ -1,237 +0,0 @@
-require 'spec_helper'
-require 'pathname'
-require 'tmpdir'
-require 'fileutils'
-
-require 'mamiya/agent/fetcher'
-require 'mamiya/steps/fetch'
-
-
-describe Mamiya::Agent::Fetcher do
-  let!(:tmpdir) { Dir.mktmpdir('mamiya-agent-fetcher-spec') }
-  after { FileUtils.remove_entry_secure(tmpdir) if File.exist?(tmpdir) }
-
-  let(:config) do
-    {packages_dir: tmpdir, keep_packages: 2}
-  end
-
-  subject(:fetcher) { described_class.new(config) }
-
-  describe "lifecycle" do
-    it "can start and stop" do
-      expect(fetcher.worker_thread).to be_nil
-      expect(fetcher).not_to be_running
-
-      fetcher.start!
-
-      expect(fetcher).to be_running
-      expect(fetcher.worker_thread).to be_a(Thread)
-      expect(fetcher.worker_thread).to be_alive
-      th = fetcher.worker_thread
-
-      fetcher.stop!
-
-      20.times { break unless th.alive?; sleep 0.1 }
-      expect(th).not_to be_alive
-
-      expect(fetcher.worker_thread).to be_nil
-      expect(fetcher).not_to be_running
-    end
-
-    it "can graceful stop"
-  end
-
-  describe "#cleanup" do
-    before do
-      path = Pathname.new(tmpdir)
-
-      path.join('a').mkdir
-      File.write path.join('a', "a.tar.gz"), "\n"
-      File.write path.join('a', "a.json"), "\n"
-      File.write path.join('a', "b.json"), "\n"
-      File.write path.join('a', "b.tar.gz"), "\n"
-      File.write path.join('a', "c.json"), "\n"
-      File.write path.join('a', "c.tar.gz"), "\n"
-      path.join('b').mkdir
-      File.write path.join('b', "a.tar.gz"), "\n"
-      File.write path.join('b', "a.json"), "\n"
-
-      path.join('c').mkdir
-      File.write path.join('c', "a.tar.gz"), "\n"
-      File.write path.join('c', "b.json"), "\n"
-    end
-
-    it "cleans up" do
-      called = []
-      fetcher.cleanup_hook = proc { |a,b| called << [a,b] }
-
-      fetcher.cleanup
-
-      path = Pathname.new(tmpdir)
-      existences = Hash[
-        [
-          path.join('a', 'a.tar.gz'),
-          path.join('a', 'a.json'),
-          path.join('a', 'b.tar.gz'),
-          path.join('a', 'b.json'),
-          path.join('a', 'c.tar.gz'),
-          path.join('a', 'c.json'),
-        ].map { |file|
-          [file, file.exist?]
-        }
-      ]
-
-      expect(called).to eq([['a', 'a']])
-      expect(existences).to eq(
-        path.join('a', 'a.tar.gz') => false,
-        path.join('a', 'a.json') => false,
-        path.join('a', 'b.tar.gz') => true,
-        path.join('a', 'b.json') => true,
-        path.join('a', 'c.tar.gz') => true,
-        path.join('a', 'c.json') => true,
-      )
-    end
-  end
-
-  describe "#pending_jobs" do
-    before do
-      step = double('fetch-step')
-      allow(step).to receive(:run!)
-      allow(Mamiya::Steps::Fetch).to receive(:new).with(
-        application: 'myapp',
-        package: 'package',
-        destination: File.join(tmpdir, 'myapp'),
-        config: config,
-      ).and_return(step)
-    end
-
-    it "shows remaining jobs" do
-      fetcher.start!; fetcher.worker_thread.kill
-
-      expect {
-        fetcher.enqueue('myapp', 'package')
-        fetcher.stop!(:graceful)
-      }.to change { fetcher.pending_jobs } \
-        .from([]).to([['myapp', 'package', nil, nil]])
-
-      fetcher.start!; fetcher.stop!(:graceful)
-
-      expect(fetcher.pending_jobs).to be_empty
-    end
-  end
-
-  describe "mainloop" do
-    before do
-      allow(step).to receive(:run!)
-      allow(Mamiya::Steps::Fetch).to receive(:new).with(
-        application: 'myapp',
-        package: 'package',
-        destination: File.join(tmpdir, 'myapp'),
-        config: config,
-      ).and_return(step)
-
-      fetcher.start!
-    end
-
-    let(:step) { double('fetch-step') }
-
-    it "starts fetch step for each order" do
-      flag = false
-
-      expect(step).to receive(:run!) do
-        flag = true
-      end
-
-      fetcher.enqueue('myapp', 'package')
-      fetcher.stop!(:graceful)
-    end
-
-    it "calls callback" do
-      received = true
-
-      fetcher.enqueue('myapp', 'package') do |succeeded|
-        expect(fetcher.working?).to be_false
-        received = succeeded
-      end
-
-      fetcher.stop!(:graceful)
-
-      expect(received).to be_nil
-    end
-
-    it "calls cleanup" do
-      expect(fetcher).to receive(:cleanup)
-      fetcher.enqueue('myapp', 'package')
-      fetcher.stop!(:graceful)
-    end
-
-    it "claims itself as working" do
-      expect(fetcher.working?).to be_false
-      expect(fetcher.current_job).to be_nil
-
-      received = false
-      fetcher.enqueue 'myapp', 'package', before: proc { |error|
-        expect(fetcher.working?).to be_true
-        expect(fetcher.current_job).to eq %w(myapp package)
-        received = true
-      }
-
-      fetcher.stop!(:graceful)
-      expect(received).to be_true
-      expect(fetcher.working?).to be_false
-      expect(fetcher.current_job).to be_nil
-    end
-
-    context "with config.fetch_sleep" do
-      it "calls sleep" do
-        config[:fetch_sleep] = 1
-        expect(fetcher).to receive(:sleep)
-        fetcher.enqueue 'myapp', 'package'
-        fetcher.stop!(:graceful)
-      end
-    end
-
-    context "with before hook" do
-      it "calls callback" do
-        run = false
-        received = false
-
-        allow(step).to receive(:run!) do
-          run = true
-        end
-
-        fetcher.enqueue('myapp', 'package', before: proc {
-          received = true
-          expect(run).to be_false
-        })
-        fetcher.stop!(:graceful)
-
-        expect(received).to be_true
-      end
-    end
-
-    context "when fetch step raised error" do
-      let(:exception) { Exception.new("he he...") }
-
-      before do
-        allow(step).to receive(:run!).and_raise(exception)
-      end
-
-      it "calls callback with error" do
-        received = nil
-
-        fetcher.enqueue('myapp', 'package') do |error|
-          received = error
-        end
-
-        fetcher.stop!(:graceful)
-
-        expect(received).to eq exception
-      end
-    end
-
-    after do
-      fetcher.stop!
-    end
-  end
-end