mamiya 0.0.1.alpha19 → 0.0.1.alpha20
- checksums.yaml +4 -4
- data/README.md +18 -0
- data/lib/mamiya/agent.rb +25 -30
- data/lib/mamiya/agent/actions.rb +9 -6
- data/lib/mamiya/agent/handlers/task.rb +13 -0
- data/lib/mamiya/agent/task_queue.rb +151 -0
- data/lib/mamiya/agent/tasks/abstract.rb +61 -0
- data/lib/mamiya/agent/tasks/clean.rb +44 -0
- data/lib/mamiya/agent/tasks/fetch.rb +60 -0
- data/lib/mamiya/agent/tasks/notifyable.rb +30 -0
- data/lib/mamiya/cli/client.rb +1 -1
- data/lib/mamiya/master.rb +4 -9
- data/lib/mamiya/master/agent_monitor_handlers.rb +42 -25
- data/lib/mamiya/master/web.rb +22 -7
- data/lib/mamiya/version.rb +1 -1
- data/mamiya.gemspec +1 -1
- data/spec/agent/actions_spec.rb +2 -8
- data/spec/agent/handlers/task_spec.rb +39 -0
- data/spec/agent/task_queue_spec.rb +246 -0
- data/spec/agent/tasks/abstract_spec.rb +58 -0
- data/spec/agent/tasks/clean_spec.rb +72 -0
- data/spec/agent/tasks/fetch_spec.rb +56 -0
- data/spec/agent/tasks/notifyable_spec.rb +37 -0
- data/spec/agent_spec.rb +33 -54
- data/spec/master/agent_monitor_spec.rb +155 -69
- data/spec/master/web_spec.rb +340 -1
- data/spec/master_spec.rb +0 -21
- metadata +22 -10
- data/lib/mamiya/agent/fetcher.rb +0 -165
- data/lib/mamiya/agent/handlers/fetch.rb +0 -78
- data/spec/agent/fetcher_spec.rb +0 -237
- data/spec/agent/handlers/fetch_spec.rb +0 -127
checksums.yaml CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e7ed13e8778df9ab9ce99e885013120a638a2bb6
+  data.tar.gz: 4d85eca80055453cc3aac803a6dc5c0d232d2d38
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: efef0fb46485a44362ae151ad79eaf6baec6ade4877bf02588b802fe553e1b2115408fda4fbf5628c77ba2d141391bfc1742341d43bfea3d76b6a2a9ee51fa58
+  data.tar.gz: c8dc1424d06eda65ce392f5d8e3acd3fa7f667c0d29c1bb6993a3cad27e099c5da2ffa97b3b573fdab045e957c060edbabdc200d1cbb1ae60d3297404f669f2f
data/README.md CHANGED

@@ -34,6 +34,24 @@ Or install it yourself as:
 
 TODO: Write usage instructions here
 
+## Upgrade Notes
+
+### 0.0.1.alpha20
+
+_tl;dr_ Don't mix alpha19 and alpha20.
+
+#### Internal component for distribution has been replaced completely
+
+alpha20 introduces the new class `TaskQueue` and removes `Fetcher`. This changes the way packages are distributed -- including the internal serf events and the job tracking performed by the Distribution API.
+As a result, distribution is not compatible between alpha19 and alpha20 (or later): a distribute task issued from alpha20 has no effect on alpha19 agents, and vice versa.
+
+Good news: there's no change in the Distribution API itself.
+
+#### Agent status has changed
+
+- Due to the removal of `Fetcher`, alpha20 removes the `.fetcher` object from the agent status.
+- Added `.queues`, which represents the task queues managed by the `TaskQueue` class.
+
 ## Contributing
 
 1. Fork it ( http://github.com/sorah/mamiya/fork )
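For reference, the agent status after this change looks roughly like the sketch below. It only illustrates the shape of the new `.queues` entry (built from `TaskQueue#status`, see the diffs that follow); the agent name, application, and package names are hypothetical, and the exact payload of each queued task depends on the task type:

```ruby
# Hypothetical Agent#status output on alpha20: `.fetcher` is gone and
# `.queues` mirrors TaskQueue#status (one entry per registered task class).
status = {
  name: 'agent-01',
  version: '0.0.1.alpha20',
  queues: {
    fetch: {
      queue:   [{'app' => 'myapp', 'pkg' => 'myapp-build2'}],  # pending tasks
      working: {'app' => 'myapp', 'pkg' => 'myapp-build1'},    # task in progress, or nil
    },
    clean: {queue: [], working: nil},
  },
  packages: {},  # existing packages per application
}
```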
data/lib/mamiya/agent.rb CHANGED

@@ -5,15 +5,17 @@ require 'mamiya/version'
 require 'mamiya/logger'
 
 require 'mamiya/steps/fetch'
-require 'mamiya/agent/
+require 'mamiya/agent/task_queue'
 
-require 'mamiya/agent/
+require 'mamiya/agent/tasks/fetch'
+require 'mamiya/agent/tasks/clean'
+
+require 'mamiya/agent/handlers/task'
 require 'mamiya/agent/actions'
 
 module Mamiya
   class Agent
     include Mamiya::Agent::Actions
-    FETCH_REMOVE_EVENT = 'mamiya:fetch-result:remove'
 
     def initialize(config, logger: Mamiya::Logger.new, events_only: nil)
       @config = config
@@ -27,10 +29,11 @@ module Mamiya
 
     attr_reader :config, :serf, :logger
 
-    def
-      @
-
-
+    def task_queue
+      @task_queue ||= Mamiya::Agent::TaskQueue.new(self, logger: logger, task_classes: [
+        Mamiya::Agent::Tasks::Fetch,
+        Mamiya::Agent::Tasks::Clean,
+      ])
     end
 
     def run!
@@ -53,19 +56,18 @@ module Mamiya
 
     def start
       serf_start
-
+      task_queue_start
     end
 
     def terminate
       serf.stop!
-
+      task_queue.stop!
     ensure
       @terminate = false
     end
 
     def update_tags!
       serf.tags['mamiya'] = ','.tap do |status|
-        status.concat('fetching,') if fetcher.working?
         status.concat('ready,') if status == ','
       end
 
@@ -80,11 +82,7 @@ module Mamiya
       s[:name] = serf.name
       s[:version] = Mamiya::VERSION
 
-      s[:
-        fetching: fetcher.current_job,
-        pending: fetcher.queue_size,
-        pending_jobs: fetcher.pending_jobs.map{ |_| _[0,2] },
-      }
+      s[:queues] = task_queue.status
 
       s[:packages] = self.existing_packages
     end
@@ -123,13 +121,14 @@ module Mamiya
       name = "mamiya:#{type}"
       name << ":#{action}" if action
 
-      serf.event(name, payload.to_json, coalesce: coalesce)
+      serf.event(name, payload.merge(name: self.serf.name).to_json, coalesce: coalesce)
     end
 
     private
 
     def init_serf
       agent_config = (config[:serf] && config[:serf][:agent]) || {}
+      # agent_config.merge!(log: $stderr)
       Villein::Agent.new(**agent_config).tap do |serf|
         serf.on_user_event do |event|
           user_event_handler(event)
@@ -151,10 +150,9 @@ module Mamiya
       logger.debug "Serf became ready"
     end
 
-    def
-      logger.debug "Starting
-
-      fetcher.start!
+    def task_queue_start
+      logger.debug "Starting task_queue"
+      task_queue.start!
     end
 
     def user_event_handler(event)
@@ -174,9 +172,14 @@ module Mamiya
 
       if Handlers.const_defined?(class_name)
         handler = Handlers.const_get(class_name).new(self, event)
-
+        meth = action || :run!
+        if handler.respond_to?(meth)
+          handler.send meth
+        else
+          logger.debug "Handler #{class_name} doesn't respond to #{meth}, skipping"
+        end
       else
-        logger.warn("Discarded event[#{event.user_event}] because we don't handle it")
+        #logger.warn("Discarded event[#{event.user_event}] because we don't handle it")
       end
     rescue Exception => e
       logger.fatal("Error during handling event: #{e.inspect}")
@@ -188,13 +191,5 @@ module Mamiya
     rescue JSON::ParserError
       logger.warn("Discarded event[#{event.user_event}] with invalid payload (unable to parse as json)")
     end
-
-    def cleanup_handler(app, package)
-      trigger('fetch-result', action: 'remove', coalesce: false,
-        name: self.serf.name,
-        application: app,
-        package: package,
-      )
-    end
   end
 end
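The reworked `user_event_handler` above now resolves the handler method from the event's action suffix, falling back to `run!`. A minimal, self-contained sketch of that dispatch pattern; the handler class here is hypothetical and not part of mamiya:

```ruby
# Sketch of the action-or-run! dispatch used in user_event_handler:
# an event without an action suffix calls #run!; an event such as
# "mamiya:task:cancel" would call #cancel if the handler defines it,
# and is otherwise skipped with a debug log.
class ExampleHandler
  def run!
    puts 'default entry point'
  end

  def cancel
    puts 'invoked for the :cancel action'
  end
end

def dispatch(handler, action)
  meth = action || :run!
  if handler.respond_to?(meth)
    handler.send(meth)
  else
    puts "handler doesn't respond to #{meth}, skipping"
  end
end

dispatch(ExampleHandler.new, nil)      # => "default entry point"
dispatch(ExampleHandler.new, :cancel)  # => "invoked for the :cancel action"
dispatch(ExampleHandler.new, :other)   # => "handler doesn't respond to other, skipping"
```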
data/lib/mamiya/agent/actions.rb CHANGED

@@ -1,14 +1,17 @@
 module Mamiya
   class Agent
     module Actions
-
-
-
-
-
-          coalesce: false
+      def order_task(task, coalesce: false, **payload)
+        trigger('task',
+          coalesce: coalesce,
+          task: task,
+          **payload,
         )
       end
+
+      def distribute(application, package)
+        order_task('fetch', app: application, pkg: package)
+      end
     end
   end
 end
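A usage sketch of the new actions, assuming a running agent; the application and package names are hypothetical and the payload shown is approximate:

```ruby
# Both helpers emit a 'mamiya:task' serf user event; the agent's serf name is
# merged into the payload by Agent#trigger (see the agent.rb diff above).
agent.distribute('myapp', 'myapp-build-20140701')
# is shorthand for:
agent.order_task('fetch', app: 'myapp', pkg: 'myapp-build-20140701')
# and results in a user event roughly like:
#   event:   "mamiya:task"
#   payload: {"task":"fetch","app":"myapp","pkg":"myapp-build-20140701","name":"agent-01"}
```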
data/lib/mamiya/agent/task_queue.rb ADDED

@@ -0,0 +1,151 @@
+require 'mamiya/agent'
+require 'thread'
+
+# XXX: TODO: have to refactor
+module Mamiya
+  class Agent
+    class TaskQueue
+      GRACEFUL_TIMEOUT = 30
+      JOIN_TIMEOUT = 30
+
+      def initialize(agent, task_classes: [], logger: Mamiya::Logger.new)
+        @agent = agent
+        @task_classes = task_classes
+        @external_queue = Queue.new
+        @queues = {}
+        @worker_threads = nil
+        @statuses = nil
+        @queueing_thread = nil
+        @lifecycle_mutex = Mutex.new
+        @terminate = false
+        @logger = logger['task_queue']
+      end
+
+      attr_reader :worker_threads, :task_classes, :agent
+
+      def start!
+        @lifecycle_mutex.synchronize do
+          return if running?
+
+          worker_threads = {}
+          queues = {}
+          statuses = {}
+
+          @task_classes.each { |klass|
+            name = klass.identifier.to_sym
+            queue = queues[name] = Queue.new
+            statuses[name] = {pending: [], lock: Mutex.new}
+            th = worker_threads[name] = Thread.new(
+              klass, queue,
+              statuses[name],
+              &method(:worker_loop)
+            )
+            th.abort_on_exception = true
+          }
+
+          @terminate = false
+          @statuses = statuses
+          @queues = queues
+          exqueue = @external_queue = Queue.new
+          @queueing_thread = Thread.new(queues, exqueue, statuses, &method(:queueing_loop))
+          @queueing_thread.abort_on_exception = true
+          @worker_threads = worker_threads
+        end
+      end
+
+      def stop!(graceful = false)
+        @lifecycle_mutex.synchronize do
+          return unless running?
+          @terminate = true
+          @queueing_thread.kill if @queueing_thread.alive?
+          if graceful
+            @worker_threads.each do |th|
+              th.join(GRACEFUL_TIMEOUT)
+            end
+          end
+          @worker_threads.each do |name, th|
+            next unless th.alive?
+            th.kill
+            th.join(JOIN_TIMEOUT)
+          end
+          @queues = nil
+          @worker_threads = nil
+        end
+      end
+
+      def running?
+        @worker_threads && !@terminate
+      end
+
+      def working?
+        running? && status.any? { |name, stat| stat[:working] }
+      end
+
+      def enqueue(task_name, task)
+        raise Stopped, 'this task queue is stopped' unless running?
+
+        @logger.debug "enqueue #{task_name.inspect}, #{task.inspect}, #{@external_queue.inspect}"
+        @external_queue << [task_name, task]
+        self
+      end
+
+      def status
+        return nil unless running?
+        Hash[@statuses.map do |name, st|
+          [name, {
+            queue: st[:pending].dup,
+            working: st[:working] ? st[:working].dup : nil,
+          }]
+        end]
+      end
+
+      private
+
+      def worker_loop(task_class, queue, status)
+        while task = queue.pop
+          break if @terminate
+          begin
+            status[:lock].synchronize do
+              status[:pending].delete task
+              status[:working] = task
+            end
+            task_class.new(self, task, agent: @agent, logger: @logger).execute
+          rescue Exception => e
+            @logger.error "#{task_class} worker catched error: #{e}\n\t#{e.backtrace.join("\n\t")}"
+          ensure
+            status[:lock].synchronize do
+              status[:working] = nil
+            end
+          end
+          break if @terminate
+        end
+      end
+
+      def queueing_loop(queues, external_queue, statuses)
+        @logger.debug "queueing thread started #{external_queue.inspect}"
+        while _ = external_queue.pop
+          task_name, task = _
+
+          break if @terminate
+
+          queue = queues[task_name]
+          unless queue
+            @logger.debug "Ignoring task #{task_name} (queue not defined)"
+            next
+          end
+
+          statuses[task_name][:lock].synchronize do
+            statuses[task_name][:pending] << task
+          end
+          @logger.info "Queueing task #{task_name}: #{task.inspect}"
+          queue << task
+          break if @terminate
+        end
+        @logger.debug "queueing thread finish"
+      rescue Exception => e
+        @logger.error "queueing thread error #{e.inspect}\n\t#{e.backtrace.join("\n\t")}"
+        raise e
+      end
+    end
+  end
+end
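A minimal usage sketch of `TaskQueue` outside the agent's event flow; `agent` is assumed to be a `Mamiya::Agent` instance and the task payload is hypothetical:

```ruby
require 'mamiya/agent/task_queue'
require 'mamiya/agent/tasks/fetch'
require 'mamiya/agent/tasks/clean'

# One worker thread per registered task class; queue names are the
# class identifiers (:fetch, :clean).
queue = Mamiya::Agent::TaskQueue.new(agent, task_classes: [
  Mamiya::Agent::Tasks::Fetch,
  Mamiya::Agent::Tasks::Clean,
])

queue.start!
queue.enqueue(:fetch, 'app' => 'myapp', 'pkg' => 'myapp-build1')
queue.working?     # => true while any worker is executing a task
queue.status       # => {fetch: {queue: [...], working: ...}, clean: {queue: [], working: nil}}
queue.stop!(true)  # graceful: give running tasks up to GRACEFUL_TIMEOUT seconds
```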
data/lib/mamiya/agent/tasks/abstract.rb ADDED

@@ -0,0 +1,61 @@
+require 'mamiya/logger'
+
+module Mamiya
+  class Agent
+    module Tasks
+      class Abstract
+        def initialize(task_queue, task, agent: nil, logger: Mamiya::Logger.new, raise_error: false)
+          @agent = agent
+          @task_queue = task_queue
+          @task = task.merge('task' => self.class.identifier)
+          @error = nil
+          @raise_error = raise_error
+          @logger = logger["#{self.class.identifier}:#{self.task_id}"]
+        end
+
+        def self.identifier
+          self.name.split(/::/).last.gsub(/(.)([A-Z])/, '\1_\2').downcase
+        end
+
+        attr_reader :task, :error, :logger, :agent, :task_queue
+
+        def raise_error?
+          !!@raise_error
+        end
+
+        def task_id
+          task['id'] || "0x#{self.__id__.to_s(16)}"
+        end
+
+        def execute
+          before
+          run
+        rescue Exception => error
+          @error = error
+          raise if raise_error?
+          errored
+        ensure
+          after
+        end
+
+        def before
+        end
+
+        def run
+        end
+
+        def after
+        end
+
+        def errored
+        end
+
+        private
+
+        def config
+          @config ||= agent ? agent.config : nil
+        end
+      end
+    end
+  end
+end
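To illustrate the lifecycle `Tasks::Abstract` provides (`before` then `run`, `errored` on failure, `after` always), here is a hypothetical subclass; it is not part of mamiya:

```ruby
require 'mamiya/agent/tasks/abstract'

module Mamiya
  class Agent
    module Tasks
      # Hypothetical example task; the identifier derives from the class name.
      class ExampleSleep < Abstract
        def before
          logger.info "starting #{task.inspect}"
        end

        def run
          sleep(task['duration'] || 1)
        end

        def errored
          logger.error "failed: #{error.inspect}"
        end

        def after
          logger.info 'finished'
        end
      end
    end
  end
end

Mamiya::Agent::Tasks::ExampleSleep.identifier                   # => "example_sleep"
Mamiya::Agent::Tasks::ExampleSleep.new(nil, 'duration' => 0).execute
```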
data/lib/mamiya/agent/tasks/clean.rb ADDED

@@ -0,0 +1,44 @@
+require 'mamiya/agent/tasks/abstract'
+
+module Mamiya
+  class Agent
+    module Tasks
+      class Clean < Abstract
+
+        def run
+          victims.each do |app, victim|
+            @logger.info "Cleaning up: remove #{victim}"
+            File.unlink(victim) if File.exist?(victim)
+
+            meta_victim = victim.sub(/\.tar\.gz\z/, '.json')
+            if File.exist?(meta_victim)
+              @logger.info "Cleaning up: remove #{meta_victim}"
+              File.unlink(meta_victim)
+            end
+
+            package_name = File.basename(victim, '.tar.gz')
+
+            # XXX: depends on FS structure
+            agent.trigger('pkg', action: 'remove',
+              application: app,
+              package: package_name,
+              coalesce: false,
+            )
+          end
+        end
+
+        def victims
+          Dir[File.join(config[:packages_dir], '*')].flat_map do |app|
+            packages = Dir[File.join(app, "*.tar.gz")].
+              sort_by { |_| [File.mtime(_), _] }
+
+            packages[0...-(config[:keep_packages])].map do |victim|
+              [File.basename(app), victim]
+            end
+          end
+        end
+
+      end
+    end
+  end
+end
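`Clean#victims` relies on two agent configuration keys; a minimal sketch of the relevant settings (the path and values here are hypothetical):

```ruby
# Per application directory under packages_dir, Clean keeps the newest
# keep_packages tarballs and removes older ones together with their
# .json metadata, emitting a 'mamiya:pkg:remove' event for each removal.
config = {
  packages_dir:  '/var/mamiya/packages',  # one subdirectory per application
  keep_packages: 3,                       # number of newest tarballs to keep
}
```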