pwrake 0.9.9.2 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.gitignore +2 -0
- data/CHANGES_V2.md +90 -0
- data/{LICENSE.txt → MIT-LICENSE} +2 -3
- data/README +12 -0
- data/README.md +75 -52
- data/bin/gfwhere-pipe +23 -12
- data/bin/pwrake +22 -29
- data/bin/pwrake_branch +24 -0
- data/lib/pwrake/branch.rb +22 -0
- data/lib/pwrake/branch/branch.rb +213 -0
- data/lib/pwrake/branch/branch_application.rb +53 -0
- data/lib/pwrake/branch/fiber_queue.rb +36 -0
- data/lib/pwrake/branch/file_utils.rb +101 -0
- data/lib/pwrake/branch/shell.rb +231 -0
- data/lib/pwrake/{profiler.rb → branch/shell_profiler.rb} +28 -27
- data/lib/pwrake/branch/worker_communicator.rb +104 -0
- data/lib/pwrake/{gfarm_feature.rb → gfarm/gfarm_path.rb} +2 -100
- data/lib/pwrake/gfarm/gfarm_postprocess.rb +53 -0
- data/lib/pwrake/iomux/channel.rb +70 -0
- data/lib/pwrake/iomux/handler.rb +124 -0
- data/lib/pwrake/iomux/handler_set.rb +35 -0
- data/lib/pwrake/iomux/runner.rb +62 -0
- data/lib/pwrake/logger.rb +3 -150
- data/lib/pwrake/master.rb +30 -137
- data/lib/pwrake/master/fiber_pool.rb +69 -0
- data/lib/pwrake/master/idle_cores.rb +30 -0
- data/lib/pwrake/master/master.rb +345 -0
- data/lib/pwrake/master/master_application.rb +150 -0
- data/lib/pwrake/master/postprocess.rb +16 -0
- data/lib/pwrake/{graphviz.rb → misc/graphviz.rb} +0 -0
- data/lib/pwrake/{mcgp.rb → misc/mcgp.rb} +63 -42
- data/lib/pwrake/option/host_map.rb +158 -0
- data/lib/pwrake/option/option.rb +357 -0
- data/lib/pwrake/option/option_filesystem.rb +112 -0
- data/lib/pwrake/queue/locality_aware_queue.rb +158 -0
- data/lib/pwrake/queue/no_action_queue.rb +67 -0
- data/lib/pwrake/queue/queue_array.rb +366 -0
- data/lib/pwrake/queue/task_queue.rb +164 -0
- data/lib/pwrake/report.rb +1 -0
- data/lib/pwrake/report/parallelism.rb +9 -3
- data/lib/pwrake/report/report.rb +50 -103
- data/lib/pwrake/report/task_stat.rb +83 -0
- data/lib/pwrake/task/task_algorithm.rb +107 -0
- data/lib/pwrake/task/task_manager.rb +32 -0
- data/lib/pwrake/task/task_property.rb +98 -0
- data/lib/pwrake/task/task_rank.rb +48 -0
- data/lib/pwrake/task/task_wrapper.rb +296 -0
- data/lib/pwrake/version.rb +1 -1
- data/lib/pwrake/worker/executor.rb +169 -0
- data/lib/pwrake/worker/gfarm_directory.rb +90 -0
- data/lib/pwrake/worker/invoker.rb +199 -0
- data/lib/pwrake/worker/load.rb +14 -0
- data/lib/pwrake/worker/log_executor.rb +73 -0
- data/lib/pwrake/worker/shared_directory.rb +74 -0
- data/lib/pwrake/worker/worker_main.rb +14 -0
- data/lib/pwrake/worker/writer.rb +59 -0
- data/setup.rb +1212 -1502
- data/spec/003/Rakefile +2 -2
- data/spec/008/Rakefile +2 -1
- data/spec/009/Rakefile +1 -1
- data/spec/009/pwrake_conf.yaml +1 -3
- data/spec/hosts +0 -2
- data/spec/pwrake_spec.rb +9 -8
- metadata +50 -21
- data/lib/pwrake.rb +0 -19
- data/lib/pwrake/application.rb +0 -232
- data/lib/pwrake/counter.rb +0 -54
- data/lib/pwrake/file_utils.rb +0 -98
- data/lib/pwrake/gfwhere_pool.rb +0 -109
- data/lib/pwrake/host_list.rb +0 -88
- data/lib/pwrake/locality_aware_queue.rb +0 -413
- data/lib/pwrake/option.rb +0 -400
- data/lib/pwrake/rake_modify.rb +0 -14
- data/lib/pwrake/shell.rb +0 -186
- data/lib/pwrake/task_algorithm.rb +0 -475
- data/lib/pwrake/task_queue.rb +0 -633
- data/lib/pwrake/timer.rb +0 -22
@@ -0,0 +1,69 @@
|
|
1
|
+
module Pwrake

  # A pool of worker Fibers that consume items from an internal queue.
  # The given block is called with the pool itself and must return a new
  # Fiber; fibers call #deq to obtain work and #count_down when done.
  class FiberPool

    # max_fiber: upper bound on concurrently existing fibers.
    # block: factory receiving self, returning a Fiber.
    def initialize(max_fiber=2,&block)
      @new_fiber_block = block
      @max_fiber = max_fiber
      @count = 0                 # number of enqueued-but-unfinished items
      @fibers = []               # every fiber ever created
      @idle_fiber = []           # fibers parked inside #deq
      @q = []                    # pending work items
      # set in the past so the first enq may spawn a fiber immediately
      @new_fiber_start_time = Time.now - 10
    end

    # Enqueue one work item and wake (or lazily create) a fiber for it.
    # Returns the finished flag (nil until #finish is called).
    def enq(x)
      @q << x
      @count += 1
      may_spawn = @idle_fiber.empty? &&
        @fibers.size < @max_fiber &&
        (Time.now - @new_fiber_start_time) > 0.1
      @idle_fiber.push(new_fiber) if may_spawn
      fiber = @idle_fiber.shift
      fiber.resume unless fiber.nil?
      @finished
    end

    # Called from inside a pool fiber: block (yield) until an item is
    # available; returns nil once the pool is finished and drained.
    def deq
      loop do
        break unless @q.empty?
        return nil if @finished
        @idle_fiber << Fiber.current
        Fiber.yield
      end
      @q.shift
    end

    # Mark one item as completed.
    def count_down
      @count -= 1
    end

    # True when every enqueued item has been counted down.
    def empty?
      @count == 0
    end

    # Finish the pool: let idle fibers observe the finished flag and
    # terminate, then verify that none of them is still alive.
    def finish
      @finished = true
      run
      until @fibers.empty?
        f = @fibers.shift
        if f.alive?
          raise RuntimeError,"FiberPool#finish: fiber is still alive."
        end
      end
    end

    # Resume every idle fiber once; returns whether any fiber was idle.
    def run
      had_idle = !@idle_fiber.empty?
      until @idle_fiber.empty?
        @idle_fiber.shift.resume
      end
      had_idle
    end

    # Create a fiber via the factory block and register it.
    def new_fiber
      fb = @new_fiber_block.call(self)
      @fibers << fb
      fb
    end

  end
end
|
69
|
+
|
@@ -0,0 +1,30 @@
|
|
1
|
+
module Pwrake

  # A Hash mapping a key (host id) to its number of idle cores.
  # Entries are removed when the count reaches zero; counts must
  # never go negative.
  class IdleCores < Hash

    # Add n idle cores for key k.
    def increase(k,n)
      current = self[k]
      n += current if current
      self[k] = n
    end

    # Remove n idle cores for key k; delete the entry at zero,
    # raise if the result would be negative.
    def decrease(k,n)
      remaining = (self[k] || 0) - n
      if remaining > 0
        self[k] = remaining
      elsif remaining == 0
        delete(k)
      else
        raise "# of cores must be non-negative"
      end
    end

    # Largest idle-core count over all keys (0 when empty).
    def max
      result = 0
      each_value{|v| result = v if v > result}
      result
    end

  end
end
|
@@ -0,0 +1,345 @@
|
|
1
|
+
module Pwrake

  # Master process of a Pwrake workflow. It launches one branch process
  # per sub-host (locally in a thread, or remotely via ssh), distributes
  # tasks to idle worker cores through per-branch Channels, and collects
  # task results, running postprocessing in a FiberPool.
  class Master

    def initialize
      @runner = Runner.new                 # I/O multiplexer for handlers
      @hostid_by_taskname = {}             # task name -> host id while the task runs
      @option = Option.new
      @hdl_set = HandlerSet.new            # handlers of all branch processes
      @channel_by_hostid = {}              # host id -> Channel to its branch
      @channels = []                       # Channels to every branch
      @hosts = {}                          # host id -> host name
      init_logger
    end

    attr_reader :task_queue
    attr_reader :option
    attr_reader :logger

    # Create @logger from the LOG_DIR/LOG_FILE options; without LOG_DIR,
    # log to stderr when DEBUG is set, otherwise discard output.
    def init_logger
      if logdir = @option['LOG_DIR']
        ::FileUtils.mkdir_p(logdir)
        logfile = File.join(logdir,@option['LOG_FILE'])
        @logger = Logger.new(logfile)
      else
        if @option['DEBUG']
          @logger = Logger.new($stderr)
        else
          @logger = Logger.new(File::NULL)
        end
      end

      if @option['DEBUG']
        @logger.level = Logger::DEBUG
      else
        @logger.level = Logger::INFO
      end
    end

    # Finalize option values and open the task logger.
    # NOTE(review): the hosts parameter is currently unused.
    def init(hosts=nil)
      @option.init
      TaskWrapper.init_task_logger(@option)
    end

    # Create the Handler connected to the branch for sub_host.
    # For localhost (unless env T starts with n/f) the branch runs in a
    # thread of this process; otherwise it is spawned over ssh.
    def setup_branch_handler(sub_host)
      if sub_host == "localhost" && /^(n|f)/i !~ ENV['T']
        hdl = Handler.new(@runner) do |w0,w1,r2|
          @thread = Thread.new(r2,w0,@option) do |r,w,o|
            Rake.application.run_branch_in_thread(r,w,o)
          end
        end
      else
        hdl = Handler.new(@runner) do |w0,w1,r2|
          # prepend this executable's directory to PATH on the remote side
          dir = File.absolute_path(File.dirname($PROGRAM_NAME))
          #args = Shellwords.shelljoin(@args)
          cmd = "ssh -x -T -q #{sub_host} '" +
            "cd \"#{Dir.pwd}\";"+
            "PATH=#{dir}:${PATH} exec pwrake_branch'"
          Log.debug("BranchCommunicator cmd=#{cmd}")
          #$stderr.puts "BranchCommunicator cmd=#{cmd}"
          spawn(cmd,:pgroup=>true,:out=>w0,:err=>w1,:in=>r2)
          # parent side closes its copies of the child's pipe ends
          w0.close
          w1.close
          r2.close
        end
        # send options to the remote branch and wait for its start line
        Marshal.dump(@option,hdl.iow)
        hdl.iow.flush
        s = hdl.ior.gets
        if !s or s.chomp != "pwrake_branch start"
          raise RuntimeError,"pwrake_branch start failed: receive #{s.inspect}"
        end
      end
      hdl.host = sub_host
      return hdl
    end

    # Handle TERM/INT: first signal asks branches to stop, second warns,
    # third exits immediately.
    def signal_trap(sig)
      case @killed
      when 0
        # log writing failed. can't be called from trap context
        if Rake.application.options.debug
          $stderr.puts "\nSignal trapped. (sig=#{sig} pid=#{Process.pid}"+
            " thread=#{Thread.current} ##{@killed})"
          $stderr.puts caller
        else
          $stderr.puts "\nSignal trapped. (sig=#{sig} pid=#{Process.pid}"+
            " ##{@killed})"
        end
        $stderr.puts "Exiting..."
        @no_more_run = true
        @failed = true
        @hdl_set.kill(sig)
      when 1
        $stderr.puts "\nOnce more Ctrl-C (SIGINT) for exit."
      else
        Kernel.exit(false) # must wait for normal exit
      end
      @killed += 1
    end

    # Connect to every branch, send it the host list, read back the
    # actual core counts, build the task queue, and asynchronously wait
    # for branch setup completion before installing signal handlers.
    def setup_branches
      sum_ncore = 0

      @option.host_map.each do |sub_host, wk_hosts|
        @hdl_set << hdl = setup_branch_handler(sub_host)
        @channels << chan = Channel.new(hdl)
        chan.puts "host_list_begin"
        wk_hosts.each do |host_info|
          name = host_info.name
          ncore = host_info.ncore
          host_id = host_info.id
          Log.debug "connecting #{name} ncore=#{ncore} id=#{host_id}"
          chan.puts "host:#{host_id} #{name} #{ncore}"
          @channel_by_hostid[host_id] = chan
          @hosts[host_id] = name
        end
        chan.puts "host_list_end"

        # read per-worker core counts until "ncore:done"
        while s = chan.gets
          case s
          when /^ncore:done$/
            break
          when /^ncore:(\d+):(\d+)$/
            id, ncore = $1.to_i, $2.to_i
            Log.debug "worker_id=#{id} ncore=#{ncore}"
            @option.host_map.by_id[id].set_ncore(ncore)
            sum_ncore += ncore
          when /^exited$/
            raise RuntimeError,"Unexpected branch exit"
          else
            msg = "#{hdl.host}:#{s.inspect}"
            raise RuntimeError,"invalid return: #{msg}"
          end
        end
      end

      Log.info "num_cores=#{sum_ncore}"
      @hosts.each do |id,host|
        Log.info "#{host} id=#{id} ncore=#{
          @option.host_map.by_id[id].idle_cores}"
      end
      # queue class is chosen by option (e.g. LocalityAwareQueue)
      queue_class = Pwrake.const_get(@option.queue_class)
      @task_queue = queue_class.new(@option.host_map)

      # wait for "branch_setup:done" from every branch in the background;
      # signal traps are installed only after setup succeeded
      @branch_setup_thread = Thread.new do
        @channels.each do |chan|
          s = chan.gets
          if /^branch_setup:done$/ !~ s
            raise RuntimeError,"branch_setup failed"
          end
        end
        @killed = 0
        [:TERM,:INT].each do |sig|
          Signal.trap(sig) do
            signal_trap(sig)
          end
        end
      end

    end

    # Start one fiber per channel; each fiber is immediately resumed
    # with its channel as the block argument.
    def create_fiber(channels,&blk)
      channels.each do |chan|
        fb = Fiber.new(&blk)
        fb.resume(chan)
      end
    end

    # Main execution entry: search the task graph, optionally run graph
    # partitioning first, then dispatch tasks and process branch replies
    # until all tasks finished (or failure policy stops dispatching).
    def invoke(t, args)
      @failed = false
      t.pw_search_tasks(args)

      if @option['GRAPH_PARTITION']
        # first pass: run no-action tasks only, to feed graph partitioning
        setup_postprocess0
        @task_queue.deq_noaction_task do |tw,hid|
          tw.preprocess
          tw.status = "end"
          @post_pool.enq(tw)
        end
        @runner.run
        @post_pool.finish
        Log.debug "@post_pool.finish"

        require 'pwrake/misc/mcgp'
        MCGP.graph_partition(@option.host_map)
      end

      setup_postprocess1
      @branch_setup_thread.join
      send_task_to_idle_core
      #
      # one reader fiber per branch channel: parse task results
      create_fiber(@channels) do |chan|
        while s = chan.get_line
          Log.debug "Master:recv #{s.inspect} from branch[#{chan.handler.host}]"
          case s
          when /^task(\w+):(\d*):(.*)$/o
            status, shell_id, task_name = $1, $2.to_i, $3
            tw = Rake.application[task_name].wrapper
            tw.shell_id = shell_id
            tw.status = status
            hid = @hostid_by_taskname[task_name]
            @task_queue.task_end(tw,hid) # @idle_cores.increase(..
            # check failure
            if tw.status == "fail"
              $stderr.puts %[task "#{tw.name}" failed.]
              if !@failed
                @failed = true
                # FAILURE_TERMINATION decides how remaining tasks are treated
                case @option['FAILURE_TERMINATION']
                when 'kill'
                  @hdl_set.kill("INT")
                  @no_more_run = true
                  $stderr.puts "... Kill running tasks."
                when 'continue'
                  $stderr.puts "... Continue runable tasks."
                else # 'wait'
                  @no_more_run = true
                  $stderr.puts "... Wait for running tasks."
                end
              end
              if tw.has_output_file? && File.exist?(tw.name)
                handle_failed_target(tw.name)
              end
            end
            # postprocess
            @post_pool.enq(tw) # must be after @no_more_run = true
            break if @finished
          when /^exited$/o
            @exited = true
            Log.debug "receive #{s.chomp} from branch"
            break
          else
            Log.error "unknown result: #{s.inspect}"
            $stderr.puts(s)
          end
        end
        Log.debug "Master#invoke: fiber end"
      end
      @runner.run
      @post_pool.finish
      Log.debug "Master#invoke: end of task=#{t.name}"
    end

    # Dequeue runnable tasks and send each to its assigned host's branch;
    # tasks without an action are completed immediately. Raises when no
    # task could be dispatched although unexecuted tasks remain
    # (would otherwise hang forever).
    def send_task_to_idle_core
      #Log.debug "#{self.class}#send_task_to_idle_core start"
      count = 0
      # @idle_cores.decrease(..
      @task_queue.deq_task do |tw,hid|
        count += 1
        @hostid_by_taskname[tw.name] = hid
        tw.preprocess
        if tw.has_action?
          s = "#{hid}:#{tw.task_id}:#{tw.name}"
          @channel_by_hostid[hid].put_line(s)
          tw.exec_host = @hosts[hid]
        else
          tw.status = "end"
          @task_queue.task_end(tw,hid) # @idle_cores.increase(..
          @post_pool.enq(tw)
        end
      end
      if count == 0 && !@task_queue.empty? && @hostid_by_taskname.empty?
        m="No task was invoked while unexecuted tasks remain"
        Log.error m
        raise RuntimeError,m
      end
      #Log.debug "#{self.class}#send_task_to_idle_core end time=#{Time.now-tm}"
    end

    # Build @post_pool, a FiberPool of postprocessing fibers. The given
    # block is called after each postprocessed task; a truthy return
    # value terminates the fiber.
    def setup_postprocess
      i = 0
      n = @option.max_postprocess_pool
      @post_pool = FiberPool.new(n) do |pool|
        postproc = @option.postprocess(@runner)
        i += 1
        Log.debug "New postprocess fiber ##{i}"
        Fiber.new do
          j = i
          while tw = pool.deq()
            Log.debug "postproc##{j} deq=#{tw.name}"
            loc = postproc.run(tw)
            tw.postprocess(loc)
            pool.count_down
            @hostid_by_taskname.delete(tw.name)
            break if yield(pool,j)
          end
          postproc.close
          Log.debug "postproc##{j} end"
        end
      end
    end

    # Postprocess pool that never terminates early (graph-partition pass).
    def setup_postprocess0
      setup_postprocess{false}
    end

    # Postprocess pool for the main pass: after each task either close
    # all channels when everything is done, or dispatch more tasks.
    def setup_postprocess1
      setup_postprocess do |pool,j|
        #Log.debug "@no_more_run=#{@no_more_run.inspect}"
        #Log.debug "@task_queue.empty?=#{@task_queue.empty?}"
        #Log.debug "@hostid_by_taskname=#{@hostid_by_taskname.inspect}"
        #Log.debug "pool.empty?=#{pool.empty?}"
        if (@no_more_run || @task_queue.empty?) &&
            @hostid_by_taskname.empty?
          Log.debug "postproc##{j} closing @channels=#{@channels.inspect}"
          @finished = true
          @channels.each{|ch| ch.finish} # exit
          true
        elsif !@no_more_run
          send_task_to_idle_core
          false
        end
      end
    end

    # Dispose of the output file of a failed task according to the
    # FAILED_TARGET option: rename (default), delete, or leave.
    def handle_failed_target(name)
      case @option['FAILED_TARGET']
      #
      when /rename/i, NilClass
        dst = name+"._fail_"
        ::FileUtils.mv(name,dst)
        msg = "Rename failed target file '#{name}' to '#{dst}'"
        $stderr.puts(msg)
        Log.warn(msg)
      #
      when /delete/i
        ::FileUtils.rm(name)
        msg = "Delete failed target file '#{name}'"
        $stderr.puts(msg)
        Log.warn(msg)
      #
      when /leave/i
      end
    end

    # Shut down branches and the task logger; returns the failure flag
    # (truthy if any task failed or a signal was received).
    def finish
      Log.debug "Master#finish begin"
      @branch_setup_thread.join
      @hdl_set.exit unless @exited
      TaskWrapper.close_task_logger
      Log.debug "Master#finish end"
      @failed
    end

  end
end
|
@@ -0,0 +1,150 @@
|
|
1
|
+
module Pwrake

  # A mixin for Rake::Application: delegates logging/queue access to the
  # current role (the Master), replaces Rake's run/invoke_task with the
  # Pwrake master workflow, and extends the standard command-line options.
  module MasterApplication

    # Pwrake option table held by the current role.
    def pwrake_options
      @role.option
    end

    # Logger of the current role.
    def logger
      @role.logger
    end

    # Task logger of the current role.
    def task_logger
      @role.task_logger
    end

    # Task queue of the current role.
    def task_queue
      @role.task_queue
    end

    # Run the Pwrake application: parse options, create the Master,
    # load the Rakefile, set up branches, execute top-level tasks, and
    # always finalize (reporting elapsed time and failure status).
    def run
      standard_exception_handling do
        init("pwrake") # <- parse options here
        @role = @master = Master.new
        load_rakefile
        t = Time.now
        @master.init
        @master.setup_branches
        begin
          Log.debug "init: #{Time.now-t} sec"
          t = Time.now
          top_level
          Log.debug "main: #{Time.now-t} sec"
          t = Time.now
        ensure
          # finish must run even when top_level raises; it reports failure
          @failed = @master.finish
          Log.debug "finish: #{Time.now-t} sec"
          Log.info "pwrake elapsed time: #{Time.now-START_TIME} sec"
        end
        Kernel.exit(false) if @failed
      end
    end

    # Invoke a single task through the Master instead of Rake's own
    # invocation machinery.
    def invoke_task(task_string)
      name, args = parse_task_string(task_string)
      t = self[name]
      @master.invoke(t,args)
    end

    # Rake's standard options, with --version patched to also print the
    # Pwrake version, plus Pwrake-specific options appended.
    def standard_rake_options
      opts = super
      opts.each_with_index do |a,i|
        if a[0] == '--version'
          a[3] = lambda { |value|
            puts "rake, version #{RAKEVERSION}"
            puts "pwrake, version #{Pwrake::VERSION}"
            exit
          }
        end
      end

      opts.concat(
        [
          ['-F', '--hostfile FILE',
            "[Pw] Read hostnames from FILE",
            lambda { |value|
              options.hostfile = value
            }
          ],
          ['-j', '--jobs [N]',
            "[Pw] Number of threads at localhost (default: # of processors)",
            lambda { |value|
              if value
                if /^[+-]?\d+$/ =~ value
                  options.num_threads = value.to_i
                else
                  raise ArgumentError,"Invalid argument for -j: #{value}"
                end
              else
                # -j without an argument means "use all processors"
                options.num_threads = 0
              end
            }
          ],
          ['-L', '--log', '--log-dir [DIRECTORY]', "[Pw] Write log to DIRECTORY",
            lambda { |value|
              if value.kind_of? String
                options.log_dir = value
              else
                options.log_dir = ""
              end
            }
          ],
          ['--ssh-opt', '--ssh-option OPTION', "[Pw] Option passed to SSH",
            lambda { |value|
              options.ssh_option = value
            }
          ],
          ['--filesystem FILESYSTEM', "[Pw] Specify FILESYSTEM (nfs|gfarm)",
            lambda { |value|
              options.filesystem = value
            }
          ],
          ['--gfarm', "[Pw] FILESYSTEM=gfarm",
            lambda { |value|
              options.filesystem = "gfarm"
            }
          ],
          ['-A', '--disable-affinity', "[Pw] Turn OFF affinity (AFFINITY=off)",
            lambda { |value|
              options.disable_affinity = true
            }
          ],
          ['-S', '--disable-steal', "[Pw] Turn OFF task steal",
            lambda { |value|
              options.disable_steal = true
            }
          ],
          ['-d', '--debug',
            "[Pw] Output Debug messages",
            lambda { |value|
              options.debug = true
            }
          ],
          # fixed typo in help text: "configuation" -> "configuration"
          ['--pwrake-conf [FILE]',
            "[Pw] Pwrake configuration file in YAML",
            lambda {|value| options.pwrake_conf = value}
          ],
          ['--show-conf','--show-config',
            "[Pw] Show Pwrake configuration options",
            lambda {|value| options.show_conf = true }
          ],
          ['--report LOGDIR',"[Pw] Report workflow statistics from LOGDIR to HTML and exit.",
            lambda {|value| options.report_dir = value }
          ],
          ['--clear-gfarm2fs',"[Pw] Clear gfarm2fs mountpoints left after failure.",
            lambda { |value|
              Option.new.clear_gfarm2fs
              exit
            }
          ],

        ])
      opts
    end

  end
end
|