rbbt-util 5.32.6 → 5.32.12

@@ -38,13 +38,13 @@ module SimpleDSL
   hook_method(method)
 
   # Execute
+  @config ||= {}
   if actions.is_a? Proc
     begin
-      require 'parse_tree_extensions'
-      require 'parse_tree'
-      require 'ruby2ruby'
-      @config[@@method_name] = actions.to_ruby.collect[1..-2].join
+      require 'method_source'
+      @config[@@method_name] = actions.source.split("\n")[1..-2] * "\n"
     rescue Exception
+      Log.exception $!
       @config[@@method_name] = NoRuby2Ruby.new "The gem ruby2ruby is not installed. It will not work on ruby 1.9."
     end
 
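A minimal sketch (not part of the diff) of the method_source approach adopted above, assuming the 'method_source' gem is installed and the block is defined in a file (Proc#source cannot recover source typed into IRB):

    require 'method_source'

    action = proc do
      persist true
      cpus 4
    end

    # Proc#source returns the literal block source; dropping the first and
    # last lines strips the `proc do` / `end` wrapper, as the DSL code does.
    body = action.source.split("\n")[1..-2] * "\n"
    puts body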
data/lib/rbbt/workflow.rb CHANGED
@@ -347,6 +347,11 @@ module Workflow
 
   inputs = IndiferentHash.setup(inputs)
 
+  not_overriden = inputs.delete :not_overriden
+  if not_overriden
+    inputs[:not_overriden] = :not_overriden_dep
+  end
+
   Workflow.resolve_locals(inputs)
 
   task_info = task_info(taskname)
@@ -400,7 +405,11 @@ module Workflow
   jobname = DEFAULT_NAME if jobname.nil? or jobname.empty?
 
   dependencies = real_dependencies(task, jobname, defaults.merge(inputs), task_dependencies[taskname] || [])
-  overriden = has_overriden_inputs || dependencies.select{|dep| dep.overriden }.any?
+
+  overriden_deps = dependencies.select{|d| d.overriden }
+  true_overriden_deps = overriden_deps.select{|d| TrueClass === d.overriden }
+
+  overriden = has_overriden_inputs || overriden_deps.any?
 
   if real_inputs.empty? && Workflow::TAG != :inputs && ! overriden
     step_path = step_path taskname, jobname, [], [], task.extension
@@ -413,7 +422,16 @@ module Workflow
   job = get_job_step step_path, task, input_values, dependencies
   job.workflow = self
   job.clean_name = jobname
-  job.overriden = overriden
+
+  case not_overriden
+  when TrueClass
+    job.overriden = has_overriden_inputs || true_overriden_deps.any?
+  when :not_overriden_dep
+    job.overriden = true if has_overriden_inputs || true_overriden_deps.any?
+  else
+    job.overriden = true if has_overriden_inputs || overriden_deps.any?
+  end
+
   job.real_inputs = real_inputs.keys
   job
 end
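A hedged illustration of the new :not_overriden input handled above (the workflow, task and input names are hypothetical, not from the diff): it is deleted from the regular inputs at job construction and only changes how the overriden flag is propagated to the top-level step.

    # Request the job so that overridden dependencies do not mark the
    # top-level step as overridden (only "true" overrides still count).
    job = MyWorkflow.job(:my_task, "run1",
                         :some_input    => 1,
                         :not_overriden => true)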
@@ -34,13 +34,40 @@ class Step
     end
   end
 
+
+  def overriden?
+    return true if @overriden
+    return true if dependencies.select{|dep| dep.overriden? }.any?
+    info[:archived_info].each do |f,i|
+      return true if i[:overriden] || i["overriden"]
+    end if info[:archived_info]
+    return false
+  end
+
   def overriden
-    if @overriden.nil?
-      return [] if dependencies.nil?
-      dependencies.select{|dep| dep.overriden }.any?
-    else
-      @overriden
+    @overriden
+    #if @overriden.nil?
+    #  return false if dependencies.nil?
+    #  dependencies.select{|dep| dep.overriden? }.any?
+    #else
+    #  @overriden
+    #end
+  end
+
+  def overriden_deps
+    ord = []
+    deps = dependencies.dup
+    while dep = deps.shift
+      case dep.overriden
+      when FalseClass
+        next
+      when Symbol
+        ord << dep
+      else
+        deps += dep.dependencies
+      end
     end
+    ord
   end
 
   def initialize(path, task = nil, inputs = nil, dependencies = nil, bindings = nil, clean_name = nil)
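A short hedged note (not part of the diff): after this change a dependency's overriden value can be false, true, or a Symbol, and overriden_deps walks the dependency tree collecting only the Symbol-marked steps, recursing through the rest. A hypothetical use:

    step.overriden_deps.each do |dep|
      puts "#{dep.task_name} overridden via #{dep.overriden}"
    end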
@@ -134,7 +161,11 @@ class Step
 
   archived_info = {}
   dependencies.each do |dep|
-    archived_info[dep.path] = dep.info
+    if Symbol === dep.overriden && ! Open.exists?(dep.info_file)
+      archived_info[dep.path] = dep.overriden
+    else
+      archived_info[dep.path] = dep.info
+    end
     archived_info.merge!(dep.archived_info)
   end if dependencies
 
@@ -248,7 +248,7 @@ class Step
 def init_info(force = false)
   return nil if @exec || info_file.nil? || (Open.exists?(info_file) && ! force)
   Open.lock(info_file, :lock => info_lock) do
-    i = {:status => :waiting, :pid => Process.pid, :path => path, :real_inputs => real_inputs}
+    i = {:status => :waiting, :pid => Process.pid, :path => path, :real_inputs => real_inputs, :overriden => overriden}
     i[:dependencies] = dependencies.collect{|dep| [dep.task_name, dep.name, dep.path]} if dependencies
     Misc.sensiblewrite(info_file, Step.serialize_info(i), :force => true, :lock => false)
     @info_cache = IndiferentHash.setup(i)
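A brief hedged sketch (usage assumed, not shown in the diff): since init_info now persists the overridden state, it can be read back later from the step's info file.

    step.init_info
    step.info[:overriden]   # mirrors the step's overriden attribute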
@@ -551,7 +551,7 @@ class Step
 
 def aborted?
   status = self.status
-  status == :aborted || ((status != :dependencies && status != :cleaned && status != :noinfo && status != :setup && status != :noinfo) && nopid?)
+  status == :aborted || ((status != :ending && status != :dependencies && status != :cleaned && status != :noinfo && status != :setup && status != :noinfo) && nopid?)
 end
 
 # {{{ INFO
@@ -1,3 +1,4 @@
+require 'rbbt/util/migrate'
 class Step
 
   MAIN_RSYNC_ARGS="-avztAXHP --copy-links"
@@ -126,63 +127,48 @@ class Step
     end
   end
 
-  def self.migrate(path, search_path, options = {})
-    resource=Rbbt
-
-    orig_path = path
-    other_rsync_args = options[:rsync]
-
-    recursive = options[:recursive]
+  def self.migrate_source_paths(path, resource = Rbbt, source = nil, recursive = true)
     recursive = false if recursive.nil?
-
-    paths = if options[:source]
-              Misc.ssh_run(options[:source], <<-EOF).split("\n")
+    if source
+      lpath, *paths = Misc.ssh_run(source, <<-EOF).split("\n")
 require 'rbbt-util'
 require 'rbbt/workflow'
 
-path = "#{path}"
 recursive = #{ recursive.to_s }
+path = "#{path}"
 
-if File.exists?(path)
+if Open.exists?(path)
   path = #{resource.to_s}.identify(path)
 else
   path = Path.setup(path)
 end
 
-files = path.glob_all
-
+files = path.glob_all.collect{|p| File.directory?(p) ? p + "/" : p }
 files = Step.job_files_for_archive(files, recursive)
 
+puts path
 puts files * "\n"
-              EOF
-
-            else
-              if File.exists?(path)
-                path = resource.identify(path)
-                raise "Resource #{resource} could not identify #{orig_path}" if path.nil?
-              else
-                path = Path.setup(path)
-              end
-              files = path.glob_all
-              files = Step.job_files_for_archive(files, recursive)
-              files
-            end
-
-
-    target = if options[:target]
-               target = Misc.ssh_run(options[:target], <<-EOF).split("\n").first
-require 'rbbt-util'
-path = "var/jobs"
-resource = #{resource.to_s}
-search_path = "#{search_path}"
-puts resource[path].find(search_path)
-               EOF
-             else
-               resource['var/jobs'].find(search_path)
-             end
+      EOF
+
+      [path, paths.collect{|p| [source, p] * ":"}, lpath]
+    else
+      path = Path.setup(path.dup)
+      files = path.glob_all
+      files = Step.job_files_for_archive(files, recursive)
+
+      [path, files, path]
+    end
+  end
+
+  def self.migrate(path, search_path, options = {})
+    search_path = 'user' if search_path.nil?
+
+    resource = Rbbt
+
+    path, real_paths, lpath = self.migrate_source_paths(path, resource, options[:source], options[:recursive])
 
     subpath_files = {}
-    paths.sort.each do |path|
+    real_paths.sort.each do |path|
       parts = path.split("/")
       subpath = parts[0..-4] * "/" + "/"
 
@@ -190,73 +176,16 @@ puts resource[path].find(search_path)
         subpath = subpath_files.keys.last
       end
 
-      source = path[subpath.length..-1]
+      source = path.chars[subpath.length..-1] * ""
 
       subpath_files[subpath] ||= []
       subpath_files[subpath] << source
     end
 
-    synced_files = []
-    subpath_files.each do |subpath, files|
-      if options[:target]
-        CMD.cmd("ssh #{options[:target]} mkdir -p '#{File.dirname(target)}'")
-      else
-        Open.mkdir File.dirname(target)
-      end
+    target = Rbbt.migrate_target_path('var/jobs', search_path, resource, options[:target])
 
-      if options[:source]
-        source = [options[:source], subpath] * ":"
-      else
-        source = subpath
-      end
-      target = [options[:target], target] * ":" if options[:target]
-
-      next if File.exists?(source) && File.exists?(target) && File.expand_path(source) == File.expand_path(target)
-
-      files_and_dirs = Set.new( files )
-      files.each do |file|
-        synced_files << File.join(subpath, file)
-
-        parts = file.split("/")[0..-2].reject{|p| p.empty?}
-        while parts.any?
-          files_and_dirs << parts * "/"
-          parts.pop
-        end
-      end
-
-      TmpFile.with_file(files_and_dirs.sort_by{|l| l.length}.to_a * "\n") do |tmp_include_file|
-        test_str = options[:test] ? '-nv' : ''
-
-        cmd = "rsync #{MAIN_RSYNC_ARGS} --progress #{test_str} --files-from='#{tmp_include_file}' #{source}/ #{target}/ #{other_rsync_args}"
-
-        #cmd << " && rm -Rf #{source}" if options[:delete]
-        if options[:print]
-          ppp Open.read(tmp_include_file)
-          puts cmd
-        else
-          CMD.cmd_log(cmd, :log => Log::INFO)
-        end
-      end
-    end
-
-    if options[:delete] && synced_files.any?
-      puts Log.color :magenta, "About to erase these files:"
-      synced_files.each do |p|
-        puts Log.color :red, p
-      end
-
-      if options[:non_interactive]
-        response = 'yes'
-      else
-        puts Log.color :magenta, "Type 'yes' if you are sure:"
-        response = STDIN.gets.chomp
-      end
-
-      if response == 'yes'
-        synced_files.each do |p|
-          Open.rm p
-        end
-      end
+    subpath_files.each do |subpath, files|
+      Rbbt.migrate_files([subpath], target, options.merge(:files => files))
     end
   end
 
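A hedged usage sketch of the refactored migration entry point (the job path and remote host are made up; the option names are taken from the code above):

    # Pull the files of a job tree from a remote host into the local 'user'
    # search path, recursing into its dependencies.
    Step.migrate('var/jobs/MyWorkflow/my_task/Default', 'user',
                 :source    => 'cluster.example.org',
                 :recursive => true)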
@@ -188,6 +188,7 @@ rbbt.png_plot('#{plot}', 'plot(timeline)', width=#{width}, height=#{height}, poi
 seed_jobs.each do |step|
   jobs += step.rec_dependencies + [step]
   step.info[:archived_info].each do |path,ainfo|
+    next unless Hash === ainfo
     archived_step = Step.new path
 
     archived_step.define_singleton_method :info do
@@ -205,7 +206,7 @@ rbbt.png_plot('#{plot}', 'plot(timeline)', width=#{width}, height=#{height}, poi
 
   end
 
-  jobs = jobs.uniq.sort_by{|job| t = job.info[:started] || Open.mtime(job.path) || Time.now; Time === t ? t : Time.parse(t) }
+  jobs = jobs.uniq.sort_by{|job| [job, job.info]; t = job.info[:started] || Open.mtime(job.path) || Time.now; Time === t ? t : Time.parse(t) }
 
   data = trace_job_times(jobs, options[:fix_gap])
 
@@ -173,7 +173,7 @@ workdir.glob("**/command.batch").sort_by{|f| File.mtime(f)}.each do |fcmd|
     select = true if queued && deps && (running_jobs & deps).any? || queued && is_running && nodes.empty?
     select = true if running && nodes.any? && (exit_status.nil? && running_jobs.include?(id)) && (!deps || (running_jobs & deps).empty?)
     select = true if jobid && jobid.split(",").include?(id)
-    select = select && cmd.match(/#{search}/) if search
+    select = select && step_path.match(/#{search}/) if search
     next unless select
   elsif search
     select = false
@@ -206,9 +206,9 @@ workdir.glob("**/command.batch").sort_by{|f| File.mtime(f)}.each do |fcmd|
     puts Log.color(:magenta, "BATCH parameters: ")
     case job_batch_system
     when 'slurm'
-      text = CMD.cmd('grep "^#SBATCH" |tail -n +5', :in => Open.read(fcmd)).read.strip
+      text = CMD.cmd('grep "^#SBATCH" ', :in => Open.read(fcmd)).read.strip
     when 'lsf'
-      text = CMD.cmd('grep "^#BSUB" |tail -n +5', :in => Open.read(fcmd)).read.strip
+      text = CMD.cmd('grep "^#BSUB" ', :in => Open.read(fcmd)).read.strip
     else
       text = ""
     end
@@ -292,13 +292,17 @@ workdir.glob("**/command.batch").sort_by{|f| File.mtime(f)}.each do |fcmd|
     step_path = step_line.split(": ").last.strip
     step = Step.new step_path
     step.load_dependencies_from_info
+    has_bar = false
     (step.rec_dependencies + [step]).reverse.each do |j|
       next if j.done?
-      next unless j.file(:progress).exists?
-      bar = Log::ProgressBar.new
-      bar.load(j.file(:progress).yaml)
-      puts Log.color(:magenta, "Progress: ") + bar.report_msg + " " + Log.color(:yellow, j.task_signature)
+      if j.file(:progress).exists?
+        bar = Log::ProgressBar.new
+        bar.load(j.file(:progress).yaml)
+        puts Log.color(:magenta, "Progress: ") + bar.report_msg + " " + Log.color(:yellow, j.task_signature)
+        has_bar = true
+      end
     end
+    puts Log.color(:magenta, "Progress: ") + Log.color(:yellow, step.task_signature) + " #{step.status}" unless has_bar
   end
 end
 
@@ -19,14 +19,19 @@ $slurm_options = SOPT.get <<EOF
 -e--exclusive Make exclusive use of the node
 -hm--highmem Make use of highmem cores
 -wc--wipe_container* Wipe the jobs from the contain directory
+ -pd--purge_deps Purge job dependencies
 -CS--contain_and_sync Contain and sync to default locations
 -ci--copy_image When using a container directory, copy image there
 -t--tail Tail the logs
 -BPP--batch_procpath* Save Procpath performance for batch job; specify only options
 -q--queue* Queue
 -t--task_cpus* Tasks
- -W--workflows* Additional workflows
 -tm--time* Time
+ -m--mem* SLURM minimum memory
+ -mcpu--mem_per_cpu* SLURM minimum memory per CPU
+ -lin--licenses* SLURM licenses
+ -cons--constraint* SLURM constraint
+ -W--workflows* Additional workflows
 -OR--orchestration_rules* Orchestration rules
 -rmb--remove_batch_basedir Remove the SLURM working directory (command, STDIN, exit status, ...)
 EOF
@@ -18,14 +18,19 @@ $slurm_options = SOPT.get <<EOF
 -e--exclusive Make exclusive use of the node
 -hm--highmem Make use of highmem cores
 -wc--wipe_container* Wipe the jobs from the contain directory
+ -pd--purge_deps Purge job dependencies
 -CS--contain_and_sync Contain and sync to default locations
 -ci--copy_image When using a container directory, copy image there
 -t--tail Tail the logs
 -BPP--batch_procpath* Save Procpath performance for batch job; specify only options
 -q--queue* Queue
 -t--task_cpus* Tasks
- -W--workflows* Additional workflows
 -tm--time* Time
+ -m--mem* SLURM minimum memory
+ -mcpu--mem_per_cpu* SLURM minimum memory per CPU
+ -lin--licenses* SLURM licenses
+ -cons--constraint* SLURM constraint
+ -W--workflows* Additional workflows
 -rmb--remove_batch_dir Remove the batch working directory (command, STDIN, exit status, ...)
 -bs--batch_system* Batch system to use: auto, lsf, slurm (default is auto-detect)
 EOF
@@ -0,0 +1,212 @@
+#!/usr/bin/env ruby
+
+require 'rbbt-util'
+require 'rbbt/util/simpleopt'
+require 'rbbt/hpc'
+
+#$0 = "rbbt #{$previous_commands*""} #{ File.basename(__FILE__) }" if $previous_commands
+
+options = SOPT.setup <<EOF
+
+Clean error or aborted jobs
+
+$ rbbt slurm clean [options]
+
+-h--help Print this help
+-d--done Done jobs only
+-e--error Error jobs only
+-a--aborted SLURM aboted jobs
+-q--queued Queued jobs only
+-j--job* Job ids
+-s--search* Regular expression
+-t--tail* Show the last lines of the STDERR
+-BP--batch_parameters show batch parameters
+-dr--dry_run Do not erase anything
+EOF
+
+if options[:help]
+  if defined? rbbt_usage
+    rbbt_usage
+  else
+    puts SOPT.doc
+  end
+  exit 0
+end
+
+batch_system = options.delete :batch_system
+batch_system ||= 'auto'
+
+HPC::BATCH_MODULE = HPC.batch_system batch_system
+
+raise ParameterException.new("Could not detect batch_system: #{Misc.fingerprint batch_system}") if HPC::BATCH_MODULE.nil?
+
+Log.severity = 4
+done, error, aborted, queued, jobid, search, tail, batch_parameters, dry_run = options.values_at :done, :error, :aborted, :queued, :job, :search, :tail, :batch_parameters, :dry_run
+
+workdir = File.expand_path('~/rbbt-batch')
+Path.setup(workdir)
+
+running_jobs = begin
+                 squeue_txt = HPC::BATCH_MODULE.job_status
+                 squeue_txt.split("\n").collect{|l| l.to_i.to_s}
+               rescue
+                 Log.warn "Cannot determine if jobs are running, they will seem to be all alive (Job ID in green)"
+                 squeue_txt = nil
+                 $norunningjobs = true
+                 []
+               end
+
+if squeue_txt
+  job_nodes = {}
+  squeue_txt.split("\n").each do |line|
+    parts = line.strip.split(/\s+/)
+    job_nodes[parts.first] = parts.last.split(",")
+  end
+else
+  job_nodes = nil
+end
+
+count = 0
+workdir.glob("**/command.batch").sort_by{|f| File.mtime(f)}.each do |fcmd|
+  dir = File.dirname(fcmd)
+  command_txt = Open.read(fcmd)
+
+  if m = command_txt.match(/#CMD: (.*)/)
+    cmd = m[1]
+  else
+    cmd = nil
+  end
+
+  if m = command_txt.match(/# Run command\n(.*?)\n/im)
+    exe = m[1]
+  else
+    exe = nil
+  end
+
+  if m = command_txt.match(/^CONTAINER_DIR=(.*)/)
+    container_home = m[1]
+  else
+    container_home = nil
+  end
+
+  if m = command_txt.match(/^BATCH_SYSTEM=(.*)/)
+    job_batch_system = m[1].downcase
+  else
+    job_batch_system = nil
+  end
+
+  different_system = job_batch_system != batch_system
+
+  if File.exists?(fid = File.join(dir, 'job.id'))
+    id = Open.read(fid).chomp
+  else
+    id = nil
+  end
+
+  if File.exists?(fstatus = File.join(dir, 'exit.status'))
+    exit_status = Open.read(fstatus).to_i
+  else
+    exit_status = nil
+  end
+
+  if File.exists?(fstatus = File.join(dir, 'job.status'))
+    fstatus_txt = Open.read(fstatus)
+    begin
+      if job_batch_system == "lsf"
+        nodes = Open.read(fstatus).split("\n").last.split(/\s+/)[5].split(",")
+      else
+        nodes = Open.read(fstatus).split("\n").last.split(/\s+/).last.split(",")
+      end
+    rescue
+      nodes = []
+    end
+  elsif job_nodes[id]
+    nodes = job_nodes[id]
+  else
+    nodes = []
+  end
+
+  if File.exists?(File.join(dir, 'std.out'))
+    outt = File.mtime File.join(dir, 'std.out')
+    errt = File.mtime File.join(dir, 'std.err')
+    time_diff = Time.now - [outt, errt].max
+  end
+
+  fdep = File.join(dir, 'dependencies.list')
+  deps = Open.read(fdep).split("\n") if File.exists?(fdep)
+
+  fcadep = File.join(dir, 'canfail_dependencies.list')
+  cadeps = Open.read(fcadep).split("\n") if File.exists?(fcadep)
+
+  aborted = error = true if aborted.nil? && error.nil?
+  #if done || error || aborted || running || queued || jobid || search
+  #  select = false
+  #  select = true if done && exit_status && exit_status.to_i == 0
+  #  select = true if error && exit_status && exit_status.to_i != 0
+  #  select = true if aborted && (exit_status.nil? && ! running_jobs.include?(id))
+  #  select = select && jobid.split(",").include?(id) if jobid
+  #  select = select && cmd.match(/#{search}/) if search
+  #  next unless select
+  #end
+
+  if done || error || aborted || queued || jobid
+    select = false
+    select = true if done && exit_status == 0
+    select = true if error && exit_status && exit_status != 0
+    select = true if aborted && (exit_status.nil? && ! running_jobs.include?(id))
+    is_running = exit_status.nil? && ( (running_jobs.include?(id) && (!deps || (running_jobs & deps).empty?)) || different_system )
+    select = true if queued && deps && (running_jobs & deps).any? || queued && is_running && nodes.empty?
+    select = true if jobid && jobid.split(",").include?(id)
+    select = select && cmd.match(/#{search}/) if search
+    next unless select
+  elsif search
+    select = false
+    select = true if search && cmd.match(/#{search}/)
+    next unless select
+  end
+
+
+  puts Log.color(:yellow, "**ERASING**")
+  puts Log.color :blue, dir
+  puts Log.color(:magenta, "Creation: ") << File.mtime(File.join(dir, 'command.batch')).to_s
+  puts Log.color(:magenta, "Done: ") << File.mtime(File.join(dir, 'exit.status')).to_s if File.exist?(File.join(dir, 'exit.status'))
+  puts Log.color(:magenta, "Exec: ") << (exe || "Missing")
+  puts Log.color(:magenta, "CMD: ") << (Log.color(:yellow, cmd) || "Missing")
+  puts Log.color(:magenta, "HOME: ") << Log.color(:yellow, container_home) if container_home
+  puts Log.color(:magenta, "Job ID: ") << (exit_status ? (exit_status == 0 ? Log.color(:green, "Done") : Log.color(:red, "Error")) + " (#{ id })" : (running_jobs.include?(id) || $norunningjobs ? Log.color(:green, id) : Log.color(:red, id) ))
+  puts Log.color(:magenta, "Dependencies: ") << deps * ", " if deps
+  puts Log.color(:magenta, "Dependencies (can fail): ") << cadeps * ", " if cadeps
+  puts Log.color(:magenta, "Nodes: ") << nodes * ", "
+  puts Log.color(:magenta, "Output: ") << File.exists?(File.join(dir, 'std.out')).to_s << (id.nil? ? "" : " (last update " + Misc.format_seconds(time_diff) + " ago)")
+
+  if options[:batch_parameters]
+    puts Log.color(:magenta, "BATCH parameters: ")
+    case job_batch_system
+    when 'slurm'
+      puts Log.color :blue, CMD.cmd('grep "^#SBATCH" |tail -n +6', :in => Open.read(fcmd)).read.strip
+    when 'lsf'
+      puts Log.color :blue, CMD.cmd('grep "^#BSUB" |tail -n +6', :in => Open.read(fcmd)).read.strip
+    end
+  end
+
+  if tail && File.exists?(File.join(dir, 'std.err'))
+    if exit_status && exit_status != 0
+      puts Log.color(:magenta, "First error or exception found: ")
+      puts CMD.cmd("grep -i -w 'error\\|[a-z]*exception' #{File.join(dir, 'std.err')} -A #{tail.to_i} |head -n #{tail.to_i}", :no_fail => true).read
+    elsif exit_status
+      puts Log.color(:magenta, "Completed jobs: ")
+      puts CMD.cmd("grep -i -w 'Completed step' #{File.join(dir, 'std.err')} | grep -v 'Retrying dep.' | tail -n #{tail.to_i}", :no_fail => true).read
+    else
+      puts Log.color(:magenta, "Log tail: ")
+      puts CMD.cmd("tail -n #{tail.to_i} #{File.join(dir, 'std.err')}").read
+    end
+  end
+
+  count += 1
+
+  Open.rm_rf dir unless dry_run
+end
+
+puts
+puts "Found #{count} jobs"