rbbt-util 5.28.5 → 5.28.10

Files changed (70)
  1. checksums.yaml +4 -4
  2. data/lib/rbbt/entity.rb +1 -1
  3. data/lib/rbbt/fix_width_table.rb +5 -4
  4. data/lib/rbbt/persist.rb +1 -0
  5. data/lib/rbbt/persist/tsv/adapter.rb +0 -1
  6. data/lib/rbbt/persist/tsv/fix_width_table.rb +5 -3
  7. data/lib/rbbt/tsv/accessor.rb +10 -2
  8. data/lib/rbbt/tsv/dumper.rb +14 -2
  9. data/lib/rbbt/tsv/parallel/traverse.rb +3 -0
  10. data/lib/rbbt/tsv/util.rb +5 -1
  11. data/lib/rbbt/util/cmd.rb +1 -0
  12. data/lib/rbbt/util/config.rb +2 -1
  13. data/lib/rbbt/util/misc/bgzf.rb +1 -1
  14. data/lib/rbbt/util/misc/inspect.rb +1 -1
  15. data/lib/rbbt/util/misc/system.rb +1 -1
  16. data/lib/rbbt/util/named_array.rb +1 -1
  17. data/lib/rbbt/util/open.rb +18 -17
  18. data/lib/rbbt/workflow.rb +1 -0
  19. data/lib/rbbt/workflow/accessor.rb +94 -93
  20. data/lib/rbbt/workflow/definition.rb +8 -4
  21. data/lib/rbbt/workflow/integration/ansible.rb +53 -0
  22. data/lib/rbbt/workflow/integration/ansible/workflow.rb +60 -0
  23. data/lib/rbbt/workflow/remote_workflow/driver/rest.rb +5 -1
  24. data/lib/rbbt/workflow/step.rb +22 -5
  25. data/lib/rbbt/workflow/step/accessor.rb +7 -5
  26. data/lib/rbbt/workflow/usage.rb +1 -1
  27. data/lib/rbbt/workflow/util/archive.rb +3 -0
  28. data/lib/rbbt/workflow/util/orchestrator.rb +228 -0
  29. data/lib/rbbt/workflow/util/trace.rb +182 -0
  30. data/share/rbbt_commands/ansible +55 -0
  31. data/share/rbbt_commands/purge_job +2 -5
  32. data/share/rbbt_commands/system/status +1 -1
  33. data/share/rbbt_commands/workflow/forget_deps +10 -3
  34. data/share/rbbt_commands/workflow/server +2 -0
  35. data/test/rbbt/association/test_index.rb +6 -6
  36. data/test/rbbt/knowledge_base/test_query.rb +3 -3
  37. data/test/rbbt/knowledge_base/test_registry.rb +1 -1
  38. data/test/rbbt/persist/tsv/test_cdb.rb +0 -7
  39. data/test/rbbt/persist/tsv/test_kyotocabinet.rb +2 -8
  40. data/test/rbbt/persist/tsv/test_leveldb.rb +0 -6
  41. data/test/rbbt/persist/tsv/test_lmdb.rb +0 -6
  42. data/test/rbbt/persist/tsv/test_tokyocabinet.rb +15 -14
  43. data/test/rbbt/test_entity.rb +0 -1
  44. data/test/rbbt/test_knowledge_base.rb +3 -4
  45. data/test/rbbt/test_persist.rb +10 -6
  46. data/test/rbbt/test_workflow.rb +17 -16
  47. data/test/rbbt/tsv/parallel/test_traverse.rb +14 -0
  48. data/test/rbbt/tsv/test_accessor.rb +11 -0
  49. data/test/rbbt/tsv/test_attach.rb +0 -2
  50. data/test/rbbt/tsv/test_index.rb +6 -7
  51. data/test/rbbt/tsv/test_manipulate.rb +22 -3
  52. data/test/rbbt/util/R/test_model.rb +2 -1
  53. data/test/rbbt/util/R/test_plot.rb +0 -2
  54. data/test/rbbt/util/concurrency/test_processes.rb +1 -1
  55. data/test/rbbt/util/misc/test_bgzf.rb +11 -7
  56. data/test/rbbt/util/misc/test_lock.rb +0 -1
  57. data/test/rbbt/util/misc/test_multipart_payload.rb +1 -1
  58. data/test/rbbt/util/misc/test_pipes.rb +0 -5
  59. data/test/rbbt/util/test_R.rb +1 -0
  60. data/test/rbbt/util/test_log.rb +4 -6
  61. data/test/rbbt/util/test_misc.rb +0 -2
  62. data/test/rbbt/util/test_open.rb +0 -1
  63. data/test/rbbt/util/test_python.rb +17 -1
  64. data/test/rbbt/workflow/test_remote_workflow.rb +1 -1
  65. data/test/rbbt/workflow/test_schedule.rb +0 -0
  66. data/test/rbbt/workflow/test_step.rb +8 -3
  67. data/test/rbbt/workflow/util/test_orchestrator.rb +273 -0
  68. metadata +11 -5
  69. data/lib/rbbt/workflow/schedule.rb +0 -238
  70. data/test/rbbt/workflow/remote/test_client.rb +0 -56
data/lib/rbbt/workflow/definition.rb
@@ -44,6 +44,7 @@ module Workflow
 
   def dep(*dependency, &block)
     @dependencies ||= []
+    dependency = [tasks.keys.last] if dependency.empty? && ! block_given?
     if block_given?
       if dependency.any?
 
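A quick sketch of what this default enables in a workflow definition: a bare `dep` now wires the task to the task defined immediately before it. The workflow and task names below are illustrative, not taken from the gem.

    require 'rbbt/workflow'

    module Pipeline
      extend Workflow

      task :download => :text do
        "raw data"
      end

      # With the change above, a bare `dep` behaves like `dep :download`,
      # i.e. a dependency on the most recently defined task.
      dep
      task :normalize => :text do
        dependencies.first.load.upcase
      end
    end

    # Pipeline.job(:normalize, 'example').run  #=> "RAW DATA"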
@@ -81,9 +82,10 @@ module Workflow
       if forget
         remove = config :remove_dep_tasks, :remove_dep_tasks, :default => REMOVE_DEP_TASKS
         self.archive_deps
+        self.copy_files_dir
         self.dependencies = self.dependencies - [dep]
         Open.rm_rf self.files_dir if Open.exist? self.files_dir
-        FileUtils.cp_r dep.files_dir, self.files_dir if Open.exist? dep.files_dir
+        FileUtils.cp_r dep.files_dir, self.files_dir if Open.exist?(dep.files_dir)
         Open.ln_h dep.path, self.tmp_path
         case remove.to_s
         when 'true'
@@ -92,8 +94,10 @@ module Workflow
           dep.recursive_clean
         end
       else
-        Open.rm_rf self.files_dir
-        Open.link dep.files_dir, self.files_dir
+        if Open.exists?(dep.files_dir)
+          Open.rm_rf self.files_dir
+          Open.link dep.files_dir, self.files_dir
+        end
         Open.link dep.path, self.path
       end
       nil
@@ -126,7 +130,7 @@ module Workflow
       :resumable => consume_resumable,
       :input_options => consume_input_options
     }
-
+ 
     task_info[:extension] = case task_info[:result_type].to_s
                             when "tsv"
                               "tsv"
data/lib/rbbt/workflow/integration/ansible.rb
@@ -0,0 +1,53 @@
+require_relative 'ansible/workflow'
+require 'rbbt/workflow/usage'
+
+module Ansible
+  def self.play(playbook, inventory = nil)
+    inventory = Rbbt.etc.ansible_inventory.find
+    Log.with_severity 0 do
+      TmpFile.with_file do |tmp|
+        if Hash === playbook
+          Open.write(tmp, [playbook].to_yaml)
+          playbook = tmp
+        end
+        CMD.cmd_log("ansible-playbook -i #{inventory} #{playbook}")
+      end
+    end
+  end
+
+  def self.clean_symbols(hash)
+    new = {}
+    hash.each do |key,value|
+      key = key.to_s
+      value = case value
+              when Symbol
+                value.to_s
+              when Hash
+                self.clean_symbols(value)
+              else
+                value
+              end
+      new[key] = value
+    end
+    new
+  end
+
+  def self.workflow2playbook(workflow, task, options = {})
+    job_options = workflow.get_SOPT(workflow.tasks[task])
+
+    tasks = workflow.job(task, nil, job_options).exec
+
+    hosts = options[:hosts] || 'localhost'
+
+    clean_tasks = tasks.collect{|task| self.clean_symbols task }
+    {"hosts" => hosts, "tasks" => clean_tasks}
+  end
+
+  def self.playbook(file, task = nil, options = {})
+    task = 'default' if task.nil?
+
+    workflow = Workflow.require_workflow file
+    task = workflow.tasks.keys.last if workflow.tasks[task].nil?
+    workflow2playbook workflow, task, options
+  end
+end
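A hedged usage sketch for the new module: build a playbook hash from a workflow file and hand it to ansible-playbook. The file name and hosts value are assumptions; the inventory comes from Rbbt.etc.ansible_inventory and task inputs are read from the command line via get_SOPT.

    require 'rbbt/workflow/integration/ansible'

    # 'provision_workflow.rb' is a hypothetical workflow file written with
    # Ansible::AnsibleWorkflow (next file); with no task name given, the last
    # defined task is used.
    playbook = Ansible.playbook('provision_workflow.rb', nil, :hosts => 'webservers')
    Ansible.play(playbook)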
data/lib/rbbt/workflow/integration/ansible/workflow.rb
@@ -0,0 +1,60 @@
+require 'rbbt/workflow'
+
+module Ansible
+  module AnsibleWorkflow
+    def self.extended(object)
+      class << object
+        attr_accessor :ans_tasks
+      end
+
+      object.helper :register do |task_info|
+        desc = task.description if task
+        name ||= desc || short_path
+        task_info = {"name" => name}.merge(task_info)
+        @ans_tasks ||= []
+        @ans_tasks << task_info
+        task
+      end
+
+      object.helper :ans do |name, info|
+        register({ name => info})
+      end
+
+      object.helper :add do |name, info|
+        @ans_tasks.last[name.to_s] = info
+      end
+
+      object.helper :shell do |cmd|
+        register({"shell" => cmd.strip})
+      end
+
+      object.helper :sudo do |cmd|
+        register({"shell" => cmd.strip, "become" => 'yes'})
+      end
+
+      object.helper :singularity do |scmd|
+        img = config :singularity_img, :build, :test, :small, :default => '/data/img/singularity/rbbt/rbbt.simg'
+        container = config :singularity_container, :build, :test, :small, :default => '/data/img/sandbox/mvazque2/'
+        cmd = <<-EOF
+          singularity exec -C -H '#{container}' '#{img}' #{scmd}
+        EOF
+        register({"shell" => cmd.strip, "name" => short_path})
+      end
+
+
+      object.helper :produce_task do
+        @ans_tasks
+      end
+    end
+
+    def play(name = nil, &block)
+      name = Misc.snake_case(@description) if name.nil?
+      task name => :yaml do |*args|
+        self.instance_exec *args, &block
+        dependencies.inject([]){|acc,dep| acc += dep.load } + produce_task
+      end
+    end
+
+  end
+end
+
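As a sketch of how a workflow might use these helpers (module name, task name, and shell commands are illustrative assumptions):

    require 'rbbt/workflow'
    require 'rbbt/workflow/integration/ansible/workflow'

    module Provision
      extend Workflow
      extend Ansible::AnsibleWorkflow

      play :base_packages do
        sudo "apt-get install -y build-essential"    # registered as a shell task with become: yes
        add :when, "ansible_os_family == 'Debian'"   # annotates the task registered just above
      end
    end

Each play becomes a :yaml task whose result is the accumulated list of Ansible tasks, which Ansible.workflow2playbook then wraps into {"hosts" => ..., "tasks" => [...]}.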
data/lib/rbbt/workflow/remote_workflow/driver/rest.rb
@@ -85,7 +85,11 @@ class RemoteWorkflow
 
     RemoteWorkflow::REST.__prepare_inputs_for_restclient(params)
     name = RemoteWorkflow.capture_exception do
-      RestClient.post(self.encode(url), params)
+      begin
+        RestClient.post(self.encode(url), params)
+      rescue RestClient::MovedPermanently, RestClient::Found, RestClient::TemporaryRedirect
+        raise RbbtException, "REST end-point moved to: #{$!.response.headers[:location]}"
+      end
     end
 
     Log.debug{ "RestClient jobname returned for #{ url } - #{Misc.fingerprint params}: #{name}" }
data/lib/rbbt/workflow/step.rb
@@ -84,6 +84,7 @@ class Step
   end
 
   def load_dependencies_from_info
+    relocated = nil
     @dependencies = (self.info[:dependencies] || []).collect do |task,name,dep_path|
       if Open.exists?(dep_path) || Open.exists?(dep_path + '.info')
         Workflow._load_step dep_path
@@ -108,6 +109,14 @@ class Step
     @inputs || []
   end
 
+  def copy_files_dir
+    if File.symlink?(self.files_dir)
+      realpath = Open.realpath(self.files_dir)
+      Open.rm self.files_dir
+      Open.cp realpath, self.files_dir
+    end
+  end
+
   def archive_deps
     self.set_info :archived_info, archived_info
     self.set_info :archived_dependencies, info[:dependencies]
@@ -145,6 +154,11 @@ class Step
     all_inputs
   end
 
+  def dependencies=(dependencies)
+    @dependencies = dependencies
+    set_info :dependencies, dependencies.collect{|dep| [dep.task_name, dep.name, dep.path]}
+  end
+
   def recursive_inputs
     if NamedArray === inputs
       i = {}
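Together with copy_files_dir and archive_deps above, the new dependencies= writer lets a finished step detach a dependency while keeping a record of it; the orchestrator's erase_job_dependencies (further down) uses exactly this sequence. A sketch, with job a completed Step and dep one of its dependencies:

    job.archive_deps                             # record dependency info under :archived_info / :archived_dependencies
    job.copy_files_dir                           # turn a symlinked files_dir into a real copy before detaching
    job.dependencies = job.dependencies - [dep]  # also rewrites info[:dependencies]
    dep.clean                                    # the dependency's own files can now be removed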
@@ -411,7 +425,7 @@ class Step
       return
     end
 
-    if (Open.exists?(path) or Open.broken_link?(path)) or Open.exists?(pid_file) or Open.exists?(info_file) or Open.exists?(files_dir)
+    if (Open.exists?(path) or Open.broken_link?(path)) or Open.exists?(pid_file) or Open.exists?(info_file) or Open.exists?(files_dir) or Open.broken_link?(files_dir)
 
       @result = nil
       @pid = nil
@@ -419,8 +433,8 @@ class Step
       Misc.insist do
         Open.rm info_file if Open.exists?(info_file)
         Open.rm md5_file if Open.exists?(md5_file)
-        Open.rm path if (Open.exists?(path) or Open.broken_link?(path))
-        Open.rm_rf files_dir if Open.exists?(files_dir)
+        Open.rm path if (Open.exists?(path) || Open.broken_link?(path))
+        Open.rm_rf files_dir if Open.exists?(files_dir) || Open.broken_link?(files_dir)
         Open.rm pid_file if Open.exists?(pid_file)
         Open.rm tmp_path if Open.exists?(tmp_path)
       end
@@ -454,12 +468,15 @@ class Step
     return [] if dependencies.nil? or dependencies.empty?
 
     new_dependencies = []
+    archived_deps = self.info[:archived_info] ? self.info[:archived_info].keys : []
+
     dependencies.each{|step|
       #next if self.done? && Open.exists?(info_file) && info[:dependencies] && info[:dependencies].select{|task,name,path| path == step.path }.empty?
-      next if seen.include? step
+      next if archived_deps.include? step.path
+      next if seen.include? step.path
       next if self.done? && need_run && ! updatable?
 
-      r = step.rec_dependencies(need_run, new_dependencies)
+      r = step.rec_dependencies(need_run, new_dependencies.collect{|d| d.path})
       new_dependencies.concat r
       new_dependencies << step
     }
data/lib/rbbt/workflow/step/accessor.rb
@@ -93,8 +93,8 @@ class Step
       else
         Open.write(path + '.read', value.to_s)
       end
-    when Step === v
-      v = v.produce.load
+    when Step === value
+      value = value.produce.load
     else
       Open.write(path, value.to_s)
     end
@@ -110,22 +110,24 @@ class Step
     task_info = workflow.task_info(task_name)
     input_types = task_info[:input_types]
     task_inputs = task_info[:inputs]
+    input_defaults = task_info[:input_defaults]
 
     inputs = {}
     job.recursive_inputs.zip(job.recursive_inputs.fields).each do |value,name|
       next unless task_inputs.include? name.to_sym
       next if options and ! options.include?(name)
       next if value.nil?
+      next if input_defaults[name] == value
       inputs[name] = value
     end
 
-    if options.include? 'override_dependencies'
+    if options && options.include?('override_dependencies')
       inputs.merge!(:override_dependencies => open[:override_dependencies])
       input_types = IndiferentHash.setup(input_types.merge(:override_dependencies => :array))
     end
     save_inputs(inputs, input_types, dir)
 
-    inputs.any?
+    inputs.keys
   end
 
   def name
@@ -437,7 +439,7 @@ class Step
     rec_dependencies = self.rec_dependencies
     return [] if rec_dependencies.empty?
     canfail_paths = self.canfail_paths
-    dep = rec_dependencies.select{|d| d.task_name.to_s == 'contamination'}.first
+
     dirty_files = rec_dependencies.reject{|dep|
       (defined?(WorkflowRemoteClient) && WorkflowRemoteClient::RemoteStep === dep) ||
       ! Open.exists?(dep.info_file) ||
data/lib/rbbt/workflow/usage.rb
@@ -240,7 +240,7 @@ module Workflow
 
     inputs.each do |input, type, file|
       case type
-      when :tsv, :array, :text
+      when :tsv, :array, :text, :file
         lines = file.read.split("\n")
         head = lines[0..5].compact * "\n\n"
         head = head[0..500]
data/lib/rbbt/workflow/util/archive.rb
@@ -67,8 +67,10 @@ class Step
       next unless File.exists?(step.path)
       job_files << step.path
       job_files << step.info_file if File.exists?(step.info_file)
+      job_files << Step.md5_file(step.path) if File.exists?(Step.md5_file step.path)
       job_file_dir_content = Dir.glob(step.files_dir + '/**/*')
       job_files += job_file_dir_content
+      job_files << step.files_dir if File.exists?(step.files_dir)
       rec_dependencies = Set.new
 
       next unless recursive
@@ -262,6 +264,7 @@ puts resource[path].find(search_path)
 
     job_files.each do |file|
       begin
+        Log.debug "Purging #{file}"
        Open.rm_rf file if Open.exists?(file)
      rescue
        Log.warn "Could not erase '#{file}': #{$!.message}"
data/lib/rbbt/workflow/util/orchestrator.rb
@@ -0,0 +1,228 @@
+require 'rbbt/workflow'
+
+module Workflow
+  class Orchestrator
+
+    def self.job_workload(job)
+      workload = {job => []}
+      return workload if job.done?
+
+      job.dependencies.each do |dep|
+        next if dep.done?
+        workload.merge!(job_workload(dep))
+        workload[job] += workload[dep]
+        workload[job] << dep
+        workload[job].uniq!
+      end
+
+      job.input_dependencies.each do |dep|
+        next if dep.done?
+        workload.merge!(job_workload(dep))
+        workload[job] += workload[dep]
+        workload[job] << dep
+        workload[job].uniq!
+      end
+
+      workload
+    end
+
+    def self.job_rules(rules, job)
+      workflow = job.workflow.to_s
+      task_name = job.task_name.to_s
+
+      return IndiferentHash.setup(rules["defaults"]) unless rules[workflow]
+      return IndiferentHash.setup(rules["defaults"]) unless rules[workflow][task_name]
+
+      job_rules = IndiferentHash.setup(rules[workflow][task_name])
+      rules["defaults"].each{|k,v| job_rules[k] = v if job_rules[k].nil? } if rules["defaults"]
+      job_rules
+    end
+
+    def self.purge_duplicates(candidates)
+      seen = Set.new
+      candidates.select do |job|
+        if seen.include? job.path
+          false
+        else
+          seen << job.path
+          true
+        end
+      end
+    end
+
+    def self.job_resources(rules, job)
+      resources = (job_rules(rules, job) || {})["resources"] || {}
+
+      IndiferentHash.setup(resources)
+
+      default_resources = rules["default_resources"] || rules["defaults"]["resources"]
+      default_resources.each{|k,v| resources[k] ||= v } if default_resources
+
+      resources
+    end
+
+    def self.sort_candidates(candidates, rules)
+      seen = Set.new
+      candidates.sort_by do |job|
+        - job_resources(rules, job).values.inject(0){|acc,e| acc += e}
+      end
+    end
+
+    def self.candidates(workload, rules)
+      if rules.empty?
+        candidates = workload.select{|k,v| v.empty? }.
+          collect{|k,v| k}.
+          reject{|k| k.done? }
+      else
+        candidates = workload. #select{|k,v| Orchestrator.job_rules(rules, k) }.
+          select{|k,v| v.empty? }.
+          collect{|k,v| k }.
+          reject{|k| k.done? }
+      end
+
+      top_level = workload.keys - workload.values.flatten
+
+      candidates = purge_duplicates candidates
+      candidates = sort_candidates candidates, rules
+
+      candidates
+    end
+
+    attr_accessor :available_resources, :resources_requested, :resources_used, :timer
+
+    def initialize(timer = 5, available_resources = {})
+      @timer = timer
+      @available_resources = IndiferentHash.setup(available_resources)
+      @resources_requested = IndiferentHash.setup({})
+      @resources_used = IndiferentHash.setup({})
+    end
+
+    def release_resources(job)
+      if resources_used[job]
+        Log.debug "Orchestrator releasing resouces from #{job.path}"
+        resources_used[job].each do |resource,value|
+          next if resource == 'size'
+          resources_requested[resource] -= value.to_i
+        end
+        resources_used.delete job
+      end
+    end
+
+    def check_resources(rules, job)
+      resources = Orchestrator.job_resources(rules, job)
+
+      limit_resources = resources.select{|resource,value| available_resources[resource] && ((resources_requested[resource] || 0) + value) > available_resources[resource] }.collect{|resource,v| resource }
+      if limit_resources.any?
+        Log.debug "Orchestrator waiting on #{job.path} due to #{limit_resources * ", "}"
+      else
+
+        resources_used[job] = resources
+        resources.each do |resource,value|
+          resources_requested[resource] ||= 0
+          resources_requested[resource] += value.to_i
+        end
+        Log.low "Orchestrator producing #{job.path} with resources #{resources}"
+
+        return yield
+      end
+    end
+
+    def run_with_rules(rules, job)
+      job_rules = Orchestrator.job_rules(rules, job)
+
+      Rbbt::Config.with_config do
+        job_rules[:config_keys].each do |config|
+          Rbbt::Config.process_config config
+        end if job_rules && job_rules[:config_keys]
+
+        log = job_rules[:log] if job_rules
+        log = Log.severity if log.nil?
+        Log.with_severity log do
+          job.produce(false, true)
+        end
+      end
+    end
+
+    def erase_job_dependencies(job, rules, all_jobs, top_level_jobs)
+      job.dependencies.each do |dep|
+        next if top_level_jobs.include? dep.path
+        next unless Orchestrator.job_rules(rules, dep)["erase"].to_s == 'true'
+
+        dep_path = dep.path
+        parents = all_jobs.select do |parent|
+          paths = parent.info[:dependencies].nil? ? parent.dependencies.collect{|d| d.path } : parent.info[:dependencies].collect{|d| d.last }
+          paths.include? dep_path
+        end
+
+        next unless parents.reject{|parent| parent.done? }.empty?
+
+        parents.each do |parent|
+          Log.high "Erasing #{dep.path} from #{parent.path}"
+          parent.archive_deps
+          parent.copy_files_dir
+          parent.dependencies = parent.dependencies - [dep]
+        end
+        dep.clean
+      end
+    end
+
+    def process(rules, jobs)
+      begin
+
+        workload = jobs.inject({}) do |acc,job|
+          Orchestrator.job_workload(job).each do |j,d|
+            acc[j] = d unless acc.keys.collect{|k| k.path }.include? j.path
+          end
+          acc
+        end
+        all_jobs = workload.keys
+
+        top_level_jobs = jobs.collect{|job| job.path }
+        while workload.any?
+
+          candidates = resources_used.keys + Orchestrator.candidates(workload, rules)
+          raise "No candidates and no running jobs" if candidates.empty?
+
+          candidates.each do |job|
+            case
+            when (job.error? || job.aborted?)
+              begin
+                if job.recoverable_error?
+                  job.clean
+                  raise TryAgain
+                else
+                  next
+                end
+              ensure
+                Log.warn "Releases resources from failed job: #{job.path}"
+                release_resources(job)
+              end
+            when job.done?
+              Log.debug "Orchestrator done #{job.path}"
+              release_resources(job)
+              erase_job_dependencies(job, rules, all_jobs, top_level_jobs)
+
+            when job.running?
+              next
+
+            else
+              check_resources(rules, job) do
+                run_with_rules(rules, job)
+              end
+            end
+          end
+
+          new_workload = {}
+          workload.each do |k,v|
+            next if k.done?
+            new_workload[k] = v.reject{|d| d.done? || (d.error? && ! d.recoverable_error?)}
+          end
+          workload = new_workload
+          sleep timer
+        end
+      rescue TryAgain
+        retry
+      end
+    end
+  end
+end
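The rules hash mixes a "defaults" section with per-workflow, per-task entries, as job_rules and job_resources read it: "resources" entries are matched against the pool given to the constructor, and "erase" marks dependencies that can be dropped once every parent is done. A hedged sketch of driving it (workflow name, task names, and resource figures are illustrative):

    require 'rbbt/workflow'
    require 'rbbt/workflow/util/orchestrator'

    rules = {
      "defaults" => { "resources" => { "cpus" => 1 } },
      "MyWorkflow" => {
        "heavy_task"   => { "resources" => { "cpus" => 8 } },
        "intermediate" => { "erase" => true }
      }
    }

    jobs = %w(sample1 sample2).collect{|name| MyWorkflow.job(:heavy_task, name) }

    # Poll every 5 seconds, never requesting more than 16 cpus at once.
    orchestrator = Workflow::Orchestrator.new(5, "cpus" => 16)
    orchestrator.process(rules, jobs)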