rbbt-util 5.32.27 → 5.32.28

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: ffa357551766aa6ae6da35c586949c0ff05814171304ca5a874689418c7c48e2
-  data.tar.gz: 5e1e419fcf846fc8ead12a77e6713f198a74d516c0262c34b4d86380a58af092
+  metadata.gz: a680ce3f6d91a1b0ff8060ee18b8e68efbd43df56f238869b30f6f88617d23d9
+  data.tar.gz: d6c8c724f15d4ce4c910ff9db6becbd0587c8dc04598abcc934884a8368dd93e
 SHA512:
-  metadata.gz: a6afd86e7698e5b7e4588412ed678497c825242616bc75d2fa2ab835bc325a8a58826da164dea16cd5b4bbf81697ac5201bb347af220bab5ce238f8e0ce4edcb
-  data.tar.gz: 3e86fe7c2d84cc7a04cdd348f03a6323b5467723182057ec028b5d611b3dd200fce41949f1502ca59b0a295b5664f1d6ba259d27a196039f873261e9faf7a1a6
+  metadata.gz: 9b90c640f51de96d469af4b442d9ba66aa89a592c7776196058da3b20e13b07ef50f67e07018435b12bed13492c3bf9d9abf4de565902094a3d58be4ab628fd5
+  data.tar.gz: 523d7234c59bc1781d47827daebb07870913b8dec9851110d5c2e0c92441903601965f9a7bb4b494b0271d64e09ad0f3b80b254176ee3495744c2bbf1f34f714
@@ -94,6 +94,10 @@ module AnnotatedArray
     end
   end
 
+  def +(other)
+    self.annotate super(other)
+  end
+
 
   def reject
     res = []
@@ -220,6 +220,35 @@ module Annotated
   def marshal_dump
     Annotated.purge(self).to_sym.to_s
   end
+
+  def self.to_hash(e)
+    hash = {}
+    if Array === e && AnnotatedArray === e
+      hash[:literal] = Annotated.purge(e)
+      hash[:info] = e.info
+    elsif Array === e
+      hash = e.collect do |_e|
+        _hash = {}
+        _hash[:literal] = _e
+        _hash[:info] = _e.info
+        _hash
+      end
+    else
+      hash[:literal] = e
+      hash[:info] = e.info
+    end
+    hash
+  end
+
+  def self.load_hash(hash)
+    literal = hash[:literal]
+    info = hash[:info]
+    info[:annotation_types].each do |type|
+      type = Kernel.const_get(type) if String === type
+      type.setup(literal, info)
+    end
+    literal
+  end
 end
 
 class String
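The new `Annotated.to_hash` / `Annotated.load_hash` pair serializes an annotated object (or an annotated array) into a plain Hash of `:literal` plus `:info`, and rebuilds it by re-running `setup` for every module listed under `:annotation_types`. A minimal round-trip sketch, assuming the gem's Annotation DSL (`extend Annotation` plus `annotation` to declare attributes); the module name and the `:code` attribute are made up for illustration:

    require 'rbbt-util'
    require 'rbbt/annotations'

    # Hypothetical annotation type, only for illustration
    module MyAnnotation
      extend Annotation
      annotation :code
    end

    str = "TP53"
    MyAnnotation.setup(str, :code => "example")

    hash = Annotated.to_hash(str)    # {:literal => "TP53", :info => {..., :annotation_types => [...]}}
    copy = Annotated.load_hash(hash) # literal re-annotated from the stored info
    copy.code                        # => "example"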
data/lib/rbbt/entity.rb CHANGED
@@ -164,7 +164,9 @@ module Entity
       define_method single_name, &block
       define_method name do |*args|
         if Array === self
-          self.collect{|e| e.send(single_name, *args)}
+          res = self.collect{|e| e.send(single_name, *args)}
+          res.first.annotate(res) if Annotated === res.first && type == :single2array
+          res
         else
           self.send(single_name, *args)
         end
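With this change, a `:single2array` property called on an array of entities re-annotates the collected result, so the returned array keeps the entity type of its elements. A hedged sketch of the property DSL this touches, assuming `property ... => :single2array` is declared as in the gem's Entity module (the entity name and property are hypothetical):

    require 'rbbt-util'
    require 'rbbt/entity'

    # Illustrative entity module
    module Sample
      extend Entity

      # Defined per element, but callable on arrays of Sample entities
      property :partner => :single2array do
        Sample.setup(self.reverse)
      end
    end

    list = Sample.setup(["ab", "cd"])
    res = list.partner   # => ["ba", "dc"]; since each element is Annotated,
                         # the array itself is now annotated as Sample again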
@@ -0,0 +1,140 @@
+require 'rbbt/hpc/orchestrate/rules'
+require 'rbbt/hpc/orchestrate/chains'
+
+module HPC
+  module Orchestration
+
+    def self.pb(batch)
+      if Array === batch
+        iii :BATCHES
+        batch.each{|b| pb b}
+        iii :END_BATCHES
+      else
+        n = batch.dup
+        n[:deps] = n[:deps].collect{|b| b[:top_level] }
+        iif n
+      end
+    end
+
+    def self.job_workload(job)
+      workload = []
+      heap = []
+      heap << job
+      while job = heap.pop
+        next if job.done?
+        workload << job
+        heap.concat job_dependencies(job)
+        heap.uniq!
+      end
+      workload.uniq
+    end
+
+
+    def self.chain_batches(rules, chains, workload)
+      chain_rules = parse_chains(rules)
+
+      batches = []
+      while job = workload.pop
+        matches = chains.select{|name,info| info[:jobs].include? job }
+        if matches.any?
+          name, info = matches.sort_by do |name,info|
+            num_jobs = info[:jobs].length
+            total_tasks = chain_rules[name][:tasks].values.flatten.uniq.length
+            num_jobs.to_f + 1/total_tasks
+          end.last
+          workload = workload - info[:jobs]
+          info[:chain] = name
+          batch = info
+        else
+          batch = {:jobs => [job], :top_level => job}
+        end
+
+        chains.delete_if{|name,info| batch[:jobs].include? info[:top_level] }
+
+        chains.each do |name,info|
+          info[:jobs] = info[:jobs] - batch[:jobs]
+        end
+
+        chains.delete_if{|name,info| info[:jobs].length < 2 }
+
+        batches << batch
+      end
+
+      batches
+    end
+
+    def self.add_batch_deps(batches)
+
+      batches.each do |batch|
+        jobs = batch[:jobs]
+        all_deps = jobs.collect{|d| job_dependencies(d) }.flatten.uniq
+        deps = all_deps.collect do |d|
+          (batches - [batch]).select{|batch| batch[:jobs].include? d }
+        end.flatten.uniq
+        batch[:deps] = deps
+      end
+
+      batches
+    end
+
+    def self.add_rules_and_consolidate(rules, batches)
+      chain_rules = parse_chains(rules)
+
+      batches.each do |batch|
+        job_rules = batch[:jobs].inject(nil) do |acc,job|
+          task_rules = task_specific_rules(rules, job.workflow, job.task_name)
+          acc = accumulate_rules(acc, task_rules.dup)
+        end
+
+        if chain = batch[:chain]
+          batch[:rules] = merge_rules(chain_rules[chain][:rules].dup, job_rules)
+        else
+          batch[:rules] = job_rules
+        end
+      end
+
+      begin
+        batches.each do |batch|
+          batch[:deps] = batch[:deps].collect do |dep|
+            dep[:target] || dep
+          end if batch[:deps]
+        end
+
+        batches.each do |batch|
+          next unless batch[:rules][:skip]
+          batch[:rules].delete :skip
+          next if batch[:deps].nil?
+
+          if batch[:deps].any?
+            target = batch[:deps].select do |target|
+              (batch[:deps] - [target] - target[:deps]).empty?
+            end.first
+            next if target.nil?
+            target[:jobs] = batch[:jobs] + target[:jobs]
+            target[:deps] = (target[:deps] + batch[:deps]).uniq - [target]
+            target[:top_level] = batch[:top_level]
+            target[:rules] = accumulate_rules(target[:rules], batch[:rules])
+            batch[:target] = target
+          end
+          raise TryAgain
+        end
+      rescue TryAgain
+        retry
+      end
+
+      batches.delete_if{|b| b[:target] }
+
+      batches
+    end
+
+    def self.job_batches(rules, job)
+      job_chains = self.job_chains(rules, job)
+
+      workload = job_workload(job)
+
+      batches = chain_batches(rules, job_chains, workload)
+      batches = add_batch_deps(batches)
+      batches = add_rules_and_consolidate(rules, batches)
+    end
+  end
+end
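The new `HPC::Orchestration.job_batches` entry point turns a workflow job plus an orchestration rules hash into a list of batches, each carrying `:jobs`, `:top_level`, `:deps` and merged `:rules`. A hedged sketch of how it might be driven; the workflow name, task, inputs, rule values, and the require path for this file are assumptions, while `Workflow.require_workflow` and `job` follow the usual rbbt workflow API:

    require 'rbbt-util'
    require 'rbbt/workflow'
    require 'rbbt/hpc/orchestrate/batches'   # assumed path for the file above

    # Hypothetical workflow, task and rules, for illustration only
    wf  = Workflow.require_workflow "Sample"
    job = wf.job(:some_task, "jobname", :input => 1)

    rules = IndiferentHash.setup({
      "defaults" => { "time" => "2h", "cpus" => 4 },
      "chains"   => {
        "preprocessing" => { "workflow" => "Sample", "tasks" => "step1, step2", "cpus" => 8 }
      }
    })

    batches = HPC::Orchestration.job_batches(rules, job)
    batches.each do |batch|
      # Each batch groups jobs intended for the same HPC submission
      puts [batch[:top_level].task_name, batch[:jobs].length, batch[:rules]].inspect
    end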
@@ -0,0 +1,173 @@
+module HPC
+  module Orchestration
+    def self.check_chains(chains, job)
+      matches = []
+      chains.each do |name, chain|
+        next unless chain[:tasks].include?(job.workflow.to_s)
+        next unless chain[:tasks][job.workflow.to_s].include?(job.task_name.to_s)
+        matches << name
+      end
+      matches
+    end
+
+    def self.parse_chains(rules)
+      return {} if rules["chains"].nil?
+
+      chains = IndiferentHash.setup({})
+      rules["chains"].each do |name,rules|
+        rules = IndiferentHash.setup(rules.dup)
+        chain_tasks = rules.delete(:tasks).split(/,\s*/)
+        workflow = rules.delete(:workflow)
+
+        chain_tasks.each do |task|
+          chain_workflow, chain_task = task.split("#")
+          chain_task, chain_workflow = chain_workflow, workflow if chain_task.nil? or chain_tasks.empty?
+
+          chains[name] ||= IndiferentHash.setup({:tasks => {}, :rules => rules })
+          chains[name][:tasks][chain_workflow] ||= []
+          chains[name][:tasks][chain_workflow] << chain_task
+        end
+      end
+
+      chains
+    end
+
+    def self.job_dependencies(job)
+      (job.dependencies + job.input_dependencies).uniq.select{|d| ! d.done? || d.dirty? }
+    end
+
+    #def self.job_workload(job)
+    #  workload = []
+    #  heap = []
+    #  heap << job
+    #  while job = heap.pop
+    #    next if job.done?
+    #    workload << job
+    #    heap.concat job_dependencies(job)
+    #    heap.uniq!
+    #  end
+    #  workload.uniq
+    #end
+
+    #def self.top_level_job(jobs)
+    #  top = jobs.select do |job|
+    #    (jobs - job_workload(job)).empty? &&
+    #      (job_workload(job) - jobs).select{|j| (job_workload(j) & jobs).any? }.empty?
+    #  end
+    #  return nil if top.length != 1
+    #  top.first
+    #end
+
+    #def self.job_chains(rules, job)
+    #  workload = job_workload(job)
+    #  chains = parse_chains(rules)
+
+    #  chain_jobs = {}
+    #  workload.each do |job|
+    #    check_chains(chains, job).each do |match|
+    #      chain_jobs[match] ||= []
+    #      chain_jobs[match] << job
+    #    end
+    #  end
+
+    #  job_chains = []
+
+    #  seen = []
+    #  chain_jobs.sort_by{|name,jobs| jobs.length }.reverse.each do |name,jobs|
+    #    remain = jobs - seen
+    #    next unless remain.length > 1
+    #    top_level_job = top_level_job(jobs)
+    #    next if top_level_job.nil?
+    #    job_chains << {:jobs => remain, :rules => chains[name][:rules], :top_level_job => top_level_job}
+    #    seen.concat remain
+    #  end
+
+    #  job_chains
+    #end
+
+    #def self._job_chains(rules, job)
+    #  workload = job_workload(job)
+    #  chains = parse_chains(rules)
+
+    #  matches = check_chains(chains, job)
+
+    #  job_chains = {}
+    #  job.dependencies.each do |dep|
+    #    dep_chains = _job_chains(rules, dep)
+    #    matches.each do |match|
+    #      if dep_chains[match] && dep_chains[match].include?(dep)
+    #        dep_chains[match].prepend job
+    #      end
+    #    end
+    #    job_chains.merge!(dep_chains)
+    #  end
+
+    #  matches.each do |match|
+    #    job_chains[match] ||= [job]
+    #  end
+
+    #  job_chains
+    #end
+
+    #def self.job_chains(rules, job)
+    #  job_chains = self._job_chains(rules, job)
+    #  iif job_chains
+    #  chains = parse_chains(rules)
+
+    #  seen = []
+    #  job_chains.collect do |name,jobs|
+    #    remain = jobs - seen
+    #    next unless remain.length > 1
+    #    top_level_job = top_level_job(jobs)
+    #    next if top_level_job.nil?
+    #    seen.concat remain
+    #    {:jobs => remain, :rules => chains[name][:rules], :top_level_job => top_level_job}
+    #  end.compact
+    #end
+
+    def self.job_chains(rules, job)
+      chains = self.parse_chains(rules)
+
+      matches = check_chains(chains, job)
+
+      dependencies = job_dependencies(job)
+
+      job_chains = []
+      new_job_chains = {}
+      dependencies.each do |dep|
+        dep_matches = check_chains(chains, dep)
+        common = matches & dep_matches
+
+        dep_chains = job_chains(rules, dep)
+        found = []
+        dep_chains.each do |match,info|
+          if common.include?(match)
+            found << match
+            new_info = new_job_chains[match] ||= {}
+            new_info[:jobs] ||= []
+            new_info[:jobs].concat info[:jobs]
+            new_info[:top_level] = job
+          else
+            job_chains << [match, info]
+          end
+        end
+
+        (common - found).each do |match|
+          info = {}
+          info[:jobs] = [job, dep]
+          info[:top_level] = job
+          job_chains << [match, info]
+        end
+      end
+
+      new_job_chains.each do |match,info|
+        info[:jobs].prepend job
+        job_chains << [match, info]
+      end
+
+      job_chains
+    end
+
+  end
+end
+
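The `chains` section consumed by `parse_chains` lists tasks either as `workflow#task` or as bare task names resolved against the entry's `workflow` key; every other key in the entry becomes that chain's rules. A hedged sketch of the expected shape (the chain name, workflow, tasks, and resource values are made up, and the require path for this file is an assumption):

    require 'rbbt-util'
    require 'rbbt/hpc/orchestrate/chains'   # assumed path for the file above

    # Illustrative rules; in practice they come from the orchestration YAML
    rules = {
      "chains" => {
        "alignment" => {
          "workflow" => "HTS",                 # default workflow for bare task names
          "tasks"    => "bwa, HTS#mark_duplicates",
          "cpus"     => 16, "time" => "12h"    # anything else becomes the chain rules
        }
      }
    }

    chains = HPC::Orchestration.parse_chains(rules)
    # Rough shape of the result:
    # {"alignment" => {:tasks => {"HTS" => ["bwa", "mark_duplicates"]},
    #                  :rules => {"cpus" => 16, "time" => "12h"}}}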
@@ -0,0 +1,70 @@
+module HPC
+  module Orchestration
+    def self.add_config_keys(current, new)
+      if current.nil?
+        new
+      else
+        new + ',' + current
+      end.gsub(/,\s*/,',').split(",").reverse.uniq.reverse * ","
+    end
+
+    def self.workflow_rules(rules, workflow)
+      return {} if rules[workflow].nil?
+      return {} if rules[workflow]["defaults"].nil?
+      IndiferentHash.setup(rules[workflow]["defaults"])
+    end
+
+    def self.merge_rules(current, new)
+      return IndiferentHash.setup({}) if (new.nil? || new.empty?) && (current.nil? || current.empty?)
+      return IndiferentHash.setup(current.dup) if new.nil? || new.empty?
+      return IndiferentHash.setup(new.dup) if current.nil? || current.empty?
+      target = IndiferentHash.setup(current.dup)
+      new.each do |k,value|
+        case k.to_s
+        when "config_keys"
+          target[k] = add_config_keys target["config_keys"], value
+        else
+          next if target.include?(k)
+          target[k] = value
+        end
+      end
+      target
+    end
+
+    def self.accumulate_rules(current, new)
+      return IndiferentHash.setup({}) if (new.nil? || new.empty?) && (current.nil? || current.empty?)
+      return IndiferentHash.setup(current.dup) if new.nil? || new.empty?
+      return IndiferentHash.setup(new.dup) if current.nil? || current.empty?
+      target = IndiferentHash.setup(current.dup)
+      new.each do |k,value|
+        case k.to_s
+        when "config_keys"
+          target[k] = add_config_keys target["config_keys"], value
+        when "cpus"
+          target[k] = [target[k], value].compact.sort_by{|v| v.to_i}.last
+        when "time"
+          target[k] = Misc.format_seconds [target[k], value].compact.inject(0){|acc,t| acc += Misc.timespan t }
+        when "skip"
+          skip = target[k] && value
+          target.delete k unless skip
+        else
+          next if target.include?(k)
+          target[k] = value
+        end
+      end
+      target
+    end
+
+    def self.task_specific_rules(rules, workflow, task)
+      defaults = rules[:defaults] || {}
+      workflow = workflow.to_s
+      task = task.to_s
+      return defaults if rules[workflow].nil?
+      workflow_rules = merge_rules(workflow_rules(rules, workflow), defaults)
+      return IndiferentHash.setup(workflow_rules.dup) if rules[workflow][task].nil?
+      merge_rules(rules[workflow][task], workflow_rules)
+    end
+
+
+  end
+end
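`merge_rules` keeps the value already present for a key (only `config_keys` are concatenated), while `accumulate_rules` combines resources across jobs that land in the same batch: the larger `cpus`, the summed `time` (via the gem's `Misc.timespan` / `Misc.format_seconds`), joined `config_keys`, and `skip` only when both sides skip. A small hedged sketch with made-up values:

    require 'rbbt-util'
    require 'rbbt/hpc/orchestrate/rules'   # assumed path for the file above

    a = { "cpus" => 4,  "time" => "1h",    "config_keys" => "key1 value1 token" }
    b = { "cpus" => 16, "time" => "30min", "config_keys" => "key2 value2 token" }

    HPC::Orchestration.merge_rules(a, b)
    # cpus/time stay as in `a` (first argument wins for existing keys);
    # config_keys from both are joined and de-duplicated

    HPC::Orchestration.accumulate_rules(a, b)
    # cpus => 16 (the larger), time => the two durations summed and re-formatted,
    # config_keys joined and de-duplicated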
@@ -0,0 +1,220 @@
+require 'rbbt/workflow/util/orchestrator'
+module HPC
+  module Orchestration
+    def job_rules(rules, job)
+      workflow = job.workflow.to_s
+      task_name = job.task_name.to_s
+      task_name = job.overriden.to_s if Symbol === job.overriden
+
+      defaults = rules["defaults"] || {}
+      defaults = defaults.merge(rules[workflow]["defaults"] || {}) if rules[workflow]
+
+      job_rules = IndiferentHash.setup(defaults.dup)
+
+      rules["chains"].each do |name,info|
+        IndiferentHash.setup(info)
+        chain_tasks = info[:tasks].split(/,\s*/)
+
+        chain_tasks.each do |task|
+          task_workflow, chain_task = task.split("#")
+          chain_task, task_workflow = task_workflow, info[:workflow] if chain_task.nil? or chain_tasks.empty?
+          job_rules["chain_tasks"] ||= {}
+          job_rules["chain_tasks"][task_workflow] ||= []
+          job_rules["chain_tasks"][task_workflow] << chain_task
+          next unless task_name == chain_task.to_s && workflow == task_workflow.to_s
+          config_keys = job_rules.delete :config_keys
+          job_rules = IndiferentHash.setup(job_rules.merge(info))
+          if config_keys
+            config_keys.gsub!(/,\s+/,',')
+            job_rules[:config_keys] = job_rules[:config_keys] ? config_keys + "," + job_rules[:config_keys] : config_keys
+          end
+        end
+
+        if job_rules["chain_tasks"][workflow] && job_rules["chain_tasks"][workflow].include?(task_name)
+          break
+        else
+          job_rules.delete "chain_tasks"
+        end
+      end if rules["chains"]
+
+      config_keys = job_rules.delete :config_keys
+      job_rules = IndiferentHash.setup(job_rules.merge(rules[workflow][task_name])) if rules[workflow] && rules[workflow][task_name]
+
+      if config_keys
+        config_keys.gsub!(/,\s+/,',')
+        job_rules[:config_keys] = job_rules[:config_keys] ? config_keys + "," + job_rules[:config_keys] : config_keys
+      end
+
+      if rules["skip"] && rules["skip"][workflow]
+        job_rules["skip"] = true if rules["skip"][workflow].split(/,\s*/).include? task_name
+      end
+
+      job_rules
+    end
+
+    def get_job_dependencies(job, job_rules = nil)
+      deps = job.dependencies || []
+      deps += job.input_dependencies || []
+      deps
+    end
+
+    def get_recursive_job_dependencies(job)
+      deps = get_job_dependencies(job)
+      (deps + deps.collect{|dep| get_recursive_job_dependencies(dep) }).flatten
+    end
+
+    def piggyback(job, job_rules, job_deps)
+      return false unless job_rules["skip"]
+      final_deps = job_deps - job_deps.collect{|dep| get_recursive_job_dependencies(dep)}.flatten.uniq
+      final_deps = final_deps.reject{|dep| dep.done? }
+      return final_deps.first if final_deps.length == 1
+      return false
+    end
+
+    def get_chains(job, rules, chains = {})
+      job_rules = self.job_rules(rules, job)
+      job_deps = get_job_dependencies(job)
+
+      input_deps = []
+      job.rec_dependencies.each do |dep|
+        input_deps.concat dep.input_dependencies
+      end
+
+      job_deps.each do |dep|
+        input_deps.concat dep.input_dependencies
+        get_chains(dep, rules, chains)
+      end
+
+      job_deps.select do |dep|
+        chained = job_rules["chain_tasks"] &&
+          job_rules["chain_tasks"][job.workflow.to_s] && job_rules["chain_tasks"][job.workflow.to_s].include?(job.task_name.to_s) &&
+          job_rules["chain_tasks"][dep.workflow.to_s] && job_rules["chain_tasks"][dep.workflow.to_s].include?(dep.task_name.to_s)
+
+        dep_skip = dep.done? && ! input_deps.include?(dep) && self.job_rules(rules, dep)["skip"]
+        chained || dep_skip
+      end.each do |dep|
+        chains[job] ||= []
+        chains[job] << dep
+        chains[job].concat chains[dep] if chains[dep]
+        chains[job].uniq!
+      end
+
+      chains
+    end
+
+    def workload(job, rules, chains, options, seen = nil)
+      return [] if job.done?
+      if seen.nil?
+        seen = {}
+        target_job = true
+      end
+
+      job_rules = self.job_rules(rules, job)
+      job_deps = get_job_dependencies(job)
+
+      chain = chains[job]
+      chain = chain.reject{|j| seen.include? j.path} if chain
+      chain = chain.reject{|dep| dep.done? } if chain
+      piggyback = piggyback(job, job_rules, job_deps)
+      dep_ids = job_deps.collect do |dep|
+        seen[dep.path] ||= nil if chain && chain.include?(dep) #&& ! job.input_dependencies.include?(dep)
+        next_options = IndiferentHash.setup(options.dup)
+        if piggyback and piggyback == dep
+          next_options[:piggyback] ||= []
+          next_options[:piggyback].push job
+          ids = workload(dep, rules, chains, next_options, seen)
+        else
+          next_options.delete :piggyback
+          ids = workload(dep, rules, chains, next_options, seen)
+        end
+
+        ids = [ids].flatten.compact.collect{|id| ['canfail', id] * ":"} if job.canfail_paths.include? dep.path
+
+        seen[dep.path] = ids
+        ids
+      end.compact.flatten.uniq
+
+      return seen[job.path] || dep_ids if seen.include?(job.path)
+
+      if piggyback and seen[piggyback.path]
+        return seen[job.path] = seen[piggyback.path]
+      end
+
+      job_rules.delete :chain_tasks
+      job_rules.delete :tasks
+      job_rules.delete :workflow
+
+
+      option_config_keys = options[:config_keys]
+
+      job_options = IndiferentHash.setup(options.merge(job_rules).merge(:batch_dependencies => dep_ids))
+      job_options.delete :orchestration_rules
+
+      config_keys = job_rules.delete(:config_keys)
+      if config_keys
+        config_keys.gsub!(/,\s+/,',')
+        job_options[:config_keys] = job_options[:config_keys] ? config_keys + "," + job_options[:config_keys] : config_keys
+      end
+
+      if option_config_keys
+        option_config_keys = option_config_keys.gsub(/,\s+/,',')
+        job_options[:config_keys] = job_options[:config_keys] ? job_options[:config_keys] + "," + option_config_keys : option_config_keys
+      end
+
+      if options[:piggyback]
+        manifest = options[:piggyback].uniq
+        manifest += [job]
+        manifest.concat chain if chain
+
+        job = options[:piggyback].first
+
+        job_rules = self.job_rules(rules, job)
+        new_config_keys = self.job_rules(rules, job)[:config_keys]
+        if new_config_keys
+          new_config_keys = new_config_keys.gsub(/,\s+/,',')
+          job_options[:config_keys] = job_options[:config_keys] ? job_options[:config_keys] + "," + new_config_keys : new_config_keys
+        end
+
+        job_options.delete :piggyback
+      else
+        manifest = [job]
+        manifest.concat chain if chain
+      end
+
+      manifest.uniq!
+
+      job_options[:manifest] = manifest.collect{|j| j.task_signature }
+
+      job_options[:config_keys] = job_options[:config_keys].split(",").uniq * "," if job_options[:config_keys]
+
+      if options[:dry_run]
+        puts Log.color(:magenta, "Manifest: ") + Log.color(:blue, job_options[:manifest] * ", ") + " - tasks: #{job_options[:task_cpus] || 1} - time: #{job_options[:time]} - config: #{job_options[:config_keys]}"
+        puts Log.color(:yellow, "Deps: ") + Log.color(:blue, job_options[:batch_dependencies]*", ")
+        job_options[:manifest].first
+      else
+        run_job(job, job_options)
+      end
+    end
+
+
+    def orchestrate_job(job, options)
+      options.delete "recursive_clean"
+      options.delete "clean_task"
+      options.delete "clean"
+      options.delete "tail"
+      options.delete "printpath"
+      options.delete "detach"
+      options.delete "jobname"
+
+      rules = YAML.load(Open.read(options[:orchestration_rules])) if options[:orchestration_rules]
+      rules ||= {}
+      IndiferentHash.setup(rules)
+
+      chains = get_chains(job, rules)
+
+      workload(job, rules, chains, options)
+    end
+
+  end
+end
+
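`orchestrate_job` is the entry point mixed into the batch front-ends: it loads the YAML rules file passed as `:orchestration_rules`, builds the chains, and walks the workload, handing each batch to `run_job`, which the including batch module supplies. A hedged sketch of a dry run, assuming `HPC::SLURM` is one of those batch modules and using a made-up workflow, task, and rules file path:

    require 'rbbt-util'
    require 'rbbt/workflow'
    require 'rbbt/hpc'   # batch modules that include the orchestration code

    wf  = Workflow.require_workflow "Sample"        # hypothetical workflow
    job = wf.job(:some_task, "run1", :input => 1)   # hypothetical task and inputs

    # With :dry_run the workload is only printed (manifest and batch dependencies),
    # nothing is submitted; the rules file path is an assumption for this sketch.
    HPC::SLURM.orchestrate_job(job, IndiferentHash.setup(
      :orchestration_rules => "etc/slurm_rules.yaml",
      :dry_run             => true))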