rbbt-util 5.35.4 → 5.37.0
This diff shows the changes between publicly released package versions as they appear in their public registries; it is provided for informational purposes only.
- checksums.yaml +4 -4
- data/lib/rbbt/persist.rb +2 -0
- data/lib/rbbt/resource/path.rb +12 -2
- data/lib/rbbt/util/concurrency/processes/socket.rb +1 -0
- data/lib/rbbt/util/concurrency/processes/worker.rb +1 -5
- data/lib/rbbt/util/concurrency/processes.rb +29 -24
- data/lib/rbbt/util/misc/inspect.rb +2 -2
- data/lib/rbbt/util/python.rb +2 -2
- data/lib/rbbt/workflow/definition.rb +2 -0
- data/lib/rbbt/workflow/dependencies.rb +26 -4
- data/lib/rbbt/workflow/doc.rb +1 -0
- data/lib/rbbt/workflow/remote_workflow/driver/ssh.rb +62 -20
- data/lib/rbbt/workflow/remote_workflow/remote_step/ssh.rb +31 -3
- data/lib/rbbt/workflow/remote_workflow/remote_step.rb +1 -0
- data/lib/rbbt/workflow/step/accessor.rb +42 -5
- data/lib/rbbt/workflow/step/dependencies.rb +37 -28
- data/lib/rbbt/workflow/step/{prepare.rb → produce.rb} +1 -1
- data/lib/rbbt/workflow/step/run.rb +39 -28
- data/lib/rbbt/workflow/step/status.rb +1 -1
- data/lib/rbbt/workflow/step.rb +55 -83
- data/lib/rbbt/workflow/task.rb +4 -2
- data/lib/rbbt/workflow/util/archive.rb +29 -23
- data/lib/rbbt/workflow.rb +13 -3
- data/python/rbbt.py +87 -1
- data/share/rbbt_commands/tsv/write_excel +3 -2
- data/share/rbbt_commands/workflow/task +53 -5
- data/test/rbbt/test_workflow.rb +88 -0
- data/test/rbbt/workflow/step/test_dependencies.rb +1 -0
- metadata +88 -144
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 281f115df5158fada081c65b87e292e69e4f378b8353d6e2542b3c45db0c03f1
+  data.tar.gz: e9801707391c731a2a4b4ead8156cff82fdf30c4e769a8bed34d1d77185b447a
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 3da2876b28c245b27f32b6dbd00188bdeea498fb118ccab048fb5d72925548e5e89926dea6b6316573e9d95a570c07983109e0e09de92360ec89ab2aec088439
+  data.tar.gz: 0166d47c3198bfc4c636a1f47ac9be1d2a64b8d97a2836166b02d1ac17d46c2b835ad210953da586f15a98f8593e18f6010ebc07220e95da149dcabec44b33a9
data/lib/rbbt/persist.rb
CHANGED
data/lib/rbbt/resource/path.rb
CHANGED
@@ -319,7 +319,8 @@ module Path
 
 
   def open(options = {}, &block)
-    Open.
+    file = Open.remote?(self) || Open.ssh?(self) ? self : self.produce.find
+    Open.open(file, options, &block)
   end
 
   def to_s
@@ -331,7 +332,16 @@ module Path
   end
 
   def tsv(*args)
-
+    begin
+      path = self.produce
+    rescue Resource::ResourceNotFound => e
+      begin
+        path = self.set_extension('tsv').produce
+      rescue Resource::ResourceNotFound
+        raise e
+      end
+    end
+    TSV.open(path, *args)
   end
 
   def tsv_options(options = {})
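Side note (not part of the upstream diff): with these two hunks, Path#open resolves remote/ssh locations without forcing production, and Path#tsv retries with an explicit .tsv extension before re-raising the original error. A hedged sketch of the resulting behavior; the resource path below is a made-up placeholder:

  # Hypothetical resource path; Rbbt.share.databases.example is a placeholder
  path = Rbbt.share.databases.example      # declared without a ".tsv" extension
  tsv  = path.tsv                          # 1) tries path.produce
                                           # 2) falls back to path.set_extension('tsv').produce
                                           # 3) re-raises the original ResourceNotFound if both fail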
data/lib/rbbt/util/concurrency/processes/worker.rb
CHANGED
@@ -65,11 +65,7 @@ class RbbtProcessQueue
         Log.high "Worker #{Process.pid} leaving"
       rescue Exception
         Log.high "Worker #{Process.pid} had exception: #{$!.message}"
-
-        begin
-          @callback_queue.push($!) if @callback_queue
-        rescue
-        end
+        @callback_queue.push($!) if @callback_queue
         Kernel.exit! -1
       ensure
         @callback_queue.close_write if @callback_queue
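For orientation (not part of the diff): the deleted begin/rescue swallowed any error raised while the worker reported its own failure, hiding both problems. A generic Ruby illustration of that anti-pattern; the method and queue here are hypothetical:

  # Hypothetical illustration: a bare rescue discards the secondary failure
  def report_failure(queue, error)
    queue.push(error)     # may itself raise, e.g. on a closed pipe
  rescue
    # silently ignored: neither the push failure nor the original error surfaces
  end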
data/lib/rbbt/util/concurrency/processes.rb
CHANGED
@@ -180,13 +180,15 @@ class RbbtProcessQueue
       rescue Exception
         Log.low "Process monitor exception [#{Process.pid}]: #{$!.message}"
         processes.each{|p| p.abort_and_join}
-        Log.low "Processes aborted #{Process.pid}"
+        Log.low "Processes aborted for monitor #{Process.pid}"
         processes.clear
 
+        @manager_thread.report_on_exception = false
         @manager_thread.raise $! if @manager_thread.alive?
         raise Aborted, "Aborted monitor thread with exception"
       end
     end
+    @monitor_thread.report_on_exception = false
 
     RbbtSemaphore.post_semaphore(@sem)
 
@@ -215,33 +217,36 @@ class RbbtProcessQueue
     init_master
 
     RbbtSemaphore.synchronize(@sem) do
-
-
-
-
+      @callback_thread = Thread.new do
+        begin
+          loop do
+            p = @callback_queue.pop unless @callback_queue.cleaned
 
-
-
-
-
-
+            if Exception === p or (Array === p and Exception === p.first)
+              e = Array === p ? p.first : p
+              Log.low "Callback recieved exception from worker: #{e.message}" unless Aborted === e or ClosedStream === e
+              raise e
+            end
 
-
-
-
-
+            if @callback.arity == 0
+              @callback.call
+            else
+              @callback.call p
+            end
           end
+        rescue ClosedStream
+          Log.low "Callback thread closing"
+        rescue Aborted
+          Log.low "Callback thread aborted"
+          raise $!
+        rescue Exception
+          Log.low "Exception captured in callback: #{$!.message}"
+          raise $!
        end
-
-
-
-
-        raise $!
-      rescue Exception
-        Log.low "Exception captured in callback: #{$!.message}"
-        raise $!
-      end
-    end if @callback_queue
+      end if @callback_queue
+
+      @callback_thread.report_on_exception = false
+
     end
 
   end
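A brief aside (not part of the diff): report_on_exception = false turns off Ruby's default dump of a dying thread's exception to stderr, which fits here because the monitor and callback threads re-raise those exceptions where they are actually handled. A minimal plain-Ruby illustration of the flag:

  t = Thread.new do
    Thread.current.report_on_exception = false   # suppress the automatic stderr report
    raise "boom"
  end
  begin
    t.join                                       # the exception still propagates to the joiner
  rescue => e
    puts e.message                               # => "boom"
  end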
data/lib/rbbt/util/misc/inspect.rb
CHANGED
@@ -343,9 +343,9 @@ module Misc
       end
     when Array
       if obj.length > HASH2MD5_MAX_ARRAY_LENGTH
-        "[" << sample_large_obj(obj, HASH2MD5_MAX_ARRAY_LENGTH).collect{|v| obj2str(v)} * "," << "]"
+        "[" << sample_large_obj(obj, HASH2MD5_MAX_ARRAY_LENGTH).collect{|v| obj2str(v.nil? ? "" : v) } * "," << "]"
       else
-        "[" << obj.collect{|v| obj2str(v) } * "," << "]"
+        "[" << obj.collect{|v| obj2str(v.nil? ? "" : v) } * "," << "]"
       end
     when TSV::Parser
       remove_long_items(obj)
data/lib/rbbt/util/python.rb
CHANGED
@@ -59,12 +59,12 @@ module RbbtPython
   end
 
   def self.get_class(module_name, class_name)
-    save_module_name = module_name.gsub(".", "_")
+    save_module_name = module_name.to_s.gsub(".", "_")
     RbbtPython.pyimport(module_name, as: save_module_name)
     RbbtPython.send(save_module_name).send(class_name)
   end
 
-  def self.class_new_obj(module_name, class_name, args)
+  def self.class_new_obj(module_name, class_name, args={})
     RbbtPython.get_class(module_name, class_name).new(**args)
   end
 
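As an aside (not part of the diff), class_new_obj can now be called without an argument hash, and get_class tolerates non-string module names. A hedged usage sketch; the Python module and class names below are placeholders and assume the embedded Python interpreter can import them:

  # Hypothetical usage of the helpers shown above
  counter = RbbtPython.class_new_obj("collections", "Counter")                  # args now defaults to {}
  date    = RbbtPython.class_new_obj("datetime", "date", year: 2024, month: 1, day: 1)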
data/lib/rbbt/workflow/dependencies.rb
CHANGED
@@ -72,6 +72,7 @@ module Workflow
   def setup_override_dependency(dep, workflow, task_name)
     return [] if dep == :skip || dep == 'skip'
 
+    unlocated = unlocated_override?(dep)
     dep = Workflow.load_step(dep) if not Step === dep
 
     dep.original_workflow ||= dep.workflow if dep.workflow
@@ -91,7 +92,7 @@ module Workflow
     end
 
     dep.task_name = task_name
-    dep.overriden = dep.original_task_name.to_sym if dep.original_task_name
+    dep.overriden = dep.original_task_name.to_sym if dep.original_task_name && dep.original_task_name.to_s != task_name.to_s || ! unlocated
 
     dep.extend step_module
 
@@ -169,6 +170,16 @@ module Workflow
     else
 
       compute = options[:compute] if options
+      if options && options[:canfail]
+        compute = case compute
+                  when nil
+                    :canfail
+                  when Array
+                    compute + [:canfail]
+                  else
+                    [compute, :canfail]
+                  end
+      end
 
       all_d = (real_dependencies + real_dependencies.flatten.collect{|d| d.rec_dependencies} ).flatten.compact.uniq
 
@@ -207,6 +218,16 @@ module Workflow
 
       options = {} if options.nil?
       compute = options[:compute]
+      if options[:canfail]
+        compute = case compute
+                  when nil
+                    :canfail
+                  when Array
+                    compute + [:canfail]
+                  else
+                    [compute, :canfail]
+                  end
+      end
 
       options = IndiferentHash.setup(options.dup)
       dep = dependency.call jobname, _inputs.merge(options), real_dependencies
@@ -228,7 +249,8 @@ module Workflow
           task_info = d[:workflow].task_info(d[:task])
 
           _inputs = assign_dep_inputs({}, options.merge(d[:inputs] || {}), real_dependencies, task_info)
-
+          _jobname = d.include?(:jobname) ? d[:jobname] : jobname
+          job = d[:workflow]._job(d[:task], _jobname, _inputs)
           overriden = true if TrueClass === job.overriden && (d.nil? || ! d[:not_overriden])
           job
         end
@@ -250,8 +272,8 @@ module Workflow
         else
           task_info = (dep[:task] && dep[:workflow]) ? dep[:workflow].task_info(dep[:task]) : nil
           _inputs = assign_dep_inputs({}, dep[:inputs], real_dependencies, task_info)
-
-          job =
+          _jobname = dep.include?(:jobname) ? dep[:jobname] : jobname
+          job = dep[:workflow]._job(dep[:task], _jobname, _inputs)
           overriden = true if TrueClass === job.overriden && (d.nil? || ! d[:not_overriden])
           job
         end
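Not part of the diff, but for orientation: both canfail hunks above fold a :canfail option into the compute flag(s). A standalone Ruby sketch of the same normalization with hypothetical input values:

  # Mirrors the case expression added above; values are illustrative
  def add_canfail(compute)
    case compute
    when nil   then :canfail
    when Array then compute + [:canfail]
    else            [compute, :canfail]
    end
  end

  add_canfail(nil)           # => :canfail
  add_canfail([:bootstrap])  # => [:bootstrap, :canfail]
  add_canfail(:produce)      # => [:produce, :canfail]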
data/lib/rbbt/workflow/doc.rb
CHANGED
data/lib/rbbt/workflow/remote_workflow/driver/ssh.rb
CHANGED
@@ -1,4 +1,6 @@
 class RemoteWorkflow
+  RBBT_DEBUG_REMOTE_JSON = ENV["RBBT_DEBUG_REMOTE_JSON"] == 'true'
+
   module SSH
     #def self.run(server, script)
     #  Log.debug "Run ssh script in #{server}:\n#{script}"
@@ -21,9 +23,16 @@ class RemoteWorkflow
 
       workflow, task, job, *rest = path.split("/")
 
+      workflow_name = begin
+                        wf = Kernel.const_get(workflow) if String === workflow && ! workflow.empty?
+                        wf.respond_to?(:complete_name) ? (wf.complete_name || workflow) : workflow
+                      rescue
+                        workflow
+                      end
+
       script =<<-EOF
require 'rbbt/workflow'
-wf = Workflow.require_workflow "#{
+wf = Workflow.require_workflow "#{workflow_name}"
EOF
 
       case task
@@ -86,7 +95,7 @@ STDOUT.write res.to_json
EOF
 
       json = Misc.ssh_run(server, script)
-      Log.debug "JSON (#{ url }): #{json}"
+      Log.debug "JSON (#{ url }): #{json}" if RBBT_DEBUG_REMOTE_JSON
       JSON.parse(json)
     end
 
@@ -172,11 +181,18 @@ job.clean
     def self.upload_inputs(server, inputs, input_types, input_id)
       TmpFile.with_file do |dir|
         if Step.save_inputs(inputs, input_types, dir)
-          Dir.glob(File.join(dir, "*.as_step")).each do |file|
-
-
-
-
+          # Dir.glob(File.join(dir, "*.as_step")).each do |file|
+          #   Log.medium "Migrating Step input #{file} #{ server }"
+          #   path = Open.read(file).strip
+          #   new = Step.migrate(path, :user, :target => server)
+          #   Open.write(file, new)
+          # end
+
+          files = Dir.glob(File.join(dir, "*.as_step"))
+          paths = files.collect{|f| Open.read(f).strip }
+          new = Step.migrate(paths, :user, :target => server)
+          files.zip(new).each{|file,new| Open.write(file, new) }
+
           CMD.cmd_log("ssh '#{server}' mkdir -p .rbbt/tmp/tmp-ssh_job_inputs/; scp -r '#{dir}' #{server}:.rbbt/tmp/tmp-ssh_job_inputs/#{input_id}")
         end
       end
@@ -204,31 +220,57 @@ job.clean
     # rjob.run
     #end
 
+    def self.upload_dependencies(job, server, search_path = 'user', produce_dependencies = false)
+      server, path = parse_url(server) if server =~ /^ssh:\/\//
+      job.dependencies.each do |dep|
+        Log.medium "Producing #{dep.workflow}:#{dep.short_path} dependency for #{job.workflow}:#{job.short_path}"
+        dep.produce
+      end if produce_dependencies
+
+      job.input_dependencies.each do |dep|
+        Log.medium "Producing #{dep.workflow}:#{dep.short_path} dependency for #{job.workflow}:#{job.short_path}"
+        dep.produce
+      end
+
+      migrate_dependencies = job.rec_dependencies.select{|d| d.done? }.collect{|d| d.path }
+      migrate_dependencies += job.input_dependencies.select{|d| d.done? }.collect{|d| d.path }
+      Log.medium "Migrating #{migrate_dependencies.length} dependencies from #{job.path} to #{ server }"
+      Step.migrate(migrate_dependencies, search_path, :target => server) if migrate_dependencies.any?
+    end
+
+    def self.missing_dep_inputs(job)
+      inputs = job.inputs.to_hash.slice(*job.real_inputs.map{|i| i.to_s})
+      job.dependencies.each do |dep|
+        next if dep.done?
+        iif [dep, dep.inputs, dep.real_inputs]
+        inputs = dep.inputs.to_hash.slice(*dep.real_inputs.map{|i| i.to_s}).merge(inputs)
+        inputs = missing_dep_inputs(dep).merge(inputs)
+      end
+      inputs
+    end
+
     def self.relay_job(job, server, options = {})
-      migrate, produce, produce_dependencies, search_path = Misc.process_options options.dup,
-        :migrate, :produce, :produce_dependencies, :search_path
+      migrate, produce, produce_dependencies, search_path, run_type, slurm_options = Misc.process_options options.dup,
+        :migrate, :produce, :produce_dependencies, :search_path, :run_type, :slurm_options
 
       search_path ||= 'user'
 
       produce = true if migrate
 
       workflow_name = job.workflow.to_s
-
-
-      job.
-        dep.produce
-      end if options[:produce_dependencies]
-
-      job.rec_dependencies.each do |dep|
-        Step.migrate(dep.path, search_path, :target => server) if dep.done?
-      end
+      remote_workflow = RemoteWorkflow.new("ssh://#{server}:#{workflow_name}", "#{workflow_name}")
+      inputs = job.recursive_inputs.to_hash.slice(*job.real_inputs.map{|i| i.to_s})
+      Log.medium "Relaying dependency #{job.workflow}:#{job.short_path} to #{server} (#{inputs.keys * ", "})"
 
-
+      upload_dependencies(job, server, search_path, options[:produce_dependencies])
       rjob = remote_workflow.job(job.task_name.to_s, job.clean_name, inputs)
 
       override_dependencies = job.rec_dependencies.select{|dep| dep.done? }.collect{|dep| [dep.workflow.to_s, dep.task_name.to_s] * "#" << "=" << Rbbt.identify(dep.path)}
       rjob.override_dependencies = override_dependencies
 
+      rjob.run_type = run_type
+      rjob.slurm_options = slurm_options || {}
+
       if options[:migrate]
         rjob.produce
         Step.migrate(Rbbt.identify(job.path), 'user', :source => server)
@@ -255,7 +297,7 @@ job.clean
       @task_info ||= IndiferentHash.setup({})
 
       if @task_info[task].nil?
-        task_info = RemoteWorkflow::SSH.get_json(File.join(@base_url, task.to_s))
+        task_info = RemoteWorkflow::SSH.get_json(File.join(@base_url || @url, task.to_s))
         task_info = RemoteWorkflow::SSH.fix_hash(task_info)
 
         task_info[:result_type] = task_info[:result_type].to_sym if task_info[:result_type]
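An illustrative note (not part of the diff): relay_job now accepts :run_type and :slurm_options and forwards them to the remote job, so an SSH-relayed job can be queued through SLURM on the server side. A hedged sketch of a call; the server name and SLURM settings are placeholders:

  # Hypothetical invocation; "cluster.example.org" and the option values are made up
  RemoteWorkflow::SSH.relay_job(job, "cluster.example.org",
                                :run_type => :orchestrate,               # :run (default), :slurm or :orchestrate
                                :slurm_options => { "time" => "2:00:00" },
                                :produce_dependencies => true)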
data/lib/rbbt/workflow/remote_workflow/remote_step/ssh.rb
CHANGED
@@ -1,6 +1,6 @@
 class RemoteStep
   module SSH
-    attr_accessor :override_dependencies
+    attr_accessor :override_dependencies, :run_type, :slurm_options
 
     def init_job(cache_type = nil, other_params = {})
       return self if @url
@@ -52,15 +52,31 @@ class RemoteStep
     end
 
     def _run
+      RemoteWorkflow::SSH.upload_dependencies(self, @server)
       RemoteWorkflow::SSH.run_job(File.join(base_url, task.to_s), @input_id, @base_name)
     end
 
+    def _run_slurm
+      RemoteWorkflow::SSH.run_slurm_job(File.join(base_url, task.to_s), @input_id, @base_name, @slurm_options || {})
+    end
+
+    def _orchestrate_slurm
+      RemoteWorkflow::SSH.orchestrate_slurm_job(File.join(base_url, task.to_s), @input_id, @base_name, @slurm_options || {})
+    end
+
     def produce(*args)
       input_types = {}
       init_job
-      @remote_path =
+      @remote_path = case @run_type
+                     when 'run', :run, nil
+                       _run
+                     when 'slurm', :slurm
+                       _run_slurm
+                     when 'orchestrate', :orchestrate
+                       _orchestrate_slurm
+                     end
       @started = true
-      while ! (done? || error?)
+      while ! (done? || error? || aborted?)
         sleep 1
       end
       raise self.get_exception if error?
@@ -82,6 +98,18 @@ class RemoteStep
       _restart
     end
 
+    def abort
+      Log.warn "not implemented RemoteWorkflow::SSH.abort(@url, @input_id, @base_name)"
+    end
+
+    def input_dependencies
+      @input_dependencies ||= inputs.values.flatten.
+        select{|i| Step === i || (defined?(RemoteStep) && RemoteStep === i) } +
+        inputs.values.flatten.
+        select{|dep| Path === dep && Step === dep.resource }.
+        select{|dep| ! dep.resource.started? }. # Ignore input_deps already started
+        collect{|dep| dep.resource }
+    end
   end
 end
 
data/lib/rbbt/workflow/step/accessor.rb
CHANGED
@@ -1,4 +1,12 @@
 class Step
+  attr_accessor :clean_name, :path, :task, :workflow, :inputs, :dependencies, :bindings
+  attr_accessor :task_name, :overriden
+  attr_accessor :pid
+  attr_accessor :exec
+  attr_accessor :relocated
+  attr_accessor :result, :mutex, :seen
+  attr_accessor :real_inputs, :original_task_name, :original_workflow
+
 
   INFO_SERIALIZER = begin
     if ENV["RBBT_INFO_SERIALIZER"]
@@ -86,12 +94,45 @@ class Step
     end
   end
 
+  def task_name
+    @task_name ||= begin
+                     if @task.respond_to?(:name)
+
+                       @task.name
+                     else
+                       @path.split("/")[-2]
+                     end
+                   end
+  end
+
+  def result_type
+    @result_type ||= if @task.respond_to?(:result_type)
+                       @task.result_type || info[:result_type]
+                     else
+                       info[:result_type]
+                     end
+  end
+
+  def result_type=(type)
+    @result_type = type
+  end
+
+  def result_description
+    @result_description ||= if @task.respond_to?(:result_description)
+                              @task.result_description
+                            else
+                              nil
+                            end
+  end
+
+  def result_description=(description)
+    @result_description = description
+  end
 
   def name
     @name ||= path.sub(/.*\/#{Regexp.quote task_name.to_s}\/(.*)/, '\1')
   end
 
-
  def short_path
    [task_name, name] * "/"
  end
@@ -105,10 +146,6 @@ class Step
    workflow.to_s + "#" + short_path
  end
 
-  def task_name
-    @task_name ||= task.name
-  end
-
  def task_signature
    [workflow.to_s, task_name] * "#"
  end