rbbt-util 5.32.25 → 5.32.30
- checksums.yaml +4 -4
- data/lib/rbbt/annotations/annotated_array.rb +4 -0
- data/lib/rbbt/annotations/util.rb +29 -0
- data/lib/rbbt/entity.rb +3 -1
- data/lib/rbbt/hpc/orchestrate/batches.rb +152 -0
- data/lib/rbbt/hpc/orchestrate/chains.rb +173 -0
- data/lib/rbbt/hpc/orchestrate/rules.rb +70 -0
- data/lib/rbbt/hpc/orchestrate.old.rb +220 -0
- data/lib/rbbt/hpc/orchestrate.rb +24 -200
- data/lib/rbbt/hpc/slurm.rb +1 -0
- data/lib/rbbt/persist/tsv.rb +1 -1
- data/lib/rbbt/tsv/excel.rb +16 -8
- data/lib/rbbt/util/log.rb +6 -2
- data/lib/rbbt/util/migrate.rb +6 -1
- data/lib/rbbt/util/misc/inspect.rb +4 -1
- data/lib/rbbt/util/misc.rb +5 -0
- data/lib/rbbt/util/python.rb +1 -1
- data/lib/rbbt/workflow/definition.rb +1 -1
- data/lib/rbbt/workflow/examples.rb +0 -65
- data/lib/rbbt/workflow/integration/nextflow.rb +74 -14
- data/lib/rbbt/workflow/step/accessor.rb +0 -70
- data/lib/rbbt/workflow/step/dependencies.rb +8 -2
- data/lib/rbbt/workflow/step/run.rb +1 -1
- data/lib/rbbt/workflow/step/save_load_inputs.rb +175 -0
- data/lib/rbbt/workflow/step.rb +2 -1
- data/lib/rbbt/workflow/task.rb +2 -2
- data/lib/rbbt/workflow.rb +9 -2
- data/share/rbbt_commands/hpc/tail +0 -13
- data/share/rbbt_commands/lsf/tail +0 -13
- data/share/rbbt_commands/slurm/tail +0 -13
- data/share/rbbt_commands/tsv/keys +14 -15
- data/share/rbbt_commands/tsv/read_excel +2 -2
- data/share/rbbt_commands/workflow/task +11 -5
- data/test/rbbt/annotations/test_util.rb +11 -0
- data/test/rbbt/hpc/orchestrate/test_batches.rb +113 -0
- data/test/rbbt/hpc/orchestrate/test_chains.rb +139 -0
- data/test/rbbt/hpc/orchestrate/test_rules.rb +92 -0
- data/test/rbbt/hpc/test_orchestrate.rb +144 -0
- data/test/rbbt/tsv/test_excel.rb +38 -4
- data/test/rbbt/util/test_misc.rb +4 -0
- data/test/rbbt/workflow/step/test_dependencies.rb +14 -13
- data/test/rbbt/workflow/step/test_save_load_inputs.rb +46 -0
- metadata +17 -2
@@ -254,7 +254,12 @@ class Step
     when :bootstrap
       cpus = rest.nil? ? nil : rest.first

-
+      if cpus.nil?
+        keys = ['bootstrap'] + list.collect{|d| [d.task_name, d.task_signature] }.flatten.uniq
+        cpus = config('dep_cpus', *keys, :default => [5, list.length / 2].min)
+      elsif Symbol === cpus
+        cpus = config('dep_cpus', cpus, :default => [5, list.length / 2].min)
+      end

       respawn = rest && rest.include?(:respawn)
       respawn = false if rest && rest.include?(:norespawn)
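The added branch lets the CPU count used to bootstrap dependencies come from the `dep_cpus` config key (looked up under a `bootstrap` token plus the dependency task names, or under an explicitly passed symbol), falling back to at most 5 CPUs or half the number of dependencies. A minimal sketch of just that fallback arithmetic, in plain Ruby with no rbbt involved (the dependency counts are illustrative):

```ruby
# Fallback used when no dep_cpus config key matches:
# cap at 5 CPUs or half the dependency list, whichever is smaller.
[1, 4, 10, 40].each do |n_deps|
  default_cpus = [5, n_deps / 2].min
  puts "#{n_deps} dependencies -> #{default_cpus} CPUs by default"
end
# 1 dependencies -> 0 CPUs by default
# 4 dependencies -> 2 CPUs by default
# 10 dependencies -> 5 CPUs by default
# 40 dependencies -> 5 CPUs by default
```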
@@ -369,7 +374,8 @@ class Step
       next unless step.dependencies and step.dependencies.any?
       (step.dependencies + step.input_dependencies).each do |step_dep|
         next unless step.dependencies.include?(step_dep)
-        next if step_dep.done? or step_dep.running? or
+        next if step_dep.done? or step_dep.running? or
+          (ComputeDependency === step_dep and (step_dep.compute == :nodup or step_dep.compute == :ignore))
         dep_step[step_dep.path] ||= []
         dep_step[step_dep.path] << step
       end
@@ -419,7 +419,7 @@ class Step
     set_info :dependencies, dependencies.collect{|dep| [dep.task_name, dep.name, dep.path]}

     config_keys = Rbbt::Config::GOT_KEYS[config_keys_pre.length..-1]
-    set_info :config_keys, config_keys
+    set_info :config_keys, config_keys.uniq

     if result.nil? && File.exists?(self.tmp_path) && ! File.exists?(self.path)
       Open.mv self.tmp_path, self.path
@@ -0,0 +1,175 @@
+module Workflow
+  def self.load_inputs(dir, input_names, input_types)
+    inputs = {}
+    if File.exists?(dir) && ! File.directory?(dir)
+      Log.debug "Loading inputs from #{dir}, not a directory trying as tar.gz"
+      tarfile = dir
+      digest = CMD.cmd("md5sum '#{tarfile}'").read.split(" ").first
+      tmpdir = Rbbt.tmp.input_bundle[digest].find
+      Misc.untar(tarfile, tmpdir) unless File.exists? tmpdir
+      files = tmpdir.glob("*")
+      if files.length == 1 && File.directory?(files.first)
+        tmpdir = files.first
+      end
+      load_inputs(tmpdir, input_names, input_types)
+    else
+      dir = Path.setup(dir.dup)
+      input_names.each do |input|
+        file = dir[input].find
+        file = dir.glob(input.to_s + ".*").reject{|f| f =~ /\.md5$/}.first if file.nil? or not (File.symlink?(file) || file.exists?)
+        Log.debug "Trying #{ input }: #{file}"
+        next unless file and (File.symlink?(file) || file.exists?)
+
+        type = input_types[input]
+
+        type = :io if file.split(".").last == 'as_io'
+
+        type = :path if file.split(".").last == 'as_path'
+
+        case type
+        when :path
+          inputs[input.to_sym] = Open.realpath(Open.read(file).strip)
+        when :io
+          inputs[input.to_sym] = Open.open(Open.realpath(file))
+        when :file, :binary
+          Log.debug "Pointing #{ input } to #{file}"
+          if file =~ /\.yaml/
+            inputs[input.to_sym] = YAML.load(Open.read(file))
+          else
+            if File.symlink?(file)
+              link_target = File.expand_path(File.readlink(file), File.dirname(file))
+              inputs[input.to_sym] = link_target
+            else
+              inputs[input.to_sym] = Open.realpath(file)
+            end
+          end
+        when :text
+          Log.debug "Reading #{ input } from #{file}"
+          inputs[input.to_sym] = Open.read(file)
+        when :array
+          Log.debug "Reading array #{ input } from #{file}"
+          inputs[input.to_sym] = Open.read(file).split("\n")
+        when :tsv
+          Log.debug "Opening tsv #{ input } from #{file}"
+          inputs[input.to_sym] = TSV.open(file)
+        when :boolean
+          inputs[input.to_sym] = (file.read.strip == 'true')
+        else
+          Log.debug "Loading #{ input } from #{file}"
+          inputs[input.to_sym] = file.read.strip
+        end
+
+      end
+      inputs = IndiferentHash.setup(inputs)
+
+      dir.glob("*#*").each do |od|
+        name = File.basename(od)
+        value = Open.read(od)
+        Log.debug "Loading override dependency #{ name } as #{value}"
+        inputs[name] = value.chomp
+      end
+
+      inputs
+    end
+  end
+
+  def task_inputs_from_directory(task_name, directory)
+    task_info = self.task_info(task_name)
+    Workflow.load_inputs(directory, task_info[:inputs], task_info[:input_types])
+  end
+
+  def job_for_directory_inputs(task_name, directory, jobname = nil)
+    inputs = task_inputs_from_directory(task_name, directory)
+    job(task_name, jobname, inputs)
+  end
+
+end
+
+class Step
+  def self.save_inputs(inputs, input_types, input_options, dir)
+    inputs.each do |name,value|
+      type = input_types[name]
+      type = type.to_s if type
+      path = File.join(dir, name.to_s)
+
+      path = path + '.as_io' if (IO === value || Step === value) && ! (input_options[name] && input_options[name][:nofile])
+      Log.debug "Saving job input #{name} (#{type}) into #{path}"
+
+      case
+      when IO === value
+        Open.write(path, value.to_s)
+      when Step === value
+        Open.ln_s(value.path, path)
+      when type.to_s == "binary"
+        if String === value && File.exists?(value)
+          value = File.expand_path(value)
+          Open.ln_s(value, path)
+        elsif String === value && Misc.is_filename?(value, false)
+          Open.write(path + '.as_path' , value)
+        else
+          Open.write(path, value, :mode => 'wb')
+        end
+      when type.to_s == "file"
+        if String === value && File.exists?(value)
+          value = File.expand_path(value)
+          Open.ln_s(value, path)
+        else
+          value = value.collect{|v| v = "#{v}" if Path === v; v } if Array === value
+          value = "#{value}" if Path === value
+          Open.write(path + '.yaml', value.to_yaml)
+        end
+      when Array === value
+        Open.write(path, value.collect{|v| Step === v ? v.path : v.to_s} * "\n")
+      when IO === value
+        if value.filename && String === value.filename && File.exists?(value.filename)
+          Open.ln_s(value.filename, path)
+        else
+          Open.write(path, value)
+        end
+      else
+        Open.write(path, value.to_s)
+      end
+    end.any?
+  end
+
+  def self.save_job_inputs(job, dir, options = nil)
+    options = IndiferentHash.setup options.dup if options
+
+    task_name = Symbol === job.overriden ? job.overriden : job.task_name
+    workflow = job.workflow
+    workflow = Kernel.const_get workflow if String === workflow
+    if workflow
+      task_info = IndiferentHash.setup(workflow.task_info(task_name))
+      input_types = IndiferentHash.setup(task_info[:input_types])
+      input_options = IndiferentHash.setup(task_info[:input_options])
+      task_inputs = IndiferentHash.setup(task_info[:inputs])
+      input_defaults = IndiferentHash.setup(task_info[:input_defaults])
+    else
+      task_info = IndiferentHash.setup({})
+      input_types = IndiferentHash.setup({})
+      task_inputs = IndiferentHash.setup({})
+      task_options = IndiferentHash.setup({})
+      input_defaults = IndiferentHash.setup({})
+    end
+
+    inputs = IndiferentHash.setup({})
+    real_inputs = job.real_inputs || job.info[:real_inputs]
+    job.recursive_inputs.zip(job.recursive_inputs.fields).each do |value,name|
+      next unless task_inputs.include? name.to_sym
+      next unless real_inputs.include? name.to_sym
+      next if options && ! options.include?(name)
+      next if value.nil?
+      next if input_defaults[name] == value
+      inputs[name] = value
+    end
+
+    if options && options.include?('override_dependencies')
+      inputs.merge!(:override_dependencies => open[:override_dependencies])
+      input_types = IndiferentHash.setup(input_types.merge(:override_dependencies => :array))
+    end
+
+    save_inputs(inputs, input_types, input_options, dir)
+
+    inputs.keys
+  end
+end
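The new file pairs `Step.save_job_inputs` with `Workflow.load_inputs`, so the non-default inputs of a job can be dumped to a directory (or a tar.gz bundle) and later turned back into an equivalent job, for example on an HPC node. A minimal round-trip sketch; the workflow `MyWF`, its `greet` task, and the `/tmp` directory are made up for illustration, and the snippet is untested:

```ruby
require 'rbbt/workflow'
require 'fileutils'

# Hypothetical workflow used only to illustrate the round trip
module MyWF
  extend Workflow

  input :name, :string
  task :greet => :string do |name| "Hello #{name}" end
end

job = MyWF.job(:greet, nil, :name => "world")

dir = "/tmp/greet_inputs"
FileUtils.mkdir_p dir
Step.save_job_inputs(job, dir)   # writes one file per non-default input

# Later (possibly on another machine) rebuild an equivalent job from the directory
restored = MyWF.job_for_directory_inputs(:greet, dir)
puts restored.run
```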
data/lib/rbbt/workflow/step.rb CHANGED
@@ -6,6 +6,7 @@ require 'rbbt/workflow/step/accessor'
 require 'rbbt/workflow/step/prepare'
 require 'rbbt/workflow/step/status'
 require 'rbbt/workflow/step/info'
+require 'rbbt/workflow/step/save_load_inputs'

 class Step
   attr_accessor :clean_name, :path, :task, :workflow, :inputs, :dependencies, :bindings
@@ -321,7 +322,7 @@ class Step
   def load
     res = begin
             @result = nil if IO === @result && @result.closed?
-            if @result && @path != @result
+            if @result && @path != @result && ! StreamArray === @result
               res = @result
             else
               join if not done?
data/lib/rbbt/workflow/task.rb CHANGED
@@ -90,11 +90,11 @@ module Task

       maps = (Array === dep and Hash === dep.last) ? dep.last.keys : []
       raise "Dependency task not found: #{dep}" if task.nil?
-      next if seen.include? [wf, task.name]
+      next if seen.include? [wf, task.name, maps]

       task.workflow = wf if wf

-      seen << [wf, task.name]
+      seen << [wf, task.name, maps]
       new_inputs = task.inputs - maps
       next unless new_inputs.any?
       if task_inputs[task].nil?
data/lib/rbbt/workflow.rb CHANGED
@@ -244,7 +244,7 @@ module Workflow
     when :hash
       clean_inputs = Annotated.purge(inputs)
       clean_inputs = clean_inputs.collect{|i| Symbol === i ? i.to_s : i }
-      deps_str = dependencies.collect{|d| (Step === d || (defined?(RemoteStep) && RemoteStep === Step)) ? "Step: " << (d.overriden
+      deps_str = dependencies.collect{|d| (Step === d || (defined?(RemoteStep) && RemoteStep === Step)) ? "Step: " << (Symbol === d.overriden ? d.path : d.short_path) : d }
       key_obj = {:inputs => clean_inputs, :dependencies => deps_str }
       key_str = Misc.obj2str(key_obj)
       hash_str = Misc.digest(key_str)
@@ -465,7 +465,14 @@ module Workflow
     extension = nil
     if dependencies.any?
       dep_basename = File.basename(dependencies.last.path)
-
+      if dep_basename.include? "."
+        parts = dep_basename.split(".")
+        extension = [parts.pop]
+        while parts.last.length <= 4
+          extension << parts.pop
+        end
+        extension = extension.reverse * "."
+      end
     end
   end

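The block added in the second hunk carries a compound extension from the last dependency over to the new step path: it keeps the final segment plus any preceding segments of four characters or fewer, so `genotypes.vcf.gz` yields `vcf.gz` while `profile.tsv` yields just `tsv`. A standalone sketch of that parsing in plain Ruby; the helper name `guess_extension` is only for illustration:

```ruby
# Mirrors the extension logic added to workflow.rb above.
def guess_extension(dep_basename)
  return nil unless dep_basename.include? "."
  parts = dep_basename.split(".")
  extension = [parts.pop]
  # keep pulling short trailing segments (<= 4 chars) into the extension
  while parts.last.length <= 4
    extension << parts.pop
  end
  extension.reverse * "."
end

puts guess_extension("genotypes.vcf.gz")  # => vcf.gz
puts guess_extension("profile.tsv")       # => tsv
```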
@@ -13,19 +13,6 @@ Queue a job in Marenostrum
 $ rbbt slurm tail <directory> [options]

 -h--help Print this help
--d--done Done jobs only
--e--error Error jobs only
--a--aborted SLURM aboted jobs
--r--running Running jobs only
--q--queued Queued jobs only
--j--job* Job ids
--s--search* Regular expression
--t--tail* Show the last lines of the STDERR
--p--progress Report progress of job and the dependencies
--SBP--sbatch_parameters show sbatch parameters
--PERF--procpath_performance show Procpath performance summary
--sacct--sacct_peformance show sacct performance summary
--bs--batch_system* Batch system to use: auto, lsf, slurm (default is auto-detect)
 EOF

 if options[:help]
@@ -13,19 +13,6 @@ Queue a job in Marenostrum
 $ rbbt slurm tail <directory> [options]

 -h--help Print this help
--d--done Done jobs only
--e--error Error jobs only
--a--aborted SLURM aboted jobs
--r--running Running jobs only
--q--queued Queued jobs only
--j--job* Job ids
--s--search* Regular expression
--t--tail* Show the last lines of the STDERR
--p--progress Report progress of job and the dependencies
--SBP--sbatch_parameters show sbatch parameters
--PERF--procpath_performance show Procpath performance summary
--sacct--sacct_peformance show sacct performance summary
--bs--batch_system* Batch system to use: auto, lsf, slurm (default is auto-detect)
 EOF

 if options[:help]
@@ -13,19 +13,6 @@ Queue a job in Marenostrum
 $ rbbt slurm tail <directory> [options]

 -h--help Print this help
--d--done Done jobs only
--e--error Error jobs only
--a--aborted SLURM aboted jobs
--r--running Running jobs only
--q--queued Queued jobs only
--j--job* Job ids
--s--search* Regular expression
--t--tail* Show the last lines of the STDERR
--p--progress Report progress of job and the dependencies
--SBP--sbatch_parameters show sbatch parameters
--PERF--procpath_performance show Procpath performance summary
--sacct--sacct_peformance show sacct performance summary
--bs--batch_system* Batch system to use: auto, lsf, slurm (default is auto-detect)
 EOF

 if options[:help]
@@ -41,19 +41,18 @@ parser = TSV::Parser.new TSV.get_stream(file), options.merge(:fields => [])

 options[:merge] = false if options[:merge] == "false"

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-end
+line = parser.first_line
+bar = Log::ProgressBar.new
+while line
+  bar.tick
+
+  line = Misc.fixutf8(line)
+  line = parser.process line
+  raise SKIP_LINE if line.empty?
+  parts = parser.chop_line line
+  key, values = parser.get_values parts
+  values = parser.cast_values values if parser.cast?
+
+  puts key
+  line = parser.stream.gets
 end
@@ -22,7 +22,7 @@ Use - to read from STDIN
 -h--help Print this help
 -s--sheet* Sheet to extract
 -skip--skip_rows* Initial rows to skip
-
+-o--original Dump the rows without parsing them into TSV
 EOF
 if options[:help]
   if defined? rbbt_usage
@@ -39,5 +39,5 @@ raise ParameterException, "No excel file given" if excelfile.nil?

 options[:zipped] ||= true if options[:merge]
 require 'rbbt/tsv/excel'
-puts TSV.excel(excelfile, options)
+puts TSV.excel(excelfile, options.merge(:text => options[:original]))

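With the new `-o`/`--original` flag the command forwards `:text => true` to `TSV.excel`, dumping the spreadsheet rows as plain text instead of parsing them into a TSV. Roughly equivalent Ruby, with a made-up file name and untested:

```ruby
require 'rbbt/tsv/excel'

# Print the sheet rows without parsing them into a TSV structure
puts TSV.excel("samples.xlsx", :text => true)
```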
@@ -104,7 +104,7 @@ def fix_options(workflow, task, job_options)
   elsif input_options[name] and input_options[name][:stream] and value == "-"
     STDIN
   else
-    if Array === value
+    if Array === value || IO === value
       value
     else
       array_separator = $array_separator
@@ -137,6 +137,8 @@ def fix_options(workflow, task, job_options)
       TSV.open(STDIN, :unnamed => true, :sep => $field_separator, :sep2 => ($array_separator || "|"))
     when (Misc.is_filename?(value) and String)
       TSV.open(value, :unnamed => true, :sep => $field_separator, :sep2 => ($array_separator || "|"))
+    when IO
+      TSV.open(value, :unnamed => true, :sep => $field_separator, :sep2 => ($array_separator || "|"))
     else
       TSV.open(StringIO.new(value), :unnamed => true, :sep => $field_separator, :sep2 => ($array_separator || "|"))
     end
@@ -571,10 +573,14 @@ when Step
   elsif detach
     exit! 0
   else
-    res.
-
-
-
+    if %w(float integer string boolean).include?(res.result_type.to_s)
+      out.puts res.load
+    else
+      res.join
+      Open.open(res.path, :mode => 'rb') do |io|
+        Misc.consume_stream(io, false, out)
+      end if Open.exist?(res.path) || Open.remote?(res.path) || Open.ssh?(res.path)
+    end
   end
 else
   if Array === res
@@ -1,6 +1,11 @@
 require File.join(File.expand_path(File.dirname(__FILE__)), '../..', 'test_helper.rb')
 require 'rbbt/annotations'

+module TestEntityString
+  extend Entity
+  self.annotation :code
+end
+
 class TestAnnotation < Test::Unit::TestCase
   def test_marshal
     a = "STRING"
@@ -9,5 +14,11 @@ class TestAnnotation < Test::Unit::TestCase
     assert !(Annotated === Marshal.load(Marshal.dump(a)))
     assert_equal a, Marshal.load(Marshal.dump(a))
   end
+
+  def test_hash
+    e = TestEntityString.setup("TEST", :code => 10)
+    assert_equal "TEST", Annotated.to_hash(e)[:literal]
+    assert_equal 10, Annotated.to_hash(e)[:info][:code]
+  end
 end

@@ -0,0 +1,113 @@
+require File.join(File.expand_path(File.dirname(__FILE__)), '../../..', 'test_helper.rb')
+require 'rbbt/workflow'
+require 'rbbt/hpc/orchestrate/batches'
+
+module TestWFA
+  extend Workflow
+
+  task :a1 => :string do self.task_name.to_s end
+
+  dep :a1
+  task :a2 => :string do self.task_name.to_s end
+
+  dep :a2
+  task :a3 => :string do self.task_name.to_s end
+end
+
+module TestWFB
+  extend Workflow
+
+  dep TestWFA, :a2
+  task :b1 => :string do self.task_name.to_s end
+
+  dep :b1
+  task :b2 => :string do self.task_name.to_s end
+end
+
+module TestWFC
+  extend Workflow
+
+  dep TestWFA, :a1
+  dep_task :c1, TestWFB, :b2
+
+  task :c2 => :string do self.task_name.to_s end
+
+  dep :c1
+  dep :c2
+  task :c3 => :string do self.task_name.to_s end
+
+  dep_task :c4, TestWFC, :c3
+end
+
+module TestWFD
+  extend Workflow
+
+  dep TestWFC, :c3, :jobname => "First c3"
+  dep TestWFC, :c3, :jobname => "Second c3"
+  task :d1 => :string do self.task_name.to_s end
+end
+
+class TestOrchestrate < Test::Unit::TestCase
+
+  RULES = IndiferentHash.setup(YAML.load(<<-EOF))
+---
+defaults:
+  queue: first_queue
+  time: 1h
+  log: 2
+  config_keys: key1 value1 token1
+chains:
+  chain_a_b:
+    tasks: TestWFB#b1, TestWFB#b2, TestWFA#a1, TestWFA#a2
+    config_keys: key2 value2 token2, key3 value3 token3.1 token3.2
+  chain_a:
+    workflow: TestWFA
+    tasks: a1, a2, a3
+    config_keys: key2 value2 token2, key3 value3 token3.1 token3.2
+  chain_b:
+    workflow: TestWFB
+    tasks: b1, b2
+  chain_b2:
+    tasks: TestWFB#b1, TestWFB#b2, TestWFA#a1
+  chain_d:
+    tasks: TestWFD#d1, TestWFC#c1, TestWFC#c2, TestWFC#c3
+TestWFA:
+  defaults:
+    log: 4
+    config_keys: key4 value4 token4
+    time: 10min
+  a1:
+    cpus: 10
+    config_keys: key5 value5 token5
+TestWFC:
+  defaults:
+    skip: true
+    log: 4
+    time: 10s
+EOF
+
+
+  def test_job_batches_d
+    job = TestWFD.job(:d1, nil)
+    job.recursive_clean
+
+    batches = HPC::Orchestration.job_batches(RULES, job)
+    assert_equal 3, batches.length
+  end
+
+  def test_job_batches_c3
+    job = TestWFC.job(:c3, nil)
+    job.recursive_clean
+
+    batches = HPC::Orchestration.job_batches(RULES, job)
+  end
+
+  def test_job_batches_c4
+    job = TestWFC.job(:c4, nil)
+    job.recursive_clean
+
+    batches = HPC::Orchestration.job_batches(RULES, job)
+    assert_equal 3, batches.length
+  end
+
+end