rbbt-util 5.32.25 → 5.32.30

Files changed (43)
  1. checksums.yaml +4 -4
  2. data/lib/rbbt/annotations/annotated_array.rb +4 -0
  3. data/lib/rbbt/annotations/util.rb +29 -0
  4. data/lib/rbbt/entity.rb +3 -1
  5. data/lib/rbbt/hpc/orchestrate/batches.rb +152 -0
  6. data/lib/rbbt/hpc/orchestrate/chains.rb +173 -0
  7. data/lib/rbbt/hpc/orchestrate/rules.rb +70 -0
  8. data/lib/rbbt/hpc/orchestrate.old.rb +220 -0
  9. data/lib/rbbt/hpc/orchestrate.rb +24 -200
  10. data/lib/rbbt/hpc/slurm.rb +1 -0
  11. data/lib/rbbt/persist/tsv.rb +1 -1
  12. data/lib/rbbt/tsv/excel.rb +16 -8
  13. data/lib/rbbt/util/log.rb +6 -2
  14. data/lib/rbbt/util/migrate.rb +6 -1
  15. data/lib/rbbt/util/misc/inspect.rb +4 -1
  16. data/lib/rbbt/util/misc.rb +5 -0
  17. data/lib/rbbt/util/python.rb +1 -1
  18. data/lib/rbbt/workflow/definition.rb +1 -1
  19. data/lib/rbbt/workflow/examples.rb +0 -65
  20. data/lib/rbbt/workflow/integration/nextflow.rb +74 -14
  21. data/lib/rbbt/workflow/step/accessor.rb +0 -70
  22. data/lib/rbbt/workflow/step/dependencies.rb +8 -2
  23. data/lib/rbbt/workflow/step/run.rb +1 -1
  24. data/lib/rbbt/workflow/step/save_load_inputs.rb +175 -0
  25. data/lib/rbbt/workflow/step.rb +2 -1
  26. data/lib/rbbt/workflow/task.rb +2 -2
  27. data/lib/rbbt/workflow.rb +9 -2
  28. data/share/rbbt_commands/hpc/tail +0 -13
  29. data/share/rbbt_commands/lsf/tail +0 -13
  30. data/share/rbbt_commands/slurm/tail +0 -13
  31. data/share/rbbt_commands/tsv/keys +14 -15
  32. data/share/rbbt_commands/tsv/read_excel +2 -2
  33. data/share/rbbt_commands/workflow/task +11 -5
  34. data/test/rbbt/annotations/test_util.rb +11 -0
  35. data/test/rbbt/hpc/orchestrate/test_batches.rb +113 -0
  36. data/test/rbbt/hpc/orchestrate/test_chains.rb +139 -0
  37. data/test/rbbt/hpc/orchestrate/test_rules.rb +92 -0
  38. data/test/rbbt/hpc/test_orchestrate.rb +144 -0
  39. data/test/rbbt/tsv/test_excel.rb +38 -4
  40. data/test/rbbt/util/test_misc.rb +4 -0
  41. data/test/rbbt/workflow/step/test_dependencies.rb +14 -13
  42. data/test/rbbt/workflow/step/test_save_load_inputs.rb +46 -0
  43. metadata +17 -2
data/lib/rbbt/hpc/orchestrate.rb CHANGED
@@ -1,203 +1,10 @@
- require 'rbbt/workflow/util/orchestrator'
+ #require 'rbbt/workflow/util/orchestrator'
+ require 'rbbt/hpc/orchestrate/rules'
+ require 'rbbt/hpc/orchestrate/chains'
+ require 'rbbt/hpc/orchestrate/batches'
  module HPC
  module Orchestration

- def job_rules(rules, job)
- workflow = job.workflow.to_s
- task_name = job.task_name.to_s
- task_name = job.overriden.to_s if Symbol === job.overriden
-
- defaults = rules["defaults"] || {}
- defaults = defaults.merge(rules[workflow]["defaults"] || {}) if rules[workflow]
-
- job_rules = IndiferentHash.setup(defaults.dup)
-
- rules["chains"].each do |name,info|
- IndiferentHash.setup(info)
- chain_tasks = info[:tasks].split(/,\s*/)
-
- chain_tasks.each do |task|
- task_workflow, chain_task = task.split("#")
- chain_task, task_workflow = task_workflow, info[:workflow] if chain_task.nil? or chain_tasks.empty?
- job_rules["chain_tasks"] ||= {}
- job_rules["chain_tasks"][task_workflow] ||= []
- job_rules["chain_tasks"][task_workflow] << chain_task
- next unless task_name == chain_task.to_s && workflow == task_workflow.to_s
- config_keys = job_rules.delete :config_keys
- job_rules = IndiferentHash.setup(job_rules.merge(info))
- if config_keys
- config_keys.gsub!(/,\s+/,',')
- job_rules[:config_keys] = job_rules[:config_keys] ? config_keys + "," + job_rules[:config_keys] : config_keys
- end
- end
-
- if job_rules["chain_tasks"][workflow] && job_rules["chain_tasks"][workflow].include?(task_name)
- break
- else
- job_rules.delete "chain_tasks"
- end
- end if rules["chains"]
-
- config_keys = job_rules.delete :config_keys
- job_rules = IndiferentHash.setup(job_rules.merge(rules[workflow][task_name])) if rules[workflow] && rules[workflow][task_name]
-
- if config_keys
- config_keys.gsub!(/,\s+/,',')
- job_rules[:config_keys] = job_rules[:config_keys] ? config_keys + "," + job_rules[:config_keys] : config_keys
- end
-
- if rules["skip"] && rules["skip"][workflow]
- job_rules["skip"] = true if rules["skip"][workflow].split(/,\s*/).include? task_name
- end
-
- job_rules
- end
-
- def get_job_dependencies(job, job_rules = nil)
- deps = job.dependencies || []
- deps += job.input_dependencies || []
- deps
- end
-
- def get_recursive_job_dependencies(job)
- deps = get_job_dependencies(job)
- (deps + deps.collect{|dep| get_recursive_job_dependencies(dep) }).flatten
- end
-
- def piggyback(job, job_rules, job_deps)
- return false unless job_rules["skip"]
- final_deps = job_deps - job_deps.collect{|dep| get_recursive_job_dependencies(dep)}.flatten.uniq
- final_deps = final_deps.reject{|dep| dep.done? }
- return final_deps.first if final_deps.length == 1
- return false
- end
-
- def get_chains(job, rules, chains = {})
- job_rules = self.job_rules(rules, job)
- job_deps = get_job_dependencies(job)
-
- input_deps = []
- job.rec_dependencies.each do |dep|
- input_deps.concat dep.input_dependencies
- end
-
- job_deps.each do |dep|
- input_deps.concat dep.input_dependencies
- get_chains(dep, rules, chains)
- end
-
- job_deps.select do |dep|
- chained = job_rules["chain_tasks"] &&
- job_rules["chain_tasks"][job.workflow.to_s] && job_rules["chain_tasks"][job.workflow.to_s].include?(job.task_name.to_s) &&
- job_rules["chain_tasks"][dep.workflow.to_s] && job_rules["chain_tasks"][dep.workflow.to_s].include?(dep.task_name.to_s)
-
- dep_skip = dep.done? && ! input_deps.include?(dep) && self.job_rules(rules, dep)["skip"]
- chained || dep_skip
- end.each do |dep|
- chains[job] ||= []
- chains[job] << dep
- chains[job].concat chains[dep] if chains[dep]
- chains[job].uniq!
- end
-
- chains
- end
-
- def workload(job, rules, chains, options, seen = nil)
- return [] if job.done?
- if seen.nil?
- seen = {}
- target_job = true
- end
-
- job_rules = self.job_rules(rules, job)
- job_deps = get_job_dependencies(job)
-
- chain = chains[job]
- chain = chain.reject{|j| seen.include? j.path} if chain
- chain = chain.reject{|dep| dep.done? } if chain
- piggyback = piggyback(job, job_rules, job_deps)
- dep_ids = job_deps.collect do |dep|
- seen[dep.path] ||= nil if chain && chain.include?(dep) #&& ! job.input_dependencies.include?(dep)
- next_options = IndiferentHash.setup(options.dup)
- if piggyback and piggyback == dep
- next_options[:piggyback] ||= []
- next_options[:piggyback].push job
- ids = workload(dep, rules, chains, next_options, seen)
- else
- next_options.delete :piggyback
- ids = workload(dep, rules, chains, next_options, seen)
- end
-
- ids = [ids].flatten.compact.collect{|id| ['canfail', id] * ":"} if job.canfail_paths.include? dep.path
-
- seen[dep.path] = ids
- ids
- end.compact.flatten.uniq
-
- return seen[job.path] || dep_ids if seen.include?(job.path)
-
- if piggyback and seen[piggyback.path]
- return seen[job.path] = seen[piggyback.path]
- end
-
- job_rules.delete :chain_tasks
- job_rules.delete :tasks
- job_rules.delete :workflow
-
-
- option_config_keys = options[:config_keys]
-
- job_options = IndiferentHash.setup(options.merge(job_rules).merge(:batch_dependencies => dep_ids))
- job_options.delete :orchestration_rules
-
- config_keys = job_rules.delete(:config_keys)
- if config_keys
- config_keys.gsub!(/,\s+/,',')
- job_options[:config_keys] = job_options[:config_keys] ? config_keys + "," + job_options[:config_keys] : config_keys
- end
-
- if option_config_keys
- option_config_keys = option_config_keys.gsub(/,\s+/,',')
- job_options[:config_keys] = job_options[:config_keys] ? job_options[:config_keys] + "," + option_config_keys : option_config_keys
- end
-
- if options[:piggyback]
- manifest = options[:piggyback].uniq
- manifest += [job]
- manifest.concat chain if chain
-
- job = options[:piggyback].first
-
- job_rules = self.job_rules(rules, job)
- new_config_keys = self.job_rules(rules, job)[:config_keys]
- if new_config_keys
- new_config_keys = new_config_keys.gsub(/,\s+/,',')
- job_options[:config_keys] = job_options[:config_keys] ? job_options[:config_keys] + "," + new_config_keys : new_config_keys
- end
-
- job_options.delete :piggyback
- else
- manifest = [job]
- manifest.concat chain if chain
- end
-
- manifest.uniq!
-
- job_options[:manifest] = manifest.collect{|j| j.task_signature }
-
- job_options[:config_keys] = job_options[:config_keys].split(",").uniq * "," if job_options[:config_keys]
-
- if options[:dry_run]
- puts Log.color(:magenta, "Manifest: ") + Log.color(:blue, job_options[:manifest] * ", ") + " - tasks: #{job_options[:task_cpus] || 1} - time: #{job_options[:time]} - config: #{job_options[:config_keys]}"
- puts Log.color(:yellow, "Deps: ") + Log.color(:blue, job_options[:batch_dependencies]*", ")
- job_options[:manifest].first
- else
- run_job(job, job_options)
- end
- end
-
-
  def orchestrate_job(job, options)
  options.delete "recursive_clean"
  options.delete "clean_task"
@@ -211,9 +18,26 @@ module HPC
  rules ||= {}
  IndiferentHash.setup(rules)

- chains = get_chains(job, rules)
-
- workload(job, rules, chains, options)
+ batches = HPC::Orchestration.job_batches(rules, job)
+
+ batch_ids = {}
+ while batches.any?
+ top = batches.select{|b| b[:deps].nil? || (b[:deps] - batch_ids.keys).empty? }.first
+ raise "No batch without unmet dependencies" if top.nil?
+ batches.delete top
+ job_options = options.merge(top[:rules])
+ job_options.merge!(:batch_dependencies => top[:deps].nil? ? [] : top[:deps].collect{|d| batch_ids[d] })
+ job_options.merge!(:manifest => top[:jobs].collect{|d| d.task_signature })
+
+ if options[:dry_run]
+ puts Log.color(:magenta, "Manifest: ") + Log.color(:blue, job_options[:manifest] * ", ") + " - tasks: #{job_options[:task_cpus] || 1} - time: #{job_options[:time]} - config: #{job_options[:config_keys]}"
+ puts Log.color(:yellow, "Deps: ") + Log.color(:blue, job_options[:batch_dependencies]*", ")
+ batch_ids[top] = top[:top_level].task_signature
+ else
+ id = run_job(top[:top_level], job_options)
+ batch_ids[top] = id
+ end
+ end
  end

  end
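
Note: orchestrate_job now delegates batch construction to HPC::Orchestration.job_batches (new orchestrate/batches.rb) and only submits a batch once all the batches it depends on have an id. Judging purely from how the loop above reads each element, a batch looks roughly like the sketch below; the key layout is inferred from this hunk, not a documented API:

    # Hypothetical batch hash, shaped after the keys orchestrate_job reads above
    batch = {
      :top_level => job,                        # Step whose signature names the batch
      :jobs      => [job] + job.dependencies,   # steps bundled into one submission (manifest)
      :rules     => { "time" => "2:00:00", "task_cpus" => 4 },  # merged into the job options
      :deps      => []                          # batches that must be submitted first
    }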
data/lib/rbbt/hpc/slurm.rb CHANGED
@@ -51,6 +51,7 @@ export BATCH_SYSTEM=SLURM
  "nodes" => nodes,
  "time" => time,
  "exclusive" => exclusive,
+ "highmem" => highmem,
  "licenses" => licenses,
  "gres" => gres,
  "mem" => mem,
data/lib/rbbt/persist/tsv.rb CHANGED
@@ -78,7 +78,7 @@ module Persist
  end
  end

- def self.persist_tsv(source, filename, options = {}, persist_options = {}, &block)
+ def self.persist_tsv(source, filename = nil, options = {}, persist_options = {}, &block)
  persist_options[:prefix] ||= "TSV"

  if data = persist_options[:data]
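
Note: the filename argument is now optional. A hedged sketch of a call that relies on the new default; the block body and key/value here are illustrative, not taken from the gem's tests:

    # Sketch: filename simply omitted now that it defaults to nil
    tsv = Persist.persist_tsv(source) do |data|
      data["GENE1"] = ["value"]   # populate the yielded storage object
      data
    end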
data/lib/rbbt/tsv/excel.rb CHANGED
@@ -177,7 +177,7 @@ module TSV

  sheet ||= "0"
  workbook = RubyXL::Parser.parse file
- if sheet && sheet =~ /^\d+$/
+ if sheet && sheet =~ /^\d+$/
  sheet = workbook.worksheets.collect{|s| s.sheet_name }[sheet.to_i]
  end
  sheet_name = sheet
@@ -185,7 +185,9 @@ module TSV

  TmpFile.with_file :extension => Misc.sanitize_filename(sheet_name.to_s) do |filename|

- sheet = sheet ? workbook[sheet] : workbook.worksheets.first
+ sheet = sheet_name ? workbook[sheet_name] : workbook.worksheets.first
+
+ raise "No sheet #{sheet_name} found" if sheet.nil?

  rows = []

@@ -217,21 +219,27 @@ module TSV
  end

  def self.write(tsv, file, options = {})
- sheet = Misc.process_options options, :sheet
+ sheet, add_sheet = Misc.process_options options, :sheet, :add_sheet

  fields, rows = TSV._excel_data(tsv, options)

- book = RubyXL::Workbook.new
- sheet1 = book.worksheets.first
- sheet1.sheet_name = sheet if sheet
+ if Open.exists?(file) && add_sheet
+ book = RubyXL::Parser.parse file
+ sheet1 = book.add_worksheet(sheet)
+ else
+ book = RubyXL::Workbook.new
+ sheet1 = book.worksheets.first
+ sheet1.sheet_name = sheet if sheet
+ end

  fields.each_with_index do |e,i|
  sheet1.add_cell(0, i, e)
- end
+ end if fields

  rows.each_with_index do |cells,i|
+ i += 1 if fields
  cells.each_with_index do |e,j|
- sheet1.add_cell(i+1, j, e)
+ sheet1.add_cell(i, j, e)
  end
  end
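
Note: write gains an :add_sheet option that appends a worksheet to an existing file instead of recreating the workbook, and it now tolerates field-less TSVs. A hedged usage sketch, assuming the RubyXL-based writer above is reachable as TSV::XLSX.write (the module constant, variables and file names are assumptions):

    # First call creates report.xlsx, second one appends a new worksheet to it
    TSV::XLSX.write genes_tsv, "report.xlsx", :sheet => "genes"
    TSV::XLSX.write samples_tsv, "report.xlsx", :sheet => "samples", :add_sheet => true
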
data/lib/rbbt/util/log.rb CHANGED
@@ -108,8 +108,9 @@ module Log


  def self._ignore_stderr
- backup_stderr = STDERR.dup
- File.open('/dev/null', 'w') do |f|
+ begin
+ File.open('/dev/null', 'w') do |f|
+ backup_stderr = STDERR.dup
  STDERR.reopen(f)
  begin
  yield
@@ -117,6 +118,9 @@ module Log
  STDERR.reopen backup_stderr
  backup_stderr.close
  end
+ end
+ rescue Errno::ENOENT
+ yield
  end
  end
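
Note: _ignore_stderr now opens /dev/null before duplicating STDERR, and falls back to running the block unredirected when /dev/null does not exist (Errno::ENOENT), as can happen in minimal containers. Usage is unchanged; a minimal sketch:

    # Run a noisy block with STDERR silenced when /dev/null is available
    Log._ignore_stderr do
      system "some_chatty_tool --version"   # hypothetical command
    end
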
data/lib/rbbt/util/migrate.rb CHANGED
@@ -49,6 +49,7 @@ puts resource[path].find(search_path)
  test_str = options[:test] ? '-nv' : ''

  real_paths.each do |source_path|
+ Log.medium "Migrating #{source_path} #{options[:files].length} files to #{target} - #{Misc.fingerprint(options[:files])}}" if options[:files]
  if File.directory?(source_path) || source_path =~ /\/$/
  source_path += "/" unless source_path[-1] == "/"
  target += "/" unless target[-1] == "/"
@@ -76,8 +76,12 @@ puts resource[path].find(search_path)
  files_from_str = ""
  end

- cmd = "rsync -avztAXHP --copy-unsafe-links #{test_str} #{files_from_str} #{excludes_str} '#{source_path}' #{target_path} #{other * " "}"
+ #cmd = "rsync -avztAXHP --copy-unsafe-links #{test_str} #{files_from_str} #{excludes_str} '#{source_path}' #{target_path} #{other * " "}"
+
+ # rsync_args = "-avztAXHP --copy-unsafe-links"
+ rsync_args = "-avztAHP --copy-unsafe-links"

+ cmd = "rsync #{rsync_args} #{test_str} #{files_from_str} #{excludes_str} '#{source_path}' #{target_path} #{other * " "}"

  cmd << " && rm -Rf #{source_path}" if options[:delete] && ! options[:files]

data/lib/rbbt/util/misc/inspect.rb CHANGED
@@ -280,8 +280,11 @@ module Misc
  i = parts.index job
  begin
  workflow, task = parts.values_at i - 2, i - 1
- return Kernel.const_get(workflow).tasks.include? task.to_sym
+ Workflow.require_workflow workflow
+ #return Kernel.const_get(workflow).tasks.include? task.to_sym
+ return true
  rescue
+ Log.exception $!
  end
  false
  end
data/lib/rbbt/util/misc.rb CHANGED
@@ -70,6 +70,11 @@ module Misc
  end

  def self.timespan(str, default = "s")
+ if str.include?(":")
+ seconds, minutes, hours = str.split(":").reverse
+ return seconds.to_i + minutes.to_i * 60 + hours.to_i * 60 * 60
+ end
+
  tokens = {
  "s" => (1),
  "sec" => (1),
data/lib/rbbt/util/python.rb CHANGED
@@ -52,7 +52,7 @@ module RbbtPython
  if Array === imports
  pyfrom mod, :import => imports
  elsif Hash === imports
- pyimport mod, imports
+ pyimport mod, **imports
  else
  pyimport mod
  end
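
Note: splatting the Hash turns it into real keyword arguments, which Ruby 3 no longer does implicitly for a trailing Hash. An illustration of the difference with a stand-in method (this toy pyimport is not the PyCall helper itself):

    # Toy example of the Ruby 3 keyword-argument behavior the change addresses
    def pyimport(mod, **kwargs)
      puts "import #{mod} #{kwargs.inspect}"
    end

    imports = { :as => :np }
    pyimport :numpy, **imports   # keywords reach the method, as the diff now does
    # pyimport :numpy, imports   # Ruby 3: ArgumentError, the Hash stays positional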
data/lib/rbbt/workflow/definition.rb CHANGED
@@ -76,7 +76,7 @@ module Workflow
  extension :dep_task unless @extension
  returns workflow.tasks[oname].result_description if workflow.tasks.include?(oname) unless @result_description
  task name do
- raise RbbtException, "dependency not found in dep_task" if dependencies.empty?
+ raise RbbtException, "dep_task does not have any dependencies" if dependencies.empty?
  Step.wait_for_jobs dependencies.select{|d| d.streaming? }
  dep = dependencies.last
  dep.join
data/lib/rbbt/workflow/examples.rb CHANGED
@@ -25,71 +25,6 @@ module Workflow
  end.compact
  end

- def self.load_inputs(dir, input_names, input_types)
- inputs = {}
- if File.exists?(dir) && ! File.directory?(dir)
- Log.debug "Loading inputs from #{dir}, not a directory trying as tar.gz"
- tarfile = dir
- digest = CMD.cmd("md5sum '#{tarfile}'").read.split(" ").first
- tmpdir = Rbbt.tmp.input_bundle[digest].find
- Misc.untar(tarfile, tmpdir) unless File.exists? tmpdir
- files = tmpdir.glob("*")
- if files.length == 1 && File.directory?(files.first)
- tmpdir = files.first
- end
- load_inputs(tmpdir, input_names, input_types)
- else
- dir = Path.setup(dir.dup)
- input_names.each do |input|
- file = dir[input].find
- file = dir.glob(input.to_s + ".*").reject{|f| f =~ /\.md5$/}.first if file.nil? or not file.exists?
- Log.debug "Trying #{ input }: #{file}"
- next unless file and file.exists?
-
-
- case input_types[input]
- when :file, :binary
- Log.debug "Pointing #{ input } to #{file}"
- if file =~ /\.yaml/
- inputs[input.to_sym] = YAML.load(Open.read(file))
- else
- if File.symlink?(file)
- link_target = File.expand_path(File.readlink(file), File.dirname(file))
- inputs[input.to_sym] = link_target
- else
- inputs[input.to_sym] = Open.realpath(file)
- end
- end
- when :text
- Log.debug "Reading #{ input } from #{file}"
- inputs[input.to_sym] = Open.read(file)
- when :array
- Log.debug "Reading array #{ input } from #{file}"
- inputs[input.to_sym] = Open.read(file).split("\n")
- when :tsv
- Log.debug "Opening tsv #{ input } from #{file}"
- inputs[input.to_sym] = TSV.open(file)
- when :boolean
- inputs[input.to_sym] = (file.read.strip == 'true')
- else
- Log.debug "Loading #{ input } from #{file}"
- inputs[input.to_sym] = file.read.strip
- end
-
- end
- inputs = IndiferentHash.setup(inputs)
-
- dir.glob("*#*").each do |od|
- name = File.basename(od)
- value = Open.read(od)
- Log.debug "Loading override dependency #{ name } as #{value}"
- inputs[name] = value.chomp
- end
-
- inputs
- end
- end
-
  def example_inputs(task_name, example)
  inputs = {}
  IndiferentHash.setup(inputs)
data/lib/rbbt/workflow/integration/nextflow.rb CHANGED
@@ -1,37 +1,97 @@
  module Workflow
- def nextflow_file(file, name = nil)
+ def self.nextflow_file_params(file)
+ Open.read(file).scan(/params\.\w+/).collect{|p| p.split(".").last}.uniq
+ end
+
+ def self.nextflow_includes(file)
+ Open.read(file).scan(/^include\s*{\s*(.*?)(?:\s*as.*?)?}\s*from\s+"(.*?)"(?:\s*params.*)?/).collect{|p| p}.uniq
+ end
+
+ def self.nextflow_recursive_params(file)
+ params = nextflow_file_params(file)
+ dir = File.dirname(file)
+ nextflow_includes(file).inject(params) do |params,info|
+ name_str, included_file = info
+ included_file = File.join(dir, included_file)
+ included_file += '.nf' unless File.exists?(included_file) || ! File.exists?(included_file + '.nf')
+ name_str.split(";").each do |name|
+ name = name.strip
+ include_params = nextflow_recursive_params(included_file).collect{|p| [p,name] * "-"}
+ params += include_params
+ end
+ params
+ end
+ end
+
+ def nextflow_file(file, name = nil, output = nil)
+ name, output = nil, name if Hash === name
+
+ if Hash === output
+ result, output = output.collect.first
+ else
+ result = :text
+ end
+
  file = file + '.nf' unless File.exists?(file) || ! File.exists?(file + '.nf')
  file = File.expand_path(file)
- name ||= File.basename(file).sub(/\.nf$/,'')
- params = Open.read(file).scan(/params\.\w+/).collect{|p| p.split(".").last}.uniq
+ name ||= File.basename(file).sub(/\.nf$/,'').gsub(/\s/,'_')
+ params = Workflow.nextflow_recursive_params(file)

  params.each do |param|
- input param, :string
+ p,_sep, section = param.partition("-")
+ if section.nil? || section.empty?
+ input param, :string, "Nextflow param #{p}", nil, :nofile => true
+ else
+ input param, :string, "Nextflow param #{p} from import #{section}", nil, :nofile => true
+ end
  end
- task name => :text do
+ task name => result do
  work = file('work')
- output = file('output')
  profile = config :profile, :nextflow
- Misc.in_dir output do
+
+ new_inputs = inputs.zip(inputs.fields).collect do |v,f|
+ if String === v && m = v.match(/^JOB_FILE:(.*)/)
+ file(m[1])
+ elsif v.nil?
+ Rbbt::Config.get(['nextflow', f] * "_", 'default', f)
+ else
+ v
+ end
+ end
+
+ inputs.replace new_inputs
+
+ Misc.in_dir file('stage') do
  if profile
- cmd("nextflow run -work-dir #{work} -name #{clean_name} -ansi-log false -profile #{profile} #{file}", inputs.to_hash.merge('add_option_dashes' => true))
+ cmd("nextflow run -work-dir #{work} -ansi-log false -profile #{profile} #{file}", inputs.to_hash.merge('add_option_dashes' => true))
  else
- cmd("nextflow run -work-dir #{work} -name #{clean_name} -ansi-log false #{file}", inputs.to_hash.merge('add_option_dashes' => true))
+ cmd("nextflow run -work-dir #{work} -ansi-log false #{file}", inputs.to_hash.merge('add_option_dashes' => true))
  end
  end
+
+ output_file = file(output).glob.first if output
+ output_file = work[File.join('*', '*', output)].glob.first if output && output_file.nil?
+
+ if output_file.nil?
+ work[File.join("*", "*", "*")].glob * "\n"
+ else
+ Open.link output_file, self.tmp_path
+ #Open.rm_rf file('work')
+ nil
+ end
  end
  end

- def nextflow_dir(path)
+ def nextflow_dir(path, output = nil)
  main = File.join(path, 'main.nf')
- nextflow_file main, File.basename(path)
+ nextflow_file main, File.basename(path), output
  end

- def nextflow(path)
+ def nextflow(path, *args)
  if File.directory?(path)
- nextflow_dir path
+ nextflow_dir path, *args
  else
- nextflow_file path
+ nextflow_file path, *args
  end
  end
  end
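
Note: nextflow_file now takes an optional output spec, either a glob to harvest from the run or a one-entry Hash mapping the task's result type to that glob, and nextflow/nextflow_dir forward it. A hedged sketch of a workflow definition using the new signatures (workflow, script and pattern names are invented):

    # Hypothetical workflow; only the nextflow_file/nextflow_dir signatures come from the diff
    module ExampleNextflow
      extend Workflow

      # register pipeline.nf as task :pipeline, returning the first file
      # matching "results/*.vcf" as a :binary result
      nextflow_file "pipeline.nf", :pipeline, :binary => "results/*.vcf"

      # or point at a directory containing main.nf and pass a plain output glob
      nextflow_dir "modules/qc", "qc_report.html"
    end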
data/lib/rbbt/workflow/step/accessor.rb CHANGED
@@ -86,76 +86,6 @@ class Step
  end
  end

- def self.save_inputs(inputs, input_types, dir)
- inputs.each do |name,value|
- type = input_types[name]
- type = type.to_s if type
- path = File.join(dir, name.to_s)
-
- Log.debug "Saving job input #{name} (#{type}) into #{path}"
- case
- when Step === value
- Open.ln_s(value.path, path)
- when type.to_s == "file"
- if String === value && File.exists?(value)
- value = File.expand_path(value)
- Open.ln_s(value, path)
- else
- value = value.collect{|v| v = "#{v}" if Path === v; v }if Array === value
- value = "#{value}" if Path === value
- Open.write(path + '.yaml', value.to_yaml)
- end
- when Array === value
- Open.write(path, value.collect{|v| Step === v ? v.path : v.to_s} * "\n")
- when IO === value
- if value.filename && String === value.filename && File.exists?(value.filename)
- Open.ln_s(value.filename, path)
- else
- Open.write(path, value)
- end
- else
- Open.write(path, value.to_s)
- end
- end.any?
- end
-
- def self.save_job_inputs(job, dir, options = nil)
- options = IndiferentHash.setup options.dup if options
-
- task_name = Symbol === job.overriden ? job.overriden : job.task_name
- workflow = job.workflow
- workflow = Kernel.const_get workflow if String === workflow
- if workflow
- task_info = IndiferentHash.setup(workflow.task_info(task_name))
- input_types = IndiferentHash.setup(task_info[:input_types])
- task_inputs = IndiferentHash.setup(task_info[:inputs])
- input_defaults = IndiferentHash.setup(task_info[:input_defaults])
- else
- task_info = IndiferentHash.setup({})
- input_types = IndiferentHash.setup({})
- task_inputs = IndiferentHash.setup({})
- input_defaults = IndiferentHash.setup({})
- end
-
- inputs = IndiferentHash.setup({})
- real_inputs = job.real_inputs || job.info[:real_inputs]
- job.recursive_inputs.zip(job.recursive_inputs.fields).each do |value,name|
- next unless task_inputs.include? name.to_sym
- next unless real_inputs.include? name.to_sym
- next if options && ! options.include?(name)
- next if value.nil?
- next if input_defaults[name] == value
- inputs[name] = value
- end
-
- if options && options.include?('override_dependencies')
- inputs.merge!(:override_dependencies => open[:override_dependencies])
- input_types = IndiferentHash.setup(input_types.merge(:override_dependencies => :array))
- end
- save_inputs(inputs, input_types, dir)
-
- inputs.keys
- end

  def name
  @name ||= path.sub(/.*\/#{Regexp.quote task_name.to_s}\/(.*)/, '\1')