rbbt-util 5.39.0 → 5.40.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 12c5401cf3ecb8495ba5c21d0f3d3f35c741c1c335ffa0d3a7d3ee498f6daae8
-  data.tar.gz: 344f30b6651a272f5e9665d3ec6383b4a1d62ba7c64c7889af5cfc573b2285ac
+  metadata.gz: '088ac190b36a176e420d4bb1d7b8d28d777133568e45530219f6a0212a1a1144'
+  data.tar.gz: 47bdc31d247e5fd5d944f493690e8181bcc236cee74c61fefd5ce25711edf7da
 SHA512:
-  metadata.gz: 6e8305e1f3e74eeff9e5892592bc66bfc9aac3baa505503942b266c3de996408a9210fcfaa2f6073800fc763bf3cdf5b8f39a50a920696402657d2928a9add8f
-  data.tar.gz: 511c72833b4a8ef747e574376d44a3b4799f96f6a922301d3f405df1cd7aac79a2d2de388075aaab4fc3ef6962ee66ded3de8684227912399f41f7ad00006148
+  metadata.gz: 9afce115c221695ee896c1e1f884be9a1ba0f0ae98789a485a906e438252afe8292c5422f3eef57d77089bf55cc0b425988ab9b479b1f422eaa0abb7005cfdfa
+  data.tar.gz: 4cf4d1e86ce3ab256609aff4c66a3412ea1a9c0c87db08fd5cdad1b98c95147d452cc69d96009c9791cf698ea43903873ed0e4cebbc861bda85097864bed68e8
@@ -1,5 +1,5 @@
 module HPC
-  class SBATCH < Exception;
+  class BATCH_DRY_RUN < Exception;
     attr_accessor :directory
     def initialize(directory)
       @directory = directory
@@ -12,24 +12,32 @@ module HPC
       HPC::SLURM
     when 'lsf'
       HPC::LSF
+    when 'pbs'
+      HPC::PBS
     when 'auto'
       case $previous_commands.last
       when 'slurm'
         HPC::SLURM
       when 'lsf'
         HPC::LSF
+      when 'pbs'
+        HPC::PBS
       else
         case Rbbt::Config.get(:batch_system, :batch, :batch_system, :hpc, :HPC, :BATCH).to_s.downcase
         when 'slurm'
           HPC::SLURM
         when 'lsf'
           HPC::LSF
+        when 'pbd'
+          HPC::PBS
         else
           case ENV["BATCH_SYSTEM"].to_s.downcase
           when 'slurm'
             HPC::SLURM
           when 'lsf'
             HPC::LSF
+          when 'pbs'
+            HPC::PBS
           end
         end
       end
@@ -564,8 +572,8 @@ env > #{batch_options[:fenv]}
     def run_job(job, options = {})
       system = self.to_s.split("::").last

-      batch_base_dir, clean_batch_job, remove_batch_dir, procpath, tail, batch_dependencies, dry_run = Misc.process_options options,
-        :batch_base_dir, :clean_batch_job, :remove_batch_dir, :batch_procpath, :tail, :batch_dependencies, :dry_run,
+      batch_base_dir, clean_batch_job, remove_batch_dir, procpath, tail, batch_dependencies, dry_run, orchestration_rules_file = Misc.process_options options,
+        :batch_base_dir, :clean_batch_job, :remove_batch_dir, :batch_procpath, :tail, :batch_dependencies, :dry_run, :orchestration_rules,
         :batch_base_dir => File.expand_path(File.join('~/rbbt-batch'))

       if (batch_job = job.info[:batch_job]) && job_queued(batch_job)
@@ -586,6 +594,8 @@ env > #{batch_options[:fenv]}
       workflow = job.original_workflow ||job.workflow
       task_name = job.original_task_name || job.task_name

+      options = options.merge(HPC::Orchestration.job_rules(HPC::Orchestration.orchestration_rules(orchestration_rules_file), job)) if orchestration_rules_file
+
       workflows_to_load = job.rec_dependencies.select{|d| Step === d}.collect{|d| d.workflow }.compact.collect(&:to_s) - [workflow.to_s]

       TmpFile.with_file(nil, remove_batch_dir, :tmpdir => batch_base_dir, :prefix => "#{system}_rbbt_job-#{workflow.to_s}-#{task_name}-") do |batch_dir|
data/lib/rbbt/hpc/lsf.rb CHANGED
@@ -99,7 +99,7 @@ export BATCH_SYSTEM=#{batch_system}
       elsif dry_run
         STDERR.puts Log.color(:magenta, "To execute run: ") + Log.color(:blue, cmd)
         STDERR.puts Log.color(:magenta, "To monitor progress run (needs local rbbt): ") + Log.color(:blue, "rbbt lsf tail '#{batch_dir}'")
-        raise HPC::SBATCH, batch_dir
+        raise HPC::BATCH_DRY_RUN, batch_dir
       else
         Open.rm fsync
         Open.rm fexit
@@ -65,6 +65,13 @@ module HPC
       merge_rules(rules[workflow][task], workflow_rules)
     end

-
+    def self.job_rules(rules, job)
+      return {} if job.done? or job.error?
+      job_rules = task_specific_rules(rules, job.workflow.to_s, job.task_name.to_s)
+      job.dependencies.each do |dep|
+        job_rules = accumulate_rules(job_rules, job_rules(rules, dep))
+      end
+      job_rules
+    end
   end
 end
@@ -22,6 +22,25 @@ module HPC

     end

+    def self.orchestration_rules(orchestration_rules_file = nil)
+      rules = {}
+      if orchestration_rules_file
+        if Open.exists?(orchestration_rules_file)
+          rules = Misc.load_yaml(orchestration_rules_file)
+        elsif Rbbt.etc.batch[orchestration_rules_file].exists?
+          rules = Misc.load_yaml(Rbbt.etc.batch[orchestration_rules_file])
+        elsif Rbbt.etc.batch[orchestration_rules_file + '.yaml'].exists?
+          rules = Misc.load_yaml(Rbbt.etc.batch[orchestration_rules_file + '.yaml'])
+        else
+          raise "Orchestration rules file not found: #{orchestration_rules_file}"
+        end
+      elsif Rbbt.etc.batch["default.yaml"].exists?
+        rules = Misc.load_yaml(Rbbt.etc.batch["default.yaml"])
+      end
+
+      IndiferentHash.setup(rules)
+    end
+
     def orchestrate_job(job, options)
       options.delete "recursive_clean"
       options.delete "clean_task"
@@ -33,26 +52,11 @@ module HPC
       options.delete "load_inputs"
       options.delete "provenance"

+
       Log.high "Prepare for exec"
       prepare_for_execution(job)

-      if orchestration_rules_file = options[:orchestration_rules]
-        if Open.exists?(orchestration_rules_file)
-          rules = Misc.load_yaml(orchestration_rules_file)
-        elsif Rbbt.etc.slurm(orchestration_rules_file).exists?
-          rules = Misc.load_yaml(Rbbt.etc.slurm(orchestration_rules_file))
-        elsif Rbbt.etc.slurm(orchestration_rules_file + '.yaml').exists?
-          rules = Misc.load_yaml(Rbbt.etc.slurm(orchestration_rules_file + '.yaml'))
-        else
-          raise "Orchestration rules file not found: #{options[:orchestration_rules]}"
-        end
-      elsif Rbbt.etc.slurm["default.yaml"].exists?
-        rules = Misc.load_yaml(Rbbt.etc.slurm["default.yaml"])
-      else
-        rules = {}
-      end
-
-      IndiferentHash.setup(rules)
+      rules = HPC::Orchestration.orchestration_rules(options[:orchestration_rules])

       batches = HPC::Orchestration.job_batches(rules, job)
       Log.high "Compute #{batches.length} batches"
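
Note: the orchestration-rules lookup is now shared. HPC::Orchestration.orchestration_rules resolves an explicit path first, then the name under etc/batch (with or without a .yaml extension), and finally etc/batch/default.yaml, while job_rules accumulates the task-specific rules of a job and all of its dependencies. A minimal usage sketch; the rules file name 'my_rules' and the job variable are illustrative:

    require 'rbbt/hpc'

    # Resolve rules from an explicit path, etc/batch/my_rules[.yaml], or etc/batch/default.yaml
    rules = HPC::Orchestration.orchestration_rules('my_rules')

    # Collect the rules that apply to a job and its dependencies ('job' stands for any rbbt Step)
    job_rules = HPC::Orchestration.job_rules(rules, job)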
@@ -0,0 +1,177 @@
+require 'rbbt/hpc/batch'
+require 'rbbt/hpc/orchestrate'
+
+module HPC
+  module PBS
+    extend HPC::TemplateGeneration
+    extend HPC::Orchestration
+
+    def self.batch_system
+      "PBS"
+    end
+
+    def self.batch_system_variables
+      <<-EOF
+let TOTAL_PROCESORS="$(cat /proc/cpuinfo|grep ^processor |wc -l)"
+let MAX_MEMORY_DEFAULT="$(grep MemTotal /proc/meminfo|grep -o "[[:digit:]]*") / ( (1024 * $TOTAL_PROCESORS) / $PBS_CPUS_PER_TASK )"
+MAX_MEMORY="$MAX_MEMORY_DEFAULT"
+[ ! -z $PBS_MEM_PER_CPU ] && let MAX_MEMORY="$PBS_MEM_PER_CPU * $PBS_CPUS_PER_TASK"
+[ ! -z $PBS_MEM_PER_NODE ] && MAX_MEMORY="$PBS_MEM_PER_NODE"
+export MAX_MEMORY_DEFAULT
+export MAX_MEMORY
+export BATCH_JOB_ID=$PBS_JOBID
+export BATCH_SYSTEM=#{batch_system}
+
+cd ${PBS_O_WORKDIR}
+      EOF
+    end
+
+    def self.header(options = {})
+      options = options.dup
+
+      workdir = Misc.process_options options, :workdir
+      batch_dir = Misc.process_options options, :batch_dir
+      batch_name = Misc.process_options options, :batch_name
+
+      queue = Misc.process_options options, :queue
+      account = Misc.process_options options, :account
+      time = Misc.process_options options, :time
+      nodes = Misc.process_options options, :nodes
+
+      # PBS
+      place = Misc.process_options options, :place, :place => 'scatter'
+      system = Misc.process_options options, :partition
+      filesystems = Misc.process_options options, :filesystems
+
+      filesystems = filesystems * "," if Array === filesystems
+
+      # NOT USED
+      partition = Misc.process_options options, :partition
+      task_cpus = Misc.process_options options, :task_cpus
+      exclusive = Misc.process_options options, :exclusive
+      highmem = Misc.process_options options, :highmem
+      licenses = Misc.process_options options, :licenses
+      constraint = Misc.process_options options, :constraint
+      gres = Misc.process_options options, :gres
+
+      constraint = [constraint, "highmem"].compact * "&" if highmem
+
+      mem = Misc.process_options options, :mem
+      mem_per_cpu = Misc.process_options options, :mem_per_cpu
+
+      fout = File.join(batch_dir, 'std.out')
+      ferr = File.join(batch_dir, 'std.err')
+
+      time = Misc.format_seconds Misc.timespan(time) unless time.include? ":"
+
+      qsub_params = { "-l filesystems=" => filesystems,
+                      "-l system=" => system,
+                      "-l select=" => nodes,
+                      "-l place=" => place,
+                      "-l walltime=" => time,
+                      "-q " => queue,
+                      "-A " => account,
+                      "-o " => fout,
+                      "-e " => ferr,
+                      "-k doe" => true,
+                      # "cpus-per-task" => task_cpus,
+                      # "nodes" => nodes,
+                      # "time" => time,
+                      # "constraint" => constraint,
+                      # "exclusive" => exclusive,
+                      # "licenses" => licenses,
+                      # "gres" => gres,
+                      # "mem" => mem,
+                      # "mem-per-cpu" => mem_per_cpu,
+      }
+
+
+      header =<<-EOF
+#!/bin/bash
+      EOF
+
+      qsub_params.each do |name,value|
+        next if value.nil? || value == ""
+        if TrueClass === value
+          header << "#PBS #{name}" << "\n"
+        elsif Array === value
+          value.each do |v|
+            header << "#PBS #{name}\"#{v}\"" << "\n"
+          end
+        else
+          header << "#PBS #{name}\"#{value}\"" << "\n"
+        end
+      end
+
+      header
+    end
+
+    def self.run_template(batch_dir, dry_run)
+
+      fout = File.join(batch_dir, 'std.out')
+      ferr = File.join(batch_dir, 'std.err')
+      fjob = File.join(batch_dir, 'job.id')
+      fdep = File.join(batch_dir, 'dependencies.list')
+      fcfdep = File.join(batch_dir, 'canfail_dependencies.list')
+      fexit = File.join(batch_dir, 'exit.status')
+      fsync = File.join(batch_dir, 'sync.log')
+      fcmd = File.join(batch_dir, 'command.batch')
+
+      return if Open.exists?(fexit)
+
+      Log.info "Issuing PBS file: #{fcmd}"
+      Log.debug Open.read(fcmd)
+
+      if File.exist?(fjob)
+        job = Open.read(fjob).to_i
+      else
+
+        dependencies = Open.read(fdep).split("\n") if File.exist? fdep
+        canfail_dependencies = Open.read(fcfdep).split("\n") if File.exist? fcfdep
+
+        normal_dep_str = dependencies && dependencies.any? ? "afterok:" + dependencies * ":" : nil
+        canfail_dep_str = canfail_dependencies && canfail_dependencies.any? ? "afterany:" + canfail_dependencies * ":" : nil
+
+        if normal_dep_str.nil? && canfail_dep_str.nil?
+          dep_str = ""
+        else
+          dep_str = '-W depend=' + [normal_dep_str, canfail_dep_str].compact * ","
+        end
+
+        cmd = "qsub #{dep_str} '#{fcmd}'"
+
+        if File.exist?(fout)
+          return
+        elsif dry_run
+          STDERR.puts Log.color(:magenta, "To execute run: ") + Log.color(:blue, "squb '#{fcmd}'")
+          STDERR.puts Log.color(:magenta, "To monitor progress run (needs local rbbt): ") + Log.color(:blue, "rbbt pbs tail '#{batch_dir}'")
+          raise HPC::BATCH_DRY_RUN, batch_dir
+        else
+          Open.rm fsync
+          Open.rm fexit
+          Open.rm fout
+          Open.rm ferr
+
+          job = CMD.cmd(cmd).read.scan(/\d+/).first.to_i
+          Log.debug "SBATCH job id: #{job}"
+          Open.write(fjob, job.to_s)
+          job
+        end
+      end
+    end
+
+    def self.job_status(job = nil)
+      if job.nil?
+        CMD.cmd("qstat").read
+      else
+        begin
+          CMD.cmd("qstat #{job}").read
+        rescue
+          ""
+        end
+      end
+    end
+
+  end
+end
+
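
Note: HPC::PBS follows the same interface as HPC::SLURM and HPC::LSF: header turns qsub-style parameters into #PBS directives, run_template submits command.batch with qsub (chaining afterok/afterany dependencies through -W depend=), and job_status wraps qstat. A minimal header sketch with purely illustrative option values:

    require 'rbbt/hpc/pbs'

    header = HPC::PBS.header(:batch_dir => '/tmp/rbbt-batch/example',
                             :queue     => 'debug',
                             :account   => 'my_account',
                             :time      => '01:00:00',
                             :nodes     => 1)

    # header starts with "#!/bin/bash" followed by lines such as
    #   #PBS -q "debug"
    #   #PBS -l walltime="01:00:00"
    #   #PBS -l select="1"
    puts header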
@@ -130,7 +130,7 @@ export BATCH_SYSTEM=#{batch_system}
       elsif dry_run
         STDERR.puts Log.color(:magenta, "To execute run: ") + Log.color(:blue, "sbatch '#{fcmd}'")
         STDERR.puts Log.color(:magenta, "To monitor progress run (needs local rbbt): ") + Log.color(:blue, "rbbt slurm tail '#{batch_dir}'")
-        raise HPC::SBATCH, batch_dir
+        raise HPC::BATCH_DRY_RUN, batch_dir
       else
         Open.rm fsync
         Open.rm fexit
data/lib/rbbt/hpc.rb CHANGED
@@ -4,3 +4,4 @@ require 'rbbt/hpc/batch'
 require 'rbbt/hpc/orchestrate'
 require 'rbbt/hpc/slurm'
 require 'rbbt/hpc/lsf'
+require 'rbbt/hpc/pbs'
@@ -1,6 +1,6 @@
 module Path

-  def self.caller_lib_dir(file = nil, relative_to = ['lib', 'bin'])
+  def self.caller_lib_dir(file = nil, relative_to = ['lib', 'bin', 'LICENSE'])
     #file = caller.reject{|l|
     #  l =~ /rbbt\/(?:resource\.rb|workflow\.rb)/ or
     #  l =~ /rbbt\/resource\/path\.rb/ or
@@ -1,6 +1,91 @@
 module R
   module SVG

+    def self.plot(filename, data = nil, script = nil, width = nil, height = nil, options = {}, &block)
+      width ||= 600
+      height ||= 600
+      values = []
+
+      script ||= ""
+      if block_given?
+        s = StringIO.new
+        class << s
+          def method_missing(name, *args)
+            name = name.to_s
+            if name[-1] == '='
+              arg = args.first
+              value = if String === arg
+                        arg
+                      else
+                        R.ruby2R arg
+                      end
+              add("" << name[0..-2] << "=" << value)
+            else
+              args_strs = []
+              args.each do |arg|
+                value = if String === arg
+                          arg
+                        else
+                          R.ruby2R arg
+                        end
+                args_strs << value
+              end
+              add("" << name << "(" << args_strs * ", " << ")")
+            end
+          end
+
+          def add(line)
+            self.write line << "\n"
+          end
+        end
+        block.call(s)
+        s.rewind
+        script << "\n" << s.read
+      end
+      sources = [:plot, options[:source]].flatten.compact
+
+      if data
+        data.each do |k,v|
+          v = Array === v ? v : [v]
+          next if v == "NA" or v.nil? or v.include? "NA" or v.include? nil
+          values = v
+          break
+        end
+
+        values = [values] unless values.nil? or Array === values
+
+        field_classes = values.collect do |v|
+          case v
+          when FalseClass, TrueClass
+            "'logical'"
+          when Numeric
+            "'numeric'"
+          when String
+            if v.strip =~ /^[-+]?[\d\.]+$/
+              "'numeric'"
+            else
+              "'character'"
+            end
+          when Symbol
+            "'factor'"
+          else
+            ":NA"
+          end
+        end
+
+        options[:R_open] ||= "colClasses=c('character'," + field_classes * ", " + ')' if field_classes.any?
+
+        data.R <<-EOF, :plot, options
+rbbt.svg_plot("#{ filename }", width=#{ width }, height = #{ height }, function(){ #{script} })
+data = NULL
+        EOF
+      else
+        R.run <<-EOF, :plot, options
+rbbt.svg_plot("#{ filename }", width=#{ width }, height = #{ height }, function(){ #{script} })
+        EOF
+      end
+    end
+
     def self.ggplotSVG(*args)
       ggplot(*args)
     end
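
Note: R::SVG.plot mirrors the existing PNG helper: it assembles an R script fragment (from the script argument and/or the block DSL) and hands it to the new rbbt.svg_plot R function, which opens an svg() device, evaluates the script, and closes the device. A minimal sketch, assuming R and the rbbt R utilities are available; the output path is illustrative:

    require 'rbbt/util/R'

    # Render a simple base-R plot into an SVG file; no data frame is passed,
    # so the script is evaluated through R.run / rbbt.svg_plot directly
    R::SVG.plot('/tmp/example.svg', nil, 'plot(1:10)', 400, 400)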
data/share/Rlib/util.R CHANGED
@@ -577,6 +577,16 @@ rbbt.get.modes <- function(x,bw = NULL,spar = NULL) {

 #{{{ PLOTS

+rbbt.svg_plot <- function(filename, p, width=500, height=500, ...){
+    svg(filename=filename, width=width, height=height, ...);
+    if (is.function(p)) {
+        p()
+    }else{
+        eval(parse(text=p));
+    }
+    dev.off()
+}
+
 rbbt.png_plot <- function(filename, p, width=500, height=500, ...){
     png(filename=filename, width=width, height=height, type='cairo', ...);
     if (is.function(p)) {
@@ -751,6 +761,8 @@ rbbt.plot.venn <- function(data, a=NULL, category=NULL, fill=NULL, ...) {
         fill=rbbt.plot.set_colors(dim(data)[2], "Set3")
     }

+    fill=fill[0:dim(data)[2]]
+
     group.matches <- function(data, fields) {
         sub = data
         for (i in 1:length(fields)) {
@@ -253,6 +253,8 @@ workdir.glob("**/command.batch").sort_by{|f| File.mtime(f)}.each do |fcmd|
     text = CMD.cmd('grep "^#SBATCH" ', :in => Open.read(fcmd)).read.strip
   when 'lsf'
     text = CMD.cmd('grep "^#BSUB" ', :in => Open.read(fcmd)).read.strip
+  when 'pbs'
+    text = CMD.cmd('grep "^#PBS" ', :in => Open.read(fcmd)).read.strip
   else
     text = ""
   end
@@ -73,7 +73,7 @@ class Step
         join
         self.load
       end
-    rescue HPC::SBATCH
+    rescue HPC::BATCH_DRY_RUN
     end
   end
 end
@@ -32,16 +32,17 @@ $slurm_options = SOPT.get <<EOF
 -p--partition* Partition
 -t--task_cpus* Tasks
 -tm--time* Time
--m--mem* SLURM minimum memory
---gres* SLURM Generic resources
--mcpu--mem_per_cpu* SLURM minimum memory per CPU
--lin--licenses* SLURM licenses
--cons--constraint* SLURM constraint
+-m--mem* minimum memory
+--gres* Generic resources
+-mcpu--mem_per_cpu* minimum memory per CPU
+-lin--licenses* licenses
+-cons--constraint* constraint
 -W--workflows* Additional workflows
 -rmb--remove_batch_dir Remove the batch working directory (command, STDIN, exit status, ...)
 -bs--batch_system* Batch system to use: auto, lsf, slurm (default is auto-detect)
 -lmod--lua_modules* Lua Modules to load
 -co--conda* Conda environment to use
+-OR--orchestration_rules* Orchestration rules
 EOF

 batch_system = $slurm_options.delete :batch_system
@@ -58,9 +59,9 @@ class Step
       self.load
     else
       begin
-        Log.debug "Issuing SLURM job for #{self.path}"
+        Log.debug "Issuing BATCH job for #{self.path}"
         HPC::BATCH_MODULE.run_job(self, $slurm_options)
-      rescue HPC::SBATCH
+      rescue HPC::BATCH_DRY_RUN
       end
     end
   end
@@ -253,6 +253,8 @@ workdir.glob("**/command.batch").sort_by{|f| File.mtime(f)}.each do |fcmd|
     text = CMD.cmd('grep "^#SBATCH" ', :in => Open.read(fcmd)).read.strip
   when 'lsf'
     text = CMD.cmd('grep "^#BSUB" ', :in => Open.read(fcmd)).read.strip
+  when 'pbs'
+    text = CMD.cmd('grep "^#PBS" ', :in => Open.read(fcmd)).read.strip
   else
     text = ""
   end
@@ -73,7 +73,7 @@ class Step
         join
         self.load
       end
-    rescue HPC::SBATCH
+    rescue HPC::BATCH_DRY_RUN
     end
   end
 end
@@ -32,16 +32,17 @@ $slurm_options = SOPT.get <<EOF
 -p--partition* Partition
 -t--task_cpus* Tasks
 -tm--time* Time
--m--mem* SLURM minimum memory
---gres* SLURM Generic resources
--mcpu--mem_per_cpu* SLURM minimum memory per CPU
--lin--licenses* SLURM licenses
--cons--constraint* SLURM constraint
+-m--mem* minimum memory
+--gres* Generic resources
+-mcpu--mem_per_cpu* minimum memory per CPU
+-lin--licenses* licenses
+-cons--constraint* constraint
 -W--workflows* Additional workflows
 -rmb--remove_batch_dir Remove the batch working directory (command, STDIN, exit status, ...)
 -bs--batch_system* Batch system to use: auto, lsf, slurm (default is auto-detect)
 -lmod--lua_modules* Lua Modules to load
 -co--conda* Conda environment to use
+-OR--orchestration_rules* Orchestration rules
 EOF

 batch_system = $slurm_options.delete :batch_system
@@ -58,9 +59,9 @@ class Step
       self.load
     else
       begin
-        Log.debug "Issuing SLURM job for #{self.path}"
+        Log.debug "Issuing BATCH job for #{self.path}"
         HPC::BATCH_MODULE.run_job(self, $slurm_options)
-      rescue HPC::SBATCH
+      rescue HPC::BATCH_DRY_RUN
       end
     end
   end
@@ -253,6 +253,8 @@ workdir.glob("**/command.batch").sort_by{|f| File.mtime(f)}.each do |fcmd|
     text = CMD.cmd('grep "^#SBATCH" ', :in => Open.read(fcmd)).read.strip
   when 'lsf'
     text = CMD.cmd('grep "^#BSUB" ', :in => Open.read(fcmd)).read.strip
+  when 'pbs'
+    text = CMD.cmd('grep "^#PBS" ', :in => Open.read(fcmd)).read.strip
   else
     text = ""
   end
@@ -73,7 +73,7 @@ class Step
         join
         self.load
       end
-    rescue HPC::SBATCH
+    rescue HPC::BATCH_DRY_RUN
     end
   end
 end
@@ -32,16 +32,17 @@ $slurm_options = SOPT.get <<EOF
 -p--partition* Partition
 -t--task_cpus* Tasks
 -tm--time* Time
--m--mem* SLURM minimum memory
---gres* SLURM Generic resources
--mcpu--mem_per_cpu* SLURM minimum memory per CPU
--lin--licenses* SLURM licenses
--cons--constraint* SLURM constraint
+-m--mem* minimum memory
+--gres* Generic resources
+-mcpu--mem_per_cpu* minimum memory per CPU
+-lin--licenses* licenses
+-cons--constraint* constraint
 -W--workflows* Additional workflows
 -rmb--remove_batch_dir Remove the batch working directory (command, STDIN, exit status, ...)
 -bs--batch_system* Batch system to use: auto, lsf, slurm (default is auto-detect)
 -lmod--lua_modules* Lua Modules to load
 -co--conda* Conda environment to use
+-OR--orchestration_rules* Orchestration rules
 EOF

 batch_system = $slurm_options.delete :batch_system
@@ -58,9 +59,9 @@ class Step
       self.load
     else
      begin
-        Log.debug "Issuing SLURM job for #{self.path}"
+        Log.debug "Issuing BATCH job for #{self.path}"
         HPC::BATCH_MODULE.run_job(self, $slurm_options)
-      rescue HPC::SBATCH
+      rescue HPC::BATCH_DRY_RUN
       end
     end
   end
@@ -0,0 +1,43 @@
+require File.expand_path(__FILE__).sub(%r(/test/.*), '/test/test_helper.rb')
+require File.expand_path(__FILE__).sub(%r(.*/test/), '').sub(/test_(.*)\.rb/,'\1')
+
+require 'rbbt/workflow'
+
+class TestPBS < Test::Unit::TestCase
+  def workflow
+    @workflow ||= Module.new do
+      extend Workflow
+
+      def self.to_s
+        "TestWorkflow"
+      end
+
+      input :name, :string
+      task :hello => :string do |name|
+        "hello #{name}"
+      end
+    end
+  end
+
+  def test_template
+    job = workflow.job(:hello, "TEST", :name => "world")
+
+    TmpFile.with_file do |batch_dir|
+
+      template = HPC::PBS.job_template(job, :batch_dir => batch_dir, :lua_modules => 'java')
+      ppp template
+
+    end
+  end
+
+  def __test_run_job
+    job = Sample.job(:mutect2, "small", :reference => "hg38")
+
+    job.clean
+
+    jobid = HPC::SLURM.run_job(job, :workflows => "HTS", :batch_modules => 'java', :env_cmd => '_JAVA_OPTIONS="-Xms1g -Xmx${MAX_MEMORY}m"', :queue => :debug, :time => '01:00:00', :config_keys => "HTS_light", :task_cpus => '10', :tail => true, :clean_task => "HTS#mutect2")
+    assert jobid.to_s =~ /^\d+$/
+  end
+
+end
+
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rbbt-util
 version: !ruby/object:Gem::Version
-  version: 5.39.0
+  version: 5.40.0
 platform: ruby
 authors:
 - Miguel Vazquez
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-08-02 00:00:00.000000000 Z
+date: 2023-10-12 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rake
@@ -206,6 +206,7 @@ files:
 - lib/rbbt/hpc/orchestrate/batches.rb
 - lib/rbbt/hpc/orchestrate/chains.rb
 - lib/rbbt/hpc/orchestrate/rules.rb
+- lib/rbbt/hpc/pbs.rb
 - lib/rbbt/hpc/slurm.rb
 - lib/rbbt/knowledge_base.rb
 - lib/rbbt/knowledge_base/enrichment.rb
@@ -476,6 +477,7 @@ files:
 - test/rbbt/hpc/orchestrate/test_rules.rb
 - test/rbbt/hpc/test_batch.rb
 - test/rbbt/hpc/test_orchestrate.rb
+- test/rbbt/hpc/test_pbs.rb
 - test/rbbt/hpc/test_slurm.rb
 - test/rbbt/knowledge_base/test_enrichment.rb
 - test/rbbt/knowledge_base/test_entity.rb
@@ -590,7 +592,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.4.13
+rubygems_version: 3.5.0.dev
 signing_key:
 specification_version: 4
 summary: Utilities for the Ruby Bioinformatics Toolkit (rbbt)
@@ -607,6 +609,7 @@ test_files:
 - test/rbbt/hpc/orchestrate/test_rules.rb
 - test/rbbt/hpc/test_batch.rb
 - test/rbbt/hpc/test_orchestrate.rb
+- test/rbbt/hpc/test_pbs.rb
 - test/rbbt/hpc/test_slurm.rb
 - test/rbbt/knowledge_base/test_enrichment.rb
 - test/rbbt/knowledge_base/test_entity.rb