rbbt-util 5.34.12 → 5.34.13
- checksums.yaml +4 -4
- data/lib/rbbt/hpc/batch.rb +27 -3
- data/lib/rbbt/util/python/util.rb +2 -1
- data/share/rbbt_commands/hpc/clean +1 -1
- data/share/rbbt_commands/hpc/orchestrate +3 -1
- data/share/rbbt_commands/hpc/task +3 -1
- data/share/rbbt_commands/lsf/clean +1 -1
- data/share/rbbt_commands/lsf/orchestrate +3 -1
- data/share/rbbt_commands/lsf/task +3 -1
- data/share/rbbt_commands/slurm/clean +1 -1
- data/share/rbbt_commands/slurm/orchestrate +3 -1
- data/share/rbbt_commands/slurm/task +3 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9481e1e7a7e7341b95824a48c314d2316a7283e356ea916b424edb2f15886616
+  data.tar.gz: 5012fb19a8cb88d1e9764a2b6ab049e203c38bc2daf758e112c8e256dca68565
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e72b2d6f831128ace58d54e62fdad1f180acdd74d9c73304cc315760dd66c7705b733d024b62794c165a2ea00cdbf6b1d67569f1b40a8e19822424a1638ecd1f
+  data.tar.gz: f8ffee23f77ac7024a06b50d7b48ba6db900080fa06e78d44e61dd4274b4843d6fd203644a5c9be0b730e1728a3b0550e32fc503401744737dcc2bed7401ce8d
data/lib/rbbt/hpc/batch.rb
CHANGED
@@ -51,9 +51,15 @@ module HPC
 
       group, user, user_group, scratch_group_dir, projects_group_dir = options.values_at :group, :user, :user_group, :scratch_group_dir, :projects_group_dir
 
-      singularity_img, singularity_opt_dir, singularity_ruby_inline = options.values_at :singularity_img, :singularity_opt_dir, :singularity_ruby_inline
+      singularity_img, singularity_opt_dir, singularity_ruby_inline, singularity_mounts = options.values_at :singularity_img, :singularity_opt_dir, :singularity_ruby_inline, :singularity_mounts
+
+      singularity_cmd = %(singularity exec -e -B "#{File.expand_path singularity_opt_dir}":/singularity_opt/ -B "#{File.expand_path singularity_ruby_inline}":"/.singularity_ruby_inline":rw )
 
-
+      if singularity_mounts
+        singularity_mounts.split(",").each do |mount|
+          singularity_cmd += "-B #{ mount } "
+        end
+      end
 
       if contain && options[:hardened]
         singularity_cmd << %( -C -H "#{contain}" \
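The new singularity_mounts option is a comma-separated list of bind specifications that gets appended to the singularity exec command as -B flags. A minimal stand-alone sketch of that expansion, with a made-up mounts value:

    # Illustration only: expand a comma-separated mounts option into -B flags,
    # mirroring the hunk above. The mount paths here are hypothetical.
    singularity_mounts = "/data:/data,/scratch/project:/scratch"
    singularity_cmd = "singularity exec -e "

    if singularity_mounts
      singularity_mounts.split(",").each do |mount|
        singularity_cmd += "-B #{ mount } "
      end
    end

    puts singularity_cmd
    # => singularity exec -e -B /data:/data -B /scratch/project:/scratch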
@@ -150,6 +156,7 @@ EOF
            :mem_per_cpu,
            :gres,
            :lua_modules,
+           :conda,
            :contraints,
            :licenses,
            :batch_dir,
@@ -167,6 +174,7 @@ EOF
            :purge_deps,
            :singularity,
            :singularity_img,
+           :singularity_mounts,
            :singularity_opt_dir,
            :singularity_ruby_inline
          ]
@@ -188,6 +196,7 @@ EOF
            :env_cmd,
            :user_group,
            :singularity_img,
+           :singularity_mounts,
            :singularity_opt_dir,
            :singularity_ruby_inline,
            :singularity
@@ -284,6 +293,20 @@ EOF
       str
     end
 
+    def load_conda(env = nil)
+      return "" if env.nil? || env.empty?
+
+      <<-EOF
+if ! type conda | grep function &> /dev/null; then
+  if [ ! -z $CONDA_EXE ]; then
+    source "$(dirname $(dirname $CONDA_EXE))/etc/profile.d/conda.sh" &> /dev/null
+  fi
+fi
+conda activate #{ env }
+      EOF
+    end
+
+
     def batch_system_variables
       <<-EOF
 let MAX_MEMORY="$(grep MemTotal /proc/meminfo|grep -o "[[:digit:]]*") / 1024"
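The new load_conda helper returns a small shell preamble that sources conda.sh (when conda is not yet a shell function but $CONDA_EXE is set) and activates the requested environment. A stand-alone sketch of the helper with a hypothetical environment name, printing the generated fragment:

    # Copy of the helper above, runnable outside the HPC module;
    # "rbbt_env" is a made-up environment name.
    def load_conda(env = nil)
      return "" if env.nil? || env.empty?

      <<-EOF
    if ! type conda | grep function &> /dev/null; then
      if [ ! -z $CONDA_EXE ]; then
        source "$(dirname $(dirname $CONDA_EXE))/etc/profile.d/conda.sh" &> /dev/null
      fi
    fi
    conda activate #{ env }
      EOF
    end

    puts load_conda("rbbt_env")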
@@ -292,6 +315,7 @@ let MAX_MEMORY="$(grep MemTotal /proc/meminfo|grep -o "[[:digit:]]*") / 1024"
 
     def prepare_environment(options = {})
       modules = options[:lua_modules]
+      conda = options[:conda]
 
       prepare_environment = ""
 
@@ -382,7 +406,7 @@ echo "user_scratch: #{scratch_group_dir}/#{user}/{PKGDIR}/{TOPLEVEL}/{SUBPATH}"
         end
       end
 
-      batch_system_variables + load_modules(modules) + "\n" + functions + "\n" + prepare_environment
+      batch_system_variables + load_modules(modules) + "\n" + load_conda(conda) + "\n" + functions + "\n" + prepare_environment
     end
 
     def execute(options)
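With the conda option wired into prepare_environment, the batch script preamble is built by concatenating the batch-system variables, the Lua module loads, the conda activation, the helper functions and the per-job environment setup, in that order. A simplified stand-alone sketch of that composition; the stubs below are placeholders, not the real methods, and the module and environment names are hypothetical:

    # Placeholder stubs standing in for the methods shown in the diff.
    def batch_system_variables; "# batch system variables\n" end
    def load_modules(modules);  modules ? "# load lua modules: #{modules}\n" : "" end
    def load_conda(env);        env ? "# activate conda env: #{env}\n" : "" end

    functions           = "# helper shell functions"
    prepare_environment = "# per-job environment setup"

    puts batch_system_variables + load_modules("lua/5.3") + "\n" +
         load_conda("rbbt_env") + "\n" + functions + "\n" + prepare_environment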
data/lib/rbbt/util/python/util.rb
CHANGED
@@ -19,8 +19,9 @@ module RbbtPython
 
   def self.df2tsv(tuple, options = {})
     options = Misc.add_defaults options, :type => :list
+    IndiferentHash.setup options
     tsv = TSV.setup({}, options)
-    tsv.key_field = tuple.columns.name
+    tsv.key_field = options[:key_field] || tuple.columns.name
     tsv.fields = py2ruby_a(tuple.columns.values)
     keys = tuple.index.values
     PyCall.len(tuple.index).times do |i|
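df2tsv converts a pandas DataFrame (as exposed through PyCall) into a TSV object, and the new :key_field option overrides the key field name instead of taking it from tuple.columns.name. A hedged usage sketch; the require paths, the PyCall-based DataFrame construction and the field name are assumptions, and a working Python with pandas plus the pycall gem are needed:

    # Illustrative only: build a small pandas DataFrame through PyCall and
    # convert it with the patched df2tsv, overriding the key field name.
    require 'rbbt-util'
    require 'rbbt/util/python'
    require 'pycall/import'
    include PyCall::Import

    pyimport :pandas, as: :pd
    df = pd.DataFrame.new({ "a" => [1, 2], "b" => [3, 4] }, index: ["x", "y"])

    tsv = RbbtPython.df2tsv(df, :key_field => "ID")
    puts tsv.key_field   # => "ID" rather than the DataFrame's index name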
data/share/rbbt_commands/hpc/clean
CHANGED
@@ -138,7 +138,7 @@ workdir.glob("**/command.batch").sort_by{|f| File.mtime(f)}.each do |fcmd|
   fcadep = File.join(dir, 'canfail_dependencies.list')
   cadeps = Open.read(fcadep).split("\n") if File.exist?(fcadep)
 
-  aborted = error = true if aborted.nil? && error.nil?
+  aborted = error = true if ! done && aborted.nil? && error.nil?
   #if done || error || aborted || running || queued || jobid || search
   #  select = false
   #  select = true if done && exit_status && exit_status.to_i == 0
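The added "! done" guard changes how jobs with no recorded status are classified: previously any job without explicit abort or error information was counted as aborted, now finished jobs are left alone. A small stand-alone illustration of the defaulting rule (the values are made up):

    # Mirrors the one-line change above; done/aborted/error stand in for the
    # flags the script derives from the job's batch directory.
    def default_state(done, aborted, error)
      aborted = error = true if ! done && aborted.nil? && error.nil?
      [aborted, error]
    end

    p default_state(true,  nil, nil)   # => [nil, nil]    finished job is not reclassified
    p default_state(false, nil, nil)   # => [true, true]  unknown state counted as aborted/error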
data/share/rbbt_commands/hpc/orchestrate
CHANGED
@@ -13,6 +13,7 @@ $slurm_options = SOPT.get <<EOF
 --drbbt* Use development version of rbbt
 -sing--singularity Use Singularity
 -si--singularity_img* Singularity image to use
+-sm--singularity_mounts* Singularity image to use
 -ug--user_group* Use alternative user group for group project directory
 -c--contain* Contain in directory (using Singularity)
 -s--sync* Contain in directory and sync jobs
@@ -36,8 +37,9 @@ $slurm_options = SOPT.get <<EOF
 -cons--constraint* SLURM constraint
 -W--workflows* Additional workflows
 -rmb--remove_batch_basedir Remove the SLURM working directory (command, STDIN, exit status, ...)
--lmod--lua_modules* Lua Modules to load
 -bs--batch_system* Batch system to use: auto, lsf, slurm (default is auto-detect)
+-lmod--lua_modules* Lua Modules to load
+-co--conda* Conda environment to use
 -OR--orchestration_rules* Orchestration rules
 EOF
 
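Taken together, the new flags can be combined on the command line. An illustrative invocation, where the workflow, task, image path, mount paths and environment name are all hypothetical:

    rbbt hpc orchestrate MyWorkflow my_task \
        --singularity --singularity_img ~/images/rbbt.sif \
        --singularity_mounts /data:/data,/scratch:/scratch \
        --conda rbbt_env

The --singularity_mounts value is the comma-separated list expanded into -B binds in batch.rb above, and --conda is the environment passed to load_conda.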
data/share/rbbt_commands/hpc/task
CHANGED
@@ -12,6 +12,7 @@ $slurm_options = SOPT.get <<EOF
 --drbbt* Use development version of rbbt
 -sing--singularity Use Singularity
 -si--singularity_img* Singularity image to use
+-sm--singularity_mounts* Singularity image to use
 -ug--user_group* Use alternative user group for group project directory
 -c--contain* Contain in directory (using Singularity)
 -s--sync* Contain in directory and sync jobs
@@ -35,8 +36,9 @@ $slurm_options = SOPT.get <<EOF
 -cons--constraint* SLURM constraint
 -W--workflows* Additional workflows
 -rmb--remove_batch_dir Remove the batch working directory (command, STDIN, exit status, ...)
--lmod--lua_modules* Lua Modules to load
 -bs--batch_system* Batch system to use: auto, lsf, slurm (default is auto-detect)
+-lmod--lua_modules* Lua Modules to load
+-co--conda* Conda environment to use
 EOF
 
 batch_system = $slurm_options.delete :batch_system
data/share/rbbt_commands/lsf/clean
CHANGED
@@ -138,7 +138,7 @@ workdir.glob("**/command.batch").sort_by{|f| File.mtime(f)}.each do |fcmd|
   fcadep = File.join(dir, 'canfail_dependencies.list')
   cadeps = Open.read(fcadep).split("\n") if File.exist?(fcadep)
 
-  aborted = error = true if aborted.nil? && error.nil?
+  aborted = error = true if ! done && aborted.nil? && error.nil?
   #if done || error || aborted || running || queued || jobid || search
   #  select = false
   #  select = true if done && exit_status && exit_status.to_i == 0
data/share/rbbt_commands/lsf/orchestrate
CHANGED
@@ -13,6 +13,7 @@ $slurm_options = SOPT.get <<EOF
 --drbbt* Use development version of rbbt
 -sing--singularity Use Singularity
 -si--singularity_img* Singularity image to use
+-sm--singularity_mounts* Singularity image to use
 -ug--user_group* Use alternative user group for group project directory
 -c--contain* Contain in directory (using Singularity)
 -s--sync* Contain in directory and sync jobs
@@ -36,8 +37,9 @@ $slurm_options = SOPT.get <<EOF
 -cons--constraint* SLURM constraint
 -W--workflows* Additional workflows
 -rmb--remove_batch_basedir Remove the SLURM working directory (command, STDIN, exit status, ...)
--lmod--lua_modules* Lua Modules to load
 -bs--batch_system* Batch system to use: auto, lsf, slurm (default is auto-detect)
+-lmod--lua_modules* Lua Modules to load
+-co--conda* Conda environment to use
 -OR--orchestration_rules* Orchestration rules
 EOF
 
data/share/rbbt_commands/lsf/task
CHANGED
@@ -12,6 +12,7 @@ $slurm_options = SOPT.get <<EOF
 --drbbt* Use development version of rbbt
 -sing--singularity Use Singularity
 -si--singularity_img* Singularity image to use
+-sm--singularity_mounts* Singularity image to use
 -ug--user_group* Use alternative user group for group project directory
 -c--contain* Contain in directory (using Singularity)
 -s--sync* Contain in directory and sync jobs
@@ -35,8 +36,9 @@ $slurm_options = SOPT.get <<EOF
 -cons--constraint* SLURM constraint
 -W--workflows* Additional workflows
 -rmb--remove_batch_dir Remove the batch working directory (command, STDIN, exit status, ...)
--lmod--lua_modules* Lua Modules to load
 -bs--batch_system* Batch system to use: auto, lsf, slurm (default is auto-detect)
+-lmod--lua_modules* Lua Modules to load
+-co--conda* Conda environment to use
 EOF
 
 batch_system = $slurm_options.delete :batch_system
data/share/rbbt_commands/slurm/clean
CHANGED
@@ -138,7 +138,7 @@ workdir.glob("**/command.batch").sort_by{|f| File.mtime(f)}.each do |fcmd|
   fcadep = File.join(dir, 'canfail_dependencies.list')
   cadeps = Open.read(fcadep).split("\n") if File.exist?(fcadep)
 
-  aborted = error = true if aborted.nil? && error.nil?
+  aborted = error = true if ! done && aborted.nil? && error.nil?
   #if done || error || aborted || running || queued || jobid || search
   #  select = false
   #  select = true if done && exit_status && exit_status.to_i == 0
data/share/rbbt_commands/slurm/orchestrate
CHANGED
@@ -13,6 +13,7 @@ $slurm_options = SOPT.get <<EOF
 --drbbt* Use development version of rbbt
 -sing--singularity Use Singularity
 -si--singularity_img* Singularity image to use
+-sm--singularity_mounts* Singularity image to use
 -ug--user_group* Use alternative user group for group project directory
 -c--contain* Contain in directory (using Singularity)
 -s--sync* Contain in directory and sync jobs
@@ -36,8 +37,9 @@ $slurm_options = SOPT.get <<EOF
 -cons--constraint* SLURM constraint
 -W--workflows* Additional workflows
 -rmb--remove_batch_basedir Remove the SLURM working directory (command, STDIN, exit status, ...)
--lmod--lua_modules* Lua Modules to load
 -bs--batch_system* Batch system to use: auto, lsf, slurm (default is auto-detect)
+-lmod--lua_modules* Lua Modules to load
+-co--conda* Conda environment to use
 -OR--orchestration_rules* Orchestration rules
 EOF
 
data/share/rbbt_commands/slurm/task
CHANGED
@@ -12,6 +12,7 @@ $slurm_options = SOPT.get <<EOF
 --drbbt* Use development version of rbbt
 -sing--singularity Use Singularity
 -si--singularity_img* Singularity image to use
+-sm--singularity_mounts* Singularity image to use
 -ug--user_group* Use alternative user group for group project directory
 -c--contain* Contain in directory (using Singularity)
 -s--sync* Contain in directory and sync jobs
@@ -35,8 +36,9 @@ $slurm_options = SOPT.get <<EOF
 -cons--constraint* SLURM constraint
 -W--workflows* Additional workflows
 -rmb--remove_batch_dir Remove the batch working directory (command, STDIN, exit status, ...)
--lmod--lua_modules* Lua Modules to load
 -bs--batch_system* Batch system to use: auto, lsf, slurm (default is auto-detect)
+-lmod--lua_modules* Lua Modules to load
+-co--conda* Conda environment to use
 EOF
 
 batch_system = $slurm_options.delete :batch_system
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rbbt-util
 version: !ruby/object:Gem::Version
-  version: 5.34.12
+  version: 5.34.13
 platform: ruby
 authors:
 - Miguel Vazquez
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-
+date: 2022-09-19 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rake