sushi_fabric 0.9.2 → 0.9.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/lib/sushi_fabric/sushiApp.rb +50 -31
- data/lib/sushi_fabric/version.rb +1 -1
- metadata +3 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
|
-
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
2
|
+
SHA256:
|
3
|
+
metadata.gz: 8ca5afb9de9e23912a2d514c547ff22d2edaf0a6c440e3663a0ac3fce59933f9
|
4
|
+
data.tar.gz: 7a906efa08a1bd3ee1b33dd02ff9a4350f241042626120278b720b3b3294d013
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 482274611996c4e8c2d483088d1bcd8966d6136f849f7b77782d5ac0834556a56682fc2049f8c70da3aa48131bfa71ae13322a8e001252d4744d76787d9c2a23
|
7
|
+
data.tar.gz: 561434d611bc97fe9e30a035db0f3e3b71cbec89dc767f2531e2bf1b03905030364cf1bd833a0c432cad0d184a87a6552ae90430fe03c47f272611799d35cdea
|
@@ -1,6 +1,6 @@
|
|
1
1
|
#!/usr/bin/env ruby
|
2
2
|
# encoding: utf-8
|
3
|
-
# Version = '
|
3
|
+
# Version = '20200703-164839'
|
4
4
|
|
5
5
|
require 'csv'
|
6
6
|
require 'fileutils'
|
@@ -227,6 +227,7 @@ class SushiApp
|
|
227
227
|
attr_accessor :mango_run_name
|
228
228
|
attr_accessor :input_dataset_bfabric_application_number
|
229
229
|
attr_accessor :next_dataset_bfabric_application_number
|
230
|
+
attr_reader :inactivate_nodes
|
230
231
|
def initialize
|
231
232
|
@gstore_dir = GSTORE_DIR
|
232
233
|
@project = nil
|
@@ -243,6 +244,7 @@ class SushiApp
|
|
243
244
|
@module_source = MODULE_SOURCE
|
244
245
|
@modules = []
|
245
246
|
#@workflow_manager = workflow_manager_instance||DRbObject.new_with_uri(WORKFLOW_MANAGER)
|
247
|
+
@last_job = true
|
246
248
|
end
|
247
249
|
def set_input_dataset
|
248
250
|
if @dataset_tsv_file
|
@@ -389,8 +391,13 @@ class SushiApp
|
|
389
391
|
end
|
390
392
|
def check_latest_module_version(mod)
|
391
393
|
command_out = %x[ bash -lc "source #{@module_source}; module whatis #{mod} 2>&1" ]
|
392
|
-
latest_mod =
|
393
|
-
|
394
|
+
latest_mod = nil
|
395
|
+
command_out.split("\n").each do |line|
|
396
|
+
if line =~ /#{mod}/
|
397
|
+
latest_mod = line.split.first
|
398
|
+
break
|
399
|
+
end
|
400
|
+
end
|
394
401
|
latest_mod
|
395
402
|
end
|
396
403
|
def job_header
|
@@ -414,7 +421,6 @@ class SushiApp
|
|
414
421
|
modules_with_version = @modules.map{|mod| check_latest_module_version(mod)}
|
415
422
|
modules_with_version.compact!
|
416
423
|
"module add #{modules_with_version.join(' ')}"
|
417
|
-
#"module add #{@modules.join(' ')}"
|
418
424
|
else
|
419
425
|
""
|
420
426
|
end
|
@@ -429,6 +435,7 @@ umask 0002
|
|
429
435
|
SCRATCH_DIR=#{@scratch_dir}
|
430
436
|
GSTORE_DIR=#{@gstore_dir}
|
431
437
|
INPUT_DATASET=#{@input_dataset_tsv_path}
|
438
|
+
LAST_JOB=#{@last_job.to_s.upcase}
|
432
439
|
echo "Job runs on `hostname`"
|
433
440
|
echo "at $SCRATCH_DIR"
|
434
441
|
mkdir $SCRATCH_DIR || exit 1
|
@@ -632,16 +639,16 @@ rm -rf #{@scratch_dir} || exit 1
|
|
632
639
|
end
|
633
640
|
def sample_mode
|
634
641
|
selected_samples = Hash[*@params['samples'].split(',').map{|sample_name| [sample_name, true]}.flatten]
|
635
|
-
@dataset_hash.
|
642
|
+
@dataset_hash.each_with_index do |row, i|
|
636
643
|
@dataset = Hash[*row.map{|key,value| [key.gsub(/\[.+\]/,'').strip, value]}.flatten]
|
637
644
|
if selected_samples[@dataset['Name']]
|
638
|
-
## WRITE THE JOB SCRIPT
|
639
645
|
sample_name = @dataset['Name']||@dataset.first
|
640
646
|
@job_script = if @dataset_sushi_id and dataset = DataSet.find_by_id(@dataset_sushi_id.to_i)
|
641
647
|
File.join(@job_script_dir, @analysis_category + '_' + sample_name) + '_' + dataset.name.gsub(/\s+/,'_') + '.sh'
|
642
648
|
else
|
643
649
|
File.join(@job_script_dir, @analysis_category + '_' + sample_name) + '.sh'
|
644
650
|
end
|
651
|
+
@last_job = (i == @dataset_hash.length - 1)
|
645
652
|
make_job_script
|
646
653
|
@job_scripts << @job_script
|
647
654
|
@result_dataset << next_dataset
|
@@ -752,28 +759,14 @@ rm -rf #{@scratch_dir} || exit 1
|
|
752
759
|
if mock
|
753
760
|
make_dummy_files
|
754
761
|
end
|
755
|
-
copy_inputdataset_parameter_jobscripts
|
756
762
|
|
757
|
-
# job submittion
|
758
|
-
gstore_job_script_paths = []
|
759
|
-
@job_scripts.each_with_index do |job_script, i|
|
760
|
-
if job_id = submit(job_script, mock)
|
761
|
-
@job_ids << job_id
|
762
|
-
print "Submit job #{File.basename(job_script)} job_id=#{job_id}"
|
763
|
-
gstore_job_script_paths << File.join(@gstore_script_dir, File.basename(job_script))
|
764
|
-
end
|
765
|
-
end
|
766
|
-
|
767
|
-
puts
|
768
|
-
print 'job scripts: '
|
769
|
-
p @job_scripts
|
770
763
|
print 'result dataset: '
|
771
764
|
p @result_dataset
|
772
765
|
|
773
766
|
# copy application data to gstore
|
774
767
|
@next_dataset_tsv_path = save_next_dataset_as_tsv
|
775
768
|
|
776
|
-
if
|
769
|
+
if @dataset_sushi_id and dataset = DataSet.find_by_id(@dataset_sushi_id.to_i)
|
777
770
|
data_set_arr = []
|
778
771
|
headers = []
|
779
772
|
rows = []
|
@@ -798,20 +791,46 @@ rm -rf #{@scratch_dir} || exit 1
|
|
798
791
|
unless @off_bfabric_registration
|
799
792
|
if next_dataset = DataSet.find_by_id(@next_dataset_id)
|
800
793
|
next_dataset.register_bfabric(bfabric_application_number: @next_dataset_bfabric_application_number)
|
794
|
+
if next_dataset.workunit_id
|
795
|
+
@job_scripts.each do |job_script|
|
796
|
+
open(job_script, "a") do |out|
|
797
|
+
out.puts "module load Dev/Python"
|
798
|
+
out.puts "WORKUNIT_ID=#{next_dataset.workunit_id}"
|
799
|
+
out.puts "update_resource_size -w $WORKUNIT_ID"
|
800
|
+
end
|
801
|
+
end
|
802
|
+
end
|
801
803
|
end
|
802
804
|
end
|
805
|
+
end
|
806
|
+
end
|
807
|
+
copy_inputdataset_parameter_jobscripts
|
808
|
+
|
809
|
+
# job submittion
|
810
|
+
gstore_job_script_paths = []
|
811
|
+
@job_scripts.each_with_index do |job_script, i|
|
812
|
+
if job_id = submit(job_script, mock)
|
813
|
+
@job_ids << job_id
|
814
|
+
print "Submit job #{File.basename(job_script)} job_id=#{job_id}"
|
815
|
+
gstore_job_script_paths << File.join(@gstore_script_dir, File.basename(job_script))
|
816
|
+
end
|
817
|
+
end
|
818
|
+
|
819
|
+
puts
|
820
|
+
print 'job scripts: '
|
821
|
+
p @job_scripts
|
803
822
|
|
804
|
-
# save job and dataset relation in Sushi DB
|
805
|
-
job_ids.each_with_index do |job_id, i|
|
806
|
-
new_job = Job.new
|
807
|
-
new_job.submit_job_id = job_id.to_i
|
808
|
-
new_job.script_path = gstore_job_script_paths[i]
|
809
|
-
new_job.next_dataset_id = @next_dataset_id
|
810
|
-
new_job.save
|
811
|
-
new_job.data_set.jobs << new_job
|
812
|
-
new_job.data_set.save
|
813
|
-
end
|
814
823
|
|
824
|
+
unless @job_ids.empty? or NO_ROR
|
825
|
+
# save job and dataset relation in Sushi DB
|
826
|
+
job_ids.each_with_index do |job_id, i|
|
827
|
+
new_job = Job.new
|
828
|
+
new_job.submit_job_id = job_id.to_i
|
829
|
+
new_job.script_path = gstore_job_script_paths[i]
|
830
|
+
new_job.next_dataset_id = @next_dataset_id
|
831
|
+
new_job.save
|
832
|
+
new_job.data_set.jobs << new_job
|
833
|
+
new_job.data_set.save
|
815
834
|
end
|
816
835
|
end
|
817
836
|
copy_nextdataset
|
data/lib/sushi_fabric/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: sushi_fabric
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.9.2
|
4
|
+
version: 0.9.7
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Functional Genomics Center Zurich
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date:
|
11
|
+
date: 2020-07-03 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: bundler
|
@@ -84,8 +84,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
84
84
|
- !ruby/object:Gem::Version
|
85
85
|
version: '0'
|
86
86
|
requirements: []
|
87
|
-
|
88
|
-
rubygems_version: 2.6.14
|
87
|
+
rubygems_version: 3.0.3
|
89
88
|
signing_key:
|
90
89
|
specification_version: 4
|
91
90
|
summary: workflow manager client.
|