workflow_manager 0.7.3 → 0.7.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile +2 -2
- data/config/environments/redis.conf +2 -2
- data/lib/job_checker.rb +13 -5
- data/lib/workflow_manager/server.rb +30 -10
- data/lib/workflow_manager/version.rb +1 -1
- data/start_workflow_manager.sh +11 -0
- data/test/job_list.rb +11 -4
- metadata +3 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: aff589c219150c4a2b7e8676d8540343143a12099ce4bfed847663e37f946a99
|
4
|
+
data.tar.gz: 64461e67991613603227c3894aa2b728db4952739071d140c007caefd8564601
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 4e79f54d8335b4df34e466055d76cb8f64fa5c53787bf83047a7e259e5d564b0514e39d2b08ba56830dc73e9235164904806a34210e76e6e1018516e1f3bd243
|
7
|
+
data.tar.gz: 9e3f4b1ce2adddc6b4b13e77e7bedbef74c9b635253ba77f9dbd0d731a5a6896a515404b481aa0c7f23b1a739f55142f5825fa7b7ffa8ecb8f7c7a3ba8754166
|
data/Gemfile
CHANGED
@@ -2,5 +2,5 @@ source 'https://rubygems.org'
|
|
2
2
|
|
3
3
|
gem 'redis'
|
4
4
|
gem 'sidekiq'
|
5
|
-
|
6
|
-
gem 'workflow_manager'
|
5
|
+
gem 'workflow_manager', :path => './'
|
6
|
+
#gem 'workflow_manager'
|
data/lib/job_checker.rb
CHANGED
@@ -1,6 +1,9 @@
|
|
1
1
|
require 'sidekiq'
|
2
2
|
require 'redis'
|
3
3
|
|
4
|
+
require 'uri'
|
5
|
+
require 'net/http'
|
6
|
+
|
4
7
|
WORKER_INTERVAL = 10 # [s]
|
5
8
|
REDIS_CONF = File.expand_path("../../config/environments/redis.conf", __FILE__)
|
6
9
|
PORT = if File.exist?(REDIS_CONF)
|
@@ -40,15 +43,15 @@ class JobChecker
|
|
40
43
|
end
|
41
44
|
new_job_script
|
42
45
|
end
|
43
|
-
def update_time_status(status, script_basename, user, project_number)
|
46
|
+
def update_time_status(status, script_basename, user, project_number, next_dataset_id, rails_host)
|
44
47
|
unless @start_time
|
45
48
|
@start_time = Time.now.strftime("%Y-%m-%d %H:%M:%S")
|
46
49
|
end
|
47
50
|
time = Time.now.strftime("%Y-%m-%d %H:%M:%S")
|
48
|
-
[status, script_basename, [@start_time, time].join("/"), user, project_number].join(',')
|
51
|
+
[status, script_basename, [@start_time, time].join("/"), user, project_number, next_dataset_id, rails_host].join(',')
|
49
52
|
end
|
50
53
|
|
51
|
-
def perform(job_id, script_basename, log_file, user, project_id)
|
54
|
+
def perform(job_id, script_basename, log_file, user, project_id, next_dataset_id=nil, rails_host=nil)
|
52
55
|
puts "JobID (in JobChecker): #{job_id}"
|
53
56
|
db0 = Redis.new(port: PORT, db: 0) # state + alpha DB
|
54
57
|
db1 = Redis.new(port: PORT, db: 1) # log DB
|
@@ -63,10 +66,10 @@ class JobChecker
|
|
63
66
|
#print ret
|
64
67
|
state = ret.split(/\n/).last.strip
|
65
68
|
#puts "state: #{state}"
|
66
|
-
db0[job_id] = update_time_status(state, script_basename, user, project_id)
|
69
|
+
db0[job_id] = update_time_status(state, script_basename, user, project_id, next_dataset_id, rails_host)
|
67
70
|
|
68
71
|
unless state == pre_state
|
69
|
-
db0[job_id] = update_time_status(state, script_basename, user, project_id)
|
72
|
+
db0[job_id] = update_time_status(state, script_basename, user, project_id, next_dataset_id, rails_host)
|
70
73
|
project_jobs = eval((db2[project_id]||[]).to_s)
|
71
74
|
project_jobs = Hash[*project_jobs]
|
72
75
|
project_jobs[job_id] = state
|
@@ -76,6 +79,11 @@ class JobChecker
|
|
76
79
|
pre_state = state
|
77
80
|
sleep WORKER_INTERVAL
|
78
81
|
end while state =~ /RUNNING/ or state =~ /PENDING/ or state =~ /---/
|
82
|
+
if next_dataset_id and rails_host
|
83
|
+
uri = URI("#{rails_host}/data_set/#{next_dataset_id}/update_completed_samples")
|
84
|
+
#p uri
|
85
|
+
res = Net::HTTP.get_response(uri)
|
86
|
+
end
|
79
87
|
end
|
80
88
|
end
|
81
89
|
|
@@ -163,6 +163,7 @@ module WorkflowManager
|
|
163
163
|
RedisDB.new(1, @redis_conf)
|
164
164
|
end
|
165
165
|
@jobs = RedisDB.new(2, @redis_conf)
|
166
|
+
@trees = RedisDB.new(4, @redis_conf)
|
166
167
|
|
167
168
|
@system_log = File.join(@log_dir, "system.log")
|
168
169
|
@mutex = Mutex.new
|
@@ -186,12 +187,12 @@ module WorkflowManager
|
|
186
187
|
statuses.each do |job_id, status|
|
187
188
|
# puts [job_id, status].join(",")
|
188
189
|
# 120249,RUNNING,QC_ventricles_100k.sh,2021-07-30 09:47:04/2021-07-30 09:47:04,masaomi,1535
|
189
|
-
stat, script_basename, time, user, project_number = status.split(",")
|
190
|
+
stat, script_basename, time, user, project_number, next_dataset_id, rails_host = status.split(",")
|
190
191
|
if stat == "RUNNING" or stat == "PENDING"
|
191
192
|
log_file = logs[job_id]
|
192
193
|
log_puts("JobID (in recovery check): #{job_id}")
|
193
194
|
puts "JobID (in recovery check): #{job_id}"
|
194
|
-
JobChecker.perform_async(job_id, script_basename, log_file, user, project_number)
|
195
|
+
JobChecker.perform_async(job_id, script_basename, log_file, user, project_number, next_dataset_id, rails_host)
|
195
196
|
end
|
196
197
|
end
|
197
198
|
end
|
@@ -296,7 +297,7 @@ module WorkflowManager
|
|
296
297
|
Thread.current.kill
|
297
298
|
end
|
298
299
|
end
|
299
|
-
def start_monitoring3(script_path, script_content, user='sushi_lover', project_number=0, sge_options='', log_dir='')
|
300
|
+
def start_monitoring3(script_path, script_content, user='sushi_lover', project_number=0, sge_options='', log_dir='', next_dataset_id='', rails_host=nil)
|
300
301
|
script_basename = File.basename(script_path)
|
301
302
|
job_id, log_file, command = @cluster.submit_job(script_path, script_content, sge_options)
|
302
303
|
#p command
|
@@ -304,7 +305,7 @@ module WorkflowManager
|
|
304
305
|
#p job_id
|
305
306
|
puts "JobID (in WorkflowManager): #{job_id}"
|
306
307
|
sleep 1
|
307
|
-
JobChecker.perform_async(job_id, script_basename, log_file, user, project_number)
|
308
|
+
JobChecker.perform_async(job_id, script_basename, log_file, user, project_number, next_dataset_id, rails_host)
|
308
309
|
job_id
|
309
310
|
end
|
310
311
|
def start_monitoring2(script_path, script_content, user='sushi_lover', project_number=0, sge_options='', log_dir='')
|
@@ -473,17 +474,23 @@ module WorkflowManager
|
|
473
474
|
job_idsh = if job_ids
|
474
475
|
Hash[*(job_ids.split(',')).map{|job_id| [job_id, true]}.flatten]
|
475
476
|
end
|
476
|
-
|
477
|
-
|
477
|
+
if project_number
|
478
|
+
s_ = {}
|
478
479
|
@jobs.transaction do |jobs|
|
479
480
|
if project_jobs = jobs[project_number]
|
480
481
|
s_ = Hash[*eval(project_jobs)]
|
481
482
|
end
|
482
483
|
end
|
483
|
-
|
484
|
-
|
485
|
-
|
486
|
-
|
484
|
+
@statuses.transaction do |statuses|
|
485
|
+
s_.each do |job_id, stat|
|
486
|
+
s << [job_id, statuses[job_id]]
|
487
|
+
end
|
488
|
+
end
|
489
|
+
else
|
490
|
+
@statuses.transaction do |statuses|
|
491
|
+
statuses.each do |key, value|
|
492
|
+
s << [key, value]
|
493
|
+
end
|
487
494
|
end
|
488
495
|
end
|
489
496
|
if job_ids
|
@@ -549,6 +556,19 @@ module WorkflowManager
|
|
549
556
|
def cluster_node_list
|
550
557
|
@cluster.node_list
|
551
558
|
end
|
559
|
+
def save_dataset_tree(project_number, json)
|
560
|
+
@trees.transaction do |trees|
|
561
|
+
trees[project_number] = json
|
562
|
+
end
|
563
|
+
json
|
564
|
+
end
|
565
|
+
def load_dataset_tree(project_number)
|
566
|
+
json = nil
|
567
|
+
@trees.transaction do |trees|
|
568
|
+
json = trees[project_number]
|
569
|
+
end
|
570
|
+
json
|
571
|
+
end
|
552
572
|
end
|
553
573
|
end
|
554
574
|
|
@@ -0,0 +1,11 @@
|
|
1
|
+
#!/usr/bin/bash
|
2
|
+
source /usr/local/ngseq/etc/lmod_profile
|
3
|
+
module load Dev/Ruby/2.6.7
|
4
|
+
module load Tools/Redis/6.0.1
|
5
|
+
conda activate gtools_env
|
6
|
+
which python
|
7
|
+
which g-sub
|
8
|
+
which g-req
|
9
|
+
mkdir -p logs
|
10
|
+
mkdir -p dbs
|
11
|
+
bundle exec workflow_manager -d druby://fgcz-h-032:40002
|
data/test/job_list.rb
CHANGED
@@ -1,13 +1,13 @@
|
|
1
1
|
#!/usr/bin/env ruby
|
2
2
|
# encoding: utf-8
|
3
|
-
# Version = '
|
3
|
+
# Version = '20211001-104513'
|
4
4
|
|
5
5
|
PORT = (ARGV[0]||6380).to_i
|
6
6
|
require 'redis'
|
7
7
|
db0 = Redis.new(port: PORT, db: 0)
|
8
8
|
db1 = Redis.new(port: PORT, db: 1)
|
9
9
|
db2 = Redis.new(port: PORT, db: 2)
|
10
|
-
|
10
|
+
db4 = Redis.new(port: PORT, db: 4)
|
11
11
|
|
12
12
|
class Redis
|
13
13
|
def show_all
|
@@ -18,8 +18,8 @@ class Redis
|
|
18
18
|
end
|
19
19
|
end
|
20
20
|
|
21
|
-
dbs = [db0, db1, db2]
|
22
|
-
db_notes = ["state DB", "log DB", "project job DB"]
|
21
|
+
dbs = [db0, db1, db2, db4]
|
22
|
+
db_notes = ["state DB", "log DB", "project job DB", "JS tree DB"]
|
23
23
|
|
24
24
|
dbs.each.with_index do |db, i|
|
25
25
|
note = db_notes[i]
|
@@ -48,3 +48,10 @@ db2.keys.sort.each do |key|
|
|
48
48
|
value = db2.get(key)
|
49
49
|
puts [key, value].join("\t")
|
50
50
|
end
|
51
|
+
|
52
|
+
puts
|
53
|
+
puts "db3, status DB3, project specific"
|
54
|
+
db3.keys.sort.each do |key|
|
55
|
+
value = db3.get(key)
|
56
|
+
puts [key, value].join("\t")
|
57
|
+
end
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: workflow_manager
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.7.3
|
4
|
+
version: 0.7.7
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Functional Genomics Center Zurich
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2021-
|
11
|
+
date: 2021-10-15 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: bundler
|
@@ -87,6 +87,7 @@ files:
|
|
87
87
|
- spec/cluster_spec.rb
|
88
88
|
- spec/server_spec.rb
|
89
89
|
- spec/spec_helper.rb
|
90
|
+
- start_workflow_manager.sh
|
90
91
|
- test/call_worker4.rb
|
91
92
|
- test/call_worker_method.rb
|
92
93
|
- test/job_list.rb
|