workflow_manager 0.6.0 → 0.7.3
- checksums.yaml +4 -4
- data/.gitignore +1 -0
- data/Gemfile +4 -2
- data/bin/wfm_monitoring +3 -2
- data/bin/workflow_manager +22 -2
- data/config/environments/production.rb +1 -1
- data/config/environments/redis.conf +2 -2
- data/config/environments/sidekiq.yml +8 -0
- data/lib/job_checker.rb +81 -0
- data/lib/worker4.rb +80 -0
- data/lib/workflow_manager/server.rb +48 -11
- data/lib/workflow_manager/version.rb +1 -1
- data/test/call_worker4.rb +11 -0
- data/test/call_worker_method.rb +11 -0
- data/test/job_list.rb +50 -0
- data/test/test_job1.sh +5 -0
- data/workflow_manager.gemspec +1 -1
- metadata +16 -5
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 49b41b8057a7487ed992b96a9f0b81a155bef54d2fe9b63cde6c2503a84d2257
+  data.tar.gz: 373b56284df027f151a0a77f7d28fc0cd8331b51f9e2ef22e4484b0db8732154
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: df2c18b28e56ee900c2845b0ae43ee826a59fbab3b39ec6f93b5f62880b3bf2fcbc7ab4f3804899a6234e0ae5f9f7abebacd77fd24900515da31cf4d6b1215e9
+  data.tar.gz: e6ca40a3e1428284c73f578e09be5028e55bd630ce673699288d8ce3b76ccf107e5e265eba3a731401e38b7b1e01fcb5fa5e6152aeec642333cc35f681131074
data/.gitignore
CHANGED
data/Gemfile
CHANGED

@@ -1,4 +1,6 @@
 source 'https://rubygems.org'
 
-
-
+gem 'redis'
+gem 'sidekiq'
+#gem 'workflow_manager', :path => '/srv/GT/analysis/masaomi/FGCZ/prototype_workflow_manager_with_sidekiq_20210122/workflow_manager/'
+gem 'workflow_manager'
data/bin/wfm_monitoring
CHANGED

@@ -1,7 +1,7 @@
 #!/usr/bin/env ruby
 # encoding: utf-8
 # 20121112 masa workflow manager client
-Version = '
+Version = '20210625-165025'
 
 require 'drb/drb'
 require 'workflow_manager/optparse_ex'

@@ -54,4 +54,5 @@ sge_options << "-n #{opt.nodes}" if opt.nodes
 script_content = File.read(script_file)
 workflow_manager = DRbObject.new_with_uri(uri)
 #puts workflow_manager.start_monitoring(script_file, user, 0, script_content, project_number, sge_options.join(' '), opt.log)
-puts workflow_manager.start_monitoring2(script_file, script_content, user, project_number, sge_options.join(' '), opt.log)
+#puts workflow_manager.start_monitoring2(script_file, script_content, user, project_number, sge_options.join(' '), opt.log)
+puts workflow_manager.start_monitoring3(script_file, script_content, user, project_number, sge_options.join(' '), opt.log)
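The client now routes submissions through start_monitoring3, which hands status polling to a Sidekiq job instead of a server-side thread. A minimal sketch of making the same call directly over DRb, assuming a server at the default URI and a hypothetical script file:

  require 'drb/drb'

  uri = 'druby://localhost:12345'
  script_file = 'test/test_job1.sh'
  workflow_manager = DRbObject.new_with_uri(uri)
  # (script_path, script_content, user, project_number, sge_options, log_dir)
  job_id = workflow_manager.start_monitoring3(script_file, File.read(script_file),
                                              'some_user', 1001, '', 'logs')
  puts "submitted, job id: #{job_id}"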
data/bin/workflow_manager
CHANGED

@@ -8,7 +8,7 @@ Version = WorkflowManager::VERSION
 opt = OptionParser.new do |o|
   o.banner = "Version: #{Version}\nUsage:\n #{File.basename(__FILE__)} -d [druby://host:port] -m [development|production]"
   o.on(:server, 'druby://localhost:12345', '-d server', '--server', 'workflow manager URI (default: druby://localhost:12345)')
-  o.on(:mode, '
+  o.on(:mode, 'production', '-m mode', '--mode', 'development|production (default: production)')
   o.parse!(ARGV)
 end
 

@@ -33,7 +33,9 @@ if opt.mode
   default_config_file = File.join(default_config_dir, opt.mode+".rb")
   if File.exist?(default_config_file)
     default_redis_config_file = File.join(default_config_dir, "redis.conf")
+    default_sidekiq_config_file = File.join(default_config_dir, "sidekiq.yml")
     FileUtils.cp(default_redis_config_file, config_dir)
+    FileUtils.cp(default_sidekiq_config_file, config_dir)
     FileUtils.cp(default_config_file, config_file)
   else
     raise "Configure file does not exist: #{config_file}"

@@ -43,4 +45,22 @@ if opt.mode
 end
 DRb.start_service(uri, WorkflowManager::Server.new)
 puts DRb.uri
-DRb.thread.join
+#DRb.thread.join
+
+sleep 1
+
+sidekiq_pid = fork do
+  app_dir = File.expand_path('..', __FILE__)
+  job_checker = File.join(app_dir, "../lib/job_checker.rb")
+  exec("sidekiq -C config/environments/sidekiq.yml -r #{job_checker}")
+end
+
+begin
+  DRb.thread.join
+  puts "__END__"
+rescue SignalException
+  Process.kill("HUP", sidekiq_pid)
+  sleep 1
+  puts "__CORRECTLY_END__"
+end
+
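The startup sequence now supervises a Sidekiq child process: the server forks, the child execs sidekiq with job_checker.rb required, and when the parent catches a signal it forwards HUP so the child can shut down before the server exits. The generic shape of that pattern, as an illustrative sketch (the sleeping child stands in for the real sidekiq process):

  child_pid = fork do
    exec("sleep", "1000")  # placeholder for the long-running worker process
  end

  begin
    Process.wait(child_pid)
  rescue SignalException
    Process.kill("HUP", child_pid)  # give the child a chance to clean up
  end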
data/config/environments/production.rb
CHANGED

@@ -7,6 +7,6 @@ WorkflowManager::Server.configure do |config|
   config.interval = 30
   config.resubmit = 0
   config.redis_conf = "config/environments/redis.conf"
-  config.cluster = WorkflowManager::
+  config.cluster = WorkflowManager::FGCZDebian10Cluster.new('FGCZDebian10Cluster')
 end
 
data/lib/job_checker.rb
ADDED

@@ -0,0 +1,81 @@
+require 'sidekiq'
+require 'redis'
+
+WORKER_INTERVAL = 10 # [s]
+REDIS_CONF = File.expand_path("../../config/environments/redis.conf", __FILE__)
+PORT = if File.exist?(REDIS_CONF)
+         redis_conf = Hash[*File.readlines(REDIS_CONF).map{|line| line.chomp.split}.map{|e| [e[0], e[1,100].join(",")]}.flatten]
+         redis_conf["port"].to_i
+       else
+         6379
+       end
+SIDEKIQ_URL = "redis://localhost:#{PORT}/3"
+warn "redis.conf: #{REDIS_CONF}"
+warn "Redis port: #{PORT}"
+warn "Sidekiq URL: #{SIDEKIQ_URL}"
+
+Sidekiq.configure_server do |config|
+  config.redis = { url: SIDEKIQ_URL }
+end
+
+Sidekiq.configure_client do |config|
+  config.redis = { url: SIDEKIQ_URL }
+end
+
+class Redis
+  alias_method :[], :get
+  alias_method :[]=, :set
+end
+
+class JobChecker
+  include Sidekiq::Worker
+  sidekiq_options queue: :default, retry: 5
+
+  def generate_new_job_script(log_dir, script_basename, script_content)
+    new_job_script = File.basename(script_basename) + "_" + Time.now.strftime("%Y%m%d%H%M%S%L")
+    new_job_script = File.join(log_dir, new_job_script)
+    open(new_job_script, 'w') do |out|
+      out.print script_content
+      out.print "\necho __SCRIPT END__\n"
+    end
+    new_job_script
+  end
+  def update_time_status(status, script_basename, user, project_number)
+    unless @start_time
+      @start_time = Time.now.strftime("%Y-%m-%d %H:%M:%S")
+    end
+    time = Time.now.strftime("%Y-%m-%d %H:%M:%S")
+    [status, script_basename, [@start_time, time].join("/"), user, project_number].join(',')
+  end
+
+  def perform(job_id, script_basename, log_file, user, project_id)
+    puts "JobID (in JobChecker): #{job_id}"
+    db0 = Redis.new(port: PORT, db: 0) # state + alpha DB
+    db1 = Redis.new(port: PORT, db: 1) # log DB
+    db2 = Redis.new(port: PORT, db: 2) # project jobs DB
+    db1[job_id] = log_file
+    pre_state = nil
+    @start_time = nil
+    begin
+      command = "sacct --jobs=#{job_id} --format=state"
+      #puts command
+      ret = `#{command}`
+      #print ret
+      state = ret.split(/\n/).last.strip
+      #puts "state: #{state}"
+      db0[job_id] = update_time_status(state, script_basename, user, project_id)
+
+      unless state == pre_state
+        db0[job_id] = update_time_status(state, script_basename, user, project_id)
+        project_jobs = eval((db2[project_id]||[]).to_s)
+        project_jobs = Hash[*project_jobs]
+        project_jobs[job_id] = state
+        #p project_jobs
+        db2[project_id] = project_jobs.to_a.flatten.last(200).to_s
+      end
+      pre_state = state
+      sleep WORKER_INTERVAL
+    end while state =~ /RUNNING/ or state =~ /PENDING/ or state =~ /---/
+  end
+end
+
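JobChecker polls sacct until the Slurm job leaves the RUNNING/PENDING states, persisting a CSV status string on every state change. Because it is an ordinary Sidekiq worker, a check can also be enqueued by hand; a hedged sketch, assuming Redis is running on the configured port and using a hypothetical job id:

  require_relative 'lib/job_checker'
  # perform(job_id, script_basename, log_file, user, project_id)
  JobChecker.perform_async("120249", "test_job1.sh", "logs/test_job1.sh_o.log", "some_user", 1001)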
data/lib/worker4.rb
ADDED

@@ -0,0 +1,80 @@
+require 'sidekiq'
+require 'redis'
+
+WORKER_INTERVAL = 10 # [s]
+
+Sidekiq.configure_server do |config|
+  config.redis = { url: 'redis://localhost:6380/3' }
+end
+
+Sidekiq.configure_client do |config|
+  config.redis = { url: 'redis://localhost:6380/3' }
+end
+
+class Redis
+  alias_method :[], :get
+  alias_method :[]=, :set
+end
+
+class JobWorker
+  include Sidekiq::Worker
+  sidekiq_options queue: :default, retry: 5
+
+  def generate_new_job_script(log_dir, script_basename, script_content)
+    new_job_script = File.basename(script_basename) + "_" + Time.now.strftime("%Y%m%d%H%M%S%L")
+    new_job_script = File.join(log_dir, new_job_script)
+    open(new_job_script, 'w') do |out|
+      out.print script_content
+      out.print "\necho __SCRIPT END__\n"
+    end
+    new_job_script
+  end
+  def update_time_status(status, script_basename, user, project_number)
+    unless @start_time
+      @start_time = Time.now.strftime("%Y-%m-%d %H:%M:%S")
+    end
+    time = Time.now.strftime("%Y-%m-%d %H:%M:%S")
+    [status, script_basename, [@start_time, time].join("/"), user, project_number].join(',')
+  end
+
+  def perform(project_id, log_dir, script_basename, script_content)
+    #script_base_name = "test_job.sh"
+    job_script = generate_new_job_script(log_dir, script_basename, script_content)
+    log_file = job_script + "_o.log"
+    err_file = job_script + "_e.log"
+    command = "sbatch -o #{log_file} -e #{err_file} -N 1 #{job_script}"
+    puts command
+    ret = `#{command}`
+    job_id = ret.chomp.split.last
+    puts "JobID: #{job_id}"
+    db0 = Redis.new(port: 6380, db: 0) # state + alpha DB
+    db1 = Redis.new(port: 6380, db: 1) # log DB
+    db2 = Redis.new(port: 6380, db: 2) # project jobs DB
+    db1[job_id] = log_file
+    pre_state = nil
+    @start_time = nil
+    begin
+      command = "sacct --jobs=#{job_id} --format=state"
+      puts command
+      ret = `#{command}`
+      #print ret
+      state = ret.split(/\n/).last.strip
+      puts "state: #{state}"
+      #db.set(job_id, state)
+      db0[job_id] = update_time_status(state, script_basename, "sushi_lover", project_id)
+
+      unless state == pre_state
+        db0[job_id] = update_time_status(state, script_basename, "sushi_lover", project_id)
+        project_jobs = eval((db2[project_id]||[]).to_s)
+        project_jobs = Hash[*project_jobs]
+        project_jobs[job_id] = state
+        p project_jobs
+        db2[project_id] = project_jobs.to_a.flatten.last(200).to_s
+        #db2[project_id] = project_jobs.to_s
+      end
+      pre_state = state
+      sleep WORKER_INTERVAL
+    end while state =~ /RUNNING/ or state =~ /PENDING/ or state =~ /---/
+  end
+end
+
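Both workers store a job's status in db0 as a single comma-separated string built by update_time_status. A hedged sketch of decoding one of those values back into its fields (the key and sample value are hypothetical):

  require 'redis'

  db0 = Redis.new(port: 6380, db: 0)
  # e.g. "RUNNING,test_job1.sh,2021-07-30 09:47:04/2021-07-30 09:47:34,some_user,1001"
  status, script_basename, times, user, project = db0.get("120249").split(",")
  start_time, last_check = times.split("/")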
data/lib/workflow_manager/server.rb
CHANGED

@@ -4,6 +4,9 @@
 require 'drb/drb'
 require 'fileutils'
 require 'csv'
+
+require 'job_checker'
+
 begin
   require 'redis'
   DB_MODE = "Redis"

@@ -159,6 +162,7 @@ module WorkflowManager
       when "Redis"
         RedisDB.new(1, @redis_conf)
       end
+      @jobs = RedisDB.new(2, @redis_conf)
 
       @system_log = File.join(@log_dir, "system.log")
       @mutex = Mutex.new

@@ -172,15 +176,35 @@ module WorkflowManager
       log_puts("DB = #{DB_MODE}")
       log_puts("Cluster = #{@cluster.name}")
       log_puts("Server starts")
+      log_puts("Recovery check")
+      sleep 2
+      recovery_job_checker
+    end
+    def recovery_job_checker
+      @logs.transaction do |logs|
+        @statuses.transaction do |statuses|
+          statuses.each do |job_id, status|
+            # puts [job_id, status].join(",")
+            # 120249,RUNNING,QC_ventricles_100k.sh,2021-07-30 09:47:04/2021-07-30 09:47:04,masaomi,1535
+            stat, script_basename, time, user, project_number = status.split(",")
+            if stat == "RUNNING" or stat == "PENDING"
+              log_file = logs[job_id]
+              log_puts("JobID (in recovery check): #{job_id}")
+              puts "JobID (in recovery check): #{job_id}"
+              JobChecker.perform_async(job_id, script_basename, log_file, user, project_number)
+            end
+          end
+        end
+      end
     end
     def hello
-      'hello
+      'hello hoge hoge bar boo bundle, '+ @cluster.name
     end
     def copy_commands(org_dir, dest_parent_dir, now=nil)
       @cluster.copy_commands(org_dir, dest_parent_dir, now)
     end
     def kill_job(job_id)
-      status(job_id, '
+      status(job_id, 'FAIL')
       status = `#{@cluster.kill_command(job_id)}`
     end
     def delete_command(target)

@@ -272,6 +296,17 @@ module WorkflowManager
         Thread.current.kill
       end
     end
+    def start_monitoring3(script_path, script_content, user='sushi_lover', project_number=0, sge_options='', log_dir='')
+      script_basename = File.basename(script_path)
+      job_id, log_file, command = @cluster.submit_job(script_path, script_content, sge_options)
+      #p command
+      #p log_file
+      #p job_id
+      puts "JobID (in WorkflowManager): #{job_id}"
+      sleep 1
+      JobChecker.perform_async(job_id, script_basename, log_file, user, project_number)
+      job_id
+    end
     def start_monitoring2(script_path, script_content, user='sushi_lover', project_number=0, sge_options='', log_dir='')
       # script_path is only used to generate a log file name
       # It is not used to read the script contents

@@ -418,7 +453,7 @@ module WorkflowManager
       #@statuses.open(@db_stat)
       @statuses.transaction do |statuses|
         if new_status and stat = statuses[job_id.to_s]
-          status_list = ['
+          status_list = ['CONPLETED', 'RUNNING', 'PENDING', 'FAIL']
           if status_list.include?(new_status)
             items = stat.split(/,/)
             items.shift

@@ -438,17 +473,19 @@ module WorkflowManager
       job_idsh = if job_ids
                    Hash[*(job_ids.split(',')).map{|job_id| [job_id, true]}.flatten]
                  end
-
-
-
-
-
-      end
-      else
-        s << [key, value]
+      s_ = {}
+      unless job_ids
+        @jobs.transaction do |jobs|
+          if project_jobs = jobs[project_number]
+            s_ = Hash[*eval(project_jobs)]
           end
         end
       end
+      @statuses.transaction do |statuses|
+        s_.each do |job_id, stat|
+          s << [job_id, statuses[job_id]]
+        end
+      end
       if job_ids
         s = s.select{|job_id, stat| job_idsh[job_id]}
       end
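The project-level job list in db2 (RedisDB index 2, wired up as @jobs) is stored as a stringified flat array and rebuilt with Hash[*eval(...)], capped at the last 200 elements. A hedged round-trip sketch of that encoding, with hypothetical keys and job ids:

  require 'redis'

  db2 = Redis.new(port: 6380, db: 2)
  db2.set("1001", ["120249", "RUNNING", "120250", "PENDING"].to_s)
  jobs = Hash[*eval(db2.get("1001"))]
  # => {"120249"=>"RUNNING", "120250"=>"PENDING"}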
data/test/call_worker4.rb
ADDED

@@ -0,0 +1,11 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+# Version = '20210625-104318'
+
+require './lib/worker4'
+script_file = "./test/test_job1.sh"
+script_content = File.read(script_file)
+log_dir = "./logs"
+script_basename = File.basename(script_file)
+JobWorker.perform_async(1001, log_dir, script_basename, script_content)
+p "submitted test_job1.sh"
data/test/call_worker_method.rb
ADDED

@@ -0,0 +1,11 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+# Version = '20210625-162836'
+
+require 'workflow_manager'
+script_file = "./test/test_job1.sh"
+script_content = File.read(script_file)
+log_dir = "./logs"
+script_basename = File.basename(script_file)
+JobWorker.perform_async(1001, log_dir, script_basename, script_content)
+p "submitted test_job1.sh"
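Both call_worker scripts only enqueue a JobWorker job; nothing executes until a Sidekiq process with the worker code loaded is attached to the same Redis. A plausible invocation from the repository root, assuming Redis is listening on the hard-coded port 6380:

  sidekiq -r ./lib/worker4.rb &
  ruby test/call_worker4.rb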
data/test/job_list.rb
ADDED

@@ -0,0 +1,50 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+# Version = '20210723-134812'
+
+PORT = (ARGV[0]||6380).to_i
+require 'redis'
+db0 = Redis.new(port: PORT, db: 0)
+db1 = Redis.new(port: PORT, db: 1)
+db2 = Redis.new(port: PORT, db: 2)
+#db3 = Redis.new(port: 6380, db: 3)
+
+class Redis
+  def show_all
+    self.keys.sort.each do |key|
+      value = self.get(key)
+      puts [key, value].join("\t")
+    end
+  end
+end
+
+dbs = [db0, db1, db2]
+db_notes = ["state DB", "log DB", "project job DB"]
+
+dbs.each.with_index do |db, i|
+  note = db_notes[i]
+  puts ["db#{i}", note].join("\t")
+  db.show_all
+  puts
+end
+exit
+puts "db0, status DB"
+puts ["JobID", "Status"].join("\t")
+db0.keys.sort.each do |key|
+  value = db0.get(key)
+  puts [key, value].join("\t")
+end
+
+puts
+puts "db1, log DB"
+db1.keys.sort.each do |key|
+  value = db1.get(key)
+  puts [key, value].join("\t")
+end
+
+puts
+puts "db2, status DB2, project specific"
+db2.keys.sort.each do |key|
+  value = db2.get(key)
+  puts [key, value].join("\t")
+end
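job_list.rb is a small inspection utility: it dumps every key/value pair in the three Redis DBs, taking the port as an optional argument (the per-DB listings after the early exit never run). A typical invocation, assuming Redis on port 6380:

  ruby test/job_list.rb 6380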
data/test/test_job1.sh
ADDED
data/workflow_manager.gemspec
CHANGED

@@ -19,6 +19,6 @@ Gem::Specification.new do |spec|
   spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
   spec.require_paths = ["lib"]
 
-  spec.add_development_dependency "bundler", "~>
+  spec.add_development_dependency "bundler", "~> 2.2.10"
   spec.add_development_dependency "rake"
 end
metadata
CHANGED

@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: workflow_manager
 version: !ruby/object:Gem::Version
-  version: 0.
+  version: 0.7.3
 platform: ruby
 authors:
 - Functional Genomics Center Zurich
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2021-07-31 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler

@@ -16,14 +16,14 @@ dependencies:
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-       version:
+       version: 2.2.10
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-       version:
+       version: 2.2.10
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement

@@ -76,6 +76,9 @@ files:
 - config/environments/development.rb
 - config/environments/production.rb
 - config/environments/redis.conf
+- config/environments/sidekiq.yml
+- lib/job_checker.rb
+- lib/worker4.rb
 - lib/workflow_manager.rb
 - lib/workflow_manager/cluster.rb
 - lib/workflow_manager/optparse_ex.rb

@@ -84,6 +87,10 @@ files:
 - spec/cluster_spec.rb
 - spec/server_spec.rb
 - spec/spec_helper.rb
+- test/call_worker4.rb
+- test/call_worker_method.rb
+- test/job_list.rb
+- test/test_job1.sh
 - workflow_manager.gemspec
 homepage: ''
 licenses:

@@ -104,7 +111,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.0.3
+rubygems_version: 3.0.3.1
 signing_key:
 specification_version: 4
 summary: Workflow Manager manages job submissions using dRuby.

@@ -112,3 +119,7 @@ test_files:
 - spec/cluster_spec.rb
 - spec/server_spec.rb
 - spec/spec_helper.rb
+- test/call_worker4.rb
+- test/call_worker_method.rb
+- test/job_list.rb
+- test/test_job1.sh