workflow_manager 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/Gemfile +4 -0
- data/LICENSE.txt +22 -0
- data/README.md +29 -0
- data/Rakefile +1 -0
- data/bin/wfm_get_log +32 -0
- data/bin/wfm_get_script +29 -0
- data/bin/wfm_hello +30 -0
- data/bin/wfm_job_list +37 -0
- data/bin/wfm_monitoring +55 -0
- data/bin/wfm_status +29 -0
- data/bin/workflow_manager +262 -0
- data/config/environments/development.rb +11 -0
- data/config/environments/production.rb +11 -0
- data/lib/workflow_manager.rb +5 -0
- data/lib/workflow_manager/cluster.rb +113 -0
- data/lib/workflow_manager/optparse_ex.rb +52 -0
- data/lib/workflow_manager/version.rb +3 -0
- data/workflow_manager.gemspec +24 -0
- metadata +103 -0
data/Gemfile
ADDED
data/LICENSE.txt
ADDED
@@ -0,0 +1,22 @@
+Copyright (c) 2013 Function Genomics Center Zurich
+
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md
ADDED
@@ -0,0 +1,29 @@
+# WorkflowManager
+
+TODO: Write a gem description
+
+## Installation
+
+Add this line to your application's Gemfile:
+
+    gem 'workflow_manager'
+
+And then execute:
+
+    $ bundle
+
+Or install it yourself as:
+
+    $ gem install workflow_manager
+
+## Usage
+
+TODO: Write usage instructions here
+
+## Contributing
+
+1. Fork it
+2. Create your feature branch (`git checkout -b my-new-feature`)
+3. Commit your changes (`git commit -am 'Add some feature'`)
+4. Push to the branch (`git push origin my-new-feature`)
+5. Create new Pull Request
data/Rakefile
ADDED
@@ -0,0 +1 @@
+require "bundler/gem_tasks"
data/bin/wfm_get_log
ADDED
@@ -0,0 +1,32 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+# 20121112 masa workflow manager client
+Version = '20130307-094928'
+
+require 'drb/drb'
+
+unless job_id = ARGV[0]
+  puts "Usage:\n #{__FILE__} [job_id] [with_err] [server]"
+  puts
+  puts " job_id: required"
+  puts " with_err: if you want also standard errors"
+  puts " server: workflow_manager server URI (default: druby://localhost:12345)"
+  exit
+end
+with_err = ARGV[1]
+uri = ARGV[2]||'druby://localhost:12345'
+
+wfmrc = if File.exist?(".wfmrc")
+  ".wfmrc"
+elsif File.exist?(File.expand_path("~/.wfmrc"))
+  File.expand_path("~/.wfmrc")
+end
+if wfmrc
+  File.readlines(wfmrc).each do |line|
+    if line =~ /server:\s*(druby.+)/
+      uri = $1
+    end
+  end
+end
+workflow_manager = DRbObject.new_with_uri(uri)
+puts workflow_manager.get_log(job_id, with_err)
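Each client in this gem resolves its settings the same way: explicit arguments win, otherwise a .wfmrc in the current directory (or, failing that, ~/.wfmrc) is scanned line by line. Going by the regular expressions in these scripts, a .wfmrc holding all three recognized keys could look like this; the values are only illustrative (druby://localhost:12345 is the built-in default, and "sushi lover"/1001 are the defaults used by wfm_monitoring):

    server: druby://localhost:12345
    user: sushi lover
    project: 1001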
data/bin/wfm_get_script
ADDED
@@ -0,0 +1,29 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+# 20121112 masa workflow manager client
+Version = '20130307-094955'
+
+require 'drb/drb'
+
+unless job_id = ARGV[0]
+  puts "Usage:\n #{__FILE__} [job_id] [server]"
+  puts
+  puts " job_id: required"
+  puts " server: workflow_manager URI (default: druby://localhost:12345)"
+  exit
+end
+uri = ARGV[1]||'druby://localhost:12345'
+wfmrc = if File.exist?(".wfmrc")
+  ".wfmrc"
+elsif File.exist?(File.expand_path("~/.wfmrc"))
+  File.expand_path("~/.wfmrc")
+end
+if wfmrc
+  File.readlines(wfmrc).each do |line|
+    if line =~ /server:\s*(druby.+)/
+      uri = $1
+    end
+  end
+end
+workflow_manager = DRbObject.new_with_uri(uri)
+puts workflow_manager.get_script(job_id)
data/bin/wfm_hello
ADDED
@@ -0,0 +1,30 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+# 20121112 masa workflow manager client
+Version = '20130517-111334'
+
+require 'drb/drb'
+
+if ARGV[0]=='-h' or ARGV[0]=='--help'
+  puts "Usage:\n #{__FILE__}"
+  exit
+end
+
+uri = ARGV[0]||'druby://localhost:12345'
+wfmrc = if File.exist?(".wfmrc")
+  ".wfmrc"
+elsif File.exist?(File.expand_path("~/.wfmrc"))
+  File.expand_path("~/.wfmrc")
+end
+if wfmrc
+  File.readlines(wfmrc).each do |line|
+    if line =~ /server:\s*(druby.+)/
+      uri = $1
+    end
+    if line =~ /project:\s*(\d+)/
+      project_number = $1
+    end
+  end
+end
+workflow_manager = DRbObject.new_with_uri(uri)
+puts workflow_manager.hello
data/bin/wfm_job_list
ADDED
@@ -0,0 +1,37 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+# 20121112 masa workflow manager client
+Version = '20131104-192211'
+
+require 'drb/drb'
+require 'workflow_manager/optparse_ex'
+#require File.join((File.expand_path('../../lib',__FILE__)), 'optparse_ex.rb')
+
+opt = OptionParser.new do |o|
+  o.banner = "Usage:\n #{File.basename(__FILE__)} [options]"
+#  o.on(:user, 'sushi lover', '-u user', '--user', 'who submitted?')
+  o.on(:project, nil, '-p project', '--project', 'project number')
+  o.on(:server, 'druby://localhost:12345', '-d server', '--server', 'workflow manager URI (default: druby://localhost:12345)')
+  o.parse!(ARGV)
+end
+
+with_result = false
+uri = opt.server||'druby://localhost:12345'
+project_number = opt.project
+wfmrc = if File.exist?(".wfmrc")
+  ".wfmrc"
+elsif File.exist?(File.expand_path("~/.wfmrc"))
+  File.expand_path("~/.wfmrc")
+end
+if wfmrc
+  File.readlines(wfmrc).each do |line|
+    if line =~ /server:\s*(druby.+)/
+      uri = $1
+    end
+    if line =~ /project:\s*(\d+)/
+      project_number = $1
+    end
+  end
+end
+workflow_manager = DRbObject.new_with_uri(uri)
+puts workflow_manager.job_list(with_result, project_number)
data/bin/wfm_monitoring
ADDED
@@ -0,0 +1,55 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+# 20121112 masa workflow manager client
+Version = '20131104-192323'
+
+require 'drb/drb'
+require 'workflow_manager/optparse_ex'
+
+opt = OptionParser.new do |o|
+  o.banner = "Usage:\n #{File.basename(__FILE__)} [options] [job_script.sh]"
+  o.on(:user, 'sushi lover', '-u user', '--user', 'who submitted? (default: sushi lover)')
+  o.on(:project, 1001, '-p project', '--project', 'project number (default: 1001)')
+  o.on(:server, 'druby://localhost:12345', '-d server', '--server', 'workflow manager URI (default: druby://localhost:12345)')
+  o.on(:log, '-o logdir', '--logdir', 'directory of standard output and standard error file outputted after the job')
+  o.on(:cores, '-c cores', '--cores', 'Number of cores to request for g-sub')
+  o.on(:nodes, '-n nodes', '--nodes', 'Comma separated list of nodes to submit to for g-sub')
+  o.on(:ram, '-r RAM', '--RAM', 'Amount of RAM to request in Gigs for g-sub')
+  o.on(:scratch, '-s scratch', '--scratch', 'Amount of scratch space to request in Gigs for g-sub')
+  o.parse!(ARGV)
+end
+unless script_file = ARGV[0] and script_file =~ /\.sh/
+  print opt.help
+  exit
+end
+
+project_number = opt.project
+uri = opt.server
+user = opt.user
+wfmrc = if File.exist?(".wfmrc")
+  ".wfmrc"
+elsif File.exist?(File.expand_path("~/.wfmrc"))
+  File.expand_path("~/.wfmrc")
+end
+if wfmrc
+  File.readlines(wfmrc).each do |line|
+    if line =~ /user:\s*(.+)/
+      user = $1
+    end
+    if line =~ /server:\s*(druby.+)/
+      uri = $1
+    end
+    if line =~ /project:\s*(\d+)/
+      project_number = $1
+    end
+  end
+end
+sge_options = []
+sge_options << "-c #{opt.cores}" if opt.cores
+sge_options << "-r #{opt.ram}" if opt.ram
+sge_options << "-s #{opt.scratch}" if opt.scratch
+sge_options << "-n #{opt.nodes}" if opt.nodes
+
+script_content = File.read(script_file)
+workflow_manager = DRbObject.new_with_uri(uri)
+puts workflow_manager.start_monitoring(script_file, user, 0, script_content, project_number, sge_options.join(' '), opt.log)
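Taken together, the options above translate into a shell call along these lines; the user name, resource values, log directory, and script name are made up for illustration, and the client prints back the job id returned by the server's start_monitoring:

    $ wfm_monitoring -u alice -p 1001 -c 4 -r 16 -s 100 -o /srv/logs my_job.sh

The positional argument must match /\.sh/; its content is read locally and shipped to the server together with the assembled g-sub resource options.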
data/bin/wfm_status
ADDED
@@ -0,0 +1,29 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+# 20121112 masa workflow manager client
+Version = '20130307-095018'
+
+require 'drb/drb'
+
+unless job_id = ARGV[0]
+  puts "Usage:\n #{__FILE__} [job_id] [server]"
+  puts
+  puts " job_id: required"
+  puts " server: workflow_manager URI (default: druby://localhost:12345)"
+  exit
+end
+uri = ARGV[1]||'druby://localhost:12345'
+wfmrc = if File.exist?(".wfmrc")
+  ".wfmrc"
+elsif File.exist?(File.expand_path("~/.wfmrc"))
+  File.expand_path("~/.wfmrc")
+end
+if wfmrc
+  File.readlines(wfmrc).each do |line|
+    if line =~ /server:\s*(druby.+)/
+      uri = $1
+    end
+  end
+end
+workflow_manager = DRbObject.new_with_uri(uri)
+puts workflow_manager.status(job_id)
data/bin/workflow_manager
ADDED
@@ -0,0 +1,262 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+# 20121109 masa workflow manager druby server
+Version = '20131104-192005'
+
+require 'drb/drb'
+require 'fileutils'
+require 'kyotocabinet'
+require_relative '../lib/workflow_manager'
+
+
+# default parameters
+#LOG_DIR = '/srv/GT/analysis/workflow_manager_logs'
+LOG_DIR = 'logs'
+DB_DIR = 'dbs'
+INTERVAL = 30
+RESUBMIT = 0
+
+class WorkflowManager
+  @@config = nil
+  class Config
+    attr_accessor :log_dir
+    attr_accessor :db_dir
+    attr_accessor :interval
+    attr_accessor :resubmit
+    attr_accessor :cluster
+  end
+  def self.config=(config)
+    @@config = config
+  end
+  def self.config
+    @@config
+  end
+  def config
+    @@config ||= WorkflowManager.configure{}
+  end
+  def self.configure
+    @@config = Config.new
+    # default values
+    @@config.log_dir = LOG_DIR
+    @@config.db_dir = DB_DIR
+    @@config.interval = INTERVAL # interval to check jobs, [s]
+    @@config.resubmit = RESUBMIT # how many times at maximum to resubmit when job fails
+    yield(@@config)
+    if @@config.cluster
+      @@config.cluster.log_dir = File.expand_path(@@config.log_dir)
+    end
+    @@config
+  end
+end
+
+class WorkflowManager
+  def initialize
+    @interval = config.interval
+    @resubmit = config.resubmit
+    @db_stat = File.join(config.db_dir, 'statuses.kch')
+    @db_logs = File.join(config.db_dir, 'logs.kch')
+
+    @log_dir = File.expand_path(config.log_dir)
+    @db_dir = File.expand_path(config.db_dir)
+    FileUtils.mkdir_p @log_dir unless File.exist?(@log_dir)
+    FileUtils.mkdir_p @db_dir unless File.exist?(@db_dir)
+    @statuses = KyotoCabinet::DB.new
+    @logs = KyotoCabinet::DB.new
+    @system_log = File.join(@log_dir, "system.log")
+    @mutex = Mutex.new
+    @cluster = config.cluster
+    log_puts("Server starts")
+  end
+  def hello
+    'hello, '+ @cluster.name
+  end
+  def copy_commands(org_dir, dest_parent_dir)
+    @cluster.copy_commands(org_dir, dest_parent_dir)
+  end
+  def log_puts(str)
+    time = Time.now.strftime("[%Y.%m.%d %H:%M:%S]")
+    @mutex.synchronize do
+      open(@system_log, "a") do |out|
+        out.print time + " " + str + "\n"
+      end
+    end
+  end
+  def start_monitoring(submit_command, user = 'sushi lover', resubmit = 0, script = '', project_number = 0, sge_options='', log_dir = '')
+    log_puts("monitoring: script=" + submit_command + " user=" + user + " resubmit=" + resubmit.to_s + " project=" + project_number.to_s + " sge option=" + sge_options + " log dir=" + log_dir.to_s)
+
+    #warn submit_command
+    #
+    # TODO: analyze arguments
+    #
+    job_id, log_file, command = @cluster.submit_job(submit_command, script, sge_options)
+    log_puts("submit: " + job_id + " " + command)
+
+    #
+    # monitor worker
+    #
+    if job_id and log_file
+      monitor_worker = Thread.new(job_id, log_file, submit_command, user, resubmit, script, project_number, sge_options, log_dir) do |t_job_id, t_log_file, t_submit_command, t_user, t_resubmit, t_script, t_project_number, t_sge_options, t_log_dir|
+        loop do
+          status = success_or_fail(t_job_id, t_log_file)
+          script_name = File.basename(submit_command).split(/-/).first
+          @statuses.open(@db_stat)
+          start_time = if stat = @statuses[t_job_id] and stat = stat.split(/,/) and time = stat[2]
+            time
+          end
+          time = if start_time
+            if status == 'success' or status == 'fail'
+              start_time + '/' + Time.now.strftime("%Y-%m-%d %H:%M:%S")
+            else
+              start_time
+            end
+          else
+            Time.now.strftime("%Y-%m-%d %H:%M:%S")
+          end
+          @statuses[t_job_id] = [status, script_name, time, user, project_number].join(',')
+          @statuses.close
+          @logs.open(@db_logs)
+          @logs[t_job_id] = t_log_file
+          @logs.close
+          #warn t_job_id + " " + status
+          if status == 'success'
+            log_puts(status + ": " + t_job_id)
+            unless t_log_dir.empty?
+              copy_commands(t_log_file, t_log_dir).each do |command|
+                log_puts(command)
+                system command
+              end
+              err_file = t_log_file.gsub('_o.log','_e.log')
+              copy_commands(err_file, t_log_dir).each do |command|
+                log_puts(command)
+                system command
+              end
+            end
+            Thread.current.kill
+          elsif status == 'fail'
+            log_puts(status + ": " + t_job_id)
+            #
+            # TODO: re-submit
+            #
+            if t_resubmit < RESUBMIT
+              log_puts("resubmit: " + t_job_id)
+              resubmit_job_id = start_monitoring(t_submit_command, t_user, t_resubmit + 1, t_script, t_project_number, t_sge_options)
+              script_name = File.basename(submit_command).split(/-/).first
+              @statuses.open(@db_stat)
+              @statuses[t_job_id] = ["resubmit: " + resubmit_job_id.to_s, script_name, Time.now.strftime("%Y-%m-%d %H:%M:%S"), t_user, t_project_number].join(',')
+              @statuses.close
+            else
+              log_puts("fail: " + t_job_id)
+            end
+            unless t_log_dir.empty?
+              copy_commands(t_log_file, t_log_dir).each do |command|
+                log_puts(command)
+                system command
+              end
+              err_file = t_log_file.gsub('_o.log','_e.log')
+              copy_commands(err_file, t_log_dir).each do |command|
+                log_puts(command)
+                system command
+              end
+            end
+            Thread.current.kill
+          end
+          sleep @interval
+        end
+      end
+      job_id.to_i
+    end
+  end
+  def status(job_id)
+    stat = nil
+    @statuses.open(@db_stat)
+    stat = @statuses[job_id.to_s]
+    @statuses.close
+    stat
+  end
+  def job_list(with_results=false, project_number=nil)
+    s = []
+    @statuses.open(@db_stat)
+    @statuses.each do |key, value|
+      if project_number
+        if x = value.split(/,/)[4].to_i==project_number.to_i
+          s << [key, value]
+        end
+      else
+        s << [key, value]
+      end
+    end
+    @statuses.close
+    s.sort.reverse.map{|v| v.join(',')}.join("\n")
+  end
+  def get_log(job_id, with_err=false)
+    @logs.open(@db_logs)
+    log_file = @logs[job_id.to_s]
+    @logs.close
+    log_data = if log_file and File.exist?(log_file)
+      "__STDOUT LOG__\n\n" + File.read(log_file)
+    else
+      'no log file'
+    end
+    if with_err
+      err_file = log_file.gsub(/_o\.log/,'_e.log')
+      if err_file and File.exist?(err_file)
+        log_data << "\n\n__STDERR LOG__\n\n"
+        log_data << File.read(err_file)
+      end
+    end
+    log_data
+  end
+  def get_script(job_id)
+    @logs.open(@db_logs)
+    script_file = @logs[job_id.to_s]
+    @logs.close
+    if script_file
+      script_file = script_file.gsub(/_o\.log/,'')
+    end
+    script = if script_file and File.exist?(script_file)
+      File.read(script_file)
+    else
+      'no script file'
+    end
+    script
+  end
+  def success_or_fail(job_id, log_file)
+    job_running = @cluster.job_running?(job_id)
+    job_ends = @cluster.job_ends?(log_file)
+    msg = if job_running
+      'running'
+    elsif job_ends
+      'success'
+    else
+      'fail'
+    end
+    msg
+  end
+end
+
+#
+# main
+#
+#if __FILE__ == $0
+
+opt = OptionParser.new do |o|
+  o.banner = "Usage:\n #{File.basename(__FILE__)} -d [druby://host:port] -m [development|production]"
+  o.on(:server, 'druby://localhost:12345', '-d server', '--server', 'workflow manager URI (default: druby://localhost:12345)')
+  o.on(:mode, 'development', '-m mode', '--mode', 'development|production (default: development)')
+  o.parse!(ARGV)
+end
+
+uri = opt.server
+if opt.mode =~ /[development|production]/
+  config = File.join(File.dirname(File.expand_path(__FILE__)), "../config/environments/#{opt.mode}.rb")
+  opt.mode = nil unless File.exist?(config)
+end
+print "mode = #{opt.mode}\n"
+if opt.mode
+  require_relative "../config/environments/#{opt.mode}"
+end
+DRb.start_service(uri, WorkflowManager.new)
+puts DRb.uri
+DRb.thread.join
+#sleep
+#end
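The development.rb and production.rb environment files are listed in the gem but their contents are not part of this diff. The configure class method above indicates what such a file has to provide; a minimal, hypothetical development configuration using the LocalComputer cluster from lib/workflow_manager/cluster.rb (assuming the five-line lib/workflow_manager.rb, also not shown, loads that file) might look like:

    WorkflowManager.configure do |config|
      config.log_dir  = 'logs'   # stdout/stderr logs and system.log
      config.db_dir   = 'dbs'    # KyotoCabinet status and log databases
      config.interval = 30       # seconds between job status checks
      config.resubmit = 0        # maximum automatic resubmissions on failure
      config.cluster  = LocalComputer.new('local_computer')
    end

With such a file in place, the server is started as `workflow_manager -d druby://localhost:12345 -m development`.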
data/lib/workflow_manager/cluster.rb
ADDED
@@ -0,0 +1,113 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+
+class Cluster
+  attr_accessor :name
+  attr_reader :options
+  attr_accessor :log_dir
+  def initialize(name='', log_dir='')
+    @name = name
+    @options = {}
+    @log_dir = log_dir
+  end
+  def generate_new_job_script(script_name, script_content)
+    new_job_script = File.basename(script_name) + "_" + Time.now.strftime("%Y%m%d%H%M%S")
+    new_job_script = File.join(@log_dir, new_job_script)
+    open(new_job_script, 'w') do |out|
+      out.print script_content
+      out.print "\necho __SCRIPT END__\n"
+    end
+    new_job_script
+  end
+  def submit_job(script_file, script_content, option='')
+  end
+  def job_running?(job_id)
+  end
+  def job_ends?(log_file)
+  end
+  def copy_commands(org_dir, dest_parent_dir)
+  end
+end
+
+class LocalComputer < Cluster
+  def submit_job(script_file, script_content, option='')
+    if script_name = File.basename(script_file) and script_name =~ /\.sh$/
+      new_job_script = generate_new_job_script(script_name, script_content)
+      new_job_script_base = File.basename(new_job_script)
+      log_file = File.join(@log_dir, new_job_script_base + "_o.log")
+      err_file = File.join(@log_dir, new_job_script_base + "_e.log")
+      command = "bash #{new_job_script} 1> #{log_file} 2> #{err_file}"
+      pid = spawn(command)
+      Process.detach(pid)
+      [pid.to_s, log_file, command]
+    end
+  end
+  def job_running?(pid)
+    command = "ps aux"
+    result = IO.popen(command) do |io|
+      flag = false
+      while line=io.gets
+        x = line.split
+        if x[1].to_i == pid.to_i
+          flag = true
+          break
+        end
+      end
+      flag
+    end
+    result
+  end
+  def job_ends?(log_file)
+    command = "tail -n 20 #{log_file}|grep '__SCRIPT END__'"
+    result = `#{command}`
+    result.to_s.empty? ? false : true
+  end
+  def copy_commands(org_dir, dest_parent_dir)
+    commands = []
+    commands << "mkdir -p #{dest_parent_dir}"
+    commands << "cp -r #{org_dir} #{dest_parent_dir}"
+    commands
+  end
+end
+
+class FGCZCluster < Cluster
+  def submit_job(script_file, script_content, option='')
+    if script_name = File.basename(script_file) and script_name =~ /\.sh$/
+      new_job_script = generate_new_job_script(script_name, script_content)
+      new_job_script_base = File.basename(new_job_script)
+      log_file = File.join(@log_dir, new_job_script_base + "_o.log")
+      err_file = File.join(@log_dir, new_job_script_base + "_e.log")
+      command = "g-sub -o #{log_file} -e #{err_file} #{option} #{new_job_script}"
+      job_id = `#{command}`
+      job_id = job_id.match(/Your job (\d+) \(/)[1]
+      [job_id, log_file, command]
+    end
+  end
+  def job_running?(job_id)
+    qstat_flag = false
+    IO.popen('qstat -u "*"') do |io|
+      while line=io.gets
+        if line =~ /#{job_id}/
+          qstat_flag = true
+          break
+        end
+      end
+    end
+    qstat_flag
+  end
+  def job_ends?(log_file)
+    log_flag = false
+    IO.popen("tail -n 10 #{log_file}") do |io|
+      while line=io.gets
+        if line =~ /__SCRIPT END__/
+          log_flag = true
+          break
+        end
+      end
+    end
+    log_flag
+  end
+  def copy_commands(org_dir, dest_parent_dir)
+    commands = ["g-req -w copy #{org_dir} #{dest_parent_dir}"]
+  end
+end
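Cluster is the extension point here: the server only ever calls submit_job, job_running?, job_ends?, and copy_commands, so supporting another scheduler means subclassing Cluster and implementing those four methods. A hypothetical sketch for a generic qsub/qstat-style batch system (the command strings are placeholders, not part of this gem):

    class MyBatchCluster < Cluster
      def submit_job(script_file, script_content, option='')
        return unless File.basename(script_file) =~ /\.sh$/
        # wrap the script so that job_ends? can find the __SCRIPT END__ marker
        job_script = generate_new_job_script(File.basename(script_file), script_content)
        log_file = File.join(@log_dir, File.basename(job_script) + "_o.log")
        err_file = File.join(@log_dir, File.basename(job_script) + "_e.log")
        command  = "qsub -o #{log_file} -e #{err_file} #{option} #{job_script}"
        job_id   = `#{command}`.strip
        [job_id, log_file, command]
      end
      def job_running?(job_id)
        `qstat`.include?(job_id.to_s)
      end
      def job_ends?(log_file)
        File.exist?(log_file) and File.read(log_file).include?('__SCRIPT END__')
      end
      def copy_commands(org_dir, dest_parent_dir)
        ["mkdir -p #{dest_parent_dir}", "cp -r #{org_dir} #{dest_parent_dir}"]
      end
    end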
data/lib/workflow_manager/optparse_ex.rb
ADDED
@@ -0,0 +1,52 @@
+#!/usr/bin/env ruby
+# encoding: utf-8
+
+require 'optparse'
+# Version = '20130517-120455'
+
+class OptionParser
+  attr_reader :p
+  alias :_on :on
+  def on(attr, *args, &block)
+    if attr.is_a?(Symbol)
+      self.class.class_eval do
+        unless method_defined?(attr)
+          attr_accessor attr
+        else
+          raise "Method #{attr.to_s} is already defined in OptionParser class"
+        end
+      end
+      unless args[0] =~ /\-/
+        default = args.shift
+        self.send(attr.to_s+"=", default)
+      end
+      _on(*args) do |i|
+        self.send(attr.to_s+"=", i)
+        block.call(i) if block
+      end
+    else
+      args.unshift attr
+      _on(*args, block)
+    end
+  end
+end
+
+if __FILE__ == $0
+  opt = OptionParser.new do |o|
+    o.banner = "Last update: #{o.version}\nUsage: ruby #{__FILE__} [options]"
+    o.on(:size, 100, '-N N', '--pop_size', Integer, 'population size (default: 100)'){|i| p i}
+    o.on(:seed, '-R R', '--rseed', 'random seed')
+    o.on(:flag, '-f', 'flag')
+    o.on('-M M', '--hoge', Integer, 'hoge'){|i| p i}
+    o.parse!(ARGV)
+  end
+
+  print "opt.flag = "
+  p opt.flag
+  print "opt.size = "
+  p opt.size
+  print "opt.seed = "
+  p opt.seed
+  puts
+  puts opt.help
+end
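The practical effect of this patch, which the bin scripts above rely on, is that an option declared with a leading Symbol (optionally followed by a default value) becomes a reader/writer on the parser itself, so parsed values are read back as opt.server or opt.project rather than from a separate options hash. A small illustration (the URI passed in is arbitrary):

    require 'workflow_manager/optparse_ex'

    opt = OptionParser.new do |o|
      o.on(:server, 'druby://localhost:12345', '-d server', '--server', 'workflow manager URI')
      o.parse!(['-d', 'druby://example.org:40001'])
    end
    opt.server  # => "druby://example.org:40001"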
data/workflow_manager.gemspec
ADDED
@@ -0,0 +1,24 @@
+# coding: utf-8
+lib = File.expand_path('../lib', __FILE__)
+$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+require 'workflow_manager/version'
+
+Gem::Specification.new do |spec|
+  spec.name = "workflow_manager"
+  spec.version = WorkflowManager::VERSION
+  spec.authors = ["Functional Genomics Center Zurich"]
+  spec.email = ["masaomi.hatakeyama@fgcz.uzh.ch"]
+  spec.description = %q{Workflow Manager manages job submissions using dRuby.}
+  spec.summary = %q{Workflow Manager manages job submissions using dRuby.}
+  spec.homepage = ""
+  spec.license = "MIT"
+
+  #spec.files = `git ls-files`.split($/)
+  spec.files = `bzr ls --versioned --recursive`.split($/).select{|file| !File.directory?(file)}
+  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
+  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
+  spec.require_paths = ["lib"]
+
+  spec.add_development_dependency "bundler", "~> 1.3"
+  spec.add_development_dependency "rake"
+end
metadata
ADDED
@@ -0,0 +1,103 @@
+--- !ruby/object:Gem::Specification
+name: workflow_manager
+version: !ruby/object:Gem::Version
+  version: 0.0.5
+  prerelease:
+platform: ruby
+authors:
+- Functional Genomics Center Zurich
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2013-11-07 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  name: bundler
+  requirement: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '1.3'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ~>
+      - !ruby/object:Gem::Version
+        version: '1.3'
+- !ruby/object:Gem::Dependency
+  name: rake
+  requirement: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+description: Workflow Manager manages job submissions using dRuby.
+email:
+- masaomi.hatakeyama@fgcz.uzh.ch
+executables:
+- wfm_get_log
+- wfm_get_script
+- wfm_hello
+- wfm_job_list
+- wfm_monitoring
+- wfm_status
+- workflow_manager
+extensions: []
+extra_rdoc_files: []
+files:
+- Gemfile
+- LICENSE.txt
+- README.md
+- Rakefile
+- bin/wfm_get_log
+- bin/wfm_get_script
+- bin/wfm_hello
+- bin/wfm_job_list
+- bin/wfm_monitoring
+- bin/wfm_status
+- bin/workflow_manager
+- config/environments/development.rb
+- config/environments/production.rb
+- lib/workflow_manager/cluster.rb
+- lib/workflow_manager/optparse_ex.rb
+- lib/workflow_manager/version.rb
+- lib/workflow_manager.rb
+- workflow_manager.gemspec
+homepage: ''
+licenses:
+- MIT
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  none: false
+  requirements:
+  - - ! '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
+  none: false
+  requirements:
+  - - ! '>='
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubyforge_project:
+rubygems_version: 1.8.24
+signing_key:
+specification_version: 3
+summary: Workflow Manager manages job submissions using dRuby.
+test_files: []