autoflow 0.6.4 → 0.8.0

Sign up to get free protection for your applications and to get access to all the features.
data/autoflow.gemspec CHANGED
@@ -19,8 +19,10 @@ Gem::Specification.new do |spec|
19
19
  spec.require_paths = ["lib"]
20
20
 
21
21
  spec.add_runtime_dependency 'net-ssh', '>= 2.8.0'
22
+ spec.add_runtime_dependency 'git', '>= 1.3.0'
22
23
  spec.add_runtime_dependency 'win32console', '>= 1.3.2' if !ENV['OS'].nil? && ENV['OS'].downcase.include?('windows')
23
24
  spec.add_runtime_dependency 'colorize', '~> 0.7.3'
25
+ spec.add_runtime_dependency 'terminal-table', '~> 1.6.0'
24
26
  spec.add_development_dependency "bundler", "~> 1.3"
25
27
  spec.add_development_dependency "rake"
26
28
  end
data/bin/AutoFlow CHANGED
@@ -11,6 +11,7 @@ require 'io/console'
11
11
  require 'net/ssh'
12
12
  require 'queue_manager'
13
13
  require 'fileutils'
14
+ require 'git'
14
15
 
15
16
  #################################################################################################
16
17
  # METHODS
@@ -21,8 +22,13 @@ def get_templates(string_template)
21
22
  end
22
23
 
23
24
# Collect every directory that can contain workflow templates:
# the user's local repository, any colon-separated paths from the
# WORKFLOW_REPOSITORY environment variable, and every cloned remote
# repository under ~/autoflow_templates/remote.
def get_repositories
  base = File.join(ENV['HOME'], 'autoflow_templates')
  repositories = [File.join(base, 'local')]
  env_paths = ENV['WORKFLOW_REPOSITORY']
  repositories.concat(env_paths.split(':')) unless env_paths.nil?
  repositories.concat(Dir.glob(File.join(base, 'remote', '*')))
  return repositories
end
28
34
 
@@ -39,6 +45,8 @@ def list_repository_templates
39
45
  end
40
46
  templates.delete('.')
41
47
  templates.delete('..')
48
+ templates.delete('README.md') # From git remote repos
49
+ templates.delete('.git') # From git remote repos
42
50
  return templates
43
51
  end
44
52
 
@@ -105,6 +113,11 @@ optparse = OptionParser.new do |opts|
105
113
  options[:graph] = graph
106
114
  end
107
115
 
116
+ options[:get_template_repository] = nil
117
+ opts.on( '-G', '--get_template_repository GIT_URL', 'Git url to get template from a remote repository' ) do |url|
118
+ options[:get_template_repository] = url
119
+ end
120
+
108
121
  options[:identifier] = FALSE
109
122
  opts.on( '-i', '--job_identifier STRING', 'Identifier tag for each launching script' ) do |identifier|
110
123
  options[:identifier] = identifier
@@ -204,8 +217,24 @@ optparse.parse!
204
217
  # MAIN
205
218
  #################################################################################################
206
219
  #Create repository
207
- if !File.exists?(File.join(ENV['HOME'], 'autoflow_templates'))
208
- Dir.mkdir(File.join(ENV['HOME'], 'autoflow_templates'))
220
+ main_repository = File.join(ENV['HOME'], 'autoflow_templates')
221
+ local_rep = File.join(main_repository, 'local')
222
+ remote_rep = File.join(main_repository, 'remote')
223
+ Dir.mkdir(main_repository) if !File.exists?(main_repository)
224
+ Dir.mkdir(local_rep) if !File.exists?(local_rep)
225
+ Dir.mkdir(remote_rep) if !File.exists?(remote_rep)
226
+
227
+ #Move templates from legacy tree folder to current tree folder
228
+ Dir.glob(File.join(main_repository, '*')) { |file| FileUtils.mv(file, local_rep) if file != local_rep && file != remote_rep }
229
+
230
+ # Get git remote repos and include it in user templates
231
+ if !options[:get_template_repository].nil?
232
+ fields = options[:get_template_repository].split(/\/+/)
233
+ repo_name = fields.last.gsub('.git','')
234
+ repo_author = fields[-2]
235
+ local_name = "REM_#{repo_author}_#{repo_name}"
236
+ Git.clone(options[:get_template_repository], local_name, :path => remote_rep)
237
+ exit_exec('Remote template repository added as ' + local_name)
209
238
  end
210
239
 
211
240
  # List templates
@@ -294,7 +323,7 @@ end
294
323
  # Flow parse
295
324
  #--------------------------------------------------------------------------------
296
325
  stack = Stack.new(exec_folder, options)
297
- manager = QueueManager.select_queue_manager(stack, options)
326
+ stack.parse!
298
327
 
299
328
  #--------------------------------------------------------------------------------
300
329
  # Flow exec
@@ -304,6 +333,8 @@ if !options[:graph].nil?
304
333
  else
305
334
  stack.inspect if options[:verbose]
306
335
  stack.comment_main_command if options[:comment]
336
+ options[:write_sh] = TRUE # Set in flow logger to FALSE, it is used for relaunch failed jobs
337
+ manager = QueueManager.select_queue_manager(stack.exec_folder, options, stack.jobs, stack.persist_variables)
307
338
  manager.exec
308
339
  end
309
340
  options[:ssh].close if options[:remote]
data/bin/flow_logger ADDED
@@ -0,0 +1,315 @@
1
+ #! /usr/bin/env ruby
2
+
3
+ ROOT_PATH=File.dirname(__FILE__)
4
+ $: << File.expand_path(File.join(ROOT_PATH, "..", "lib", "autoflow"))
5
+ $: << File.expand_path(File.join(ROOT_PATH, "..", "lib", "autoflow", "queue_managers"))
6
+
7
+ require 'autoflow'
8
+ require 'optparse'
9
+ require 'colorize'
10
+ require 'logging'
11
+ require 'json'
12
+ require 'terminal-table'
13
+ require 'queue_manager'
14
+ require 'program'
15
+ require 'erb'
16
+
17
+ #################################################################################################
18
+ ### METHODS
19
+ #################################################################################################
20
+
21
# Print a terminal table summarising every task's execution state.
# +mode+ filters by state ('SUCC', 'RUN', 'ABORT', ...); nil or 'ALL'
# keeps everything. +no_size+ skips the (slow) per-folder `du` call.
def report_log(log, initial_flow_attribs, mode, workflow_status, no_size)
  set_task_state(log, workflow_status)
  set_time(log)
  tasks =
    if mode.nil? || mode.upcase == 'ALL'
      log
    else
      wanted = mode.upcase
      log.select { |name, attribs| attribs['state'] == wanted }
    end
  rows = tasks.map do |task_name, attribs|
    job_path = initial_flow_attribs[task_name].first
    # Folder size shells out to `du`, so it is optional.
    size = no_size ? nil : `du -sh #{job_path}`.split.first
    [attribs['state_msg'], File.basename(job_path), attribs['time'], size, task_name]
  end
  puts Terminal::Table.new :headings => ['Status', 'Folder', 'Time', 'Size', 'Job Name'], :rows => rows
end
38
+
39
# Relaunch the jobs that aborted in the last execution, plus every job
# that (transitively) depends on them. The .sh files written by the
# original run are reused, so :write_sh is disabled.
def launch_failed_jobs(log, initial_flow_attribs, exec_folder, batch)
  # Fix: the TRUE/FALSE constants were removed in Ruby 3.2; use literals.
  options = {
    :verbose => false,
    :identifier => nil,
    :remote => false,
    :ssh => nil,
    :external_dependencies => [],
    :batch => batch,
    :write_sh => false
  }
  failed_jobs = get_failed_jobs(log)
  jobs = {}
  create_jobs(jobs, failed_jobs, initial_flow_attribs)
  get_all_dependencies(jobs, failed_jobs, initial_flow_attribs)
  manager = QueueManager.select_queue_manager(exec_folder, options, jobs, {})
  manager.exec
end
56
+
57
# Build a Program object for each job in +failed_jobs+ and store it in
# +jobs+. Only dependencies that are themselves being relaunched (already
# present in +jobs+) are kept, so the queue manager never waits on a job
# that will not run again.
def create_jobs(jobs, failed_jobs, initial_flow_attribs)
  failed_jobs.each do |job|
    folder, dependencies = initial_flow_attribs[job]
    # Fix: the TRUE/FALSE constants were removed in Ruby 3.2; use literals.
    job_attrib = {
      :done => false,
      :folder => true,
      :buffer => false,
      :exec_folder => folder,
      :cpu_asign => nil
    }
    verified_dependencies = dependencies.select { |dep| !jobs[dep].nil? }
    jobs[job] = Program.new(job, '', '', verified_dependencies, job_attrib)
  end
end
74
+
75
# Recursively add to +jobs+ every job that depends (directly or
# transitively) on one of +failed_jobs+, so the whole downstream branch
# of the workflow is re-executed.
def get_all_dependencies(jobs, failed_jobs, initial_flow_attribs)
  dependent_jobs = []
  failed_jobs.each do |failed|
    initial_flow_attribs.each do |job, (_folder, dependencies)|
      next unless dependencies.include?(failed)
      dependent_jobs << job unless dependent_jobs.include?(job)
    end
  end
  unless dependent_jobs.empty?
    create_jobs(jobs, dependent_jobs, initial_flow_attribs)
    get_all_dependencies(jobs, dependent_jobs, initial_flow_attribs)
  end
end
88
+
89
# Return the names of jobs that aborted in the most recent execution.
# Each task's abort index is located with find_failed; only failures at
# the latest execution index are reported.
def get_failed_jobs(log)
  fails = []
  last_position = 0
  log.each do |task, attribs|
    abort_index = find_failed(attribs['start'], attribs['end'])
    next if abort_index.nil?
    last_position = abort_index if abort_index > last_position
    fails << [task, abort_index]
  end
  fails.select { |task, index| index == last_position }.map { |task, index| task }
end
109
+
110
# Inspect a task's start/end timestamp arrays (one slot per execution)
# and return the index of the newest execution that started but never
# finished, provided no later execution finished either. Returns nil
# when the task never aborted.
def find_failed(ar_start, ar_end)
  (ar_start.length - 1).downto(0) do |pos|
    next unless ar_start[pos] > 0 && ar_end[pos] == 0
    later_ends = ar_end[pos..ar_end.length - 1]
    return pos if later_ends.nil? || later_ends.count(0) == later_ends.length
  end
  return nil
end
125
+
126
# Annotate every task with its elapsed time: 'seconds' (integer, 0 for
# tasks that never ran) and 'time' (human-readable string scaled to
# s/m/h/d, or '-' when zero). RUN tasks are measured against Time.now.
def set_time(log)
  log.each do |task, attribs|
    started = attribs['start'].last
    finished = attribs['end'].last
    elapsed =
      case attribs['state']
      when 'SUCC' then finished - started
      when 'RUN' then Time.now.to_i - started
      else 0
      end
    attribs['seconds'] = elapsed
    unit = 's'
    if elapsed >= 60
      unit = 'm'
      elapsed = elapsed / 60.0 # to minutes
    end
    if elapsed >= 60 && unit == 'm'
      unit = 'h'
      elapsed = elapsed / 60.0 # to hours
    end
    if elapsed >= 24 && unit == 'h'
      unit = 'd'
      elapsed = elapsed / 24.0 # to days
    end
    attribs['time'] = elapsed == 0 ? '-' : "#{elapsed} #{unit}"
  end
end
159
+
160
# Derive each task's state from the start/end timestamps of one
# execution slot (+position+, default -1 = newest). A started but
# unfinished task is ABORT once the workflow has ended
# (+workflow_status+ true) and RUN while it is still going. Stores a
# plain 'state' tag and a colourised 'state_msg'.
def set_task_state(log, workflow_status, position = -1)
  log.each do |task, attribs|
    # Fix: dropped start_position/stop_position — they were computed on
    # every iteration but never used.
    start = attribs['start'][position]
    stop = attribs['end'][position]
    if workflow_status # Workflow has finished
      if start == 0 && stop == 0
        status = 'NOT'
        status_msg = 'NOT'.colorize(:blue)
      elsif start > 0 && stop > 0
        status = 'SUCC'
        status_msg = 'SUCC'.colorize(:green)
      elsif start > 0 && stop == 0
        status = 'ABORT'
        status_msg = 'ABORT'.colorize(:red)
      end
    else # Workflow is still running
      if start == 0 && stop == 0
        status = 'PEND'
        status_msg = 'PEND'.colorize(:blue)
      elsif start > 0 && stop > 0
        status = 'SUCC'
        status_msg = 'SUCC'.colorize(:green)
      elsif start > 0 && stop == 0
        status = 'RUN'
        status_msg = 'RUN'.colorize(:magenta)
      end
    end
    # NOTE(review): start == 0 && stop > 0 leaves status nil — assumed
    # impossible for a well-formed log; confirm.
    attribs['state'] = status
    attribs['state_msg'] = status_msg
  end
end
193
+
194
# Append one tab-separated "task_name<TAB>attrib<TAB>epoch_seconds" line
# to +log_file+ (attrib is 'start' or 'end').
def add_timestamp(log_file, attrib, task_name)
  entry = "#{task_name}\t#{attrib}\t#{Time.now.to_i}"
  File.open(log_file, 'a') { |handle| handle.puts entry }
end
197
+
198
# Build an HTML execution report: an ERB table of per-task times plus a
# chart page produced from a 'lines.html' template read from the current
# directory (its 'data_structure' placeholder is replaced with JSON).
# Writes the result to 'report.html'.
def report_html(log, initial_flow_attribs)
  # Fix: TRUE constant was removed in Ruby 3.2; use the literal.
  set_task_state(log, true)
  set_time(log)
  report ="
<table>
<% log.each do |task, attribs| %>
<tr>
<td><%= task %></td>
<td><%= attribs['seconds'] %></td>
<tr>
<% end %>
</table>
"
  # Data layout expected by the charting code embedded in lines.html.
  data_structure = {
    'y' =>{
      'vars' => ['Time'],
      'smps' => log.keys,
      'data' => [log.values.map{|attribs| attribs['seconds']}],
      'desc' => ['seconds']
    },
    'a' => {
      "xAxis" => ["Time"]
    }
  }
  puts data_structure.inspect
  # NOTE(review): 'lines.html' is resolved relative to the working
  # directory — presumably the workflow folder; confirm with callers.
  demo = File.open('lines.html').read
  demo.gsub!('data_structure', data_structure.to_json)
  renderer = ERB.new(report + "\n" + demo)
  File.open('report.html', 'w'){|f| f.puts renderer.result()}
end
231
#################################################################################################
### PARSE OPTIONS
#################################################################################################

options = {}
OptionParser.new do |opts|
  # Fix: __FILE__ was inside a plain string and printed literally;
  # interpolate it so the usage line shows the real script path.
  opts.banner = "Usage: #{__FILE__} [options]"

  options[:workflow_execution] = Dir.pwd
  opts.on("-e", "--workflow_execution PATH", "Path to workflow directory") do |opt|
    options[:workflow_execution] = File.expand_path(opt)
  end

  options[:start] = nil
  opts.on("-s", "--start TASK_NAME", "Write start timestamp of TASK_NAME to log") do |opt|
    options[:start] = opt
  end

  options[:finish] = nil
  opts.on("-f", "--finish TASK_NAME", "Write finish timestamp of TASK_NAME to log") do |opt|
    options[:finish] = opt
  end

  options[:report] = nil
  opts.on("-r", "--report STATUS", "List the status of launched tasks.") do |opt|
    options[:report] = opt
  end

  # Fix: TRUE/FALSE constants were removed in Ruby 3.2; use literals.
  options[:workflow_status] = false
  opts.on("-w", "--workflow_finished", "When set, logger assumes that the workflow has ended") do |opt|
    options[:workflow_status] = true
  end

  options[:no_size] = false
  opts.on("-n", "--no_size", "When set, logger don't compute the workflow folder sizes") do |opt|
    options[:no_size] = true
  end

  options[:batch] = false
  opts.on( '-b', '--batch', 'Workflow execution using batch' ) do |opt|
    options[:batch] = true
  end

  options[:launch_failed_jobs] = false
  opts.on("-l", "--launch_failed_jobs", "Launch jobs tagges as ABORT and NOT. This option only works when the -w flag is enabled") do |opt|
    options[:launch_failed_jobs] = true
  end

  options[:html] = false
  opts.on("-H", "--html", "Make a workflow execution full report in html format") do |opt|
    options[:html] = true
  end
end.parse!
284
+
285
#################################################################################################
### MAIN
#################################################################################################

# Timestamp modes just append to the log file; otherwise build a report
# or relaunch failed jobs from the recorded execution data.
if !options[:start].nil?
  add_timestamp(options[:workflow_execution],'start', options[:start])
elsif !options[:finish].nil?
  add_timestamp(options[:workflow_execution],'end', options[:finish])
else
  log_folder = File.join(options[:workflow_execution], '.wf_log')
  job_attribs_file = File.join(options[:workflow_execution], 'wf.json')

  # Fix: Dir.exists?/File.exists? were removed in Ruby 3.2; the exist?
  # forms work on every Ruby version.
  if !Dir.exist?(log_folder)
    puts "Log folder not exists"
    Process.exit
  end
  if !File.exist?(job_attribs_file)
    puts "wf.json file not exists"
    Process.exit
  end

  attribs = JSON.parse(File.open(job_attribs_file).read)
  log = parse_log(log_folder)
  if !options[:report].nil?
    report_log(log, attribs, options[:report], options[:workflow_status], options[:no_size])
  elsif options[:html]
    report_html(log, attribs)
  elsif options[:workflow_status] && options[:launch_failed_jobs]
    launch_failed_jobs(log, attribs, options[:workflow_execution], options[:batch])
  end
end
@@ -0,0 +1,58 @@
1
# Read every per-job file under +log_path+ into a hash of the form
#   { task_name => { 'set'|'start'|'end' => [epoch, epoch, ...] } }
# Each line is "name\tstatus\ttime". Afterwards the 'start' and 'end'
# arrays are zero-padded to the length of 'set' (one slot per execution).
def parse_log(log_path)
  log = {}
  # Fix: Dir.exists? was removed in Ruby 3.2; Dir.exist? works everywhere.
  if Dir.exist?(log_path)
    Dir.entries(log_path).each do |entry|
      next if entry == '.' || entry == '..'
      File.open(File.join(log_path, entry)).each do |line|
        line.chomp!
        name, status, time_int = line.split("\t")
        time = time_int.to_i
        query = log[name]
        if query.nil?
          log[name] = {status => [time]}
        else
          query_status = query[status]
          if query_status.nil?
            query[status] = [time]
          else
            query_status << time
          end
        end
      end
    end
  end
  log.each do |task, attribs|
    # assumes every recorded task has at least one 'set' entry written by
    # init_log — TODO confirm for logs from interrupted runs
    set_length = attribs['set'].length
    fill_attrib(attribs, 'start', set_length)
    fill_attrib(attribs, 'end', set_length)
  end
  return log
end
32
+
33
# Ensure attribs[mode] holds exactly +set_length+ timestamps, creating
# the array or right-padding it with zeros (0 = event never recorded).
def fill_attrib(attribs, mode, set_length)
  timestamps = attribs[mode]
  if timestamps.nil?
    attribs[mode] = Array.new(set_length, 0)
  else
    timestamps << 0 while timestamps.length < set_length
  end
end
43
+
44
# Persist the in-memory log back to one file per job inside +log_path+.
# +job_relations_with_folders+ maps job name => [exec_folder, deps]; the
# log file is named after the basename of the job's exec folder.
def write_log(log, log_path, job_relations_with_folders)
  # Fix: Dir.exists? was removed in Ruby 3.2; Dir.exist? works everywhere.
  Dir.mkdir(log_path) if !Dir.exist?(log_path)
  job_relations_with_folders.each do |name, folder_deps|
    if !log[name].nil? #Control check when the wk_log folder has been deleted
      folder, deps = folder_deps
      # '/' join on purpose: paths target the (linux) execution host.
      f = File.open([log_path, File.basename(folder)].join('/'), 'w')
      log[name].each do |mode, times|
        times.each do |time|
          f.puts "#{name}\t#{mode}\t#{time}"
        end
      end
      f.close
    end
  end
end
@@ -1,3 +1,5 @@
1
+ require 'logging'
2
+ require 'json'
1
3
  class QueueManager
2
4
 
3
5
  def initialize(exec_folder, options, commands, persist_variables)
@@ -9,7 +11,9 @@ class QueueManager
9
11
  @files = {}
10
12
  @remote = options[:remote]
11
13
  @ssh = options[:ssh]
14
+ @write_sh = options[:write_sh]
12
15
  @external_dependencies = options[:external_dependencies]
16
+ @active_jobs = []
13
17
  end
14
18
 
15
19
  ########################################################################################
@@ -20,7 +24,7 @@ class QueueManager
20
24
  ObjectSpace.each_object(Class).select { |klass| klass < self }
21
25
  end
22
26
 
23
- def self.select_queue_manager(stack, options)
27
+ def self.select_queue_manager(exec_folder, options, jobs, persist_variables)
24
28
  path_managers = File.join(File.dirname(__FILE__),'queue_managers')
25
29
  Dir.glob(path_managers+'/*').each do |manager|
26
30
  require manager
@@ -30,7 +34,7 @@ class QueueManager
30
34
  else
31
35
  queue_manager = select_manager(options)
32
36
  end
33
- return queue_manager.new(stack.exec_folder, options, stack.jobs, stack.persist_variables)
37
+ return queue_manager.new(exec_folder, options, jobs, persist_variables)
34
38
  end
35
39
 
36
40
  def self.select_manager(options)
@@ -60,9 +64,42 @@ class QueueManager
60
64
  close_file('index_execution')
61
65
  end
62
66
 
67
# Initialise the .wf_log folder for this run. #TODO adapt to remote execution
# Registers a 'set' timestamp for every job about to be launched and, on
# a first (sh-writing) run, dumps the job relations to wf.json so
# flow_logger can later reconstruct the workflow.
def init_log
  # '/' join must assume linux systems, so File.join cannot be used for
  # windows hosts.
  log_path = [@exec_folder, '.wf_log'].join('/')
  log = parse_log(log_path) #TODO modify to folder
  job_relations_with_folders = get_relations_and_folders
  if @write_sh
    create_file('wf.json', @exec_folder)
    write_file('wf.json', job_relations_with_folders.to_json)
    close_file('wf.json')
  end
  @active_jobs.each do |task|
    entry = log[task]
    if entry.nil?
      log[task] = {'set' => [Time.now.to_i]}
    else
      entry['set'] << Time.now.to_i
    end
  end
  write_log(log, log_path, job_relations_with_folders)
end
86
+
87
# Map each job name to [exec_folder, dependencies] — the serialisable
# description written to wf.json.
def get_relations_and_folders
  @commands.each_with_object({}) do |(name, job), relations|
    relations[name] = [job.attrib[:exec_folder], job.dependencies]
  end
end
94
+
63
95
  def launch_all_jobs
64
96
  buffered_jobs = []
65
- sort_jobs_by_dependencies.each do |name, job|
97
+ sorted_jobs = sort_jobs_by_dependencies
98
+ sorted_jobs.each do |name, job|
99
+ @active_jobs << job.name if !job.attrib[:done]
100
+ end
101
+ init_log
102
+ sorted_jobs.each do |name, job|
66
103
  write_file('index_execution', "#{name}\t#{job.attrib[:exec_folder]}")
67
104
  if job.attrib[:done]
68
105
  next
@@ -70,7 +107,7 @@ class QueueManager
70
107
  rm_done_dependencies(job)
71
108
  end
72
109
  buffered_jobs = launch_job_in_folder(job, name, buffered_jobs)
73
- end
110
+ end
74
111
  end
75
112
 
76
113
  def sort_jobs_by_dependencies # We need job ids from queue system so we ask for each job and we give the previous queue system ids as dependencies if necessary
@@ -113,35 +150,41 @@ class QueueManager
113
150
 
114
151
 
115
152
  def launch2queue_system(job, id, buffered_jobs)
116
- # Write sh file
117
- #--------------------------------
118
- log_folder = File.join(@exec_folder, 'log')
119
153
  sh_name = job.name+'.sh'
120
- create_file(sh_name, job.attrib[:exec_folder])
121
- write_file(sh_name, '#!/usr/bin/env bash')
122
- write_file(sh_name, '##JOB_GROUP_ID='+@job_identifier)
123
- write_header(id, job, sh_name)
154
+ if @write_sh
155
+ # Write sh file
156
+ #--------------------------------
157
+ create_file(sh_name, job.attrib[:exec_folder])
158
+ write_file(sh_name, '#!/usr/bin/env bash')
159
+ write_file(sh_name, '##JOB_GROUP_ID='+@job_identifier)
160
+ write_header(id, job, sh_name)
161
+ end
124
162
 
125
163
  #Get dependencies
126
164
  #------------------------------------
127
165
  ar_dependencies = get_dependencies(job, id)
128
166
  buffered_jobs.each do |id_buff_job, buff_job|
129
- write_job(buff_job, sh_name)
130
167
  ar_dependencies += get_dependencies(buff_job, id_buff_job)
131
- buff_job.attrib[:exec_folder] = job.attrib[:exec_folder]
168
+ if @write_sh
169
+ write_job(buff_job, sh_name)
170
+ buff_job.attrib[:exec_folder] = job.attrib[:exec_folder]
171
+ end
132
172
  end
133
173
  ar_dependencies.uniq!
134
174
 
135
- #Write sh body
136
- #--------------------------------
137
- write_file(sh_name, 'hostname')
138
- write_file(sh_name, "echo -e \"STARTED #{id} #{job.parameters.split.first}:\\t`date`\" >> #{log_folder}")
139
- write_file(sh_name, "source #{File.join(@exec_folder, 'env_file')}") if !@persist_variables.empty?
140
- write_job(job, sh_name)
141
- write_file(sh_name, "echo -e \"FINISHED #{id} #{job.parameters.split.first}:\\t`date`\" >> #{log_folder}")
142
- write_file(sh_name, "echo 'General time'")
143
- write_file(sh_name, "times")
144
- close_file(sh_name, 0755)
175
+ if @write_sh
176
+ #Write sh body
177
+ #--------------------------------
178
+ write_file(sh_name, 'hostname')
179
+ log_file_path = [@exec_folder, '.wf_log', File.basename(job.attrib[:exec_folder])].join('/')
180
+ write_file(sh_name, "flow_logger -e #{log_file_path} -s #{job.name}")
181
+ write_file(sh_name, "source #{File.join(@exec_folder, 'env_file')}") if !@persist_variables.empty?
182
+ write_job(job, sh_name)
183
+ write_file(sh_name, "flow_logger -e #{log_file_path} -f #{job.name}")
184
+ write_file(sh_name, "echo 'General time'")
185
+ write_file(sh_name, "times")
186
+ close_file(sh_name, 0755)
187
+ end
145
188
 
146
189
  #Submit node
147
190
  #-----------------------------------
@@ -190,6 +233,17 @@ class QueueManager
190
233
  end
191
234
  end
192
235
 
236
# Read a file either locally or over the active SSH session (@remote).
# Returns the file content, or nil when the file does not exist.
def read_file(file_path)
  content = nil
  if @remote
    res = @ssh.exec!("[ ! -f #{file_path} ] && echo 'Autoflow:File Not Found' || cat #{file_path}")
    # Bug fix: the original tested content.include? while content was
    # still nil, raising NoMethodError on every remote read; test the
    # ssh result instead (which may itself be nil).
    content = res if !res.nil? && !res.include?('Autoflow:File Not Found')
  else
    # File.exists? was removed in Ruby 3.2; File.exist? works everywhere.
    content = File.open(file_path).read if File.exist?(file_path)
  end
  return content
end
246
+
193
247
  def system_call(cmd, path = nil)
194
248
  cmd = "cd #{path}; " + cmd if !path.nil?
195
249
  if @remote
@@ -4,7 +4,8 @@ class BashManager < QueueManager
4
4
  def initialize(exec_folder, options, commands, persist_variables)
5
5
  super
6
6
  @queued = []
7
- @last_deps = []
7
+ @count = 0
8
+ @pids = {}
8
9
  @path2execution_script = File.join(@exec_folder, 'execution.sh')
9
10
  create_file('execution.sh', @exec_folder)
10
11
  write_file('execution.sh', '#! /usr/bin/env bash')
@@ -17,22 +18,22 @@ class BashManager < QueueManager
17
18
  end
18
19
 
19
20
  def write_header(id, node, sh)
20
- @queued << id # For dependencies purposes
21
+ #@queued << id # For dependencies purposes
21
22
  end
22
23
 
23
24
  def submit_job(job, ar_dependencies)
24
25
  write_file('execution.sh','')
25
26
  if !ar_dependencies.empty?
26
- deps = ar_dependencies - @last_deps
27
- if !deps.empty?
28
- write_file('execution.sh', 'wait')
29
- @last_deps.concat(@queued)
27
+ ar_dependencies.each do |dep|
28
+ cmd = "wait \"$pid#{@pids[dep]}\"\nif [ $? -ne 0 ]\nthen \n\techo \"#{job.name} failed\"\n\texit\nfi"
29
+ write_file('execution.sh', cmd)
30
30
  end
31
31
  end
32
- @last_deps.concat(ar_dependencies)
33
- @last_deps.uniq!
34
32
  write_file('execution.sh', "cd #{job.attrib[:exec_folder]}")
35
- write_file('execution.sh', "./#{job.name}.sh &")
33
+ write_file('execution.sh', "./#{job.name}.sh &> task_log & pid#{@count}=$!")
34
+ @pids[job.name] = @count
35
+ @count += 1
36
+ @queued << job.name # For dependencies purposes
36
37
  return nil
37
38
  end
38
39
 
@@ -27,16 +27,17 @@ class Stack
27
27
  @exec_folder = exec_folder #TODO move this to queue_manager
28
28
  @do_retry = options[:retry]
29
29
  @options = options
30
- parse(options[:workflow], options[:Variables])
31
- @jobs = get_jobs_relations
30
+ @workflow = options[:workflow]
31
+ @external_variables= options[:Variables]
32
+ @jobs = {}
32
33
  end
33
34
 
34
- def parse(workflow, external_variables)
35
+ def parse!
35
36
  #Clean template
36
- workflow.gsub!(/\#.+$/,'') #Delete comments
37
- workflow.gsub!("\t",'') #Drop tabs
38
- workflow.gsub!(/\n+/,"\n") #Drop empty lines
39
- workflow.gsub!(/^\s*/,'')
37
+ @workflow.gsub!(/\#.+$/,'') #Delete comments
38
+ @workflow.gsub!("\t",'') #Drop tabs
39
+ @workflow.gsub!(/\n+/,"\n") #Drop empty lines
40
+ @workflow.gsub!(/^\s*/,'')
40
41
 
41
42
  #Parse template
42
43
  variables_lines = []
@@ -44,7 +45,7 @@ class Stack
44
45
  node_lines = []
45
46
 
46
47
  node_beg = FALSE
47
- workflow.each_line do |line|
48
+ @workflow.each_line do |line|
48
49
  node_beg = TRUE if line.include?('{') # This check the context of a variable
49
50
  if line.include?('}') # if a variable is within a node,
50
51
  if node_beg # we consider tha is a bash variable not a static autoflow variable
@@ -62,9 +63,10 @@ class Stack
62
63
  end
63
64
  end
64
65
  load_variables(variables_lines, @variables)
65
- load_variables(external_variables, @variables)
66
+ load_variables(@external_variables, @variables)
66
67
  load_variables(persist_variables_lines, @persist_variables)
67
68
  parse_nodes(node_lines)
69
+ @jobs = get_jobs_relations
68
70
  end
69
71
 
70
72
  def load_variables(variables_lines, variable_type)
@@ -1,3 +1,3 @@
1
1
  module Autoflow
2
- VERSION = "0.6.4"
2
+ VERSION = "0.8.0"
3
3
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: autoflow
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.6.4
4
+ version: 0.8.0
5
5
  prerelease:
6
6
  platform: ruby
7
7
  authors:
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2016-03-10 00:00:00.000000000 Z
12
+ date: 2017-01-25 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: net-ssh
@@ -27,6 +27,22 @@ dependencies:
27
27
  - - ! '>='
28
28
  - !ruby/object:Gem::Version
29
29
  version: 2.8.0
30
+ - !ruby/object:Gem::Dependency
31
+ name: git
32
+ requirement: !ruby/object:Gem::Requirement
33
+ none: false
34
+ requirements:
35
+ - - ! '>='
36
+ - !ruby/object:Gem::Version
37
+ version: 1.3.0
38
+ type: :runtime
39
+ prerelease: false
40
+ version_requirements: !ruby/object:Gem::Requirement
41
+ none: false
42
+ requirements:
43
+ - - ! '>='
44
+ - !ruby/object:Gem::Version
45
+ version: 1.3.0
30
46
  - !ruby/object:Gem::Dependency
31
47
  name: colorize
32
48
  requirement: !ruby/object:Gem::Requirement
@@ -43,6 +59,22 @@ dependencies:
43
59
  - - ~>
44
60
  - !ruby/object:Gem::Version
45
61
  version: 0.7.3
62
+ - !ruby/object:Gem::Dependency
63
+ name: terminal-table
64
+ requirement: !ruby/object:Gem::Requirement
65
+ none: false
66
+ requirements:
67
+ - - ~>
68
+ - !ruby/object:Gem::Version
69
+ version: 1.6.0
70
+ type: :runtime
71
+ prerelease: false
72
+ version_requirements: !ruby/object:Gem::Requirement
73
+ none: false
74
+ requirements:
75
+ - - ~>
76
+ - !ruby/object:Gem::Version
77
+ version: 1.6.0
46
78
  - !ruby/object:Gem::Dependency
47
79
  name: bundler
48
80
  requirement: !ruby/object:Gem::Requirement
@@ -82,6 +114,7 @@ email:
82
114
  executables:
83
115
  - AutoFlow
84
116
  - env_manager
117
+ - flow_logger
85
118
  - flow_time
86
119
  extensions: []
87
120
  extra_rdoc_files: []
@@ -94,9 +127,11 @@ files:
94
127
  - autoflow.gemspec
95
128
  - bin/AutoFlow
96
129
  - bin/env_manager
130
+ - bin/flow_logger
97
131
  - bin/flow_time
98
132
  - lib/autoflow.rb
99
133
  - lib/autoflow/batch.rb
134
+ - lib/autoflow/logging.rb
100
135
  - lib/autoflow/program.rb
101
136
  - lib/autoflow/queue_manager.rb
102
137
  - lib/autoflow/queue_managers/bash_manager.rb