dake 0.1.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,387 @@
1
+ require 'set'
2
+ require 'open3'
3
+
4
# The dependency-graph data consumed by the executor.
# Note that this is not necessarily the complete graph:
# the graph only covers what is needed to produce the given targets.
DepGraph = Struct.new(
  :succ_step, # a dict maps each step in the DepGraph to the steps depend on it
  :dep_step, # a dict maps each step in the DepGraph to the steps it depends on
  :step_list, # a list of steps represents one sequential execution order
  :root_step, # a set of steps that has no dependant
  :leaf_step, # a set of steps that has no prerequisite
  :need_rebuild # a set of steps in step_list which should be executed to update their targets
)
15
+
16
+ class DakeAnalyzer
17
+ attr_reader :workflow, :variable_dict, :method_dict, :included_files
18
+ attr_reader :tag_target_dict, :file_target_dict
19
+ attr_reader :tag_template_dict, :file_template_dict, :step_template_dict
20
+
21
# Set up analyzer state for one workflow.
# workflow: parsed workflow AST; inclusion_stack: chain of files being
# included (last entry is the current file); env: initial variable bindings.
def initialize(workflow, inclusion_stack, env={})
  @workflow = workflow
  @inclusion_stack = inclusion_stack
  @included_files = inclusion_stack.to_set
  @variable_dict = env.dup
  # Lookup tables filled in during #analyze, all starting empty.
  %w[method tag_target file_target tag_template file_template step_template].each do |kind|
    instance_variable_set("@#{kind}_dict", {})
  end
end
33
+
34
# Populate step.option_dict from the step's declared options.
# Works for both steps and methods. Literal '+'/'-' flags become the strings
# 'true'/'false'; other values are evaluated in the step's context.
# Raises on duplicate options or an unknown protocol; defaults protocol to shell.
def analyze_option(step)
  step.option_dict = {}
  step.options.each do |option|
    name = option.name.to_s
    value =
      if option.value.is_a? Parslet::Slice
        case option.value.to_s
        when '+' then 'true'
        when '-' then 'false'
        else option.value.to_s
        end
      else
        text_eval(option.value, step.src_file, step.context)
      end
    if step.option_dict[name]
      line, column = option.name.line_and_column
      raise "Option `#{name}' in #{step.src_file} at #{line}:#{column} has already been set."
    end
    if name == 'protocol' && !DakeProtocol::ProtocolDict.keys.include?(value)
      line, column = option.value.line_and_column
      raise "Protocol `#{value}' in #{step.src_file} at #{line}:#{column} is not supported."
    end
    # TODO: should have more option check
    step.option_dict[name] = value
  end
  step.option_dict['protocol'] ||= 'shell'
end
59
+
60
# Register a step method definition.
# Raises if a method with the same name was already defined (pointing at the
# earlier definition); otherwise snapshots the current variable context,
# analyzes the method's options and stores it in @method_dict.
def analyze_method(meth)
  name = meth.name.to_s
  if @method_dict[name]
    line, column = @method_dict[name].name.line_and_column
    # Fixed message typo: "beed" -> "been".
    raise "Method `#{name}' has already been defined in " +
          "#{@method_dict[name].src_file} at #{line}:#{column}"
  end
  meth.context = @variable_dict.dup
  analyze_option(meth)
  @method_dict[name] = meth
end
70
+
71
# Split a target name into scheme prefix (e.g. "hdfs:") and path, and build
# the matching scheme object. Names without a prefix default to "local:".
# When step is nil the name came from user input, so a dummy step is attached.
def analyze_scheme(name, step, line, column)
  scheme_part, path_part = name.match(/(\w+:)?(.*)/).captures
  scheme_cls = DakeScheme::SchemeDict[scheme_part || 'local:']
  unless step
    raise "Scheme `#{scheme_part}' in #{name} is not supported." unless scheme_cls
    dummy_step = Step.new([], [], [], {}, nil, nil, @variable_dict, nil, nil)
    return scheme_cls.new(scheme_part, path_part, dummy_step)
  end
  raise "Scheme `#{scheme_part}' in #{step.src_file} at #{line}:#{column} is not supported." unless scheme_cls
  scheme_cls.new(scheme_part, path_part, step)
end
83
+
84
# Resolve one file AST node of a step into one or more concrete file nodes
# with a scheme attached, registering output files/tags/patterns in the
# analyzer's dictionaries.
# file: file AST node; type: :targets or :prerequisites; step: owning step.
# Returns an array of file nodes (command substitution may expand to several).
def analyze_file(file, type, step)
  line, column = text_line_and_column(file.name)
  if file.flag == '?' and type == :targets
    # BUGFIX: evaluate in the step's own context/src_file like every other
    # call site; the bare text_eval defaults are wrong in the resolve phase.
    file_name = text_eval(file.name, step.src_file, step.context)
    raise "Output file `#{file_name}' in #{step.src_file} at #{line}:#{column} should not be optional."
  end
  if file.tag
    tag_name = text_eval(file.name, step.src_file, step.context)
    if file.regex
      if type == :prerequisites
        # BUGFIX: was `file_name', which is undefined on this path (NameError).
        raise "Pattern `#{tag_name}' in #{step.src_file} at #{line}:#{column} cannot be used in input file list."
      end
      file.scheme = DakeScheme::Regex.new('^', tag_name, step)
      if type == :targets
        @tag_template_dict[file.scheme.path] ||= []
        @tag_template_dict[file.scheme.path] << step
      end
    else
      file.scheme = DakeScheme::Tag.new('@', tag_name, step)
      if type == :targets
        @tag_target_dict[file.scheme.path] ||= []
        @tag_target_dict[file.scheme.path] << step
      end
    end
    return [file]
  else
    # there may be more than one file if a file list is generated in command substitution
    file_names = text_eval(file.name, step.src_file, step.context).split("\n")
    files = file_names.map do |file_name|
      if file.regex
        scheme = DakeScheme::Regex.new('^', file_name, step)
      else
        scheme = analyze_scheme(file_name, step, line, column)
      end
      if file_names.length == 1
        file_path = scheme.path
        if type == :targets
          if file.regex
            # An output pattern may belong to only one step.
            if @file_template_dict[file_path]
              raise "Output pattern `#{file_name}' in #{step.src_file} at #{line}:#{column} appears in more than one step."
            else
              @file_template_dict[file_path] = step
            end
          else
            # A plain output file may belong to only one step.
            if @file_target_dict[file_path]
              raise "Output file `#{file_name}' in #{step.src_file} at #{line}:#{column} appears in more than one step."
            else
              @file_target_dict[file_path] = step
            end
          end
        else
          if file.regex
            raise "Pattern `#{file_name}' in #{step.src_file} at #{line}:#{column} cannot be used in input file list."
          end
        end
      else
        # Generated file list should not be used in targets
        if type == :targets
          raise "File list `#{file_name}' in #{step.src_file} at #{line}:#{column} cannot be used as targets."
        end
      end
      newfile = file.dup
      newfile.scheme = scheme
      newfile
    end
    return files
  end
end
152
+
153
# Snapshot the current variable context onto the step and resolve its target
# files in place. (Prerequisite analysis is deferred to the resolve phase.)
def analyze_step(step)
  step.context = @variable_dict.dup
  resolved = step.targets.flat_map { |file| analyze_file(file, :targets, step) }
  step.targets.replace(resolved)
end
158
+
159
# Evaluate a conditional task and pick a branch.
# Supports equality tests (EqCond) and variable-defined tests (DefCond),
# each optionally negated. Returns the if-body when the condition holds,
# otherwise the else-body (which may be nil).
def analyze_condition(condition)
  truth =
    case condition.cond
    when EqCond
      lhs = text_eval(condition.cond.eq_lhs)
      rhs = text_eval(condition.cond.eq_rhs)
      condition.not ? lhs != rhs : lhs == rhs
    when DefCond
      defined = @variable_dict.has_key?(condition.cond.var_name.to_s)
      condition.not ? !defined : defined
    end
  truth ? condition.if_body : condition.else_body
end
179
+
180
# Analyze every task of the workflow in order: record variable definitions,
# analyze steps and methods, expand inclusions, evaluate conditions, and merge
# sub-workflow results into this analyzer's dictionaries.
# Returns self so callers can chain off the populated analyzer.
def analyze
  @workflow.tasks.each do |task|
    case task
    when VariableDef
      var_name = task.var_name.to_s
      var_value = text_eval(task.var_value)
      if task.type == :assign
        @variable_dict[var_name] = var_value
      elsif not @variable_dict[var_name]
        # non-assign definitions only take effect when the variable is unset
        @variable_dict[var_name] = var_value
      end
    when Step
      analyze_step(task)
    when StepMethod
      analyze_method(task)
    when Workflow, Inclusion, Condition
      case task
      when Inclusion
        file_names = text_eval(task.files).split("\n")
        file_names.each do |file_name|
          # relative inclusion paths resolve against the including file's dir
          src_dirname = File.dirname(@inclusion_stack[-1])
          path = file_name.start_with?('/') ? file_name : File.absolute_path(file_name, src_dirname)
          if @included_files.include? path
            line, column = text_line_and_column task.files
            raise "Cyclical inclusion detected in #{task.src_file} at #{line}:#{column}"
          else
            @inclusion_stack.push path
          end
          begin
            tree = DakeParser.new.parse(File.read(path))
          rescue Parslet::ParseFailed => failure
            line, column = text_line_and_column task.files
            STDERR.puts "Failed parsing #{path} included from #{task.src_file} at #{line}:#{column}"
            raise failure.message
          end
          workflow = DakeTransform.new.apply(tree, src_file: path)
          if task.type == :include or task.type == :call
            # include/call share the current variable scope
            sub_workflow = DakeAnalyzer.new(workflow, @inclusion_stack, @variable_dict).analyze
          else
            sub_workflow = DakeAnalyzer.new(workflow, @inclusion_stack, 'BASE' => File.dirname(path)).analyze
          end
          @inclusion_stack.pop
        end
      when Condition
        body = analyze_condition(task)
        next unless body
        sub_workflow = DakeAnalyzer.new(body, @inclusion_stack, @variable_dict).analyze
      when Workflow # for scope
        sub_workflow = DakeAnalyzer.new(task, @inclusion_stack, @variable_dict).analyze
      end
      # Steps producing the same tag accumulate.
      @tag_target_dict.merge! sub_workflow.tag_target_dict do |tag, step_list1, step_list2|
        step_list1 + step_list2
      end
      # A plain output file may only be produced by one step across workflows.
      @file_target_dict.merge! sub_workflow.file_target_dict do |file, step1, step2|
        file1 = step1.targets.find { |target| target.scheme.path == file }
        file2 = step2.targets.find { |target| target.scheme.path == file }
        line1, column1 = text_line_and_column file1.name
        line2, column2 = text_line_and_column file2.name
        file2_name = file2.scheme.path
        raise "Output file `#{file2_name}' defined in #{step2.src_file} at #{line2}:#{column2} " +
              "was previously defined in #{step1.src_file} at #{line1}:#{column1}."
      end
      # BUGFIX: @file_template_dict maps path -> Step (see analyze_file), so the
      # conflict block receives two Steps. The old `(scheme1, step1)` destructuring
      # left step1/step2 nil (Step has no to_ary) and crashed with NoMethodError
      # before the intended error message could be raised.
      @file_template_dict.merge! sub_workflow.file_template_dict do |file, step1, step2|
        file1 = step1.targets.find { |target| target.scheme.path == file }
        file2 = step2.targets.find { |target| target.scheme.path == file }
        line1, column1 = text_line_and_column file1.name
        line2, column2 = text_line_and_column file2.name
        file2_name = file2.scheme.path
        raise "Output pattern `#{file2_name}' defined in #{step2.src_file} at #{line2}:#{column2} " +
              "was previously defined in #{step1.src_file} at #{line1}:#{column1}."
      end
      # `include' and conditions export their variables into this scope.
      if task.is_a? Condition or (task.is_a? Inclusion and task.type == :include)
        @variable_dict.merge! sub_workflow.variable_dict
      end
    end
  end
  self
end
258
+
259
# Best-effort (line, column) source location for a step or method:
# methods report their name; steps report their first target's name.
def step_line_and_column(step)
  return step.name.line_and_column if step.is_a? StepMethod
  target = step.targets.first
  if target.name.is_a? Parslet::Slice
    target.name.line_and_column
  else
    text_line_and_column(target.name)
  end
end
268
+
269
# (line, column) of the value of the named option, or nil when the option
# does not appear in the list.
def option_line_and_column(options, option_name)
  option = options.find { |opt| opt.name.to_s == option_name }
  return nil unless option
  if option.value.is_a? Parslet::Slice
    option.value.line_and_column
  else
    text_line_and_column option.value
  end
end
280
+
281
# (line, column) of a text AST node, taken from its first item.
# Substitution items are shifted left by 2, presumably to account for the
# two-character substitution opener — TODO confirm against the grammar.
# Returns nil for item kinds not handled here.
def text_line_and_column(text)
  case first = text.items.first
  when Chars
    first.string.line_and_column
  when VariableSub
    line, column = first.var_name.line_and_column
    [line, column - 2]
  when CommandSub
    line, column = text_line_and_column(first.cmd_text)
    [line, column - 2]
  end
end
293
+
294
# Pretty-print a target and, when dep_graph is given, recursively print the
# steps producing its prerequisites as a colorized tree.
# target: path or tag string; tag: truthy when target is a tag;
# target_step: step producing the target (nil prints "(No step)");
# pad_stack: tree-drawing state ('├'/'└' per depth), managed by the recursion.
def print_target(target, tag, target_step, dep_graph = nil, pad_stack = [])
  pad_prefix = ''
  unless pad_stack.empty?
    # NOTE(review): the pad_prefix[0...-4] slices below imply each tree segment
    # is 4 characters wide; the literal spacing here looks collapsed by the
    # source extraction — confirm against the original file.
    pad_prefix += pad_stack.reduce('') do |str, pad|
      next str + '│ ' if pad == '├'
      next str + ' ' if pad == '└'
    end
  end
  pwd = Pathname.new(Dir.pwd)
  # Tags render as @tag; absolute paths render relative to the working dir.
  target_path = if tag then '@' + target
                elsif target.start_with? '/'
                  Pathname.new(target).relative_path_from(pwd)
                else
                  target
                end
  if target_step
    workflow_path = Pathname.new(target_step.src_file).relative_path_from(pwd)
    line, column = step_line_and_column(target_step)

    pad = (pad_stack.empty? ? '' : pad_prefix[0...-4] + pad_stack.last + '── ')
    puts pad.colorize(:magenta) +
         "#{target_path}".colorize((tag ? :light_blue : :light_white)) +
         " in " +
         "#{workflow_path} ".colorize(:green) +
         "#{line}:#{column} ".colorize(:yellow)
    # Print the step's doc comments under the target line.
    unless target_step.doc_str.empty?
      pad = (pad_stack.empty? ? '│ ' : pad_prefix + '│ ')
      target_step.doc_str.each do |doc|
        if target_step.prerequisites.empty? or not dep_graph
          puts (pad[0...-2] + (doc == target_step.doc_str.last ? '\_' : '│ ') + doc.string.to_s).colorize(:magenta)
        else
          puts (pad + doc.string.to_s).colorize(:magenta)
        end
      end
    end
  else
    pad = (pad_stack.empty? ? '' : pad_prefix[0...-4] + pad_stack.last + '── ')
    puts pad.colorize(:magenta) +
         "#{target_path}".colorize((tag ? :light_blue : :light_white)) +
         " (No step) "
  end

  if target_step and dep_graph
    # Pair each prerequisite with every dependency step producing it
    # (nil when nothing in the graph produces that prerequisite).
    dep_steps = dep_graph.dep_step[target_step]
    pairs = target_step.prerequisites.each_with_object([]) do |prereq, pairs|
      steps = dep_steps.find_all { |dep_step| dep_step.targets.any? { |tar| tar.scheme.path == prereq.scheme.path } }
      if steps.empty?
        pairs << [prereq, nil]
      else
        steps.each { |step| pairs << [prereq, step]}
      end
    end
    # Pop in reverse; when pairs is empty the branch is the last one ('└').
    while pair = pairs.pop
      pad_stack << (pairs.empty? ? '└' : '├')
      prerequisite, step = pair
      print_target(prerequisite.scheme.path, prerequisite.tag, step, dep_graph, pad_stack)
      pad_stack.pop
    end
  end
end
354
+
355
# Evaluate a text AST node to a plain string: literal chars are copied,
# ${var} substitutions are looked up in `context', and command substitutions
# are executed via the shell (cwd = the BASE variable). `skip' drops that many
# leading items (used to strip the indentation item of a command line).
# src_file is needed by prepare_command in the execution phase, because
# @inclusion_stack will only hold the root workflow by then.
def text_eval(text, src_file=@inclusion_stack[-1], context=@variable_dict, skip=0)
  result = ''
  text.items[skip..-1].each do |item|
    case item
    when Chars
      result << item.string.to_s
    when VariableSub
      var_name = item.var_name.to_s
      var_value = context[var_name]
      unless var_value
        line, column = item.var_name.line_and_column
        raise "Variable `#{var_name}' in #{src_file} at #{line}:#{column} is not defined."
      end
      result << var_value
    when CommandSub
      command = text_eval(item.cmd_text, src_file, context)
      stdout, stderr, status = Open3.capture3(command, :chdir=>@variable_dict['BASE'])
      unless status.success?
        line, column = text_line_and_column item.cmd_text
        raise "Command `#{command}' in #{src_file} at #{line}:#{column} failed " +
              "with EXIT_STATUS:#{status.exitstatus} and STDERR:\n#{stderr}"
      end
      result << stdout.chomp
    else # an escaped character
      result << item.to_s
    end
  end
  result
end
387
+ end
@@ -0,0 +1,49 @@
1
require 'git'
require 'fileutils' # BUGFIX: FileUtils was used below without being required
# require "sqlite3"

# Per-workflow on-disk state: ensures a `.dake' directory exists next to the
# workflow file and records where the step-history database would live.
# The sqlite/git persistence below is scaffolding that is not yet enabled.
class DakeDB
  attr_reader :database_path, :database_file

  # path: path to the workflow file; the `.dake' dir is created beside it.
  def initialize(path)
    workflow_path = File.dirname(path)
    @database_path = workflow_path + '/.dake'
    @database_file = database_path + '/step_history.db'

    FileUtils.mkdir(@database_path) unless File.exist? @database_path
    # @db = SQLite3::Database.new database_file
    # @db.execute <<-SQL
    #   create table if not exists step_history (
    #     id int unsigned auto_increment primary key,
    #     step_sha1 binary(20) not null
    #     target varchar(50),
    #     process_id big int(50),
    #     start_time varchar(5),
    #     end_time varchar(5),
    #     ip_address binary(4)
    #   );
    #   create table if not exists step_target (
    #     id int unsigned auto_increment primary key,
    #     target varchar(1024),
    #     type char(50)
    #   );
    # SQL

    # git_opts = {
    #     repository: database_path + '/.git',
    #     index: database_path + '/.git/index',
    #     log: Logger.new(File.open(database_path + '/git.log', 'w+'))
    # }

    # if File.exist? database_path + '/.git'
    #   @git = Git.open(workflow_path, git_opts)
    # else
    #   @git = Git.init(workflow_path, git_opts)
    #   @git.config('user.name', 'Dake User')
    #   @git.config('user.email', 'email@email.com')
    #   File.open(database_path + '/.gitignore', 'w') do |f|
    #     f.puts File.basename('.dake')
    #   end
    #   @git.add(database_path + '/.gitignore')
    #   @git.commit('init commit')
    # end
  end
end
@@ -0,0 +1,211 @@
1
+ require 'tempfile'
2
+ require 'concurrent'
3
+
4
+ class DakeExecutor
5
# jobs: maximum worker threads for parallel execution; nil/false selects the
# serial execution path and no thread pool is created.
def initialize(analyzer, dake_db, dep_graph, jobs)
  @analyzer = analyzer
  @dake_db = dake_db
  @dep_graph = dep_graph
  @complete_dep_steps = Hash.new(0)
  @async = !!jobs
  return unless @async
  # Single pool sized by the requested job count, with an unbounded work queue.
  @pool = Concurrent::ThreadPoolExecutor.new(
    min_threads: 1,
    max_threads: jobs,
    max_queue: 0
  )
end
17
+
18
# Execute every step in need_rebuild, either serially (step_list order) or on
# the thread pool built in #initialize.
# dry_run: print commands instead of running; log: announce stdout/stderr files.
def execute(dry_run=false, log=false)
  if @dep_graph.need_rebuild.empty?
    STDERR.puts "Nothing to be done.".colorize(:green)
    return
  end
  if @async
    # dep_map tracks, per step, the prerequisite steps not yet finished.
    # Leaf steps (empty dep set) are excluded and seeded directly below.
    dep_map = Hash.new
    @dep_graph.dep_step.each do |step, dep_set|
      next if dep_set.empty?
      dep_map[step] = dep_set.dup
    end

    queue = Queue.new
    error_queue = Queue.new
    error_steps = Set.new

    # Dedicated reporter thread: drains error_queue until it is closed.
    error_thr = Thread.new do
      while error = error_queue.deq
        if error.is_a? Exception
          STDERR.puts "#{error.class}: #{error.message}".colorize(:red)
          STDERR.puts "Continue to execute other Step(s)".colorize(:red)
          STDERR.puts "To Force Quitting: Press Ctrl + C".colorize(:red)
        end
      end
    end

    # Guards dep_map and error_steps, which are shared by all pool workers.
    lock = Concurrent::ReadWriteLock.new
    @dep_graph.leaf_step.each { |step| queue << step }

    # deq blocks until a step is ready; returns nil once the queue is closed.
    while next_step = queue.deq
      @pool.post(next_step) do |step|
        lock.acquire_read_lock
        error_step = error_steps.include? step
        lock.release_read_lock
        if error_step
          # A prerequisite failed: report a skip instead of running the step.
          line, column = @analyzer.step_line_and_column step
          msg = "Step(#{step.object_id}) defined in #{step.src_file} at #{line}:#{column} " +
                "skipped due to prerequisite step(s) error."
          error_queue << Exception.new(msg)
        else
          execute_step(step, dry_run, log) if @dep_graph.need_rebuild.include? step
        end
        lock.acquire_write_lock
        dep_map.delete step
        if dep_map.empty?
          # Nothing left waiting on prerequisites: stop the dispatch loop.
          queue.close
        else
          # Release successors whose prerequisites are now all complete;
          # mark a successor failed when all its remaining deps have failed.
          @dep_graph.succ_step[step].each do |succ|
            dep_map[succ].delete step
            if dep_map[succ].empty?
              queue << succ
            elsif dep_map[succ].all? { |dep_step| error_steps.include? dep_step }
              error_steps << succ
              queue << succ
            end
          end
        end
        lock.release_write_lock
      rescue Exception => e
        # Record the failure, then still unblock successors so the run drains.
        error_queue << e
        lock.acquire_write_lock
        error_steps << step
        dep_map.delete step
        if dep_map.empty?
          queue.close
        else
          @dep_graph.succ_step[step].each do |succ|
            if dep_map[succ].all? { |dep_step| error_steps.include? dep_step }
              error_steps << succ
              queue << succ
            end
          end
        end
        lock.release_write_lock
      end
    end
    @pool.shutdown
    @pool.wait_for_termination
    queue.close
    error_queue.close
    error_thr.join
    raise "Failed to execute some step(s)" unless error_steps.empty?
  else
    # Serial mode: step_list is already a valid dependency order.
    @dep_graph.step_list.each do |step|
      execute_step(step, dry_run, log) if @dep_graph.need_rebuild.include? step
    end
  end
end
106
+
107
# Run one step through its protocol implementation, logging progress and
# produced targets to STDERR. In dry_run mode the command text is printed
# instead of executed.
def execute_step(step, dry_run, log)
  prepare_step(step)
  protocol = step.option_dict['protocol']
  # Safety net: analyze_option already defaults protocol to 'shell'.
  protocol ||= 'shell'

  line, column = @analyzer.step_line_and_column step
  proto = DakeProtocol::ProtocolDict[protocol].new(step, @analyzer, @dake_db, dry_run)
  STDERR.puts ("[#{Time.now.strftime('%Y-%m-%d %H:%M:%S')}] Running #{protocol} step(#{step.object_id}) defined in " +
               "#{step.src_file} at #{line}:#{column}").colorize(:green)
  STDERR.puts "[#{Time.now.strftime('%Y-%m-%d %H:%M:%S')}] step(#{step.object_id}) Script in #{proto.script_file}".colorize(:green) unless dry_run
  # Announce every concrete target (regex pattern targets are skipped).
  step.targets.each do |target|
    next if target.scheme.is_a? DakeScheme::Regex
    if target.scheme.is_a? DakeScheme::Tag
      STDERR.puts "[#{Time.now.strftime('%Y-%m-%d %H:%M:%S')}] step(#{step.object_id}) Producing ".colorize(:green) +
                  "@#{target.scheme.path}".colorize(:light_blue)
    else
      STDERR.puts "[#{Time.now.strftime('%Y-%m-%d %H:%M:%S')}] step(#{step.object_id}) Producing ".colorize(:green) +
                  "#{target.scheme.path}".colorize(:light_white)
    end
  end
  STDERR.puts "[#{Time.now.strftime('%Y-%m-%d %H:%M:%S')}] step(#{step.object_id}) STDOUT in #{proto.script_stdout}".colorize(:green) if log and not dry_run
  STDERR.puts "[#{Time.now.strftime('%Y-%m-%d %H:%M:%S')}] step(#{step.object_id}) STDERR in #{proto.script_stderr}".colorize(:green) if log and not dry_run

  if dry_run
    puts step.cmd_text
  else
    proto.execute_step(log)
  end

  STDERR.puts ("[#{Time.now.strftime('%Y-%m-%d %H:%M:%S')}] Complete #{protocol} step(#{step.object_id}) defined in " +
               "#{step.src_file} at #{line}:#{column}").colorize(:green)
end
139
+
140
# Populate the step's context with the INPUT*/OUTPUT* convenience variables
# (counts, space-joined lists, positional names), re-analyze its options and
# build the final command text.
def prepare_step(step)
  context = step.context.merge!({'OUTPUTN' => 0, 'OUTPUTS' => [], 'INPUTN' => 0, 'INPUTS' => []})
  # Only concrete file targets count; tags and regex patterns are excluded.
  concrete_outputs = step.targets.reject { |f| [DakeScheme::Tag, DakeScheme::Regex].include? f.scheme.class }
  concrete_outputs.each_with_index do |output, idx|
    path = output.scheme.path
    context["OUTPUT"] = path if idx.zero?
    context["OUTPUT#{idx}"] = path
    context["OUTPUTS"] << path
    context["OUTPUTN"] += 1
  end
  context['OUTPUTN'] = context['OUTPUTN'].to_s
  context['OUTPUTS'] = context['OUTPUTS'].join(" ")
  file_inputs = step.prerequisites.reject(&:tag)
  file_inputs.each_with_index do |input, idx|
    path = input.scheme.path
    context["INPUT"] = path if idx.zero?
    context["INPUT#{idx}"] = path
    context["INPUTS"] << path
    context["INPUTN"] += 1
  end
  context['INPUTN'] = context['INPUTN'].to_s
  context['INPUTS'] = context['INPUTS'].join(" ")
  @analyzer.analyze_option(step)
  step.cmd_text = prepare_command(step)
end
163
+
164
# Build the final command text for a step (or method), applying the optional
# method mixin declared via the `method'/`method_mode' options (prepend,
# append or replace).
# command preparation is intentionally deferred to execution phase to
# accelerate the analysis phase of big workflow file
def prepare_command(step, context={})
  mixin_method = (step.option_dict['method'] ? step.option_dict['method'] : nil)
  method_mode = (step.option_dict['method_mode'] ? step.option_dict['method_mode'] : 'prepend')
  if mixin_method
    meth = @analyzer.method_dict[mixin_method]
    unless meth
      line, column = @analyzer.option_line_and_column(step.options, 'method')
      raise "Method `#{mixin_method}' used in #{step.src_file} at #{line}:#{column} is not defined."
    end
    @analyzer.analyze_option(meth)
    # A mixin method must use the same protocol as the step it is mixed into.
    unless step.option_dict['protocol'] == meth.option_dict['protocol']
      line, column = @analyzer.option_line_and_column(step.options, 'protocol')
      # protocol may be defaulted rather than written out; fall back to the step location
      line, column = @analyzer.step_line_and_column(step) unless line
      meth_line, meth_column = meth.name.line_and_column
      raise "Method `#{mixin_method}' defined in #{meth.src_file} at #{meth_line}:#{meth_column} " +
            "uses protocol `#{meth.option_dict['protocol']}', which is incompatible with protocol " +
            "`#{step.option_dict['protocol']}' used in #{step.src_file} at #{line}:#{column}."
    end
    if method_mode == 'replace'
      return prepare_command(meth, step.context)
    else
      method_text = prepare_command(meth, step.context)
    end
  end
  cmd_text = ''
  cmd_text << method_text if mixin_method and method_mode == 'prepend'
  # The first command line's leading whitespace is the baseline indentation;
  # every other line must start with that same prefix.
  first_indent = step.commands[0].items[0].to_s unless step.commands.empty?
  step.commands.each do |command|
    indent = command.items[0]
    if not indent.to_s.start_with? first_indent
      line, column = indent.line_and_column
      raise "Incompatible indentation in #{step.src_file} at #{line}:#{column}."
    else
      # keep indentation beyond the baseline; skip=1 drops the indent item
      indentation = indent.to_s[first_indent.length..-1]
      cmd_text << indentation + @analyzer.text_eval(command, step.src_file, step.context.merge(context), 1) + "\n"
    end
  end
  cmd_text << method_text if mixin_method and method_mode == 'append'
  if cmd_text == ''
    line, column = @analyzer.step_line_and_column step
    step_meth = step.is_a?(StepMethod) ? 'Method' : 'Step'
    raise "#{step_meth} defined in #{step.src_file} at #{line}:#{column} has no commands."
  end
  cmd_text
end
211
+ end