scout-gear 10.11.6 → 10.11.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.vimproject +16 -2
- data/VERSION +1 -1
- data/bin/scout +10 -10
- data/lib/scout/association/fields.rb +15 -15
- data/lib/scout/association/index.rb +6 -6
- data/lib/scout/association/item.rb +18 -8
- data/lib/scout/association.rb +4 -4
- data/lib/scout/entity/identifiers.rb +5 -5
- data/lib/scout/entity/property.rb +2 -2
- data/lib/scout/entity.rb +1 -1
- data/lib/scout/knowledge_base/description.rb +10 -10
- data/lib/scout/knowledge_base/entity.rb +6 -6
- data/lib/scout/knowledge_base/list.rb +1 -1
- data/lib/scout/knowledge_base/query.rb +4 -4
- data/lib/scout/knowledge_base/registry.rb +6 -6
- data/lib/scout/knowledge_base/traverse.rb +7 -40
- data/lib/scout/persist/engine/fix_width_table.rb +6 -6
- data/lib/scout/persist/engine/packed_index.rb +2 -2
- data/lib/scout/persist/engine/sharder.rb +4 -4
- data/lib/scout/persist/engine/tkrzw.rb +1 -1
- data/lib/scout/persist/engine/tokyocabinet.rb +2 -2
- data/lib/scout/persist/tsv/adapter/fix_width_table.rb +1 -1
- data/lib/scout/persist/tsv/adapter/packed_index.rb +1 -1
- data/lib/scout/persist/tsv/adapter/tkrzw.rb +1 -1
- data/lib/scout/persist/tsv/adapter/tokyocabinet.rb +3 -3
- data/lib/scout/persist/tsv/serialize.rb +3 -3
- data/lib/scout/persist/tsv.rb +1 -1
- data/lib/scout/semaphore.rb +78 -3
- data/lib/scout/tsv/annotation/repo.rb +4 -4
- data/lib/scout/tsv/annotation.rb +2 -2
- data/lib/scout/tsv/attach.rb +7 -7
- data/lib/scout/tsv/change_id/translate.rb +1 -1
- data/lib/scout/tsv/csv.rb +3 -3
- data/lib/scout/tsv/dumper.rb +8 -8
- data/lib/scout/tsv/index.rb +1 -1
- data/lib/scout/tsv/open.rb +3 -3
- data/lib/scout/tsv/stream.rb +2 -2
- data/lib/scout/tsv/traverse.rb +4 -4
- data/lib/scout/tsv/util/filter.rb +9 -9
- data/lib/scout/tsv/util/process.rb +2 -2
- data/lib/scout/tsv/util/reorder.rb +2 -2
- data/lib/scout/tsv/util/select.rb +3 -3
- data/lib/scout/tsv/util/unzip.rb +2 -2
- data/lib/scout/tsv/util.rb +1 -1
- data/lib/scout/tsv.rb +2 -2
- data/lib/scout/work_queue/socket.rb +2 -2
- data/lib/scout/work_queue/worker.rb +4 -4
- data/lib/scout/work_queue.rb +5 -5
- data/lib/scout/workflow/definition.rb +18 -16
- data/lib/scout/workflow/deployment/local.rb +82 -62
- data/lib/scout/workflow/deployment/orchestrator/batches.rb +66 -5
- data/lib/scout/workflow/deployment/orchestrator/chains.rb +47 -30
- data/lib/scout/workflow/deployment/orchestrator/rules.rb +3 -3
- data/lib/scout/workflow/deployment/orchestrator/workload.rb +11 -22
- data/lib/scout/workflow/deployment/scheduler/job.rb +34 -36
- data/lib/scout/workflow/deployment/scheduler/lfs.rb +1 -1
- data/lib/scout/workflow/deployment/scheduler/pbs.rb +4 -4
- data/lib/scout/workflow/deployment/scheduler/slurm.rb +2 -2
- data/lib/scout/workflow/deployment/scheduler.rb +23 -12
- data/lib/scout/workflow/deployment/trace.rb +2 -2
- data/lib/scout/workflow/documentation.rb +4 -4
- data/lib/scout/workflow/export.rb +1 -1
- data/lib/scout/workflow/path.rb +2 -2
- data/lib/scout/workflow/step/children.rb +1 -1
- data/lib/scout/workflow/step/dependencies.rb +36 -3
- data/lib/scout/workflow/step/info.rb +5 -19
- data/lib/scout/workflow/step/inputs.rb +1 -1
- data/lib/scout/workflow/step/progress.rb +2 -2
- data/lib/scout/workflow/step/provenance.rb +4 -4
- data/lib/scout/workflow/step/status.rb +23 -9
- data/lib/scout/workflow/step.rb +19 -17
- data/lib/scout/workflow/task/dependencies.rb +10 -3
- data/lib/scout/workflow/task/info.rb +3 -3
- data/lib/scout/workflow/task/inputs.rb +14 -8
- data/lib/scout/workflow/task.rb +37 -22
- data/lib/scout/workflow/usage.rb +13 -13
- data/lib/scout/workflow/util.rb +1 -1
- data/lib/scout/workflow.rb +6 -6
- data/scout-gear.gemspec +3 -3
- data/scout_commands/alias +1 -1
- data/scout_commands/batch/clean +12 -12
- data/scout_commands/batch/list +26 -25
- data/scout_commands/batch/tail +9 -5
- data/scout_commands/cat +1 -1
- data/scout_commands/doc +2 -2
- data/scout_commands/entity +4 -4
- data/scout_commands/find +1 -1
- data/scout_commands/kb/config +1 -1
- data/scout_commands/kb/entities +1 -1
- data/scout_commands/kb/list +1 -1
- data/scout_commands/kb/query +2 -2
- data/scout_commands/kb/register +1 -1
- data/scout_commands/kb/show +1 -1
- data/scout_commands/kb/traverse +1 -1
- data/scout_commands/log +6 -6
- data/scout_commands/resource/produce +2 -2
- data/scout_commands/resource/sync +1 -1
- data/scout_commands/system/clean +7 -7
- data/scout_commands/system/status +4 -4
- data/scout_commands/template +1 -1
- data/scout_commands/update +1 -1
- data/scout_commands/workflow/info +1 -1
- data/scout_commands/workflow/install +1 -1
- data/scout_commands/workflow/list +2 -2
- data/scout_commands/workflow/process +2 -2
- data/scout_commands/workflow/prov +3 -3
- data/scout_commands/workflow/task +36 -11
- data/scout_commands/workflow/trace +1 -1
- data/scout_commands/workflow/write_info +2 -2
- data/share/templates/command +1 -1
- data/test/scout/association/test_item.rb +5 -0
- data/test/scout/entity/test_property.rb +3 -3
- data/test/scout/knowledge_base/test_description.rb +1 -1
- data/test/scout/knowledge_base/test_traverse.rb +2 -2
- data/test/scout/persist/engine/test_packed_index.rb +6 -6
- data/test/scout/persist/test_tsv.rb +4 -4
- data/test/scout/persist/tsv/adapter/test_packed_index.rb +4 -4
- data/test/scout/persist/tsv/adapter/test_sharder.rb +23 -23
- data/test/scout/persist/tsv/adapter/test_tokyocabinet.rb +1 -1
- data/test/scout/persist/tsv/test_serialize.rb +1 -1
- data/test/scout/test_association.rb +1 -1
- data/test/scout/test_tsv.rb +2 -2
- data/test/scout/test_workflow.rb +2 -2
- data/test/scout/tsv/test_annotation.rb +4 -4
- data/test/scout/tsv/test_index.rb +1 -1
- data/test/scout/tsv/test_open.rb +2 -2
- data/test/scout/tsv/test_parser.rb +2 -2
- data/test/scout/tsv/test_stream.rb +1 -1
- data/test/scout/tsv/test_transformer.rb +1 -1
- data/test/scout/tsv/util/test_filter.rb +1 -1
- data/test/scout/tsv/util/test_melt.rb +1 -1
- data/test/scout/tsv/util/test_reorder.rb +1 -1
- data/test/scout/work_queue/test_socket.rb +3 -3
- data/test/scout/work_queue/test_worker.rb +2 -2
- data/test/scout/workflow/deployment/orchestrator/test_batches.rb +13 -3
- data/test/scout/workflow/deployment/orchestrator/test_chains.rb +15 -13
- data/test/scout/workflow/deployment/orchestrator/test_workload.rb +1 -1
- data/test/scout/workflow/deployment/test_local.rb +2 -2
- data/test/scout/workflow/deployment/test_scheduler.rb +1 -2
- data/test/scout/workflow/step/test_children.rb +1 -1
- data/test/scout/workflow/step/test_dependencies.rb +36 -1
- data/test/scout/workflow/step/test_info.rb +3 -35
- data/test/scout/workflow/step/test_load.rb +1 -1
- data/test/scout/workflow/step/test_provenance.rb +1 -1
- data/test/scout/workflow/step/test_status.rb +33 -1
- data/test/scout/workflow/task/test_dependencies.rb +9 -7
- data/test/scout/workflow/task/test_inputs.rb +1 -1
- data/test/scout/workflow/test_definition.rb +1 -1
- data/test/scout/workflow/test_documentation.rb +1 -1
- data/test/scout/workflow/test_entity.rb +2 -2
- data/test/scout/workflow/test_step.rb +13 -13
- data/test/scout/workflow/test_usage.rb +1 -1
- data/test/test_helper.rb +1 -1
- metadata +2 -2
data/lib/scout/tsv/traverse.rb
CHANGED
@@ -60,9 +60,9 @@ module TSV
 key = values.delete_at(key_pos)
 end
 end
-else
+else
 orig_key = key
-key = @type == :flat ? values : values[key_pos] if key_pos != :key
+key = @type == :flat ? values : values[key_pos] if key_pos != :key

 values = values.values_at(*positions)
 if key_index
@@ -85,7 +85,7 @@ module TSV

 values = TSV.cast_value(values, cast) if cast

-if Array === key
+if Array === key
 key = key.uniq if uniq
 if @type == :double && one2one
 if one2one == :strict
@@ -159,7 +159,7 @@ module TSV
 end
 end
 end
-
+

 [key_name, field_names]
 end
data/lib/scout/tsv/util/filter.rb
CHANGED

@@ -27,7 +27,7 @@ module Filtered
 @value = value
 @unsaved = []

-case
+case
 when Hash === persistence
 @persistence = persistence
 when String === persistence
@@ -57,7 +57,7 @@ module Filtered
 self
 end.class_eval <<-EOC
 def match_entry(key, entry)
-value = entry[@fieldnum]
+value = entry[@fieldnum]
 value == @value or (Array === value and value.include? @value)
 end
 EOC
@@ -67,7 +67,7 @@ module Filtered
 end

 def key
-case
+case
 when String === value
 value
 else
@@ -162,7 +162,7 @@ module Filtered
 if not base.respond_to? :unfiltered_set
 class << base
 attr_accessor :filter_dir, :filters
-
+
 alias unfiltered_set []=
 alias []= filtered_set

@@ -200,7 +200,7 @@ module Filtered
 if filters.empty?
 self.send(:unfiltered_set, key, value, clean)
 else
-filters.each do |filter|
+filters.each do |filter|
 filter.add key if filter.match_entry key, value
 end
 self.send(:unfiltered_set, key, value, clean)
@@ -215,10 +215,10 @@ module Filtered
 end
 end

-def filtered_values
-if filters.empty?
-self.send(:unfiltered_values)
-else
+def filtered_values
+if filters.empty?
+self.send(:unfiltered_values)
+else
 ids = filters.inject(nil){|list,filter| list.nil? ? filter.ids.dup : Misc.intersect_sorted_arrays(list, filter.ids.dup)}
 self.send :values_at, *ids
 end
data/lib/scout/tsv/util/process.rb
CHANGED

@@ -13,7 +13,7 @@ module TSV
 field_values = values[field_pos]
 end

-new_values = case
+new_values = case
 when block.arity == 1
 yield(field_values)
 when block.arity == 2
@@ -54,7 +54,7 @@ module TSV
 values = new_values
 when (values.nil? and (fields.nil? or fields.empty?))
 values = [new_values]
-when values.nil?
+when values.nil?
 values = [nil] * fields.length + [new_values]
 when Array === values
 values += [new_values]
|
|
|
1
1
|
require 'matrix'
|
|
2
2
|
|
|
3
3
|
module TSV
|
|
4
|
-
def reorder(key_field = nil, fields = nil, merge: true, one2one: true, data: nil, unnamed: true, **kwargs)
|
|
4
|
+
def reorder(key_field = nil, fields = nil, merge: true, one2one: true, data: nil, unnamed: true, **kwargs)
|
|
5
5
|
res = data || self.annotate({})
|
|
6
6
|
res.type = kwargs[:type] if kwargs.include?(:type)
|
|
7
7
|
kwargs[:one2one] = one2one
|
|
@@ -61,7 +61,7 @@ module TSV
|
|
|
61
61
|
new = self.annotate({})
|
|
62
62
|
TSV.setup(new, :key_field => key_field, :fields => new_fields, :type => type, :filename => filename, :identifiers => identifiers)
|
|
63
63
|
|
|
64
|
-
m = Matrix.rows values
|
|
64
|
+
m = Matrix.rows values
|
|
65
65
|
new_rows = m.transpose.to_a
|
|
66
66
|
|
|
67
67
|
fields.zip(new_rows) do |key,row|
|
|
data/lib/scout/tsv/util/select.rb
CHANGED

@@ -41,7 +41,7 @@ module TSV
 if block_given?
 field = method
 field = fields.index?(field) if fields && String === field
-case
+case
 when block.arity == 1
 if (method == key_field or method == :key)
 yield(key)
@@ -69,7 +69,7 @@ module TSV
 new = TSV.setup({}, :key_field => key_field, :fields => fields, :type => type, :filename => filename, :identifiers => identifiers)

 self.annotate(new)
-
+
 case
 when (method.nil? and block_given?)
 through do |key, values|
@@ -101,7 +101,7 @@ module TSV
 end
 when ((String === method) || (Symbol === method))
 if block_given?
-case
+case
 when block.arity == 1
 with_unnamed do
 case
data/lib/scout/tsv/util/unzip.rb
CHANGED
@@ -20,7 +20,7 @@ module TSV
 else
 target
 end
-
+
 target.fields = new_fields
 target.key_field = new_key_field
 target.type = type
@@ -57,7 +57,7 @@ module TSV
 [new_key, new_values]
 end
 end
-
+
 MultipleResult.setup(res)
 else
 field_value = v[field_pos]
data/lib/scout/tsv/util.rb
CHANGED
@@ -129,7 +129,7 @@ module TSV

 filename = @filename
 filename = "No filename" if filename.nil? || String === filename && filename.empty?
-filename.find if Path === filename
+filename.find if Path === filename
 filename = File.basename(filename) + " [" + File.basename(persistence_path) + "]" if respond_to?(:persistence_path) and persistence_path

 with_unnamed do
data/lib/scout/tsv.rb
CHANGED
@@ -68,7 +68,7 @@ module TSV
 end

 def self.str_setup(option_str, obj)
-options = TSV.str2options(option_str)
+options = TSV.str2options(option_str)
 setup(obj, **options)
 end

@@ -87,7 +87,7 @@ module TSV

 file = StringIO.new file if String === file && ! (Path === file) && file.index("\n")

-source_name, options =
+source_name, options =
 case file
 when StringIO
 [file.inspect, options]
data/lib/scout/work_queue/worker.rb
CHANGED

@@ -41,10 +41,10 @@ class WorkQueue
 while obj = input.read
 if DoneProcessing === obj
 output.write DoneProcessing.new
-raise obj
+raise obj
 end
 res = block.call obj
-output.write res unless ignore_ouput || res == :ignore
+output.write res unless ignore_ouput || res == :ignore
 end
 rescue DoneProcessing
 rescue Interrupt
@@ -64,7 +64,7 @@ class WorkQueue
 begin
 Log.medium "Aborting worker #{worker_id}"
 Process.kill SIGNAL, @pid
-rescue Errno::ECHILD
+rescue Errno::ECHILD
 rescue Errno::ESRCH
 end
 end
@@ -77,7 +77,7 @@ class WorkQueue
 def self.join(workers)
 workers = [workers] unless Array === workers
 begin
-while pid = Process.wait
+while pid = Process.wait
 status = $?
 worker = workers.select{|w| w.pid == pid }.first
 end
data/lib/scout/work_queue.rb
CHANGED
@@ -62,7 +62,7 @@ class WorkQueue
 end

 def process(&callback)
-@workers.each do |w|
+@workers.each do |w|
 w.process @input, @output, &@worker_proc
 end

@@ -126,9 +126,9 @@ class WorkQueue
 t
 end
 exceptions = []
-threads.each do |t|
+threads.each do |t|
 begin
-t.join
+t.join
 rescue Exception
 exceptions << $!
 end
@@ -155,10 +155,10 @@ class WorkQueue
 @aborted = true
 Log.low "Aborting #{@workers.length} workers in queue #{queue_id}"
 @worker_mutex.synchronize do
-@workers.each do |w|
+@workers.each do |w|
 ScoutSemaphore.post_semaphore(@output.write_sem)
 ScoutSemaphore.post_semaphore(@input.read_sem)
-w.abort
+w.abort
 end
 end
 end
data/lib/scout/workflow/definition.rb
CHANGED

@@ -123,7 +123,7 @@ module Workflow
 block = lambda &self.method(name) if block.nil?
 begin
 @annotate_next_task ||= {}
-@annotate_next_task[:extension] ||=
+@annotate_next_task[:extension] ||=
 case type
 when :tsv
 "tsv"
@@ -144,24 +144,24 @@ module Workflow
 end
 end

-FORGET_TASK_ALIAS = begin
+FORGET_TASK_ALIAS = begin
 %w(SCOUT_FORGET_TASK_ALIAS SCOUT_FORGET_DEP_TASKS RBBT_FORGET_DEP_TASKS).select do |var|
 ENV[var] == 'true'
 end.any?
 end
-REMOVE_TASK_ALIAS = begin
+REMOVE_TASK_ALIAS = begin
 remove = %w(SCOUT_REMOVE_TASK_ALIAS SCOUT_REMOVE_DEP_TASKS RBBT_REMOVE_DEP_TASKS).select do |var|
 ENV.include?(var) && ENV[var] != 'false'
 end.first
 remove.nil? ? false : remove
 end
 def task_alias(name, workflow, oname, *rest, &block)
-dep(workflow, oname, *rest, &block)
+dep(workflow, oname, *rest, &block)
 extension :dep_task unless @extension
 task_proc = workflow.tasks[oname] if workflow.tasks
 if task_proc
 returns task_proc.returns if @returns.nil?
-type = task_proc.type
+type = task_proc.type
 end
 task name => type do
 raise ScoutException, "dep_task does not have any dependencies" if dependencies.empty?
@@ -187,25 +187,27 @@ module Workflow
 Open.rm_rf self.files_dir if Open.exist? self.files_dir
 Open.link_dir dep.files_dir, self.files_dir if Open.exist?(dep.files_dir)

-if dep.overriden?
+if dep.overriden? || dep.overrider?
 Open.link dep.path, self.tmp_path
 else
 Open.ln_h dep.path, self.tmp_path

-
-
-
-
-
-
-
+if Open.exists?(dep.info_file)
+case remove.to_s
+when 'true'
+dep.clean
+when 'recursive'
+(dep.dependencies.to_a + dep.rec_dependencies.to_a).uniq.each do |d|
+next if d.overriden
+d.clean unless Scout::Config.get(:remove_dep, "task:#{d.task_signature}", "task:#{d.task_name}", "workflow:#{d.workflow.name}", :default => true).to_s == 'false'
+end
+dep.clean unless Scout::Config.get(:remove_dep, "task:#{dep.task_signature}", "task:#{dep.task_name}", "workflow:#{dep.workflow.name}", :default => true).to_s == 'false'
 end
-
-end
+end
 end
 else
 if Open.exists?(dep.files_dir)
-Open.rm_rf self.files_dir
+Open.rm_rf self.files_dir
 Open.link dep.files_dir, self.files_dir
 end
 if defined?(RemoteStep) && RemoteStep === dep
data/lib/scout/workflow/deployment/local.rb
CHANGED

@@ -42,86 +42,95 @@ class Workflow::LocalExecutor
 @available_resources = IndiferentHash.setup(available_resources)
 @resources_requested = IndiferentHash.setup({})
 @resources_used = IndiferentHash.setup({})
+Log.info "LocalExecutor initiated #{Log.fingerprint available_resources}"
 end

-def process_batches(batches)
+def process_batches(batches, bar: true)
 retry_jobs = []
 failed_jobs = []

-
+bar = {desc: "Processing batches"} if TrueClass === bar
+bar = {bar: bar} if Log::ProgressBar === bar
+Log::ProgressBar.with_bar batches.length, bar do |bar|
+bar.init if bar

-
-top_level_jobs = candidates.collect{|batch| batch[:top_level] }
+while (missing_batches = batches.reject{|b| Workflow::Orchestrator.done_batch?(b) }).any?

-
+bar.pos batches.select{|b| Workflow::Orchestrator.done_batch?(b) }.length if bar

-
-
-if exception
-Log.warn 'Some work failed'
-raise exception
-else
-raise 'Some work failed'
-end
-end
+candidates = Workflow::LocalExecutor.candidates(batches)
+top_level_jobs = candidates.collect{|batch| batch[:top_level] }

-
-begin
+raise NoWork, "No candidates and no running jobs #{Log.fingerprint batches}" if resources_used.empty? && top_level_jobs.empty?

-
+if candidates.reject{|batch| failed_jobs.include? batch[:top_level] }.empty? && resources_used.empty? && top_level_jobs.empty?
+exception = failed_jobs.collect(&:get_exception).compact.first
+if exception
+Log.warn 'Some work failed'
+raise exception
+else
+raise 'Some work failed'
+end
+end

-
-
-
-
-
-
-
+candidates.each do |batch|
+begin
+
+job = batch[:top_level]
+
+case
+when (job.error? || job.aborted?)
+begin
+if job.recoverable_error?
+if retry_jobs.include?(job)
+Log.warn "Failed twice #{job.path} with recoverable error"
+retry_jobs.delete job
+failed_jobs << job
+next
+else
+retry_jobs << job
+job.clean
+raise TryAgain
+end
+else
 failed_jobs << job
+Log.warn "Non-recoverable error in #{job.path}"
 next
-else
-retry_jobs << job
-job.clean
-raise TryAgain
 end
-
-
-
-next
+ensure
+Log.warn "Releases resources from failed job: #{job.path}"
+release_resources(job)
 end
-
-Log.
+when job.done?
+Log.debug "Orchestrator done #{job.path}"
 release_resources(job)
+clear_batch(batches, batch)
+erase_job_dependencies(job, batches)
+when job.running?
+next
+
+else
+check_resources(batch) do
+run_batch(batch)
+end
 end
-
-
-
+rescue TryAgain
+retry
+end
+end
+
+batches.each do |batch|
+job = batch[:top_level]
+if job.done? || job.aborted? || job.error?
+job.join if job.done?
 clear_batch(batches, batch)
+release_resources(job)
 erase_job_dependencies(job, batches)
-when job.running?
-next
-
-else
-check_resources(batch) do
-run_batch(batch)
-end
 end
-rescue TryAgain
-retry
 end
-end

-
-job = batch[:top_level]
-if job.done? || job.aborted? || job.error?
-job.join if job.done?
-clear_batch(batches, batch)
-release_resources(job)
-erase_job_dependencies(job, batches)
-end
+sleep timer
 end
-
-sleep timer
 end

 batches.each{|batch|
@@ -141,9 +150,19 @@ class Workflow::LocalExecutor

 def process(rules, jobs = nil)
 jobs, rules = rules, {} if jobs.nil?
-
+
+if Step === jobs
+jobs = [jobs]
+end

 batches = Workflow::Orchestrator.job_batches(rules, jobs)
+
+if jobs.length == 1
+bar = jobs.first.progress_bar("Processing batches for #{jobs.first.short_path}", max: batches.length)
+else
+bar = true
+end
+
 batches.each do |batch|
 rules = IndiferentHash.setup batch[:rules]
 rules.delete :erase if jobs.include?(batch[:top_level])
@@ -153,13 +172,13 @@ class Workflow::LocalExecutor
 batch[:rules] = rules
 end

-process_batches(batches)
+process_batches(batches, bar: bar)
 end

 def release_resources(job)
 if resources_used[job]
 Log.debug "Orchestrator releasing resouces from #{job.path}"
-resources_used[job].each do |resource,value|
+resources_used[job].each do |resource,value|
 next if resource == 'size'
 resources_requested[resource] -= value.to_i
 end
@@ -195,8 +214,9 @@ class Workflow::LocalExecutor
 def run_batch(batch)
 job, job_rules = batch.values_at :top_level, :rules

-rules = batch[:rules]
+rules = batch[:rules]
 deploy = rules[:deploy] if rules
+Log.debug "Processing #{deploy} #{job.short_path} #{Log.fingerprint job_rules}"
 case deploy
 when nil, 'local', :local, :serial, 'serial'
 Scout::Config.with_config do
@@ -207,7 +227,7 @@ class Workflow::LocalExecutor
 log = job_rules[:log] if job_rules
 log = Log.severity if log.nil?
 Log.with_severity log do
-job.fork
+job.fork(true)
 end
 end
 when 'batch', 'sched', 'slurm', 'pbs', 'lsf'
data/lib/scout/workflow/deployment/orchestrator/batches.rb
CHANGED

@@ -69,6 +69,7 @@ class Workflow::Orchestrator
 task_name = job.task_name
 task_rules = task_specific_rules(rules, workflow, task_name)
 acc = accumulate_rules(acc, task_rules.dup)
+acc
 end

 if chain = batch[:chain]
@@ -92,12 +93,20 @@ class Workflow::Orchestrator
 next if batch[:deps].nil?

 if batch[:deps].any?
-batch_dep_jobs = batch[:top_level].rec_dependencies
+batch_dep_jobs = batch[:top_level].rec_dependencies.to_a
 target = batch[:deps].select do |target|
-
-
+target_deps = []
+stack = [target]
+while stack.any?
+c = stack.pop
+target_deps << c
+stack.concat c[:deps]
+end
+(batch[:deps] - target_deps).empty?
 end.first
 next if target.nil?
+all_target_jobs = ([target] + target[:deps]).collect{|d| d[:jobs] }.flatten
+next if all_target_jobs.reject{|j| batch_dep_jobs.include? j }.any?
 target[:jobs] = batch[:jobs] + target[:jobs]
 target[:deps] = (target[:deps] + batch[:deps]).uniq - [target]
 target[:top_level] = batch[:top_level]
@@ -119,12 +128,64 @@ class Workflow::Orchestrator
 jobs = [jobs] unless Array === jobs

 workload = job_workload(jobs)
-
+job_chain_list = []

-
+jobs.each do |job|
+job_chains = self.job_chains(rules, job)
+job_chains.each do |chain,list|
+list.each do |info|
+job_chain_list << [chain,info]
+end
+end
+end
+
+batches = chain_batches(rules, job_chain_list, workload)
 batches = add_batch_deps(batches)
 batches = add_rules_and_consolidate(rules, batches)

 batches
 end
+
+def self.sort_batches(batches)
+pending = batches.dup
+sorted = []
+while pending.any?
+leaf_nodes = batches.select{|batch| batch[:deps].nil? || (batch[:deps] - sorted).empty? }
+sorted.concat(leaf_nodes - sorted)
+pending -= leaf_nodes
+end
+sorted
+end
+
+def self.errors_in_batch(batch)
+errors = batch[:jobs].select do |job|
+job.error? && ! job.recoverable_error?
+end
+
+errors.empty? ? false : errors
+end
+
+def self.clean_batches(batches)
+error = []
+batches.collect do |batch|
+if failed = Workflow::Orchestrator.errors_in_batch(batch)
+Log.warn "Batch contains errors #{batch[:top_level].short_path} #{Log.fingerprint failed}"
+error << batch
+next
+elsif (error_deps = error & batch[:deps]).any?
+if error_deps.reject{|b| b[:top_level].canfail? }.any?
+Log.warn "Batch depends on batches with errors #{batch[:top_level].short_path} #{Log.fingerprint(error_deps.collect{|d| d[:top_level] })}"
+error << batch
+next
+else
+batch[:deps] -= error_deps
+end
+end
+batch
+end.compact
+end
+
+def self.inspect_batch(batch)
+batch.merge(deps: batch[:deps].collect{|b| b[:top_level] })
+end
 end