scout-gear 10.11.6 → 10.11.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.vimproject +16 -2
- data/VERSION +1 -1
- data/bin/scout +10 -10
- data/lib/scout/association/fields.rb +15 -15
- data/lib/scout/association/index.rb +6 -6
- data/lib/scout/association/item.rb +18 -8
- data/lib/scout/association.rb +4 -4
- data/lib/scout/entity/identifiers.rb +5 -5
- data/lib/scout/entity/property.rb +2 -2
- data/lib/scout/entity.rb +1 -1
- data/lib/scout/knowledge_base/description.rb +10 -10
- data/lib/scout/knowledge_base/entity.rb +6 -6
- data/lib/scout/knowledge_base/list.rb +1 -1
- data/lib/scout/knowledge_base/query.rb +4 -4
- data/lib/scout/knowledge_base/registry.rb +6 -6
- data/lib/scout/knowledge_base/traverse.rb +7 -40
- data/lib/scout/persist/engine/fix_width_table.rb +6 -6
- data/lib/scout/persist/engine/packed_index.rb +2 -2
- data/lib/scout/persist/engine/sharder.rb +4 -4
- data/lib/scout/persist/engine/tkrzw.rb +1 -1
- data/lib/scout/persist/engine/tokyocabinet.rb +2 -2
- data/lib/scout/persist/tsv/adapter/fix_width_table.rb +1 -1
- data/lib/scout/persist/tsv/adapter/packed_index.rb +1 -1
- data/lib/scout/persist/tsv/adapter/tkrzw.rb +1 -1
- data/lib/scout/persist/tsv/adapter/tokyocabinet.rb +3 -3
- data/lib/scout/persist/tsv/serialize.rb +3 -3
- data/lib/scout/persist/tsv.rb +1 -1
- data/lib/scout/semaphore.rb +78 -3
- data/lib/scout/tsv/annotation/repo.rb +4 -4
- data/lib/scout/tsv/annotation.rb +2 -2
- data/lib/scout/tsv/attach.rb +7 -7
- data/lib/scout/tsv/change_id/translate.rb +1 -1
- data/lib/scout/tsv/csv.rb +3 -3
- data/lib/scout/tsv/dumper.rb +8 -8
- data/lib/scout/tsv/index.rb +1 -1
- data/lib/scout/tsv/open.rb +3 -3
- data/lib/scout/tsv/stream.rb +2 -2
- data/lib/scout/tsv/traverse.rb +4 -4
- data/lib/scout/tsv/util/filter.rb +9 -9
- data/lib/scout/tsv/util/process.rb +2 -2
- data/lib/scout/tsv/util/reorder.rb +2 -2
- data/lib/scout/tsv/util/select.rb +3 -3
- data/lib/scout/tsv/util/unzip.rb +2 -2
- data/lib/scout/tsv/util.rb +1 -1
- data/lib/scout/tsv.rb +2 -2
- data/lib/scout/work_queue/socket.rb +2 -2
- data/lib/scout/work_queue/worker.rb +4 -4
- data/lib/scout/work_queue.rb +5 -5
- data/lib/scout/workflow/definition.rb +18 -16
- data/lib/scout/workflow/deployment/local.rb +82 -62
- data/lib/scout/workflow/deployment/orchestrator/batches.rb +66 -5
- data/lib/scout/workflow/deployment/orchestrator/chains.rb +47 -30
- data/lib/scout/workflow/deployment/orchestrator/rules.rb +3 -3
- data/lib/scout/workflow/deployment/orchestrator/workload.rb +11 -22
- data/lib/scout/workflow/deployment/scheduler/job.rb +34 -36
- data/lib/scout/workflow/deployment/scheduler/lfs.rb +1 -1
- data/lib/scout/workflow/deployment/scheduler/pbs.rb +4 -4
- data/lib/scout/workflow/deployment/scheduler/slurm.rb +2 -2
- data/lib/scout/workflow/deployment/scheduler.rb +23 -12
- data/lib/scout/workflow/deployment/trace.rb +2 -2
- data/lib/scout/workflow/documentation.rb +4 -4
- data/lib/scout/workflow/export.rb +1 -1
- data/lib/scout/workflow/path.rb +2 -2
- data/lib/scout/workflow/step/children.rb +1 -1
- data/lib/scout/workflow/step/dependencies.rb +36 -3
- data/lib/scout/workflow/step/info.rb +5 -19
- data/lib/scout/workflow/step/inputs.rb +1 -1
- data/lib/scout/workflow/step/progress.rb +2 -2
- data/lib/scout/workflow/step/provenance.rb +4 -4
- data/lib/scout/workflow/step/status.rb +23 -9
- data/lib/scout/workflow/step.rb +19 -17
- data/lib/scout/workflow/task/dependencies.rb +10 -3
- data/lib/scout/workflow/task/info.rb +3 -3
- data/lib/scout/workflow/task/inputs.rb +14 -8
- data/lib/scout/workflow/task.rb +37 -22
- data/lib/scout/workflow/usage.rb +13 -13
- data/lib/scout/workflow/util.rb +1 -1
- data/lib/scout/workflow.rb +6 -6
- data/scout-gear.gemspec +3 -3
- data/scout_commands/alias +1 -1
- data/scout_commands/batch/clean +12 -12
- data/scout_commands/batch/list +26 -25
- data/scout_commands/batch/tail +9 -5
- data/scout_commands/cat +1 -1
- data/scout_commands/doc +2 -2
- data/scout_commands/entity +4 -4
- data/scout_commands/find +1 -1
- data/scout_commands/kb/config +1 -1
- data/scout_commands/kb/entities +1 -1
- data/scout_commands/kb/list +1 -1
- data/scout_commands/kb/query +2 -2
- data/scout_commands/kb/register +1 -1
- data/scout_commands/kb/show +1 -1
- data/scout_commands/kb/traverse +1 -1
- data/scout_commands/log +6 -6
- data/scout_commands/resource/produce +2 -2
- data/scout_commands/resource/sync +1 -1
- data/scout_commands/system/clean +7 -7
- data/scout_commands/system/status +4 -4
- data/scout_commands/template +1 -1
- data/scout_commands/update +1 -1
- data/scout_commands/workflow/info +1 -1
- data/scout_commands/workflow/install +1 -1
- data/scout_commands/workflow/list +2 -2
- data/scout_commands/workflow/process +2 -2
- data/scout_commands/workflow/prov +3 -3
- data/scout_commands/workflow/task +36 -11
- data/scout_commands/workflow/trace +1 -1
- data/scout_commands/workflow/write_info +2 -2
- data/share/templates/command +1 -1
- data/test/scout/association/test_item.rb +5 -0
- data/test/scout/entity/test_property.rb +3 -3
- data/test/scout/knowledge_base/test_description.rb +1 -1
- data/test/scout/knowledge_base/test_traverse.rb +2 -2
- data/test/scout/persist/engine/test_packed_index.rb +6 -6
- data/test/scout/persist/test_tsv.rb +4 -4
- data/test/scout/persist/tsv/adapter/test_packed_index.rb +4 -4
- data/test/scout/persist/tsv/adapter/test_sharder.rb +23 -23
- data/test/scout/persist/tsv/adapter/test_tokyocabinet.rb +1 -1
- data/test/scout/persist/tsv/test_serialize.rb +1 -1
- data/test/scout/test_association.rb +1 -1
- data/test/scout/test_tsv.rb +2 -2
- data/test/scout/test_workflow.rb +2 -2
- data/test/scout/tsv/test_annotation.rb +4 -4
- data/test/scout/tsv/test_index.rb +1 -1
- data/test/scout/tsv/test_open.rb +2 -2
- data/test/scout/tsv/test_parser.rb +2 -2
- data/test/scout/tsv/test_stream.rb +1 -1
- data/test/scout/tsv/test_transformer.rb +1 -1
- data/test/scout/tsv/util/test_filter.rb +1 -1
- data/test/scout/tsv/util/test_melt.rb +1 -1
- data/test/scout/tsv/util/test_reorder.rb +1 -1
- data/test/scout/work_queue/test_socket.rb +3 -3
- data/test/scout/work_queue/test_worker.rb +2 -2
- data/test/scout/workflow/deployment/orchestrator/test_batches.rb +13 -3
- data/test/scout/workflow/deployment/orchestrator/test_chains.rb +15 -13
- data/test/scout/workflow/deployment/orchestrator/test_workload.rb +1 -1
- data/test/scout/workflow/deployment/test_local.rb +2 -2
- data/test/scout/workflow/deployment/test_scheduler.rb +1 -2
- data/test/scout/workflow/step/test_children.rb +1 -1
- data/test/scout/workflow/step/test_dependencies.rb +36 -1
- data/test/scout/workflow/step/test_info.rb +3 -35
- data/test/scout/workflow/step/test_load.rb +1 -1
- data/test/scout/workflow/step/test_provenance.rb +1 -1
- data/test/scout/workflow/step/test_status.rb +33 -1
- data/test/scout/workflow/task/test_dependencies.rb +9 -7
- data/test/scout/workflow/task/test_inputs.rb +1 -1
- data/test/scout/workflow/test_definition.rb +1 -1
- data/test/scout/workflow/test_documentation.rb +1 -1
- data/test/scout/workflow/test_entity.rb +2 -2
- data/test/scout/workflow/test_step.rb +13 -13
- data/test/scout/workflow/test_usage.rb +1 -1
- data/test/test_helper.rb +1 -1
- metadata +2 -2
@@ -45,7 +45,7 @@ class KnowledgeBase
 
      [source_entities, target_entities]
    end
-
+
    def reassign(matches, source, target)
      #assignments[source] = (matches.any? ? matches.collect{|m| m.source_entity }.uniq : nil) if is_wildcard? source
      #assignments[target] = (matches.any? ? matches.collect{|m| m.target_entity }.uniq : nil) if is_wildcard? target
@@ -64,7 +64,7 @@ class KnowledgeBase
        assigned = assignments[source] || []
        matches = matches.select{|m| assigned.include? m.partition("~").first }
      end
-
+
      if is_wildcard? target
        assigned = assignments[target] || []
        matches = matches.select{|m| assigned.include? m.partition("~").last }
@@ -110,7 +110,7 @@ class KnowledgeBase
 
      return false if paths.empty?
 
-     paths
+     paths
    end
 
    def _ep(paths)
@@ -172,8 +172,8 @@ class KnowledgeBase
      _name, _sep, _kb = db.partition("@")
      case
      when _name[0] == '?'
-       dbs = all_dbs.select{|_db|
-         n,_s,d=_db.partition("@");
+       dbs = all_dbs.select{|_db|
+         n,_s,d=_db.partition("@");
          d.nil? or d.empty? or (d == _kb and assignments[_name].include?(n))
        }
      when _kb[0] == '?'
@@ -247,7 +247,7 @@ class KnowledgeBase
      else
        dbs = id_dbs(db)
        names = names.collect{|name| assignments.include?(name) ? assignments[name] : name}.flatten
-       ids = names.collect{|name|
+       ids = names.collect{|name|
          id = nil
          dbs.each do |db|
            sid, tid = identify db, name, name
@@ -286,44 +286,11 @@ class KnowledgeBase
      [assignments, paths]
    end
 
-   #def traverse
-   #  all_matches = []
-
-   #  rules.each do |rule|
-   #    rule = rule.strip
-   #    next if rule.empty?
-   #    source, db, target, conditions = rule.match(/([^\s]+)\s+([^\s]+)\s+([^\s]+)(?:\s+-\s+([^\s]+))?/).captures
-
-   #    source_entities, target_entities = identify db, source, target
-
-   #    matches = kb.subset(db, :source => source_entities, :target => target_entities)
-
-   #    if conditions
-   #      conditions.split(/\s+/).each do |condition|
-   #        if condition.index "="
-   #          key, value = conditions.split("=")
-   #          matches = matches.select{|m| m.info[key.strip].to_s =~ /\b#{value.strip}\b/}
-   #        else
-   #          matches = matches.select{|m| m.info[condition.strip].to_s =~ /\btrue\b/}
-   #        end
-   #      end
-   #    end
-
-   #    reassign matches, source, target
-
-   #    all_matches << matches
-   #  end
-
-   #  paths = find_paths rules, all_matches, assignments
-
-   #  [assignments, paths]
-   #end
-
  end
 
  def traverse(rules, nopaths=false)
    traverser = KnowledgeBase::Traverser.new self, rules
    traverser.traverse nopaths
  end
-
+
 end
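The new traverse entry point simply builds a KnowledgeBase::Traverser and delegates to it; the old commented-out inline implementation is removed. A minimal usage sketch, not taken from the diff: kb is assumed to be an already-configured KnowledgeBase, "interactions" is a hypothetical database name, and the rule syntax (source db target, optionally followed by "- condition", with "?"-prefixed wildcards) is inferred from the regular expression in the removed comment block.

    # Hedged sketch: 'kb' and the 'interactions' database are assumptions, not part of the diff.
    rules = [
      "BRCA2 interactions ?partner",   # '?partner' is a wildcard, matching the '?' checks above
    ]

    # Expected to return the wildcard assignments and the matching paths ([assignments, paths] above)
    assignments, paths = kb.traverse(rules)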
@@ -62,7 +62,7 @@ class FixWidthTable
      Persist::CONNECTIONS[persistence_path] = self.new(persistence_path, value_size, range, update)
    end
 
-   Persist::CONNECTIONS[persistence_path]
+   Persist::CONNECTIONS[persistence_path]
  end
 
  def format(pos, value)
@@ -209,10 +209,10 @@ class FixWidthTable
    values = []
    l_start = idx_pos(idx)
    l_end = idx_pos_end(idx)
-
+
    if return_idx
      while l_start <= r_end
-       values << idx if l_end >= r_start
+       values << idx if l_end >= r_start
        idx += 1
        break if idx >= size
        l_start = idx_pos(idx)
@@ -220,7 +220,7 @@ class FixWidthTable
      end
    else
      while l_start <= r_end
-       values << idx_value(idx) if l_end >= r_start
+       values << idx_value(idx) if l_end >= r_start
        idx += 1
        break if idx >= size
        l_start = idx_pos(idx)
@@ -254,7 +254,7 @@ class FixWidthTable
    values = []
    l_start = idx_pos(idx)
    l_end = idx_pos_end(idx)
-   if return_idx
+   if return_idx
      while l_start <= r_end
        values << idx
        idx += 1
@@ -284,7 +284,7 @@ class FixWidthTable
      get_point(pos)
    end
  end
-
+
  def overlaps(pos, value = false)
    return [] if size == 0
    idxs = if @range
@@ -56,7 +56,7 @@ class PackedIndex
  def file
    @persistence_path
  end
-
+
  def close
    @stream.close
  end
@@ -82,7 +82,7 @@ class PackedIndex
  def get_position(position)
    @stream.seek(position * item_size + offset)
    encoded = @stream.read(item_size)
-   return nil if encoded.nil? or encoded == nil_string
+   return nil if encoded.nil? or encoded == nil_string
    encoded.unpack mask
  end
 
@@ -10,7 +10,7 @@ class Sharder
    @db_type = db_type
 
    if write
-     @databases = {}
+     @databases = {}
    end
  end
 
@@ -210,9 +210,9 @@ class Sharder
  end
 
  def size
-   databases.inject(0){|acc,i|
-     shard, db = i;
-     acc += db.size
+   databases.inject(0){|acc,i|
+     shard, db = i;
+     acc += db.size
    }
  end
end
@@ -5,7 +5,7 @@ module ScoutTKRZW
 
  def self.open(path, write = true, persistence_class = 'tkh', options = {})
    open_options = IndiferentHash.add_defaults options, truncate: true, num_buckets: 100, dbm: "HashDBM", sync_hard: true, encoding: "UTF-8"
-
+
    path = path.find if Path === path
 
    dir = File.dirname(File.expand_path(path))
@@ -31,11 +31,11 @@ if continue
    database = Log.ignore_stderr do Persist::CONNECTIONS[path] ||= tokyocabinet_class.new end
 
    if big and not Open.exists?(path)
-     database.tune(nil, nil, nil, tokyocabinet_class::TLARGE | tokyocabinet_class::TDEFLATE)
+     database.tune(nil, nil, nil, tokyocabinet_class::TLARGE | tokyocabinet_class::TDEFLATE)
    end
 
    flags = (write ? tokyocabinet_class::OWRITER | tokyocabinet_class::OCREAT : tokyocabinet_class::OREADER)
-   database.close
+   database.close
 
    if !database.open(path, flags)
      ecode = database.ecode
@@ -11,7 +11,7 @@ Persist.save_drivers[:tkh] = proc do |file, content|
  data
end
 
-Persist.load_drivers[:tkh] = proc do |file|
+Persist.load_drivers[:tkh] = proc do |file|
  data = ScoutTKRZW.open(file, false, "tkh")
  data.extend TSVAdapter unless TSVAdapter === data
  data
@@ -1,7 +1,7 @@
 require_relative 'base'
 require_relative '../../engine/tokyocabinet'
 
-module TKAdapter
+module TKAdapter
  include TSVAdapter
  def self.extended(obj)
    obj.extend TSVAdapter
@@ -38,7 +38,7 @@ Persist.save_drivers[:HDB] = proc do |file, content|
  end
end
 
-Persist.load_drivers[:HDB] = proc do |file|
+Persist.load_drivers[:HDB] = proc do |file|
  data = ScoutCabinet.open(file, false, "HDB")
  data.extend TKAdapter unless TKAdapter === data
  data
@@ -58,7 +58,7 @@ Persist.save_drivers[:BDB] = proc do |file, content|
  end
end
 
-Persist.load_drivers[:BDB] = proc do |file|
+Persist.load_drivers[:BDB] = proc do |file|
  data = ScoutCabinet.open(file, false, "BDB")
  data.extend TKAdapter unless TKAdapter === data
  data
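These hunks are whitespace-only, but they show the registration pattern the persistence layer uses: Persist.save_drivers and Persist.load_drivers are registries of procs keyed by driver symbol. A hedged sketch of adding a loader under a hypothetical :mydb key, mirroring the :HDB proc above:

    # ':mydb' is a made-up driver key; the body mirrors the HDB loader shown above.
    Persist.load_drivers[:mydb] = proc do |file|
      data = ScoutCabinet.open(file, false, "HDB")   # open read-only, as in the :HDB proc
      data.extend TKAdapter unless TKAdapter === data
      data
    end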
@@ -10,7 +10,7 @@ module TSVAdapter
    def self.dump(o); [o].pack('m'); end
    def self.load(str); str.unpack('m').first; end
  end
-
+
  class IntegerSerializer
    def self.dump(i); [i].pack("l"); end
    def self.load(str); str.unpack("l").first; end
@@ -101,8 +101,8 @@ module TSVAdapter
    :flat => StringArraySerializer,
    :double => StringDoubleArraySerializer,
    :clean => CleanSerializer,
-   :integer => IntegerSerializer,
-   :float => FloatSerializer,
+   :integer => IntegerSerializer,
+   :float => FloatSerializer,
    :integer_array => IntegerArraySerializer,
    :float_array => FloatArraySerializer,
    :strict_integer_array => StrictIntegerArraySerializer,
data/lib/scout/persist/tsv.rb CHANGED
data/lib/scout/semaphore.rb CHANGED
@@ -1,5 +1,6 @@
 begin
   require 'inline'
+  require 'fileutils'
   continue = true
 rescue Exception
   Log.warn "The RubyInline gem could not be loaded: semaphore synchronization will not work"
@@ -159,9 +160,55 @@ if continue
      end
    end
 
+    # Try to create the semaphore while holding a per-semaphore lock to avoid races
+    def self.ensure_or_create(name, size = 1)
+      # Normalize and make a safe lock path under Scout.tmp.semaphore_locks
+      lock_dir = if defined?(Scout) && Scout.respond_to?(:tmp) && Scout.tmp.respond_to?(:semaphore_locks)
+                   Scout.tmp.semaphore_locks
+                 else
+                   File.join('/tmp', 'scout', 'semaphore_locks')
+                 end
+
+      FileUtils.mkdir_p(lock_dir) unless File.exist?(lock_dir)
+
+      lock_base = File.join(lock_dir, name.gsub(%r{^/+}, '').gsub('/', '_'))
+
+      begin
+        Open.lock(lock_base) do |_lf|
+          # If someone else created it while waiting for the lock, we're done
+          return true if self.exists?(name)
+
+          Log.info "Semaphore #{name} missing; creating under lock #{lock_base}"
+          begin
+            # call the lower-level C create and let create_semaphore perform checks/retries
+            ret = ScoutSemaphore.create_semaphore_c(name, size)
+            if ret < 0
+              Log.warn "create_semaphore_c failed for #{name}: errno=#{-ret}"
+              return false
+            end
+
+            # best-effort: ensure the file shows up
+            unless self.exists?(name)
+              Log.warn "Semaphore #{name} created but /dev/shm entry not visible"
+            end
+
+            Log.info "Semaphore #{name} created"
+            return true
+          rescue Exception => e
+            Log.warn "Exception while creating semaphore #{name}: #{e.message}"
+            return false
+          end
+        end
+      rescue Exception => e
+        Log.warn "Failed to acquire creation lock for #{name}: #{e.message}"
+        return false
+      end
+    end
+
    # Safe wrappers that raise SystemCallError on final failure
    def self.create_semaphore(name, value, **opts)
      ret = with_retry(**opts) { ScoutSemaphore.create_semaphore_c(name, value) }
+      # After creation attempt, make sure the /dev/shm entry exists (cluster may remove entries)
      raise SystemCallError.new("Semaphore missing (#{name})") unless self.exists?(name)
      if ret < 0
        raise SystemCallError.new("create_semaphore(#{name}) failed", -ret)
@@ -172,14 +219,28 @@ if continue
    def self.delete_semaphore(name, **opts)
      ret = with_retry(**opts) { ScoutSemaphore.delete_semaphore_c(name) }
      if ret < 0
-
+        Log.warn("delete_semaphore(#{name}) failed")
      end
      ret
    end
 
    def self.wait_semaphore(name, **opts)
-
+      # Try a normal wait first
      ret = with_retry(**opts) { ScoutSemaphore.wait_semaphore_c(name) }
+
+      if ret < 0
+        err = -ret
+        # If semaphore missing or removed, try to recreate it under a lock and retry once
+        if err == Errno::ENOENT.new.errno || err == Errno::EIDRM.new.errno
+          Log.warn "wait_semaphore: semaphore #{name} appears missing (errno=#{err}); attempting recreate"
+          created = ensure_or_create(name, opts.fetch(:create_size, 1))
+          if created
+            # retry the wait after creating
+            ret = with_retry(**opts) { ScoutSemaphore.wait_semaphore_c(name) }
+          end
+        end
+      end
+
      if ret < 0
        err = -ret
        if err == Errno::EINTR.new.errno
@@ -188,12 +249,26 @@ if continue
          raise SystemCallError.new("wait_semaphore(#{name}) failed", err)
        end
      end
+
      ret
    end
 
    def self.post_semaphore(name, **opts)
-
+      # Try normal post first
      ret = with_retry(**opts) { ScoutSemaphore.post_semaphore_c(name) }
+
+      if ret < 0
+        err = -ret
+        # If semaphore missing or removed, try to recreate it under a lock and then post
+        if err == Errno::ENOENT.new.errno || err == Errno::EIDRM.new.errno
+          Log.warn "post_semaphore: semaphore #{name} appears missing (errno=#{err}); attempting recreate"
+          created = ensure_or_create(name, opts.fetch(:create_size, 1))
+          if created
+            ret = with_retry(**opts) { ScoutSemaphore.post_semaphore_c(name) }
+          end
+        end
+      end
+
      if ret < 0
        raise SystemCallError.new("post_semaphore(#{name}) failed", -ret)
      end
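Taken together, the semaphore changes make the wrappers self-healing when the /dev/shm entry disappears (for example after cluster cleanup): wait_semaphore and post_semaphore detect ENOENT/EIDRM, recreate the semaphore under a per-name lock with ensure_or_create, and retry once before raising, while delete_semaphore now logs failures. A hedged usage sketch, assuming these wrappers live on ScoutSemaphore as the internal calls suggest; the semaphore name is made up:

    name = "/scout_example_sem"                 # hypothetical semaphore name

    ScoutSemaphore.create_semaphore(name, 1)    # raises SystemCallError if the /dev/shm entry never appears

    ScoutSemaphore.wait_semaphore(name)         # on ENOENT/EIDRM: recreate via ensure_or_create, then retry once
    begin
      # ... critical section ...
    ensure
      ScoutSemaphore.post_semaphore(name)       # same recovery path as wait_semaphore
    end

    ScoutSemaphore.delete_semaphore(name)       # now logs a warning when the delete fails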
@@ -56,25 +56,25 @@ module Persist
      else
        annotations = yield
 
-        repo.write_and_close do
+        repo.write_and_close do
          case
          when annotations.nil?
            repo[subkey + "NIL"] = nil
          when annotations.empty?
            repo[subkey + "EMPTY"] = nil
          when (not Array === annotations or (AnnotatedArray === annotations and not Array === annotations.first))
-            tsv_values = Annotation.obj_tsv_values(annotations, repo_fields)
+            tsv_values = Annotation.obj_tsv_values(annotations, repo_fields)
            repo[subkey + annotations.id << ":" << "SINGLE"] = tsv_values
          when (not Array === annotations or (AnnotatedArray === annotations and AnnotatedArray === annotations.first))
            annotations.each_with_index do |e,i|
              next if e.nil?
-              tsv_values = Annotation.obj_tsv_values(e, repo_fields)
+              tsv_values = Annotation.obj_tsv_values(e, repo_fields)
              repo[subkey + "ANNOTATED_DOUBLE_ARRAY:" << i.to_s] = tsv_values
            end
          else
            annotations.each_with_index do |e,i|
              next if e.nil?
-              tsv_values = Annotation.obj_tsv_values(e, repo_fields)
+              tsv_values = Annotation.obj_tsv_values(e, repo_fields)
              repo[subkey + i.to_s] = tsv_values
            end
          end
data/lib/scout/tsv/annotation.rb CHANGED

@@ -41,7 +41,7 @@ module Annotation
  def self.list_tsv_values(objs, fields)
    obj_tsv_values(objs, fields)
  end
-
+
 
  def self.tsv(objs, *fields)
    return nil if objs.nil?
@@ -76,7 +76,7 @@ module Annotation
      tsv.key_field = "List"
 
      tsv[objs.id] = self.list_tsv_values(objs, fields).dup
-    when Array === objs
+    when Array === objs
      tsv.key_field = "ID"
 
      if Annotation.is_annotated?(objs.compact.first)

data/lib/scout/tsv/attach.rb CHANGED

@@ -28,7 +28,7 @@ module TSV
      end
    end
 
-    match_key = source.key_field if match_key.nil?
+    match_key = source.key_field if match_key.nil?
 
    if other_key.nil?
      other_key = other.identify_field(match_key)
@@ -87,13 +87,13 @@ module TSV
      index = TSV.translation_index(identifier_files.flatten, match_key_name, other_key_name)
    end
 
-    if other_key != :key
+    if other_key != :key
      other = other.reorder other_key, fields, one2one: one2one, merge: true, type: :double
    end
 
-    other_field_positions = other.identify_field(fields.dup)
+    other_field_positions = other.identify_field(fields.dup)
    fields.zip(other_field_positions) do |o,n|
-      raise "Field #{o} not found. Options: #{Log.fingerprint other.fields}" if n.nil?
+      raise "Field #{o} not found. Options: #{Log.fingerprint other.fields}" if n.nil?
    end
 
    log_message = "Attach #{Log.fingerprint fields - source.fields} to #{Log.fingerprint source} (#{[match_key, other_key] * "=~"})"
@@ -139,7 +139,7 @@ module TSV
 
      if other_values.nil?
        other_values = empty_other_values
-      elsif other.type == :flat
+      elsif other.type == :flat
        other_values = [other_values]
      elsif other.type == :list && source.type == :double
        other_values = other_values.collect{|v| [v] }
@@ -185,9 +185,9 @@ module TSV
      end
      other.each do |other_key,other_values|
        next if source.include?(other_key)
-        if other.type == :flat
+        if other.type == :flat
          other_values = [other_values]
-        elsif other.type == :single
+        elsif other.type == :single
          other_values = [other_values]
        elsif other.type == :list && type == :double
          other_values = other_values.collect{|v| [v] }

@@ -87,7 +87,7 @@ module TSV
    pos = NamedArray.identify_name(TSV.all_fields(file1), TSV.all_fields(file2))
    TSV.all_fields(file1)[pos.compact.first]
  end
-  Persist.persist(name, "HDB", persist_options) do
+  Persist.persist(name, "HDB", persist_options) do
    index = path.inject(nil) do |acc,file|
      if acc.nil?
        if source.nil?

data/lib/scout/tsv/csv.rb CHANGED
@@ -1,7 +1,7 @@
 require 'csv'
 
 module TSV
-  def self.csv(obj, options = {})
+  def self.csv(obj, options = {})
    options = IndiferentHash.add_defaults options, :headers => true, :type => :list
    headers = options[:headers]
 
@@ -12,7 +12,7 @@ module TSV
    merge = options.delete :merge
    key_field = options.delete :key_field
    fields = options.delete :fields
-
+
    if key_field || fields
      orig_type = type
      type = :double
@@ -49,7 +49,7 @@ module TSV
      else
        key, *values = row
      end
-
+
      if cast
        values = values.collect{|v| v.send cast }
      end

data/lib/scout/tsv/dumper.rb CHANGED
@@ -1,7 +1,7 @@
 module TSV
  class Dumper
    def self.header(options={})
-      key_field, fields, sep, header_hash, preamble, unnamed = IndiferentHash.process_options options,
+      key_field, fields, sep, header_hash, preamble, unnamed = IndiferentHash.process_options options,
        :key_field, :fields, :sep, :header_hash, :preamble, :unnamed,
        :sep => "\t", :header_hash => "#", :preamble => true
 
@@ -29,8 +29,8 @@ module TSV
    attr_accessor :options, :initialized, :type, :sep, :compact, :filename, :namespace
    def initialize(options = {})
      options = options.options.merge(sep: nil) if TSV::Parser === options || TSV === options
-      @sep, @type = IndiferentHash.process_options options,
-        :sep, :type,
+      @sep, @type = IndiferentHash.process_options options,
+        :sep, :type,
        :sep => "\t", :type => :double
      @compact = options[:compact]
      @options = options
@@ -54,7 +54,7 @@ module TSV
    def key_field
      @options[:key_field]
    end
-
+
    def fields
      @options[:fields]
    end
@@ -62,7 +62,7 @@ module TSV
    def key_field=(key_field)
      @options[:key_field] = key_field
    end
-
+
    def fields=(fields)
      @options[:fields] = fields
    end
@@ -100,7 +100,7 @@ module TSV
      when :double
        @sin << key + @sep + value.collect{|v| Array === v ? (@compact ? v.compact : v) * "|" : v } * @sep << "\n"
      else
-        if Array === value
+        if Array === value
          if Array === value.first
            @sin << key + @sep + value.collect{|v| Array === v ? (@compact ? v.compact : v) * "|" : v } * @sep << "\n"
          else
@@ -147,7 +147,7 @@ module TSV
    end
 
    def dumper_stream(options = {})
-      preamble, unmerge, keys, stream = IndiferentHash.process_options options,
+      preamble, unmerge, keys, stream = IndiferentHash.process_options options,
        :preamble, :unmerge, :keys, :stream,
        :preamble => true, :unmerge => false
      unmerge = false unless @type === :double
@@ -177,7 +177,7 @@ module TSV
 
      self.with_unnamed do
        if stream.nil?
-          t = Thread.new do
+          t = Thread.new do
            begin
              Thread.current.report_on_exception = true
              Thread.current["name"] = "Dumper thread"

data/lib/scout/tsv/index.rb CHANGED
data/lib/scout/tsv/open.rb CHANGED

@@ -62,7 +62,7 @@ module Open
 
    if into.respond_to?(:close)
      obj = obj.find if Path === obj
-      into_thread = Thread.new do
+      into_thread = Thread.new do
        Thread.current.report_on_exception = false
        Thread.current["name"] = "Traverse into"
        error = false
@@ -96,7 +96,7 @@ module Open
    queue.process do |res|
      callback.call res if callback
    end
-
+
    begin
      self.traverse(obj, **options) do |*args|
        queue.write args
@@ -210,7 +210,7 @@ module TSV
 
  def self.process_stream(stream, header_hash: "#", &block)
    sout = Open.open_pipe do |sin|
-      while line = stream.gets
+      while line = stream.gets
        break unless line.start_with?(header_hash)
        sin.puts line
      end

data/lib/scout/tsv/stream.rb CHANGED
@@ -77,7 +77,7 @@ module TSV
      fields = fields.compact.flatten
    end
 
-    options = input_options.first
+    options = input_options.first
    type ||= options[:type]
    type ||= :list if type == :single
    type ||= :double if type == :flat
@@ -152,7 +152,7 @@ module TSV
 
      if k == keys[i]
        new_parts = NamedArray.zip_fields([new_parts]).zip(p).collect{|p| [p.flatten * "|"] }
-        raise TryAgain
+        raise TryAgain
      end
      keys[i]= k
      parts[i]= p