eco-helpers 2.6.0 → 2.6.1
- checksums.yaml +4 -4
- data/CHANGELOG.md +72 -4
- data/README.md +5 -0
- data/eco-helpers.gemspec +1 -1
- data/lib/eco/api/common/class_helpers.rb +1 -1
- data/lib/eco/api/common/loaders/case_base.rb +0 -2
- data/lib/eco/api/common/loaders/config/workflow/mailer.rb +78 -0
- data/lib/eco/api/common/loaders/config/workflow.rb +11 -0
- data/lib/eco/api/common/loaders/config.rb +29 -0
- data/lib/eco/api/common/loaders/error_handler.rb +0 -2
- data/lib/eco/api/common/loaders/parser.rb +0 -1
- data/lib/eco/api/common/loaders/policy.rb +0 -2
- data/lib/eco/api/common/loaders.rb +1 -0
- data/lib/eco/api/common/session/mailer.rb +3 -1
- data/lib/eco/api/common/version_patches/exception.rb +2 -2
- data/lib/eco/api/common/version_patches/ruby3/object.rb +18 -0
- data/lib/eco/api/common/version_patches/ruby3.rb +1 -0
- data/lib/eco/api/common/version_patches.rb +3 -0
- data/lib/eco/api/custom/config.rb +10 -0
- data/lib/eco/api/custom/mailer.rb +9 -0
- data/lib/eco/api/custom/namespace.rb +2 -0
- data/lib/eco/api/custom/workflow.rb +9 -0
- data/lib/eco/api/custom.rb +3 -0
- data/lib/eco/api/session/batch/base_policy.rb +13 -5
- data/lib/eco/api/session/batch/job.rb +10 -7
- data/lib/eco/api/session/config/workflow.rb +94 -58
- data/lib/eco/api/session/config.rb +2 -2
- data/lib/eco/api/usecases/base_io.rb +50 -4
- data/lib/eco/api/usecases/cli/dsl.rb +23 -13
- data/lib/eco/api/usecases/default/locations/cli/tagtree_extract_cli.rb +5 -0
- data/lib/eco/api/usecases/default/locations/tagtree_extract_case.rb +12 -4
- data/lib/eco/api/usecases/graphql/helpers/location/base.rb +1 -2
- data/lib/eco/api/usecases/ooze_samples/register_update_case.rb +3 -3
- data/lib/eco/api/usecases/use_case.rb +12 -2
- data/lib/eco/assets.rb +2 -2
- data/lib/eco/cli_default/workflow.rb +102 -120
- data/lib/eco/data/locations/node_base/tag_validations.rb +19 -9
- data/lib/eco/data/locations/node_base/treeify.rb +193 -18
- data/lib/eco/data/locations/node_level.rb +1 -1
- data/lib/eco/data/locations/node_plain/parsing.rb +1 -1
- data/lib/eco/data/locations/node_plain/serial.rb +1 -1
- data/lib/eco/data/locations/node_plain.rb +4 -3
- data/lib/eco/language/klass/when_inherited.rb +17 -0
- data/lib/eco/language/klass.rb +8 -0
- data/lib/eco/language/methods/delegate_missing.rb +28 -0
- data/lib/eco/language/methods/dsl_able.rb +25 -0
- data/lib/eco/language/methods.rb +9 -0
- data/lib/eco/language.rb +2 -0
- data/lib/eco/version.rb +1 -1
- metadata +16 -3
data/lib/eco/cli_default/workflow.rb
@@ -1,189 +1,171 @@
-ASSETS.cli
+ASSETS.cli do |cli|
   ASSETS.config.workflow do |wf|
-    io = nil
-    rescued = false
-    cases_with_input = nil
-    cases_with_output = nil
 
+    rescued = false
     # default rescue
-    wf.rescue do |
-
-
-
-
-
-
-    rescue Exception => e
-      puts "Some problem in workflow.rescue: #{e}"
-    end
-    io
+    wf.rescue do |err, io|
+      next io if rescued
+      rescued = true
+      log(:debug) { err.patch_full_message }
+      wf.run(:close, io: io)
+    rescue StandardError => e
+      puts "Some problem in workflow.rescue: #{e}"
     end
 
-    wf.on(:options) do |
-      config.usecases.cli_apply(io: io)
-      io
+    wf.on(:options) do |_wf_opt, io|
+      cli.config.usecases.cli_apply(io: io)
+      io.new(options: cli.config.options_set.process(io: io))
     end
 
     wf.for(:load) do |wf_load|
-      wf_load.for(:input) do |
-
-        cases_with_input =
+      wf_load.for(:input) do |wf_in|
+        wf_in.on(:get) do |_wf_ig, io|
+          cases_with_input = cli.config.usecases.active(io: io).select do |usecase, data|
             io.class.input_required?(usecase.type)
           end
 
-        input_is_required = !cases_with_input.empty? ||
-        missing_input = !
-        next
+          input_is_required = !cases_with_input.empty? || options.dig(:input, :entries_from)
+          missing_input = !input || input.empty?
+          next unless missing_input && input_is_required
 
-        if
-          io
+          if options.dig(:input, :entries_from)
+            io.new(input: cli.config.input.get(io: io))
           else
             opt_case = cases_with_input.values.first.option
-          io
+            io.new(input: cli.config.input.get(io: io, option: opt_case))
          end
-        io
        end
 
-
-        next
-        io
+        wf_in.on(:filter) do |_wf_if, io|
+          next unless input && !input.empty?
+          io.new(input: cli.config.input_filters.process(io: io))
        end
      end
 
-      wf_load.for(:people) do |
-
-        cases_with_people = config.usecases.active(io: io).select do |usecase, data|
+      wf_load.for(:people) do |wf_peo|
+        wf_peo.on(:get) do |_wf_pg, io|
+          cases_with_people = cli.config.usecases.active(io: io).select do |usecase, data|
            io.class.people_required?(usecase.type)
          end
-        next
-        io
+          next if cases_with_people.empty? && !options.dig(:people, :get)
+          io.new(people: cli.config.people(io: io))
        end
 
-
-        next
-        io
+        wf_peo.on(:filter) do |_wf_pf, io|
+          next unless people && !people.empty?
+          io.new(people: cli.config.people_filters.process(io: io))
        end
      end
    end
 
-    wf.before(:usecases) do |
+    wf.before(:usecases) do |_wf_ca, io|
      # save partial entries -> should be native to session.workflow
-      get_people =
+      get_people = options.dig(:people, :get)
      partial_update = get_people && get_people.dig(:type) == :partial
-      if !
-        partial_file =
-
+      if !options[:dry_run] && partial_update
+        partial_file = session.config.people.partial_cache
+        session.file_manager.save_json(io.people, partial_file, :timestamp)
      end
-      io
    end
 
-    wf.on(:usecases) do |
-      unless config.usecases.process(io: io)
-
-
-        exit(0)
+    wf.on(:usecases) do |_wf_ca, io|
+      unless cli.config.usecases.process(io: io)
+        log(:info) { "No update operation specified... quitting" }
+        exit 0
      end
-      io
    end
 
-    wf.before(:launch_jobs) do
+    wf.before(:launch_jobs) do
      SCR.stop_on_unknown!
-      io
    end
 
-    wf.on(:launch_jobs) do
-
-      io
+    wf.on(:launch_jobs) do
+      session.jobs_launch(simulate: options[:dry_run])
    end
 
    wf.before(:post_launch) do |wf_post, io|
-      if
+      next wf_post.skip! if session.post_launch.empty?
+
+      run_it = !options[:dry_run] || options.dig(:post_launch, :run)
+      unless run_it
        wf_post.skip!
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-          msg += ", because it is not a partial update (-get-partial option not present)."
-        end
-        io.session.logger.info(msg)
-      end
-      else
-        wf_post.skip!
-        msg = "Although there are post_launch cases, they will NOT be RUN"
+        log(:info) {
+          msg = "Although there are post_launch cases, they will NOT be RUN"
+          msg += ", because we are in dry-run (simulate)." if options[:dry_run]
+          msg
+        }
+        next
+      end
+
+      get_people = options.dig(:people, :get)
+      partial_update = get_people && get_people.dig(:type) == :partial
+      refresh_data = !options[:dry_run] && partial_update
+
+      unless refresh_data
+        log(:info) {
+          msg = "Although there are post_launch cases, data will not be refreshed before their run"
          if io.options[:dry_run]
-          msg+= ", because we are in dry-run (simulate)."
+            msg += ", because we are in dry-run (simulate)."
+          elsif !partial_update
+            msg += ", because it is not a partial update (-get-partial option not present)."
          end
-
-
+          msg
+        }
+        next
      end
-
+
+      # get target people afresh
+      peo_aux = session.micro.people_refresh(people: people, include_created: true)
+      io.base.new(people: peo_aux)
    end
 
    wf.for(:post_launch) do |wf_post|
-
-
-
-
-
-
-
-
-      else
-        raise
-      end
-      end
+      wf_post.on(:usecases) do |_wf_pu, io|
+        session.post_launch.each do |use|
+          use.launch(io: io).base
+        rescue Eco::API::UseCases::BaseIO::MissingParameter => e
+          raise unless e.required == :people
+          log(:debug) {
+            "Skipping use case '#{use.name}' -- no base people detected for the current run"
+          }
        end
-        io
      end
 
-      wf_post.on(:launch_jobs) do |
-
-        io
+      wf_post.on(:launch_jobs) do |_wf_pl, io|
+        session.jobs_launch(simulate: options[:dry_run])
      end
    end
 
-    wf.on(:report) do |
-
-
-
-
-
-
-
-        io.session.process_case("to-csv", io: aux_io, type: :export)
-      end
+    wf.on(:report) do |_wf_rep, io|
+      if file = options.dig(:report, :people, :csv)
+        options.deep_merge!(export: {
+          options: {internal_names: true, nice_header: true, split_schemas: true},
+          file: {name: file, format: :csv}
+        })
+        aux_io = io.new(people: people.updated_or_created)
+        session.process_case("to-csv", io: aux_io, type: :export)
      end
    end
 
-    wf.on(:end) do |
-      get_people =
+    wf.on(:end) do |_wf_end, io|
+      get_people = options.dig(:people, :get)
      partial_update = get_people && get_people.dig(:type) == :partial
 
-      unless !
-        people_update_cases = config.usecases.active(io: io).any? do |usecase, data|
-          [:transform, :sync].any? {|type| usecase.type == type}
+      unless !options[:end_get] || options[:dry_run] || partial_update
+        people_update_cases = cli.config.usecases.active(io: io).any? do |usecase, data|
+          [:transform, :sync].any? { |type| usecase.type == type }
        end
 
        if !people_update_cases
          # Prevent getting people when there were no use cases that used them
-
-
-
-
+          log(:info) {
+            "Won't be recaching people, as there haven't been any targetted updates"
+          }
+        elsif !people
+          people = session.micro.people_cache
+          io.new(people: people)
        end
      end
-      io
    end
  end
 end
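A pattern worth noting in the rewrite above: every stage callback now receives the stage handle plus the current `io`, and returns a (possibly rebuilt) `io` via `io.new(...)`, instead of mutating a captured `io` local. A minimal standalone sketch of that threading idea follows (`MiniWorkflow` is illustrative only, not the gem's workflow API):

# Minimal sketch (illustrative): run stage callbacks in order, threading
# the io through; a handler's non-nil return becomes the new io.
class MiniWorkflow
  def initialize
    @handlers = Hash.new { |hash, key| hash[key] = [] }
  end

  # Register a callback for a stage; it is called with (stage, io).
  def on(stage, &block)
    @handlers[stage] << block
    self
  end

  # Run a stage and return the resulting io.
  def run(stage, io:)
    @handlers[stage].each do |handler|
      io = handler.call(stage, io) || io
    end
    io
  end
end

wf = MiniWorkflow.new
wf.on(:options) { |_stage, io| io.merge(options: { dry_run: true }) }
wf.run(:options, io: {})  # => {:options=>{:dry_run=>true}}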
data/lib/eco/data/locations/node_base/tag_validations.rb
@@ -1,33 +1,43 @@
 module Eco::Data::Locations::NodeBase
   module TagValidations
+    include Eco::Language::AuxiliarLogger
+
     ALLOWED_CHARACTERS = "A-Za-z0-9 &_'\/.-"
     VALID_TAG_REGEX = /^[#{ALLOWED_CHARACTERS}]+$/
     INVALID_TAG_REGEX = /[^#{ALLOWED_CHARACTERS}]+/
     VALID_TAG_CHARS = /[#{ALLOWED_CHARACTERS}]+/
     DOUBLE_BLANKS = /\s\s+/
 
-    def clean_id(str, notify: true)
+    def clean_id(str, notify: true, ref: '')
       blanks_x2 = has_double_blanks?(str)
       partial = replace_not_allowed(str)
       remove_double_blanks(partial).tap do |result|
         next unless notify
-        next if invalid_warned?
+        next if invalid_warned?(str)
         if partial != str
           invalid_chars = identify_invalid_characters(str)
-
+          log(:warn) {
+            "• #{ref}Invalid characters _#{invalid_chars}_ <<_removed_: '#{str}' :_converted_>> '#{result}'"
+          }
         elsif blanks_x2
-
+          log(:warn) {
+            "• #{ref}Double blanks removed: '#{str}' :_converted_>> '#{result}'"
+          }
         end
-        invalid_warned!
+        invalid_warned!(str)
       end
     end
 
-    def invalid_warned?
-
+    def invalid_warned?(str)
+      invalid_warned[str] ||= false
+    end
+
+    def invalid_warned!(str)
+      invalid_warned[str] = true
     end
 
-    def invalid_warned
-      @invalid_warned
+    def invalid_warned
+      @invalid_warned ||= {}
     end
 
     def has_double_blanks?(str)
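The `tag_validations.rb` change swaps a single boolean guard for per-string memoization, so each distinct raw tag warns at most once while different tags still get their own warning. A stripped-down sketch of the same idea, using plain `Kernel#warn` in place of the gem's logger:

# Standalone sketch: remember which inputs we already warned about,
# keyed by the raw string, so repeated inputs stay silent.
module WarnOncePerString
  def warned?(str)
    warned[str] ||= false
  end

  def warned!(str)
    warned[str] = true
  end

  def warned
    @warned ||= {}
  end

  def clean(str)
    str.squeeze(' ').tap do |result|
      next if warned?(str)
      warn "double blanks removed: #{str.inspect} -> #{result.inspect}" if str =~ /\s\s+/
      warned!(str)
    end
  end
end

class TagCleaner
  include WarnOncePerString
end

cleaner = TagCleaner.new
cleaner.clean("North  Region")  # warns once
cleaner.clean("North  Region")  # silent on repeat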
data/lib/eco/data/locations/node_base/treeify.rb
@@ -12,14 +12,31 @@ module Eco::Data::Locations::NodeBase
     # @yieldreturn [Hash] custom hash model when treeifying (allows to set more keys/properties).
     # @nodes [Array<NodeBase>] list of nodes
     # @return [Array<Hash>] a hierarchical tree of nested Hashes via `nodes` key.
-    def treeify(nodes, &block)
+    def treeify(nodes, skipped: [], unlinked_trees: [], &block)
       return [] if nodes.empty?
       block ||= nodes.first.class.serializer
-
+      done_ids = {}
+      warns = []
+      parents = parents_hash(nodes)
+      get_children(nil, parents, done_ids: done_ids, skipped: skipped, warns: warns, &block).tap do |tree|
+        check_results(
+          tree,
+          nodes,
+          parents,
+          done_ids: done_ids,
+          skipped: skipped,
+          unlinked_trees: unlinked_trees,
+          warns: warns,
+          &block
+        )
+        log(:warn) { warns.join("\n") } unless warns.empty?
+      end
     end
 
     private
 
+    # @return [Hash] where `key`s are all the `parentId` of the nodes
+    #   and `value` an `Array` of those nodes that have that `parentId`
     def parents_hash(nodes)
       nodes.each_with_object({}) do |node, parents|
         (parents[node.parentId] ||= []).push(node)
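At its core, `parents_hash` is a group-by over `parentId`, and treeifying is then a recursive walk starting from the `nil` key (the roots). A self-contained sketch of that core, with all of the gem's guard rails (`done_ids`, skipped and unlinked bookkeeping) deliberately omitted; the `DemoNode` Struct is an assumption for illustration:

# Core treeify idea, guard rails omitted: group by parent_id, recurse from roots.
DemoNode = Struct.new(:id, :name, :parent_id)

def parents_hash(nodes)
  nodes.group_by(&:parent_id)
end

def build_tree(parent_id, parents)
  (parents[parent_id] || []).map do |node|
    {
      "id"    => node.id,
      "name"  => node.name,
      "nodes" => build_tree(node.id, parents)
    }
  end
end

nodes = [
  DemoNode.new("1",     "Root",       nil),
  DemoNode.new("1.1",   "Child",      "1"),
  DemoNode.new("1.1.1", "Grandchild", "1.1")
]
pp build_tree(nil, parents_hash(nodes))
# => [{"id"=>"1", "name"=>"Root", "nodes"=>[{"id"=>"1.1", ...}]}]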
@@ -32,12 +49,21 @@ module Eco::Data::Locations::NodeBase
     # 3. The above can translate into some
     # @yield [node]
     # @yieldreturn [Hash] custom hash model when treeifying
-    def get_children(node_id, parents, parent: nil, done_ids: {},
+    def get_children(node_id, parents, parent: nil, level: 0, done_ids: {}, skipped: [], warns: [], &block)
       level_ids = []
       (parents[node_id] ||= []).each_with_object([]) do |child, results|
         # Skipping done id. Add proper warnings...
         # => rely on `done_ids` to identify if an `id` has already been done
-        next report_skipped_node(
+        next report_skipped_node(
+          child,
+          parent,
+          done_ids,
+          level,
+          level_ids,
+          parents,
+          skipped: skipped,
+          warns: warns
+        ) if done_ids[child.id]
 
         # Fill in tracking data
         child.parent = parent
@@ -52,8 +78,26 @@ module Eco::Data::Locations::NodeBase
         node_hash.merge(yield(child)) if block_given?
         # we must register the `id` before recursing down
         done_ids[child.id] = child
+
+        children = get_children(
+          child.id,
+          parents,
+          parent: child,
+          done_ids: done_ids,
+          level: level + 1,
+          skipped: skipped,
+          warns: warns,
+          &block
+        ).tap do |desc|
+          if (nil_count = desc.count(nil)) > 0
+            log(:debug) {
+              "get_children gave #{nil_count} nil values for nodes of #{child.id}"
+            }
+          end
+        end
+
         results << node_hash.merge({
-          "nodes" =>
+          "nodes" => children.compact
         })
       end
     end
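The comment "we must register the `id` before recursing down" is the cycle guard: if a descendant points back at an ancestor, the ancestor is already in `done_ids`, so the repeat is reported and skipped instead of recursing forever. A toy standalone illustration of the failure mode it protects against (not the gem's code):

# Toy illustration: two nodes that claim each other as parent.
# Marking a node visited *before* recursing is what breaks the loop.
CycNode = Struct.new(:id, :parent_id)
nodes = [CycNode.new("a", "b"), CycNode.new("b", "a")]
parents = nodes.group_by(&:parent_id)

def walk(parent_id, parents, visited = {})
  (parents[parent_id] || []).filter_map do |node|
    next warn("skipping repeated node '#{node.id}'") if visited[node.id]
    visited[node.id] = true  # register before recursing down
    { "id" => node.id, "nodes" => walk(node.id, parents, visited) }
  end
end

pp walk("a", parents)
# warns about the cycle back to "a" and terminates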
@@ -70,8 +114,138 @@ module Eco::Data::Locations::NodeBase
       "#{" " * level}"
     end
 
-    #
-
+    # Method to ensure the results are consistent
+    # @param skipped [Array<NodePlain>] those skipped because repeated
+    #   1. It will add children of them that were skipped. This won't clash with unlinked nodes
+    #      because otherwise they would be part of `done_ids` anyway.
+    # @param unlinked_trees [Array<Hash>] by excluding those done and skipped,
+    #   it will treeify the unlinked nodes (the exclusion applies to `parents_hash`)
+    def check_results(tree, nodes, parents, done_ids: {}, skipped: [], unlinked_trees: [], warns: [], &block)
+      update_skipped(skipped, parents, done_ids: done_ids) unless skipped.empty?
+
+      if done_ids.count != nodes.count
+        tracked_nodes = done_ids.values
+        untracked_nodes = nodes - tracked_nodes - skipped
+        # skipped keys is inherent, as they were excluded because of id clash with done_ids
+        unlinked_parent_ids = (parents.keys - done_ids.keys).compact
+
+        msg = []
+
+        # The reason of missing nodes in the output tree is unknown!
+        if skipped.empty? && unlinked_parent_ids.empty?
+          msg << "BUG in this library (open issue with maintainers)."
+          msg << "There were no skipped nodes nor missing referred parents, and yet:"
+          msg << "  • the tree nodes count: #{done_ids.count} ..."
+          msg << "  • doesn't match the original nodes count: #{nodes.count}"
+          raise msg.join("\n")
+        end
+
+        unless unlinked_parent_ids.empty?
+          msg << "There are #{unlinked_parent_ids.count} referred parent_id's NOT linked to the root:"
+          msg << "  • total_nodes: #{nodes.count}"
+          msg << "  • tracked_nodes: #{tracked_nodes.count}"
+          msg << "  • untracked_nodes: #{untracked_nodes.count}"
+          msg << "  • unlinked_parents: #{unlinked_parent_ids.count}"
+          msg << "  • skipped (repeated) nodes: #{skipped.count}" unless skipped.empty?
+
+          unlinked_nodes = nodes - skipped
+          unlinked_parents = parents.slice(*unlinked_parent_ids) # doesn't have skipped ones
+
+          residual_skipped = []
+          unlinked_trees.concat \
+            get_unlinked_trees(
+              unlinked_nodes,
+              unlinked_parents,
+              done_ids: done_ids,
+              skipped: residual_skipped,
+              warns: warns,
+              &block
+            )
+
+          update_skipped(skipped, parents, with: residual_skipped, done_ids: done_ids) unless residual_skipped.empty?
+
+          tracked_nodes = done_ids.values
+          untracked_nodes = nodes - tracked_nodes - skipped
+          unlinked_parent_ids = (parents.keys - done_ids.keys).compact
+
+          msg << "After treeifying via the unlinked_parents:"
+          msg << "  • total_nodes: #{nodes.count}"
+          msg << "  • tracked_nodes: #{tracked_nodes.count}"
+          msg << "  • untracked_nodes: #{untracked_nodes.count}"
+          msg << "  • unlinked_parents: #{unlinked_parent_ids.count}"
+          msg << "  • skipped in this step: #{residual_skipped.count}"
+        end
+
+        msg << "  • total skipped (repeated) nodes: #{skipped.count} !!" unless skipped.empty?
+        warns << msg.join("\n")
+        nil
+      end
+    end
+
+    # Treeifies the unlinked nodes by scoping existing parent ids.
+    def get_unlinked_trees(nodes, parents, done_ids: {}, skipped: [], warns: [], &block)
+      node_ids = nodes.map(&:id)
+      parent_ids = parents.keys & node_ids
+      missing_parent_ids = parents.keys - parent_ids
+      missing_parents = parents.slice(*missing_parent_ids)
+      warns << "  • missing_parents: #{missing_parents.count}"
+      nil_parent_nodes = missing_parents.each_with_object([]) do |(id, nodes), mem|
+        nodes.each {|node| node.parent_id = nil}
+        mem.concat(nodes)
+      end
+      rest_parents = parents.slice(*parent_ids).merge({
+        nil => nil_parent_nodes
+      })
+      get_children(nil, rest_parents, done_ids: done_ids, skipped: skipped, warns: warns, &block)
+    end
+
+    # Same as `get_children` but not performing checks and with
+    # option to retrieve the source nodes (rather than parsing to `Hash`).
+    # @note serves the purpose to identify what linked children got inherently
+    #   skipped, because their parent was skipped.
+    def get_tree_nodes_raw(node_id, parents, src_plain: true, &block)
+      (parents[node_id] ||= []).each_with_object([]) do |child, results|
+        unless src_plain
+          node_hash = {
+            "id" => child.id,
+            "name" => child.name,
+            "parent_id" => node_id
+          }
+          node_hash.merge(yield(child)) if block_given?
+        end
+
+        descendants = get_tree_nodes_raw(child.id, parents, src_plain: src_plain, &block).tap do |desc|
+          if (nil_count = desc.count(nil)) > 0
+            puts "get_tree_nodes_raw gave #{nil_count} nil values for nodes of #{child.id}"
+          end
+        end
+
+        if src_plain
+          results.concat(descendants)
+        else
+          results << node_hash.merge({
+            "nodes" => descendants.compact
+          })
+        end
+      end
+    end
+
+    # It goes through the `with` skipped nodes, and adds them to the `skipped` ones
+    # by including their not tracked/done/included children.
+    def update_skipped(skipped, parents, with: skipped, done_ids: {})
+      raw_skipped_children = with.each_with_object([]) do |node, mem|
+        mem << node
+        mem.concat get_tree_nodes_raw(node.id, parents)
+      end.uniq
+      skipped_children = raw_skipped_children - done_ids.values
+      skipped.concat(skipped_children).uniq!
+      skipped
+    end
+
+    # Given a skipped `node` (repeated `id`), it gives different warnings,
+    # provided that the context in which the double-up `id` happened is identified.
+    def report_skipped_node(node, parent, done_ids, level, level_ids, parents, skipped: [], warns: [])
+      skipped << node
       lev = level + 1
       done_node = done_ids[node.id]
       prev_parent = node.parent
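`get_unlinked_trees` recovers orphaned subtrees by re-rooting: nodes whose `parent_id` refers to a node that is not in the set get their `parent_id` nulled, so a second `get_children(nil, ...)` pass picks them up as roots. A standalone sketch of just that re-rooting step (assumed minimal Struct, not the gem's classes):

# Standalone sketch: null out parent_ids that point at missing nodes,
# so a follow-up tree-building pass treats those nodes as roots.
OrphanNode = Struct.new(:id, :parent_id)

def reroot_orphans(nodes)
  ids = nodes.map(&:id)
  nodes.each do |node|
    node.parent_id = nil if node.parent_id && !ids.include?(node.parent_id)
  end
end

orphan = OrphanNode.new("x.1", "missing-parent")
reroot_orphans([orphan])
orphan.parent_id  # => nil, now a root for the next pass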
@@ -84,6 +258,9 @@ module Eco::Data::Locations::NodeBase
       row_str = row_num ? "(Row: #{row_num}) " : ''
       node_str = "#{row_str}Node '#{node.id}' #{level_msg(lev)} (#{parent_msg(parent)})"
 
+      msg = []
+      msg << "#{indent(level)}Skipping #{node_str}."
+
       # Implementation integrity guard
       # => as we don't register in `done_ids` those that are skipped,
       #    when a `node` has already a tracked `parent` or `level`,
@@ -114,18 +291,15 @@ module Eco::Data::Locations::NodeBase
       cyclic = multi_parent && done_node == node
       double_up = node_dup || lev_dup
 
-      msg = []
-      msg << "#{indent(level)}WARNING: Skipping #{node_str}."
-
       if cyclic
-        str = "#{indent(level
+        str = "#{indent(level + 1)}Cyclic definition. By skipping the node, "
         str << "it will remain as #{parent_msg(done_node.parent)} (#{level_msg(prev_level)})."
         msg << str
       end
 
       if double_up
-        str = "#{indent(level
-        str << "as #{parent_msg(
+        str = "#{indent(level + 1)}Node ID was already tracked as #{level_msg(done_node.tracked_level)}, "
+        str << "as #{parent_msg(done_node.parent)} "
         str << "(same parent)." if lev_dup
         str << "(different parent)." if multi_parent
         msg << str
@@ -133,18 +307,19 @@ module Eco::Data::Locations::NodeBase
 
       unless cyclic || double_up
         str = "Integrity issue in Treeify. "
-        str
+        str << "Skipping is only applicable to double_ups or cyclic nodes."
         str << "\n  • #{node_str}."
         raise str
       end
 
-
-      str = "#{indent(level
-      str << children.map {|
+      unless (children = parents[node.id] || []).empty?
+        str = "#{indent(level + 1)}Immediate children of skipped node (will probably be missing): "
+        str << children.map {|ch| "'#{ch.id}'"}.join(", ")
        msg << str
      end
 
-
+      warns << msg.join("\n")
+      nil
    end
  end
 end
data/lib/eco/data/locations/node_plain/parsing.rb
@@ -20,7 +20,7 @@ class Eco::Data::Locations::NodePlain
   end
 
   # It builds each NodePlain from the input csv.
-  # @param `csv` [CSV::Table]
+  # @param `csv` [CSV::Table] with specific headers
   # @return [Array<NodePlain>]
   def nodes_from_csv(csv)
     raise ArgumentError, "Expecting CSV::Table. Given: #{csv.class}" unless csv.is_a?(::CSV::Table)
data/lib/eco/data/locations/node_plain/serial.rb
@@ -6,7 +6,7 @@ class Eco::Data::Locations::NodePlain
   def serializer
     @serializer ||= proc do |node|
       raise "Expecting NodePlain. Given: #{node.class}" unless node.is_a?(Eco::Data::Locations::NodePlain)
-      keys = Eco::Data::Locations::NodePlain::
+      keys = Eco::Data::Locations::NodePlain::ALL_ATTRS
       node.to_h(*keys)
     end
   end
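The serializer fixed in the last hunk is a class-level proc that converts a node into a `Hash` of a fixed attribute list, and it is what `treeify` falls back to when no block is given. A hypothetical mirror of the pattern (`PlainNode` and its `ATTRS` are invented here; only `NodePlain::ALL_ATTRS` and `to_h` come from the diff):

# Hypothetical mirror of the serializer pattern from serial.rb.
class PlainNode
  ATTRS = [:id, :name, :parent_id].freeze
  attr_accessor(*ATTRS)

  def initialize(id, name, parent_id = nil)
    @id = id
    @name = name
    @parent_id = parent_id
  end

  # Serialize the requested attributes into a string-keyed Hash.
  def to_h(*keys)
    keys.to_h { |key| [key.to_s, public_send(key)] }
  end

  def self.serializer
    @serializer ||= proc do |node|
      raise "Expecting PlainNode. Given: #{node.class}" unless node.is_a?(PlainNode)

      node.to_h(*ATTRS)
    end
  end
end

PlainNode.serializer.call(PlainNode.new("1.1", "Child", "1"))
# => {"id"=>"1.1", "name"=>"Child", "parent_id"=>"1"}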