eco-helpers 2.5.2 → 2.5.3

Files changed (104)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +46 -1
  3. data/eco-helpers.gemspec +2 -2
  4. data/lib/eco/api/common/loaders/use_case.rb +0 -2
  5. data/lib/eco/api/common/people/person_entry_attribute_mapper.rb +0 -2
  6. data/lib/eco/api/common/session/logger.rb +22 -77
  7. data/lib/eco/api/microcases/with_each.rb +0 -1
  8. data/lib/eco/api/organization/tag_tree.rb +64 -15
  9. data/lib/eco/api/session/config/tagtree.rb +32 -10
  10. data/lib/eco/api/session/config/workflow.rb +0 -1
  11. data/lib/eco/api/session/config.rb +6 -2
  12. data/lib/eco/api/session.rb +2 -2
  13. data/lib/eco/api/usecases/default_cases/abstract_policygroup_abilities_case.rb +2 -3
  14. data/lib/eco/api/usecases/default_cases/analyse_people_case.rb +2 -3
  15. data/lib/eco/api/usecases/default_cases/append_usergroups_case.rb +0 -1
  16. data/lib/eco/api/usecases/default_cases/change_email_case.rb +1 -2
  17. data/lib/eco/api/usecases/default_cases/clean_unknown_tags_case.rb +0 -5
  18. data/lib/eco/api/usecases/default_cases/clear_abilities_case.rb +2 -2
  19. data/lib/eco/api/usecases/default_cases/codes_to_tags_case.rb +5 -7
  20. data/lib/eco/api/usecases/default_cases/create_case.rb +0 -5
  21. data/lib/eco/api/usecases/default_cases/create_details_case.rb +0 -5
  22. data/lib/eco/api/usecases/default_cases/create_details_with_supervisor_case.rb +0 -5
  23. data/lib/eco/api/usecases/default_cases/csv_to_tree_case/helper.rb +1 -1
  24. data/lib/eco/api/usecases/default_cases/csv_to_tree_case.rb +0 -4
  25. data/lib/eco/api/usecases/default_cases/delete_sync_case.rb +2 -4
  26. data/lib/eco/api/usecases/default_cases/delete_trans_case.rb +2 -3
  27. data/lib/eco/api/usecases/default_cases/email_as_id_case.rb +0 -1
  28. data/lib/eco/api/usecases/default_cases/entries_to_csv_case.rb +0 -4
  29. data/lib/eco/api/usecases/default_cases/hris_case.rb +2 -3
  30. data/lib/eco/api/usecases/default_cases/new_email_case.rb +0 -2
  31. data/lib/eco/api/usecases/default_cases/new_id_case.rb +0 -2
  32. data/lib/eco/api/usecases/default_cases/org_data_convert_case.rb +0 -5
  33. data/lib/eco/api/usecases/default_cases/refresh_case.rb +0 -1
  34. data/lib/eco/api/usecases/default_cases/reinvite_sync_case.rb +1 -3
  35. data/lib/eco/api/usecases/default_cases/reinvite_trans_case.rb +2 -2
  36. data/lib/eco/api/usecases/default_cases/remove_account_sync_case.rb +1 -2
  37. data/lib/eco/api/usecases/default_cases/remove_account_trans_case.rb +2 -3
  38. data/lib/eco/api/usecases/default_cases/reset_landing_page_case.rb +1 -7
  39. data/lib/eco/api/usecases/default_cases/restore_db_case.rb +0 -10
  40. data/lib/eco/api/usecases/default_cases/set_default_tag_case.rb +0 -1
  41. data/lib/eco/api/usecases/default_cases/set_supervisor_case.rb +0 -1
  42. data/lib/eco/api/usecases/default_cases/supers_cyclic_identify_case.rb +2 -3
  43. data/lib/eco/api/usecases/default_cases/supers_hierarchy_case.rb +2 -3
  44. data/lib/eco/api/usecases/default_cases/switch_supervisor_case.rb +2 -4
  45. data/lib/eco/api/usecases/default_cases/tagtree_case.rb +0 -2
  46. data/lib/eco/api/usecases/default_cases/to_csv_case.rb +4 -5
  47. data/lib/eco/api/usecases/default_cases/to_csv_detailed_case.rb +0 -1
  48. data/lib/eco/api/usecases/default_cases/transfer_account_case.rb +0 -2
  49. data/lib/eco/api/usecases/default_cases/update_case.rb +0 -2
  50. data/lib/eco/api/usecases/default_cases/update_details_case.rb +0 -2
  51. data/lib/eco/api/usecases/default_cases/upsert_case.rb +0 -4
  52. data/lib/eco/api/usecases/graphql/base.rb +6 -18
  53. data/lib/eco/api/usecases/graphql/helpers/base.rb +31 -0
  54. data/lib/eco/api/usecases/graphql/helpers/location/base.rb +87 -0
  55. data/lib/eco/api/usecases/graphql/helpers/location/command/result.rb +69 -0
  56. data/lib/eco/api/usecases/graphql/helpers/location/command/results.rb +126 -0
  57. data/lib/eco/api/usecases/graphql/helpers/location/command.rb +84 -0
  58. data/lib/eco/api/usecases/graphql/helpers/location.rb +7 -0
  59. data/lib/eco/api/usecases/graphql/helpers.rb +2 -1
  60. data/lib/eco/api/usecases/graphql/samples/location/command/dsl.rb +54 -0
  61. data/lib/eco/api/usecases/graphql/samples/location/command/results.rb +125 -0
  62. data/lib/eco/api/usecases/graphql/samples/location/command.rb +10 -0
  63. data/lib/eco/api/usecases/graphql/samples/location/dsl.rb +6 -0
  64. data/lib/eco/api/usecases/graphql/samples/location.rb +10 -0
  65. data/lib/eco/api/usecases/graphql/samples.rb +6 -0
  66. data/lib/eco/api/usecases/graphql.rb +2 -1
  67. data/lib/eco/api/usecases/ooze_cases/export_register_case.rb +0 -1
  68. data/lib/eco/api/usecases/ooze_samples/ooze_base_case.rb +0 -2
  69. data/lib/eco/api/usecases/ooze_samples/register_migration_case.rb +0 -2
  70. data/lib/eco/api/usecases/use_case.rb +2 -2
  71. data/lib/eco/cli/config/default/workflow.rb +2 -4
  72. data/lib/eco/cli/scripting/args_helpers.rb +0 -2
  73. data/lib/eco/csv/table.rb +39 -3
  74. data/lib/eco/data/files/helpers.rb +1 -0
  75. data/lib/eco/data/hashes/array_diff.rb +12 -6
  76. data/lib/eco/data/hashes/diff_result.rb +1 -2
  77. data/lib/eco/data/locations/convert.rb +92 -0
  78. data/lib/eco/data/locations/dsl.rb +35 -0
  79. data/lib/eco/data/locations/node_base/builder.rb +26 -0
  80. data/lib/eco/data/locations/node_base/csv_convert.rb +57 -0
  81. data/lib/eco/data/locations/node_base/parsing.rb +30 -0
  82. data/lib/eco/data/locations/node_base/serial.rb +26 -0
  83. data/lib/eco/data/locations/node_base/tag_validations.rb +52 -0
  84. data/lib/eco/data/locations/node_base/treeify.rb +150 -0
  85. data/lib/eco/data/locations/node_base.rb +48 -0
  86. data/lib/eco/data/locations/node_level/builder.rb +6 -0
  87. data/lib/eco/data/locations/node_level/cleaner.rb +74 -0
  88. data/lib/eco/data/locations/node_level/parsing.rb +63 -0
  89. data/lib/eco/data/locations/node_level/serial.rb +37 -0
  90. data/lib/eco/data/locations/node_level.rb +156 -0
  91. data/lib/eco/data/locations/node_plain/builder.rb +6 -0
  92. data/lib/eco/data/locations/node_plain/parsing.rb +36 -0
  93. data/lib/eco/data/locations/node_plain/serial.rb +14 -0
  94. data/lib/eco/data/locations/node_plain.rb +34 -0
  95. data/lib/eco/data/locations.rb +12 -0
  96. data/lib/eco/data.rb +1 -0
  97. data/lib/eco/language/auxiliar_logger.rb +9 -1
  98. data/lib/eco/language/basic_logger.rb +74 -0
  99. data/lib/eco/language.rb +2 -1
  100. data/lib/eco/version.rb +1 -1
  101. metadata +37 -8
  102. data/lib/eco/api/usecases/default_cases/new_id_case0.rb +0 -14
  103. data/lib/eco/api/usecases/graphql/helpers/locations/commands.rb +0 -4
  104. data/lib/eco/api/usecases/graphql/helpers/locations.rb +0 -6
data/lib/eco/data/hashes/array_diff.rb
@@ -34,14 +34,15 @@ module Eco
  end
  end

+ include Eco::Language::AuxiliarLogger
+
  attr_reader :source1, :source2
  attr_reader :src_h1, :src_h2
- attr_reader :logger

  class_resolver :diff_result_class, "Eco::Data::Hash::DiffResult"

- def initialize(source1, source2, logger: ::Logger.new(IO::NULL), **kargs)
- @logger = logger
+ def initialize(source1, source2, logger: nil, **kargs)
+ @logger = logger if logger
  @options = kargs
  @source1 = source1
  @source2 = source2
@@ -84,7 +85,8 @@ module Eco
  # - It also ensures they are in their Hash form (with string keys)
  # - This will merge entries of the same source that hold the same `key` attr value (latest wins)
  def paired_sources
- keys1 = src_h1.keys; keys2 = src_h2.keys
+ keys1 = src_h1.keys
+ keys2 = src_h2.keys
  all_keys = keys1 | keys2
  all_keys.map {|key| [src_h1[key], src_h2[key]]}
  end
@@ -149,11 +151,15 @@ module Eco
  when Hash, Array, ::CSV::Row
  Eco::CSV::Table.new(content).to_array_of_hashes
  else
- logger.error("Input content 'Array' of '#{sample.class}' is not supported.")
+ log(:error) {
+ "Input content 'Array' of '#{sample.class}' is not supported."
+ }
  exit(1)
  end
  else
- logger.error("Could not obtain any data out content: '#{content.class}'")
+ log(:error) {
+ "Could not obtain any data out content: '#{content.class}'"
+ }
  exit(1)
  end
  end
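
With this change the differ no longer needs a logger injected: it falls back to the `log(level) { ... }` helper that `Eco::Language::AuxiliarLogger` provides. A hedged sketch, assuming the gem is already loaded (the `ArrayDiff` class name is inferred from the file path; the variables are placeholders):

    differ = Eco::Data::Hashes::ArrayDiff.new(rows_before, rows_after)                    # logs via AuxiliarLogger
    differ = Eco::Data::Hashes::ArrayDiff.new(rows_before, rows_after, logger: my_logger) # an explicit logger is still honoured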
data/lib/eco/data/hashes/diff_result.rb
@@ -2,11 +2,10 @@ module Eco
  module Data
  module Hashes
  class DiffResult
-
  attr_reader :key
  attr_reader :src1, :src2

- # @param [Array<String>, sym]
+ # @param compare [Array<String>, sym]
  # - `:all` compares the matching attrs between both hashes only
  def initialize(src1, src2, key:, compare: :all, case_sensitive: false)
  @key = key
data/lib/eco/data/locations/convert.rb
@@ -0,0 +1,92 @@
+ module Eco::Data::Locations
+ module Convert
+ include Eco::Language::AuxiliarLogger
+
+ # Helper to open a csv
+ # @note this is a shortcut helper.
+ # @param filename [String] the csv file.
+ # @return [Eco::CSV::Table]
+ def csv_from(filename, encoding: 'utf-8')
+ raise ArgumentError, "Expecting String filename. Given: #{filename.class}" unless filename.is_a?(String)
+ raise "Missing #{filename}" unless File.exists?(filename)
+ Eco::CSV.read(filename, encoding: encoding)
+ rescue CSV::MalformedCSVError => e
+ if match = e.message.match(/line (?<line>\d+)/i)
+ log(:error) {"An encoding problem was found on line #{match[:line]}"}
+ end
+ raise
+ end
+
+ # Generic converter/helper to generate the csv data export for a hierarchical csv tree
+ # @note The steps of usage would be:
+ # 1. First **treeify** your input (i.e. `Eco::API::Organization::TagTree#as_json`,
+ # or `treeify(nodes)`)
+ # @param hash_nodes [Array<Hash>] a hierarchical tree of Hash nodes, nested via `nodes`
+ # @return [CSV::Table] ready to be made a hierarchical csv tree (i.e. out.to_csv)
+ def hash_tree_to_tree_csv(hash_nodes, out: [], done_ids: [], repeated_ids: [], level: 0)
+ lev = level + 1
+ base = empty_array(level)
+
+ hash_nodes.each_with_object(out) do |node, out|
+ if done_ids.include?(id = node["id"])
+ repeated_ids << id
+ else
+ has_offspring = (children = node["nodes"]) && !children.empty?
+ done_ids << id
+ out << (base.dup << node["id"])
+ hash_tree_to_tree_csv(node["nodes"], out: out, done_ids: done_ids, repeated_ids: repeated_ids, level: lev)
+ end
+ end.tap do |out|
+ if level == 0
+ report_repeated_node_ids(repeated_ids)
+ return Eco::CSV::Table.new(normalize_arrays(out))
+ end
+ end
+ end
+
+ # It normalizes the size of the arrays to the max size among the arrays
+ # @param rows [Array<Array>] where arrays may not have the same length
+ # @return [Array<Array>] where all arrays have the same length
+ def normalize_arrays(rows)
+ max_row = rows.max {|a, b| a.length <=> b.length}
+ holder = empty_array(max_row.length)
+ rows.map do |row|
+ row.dup.concat(holder[0..-(row.length+1)])
+ end
+ end
+
+ # @param count [Integer] number of positions of the new array
+ # @return [Array<NilClass>] with `count` positions.
+ def empty_array(count)
+ Array.new(count, nil)
+ end
+
+ # @note
+ # 1. Initially it has as many keys as levels `count`
+ # 2. It serves the purpose to track the latest seen node
+ # for a given level, during a loop.
+ # @return [Hash] with integer level counts as keys and
+ # nodes as values.
+ def empty_level_tracker_hash(count = 11)
+ Array(1..count).zip(empty_array(count)).to_h
+ end
+
+ # It logs a message from `yield` and appends a `pretty_inspect` of the object.
+ # @note it only works where `object` is `Enumerable`
+ def log_pretty_inspect(object, lev = :info)
+ return unless object.is_a?(Enumerable)
+ return if object.empty?
+ msg = ''
+ msg << "#{yield(object)}\n" if block_given?
+ msg << object.pretty_inspect
+ log(lev) { msg }
+ end
+
+ # Prints a common message
+ def report_repeated_node_ids(repeated)
+ log_pretty_inspect(repeated, :warn) do
+ "There were #{repeated.length} repeated node ids. Only one included. These excluded:"
+ end
+ end
+ end
+ end
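
For orientation, a minimal sketch of the `Convert` helpers above, assuming the gem is loaded (the helper object and the tiny tree are made up; `hash_nodes` follows the nested-Hash shape that `hash_tree_to_tree_csv` documents):

    helper = Object.new.extend(Eco::Data::Locations::Convert)

    hash_nodes = [
      {"id" => "NZ", "name" => "New Zealand", "nodes" => [
        {"id" => "AKL", "name" => "Auckland", "nodes" => []}
      ]}
    ]

    table = helper.hash_tree_to_tree_csv(hash_nodes) # Eco::CSV::Table, one column per tree level
    File.write("tree.csv", table.to_csv)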
data/lib/eco/data/locations/dsl.rb
@@ -0,0 +1,35 @@
+ module Eco::Data::Locations
+ # It allows you to create your own helper
+ # @example of usage:
+ # module LocHelp
+ # include Eco::Data::Locations::DSL
+ # end
+ #
+ # nodes_list = LocHelp.csv_nodes_from(csv_list_filename)
+ # csv_tree = LocHelp.nodes_to_csv_tree(nodes_list)
+ # File.open(output_filename, 'w') { |fd| fd.write(csv_tree.to_csv) }
+ #
+ # @example of usage:
+ # class Foo
+ # include Eco::Data::Locations::DSL
+ #
+ # def csv_tree_to_csv_list(csv_tree_filename)
+ # csv_list = nodes_to_csv_list(csv_nodes_from(csv_tree_filename))
+ # File.open(output_filename, 'w') { |fd| fd.write(csv_list.to_csv) }
+ # end
+ #
+ # def live_tree_to_csv_tree(org_tagtree)
+ # File.open(output_filename, 'w') { |fd| fd.write(csv_tree(org_tagtree).to_csv) }
+ # end
+ # end
+ #
+ module DSL
+ class << self
+ def included(base)
+ super(base)
+ base.extend Eco::Data::Locations::NodeBase::Builder
+ base.send :include, Eco::Data::Locations::NodeBase::Builder
+ end
+ end
+ end
+ end
data/lib/eco/data/locations/node_base/builder.rb
@@ -0,0 +1,26 @@
+ module Eco::Data::Locations::NodeBase
+ module Builder
+ include Eco::Data::Locations::NodeBase::Parsing
+ include Eco::Data::Locations::NodeBase::Serial
+ include Eco::Data::Locations::NodeBase::CsvConvert
+ include Eco::Data::Locations::NodeBase::Treeify
+
+ # @param data [CSV::Table, NodeBase, Array<NodeBase>]
+ # @return [NodeBase::Class] the Node class we can use.
+ def node_class(data)
+ case data
+ when ::CSV::Table
+ return Eco::Data::Locations::NodePlain if Eco::Data::Locations::NodePlain.csv_matches_format?(data)
+ return Eco::Data::Locations::NodeLevel if Eco::Data::Locations::NodeLevel.csv_matches_format?(data)
+ when Array
+ return nil unless sample = data.first
+ node_class(sample)
+ when Eco::Data::Locations::NodeBase
+ return nil unless data.class < Eco::Data::Locations::NodeBase
+ data.class
+ else
+ raise ArgumentError, "Expecting CSV::Table. Given: #{data.class}" unless data.is_a?(::CSV::Table)
+ end
+ end
+ end
+ end
data/lib/eco/data/locations/node_base/csv_convert.rb
@@ -0,0 +1,57 @@
+ module Eco::Data::Locations::NodeBase
+ module CsvConvert
+ include Eco::Data::Locations::NodeBase::Parsing
+
+ def tree_class
+ Eco::API::Organization::TagTree
+ end
+
+ # @yield [Node] optional custom serializer
+ # @yieldreturn [Hash] the serialized Node
+ # @param value [CSV::Table, Eco::API::Organization::TagTree]
+ # @return [Array<Hash>] a plain list of hash nodes
+ def hash_list(value, &block)
+ return hash_list(org_tree(value)) if value.is_a?(::CSV::Table)
+ return value.as_nodes_json if value.is_a?(tree_class)
+ raise ArgumentError, "Expecting Eco::API::Organization::TagTree or CSV::Table. Given: #{value.class}"
+ end
+
+ # @yield [Node] optional custom serializer
+ # @yieldreturn [Hash] the serialized Node
+ # @param value [CSV::Table, Eco::API::Organization::TagTree]
+ # @return [Array<Hash>] a hierarchical tree of hash nodes,
+ # ready to be parsed as an organization tagtree
+ def hash_tree(value, &block)
+ return hash_tree_from_csv(value, &block) if value.is_a?(::CSV::Table)
+ return value.as_json if value.is_a?(tree_class)
+ raise ArgumentError, "Expecting Eco::API::Organization::TagTree or CSV::Table. Given: #{value.class}"
+ end
+
+ # @yield [Node] optional custom serializer
+ # @yieldreturn [Hash] the serialized Node
+ # @param value [CSV::Table, Eco::API::Organization::TagTree]
+ # @return [Eco::API::Organization::TagTree]
+ def org_tree(value, &block)
+ return tree_class.new(hash_tree(value), &block) if value.is_a?(::CSV::Table)
+ return tree_class.new(value.as_json) if value.is_a?(tree_class)
+ raise ArgumentError, "Expecting Eco::API::Organization::TagTree or CSV::Table. Given: #{value.class}"
+ end
+
+ # @yield [Node] optional custom serializer
+ # @yieldreturn [Hash] the serialized Node
+ # @return [CSV::Table] a table with L1 to Ln columns ready for dump to csv
+ def csv_tree(value, encoding: 'utf-8', &block)
+ Eco::CSV::Table.new(hash_tree_to_tree_csv(hash_tree(value, &block)))
+ end
+
+ # @note it just converts to an organizational tagtree and uses a helper method.
+ # @yield [Node] optional custom serializer
+ # @yieldreturn [Hash] the serialized Node
+ # @param value [CSV::Table, Eco::API::Organization::TagTree]
+ # @return [CSV::Table] a table with a list of nodes and their parents
+ def csv_list(value, &block)
+ value = org_tree(value, &block) unless value.is_a?(tree_class)
+ Eco::CSV::Table.new(hash_list(value))
+ end
+ end
+ end
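
Together with the DSL module above, these converters can be driven from a small mixin; a hedged sketch (the file name is a placeholder, and the csv must match one of the supported layouts):

    module LocHelp
      include Eco::Data::Locations::DSL   # extends LocHelp with NodeBase::Builder, hence CsvConvert
    end

    csv    = LocHelp.csv_from("locations.csv")  # tree layout (L1..Ln columns) or node-list layout
    tree   = LocHelp.org_tree(csv)              # Eco::API::Organization::TagTree
    list   = LocHelp.csv_list(tree)             # CSV::Table: one row per node, with its parent
    levels = LocHelp.csv_tree(tree)             # CSV::Table: one column per level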
data/lib/eco/data/locations/node_base/parsing.rb
@@ -0,0 +1,30 @@
+ module Eco::Data::Locations::NodeBase
+ module Parsing
+ include Eco::Data::Locations::Convert
+ include Eco::Data::Locations::NodeBase::Treeify
+
+ # @param csv [CSV::Table]
+ # @return [Array<NodePlain>, Array<NodeLevel>] with integrity issues resolved.
+ def nodes_from_csv(csv)
+ raise ArgumentError, "Expecting CSV::Table. Given: #{csv.class}" unless csv.is_a?(::CSV::Table)
+ return Eco::Data::Locations::NodePlain.nodes_from_csv(csv) if Eco::Data::Locations::NodePlain.csv_matches_format?(csv)
+ return Eco::Data::Locations::NodeLevel.nodes_from_csv(csv) if Eco::Data::Locations::NodeLevel.csv_matches_format?(csv)
+ raise ArgumentError, "The input csv does not have the required format to read a locations structure."
+ end
+
+ # @yield [Node] optional custom serializer
+ # @yieldreturn [Hash] the serialized Node
+ # @return [Array<Hash>] a hierarchical tree of nested Hashes via `nodes` key.
+ def hash_tree_from_csv(csv, &block)
+ raise ArgumentError, "Expecting CSV::Table. Given: #{csv.class}" unless csv.is_a?(::CSV::Table)
+ treeify(nodes_from_csv(csv), &block)
+ end
+
+ # Shortcut to obtain a list of parsed nodes out of a file
+ # @param filename [String] the csv file.
+ # @return [Array<NodePlain>, Array<NodeLevel>] with integrity issues resolved.
+ def csv_nodes_from(filename, encoding: 'utf-8')
+ nodes_from_csv(csv_from(filename, encoding: encoding))
+ end
+ end
+ end
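
A hedged sketch of the parsing entry points (the file path is a placeholder; the node class, NodePlain or NodeLevel, is picked from the csv layout):

    parser = Object.new.extend(Eco::Data::Locations::NodeBase::Parsing)

    nodes     = parser.csv_nodes_from("locations.csv")                     # Array<NodePlain> or Array<NodeLevel>
    hash_tree = parser.hash_tree_from_csv(Eco::CSV.read("locations.csv"))  # nested Hashes via the "nodes" key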
data/lib/eco/data/locations/node_base/serial.rb
@@ -0,0 +1,26 @@
+ module Eco::Data::Locations::NodeBase
+ module Serial
+ include Eco::Data::Locations::NodeBase::Treeify
+ include Eco::Data::Locations::Convert
+
+ # @param item [Eco::Data::Locations::NodeBase] an instance object of a child class.
+ # @return [Proc] the serializer to be used.
+ def serializer(item)
+ raise "Expecting a child of NodeBase. Given: #{item.class}" unless item.class < Eco::Data::Locations::NodeBase
+ item.serializer
+ end
+
+ # @param nodes [Array<NodeBase>]
+ # @return [CSV::Table] ready to dump into a hierarchical **csv** (columns are tree levels)
+ def nodes_to_csv_tree(nodes)
+ hash_tree_to_tree_csv(treeify(nodes))
+ end
+
+ # @param nodes [Array<NodeBase>]
+ # @return [CSV::Table] ready to dump into a nodes list **csv** (rows are nodes; a column holds `parent_id`)
+ def nodes_to_csv_list(nodes)
+ tree = Eco::API::Organization::TagTree.new(treeify(nodes))
+ Eco::CSV::Table.new(tree.as_nodes_json)
+ end
+ end
+ end
data/lib/eco/data/locations/node_base/tag_validations.rb
@@ -0,0 +1,52 @@
+ module Eco::Data::Locations::NodeBase
+ module TagValidations
+ ALLOWED_CHARACTERS = "A-Za-z0-9 &_'\/.-"
+ VALID_TAG_REGEX = /^[#{ALLOWED_CHARACTERS}]+$/
+ INVALID_TAG_REGEX = /[^#{ALLOWED_CHARACTERS}]+/
+ VALID_TAG_CHARS = /[#{ALLOWED_CHARACTERS}]+/
+ DOUBLE_BLANKS = /\s\s+/
+
+ def clean_id(str)
+ blanks_x2 = has_double_blanks?(str)
+ partial = replace_not_allowed(str)
+ remove_double_blanks(partial).tap do |result|
+ next if invalid_warned?
+ if partial != str
+ invalid_chars = identify_invalid_characters(str)
+ puts "• (Row: #{self.row_num}) Invalid characters _#{invalid_chars}_ (removed): '#{str}' (converted to '#{result}')"
+ elsif blanks_x2
+ puts "• (Row: #{self.row_num}) Double blanks (removed): '#{str}' (converted to '#{result}')"
+ end
+ invalid_warned!
+ end
+ end
+
+ def invalid_warned?
+ @invalid_warned ||= false
+ end
+
+ def invalid_warned!
+ @invalid_warned = true
+ end
+
+ def has_double_blanks?(str)
+ return false if str.nil?
+ str.match(DOUBLE_BLANKS)
+ end
+
+ def remove_double_blanks(str)
+ return nil if str.nil?
+ str.gsub(DOUBLE_BLANKS, ' ').strip
+ end
+
+ def replace_not_allowed(str)
+ return nil if str.nil?
+ return str if str.match(VALID_TAG_REGEX)
+ str.gsub(INVALID_TAG_REGEX, ' ')
+ end
+
+ def identify_invalid_characters(str)
+ str.gsub(VALID_TAG_CHARS, '')
+ end
+ end
+ end
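
A small illustration of `clean_id` (the receiver is a stand-in; real node classes also expose `row_num`, which the warning message uses):

    node = Object.new.extend(Eco::Data::Locations::NodeBase::TagValidations)
    def node.row_num; 7; end

    node.clean_id("Sales  &  Marketing*")
    # => "Sales & Marketing"
    # and prints once:
    # • (Row: 7) Invalid characters _*_ (removed): 'Sales  &  Marketing*' (converted to 'Sales & Marketing')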
data/lib/eco/data/locations/node_base/treeify.rb
@@ -0,0 +1,150 @@
+ module Eco::Data::Locations::NodeBase
+ # Generic treeifier
+ # @note expects nodes to have these properties:
+ # 1. `id`, `name` and `parentId`
+ # 2. `parent`
+ # 3. `tracked_level`
+ module Treeify
+ include Eco::Language::AuxiliarLogger
+
+ # @note if block is not given, it auto-detects the `serializer` **block**.
+ # @yield [NodeBase] for each included node
+ # @yieldreturn [Hash] custom hash model when treeifying (allows to set more keys/properties).
+ # @param nodes [Array<NodeBase>] list of nodes
+ # @return [Array<Hash>] a hierarchical tree of nested Hashes via `nodes` key.
+ def treeify(nodes, &block)
+ return [] if nodes.empty?
+ block ||= nodes.first.class.serializer
+ get_children(nil, parents_hash(nodes), &block)
+ end
+
+ private
+
+ def parents_hash(nodes)
+ nodes.each_with_object({}) do |node, parents|
+ (parents[node.parentId] ||= []).push(node)
+ end
+ end
+
+ # @note
+ # 1. It tracks the `level` where nodes are discovered
+ # 2. If the node had already a tracked level, it warns and keeps the previous level
+ # 3. The above can translate into some
+ # @yield [node]
+ # @yieldreturn [Hash] custom hash model when treeifying
+ def get_children(node_id, parents, parent: nil, done_ids: {}, level: 0, &block)
+ level_ids = []
+ (parents[node_id] ||= []).each_with_object([]) do |child, results|
+ # Skipping done id. Add proper warnings...
+ # => rely on `done_ids` to identify if an `id` has already been done
+ next report_skipped_node(child, parent, done_ids, level, level_ids, parents) if done_ids[child.id]
+
+ # Fill in tracking data
+ child.parent = parent
+ child.tracked_level = level + 1
+ level_ids << child.id
+
+ node_hash = {
+ "id" => child.id,
+ "name" => child.name,
+ "parent_id" => node_id
+ }
+ node_hash.merge(yield(child)) if block_given?
+ # we must register the `id` before recursing down
+ done_ids[child.id] = child
+ results << node_hash.merge({
+ "nodes" => get_children(child.id, parents, parent: child, done_ids: done_ids, level: level + 1, &block).compact
+ })
+ end
+ end
+
+ def parent_msg(parent)
+ parent ? "child of '#{parent.id}'" : "top level"
+ end
+
+ def level_msg(level)
+ "at lev: #{level}"
+ end
+
+ def indent(level)
+ "#{" " * level}"
+ end
+
+ # Gives different warnings, depending on the case
+ def report_skipped_node(node, parent, done_ids, level, level_ids, parents)
+ lev = level + 1
+ done_node = done_ids[node.id]
+ prev_parent = node.parent
+ prev_level = node.tracked_level
+ node_dup = done_node && (done_node != node)
+ lev_dup = level_ids.include?(node.id)
+ multi_parent = (!prev_parent == !!parent) || (prev_parent && (prev_parent.id != parent.id))
+
+ row_num = node.respond_to?(:row_num) ? node.row_num : nil
+ row_str = row_num ? "(Row: #{row_num}) " : ''
+ node_str = "#{row_str}Node '#{node.id}' #{level_msg(lev)} (#{parent_msg(parent)})"
+
+ # Implementation integrity guard
+ # => as we don't register in `done_ids` those that are skipped,
+ # when a `node` has already a tracked `parent` or `level`,
+ # it should not happen that the `node.id` retrieves a different node in `node_ids`.
+ if (prev_parent || prev_level) && node_dup # && !done_node
+ str = "Integrity issue in Treeify. "
+ str << "A Node with tracked level or parent should be present in done_ids, but it isn't."
+ str << "\n • #{node_str}."
+ raise str
+ end
+ # From here on, do NOT expect `node_dup` where `node` has tracked `parent` or `level`.
+
+ # Implementation integrity guard
+ # => as `level_ids` only relates to the current `parent`,
+ # and as `done_ids` don't get those skipped,
+ # when we get an ID double-up in `level_ids`,
+ # there must be a `done_node` AND
+ # `done_node` can only have `tracked_level` matching the current one
+ # Moreover, they should have exactly the same parentId.
+ if lev_dup && (multi_parent || !done_node || done_node.tracked_level != lev)
+ str = "Integrity issue in Treeify. "
+ str << "A Node with ID already in level_ids should have same tracked_level as current level."
+ str << "\n • #{node_str}."
+ raise str
+ end
+ # From here on, do NOT expect `lev_up` where there isn't `done_node` or it has different level or parent.
+
+ cyclic = multi_parent && done_node == node
+ double_up = node_dup || lev_dup
+
+ msg = []
+ msg << "#{indent(level)}WARNING: Skipping #{node_str}."
+
+ if cyclic
+ str = "#{indent(level)+1}Cyclic definition. By skipping the node, "
+ str << "it will remain as #{parent_msg(done_node.parent)} (#{level_msg(prev_level)})."
+ msg << str
+ end
+
+ if double_up
+ str = "#{indent(level)+1}The node ID has been tracked as #{level_msg(done_node.tracked_level)}, "
+ str << "as #{parent_msg(node_dup.parent)} "
+ str << "(same parent)." if lev_dup
+ str << "(different parent)." if multi_parent
+ msg << str
+ end
+
+ unless cyclic || double_up
+ str = "Integrity issue in Treeify. "
+ str = "Skipping is only applicable to double_ups or cyclic nodes."
+ str << "\n • #{node_str}."
+ raise str
+ end
+
+ if children = parents[node.id]
+ str = "#{indent(level)+1}Immediate children of skipped node (will probably be missing): "
+ str << children.map {|gc| "'#{gc.id}'"}.join(", ")
+ msg << str
+ end
+
+ log(:warn) { msg.join('\n') }
+ end
+ end
+ end
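
To see the shape `treeify` produces, a hedged sketch with plain structs (real callers pass node objects whose classes provide a default `serializer`; a block is passed here because structs have none):

    Node  = Struct.new(:id, :name, :parentId, :parent, :tracked_level)
    nodes = [Node.new("NZ", "New Zealand"),
             Node.new("AKL", "Auckland", "NZ"),
             Node.new("WLG", "Wellington", "NZ")]

    helper = Object.new.extend(Eco::Data::Locations::NodeBase::Treeify)
    helper.treeify(nodes) { |n| {} }
    # => [{"id"=>"NZ", "name"=>"New Zealand", "parent_id"=>nil, "nodes"=>[
    #      {"id"=>"AKL", "name"=>"Auckland", "parent_id"=>"NZ", "nodes"=>[]},
    #      {"id"=>"WLG", "name"=>"Wellington", "parent_id"=>"NZ", "nodes"=>[]}]}]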
data/lib/eco/data/locations/node_base.rb
@@ -0,0 +1,48 @@
+ module Eco::Data::Locations
+ module NodeBase
+ require_relative 'node_base/tag_validations'
+ include Eco::Data::Locations::NodeBase::TagValidations
+
+ require_relative 'node_base/treeify'
+ require_relative 'node_base/parsing'
+ require_relative 'node_base/serial'
+ require_relative 'node_base/csv_convert'
+ require_relative 'node_base/builder'
+ extend Eco::Data::Locations::NodeBase::Builder
+
+ ALL_ATTRS = []
+
+ attr_accessor :tracked_level, :parent
+
+ def copy
+ self.class.new.set_attrs(**self.to_h)
+ end
+
+ def attr(sym)
+ self.send(sym.to_sym)
+ end
+
+ def set_attrs(**kargs)
+ kargs.each {|attr, value| set_attr(attr, value)}
+ self
+ end
+
+ def set_attr(attr, value)
+ self.send("#{attr}=", value)
+ end
+
+ def values_at(*attrs)
+ attrs.map {|a| attr(a)}
+ end
+
+ def to_h(*attrs)
+ attrs = self.class::ALL_ATTRS if attrs.empty?
+ attrs.zip(values_at(*attrs)).to_h
+ end
+
+ def slice(*attrs)
+ return {} if attrs.empty?
+ to_h(*attrs)
+ end
+ end
+ end
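
A minimal sketch of the attribute helpers in `NodeBase` (the `MyNode` class and its attributes are made up for illustration):

    class MyNode
      include Eco::Data::Locations::NodeBase
      ALL_ATTRS = %w[id name parentId].freeze
      attr_accessor :id, :name, :parentId
    end

    node = MyNode.new.set_attrs(id: "AKL", name: "Auckland")
    node.to_h          # => {"id"=>"AKL", "name"=>"Auckland", "parentId"=>nil}
    node.slice("id")   # => {"id"=>"AKL"}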
data/lib/eco/data/locations/node_level/builder.rb
@@ -0,0 +1,6 @@
+ class Eco::Data::Locations::NodeLevel
+ module Builder
+ include Eco::Data::Locations::NodeLevel::Parsing
+ include Eco::Data::Locations::NodeLevel::Serial
+ end
+ end
data/lib/eco/data/locations/node_level/cleaner.rb
@@ -0,0 +1,74 @@
+ class Eco::Data::Locations::NodeLevel
+ module Cleaner
+ include Eco::Language::AuxiliarLogger
+ include Eco::Data::Locations::Convert
+
+ # Prevents repeated node ids/tags, decouples merged levels,
+ # covers gaps (jumping multiple levels)
+ # @note
+ # 1. It first discards node ids/tags that have been already pulled (discard repeated)
+ # 2. For non repeated, it identifies if there's a gap (jump of multiple levels)
+ # 3. It covers the gap if present by decoupling merged parent(s) from the same node (see node.decouple)
+ # 4. Then, it delegates the filling in of parents to `fill_in_parents` function.
+ # @return [Array<NodeLevel>] child to parent relationships solved and no double-ups.
+ def tidy_nodes(nodes, prev_level: 0, main: true)
+ reset_trackers! if main
+ nodes.each_with_object([]) do |node, out|
+ node_id = node.id
+ if done_ids.include?(node_id)
+ repeated_ids << "#{node_id} (level: #{node.level})"
+ else
+ level = node.actual_level
+ if level > prev_level + 1
+ gap = level - (prev_level + 1)
+ msg = "(Row: #{node.row_num}) ID/Tag '#{node_id}' (lev #{level}) jumps #{gap} level(s) (expected #{prev_level + 1})."
+ #puts " " + node.tags_array.pretty_inspect
+ missing_nodes = node.decouple(gap)
+
+ msg << "\n Adding missing upper level(s): " + missing_nodes.map(&:raw_tag).pretty_inspect
+ log(:info) { msg }
+
+ out.push(*tidy_nodes(missing_nodes, prev_level: prev_level, main: false))
+ # puts node.actual_level
+ # pp node.tags_array
+ level = prev_level + 1
+ end
+ out << node
+ done_ids << node_id
+ prev_level = level
+ end
+ end.yield_self do |out|
+ report_repeated_node_ids(repeated_ids) if main
+ fill_in_parents(out)
+ end
+ end
+
+ # Sets the `parentId` property.
+ def fill_in_parents(nodes)
+ nodes.tap do |nodes|
+ prev_nodes = empty_level_tracker_hash(11)
+ nodes.each do |node|
+ if parent_node = prev_nodes[node.actual_level - 1]
+ node.parentId = parent_node.id
+ end
+ prev_nodes[node.raw_level] = node
+ end
+ end
+ end
+
+ # Tracker helper (those repeated)
+ def repeated_ids
+ @repeated_ids ||= []
+ end
+
+ # Tracker helper (those done)
+ def done_ids
+ @done_ids ||= []
+ end
+
+ def reset_trackers!
+ @done_ids = []
+ @repeated_ids = []
+ end
+ end
+ end