eco-helpers 2.5.8 → 2.5.10

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 28c284a7c27d048f79b4f3b08b7c5fabca039fe1c3840809c7b608b024ba666b
- data.tar.gz: 42218a8878c3b3ffb937c4bc66d0b8227ba8b44771e7047a4bb6bf2fdbcf34e7
+ metadata.gz: 8e5eed9a7b386a3461b4fad2d842d183ccec984eae4149104a2943bda6b84885
+ data.tar.gz: c19b0aa7ba31b41069f4afba73edcd1354e80ecc60aa668aba124ea4960eca1c
  SHA512:
- metadata.gz: 0a7f9160a34384831722dc1552d50dd70fb039df2110f675c9e9083143e59f33263669b532c3f000d3a12d09568f95b9c7933a5e89175b6870595cb6f55c98bb
- data.tar.gz: 5ba00795fc3cded86e5fe820385355153dfa51ab4ddac0e84a8a00cdd01d1db66ddf238c0ad0debae0ae107f584324f5ca941453ff79b9d1e8f7d52009046b83
+ metadata.gz: c83534044499d6a1561cbf5f21f038ec2f03e3a458178555c3b14c87e50e6c9f6d938f960586bae7f04e1ee152aab7c83b34f8f2752eca8d8ca94b9e97faf46f
+ data.tar.gz: 401d2226b3f02ab5b5d67ebddf75498b918bec22ea85bc405c48aee4ea50c32dad56abb230a91a88eabc0a7fa6d448683ef09a3dedd788e8ec61a11d36a258c3
data/.gitignore CHANGED
@@ -18,3 +18,4 @@ Gemfile.lock
  # rspec failure tracking
  .rspec_status
  scratch.rb
+ .byebug_history
data/CHANGELOG.md CHANGED
@@ -1,11 +1,51 @@
  # Change Log
  All notable changes to this project will be documented in this file.
 
- ## [2.5.9] - 2023-08-xx
+ ## [2.5.10] - 2023-09-xx
 
  ### Added
  ### Changed
  ### Fixed
+ - `Eco::API::UseCases::DefaultCases::ReinviteSyncCase` removes unnecessary additional parameters on call.
+
+ ## [2.5.9] - 2023-09-05
+
+ The input **csv** of trees could come in a very compacted form, which had unintended consequences.
+ - This release fixes that scenario.
+
+ ### Added
+ - `Eco::Data::Locations::NodeLevel#raw_latest_consecutive_top_empty_level`
+   - To scope the missing ancestors (up to what level)
+ - `Eco::API::UseCases::DefaultCases::Samples::Sftp`
+   - Options for `remote_subfolder`, `remote_target_folder` and `remote_folder`
+ - `Eco::API::Organization::People#updated_or_created` **method**
+   - Scopes all people that have been updated as part of the current session.
+   - Note that it also includes those that are hris-excluded.
+ - **Improvement** on `Eco::API::Common::Loaders::Base`
+   - Should be able to call `log(:level) { "Some message" }`
+
+ ### Changed
+ - `Eco::Data::Locations::NodeLevel`
+   - **removed** `#merge!` and `#override_upper_levels`
+   - `#override_lower_levels` **renamed** to `#update_lower_levels`
+ - `Eco::API::UseCases::DefaultCases::CsvToTree` use case
+   - Moved to work with `Eco::Data::Locations::DSL`
+   - **Removed** helpers double-ups.
+ - Default workflow `on(:report)`: implementation of `-processed-people-to-csv`
+   - It now only includes people created or updated as part of the current session.
+ - **Improvement**: `Eco::API::Common::Loaders::Parser` made the `serializer` method **not required**
+   - This aims to use the default serializer definition
+   - Made `parsing_phase` and `serializing_phase` **inheritable**
+
+ ### Fixed
+ - `Eco::Data::Locations::NodeLevel#update_lower_levels`
+   - To use `#raw_latest_consecutive_top_empty_level`
+   - Compact from the first filled level in `tags_array` onwards (preserve empty tags at the beginning for clean validation)
+ - `Eco::Data::Locations::NodeLevel::Cleaner`
+   - `#tidy_nodes` **gap** calculation is scoped against the previous node (common ancestors)
+   - `#fill_in_parents` gives feedback on unexpected parental relationships
+ - `Eco::API::UseCases::DefaultCases::Samples::Sftp`
+   - Prevent double-slashed paths
 
  ## [2.5.8] - 2023-08-28
 
data/eco-helpers.gemspec CHANGED
@@ -30,9 +30,9 @@ Gem::Specification.new do |spec|
  spec.add_development_dependency "yard", ">= 0.9.26", "< 1"
  spec.add_development_dependency "redcarpet", ">= 3.5.1", "< 4"
 
- spec.add_dependency 'ecoportal-api', '>= 0.9.4', '< 0.10'
+ spec.add_dependency 'ecoportal-api', '>= 0.9.5', '< 0.10'
  spec.add_dependency 'ecoportal-api-v2', '>= 1.1.3', '< 1.2'
- spec.add_dependency 'ecoportal-api-graphql', '>= 0.3.10', '< 0.4'
+ spec.add_dependency 'ecoportal-api-graphql', '>= 0.3.11', '< 0.4'
  spec.add_dependency 'aws-sdk-s3', '>= 1.83.0', '< 2'
  spec.add_dependency 'aws-sdk-ses', '>= 1.36.0', '< 2'
  spec.add_dependency 'dotenv', '>= 2.7.6', '< 3'
@@ -4,6 +4,7 @@ module Eco
  module Loaders
  class Base
  extend Eco::API::Common::ClassHelpers
+ include Eco::Language::AuxiliarLogger
 
  class << self
  # Sort order
@@ -47,10 +48,6 @@ module Eco
  session.config
  end
 
- def logger
- session.logger
- end
-
  def micro
  session.micro
  end
@@ -101,10 +101,9 @@ module Eco
  keys = []
  end
  end
-
  end
 
- inheritable_class_vars :attribute
+ inheritable_class_vars :attribute, :parsing_phase, :serializing_phase
 
  def initialize(person_parser)
  raise "Expected Eco::API::Common::People::PersonParser. Given #{policies.class}" unless person_parser.is_a?(Eco::API::Common::People::PersonParser)
@@ -127,9 +126,9 @@ module Eco
  # - when `:final`: it will receive a `Hash` with the **internal values** and **types**.
  # - when `:person`: it will receive the `person` object.
  # @param deps [Hash] the merged dependencies (default to the class object and when calling the parser).
- def seralizer(data, deps)
- raise "You should implement this method"
- end
+ # def serializer(data, deps)
+ # raise "You should implement this method"
+ # end
 
  # @return [String, Symbol] the field/attribute or type this parser is linked to.
  def attribute
@@ -147,6 +146,7 @@ module Eco
  end
 
  def _define_serializer(attr_parser)
+ return unless respond_to?(:serializer, true)
  attr_parser.def_serializer(self.class.serializing_phase, &self.method(:serializer))
  end
  end
@@ -80,6 +80,15 @@ module Eco
  account_present(false)
  end
 
+ # Returns the people that are being or have been updated and/or created.
+ def updated_or_created
+ select do |person|
+ !person.as_update(:total).empty?
+ end.yield_self do |persons|
+ newFrom persons
+ end
+ end
+
  def supervisors
  sup_ids = self.ids & self.supervisor_ids
  sup_ids.map do |id|
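The hunk above adds `Eco::API::Organization::People#updated_or_created`. Below is a minimal, hypothetical sketch of how a custom use case might combine it with the `log(:level)` helper that `Loaders::Base` now exposes (the surrounding case class and its registration are assumed, not shown in this diff):

```ruby
# Hypothetical fragment of a custom use case's `main`; `people` is assumed to be
# an Eco::API::Organization::People collection, and `log` to come from Loaders::Base.
def main(entries, people, session, options, usecase)
  log(:info) { "People in scope: #{people.count}" }

  # New in 2.5.9: only the people created or updated in this session
  # (hris-excluded people are included as well).
  touched = people.updated_or_created
  log(:info) { "Created or updated this session: #{touched.count}" }
end
```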
@@ -74,7 +74,7 @@ module Eco
  session: self.session,
  options: self.options,
  validate: true
- }.merge(kargs)
+ }
  self.class.new(**default.merge(kargs))
  end
 
@@ -14,7 +14,7 @@ class Eco::API::UseCases::DefaultCases::TagPaths < Eco::API::Common::Loaders::Us
  private
 
  def tag_paths
- @tag_paths ||= tagtree.tags.map do |tag|
+ @tag_paths ||= tagtree.tags.reverse.map do |tag|
  [tag, tagtree.path(tag).join("|")]
  end
  end
@@ -2,27 +2,36 @@ class Eco::API::UseCases::DefaultCases::CsvToTree < Eco::API::Common::Loaders::U
  name "csv-to-tree"
  type :other
 
+ include Eco::Data::Locations::DSL
+
  TIME_FORMAT = '%Y%m%dT%H%M%S'
 
  def main(session, options, usecase)
  options[:end_get] = false
- tree_struct = Helper.treeify(Helper.csv_nodes(input_file))
+ tree_struct = org_tree(input_csv)
 
  File.open(output_file, "w") do |fd|
- json = tree_struct.to_json
- fd << json
+ fd << tree_struct.as_json.to_json
  end
  logger.info("Saved structure in '#{output_file}'")
  end
 
  private
 
+ def output_file
+ @output_file ||= "#{active_enviro}_tree_#{timestamp}.json"
+ end
+
+ def input_csv
+ @input_csv ||= Eco::CSV.read(input_file, encoding: input_encoding)
+ end
+
  def input_file
  @input_file ||= options.dig(:source, :file)
  end
 
- def output_file
- @output_file ||= "#{active_enviro}_tree_#{timestamp}.json"
+ def input_encoding
+ options.dig(:input, :file, :encoding) || 'utf-8'
  end
 
  def timestamp(date = Time.now)
@@ -33,8 +42,3 @@ class Eco::API::UseCases::DefaultCases::CsvToTree < Eco::API::Common::Loaders::U
  config.active_enviro
  end
  end
-
- require_relative 'csv_to_tree_case/node'
- require_relative 'csv_to_tree_case/nodes_cleaner'
- require_relative 'csv_to_tree_case/treeify'
- require_relative 'csv_to_tree_case/helper'
@@ -4,6 +4,6 @@ class Eco::API::UseCases::DefaultCases::ReinviteSyncCase < Eco::API::UseCases::D
 
  def main(entries, people, session, options, usecase)
  found = micro.with_each_present(entries, people, options, log_starter: true)
- reinvite(found.users, session, options, usecase)
+ reinvite(found.users)
  end
  end
@@ -2,10 +2,9 @@ class Eco::API::UseCases::DefaultCases::Samples::Sftp < Eco::API::Common::Loader
  name "sftp-sample"
  type :other
 
- attr_reader :session, :options
+ CONST_REFERRAL = /^(?:::)?(?:[A-Z][a-zA-Z0-9_]*(?:::[A-Z][a-zA-Z0-9_]*)*)$/
 
  def main(session, options, usecase)
- @session = session; @options = options
  options[:end_get] = false
  raise "The SFTP is not configured" unless session.sftp?
  case options.dig(:sftp, :command)
@@ -22,17 +21,41 @@ class Eco::API::UseCases::DefaultCases::Samples::Sftp < Eco::API::Common::Loader
 
  private
 
+ # Can't pass this via CLI option, as it breaks the regular expression
  def file_pattern
+ fpc = file_pattern_const
+ return fpc if fpc
  raise "You should redefine the file_pattern function as a RegEx expression that matches the target remote file"
  end
 
+ def file_pattern_const
+ if fpc = options.dig(:sftp, :file_pattern_const)
+ raise "Invalid file pattern const referral: #{fpc}" unless fpc.match(CONST_REFERRAL)
+ self.eval(fpc)
+ end
+ end
+
  # Ex: "/IN/Personnel"
  def remote_subfolder
+ rm_sf = options.dig(:sftp, :remote_subfolder)
+ return rm_sf if rm_sf
  raise "You should redefine remote_subfolder as the folder where the target file sits. Ex: /IN/Personnel"
  end
 
- def archive_subfolder
- "Archive"
+ # `remote_target_folder` overrides `sftp_config.remote_folder` as well as `remote_subfolder`
+ # `remote_folder` overrides `sftp_config.remote_folder` but NOT `remote_subfolder`
+ def remote_folder
+ rm_tf = options.dig(:sftp, :remote_target_folder)
+ rm_fd = options.dig(:sftp, :remote_folder) || sftp_config.remote_folder
+ rm_tf || File.join(rm_fd, remote_subfolder)
+ end
+
+ def to_remote_path(file)
+ File.join(remote_folder, file)
+ end
+
+ def local_folder
+ options.dig(:sftp, :local_folder) || "."
  end
 
  def with_remote_files
@@ -92,16 +115,8 @@ class Eco::API::UseCases::DefaultCases::Samples::Sftp < Eco::API::Common::Loader
  end
  end
 
- def to_remote_path(file)
- remote_folder + "/" + file
- end
-
- def local_folder
- options.dig(:sftp, :local_folder) || "."
- end
-
- def remote_folder
- @remote_folder ||= sftp_config.remote_folder + remote_subfolder
+ def archive_subfolder
+ "Archive"
  end
 
  def sftp_config
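The new SFTP options above are read from `options[:sftp]`. A small sketch of how `remote_folder` now resolves (the option keys are the ones read by the case in this diff; the concrete values are made up for illustration):

```ruby
# Illustrative values only; keys match the ones read by the sftp-sample case.
options = {
  sftp: {
    remote_folder:    "/outbound",     # overrides sftp_config.remote_folder
    remote_subfolder: "/IN/Personnel"  # still appended via File.join (no double slashes)
    # remote_target_folder: "/exact/path"  # would be used verbatim, ignoring the two above
  }
}
# remote_folder #=> File.join("/outbound", "/IN/Personnel") #=> "/outbound/IN/Personnel"
```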
@@ -153,14 +153,16 @@ ASSETS.cli.config do |config|
  end
 
  wf.on(:report) do |wf_report, io|
- if file = io.options.dig(:report, :people, :csv)
- io.options.deep_merge!(export: {
- options: {internal_names: true, nice_header: true, split_schemas: true},
- file: {name: file, format: :csv}
- })
- io = io.session.process_case("to-csv", io: io, type: :export)
+ io.tap do |_io|
+ if file = io.options.dig(:report, :people, :csv)
+ io.options.deep_merge!(export: {
+ options: {internal_names: true, nice_header: true, split_schemas: true},
+ file: {name: file, format: :csv}
+ })
+ aux_io = io.new(people: io.people.updated_or_created)
+ io.session.process_case("to-csv", io: aux_io, type: :export)
+ end
  end
- io
  end
 
  wf.on(:end) do |wf_end, io|
@@ -6,10 +6,11 @@ module Eco::Data::Locations::NodeBase
  VALID_TAG_CHARS = /[#{ALLOWED_CHARACTERS}]+/
  DOUBLE_BLANKS = /\s\s+/
 
- def clean_id(str)
+ def clean_id(str, notify: true)
  blanks_x2 = has_double_blanks?(str)
  partial = replace_not_allowed(str)
  remove_double_blanks(partial).tap do |result|
+ next unless notify
  next if invalid_warned?
  if partial != str
  invalid_chars = identify_invalid_characters(str)
@@ -22,6 +22,10 @@ module Eco::Data::Locations
  self.send(sym.to_sym)
  end
 
+ def attr?(sym)
+ !attr(sym).to_s.strip.empty?
+ end
+
  def set_attrs(**kargs)
  kargs.each {|attr, value| set_attr(attr, value)}
  self
@@ -11,36 +11,41 @@ class Eco::Data::Locations::NodeLevel
  # 3. It covers the gap if present by decoupling merged parent(s) from the same node (see node.decouple)
  # 4. Then, it delegates the filling in of parents to `fill_in_parents` function.
  # @return [Array<NodeLevel>] child to parent relationships solved and no double-ups.
- def tidy_nodes(nodes, prev_level: 0, main: true)
+ def tidy_nodes(nodes, prev_node: nil, main: true)
  reset_trackers! if main
+
+ prev_level = prev_node&.actual_level || 0
+
  nodes.each_with_object([]) do |node, out|
- node_id = node.id
- if done_ids.include?(node_id)
- repeated_ids << "#{node_id} (level: #{node.level})"
+ if done_ids.include?(node.id)
+ row_str = node.row_num ? " - (row: #{node.row_num})" : ''
+ repeated_ids << "#{node.id} (level: #{node.level})#{row_str}"
  else
  level = node.actual_level
- if level > prev_level + 1
- gap = level - (prev_level + 1)
- msg = "(Row: #{node.row_num}) ID/Tag '#{node_id}' (lev #{level}) jumps #{gap} level(s) (expected #{prev_level + 1})."
+ common_level = node.common_level_with(prev_node)
+ common_level ||= prev_level
+ gap = level - (common_level + 1)
+
+ unless gap < 1
+ msg = "(Row: #{node.row_num}) ID/Tag '#{node.id}' (lev #{level}) jumps #{gap} level(s) (expected #{prev_level + 1})."
  #puts " " + node.tags_array.pretty_inspect
  missing_nodes = node.decouple(gap)
-
- msg << "\n Adding missing upper level(s): " + missing_nodes.map(&:raw_tag).pretty_inspect
- log(:info) { msg }
-
+ msg << "\n Adding missing upper node(s): " + missing_nodes.map(&:raw_tag).pretty_inspect
+ log(:debug) { msg }
  # The very top missing node (first in list) should be checked against prev_level
  # alongside any descendants in missing_nodes (when gap 2+)
- tidied_nodes = tidy_nodes(missing_nodes, prev_level: prev_level, main: false)
+ tidied_nodes = tidy_nodes(missing_nodes, prev_node: prev_node, main: false)
  out.push(*tidied_nodes)
- #level = prev_level + 1 # <= we are actually on level and filled in the gaps
  end
  out << node
- done_ids << node_id
- prev_level = level
+ done_ids << node.id
+ prev_node = node
+ end
+ end.tap do |out|
+ if main
+ report_repeated_node_ids(repeated_ids)
+ fill_in_parents(out)
  end
- end.yield_self do |out|
- report_repeated_node_ids(repeated_ids) if main
- fill_in_parents(out)
  end
  end
 
@@ -51,10 +56,21 @@ class Eco::Data::Locations::NodeLevel
  nodes.tap do |nodes|
  prev_nodes = empty_level_tracker_hash(11)
  nodes.each do |node|
+ expected_parent_id = node.clean_parent_id&.upcase
+ msg = "Expecting node '#{node.id}' to have parent: '#{expected_parent_id}'\n"
  if parent_node = prev_nodes[node.actual_level - 1]
  node.parentId = parent_node.id
+ log(:warn) {
+ msg + " • We got '#{parent_node.id}' instead"
+ } unless expected_parent_id == node.parentId
+ elsif node.actual_level == 1
+ # expected to not have parent
+ else
+ log(:warn) {
+ msg + "but we did not get parent."
+ }
  end
- prev_nodes[node.raw_level] = node
+ prev_nodes[node.actual_level] = node
  end
  end
  end
@@ -30,17 +30,11 @@ class Eco::Data::Locations::NodeLevel
 
  prev_level = nil
  prev_node = nil
- prev_nodes = empty_level_tracker_hash(11)
- prev_node_get = proc do |raw_level|
- prev = nil
- (1..raw_level).to_a.reverse.each do |lev|
- prev ||= prev_nodes[lev]
- end
- prev
- end
+
  # Convert to Eco::CSV::Table for a fresh start
  csv = Eco::CSV.parse(csv.to_csv).nil_blank_cells.add_index_column(:row_num)
 
+ first = true
  nodes = csv.each_with_object([]) do |row, out|
  row_num, *values = row.fields
  node = node_class.new(row_num, *values)
@@ -53,16 +47,15 @@
  # which allows to node#actual_level to work
  node.set_high_levels(prev_node)
  else
- if parent_node = prev_node_get[node.raw_level - 1]
- node.set_high_levels(parent_node)
- elsif node.raw_level == 1
+ if node.raw_level == 1
  # It is expected not to have parent (as it's top level tag)
+ elsif prev_node
+ node.set_high_levels(prev_node)
  else
  raise "Node '#{node.raw_tag}' (#{node.row_num} row) doesn't have parent"
  end
  end
  out << node
- prev_nodes[node.raw_level] = node
  prev_node = node
  end
  tidy_nodes(nodes)
@@ -54,6 +54,42 @@ module Eco::Data::Locations
  nil
  end
 
+ def raw_prev_empty_level?
+ lev = raw_prev_empty_level
+ lev && lev > 0
+ end
+
+ def raw_latest_consecutive_top_empty_level
+ tags_array[0..raw_level-1].each_with_index do |value, idx|
+ return idx if value
+ end
+ nil
+ end
+
+ # Requires that all upper levels (lower positions) are filled-in
+ def common_level_with(other)
+ return nil unless other
+ otags_array = other.tags_array.compact
+ stags_array = tags_array.compact
+ raise "Missing lower levels for #{other.id}: #{other.tags_array.pretty_inspect}" unless other.highest_levels_set?
+ raise "Missing lower levels for #{self.id}: #{tags_array.pretty_inspect}" unless highest_levels_set?
+ otags_array.zip(stags_array).each_with_index do |(otag, stag), idx|
+ next if otag&.upcase&.strip == stag&.upcase&.strip
+ return nil if idx == 0
+ return idx # previous idx, which means prev_idx + 1 (so idx itself)
+ end
+ actual_level
+ end
+
+ # Second last id in tags_array
+ def raw_parent_id
+ tags_array.compact[-2]
+ end
+
+ def clean_parent_id
+ clean_tags_array.compact[-2]
+ end
+
  def tag_idx
  tags_array.index(raw_tag)
  end
@@ -68,33 +104,50 @@ module Eco::Data::Locations
  tary.index(nil) || tary.length + 1
  end
 
+ def copy
+ super.tap do |dup|
+ dup.highest_levels_set!
+ end
+ end
+
  # We got a missing level that is compacted in one row
  # Here we get the missing intermediate levels
  # This is done from upper to lower level to ensure processing order
  # It skips last one, as that is this object already
+ # @note for each one in the gap, creates a copy and clears deepest levels thereof
  def decouple(num = 1)
  with_info = filled_idxs
  # must be the last among filled_idxs, so let's use it to verify
  unless with_info.last == tag_idx
+ # This can only happen when there are repeated nodes
  raise "Review this (row #{row_num}; '#{raw_tag}'): tag_idx is #{tag_idx}, while last filled idx is #{with_info.last}"
  end
- len = with_info.length
+ len = with_info.length
  target_idxs = with_info[len-(num+1)..-2]
  target_idxs.map do |idx|
- self.copy.tap do |dup|
+ copy.tap do |dup|
  dup.clear_level(idx_to_level(idx + 1))
  end
  end
  end
 
- def merge!(node)
- override_upper_levels(node.tags_array)
+ def highest_levels_set?
+ return true if raw_level == 1
+ return true unless raw_prev_empty_level?
+ !!@highest_levels_set
  end
 
- def set_high_levels(node)
- override_lower_levels(node.tags_array)
+ def highest_levels_set!
+ @highest_levels_set = true
  end
 
+ # Sets ancestors
+ def set_high_levels(node, override: false, compact: true)
+ update_lower_levels(node.tags_array, override: override)
+ self
+ end
+
+ # Clears the deepest levels, from level `i` onwards
  def clear_level(i)
  case i
  when Enumerable
@@ -113,23 +166,17 @@ module Eco::Data::Locations
  true
  end
 
- # Cleanses deepest tags
- def override_upper_levels(src_tags_array, from_level: self.raw_level + 1)
- target_lev = Array(from_level..tag_attrs_count)
- target_tags = src_tags_array[level_to_idx(from_level)..level_to_idx(tag_attrs_count)]
- target_lev.zip(target_tags).each do |(n, tag)|
- set_attr("l#{n}", tag)
- end
- self
- end
-
  # Ensures parent is among the upper level tags
- def override_lower_levels(src_tags_array, to_level: self.raw_prev_empty_level)
+ # It actually ensures all ancestors are there
+ # @param override [Boolean] `false` will only override upmost top consecutive empty levels.
+ def update_lower_levels(src_tags_array, to_level: self.raw_latest_consecutive_top_empty_level, override: false)
+ highest_levels_set!
  return self unless to_level
  target_lev = Array(1..to_level)
  target_tags = src_tags_array[level_to_idx(1)..level_to_idx(to_level)]
  target_lev.zip(target_tags).each do |(n, tag)|
- set_attr("l#{n}", tag)
+ attr_lev = "l#{n}"
+ set_attr(attr_lev, tag) # unless attr?(attr_lev) && !override
  end
  self
  end
@@ -152,6 +199,12 @@ module Eco::Data::Locations
  actual_level > empty_idx
  end
 
+ def clean_tags_array
+ tags_array.map do |tg|
+ clean_id(tg, notify: false)
+ end
+ end
+
  def tags_array
  values_at(*TAGS_ATTRS)
  end
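A worked example (hypothetical values) of the new level helpers above, for a row whose top levels arrive empty:

```ruby
# Hypothetical node with the 11 level columns (l1..l11) used in this diff:
# tags_array => [nil, nil, "North", "Site A", nil, nil, nil, nil, nil, nil, nil]
#
# raw_level                               #=> 4  ("Site A" sits in column l4)
# raw_latest_consecutive_top_empty_level  #=> 2  (l1..l2 empty: ancestors missing up to level 2)
#
# update_lower_levels(parent.tags_array) then copies l1..l2 from the parent's
# tags_array, which is what set_high_levels now does via the renamed method.
```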
@@ -6,7 +6,7 @@ module Eco
  class ParserSerializer
  attr_reader :attr
 
- # Parser/seralizer.
+ # Parser/serializer.
  # @param attr [String, Symbol] name of the parsed/serialized.
  # @param dependencies [Hash] provisioning of _**default dependencies**_ that will be required when calling back to the
  # parsing or serializing functions.
data/lib/eco/version.rb CHANGED
@@ -1,3 +1,3 @@
  module Eco
- VERSION = "2.5.8"
+ VERSION = "2.5.10"
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: eco-helpers
  version: !ruby/object:Gem::Version
- version: 2.5.8
+ version: 2.5.10
  platform: ruby
  authors:
  - Oscar Segura
@@ -116,7 +116,7 @@ dependencies:
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 0.9.4
+ version: 0.9.5
  - - "<"
  - !ruby/object:Gem::Version
  version: '0.10'
@@ -126,7 +126,7 @@ dependencies:
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 0.9.4
+ version: 0.9.5
  - - "<"
  - !ruby/object:Gem::Version
  version: '0.10'
@@ -156,7 +156,7 @@ dependencies:
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 0.3.10
+ version: 0.3.11
  - - "<"
  - !ruby/object:Gem::Version
  version: '0.4'
@@ -166,7 +166,7 @@ dependencies:
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 0.3.10
+ version: 0.3.11
  - - "<"
  - !ruby/object:Gem::Version
  version: '0.4'
@@ -555,10 +555,6 @@ files:
  - lib/eco/api/usecases/default_cases/create_details_with_supervisor_case.rb
  - lib/eco/api/usecases/default_cases/create_tag_paths_case.rb
  - lib/eco/api/usecases/default_cases/csv_to_tree_case.rb
- - lib/eco/api/usecases/default_cases/csv_to_tree_case/helper.rb
- - lib/eco/api/usecases/default_cases/csv_to_tree_case/node.rb
- - lib/eco/api/usecases/default_cases/csv_to_tree_case/nodes_cleaner.rb
- - lib/eco/api/usecases/default_cases/csv_to_tree_case/treeify.rb
  - lib/eco/api/usecases/default_cases/delete_sync_case.rb
  - lib/eco/api/usecases/default_cases/delete_trans_case.rb
  - lib/eco/api/usecases/default_cases/email_as_id_case.rb
@@ -1,99 +0,0 @@
- class Eco::API::UseCases::DefaultCases::CsvToTree
- module Helper
- extend NodesCleaner
- extend Treeify
-
- class << self
- def csv_from(filename)
- raise "Missing #{filename}" unless File.exists?(filename)
- result = csv_from_file(filename)
- if result.is_a?(Integer)
- puts "An encoding problem was found on line #{result}"
- result = csv_from_content(filename)
- end
- result
- end
-
- def nodes_from_csv(csv)
- i = 1; prev_level = nil; prev_node = nil; prev_nodes = Array(1..11).zip(Array.new(11, nil)).to_h
- nodes = csv.each_with_object([]) do |row, out|
- values = row.fields.map do |value|
- value = value.to_s.strip
- value.empty?? nil : value
- end
- i += 1
- node = Node.new(i, *values)
- prev_node ||= node
-
- # If node is nested in prev_node or is a sibling thereof
- if prev_node.raw_level <= node.raw_level
- # Make sure parent is among upper level tags
- node.set_high_levels(prev_node)
- else
- if parent_node = prev_nodes[node.raw_level - 1]
- node.set_high_levels(parent_node)
- elsif node.raw_level == 1
- # It is expected not to have parent
- #puts "Node '#{node.raw_tag}' doesn't have parent, but it's top level tag"
- else
- raise "Node '#{node.raw_tag}' (#{node.row_num} row) doesn't have parent"
- end
- end
- out << node
- prev_nodes[node.raw_level] = node
- prev_node = node
- end
- tidy_nodes(nodes)
- end
-
- def csv_nodes(filename)
- nodes_from_csv(csv_from(filename))
- end
-
- private
-
- def csv_from_content(filename)
- CSV.parse(file_content(filename), headers: true)
- end
-
- def file_content(filename)
- coding = encoding(filename)
- coding = (coding != "utf-8")? "#{coding}|utf-8": coding
- if content = File.read(filename, encoding: coding)
- content.scrub do |bytes|
- '<' + bytes.unpack('H*')[0] + '>'
- end
- end
- end
-
- def csv_from_file(filename)
- coding = encoding(filename)
- coding = (coding != "utf-8")? "#{coding}|utf-8": coding
- CSV.read(filename, headers: true, encoding: coding)
- rescue CSV::MalformedCSVError => e
- if line = e.message.match(/line (?<line>\d+)/i)[:line]
- return line.to_i
- else
- raise
- end
- end
-
- def has_bom?(path)
- return false if !path || file_empty?(path)
- File.open(path, "rb") do |f|
- bytes = f.read(3)
- return bytes.unpack("C*") == [239, 187, 191]
- end
- end
-
- def encoding(path)
- has_bom?(path) ? "bom" : "utf-8"
- end
-
- def file_empty?(path)
- return true if !File.file?(path)
- File.zero?(path)
- end
- end
- end
- end
@@ -1,221 +0,0 @@
- class Eco::API::UseCases::DefaultCases::CsvToTree
- class Node < Struct.new(:row_num, :l1, :l2, :l3, :l4, :l5, :l6, :l7, :l8, :l9, :l10, :l11)
- TAGS_ATTRS = [:l1, :l2, :l3, :l4, :l5, :l6, :l7, :l8, :l9, :l10, :l11]
- ADDITIONAL_ATTRS = [:row_num]
- ALL_ATTRS = ADDITIONAL_ATTRS + TAGS_ATTRS
- ALLOWED_CHARACTERS = "A-Za-z0-9 &_'\/.-"
- VALID_TAG_REGEX = /^[#{ALLOWED_CHARACTERS}]+$/
- INVALID_TAG_REGEX = /[^#{ALLOWED_CHARACTERS}]+/
- VALID_TAG_CHARS = /[#{ALLOWED_CHARACTERS}]+/
- DOUBLE_BLANKS = /\s\s+/
-
- attr_accessor :parentId
-
- def nodeId
- id
- end
-
- def id
- tag.upcase
- end
-
- def name
- tag
- end
-
- def tag
- raw_tag.yield_self do |str|
- blanks_x2 = has_double_blanks?(str)
- partial = replace_not_allowed(str)
- remove_double_blanks(partial).tap do |result|
- next if invalid_warned?
- if partial != str
- invalid_chars = identify_invalid_characters(str)
- puts "• (Row: #{self.row_num}) Invalid characters _#{invalid_chars}_ (removed): '#{str}' (converted to '#{result}')"
- elsif blanks_x2
- puts "• (Row: #{self.row_num}) Double blanks (removed): '#{str}' (converted to '#{result}')"
- end
- invalid_warned!
- end
- end
- end
-
- def invalid_warned?
- @invalid_warned ||= false
- end
-
- def invalid_warned!
- @invalid_warned = true
- end
-
- def raw_tag
- values_at(*TAGS_ATTRS.reverse).compact.first
- end
-
- def level
- actual_level
- end
-
- def actual_level
- tags_array.compact.length
- end
-
- def raw_level
- tags_array.index(raw_tag) + 1
- end
-
- def tag_idx
- tags_array.index(raw_tag)
- end
-
- def previous_idx
- idx = tag_idx - 1
- idx < 0 ? nil : idx
- end
-
- def empty_idx
- tary = tags_array
- tary.index(nil) || tary.length + 1
- end
-
- def copy
- self.class.new.set_attrs(**self.to_h)
- end
-
- # We got a missing level that is compacted in one row
- # Here we get the missing intermediate levels
- # This is done from upper to lower level to ensure processing order
- # It skips last one, as that is this object already
- def decouple(num = 1)
- with_info = filled_idxs
- # must be the last among filled_idxs, so let's use it to verify
- unless with_info.last == tag_idx
- raise "Review this (row #{row_num}; '#{raw_tag}'): tag_idx is #{tag_idx}, while last filled idx is #{with_info.last}"
- end
- len = with_info.length
- target_idxs = with_info[len-(num+1)..-2]
- target_idxs.map do |idx|
- self.copy.tap do |dup|
- dup.clear_level(idx_to_level(idx + 1))
- end
- end
- end
-
- def merge!(node)
- override_upper_levels(node.tags_array)
- end
-
- def set_high_levels(node)
- override_lower_levels(node.tags_array)
- end
-
- def clear_level(i)
- case i
- when Enumerable
- target = i.to_a
- when Integer
- return false unless i >= 1 && i <= tag_attrs_count
- target = Array(i..tag_attrs_count)
- else
- return false
- end
- return false if target.empty?
- target.each do |n|
- #puts "clearing 'l#{n}': #{attr("l#{n}")}"
- set_attr("l#{n}", nil)
- end
- true
- end
-
- def override_upper_levels(src_tags_array, from_level: self.raw_level + 1)
- target_lev = Array(from_level..tag_attrs_count)
- target_tags = src_tags_array[level_to_idx(from_level)..level_to_idx(tag_attrs_count)]
- target_lev.zip(target_tags).each do |(n, tag)|
- set_attr("l#{n}", tag)
- end
- self
- end
-
- def override_lower_levels(src_tags_array, to_level: self.raw_level - 1)
- target_lev = Array(1..to_level)
- target_tags = src_tags_array[level_to_idx(1)..level_to_idx(to_level)]
- target_lev.zip(target_tags).each do |(n, tag)|
- set_attr("l#{n}", tag)
- end
- self
- end
-
- def idx_to_level(x)
- x + 1
- end
-
- def level_to_idx(x)
- x - 1
- end
-
- def filled_idxs
- tags_array.each_with_index.with_object([]) do |(t, i), o|
- o << i if t
- end
- end
-
- def blanks_between?
- actual_level > empty_idx
- end
-
- def tags_array
- values_at(*TAGS_ATTRS)
- end
-
- def values_at(*attrs)
- attrs.map {|a| attr(a)}
- end
-
- def to_h(*attrs)
- attrs = ALL_ATTRS if attrs.empty?
- attrs.zip(values_at(*attrs)).to_h
- end
-
- def slice(*attrs)
- return {} if attrs.empty?
- to_h(*attrs)
- end
-
- def set_attrs(**kargs)
- kargs.each {|attr, value| set_attr(attr, value)}
- self
- end
-
- def set_attr(attr, value)
- self.send("#{attr}=", value)
- end
-
- def attr(sym)
- self.send(sym.to_sym)
- end
-
- def tag_attrs_count
- TAGS_ATTRS.length
- end
-
- def has_double_blanks?(str)
- return false if str.nil?
- str.match(DOUBLE_BLANKS)
- end
-
- def remove_double_blanks(str)
- return nil if str.nil?
- str.gsub(DOUBLE_BLANKS, ' ').strip
- end
-
- def replace_not_allowed(str)
- return nil if str.nil?
- return str if str.match(VALID_TAG_REGEX)
- str.gsub(INVALID_TAG_REGEX, ' ')
- end
-
- def identify_invalid_characters(str)
- str.gsub(VALID_TAG_CHARS, '')
- end
- end
- end
@@ -1,73 +0,0 @@
- class Eco::API::UseCases::DefaultCases::CsvToTree
- module NodesCleaner
- def repeated_tags
- @repeated_tags ||= []
- end
-
- def done_tags
- @done_tags ||= []
- end
-
- def fill_in_parents(nodes)
- nodes.tap do |nodes|
- prev_nodes = Array(1..11).zip(Array.new(11, nil)).to_h
- nodes.each do |node|
- if parent_node = prev_nodes[node.actual_level - 1]
- node.parentId = parent_node.id
- end
- prev_nodes[node.raw_level] = node
- end
- end
- end
-
- def tidy_nodes(nodes, prev_level: 0, main: true)
- out = nodes.each_with_object([]) do |node, out|
- if done_tags.include?(tag = node.tag)
- repeated_tags << "#{tag} (level: #{node.level})"
- else
- level = node.actual_level
- if level > prev_level + 1
- gap = level - (prev_level + 1)
- puts "(Row: #{node.row_num}) Tag '#{tag}' (lev #{level}) jumps #{gap} level(s) (expected #{prev_level + 1})."
- #puts " " + node.tags_array.pretty_inspect
- missing_nodes = node.decouple(gap)
- puts " Adding missing upper level(s): " + missing_nodes.map(&:raw_tag).pretty_inspect
- out.push(*tidy_nodes(missing_nodes, prev_level: prev_level, main: false))
- # puts node.actual_level
- # pp node.tags_array
- level = prev_level + 1
- end
- out << node
- done_tags << tag
- prev_level = level
- end
- end
- if main
- unless repeated_tags.empty?
- puts "There were #{repeated_tags.length} repeated tags. Only one included. These excluded:"
- pp repeated_tags
- end
- end
- fill_in_parents(out)
- end
-
- def to_rows(nodes, prev_level: 0, main: true)
- out = tidy_nodes(nodes).each_with_object([]) do |node, out|
- tag = node.tag
- level = node.actual_level
- out << (row = Array.new(level, nil))
- row[-1..-1] = [tag.upcase]
- prev_level = level
- end
- if main
- # Normalize length
- max_row = out.max {|a, b| a.length <=> b.length}
- holder = Array.new(max_row.length, nil)
- out = out.map do |row|
- row.dup.concat(holder)[0..max_row.length-1]
- end
- end
- out
- end
- end
- end
@@ -1,33 +0,0 @@
- class Eco::API::UseCases::DefaultCases::CsvToTree
- module Treeify
- def treeify(nodes, &block)
- get_children(nil, parents_hash(nodes), &block)
- end
-
- private
-
- def parents_hash(nodes)
- nodes.each_with_object({}) do |node, parents|
- (parents[node.parentId] ||= []).push(node)
- end
- end
-
- def get_children(node_id, parents, &block)
- (parents[node_id] ||= []).each_with_object([]) do |child, results|
- node_hash = {
- "id" => child.id,
- "name" => child.name
- }
-
- if block_given?
- yield_hash = yield(child)
- node_hash.merge(yield_hash) if yield_hash.is_a?(Hash)
- end
-
- results << node_hash.merge({
- "nodes" => get_children(child.id, parents, &block).compact
- })
- end
- end
- end
- end