eco-helpers 2.7.16 → 2.7.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 5c94232f6124e63e5b9df716e6e4550b10e3853e2cd2ad9ee5dc6289bf4a2a56
- data.tar.gz: 224908e975dff3cf04d03c5c6cc55fde4472a622010a0548beb731cfb1e834d0
+ metadata.gz: 4f816f1f0ec131d756c11a4839629d57f90f2b9cc2a8a9008b54de0c57926272
+ data.tar.gz: 8e725e46c074927cf2db191471d29fe5dfce15c7aa121f9e49cfa30411f29bf2
  SHA512:
- metadata.gz: 2521d794cfa6f6d47c938df024f48540d4bf6eb2401a4c5a2e805d556814e1bdfef3949a03f492b7ab00ce98f56b752438e06a0860aebdb788c5eda056b242de
- data.tar.gz: 245b52b949c7c8141821ce61c0b58beed8242d5b0a6efd493892e1f9c96f58198db6fa4b2c0da0c08b275ff8a6eb7c6444e099dd1c1898a532bd8310a9201c8a
+ metadata.gz: 8427745673ff4ab4d19477fff524cb9cab781b9b818c072943c600282cc5d519f06b3a5807e6a241787fb1c259a1697530c282ccf18c9849d8d4f09ca89e64b7
+ data.tar.gz: 22da605e9eaa15e84ea50a6431ad4e9f7b98239b981e5ec7b07dbd93d92d72387232c222656ec8a79ed725c60d5088c150edc4dbed4547ce10fd97e01d6be6ae
data/CHANGELOG.md CHANGED
@@ -2,7 +2,7 @@
 
  All notable changes to this project will be documented in this file.
 
- ## [2.7.16] - 2024-06-18
+ ## [2.7.19] - 2024-06-xx
 
  ### Added
 
@@ -10,14 +10,45 @@ All notable changes to this project will be documented in this file.
 
  ### Fixed
 
- - `RegisterUpdateCase` filters
+ ## [2.7.18] - 2024-07-01
 
- ## [2.7.15] - 2024-06-18
+ ### Changed
+
+ - `Eco::API::UseCases::GraphQL::Samples::Location::Service::TreeToList`
+   - Custom input node parsers: yield `node_hash` and `node` as well.
+ - `Eco::API::UseCases::GraphQL::Samples::Location::Service::TreeDiff`
+   - Exclude archived nodes from the file by **default**
+
+ ## [2.7.17] - 2024-06-22
 
  ### Added
 
+ - **Stop on unknown** options will try to offer suggestions from now on
+ - `Eco::CSV::count` class method to stream-count the number of rows
+   - Option `-start-at` allows starting the count at a certain row index
+
  ### Changed
 
+ - Added options to case `-split-csv`
+   - `-start-at`
+   - `-simulate`: a dry run that returns the count and won't generate the files.
+ - `Eco::CSV::split`
+   - the `block` allows filtering which rows should be included
+
+ ### Fixed
+
+ - `Workflow::Mailer`: shouldn't send a notification when there is an **error** that doesn't really need to be notified.
+ - `to-csv` case should accept a target folder as an argument
+ - `options_set` sort namespaces
+
+ ## [2.7.16] - 2024-06-18
+
+ ### Fixed
+
+ - `RegisterUpdateCase` filters
+
+ ## [2.7.15] - 2024-06-18
+
  ### Fixed
 
  - `Eco::API::Common::People::EntryFactor` super must be called before
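As a rough illustration of the 2.7.17 CSV entries above, the sketch below exercises `Eco::CSV.count` and `Eco::CSV.split` the way the `-split-csv` case further down in this diff does. The keyword arguments (`start_at:`, `max_rows:`) and the row-filtering block are inferred from those calls; the file name and filter logic are hypothetical.

```ruby
# Hedged sketch, not taken from the gem's documentation.
require 'eco-helpers' # assumes the eco-helpers gem (>= 2.7.17) is installed

input = 'people_delta.csv' # hypothetical input file

# Stream-count rows, optionally starting at a given row index (changelog: `-start-at`).
total = Eco::CSV.count(input, start_at: 100)
puts "Rows counted from row 100: #{total}"

# Split into files of at most 500 rows; the block decides which rows are included
# (assumption: the block receives a CSV row and a truthy return keeps it).
Eco::CSV.split(input, max_rows: 500, start_at: 100) do |row|
  row['active'] == 'true'
end.each { |file| puts "Generated #{file}" }
```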
@@ -17,7 +17,10 @@ class Eco::API::Common::Loaders::Workflow::Mailer < Eco::API::Common::Loaders::W
  next unless session.mailer?
  next if session.config.dry_run?
  next unless session.config.run_mode_remote?
- next unless some_update?(io) || error?
+
+ # temporary contingency
+ maybe_error_pages_or_tree_updates = other_case?(io) && error?
+ next unless some_update?(io) || maybe_error_pages_or_tree_updates
 
  subject = base_subject
 
@@ -53,6 +56,12 @@ class Eco::API::Common::Loaders::Workflow::Mailer < Eco::API::Common::Loaders::W
  end
  end
 
+ def other_case?(io)
+ cli.config.usecases.active(io: io).any? do |usecase, _data|
+ %i[other].any? { |type| usecase.type == type }
+ end
+ end
+
  def error?
  !!error
  end
@@ -7,18 +7,17 @@ module Eco
  def people_cache(filename = enviro.config.people.cache)
  logger.info("Going to get all the people via API")
 
- start = Time.now
- people = session.batch.get_people
+ start = Time.now
+ people = session.batch.get_people
  secs = (Time.now - start).round(3)
  cnt = people.count
  per_sec = (cnt.to_f / secs).round(2)
  logger.info("Loaded #{cnt} people in #{secs} seconds (#{per_sec} people/sec)")
 
- file = file_manager.save_json(people, filename, :timestamp)
+ file = file_manager.save_json(people, filename, :timestamp)
  logger.info("#{people.length} people loaded and saved locally to #{file}.")
  Eco::API::Organization::People.new(people)
  end
-
  end
  end
  end
@@ -15,19 +15,23 @@ module Eco
  # - `:file` if it is supposed to load people from a file.
  # - `:save` if it is supposed to cache/save the data locally once obtained people from the server (`:api`)
  # @return [Eco::API::Organization::People] the `People` object with the data.
- def people_load(filename = enviro.config.people.cache, modifier: [:newest, :api])
+ def people_load(filename = enviro.config.people.cache, modifier: %i[newest api]) # rubocop:disable Metrics/AbcSize
  modifier = [modifier].flatten
- load_file = [:file, :newest].any? {|flag| modifier.include?(flag)}
+ load_file = %i[file newest].any? {|flag| modifier.include?(flag)}
+
  case
  when filename && load_file
- if file = people_load_filename(filename, newest: modifier.include?(:newest))
+ file = people_load_filename(filename, newest: modifier.include?(:newest))
+
+ if file
  file_manager.load_json(file).tap do |people|
  logger.info("#{people&.length} people loaded from file #{file}") if people.is_a?(Array)
  end
  else
  logger.error("could not find the file #{file_manager.dir.file(filename)}")
  exit unless modifier.include?(:api)
- people_load(modifier: modifier - [:newest, :file])
+
+ people_load(modifier: modifier - %i[newest file])
  end
  when modifier.include?(:api)
  logger.info("Going to get all the people via API (load)")
@@ -39,12 +43,12 @@ module Eco
  per_sec = (cnt.to_f / secs).round(2)
  logger.info("Loaded #{cnt} people in #{secs} seconds (#{per_sec} people/sec)")
 
- if modifier.include?(:save) && people && people.length > 0
+ if modifier.include?(:save) && people && people.length.positive?
  file = file_manager.save_json(people, filename, :timestamp)
- logger.info("#{people.length } people saved to file #{file}.")
+ logger.info("#{people.length} people saved to file #{file}.")
  end
  end
- end.yield_self do |people|
+ end.then do |people|
  Eco::API::Organization::People.new(people)
  end
  end
@@ -61,7 +65,6 @@ module Eco
  file_manager.dir.file(filename, should_exist: true)
  end
  end
-
  end
  end
  end
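For context on the `modifier` flags documented in the comments of this hunk, here is a hedged sketch of how `people_load` might be driven from code that includes these session helpers; the cache path is hypothetical and the flag semantics (`:newest`, `:file`, `:api`, `:save`) are taken from the doc comments above.

```ruby
# Sketch only: assumes `self` mixes in the people-load session helpers shown above.

# Prefer the newest local cache file, falling back to the API when it is missing:
people = people_load('cache/people.json', modifier: %i[newest api])

# Force a fresh API pull and cache the result locally afterwards:
people = people_load(modifier: %i[api save])

puts "Loaded #{people.count} people"
```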
@@ -9,24 +9,26 @@ module Eco
  # @param people [Eco::API::Organization::People] the people that needs refresh.
  # @param include_created [Boolean] include people created during this session? (will check `:create` batch jobs).
  # @return [Eco::API::Organization::People] the `People` object with the data.
- def people_refresh(people:, include_created: true)
+ def people_refresh(people:, include_created: true) # rubocop:disable Metrics/AbcSize
  people = people.newFrom people.select do |person|
  !person.new? || !person.dirty?
  end
+
  ini = people.length
+
  if include_created
  session.job_groups.find_jobs(type: :create).map do |job|
- to_add = job.people.select {|person| !person.dirty?}
+ to_add = job.people.reject(&:dirty?)
  people = people.merge(to_add)
  end
  end
 
  created = people.length - ini
- msg = "Going to refresh #{people.length} people with server data"
- msg += " (including #{created} that were created)" if created > 0
+ msg = "Going to refresh #{people.length} people with server data"
+ msg += " (including #{created} that were created)" if created.positive?
  logger.info(msg)
 
- start = Time.now
+ start = Time.now
  entries = session.batch.get_people(people, silent: true)
  secs = (Time.now - start).round(3)
  cnt = entries.count
@@ -34,11 +36,10 @@ module Eco
  logger.info("Re-loaded #{cnt} people (out of #{people.length}) in #{secs} seconds (#{per_sec} people/sec)")
 
  missing = people.length - entries.length
- logger.error("Missed to obtain #{missing} people during the refresh") if missing > 0
+ logger.error("Missed to obtain #{missing} people during the refresh") if missing.positive?
 
  Eco::API::Organization::People.new(entries)
  end
-
  end
  end
  end
@@ -5,52 +5,57 @@ module Eco
  # @note
  # - this helper is normally used to **get partial** part of the people manager.
  # - therefore, normally used with _**delta** input files_ (files with only the differences).
- # @param data [Eco::API::Organization::People, Enumerable<Person>, Enumerable<Hash>] `People` to search against the server.
+ # @param data [Eco::API::Organization::People, Enumerable<Person>, Enumerable<Hash>]
+ # `People` to search against the server.
  # @param options [Hash] the options.
  # @param silent [Boolean] `false` if low level search messages should be shown.
  # @return [Eco::API::Organization::People] the `People` object with the found persons.
- def people_search(data, options: {}, silent: true)
+ def people_search(data, options: {}, silent: true) # rubocop:disable Metrics/AbcSize
  session.logger.info("Going to api get #{data.length} entries...")
 
  start = Time.now
- people = session.batch.search(data, silent: silent).yield_self do |status|
- secs = (Time.now - start).round(3)
- Eco::API::Organization::People.new(status.people).tap do |people|
+ people = session.batch.search(data, silent: silent).then do |status|
+ secs = (Time.now - start).round(3)
+ Eco::API::Organization::People.new(status.people).tap do |people| # rubocop:disable Lint/ShadowingOuterLocalVariable
  cnt = people.count
  per_sec = (cnt.to_f / secs).round(2)
- msg = "... could get #{cnt} people (out of #{data.length} entries) in #{secs} seconds (#{per_sec} people/sec)"
+ msg = "... could get #{cnt} people "
+ msg << "(out of #{data.length} entries) in #{secs} seconds (#{per_sec} people/sec)"
  session.logger.info(msg)
  end
  end
 
  # get the supervisors of found people (current supervisors)
  supers = people_search_prepare_supers_request(people)
- if supers.length > 0
+ if supers.length.positive?
  session.logger.info(" Going to api get #{supers.length} current supervisors...")
  start = Time.now
- people = session.batch.search(supers, silent: silent).yield_self do |status|
+ people = session.batch.search(supers, silent: silent).then do |status|
  secs = (Time.now - start).round(3)
  found = status.people
  cnt = found.count
  per_sec = (cnt.to_f / secs).round(2)
- msg = "... could find #{cnt} current supers (out of #{supers.length}) in #{secs} seconds (#{per_sec} people/sec)"
+ msg = "... could find #{cnt} current supers "
+ msg << "(out of #{supers.length}) in #{secs} seconds (#{per_sec} people/sec)"
  session.logger.info(msg)
+
  people.merge(found, strict: micro.strict_search?(options))
  end
  end
 
  # get the supervisors referred in the input data (future supervisors)
  supers = people_search_prepare_supers_request(data, people)
- if supers.length > 0
+ if supers.length.positive?
  session.logger.info(" Going to api get #{supers.length} supervisors as per input entries...")
  start = Time.now
 
- people = session.batch.search(supers, silent: silent).yield_self do |status|
+ people = session.batch.search(supers, silent: silent).then do |status|
  secs = (Time.now - start).round(3)
  found = status.people
  cnt = found.count
  per_sec = (cnt.to_f / secs).round(2)
- msg = "... could find #{cnt} input supers (out of #{supers.length}) in #{secs} seconds (#{per_sec} people/sec)"
+ msg = "... could find #{cnt} input supers "
+ msg << "(out of #{supers.length}) in #{secs} seconds (#{per_sec} people/sec)"
  session.logger.info(msg)
  people.merge(found, strict: micro.strict_search?(options))
  end
@@ -66,25 +71,26 @@ module Eco
  def people_search_prepare_supers_request(data, people = data)
  data.each_with_object([]) do |entry, request|
  spr = {"id" => (sup_id = people_search_super_id(entry))}
- unless !sup_id || request.include?(spr)
- micro.with_supervisor(sup_id, people) do |supervisor|
- request.push(spr) unless supervisor
- end
+ next if !sup_id || request.include?(spr)
+
+ micro.with_supervisor(sup_id, people) do |supervisor|
+ request.push(spr) unless supervisor
  end
  end
  end
 
  # Gets the `supervisor_id` from `value`
  def people_search_super_id(value)
- sup_id = if value.respond_to?(:supervisor_id)
- value.supervisor_id
- elsif value.is_a?(Hash) && value.key("supervisor_id")
- value["supervisor_id"]
- end
+ sup_id =
+ if value.respond_to?(:supervisor_id)
+ value.supervisor_id
+ elsif value.is_a?(Hash) && value.key("supervisor_id")
+ value["supervisor_id"]
+ end
+
  sup_id = nil if sup_id.to_s.strip.empty?
  sup_id
  end
-
  end
  end
  end
@@ -7,9 +7,10 @@ module Eco
  def apply!(arg_case = cli_name)
  #puts "DEFINING CLI for '#{arg_case}' via #{self}"
  if applied?(arg_case)
- puts "Warning: (#{self}) Tried to call again cli.apply! on '#{arg_case}'"
+ puts "Warning: (#{self}) Tried to call again cli.apply! on '#{arg_case}'"
  return self
  end
+
  cli_config_case(arg_case)
  apply_options(arg_case)
  applied!(arg_case)
@@ -29,10 +30,11 @@ module Eco
  end
 
  attr_writer :usecase
+
  # Unless specified, assume Cli class hangs from its case namespace
  def usecase
- raise "#{self} is to use to extend a class" unless self.is_a?(Class)
- @usecase ||= Kernel.const_get(self.to_s.split('::')[0..-2].join('::'))
+ raise "#{self} is to use to extend a class" unless is_a?(Class)
+ @usecase ||= Kernel.const_get(to_s.split('::')[0..-2].join('::'))
  end
 
  def description(value = nil)
@@ -50,7 +52,7 @@ module Eco
 
  # It defaults to the use case preceded by dash
  def cli_name(arg_name = nil)
- @cli_name = (arg_name.nil? ? @cli_name : arg_name).yield_self do |value|
+ @cli_name = (arg_name.nil? ? @cli_name : arg_name).then do |value|
  value = "-#{name}" if value.nil?
  value
  end
@@ -66,7 +68,7 @@ module Eco
  end
 
  def add_option(arg, desc = nil, &block)
- self.tap do
+ tap do
  "Overriding option '#{arg}' on case '#{name}'" if options.key?(arg)
  @options[arg] = Eco::API::UseCases::Cli::Option.new(arg, desc, &block)
  end
@@ -75,7 +77,7 @@ module Eco
  private
 
  def apply_options(arg_case)
- options.each do |_key, option|
+ options.each_value do |option|
  option.link_case(cli_config_case(arg_case))
  end
  end
@@ -13,6 +13,7 @@ class Eco::API::UseCases::Cli
 
  def link_case(cli_config_case)
  raise ArgumentError, "cli_config_case must have an 'add_option' method. Given: #{cli_config_case.class}" unless cli_config_case.respond_to?(:add_option)
+
  cli_config_case.add_option(name, desc, &callback)
  end
  end
@@ -11,5 +11,10 @@ class Eco::API::UseCases::Default::People::Utils::SplitCsv
  count = SCR.get_arg("-max-rows", with_param: true)
  options.deep_merge!(output: {file: {max_rows: count}})
  end
+
+ add_option("-start-at", "Get only the last N-start_at rows") do |options|
+ count = SCR.get_arg("-start-at", with_param: true)
+ options.deep_merge!(output: {file: {start_at: count}})
+ end
  end
  end
@@ -7,15 +7,27 @@ class Eco::API::UseCases::Default::People::Utils::SplitCsv < Eco::API::Common::L
  type :other
 
  def main(*_args)
- Eco::CSV.split(input_file, max_rows: max_rows).each do |file|
- log(:info) {
- "Generated file '#{file}'"
- }
+ if simulate?
+ count = Eco::CSV.count(input_file, start_at: start_at)
+ log(:info) { "CSV '#{input_file}' has #{count} rows." }
+ else
+ Eco::CSV.split(
+ input_file,
+ max_rows: max_rows,
+ start_at: start_at,
+ &filter
+ ).each do |file|
+ log(:info) { "Generated file '#{file}'" }
+ end
  end
  end
 
  private
 
+ def filter
+ nil
+ end
+
  def input_file
  options.dig(:source, :file)
  end
@@ -31,4 +43,12 @@ class Eco::API::UseCases::Default::People::Utils::SplitCsv < Eco::API::Common::L
  num = nil if num.zero?
  num
  end
+
+ def start_at
+ return nil unless (num = options.dig(:output, :file, :start_at))
+
+ num = num.to_i
+ num = nil if num.zero?
+ num
+ end
  end
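The `filter` hook added above returns `nil` by default, so no row filtering happens unless a subclass overrides it. A hedged sketch of such an override follows; the class and CLI name are hypothetical, while the `filter`, `start_at`, and `max_rows` hooks come from the hunks above.

```ruby
# Sketch: a custom split case that keeps only rows with a non-empty email.
class ActiveRowsSplitCsv < Eco::API::UseCases::Default::People::Utils::SplitCsv
  name '-split-active-csv' # hypothetical case name
  type :other

  private

  # Passed to Eco::CSV.split as `&filter`; a truthy return keeps the row (assumed semantics).
  def filter
    proc { |row| !row['email'].to_s.strip.empty? }
  end
end
```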
@@ -2,6 +2,8 @@ class Eco::API::UseCases::DefaultCases::ToCsvCase < Eco::API::Common::Loaders::U
  name "to-csv"
  type :export
 
+ OUT_FILENAME = 'pm'
+
  attr_reader :people
 
  def main(people, _session, options, _usecase)
@@ -16,8 +18,9 @@ class Eco::API::UseCases::DefaultCases::ToCsvCase < Eco::API::Common::Loaders::U
  if options.dig(:export, :options, :split_schemas)
  by_schema.each do |id, people|
  sch_name = schemas.to_name(id)
- prefix = sch_name ? sch_name.gsub(" ", "_") : "No_Schema"
- create_file!("#{prefix}_#{file}", people)
+ prefix = sch_name ? sch_name.gsub(" ", "_").downcase : "no_schema"
+ filename = in_folder("#{prefix}_#{File.basename(file)}")
+ create_file!(filename, people)
  end
  else
  create_file!(file, people)
@@ -61,15 +64,13 @@ class Eco::API::UseCases::DefaultCases::ToCsvCase < Eco::API::Common::Loaders::U
  end
 
  def nice_header_names(header, schema: nil)
- schema ||= session.schema
- name_maps = schema.fields_by_alt_id.each_with_object({}) do |(alt_id, fld), mappings|
- mappings[alt_id] = fld.name
- end.merge(nice_header_maps)
+ schema ||= session.schema
+ name_maps = schema.fields_by_alt_id.transform_values(&:name).merge(nice_header_maps)
  header.map {|name| name_maps[name] || name}
  end
 
  def to_entry_type(person)
- session.new_entry(person, dependencies: deps).yield_self do |person_entry|
+ session.new_entry(person, dependencies: deps).then do |person_entry|
  options.dig(:export, :options, :internal_names) ? person_entry.mapped_entry : person_entry.external_entry
  end
  end
@@ -79,14 +80,37 @@ class Eco::API::UseCases::DefaultCases::ToCsvCase < Eco::API::Common::Loaders::U
  end
 
  def file
- @file ||= (options[:file] || options.dig(:export, :file, :name)).tap do |filename|
- unless filename
- session.logger.error("Destination file not specified")
- return false
- end
+ @file ||= out_filename.tap do |filename|
+ next if filename
+
+ log(:error) { "Destination file not specified" }
+ return false
  end
  end
 
+ def out_filename
+ return options_file unless options_folder?
+
+ File.join(options_file, "#{config.active_enviro}_#{OUT_FILENAME}.csv")
+ end
+
+ def in_folder(filename)
+ basename = File.basename(filename)
+ return basename unless options_folder?
+
+ File.join(options_file, basename)
+ end
+
+ def options_folder?
+ return false unless (value = options_file)
+
+ File.directory?(value)
+ end
+
+ def options_file
+ options[:file] || options.dig(:export, :file, :name)
+ end
+
  def by_schema
  people.group_by do |person|
  if (details = person.details)
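Per the changelog entry above ("`to-csv` case should accept a target folder as an argument"), the new `out_filename`/`in_folder` helpers route output into a folder whenever the configured file option points at a directory. The standalone sketch below mirrors that logic; the folder and environment names are made up.

```ruby
# Plain-Ruby sketch of the folder handling introduced in this hunk.
require 'fileutils'

OUT_FILENAME  = 'pm'
active_enviro = 'my_org'    # stand-in for config.active_enviro
file_option   = 'exports/'  # stand-in for options[:file] / the export file name
FileUtils.mkdir_p(file_option)

out =
  if File.directory?(file_option)
    File.join(file_option, "#{active_enviro}_#{OUT_FILENAME}.csv")
  else
    file_option
  end

puts out # => "exports/my_org_pm.csv"
```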
@@ -102,10 +102,22 @@ module Eco::API::UseCases::GraphQL::Samples
  # - `as_nodes_json`
  # 2. `org_tree` is native from `Eco::Data::Locations::NodeBase::CsvConvert`
  def file_nodes_list
- @file_nodes_list ||= as_nodes_json(org_tree(input_csv)).tap do |list|
+ @file_nodes_list ||= as_nodes_json(file_tree).tap do |list|
  log(:info) { "Converted input csv file to list of #{list.count} hash-nodes" }
  end
  end
+
+ def file_tree
+ org_tree(input_csv).then do |tree|
+ next tree if include_file_archived?
+
+ tree.active_tree
+ end
+ end
+
+ def include_file_archived?
+ options.dig(:input, :file, :include, :archived)
+ end
  end
  end
  end
@@ -14,11 +14,11 @@ module Eco::API::UseCases::GraphQL::Samples::Location::Service
  return @node_parser_block if instance_variable_defined?(:@node_parser_block)
  custom_before = custom_node_parser_before_block
  custom_after = custom_node_parser_block
- @node_parser_block = proc do |node_hash|
+ @node_parser_block = proc do |node_hash, node|
  node_hash.tap do
- custom_before&.call(node_hash)
- default_node_parse(node_hash)
- custom_after&.call(node_hash)
+ custom_before&.call(node_hash, node)
+ default_node_parse(node_hash, node)
+ custom_after&.call(node_hash, node)
  end
  end
  end
@@ -35,7 +35,7 @@ module Eco::API::UseCases::GraphQL::Samples::Location::Service
  end
 
  # Generic parsing
- def default_node_parse(node_hash)
+ def default_node_parse(node_hash, _node)
  class_ids = to_classification_ids(node_hash['classifications'])
  node_hash['classifications'] = class_ids
  end
@@ -24,12 +24,10 @@ module Eco::API::UseCases::GraphQL::Samples::Location::Service
  next key unless node_attr_maps.key?(key)
  node_attr_maps[key]
  end.tap do |node_hash|
- node_parser_block&.call(node_hash)
- yield(node_hash) if block_given?
+ node_parser_block&.call(node_hash, node)
+ yield(node_hash, node) if block_given?
  end
- end.tap do |list|
- list.each(&node_parser_block)
- end
+ end.compact.reject(&:empty?)
  end
  end
  end
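Tying the `TreeToList` changelog note to the two hunks above: custom node parser blocks are now handed the original `node` alongside the serialised `node_hash`. A self-contained sketch of a parser with the new two-argument signature (the `Node` struct and the `archived` field are illustrative):

```ruby
# Sketch: a custom input-node parser using the new (node_hash, node) signature.
Node = Struct.new(:id, :archived) do
  def archived?
    archived
  end
end

custom_parser = proc do |node_hash, node|
  # use data that is only reachable through the node object
  node_hash['archived'] = node.archived?
end

node      = Node.new('N1', true)
node_hash = { 'id' => 'N1' }
custom_parser.call(node_hash, node) # mirrors custom_after&.call(node_hash, node)
p node_hash # => {"id"=>"N1", "archived"=>true}
```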
@@ -15,7 +15,8 @@ module Eco
  def help(msg = nil, refine: nil)
  refinement = refine.is_a?(String)? " (containing: '#{refine}')" : ""
  msg ||= "The following are the available filters#{refinement}:"
- [msg].yield_self do |lines|
+
+ [msg].then do |lines|
  max_len = keys_max_len(@filters.keys)
  @filters.keys.sort.select do |key|
  !refine.is_a?(String) || key.include?(refine)
@@ -26,10 +27,19 @@ module Eco
  end.join("\n")
  end
 
+ def available(keys: false)
+ return @filters.keys if keys
+
+ @filters.keys.map do |key|
+ [key, @filters[key], @description[key]]
+ end
+ end
+
  # @param option [String] the command line option that activates this filter.
  # @param desc [String] description of the filter.
  def add(option, desc = nil, &block)
- raise "Missing block to define the filters builder" unless block_given?
+ raise ArgumentError, "Missing block to define the filters builder" unless block_given?
+
  callback = block
  [option].flatten.compact.each do |opt|
  @filters[opt] = callback
@@ -38,7 +48,7 @@ module Eco
  self
  end
  end
- def process(io:)
+ def process(*)
  raise "You need to override this method in child classes"
  end
  end
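The new `available` reader added above exposes the registered filters either as plain option keys or as `[option, callback, description]` triplets. A hedged usage sketch (how the `filters` registry instance is obtained depends on the session configuration and is not shown in this diff):

```ruby
# Sketch only: `filters` stands for an instance of the registry patched above.

# Just the option keys that have a filter registered:
filters.available(keys: true)
# => ["-some-filter", "-another-filter"] (illustrative values)

# Full [option, callback, description] triplets:
filters.available.each do |option, _callback, desc|
  puts "#{option}: #{desc}"
end
```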