eco-helpers 2.5.2 → 2.5.3

Files changed (104)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +46 -1
  3. data/eco-helpers.gemspec +2 -2
  4. data/lib/eco/api/common/loaders/use_case.rb +0 -2
  5. data/lib/eco/api/common/people/person_entry_attribute_mapper.rb +0 -2
  6. data/lib/eco/api/common/session/logger.rb +22 -77
  7. data/lib/eco/api/microcases/with_each.rb +0 -1
  8. data/lib/eco/api/organization/tag_tree.rb +64 -15
  9. data/lib/eco/api/session/config/tagtree.rb +32 -10
  10. data/lib/eco/api/session/config/workflow.rb +0 -1
  11. data/lib/eco/api/session/config.rb +6 -2
  12. data/lib/eco/api/session.rb +2 -2
  13. data/lib/eco/api/usecases/default_cases/abstract_policygroup_abilities_case.rb +2 -3
  14. data/lib/eco/api/usecases/default_cases/analyse_people_case.rb +2 -3
  15. data/lib/eco/api/usecases/default_cases/append_usergroups_case.rb +0 -1
  16. data/lib/eco/api/usecases/default_cases/change_email_case.rb +1 -2
  17. data/lib/eco/api/usecases/default_cases/clean_unknown_tags_case.rb +0 -5
  18. data/lib/eco/api/usecases/default_cases/clear_abilities_case.rb +2 -2
  19. data/lib/eco/api/usecases/default_cases/codes_to_tags_case.rb +5 -7
  20. data/lib/eco/api/usecases/default_cases/create_case.rb +0 -5
  21. data/lib/eco/api/usecases/default_cases/create_details_case.rb +0 -5
  22. data/lib/eco/api/usecases/default_cases/create_details_with_supervisor_case.rb +0 -5
  23. data/lib/eco/api/usecases/default_cases/csv_to_tree_case/helper.rb +1 -1
  24. data/lib/eco/api/usecases/default_cases/csv_to_tree_case.rb +0 -4
  25. data/lib/eco/api/usecases/default_cases/delete_sync_case.rb +2 -4
  26. data/lib/eco/api/usecases/default_cases/delete_trans_case.rb +2 -3
  27. data/lib/eco/api/usecases/default_cases/email_as_id_case.rb +0 -1
  28. data/lib/eco/api/usecases/default_cases/entries_to_csv_case.rb +0 -4
  29. data/lib/eco/api/usecases/default_cases/hris_case.rb +2 -3
  30. data/lib/eco/api/usecases/default_cases/new_email_case.rb +0 -2
  31. data/lib/eco/api/usecases/default_cases/new_id_case.rb +0 -2
  32. data/lib/eco/api/usecases/default_cases/org_data_convert_case.rb +0 -5
  33. data/lib/eco/api/usecases/default_cases/refresh_case.rb +0 -1
  34. data/lib/eco/api/usecases/default_cases/reinvite_sync_case.rb +1 -3
  35. data/lib/eco/api/usecases/default_cases/reinvite_trans_case.rb +2 -2
  36. data/lib/eco/api/usecases/default_cases/remove_account_sync_case.rb +1 -2
  37. data/lib/eco/api/usecases/default_cases/remove_account_trans_case.rb +2 -3
  38. data/lib/eco/api/usecases/default_cases/reset_landing_page_case.rb +1 -7
  39. data/lib/eco/api/usecases/default_cases/restore_db_case.rb +0 -10
  40. data/lib/eco/api/usecases/default_cases/set_default_tag_case.rb +0 -1
  41. data/lib/eco/api/usecases/default_cases/set_supervisor_case.rb +0 -1
  42. data/lib/eco/api/usecases/default_cases/supers_cyclic_identify_case.rb +2 -3
  43. data/lib/eco/api/usecases/default_cases/supers_hierarchy_case.rb +2 -3
  44. data/lib/eco/api/usecases/default_cases/switch_supervisor_case.rb +2 -4
  45. data/lib/eco/api/usecases/default_cases/tagtree_case.rb +0 -2
  46. data/lib/eco/api/usecases/default_cases/to_csv_case.rb +4 -5
  47. data/lib/eco/api/usecases/default_cases/to_csv_detailed_case.rb +0 -1
  48. data/lib/eco/api/usecases/default_cases/transfer_account_case.rb +0 -2
  49. data/lib/eco/api/usecases/default_cases/update_case.rb +0 -2
  50. data/lib/eco/api/usecases/default_cases/update_details_case.rb +0 -2
  51. data/lib/eco/api/usecases/default_cases/upsert_case.rb +0 -4
  52. data/lib/eco/api/usecases/graphql/base.rb +6 -18
  53. data/lib/eco/api/usecases/graphql/helpers/base.rb +31 -0
  54. data/lib/eco/api/usecases/graphql/helpers/location/base.rb +87 -0
  55. data/lib/eco/api/usecases/graphql/helpers/location/command/result.rb +69 -0
  56. data/lib/eco/api/usecases/graphql/helpers/location/command/results.rb +126 -0
  57. data/lib/eco/api/usecases/graphql/helpers/location/command.rb +84 -0
  58. data/lib/eco/api/usecases/graphql/helpers/location.rb +7 -0
  59. data/lib/eco/api/usecases/graphql/helpers.rb +2 -1
  60. data/lib/eco/api/usecases/graphql/samples/location/command/dsl.rb +54 -0
  61. data/lib/eco/api/usecases/graphql/samples/location/command/results.rb +125 -0
  62. data/lib/eco/api/usecases/graphql/samples/location/command.rb +10 -0
  63. data/lib/eco/api/usecases/graphql/samples/location/dsl.rb +6 -0
  64. data/lib/eco/api/usecases/graphql/samples/location.rb +10 -0
  65. data/lib/eco/api/usecases/graphql/samples.rb +6 -0
  66. data/lib/eco/api/usecases/graphql.rb +2 -1
  67. data/lib/eco/api/usecases/ooze_cases/export_register_case.rb +0 -1
  68. data/lib/eco/api/usecases/ooze_samples/ooze_base_case.rb +0 -2
  69. data/lib/eco/api/usecases/ooze_samples/register_migration_case.rb +0 -2
  70. data/lib/eco/api/usecases/use_case.rb +2 -2
  71. data/lib/eco/cli/config/default/workflow.rb +2 -4
  72. data/lib/eco/cli/scripting/args_helpers.rb +0 -2
  73. data/lib/eco/csv/table.rb +39 -3
  74. data/lib/eco/data/files/helpers.rb +1 -0
  75. data/lib/eco/data/hashes/array_diff.rb +12 -6
  76. data/lib/eco/data/hashes/diff_result.rb +1 -2
  77. data/lib/eco/data/locations/convert.rb +92 -0
  78. data/lib/eco/data/locations/dsl.rb +35 -0
  79. data/lib/eco/data/locations/node_base/builder.rb +26 -0
  80. data/lib/eco/data/locations/node_base/csv_convert.rb +57 -0
  81. data/lib/eco/data/locations/node_base/parsing.rb +30 -0
  82. data/lib/eco/data/locations/node_base/serial.rb +26 -0
  83. data/lib/eco/data/locations/node_base/tag_validations.rb +52 -0
  84. data/lib/eco/data/locations/node_base/treeify.rb +150 -0
  85. data/lib/eco/data/locations/node_base.rb +48 -0
  86. data/lib/eco/data/locations/node_level/builder.rb +6 -0
  87. data/lib/eco/data/locations/node_level/cleaner.rb +74 -0
  88. data/lib/eco/data/locations/node_level/parsing.rb +63 -0
  89. data/lib/eco/data/locations/node_level/serial.rb +37 -0
  90. data/lib/eco/data/locations/node_level.rb +156 -0
  91. data/lib/eco/data/locations/node_plain/builder.rb +6 -0
  92. data/lib/eco/data/locations/node_plain/parsing.rb +36 -0
  93. data/lib/eco/data/locations/node_plain/serial.rb +14 -0
  94. data/lib/eco/data/locations/node_plain.rb +34 -0
  95. data/lib/eco/data/locations.rb +12 -0
  96. data/lib/eco/data.rb +1 -0
  97. data/lib/eco/language/auxiliar_logger.rb +9 -1
  98. data/lib/eco/language/basic_logger.rb +74 -0
  99. data/lib/eco/language.rb +2 -1
  100. data/lib/eco/version.rb +1 -1
  101. metadata +37 -8
  102. data/lib/eco/api/usecases/default_cases/new_id_case0.rb +0 -14
  103. data/lib/eco/api/usecases/graphql/helpers/locations/commands.rb +0 -4
  104. data/lib/eco/api/usecases/graphql/helpers/locations.rb +0 -6
data/lib/eco/api/usecases/graphql/helpers/location/command/result.rb ADDED
@@ -0,0 +1,69 @@
+ module Eco::API::UseCases::GraphQL::Helpers::Location::Command
+   class Result
+     attr_reader :input, :result
+
+     def initialize(input, result)
+       @input = input
+       @result = result
+     end
+
+     def command
+       input.keys.first
+     end
+
+     def command_input_data
+       input[command]
+     end
+
+     def node_id
+       command_input_data[:nodeId]
+     end
+
+     def pending?
+       !result || !success?
+     end
+
+     def applied?
+       !pending?
+     end
+
+     def success?
+       result&.ok
+     end
+
+     def error
+       result&.error
+     end
+
+     def error?
+       !!error
+     end
+
+     def error_msg
+       return nil unless error?
+       msg = ''
+       msg << "(#{command} '#{node_id}') #{error.message}\n" || ''
+       return msg if error.validationErrors.empty?
+       msg << " • " + error.validationErrors.map do |err|
+         err.message
+       end.join("\n • ")
+       msg
+     end
+
+     def command_result
+       result&.command
+     end
+
+     def command_id
+       return nil unless result
+       command_result["id"]
+     end
+
+     def as_json
+       {
+         "request" => input,
+         "response" => result.doc
+       }
+     end
+   end
+ end
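For orientation: `Result` pairs a single command's input hash with its raw response entry and answers the basic questions about it (which command, which node, applied or pending, any error). A minimal usage sketch follows; it is not part of the diff, it assumes an eco-helpers (>= 2.5.3) environment with these helpers loaded, and the `updateNode` command key plus the OpenStruct stand-in for a parsed GraphQL result (responding to `ok`, `error`, `command`) are illustrative assumptions only.

    require 'ostruct'

    # Hypothetical stand-in for one parsed command result (responds to ok/error/command).
    response_item = OpenStruct.new(ok: true, error: nil, command: { "id" => "cmd-1" })

    # Hypothetical command input; the command key (:updateNode) is illustrative only.
    input = { updateNode: { nodeId: "SYDNEY", name: "Sydney Office" } }

    result = Eco::API::UseCases::GraphQL::Helpers::Location::Command::Result.new(input, response_item)
    result.command     # => :updateNode
    result.node_id     # => "SYDNEY"
    result.applied?    # => true   (a result exists and it is ok)
    result.pending?    # => false
    result.command_id  # => "cmd-1"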
data/lib/eco/api/usecases/graphql/helpers/location/command/results.rb ADDED
@@ -0,0 +1,126 @@
+ module Eco::API::UseCases::GraphQL::Helpers::Location::Command
+   class Results
+     attr_reader :input, :response
+
+     def initialize(input, response)
+       @input = input
+       @response = response
+     end
+
+     def stats
+       msg = ''
+       first_err = force? ? '' : "(stopped on node: '#{first_errored&.node_id}' - idx: #{first_errored_idx})"
+       msg << " • Errored: #{errored.count} #{first_err}\n" if errored?
+       last_okay = force? ? '' : "(last node done: '#{last_applied&.node_id}' - idx: #{last_applied_idx})"
+       msg << " • Applied: #{applied.count} #{last_okay}\n" if some_applied?
+       msg << " • Pending: #{pending.count}\n" if some_pending?
+       msg
+     end
+
+     # Was this configured to force-continue on command error?
+     def force?
+       input[:force]
+     end
+
+     # # Offers a summary. If anything went wrong, it's `false`.
+     # # If everything went right, it's `true`.
+     # def ok?
+     #   response&.ok
+     # end
+
+     # Overall errors (i.e. ID clashes between different structures)
+     def error
+       response&.error
+     end
+
+     def error?
+       !!error
+     end
+
+     def success?
+       !error? && results.all? {|r| r.success?}
+     end
+
+     def results
+       @results ||= input_commands.zip(response_results).each_with_object([]) do |(i, r), results|
+         results << Result.new(i, r)
+       end
+     end
+
+     def count
+       results.count
+     end
+
+     def input_result(input)
+       results_by_input[input]
+     end
+
+     def input_idx(input)
+       results.index(input_result(input))
+     end
+
+     def idx(result)
+       results.index(result)
+     end
+
+     def errored
+       @errored ||= results.select {|r| r.error?}
+     end
+
+     def errored?
+       !errored.empty?
+     end
+
+     def first_errored
+       errored.first
+     end
+
+     def first_errored_idx
+       idx(first_errored)
+     end
+
+     def applied
+       @applied ||= results.select {|r| r.applied?}
+     end
+
+     def applied?
+       results.all? {|r| r.applied?}
+     end
+
+     def some_applied?
+       applied.count > 0
+     end
+
+     def last_applied
+       applied.last
+     end
+
+     def last_applied_idx
+       idx(last_applied)
+     end
+
+     def pending
+       @pending ||= results.select {|r| r.pending?}
+     end
+
+     def some_pending?
+       !pending.empty?
+     end
+
+     private
+
+     def results_by_input
+       @results_by_input ||= results.each_with_object({}) do |r, h|
+         h[r.input] = r
+       end
+     end
+
+     def input_commands
+       input[:commands]
+     end
+
+     def response_results
+       response&.results || []
+     end
+   end
+ end
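`Results` zips the batch's command list with the per-command results and derives the applied/pending/errored buckets that `stats` reports. A hedged sketch follows (not part of the diff; again with OpenStruct stand-ins for the parsed GraphQL response and illustrative command keys, assuming an eco-helpers >= 2.5.3 environment):

    require 'ostruct'

    results_class = Eco::API::UseCases::GraphQL::Helpers::Location::Command::Results

    # Hypothetical batch input and parsed response (illustrative shapes only).
    batch_input = {
      force:    false,
      commands: [
        { updateNode: { nodeId: "SYD" } },
        { updateNode: { nodeId: "MEL" } }
      ]
    }
    response = OpenStruct.new(
      error:   nil,
      results: [
        OpenStruct.new(ok: true,  error: nil, command: { "id" => "c1" }),
        OpenStruct.new(ok: false, error: nil, command: nil)
      ]
    )

    batch = results_class.new(batch_input, response)
    batch.count           # => 2
    batch.applied.count   # => 1
    batch.pending.count   # => 1
    batch.errored?        # => false (the failed command carried no error object)
    puts batch.stats      # prints the Applied/Pending summary lines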
data/lib/eco/api/usecases/graphql/helpers/location/command.rb ADDED
@@ -0,0 +1,84 @@
+ module Eco::API::UseCases::GraphQL::Helpers::Location
+   module Command
+     include Eco::Language::AuxiliarLogger
+     include Eco::API::UseCases::GraphQL::Helpers::Location::Base
+
+     # Prevents each request from timing out
+     COMMANDS_PER_PAGE = 45
+     # Whether to stop or continue on command failure
+     FORCE_CONTINUE = false
+
+     def commands_per_page
+       self.class::COMMANDS_PER_PAGE
+     end
+
+     def force_continue?
+       self.class::FORCE_CONTINUE
+     end
+
+     # Given the commands, it generates the input for the endpoint mutation.
+     # @param commands [Array<Hash>]
+     def input(commands, force_continue: force_continue?)
+       {
+         clientMutationId: "",
+         id: target_structure_id,
+         force: force_continue,
+         commands: commands
+       }
+     end
+
+     # @return see #with_sliced_input
+     def sliced_batches(batch_input, size: commands_per_page, desc: :input, logging: true)
+       dry_run_msg = simulate? ? '(dry-run) ' : ''
+
+       if batch_input[:commands].empty?
+         msg = "#{dry_run_msg}No commands for '#{desc}'."
+         msg << " Skipping batch..." unless simulate?
+         log(:info) { msg }
+         return
+       end
+
+       done = 0
+       with_sliced_input(batch_input, size: size) do |sliced_input, page, pages, count, total|
+         msg = "#{dry_run_msg}Launching '#{desc}' request #{page} (of #{pages}) "
+         msg << "with #{count} commands (done #{done} of #{total})..."
+         logger.info { msg }
+
+         response = nil
+         unless simulate? && !options.dig(:requests, :backup)
+           backup(sliced_input, type: "tree_update_#{desc}_request_#{page}_of_#{pages}")
+         end
+
+         if simulate?
+           log(:info) { sliced_input.pretty_inspect } if page < 3
+         else
+           response = graphql.locationStructure.applyCommands(input: sliced_input)
+           backup(response, type: "tree_update_#{desc}_response_#{page}_of_#{pages}")
+         end
+
+         done += count
+         yield(sliced_input, response, page, pages, done, total) if block_given?
+       end
+     end
+
+     # @param input_data [Hash] input for the endpoint `mutation.ApplyCommandsToLocationStructure`.
+     # @return [Array<Array>] pairs of `sliced_input` and `response` thereof.
+     def with_sliced_input(input_data, size: commands_per_page)
+       comms = input_data[:commands]
+       total = comms.count
+       pages = (total.to_f / size).ceil.to_i
+       page = 1; out = []
+       comms.each_slice(size) do |comms_slice|
+         sliced_input = input_data.slice(:clientMutationId, :id).merge(commands: comms_slice)
+         yield(sliced_input, page, pages, comms_slice.count, total).tap do |response|
+           out.push([sliced_input, response])
+           page += 1
+         end
+       end
+       out
+     end
+   end
+ end
+
+ require_relative 'command/result'
+ require_relative 'command/results'
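The slicing in `with_sliced_input` is plain paging arithmetic: with `COMMANDS_PER_PAGE = 45`, a batch of 100 commands goes out as three requests of 45, 45 and 10 commands, each reusing the batch's `clientMutationId` and `id`. A standalone sketch of that paging (plain Ruby, no GraphQL calls; not part of the diff, and the node/structure ids are made up):

    # Standalone sketch of the paging done by #with_sliced_input (no API calls).
    def each_sliced_input(input_data, size: 45)
      commands = input_data[:commands]
      total    = commands.count
      pages    = (total.to_f / size).ceil
      commands.each_slice(size).with_index(1) do |slice, page|
        sliced_input = input_data.slice(:clientMutationId, :id).merge(commands: slice)
        yield(sliced_input, page, pages, slice.count, total)
      end
    end

    batch = {
      clientMutationId: "",
      id:               "structure-id",  # hypothetical structure id
      force:            false,
      commands:         Array.new(100) { |i| { updateNode: { nodeId: "N#{i}" } } }
    }

    each_sliced_input(batch) do |_sliced, page, pages, count, total|
      puts "request #{page} of #{pages}: #{count} commands (total #{total})"
    end
    # request 1 of 3: 45 commands (total 100)
    # request 2 of 3: 45 commands (total 100)
    # request 3 of 3: 10 commands (total 100)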
data/lib/eco/api/usecases/graphql/helpers/location.rb ADDED
@@ -0,0 +1,7 @@
+ module Eco::API::UseCases::GraphQL::Helpers
+   module Location
+   end
+ end
+
+ require_relative 'location/base'
+ require_relative 'location/command'
data/lib/eco/api/usecases/graphql/helpers.rb CHANGED
@@ -3,4 +3,5 @@ module Eco::API::UseCases::GraphQL
    end
  end

- require_relative 'helpers/locations'
+ require_relative 'helpers/base'
+ require_relative 'helpers/location'
data/lib/eco/api/usecases/graphql/samples/location/command/dsl.rb ADDED
@@ -0,0 +1,54 @@
+ class Eco::API::UseCases::GraphQL::Samples::Location
+   module Command::DSL
+     include Eco::API::UseCases::GraphQL::Helpers::Location::Command
+     include Eco::API::UseCases::GraphQL::Samples::Location::Command::Results
+
+     # @example of implementation:
+     #   def inputs(command_types, force_continue: force_continue?)
+     #     {}.tap do |sequence|
+     #       command_types.commands do |comms, stage|
+     #         sequence[stage] = input(comms, force_continue: force_continue)
+     #       end
+     #     end.tap do |sequence|
+     #       sequence.each do |stage, input|
+     #         yield(input, stage) if block_given?
+     #       end
+     #     end
+     #   end
+     def inputs(*args, force_continue: force_continue?, **kargs, &block)
+       msg = "You should implement this method in your child class.\n"
+       msg << "It should yield the input Hash and the stage or descriptor."
+       raise Eco::API::UseCases::GraphQL::Base::NotImplementedMethod, msg
+     end
+
+     # Main processor
+     def process
+       begin
+         super if defined?(super)
+       rescue Eco::API::UseCases::GraphQL::Base::NotImplementedMethod
+       end
+
+       self.error = false
+       # this triggers a backup of the tagtree
+       self.current_tree ||= live_tree
+
+       inputs(force_continue: force_continue?) do |input, stage|
+         results[stage] ||= []
+         sliced_batches(input, desc: stage) do |sliced_input, response, page, pages, done, total|
+           track_current_tree(response&.structure)
+           page_results = nil
+           results[stage] << (page_results = request_results_class.new(sliced_input, response))
+           update_tags_remap_table(page_results, stage)
+           break if self.error = page_errors?(page_results, page, pages, done, total, stage: stage)
+         end
+
+         break if error
+       end
+     rescue StandardError => e
+       log(:error) { self.exception ||= e.patch_full_message }
+       raise
+     ensure
+       rescued { self.tags_remap_csv_file = generate_tags_remap_csv }
+     end
+   end
+ end
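A child use case is expected to override `inputs` so that it yields one mutation input per stage, as outlined in the `@example` above; `process` then pages each input through `sliced_batches` and collects per-stage results. A hedged sketch of such a child class follows (not part of the diff; `command_types`, i.e. some source of per-stage command batches, is an assumption of this sketch):

    # Hedged sketch of a child class, modelled on the @example in dsl.rb.
    # `command_types` (an object yielding [commands, stage] pairs) is assumed.
    class MyLocationUpdate < Eco::API::UseCases::GraphQL::Samples::Location::Command
      name "my-location-update"

      def inputs(force_continue: force_continue?)
        sequence = {}
        command_types.commands do |comms, stage|
          sequence[stage] = input(comms, force_continue: force_continue)
        end
        sequence.each do |stage, input|
          yield(input, stage) if block_given?
        end
      end
    end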
data/lib/eco/api/usecases/graphql/samples/location/command/results.rb ADDED
@@ -0,0 +1,125 @@
+ class Eco::API::UseCases::GraphQL::Samples::Location
+   # Logic to:
+   #   1. Track down results and errors
+   #   2. Create the tags remap CSV table for batch re-tagging
+   module Command::Results
+     include Eco::API::UseCases::GraphQL::Helpers::Location::Base
+
+     attr_accessor :error, :exception
+     attr_accessor :tags_remap_csv_file
+
+     def rescued
+       yield
+     rescue StandardError => e
+       log(:error) { self.exception ||= e.patch_full_message }
+     end
+
+     def request_results_class
+       Eco::API::UseCases::GraphQL::Helpers::Location::Command::Results
+     end
+
+     # Capture results
+     def results
+       @results ||= {}
+     end
+
+     # The tag mappings to be used in a batch tags remap
+     # @return [Array<Array>] source/destination pairs of `Array<String>`
+     def tags_remap_table
+       @tags_remap_table ||= []
+     end
+
+     # Errors tracking/logging.
+     # @note it gives feedback on where an error has occurred.
+     # @param page_results [Eco::API::UseCases::GraphQL::Helpers::Location::Command::Results]
+     # @param stage [Symbol] used when we launch an update in different phases (i.e. rename, move, etc.)
+     # @return [Boolean] whether or not there was an error
+     def page_errors?(page_results, page, pages, done, total, stage: nil)
+       raise "Expecting CommandResults object. Given: #{page_results.class}" unless page_results.is_a?(request_results_class)
+       stage_str = stage ? "'#{stage}' " : ''
+       fingerprint = "#{stage_str}#{page} (of #{pages})"
+       errored = false
+
+       if page_results.error?
+         errored = true
+         log(:error) { "Error on #{fingerprint}: #{page_results.error.doc.pretty_inspect}" }
+       end
+
+       if page_results.applied?
+         log(:info) { "Success on #{fingerprint}: #{done} (of #{total}) commands applied!" }
+       elsif page_results.errored?
+         errored = true
+         msg = "Some command failed on #{fingerprint}:\n#{page_results.stats}"
+         unless force_continue?
+           first_errored = page_results.first_errored
+           msg << "The error(s) - #{first_errored.error_msg}\n"
+         end
+         log(:error) { msg }
+       end
+
+       errored
+     end
+
+     # Based on the commands that succeeded, and the batch stage, it tracks
+     # the tag remaps that should be batched against existing pages
+     # @note
+     #   1. This requires the `current_tree` locations structure to be available
+     #      - Fortunately this is being tracked, as it is returned as payload of the response.
+     #   2. Based on the assumption that the order of the commands (stages) happens like this:
+     #      - :unarchive, :id_name, :insert, :move, :archive
+     #   3. The only update operations that generate tag remaps are `:id` (or `:id_name`) and `:move`.
+     def update_tags_remap_table(results, stage)
+       return false if [:unarchive, :archive].include?(stage)
+       raise "Expecting CommandResults object. Given: #{results.class}" unless results.is_a?(request_results_class)
+       results.applied.each do |result|
+         case stage
+         when :id, :id_name
+           prev_id, curr_id = result.command_input_data.values_at(:nodeId, :newId)
+
+           unless current_tree.tag?(curr_id)
+             msg = "Node '#{prev_id}' was updated to '#{curr_id}', "
+             msg << "but in current structure '#{curr_id}' is not present"
+             log(:warn) { msg }
+           end
+
+           tags_remap_table << [[prev_id], [curr_id]]
+         when :move
+           node_id, parent_id = result.command_input_data.values_at(:nodeId, :parentId)
+           prev_node = previous_tree.node(node_id)
+           curr_node = current_tree.node(node_id)
+           lost_tags = prev_node.path - curr_node.path
+           new_tags = curr_node.path - prev_node.path
+
+           curr_parent = curr_node.parent.top? ? nil : curr_node.parent
+           unless curr_parent&.id == parent_id
+             msg = "Node '#{node_id}' was moved under '#{parent_id}', "
+             msg << "but in current structure has parent '#{curr_parent&.id}'"
+             log(:warn) { msg }
+           end
+
+           tags_remap_table << [lost_tags.unshift(node_id), new_tags.unshift(node_id)]
+         end
+       end
+     end
+
+     # Generates the final tags remap file
+     def generate_tags_remap_csv(filename = "cache/remap_tags.csv")
+       return nil if tags_remap_table.empty?
+       timestamp_file(filename).tap do |file|
+         CSV.open(file, 'w') do |csv|
+           csv << ["source_tags", "destination_tags"]
+           tags_remap_table.each do |(src_tags, dst_tags)|
+             csv << [src_tags.join('|'), dst_tags.join('|')]
+           end
+         end
+         log(:info) { "Generated file '#{file}'" }
+       end
+     end
+
+     # Makes the file relative to the enviro
+     def timestamp_file(filename, enviro_relative: true)
+       filename = session.file_manager.dir.file(filename) if enviro_relative
+       Eco::Data::Files.timestamp_file(filename)
+     end
+   end
+ end
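The generated remap file has one row per remapped node, with the source and destination tags pipe-joined, which is the layout `generate_tags_remap_csv` writes from `tags_remap_table`. A standalone sketch of that layout with made-up tag data (plain Ruby `csv`; not part of the diff):

    require 'csv'

    # Made-up remap pairs in the same shape as tags_remap_table:
    # one [[source_tags], [destination_tags]] pair per applied :id/:id_name or :move command.
    tags_remap_table = [
      [["OLD_ID"],               ["NEW_ID"]],
      [["NODE_X", "OLD_PARENT"], ["NODE_X", "NEW_PARENT"]]
    ]

    CSV.open("remap_tags_sample.csv", "w") do |csv|
      csv << ["source_tags", "destination_tags"]
      tags_remap_table.each do |(src_tags, dst_tags)|
        csv << [src_tags.join("|"), dst_tags.join("|")]
      end
    end
    # remap_tags_sample.csv then contains:
    #   source_tags,destination_tags
    #   OLD_ID,NEW_ID
    #   NODE_X|OLD_PARENT,NODE_X|NEW_PARENT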
data/lib/eco/api/usecases/graphql/samples/location/command.rb ADDED
@@ -0,0 +1,10 @@
+ # Use case to launch updates to a tagtree (i.e. upload new locations)
+ module Eco::API::UseCases::GraphQL::Samples
+   class Location::Command < Eco::API::UseCases::GraphQL::Samples::Location
+     name "location-command"
+
+     require_relative 'command/results'
+     require_relative 'command/dsl'
+     include Eco::API::UseCases::GraphQL::Samples::Location::Command::DSL
+   end
+ end
data/lib/eco/api/usecases/graphql/samples/location/dsl.rb ADDED
@@ -0,0 +1,6 @@
+ module Eco::API::UseCases::GraphQL::Samples
+   module Location::DSL
+     include Eco::API::UseCases::GraphQL::Helpers::Location::Base
+     include Eco::Data::Locations::DSL
+   end
+ end
data/lib/eco/api/usecases/graphql/samples/location.rb ADDED
@@ -0,0 +1,10 @@
+ module Eco::API::UseCases::GraphQL
+   class Samples::Location < Eco::API::UseCases::GraphQL::Base
+     name "location-base"
+
+     require_relative 'location/dsl'
+     include Eco::API::UseCases::GraphQL::Samples::Location::DSL
+   end
+ end
+
+ require_relative 'location/command'
data/lib/eco/api/usecases/graphql/samples.rb ADDED
@@ -0,0 +1,6 @@
+ module Eco::API::UseCases::GraphQL
+   module Samples
+   end
+ end
+
+ require_relative 'samples/location'
data/lib/eco/api/usecases/graphql.rb CHANGED
@@ -7,5 +7,6 @@ module Eco
    end
  end

- require_relative 'graphql/base'
  require_relative 'graphql/helpers'
+ require_relative 'graphql/base'
+ require_relative 'graphql/samples'
data/lib/eco/api/usecases/ooze_cases/export_register_case.rb CHANGED
@@ -5,7 +5,6 @@ class Eco::API::UseCases::OozeCases::ExportRegisterCase < Eco::API::UseCases::Oo
    batch_size 5

    def main(session, options, usecase)
-
      super(session, options, usecase) do
        # Save the File
        CSV.open(filename, "w") do |csv|
data/lib/eco/api/usecases/ooze_samples/ooze_base_case.rb CHANGED
@@ -5,7 +5,6 @@ class Eco::API::UseCases::OozeSamples::OozeBaseCase < Eco::API::Common::Loaders:

    include Eco::API::UseCases::OozeSamples::Helpers

-   attr_reader :session, :options, :usecase
    attr_reader :target

    SAVE_PATCH = "ooze_patch_update.json"
@@ -14,7 +13,6 @@ class Eco::API::UseCases::OozeSamples::OozeBaseCase < Eco::API::Common::Loaders:
    def main(session, options, usecase)
      options[:end_get] = false
      raise "You need to inherit from this class ('#{self.class}') and call super with a block" unless block_given?
-     @session = session; @options = options; @usecase = usecase
      @target = nil
      yield
    end
data/lib/eco/api/usecases/ooze_samples/register_migration_case.rb CHANGED
@@ -40,11 +40,9 @@ class Eco::API::UseCases::OozeSamples::RegisterMigrationCase < Eco::API::UseCase

    include Eco::API::UseCases::OozeSamples::HelpersMigration

-   attr_reader :session, :options
    attr_reader :csv

    def main(session, options, usecase, &block)
-     @session = session; @options = options
      if options[:dry_run]
        @csv = []
        super(session, options, usecase, &block)
data/lib/eco/api/usecases/use_case.rb CHANGED
@@ -75,10 +75,10 @@ module Eco
        return false unless callback_from_loader?
        use_case_self = self
        callback_self.instance_eval do
-         next unless self.is_a?(Eco::API::Common::Loaders::CaseBase)
-         # `self` is the use case itself (when used the Loader)
          @session = session
          @options = options
+         # `self` is the use case itself (when used the Loader)
+         next unless self.is_a?(Eco::API::Common::Loaders::CaseBase)
          @usecase = use_case_self
        end
        true
data/lib/eco/cli/config/default/workflow.rb CHANGED
@@ -1,8 +1,7 @@
  ASSETS.cli.config do |config|
    ASSETS.config.workflow do |wf|
-
-     io = nil
-     rescued = false
+     io = nil
+     rescued = false
      cases_with_input = nil
      cases_with_output = nil

@@ -183,6 +182,5 @@ ASSETS.cli.config do |config|
        end
        io
      end
-
    end
  end
data/lib/eco/cli/scripting/args_helpers.rb CHANGED
@@ -2,7 +2,6 @@ module Eco
    class CLI
      class Scripting
        module ArgsHelpers
-
          # @return [Array<String] the command line arguments.
          def argv
            @argv || ARGV
@@ -91,7 +90,6 @@ module Eco
          def file_exists?(filename)
            File.exists?(filename) || File.exists?(File.expand_path(filename))
          end
-
        end
      end
    end
data/lib/eco/csv/table.rb CHANGED
@@ -11,6 +11,25 @@ module Eco
          end
        end

+       # It ensures blank strings are set to `nil`
+       # @note assumes there are no repeated header names
+       # @return [Eco::CSV::Table]
+       def nil_blank_cells!
+         self.each do |row|
+           row.dup.each do |header, value|
+             value = value.to_s.strip
+             row[header] = value.empty? ? nil : value
+           end
+         end
+         self
+       end
+
+       # A new table from `self` where blank strings have been set to `nil`
+       # @return [Eco::CSV::Table]
+       def nil_blank_cells
+         self.class.new(self).nil_blank_cells!
+       end
+
        # @return [Hash] where keys are the groups and the values a `Eco::CSV::Table`
        def group_by(&block)
          rows.group_by(&block).transform_values do |rows|
@@ -111,11 +130,29 @@ module Eco
        end

        # Adds a new column at the end
+       # @note by default it adds it to the end.
        # @param header_name [String] header of the new column
+       # @param pos [Integer] index where to add the column (i.e. `0` for first)
        # @return [Eco::CSV::Table] with a new empty column
-       def add_column(header_name)
+       def add_column(header_name, pos: -1)
+         header_name = header_name.to_s.strip
+         raise ArgumentError, "header_name can't be blank" if header_name.empty?
          new_col = Array.new(length).unshift(header_name)
-         columns_to_table(columns.push(new_col))
+         columns_to_table(columns.insert(pos, new_col))
+       end
+
+       # @note by default it adds as a first column
+
+       # @param header_name [String] header of the new column
+       # @param pos [Integer] index where to add the column (i.e. `-1` for last)
+       # @return [Eco::CSV::Table] with a new column named `header_name` holding the row number
+       def add_index_column(header_name = 'idx', pos: 0)
+         header_name = header_name.to_s.strip
+         add_column(header_name, pos: pos).tap do |table|
+           table.each.with_index do |row, idx|
+             row[header_name] = idx + 2
+           end
+         end
        end

        # @return [Array<::CSV::Row>]
@@ -221,7 +258,6 @@ module Eco
            raise "Input type not supported. Given: #{data.class}"
          end
        end
-
      end
    end
  end
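Taken together, the new table helpers make it easy to normalise blank cells and to prepend a spreadsheet-style row index (the `idx + 2` accounts for the header row plus 1-based numbering). A hedged usage sketch, assuming eco-helpers >= 2.5.3 is installed and that `Eco::CSV::Table.new` accepts an array of rows with the header row first, as the input-type check elsewhere in this file suggests:

    require 'eco-helpers'  # assumption: the gem's default entry point

    rows = [
      ["name", "email"],
      ["Ana",  "ana@example.com"],
      ["  ",   ""]                  # blank cells
    ]
    table = Eco::CSV::Table.new(rows)   # assumption: array-of-rows input is supported

    clean   = table.nil_blank_cells           # non-destructive: blank strings become nil
    indexed = clean.add_index_column('row')   # prepends a 'row' column with the source row number
    indexed.headers                           # expected: ["row", "name", "email"]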
data/lib/eco/data/files/helpers.rb CHANGED
@@ -119,6 +119,7 @@ module Eco
      end

      class << self
+       include Files::InstanceMethods
        include Files::ClassMethods
      end
    end