eco-helpers 2.5.2 → 2.5.4
- checksums.yaml +4 -4
- data/CHANGELOG.md +62 -2
- data/eco-helpers.gemspec +2 -2
- data/lib/eco/api/common/loaders/use_case.rb +0 -2
- data/lib/eco/api/common/people/person_entry_attribute_mapper.rb +0 -2
- data/lib/eco/api/common/session/logger.rb +22 -77
- data/lib/eco/api/microcases/with_each.rb +0 -1
- data/lib/eco/api/organization/tag_tree.rb +64 -15
- data/lib/eco/api/session/config/tagtree.rb +32 -10
- data/lib/eco/api/session/config/workflow.rb +0 -1
- data/lib/eco/api/session/config.rb +6 -2
- data/lib/eco/api/session.rb +2 -2
- data/lib/eco/api/usecases/default_cases/abstract_policygroup_abilities_case.rb +2 -3
- data/lib/eco/api/usecases/default_cases/analyse_people_case.rb +2 -3
- data/lib/eco/api/usecases/default_cases/append_usergroups_case.rb +0 -1
- data/lib/eco/api/usecases/default_cases/change_email_case.rb +1 -2
- data/lib/eco/api/usecases/default_cases/clean_unknown_tags_case.rb +0 -5
- data/lib/eco/api/usecases/default_cases/clear_abilities_case.rb +2 -2
- data/lib/eco/api/usecases/default_cases/codes_to_tags_case.rb +5 -7
- data/lib/eco/api/usecases/default_cases/create_case.rb +0 -5
- data/lib/eco/api/usecases/default_cases/create_details_case.rb +0 -5
- data/lib/eco/api/usecases/default_cases/create_details_with_supervisor_case.rb +0 -5
- data/lib/eco/api/usecases/default_cases/csv_to_tree_case/helper.rb +1 -1
- data/lib/eco/api/usecases/default_cases/csv_to_tree_case.rb +0 -4
- data/lib/eco/api/usecases/default_cases/delete_sync_case.rb +2 -4
- data/lib/eco/api/usecases/default_cases/delete_trans_case.rb +2 -3
- data/lib/eco/api/usecases/default_cases/email_as_id_case.rb +0 -1
- data/lib/eco/api/usecases/default_cases/entries_to_csv_case.rb +0 -4
- data/lib/eco/api/usecases/default_cases/hris_case.rb +2 -3
- data/lib/eco/api/usecases/default_cases/new_email_case.rb +0 -2
- data/lib/eco/api/usecases/default_cases/new_id_case.rb +0 -2
- data/lib/eco/api/usecases/default_cases/org_data_convert_case.rb +0 -5
- data/lib/eco/api/usecases/default_cases/refresh_case.rb +0 -1
- data/lib/eco/api/usecases/default_cases/reinvite_sync_case.rb +1 -3
- data/lib/eco/api/usecases/default_cases/reinvite_trans_case.rb +2 -2
- data/lib/eco/api/usecases/default_cases/remove_account_sync_case.rb +1 -2
- data/lib/eco/api/usecases/default_cases/remove_account_trans_case.rb +2 -3
- data/lib/eco/api/usecases/default_cases/reset_landing_page_case.rb +1 -7
- data/lib/eco/api/usecases/default_cases/restore_db_case.rb +0 -10
- data/lib/eco/api/usecases/default_cases/set_default_tag_case.rb +0 -1
- data/lib/eco/api/usecases/default_cases/set_supervisor_case.rb +0 -1
- data/lib/eco/api/usecases/default_cases/supers_cyclic_identify_case.rb +2 -3
- data/lib/eco/api/usecases/default_cases/supers_hierarchy_case.rb +2 -3
- data/lib/eco/api/usecases/default_cases/switch_supervisor_case.rb +2 -4
- data/lib/eco/api/usecases/default_cases/tagtree_case.rb +0 -2
- data/lib/eco/api/usecases/default_cases/to_csv_case.rb +4 -5
- data/lib/eco/api/usecases/default_cases/to_csv_detailed_case.rb +0 -1
- data/lib/eco/api/usecases/default_cases/transfer_account_case.rb +0 -2
- data/lib/eco/api/usecases/default_cases/update_case.rb +0 -2
- data/lib/eco/api/usecases/default_cases/update_details_case.rb +0 -2
- data/lib/eco/api/usecases/default_cases/upsert_case.rb +0 -4
- data/lib/eco/api/usecases/graphql/base.rb +6 -18
- data/lib/eco/api/usecases/graphql/helpers/base/case_env.rb +15 -0
- data/lib/eco/api/usecases/graphql/helpers/base.rb +23 -0
- data/lib/eco/api/usecases/graphql/helpers/location/base.rb +87 -0
- data/lib/eco/api/usecases/graphql/helpers/location/command/result.rb +69 -0
- data/lib/eco/api/usecases/graphql/helpers/location/command/results.rb +126 -0
- data/lib/eco/api/usecases/graphql/helpers/location/command.rb +92 -0
- data/lib/eco/api/usecases/graphql/helpers/location.rb +7 -0
- data/lib/eco/api/usecases/graphql/helpers.rb +2 -1
- data/lib/eco/api/usecases/graphql/samples/location/command/dsl.rb +54 -0
- data/lib/eco/api/usecases/graphql/samples/location/command/results.rb +125 -0
- data/lib/eco/api/usecases/graphql/samples/location/command.rb +10 -0
- data/lib/eco/api/usecases/graphql/samples/location/dsl.rb +6 -0
- data/lib/eco/api/usecases/graphql/samples/location.rb +10 -0
- data/lib/eco/api/usecases/graphql/samples.rb +6 -0
- data/lib/eco/api/usecases/graphql/utils/sftp.rb +74 -0
- data/lib/eco/api/usecases/graphql/utils.rb +6 -0
- data/lib/eco/api/usecases/graphql.rb +3 -1
- data/lib/eco/api/usecases/ooze_cases/export_register_case.rb +0 -1
- data/lib/eco/api/usecases/ooze_samples/ooze_base_case.rb +0 -2
- data/lib/eco/api/usecases/ooze_samples/register_migration_case.rb +0 -2
- data/lib/eco/api/usecases/use_case.rb +2 -2
- data/lib/eco/cli/config/default/workflow.rb +2 -4
- data/lib/eco/cli/scripting/args_helpers.rb +0 -2
- data/lib/eco/csv/table.rb +39 -3
- data/lib/eco/data/files/helpers.rb +4 -3
- data/lib/eco/data/hashes/array_diff.rb +21 -61
- data/lib/eco/data/hashes/diff_meta.rb +52 -0
- data/lib/eco/data/hashes/diff_result.rb +36 -25
- data/lib/eco/data/hashes.rb +1 -0
- data/lib/eco/data/locations/convert.rb +92 -0
- data/lib/eco/data/locations/dsl.rb +35 -0
- data/lib/eco/data/locations/node_base/builder.rb +26 -0
- data/lib/eco/data/locations/node_base/csv_convert.rb +57 -0
- data/lib/eco/data/locations/node_base/parsing.rb +30 -0
- data/lib/eco/data/locations/node_base/serial.rb +26 -0
- data/lib/eco/data/locations/node_base/tag_validations.rb +52 -0
- data/lib/eco/data/locations/node_base/treeify.rb +150 -0
- data/lib/eco/data/locations/node_base.rb +48 -0
- data/lib/eco/data/locations/node_diff/accessors.rb +46 -0
- data/lib/eco/data/locations/node_diff/nodes_diff.rb +90 -0
- data/lib/eco/data/locations/node_diff/selectors.rb +20 -0
- data/lib/eco/data/locations/node_diff.rb +55 -0
- data/lib/eco/data/locations/node_level/builder.rb +6 -0
- data/lib/eco/data/locations/node_level/cleaner.rb +74 -0
- data/lib/eco/data/locations/node_level/parsing.rb +63 -0
- data/lib/eco/data/locations/node_level/serial.rb +37 -0
- data/lib/eco/data/locations/node_level.rb +153 -0
- data/lib/eco/data/locations/node_plain/builder.rb +6 -0
- data/lib/eco/data/locations/node_plain/parsing.rb +36 -0
- data/lib/eco/data/locations/node_plain/serial.rb +14 -0
- data/lib/eco/data/locations/node_plain.rb +31 -0
- data/lib/eco/data/locations.rb +13 -0
- data/lib/eco/data.rb +1 -0
- data/lib/eco/language/auxiliar_logger.rb +9 -1
- data/lib/eco/language/basic_logger.rb +74 -0
- data/lib/eco/language.rb +2 -1
- data/lib/eco/version.rb +1 -1
- metadata +45 -8
- data/lib/eco/api/usecases/default_cases/new_id_case0.rb +0 -14
- data/lib/eco/api/usecases/graphql/helpers/locations/commands.rb +0 -4
- data/lib/eco/api/usecases/graphql/helpers/locations.rb +0 -6
data/lib/eco/api/usecases/graphql/helpers/location/base.rb

@@ -0,0 +1,87 @@
module Eco::API::UseCases::GraphQL::Helpers::Location
  module Base
    include Eco::API::UseCases::GraphQL::Helpers::Base

    TAGTREE_BACKUP = 'cache/tagtree.json'.freeze

    attr_reader :current_tree
    attr_accessor :previous_tree

    # Back up the tree every time it is retrieved anew.
    def current_tree=(value)
      return current_tree if current_tree == value
      @current_tree = value
      backup_tree(current_tree)
      value
    end

    # At any moment we want to know what the live tree looks like.
    # @note it also does a backup
    # @return [Eco::API::Organization::TagTree] the latest tree (`current_tree`)
    def track_current_tree(tree)
      return if simulate?
      return unless tree
      latest_tree = tree if tree.is_a?(Eco::API::Organization::TagTree)
      if tree.respond_to?(:treeify)
        args = { enviro: session.enviro, id: tree.id, name: tree.name }
        latest_tree ||= Eco::API::Organization::TagTree.new(tree.treeify, **args)
      end
      latest_tree.tap do |_tree|
        next unless latest_tree
        @previous_tree = @current_tree
        self.current_tree = latest_tree
      end
    end

    # @param tree [Eco::API::Organization::TagTree, Hash, Array]
    # @return [Boolean] whether or not the backup was created
    def backup_tree(tree = current_tree || live_tree)
      return false if simulate?
      case tree
      when Eco::API::Organization::TagTree
        tagtree = tree.source
      when Hash, Array
        # that's all right
      else
        log(:error) {
          "Can't back up tagtree. Expecting TagTree, Hash or Array. Given: #{tree.class}"
        }
        return false
      end
      file = session.file_manager.save_json(tree, self.class::TAGTREE_BACKUP, :timestamp)
      logger.debug("Backed up tagtree saved locally to #{file}.")
      true
    end

    def tagtree_id
      %i[target_structure_id tagtree_id structure_id].find {|key| options.dig(:source, key)}
    end

    # Scopes the target structure `id`.
    # @note it is essential that the `id` is correctly identified.
    def target_structure_id
      @target_structure_id ||= tagtree_id
      @target_structure_id ||= self.class.const_get(:TARGET_STRUCTURE_ID) if self.class.const_defined?(:TARGET_STRUCTURE_ID)
      @target_structure_id ||= current_tree.id if current_tree.respond_to?(:id)
      return @target_structure_id if @target_structure_id
      msg = "Const TARGET_STRUCTURE_ID has not been defined, "
      msg << "nor options(:source, :structure_id). "
      msg << "Inferring active locations structure."
      log(:warn) { msg }
      if self.current_tree = session.live_tree
        @target_structure_id = current_tree.id
      end
    end

    # Retrieves the live tree only if `current_tree` hasn't been just retrieved.
    # @note that `target_structure_id` can retrieve the live tree (when `id` is not defined).
    #   By checking if the current_tree changed after calling `target_structure_id` we
    #   prevent unnecessary requests.
    def live_tree
      tree_init = current_tree
      target_id = target_structure_id
      return current_tree if current_tree != tree_init
      self.current_tree = session.live_tree(id: target_id, include_archived: true)
    end
  end
end
data/lib/eco/api/usecases/graphql/helpers/location/command/result.rb

@@ -0,0 +1,69 @@
module Eco::API::UseCases::GraphQL::Helpers::Location::Command
  class Result
    attr_reader :input, :result

    def initialize(input, result)
      @input = input
      @result = result
    end

    def command
      input.keys.first
    end

    def command_input_data
      input[command]
    end

    def node_id
      command_input_data[:nodeId]
    end

    def pending?
      !result || !success?
    end

    def applied?
      !pending?
    end

    def success?
      result&.ok
    end

    def error
      result&.error
    end

    def error?
      !!error
    end

    def error_msg
      return nil unless error?
      msg = ''
      msg << "(#{command} '#{node_id}') #{error.message}\n" || ''
      return msg if error.validationErrors.empty?
      msg << " • " + error.validationErrors.map do |err|
        err.message
      end.join("\n • ")
      msg
    end

    def command_result
      result&.command
    end

    def command_id
      return nil unless result
      command_result["id"]
    end

    def as_json
      {
        "request"  => input,
        "response" => result.doc
      }
    end
  end
end
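A minimal usage sketch of the `Result` wrapper above. The command hash and the `OpenStruct` standing in for a response entry are invented for illustration and are not part of the gem:

require 'ostruct'

# Hypothetical command input and its matching response entry.
input = { updateNodeId: { nodeId: "AU", newId: "AUS" } }
entry = OpenStruct.new(ok: true, error: nil, command: { "id" => "cmd-1" })

result = Eco::API::UseCases::GraphQL::Helpers::Location::Command::Result.new(input, entry)
result.command   # => :updateNodeId
result.node_id   # => "AU"
result.applied?  # => true
result.error?    # => false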
data/lib/eco/api/usecases/graphql/helpers/location/command/results.rb

@@ -0,0 +1,126 @@
module Eco::API::UseCases::GraphQL::Helpers::Location::Command
  class Results
    attr_reader :input, :response

    def initialize(input, response)
      @input = input
      @response = response
    end

    def stats
      msg = ''
      first_err = force? ? '' : "(stopped on node: '#{first_errored&.node_id}' - idx: #{first_errored_idx})"
      msg << " • Errored: #{errored.count} #{first_err}\n" if errored?
      last_okay = force? ? '' : "(last node done: '#{last_applied&.node_id}' - idx: #{last_applied_idx})"
      msg << " • Applied: #{applied.count} #{last_okay}\n" if some_applied?
      msg << " • Pending: #{pending.count}\n" if some_pending?
      msg
    end

    # Was this configured to force-continue on command error?
    def force?
      input[:force]
    end

    # # Offers a summary. If anything went wrong, it's `false`.
    # # If everything went right, it's `true`.
    # def ok?
    #   response&.ok
    # end

    # Overall errors (i.e. ID clashes between different structures)
    def error
      response&.error
    end

    def error?
      !!error
    end

    def success?
      !error? && results.all? {|r| r.success?}
    end

    def results
      @results ||= input_commands.zip(response_results).each_with_object([]) do |(i, r), results|
        results << Result.new(i, r)
      end
    end

    def count
      results.count
    end

    def input_result(input)
      results_by_input[input]
    end

    def input_idx(input)
      results.index(input_result(input))
    end

    def idx(result)
      results.index(result)
    end

    def errored
      @errored ||= results.select {|r| r.error?}
    end

    def errored?
      !errored.empty?
    end

    def first_errored
      errored.first
    end

    def first_errored_idx
      idx(first_errored)
    end

    def applied
      @applied ||= results.select {|r| r.applied?}
    end

    def applied?
      results.all? {|r| r.applied?}
    end

    def some_applied?
      applied.count > 0
    end

    def last_applied
      applied.last
    end

    def last_applied_idx
      idx(last_applied)
    end

    def pending
      @pending ||= results.select {|r| r.pending?}
    end

    def some_pending?
      !pending.empty?
    end

    private

    def results_by_input
      @results_by_input ||= results.each_with_object({}) do |r, h|
        h[r.input] = r
      end
    end

    def input_commands
      input[:commands]
    end

    def response_results
      response&.results || []
    end
  end
end
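A hedged sketch of how `Results` zips the request commands with the response entries. All values below are made up, with `OpenStruct` mimicking the response shape the class expects:

require 'ostruct'

input = {
  clientMutationId: "", id: "struct-1", force: false,
  commands: [{ createNode: { nodeId: "AU" } },
             { createNode: { nodeId: "NZ" } }]
}
response = OpenStruct.new(
  error:   nil,
  results: [OpenStruct.new(ok: true,  error: nil),
            OpenStruct.new(ok: false, error: nil)]
)

res = Eco::API::UseCases::GraphQL::Helpers::Location::Command::Results.new(input, response)
res.count          # => 2
res.applied.count  # => 1
res.pending.count  # => 1
res.success?       # => false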
data/lib/eco/api/usecases/graphql/helpers/location/command.rb

@@ -0,0 +1,92 @@
module Eco::API::UseCases::GraphQL::Helpers::Location
  module Command
    include Eco::Language::AuxiliarLogger
    include Eco::API::UseCases::GraphQL::Helpers::Location::Base

    DEFAULT_COMMANDS_PER_PAGE = 45
    DEFAULT_FORCE_CONTINUE = false

    # Prevents each request from timing out
    def commands_per_page
      if self.class.const_defined?(:COMMANDS_PER_PAGE)
        self.class::COMMANDS_PER_PAGE
      else
        DEFAULT_COMMANDS_PER_PAGE
      end
    end

    # Whether to stop or continue on command fail
    def force_continue?
      if self.class.const_defined?(:FORCE_CONTINUE)
        self.class::FORCE_CONTINUE
      else
        DEFAULT_FORCE_CONTINUE
      end
    end

    # Given the commands, it generates the input of the endpoint mutation.
    # @param commands [Array<Hash>]
    def input(commands, force_continue: force_continue?)
      {
        clientMutationId: "",
        id: target_structure_id,
        force: force_continue,
        commands: commands
      }
    end

    # @return see #with_sliced_input
    def sliced_batches(batch_input, size: commands_per_page, desc: :input, logging: true)
      dry_run_msg = simulate? ? '(dry-run) ' : ''

      if batch_input[:commands].empty?
        msg = "#{dry_run_msg}No commands for '#{desc}'."
        msg << " Skipping batch..." unless simulate?
        log(:info) { msg }
        return
      end

      done = 0
      with_sliced_input(batch_input, size: size) do |sliced_input, page, pages, count, total|
        msg = "#{dry_run_msg}Launching '#{desc}' request #{page} (of #{pages}) "
        msg << "with #{count} commands (done #{done} of #{total})..."
        logger.info { msg }

        response = nil
        unless simulate? && !options.dig(:requests, :backup)
          backup(sliced_input, type: "tree_update_#{desc}_request_#{page}_of_#{pages}")
        end

        if simulate?
          log(:info) { sliced_input.pretty_inspect } if page < 3
        else
          response = graphql.locationStructure.applyCommands(input: sliced_input)
          backup(response, type: "tree_update_#{desc}_response_#{page}_of_#{pages}")
        end

        done += count
        yield(sliced_input, response, page, pages, done, total) if block_given?
      end
    end

    # @param input_data [Hash] input for the endpoint `mutation.ApplyCommandsToLocationStructure`.
    # @return [Array<Array>] pairs of `sliced_input` and `response` thereof.
    def with_sliced_input(input_data, size: commands_per_page)
      comms = input_data[:commands]
      total = comms.count
      pages = (total.to_f / size).ceil.to_i
      page  = 1; out = []
      comms.each_slice(size) do |comms_slice|
        sliced_input = input_data.slice(:clientMutationId, :id).merge(commands: comms_slice)
        yield(sliced_input, page, pages, comms_slice.count, total).tap do |response|
          out.push([sliced_input, response])
          page += 1
        end
      end
      out
    end
  end
end

require_relative 'command/result'
require_relative 'command/results'
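The paging arithmetic used by `with_sliced_input` above, as a standalone sketch (the command hashes are hypothetical):

commands = (1..100).map { |i| { createNode: { nodeId: "node-#{i}" } } }
size     = 45                                  # DEFAULT_COMMANDS_PER_PAGE
pages    = (commands.count.to_f / size).ceil   # => 3
commands.each_slice(size).map(&:count)         # => [45, 45, 10]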
data/lib/eco/api/usecases/graphql/samples/location/command/dsl.rb

@@ -0,0 +1,54 @@
class Eco::API::UseCases::GraphQL::Samples::Location
  module Command::DSL
    include Eco::API::UseCases::GraphQL::Helpers::Location::Command
    include Eco::API::UseCases::GraphQL::Samples::Location::Command::Results

    # @example of implementation:
    #   def inputs(command_types, force_continue: force_continue?)
    #     {}.tap do |sequence|
    #       command_types.commands do |comms, stage|
    #         sequence[stage] = input(comms, force_continue: force_continue)
    #       end
    #     end.tap do |sequence|
    #       sequence.each do |stage, input|
    #         yield(input, stage) if block_given?
    #       end
    #     end
    #   end
    def inputs(*args, force_continue: force_continue?, **kargs, &block)
      msg = "You should implement this method in your child class.\n"
      msg << "It should yield the input Hash and the stage or descriptor."
      raise Eco::API::UseCases::GraphQL::Base::NotImplementedMethod, msg
    end

    # Main processor
    def process
      begin
        super if defined?(super)
      rescue Eco::API::UseCases::GraphQL::Base::NotImplementedMethod
      end

      self.error = false
      # this triggers a backup of the tagtree
      self.current_tree ||= live_tree

      inputs(force_continue: force_continue?) do |input, stage|
        results[stage] ||= []
        sliced_batches(input, desc: stage) do |sliced_input, response, page, pages, done, total|
          track_current_tree(response&.structure)
          page_results = nil
          results[stage] << (page_results = request_results_class.new(sliced_input, response))
          update_tags_remap_table(page_results, stage)
          break if self.error = page_errors?(page_results, page, pages, done, total, stage: stage)
        end

        break if error
      end
    rescue StandardError => e
      log(:error) { self.exception ||= e.patch_full_message }
      raise
    ensure
      rescued { self.tags_remap_csv_file = generate_tags_remap_csv }
    end
  end
end
data/lib/eco/api/usecases/graphql/samples/location/command/results.rb

@@ -0,0 +1,125 @@
class Eco::API::UseCases::GraphQL::Samples::Location
  # Logic to:
  # 1. Track down results and errors
  # 2. Create the tags remap csv table (batch design)
  module Command::Results
    include Eco::API::UseCases::GraphQL::Helpers::Location::Base

    attr_accessor :error, :exception
    attr_accessor :tags_remap_csv_file

    def rescued
      yield
    rescue StandardError => e
      log(:error) { self.exception ||= e.patch_full_message }
    end

    def request_results_class
      Eco::API::UseCases::GraphQL::Helpers::Location::Command::Results
    end

    # Capture results
    def results
      @results ||= {}
    end

    # The maps of tags to be used in batch remap tags
    # @return [Array<Array>] source/destination pairs of `Array<String>`
    def tags_remap_table
      @tags_remap_table ||= []
    end

    # Errors tracking/logging.
    # @note it gives feedback on where an error has occurred.
    # @param page_results [Eco::API::UseCases::GraphQL::Helpers::Location::Command::Results]
    # @param stage [Symbol] used when we launch an update in different phases (i.e. rename, move, etc.)
    # @return [Boolean] whether or not there was an error
    def page_errors?(page_results, page, pages, done, total, stage: nil)
      raise "Expecting CommandResults object. Given: #{page_results.class}" unless page_results.is_a?(request_results_class)
      stage_str = stage ? "'#{stage}' " : ''
      fingerprint = "#{stage_str}#{page} (of #{pages})"
      errored = false

      if page_results.error?
        errored = true
        log(:error) { "Error on #{fingerprint}: #{page_results.error.doc.pretty_inspect}" }
      end

      if page_results.applied?
        log(:info) { "Success on #{fingerprint}: #{done} (of #{total}) commands applied!" }
      elsif page_results.errored?
        errored = true
        msg = "Some command failed on #{fingerprint}:\n#{page_results.stats}"
        unless force_continue?
          first_errored = page_results.first_errored
          msg << "The error(s) - #{first_errored.error_msg}\n"
        end
        log(:error) { msg }
      end

      errored
    end

    # Based on commands that succeeded, and the batch stage, it tracks
    # the tag remaps that should be batched against existing pages.
    # @note
    #   1. This requires the `current_tree` locations structure to be available.
    #      - Fortunately this is being tracked, as it is returned as payload of the response.
    #   2. Based on the assumption that the order of the commands (stages) happens like this:
    #      - :unarchive, :id_name, :insert, :move, :archive
    #   3. The only update operations that generate tag remaps are `:id` (or `:id_name`) and `:move`.
    def update_tags_remap_table(results, stage)
      return false if [:unarchive, :archive].include?(stage)
      raise "Expecting CommandResults object. Given: #{results.class}" unless results.is_a?(request_results_class)
      results.applied.each do |result|
        case stage
        when :id, :id_name
          prev_id, curr_id = result.command_input_data.values_at(:nodeId, :newId)

          unless current_tree.tag?(curr_id)
            msg = "Node '#{prev_id}' was updated to '#{curr_id}', "
            msg << "but in current structure '#{curr_id}' is not present"
            log(:warn) { msg }
          end

          tags_remap_table << [[prev_id], [curr_id]]
        when :move
          node_id, parent_id = result.command_input_data.values_at(:nodeId, :parentId)
          prev_node = previous_tree.node(node_id)
          curr_node = current_tree.node(node_id)
          lost_tags = prev_node.path - curr_node.path
          new_tags  = curr_node.path - prev_node.path

          curr_parent = curr_node.parent.top? ? nil : curr_node.parent
          unless curr_parent&.id == parent_id
            msg = "Node '#{node_id}' was moved under '#{parent_id}', "
            msg << "but in current structure has parent '#{curr_parent&.id}'"
            log(:warn) { msg }
          end

          tags_remap_table << [lost_tags.unshift(node_id), new_tags.unshift(node_id)]
        end
      end
    end

    # Generates the final tags remap file
    def generate_tags_remap_csv(filename = "cache/remap_tags.csv")
      return nil if tags_remap_table.empty?
      timestamp_file(filename).tap do |file|
        CSV.open(file, 'w') do |csv|
          csv << ["source_tags", "destination_tags"]
          tags_remap_table.each do |(src_tags, dst_tags)|
            csv << [src_tags.join('|'), dst_tags.join('|')]
          end
        end
        log(:info) { "Generated file '#{file}'" }
      end
    end

    # Makes the file relative to the enviro
    def timestamp_file(filename, enviro_relative: true)
      filename = session.file_manager.dir.file(filename) if enviro_relative
      Eco::Data::Files.timestamp_file(filename)
    end
  end
end
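An illustrative sketch of the rows that `update_tags_remap_table` accumulates and `generate_tags_remap_csv` writes (node ids are invented):

# One :id_name change (AU -> AUS) and one :move (SYD moved from under NSW to under QLD).
tags_remap_table = [
  [["AU"],         ["AUS"]],
  [["SYD", "NSW"], ["SYD", "QLD"]]
]
tags_remap_table.map { |(src, dst)| [src.join('|'), dst.join('|')] }
# => [["AU", "AUS"], ["SYD|NSW", "SYD|QLD"]]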
data/lib/eco/api/usecases/graphql/samples/location/command.rb

@@ -0,0 +1,10 @@
# Use case to launch updates to a tagtree (i.e. upload new locations)
module Eco::API::UseCases::GraphQL::Samples
  class Location::Command < Eco::API::UseCases::GraphQL::Samples::Location
    name "location-command"

    require_relative 'command/results'
    require_relative 'command/dsl'
    include Eco::API::UseCases::GraphQL::Samples::Location::Command::DSL
  end
end
data/lib/eco/api/usecases/graphql/samples/location.rb

@@ -0,0 +1,10 @@
module Eco::API::UseCases::GraphQL
  class Samples::Location < Eco::API::UseCases::GraphQL::Base
    name "location-base"

    require_relative 'location/dsl'
    include Eco::API::UseCases::GraphQL::Samples::Location::DSL
  end
end

require_relative 'location/command'
data/lib/eco/api/usecases/graphql/utils/sftp.rb

@@ -0,0 +1,74 @@
module Eco::API::UseCases::GraphQL::Utils
  module Sftp
    include Eco::API::UseCases::GraphQL::Helpers::Base::CaseEnv

    def remote_subfolder
      nil
    end

    def remote_folder(subfolder = remote_subfolder)
      "#{sftp_config.remote_folder}/#{subfolder || ''}"
    end

    def sftp_group_id
      if self.class.const_defined?(:SFTP_GROUP)
        self.class.const_get(:SFTP_GROUP)
      elsif group_id = options.dig(:sftp, :group)
        group_id
      end
    end

    def upload(local_file, remote_folder: self.remote_folder, gid: sftp_group_id)
      return false unless local_file && File.exist?(local_file)
      dest_file = "#{remote_folder}/#{File.basename(local_file)}"
      res = sftp_session.upload!(local_file, dest_file)
      attrs = sftp_session.stat!(dest_file)
      if gid && gid != attrs.gid
        stat_res = sftp_session.setstat!(dest_file, {permissions: 0660, uid: attrs.uid, gid: gid})
      end
      logger.info("Uploaded '#{local_file}' (#{res})")
    end

    def ensure_remote_empty
      files = with_remote_files
      unless files.empty?
        msg = "There are still files in the remote folder that will be deleted: '#{remote_folder}':\n"
        msg += " • " + files.map do |file|
          file.longname
        end.join("\n • ") + "\n"
        session.prompt_user("Do you want to proceed to delete? (Y/n):", explanation: msg, default: "Y", timeout: 3) do |response|
          if response.upcase.start_with?("Y")
            files.each do |file|
              remote_full_path = to_remote_path(file.name)
              res = sftp_session.remove(remote_full_path)
              logger.info("Deleted remote file: '#{remote_full_path}' (#{res})")
            end
          end
        end
      end
    end

    def with_remote_files
      sftp.files(remote_folder).each do |remote_file|
        yield(remote_file) if block_given?
      end
    end

    def to_remote_path(file, subfolder: nil)
      remote_folder(subfolder) + "/" + file
    end

    def sftp_config
      session.config.sftp
    end

    def sftp_session
      sftp.sftp_session
    end

    def sftp
      session.sftp
    end
  end
end