eco-helpers 2.7.11 → 2.7.13
This diff shows the changes between publicly available package versions as released to one of the supported registries, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/CHANGELOG.md +17 -4
- data/Gemfile +2 -0
- data/eco-helpers.gemspec +17 -15
- data/lib/eco/api/common/session/logger.rb +12 -8
- data/lib/eco/api/session/batch/jobs.rb +10 -9
- data/lib/eco/api/usecases/default/people/{clean_unknown_tags_case.rb → amend/clean_unknown_tags_case.rb} +2 -2
- data/lib/eco/api/usecases/default/people/{clear_abilities_case.rb → amend/clear_abilities_case.rb} +2 -2
- data/lib/eco/api/usecases/default/people/{refresh_case.rb → amend/refresh_case.rb} +1 -1
- data/lib/eco/api/usecases/default/people/amend/reinvite_sync_case.rb +11 -0
- data/lib/eco/api/usecases/default/people/{reinvite_trans_case.rb → amend/reinvite_trans_case.rb} +2 -2
- data/lib/eco/api/usecases/default/people/amend/reinvite_trans_cli.rb +4 -0
- data/lib/eco/api/usecases/default/people/{restore_db_case.rb → amend/restore_db_case.rb} +1 -1
- data/lib/eco/api/usecases/default/people/amend.rb +11 -0
- data/lib/eco/api/usecases/default/people/migrate/cli/remap_tags_cli.rb +23 -0
- data/lib/eco/api/usecases/default/people/migrate/remap_tags_case.rb +269 -0
- data/lib/eco/api/usecases/default/people/migrate.rb +6 -0
- data/lib/eco/api/usecases/default/people/{analyse_people_case.rb → treat/analyse_people_case.rb} +1 -1
- data/lib/eco/api/usecases/default/people/{org_data_convert_case.rb → treat/org_data_convert_case.rb} +1 -1
- data/lib/eco/api/usecases/default/people/{supers_cyclic_identify_case.rb → treat/supers_cyclic_identify_case.rb} +2 -2
- data/lib/eco/api/usecases/default/people/{supers_hierarchy_case.rb → treat/supers_hierarchy_case.rb} +18 -17
- data/lib/eco/api/usecases/default/people/treat.rb +9 -0
- data/lib/eco/api/usecases/default/people/{change_email_case.rb → utils/change_email_case.rb} +1 -1
- data/lib/eco/api/usecases/default/people/{set_default_tag_case.rb → utils/set_default_tag_case.rb} +1 -1
- data/lib/eco/api/usecases/default/people/{switch_supervisor_case.rb → utils/switch_supervisor_case.rb} +1 -1
- data/lib/eco/api/usecases/default/people/{transfer_account_case.rb → utils/transfer_account_case.rb} +1 -1
- data/lib/eco/api/usecases/default/people/utils.rb +9 -0
- data/lib/eco/api/usecases/default/people.rb +4 -14
- data/lib/eco/api/usecases/default/utils/cli/split_csv_cli.rb +15 -0
- data/lib/eco/api/usecases/default/utils/split_csv_case.rb +34 -0
- data/lib/eco/api/usecases/default/utils.rb +12 -0
- data/lib/eco/api/usecases/default.rb +1 -0
- data/lib/eco/api/usecases/graphql/samples/location/command/service/tree_update.rb +1 -2
- data/lib/eco/api/usecases/graphql/samples/location/command/track_changed_ids.rb +3 -3
- data/lib/eco/csv/split.rb +114 -0
- data/lib/eco/csv/stream.rb +66 -0
- data/lib/eco/csv.rb +14 -0
- data/lib/eco/language/basic_logger.rb +7 -6
- data/lib/eco/version.rb +1 -1
- metadata +159 -120
- data/lib/eco/api/usecases/default/people/reinvite_sync_case.rb +0 -9
- data/lib/eco/api/usecases/default/people/reinvite_trans_cli.rb +0 -4
data/lib/eco/api/usecases/default/people/{supers_hierarchy_case.rb → treat/supers_hierarchy_case.rb}
RENAMED
@@ -1,10 +1,10 @@
-class Eco::API::UseCases::Default::People::SupersHierarchy < Eco::API::Common::Loaders::UseCase
+class Eco::API::UseCases::Default::People::Treat::SupersHierarchy < Eco::API::Common::Loaders::UseCase
   name "supers-hierarchy"
   type :export
 
   attr_reader :people
 
-  def main(people,
+  def main(people, _session, options, _usecase)
     options[:end_get] = false
     @people = people
 
@@ -22,20 +22,19 @@ class Eco::API::UseCases::Default::People::SupersHierarchy < Eco::API::Common::L
   end
 
   def save!(data)
-    ext
-
-
-
-
-
-
-
-
-
-
-
-
-    end
+    ext = File.extname(file).downcase.delete(".")
+
+    case ext
+    when "txt"
+      create_file(data, file: file, format: :txt)
+    when "html"
+      puts "html is still not supported"
+      exit(1)
+      # create_file(data, file: file, format: :html)
+    when "json"
+      puts "json is still not supported"
+      exit(1)
+      # create_file(data, file: file, format: :json)
     end
   end
 
@@ -50,7 +49,9 @@ class Eco::API::UseCases::Default::People::SupersHierarchy < Eco::API::Common::L
     raise "Required Hash tree structure. Given: #{tree.class}" unless tree.is_a?(Hash)
     "".tap do |str|
      tree.each do |entry, subtree|
-        str << "#{" " * lev}#{
+        str << "#{" " * lev}#{lev.positive? ? "+-#{lev}- " : ""}"
+        str << entry.name
+        str << " (#{entry.external_id}|#{entry.email}|#{entry.id})\n"
        str << tree_to_str(subtree, lev: lev + 1, format: format) unless !subtree || subtree.empty?
      end
    end
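The rewritten tree_to_str body above prints one person per line: `lev` leading spaces, a `+-<lev>- ` marker for nested levels, then the person's name and identifiers. With purely illustrative values, two levels of output would come out roughly as:

    John Smith (E001|john.smith@example.com|a1b2c3)
     +-1- Jane Doe (E002|jane.doe@example.com|d4e5f6)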
data/lib/eco/api/usecases/default/people/treat.rb
ADDED
@@ -0,0 +1,9 @@
+module Eco::API::UseCases::Default::People
+  module Treat
+  end
+end
+
+require_relative 'treat/analyse_people_case'
+require_relative 'treat/supers_hierarchy_case'
+require_relative 'treat/supers_cyclic_identify_case'
+require_relative 'treat/org_data_convert_case'
data/lib/eco/api/usecases/default/people.rb
CHANGED
@@ -9,17 +9,7 @@ module Eco
   end
 end
 
-require_relative 'people/
-require_relative 'people/
-require_relative 'people/
-require_relative 'people/
-require_relative 'people/reinvite_trans_case'
-require_relative 'people/reinvite_sync_case'
-require_relative 'people/org_data_convert_case'
-require_relative 'people/refresh_case'
-require_relative 'people/restore_db_case'
-require_relative 'people/set_default_tag_case'
-require_relative 'people/supers_hierarchy_case'
-require_relative 'people/supers_cyclic_identify_case'
-require_relative 'people/switch_supervisor_case'
-require_relative 'people/transfer_account_case'
+require_relative 'people/treat'
+require_relative 'people/utils'
+require_relative 'people/amend'
+require_relative 'people/migrate'
data/lib/eco/api/usecases/default/utils/cli/split_csv_cli.rb
ADDED
@@ -0,0 +1,15 @@
+class Eco::API::UseCases::Default::People::Utils::SplitCsv
+  class Cli < Eco::API::UseCases::Cli
+    desc "Splits an input file into multiple ones"
+
+    callback do |_sess, options, _case|
+      file = SCR.get_file(cli_name, required: true, should_exist: true)
+      options.deep_merge!(source: {file: file})
+    end
+
+    add_option("-max-rows", "The max count of rows of the output files") do |options|
+      count = SCR.get_arg("-max-rows", with_param: true)
+      options.deep_merge!(output: {file: {max_rows: count}})
+    end
+  end
+end
data/lib/eco/api/usecases/default/utils/split_csv_case.rb
ADDED
@@ -0,0 +1,34 @@
+class Eco::API::UseCases::Default::People::Utils::SplitCsv < Eco::API::Common::Loaders::UseCase
+  require_relative 'cli/split_csv_cli'
+
+  MAX_ROWS = 15_000
+
+  name "split-csv"
+  type :other
+
+  def main(*_args)
+    Eco::CSV.split(input_file, max_rows: max_rows).each do |file|
+      log(:info) {
+        "Generated file '#{file}'"
+      }
+    end
+  end
+
+  private
+
+  def input_file
+    options.dig(:source, :file)
+  end
+
+  def max_rows
+    max_rows_options || self.class::MAX_ROWS
+  end
+
+  def max_rows_options
+    return nil unless (num = options.dig(:output, :file, :max_rows))
+
+    num = num.to_i
+    num = nil if num.zero?
+    num
+  end
+end
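For reference, the split-csv use case above reads everything from the options tree that its Cli callbacks populate. A minimal sketch of that shape, with purely illustrative values:

    # Options consumed by SplitCsv (values are illustrative only)
    options = {
      source: {file: "people.csv"},        # set by the Cli callback from the input file argument
      output: {file: {max_rows: "5000"}}   # set by -max-rows; coerced with #to_i in max_rows_options
    }
    # When the option is absent or zero, max_rows falls back to MAX_ROWS (15_000).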
data/lib/eco/api/usecases/graphql/samples/location/command/service/tree_update.rb
CHANGED
@@ -69,8 +69,7 @@ class Eco::API::UseCases::GraphQL::Samples::Location
     end
 
     # Generates the file and pushes to the SFTP folder
-    # @note
-    #   This is only possible using a draft, which is not that desired.
+    # @note it also displays the mappings on screen
     # @note the SFTP push only happens if `remote_subfolder` is defined, via:
     #   1. `options.dig(:sftp, :remote_subfolder)`
     #   2. `REMOTE_FOLDER` const
data/lib/eco/api/usecases/graphql/samples/location/command/track_changed_ids.rb
CHANGED
@@ -25,10 +25,10 @@ class Eco::API::UseCases::GraphQL::Samples::Location
     end
 
     # Generates the file
-    # @note this method
-    #
+    # @note this method used to only work if we could run cummulative dry-runs to the back-end.
+    #   However, after RS P3, as mappings are one-to-one (not many-to-many per row),
+    #   we can just display the mappings in dry-run as well.
     def close_handling_tags_remap_csv
-      return false if simulate?
       if tags_remap_table.any?
         puts "REMAP LOC IDs CSV (content):"
         puts tags_remap_table
data/lib/eco/csv/split.rb
ADDED
@@ -0,0 +1,114 @@
+module Eco
+  class CSV
+    class Split
+      include Eco::Language::AuxiliarLogger
+
+      attr_reader :filename
+
+      def initialize(filename, max_rows:, **kargs)
+        raise ArgumentError, "File '#{filename}' does not exist" unless ::File.exist?(filename)
+        @filename = filename
+        @max_rows = max_rows
+        @params = kargs
+        init
+      end
+
+      # @yield [idx, file] a block to spot the filename
+      # @yieldparam idx [Integer] the number of the file
+      # @yieldparam file [String] the default name of the file
+      # @yieldreturn [String] the filename of the file `idx`.
+      #   - If `nil` it will create its own filename convention
+      # @return [Array<String>] names of the generated files
+      def call(&block)
+        stream.for_each do |row, ridx|
+          copy_row(row, ridx, &block)
+        end
+        out_files
+      ensure
+        puts "Close at row #{row_idx}"
+        @csv&.close
+      end
+
+      private
+
+      attr_reader :params
+      attr_reader :idx, :max_rows
+      attr_reader :headers, :row_idx
+
+      attr_accessor :exception
+
+      def copy_row(row, ridx, &block)
+        @headers ||= row.headers
+        @row_idx = ridx
+        current_csv(ridx, &block) << row.fields
+      end
+
+      def current_csv(ridx)
+        if split?(ridx) || @csv.nil?
+          puts "Split at row #{row_idx}"
+          @csv&.close
+          out_filename = generate_name(nidx = next_idx)
+          out_filename = yield(nidx, out_filename) if block_given?
+          @csv = ::CSV.open(out_filename, "w")
+          @csv << headers
+          out_files << out_filename
+        end
+        @csv
+      end
+
+      def split?(ridx)
+        ((ridx + 1) % max_rows).zero?
+      end
+
+      def next_idx
+        idx.tap { @idx += 1 }
+      end
+
+      def init
+        @idx ||= 0 # rubocop:disable Naming/MemoizedInstanceVariableName
+      end
+
+      def stream
+        @stream ||= Eco::CSV::Stream.new(filename, **params)
+      end
+
+      def generate_name(fidx)
+        File.join(input_dir, "#{input_name}_#{file_number(fidx)}#{input_ext}")
+      end
+
+      def file_number(num)
+        "#{zeroed}#{num}"[-5..]
+      end
+
+      def zeroed
+        "0" * 5
+      end
+
+      def out_files
+        @out_files ||= []
+      end
+
+      def input_name
+        @input_name ||= File.basename(input_basename, input_ext)
+      end
+
+      def input_ext
+        @input_ext ||= input_basename.split('.')[1..].join('.').then do |name|
+          ".#{name}"
+        end
+      end
+
+      def input_basename
+        @input_basename ||= File.basename(input_full_filename)
+      end
+
+      def input_dir
+        @input_dir = File.dirname(input_full_filename)
+      end
+
+      def input_full_filename
+        @input_full_filename ||= File.expand_path(filename)
+      end
+    end
+  end
+end
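Going by the documented yield parameters of #call above, Eco::CSV::Split could be used directly as follows; the filenames are illustrative and the block is optional:

    # Splits people.csv into chunks of 10_000 rows; without a block the default
    # "<name>_<5-digit index><ext>" naming convention is used.
    files = Eco::CSV::Split.new("people.csv", max_rows: 10_000).call do |idx, default_name|
      "people_part_#{idx}.csv" # the returned string becomes the output filename for chunk idx
    end
    puts files # => names of all generated files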
data/lib/eco/csv/stream.rb
ADDED
@@ -0,0 +1,66 @@
+module Eco
+  class CSV
+    class Stream
+      include Eco::Language::AuxiliarLogger
+
+      attr_reader :filename
+
+      def initialize(filename, **kargs)
+        raise ArgumentError, "File '#{filename}' does not exist" unless ::File.exist?(filename)
+        @filename = filename
+        @params = {
+          headers: true,
+          skip_blanks: true
+        }.merge(kargs)
+        init
+      end
+
+      def for_each(start_at_idx: 0)
+        raise ArgumentError, 'Expecting block, but not given.' unless block_given?
+
+        move_to_idx(start_at_idx)
+
+        yield(row, next_idx) while (self.row = csv.shift)
+      rescue StandardError => err
+        self.exception = err
+        raise
+      ensure
+        (fd.close; @fd = nil) if fd.is_a?(::File) # rubocop:disable Style/Semicolon
+        if exception
+          # Give some feedback if it crashes
+          msg = []
+          msg << "Last row IDX: #{idx}"
+          msg << "Last row content: #{row.to_h.pretty_inspect}"
+          puts msg
+          log(:debug) { msg.join("\n") }
+        end
+      end
+
+      def move_to_idx(start_at_idx)
+        next_idx while (idx < start_at_idx) && (self.row = csv.shift)
+      end
+
+      private
+
+      attr_reader :params
+      attr_reader :idx, :fd
+
+      attr_accessor :row, :exception
+
+      def next_idx
+        idx.tap { @idx += 1 }
+      end
+
+      # see https://dalibornasevic.com/posts/68-processing-large-csv-files-with-ruby
+      def csv
+        return @csv if instance_variable_defined?(:@csv)
+        @fd = ::File.open(filename, 'r')
+        @csv = Eco::CSV.new(fd, **params)
+      end
+
+      def init
+        @idx ||= 0 # rubocop:disable Naming/MemoizedInstanceVariableName
+      end
+    end
+  end
+end
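A minimal sketch of using the streaming reader on its own, assuming a file people.csv with a hypothetical Name column:

    # Iterates the CSV row by row (headers: true, skip_blanks: true by default),
    # resuming from row index 100 rather than the start of the file.
    Eco::CSV::Stream.new("people.csv").for_each(start_at_idx: 100) do |row, idx|
      puts "#{idx}: #{row['Name']}"
    end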
data/lib/eco/csv.rb
CHANGED
@@ -17,8 +17,22 @@ module Eco
         end
         parse(get_file_content(file, **params), **kargs)
       end
+
+      # @yield [idx, file] a block to spot the filename
+      # @yieldparam idx [Integer] the number of the file
+      # @yieldparam file [String] the default name of the file
+      # @yieldreturn [String] the filename of the file `idx`.
+      #   - If `nil` it will create its own filename convention
+      # @param filename [String] the orignal file
+      # @param max_rows [Integer] number of rows per file
+      # @see Eco::CSV::Split#call
+      def split(filename, max_rows:, **kargs, &block)
+        Eco::CSV::Split.new(filename, max_rows: max_rows, **kargs).call(&block)
+      end
     end
   end
 end
 
 require_relative 'csv/table'
+require_relative 'csv/stream'
+require_relative 'csv/split'
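With the new Eco::CSV.split entry point, callers do not need to touch Eco::CSV::Split directly. A minimal sketch, mirroring what the split-csv use case does (the path is illustrative):

    # Writes sibling files such as people_00000.csv, people_00001.csv, ...
    Eco::CSV.split("people.csv", max_rows: 15_000).each do |file|
      puts "Generated file '#{file}'"
    end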
data/lib/eco/language/basic_logger.rb
CHANGED
@@ -19,12 +19,12 @@ module Eco
       attr_writer :timestamp
       attr_reader :level
 
-      forward
+      forward(*LOG_LEVELS, *METHODS)
 
       def initialize(level: ::Logger::INFO, timestamp: false)
         @level = level
         self.timestamp = timestamp
-        loggers[:console] = ::Logger.new(
+        loggers[:console] = ::Logger.new($stdout).tap do |logger|
           logger.formatter = format_proc(console: true)
           logger.level = level
         end
@@ -41,7 +41,7 @@ module Eco
       private
 
       def forward(meth, *args, &block)
-        loggers.
+        loggers.each_value do |logger|
          logger.send(meth, *args, &block)
        end
      end
@@ -52,6 +52,7 @@ module Eco
 
       def console_timestamp(datetime)
         return nil unless timestamp?
+
         timestamp(datetime)
       end
 
@@ -62,10 +63,10 @@ module Eco
       end
 
       def format_proc(console: true, &block)
-        proc do |severity, datetime,
-          str_stamp
+        proc do |severity, datetime, _progname, msg|
+          str_stamp = console ? console_timestamp(datetime) : timestamp(datetime)
           "#{severity.to_s[0]}: #{str_stamp}#{msg}\n".tap do |formatted_msg|
-            block
+            block&.call(severity, datetime, msg, formatted_msg)
           end
         end
       end
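The `forward(*LOG_LEVELS, *METHODS)` line above replaces a bare `forward` call with an explicit list of delegated methods. The class-level macro itself is not part of this hunk; a hypothetical sketch of the pattern it implies, assuming LOG_LEVELS and METHODS are arrays of method names and relying on the private instance-level #forward shown above:

    # Hypothetical class-level helper: defines one instance method per name,
    # each relaying the call (and block) to every registered logger.
    def self.forward(*methods)
      methods.each do |meth|
        define_method(meth) do |*args, &block|
          forward(meth, *args, &block)
        end
      end
    end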
data/lib/eco/version.rb
CHANGED