eco-helpers 1.0.13 → 1.0.14

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. checksums.yaml +4 -4
  2. data/lib/eco/api/common/people/entry_factory.rb +3 -1
  3. data/lib/eco/api/common/people/person_attribute_parser.rb +33 -10
  4. data/lib/eco/api/common/people/person_entry.rb +1 -1
  5. data/lib/eco/api/common/people/person_entry_attribute_mapper.rb +1 -1
  6. data/lib/eco/api/common/people/person_factory.rb +5 -1
  7. data/lib/eco/api/common/session/environment.rb +7 -3
  8. data/lib/eco/api/common/session/mailer.rb +4 -0
  9. data/lib/eco/api/common/session/sftp.rb +4 -3
  10. data/lib/eco/api/error.rb +1 -0
  11. data/lib/eco/api/organization/presets_factory.rb +1 -1
  12. data/lib/eco/api/organization/tag_tree.rb +1 -1
  13. data/lib/eco/api/session.rb +119 -74
  14. data/lib/eco/api/session/batch.rb +23 -25
  15. data/lib/eco/api/session/batch/base_policy.rb +283 -0
  16. data/lib/eco/api/session/batch/errors.rb +17 -3
  17. data/lib/eco/api/session/batch/feedback.rb +112 -0
  18. data/lib/eco/api/session/batch/job.rb +90 -87
  19. data/lib/eco/api/session/batch/policies.rb +22 -0
  20. data/lib/eco/api/session/batch/request_stats.rb +195 -0
  21. data/lib/eco/api/session/batch/status.rb +66 -19
  22. data/lib/eco/api/session/config.rb +10 -0
  23. data/lib/eco/api/session/config/workflow.rb +1 -1
  24. data/lib/eco/api/usecases/default_cases/set_default_tag_case.rb +4 -3
  25. data/lib/eco/api/usecases/default_cases/switch_supervisor_case.rb +15 -10
  26. data/lib/eco/cli/config/default/filters.rb +3 -2
  27. data/lib/eco/cli/config/default/options.rb +12 -0
  28. data/lib/eco/cli/config/default/usecases.rb +6 -4
  29. data/lib/eco/cli/config/default/workflow.rb +3 -2
  30. data/lib/eco/cli/scripting/args_helpers.rb +1 -1
  31. data/lib/eco/version.rb +1 -1
  32. metadata +5 -1
data/lib/eco/api/session/batch/job.rb
@@ -2,7 +2,13 @@ module Eco
  module API
  class Session
  class Batch
- class Job < API::Common::Session::BaseSession
+ # @attr_reader name [String] the name of this `batch job`
+ # @attr_reader type [Symbol] a valid batch operation
+ # @attr_reader sets [Array<Symbol>] the parts of the person model this batch is supposed to affect
+ # @attr_reader usecase [Eco::API::UseCases::UseCase, nil] when provided: `usecase` that generated this `batch job`
+ # @attr_reader status [Eco::API::Session::Batch::Status] if launched: the `status` of the `batch`
+ # @attr_reader feedback [Eco::API::Session::Batch::Feedback] helper class for feedback and decision making
+ class Job < Eco::API::Common::Session::BaseSession
  @types = [:get, :create, :update, :delete]
  @sets = [:core, :details, :account]

@@ -19,20 +25,26 @@ module Eco
  end
  end

- attr_reader :name, :type, :status
+ attr_reader :name, :type, :sets
  attr_reader :usecase
+ attr_reader :status, :feedback

+ # @param e [Eco::API::Common::Session::Environment] requires a session environmen, as any child of `Eco::API::Common::Session::BaseSession`
+ # @param name [String] the name of this `batch job`
+ # @param type [Symbol] a valid batch operation
+ # @param usecase [Eco::API::UseCases::UseCase, nil] when provided: `usecase` that generated this `batch job`
  def initialize(e, name:, type:, sets:, usecase: nil)
  raise "A name is required to refer a job. Given: #{name}" if !name
- raise "Type should be one of #{self.class.types}. Given: #{type}" if !self.class.valid_type?(type)
- raise "Sets should be some of #{self.class.sets}. Given: #{sets}" if !self.class.valid_sets?(sets)
+ raise "Type should be one of #{self.class.types}. Given: #{type}" unless self.class.valid_type?(type)
+ raise "Sets should be some of #{self.class.sets}. Given: #{sets}" unless self.class.valid_sets?(sets)
  raise "usecase must be a Eco::API::UseCases::UseCase object. Given: #{usecase.class}" if usecase && !usecase.is_a?(Eco::API::UseCases::UseCase)
  super(e)

- @name = name
- @type = type
- @usecase = usecase
- @sets = [sets].flatten.compact
+ @name = name
+ @type = type
+ @sets = [sets].flatten.compact
+ @usecase = usecase
+ @feedback = Eco::API::Session::Batch::Feedback.new(job: self)
  reset
  end

@@ -44,23 +56,22 @@ module Eco
  @status = nil
  end

+ # @return [Boolean] was this `batch job` generated by a `usecase`? (`Eco::API::UseCases::UseCase`)
  def usecase?
  !!usecase
  end

+ # @return [Hash] options the root `usecase` is run with
  def options
  usecase?? usecase.options : {}
  end

- def signature
- "Batch job \"#{name}\" ['#{type.to_s.upcase}': #{sets_title}]"
- end
-
  def match?(type:, sets:)
  sets = [sets].flatten
- type == self.type && (sets.order == @sets.order)
+ type == self.type && (sets.order == self.sets.order)
  end

+ # @return [Boolean] has been this `batch job` launched?
  def pending?
  @pending
  end
@@ -80,23 +91,40 @@ module Eco
  unless unique && @queue_hash.key?(entry)
  @queue_hash[entry] = true
  @queue.push(entry)
- @callbacks[entry] = Proc.new if block_given?
+ @callbacks[entry] = Proc.new if block_given?
  end
  end
  end
  end

+ # Helper/shortcut to obtain a people object out of `input`
+ # @note if `input` is not provided, it will use `queue`
+ # @return [Eco::API::Organization::People]
  def people(input = @queue)
  Eco::API::Organization::People.new(input)
  end

+ # Processes the `queue` and, unless `simulate` is `true`, launches against the server:
+ # 1. if the entries of `queue` got pending _callbacks_ (delayed changes), it processes them
+ # 2. unless type == `:create`: if there's a defined `api_excluded` _callback_ it calls it (see `Eco::API::Session::Config::People#api_excluded`)
+ # 3. transforms the result to a `Eco::API::Organization::People` object
+ # 4. if there are `api policies` defined, it passes the entries through them in order (see `Eco::API::Session::Config#policies`)
+ # 5. at this point all the transformations have taken place...
+ # 6. only include the entries that, after all above, still hold pending changes (`!as_update.empty?`) to be launched as update
+ # 7. if we are **not** in `dry-run` (or `simulate`), launch the batch request against the server (see `Eco::API::Session::Batch#launch`)
+ # 8. next, it links the resulting batch `status` to this `Batch::Job` (see `Eco::API::Session::Batch::Status`)
+ # 9. the post launch kicks in, and for success requests, it consolidates the associated entries (see `Ecoportal::API::V1::Person#consolidate!`)
+ # 10. launches specific error handlers, if there were **errors** from the Server as a result of the `batch.launch`, and there are `Error::Handlers` defined
+ # 11. if we are **not** in `dry-run` (or `simulate`), it backs up the raw queries launched to the Server
  def launch(simulate: false)
- pqueue = processed_queue
- launch_feedback(pqueue, simulate ? 2500 : 800)
+ pqueue = processed_queue
+ requests = pqueue.map {|e| as_update(e)}
+
+ pre_checks(requests, simulate: simulate)

  if !simulate
  if pqueue.length > 0
- backup_update(pqueue)
+ backup_update(requests)
  @status = session.batch.launch(pqueue, method: type)
  @status.root = self
  end
@@ -111,36 +139,15 @@ module Eco

  private

+ def as_update(*args)
+ feedback.as_update(*args)
+ end
+
  def processed_queue
  @queue.each {|e| @callbacks[e].call(e) if @callbacks.key?(e) }
  apply_policies(api_included(@queue)).select {|e| !as_update(e).empty?}
  end

- def post_launch(queue: [], simulate: false)
- if !simulate && @status
- @status.queue.map do |entry|
- if @status.success?(entry)
- entry.consolidate! if entry.respond_to?(:consolidate!)
- #else # do not entry.reset! (keep track on changes still)
- end
- end
- # launch_error handlers
- handlers = session.config.error_handlers
- if @status.errors.any? && !handlers.empty?
- err_types = @status.errors.by_type
- handlers.each do |handler|
- if entries = err_types[handler.name]
- handler.launch(people: people(entries), session: session, options: options)
- end
- end
- end
- elsif simulate
- queue.map do |entry|
- entry.consolidate! if entry.respond_to?(:consolidate!)
- end
- end
- end
-
  # if there is a config definition to exclude entries
  # and the current batch is not a creation batch
  # - filter out excluded entries from the api update
@@ -151,69 +158,65 @@ module Eco
  end

  def apply_policies(pre_queue)
- #pre_queue.tap do |entries|
  people(pre_queue).tap do |entries|
  policies = session.config.policies
- unless policies.empty?
+ unless policies.empty? || options.dig(:skip, :api_policies)
  policies.launch(people: entries, session: session, options: options)
  end
  end
  end

- def as_update(entry)
- hash = entry if entry.is_a?(Hash)
- if only_ids?
- hash = entry.as_json.slice("id", "external_id", "email")
- else
- if entry.is_a?(Ecoportal::API::V1::Person)
- hash = entry.as_update
- if hfields = hash.dig("details", "fields")
- hash["details"]["fields"] = hfields.map do |fld|
- fld.merge!("alt_id" => entry.details.get_field(fld["id"]).alt_id) if entry.details
- end
- end
- end
-
- fields = hash&.dig('details', 'fields')
- fields&.map! { |fld| fld&.slice("id", "alt_id", "value") }
+ def batch_policy
+ unless options.dig(:skip, :batch_policy)
+ @batch_policy ||= session.config.batch_policies[self.type]
  end
- hash || {}
  end

- def only_ids?
- [:delete, :get].include?(type)
- end
+ def pre_checks(requests, simulate: false)
+ only_stats = options.dig(:feedback, :only_stats)
+ max_chars = simulate ? 2500 : 800
+ msg = feedback.generate(requests, max_chars: max_chars, only_stats: only_stats)
+ logger.info(msg)

- def sets_title
- "#{@sets.map {|s| s.to_s}.join(", ")}"
+ @request_stats = feedback.request_stats(requests)
+ if simulate && batch_policy && !batch_policy.compliant?(@request_stats)
+ logger.warn("Batch Policy Uncompliance: this and next batches will be aborted!")
+ logger.warn(batch_policy.uncompliance(@request_stats))
+ elsif batch_policy
+ # will throw an Exception if the policy request_stats is not compliant
+ batch_policy.validate!(@request_stats)
+ end
  end

- def launch_feedback(data, max_chars = 800)
- if !data || !data.is_a?(Enumerable) || data.empty?
- logger.warn("#{"*" * 20} Nothing for #{signature} so far :) #{"*" * 20}")
- return
- end
- header = ("*" * 20) + " #{signature} - Feedback Sample " + ("*" * 20)
- logger.info(header)
-
- sample_length = 1
- sample = data.slice(0, 20).map do |entry|
- update = as_update(entry)
- max_chars -= update.pretty_inspect.length
- sample_length += 1 if max_chars > 0
- update
+ def post_launch(queue: [], simulate: false)
+ if !simulate && @status
+ @status.queue.map do |entry|
+ if @status.success?(entry)
+ entry.consolidate! if entry.respond_to?(:consolidate!)
+ #else # do not entry.reset! (keep track on changes still)
+ end
+ end
+ # launch_error handlers
+ handlers = session.config.error_handlers
+ if @status.errors.any? && !handlers.empty?
+ err_types = @status.errors.by_type
+ handlers.each do |handler|
+ if entries = err_types[handler.name]
+ handler.launch(people: people(entries), session: session, options: options)
+ end
+ end
+ end
+ elsif simulate
+ queue.map do |entry|
+ entry.consolidate! if entry.respond_to?(:consolidate!)
+ end
  end
-
- logger.info("#{sample.slice(0, sample_length).pretty_inspect}")
- logger.info("#{type.to_s.upcase} length: #{data.length}")
- logger.info("*" * header.length)
  end

- def backup_update(data)
- data_body = data.map { |u| as_update(u) }
+ def backup_update(requests)
  dir = config.people.requests_folder
  file = File.join(dir, "#{type}_data.json")
- file_manager.save_json(data_body, file, :timestamp)
+ file_manager.save_json(requests, file, :timestamp)
  end

  end
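
The reworked `Job` above delegates request construction (`as_update`) to `Batch::Feedback` and replaces the old `launch_feedback` with `pre_checks` plus a batch-policy gate. A minimal usage sketch of that flow follows; it is not taken from the package: `environment`, `people_to_update` and the enqueue method name `add` are assumptions (the `def` line of the enqueue method is outside the hunks shown), and the changed attribute is purely illustrative.

# Hypothetical sketch, assuming the gem is loaded and `environment` is an
# Eco::API::Common::Session::Environment instance.
job = Eco::API::Session::Batch::Job.new(
  environment,
  name: "update-supervisors",   # label used in logs/feedback
  type: :update,                # must be one of Job.types
  sets: [:core]                 # parts of the person model this job may touch
)

people_to_update.each do |person|
  job.add(person) do |prs|      # enqueue with a delayed change; the block runs when the queue is processed
    prs.supervisor_id = nil     # illustrative change only
  end
end

job.launch(simulate: true)      # dry-run: logs request feedback/stats, skips the server call
job.launch                      # real run: pre-checks (batch policy), launches, consolidates and backs up requests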
data/lib/eco/api/session/batch/policies.rb (new file)
@@ -0,0 +1,22 @@
+ module Eco
+ module API
+ class Session
+ class Batch
+ class Policies < Eco::API::Session::Batch::BasePolicy
+ CORE_ATTRS = Eco::API::Session::Batch::RequestStats.core_attrs(stats: true)
+ ACCOUNT_ATTRS = Eco::API::Session::Batch::RequestStats.account_attrs(stats: true)
+ DETAILS_ATTRS = Eco::API::Session::Batch::RequestStats.details_attrs(stats: true)
+
+ core_model = {core: CORE_ATTRS}
+ account_model = {account: ACCOUNT_ATTRS}
+ details_model = {details: DETAILS_ATTRS}
+ submodel = core_model.merge(account_model).merge(details_model)
+ TOP_MODEL = Eco::API::Session::Batch::Job.types.each_with_object({}) {|t, h| h[t] = submodel}
+
+ self.model = TOP_MODEL
+ policy_attrs *model_attrs
+ end
+ end
+ end
+ end
+ end
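
`Policies` builds one identical submodel per batch type with `each_with_object` and then exposes every leaf as a policy attribute (`policy_attrs *model_attrs`). A standalone sketch of the resulting shape, with shortened stand-in attribute lists (the real ones come from `RequestStats.core_attrs/account_attrs/details_attrs` with `stats: true`):

# Illustrative only: the nested hash that `self.model = TOP_MODEL` ends up holding.
types    = [:get, :create, :update, :delete]          # mirrors Job.types declared in job.rb above
submodel = {
  core:    ["core", "supervisor_id"],                 # stand-in for RequestStats.core_attrs(stats: true)
  account: ["account_remove", "policy_group_ids"],    # stand-in for account_attrs(stats: true)
  details: ["details_remove", "fields"]               # stand-in for details_attrs(stats: true)
}
top_model = types.each_with_object({}) { |type, hash| hash[type] = submodel }
# => {get: {core: [...], account: [...], details: [...]}, create: {...}, update: {...}, delete: {...}}

With a model of that shape, `Job#pre_checks` (see job.rb above) can look up `session.config.batch_policies[type]` and call `compliant?`/`validate!` against the per-type request stats.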
data/lib/eco/api/session/batch/request_stats.rb (new file)
@@ -0,0 +1,195 @@
+ module Eco
+ module API
+ class Session
+ class Batch
+ # @attr_reader count [Integer] the total number of requests
+ # @attr_reader stats [Hash] plain `Hash` with the number of requests that include an attribute
+ class RequestStats
+ CORE_ATTRS = Eco::API::Common::People::PersonParser::CORE_ATTRS
+ ACCOUNT_ATTRS = (Eco::API::Common::People::PersonParser::ACCOUNT_ATTRS + ["permissions_custom"]).uniq
+ DETAILS_ATTRS = ["fields"]
+
+ class << self
+
+ def valid_type?(type)
+ Eco::API::Session::Batch::Job.valid_type?(type.to_sym)
+ end
+
+ def core_attrs(stats: false, all: false)
+ CORE_ATTRS.dup.tap do |attrs|
+ attrs.unshift("core") if stats || all
+ end
+ end
+
+ def account_attrs(stats: false, all: false)
+ ACCOUNT_ATTRS.dup.tap do |attrs|
+ if stats || all
+ attrs.unshift("account_remove")
+ attrs.unshift("account") if all
+ end
+ end
+ end
+
+ def details_attrs(stats: false, all: false)
+ DETAILS_ATTRS.dup.tap do |attrs|
+ if stats || all
+ attrs.unshift("details_remove")
+ attrs.unshift("details") if all
+ end
+ end
+ end
+
+ end
+
+ attr_reader :type, :count
+
+ def initialize(type:, requests: [])
+ raise "type should be one of #{Eco::API::Session::Batch::Job.types}. Given: #{type}" unless self.class.valid_type?(type.to_sym)
+ @type = type.to_sym
+ @count = requests && requests.length
+ @stats = build(requests)
+ end
+
+ def to_h
+ @stats
+ end
+
+ def core_attrs
+ @core_attrs ||= self.class.core_attrs
+ end
+
+ def account_attrs
+ @account_attrs ||= self.class.account_attrs
+ end
+
+ def details_attrs
+ @details_attrs ||= self.class.details_attrs
+ end
+
+ def attr(attr, percent: false, total: count)
+ i = @stats["#{attr}"]
+ return i unless percent
+ percentage(i, total: total)
+ end
+
+ def core(percent: false)
+ attr("core", percent: percent)
+ end
+
+ def account(percent: false)
+ attr("account", percent: percent)
+ end
+
+ def account_remove(percent: false)
+ attr("account_remove", percent: percent)
+ end
+
+ def details(percent: false)
+ attr("details", percent: percent)
+ end
+
+ def details_remove(percent: false)
+ attr("details_remove", percent: percent)
+ end
+
+ def fields_average
+ if (fields_num = attr("fields")) && (total = details) > 0
+ (fields_num.to_f / total.to_f).round(2)
+ end
+ end
+
+ def message(percent: false)
+ key_val_delimiter = ": "; attr_delimiter = " ++ "
+ pairs_to_line = Proc.new do |pairs|
+ pairs.map do |p|
+ [p.first.to_s, "#{p.last.to_s}" + (percent ? "%" : "")].join(key_val_delimiter)
+ end.join(attr_delimiter)
+ end
+
+ lines = []
+ lines << pairs_to_line.call(core_pairs(percent: percent))
+ lines << pairs_to_line.call(account_pairs(percent: percent))
+ lines << pairs_to_line.call(details_pairs(percent: percent))
+ lines.join("\n")
+ end
+
+ def model
+
+ end
+
+ private
+
+ def percentage(num, total: count)
+ total ||= count
+ if num
+ (num.to_f / total * 100).round(2)
+ end
+ end
+
+ def build(requests)
+ Hash.new(0).tap do |stats|
+ stats[type] = count
+ unless !requests || !requests.is_a?(Enumerable) || requests.empty?
+ requests.each_with_index do |request|
+ add_core_stats(stats, request || {})
+ add_account_stats(stats, request || {})
+ add_details_stats(stats, request || {})
+ end
+ end
+ end
+ end
+
+ def attrs_to_stat(stats, hash, attrs)
+ stats.tap do |st|
+ attrs.each {|attr| st[attr] += 1 if hash.key?(attr)}
+ end
+ end
+
+ def add_core_stats(stats, request)
+ stats["core"] += 1 if (request.keys & core_attrs).length > 0
+ attrs_to_stat(stats, request, core_attrs)
+ end
+
+ def add_account_stats(stats, request)
+ if request.key?("account")
+ stats["account"] += 1
+ stats["account_remove"] += 1 if !request["account"]
+ attrs_to_stat(stats, request["account"] || {}, account_attrs)
+ end
+ end
+
+ def add_details_stats(stats, request)
+ if request.key?("details")
+ stats["details"] += 1
+ stats["details_remove"] += 1 if !request["details"]
+ if fields = request.dig("details", "fields")
+ stats["fields"] += fields.length
+ end
+ end
+ end
+
+ def pairs(attrs, percent: false, total: count)
+ pairs = attrs.map do |a|
+ (v = attr(a, percent: percent, total: count)) > 0 ? [a, v] : nil
+ end.compact
+ end
+
+ def core_pairs(percent: false)
+ [["core", core(percent: percent)]] + pairs(core_attrs, percent: percent, total: core)
+ end
+
+ def account_pairs(percent: false)
+ aattrs = ["account_remove"] + account_attrs
+ [["account", account(percent: percent)]] + pairs(aattrs, percent: percent, total: account)
+ end
+
+ def details_pairs(percent: false)
+ details_pairs = [["details", details(percent: percent)]]
+ details_pairs += [["fields", fields_average]] if attr("fields") && fields_average
+ details_pairs += pairs(["details_remove"], percent: percent, total: details)
+ end
+ end
+ end
+ end
+ end
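
`RequestStats` simply counts how many request hashes touch each part of the person model. A small illustration grounded in the class above; the request hashes are made up, whereas in the gem they come from `Feedback#as_update`:

# Illustrative only: feeding two hand-written update requests through RequestStats.
requests = [
  {"id" => "1", "details" => {"fields" => [{"id" => "f1", "value" => "x"}]}},
  {"id" => "2", "account" => nil}           # a nil "account" counts towards "account_remove"
]
stats = Eco::API::Session::Batch::RequestStats.new(type: :update, requests: requests)

stats.count                           # => 2  (total number of requests)
stats.details                         # => 1  (requests carrying a "details" section)
stats.account_remove(percent: true)   # => 50.0 (same counter as a percentage of the total)
puts stats.message                    # one summary line each for core, account and details attributes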