eco-helpers 1.1.2 → 1.1.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 749e298478fcb12ab365f02ef46b78189c7208da89ec7e685b32ec77ba414fac
- data.tar.gz: e440ad822d6a825bf4185ee9489955fca000b52c85196d633a78c8d2bde59a1e
+ metadata.gz: a2efa416d986a9c212bc99ec69bf667778392737f0c635d54a18d19ff8bced9b
+ data.tar.gz: 7747bbfa3253f537d08b65148f8fc7418b0f4190d2754873daeb0b9fc5642b41
  SHA512:
- metadata.gz: 50de4fee6dd471172367acb718ebc83323ea36ac74ae53f96c7e3aa32cc531160b0f2fd5f62458576c39a349d74a88efdb478d3a658b95ec191c509e038a5fa4
- data.tar.gz: 741993e6d9445b8f55e611b2315299a477bbaaf6f928f4a87e8ab6f88576fadcec5032c9df590222dd10d2e8b569c193b8bc0e16ce97984f595cd155722e3993
+ metadata.gz: 60457d180398b01377aaeb6bced7c57fb24010da7acecbca8cbdac46a16d617759a23175e6b7be8c2b8eb4ec3834119e5173d8f2ff14212493a3db35298441ed
+ data.tar.gz: 5fab0a498910811d86f63419193aa02d1aebd52d4a1c63db1d11c173d9905f651e47c1082090ef0a83236f8c1d48d2c206572d25b1881b05dddc6cbc4be69c0e
@@ -35,7 +35,7 @@ module Eco
  @sftp ||= Eco::API::Common::Session::SFTP.new(enviro: self)
  end

- def s3upoader
+ def s3uploader
  @s3uploader ||= Eco::API::Common::Session::S3Uploader.new(enviro: self)
  end

@@ -7,6 +7,7 @@ module Eco
  module Session
  class Mailer

+ # @param enviro [Eco::API::Common::Session::Environment]
  def initialize (enviro:)
  raise "Required Environment object (enviro:). Given: #{enviro}" if enviro && !enviro.is_a?(Eco::API::Common::Session::Environment)
  @enviro = enviro
@@ -16,10 +17,10 @@ module Eco
  # @param to [String] destination email address
  # @param subject [String] subject of the email
  # @param body [String] `html` or plain text message
- def mail(to:, subject:, body:)
+ def mail(to: nil, subject:, body:)
  ses.send_email(
  destination: {
- to_addresses: [to].flatten,
+ to_addresses: [fetch_to(to)].flatten,
  },
  source: fetch_from,
  message: {
@@ -64,6 +65,10 @@ module Eco
  @enviro.config || {}
  end

+ def fetch_to(value = nil)
+ value || config.mailer.to
+ end
+
  def fetch_from(value = nil)
  value || config.mailer.from
  end
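
Note: with `to:` now optional, `mail` falls back to the configured default recipient. A minimal usage sketch, assuming an initialized `Eco::API::Common::Session::Environment` in `enviro` and a project config where `config.mailer.to` is set (the addresses are illustrative):

    mailer = Eco::API::Common::Session::Mailer.new(enviro: enviro)
    mailer.mail(subject: "Batch finished", body: "All good")          # delivered to config.mailer.to
    mailer.mail(to: "ops@example.com", subject: "Ping", body: "Hi")   # an explicit recipient still wins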
@@ -8,6 +8,7 @@ module Eco
  class S3Uploader
  attr_reader :prefix

+ # @param enviro [Eco::API::Common::Session::Environment]
  def initialize (enviro:)
  raise "Required Environment object (enviro:). Given: #{enviro}" if enviro && !enviro.is_a?(Eco::API::Common::Session::Environment)
  @enviro = enviro
@@ -15,6 +16,10 @@ module Eco
  @timestamp = Time.now.iso8601
  end

+ # Uploads `content` to S3 as `filename`
+ # @param filename [String] the name of the object to be created on S3
+ # @param content [String] the content to be uploaded
+ # @return [String] S3 path to the uploaded `filename` object
  def upload(filename, content)
  if obj = new_s3_object(filename)
  log_upload(obj) do
@@ -24,16 +29,24 @@ module Eco
  return full_path(obj)
  end

+ # Uploads a single file
+ # @param path [String] the target file to be uploaded
+ # @return [String] S3 path to the uploaded `path` file
  def upload_file(path)
  File.open(path, "rb") do |f|
  upload(File.basename(path), f)
  end
  end

- def upload_directory(path)
- path = File.expand_path(path)
- prefix = File.expand_path(File.join(path, ".."))
- Dir.glob(File.join(path, "**/*")).sort.map do |file|
+ # @note it will skip subfolders unless `recurse` is `true`
+ # @param path [String] the target directory to be uploaded
+ # @param recurse [Boolean] whether to descend into the folder structure (default: `false`)
+ # @return [Array<String>] S3 paths to all the uploaded files of the `path` directory
+ def upload_directory(path, recurse: false)
+ path = File.expand_path(path)
+ prefix = File.expand_path(File.join(path, ".."))
+ wildcard = recurse ? "**/*" : "*"
+ Dir.glob(File.join(path, wildcard)).sort.map do |file|
  next unless File.file?(file) # Skip directories
  key = file.sub(prefix,"").gsub(/\\/,"/").sub(/^\/+/,"")

@@ -43,11 +56,15 @@ module Eco
  end.compact
  end

- def path_to_link(path)
+ # @param path [String] a full path to a S3 object
+ # @return [String] `link` to the S3 object on console
+ def link(path)
  unless path.is_a?(Enumerable)
- return "https://s3.console.aws.amazon.com/s3/object/#{path.sub("s3://","")}?region=#{fetch_region}&tab=overview"
+ return nil unless path.is_a?(String)
+ return "https://s3.console.aws.amazon.com/s3/object/#{path.sub("s3://","")}?region=#{fetch_region}&tab=overview"
+ return link
  end
- path.map {|pth| path_to_link(pth)}
+ path.map {|pth| link(pth)}
  end

  private
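
Note: a minimal sketch of the renamed `link` helper and the new `recurse:` flag, assuming the same `enviro` environment object as above (the local path is illustrative):

    uploader = Eco::API::Common::Session::S3Uploader.new(enviro: enviro)
    paths    = uploader.upload_directory("exports", recurse: true)   # includes files in subfolders
    puts uploader.link(paths)                                        # one console link per uploaded object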
@@ -7,5 +7,6 @@ module Eco
  end
  end

+ require_relative 'version_patches/exception'
  require_relative 'version_patches/hash'
  require_relative 'version_patches/ecoportal_api'
@@ -0,0 +1,8 @@
+ class ::Exception
+ def patch_full_message
+ msg = []
+ msg << "#{backtrace.shift} #{message} (#{self.class.to_s})"
+ backtrace.each {|bt| msg << "#{" "*8}from #{bt}"}
+ msg.join("\n")
+ end
+ end
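
Note: `patch_full_message` mimics Ruby's `Exception#full_message`, rendering the first backtrace frame with the message and class, followed by the remaining frames indented with `from`. A minimal sketch (note that it shifts the first frame off `backtrace`, so it mutates the array):

    begin
      raise ArgumentError, "unexpected entry"
    rescue Exception => err
      puts err.patch_full_message
    end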
@@ -201,10 +201,14 @@ module Eco
  job_groups.launch(simulate: simulate)
  end

+ def summary
+ job_groups.summary
+ end
+
  # Sends an email
  # @see Eco::API::Common::Session::Mailer#mail
- def mail_to(**kargs)
- mail.mail(**kargs)
+ def mail(**kargs)
+ mailer.mail(**kargs)
  end

  # Uploads content into a file, a file or a directory to S3
@@ -214,17 +218,23 @@ module Eco
  # @param content [String] content to be uploaded (requires `file`)
  # @param file [String] name of the file to be uploaded
  # @param directory [String] name of source directory to be uploaded
- # @return [String, Array<String>] paths to S3
- def s3upload(content: nil, file: nil, directory: nil)
- if content && file
- s3uploader.upload(file, content)
+ # @param recurse [Boolean] used with `directory`: whether to descend into the folder structure (default: `false`)
+ # @param link [Boolean] whether to **return** _link(s)_ (`true`) or _path(s)_ (default: `false`)
+ # @return [String, Array<String>] either paths to S3 objects if `link` is `false`, or _links_ otherwise
+ def s3upload(content: nil, file: nil, directory: nil, recurse: false, link: false)
+ if content == :target
+ path = self.do.s3upload_targets
+ elsif content && file
+ path = s3uploader.upload(file, content)
  elsif file
- s3uploader.upload_file(file)
+ path = s3uploader.upload_file(file)
  elsif directory
- s3uploader.upload_directory(directory)
+ path = s3uploader.upload_directory(directory, recurse: recurse)
  else
  logger.error("To use Session.s3upload, you must specify either directory, file or content and file name")
  end
+ return path unless link
+ s3uploader.link(path)
  end

  private
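
Note: a minimal sketch of the extended `s3upload` options, assuming `session` is an eco-helpers session with S3 storage configured (paths and content are illustrative):

    session.s3upload(content: "log line", file: "run.log")           # single object from a string
    paths = session.s3upload(directory: "exports", recurse: true)    # every nested file of the folder
    links = session.s3upload(directory: "exports", link: true)       # console links instead of S3 paths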
@@ -167,8 +167,6 @@ module Eco
  iteration += 1
  done += slice.length
  end # next slice
-
- status.errors.print unless silent
  end
  end

@@ -38,13 +38,13 @@ module Eco
  status.logger
  end

- # Was there any **error** as a result of this batch?
+ # Was there any _Server_ (reply) **error** as a result of this batch?
  # @return [Boolean] `true` if any of the queried _entries_ got an unsuccessful `Ecoportal::API::Common::BatchResponse`
  def any?
  queue.any? {|query| !status[query].success?}
  end

- # Input entries that got launched against the server.
+ # Input entries that got an **error** response from the _Server_.
  # @raise [Exception] if there are elements of the final `queue` that did not get response
  # @note discards those that did not get _response_ from the Server (so those that were not queried)
  # - please, observe that this can only happen if there were repeated entries in the `source_queue`
@@ -63,7 +63,7 @@ module Eco
  raise msg
  end

- response.success? ? nil : query
+ response.success?? nil : query
  end.compact
  end

@@ -117,10 +117,18 @@ module Eco
  end
  end

+ def message
+ msgs = strs
+ if msgs.length > 0
+ "There were #{msgs.length} errors:\n" + msgs.join("\n")
+ else
+ "There were no errors for the current batch '#{method}'!! ;)"
+ end
+ end
+
  def print
  msgs = strs
  if msgs.length > 0
- logger.info()
  logger.error("There were #{msgs.length} errors:\n" + msgs.join("\n"))
  else
  logger.info("There were no errors for the current batch '#{method}'!! ;)")
@@ -34,7 +34,12 @@ module Eco
  job.options
  end

- # Slightly modifies the behaviour of `as_update`, so schema detail fields show the `alt_id`
+ # @see Eco::API::Session::Batch::Job#requests
+ def job_requests
+ job.requests
+ end
+
+ # Slightly modifies the behaviour of `Ecoportal::API::Common::BaseModel#as_update`, so schema detail fields show the `alt_id`
  # @note for better feedback
  # @param entry [Hash, Ecoportal::API::V1::Person, Ecoportal::API::Internal::Person]
  def as_update(entry)
@@ -63,32 +68,43 @@ module Eco
  hash || {}
  end

- def request_stats(data)
- @request_stats ||= Eco::API::Session::Batch::RequestStats.new(type: type, requests: data)
+ # @note if `requests` is not provided, it uses the last requests of the parent `Batch::Job` `job`
+ # @param requests [Enumerable<Hash>] raw requests as they would be sent to the _Server_
+ # @return [Eco::API::Session::Batch::RequestStats] the stats object of the current requests
+ def request_stats(requests = nil)
+ requests ||= job.requests
+ return @request_stats if @request_stats && requests == job.requests
+ @request_stats ||= Eco::API::Session::Batch::RequestStats.new(type: type, requests: requests)
  end

- def generate(requests, max_chars: 800, only_stats: false)
- msg = []
- if !requests || !requests.is_a?(Enumerable) || requests.empty?
- msg << "#{"*" * 20} Nothing for #{signature} so far :) #{"*" * 20}"
- else
- sample_length = 1
- sample = requests.slice(0, 20).map do |request|
- max_chars -= request.pretty_inspect.length
- sample_length += 1 if max_chars > 0
- request
- end
-
- header = "#{"*"*20} #{signature} #{only_stats ? "" : "- Feedback Sample"} #{"*"*20}"
- msg << header
- unless only_stats
- msg << "#{sample.slice(0, sample_length).pretty_inspect}"
+ # Generates the lines of feedback of the current requests
+ # @note if `requests` is not provided, it uses the last requests of the parent `Batch::Job` `job`
+ # @param requests [Enumerable<Hash>] raw requests as they would be sent to the _Server_
+ # @param max_chars [Integer] the max number of characters for the current feedback message
+ # @param only_stats [Boolean] whether the feedback should only include a brief summary of the stats
+ # @return [String] the feedback message
+ def generate(requests = nil, max_chars: 800, only_stats: false)
+ requests ||= job.requests
+ [].tap do |msg|
+ if !requests || !requests.is_a?(Enumerable) || requests.empty?
+ msg << "#{"*" * 20} Nothing for #{signature} so far :) #{"*" * 20}"
+ else
+ header = "#{"*"*20} #{signature} - Feedback Sample #{"*"*20}"
+ msg << header unless only_stats
+ unless only_stats
+ sample_length = 1
+ sample = requests.slice(0, 20).map do |request|
+ max_chars -= request.pretty_inspect.length
+ sample_length += 1 if max_chars > 0
+ request
+ end
+ msg << "#{sample.slice(0, sample_length).pretty_inspect}"
+ end
+ msg << "#{"+"*5} STATS (job '#{name}') +++ #{type.to_s.upcase} length: #{requests.length} #{"+"*5}"
+ msg << "#{request_stats(requests).message}"
+ msg << "*" * header.length unless only_stats
  end
- msg << "#{"+"*5} STATS ++ #{type.to_s.upcase} length: #{requests.length} #{"+"*5}"
- msg << "#{request_stats(requests).message}"
- msg << "*" * header.length
- end
- msg.join("\n")
+ end.join("\n")
  end

  private
@@ -66,16 +66,6 @@ module Eco
  usecase?? usecase.options : {}
  end

- def match?(type:, sets:)
- sets = [sets].flatten
- type == self.type && (sets.order == self.sets.order)
- end
-
- # @return [Boolean] has been this `batch job` launched?
- def pending?
- @pending
- end
-
  # Adds an entry(ies) to the job queue.
  # @param entry [Person, Enumerable<Person>] the person(s) we want to update, carrying the changes to be done.
  # @param unique [Boolean] specifies if repeated entries should be avoided in the queue.
@@ -97,6 +87,35 @@ module Eco
  end
  end

+ #def match?(type:, sets:)
+ # sets = [sets].flatten
+ # type == self.type && (sets.order == self.sets.order)
+ #end
+
+ # @return [Boolean] `true` if this `batch job` has not been launched yet
+ def pending?
+ @pending
+ end
+
+ # @note it requires `launch` to have been invoked first
+ # @raise [Exception] if 'launch' has not been invoked first
+ # @return [Enumerable<Hash>] the last requests that the queue will generate
+ def requests
+ raise "Method missuse. Firstly 'launch' should be invoked" unless instance_variable_defined?(:@requests)
+ @requests
+ end
+
+ # @see Eco::API::Session::Batch::Feedback#request_stats
+ def request_stats(requests = nil)
+ feedback.request_stats(requests || self.requests)
+ end
+
+ # @see Eco::API::Session::Batch::Status#errors?
+ # @return [Boolean] `true` if there were Server errors, `false` otherwise
+ def errors?
+ status && status.errors?
+ end
+
  # Helper/shortcut to obtain a people object out of `input`
  # @note if `input` is not provided, it will use `queue`
  # @return [Eco::API::Organization::People]
@@ -105,36 +124,71 @@ module Eco
  end

  # Processes the `queue` and, unless `simulate` is `true`, launches against the server:
- # 1. if the entries of `queue` got pending _callbacks_ (delayed changes), it processes them
- # 2. unless type == `:create`: if there's a defined `api_excluded` _callback_ it calls it (see `Eco::API::Session::Config::People#api_excluded`)
- # 3. transforms the result to a `Eco::API::Organization::People` object
- # 4. if there are `api policies` defined, it passes the entries through them in order (see `Eco::API::Session::Config#policies`)
- # 5. at this point all the transformations have taken place...
- # 6. only include the entries that, after all above, still hold pending changes (`!as_update.empty?`) to be launched as update
- # 7. if we are **not** in `dry-run` (or `simulate`), launch the batch request against the server (see `Eco::API::Session::Batch#launch`)
- # 8. next, it links the resulting batch `status` to this `Batch::Job` (see `Eco::API::Session::Batch::Status`)
- # 9. the post launch kicks in, and for success requests, it consolidates the associated entries (see `Ecoportal::API::V1::Person#consolidate!`)
- # 10. launches specific error handlers, if there were **errors** from the Server as a result of the `batch.launch`, and there are `Error::Handlers` defined
- # 11. if we are **not** in `dry-run` (or `simulate`), it backs up the raw queries launched to the Server
+ # 1. pre-processes the queue, obtaining the `requests`:
+ #    - if the entries of `queue` got pending _callbacks_ (delayed changes), it processes them
+ #    - unless type == `:create`: if there's a defined `api_excluded` _callback_ it calls it (see `Eco::API::Session::Config::People#api_excluded`)
+ #    - transforms the result to a `Eco::API::Organization::People` object
+ #    - if there are `api policies` defined, it passes the entries through them in order (see `Eco::API::Session::Config#policies`)
+ #      - this step is **skipped** if the option `-skip-api-policies` was used in the command line
+ #    - at this point all the transformations have taken place...
+ #    - only include the entries that, after all above, still hold pending changes (`!as_update.empty?`) to be launched as update
+ # 2. pre-launch checks against the `requests`:
+ #    - it generates `stats` (`Eco::API::Session::Batch::RequestStats`) out of the requests
+ #    - if there is a batch policy declared for the current job `type`, it checks compliance against `stats` (`Eco::API::Session::Batch::Policies`)
+ #      - a non-compliant batch will stop the current session by raising an `Exception`
+ #      - this step is **skipped** if the option `-skip-batch-policy` was used in the command line
+ # 3. if we are **not** in `dry-run` (or `simulate`), it:
+ #    - backs up the raw queries (`requests`) launched to the Server
+ #    - **launches the batch** request against the _Server_ (see `Eco::API::Session::Batch#launch`)
+ #    - links the resulting batch `status` to this `Batch::Job` (see `Eco::API::Session::Batch::Status`)
+ #    - prints any `errors` replied by the _Server_
+ # 4. the post launch kicks in, and:
+ #    - for successful requests, it consolidates the associated entries (see `Ecoportal::API::V1::Person#consolidate!`)
+ #    - launches specific error handlers, if there were **errors** from the Server as a result of the `batch.launch`, and there are `Error::Handlers` defined
+ # @return [Eco::API::Session::Batch::Status]
  def launch(simulate: false)
- pqueue = processed_queue
- requests = pqueue.map {|e| as_update(e)}
+ pqueue = processed_queue
+ @requests = pqueue.map {|e| as_update(e)}

  pre_checks(requests, simulate: simulate)

- if !simulate
+ unless simulate
  if pqueue.length > 0
  backup_update(requests)
- @status = session.batch.launch(pqueue, method: type)
- @status.root = self
+ session.batch.launch(pqueue, method: type).tap do |job_status|
+ @status = job_status
+ status.root = self
+ status.errors.print
+ end
  end
  end

- post_launch(queue: pqueue, simulate: simulate)
+ unless requests.empty?
+ logger.info("--- simulate mode (dry-run) -- job '#{name}' -- this would have launched #{type.to_s.upcase}") if simulate
+ end

- logger.info("Simulate: this would have launched: '#{type}'") if simulate
+ post_launch(queue: pqueue, simulate: simulate)
  @pending = false
- return @status
+ return status
+ end
+
+ # Provides a text summary of the current status
+ # @note if `launch` was not invoked, it specifies so
+ # @return [String] the summary
+ def summary
+ [].tap do |msg|
+ if pending?
+ msg << "PENDING - Batch #{type.to_s.upcase} - job '#{name}' - length: #{@queue.length}"
+ else
+ msg << feedback.generate(requests, only_stats: true)
+ if batch_policy && !batch_policy.compliant?(request_stats)
+ msg << "Batch Policy Uncompliance:"
+ msg << batch_policy.uncompliance(request_stats)
+ end
+
+ msg << status.errors.message unless !status
+ end
+ end.join("\n")
  end

  private
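
Note: a minimal sketch of the new introspection helpers on a batch job, assuming `job` is an `Eco::API::Session::Batch::Job` with a populated queue (the surrounding objects are illustrative):

    status = job.launch(simulate: false)
    job.requests              # raw payloads of the last launch (raises if `launch` was never invoked)
    job.errors?               # true when the server replied with errors
    puts job.summary          # stats, batch-policy compliance and server error messages in one string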
@@ -178,28 +232,29 @@ module Eco
  msg = feedback.generate(requests, max_chars: max_chars, only_stats: only_stats)
  logger.info(msg)

- @request_stats = feedback.request_stats(requests)
- if simulate && batch_policy && !batch_policy.compliant?(@request_stats)
+ # batch_policy
+ stats = request_stats(requests)
+ if simulate && batch_policy && !batch_policy.compliant?(stats)
  logger.warn("Batch Policy Uncompliance: this and next batches will be aborted!")
- logger.warn(batch_policy.uncompliance(@request_stats))
+ logger.warn(batch_policy.uncompliance(stats))
  elsif batch_policy
  # will throw an Exception if the policy request_stats is not compliant
- batch_policy.validate!(@request_stats)
+ batch_policy.validate!(stats)
  end
  end

  def post_launch(queue: [], simulate: false)
- if !simulate && @status
- @status.queue.map do |entry|
- if @status.success?(entry)
+ if !simulate && status
+ status.queue.map do |entry|
+ if status.success?(entry)
  entry.consolidate! if entry.respond_to?(:consolidate!)
  #else # do not entry.reset! (keep track on changes still)
  end
  end
  # launch_error handlers
  handlers = session.config.error_handlers
- if @status.errors.any? && !handlers.empty?
- err_types = @status.errors.by_type
+ if status.errors.any? && !handlers.empty?
+ err_types = status.errors.by_type
  handlers.each do |handler|
  if entries = err_types[handler.name]
  handler.launch(people: people(entries), session: session, options: options)
@@ -3,6 +3,7 @@ module Eco
  class Session
  class Batch
  class Jobs < API::Common::Session::BaseSession
+ include Enumerable
  attr_reader :name

  def initialize(e, name:)
@@ -16,6 +17,23 @@ module Eco
  @callbacks = {}
  end

+ def length
+ count
+ end
+
+ def empty?
+ count == 0
+ end
+
+ def each(params: {}, &block)
+ return to_enum(:each) unless block
+ items.each(&block)
+ end
+
+ def items
+ @jobs.values
+ end
+
  def [](name)
  @jobs[name]
  end
@@ -41,30 +59,48 @@ module Eco
  end

  def pending?
- @jobs.keys.any? {|key| @jobs[key].pending?}
+ any? {|job| job.pending?}
  end

  def launch(simulate: false)
- group_status = {}
- @jobs.each do |name, job|
+ each do |job|
  if job.pending?
- group_status[job] = job_status = job.launch(simulate: simulate)
- callback = @callbacks[job]
+ status[job] = job_status = job.launch(simulate: simulate)
+ callback = @callbacks[job]
  callback.call(job, job_status) if callback
  end
  end

- return group_status
+ return status
  end

  def find_jobs(type:)
- @jobs.each_with_object([]) do |(k, jb), jbs|
- if jb.type == type
- jbs.push(jb)
+ each_with_object([]) do |job, jbs|
+ jbs.push(job) if job.type == type
+ end
+ end
+
+ def status
+ if block_given?
+ status.each do |job, job_status|
+ yield(job, job_status)
  end
+ self
+ else
+ @jobs_status ||= {}
  end
  end

+ def errors?
+ any? {|job| job.errors?}
+ end
+
+ def summary
+ [].tap do |msg|
+ map {|job| msg << job.summary}
+ end.join("\n")
+ end
+
  end
  end
  end
@@ -20,6 +20,8 @@ module Eco
  end
  end

+ include Enumerable
+
  def initialize(e)
  super(e)
  reset
@@ -31,6 +33,23 @@ module Eco
  @callbacks = {}
  end

+ def length
+ count
+ end
+
+ def empty?
+ count == 0
+ end
+
+ def each(params: {}, &block)
+ return to_enum(:each) unless block
+ items.each(&block)
+ end
+
+ def items
+ @groups.values
+ end
+
  def [](name)
  @groups[name]
  end
@@ -56,28 +75,48 @@ module Eco
  end

  def pending?
- @groups.any? {|group| group.pending?}
+ any? {|group| group.pending?}
  end

  def launch(simulate: false)
- groups_status = {}
- @order.each.with_index do |group, idx|
+ @order.each_with_index do |group, idx|
  if group.pending?
- groups_status[group] = group_status = group.launch(simulate: simulate)
+ status[group] = group_status = group.launch(simulate: simulate)
  callback = @callbacks[group]
  callback.call(group, group_status) if callback
  self.class.counter(DELAY_BETWEEN_GROUPS) if !simulate && idx < @order.length - 1
  end
  end
- return groups_status
+ return status
  end

  def find_jobs(type:)
- @groups.each_with_object([]) do |(k, gr), jbs|
- jbs.concat(gr.find_jobs(type: type))
+ each_with_object([]) do |group, jbs|
+ jbs.concat(group.find_jobs(type: type))
  end
  end

+ def status
+ if block_given?
+ status.each do |group, group_status|
+ yield(group, group_status)
+ end
+ self
+ else
+ @groups_status ||= {}
+ end
+ end
+
+ def errors?
+ any? {|group| group.errors?}
+ end
+
+ def summary
+ [].tap do |msg|
+ map {|group| msg << group.summary}
+ end.join("\n")
+ end
+
  end
  end
  end
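
Note: `Batch::Jobs` and `Batch::JobsGroups` now include `Enumerable`, so jobs and groups can be walked directly. A minimal sketch, assuming `session` exposes its `job_groups` (they back the new `Session#summary` delegation):

    session.job_groups.each {|group| puts group.summary}
    puts session.summary                                        # delegates to job_groups.summary
    warn "there were batch errors" if session.job_groups.errors?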
@@ -72,6 +72,12 @@ module Eco
  @errors ||= Eco::API::Session::Batch::Errors.new(status: self)
  end

+ # @see Eco::API::Session::Batch::Errors#any?
+ # @return [Boolean] `true` if there were Server errors, `false` otherwise
+ def errors?
+ errors.any?
+ end
+
  # Get the associated `response` of an input entry object `key`
  # @param key [Integer, Hash, Ecoportal::API::V1::Person, Ecoportal::API::Internal::Person] these are the **index options**:
  # 1. `Integer`: index/position of the entry in the final `queue`
@@ -4,6 +4,14 @@ module Eco
  class Config
  class Mailer < BaseConfig

+ def to=(value)
+ self["to"] = value
+ end
+
+ def to
+ self["to"]
+ end
+
  def from=(value)
  self["from"] = value
  end
@@ -44,6 +44,14 @@ module Eco
  self["region"]
  end

+ def target_files=(value)
+ self["target_files"] = [value].flatten
+ end
+
+ def target_files
+ self["target_files"]
+ end
+
  def target_directories=(value)
  self["target_directories"] = [value].flatten
  end
@@ -11,7 +11,8 @@ module Eco
  usecases: nil,
  launch_jobs: nil,
  post_launch: {usecases: nil, launch_jobs: nil},
- end: nil
+ end: nil,
+ close: nil
  }

  class << self
@@ -67,21 +68,38 @@ module Eco
  @after = []
  end

+ # Is this stage still pending (i.e. has it not run yet)?
+ # @note it does **not** include _sub-stages_ that run `before`
+ # @return [Boolean] `true` if it has not run yet, `false` otherwise
  def pending?
  @pending
  end

+ # Do not run this stage!
  def skip!
  @skip = true
  @pending = false
  end

+ # Has this stage been marked to be skipped?
+ # @return [Boolean] depends on this order:
+ #   - `true` if `skip!` was called
+ #   - `false` if the current _stage_ is `root?` (the top stage of the hierarchy)
+ #   - `true` if its parent stage is to be skipped
  def skip?
  return @skip if instance_variable_defined?(:@skip)
  return false if root?
  @_parent.skip?
  end

+ # Used at **configuration** time **to configure** the _workflow_ of the target (sub)stage `key`
+ # @note if a `block` is provided it will `yield` the target stage immediately
+ # @param key [Symbol, nil] cases:
+ #   - if `key` is not provided, it targets the _current stage_
+ #   - if `key` is provided, it targets the specific _sub-stage_
+ # @yield [stage_workflow] further _workflow_ configuration `for` the target stage `key`
+ # @yieldparam stage_workflow [Eco::API::Session::Config::Workflow] the _target stage_ referred by `key`
+ # @return [Eco::API::Session::Config::Workflow] the current stage object (to ease chaining).
  def for(key = nil)
  raise "A block should be given." unless block_given?
  if !key
@@ -92,6 +110,16 @@ module Eco
  self
  end

+ # Used at **configuration** time **to define** the **behaviour** of the target (sub)stage `key`
+ # @note if a `block` is provided it will **not** `yield` the target stage immediately, but when the _workflow_ reaches the stage
+ # @param key [Symbol, nil] cases:
+ #   - if `key` is not provided, it targets the _current stage_
+ #   - if `key` is provided, it targets the specific _sub-stage_
+ # @yield [stage_workflow, io] the behaviour of the target stage `key` when the _workflow_ reaches it
+ # @yieldparam stage_workflow [Eco::API::Session::Config::Workflow] the _target stage_ referred by `key`
+ # @yieldparam io [Eco::API::UseCases::BaseIO] the input/output object carried throughout the _workflow_
+ # @yieldreturn io [Eco::API::UseCases::BaseIO] the input/output object carried throughout the _workflow_
+ # @return [Eco::API::Session::Config::Workflow] the current stage object (to ease chaining).
  def on(key = nil, &block)
  raise "A block should be given." unless block
  if !key
@@ -102,6 +130,30 @@ module Eco
  self
  end

+ # When there is an `Exception`, you might have defined some `callback` to do something with it (e.g. register it, email it)
+ # @yield [exception, io] the `callback` to do something with an `Exception` raised within this _workflow_ stage
+ # @yieldparam exception [Exception] the exception object that was raised
+ # @yieldparam io [Eco::API::UseCases::BaseIO] the input/output object carried throughout the _workflow_
+ # @yieldreturn io [Eco::API::UseCases::BaseIO] the input/output object carried throughout the _workflow_
+ # @return [Eco::API::Session::Config::Workflow] the current stage object (to ease chaining).
+ def rescue(&block)
+ return @rescue unless block
+ @rescue = block
+ self
+ end
+
+ # Used at **configuration** time to **add** `callbacks` to run **before** the `on` _callback_ of the (sub)stage `key` is actually `run`
+ # @note
+ #   - it will **not** `yield` it immediately, but when the _workflow_ reaches the target stage
+ #   - in this case, you can define multiple `callbacks`
+ # @param key [Symbol, nil] cases:
+ #   - if `key` is not provided, it targets the _current stage_
+ #   - if `key` is provided, it targets the specific _sub-stage_
+ # @yield [stage_workflow, io] one of the things to do **before** the `on` _callback_ of the (sub)stage `key` is actually `run`
+ # @yieldparam stage_workflow [Eco::API::Session::Config::Workflow] the _target stage_ referred by `key`
+ # @yieldparam io [Eco::API::UseCases::BaseIO] the input/output object carried throughout the _workflow_
+ # @yieldreturn io [Eco::API::UseCases::BaseIO] the input/output object carried throughout the _workflow_
+ # @return [Eco::API::Session::Config::Workflow] the current stage object (to ease chaining).
  def before(key = nil, &block)
  raise "A block should be given." unless block
  if !key
@@ -112,6 +164,18 @@ module Eco
  self
  end

+ # Used at **configuration** time to **add** `callbacks` to run **after** the `on` _callback_ of the (sub)stage `key` has actually `run`
+ # @note
+ #   - it will **not** `yield` it immediately, but when the _workflow_ reaches the target stage
+ #   - in this case, you can define multiple `callbacks`
+ # @param key [Symbol, nil] cases:
+ #   - if `key` is not provided, it targets the _current stage_
+ #   - if `key` is provided, it targets the specific _sub-stage_
+ # @yield [stage_workflow, io] one of the things to do **after** the `on` _callback_ of the (sub)stage `key` is actually `run`
+ # @yieldparam stage_workflow [Eco::API::Session::Config::Workflow] the _target stage_ referred by `key`
+ # @yieldparam io [Eco::API::UseCases::BaseIO] the input/output object carried throughout the _workflow_
+ # @yieldreturn io [Eco::API::UseCases::BaseIO] the input/output object carried throughout the _workflow_
+ # @return [Eco::API::Session::Config::Workflow] the current stage object (to ease chaining).
  def after(key = nil, &block)
  raise "A block should be given." unless block
  if !key
@@ -122,29 +186,58 @@ module Eco
  self
  end

+ # Used at run time to **execute the workflow** of the (sub)stage `key`
+ # @note if a `block` is **not** provided:
+ #   - it will run the `before` _callbacks_ defined during configuration time
+ #   - it will run the _workflow_ of any defined _**substage**_ of the `key` stage
+ #   - it will run the `on` _callback_ defined during configuration time
+ #   - it will mark the stage as **not** `pending?`.
+ #   - it will run the `after` _callbacks_ defined during configuration time
+ # @note if a `block` is provided:
+ #   - it will **not** run the workflow of the substages of the `key` stage
+ #   - it will **not** run the `callback` for `on` defined during configuration time
+ #   - it will rather `yield` the target stage after all the `before` _callbacks_ have been run
+ #   - aside from this, the rest behaves the same as when no _block_ is provided (see previous note)
+ # @param key [Symbol, nil] cases:
+ #   - if `key` is not provided, it targets the _current stage_
+ #   - if `key` is provided, it targets the specific _sub-stage_
+ # @yield [stage_workflow, io] if a `block` is provided, see `note`
+ # @yieldparam stage_workflow [Eco::API::Session::Config::Workflow] the _target stage_ referred by `key`
+ # @yieldparam io [Eco::API::UseCases::BaseIO] the input/output object carried throughout the _workflow_
+ # @yieldreturn io [Eco::API::UseCases::BaseIO] the input/output object carried throughout the _workflow_
+ # @return [Eco::API::Session::Config::Workflow] the current stage object (to ease chaining).
  def run(key = nil, io:, &block)
- if key
- io = stage(key).run(io: io, &block)
- elsif pending?
- @before.each {|c| io = c.call(self, io)}
-
- unless skip?
- io.session.logger.debug("(Workflow: #{path}) running now")
- if block
- io = block.call(self, io)
- else
- existing_stages.each {|stg| io = stg.run(io: io)}
-
- unless ready?
- msg = "(Workflow: #{path}) 'on' callback is not defined, nor block given"
- io.session.logger.debug(msg)
+ begin
+ if key
+ io = stage(key).run(io: io, &block)
+ elsif pending?
+ @before.each {|c| io = c.call(self, io)}
+
+ unless skip?
+ io.session.logger.debug("(Workflow: #{path}) running now")
+ if block
+ io = block.call(self, io)
+ else
+ existing_stages.each {|stg| io = stg.run(io: io)}
+
+ unless ready?
+ msg = "(Workflow: #{path}) 'on' callback is not defined, nor block given"
+ io.session.logger.debug(msg)
+ end
+ io = @on.call(self, io) if ready?
  end
- io = @on.call(self, io) if ready?
+ @pending = false
  end
- @pending = false
- end

- @after.each {|c| io = c.call(self, io)}
+ @after.each {|c| io = c.call(self, io)}
+ end
+ rescue SystemExit
+ exit
+ rescue Interrupt => i
+ raise i
+ rescue Exception => e
+ self.rescue.call(e, io) if self.rescue
+ raise e
  end
  io
  end
@@ -115,6 +115,17 @@ module Eco
  Eco::API::Organization::People.new(people)
  end

+ def s3upload_targets
+ [].tap do |paths|
+ session.config.s3storage.target_files.each_with_object(paths) do |file, arr|
+ arr.push(session.s3upload(file: file))
+ end
+ session.config.s3storage.target_directories.each_with_object(paths) do |folder, arr|
+ arr.concat(session.s3upload(directory: folder))
+ end
+ end
+ end
+
  private

  # MODIFIERS
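
Note: the new `target_files`/`target_directories` settings feed `s3upload_targets`, which `Session#s3upload(content: :target)` dispatches to. A minimal configuration sketch, assuming `config` is the session configuration object (the paths are illustrative):

    config.s3storage.target_files       = ["logs/session.log"]
    config.s3storage.target_directories = ["exports"]
    # later, typically at the end of a run:
    session.s3upload(content: :target)    # uploads every configured file and directory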
@@ -20,13 +20,9 @@ module Eco
  end
  end

- def workflow
- @workflow ||= Eco::CLI::Workflow.new(cli: self)
- end
-
  def run(session:)
  io = Eco::API::UseCases::BaseIO.new(session: session, options: options)
-
+ #session.workflow.run(io: io)
  session.workflow(io: io) do |wf, io|
  io = wf.run(:options, io: io)
  io = wf.run(:load, io: io)
@@ -2,6 +2,13 @@ ASSETS.cli.config do |config|
  ASSETS.config.workflow do |wf|

  io = nil
+ # default rescue
+ wf.rescue do |exception, io|
+ #io.session.logger.error(exception.patch_full_message)
+ wf.run(:close, io: io)
+ io
+ end
+
  wf.on(:options) do |wf_options, io|
  io = io.new(options: config.options_set.process(io: io))
  end
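
Note: projects can replace this default `rescue` with their own callback; only the last block registered via `wf.rescue` is kept. A minimal sketch (the logging call relies on the new `Exception#patch_full_message` patch; everything else is illustrative):

    ASSETS.config.workflow do |wf|
      wf.rescue do |exception, io|
        io.session.logger.error(exception.patch_full_message)
        io   # always hand the io object back to the workflow
      end
    end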
@@ -1,3 +1,3 @@
  module Eco
- VERSION = "1.1.2"
+ VERSION = "1.1.3"
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: eco-helpers
  version: !ruby/object:Gem::Version
- version: 1.1.2
+ version: 1.1.3
  platform: ruby
  authors:
  - Oscar Segura
@@ -288,6 +288,7 @@ files:
  - lib/eco/api/common/version_patches/ecoportal_api/base_model.rb
  - lib/eco/api/common/version_patches/ecoportal_api/external_person.rb
  - lib/eco/api/common/version_patches/ecoportal_api/internal_person.rb
+ - lib/eco/api/common/version_patches/exception.rb
  - lib/eco/api/common/version_patches/hash.rb
  - lib/eco/api/common/version_patches/hash/deep_merge.rb
  - lib/eco/api/eco_faker.rb