burner 1.0.0.pre.alpha.6 → 1.0.0.pre.alpha.11
- checksums.yaml +4 -4
- data/.rubocop.yml +2 -0
- data/README.md +43 -39
- data/burner.gemspec +1 -1
- data/lib/burner/job.rb +15 -10
- data/lib/burner/job_with_register.rb +24 -0
- data/lib/burner/jobs.rb +27 -20
- data/lib/burner/library.rb +11 -5
- data/lib/burner/library/collection/arrays_to_objects.rb +14 -11
- data/lib/burner/library/collection/graph.rb +7 -9
- data/lib/burner/library/collection/objects_to_arrays.rb +34 -34
- data/lib/burner/library/collection/shift.rb +6 -8
- data/lib/burner/library/collection/transform.rb +7 -9
- data/lib/burner/library/collection/unpivot.rb +17 -11
- data/lib/burner/library/collection/validate.rb +90 -0
- data/lib/burner/library/collection/values.rb +9 -11
- data/lib/burner/library/deserialize/csv.rb +4 -6
- data/lib/burner/library/deserialize/json.rb +4 -6
- data/lib/burner/library/deserialize/yaml.rb +7 -7
- data/lib/burner/library/echo.rb +1 -3
- data/lib/burner/library/io/base.rb +3 -3
- data/lib/burner/library/io/exist.rb +9 -9
- data/lib/burner/library/io/read.rb +5 -7
- data/lib/burner/library/io/write.rb +5 -7
- data/lib/burner/library/{dummy.rb → nothing.rb} +3 -5
- data/lib/burner/library/serialize/csv.rb +5 -7
- data/lib/burner/library/serialize/json.rb +4 -6
- data/lib/burner/library/serialize/yaml.rb +4 -6
- data/lib/burner/library/set_value.rb +6 -8
- data/lib/burner/library/sleep.rb +1 -3
- data/lib/burner/modeling.rb +1 -0
- data/lib/burner/modeling/attribute.rb +3 -1
- data/lib/burner/modeling/validations.rb +23 -0
- data/lib/burner/modeling/validations/base.rb +35 -0
- data/lib/burner/modeling/validations/blank.rb +31 -0
- data/lib/burner/modeling/validations/present.rb +31 -0
- data/lib/burner/payload.rb +50 -10
- data/lib/burner/pipeline.rb +3 -3
- data/lib/burner/step.rb +1 -5
- data/lib/burner/util.rb +1 -0
- data/lib/burner/util/string_template.rb +42 -0
- data/lib/burner/version.rb +1 -1
- metadata +13 -6
- data/lib/burner/string_template.rb +0 -40
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: adf7e5e1ea0c19d59b6edcbb1c5073e25f533bf32076df4ec7d9122edc852958
+  data.tar.gz: 40009afdb93c5ee3971513971dd47a8f416acd04b33eb0c8e9720806cce4a515
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: cf255e3021e975451354a3d9537b464ffc61843c59b780ceace9dc6be04e4e2499f5d874aa905608ed3c4b60c4989e6bb3c8cccd2116e8286ca045ea4202bea7
+  data.tar.gz: 569a44eb9a5915d946b4de3038ead60094b170481583ea6974391eedaba209162df03883757b917b2e9c3ed3be6d8c2f80f64f43e6420d5fbc2d711eedc81a8c
data/.rubocop.yml
CHANGED
data/README.md
CHANGED
@@ -31,30 +31,30 @@ pipeline = {
   jobs: [
     {
       name: :read,
-      type: 'io/read',
+      type: 'b/io/read',
       path: '{input_file}'
     },
     {
       name: :output_id,
-      type: 'echo',
+      type: 'b/echo',
       message: 'The job id is: {__id}'
     },
     {
       name: :output_value,
-      type: 'echo',
+      type: 'b/echo',
       message: 'The current value is: {__value}'
     },
     {
       name: :parse,
-      type: 'deserialize/json'
+      type: 'b/deserialize/json'
     },
     {
       name: :convert,
-      type: 'serialize/yaml'
+      type: 'b/serialize/yaml'
     },
     {
       name: :write,
-      type: 'io/write',
+      type: 'b/io/write',
       path: '{output_file}'
     }
   ],
@@ -172,25 +172,25 @@ Write the following json_to_yaml_pipeline.yaml file to disk:
 ````yaml
 jobs:
   - name: read
-    type: io/read
+    type: b/io/read
     path: '{input_file}'

   - name: output_id
-    type: echo
+    type: b/echo
     message: 'The job id is: {__id}'

   - name: output_value
-    type: echo
+    type: b/echo
     message: 'The current value is: {__value}'

   - name: parse
-    type: deserialize/json
+    type: b/deserialize/json

   - name: convert
-    type: serialize/yaml
+    type: b/serialize/yaml

   - name: write
-    type: io/write
+    type: b/io/write
     path: '{output_file}'

 steps:
@@ -233,39 +233,43 @@ This library only ships with very basic, rudimentary jobs that are meant to just

 #### Collection

-* **collection/arrays_to_objects** [mappings]: Convert an array of arrays to an array of objects.
-* **collection/graph** [config, key]: Use [Hashematics](https://github.com/bluemarblepayroll/hashematics) to turn a flat array of objects into a deeply nested object tree.
-* **collection/objects_to_arrays** [mappings]: Convert an array of objects to an array of arrays.
-* **collection/shift** [amount]: Remove the first N number of elements from an array.
-* **collection/transform** [attributes, exclusive, separator]: Iterate over all objects and transform each key per the attribute transformers specifications. If exclusive is set to false then the current object will be overridden/merged. Separator can also be set for key path support. This job uses [Realize](https://github.com/bluemarblepayroll/realize), which provides its own extendable value-transformation pipeline.
-* **collection/unpivot** [pivot_set]: Take an array of objects and unpivot specific sets of keys into rows. Under the hood it uses [HashMath's Unpivot class](https://github.com/bluemarblepayroll/hash_math#unpivot-hash-key-coalescence-and-row-extrapolation).
-* **collection/values** [include_keys]: Take an array of objects and call `#values` on each object. If include_keys is true (it is false by default), then call `#keys` on the first object and inject that as a "header" object.
+* **b/collection/arrays_to_objects** [mappings, register]: Convert an array of arrays to an array of objects.
+* **b/collection/graph** [config, key, register]: Use [Hashematics](https://github.com/bluemarblepayroll/hashematics) to turn a flat array of objects into a deeply nested object tree.
+* **b/collection/objects_to_arrays** [mappings, register]: Convert an array of objects to an array of arrays.
+* **b/collection/shift** [amount, register]: Remove the first N number of elements from an array.
+* **b/collection/transform** [attributes, exclusive, separator, register]: Iterate over all objects and transform each key per the attribute transformers specifications. If exclusive is set to false then the current object will be overridden/merged. Separator can also be set for key path support. This job uses [Realize](https://github.com/bluemarblepayroll/realize), which provides its own extendable value-transformation pipeline.
+* **b/collection/unpivot** [pivot_set, register]: Take an array of objects and unpivot specific sets of keys into rows. Under the hood it uses [HashMath's Unpivot class](https://github.com/bluemarblepayroll/hash_math#unpivot-hash-key-coalescence-and-row-extrapolation).
+* **b/collection/validate** [invalid_register, join_char, message_key, register, separator, validations]: Take an array of objects, run it through each declared validator, and split the objects into two registers. The valid objects will be split into the current register while the invalid ones will go into the invalid_register as declared. Optional arguments, join_char and message_key, help determine the compiled error messages. The separator option can be utilized to use dot-notation for validating keys. See each validation's options by viewing their classes within the `lib/modeling/validations` directory.
+* **b/collection/values** [include_keys, register]: Take an array of objects and call `#values` on each object. If include_keys is true (it is false by default), then call `#keys` on the first object and inject that as a "header" object.

 #### De-serialization

-* **deserialize/csv** []: Take a CSV string and de-serialize into object(s). Currently it will return an array of arrays, with each nested array representing one row.
-* **deserialize/json** []: Treat input as a string and de-serialize it to JSON.
-* **deserialize/yaml** [safe]: Treat input as a string and de-serialize it to YAML. By default it will try and [safely de-serialize](https://ruby-doc.org/stdlib-2.6.1/libdoc/psych/rdoc/Psych.html#method-c-safe_load) it (only using core classes). If you wish to de-serialize it to any class type, pass in `safe: false`
+* **b/deserialize/csv** [register]: Take a CSV string and de-serialize into object(s). Currently it will return an array of arrays, with each nested array representing one row.
+* **b/deserialize/json** [register]: Treat input as a string and de-serialize it to JSON.
+* **b/deserialize/yaml** [register, safe]: Treat input as a string and de-serialize it to YAML. By default it will try and [safely de-serialize](https://ruby-doc.org/stdlib-2.6.1/libdoc/psych/rdoc/Psych.html#method-c-safe_load) it (only using core classes). If you wish to de-serialize it to any class type, pass in `safe: false`

 #### IO

-* **io/exist** [path, short_circuit]: Check to see if a file exists. The path parameter can be interpolated using `Payload#params`. If short_circuit was set to true (defaults to false) and the file does not exist then the pipeline will be short-circuited.
-* **io/read** [binary, path]: Read in a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+read mode.
-* **io/write** [binary, path]: Write to a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+write mode.
+* **b/io/exist** [path, short_circuit]: Check to see if a file exists. The path parameter can be interpolated using `Payload#params`. If short_circuit was set to true (defaults to false) and the file does not exist then the pipeline will be short-circuited.
+* **b/io/read** [binary, path, register]: Read in a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+read mode.
+* **b/io/write** [binary, path, register]: Write to a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+write mode.

 #### Serialization

-* **serialize/csv** []: Take an array of arrays and create a CSV.
-* **serialize/json** []: Convert value to JSON.
-* **serialize/yaml** []: Convert value to YAML.
+* **b/serialize/csv** [register]: Take an array of arrays and create a CSV.
+* **b/serialize/json** [register]: Convert value to JSON.
+* **b/serialize/yaml** [register]: Convert value to YAML.

 #### General

-* **dummy** []: Do nothing.
-* **echo** [message]: Write a message to the output. The message parameter can be interpolated using `Payload#params`.
-* **set** [value]: Set the value to any arbitrary value.
-* **sleep** [seconds]: Sleep the thread for X number of seconds.
+* **b/echo** [message]: Write a message to the output. The message parameter can be interpolated using `Payload#params`.
+* **b/nothing** []: Do nothing.
+* **b/set** [register, value]: Set the value to any arbitrary value.
+* **b/sleep** [seconds]: Sleep the thread for X number of seconds.

+Notes:
+
+* If you see that a job accepts a 'register' attribute/argument, that indicates a job will access and/or mutate the payload. The register indicates which part of the payload the job will interact with. This allows jobs to be placed into 'lanes'. If register is not specified, then the default register is used.

 ### Adding & Registering Jobs

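To make the register note above concrete, here is a minimal sketch of a two-lane pipeline, assuming the register value can be passed alongside the other job attributes shown in this README. The file names and register names are illustrative only, not part of the gem.

````ruby
# Hypothetical sketch: two independent lanes, each reading and parsing its own file.
# Register names (:file_a, :file_b) and paths are invented for illustration.
require 'burner'

pipeline = {
  jobs: [
    { name: :read_a,  type: 'b/io/read',          path: 'a.json', register: :file_a },
    { name: :read_b,  type: 'b/io/read',          path: 'b.json', register: :file_b },
    { name: :parse_a, type: 'b/deserialize/json', register: :file_a },
    { name: :parse_b, type: 'b/deserialize/json', register: :file_b }
  ],
  steps: %i[read_a read_b parse_a parse_b]
}

# Invocation follows the Burner::Pipeline.make(config).execute form shown elsewhere in this diff.
Burner::Pipeline.make(pipeline).execute
````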
@@ -274,9 +278,9 @@ Where this library shines is when additional jobs are plugged in. Burner uses i
 Let's say we would like to register a job to parse a CSV:

 ````ruby
-class ParseCsv < Burner::Job
+class ParseCsv < Burner::JobWithRegister
   def perform(output, payload)
-    payload.value = CSV.parse(payload.value, headers: true).map(&:to_h)
+    payload[register] = CSV.parse(payload[register], headers: true).map(&:to_h)

     nil
   end
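Wiring the custom job in follows the factory call documented in Burner::Job's comments (`Burner::Jobs.register('your_class', YourClass)`). The sketch below is illustrative only; the 'parse_csv' key and the :people register are made-up names.

````ruby
# Sketch: register the ParseCsv job from the hunk above under an arbitrary key,
# then reference it by type in a pipeline entry. Key and register are invented.
Burner::Jobs.register('parse_csv', ParseCsv)

csv_job = {
  name: :parse,
  type: 'parse_csv',
  register: :people # JobWithRegister exposes this attribute, per the new class below
}
````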
@@ -292,17 +296,17 @@ pipeline = {
   jobs: [
     {
       name: :read,
-      type: 'io/read',
+      type: 'b/io/read',
       path: '{input_file}'
     },
     {
       name: :output_id,
-      type: 'echo',
+      type: 'b/echo',
       message: 'The job id is: {__id}'
     },
     {
       name: :output_value,
-      type: 'echo',
+      type: 'b/echo',
       message: 'The current value is: {__value}'
     },
     {
@@ -311,11 +315,11 @@ pipeline = {
     },
     {
       name: :convert,
-      type: 'serialize/yaml'
+      type: 'b/serialize/yaml'
     },
     {
       name: :write,
-      type: 'io/write',
+      type: 'b/io/write',
       path: '{output_file}'
     }
   ],
data/burner.gemspec
CHANGED
@@ -8,7 +8,7 @@ Gem::Specification.new do |s|
   s.summary = 'Declarative and extendable processing pipeline'

   s.description = <<-DESCRIPTION
-    This library serves as the
+    This library serves as the backbone for a configurable processing engine. It allows you to organize your code into jobs, then stitch those jobs together as steps.
   DESCRIPTION

   s.authors = ['Matthew Ruggio']
data/lib/burner/job.rb
CHANGED
@@ -7,11 +7,9 @@
 # LICENSE file in the root directory of this source tree.
 #

-require_relative 'string_template'
-
 module Burner
   # Abstract base class for all job subclasses. The only public method a subclass needs to
-  # implement #perform(
+  # implement #perform(output, payload) and then you can register it for use using
   # the Burner::Jobs factory class method #register. An example of a registration:
   # Burner::Jobs.register('your_class', YourClass)
   class Job
@@ -26,17 +24,18 @@ module Burner
       @name = name.to_s
     end

-    # There are only
+    # There are only a few requirements to be considered a valid Burner Job:
     # 1. The class responds to #name
     # 2. The class responds to #perform(output, payload)
     #
     # The #perform method takes in two arguments: output (an instance of Burner::Output)
     # and payload (an instance of Burner::Payload). Jobs can leverage output to emit
     # information to the pipeline's log(s). The payload is utilized to pass data from job to job,
-    # with its most important attribute being #
-    # per the individual job's context
-    #
-    #
+    # with its most important attribute being #registers. The registers attribute is a mutable
+    # and accessible hash per the individual job's context
+    # (meaning of it is unknown without understanding a job's input and output value
+    # of #registers.). Therefore #register key values can mean anything
+    # and it is up to consumers to clearly document the assumptions of its use.
     #
     # Returning false will short-circuit the pipeline right after the job method exits.
     # Returning anything else besides false just means "continue".
@@ -49,9 +48,15 @@ module Burner
     protected

     def job_string_template(expression, output, payload)
-      templatable_params = payload.params.merge(__id: output.id, __value: payload.value)
+      templatable_params = payload.params
+                                  .merge(__id: output.id)
+                                  .merge(templatable_register_values(payload))
+
+      Util::StringTemplate.instance.evaluate(expression, templatable_params)
+    end

-      StringTemplate.instance.evaluate(expression, templatable_params)
+    def templatable_register_values(payload)
+      payload.registers.transform_keys { |key| "__#{key}_register" }
     end
   end
 end
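The new templatable_register_values helper above simply rewrites register names into template keys before merging them into the interpolation parameters. A small sketch of what that transform_keys call yields, with the register contents invented for illustration:

````ruby
# Illustrative only: mirrors the transform_keys call in job.rb above.
registers = { 'csv' => 'a,b,c', 'people' => [] } # invented register contents

registers.transform_keys { |key| "__#{key}_register" }
# => { "__csv_register" => "a,b,c", "__people_register" => [] }
# So an expression such as 'Raw CSV: {__csv_register}' could interpolate a register's value.
````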
data/lib/burner/job_with_register.rb
ADDED
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+#
+# Copyright (c) 2020-present, Blue Marble Payroll, LLC
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+require_relative 'job'
+
+module Burner
+  # Add on a register attribute to the configuration for a job. This indicates that a job
+  # either accesses and/or mutates the payload's registers.
+  class JobWithRegister < Job
+    attr_reader :register
+
+    def initialize(name:, register: '')
+      super(name: name)
+
+      @register = register.to_s
+    end
+  end
+end
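Because JobWithRegister#initialize coerces register with to_s and defaults it to '', a subclass picks up the default register automatically when none is configured. A small sketch; the Passthrough class is invented for illustration:

````ruby
# Invented example class; only demonstrates the register attribute added above.
class Passthrough < Burner::JobWithRegister
  def perform(_output, _payload)
    nil # a real job would read and/or write payload[register]
  end
end

Passthrough.new(name: :noop).register                     # => "" (the default register)
Passthrough.new(name: :copy, register: :people).register  # => "people"
````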
data/lib/burner/jobs.rb
CHANGED
@@ -16,25 +16,32 @@ module Burner
   class Jobs
     acts_as_hashable_factory

-
-
-    register '
-    register '
-    register '
-    register '
-
-    register '
-    register '
-    register '
-    register '
-    register '
-    register '
-    register '
-    register '
-
-    register '
-    register '
-    register '
-
+    # Nothing is the default as noted by the ''. This means if a type is omitted, nil, or blank
+    # string then the nothing job will be used.
+    register 'b/echo', Library::Echo
+    register 'b/nothing', '', Library::Nothing
+    register 'b/set_value', Library::SetValue
+    register 'b/sleep', Library::Sleep
+
+    register 'b/collection/arrays_to_objects', Library::Collection::ArraysToObjects
+    register 'b/collection/graph', Library::Collection::Graph
+    register 'b/collection/objects_to_arrays', Library::Collection::ObjectsToArrays
+    register 'b/collection/shift', Library::Collection::Shift
+    register 'b/collection/transform', Library::Collection::Transform
+    register 'b/collection/unpivot', Library::Collection::Unpivot
+    register 'b/collection/values', Library::Collection::Values
+    register 'b/collection/validate', Library::Collection::Validate
+
+    register 'b/deserialize/csv', Library::Deserialize::Csv
+    register 'b/deserialize/json', Library::Deserialize::Json
+    register 'b/deserialize/yaml', Library::Deserialize::Yaml
+
+    register 'b/io/exist', Library::IO::Exist
+    register 'b/io/read', Library::IO::Read
+    register 'b/io/write', Library::IO::Write
+
+    register 'b/serialize/csv', Library::Serialize::Csv
+    register 'b/serialize/json', Library::Serialize::Json
+    register 'b/serialize/yaml', Library::Serialize::Yaml
   end
 end
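Per the new comment in Jobs, the blank '' key aliases the nothing job, so a pipeline entry may omit type entirely. A sketch of two entries that should behave the same way under that rule; the names are invented:

````ruby
# Illustrative job entries only: both should resolve to Library::Nothing,
# because '' is registered as an alias for 'b/nothing' above.
no_type_job    = { name: :placeholder }     # type omitted
blank_type_job = { name: :noop, type: '' }  # blank type
````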
data/lib/burner/library.rb
CHANGED
@@ -7,24 +7,30 @@
 # LICENSE file in the root directory of this source tree.
 #

-require_relative '
+require_relative 'job_with_register'
+
+require_relative 'library/echo'
+require_relative 'library/nothing'
+require_relative 'library/set_value'
+require_relative 'library/sleep'
+
 require_relative 'library/collection/arrays_to_objects'
 require_relative 'library/collection/graph'
 require_relative 'library/collection/objects_to_arrays'
 require_relative 'library/collection/shift'
 require_relative 'library/collection/transform'
 require_relative 'library/collection/unpivot'
+require_relative 'library/collection/validate'
 require_relative 'library/collection/values'
+
 require_relative 'library/deserialize/csv'
 require_relative 'library/deserialize/json'
 require_relative 'library/deserialize/yaml'
-
-require_relative 'library/echo'
+
 require_relative 'library/io/exist'
 require_relative 'library/io/read'
 require_relative 'library/io/write'
+
 require_relative 'library/serialize/csv'
 require_relative 'library/serialize/json'
 require_relative 'library/serialize/yaml'
-require_relative 'library/set_value'
-require_relative 'library/sleep'
data/lib/burner/library/collection/arrays_to_objects.rb
CHANGED
@@ -14,8 +14,8 @@ module Burner
   # Burner::Modeling::KeyIndexMapping instances or hashable configurations which specifies
   # the index-to-key mappings to use.
   #
-  # Expected Payload
-  # Payload
+  # Expected Payload[register] input: array of arrays.
+  # Payload[register] output: An array of hashes.
   #
   # An example using a configuration-first pipeline:
   #
@@ -23,14 +23,14 @@ module Burner
   #   jobs: [
   #     {
   #       name: 'set',
-  #       type: 'set_value',
+  #       type: 'b/set_value',
   #       value: [
   #         [1, 'funky']
   #       ]
   #     },
   #     {
   #       name: 'map',
-  #       type: 'collection/arrays_to_objects',
+  #       type: 'b/collection/arrays_to_objects',
   #       mappings: [
   #         { index: 0, key: 'id' },
   #         { index: 1, key: 'name' }
@@ -38,7 +38,7 @@ module Burner
   #     },
   #     {
   #       name: 'output',
-  #       type: 'echo',
+  #       type: 'b/echo',
   #       message: 'value is currently: {__value}'
   #     },
   #
@@ -47,11 +47,16 @@ module Burner
   #   }
   #
   #   Burner::Pipeline.make(config).execute
-  class ArraysToObjects < Job
+  #
+  # Given the above example, the expected output would be:
+  # [
+  #   { 'id' => 1, 'name' => 'funky' }
+  # ]
+  class ArraysToObjects < JobWithRegister
    attr_reader :mappings

-    def initialize(name:, mappings: [])
-      super(name: name)
+    def initialize(name:, mappings: [], register: '')
+      super(name: name, register: register)

       @mappings = Modeling::KeyIndexMapping.array(mappings)

@@ -59,9 +64,7 @@ module Burner
     end

     def perform(_output, payload)
-      payload
-
-      nil
+      payload[register] = array(payload[register]).map { |array| index_to_key_map(array) }
     end

     private
data/lib/burner/library/collection/graph.rb
CHANGED
@@ -13,13 +13,13 @@ module Burner
   # Take an array of (denormalized) objects and create an object hierarchy from them.
   # Under the hood it uses Hashematics: https://github.com/bluemarblepayroll/hashematics.
   #
-  # Expected Payload
-  # Payload
-  class Graph < Job
+  # Expected Payload[register] input: array of objects.
+  # Payload[register] output: An array of objects.
+  class Graph < JobWithRegister
     attr_reader :key, :groups

-    def initialize(name:, key:, config: Hashematics::Configuration.new)
-      super(name: name)
+    def initialize(name:, key:, config: Hashematics::Configuration.new, register: '')
+      super(name: name, register: register)

       raise ArgumentError, 'key is required' if key.to_s.empty?

@@ -30,13 +30,11 @@ module Burner
     end

     def perform(output, payload)
-      graph = Hashematics::Graph.new(groups).add(array(payload
+      graph = Hashematics::Graph.new(groups).add(array(payload[register]))

       output.detail("Graphing: #{key}")

-      payload
-
-      nil
+      payload[register] = graph.data(key)
     end
   end
 end