burner 1.0.0.pre.alpha.1 → 1.0.0.pre.alpha.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +48 -20
- data/burner.gemspec +3 -0
- data/exe/burner +3 -4
- data/lib/burner.rb +12 -0
- data/lib/burner/cli.rb +10 -8
- data/lib/burner/job.rb +28 -6
- data/lib/burner/jobs.rb +21 -23
- data/lib/burner/library.rb +30 -0
- data/lib/burner/library/collection/arrays_to_objects.rb +77 -0
- data/lib/burner/library/collection/graph.rb +44 -0
- data/lib/burner/library/collection/objects_to_arrays.rb +90 -0
- data/lib/burner/library/collection/shift.rb +44 -0
- data/lib/burner/library/collection/transform.rb +68 -0
- data/lib/burner/library/collection/unpivot.rb +47 -0
- data/lib/burner/library/collection/values.rb +51 -0
- data/lib/burner/library/deserialize/csv.rb +29 -0
- data/lib/burner/{jobs → library}/deserialize/json.rb +5 -2
- data/lib/burner/{jobs → library}/deserialize/yaml.rb +9 -3
- data/lib/burner/{jobs → library}/dummy.rb +4 -2
- data/lib/burner/{jobs → library}/echo.rb +5 -3
- data/lib/burner/{jobs → library}/io/base.rb +1 -7
- data/lib/burner/{jobs → library}/io/exist.rb +5 -3
- data/lib/burner/{jobs → library}/io/read.rb +6 -3
- data/lib/burner/{jobs → library}/io/write.rb +9 -4
- data/lib/burner/library/serialize/csv.rb +39 -0
- data/lib/burner/{jobs → library}/serialize/json.rb +5 -2
- data/lib/burner/{jobs → library}/serialize/yaml.rb +5 -2
- data/lib/burner/{jobs/set.rb → library/set_value.rb} +6 -3
- data/lib/burner/{jobs → library}/sleep.rb +4 -2
- data/lib/burner/modeling.rb +12 -0
- data/lib/burner/modeling/attribute.rb +29 -0
- data/lib/burner/modeling/attribute_renderer.rb +32 -0
- data/lib/burner/modeling/key_index_mapping.rb +29 -0
- data/lib/burner/payload.rb +20 -9
- data/lib/burner/pipeline.rb +23 -4
- data/lib/burner/side_effects.rb +10 -0
- data/lib/burner/side_effects/written_file.rb +28 -0
- data/lib/burner/step.rb +2 -4
- data/lib/burner/string_template.rb +6 -5
- data/lib/burner/util.rb +10 -0
- data/lib/burner/util/arrayable.rb +30 -0
- data/lib/burner/version.rb +1 -1
- metadata +74 -15
- data/lib/burner/written_file.rb +0 -28
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 73263c161bce669b99286160acb7129d33c529162da964b72fd34321cc7e3724
+  data.tar.gz: 18fd59799a73d9a8a3bf691a9dd3f8a004d442b37862bf83cbe347fbddd7e49a
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7db0c2be13a882885b97681ec7c9fd60b6702368d673d97ba0aa484b446db0c77c9f823adf03c8d9e970b2078b43a99d2d159ba97df345124a461497718b7046
+  data.tar.gz: 509382aa7fb48e116b8b3eb145a8b8226c1e4d4282eec1aabb10dcc9f7b333b906a7887d97cee4a96ae730dd7c02a0b0d4cd4a7b06dbfcc90349cfd82767e048
data/README.md
CHANGED
@@ -73,19 +73,21 @@ params = {
   input_file: 'input.json',
   output_file: 'output.yaml'
 }
+
+payload = Burner::Payload.new(params: params)
 ````

 Assuming we are running this script from a directory where an `input.json` file exists, we can then programatically process the pipeline:

 ````ruby
-Burner::Pipeline.make(pipeline).execute(
+Burner::Pipeline.make(pipeline).execute(payload: payload)
 ````

 We should now see a output.yaml file created.

 Some notes:

-* Some values are able to be string-interpolated using the provided params. This allows for the passing runtime configuration/data into pipelines/jobs.
+* Some values are able to be string-interpolated using the provided Payload#params. This allows for the passing runtime configuration/data into pipelines/jobs.
 * The job's ID can be accessed using the `__id` key.
 * The current job's payload value can be accessed using the `__value` key.
 * Jobs can be re-used (just like the output_id and output_value jobs).
@@ -116,8 +118,9 @@ end

 string_out = StringOut.new
 output = Burner::Output.new(outs: string_out)
+payload = Burner::Payload.new(params: params)

-Burner::Pipeline.make(pipeline).execute(output: output,
+Burner::Pipeline.make(pipeline).execute(output: output, payload: payload)

 log = string_out.read
 ````
@@ -130,23 +133,23 @@ The value of `log` should now look similar to:
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - input_file: input.json
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - output_file: output.yaml
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] --------------------------------------------------------------------------------
-[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [1] Burner::
+[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [1] Burner::Library::IO::Read::read
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Reading: spec/fixtures/input.json
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
-[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [2] Burner::
+[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [2] Burner::Library::Echo::output_id
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - The job id is:
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
-[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [3] Burner::
+[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [3] Burner::Library::Echo::output_value
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - The current value is:
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
-[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [4] Burner::
+[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [4] Burner::Library::Deserialize::Json::parse
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
-[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [5] Burner::
+[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [5] Burner::Library::Serialize::Yaml::convert
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
-[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [6] Burner::
+[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [6] Burner::Library::Echo::output_value
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - The current value is:
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
-[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [7] Burner::
+[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [7] Burner::Library::IO::Write::write
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Writing: output.yaml
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] --------------------------------------------------------------------------------
@@ -160,7 +163,7 @@ Notes:

 ### Command Line Pipeline Processing

-This library also ships with a built-in script `
+This library also ships with a built-in script `burner` that illustrates using the `Burner::Cli` API. This class can take in an array of arguments (similar to a command-line) and execute a pipeline. The first argument is the path to a YAML file with the pipeline's configuration and each subsequent argument is a param in `key=value` form. Here is how the json-to-yaml example can utilize this interface:

 #### Create YAML Pipeline Configuration File

@@ -228,28 +231,51 @@ Burner::Cli.new(args).invoke

 This library only ships with very basic, rudimentary jobs that are meant to just serve as a baseline:

+#### Collection
+
+* **collection/arrays_to_objects** [mappings]: Convert an array of arrays to an array of objects.
+* **collection/graph** [config, key]: Use [Hashematics](https://github.com/bluemarblepayroll/hashematics) to turn a flat array of objects into a deeply nested object tree.
+* **collection/objects_to_arrays** [mappings]: Convert an array of objects to an array of arrays.
+* **collection/shift** [amount]: Remove the first N number of elements from an array.
+* **collection/transform** [attributes, exclusive, separator]: Iterate over all objects and transform each key per the attribute transformers specifications. If exclusive is set to false then the current object will be overridden/merged. Separator can also be set for key path support. This job uses [Realize](https://github.com/bluemarblepayroll/realize), which provides its own extendable value-transformation pipeline.
+* **collection/unpivot** [pivot_set]: Take an array of objects and unpivot specific sets of keys into rows. Under the hood it uses [HashMath's Unpivot class](https://github.com/bluemarblepayroll/hash_math#unpivot-hash-key-coalescence-and-row-extrapolation).
+* **collection/values** [include_keys]: Take an array of objects and call `#values` on each object. If include_keys is true (it is false by default), then call `#keys` on the first object and inject that as a "header" object.
+
+#### De-serialization
+
+* **deserialize/csv** []: Take a CSV string and de-serialize into object(s). Currently it will return an array of arrays, with each nested array representing one row.
 * **deserialize/json** []: Treat input as a string and de-serialize it to JSON.
-* **deserialize/yaml** [safe]: Treat input as a string and de-serialize it to YAML. By default it will try and
-
-
-
-* **io/
-* **io/
+* **deserialize/yaml** [safe]: Treat input as a string and de-serialize it to YAML. By default it will try and [safely de-serialize](https://ruby-doc.org/stdlib-2.6.1/libdoc/psych/rdoc/Psych.html#method-c-safe_load) it (only using core classes). If you wish to de-serialize it to any class type, pass in `safe: false`
+
+#### IO
+
+* **io/exist** [path, short_circuit]: Check to see if a file exists. The path parameter can be interpolated using `Payload#params`. If short_circuit was set to true (defaults to false) and the file does not exist then the pipeline will be short-circuited.
+* **io/read** [binary, path]: Read in a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+read mode.
+* **io/write** [binary, path]: Write to a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+write mode.
+
+#### Serialization
+
+* **serialize/csv** []: Take an array of arrays and create a CSV.
 * **serialize/json** []: Convert value to JSON.
 * **serialize/yaml** []: Convert value to YAML.
+
+#### General
+
+* **dummy** []: Do nothing
+* **echo** [message]: Write a message to the output. The message parameter can be interpolated using `Payload#params`.
 * **set** [value]: Set the value to any arbitrary value.
 * **sleep** [seconds]: Sleep the thread for X number of seconds.


 ### Adding & Registering Jobs

-Where this library shines is when additional jobs are plugged in. Burner uses its `Burner::Jobs` class as its class-level registry built with (
+Where this library shines is when additional jobs are plugged in. Burner uses its `Burner::Jobs` class as its class-level registry built with [acts_as_hashable](https://github.com/bluemarblepayroll/acts_as_hashable)'s acts_as_hashable_factory directive.

 Let's say we would like to register a job to parse a CSV:

 ````ruby
 class ParseCsv < Burner::Job
-  def perform(output, payload
+  def perform(output, payload)
     payload.value = CSV.parse(payload.value, headers: true).map(&:to_h)

     nil
@@ -309,7 +335,9 @@ params = {
   output_file: File.join(TEMP_DIR, "#{SecureRandom.uuid}.yaml")
 }

-Burner::
+payload = Burner::Payload.new(params: params)
+
+Burner::Pipeline.make(pipeline).execute(output: output, payload: payload)
 ````

 ## Contributing
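Taken together, the README changes above migrate every example to the new Payload-based API. Below is a minimal end-to-end sketch of the updated json-to-yaml flow; note the `pipeline` hash is an assumption reconstructed from the job names in the log output above, and only the `payload` and `execute` lines appear verbatim in the diff:

````ruby
require 'burner'

# Assumed pipeline definition; the job types follow the registry keys from
# data/lib/burner/jobs.rb below, and {input_file}/{output_file} use the
# param-interpolation syntax the README describes.
pipeline = {
  jobs: [
    { name: :read,    type: 'io/read',  path: '{input_file}' },
    { name: :parse,   type: 'deserialize/json' },
    { name: :convert, type: 'serialize/yaml' },
    { name: :write,   type: 'io/write', path: '{output_file}' }
  ],
  steps: %i[read parse convert write]
}

params = {
  input_file: 'input.json',
  output_file: 'output.yaml'
}

# As of these alpha releases, params ride inside a Burner::Payload rather
# than being passed to #execute directly.
payload = Burner::Payload.new(params: params)

Burner::Pipeline.make(pipeline).execute(payload: payload)
````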
data/burner.gemspec
CHANGED
@@ -29,7 +29,10 @@ Gem::Specification.new do |s|
   s.required_ruby_version = '>= 2.5'

   s.add_dependency('acts_as_hashable', '~>1.2')
+  s.add_dependency('hashematics', '~>1.1')
+  s.add_dependency('hash_math', '~>1.2')
   s.add_dependency('objectable', '~>1.0')
+  s.add_dependency('realize', '~>1.2')
   s.add_dependency('stringento', '~>2.1')

   s.add_development_dependency('guard-rspec', '~>4.7')
data/exe/burner
CHANGED
@@ -10,12 +10,11 @@

 require 'bundler/setup'
 require 'burner'
-require 'pry'

 if ARGV.empty?
-
-  exit
+  warn('Usage: ./exe/burner package.yaml key=value key=value ...')
+  exit 2 # Do not return 1, that is reserved for hard errors.
 end

 # This should return exit code of 1 if it raises any hard errors.
-Burner::Cli.new(ARGV).
+Burner::Cli.new(ARGV).execute
data/lib/burner.rb
CHANGED
@@ -9,11 +9,23 @@

 require 'acts_as_hashable'
 require 'benchmark'
+require 'csv'
+require 'forwardable'
+require 'hash_math'
+require 'hashematics'
 require 'json'
 require 'objectable'
+require 'realize'
 require 'securerandom'
 require 'singleton'
 require 'stringento'
+require 'time'
 require 'yaml'

+# Common/Shared
+require_relative 'burner/modeling'
+require_relative 'burner/side_effects'
+require_relative 'burner/util'
+
+# Main Entrypoint(s)
 require_relative 'burner/cli'
data/lib/burner/cli.rb
CHANGED
@@ -12,18 +12,20 @@ require_relative 'pipeline'
 module Burner
   # Process a single string as a Pipeline. This is mainly to back the command-line interface.
   class Cli
-    attr_reader :
+    attr_reader :payload, :pipeline

     def initialize(args)
-      path
-
-      config
-      @pipeline
-      @
+      path      = args.first
+      params    = extract_cli_params(args)
+      config    = read_yaml(path)
+      @pipeline = Burner::Pipeline.make(jobs: config['jobs'], steps: config['steps'])
+      @payload  = Payload.new(params: params)
+
+      freeze
     end

-    def
-      pipeline.execute(
+    def execute
+      pipeline.execute(payload: payload)
     end

     private
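With the reworked initializer, `Burner::Cli` now exposes both halves of its state and runs via #execute. A sketch of driving it from Ruby exactly as the `exe/burner` script does (`pipeline.yaml` is a placeholder path):

````ruby
require 'burner'

# Equivalent to: ./exe/burner pipeline.yaml input_file=input.json output_file=output.yaml
args = %w[pipeline.yaml input_file=input.json output_file=output.yaml]

cli = Burner::Cli.new(args)

cli.pipeline # the Burner::Pipeline built from the YAML's jobs/steps keys
cli.payload  # a Burner::Payload carrying the parsed key=value params

cli.execute  # runs pipeline.execute(payload: payload)
````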
data/lib/burner/job.rb
CHANGED
@@ -15,21 +15,43 @@ module Burner
   # the Burner::Jobs factory class method #register. An example of a registration:
   # Burner::Jobs.register('your_class', YourClass)
   class Job
+    include Util::Arrayable
     acts_as_hashable

-    attr_reader :name
+    attr_reader :name

     def initialize(name:)
       raise ArgumentError, 'name is required' if name.to_s.empty?

-      @name
-      @string_template = StringTemplate.instance
+      @name = name.to_s
     end

-
+    # There are only two requirements to be considered a valid Burner Job:
+    # 1. The class responds to #name
+    # 2. The class responds to #perform(output, payload)
+    #
+    # The #perform method takes in two arguments: output (an instance of Burner::Output)
+    # and payload (an instance of Burner::Payload). Jobs can leverage output to emit
+    # information to the pipeline's log(s). The payload is utilized to pass data from job to job,
+    # with its most important attribute being #value. The value attribute is mutable
+    # per the individual job's context (meaning of it is unknown without understanding a job's
+    # input and output value of #value.). Therefore #value can mean anything and it is up to the
+    # engineers to clearly document the assumptions of its use.
+    #
+    # Returning false will short-circuit the pipeline right after the job method exits.
+    # Returning anything else besides false just means "continue".
+    def perform(output, _payload)
+      output.detail("#perform not implemented for: #{self.class.name}")
+
+      nil
+    end
+
+    protected
+
+    def job_string_template(expression, output, payload)
+      templatable_params = payload.params.merge(__id: output.id, __value: payload.value)

-
-      string_template.evaluate(expression, input)
+      StringTemplate.instance.evaluate(expression, templatable_params)
     end
   end
 end
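The new default #perform and the documented contract (respond to #name and #perform(output, payload); return false to short-circuit, anything else to continue) keep custom jobs small. A sketch mirroring the README's own ParseCsv example:

````ruby
require 'burner'
require 'csv'

class ParseCsv < Burner::Job
  def perform(output, payload)
    output.detail('Parsing CSV with headers')

    # Mutate Payload#value in place: CSV string in, array of hashes out.
    payload.value = CSV.parse(payload.value, headers: true).map(&:to_h)

    nil # anything besides false means "continue the pipeline"
  end
end
````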
data/lib/burner/jobs.rb
CHANGED
@@ -7,18 +7,7 @@
 # LICENSE file in the root directory of this source tree.
 #

-require_relative '
-require_relative 'jobs/deserialize/json'
-require_relative 'jobs/deserialize/yaml'
-require_relative 'jobs/dummy'
-require_relative 'jobs/echo'
-require_relative 'jobs/io/exist'
-require_relative 'jobs/io/read'
-require_relative 'jobs/io/write'
-require_relative 'jobs/serialize/json'
-require_relative 'jobs/serialize/yaml'
-require_relative 'jobs/set'
-require_relative 'jobs/sleep'
+require_relative 'library'

 module Burner
   # Main library of jobs. This file contains all the basic/default jobs. All other consumer
@@ -27,16 +16,25 @@ module Burner
   class Jobs
     acts_as_hashable_factory

-    register '
-    register '
-    register '
-    register '
-    register '
-    register '
-    register '
-    register '
-    register '
-    register '
-    register '
+    register 'collection/arrays_to_objects', Library::Collection::ArraysToObjects
+    register 'collection/graph', Library::Collection::Graph
+    register 'collection/objects_to_arrays', Library::Collection::ObjectsToArrays
+    register 'collection/shift', Library::Collection::Shift
+    register 'collection/transform', Library::Collection::Transform
+    register 'collection/unpivot', Library::Collection::Unpivot
+    register 'collection/values', Library::Collection::Values
+    register 'deserialize/csv', Library::Deserialize::Csv
+    register 'deserialize/json', Library::Deserialize::Json
+    register 'deserialize/yaml', Library::Deserialize::Yaml
+    register 'dummy', '', Library::Dummy
+    register 'echo', Library::Echo
+    register 'io/exist', Library::IO::Exist
+    register 'io/read', Library::IO::Read
+    register 'io/write', Library::IO::Write
+    register 'serialize/csv', Library::Serialize::Csv
+    register 'serialize/json', Library::Serialize::Json
+    register 'serialize/yaml', Library::Serialize::Yaml
+    register 'set_value', Library::SetValue
+    register 'sleep', Library::Sleep
   end
 end
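Because the factory is built on acts_as_hashable_factory, plugging in a custom job stays a one-line registration. Continuing the ParseCsv sketch above (the 'parse_csv' type key and 'input.csv' path are arbitrary choices for illustration):

````ruby
# Register the custom job under a type key, then reference it from a pipeline.
Burner::Jobs.register('parse_csv', ParseCsv)

pipeline = {
  jobs: [
    { name: :read,  type: 'io/read', path: 'input.csv' },
    { name: :parse, type: 'parse_csv' }
  ],
  steps: %i[read parse]
}

Burner::Pipeline.make(pipeline).execute(payload: Burner::Payload.new(params: {}))
````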
data/lib/burner/library.rb
ADDED
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+#
+# Copyright (c) 2020-present, Blue Marble Payroll, LLC
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+require_relative 'job'
+require_relative 'library/collection/arrays_to_objects'
+require_relative 'library/collection/graph'
+require_relative 'library/collection/objects_to_arrays'
+require_relative 'library/collection/shift'
+require_relative 'library/collection/transform'
+require_relative 'library/collection/unpivot'
+require_relative 'library/collection/values'
+require_relative 'library/deserialize/csv'
+require_relative 'library/deserialize/json'
+require_relative 'library/deserialize/yaml'
+require_relative 'library/dummy'
+require_relative 'library/echo'
+require_relative 'library/io/exist'
+require_relative 'library/io/read'
+require_relative 'library/io/write'
+require_relative 'library/serialize/csv'
+require_relative 'library/serialize/json'
+require_relative 'library/serialize/yaml'
+require_relative 'library/set_value'
+require_relative 'library/sleep'
data/lib/burner/library/collection/arrays_to_objects.rb
ADDED
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+#
+# Copyright (c) 2020-present, Blue Marble Payroll, LLC
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+module Burner
+  module Library
+    module Collection
+      # Convert an array of arrays to an array of objects. Pass in an array of
+      # Burner::Modeling::KeyIndexMapping instances or hashable configurations which specifies
+      # the index-to-key mappings to use.
+      #
+      # Expected Payload#value input: array of arrays.
+      # Payload#value output: An array of hashes.
+      #
+      # An example using a configuration-first pipeline:
+      #
+      #   config = {
+      #     jobs: [
+      #       {
+      #         name: 'set',
+      #         type: 'set_value',
+      #         value: [
+      #           [1, 'funky']
+      #         ]
+      #       },
+      #       {
+      #         name: 'map',
+      #         type: 'collection/arrays_to_objects',
+      #         mappings: [
+      #           { index: 0, key: 'id' },
+      #           { index: 1, key: 'name' }
+      #         ]
+      #       },
+      #       {
+      #         name: 'output',
+      #         type: 'echo',
+      #         message: 'value is currently: {__value}'
+      #       },
+      #
+      #     ],
+      #     steps: %w[set map output]
+      #   }
+      #
+      #   Burner::Pipeline.make(config).execute
+      class ArraysToObjects < Job
+        attr_reader :mappings
+
+        def initialize(name:, mappings: [])
+          super(name: name)
+
+          @mappings = Modeling::KeyIndexMapping.array(mappings)
+
+          freeze
+        end
+
+        def perform(_output, payload)
+          payload.value = array(payload.value).map { |array| index_to_key_map(array) }
+
+          nil
+        end
+
+        private
+
+        def index_to_key_map(array)
+          mappings.each_with_object({}) do |mapping, memo|
+            memo[mapping.key] = array[mapping.index]
+          end
+        end
+      end
+    end
+  end
+end
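The class comment's configuration-first example should run along these lines once paired with an explicit payload, as the other examples in this release do (a sketch; the echoed message uses the `{__value}` interpolation shown above):

````ruby
require 'burner'

config = {
  jobs: [
    { name: 'set', type: 'set_value', value: [[1, 'funky']] },
    {
      name: 'map',
      type: 'collection/arrays_to_objects',
      mappings: [
        { index: 0, key: 'id' },
        { index: 1, key: 'name' }
      ]
    },
    { name: 'output', type: 'echo', message: 'value is currently: {__value}' }
  ],
  steps: %w[set map output]
}

# After the 'map' step, Payload#value should be [{ 'id' => 1, 'name' => 'funky' }].
Burner::Pipeline.make(config).execute(payload: Burner::Payload.new(params: {}))
````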