burner 1.0.0.pre.alpha.7 → 1.0.0.pre.alpha.13
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/README.md +43 -38
- data/burner.gemspec +1 -1
- data/lib/burner/job.rb +12 -5
- data/lib/burner/jobs.rb +30 -20
- data/lib/burner/library.rb +11 -4
- data/lib/burner/library/collection/arrays_to_objects.rb +10 -5
- data/lib/burner/library/collection/concatenate.rb +42 -0
- data/lib/burner/library/collection/graph.rb +2 -2
- data/lib/burner/library/collection/objects_to_arrays.rb +30 -28
- data/lib/burner/library/collection/shift.rb +2 -2
- data/lib/burner/library/collection/transform.rb +2 -2
- data/lib/burner/library/collection/unpivot.rb +2 -2
- data/lib/burner/library/collection/validate.rb +3 -2
- data/lib/burner/library/collection/values.rb +2 -2
- data/lib/burner/library/deserialize/csv.rb +2 -2
- data/lib/burner/library/deserialize/json.rb +2 -2
- data/lib/burner/library/deserialize/yaml.rb +2 -2
- data/lib/burner/library/echo.rb +1 -1
- data/lib/burner/library/io/exist.rb +1 -1
- data/lib/burner/library/io/read.rb +2 -2
- data/lib/burner/library/io/write.rb +2 -2
- data/lib/burner/library/{dummy.rb → nothing.rb} +2 -2
- data/lib/burner/library/serialize/csv.rb +2 -2
- data/lib/burner/library/serialize/json.rb +2 -2
- data/lib/burner/library/serialize/yaml.rb +2 -2
- data/lib/burner/library/sleep.rb +1 -1
- data/lib/burner/library/value/copy.rb +39 -0
- data/lib/burner/library/value/static.rb +34 -0
- data/lib/burner/modeling/attribute.rb +3 -1
- data/lib/burner/payload.rb +11 -5
- data/lib/burner/version.rb +1 -1
- metadata +9 -6
- data/lib/burner/library/set_value.rb +0 -32
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: d682634e7c0c8b850b98d80a33e4e44012a3f7ae058b72e3c828830d7e177a0a
|
4
|
+
data.tar.gz: afc1daecbec70a6ec88f19ab11ea92f93f28a7dd8bc8ea4821cb6a222db3f824
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: d75499b61c0b30b4882fb8cf10c74ea2cd0dfa8562824bf3a0a61c3311b78808eaeb5e0dfc1db1faa81400a23a998e4dd6b37ba35ad170ec2945c8708804d7c7
|
7
|
+
data.tar.gz: 42e5b5d6fead52793eefa7ef6f41423254e870e0fd319d7aad8b011c88c8048bbaec78cac79d0acdeed98f446777014b46e419ed86e39ea0f7fd9f888c786a4d
|
data/README.md
CHANGED
@@ -31,30 +31,30 @@ pipeline = {
|
|
31
31
|
jobs: [
|
32
32
|
{
|
33
33
|
name: :read,
|
34
|
-
type: 'io/read',
|
34
|
+
type: 'b/io/read',
|
35
35
|
path: '{input_file}'
|
36
36
|
},
|
37
37
|
{
|
38
38
|
name: :output_id,
|
39
|
-
type:
|
39
|
+
type: 'b/echo',
|
40
40
|
message: 'The job id is: {__id}'
|
41
41
|
},
|
42
42
|
{
|
43
43
|
name: :output_value,
|
44
|
-
type:
|
44
|
+
type: 'b/echo',
|
45
45
|
message: 'The current value is: {__value}'
|
46
46
|
},
|
47
47
|
{
|
48
48
|
name: :parse,
|
49
|
-
type: 'deserialize/json'
|
49
|
+
type: 'b/deserialize/json'
|
50
50
|
},
|
51
51
|
{
|
52
52
|
name: :convert,
|
53
|
-
type: 'serialize/yaml'
|
53
|
+
type: 'b/serialize/yaml'
|
54
54
|
},
|
55
55
|
{
|
56
56
|
name: :write,
|
57
|
-
type: 'io/write',
|
57
|
+
type: 'b/io/write',
|
58
58
|
path: '{output_file}'
|
59
59
|
}
|
60
60
|
],
|
@@ -172,25 +172,25 @@ Write the following json_to_yaml_pipeline.yaml file to disk:
|
|
172
172
|
````yaml
|
173
173
|
jobs:
|
174
174
|
- name: read
|
175
|
-
type: io/read
|
175
|
+
type: b/io/read
|
176
176
|
path: '{input_file}'
|
177
177
|
|
178
178
|
- name: output_id
|
179
|
-
type: echo
|
179
|
+
type: b/echo
|
180
180
|
message: 'The job id is: {__id}'
|
181
181
|
|
182
182
|
- name: output_value
|
183
|
-
type: echo
|
183
|
+
type: b/echo
|
184
184
|
message: 'The current value is: {__value}'
|
185
185
|
|
186
186
|
- name: parse
|
187
|
-
type: deserialize/json
|
187
|
+
type: b/deserialize/json
|
188
188
|
|
189
189
|
- name: convert
|
190
|
-
type: serialize/yaml
|
190
|
+
type: b/serialize/yaml
|
191
191
|
|
192
192
|
- name: write
|
193
|
-
type: io/write
|
193
|
+
type: b/io/write
|
194
194
|
path: '{output_file}'
|
195
195
|
|
196
196
|
steps:
|
@@ -233,39 +233,44 @@ This library only ships with very basic, rudimentary jobs that are meant to just
|
|
233
233
|
|
234
234
|
#### Collection
|
235
235
|
|
236
|
-
* **collection/arrays_to_objects** [mappings, register]: Convert an array of arrays to an array of objects.
|
237
|
-
* **collection/
|
238
|
-
* **collection/
|
239
|
-
* **collection/
|
240
|
-
* **collection/
|
241
|
-
* **collection/
|
242
|
-
* **collection/
|
243
|
-
* **collection/
|
236
|
+
* **b/collection/arrays_to_objects** [mappings, register]: Convert an array of arrays to an array of objects.
|
237
|
+
* **b/collection/concatenate** [from_registers, to_register]: Concatenate each from_register's value and place the newly concatenated array into the to_register. Note: this does not do any deep copying and should be assumed it is shallow copying all objects.
|
238
|
+
* **b/collection/graph** [config, key, register]: Use [Hashematics](https://github.com/bluemarblepayroll/hashematics) to turn a flat array of objects into a deeply nested object tree.
|
239
|
+
* **b/collection/objects_to_arrays** [mappings, register]: Convert an array of objects to an array of arrays.
|
240
|
+
* **b/collection/shift** [amount, register]: Remove the first N number of elements from an array.
|
241
|
+
* **b/collection/transform** [attributes, exclusive, separator, register]: Iterate over all objects and transform each key per the attribute transformers specifications. If exclusive is set to false then the current object will be overridden/merged. Separator can also be set for key path support. This job uses [Realize](https://github.com/bluemarblepayroll/realize), which provides its own extendable value-transformation pipeline.
|
242
|
+
* **b/collection/unpivot** [pivot_set, register]: Take an array of objects and unpivot specific sets of keys into rows. Under the hood it uses [HashMath's Unpivot class](https://github.com/bluemarblepayroll/hash_math#unpivot-hash-key-coalescence-and-row-extrapolation).
|
243
|
+
* **b/collection/validate** [invalid_register, join_char, message_key, register, separator, validations]: Take an array of objects, run it through each declared validator, and split the objects into two registers. The valid objects will be split into the current register while the invalid ones will go into the invalid_register as declared. Optional arguments, join_char and message_key, help determine the compiled error messages. The separator option can be utilized to use dot-notation for validating keys. See each validation's options by viewing their classes within the `lib/modeling/validations` directory.
|
244
|
+
* **b/collection/values** [include_keys, register]: Take an array of objects and call `#values` on each object. If include_keys is true (it is false by default), then call `#keys` on the first object and inject that as a "header" object.
|
244
245
|
|
245
246
|
#### De-serialization
|
246
247
|
|
247
|
-
* **deserialize/csv** [register]: Take a CSV string and de-serialize into object(s). Currently it will return an array of arrays, with each nested array representing one row.
|
248
|
-
* **deserialize/json** [register]: Treat input as a string and de-serialize it to JSON.
|
249
|
-
* **deserialize/yaml** [register, safe]: Treat input as a string and de-serialize it to YAML. By default it will try and [safely de-serialize](https://ruby-doc.org/stdlib-2.6.1/libdoc/psych/rdoc/Psych.html#method-c-safe_load) it (only using core classes). If you wish to de-serialize it to any class type, pass in `safe: false`
|
248
|
+
* **b/deserialize/csv** [register]: Take a CSV string and de-serialize into object(s). Currently it will return an array of arrays, with each nested array representing one row.
|
249
|
+
* **b/deserialize/json** [register]: Treat input as a string and de-serialize it to JSON.
|
250
|
+
* **b/deserialize/yaml** [register, safe]: Treat input as a string and de-serialize it to YAML. By default it will try and [safely de-serialize](https://ruby-doc.org/stdlib-2.6.1/libdoc/psych/rdoc/Psych.html#method-c-safe_load) it (only using core classes). If you wish to de-serialize it to any class type, pass in `safe: false`
|
250
251
|
|
251
252
|
#### IO
|
252
253
|
|
253
|
-
* **io/exist** [path, short_circuit]: Check to see if a file exists. The path parameter can be interpolated using `Payload#params`. If short_circuit was set to true (defaults to false) and the file does not exist then the pipeline will be short-circuited.
|
254
|
-
* **io/read** [binary, path, register]: Read in a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+read mode.
|
255
|
-
* **io/write** [binary, path, register]: Write to a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+write mode.
|
254
|
+
* **b/io/exist** [path, short_circuit]: Check to see if a file exists. The path parameter can be interpolated using `Payload#params`. If short_circuit was set to true (defaults to false) and the file does not exist then the pipeline will be short-circuited.
|
255
|
+
* **b/io/read** [binary, path, register]: Read in a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+read mode.
|
256
|
+
* **b/io/write** [binary, path, register]: Write to a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+write mode.
|
256
257
|
|
257
258
|
#### Serialization
|
258
259
|
|
259
|
-
* **serialize/csv** [register]: Take an array of arrays and create a CSV.
|
260
|
-
* **serialize/json** [register]: Convert value to JSON.
|
261
|
-
* **serialize/yaml** [register]: Convert value to YAML.
|
260
|
+
* **b/serialize/csv** [register]: Take an array of arrays and create a CSV.
|
261
|
+
* **b/serialize/json** [register]: Convert value to JSON.
|
262
|
+
* **b/serialize/yaml** [register]: Convert value to YAML.
|
263
|
+
|
264
|
+
#### Value
|
265
|
+
|
266
|
+
* **b/value/copy** [from_register, to_register]: Copy from_register's value into the to_register. Note: this does not do any deep copying and should be assumed it is shallow copying all objects.
|
267
|
+
* **b/value/static** [register, value]: Set the value to any arbitrary value.
|
262
268
|
|
263
269
|
#### General
|
264
270
|
|
265
|
-
* **
|
266
|
-
* **
|
267
|
-
* **
|
268
|
-
* **sleep** [seconds]: Sleep the thread for X number of seconds.
|
271
|
+
* **b/echo** [message]: Write a message to the output. The message parameter can be interpolated using `Payload#params`.
|
272
|
+
* **b/nothing** []: Do nothing.
|
273
|
+
* **b/sleep** [seconds]: Sleep the thread for X number of seconds.
|
269
274
|
|
270
275
|
Notes:
|
271
276
|
|
@@ -296,17 +301,17 @@ pipeline = {
|
|
296
301
|
jobs: [
|
297
302
|
{
|
298
303
|
name: :read,
|
299
|
-
type: 'io/read',
|
304
|
+
type: 'b/io/read',
|
300
305
|
path: '{input_file}'
|
301
306
|
},
|
302
307
|
{
|
303
308
|
name: :output_id,
|
304
|
-
type:
|
309
|
+
type: 'b/echo',
|
305
310
|
message: 'The job id is: {__id}'
|
306
311
|
},
|
307
312
|
{
|
308
313
|
name: :output_value,
|
309
|
-
type:
|
314
|
+
type: 'b/echo',
|
310
315
|
message: 'The current value is: {__value}'
|
311
316
|
},
|
312
317
|
{
|
@@ -315,11 +320,11 @@ pipeline = {
|
|
315
320
|
},
|
316
321
|
{
|
317
322
|
name: :convert,
|
318
|
-
type: 'serialize/yaml'
|
323
|
+
type: 'b/serialize/yaml'
|
319
324
|
},
|
320
325
|
{
|
321
326
|
name: :write,
|
322
|
-
type: 'io/write',
|
327
|
+
type: 'b/io/write',
|
323
328
|
path: '{output_file}'
|
324
329
|
}
|
325
330
|
],
|
data/burner.gemspec
CHANGED
@@ -8,7 +8,7 @@ Gem::Specification.new do |s|
|
|
8
8
|
s.summary = 'Declarative and extendable processing pipeline'
|
9
9
|
|
10
10
|
s.description = <<-DESCRIPTION
|
11
|
-
This library serves as the
|
11
|
+
This library serves as the backbone for a configurable processing engine. It allows you to organize your code into jobs, then stitch those jobs together as steps.
|
12
12
|
DESCRIPTION
|
13
13
|
|
14
14
|
s.authors = ['Matthew Ruggio']
|
data/lib/burner/job.rb
CHANGED
@@ -31,10 +31,11 @@ module Burner
|
|
31
31
|
# The #perform method takes in two arguments: output (an instance of Burner::Output)
|
32
32
|
# and payload (an instance of Burner::Payload). Jobs can leverage output to emit
|
33
33
|
# information to the pipeline's log(s). The payload is utilized to pass data from job to job,
|
34
|
-
# with its most important attribute being #
|
35
|
-
# per the individual job's context
|
36
|
-
#
|
37
|
-
#
|
34
|
+
# with its most important attribute being #registers. The registers attribute is a mutable
|
35
|
+
# and accessible hash per the individual job's context
|
36
|
+
# (meaning of it is unknown without understanding a job's input and output value
|
37
|
+
# of #registers.). Therefore #register key values can mean anything
|
38
|
+
# and it is up to consumers to clearly document the assumptions of its use.
|
38
39
|
#
|
39
40
|
# Returning false will short-circuit the pipeline right after the job method exits.
|
40
41
|
# Returning anything else besides false just means "continue".
|
@@ -47,9 +48,15 @@ module Burner
|
|
47
48
|
protected
|
48
49
|
|
49
50
|
def job_string_template(expression, output, payload)
|
50
|
-
templatable_params = payload.params
|
51
|
+
templatable_params = payload.params
|
52
|
+
.merge(__id: output.id)
|
53
|
+
.merge(templatable_register_values(payload))
|
51
54
|
|
52
55
|
Util::StringTemplate.instance.evaluate(expression, templatable_params)
|
53
56
|
end
|
57
|
+
|
58
|
+
def templatable_register_values(payload)
|
59
|
+
payload.registers.transform_keys { |key| "__#{key}_register" }
|
60
|
+
end
|
54
61
|
end
|
55
62
|
end
|
data/lib/burner/jobs.rb
CHANGED
@@ -16,25 +16,35 @@ module Burner
|
|
16
16
|
class Jobs
|
17
17
|
acts_as_hashable_factory
|
18
18
|
|
19
|
-
|
20
|
-
|
21
|
-
register '
|
22
|
-
register '
|
23
|
-
register '
|
24
|
-
|
25
|
-
register 'collection/
|
26
|
-
register '
|
27
|
-
register '
|
28
|
-
register '
|
29
|
-
register '
|
30
|
-
register '
|
31
|
-
register '
|
32
|
-
register '
|
33
|
-
register '
|
34
|
-
|
35
|
-
register '
|
36
|
-
register '
|
37
|
-
register '
|
38
|
-
|
19
|
+
# Nothing is the default as noted by the ''. This means if a type is omitted, nil, or blank
|
20
|
+
# string then the nothing job will be used.
|
21
|
+
register 'b/echo', Library::Echo
|
22
|
+
register 'b/nothing', '', Library::Nothing
|
23
|
+
register 'b/sleep', Library::Sleep
|
24
|
+
|
25
|
+
register 'b/collection/arrays_to_objects', Library::Collection::ArraysToObjects
|
26
|
+
register 'b/collection/concatenate', Library::Collection::Concatenate
|
27
|
+
register 'b/collection/graph', Library::Collection::Graph
|
28
|
+
register 'b/collection/objects_to_arrays', Library::Collection::ObjectsToArrays
|
29
|
+
register 'b/collection/shift', Library::Collection::Shift
|
30
|
+
register 'b/collection/transform', Library::Collection::Transform
|
31
|
+
register 'b/collection/unpivot', Library::Collection::Unpivot
|
32
|
+
register 'b/collection/values', Library::Collection::Values
|
33
|
+
register 'b/collection/validate', Library::Collection::Validate
|
34
|
+
|
35
|
+
register 'b/deserialize/csv', Library::Deserialize::Csv
|
36
|
+
register 'b/deserialize/json', Library::Deserialize::Json
|
37
|
+
register 'b/deserialize/yaml', Library::Deserialize::Yaml
|
38
|
+
|
39
|
+
register 'b/io/exist', Library::IO::Exist
|
40
|
+
register 'b/io/read', Library::IO::Read
|
41
|
+
register 'b/io/write', Library::IO::Write
|
42
|
+
|
43
|
+
register 'b/serialize/csv', Library::Serialize::Csv
|
44
|
+
register 'b/serialize/json', Library::Serialize::Json
|
45
|
+
register 'b/serialize/yaml', Library::Serialize::Yaml
|
46
|
+
|
47
|
+
register 'b/value/copy', Library::Value::Copy
|
48
|
+
register 'b/value/static', Library::Value::Static
|
39
49
|
end
|
40
50
|
end
|
data/lib/burner/library.rb
CHANGED
@@ -9,7 +9,12 @@
|
|
9
9
|
|
10
10
|
require_relative 'job_with_register'
|
11
11
|
|
12
|
+
require_relative 'library/echo'
|
13
|
+
require_relative 'library/nothing'
|
14
|
+
require_relative 'library/sleep'
|
15
|
+
|
12
16
|
require_relative 'library/collection/arrays_to_objects'
|
17
|
+
require_relative 'library/collection/concatenate'
|
13
18
|
require_relative 'library/collection/graph'
|
14
19
|
require_relative 'library/collection/objects_to_arrays'
|
15
20
|
require_relative 'library/collection/shift'
|
@@ -17,16 +22,18 @@ require_relative 'library/collection/transform'
|
|
17
22
|
require_relative 'library/collection/unpivot'
|
18
23
|
require_relative 'library/collection/validate'
|
19
24
|
require_relative 'library/collection/values'
|
25
|
+
|
20
26
|
require_relative 'library/deserialize/csv'
|
21
27
|
require_relative 'library/deserialize/json'
|
22
28
|
require_relative 'library/deserialize/yaml'
|
23
|
-
|
24
|
-
require_relative 'library/echo'
|
29
|
+
|
25
30
|
require_relative 'library/io/exist'
|
26
31
|
require_relative 'library/io/read'
|
27
32
|
require_relative 'library/io/write'
|
33
|
+
|
28
34
|
require_relative 'library/serialize/csv'
|
29
35
|
require_relative 'library/serialize/json'
|
30
36
|
require_relative 'library/serialize/yaml'
|
31
|
-
|
32
|
-
require_relative 'library/
|
37
|
+
|
38
|
+
require_relative 'library/value/copy'
|
39
|
+
require_relative 'library/value/static'
|
@@ -14,8 +14,8 @@ module Burner
|
|
14
14
|
# Burner::Modeling::KeyIndexMapping instances or hashable configurations which specifies
|
15
15
|
# the index-to-key mappings to use.
|
16
16
|
#
|
17
|
-
# Expected Payload
|
18
|
-
# Payload
|
17
|
+
# Expected Payload[register] input: array of arrays.
|
18
|
+
# Payload[register] output: An array of hashes.
|
19
19
|
#
|
20
20
|
# An example using a configuration-first pipeline:
|
21
21
|
#
|
@@ -23,14 +23,14 @@ module Burner
|
|
23
23
|
# jobs: [
|
24
24
|
# {
|
25
25
|
# name: 'set',
|
26
|
-
# type: '
|
26
|
+
# type: 'b/value/static',
|
27
27
|
# value: [
|
28
28
|
# [1, 'funky']
|
29
29
|
# ]
|
30
30
|
# },
|
31
31
|
# {
|
32
32
|
# name: 'map',
|
33
|
-
# type: 'collection/arrays_to_objects',
|
33
|
+
# type: 'b/collection/arrays_to_objects',
|
34
34
|
# mappings: [
|
35
35
|
# { index: 0, key: 'id' },
|
36
36
|
# { index: 1, key: 'name' }
|
@@ -38,7 +38,7 @@ module Burner
|
|
38
38
|
# },
|
39
39
|
# {
|
40
40
|
# name: 'output',
|
41
|
-
# type: 'echo',
|
41
|
+
# type: 'b/echo',
|
42
42
|
# message: 'value is currently: {__value}'
|
43
43
|
# },
|
44
44
|
#
|
@@ -47,6 +47,11 @@ module Burner
|
|
47
47
|
# }
|
48
48
|
#
|
49
49
|
# Burner::Pipeline.make(config).execute
|
50
|
+
#
|
51
|
+
# Given the above example, the expected output would be:
|
52
|
+
# [
|
53
|
+
# { 'id' => 1, 'name' => 'funky' }
|
54
|
+
# ]
|
50
55
|
class ArraysToObjects < JobWithRegister
|
51
56
|
attr_reader :mappings
|
52
57
|
|
@@ -0,0 +1,42 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
#
|
4
|
+
# Copyright (c) 2020-present, Blue Marble Payroll, LLC
|
5
|
+
#
|
6
|
+
# This source code is licensed under the MIT license found in the
|
7
|
+
# LICENSE file in the root directory of this source tree.
|
8
|
+
#
|
9
|
+
|
10
|
+
module Burner
|
11
|
+
module Library
|
12
|
+
module Collection
|
13
|
+
# Take the list of from_registers and concatenate each of their values together.
|
14
|
+
# Each from_value will be coerced into an array if it not an array.
|
15
|
+
#
|
16
|
+
# Expected Payload[from_register] input: array of objects.
|
17
|
+
# Payload[to_register] output: An array of objects.
|
18
|
+
class Concatenate < Job
|
19
|
+
attr_reader :from_registers, :to_register
|
20
|
+
|
21
|
+
def initialize(name:, from_registers: [], to_register: '')
|
22
|
+
super(name: name)
|
23
|
+
|
24
|
+
@from_registers = Array(from_registers)
|
25
|
+
@to_register = to_register.to_s
|
26
|
+
|
27
|
+
freeze
|
28
|
+
end
|
29
|
+
|
30
|
+
def perform(output, payload)
|
31
|
+
output.detail("Concatenating registers: '#{from_registers}' to: '#{to_register}'")
|
32
|
+
|
33
|
+
payload[to_register] = from_registers.each_with_object([]) do |from_register, memo|
|
34
|
+
from_register_value = array(payload[from_register])
|
35
|
+
|
36
|
+
memo.concat(from_register_value)
|
37
|
+
end
|
38
|
+
end
|
39
|
+
end
|
40
|
+
end
|
41
|
+
end
|
42
|
+
end
|
@@ -13,8 +13,8 @@ module Burner
|
|
13
13
|
# Take an array of (denormalized) objects and create an object hierarchy from them.
|
14
14
|
# Under the hood it uses Hashematics: https://github.com/bluemarblepayroll/hashematics.
|
15
15
|
#
|
16
|
-
# Expected Payload
|
17
|
-
# Payload
|
16
|
+
# Expected Payload[register] input: array of objects.
|
17
|
+
# Payload[register] output: An array of objects.
|
18
18
|
class Graph < JobWithRegister
|
19
19
|
attr_reader :key, :groups
|
20
20
|
|
@@ -15,39 +15,41 @@ module Burner
|
|
15
15
|
# Burner::Modeling::KeyIndexMapping instances or hashable configurations which specifies
|
16
16
|
# the key-to-index mappings to use.
|
17
17
|
#
|
18
|
-
# Expected Payload
|
19
|
-
# Payload
|
18
|
+
# Expected Payload[register] input: array of hashes.
|
19
|
+
# Payload[register] output: An array of arrays.
|
20
20
|
#
|
21
21
|
# An example using a configuration-first pipeline:
|
22
22
|
#
|
23
|
-
#
|
24
|
-
#
|
25
|
-
#
|
26
|
-
#
|
27
|
-
#
|
28
|
-
#
|
29
|
-
#
|
30
|
-
#
|
31
|
-
#
|
32
|
-
#
|
33
|
-
#
|
34
|
-
#
|
35
|
-
#
|
36
|
-
#
|
37
|
-
#
|
38
|
-
#
|
39
|
-
#
|
40
|
-
#
|
41
|
-
#
|
42
|
-
#
|
43
|
-
#
|
44
|
-
#
|
23
|
+
# config = {
|
24
|
+
# jobs: [
|
25
|
+
# {
|
26
|
+
# name: 'set',
|
27
|
+
# type: 'b/value/static',
|
28
|
+
# value: [
|
29
|
+
# { 'id' => 1, 'name' => 'funky' }
|
30
|
+
# ],
|
31
|
+
# register: register
|
32
|
+
# },
|
33
|
+
# {
|
34
|
+
# name: 'map',
|
35
|
+
# type: 'b/collection/objects_to_arrays',
|
36
|
+
# mappings: [
|
37
|
+
# { index: 0, key: 'id' },
|
38
|
+
# { index: 1, key: 'name' }
|
39
|
+
# ],
|
40
|
+
# register: register
|
41
|
+
# },
|
42
|
+
# {
|
43
|
+
# name: 'output',
|
44
|
+
# type: 'b/echo',
|
45
|
+
# message: 'value is currently: {__value}'
|
46
|
+
# },
|
45
47
|
#
|
46
|
-
#
|
47
|
-
#
|
48
|
-
#
|
48
|
+
# ],
|
49
|
+
# steps: %w[set map output]
|
50
|
+
# }
|
49
51
|
#
|
50
|
-
#
|
52
|
+
# Burner::Pipeline.make(config).execute
|
51
53
|
class ObjectsToArrays < JobWithRegister
|
52
54
|
attr_reader :mappings
|
53
55
|
|
@@ -14,8 +14,8 @@ module Burner
|
|
14
14
|
# attribute. The initial use case for this was to remove "header" rows from arrays,
|
15
15
|
# like you would expect when parsing CSV files.
|
16
16
|
#
|
17
|
-
# Expected Payload
|
18
|
-
# Payload
|
17
|
+
# Expected Payload[register] input: nothing.
|
18
|
+
# Payload[register] output: An array with N beginning elements removed.
|
19
19
|
class Shift < JobWithRegister
|
20
20
|
DEFAULT_AMOUNT = 0
|
21
21
|
|
@@ -18,8 +18,8 @@ module Burner
|
|
18
18
|
# For more information on the specific contract for attributes, see the
|
19
19
|
# Burner::Modeling::Attribute class.
|
20
20
|
#
|
21
|
-
# Expected Payload
|
22
|
-
# Payload
|
21
|
+
# Expected Payload[register] input: array of objects.
|
22
|
+
# Payload[register] output: An array of objects.
|
23
23
|
class Transform < JobWithRegister
|
24
24
|
BLANK = ''
|
25
25
|
|
@@ -14,8 +14,8 @@ module Burner
|
|
14
14
|
# Under the hood it uses HashMath's Unpivot class:
|
15
15
|
# https://github.com/bluemarblepayroll/hash_math
|
16
16
|
#
|
17
|
-
# Expected Payload
|
18
|
-
# Payload
|
17
|
+
# Expected Payload[register] input: array of objects.
|
18
|
+
# Payload[register] output: An array of objects.
|
19
19
|
class Unpivot < JobWithRegister
|
20
20
|
attr_reader :unpivot
|
21
21
|
|
@@ -14,8 +14,9 @@ module Burner
|
|
14
14
|
# of validations. The main register will include the valid objects and the invalid_register
|
15
15
|
# will contain the invalid objects.
|
16
16
|
#
|
17
|
-
# Expected Payload
|
18
|
-
# Payload
|
17
|
+
# Expected Payload[register] input: array of objects.
|
18
|
+
# Payload[register] output: An array of objects that are valid.
|
19
|
+
# Payload[invalid_register] output: An array of objects that are invalid.
|
19
20
|
class Validate < JobWithRegister
|
20
21
|
DEFAULT_INVALID_REGISTER = 'invalid'
|
21
22
|
DEFAULT_JOIN_CHAR = ', '
|
@@ -14,8 +14,8 @@ module Burner
|
|
14
14
|
# If include_keys is true (it is false by default), then call #keys on the first
|
15
15
|
# object and inject that as a "header" object.
|
16
16
|
#
|
17
|
-
# Expected Payload
|
18
|
-
# Payload
|
17
|
+
# Expected Payload[register] input: array of objects.
|
18
|
+
# Payload[register] output: An array of arrays.
|
19
19
|
class Values < JobWithRegister
|
20
20
|
attr_reader :include_keys
|
21
21
|
|
@@ -12,8 +12,8 @@ module Burner
|
|
12
12
|
module Deserialize
|
13
13
|
# Take a CSV string and de-serialize into object(s).
|
14
14
|
#
|
15
|
-
# Expected Payload
|
16
|
-
# Payload
|
15
|
+
# Expected Payload[register] input: nothing.
|
16
|
+
# Payload[register] output: an array of arrays. Each inner array represents one data row.
|
17
17
|
class Csv < JobWithRegister
|
18
18
|
# This currently only supports returning an array of arrays, including the header row.
|
19
19
|
# In the future this could be extended to offer more customizable options, such as
|
@@ -12,8 +12,8 @@ module Burner
|
|
12
12
|
module Deserialize
|
13
13
|
# Take a JSON string and deserialize into object(s).
|
14
14
|
#
|
15
|
-
# Expected Payload
|
16
|
-
# Payload
|
15
|
+
# Expected Payload[register] input: string of JSON data.
|
16
|
+
# Payload[register] output: anything, as specified by the JSON de-serializer.
|
17
17
|
class Json < JobWithRegister
|
18
18
|
def perform(_output, payload)
|
19
19
|
payload[register] = JSON.parse(payload[register])
|
@@ -15,8 +15,8 @@ module Burner
|
|
15
15
|
# YAML. If you wish to ease this restriction, for example if you have custom serialization
|
16
16
|
# for custom classes, then you can pass in safe: false.
|
17
17
|
#
|
18
|
-
# Expected Payload
|
19
|
-
# Payload
|
18
|
+
# Expected Payload[register] input: string of YAML data.
|
19
|
+
# Payload[register]output: anything as specified by the YAML de-serializer.
|
20
20
|
class Yaml < JobWithRegister
|
21
21
|
attr_reader :safe
|
22
22
|
|
data/lib/burner/library/echo.rb
CHANGED
@@ -15,7 +15,7 @@ module Burner
|
|
15
15
|
# Check to see if a file exists. If short_circuit is set to true and the file
|
16
16
|
# does not exist then the job will return false and short circuit the pipeline.
|
17
17
|
#
|
18
|
-
# Note: this does not use Payload#
|
18
|
+
# Note: this does not use Payload#registers.
|
19
19
|
class Exist < Job
|
20
20
|
attr_reader :path, :short_circuit
|
21
21
|
|
@@ -14,8 +14,8 @@ module Burner
|
|
14
14
|
module IO
|
15
15
|
# Read value from disk.
|
16
16
|
#
|
17
|
-
# Expected Payload
|
18
|
-
# Payload
|
17
|
+
# Expected Payload[register] input: nothing.
|
18
|
+
# Payload[register] output: contents of the specified file.
|
19
19
|
class Read < Base
|
20
20
|
attr_reader :binary
|
21
21
|
|
@@ -14,8 +14,8 @@ module Burner
|
|
14
14
|
module IO
|
15
15
|
# Write value to disk.
|
16
16
|
#
|
17
|
-
# Expected Payload
|
18
|
-
# Payload
|
17
|
+
# Expected Payload[register] input: anything.
|
18
|
+
# Payload[register] output: whatever was passed in.
|
19
19
|
class Write < Base
|
20
20
|
attr_reader :binary
|
21
21
|
|
@@ -12,8 +12,8 @@ module Burner
|
|
12
12
|
module Serialize
|
13
13
|
# Take an array of arrays and create a CSV.
|
14
14
|
#
|
15
|
-
# Expected Payload
|
16
|
-
# Payload
|
15
|
+
# Expected Payload[register] input: array of arrays.
|
16
|
+
# Payload[register] output: a serialized CSV string.
|
17
17
|
class Csv < JobWithRegister
|
18
18
|
def perform(_output, payload)
|
19
19
|
payload[register] = CSV.generate(options) do |csv|
|
@@ -12,8 +12,8 @@ module Burner
|
|
12
12
|
module Serialize
|
13
13
|
# Treat value like a Ruby object and serialize it using JSON.
|
14
14
|
#
|
15
|
-
# Expected Payload
|
16
|
-
# Payload
|
15
|
+
# Expected Payload[register] input: anything.
|
16
|
+
# Payload[register] output: string representing the output of the JSON serializer.
|
17
17
|
class Json < JobWithRegister
|
18
18
|
def perform(_output, payload)
|
19
19
|
payload[register] = payload[register].to_json
|
@@ -12,8 +12,8 @@ module Burner
|
|
12
12
|
module Serialize
|
13
13
|
# Treat value like a Ruby object and serialize it using YAML.
|
14
14
|
#
|
15
|
-
# Expected Payload
|
16
|
-
# Payload
|
15
|
+
# Expected Payload[register] input: anything.
|
16
|
+
# Payload[register] output: string representing the output of the YAML serializer.
|
17
17
|
class Yaml < JobWithRegister
|
18
18
|
def perform(_output, payload)
|
19
19
|
payload[register] = payload[register].to_yaml
|
data/lib/burner/library/sleep.rb
CHANGED
@@ -11,7 +11,7 @@ module Burner
|
|
11
11
|
module Library
|
12
12
|
# Arbitrarily put thread to sleep for X number of seconds
|
13
13
|
#
|
14
|
-
#
|
14
|
+
# Note: this does not use Payload#registers.
|
15
15
|
class Sleep < Job
|
16
16
|
attr_reader :seconds
|
17
17
|
|
@@ -0,0 +1,39 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
#
|
4
|
+
# Copyright (c) 2020-present, Blue Marble Payroll, LLC
|
5
|
+
#
|
6
|
+
# This source code is licensed under the MIT license found in the
|
7
|
+
# LICENSE file in the root directory of this source tree.
|
8
|
+
#
|
9
|
+
|
10
|
+
module Burner
|
11
|
+
module Library
|
12
|
+
module Value
|
13
|
+
# Copy one value in a register to another. Note that this does *not* perform any type of
|
14
|
+
# deep copy, it simply points one register's value to another. If you decide to later mutate
|
15
|
+
# one register, you are *not* guaranteed to not mutate the other.
|
16
|
+
#
|
17
|
+
# Expected Payload[from_register] input: anything.
|
18
|
+
# Payload[to_register] output: whatever value was specified in the from_register.
|
19
|
+
class Copy < Job
|
20
|
+
attr_reader :from_register, :to_register
|
21
|
+
|
22
|
+
def initialize(name:, to_register: '', from_register: '')
|
23
|
+
super(name: name)
|
24
|
+
|
25
|
+
@from_register = from_register.to_s
|
26
|
+
@to_register = to_register.to_s
|
27
|
+
|
28
|
+
freeze
|
29
|
+
end
|
30
|
+
|
31
|
+
def perform(output, payload)
|
32
|
+
output.detail("Copying register: '#{from_register}' to: '#{to_register}'")
|
33
|
+
|
34
|
+
payload[to_register] = payload[from_register]
|
35
|
+
end
|
36
|
+
end
|
37
|
+
end
|
38
|
+
end
|
39
|
+
end
|
@@ -0,0 +1,34 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
#
|
4
|
+
# Copyright (c) 2020-present, Blue Marble Payroll, LLC
|
5
|
+
#
|
6
|
+
# This source code is licensed under the MIT license found in the
|
7
|
+
# LICENSE file in the root directory of this source tree.
|
8
|
+
#
|
9
|
+
|
10
|
+
module Burner
|
11
|
+
module Library
|
12
|
+
module Value
|
13
|
+
# Arbitrarily set the value of a register.
|
14
|
+
#
|
15
|
+
# Expected Payload[register] input: anything.
|
16
|
+
# Payload[register] output: whatever value was specified in this job.
|
17
|
+
class Static < JobWithRegister
|
18
|
+
attr_reader :value
|
19
|
+
|
20
|
+
def initialize(name:, register: '', value: nil)
|
21
|
+
super(name: name, register: register)
|
22
|
+
|
23
|
+
@value = value
|
24
|
+
|
25
|
+
freeze
|
26
|
+
end
|
27
|
+
|
28
|
+
def perform(_output, payload)
|
29
|
+
payload[register] = value
|
30
|
+
end
|
31
|
+
end
|
32
|
+
end
|
33
|
+
end
|
34
|
+
end
|
@@ -10,7 +10,9 @@
|
|
10
10
|
module Burner
|
11
11
|
module Modeling
|
12
12
|
# Defines a top-level key and the associated transformers for deriving the final value
|
13
|
-
# to set the key to.
|
13
|
+
# to set the key to. The transformers that can be passed in can be any Realize::Transformers
|
14
|
+
# subclasses. For more information, see the Realize library at:
|
15
|
+
# https://github.com/bluemarblepayroll/realize
|
14
16
|
class Attribute
|
15
17
|
acts_as_hashable
|
16
18
|
|
data/lib/burner/payload.rb
CHANGED
@@ -8,16 +8,22 @@
|
|
8
8
|
#
|
9
9
|
|
10
10
|
module Burner
|
11
|
-
# The input for all Job#perform methods. The main notion of this object is its
|
12
|
-
# attribute. This
|
13
|
-
#
|
14
|
-
#
|
15
|
-
#
|
11
|
+
# The input for all Job#perform methods. The main notion of this object is its 'registers'
|
12
|
+
# attribute. This registers attribute is a key-indifferent hash, accessible on Payload using
|
13
|
+
# the brackets setter and getter methods. This is dynamic and weak on purpose and is subject
|
14
|
+
# to whatever the Job#perform method decides it is. This definitely adds an order-of-magnitude
|
15
|
+
# complexity to this whole library and lifecycle, but I am not sure there is any other way
|
16
|
+
# around it: trying to build a generic, open-ended processing pipeline to serve almost
|
17
|
+
# any use case.
|
16
18
|
#
|
17
19
|
# The side_effects attribute can also be utilized as a way for jobs to emit any data in a more
|
18
20
|
# structured/additive manner. The initial use case for this was for Burner's core IO jobs to
|
19
21
|
# report back the files it has written in a more structured data way (as opposed to simply
|
20
22
|
# writing some information to the output.)
|
23
|
+
#
|
24
|
+
# The 'time' attribute is important in that it should allow for the replaying of pipelines and jobs.
|
25
|
+
# Instead of having jobs utilize Time.now, Date.today, etc... they should rather opt to
|
26
|
+
# use this value instead.
|
21
27
|
class Payload
|
22
28
|
attr_reader :params,
|
23
29
|
:registers,
|
data/lib/burner/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: burner
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 1.0.0.pre.alpha.7
|
4
|
+
version: 1.0.0.pre.alpha.13
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Matthew Ruggio
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date: 2020-10-
|
11
|
+
date: 2020-10-28 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: acts_as_hashable
|
@@ -192,8 +192,9 @@ dependencies:
|
|
192
192
|
- - "~>"
|
193
193
|
- !ruby/object:Gem::Version
|
194
194
|
version: 0.7.0
|
195
|
-
description: " This library serves as the
|
196
|
-
allows you to organize your code into
|
195
|
+
description: " This library serves as the backbone for a configurable processing
|
196
|
+
engine. It allows you to organize your code into jobs, then stitch those jobs together
|
197
|
+
as steps.\n"
|
197
198
|
email:
|
198
199
|
- mruggio@bluemarblepayroll.com
|
199
200
|
executables:
|
@@ -224,6 +225,7 @@ files:
|
|
224
225
|
- lib/burner/jobs.rb
|
225
226
|
- lib/burner/library.rb
|
226
227
|
- lib/burner/library/collection/arrays_to_objects.rb
|
228
|
+
- lib/burner/library/collection/concatenate.rb
|
227
229
|
- lib/burner/library/collection/graph.rb
|
228
230
|
- lib/burner/library/collection/objects_to_arrays.rb
|
229
231
|
- lib/burner/library/collection/shift.rb
|
@@ -234,17 +236,18 @@ files:
|
|
234
236
|
- lib/burner/library/deserialize/csv.rb
|
235
237
|
- lib/burner/library/deserialize/json.rb
|
236
238
|
- lib/burner/library/deserialize/yaml.rb
|
237
|
-
- lib/burner/library/dummy.rb
|
238
239
|
- lib/burner/library/echo.rb
|
239
240
|
- lib/burner/library/io/base.rb
|
240
241
|
- lib/burner/library/io/exist.rb
|
241
242
|
- lib/burner/library/io/read.rb
|
242
243
|
- lib/burner/library/io/write.rb
|
244
|
+
- lib/burner/library/nothing.rb
|
243
245
|
- lib/burner/library/serialize/csv.rb
|
244
246
|
- lib/burner/library/serialize/json.rb
|
245
247
|
- lib/burner/library/serialize/yaml.rb
|
246
|
-
- lib/burner/library/set_value.rb
|
247
248
|
- lib/burner/library/sleep.rb
|
249
|
+
- lib/burner/library/value/copy.rb
|
250
|
+
- lib/burner/library/value/static.rb
|
248
251
|
- lib/burner/modeling.rb
|
249
252
|
- lib/burner/modeling/attribute.rb
|
250
253
|
- lib/burner/modeling/attribute_renderer.rb
|
@@ -1,32 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true
|
2
|
-
|
3
|
-
#
|
4
|
-
# Copyright (c) 2020-present, Blue Marble Payroll, LLC
|
5
|
-
#
|
6
|
-
# This source code is licensed under the MIT license found in the
|
7
|
-
# LICENSE file in the root directory of this source tree.
|
8
|
-
#
|
9
|
-
|
10
|
-
module Burner
|
11
|
-
module Library
|
12
|
-
# Arbitrarily set value
|
13
|
-
#
|
14
|
-
# Expected Payload#value input: anything.
|
15
|
-
# Payload#value output: whatever value was specified in this job.
|
16
|
-
class SetValue < JobWithRegister
|
17
|
-
attr_reader :value
|
18
|
-
|
19
|
-
def initialize(name:, register: '', value: nil)
|
20
|
-
super(name: name, register: register)
|
21
|
-
|
22
|
-
@value = value
|
23
|
-
|
24
|
-
freeze
|
25
|
-
end
|
26
|
-
|
27
|
-
def perform(_output, payload)
|
28
|
-
payload[register] = value
|
29
|
-
end
|
30
|
-
end
|
31
|
-
end
|
32
|
-
end
|