burner 1.0.0.pre.alpha.4 → 1.0.0.pre.alpha.9
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/.rubocop.yml +2 -0
- data/README.md +52 -47
- data/burner.gemspec +1 -1
- data/exe/burner +2 -3
- data/lib/burner.rb +2 -0
- data/lib/burner/cli.rb +2 -0
- data/lib/burner/job.rb +27 -9
- data/lib/burner/job_with_register.rb +24 -0
- data/lib/burner/jobs.rb +28 -39
- data/lib/burner/library.rb +32 -0
- data/lib/burner/library/collection/arrays_to_objects.rb +75 -0
- data/lib/burner/{jobs → library}/collection/graph.rb +7 -8
- data/lib/burner/library/collection/objects_to_arrays.rb +88 -0
- data/lib/burner/{jobs → library}/collection/shift.rb +8 -9
- data/lib/burner/{jobs → library}/collection/transform.rb +17 -15
- data/lib/burner/{jobs → library}/collection/unpivot.rb +17 -9
- data/lib/burner/library/collection/validate.rb +89 -0
- data/lib/burner/library/collection/values.rb +49 -0
- data/lib/burner/{jobs → library}/deserialize/csv.rb +4 -5
- data/lib/burner/{jobs → library}/deserialize/json.rb +6 -5
- data/lib/burner/{jobs → library}/deserialize/yaml.rb +13 -7
- data/lib/burner/{jobs → library}/dummy.rb +4 -4
- data/lib/burner/{jobs → library}/echo.rb +3 -3
- data/lib/burner/{jobs → library}/io/base.rb +4 -4
- data/lib/burner/{jobs → library}/io/exist.rb +11 -9
- data/lib/burner/{jobs → library}/io/read.rb +7 -6
- data/lib/burner/{jobs → library}/io/write.rb +9 -6
- data/lib/burner/{jobs → library}/serialize/csv.rb +5 -6
- data/lib/burner/{jobs → library}/serialize/json.rb +6 -5
- data/lib/burner/{jobs → library}/serialize/yaml.rb +6 -5
- data/lib/burner/{jobs/set.rb → library/set_value.rb} +8 -7
- data/lib/burner/{jobs → library}/sleep.rb +3 -3
- data/lib/burner/modeling.rb +3 -0
- data/lib/burner/modeling/attribute.rb +29 -0
- data/lib/burner/modeling/attribute_renderer.rb +32 -0
- data/lib/burner/modeling/validations.rb +23 -0
- data/lib/burner/modeling/validations/base.rb +35 -0
- data/lib/burner/modeling/validations/blank.rb +31 -0
- data/lib/burner/modeling/validations/present.rb +31 -0
- data/lib/burner/payload.rb +52 -15
- data/lib/burner/pipeline.rb +23 -4
- data/lib/burner/side_effects.rb +10 -0
- data/lib/burner/side_effects/written_file.rb +28 -0
- data/lib/burner/step.rb +1 -5
- data/lib/burner/util.rb +11 -0
- data/lib/burner/util/arrayable.rb +30 -0
- data/lib/burner/util/string_template.rb +42 -0
- data/lib/burner/version.rb +1 -1
- metadata +40 -28
- data/lib/burner/jobs/collection/arrays_to_objects.rb +0 -43
- data/lib/burner/jobs/collection/objects_to_arrays.rb +0 -54
- data/lib/burner/jobs/collection/transform/attribute.rb +0 -33
- data/lib/burner/jobs/collection/transform/attribute_renderer.rb +0 -36
- data/lib/burner/string_template.rb +0 -40
- data/lib/burner/written_file.rb +0 -28
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: a2af01edaf5ab67512cb8d3db4dbf3bba75a6272af78d78d43f3ceaec0ead386
|
4
|
+
data.tar.gz: 812095ecf0b4240f61128f1968380f35e6748a07486f268da358ab56425de013
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 68fcfbf7d2b600889e805691b4e8289c82eb6366391c533333d8873b1521e41d342af00bbf68b01caad954fd8eb136016906b484727020af9ff5716bcfed0822
|
7
|
+
data.tar.gz: 33dc2ca22f71d2e6f54858bfa90e9f5c6bbbff969409fb1d6a8c05b459c105c77ff7e354780a93daca8241edd31fbb03c424091c3557bfb4facf8b43b9fe6264
|
data/.rubocop.yml
CHANGED
data/README.md
CHANGED
@@ -31,30 +31,30 @@ pipeline = {
|
|
31
31
|
jobs: [
|
32
32
|
{
|
33
33
|
name: :read,
|
34
|
-
type: 'io/read',
|
34
|
+
type: 'b/io/read',
|
35
35
|
path: '{input_file}'
|
36
36
|
},
|
37
37
|
{
|
38
38
|
name: :output_id,
|
39
|
-
type:
|
39
|
+
type: 'b/echo',
|
40
40
|
message: 'The job id is: {__id}'
|
41
41
|
},
|
42
42
|
{
|
43
43
|
name: :output_value,
|
44
|
-
type:
|
44
|
+
type: 'b/echo',
|
45
45
|
message: 'The current value is: {__value}'
|
46
46
|
},
|
47
47
|
{
|
48
48
|
name: :parse,
|
49
|
-
type: 'deserialize/json'
|
49
|
+
type: 'b/deserialize/json'
|
50
50
|
},
|
51
51
|
{
|
52
52
|
name: :convert,
|
53
|
-
type: 'serialize/yaml'
|
53
|
+
type: 'b/serialize/yaml'
|
54
54
|
},
|
55
55
|
{
|
56
56
|
name: :write,
|
57
|
-
type: 'io/write',
|
57
|
+
type: 'b/io/write',
|
58
58
|
path: '{output_file}'
|
59
59
|
}
|
60
60
|
],
|
@@ -133,23 +133,23 @@ The value of `log` should now look similar to:
|
|
133
133
|
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - input_file: input.json
|
134
134
|
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - output_file: output.yaml
|
135
135
|
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] --------------------------------------------------------------------------------
|
136
|
-
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [1] Burner::
|
136
|
+
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [1] Burner::Library::IO::Read::read
|
137
137
|
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Reading: spec/fixtures/input.json
|
138
138
|
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
|
139
|
-
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [2] Burner::
|
139
|
+
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [2] Burner::Library::Echo::output_id
|
140
140
|
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - The job id is:
|
141
141
|
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
|
142
|
-
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [3] Burner::
|
142
|
+
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [3] Burner::Library::Echo::output_value
|
143
143
|
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - The current value is:
|
144
144
|
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
|
145
|
-
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [4] Burner::
|
145
|
+
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [4] Burner::Library::Deserialize::Json::parse
|
146
146
|
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
|
147
|
-
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [5] Burner::
|
147
|
+
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [5] Burner::Library::Serialize::Yaml::convert
|
148
148
|
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
|
149
|
-
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [6] Burner::
|
149
|
+
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [6] Burner::Library::Echo::output_value
|
150
150
|
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - The current value is:
|
151
151
|
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
|
152
|
-
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [7] Burner::
|
152
|
+
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [7] Burner::Library::IO::Write::write
|
153
153
|
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Writing: output.yaml
|
154
154
|
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
|
155
155
|
[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] --------------------------------------------------------------------------------
|
@@ -163,7 +163,7 @@ Notes:
|
|
163
163
|
|
164
164
|
### Command Line Pipeline Processing
|
165
165
|
|
166
|
-
This library also ships with a built-in script `
|
166
|
+
This library also ships with a built-in script `burner` that illustrates using the `Burner::Cli` API. This class can take in an array of arguments (similar to a command-line) and execute a pipeline. The first argument is the path to a YAML file with the pipeline's configuration and each subsequent argument is a param in `key=value` form. Here is how the json-to-yaml example can utilize this interface:
|
167
167
|
|
168
168
|
#### Create YAML Pipeline Configuration File
|
169
169
|
|
@@ -172,25 +172,25 @@ Write the following json_to_yaml_pipeline.yaml file to disk:
|
|
172
172
|
````yaml
|
173
173
|
jobs:
|
174
174
|
- name: read
|
175
|
-
type: io/read
|
175
|
+
type: b/io/read
|
176
176
|
path: '{input_file}'
|
177
177
|
|
178
178
|
- name: output_id
|
179
|
-
type: echo
|
179
|
+
type: b/echo
|
180
180
|
message: 'The job id is: {__id}'
|
181
181
|
|
182
182
|
- name: output_value
|
183
|
-
type: echo
|
183
|
+
type: b/echo
|
184
184
|
message: 'The current value is: {__value}'
|
185
185
|
|
186
186
|
- name: parse
|
187
|
-
type: deserialize/json
|
187
|
+
type: b/deserialize/json
|
188
188
|
|
189
189
|
- name: convert
|
190
|
-
type: serialize/yaml
|
190
|
+
type: b/serialize/yaml
|
191
191
|
|
192
192
|
- name: write
|
193
|
-
type: io/write
|
193
|
+
type: b/io/write
|
194
194
|
path: '{output_file}'
|
195
195
|
|
196
196
|
steps:
|
@@ -233,49 +233,54 @@ This library only ships with very basic, rudimentary jobs that are meant to just
|
|
233
233
|
|
234
234
|
#### Collection
|
235
235
|
|
236
|
-
* **collection/arrays_to_objects** [mappings]: Convert an array of arrays to an array of objects.
|
237
|
-
* **collection/graph** [config, key]: Use (
|
238
|
-
* **collection/objects_to_arrays** [mappings]: Convert an array of objects to an array of arrays.
|
239
|
-
* **collection/shift** [amount]: Remove the first N number of elements from an array.
|
240
|
-
* **collection/transform** [attributes, exclusive, separator]: Iterate over all objects and transform each key per the attribute transformers specifications. If exclusive is set to false then the current object will be overridden/merged. Separator can also be set for key path support. This job uses (
|
241
|
-
* **collection/unpivot** [pivot_set]: Take an array of objects and unpivot specific sets of keys into rows. Under the hood it uses [HashMath's Unpivot class](https://github.com/bluemarblepayroll/hash_math#unpivot-hash-key-coalescence-and-row-extrapolation).
|
236
|
+
* **b/collection/arrays_to_objects** [mappings, register]: Convert an array of arrays to an array of objects.
|
237
|
+
* **b/collection/graph** [config, key, register]: Use [Hashematics](https://github.com/bluemarblepayroll/hashematics) to turn a flat array of objects into a deeply nested object tree.
|
238
|
+
* **b/collection/objects_to_arrays** [mappings, register]: Convert an array of objects to an array of arrays.
|
239
|
+
* **b/collection/shift** [amount, register]: Remove the first N number of elements from an array.
|
240
|
+
* **b/collection/transform** [attributes, exclusive, separator, register]: Iterate over all objects and transform each key per the attribute transformers specifications. If exclusive is set to false then the current object will be overridden/merged. Separator can also be set for key path support. This job uses [Realize](https://github.com/bluemarblepayroll/realize), which provides its own extendable value-transformation pipeline.
|
241
|
+
* **b/collection/unpivot** [pivot_set, register]: Take an array of objects and unpivot specific sets of keys into rows. Under the hood it uses [HashMath's Unpivot class](https://github.com/bluemarblepayroll/hash_math#unpivot-hash-key-coalescence-and-row-extrapolation).
|
242
|
+
* **b/collection/validate** [invalid_register, join_char, message_key, register, separator, validations]: Take an array of objects, run it through each declared validator, and split the objects into two registers. The valid objects will be split into the current register while the invalid ones will go into the invalid_register as declared. Optional arguments, join_char and message_key, help determine the compiled error messages. The separator option can be utilized to use dot-notation for validating keys. See each validation's options by viewing their classes within the `lib/modeling/validations` directory.
|
243
|
+
* **b/collection/values** [include_keys, register]: Take an array of objects and call `#values` on each object. If include_keys is true (it is false by default), then call `#keys` on the first object and inject that as a "header" object.
|
242
244
|
|
243
245
|
#### De-serialization
|
244
246
|
|
245
|
-
* **deserialize/csv** []: Take a CSV string and de-serialize into object(s). Currently it will return an array of arrays, with each nested array representing one row.
|
246
|
-
* **deserialize/json** []: Treat input as a string and de-serialize it to JSON.
|
247
|
-
* **deserialize/yaml** [safe]: Treat input as a string and de-serialize it to YAML. By default it will try and
|
247
|
+
* **b/deserialize/csv** [register]: Take a CSV string and de-serialize into object(s). Currently it will return an array of arrays, with each nested array representing one row.
|
248
|
+
* **b/deserialize/json** [register]: Treat input as a string and de-serialize it to JSON.
|
249
|
+
* **b/deserialize/yaml** [register, safe]: Treat input as a string and de-serialize it to YAML. By default it will try and [safely de-serialize](https://ruby-doc.org/stdlib-2.6.1/libdoc/psych/rdoc/Psych.html#method-c-safe_load) it (only using core classes). If you wish to de-serialize it to any class type, pass in `safe: false`
|
248
250
|
|
249
251
|
#### IO
|
250
252
|
|
251
|
-
* **io/exist** [path, short_circuit]: Check to see if a file exists. The path parameter can be interpolated using `Payload#params`. If short_circuit was set to true (defaults to false) and the file does not exist then the pipeline will be short-circuited.
|
252
|
-
* **io/read** [binary, path]: Read in a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+read mode.
|
253
|
-
* **io/write** [binary, path]: Write to a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+write mode.
|
253
|
+
* **b/io/exist** [path, short_circuit]: Check to see if a file exists. The path parameter can be interpolated using `Payload#params`. If short_circuit was set to true (defaults to false) and the file does not exist then the pipeline will be short-circuited.
|
254
|
+
* **b/io/read** [binary, path, register]: Read in a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+read mode.
|
255
|
+
* **b/io/write** [binary, path, register]: Write to a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+write mode.
|
254
256
|
|
255
257
|
#### Serialization
|
256
258
|
|
257
|
-
* **serialize/csv** []: Take an array of arrays and create a CSV.
|
258
|
-
* **serialize/json** []: Convert value to JSON.
|
259
|
-
* **serialize/yaml** []: Convert value to YAML.
|
259
|
+
* **b/serialize/csv** [register]: Take an array of arrays and create a CSV.
|
260
|
+
* **b/serialize/json** [register]: Convert value to JSON.
|
261
|
+
* **b/serialize/yaml** [register]: Convert value to YAML.
|
260
262
|
|
261
263
|
#### General
|
262
264
|
|
263
|
-
* **dummy** []: Do nothing
|
264
|
-
* **echo** [message]: Write a message to the output. The message parameter can be interpolated using `Payload#params`.
|
265
|
-
* **set** [value]: Set the value to any arbitrary value.
|
266
|
-
* **sleep** [seconds]: Sleep the thread for X number of seconds.
|
265
|
+
* **b/dummy** []: Do nothing
|
266
|
+
* **b/echo** [message]: Write a message to the output. The message parameter can be interpolated using `Payload#params`.
|
267
|
+
* **b/set** [register, value]: Set the value to any arbitrary value.
|
268
|
+
* **b/sleep** [seconds]: Sleep the thread for X number of seconds.
|
267
269
|
|
270
|
+
Notes:
|
271
|
+
|
272
|
+
* If you see that a job accepts a 'register' attribute/argument, that indicates a job will access and/or mutate the payload. The register indicates which part of the payload the job will interact with. This allows jobs to be placed into 'lanes'. If register is not specified, then the default register is used.
|
268
273
|
|
269
274
|
### Adding & Registering Jobs
|
270
275
|
|
271
|
-
Where this library shines is when additional jobs are plugged in. Burner uses its `Burner::Jobs` class as its class-level registry built with (
|
276
|
+
Where this library shines is when additional jobs are plugged in. Burner uses its `Burner::Jobs` class as its class-level registry built with [acts_as_hashable](https://github.com/bluemarblepayroll/acts_as_hashable)'s acts_as_hashable_factory directive.
|
272
277
|
|
273
278
|
Let's say we would like to register a job to parse a CSV:
|
274
279
|
|
275
280
|
````ruby
|
276
|
-
class ParseCsv < Burner::
|
281
|
+
class ParseCsv < Burner::JobWithRegister
|
277
282
|
def perform(output, payload)
|
278
|
-
payload
|
283
|
+
payload[register] = CSV.parse(payload[register], headers: true).map(&:to_h)
|
279
284
|
|
280
285
|
nil
|
281
286
|
end
|
@@ -291,17 +296,17 @@ pipeline = {
|
|
291
296
|
jobs: [
|
292
297
|
{
|
293
298
|
name: :read,
|
294
|
-
type: 'io/read',
|
299
|
+
type: 'b/io/read',
|
295
300
|
path: '{input_file}'
|
296
301
|
},
|
297
302
|
{
|
298
303
|
name: :output_id,
|
299
|
-
type:
|
304
|
+
type: 'b/echo',
|
300
305
|
message: 'The job id is: {__id}'
|
301
306
|
},
|
302
307
|
{
|
303
308
|
name: :output_value,
|
304
|
-
type:
|
309
|
+
type: 'b/echo',
|
305
310
|
message: 'The current value is: {__value}'
|
306
311
|
},
|
307
312
|
{
|
@@ -310,11 +315,11 @@ pipeline = {
|
|
310
315
|
},
|
311
316
|
{
|
312
317
|
name: :convert,
|
313
|
-
type: 'serialize/yaml'
|
318
|
+
type: 'b/serialize/yaml'
|
314
319
|
},
|
315
320
|
{
|
316
321
|
name: :write,
|
317
|
-
type: 'io/write',
|
322
|
+
type: 'b/io/write',
|
318
323
|
path: '{output_file}'
|
319
324
|
}
|
320
325
|
],
|
data/burner.gemspec
CHANGED
@@ -8,7 +8,7 @@ Gem::Specification.new do |s|
|
|
8
8
|
s.summary = 'Declarative and extendable processing pipeline'
|
9
9
|
|
10
10
|
s.description = <<-DESCRIPTION
|
11
|
-
This library serves as the
|
11
|
+
This library serves as the backbone for a configurable processing engine. It allows you to organize your code into jobs, then stitch those jobs together as steps.
|
12
12
|
DESCRIPTION
|
13
13
|
|
14
14
|
s.authors = ['Matthew Ruggio']
|
data/exe/burner
CHANGED
@@ -10,11 +10,10 @@
|
|
10
10
|
|
11
11
|
require 'bundler/setup'
|
12
12
|
require 'burner'
|
13
|
-
require 'pry'
|
14
13
|
|
15
14
|
if ARGV.empty?
|
16
|
-
|
17
|
-
exit
|
15
|
+
warn('Usage: ./exe/burner package.yaml key=value key=value ...')
|
16
|
+
exit 2 # Do not return 1, that is reserved for hard errors.
|
18
17
|
end
|
19
18
|
|
20
19
|
# This should return exit code of 1 if it raises any hard errors.
|
data/lib/burner.rb
CHANGED
data/lib/burner/cli.rb
CHANGED
data/lib/burner/job.rb
CHANGED
@@ -7,31 +7,49 @@
|
|
7
7
|
# LICENSE file in the root directory of this source tree.
|
8
8
|
#
|
9
9
|
|
10
|
-
require_relative 'string_template'
|
11
|
-
|
12
10
|
module Burner
|
13
11
|
# Abstract base class for all job subclasses. The only public method a subclass needs to
|
14
|
-
# implement #perform(
|
12
|
+
# implement #perform(output, payload) and then you can register it for use using
|
15
13
|
# the Burner::Jobs factory class method #register. An example of a registration:
|
16
14
|
# Burner::Jobs.register('your_class', YourClass)
|
17
15
|
class Job
|
16
|
+
include Util::Arrayable
|
18
17
|
acts_as_hashable
|
19
18
|
|
20
|
-
attr_reader :name
|
19
|
+
attr_reader :name
|
21
20
|
|
22
21
|
def initialize(name:)
|
23
22
|
raise ArgumentError, 'name is required' if name.to_s.empty?
|
24
23
|
|
25
|
-
@name
|
26
|
-
|
24
|
+
@name = name.to_s
|
25
|
+
end
|
26
|
+
|
27
|
+
# There are only a few requirements to be considered a valid Burner Job:
|
28
|
+
# 1. The class responds to #name
|
29
|
+
# 2. The class responds to #perform(output, payload)
|
30
|
+
#
|
31
|
+
# The #perform method takes in two arguments: output (an instance of Burner::Output)
|
32
|
+
# and payload (an instance of Burner::Payload). Jobs can leverage output to emit
|
33
|
+
# information to the pipeline's log(s). The payload is utilized to pass data from job to job,
|
34
|
+
# with its most important attribute being #value. The value attribute is mutable
|
35
|
+
# per the individual job's context (meaning of it is unknown without understanding a job's
|
36
|
+
# input and output value of #value.). Therefore #value can mean anything and it is up to the
|
37
|
+
# engineers to clearly document the assumptions of its use.
|
38
|
+
#
|
39
|
+
# Returning false will short-circuit the pipeline right after the job method exits.
|
40
|
+
# Returning anything else besides false just means "continue".
|
41
|
+
def perform(output, _payload)
|
42
|
+
output.detail("#perform not implemented for: #{self.class.name}")
|
43
|
+
|
44
|
+
nil
|
27
45
|
end
|
28
46
|
|
29
|
-
|
47
|
+
protected
|
30
48
|
|
31
49
|
def job_string_template(expression, output, payload)
|
32
|
-
templatable_params = payload.params.merge(__id: output.id, __value: payload
|
50
|
+
templatable_params = payload.params.merge(__id: output.id, __value: payload[''])
|
33
51
|
|
34
|
-
|
52
|
+
Util::StringTemplate.instance.evaluate(expression, templatable_params)
|
35
53
|
end
|
36
54
|
end
|
37
55
|
end
|
@@ -0,0 +1,24 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
#
|
4
|
+
# Copyright (c) 2020-present, Blue Marble Payroll, LLC
|
5
|
+
#
|
6
|
+
# This source code is licensed under the MIT license found in the
|
7
|
+
# LICENSE file in the root directory of this source tree.
|
8
|
+
#
|
9
|
+
|
10
|
+
require_relative 'job'
|
11
|
+
|
12
|
+
module Burner
|
13
|
+
# Add on a register attribute to the configuration for a job. This indicates that a job
|
14
|
+
# either accesses and/or mutates the payload's registers.
|
15
|
+
class JobWithRegister < Job
|
16
|
+
attr_reader :register
|
17
|
+
|
18
|
+
def initialize(name:, register: '')
|
19
|
+
super(name: name)
|
20
|
+
|
21
|
+
@register = register.to_s
|
22
|
+
end
|
23
|
+
end
|
24
|
+
end
|
data/lib/burner/jobs.rb
CHANGED
@@ -7,26 +7,7 @@
|
|
7
7
|
# LICENSE file in the root directory of this source tree.
|
8
8
|
#
|
9
9
|
|
10
|
-
require_relative '
|
11
|
-
require_relative 'jobs/collection/arrays_to_objects'
|
12
|
-
require_relative 'jobs/collection/graph'
|
13
|
-
require_relative 'jobs/collection/objects_to_arrays'
|
14
|
-
require_relative 'jobs/collection/shift'
|
15
|
-
require_relative 'jobs/collection/transform'
|
16
|
-
require_relative 'jobs/collection/unpivot'
|
17
|
-
require_relative 'jobs/deserialize/csv'
|
18
|
-
require_relative 'jobs/deserialize/json'
|
19
|
-
require_relative 'jobs/deserialize/yaml'
|
20
|
-
require_relative 'jobs/dummy'
|
21
|
-
require_relative 'jobs/echo'
|
22
|
-
require_relative 'jobs/io/exist'
|
23
|
-
require_relative 'jobs/io/read'
|
24
|
-
require_relative 'jobs/io/write'
|
25
|
-
require_relative 'jobs/serialize/csv'
|
26
|
-
require_relative 'jobs/serialize/json'
|
27
|
-
require_relative 'jobs/serialize/yaml'
|
28
|
-
require_relative 'jobs/set'
|
29
|
-
require_relative 'jobs/sleep'
|
10
|
+
require_relative 'library'
|
30
11
|
|
31
12
|
module Burner
|
32
13
|
# Main library of jobs. This file contains all the basic/default jobs. All other consumer
|
@@ -35,24 +16,32 @@ module Burner
|
|
35
16
|
class Jobs
|
36
17
|
acts_as_hashable_factory
|
37
18
|
|
38
|
-
|
39
|
-
|
40
|
-
register '
|
41
|
-
register '
|
42
|
-
register '
|
43
|
-
register '
|
44
|
-
|
45
|
-
register '
|
46
|
-
register '
|
47
|
-
register '
|
48
|
-
register '
|
49
|
-
register '
|
50
|
-
register '
|
51
|
-
register '
|
52
|
-
register '
|
53
|
-
|
54
|
-
register '
|
55
|
-
register '
|
56
|
-
register '
|
19
|
+
# Dummy is the default as noted by the ''. This means if a type is omitted, nil, or blank
|
20
|
+
# string then the dummy job will be used.
|
21
|
+
register 'b/dummy', '', Library::Dummy
|
22
|
+
register 'b/echo', Library::Echo
|
23
|
+
register 'b/set_value', Library::SetValue
|
24
|
+
register 'b/sleep', Library::Sleep
|
25
|
+
|
26
|
+
register 'b/collection/arrays_to_objects', Library::Collection::ArraysToObjects
|
27
|
+
register 'b/collection/graph', Library::Collection::Graph
|
28
|
+
register 'b/collection/objects_to_arrays', Library::Collection::ObjectsToArrays
|
29
|
+
register 'b/collection/shift', Library::Collection::Shift
|
30
|
+
register 'b/collection/transform', Library::Collection::Transform
|
31
|
+
register 'b/collection/unpivot', Library::Collection::Unpivot
|
32
|
+
register 'b/collection/values', Library::Collection::Values
|
33
|
+
register 'b/collection/validate', Library::Collection::Validate
|
34
|
+
|
35
|
+
register 'b/deserialize/csv', Library::Deserialize::Csv
|
36
|
+
register 'b/deserialize/json', Library::Deserialize::Json
|
37
|
+
register 'b/deserialize/yaml', Library::Deserialize::Yaml
|
38
|
+
|
39
|
+
register 'b/io/exist', Library::IO::Exist
|
40
|
+
register 'b/io/read', Library::IO::Read
|
41
|
+
register 'b/io/write', Library::IO::Write
|
42
|
+
|
43
|
+
register 'b/serialize/csv', Library::Serialize::Csv
|
44
|
+
register 'b/serialize/json', Library::Serialize::Json
|
45
|
+
register 'b/serialize/yaml', Library::Serialize::Yaml
|
57
46
|
end
|
58
47
|
end
|
@@ -0,0 +1,32 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
#
|
4
|
+
# Copyright (c) 2020-present, Blue Marble Payroll, LLC
|
5
|
+
#
|
6
|
+
# This source code is licensed under the MIT license found in the
|
7
|
+
# LICENSE file in the root directory of this source tree.
|
8
|
+
#
|
9
|
+
|
10
|
+
require_relative 'job_with_register'
|
11
|
+
|
12
|
+
require_relative 'library/collection/arrays_to_objects'
|
13
|
+
require_relative 'library/collection/graph'
|
14
|
+
require_relative 'library/collection/objects_to_arrays'
|
15
|
+
require_relative 'library/collection/shift'
|
16
|
+
require_relative 'library/collection/transform'
|
17
|
+
require_relative 'library/collection/unpivot'
|
18
|
+
require_relative 'library/collection/validate'
|
19
|
+
require_relative 'library/collection/values'
|
20
|
+
require_relative 'library/deserialize/csv'
|
21
|
+
require_relative 'library/deserialize/json'
|
22
|
+
require_relative 'library/deserialize/yaml'
|
23
|
+
require_relative 'library/dummy'
|
24
|
+
require_relative 'library/echo'
|
25
|
+
require_relative 'library/io/exist'
|
26
|
+
require_relative 'library/io/read'
|
27
|
+
require_relative 'library/io/write'
|
28
|
+
require_relative 'library/serialize/csv'
|
29
|
+
require_relative 'library/serialize/json'
|
30
|
+
require_relative 'library/serialize/yaml'
|
31
|
+
require_relative 'library/set_value'
|
32
|
+
require_relative 'library/sleep'
|