burner 1.0.0.pre.alpha.7 → 1.0.0.pre.alpha.8

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 8cb851b5c317d3566c1b3b77c9c904e26970de8e0ce4329e2c8b83336c372b02
-  data.tar.gz: 0ad28e56c8b52ceede4ed321812e45edfaef2bcd4000428738c348e5e849092a
+  metadata.gz: ebd1332e7b4a01aace974be43aaa77e02b3bcb13e36b2360b08192bde2b99705
+  data.tar.gz: '09095dcd305279cf2e246cca6e36256a0f7943d12d335366aa6ff9dcc92c6198'
 SHA512:
-  metadata.gz: cf5bc810ffa5dba106e6538dbab2f44e6763f5ad073da2b645e46eff3e976e61f9a0d551ae4d5ad9998c5c1d87b5e2913b902901020ad77f20b4c29800321df5
-  data.tar.gz: ef83de8173fbad28c2cc07c44f101175ebe9c60d5a9275d2e7280ba1b621a8c51082504ae5d5c917916d398a7a89d06ca7416dd08ba7148ebebb87cd42dd7b05
+  metadata.gz: 31e45b0c0cc815d05f3424a04e77a8e36ed96e912ec0fed2dfcf8e171ecbe7f0ade1841991cdb97751bbd10d103b161d173120abd0a68689281a258de524697c
+  data.tar.gz: 751091b32afae9e386814cb16856c5b0d41393b51e4e98070579d6508484e61a27a21bddba4be08f7a8416a86b50de71eeac3d886c635c785572f1597cb9095c
data/README.md CHANGED
@@ -31,30 +31,30 @@ pipeline = {
   jobs: [
     {
       name: :read,
-      type: 'io/read',
+      type: 'b/io/read',
       path: '{input_file}'
     },
     {
       name: :output_id,
-      type: :echo,
+      type: 'b/echo',
       message: 'The job id is: {__id}'
     },
     {
       name: :output_value,
-      type: :echo,
+      type: 'b/echo',
       message: 'The current value is: {__value}'
     },
     {
       name: :parse,
-      type: 'deserialize/json'
+      type: 'b/deserialize/json'
    },
     {
       name: :convert,
-      type: 'serialize/yaml'
+      type: 'b/serialize/yaml'
     },
     {
       name: :write,
-      type: 'io/write',
+      type: 'b/io/write',
       path: '{output_file}'
     }
   ],
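For orientation, a pipeline hash like the one above is executed by materializing it and handing it a payload. A minimal sketch, assuming the `Burner::Pipeline.make`/`#execute` and `Burner::Payload` API shown in this gem's README (keyword names should be confirmed against the installed version):

```ruby
require 'burner'

# {input_file} and {output_file} in the job configs interpolate from params.
params = {
  input_file: 'input.json',
  output_file: 'output.yaml'
}

payload = Burner::Payload.new(params: params)

# Build the pipeline from the declarative hash and run it.
Burner::Pipeline.make(pipeline).execute(payload: payload)
```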
@@ -172,25 +172,25 @@ Write the following json_to_yaml_pipeline.yaml file to disk:
 ````yaml
 jobs:
   - name: read
-    type: io/read
+    type: b/io/read
     path: '{input_file}'
 
   - name: output_id
-    type: echo
+    type: b/echo
     message: 'The job id is: {__id}'
 
   - name: output_value
-    type: echo
+    type: b/echo
     message: 'The current value is: {__value}'
 
   - name: parse
-    type: deserialize/json
+    type: b/deserialize/json
 
   - name: convert
-    type: serialize/yaml
+    type: b/serialize/yaml
 
   - name: write
-    type: io/write
+    type: b/io/write
     path: '{output_file}'
 
 steps:
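The YAML form can be fed to the gem's bundled `burner` command line script or loaded programmatically. A sketch of the latter, assuming `Pipeline.make` accepts the string-keyed hash produced by `YAML.safe_load` (acts_as_hashable is generally key-indifferent, but verify against this version):

```ruby
require 'burner'
require 'yaml'

# Parse the declarative pipeline file into a plain hash.
config = YAML.safe_load(File.read('json_to_yaml_pipeline.yaml'))

payload = Burner::Payload.new(
  params: { input_file: 'input.json', output_file: 'output.yaml' }
)

Burner::Pipeline.make(config).execute(payload: payload)
```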
@@ -233,39 +233,39 @@ This library only ships with very basic, rudimentary jobs that are meant to just
 
 #### Collection
 
- * **collection/arrays_to_objects** [mappings, register]: Convert an array of arrays to an array of objects.
- * **collection/graph** [config, key, register]: Use [Hashematics](https://github.com/bluemarblepayroll/hashematics) to turn a flat array of objects into a deeply nested object tree.
- * **collection/objects_to_arrays** [mappings, register]: Convert an array of objects to an array of arrays.
- * **collection/shift** [amount, register]: Remove the first N number of elements from an array.
- * **collection/transform** [attributes, exclusive, separator, register]: Iterate over all objects and transform each key per the attribute transformers specifications. If exclusive is set to false then the current object will be overridden/merged. Separator can also be set for key path support. This job uses [Realize](https://github.com/bluemarblepayroll/realize), which provides its own extendable value-transformation pipeline.
- * **collection/unpivot** [pivot_set, register]: Take an array of objects and unpivot specific sets of keys into rows. Under the hood it uses [HashMath's Unpivot class](https://github.com/bluemarblepayroll/hash_math#unpivot-hash-key-coalescence-and-row-extrapolation).
- * **collection/validate** [invalid_register, join_char, message_key, register, separator, validations]: Take an array of objects, run it through each declared validator, and split the objects into two registers. The valid objects will be split into the current register while the invalid ones will go into the invalid_register as declared. Optional arguments, join_char and message_key, help determine the compiled error messages. The separator option can be utilized to use dot-notation for validating keys. See each validation's options by viewing their classes within the `lib/modeling/validations` directory.
- * **collection/values** [include_keys, register]: Take an array of objects and call `#values` on each object. If include_keys is true (it is false by default), then call `#keys` on the first object and inject that as a "header" object.
+ * **b/collection/arrays_to_objects** [mappings, register]: Convert an array of arrays to an array of objects.
+ * **b/collection/graph** [config, key, register]: Use [Hashematics](https://github.com/bluemarblepayroll/hashematics) to turn a flat array of objects into a deeply nested object tree.
+ * **b/collection/objects_to_arrays** [mappings, register]: Convert an array of objects to an array of arrays.
+ * **b/collection/shift** [amount, register]: Remove the first N number of elements from an array.
+ * **b/collection/transform** [attributes, exclusive, separator, register]: Iterate over all objects and transform each key per the attribute transformers specifications. If exclusive is set to false then the current object will be overridden/merged. Separator can also be set for key path support. This job uses [Realize](https://github.com/bluemarblepayroll/realize), which provides its own extendable value-transformation pipeline.
+ * **b/collection/unpivot** [pivot_set, register]: Take an array of objects and unpivot specific sets of keys into rows. Under the hood it uses [HashMath's Unpivot class](https://github.com/bluemarblepayroll/hash_math#unpivot-hash-key-coalescence-and-row-extrapolation).
+ * **b/collection/validate** [invalid_register, join_char, message_key, register, separator, validations]: Take an array of objects, run it through each declared validator, and split the objects into two registers. The valid objects will be split into the current register while the invalid ones will go into the invalid_register as declared. Optional arguments, join_char and message_key, help determine the compiled error messages. The separator option can be utilized to use dot-notation for validating keys. See each validation's options by viewing their classes within the `lib/modeling/validations` directory.
+ * **b/collection/values** [include_keys, register]: Take an array of objects and call `#values` on each object. If include_keys is true (it is false by default), then call `#keys` on the first object and inject that as a "header" object.
 
 #### De-serialization
 
- * **deserialize/csv** [register]: Take a CSV string and de-serialize into object(s). Currently it will return an array of arrays, with each nested array representing one row.
- * **deserialize/json** [register]: Treat input as a string and de-serialize it to JSON.
- * **deserialize/yaml** [register, safe]: Treat input as a string and de-serialize it to YAML. By default it will try and [safely de-serialize](https://ruby-doc.org/stdlib-2.6.1/libdoc/psych/rdoc/Psych.html#method-c-safe_load) it (only using core classes). If you wish to de-serialize it to any class type, pass in `safe: false`
+ * **b/deserialize/csv** [register]: Take a CSV string and de-serialize into object(s). Currently it will return an array of arrays, with each nested array representing one row.
+ * **b/deserialize/json** [register]: Treat input as a string and de-serialize it to JSON.
+ * **b/deserialize/yaml** [register, safe]: Treat input as a string and de-serialize it to YAML. By default it will try and [safely de-serialize](https://ruby-doc.org/stdlib-2.6.1/libdoc/psych/rdoc/Psych.html#method-c-safe_load) it (only using core classes). If you wish to de-serialize it to any class type, pass in `safe: false`
 
 #### IO
 
- * **io/exist** [path, short_circuit]: Check to see if a file exists. The path parameter can be interpolated using `Payload#params`. If short_circuit was set to true (defaults to false) and the file does not exist then the pipeline will be short-circuited.
- * **io/read** [binary, path, register]: Read in a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+read mode.
- * **io/write** [binary, path, register]: Write to a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+write mode.
+ * **b/io/exist** [path, short_circuit]: Check to see if a file exists. The path parameter can be interpolated using `Payload#params`. If short_circuit was set to true (defaults to false) and the file does not exist then the pipeline will be short-circuited.
+ * **b/io/read** [binary, path, register]: Read in a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+read mode.
+ * **b/io/write** [binary, path, register]: Write to a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+write mode.
 
 #### Serialization
 
- * **serialize/csv** [register]: Take an array of arrays and create a CSV.
- * **serialize/json** [register]: Convert value to JSON.
- * **serialize/yaml** [register]: Convert value to YAML.
+ * **b/serialize/csv** [register]: Take an array of arrays and create a CSV.
+ * **b/serialize/json** [register]: Convert value to JSON.
+ * **b/serialize/yaml** [register]: Convert value to YAML.
 
 #### General
 
- * **dummy** []: Do nothing
- * **echo** [message]: Write a message to the output. The message parameter can be interpolated using `Payload#params`.
- * **set** [register, value]: Set the value to any arbitrary value.
- * **sleep** [seconds]: Sleep the thread for X number of seconds.
+ * **b/dummy** []: Do nothing
+ * **b/echo** [message]: Write a message to the output. The message parameter can be interpolated using `Payload#params`.
+ * **b/set** [register, value]: Set the value to any arbitrary value.
+ * **b/sleep** [seconds]: Sleep the thread for X number of seconds.
 
 Notes:
 
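To make the bracketed option lists concrete: a job entry combines `type` with those options as sibling keys. A hedged sketch using two of the renamed jobs (register semantics per the descriptions above; `:people` is an arbitrary register name chosen for illustration):

```ruby
jobs = [
  {
    name: :seed,
    type: 'b/set_value',           # set the register to an arbitrary value
    register: :people,
    value: [{ 'id' => 1, 'name' => 'funky' }]
  },
  {
    name: :rows,
    type: 'b/collection/values',   # call #values on each object...
    register: :people,
    include_keys: true             # ...and prepend a header row built from #keys
  }
]
```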
@@ -296,17 +296,17 @@ pipeline = {
   jobs: [
     {
       name: :read,
-      type: 'io/read',
+      type: 'b/io/read',
       path: '{input_file}'
     },
     {
       name: :output_id,
-      type: :echo,
+      type: 'b/echo',
       message: 'The job id is: {__id}'
     },
     {
       name: :output_value,
-      type: :echo,
+      type: 'b/echo',
       message: 'The current value is: {__value}'
     },
     {
@@ -315,11 +315,11 @@ pipeline = {
     },
     {
       name: :convert,
-      type: 'serialize/yaml'
+      type: 'b/serialize/yaml'
     },
     {
       name: :write,
-      type: 'io/write',
+      type: 'b/io/write',
       path: '{output_file}'
     }
   ],
data/burner.gemspec CHANGED
@@ -8,7 +8,7 @@ Gem::Specification.new do |s|
   s.summary = 'Declarative and extendable processing pipeline'
 
   s.description = <<-DESCRIPTION
-    This library serves as the skeleton for a processing engine. It allows you to organize your code into Jobs, then stitch those jobs together as steps.
+    This library serves as the backbone for a configurable processing engine. It allows you to organize your code into jobs, then stitch those jobs together as steps.
   DESCRIPTION
 
   s.authors = ['Matthew Ruggio']
data/lib/burner/jobs.rb CHANGED
@@ -16,25 +16,31 @@ module Burner
   class Jobs
     acts_as_hashable_factory
 
-    register 'collection/arrays_to_objects', Library::Collection::ArraysToObjects
-    register 'collection/graph', Library::Collection::Graph
-    register 'collection/objects_to_arrays', Library::Collection::ObjectsToArrays
-    register 'collection/shift', Library::Collection::Shift
-    register 'collection/transform', Library::Collection::Transform
-    register 'collection/unpivot', Library::Collection::Unpivot
-    register 'collection/values', Library::Collection::Values
-    register 'deserialize/csv', Library::Deserialize::Csv
-    register 'deserialize/json', Library::Deserialize::Json
-    register 'deserialize/yaml', Library::Deserialize::Yaml
-    register 'dummy', '', Library::Dummy
-    register 'echo', Library::Echo
-    register 'io/exist', Library::IO::Exist
-    register 'io/read', Library::IO::Read
-    register 'io/write', Library::IO::Write
-    register 'serialize/csv', Library::Serialize::Csv
-    register 'serialize/json', Library::Serialize::Json
-    register 'serialize/yaml', Library::Serialize::Yaml
-    register 'set_value', Library::SetValue
-    register 'sleep', Library::Sleep
+    # Dummy is the default as noted by the ''. This means if a type is omitted, nil, or blank
+    # string then the dummy job will be used.
+    register 'b/dummy', '', Library::Dummy
+    register 'b/echo', Library::Echo
+    register 'b/set_value', Library::SetValue
+    register 'b/sleep', Library::Sleep
+
+    register 'b/collection/arrays_to_objects', Library::Collection::ArraysToObjects
+    register 'b/collection/graph', Library::Collection::Graph
+    register 'b/collection/objects_to_arrays', Library::Collection::ObjectsToArrays
+    register 'b/collection/shift', Library::Collection::Shift
+    register 'b/collection/transform', Library::Collection::Transform
+    register 'b/collection/unpivot', Library::Collection::Unpivot
+    register 'b/collection/values', Library::Collection::Values
+
+    register 'b/deserialize/csv', Library::Deserialize::Csv
+    register 'b/deserialize/json', Library::Deserialize::Json
+    register 'b/deserialize/yaml', Library::Deserialize::Yaml
+
+    register 'b/io/exist', Library::IO::Exist
+    register 'b/io/read', Library::IO::Read
+    register 'b/io/write', Library::IO::Write
+
+    register 'b/serialize/csv', Library::Serialize::Csv
+    register 'b/serialize/json', Library::Serialize::Json
+    register 'b/serialize/yaml', Library::Serialize::Yaml
   end
 end
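Because `Jobs` is an acts_as_hashable factory, applications can register their own jobs alongside the `b/`-prefixed built-ins. A hypothetical sketch modeled on the README's custom-job example; the `Payload#value` accessor and the `#perform(output, payload)` contract shown here should be confirmed against this alpha's README:

```ruby
require 'burner'
require 'csv'

# Hypothetical job: parse the payload's CSV string into an array of hashes.
class ParseCsv < Burner::Job
  def perform(output, payload)
    payload.value = CSV.parse(payload.value, headers: true).map(&:to_h)

    nil
  end
end

# Expose it to pipelines under its own type key. Leaving off the 'b/'
# prefix avoids colliding with the built-in library's namespace.
Burner::Jobs.register('parse_csv', ParseCsv)
```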
data/lib/burner/library/collection/arrays_to_objects.rb CHANGED
@@ -23,14 +23,14 @@ module Burner
   #   jobs: [
   #     {
   #       name: 'set',
-  #       type: 'set_value',
+  #       type: 'b/set_value',
   #       value: [
   #         [1, 'funky']
   #       ]
   #     },
   #     {
   #       name: 'map',
-  #       type: 'collection/arrays_to_objects',
+  #       type: 'b/collection/arrays_to_objects',
   #       mappings: [
   #         { index: 0, key: 'id' },
   #         { index: 1, key: 'name' }
@@ -38,7 +38,7 @@ module Burner
   #     },
   #     {
   #       name: 'output',
-  #       type: 'echo',
+  #       type: 'b/echo',
   #       message: 'value is currently: {__value}'
   #     },
   #
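For reference, the documented example above runs end to end once wrapped in a pipeline. A sketch under the same hedges as earlier about `execute`'s signature (the `steps` ordering is illustrative):

```ruby
pipeline = {
  jobs: [
    { name: 'set', type: 'b/set_value', value: [[1, 'funky']] },
    {
      name: 'map',
      type: 'b/collection/arrays_to_objects',
      mappings: [
        { index: 0, key: 'id' },   # array position 0 becomes the 'id' key
        { index: 1, key: 'name' }  # array position 1 becomes the 'name' key
      ]
    },
    { name: 'output', type: 'b/echo', message: 'value is currently: {__value}' }
  ],
  steps: %w[set map output]
}

Burner::Pipeline.make(pipeline).execute(payload: Burner::Payload.new)
```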
data/lib/burner/library/collection/objects_to_arrays.rb CHANGED
@@ -24,14 +24,14 @@ module Burner
   #   jobs: [
   #     {
   #       name: 'set',
-  #       type: 'set_value',
+  #       type: 'b/set_value',
   #       value: [
   #         [1, 'funky']
   #       ]
   #     },
   #     {
   #       name: 'map',
-  #       type: 'collection/objects_to_arrays',
+  #       type: 'b/collection/objects_to_arrays',
   #       mappings: [
   #         { index: 0, key: 'id' },
   #         { index: 1, key: 'name' }
@@ -39,7 +39,7 @@ module Burner
   #     },
   #     {
   #       name: 'output',
-  #       type: 'echo',
+  #       type: 'b/echo',
   #       message: 'value is currently: {__value}'
   #     },
   #
data/lib/burner/version.rb CHANGED
@@ -8,5 +8,5 @@
 #
 
 module Burner
-  VERSION = '1.0.0-alpha.7'
+  VERSION = '1.0.0-alpha.8'
 end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: burner
 version: !ruby/object:Gem::Version
-  version: 1.0.0.pre.alpha.7
+  version: 1.0.0.pre.alpha.8
 platform: ruby
 authors:
 - Matthew Ruggio
@@ -192,8 +192,9 @@ dependencies:
   - - "~>"
     - !ruby/object:Gem::Version
       version: 0.7.0
-description: " This library serves as the skeleton for a processing engine. It
-  allows you to organize your code into Jobs, then stitch those jobs together as steps.\n"
+description: " This library serves as the backbone for a configurable processing
+  engine. It allows you to organize your code into jobs, then stitch those jobs together
+  as steps.\n"
 email:
 - mruggio@bluemarblepayroll.com
 executables: