burner 1.0.0.pre.alpha.5 → 1.0.0.pre.alpha.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. checksums.yaml +4 -4
  2. data/README.md +12 -12
  3. data/exe/burner +2 -3
  4. data/lib/burner.rb +2 -0
  5. data/lib/burner/cli.rb +2 -0
  6. data/lib/burner/job.rb +25 -5
  7. data/lib/burner/jobs.rb +21 -41
  8. data/lib/burner/library.rb +30 -0
  9. data/lib/burner/library/collection/arrays_to_objects.rb +77 -0
  10. data/lib/burner/{jobs → library}/collection/graph.rb +3 -2
  11. data/lib/burner/{jobs → library}/collection/objects_to_arrays.rb +40 -4
  12. data/lib/burner/{jobs → library}/collection/shift.rb +5 -4
  13. data/lib/burner/{jobs → library}/collection/transform.rb +13 -9
  14. data/lib/burner/{jobs → library}/collection/unpivot.rb +7 -5
  15. data/lib/burner/{jobs → library}/collection/values.rb +5 -4
  16. data/lib/burner/{jobs → library}/deserialize/csv.rb +2 -1
  17. data/lib/burner/{jobs → library}/deserialize/json.rb +4 -1
  18. data/lib/burner/{jobs → library}/deserialize/yaml.rb +8 -2
  19. data/lib/burner/{jobs → library}/dummy.rb +3 -1
  20. data/lib/burner/{jobs → library}/echo.rb +3 -1
  21. data/lib/burner/{jobs → library}/io/base.rb +1 -1
  22. data/lib/burner/{jobs → library}/io/exist.rb +3 -1
  23. data/lib/burner/{jobs → library}/io/read.rb +4 -1
  24. data/lib/burner/{jobs → library}/io/write.rb +7 -2
  25. data/lib/burner/{jobs → library}/serialize/csv.rb +3 -2
  26. data/lib/burner/{jobs → library}/serialize/json.rb +4 -1
  27. data/lib/burner/{jobs → library}/serialize/yaml.rb +4 -1
  28. data/lib/burner/{jobs/set.rb → library/set_value.rb} +5 -2
  29. data/lib/burner/{jobs → library}/sleep.rb +3 -1
  30. data/lib/burner/modeling.rb +2 -0
  31. data/lib/burner/modeling/attribute.rb +29 -0
  32. data/lib/burner/modeling/attribute_renderer.rb +32 -0
  33. data/lib/burner/payload.rb +15 -12
  34. data/lib/burner/pipeline.rb +20 -1
  35. data/lib/burner/side_effects.rb +10 -0
  36. data/lib/burner/side_effects/written_file.rb +28 -0
  37. data/lib/burner/util.rb +10 -0
  38. data/lib/burner/util/arrayable.rb +30 -0
  39. data/lib/burner/version.rb +1 -1
  40. metadata +30 -26
  41. data/lib/burner/jobs/collection/arrays_to_objects.rb +0 -43
  42. data/lib/burner/jobs/collection/transform/attribute.rb +0 -33
  43. data/lib/burner/jobs/collection/transform/attribute_renderer.rb +0 -36
  44. data/lib/burner/written_file.rb +0 -28
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 57b5b4290b72962e10ce7ea7244a1dcfb43894cff1d72ef4e0684de4cdea4a35
-  data.tar.gz: 2936c408e7ffa3e1e9883510d890329870d5c1a4c129fef1fb283103568c9508
+  metadata.gz: 73263c161bce669b99286160acb7129d33c529162da964b72fd34321cc7e3724
+  data.tar.gz: 18fd59799a73d9a8a3bf691a9dd3f8a004d442b37862bf83cbe347fbddd7e49a
 SHA512:
-  metadata.gz: f514870aa2b12cc4fc34f3952cabf8c738985dac1a6602c7f41aec3f28b83738991d5cf82d429b62d76b0f3a96b85907e51d0e06db0ce33579d1dc74dc55409e
-  data.tar.gz: 2e9ee10f91bb28eb4d79091ae7106f6da640daaf6b5ac2f77e828e81743dfd55d6baca12a0e376c3be709483fe7a94e0f840f47ed2ecb5031233f4ff9ae7db3a
+  metadata.gz: 7db0c2be13a882885b97681ec7c9fd60b6702368d673d97ba0aa484b446db0c77c9f823adf03c8d9e970b2078b43a99d2d159ba97df345124a461497718b7046
+  data.tar.gz: 509382aa7fb48e116b8b3eb145a8b8226c1e4d4282eec1aabb10dcc9f7b333b906a7887d97cee4a96ae730dd7c02a0b0d4cd4a7b06dbfcc90349cfd82767e048
data/README.md CHANGED
@@ -133,23 +133,23 @@ The value of `log` should now look similar to:
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - input_file: input.json
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - output_file: output.yaml
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] --------------------------------------------------------------------------------
-[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [1] Burner::Jobs::IO::Read::read
+[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [1] Burner::Library::IO::Read::read
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Reading: spec/fixtures/input.json
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
-[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [2] Burner::Jobs::Echo::output_id
+[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [2] Burner::Library::Echo::output_id
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - The job id is:
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
-[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [3] Burner::Jobs::Echo::output_value
+[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [3] Burner::Library::Echo::output_value
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - The current value is:
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
-[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [4] Burner::Jobs::Deserialize::Json::parse
+[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [4] Burner::Library::Deserialize::Json::parse
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
-[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [5] Burner::Jobs::Serialize::Yaml::convert
+[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [5] Burner::Library::Serialize::Yaml::convert
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
-[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [6] Burner::Jobs::Echo::output_value
+[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [6] Burner::Library::Echo::output_value
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - The current value is:
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
-[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [7] Burner::Jobs::IO::Write::write
+[8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [7] Burner::Library::IO::Write::write
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Writing: output.yaml
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
 [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] --------------------------------------------------------------------------------
@@ -163,7 +163,7 @@ Notes:
 
 ### Command Line Pipeline Processing
 
-This library also ships with a built-in script `exe/burner` that illustrates using the `Burner::Cli` API. This class can take in an array of arguments (similar to a command-line) and execute a pipeline. The first argument is the path to a YAML file with the pipeline's configuration and each subsequent argument is a param in `key=value` form. Here is how the json-to-yaml example can utilize this interface:
+This library also ships with a built-in script `burner` that illustrates using the `Burner::Cli` API. This class can take in an array of arguments (similar to a command-line) and execute a pipeline. The first argument is the path to a YAML file with the pipeline's configuration and each subsequent argument is a param in `key=value` form. Here is how the json-to-yaml example can utilize this interface:
 
 #### Create YAML Pipeline Configuration File
 
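For reference, a minimal sketch of driving the same pipeline through `Burner::Cli` directly, assuming the constructor accepts the argument array described above (path first, then key=value params; the YAML filename is illustrative):

require 'burner'

# Equivalent of: burner json-to-yaml-pipeline.yaml input_file=input.json output_file=output.yaml
args = %w[json-to-yaml-pipeline.yaml input_file=input.json output_file=output.yaml]

Burner::Cli.new(args).execute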
@@ -234,10 +234,10 @@ This library only ships with very basic, rudimentary jobs that are meant to just
 #### Collection
 
 * **collection/arrays_to_objects** [mappings]: Convert an array of arrays to an array of objects.
-* **collection/graph** [config, key]: Use (Hashematics)[https://github.com/bluemarblepayroll/hashematics] to turn a flat array of objects into a deeply nested object tree.
+* **collection/graph** [config, key]: Use [Hashematics](https://github.com/bluemarblepayroll/hashematics) to turn a flat array of objects into a deeply nested object tree.
 * **collection/objects_to_arrays** [mappings]: Convert an array of objects to an array of arrays.
 * **collection/shift** [amount]: Remove the first N number of elements from an array.
-* **collection/transform** [attributes, exclusive, separator]: Iterate over all objects and transform each key per the attribute transformers specifications. If exclusive is set to false then the current object will be overridden/merged. Separator can also be set for key path support. This job uses (Realize)[https://github.com/bluemarblepayroll/realize], which provides its own extendable value-transformation pipeline.
+* **collection/transform** [attributes, exclusive, separator]: Iterate over all objects and transform each key per the attribute transformers specifications. If exclusive is set to false then the current object will be overridden/merged. Separator can also be set for key path support. This job uses [Realize](https://github.com/bluemarblepayroll/realize), which provides its own extendable value-transformation pipeline.
 * **collection/unpivot** [pivot_set]: Take an array of objects and unpivot specific sets of keys into rows. Under the hood it uses [HashMath's Unpivot class](https://github.com/bluemarblepayroll/hash_math#unpivot-hash-key-coalescence-and-row-extrapolation).
 * **collection/values** [include_keys]: Take an array of objects and call `#values` on each object. If include_keys is true (it is false by default), then call `#keys` on the first object and inject that as a "header" object.
 
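A hedged configuration sketch combining two of the jobs listed above (`set_value` feeds `collection/shift`; names and data are illustrative, while the `type` keys, `Pipeline.make` signature, and `{__value}` interpolation come from this diff):

require 'burner'

pipeline = Burner::Pipeline.make(
  jobs: [
    { name: 'set',    type: 'set_value', value: [%w[id name], %w[1 funky]] },
    { name: 'behead', type: 'collection/shift', amount: 1 }, # drops the header row
    { name: 'out',    type: 'echo', message: 'value is currently: {__value}' }
  ],
  steps: %w[set behead out]
)

pipeline.execute # the payload value should end as [%w[1 funky]]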
@@ -245,7 +245,7 @@ This library only ships with very basic, rudimentary jobs that are meant to just
 
 * **deserialize/csv** []: Take a CSV string and de-serialize into object(s). Currently it will return an array of arrays, with each nested array representing one row.
 * **deserialize/json** []: Treat input as a string and de-serialize it to JSON.
-* **deserialize/yaml** [safe]: Treat input as a string and de-serialize it to YAML. By default it will try and (safely de-serialize)[https://ruby-doc.org/stdlib-2.6.1/libdoc/psych/rdoc/Psych.html#method-c-safe_load] it (only using core classes). If you wish to de-serialize it to any class type, pass in `safe: false`
+* **deserialize/yaml** [safe]: Treat input as a string and de-serialize it to YAML. By default it will try and [safely de-serialize](https://ruby-doc.org/stdlib-2.6.1/libdoc/psych/rdoc/Psych.html#method-c-safe_load) it (only using core classes). If you wish to de-serialize it to any class type, pass in `safe: false`
 
 #### IO
 
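As a sketch, the YAML job can be built on its own through the factory; the `safe` flag is the one documented above (the `name` values are illustrative):

require 'burner'

# Default: Psych safe_load semantics, core classes only.
safe_job = Burner::Jobs.make(name: 'parse', type: 'deserialize/yaml')

# Opt out only for trusted input; arbitrary class types can then be instantiated.
unsafe_job = Burner::Jobs.make(name: 'parse', type: 'deserialize/yaml', safe: false)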
@@ -269,7 +269,7 @@ This library only ships with very basic, rudimentary jobs that are meant to just
 
 ### Adding & Registering Jobs
 
-Where this library shines is when additional jobs are plugged in. Burner uses its `Burner::Jobs` class as its class-level registry built with (acts_as_hashable)[https://github.com/bluemarblepayroll/acts_as_hashable]'s acts_as_hashable_factory directive.
+Where this library shines is when additional jobs are plugged in. Burner uses its `Burner::Jobs` class as its class-level registry built with [acts_as_hashable](https://github.com/bluemarblepayroll/acts_as_hashable)'s acts_as_hashable_factory directive.
 
 Let's say we would like to register a job to parse a CSV:
 
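In the spirit of that CSV example, a hedged sketch of the plug-in path: subclass `Burner::Job`, implement `#perform(output, payload)`, and register it. The `ParseCsv` class here is hypothetical; the `register` call itself is documented in lib/burner/job.rb below.

require 'csv'
require 'burner'

# Hypothetical custom job; only #name (inherited) and #perform are required.
class ParseCsv < Burner::Job
  def perform(output, payload)
    output.detail('Parsing CSV string into an array of arrays')

    payload.value = CSV.new(payload.value.to_s).to_a

    nil # anything but false means "continue the pipeline"
  end
end

Burner::Jobs.register('parse_csv', ParseCsv)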
data/exe/burner CHANGED
@@ -10,11 +10,10 @@
 
 require 'bundler/setup'
 require 'burner'
-require 'pry'
 
 if ARGV.empty?
-  puts 'Usage: ./exe/burner package.yaml key=value key=value ...'
-  exit
+  warn('Usage: ./exe/burner package.yaml key=value key=value ...')
+  exit 2 # Do not return 1, that is reserved for hard errors.
 end
 
 # This should return exit code of 1 if it raises any hard errors.
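The script now separates exit codes: 2 for a usage problem, 1 (Ruby's default for an uncaught exception) reserved for hard pipeline errors, 0 for success. A hedged sketch of a caller relying on that convention (pipeline path and params are placeholders):

success = system('burner', 'pipeline.yaml', 'input_file=input.json')

unless success
  code = $?.exitstatus
  warn(code == 2 ? 'invocation problem (no arguments given)' : 'hard error raised inside the pipeline')
end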
data/lib/burner.rb CHANGED
@@ -24,6 +24,8 @@ require 'yaml'
 
 # Common/Shared
 require_relative 'burner/modeling'
+require_relative 'burner/side_effects'
+require_relative 'burner/util'
 
 # Main Entrypoint(s)
 require_relative 'burner/cli'
data/lib/burner/cli.rb CHANGED
@@ -20,6 +20,8 @@ module Burner
       config    = read_yaml(path)
       @pipeline = Burner::Pipeline.make(jobs: config['jobs'], steps: config['steps'])
       @payload  = Payload.new(params: params)
+
+      freeze
     end
 
     def execute
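The added `freeze` makes the constructed CLI object immutable, so later code cannot swap its pipeline or payload by accident. Illustratively (the path is a placeholder):

cli = Burner::Cli.new(['pipeline.yaml'])

cli.frozen?                               # => true
cli.instance_variable_set(:@payload, nil) # raises FrozenError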
data/lib/burner/job.rb CHANGED
@@ -15,23 +15,43 @@ module Burner
   # the Burner::Jobs factory class method #register. An example of a registration:
   #   Burner::Jobs.register('your_class', YourClass)
   class Job
+    include Util::Arrayable
     acts_as_hashable
 
-    attr_reader :name, :string_template
+    attr_reader :name
 
     def initialize(name:)
       raise ArgumentError, 'name is required' if name.to_s.empty?
 
-      @name            = name.to_s
-      @string_template = StringTemplate.instance
+      @name = name.to_s
     end
 
-    private
+    # There are only two requirements to be considered a valid Burner Job:
+    #   1. The class responds to #name
+    #   2. The class responds to #perform(output, payload)
+    #
+    # The #perform method takes in two arguments: output (an instance of Burner::Output)
+    # and payload (an instance of Burner::Payload). Jobs can leverage output to emit
+    # information to the pipeline's log(s). The payload is utilized to pass data from job to job,
+    # with its most important attribute being #value. The value attribute is mutable
+    # per the individual job's context (meaning of it is unknown without understanding a job's
+    # input and output value of #value.). Therefore #value can mean anything and it is up to the
+    # engineers to clearly document the assumptions of its use.
+    #
+    # Returning false will short-circuit the pipeline right after the job method exits.
+    # Returning anything else besides false just means "continue".
+    def perform(output, _payload)
+      output.detail("#perform not implemented for: #{self.class.name}")
+
+      nil
+    end
+
+    protected
 
     def job_string_template(expression, output, payload)
       templatable_params = payload.params.merge(__id: output.id, __value: payload.value)
 
-      string_template.evaluate(expression, templatable_params)
+      StringTemplate.instance.evaluate(expression, templatable_params)
     end
   end
 end
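Given the contract spelled out in the new comment, the smallest conforming job is a subclass with a `#perform`. A minimal sketch (class name and behavior are illustrative):

require 'burner'

class Reverse < Burner::Job
  def perform(output, payload)
    output.detail('Reversing the current value')

    payload.value = payload.value.to_s.reverse

    nil # only an explicit false would short-circuit the pipeline
  end
end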
data/lib/burner/jobs.rb CHANGED
@@ -7,27 +7,7 @@
 # LICENSE file in the root directory of this source tree.
 #
 
-require_relative 'job'
-require_relative 'jobs/collection/arrays_to_objects'
-require_relative 'jobs/collection/graph'
-require_relative 'jobs/collection/objects_to_arrays'
-require_relative 'jobs/collection/shift'
-require_relative 'jobs/collection/transform'
-require_relative 'jobs/collection/unpivot'
-require_relative 'jobs/collection/values'
-require_relative 'jobs/deserialize/csv'
-require_relative 'jobs/deserialize/json'
-require_relative 'jobs/deserialize/yaml'
-require_relative 'jobs/dummy'
-require_relative 'jobs/echo'
-require_relative 'jobs/io/exist'
-require_relative 'jobs/io/read'
-require_relative 'jobs/io/write'
-require_relative 'jobs/serialize/csv'
-require_relative 'jobs/serialize/json'
-require_relative 'jobs/serialize/yaml'
-require_relative 'jobs/set'
-require_relative 'jobs/sleep'
+require_relative 'library'
 
 module Burner
   # Main library of jobs. This file contains all the basic/default jobs. All other consumer
@@ -36,25 +16,25 @@ module Burner
   class Jobs
     acts_as_hashable_factory
 
-    register 'collection/arrays_to_objects', Collection::ArraysToObjects
-    register 'collection/graph', Collection::Graph
-    register 'collection/objects_to_arrays', Collection::ObjectsToArrays
-    register 'collection/shift', Collection::Shift
-    register 'collection/transform', Collection::Transform
-    register 'collection/unpivot', Collection::Unpivot
-    register 'collection/values', Collection::Values
-    register 'deserialize/csv', Deserialize::Csv
-    register 'deserialize/json', Deserialize::Json
-    register 'deserialize/yaml', Deserialize::Yaml
-    register 'dummy', '', Dummy
-    register 'echo', Echo
-    register 'io/exist', IO::Exist
-    register 'io/read', IO::Read
-    register 'io/write', IO::Write
-    register 'serialize/csv', Serialize::Csv
-    register 'serialize/json', Serialize::Json
-    register 'serialize/yaml', Serialize::Yaml
-    register 'set', Set
-    register 'sleep', Sleep
+    register 'collection/arrays_to_objects', Library::Collection::ArraysToObjects
+    register 'collection/graph', Library::Collection::Graph
+    register 'collection/objects_to_arrays', Library::Collection::ObjectsToArrays
+    register 'collection/shift', Library::Collection::Shift
+    register 'collection/transform', Library::Collection::Transform
+    register 'collection/unpivot', Library::Collection::Unpivot
+    register 'collection/values', Library::Collection::Values
+    register 'deserialize/csv', Library::Deserialize::Csv
+    register 'deserialize/json', Library::Deserialize::Json
+    register 'deserialize/yaml', Library::Deserialize::Yaml
+    register 'dummy', '', Library::Dummy
+    register 'echo', Library::Echo
+    register 'io/exist', Library::IO::Exist
+    register 'io/read', Library::IO::Read
+    register 'io/write', Library::IO::Write
+    register 'serialize/csv', Library::Serialize::Csv
+    register 'serialize/json', Library::Serialize::Json
+    register 'serialize/yaml', Library::Serialize::Yaml
+    register 'set_value', Library::SetValue
+    register 'sleep', Library::Sleep
   end
 end
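Note the registry key rename: `set` is now `set_value` (matching the `jobs/set.rb → library/set_value.rb` move in the file list), so existing configurations need their `type` updated. The factory still resolves keys the same way, e.g.:

require 'burner'

job = Burner::Jobs.make(name: 'noop', type: 'dummy')
job.is_a?(Burner::Library::Dummy) # => true, per the registrations above

# Configurations that used { type: 'set' } must now say { type: 'set_value' }.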
data/lib/burner/library.rb ADDED
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+#
+# Copyright (c) 2020-present, Blue Marble Payroll, LLC
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+require_relative 'job'
+require_relative 'library/collection/arrays_to_objects'
+require_relative 'library/collection/graph'
+require_relative 'library/collection/objects_to_arrays'
+require_relative 'library/collection/shift'
+require_relative 'library/collection/transform'
+require_relative 'library/collection/unpivot'
+require_relative 'library/collection/values'
+require_relative 'library/deserialize/csv'
+require_relative 'library/deserialize/json'
+require_relative 'library/deserialize/yaml'
+require_relative 'library/dummy'
+require_relative 'library/echo'
+require_relative 'library/io/exist'
+require_relative 'library/io/read'
+require_relative 'library/io/write'
+require_relative 'library/serialize/csv'
+require_relative 'library/serialize/json'
+require_relative 'library/serialize/yaml'
+require_relative 'library/set_value'
+require_relative 'library/sleep'
data/lib/burner/library/collection/arrays_to_objects.rb ADDED
@@ -0,0 +1,77 @@
+# frozen_string_literal: true
+
+#
+# Copyright (c) 2020-present, Blue Marble Payroll, LLC
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+module Burner
+  module Library
+    module Collection
+      # Convert an array of arrays to an array of objects. Pass in an array of
+      # Burner::Modeling::KeyIndexMapping instances or hashable configurations which specifies
+      # the index-to-key mappings to use.
+      #
+      # Expected Payload#value input: array of arrays.
+      # Payload#value output: An array of hashes.
+      #
+      # An example using a configuration-first pipeline:
+      #
+      #   config = {
+      #     jobs: [
+      #       {
+      #         name: 'set',
+      #         type: 'set_value',
+      #         value: [
+      #           [1, 'funky']
+      #         ]
+      #       },
+      #       {
+      #         name: 'map',
+      #         type: 'collection/arrays_to_objects',
+      #         mappings: [
+      #           { index: 0, key: 'id' },
+      #           { index: 1, key: 'name' }
+      #         ]
+      #       },
+      #       {
+      #         name: 'output',
+      #         type: 'echo',
+      #         message: 'value is currently: {__value}'
+      #       },
+      #
+      #     ],
+      #     steps: %w[set map output]
+      #   }
+      #
+      #   Burner::Pipeline.make(config).execute
+      class ArraysToObjects < Job
+        attr_reader :mappings
+
+        def initialize(name:, mappings: [])
+          super(name: name)
+
+          @mappings = Modeling::KeyIndexMapping.array(mappings)
+
+          freeze
+        end
+
+        def perform(_output, payload)
+          payload.value = array(payload.value).map { |array| index_to_key_map(array) }
+
+          nil
+        end
+
+        private
+
+        def index_to_key_map(array)
+          mappings.each_with_object({}) do |mapping, memo|
+            memo[mapping.key] = array[mapping.index]
+          end
+        end
+      end
+    end
+  end
+end
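The same mapping can also be exercised code-first; a hedged sketch assuming `Payload#value` is writable and `Payload.new` defaults its arguments:

require 'burner'

job = Burner::Library::Collection::ArraysToObjects.make(
  name: 'map',
  mappings: [
    { index: 0, key: 'id' },
    { index: 1, key: 'name' }
  ]
)

payload = Burner::Payload.new
payload.value = [[1, 'funky']]

job.perform(nil, payload) # ArraysToObjects#perform ignores its output argument

payload.value # => [{ 'id' => 1, 'name' => 'funky' }]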
data/lib/burner/{jobs → library}/collection/graph.rb RENAMED
@@ -8,10 +8,11 @@
 #
 
 module Burner
-  class Jobs
+  module Library
     module Collection
       # Take an array of (denormalized) objects and create an object hierarchy from them.
       # Under the hood it uses Hashematics: https://github.com/bluemarblepayroll/hashematics.
+      #
       # Expected Payload#value input: array of objects.
       # Payload#value output: An array of objects.
       class Graph < Job
@@ -29,7 +30,7 @@ module Burner
         end
 
         def perform(output, payload)
-          graph = Hashematics::Graph.new(groups).add(payload.value || [])
+          graph = Hashematics::Graph.new(groups).add(array(payload.value))
 
           output.detail("Graphing: #{key}")
 
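The `payload.value || []` guards across these jobs give way to `array(...)` from the new `Util::Arrayable` mixin, whose file (data/lib/burner/util/arrayable.rb, +30 in the list above) is not shown in this diff. A plausible sketch of the helper, assuming it coerces nil to an empty array and keeps hashes whole:

module Arrayable
  # nil => [], array => itself, hash => [hash], scalar => [scalar]
  def array(value)
    return [value] if value.is_a?(Hash) # Array(hash) would splat it into key/value pairs

    Array(value)
  end
end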
data/lib/burner/{jobs → library}/collection/objects_to_arrays.rb RENAMED
@@ -8,18 +8,54 @@
 #
 
 module Burner
-  class Jobs
+  module Library
     module Collection
       # Convert an array of objects to an array of arrays. You can leverage the separator
-      # option to support key paths and nested objects.
+      # option to support key paths and nested objects. Pass in an array of
+      # Burner::Modeling::KeyIndexMapping instances or hashable configurations which specifies
+      # the key-to-index mappings to use.
+      #
       # Expected Payload#value input: array of hashes.
       # Payload#value output: An array of arrays.
+      #
+      # An example using a configuration-first pipeline:
+      #
+      #   config = {
+      #     jobs: [
+      #       {
+      #         name: 'set',
+      #         type: 'set_value',
+      #         value: [
+      #           [1, 'funky']
+      #         ]
+      #       },
+      #       {
+      #         name: 'map',
+      #         type: 'collection/objects_to_arrays',
+      #         mappings: [
+      #           { index: 0, key: 'id' },
+      #           { index: 1, key: 'name' }
+      #         ]
+      #       },
+      #       {
+      #         name: 'output',
+      #         type: 'echo',
+      #         message: 'value is currently: {__value}'
+      #       },
+      #
+      #     ],
+      #     steps: %w[set map output]
+      #   }
+      #
+      #   Burner::Pipeline.make(config).execute
       class ObjectsToArrays < Job
         attr_reader :mappings
 
         # If you wish to support nested objects you can pass in a string to use as a
         # key path separator. For example: if you would like to recognize dot-notation for
-        # nested hashes then set separator to '.'.
+        # nested hashes then set separator to '.'. For more information, see the underlying
+        # library that supports this dot-notation concept:
+        #   https://github.com/bluemarblepayroll/objectable
         def initialize(name:, mappings: [], separator: '')
           super(name: name)
 
@@ -30,7 +66,7 @@ module Burner
         end
 
         def perform(_output, payload)
-          payload.value = (payload.value || []).map { |object| key_to_index_map(object) }
+          payload.value = array(payload.value).map { |object| key_to_index_map(object) }
 
           nil
         end
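A short sketch of the separator option documented above: with `separator: '.'`, a mapping key can reach into nested hashes (resolution is delegated to the objectable library per the comment):

require 'burner'

job = Burner::Library::Collection::ObjectsToArrays.make(
  name: 'map',
  separator: '.',
  mappings: [
    { index: 0, key: 'id' },
    { index: 1, key: 'demo.name' } # traverses { 'demo' => { 'name' => ... } }
  ]
)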
data/lib/burner/{jobs → library}/collection/shift.rb RENAMED
@@ -8,10 +8,12 @@
 #
 
 module Burner
-  class Jobs
+  module Library
     module Collection
       # Take an array and remove the first N elements, where N is specified by the amount
-      # attribute.
+      # attribute. The initial use case for this was to remove "header" rows from arrays,
+      # like you would expect when parsing CSV files.
+      #
       # Expected Payload#value input: nothing.
       # Payload#value output: An array with N beginning elements removed.
       class Shift < Job
@@ -32,8 +34,7 @@ module Burner
         def perform(output, payload)
           output.detail("Shifting #{amount} entries.")
 
-          payload.value ||= []
-          payload.value.shift(amount)
+          payload.value = array(payload.value).slice(amount..-1)
 
           nil
         end
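The rewrite swaps in-place `Array#shift` mutation for a reassignment via `Array#slice`. Worth noting (plain Ruby semantics, not burner-specific): slicing strictly past the end returns nil rather than an empty array:

[1, 2, 3].slice(1..-1) # => [2, 3]
[].slice(0..-1)        # => []
[].slice(1..-1)        # => nil, so an empty input with amount >= 1 yields a nil value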