burner 1.0.0.pre.alpha.1 → 1.0.0.pre.alpha.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. checksums.yaml +4 -4
  2. data/README.md +48 -20
  3. data/burner.gemspec +3 -0
  4. data/exe/burner +3 -4
  5. data/lib/burner.rb +12 -0
  6. data/lib/burner/cli.rb +10 -8
  7. data/lib/burner/job.rb +28 -6
  8. data/lib/burner/jobs.rb +21 -23
  9. data/lib/burner/library.rb +30 -0
  10. data/lib/burner/library/collection/arrays_to_objects.rb +77 -0
  11. data/lib/burner/library/collection/graph.rb +44 -0
  12. data/lib/burner/library/collection/objects_to_arrays.rb +90 -0
  13. data/lib/burner/library/collection/shift.rb +44 -0
  14. data/lib/burner/library/collection/transform.rb +68 -0
  15. data/lib/burner/library/collection/unpivot.rb +47 -0
  16. data/lib/burner/library/collection/values.rb +51 -0
  17. data/lib/burner/library/deserialize/csv.rb +29 -0
  18. data/lib/burner/{jobs → library}/deserialize/json.rb +5 -2
  19. data/lib/burner/{jobs → library}/deserialize/yaml.rb +9 -3
  20. data/lib/burner/{jobs → library}/dummy.rb +4 -2
  21. data/lib/burner/{jobs → library}/echo.rb +5 -3
  22. data/lib/burner/{jobs → library}/io/base.rb +1 -7
  23. data/lib/burner/{jobs → library}/io/exist.rb +5 -3
  24. data/lib/burner/{jobs → library}/io/read.rb +6 -3
  25. data/lib/burner/{jobs → library}/io/write.rb +9 -4
  26. data/lib/burner/library/serialize/csv.rb +39 -0
  27. data/lib/burner/{jobs → library}/serialize/json.rb +5 -2
  28. data/lib/burner/{jobs → library}/serialize/yaml.rb +5 -2
  29. data/lib/burner/{jobs/set.rb → library/set_value.rb} +6 -3
  30. data/lib/burner/{jobs → library}/sleep.rb +4 -2
  31. data/lib/burner/modeling.rb +12 -0
  32. data/lib/burner/modeling/attribute.rb +29 -0
  33. data/lib/burner/modeling/attribute_renderer.rb +32 -0
  34. data/lib/burner/modeling/key_index_mapping.rb +29 -0
  35. data/lib/burner/payload.rb +20 -9
  36. data/lib/burner/pipeline.rb +23 -4
  37. data/lib/burner/side_effects.rb +10 -0
  38. data/lib/burner/side_effects/written_file.rb +28 -0
  39. data/lib/burner/step.rb +2 -4
  40. data/lib/burner/string_template.rb +6 -5
  41. data/lib/burner/util.rb +10 -0
  42. data/lib/burner/util/arrayable.rb +30 -0
  43. data/lib/burner/version.rb +1 -1
  44. metadata +74 -15
  45. data/lib/burner/written_file.rb +0 -28
data/lib/burner/library/collection/graph.rb
@@ -0,0 +1,44 @@
+ # frozen_string_literal: true
+
+ #
+ # Copyright (c) 2020-present, Blue Marble Payroll, LLC
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+ #
+
+ module Burner
+   module Library
+     module Collection
+       # Take an array of (denormalized) objects and create an object hierarchy from them.
+       # Under the hood it uses Hashematics: https://github.com/bluemarblepayroll/hashematics.
+       #
+       # Expected Payload#value input: array of objects.
+       # Payload#value output: An array of objects.
+       class Graph < Job
+         attr_reader :key, :groups
+
+         def initialize(name:, key:, config: Hashematics::Configuration.new)
+           super(name: name)
+
+           raise ArgumentError, 'key is required' if key.to_s.empty?
+
+           @groups = Hashematics::Configuration.new(config).groups
+           @key = key.to_s
+
+           freeze
+         end
+
+         def perform(output, payload)
+           graph = Hashematics::Graph.new(groups).add(array(payload.value))
+
+           output.detail("Graphing: #{key}")
+
+           payload.value = graph.data(key)
+
+           nil
+         end
+       end
+     end
+   end
+ end
data/lib/burner/library/collection/objects_to_arrays.rb
@@ -0,0 +1,90 @@
+ # frozen_string_literal: true
+
+ #
+ # Copyright (c) 2020-present, Blue Marble Payroll, LLC
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+ #
+
+ module Burner
+   module Library
+     module Collection
+       # Convert an array of objects to an array of arrays. You can leverage the separator
+       # option to support key paths and nested objects. Pass in an array of
+       # Burner::Modeling::KeyIndexMapping instances or hashable configurations which specifies
+       # the key-to-index mappings to use.
+       #
+       # Expected Payload#value input: array of hashes.
+       # Payload#value output: An array of arrays.
+       #
+       # An example using a configuration-first pipeline:
+       #
+       # config = {
+       #   jobs: [
+       #     {
+       #       name: 'set',
+       #       type: 'set_value',
+       #       value: [
+       #         [1, 'funky']
+       #       ]
+       #     },
+       #     {
+       #       name: 'map',
+       #       type: 'collection/objects_to_arrays',
+       #       mappings: [
+       #         { index: 0, key: 'id' },
+       #         { index: 1, key: 'name' }
+       #       ]
+       #     },
+       #     {
+       #       name: 'output',
+       #       type: 'echo',
+       #       message: 'value is currently: {__value}'
+       #     },
+       #
+       #   ],
+       #   steps: %w[set map output]
+       # }
+       #
+       # Burner::Pipeline.make(config).execute
+       class ObjectsToArrays < Job
+         attr_reader :mappings
+
+         # If you wish to support nested objects you can pass in a string to use as a
+         # key path separator. For example: if you would like to recognize dot-notation for
+         # nested hashes then set separator to '.'. For more information, see the underlying
+         # library that supports this dot-notation concept:
+         # https://github.com/bluemarblepayroll/objectable
+         def initialize(name:, mappings: [], separator: '')
+           super(name: name)
+
+           @mappings = Modeling::KeyIndexMapping.array(mappings)
+           @resolver = Objectable.resolver(separator: separator.to_s)
+
+           freeze
+         end
+
+         def perform(_output, payload)
+           payload.value = array(payload.value).map { |object| key_to_index_map(object) }
+
+           nil
+         end
+
+         private
+
+         attr_reader :resolver
+
+         def key_to_index_map(object)
+           mappings.each_with_object(prototype_array) do |mapping, memo|
+             memo[mapping.index] = resolver.get(object, mapping.key)
+           end
+         end
+
+         def prototype_array
+           Array.new(mappings.length)
+         end
+       end
+     end
+   end
+ end
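To make the separator behavior concrete, here is a hedged sketch (not part of the diff) that maps a nested hash using dot-notation keys; it assumes the same 'set_value', 'collection/objects_to_arrays', and 'echo' type names used in the class comment above.

config = {
  jobs: [
    {
      name: 'set',
      type: 'set_value',
      value: [
        { 'id' => 1, 'demographics' => { 'name' => 'funky' } }
      ]
    },
    {
      name: 'map',
      type: 'collection/objects_to_arrays',
      separator: '.',
      mappings: [
        { index: 0, key: 'id' },
        { index: 1, key: 'demographics.name' }
      ]
    },
    {
      name: 'output',
      type: 'echo',
      message: 'value is currently: {__value}'
    }
  ],
  steps: %w[set map output]
}

Burner::Pipeline.make(config).execute

With separator set to '.', the Objectable resolver walks 'demographics.name' into the nested hash, so the mapped row comes out as [1, 'funky'].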
data/lib/burner/library/collection/shift.rb
@@ -0,0 +1,44 @@
+ # frozen_string_literal: true
+
+ #
+ # Copyright (c) 2020-present, Blue Marble Payroll, LLC
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+ #
+
+ module Burner
+   module Library
+     module Collection
+       # Take an array and remove the first N elements, where N is specified by the amount
+       # attribute. The initial use case for this was to remove "header" rows from arrays,
+       # like you would expect when parsing CSV files.
+       #
+       # Expected Payload#value input: nothing.
+       # Payload#value output: An array with N beginning elements removed.
+       class Shift < Job
+         DEFAULT_AMOUNT = 0
+
+         private_constant :DEFAULT_AMOUNT
+
+         attr_reader :amount
+
+         def initialize(name:, amount: DEFAULT_AMOUNT)
+           super(name: name)
+
+           @amount = amount.to_i
+
+           freeze
+         end
+
+         def perform(output, payload)
+           output.detail("Shifting #{amount} entries.")
+
+           payload.value = array(payload.value).slice(amount..-1)
+
+           nil
+         end
+       end
+     end
+   end
+ end
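A hedged configuration sketch for the header-row use case mentioned above, assuming the job is registered as 'collection/shift' in keeping with the type-name convention shown for collection/objects_to_arrays:

config = {
  jobs: [
    {
      name: 'set',
      type: 'set_value',
      value: [
        %w[id name], # header row to discard
        [1, 'funky']
      ]
    },
    { name: 'drop_header', type: 'collection/shift', amount: 1 },
    { name: 'output', type: 'echo', message: 'value is currently: {__value}' }
  ],
  steps: %w[set drop_header output]
}

Burner::Pipeline.make(config).execute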
data/lib/burner/library/collection/transform.rb
@@ -0,0 +1,68 @@
+ # frozen_string_literal: true
+
+ #
+ # Copyright (c) 2020-present, Blue Marble Payroll, LLC
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+ #
+
+ module Burner
+   module Library
+     module Collection
+       # Iterate over all objects and return a new set of transformed objects. The object is
+       # transformed per the "transformers" attribute for its attributes. An attribute defines
+       # the ultimate key to place the value in and then the transformer pipeline to use to
+       # derive the value. Under the hood this uses the Realize library:
+       # https://github.com/bluemarblepayroll/realize
+       # For more information on the specific contract for attributes, see the
+       # Burner::Modeling::Attribute class.
+       #
+       # Expected Payload#value input: array of objects.
+       # Payload#value output: An array of objects.
+       class Transform < Job
+         BLANK = ''
+
+         attr_reader :attribute_renderers,
+                     :exclusive,
+                     :resolver
+
+         def initialize(name:, attributes: [], exclusive: false, separator: BLANK)
+           super(name: name)
+
+           @resolver = Objectable.resolver(separator: separator)
+           @exclusive = exclusive || false
+
+           @attribute_renderers =
+             Modeling::Attribute.array(attributes)
+                                .map { |a| Modeling::AttributeRenderer.new(a, resolver) }
+
+           freeze
+         end
+
+         def perform(output, payload)
+           payload.value = array(payload.value).map { |row| transform(row, payload.time) }
+
+           attr_count = attribute_renderers.length
+           row_count = payload.value.length
+
+           output.detail("Transformed #{attr_count} attributes(s) for #{row_count} row(s)")
+
+           nil
+         end
+
+         private
+
+         def transform(row, time)
+           outgoing_row = exclusive ? {} : row
+
+           attribute_renderers.each_with_object(outgoing_row) do |attribute_renderer, memo|
+             value = attribute_renderer.transform(row, time)
+
+             resolver.set(memo, attribute_renderer.key, value)
+           end
+         end
+       end
+     end
+   end
+ end
data/lib/burner/library/collection/unpivot.rb
@@ -0,0 +1,47 @@
+ # frozen_string_literal: true
+
+ #
+ # Copyright (c) 2020-present, Blue Marble Payroll, LLC
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+ #
+
+ module Burner
+   module Library
+     module Collection
+       # Take an array of objects and un-pivot groups of keys into rows.
+       # Under the hood it uses HashMath's Unpivot class:
+       # https://github.com/bluemarblepayroll/hash_math
+       #
+       # Expected Payload#value input: array of objects.
+       # Payload#value output: An array of objects.
+       class Unpivot < Job
+         attr_reader :unpivot
+
+         def initialize(name:, pivot_set: HashMath::Unpivot::PivotSet.new)
+           super(name: name)
+
+           @unpivot = HashMath::Unpivot.new(pivot_set)
+
+           freeze
+         end
+
+         def perform(output, payload)
+           pivot_count = unpivot.pivot_set.pivots.length
+           key_count = unpivot.pivot_set.pivots.map { |p| p.keys.length }.sum
+           payload.value = array(payload.value)
+           object_count = payload.value.length || 0
+
+           message = "#{pivot_count} Pivots, Key(s): #{key_count} key(s), #{object_count} objects(s)"
+
+           output.detail(message)
+
+           payload.value = payload.value.flat_map { |object| unpivot.expand(object) }
+
+           nil
+         end
+       end
+     end
+   end
+ end
data/lib/burner/library/collection/values.rb
@@ -0,0 +1,51 @@
+ # frozen_string_literal: true
+
+ #
+ # Copyright (c) 2020-present, Blue Marble Payroll, LLC
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+ #
+
+ module Burner
+   module Library
+     module Collection
+       # Take an array of objects and call #values on each object.
+       # If include_keys is true (it is false by default), then call #keys on the first
+       # object and inject that as a "header" object.
+       #
+       # Expected Payload#value input: array of objects.
+       # Payload#value output: An array of arrays.
+       class Values < Job
+         attr_reader :include_keys
+
+         def initialize(name:, include_keys: false)
+           super(name: name)
+
+           @include_keys = include_keys || false
+
+           freeze
+         end
+
+         def perform(_output, payload)
+           payload.value = array(payload.value)
+           keys = include_keys ? [keys(payload.value.first)] : []
+           values = payload.value.map { |object| values(object) }
+           payload.value = keys + values
+
+           nil
+         end
+
+         private
+
+         def keys(object)
+           object.respond_to?(:keys) ? object.keys : []
+         end
+
+         def values(object)
+           object.respond_to?(:values) ? object.values : []
+         end
+       end
+     end
+   end
+ end
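A hedged usage sketch, assuming the registered type name is 'collection/values'; with include_keys: true the first object's keys are emitted as a leading header row:

config = {
  jobs: [
    {
      name: 'set',
      type: 'set_value',
      value: [
        { 'id' => 1, 'name' => 'funky' },
        { 'id' => 2, 'name' => 'fresh' }
      ]
    },
    { name: 'values', type: 'collection/values', include_keys: true },
    { name: 'output', type: 'echo', message: 'value is currently: {__value}' }
  ],
  steps: %w[set values output]
}

Burner::Pipeline.make(config).execute

The resulting payload value would be [['id', 'name'], [1, 'funky'], [2, 'fresh']].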
data/lib/burner/library/deserialize/csv.rb
@@ -0,0 +1,29 @@
+ # frozen_string_literal: true
+
+ #
+ # Copyright (c) 2020-present, Blue Marble Payroll, LLC
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+ #
+
+ module Burner
+   module Library
+     module Deserialize
+       # Take a CSV string and de-serialize into object(s).
+       #
+       # Expected Payload#value input: nothing.
+       # Payload#value output: an array of arrays. Each inner array represents one data row.
+       class Csv < Job
+         # This currently only supports returning an array of arrays, including the header row.
+         # In the future this could be extended to offer more customizable options, such as
+         # making it return an array of hashes with the columns mapped, etc.)
+         def perform(_output, payload)
+           payload.value = CSV.new(payload.value, headers: false).to_a
+
+           nil
+         end
+       end
+     end
+   end
+ end
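A hedged sketch that pairs this job with collection/shift to drop the parsed header row; the 'deserialize/csv' and 'collection/shift' type names are assumed from the library's naming convention:

config = {
  jobs: [
    { name: 'set', type: 'set_value', value: "id,name\n1,funky\n" },
    { name: 'parse', type: 'deserialize/csv' },
    { name: 'drop_header', type: 'collection/shift', amount: 1 },
    { name: 'output', type: 'echo', message: 'value is currently: {__value}' }
  ],
  steps: %w[set parse drop_header output]
}

Burner::Pipeline.make(config).execute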
data/lib/burner/{jobs → library}/deserialize/json.rb
@@ -8,11 +8,14 @@
  #

  module Burner
-   class Jobs
+   module Library
      module Deserialize
        # Take a JSON string and deserialize into object(s).
+       #
+       # Expected Payload#value input: string of JSON data.
+       # Payload#value output: anything, as specified by the JSON de-serializer.
        class Json < Job
-         def perform(_output, payload, _params)
+         def perform(_output, payload)
            payload.value = JSON.parse(payload.value)

            nil
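A hedged sketch, assuming the type name 'deserialize/json': seed a JSON string with set_value, parse it, and echo the resulting object.

config = {
  jobs: [
    { name: 'set', type: 'set_value', value: '{ "id": 1, "name": "funky" }' },
    { name: 'parse', type: 'deserialize/json' },
    { name: 'output', type: 'echo', message: 'value is currently: {__value}' }
  ],
  steps: %w[set parse output]
}

Burner::Pipeline.make(config).execute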
data/lib/burner/{jobs → library}/deserialize/yaml.rb
@@ -8,9 +8,15 @@
  #

  module Burner
-   class Jobs
+   module Library
      module Deserialize
-       # Take a YAML string and deserialize into object(s).
+       # Take a YAML string and deserialize into object(s). It uses YAML#safe_load by default,
+       # which ensures only a limited number of Ruby object constants can be hydrated by the
+       # YAML. If you wish to ease this restriction, for example if you have custom serialization
+       # for custom classes, then you can pass in safe: false.
+       #
+       # Expected Payload#value input: string of YAML data.
+       # Payload#value output: anything as specified by the YAML de-serializer.
        class Yaml < Job
          attr_reader :safe

@@ -27,7 +33,7 @@ module Burner
          # in a sandbox. By default, though, we will try and drive them towards using it
          # in the safer alternative.
          # rubocop:disable Security/YAMLLoad
-         def perform(output, payload, _params)
+         def perform(output, payload)
            output.detail('Warning: loading YAML not using safe_load.') unless safe

            payload.value = safe ? YAML.safe_load(payload.value) : YAML.load(payload.value)
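A hedged sketch, assuming the type name 'deserialize/yaml' and that the job exposes the safe option implied by attr_reader :safe above; leaving safe at its default keeps YAML.safe_load in play:

config = {
  jobs: [
    { name: 'set', type: 'set_value', value: "id: 1\nname: funky\n" },
    { name: 'parse', type: 'deserialize/yaml' }, # pass safe: false only for trusted YAML containing custom classes
    { name: 'output', type: 'echo', message: 'value is currently: {__value}' }
  ],
  steps: %w[set parse output]
}

Burner::Pipeline.make(config).execute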
data/lib/burner/{jobs → library}/dummy.rb
@@ -8,10 +8,12 @@
  #

  module Burner
-   class Jobs
+   module Library
      # Do nothing.
+     #
+     # Note: this does not use Payload#value.
      class Dummy < Job
-       def perform(_output, _payload, _params)
+       def perform(_output, _payload)
          nil
        end
      end
data/lib/burner/{jobs → library}/echo.rb
@@ -8,8 +8,10 @@
  #

  module Burner
-   class Jobs
+   module Library
      # Output a simple message to the output.
+     #
+     # Note: this does not use Payload#value.
      class Echo < Job
        attr_reader :message

@@ -21,8 +23,8 @@ module Burner
          freeze
        end

-       def perform(output, _payload, params)
-         compiled_message = eval_string_template(message, params)
+       def perform(output, payload)
+         compiled_message = job_string_template(message, output, payload)

          output.detail(compiled_message)
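The perform change above routes the message through job_string_template, which now has access to the payload. A hedged sketch of the {__value} interpolation this enables, using the type names already shown in this diff:

config = {
  jobs: [
    { name: 'set', type: 'set_value', value: 'hello' },
    { name: 'say', type: 'echo', message: 'value is currently: {__value}' }
  ],
  steps: %w[set say]
}

Burner::Pipeline.make(config).execute # the echo job reports: value is currently: hello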