burner 1.0.0.pre.alpha.3 → 1.0.0.pre.alpha.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +2 -0
  3. data/README.md +76 -44
  4. data/burner.gemspec +4 -1
  5. data/exe/burner +2 -3
  6. data/lib/burner.rb +11 -0
  7. data/lib/burner/cli.rb +10 -10
  8. data/lib/burner/job.rb +29 -9
  9. data/lib/burner/job_with_register.rb +24 -0
  10. data/lib/burner/jobs.rb +27 -23
  11. data/lib/burner/library.rb +32 -0
  12. data/lib/burner/library/collection/arrays_to_objects.rb +75 -0
  13. data/lib/burner/library/collection/graph.rb +42 -0
  14. data/lib/burner/library/collection/objects_to_arrays.rb +88 -0
  15. data/lib/burner/library/collection/shift.rb +42 -0
  16. data/lib/burner/library/collection/transform.rb +66 -0
  17. data/lib/burner/library/collection/unpivot.rb +53 -0
  18. data/lib/burner/library/collection/validate.rb +89 -0
  19. data/lib/burner/library/collection/values.rb +49 -0
  20. data/lib/burner/library/deserialize/csv.rb +27 -0
  21. data/lib/burner/{jobs → library}/deserialize/json.rb +7 -6
  22. data/lib/burner/{jobs → library}/deserialize/yaml.rb +14 -8
  23. data/lib/burner/{jobs → library}/dummy.rb +4 -4
  24. data/lib/burner/{jobs → library}/echo.rb +5 -5
  25. data/lib/burner/{jobs → library}/io/base.rb +4 -10
  26. data/lib/burner/{jobs → library}/io/exist.rb +13 -11
  27. data/lib/burner/{jobs → library}/io/read.rb +9 -8
  28. data/lib/burner/{jobs → library}/io/write.rb +11 -8
  29. data/lib/burner/library/serialize/csv.rb +37 -0
  30. data/lib/burner/{jobs → library}/serialize/json.rb +7 -6
  31. data/lib/burner/{jobs → library}/serialize/yaml.rb +7 -6
  32. data/lib/burner/{jobs/set.rb → library/set_value.rb} +9 -8
  33. data/lib/burner/{jobs → library}/sleep.rb +4 -4
  34. data/lib/burner/modeling.rb +13 -0
  35. data/lib/burner/modeling/attribute.rb +29 -0
  36. data/lib/burner/modeling/attribute_renderer.rb +32 -0
  37. data/lib/burner/modeling/key_index_mapping.rb +29 -0
  38. data/lib/burner/modeling/validations.rb +23 -0
  39. data/lib/burner/modeling/validations/base.rb +35 -0
  40. data/lib/burner/modeling/validations/blank.rb +31 -0
  41. data/lib/burner/modeling/validations/present.rb +31 -0
  42. data/lib/burner/payload.rb +55 -10
  43. data/lib/burner/pipeline.rb +25 -6
  44. data/lib/burner/side_effects.rb +10 -0
  45. data/lib/burner/side_effects/written_file.rb +28 -0
  46. data/lib/burner/step.rb +2 -8
  47. data/lib/burner/util.rb +11 -0
  48. data/lib/burner/util/arrayable.rb +30 -0
  49. data/lib/burner/util/string_template.rb +42 -0
  50. data/lib/burner/version.rb +1 -1
  51. metadata +84 -18
  52. data/lib/burner/string_template.rb +0 -40
  53. data/lib/burner/written_file.rb +0 -28
--- a/data/lib/burner/jobs.rb
+++ b/data/lib/burner/jobs.rb
@@ -7,18 +7,7 @@
 # LICENSE file in the root directory of this source tree.
 #
 
-require_relative 'job'
-require_relative 'jobs/deserialize/json'
-require_relative 'jobs/deserialize/yaml'
-require_relative 'jobs/dummy'
-require_relative 'jobs/echo'
-require_relative 'jobs/io/exist'
-require_relative 'jobs/io/read'
-require_relative 'jobs/io/write'
-require_relative 'jobs/serialize/json'
-require_relative 'jobs/serialize/yaml'
-require_relative 'jobs/set'
-require_relative 'jobs/sleep'
+require_relative 'library'
 
 module Burner
   # Main library of jobs. This file contains all the basic/default jobs. All other consumer
@@ -27,16 +16,31 @@ module Burner
   class Jobs
     acts_as_hashable_factory
 
-    register 'deserialize/json', Deserialize::Json
-    register 'deserialize/yaml', Deserialize::Yaml
-    register 'dummy', '', Dummy
-    register 'echo', Echo
-    register 'io/exist', IO::Exist
-    register 'io/read', IO::Read
-    register 'io/write', IO::Write
-    register 'serialize/json', Serialize::Json
-    register 'serialize/yaml', Serialize::Yaml
-    register 'set', Set
-    register 'sleep', Sleep
+    # Dummy is the default as noted by the ''. This means if a type is omitted, nil, or blank
+    # string then the dummy job will be used.
+    register 'b/dummy', '', Library::Dummy
+    register 'b/echo', Library::Echo
+    register 'b/set_value', Library::SetValue
+    register 'b/sleep', Library::Sleep
+
+    register 'b/collection/arrays_to_objects', Library::Collection::ArraysToObjects
+    register 'b/collection/graph', Library::Collection::Graph
+    register 'b/collection/objects_to_arrays', Library::Collection::ObjectsToArrays
+    register 'b/collection/shift', Library::Collection::Shift
+    register 'b/collection/transform', Library::Collection::Transform
+    register 'b/collection/unpivot', Library::Collection::Unpivot
+    register 'b/collection/values', Library::Collection::Values
+
+    register 'b/deserialize/csv', Library::Deserialize::Csv
+    register 'b/deserialize/json', Library::Deserialize::Json
+    register 'b/deserialize/yaml', Library::Deserialize::Yaml
+
+    register 'b/io/exist', Library::IO::Exist
+    register 'b/io/read', Library::IO::Read
+    register 'b/io/write', Library::IO::Write
+
+    register 'b/serialize/csv', Library::Serialize::Csv
+    register 'b/serialize/json', Library::Serialize::Json
+    register 'b/serialize/yaml', Library::Serialize::Yaml
   end
 end
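
For orientation, here is a minimal sketch of a pipeline exercising the renamed `b/`-prefixed job types registered above. It is not an excerpt from the gem; it simply trims down the configuration-first example embedded in the class comments further below.

  require 'burner'

  # Two-step pipeline: seed the register with a value, then echo it.
  # A blank or omitted type falls back to the dummy job, per the register
  # comment in the hunk above.
  config = {
    jobs: [
      { name: 'set', type: 'b/set_value', value: [1, 2, 3] },
      { name: 'output', type: 'b/echo', message: 'value is currently: {__value}' }
    ],
    steps: %w[set output]
  }

  Burner::Pipeline.make(config).execute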
--- /dev/null
+++ b/data/lib/burner/library.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+#
+# Copyright (c) 2020-present, Blue Marble Payroll, LLC
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+require_relative 'job_with_register'
+
+require_relative 'library/collection/arrays_to_objects'
+require_relative 'library/collection/graph'
+require_relative 'library/collection/objects_to_arrays'
+require_relative 'library/collection/shift'
+require_relative 'library/collection/transform'
+require_relative 'library/collection/unpivot'
+require_relative 'library/collection/validate'
+require_relative 'library/collection/values'
+require_relative 'library/deserialize/csv'
+require_relative 'library/deserialize/json'
+require_relative 'library/deserialize/yaml'
+require_relative 'library/dummy'
+require_relative 'library/echo'
+require_relative 'library/io/exist'
+require_relative 'library/io/read'
+require_relative 'library/io/write'
+require_relative 'library/serialize/csv'
+require_relative 'library/serialize/json'
+require_relative 'library/serialize/yaml'
+require_relative 'library/set_value'
+require_relative 'library/sleep'
--- /dev/null
+++ b/data/lib/burner/library/collection/arrays_to_objects.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+#
+# Copyright (c) 2020-present, Blue Marble Payroll, LLC
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+module Burner
+  module Library
+    module Collection
+      # Convert an array of arrays to an array of objects. Pass in an array of
+      # Burner::Modeling::KeyIndexMapping instances or hashable configurations which specifies
+      # the index-to-key mappings to use.
+      #
+      # Expected Payload#value input: array of arrays.
+      # Payload#value output: An array of hashes.
+      #
+      # An example using a configuration-first pipeline:
+      #
+      #   config = {
+      #     jobs: [
+      #       {
+      #         name: 'set',
+      #         type: 'b/set_value',
+      #         value: [
+      #           [1, 'funky']
+      #         ]
+      #       },
+      #       {
+      #         name: 'map',
+      #         type: 'b/collection/arrays_to_objects',
+      #         mappings: [
+      #           { index: 0, key: 'id' },
+      #           { index: 1, key: 'name' }
+      #         ]
+      #       },
+      #       {
+      #         name: 'output',
+      #         type: 'b/echo',
+      #         message: 'value is currently: {__value}'
+      #       },
+      #
+      #     ],
+      #     steps: %w[set map output]
+      #   }
+      #
+      #   Burner::Pipeline.make(config).execute
+      class ArraysToObjects < JobWithRegister
+        attr_reader :mappings
+
+        def initialize(name:, mappings: [], register: '')
+          super(name: name, register: register)
+
+          @mappings = Modeling::KeyIndexMapping.array(mappings)
+
+          freeze
+        end
+
+        def perform(_output, payload)
+          payload[register] = array(payload[register]).map { |array| index_to_key_map(array) }
+        end
+
+        private
+
+        def index_to_key_map(array)
+          mappings.each_with_object({}) do |mapping, memo|
+            memo[mapping.key] = array[mapping.index]
+          end
+        end
+      end
+    end
+  end
+end
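
Stripped of the job plumbing, the index_to_key_map conversion above is just a positional-to-keyed re-shaping. A plain-Ruby illustration (sample data invented for this sketch):

  mappings = [
    { index: 0, key: 'id' },
    { index: 1, key: 'name' }
  ]

  rows = [[1, 'funky'], [2, 'disco']]

  # Mirrors ArraysToObjects#perform: each inner array becomes a hash keyed
  # by the configured mappings.
  objects = rows.map do |row|
    mappings.each_with_object({}) { |m, memo| memo[m[:key]] = row[m[:index]] }
  end

  objects # => [{ 'id' => 1, 'name' => 'funky' }, { 'id' => 2, 'name' => 'disco' }]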
--- /dev/null
+++ b/data/lib/burner/library/collection/graph.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+#
+# Copyright (c) 2020-present, Blue Marble Payroll, LLC
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+module Burner
+  module Library
+    module Collection
+      # Take an array of (denormalized) objects and create an object hierarchy from them.
+      # Under the hood it uses Hashematics: https://github.com/bluemarblepayroll/hashematics.
+      #
+      # Expected Payload#value input: array of objects.
+      # Payload#value output: An array of objects.
+      class Graph < JobWithRegister
+        attr_reader :key, :groups
+
+        def initialize(name:, key:, config: Hashematics::Configuration.new, register: '')
+          super(name: name, register: register)
+
+          raise ArgumentError, 'key is required' if key.to_s.empty?
+
+          @groups = Hashematics::Configuration.new(config).groups
+          @key = key.to_s
+
+          freeze
+        end
+
+        def perform(output, payload)
+          graph = Hashematics::Graph.new(groups).add(array(payload[register]))
+
+          output.detail("Graphing: #{key}")
+
+          payload[register] = graph.data(key)
+        end
+      end
+    end
+  end
+end
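
A hypothetical job entry for the graph job might look like the following. The names are illustrative, and the config value is a Hashematics configuration hash (groups and templates) whose exact shape is documented by the Hashematics gem rather than reproduced here:

  {
    name: 'graph',
    type: 'b/collection/graph',
    key: 'people',   # the Hashematics group whose hierarchy replaces the flat rows
    config: {
      # groups/templates per the Hashematics README (omitted here)
    }
  }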
--- /dev/null
+++ b/data/lib/burner/library/collection/objects_to_arrays.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+#
+# Copyright (c) 2020-present, Blue Marble Payroll, LLC
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+module Burner
+  module Library
+    module Collection
+      # Convert an array of objects to an array of arrays. You can leverage the separator
+      # option to support key paths and nested objects. Pass in an array of
+      # Burner::Modeling::KeyIndexMapping instances or hashable configurations which specifies
+      # the key-to-index mappings to use.
+      #
+      # Expected Payload#value input: array of hashes.
+      # Payload#value output: An array of arrays.
+      #
+      # An example using a configuration-first pipeline:
+      #
+      #   config = {
+      #     jobs: [
+      #       {
+      #         name: 'set',
+      #         type: 'b/set_value',
+      #         value: [
+      #           [1, 'funky']
+      #         ]
+      #       },
+      #       {
+      #         name: 'map',
+      #         type: 'b/collection/objects_to_arrays',
+      #         mappings: [
+      #           { index: 0, key: 'id' },
+      #           { index: 1, key: 'name' }
+      #         ]
+      #       },
+      #       {
+      #         name: 'output',
+      #         type: 'b/echo',
+      #         message: 'value is currently: {__value}'
+      #       },
+      #
+      #     ],
+      #     steps: %w[set map output]
+      #   }
+      #
+      #   Burner::Pipeline.make(config).execute
+      class ObjectsToArrays < JobWithRegister
+        attr_reader :mappings
+
+        # If you wish to support nested objects you can pass in a string to use as a
+        # key path separator. For example: if you would like to recognize dot-notation for
+        # nested hashes then set separator to '.'. For more information, see the underlying
+        # library that supports this dot-notation concept:
+        # https://github.com/bluemarblepayroll/objectable
+        def initialize(name:, mappings: [], register: '', separator: '')
+          super(name: name, register: register)
+
+          @mappings = Modeling::KeyIndexMapping.array(mappings)
+          @resolver = Objectable.resolver(separator: separator.to_s)
+
+          freeze
+        end
+
+        def perform(_output, payload)
+          payload[register] = array(payload[register]).map { |object| key_to_index_map(object) }
+        end
+
+        private
+
+        attr_reader :resolver
+
+        def key_to_index_map(object)
+          mappings.each_with_object(prototype_array) do |mapping, memo|
+            memo[mapping.index] = resolver.get(object, mapping.key)
+          end
+        end
+
+        def prototype_array
+          Array.new(mappings.length)
+        end
+      end
+    end
+  end
+end
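
The separator option is what makes nested key paths work, since keys are resolved through Objectable. A hypothetical mapping using dot-notation (names and data invented for this sketch):

  {
    name: 'flatten',
    type: 'b/collection/objects_to_arrays',
    separator: '.',
    mappings: [
      { index: 0, key: 'id' },
      { index: 1, key: 'contact.email' }  # resolved through the nested hash
    ]
  }
  # { 'id' => 1, 'contact' => { 'email' => 'x@example.com' } }  =>  [1, 'x@example.com']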
--- /dev/null
+++ b/data/lib/burner/library/collection/shift.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+#
+# Copyright (c) 2020-present, Blue Marble Payroll, LLC
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+module Burner
+  module Library
+    module Collection
+      # Take an array and remove the first N elements, where N is specified by the amount
+      # attribute. The initial use case for this was to remove "header" rows from arrays,
+      # like you would expect when parsing CSV files.
+      #
+      # Expected Payload#value input: nothing.
+      # Payload#value output: An array with N beginning elements removed.
+      class Shift < JobWithRegister
+        DEFAULT_AMOUNT = 0
+
+        private_constant :DEFAULT_AMOUNT
+
+        attr_reader :amount
+
+        def initialize(name:, amount: DEFAULT_AMOUNT, register: '')
+          super(name: name, register: register)
+
+          @amount = amount.to_i
+
+          freeze
+        end
+
+        def perform(output, payload)
+          output.detail("Shifting #{amount} entries.")
+
+          payload[register] = array(payload[register]).slice(amount..-1)
+        end
+      end
+    end
+  end
+end
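
As the class comment notes, the common case is dropping a header row after a CSV parse. A hypothetical pair of job entries (the CSV job's own options are not shown in this hunk):

  { name: 'parse', type: 'b/deserialize/csv' },
  { name: 'drop_header', type: 'b/collection/shift', amount: 1 }  # remove the header row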
--- /dev/null
+++ b/data/lib/burner/library/collection/transform.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+#
+# Copyright (c) 2020-present, Blue Marble Payroll, LLC
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+module Burner
+  module Library
+    module Collection
+      # Iterate over all objects and return a new set of transformed objects. The object is
+      # transformed per the "transformers" attribute for its attributes. An attribute defines
+      # the ultimate key to place the value in and then the transformer pipeline to use to
+      # derive the value. Under the hood this uses the Realize library:
+      # https://github.com/bluemarblepayroll/realize
+      # For more information on the specific contract for attributes, see the
+      # Burner::Modeling::Attribute class.
+      #
+      # Expected Payload#value input: array of objects.
+      # Payload#value output: An array of objects.
+      class Transform < JobWithRegister
+        BLANK = ''
+
+        attr_reader :attribute_renderers,
+                    :exclusive,
+                    :resolver
+
+        def initialize(name:, attributes: [], exclusive: false, register: '', separator: BLANK)
+          super(name: name, register: register)
+
+          @resolver = Objectable.resolver(separator: separator)
+          @exclusive = exclusive || false
+
+          @attribute_renderers =
+            Modeling::Attribute.array(attributes)
+                               .map { |a| Modeling::AttributeRenderer.new(a, resolver) }
+
+          freeze
+        end
+
+        def perform(output, payload)
+          payload[register] = array(payload[register]).map { |row| transform(row, payload.time) }
+
+          attr_count = attribute_renderers.length
+          row_count = payload[register].length
+
+          output.detail("Transformed #{attr_count} attributes(s) for #{row_count} row(s)")
+        end
+
+        private
+
+        def transform(row, time)
+          outgoing_row = exclusive ? {} : row
+
+          attribute_renderers.each_with_object(outgoing_row) do |attribute_renderer, memo|
+            value = attribute_renderer.transform(row, time)
+
+            resolver.set(memo, attribute_renderer.key, value)
+          end
+        end
+      end
+    end
+  end
+end
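
A hedged sketch of a transform job entry. The exclusive and separator options follow directly from the code above (start from an empty row, resolve keys through Objectable); the attribute entries assume the Burner::Modeling::Attribute contract, whose file is not shown in this hunk, so their Realize transformer pipelines are omitted:

  {
    name: 'reshape',
    type: 'b/collection/transform',
    exclusive: true,    # outgoing rows start from {} rather than the original row
    separator: '.',     # dot-notation key paths via Objectable
    attributes: [
      { key: 'id' },
      { key: 'contact.email' }
      # each attribute may also declare a Realize transformer pipeline (not shown)
    ]
  }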
--- /dev/null
+++ b/data/lib/burner/library/collection/unpivot.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+#
+# Copyright (c) 2020-present, Blue Marble Payroll, LLC
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+module Burner
+  module Library
+    module Collection
+      # Take an array of objects and un-pivot groups of keys into rows.
+      # Under the hood it uses HashMath's Unpivot class:
+      # https://github.com/bluemarblepayroll/hash_math
+      #
+      # Expected Payload#value input: array of objects.
+      # Payload#value output: An array of objects.
+      class Unpivot < JobWithRegister
+        attr_reader :unpivot
+
+        def initialize(name:, pivot_set: HashMath::Unpivot::PivotSet.new, register: '')
+          super(name: name, register: register)
+
+          @unpivot = HashMath::Unpivot.new(pivot_set)
+
+          freeze
+        end
+
+        def perform(output, payload)
+          payload[register] = array(payload[register])
+          object_count = payload[register].length || 0
+
+          message = "#{pivot_count} Pivots, Key(s): #{key_count} key(s), #{object_count} objects(s)"
+
+          output.detail(message)
+
+          payload[register] = payload[register].flat_map { |object| unpivot.expand(object) }
+        end
+
+        private
+
+        def pivot_count
+          unpivot.pivot_set.pivots.length
+        end
+
+        def key_count
+          unpivot.pivot_set.pivots.map { |p| p.keys.length }.sum
+        end
+      end
+    end
+  end
+end
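
A hypothetical unpivot job entry. The pivot_set shape follows HashMath::Unpivot's contract; only the pieces visible in the code above (pivots and their keys) are sketched, and the remaining options are left to the hash_math documentation:

  {
    name: 'unpivot',
    type: 'b/collection/unpivot',
    pivot_set: {
      pivots: [
        {
          keys: %w[jan_sales feb_sales mar_sales]
          # plus the coalesce options defined by HashMath::Unpivot (not shown)
        }
      ]
    }
  }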