burner 1.0.0.pre.alpha.6 → 1.0.0.pre.alpha.11

Files changed (44)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +2 -0
  3. data/README.md +43 -39
  4. data/burner.gemspec +1 -1
  5. data/lib/burner/job.rb +15 -10
  6. data/lib/burner/job_with_register.rb +24 -0
  7. data/lib/burner/jobs.rb +27 -20
  8. data/lib/burner/library.rb +11 -5
  9. data/lib/burner/library/collection/arrays_to_objects.rb +14 -11
  10. data/lib/burner/library/collection/graph.rb +7 -9
  11. data/lib/burner/library/collection/objects_to_arrays.rb +34 -34
  12. data/lib/burner/library/collection/shift.rb +6 -8
  13. data/lib/burner/library/collection/transform.rb +7 -9
  14. data/lib/burner/library/collection/unpivot.rb +17 -11
  15. data/lib/burner/library/collection/validate.rb +90 -0
  16. data/lib/burner/library/collection/values.rb +9 -11
  17. data/lib/burner/library/deserialize/csv.rb +4 -6
  18. data/lib/burner/library/deserialize/json.rb +4 -6
  19. data/lib/burner/library/deserialize/yaml.rb +7 -7
  20. data/lib/burner/library/echo.rb +1 -3
  21. data/lib/burner/library/io/base.rb +3 -3
  22. data/lib/burner/library/io/exist.rb +9 -9
  23. data/lib/burner/library/io/read.rb +5 -7
  24. data/lib/burner/library/io/write.rb +5 -7
  25. data/lib/burner/library/{dummy.rb → nothing.rb} +3 -5
  26. data/lib/burner/library/serialize/csv.rb +5 -7
  27. data/lib/burner/library/serialize/json.rb +4 -6
  28. data/lib/burner/library/serialize/yaml.rb +4 -6
  29. data/lib/burner/library/set_value.rb +6 -8
  30. data/lib/burner/library/sleep.rb +1 -3
  31. data/lib/burner/modeling.rb +1 -0
  32. data/lib/burner/modeling/attribute.rb +3 -1
  33. data/lib/burner/modeling/validations.rb +23 -0
  34. data/lib/burner/modeling/validations/base.rb +35 -0
  35. data/lib/burner/modeling/validations/blank.rb +31 -0
  36. data/lib/burner/modeling/validations/present.rb +31 -0
  37. data/lib/burner/payload.rb +50 -10
  38. data/lib/burner/pipeline.rb +3 -3
  39. data/lib/burner/step.rb +1 -5
  40. data/lib/burner/util.rb +1 -0
  41. data/lib/burner/util/string_template.rb +42 -0
  42. data/lib/burner/version.rb +1 -1
  43. metadata +13 -6
  44. data/lib/burner/string_template.rb +0 -40

data/lib/burner/library/collection/objects_to_arrays.rb
@@ -15,40 +15,42 @@ module Burner
  # Burner::Modeling::KeyIndexMapping instances or hashable configurations which specifies
  # the key-to-index mappings to use.
  #
- # Expected Payload#value input: array of hashes.
- # Payload#value output: An array of arrays.
+ # Expected Payload[register] input: array of hashes.
+ # Payload[register] output: An array of arrays.
  #
  # An example using a configuration-first pipeline:
  #
- # config = {
- # jobs: [
- # {
- # name: 'set',
- # type: 'set_value',
- # value: [
- # [1, 'funky']
- # ]
- # },
- # {
- # name: 'map',
- # type: 'collection/objects_to_arrays',
- # mappings: [
- # { index: 0, key: 'id' },
- # { index: 1, key: 'name' }
- # ]
- # },
- # {
- # name: 'output',
- # type: 'echo',
- # message: 'value is currently: {__value}'
- # },
+ # config = {
+ # jobs: [
+ # {
+ # name: 'set',
+ # type: 'b/set_value',
+ # value: [
+ # { 'id' => 1, 'name' => 'funky' }
+ # ],
+ # register: register
+ # },
+ # {
+ # name: 'map',
+ # type: 'b/collection/objects_to_arrays',
+ # mappings: [
+ # { index: 0, key: 'id' },
+ # { index: 1, key: 'name' }
+ # ],
+ # register: register
+ # },
+ # {
+ # name: 'output',
+ # type: 'b/echo',
+ # message: 'value is currently: {__value}'
+ # },
  #
- # ],
- # steps: %w[set map output]
- # }
+ # ],
+ # steps: %w[set map output]
+ # }
  #
- # Burner::Pipeline.make(config).execute
- class ObjectsToArrays < Job
+ # Burner::Pipeline.make(config).execute
+ class ObjectsToArrays < JobWithRegister
  attr_reader :mappings

  # If you wish to support nested objects you can pass in a string to use as a
@@ -56,8 +58,8 @@ module Burner
  # nested hashes then set separator to '.'. For more information, see the underlying
  # library that supports this dot-notation concept:
  # https://github.com/bluemarblepayroll/objectable
- def initialize(name:, mappings: [], separator: '')
- super(name: name)
+ def initialize(name:, mappings: [], register: '', separator: '')
+ super(name: name, register: register)

  @mappings = Modeling::KeyIndexMapping.array(mappings)
  @resolver = Objectable.resolver(separator: separator.to_s)
@@ -66,9 +68,7 @@ module Burner
  end

  def perform(_output, payload)
- payload.value = array(payload.value).map { |object| key_to_index_map(object) }
-
- nil
+ payload[register] = array(payload[register]).map { |object| key_to_index_map(object) }
  end

  private
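
The configuration-first example in the updated comment above is nearly runnable as-is. Below is a minimal sketch of it, assuming the released Burner::Pipeline.make(...).execute API, substituting the concrete register name 'data' for the `register` placeholder used in the comment, and using a plain echo message.

require 'burner'

config = {
  jobs: [
    {
      name: 'set',
      type: 'b/set_value',
      value: [
        { 'id' => 1, 'name' => 'funky' } # seed the 'data' register
      ],
      register: 'data'
    },
    {
      name: 'map',
      type: 'b/collection/objects_to_arrays',
      mappings: [
        { index: 0, key: 'id' },
        { index: 1, key: 'name' }
      ],
      register: 'data' # read and write the same register
    },
    {
      name: 'output',
      type: 'b/echo',
      message: 'finished mapping objects to arrays'
    }
  ],
  steps: %w[set map output]
}

Burner::Pipeline.make(config).execute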

data/lib/burner/library/collection/shift.rb
@@ -14,17 +14,17 @@ module Burner
  # attribute. The initial use case for this was to remove "header" rows from arrays,
  # like you would expect when parsing CSV files.
  #
- # Expected Payload#value input: nothing.
- # Payload#value output: An array with N beginning elements removed.
- class Shift < Job
+ # Expected Payload[register] input: nothing.
+ # Payload[register] output: An array with N beginning elements removed.
+ class Shift < JobWithRegister
  DEFAULT_AMOUNT = 0

  private_constant :DEFAULT_AMOUNT

  attr_reader :amount

- def initialize(name:, amount: DEFAULT_AMOUNT)
- super(name: name)
+ def initialize(name:, amount: DEFAULT_AMOUNT, register: '')
+ super(name: name, register: register)

  @amount = amount.to_i

@@ -34,9 +34,7 @@ module Burner
  def perform(output, payload)
  output.detail("Shifting #{amount} entries.")

- payload.value = array(payload.value).slice(amount..-1)
-
- nil
+ payload[register] = array(payload[register]).slice(amount..-1)
  end
  end
  end
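
As the comment notes, the typical use is dropping a header row after a CSV parse. A hedged sketch of such a job entry, assuming the `b/`-prefixed registry keys used in the example above (the register name 'rows' is illustrative):

{
  name: 'drop_header',
  type: 'b/collection/shift',
  amount: 1,       # remove the first (header) row
  register: 'rows' # operate on the 'rows' register
}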

data/lib/burner/library/collection/transform.rb
@@ -18,17 +18,17 @@ module Burner
  # For more information on the specific contract for attributes, see the
  # Burner::Modeling::Attribute class.
  #
- # Expected Payload#value input: array of objects.
- # Payload#value output: An array of objects.
- class Transform < Job
+ # Expected Payload[register] input: array of objects.
+ # Payload[register] output: An array of objects.
+ class Transform < JobWithRegister
  BLANK = ''

  attr_reader :attribute_renderers,
  :exclusive,
  :resolver

- def initialize(name:, attributes: [], exclusive: false, separator: BLANK)
- super(name: name)
+ def initialize(name:, attributes: [], exclusive: false, register: '', separator: BLANK)
+ super(name: name, register: register)

  @resolver = Objectable.resolver(separator: separator)
  @exclusive = exclusive || false
@@ -41,14 +41,12 @@ module Burner
  end

  def perform(output, payload)
- payload.value = array(payload.value).map { |row| transform(row, payload.time) }
+ payload[register] = array(payload[register]).map { |row| transform(row, payload.time) }

  attr_count = attribute_renderers.length
- row_count = payload.value.length
+ row_count = payload[register].length

  output.detail("Transformed #{attr_count} attributes(s) for #{row_count} row(s)")
-
- nil
  end

  private

data/lib/burner/library/collection/unpivot.rb
@@ -14,13 +14,13 @@ module Burner
  # Under the hood it uses HashMath's Unpivot class:
  # https://github.com/bluemarblepayroll/hash_math
  #
- # Expected Payload#value input: array of objects.
- # Payload#value output: An array of objects.
- class Unpivot < Job
+ # Expected Payload[register] input: array of objects.
+ # Payload[register] output: An array of objects.
+ class Unpivot < JobWithRegister
  attr_reader :unpivot

- def initialize(name:, pivot_set: HashMath::Unpivot::PivotSet.new)
- super(name: name)
+ def initialize(name:, pivot_set: HashMath::Unpivot::PivotSet.new, register: '')
+ super(name: name, register: register)

  @unpivot = HashMath::Unpivot.new(pivot_set)

@@ -28,18 +28,24 @@ module Burner
  end

  def perform(output, payload)
- pivot_count = unpivot.pivot_set.pivots.length
- key_count = unpivot.pivot_set.pivots.map { |p| p.keys.length }.sum
- payload.value = array(payload.value)
- object_count = payload.value.length || 0
+ payload[register] = array(payload[register])
+ object_count = payload[register].length || 0

  message = "#{pivot_count} Pivots, Key(s): #{key_count} key(s), #{object_count} objects(s)"

  output.detail(message)

- payload.value = payload.value.flat_map { |object| unpivot.expand(object) }
+ payload[register] = payload[register].flat_map { |object| unpivot.expand(object) }
+ end
+
+ private
+
+ def pivot_count
+ unpivot.pivot_set.pivots.length
+ end

- nil
+ def key_count
+ unpivot.pivot_set.pivots.map { |p| p.keys.length }.sum
  end
  end
  end

data/lib/burner/library/collection/validate.rb
@@ -0,0 +1,90 @@
+ # frozen_string_literal: true
+
+ #
+ # Copyright (c) 2020-present, Blue Marble Payroll, LLC
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+ #
+
+ module Burner
+ module Library
+ module Collection
+ # Process each object in an array and see if its attribute values match a given set
+ # of validations. The main register will include the valid objects and the invalid_register
+ # will contain the invalid objects.
+ #
+ # Expected Payload[register] input: array of objects.
+ # Payload[register] output: An array of objects that are valid.
+ # Payload[invalid_register] output: An array of objects that are invalid.
+ class Validate < JobWithRegister
+ DEFAULT_INVALID_REGISTER = 'invalid'
+ DEFAULT_JOIN_CHAR = ', '
+ DEFAULT_MESSAGE_KEY = 'errors'
+
+ attr_reader :invalid_register,
+ :join_char,
+ :message_key,
+ :resolver,
+ :validations
+
+ def initialize(
+ name:,
+ invalid_register: DEFAULT_INVALID_REGISTER,
+ join_char: DEFAULT_JOIN_CHAR,
+ message_key: DEFAULT_MESSAGE_KEY,
+ register: '',
+ separator: '',
+ validations: []
+ )
+ super(name: name, register: register)
+
+ @invalid_register = invalid_register.to_s
+ @join_char = join_char.to_s
+ @message_key = message_key.to_s
+ @resolver = Objectable.resolver(separator: separator)
+ @validations = Modeling::Validations.array(validations)
+
+ freeze
+ end
+
+ def perform(output, payload)
+ valid = []
+ invalid = []
+
+ (payload[register] || []).each do |object|
+ errors = validate(object)
+
+ if errors.empty?
+ valid << object
+ else
+ invalid << make_in_error(object, errors)
+ end
+ end
+
+ output.detail("Valid count: #{valid.length}")
+ output.detail("Invalid count: #{invalid.length}")
+
+ payload[register] = valid
+ payload[invalid_register] = invalid
+
+ nil
+ end
+
+ private
+
+ def validate(object)
+ validations.each_with_object([]) do |validation, memo|
+ next if validation.valid?(object, resolver)
+
+ memo << validation.message
+ end
+ end
+
+ def make_in_error(object, errors)
+ resolver.set(object, message_key, errors.join(join_char))
+ end
+ end
+ end
+ end
+ end
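
The new validate job ships without a usage example, so here is a hedged sketch of a job entry based on the initializer keys above. The validations entries assume the hashable contract of the new Burner::Modeling::Validations::Present and ::Blank classes (a key to resolve plus a type the factory dispatches on); treat them as illustrative rather than authoritative.

{
  name: 'check_rows',
  type: 'b/collection/validate',
  register: 'rows',            # objects to validate
  invalid_register: 'invalid', # failing objects land here
  message_key: 'errors',       # attribute written onto each invalid object
  join_char: '; ',             # error messages joined with this separator
  validations: [
    { type: 'present', key: 'id' },         # assumed: id must be present
    { type: 'blank',   key: 'archived_at' } # assumed: archived_at must be blank
  ]
}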

data/lib/burner/library/collection/values.rb
@@ -14,13 +14,13 @@ module Burner
  # If include_keys is true (it is false by default), then call #keys on the first
  # object and inject that as a "header" object.
  #
- # Expected Payload#value input: array of objects.
- # Payload#value output: An array of arrays.
- class Values < Job
+ # Expected Payload[register] input: array of objects.
+ # Payload[register] output: An array of arrays.
+ class Values < JobWithRegister
  attr_reader :include_keys

- def initialize(name:, include_keys: false)
- super(name: name)
+ def initialize(name:, include_keys: false, register: '')
+ super(name: name, register: register)

  @include_keys = include_keys || false

@@ -28,12 +28,10 @@ module Burner
  end

  def perform(_output, payload)
- payload.value = array(payload.value)
- keys = include_keys ? [keys(payload.value.first)] : []
- values = payload.value.map { |object| values(object) }
- payload.value = keys + values
-
- nil
+ payload[register] = array(payload[register])
+ keys = include_keys ? [keys(payload[register].first)] : []
+ values = payload[register].map { |object| values(object) }
+ payload[register] = keys + values
  end

  private
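
A hedged sketch of a values job entry that emits a header row before serialization, using the include_keys option described above (register name illustrative):

{
  name: 'to_rows',
  type: 'b/collection/values',
  include_keys: true, # prepend the keys of the first object as a header row
  register: 'rows'
}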

data/lib/burner/library/deserialize/csv.rb
@@ -12,16 +12,14 @@ module Burner
  module Deserialize
  # Take a CSV string and de-serialize into object(s).
  #
- # Expected Payload#value input: nothing.
- # Payload#value output: an array of arrays. Each inner array represents one data row.
- class Csv < Job
+ # Expected Payload[register] input: nothing.
+ # Payload[register] output: an array of arrays. Each inner array represents one data row.
+ class Csv < JobWithRegister
  # This currently only supports returning an array of arrays, including the header row.
  # In the future this could be extended to offer more customizable options, such as
  # making it return an array of hashes with the columns mapped, etc.)
  def perform(_output, payload)
- payload.value = CSV.new(payload.value, headers: false).to_a
-
- nil
+ payload[register] = CSV.new(payload[register], headers: false).to_a
  end
  end
  end

data/lib/burner/library/deserialize/json.rb
@@ -12,13 +12,11 @@ module Burner
  module Deserialize
  # Take a JSON string and deserialize into object(s).
  #
- # Expected Payload#value input: string of JSON data.
- # Payload#value output: anything, as specified by the JSON de-serializer.
- class Json < Job
+ # Expected Payload[register] input: string of JSON data.
+ # Payload[register] output: anything, as specified by the JSON de-serializer.
+ class Json < JobWithRegister
  def perform(_output, payload)
- payload.value = JSON.parse(payload.value)
-
- nil
+ payload[register] = JSON.parse(payload[register])
  end
  end
  end
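
A hedged two-job sketch of how this might be wired: read a file into a register, then parse it in place. The b/io/read key, its register support, and the 'raw' register name are assumptions based on the registry pattern and the io/base.rb change shown further down.

[
  { name: 'read',  type: 'b/io/read', path: 'input.json', register: 'raw' },
  { name: 'parse', type: 'b/deserialize/json', register: 'raw' }
]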

data/lib/burner/library/deserialize/yaml.rb
@@ -15,13 +15,13 @@ module Burner
  # YAML. If you wish to ease this restriction, for example if you have custom serialization
  # for custom classes, then you can pass in safe: false.
  #
- # Expected Payload#value input: string of YAML data.
- # Payload#value output: anything as specified by the YAML de-serializer.
- class Yaml < Job
+ # Expected Payload[register] input: string of YAML data.
+ # Payload[register] output: anything as specified by the YAML de-serializer.
+ class Yaml < JobWithRegister
  attr_reader :safe

- def initialize(name:, safe: true)
- super(name: name)
+ def initialize(name:, register: '', safe: true)
+ super(name: name, register: register)

  @safe = safe

@@ -36,9 +36,9 @@ module Burner
  def perform(output, payload)
  output.detail('Warning: loading YAML not using safe_load.') unless safe

- payload.value = safe ? YAML.safe_load(payload.value) : YAML.load(payload.value)
+ value = payload[register]

- nil
+ payload[register] = safe ? YAML.safe_load(value) : YAML.load(value)
  end
  # rubocop:enable Security/YAMLLoad
  end
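
Because safe_load is the default, YAML containing custom classes needs safe: false, which the job now logs as a warning before parsing. A hedged sketch of a job entry (register name illustrative):

{
  name: 'parse_yaml',
  type: 'b/deserialize/yaml',
  register: 'raw',
  safe: false # opts out of YAML.safe_load; triggers the warning detail above
}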

data/lib/burner/library/echo.rb
@@ -11,7 +11,7 @@ module Burner
  module Library
  # Output a simple message to the output.
  #
- # Note: this does not use Payload#value.
+ # Note: this does not use Payload#registers.
  class Echo < Job
  attr_reader :message

@@ -27,8 +27,6 @@ module Burner
  compiled_message = job_string_template(message, output, payload)

  output.detail(compiled_message)
-
- nil
  end
  end
  end

data/lib/burner/library/io/base.rb
@@ -11,11 +11,11 @@ module Burner
  module Library
  module IO
  # Common configuration/code for all IO Job subclasses.
- class Base < Job
+ class Base < JobWithRegister
  attr_reader :path

- def initialize(name:, path:)
- super(name: name)
+ def initialize(name:, path:, register: '')
+ super(name: name, register: register)

  raise ArgumentError, 'path is required' if path.to_s.empty?