burner 1.5.0 → 1.8.0

Files changed (46)
  1. checksums.yaml +4 -4
  2. data/.tool-versions +1 -0
  3. data/CHANGELOG.md +36 -0
  4. data/README.md +59 -3
  5. data/lib/burner/data.rb +46 -0
  6. data/lib/burner/job.rb +2 -10
  7. data/lib/burner/job_set.rb +64 -0
  8. data/lib/burner/job_with_register.rb +8 -1
  9. data/lib/burner/jobs.rb +6 -0
  10. data/lib/burner/library.rb +6 -0
  11. data/lib/burner/library/collection/arrays_to_objects.rb +1 -1
  12. data/lib/burner/library/collection/coalesce.rb +14 -9
  13. data/lib/burner/library/collection/concatenate.rb +1 -1
  14. data/lib/burner/library/collection/graph.rb +1 -1
  15. data/lib/burner/library/collection/group.rb +15 -11
  16. data/lib/burner/library/collection/nested_aggregate.rb +1 -1
  17. data/lib/burner/library/collection/number.rb +51 -0
  18. data/lib/burner/library/collection/objects_to_arrays.rb +1 -1
  19. data/lib/burner/library/collection/shift.rb +1 -1
  20. data/lib/burner/library/collection/transform.rb +1 -1
  21. data/lib/burner/library/collection/unpivot.rb +1 -1
  22. data/lib/burner/library/collection/validate.rb +1 -1
  23. data/lib/burner/library/collection/values.rb +1 -1
  24. data/lib/burner/library/collection/zip.rb +1 -1
  25. data/lib/burner/library/compress/row_reader.rb +1 -1
  26. data/lib/burner/library/deserialize/yaml.rb +1 -1
  27. data/lib/burner/library/echo.rb +1 -1
  28. data/lib/burner/library/io/exist.rb +1 -1
  29. data/lib/burner/library/io/open_file_base.rb +1 -1
  30. data/lib/burner/library/io/row_reader.rb +1 -1
  31. data/lib/burner/library/io/write.rb +28 -1
  32. data/lib/burner/library/param/base.rb +29 -0
  33. data/lib/burner/library/param/from_register.rb +30 -0
  34. data/lib/burner/library/param/to_register.rb +28 -0
  35. data/lib/burner/library/serialize/csv.rb +1 -1
  36. data/lib/burner/library/sleep.rb +1 -1
  37. data/lib/burner/library/value/copy.rb +1 -1
  38. data/lib/burner/library/value/nest.rb +37 -0
  39. data/lib/burner/library/value/static.rb +1 -1
  40. data/lib/burner/library/value/transform.rb +38 -0
  41. data/lib/burner/payload.rb +39 -15
  42. data/lib/burner/pipeline.rb +6 -34
  43. data/lib/burner/util.rb +1 -0
  44. data/lib/burner/util/keyable.rb +23 -0
  45. data/lib/burner/version.rb +1 -1
  46. metadata +15 -5
@@ -16,6 +16,12 @@ module Burner
  # It is worth noting that the resulting hashes values are singular objects and not an array
  # like Ruby's Enumerable#group_by method.
  #
+ # If the insensitive option is set as true then each key's value will be coerced as
+ # a lowercase string. This can help provide two types of insensitivity: case and type
+ # insensitivity. This may be appropriate in some places but not others. If any other
+ # value coercion is needed then another option would be to first transform the records
+ # before grouping them.
+ #
  # An example of this specific job:
  #
  # input: [{ id: 1, code: 'a' }, { id: 2, code: 'b' }]
@@ -25,18 +31,22 @@ module Burner
  # Expected Payload[register] input: array of objects.
  # Payload[register] output: hash.
  class Group < JobWithRegister
- attr_reader :keys, :resolver
+ include Util::Keyable
+
+ attr_reader :insensitive, :keys, :resolver

  def initialize(
- name:,
+ insensitive: false,
  keys: [],
+ name: '',
  register: DEFAULT_REGISTER,
  separator: ''
  )
  super(name: name, register: register)

- @keys = Array(keys)
- @resolver = Objectable.resolver(separator: separator.to_s)
+ @insensitive = insensitive || false
+ @keys = Array(keys)
+ @resolver = Objectable.resolver(separator: separator.to_s)

  raise ArgumentError, 'at least one key is required' if @keys.empty?

@@ -50,18 +60,12 @@ module Burner
  output.detail("Grouping based on key(s): #{keys} for #{count} records(s)")

  grouped_records = payload[register].each_with_object({}) do |record, memo|
- key = make_key(record)
+ key = make_key(record, keys, resolver, insensitive)
  memo[key] = record
  end

  payload[register] = grouped_records
  end
-
- private
-
- def make_key(record)
- keys.map { |key| resolver.get(record, key) }
- end
  end
  end
  end
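
To illustrate the new insensitive option on the collection/group job above, here is a minimal usage sketch in plain Ruby. It relies only on the constructor and perform shown in this diff; the Burner::Payload.new(registers: ...) and Burner::Output.new calls, the register name, and the sample records are illustrative assumptions rather than code from this release.

require 'burner'

# Hypothetical records whose 'code' values differ only by case and type.
payload = Burner::Payload.new(
  registers: {
    'records' => [
      { 'id' => 1, 'code' => 'ABC' },
      { 'id' => 2, 'code' => :abc }
    ]
  }
)

# insensitive: true coerces each key's value to a lowercase string before the
# group key is built, so 'ABC' and :abc collapse into the same key and the
# last record wins (memo[key] = record above).
job = Burner::Library::Collection::Group.new(
  keys: %w[code],
  register: 'records',
  insensitive: true
)
job.perform(Burner::Output.new, payload)

payload['records'] # => { ['abc'] => { 'id' => 2, 'code' => :abc } }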
@@ -21,7 +21,7 @@ module Burner
  class NestedAggregate < JobWithRegister
  attr_reader :key, :key_mappings, :resolver

- def initialize(name:, key:, key_mappings: [], register: DEFAULT_REGISTER, separator: '')
+ def initialize(key:, key_mappings: [], name: '', register: DEFAULT_REGISTER, separator: '')
  super(name: name, register: register)

  raise ArgumentError, 'key is required' if key.to_s.empty?
@@ -0,0 +1,51 @@
+ # frozen_string_literal: true
+
+ #
+ # Copyright (c) 2020-present, Blue Marble Payroll, LLC
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+ #
+
+ module Burner
+ module Library
+ module Collection
+ # This job can iterate over a set of records and sequence them (set the specified key to
+ # a sequential index value.)
+ #
+ # Expected Payload[register] input: array of objects.
+ # Payload[register] output: array of objects.
+ class Number < JobWithRegister
+ BLANK = ''
+ DEFAULT_KEY = 'number'
+ DEFAULT_START_AT = 1
+
+ attr_reader :key, :resolver, :start_at
+
+ def initialize(
+ key: DEFAULT_KEY,
+ name: BLANK,
+ register: Burner::DEFAULT_REGISTER,
+ separator: BLANK,
+ start_at: DEFAULT_START_AT
+ )
+ super(name: name, register: register)
+
+ @key = key.to_s
+ @resolver = Objectable.resolver(separator: separator)
+ @start_at = start_at.to_i
+
+ freeze
+ end
+
+ def perform(output, payload)
+ output.detail("Setting '#{key}' for each record with values starting at #{start_at}")
+
+ ensure_array(payload).each.with_index(start_at) do |record, index|
+ resolver.set(record, key, index)
+ end
+ end
+ end
+ end
+ end
+ end
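
A short usage sketch for the new collection/number job defined above. The constructor options (key, register, start_at) come from the code above, while the payload/output construction, register name, and sample records are assumptions for demonstration.

require 'burner'

payload = Burner::Payload.new(
  registers: { 'records' => [{ 'name' => 'a' }, { 'name' => 'b' }] }
)

# Writes a sequential index into each record under 'row_number',
# starting at 10 instead of the default start_at of 1.
job = Burner::Library::Collection::Number.new(
  key: 'row_number',
  register: 'records',
  start_at: 10
)
job.perform(Burner::Output.new, payload)

payload['records']
# => [{ 'name' => 'a', 'row_number' => 10 }, { 'name' => 'b', 'row_number' => 11 }]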
@@ -58,7 +58,7 @@ module Burner
  # nested hashes then set separator to '.'. For more information, see the underlying
  # library that supports this dot-notation concept:
  # https://github.com/bluemarblepayroll/objectable
- def initialize(name:, mappings: [], register: DEFAULT_REGISTER, separator: '')
+ def initialize(mappings: [], name: '', register: DEFAULT_REGISTER, separator: '')
  super(name: name, register: register)

  @mappings = Modeling::KeyIndexMapping.array(mappings)
@@ -23,7 +23,7 @@ module Burner

  attr_reader :amount

- def initialize(name:, amount: DEFAULT_AMOUNT, register: DEFAULT_REGISTER)
+ def initialize(amount: DEFAULT_AMOUNT, name: '', register: DEFAULT_REGISTER)
  super(name: name, register: register)

  @amount = amount.to_i
@@ -28,9 +28,9 @@ module Burner
  :resolver

  def initialize(
- name:,
  attributes: [],
  exclusive: false,
+ name: '',
  register: DEFAULT_REGISTER,
  separator: BLANK
  )
@@ -20,7 +20,7 @@ module Burner
  attr_reader :unpivot

  def initialize(
- name:,
+ name: '',
  pivot_set: HashMath::Unpivot::PivotSet.new,
  register: DEFAULT_REGISTER
  )
@@ -29,10 +29,10 @@ module Burner
  :validations

  def initialize(
- name:,
  invalid_register: DEFAULT_INVALID_REGISTER,
  join_char: DEFAULT_JOIN_CHAR,
  message_key: DEFAULT_MESSAGE_KEY,
+ name: '',
  register: DEFAULT_REGISTER,
  separator: '',
  validations: []
@@ -19,7 +19,7 @@ module Burner
  class Values < JobWithRegister
  attr_reader :include_keys

- def initialize(name:, include_keys: false, register: DEFAULT_REGISTER)
+ def initialize(include_keys: false, name: '', register: DEFAULT_REGISTER)
  super(name: name, register: register)

  @include_keys = include_keys || false
@@ -25,9 +25,9 @@ module Burner
  attr_reader :base_register, :with_register

  def initialize(
- name:,
  with_register:,
  base_register: DEFAULT_REGISTER,
+ name: '',
  register: DEFAULT_REGISTER
  )
  super(name: name, register: register)
@@ -33,10 +33,10 @@ module Burner
  :resolver

  def initialize(
- name:,
  data_key: DEFAULT_DATA_KEY,
  ignore_blank_data: false,
  ignore_blank_path: false,
+ name: '',
  path_key: DEFAULT_PATH_KEY,
  register: DEFAULT_REGISTER,
  separator: ''
@@ -20,7 +20,7 @@ module Burner
  class Yaml < JobWithRegister
  attr_reader :safe

- def initialize(name:, register: DEFAULT_REGISTER, safe: true)
+ def initialize(name: '', register: DEFAULT_REGISTER, safe: true)
  super(name: name, register: register)

  @safe = safe
@@ -15,7 +15,7 @@ module Burner
  class Echo < Job
  attr_reader :message

- def initialize(name:, message: '')
+ def initialize(message: '', name: '')
  super(name: name)

  @message = message.to_s
@@ -17,7 +17,7 @@ module Burner
  class Exist < Job
  attr_reader :disk, :path, :short_circuit

- def initialize(name:, path:, disk: {}, short_circuit: false)
+ def initialize(path:, disk: {}, name: '', short_circuit: false)
  super(name: name)

  raise ArgumentError, 'path is required' if path.to_s.empty?
@@ -14,7 +14,7 @@ module Burner
  class OpenFileBase < JobWithRegister
  attr_reader :binary, :disk, :path

- def initialize(name:, path:, binary: false, disk: {}, register: DEFAULT_REGISTER)
+ def initialize(path:, binary: false, disk: {}, name: '', register: DEFAULT_REGISTER)
  super(name: name, register: register)

  raise ArgumentError, 'path is required' if path.to_s.empty?
@@ -35,12 +35,12 @@ module Burner
  :resolver

  def initialize(
- name:,
  binary: false,
  data_key: DEFAULT_DATA_KEY,
  disk: {},
  ignore_blank_path: false,
  ignore_file_not_found: false,
+ name: '',
  path_key: DEFAULT_PATH_KEY,
  register: DEFAULT_REGISTER,
  separator: ''
@@ -12,11 +12,34 @@ require_relative 'open_file_base'
  module Burner
  module Library
  module IO
- # Write value to disk.
+ # Write value to disk. By default, written files are also logged as WrittenFile
+ # instances to the Payload#side_effects array. You can pass in
+ # supress_side_effect: true to disable this behavior.
  #
  # Expected Payload[register] input: anything.
  # Payload[register] output: whatever was passed in.
  class Write < OpenFileBase
+ attr_reader :supress_side_effect
+
+ def initialize(
+ path:,
+ binary: false,
+ disk: {},
+ name: '',
+ register: DEFAULT_REGISTER,
+ supress_side_effect: false
+ )
+ @supress_side_effect = supress_side_effect || false
+
+ super(
+ binary: binary,
+ disk: disk,
+ name: name,
+ path: path,
+ register: register
+ )
+ end
+
  def perform(output, payload)
  logical_filename = job_string_template(path, output, payload)
  physical_filename = nil
@@ -29,6 +52,10 @@ module Burner

  output.detail("Wrote to: #{physical_filename}")

+ return if supress_side_effect
+
+ output.detail("Saving to side effects: #{logical_filename}")
+
  side_effect = SideEffects::WrittenFile.new(
  logical_filename: logical_filename,
  physical_filename: physical_filename,
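
The new supress_side_effect option on io/write can be exercised as in the rough sketch below. The path, register contents, and Burner::Payload/Burner::Output construction are assumptions, and the option name is spelled exactly as it appears in this diff.

require 'burner'

payload = Burner::Payload.new(registers: { 'report' => "id,name\n1,abc\n" })

# The file is still written to disk, but because supress_side_effect is true
# no SideEffects::WrittenFile entry is appended to payload.side_effects.
job = Burner::Library::IO::Write.new(
  path: 'report.csv',
  register: 'report',
  supress_side_effect: true
)
job.perform(Burner::Output.new, payload)

payload.side_effects # => []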
@@ -0,0 +1,29 @@
+ # frozen_string_literal: true
+
+ #
+ # Copyright (c) 2020-present, Blue Marble Payroll, LLC
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+ #
+
+ module Burner
+ module Library
+ module Param
+ # Common logic shared across Param job subclasses.
+ class Base < JobWithRegister
+ BLANK = ''
+
+ attr_reader :param_key
+
+ def initialize(name: BLANK, param_key: BLANK, register: DEFAULT_REGISTER)
+ super(name: name, register: register)
+
+ @param_key = param_key.to_s
+
+ freeze
+ end
+ end
+ end
+ end
+ end
@@ -0,0 +1,30 @@
+ # frozen_string_literal: true
+
+ #
+ # Copyright (c) 2020-present, Blue Marble Payroll, LLC
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+ #
+
+ require_relative 'base'
+
+ module Burner
+ module Library
+ module Param
+ # Copy a register's value into a param key. Generally speaking you should only be
+ # mutating registers, that way the params stay true to the passed in params for the
+ # pipeline. But this job is available in case a param needs to be updated.
+ #
+ # Expected Payload[register] input: anything.
+ # Payload.params(param_key) output: whatever value was specified in the register.
+ class FromRegister < Base
+ def perform(output, payload)
+ output.detail("Pushing value from register: #{register} to param: #{param_key}")
+
+ payload.update_param(param_key, payload[register])
+ end
+ end
+ end
+ end
+ end
@@ -0,0 +1,28 @@
+ # frozen_string_literal: true
+
+ #
+ # Copyright (c) 2020-present, Blue Marble Payroll, LLC
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+ #
+
+ require_relative 'base'
+
+ module Burner
+ module Library
+ module Param
+ # Copy a param key's value into a register.
+ #
+ # Expected Payload.param(param_key) input: anything.
+ # Payload[register] output: whatever value was specified as the param_key's value.
+ class ToRegister < Base
+ def perform(output, payload)
+ output.detail("Pushing value to register: #{register} from param: #{param_key}")
+
+ payload[register] = payload.param(param_key)
+ end
+ end
+ end
+ end
+ end
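
Taken together, the two param jobs above move values between a pipeline's params and its registers. Below is a hedged sketch of both directions, using only payload.param, payload.update_param (via the jobs), and the register accessors shown in this diff; the Burner::Payload constructor arguments, the string keys, and Burner::Output.new are assumptions.

require 'burner'

payload = Burner::Payload.new(params: { 'tenant_id' => 42 })

# param/to_register: copy the 'tenant_id' param into the 'tenant' register.
Burner::Library::Param::ToRegister
  .new(param_key: 'tenant_id', register: 'tenant')
  .perform(Burner::Output.new, payload)

payload['tenant'] # => 42

# param/from_register: push a (possibly updated) register value back into the param.
payload['tenant'] = 43
Burner::Library::Param::FromRegister
  .new(param_key: 'tenant_id', register: 'tenant')
  .perform(Burner::Output.new, payload)

payload.param('tenant_id') # => 43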
@@ -18,7 +18,7 @@ module Burner
  class Csv < JobWithRegister
  attr_reader :byte_order_mark

- def initialize(name:, byte_order_mark: nil, register: DEFAULT_REGISTER)
+ def initialize(byte_order_mark: nil, name: '', register: DEFAULT_REGISTER)
  super(name: name, register: register)

  @byte_order_mark = Modeling::ByteOrderMark.resolve(byte_order_mark)
@@ -15,7 +15,7 @@ module Burner
  class Sleep < Job
  attr_reader :seconds

- def initialize(name:, seconds: 0)
+ def initialize(name: '', seconds: 0)
  super(name: name)

  @seconds = seconds.to_f
@@ -19,7 +19,7 @@ module Burner
  class Copy < Job
  attr_reader :from_register, :to_register

- def initialize(name:, to_register: DEFAULT_REGISTER, from_register: DEFAULT_REGISTER)
+ def initialize(from_register: DEFAULT_REGISTER, name: '', to_register: DEFAULT_REGISTER)
  super(name: name)

  @from_register = from_register.to_s
@@ -0,0 +1,37 @@
+ # frozen_string_literal: true
+
+ #
+ # Copyright (c) 2020-present, Blue Marble Payroll, LLC
+ #
+ # This source code is licensed under the MIT license found in the
+ # LICENSE file in the root directory of this source tree.
+ #
+
+ module Burner
+ module Library
+ module Value
+ # This job will nest the current value within a new outer hash. The specified key
+ # passed in will be the corresponding new hash key entry for the existing value.
+ #
+ # Expected Payload[from_register] input: anything.
+ # Payload[to_register] output: hash.
+ class Nest < JobWithRegister
+ DEFAULT_KEY = 'key'
+
+ attr_reader :key
+
+ def initialize(key: DEFAULT_KEY, name: '', register: Burner::DEFAULT_REGISTER)
+ super(name: name, register: register)
+
+ @key = key.to_s
+
+ freeze
+ end
+
+ def perform(_output, payload)
+ payload[register] = { key => payload[register] }
+ end
+ end
+ end
+ end
+ end
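
Finally, a small sketch of the new value/nest job above. The register name, sample value, and payload/output construction are assumptions; note that perform ignores its output argument (_output in the code above), so any output object will do.

require 'burner'

payload = Burner::Payload.new(registers: { 'patients' => [{ 'id' => 1 }] })

# Wraps the register's current value in a new outer hash under the given key.
Burner::Library::Value::Nest
  .new(key: 'patients', register: 'patients')
  .perform(Burner::Output.new, payload)

payload['patients'] # => { 'patients' => [{ 'id' => 1 }] }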