burner 1.6.0 → 1.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.tool-versions +1 -0
- data/CHANGELOG.md +36 -0
- data/README.md +60 -2
- data/lib/burner/data.rb +46 -0
- data/lib/burner/job.rb +2 -10
- data/lib/burner/job_set.rb +64 -0
- data/lib/burner/job_with_register.rb +8 -1
- data/lib/burner/jobs.rb +7 -0
- data/lib/burner/library.rb +7 -0
- data/lib/burner/library/collection/arrays_to_objects.rb +1 -1
- data/lib/burner/library/collection/coalesce.rb +14 -9
- data/lib/burner/library/collection/concatenate.rb +1 -1
- data/lib/burner/library/collection/graph.rb +1 -1
- data/lib/burner/library/collection/group.rb +15 -11
- data/lib/burner/library/collection/nested_aggregate.rb +1 -1
- data/lib/burner/library/collection/number.rb +51 -0
- data/lib/burner/library/collection/objects_to_arrays.rb +1 -1
- data/lib/burner/library/collection/pivot.rb +150 -0
- data/lib/burner/library/collection/shift.rb +1 -1
- data/lib/burner/library/collection/transform.rb +1 -1
- data/lib/burner/library/collection/unpivot.rb +1 -1
- data/lib/burner/library/collection/validate.rb +1 -1
- data/lib/burner/library/collection/values.rb +1 -1
- data/lib/burner/library/collection/zip.rb +1 -1
- data/lib/burner/library/compress/row_reader.rb +1 -1
- data/lib/burner/library/deserialize/yaml.rb +1 -1
- data/lib/burner/library/echo.rb +1 -1
- data/lib/burner/library/io/exist.rb +1 -1
- data/lib/burner/library/io/open_file_base.rb +1 -1
- data/lib/burner/library/io/row_reader.rb +1 -1
- data/lib/burner/library/io/write.rb +1 -1
- data/lib/burner/library/param/base.rb +29 -0
- data/lib/burner/library/param/from_register.rb +30 -0
- data/lib/burner/library/param/to_register.rb +28 -0
- data/lib/burner/library/serialize/csv.rb +1 -1
- data/lib/burner/library/sleep.rb +1 -1
- data/lib/burner/library/value/copy.rb +1 -1
- data/lib/burner/library/value/nest.rb +37 -0
- data/lib/burner/library/value/static.rb +1 -1
- data/lib/burner/library/value/transform.rb +38 -0
- data/lib/burner/payload.rb +39 -15
- data/lib/burner/pipeline.rb +6 -34
- data/lib/burner/util.rb +1 -0
- data/lib/burner/util/keyable.rb +23 -0
- data/lib/burner/version.rb +1 -1
- metadata +16 -5
```diff
@@ -16,6 +16,12 @@ module Burner
       # It is worth noting that the resulting hashes values are singular objects and not an array
       # like Ruby's Enumerable#group_by method.
       #
+      # If the insensitive option is set as true then each key's value will be coerced as
+      # a lowercase string. This can help provide two types of insensitivity: case and type
+      # insensitivity. This may be appropriate in some places but not others. If any other
+      # value coercion is needed then another option would be to first transform the records
+      # before grouping them.
+      #
       # An example of this specific job:
       #
       # input: [{ id: 1, code: 'a' }, { id: 2, code: 'b' }]
@@ -25,18 +31,22 @@ module Burner
       # Expected Payload[register] input: array of objects.
       # Payload[register] output: hash.
       class Group < JobWithRegister
-
+        include Util::Keyable
+
+        attr_reader :insensitive, :keys, :resolver

         def initialize(
-
+          insensitive: false,
           keys: [],
+          name: '',
           register: DEFAULT_REGISTER,
           separator: ''
         )
           super(name: name, register: register)

-          @
-          @
+          @insensitive = insensitive || false
+          @keys = Array(keys)
+          @resolver = Objectable.resolver(separator: separator.to_s)

           raise ArgumentError, 'at least one key is required' if @keys.empty?

@@ -50,18 +60,12 @@ module Burner
           output.detail("Grouping based on key(s): #{keys} for #{count} records(s)")

           grouped_records = payload[register].each_with_object({}) do |record, memo|
-            key = make_key(record)
+            key = make_key(record, keys, resolver, insensitive)
             memo[key] = record
           end

           payload[register] = grouped_records
         end
-
-        private
-
-        def make_key(record)
-          keys.map { |key| resolver.get(record, key) }
-        end
       end
     end
   end
```
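The new `insensitive` option above changes how grouping keys are built. Below is a minimal sketch of the described coercion, using plain hash access in place of the Objectable resolver; the gem's actual helper lives in the new `Util::Keyable` module, whose source is not shown in this diff.

```ruby
# Sketch only: mirrors the documented behavior, not the gem's Util::Keyable source.
def make_key(record, keys, insensitive)
  keys.map do |key|
    value = record[key] # the gem resolves this through Objectable's resolver instead
    insensitive ? value.to_s.downcase : value
  end
end

records = [
  { 'id' => 1, 'code' => 'ABC' },
  { 'id' => 2, 'code' => 'abc' }
]

records.map { |r| make_key(r, ['code'], false) } # => [["ABC"], ["abc"]] - two groups
records.map { |r| make_key(r, ['code'], true) }  # => [["abc"], ["abc"]] - one group
```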
```diff
@@ -21,7 +21,7 @@ module Burner
       class NestedAggregate < JobWithRegister
         attr_reader :key, :key_mappings, :resolver

-        def initialize(
+        def initialize(key:, key_mappings: [], name: '', register: DEFAULT_REGISTER, separator: '')
           super(name: name, register: register)

           raise ArgumentError, 'key is required' if key.to_s.empty?
```
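This signature change follows a pattern repeated throughout this release: `name:` is no longer a required keyword and defaults to an empty string (the validate, values, deserialize/yaml, io/exist, io/open_file_base, and io/row_reader hunks below make the same change). A small usage sketch, assuming only the signature shown above:

```ruby
require 'burner'

# Before this release, name: was a required keyword argument; now it can be omitted.
job = Burner::Library::Collection::NestedAggregate.new(key: 'lines')

# A name can still be supplied when the job participates in a larger pipeline.
named_job = Burner::Library::Collection::NestedAggregate.new(name: 'roll_up', key: 'lines')
```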
```diff
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+#
+# Copyright (c) 2020-present, Blue Marble Payroll, LLC
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+module Burner
+  module Library
+    module Collection
+      # This job can iterate over a set of records and sequence them (set the specified key to
+      # a sequential index value.)
+      #
+      # Expected Payload[register] input: array of objects.
+      # Payload[register] output: array of objects.
+      class Number < JobWithRegister
+        BLANK = ''
+        DEFAULT_KEY = 'number'
+        DEFAULT_START_AT = 1
+
+        attr_reader :key, :resolver, :start_at
+
+        def initialize(
+          key: DEFAULT_KEY,
+          name: BLANK,
+          register: Burner::DEFAULT_REGISTER,
+          separator: BLANK,
+          start_at: DEFAULT_START_AT
+        )
+          super(name: name, register: register)
+
+          @key = key.to_s
+          @resolver = Objectable.resolver(separator: separator)
+          @start_at = start_at.to_i
+
+          freeze
+        end
+
+        def perform(output, payload)
+          output.detail("Setting '#{key}' for each record with values starting at #{start_at}")
+
+          ensure_array(payload).each.with_index(start_at) do |record, index|
+            resolver.set(record, key, index)
+          end
+        end
+      end
+    end
+  end
+end
```
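Based on the `perform` implementation above, the Number job writes a sequential index into each record under `key`, starting at `start_at`. A plain-Ruby sketch of that loop, using direct hash assignment in place of the Objectable resolver:

```ruby
records = [
  { 'id' => 'a' },
  { 'id' => 'b' },
  { 'id' => 'c' }
]

start_at = 10
key      = 'number'

# Mirrors ensure_array(payload).each.with_index(start_at) from the job above.
records.each.with_index(start_at) do |record, index|
  record[key] = index
end

records
# => [{ 'id' => 'a', 'number' => 10 },
#     { 'id' => 'b', 'number' => 11 },
#     { 'id' => 'c', 'number' => 12 }]
```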
```diff
@@ -58,7 +58,7 @@ module Burner
         # nested hashes then set separator to '.'. For more information, see the underlying
         # library that supports this dot-notation concept:
         # https://github.com/bluemarblepayroll/objectable
-        def initialize(
+        def initialize(mappings: [], name: '', register: DEFAULT_REGISTER, separator: '')
           super(name: name, register: register)

           @mappings = Modeling::KeyIndexMapping.array(mappings)
```
```diff
@@ -0,0 +1,150 @@
+# frozen_string_literal: true
+
+#
+# Copyright (c) 2020-present, Blue Marble Payroll, LLC
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+module Burner
+  module Library
+    module Collection
+      # Take an array of objects and pivot a key into multiple keys. It essentially takes all
+      # the values for a key and creates N number of keys (one per value.)
+      # Under the hood it uses HashMath's Record and Table classes:
+      # https://github.com/bluemarblepayroll/hash_math
+      #
+      # An example of a normalized dataset that could be pivoted:
+      #
+      # records = [
+      #   { patient_id: 1, key: :first_name, value: 'bozo' },
+      #   { patient_id: 1, key: :last_name, value: 'clown' },
+      #   { patient_id: 2, key: :first_name, value: 'frank' },
+      #   { patient_id: 2, key: :last_name, value: 'rizzo' },
+      # ]
+      #
+      # Using the following job configuration:
+      #
+      # config = {
+      #   unique_key: :patient_id
+      # }
+      #
+      # Once ran through this job, it would set the register to:
+      #
+      # records = [
+      #   { patient_id: 1, first_name: 'bozo', last_name: 'clown' },
+      #   { patient_id: 2, first_name: 'frank', last_name: 'rizzo' },
+      # ]
+      #
+      # Expected Payload[register] input: array of objects.
+      # Payload[register] output: An array of objects.
+      class Pivot < JobWithRegister
+        DEFAULT_PIVOT_KEY = :key
+        DEFAULT_PIVOT_VALUE_KEY = :value
+
+        attr_reader :insensitive,
+                    :other_keys,
+                    :non_pivoted_keys,
+                    :pivot_key,
+                    :pivot_value_key,
+                    :resolver,
+                    :unique_keys
+
+        def initialize(
+          unique_keys:,
+          insensitive: false,
+          name: '',
+          other_keys: [],
+          pivot_key: DEFAULT_PIVOT_KEY,
+          pivot_value_key: DEFAULT_PIVOT_VALUE_KEY,
+          register: DEFAULT_REGISTER,
+          separator: ''
+        )
+          super(name: name, register: register)
+
+          @insensitive = insensitive || false
+          @pivot_key = pivot_key.to_s
+          @pivot_value_key = pivot_value_key.to_s
+          @resolver = Objectable.resolver(separator: separator)
+          @unique_keys = Array(unique_keys)
+          @other_keys = Array(other_keys)
+          @non_pivoted_keys = @unique_keys + @other_keys
+
+          freeze
+        end
+
+        def perform(output, payload)
+          objects = array(payload[register])
+          table = make_table(objects)
+
+          output.detail("Pivoting #{objects.length} object(s)")
+          output.detail("By key: #{pivot_key} and value: #{pivot_value_key}")
+
+          objects.each { |object| object_to_table(object, table) }
+
+          pivoted_objects = table.to_a.map(&:fields)
+
+          output.detail("Resulting dataset has #{pivoted_objects.length} object(s)")
+
+          payload[register] = pivoted_objects
+        end
+
+        private
+
+        def resolve_key(object)
+          key_to_use = resolver.get(object, pivot_key)
+
+          make_key(key_to_use)
+        end
+
+        def make_key(value)
+          insensitive ? value.to_s.downcase : value
+        end
+
+        def make_row_id(object)
+          unique_keys.map { |k| make_key(resolver.get(object, k)) }
+        end
+
+        def make_key_map(objects)
+          objects.each_with_object({}) do |object, key_map|
+            key = resolver.get(object, pivot_key)
+            unique_key = make_key(key)
+
+            key_map[unique_key] ||= Set.new
+
+            key_map[unique_key] << key
+          end
+        end
+
+        def make_record(objects)
+          key_map = make_key_map(objects)
+          keys = non_pivoted_keys + key_map.values.map(&:first)
+
+          HashMath::Record.new(keys)
+        end
+
+        def make_table(objects)
+          HashMath::Table.new(make_record(objects))
+        end
+
+        def object_to_table(object, table)
+          row_id = make_row_id(object)
+
+          non_pivoted_keys.each do |key|
+            value = resolver.get(object, key)
+
+            table.add(row_id, key, value)
+          end
+
+          key_to_use = resolve_key(object)
+          value_to_use = resolver.get(object, pivot_value_key)
+
+          table.add(row_id, key_to_use, value_to_use)
+
+          self
+        end
+      end
+    end
+  end
+end
```
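The comment block in the new job already documents the expected input and output shape. A plain-Ruby sketch of that same transformation (it does not call the gem or HashMath; it only shows how key/value rows collapse into one object per unique key):

```ruby
records = [
  { patient_id: 1, key: :first_name, value: 'bozo' },
  { patient_id: 1, key: :last_name,  value: 'clown' },
  { patient_id: 2, key: :first_name, value: 'frank' },
  { patient_id: 2, key: :last_name,  value: 'rizzo' }
]

# Group rows by the unique key, then fold each row's key/value pair into one object.
pivoted = records.group_by { |r| r[:patient_id] }.map do |patient_id, rows|
  rows.each_with_object({ patient_id: patient_id }) do |row, memo|
    memo[row[:key]] = row[:value]
  end
end

pivoted
# => [{ patient_id: 1, first_name: 'bozo', last_name: 'clown' },
#     { patient_id: 2, first_name: 'frank', last_name: 'rizzo' }]
```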
```diff
@@ -23,7 +23,7 @@ module Burner

         attr_reader :amount

-        def initialize(
+        def initialize(amount: DEFAULT_AMOUNT, name: '', register: DEFAULT_REGISTER)
           super(name: name, register: register)

           @amount = amount.to_i
```
```diff
@@ -29,10 +29,10 @@ module Burner
                     :validations

         def initialize(
-          name:,
           invalid_register: DEFAULT_INVALID_REGISTER,
           join_char: DEFAULT_JOIN_CHAR,
           message_key: DEFAULT_MESSAGE_KEY,
+          name: '',
           register: DEFAULT_REGISTER,
           separator: '',
           validations: []
```
```diff
@@ -19,7 +19,7 @@ module Burner
       class Values < JobWithRegister
         attr_reader :include_keys

-        def initialize(
+        def initialize(include_keys: false, name: '', register: DEFAULT_REGISTER)
           super(name: name, register: register)

           @include_keys = include_keys || false
```
```diff
@@ -20,7 +20,7 @@ module Burner
       class Yaml < JobWithRegister
         attr_reader :safe

-        def initialize(name
+        def initialize(name: '', register: DEFAULT_REGISTER, safe: true)
           super(name: name, register: register)

           @safe = safe
```
data/lib/burner/library/echo.rb
CHANGED
```diff
@@ -17,7 +17,7 @@ module Burner
       class Exist < Job
         attr_reader :disk, :path, :short_circuit

-        def initialize(
+        def initialize(path:, disk: {}, name: '', short_circuit: false)
           super(name: name)

           raise ArgumentError, 'path is required' if path.to_s.empty?
```
```diff
@@ -14,7 +14,7 @@ module Burner
       class OpenFileBase < JobWithRegister
         attr_reader :binary, :disk, :path

-        def initialize(
+        def initialize(path:, binary: false, disk: {}, name: '', register: DEFAULT_REGISTER)
           super(name: name, register: register)

           raise ArgumentError, 'path is required' if path.to_s.empty?
```
```diff
@@ -35,12 +35,12 @@ module Burner
                     :resolver

         def initialize(
-          name:,
           binary: false,
           data_key: DEFAULT_DATA_KEY,
           disk: {},
           ignore_blank_path: false,
           ignore_file_not_found: false,
+          name: '',
           path_key: DEFAULT_PATH_KEY,
           register: DEFAULT_REGISTER,
           separator: ''
```
```diff
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+#
+# Copyright (c) 2020-present, Blue Marble Payroll, LLC
+#
+# This source code is licensed under the MIT license found in the
+# LICENSE file in the root directory of this source tree.
+#
+
+module Burner
+  module Library
+    module Param
+      # Common logic shared across Param job subclasses.
+      class Base < JobWithRegister
+        BLANK = ''
+
+        attr_reader :param_key
+
+        def initialize(name: BLANK, param_key: BLANK, register: DEFAULT_REGISTER)
+          super(name: name, register: register)
+
+          @param_key = param_key.to_s
+
+          freeze
+        end
+      end
+    end
+  end
+end
```
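Base stores the `param_key` shared by the new param job family (param/from_register and param/to_register in the file list above). A hypothetical subclass sketch, assuming a Payload method for writing params; the gem's actual param jobs and params API live in data/lib/burner/library/param/ and data/lib/burner/payload.rb:

```ruby
module Burner
  module Library
    module Param
      # Hypothetical example subclass; not the gem's FromRegister/ToRegister implementation.
      class CopyRegisterToParam < Base
        def perform(output, payload)
          output.detail("Copying register '#{register}' to param '#{param_key}'")

          # update_param is an assumed method name for this sketch; see payload.rb in this
          # release for the actual params read/write API.
          payload.update_param(param_key, payload[register])
        end
      end
    end
  end
end
```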