burner 1.0.0.pre.alpha.2 → 1.0.0.pre.alpha.7

Sign up to get free protection for your applications and to get access to all the features.
Files changed (53) hide show
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +2 -0
  3. data/README.md +57 -25
  4. data/burner.gemspec +3 -0
  5. data/exe/burner +2 -3
  6. data/lib/burner.rb +11 -0
  7. data/lib/burner/cli.rb +11 -9
  8. data/lib/burner/job.rb +29 -9
  9. data/lib/burner/job_with_register.rb +24 -0
  10. data/lib/burner/jobs.rb +21 -23
  11. data/lib/burner/library.rb +32 -0
  12. data/lib/burner/library/collection/arrays_to_objects.rb +75 -0
  13. data/lib/burner/library/collection/graph.rb +42 -0
  14. data/lib/burner/library/collection/objects_to_arrays.rb +88 -0
  15. data/lib/burner/library/collection/shift.rb +42 -0
  16. data/lib/burner/library/collection/transform.rb +66 -0
  17. data/lib/burner/library/collection/unpivot.rb +53 -0
  18. data/lib/burner/library/collection/validate.rb +89 -0
  19. data/lib/burner/library/collection/values.rb +49 -0
  20. data/lib/burner/library/deserialize/csv.rb +27 -0
  21. data/lib/burner/{jobs → library}/deserialize/json.rb +7 -6
  22. data/lib/burner/{jobs → library}/deserialize/yaml.rb +14 -8
  23. data/lib/burner/{jobs → library}/dummy.rb +4 -4
  24. data/lib/burner/{jobs → library}/echo.rb +5 -5
  25. data/lib/burner/{jobs → library}/io/base.rb +4 -10
  26. data/lib/burner/{jobs → library}/io/exist.rb +13 -11
  27. data/lib/burner/{jobs → library}/io/read.rb +9 -8
  28. data/lib/burner/{jobs → library}/io/write.rb +11 -8
  29. data/lib/burner/library/serialize/csv.rb +37 -0
  30. data/lib/burner/{jobs → library}/serialize/json.rb +7 -6
  31. data/lib/burner/{jobs → library}/serialize/yaml.rb +7 -6
  32. data/lib/burner/{jobs/set.rb → library/set_value.rb} +9 -8
  33. data/lib/burner/{jobs → library}/sleep.rb +4 -4
  34. data/lib/burner/modeling.rb +13 -0
  35. data/lib/burner/modeling/attribute.rb +29 -0
  36. data/lib/burner/modeling/attribute_renderer.rb +32 -0
  37. data/lib/burner/modeling/key_index_mapping.rb +29 -0
  38. data/lib/burner/modeling/validations.rb +23 -0
  39. data/lib/burner/modeling/validations/base.rb +35 -0
  40. data/lib/burner/modeling/validations/blank.rb +31 -0
  41. data/lib/burner/modeling/validations/present.rb +31 -0
  42. data/lib/burner/payload.rb +55 -10
  43. data/lib/burner/pipeline.rb +25 -6
  44. data/lib/burner/side_effects.rb +10 -0
  45. data/lib/burner/side_effects/written_file.rb +28 -0
  46. data/lib/burner/step.rb +2 -8
  47. data/lib/burner/util.rb +11 -0
  48. data/lib/burner/util/arrayable.rb +30 -0
  49. data/lib/burner/util/string_template.rb +42 -0
  50. data/lib/burner/version.rb +1 -1
  51. metadata +81 -16
  52. data/lib/burner/string_template.rb +0 -40
  53. data/lib/burner/written_file.rb +0 -28
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: '0499f990d92543479718a805654a9e070b3ea3fe9d8bf85aae170c16bdc47e51'
4
- data.tar.gz: 4dff77329307b83891fbb92f0a67aabbd4799b8600af14d15aa22c29cfdfc456
3
+ metadata.gz: 8cb851b5c317d3566c1b3b77c9c904e26970de8e0ce4329e2c8b83336c372b02
4
+ data.tar.gz: 0ad28e56c8b52ceede4ed321812e45edfaef2bcd4000428738c348e5e849092a
5
5
  SHA512:
6
- metadata.gz: 42655eba14a6397bc815f8b9e989f8485fb25a0981840ea4c6e69c47b565e487eb1a01c03caa952b4685421130711eea2e07a1cf3a62102e80a49a484be34931
7
- data.tar.gz: a1be281d0e8f136ecbed2e3bb577584f1faeb5b9afc85a7fcf6b88601e86baff2f36077742e79ff03f7e4d41dcc2e2e15c150657774ec17dc0111721851352dd
6
+ metadata.gz: cf5bc810ffa5dba106e6538dbab2f44e6763f5ad073da2b645e46eff3e976e61f9a0d551ae4d5ad9998c5c1d87b5e2913b902901020ad77f20b4c29800321df5
7
+ data.tar.gz: ef83de8173fbad28c2cc07c44f101175ebe9c60d5a9275d2e7280ba1b621a8c51082504ae5d5c917916d398a7a89d06ca7416dd08ba7148ebebb87cd42dd7b05
@@ -31,3 +31,5 @@ Style/TrailingCommaInHashLiteral:
31
31
  Style/TrailingCommaInArrayLiteral:
32
32
  Enabled: false
33
33
 
34
+ Metrics/ParameterLists:
35
+ CountKeywordArgs: false
data/README.md CHANGED
@@ -73,19 +73,21 @@ params = {
73
73
  input_file: 'input.json',
74
74
  output_file: 'output.yaml'
75
75
  }
76
+
77
+ payload = Burner::Payload.new(params: params)
76
78
  ````
77
79
 
78
80
  Assuming we are running this script from a directory where an `input.json` file exists, we can then programmatically process the pipeline:
79
81
 
80
82
  ````ruby
81
- Burner::Pipeline.make(pipeline).execute(params: params)
83
+ Burner::Pipeline.make(pipeline).execute(payload: payload)
82
84
  ````
83
85
 
84
86
  We should now see an output.yaml file created.
85
87
 
86
88
  Some notes:
87
89
 
88
- * Some values are able to be string-interpolated using the provided params. This allows for the passing runtime configuration/data into pipelines/jobs.
90
+ * Some values are able to be string-interpolated using the provided Payload#params. This allows for the passing of runtime configuration/data into pipelines/jobs.
89
91
  * The job's ID can be accessed using the `__id` key.
90
92
  * The current job's payload value can be accessed using the `__value` key.
91
93
  * Jobs can be re-used (just like the output_id and output_value jobs).
@@ -116,8 +118,9 @@ end
116
118
 
117
119
  string_out = StringOut.new
118
120
  output = Burner::Output.new(outs: string_out)
121
+ payload = Burner::Payload.new(params: params)
119
122
 
120
- Burner::Pipeline.make(pipeline).execute(output: output, params: params)
123
+ Burner::Pipeline.make(pipeline).execute(output: output, payload: payload)
121
124
 
122
125
  log = string_out.read
123
126
  ````
@@ -130,23 +133,23 @@ The value of `log` should now look similar to:
130
133
  [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - input_file: input.json
131
134
  [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - output_file: output.yaml
132
135
  [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] --------------------------------------------------------------------------------
133
- [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [1] Burner::Jobs::IO::Read::read
136
+ [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [1] Burner::Library::IO::Read::read
134
137
  [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Reading: spec/fixtures/input.json
135
138
  [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
136
- [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [2] Burner::Jobs::Echo::output_id
139
+ [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [2] Burner::Library::Echo::output_id
137
140
  [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - The job id is:
138
141
  [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
139
- [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [3] Burner::Jobs::Echo::output_value
142
+ [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [3] Burner::Library::Echo::output_value
140
143
  [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - The current value is:
141
144
  [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
142
- [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [4] Burner::Jobs::Deserialize::Json::parse
145
+ [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [4] Burner::Library::Deserialize::Json::parse
143
146
  [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
144
- [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [5] Burner::Jobs::Serialize::Yaml::convert
147
+ [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [5] Burner::Library::Serialize::Yaml::convert
145
148
  [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
146
- [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [6] Burner::Jobs::Echo::output_value
149
+ [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [6] Burner::Library::Echo::output_value
147
150
  [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - The current value is:
148
151
  [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
149
- [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [7] Burner::Jobs::IO::Write::write
152
+ [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] [7] Burner::Library::IO::Write::write
150
153
  [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Writing: output.yaml
151
154
  [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] - Completed in: 0.0 second(s)
152
155
  [8bdc394e-7047-4a1a-87ed-6c54ed690ed5 | 2020-10-14 13:49:59 UTC] --------------------------------------------------------------------------------
@@ -160,7 +163,7 @@ Notes:
160
163
 
161
164
  ### Command Line Pipeline Processing
162
165
 
163
- This library also ships with a built-in script `exe/burner` that illustrates using the `Burner::Cli` API. This class can take in an array of arguments (similar to a command-line) and execute a pipeline. The first argument is the path to a YAML file with the pipeline's configuration and each subsequent argument is a param in `key=value` form. Here is how the json-to-yaml example can utilize this interface:
166
+ This library also ships with a built-in script `burner` that illustrates using the `Burner::Cli` API. This class can take in an array of arguments (similar to a command-line) and execute a pipeline. The first argument is the path to a YAML file with the pipeline's configuration and each subsequent argument is a param in `key=value` form. Here is how the json-to-yaml example can utilize this interface:
164
167
 
165
168
  #### Create YAML Pipeline Configuration File
166
169
 
@@ -228,29 +231,56 @@ Burner::Cli.new(args).invoke
228
231
 
229
232
  This library only ships with very basic, rudimentary jobs that are meant to just serve as a baseline:
230
233
 
231
- * **deserialize/json** []: Treat input as a string and de-serialize it to JSON.
232
- * **deserialize/yaml** [safe]: Treat input as a string and de-serialize it to YAML. By default it will try and (safely de-serialize)[https://ruby-doc.org/stdlib-2.6.1/libdoc/psych/rdoc/Psych.html#method-c-safe_load] it (only using core classes). If you wish to de-serialize it to any class type, pass in `safe: false`
234
+ #### Collection
235
+
236
+ * **collection/arrays_to_objects** [mappings, register]: Convert an array of arrays to an array of objects.
237
+ * **collection/graph** [config, key, register]: Use [Hashematics](https://github.com/bluemarblepayroll/hashematics) to turn a flat array of objects into a deeply nested object tree.
238
+ * **collection/objects_to_arrays** [mappings, register]: Convert an array of objects to an array of arrays.
239
+ * **collection/shift** [amount, register]: Remove the first N number of elements from an array.
240
+ * **collection/transform** [attributes, exclusive, separator, register]: Iterate over all objects and transform each key per the attribute transformers specifications. If exclusive is set to false then the current object will be overridden/merged. Separator can also be set for key path support. This job uses [Realize](https://github.com/bluemarblepayroll/realize), which provides its own extendable value-transformation pipeline.
241
+ * **collection/unpivot** [pivot_set, register]: Take an array of objects and unpivot specific sets of keys into rows. Under the hood it uses [HashMath's Unpivot class](https://github.com/bluemarblepayroll/hash_math#unpivot-hash-key-coalescence-and-row-extrapolation).
242
+ * **collection/validate** [invalid_register, join_char, message_key, register, separator, validations]: Take an array of objects, run it through each declared validator, and split the objects into two registers. The valid objects will be split into the current register while the invalid ones will go into the invalid_register as declared. Optional arguments, join_char and message_key, help determine the compiled error messages. The separator option can be utilized to use dot-notation for validating keys. See each validation's options by viewing their classes within the `lib/modeling/validations` directory.
243
+ * **collection/values** [include_keys, register]: Take an array of objects and call `#values` on each object. If include_keys is true (it is false by default), then call `#keys` on the first object and inject that as a "header" object.
244
+
245
+ #### De-serialization
246
+
247
+ * **deserialize/csv** [register]: Take a CSV string and de-serialize into object(s). Currently it will return an array of arrays, with each nested array representing one row.
248
+ * **deserialize/json** [register]: Treat input as a string and de-serialize it to JSON.
249
+ * **deserialize/yaml** [register, safe]: Treat input as a string and de-serialize it to YAML. By default it will try and [safely de-serialize](https://ruby-doc.org/stdlib-2.6.1/libdoc/psych/rdoc/Psych.html#method-c-safe_load) it (only using core classes). If you wish to de-serialize it to any class type, pass in `safe: false`
250
+
251
+ #### IO
252
+
253
+ * **io/exist** [path, short_circuit]: Check to see if a file exists. The path parameter can be interpolated using `Payload#params`. If short_circuit is set to true (defaults to false) and the file does not exist then the pipeline will be short-circuited.
254
+ * **io/read** [binary, path, register]: Read in a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+read mode.
255
+ * **io/write** [binary, path, register]: Write to a local file. The path parameter can be interpolated using `Payload#params`. If the contents are binary, pass in `binary: true` to open it up in binary+write mode.
256
+
257
+ #### Serialization
258
+
259
+ * **serialize/csv** [register]: Take an array of arrays and create a CSV.
260
+ * **serialize/json** [register]: Convert value to JSON.
261
+ * **serialize/yaml** [register]: Convert value to YAML.
262
+
263
+ #### General
264
+
233
265
  * **dummy** []: Do nothing
234
- * **echo** [message]: Write a message to the output. The message parameter can be interpolated using params.
235
- * **io/exist** [path, short_circuit]: Check to see if a file exists. The path parameter can be interpolated using params. If short_circuit was set to true (defaults to false) and the file does not exist then the pipeline will be short-circuited.
236
- * **io/read** [binary, path]: Read in a local file. The path parameter can be interpolated using params. If the contents are binary, pass in `binary: true` to open it up in binary+read mode.
237
- * **io/write** [binary, path]: Write to a local file. The path parameter can be interpolated using params. If the contents are binary, pass in `binary: true` to open it up in binary+write mode.
238
- * **serialize/json** []: Convert value to JSON.
239
- * **serialize/yaml** []: Convert value to YAML.
240
- * **set** [value]: Set the value to any arbitrary value.
266
+ * **echo** [message]: Write a message to the output. The message parameter can be interpolated using `Payload#params`.
267
+ * **set** [register, value]: Set the value to any arbitrary value.
241
268
  * **sleep** [seconds]: Sleep the thread for X number of seconds.
242
269
 
270
+ Notes:
271
+
272
+ * If you see that a job accepts a 'register' attribute/argument, that indicates a job will access and/or mutate the payload. The register indicates which part of the payload the job will interact with. This allows jobs to be placed into 'lanes'. If register is not specified, then the default register is used.
243
273
 
244
274
  ### Adding & Registering Jobs
245
275
 
246
- Where this library shines is when additional jobs are plugged in. Burner uses its `Burner::Jobs` class as its class-level registry built with (acts_as_hashable)[https://github.com/bluemarblepayroll/acts_as_hashable]'s acts_as_hashable_factory directive.
276
+ Where this library shines is when additional jobs are plugged in. Burner uses its `Burner::Jobs` class as its class-level registry built with [acts_as_hashable](https://github.com/bluemarblepayroll/acts_as_hashable)'s acts_as_hashable_factory directive.
247
277
 
248
278
  Let's say we would like to register a job to parse a CSV:
249
279
 
250
280
  ````ruby
251
- class ParseCsv < Burner::Job
252
- def perform(output, payload, params)
253
- payload.value = CSV.parse(payload.value, headers: true).map(&:to_h)
281
+ class ParseCsv < Burner::JobWithRegister
282
+ def perform(output, payload)
283
+ payload[register] = CSV.parse(payload[register], headers: true).map(&:to_h)
254
284
 
255
285
  nil
256
286
  end
@@ -309,7 +339,9 @@ params = {
309
339
  output_file: File.join(TEMP_DIR, "#{SecureRandom.uuid}.yaml")
310
340
  }
311
341
 
312
- Burner::Pipeline.make(pipeline).execute(output: output, params: params)
342
+ payload = Burner::Payload.new(params: params)
343
+
344
+ Burner::Pipeline.make(pipeline).execute(output: output, payload: payload)
313
345
  ````
314
346
 
315
347
  ## Contributing
@@ -29,7 +29,10 @@ Gem::Specification.new do |s|
29
29
  s.required_ruby_version = '>= 2.5'
30
30
 
31
31
  s.add_dependency('acts_as_hashable', '~>1.2')
32
+ s.add_dependency('hashematics', '~>1.1')
33
+ s.add_dependency('hash_math', '~>1.2')
32
34
  s.add_dependency('objectable', '~>1.0')
35
+ s.add_dependency('realize', '~>1.2')
33
36
  s.add_dependency('stringento', '~>2.1')
34
37
 
35
38
  s.add_development_dependency('guard-rspec', '~>4.7')
data/exe/burner CHANGED
@@ -10,11 +10,10 @@
10
10
 
11
11
  require 'bundler/setup'
12
12
  require 'burner'
13
- require 'pry'
14
13
 
15
14
  if ARGV.empty?
16
- puts 'Usage: ./exe/burner package.yaml key=value key=value ...'
17
- exit
15
+ warn('Usage: ./exe/burner package.yaml key=value key=value ...')
16
+ exit 2 # Do not return 1, that is reserved for hard errors.
18
17
  end
19
18
 
20
19
  # This should return exit code of 1 if it raises any hard errors.
@@ -9,12 +9,23 @@
9
9
 
10
10
  require 'acts_as_hashable'
11
11
  require 'benchmark'
12
+ require 'csv'
12
13
  require 'forwardable'
14
+ require 'hash_math'
15
+ require 'hashematics'
13
16
  require 'json'
14
17
  require 'objectable'
18
+ require 'realize'
15
19
  require 'securerandom'
16
20
  require 'singleton'
17
21
  require 'stringento'
22
+ require 'time'
18
23
  require 'yaml'
19
24
 
25
+ # Common/Shared
26
+ require_relative 'burner/modeling'
27
+ require_relative 'burner/side_effects'
28
+ require_relative 'burner/util'
29
+
30
+ # Main Entrypoint(s)
20
31
  require_relative 'burner/cli'
@@ -12,18 +12,20 @@ require_relative 'pipeline'
12
12
  module Burner
13
13
  # Process a single string as a Pipeline. This is mainly to back the command-line interface.
14
14
  class Cli
15
- extend Forwardable
15
+ attr_reader :payload, :pipeline
16
16
 
17
- attr_reader :params, :pipeline
17
+ def initialize(args)
18
+ path = args.first
19
+ params = extract_cli_params(args)
20
+ config = read_yaml(path)
21
+ @pipeline = Burner::Pipeline.make(jobs: config['jobs'], steps: config['steps'])
22
+ @payload = Payload.new(params: params)
18
23
 
19
- def_delegators :pipeline, :execute
24
+ freeze
25
+ end
20
26
 
21
- def initialize(args)
22
- path = args.first
23
- cli_params = extract_cli_params(args)
24
- config = read_yaml(path)
25
- @pipeline = Burner::Pipeline.make(jobs: config['jobs'], steps: config['steps'])
26
- @params = (config['params'] || {}).merge(cli_params)
27
+ def execute
28
+ pipeline.execute(payload: payload)
27
29
  end
28
30
 
29
31
  private
@@ -7,29 +7,49 @@
7
7
  # LICENSE file in the root directory of this source tree.
8
8
  #
9
9
 
10
- require_relative 'string_template'
11
-
12
10
  module Burner
13
11
  # Abstract base class for all job subclasses. The only public method a subclass needs to
14
- # implement #perform(params, payload, reporter) and then you can register it for use using
12
+ # implement #perform(output, payload) and then you can register it for use using
15
13
  # the Burner::Jobs factory class method #register. An example of a registration:
16
14
  # Burner::Jobs.register('your_class', YourClass)
17
15
  class Job
16
+ include Util::Arrayable
18
17
  acts_as_hashable
19
18
 
20
- attr_reader :name, :string_template
19
+ attr_reader :name
21
20
 
22
21
  def initialize(name:)
23
22
  raise ArgumentError, 'name is required' if name.to_s.empty?
24
23
 
25
- @name = name.to_s
26
- @string_template = StringTemplate.instance
24
+ @name = name.to_s
25
+ end
26
+
27
+ # There are only a few requirements to be considered a valid Burner Job:
28
+ # 1. The class responds to #name
29
+ # 2. The class responds to #perform(output, payload)
30
+ #
31
+ # The #perform method takes in two arguments: output (an instance of Burner::Output)
32
+ # and payload (an instance of Burner::Payload). Jobs can leverage output to emit
33
+ # information to the pipeline's log(s). The payload is utilized to pass data from job to job,
34
+ # with its most important attribute being #value. The value attribute is mutable
35
+ # per the individual job's context (its meaning is unknown without understanding a job's
36
+ # expected input and output of #value). Therefore #value can mean anything and it is up to the
37
+ # engineers to clearly document the assumptions of its use.
38
+ #
39
+ # Returning false will short-circuit the pipeline right after the job method exits.
40
+ # Returning anything else besides false just means "continue".
41
+ def perform(output, _payload)
42
+ output.detail("#perform not implemented for: #{self.class.name}")
43
+
44
+ nil
27
45
  end
28
46
 
29
- private
47
+ protected
48
+
49
+ def job_string_template(expression, output, payload)
50
+ templatable_params = payload.params.merge(__id: output.id, __value: payload[''])
30
51
 
31
- def eval_string_template(expression, input)
32
- string_template.evaluate(expression, input)
52
+ Util::StringTemplate.instance.evaluate(expression, templatable_params)
33
53
  end
34
54
  end
35
55
  end
@@ -0,0 +1,24 @@
1
+ # frozen_string_literal: true
2
+
3
+ #
4
+ # Copyright (c) 2020-present, Blue Marble Payroll, LLC
5
+ #
6
+ # This source code is licensed under the MIT license found in the
7
+ # LICENSE file in the root directory of this source tree.
8
+ #
9
+
10
+ require_relative 'job'
11
+
12
+ module Burner
13
+ # Add on a register attribute to the configuration for a job. This indicates that a job
14
+ # either accesses and/or mutates the payload's registers.
15
+ class JobWithRegister < Job
16
+ attr_reader :register
17
+
18
+ def initialize(name:, register: '')
19
+ super(name: name)
20
+
21
+ @register = register.to_s
22
+ end
23
+ end
24
+ end
@@ -7,18 +7,7 @@
7
7
  # LICENSE file in the root directory of this source tree.
8
8
  #
9
9
 
10
- require_relative 'job'
11
- require_relative 'jobs/deserialize/json'
12
- require_relative 'jobs/deserialize/yaml'
13
- require_relative 'jobs/dummy'
14
- require_relative 'jobs/echo'
15
- require_relative 'jobs/io/exist'
16
- require_relative 'jobs/io/read'
17
- require_relative 'jobs/io/write'
18
- require_relative 'jobs/serialize/json'
19
- require_relative 'jobs/serialize/yaml'
20
- require_relative 'jobs/set'
21
- require_relative 'jobs/sleep'
10
+ require_relative 'library'
22
11
 
23
12
  module Burner
24
13
  # Main library of jobs. This file contains all the basic/default jobs. All other consumer
@@ -27,16 +16,25 @@ module Burner
27
16
  class Jobs
28
17
  acts_as_hashable_factory
29
18
 
30
- register 'deserialize/json', Deserialize::Json
31
- register 'deserialize/yaml', Deserialize::Yaml
32
- register 'dummy', '', Dummy
33
- register 'echo', Echo
34
- register 'io/exist', IO::Exist
35
- register 'io/read', IO::Read
36
- register 'io/write', IO::Write
37
- register 'serialize/json', Serialize::Json
38
- register 'serialize/yaml', Serialize::Yaml
39
- register 'set', Set
40
- register 'sleep', Sleep
19
+ register 'collection/arrays_to_objects', Library::Collection::ArraysToObjects
20
+ register 'collection/graph', Library::Collection::Graph
21
+ register 'collection/objects_to_arrays', Library::Collection::ObjectsToArrays
22
+ register 'collection/shift', Library::Collection::Shift
23
+ register 'collection/transform', Library::Collection::Transform
24
+ register 'collection/unpivot', Library::Collection::Unpivot
25
+ register 'collection/values', Library::Collection::Values
26
+ register 'deserialize/csv', Library::Deserialize::Csv
27
+ register 'deserialize/json', Library::Deserialize::Json
28
+ register 'deserialize/yaml', Library::Deserialize::Yaml
29
+ register 'dummy', '', Library::Dummy
30
+ register 'echo', Library::Echo
31
+ register 'io/exist', Library::IO::Exist
32
+ register 'io/read', Library::IO::Read
33
+ register 'io/write', Library::IO::Write
34
+ register 'serialize/csv', Library::Serialize::Csv
35
+ register 'serialize/json', Library::Serialize::Json
36
+ register 'serialize/yaml', Library::Serialize::Yaml
37
+ register 'set_value', Library::SetValue
38
+ register 'sleep', Library::Sleep
41
39
  end
42
40
  end
@@ -0,0 +1,32 @@
1
+ # frozen_string_literal: true
2
+
3
+ #
4
+ # Copyright (c) 2020-present, Blue Marble Payroll, LLC
5
+ #
6
+ # This source code is licensed under the MIT license found in the
7
+ # LICENSE file in the root directory of this source tree.
8
+ #
9
+
10
+ require_relative 'job_with_register'
11
+
12
+ require_relative 'library/collection/arrays_to_objects'
13
+ require_relative 'library/collection/graph'
14
+ require_relative 'library/collection/objects_to_arrays'
15
+ require_relative 'library/collection/shift'
16
+ require_relative 'library/collection/transform'
17
+ require_relative 'library/collection/unpivot'
18
+ require_relative 'library/collection/validate'
19
+ require_relative 'library/collection/values'
20
+ require_relative 'library/deserialize/csv'
21
+ require_relative 'library/deserialize/json'
22
+ require_relative 'library/deserialize/yaml'
23
+ require_relative 'library/dummy'
24
+ require_relative 'library/echo'
25
+ require_relative 'library/io/exist'
26
+ require_relative 'library/io/read'
27
+ require_relative 'library/io/write'
28
+ require_relative 'library/serialize/csv'
29
+ require_relative 'library/serialize/json'
30
+ require_relative 'library/serialize/yaml'
31
+ require_relative 'library/set_value'
32
+ require_relative 'library/sleep'
@@ -0,0 +1,75 @@
1
+ # frozen_string_literal: true
2
+
3
+ #
4
+ # Copyright (c) 2020-present, Blue Marble Payroll, LLC
5
+ #
6
+ # This source code is licensed under the MIT license found in the
7
+ # LICENSE file in the root directory of this source tree.
8
+ #
9
+
10
+ module Burner
11
+ module Library
12
+ module Collection
13
+ # Convert an array of arrays to an array of objects. Pass in an array of
14
+ # Burner::Modeling::KeyIndexMapping instances or hashable configurations which specifies
15
+ # the index-to-key mappings to use.
16
+ #
17
+ # Expected Payload#value input: array of arrays.
18
+ # Payload#value output: An array of hashes.
19
+ #
20
+ # An example using a configuration-first pipeline:
21
+ #
22
+ # config = {
23
+ # jobs: [
24
+ # {
25
+ # name: 'set',
26
+ # type: 'set_value',
27
+ # value: [
28
+ # [1, 'funky']
29
+ # ]
30
+ # },
31
+ # {
32
+ # name: 'map',
33
+ # type: 'collection/arrays_to_objects',
34
+ # mappings: [
35
+ # { index: 0, key: 'id' },
36
+ # { index: 1, key: 'name' }
37
+ # ]
38
+ # },
39
+ # {
40
+ # name: 'output',
41
+ # type: 'echo',
42
+ # message: 'value is currently: {__value}'
43
+ # },
44
+ #
45
+ # ],
46
+ # steps: %w[set map output]
47
+ # }
48
+ #
49
+ # Burner::Pipeline.make(config).execute
50
+ class ArraysToObjects < JobWithRegister
51
+ attr_reader :mappings
52
+
53
+ def initialize(name:, mappings: [], register: '')
54
+ super(name: name, register: register)
55
+
56
+ @mappings = Modeling::KeyIndexMapping.array(mappings)
57
+
58
+ freeze
59
+ end
60
+
61
+ def perform(_output, payload)
62
+ payload[register] = array(payload[register]).map { |array| index_to_key_map(array) }
63
+ end
64
+
65
+ private
66
+
67
+ def index_to_key_map(array)
68
+ mappings.each_with_object({}) do |mapping, memo|
69
+ memo[mapping.key] = array[mapping.index]
70
+ end
71
+ end
72
+ end
73
+ end
74
+ end
75
+ end