pal_tool 0.2.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.rspec +3 -0
- data/.rubocop.yml +132 -0
- data/CODE_OF_CONDUCT.md +84 -0
- data/Dockerfile +10 -0
- data/Gemfile +14 -0
- data/Gemfile.lock +72 -0
- data/LICENSE.txt +21 -0
- data/README.md +124 -0
- data/Rakefile +12 -0
- data/bin/console +15 -0
- data/bin/setup +8 -0
- data/exe/pal +47 -0
- data/lib/pal/common/local_file_utils.rb +37 -0
- data/lib/pal/common/object_helpers.rb +27 -0
- data/lib/pal/common/safe_hash_parse.rb +87 -0
- data/lib/pal/configuration.rb +77 -0
- data/lib/pal/handler/base.rb +138 -0
- data/lib/pal/handler/definitions/aws_cur.json +8 -0
- data/lib/pal/handler/manager.rb +30 -0
- data/lib/pal/handler/processor.rb +84 -0
- data/lib/pal/log.rb +29 -0
- data/lib/pal/main.rb +63 -0
- data/lib/pal/operation/actions.rb +106 -0
- data/lib/pal/operation/exporter.rb +183 -0
- data/lib/pal/operation/filter_evaluator.rb +249 -0
- data/lib/pal/operation/processor_context.rb +50 -0
- data/lib/pal/operation/projection.rb +302 -0
- data/lib/pal/plugin.rb +61 -0
- data/lib/pal/request/metadata.rb +19 -0
- data/lib/pal/request/runbook.rb +54 -0
- data/lib/pal/version.rb +5 -0
- data/lib/pal.rb +43 -0
- data/plugins/PLUGINS.md +1 -0
- data/plugins/operation/terminal_exporter_impl.rb +14 -0
- data/templates/DOCUMENTATION.md +46 -0
- data/templates/aws/data_transfer/data_transfer_breakdown.json +93 -0
- data/templates/aws/ec2/ec2_compute_hourly_breakdown.json +63 -0
- data/templates/aws/ec2/ec2_operation_breakdown.json +64 -0
- data/templates/aws/ec2/ec2_spend_breakdown.json +63 -0
- data/templates/aws/global_resource_and_usage_type_costs.json +41 -0
- data/templates/aws/kms/kms_usage_counts.json +52 -0
- data/templates/aws/kms/kms_usage_list.json +80 -0
- data/templates/aws/kms/list_of_kms_keys.json +57 -0
- data/templates/aws/reserved_instances/all_reserved_instance_expiries.json +41 -0
- data/templates/aws/reserved_instances/reserved_instance_opportunities.json +60 -0
- data/templates/aws/summary_cost_between_date_range.json +43 -0
- data/templates/aws/summary_daily_breakdown_costs.json +39 -0
- data/templates/azure/global_resource_type_summary.json +47 -0
- metadata +136 -0
@@ -0,0 +1,87 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require "pal"
|
4
|
+
require "json"
|
5
|
+
require "jsonpath"
|
6
|
+
|
7
|
+
module Pal
  # The most lazy way to find things in hashes and JSON.
  # Provides default and optional params.
  # Provides safe navigation with hash key dot notation ('this.is.a.key').
  class SafeHashParse
    class << self
      # Extract values from a JSON string via a JSONPath expression.
      #
      # @param [String] json_str raw JSON document
      # @param [Object] key JSONPath expression (String or Symbol)
      # @param [Boolean] optional when true, parse failures and misses return [default]
      # @param [Object] default fallback value (wrapped in an Array)
      # @return [Array] matched values; [] when nothing is found and not optional
      def extract_from_json(json_str, key, optional=false, default=nil)
        val = JsonPath.new(key.to_s).on(json_str)
        return val if val && !val.empty?
        return [] unless optional

        [default]
      rescue JSON::ParserError, MultiJson::ParseError, ArgumentError => e
        raise e unless optional

        [default]
      end

      # Walk a nested Hash using dot-notation, e.g. "a.b.c".
      # Both String- and Symbol-keyed hashes are supported.
      #
      # @param [Hash] hash
      # @param [Object] search_key dot-notation key (String or Symbol)
      # @param [Boolean] optional when true, a miss returns default instead of nil
      # @param [Object, nil] default
      # @return [Object, nil]
      # rubocop:disable Metrics/AbcSize
      # rubocop:disable Metrics/CyclomaticComplexity
      # rubocop:disable Metrics/PerceivedComplexity
      def extract_from_hash(hash, search_key, optional=false, default=nil)
        keys = format_key(search_key)
        last_level = hash
        searched = nil

        keys.each_with_index do |key, index|
          # Bug fix: the guard previously checked only key?(key.to_s), so
          # Symbol-keyed hashes never matched even though the lookups below
          # fall back to the Symbol form. Accept either key style.
          break unless last_level.is_a?(Hash) &&
                       (last_level.key?(key.to_s) || last_level.key?(key.to_sym))

          if index + 1 == keys.length
            searched = last_level[key.to_s] || last_level[key.to_sym]
          else
            last_level = last_level[key.to_s] || last_level[key.to_sym]
          end
        end

        return searched if searched
        return nil unless optional

        default
      end
      # rubocop:enable Metrics/AbcSize
      # rubocop:enable Metrics/CyclomaticComplexity
      # rubocop:enable Metrics/PerceivedComplexity

      # Normalise a key into an Array of lowercase Symbols.
      #
      # @param [Object] key
      # @return [Array<Symbol>]
      # @raise [ArgumentError] when key is neither a String nor a Symbol
      def format_key(key)
        return [key.downcase] if key.is_a?(Symbol)

        if key.is_a?(String)
          return [key.downcase.to_sym] unless key.include?(".")

          return key.to_s.split(".").map { |s| s.downcase.to_sym }
        end

        raise ArgumentError, "Key [#{key}] must be either a String or Symbol"
      end

      # Collect every leaf value from a JSON document.
      # @param [String] json
      # @return [Array]
      def all_values_from_json(json)
        all_values(JSON.parse(json))
      end

      # Collect every leaf value from a (possibly nested) Hash.
      # @param [Hash] hash
      # @return [Array]
      def all_values(hash)
        hash.flat_map { |_k, v| (v.is_a?(Hash) ? all_values(v) : [v]) }
      end
    end
  end
end
|
@@ -0,0 +1,77 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require "singleton"
|
4
|
+
require "json"
|
5
|
+
require "pal/log"
|
6
|
+
|
7
|
+
module Pal
  # Configuration management module for Pal
  module Configuration
    include Log

    # Fetch the globally registered configuration.
    # @return [Config]
    # @raise [RuntimeError] when no config has been registered yet
    def config
      conf = ConfigurationSource.instance.config
      raise "Set config first" unless conf

      conf
    end

    # Register the configuration for this run.
    # @param [Config] request_config
    def register_config(request_config)
      log_info "Setting config"
      ConfigurationSource.instance.load_config(request_config)
    end

    # Config data class - holds configuration settings.
    class Config
      attr_accessor :source_file_loc, :template_file_loc, :output_dir

      # Validate the configuration, logging each problem found.
      # @return [Boolean] true when valid
      # @raise [Pal::ValidationError] when any required property is missing/invalid
      def validate
        errors = decorate_errors

        if errors.size.positive?
          errors.each { |x| Pal.logger.info x }
          raise Pal::ValidationError.new(errors, "Invalid request.")
        end

        true
      end

      # Can probably remove this
      # @return [Hash] parsed template JSON
      def read_template_file
        JSON.parse(File.read(@template_file_loc))
      end

      # @return [Array<String>] each entry of the comma-separated source list
      def all_source_files
        @source_file_loc.split(",")
      end

      private

      # @return [Array<String>] human-readable validation errors
      def decorate_errors
        # Add directory validation
        # Check billing file is a valid billing file
        errors = []
        errors << "Missing property: template file [-t]." unless @template_file_loc
        errors << "Missing property: input file [-s]." unless @source_file_loc
        # Bug fix: @source_file_loc may be a comma-separated list (see
        # #all_source_files), so each entry must be checked individually —
        # File.exist? on the raw joined string always failed for multi-file runs.
        missing = (@source_file_loc || "").split(",").reject { |f| File.exist?(f) }
        errors << "File not found: billing file must exist" unless missing.empty?
        errors
      end
    end

    # Config storage source for access, stored as singleton.
    class ConfigurationSource
      include Singleton

      attr_reader :config

      # Store the config for later retrieval via #config.
      # @param [Config] config
      # @return [Config]
      def load_config(config)
        @config = config
      end
    end
  end
end
|
@@ -0,0 +1,138 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require "pal"
|
4
|
+
|
5
|
+
module Pal
  module Handler
    # Abstract template for runbook handlers. Subclasses supply the CSV
    # processor and per-file parse strategy; this class drives the run:
    # build a processor context, parse every configured source file, and
    # collect the rows that pass the runbook's filters.
    class Base
      include Configuration
      include Log

      # @param [Pal::Request::Runbook] runbook
      def initialize(runbook)
        @runbook = runbook
      end

      # Parse all configured source files and collect candidate rows into
      # a fresh context.
      # @return [Operation::ProcessorContext]
      # rubocop:disable Metrics/AbcSize
      def process_runbook
        log_debug("Processing runbook started, setting up context.")
        ctx = Operation::ProcessorContext.new
        ctx.column_type_definitions = retrieve_column_definitions

        # Get CSV parser
        # Each impl needs to return a hash of candidate columns and values
        # Extract headers
        # Extract values

        log_debug("Calling off to parse impl for CSV processing.")

        # Different impls may choose to stream file, so we hand in a location and let it decide.

        config.all_source_files.each_with_index do |file, idx|
          log_info "Opening file [#{file}][#{idx}]"

          # Only rows that pass the runbook's filters become candidates.
          _parse_file(ctx, _csv_processor(file)) do |row|
            ctx.add_candidate(row) if should_include?(@runbook.filters, row, ctx.column_headers)
          end
        end

        log_info "Process completed with #{ctx.candidates.size} candidate records found."

        ctx
      end
      # rubocop:enable Metrics/AbcSize

      # Decide whether a row passes the filters; no filters means include all.
      # @return [Boolean]
      # @param [Pal::Operation::FilterEvaluator] filter_eval
      # @param [Array] row
      # @param [Hash] column_headers
      def should_include?(filter_eval, row, column_headers)
        return true unless filter_eval

        filter_eval.test_property(row, column_headers)
      end

      # Load the bundled column definitions for this handler type
      # (definitions/<type>.json) and merge the runbook overrides on top;
      # overrides win on key collisions. Returns just the overrides when
      # no definition file exists for the type.
      # @return [Hash, nil]
      def retrieve_column_definitions
        overrides = @runbook.column_overrides || {}
        path = File.join(File.dirname(__FILE__), "definitions/#{_type}.json")

        return overrides unless File.exist?(path)

        default_defs = JSON.parse(File.read(path))
        default_defs.merge(overrides)
      end

      protected

      # @abstract Parse one file, yielding each data row to the block.
      # @param [ProcessorContext] _ctx
      # @param [CSVProcessor] _processor
      # @param [Proc] _block
      # @return [Hash]
      def _parse_file(_ctx, _processor, &_block)
        raise NotImplementedError, "#{self.class} has not implemented method '#{__method__}'"
      end

      # @abstract Build the CSV processor for a given source file.
      # @param [String] _source_file_loc
      # @return [CSVProcessor]
      def _csv_processor(_source_file_loc)
        raise NotImplementedError, "#{self.class} has not implemented method '#{__method__}'"
      end

      # @abstract Handler type key; also names the definition JSON file.
      # @return [String]
      def _type
        raise NotImplementedError, "#{self.class} has not implemented method '#{__method__}'"
      end

      # Placeholder — header extraction is performed inside the parse impls.
      def _extract_headers; end
    end

    # Generic has first row column headers, then data rows.
    class GenericCSVHandlerImpl < Base
      include Log

      # @param [ProcessorContext] ctx
      # @param [Pal::Operation::CSVProcessor] csv_processor
      # @param [Proc] _block
      # @return [Hash]
      # ---
      # Each impl needs to return a hash of candidate columns and values
      # eg. { col_name: col_value, col_name_2: col_value_2 }
      def _parse_file(ctx, csv_processor, &_block)
        log_info("Starting to process file, using #{csv_processor.class} processor for #{_type} CUR file.")
        ctx.current_file_row_count = 0

        csv_processor.parse(ctx, header: :none) do |row|
          # The processor increments current_file_row_count before yielding
          # (see RCSVProcessorImpl), so a count of 1 marks this file's header row.
          if ctx.current_file_row_count == 1
            ctx.extract_column_headers(row)
            next
          end

          yield row
        end
      end

      # @param [String] source_file_loc
      # @return [Pal::Operation::CSVProcessor]
      def _csv_processor(source_file_loc)
        Operation::CSVProcessor.retrieve_default_processor(source_file_loc)
      end

      # @return [String]
      def _type
        "generic"
      end
    end

    # AWS Cost & Usage Report handler — same layout as the generic CSV
    # handler, keyed to the aws_cur column definition file.
    class AwsCurHandlerImpl < GenericCSVHandlerImpl
      # @return [String]
      def _type
        "aws_cur"
      end
    end
  end
end
|
@@ -0,0 +1,30 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module Pal
  module Handler
    # Wraps a concrete handler implementation and drives runbook
    # execution end to end: process, then export.
    class Manager
      include Log

      attr_accessor :handler

      # @param [Base] handler concrete handler implementation
      # @raise [TypeError] when handler is not a Pal::Handler::Base
      def initialize(handler)
        # Idiomatic raise (class, message); message now names the real base class.
        raise TypeError, "Service must be type of Pal::Handler::Base" unless handler.is_a? Base

        @handler = handler
      end

      # Execute the runbook via the wrapped handler, then export results.
      # @param [Pal::Request::Runbook] runbook
      # @return [Array, Hash, nil] exporter output, or nil when no exporter is configured
      def process_runbook(runbook)
        Pal.logger.info("Beginning execution of playbook ...")
        ctx = @handler.process_runbook

        log_info "No exporter defined." unless runbook.exporter
        log_info "No candidates found." unless ctx.candidates.size.positive?

        # Bug fix: previously perform_export was called unconditionally,
        # raising NoMethodError on nil right after logging that no
        # exporter was defined. Safe navigation skips the export instead.
        runbook.exporter&.perform_export(ctx)
      end
    end
  end
end
|
@@ -0,0 +1,84 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require "pal"
|
4
|
+
require "pal/operation/processor_context"
|
5
|
+
|
6
|
+
# Processor for CSV extraction
|
7
|
+
module Pal
  module Operation
    # Base class for CSV impls, we can define strategy on memory usage needs based on
    # potential issues from file size.
    # TODO: We probably want to break away from this being a "CSV"-only file type later
    # Needs more thinking
    class CSVProcessor
      include Pal::Log

      class << self
        # Strategy to return correct type - memory or performance focused.
        # @param [String] csv_file_location
        # @return [CSVProcessor] the default (rcsv-backed) implementation
        def retrieve_default_processor(csv_file_location)
          Pal.logger.info("Default processor has been requested. No further action required.")
          RCSVProcessorImpl.new(csv_file_location)
        end
      end

      attr_accessor :csv_file_location

      # @param [String] csv_file_location path of the CSV file on disk
      def initialize(csv_file_location)
        @csv_file_location = csv_file_location
      end

      # Template method — hands the real work to the subclass strategy.
      # @param [ProcessorContext] ctx
      # @param [Hash] opts parser options forwarded to the impl
      # @param [Proc] block invoked once per parsed row
      def parse(ctx, opts={}, &block)
        _parse_impl(ctx, opts, &block)
      end

      private

      # @abstract Subclasses implement their parse strategy here.
      # @param [ProcessorContext] _ctx
      # @param [Hash] _opts
      # @param [Proc] _block
      def _parse_impl(_ctx, _opts, &_block)
        raise NotImplementedError, "#{self.class} has not implemented method '#{__method__}'"
      end

      # Read the whole file into memory in one go.
      # @param [String] file_location
      # @return [String] file contents
      def read_file(file_location)
        log_info("Reading file from disk located at #{file_location}")
        Common::LocalFileUtils.read_file(file_location)
      end

      # @abstract Streaming strategy — not implemented by any subclass yet.
      # @param [String] _file_location
      def stream_file(_file_location)
        raise NotImplementedError, "#{self.class} has not implemented method '#{__method__}'"
      end
    end
  end
end
|
59
|
+
|
60
|
+
require "rcsv"

module Pal
  module Operation
    # rCSV impl
    class RCSVProcessorImpl < CSVProcessor
      private

      # Parse the CSV in memory with Rcsv, bumping both context row
      # counters before handing each row to the caller's block.
      # @param [ProcessorContext] ctx
      # @param [Hash] opts forwarded verbatim to Rcsv.parse
      # @param [Proc] _block
      # @yield [Array] row
      # @return [nil] when no block is supplied
      def _parse_impl(ctx, opts={}, &_block)
        return nil unless block_given?

        contents = read_file(@csv_file_location)
        Rcsv.parse(contents, opts) do |row|
          ctx.current_file_row_count += 1
          ctx.total_row_count += 1

          yield row
        end
      end
    end
  end
end
|
data/lib/pal/log.rb
ADDED
@@ -0,0 +1,29 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require "pal"
|
4
|
+
|
5
|
+
module Pal
  # Thin logging mixin — every helper delegates straight to the shared
  # Pal.logger instance.
  module Log
    # Generate log_debug / log_info / log_warn, each forwarding its
    # message to the matching severity method on Pal.logger.
    %i[debug info warn].each do |severity|
      define_method(:"log_#{severity}") do |message|
        Pal.logger.public_send(severity, message)
      end
    end

    # Log an error message, plus the exception backtrace when one is given.
    # @param [String] message
    # @param [Exception, nil] exception
    def log_error(message, exception=nil)
      Pal.logger.error(message)
      return if exception.nil?

      Pal.logger.error(exception.backtrace.join("\n"))
    end
  end
end
|
data/lib/pal/main.rb
ADDED
@@ -0,0 +1,63 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require "pal"
|
4
|
+
require "pal/configuration"
|
5
|
+
require "pal/plugin"
|
6
|
+
require "pal/handler/base"
|
7
|
+
|
8
|
+
module Pal
  # Entry point: wires together configuration, plugins, runbook parsing
  # and handler dispatch for a single run.
  class Main
    include Log
    include Plugin
    include Configuration

    # @return [Pal::Request::Runbook]
    attr_accessor :runbook

    # @return [Pal::Handler::Manager]
    attr_accessor :manager

    # @param [Pal::Config] config
    def initialize(config)
      register_config(config)
    end

    # Load plugins, then build the runbook and its service manager.
    def setup
      register_plugins

      @runbook = create_runbook(config.template_file_loc)
      @manager = create_service_manager
    end

    # Execute the configured runbook.
    # @return [Array, Hash]
    def process
      @manager.process_runbook(@runbook)
    end

    # Parse a runbook template from disk. Relative paths are resolved
    # against this file's directory; absolute paths are used as-is.
    # @param [String] file_location
    # @return [Pal::Request::Runbook]
    # @raise [JSON::ParserError] when the template is not valid JSON
    def create_runbook(file_location)
      resolved_path =
        if file_location.start_with?("/")
          file_location
        else
          File.join(File.dirname(__FILE__), file_location)
        end

      log_debug "Attempting to read file from [#{resolved_path}]"
      log_debug "Script executed from [#{__dir__}]"

      Pal::Request::Runbook.new.from_json(File.read(resolved_path))
    rescue JSON::ParserError => e
      log_error("Malformed JSON request for file [#{file_location}]")
      raise e, "Malformed JSON request for file [#{file_location}]"
    end

    # Resolve the handler impl named in the runbook metadata and wrap it
    # in a manager.
    # @return [Pal::Handler::Manager]
    # @raise [NameError] when no matching handler class exists
    def create_service_manager
      handler_class_name = "Pal::Handler::#{@runbook.metadata.handler}HandlerImpl"
      handler_impl = Kernel.const_get(handler_class_name).new(@runbook)
      Pal::Handler::Manager.new(handler_impl)
    rescue NameError => e
      log_error("Cannot find a valid handler impl for #{@runbook.metadata.handler}")
      raise e
    end
  end
end
|
@@ -0,0 +1,106 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require "pal"
|
4
|
+
require "pal/operation/projection"
|
5
|
+
|
6
|
+
module Pal
  module Operation
    # Post-processing actions applied to filtered rows: group-by,
    # projection, and sort.
    class Actions
      include ObjectHelpers
      include Log

      # @return [Array<String>] column names to group rows by
      attr_accessor :group_by

      # @return [String] column name used to sort projected rows (descending)
      attr_accessor :sort_by

      # @return [Projection]
      attr_reader :projection

      # Build the projection impl from its definition hash, e.g.
      # {"type" => "sum", "property" => "cost"} -> Pal::Operation::SumProjectionImpl.
      # @param [Hash] opts
      def projection=(opts)
        clazz_name = "Pal::Operation::#{opts["type"]&.to_s&.capitalize || "Default"}ProjectionImpl"
        @projection = Kernel.const_get(clazz_name).new(opts["property"] || nil)
      end

      # @return [Boolean] true when this action set can be processed
      def processable?
        # Do better in the future
        !@group_by.nil?
      end

      # Apply group-by, projection and sort to the candidate rows.
      # @param [Array] rows
      # @param [Hash] column_headers column name => index
      # @return [Array] rows, column_headers
      def process(rows, column_headers)
        # Perf fix: skip the O(n) grouping pass entirely when no projection
        # will consume it — previously rows were grouped and the result
        # thrown away before this same early return.
        return [rows, column_headers] unless @projection&.processable?

        grouped = perform_group_by(rows, column_headers)

        log_info("Performing projection by [#{@projection.type}].")
        rows, column_headers = perform_projection(grouped, column_headers)
        rows, column_headers = perform_sort_by(rows, column_headers)

        [rows, column_headers]
      end

      private

      # Run the configured projection over the grouped rows.
      # @param [Hash] groups group key => Array of rows
      # @param [Hash] column_headers
      # @return [Array] rows, column_headers
      def perform_projection(groups, column_headers)
        rows, column_headers = @projection.process(@group_by, groups, column_headers)
        [rows, column_headers]
      end

      # Bucket rows by the composite key built from the group_by columns.
      # @param [Array] rows
      # @param [Hash] column_headers
      # @return [Hash] group key => Array of rows
      def perform_group_by(rows, column_headers)
        log_info("Performing grouping by #{@group_by} across a total of #{rows.size} has been provided.")

        group_by_map = {}
        rows.each do |row|
          key = generate_map_key(row, column_headers)
          group_by_map[key] = [] unless group_by_map.key?(key)

          group_by_map[key] << row
        end

        group_by_map
      end

      # Sort rows descending by the sort_by column; no-op when unset.
      # @param [Array] rows
      # @param [Hash] column_headers
      # @return [Array] rows, column_headers
      # @raise [RuntimeError] when sort_by names a column not in column_headers
      def perform_sort_by(rows, column_headers)
        log_info("Performing sort by #{@sort_by} across a total of #{rows.size} has been provided.")
        return [rows, column_headers] if @sort_by.nil?

        sort_idx = column_headers[@sort_by]

        if sort_idx.nil? || sort_idx.negative?
          raise "Missing [#{@sort_by}]. Valid candidates are: [#{column_headers.keys.join(", ")}]"
        end

        rows.sort_by! { |a| a[sort_idx] }

        [rows.reverse, column_headers]
      end

      # Take a row, extract the props, return a key
      # (group column values joined with ".").
      # @raise [RuntimeError] when a group_by column is not in column_headers
      def generate_map_key(row, column_headers)
        keys = []
        @group_by.each do |gbp|
          idx = column_headers[gbp]

          raise "Missing column index. Please include [#{gbp}] in columns #{column_headers.keys}." unless idx

          keys << row[idx]
        end

        keys.join(".")
      end
    end
  end
end
|