abide_dev_utils 0.4.2 → 0.8.0
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.gitignore +2 -1
- data/.rubocop.yml +1 -1
- data/CODEOWNERS +1 -0
- data/README.md +34 -0
- data/abide_dev_utils.gemspec +11 -7
- data/itests.rb +138 -0
- data/lib/abide_dev_utils/cli/abstract.rb +2 -0
- data/lib/abide_dev_utils/cli/comply.rb +115 -0
- data/lib/abide_dev_utils/cli/jira.rb +2 -2
- data/lib/abide_dev_utils/cli/puppet.rb +136 -11
- data/lib/abide_dev_utils/cli/xccdf.rb +26 -7
- data/lib/abide_dev_utils/cli.rb +2 -0
- data/lib/abide_dev_utils/comply.rb +498 -0
- data/lib/abide_dev_utils/config.rb +19 -0
- data/lib/abide_dev_utils/errors/comply.rb +17 -0
- data/lib/abide_dev_utils/errors/gcloud.rb +27 -0
- data/lib/abide_dev_utils/errors/general.rb +5 -0
- data/lib/abide_dev_utils/errors/ppt.rb +12 -0
- data/lib/abide_dev_utils/errors/xccdf.rb +8 -0
- data/lib/abide_dev_utils/errors.rb +2 -0
- data/lib/abide_dev_utils/gcloud.rb +22 -0
- data/lib/abide_dev_utils/jira.rb +15 -0
- data/lib/abide_dev_utils/mixins.rb +16 -0
- data/lib/abide_dev_utils/output.rb +7 -3
- data/lib/abide_dev_utils/ppt/api.rb +219 -0
- data/lib/abide_dev_utils/ppt/class_utils.rb +184 -0
- data/lib/abide_dev_utils/ppt/coverage.rb +2 -3
- data/lib/abide_dev_utils/ppt/score_module.rb +162 -0
- data/lib/abide_dev_utils/ppt.rb +138 -49
- data/lib/abide_dev_utils/validate.rb +5 -1
- data/lib/abide_dev_utils/version.rb +1 -1
- data/lib/abide_dev_utils/xccdf.rb +567 -9
- data/lib/abide_dev_utils.rb +1 -0
- metadata +82 -17
- data/lib/abide_dev_utils/utils/general.rb +0 -9
- data/lib/abide_dev_utils/xccdf/cis/hiera.rb +0 -161
- data/lib/abide_dev_utils/xccdf/cis.rb +0 -3

data/lib/abide_dev_utils/mixins.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+module AbideDevUtils
+  module Mixins
+    # mixin methods for the Hash data type
+    module Hash
+      def deep_copy
+        Marshal.load(Marshal.dump(self))
+      end
+
+      def diff(other)
+        dup.delete_if { |k, v| other[k] == v }.merge!(other.dup.delete_if { |k, _| key?(k) })
+      end
+    end
+  end
+end
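
The new `data/lib/abide_dev_utils/mixins.rb` adds a small Hash mixin. A minimal usage sketch; extending a plain Hash here is only an illustration, since how the gem itself mixes the module in is not shown in this diff:

```ruby
require 'abide_dev_utils/mixins'

h1 = { 'a' => 1, 'b' => 2 }.extend(AbideDevUtils::Mixins::Hash)
h2 = { 'a' => 1, 'b' => 3, 'c' => 4 }

h1.deep_copy # independent copy via a Marshal dump/load round-trip
h1.diff(h2)  # => { 'b' => 2, 'c' => 4 } (keys whose values differ, kept from h1, plus keys only in h2)
```
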

data/lib/abide_dev_utils/output.rb
@@ -22,9 +22,13 @@ module AbideDevUtils
     end

     def self.yaml(in_obj, console: false, file: nil)
-
-
-
+      yaml_out = if in_obj.is_a? String
+                   in_obj
+                 else
+                   AbideDevUtils::Validate.hashable(in_obj)
+                   # Use object's #to_yaml method if it exists, convert to hash if not
+                   in_obj.respond_to?(:to_yaml) ? in_obj.to_yaml : in_obj.to_h.to_yaml
+                 end
       simple(yaml_out) if console
       FWRITER.write_yaml(yaml_out, file: file) unless file.nil?
     end
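
In `data/lib/abide_dev_utils/output.rb`, `yaml` now passes Strings through untouched and only validates and serializes non-String objects. A hedged sketch of the resulting behavior, assuming the enclosing module in output.rb is `AbideDevUtils::Output`:

```ruby
require 'abide_dev_utils/output'

# Non-String input: validated as hashable, serialized with #to_yaml
# (falling back to #to_h.to_yaml), and printed because console: true.
AbideDevUtils::Output.yaml({ 'profile' => 'cis_level_1', 'controls' => 42 }, console: true)

# String input: passed through as-is and written to disk by FWRITER.
AbideDevUtils::Output.yaml("---\nkey: value\n", file: 'out.yaml')
```
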

data/lib/abide_dev_utils/ppt/api.rb
@@ -0,0 +1,219 @@
+# frozen_string_literal: true
+
+require 'io/console'
+require 'json'
+require 'net/http'
+require 'openssl'
+
+module AbideDevUtils
+  module Ppt
+    class ApiClient
+      attr_reader :hostname, :custom_ports
+      attr_writer :auth_token, :tls_cert_verify
+      attr_accessor :content_type
+
+      CT_JSON = 'application/json'
+      API_DEFS = {
+        codemanager: {
+          port: 8170,
+          version: 'v1',
+          base: 'code-manager',
+          paths: [
+            {
+              path: 'deploys',
+              verbs: %w[post],
+              x_auth: true
+            }
+          ]
+        },
+        classifier1: {
+          port: 4433,
+          version: 'v1',
+          base: 'classifier-api',
+          paths: [
+            {
+              path: 'groups',
+              verbs: %w[get post],
+              x_auth: true
+            }
+          ]
+        },
+        orchestrator: {
+          port: 8143,
+          version: 'v1',
+          base: 'orchestrator',
+          paths: [
+            {
+              path: 'command/deploy',
+              verbs: %w[post],
+              x_auth: true
+            },
+            {
+              path: 'command/task',
+              verbs: %w[post],
+              x_auth: true
+            },
+            {
+              path: 'jobs',
+              verbs: %w[get],
+              x_auth: true
+            }
+          ]
+        }
+      }.freeze
+
+      def initialize(hostname, auth_token: nil, content_type: CT_JSON, custom_ports: {}, verbose: false)
+        @hostname = hostname
+        @auth_token = auth_token
+        @content_type = content_type
+        @custom_ports = custom_ports
+        @verbose = verbose
+        define_api_methods
+      end
+
+      def login(username, password: nil, lifetime: '1h', label: nil)
+        label = "AbideDevUtils token for #{username} - lifetime #{lifetime}" if label.nil?
+        password = IO.console.getpass 'Password: ' if password.nil?
+        data = {
+          'login' => username,
+          'password' => password,
+          'lifetime' => lifetime,
+          'label' => label
+        }
+        uri = URI("https://#{@hostname}:4433/rbac-api/v1/auth/token")
+        result = http_request(uri, post_request(uri, x_auth: false, **data), json_out: true)
+        @auth_token = result['token']
+        log_verbose("Successfully logged in? #{auth_token?}")
+        auth_token?
+      end
+
+      def auth_token?
+        defined?(@auth_token) && !@auth_token.nil? && !@auth_token.empty?
+      end
+
+      def tls_cert_verify
+        @tls_cert_verify = defined?(@tls_cert_verify) ? @tls_cert_verify : false
+      end
+
+      def verbose?
+        @verbose
+      end
+
+      def no_verbose
+        @verbose = false
+      end
+
+      def verbose!
+        @verbose = true
+      end
+
+      private
+
+      def define_api_methods
+        api_method_data.each do |meth, data|
+          case meth
+          when /^get_.*/
+            self.class.define_method(meth) do |*args, **kwargs|
+              uri = args.empty? ? data[:uri] : URI("#{data[:uri]}/#{args.join('/')}")
+              req = get_request(uri, x_auth: data[:x_auth], **kwargs)
+              http_request(data[:uri], req, json_out: true)
+            end
+          when /^post_.*/
+            self.class.define_method(meth) do |*args, **kwargs|
+              uri = args.empty? ? data[:uri] : URI("#{data[:uri]}/#{args.join('/')}")
+              req = post_request(uri, x_auth: data[:x_auth], **kwargs)
+              http_request(data[:uri], req, json_out: true)
+            end
+          else
+            raise "Cannot define method for #{meth}"
+          end
+        end
+      end
+
+      def api_method_data
+        method_data = {}
+        API_DEFS.each do |key, val|
+          val[:paths].each do |path|
+            method_names = api_method_names(key, path)
+            method_names.each do |name|
+              method_data[name] = {
+                uri: api_method_uri(val[:port], val[:base], val[:version], path[:path]),
+                x_auth: path[:x_auth]
+              }
+            end
+          end
+        end
+        method_data
+      end
+
+      def api_method_names(api_name, path)
+        path[:verbs].each_with_object([]) do |verb, ary|
+          path_str = path[:path].split('/').join('_')
+          ary << [verb, api_name.to_s, path_str].join('_')
+        end
+      end
+
+      def api_method_uri(port, base, version, path)
+        URI("https://#{@hostname}:#{port}/#{base}/#{version}/#{path}")
+      end
+
+      def get_request(uri, x_auth: true, **qparams)
+        log_verbose('New GET request:')
+        log_verbose("request_qparams?: #{!qparams.empty?}")
+        uri.query = URI.encode_www_form(qparams) unless qparams.empty?
+        headers = init_headers(x_auth: x_auth)
+        log_verbose("request_headers: #{redact_headers(headers)}")
+        Net::HTTP::Get.new(uri, headers)
+      end
+
+      def post_request(uri, x_auth: true, **data)
+        log_verbose('New POST request:')
+        log_verbose("request_data?: #{!data.empty?}")
+        headers = init_headers(x_auth: x_auth)
+        log_verbose("request_headers: #{redact_headers(headers)}")
+        req = Net::HTTP::Post.new(uri, headers)
+        req.body = data.to_json unless data.empty?
+        req
+      end
+
+      def init_headers(x_auth: true)
+        headers = { 'Content-Type' => @content_type }
+        return headers unless x_auth
+
+        raise 'Auth token not set!' unless auth_token?
+
+        headers['X-Authentication'] = @auth_token
+        headers
+      end
+
+      def http_request(uri, req, json_out: true)
+        result = Net::HTTP.start(uri.hostname, uri.port, use_ssl: true, verify_mode: tls_verify_mode) do |http|
+          log_verbose("use_ssl: true, verify_mode: #{tls_verify_mode}")
+          http.request(req)
+        end
+        case result.code
+        when '200', '201', '202'
+          json_out ? JSON.parse(result.body) : result
+        else
+          jbody = JSON.parse(result.body)
+          log_verbose("HTTP #{result.code} #{jbody['kind']} #{jbody['msg']} #{jbody['details']} #{uri}")
+          raise "HTTP #{result.code} #{jbody['kind']} #{jbody['msg']} #{jbody['details']} #{uri}"
+        end
+      end
+
+      def log_verbose(msg)
+        puts msg if @verbose
+      end
+
+      def redact_headers(headers)
+        r_headers = headers.dup
+        r_headers['X-Authentication'] = 'XXXXX' if r_headers.key?('X-Authentication')
+        r_headers
+      end
+
+      def tls_verify_mode
+        tls_cert_verify ? OpenSSL::SSL::VERIFY_PEER : OpenSSL::SSL::VERIFY_NONE
+      end
+    end
+  end
+end
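
A usage sketch for the new `AbideDevUtils::Ppt::ApiClient` (`data/lib/abide_dev_utils/ppt/api.rb`). The hostname and request payload are placeholders; the helper names follow the `verb_api_path` pattern that `define_api_methods` derives from `API_DEFS`, and they are defined on the class itself via `self.class.define_method`, so every instance gains them once the first client is constructed:

```ruby
require 'abide_dev_utils/ppt/api'

client = AbideDevUtils::Ppt::ApiClient.new('pe.example.com', verbose: true)
client.tls_cert_verify = true # default is VERIFY_NONE; set this to verify the server cert

# Obtains an RBAC token from https://pe.example.com:4433/rbac-api/v1/auth/token;
# prompts for a password if none is supplied.
client.login('admin', lifetime: '1h')

# Dynamically defined helpers, one per API_DEFS path and verb (placeholder payload):
client.get_orchestrator_jobs                                              # GET  :8143/orchestrator/v1/jobs
client.post_codemanager_deploys(environments: ['production'], wait: true) # POST :8170/code-manager/v1/deploys
```
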

data/lib/abide_dev_utils/ppt/class_utils.rb
@@ -0,0 +1,184 @@
+# frozen_string_literal: true
+
+require 'fileutils'
+require 'tempfile'
+require 'abide_dev_utils/errors/ppt'
+
+module AbideDevUtils
+  module Ppt
+    module ClassUtils
+      include AbideDevUtils::Errors::Ppt
+
+      CLASS_NAME_PATTERN = /\A([a-z][a-z0-9_]*)?(::[a-z][a-z0-9_]*)*\Z/.freeze
+      CLASS_NAME_CAPTURE_PATTERN = /\A^class (?<class_name>([a-z][a-z0-9_]*)?(::[a-z][a-z0-9_]*)*).*\Z/.freeze
+
+      # Validates a Puppet class name
+      # @param name [String] Puppet class name
+      # @return [Boolean] Is the name a valid Puppet class name
+      def self.valid_class_name?(name)
+        name.match?(CLASS_NAME_PATTERN)
+      end
+
+      # Takes a full Puppet class name and returns the path
+      # of the class file. This command must be run from the
+      # root module directory if validate_path is true.
+      # @param class_name [String] full Puppet class name
+      # @return [String] path to class file
+      def self.path_from_class_name(class_name)
+        parts = class_name.split('::')
+        parts[-1] = "#{parts[-1]}.pp"
+        File.expand_path(File.join('manifests', parts[1..-1]))
+      end
+
+      # Returns the namespaced class name from a file path
+      # @param class_path [String] the path to the Puppet class
+      # @return [String] the namespaced class name
+      def self.class_name_from_path(class_path)
+        parts = class_path.split(File::SEPARATOR).map { |x| x == '' ? File::SEPARATOR : x }
+        module_root_idx = parts.find_index('manifests') - 1
+        module_root = parts[module_root_idx].split('-')[-1]
+        namespaces = parts[(module_root_idx + 2)..-2].join('::') # add 2 to module root idx to skip manifests dir
+        class_name = parts[-1].delete_suffix('.pp')
+        [module_root, namespaces, class_name].join('::')
+      end
+
+      # Takes a path to a Puppet file and extracts the class name from the class declaration in the file.
+      # This differs from class_name_from_path because we actually read the class file and search
+      # the code for a class declaration to get the class name instead of just using the path
+      # to construct a valid Puppet class name.
+      # @param path [String] the path to a Puppet file
+      # @return [String] the Puppet class name
+      # @raise [ClassDeclarationNotFoundError] if there is no class declaration in the file
+      def self.class_name_from_declaration(path)
+        File.readlines(path).each do |line|
+          next unless line.match?(/^class /)
+
+          return CLASS_NAME_CAPTURE_PATTERN.match(line)['class_name']
+        end
+        raise ClassDeclarationNotFoundError, "Path:#{path}"
+      end
+
+      # Renames a file by file move. Ensures destination path exists before moving.
+      # @param from_path [String] path of the original file
+      # @param to_path [String] path of the new file
+      # @param verbose [Boolean] Sets verbose mode on file operations
+      # @param force [Boolean] If true, file move will overwrite existing files
+      def self.rename_class_file(from_path, to_path, **kwargs)
+        verbose = kwargs.fetch(:verbose, false)
+        force = kwargs.fetch(:force, false)
+        FileUtils.mkdir_p(File.dirname(to_path), verbose: verbose)
+        FileUtils.mv(from_path, to_path, verbose: verbose, force: force)
+      end
+
+      # Renames a Puppet class in the class declaration of the given file
+      # @param from [String] the original class name
+      # @param to [String] the new class name
+      # @param file_path [String] the path to the class file
+      # @param verbose [Boolean] Sets verbose mode on file operations
+      # @param force [Boolean] If true, file move will overwrite existing files
+      # @raise [ClassDeclarationNotFoundError] if the class file does not contain the from class declaration
+      def self.rename_puppet_class_declaration(from, to, file_path, **kwargs)
+        verbose = kwargs.fetch(:verbose, false)
+        force = kwargs.fetch(:force, false)
+        temp_file = Tempfile.new
+        renamed = false
+        begin
+          File.readlines(file_path).each do |line|
+            if line.match?(/^class #{from}.*/)
+              line.gsub!(/^class #{from}/, "class #{to}")
+              renamed = true
+            end
+            temp_file.puts line
+          end
+          raise ClassDeclarationNotFoundError, "File:#{file_path},Declaration:class #{from}" unless renamed
+
+          temp_file.close
+          FileUtils.mv(temp_file.path, file_path, verbose: verbose, force: force)
+        ensure
+          temp_file.close
+          temp_file.unlink
+        end
+      end
+
+      # Determines if a Puppet class name is mismatched by constructing a class name from
+      # a path to a Puppet file and extracting the class name from the class declaration
+      # inside the file. This is useful to determine if a Puppet class file breaks the
+      # autoload path pattern.
+      # @param path [String] path to a Puppet class file
+      # @return [Boolean] if the actual class name and path-constructed class name match
+      def self.mismatched_class_declaration?(path)
+        class_name_from_path(path) != class_name_from_declaration(path)
+      end
+
+      # Finds all Puppet classes in the given directory that have class declarations
+      # that do not adhere to the autoload path pattern.
+      # @param class_dir [String] path to a directory containing Puppet class files
+      # @return [Array] paths to all Puppet class files with mismatched class names
+      def self.find_all_mismatched_class_declarations(class_dir)
+        mismatched = []
+        Dir[File.join(File.expand_path(class_dir), '*.pp')].each do |class_file|
+          mismatched << class_file if mismatched_class_declaration?(class_file)
+        end
+        mismatched.sort
+      end
+
+      # Given a directory holding Puppet manifests, returns
+      # the full namespace for all classes in that directory.
+      # @param puppet_class_dir [String] path to a dir containing Puppet manifests
+      # @return [String] The namespace for all classes in manifests in the dir
+      def self.find_class_namespace(puppet_class_dir)
+        path = Pathname.new(puppet_class_dir)
+        mod_root = nil
+        ns_parts = []
+        found_manifests = false
+        path.ascend do |p|
+          if found_manifests
+            mod_root = find_mod_root(p)
+            break
+          end
+          if File.basename(p) == 'manifests'
+            found_manifests = true
+            next
+          else
+            ns_parts << File.basename(p)
+          end
+        end
+        "#{mod_root}::#{ns_parts.reverse.join('::')}::"
+      end
+
+      # Given a Pathname object of the 'manifests' directory in a Puppet module,
+      # determines the module namespace root. Does this by consulting
+      # metadata.json, if it exists, or by using the parent directory name.
+      # @param pathname [Pathname] A Pathname object of the module's manifests dir
+      # @return [String] The module's namespace root
+      def self.find_mod_root(pathname)
+        metadata_file = nil
+        pathname.entries.each do |e|
+          metadata_file = "#{pathname}/metadata.json" if File.basename(e) == 'metadata.json'
+        end
+        if metadata_file.nil?
+          File.basename(p)
+        else
+          File.open(metadata_file) do |f|
+            file = JSON.parse(f.read)
+            File.basename(p) unless file.key?('name')
+            file['name'].split('-')[-1]
+          end
+        end
+      end
+
+      # @return [Array] An array of frozen arrays where each sub-array's
+      #   index 0 is class_name and index 1 is the full path to the file.
+      def self.find_all_classes_and_paths(puppet_class_dir)
+        all_cap = []
+        Dir.each_child(puppet_class_dir) do |c|
+          path = "#{puppet_class_dir}/#{c}"
+          next if File.directory?(path) || File.extname(path) != '.pp'
+
+          all_cap << [File.basename(path, '.pp'), path].freeze
+        end
+        all_cap
+      end
+    end
+  end
+end
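
A sketch of how the new `ClassUtils` helpers (`data/lib/abide_dev_utils/ppt/class_utils.rb`) fit together when checking for manifests that break Puppet's autoload path convention; the module checkout path is hypothetical:

```ruby
require 'abide_dev_utils/ppt/class_utils'

mod = 'puppet-abide_example' # hypothetical module checkout directory
path = "#{mod}/manifests/rules/ensure_foo.pp"

# Class name implied by the autoload path vs. the name actually declared in the file:
AbideDevUtils::Ppt::ClassUtils.class_name_from_path(path)
# => "abide_example::rules::ensure_foo"
AbideDevUtils::Ppt::ClassUtils.class_name_from_declaration(path)
# => whatever `class <name>` declaration the file contains

# Every manifest in a directory whose declaration does not match its path:
AbideDevUtils::Ppt::ClassUtils.find_all_mismatched_class_declarations("#{mod}/manifests/rules")
```
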

data/lib/abide_dev_utils/ppt/coverage.rb
@@ -4,16 +4,15 @@ require 'json'
 require 'pathname'
 require 'yaml'
 require 'puppet_pal'
-require 'abide_dev_utils/ppt'
+require 'abide_dev_utils/ppt/class_utils'

 module AbideDevUtils
   module Ppt
     class CoverageReport
-      include AbideDevUtils::Ppt
       def self.generate(puppet_class_dir, hiera_path, profile = nil)
         coverage = {}
         coverage['classes'] = {}
-        all_cap =
+        all_cap = ClassUtils.find_all_classes_and_paths(puppet_class_dir)
         invalid_classes = find_invalid_classes(all_cap)
         valid_classes = find_valid_classes(all_cap, invalid_classes)
         coverage['classes']['invalid'] = invalid_classes
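
The `CoverageReport` change only swaps the old mixin include for the new `ClassUtils.find_all_classes_and_paths` module function; report generation itself is unchanged and is still invoked roughly like this (arguments are placeholders):

```ruby
require 'abide_dev_utils/ppt/coverage'

# Directory of manifests, Hiera data file, and optional profile filter.
AbideDevUtils::Ppt::CoverageReport.generate('manifests', 'hiera.yaml', 'profile_1')
```
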

data/lib/abide_dev_utils/ppt/score_module.rb
@@ -0,0 +1,162 @@
+# frozen_string_literal: true
+
+require 'pathname'
+require 'metadata-json-lint'
+require 'puppet-lint'
+require 'json'
+
+module AbideDevUtils
+  module Ppt
+    class ScoreModule
+      attr_reader :module_name, :module_dir, :manifests_dir
+
+      def initialize(module_dir)
+        @module_name = module_dir.split(File::SEPARATOR)[-1]
+        @module_dir = real_module_dir(module_dir)
+        @manifests_dir = File.join(real_module_dir(module_dir), 'manifests')
+        @metadata = JSON.parse(File.join(@module_dir, 'metadata.json'))
+      end
+
+      def lint
+        linter_exit_code, linter_output = lint_manifests
+        {
+          exit_code: linter_exit_code,
+          manifests: manifest_count,
+          lines: line_count,
+          linter_version: linter_version,
+          output: linter_output
+        }.to_json
+      end
+
+      # def metadata
+
+      # end
+
+      private
+
+      def manifests
+        @manifests ||= Dir["#{manifests_dir}/**/*.pp"]
+      end
+
+      def manifest_count
+        @manifest_count ||= manifests.count
+      end
+
+      def line_count
+        @line_count ||= manifests.each_with_object([]) { |x, ary| ary << File.readlines(x).size }.sum
+      end
+
+      def lint_manifests
+        results = []
+        PuppetLint.configuration.with_filename = true
+        PuppetLint.configuration.json = true
+        PuppetLint.configuration.relative = true
+        linter_exit_code = 0
+        manifests.each do |manifest|
+          next if PuppetLint.configuration.ignore_paths.any? { |p| File.fnmatch(p, manifest) }
+
+          linter = PuppetLint.new
+          linter.file = manifest
+          linter.run
+          linter_exit_code = 1 if linter.errors? || linter.warnings?
+          results << linter.problems.reject { |x| x[:kind] == :ignored }
+        end
+        [linter_exit_code, JSON.generate(results)]
+      end
+
+      def lint_metadata
+        results = { errors: [], warnings: [] }
+        results[:errors] << metadata_schema_errors
+        dep_errors, dep_warnings = metadata_validate_deps
+        results[:errors] << dep_errors
+        results[:warnings] << dep_warnings
+        results[:errors] << metadata_deprecated_fields
+      end
+
+      def metadata_schema_errors
+        MetadataJsonLint::Schema.new.validate(@metadata).each_with_object([]) do |err, ary|
+          check = err[:field] == 'root' ? :required_fields : err[:field]
+          ary << metadata_err(check, err[:message])
+        end
+      end
+
+      def metadata_validate_deps
+        return [[], []] unless @metadata.key?('dependencies')
+
+        errors, warnings = []
+        duplicates = metadata_dep_duplicates
+        warnings << duplicates unless duplicates.empty?
+        @metadata['dependencies'].each do |dep|
+          e, w = metadata_dep_version_requirement(dep)
+          errors << e unless e.nil?
+          warnings << w unless w.nil?
+          warnings << metadata_dep_version_range(dep['name']) if dep.key?('version_range')
+        end
+        [errors.flatten, warnings.flatten]
+      end
+
+      def metadata_deprecated_fields
+        %w[types checksum].each_with_object([]) do |field, ary|
+          next unless @metadata.key?(field)
+
+          ary << metadata_err(:deprecated_fields, "Deprecated field '#{field}' found in metadata.json")
+        end
+      end
+
+      def metadata_dep_duplicates
+        results = []
+        duplicates = @metadata['dependencies'].detect { |x| @metadata['dependencies'].count(x) > 1 }
+        return results if duplicates.empty?
+
+        duplicates.each { |x| results << metadata_err(:dependencies, "Duplicate dependencies on #{x}") }
+        results
+      end
+
+      def metadata_dep_version_requirement(dependency)
+        unless dependency.key?('version_requirement')
+          return [metadata_err(:dependencies, "Invalid 'version_requirement' field in metadata.json: #{e}"), nil]
+        end
+
+        ver_req = MetadataJsonLint::VersionRequirement.new(dependency['version_requirement'])
+        return [nil, metadata_dep_open_ended(dependency['name'], dependency['version_requirement'])] if ver_req.open_ended?
+        return [nil, metadata_dep_mixed_syntax(dependency['name'], dependency['version_requirement'])] if ver_req.mixed_syntax?
+
+        [nil, nil]
+      end
+
+      def metadata_dep_open_ended(name, version_req)
+        metadata_err(:dependencies, "Dependency #{name} has an open ended dependency version requirement #{version_req}")
+      end
+
+      def metadata_dep_mixed_syntax(name, version_req)
+        msg = 'Mixing "x" or "*" version syntax with operators is not recommended in ' \
+              "metadata.json, use one style in the #{name} dependency: #{version_req}"
+        metadata_err(:dependencies, msg)
+      end
+
+      def metadata_dep_version_range(name)
+        metadata_err(:dependencies, "Dependency #{name} has a 'version_range' attribute which is no longer used by the forge.")
+      end
+
+      def metadata_err(check, msg)
+        { check: check, msg: msg }
+      end
+
+      def linter_version
+        PuppetLint::VERSION
+      end
+
+      def relative_manifests
+        Dir.glob('manifests/**/*.pp')
+      end
+
+      def real_module_dir(path)
+        return Pathname.pwd if path.nil?
+
+        return Pathname.new(path).cleanpath(consider_symlink: true) if Dir.exist?(path)
+
+        raise ArgumentError, "Path #{path} is not a directory"
+      end
+    end
+  end
+end
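
A sketch of the intended interface of the new `ScoreModule` (`data/lib/abide_dev_utils/ppt/score_module.rb`); the module path is a placeholder. Note that, as shipped, `initialize` hands the metadata.json path rather than its contents to `JSON.parse`, which looks like it would raise for any real module, so this sketch assumes that line is meant to read the file first:

```ruby
require 'abide_dev_utils/ppt/score_module'

# Placeholder path to a Puppet module checkout.
scorer = AbideDevUtils::Ppt::ScoreModule.new('path/to/puppet-abide_example')

# Runs puppet-lint over manifests/**/*.pp and returns a JSON report containing
# the exit code, manifest and line counts, the linter version, and the problems found.
puts scorer.lint
```
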