pdksync 0.5.0 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.gitignore +1 -0
- data/.rubocop.yml +26 -15
- data/.travis.yml +17 -16
- data/CHANGELOG.md +23 -0
- data/CODEOWNERS +2 -0
- data/Gemfile +6 -2
- data/README.md +295 -17
- data/Rakefile +14 -8
- data/lib/pdksync.rb +310 -449
- data/lib/pdksync/conf/puppet_abs_supported_platforms.yaml +41 -0
- data/lib/pdksync/configuration.rb +155 -0
- data/lib/pdksync/githubclient.rb +3 -1
- data/lib/pdksync/gitplatformclient.rb +2 -2
- data/lib/pdksync/jenkinsclient.rb +50 -0
- data/lib/pdksync/logger.rb +116 -0
- data/lib/pdksync/rake_tasks.rb +99 -15
- data/lib/pdksync/utils.rb +1293 -0
- data/managed_modules.yml +21 -26
- data/pdksync.gemspec +18 -15
- data/spec/configuration_spec.rb +56 -0
- data/spec/fixtures/fake_managed_modules.yaml +2 -0
- data/spec/fixtures/pdksync.yml +2 -0
- data/spec/logger_spec.rb +44 -0
- data/spec/pdksync_spec.rb +185 -0
- data/spec/spec_helper.rb +74 -0
- data/spec/utils_spec.rb +131 -0
- metadata +94 -21
- data/lib/pdksync/constants.rb +0 -78
- data/spec/lib/pdksync_spec.rb +0 -58
@@ -0,0 +1,1293 @@
|
|
1
|
+
# @summary provides a module with various methods for performing the desired tasks
|
2
|
+
require 'git'
|
3
|
+
require 'open3'
|
4
|
+
require 'fileutils'
|
5
|
+
require 'pdk'
|
6
|
+
require 'pdksync/configuration'
|
7
|
+
require 'pdksync/gitplatformclient'
|
8
|
+
require 'colorize'
|
9
|
+
require 'bundler'
|
10
|
+
require 'octokit'
|
11
|
+
require 'pdk/util/template_uri'
|
12
|
+
require 'pdksync/logger'
|
13
|
+
|
14
|
+
module PdkSync
|
15
|
+
module Utils
|
16
|
+
# @summary
#   Lazily builds, memoizes and returns the shared pdksync configuration.
# @return [PdkSync::Configuration]
#   The configuration object used by all other helpers in this module.
def self.configuration
  @configuration ||= PdkSync::Configuration.new
end
|
19
|
+
|
20
|
+
# @summary
#   Reports whether we are running on Windows. Ruby sets File::ALT_SEPARATOR
#   only on Windows, and the standard library itself uses that to detect the
#   platform.
# @return [Boolean] true when running on Windows
def self.on_windows?
  !File::ALT_SEPARATOR.nil?
end

# @summary
#   Returns (and memoizes) a platform-appropriate scratch file path used when
#   rewriting files such as the Gemfile.
# @return [String] the temp file location
def self.temp_file_path
  @temp_file_path ||= if on_windows?
                        "#{ENV['TEMP']}\\out.tmp"
                      else
                        '/tmp/out.tmp'
                      end
end
|
29
|
+
|
30
|
+
# @summary
#   Deletes any preexisting branch matching the given name on the given repository.
#   Failures are logged rather than raised.
# @param [PdkSync::GitPlatformClient] client
#   The Git platform client used to gain access to and manipulate the repository.
# @param [String] repo_name
#   The repository from which the branch is to be deleted.
# @param [String] branch_name
#   The branch that is to be deleted.
def self.delete_branch(client, repo_name, branch_name)
  client.delete_branch(repo_name, branch_name)
rescue StandardError => e
  PdkSync::Logger.fatal "Deleting #{branch_name} in #{repo_name} failed. #{e}"
end
|
43
|
+
|
44
|
+
# @summary
#   Adds a label to an issue (i.e. pull request) on the given repository.
# @param [PdkSync::GitPlatformClient] client
#   The Git platform client used to gain access to and manipulate the repository.
# @param [String] repo_name
#   The repository containing the issue.
# @param [Integer] issue_number
#   The id of the issue (i.e. pull request) to label.
# @param [String] label
#   The label to add.
# @return [Object, false] the client response, or false when labelling failed
def self.add_label(client, repo_name, issue_number, label)
  client.update_issue(repo_name, issue_number, labels: [label])
rescue StandardError => e
  PdkSync::Logger.info "Adding label to #{repo_name} issue #{issue_number} has failed. #{e}"
  false
end
|
60
|
+
|
61
|
+
# @summary
#   Checks the given repository for the existence of the supplied label.
# @param [PdkSync::GitPlatformClient] client
#   The Git platform client used to gain access to and manipulate the repository.
# @param [String] repo_name
#   The repository to inspect.
# @param [String] label
#   The label to check for.
# @return [Boolean]
#   true when the label exists; false (after logging) when it does not or the
#   lookup fails.
def self.check_for_label(client, repo_name, label)
  # Fetch every label defined on the repository.
  repo_labels = client.labels(repo_name)

  # Raise (caught below) when the requested label is absent.
  found = repo_labels.any? { |repo_label| repo_label.name == label }
  raise StandardError, "Label '#{label}' not found in #{repo_name}" unless found
  true
rescue StandardError => e
  PdkSync::Logger.fatal "Retrieving labels for #{repo_name} has failed. #{e}"
  false
end
|
90
|
+
|
91
|
+
# @summary
#   Retrieves the pdk version recorded in the current module's metadata, i.e.
#   the module navigated into by 'pdk_update'.
# @param [String] metadata_file
#   Optional override for the metadata file location.
# @return [String]
#   The recorded pdk version.
def self.return_pdk_version(metadata_file = 'metadata.json')
  JSON.parse(File.read(metadata_file))['pdk-version']
end
|
102
|
+
|
103
|
+
# @summary
#   Stages all changed files within the given repository.
# @param [Git::Base] git_repo
#   A git object representing the local repository to be staged.
# @return [Boolean] true when files were staged, false when there was nothing to stage
def self.add_staged_files(git_repo)
  if git_repo.status.changed.empty?
    PdkSync::Logger.info 'Nothing to commit.'
    false
  else
    git_repo.add(all: true)
    PdkSync::Logger.info 'All files have been staged.'
    true
  end
end
|
117
|
+
|
118
|
+
# @summary
#   Commits all currently staged files; when no explicit message is given the
#   commit is named after the template ref as a unique identifier.
# @param [Git::Base] git_repo
#   A git object representing the local repository against which the commit is made.
# @param [String] template_ref
#   The unique template ref used in the default commit name.
# @param [String] commit_message
#   Optional explicit commit message.
def self.commit_staged_files(git_repo, template_ref, commit_message = nil)
  message = commit_message.nil? ? "pdksync_#{template_ref}" : commit_message
  git_repo.commit(message)
end
|
134
|
+
|
135
|
+
# @summary
#   Pushes the given local branch to the configured push destination
#   (typically the repository's origin). Failures are logged, not raised.
# @param [Git::Base] git_repo
#   A git object representing the local repository against which the push is made.
# @param [String] current_branch
#   The branch to push.
# @param [String] repo_name
#   The repository name, used only for error reporting.
def self.push_staged_files(git_repo, current_branch, repo_name)
  git_repo.push(configuration.push_file_destination, current_branch)
rescue StandardError => e
  PdkSync::Logger.error "Pushing to #{configuration.push_file_destination} for #{repo_name} has failed. #{e}"
end
|
148
|
+
|
149
|
+
# @summary
#   Opens a pull request on the given repository merging the pdksync branch
#   into the configured base branch, titled with the pdk version as identifier.
# @param [PdkSync::GitPlatformClient] client
#   The Git platform client used to gain access to and manipulate the repository.
# @param [String] repo_name
#   The repository on which the PR is opened.
# @param [String] template_ref
#   The unique reference representing the template the update ran against.
# @param [String] pdk_version
#   The pdk version the update was run with.
# @param [String] pr_title
#   Optional custom PR title; when given, template_ref is used as the head branch.
# @return [Object, nil] the created PR, or nil (after logging) on failure
def self.create_pr(client, repo_name, template_ref, pdk_version, pr_title = nil)
  if pr_title.nil?
    title = "pdksync - Update using #{pdk_version}"
    message = "pdk version: `#{pdk_version}` \n pdk template ref: `#{template_ref}`"
    head = "pdksync_#{template_ref}"
  else
    title = "pdksync - #{pr_title}"
    message = "#{pr_title}\npdk version: `#{pdk_version}` \n"
    head = template_ref
  end
  client.create_pull_request(repo_name, configuration.create_pr_against, head, title, message)
rescue StandardError => e
  PdkSync::Logger.fatal "PR creation for #{repo_name} has failed. #{e}"
  nil
end
|
177
|
+
|
178
|
+
# @summary
#   Prefers a fully installed (package) pdk; falls back to whatever `pdk` is
#   on the PATH, warning when it does so.
# @return [String]
#   Path to the pdk executable.
def self.return_pdk_path
  full_path = '/opt/puppetlabs/pdk/bin/pdk'
  return full_path if File.executable?(full_path)

  PdkSync::Logger.warn "(WARNING) Using pdk on PATH not '#{full_path}'"
  'pdk'
end
|
192
|
+
|
193
|
+
# @summary
#   Checks out the pdksync work branch and, when anything is staged,
#   commits it with the supplied message.
# @param [Git::Base] git_repo
#   A git object representing the local repository.
# @param [String] branch_name
#   Suffix for the work branch / default commit name.
# @param [String] commit_message
#   Message for the commit.
def self.create_commit(git_repo, branch_name, commit_message)
  checkout_branch(git_repo, branch_name)
  commit_staged_files(git_repo, branch_name, commit_message) if add_staged_files(git_repo)
end
|
198
|
+
|
199
|
+
# @summary
#   Removes the given path if it exists, so that later clones start fresh.
# @param [String] output_path
#   The local repository copy that is to be deleted.
def self.clean_env(output_path)
  FileUtils.rm_rf(output_path)
end
|
207
|
+
|
208
|
+
# @summary
#   Clones the given repository into the given local location.
# @param [String] namespace
#   The namespace the repository is located in.
# @param [String] module_name
#   The name of the repository.
# @param [String] output_path
#   The location the repository is to be cloned to.
# @return [Git::Base, nil]
#   A git object representing the local repository, or nil (after logging) on failure.
def self.clone_directory(namespace, module_name, output_path)
  # SSH-style base URIs ("git@host") join the path with ':', HTTP(S) ones with '/'.
  separator = configuration.git_base_uri.start_with?('git@') ? ':' : '/'
  clone_url = "#{configuration.git_base_uri}#{separator}#{namespace}/#{module_name}.git"
  Git.clone(clone_url, output_path.to_s)
rescue ::Git::GitExecuteError => e
  PdkSync::Logger.fatal "Cloning #{module_name} has failed. #{e}"
end
|
226
|
+
|
227
|
+
# @summary
#   Runs a shell command from the given location. With a nil option the command
#   runs synchronously and its exit status is returned; otherwise commands
#   starting with 'sh ' are spawned in the background and the pid is returned.
# @param [String] output_path
#   The location that the command is to be run from.
# @param [String] command
#   The command to be run.
# @param [String, nil] option
#   nil for synchronous execution; any other value requests background execution.
# @return [Integer, String]
#   The exit status (synchronous) or the detached process id (background;
#   an empty string when the command was not spawned).
def self.run_command(output_path, command, option)
  stdout = ''
  stderr = ''
  status = Process::Status
  pid = ''
  Dir.chdir(output_path) unless Dir.pwd == output_path

  if option.nil?
    # Environment cleanup required because Ruby subshells inherit the
    # current Bundler environment.
    if command =~ %r{^bundle}
      Bundler.with_clean_env do
        stdout, stderr, status = Open3.capture3(command)
      end
    else
      stdout, stderr, status = Open3.capture3(command)
    end
    PdkSync::Logger.info "\n#{stdout}\n"
    PdkSync::Logger.crit "Unable to run command '#{command}': #{stderr}" unless status.exitstatus.zero?
    status.exitstatus
  else
    # Background execution: only 'sh ...' commands are spawned and detached,
    # with output redirected to run_command.out / run_command.err.
    if command =~ %r{^sh }
      Bundler.with_clean_env do
        pid = spawn(command, out: 'run_command.out', err: 'run_command.err')
        Process.detach(pid)
      end
    end
    pid
  end
end
|
265
|
+
|
266
|
+
# @summary
#   Runs 'pdk update --force' in the given module directory, pinning the
#   template ref either from the module's own metadata or from the pdksync
#   configuration depending on configuration.module_is_authoritive.
# @param [String] output_path
#   The location that the command is to be run from.
# @return [Integer]
#   The status code of the pdk update run.
def self.pdk_update(output_path)
  Dir.chdir(output_path) do
    # The template url may carry its own "#ref" fragment.
    _url, module_temp_ref = module_templates_url.split('#')
    module_temp_ref ||= configuration.pdk_templates_ref
    template_ref = configuration.module_is_authoritive ? module_temp_ref : configuration.pdk_templates_ref
    change_module_template_url(configuration.pdk_templates_url, template_ref) unless configuration.module_is_authoritive
    _stdout, stderr, status = Open3.capture3("#{return_pdk_path} update --force --template-ref=#{template_ref}")
    PdkSync::Logger.fatal "Unable to run `pdk update`: #{stderr}" unless status.exitstatus.zero?
    status.exitstatus
  end
end
|
284
|
+
|
285
|
+
# @summary
#   Retrieves the template ref recorded in the current module's metadata,
#   i.e. the module navigated into by 'pdk_update'.
# @param [String] metadata_file
#   Optional override for the metadata file location.
# @return [String]
#   The recorded pdk template ref.
def self.return_template_ref(metadata_file = 'metadata.json')
  JSON.parse(File.read(metadata_file))['template-ref']
end
|
296
|
+
|
297
|
+
# @summary
#   Retrieves the template-url recorded in the current module's metadata.
# @param metadata_file [String]
#   Optional override for the metadata file location.
# @return [String]
#   The recorded pdk template-url.
def self.module_templates_url(metadata_file = 'metadata.json')
  JSON.parse(File.read(metadata_file))['template-url']
end
|
309
|
+
|
310
|
+
# @summary
#   Rewrites the module metadata's template-url to point at the given
#   pdk-templates url and ref.
# @param url [String] the url of the pdk-templates repo
# @param ref [String] the ref of the pdk templates to change to
# @param metadata_file [String] optional override for the metadata file location
# @return [String] the updated url (including the "#ref" fragment)
def self.change_module_template_url(url, ref, metadata_file = 'metadata.json')
  data_hash = JSON.parse(File.read(metadata_file))
  uri = PDK::Util::TemplateURI.uri_safe("#{url}##{ref}")
  data_hash['template-url'] = uri
  File.write(metadata_file, data_hash.to_json)
  uri.to_s
end
|
321
|
+
|
322
|
+
# @summary
#   Checks out a new local 'pdksync_<suffix>' branch on the given repository.
# @param [Git::Base] git_repo
#   A git object representing the local repository to be branched.
# @param [String] branch_suffix
#   Appended to the branch name, e.g. a template ref or a friendly name.
def self.checkout_branch(git_repo, branch_suffix)
  git_repo.branch("pdksync_#{branch_suffix}").checkout
end
|
331
|
+
|
332
|
+
# @summary
#   Checks the local pdk version against the most recent tagged release on GitHub.
# @return [Boolean, nil]
#   true if the remote version is less than or equal to the local version;
#   nil (after logging a warning) when the check could not be performed.
def self.check_pdk_version
  stdout, _stderr, status = Open3.capture3("#{return_pdk_path} --version")
  # BUGFIX: the original guard was `unless status.exitstatus`, but exitstatus
  # is an Integer and therefore always truthy, so the fatal log could never
  # fire. Check for a non-zero exit instead.
  PdkSync::Logger.fatal "Unable to find pdk at '#{return_pdk_path}'." unless status.exitstatus.zero?
  local_version = stdout.strip
  # Latest tag looks like "v1.2.3"; strip the leading "v".
  remote_version = Octokit.tags('puppetlabs/pdk').first[:name][1..-1]
  up2date = Gem::Version.new(remote_version) <= Gem::Version.new(local_version)
  unless up2date
    PdkSync::Logger.warn "The current version of pdk is #{remote_version} however you are using #{local_version}"
  end
  up2date
rescue StandardError => e
  PdkSync::Logger.warn "Unable to check latest pdk version. #{e}"
end
|
348
|
+
|
349
|
+
# @summary
#   Ensures the pdksync working directory (configuration.pdksync_dir) exists.
# @return [String] the working directory path
def self.create_filespace
  dir = configuration.pdksync_dir
  FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
  dir
end
|
355
|
+
|
356
|
+
# @summary
#   Ensures the pdksync gem working directory (configuration.pdksync_gem_dir) exists.
# @return [String] the gem working directory path
def self.create_filespace_gem
  dir = configuration.pdksync_gem_dir
  FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
  dir
end
|
362
|
+
|
363
|
+
# @summary
#   Creates a Git platform client with access to the upstream repositories.
# @return [PdkSync::GitPlatformClient] client
#   The Git platform client that has been created.
# @raise [RuntimeError] when platform access is not configured correctly
def self.setup_client
  PdkSync::GitPlatformClient.new(configuration.git_platform, configuration.git_platform_access_settings)
rescue StandardError => e
  raise "Git platform access not set up correctly: #{e}"
end
|
372
|
+
|
373
|
+
# @summary
#   Creates a Jenkins client for the given server.
# @param [String] jenkins_server_url
#   The Jenkins server to connect to.
# @return [PdkSync::JenkinsClient] client
#   The Jenkins client that has been created.
# @raise [RuntimeError] when Jenkins access is not configured correctly
def self.setup_jenkins_client(jenkins_server_url)
  require 'pdksync/jenkinsclient'
  if configuration.jenkins_platform_access_settings[:jenkins_username].nil?
    raise ArgumentError, "Jenkins access token for #{configuration.jenkins_platform.capitalize} not set"\
      " - use 'export #{configuration.jenkins_platform.upcase}_USERNAME=\"<your username>\"' to set"
  elsif configuration.jenkins_platform_access_settings[:jenkins_password].nil?
    # BUGFIX: this branch referenced a bare `jenkins_platform` local that does
    # not exist, so a missing password raised NameError instead of the
    # intended ArgumentError. Route through the configuration as above.
    raise ArgumentError, "Jenkins access token for #{configuration.jenkins_platform.capitalize} not set"\
      " - use 'export #{configuration.jenkins_platform.upcase}_PASSWORD=\"<your password>\"' to set"
  end
  PdkSync::JenkinsClient.new(jenkins_server_url, configuration.jenkins_platform_access_settings)
rescue StandardError => e
  raise "Jenkins platform access not set up correctly: #{e}"
end
|
390
|
+
|
391
|
+
# @summary
#   Reads the managed-modules file (configuration.managed_modules) and returns
#   its contents.
# @return [Array]
#   An array of module names.
# @raise [RuntimeError] when the file is missing or empty
def self.return_modules
  raise "File '#{configuration.managed_modules}' is empty/does not exist" if File.size?(configuration.managed_modules).nil?
  YAML.safe_load(File.open(configuration.managed_modules))
end
|
399
|
+
|
400
|
+
# @summary
#   Verifies that each listed module name is a valid repository under the
#   configured namespace on the Git hosting platform.
# @param [PdkSync::GitPlatformClient] client
#   The Git platform client used to look up repositories.
# @param [Array] module_names
#   String array of repository names.
# @return [true]
# @raise [RuntimeError] when the list is malformed or any repository is missing
def self.validate_modules_exist(client, module_names)
  # nil.is_a?(Array) is false, so a single is_a? check covers both the nil
  # and wrong-type cases of the original guard.
  raise "Error reading in modules. Check syntax of '#{configuration.managed_modules}'." unless module_names.is_a?(Array)
  invalid = module_names.reject { |name| client.repository?("#{configuration.namespace}/#{name}") }
  raise "Could not find the following repositories: #{invalid}" unless invalid.empty?
  true
end
|
415
|
+
|
416
|
+
# @summary
#   Rewrites a module's Gemfile so a gem under test points at a new line,
#   SHA, version or branch. Each finder/replacer pair is optional; only the
#   pairs whose values are present (non-nil, non-empty, not the literal
#   escaped-quote placeholder) are applied, in order: full line, then SHA,
#   then version, then branch.
# @param [String] output_path
#   The module checkout in which the Gemfile lives.
# @param [String] gem_to_test
#   The gem to test.
# @param [String] gem_line
#   A complete replacement Gemfile line for the gem (optional).
# @param [String] gem_sha_finder
#   The gem SHA to find.
# @param [String] gem_sha_replacer
#   The gem SHA to replace it with.
# @param [String] gem_version_finder
#   The gem version to find.
# @param [String] gem_version_replacer
#   The gem version to replace it with.
# @param [String] gem_branch_finder
#   The gem branch to find.
# @param [String] gem_branch_replacer
#   The gem branch to replace it with.
# @param [String] main_path
#   The pdksync root path, used for validation.
def self.gem_file_update(output_path, gem_to_test, gem_line, gem_sha_finder, gem_sha_replacer, gem_version_finder, gem_version_replacer, gem_branch_finder, gem_branch_replacer, main_path)
  gem_file_name = 'Gemfile'
  validate_gem_update_module(gem_to_test, gem_line, output_path, main_path)

  if (gem_line.nil? == false) && (gem_sha_replacer != '\"\"')
    # Pull branch / ref / version constraints out of the supplied gem line and
    # validate them before touching the Gemfile.
    new_data = get_source_test_gem(gem_to_test, gem_line)
    new_data.each do |data|
      if data.include?('branch')
        gem_branch_replacer = data.split(' ')[1].strip.chomp('"').delete("'")
      elsif data.include?('ref')
        gem_sha_replacer = data.split(' ')[1].strip.chomp('').delete("'")
      elsif data =~ %r{~>|=|>=|<=|<|>}
        delimiters = ['>', '<', '>=', '<=', '=']
        version_to_check = data.split(Regexp.union(delimiters))[1].chomp('""').delete("'")
        validate_gem_version_replacer(version_to_check.to_s, gem_to_test)
      end
    end
  end

  if gem_sha_replacer.nil? == false && gem_sha_replacer != '\"\"' && gem_sha_replacer != ''
    validate_gem_sha_replacer(gem_sha_replacer.chomp('"').reverse.chomp('"').reverse, gem_to_test)
  end
  if gem_branch_replacer.nil? == false && gem_branch_replacer != '\"\"'
    validate_gem_branch_replacer(gem_branch_replacer.chomp('"').reverse.chomp('"').reverse, gem_to_test)
  end
  if gem_version_replacer.nil? == false && gem_version_replacer != '\"\"' && gem_version_replacer != ''
    delimiters = ['<', '>', '<=', '>=', '=']
    version_to_check = gem_version_replacer.split(Regexp.union(delimiters))
    version_to_check.each do |version|
      next if version.nil?
      validate_gem_version_replacer(version.to_s, gem_to_test) unless version == ''
    end
  end

  Dir.chdir(output_path) unless Dir.pwd == output_path

  line_number = 1
  gem_update_sha = [
    { finder: "ref: '#{gem_sha_finder}'",
      replacer: "ref: '#{gem_sha_replacer}'" }
  ]
  gem_update_version = [
    { finder: gem_version_finder,
      replacer: gem_version_replacer }
  ]
  gem_update_branch = [
    { finder: "branch: '#{gem_branch_finder}'",
      replacer: "branch: '#{gem_branch_replacer}'" }
  ]

  # gem_line option is passed
  if gem_line.nil? == false && (gem_line != '' || gem_line != '\"\"')
    # Delete any existing line for the gem, rewriting via the temp file.
    gem_test = gem_to_test.chomp('"').reverse.chomp('"').reverse
    File.open(temp_file_path, 'w') do |out_file|
      File.foreach(gem_file_name) do |line|
        out_file.puts line unless line =~ %r{#{gem_test}}
      end
    end
    FileUtils.mv(temp_file_path, gem_file_name)

    # Insert the new gem line to test just below the Gemfile header.
    file = File.open(gem_file_name)
    contents = file.readlines.map(&:chomp)
    contents.insert(line_number, gem_line.chomp('"').reverse.chomp('"').reverse)
    File.open(gem_file_name, 'w') { |f| f.write contents.join("\n") }
  end

  # gem_sha_finder and gem_sha_replacer options are passed
  if gem_sha_finder.nil? == false && gem_sha_replacer.nil? == false && gem_sha_finder != '' && gem_sha_finder != '\"\"' && gem_sha_replacer != '' && gem_sha_replacer != '\"\"'
    # Replace with SHA
    file = File.open(gem_file_name)
    contents = file.readlines.join
    gem_update_sha.each do |regex|
      contents = contents.gsub(%r{#{regex[:finder]}}, regex[:replacer])
    end
    File.open(gem_file_name, 'w') { |f| f.write contents.to_s }
  end

  # gem_version_finder and gem_version_replacer options are passed
  if gem_version_finder.nil? == false && gem_version_replacer.nil? == false && gem_version_finder != '' && gem_version_finder != '\"\"' && gem_version_replacer != '' && gem_version_replacer != '\"\"' # rubocop:disable Metrics/LineLength
    # Replace with version
    file = File.open(gem_file_name)
    contents = file.readlines.join
    gem_update_version.each do |regex|
      contents = contents.gsub(%r{#{regex[:finder]}}, regex[:replacer])
    end
    File.open(gem_file_name, 'w') { |f| f.write contents.to_s }
  end

  # gem_branch_finder and gem_branch_replacer options are passed
  if gem_branch_finder.nil? == false && gem_branch_replacer.nil? == false && gem_branch_finder != '' && gem_branch_finder != '\"\"' && gem_branch_replacer != '' && gem_branch_replacer != '\"\"' # rubocop:disable Metrics/LineLength, Style/GuardClause
    # Replace with branch
    file = File.open(gem_file_name)
    contents = file.readlines.join
    gem_update_branch.each do |regex|
      contents = contents.gsub(%r{#{regex[:finder]}}, regex[:replacer])
    end
    File.open(gem_file_name, 'w') { |f| f.write contents.to_s }
  end
end
|
538
|
+
|
539
|
+
# @summary
#   Identifies the test framework type of a module: 'litmus' when the checkout
#   contains a provision.yaml, otherwise 'traditional'.
# @param [String] output_path
#   The module checkout to inspect.
# @param [String] repo_name
#   The module name; when nil, nil is returned.
# @return [String, nil] 'litmus', 'traditional', or nil
def self.module_type(output_path, repo_name)
  return nil if repo_name.nil?

  File.exist?("#{output_path}/provision.yaml") ? 'litmus' : 'traditional'
end
|
555
|
+
|
556
|
+
# @summary
#   Kicks off a module's acceptance tests locally. Only litmus modules are
#   supported; the run is driven by a generated acc.sh script spawned in the
#   background with output captured in litmusacceptance.out.
# @param [String] output_path
#   The location that the commands are to be run from.
# @param [String] module_type
#   The module type (litmus or traditional).
# @param [String] provision_type
#   The litmus provision list to use.
# @param [String] module_name
#   The module name.
# @param [String] puppet_collection
#   The puppet collection to pin PUPPET_GEM_VERSION to (optional).
# @return [Integer, String, nil]
#   The detached background process id when the run was started.
def self.run_tests_locally(output_path, module_type, provision_type, module_name, puppet_collection)
  # Strip one layer of surrounding double quotes from the provision type.
  provision_type = provision_type.chomp('"').reverse.chomp('"').reverse
  status = Process::Status
  # Remember where we started so we can hop back between commands.
  old_path = Dir.pwd

  # Generate the acceptance-test driver script.
  file = File.open('acc.sh', 'w')
  file.puts '#!/bin/sh'
  file.puts "export PUPPET_GEM_VERSION='~> #{puppet_collection}'" if puppet_collection
  file.puts "rm -rf #{output_path}/Gemfile.lock;rm -rf #{output_path}/.bundle"
  file.puts 'bundle install --path .bundle/gems/ --jobs 4'
  file.puts "bundle exec rake 'litmus:provision_list[#{provision_type}]'"
  file.puts 'bundle exec rake litmus:install_agent'
  file.puts 'bundle exec rake litmus:install_module'
  file.puts 'bundle exec rake litmus:acceptance:parallel'
  file.puts 'bundle exec rake litmus:tear_down'
  file.close

  # Run the module's tests.
  if module_type == 'litmus'
    run_command(output_path, 'cp ../../acc.sh .', nil)
    Dir.chdir(old_path)
    run_command(output_path, 'chmod 777 acc.sh', nil)
    Dir.chdir(old_path)
    # 'background' makes run_command spawn/detach and return a pid.
    status = run_command(output_path, 'sh acc.sh 2>&1 | tee litmusacceptance.out', 'background')
    if status != 0
      PdkSync::Logger.info "SUCCESS:Kicking of module Acceptance tests to run for the module #{module_name} - SUCCEED.Results will be available in the following path #{output_path}/litmusacceptance.out.Process id is #{status}"
    else
      PdkSync::Logger.fatal "FAILURE:Kicking of module Acceptance tests to run for the module #{module_name} - FAILED.Results will be available in the following path #{output_path}/litmusacceptance.out."
    end
  end
  PdkSync::Logger.warn "(WARNING) Executing testcases locally supports only for litmus'" if module_type != 'litmus'
end
|
605
|
+
|
606
|
+
# @summary
|
607
|
+
# This method when called will fetch the module tests results.
|
608
|
+
# @param [String] output_path
|
609
|
+
# The location that the command is to be run from.
|
610
|
+
# @param [String] module_type
|
611
|
+
# The module type (litmus or traditional)
|
612
|
+
# @param [String] module_name
|
613
|
+
# The module name
|
614
|
+
# @param [String] report_rows
|
615
|
+
# The module test results
|
616
|
+
# @return [Integer]
|
617
|
+
# The status code of the pdk update run.
|
618
|
+
# @summary
#   Parse the tail of a litmus acceptance log and append a result row
#   (FAILED / SUCCESS / PROGRESS) for the module to report_rows.
# @param [String] output_path
#   Directory containing 'litmusacceptance.out'.
# @param [String] module_type
#   The module type; only 'litmus' is supported.
# @param [String] module_name
#   The module name, used in the report row.
# @param [Array] report_rows
#   Accumulator the result row is appended to.
# @return [Array, nil]
#   report_rows when module_type is 'litmus', otherwise nil.
def self.fetch_test_results_locally(output_path, module_type, module_name, report_rows)
  # Save the current path
  old_path = Dir.pwd
  PdkSync::Logger.warn "(WARNING) Fetching test results locally supports only for litmus'" if module_type != 'litmus'

  Dir.chdir(old_path)
  # BUGFIX: the original used [-10..-1], which returns nil when the log has
  # fewer than 10 lines and then crashed on lines.find; Array#last(10) is safe.
  lines = IO.readlines("#{output_path}/litmusacceptance.out").last(10)
  failure_seen = lines.any? { |e| %r{^Failed} =~ e } || lines.any? { |e| %r{--trace} =~ e }
  if lines.any? { |e| %r{exit} =~ e }
    # The run finished: it either failed or succeeded.
    report_rows << if failure_seen
                     [module_name, 'FAILED', "Results are available in the following path #{output_path}/litmusacceptance.out"]
                   else
                     [module_name, 'SUCCESS', "Results are available in the following path #{output_path}/litmusacceptance.out"]
                   end
  else
    # No exit marker yet: either an early failure or still running.
    report_rows << if failure_seen || lines.any? { |e| %r{rake aborted} =~ e }
                     [module_name, 'FAILED', "Results are available in the following path #{output_path}/litmusacceptance.out"]
                   else
                     [module_name, 'PROGRESS', "Results will be available in the following path #{output_path}/litmusacceptance.out"]
                   end
  end
  report_rows if module_type == 'litmus'
end
|
643
|
+
|
644
|
+
# @summary
|
645
|
+
# This method when called will find the source location of the gem to test
|
646
|
+
# @param [String] gem_to_test
|
647
|
+
# The gem to test
|
648
|
+
# @param [String] gem_line
|
649
|
+
# The line to update in the Gemfile
|
650
|
+
# @return [String]
|
651
|
+
# The source location of the gem to test
|
652
|
+
# @summary
#   This method when called will find the source location of the gem to test
# @param [String] gem_to_test
#   The gem to test
# @param [String] gem_line
#   The line to update in the Gemfile
# @return [Array, String, nil]
#   gem_line split on commas when given; otherwise the source string taken
#   from the Gemfile entry, the default puppetlabs URL, or nil
def self.get_source_test_gem(gem_to_test, gem_line)
  return gem_line.split(',') if gem_line
  return gem_to_test unless gem_to_test

  # Find the Gemfile entry (if any) that mentions the gem.
  gemfile_line = File.readlines('Gemfile').find do |line|
    line.include?(gem_to_test.to_s)
  end

  # Not declared in the Gemfile: fall back to the canonical puppetlabs repo.
  return "https://github.com/puppetlabs/#{gem_to_test}" unless gemfile_line
  gemfile_line =~ %r{(http|https|ftp|ftps)\:\/\/[a-zA-Z0-9\-\.]+\.[a-zA-Z]{2,3}(\/\S*)?}
  # BUGFIX: the original referenced the undefined local `line` (a leaked
  # block-parameter name), raising NameError whenever an entry was found.
  source_part = gemfile_line.split(',')[1]
  source_part.strip.to_s if source_part
end
|
664
|
+
|
665
|
+
# @summary
|
666
|
+
# This method when called will validate the gem_line to update in the Gemfile
|
667
|
+
# @param [String] gem_to_test
|
668
|
+
# The gem to test
|
669
|
+
# @param [String] gem_line
|
670
|
+
# The line to update in the Gemfile
|
671
|
+
# Clones the repository of the gem under test into the pdksync working dir
# and caches its git refs, branches, and tags into @all_refs, @all_branches,
# and @all_versions for the validate_gem_*_replacer checks.
# NOTE(review): clean_env, run_command, clone_directory and configuration are
# helpers defined elsewhere in this module — verify their contracts there.
def self.validate_gem_update_module(gem_to_test, gem_line, output_path, main_path)
  # Strip one pair of surrounding double quotes, if present.
  gem_to_test = gem_to_test.chomp('"').reverse.chomp('"').reverse
  Dir.chdir(main_path)
  output_path = "#{configuration.pdksync_dir}/#{gem_to_test}"
  # Remove any stale checkout from a previous run.
  clean_env(output_path) if Dir.exist?(output_path)
  print 'delete module directory, '

  # when gem_line is specified, we need to parse the line and identify all the values
  # - we can have source url or we need to
  # - sha, branch, version
  if gem_line
    git_repo = get_source_test_gem(gem_to_test, gem_line)
    i = 0
    git_repo.each do |item|
      i += 1
      # A component containing a git/http(s) URL wins; otherwise, after the
      # last component, fall back to the configured git base URI + namespace.
      if item =~ %r{((git@|http(s)?:\/\/)([\w\.@]+)(\/|:))([\w,\-,\_]+)\/([\w,\-,\_]+)(.git){0,1}((\/){0,1})}
        git_repo = item.split('git:')[1].strip.delete("'")
        break
      elsif git_repo.size == i
        # git_repo = "https://github.com/puppetlabs#{gem_to_test}"
        sep = configuration.git_base_uri.start_with?('git@') ? ':' : '/'
        git_repo = "#{configuration.git_base_uri}#{sep}#{configuration.namespace}/#{gem_to_test}"
      end
    end
    print 'clone module directory, '
    git_repo = run_command(configuration.pdksync_dir.to_s, "git clone #{git_repo}", nil)
  elsif gem_to_test
    git_repo = clone_directory(configuration.namespace, gem_to_test, output_path.to_s)
  end

  Dir.chdir(main_path)
  raise "Unable to clone repo for #{gem_to_test}. Check repository's url to be correct!".red if git_repo.nil?

  @all_versions = ''
  @all_refs = ''
  @all_branches = ''

  Dir.chdir(output_path)

  # Cache refs / branches / tags of the clone for later validation calls.
  stdout_refs, stderr_refs, status_refs = Open3.capture3('git show-ref -s')
  @all_refs = stdout_refs
  stdout_branches, stderr_branches, status_branches = Open3.capture3('git branch -a')
  @all_branches = stdout_branches
  stdout_versions, stderr_versions, status_versions = Open3.capture3('git tag')
  @all_versions = stdout_versions

  raise "Couldn't get references due to #{stderr_refs}".red unless status_refs.exitstatus.zero?
  raise "Couldn't get branches due to #{stderr_branches}".red unless status_branches.exitstatus.zero?
  raise "Couldn't get versions due to #{stderr_versions}".red unless status_versions.exitstatus.zero?
  Dir.chdir(main_path)
end
|
722
|
+
|
723
|
+
# @summary
|
724
|
+
# This method when called will validate the gem_sha_replacer to update in the Gemfile
|
725
|
+
# @param [String] gem_to_test
|
726
|
+
# The gem to test
|
727
|
+
# @param [String] gem_sha_replacer
|
728
|
+
# The sha to update in the Gemfile
|
729
|
+
# @summary
#   This method when called will validate the gem_sha_replacer to update in the Gemfile
# @param [String] gem_sha_replacer
#   The sha to update in the Gemfile
# @param [String] gem_to_test
#   The gem to test
def self.validate_gem_sha_replacer(gem_sha_replacer, gem_to_test)
  # @all_refs is populated by validate_gem_update_module.
  matching_shas = @all_refs.split(' ').select { |sha| sha == gem_sha_replacer }
  matching_shas.each { puts "SHA #{gem_sha_replacer} valid.\n".green }
  raise "Couldn't find sha: #{gem_sha_replacer} in your repository: #{gem_to_test}".red if matching_shas.empty?
end
|
737
|
+
|
738
|
+
# @summary
|
739
|
+
# This method when called will validate the gem_branch_replacer to update in the Gemfile
|
740
|
+
# @param [String] gem_to_test
|
741
|
+
# The gem to test
|
742
|
+
# @param [String] gem_branch_replacer
|
743
|
+
# The branch to update in the Gemfile
|
744
|
+
# @summary
#   This method when called will validate the gem_branch_replacer to update in the Gemfile
# @param [String] gem_branch_replacer
#   The branch to update in the Gemfile
# @param [String] gem_to_test
#   The gem to test
def self.validate_gem_branch_replacer(gem_branch_replacer, gem_to_test)
  # @all_branches is populated by validate_gem_update_module.
  unless @all_branches.include?(gem_branch_replacer)
    raise "Couldn't find branch: #{gem_branch_replacer} in your repository: #{gem_to_test}".red
  end
  puts "Branch #{gem_branch_replacer} valid.\n".green
end
|
748
|
+
|
749
|
+
# @summary
|
750
|
+
# This method when called will validate the gem_version_replacer to update in the Gemfile
|
751
|
+
# @param [String] gem_to_test
|
752
|
+
# The gem to test
|
753
|
+
# @param [String] gem_version_replacer
|
754
|
+
# The version to update in the Gemfile
|
755
|
+
# @summary
#   This method when called will validate the gem_version_replacer to update in the Gemfile
# @param [String] gem_version_replacer
#   The version to update in the Gemfile
# @param [String] gem_to_test
#   The gem to test
def self.validate_gem_version_replacer(gem_version_replacer, gem_to_test)
  # @all_versions is populated by validate_gem_update_module.
  matching_versions = @all_versions.split(' ').select { |version| version == gem_version_replacer }
  matching_versions.each { puts "Version #{gem_version_replacer} valid.\n".green }
  raise "Couldn't find version: #{gem_version_replacer} in your repository: #{gem_to_test}".red if matching_versions.empty?
end
|
763
|
+
|
764
|
+
# @summary
|
765
|
+
# This method when called will create a pr on the given repository that will create a pr to merge the given commit into the main with the pdk version as an identifier.
|
766
|
+
# @param [PdkSync::GitPlatformClient] client
|
767
|
+
# The Git platform client used to gain access to and manipulate the repository.
|
768
|
+
# @param [String] output_path
|
769
|
+
# The location that the command is to be run from.
|
770
|
+
# @param [String] jenkins_client
|
771
|
+
# Jenkins authentication.
|
772
|
+
# @param [String] repo_name
|
773
|
+
# Module to run on Jenkins
|
774
|
+
# @param [String] current_branch
|
775
|
+
# The branch against which the user needs to run the jenkin jobs
|
776
|
+
# @summary
#   Kick off an adhoc Jenkins job for the given module and branch.
# @param [PdkSync::JenkinsClient] jenkins_client
#   Jenkins authentication / API client.
# @param [String] repo_name
#   Module to run on Jenkins.
# @param [String] current_branch
#   The branch against which the user needs to run the jenkins jobs.
# @param [String] github_user
#   GitHub user the job is created for.
# @param [String] job_name
#   Name of the Jenkins job.
# @return [Object, nil]
#   Whatever create_adhoc_job returns, or nil when nothing was supplied.
def self.run_tests_jenkins(jenkins_client, repo_name, current_branch, github_user, job_name)
  # Nothing to do when no client, repo, and branch were supplied at all.
  return if jenkins_client.nil? && repo_name.nil? && current_branch.nil?
  jenkins_client.create_adhoc_job(repo_name, current_branch, github_user, job_name)
rescue StandardError => error
  puts "(FAILURE) Jenkins Job creation for #{repo_name} has failed. #{error}".red
end
|
787
|
+
|
788
|
+
# convert duration from ms to format h m s ms
|
789
|
+
# Convert a duration in milliseconds to an "Nh Nm Ns Nms" string.
# @param [Integer, nil] ms
#   Duration in milliseconds; nil/false yields an empty string.
# @return [String]
def self.duration_hrs_and_mins(ms)
  return '' unless ms
  hours, remainder = ms.divmod(3_600_000)
  minutes, remainder = remainder.divmod(60_000)
  seconds, millis = remainder.divmod(1000)
  "#{hours}h #{minutes}m #{seconds}s #{millis}ms"
end
|
796
|
+
|
797
|
+
# return jenkins job urls
|
798
|
+
# @summary
#   Build the list of Jenkins job URLs for the given adhoc job by following
#   each job's 'downstreamProjects' chain from the Jenkins JSON API.
#   The array is intentionally appended to while being iterated — Ruby's
#   Array#each visits elements appended mid-iteration, which walks the chain.
# @param [String] job_name
#   The Jenkins job to start from.
# @param [String] jenkins_server_urls
#   Base URL of the Jenkins server.
# @return [Array<String>]
#   The collected job URLs.
def self.adhoc_urls(job_name, jenkins_server_urls)
  adhoc_urls = []
  # get adhoc jobs
  adhoc_urls.push("#{jenkins_server_urls}/job/#{job_name}")
  adhoc_urls.each do |url|
    conn = Faraday::Connection.new "#{url}/api/json"
    res = conn.get
    build_job_data = JSON.parse(res.body.to_s)
    downstream_job = build_job_data['downstreamProjects']
    break if downstream_job.empty?
    downstream_job.each do |item|
      next if item.nil?
      # BUGFIX: was `unless item['url'].nil? && item['url'].include?(...)`,
      # which raised NoMethodError on a nil URL and never filtered out
      # 'skippable_adhoc' jobs. The intended condition is an OR.
      adhoc_urls.push(item['url']) unless item['url'].nil? || item['url'].include?('skippable_adhoc')
    end
  end
  adhoc_urls
end
|
815
|
+
|
816
|
+
# test_results_jenkins
|
817
|
+
# Fetches and summarizes Jenkins test results for a module: walks the adhoc
# job chain, analyses each build, appends rows via generate_report_table, and
# prints a Terminal::Table summary. Results are also appended to
# "results_<module_name>.out" in the current directory.
# NOTE(review): relies on instance state (@failed/@in_progress/@aborted/@data)
# shared with get_data_build and generate_report_table.
def self.test_results_jenkins(jenkins_server_url, build_id, job_name, module_name)
  PdkSync::Logger.info 'Fetch results from jenkins'
  # remove duplicates and sort the list
  # NOTE(review): the sort key scans the display name for a two-digit number —
  # presumably a sequence prefix in the job's fullDisplayName; confirm.
  adhoc_urls = adhoc_urls(job_name, jenkins_server_url).uniq.sort_by { |url| JSON.parse(Faraday.get("#{url}/api/json").body.to_s)['fullDisplayName'].scan(%r{[0-9]{2}\s}).first.to_i }
  report_rows = []
  @failed = false
  @in_progress = false
  @aborted = false

  # Start a fresh results file for this run.
  File.delete("results_#{module_name}.out") if File.exist?("results_#{module_name}.out")
  # analyse each build result - get status, execution time, logs_link
  @data = "MODULE_NAME=#{module_name}\nBUILD_ID=#{build_id}\nINITIAL_job=#{jenkins_server_url}/job/#{job_name}/#{build_id}\n\n"
  write_to_file("results_#{module_name}.out", @data)
  PdkSync::Logger.info "Analyse test execution report \n"
  adhoc_urls.each do |url|
    # next if skipped in build name
    current_build_data = JSON.parse(Faraday.get("#{url}/api/json").body.to_s)
    next if url.include?('skippable_adhoc') || current_build_data['color'] == 'notbuilt'
    next if current_build_data['fullDisplayName'].downcase.include?('skipped')
    # Stop analysing further builds once a failure / in-progress is detected.
    returned_data = get_data_build(url, build_id, module_name) unless @failed || @in_progress
    generate_report_table(report_rows, url, returned_data)
  end

  table = Terminal::Table.new title: "Module Test Results for: #{module_name}\nCheck results in #{Dir.pwd}/results_#{module_name}.out ", headings: %w[Status Result Execution_Time], rows: report_rows
  PdkSync::Logger.info "SUCCESSFUL test results!\n".green unless @failed || @in_progress
  PdkSync::Logger.info "\n#{table} \n"
end
|
844
|
+
|
845
|
+
# generate report table when running tests on jenkins
|
846
|
+
# Append one report row for a Jenkins build to report_rows.
# @param [Array] report_rows
#   Accumulator of [status, url, execution_time] rows.
# @param [String] url
#   The build URL for the row.
# @param [Array, nil] data
#   [status, execution_time] from the build analysis; nil appends nothing.
def self.generate_report_table(report_rows, url, data)
  return if data.nil?
  # Overall @failed / @aborted state overrides the individual build status.
  status = if @failed
             'FAILED'
           elsif @aborted
             'ABORTED'
           else
             data[0]
           end
  report_rows << [status, url, data[1]]
end
|
855
|
+
|
856
|
+
# for each build from adhoc jobs, get data
|
857
|
+
# if multiple platforms under current url, get data for each platform
|
858
|
+
# @summary
#   Analyse one Jenkins build (or every platform of a matrix build) via
#   analyse_jenkins_report, updating the @failed / @in_progress / @aborted
#   flags and appending a summary to "results_<module_name>.out".
# @param [String] url
#   Jenkins job URL to inspect.
# @param [String] build_id
#   Build number of the initial job.
# @param [String] module_name
#   Module under test.
# @return [Array, nil]
#   [status, execution_time] of the last analysed build, or nil when a matrix
#   build exposed no built platforms.
def self.get_data_build(url, build_id, module_name)
  # BUGFIX: declare here so assignments inside the blocks below update this
  # variable rather than creating block-locals — the original raised
  # NameError on the final `returned_data` for matrix builds.
  returned_data = nil
  # Map an analysed build status onto the module-level progress flags.
  record_status = lambda do |status|
    case status
    when 'in progress' then @in_progress = true
    when 'FAILURE' then @failed = true
    when 'ABORTED' then @aborted = true
    end
  end

  current_build_data = JSON.parse(Faraday.get("#{url}/api/json").body.to_s)
  if current_build_data['activeConfigurations'].nil?
    # Plain (non-matrix) job: analyse it directly.
    returned_data = analyse_jenkins_report(url, module_name, build_id)
    record_status.call(returned_data[0])
  else
    # Matrix job: analyse every platform that was actually built.
    platforms_list = []
    current_build_data['activeConfigurations'].each do |url_child|
      next if url_child['color'] == 'notbuilt'
      platforms_list.push(url_child['url'])
    end

    platforms_list.each do |platform_build|
      returned_data = analyse_jenkins_report(platform_build, module_name, build_id)
      record_status.call(returned_data[0])
    end
  end

  @data = "\nFAILURE. Fix the failures and rerun tests!\n" if @failed
  @data = "\nIN PROGRESS. Please check test report after the execution is done!\n" if @in_progress
  write_to_file("results_#{module_name}.out", @data) if @failed || @in_progress
  PdkSync::Logger.info 'Failed status! Fix errors and rerun.'.red if @failed
  PdkSync::Logger.info 'Aborted status! Fix errors and rerun.'.red if @aborted
  PdkSync::Logger.info 'Tests are still running! You can fetch the results later by using this task: test_results_jenkins'.blue if @in_progress
  returned_data
end
|
896
|
+
|
897
|
+
# write test report to file
|
898
|
+
# Append the given data to the given file (used for test report output).
# @param [String] file
#   Path of the file to append to.
# @param [String] data
#   Text to append.
def self.write_to_file(file, data)
  # BUGFIX: the original ignored its second parameter (named _data) and wrote
  # the @data instance variable instead; write what the caller passed.
  File.open(file, 'a') do |f|
    f.write data
  end
end
|
903
|
+
|
904
|
+
# analyse jenkins report
|
905
|
+
# Analyse a single Jenkins build: determine its status and execution time,
# append a summary to "results_<module_name>.out", and return the pair.
# @return [Array<String>] [status, execution_time]
def self.analyse_jenkins_report(url, module_name, build_id)
  # builds don't have the same build_id. That's why just the init build will be identified by id, rest of them by lastBuild
  last_build_job_data = JSON.parse(Faraday.get("#{url}/#{build_id}/api/json").body.to_s) if url.include?('init-manual-parameters_adhoc')
  last_build_job_data = JSON.parse(Faraday.get("#{url}/lastBuild/api/json").body.to_s) unless url.include?('init-manual-parameters_adhoc')

  # status = 'not_built' unless last_build_job_data
  # A nil 'result' in the Jenkins API means the build is still running.
  if last_build_job_data['result'].nil?
    status = 'in progress'
    execution_time = 'running'
  else
    status = last_build_job_data['result']
    execution_time = duration_hrs_and_mins(last_build_job_data['duration'].to_i)
  end

  # execution_time = 0 unless last_build_job_data
  # NOTE(review): the init-build link inserts a '/' before the build id while
  # the lastBuild link appends directly to url — presumably url already ends
  # with '/' in that branch; confirm against adhoc_urls output.
  logs_link = "#{url}/#{build_id}/" if url.include?('init-manual-parameters_adhoc')
  logs_link = "#{url}lastBuild/" unless url.include?('init-manual-parameters_adhoc')
  @data = "Job title =#{last_build_job_data['fullDisplayName']}\n logs_link = #{logs_link}\n status = #{status}\n"
  return_data = [status, execution_time]
  write_to_file("results_#{module_name}.out", @data)
  return_data
end
|
927
|
+
|
928
|
+
# @summary
|
929
|
+
# Check the most recent tagged release on GitHub for the gem
|
930
|
+
# @param [String] gem_to_test
|
931
|
+
# The gem to test
|
932
|
+
# The current version of the gem
|
933
|
+
# @summary
#   Check the most recent tagged release on GitHub for the gem
# @param [String] gem_to_test
#   The gem to test
# @return [String, nil]
#   The newest tag name, or nil when the lookup fails.
def self.check_gem_latest_version(gem_to_test)
  Octokit.tags("puppetlabs/#{gem_to_test}").first[:name]
rescue StandardError => error
  puts "(WARNING) Unable to check latest gem version. #{error}".red
  nil
end
|
939
|
+
|
940
|
+
# @summary
|
941
|
+
# Update the gem version by one
|
942
|
+
# @param [String] gem_version
|
943
|
+
# The current version of the gem
|
944
|
+
# The bump version by one of the gem
|
945
|
+
# @summary
#   Update the gem version by one
# @param [String] gem_version
#   The current version of the gem
# @return [Gem::Version, nil]
#   The bumped version, or nil when the input cannot be parsed.
def self.update_gem_latest_version_by_one(gem_version)
  Gem::Version.new(gem_version).bump
rescue StandardError => error
  puts "(WARNING) Unable to check latest gem version. #{error}".red
  nil
end
|
952
|
+
|
953
|
+
# @summary
|
954
|
+
# Update Gemfile with multigem
|
955
|
+
# @param [String] output_path
|
956
|
+
# The location that the command is to be run from.
|
957
|
+
# @param [String] gem_name
|
958
|
+
# The gem name
|
959
|
+
# @param [String] gemfury_token
|
960
|
+
# The gemfury token
|
961
|
+
# @param [String] gemfury_user
|
962
|
+
# The gemfury user
|
963
|
+
# @summary
#   Rewrites the module's Gemfile so the named gem is fetched from a Gemfury
#   source: appends ':source => <gemfury url>' to the gem's line and inserts
#   a global Gemfury 'source' line near the top of the Gemfile.
# NOTE(review): `temp_file_path` is not defined in this method — presumably a
# helper/constant elsewhere in this module; verify, otherwise this raises
# NameError at runtime.
def self.update_gemfile_multigem(output_path, gem_name, gemfury_token, gemfury_user)
  gem_file_name = 'Gemfile'
  gem_source_line = "source \"https://#{gemfury_token}@gem.fury.io/#{gemfury_user}/\""
  Dir.chdir(output_path) unless Dir.pwd == output_path

  if gem_name.nil? == false && gemfury_token.nil? == false && gemfury_user.nil? == false # rubocop:disable Style/GuardClause
    # Append the gem with new source location
    # Strip one pair of surrounding double quotes, if present.
    gem_name = gem_name.chomp('"').reverse.chomp('"').reverse
    begin
      # Rewrite the Gemfile line-by-line into a temp file, tagging the gem's
      # line with the Gemfury source unless it already carries it.
      File.open(temp_file_path, 'w') do |out_file|
        File.foreach(gem_file_name) do |line|
          if line =~ %r{#{gem_name}}
            line = line.chomp
            if line =~ %r{"https://#{gemfury_token}@gem.fury.io/#{gemfury_user}/"}
              puts 'GemFile Already updated'.green
              out_file.puts line.to_s
            else
              out_file.puts "#{line} , :source => \"https://#{gemfury_token}@gem.fury.io/#{gemfury_user}/\""
            end
          else
            out_file.puts line
          end
        end
      end
      FileUtils.mv(temp_file_path, gem_file_name)

      # Insert the new source Gem location to Gemfile
      file = File.open(gem_file_name)
      contents = file.readlines.map(&:chomp)
      # Line index 2 — after the default source/comment header lines.
      contents.insert(2, gem_source_line) unless contents.include?(gem_source_line)
      File.open(gem_file_name, 'w') { |f| f.write contents.join("\n") }
    rescue Errno::ENOENT => e
      raise "Couldn't find file: #{gem_file_name} #{e} in your repository: #{gem_file_name}".red
    rescue Errno::EACCES => e
      raise "Does not have required permissions to the #{gem_file_name} #{e} in your repository: #{gem_file_name}".red
    end
  end
end
|
1001
|
+
|
1002
|
+
# @summary
|
1003
|
+
# Adds an entry to the 'provision.yaml' of a module with the values given
|
1004
|
+
# @param [String] module_path
|
1005
|
+
# Path to the module root dir
|
1006
|
+
# @param [String] key
|
1007
|
+
# Key name in 'provision.yaml' (e.g. "release_checks_7)
|
1008
|
+
# @param [String] provisioner
|
1009
|
+
# The value for the provisioner key (e.g. "abs")
|
1010
|
+
# @param [Array] images
|
1011
|
+
# The list of images for the images key (e.g. ['ubuntu-1804-x86_64, ubuntu-2004-x86_64', 'centos-8-x86_64'])
|
1012
|
+
# @return [Boolean]
|
1013
|
+
# True if entry was successfully added to 'provision.yaml'
|
1014
|
+
# False if 'provision.yaml' does not exist or is an empty file
|
1015
|
+
# @summary
#   Adds an entry to the 'provision.yaml' of a module with the values given
# @param [String] module_path
#   Path to the module root dir
# @param [String] key
#   Key name in 'provision.yaml' (e.g. "release_checks_7")
# @param [String] provisioner
#   The value for the provisioner key (e.g. "abs")
# @param [Array] images
#   The list of images for the images key
# @return [Boolean, Integer]
#   false when 'provision.yaml' is missing or empty; otherwise truthy.
def self.add_provision_list(module_path, key, provisioner, images)
  provision_yaml_path = "#{module_path}/provision.yaml"
  return false unless File.exist? provision_yaml_path
  PdkSync::Logger.info "Updating #{provision_yaml_path}"
  entries = YAML.safe_load(File.read(provision_yaml_path))
  return false if entries.nil?
  entries[key] = { 'provisioner' => provisioner, 'images' => images }
  File.write(provision_yaml_path, YAML.dump(entries))
end
|
1026
|
+
|
1027
|
+
# @summary
|
1028
|
+
# Query the 'metadata.json' in the given module path and return the compatible platforms
|
1029
|
+
# @param [String] module_path
|
1030
|
+
# Path to the module root dir
|
1031
|
+
# @return [Hash]
|
1032
|
+
# The compatible OSs defined in the 'operatingsystem_support' key of the 'metadata.json'
|
1033
|
+
# @summary
#   Query the 'metadata.json' in the given module path and return the compatible platforms
# @param [String] module_path
#   Path to the module root dir
# @return [Array<Hash>]
#   The compatible OSs defined under the 'operatingsystem_support' key.
def self.module_supported_platforms(module_path)
  PdkSync::Logger.info 'Determining supported platforms from metadata.json'
  os_support_key = 'operatingsystem_support'
  metadata_json = "#{module_path}/metadata.json"
  raise 'Could not locate metadata.json' unless File.exist? metadata_json
  parsed_metadata = JSON.parse(File.read(metadata_json))
  raise "Could not locate '#{os_support_key}' key from #{metadata_json}" unless parsed_metadata.key? os_support_key
  parsed_metadata[os_support_key]
end
|
1042
|
+
|
1043
|
+
# @summary
|
1044
|
+
# Take a Windows version extracted from the module's 'metadata.json' and normalize it to the version conventions
|
1045
|
+
# that VMPooler uses
|
1046
|
+
# @param ver
|
1047
|
+
# Version from 'metadata.json'
|
1048
|
+
# @return [String]
|
1049
|
+
# Normalised version that is used by VMPooler templates
|
1050
|
+
# @summary
#   Take a Windows version extracted from the module's 'metadata.json' and normalize it to the version conventions
#   that VMPooler uses
# @param ver
#   Version from 'metadata.json'
# @return [String]
#   Normalised version that is used by VMPooler templates
def self.normalize_win_version(ver)
  PdkSync::Logger.debug "Normalising Windows version from metadata.json: #{ver}"
  match = ver.match(%r{(?:Server\s)?(?<ver>\d+)(?:\s(?<rel>R\d))?})
  raise "Unable to determine Windows version from metadata.json: #{ver}" unless match
  # Append the release suffix (e.g. "R2") only when present.
  release_suffix = match['rel'] ? " #{match['rel'].upcase}" : ''
  "#{match['ver']}#{release_suffix}"
end
|
1058
|
+
|
1059
|
+
# @summary
|
1060
|
+
# Normalize the given os name
|
1061
|
+
# @param os
|
1062
|
+
# The OS name to normalize
|
1063
|
+
# @return [String]
|
1064
|
+
# Normalized os name
|
1065
|
+
# @summary
#   Normalize the given os name
# @param os
#   The OS name to normalize
# @return [String]
#   Normalized os name
# @raise [RuntimeError]
#   When the name matches no known platform.
def self.normalize_os(os)
  # Ordered pattern table — first match wins, mirroring the original
  # case/when ordering (e.g. 'darwin' must be tested before the 'win' rule).
  patterns = [
    [%r{aix}i, 'AIX'],
    [%r{cent}i, 'CentOS'],
    [%r{darwin}i, 'Darwin'],
    [%r{deb}i, 'Debian'],
    [%r{fedora}i, 'Fedora'],
    [%r{oracle}i, 'OracleLinux'],
    [%r{osx}i, 'OSX'],
    [%r{pan}i, 'PAN-OS'],
    [%r{red}i, 'RedHat'],
    [%r{sci}i, 'Scientific'],
    [%r{suse|sles}i, 'SLES'],
    [%r{sol}i, 'Solaris'],
    [%r{ubuntu}i, 'Ubuntu'],
    [%r{win}i, 'Windows']
  ]
  _, normalized = patterns.find { |pattern, _name| os =~ pattern }
  raise "Could not normalize OS value: #{os}" unless normalized
  normalized
end
|
1099
|
+
|
1100
|
+
# @summary
|
1101
|
+
# Get the metadata.json of the given module
|
1102
|
+
# @param module_path
|
1103
|
+
# Path to the root dir of the module
|
1104
|
+
# @return [JSON]
|
1105
|
+
# JSON of the metadata.json
|
1106
|
+
# @summary
#   Get the metadata.json of the given module
# @param module_path
#   Path to the root dir of the module
# @return [Hash]
#   Parsed contents of the metadata.json
def self.metadata_json(module_path)
  metadata_path = "#{module_path}/metadata.json"
  raise 'Could not locate metadata.json' unless File.exist?(metadata_path)
  JSON.parse(File.read(metadata_path))
end
|
1111
|
+
|
1112
|
+
# Key names used in a module's metadata.json 'operatingsystem_support'
# entries; shared by the metadata-manipulation helpers below.
OPERATINGSYSTEM = 'operatingsystem'.freeze
OPERATINGSYSTEMRELEASE = 'operatingsystemrelease'.freeze
OPERATINGSYSTEM_SUPPORT = 'operatingsystem_support'.freeze
|
1115
|
+
|
1116
|
+
# @summary
|
1117
|
+
# Write the given metadata in JSON format to the given module root dir path
|
1118
|
+
# @param module_path
|
1119
|
+
# Path to the root dir of the module
|
1120
|
+
# @param metadata_json
|
1121
|
+
# Metadata in JSON format to write to the module root dir
|
1122
|
+
# @summary
#   Write the given metadata in JSON format to the given module root dir path
# @param module_path
#   Path to the root dir of the module
# @param metadata_json
#   Metadata (Hash) to serialize into the module's metadata.json
def self.write_metadata_json(module_path, metadata_json)
  target = File.join(module_path, 'metadata.json')
  # Pretty-print with a trailing newline, matching pdk conventions.
  File.open(target, 'w') { |f| f.write(JSON.pretty_generate(metadata_json) + "\n") }
end
|
1127
|
+
|
1128
|
+
# @summary
|
1129
|
+
# Normalize the 'operatingsystem_support' entries in the metadata.json
|
1130
|
+
# @param module_path
|
1131
|
+
# Path to the root dir of the module
|
1132
|
+
# @summary
#   Normalize the 'operatingsystem_support' entries in the metadata.json
# @param module_path
#   Path to the root dir of the module
def self.normalize_metadata_supported_platforms(module_path)
  new_metadata_json = metadata_json(module_path)

  new_metadata_json[OPERATINGSYSTEM_SUPPORT].each do |os_vers|
    # Canonicalize the OS name (raises on unknown names — see normalize_os).
    normalized_os = normalize_os(os_vers[OPERATINGSYSTEM])
    unless normalized_os == os_vers[OPERATINGSYSTEM]
      PdkSync::Logger.info "Corrected OS Name: '#{os_vers[OPERATINGSYSTEM]}' -> '#{normalized_os}'"
      os_vers[OPERATINGSYSTEM] = normalized_os
    end
    # Only Windows release strings get version normalization.
    next unless normalized_os == 'Windows'
    normalized_vers = os_vers[OPERATINGSYSTEMRELEASE].map { |v| normalize_win_version(v) }
    unless normalized_vers == os_vers[OPERATINGSYSTEMRELEASE]
      PdkSync::Logger.info "Corrected OS Versions: #{os_vers[OPERATINGSYSTEMRELEASE]} -> #{normalized_vers}"
      os_vers[OPERATINGSYSTEMRELEASE] = normalized_vers
    end
  end

  write_metadata_json(module_path, new_metadata_json)
end
|
1151
|
+
|
1152
|
+
# @summary
|
1153
|
+
# Removes the OS version from the supported platforms
|
1154
|
+
# TODO: Remove entire OS entry when version is nil
|
1155
|
+
# TODO: Remove entire OS entry when versions is empty
|
1156
|
+
# @param module_path
|
1157
|
+
# Path to the root dir of the module
|
1158
|
+
# @param os_to_remove
|
1159
|
+
# OS we want to remove version from
|
1160
|
+
# @param version_to_remove
|
1161
|
+
# Version from OS we want to remove
|
1162
|
+
# @summary
#   Removes the OS version from the supported platforms
#   TODO: Remove entire OS entry when version is nil
#   TODO: Remove entire OS entry when versions is empty
# @param module_path
#   Path to the root dir of the module
# @param os_to_remove
#   OS we want to remove version from
# @param version_to_remove
#   Version from OS we want to remove
def self.remove_platform_from_metadata(module_path, os_to_remove, version_to_remove)
  new_metadata_json = metadata_json(module_path)
  new_metadata_json[OPERATINGSYSTEM_SUPPORT].each do |os_vers|
    # NOTE(review): normalize_os raises for unknown names and never returns a
    # falsy value (see its definition), so the else branch below looks
    # unreachable — confirm before relying on the early `return true`.
    if (os = normalize_os(os_vers[OPERATINGSYSTEM]))
      next unless os == os_to_remove
      vers = os_vers[OPERATINGSYSTEMRELEASE]
      next unless (ver_index = vers.find_index(version_to_remove))
      PdkSync::Logger.info "Removing #{os} #{vers[ver_index]} from metadata.json"
      vers.delete_at(ver_index)
    else
      PdkSync::Logger.info 'No entry in metadata.json to replace'
      return true
    end
  end
  write_metadata_json(module_path, new_metadata_json)
end
|
1178
|
+
|
1179
|
+
# @summary
|
1180
|
+
# Adds an OS version to the supported platforms. Creates a new OS entry if it does not exist
|
1181
|
+
# @param module_path
|
1182
|
+
# Path to the root dir of the module
|
1183
|
+
# @param os_to_add
|
1184
|
+
# OS we want to add
|
1185
|
+
# @param version_to_add
|
1186
|
+
# Version we want to add
|
1187
|
+
# @summary
#   Adds an OS version to the supported platforms. Creates a new OS entry if it does not exist
# @param module_path
#   Path to the root dir of the module
# @param os_to_add
#   OS we want to add
# @param version_to_add
#   Version we want to add
def self.add_platform_to_metadata(module_path, os_to_add, version_to_add)
  os_to_add = normalize_os(os_to_add)
  new_metadata_json = metadata_json(module_path)
  updated_existing_entry = false
  new_metadata_json[OPERATINGSYSTEM_SUPPORT].each do |os_vers|
    next unless (os = normalize_os(os_vers[OPERATINGSYSTEM]))
    next unless os == os_to_add
    PdkSync::Logger.info "Adding #{os_to_add} version #{version_to_add} to existing entry"
    # Keep the version list unique and numerically sorted.
    os_vers[OPERATINGSYSTEMRELEASE] << version_to_add
    os_vers[OPERATINGSYSTEMRELEASE].uniq!
    os_vers[OPERATINGSYSTEMRELEASE].sort_by!(&:to_f)
    updated_existing_entry = true
    break
  end
  # No matching OS entry: create a fresh one.
  unless updated_existing_entry
    PdkSync::Logger.info "Adding #{os_to_add} version #{version_to_add} to new entry"
    supported_platform_entry = {}
    supported_platform_entry[OPERATINGSYSTEM] = os_to_add
    supported_platform_entry[OPERATINGSYSTEMRELEASE] = [version_to_add]
    new_metadata_json[OPERATINGSYSTEM_SUPPORT] << supported_platform_entry
  end
  write_metadata_json(module_path, new_metadata_json)
end
|
1210
|
+
|
1211
|
+
# Key names used in a module's metadata.json 'requirements' entries.
NAME = 'name'.freeze
REQUIREMENTS = 'requirements'.freeze
|
1213
|
+
|
1214
|
+
# @summary
|
1215
|
+
# Updates the requirements parameter in the metadata.json. If the requirement or a key within it doesn't exist,
|
1216
|
+
# it is created.
|
1217
|
+
# TODO: Ability to remove requirement
|
1218
|
+
# @param module_path
|
1219
|
+
# Path to the root dir of the module
|
1220
|
+
# @param name
|
1221
|
+
# Name attribute of the requirement
|
1222
|
+
# @param key
|
1223
|
+
# The key name of a K/V pair to be added / updated in the requirement
|
1224
|
+
# @param value
|
1225
|
+
# The value of the key to be added / updated in the requirement
|
1226
|
+
# @summary
#   Updates the requirements parameter in the metadata.json. If the requirement or a key within it doesn't exist,
#   it is created.
#   TODO: Ability to remove requirement
# @param module_path
#   Path to the root dir of the module
# @param name
#   Name attribute of the requirement
# @param key
#   The key name of a K/V pair to be added / updated in the requirement
# @param value
#   The value of the key to be added / updated in the requirement
def self.update_requirements(module_path, name, key, value)
  new_metadata_json = metadata_json(module_path)
  updated_existing_entry = false
  new_metadata_json[REQUIREMENTS].each do |requirement|
    next unless requirement[NAME] == name
    PdkSync::Logger.info "Updating [#{requirement['name']}] #{requirement.key? key ? "dependency's existing" : 'with a new'} key [#{key}] to value [#{value}]"
    requirement[key] = value
    updated_existing_entry = true
  end
  # No requirement with that name: append a new entry.
  unless updated_existing_entry
    PdkSync::Logger.info "Adding new requirement [#{name}] with key [#{key}] of value [#{value}]"
    new_requirement = {}
    new_requirement[NAME] = name
    new_requirement[key] = value
    new_metadata_json[REQUIREMENTS] << new_requirement
  end
  write_metadata_json(module_path, new_metadata_json)
end
|
1244
|
+
|
1245
|
+
# @summary
#   Generate an entry in the 'provision.yaml' for running release checks against the platforms that the given
#   Puppet version supports. Compares the supported platforms for the given Puppet version against the
#   compatible platforms defined in the module's 'metadata.json' and keeps the intersection.
# @param [String] module_path
#   Path to the module root dir
# @param [String] puppet_version
#   Puppet version we are generating platform checks for
def self.generate_vmpooler_release_checks(module_path, puppet_version)
  PdkSync::Logger.info "Generating release checks provision.yaml key for Puppet version #{puppet_version}"
  # This YAML maps each Puppet version to the platforms it is tested on.
  config_path = 'lib/pdksync/conf/puppet_abs_supported_platforms.yaml'
  supported_platforms_by_version = YAML.safe_load(File.read(config_path))
  raise "No configuration for Puppet #{puppet_version} found in #{config_path}" unless supported_platforms_by_version.key? puppet_version
  test_platforms = supported_platforms_by_version[puppet_version]
  compatible_platforms = module_supported_platforms(module_path)
  images = []
  PdkSync::Logger.debug 'Processing compatible platforms from metadata.json'
  compatible_platforms.each do |platform_entry|
    platform = platform_entry['operatingsystem'].downcase
    # 'Windows' and 'OracleLinux' are the definitions in 'metadata.json', however the VMPooler images are 'win' and 'oracle'
    platform = { 'windows' => 'win', 'oraclelinux' => 'oracle' }.fetch(platform, platform)
    releases = platform_entry['operatingsystemrelease']
    if test_platforms.keys.none? { |k| k.start_with?(platform) }
      PdkSync::Logger.warn "'#{platform}' is a compatible platform but was not defined as test platform for Puppet #{puppet_version} in #{config_path}"
      next
    end
    releases.each do |release|
      PdkSync::Logger.debug "Checking '#{platform} #{release}'"
      if platform == 'win'
        # Windows releases need translating to the VMPooler image naming scheme first.
        normalized_release = normalize_win_version(release)
        PdkSync::Logger.debug "Normalised Windows version: #{normalized_release}"
        next unless test_platforms['win'].include?(normalized_release)
        PdkSync::Logger.debug "'#{platform} #{release}' SUPPORTED by Puppet #{puppet_version}"
        images << "win-#{normalized_release}-x86_64"
      else
        next unless test_platforms[platform].include?(release)
        PdkSync::Logger.debug "'#{platform} #{release}' SUPPORTED by Puppet #{puppet_version}"
        images << "#{platform}-#{release.delete('.')}-x86_64"
      end
    end
  end
  images.uniq!
  # add_provision_list returns falsy when the module has no provision.yaml to update.
  written = add_provision_list(module_path, "release_checks_#{puppet_version}", 'abs', images)
  PdkSync::Logger.warn "#{module_path}/provision.yaml does not exist" unless written
end
|
1292
|
+
end
|
1293
|
+
end
|