pdksync 0.5.0 → 0.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1296 @@
1
+ # @summary provides a module with various methods for performing the desired tasks
2
+ require 'git'
3
+ require 'open3'
4
+ require 'fileutils'
5
+ require 'pdk'
6
+ require 'pdksync/configuration'
7
+ require 'pdksync/gitplatformclient'
8
+ require 'bundler'
9
+ require 'octokit'
10
+ require 'pdk/util/template_uri'
11
+ require 'pdksync/logger'
12
+
13
+ module PdkSync
14
+ module Utils
15
def self.configuration
  # Lazily construct the pdksync configuration once and reuse it afterwards.
  @configuration = PdkSync::Configuration.new if @configuration.nil?
  @configuration
end
18
+
19
def self.on_windows?
  # File::ALT_SEPARATOR is only defined on Windows rubies; the standard
  # library relies on the same probe to detect the platform.
  !File::ALT_SEPARATOR.nil? # rubocop:disable Style/DoubleNegation
end
24
+
25
def self.temp_file_path
  # Memoised scratch-file location: %TEMP% on Windows, /tmp elsewhere.
  @temp_file_path ||= if on_windows?
                        "#{ENV['TEMP']}\\out.tmp"
                      else
                        '/tmp/out.tmp'
                      end
end
28
+
29
+ # @summary
30
+ # This method when called will delete any preexisting branch on the given repository that matches the given name.
31
+ # @param [PdkSync::GitPlatformClient] client
32
+ # The Git platform client used to gain access to and manipulate the repository.
33
+ # @param [String] repo_name
34
+ # The name of the repository from which the branch is to be deleted.
35
+ # @param [String] branch_name
36
+ # The name of the branch that is to be deleted.
37
def self.delete_branch(client, repo_name, branch_name)
  # Delegate to the platform client; failures are logged, never raised.
  client.delete_branch(repo_name, branch_name)
rescue StandardError => e
  PdkSync::Logger.fatal "Deleting #{branch_name} in #{repo_name} failed. #{e}"
end
42
+
43
+ # @summary
44
+ # This method when called will add a given label to a given repository
45
+ # @param [PdkSync::GitPlatformClient] client
46
+ # The Git Platform client used to gain access to and manipulate the repository.
47
+ # @param [String] repo_name
48
+ # The name of the repository on which the commit is to be made.
49
+ # @param [Integer] issue_number
50
+ # The id of the issue (i.e. pull request) to add the label to.
51
+ # @param [String] label
52
+ # The label to add.
53
def self.add_label(client, repo_name, issue_number, label)
  # update_issue with a labels array attaches the label to the issue/PR.
  client.update_issue(repo_name, issue_number, labels: [label])
rescue StandardError => e
  PdkSync::Logger.info "Adding label to #{repo_name} issue #{issue_number} has failed. #{e}"
  false
end
59
+
60
+ # @summary
61
+ # This method when called will check on the given repository for the existence of the supplied label
62
+ # @param [PdkSync::GitPlatformClient] client
63
+ # The Git platform client used to gain access to and manipulate the repository.
64
+ # @param [String] repo_name
65
+ # The name of the repository on which the commit is to be made.
66
+ # @param [String] label
67
+ # The label to check for.
68
+ # @return [Boolean]
69
+ # A boolean stating whether the label was found.
70
def self.check_for_label(client, repo_name, label)
  # Fetch every label defined on the repository.
  repo_labels = client.labels(repo_name)

  # Success as soon as any repository label matches the requested name.
  return true if repo_labels.any? { |repo_label| repo_label.name == label }

  # No match: raise, which is caught below so callers get false plus a log line.
  raise StandardError, "Label '#{label}' not found in #{repo_name}"
rescue StandardError => e
  PdkSync::Logger.fatal "Retrieving labels for #{repo_name} has failed. #{e}"
  false
end
89
+
90
+ # @summary
91
+ # This method when called will retrieve the pdk_version of the current module, i.e. the one that was navigated into in the 'pdk_update' method.
92
+ # @param [String] metadata_file
93
+ # An optional input that can be used to set the location of the metadata file.
94
+ # @return [String]
95
+ # A string value that represents the current pdk version.
96
def self.return_pdk_version(metadata_file = 'metadata.json')
  # pdk stamps the version it last ran with into the module's metadata.json.
  metadata = JSON.parse(File.read(metadata_file))
  metadata['pdk-version']
end
101
+
102
+ # @summary
103
+ # This method when called will stage all changed files within the given repository, conditional on them being managed via the pdk.
104
+ # @param [Git::Base] git_repo
105
+ # A git object representing the local repository to be staged.
106
def self.add_staged_files(git_repo)
  # NOTE(review): only `changed` (modified tracked) files gate the add, yet
  # `add(all: true)` would also stage new/deleted files — confirm that
  # brand-new files alone are intentionally reported as "Nothing to commit".
  if git_repo.status.changed.empty?
    PdkSync::Logger.info 'Nothing to commit.'
    false
  else
    git_repo.add(all: true)
    PdkSync::Logger.info 'All files have been staged.'
    true
  end
end
116
+
117
+ # @summary
118
+ # This method when called will create a commit containing all currently staged files, with the name of the commit containing the template ref as a unique identifier.
119
+ # @param [Git::Base] git_repo
120
+ # A git object representing the local repository against which the commit is to be made.
121
+ # @param [String] template_ref
122
+ # The unique template_ref that is used as part of the commit name.
123
+ # @param [String] commit_message
124
+ # If specified it will be the message for the commit.
125
def self.commit_staged_files(git_repo, template_ref, commit_message = nil)
  # Default commit message embeds the template ref as a unique identifier.
  message = commit_message.nil? ? "pdksync_#{template_ref}" : commit_message
  git_repo.commit(message)
end
133
+
134
+ # @summary
135
+ # This method when called will push the given local commit to local repository's origin.
136
+ # @param [Git::Base] git_repo
137
+ # A git object representing the local repository against which the push is to be made.
138
+ # @param [String] current_branch
139
+ # The name of the branch to which the staged files are pushed.
140
+ # @param [String] repo_name
141
+ # The name of the repository on which the commit is to be made.
142
def self.push_staged_files(git_repo, current_branch, repo_name)
  # Push current_branch to the configured remote (usually 'origin').
  git_repo.push(configuration.push_file_destination, current_branch)
rescue StandardError => e
  PdkSync::Logger.error "Pushing to #{configuration.push_file_destination} for #{repo_name} has failed. #{e}"
end
147
+
148
+ # @summary
149
+ # This method when called will create a PR on the given repository to merge the given commit into main, with the pdk version as an identifier.
150
+ # @param [PdkSync::GitPlatformClient] client
151
+ # The Git platform client used to gain access to and manipulate the repository.
152
+ # @param [String] repo_name
153
+ # The name of the repository on which the commit is to be made.
154
+ # @param [String] template_ref
155
+ # The unique reference that represents the template the update has run against.
156
+ # @param [String] pdk_version
157
+ # The current version of the pdk on which the update is run.
158
def self.create_pr(client, repo_name, template_ref, pdk_version, pr_title = nil)
  # Without an explicit title the PR is named after the pdk version and
  # sourced from the canonical pdksync_<template_ref> branch; with one,
  # template_ref is taken to be the head branch itself.
  if pr_title.nil?
    title   = "pdksync - Update using #{pdk_version}"
    message = "pdk version: `#{pdk_version}` \n pdk template ref: `#{template_ref}`"
    head    = "pdksync_#{template_ref}"
  else
    title   = "pdksync - #{pr_title}"
    message = "#{pr_title}\npdk version: `#{pdk_version}` \n"
    head    = template_ref
  end
  client.create_pull_request(repo_name, configuration.create_pr_against, head, title, message)
rescue StandardError => e
  PdkSync::Logger.fatal "PR creation for #{repo_name} has failed. #{e}"
  nil
end
176
+
177
+ # @summary
178
+ # Try to use a fully installed pdk, otherwise fall back to the bundled pdk gem.
179
+ # @return String
180
+ # Path to the pdk executable
181
def self.return_pdk_path
  # Prefer the packaged pdk install when present; otherwise fall back to
  # whatever `pdk` resolves to on PATH (with a warning).
  full_path = '/opt/puppetlabs/pdk/bin/pdk'
  return full_path if File.executable?(full_path)

  PdkSync::Logger.warn "(WARNING) Using pdk on PATH not '#{full_path}'"
  'pdk'
end
191
+
192
+ # @return
193
+ def self.create_commit(git_repo, branch_name, commit_message)
194
+ checkout_branch(git_repo, branch_name)
195
+ commit_staged_files(git_repo, branch_name, commit_message) if add_staged_files(git_repo)
196
+ end
197
+
198
+ # @summary
199
+ # This method when called will call the delete function against the given repository if it exists.
200
+ # @param [String] output_path
201
+ # The repository that is to be deleted.
202
def self.clean_env(output_path)
  # rm_rf is a no-op for missing paths, so no existence check is required.
  FileUtils.rm_rf(output_path)
end
206
+
207
+ # @summary
208
+ # This method when called will clone a given repository into a local location that has also been set.
209
+ # @param [String] namespace
210
+ # The namespace the repository is located in.
211
+ # @param [String] module_name
212
+ # The name of the repository.
213
+ # @param [String] output_path
214
+ # The location the repository is to be cloned to.
215
+ # @return [Git::Base]
216
+ # A git object representing the local repository or true if already exist
217
def self.clone_directory(namespace, module_name, output_path)
  # SSH-style base URIs (git@host) join with ':', HTTP(S) URIs with '/'.
  separator = configuration.git_base_uri.start_with?('git@') ? ':' : '/'
  clone_url = "#{configuration.git_base_uri}#{separator}#{namespace}/#{module_name}.git"
  Git.clone(clone_url, output_path.to_s)
rescue ::Git::GitExecuteError => e
  PdkSync::Logger.fatal "Cloning #{module_name} has failed. #{e}"
end
225
+
226
+ # @summary
227
+ # This method when called will run a command at the given location, with an error message being thrown if it is not successful.
228
+ # @param [String] output_path
229
+ # The location that the command is to be run from.
230
+ # @param [String] command
231
+ # The command to be run.
232
+ # @return [Integer]
233
+ # The status code of the command run.
234
# Runs +command+ from +output_path+.
# When +option+ is nil the command runs synchronously via Open3 (with a clean
# Bundler environment for `bundle ...` commands), logs its output, and returns
# the exit status. Otherwise `sh ...` commands are spawned detached in the
# background (stdout/stderr to run_command.out/.err) and the pid is returned.
# NOTE(review): Bundler.with_clean_env is deprecated in Bundler 2.1+ in favour
# of with_unbundled_env — confirm the pinned Bundler version before changing.
def self.run_command(output_path, command, option)
  stdout = ''
  stderr = ''
  status = Process::Status # placeholder; reassigned by capture3 below
  pid = ''
  # chdir persists after this method returns; callers appear to rely on it.
  Dir.chdir(output_path) unless Dir.pwd == output_path

  # Environment cleanup required due to Ruby subshells using current Bundler environment
  if option.nil? == true
    if command =~ %r{^bundle}
      Bundler.with_clean_env do
        stdout, stderr, status = Open3.capture3(command)
      end
    else
      stdout, stderr, status = Open3.capture3(command)
    end
    PdkSync::Logger.info "\n#{stdout}\n"
    PdkSync::Logger.error "Unable to run command '#{command}': #{stderr}" unless status.exitstatus.zero?
    status.exitstatus
  else
    # Environment cleanup required due to Ruby subshells using current Bundler environment
    # Background mode: only `sh `-prefixed commands are actually spawned; any
    # other command returns the initial '' pid unchanged.
    if command =~ %r{^sh }
      Bundler.with_clean_env do
        pid = spawn(command, out: 'run_command.out', err: 'run_command.err')
        Process.detach(pid)
      end
    end
    pid
  end
end
264
+
265
+ # @summary
266
+ # This method when called will run the 'pdk update --force' command at the given location, with an error message being thrown if it is not successful.
267
+ # @param [String] output_path
268
+ # The location that the command is to be run from.
269
+ # @return [Integer]
270
+ # The status code of the pdk update run.
271
def self.pdk_update(output_path)
  # Runs the pdk update command
  Dir.chdir(output_path) do
    # The module's template-url may carry its own '#ref' fragment; fall back
    # to the globally configured templates ref when it does not.
    _, module_temp_ref = module_templates_url.split('#')
    module_temp_ref ||= configuration.pdk_templates_ref
    # When the module is authoritative its own ref wins; otherwise the
    # configured pdk-templates URL/ref is written into metadata.json first.
    template_ref = configuration.module_is_authoritive ? module_temp_ref : configuration.pdk_templates_ref
    change_module_template_url(configuration.pdk_templates_url, template_ref) unless configuration.module_is_authoritive
    _stdout, stderr, status = Open3.capture3("#{return_pdk_path} update --force --template-ref=#{template_ref}")
    PdkSync::Logger.fatal "Unable to run `pdk update`: #{stderr}" unless status.exitstatus.zero?
    status.exitstatus
  end
end
283
+
284
+ # @summary
285
+ # This method when called will retrieve the template ref of the current module, i.e. the one that was navigated into in the 'pdk_update' method.
286
+ # @param [String] metadata_file
287
+ # An optional input that can be used to set the location of the metadata file.
288
+ # @return [String]
289
+ # A string value that represents the current pdk template.
290
def self.return_template_ref(metadata_file = 'metadata.json')
  # pdk records the template git ref it last synced against in metadata.json.
  metadata = JSON.parse(File.read(metadata_file))
  metadata['template-ref']
end
295
+
296
+ # @summary
297
+ # This method when called will retrieve the template-url of the current module,
298
+ # @param metadata_file [String]
299
+ # An optional input that can be used to set the location of the metadata file.
300
+ # @param url [String] - the url of the pdk-templates repo
301
+ # @return [String]
302
+ # A string value that represents the current pdk template-url.
303
def self.module_templates_url(metadata_file = 'metadata.json')
  # The module's metadata.json records which template repository it uses.
  metadata = JSON.parse(File.read(metadata_file))
  metadata['template-url']
end
308
+
309
+ # @param [String] - the url of the pdk-templates
310
+ # @param [String] - the ref of the pdk templates you want to change to
311
+ # @return [String] - the updated url
312
def self.change_module_template_url(url, ref, metadata_file = 'metadata.json')
  # Rewrite metadata.json's template-url to "<url>#<ref>" and return it.
  data_hash = JSON.parse(File.read(metadata_file))
  uri = PDK::Util::TemplateURI.uri_safe(url.to_s + "##{ref}")
  data_hash['template-url'] = uri
  File.write(metadata_file, data_hash.to_json)
  uri.to_s
end
320
+
321
+ # @summary
322
+ # This method when called will checkout a new local branch of the given repository.
323
+ # @param [Git::Base] git_repo
324
+ # A git object representing the local repository to be branched.
325
+ # @param [String] branch_suffix
326
+ # The string that is appended on the branch name. eg template_ref or a friendly name
327
def self.checkout_branch(git_repo, branch_suffix)
  # All pdksync working branches are namespaced with the pdksync_ prefix.
  branch = git_repo.branch("pdksync_#{branch_suffix}")
  branch.checkout
end
330
+
331
+ # @summary
332
+ # Check the local pdk version against the most recent tagged release on GitHub
333
+ # @return [Boolean] true if the remote version is less than or equal to local version
334
# @summary
#   Compare the locally installed pdk version against the newest tag of
#   puppetlabs/pdk on GitHub.
# @return [Boolean, nil]
#   true when local >= remote; false when outdated; nil when the check
#   itself failed (network error, etc. — logged as a warning).
def self.check_pdk_version
  stdout, _stderr, status = Open3.capture3("#{return_pdk_path} --version")
  # BUG FIX: `status.exitstatus` is an Integer and therefore always truthy,
  # so the original `unless status.exitstatus` could never log; test the
  # exit code itself instead.
  PdkSync::Logger.fatal "Unable to find pdk at '#{return_pdk_path}'." unless status.exitstatus.zero?
  local_version = stdout.strip
  # Strip the leading 'v' from the newest GitHub tag, e.g. 'v2.5.0' -> '2.5.0'.
  remote_version = Octokit.tags('puppetlabs/pdk').first[:name][1..-1]
  up2date = Gem::Version.new(remote_version) <= Gem::Version.new(local_version)
  unless up2date
    PdkSync::Logger.warn "The current version of pdk is #{remote_version} however you are using #{local_version}"
  end
  up2date
rescue StandardError => error
  PdkSync::Logger.warn "Unable to check latest pdk version. #{error}"
end
347
+
348
+ # @summary
349
+ # This method when called will create a directory identified by the set global variable 'configuration.pdksync_dir', on the condition that it does not already exist.
350
def self.create_filespace
  # Create the pdksync working directory if it is not already present.
  FileUtils.mkdir_p configuration.pdksync_dir unless Dir.exist?(configuration.pdksync_dir)
  configuration.pdksync_dir
end
354
+
355
+ # @summary
356
+ # This method when called will create a directory identified by the set global variable 'configuration.pdksync_gem_dir', on the condition that it does not already exist.
357
def self.create_filespace_gem
  # Create the pdksync gem working directory if it is not already present.
  FileUtils.mkdir_p configuration.pdksync_gem_dir unless Dir.exist?(configuration.pdksync_gem_dir)
  configuration.pdksync_gem_dir
end
361
+
362
+ # @summary
363
+ # This method when called will create and return an octokit client with access to the upstream git repositories.
364
+ # @return [PdkSync::GitPlatformClient] client
365
+ # The Git platform client that has been created.
366
def self.setup_client
  # Build the platform client from the configured platform + credentials.
  PdkSync::GitPlatformClient.new(configuration.git_platform, configuration.git_platform_access_settings)
rescue StandardError => e
  raise "Git platform access not set up correctly: #{e}"
end
371
+
372
+ # @summary
373
+ # This method when called will create and return an octokit client with access to jenkins.
374
+ # @return [PdkSync::JenkinsClient] client
375
+ # The Git platform client that has been created.
376
# @summary
#   Builds a Jenkins client for the given server URL, validating that the
#   JENKINS username/password environment settings are present first.
# @param [String] jenkins_server_url
#   The Jenkins server to connect to.
# @return [PdkSync::JenkinsClient]
# @raise [RuntimeError] when construction or validation fails.
def self.setup_jenkins_client(jenkins_server_url)
  require 'pdksync/jenkinsclient'
  if configuration.jenkins_platform_access_settings[:jenkins_username].nil?
    raise ArgumentError, "Jenkins access token for #{configuration.jenkins_platform.capitalize} not set"\
      " - use 'export #{configuration.jenkins_platform.upcase}_USERNAME=\"<your username>\"' to set"
  elsif configuration.jenkins_platform_access_settings[:jenkins_password].nil?
    # BUG FIX: this branch referenced the undefined local `jenkins_platform`,
    # raising NameError (then re-wrapped below) instead of the intended
    # ArgumentError message; use the configuration accessor as above.
    raise ArgumentError, "Jenkins access token for #{configuration.jenkins_platform.capitalize} not set"\
      " - use 'export #{configuration.jenkins_platform.upcase}_PASSWORD=\"<your password>\"' to set"
  end
  PdkSync::JenkinsClient.new(jenkins_server_url, configuration.jenkins_platform_access_settings)
rescue StandardError => error
  raise "Jenkins platform access not set up correctly: #{error}"
end
389
+
390
+ # @summary
391
+ # This method when called will access a file set by the global variable 'configuration.managed_modules' and retrieve the information within as an array.
392
+ # @return [Array]
393
+ # An array of different module names.
394
def self.return_modules
  managed_file = configuration.managed_modules
  # File.size? returns nil for a missing or zero-length file.
  raise "File '#{managed_file}' is empty/does not exist" if File.size?(managed_file).nil?
  YAML.safe_load(File.open(managed_file))
end
398
+
399
+ # @summary
400
+ # This method when called will parse an array of module names and verify
401
+ # whether they are valid repo or project names on the configured Git
402
+ # hosting platform.
403
+ # @param [PdkSync::GitPlatformClient] client
404
+ # The Git platform client used to get a repository.
405
+ # @param [Array] module_names
406
+ # String array of the names of Git platform repos
407
def self.validate_modules_exist(client, module_names)
  # The managed-modules file must have parsed to a non-nil Array.
  raise "Error reading in modules. Check syntax of '#{configuration.managed_modules}'." unless !module_names.nil? && module_names.is_a?(Array)
  # Keep only the names the platform does NOT know as <namespace>/<name>.
  invalid = module_names.reject { |name| client.repository?("#{configuration.namespace}/#{name}") }
  raise "Could not find the following repositories: #{invalid}" unless invalid.empty?
  true
end
414
+
415
+ # @summary
416
+ # This method when called will update a Gemfile and remove the existing version of gem from the Gemfile.
417
+ # @param [String] output_path
418
+ # The location that the command is to be run from.
419
+ # @param [String] gem_to_test
420
+ # The Gem to test.
421
+ # @param [String] gem_line
422
+ # The gem line to replace
423
+ # @param [String] gem_sha_finder
424
+ # The gem sha to find
425
+ # @param [String] gem_sha_replacer
426
+ # The gem sha to replace
427
+ # @param [String] gem_version_finder
428
+ # The gem version to find
429
+ # @param [String] gem_version_replacer
430
+ # The gem version to replace
431
+ # @param [String] gem_branch_finder
432
+ # The gem branch to find
433
+ # @param [String] gem_branch_replacer
434
+ # The gem branch to replace
435
def self.gem_file_update(output_path, gem_to_test, gem_line, gem_sha_finder, gem_sha_replacer, gem_version_finder, gem_version_replacer, gem_branch_finder, gem_branch_replacer, main_path)
  gem_file_name = 'Gemfile'
  # Clones the gem's repo and populates @all_refs/@all_branches/@all_versions
  # used by the validate_* helpers below.
  validate_gem_update_module(gem_to_test, gem_line, output_path, main_path)

  # When a whole gem line is supplied, pull branch/ref/version constraints out
  # of its comma-separated parts and validate each one.
  if (gem_line.nil? == false) && (gem_sha_replacer != '\"\"')
    new_data = get_source_test_gem(gem_to_test, gem_line)
    new_data.each do |data|
      if data.include?('branch')
        gem_branch_replacer = data.split(' ')[1].strip.chomp('"').delete("'")
      elsif data.include?('ref')
        # NOTE(review): chomp('') removes nothing — presumably chomp('"') was
        # intended here, as in the branch case above; confirm.
        gem_sha_replacer = data.split(' ')[1].strip.chomp('').delete("'")
      elsif data =~ %r{~>|=|>=|<=|<|>}
        delimiters = ['>', '<', '>=', '<=', '=']
        version_to_check = data.split(Regexp.union(delimiters))[1].chomp('""').delete("'")
        validate_gem_version_replacer(version_to_check.to_s, gem_to_test)
      end
    end
  end

  # Validate individually supplied replacers (sha / branch / version),
  # stripping one pair of surrounding double quotes first.
  if gem_sha_replacer.nil? == false && gem_sha_replacer != '\"\"' && gem_sha_replacer != ''
    validate_gem_sha_replacer(gem_sha_replacer.chomp('"').reverse.chomp('"').reverse, gem_to_test)
  end
  if gem_branch_replacer.nil? == false && gem_branch_replacer != '\"\"'
    validate_gem_branch_replacer(gem_branch_replacer.chomp('"').reverse.chomp('"').reverse, gem_to_test)
  end
  if gem_version_replacer.nil? == false && gem_version_replacer != '\"\"' && gem_version_replacer != ''
    delimiters = ['<', '>', '<=', '>=', '=']
    version_to_check = gem_version_replacer.split(Regexp.union(delimiters))
    version_to_check.each do |version|
      next if version.nil?
      validate_gem_version_replacer(version.to_s, gem_to_test) unless version == ''
    end
  end

  Dir.chdir(output_path) unless Dir.pwd == output_path

  # The new gem line is inserted after the Gemfile's first line (source line).
  line_number = 1
  gem_update_sha = [
    { finder: "ref: '#{gem_sha_finder}'",
      replacer: "ref: '#{gem_sha_replacer}'" }
  ]
  gem_update_version = [
    { finder: gem_version_finder,
      replacer: gem_version_replacer }
  ]
  gem_update_branch = [
    { finder: "branch: '#{gem_branch_finder}'",
      replacer: "branch: '#{gem_branch_replacer}'" }
  ]
  # gem_line option is passed

  # NOTE(review): `gem_line != '' || gem_line != '\"\"'` is always true for
  # any value; presumably `&&` was intended. Left as-is (behaviour-preserving).
  if gem_line.nil? == false && (gem_line != '' || gem_line != '\"\"')

    # Delete the gem in the Gemfile to add the new line
    gem_test = gem_to_test.chomp('"').reverse.chomp('"').reverse
    File.open(temp_file_path, 'w') do |out_file|
      File.foreach(gem_file_name) do |line|
        out_file.puts line unless line =~ %r{#{gem_test}}
      end
    end
    FileUtils.mv(temp_file_path, gem_file_name)

    # Insert the new Gem to test
    file = File.open(gem_file_name)
    contents = file.readlines.map(&:chomp)
    contents.insert(line_number, gem_line.chomp('"').reverse.chomp('"').reverse)
    File.open(gem_file_name, 'w') { |f| f.write contents.join("\n") }
  end

  # gem_sha_finder and gem_sha_replacer options are passed
  if gem_sha_finder.nil? == false && gem_sha_replacer.nil? == false && gem_sha_finder != '' && gem_sha_finder != '\"\"' && gem_sha_replacer != '' && gem_sha_replacer != '\"\"'
    # Replace with SHA
    file = File.open(gem_file_name)
    contents = file.readlines.join
    gem_update_sha.each do |regex|
      contents = contents.gsub(%r{#{regex[:finder]}}, regex[:replacer])
    end
    File.open(gem_file_name, 'w') { |f| f.write contents.to_s }
  end

  # gem_version_finder and gem_version_replacer options are passed
  if gem_version_finder.nil? == false && gem_version_replacer.nil? == false && gem_version_finder != '' && gem_version_finder != '\"\"' && gem_version_replacer != '' && gem_version_replacer != '\"\"' # rubocop:disable Metrics/LineLength
    # Replace with version
    file = File.open(gem_file_name)
    contents = file.readlines.join
    gem_update_version.each do |regex|
      contents = contents.gsub(%r{#{regex[:finder]}}, regex[:replacer])
    end
    File.open(gem_file_name, 'w') { |f| f.write contents.to_s }
  end

  # gem_branch_finder and gem_branch_replacer options are passed
  if gem_branch_finder.nil? == false && gem_branch_replacer.nil? == false && gem_branch_finder != '' && gem_branch_finder != '\"\"' && gem_branch_replacer != '' && gem_branch_replacer != '\"\"' # rubocop:disable Metrics/LineLength, Style/GuardClause
    # Replace with branch
    file = File.open(gem_file_name)
    contents = file.readlines.join
    gem_update_branch.each do |regex|
      contents = contents.gsub(%r{#{regex[:finder]}}, regex[:replacer]) # unless contents =~ %r{#{gem_to_test}}
    end
    File.open(gem_file_name, 'w') { |f| f.write contents.to_s }
  end
end
537
+
538
+ # @summary
539
+ # This method is used to identify the type of module.
540
+ # @param [String] output_path
541
+ # The location that the command is to be run from.
542
+ # @param [String] repo_name
543
+ # The module name to identify the type
544
def self.module_type(output_path, repo_name)
  # A provision.yaml in the module root marks a litmus-based module; every
  # other module is treated as 'traditional'. A nil repo_name yields nil,
  # mirroring the original guard.
  return nil if repo_name.nil?

  File.exist?("#{output_path}/provision.yaml") ? 'litmus' : 'traditional'
end
554
+
555
+ # @summary
556
+ # This method when called will run the 'module tests' command at the given location, with an error message being thrown if it is not successful.
557
+ # @param [String] output_path
558
+ # The location that the command is to be run from.
559
+ # @param [String] module_type
560
+ # The module type (litmus or traditional)
561
+ # @param [String] module_name
562
+ # The module name
563
+ # @param [String] puppet collection
564
+ # The puppet collection
565
+ # @return [Integer]
566
+ # The status code of the pdk update run.
567
def self.run_tests_locally(output_path, module_type, provision_type, module_name, puppet_collection)
  # Strip one pair of surrounding double quotes from the provision type.
  provision_type = provision_type.chomp('"').reverse.chomp('"').reverse
  status = Process::Status # placeholder; reassigned by run_command below
  # Save the current path
  old_path = Dir.pwd

  # Create the acceptance scripts
  # NOTE(review): acc.sh is written into the CURRENT directory and later
  # copied via 'cp ../../acc.sh .' — this relies on output_path being exactly
  # two levels below the invocation directory.
  file = File.open('acc.sh', 'w')
  file.puts '#!/bin/sh'

  if puppet_collection
    file.puts "export PUPPET_GEM_VERSION='~> #{puppet_collection}'"
  end
  file.puts "rm -rf #{output_path}/Gemfile.lock;rm -rf #{output_path}/.bundle"
  file.puts 'bundle install --path .bundle/gems/ --jobs 4'
  file.puts "bundle exec rake 'litmus:provision_list[#{provision_type}]'"
  file.puts 'bundle exec rake litmus:install_agent'
  file.puts 'bundle exec rake litmus:install_module'
  file.puts 'bundle exec rake litmus:acceptance:parallel'
  file.puts 'bundle exec rake litmus:tear_down'
  file.close

  # Runs the module tests command
  if module_type == 'litmus'
    run_command(output_path, 'cp ../../acc.sh .', nil)
    Dir.chdir(old_path)
    run_command(output_path, 'chmod 777 acc.sh', nil)
    Dir.chdir(old_path)
    # Background run: run_command returns the spawned pid, not an exit code,
    # so a non-zero value here means the process was launched successfully.
    status = run_command(output_path, 'sh acc.sh 2>&1 | tee litmusacceptance.out', 'background')
    if status != 0
      PdkSync::Logger.info "SUCCESS:Kicking of module Acceptance tests to run for the module #{module_name} - SUCCEED.Results will be available in the following path #{output_path}/litmusacceptance.out.Process id is #{status}"
    else
      PdkSync::Logger.fatal "FAILURE:Kicking of module Acceptance tests to run for the module #{module_name} - FAILED.Results will be available in the following path #{output_path}/litmusacceptance.out."
    end
  end
  PdkSync::Logger.warn "(WARNING) Executing testcases locally supports only for litmus'" if module_type != 'litmus'
end
604
+
605
+ # @summary
606
+ # This method when called will fetch the module tests results.
607
+ # @param [String] output_path
608
+ # The location that the command is to be run from.
609
+ # @param [String] module_type
610
+ # The module type (litmus or traditional)
611
+ # @param [String] module_name
612
+ # The module name
613
+ # @param [String] report_rows
614
+ # The module test results
615
+ # @return [Integer]
616
+ # The status code of the pdk update run.
617
def self.fetch_test_results_locally(output_path, module_type, module_name, report_rows)
  # Save the current path
  old_path = Dir.pwd
  if module_type != 'litmus'
    PdkSync::Logger.warn "(WARNING) Fetching test results locally supports only for litmus'"
  end

  # Run the tests
  Dir.chdir(old_path)
  # Only the last ten lines of the log are inspected for a verdict.
  lines = IO.readlines("#{output_path}/litmusacceptance.out")[-10..-1]
  # An 'exit' marker means the background run finished: report FAILED or
  # SUCCESS. Without it the run may still be going: report FAILED only on
  # clear failure markers, otherwise PROGRESS.
  if lines.find { |e| %r{exit} =~ e } # rubocop:disable Style/ConditionalAssignment
    report_rows << if lines.find { |e| %r{^Failed} =~ e } || lines.find { |e| %r{--trace} =~ e }
                     [module_name, 'FAILED', "Results are available in the following path #{output_path}/litmusacceptance.out"]
                   else
                     [module_name, 'SUCCESS', "Results are available in the following path #{output_path}/litmusacceptance.out"]
                   end
  else
    report_rows << if lines.find { |e| %r{^Failed} =~ e } || lines.find { |e| %r{--trace} =~ e } || lines.find { |e| %r{rake aborted} =~ e }
                     [module_name, 'FAILED', "Results are available in the following path #{output_path}/litmusacceptance.out"]
                   else
                     [module_name, 'PROGRESS', "Results will be available in the following path #{output_path}/litmusacceptance.out"]
                   end
  end
  # Returns the accumulated rows for litmus modules; nil otherwise.
  return report_rows if module_type == 'litmus'
end
642
+
643
+ # @summary
644
+ # This method when called will find the source location of the gem to test
645
+ # @param [String] gem_to_test
646
+ # The gem to test
647
+ # @param [String] gem_line
648
+ # The line to update in the Gemfile
649
+ # @return [String]
650
+ # The source location of the gem to test
651
# @summary
#   This method when called will find the source location of the gem to test
# @param [String] gem_to_test
#   The gem to test
# @param [String] gem_line
#   The line to update in the Gemfile
# @return [Array, String, nil]
#   The comma-split parts of gem_line when given; otherwise the source
#   location of the gem to test (URL string), or nil when undeterminable.
def self.get_source_test_gem(gem_to_test, gem_line)
  return gem_line.split(',') if gem_line
  return gem_to_test unless gem_to_test

  # Look the gem up in the local Gemfile.
  gemfile_line = File.readlines('Gemfile').find do |line|
    line.include?(gem_to_test.to_s)
  end

  # Not in the Gemfile: assume the puppetlabs GitHub namespace.
  return "https://github.com/puppetlabs/#{gem_to_test}" unless gemfile_line
  # BUG FIX: the original referenced the block-local `line` here, which is out
  # of scope and raised NameError; use the matched Gemfile line, returning its
  # second comma-separated field when it contains a URL.
  gemfile_line.split(',')[1].strip.to_s if gemfile_line =~ %r{(http|https|ftp|ftps)\:\/\/[a-zA-Z0-9\-\.]+\.[a-zA-Z]{2,3}(\/\S*)?}
end
663
+
664
+ # @summary
665
+ # This method when called will validate the gem_line to update in the Gemfile
666
+ # @param [String] gem_to_test
667
+ # The gem to test
668
+ # @param [String] gem_line
669
+ # The line to update in the Gemfile
670
def self.validate_gem_update_module(gem_to_test, gem_line, output_path, main_path)
  # Strip one pair of surrounding double quotes from the gem name.
  gem_to_test = gem_to_test.chomp('"').reverse.chomp('"').reverse
  Dir.chdir(main_path)
  # The parameter is overwritten: the clone always goes to pdksync_dir/<gem>.
  output_path = "#{configuration.pdksync_dir}/#{gem_to_test}"
  clean_env(output_path) if Dir.exist?(output_path)
  print 'delete module directory, '

  # when gem_line is specified, we need to parse the line and identify all the values
  # - we can have source url or we need to
  # - sha, branch, version
  if gem_line
    git_repo = get_source_test_gem(gem_to_test, gem_line)
    i = 0
    git_repo.each do |item|
      i += 1
      # Use an explicit git/http(s) URL from the gem line when present…
      if item =~ %r{((git@|http(s)?:\/\/)([\w\.@]+)(\/|:))([\w,\-,\_]+)\/([\w,\-,\_]+)(.git){0,1}((\/){0,1})}
        git_repo = item.split('git:')[1].strip.delete("'")
        break
      elsif git_repo.size == i
        # …otherwise, after the last part, fall back to the configured base
        # URI and namespace.
        # git_repo = "https://github.com/puppetlabs#{gem_to_test}"
        sep = configuration.git_base_uri.start_with?('git@') ? ':' : '/'
        git_repo = "#{configuration.git_base_uri}#{sep}#{configuration.namespace}/#{gem_to_test}"
      end
    end
    print 'clone module directory, '
    # run_command returns the clone's exit status here (synchronous mode).
    git_repo = run_command(configuration.pdksync_dir.to_s, "git clone #{git_repo}", nil)
  elsif gem_to_test
    git_repo = clone_directory(configuration.namespace, gem_to_test, output_path.to_s)
  end

  Dir.chdir(main_path)
  raise "Unable to clone repo for #{gem_to_test}. Check repository's url to be correct!".red if git_repo.nil?

  # Instance variables consumed by validate_gem_sha_replacer /
  # validate_gem_branch_replacer / validate_gem_version_replacer.
  @all_versions = ''
  @all_refs = ''
  @all_branches = ''

  Dir.chdir(output_path)

  stdout_refs, stderr_refs, status_refs = Open3.capture3('git show-ref -s')
  @all_refs = stdout_refs
  stdout_branches, stderr_branches, status_branches = Open3.capture3('git branch -a')
  @all_branches = stdout_branches
  stdout_versions, stderr_versions, status_versions = Open3.capture3('git tag')
  @all_versions = stdout_versions

  raise "Couldn't get references due to #{stderr_refs}".red unless status_refs.exitstatus.zero?
  raise "Couldn't get branches due to #{stderr_branches}".red unless status_branches.exitstatus.zero?
  raise "Couldn't get versions due to #{stderr_versions}".red unless status_versions.exitstatus.zero?
  Dir.chdir(main_path)
end
721
+
722
+ # @summary
723
+ # This method when called will validate the gem_sha_replacer to update in the Gemfile
724
+ # @param [String] gem_to_test
725
+ # The gem to test
726
+ # @param [String] gem_sha_replacer
727
+ # The sha to update in the Gemfile
728
+ def self.validate_gem_sha_replacer(gem_sha_replacer, gem_to_test)
729
+ found = false
730
+ @all_refs.split(' ').each do |sha|
731
+ puts "SHA #{gem_sha_replacer} valid.\n".green if gem_sha_replacer == sha
732
+ found = true if gem_sha_replacer == sha
733
+ end
734
+ raise "Couldn't find sha: #{gem_sha_replacer} in your repository: #{gem_to_test}".red if found == false
735
+ end
736
+
737
+ # @summary
738
+ # This method when called will validate the gem_branch_replacer to update in the Gemfile
739
+ # @param [String] gem_to_test
740
+ # The gem to test
741
+ # @param [String] gem_branch_replacer
742
+ # The branch to update in the Gemfile
743
+ def self.validate_gem_branch_replacer(gem_branch_replacer, gem_to_test)
744
+ raise "Couldn't find branch: #{gem_branch_replacer} in your repository: #{gem_to_test}".red unless @all_branches.include?(gem_branch_replacer)
745
+ puts "Branch #{gem_branch_replacer} valid.\n".green
746
+ end
747
+
748
+ # @summary
749
+ # This method when called will validate the gem_version_replacer to update in the Gemfile
750
+ # @param [String] gem_to_test
751
+ # The gem to test
752
+ # @param [String] gem_version_replacer
753
+ # The version to update in the Gemfile
754
+ def self.validate_gem_version_replacer(gem_version_replacer, gem_to_test)
755
+ found = false
756
+ @all_versions.split(' ').each do |version|
757
+ puts "Version #{gem_version_replacer} valid.\n".green if gem_version_replacer == version
758
+ found = true if gem_version_replacer == version
759
+ end
760
+ raise "Couldn't find version: #{gem_version_replacer} in your repository: #{gem_to_test}".red if found == false
761
+ end
762
+
763
+ # @summary
764
+ # This method when called will create a pr on the given repository that will create a pr to merge the given commit into the main with the pdk version as an identifier.
765
+ # @param [PdkSync::GitPlatformClient] client
766
+ # The Git platform client used to gain access to and manipulate the repository.
767
+ # @param [String] ouput_path
768
+ # The location that the command is to be run from.
769
+ # @param [String] jenkins_client
770
+ # Jenkins authentication.
771
+ # @param [String] repo_name
772
+ # Module to run on Jenkins
773
+ # @param [String] current_branch
774
+ # The branch against which the user needs to run the jenkin jobs
775
+ def self.run_tests_jenkins(jenkins_client, repo_name, current_branch, github_user, job_name)
776
+ if jenkins_client.nil? == false || repo_name.nil? == false || current_branch.nil? == false
777
+ pr = jenkins_client.create_adhoc_job(repo_name,
778
+ current_branch,
779
+ github_user,
780
+ job_name)
781
+ pr
782
+ end
783
+ rescue StandardError => error
784
+ puts "(FAILURE) Jenkins Job creation for #{repo_name} has failed. #{error}".red
785
+ end
786
+
787
+ # convert duration from ms to format h m s ms
788
+ def self.duration_hrs_and_mins(ms)
789
+ return '' unless ms
790
+ hours, ms = ms.divmod(1000 * 60 * 60)
791
+ minutes, ms = ms.divmod(1000 * 60)
792
+ seconds, ms = ms.divmod(1000)
793
+ "#{hours}h #{minutes}m #{seconds}s #{ms}ms"
794
+ end
795
+
796
+ # return jenkins job urls
797
+ def self.adhoc_urls(job_name, jenkins_server_urls)
798
+ adhoc_urls = []
799
+ # get adhoc jobs
800
+ adhoc_urls.push("#{jenkins_server_urls}/job/#{job_name}")
801
+ adhoc_urls.each do |url|
802
+ conn = Faraday::Connection.new "#{url}/api/json"
803
+ res = conn.get
804
+ build_job_data = JSON.parse(res.body.to_s)
805
+ downstream_job = build_job_data['downstreamProjects']
806
+ break if downstream_job.empty?
807
+ downstream_job.each do |item|
808
+ next if item.nil?
809
+ adhoc_urls.push(item['url']) unless item['url'].nil? && item['url'].include?('skippable_adhoc')
810
+ end
811
+ end
812
+ adhoc_urls
813
+ end
814
+
815
    # @summary
    #   Fetch Jenkins results for every adhoc job of a module, append details to
    #   "results_<module_name>.out" and print a summary table. Sets the
    #   module-level flags @failed / @in_progress / @aborted as side effects.
    # @param [String] jenkins_server_url
    #   Base URL of the Jenkins server.
    # @param [String] build_id
    #   Build number of the initial job.
    # @param [String] job_name
    #   Root Jenkins job name.
    # @param [String] module_name
    #   Module whose results file is written.
    def self.test_results_jenkins(jenkins_server_url, build_id, job_name, module_name)
      PdkSync::Logger.info 'Fetch results from jenkins'
      # remove duplicates and sort the list by the two-digit number scanned out
      # of each job's fullDisplayName
      adhoc_urls = adhoc_urls(job_name, jenkins_server_url).uniq.sort_by { |url| JSON.parse(Faraday.get("#{url}/api/json").body.to_s)['fullDisplayName'].scan(%r{[0-9]{2}\s}).first.to_i }
      report_rows = []
      @failed = false
      @in_progress = false
      @aborted = false

      # Start each run with a fresh results file.
      File.delete("results_#{module_name}.out") if File.exist?("results_#{module_name}.out")
      # analyse each build result - get status, execution time, logs_link
      @data = "MODULE_NAME=#{module_name}\nBUILD_ID=#{build_id}\nINITIAL_job=#{jenkins_server_url}/job/#{job_name}/#{build_id}\n\n"
      write_to_file("results_#{module_name}.out", @data)
      PdkSync::Logger.info "Analyse test execution report \n"
      adhoc_urls.each do |url|
        # next if skipped in build name
        current_build_data = JSON.parse(Faraday.get("#{url}/api/json").body.to_s)
        next if url.include?('skippable_adhoc') || current_build_data['color'] == 'notbuilt'
        next if current_build_data['fullDisplayName'].downcase.include?('skipped')
        # Once a failure or in-progress build is seen, later builds are not
        # fetched; generate_report_table then reuses the last returned_data.
        returned_data = get_data_build(url, build_id, module_name) unless @failed || @in_progress
        generate_report_table(report_rows, url, returned_data)
      end

      table = Terminal::Table.new title: "Module Test Results for: #{module_name}\nCheck results in #{Dir.pwd}/results_#{module_name}.out ", headings: %w[Status Result Execution_Time], rows: report_rows
      PdkSync::Logger.info "SUCCESSFUL test results!\n".green unless @failed || @in_progress
      PdkSync::Logger.info "\n#{table} \n"
    end
843
+
844
+ # generate report table when running tests on jenkins
845
+ def self.generate_report_table(report_rows, url, data)
846
+ if @failed
847
+ report_rows << ['FAILED', url, data[1]] unless data.nil?
848
+ elsif @aborted
849
+ report_rows << ['ABORTED', url, data[1]] unless data.nil?
850
+ else
851
+ report_rows << [data[0], url, data[1]] unless data.nil?
852
+ end
853
+ end
854
+
855
    # @summary
    #   Fetch one adhoc build's data and fold its outcome into the module-level
    #   flags @failed / @in_progress / @aborted. Matrix builds (those with
    #   'activeConfigurations') are expanded and analysed per platform.
    # @param [String] url
    #   Jenkins job URL.
    # @param [String] build_id
    #   Build number forwarded to analyse_jenkins_report.
    # @param [String] module_name
    #   Module whose results file is appended to.
    # @return [Array(String, String), nil] last [status, execution_time] pair
    #   from analyse_jenkins_report. NOTE(review): nil when a matrix build has
    #   no built platforms — callers must tolerate that.
    def self.get_data_build(url, build_id, module_name)
      current_build_data = JSON.parse(Faraday.get("#{url}/api/json").body.to_s)
      if current_build_data['activeConfigurations'].nil?
        # Plain (non-matrix) job: analyse it directly.
        returned_data = analyse_jenkins_report(url, module_name, build_id)
        if returned_data[0] == 'in progress'
          @in_progress = true
        elsif returned_data[0] == 'FAILURE'
          @failed = true
        elsif returned_data[0] == 'ABORTED'
          @aborted = true
        end
      else
        # Matrix job: analyse every platform that was actually built.
        platforms_list = []
        current_build_data['activeConfigurations'].each do |url_child|
          next if url_child['color'] == 'notbuilt'
          platforms_list.push(url_child['url'])
        end

        platforms_list.each do |platform_build|
          returned_data = analyse_jenkins_report(platform_build, module_name, build_id)
          if returned_data[0] == 'in progress'
            @in_progress = true
          elsif returned_data[0] == 'FAILURE'
            @failed = true
          elsif returned_data[0] == 'ABORTED'
            @aborted = true
          end
        end
      end

      # Record terminal/in-progress outcomes in the results file and log them.
      @data = "\nFAILURE. Fix the failures and rerun tests!\n" if @failed
      @data = "\nIN PROGRESS. Please check test report after the execution is done!\n" if @in_progress
      write_to_file("results_#{module_name}.out", @data) if @failed || @in_progress
      PdkSync::Logger.info 'Failed status! Fix errors and rerun.'.red if @failed
      PdkSync::Logger.info 'Aborted status! Fix errors and rerun.'.red if @aborted
      PdkSync::Logger.info 'Tests are still running! You can fetch the results later by using this task: test_results_jenkins'.blue if @in_progress
      returned_data
    end
895
+
896
+ # write test report to file
897
+ def self.write_to_file(file, _data)
898
+ File.open(file, 'a') do |f|
899
+ f.write @data
900
+ end
901
+ end
902
+
903
    # @summary
    #   Fetch a single Jenkins build's JSON, derive [status, execution_time] and
    #   append a short summary to "results_<module_name>.out".
    # @param [String] url
    #   Jenkins job URL.
    # @param [String] module_name
    #   Module whose results file is appended to.
    # @param [String] build_id
    #   Build number; only used for 'init-manual-parameters_adhoc' jobs.
    # @return [Array(String, String)] status and formatted execution time
    #   ('in progress' / 'running' while the build has no result yet).
    def self.analyse_jenkins_report(url, module_name, build_id)
      # builds don't have the same build_id. That's why just the init build will be identified by id, rest of them by lastBuild
      last_build_job_data = JSON.parse(Faraday.get("#{url}/#{build_id}/api/json").body.to_s) if url.include?('init-manual-parameters_adhoc')
      last_build_job_data = JSON.parse(Faraday.get("#{url}/lastBuild/api/json").body.to_s) unless url.include?('init-manual-parameters_adhoc')

      # A nil 'result' means Jenkins has not finished the build yet.
      if last_build_job_data['result'].nil?
        status = 'in progress'
        execution_time = 'running'
      else
        status = last_build_job_data['result']
        execution_time = duration_hrs_and_mins(last_build_job_data['duration'].to_i)
      end

      # NOTE(review): the lastBuild link below has no '/' between url and
      # 'lastBuild' — presumably URLs returned by the Jenkins API end with '/';
      # confirm against the adhoc_urls output.
      logs_link = "#{url}/#{build_id}/" if url.include?('init-manual-parameters_adhoc')
      logs_link = "#{url}lastBuild/" unless url.include?('init-manual-parameters_adhoc')
      @data = "Job title =#{last_build_job_data['fullDisplayName']}\n logs_link = #{logs_link}\n status = #{status}\n"
      return_data = [status, execution_time]
      write_to_file("results_#{module_name}.out", @data)
      return_data
    end
926
+
927
+ # @summary
928
+ # Check the most recent tagged release on GitHub for the gem
929
+ # @param [String] gem_to_test
930
+ # The gem to test
931
+ # The current version of the gem
932
+ def self.check_gem_latest_version(gem_to_test)
933
+ remote_version = Octokit.tags("puppetlabs/#{gem_to_test}").first[:name]
934
+ rescue StandardError => error
935
+ puts "(WARNING) Unable to check latest gem version. #{error}".red
936
+ remote_version
937
+ end
938
+
939
+ # @summary
940
+ # Update the gem version by one
941
+ # @param [String] gem_version
942
+ # The current version of the gem
943
+ # The bump version by one of the gem
944
+ def self.update_gem_latest_version_by_one(gem_version)
945
+ current_version = Gem::Version.new gem_version
946
+ new_version = current_version.bump
947
+ rescue StandardError => error
948
+ puts "(WARNING) Unable to check latest gem version. #{error}".red
949
+ new_version
950
+ end
951
+
952
+ # @summary
953
+ # Update Gemfile with multigem
954
+ # @param [String] output_path
955
+ # The location that the command is to be run from.
956
+ # @param [String] gem_name
957
+ # The gem name
958
+ # @param [String] gemfury_token
959
+ # The gemfury token
960
+ # @param [String] gemfury_user
961
+ # The gemfury user
962
+ def self.update_gemfile_multigem(output_path, gem_name, gemfury_token, gemfury_user)
963
+ gem_file_name = 'Gemfile'
964
+ gem_source_line = "source \"https://#{gemfury_token}@gem.fury.io/#{gemfury_user}/\""
965
+ Dir.chdir(output_path) unless Dir.pwd == output_path
966
+
967
+ if gem_name.nil? == false && gemfury_token.nil? == false && gemfury_user.nil? == false # rubocop:disable Style/GuardClause
968
+ # Append the gem with new source location
969
+ gem_name = gem_name.chomp('"').reverse.chomp('"').reverse
970
+ begin
971
+ File.open(temp_file_path, 'w') do |out_file|
972
+ File.foreach(gem_file_name) do |line|
973
+ if line =~ %r{#{gem_name}}
974
+ line = line.chomp
975
+ if line =~ %r{"https://#{gemfury_token}@gem.fury.io/#{gemfury_user}/"}
976
+ puts 'GemFile Already updated'.green
977
+ out_file.puts line.to_s
978
+ else
979
+ out_file.puts "#{line} , :source => \"https://#{gemfury_token}@gem.fury.io/#{gemfury_user}/\""
980
+ end
981
+ else
982
+ out_file.puts line
983
+ end
984
+ end
985
+ end
986
+ FileUtils.mv(temp_file_path, gem_file_name)
987
+
988
+ # Insert the new source Gem location to Gemfile
989
+ file = File.open(gem_file_name)
990
+ contents = file.readlines.map(&:chomp)
991
+ contents.insert(2, gem_source_line) unless contents.include?(gem_source_line)
992
+ File.open(gem_file_name, 'w') { |f| f.write contents.join("\n") }
993
+ rescue Errno::ENOENT => e
994
+ raise "Couldn't find file: #{gem_file_name} #{e} in your repository: #{gem_file_name}".red
995
+ rescue Errno::EACCES => e
996
+ raise "Does not have required permissions to the #{gem_file_name} #{e} in your repository: #{gem_file_name}".red
997
+ end
998
+ end
999
+ end
1000
+
1001
+ # @summary
1002
+ # Adds an entry to the 'provision.yaml' of a module with the values given
1003
+ # @param [String] module_path
1004
+ # Path to the module root dir
1005
+ # @param [String] key
1006
+ # Key name in 'provision.yaml' (e.g. "release_checks_7)
1007
+ # @param [String] provisioner
1008
+ # The value for the provisioner key (e.g. "abs")
1009
+ # @param [Array] images
1010
+ # The list of images for the images key (e.g. ['ubuntu-1804-x86_64, ubuntu-2004-x86_64', 'centos-8-x86_64'])
1011
+ # @return [Boolean]
1012
+ # True if entry was successfully added to 'provision.yaml'
1013
+ # False if 'provision.yaml' does not exist or is an empty file
1014
+ def self.add_provision_list(module_path, key, provisioner, images)
1015
+ path_to_provision_yaml = "#{module_path}/provision.yaml"
1016
+ return false unless File.exist? path_to_provision_yaml
1017
+ PdkSync::Logger.info "Updating #{path_to_provision_yaml}"
1018
+ provision_yaml = YAML.safe_load(File.read(path_to_provision_yaml))
1019
+ return false if provision_yaml.nil?
1020
+ provision_yaml[key] = {}
1021
+ provision_yaml[key]['provisioner'] = provisioner
1022
+ provision_yaml[key]['images'] = images
1023
+ File.write(path_to_provision_yaml, YAML.dump(provision_yaml))
1024
+ end
1025
+
1026
+ # @summary
1027
+ # Query the 'metadata.json' in the given module path and return the compatible platforms
1028
+ # @param [String] module_path
1029
+ # Path to the module root dir
1030
+ # @return [Hash]
1031
+ # The compatible OSs defined in the 'operatingsystem_support' key of the 'metadata.json'
1032
+ def self.module_supported_platforms(module_path)
1033
+ PdkSync::Logger.info 'Determining supported platforms from metadata.json'
1034
+ os_support_key = 'operatingsystem_support'
1035
+ metadata_json = "#{module_path}/metadata.json"
1036
+ raise 'Could not locate metadata.json' unless File.exist? metadata_json
1037
+ module_metadata = JSON.parse(File.read(metadata_json))
1038
+ raise "Could not locate '#{os_support_key}' key from #{metadata_json}" unless module_metadata.key? os_support_key
1039
+ module_metadata[os_support_key]
1040
+ end
1041
+
1042
+ # @summary
1043
+ # Take a Windows version extracted from the module's 'metadata.json' and normalize it to the version conventions
1044
+ # that VMPooler uses
1045
+ # @param ver
1046
+ # Version from 'metadata.json'
1047
+ # @return [String]
1048
+ # Normalised version that is used by VMPooler templates
1049
+ def self.normalize_win_version(ver)
1050
+ PdkSync::Logger.debug "Normalising Windows version from metadata.json: #{ver}"
1051
+ win_ver_matcher = ver.match(%r{(?:Server\s)?(?<ver>\d+)(?:\s(?<rel>R\d))?})
1052
+ raise "Unable to determine Windows version from metadata.json: #{ver}" unless win_ver_matcher
1053
+ normalized_version = win_ver_matcher['ver']
1054
+ normalized_version += " #{win_ver_matcher['rel'].upcase}" if win_ver_matcher['rel']
1055
+ normalized_version
1056
+ end
1057
+
1058
+ # @summary
1059
+ # Normalize the given os name
1060
+ # @param os
1061
+ # The OS name to normalize
1062
+ # @return [String]
1063
+ # Normalized os name
1064
+ def self.normalize_os(os)
1065
+ case os
1066
+ when %r{aix}i
1067
+ 'AIX'
1068
+ when %r{cent}i
1069
+ 'CentOS'
1070
+ when %r{darwin}i
1071
+ 'Darwin'
1072
+ when %r{deb}i
1073
+ 'Debian'
1074
+ when %r{fedora}i
1075
+ 'Fedora'
1076
+ when %r{oracle}i
1077
+ 'OracleLinux'
1078
+ when %r{osx}i
1079
+ 'OSX'
1080
+ when %r{pan}i
1081
+ 'PAN-OS'
1082
+ when %r{red}i
1083
+ 'RedHat'
1084
+ when %r{sci}i
1085
+ 'Scientific'
1086
+ when %r{suse|sles}i
1087
+ 'SLES'
1088
+ when %r{sol}i
1089
+ 'Solaris'
1090
+ when %r{ubuntu}i
1091
+ 'Ubuntu'
1092
+ when %r{win}i
1093
+ 'Windows'
1094
+ when %r{rocky}i
1095
+ 'Rocky'
1096
+ when %r{almalinux}i
1097
+ 'AlmaLinux'
1098
+ else
1099
+ raise "Could not normalize OS value: #{os}"
1100
+ end
1101
+ end
1102
+
1103
+ # @summary
1104
+ # Get the metadata.json of the given module
1105
+ # @param module_path
1106
+ # Path to the root dir of the module
1107
+ # @return [JSON]
1108
+ # JSON of the metadata.json
1109
+ def self.metadata_json(module_path)
1110
+ metadata_json = "#{module_path}/metadata.json"
1111
+ raise 'Could not locate metadata.json' unless File.exist? metadata_json
1112
+ JSON.parse(File.read(metadata_json))
1113
+ end
1114
+
1115
+ OPERATINGSYSTEM = 'operatingsystem'.freeze
1116
+ OPERATINGSYSTEMRELEASE = 'operatingsystemrelease'.freeze
1117
+ OPERATINGSYSTEM_SUPPORT = 'operatingsystem_support'.freeze
1118
+
1119
+ # @summary
1120
+ # Write the given metadata in JSON format to the given module root dir path
1121
+ # @param module_path
1122
+ # Path to the root dir of the module
1123
+ # @param metadata_json
1124
+ # Metadata in JSON format to write to the module root dir
1125
+ def self.write_metadata_json(module_path, metadata_json)
1126
+ File.open(File.join(module_path, 'metadata.json'), 'w') do |f|
1127
+ f.write(JSON.pretty_generate(metadata_json) + "\n")
1128
+ end
1129
+ end
1130
+
1131
+ # @summary
1132
+ # Normalize the 'operatingsystem_support' entries in the metadata.json
1133
+ # @param module_path
1134
+ # Path to the root dir of the module
1135
+ def self.normalize_metadata_supported_platforms(module_path)
1136
+ new_metadata_json = metadata_json(module_path)
1137
+
1138
+ new_metadata_json[OPERATINGSYSTEM_SUPPORT].each do |os_vers|
1139
+ normalized_os = normalize_os(os_vers[OPERATINGSYSTEM])
1140
+ unless normalized_os == os_vers[OPERATINGSYSTEM]
1141
+ PdkSync::Logger.info "Corrected OS Name: '#{os_vers[OPERATINGSYSTEM]}' -> '#{normalized_os}'"
1142
+ os_vers[OPERATINGSYSTEM] = normalized_os
1143
+ end
1144
+ next unless normalized_os == 'Windows'
1145
+ normalized_vers = os_vers[OPERATINGSYSTEMRELEASE].map { |v| normalize_win_version(v) }
1146
+ unless normalized_vers == os_vers[OPERATINGSYSTEMRELEASE]
1147
+ PdkSync::Logger.info "Corrected OS Versions: #{os_vers[OPERATINGSYSTEMRELEASE]} -> #{normalized_vers}"
1148
+ os_vers[OPERATINGSYSTEMRELEASE] = normalized_vers
1149
+ end
1150
+ end
1151
+
1152
+ write_metadata_json(module_path, new_metadata_json)
1153
+ end
1154
+
1155
    # @summary
    #   Removes the OS version from the supported platforms
    #   TODO: Remove entire OS entry when version is nil
    #   TODO: Remove entire OS entry when versions is empty
    # @param module_path
    #   Path to the root dir of the module
    # @param os_to_remove
    #   OS we want to remove version from
    # @param version_to_remove
    #   Version from OS we want to remove
    def self.remove_platform_from_metadata(module_path, os_to_remove, version_to_remove)
      new_metadata_json = metadata_json(module_path)
      new_metadata_json[OPERATINGSYSTEM_SUPPORT].each do |os_vers|
        if (os = normalize_os(os_vers[OPERATINGSYSTEM]))
          next unless os == os_to_remove
          vers = os_vers[OPERATINGSYSTEMRELEASE]
          # Only the first occurrence of the version is removed.
          next unless (ver_index = vers.find_index(version_to_remove))
          PdkSync::Logger.info "Removing #{os} #{vers[ver_index]} from metadata.json"
          vers.delete_at(ver_index)
        else
          # NOTE(review): normalize_os raises on unknown names rather than
          # returning nil/false, so this else branch appears unreachable; the
          # early `return true` here would also skip writing metadata back.
          PdkSync::Logger.info 'No entry in metadata.json to replace'
          return true
        end
      end
      write_metadata_json(module_path, new_metadata_json)
    end
1181
+
1182
+ # @summary
1183
+ # Adds an OS version to the supported platforms. Creates a new OS entry if it does not exist
1184
+ # @param module_path
1185
+ # Path to the root dir of the module
1186
+ # @param os_to_add
1187
+ # OS we want to add
1188
+ # @param version_to_add
1189
+ # Version we want to add
1190
+ def self.add_platform_to_metadata(module_path, os_to_add, version_to_add)
1191
+ os_to_add = normalize_os(os_to_add)
1192
+ new_metadata_json = metadata_json(module_path)
1193
+ updated_existing_entry = false
1194
+ new_metadata_json[OPERATINGSYSTEM_SUPPORT].each do |os_vers|
1195
+ next unless (os = normalize_os(os_vers[OPERATINGSYSTEM]))
1196
+ next unless os == os_to_add
1197
+ PdkSync::Logger.info "Adding #{os_to_add} version #{version_to_add} to existing entry"
1198
+ os_vers[OPERATINGSYSTEMRELEASE] << version_to_add
1199
+ os_vers[OPERATINGSYSTEMRELEASE].uniq!
1200
+ os_vers[OPERATINGSYSTEMRELEASE].sort_by!(&:to_f)
1201
+ updated_existing_entry = true
1202
+ break
1203
+ end
1204
+ unless updated_existing_entry
1205
+ PdkSync::Logger.info "Adding #{os_to_add} version #{version_to_add} to new entry"
1206
+ supported_platform_entry = {}
1207
+ supported_platform_entry[OPERATINGSYSTEM] = os_to_add
1208
+ supported_platform_entry[OPERATINGSYSTEMRELEASE] = [version_to_add]
1209
+ new_metadata_json[OPERATINGSYSTEM_SUPPORT] << supported_platform_entry
1210
+ end
1211
+ write_metadata_json(module_path, new_metadata_json)
1212
+ end
1213
+
1214
+ NAME = 'name'.freeze
1215
+ REQUIREMENTS = 'requirements'.freeze
1216
+
1217
+ # @summary
1218
+ # Updates the requirements parameter in the metadata.json. If the requirement or a key within it doesn't exist,
1219
+ # it is created.
1220
+ # TODO: Ability to remove requirement
1221
+ # @param module_path
1222
+ # Path to the root dir of the module
1223
+ # @param name
1224
+ # Name attribute of the requirement
1225
+ # @param key
1226
+ # The key name of a K/V pair to be added / updated in the requirement
1227
+ # @param value
1228
+ # The value of the key to be added / updated in the requirement
1229
+ def self.update_requirements(module_path, name, key, value)
1230
+ new_metadata_json = metadata_json(module_path)
1231
+ updated_existing_entry = false
1232
+ new_metadata_json[REQUIREMENTS].each do |requirement|
1233
+ next unless requirement[NAME] == name
1234
+ PdkSync::Logger.info "Updating [#{requirement['name']}] #{requirement.key? key ? "dependency's existing" : 'with a new'} key [#{key}] to value [#{value}]"
1235
+ requirement[key] = value
1236
+ updated_existing_entry = true
1237
+ end
1238
+ unless updated_existing_entry
1239
+ PdkSync::Logger.info "Adding new requirement [#{name}] with key [#{key}] of value [#{value}]"
1240
+ new_requirement = {}
1241
+ new_requirement[NAME] = name
1242
+ new_requirement[key] = value
1243
+ new_metadata_json[REQUIREMENTS] << new_requirement
1244
+ end
1245
+ write_metadata_json(module_path, new_metadata_json)
1246
+ end
1247
+
1248
    # @summary
    #   Generate an entry in the 'provision.yaml' for running release checks against the platforms that the given
    #   Puppet version. Will compare the supported platforms for the given Puppet version against the compatible
    #   platforms defined in the module's 'metadata.json' and generate a list of platforms that are the same.
    # @param [String] module_path
    #   Path to the module root dir
    # @param [String] puppet_version
    #   Puppet version we are generating platform checks for
    def self.generate_vmpooler_release_checks(module_path, puppet_version)
      PdkSync::Logger.info "Generating release checks provision.yaml key for Puppet version #{puppet_version}"
      # This YAML is where the compatible platforms for each Puppet version is stored
      agent_test_platforms_yaml_file_path = 'lib/pdksync/conf/puppet_abs_supported_platforms.yaml'
      agent_test_platforms = YAML.safe_load(File.read(agent_test_platforms_yaml_file_path))
      raise "No configuration for Puppet #{puppet_version} found in #{agent_test_platforms_yaml_file_path}" unless agent_test_platforms.key? puppet_version
      agent_test_platforms = agent_test_platforms[puppet_version]
      module_supported_platforms = module_supported_platforms(module_path)
      images = []
      PdkSync::Logger.debug 'Processing compatible platforms from metadata.json'
      module_supported_platforms.each do |os_vers|
        os = os_vers['operatingsystem'].downcase
        # 'Windows' and 'OracleLinux' are the definitions in 'metadata.json', however the VMPooler images are 'win' and 'oracle'
        os = 'win' if os == 'windows'
        os = 'oracle' if os == 'oraclelinux'
        vers = os_vers['operatingsystemrelease']
        # Skip OSes that have no test platforms configured for this Puppet version.
        if agent_test_platforms.keys.select { |k| k.start_with? os }.empty?
          PdkSync::Logger.warn "'#{os}' is a compatible platform but was not defined as test platform for Puppet #{puppet_version} in #{agent_test_platforms_yaml_file_path}"
          next
        end
        vers.each do |ver|
          PdkSync::Logger.debug "Checking '#{os} #{ver}'"
          if os == 'win'
            # Windows versions must be normalised (e.g. "Server 2012 R2" -> "2012 R2")
            # before comparing against the configured test platforms.
            win_ver = normalize_win_version(ver)
            PdkSync::Logger.debug "Normalised Windows version: #{win_ver}"
            next unless agent_test_platforms['win'].include? win_ver
            PdkSync::Logger.debug "'#{os} #{ver}' SUPPORTED by Puppet #{puppet_version}"
            images << "win-#{win_ver}-x86_64"
          else
            next unless agent_test_platforms[os].include? ver
            PdkSync::Logger.debug "'#{os} #{ver}' SUPPORTED by Puppet #{puppet_version}"
            # VMPooler image names drop the dots from the version (e.g. '18.04' -> '1804').
            images << "#{os}-#{ver.delete('.')}-x86_64"
          end
        end
      end
      images.uniq!
      result = add_provision_list(module_path, "release_checks_#{puppet_version}", 'abs', images)
      PdkSync::Logger.warn "#{module_path}/provision.yaml does not exist" unless result
    end
1295
+ end
1296
+ end