packaging 0.106.0 → 0.106.3

Files changed (66)
  1. checksums.yaml +4 -4
  2. data/README.md +4 -4
  3. data/lib/packaging/archive.rb +2 -2
  4. data/lib/packaging/artifactory/extensions.rb +1 -0
  5. data/lib/packaging/artifactory.rb +27 -23
  6. data/lib/packaging/config/params.rb +191 -199
  7. data/lib/packaging/config/validations.rb +0 -2
  8. data/lib/packaging/config.rb +8 -8
  9. data/lib/packaging/deb/repo.rb +11 -14
  10. data/lib/packaging/gem.rb +2 -2
  11. data/lib/packaging/metrics.rb +7 -7
  12. data/lib/packaging/nuget.rb +0 -1
  13. data/lib/packaging/paths.rb +11 -13
  14. data/lib/packaging/platforms.rb +10 -6
  15. data/lib/packaging/repo.rb +11 -12
  16. data/lib/packaging/retrieve.rb +1 -1
  17. data/lib/packaging/rpm/repo.rb +8 -8
  18. data/lib/packaging/sign/dmg.rb +8 -7
  19. data/lib/packaging/sign/ips.rb +64 -32
  20. data/lib/packaging/sign/msi.rb +84 -112
  21. data/lib/packaging/sign/rpm.rb +1 -1
  22. data/lib/packaging/sign.rb +0 -1
  23. data/lib/packaging/tar.rb +2 -4
  24. data/lib/packaging/util/date.rb +0 -1
  25. data/lib/packaging/util/distribution_server.rb +2 -2
  26. data/lib/packaging/util/execution.rb +2 -4
  27. data/lib/packaging/util/file.rb +2 -3
  28. data/lib/packaging/util/git.rb +1 -3
  29. data/lib/packaging/util/git_tags.rb +3 -3
  30. data/lib/packaging/util/gpg.rb +3 -4
  31. data/lib/packaging/util/jenkins.rb +0 -3
  32. data/lib/packaging/util/misc.rb +1 -1
  33. data/lib/packaging/util/net.rb +26 -22
  34. data/lib/packaging/util/repo.rb +0 -1
  35. data/lib/packaging/util/serialization.rb +1 -2
  36. data/lib/packaging/util/ship.rb +3 -3
  37. data/lib/packaging/util/sign.rb +8 -8
  38. data/lib/packaging/util/tool.rb +1 -4
  39. data/lib/packaging/util/version.rb +1 -5
  40. data/lib/packaging/util.rb +1 -1
  41. data/lib/packaging.rb +1 -2
  42. data/spec/lib/packaging/platforms_spec.rb +1 -1
  43. data/spec/lib/packaging/sign_spec.rb +1 -1
  44. data/spec/lib/packaging/util/git_spec.rb +2 -2
  45. data/spec/lib/packaging/util/git_tag_spec.rb +5 -5
  46. data/tasks/30_metrics.rake +2 -2
  47. data/tasks/apple.rake +8 -14
  48. data/tasks/archive.rake +1 -2
  49. data/tasks/deb.rake +7 -8
  50. data/tasks/doc.rake +5 -3
  51. data/tasks/education.rake +2 -4
  52. data/tasks/gem.rake +20 -12
  53. data/tasks/jenkins.rake +27 -15
  54. data/tasks/jenkins_dynamic.rake +10 -10
  55. data/tasks/mock.rake +8 -9
  56. data/tasks/nightly_repos.rake +14 -14
  57. data/tasks/pe_ship.rake +10 -17
  58. data/tasks/retrieve.rake +2 -2
  59. data/tasks/rpm.rake +1 -1
  60. data/tasks/ship.rake +6 -6
  61. data/tasks/sign.rake +5 -5
  62. data/tasks/tar.rake +2 -3
  63. data/tasks/update.rake +2 -2
  64. data/tasks/vendor_gems.rake +5 -7
  65. data/tasks/version.rake +2 -2
  66. metadata +59 -31

data/lib/packaging/paths.rb

@@ -2,7 +2,6 @@
 # This includes both reporting the correct path and divining the platform
 # tag associated with a variety of paths
 #
-# rubocop:disable Metrics/ModuleLength
 module Pkg::Paths
   include Pkg::Platforms
 
@@ -17,7 +16,7 @@ module Pkg::Paths
       return Pkg::Platforms.get_attribute_for_platform_version(platform, version, :source_architecture)
     end
     arches.find { |a| path.include?(package_arch(platform, a)) } || arches[0]
-  rescue
+  rescue StandardError
     arches.find { |a| path.include?(package_arch(platform, a)) } || arches[0]
   end
 
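
A note on the recurring `rescue` → `rescue StandardError` changes in this release: the two forms behave identically at runtime, since a bare `rescue` already catches only StandardError and its subclasses; the explicit form is the style RuboCop's Style/RescueStandardError cop asks for. A minimal sketch of the equivalence:

    # Equivalence sketch: bare `rescue` and `rescue StandardError` catch the
    # same exceptions; neither catches errors outside the StandardError tree.
    def risky
      raise ArgumentError, 'boom' # ArgumentError < StandardError
    end

    begin
      risky
    rescue # implicitly rescues StandardError
      puts 'caught by bare rescue'
    end

    begin
      risky
    rescue StandardError => e # same behavior, stated explicitly
      puts "caught explicitly: #{e.class}" # => caught explicitly: ArgumentError
    end
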
@@ -40,7 +39,7 @@ module Pkg::Paths
     arch = arch_from_artifact_path(platform, version, path)
 
     return "#{platform}-#{version}-#{arch}"
-  rescue
+  rescue StandardError
     fmt = Pkg::Platforms.all_supported_package_formats.find { |ext| path =~ /#{ext}$/ }
 
     # We need to make sure this is actually a file, and not simply a path
@@ -117,7 +116,7 @@ module Pkg::Paths
 
     # In puppet7 and beyond, we moved the repo_name to the top to allow each
     # puppet major release to have its own apt repo.
-    if %w(FUTURE-puppet7 FUTURE-puppet7-nightly).include? repo_name
+    if %w[FUTURE-puppet7 FUTURE-puppet7-nightly].include? repo_name
       return File.join(prefix, apt_repo_name(is_nonfinal), debian_code_name)
     end
 
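
The `%w(...)` → `%w[...]` edits here and below change only the delimiters; both literals build the same word array (square brackets are the delimiters RuboCop's Style/PercentLiteralDelimiters cop prefers for word and symbol arrays in its default configuration). For example:

    # Both literals produce the identical array of strings.
    paren   = %w(FUTURE-puppet7 FUTURE-puppet7-nightly)
    bracket = %w[FUTURE-puppet7 FUTURE-puppet7-nightly]
    puts paren == bracket # => true

    # The %i symbol-array literal used later in this diff works the same way.
    puts %i[repo_name repo_path] == [:repo_name, :repo_path] # => true
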
@@ -170,7 +169,7 @@ module Pkg::Paths
   # Given platform information, create symlink target (base_path) and link path in the
   # form of a 2-element array
   def artifacts_base_path_and_link_path(platform_tag, prefix = 'artifacts', is_nonfinal = false)
-    platform_name, _ = Pkg::Platforms.parse_platform_tag(platform_tag)
+    platform_name, = Pkg::Platforms.parse_platform_tag(platform_tag)
     package_format = Pkg::Platforms.package_format_for_tag(platform_tag)
 
     path_data = {
@@ -189,7 +188,7 @@ module Pkg::Paths
   end
 
   def artifacts_path(platform_tag, path_prefix = 'artifacts', nonfinal = false)
-    base_path, _ = artifacts_base_path_and_link_path(platform_tag, path_prefix, nonfinal)
+    base_path, = artifacts_base_path_and_link_path(platform_tag, path_prefix, nonfinal)
     platform, version, architecture = Pkg::Platforms.parse_platform_tag(platform_tag)
     package_format = Pkg::Platforms.package_format_for_tag(platform_tag)
 
@@ -306,24 +305,24 @@ module Pkg::Paths
       fail "Can't determine path for non-debian platform #{platform_tag}."
     end
 
-    platform, version, _ = Pkg::Platforms.parse_platform_tag(platform_tag)
+    platform, version, = Pkg::Platforms.parse_platform_tag(platform_tag)
     code_name = Pkg::Platforms.codename_for_platform_version(platform, version)
     remote_repo_path = remote_repo_base(platform_tag, nonfinal: nonfinal)
 
     # In puppet7 and beyond, we moved the puppet major version to near the top to allow each
     # puppet major release to have its own apt repo, for example:
     # /opt/repository/apt/puppet7/pool/bionic/p/puppet-agent
-    if %w(FUTURE-puppet7 FUTURE-puppet7-nightly).include? repo_name
+    if %w[FUTURE-puppet7 FUTURE-puppet7-nightly].include? repo_name
       return File.join(remote_repo_path, repo_name, 'pool', code_name, project[0], project)
     end
 
     # For repos prior to puppet7, the puppet version was part of the repository
     # For example: /opt/repository/apt/pool/bionic/puppet6/p/puppet-agent
-    if %w(puppet7 puppet7-nightly
+    if %w[puppet7 puppet7-nightly
           puppet6 puppet6-nightly
           puppet5 puppet5-nightly
-          puppet puppet-nightly
-          puppet-tools).include? repo_name
+          puppet puppet-nightly
+          puppet-tools].include? repo_name
       return File.join(remote_repo_path, 'pool', code_name, repo_name, project[0], project)
     end
 
@@ -331,7 +330,7 @@ module Pkg::Paths
   end
 
   def release_package_link_path(platform_tag, nonfinal = false)
-    platform, version, _ = Pkg::Platforms.parse_platform_tag(platform_tag)
+    platform, version, = Pkg::Platforms.parse_platform_tag(platform_tag)
     package_format = Pkg::Platforms.package_format_for_tag(platform_tag)
     case package_format
     when 'rpm'
@@ -372,5 +371,4 @@ module Pkg::Paths
   end
 
   private :package_arch
-
 end

data/lib/packaging/platforms.rb

@@ -4,7 +4,6 @@ require 'set'
 # explicitly supports
 module Pkg
   module Platforms
-
     module_function
 
     DEBIAN_SOURCE_FORMATS = ['debian.tar.gz', 'orig.tar.gz', 'dsc', 'changes']
@@ -113,6 +112,11 @@ module Pkg
         package_format: 'dmg',
         repo: false,
       },
+      '12' => {
+        architectures: ['x86_64', 'arm64'],
+        package_format: 'dmg',
+        repo: false,
+      },
     },
 
     'redhatfips' => {
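
With the new '12' entry in place, the existing lookup helpers should report macOS 12 without further changes. A hedged sketch, assuming the entry sits under the 'osx' platform key (as the dmg package format suggests) and that the gem loads via `require 'packaging'`:

    require 'packaging'

    # Hypothetical lookups against the new macOS 12 entry; both helpers appear
    # elsewhere in this diff (versions_for_platform, platform_lookup).
    puts Pkg::Platforms.versions_for_platform('osx').inspect # expected to include "12"
    puts Pkg::Platforms.platform_lookup('osx-12-arm64').inspect
    # => {:architectures=>["x86_64", "arm64"], :package_format=>"dmg", :repo=>false}
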
@@ -223,7 +227,7 @@ module Pkg
         repo: false,
       }
     },
-  }.freeze
+  }
 
   # @return [Array] An array of Strings, containing all of the supported
   # platforms as defined in PLATFORM_INFO
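
Dropping `.freeze` makes the top-level PLATFORM_INFO hash mutable again; with the frozen version, adding a platform key at runtime raises FrozenError. A minimal sketch of the difference (note that `freeze` is shallow, so nested hashes were never frozen anyway):

    # `freeze` blocks changes to the outer hash only.
    frozen   = { 'osx' => { '11' => { package_format: 'dmg' } } }.freeze
    unfrozen = { 'osx' => { '11' => { package_format: 'dmg' } } }

    unfrozen['solaris'] = {} # fine

    begin
      frozen['solaris'] = {} # top-level mutation of a frozen hash
    rescue FrozenError => e
      puts "refused: #{e.message}"
    end

    frozen['osx']['12'] = { package_format: 'dmg' } # still allowed: the nested hash is not frozen
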
@@ -235,7 +239,7 @@ module Pkg
   # versions for the given platform
   def versions_for_platform(platform)
     PLATFORM_INFO[platform].keys
-  rescue
+  rescue StandardError
     raise "No information found for '#{platform}'"
   end
 
@@ -277,7 +281,7 @@ module Pkg
     # AIX uses 'ppc' as its architecture in paths and file names
     architecture = 'ppc' if platform == 'aix'
     return [platform, version, architecture]
-  rescue
+  rescue StandardError
     raise "Could not verify that '#{platform_tag}' is a valid tag"
   end
 
@@ -286,7 +290,7 @@ module Pkg
   # platform-version-arch
   # @return [Hash] The hash of data associated with the given platform version
   def platform_lookup(platform_tag)
-    platform, version, _ = parse_platform_tag(platform_tag)
+    platform, version, = parse_platform_tag(platform_tag)
     PLATFORM_INFO[platform][version]
   end
 
@@ -396,7 +400,7 @@ module Pkg
     if include_source
       begin
         source_architecture = Array(get_attribute_for_platform_version(platform, version, :source_architecture))
-      rescue
+      rescue StandardError # rubocop:disable Lint/SuppressedException
       end
     end
     return (platform_architectures + source_architecture).flatten

data/lib/packaging/repo.rb

@@ -1,7 +1,5 @@
 module Pkg::Repo
-
   class << self
-
     ##
     ## Construct a local_target based upon the versioning style
     ##
@@ -38,7 +36,7 @@ module Pkg::Repo
       target_tarball = File.join('repos', "#{archive_name}.tar.gz")
       tar_command = %W[#{tar} --owner=0 --group=0 --create --gzip
                        --file #{target_tarball} #{repo_location}].join(' ')
-      stdout, _, _ = Pkg::Util::Execution.capture3(tar_command)
+      stdout, = Pkg::Util::Execution.capture3(tar_command)
       return stdout
     end
   end
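
The `stdout, _, _ =` → `stdout, =` edits rely on Ruby multiple assignment: a trailing comma on the left-hand side discards every remaining element of the right-hand array, so only the first value (stdout) is kept. A quick sketch:

    # capture3-style results are three-element arrays: [stdout, stderr, status].
    result = ['out text', 'err text', 0]

    stdout, _, _ = result # old style: name and ignore the extra elements
    first,       = result # new style: the trailing comma drops everything after the first

    puts stdout == first # => true
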
@@ -69,7 +67,7 @@ module Pkg::Repo
       tar_command = %W[#{tar} --owner=0 --group=0 #{tar_action}
                        --file #{all_repos_tarball_name} #{repo_tarball_path}].join(' ')
 
-      stdout, _, _ = Pkg::Util::Execution.capture3(tar_command)
+      stdout, = Pkg::Util::Execution.capture3(tar_command)
       puts stdout
     end
   end
@@ -82,7 +80,7 @@ module Pkg::Repo
       gzip = Pkg::Util::Tool.check_tool('gzip')
 
       gzip_command = "#{gzip} --fast #{all_repos_tarball_name}"
-      stdout, _, _ = Pkg::Util::Execution.capture3(gzip_command)
+      stdout, = Pkg::Util::Execution.capture3(gzip_command)
       puts stdout
     end
 
@@ -111,13 +109,13 @@ module Pkg::Repo
       cmd = "[ -d #{artifact_directory} ] || exit 1 ; "
       cmd << "pushd #{artifact_directory} > /dev/null && "
       cmd << "find . -name '*.#{pkg_ext}' -print0 | xargs --no-run-if-empty -0 -I {} dirname {} "
-      stdout, _ = Pkg::Util::Net.remote_execute(
-        Pkg::Config.distribution_server,
+      stdout, = Pkg::Util::Net.remote_execute(
+        Pkg::Config.distribution_server,
         cmd,
         { capture_output: true }
-      )
+      )
       return stdout.split
-    rescue => e
+    rescue StandardError => e
       fail "Error: Could not retrieve directories that contain #{pkg_ext} " \
            "packages in #{Pkg::Config.distribution_server}:#{artifact_directory}: #{e}"
     end
@@ -127,7 +125,7 @@ module Pkg::Repo
       cmd << "pushd #{artifact_parent_directory} > /dev/null && "
       cmd << 'rsync --archive --verbose --one-file-system --ignore-existing artifacts/ repos/ '
       Pkg::Util::Net.remote_execute(Pkg::Config.distribution_server, cmd)
-    rescue => e
+    rescue StandardError => e
       fail "Error: Could not populate repos directory in " \
            "#{Pkg::Config.distribution_server}:#{artifact_parent_directory}: #{e}"
     end
@@ -138,7 +136,7 @@ module Pkg::Repo
 
     def update_repo(remote_host, command, options = {})
       fail_message = "Error: Missing required argument '%s', perhaps update build_defaults?"
-      [:repo_name, :repo_path, :repo_host, :repo_url].each do |option|
+      %i[repo_name repo_path repo_host repo_url].each do |option|
         fail fail_message % option.to_s if argument_required?(option.to_s, command) && !options[option]
       end
 
@@ -152,7 +150,8 @@ module Pkg::Repo
       }
       Pkg::Util::Net.remote_execute(
         remote_host,
-        Pkg::Util::Misc.search_and_replace(command, repo_configuration))
+        Pkg::Util::Misc.search_and_replace(command, repo_configuration)
+      )
     end
   end
 end

data/lib/packaging/retrieve.rb

@@ -67,7 +67,7 @@ module Pkg::Retrieve
       warn "Could not find `wget` tool. Falling back to rsyncing from #{Pkg::Config.distribution_server}."
       begin
         Pkg::Util::Net.rsync_from("#{rsync_path}/", Pkg::Config.distribution_server, "#{local_target}/")
-      rescue => e
+      rescue StandardError => e
         fail "Couldn't rsync packages from distribution server.\n#{e}"
       end
     end

data/lib/packaging/rpm/repo.rb

@@ -75,7 +75,7 @@ module Pkg::Rpm::Repo
       path = Pathname.new(origin_path)
       dest_path = Pathname.new(destination_path)
 
-      options = %w(
+      options = %w[
         rsync
         --recursive
         --links
@@ -91,7 +91,7 @@ module Pkg::Rpm::Repo
         --no-perms
         --no-owner
         --no-group
-      )
+      ]
 
       options << '--dry-run' if dryrun
       options << path
@@ -99,7 +99,7 @@ module Pkg::Rpm::Repo
       if destination
         options << "#{destination}:#{dest_path.parent}"
       else
-        options << "#{dest_path.parent}"
+        options << dest_path.parent.to_s
       end
 
       options.join("\s")
@@ -117,9 +117,9 @@ module Pkg::Rpm::Repo
       FileUtils.mkdir_p("pkg/#{target}")
       config_url = "#{base_url}/#{target}/rpm/"
       begin
-        stdout, _, _ = Pkg::Util::Execution.capture3("#{wget} -r -np -nH --cut-dirs 3 -P pkg/#{target} --reject 'index*' #{config_url}")
+        stdout, = Pkg::Util::Execution.capture3("#{wget} -r -np -nH --cut-dirs 3 -P pkg/#{target} --reject 'index*' #{config_url}")
         stdout
-      rescue => e
+      rescue StandardError => e
        fail "Couldn't retrieve rpm yum repo configs.\n#{e}"
       end
     end
@@ -149,7 +149,7 @@ module Pkg::Rpm::Repo
       # repodata folders in them, and second that those same directories also
       # contain rpms
       #
-      stdout, _, _ = Pkg::Util::Execution.capture3("#{wget} --spider -r -l 5 --no-parent #{repo_base} 2>&1")
+      stdout, = Pkg::Util::Execution.capture3("#{wget} --spider -r -l 5 --no-parent #{repo_base} 2>&1")
       stdout = stdout.split.uniq.reject { |x| x =~ /\?|index/ }.select { |x| x =~ /http:.*repodata\/$/ }
 
       # RPMs will always exist at the same directory level as the repodata
@@ -157,7 +157,7 @@ module Pkg::Rpm::Repo
       #
       yum_repos = []
       stdout.map { |x| x.chomp('repodata/') }.each do |url|
-        output, _, _ = Pkg::Util::Execution.capture3("#{wget} --spider -r -l 1 --no-parent #{url} 2>&1")
+        output, = Pkg::Util::Execution.capture3("#{wget} --spider -r -l 1 --no-parent #{url} 2>&1")
         unless output.split.uniq.reject { |x| x =~ /\?|index/ }.select { |x| x =~ /http:.*\.rpm$/ }.empty?
           yum_repos << url
         end
@@ -204,7 +204,7 @@ module Pkg::Rpm::Repo
     end
 
     def create_local_repos(directory = "repos")
-      stdout, _, _ = Pkg::Util::Execution.capture3("bash -c '#{repo_creation_command(directory)}'")
+      stdout, = Pkg::Util::Execution.capture3("bash -c '#{repo_creation_command(directory)}'")
       stdout
     end
 

data/lib/packaging/sign/dmg.rb

@@ -8,25 +8,25 @@ module Pkg::Sign::Dmg
     end
 
     host_string = "#{ENV['USER']}@#{Pkg::Config.osx_signing_server}"
-    host_string = "#{Pkg::Config.osx_signing_server}" if Pkg::Config.osx_signing_server =~ /@/
+    host_string = Pkg::Config.osx_signing_server.to_s if Pkg::Config.osx_signing_server =~ /@/
 
     ssh_host_string = "#{use_identity} #{host_string}"
     rsync_host_string = "-e 'ssh #{use_identity}' #{host_string}"
-    archs = Dir.glob("#{pkg_directory}/{apple,mac,osx}/**/{x86_64,arm64}").map { |el| el.split('/').last }
+    archs = Dir.glob("#{pkg_directory}/{apple,mac,osx}/**/{x86_64,arm64}").map { |el| el.split('/').last }
 
     if archs.empty?
-      $stderr.puts "Error: no architectures found in #{pkg_directory}/{apple,mac,osx}"
+      warn "Error: no architectures found in #{pkg_directory}/{apple,mac,osx}"
       exit 1
     end
 
     archs.each do |arch|
       remote_working_directory = "/tmp/#{Pkg::Util.rand_string}/#{arch}"
       dmg_mount_point = File.join(remote_working_directory, "mount")
-      signed_items_directory = File.join(remote_working_directory, "signed")
+      signed_items_directory = File.join(remote_working_directory, "signed")
 
       dmgs = Dir.glob("#{pkg_directory}/{apple,mac,osx}/**/#{arch}/*.dmg")
       if dmgs.empty?
-        $stderr.puts "Error: no dmgs found in #{pkg_directory}/{apple,mac,osx} for #{arch} architecture."
+        warn "Error: no dmgs found in #{pkg_directory}/{apple,mac,osx} for #{arch} architecture."
         exit 1
       end
 
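
The `$stderr.puts` → `warn` swaps are behavior-preserving: Kernel#warn writes its message (plus a newline) to $stderr, which is what RuboCop's Style/StderrPuts cop recommends. For example:

    # Both lines print the same message on standard error.
    pkg_directory = 'pkg'
    $stderr.puts "Error: no architectures found in #{pkg_directory}/{apple,mac,osx}"
    warn "Error: no architectures found in #{pkg_directory}/{apple,mac,osx}"
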
@@ -43,7 +43,7 @@ module Pkg::Sign::Dmg
       for pkg in #{dmg_mount_point}/*.pkg; do
         pkg_basename=$(basename $pkg) ;
         if /usr/sbin/pkgutil --check-signature $pkg ; then
-          echo "Warning: $pkg is already signed, skipping" ;
+          echo Warning: $pkg is already signed skipping ;
           cp $pkg #{signed_items_directory}/$pkg_basename ;
           continue ;
         fi ;
@@ -70,7 +70,8 @@ module Pkg::Sign::Dmg
 
       dmgs.each do |dmg|
         Pkg::Util::Net.rsync_from(
-          "#{remote_working_directory}/#{File.basename(dmg)}", rsync_host_string, File.dirname(dmg))
+          "#{remote_working_directory}/#{File.basename(dmg)}", rsync_host_string, File.dirname(dmg)
+        )
       end
 
       Pkg::Util::Net.remote_execute(ssh_host_string, "rm -rf '#{remote_working_directory}'")

data/lib/packaging/sign/ips.rb

@@ -1,57 +1,89 @@
 module Pkg::Sign::Ips
   module_function
 
-  def sign(target_dir = 'pkg')
-    use_identity = "-i #{Pkg::Config.ips_signing_ssh_key}" unless Pkg::Config.ips_signing_ssh_key.nil?
+  def sign(packages_root = 'pkg')
+    identity_spec = ''
+    unless Pkg::Config.ips_signing_ssh_key.nil?
+      identity_spec = "-i #{Pkg::Config.ips_signing_ssh_key}"
+    end
+
+    signing_server_spec = Pkg::Config.ips_signing_server
+    unless Pkg::Config.ips_signing_server.match(%r{.+@.+})
+      signing_server_spec = "#{ENV['USER']}@#{Pkg::Config.ips_signing_server}"
+    end
 
-    ssh_host_string = "#{use_identity} #{ENV['USER']}@#{Pkg::Config.ips_signing_server}"
-    rsync_host_string = "-e 'ssh #{use_identity}' #{ENV['USER']}@#{Pkg::Config.ips_signing_server}"
+    ssh_host_spec = "#{identity_spec} #{signing_server_spec}"
+    rsync_host_spec = "-e 'ssh #{identity_spec}' #{signing_server_spec}"
 
-    p5ps = Dir.glob("#{target_dir}/solaris/11/**/*.p5p")
+    packages = Dir.glob("#{packages_root}/solaris/11/**/*.p5p")
 
-    p5ps.each do |p5p|
+    packages.each do |package|
       work_dir = "/tmp/#{Pkg::Util.rand_string}"
       unsigned_dir = "#{work_dir}/unsigned"
       repo_dir = "#{work_dir}/repo"
       signed_dir = "#{work_dir}/pkgs"
+      package_name = File.basename(package)
 
-      Pkg::Util::Net.remote_execute(ssh_host_string, "mkdir -p #{repo_dir} #{unsigned_dir} #{signed_dir}")
-      Pkg::Util::Net.rsync_to(p5p, rsync_host_string, unsigned_dir)
+      Pkg::Util::Net.remote_execute(
+        ssh_host_spec,
+        "mkdir -p #{repo_dir} #{unsigned_dir} #{signed_dir}"
+      )
+      Pkg::Util::Net.rsync_to(package, rsync_host_spec, unsigned_dir)
 
       # Before we can get started with signing packages we need to create a repo
-      Pkg::Util::Net.remote_execute(ssh_host_string, "sudo -E /usr/bin/pkgrepo create #{repo_dir}")
-      Pkg::Util::Net.remote_execute(ssh_host_string, "sudo -E /usr/bin/pkgrepo set -s #{repo_dir} publisher/prefix=puppetlabs.com")
-      # And import all the packages into the repo.
-      Pkg::Util::Net.remote_execute(ssh_host_string, "sudo -E /usr/bin/pkgrecv -s #{unsigned_dir}/#{File.basename(p5p)} -d #{repo_dir} '*'")
-      # We are going to hard code the values for signing cert locations for now.
-      # This autmation will require an update to actually become reusable, but
-      # for now these values will stay this way so solaris signing will stop
-      # failing. Please update soon. 06/23/16
-      #
-      # - Sean P. McDonald
-      #
+      Pkg::Util::Net.remote_execute(ssh_host_spec, "sudo -E /usr/bin/pkgrepo create #{repo_dir}")
+      Pkg::Util::Net.remote_execute(
+        ssh_host_spec,
+        "sudo -E /usr/bin/pkgrepo set -s #{repo_dir} publisher/prefix=puppetlabs.com"
+      )
+
+      # Import all the packages into the repo.
+      Pkg::Util::Net.remote_execute(
+        ssh_host_spec,
+        "sudo -E /usr/bin/pkgrecv -s #{unsigned_dir}/#{package_name} -d #{repo_dir} '*'"
+      )
+
       # We sign the entire repo
-      sign_cmd = "sudo -E /usr/bin/pkgsign -c /root/signing/signing_cert_2020.pem \
-                  -i /root/signing/Thawte_SHA256_Code_Signing_CA.pem \
-                  -i /root/signing/Thawte_Primary_Root_CA.pem \
-                  -k /root/signing/signing_key_2020.pem \
+      # Paths to the .pem files should live elsewhere rather than hardcoded here.
+      sign_cmd = "sudo -E /usr/bin/pkgsign -c /root/signing/signing_cert_2022.pem \
+                  -i /root/signing/DigiCert_Code_Signing_Certificate.pem \
+                  -i /root/signing/DigiCert_Trusted_Root.pem \
+                  -k /root/signing/signing_key_2022.pem \
                   -s 'file://#{work_dir}/repo' '*'"
-      puts "About to sign #{p5p} with #{sign_cmd} in #{work_dir}"
-      Pkg::Util::Net.remote_execute(ssh_host_string, sign_cmd.squeeze(' '))
-      # pkgrecv with -a will pull packages out of the repo, so we need to do that too to actually get the packages we signed
-      Pkg::Util::Net.remote_execute(ssh_host_string, "sudo -E /usr/bin/pkgrecv -d #{signed_dir}/#{File.basename(p5p)} -a -s #{repo_dir} '*'")
+      puts "Signing #{package} with #{sign_cmd} in #{work_dir}"
+      Pkg::Util::Net.remote_execute(ssh_host_spec, sign_cmd.squeeze(' '))
+
+      # pkgrecv with -a will pull packages out of the repo, so we need
+      # to do that too to actually get the packages we signed
+      Pkg::Util::Net.remote_execute(
+        ssh_host_spec,
+        "sudo -E /usr/bin/pkgrecv -d #{signed_dir}/#{package_name} -a -s #{repo_dir} '*'"
+      )
       begin
         # lets make sure we actually signed something?
         # **NOTE** if we're repeatedly trying to sign the same version this
         # might explode because I don't know how to reset the IPS cache.
        # Everything is amazing.
-        Pkg::Util::Net.remote_execute(ssh_host_string, "sudo -E /usr/bin/pkg contents -m -g #{signed_dir}/#{File.basename(p5p)} '*' | grep '^signature '")
+        Pkg::Util::Net.remote_execute(
+          ssh_host_spec,
+          "sudo -E /usr/bin/pkg contents -m -g #{signed_dir}/#{package_name} '*' " \
+          "| grep '^signature '"
+        )
       rescue RuntimeError
-        raise "Looks like #{File.basename(p5p)} was not signed correctly, quitting!"
+        raise "Error: #{package_name} was not signed correctly."
      end
-      # and pull the packages back.
-      Pkg::Util::Net.rsync_from("#{signed_dir}/#{File.basename(p5p)}", rsync_host_string, File.dirname(p5p))
-      Pkg::Util::Net.remote_execute(ssh_host_string, "if [ -e '#{work_dir}' ] ; then sudo rm -r '#{work_dir}' ; fi")
+
+      # Pull the packages back.
+      Pkg::Util::Net.rsync_from(
+        "#{signed_dir}/#{package_name}",
+        rsync_host_spec,
+        File.dirname(package)
+      )
+
+      Pkg::Util::Net.remote_execute(
+        ssh_host_spec,
+        "if [ -e '#{work_dir}' ] ; then sudo rm -r '#{work_dir}' ; fi"
+      )
     end
   end
 end
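
Calling the reworked signer is unchanged apart from the renamed parameter (`target_dir` → `packages_root`). A hedged invocation sketch; the server, key path, and accessor-style configuration are illustrative assumptions, not values taken from this diff:

    require 'packaging'

    # Hypothetical configuration; real values normally come from the project's build data.
    Pkg::Config.ips_signing_server  = 'user@solaris-signer.example.com'
    Pkg::Config.ips_signing_ssh_key = File.expand_path('~/.ssh/id_signing')

    # Signs every *.p5p found under pkg/solaris/11/** on the remote signing host,
    # then rsyncs the signed packages back next to the originals.
    Pkg::Sign::Ips.sign('pkg')
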