packaging 0.101.0 → 0.105.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,14 +1,20 @@
1
1
  # Module for shipping all packages to places
2
+
3
+ require 'English'
2
4
  require 'tmpdir'
5
+
3
6
  module Pkg::Util::Ship
4
7
  module_function
5
8
 
6
- def collect_packages(pkg_exts, excludes = []) # rubocop:disable Metrics/MethodLength
9
+ def collect_packages(pkg_exts, excludes = [])
7
10
  pkgs = pkg_exts.map { |ext| Dir.glob(ext) }.flatten
8
11
  return [] if pkgs.empty?
9
- excludes.each do |exclude|
10
- pkgs.delete_if { |p| p.match(exclude) }
11
- end if excludes
12
+
13
+ if excludes
14
+ excludes.each do |exclude|
15
+ pkgs.delete_if { |p| p.match(exclude) }
16
+ end
17
+ end
12
18
  if pkgs.empty?
13
19
  $stdout.puts "No packages with (#{pkg_exts.join(', ')}) extensions found staged in 'pkg'"
14
20
  $stdout.puts "Maybe your excludes argument (#{excludes}) is too restrictive?"
@@ -59,13 +65,13 @@ module Pkg::Util::Ship
59
65
  # false (most paths will be platform dependent), but set to true for gems
60
66
  # and tarballs since those just land directly under /opt/downloads/<project>
61
67
  #
62
- # rubocop:disable Metrics/MethodLength, Metrics/AbcSize
63
68
  def ship_pkgs(pkg_exts, staging_server, remote_path, opts = {})
64
69
  options = {
65
70
  excludes: [],
66
71
  chattr: true,
67
72
  platform_independent: false,
68
- nonfinal: false }.merge(opts)
73
+ nonfinal: false
74
+ }.merge(opts)
69
75
 
70
76
  # First find the packages to be shipped. We must find them before moving
71
77
  # to our temporary staging directory
@@ -73,35 +79,39 @@ module Pkg::Util::Ship
73
79
  return false if local_packages.empty?
74
80
 
75
81
  tmpdir = Dir.mktmpdir
76
- staged_pkgs = reorganize_packages(local_packages, tmpdir, options[:platform_independent], options[:nonfinal])
82
+ staged_pkgs = reorganize_packages(
83
+ local_packages, tmpdir, options[:platform_independent], options[:nonfinal]
84
+ )
77
85
 
78
86
  puts staged_pkgs.sort
79
87
  puts "Do you want to ship the above files to (#{staging_server})?"
80
- if Pkg::Util.ask_yes_or_no
81
- extra_flags = ['--ignore-existing', '--delay-updates']
82
- extra_flags << '--dry-run' if ENV['DRYRUN']
83
-
84
- staged_pkgs.each do |pkg|
85
- Pkg::Util::Execution.retry_on_fail(times: 3) do
86
- sub_string = 'pkg'
87
- remote_pkg = pkg.sub(sub_string, remote_path)
88
- remote_basepath = File.dirname(remote_pkg)
89
- Pkg::Util::Net.remote_execute(staging_server, "mkdir -p #{remote_basepath}")
90
- Pkg::Util::Net.rsync_to(
91
- File.join(tmpdir, pkg),
92
- staging_server,
93
- remote_basepath,
94
- extra_flags: extra_flags
95
- )
96
-
97
- Pkg::Util::Net.remote_set_ownership(staging_server, 'root', 'release', [remote_basepath, remote_pkg])
98
- Pkg::Util::Net.remote_set_permissions(staging_server, '775', [remote_basepath])
99
- Pkg::Util::Net.remote_set_permissions(staging_server, '0664', [remote_pkg])
100
- Pkg::Util::Net.remote_set_immutable(staging_server, [remote_pkg]) if options[:chattr]
101
- end
88
+ return false unless Pkg::Util.ask_yes_or_no
89
+
90
+ extra_flags = %w(--ignore-existing --delay-updates)
91
+ extra_flags << '--dry-run' if ENV['DRYRUN']
92
+
93
+ staged_pkgs.each do |pkg|
94
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
95
+ sub_string = 'pkg'
96
+ remote_pkg = pkg.sub(sub_string, remote_path)
97
+ remote_basepath = File.dirname(remote_pkg)
98
+ Pkg::Util::Net.remote_execute(staging_server, "mkdir -p #{remote_basepath}")
99
+ Pkg::Util::Net.rsync_to(
100
+ File.join(tmpdir, pkg),
101
+ staging_server,
102
+ remote_basepath,
103
+ extra_flags: extra_flags
104
+ )
105
+
106
+ Pkg::Util::Net.remote_set_ownership(
107
+ staging_server, 'root', 'release', [remote_basepath, remote_pkg]
108
+ )
109
+ Pkg::Util::Net.remote_set_permissions(staging_server, '775', [remote_basepath])
110
+ Pkg::Util::Net.remote_set_permissions(staging_server, '0664', [remote_pkg])
111
+ Pkg::Util::Net.remote_set_immutable(staging_server, [remote_pkg]) if options[:chattr]
102
112
  end
103
- return true
104
113
  end
114
+ return true
105
115
  end
106
116
 
107
117
  def ship_rpms(local_staging_directory, remote_path, opts = {})
@@ -123,6 +133,8 @@ module Pkg::Util::Ship
123
133
  ship_pkgs(things_to_ship, Pkg::Config.apt_signing_server, remote_path, opts)
124
134
  end
125
135
 
136
+
137
+
126
138
  def ship_svr4(local_staging_directory, remote_path, opts = {})
127
139
  ship_pkgs(["#{local_staging_directory}/**/*.pkg.gz"], Pkg::Config.svr4_host, remote_path, opts)
128
140
  end
@@ -132,33 +144,63 @@ module Pkg::Util::Ship
132
144
  end
133
145
 
134
146
  def ship_dmg(local_staging_directory, remote_path, opts = {})
135
- packages_have_shipped = ship_pkgs(["#{local_staging_directory}/**/*.dmg"],
136
- Pkg::Config.dmg_staging_server, remote_path, opts)
137
-
138
- if packages_have_shipped
139
- Pkg::Platforms.platform_tags_for_package_format('dmg').each do |platform_tag|
140
- # Create the latest symlink for the current supported repo
141
- Pkg::Util::Net.remote_create_latest_symlink(
142
- Pkg::Config.project,
143
- Pkg::Paths.artifacts_path(platform_tag, remote_path, opts[:nonfinal]),
144
- 'dmg'
145
- )
146
- end
147
+ packages_have_shipped = ship_pkgs(
148
+ ["#{local_staging_directory}/**/*.dmg"],
149
+ Pkg::Config.dmg_staging_server, remote_path, opts
150
+ )
151
+
152
+ return unless packages_have_shipped
153
+
154
+ Pkg::Platforms.platform_tags_for_package_format('dmg').each do |platform_tag|
155
+ # Create the latest symlink for the current supported repo
156
+ Pkg::Util::Net.remote_create_latest_symlink(
157
+ Pkg::Config.project,
158
+ Pkg::Paths.artifacts_path(platform_tag, remote_path, opts[:nonfinal]),
159
+ 'dmg'
160
+ )
147
161
  end
148
162
  end
149
163
 
150
164
  def ship_swix(local_staging_directory, remote_path, opts = {})
151
- ship_pkgs(["#{local_staging_directory}/**/*.swix"], Pkg::Config.swix_staging_server, remote_path, opts)
165
+ ship_pkgs(
166
+ ["#{local_staging_directory}/**/*.swix"],
167
+ Pkg::Config.swix_staging_server,
168
+ remote_path,
169
+ opts
170
+ )
152
171
  end
153
172
 
154
173
  def ship_msi(local_staging_directory, remote_path, opts = {})
155
- packages_have_shipped = ship_pkgs(["#{local_staging_directory}/**/*.msi"], Pkg::Config.msi_staging_server, remote_path, opts)
174
+ packages_have_shipped = ship_pkgs(
175
+ ["#{local_staging_directory}/**/*.msi"],
176
+ Pkg::Config.msi_staging_server,
177
+ remote_path,
178
+ opts
179
+ )
180
+ return unless packages_have_shipped
156
181
 
157
- if packages_have_shipped
158
- # Create the symlinks for the latest supported repo
159
- Pkg::Util::Net.remote_create_latest_symlink(Pkg::Config.project, Pkg::Paths.artifacts_path(Pkg::Platforms.generic_platform_tag('windows'), remote_path, opts[:nonfinal]), 'msi', arch: 'x64')
160
- Pkg::Util::Net.remote_create_latest_symlink(Pkg::Config.project, Pkg::Paths.artifacts_path(Pkg::Platforms.generic_platform_tag('windows'), remote_path, opts[:nonfinal]), 'msi', arch: 'x86')
161
- end
182
+ # Create the symlinks for the latest supported repo
183
+ Pkg::Util::Net.remote_create_latest_symlink(
184
+ Pkg::Config.project,
185
+ Pkg::Paths.artifacts_path(
186
+ Pkg::Platforms.generic_platform_tag('windows'),
187
+ remote_path,
188
+ opts[:nonfinal]
189
+ ),
190
+ 'msi',
191
+ arch: 'x64'
192
+ )
193
+
194
+ Pkg::Util::Net.remote_create_latest_symlink(
195
+ Pkg::Config.project,
196
+ Pkg::Paths.artifacts_path(
197
+ Pkg::Platforms.generic_platform_tag('windows'),
198
+ remote_path,
199
+ opts[:nonfinal]
200
+ ),
201
+ 'msi',
202
+ arch: 'x86'
203
+ )
162
204
  end
163
205
 
164
206
  def ship_gem(local_staging_directory, remote_path, opts = {})
@@ -166,44 +208,32 @@ module Pkg::Util::Ship
166
208
  end
167
209
 
168
210
  def ship_tar(local_staging_directory, remote_path, opts = {})
169
- ship_pkgs(["#{local_staging_directory}/*.tar.gz*"], Pkg::Config.tar_staging_server, remote_path, opts)
211
+ ship_pkgs(
212
+ ["#{local_staging_directory}/*.tar.gz*"],
213
+ Pkg::Config.tar_staging_server,
214
+ remote_path,
215
+ opts
216
+ )
170
217
  end
171
218
 
172
219
  def rolling_repo_link_command(platform_tag, repo_path, nonfinal = false)
173
- base_path, link_path = Pkg::Paths.artifacts_base_path_and_link_path(platform_tag, repo_path, nonfinal)
220
+ base_path, link_path = Pkg::Paths.artifacts_base_path_and_link_path(
221
+ platform_tag,
222
+ repo_path,
223
+ nonfinal
224
+ )
174
225
 
175
226
  if link_path.nil?
176
227
  puts "No link target set, not creating rolling repo link for #{base_path}"
177
228
  return nil
178
229
  end
179
-
180
- cmd = <<-CMD
181
- if [ ! -d #{base_path} ] ; then
182
- echo "Link target '#{base_path}' does not exist; skipping"
183
- exit 0
184
- fi
185
- # If it's a link but pointing to the wrong place, remove the link
186
- # This is likely to happen around the transition times, like puppet5 -> puppet6
187
- if [ -L #{link_path} ] && [ ! #{base_path} -ef #{link_path} ] ; then
188
- rm #{link_path}
189
- # This is the link you're looking for, nothing to see here
190
- elif [ -L #{link_path} ] ; then
191
- exit 0
192
- # Don't want to delete it if it isn't a link, that could be destructive
193
- # So, fail!
194
- elif [ -e #{link_path} ] ; then
195
- echo "#{link_path} exists but isn't a link, I don't know what to do with this" >&2
196
- exit 1
197
- fi
198
- ln -s #{base_path} #{link_path}
199
- CMD
200
230
  end
201
231
 
202
232
  def create_rolling_repo_link(platform_tag, staging_server, repo_path, nonfinal = false)
203
233
  command = rolling_repo_link_command(platform_tag, repo_path, nonfinal)
204
234
 
205
235
  Pkg::Util::Net.remote_execute(staging_server, command) unless command.nil?
206
- rescue => e
236
+ rescue StandardError => e
207
237
  fail "Failed to create rolling repo link for '#{platform_tag}'.\n#{e}\n#{e.backtrace}"
208
238
  end
209
239
 
@@ -214,10 +244,33 @@ module Pkg::Util::Ship
214
244
  swix_path = Pkg::Paths.remote_repo_base(nonfinal: nonfinal, package_format: 'swix')
215
245
  msi_path = Pkg::Paths.remote_repo_base(nonfinal: nonfinal, package_format: 'msi')
216
246
 
217
- create_rolling_repo_link(Pkg::Platforms.generic_platform_tag('el'), Pkg::Config.yum_staging_server, yum_path, nonfinal)
218
- create_rolling_repo_link(Pkg::Platforms.generic_platform_tag('osx'), Pkg::Config.dmg_staging_server, dmg_path, nonfinal)
219
- create_rolling_repo_link(Pkg::Platforms.generic_platform_tag('eos'), Pkg::Config.swix_staging_server, swix_path, nonfinal)
220
- create_rolling_repo_link(Pkg::Platforms.generic_platform_tag('windows'), Pkg::Config.msi_staging_server, msi_path, nonfinal)
247
+ create_rolling_repo_link(
248
+ Pkg::Platforms.generic_platform_tag('el'),
249
+ Pkg::Config.yum_staging_server,
250
+ yum_path,
251
+ nonfinal
252
+ )
253
+
254
+ create_rolling_repo_link(
255
+ Pkg::Platforms.generic_platform_tag('osx'),
256
+ Pkg::Config.dmg_staging_server,
257
+ dmg_path,
258
+ nonfinal
259
+ )
260
+
261
+ create_rolling_repo_link(
262
+ Pkg::Platforms.generic_platform_tag('eos'),
263
+ Pkg::Config.swix_staging_server,
264
+ swix_path,
265
+ nonfinal
266
+ )
267
+
268
+ create_rolling_repo_link(
269
+ Pkg::Platforms.generic_platform_tag('windows'),
270
+ Pkg::Config.msi_staging_server,
271
+ msi_path,
272
+ nonfinal
273
+ )
221
274
 
222
275
  # We need to iterate through all the supported platforms here because of
223
276
  # how deb repos are set up. Each codename will have its own link from the
@@ -231,7 +284,12 @@ module Pkg::Util::Ship
231
284
  apt_path = Pkg::Config.nonfinal_apt_repo_staging_path
232
285
  end
233
286
  Pkg::Platforms.codenames.each do |codename|
234
- create_rolling_repo_link(Pkg::Platforms.codename_to_tags(codename)[0], Pkg::Config.apt_signing_server, apt_path, nonfinal)
287
+ create_rolling_repo_link(
288
+ Pkg::Platforms.codename_to_tags(codename)[0],
289
+ Pkg::Config.apt_signing_server,
290
+ apt_path,
291
+ nonfinal
292
+ )
235
293
  end
236
294
  end
237
295
 
@@ -297,4 +355,151 @@ module Pkg::Util::Ship
297
355
  end
298
356
  Rake::Task[ship_task].invoke
299
357
  end
358
+
359
+ # Ship pkg directory contents to distribution server
360
+ def ship(target = 'artifacts', local_directory = 'pkg')
361
+ Pkg::Util::File.fetch
362
+
363
+ unless Pkg::Config.project
364
+ fail "You must set the 'project' in build_defaults.yaml or with the 'PROJECT_OVERRIDE' environment variable."
365
+ end
366
+
367
+ project_basedir = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
368
+ artifact_directory = "#{project_basedir}/#{target}"
369
+
370
+ # For EZBake builds, we also want to include the ezbake.manifest file to
371
+ # get a snapshot of this build and all dependencies. We eventually will
372
+ # create a yaml version of this file, but until that point we want to
373
+ # make the original ezbake.manifest available
374
+ #
375
+ ezbake_manifest = File.join('ext', 'ezbake.manifest')
376
+ if File.exist?(ezbake_manifest)
377
+ FileUtils.cp(ezbake_manifest, File.join(local_directory, "#{Pkg::Config.ref}.ezbake.manifest"))
378
+ end
379
+ ezbake_yaml = File.join("ext", "ezbake.manifest.yaml")
380
+ if File.exists?(ezbake_yaml)
381
+ FileUtils.cp(ezbake_yaml, File.join(local_directory, "#{Pkg::Config.ref}.ezbake.manifest.yaml"))
382
+ end
383
+
384
+ # Inside build_metadata*.json files there is additional metadata containing
385
+ # information such as git ref and dependencies that are needed at build
386
+ # time. If these files exist, copy them downstream.
387
+ # Typically these files are named 'ext/build_metadata.<project>.<platform>.json'
388
+ build_metadata_json_files = Dir.glob('ext/build_metadata*.json')
389
+ build_metadata_json_files.each do |source_file|
390
+ target_file = File.join(local_directory, "#{Pkg::Config.ref}.#{File.basename(source_file)}")
391
+ FileUtils.cp(source_file, target_file)
392
+ end
393
+
394
+ # Sadly, the packaging repo cannot yet act on its own, without living
395
+ # inside of a packaging-repo compatible project. This means in order to
396
+ # use the packaging repo for shipping and signing (things that really
397
+ # don't require build automation, specifically) we still need the project
398
+ # clone itself.
399
+ Pkg::Util::Git.bundle('HEAD', 'signing_bundle', local_directory)
400
+
401
+ # While we're bundling things, let's also make a git bundle of the
402
+ # packaging repo that we're using when we invoke pl:jenkins:ship. We can
403
+ # have a reasonable level of confidence, later on, that the git bundle on
404
+ # the distribution server was, in fact, the git bundle used to create the
405
+ # associated packages. This is because this ship task is automatically
406
+ # called upon completion each cell of the pl:jenkins:uber_build, and we
407
+ # have --ignore-existing set below. As such, the only git bundle that
408
+ # should possibly be on the distribution is the one used to create the
409
+ # packages.
410
+ # We're bundling the packaging repo because it allows us to keep an
411
+ # archive of the packaging source that was used to create the packages,
412
+ # so that later on if we need to rebuild an older package to audit it or
413
+ # for some other reason we're assured that the new package isn't
414
+ # different by virtue of the packaging automation.
415
+ if defined?(PACKAGING_ROOT)
416
+ packaging_bundle = ''
417
+ Dir.chdir(PACKAGING_ROOT) do
418
+ packaging_bundle = Pkg::Util::Git.bundle('HEAD', 'packaging-bundle')
419
+ end
420
+ FileUtils.mv(packaging_bundle, local_directory)
421
+ end
422
+
423
+ # This is functionality to add the project-arch.msi links that have no
424
+ # version. The code itself looks for the link (if it's there already)
425
+ # and if the source package exists before linking. Searching for the
426
+ # packages has been restricted specifically to just the pkg/windows dir
427
+ # on purpose, as this is where we currently have all windows packages
428
+ # building to. Once we move the Metadata about the output location in
429
+ # to one source of truth we can refactor this to use that to search
430
+ # -Sean P. M. 08/12/16
431
+
432
+ {
433
+ 'windows' => ['x86', 'x64'],
434
+ 'windowsfips' => ['x64']
435
+ }.each_pair do |platform, archs|
436
+ packages = Dir["#{local_directory}/#{platform}/*"]
437
+
438
+ archs.each do |arch|
439
+ package_version = Pkg::Util::Git.describe.tr('-', '.')
440
+ package_filename = File.join(local_directory, platform, "#{Pkg::Config.project}-#{package_version}-#{arch}.msi")
441
+ link_filename = File.join(local_directory, platform, "#{Pkg::Config.project}-#{arch}.msi")
442
+
443
+ next unless !packages.include?(link_filename) && packages.include?(package_filename)
444
+ # Dear future code spelunkers:
445
+ # Using symlinks instead of hard links causes failures when we try
446
+ # to set these files to be immutable. Also be wary of whether the
447
+ # linking utility you're using expects the source path to be relative
448
+ # to the link target or pwd.
449
+ #
450
+ FileUtils.ln(package_filename, link_filename)
451
+ end
452
+ end
453
+
454
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
455
+ Pkg::Util::Net.remote_execute(Pkg::Config.distribution_server, "mkdir --mode=775 -p #{project_basedir}")
456
+ Pkg::Util::Net.remote_execute(Pkg::Config.distribution_server, "mkdir -p #{artifact_directory}")
457
+ Pkg::Util::Net.rsync_to("#{local_directory}/", Pkg::Config.distribution_server, "#{artifact_directory}/", extra_flags: ['--ignore-existing', '--exclude repo_configs'])
458
+ end
459
+
460
+ # In order to get a snapshot of what this build looked like at the time
461
+ # of shipping, we also generate and ship the params file
462
+ #
463
+ Pkg::Config.config_to_yaml(local_directory)
464
+ Pkg::Util::Execution.retry_on_fail(:times => 3) do
465
+ Pkg::Util::Net.rsync_to("#{local_directory}/#{Pkg::Config.ref}.yaml", Pkg::Config.distribution_server, "#{artifact_directory}/", extra_flags: ["--exclude repo_configs"])
466
+ end
467
+
468
+ # If we just shipped a tagged version, we want to make it immutable
469
+ files = Dir.glob("#{local_directory}/**/*").select { |f| File.file?(f) and !f.include? "#{Pkg::Config.ref}.yaml" }.map do |file|
470
+ "#{artifact_directory}/#{file.sub(/^#{local_directory}\//, '')}"
471
+ end
472
+
473
+ Pkg::Util::Net.remote_set_ownership(Pkg::Config.distribution_server, 'root', 'release', files)
474
+ Pkg::Util::Net.remote_set_permissions(Pkg::Config.distribution_server, '0664', files)
475
+ Pkg::Util::Net.remote_set_immutable(Pkg::Config.distribution_server, files)
476
+ end
477
+
478
+ def ship_to_artifactory(local_directory = 'pkg')
479
+ Pkg::Util::File.fetch
480
+ unless Pkg::Config.project
481
+ fail "You must set the 'project' in build_defaults.yaml or with the 'PROJECT_OVERRIDE' environment variable."
482
+ end
483
+ artifactory = Pkg::ManageArtifactory.new(Pkg::Config.project, Pkg::Config.ref)
484
+
485
+ artifacts = Dir.glob("#{local_directory}/**/*").reject { |e| File.directory? e }
486
+ artifacts.sort! do |a, b|
487
+ if File.extname(a) =~ /(md5|sha\d+)/ && File.extname(b) !~ /(md5|sha\d+)/
488
+ 1
489
+ elsif File.extname(b) =~ /(md5|sha\d+)/ && File.extname(a) !~ /(md5|sha\d+)/
490
+ -1
491
+ else
492
+ a <=> b
493
+ end
494
+ end
495
+ artifacts.each do |artifact|
496
+ if File.extname(artifact) == ".yaml" || File.extname(artifact) == ".json"
497
+ artifactory.deploy_package(artifact)
498
+ elsif artifactory.package_exists_on_artifactory?(artifact)
499
+ warn "Attempt to upload '#{artifact}' failed. Package already exists!"
500
+ else
501
+ artifactory.deploy_package(artifact)
502
+ end
503
+ end
504
+ end
300
505
  end
@@ -0,0 +1,47 @@
1
+ # Module for signing all locally staged packages
2
+
3
+
4
+ module Pkg::Util::Sign
5
+ class << self
6
+ # Sign all locally staged packages on signing server.
7
+ def sign_all(root_directory = nil)
8
+ Pkg::Util::File.fetch
9
+ root_directory ||= ENV['DEFAULT_DIRECTORY']
10
+ Dir["#{root_directory}/*"].empty? and fail "There were no files found in #{root_directory}. \
11
+ Maybe you wanted to build/retrieve something first?"
12
+
13
+ # Because rpms and debs are laid out differently in PE under pkg/ they
14
+ # have a different sign task to address this. Rather than create a whole
15
+ # extra :jenkins task for signing PE, we determine which sign task to use
16
+ # based on if we're building PE.
17
+ # We also listen in on the environment variable SIGNING_BUNDLE. This is
18
+ # _NOT_ intended for public use, but rather with the internal promotion
19
+ # workflow for Puppet Enterprise. SIGNING_BUNDLE is the path to a tarball
20
+ # containing a git bundle to be used as the environment for the packaging
21
+ # repo in a signing operation.
22
+ signing_bundle = ENV['SIGNING_BUNDLE']
23
+ sign_tasks = ["pl:sign_rpms"]
24
+ sign_tasks << "pl:sign_deb_changes" unless Dir["#{root_directory}/**/*.changes"].empty?
25
+ sign_tasks << "pl:sign_tar" if Pkg::Config.build_tar
26
+ sign_tasks << "pl:sign_gem" if Pkg::Config.build_gem
27
+ sign_tasks << "pl:sign_osx" if Pkg::Config.build_dmg || Pkg::Config.vanagon_project
28
+ sign_tasks << "pl:sign_swix" if Pkg::Config.vanagon_project
29
+ sign_tasks << "pl:sign_svr4" if Pkg::Config.vanagon_project
30
+ sign_tasks << "pl:sign_ips" if Pkg::Config.vanagon_project
31
+ sign_tasks << "pl:sign_msi" if Pkg::Config.build_msi || Pkg::Config.vanagon_project
32
+ remote_repo = Pkg::Util::Net.remote_unpack_git_bundle(Pkg::Config.signing_server, 'HEAD', nil, signing_bundle)
33
+ build_params = Pkg::Util::Net.remote_buildparams(Pkg::Config.signing_server, Pkg::Config)
34
+ Pkg::Util::Net.rsync_to(root_directory, Pkg::Config.signing_server, remote_repo)
35
+ rake_command = <<-DOC
36
+ cd #{remote_repo} ;
37
+ #{Pkg::Util::Net.remote_bundle_install_command}
38
+ bundle exec rake #{sign_tasks.map { |task| task + "[#{root_directory}]" }.join(" ")} PARAMS_FILE=#{build_params}
39
+ DOC
40
+ Pkg::Util::Net.remote_execute(Pkg::Config.signing_server, rake_command)
41
+ Pkg::Util::Net.rsync_from("#{remote_repo}/#{root_directory}/", Pkg::Config.signing_server, "#{root_directory}/")
42
+ Pkg::Util::Net.remote_execute(Pkg::Config.signing_server, "rm -rf #{remote_repo}")
43
+ Pkg::Util::Net.remote_execute(Pkg::Config.signing_server, "rm #{build_params}")
44
+ puts "Signed packages staged in #{root_directory}/ directory"
45
+ end
46
+ end
47
+ end
@@ -0,0 +1,38 @@
1
+ # Utility methods for handling windows
2
+
3
+ require 'fileutils'
4
+
5
+ module Pkg::Util::Windows
6
+ class << self
7
+ def add_msi_links(local_source_directory)
8
+ {
9
+ 'windows' => ['x86', 'x64'],
10
+ 'windowsfips' => ['x64']
11
+ }.each_pair do |platform, archs|
12
+ packages = Dir["#{local_source_directory}/#{platform}/*"]
13
+
14
+ archs.each do |arch|
15
+ package_version = Pkg::Util::Git.describe.tr('-', '.')
16
+ package_filename = File.join(
17
+ local_source_directory, platform,
18
+ "#{Pkg::Config.project}-#{package_version}-#{arch}.msi"
19
+ )
20
+ link_filename = File.join(
21
+ local_source_directory,
22
+ platform,
23
+ "#{Pkg::Config.project}-#{arch}.msi"
24
+ )
25
+
26
+ next unless !packages.include?(link_filename) && packages.include?(package_filename)
27
+
28
+ # Dear future code spelunkers:
29
+ # Using symlinks instead of hard links causes failures when we try
30
+ # to set these files to be immutable. Also be wary of whether the
31
+ # linking utility you're using expects the source path to be relative
32
+ # to the link target or pwd.
33
+ FileUtils.ln(package_filename, link_filename)
34
+ end
35
+ end
36
+ end
37
+ end
38
+ end
@@ -4,8 +4,12 @@ module Pkg::Util
4
4
  require 'benchmark'
5
5
  require 'base64'
6
6
  require 'io/console'
7
+ require 'packaging/util/apt_staging_server'
8
+ require 'packaging/util/build_metadata'
7
9
  require 'packaging/util/date'
10
+ require 'packaging/util/distribution_server'
8
11
  require 'packaging/util/execution'
12
+ require 'packaging/util/ezbake'
9
13
  require 'packaging/util/file'
10
14
  require 'packaging/util/git'
11
15
  require 'packaging/util/gpg'
@@ -19,7 +23,10 @@ module Pkg::Util
19
23
  require 'packaging/util/tool'
20
24
  require 'packaging/util/rake_utils'
21
25
  require 'packaging/util/version'
26
+ require 'packaging/util/windows'
22
27
  require 'packaging/util/git_tags'
28
+ require 'packaging/util/sign'
29
+ require 'packaging/util/repo'
23
30
 
24
31
  def self.boolean_value(var)
25
32
  return true if var == true || ( var.is_a?(String) && ( var.downcase == 'true' || var.downcase =~ /^y$|^yes$/))
@@ -223,7 +223,7 @@ describe "Pkg::Config" do
223
223
  "./artifacts/aix/7.1/PC1/ppc/puppet-agent-5.3.2-1.aix7.1.ppc.rpm"
224
224
 
225
225
  fedora_artifacts = \
226
- "./artifacts/fedora/31/PC1/x86_64/puppet-agent-5.3.2-1.fc31.x86_64.rpm"
226
+ "./artifacts/fedora/32/PC1/x86_64/puppet-agent-5.3.2-1.fc32.x86_64.rpm"
227
227
 
228
228
  windows_artifacts = \
229
229
  "./artifacts/windows/puppet-agent-x64.msi\n" \
@@ -280,8 +280,8 @@ describe "Pkg::Config" do
280
280
  it "should not use 'f' in fedora platform tags" do
281
281
  allow(Pkg::Util::Net).to receive(:remote_execute).and_return(fedora_artifacts, nil)
282
282
  data = Pkg::Config.platform_data
283
- expect(data).to include('fedora-31-x86_64')
284
- expect(data).not_to include('fedora-f31-x86_64')
283
+ expect(data).to include('fedora-32-x86_64')
284
+ expect(data).not_to include('fedora-f32-x86_64')
285
285
  end
286
286
 
287
287
  it "should collect packages whose extname differ from package_format" do
@@ -6,7 +6,7 @@ describe "Pkg::Deb::Repo" do
6
6
  let(:project) { "deb_repos" }
7
7
  let(:ref) { "1234abcd" }
8
8
  let(:base_url) { "http://#{builds_server}/#{project}/#{ref}" }
9
- let(:cows) { ["xenial", "jessie", "trusty", "stretch", ""] }
9
+ let(:cows) { ["xenial", "trusty", "stretch", ""] }
10
10
  let(:wget_results) { cows.map {|cow| "#{base_url}/repos/apt/#{cow}" }.join("\n") }
11
11
  let(:wget_garbage) { "\n and an index\nhttp://somethingelse.com/robots" }
12
12
  let(:repo_configs) { cows.reject {|cow| cow.empty?}.map {|dist| "pkg/repo_configs/deb/pl-#{project}-#{ref}-#{dist}.list" } }
@@ -5,12 +5,11 @@ describe 'Pkg::Paths' do
5
5
  arch_transformations = {
6
6
  ['pkg/el-8-x86_64/puppet-agent-6.9.0-1.el8.x86_64.rpm', 'el', '8'] => 'x86_64',
7
7
  ['pkg/el/8/puppet6/aarch64/puppet-agent-6.5.0.3094.g16b6fa6f-1.el8.aarch64.rpm', 'el', '8'] => 'aarch64',
8
- ['artifacts/fedora/32/puppet6/x86_64/puppet-agent-6.9.0-1.fc30.x86_64.rpm', 'fedora', '32'] => 'x86_64',
8
+ ['artifacts/fedora/32/puppet6/x86_64/puppet-agent-6.9.0-1.fc32.x86_64.rpm', 'fedora', '32'] => 'x86_64',
9
9
  ['pkg/ubuntu-16.04-amd64/puppet-agent_4.99.0-1xenial_amd64.deb', 'ubuntu', '16.04'] => 'amd64',
10
10
  ['artifacts/deb/focal/puppet6/puppet-agent_6.5.0.3094.g16b6fa6f-1focal_arm64.deb', 'ubuntu', '20.04'] => 'aarch64',
11
11
 
12
12
  ['artifacts/ubuntu-16.04-i386/puppetserver_5.0.1-0.1SNAPSHOT.2017.07.27T2346puppetlabs1.debian.tar.gz', 'ubuntu', '16.04'] => 'source',
13
- ['artifacts/deb/jessie/PC1/puppetserver_5.0.1.master.orig.tar.gz', 'debian', '8'] => 'source',
14
13
  ['artifacts/el/6/PC1/SRPMS/puppetserver-5.0.1.master-0.1SNAPSHOT.2017.08.18T0951.el6.src.rpm', 'el', '6'] => 'SRPMS'
15
14
  }
16
15
  arch_transformations.each do |path_array, arch|
@@ -273,7 +272,7 @@ describe 'Pkg::Paths' do
273
272
  .to eq(fake_apt_repo_path)
274
273
  end
275
274
  it 'returns nonfinal_yum_repo_path for nonfinal rpms' do
276
- expect(Pkg::Paths.remote_repo_base('fedora-31-x86_64', nonfinal: true))
275
+ expect(Pkg::Paths.remote_repo_base('fedora-34-x86_64', nonfinal: true))
277
276
  .to eq(fake_yum_nightly_repo_path)
278
277
  end
279
278
  it 'returns nonfinal_apt_repo_path for nonfinal debs' do
@@ -26,7 +26,7 @@ describe 'Pkg::Platforms' do
26
26
 
27
27
  describe '#versions_for_platform' do
28
28
  it 'should return all supported versions for a given platform' do
29
- expect(Pkg::Platforms.versions_for_platform('el')).to match_array(['5', '6', '7', '8'])
29
+ expect(Pkg::Platforms.versions_for_platform('el')).to match_array(['6', '7', '8', '9'])
30
30
  end
31
31
 
32
32
  it 'should raise an error if given a nonexistent platform' do
@@ -36,7 +36,7 @@ describe 'Pkg::Platforms' do
36
36
 
37
37
  describe '#codenames' do
38
38
  it 'should return all codenames for a given platform' do
39
- codenames = ['focal', 'bionic', 'bullseye', 'buster', 'cosmic', 'jessie', 'stretch', 'trusty', 'xenial']
39
+ codenames = ['focal', 'bionic', 'bullseye', 'buster', 'stretch', 'trusty', 'xenial']
40
40
  expect(Pkg::Platforms.codenames).to match_array(codenames)
41
41
  end
42
42
  end
@@ -101,12 +101,12 @@ describe 'Pkg::Platforms' do
101
101
  it 'should return a hash of platform info' do
102
102
  expect(Pkg::Platforms.platform_lookup(platform)).to be_instance_of(Hash)
103
103
  end
104
-
104
+
105
105
  it 'should include at least arch and package format keys' do
106
106
  expect(Pkg::Platforms.platform_lookup(platform).keys).to include(:architectures)
107
107
  expect(Pkg::Platforms.platform_lookup(platform).keys).to include(:package_format)
108
108
  end
109
- end
109
+ end
110
110
  end
111
111
 
112
112
  describe '#get_attribute' do
@@ -166,7 +166,7 @@ describe 'Pkg::Platforms' do
166
166
 
167
167
  describe '#generic_platform_tag' do
168
168
  it 'fails for unsupported platforms' do
169
- expect { Pkg::Platforms.generic_platform_tag('butts') }.to raise_error
169
+ expect { Pkg::Platforms.generic_platform_tag('noplatform') }.to raise_error
170
170
  end
171
171
 
172
172
  it 'returns a supported platform tag containing the supplied platform' do