packaging 0.104.0 → 0.106.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +4 -4
- data/lib/packaging/archive.rb +2 -2
- data/lib/packaging/artifactory/extensions.rb +1 -0
- data/lib/packaging/artifactory.rb +27 -23
- data/lib/packaging/config/params.rb +191 -193
- data/lib/packaging/config/validations.rb +0 -2
- data/lib/packaging/config.rb +8 -8
- data/lib/packaging/deb/repo.rb +11 -14
- data/lib/packaging/gem.rb +2 -2
- data/lib/packaging/metrics.rb +7 -7
- data/lib/packaging/nuget.rb +0 -1
- data/lib/packaging/paths.rb +11 -13
- data/lib/packaging/platforms.rb +14 -52
- data/lib/packaging/repo.rb +11 -12
- data/lib/packaging/retrieve.rb +1 -1
- data/lib/packaging/rpm/repo.rb +8 -8
- data/lib/packaging/sign/dmg.rb +8 -7
- data/lib/packaging/sign/ips.rb +64 -32
- data/lib/packaging/sign/msi.rb +48 -48
- data/lib/packaging/sign/rpm.rb +1 -1
- data/lib/packaging/sign.rb +0 -1
- data/lib/packaging/tar.rb +2 -4
- data/lib/packaging/util/date.rb +0 -1
- data/lib/packaging/util/distribution_server.rb +2 -2
- data/lib/packaging/util/execution.rb +2 -4
- data/lib/packaging/util/file.rb +81 -3
- data/lib/packaging/util/git.rb +1 -3
- data/lib/packaging/util/git_tags.rb +3 -3
- data/lib/packaging/util/gpg.rb +3 -4
- data/lib/packaging/util/jenkins.rb +0 -3
- data/lib/packaging/util/misc.rb +1 -1
- data/lib/packaging/util/net.rb +25 -23
- data/lib/packaging/util/repo.rb +17 -0
- data/lib/packaging/util/serialization.rb +1 -2
- data/lib/packaging/util/ship.rb +150 -3
- data/lib/packaging/util/sign.rb +47 -0
- data/lib/packaging/util/tool.rb +1 -4
- data/lib/packaging/util/version.rb +1 -5
- data/lib/packaging/util.rb +3 -1
- data/lib/packaging.rb +1 -2
- data/spec/lib/packaging/config_spec.rb +3 -3
- data/spec/lib/packaging/deb/repo_spec.rb +1 -1
- data/spec/lib/packaging/paths_spec.rb +2 -3
- data/spec/lib/packaging/platforms_spec.rb +6 -6
- data/spec/lib/packaging/sign_spec.rb +1 -13
- data/spec/lib/packaging/util/git_spec.rb +2 -2
- data/spec/lib/packaging/util/git_tag_spec.rb +5 -5
- data/spec/lib/packaging/util/ship_spec.rb +0 -2
- data/tasks/30_metrics.rake +2 -2
- data/tasks/apple.rake +8 -14
- data/tasks/archive.rake +1 -2
- data/tasks/deb.rake +7 -8
- data/tasks/deb_repos.rake +1 -0
- data/tasks/doc.rake +5 -3
- data/tasks/education.rake +2 -4
- data/tasks/fetch.rake +1 -0
- data/tasks/gem.rake +20 -12
- data/tasks/jenkins.rake +27 -15
- data/tasks/jenkins_dynamic.rake +10 -10
- data/tasks/load_extras.rake +1 -0
- data/tasks/mock.rake +8 -9
- data/tasks/nightly_repos.rake +14 -14
- data/tasks/pe_ship.rake +10 -17
- data/tasks/retrieve.rake +2 -2
- data/tasks/rpm.rake +1 -1
- data/tasks/rpm_repos.rake +1 -0
- data/tasks/ship.rake +24 -12
- data/tasks/sign.rake +6 -6
- data/tasks/tar.rake +2 -3
- data/tasks/update.rake +2 -2
- data/tasks/vendor_gems.rake +5 -7
- data/tasks/version.rake +2 -2
- metadata +42 -40
data/lib/packaging/util/net.rb
CHANGED
@@ -1,15 +1,13 @@
 # Utility methods for handling network calls and interactions
 
 module Pkg::Util::Net
-
   class << self
-
     # This simple method does an HTTP get of a URI and writes it to a file
     # in a slightly more platform agnostic way than curl/wget
     def fetch_uri(uri, target)
       require 'open-uri'
       if Pkg::Util::File.file_writable?(File.dirname(target))
-        File.open(target, 'w') { |f| f.puts(open(uri).read) }
+        File.open(target, 'w') { |f| f.puts(URI.open(uri).read) }
       end
     end
 
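The fetch_uri change swaps open(uri) for URI.open(uri): Kernel#open's open-uri behavior was deprecated in Ruby 2.7 and removed in Ruby 3.0, so only the explicit URI.open form keeps working on current Rubies. A minimal standalone sketch of the same pattern (not the gem's code):

    require 'open-uri'

    # Download a URI to a local file; URI.open replaces the deprecated Kernel#open(uri).
    def fetch_uri(uri, target)
      File.open(target, 'w') { |f| f.puts(URI.open(uri).read) }
    end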
@@ -37,7 +35,7 @@ module Pkg::Util::Net
       Array(hosts).flatten.each do |host|
         begin
           remote_execute(host, 'exit', { extra_options: '-oBatchMode=yes' })
-        rescue
+        rescue StandardError
           errs << host
         end
       end
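A bare rescue already catches only StandardError, so the rescue StandardError changes in this file do not alter behavior; they state the intent explicitly and satisfy RuboCop's Style/RescueStandardError cop. Illustrated with placeholder names (risky_call and handle_error are hypothetical):

    begin
      risky_call          # hypothetical method
    rescue                # implicitly rescues StandardError, not Exception
      handle_error
    end

    begin
      risky_call
    rescue StandardError  # same classes caught, stated explicitly
      handle_error
    end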
@@ -56,7 +54,7 @@ module Pkg::Util::Net
         begin
           remote_execute(host, "gpg --list-secret-keys #{gpg} > /dev/null 2&>1",
                          { extra_options: '-oBatchMode=yes' })
-        rescue
+        rescue StandardError
           errs << host
         end
       end
@@ -112,13 +110,14 @@ module Pkg::Util::Net
     ###
     ### Deprecated method implemented as a shim to the new `remote_execute` method
     ###
-    def remote_ssh_cmd(target, command, capture_output = false, extra_options = '', fail_fast = true, trace = false)
+    def remote_ssh_cmd(target, command, capture_output = false, extra_options = '', fail_fast = true, trace = false) # rubocop:disable Metrics/ParameterLists
       puts "Warn: \"remote_ssh_cmd\" call in packaging is deprecated. Use \"remote_execute\" instead."
       remote_execute(target, command, {
         capture_output: capture_output,
         extra_options: extra_options,
         fail_fast: fail_fast,
-        trace: trace
+        trace: trace
+      })
     end
 
     # Construct a valid rsync command
@@ -149,7 +148,8 @@ module Pkg::Util::Net
         target_path: nil,
         target_host: nil,
         extra_flags: nil,
-        dryrun: false
+        dryrun: false
+      }.merge(opts)
       origin = Pathname.new(origin_path)
       target = options[:target_path] || origin.parent
 
@@ -187,9 +187,10 @@ module Pkg::Util::Net
         target_path: nil,
         target_host: nil,
         extra_flags: nil,
-        dryrun: ENV['DRYRUN']
+        dryrun: ENV['DRYRUN']
+      }.merge(opts.delete_if { |_, value| value.nil? })
 
-      stdout,
+      stdout, = Pkg::Util::Execution.capture3(rsync_cmd(source, options), true)
       stdout
     end
 
@@ -223,7 +224,7 @@ module Pkg::Util::Net
       s3cmd = Pkg::Util::Tool.check_tool('s3cmd')
 
       if Pkg::Util::File.file_exists?(File.join(ENV['HOME'], '.s3cfg'))
-        stdout,
+        stdout, = Pkg::Util::Execution.capture3("#{s3cmd} sync #{flags.join(' ')} '#{source}' s3://#{target_bucket}/#{target_directory}/")
         stdout
       else
         fail "#{File.join(ENV['HOME'], '.s3cfg')} does not exist. It is required to ship files using s3cmd."
@@ -279,7 +280,7 @@ module Pkg::Util::Net
         '--write-out "%{http_code}"',
         '--output /dev/null'
       ]
-      stdout,
+      stdout, = Pkg::Util::Net.curl_form_data(uri, data)
       stdout
     end
 
@@ -292,18 +293,18 @@ module Pkg::Util::Net
     end
 
     def remote_set_ownership(host, owner, group, files)
-      remote_cmd = "for file in #{files.join(
+      remote_cmd = "for file in #{files.join(' ')}; do if [[ -d $file ]] || ! `lsattr $file | grep -q '\\-i\\-'`; then sudo chown #{owner}:#{group} $file; else echo \"$file is immutable\"; fi; done"
       Pkg::Util::Net.remote_execute(host, remote_cmd)
     end
 
     def remote_set_permissions(host, permissions, files)
-      remote_cmd = "for file in #{files.join(
+      remote_cmd = "for file in #{files.join(' ')}; do if [[ -d $file ]] || ! `lsattr $file | grep -q '\\-i\\-'`; then sudo chmod #{permissions} $file; else echo \"$file is immutable\"; fi; done"
       Pkg::Util::Net.remote_execute(host, remote_cmd)
     end
 
     # Remotely set the immutable bit on a list of files
     def remote_set_immutable(host, files)
-      Pkg::Util::Net.remote_execute(host, "sudo chattr +i #{files.join(
+      Pkg::Util::Net.remote_execute(host, "sudo chattr +i #{files.join(' ')}")
     end
 
     # Create a symlink indicating the latest version of a package
@@ -350,8 +351,9 @@ module Pkg::Util::Net
       CMD
 
       _, err = Pkg::Util::Net.remote_execute(
-
-
+        Pkg::Config.staging_server, cmd, { capture_output: true }
+      )
+      warn err
     end
 
     def escape_html(uri)
@@ -383,18 +385,18 @@ module Pkg::Util::Net
       Pkg::Util::Net.rsync_to(tarball, host, '/tmp')
       appendix = Pkg::Util.rand_string
       git_bundle_directory = File.join('/tmp', "#{Pkg::Config.project}-#{appendix}")
-      command =
-        #{tar} -zxvf /tmp/#{tarball_name}.tar.gz -C /tmp/ ;
-        git clone --recursive /tmp/#{tarball_name} #{git_bundle_directory} ;
-      DOC
+      command = <<~DOC
+        #{tar} -zxvf /tmp/#{tarball_name}.tar.gz -C /tmp/ ;
+        git clone --recursive /tmp/#{tarball_name} #{git_bundle_directory} ;
+      DOC
       Pkg::Util::Net.remote_execute(host, command)
       return git_bundle_directory
     end
 
     def remote_bundle_install_command
-      export_packaging_location = ''
       export_packaging_location = "export PACKAGING_LOCATION='#{ENV['PACKAGING_LOCATION']}';" if ENV['PACKAGING_LOCATION'] && !ENV['PACKAGING_LOCATION'].empty?
-
+      export_vanagon_location = "export VANAGON_LOCATION='#{ENV['VANAGON_LOCATION']}';" if ENV['VANAGON_LOCATION'] && !ENV['VANAGON_LOCATION'].empty?
+      "source /usr/local/rvm/scripts/rvm; rvm use ruby-2.5.1; #{export_packaging_location} #{export_vanagon_location} bundle install --path .bundle/gems ;"
     end
 
     # Given a BuildInstance object and a host, send its params to the host. Return
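remote_bundle_install_command now forwards VANAGON_LOCATION alongside PACKAGING_LOCATION. A quick sketch of what the returned command string contains (the environment values below are illustrative, not from this diff):

    ENV['PACKAGING_LOCATION'] = 'https://github.com/puppetlabs/packaging#main'  # illustrative value
    ENV['VANAGON_LOCATION']   = 'https://github.com/puppetlabs/vanagon#main'    # illustrative value
    puts Pkg::Util::Net.remote_bundle_install_command
    # => source /usr/local/rvm/scripts/rvm; rvm use ruby-2.5.1; export PACKAGING_LOCATION='...'; export VANAGON_LOCATION='...'; bundle install --path .bundle/gems ;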
data/lib/packaging/util/repo.rb
ADDED
@@ -0,0 +1,17 @@
+# Module for signing all packages to places
+
+module Pkg::Util::Repo
+  class << self
+    # Create yum repositories of built RPM packages for this SHA on the distribution server
+    def rpm_repos
+      Pkg::Util::File.fetch
+      Pkg::Rpm::Repo.create_remote_repos
+    end
+
+    # Create apt repositories of build DEB packages for this SHA on the distributions server
+    def deb_repos
+      Pkg::Util::File.fetch
+      Pkg::Deb::Repo.create_repos
+    end
+  end
+end
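The new Pkg::Util::Repo helpers give the remote repo creation steps a home that rake tasks can delegate to. A hypothetical wiring sketch (the task names here are placeholders, not tasks defined by this gem):

    namespace :pl do
      desc 'Create remote yum repos for the current SHA'
      task :remote_rpm_repos do
        Pkg::Util::Repo.rpm_repos   # fetches build params, then creates the yum repos
      end

      desc 'Create remote apt repos for the current SHA'
      task :remote_deb_repos do
        Pkg::Util::Repo.deb_repos   # fetches build params, then creates the apt repos
      end
    end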
data/lib/packaging/util/serialization.rb
CHANGED
@@ -2,14 +2,13 @@
 
 module Pkg::Util::Serialization
   class << self
-
     # Given the path to a yaml file, load the yaml file into an object and return the object.
     def load_yaml(file)
       require 'yaml'
       file = File.expand_path(file)
       begin
         input_data = YAML.load_file(file) || {}
-      rescue => e
+      rescue StandardError => e
         fail "There was an error loading data from #{file}.\n#{e}"
       end
       input_data
data/lib/packaging/util/ship.rb
CHANGED
@@ -87,7 +87,7 @@ module Pkg::Util::Ship
       puts "Do you want to ship the above files to (#{staging_server})?"
       return false unless Pkg::Util.ask_yes_or_no
 
-      extra_flags = %w
+      extra_flags = %w[--ignore-existing --delay-updates]
       extra_flags << '--dry-run' if ENV['DRYRUN']
 
       staged_pkgs.each do |pkg|
@@ -330,7 +330,7 @@ module Pkg::Util::Ship
   def test_ship(vm, ship_task)
     command = 'getent group release || groupadd release'
     Pkg::Util::Net.remote_execute(vm, command)
-    hosts_to_override = %w
+    hosts_to_override = %w[
       APT_HOST
       DMG_HOST
       GEM_HOST
@@ -349,10 +349,157 @@ module Pkg::Util::Ship
       TAR_STAGING_SERVER
       YUM_STAGING_SERVER
       STAGING_SERVER
-
+    ]
     hosts_to_override.each do |host|
       ENV[host] = vm
     end
     Rake::Task[ship_task].invoke
   end
+
+  # Ship pkg directory contents to distribution server
+  def ship(target = 'artifacts', local_directory = 'pkg')
+    Pkg::Util::File.fetch
+
+    unless Pkg::Config.project
+      fail "You must set the 'project' in build_defaults.yaml or with the 'PROJECT_OVERRIDE' environment variable."
+    end
+
+    project_basedir = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
+    artifact_directory = "#{project_basedir}/#{target}"
+
+    # For EZBake builds, we also want to include the ezbake.manifest file to
+    # get a snapshot of this build and all dependencies. We eventually will
+    # create a yaml version of this file, but until that point we want to
+    # make the original ezbake.manifest available
+    #
+    ezbake_manifest = File.join('ext', 'ezbake.manifest')
+    if File.exist?(ezbake_manifest)
+      FileUtils.cp(ezbake_manifest, File.join(local_directory, "#{Pkg::Config.ref}.ezbake.manifest"))
+    end
+    ezbake_yaml = File.join("ext", "ezbake.manifest.yaml")
+    if File.exists?(ezbake_yaml)
+      FileUtils.cp(ezbake_yaml, File.join(local_directory, "#{Pkg::Config.ref}.ezbake.manifest.yaml"))
+    end
+
+    # Inside build_metadata*.json files there is additional metadata containing
+    # information such as git ref and dependencies that are needed at build
+    # time. If these files exist, copy them downstream.
+    # Typically these files are named 'ext/build_metadata.<project>.<platform>.json'
+    build_metadata_json_files = Dir.glob('ext/build_metadata*.json')
+    build_metadata_json_files.each do |source_file|
+      target_file = File.join(local_directory, "#{Pkg::Config.ref}.#{File.basename(source_file)}")
+      FileUtils.cp(source_file, target_file)
+    end
+
+    # Sadly, the packaging repo cannot yet act on its own, without living
+    # inside of a packaging-repo compatible project. This means in order to
+    # use the packaging repo for shipping and signing (things that really
+    # don't require build automation, specifically) we still need the project
+    # clone itself.
+    Pkg::Util::Git.bundle('HEAD', 'signing_bundle', local_directory)
+
+    # While we're bundling things, let's also make a git bundle of the
+    # packaging repo that we're using when we invoke pl:jenkins:ship. We can
+    # have a reasonable level of confidence, later on, that the git bundle on
+    # the distribution server was, in fact, the git bundle used to create the
+    # associated packages. This is because this ship task is automatically
+    # called upon completion each cell of the pl:jenkins:uber_build, and we
+    # have --ignore-existing set below. As such, the only git bundle that
+    # should possibly be on the distribution is the one used to create the
+    # packages.
+    # We're bundling the packaging repo because it allows us to keep an
+    # archive of the packaging source that was used to create the packages,
+    # so that later on if we need to rebuild an older package to audit it or
+    # for some other reason we're assured that the new package isn't
+    # different by virtue of the packaging automation.
+    if defined?(PACKAGING_ROOT)
+      packaging_bundle = ''
+      Dir.chdir(PACKAGING_ROOT) do
+        packaging_bundle = Pkg::Util::Git.bundle('HEAD', 'packaging-bundle')
+      end
+      FileUtils.mv(packaging_bundle, local_directory)
+    end
+
+    # This is functionality to add the project-arch.msi links that have no
+    # version. The code itself looks for the link (if it's there already)
+    # and if the source package exists before linking. Searching for the
+    # packages has been restricted specifically to just the pkg/windows dir
+    # on purpose, as this is where we currently have all windows packages
+    # building to. Once we move the Metadata about the output location in
+    # to one source of truth we can refactor this to use that to search
+    # -Sean P. M. 08/12/16
+
+    {
+      'windows' => ['x86', 'x64'],
+      'windowsfips' => ['x64']
+    }.each_pair do |platform, archs|
+      packages = Dir["#{local_directory}/#{platform}/*"]
+
+      archs.each do |arch|
+        package_version = Pkg::Util::Git.describe.tr('-', '.')
+        package_filename = File.join(local_directory, platform, "#{Pkg::Config.project}-#{package_version}-#{arch}.msi")
+        link_filename = File.join(local_directory, platform, "#{Pkg::Config.project}-#{arch}.msi")
+
+        next unless !packages.include?(link_filename) && packages.include?(package_filename)
+        # Dear future code spelunkers:
+        # Using symlinks instead of hard links causes failures when we try
+        # to set these files to be immutable. Also be wary of whether the
+        # linking utility you're using expects the source path to be relative
+        # to the link target or pwd.
+        #
+        FileUtils.ln(package_filename, link_filename)
+      end
+    end
+
+    Pkg::Util::Execution.retry_on_fail(times: 3) do
+      Pkg::Util::Net.remote_execute(Pkg::Config.distribution_server, "mkdir --mode=775 -p #{project_basedir}")
+      Pkg::Util::Net.remote_execute(Pkg::Config.distribution_server, "mkdir -p #{artifact_directory}")
+      Pkg::Util::Net.rsync_to("#{local_directory}/", Pkg::Config.distribution_server, "#{artifact_directory}/", extra_flags: ['--ignore-existing', '--exclude repo_configs'])
+    end
+
+    # In order to get a snapshot of what this build looked like at the time
+    # of shipping, we also generate and ship the params file
+    #
+    Pkg::Config.config_to_yaml(local_directory)
+    Pkg::Util::Execution.retry_on_fail(:times => 3) do
+      Pkg::Util::Net.rsync_to("#{local_directory}/#{Pkg::Config.ref}.yaml", Pkg::Config.distribution_server, "#{artifact_directory}/", extra_flags: ["--exclude repo_configs"])
+    end
+
+    # If we just shipped a tagged version, we want to make it immutable
+    files = Dir.glob("#{local_directory}/**/*").select { |f| File.file?(f) and !f.include? "#{Pkg::Config.ref}.yaml" }.map do |file|
+      "#{artifact_directory}/#{file.sub(/^#{local_directory}\//, '')}"
+    end
+
+    Pkg::Util::Net.remote_set_ownership(Pkg::Config.distribution_server, 'root', 'release', files)
+    Pkg::Util::Net.remote_set_permissions(Pkg::Config.distribution_server, '0664', files)
+    Pkg::Util::Net.remote_set_immutable(Pkg::Config.distribution_server, files)
+  end
+
+  def ship_to_artifactory(local_directory = 'pkg')
+    Pkg::Util::File.fetch
+    unless Pkg::Config.project
+      fail "You must set the 'project' in build_defaults.yaml or with the 'PROJECT_OVERRIDE' environment variable."
+    end
+    artifactory = Pkg::ManageArtifactory.new(Pkg::Config.project, Pkg::Config.ref)
+
+    artifacts = Dir.glob("#{local_directory}/**/*").reject { |e| File.directory? e }
+    artifacts.sort! do |a, b|
+      if File.extname(a) =~ /(md5|sha\d+)/ && File.extname(b) !~ /(md5|sha\d+)/
+        1
+      elsif File.extname(b) =~ /(md5|sha\d+)/ && File.extname(a) !~ /(md5|sha\d+)/
+        -1
+      else
+        a <=> b
+      end
+    end
+    artifacts.each do |artifact|
+      if File.extname(artifact) == ".yaml" || File.extname(artifact) == ".json"
+        artifactory.deploy_package(artifact)
+      elsif artifactory.package_exists_on_artifactory?(artifact)
+        warn "Attempt to upload '#{artifact}' failed. Package already exists!"
+      else
+        artifactory.deploy_package(artifact)
+      end
+    end
+  end
 end
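The added ship and ship_to_artifactory methods make the shipping workflow callable outside of rake. A minimal usage sketch, assuming the calling project has loaded the packaging library and its build_defaults.yaml so that Pkg::Config is populated (the arguments shown are the defaults from the method signatures):

    require 'packaging'

    Pkg::Util::Ship.ship('artifacts', 'pkg')    # rsync pkg/ to the distribution server, then lock the files down
    Pkg::Util::Ship.ship_to_artifactory('pkg')  # upload the same artifacts to Artifactory, checksum files sorted last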
data/lib/packaging/util/sign.rb
ADDED
@@ -0,0 +1,47 @@
+# Module for signing all packages to places
+
+
+module Pkg::Util::Sign
+  class << self
+    # Sign all locally staged packages on signing server.
+    def sign_all(root_directory = nil)
+      Pkg::Util::File.fetch
+      root_directory ||= ENV['DEFAULT_DIRECTORY']
+      Dir["#{root_directory}/*"].empty? and fail "There were no files found in #{root_directory}. \
+        Maybe you wanted to build/retrieve something first?"
+
+      # Because rpms and debs are laid out differently in PE under pkg/ they
+      # have a different sign task to address this. Rather than create a whole
+      # extra :jenkins task for signing PE, we determine which sign task to use
+      # based on if we're building PE.
+      # We also listen in on the environment variable SIGNING_BUNDLE. This is
+      # _NOT_ intended for public use, but rather with the internal promotion
+      # workflow for Puppet Enterprise. SIGNING_BUNDLE is the path to a tarball
+      # containing a git bundle to be used as the environment for the packaging
+      # repo in a signing operation.
+      signing_bundle = ENV['SIGNING_BUNDLE']
+      sign_tasks = ["pl:sign_rpms"]
+      sign_tasks << "pl:sign_deb_changes" unless Dir["#{root_directory}/**/*.changes"].empty?
+      sign_tasks << "pl:sign_tar" if Pkg::Config.build_tar
+      sign_tasks << "pl:sign_gem" if Pkg::Config.build_gem
+      sign_tasks << "pl:sign_osx" if Pkg::Config.build_dmg || Pkg::Config.vanagon_project
+      sign_tasks << "pl:sign_swix" if Pkg::Config.vanagon_project
+      sign_tasks << "pl:sign_svr4" if Pkg::Config.vanagon_project
+      sign_tasks << "pl:sign_ips" if Pkg::Config.vanagon_project
+      sign_tasks << "pl:sign_msi" if Pkg::Config.build_msi || Pkg::Config.vanagon_project
+      remote_repo = Pkg::Util::Net.remote_unpack_git_bundle(Pkg::Config.signing_server, 'HEAD', nil, signing_bundle)
+      build_params = Pkg::Util::Net.remote_buildparams(Pkg::Config.signing_server, Pkg::Config)
+      Pkg::Util::Net.rsync_to(root_directory, Pkg::Config.signing_server, remote_repo)
+      rake_command = <<~DOC
+        cd #{remote_repo} ;
+        #{Pkg::Util::Net.remote_bundle_install_command}
+        bundle exec rake #{sign_tasks.map { |task| task + "[#{root_directory}]" }.join(' ')} PARAMS_FILE=#{build_params}
+      DOC
+      Pkg::Util::Net.remote_execute(Pkg::Config.signing_server, rake_command)
+      Pkg::Util::Net.rsync_from("#{remote_repo}/#{root_directory}/", Pkg::Config.signing_server, "#{root_directory}/")
+      Pkg::Util::Net.remote_execute(Pkg::Config.signing_server, "rm -rf #{remote_repo}")
+      Pkg::Util::Net.remote_execute(Pkg::Config.signing_server, "rm #{build_params}")
+      puts "Signed packages staged in #{root_directory}/ directory"
+    end
+  end
+end
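Pkg::Util::Sign.sign_all drives the whole remote signing round trip: it unpacks a git bundle on the signing server, rsyncs the staged packages over, runs the relevant pl:sign_* tasks there, and rsyncs the signed artifacts back. A minimal invocation sketch, assuming Pkg::Config.signing_server is configured and packages are already staged under pkg/:

    ENV['DEFAULT_DIRECTORY'] ||= 'pkg'   # fallback used when no directory argument is passed
    Pkg::Util::Sign.sign_all('pkg')      # signs rpms, plus debs, tarballs, gems, msis, etc. when configured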
data/lib/packaging/util/tool.rb
CHANGED
@@ -1,7 +1,6 @@
 # Utility methods for handling system binaries
 
 module Pkg::Util::Tool
-
   # Set up utility methods for handling system binaries
   #
   class << self
@@ -15,7 +14,7 @@ module Pkg::Util::Tool
 
       if Pkg::Util::OS.windows? && File.extname(location).empty?
         exts = ENV['PATHEXT']
-        exts = exts ? exts.split(File::PATH_SEPARATOR) : %w
+        exts = exts ? exts.split(File::PATH_SEPARATOR) : %w[.EXE .BAT .CMD .COM]
         exts.each do |ext|
           locationext = File.expand_path(location + ext)
 
@@ -30,12 +29,10 @@ module Pkg::Util::Tool
     end
 
     alias :has_tool :find_tool
-
   end
 
   # Set up paths to system tools we use in the packaging repo
   # no matter what distribution we're packaging for.
 
   GIT = Pkg::Util::Tool.check_tool('git')
-
 end
data/lib/packaging/util/version.rb
CHANGED
@@ -80,7 +80,6 @@ module Pkg::Util::Version
     # 5.3.0.rc4-1
     # 3.0.5.rc6.24.g431768c-1
     #
-    # rubocop:disable Metrics/AbcSize, Metrics/MethodLength
     def base_pkg_version(version = Pkg::Config.version)
       return "#{dot_version(version)}-#{Pkg::Config.release}".split('-') if final?(version) || Pkg::Config.vanagon_project
 
@@ -142,9 +141,6 @@ module Pkg::Util::Version
     # If you invoke this the version will only be modified in the temporary copy,
     # with the intent that it never change the official source tree.
     #
-    # rubocop:disable Metrics/AbcSize
-    # rubocop:disable Metrics/CyclomaticComplexity
-    # rubocop:disable Metrics/PerceivedComplexity
     def versionbump(workdir = nil)
       version = ENV['VERSION'] || Pkg::Config.version.to_s.strip
       new_version = '"' + version + '"'
@@ -182,7 +178,7 @@ module Pkg::Util::Version
     # input json file and output if it "looks tagged" or not
     #
     # @param json_data [hash] json data hash containing the ref to check
-    def report_json_tags(json_data)
+    def report_json_tags(json_data)
       puts 'component: ' + File.basename(json_data['url'])
       puts 'ref: ' + json_data['ref'].to_s
       if Pkg::Util::Git.remote_tagged?(json_data['url'], json_data['ref'].to_s)
data/lib/packaging/util.rb
CHANGED
@@ -25,9 +25,11 @@ module Pkg::Util
   require 'packaging/util/version'
   require 'packaging/util/windows'
   require 'packaging/util/git_tags'
+  require 'packaging/util/sign'
+  require 'packaging/util/repo'
 
   def self.boolean_value(var)
-    return true if var == true || (
+    return true if var == true || (var.is_a?(String) && (var.downcase == 'true' || var.downcase =~ /^y$|^yes$/))
     return false
   end
 
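The rewritten boolean_value collapses the check onto one line: true itself, or a String equal to 'true', 'y', or 'yes' in any letter case, counts as true; everything else is false. For example:

    Pkg::Util.boolean_value(true)    # => true
    Pkg::Util.boolean_value('TRUE')  # => true
    Pkg::Util.boolean_value('Yes')   # => true
    Pkg::Util.boolean_value('no')    # => false
    Pkg::Util.boolean_value(1)       # => false
    Pkg::Util.boolean_value(nil)     # => false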
data/lib/packaging.rb
CHANGED
@@ -223,7 +223,7 @@ describe "Pkg::Config" do
|
|
223
223
|
"./artifacts/aix/7.1/PC1/ppc/puppet-agent-5.3.2-1.aix7.1.ppc.rpm"
|
224
224
|
|
225
225
|
fedora_artifacts = \
|
226
|
-
"./artifacts/fedora/
|
226
|
+
"./artifacts/fedora/32/PC1/x86_64/puppet-agent-5.3.2-1.fc32.x86_64.rpm"
|
227
227
|
|
228
228
|
windows_artifacts = \
|
229
229
|
"./artifacts/windows/puppet-agent-x64.msi\n" \
|
@@ -280,8 +280,8 @@ describe "Pkg::Config" do
|
|
280
280
|
it "should not use 'f' in fedora platform tags" do
|
281
281
|
allow(Pkg::Util::Net).to receive(:remote_execute).and_return(fedora_artifacts, nil)
|
282
282
|
data = Pkg::Config.platform_data
|
283
|
-
expect(data).to include('fedora-
|
284
|
-
expect(data).not_to include('fedora-
|
283
|
+
expect(data).to include('fedora-32-x86_64')
|
284
|
+
expect(data).not_to include('fedora-f32-x86_64')
|
285
285
|
end
|
286
286
|
|
287
287
|
it "should collect packages whose extname differ from package_format" do
|
@@ -6,7 +6,7 @@ describe "Pkg::Deb::Repo" do
|
|
6
6
|
let(:project) { "deb_repos" }
|
7
7
|
let(:ref) { "1234abcd" }
|
8
8
|
let(:base_url) { "http://#{builds_server}/#{project}/#{ref}" }
|
9
|
-
let(:cows) { ["xenial", "
|
9
|
+
let(:cows) { ["xenial", "trusty", "stretch", ""] }
|
10
10
|
let(:wget_results) { cows.map {|cow| "#{base_url}/repos/apt/#{cow}" }.join("\n") }
|
11
11
|
let(:wget_garbage) { "\n and an index\nhttp://somethingelse.com/robots" }
|
12
12
|
let(:repo_configs) { cows.reject {|cow| cow.empty?}.map {|dist| "pkg/repo_configs/deb/pl-#{project}-#{ref}-#{dist}.list" } }
|
data/spec/lib/packaging/paths_spec.rb
CHANGED
@@ -5,12 +5,11 @@ describe 'Pkg::Paths' do
   arch_transformations = {
     ['pkg/el-8-x86_64/puppet-agent-6.9.0-1.el8.x86_64.rpm', 'el', '8'] => 'x86_64',
     ['pkg/el/8/puppet6/aarch64/puppet-agent-6.5.0.3094.g16b6fa6f-1.el8.aarch64.rpm', 'el', '8'] => 'aarch64',
-    ['artifacts/fedora/32/puppet6/x86_64/puppet-agent-6.9.0-1.
+    ['artifacts/fedora/32/puppet6/x86_64/puppet-agent-6.9.0-1.fc32.x86_64.rpm', 'fedora', '32'] => 'x86_64',
     ['pkg/ubuntu-16.04-amd64/puppet-agent_4.99.0-1xenial_amd64.deb', 'ubuntu', '16.04'] => 'amd64',
     ['artifacts/deb/focal/puppet6/puppet-agent_6.5.0.3094.g16b6fa6f-1focal_arm64.deb', 'ubuntu', '20.04'] => 'aarch64',
 
     ['artifacts/ubuntu-16.04-i386/puppetserver_5.0.1-0.1SNAPSHOT.2017.07.27T2346puppetlabs1.debian.tar.gz', 'ubuntu', '16.04'] => 'source',
-    ['artifacts/deb/jessie/PC1/puppetserver_5.0.1.master.orig.tar.gz', 'debian', '8'] => 'source',
     ['artifacts/el/6/PC1/SRPMS/puppetserver-5.0.1.master-0.1SNAPSHOT.2017.08.18T0951.el6.src.rpm', 'el', '6'] => 'SRPMS'
   }
   arch_transformations.each do |path_array, arch|
@@ -273,7 +272,7 @@ describe 'Pkg::Paths' do
         .to eq(fake_apt_repo_path)
     end
     it 'returns nonfinal_yum_repo_path for nonfinal rpms' do
-      expect(Pkg::Paths.remote_repo_base('fedora-
+      expect(Pkg::Paths.remote_repo_base('fedora-34-x86_64', nonfinal: true))
         .to eq(fake_yum_nightly_repo_path)
     end
     it 'returns nonfinal_apt_repo_path for nonfinal debs' do
data/spec/lib/packaging/platforms_spec.rb
CHANGED
@@ -26,7 +26,7 @@ describe 'Pkg::Platforms' do
 
   describe '#versions_for_platform' do
     it 'should return all supported versions for a given platform' do
-      expect(Pkg::Platforms.versions_for_platform('el')).to match_array(['
+      expect(Pkg::Platforms.versions_for_platform('el')).to match_array(['6', '7', '8', '9'])
     end
 
     it 'should raise an error if given a nonexistent platform' do
@@ -36,7 +36,7 @@ describe 'Pkg::Platforms' do
 
   describe '#codenames' do
     it 'should return all codenames for a given platform' do
-      codenames = ['focal', 'bionic', 'bullseye', 'buster', '
+      codenames = ['focal', 'bionic', 'bullseye', 'buster', 'stretch', 'trusty', 'xenial']
       expect(Pkg::Platforms.codenames).to match_array(codenames)
     end
   end
@@ -97,16 +97,16 @@ describe 'Pkg::Platforms' do
   end
 
   describe '#platform_lookup' do
-    ['osx-10.15-x86_64', 'osx-11-x86_64'].each do |platform|
+    ['osx-10.15-x86_64', 'osx-11-x86_64', 'osx-12-x86_64'].each do |platform|
       it 'should return a hash of platform info' do
         expect(Pkg::Platforms.platform_lookup(platform)).to be_instance_of(Hash)
       end
-
+
       it 'should include at least arch and package format keys' do
         expect(Pkg::Platforms.platform_lookup(platform).keys).to include(:architectures)
        expect(Pkg::Platforms.platform_lookup(platform).keys).to include(:package_format)
       end
-    end
+    end
   end
 
   describe '#get_attribute' do
@@ -166,7 +166,7 @@ describe 'Pkg::Platforms' do
 
   describe '#generic_platform_tag' do
     it 'fails for unsupported platforms' do
-      expect { Pkg::Platforms.generic_platform_tag('
+      expect { Pkg::Platforms.generic_platform_tag('noplatform') }.to raise_error
     end
 
     it 'returns a supported platform tag containing the supplied platform' do
data/spec/lib/packaging/sign_spec.rb
CHANGED
@@ -15,13 +15,6 @@ Header V4 RSA/SHA256 Signature, key ID ef8d349f: NOKEY
 Header SHA1 digest: OK (3cb7e9861e8bc09783a1b6c8d88243a3c16daa81)
 V4 RSA/SHA256 Signature, key ID ef8d349f: NOKEY
 MD5 digest: OK (d5f06ba2a9053de532326d0659ec0d11)
-DOC
-  }
-  let(:el5_signed_response) { <<-DOC
-Header V3 RSA/SHA1 signature: NOKEY, key ID ef8d349f
-Header SHA1 digest: OK (12ea7bd578097a3aecc5deb8ada6aca6147d68e3)
-V3 RSA/SHA1 signature: NOKEY, key ID ef8d349f
-MD5 digest: OK (27353c6153068a3c9902fcb4ad5b8b92)
 DOC
   }
   let(:sles12_signed_response) { <<-DOC
@@ -40,10 +33,6 @@ DOC
       allow(Pkg::Sign::Rpm).to receive(:`).and_return(el7_signed_response)
       expect(Pkg::Sign::Rpm.has_sig?(rpm)).to be true
     end
-    it 'returns true if rpm has been signed (el5)' do
-      allow(Pkg::Sign::Rpm).to receive(:`).and_return(el5_signed_response)
-      expect(Pkg::Sign::Rpm.has_sig?(rpm)).to be true
-    end
     it 'returns true if rpm has been signed (sles12)' do
       allow(Pkg::Sign::Rpm).to receive(:`).and_return(sles12_signed_response)
       expect(Pkg::Sign::Rpm.has_sig?(rpm)).to be true
@@ -63,12 +52,11 @@ DOC
     end
 
   describe '#sign_all' do
-    let(:rpm_directory) {
+    let(:rpm_directory) { Dir.mktmpdir }
     let(:rpms_not_to_sign) { [
       "#{rpm_directory}/aix/7.1/PC1/ppc/puppet-agent-5.5.3-1.aix7.1.ppc.rpm",
     ] }
     let(:v3_rpms) { [
-      "#{rpm_directory}/el/5/PC1/i386/puppet-agent-5.5.3-1.el5.i386.rpm",
       "#{rpm_directory}/sles/11/PC1/x86_64/puppet-agent-5.5.3-1.sles11.x86_64.rpm",
     ] }
     let(:v4_rpms) { [
data/spec/lib/packaging/util/git_spec.rb
CHANGED
@@ -150,11 +150,11 @@ describe 'Pkg::Util::Git' do
 
   context '#remote_tagged?' do
     it 'reports Yes on tagged component' do
-      expect(Pkg::Util::Git.remote_tagged?('
+      expect(Pkg::Util::Git.remote_tagged?('https://github.com/puppetlabs/leatherman.git', 'refs/tags/0.6.2')).to be(true)
     end
 
     it 'reports No on non-tagged component' do
-      expect(Pkg::Util::Git.remote_tagged?('
+      expect(Pkg::Util::Git.remote_tagged?('https://github.com/puppetlabs/leatherman.git', '4eef05389ebf418b62af17406c7f9f13fa51f975')).to be(false)
     end
   end
 end