packaging 0.106.0 → 0.106.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +4 -4
- data/lib/packaging/archive.rb +2 -2
- data/lib/packaging/artifactory/extensions.rb +1 -0
- data/lib/packaging/artifactory.rb +27 -23
- data/lib/packaging/config/params.rb +191 -199
- data/lib/packaging/config/validations.rb +0 -2
- data/lib/packaging/config.rb +8 -8
- data/lib/packaging/deb/repo.rb +11 -14
- data/lib/packaging/gem.rb +2 -2
- data/lib/packaging/metrics.rb +7 -7
- data/lib/packaging/nuget.rb +0 -1
- data/lib/packaging/paths.rb +11 -13
- data/lib/packaging/platforms.rb +10 -6
- data/lib/packaging/repo.rb +11 -12
- data/lib/packaging/retrieve.rb +1 -1
- data/lib/packaging/rpm/repo.rb +8 -8
- data/lib/packaging/sign/dmg.rb +8 -7
- data/lib/packaging/sign/ips.rb +64 -32
- data/lib/packaging/sign/msi.rb +84 -112
- data/lib/packaging/sign/rpm.rb +1 -1
- data/lib/packaging/sign.rb +0 -1
- data/lib/packaging/tar.rb +2 -4
- data/lib/packaging/util/date.rb +0 -1
- data/lib/packaging/util/distribution_server.rb +2 -2
- data/lib/packaging/util/execution.rb +2 -4
- data/lib/packaging/util/file.rb +2 -3
- data/lib/packaging/util/git.rb +1 -3
- data/lib/packaging/util/git_tags.rb +3 -3
- data/lib/packaging/util/gpg.rb +3 -4
- data/lib/packaging/util/jenkins.rb +0 -3
- data/lib/packaging/util/misc.rb +1 -1
- data/lib/packaging/util/net.rb +26 -22
- data/lib/packaging/util/repo.rb +0 -1
- data/lib/packaging/util/serialization.rb +1 -2
- data/lib/packaging/util/ship.rb +3 -3
- data/lib/packaging/util/sign.rb +8 -8
- data/lib/packaging/util/tool.rb +1 -4
- data/lib/packaging/util/version.rb +1 -5
- data/lib/packaging/util.rb +1 -1
- data/lib/packaging.rb +1 -2
- data/spec/lib/packaging/platforms_spec.rb +1 -1
- data/spec/lib/packaging/sign_spec.rb +1 -1
- data/spec/lib/packaging/util/git_spec.rb +2 -2
- data/spec/lib/packaging/util/git_tag_spec.rb +5 -5
- data/tasks/30_metrics.rake +2 -2
- data/tasks/apple.rake +8 -14
- data/tasks/archive.rake +1 -2
- data/tasks/deb.rake +7 -8
- data/tasks/doc.rake +5 -3
- data/tasks/education.rake +2 -4
- data/tasks/gem.rake +20 -12
- data/tasks/jenkins.rake +27 -15
- data/tasks/jenkins_dynamic.rake +10 -10
- data/tasks/mock.rake +8 -9
- data/tasks/nightly_repos.rake +14 -14
- data/tasks/pe_ship.rake +10 -17
- data/tasks/retrieve.rake +2 -2
- data/tasks/rpm.rake +1 -1
- data/tasks/ship.rake +6 -6
- data/tasks/sign.rake +5 -5
- data/tasks/tar.rake +2 -3
- data/tasks/update.rake +2 -2
- data/tasks/vendor_gems.rake +5 -7
- data/tasks/version.rake +2 -2
- metadata +59 -31
data/lib/packaging/sign/msi.rb
CHANGED
@@ -2,123 +2,95 @@ module Pkg::Sign::Msi
   module_function

   def sign(target_dir = 'pkg')
-    ...
+    require 'google/cloud/storage'
+    require 'googleauth'
+    require 'json'
+    require 'net/http'
+    require 'uri'

-    ...
-    ...
+    gcp_service_account_credentials = Pkg::Config.msi_signing_gcp_service_account_credentials
+    signing_service_url = Pkg::Config.msi_signing_service_url

-    ...
-    ...
-    ...
-    ...
-    ...
+    begin
+      authorizer = Google::Auth::ServiceAccountCredentials.make_creds(
+        json_key_io: File.open(gcp_service_account_credentials),
+        target_audience: signing_service_url
+      )
+    rescue StandardError => e
+      fail "msis can only be signed by jenkins.\n#{e}"
+    end

-    ...
-    # We are currently adding two signatures to the msi.
-    #
-    # Microsoft compatable Signatures are composed of three different
-    # elements.
-    # 1) The Certificate used to sign the package. This is the element that
-    #    is attached to organization. The certificate has an associated
-    #    algorithm. We recently (February 2016) had to switch from a sha1 to
-    #    a sha256 certificate. Sha1 was deprecated by many Microsoft
-    #    elements on 2016-01-01, which forced us to switch to a sha256 cert.
-    #    This sha256 certificate is recognized by all currently supported
-    #    windows platforms (Windows 8/Vista forward).
-    # 2) The signature used to attach the certificate to the package. This
-    #    can be a done with a variety of digest algorithms. Older platforms
-    #    (i.e., Windows 8 and Windows Vista) don't recognize later
-    #    algorithms like sha256.
-    # 3) The timestamp used to validate when the package was signed. This
-    #    comes from an external source and can be delivered with a variety
-    #    of digest algorithms. Older platforms do not recognize newer
-    #    algorithms like sha256.
-    #
-    # We could have only one signature with the Sha256 Cert, Sha1 Signature,
-    # and Sha1 Timestamp, but that would be too easy. The sha256 signature
-    # and timestamp add more security to our packages. We can't have only
-    # sha256 elements in our package signature, though, because Windows 8
-    # and Windows Vista just don't recognize them at all.
-    #
-    # In order to add two signatures to an MSI, we also need to change the
-    # tool we use to sign packages with. Previously, we were using SignTool
-    # which is the Microsoft blessed program used to sign packages. However,
-    # this tool isn't able to add two signatures to an MSI specifically. It
-    # can dual-sign an exe, just not an MSI. In order to get the dual-signed
-    # packages, we decided to switch over to using osslsigncode. The original
-    # project didn't have support to compile on a windows system, so we
-    # decided to use this fork. The binaries on the signer were pulled from
-    # https://sourceforge.net/u/keeely/osslsigncode/ci/master/tree/
-    #
-    # These are our signatures:
-    # The first signature:
-    #   * Sha256 Certificate
-    #   * Sha1 Signature
-    #   * Sha1 Timestamp
-    #
-    # The second signature:
-    #   * Sha256 Certificate
-    #   * Sha256 Signature
-    #   * Sha256 Timestamp
-    #
-    # Once we no longer support Windows 8/Windows Vista, we can remove the
-    # first Sha1 signature.
-    sign_command = <<-CMD
-      for msipath in #{msis.join(" ")}; do
-        msi="$(basename $msipath)"
-        msidir="C:/#{work_dir}/$(dirname $msipath)"
-        if "/cygdrive/c/tools/osslsigncode-fork/osslsigncode.exe" verify -in "$msidir/$msi" ; then
-          echo "$msi is already signed, skipping . . ." ;
-        else
-          tries=5
-          sha1Servers=(http://timestamp.digicert.com/sha1/timestamp
-            http://timestamp.comodoca.com/authenticode)
-          for timeserver in "${sha1Servers[@]}"; do
-            for ((try=1; try<=$tries; try++)) do
-              ret=$(/cygdrive/c/tools/osslsigncode-fork/osslsigncode.exe sign \
-                -n "Puppet" -i "http://www.puppet.com" \
-                -h sha1 \
-                -pkcs12 "#{Pkg::Config.msi_signing_cert}" \
-                -pass "#{Pkg::Config.msi_signing_cert_pw}" \
-                -t "$timeserver" \
-                -in "$msidir/$msi" \
-                -out "$msidir/signed-$msi")
-              if [[ $ret == *"Succeeded"* ]]; then break; fi
-            done;
-            if [[ $ret == *"Succeeded"* ]]; then break; fi
-          done;
-          echo $ret
-          if [[ $ret != *"Succeeded"* ]]; then exit 1; fi
-          sha256Servers=(http://timestamp.digicert.com/sha256/timestamp
-            http://timestamp.comodoca.com?td=sha256)
-          for timeserver in "${sha256Servers[@]}"; do
-            for ((try=1; try<=$tries; try++)) do
-              ret=$(/cygdrive/c/tools/osslsigncode-fork/osslsigncode.exe sign \
-                -n "Puppet" -i "http://www.puppet.com" \
-                -nest -h sha256 \
-                -pkcs12 "#{Pkg::Config.msi_signing_cert}" \
-                -pass "#{Pkg::Config.msi_signing_cert_pw}" \
-                -ts "$timeserver" \
-                -in "$msidir/signed-$msi" \
-                -out "$msidir/$msi")
-              if [[ $ret == *"Succeeded"* ]]; then break; fi
-            done;
-            if [[ $ret == *"Succeeded"* ]]; then break; fi
-          done;
-          echo $ret
-          if [[ $ret != *"Succeeded"* ]]; then exit 1; fi
-        fi
-      done
-    CMD
+    gcp_auth_token = authorizer.fetch_access_token!['id_token']

-    ...
-    ...
-    ...
-      { fail_fast: false }
+    gcp_storage = Google::Cloud::Storage.new(
+      project_id: 'puppet-release-engineering',
+      credentials: gcp_service_account_credentials
     )
-    ...
-    ...
+    tosign_bucket = gcp_storage.bucket('windows-tosign-bucket')
+    signed_bucket = gcp_storage.bucket('windows-signed-bucket')
+
+    service_uri = URI.parse(signing_service_url)
+    headers = { 'Content-Type': 'application/json', 'Authorization': "Bearer #{gcp_auth_token}" }
+    http = Net::HTTP.new(service_uri.host, service_uri.port)
+    http.use_ssl = true
+    request = Net::HTTP::Post.new(service_uri.request_uri, headers)
+
+    # Create hash to keep track of the signed msis
+    signed_msis = {}
+
+    msis = Dir.glob("#{target_dir}/windows*/**/*.msi")
+
+    # Upload msis to GCP and sign them
+    msis.each do |msi|
+      begin
+        tosign_bucket.create_file(msi, msi)
+      rescue StandardError => e
+        delete_tosign_msis(tosign_bucket, msis)
+        fail "There was an error uploading #{msi} to the windows-tosign-bucket gcp bucket.\n#{e}"
+      end
+      msi_json = { 'Path': msi }
+      request.body = msi_json.to_json
+      begin
+        response = http.request(request)
+        response_body = JSON.parse(JSON.parse(response.body.to_json), :quirks_mode => true)
+      rescue StandardError => e
+        delete_tosign_msis(tosign_bucket, msis)
+        delete_signed_msis(signed_bucket, signed_msis)
+        fail "There was an error signing #{msi}.\n#{e}"
+      end
+      # Store location of signed msi
+      signed_msi = response_body['Path']
+      signed_msis[msi] = signed_msi
+    end
+
+    # Download the signed msis
+    msis.each do |msi|
+      begin
+        signed_msi = signed_bucket.file(signed_msis[msi])
+        signed_msi.download(msi)
+      rescue StandardError => e
+        delete_tosign_msis(tosign_bucket, msis)
+        delete_signed_msis(signed_bucket, signed_msis)
+        fail "There was an error retrieving the signed msi:#{msi}.\n#{e}"
+      end
+    end
+
+    # Cleanup buckets
+    delete_tosign_msis(tosign_bucket, msis)
+    delete_signed_msis(signed_bucket, signed_msis)
+  end
+
+  def delete_tosign_msis(bucket, msis)
+    msis.each do |msi|
+      tosign_msi = bucket.file(msi)
+      tosign_msi.delete unless tosign_msi.nil?
+    end
+  end
+
+  def delete_signed_msis(bucket, signed_msis)
+    signed_msis.each do |msi, temp_name|
+      signed_msi = bucket.file(temp_name)
+      signed_msi.delete unless signed_msi.nil?
     end
-    Pkg::Util::Net.remote_execute(ssh_host_string, "if [ -d '/cygdrive/c/#{work_dir}' ]; then rm -rf '/cygdrive/c/#{work_dir}'; fi")
   end
 end

(Removed lines shown as "..." were not captured in the source view of this diff.)
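The practical effect of this hunk is that `Pkg::Sign::Msi.sign` no longer drives osslsigncode on a Windows signer over SSH; it authenticates to an HTTPS signing service with a GCP service-account ID token and round-trips each MSI through the `windows-tosign-bucket` and `windows-signed-bucket` Cloud Storage buckets. A minimal sketch of how a release pipeline might invoke the new path — the credential path and service URL below are placeholders, not values from the diff:

    # Hypothetical driver script; assumes the google-cloud-storage and googleauth
    # gems are installed and the two config values point at real resources.
    require 'packaging'

    Pkg::Config.msi_signing_gcp_service_account_credentials = '/path/to/service-account.json'
    Pkg::Config.msi_signing_service_url = 'https://msi-signing-service.example.app'

    # Signs every MSI matching pkg/windows*/**/*.msi in place.
    Pkg::Sign::Msi.sign('pkg')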
data/lib/packaging/sign/rpm.rb
CHANGED
@@ -70,7 +70,7 @@ module Pkg::Sign::Rpm
     v4_rpms = []
     rpms_to_sign.each do |rpm|
       platform_tag = Pkg::Paths.tag_from_artifact_path(rpm)
-      platform, version, ...
+      platform, version, = Pkg::Platforms.parse_platform_tag(platform_tag)

       # We don't sign AIX rpms
       next if platform_tag.include?('aix')
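The replacement line relies on Ruby's trailing comma in a multiple assignment, which keeps the first two values returned by `parse_platform_tag` and silently drops the rest. A standalone illustration with made-up values:

    platform, version, = ['el', '8', 'x86_64']
    platform  # => "el"
    version   # => "8"
    # The trailing comma discards 'x86_64' without introducing an unused variable,
    # which is why the new line needs no `_arch`-style placeholder.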
data/lib/packaging/sign.rb
CHANGED
data/lib/packaging/tar.rb
CHANGED
@@ -5,7 +5,6 @@ module Pkg
     include FileUtils

     attr_accessor :files, :project, :version, :excludes, :target, :templates
-    attr_reader :tar

     def initialize
       @tar = Pkg::Util::Tool.find_tool('tar', :required => true)
@@ -56,7 +55,7 @@ module Pkg
       patterns =
         case @files
         when String
-          ...
+          warn "warning: `files` should be an array, not a string"
           @files.split(' ')
         when Array
           @files
@@ -137,7 +136,7 @@ module Pkg
     def tar(target, source)
       mkpath File.dirname(target)
       cd File.dirname(source) do
-        %x(#{@tar} #{@excludes.map { |x| ...
+        %x(#{@tar} #{@excludes.map { |x| " --exclude #{x} " }.join if @excludes} -zcf '#{File.basename(target)}' '#{File.basename(source)}')
         unless $?.success?
           fail "Failed to create .tar.gz archive with #{@tar}. Please ensure the tar command in your path accepts the flags '-c', '-z', and '-f'"
         end
@@ -157,7 +156,6 @@ module Pkg
       self.tar(@target, workdir)
       self.clean_up workdir
     end
-
   end
 end

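The collapsed one-liner in the `tar` method builds the same exclude flags the old multi-line block did; evaluated on its own, the interpolated fragment produces a string like this (the exclude list here is invented):

    excludes = %w[.git spec]
    excludes.map { |x| " --exclude #{x} " }.join
    # => " --exclude .git  --exclude spec "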
data/lib/packaging/util/date.rb
CHANGED
data/lib/packaging/util/distribution_server.rb
CHANGED
@@ -31,8 +31,8 @@ module Pkg::Util::DistributionServer

       # If we just shipped a tagged version, we want to make it immutable
       files = Dir.glob("#{local_source_directory}/**/*")
-                 ...
-                 ...
+                 .select { |f| File.file?(f) and !f.include? "#{Pkg::Config.ref}.yaml" }
+                 .map { |f| "#{remote_target_directory}/#{f.sub(/^#{local_source_directory}\//, '')}" }

       Pkg::Util::Net.remote_set_ownership(Pkg::Config.distribution_server, 'root', 'release', files)
       Pkg::Util::Net.remote_set_permissions(Pkg::Config.distribution_server, '0664', files)
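The two added lines are continuation lines: Ruby lets a chained call begin on the next line with a leading dot, so the glob result is filtered and rewritten in a single expression. A toy version of the same shape (paths and prefix are invented):

    paths = ['repo/a.rpm', 'repo/sub', 'repo/1.2.3.yaml']
    files = paths
            .select { |f| f.end_with?('.rpm') }
            .map { |f| "/opt/target/#{f}" }
    # => ["/opt/target/repo/a.rpm"]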
data/lib/packaging/util/execution.rb
CHANGED
@@ -1,9 +1,7 @@
 # Utility methods for handling system calls and interactions

 module Pkg::Util::Execution
-
   class << self
-
     # Alias to $?.success? that makes success? slightly easier to test and stub
     # If immediately run, $? will not be instanciated, so only call success? if
     # $? exists, otherwise return nil
@@ -23,7 +21,7 @@ module Pkg::Util::Execution
     # while also raising an exception if a command does not succeed (ala `sh "cmd"`).
     def ex(command, debug = false)
       puts "Executing '#{command}'..." if debug
-      ret = ...
+      ret = %x(#{command})
       unless Pkg::Util::Execution.success?
         raise RuntimeError
       end
@@ -71,7 +69,7 @@ module Pkg::Util::Execution
           blk.call
           success = true
           break
-        rescue => err
+        rescue StandardError => err
           puts "An error was encountered evaluating block. Retrying.."
           exception = err.to_s + "\n" + err.backtrace.join("\n")
         end
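Two behavior-neutral cleanups appear here and recur in several later files: `%x(...)` is simply the literal form of backticks (run the command through the shell, return its stdout, set `$?`), and an explicit `rescue StandardError` covers exactly what a bare `rescue` already did. A quick check of both (the echoed string is arbitrary):

    out = %x(echo hello)    # same as `echo hello`
    out                     # => "hello\n"
    $?.success?             # => true

    begin
      raise ArgumentError, 'boom'
    rescue StandardError => err   # identical coverage to a bare `rescue => err`
      err.message                 # => "boom"
    end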
data/lib/packaging/util/file.rb
CHANGED
@@ -2,7 +2,6 @@
 require 'fileutils'

 module Pkg::Util::File
-
   class << self
     def exist?(file)
       ::File.exist?(file)
@@ -15,7 +14,7 @@ module Pkg::Util::File

     def mktemp
       mktemp = Pkg::Util::Tool.find_tool('mktemp', :required => true)
-      stdout, ...
+      stdout, = Pkg::Util::Execution.capture3("#{mktemp} -d -t pkgXXXXXX")
       stdout.strip
     end

@@ -79,7 +78,7 @@ module Pkg::Util::File
         target_opts = "-C #{target}"
       end
       if file_exists?(source, :required => true)
-        stdout, ...
+        stdout, = Pkg::Util::Execution.capture3(%(#{tar} #{options} #{target_opts} -xf #{source}))
         stdout
       end
     end
data/lib/packaging/util/git.rb
CHANGED
@@ -22,7 +22,6 @@ module Pkg::Util::Git
     end

     # Git utility to create a new git bundle
-    # rubocop:disable Metrics/AbcSize
     def bundle(treeish, appendix = Pkg::Util.rand_string, temp = Pkg::Util::File.mktemp)
       fail_unless_repo
       Pkg::Util::Execution.capture3("#{Pkg::Util::Tool::GIT} bundle create #{temp}/#{Pkg::Config.project}-#{Pkg::Config.version}-#{appendix} #{treeish} --tags")
@@ -113,13 +112,12 @@ module Pkg::Util::Git
       end
     end

-    # rubocop:disable Style/GuardClause
     def fail_unless_repo
       unless repo?
         raise "Pkg::Config.project_root (#{Pkg::Config.project_root}) is not \
 a valid git repository"
       end
-      ...
+    end

     # Return the basename of the project repo
     def project_name
data/lib/packaging/util/git_tags.rb
CHANGED
@@ -1,6 +1,6 @@
 module Pkg::Util
   class Git_tag
-    attr_reader :address, :ref, :ref_name, :ref_type
+    attr_reader :address, :ref, :ref_name, :ref_type

     GIT = Pkg::Util::Tool::GIT
     DEVNULL = Pkg::Util::OS::DEVNULL
@@ -43,7 +43,7 @@ module Pkg::Util
     # Fetch the full ref using ls-remote, this should raise an error if it returns non-zero
     # because that means this ref doesn't exist in the repo
     def fetch_full_ref
-      stdout, ...
+      stdout, = Pkg::Util::Execution.capture3("#{GIT} ls-remote --tags --heads --exit-code #{address} #{ref}")
       stdout.split.last
     rescue RuntimeError => e
       raise "ERROR : Not a ref or sha!\n#{e}"
@@ -54,7 +54,7 @@ module Pkg::Util
     end

     def ref?
-      ...
+      %x(#{GIT} check-ref-format #{ref} >#{DEVNULL} 2>&1)
       $?.success?
     end

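`git check-ref-format` exits non-zero for malformed ref names, so discarding its output and reading `$?.success?` (as `ref?` does above) doubles as a validity predicate. For example, with git installed and arbitrary ref names:

    %x(git check-ref-format refs/tags/1.2.3 >/dev/null 2>&1)
    $?.success?  # => true

    %x(git check-ref-format 'bad..ref' >/dev/null 2>&1)
    $?.success?  # => false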
data/lib/packaging/util/gpg.rb
CHANGED
@@ -1,6 +1,5 @@
 module Pkg::Util::Gpg
   class << self
-
     # Please note that this method is not used in determining what key is used
     # to sign the debian repos. That is defined in the freight config that
     # lives on our internal repo staging host. The debian conf/distribution
@@ -31,14 +30,14 @@ module Pkg::Util::Gpg

     def kill_keychain
       if keychain
-        stdout, ...
+        stdout, = Pkg::Util::Execution.capture3("#{keychain} -k mine")
         stdout
       end
     end

     def start_keychain
       if keychain
-        keychain_output, ...
+        keychain_output, = Pkg::Util::Execution.capture3("#{keychain} -q --agents gpg --eval #{key}")
         keychain_output.chomp!
         new_env = keychain_output.match(/GPG_AGENT_INFO=([^;]*)/)
         ENV["GPG_AGENT_INFO"] = new_env[1]
@@ -56,7 +55,7 @@ module Pkg::Util::Gpg
           return true
         end
         use_tty = "--no-tty --use-agent" if ENV['RPM_GPG_AGENT']
-        stdout, ...
+        stdout, = Pkg::Util::Execution.capture3("#{gpg} #{use_tty} --armor --detach-sign -u #{key} #{file}")
         stdout
       else
         fail "No gpg available. Cannot sign #{file}."
data/lib/packaging/util/jenkins.rb
CHANGED
@@ -3,9 +3,7 @@ require 'net/http'
 require 'json'

 module Pkg::Util::Jenkins
-
   class << self
-
     # Use the curl to create a jenkins job from a valid XML
     # configuration file.
     # Returns the URL to the job
@@ -90,6 +88,5 @@ module Pkg::Util::Jenkins

       wait_for_build job_hash['lastBuild']['url']
     end
-
   end
 end
data/lib/packaging/util/misc.rb
CHANGED
@@ -57,7 +57,7 @@ module Pkg::Util::Misc
     def check_rubygems_ownership(gem_name)
       require 'yaml'
       credentials = YAML.load_file("#{ENV['HOME']}/.gem/credentials")
-      gems = YAML. ...
+      gems = YAML.safe_load(%x(curl -H 'Authorization:#{credentials[:rubygems_api_key]}' https://rubygems.org/api/v1/gems.yaml))
       gems.each do |gem|
         if gem['name'] == gem_name
           return true
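The removed call is truncated in this view, but the replacement switches to `YAML.safe_load`, which only permits plain scalars, arrays, and hashes unless extra classes are explicitly allowed; documents tagged with arbitrary Ruby classes are rejected instead of instantiated. For instance:

    require 'yaml'
    YAML.safe_load("name: packaging\ndownloads: 1")    # => {"name"=>"packaging", "downloads"=>1}
    YAML.safe_load("--- !ruby/object:OpenStruct {}\n")  # raises Psych::DisallowedClass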
data/lib/packaging/util/net.rb
CHANGED
@@ -1,15 +1,13 @@
 # Utility methods for handling network calls and interactions

 module Pkg::Util::Net
-
   class << self
-
     # This simple method does an HTTP get of a URI and writes it to a file
     # in a slightly more platform agnostic way than curl/wget
     def fetch_uri(uri, target)
       require 'open-uri'
       if Pkg::Util::File.file_writable?(File.dirname(target))
-        File.open(target, 'w') { |f| f.puts(open(uri).read) }
+        File.open(target, 'w') { |f| f.puts(URI.open(uri).read) }
       end
     end

@@ -37,7 +35,7 @@ module Pkg::Util::Net
       Array(hosts).flatten.each do |host|
         begin
           remote_execute(host, 'exit', { extra_options: '-oBatchMode=yes' })
-        rescue
+        rescue StandardError
           errs << host
         end
       end
@@ -56,7 +54,7 @@ module Pkg::Util::Net
         begin
           remote_execute(host, "gpg --list-secret-keys #{gpg} > /dev/null 2&>1",
                          { extra_options: '-oBatchMode=yes' })
-        rescue
+        rescue StandardError
           errs << host
         end
       end
@@ -112,13 +110,14 @@ module Pkg::Util::Net
     ###
     ### Deprecated method implemented as a shim to the new `remote_execute` method
     ###
-    def remote_ssh_cmd(target, command, capture_output = false, extra_options = '', fail_fast = true, trace = false)
+    def remote_ssh_cmd(target, command, capture_output = false, extra_options = '', fail_fast = true, trace = false) # rubocop:disable Metrics/ParameterLists
       puts "Warn: \"remote_ssh_cmd\" call in packaging is deprecated. Use \"remote_execute\" instead."
       remote_execute(target, command, {
                        capture_output: capture_output,
                        extra_options: extra_options,
                        fail_fast: fail_fast,
-                       trace: trace ...
+                       trace: trace
+                     })
     end

     # Construct a valid rsync command
@@ -149,7 +148,8 @@ module Pkg::Util::Net
         target_path: nil,
         target_host: nil,
         extra_flags: nil,
-        dryrun: false ...
+        dryrun: false
+      }.merge(opts)
       origin = Pathname.new(origin_path)
       target = options[:target_path] || origin.parent

@@ -187,9 +187,10 @@ module Pkg::Util::Net
         target_path: nil,
         target_host: nil,
         extra_flags: nil,
-        dryrun: ENV['DRYRUN'] ...
+        dryrun: ENV['DRYRUN']
+      }.merge(opts.delete_if { |_, value| value.nil? })

-      stdout, ...
+      stdout, = Pkg::Util::Execution.capture3(rsync_cmd(source, options), true)
       stdout
     end

@@ -223,7 +224,7 @@ module Pkg::Util::Net
       s3cmd = Pkg::Util::Tool.check_tool('s3cmd')

       if Pkg::Util::File.file_exists?(File.join(ENV['HOME'], '.s3cfg'))
-        stdout, ...
+        stdout, = Pkg::Util::Execution.capture3("#{s3cmd} sync #{flags.join(' ')} '#{source}' s3://#{target_bucket}/#{target_directory}/")
         stdout
       else
         fail "#{File.join(ENV['HOME'], '.s3cfg')} does not exist. It is required to ship files using s3cmd."
@@ -279,7 +280,7 @@ module Pkg::Util::Net
         '--write-out "%{http_code}"',
         '--output /dev/null'
       ]
-      stdout, ...
+      stdout, = Pkg::Util::Net.curl_form_data(uri, data)
       stdout
     end

@@ -292,18 +293,18 @@ module Pkg::Util::Net
     end

     def remote_set_ownership(host, owner, group, files)
-      remote_cmd = "for file in #{files.join( ...
+      remote_cmd = "for file in #{files.join(' ')}; do if [[ -d $file ]] || ! `lsattr $file | grep -q '\\-i\\-'`; then sudo chown #{owner}:#{group} $file; else echo \"$file is immutable\"; fi; done"
       Pkg::Util::Net.remote_execute(host, remote_cmd)
     end

     def remote_set_permissions(host, permissions, files)
-      remote_cmd = "for file in #{files.join( ...
+      remote_cmd = "for file in #{files.join(' ')}; do if [[ -d $file ]] || ! `lsattr $file | grep -q '\\-i\\-'`; then sudo chmod #{permissions} $file; else echo \"$file is immutable\"; fi; done"
       Pkg::Util::Net.remote_execute(host, remote_cmd)
     end

     # Remotely set the immutable bit on a list of files
     def remote_set_immutable(host, files)
-      Pkg::Util::Net.remote_execute(host, "sudo chattr +i #{files.join( ...
+      Pkg::Util::Net.remote_execute(host, "sudo chattr +i #{files.join(' ')}")
     end

     # Create a symlink indicating the latest version of a package
@@ -350,8 +351,9 @@ module Pkg::Util::Net
       CMD

       _, err = Pkg::Util::Net.remote_execute(
-        ...
-        ...
+        Pkg::Config.staging_server, cmd, { capture_output: true }
+      )
+      warn err
     end

     def escape_html(uri)
@@ -383,17 +385,19 @@ module Pkg::Util::Net
       Pkg::Util::Net.rsync_to(tarball, host, '/tmp')
       appendix = Pkg::Util.rand_string
       git_bundle_directory = File.join('/tmp', "#{Pkg::Config.project}-#{appendix}")
-      command = ...
-        #{tar} -zxvf /tmp/#{tarball_name}.tar.gz -C /tmp/ ;
-        git clone --recursive /tmp/#{tarball_name} #{git_bundle_directory} ;
-      DOC
+      command = <<~DOC
+        #{tar} -zxvf /tmp/#{tarball_name}.tar.gz -C /tmp/ ;
+        git clone --recursive /tmp/#{tarball_name} #{git_bundle_directory} ;
+      DOC
       Pkg::Util::Net.remote_execute(host, command)
       return git_bundle_directory
     end

     def remote_bundle_install_command
+      rvm_ruby_version = ENV['RVM_RUBY_VERSION'] || '2.7.5'
       export_packaging_location = "export PACKAGING_LOCATION='#{ENV['PACKAGING_LOCATION']}';" if ENV['PACKAGING_LOCATION'] && !ENV['PACKAGING_LOCATION'].empty?
-      ...
+      export_vanagon_location = "export VANAGON_LOCATION='#{ENV['VANAGON_LOCATION']}';" if ENV['VANAGON_LOCATION'] && !ENV['VANAGON_LOCATION'].empty?
+      "source /usr/local/rvm/scripts/rvm; rvm use ruby-#{rvm_ruby_version}; #{export_packaging_location} #{export_vanagon_location} bundle install --path .bundle/gems ;"
     end

     # Given a BuildInstance object and a host, send its params to the host. Return

(Removed lines shown as "..." were truncated or not captured in the source view of this diff.)
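Two of the changes above are worth a note: `URI.open` replaces the plain `open(uri)` call that newer Rubies no longer route through open-uri, and the git-bundle command is rebuilt with a squiggly heredoc (`<<~`), which strips the common leading indentation so the remote shell never sees stray whitespace. A small demonstration of the heredoc form (the commands are placeholders):

    command = <<~DOC
      echo step-one ;
      echo step-two ;
    DOC
    command  # => "echo step-one ;\necho step-two ;\n"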
data/lib/packaging/util/repo.rb
CHANGED
data/lib/packaging/util/serialization.rb
CHANGED
@@ -2,14 +2,13 @@

 module Pkg::Util::Serialization
   class << self
-
     # Given the path to a yaml file, load the yaml file into an object and return the object.
     def load_yaml(file)
       require 'yaml'
       file = File.expand_path(file)
       begin
         input_data = YAML.load_file(file) || {}
-      rescue => e
+      rescue StandardError => e
         fail "There was an error loading data from #{file}.\n#{e}"
       end
       input_data
data/lib/packaging/util/ship.rb
CHANGED
@@ -87,7 +87,7 @@ module Pkg::Util::Ship
     puts "Do you want to ship the above files to (#{staging_server})?"
     return false unless Pkg::Util.ask_yes_or_no

-    extra_flags = %w ...
+    extra_flags = %w[--ignore-existing --delay-updates]
     extra_flags << '--dry-run' if ENV['DRYRUN']

     staged_pkgs.each do |pkg|
@@ -330,7 +330,7 @@ module Pkg::Util::Ship
   def test_ship(vm, ship_task)
     command = 'getent group release || groupadd release'
     Pkg::Util::Net.remote_execute(vm, command)
-    hosts_to_override = %w ...
+    hosts_to_override = %w[
       APT_HOST
       DMG_HOST
       GEM_HOST
@@ -349,7 +349,7 @@ module Pkg::Util::Ship
       TAR_STAGING_SERVER
       YUM_STAGING_SERVER
       STAGING_SERVER
-      ...
+    ]
     hosts_to_override.each do |host|
       ENV[host] = vm
     end
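The `%w` changes appear to be delimiter-only (the removed lines are truncated in this view): `%w[...]` builds the same array of bare-word strings whichever bracket style is used, so the rsync flags and the host-override list are unchanged in content. For example:

    %w[--ignore-existing --delay-updates]
    # => ["--ignore-existing", "--delay-updates"]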