packaging 0.88.77 → 0.99.0
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/README.md +128 -74
- data/lib/packaging/artifactory.rb +60 -433
- data/lib/packaging/config/params.rb +7 -28
- data/lib/packaging/config.rb +50 -150
- data/lib/packaging/deb/repo.rb +19 -20
- data/lib/packaging/gem.rb +83 -41
- data/lib/packaging/ips.rb +57 -0
- data/lib/packaging/msi.rb +89 -0
- data/lib/packaging/nuget.rb +1 -1
- data/lib/packaging/osx.rb +36 -0
- data/lib/packaging/paths.rb +87 -225
- data/lib/packaging/platforms.rb +416 -443
- data/lib/packaging/repo.rb +22 -122
- data/lib/packaging/retrieve.rb +7 -36
- data/lib/packaging/rpm/repo.rb +8 -5
- data/lib/packaging/tar.rb +0 -9
- data/lib/packaging/util/date.rb +0 -5
- data/lib/packaging/util/execution.rb +2 -2
- data/lib/packaging/util/git.rb +1 -1
- data/lib/packaging/util/gpg.rb +1 -5
- data/lib/packaging/util/net.rb +37 -79
- data/lib/packaging/util/rake_utils.rb +0 -1
- data/lib/packaging/util/ship.rb +13 -142
- data/lib/packaging/util/tool.rb +1 -1
- data/lib/packaging/util/version.rb +0 -8
- data/lib/packaging/util.rb +2 -2
- data/lib/packaging.rb +3 -3
- data/spec/fixtures/config/params.yaml +2 -0
- data/spec/lib/packaging/artifactory_spec.rb +16 -66
- data/spec/lib/packaging/config_spec.rb +29 -49
- data/spec/lib/packaging/deb/repo_spec.rb +7 -16
- data/spec/lib/packaging/paths_spec.rb +56 -321
- data/spec/lib/packaging/platforms_spec.rb +21 -46
- data/spec/lib/packaging/repo_spec.rb +40 -78
- data/spec/lib/packaging/retrieve_spec.rb +8 -47
- data/spec/lib/packaging/rpm/repo_spec.rb +4 -4
- data/spec/lib/packaging/tar_spec.rb +40 -34
- data/spec/lib/packaging/util/git_tag_spec.rb +1 -1
- data/spec/lib/packaging/util/gpg_spec.rb +1 -1
- data/spec/lib/packaging/util/net_spec.rb +15 -35
- data/spec/lib/packaging/util/ship_spec.rb +63 -145
- data/spec/spec_helper.rb +14 -0
- data/tasks/00_utils.rake +6 -4
- data/tasks/apple.rake +0 -2
- data/tasks/config.rake +0 -5
- data/tasks/education.rake +5 -5
- data/tasks/fetch.rake +14 -17
- data/tasks/gem.rake +121 -134
- data/tasks/jenkins.rake +7 -51
- data/tasks/nightly_repos.rake +69 -20
- data/tasks/pe_ship.rake +11 -16
- data/tasks/retrieve.rake +6 -13
- data/tasks/ship.rake +256 -196
- data/tasks/sign.rake +135 -63
- data/tasks/tar.rake +6 -0
- data/templates/packaging.xml.erb +7 -9
- data/templates/repo.xml.erb +3 -6
- metadata +27 -80
- data/lib/packaging/archive.rb +0 -126
- data/lib/packaging/artifactory/extensions.rb +0 -94
- data/lib/packaging/config/validations.rb +0 -13
- data/lib/packaging/metrics.rb +0 -15
- data/lib/packaging/sign/deb.rb +0 -9
- data/lib/packaging/sign/dmg.rb +0 -41
- data/lib/packaging/sign/ips.rb +0 -57
- data/lib/packaging/sign/msi.rb +0 -124
- data/lib/packaging/sign/rpm.rb +0 -115
- data/lib/packaging/sign.rb +0 -8
- data/spec/lib/packaging/gem_spec.rb +0 -86
- data/spec/lib/packaging/sign_spec.rb +0 -133
- data/tasks/archive.rake +0 -69
data/tasks/ship.rake
CHANGED
@@ -5,39 +5,27 @@ namespace :pl do
   # to various target yum and apt repositories based on their specific type
   # e.g., final vs devel vs PE vs FOSS packages

-  desc "Update
+  desc "Update remote yum repository on '#{Pkg::Config.yum_staging_server}'"
   task update_yum_repo: 'pl:fetch' do
-
-
-
-
-    end
-  end
-
-  desc "Update all final yum repositories on '#{Pkg::Config.yum_staging_server}'"
-  task update_all_final_yum_repos: 'pl:fetch' do
-    command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository/Rakefile mk_repo'
-    $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
-    if Pkg::Util.ask_yes_or_no
-      Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => '', :repo_path => Pkg::Config.yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
-    end
-  end
-
-  desc "Update '#{Pkg::Config.nonfinal_repo_name}' nightly yum repository on '#{Pkg::Config.yum_staging_server}'"
-  task update_nightlies_yum_repo: 'pl:fetch' do
-    command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository-nightlies/Rakefile mk_repo'
-    $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
-    if Pkg::Util.ask_yes_or_no
-      Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => Pkg::Config.nonfinal_repo_name, :repo_path => Pkg::Config.nonfinal_yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
+    if Pkg::Util::Version.final?
+      path = Pkg::Config.yum_repo_path
+    else
+      path = Pkg::Config.nonfinal_yum_repo_path || Pkg::Config.yum_repo_path
     end
-
+    yum_whitelist = {
+      __REPO_NAME__: Pkg::Paths.repo_name,
+      __REPO_PATH__: path,
+      __REPO_HOST__: Pkg::Config.yum_staging_server,
+      __GPG_KEY__: Pkg::Util::Gpg.key
+    }

-  desc "Update all nightly yum repositories on '#{Pkg::Config.yum_staging_server}'"
-  task update_all_nightlies_yum_repos: 'pl:fetch' do
-    command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository-nightlies/Rakefile mk_repo'
     $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
     if Pkg::Util.ask_yes_or_no
-
+      if Pkg::Config.yum_repo_command
+        Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.yum_staging_server, Pkg::Util::Misc.search_and_replace(Pkg::Config.yum_repo_command, yum_whitelist))
+      else
+        Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.yum_staging_server, 'rake -f /opt/repository/Rakefile mk_repo')
+      end
     end
   end

@@ -45,17 +33,35 @@ namespace :pl do

   desc "Update remote apt repository on '#{Pkg::Config.apt_signing_server}'"
   task update_apt_repo: 'pl:fetch' do
-
-
-
-
-
+    if Pkg::Util::Version.final?
+      path = Pkg::Config.apt_repo_path
+      cmd = Pkg::Config.apt_repo_command
+    else
+      path = Pkg::Config.nonfinal_apt_repo_path || Pkg::Config.apt_repo_path
+      cmd = Pkg::Config.nonfinal_apt_repo_command || Pkg::Config.apt_repo_command
+    end
+    apt_whitelist = {
+      __REPO_NAME__: Pkg::Paths.repo_name,
+      __REPO_PATH__: path,
+      __REPO_URL__: Pkg::Config.apt_repo_url,
+      __REPO_HOST__: Pkg::Config.apt_host,
+      __APT_PLATFORMS__: Pkg::Config.apt_releases.join(' '),
+      __GPG_KEY__: Pkg::Util::Gpg.key
+    }

-  desc "Update nightlies apt repository on '#{Pkg::Config.apt_signing_server}'"
-  task update_nightlies_apt_repo: 'pl:fetch' do
     $stdout.puts "Really run remote repo update on '#{Pkg::Config.apt_signing_server}'? [y,n]"
     if Pkg::Util.ask_yes_or_no
-
+      if cmd
+        Pkg::Util::Net.remote_ssh_cmd(
+          Pkg::Config.apt_signing_server,
+          Pkg::Util::Misc.search_and_replace(
+            cmd,
+            apt_whitelist
+          )
+        )
+      else
+        warn %(Pkg::Config#apt_repo_command returned something unexpected, so no attempt will be made to update remote repos)
+      end
     end
   end

@@ -65,12 +71,6 @@ namespace :pl do
     Rake::Task['pl:remote:update_yum_repo'].invoke
   end

-  desc "Update nightlies apt and yum repos"
-  task :update_nightly_repos => "pl:fetch" do
-    Rake::Task['pl:remote:update_nightlies_apt_repo'].invoke
-    Rake::Task['pl:remote:update_nightlies_yum_repo'].invoke
-  end
-
   desc "Update remote ips repository on #{Pkg::Config.ips_host}"
   task :update_ips_repo => 'pl:fetch' do
     if Dir['pkg/ips/pkgs/**/*'].empty? && Dir['pkg/solaris/11/**/*'].empty?
@@ -83,11 +83,7 @@ namespace :pl do
       source_dir = 'pkg/solaris/11/'
     end

-    tmpdir, _ = Pkg::Util::Net.
-      Pkg::Config.ips_host,
-      'mktemp -d -p /var/tmp',
-      { capture_output: true }
-    )
+    tmpdir, _ = Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.ips_host, 'mktemp -d -p /var/tmp', true)
     tmpdir.chomp!

     Pkg::Util::Net.rsync_to(source_dir, Pkg::Config.ips_host, tmpdir)
@@ -96,9 +92,9 @@ namespace :pl do
       sudo pkgrecv -s $pkg -d #{Pkg::Config.ips_path} '*';
     done)

-    Pkg::Util::Net.
-    Pkg::Util::Net.
-    Pkg::Util::Net.
+    Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.ips_host, remote_cmd)
+    Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.ips_host, "sudo pkgrepo refresh -s #{Pkg::Config.ips_path}")
+    Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.ips_host, "sudo /usr/sbin/svcadm restart svc:/application/pkg/server:#{Pkg::Config.ips_repo || 'default'}")
   end
 end

@@ -108,7 +104,7 @@ namespace :pl do
     if Pkg::Util.ask_yes_or_no
       Pkg::Util::Execution.retry_on_fail(times: 3) do
         cmd = Pkg::Util::Net.rsync_cmd(Pkg::Config.dmg_path, target_host: Pkg::Config.dmg_host, extra_flags: ['--update'])
-        Pkg::Util::Net.
+        Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.dmg_staging_server, cmd)
       end
     end
   end
@@ -119,7 +115,7 @@ namespace :pl do
     if Pkg::Util.ask_yes_or_no
       Pkg::Util::Execution.retry_on_fail(times: 3) do
         cmd = Pkg::Util::Net.rsync_cmd(Pkg::Config.swix_path, target_host: Pkg::Config.swix_host, extra_flags: ['--update'])
-        Pkg::Util::Net.
+        Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.swix_staging_server, cmd)
       end
     end
   end
@@ -134,7 +130,7 @@ namespace :pl do
     else
       Pkg::Util::Execution.retry_on_fail(times: 3) do
         cmd = Pkg::Util::Net.rsync_cmd(Pkg::Config.tarball_path, target_host: Pkg::Config.tar_host, extra_flags: ['--update'])
-        Pkg::Util::Net.
+        Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.tar_staging_server, cmd)
       end
     end
   end
@@ -150,7 +146,7 @@ namespace :pl do
     else
       Pkg::Util::Execution.retry_on_fail(times: 3) do
         cmd = Pkg::Util::Net.rsync_cmd(Pkg::Config.msi_path, target_host: Pkg::Config.msi_host, extra_flags: ['--update'])
-        Pkg::Util::Net.
+        Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.msi_staging_server, cmd)
       end
     end
   end
@@ -178,7 +174,7 @@ namespace :pl do
     if Pkg::Util.ask_yes_or_no
       Pkg::Util::Execution.retry_on_fail(:times => 3) do
         command = 'sudo /usr/local/bin/s3_repo_sync.sh apt.puppetlabs.com'
-        Pkg::Util::Net.
+        Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.apt_signing_server, command)
       end
     end
   end
@@ -204,7 +200,7 @@ namespace :pl do
     if Pkg::Util.ask_yes_or_no
       Pkg::Util::Execution.retry_on_fail(:times => 3) do
         command = 'sudo /usr/local/bin/s3_repo_sync.sh yum.puppetlabs.com'
-        Pkg::Util::Net.
+        Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.yum_staging_server, command)
       end
     end
   end
@@ -215,7 +211,7 @@ namespace :pl do
     if Pkg::Util.ask_yes_or_no
       Pkg::Util::Execution.retry_on_fail(:times => 3) do
         command = 'sudo /usr/local/bin/s3_repo_sync.sh downloads.puppetlabs.com'
-        Pkg::Util::Net.
+        Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.staging_server, command)
       end
     end
   end
@@ -232,7 +228,7 @@ namespace :pl do
     puts "Deploying nightly builds from #{Pkg::Config.staging_server} to AWS S3..."
     Pkg::Util::Execution.retry_on_fail(:times => 3) do
       command = 'sudo /usr/local/bin/s3_repo_sync.sh nightlies.puppet.com'
-      Pkg::Util::Net.
+      Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.staging_server, command)
     end
   end

@@ -244,51 +240,52 @@ namespace :pl do
     Pkg::Util::Execution.retry_on_fail(:times => 3) do
       Pkg::Config.rsync_servers.each do |rsync_server|
         ['apt', 'yum'].each do |repo|
-
-
-          Pkg::Util::Net.remote_execute(Pkg::Config.staging_server, command)
+          command = "sudo su - rsync --command 'rsync --verbose -a --exclude '*.html' --delete /opt/repo-s3-stage/repositories/#{repo}.puppetlabs.com/ rsync@#{rsync_server}:/opt/repository/#{repo}'"
+          Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.staging_server, command)
         end
       end
     end
    end
   end
-
-  desc "Remotely link nightly shipped gems to latest versions on #{Pkg::Config.gem_host}"
-  task link_nightly_shipped_gems_to_latest: 'pl:fetch' do
-    Pkg::Config.gemversion = Pkg::Util::Version.extended_dot_version
-
-    remote_path = Pkg::Config.nonfinal_gem_path
-    gems = FileList['pkg/*.gem'].map! { |path| path.gsub!('pkg/', '') }
-    command = %(cd #{remote_path}; )
-
-    command += gems.map! do |gem_name|
-      %(sudo ln -sf #{gem_name} #{gem_name.gsub(Pkg::Config.gemversion, 'latest')})
-    end.join(';')
-
-    command += %(; sync)
-
-    Pkg::Util::Net.remote_execute(Pkg::Config.gem_host, command)
-  end
  end

   desc "Ship mocked rpms to #{Pkg::Config.yum_staging_server}"
   task ship_rpms: 'pl:fetch' do
-    Pkg::Util::
-
+    if Pkg::Util::Version.final?
+      path = Pkg::Config.yum_repo_path
+    else
+      path = Pkg::Config.nonfinal_yum_repo_path || Pkg::Config.yum_repo_path
+    end
+    Pkg::Util::Ship.ship_pkgs(['pkg/**/*.rpm', 'pkg/**/*.srpm'], Pkg::Config.yum_staging_server, path)

-
-
-
+    # I really don't care which one we grab, it just has to be some supported
+    # version and architecture from the `el` hash. So here we're just grabbing
+    # the first one, parsing out some info, and breaking out of the loop. Not
+    # elegant, I know, but effective.
+    Pkg::Platforms::PLATFORM_INFO['el'].each do |key, value|
+      generic_platform_tag = "el-#{key}-#{value[:architectures][0]}"
+      Pkg::Util::Ship.create_rolling_repo_link(generic_platform_tag, Pkg::Config.yum_staging_server, path)
+      break
+    end
   end

   desc "Ship cow-built debs to #{Pkg::Config.apt_signing_server}"
   task ship_debs: 'pl:fetch' do
-    Pkg::Util::
-
+    if Pkg::Util::Version.final?
+      staging_path = Pkg::Config.apt_repo_staging_path
+    else
+      staging_path = Pkg::Config.nonfinal_apt_repo_staging_path || Pkg::Config.apt_repo_staging_path
+    end
+    Pkg::Util::Ship.ship_pkgs(['pkg/**/*.debian.tar.gz', 'pkg/**/*.orig.tar.gz', 'pkg/**/*.dsc', 'pkg/**/*.deb', 'pkg/**/*.changes'], Pkg::Config.apt_signing_server, staging_path, chattr: false)

-
-
-
+    # We need to iterate through all the supported platforms here because of
+    # how deb repos are set up. Each codename will have its own link from the
+    # current versioned repo (i.e., puppet5) to the rolling repo. The one thing
+    # we don't care about is architecture, so we just grab the first supported
+    # architecture for the codename we're working with at the moment.
+    Pkg::Platforms.codenames.each do |codename|
+      Pkg::Util::Ship.create_rolling_repo_link(Pkg::Platforms.codename_to_tags(codename)[0], Pkg::Config.apt_signing_server, staging_path)
+    end
   end

   desc 'Ship built gem to rubygems.org, internal Gem mirror, and public file server'
@@ -305,6 +302,7 @@ namespace :pl do
       puts "Do you want to start shipping the rubygem '#{gem_file}'?"
       next unless Pkg::Util.ask_yes_or_no
       Rake::Task['pl:ship_gem_to_rubygems'].execute(file: gem_file)
+      Rake::Task['pl:ship_gem_to_internal_mirror'].execute(file: gem_file)
     end

     Rake::Task['pl:ship_gem_to_downloads'].invoke
@@ -314,22 +312,6 @@ namespace :pl do
     end
   end

-  desc 'Ship built gem to internal Gem mirror and public nightlies file server'
-  task ship_nightly_gem: 'pl:fetch' do
-    # We want to ship a Gem only for projects that build gems, so
-    # all of the Gem shipping tasks are wrapped in an `if`.
-    if Pkg::Config.build_gem
-      fail 'Value `Pkg::Config.gem_host` not defined, skipping nightly ship' unless Pkg::Config.gem_host
-      fail 'Value `Pkg::Config.nonfinal_gem_path` not defined, skipping nightly ship' unless Pkg::Config.nonfinal_gem_path
-      FileList['pkg/*.gem'].each do |gem_file|
-        Pkg::Gem.ship_to_internal_mirror(gem_file)
-      end
-      Pkg::Util::Execution.retry_on_fail(times: 3) do
-        Pkg::Util::Ship.ship_gem('pkg', Pkg::Config.nonfinal_gem_path, platform_independent: true)
-      end
-    end
-  end
-
   desc 'Ship built gem to rubygems.org'
   task :ship_gem_to_rubygems, [:file] => 'pl:fetch' do |_t, args|
     puts "Do you want to ship #{args[:file]} to rubygems.org?"
@@ -341,22 +323,50 @@ namespace :pl do
     end
   end

-  desc "Ship built gems to
-  task :
-
+  desc "Ship built gems to internal Gem server (#{Pkg::Config.internal_gem_host})"
+  task :ship_gem_to_internal_mirror, [:file] => 'pl:fetch' do |_t, args|
+    unless Pkg::Config.internal_gem_host
+      warn 'Value `Pkg::Config.internal_gem_host` not defined; skipping internal ship'
+    end
+
+    puts "Do you want to ship #{args[:file]} to the internal stickler server(#{Pkg::Config.internal_stickler_host})?"
+    if Pkg::Util.ask_yes_or_no
+      puts "Shipping gem #{args[:file]} to internal Gem server (#{Pkg::Config.internal_stickler_host})"
       Pkg::Util::Execution.retry_on_fail(times: 3) do
-        Pkg::
+        Pkg::Gem.ship_to_stickler(args[:file])
       end
-
+    end
+
+    puts "Do you want to ship #{args[:file]} to the internal nexus server(#{Pkg::Config.internal_nexus_host})?"
+    if Pkg::Util.ask_yes_or_no
+      puts "Shipping gem #{args[:file]} to internal Gem server (#{Pkg::Config.internal_nexus_host})"
+      Pkg::Util::Execution.retry_on_fail(times: 3) do
+        Pkg::Gem.ship_to_nexus(args[:file])
+      end
+    end
+  end
+
+  desc "Ship built gems to public Downloads server (#{Pkg::Config.gem_host})"
+  task :ship_gem_to_downloads => 'pl:fetch' do
+    unless Pkg::Config.gem_host
       warn 'Value `Pkg::Config.gem_host` not defined; skipping shipping to public Download server'
     end
+
+    Pkg::Util::Execution.retry_on_fail(times: 3) do
+      Pkg::Util::Ship.ship_pkgs(['pkg/*.gem*'], Pkg::Config.gem_host, Pkg::Config.gem_path, platform_independent: true)
+    end
   end

   desc "Ship svr4 packages to #{Pkg::Config.svr4_host}"
   task :ship_svr4 do
     Pkg::Util::Execution.retry_on_fail(:times => 3) do
       if File.directory?("pkg/solaris/10")
-        Pkg::Util::
+        if Pkg::Util::Version.final?
+          path = Pkg::Config.svr4_path
+        else
+          path = Pkg::Config.nonfinal_svr4_path || Pkg::Config.svr4_path
+        end
+        Pkg::Util::Ship.ship_pkgs(['pkg/**/*.pkg.gz'], Pkg::Config.svr4_host, path)
       end
     end
   end
@@ -365,39 +375,87 @@ namespace :pl do
   task :ship_p5p do
     Pkg::Util::Execution.retry_on_fail(:times => 3) do
       if File.directory?("pkg/solaris/11")
-        Pkg::Util::
+        if Pkg::Util::Version.final?
+          path = Pkg::Config.p5p_path
+        else
+          path = Pkg::Config.nonfinal_p5p_path || Pkg::Config.p5p_path
+        end
+        Pkg::Util::Ship.ship_pkgs(['pkg/**/*.p5p'], Pkg::Config.p5p_host, path)
       end
     end
   end

   desc "ship apple dmg to #{Pkg::Config.dmg_staging_server}"
   task ship_dmg: 'pl:fetch' do
-
-
-
+    # TODO: realistically, this shouldn't be here. This block needs to be
+    # removed, but only when we can successfully modify all instances of
+    # this to be set to '/opt/downloads'. In the meantime, we need to write
+    # this terrible workaround to ensure backward compatibility.
+    #
+    # I'm so sorry
+    # ~MAS 2017-08-14
+    if Pkg::Config.dmg_path == "/opt/downloads/mac"
+      path = "/opt/downloads"
+    else
+      path = Pkg::Config.dmg_path
+    end
+    path = Pkg::Config.nonfinal_dmg_path if Pkg::Config.nonfinal_dmg_path && !Pkg::Util::Version.final?

-
-
-
-
+    Pkg::Util::Ship.ship_pkgs(['pkg/**/*.dmg'], Pkg::Config.dmg_staging_server, path)
+
+    # I really don't care which one we grab, it just has to be some supported
+    # version and architecture from the `osx` hash. So here we're just grabbing
+    # the first one, parsing out some info, and breaking out of the loop. Not
+    # elegant, I know, but effective.
+    Pkg::Platforms::PLATFORM_INFO['osx'].each do |key, value|
+      generic_platform_tag = "osx-#{key}-#{value[:architectures][0]}"
+      Pkg::Util::Ship.create_rolling_repo_link(generic_platform_tag, Pkg::Config.dmg_staging_server, path)
+      break
+    end
+
+    Pkg::Platforms.platform_tags_for_package_format('dmg').each do |platform_tag|
+      # TODO remove the PC1 links when we no longer need to maintain them
+      _, version, arch = Pkg::Platforms.parse_platform_tag(platform_tag)
+      Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', "/opt/downloads/mac/#{version}/PC1/#{arch}", 'dmg')
+
+      # Create the latest symlink for the current supported repo
+      Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', Pkg::Paths.artifacts_path(platform_tag, path), 'dmg')
+    end
   end

   desc "ship Arista EOS swix packages and signatures to #{Pkg::Config.swix_staging_server}"
   task ship_swix: 'pl:fetch' do
-
-
-
+    # TODO: realistically, this shouldn't be here. This block needs to be
+    # removed, but only when we can successfully modify all instances of
+    # this to be set to '/opt/downloads'. In the meantime, we need to write
+    # this terrible workaround to ensure backward compatibility.
+    #
+    # I'm so sorry
+    # ~MAS 2017-08-14
+    if Pkg::Config.swix_path == "/opt/downloads/eos"
+      path = "/opt/downloads"
+    else
+      path = Pkg::Config.swix_path
+    end
+    path = Pkg::Config.nonfinal_swix_path if Pkg::Config.nonfinal_swix_path && !Pkg::Util::Version.final?
+
+    Pkg::Util::Ship.ship_pkgs(['pkg/**/*.swix*'], Pkg::Config.swix_staging_server, path)

-
-
-
-
+    # I really don't care which one we grab, it just has to be some supported
+    # version and architecture from the `eos` hash. So here we're just grabbing
+    # the first one, parsing out some info, and breaking out of the loop. Not
+    # elegant, I know, but effective.
+    Pkg::Platforms::PLATFORM_INFO['eos'].each do |key, value|
+      generic_platform_tag = "eos-#{key}-#{value[:architectures][0]}"
+      Pkg::Util::Ship.create_rolling_repo_link(generic_platform_tag, Pkg::Config.swix_staging_server, path)
+      break
+    end
   end

   desc "ship tarball and signature to #{Pkg::Config.tar_staging_server}"
   task ship_tar: 'pl:fetch' do
     if Pkg::Config.build_tar
-      Pkg::Util::Ship.
+      Pkg::Util::Ship.ship_pkgs(['pkg/*.tar.gz*'], Pkg::Config.tar_staging_server, Pkg::Config.tarball_path, excludes: ['signing_bundle', 'packaging-bundle'], platform_independent: true)
     end
   end

@@ -413,19 +471,41 @@ namespace :pl do

   desc "Ship MSI packages to #{Pkg::Config.msi_staging_server}"
   task ship_msi: 'pl:fetch' do
-
-
-
+    # TODO: realistically, this shouldn't be here. This block needs to be
+    # removed, but only when we can successfully modify all instances of
+    # this to be set to '/opt/downloads'. In the meantime, we need to write
+    # this terrible workaround to ensure backward compatibility.
+    #
+    # I'm so sorry
+    # ~MAS 2017-08-14
+    if Pkg::Config.msi_path == "/opt/downloads/windows"
+      path = "/opt/downloads"
+    else
+      path = Pkg::Config.msi_path
+    end
+    path = Pkg::Config.nonfinal_msi_path if Pkg::Config.nonfinal_msi_path && !Pkg::Util::Version.final?

-
-  task ship_nightly_msi: 'pl:fetch' do
-    path = Pkg::Paths.remote_repo_base(package_format: 'msi', nonfinal: true)
-    Pkg::Util::Ship.ship_msi('pkg', path, excludes: ["#{Pkg::Config.project}-x(86|64).msi"], nonfinal: true)
-  end
+    Pkg::Util::Ship.ship_pkgs(['pkg/**/*.msi'], Pkg::Config.msi_staging_server, path, excludes: ["#{Pkg::Config.project}-x(86|64).msi"])

-
-
-
+    # I really don't care which one we grab, it just has to be some supported
+    # version and architecture from the `windows` hash. So here we're just grabbing
+    # the first one, parsing out some info, and breaking out of the loop. Not
+    # elegant, I know, but effective.
+    Pkg::Platforms::PLATFORM_INFO['windows'].each do |key, value|
+      generic_platform_tag = "windows-#{key}-#{value[:architectures][0]}"
+      Pkg::Util::Ship.create_rolling_repo_link(generic_platform_tag, Pkg::Config.msi_staging_server, path)
+
+      # Create the symlinks for the latest supported repo
+      Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', Pkg::Paths.artifacts_path(generic_platform_tag, path), 'msi', arch: 'x64')
+      Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', Pkg::Paths.artifacts_path(generic_platform_tag, path), 'msi', arch: 'x86')
+      break
+    end
+
+    # We provide symlinks to the latest package in a given directory. This
+    # allows users to upgrade more easily to the latest version that we release
+    # TODO remove the links to PC1 when we no longer ship to that repo
+    Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', '/opt/downloads/windows', 'msi', arch: 'x64')
+    Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', '/opt/downloads/windows', 'msi', arch: 'x86')
   end

   desc 'UBER ship: ship all the things in pkg'
@@ -448,16 +528,6 @@ namespace :pl do
     end
   end

-  desc 'Create the rolling repo links'
-  task create_repo_links: 'pl:fetch' do
-    Pkg::Util::Ship.create_rolling_repo_links
-  end
-
-  desc 'Create rolling repo links for nightlies'
-  task create_nightly_repo_links: 'pl:fetch' do
-    Pkg::Util::Ship.create_rolling_repo_links(true)
-  end
-
   desc 'Test out the ship requirements'
   task ship_check: 'pl:fetch' do
     errs = []
@@ -503,17 +573,29 @@ namespace :pl do
     # Check for ability to sign OSX. Should just need to be able to unlock keychain
     begin
       unless ssh_errs.include?(Pkg::Config.osx_signing_server)
-        Pkg::Util::Net.
-          Pkg::Config.osx_signing_server,
-          %(/usr/bin/security -q unlock-keychain -p "#{Pkg::Config.osx_signing_keychain_pw}" "#{Pkg::Config.osx_signing_keychain}"),
-          { extra_options: '-oBatchMode=yes' }
-        )
+        Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.osx_signing_server, %(/usr/bin/security -q unlock-keychain -p "#{Pkg::Config.osx_signing_keychain_pw}" "#{Pkg::Config.osx_signing_keychain}"), false, '-oBatchMode=yes')
       end
     rescue
       errs << "Unlocking the OSX keychain failed! Check the password in your .bashrc on #{Pkg::Config.osx_signing_server}"
     end

     if Pkg::Config.build_gem
+      # Do we have stickler and nexus?
+      if Pkg::Util::Misc.check_gem('stickler')
+        `stickler list --server #{Pkg::Config.internal_stickler_host} > /dev/null 2>&1`
+        unless $CHILD_STATUS.zero?
+          errs << "Listing gems at the stickler server #{Pkg::Config.internal_stickler_host} failed!"
+        end
+      else
+        errs << 'gem stickler not found'
+      end
+
+      errs << 'gem nexus not found' unless Pkg::Util::Misc.check_gem('nexus')
+      `gem list --source #{Pkg::Config.internal_nexus_host} > /dev/null 2>&1`
+      unless $CHILD_STATUS.zero?
+        errs << "Listing gems at the nexus server #{Pkg::Config.internal_nexus_host} failed!"
+      end
+
       # Do we have rubygems access set up
       if Pkg::Util::File.file_exists?("#{ENV['HOME']}/.gem/credentials")
         # Do we have permissions to publish this gem on rubygems
@@ -534,7 +616,6 @@ namespace :pl do
         puts " * #{err}"
       end
     end
-
   end

   # It is odd to namespace this ship task under :jenkins, but this task is
@@ -547,29 +628,17 @@ namespace :pl do
   desc 'ship pkg directory contents to artifactory'
   task :ship_to_artifactory, :local_dir do |_t, args|
     Pkg::Util::RakeUtils.invoke_task('pl:fetch')
-    unless Pkg::Config.project
-      fail "You must set the 'project' in build_defaults.yaml or with the 'PROJECT_OVERRIDE' environment variable."
-    end
     artifactory = Pkg::ManageArtifactory.new(Pkg::Config.project, Pkg::Config.ref)

     local_dir = args.local_dir || 'pkg'
     Dir.glob("#{local_dir}/**/*").reject { |e| File.directory? e }.each do |artifact|
-
-      artifactory.deploy_package(artifact)
-      elsif artifactory.package_exists_on_artifactory?(artifact)
-        warn "Attempt to upload '#{artifact}' failed. Package already exists!"
-      else
-        artifactory.deploy_package(artifact)
-      end
+      artifactory.deploy_package(artifact)
     end
   end

   desc 'Ship pkg directory contents to distribution server'
   task :ship, :target, :local_dir do |_t, args|
     Pkg::Util::RakeUtils.invoke_task('pl:fetch')
-    unless Pkg::Config.project
-      fail "You must set the 'project' in build_defaults.yaml or with the 'PROJECT_OVERRIDE' environment variable."
-    end
     target = args.target || 'artifacts'
     local_dir = args.local_dir || 'pkg'
     project_basedir = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
@@ -589,14 +658,12 @@ namespace :pl do
       cp(ezbake_yaml, File.join(local_dir, "#{Pkg::Config.ref}.ezbake.manifest.yaml"))
     end

-    #
+    # We are starting to collect additional metadata which contains
     # information such as git ref and dependencies that are needed at build
-    # time. If
-
-
-
-      target_file = File.join(local_dir, "#{Pkg::Config.ref}.#{File.basename(source_file)}")
-      cp(source_file, target_file)
+    # time. If this file exists we will make it available for downstream.
+    build_data_json = File.join("ext", "build_metadata.json")
+    if File.exists?(build_data_json)
+      cp(build_data_json, File.join(local_dir, "#{Pkg::Config.ref}.build_metadata.json"))
     end

     # Sadly, the packaging repo cannot yet act on its own, without living
@@ -636,32 +703,25 @@ namespace :pl do
     # building to. Once we move the Metadata about the output location in
     # to one source of truth we can refactor this to use that to search
     # -Sean P. M. 08/12/16
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      # Using symlinks instead of hard links causes failures when we try
-      # to set these files to be immutable. Also be wary of whether the
-      # linking utility you're using expects the source path to be relative
-      # to the link target or pwd.
-      #
-      FileUtils.ln(package_filename, link_filename)
-    end
+    packages = Dir["#{local_dir}/windows/*"]
+    ['x86', 'x64'].each do |arch|
+      package_version = Pkg::Util::Git.describe.tr('-', '.')
+      package_filename = File.join(local_dir, 'windows', "#{Pkg::Config.project}-#{package_version}-#{arch}.msi")
+      link_filename = File.join(local_dir, 'windows', "#{Pkg::Config.project}-#{arch}.msi")
+
+      next unless !packages.include?(link_filename) && packages.include?(package_filename)
+      # Dear future code spelunkers:
+      # Using symlinks instead of hard links causes failures when we try
+      # to set these files to be immutable. Also be wary of whether the
+      # linking utility you're using expects the source path to be relative
+      # to the link target or pwd.
+      #
+      FileUtils.ln(package_filename, link_filename)
     end

     Pkg::Util::Execution.retry_on_fail(times: 3) do
-      Pkg::Util::Net.
-      Pkg::Util::Net.
+      Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.distribution_server, "mkdir --mode=775 -p #{project_basedir}")
+      Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.distribution_server, "mkdir -p #{artifact_dir}")
       Pkg::Util::Net.rsync_to("#{local_dir}/", Pkg::Config.distribution_server, "#{artifact_dir}/", extra_flags: ['--ignore-existing', '--exclude repo_configs'])
     end
