packaging 0.101.0 → 0.105.0

@@ -15,13 +15,6 @@ Header V4 RSA/SHA256 Signature, key ID ef8d349f: NOKEY
  Header SHA1 digest: OK (3cb7e9861e8bc09783a1b6c8d88243a3c16daa81)
  V4 RSA/SHA256 Signature, key ID ef8d349f: NOKEY
  MD5 digest: OK (d5f06ba2a9053de532326d0659ec0d11)
- DOC
- }
- let(:el5_signed_response) { <<-DOC
- Header V3 RSA/SHA1 signature: NOKEY, key ID ef8d349f
- Header SHA1 digest: OK (12ea7bd578097a3aecc5deb8ada6aca6147d68e3)
- V3 RSA/SHA1 signature: NOKEY, key ID ef8d349f
- MD5 digest: OK (27353c6153068a3c9902fcb4ad5b8b92)
  DOC
  }
  let(:sles12_signed_response) { <<-DOC
@@ -40,10 +33,6 @@ DOC
  allow(Pkg::Sign::Rpm).to receive(:`).and_return(el7_signed_response)
  expect(Pkg::Sign::Rpm.has_sig?(rpm)).to be true
  end
- it 'returns true if rpm has been signed (el5)' do
- allow(Pkg::Sign::Rpm).to receive(:`).and_return(el5_signed_response)
- expect(Pkg::Sign::Rpm.has_sig?(rpm)).to be true
- end
  it 'returns true if rpm has been signed (sles12)' do
  allow(Pkg::Sign::Rpm).to receive(:`).and_return(sles12_signed_response)
  expect(Pkg::Sign::Rpm.has_sig?(rpm)).to be true
@@ -68,7 +57,6 @@ DOC
  "#{rpm_directory}/aix/7.1/PC1/ppc/puppet-agent-5.5.3-1.aix7.1.ppc.rpm",
  ] }
  let(:v3_rpms) { [
- "#{rpm_directory}/el/5/PC1/i386/puppet-agent-5.5.3-1.el5.i386.rpm",
  "#{rpm_directory}/sles/11/PC1/x86_64/puppet-agent-5.5.3-1.sles11.x86_64.rpm",
  ] }
  let(:v4_rpms) { [
@@ -51,7 +51,6 @@ describe '#Pkg::Util::Ship' do
  pkg/sles/12/puppet6/ppc64le/puppet-agent-6.19.0-1.sles12.ppc64le.rpm
  pkg/sles/12/puppet6/x86_64/puppet-agent-6.19.0-1.sles12.x86_64.rpm
  pkg/sles/15/puppet6/x86_64/puppet-agent-6.19.0-1.sles15.x86_64.rpm
- pkg/apple/10.14/puppet6/x86_64/puppet-agent-6.19.0-1.osx10.14.dmg
  pkg/apple/10.15/puppet6/x86_64/puppet-agent-6.19.0-1.osx10.15.dmg
  pkg/fedora/32/puppet6/x86_64/puppet-agent-6.19.0-1.fc32.x86_64.rpm
  pkg/windows/puppet-agent-6.19.0-x64.msi
@@ -71,7 +70,6 @@ describe '#Pkg::Util::Ship' do
  pkg/puppet6/sles/12/ppc64le/puppet-agent-6.19.0-1.sles12.ppc64le.rpm
  pkg/puppet6/sles/12/x86_64/puppet-agent-6.19.0-1.sles12.x86_64.rpm
  pkg/puppet6/sles/15/x86_64/puppet-agent-6.19.0-1.sles15.x86_64.rpm
- pkg/mac/puppet6/10.14/x86_64/puppet-agent-6.19.0-1.osx10.14.dmg
  pkg/mac/puppet6/10.15/x86_64/puppet-agent-6.19.0-1.osx10.15.dmg
  pkg/puppet6/fedora/32/x86_64/puppet-agent-6.19.0-1.fc32.x86_64.rpm
  pkg/windows/puppet6/puppet-agent-6.19.0-x64.msi
data/tasks/deb_repos.rake CHANGED
@@ -10,6 +10,7 @@
  #
  namespace :pl do
  namespace :jenkins do
+ # The equivalent to invoking this task is calling Pkg::Util::Repo.deb_repos
  desc "Create apt repositories of build DEB packages for this SHA on the distributions erver"
  task :deb_repos => "pl:fetch" do
  Pkg::Deb::Repo.create_repos
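
The comment added here documents that the rake task has become a thin wrapper. A minimal sketch of the two invocation styles it describes (assuming the packaging repo is loaded in a project and build defaults can be fetched; the shell line is illustrative):

    # From a shell:
    #   rake pl:jenkins:deb_repos
    # From Ruby, per the comment added above (pl:fetch loads the build params first):
    Pkg::Util::RakeUtils.invoke_task('pl:fetch')
    Pkg::Util::Repo.deb_repos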
data/tasks/fetch.rake CHANGED
@@ -26,6 +26,7 @@ end
  # It uses curl to download the files, and places them in a temporary
  # directory, e.g. /tmp/somedirectory/{project,team}/Pkg::Config.builder_data_file
  namespace :pl do
+ # The equivalent to invoking this task is calling Pkg::Util::File.fetch
  desc "retrieve build-data configurations to override/extend local build_defaults"
  task :fetch do
  # Remove .packaging directory from old-style extras loading
data/tasks/jenkins.rake CHANGED
@@ -286,7 +286,7 @@ namespace :pl do
  ship_nightly_msi
  )
  tasks.map { |t| "pl:#{t}" }.each do |t|
- puts "Running #{t} . . ."
+ puts "Running #{t}:"
  Rake::Task[t].invoke
  end
  end
@@ -6,6 +6,7 @@
  # generic tasks with data not generally useful outside the
  # PL Release team
  namespace :pl do
+ # The equivalent to invoking this task is calling Pkg::Util::File.load_extras(temp_directory)
  task :load_extras, :tempdir do |t, args|
  unless ENV['PARAMS_FILE'] && ENV['PARAMS_FILE'] != ''
  tempdir = args.tempdir
data/tasks/rpm_repos.rake CHANGED
@@ -11,6 +11,7 @@
  #
  namespace :pl do
  namespace :jenkins do
+ # The equivalent to invoking this task is calling Pkg::Util::Repo.rpm_repos
  desc "Create yum repositories of built RPM packages for this SHA on the distribution server"
  task :rpm_repos => "pl:fetch" do
  Pkg::Rpm::Repo.create_remote_repos
data/tasks/ship.rake CHANGED
@@ -1,44 +1,77 @@
  namespace :pl do
  namespace :remote do
- # These hacky bits execute a pre-existing rake task on the Pkg::Config.apt_host
- # The rake task takes packages in a specific directory and freights them
- # to various target yum and apt repositories based on their specific type
- # e.g., final vs devel vs PE vs FOSS packages
+ # Repo updates are the ways we convince the various repos to regenerate any repo-based
+ # metadata.
+ #
+ # It is broken up into various pieces and types to try to avoid too much redundant
+ # behavior.

  desc "Update '#{Pkg::Config.repo_name}' yum repository on '#{Pkg::Config.yum_staging_server}'"
  task update_yum_repo: 'pl:fetch' do
  command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository/Rakefile mk_repo'
  $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
- if Pkg::Util.ask_yes_or_no
- Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => Pkg::Paths.yum_repo_name, :repo_path => Pkg::Config.yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
- end
+ next unless Pkg::Util.ask_yes_or_no
+
+ Pkg::Repo.update_repo(
+ Pkg::Config.yum_staging_server,
+ command,
+ {
+ repo_name: Pkg::Paths.yum_repo_name,
+ repo_path: Pkg::Config.yum_repo_path,
+ repo_host: Pkg::Config.yum_staging_server
+ }
+ )
  end

  desc "Update all final yum repositories on '#{Pkg::Config.yum_staging_server}'"
  task update_all_final_yum_repos: 'pl:fetch' do
  command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository/Rakefile mk_repo'
  $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
- if Pkg::Util.ask_yes_or_no
- Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => '', :repo_path => Pkg::Config.yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
- end
+ next unless Pkg::Util.ask_yes_or_no
+
+ Pkg::Repo.update_repo(
+ Pkg::Config.yum_staging_server,
+ command,
+ {
+ repo_name: '',
+ repo_path: Pkg::Config.yum_repo_path,
+ repo_host: Pkg::Config.yum_staging_server
+ }
+ )
  end

  desc "Update '#{Pkg::Config.nonfinal_repo_name}' nightly yum repository on '#{Pkg::Config.yum_staging_server}'"
  task update_nightlies_yum_repo: 'pl:fetch' do
  command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository-nightlies/Rakefile mk_repo'
  $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
- if Pkg::Util.ask_yes_or_no
- Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => Pkg::Config.nonfinal_repo_name, :repo_path => Pkg::Config.nonfinal_yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
- end
+ next unless Pkg::Util.ask_yes_or_no
+
+ Pkg::Repo.update_repo(
+ Pkg::Config.yum_staging_server,
+ command,
+ {
+ repo_name: Pkg::Config.nonfinal_repo_name,
+ repo_path: Pkg::Config.nonfinal_yum_repo_path,
+ repo_host: Pkg::Config.yum_staging_server
+ }
+ )
  end

  desc "Update all nightly yum repositories on '#{Pkg::Config.yum_staging_server}'"
  task update_all_nightlies_yum_repos: 'pl:fetch' do
  command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository-nightlies/Rakefile mk_repo'
  $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
- if Pkg::Util.ask_yes_or_no
- Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => '', :repo_path => Pkg::Config.nonfinal_yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
- end
+ next unless Pkg::Util.ask_yes_or_no
+
+ Pkg::Repo.update_repo(
+ Pkg::Config.yum_staging_server,
+ command,
+ {
+ repo_name: '',
+ repo_path: Pkg::Config.nonfinal_yum_repo_path,
+ repo_host: Pkg::Config.yum_staging_server
+ }
+ )
  end

  task freight: :update_apt_repo
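
The hunk above replaces each single-line `Pkg::Repo.update_repo` call with a guard clause (`next unless Pkg::Util.ask_yes_or_no`) and a multi-line call in Ruby 1.9 hash syntax. A sketch of the resulting call shape, taken from the final-repo task (all values come from the project's build defaults; an empty `repo_name`, as in the 'update all' tasks, targets everything under `repo_path`):

    command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository/Rakefile mk_repo'
    Pkg::Repo.update_repo(
      Pkg::Config.yum_staging_server,    # host the repo command runs on
      command,
      {
        repo_name: Pkg::Paths.yum_repo_name,
        repo_path: Pkg::Config.yum_repo_path,
        repo_host: Pkg::Config.yum_staging_server
      }
    )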
@@ -46,17 +79,35 @@ namespace :pl do
  desc "Update remote apt repository on '#{Pkg::Config.apt_signing_server}'"
  task update_apt_repo: 'pl:fetch' do
  $stdout.puts "Really run remote repo update on '#{Pkg::Config.apt_signing_server}'? [y,n]"
- if Pkg::Util.ask_yes_or_no
- Pkg::Repo.update_repo(Pkg::Config.apt_signing_server, Pkg::Config.apt_repo_command, { :repo_name => Pkg::Paths.apt_repo_name, :repo_path => Pkg::Config.apt_repo_path, :repo_host => Pkg::Config.apt_host, :repo_url => Pkg::Config.apt_repo_url })
- end
+ next unless Pkg::Util.ask_yes_or_no
+
+ Pkg::Repo.update_repo(
+ Pkg::Config.apt_signing_server,
+ Pkg::Config.apt_repo_command,
+ {
+ repo_name: Pkg::Paths.apt_repo_name,
+ repo_path: Pkg::Config.apt_repo_path,
+ repo_host: Pkg::Config.apt_host,
+ repo_url: Pkg::Config.apt_repo_url
+ }
+ )
  end

  desc "Update nightlies apt repository on '#{Pkg::Config.apt_signing_server}'"
  task update_nightlies_apt_repo: 'pl:fetch' do
  $stdout.puts "Really run remote repo update on '#{Pkg::Config.apt_signing_server}'? [y,n]"
- if Pkg::Util.ask_yes_or_no
- Pkg::Repo.update_repo(Pkg::Config.apt_signing_server, Pkg::Config.nonfinal_apt_repo_command, { :repo_name => Pkg::Config.nonfinal_repo_name, :repo_path => Pkg::Config.nonfinal_apt_repo_path, :repo_host => Pkg::Config.apt_host, :repo_url => Pkg::Config.apt_repo_url })
- end
+ next unless Pkg::Util.ask_yes_or_no
+
+ Pkg::Repo.update_repo(
+ Pkg::Config.apt_signing_server,
+ Pkg::Config.nonfinal_apt_repo_command,
+ {
+ repo_name: Pkg::Config.nonfinal_repo_name,
+ repo_path: Pkg::Config.nonfinal_apt_repo_path,
+ repo_host: Pkg::Config.apt_host,
+ repo_url: Pkg::Config.apt_repo_url
+ }
+ )
  end

  desc "Update apt and yum repos"
@@ -74,32 +125,31 @@ namespace :pl do
  desc "Update remote ips repository on #{Pkg::Config.ips_host}"
  task :update_ips_repo => 'pl:fetch' do
  if Dir['pkg/ips/pkgs/**/*'].empty? && Dir['pkg/solaris/11/**/*'].empty?
- $stdout.puts "There aren't any p5p packages in pkg/ips/pkgs or pkg/solaris/11. Maybe something went wrong?"
- else
+ $stdout.puts "Error: there aren't any p5p packages in pkg/ips/pkgs or pkg/solaris/11."
+ next
+ end

- if !Dir['pkg/ips/pkgs/**/*'].empty?
- source_dir = 'pkg/ips/pkgs/'
- else
- source_dir = 'pkg/solaris/11/'
- end
+ source_dir = 'pkg/solaris/11/'
+ source_dir = 'pkg/ips/pkgs/' unless Dir['pkg/ips/pkgs/**/*'].empty?

- tmpdir, _ = Pkg::Util::Net.remote_execute(
- Pkg::Config.ips_host,
- 'mktemp -d -p /var/tmp',
- { capture_output: true }
- )
- tmpdir.chomp!
+ tmpdir, _ = Pkg::Util::Net.remote_execute(
+ Pkg::Config.ips_host,
+ 'mktemp -d -p /var/tmp',
+ { capture_output: true }
+ )
+ tmpdir.chomp!

- Pkg::Util::Net.rsync_to(source_dir, Pkg::Config.ips_host, tmpdir)
+ Pkg::Util::Net.rsync_to(source_dir, Pkg::Config.ips_host, tmpdir)

- remote_cmd = %(for pkg in #{tmpdir}/*.p5p; do
- sudo pkgrecv -s $pkg -d #{Pkg::Config.ips_path} '*';
+ remote_cmd = %(for pkg in #{tmpdir}/*.p5p; do
+ sudo pkgrecv -s $pkg -d #{Pkg::Config.ips_path} '*';
  done)

- Pkg::Util::Net.remote_execute(Pkg::Config.ips_host, remote_cmd)
- Pkg::Util::Net.remote_execute(Pkg::Config.ips_host, "sudo pkgrepo refresh -s #{Pkg::Config.ips_path}")
- Pkg::Util::Net.remote_execute(Pkg::Config.ips_host, "sudo /usr/sbin/svcadm restart svc:/application/pkg/server:#{Pkg::Config.ips_repo || 'default'}")
- end
+ Pkg::Util::Net.remote_execute(Pkg::Config.ips_host, remote_cmd)
+ Pkg::Util::Net.remote_execute(Pkg::Config.ips_host,
+ "sudo pkgrepo refresh -s #{Pkg::Config.ips_path}")
+ Pkg::Util::Net.remote_execute(Pkg::Config.ips_host,
+ "sudo /usr/sbin/svcadm restart svc:/application/pkg/server:#{Pkg::Config.ips_repo || 'default'}")
  end

  desc "Move dmg repos from #{Pkg::Config.dmg_staging_server} to #{Pkg::Config.dmg_host}"
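
For reference, with a hypothetical temp directory returned by `mktemp`, the `%(...)` string literal in the task above interpolates into a small shell loop that is executed on the IPS host before the repo refresh and service restart:

    tmpdir = '/var/tmp/tmp.abc123'   # hypothetical value from 'mktemp -d -p /var/tmp'
    remote_cmd = %(for pkg in #{tmpdir}/*.p5p; do
      sudo pkgrecv -s $pkg -d #{Pkg::Config.ips_path} '*';
    done)
    Pkg::Util::Net.remote_execute(Pkg::Config.ips_host, remote_cmd)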
@@ -235,6 +285,7 @@ namespace :pl do

  desc "Sync signed apt repos from #{Pkg::Config.apt_signing_server} to Google Cloud Platform"
  task :sync_apt_repo_to_gcp => 'pl:fetch' do
+ ssh = Pkg::Util::Tool.check_tool('ssh')
  target_site = 'apt.repos.puppetlabs.com'
  sync_command_puppet_6 = "#{GCP_REPO_SYNC} apt.repos.puppet.com puppet6"
  sync_command_puppet_7 = "#{GCP_REPO_SYNC} apt.repos.puppet.com puppet7"
@@ -243,8 +294,11 @@ namespace :pl do
  puts

  Pkg::Util::Execution.retry_on_fail(times: 3) do
- Pkg::Util::Net.remote_execute(Pkg::Config.apt_signing_server, sync_command_puppet_6)
- Pkg::Util::Net.remote_execute(Pkg::Config.apt_signing_server, sync_command_puppet_7)
+ %x(#{ssh} #{Pkg::Config.apt_signing_server} '/bin/bash -l -c "#{sync_command_puppet_6}"')
+ end
+
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
+ %x(#{ssh} #{Pkg::Config.apt_signing_server} '/bin/bash -l -c "#{sync_command_puppet_7}"')
  end
  end
  # Keep 'deploy' for backward compatibility
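
Two behavioral notes on the hunks above: the sync now shells out to `ssh` through a login shell instead of going through `Pkg::Util::Net.remote_execute`, and each puppet series gets its own retry block, so a retry of one no longer re-runs the other's sync. A sketch of one retry block as it now reads (the expanded command line in the comment is illustrative):

    ssh = Pkg::Util::Tool.check_tool('ssh')
    Pkg::Util::Execution.retry_on_fail(times: 3) do
      # roughly: ssh <apt_signing_server> '/bin/bash -l -c "<GCP_REPO_SYNC> apt.repos.puppet.com puppet6"'
      %x(#{ssh} #{Pkg::Config.apt_signing_server} '/bin/bash -l -c "#{sync_command_puppet_6}"')
    end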
@@ -292,6 +346,13 @@ namespace :pl do
  end
  end

+ ##
+ ## Here's where we start 'shipping' (old terminology) or 'staging' (current terminology)
+ ## by copying local 'pkg' directories to the staging server.
+ ##
+ ## Note, that for debs, we conflate 'staging server' with 'signing server' because we
+ ## must stage in th place where we sign.
+ ##
  desc "Ship mocked rpms to #{Pkg::Config.yum_staging_server}"
  task ship_rpms: 'pl:fetch' do
  Pkg::Util::Ship.ship_rpms('pkg', Pkg::Config.yum_repo_path)
@@ -302,6 +363,7 @@ namespace :pl do
  Pkg::Util::Ship.ship_rpms('pkg', Pkg::Config.nonfinal_yum_repo_path, nonfinal: true)
  end

+ ## This is the old-style deb shipping
  desc "Ship cow-built debs to #{Pkg::Config.apt_signing_server}"
  task ship_debs: 'pl:fetch' do
  Pkg::Util::Ship.ship_debs('pkg', Pkg::Config.apt_repo_staging_path, chattr: false)
@@ -309,7 +371,20 @@ namespace :pl do

  desc "Ship nightly debs to #{Pkg::Config.apt_signing_server}"
  task ship_nightly_debs: 'pl:fetch' do
- Pkg::Util::Ship.ship_debs('pkg', Pkg::Config.nonfinal_apt_repo_staging_path, chattr: false, nonfinal: true)
+ Pkg::Util::Ship.ship_debs(
+ 'pkg', Pkg::Config.nonfinal_apt_repo_staging_path, chattr: false, nonfinal: true)
+ end
+
+ ## This is the new-style apt stager
+ desc "Stage debs to #{Pkg::Config.apt_signing_server}"
+ task stage_stable_debs: 'pl:fetch' do
+ Pkg::Util::AptStagingServer.send_packages('pkg', 'stable')
+ end
+ task stage_debs: :stage_stable_debs
+
+ desc "Stage nightly debs to #{Pkg::Config.apt_signing_server}"
+ task stage_nightly_debs: 'pl:fetch' do
+ Pkg::Util::AptStagingServer.send_packages('pkg', 'nightly')
  end

  desc 'Ship built gem to rubygems.org, internal Gem mirror, and public file server'
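
The new `stage_*` tasks are one-liners around `Pkg::Util::AptStagingServer.send_packages`, with `stage_debs` aliased to the stable variant. A minimal sketch of the equivalent invocations (the directory and repo-suffix arguments are the literals used in the tasks above):

    # Via rake:
    #   rake pl:stage_stable_debs      # or: rake pl:stage_nightly_debs
    # Via Ruby, as the tasks do:
    Pkg::Util::AptStagingServer.send_packages('pkg', 'stable')    # stable packages
    Pkg::Util::AptStagingServer.send_packages('pkg', 'nightly')   # nightly packages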
@@ -325,12 +400,13 @@ namespace :pl do
  puts 'This will ship to an internal gem mirror, a public file server, and rubygems.org'
  puts "Do you want to start shipping the rubygem '#{gem_file}'?"
  next unless Pkg::Util.ask_yes_or_no
+
  Rake::Task['pl:ship_gem_to_rubygems'].execute(file: gem_file)
  end

  Rake::Task['pl:ship_gem_to_downloads'].invoke
  else
- $stderr.puts 'Not shipping development gem using odd_even strategy for the sake of your users.'
+ warn 'Not shipping development gem using odd_even strategy for the sake of your users.'
  end
  end
  end
@@ -342,6 +418,7 @@ namespace :pl do
  if Pkg::Config.build_gem
  fail 'Value `Pkg::Config.gem_host` not defined, skipping nightly ship' unless Pkg::Config.gem_host
  fail 'Value `Pkg::Config.nonfinal_gem_path` not defined, skipping nightly ship' unless Pkg::Config.nonfinal_gem_path
+
  FileList['pkg/*.gem'].each do |gem_file|
  Pkg::Gem.ship_to_internal_mirror(gem_file)
  end
@@ -451,22 +528,25 @@ namespace :pl do

  desc 'UBER ship: ship all the things in pkg'
  task uber_ship: 'pl:fetch' do
- if Pkg::Util.confirm_ship(FileList['pkg/**/*'])
- Rake::Task['pl:ship_rpms'].invoke
- Rake::Task['pl:ship_debs'].invoke
- Rake::Task['pl:ship_dmg'].invoke
- Rake::Task['pl:ship_swix'].invoke
- Rake::Task['pl:ship_nuget'].invoke
- Rake::Task['pl:ship_tar'].invoke
- Rake::Task['pl:ship_svr4'].invoke
- Rake::Task['pl:ship_p5p'].invoke
- Rake::Task['pl:ship_msi'].invoke
- add_shipped_metrics(pe_version: ENV['PE_VER'], is_rc: !Pkg::Util::Version.final?) if Pkg::Config.benchmark
- post_shipped_metrics if Pkg::Config.benchmark
- else
+ unless Pkg::Util.confirm_ship(FileList['pkg/**/*'])
  puts 'Ship canceled'
  exit
  end
+
+ Rake::Task['pl:ship_rpms'].invoke
+ Rake::Task['pl:ship_debs'].invoke
+ Rake::Task['pl:ship_dmg'].invoke
+ Rake::Task['pl:ship_swix'].invoke
+ Rake::Task['pl:ship_nuget'].invoke
+ Rake::Task['pl:ship_tar'].invoke
+ Rake::Task['pl:ship_svr4'].invoke
+ Rake::Task['pl:ship_p5p'].invoke
+ Rake::Task['pl:ship_msi'].invoke
+
+ if Pkg::Config.benchmark
+ add_shipped_metrics(pe_version: ENV['PE_VER'], is_rc: !Pkg::Util::Version.final?)
+ post_shipped_metrics
+ end
  end

  desc 'Create the rolling repo links'
@@ -530,7 +610,7 @@ namespace :pl do
  { extra_options: '-oBatchMode=yes' }
  )
  end
- rescue
+ rescue StandardError
  errs << "Unlocking the OSX keychain failed! Check the password in your .bashrc on #{Pkg::Config.osx_signing_server}"
  end

@@ -565,70 +645,62 @@ namespace :pl do
  # server path. That way we can separate out built artifacts from
  # signed/actually shipped artifacts e.g. $path/shipped/ or $path/artifacts.
  namespace :jenkins do
+ # The equivalent to invoking this task is calling Pkg::Util::Ship.ship_to_artifactory(local_directory, target)
  desc 'ship pkg directory contents to artifactory'
  task :ship_to_artifactory, :local_dir do |_t, args|
  Pkg::Util::RakeUtils.invoke_task('pl:fetch')
  unless Pkg::Config.project
- fail "You must set the 'project' in build_defaults.yaml or with the 'PROJECT_OVERRIDE' environment variable."
+ fail "Error: 'project' must be set in build_defaults.yaml or " \
+ "in the 'PROJECT_OVERRIDE' environment variable."
  end
+
  artifactory = Pkg::ManageArtifactory.new(Pkg::Config.project, Pkg::Config.ref)

  local_dir = args.local_dir || 'pkg'
- artifacts = Dir.glob("#{local_dir}/**/*").reject { |e| File.directory? e }
- artifacts.sort! do |a, b|
- if File.extname(a) =~ /(md5|sha\d+)/ && File.extname(b) !~ /(md5|sha\d+)/
- 1
- elsif File.extname(b) =~ /(md5|sha\d+)/ && File.extname(a) !~ /(md5|sha\d+)/
- -1
- else
- a <=> b
- end
- end
- artifacts.each do |artifact|
- if File.extname(artifact) == ".yaml" || File.extname(artifact) == ".json"
- artifactory.deploy_package(artifact)
- elsif artifactory.package_exists_on_artifactory?(artifact)
- warn "Attempt to upload '#{artifact}' failed. Package already exists!"
- else
+ Dir.glob("#{local_dir}/**/*").reject { |e| File.directory? e }.each do |artifact|
+ # Always deploy yamls and jsons
+ if artifact.end_with?('.yaml', '.json')
  artifactory.deploy_package(artifact)
+ next
  end
+
+ # Don't deploy if the package already exists
+ if artifactory.package_exists_on_artifactory?(artifact)
+ warn "Attempt to upload '#{artifact}' failed. Package already exists."
+ next
+ end
+
+ artifactory.deploy_package(artifact)
  end
  end

- desc 'Ship pkg directory contents to distribution server'
+ # The equivalent to invoking this task is calling Pkg::Util::Ship.ship(local_directory, target)
+ desc 'Ship "pkg" directory contents to distribution server'
  task :ship, :target, :local_dir do |_t, args|
  Pkg::Util::RakeUtils.invoke_task('pl:fetch')
  unless Pkg::Config.project
- fail "You must set the 'project' in build_defaults.yaml or with the 'PROJECT_OVERRIDE' environment variable."
+ fail "Error: 'project' must be set in build_defaults.yaml or " \
+ "in the 'PROJECT_OVERRIDE' environment variable."
  end
+
  target = args.target || 'artifacts'
  local_dir = args.local_dir || 'pkg'
- project_basedir = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
- artifact_dir = "#{project_basedir}/#{target}"
+ project_basedir = File.join(
+ Pkg::Config.jenkins_repo_path, Pkg::Config.project, Pkg::Config.ref
+ )
+ artifact_dir = File.join(project_basedir, target)

  # For EZBake builds, we also want to include the ezbake.manifest file to
  # get a snapshot of this build and all dependencies. We eventually will
  # create a yaml version of this file, but until that point we want to
  # make the original ezbake.manifest available
- #
- ezbake_manifest = File.join('ext', 'ezbake.manifest')
- if File.exist?(ezbake_manifest)
- cp(ezbake_manifest, File.join(local_dir, "#{Pkg::Config.ref}.ezbake.manifest"))
- end
- ezbake_yaml = File.join("ext", "ezbake.manifest.yaml")
- if File.exists?(ezbake_yaml)
- cp(ezbake_yaml, File.join(local_dir, "#{Pkg::Config.ref}.ezbake.manifest.yaml"))
- end
+ Pkg::Util::EZbake.add_manifest(local_dir)

  # Inside build_metadata*.json files there is additional metadata containing
  # information such as git ref and dependencies that are needed at build
  # time. If these files exist, copy them downstream.
  # Typically these files are named 'ext/build_metadata.<project>.<platform>.json'
- build_metadata_json_files = Dir.glob('ext/build_metadata*.json')
- build_metadata_json_files.each do |source_file|
- target_file = File.join(local_dir, "#{Pkg::Config.ref}.#{File.basename(source_file)}")
- cp(source_file, target_file)
- end
+ Pkg::Util::BuildMetadata.add_misc_json_files(local_dir)

  # Sadly, the packaging repo cannot yet act on its own, without living
  # inside of a packaging-repo compatible project. This means in order to
@@ -664,54 +736,11 @@ namespace :pl do
  # and if the source package exists before linking. Searching for the
  # packages has been restricted specifically to just the pkg/windows dir
  # on purpose, as this is where we currently have all windows packages
- # building to. Once we move the Metadata about the output location in
- # to one source of truth we can refactor this to use that to search
- # -Sean P. M. 08/12/16
-
- {
- 'windows' => ['x86', 'x64'],
- 'windowsfips' => ['x64']
- }.each_pair do |platform, archs|
- packages = Dir["#{local_dir}/#{platform}/*"]
-
- archs.each do |arch|
- package_version = Pkg::Util::Git.describe.tr('-', '.')
- package_filename = File.join(local_dir, platform, "#{Pkg::Config.project}-#{package_version}-#{arch}.msi")
- link_filename = File.join(local_dir, platform, "#{Pkg::Config.project}-#{arch}.msi")
-
- next unless !packages.include?(link_filename) && packages.include?(package_filename)
- # Dear future code spelunkers:
- # Using symlinks instead of hard links causes failures when we try
- # to set these files to be immutable. Also be wary of whether the
- # linking utility you're using expects the source path to be relative
- # to the link target or pwd.
- #
- FileUtils.ln(package_filename, link_filename)
- end
- end
-
- Pkg::Util::Execution.retry_on_fail(times: 3) do
- Pkg::Util::Net.remote_execute(Pkg::Config.distribution_server, "mkdir --mode=775 -p #{project_basedir}")
- Pkg::Util::Net.remote_execute(Pkg::Config.distribution_server, "mkdir -p #{artifact_dir}")
- Pkg::Util::Net.rsync_to("#{local_dir}/", Pkg::Config.distribution_server, "#{artifact_dir}/", extra_flags: ['--ignore-existing', '--exclude repo_configs'])
- end
-
- # In order to get a snapshot of what this build looked like at the time
- # of shipping, we also generate and ship the params file
- #
- Pkg::Config.config_to_yaml(local_dir)
- Pkg::Util::Execution.retry_on_fail(:times => 3) do
- Pkg::Util::Net.rsync_to("#{local_dir}/#{Pkg::Config.ref}.yaml", Pkg::Config.distribution_server, "#{artifact_dir}/", extra_flags: ["--exclude repo_configs"])
- end
-
- # If we just shipped a tagged version, we want to make it immutable
- files = Dir.glob("#{local_dir}/**/*").select { |f| File.file?(f) and !f.include? "#{Pkg::Config.ref}.yaml" }.map do |file|
- "#{artifact_dir}/#{file.sub(/^#{local_dir}\//, '')}"
- end
+ # building to.
+ Pkg::Util::Windows.add_msi_links(local_dir)

- Pkg::Util::Net.remote_set_ownership(Pkg::Config.distribution_server, 'root', 'release', files)
- Pkg::Util::Net.remote_set_permissions(Pkg::Config.distribution_server, '0664', files)
- Pkg::Util::Net.remote_set_immutable(Pkg::Config.distribution_server, files)
+ # Send packages to the distribution server.
+ Pkg::Util::DistributionServer.send_packages(local_dir, artifact_dir)
  end

  desc 'Ship generated repository configs to the distribution server'
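
Taken together, the `pl:jenkins:ship` hunks above replace the inlined EZBake-manifest copying, build-metadata copying, MSI hard-linking, and rsync/permission steps with helper calls. A condensed sketch of the slimmed-down task's sequence, using its default arguments (the trailing comments summarize the inline behavior each helper absorbed):

    local_dir    = 'pkg'
    artifact_dir = File.join(Pkg::Config.jenkins_repo_path, Pkg::Config.project, Pkg::Config.ref, 'artifacts')

    Pkg::Util::EZbake.add_manifest(local_dir)                  # ext/ezbake.manifest(.yaml) snapshots
    Pkg::Util::BuildMetadata.add_misc_json_files(local_dir)    # ext/build_metadata*.json files
    Pkg::Util::Windows.add_msi_links(local_dir)                # unversioned MSI hard links
    Pkg::Util::DistributionServer.send_packages(local_dir, artifact_dir)  # copy to the distribution server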
data/tasks/sign.rake CHANGED
@@ -107,8 +107,8 @@ namespace :pl do
  ##
  # This crazy piece of work establishes a remote repo on the signing
  # server, ships our packages out to it, signs them, and brings them back.
- #
  namespace :jenkins do
+ # The equivalent to invoking this task is calling Pkg::Util::Sign.sign_all(root_directory)
  desc "Sign all locally staged packages on #{Pkg::Config.signing_server}"
  task :sign_all, :root_dir do |_t, args|
  Pkg::Util::RakeUtils.invoke_task('pl:fetch')