packaging 0.99.80 → 0.102.0

@@ -0,0 +1,38 @@
+ # Utility methods for handling windows
+
+ require 'fileutils'
+
+ module Pkg::Util::Windows
+   class << self
+     def add_msi_links(local_source_directory)
+       {
+         'windows' => ['x86', 'x64'],
+         'windowsfips' => ['x64']
+       }.each_pair do |platform, archs|
+         packages = Dir["#{local_source_directory}/#{platform}/*"]
+
+         archs.each do |arch|
+           package_version = Pkg::Util::Git.describe.tr('-', '.')
+           package_filename = File.join(
+             local_source_directory, platform,
+             "#{Pkg::Config.project}-#{package_version}-#{arch}.msi"
+           )
+           link_filename = File.join(
+             local_source_directory,
+             platform,
+             "#{Pkg::Config.project}-#{arch}.msi"
+           )
+
+           next unless !packages.include?(link_filename) && packages.include?(package_filename)
+
+           # Dear future code spelunkers:
+           # Using symlinks instead of hard links causes failures when we try
+           # to set these files to be immutable. Also be wary of whether the
+           # linking utility you're using expects the source path to be relative
+           # to the link target or pwd.
+           FileUtils.ln(package_filename, link_filename)
+         end
+       end
+     end
+   end
+ end
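
The helper above is what the reworked pl:ship task in data/tasks/ship.rake calls later in this diff. A minimal usage sketch, assuming the gem is loaded (for example via require 'packaging', with Pkg::Config and Pkg::Util::Git already configured by the usual rake setup) and that built MSIs sit under pkg/windows and pkg/windowsfips:

    require 'packaging'

    # Creates unversioned hard links such as pkg/windows/<project>-x64.msi
    # pointing at pkg/windows/<project>-<git describe>-x64.msi, skipping any
    # arch whose versioned MSI is missing or whose link already exists.
    Pkg::Util::Windows.add_msi_links('pkg')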
@@ -4,8 +4,12 @@ module Pkg::Util
    require 'benchmark'
    require 'base64'
    require 'io/console'
+   require 'packaging/util/apt_staging_server'
+   require 'packaging/util/build_metadata'
    require 'packaging/util/date'
+   require 'packaging/util/distribution_server'
    require 'packaging/util/execution'
+   require 'packaging/util/ezbake'
    require 'packaging/util/file'
    require 'packaging/util/git'
    require 'packaging/util/gpg'
@@ -19,6 +23,7 @@ module Pkg::Util
    require 'packaging/util/tool'
    require 'packaging/util/rake_utils'
    require 'packaging/util/version'
+   require 'packaging/util/windows'
    require 'packaging/util/git_tags'

    def self.boolean_value(var)
data/tasks/jenkins.rake CHANGED
@@ -286,7 +286,7 @@ namespace :pl do
        ship_nightly_msi
      )
      tasks.map { |t| "pl:#{t}" }.each do |t|
-       puts "Running #{t} . . ."
+       puts "Running #{t}:"
        Rake::Task[t].invoke
      end
    end
@@ -93,7 +93,7 @@ DOC
    # The repo configs have Pkg::Config.builds_server used in them, but that
    # is internal, so we need to replace it with our public server. We also
    # want them only to see repos, and not signed repos, since the host is
-   # called nightlies.puppetlabs.com. Here we replace those values in each
+   # called nightlies.puppet.com. Here we replace those values in each
    # config with the desired value.
    Dir.glob("#{local_target}/repo_configs/**/*").select { |t_config| File.file?(t_config) }.each do |config|
      new_contents = File.read(config).gsub(Pkg::Config.builds_server, target_host).gsub(/#{target_prefix}_repos/, "repos")
data/tasks/ship.rake CHANGED
@@ -1,44 +1,77 @@
  namespace :pl do
    namespace :remote do
-     # These hacky bits execute a pre-existing rake task on the Pkg::Config.apt_host
-     # The rake task takes packages in a specific directory and freights them
-     # to various target yum and apt repositories based on their specific type
-     # e.g., final vs devel vs PE vs FOSS packages
+     # Repo updates are the ways we convince the various repos to regenerate any repo-based
+     # metadata.
+     #
+     # It is broken up into various pieces and types to try to avoid too much redundant
+     # behavior.

      desc "Update '#{Pkg::Config.repo_name}' yum repository on '#{Pkg::Config.yum_staging_server}'"
      task update_yum_repo: 'pl:fetch' do
        command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository/Rakefile mk_repo'
        $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
-       if Pkg::Util.ask_yes_or_no
-         Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => Pkg::Paths.yum_repo_name, :repo_path => Pkg::Config.yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
-       end
+       next unless Pkg::Util.ask_yes_or_no
+
+       Pkg::Repo.update_repo(
+         Pkg::Config.yum_staging_server,
+         command,
+         {
+           repo_name: Pkg::Paths.yum_repo_name,
+           repo_path: Pkg::Config.yum_repo_path,
+           repo_host: Pkg::Config.yum_staging_server
+         }
+       )
      end

      desc "Update all final yum repositories on '#{Pkg::Config.yum_staging_server}'"
      task update_all_final_yum_repos: 'pl:fetch' do
        command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository/Rakefile mk_repo'
        $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
-       if Pkg::Util.ask_yes_or_no
-         Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => '', :repo_path => Pkg::Config.yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
-       end
+       next unless Pkg::Util.ask_yes_or_no
+
+       Pkg::Repo.update_repo(
+         Pkg::Config.yum_staging_server,
+         command,
+         {
+           repo_name: '',
+           repo_path: Pkg::Config.yum_repo_path,
+           repo_host: Pkg::Config.yum_staging_server
+         }
+       )
      end

      desc "Update '#{Pkg::Config.nonfinal_repo_name}' nightly yum repository on '#{Pkg::Config.yum_staging_server}'"
      task update_nightlies_yum_repo: 'pl:fetch' do
        command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository-nightlies/Rakefile mk_repo'
        $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
-       if Pkg::Util.ask_yes_or_no
-         Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => Pkg::Config.nonfinal_repo_name, :repo_path => Pkg::Config.nonfinal_yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
-       end
+       next unless Pkg::Util.ask_yes_or_no
+
+       Pkg::Repo.update_repo(
+         Pkg::Config.yum_staging_server,
+         command,
+         {
+           repo_name: Pkg::Config.nonfinal_repo_name,
+           repo_path: Pkg::Config.nonfinal_yum_repo_path,
+           repo_host: Pkg::Config.yum_staging_server
+         }
+       )
      end

      desc "Update all nightly yum repositories on '#{Pkg::Config.yum_staging_server}'"
      task update_all_nightlies_yum_repos: 'pl:fetch' do
        command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository-nightlies/Rakefile mk_repo'
        $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
-       if Pkg::Util.ask_yes_or_no
-         Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => '', :repo_path => Pkg::Config.nonfinal_yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
-       end
+       next unless Pkg::Util.ask_yes_or_no
+
+       Pkg::Repo.update_repo(
+         Pkg::Config.yum_staging_server,
+         command,
+         {
+           repo_name: '',
+           repo_path: Pkg::Config.nonfinal_yum_repo_path,
+           repo_host: Pkg::Config.yum_staging_server
+         }
+       )
      end

      task freight: :update_apt_repo
@@ -46,17 +79,35 @@ namespace :pl do
      desc "Update remote apt repository on '#{Pkg::Config.apt_signing_server}'"
      task update_apt_repo: 'pl:fetch' do
        $stdout.puts "Really run remote repo update on '#{Pkg::Config.apt_signing_server}'? [y,n]"
-       if Pkg::Util.ask_yes_or_no
-         Pkg::Repo.update_repo(Pkg::Config.apt_signing_server, Pkg::Config.apt_repo_command, { :repo_name => Pkg::Paths.apt_repo_name, :repo_path => Pkg::Config.apt_repo_path, :repo_host => Pkg::Config.apt_host, :repo_url => Pkg::Config.apt_repo_url })
-       end
+       next unless Pkg::Util.ask_yes_or_no
+
+       Pkg::Repo.update_repo(
+         Pkg::Config.apt_signing_server,
+         Pkg::Config.apt_repo_command,
+         {
+           repo_name: Pkg::Paths.apt_repo_name,
+           repo_path: Pkg::Config.apt_repo_path,
+           repo_host: Pkg::Config.apt_host,
+           repo_url: Pkg::Config.apt_repo_url
+         }
+       )
      end

      desc "Update nightlies apt repository on '#{Pkg::Config.apt_signing_server}'"
      task update_nightlies_apt_repo: 'pl:fetch' do
        $stdout.puts "Really run remote repo update on '#{Pkg::Config.apt_signing_server}'? [y,n]"
-       if Pkg::Util.ask_yes_or_no
-         Pkg::Repo.update_repo(Pkg::Config.apt_signing_server, Pkg::Config.nonfinal_apt_repo_command, { :repo_name => Pkg::Config.nonfinal_repo_name, :repo_path => Pkg::Config.nonfinal_apt_repo_path, :repo_host => Pkg::Config.apt_host, :repo_url => Pkg::Config.apt_repo_url })
-       end
+       next unless Pkg::Util.ask_yes_or_no
+
+       Pkg::Repo.update_repo(
+         Pkg::Config.apt_signing_server,
+         Pkg::Config.nonfinal_apt_repo_command,
+         {
+           repo_name: Pkg::Config.nonfinal_repo_name,
+           repo_path: Pkg::Config.nonfinal_apt_repo_path,
+           repo_host: Pkg::Config.apt_host,
+           repo_url: Pkg::Config.apt_repo_url
+         }
+       )
      end

      desc "Update apt and yum repos"
@@ -74,32 +125,31 @@ namespace :pl do
      desc "Update remote ips repository on #{Pkg::Config.ips_host}"
      task :update_ips_repo => 'pl:fetch' do
        if Dir['pkg/ips/pkgs/**/*'].empty? && Dir['pkg/solaris/11/**/*'].empty?
-         $stdout.puts "There aren't any p5p packages in pkg/ips/pkgs or pkg/solaris/11. Maybe something went wrong?"
-       else
+         $stdout.puts "Error: there aren't any p5p packages in pkg/ips/pkgs or pkg/solaris/11."
+         next
+       end

-         if !Dir['pkg/ips/pkgs/**/*'].empty?
-           source_dir = 'pkg/ips/pkgs/'
-         else
-           source_dir = 'pkg/solaris/11/'
-         end
+       source_dir = 'pkg/solaris/11/'
+       source_dir = 'pkg/ips/pkgs/' unless Dir['pkg/ips/pkgs/**/*'].empty?

-         tmpdir, _ = Pkg::Util::Net.remote_execute(
-           Pkg::Config.ips_host,
-           'mktemp -d -p /var/tmp',
-           { capture_output: true }
-         )
-         tmpdir.chomp!
+       tmpdir, _ = Pkg::Util::Net.remote_execute(
+         Pkg::Config.ips_host,
+         'mktemp -d -p /var/tmp',
+         { capture_output: true }
+       )
+       tmpdir.chomp!

-         Pkg::Util::Net.rsync_to(source_dir, Pkg::Config.ips_host, tmpdir)
+       Pkg::Util::Net.rsync_to(source_dir, Pkg::Config.ips_host, tmpdir)

-         remote_cmd = %(for pkg in #{tmpdir}/*.p5p; do
-           sudo pkgrecv -s $pkg -d #{Pkg::Config.ips_path} '*';
+       remote_cmd = %(for pkg in #{tmpdir}/*.p5p; do
+         sudo pkgrecv -s $pkg -d #{Pkg::Config.ips_path} '*';
        done)

-         Pkg::Util::Net.remote_execute(Pkg::Config.ips_host, remote_cmd)
-         Pkg::Util::Net.remote_execute(Pkg::Config.ips_host, "sudo pkgrepo refresh -s #{Pkg::Config.ips_path}")
-         Pkg::Util::Net.remote_execute(Pkg::Config.ips_host, "sudo /usr/sbin/svcadm restart svc:/application/pkg/server:#{Pkg::Config.ips_repo || 'default'}")
-       end
+       Pkg::Util::Net.remote_execute(Pkg::Config.ips_host, remote_cmd)
+       Pkg::Util::Net.remote_execute(Pkg::Config.ips_host,
+                                     "sudo pkgrepo refresh -s #{Pkg::Config.ips_path}")
+       Pkg::Util::Net.remote_execute(Pkg::Config.ips_host,
+                                     "sudo /usr/sbin/svcadm restart svc:/application/pkg/server:#{Pkg::Config.ips_repo || 'default'}")
      end

      desc "Move dmg repos from #{Pkg::Config.dmg_staging_server} to #{Pkg::Config.dmg_host}"
@@ -172,20 +222,10 @@ namespace :pl do
        end
      end

-     desc "Copy signed deb repos from #{Pkg::Config.apt_signing_server} to AWS S3"
-     task :deploy_apt_repo_to_s3 => 'pl:fetch' do
-       puts "Really run S3 sync to deploy Debian repos from #{Pkg::Config.apt_signing_server} to AWS S3? [y,n]"
-       if Pkg::Util.ask_yes_or_no
-         Pkg::Util::Execution.retry_on_fail(:times => 3) do
-           command = 'sudo /usr/local/bin/s3_repo_sync.sh apt.puppetlabs.com'
-           Pkg::Util::Net.remote_execute(Pkg::Config.apt_signing_server, command)
-         end
-       end
-     end
-
      desc "Copy rpm repos from #{Pkg::Config.yum_staging_server} to #{Pkg::Config.yum_host}"
      task deploy_yum_repo: 'pl:fetch' do
-       puts "Really run remote rsync to deploy yum repos from #{Pkg::Config.yum_staging_server} to #{Pkg::Config.yum_host}? [y,n]"
+       puts "Really run remote rsync to deploy yum repos from #{Pkg::Config.yum_staging_server} " \
+            "to #{Pkg::Config.yum_host}? [y,n]"
        if Pkg::Util.ask_yes_or_no
          Pkg::Util::Execution.retry_on_fail(times: 3) do
            Pkg::Rpm::Repo.deploy_repos(
@@ -198,27 +238,71 @@ namespace :pl do
        end
      end

-     desc "Copy signed RPM repos from #{Pkg::Config.yum_staging_server} to AWS S3"
+     ##
+     ## S3 / GCP syncing
+     S3_REPO_SYNC = 'sudo /usr/local/bin/s3_repo_sync.sh'
+     GCP_REPO_SYNC = '/usr/local/bin/gcp_repo_sync'
+
+     desc "Sync signed apt repos from #{Pkg::Config.apt_signing_server} to AWS S3"
+     task :deploy_apt_repo_to_s3 => 'pl:fetch' do
+       sync_command = "#{S3_REPO_SYNC} apt.puppetlabs.com"
+       puts "Sync apt repos from #{Pkg::Config.apt_signing_server} to AWS S3? [y,n]"
+       next unless Pkg::Util.ask_yes_or_no
+
+       Pkg::Util::Execution.retry_on_fail(times: 3) do
+         Pkg::Util::Net.remote_execute(Pkg::Config.apt_signing_server, sync_command)
+       end
+     end
+
+     desc "Sync signed yum repos from #{Pkg::Config.yum_staging_server} to AWS S3"
      task :deploy_yum_repo_to_s3 => 'pl:fetch' do
-       puts "Really run S3 sync to deploy RPM repos from #{Pkg::Config.yum_staging_server} to AWS S3? [y,n]"
-       if Pkg::Util.ask_yes_or_no
-         Pkg::Util::Execution.retry_on_fail(:times => 3) do
-           command = 'sudo /usr/local/bin/s3_repo_sync.sh yum.puppetlabs.com'
-           Pkg::Util::Net.remote_execute(Pkg::Config.yum_staging_server, command)
-         end
+       sync_command = "#{S3_REPO_SYNC} yum.puppetlabs.com"
+       puts "Sync yum repos from #{Pkg::Config.yum_staging_server} to AWS S3? [y,n]"
+       next unless Pkg::Util.ask_yes_or_no
+       Pkg::Util::Execution.retry_on_fail(times: 3) do
+         Pkg::Util::Net.remote_execute(Pkg::Config.yum_staging_server, sync_command)
        end
      end

      desc "Sync downloads.puppetlabs.com from #{Pkg::Config.staging_server} to AWS S3"
      task :deploy_downloads_to_s3 => 'pl:fetch' do
-       puts "Really run S3 sync to sync downloads.puppetlabs.com from #{Pkg::Config.staging_server} to AWS S3? [y,n]"
-       if Pkg::Util.ask_yes_or_no
-         Pkg::Util::Execution.retry_on_fail(:times => 3) do
-           command = 'sudo /usr/local/bin/s3_repo_sync.sh downloads.puppetlabs.com'
-           Pkg::Util::Net.remote_execute(Pkg::Config.staging_server, command)
-         end
+       sync_command = "#{S3_REPO_SYNC} downloads.puppetlabs.com"
+       puts "Sync downloads.puppetlabs.com from #{Pkg::Config.staging_server} to AWS S3? [y,n]"
+       next unless Pkg::Util.ask_yes_or_no
+       Pkg::Util::Execution.retry_on_fail(times: 3) do
+         Pkg::Util::Net.remote_execute(Pkg::Config.staging_server, sync_command)
+       end
+     end
+
+     desc "Sync nightlies.puppet.com from #{Pkg::Config.staging_server} to AWS S3"
+     task :deploy_nightlies_to_s3 => 'pl:fetch' do
+       sync_command = "#{S3_REPO_SYNC} nightlies.puppet.com"
+       puts "Syncing nightly builds from #{Pkg::Config.staging_server} to AWS S3"
+       Pkg::Util::Execution.retry_on_fail(times: 3) do
+         Pkg::Util::Net.remote_execute(Pkg::Config.staging_server, sync_command)
+       end
+     end
+
+     desc "Sync signed apt repos from #{Pkg::Config.apt_signing_server} to Google Cloud Platform"
+     task :sync_apt_repo_to_gcp => 'pl:fetch' do
+       ssh = Pkg::Util::Tool.check_tool('ssh')
+       target_site = 'apt.repos.puppetlabs.com'
+       sync_command_puppet_6 = "#{GCP_REPO_SYNC} apt.repos.puppet.com puppet6"
+       sync_command_puppet_7 = "#{GCP_REPO_SYNC} apt.repos.puppet.com puppet7"
+       print "Sync apt repos from #{Pkg::Config.apt_signing_server} to #{target_site}? [y,n] "
+       next unless Pkg::Util.ask_yes_or_no
+       puts
+
+       Pkg::Util::Execution.retry_on_fail(times: 3) do
+         %x(#{ssh} #{Pkg::Config.apt_signing_server} '/bin/bash -l -c "#{sync_command_puppet_6}"')
+       end
+
+       Pkg::Util::Execution.retry_on_fail(times: 3) do
+         %x(#{ssh} #{Pkg::Config.apt_signing_server} '/bin/bash -l -c "#{sync_command_puppet_7}"')
        end
      end
+     # Keep 'deploy' for backward compatibility
+     task :deploy_apt_repo_to_gcp => :sync_apt_repo_to_gcp

      desc "Sync apt, yum, and downloads.pl.com to AWS S3"
      task :deploy_final_builds_to_s3 => "pl:fetch" do
@@ -227,15 +311,6 @@ namespace :pl do
        Rake::Task['pl:remote:deploy_downloads_to_s3'].invoke
      end

-     desc "Sync nightlies.puppetlabs.com from #{Pkg::Config.staging_server} to AWS S3"
-     task :deploy_nightlies_to_s3 => 'pl:fetch' do
-       puts "Deploying nightly builds from #{Pkg::Config.staging_server} to AWS S3..."
-       Pkg::Util::Execution.retry_on_fail(:times => 3) do
-         command = 'sudo /usr/local/bin/s3_repo_sync.sh nightlies.puppet.com'
-         Pkg::Util::Net.remote_execute(Pkg::Config.staging_server, command)
-       end
-     end
-
      desc "Sync yum and apt from #{Pkg::Config.staging_server} to rsync servers"
      task :deploy_to_rsync_server => 'pl:fetch' do
        # This task must run after the S3 sync has run, or else /opt/repo-s3-stage won't be up-to-date
@@ -271,6 +346,13 @@ namespace :pl do
      end
    end

+   ##
+   ## Here's where we start 'shipping' (old terminology) or 'staging' (current terminology)
+   ## by copying local 'pkg' directories to the staging server.
+   ##
+   ## Note that for debs, we conflate 'staging server' with 'signing server' because we
+   ## must stage in the place where we sign.
+   ##
    desc "Ship mocked rpms to #{Pkg::Config.yum_staging_server}"
    task ship_rpms: 'pl:fetch' do
      Pkg::Util::Ship.ship_rpms('pkg', Pkg::Config.yum_repo_path)
@@ -281,6 +363,7 @@ namespace :pl do
      Pkg::Util::Ship.ship_rpms('pkg', Pkg::Config.nonfinal_yum_repo_path, nonfinal: true)
    end

+   ## This is the old-style deb shipping
    desc "Ship cow-built debs to #{Pkg::Config.apt_signing_server}"
    task ship_debs: 'pl:fetch' do
      Pkg::Util::Ship.ship_debs('pkg', Pkg::Config.apt_repo_staging_path, chattr: false)
@@ -288,7 +371,20 @@ namespace :pl do

    desc "Ship nightly debs to #{Pkg::Config.apt_signing_server}"
    task ship_nightly_debs: 'pl:fetch' do
-     Pkg::Util::Ship.ship_debs('pkg', Pkg::Config.nonfinal_apt_repo_staging_path, chattr: false, nonfinal: true)
+     Pkg::Util::Ship.ship_debs(
+       'pkg', Pkg::Config.nonfinal_apt_repo_staging_path, chattr: false, nonfinal: true)
+   end
+
+   ## This is the new-style apt stager
+   desc "Stage debs to #{Pkg::Config.apt_signing_server}"
+   task stage_stable_debs: 'pl:fetch' do
+     Pkg::Util::AptStagingServer.send_packages('pkg', 'stable')
+   end
+   task stage_debs: :stage_stable_debs
+
+   desc "Stage nightly debs to #{Pkg::Config.apt_signing_server}"
+   task stage_nightly_debs: 'pl:fetch' do
+     Pkg::Util::AptStagingServer.send_packages('pkg', 'nightly')
    end

    desc 'Ship built gem to rubygems.org, internal Gem mirror, and public file server'
@@ -304,12 +400,13 @@ namespace :pl do
          puts 'This will ship to an internal gem mirror, a public file server, and rubygems.org'
          puts "Do you want to start shipping the rubygem '#{gem_file}'?"
          next unless Pkg::Util.ask_yes_or_no
+
          Rake::Task['pl:ship_gem_to_rubygems'].execute(file: gem_file)
        end

        Rake::Task['pl:ship_gem_to_downloads'].invoke
      else
-       $stderr.puts 'Not shipping development gem using odd_even strategy for the sake of your users.'
+       warn 'Not shipping development gem using odd_even strategy for the sake of your users.'
      end
    end
  end
@@ -321,6 +418,7 @@ namespace :pl do
      if Pkg::Config.build_gem
        fail 'Value `Pkg::Config.gem_host` not defined, skipping nightly ship' unless Pkg::Config.gem_host
        fail 'Value `Pkg::Config.nonfinal_gem_path` not defined, skipping nightly ship' unless Pkg::Config.nonfinal_gem_path
+
        FileList['pkg/*.gem'].each do |gem_file|
          Pkg::Gem.ship_to_internal_mirror(gem_file)
        end
@@ -430,22 +528,25 @@ namespace :pl do

    desc 'UBER ship: ship all the things in pkg'
    task uber_ship: 'pl:fetch' do
-     if Pkg::Util.confirm_ship(FileList['pkg/**/*'])
-       Rake::Task['pl:ship_rpms'].invoke
-       Rake::Task['pl:ship_debs'].invoke
-       Rake::Task['pl:ship_dmg'].invoke
-       Rake::Task['pl:ship_swix'].invoke
-       Rake::Task['pl:ship_nuget'].invoke
-       Rake::Task['pl:ship_tar'].invoke
-       Rake::Task['pl:ship_svr4'].invoke
-       Rake::Task['pl:ship_p5p'].invoke
-       Rake::Task['pl:ship_msi'].invoke
-       add_shipped_metrics(pe_version: ENV['PE_VER'], is_rc: !Pkg::Util::Version.final?) if Pkg::Config.benchmark
-       post_shipped_metrics if Pkg::Config.benchmark
-     else
+     unless Pkg::Util.confirm_ship(FileList['pkg/**/*'])
        puts 'Ship canceled'
        exit
      end
+
+     Rake::Task['pl:ship_rpms'].invoke
+     Rake::Task['pl:ship_debs'].invoke
+     Rake::Task['pl:ship_dmg'].invoke
+     Rake::Task['pl:ship_swix'].invoke
+     Rake::Task['pl:ship_nuget'].invoke
+     Rake::Task['pl:ship_tar'].invoke
+     Rake::Task['pl:ship_svr4'].invoke
+     Rake::Task['pl:ship_p5p'].invoke
+     Rake::Task['pl:ship_msi'].invoke
+
+     if Pkg::Config.benchmark
+       add_shipped_metrics(pe_version: ENV['PE_VER'], is_rc: !Pkg::Util::Version.final?)
+       post_shipped_metrics
+     end
    end

    desc 'Create the rolling repo links'
@@ -509,7 +610,7 @@ namespace :pl do
            { extra_options: '-oBatchMode=yes' }
          )
        end
-     rescue
+     rescue StandardError
        errs << "Unlocking the OSX keychain failed! Check the password in your .bashrc on #{Pkg::Config.osx_signing_server}"
      end

@@ -548,66 +649,56 @@ namespace :pl do
    task :ship_to_artifactory, :local_dir do |_t, args|
      Pkg::Util::RakeUtils.invoke_task('pl:fetch')
      unless Pkg::Config.project
-       fail "You must set the 'project' in build_defaults.yaml or with the 'PROJECT_OVERRIDE' environment variable."
+       fail "Error: 'project' must be set in build_defaults.yaml or " \
+            "in the 'PROJECT_OVERRIDE' environment variable."
      end
+
      artifactory = Pkg::ManageArtifactory.new(Pkg::Config.project, Pkg::Config.ref)

      local_dir = args.local_dir || 'pkg'
-     artifacts = Dir.glob("#{local_dir}/**/*").reject { |e| File.directory? e }
-     artifacts.sort! do |a, b|
-       if File.extname(a) =~ /(md5|sha\d+)/ && File.extname(b) !~ /(md5|sha\d+)/
-         1
-       elsif File.extname(b) =~ /(md5|sha\d+)/ && File.extname(a) !~ /(md5|sha\d+)/
-         -1
-       else
-         a <=> b
-       end
-     end
-     artifacts.each do |artifact|
-       if File.extname(artifact) == ".yaml" || File.extname(artifact) == ".json"
-         artifactory.deploy_package(artifact)
-       elsif artifactory.package_exists_on_artifactory?(artifact)
-         warn "Attempt to upload '#{artifact}' failed. Package already exists!"
-       else
+     Dir.glob("#{local_dir}/**/*").reject { |e| File.directory? e }.each do |artifact|
+       # Always deploy yamls and jsons
+       if artifact.end_with?('.yaml', '.json')
          artifactory.deploy_package(artifact)
+         next
+       end
+
+       # Don't deploy if the package already exists
+       if artifactory.package_exists_on_artifactory?(artifact)
+         warn "Attempt to upload '#{artifact}' failed. Package already exists."
+         next
        end
+
+       artifactory.deploy_package(artifact)
      end
    end

-   desc 'Ship pkg directory contents to distribution server'
+   desc 'Ship "pkg" directory contents to distribution server'
    task :ship, :target, :local_dir do |_t, args|
      Pkg::Util::RakeUtils.invoke_task('pl:fetch')
      unless Pkg::Config.project
-       fail "You must set the 'project' in build_defaults.yaml or with the 'PROJECT_OVERRIDE' environment variable."
+       fail "Error: 'project' must be set in build_defaults.yaml or " \
+            "in the 'PROJECT_OVERRIDE' environment variable."
      end
+
      target = args.target || 'artifacts'
      local_dir = args.local_dir || 'pkg'
-     project_basedir = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
-     artifact_dir = "#{project_basedir}/#{target}"
+     project_basedir = File.join(
+       Pkg::Config.jenkins_repo_path, Pkg::Config.project, Pkg::Config.ref
+     )
+     artifact_dir = File.join(project_basedir, target)

      # For EZBake builds, we also want to include the ezbake.manifest file to
      # get a snapshot of this build and all dependencies. We eventually will
      # create a yaml version of this file, but until that point we want to
      # make the original ezbake.manifest available
-     #
-     ezbake_manifest = File.join('ext', 'ezbake.manifest')
-     if File.exist?(ezbake_manifest)
-       cp(ezbake_manifest, File.join(local_dir, "#{Pkg::Config.ref}.ezbake.manifest"))
-     end
-     ezbake_yaml = File.join("ext", "ezbake.manifest.yaml")
-     if File.exists?(ezbake_yaml)
-       cp(ezbake_yaml, File.join(local_dir, "#{Pkg::Config.ref}.ezbake.manifest.yaml"))
-     end
+     Pkg::Util::EZbake.add_manifest(local_dir)

      # Inside build_metadata*.json files there is additional metadata containing
      # information such as git ref and dependencies that are needed at build
      # time. If these files exist, copy them downstream.
      # Typically these files are named 'ext/build_metadata.<project>.<platform>.json'
-     build_metadata_json_files = Dir.glob('ext/build_metadata*.json')
-     build_metadata_json_files.each do |source_file|
-       target_file = File.join(local_dir, "#{Pkg::Config.ref}.#{File.basename(source_file)}")
-       cp(source_file, target_file)
-     end
+     Pkg::Util::BuildMetadata.add_misc_json_files(local_dir)

      # Sadly, the packaging repo cannot yet act on its own, without living
      # inside of a packaging-repo compatible project. This means in order to
@@ -643,54 +734,11 @@ namespace :pl do
      # and if the source package exists before linking. Searching for the
      # packages has been restricted specifically to just the pkg/windows dir
      # on purpose, as this is where we currently have all windows packages
-     # building to. Once we move the Metadata about the output location in
-     # to one source of truth we can refactor this to use that to search
-     # -Sean P. M. 08/12/16
-
-     {
-       'windows' => ['x86', 'x64'],
-       'windowsfips' => ['x64']
-     }.each_pair do |platform, archs|
-       packages = Dir["#{local_dir}/#{platform}/*"]
-
-       archs.each do |arch|
-         package_version = Pkg::Util::Git.describe.tr('-', '.')
-         package_filename = File.join(local_dir, platform, "#{Pkg::Config.project}-#{package_version}-#{arch}.msi")
-         link_filename = File.join(local_dir, platform, "#{Pkg::Config.project}-#{arch}.msi")
-
-         next unless !packages.include?(link_filename) && packages.include?(package_filename)
-         # Dear future code spelunkers:
-         # Using symlinks instead of hard links causes failures when we try
-         # to set these files to be immutable. Also be wary of whether the
-         # linking utility you're using expects the source path to be relative
-         # to the link target or pwd.
-         #
-         FileUtils.ln(package_filename, link_filename)
-       end
-     end
-
-     Pkg::Util::Execution.retry_on_fail(times: 3) do
-       Pkg::Util::Net.remote_execute(Pkg::Config.distribution_server, "mkdir --mode=775 -p #{project_basedir}")
-       Pkg::Util::Net.remote_execute(Pkg::Config.distribution_server, "mkdir -p #{artifact_dir}")
-       Pkg::Util::Net.rsync_to("#{local_dir}/", Pkg::Config.distribution_server, "#{artifact_dir}/", extra_flags: ['--ignore-existing', '--exclude repo_configs'])
-     end
-
-     # In order to get a snapshot of what this build looked like at the time
-     # of shipping, we also generate and ship the params file
-     #
-     Pkg::Config.config_to_yaml(local_dir)
-     Pkg::Util::Execution.retry_on_fail(:times => 3) do
-       Pkg::Util::Net.rsync_to("#{local_dir}/#{Pkg::Config.ref}.yaml", Pkg::Config.distribution_server, "#{artifact_dir}/", extra_flags: ["--exclude repo_configs"])
-     end
-
-     # If we just shipped a tagged version, we want to make it immutable
-     files = Dir.glob("#{local_dir}/**/*").select { |f| File.file?(f) and !f.include? "#{Pkg::Config.ref}.yaml" }.map do |file|
-       "#{artifact_dir}/#{file.sub(/^#{local_dir}\//, '')}"
-     end
+     # building to.
+     Pkg::Util::Windows.add_msi_links(local_dir)

-     Pkg::Util::Net.remote_set_ownership(Pkg::Config.distribution_server, 'root', 'release', files)
-     Pkg::Util::Net.remote_set_permissions(Pkg::Config.distribution_server, '0664', files)
-     Pkg::Util::Net.remote_set_immutable(Pkg::Config.distribution_server, files)
+     # Send packages to the distribution server.
+     Pkg::Util::DistributionServer.send_packages(local_dir, artifact_dir)
    end

    desc 'Ship generated repository configs to the distribution server'
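
Taken together, the pl:ship refactor above replaces the inline ezbake-manifest copying, build-metadata copying, MSI hard-linking, and rsync/immutable-flag logic with calls into the new utility modules. A condensed sketch of the resulting flow, assuming Pkg::Config has already been populated (as pl:fetch does) and using the task's own defaults of 'artifacts' and 'pkg':

    target = 'artifacts'
    local_dir = 'pkg'
    project_basedir = File.join(
      Pkg::Config.jenkins_repo_path, Pkg::Config.project, Pkg::Config.ref
    )
    artifact_dir = File.join(project_basedir, target)

    # Copy ext/ezbake.manifest* and ext/build_metadata*.json into pkg/, if present
    Pkg::Util::EZbake.add_manifest(local_dir)
    Pkg::Util::BuildMetadata.add_misc_json_files(local_dir)

    # Create unversioned MSI hard links, then hand everything to the distribution server
    Pkg::Util::Windows.add_msi_links(local_dir)
    Pkg::Util::DistributionServer.send_packages(local_dir, artifact_dir)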