packaging 0.99.2 → 0.99.3

@@ -162,4 +162,16 @@ describe 'Pkg::Platforms' do
       end
     end
   end
+
+  describe '#generic_platform_tag' do
+    it 'fails for unsupported platforms' do
+      expect { Pkg::Platforms.generic_platform_tag('butts') }.to raise_error
+    end
+
+    it 'returns a supported platform tag containing the supplied platform' do
+      Pkg::Platforms.supported_platforms.each do |platform|
+        expect(Pkg::Platforms.platform_tags).to include(Pkg::Platforms.generic_platform_tag(platform))
+      end
+    end
+  end
 end
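
A minimal, self-contained sketch of the contract this new spec pins down (the lookup data below is hypothetical; the real mapping lives in Pkg::Platforms):

  # Maps a bare platform name to one of its supported platform tags and
  # raises for anything unknown -- the two behaviors the spec exercises.
  module GenericPlatformTagSketch
    SUPPORTED_TAGS = {
      'el'      => 'el-7-x86_64',
      'ubuntu'  => 'ubuntu-16.04-amd64',
      'windows' => 'windows-2012-x64',
    }.freeze

    def self.generic_platform_tag(platform)
      SUPPORTED_TAGS.fetch(platform) { raise "unsupported platform: #{platform}" }
    end
  end

  GenericPlatformTagSketch.generic_platform_tag('el')    # => "el-7-x86_64"
  GenericPlatformTagSketch.generic_platform_tag('butts') # raises RuntimeError
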
@@ -94,4 +94,40 @@ describe "#Pkg::Repo" do
       Pkg::Repo.create_all_repo_archives("project", "version")
     end
   end
+
+  describe "#argument_required?" do
+    let(:repo_command) { "some command with __REPO_PATH__ but not repo name or anything" }
+    let(:required_arg) { 'repo_path' }
+    let(:optional_arg) { 'repo_name' }
+
+    it 'should return true if command requires arg' do
+      expect(Pkg::Repo.argument_required?(required_arg, repo_command)).to be_true
+    end
+
+    it 'should return false if command does not need arg' do
+      expect(Pkg::Repo.argument_required?(optional_arg, repo_command)).to be_false
+    end
+  end
+
+  describe "#update_repo" do
+    let(:remote_host) { 'weth.delivery.puppetlabs.net' }
+    let(:repo_command) { "some command with __REPO_NAME__ and __REPO_PATH__ and stuff" }
+    let(:repo_name) { 'puppet5' }
+    let(:repo_path) { '/opt/repository/apt' }
+    let(:apt_releases) { ['stretch', 'trusty', 'xenial'] }
+
+    before(:each) do
+      allow(Pkg::Util::Gpg).to receive(:key)
+      allow(Pkg::Config).to receive(:apt_releases).and_return(apt_releases)
+    end
+
+    it 'should fail if required params are nil' do
+      expect{ Pkg::Repo.update_repo(remote_host, repo_command, { :repo_path => repo_path }) }.to raise_error(RuntimeError, /Missing required argument 'repo_name'/)
+    end
+
+    it 'should execute command if optional params are nil' do
+      expect(Pkg::Util::Net).to receive(:remote_ssh_cmd).with(remote_host, "some command with #{repo_name} and #{repo_path} and stuff")
+      Pkg::Repo.update_repo(remote_host, repo_command, { :repo_name => repo_name, :repo_path => repo_path })
+    end
+  end
 end
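
The specs above describe the contract of the new Pkg::Repo helpers: an argument is "required" when its __REPO_*__ token appears in the command template, and update_repo substitutes the tokens before running the command on the remote host. A self-contained sketch of that behavior (not the gem's implementation; the real method runs the filled command via Pkg::Util::Net.remote_ssh_cmd):

  module RepoUpdateSketch
    # True when the command template references the given argument's token.
    def self.argument_required?(argument, command)
      command.include?("__#{argument.to_s.upcase}__")
    end

    # Fill in every token we were given a value for; complain about tokens
    # the command needs but whose value was left nil or unset.
    def self.update_repo(remote_host, command, options = {})
      [:repo_name, :repo_path, :repo_host, :repo_url].each do |arg|
        next unless argument_required?(arg, command)
        raise "Missing required argument '#{arg}'" if options[arg].nil?
      end
      filled = command.dup
      options.each { |arg, value| filled.gsub!("__#{arg.to_s.upcase}__", value.to_s) }
      puts "on #{remote_host} would run: #{filled}"
    end
  end

  RepoUpdateSketch.update_repo('repo.example.internal',
                               'some command with __REPO_NAME__ and __REPO_PATH__',
                               :repo_name => 'puppet5', :repo_path => '/opt/repository/apt')
  # on repo.example.internal would run: some command with puppet5 and /opt/repository/apt
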
@@ -17,8 +17,8 @@ describe 'Pkg::Retrieve' do
     'ubuntu-16.04-amd64' => {:artifact => './deb/xenial/PC1/puppet-agent_5.3.2.155.gb25e649-1xenial_amd64.deb'},
     'windows-2012-x64' => {:artifact => './windows/puppet-agent-x64.msi'},
   }}
-  build_url = "builds.delivery.puppetlabs.net/#{project}/#{ref}"
-  build_path = "/opt/jenkins-builds/#{project}/#{ref}"
+  build_url = "builds.delivery.puppetlabs.net/#{project}/#{ref}/#{remote_target}"
+  build_path = "/opt/jenkins-builds/#{project}/#{ref}/#{remote_target}"
 
   before :each do
     allow(Pkg::Config).to receive(:project).and_return(project)
@@ -86,13 +86,13 @@ describe 'Pkg::Retrieve' do
   describe '#retrieve_all' do
     it 'should try to use wget first' do
       expect(Pkg::Retrieve).to receive(:default_wget)
-      Pkg::Retrieve.retrieve_all(build_url, build_path, remote_target, local_target)
+      Pkg::Retrieve.retrieve_all(build_url, build_path, local_target)
    end
 
     it 'should use rsync if wget is not found' do
       allow(Pkg::Util::Tool).to receive(:find_tool).with('wget').and_return(nil)
       expect(Pkg::Util::Net).to receive(:rsync_from)
-      Pkg::Retrieve.retrieve_all(build_url, build_path, remote_target, local_target)
+      Pkg::Retrieve.retrieve_all(build_url, build_path, local_target)
     end
   end
 end
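
Taken together, these spec changes show that retrieve_all dropped its remote_target parameter: the remote directory (usually 'artifacts') is now baked into the build URL and build path that the caller constructs. A usage sketch with placeholder project and ref values:

  build_url  = "builds.delivery.puppetlabs.net/puppet-agent/some-ref/artifacts"
  build_path = "/opt/jenkins-builds/puppet-agent/some-ref/artifacts"
  Pkg::Retrieve.retrieve_all(build_url, build_path, 'pkg')
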
@@ -86,6 +86,7 @@ end
 def build_dmg
   # Local Variables
   dmg_format_code = 'UDZO'
+  dmg_filesystem = 'HFS+'
   zlib_level = '9'
   dmg_format_option = "-imagekey zlib-level=#{zlib_level}"
   dmg_format = "#{dmg_format_code} #{dmg_format_option}"
@@ -108,6 +109,7 @@ def build_dmg
              -uid 99 \
              -gid 99 \
              -ov \
+             -fs #{dmg_filesystem} \
              -format #{dmg_format} \
              #{dmg_file}")
 
@@ -265,9 +265,25 @@ namespace :pl do
       end
     end
 
+    task :stage_nightlies => "pl:fetch" do
+      tasks = %w(
+        jenkins:retrieve
+        jenkins:sign_all
+        ship_nightly_rpms
+        ship_nightly_debs
+        ship_nightly_dmg
+        ship_nightly_swix
+        ship_nightly_msi
+      )
+      tasks.map { |t| "pl:#{t}" }.each do |t|
+        puts "Running #{t} . . ."
+        Rake::Task[t].invoke
+      end
+    end
+
     task :ship_nightlies => "pl:fetch" do
-      Rake::Task['pl:jenkins:uber_ship_lite'].invoke
-      Rake::Task['pl:remote:update_foss_repos'].invoke
+      Rake::Task['pl:jenkins:stage_nightlies'].invoke
+      Rake::Task['pl:remote:update_nightly_repos'].invoke
       Rake::Task['pl:remote:deploy_nightlies_to_s3'].invoke
     end
 
@@ -42,9 +42,9 @@ DOC
     Pkg::Rpm::Repo.create_local_repos('repos')
     Pkg::Rpm::Repo.sign_repos('repos')
     Pkg::Deb::Repo.sign_repos('repos', 'Apt repository for signed builds')
-    Pkg::OSX.sign('repos') unless Dir['repos/apple/**/*.dmg'].empty?
-    Pkg::IPS.sign('repos') unless Dir['repos/solaris/11/**/*.p5p'].empty?
-    Pkg::MSI.sign('repos') unless Dir['repos/windows/**/*.msi'].empty?
+    Pkg::Sign::Dmg.sign('repos') unless Dir['repos/apple/**/*.dmg'].empty?
+    Pkg::Sign::Ips.sign('repos') unless Dir['repos/solaris/11/**/*.p5p'].empty?
+    Pkg::Sign::Msi.sign('repos') unless Dir['repos/windows/**/*.msi'].empty?
   end
 
   task :ship_signed_repos, [:target_prefix] => "pl:fetch" do |t, args|
@@ -17,13 +17,14 @@ namespace :pl do
       remote_target = args.remote_target || "artifacts"
       local_target = args.local_target || "pkg"
       mkdir_p local_target
-      build_url = "http://#{Pkg::Config.builds_server}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
-      build_path = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
+      build_url = "http://#{Pkg::Config.builds_server}/#{Pkg::Config.project}/#{Pkg::Config.ref}/#{remote_target}"
+      build_path = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}/#{remote_target}"
       if Pkg::Config.foss_only
         Pkg::Retrieve.foss_only_retrieve(build_url, local_target)
       else
-        Pkg::Retrieve.retrieve_all(build_url, build_path, remote_target, local_target)
+        Pkg::Retrieve.retrieve_all(build_url, build_path, local_target)
       end
+      fail "Uh oh, looks like we didn't find anything in #{local_target} when attempting to retrieve from #{build_url}!" if Dir["#{local_target}/*"].empty?
       puts "Packages staged in #{local_target}"
     end
   end
@@ -36,9 +37,9 @@ if Pkg::Config.build_pe
     task :retrieve, [:remote_target, :local_target] => 'pl:fetch' do |t, args|
       remote_target = args.remote_target || "artifacts"
       local_target = args.local_target || "pkg"
-      build_url = "http://#{Pkg::Config.builds_server}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
-      build_path = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
-      Pkg::Retrieve.retrieve_all(build_url, build_path, remote_target, local_target)
+      build_url = "http://#{Pkg::Config.builds_server}/#{Pkg::Config.project}/#{Pkg::Config.ref}/#{remote_target}"
+      build_path = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}/#{remote_target}"
+      Pkg::Retrieve.retrieve_all(build_url, build_path, local_target)
     end
   end
 end
@@ -5,27 +5,39 @@ namespace :pl do
   # to various target yum and apt repositories based on their specific type
   # e.g., final vs devel vs PE vs FOSS packages
 
-  desc "Update remote yum repository on '#{Pkg::Config.yum_staging_server}'"
+  desc "Update '#{Pkg::Config.repo_name}' yum repository on '#{Pkg::Config.yum_staging_server}'"
   task update_yum_repo: 'pl:fetch' do
-    if Pkg::Util::Version.final?
-      path = Pkg::Config.yum_repo_path
-    else
-      path = Pkg::Config.nonfinal_yum_repo_path || Pkg::Config.yum_repo_path
+    command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository/Rakefile mk_repo'
+    $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
+    if Pkg::Util.ask_yes_or_no
+      Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => Pkg::Config.repo_name, :repo_path => Pkg::Config.yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
     end
-    yum_whitelist = {
-      __REPO_NAME__: Pkg::Paths.repo_name,
-      __REPO_PATH__: path,
-      __REPO_HOST__: Pkg::Config.yum_staging_server,
-      __GPG_KEY__: Pkg::Util::Gpg.key
-    }
+  end
 
+  desc "Update all final yum repositories on '#{Pkg::Config.yum_staging_server}'"
+  task update_all_final_yum_repos: 'pl:fetch' do
+    command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository/Rakefile mk_repo'
     $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
     if Pkg::Util.ask_yes_or_no
-      if Pkg::Config.yum_repo_command
-        Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.yum_staging_server, Pkg::Util::Misc.search_and_replace(Pkg::Config.yum_repo_command, yum_whitelist))
-      else
-        Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.yum_staging_server, 'rake -f /opt/repository/Rakefile mk_repo')
-      end
+      Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => '', :repo_path => Pkg::Config.yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
+    end
+  end
+
+  desc "Update '#{Pkg::Config.nonfinal_repo_name}' nightly yum repository on '#{Pkg::Config.yum_staging_server}'"
+  task update_nightlies_yum_repo: 'pl:fetch' do
+    command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository-nightlies/Rakefile mk_repo'
+    $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
+    if Pkg::Util.ask_yes_or_no
+      Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => Pkg::Config.nonfinal_repo_name, :repo_path => Pkg::Config.nonfinal_yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
+    end
+  end
+
+  desc "Update all nightly yum repositories on '#{Pkg::Config.yum_staging_server}'"
+  task update_all_nightlies_yum_repos: 'pl:fetch' do
+    command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository-nightlies/Rakefile mk_repo'
+    $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
+    if Pkg::Util.ask_yes_or_no
+      Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => '', :repo_path => Pkg::Config.nonfinal_yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
     end
   end
 
@@ -33,35 +45,17 @@ namespace :pl do
 
   desc "Update remote apt repository on '#{Pkg::Config.apt_signing_server}'"
   task update_apt_repo: 'pl:fetch' do
-    if Pkg::Util::Version.final?
-      path = Pkg::Config.apt_repo_path
-      cmd = Pkg::Config.apt_repo_command
-    else
-      path = Pkg::Config.nonfinal_apt_repo_path || Pkg::Config.apt_repo_path
-      cmd = Pkg::Config.nonfinal_apt_repo_command || Pkg::Config.apt_repo_command
-    end
-    apt_whitelist = {
-      __REPO_NAME__: Pkg::Paths.repo_name,
-      __REPO_PATH__: path,
-      __REPO_URL__: Pkg::Config.apt_repo_url,
-      __REPO_HOST__: Pkg::Config.apt_host,
-      __APT_PLATFORMS__: Pkg::Config.apt_releases.join(' '),
-      __GPG_KEY__: Pkg::Util::Gpg.key
-    }
+    $stdout.puts "Really run remote repo update on '#{Pkg::Config.apt_signing_server}'? [y,n]"
+    if Pkg::Util.ask_yes_or_no
+      Pkg::Repo.update_repo(Pkg::Config.apt_signing_server, Pkg::Config.apt_repo_command, { :repo_name => Pkg::Config.repo_name, :repo_path => Pkg::Config.apt_repo_path, :repo_host => Pkg::Config.apt_host, :repo_url => Pkg::Config.apt_repo_url })
+    end
+  end
 
+  desc "Update nightlies apt repository on '#{Pkg::Config.apt_signing_server}'"
+  task update_nightlies_apt_repo: 'pl:fetch' do
     $stdout.puts "Really run remote repo update on '#{Pkg::Config.apt_signing_server}'? [y,n]"
     if Pkg::Util.ask_yes_or_no
-      if cmd
-        Pkg::Util::Net.remote_ssh_cmd(
-          Pkg::Config.apt_signing_server,
-          Pkg::Util::Misc.search_and_replace(
-            cmd,
-            apt_whitelist
-          )
-        )
-      else
-        warn %(Pkg::Config#apt_repo_command returned something unexpected, so no attempt will be made to update remote repos)
-      end
+      Pkg::Repo.update_repo(Pkg::Config.apt_signing_server, Pkg::Config.nonfinal_apt_repo_command, { :repo_name => Pkg::Config.nonfinal_repo_name, :repo_path => Pkg::Config.nonfinal_apt_repo_path, :repo_host => Pkg::Config.apt_host, :repo_url => Pkg::Config.apt_repo_url })
     end
   end
 
@@ -71,6 +65,12 @@ namespace :pl do
     Rake::Task['pl:remote:update_yum_repo'].invoke
   end
 
+  desc "Update nightlies apt and yum repos"
+  task :update_nightly_repos => "pl:fetch" do
+    Rake::Task['pl:remote:update_nightlies_apt_repo'].invoke
+    Rake::Task['pl:remote:update_nightlies_yum_repo'].invoke
+  end
+
   desc "Update remote ips repository on #{Pkg::Config.ips_host}"
   task :update_ips_repo => 'pl:fetch' do
     if Dir['pkg/ips/pkgs/**/*'].empty? && Dir['pkg/solaris/11/**/*'].empty?
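
The repo-update hunks above all funnel into Pkg::Repo.update_repo with a command template: either one supplied through build configuration (Pkg::Config.yum_repo_command / apt_repo_command) or the default rake invocation on the staging host. A hedged illustration of supplying a custom template (the token names come from the specs earlier in this diff; the template string itself is an example, not the gem's default):

  # Hypothetical template configured for a project, e.g.:
  #   yum_repo_command: 'rake -f /opt/repository/Rakefile mk_repo[__REPO_NAME__,__REPO_PATH__]'
  #
  # At task time the tokens are filled from the options hash, roughly:
  Pkg::Repo.update_repo(
    Pkg::Config.yum_staging_server,
    Pkg::Config.yum_repo_command || 'rake -f /opt/repository/Rakefile mk_repo',
    :repo_name => Pkg::Config.repo_name,
    :repo_path => Pkg::Config.yum_repo_path,
    :repo_host => Pkg::Config.yum_staging_server
  )
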
@@ -251,41 +251,22 @@ namespace :pl do
 
   desc "Ship mocked rpms to #{Pkg::Config.yum_staging_server}"
   task ship_rpms: 'pl:fetch' do
-    if Pkg::Util::Version.final?
-      path = Pkg::Config.yum_repo_path
-    else
-      path = Pkg::Config.nonfinal_yum_repo_path || Pkg::Config.yum_repo_path
-    end
-    Pkg::Util::Ship.ship_pkgs(['pkg/**/*.rpm', 'pkg/**/*.srpm'], Pkg::Config.yum_staging_server, path)
+    Pkg::Util::Ship.ship_rpms('pkg', Pkg::Config.yum_repo_path)
+  end
 
-    # I really don't care which one we grab, it just has to be some supported
-    # version and architecture from the `el` hash. So here we're just grabbing
-    # the first one, parsing out some info, and breaking out of the loop. Not
-    # elegant, I know, but effective.
-    Pkg::Platforms::PLATFORM_INFO['el'].each do |key, value|
-      generic_platform_tag = "el-#{key}-#{value[:architectures][0]}"
-      Pkg::Util::Ship.create_rolling_repo_link(generic_platform_tag, Pkg::Config.yum_staging_server, path)
-      break
-    end
+  desc "Ship nightly rpms to #{Pkg::Config.yum_staging_server}"
+  task ship_nightly_rpms: 'pl:fetch' do
+    Pkg::Util::Ship.ship_rpms('pkg', Pkg::Config.nonfinal_yum_repo_path)
   end
 
   desc "Ship cow-built debs to #{Pkg::Config.apt_signing_server}"
   task ship_debs: 'pl:fetch' do
-    if Pkg::Util::Version.final?
-      staging_path = Pkg::Config.apt_repo_staging_path
-    else
-      staging_path = Pkg::Config.nonfinal_apt_repo_staging_path || Pkg::Config.apt_repo_staging_path
-    end
-    Pkg::Util::Ship.ship_pkgs(['pkg/**/*.debian.tar.gz', 'pkg/**/*.orig.tar.gz', 'pkg/**/*.dsc', 'pkg/**/*.deb', 'pkg/**/*.changes'], Pkg::Config.apt_signing_server, staging_path, chattr: false)
+    Pkg::Util::Ship.ship_debs('pkg', Pkg::Config.apt_repo_staging_path, chattr: false)
+  end
 
-    # We need to iterate through all the supported platforms here because of
-    # how deb repos are set up. Each codename will have its own link from the
-    # current versioned repo (i.e., puppet5) to the rolling repo. The one thing
-    # we don't care about is architecture, so we just grab the first supported
-    # architecture for the codename we're working with at the moment.
-    Pkg::Platforms.codenames.each do |codename|
-      Pkg::Util::Ship.create_rolling_repo_link(Pkg::Platforms.codename_to_tags(codename)[0], Pkg::Config.apt_signing_server, staging_path)
-    end
+  desc "Ship nightly debs to #{Pkg::Config.apt_signing_server}"
+  task ship_nightly_debs: 'pl:fetch' do
+    Pkg::Util::Ship.ship_debs('pkg', Pkg::Config.nonfinal_apt_repo_staging_path, chattr: false)
   end
 
   desc 'Ship built gem to rubygems.org, internal Gem mirror, and public file server'
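
Each of these ship_* tasks now delegates to a per-format helper in Pkg::Util::Ship. The helpers themselves are not shown in this hunk, but given the code they replace they presumably wrap the old glob-based ship_pkgs call, along the lines of the following hypothetical sketch (not the gem's source):

  # Rough shape of a per-format wrapper: collect the format's artifacts from
  # the local directory and hand them to the generic shipper.
  def ship_rpms(local_source, remote_path, opts = {})
    globs = ["#{local_source}/**/*.rpm", "#{local_source}/**/*.srpm"]
    Pkg::Util::Ship.ship_pkgs(globs, Pkg::Config.yum_staging_server, remote_path, opts)
  end
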
@@ -353,7 +334,7 @@ namespace :pl do
     end
 
     Pkg::Util::Execution.retry_on_fail(times: 3) do
-      Pkg::Util::Ship.ship_pkgs(['pkg/*.gem*'], Pkg::Config.gem_host, Pkg::Config.gem_path, platform_independent: true)
+      Pkg::Util::Ship.ship_gem('pkg', Pkg::Config.gem_path, platform_independent: true)
     end
   end
 
@@ -361,12 +342,7 @@ namespace :pl do
   task :ship_svr4 do
     Pkg::Util::Execution.retry_on_fail(:times => 3) do
       if File.directory?("pkg/solaris/10")
-        if Pkg::Util::Version.final?
-          path = Pkg::Config.svr4_path
-        else
-          path = Pkg::Config.nonfinal_svr4_path || Pkg::Config.svr4_path
-        end
-        Pkg::Util::Ship.ship_pkgs(['pkg/**/*.pkg.gz'], Pkg::Config.svr4_host, path)
+        Pkg::Util::Ship.ship_svr4('pkg', Pkg::Config.svr4_path)
       end
     end
   end
@@ -375,12 +351,7 @@ namespace :pl do
   task :ship_p5p do
     Pkg::Util::Execution.retry_on_fail(:times => 3) do
       if File.directory?("pkg/solaris/11")
-        if Pkg::Util::Version.final?
-          path = Pkg::Config.p5p_path
-        else
-          path = Pkg::Config.nonfinal_p5p_path || Pkg::Config.p5p_path
-        end
-        Pkg::Util::Ship.ship_pkgs(['pkg/**/*.p5p'], Pkg::Config.p5p_host, path)
+        Pkg::Util::Ship.ship_p5p('pkg', Pkg::Config.p5p_path)
       end
     end
   end
@@ -399,28 +370,12 @@ namespace :pl do
     else
       path = Pkg::Config.dmg_path
     end
-    path = Pkg::Config.nonfinal_dmg_path if Pkg::Config.nonfinal_dmg_path && !Pkg::Util::Version.final?
-
-    Pkg::Util::Ship.ship_pkgs(['pkg/**/*.dmg'], Pkg::Config.dmg_staging_server, path)
-
-    # I really don't care which one we grab, it just has to be some supported
-    # version and architecture from the `osx` hash. So here we're just grabbing
-    # the first one, parsing out some info, and breaking out of the loop. Not
-    # elegant, I know, but effective.
-    Pkg::Platforms::PLATFORM_INFO['osx'].each do |key, value|
-      generic_platform_tag = "osx-#{key}-#{value[:architectures][0]}"
-      Pkg::Util::Ship.create_rolling_repo_link(generic_platform_tag, Pkg::Config.dmg_staging_server, path)
-      break
-    end
-
-    Pkg::Platforms.platform_tags_for_package_format('dmg').each do |platform_tag|
-      # TODO remove the PC1 links when we no longer need to maintain them
-      _, version, arch = Pkg::Platforms.parse_platform_tag(platform_tag)
-      Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', "/opt/downloads/mac/#{version}/PC1/#{arch}", 'dmg')
+    Pkg::Util::Ship.ship_dmg('pkg', path)
+  end
 
-      # Create the latest symlink for the current supported repo
-      Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', Pkg::Paths.artifacts_path(platform_tag, path), 'dmg')
-    end
+  desc "ship nightly apple dmgs to #{Pkg::Config.dmg_staging_server}"
+  task ship_nightly_dmg: 'pl:fetch' do
+    Pkg::Util::Ship.ship_dmg('pkg', Pkg::Config.nonfinal_dmg_path)
   end
 
   desc "ship Arista EOS swix packages and signatures to #{Pkg::Config.swix_staging_server}"
@@ -437,25 +392,18 @@ namespace :pl do
     else
       path = Pkg::Config.swix_path
     end
-    path = Pkg::Config.nonfinal_swix_path if Pkg::Config.nonfinal_swix_path && !Pkg::Util::Version.final?
-
-    Pkg::Util::Ship.ship_pkgs(['pkg/**/*.swix*'], Pkg::Config.swix_staging_server, path)
+    Pkg::Util::Ship.ship_swix('pkg', path)
+  end
 
-    # I really don't care which one we grab, it just has to be some supported
-    # version and architecture from the `eos` hash. So here we're just grabbing
-    # the first one, parsing out some info, and breaking out of the loop. Not
-    # elegant, I know, but effective.
-    Pkg::Platforms::PLATFORM_INFO['eos'].each do |key, value|
-      generic_platform_tag = "eos-#{key}-#{value[:architectures][0]}"
-      Pkg::Util::Ship.create_rolling_repo_link(generic_platform_tag, Pkg::Config.swix_staging_server, path)
-      break
-    end
+  desc "ship nightly Arista EOS swix packages and signatures to #{Pkg::Config.swix_staging_server}"
+  task ship_nightly_swix: 'pl:fetch' do
+    Pkg::Util::Ship.ship_swix('pkg', Pkg::Config.nonfinal_swix_path)
   end
 
   desc "ship tarball and signature to #{Pkg::Config.tar_staging_server}"
   task ship_tar: 'pl:fetch' do
     if Pkg::Config.build_tar
-      Pkg::Util::Ship.ship_pkgs(['pkg/*.tar.gz*'], Pkg::Config.tar_staging_server, Pkg::Config.tarball_path, excludes: ['signing_bundle', 'packaging-bundle'], platform_independent: true)
+      Pkg::Util::Ship.ship_tar('pkg', Pkg::Config.tarball_path, excludes: ['signing_bundle', 'packaging-bundle'], platform_independent: true)
     end
   end
 
@@ -483,29 +431,12 @@ namespace :pl do
     else
       path = Pkg::Config.msi_path
     end
-    path = Pkg::Config.nonfinal_msi_path if Pkg::Config.nonfinal_msi_path && !Pkg::Util::Version.final?
-
-    Pkg::Util::Ship.ship_pkgs(['pkg/**/*.msi'], Pkg::Config.msi_staging_server, path, excludes: ["#{Pkg::Config.project}-x(86|64).msi"])
-
-    # I really don't care which one we grab, it just has to be some supported
-    # version and architecture from the `windows` hash. So here we're just grabbing
-    # the first one, parsing out some info, and breaking out of the loop. Not
-    # elegant, I know, but effective.
-    Pkg::Platforms::PLATFORM_INFO['windows'].each do |key, value|
-      generic_platform_tag = "windows-#{key}-#{value[:architectures][0]}"
-      Pkg::Util::Ship.create_rolling_repo_link(generic_platform_tag, Pkg::Config.msi_staging_server, path)
-
-      # Create the symlinks for the latest supported repo
-      Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', Pkg::Paths.artifacts_path(generic_platform_tag, path), 'msi', arch: 'x64')
-      Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', Pkg::Paths.artifacts_path(generic_platform_tag, path), 'msi', arch: 'x86')
-      break
-    end
+    Pkg::Util::Ship.ship_msi('pkg', path, excludes: ["#{Pkg::Config.project}-x(86|64).msi"])
+  end
 
-    # We provide symlinks to the latest package in a given directory. This
-    # allows users to upgrade more easily to the latest version that we release
-    # TODO remove the links to PC1 when we no longer ship to that repo
-    Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', '/opt/downloads/windows', 'msi', arch: 'x64')
-    Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', '/opt/downloads/windows', 'msi', arch: 'x86')
+  desc "Ship nightly MSI packages to #{Pkg::Config.msi_staging_server}"
+  task ship_nightly_msi: 'pl:fetch' do
+    Pkg::Util::Ship.ship_msi('pkg', Pkg::Config.nonfinal_msi_path, excludes: ["#{Pkg::Config.project}-x(86|64).msi"])
   end
 
   desc 'UBER ship: ship all the things in pkg'