packaging 0.88.77
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE +17 -0
- data/README-Solaris.md +117 -0
- data/README.md +977 -0
- data/lib/packaging.rb +32 -0
- data/lib/packaging/archive.rb +126 -0
- data/lib/packaging/artifactory.rb +651 -0
- data/lib/packaging/artifactory/extensions.rb +94 -0
- data/lib/packaging/config.rb +492 -0
- data/lib/packaging/config/params.rb +387 -0
- data/lib/packaging/config/validations.rb +13 -0
- data/lib/packaging/deb.rb +28 -0
- data/lib/packaging/deb/repo.rb +264 -0
- data/lib/packaging/gem.rb +70 -0
- data/lib/packaging/metrics.rb +15 -0
- data/lib/packaging/nuget.rb +39 -0
- data/lib/packaging/paths.rb +376 -0
- data/lib/packaging/platforms.rb +507 -0
- data/lib/packaging/repo.rb +155 -0
- data/lib/packaging/retrieve.rb +75 -0
- data/lib/packaging/rpm.rb +5 -0
- data/lib/packaging/rpm/repo.rb +254 -0
- data/lib/packaging/sign.rb +8 -0
- data/lib/packaging/sign/deb.rb +9 -0
- data/lib/packaging/sign/dmg.rb +41 -0
- data/lib/packaging/sign/ips.rb +57 -0
- data/lib/packaging/sign/msi.rb +124 -0
- data/lib/packaging/sign/rpm.rb +115 -0
- data/lib/packaging/tar.rb +163 -0
- data/lib/packaging/util.rb +146 -0
- data/lib/packaging/util/date.rb +20 -0
- data/lib/packaging/util/execution.rb +85 -0
- data/lib/packaging/util/file.rb +125 -0
- data/lib/packaging/util/git.rb +174 -0
- data/lib/packaging/util/git_tags.rb +73 -0
- data/lib/packaging/util/gpg.rb +66 -0
- data/lib/packaging/util/jenkins.rb +95 -0
- data/lib/packaging/util/misc.rb +69 -0
- data/lib/packaging/util/net.rb +410 -0
- data/lib/packaging/util/os.rb +17 -0
- data/lib/packaging/util/platform.rb +40 -0
- data/lib/packaging/util/rake_utils.rb +112 -0
- data/lib/packaging/util/serialization.rb +19 -0
- data/lib/packaging/util/ship.rb +300 -0
- data/lib/packaging/util/tool.rb +41 -0
- data/lib/packaging/util/version.rb +334 -0
- data/spec/fixtures/config/ext/build_defaults.yaml +2 -0
- data/spec/fixtures/config/ext/project_data.yaml +2 -0
- data/spec/fixtures/configs/components/test_file.json +1 -0
- data/spec/fixtures/configs/components/test_file_2.json +0 -0
- data/spec/fixtures/configs/components/test_file_not_tagged.json +1 -0
- data/spec/fixtures/configs/components/test_file_wrong_ext.txt +0 -0
- data/spec/fixtures/configs/components/test_file_wrong_ext.wrong +0 -0
- data/spec/fixtures/util/pre_tasks.yaml +4 -0
- data/spec/lib/packaging/artifactory_spec.rb +221 -0
- data/spec/lib/packaging/config_spec.rb +576 -0
- data/spec/lib/packaging/deb/repo_spec.rb +157 -0
- data/spec/lib/packaging/deb_spec.rb +52 -0
- data/spec/lib/packaging/gem_spec.rb +86 -0
- data/spec/lib/packaging/paths_spec.rb +418 -0
- data/spec/lib/packaging/platforms_spec.rb +178 -0
- data/spec/lib/packaging/repo_spec.rb +135 -0
- data/spec/lib/packaging/retrieve_spec.rb +100 -0
- data/spec/lib/packaging/rpm/repo_spec.rb +133 -0
- data/spec/lib/packaging/sign_spec.rb +133 -0
- data/spec/lib/packaging/tar_spec.rb +116 -0
- data/spec/lib/packaging/util/execution_spec.rb +56 -0
- data/spec/lib/packaging/util/file_spec.rb +139 -0
- data/spec/lib/packaging/util/git_spec.rb +160 -0
- data/spec/lib/packaging/util/git_tag_spec.rb +36 -0
- data/spec/lib/packaging/util/gpg_spec.rb +64 -0
- data/spec/lib/packaging/util/jenkins_spec.rb +112 -0
- data/spec/lib/packaging/util/misc_spec.rb +31 -0
- data/spec/lib/packaging/util/net_spec.rb +259 -0
- data/spec/lib/packaging/util/os_spec.rb +31 -0
- data/spec/lib/packaging/util/rake_utils_spec.rb +70 -0
- data/spec/lib/packaging/util/ship_spec.rb +199 -0
- data/spec/lib/packaging/util/version_spec.rb +123 -0
- data/spec/lib/packaging_spec.rb +19 -0
- data/spec/spec_helper.rb +22 -0
- data/static_artifacts/PackageInfo.plist +3 -0
- data/tasks/00_utils.rake +214 -0
- data/tasks/30_metrics.rake +33 -0
- data/tasks/apple.rake +268 -0
- data/tasks/archive.rake +69 -0
- data/tasks/build.rake +12 -0
- data/tasks/clean.rake +5 -0
- data/tasks/config.rake +35 -0
- data/tasks/deb.rake +129 -0
- data/tasks/deb_repos.rake +28 -0
- data/tasks/deprecated.rake +130 -0
- data/tasks/doc.rake +20 -0
- data/tasks/education.rake +57 -0
- data/tasks/fetch.rake +60 -0
- data/tasks/gem.rake +159 -0
- data/tasks/jenkins.rake +538 -0
- data/tasks/jenkins_dynamic.rake +202 -0
- data/tasks/load_extras.rake +21 -0
- data/tasks/mock.rake +348 -0
- data/tasks/nightly_repos.rake +286 -0
- data/tasks/pe_deb.rake +12 -0
- data/tasks/pe_rpm.rake +13 -0
- data/tasks/pe_ship.rake +226 -0
- data/tasks/pe_sign.rake +13 -0
- data/tasks/pe_tar.rake +5 -0
- data/tasks/retrieve.rake +52 -0
- data/tasks/rpm.rake +66 -0
- data/tasks/rpm_repos.rake +29 -0
- data/tasks/ship.rake +692 -0
- data/tasks/sign.rake +154 -0
- data/tasks/tag.rake +8 -0
- data/tasks/tar.rake +28 -0
- data/tasks/update.rake +16 -0
- data/tasks/vanagon.rake +35 -0
- data/tasks/vendor_gems.rake +117 -0
- data/tasks/version.rake +33 -0
- data/tasks/z_data_dump.rake +65 -0
- data/templates/README +1 -0
- data/templates/downstream.xml.erb +47 -0
- data/templates/msi.xml.erb +197 -0
- data/templates/packaging.xml.erb +346 -0
- data/templates/repo.xml.erb +117 -0
- metadata +287 -0
data/tasks/pe_sign.rake
ADDED
|
# PE (Puppet Enterprise) signing task wrappers.
#
# These tasks are only defined when the build is configured as a PE build
# (Pkg::Config.build_pe is truthy). Each task simply delegates to the
# corresponding task in the FOSS 'pl:' namespace via
# Pkg::Util::RakeUtils.invoke_task, so PE builds can use 'pe:'-prefixed names.
if Pkg::Config.build_pe
  namespace :pe do
    desc "Sign all staged in rpms in pkg"
    task :sign_rpms do
      # Delegate to the shared implementation under the pl namespace.
      Pkg::Util::RakeUtils.invoke_task("pl:sign_rpms")
    end

    desc "Sign all debian changes files staged in pkg/pe"
    task :sign_deb_changes do
      Pkg::Util::RakeUtils.invoke_task("pl:sign_deb_changes")
    end
  end
end
data/tasks/pe_tar.rake
ADDED
data/tasks/retrieve.rake
ADDED
|
##
# This task is intended to retrieve packages from the distribution server that
# have been built by jenkins and placed in a specific location,
# /opt/jenkins-builds/$PROJECT/$SHA where $PROJECT is the build project as
# established in build_defaults.yaml and $SHA is the git sha/tag of the project that
# was built into packages. The current day is assumed, but an environment
# variable override exists to retrieve packages from another day. The sha/tag is
# assumed to be the current project's HEAD, e.g. to retrieve packages for a
# release of 3.1.0, checkout 3.1.0 locally before retrieving.
#


namespace :pl do
  namespace :jenkins do
    # NOTE: the original desc used "\." — in a double-quoted string that is
    # identical to "." at runtime, so the needless escape has been dropped
    # without changing the description text.
    desc "Retrieve packages from the distribution server. Check out commit to retrieve"
    # Task arguments:
    #   remote_target - subdirectory on the builds server to pull from
    #                   (defaults to "artifacts")
    #   local_target  - local directory to stage packages into (defaults to "pkg")
    task :retrieve, [:remote_target, :local_target] => 'pl:fetch' do |_t, args|
      # 'project' is required to construct both the URL and the repo path.
      unless Pkg::Config.project
        fail "You must set the 'project' in build_defaults.yaml or with the 'PROJECT_OVERRIDE' environment variable."
      end
      remote_target = args.remote_target || "artifacts"
      local_target = args.local_target || "pkg"
      mkdir_p local_target
      build_url = "http://#{Pkg::Config.builds_server}/#{Pkg::Config.project}/#{Pkg::Config.ref}/#{remote_target}"
      build_path = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}/#{remote_target}"
      if Pkg::Config.foss_only
        # FOSS-only builds retrieve a filtered subset of the artifacts.
        Pkg::Retrieve.foss_only_retrieve(build_url, local_target)
      else
        Pkg::Retrieve.retrieve_all(build_url, build_path, local_target)
      end
      # An empty staging directory means retrieval silently produced nothing;
      # fail loudly rather than letting downstream tasks ship an empty set.
      fail "Uh oh, looks like we didn't find anything in #{local_target} when attempting to retrieve from #{build_url}!" if Dir["#{local_target}/*"].empty?
      puts "Packages staged in #{local_target}"
    end
  end
end

# PE variant of the retrieval task, only defined for PE builds. Unlike the
# FOSS task above it always retrieves everything (no foss_only branch) and
# does not pre-create local_target.
# NOTE(review): presumably the retrieve implementation creates local_target
# itself — confirm before relying on a custom local_target here.
if Pkg::Config.build_pe
  namespace :pe do
    namespace :jenkins do
      desc "Retrieve packages from the distribution server. Check out commit to retrieve"
      task :retrieve, [:remote_target, :local_target] => 'pl:fetch' do |_t, args|
        unless Pkg::Config.project
          fail "You must set the 'project' in build_defaults.yaml or with the 'PROJECT_OVERRIDE' environment variable."
        end
        remote_target = args.remote_target || "artifacts"
        local_target = args.local_target || "pkg"
        build_url = "http://#{Pkg::Config.builds_server}/#{Pkg::Config.project}/#{Pkg::Config.ref}/#{remote_target}"
        build_path = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}/#{remote_target}"
        Pkg::Retrieve.retrieve_all(build_url, build_path, local_target)
      end
    end
  end
end
data/tasks/rpm.rake
ADDED
|
# Stage a temporary rpmbuild "topdir" for the current project.
#
# Creates a temp directory with SOURCES and SPECS subdirectories, copies the
# project tarball(s) from pkg/ into SOURCES, and installs a specfile into
# SPECS — taken from inside the tarball (ext/redhat/<project>.spec) when
# present, otherwise generated from the local ERB template
# ext/redhat/<project>.spec.erb. Fails if neither specfile source exists.
#
# Returns the path (String) to the prepared temporary topdir; the caller is
# responsible for removing it.
def prep_rpm_build_dir
  temp = Pkg::Util::File.mktemp
  tarball = "#{Pkg::Config.project}-#{Pkg::Config.version}.tar.gz"
  FileUtils.mkdir_p([temp, "#{temp}/SOURCES", "#{temp}/SPECS"])
  # Copy the tarball plus any companion files (e.g. signatures) into SOURCES.
  FileUtils.cp_r FileList["pkg/#{tarball}*"], "#{temp}/SOURCES", { :preserve => true }
  # If the file ext/redhat/<project>.spec exists in the tarball, we use it. If
  # it doesn't we try to 'erb' the file from a predicted template in source,
  # ext/redhat/<project>.spec.erb. If that doesn't exist, we fail. To do this,
  # we have to open the tarball.
  FileUtils.cp("pkg/#{tarball}", temp, { :preserve => true })

  # Test for the specfile inside the tarball. The original code discarded the
  # grep result and branched only on $?.success?, which reflects the
  # `tar -tzf` listing — so a tarball *without* a specfile would crash during
  # extraction instead of falling back to the ERB template. Branch on the
  # grep hit as well.
  spec_in_tarball = %x(tar -tzf #{File.join(temp, tarball)}).split.grep(/\/ext\/redhat\/#{Pkg::Config.project}.spec$/)

  if $?.success? && !spec_in_tarball.empty?
    sh "tar -C #{temp} -xzf #{File.join(temp, tarball)} #{Pkg::Config.project}-#{Pkg::Config.version}/ext/redhat/#{Pkg::Config.project}.spec"
    cp("#{temp}/#{Pkg::Config.project}-#{Pkg::Config.version}/ext/redhat/#{Pkg::Config.project}.spec", "#{temp}/SPECS/")
  elsif File.exist?("ext/redhat/#{Pkg::Config.project}.spec.erb")
    # File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
    Pkg::Util::File.erb_file("ext/redhat/#{Pkg::Config.project}.spec.erb", "#{temp}/SPECS/#{Pkg::Config.project}.spec", nil, :binding => Pkg::Config.get_binding)
  else
    fail "Could not locate redhat spec ext/redhat/#{Pkg::Config.project}.spec or ext/redhat/#{Pkg::Config.project}.spec.erb"
  end
  temp
end

# Build rpm packages from the staged tarball.
#
# buildarg - rpmbuild mode: "-bs" builds only the source rpm (default),
#            "-ba" builds both source and binary rpms.
#
# Results are moved into pkg/srpm (and pkg/rpm for "-ba"); the temporary
# build tree is removed afterwards and the written files are printed.
def build_rpm(buildarg = "-bs")
  Pkg::Util::Tool.check_tool('rpmbuild')
  workdir = prep_rpm_build_dir
  rpm_define = "--define \"%_topdir #{workdir}\" "
  # Legacy defines so the resulting packages install on older rpm versions
  # (md5 file digests, gzip payloads, patch fuzz).
  rpm_old_version = '--define "_source_filedigest_algorithm 1" --define "_binary_filedigest_algorithm 1" \
    --define "_binary_payload w9.gzdio" --define "_source_payload w9.gzdio" \
    --define "_default_patch_fuzz 2"'
  args = rpm_define + ' ' + rpm_old_version
  FileUtils.mkdir_p('pkg/srpm')
  if buildarg == '-ba'
    FileUtils.mkdir_p('pkg/rpm')
  end
  if Pkg::Config.sign_tar
    Rake::Task["pl:sign_tar"].invoke
  end
  sh "rpmbuild #{args} #{buildarg} --nodeps #{workdir}/SPECS/#{Pkg::Config.project}.spec"
  mv FileList["#{workdir}/SRPMS/*.rpm"], "pkg/srpm"
  if buildarg == '-ba'
    mv FileList["#{workdir}/RPMS/*/*.rpm"], "pkg/rpm"
  end
  rm_rf workdir
  puts
  output = FileList['pkg/*/*.rpm']
  puts "Wrote:"
  output.each do |line|
    puts line
  end
end

# Public entry points; both depend on the :tar task so the tarball exists
# in pkg/ before rpmbuild is staged.
namespace :package do
  desc "Create srpm from this git repository (unsigned)"
  task :srpm => :tar do
    build_rpm("-bs")
  end

  desc "Create .rpm from this git repository (unsigned)"
  task :rpm => :tar do
    build_rpm("-ba")
  end
end
##
#
# A set of functionality for creating yum rpm repositories throughout the
# standard pkg/ directory layout that the packaging repo creates. The standard
# layout is:
# pkg/{el,fedora}/{5,6,f16,f17,f18}/{products,devel,dependencies,extras}/{i386,x86_64,SRPMS}
#
# Because we'll likely be creating the repos on a server that is remote, e.g.
# the distribution server, the logic here assumes we'll be doing everything via
# ssh commands.
#
# Each task below depends on 'pl:fetch' (defined elsewhere in this repo) and
# delegates the actual work to Pkg::Rpm::Repo.
namespace :pl do
  namespace :jenkins do
    desc "Create yum repositories of built RPM packages for this SHA on the distribution server"
    task :rpm_repos => "pl:fetch" do
      Pkg::Rpm::Repo.create_remote_repos
    end

    desc "Create yum repository configs for package repos for this sha/tag on the distribution server"
    task :generate_rpm_repo_configs => "pl:fetch" do
      Pkg::Rpm::Repo.generate_repo_configs
    end

    desc "Retrieve rpm yum repository configs from distribution server"
    task :rpm_repo_configs => "pl:fetch" do
      Pkg::Rpm::Repo.retrieve_repo_configs
    end
  end
end
data/tasks/ship.rake
ADDED
|
@@ -0,0 +1,692 @@
|
|
|
1
|
+
namespace :pl do
|
|
2
|
+
namespace :remote do
|
|
3
|
+
# These hacky bits execute a pre-existing rake task on the Pkg::Config.apt_host
|
|
4
|
+
# The rake task takes packages in a specific directory and freights them
|
|
5
|
+
# to various target yum and apt repositories based on their specific type
|
|
6
|
+
# e.g., final vs devel vs PE vs FOSS packages
|
|
7
|
+
|
|
8
|
+
desc "Update '#{Pkg::Config.repo_name}' yum repository on '#{Pkg::Config.yum_staging_server}'"
|
|
9
|
+
task update_yum_repo: 'pl:fetch' do
|
|
10
|
+
command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository/Rakefile mk_repo'
|
|
11
|
+
$stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
|
|
12
|
+
if Pkg::Util.ask_yes_or_no
|
|
13
|
+
Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => Pkg::Paths.yum_repo_name, :repo_path => Pkg::Config.yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
|
|
14
|
+
end
|
|
15
|
+
end
|
|
16
|
+
|
|
17
|
+
desc "Update all final yum repositories on '#{Pkg::Config.yum_staging_server}'"
|
|
18
|
+
task update_all_final_yum_repos: 'pl:fetch' do
|
|
19
|
+
command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository/Rakefile mk_repo'
|
|
20
|
+
$stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
|
|
21
|
+
if Pkg::Util.ask_yes_or_no
|
|
22
|
+
Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => '', :repo_path => Pkg::Config.yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
|
|
23
|
+
end
|
|
24
|
+
end
|
|
25
|
+
|
|
26
|
+
desc "Update '#{Pkg::Config.nonfinal_repo_name}' nightly yum repository on '#{Pkg::Config.yum_staging_server}'"
|
|
27
|
+
task update_nightlies_yum_repo: 'pl:fetch' do
|
|
28
|
+
command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository-nightlies/Rakefile mk_repo'
|
|
29
|
+
$stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
|
|
30
|
+
if Pkg::Util.ask_yes_or_no
|
|
31
|
+
Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => Pkg::Config.nonfinal_repo_name, :repo_path => Pkg::Config.nonfinal_yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
|
|
32
|
+
end
|
|
33
|
+
end
|
|
34
|
+
|
|
35
|
+
desc "Update all nightly yum repositories on '#{Pkg::Config.yum_staging_server}'"
|
|
36
|
+
task update_all_nightlies_yum_repos: 'pl:fetch' do
|
|
37
|
+
command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository-nightlies/Rakefile mk_repo'
|
|
38
|
+
$stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
|
|
39
|
+
if Pkg::Util.ask_yes_or_no
|
|
40
|
+
Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => '', :repo_path => Pkg::Config.nonfinal_yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
|
|
41
|
+
end
|
|
42
|
+
end
|
|
43
|
+
|
|
44
|
+
task freight: :update_apt_repo
|
|
45
|
+
|
|
46
|
+
desc "Update remote apt repository on '#{Pkg::Config.apt_signing_server}'"
|
|
47
|
+
task update_apt_repo: 'pl:fetch' do
|
|
48
|
+
$stdout.puts "Really run remote repo update on '#{Pkg::Config.apt_signing_server}'? [y,n]"
|
|
49
|
+
if Pkg::Util.ask_yes_or_no
|
|
50
|
+
Pkg::Repo.update_repo(Pkg::Config.apt_signing_server, Pkg::Config.apt_repo_command, { :repo_name => Pkg::Paths.apt_repo_name, :repo_path => Pkg::Config.apt_repo_path, :repo_host => Pkg::Config.apt_host, :repo_url => Pkg::Config.apt_repo_url })
|
|
51
|
+
end
|
|
52
|
+
end
|
|
53
|
+
|
|
54
|
+
desc "Update nightlies apt repository on '#{Pkg::Config.apt_signing_server}'"
|
|
55
|
+
task update_nightlies_apt_repo: 'pl:fetch' do
|
|
56
|
+
$stdout.puts "Really run remote repo update on '#{Pkg::Config.apt_signing_server}'? [y,n]"
|
|
57
|
+
if Pkg::Util.ask_yes_or_no
|
|
58
|
+
Pkg::Repo.update_repo(Pkg::Config.apt_signing_server, Pkg::Config.nonfinal_apt_repo_command, { :repo_name => Pkg::Config.nonfinal_repo_name, :repo_path => Pkg::Config.nonfinal_apt_repo_path, :repo_host => Pkg::Config.apt_host, :repo_url => Pkg::Config.apt_repo_url })
|
|
59
|
+
end
|
|
60
|
+
end
|
|
61
|
+
|
|
62
|
+
desc "Update apt and yum repos"
|
|
63
|
+
task :update_foss_repos => "pl:fetch" do
|
|
64
|
+
Rake::Task['pl:remote:update_apt_repo'].invoke
|
|
65
|
+
Rake::Task['pl:remote:update_yum_repo'].invoke
|
|
66
|
+
end
|
|
67
|
+
|
|
68
|
+
desc "Update nightlies apt and yum repos"
|
|
69
|
+
task :update_nightly_repos => "pl:fetch" do
|
|
70
|
+
Rake::Task['pl:remote:update_nightlies_apt_repo'].invoke
|
|
71
|
+
Rake::Task['pl:remote:update_nightlies_yum_repo'].invoke
|
|
72
|
+
end
|
|
73
|
+
|
|
74
|
+
desc "Update remote ips repository on #{Pkg::Config.ips_host}"
|
|
75
|
+
task :update_ips_repo => 'pl:fetch' do
|
|
76
|
+
if Dir['pkg/ips/pkgs/**/*'].empty? && Dir['pkg/solaris/11/**/*'].empty?
|
|
77
|
+
$stdout.puts "There aren't any p5p packages in pkg/ips/pkgs or pkg/solaris/11. Maybe something went wrong?"
|
|
78
|
+
else
|
|
79
|
+
|
|
80
|
+
if !Dir['pkg/ips/pkgs/**/*'].empty?
|
|
81
|
+
source_dir = 'pkg/ips/pkgs/'
|
|
82
|
+
else
|
|
83
|
+
source_dir = 'pkg/solaris/11/'
|
|
84
|
+
end
|
|
85
|
+
|
|
86
|
+
tmpdir, _ = Pkg::Util::Net.remote_execute(
|
|
87
|
+
Pkg::Config.ips_host,
|
|
88
|
+
'mktemp -d -p /var/tmp',
|
|
89
|
+
{ capture_output: true }
|
|
90
|
+
)
|
|
91
|
+
tmpdir.chomp!
|
|
92
|
+
|
|
93
|
+
Pkg::Util::Net.rsync_to(source_dir, Pkg::Config.ips_host, tmpdir)
|
|
94
|
+
|
|
95
|
+
remote_cmd = %(for pkg in #{tmpdir}/*.p5p; do
|
|
96
|
+
sudo pkgrecv -s $pkg -d #{Pkg::Config.ips_path} '*';
|
|
97
|
+
done)
|
|
98
|
+
|
|
99
|
+
Pkg::Util::Net.remote_execute(Pkg::Config.ips_host, remote_cmd)
|
|
100
|
+
Pkg::Util::Net.remote_execute(Pkg::Config.ips_host, "sudo pkgrepo refresh -s #{Pkg::Config.ips_path}")
|
|
101
|
+
Pkg::Util::Net.remote_execute(Pkg::Config.ips_host, "sudo /usr/sbin/svcadm restart svc:/application/pkg/server:#{Pkg::Config.ips_repo || 'default'}")
|
|
102
|
+
end
|
|
103
|
+
end
|
|
104
|
+
|
|
105
|
+
desc "Move dmg repos from #{Pkg::Config.dmg_staging_server} to #{Pkg::Config.dmg_host}"
|
|
106
|
+
task deploy_dmg_repo: 'pl:fetch' do
|
|
107
|
+
puts "Really run remote rsync to deploy OS X repos from #{Pkg::Config.dmg_staging_server} to #{Pkg::Config.dmg_host}? [y,n]"
|
|
108
|
+
if Pkg::Util.ask_yes_or_no
|
|
109
|
+
Pkg::Util::Execution.retry_on_fail(times: 3) do
|
|
110
|
+
cmd = Pkg::Util::Net.rsync_cmd(Pkg::Config.dmg_path, target_host: Pkg::Config.dmg_host, extra_flags: ['--update'])
|
|
111
|
+
Pkg::Util::Net.remote_execute(Pkg::Config.dmg_staging_server, cmd)
|
|
112
|
+
end
|
|
113
|
+
end
|
|
114
|
+
end
|
|
115
|
+
|
|
116
|
+
desc "Move swix repos from #{Pkg::Config.swix_staging_server} to #{Pkg::Config.swix_host}"
|
|
117
|
+
task deploy_swix_repo: 'pl:fetch' do
|
|
118
|
+
puts "Really run remote rsync to deploy Arista repos from #{Pkg::Config.swix_staging_server} to #{Pkg::Config.swix_host}? [y,n]"
|
|
119
|
+
if Pkg::Util.ask_yes_or_no
|
|
120
|
+
Pkg::Util::Execution.retry_on_fail(times: 3) do
|
|
121
|
+
cmd = Pkg::Util::Net.rsync_cmd(Pkg::Config.swix_path, target_host: Pkg::Config.swix_host, extra_flags: ['--update'])
|
|
122
|
+
Pkg::Util::Net.remote_execute(Pkg::Config.swix_staging_server, cmd)
|
|
123
|
+
end
|
|
124
|
+
end
|
|
125
|
+
end
|
|
126
|
+
|
|
127
|
+
desc "Move tar repos from #{Pkg::Config.tar_staging_server} to #{Pkg::Config.tar_host}"
|
|
128
|
+
task deploy_tar_repo: 'pl:fetch' do
|
|
129
|
+
puts "Really run remote rsync to deploy source tarballs from #{Pkg::Config.tar_staging_server} to #{Pkg::Config.tar_host}? [y,n]"
|
|
130
|
+
if Pkg::Util.ask_yes_or_no
|
|
131
|
+
files = Dir.glob("pkg/#{Pkg::Config.project}-#{Pkg::Config.version}.tar.gz*")
|
|
132
|
+
if files.empty?
|
|
133
|
+
puts 'There are no tarballs to ship'
|
|
134
|
+
else
|
|
135
|
+
Pkg::Util::Execution.retry_on_fail(times: 3) do
|
|
136
|
+
cmd = Pkg::Util::Net.rsync_cmd(Pkg::Config.tarball_path, target_host: Pkg::Config.tar_host, extra_flags: ['--update'])
|
|
137
|
+
Pkg::Util::Net.remote_execute(Pkg::Config.tar_staging_server, cmd)
|
|
138
|
+
end
|
|
139
|
+
end
|
|
140
|
+
end
|
|
141
|
+
end
|
|
142
|
+
|
|
143
|
+
desc "Move MSI repos from #{Pkg::Config.msi_staging_server} to #{Pkg::Config.msi_host}"
|
|
144
|
+
task deploy_msi_repo: 'pl:fetch' do
|
|
145
|
+
puts "Really run remote rsync to deploy source MSIs from #{Pkg::Config.msi_staging_server} to #{Pkg::Config.msi_host}? [y,n]"
|
|
146
|
+
if Pkg::Util.ask_yes_or_no
|
|
147
|
+
files = Dir.glob('pkg/windows/**/*.msi')
|
|
148
|
+
if files.empty?
|
|
149
|
+
puts 'There are no MSIs to ship'
|
|
150
|
+
else
|
|
151
|
+
Pkg::Util::Execution.retry_on_fail(times: 3) do
|
|
152
|
+
cmd = Pkg::Util::Net.rsync_cmd(Pkg::Config.msi_path, target_host: Pkg::Config.msi_host, extra_flags: ['--update'])
|
|
153
|
+
Pkg::Util::Net.remote_execute(Pkg::Config.msi_staging_server, cmd)
|
|
154
|
+
end
|
|
155
|
+
end
|
|
156
|
+
end
|
|
157
|
+
end
|
|
158
|
+
|
|
159
|
+
desc "Move signed deb repos from #{Pkg::Config.apt_signing_server} to #{Pkg::Config.apt_host}"
|
|
160
|
+
task deploy_apt_repo: 'pl:fetch' do
|
|
161
|
+
puts "Really run remote rsync to deploy Debian repos from #{Pkg::Config.apt_signing_server} to #{Pkg::Config.apt_host}? [y,n]"
|
|
162
|
+
if Pkg::Util.ask_yes_or_no
|
|
163
|
+
Pkg::Util::Execution.retry_on_fail(times: 3) do
|
|
164
|
+
Pkg::Deb::Repo.deploy_repos(
|
|
165
|
+
Pkg::Config.apt_repo_path,
|
|
166
|
+
Pkg::Config.apt_repo_staging_path,
|
|
167
|
+
Pkg::Config.apt_signing_server,
|
|
168
|
+
Pkg::Config.apt_host,
|
|
169
|
+
ENV['DRYRUN']
|
|
170
|
+
)
|
|
171
|
+
end
|
|
172
|
+
end
|
|
173
|
+
end
|
|
174
|
+
|
|
175
|
+
desc "Copy signed deb repos from #{Pkg::Config.apt_signing_server} to AWS S3"
|
|
176
|
+
task :deploy_apt_repo_to_s3 => 'pl:fetch' do
|
|
177
|
+
puts "Really run S3 sync to deploy Debian repos from #{Pkg::Config.apt_signing_server} to AWS S3? [y,n]"
|
|
178
|
+
if Pkg::Util.ask_yes_or_no
|
|
179
|
+
Pkg::Util::Execution.retry_on_fail(:times => 3) do
|
|
180
|
+
command = 'sudo /usr/local/bin/s3_repo_sync.sh apt.puppetlabs.com'
|
|
181
|
+
Pkg::Util::Net.remote_execute(Pkg::Config.apt_signing_server, command)
|
|
182
|
+
end
|
|
183
|
+
end
|
|
184
|
+
end
|
|
185
|
+
|
|
186
|
+
desc "Copy rpm repos from #{Pkg::Config.yum_staging_server} to #{Pkg::Config.yum_host}"
|
|
187
|
+
task deploy_yum_repo: 'pl:fetch' do
|
|
188
|
+
puts "Really run remote rsync to deploy yum repos from #{Pkg::Config.yum_staging_server} to #{Pkg::Config.yum_host}? [y,n]"
|
|
189
|
+
if Pkg::Util.ask_yes_or_no
|
|
190
|
+
Pkg::Util::Execution.retry_on_fail(times: 3) do
|
|
191
|
+
Pkg::Rpm::Repo.deploy_repos(
|
|
192
|
+
Pkg::Config.yum_repo_path,
|
|
193
|
+
Pkg::Config.yum_staging_server,
|
|
194
|
+
Pkg::Config.yum_host,
|
|
195
|
+
ENV['DRYRUN']
|
|
196
|
+
)
|
|
197
|
+
end
|
|
198
|
+
end
|
|
199
|
+
end
|
|
200
|
+
|
|
201
|
+
desc "Copy signed RPM repos from #{Pkg::Config.yum_staging_server} to AWS S3"
|
|
202
|
+
task :deploy_yum_repo_to_s3 => 'pl:fetch' do
|
|
203
|
+
puts "Really run S3 sync to deploy RPM repos from #{Pkg::Config.yum_staging_server} to AWS S3? [y,n]"
|
|
204
|
+
if Pkg::Util.ask_yes_or_no
|
|
205
|
+
Pkg::Util::Execution.retry_on_fail(:times => 3) do
|
|
206
|
+
command = 'sudo /usr/local/bin/s3_repo_sync.sh yum.puppetlabs.com'
|
|
207
|
+
Pkg::Util::Net.remote_execute(Pkg::Config.yum_staging_server, command)
|
|
208
|
+
end
|
|
209
|
+
end
|
|
210
|
+
end
|
|
211
|
+
|
|
212
|
+
desc "Sync downloads.puppetlabs.com from #{Pkg::Config.staging_server} to AWS S3"
|
|
213
|
+
task :deploy_downloads_to_s3 => 'pl:fetch' do
|
|
214
|
+
puts "Really run S3 sync to sync downloads.puppetlabs.com from #{Pkg::Config.staging_server} to AWS S3? [y,n]"
|
|
215
|
+
if Pkg::Util.ask_yes_or_no
|
|
216
|
+
Pkg::Util::Execution.retry_on_fail(:times => 3) do
|
|
217
|
+
command = 'sudo /usr/local/bin/s3_repo_sync.sh downloads.puppetlabs.com'
|
|
218
|
+
Pkg::Util::Net.remote_execute(Pkg::Config.staging_server, command)
|
|
219
|
+
end
|
|
220
|
+
end
|
|
221
|
+
end
|
|
222
|
+
|
|
223
|
+
desc "Sync apt, yum, and downloads.pl.com to AWS S3"
|
|
224
|
+
task :deploy_final_builds_to_s3 => "pl:fetch" do
|
|
225
|
+
Rake::Task['pl:remote:deploy_apt_repo_to_s3'].invoke
|
|
226
|
+
Rake::Task['pl:remote:deploy_yum_repo_to_s3'].invoke
|
|
227
|
+
Rake::Task['pl:remote:deploy_downloads_to_s3'].invoke
|
|
228
|
+
end
|
|
229
|
+
|
|
230
|
+
desc "Sync nightlies.puppetlabs.com from #{Pkg::Config.staging_server} to AWS S3"
|
|
231
|
+
task :deploy_nightlies_to_s3 => 'pl:fetch' do
|
|
232
|
+
puts "Deploying nightly builds from #{Pkg::Config.staging_server} to AWS S3..."
|
|
233
|
+
Pkg::Util::Execution.retry_on_fail(:times => 3) do
|
|
234
|
+
command = 'sudo /usr/local/bin/s3_repo_sync.sh nightlies.puppet.com'
|
|
235
|
+
Pkg::Util::Net.remote_execute(Pkg::Config.staging_server, command)
|
|
236
|
+
end
|
|
237
|
+
end
|
|
238
|
+
|
|
239
|
+
desc "Sync yum and apt from #{Pkg::Config.staging_server} to rsync servers"
|
|
240
|
+
task :deploy_to_rsync_server => 'pl:fetch' do
|
|
241
|
+
# This task must run after the S3 sync has run, or else /opt/repo-s3-stage won't be up-to-date
|
|
242
|
+
puts "Really run rsync to sync apt and yum from #{Pkg::Config.staging_server} to rsync servers? Only say yes if the S3 sync task has run. [y,n]"
|
|
243
|
+
if Pkg::Util.ask_yes_or_no
|
|
244
|
+
Pkg::Util::Execution.retry_on_fail(:times => 3) do
|
|
245
|
+
Pkg::Config.rsync_servers.each do |rsync_server|
|
|
246
|
+
['apt', 'yum'].each do |repo|
|
|
247
|
+
# Don't --delete so that folks using archived packages can continue to do so
|
|
248
|
+
command = "sudo su - rsync --command 'rsync --verbose -a --exclude '*.html' /opt/repo-s3-stage/repositories/#{repo}.puppetlabs.com/ rsync@#{rsync_server}:/opt/repository/#{repo}'"
|
|
249
|
+
Pkg::Util::Net.remote_execute(Pkg::Config.staging_server, command)
|
|
250
|
+
end
|
|
251
|
+
end
|
|
252
|
+
end
|
|
253
|
+
end
|
|
254
|
+
end
|
|
255
|
+
|
|
256
|
+
desc "Remotely link nightly shipped gems to latest versions on #{Pkg::Config.gem_host}"
|
|
257
|
+
task link_nightly_shipped_gems_to_latest: 'pl:fetch' do
|
|
258
|
+
Pkg::Config.gemversion = Pkg::Util::Version.extended_dot_version
|
|
259
|
+
|
|
260
|
+
remote_path = Pkg::Config.nonfinal_gem_path
|
|
261
|
+
gems = FileList['pkg/*.gem'].map! { |path| path.gsub!('pkg/', '') }
|
|
262
|
+
command = %(cd #{remote_path}; )
|
|
263
|
+
|
|
264
|
+
command += gems.map! do |gem_name|
|
|
265
|
+
%(sudo ln -sf #{gem_name} #{gem_name.gsub(Pkg::Config.gemversion, 'latest')})
|
|
266
|
+
end.join(';')
|
|
267
|
+
|
|
268
|
+
command += %(; sync)
|
|
269
|
+
|
|
270
|
+
Pkg::Util::Net.remote_execute(Pkg::Config.gem_host, command)
|
|
271
|
+
end
|
|
272
|
+
end
|
|
273
|
+
|
|
274
|
+
desc "Ship mocked rpms to #{Pkg::Config.yum_staging_server}"
|
|
275
|
+
task ship_rpms: 'pl:fetch' do
|
|
276
|
+
Pkg::Util::Ship.ship_rpms('pkg', Pkg::Config.yum_repo_path)
|
|
277
|
+
end
|
|
278
|
+
|
|
279
|
+
desc "Ship nightly rpms to #{Pkg::Config.yum_staging_server}"
|
|
280
|
+
task ship_nightly_rpms: 'pl:fetch' do
|
|
281
|
+
Pkg::Util::Ship.ship_rpms('pkg', Pkg::Config.nonfinal_yum_repo_path, nonfinal: true)
|
|
282
|
+
end
|
|
283
|
+
|
|
284
|
+
desc "Ship cow-built debs to #{Pkg::Config.apt_signing_server}"
|
|
285
|
+
task ship_debs: 'pl:fetch' do
|
|
286
|
+
Pkg::Util::Ship.ship_debs('pkg', Pkg::Config.apt_repo_staging_path, chattr: false)
|
|
287
|
+
end
|
|
288
|
+
|
|
289
|
+
desc "Ship nightly debs to #{Pkg::Config.apt_signing_server}"
|
|
290
|
+
task ship_nightly_debs: 'pl:fetch' do
|
|
291
|
+
Pkg::Util::Ship.ship_debs('pkg', Pkg::Config.nonfinal_apt_repo_staging_path, chattr: false, nonfinal: true)
|
|
292
|
+
end
|
|
293
|
+
|
|
294
|
+
desc 'Ship built gem to rubygems.org, internal Gem mirror, and public file server'
|
|
295
|
+
task ship_gem: 'pl:fetch' do
|
|
296
|
+
# We want to ship a Gem only for projects that build gems, so
|
|
297
|
+
# all of the Gem shipping tasks are wrapped in an `if`.
|
|
298
|
+
if Pkg::Config.build_gem
|
|
299
|
+
# Even if a project builds a gem, if it uses the odd_even or zero-based
|
|
300
|
+
# strategies, we only want to ship final gems because otherwise a
|
|
301
|
+
# development gem would be preferred over the last final gem
|
|
302
|
+
if Pkg::Util::Version.final?
|
|
303
|
+
FileList['pkg/*.gem'].each do |gem_file|
|
|
304
|
+
puts 'This will ship to an internal gem mirror, a public file server, and rubygems.org'
|
|
305
|
+
puts "Do you want to start shipping the rubygem '#{gem_file}'?"
|
|
306
|
+
next unless Pkg::Util.ask_yes_or_no
|
|
307
|
+
Rake::Task['pl:ship_gem_to_rubygems'].execute(file: gem_file)
|
|
308
|
+
end
|
|
309
|
+
|
|
310
|
+
Rake::Task['pl:ship_gem_to_downloads'].invoke
|
|
311
|
+
else
|
|
312
|
+
$stderr.puts 'Not shipping development gem using odd_even strategy for the sake of your users.'
|
|
313
|
+
end
|
|
314
|
+
end
|
|
315
|
+
end
|
|
316
|
+
|
|
317
|
+
desc 'Ship built gem to internal Gem mirror and public nightlies file server'
|
|
318
|
+
task ship_nightly_gem: 'pl:fetch' do
|
|
319
|
+
# We want to ship a Gem only for projects that build gems, so
|
|
320
|
+
# all of the Gem shipping tasks are wrapped in an `if`.
|
|
321
|
+
if Pkg::Config.build_gem
|
|
322
|
+
fail 'Value `Pkg::Config.gem_host` not defined, skipping nightly ship' unless Pkg::Config.gem_host
|
|
323
|
+
fail 'Value `Pkg::Config.nonfinal_gem_path` not defined, skipping nightly ship' unless Pkg::Config.nonfinal_gem_path
|
|
324
|
+
FileList['pkg/*.gem'].each do |gem_file|
|
|
325
|
+
Pkg::Gem.ship_to_internal_mirror(gem_file)
|
|
326
|
+
end
|
|
327
|
+
Pkg::Util::Execution.retry_on_fail(times: 3) do
|
|
328
|
+
Pkg::Util::Ship.ship_gem('pkg', Pkg::Config.nonfinal_gem_path, platform_independent: true)
|
|
329
|
+
end
|
|
330
|
+
end
|
|
331
|
+
end
|
|
332
|
+
|
|
333
|
+
desc 'Ship built gem to rubygems.org'
|
|
334
|
+
task :ship_gem_to_rubygems, [:file] => 'pl:fetch' do |_t, args|
|
|
335
|
+
puts "Do you want to ship #{args[:file]} to rubygems.org?"
|
|
336
|
+
if Pkg::Util.ask_yes_or_no
|
|
337
|
+
puts "Shipping gem #{args[:file]} to rubygems.org"
|
|
338
|
+
Pkg::Util::Execution.retry_on_fail(times: 3) do
|
|
339
|
+
Pkg::Gem.ship_to_rubygems(args[:file])
|
|
340
|
+
end
|
|
341
|
+
end
|
|
342
|
+
end
|
|
343
|
+
|
|
344
|
+
desc "Ship built gems to public Downloads server (#{Pkg::Config.gem_host})"
task :ship_gem_to_downloads => 'pl:fetch' do
  # Both the destination host and the remote path must be configured.
  # The warning previously named only `gem_host`, even though a missing
  # `gem_path` also causes the ship to be skipped; it now names both.
  if Pkg::Config.gem_host && Pkg::Config.gem_path
    # Retried because the upload is a network operation.
    Pkg::Util::Execution.retry_on_fail(times: 3) do
      Pkg::Util::Ship.ship_gem('pkg', Pkg::Config.gem_path, platform_independent: true)
    end
  else
    warn 'Values `Pkg::Config.gem_host` and/or `Pkg::Config.gem_path` not defined; skipping shipping to public Download server'
  end
end
|
|
354
|
+
|
|
355
|
+
desc "Ship svr4 packages to #{Pkg::Config.svr4_host}"
task :ship_svr4 do
  # Only builds that produced Solaris 10 artifacts have svr4 packages;
  # otherwise this is a quiet no-op.
  Pkg::Util::Execution.retry_on_fail(times: 3) do
    Pkg::Util::Ship.ship_svr4('pkg', Pkg::Config.svr4_path) if File.directory?("pkg/solaris/10")
  end
end
|
|
363
|
+
|
|
364
|
+
desc "Ship p5p packages to #{Pkg::Config.p5p_host}"
task :ship_p5p do
  # Only builds that produced Solaris 11 artifacts have p5p packages;
  # otherwise this is a quiet no-op.
  Pkg::Util::Execution.retry_on_fail(times: 3) do
    Pkg::Util::Ship.ship_p5p('pkg', Pkg::Config.p5p_path) if File.directory?("pkg/solaris/11")
  end
end
|
|
372
|
+
|
|
373
|
+
desc "ship apple dmg to #{Pkg::Config.dmg_staging_server}"
task ship_dmg: 'pl:fetch' do
  # Resolve the remote dmg repo root, then push everything under pkg/.
  Pkg::Util::Ship.ship_dmg('pkg', Pkg::Paths.remote_repo_base(package_format: 'dmg'))
end
|
|
378
|
+
|
|
379
|
+
desc "ship nightly apple dmgs to #{Pkg::Config.dmg_staging_server}"
task ship_nightly_dmg: 'pl:fetch' do
  # Nightly builds land in the nonfinal repo location.
  nightly_path = Pkg::Paths.remote_repo_base(package_format: 'dmg', nonfinal: true)
  Pkg::Util::Ship.ship_dmg('pkg', nightly_path, nonfinal: true)
end
|
|
384
|
+
|
|
385
|
+
desc "ship Arista EOS swix packages and signatures to #{Pkg::Config.swix_staging_server}"
task ship_swix: 'pl:fetch' do
  # Resolve the remote swix repo root, then push everything under pkg/.
  Pkg::Util::Ship.ship_swix('pkg', Pkg::Paths.remote_repo_base(package_format: 'swix'))
end
|
|
390
|
+
|
|
391
|
+
desc "ship nightly Arista EOS swix packages and signatures to #{Pkg::Config.swix_staging_server}"
task ship_nightly_swix: 'pl:fetch' do
  # Nightly builds land in the nonfinal repo location.
  nightly_path = Pkg::Paths.remote_repo_base(package_format: 'swix', nonfinal: true)
  Pkg::Util::Ship.ship_swix('pkg', nightly_path, nonfinal: true)
end
|
|
396
|
+
|
|
397
|
+
desc "ship tarball and signature to #{Pkg::Config.tar_staging_server}"
task ship_tar: 'pl:fetch' do
  # Tarballs are only shipped for projects configured to build them.
  next unless Pkg::Config.build_tar

  # The signing/packaging git bundles live alongside the tarballs in pkg/
  # and must not be shipped with them.
  Pkg::Util::Ship.ship_tar(
    'pkg',
    Pkg::Config.tarball_path,
    excludes: ['signing_bundle', 'packaging-bundle'],
    platform_independent: true
  )
end
|
|
403
|
+
|
|
404
|
+
desc "ship Windows nuget packages to #{Pkg::Config.nuget_host}"
task ship_nuget: 'pl:fetch' do
  # Collect every nuget package anywhere under pkg/ (builds may place them
  # in per-platform subdirectories).
  packages = Dir['pkg/**/*.nupkg']
  if packages.empty?
    # The message previously claimed 'pkg/windows', but the glob above
    # searches all of pkg/ -- keep the message in agreement with the glob.
    $stdout.puts "There aren't any nuget packages in pkg/. Maybe something went wrong?"
  else
    Pkg::Nuget.ship(packages)
  end
end
|
|
413
|
+
|
|
414
|
+
desc "Ship MSI packages to #{Pkg::Config.msi_staging_server}"
task ship_msi: 'pl:fetch' do
  msi_path = Pkg::Paths.remote_repo_base(package_format: 'msi')
  # Exclude the versionless <project>-x86/x64.msi hard links (created by
  # pl:jenkins:ship) from the upload.
  Pkg::Util::Ship.ship_msi('pkg', msi_path, excludes: ["#{Pkg::Config.project}-x(86|64).msi"])
end
|
|
419
|
+
|
|
420
|
+
desc "Ship nightly MSI packages to #{Pkg::Config.msi_staging_server}"
task ship_nightly_msi: 'pl:fetch' do
  # Nightly builds land in the nonfinal repo location; the versionless
  # <project>-x86/x64.msi hard links are excluded from the upload.
  nightly_path = Pkg::Paths.remote_repo_base(package_format: 'msi', nonfinal: true)
  Pkg::Util::Ship.ship_msi('pkg', nightly_path, excludes: ["#{Pkg::Config.project}-x(86|64).msi"], nonfinal: true)
end
|
|
425
|
+
|
|
426
|
+
desc "Add #{Pkg::Config.project} version #{Pkg::Config.ref} to release-metrics"
task update_release_metrics: 'pl:fetch' do
  # Record this release in the release-metrics tracking data.
  Pkg::Metrics.update_release_metrics
end
|
|
430
|
+
|
|
431
|
+
desc 'UBER ship: ship all the things in pkg'
task uber_ship: 'pl:fetch' do
  # Bail out early unless the operator confirms the full artifact list.
  unless Pkg::Util.confirm_ship(FileList['pkg/**/*'])
    puts 'Ship canceled'
    exit
  end

  # Fan out to every per-format ship task.
  %w[
    pl:ship_rpms
    pl:ship_debs
    pl:ship_dmg
    pl:ship_swix
    pl:ship_nuget
    pl:ship_tar
    pl:ship_svr4
    pl:ship_p5p
    pl:ship_msi
  ].each { |ship_task| Rake::Task[ship_task].invoke }

  # Optionally record shipping metrics when benchmarking is enabled.
  if Pkg::Config.benchmark
    add_shipped_metrics(pe_version: ENV['PE_VER'], is_rc: !Pkg::Util::Version.final?)
    post_shipped_metrics
  end
end
|
|
450
|
+
|
|
451
|
+
desc 'Create the rolling repo links'
task :create_repo_links => 'pl:fetch' do
  # Delegate to the shared helper that maintains the rolling repo links.
  Pkg::Util::Ship.create_rolling_repo_links
end
|
|
455
|
+
|
|
456
|
+
desc 'Create rolling repo links for nightlies'
task :create_nightly_repo_links => 'pl:fetch' do
  # Same helper as create_repo_links; `true` selects the nightly layout.
  Pkg::Util::Ship.create_rolling_repo_links(true)
end
|
|
460
|
+
|
|
461
|
+
desc 'Test out the ship requirements'
task ship_check: 'pl:fetch' do
  # Pre-flight check for a release: probes environment variables, SSH and
  # GPG access to the configured hosts, OSX keychain unlocking, and
  # rubygems credentials/ownership. Accumulates human-readable problems in
  # `errs` and prints a summary at the end; it never fails the task itself.
  errs = []
  ssh_errs = []
  gpg_errs = []

  # The release checklist expects TEAM=release in the environment.
  if ENV['TEAM']
    unless ENV['TEAM'] == 'release'
      errs << "TEAM environment variable is #{ENV['TEAM']}. It should be 'release'"
    end
  else
    errs << 'TEAM environment variable is not set. This should be set to release'
  end
  # Check SSH access to the staging servers
  ssh_errs << Pkg::Util::Net.check_host_ssh(Pkg::Util.filter_configs('staging_server').values.uniq)
  # Check SSH access to the signing servers, with some windows special-ness
  # (the MSI signer is accessed as Administrator, so it is checked separately).
  ssh_errs << Pkg::Util::Net.check_host_ssh(Pkg::Util.filter_configs('signing_server').values.uniq - [Pkg::Config.msi_signing_server])
  ssh_errs << Pkg::Util::Net.check_host_ssh("Administrator@#{Pkg::Config.msi_signing_server}")
  # Check SSH access to the final shipped hosts
  # (the lookahead regex selects *_host config values excluding build/internal ones).
  ssh_errs << Pkg::Util::Net.check_host_ssh(Pkg::Util.filter_configs('^(?!.*(?=build|internal)).*_host$').values.uniq)
  ssh_errs.flatten!
  unless ssh_errs.empty?
    ssh_errs.each do |host|
      errs << "Unable to ssh to #{host}"
    end
  end

  # Check for GPG on linux-y systems
  gpg_errs << Pkg::Util::Net.check_host_gpg(Pkg::Config.apt_signing_server, Pkg::Util::Gpg.key)
  gpg_errs << Pkg::Util::Net.check_host_gpg(Pkg::Config.distribution_server, Pkg::Util::Gpg.key)
  gpg_errs.flatten!
  # ignore gpg errors for hosts we couldn't ssh into
  gpg_errs -= ssh_errs
  unless gpg_errs.empty?
    gpg_errs.each do |host|
      errs << "Secret key #{Pkg::Util::Gpg.key} not found on #{host}"
    end
  end

  # For windows and solaris it looks like as long as you have ssh access
  # to the signers you should be able to sign. If this changes in the future
  # we should add more checks here, but for now it should be fine.
  # Check for ability to sign OSX. Should just need to be able to unlock keychain
  begin
    unless ssh_errs.include?(Pkg::Config.osx_signing_server)
      Pkg::Util::Net.remote_execute(
        Pkg::Config.osx_signing_server,
        %(/usr/bin/security -q unlock-keychain -p "#{Pkg::Config.osx_signing_keychain_pw}" "#{Pkg::Config.osx_signing_keychain}"),
        { extra_options: '-oBatchMode=yes' }
      )
    end
  # NOTE(review): bare rescue catches StandardError; consider naming the
  # expected exception class(es) so unrelated failures aren't masked.
  rescue
    errs << "Unlocking the OSX keychain failed! Check the password in your .bashrc on #{Pkg::Config.osx_signing_server}"
  end

  if Pkg::Config.build_gem
    # Do we have rubygems access set up
    if Pkg::Util::File.file_exists?("#{ENV['HOME']}/.gem/credentials")
      # Do we have permissions to publish this gem on rubygems
      unless Pkg::Util::Misc.check_rubygems_ownership(Pkg::Config.gem_name)
        errs << "You don't own #{Pkg::Config.gem_name} on rubygems.org"
      end
    else
      errs << "You haven't set up your .gem/credentials file for rubygems.org access"
    end
  end

  # Print the summary of everything collected above.
  puts "\n\n"
  if errs.empty?
    puts 'Hooray! You should be good for shipping!'
  else
    puts "Found #{errs.length} issues:"
    errs.each do |err|
      puts " * #{err}"
    end
  end

end
|
|
539
|
+
|
|
540
|
+
# It is odd to namespace this ship task under :jenkins, but this task is
|
|
541
|
+
# intended to be a component of the jenkins-based build workflow even if it
|
|
542
|
+
# doesn't interact with jenkins directly. The :target argument is so that we
|
|
543
|
+
# can invoke this task with a subdirectory of the standard distribution
|
|
544
|
+
# server path. That way we can separate out built artifacts from
|
|
545
|
+
# signed/actually shipped artifacts e.g. $path/shipped/ or $path/artifacts.
|
|
546
|
+
namespace :jenkins do
|
|
547
|
+
desc 'ship pkg directory contents to artifactory'
task :ship_to_artifactory, :local_dir do |_t, args|
  Pkg::Util::RakeUtils.invoke_task('pl:fetch')
  fail "You must set the 'project' in build_defaults.yaml or with the 'PROJECT_OVERRIDE' environment variable." unless Pkg::Config.project

  artifactory = Pkg::ManageArtifactory.new(Pkg::Config.project, Pkg::Config.ref)
  local_dir = args.local_dir || 'pkg'

  Dir.glob("#{local_dir}/**/*").each do |artifact|
    next if File.directory?(artifact)

    # Metadata files (.yaml/.json) are always deployed; other artifacts are
    # only uploaded when not already present on artifactory.
    metadata_file = ['.yaml', '.json'].include?(File.extname(artifact))
    if !metadata_file && artifactory.package_exists_on_artifactory?(artifact)
      warn "Attempt to upload '#{artifact}' failed. Package already exists!"
    else
      artifactory.deploy_package(artifact)
    end
  end
end
|
|
566
|
+
|
|
567
|
+
desc 'Ship pkg directory contents to distribution server'
task :ship, :target, :local_dir do |_t, args|
  # Ships the built artifacts in `local_dir` (default 'pkg') to the
  # distribution server under <jenkins_repo_path>/<project>/<ref>/<target>,
  # bundles the project and packaging git repos alongside them, generates
  # the params yaml snapshot, and marks the shipped files immutable.
  Pkg::Util::RakeUtils.invoke_task('pl:fetch')
  unless Pkg::Config.project
    fail "You must set the 'project' in build_defaults.yaml or with the 'PROJECT_OVERRIDE' environment variable."
  end
  target = args.target || 'artifacts'
  local_dir = args.local_dir || 'pkg'
  project_basedir = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
  artifact_dir = "#{project_basedir}/#{target}"

  # For EZBake builds, we also want to include the ezbake.manifest file to
  # get a snapshot of this build and all dependencies. We eventually will
  # create a yaml version of this file, but until that point we want to
  # make the original ezbake.manifest available
  ezbake_manifest = File.join('ext', 'ezbake.manifest')
  if File.exist?(ezbake_manifest)
    cp(ezbake_manifest, File.join(local_dir, "#{Pkg::Config.ref}.ezbake.manifest"))
  end
  ezbake_yaml = File.join("ext", "ezbake.manifest.yaml")
  # Fixed: `File.exists?` was deprecated and removed in Ruby 3.2;
  # `File.exist?` is the supported spelling.
  if File.exist?(ezbake_yaml)
    cp(ezbake_yaml, File.join(local_dir, "#{Pkg::Config.ref}.ezbake.manifest.yaml"))
  end

  # Inside build_metadata*.json files there is additional metadata containing
  # information such as git ref and dependencies that are needed at build
  # time. If these files exist, copy them downstream.
  # Typically these files are named 'ext/build_metadata.<project>.<platform>.json'
  build_metadata_json_files = Dir.glob('ext/build_metadata*.json')
  build_metadata_json_files.each do |source_file|
    target_file = File.join(local_dir, "#{Pkg::Config.ref}.#{File.basename(source_file)}")
    cp(source_file, target_file)
  end

  # Sadly, the packaging repo cannot yet act on its own, without living
  # inside of a packaging-repo compatible project. This means in order to
  # use the packaging repo for shipping and signing (things that really
  # don't require build automation, specifically) we still need the project
  # clone itself.
  Pkg::Util::Git.bundle('HEAD', 'signing_bundle', local_dir)

  # While we're bundling things, let's also make a git bundle of the
  # packaging repo that we're using when we invoke pl:jenkins:ship. We can
  # have a reasonable level of confidence, later on, that the git bundle on
  # the distribution server was, in fact, the git bundle used to create the
  # associated packages. This is because this ship task is automatically
  # called upon completion each cell of the pl:jenkins:uber_build, and we
  # have --ignore-existing set below. As such, the only git bundle that
  # should possibly be on the distribution is the one used to create the
  # packages.
  # We're bundling the packaging repo because it allows us to keep an
  # archive of the packaging source that was used to create the packages,
  # so that later on if we need to rebuild an older package to audit it or
  # for some other reason we're assured that the new package isn't
  # different by virtue of the packaging automation.
  if defined?(PACKAGING_ROOT)
    packaging_bundle = ''
    cd PACKAGING_ROOT do
      packaging_bundle = Pkg::Util::Git.bundle('HEAD', 'packaging-bundle')
    end
    mv(packaging_bundle, local_dir)
  end

  # This is functionality to add the project-arch.msi links that have no
  # version. The code itself looks for the link (if it's there already)
  # and if the source package exists before linking. Searching for the
  # packages has been restricted specifically to just the pkg/windows dir
  # on purpose, as this is where we currently have all windows packages
  # building to. Once we move the Metadata about the output location in
  # to one source of truth we can refactor this to use that to search
  # -Sean P. M. 08/12/16

  # The git-derived version is the same for every platform/arch, so compute
  # it once instead of shelling out to git inside the loop.
  package_version = Pkg::Util::Git.describe.tr('-', '.')
  {
    'windows' => ['x86', 'x64'],
    'windowsfips' => ['x64']
  }.each_pair do |platform, archs|
    packages = Dir["#{local_dir}/#{platform}/*"]

    archs.each do |arch|
      package_filename = File.join(local_dir, platform, "#{Pkg::Config.project}-#{package_version}-#{arch}.msi")
      link_filename = File.join(local_dir, platform, "#{Pkg::Config.project}-#{arch}.msi")

      next unless !packages.include?(link_filename) && packages.include?(package_filename)
      # Dear future code spelunkers:
      # Using symlinks instead of hard links causes failures when we try
      # to set these files to be immutable. Also be wary of whether the
      # linking utility you're using expects the source path to be relative
      # to the link target or pwd.
      #
      FileUtils.ln(package_filename, link_filename)
    end
  end

  Pkg::Util::Execution.retry_on_fail(times: 3) do
    Pkg::Util::Net.remote_execute(Pkg::Config.distribution_server, "mkdir --mode=775 -p #{project_basedir}")
    Pkg::Util::Net.remote_execute(Pkg::Config.distribution_server, "mkdir -p #{artifact_dir}")
    Pkg::Util::Net.rsync_to("#{local_dir}/", Pkg::Config.distribution_server, "#{artifact_dir}/", extra_flags: ['--ignore-existing', '--exclude repo_configs'])
  end

  # In order to get a snapshot of what this build looked like at the time
  # of shipping, we also generate and ship the params file
  #
  Pkg::Config.config_to_yaml(local_dir)
  # Hash syntax normalized (was `:times => 3`) to match the rest of the file.
  Pkg::Util::Execution.retry_on_fail(times: 3) do
    Pkg::Util::Net.rsync_to("#{local_dir}/#{Pkg::Config.ref}.yaml", Pkg::Config.distribution_server, "#{artifact_dir}/", extra_flags: ["--exclude repo_configs"])
  end

  # If we just shipped a tagged version, we want to make it immutable
  # (everything except the mutable <ref>.yaml params snapshot).
  # Fixed: boolean logic now uses `&&` instead of the low-precedence `and`.
  files = Dir.glob("#{local_dir}/**/*").select { |f| File.file?(f) && !f.include?("#{Pkg::Config.ref}.yaml") }.map do |file|
    "#{artifact_dir}/#{file.sub(/^#{local_dir}\//, '')}"
  end

  Pkg::Util::Net.remote_set_ownership(Pkg::Config.distribution_server, 'root', 'release', files)
  Pkg::Util::Net.remote_set_permissions(Pkg::Config.distribution_server, '0664', files)
  Pkg::Util::Net.remote_set_immutable(Pkg::Config.distribution_server, files)
end
|
|
685
|
+
|
|
686
|
+
desc 'Ship generated repository configs to the distribution server'
task :ship_repo_configs do
  # Push both the deb and the rpm repo config files.
  [Pkg::Deb::Repo, Pkg::Rpm::Repo].each(&:ship_repo_configs)
end
|
|
691
|
+
end
|
|
692
|
+
end
|