packaging 0.99.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114) hide show
  1. checksums.yaml +7 -0
  2. data/LICENSE +17 -0
  3. data/README-Solaris.md +117 -0
  4. data/README.md +1031 -0
  5. data/lib/packaging.rb +32 -0
  6. data/lib/packaging/artifactory.rb +278 -0
  7. data/lib/packaging/config.rb +392 -0
  8. data/lib/packaging/config/params.rb +366 -0
  9. data/lib/packaging/deb.rb +28 -0
  10. data/lib/packaging/deb/repo.rb +263 -0
  11. data/lib/packaging/gem.rb +112 -0
  12. data/lib/packaging/ips.rb +57 -0
  13. data/lib/packaging/msi.rb +89 -0
  14. data/lib/packaging/nuget.rb +39 -0
  15. data/lib/packaging/osx.rb +36 -0
  16. data/lib/packaging/paths.rb +238 -0
  17. data/lib/packaging/platforms.rb +480 -0
  18. data/lib/packaging/repo.rb +55 -0
  19. data/lib/packaging/retrieve.rb +46 -0
  20. data/lib/packaging/rpm.rb +5 -0
  21. data/lib/packaging/rpm/repo.rb +257 -0
  22. data/lib/packaging/tar.rb +154 -0
  23. data/lib/packaging/util.rb +146 -0
  24. data/lib/packaging/util/date.rb +15 -0
  25. data/lib/packaging/util/execution.rb +85 -0
  26. data/lib/packaging/util/file.rb +125 -0
  27. data/lib/packaging/util/git.rb +174 -0
  28. data/lib/packaging/util/git_tags.rb +73 -0
  29. data/lib/packaging/util/gpg.rb +62 -0
  30. data/lib/packaging/util/jenkins.rb +95 -0
  31. data/lib/packaging/util/misc.rb +69 -0
  32. data/lib/packaging/util/net.rb +368 -0
  33. data/lib/packaging/util/os.rb +17 -0
  34. data/lib/packaging/util/platform.rb +40 -0
  35. data/lib/packaging/util/rake_utils.rb +111 -0
  36. data/lib/packaging/util/serialization.rb +19 -0
  37. data/lib/packaging/util/ship.rb +171 -0
  38. data/lib/packaging/util/tool.rb +41 -0
  39. data/lib/packaging/util/version.rb +326 -0
  40. data/spec/fixtures/config/ext/build_defaults.yaml +2 -0
  41. data/spec/fixtures/config/ext/project_data.yaml +2 -0
  42. data/spec/fixtures/config/params.yaml +2 -0
  43. data/spec/fixtures/configs/components/test_file.json +1 -0
  44. data/spec/fixtures/configs/components/test_file_2.json +0 -0
  45. data/spec/fixtures/configs/components/test_file_not_tagged.json +1 -0
  46. data/spec/fixtures/configs/components/test_file_wrong_ext.txt +0 -0
  47. data/spec/fixtures/configs/components/test_file_wrong_ext.wrong +0 -0
  48. data/spec/fixtures/util/pre_tasks.yaml +4 -0
  49. data/spec/lib/packaging/artifactory_spec.rb +171 -0
  50. data/spec/lib/packaging/config_spec.rb +556 -0
  51. data/spec/lib/packaging/deb/repo_spec.rb +148 -0
  52. data/spec/lib/packaging/deb_spec.rb +52 -0
  53. data/spec/lib/packaging/paths_spec.rb +153 -0
  54. data/spec/lib/packaging/platforms_spec.rb +153 -0
  55. data/spec/lib/packaging/repo_spec.rb +97 -0
  56. data/spec/lib/packaging/retrieve_spec.rb +61 -0
  57. data/spec/lib/packaging/rpm/repo_spec.rb +133 -0
  58. data/spec/lib/packaging/tar_spec.rb +122 -0
  59. data/spec/lib/packaging/util/execution_spec.rb +56 -0
  60. data/spec/lib/packaging/util/file_spec.rb +139 -0
  61. data/spec/lib/packaging/util/git_spec.rb +160 -0
  62. data/spec/lib/packaging/util/git_tag_spec.rb +36 -0
  63. data/spec/lib/packaging/util/gpg_spec.rb +64 -0
  64. data/spec/lib/packaging/util/jenkins_spec.rb +112 -0
  65. data/spec/lib/packaging/util/misc_spec.rb +31 -0
  66. data/spec/lib/packaging/util/net_spec.rb +239 -0
  67. data/spec/lib/packaging/util/os_spec.rb +31 -0
  68. data/spec/lib/packaging/util/rake_utils_spec.rb +70 -0
  69. data/spec/lib/packaging/util/ship_spec.rb +117 -0
  70. data/spec/lib/packaging/util/version_spec.rb +123 -0
  71. data/spec/lib/packaging_spec.rb +19 -0
  72. data/spec/spec_helper.rb +36 -0
  73. data/static_artifacts/PackageInfo.plist +3 -0
  74. data/tasks/00_utils.rake +216 -0
  75. data/tasks/30_metrics.rake +33 -0
  76. data/tasks/apple.rake +266 -0
  77. data/tasks/build.rake +12 -0
  78. data/tasks/clean.rake +5 -0
  79. data/tasks/config.rake +30 -0
  80. data/tasks/deb.rake +129 -0
  81. data/tasks/deb_repos.rake +28 -0
  82. data/tasks/deprecated.rake +130 -0
  83. data/tasks/doc.rake +20 -0
  84. data/tasks/education.rake +57 -0
  85. data/tasks/fetch.rake +57 -0
  86. data/tasks/gem.rake +146 -0
  87. data/tasks/jenkins.rake +494 -0
  88. data/tasks/jenkins_dynamic.rake +202 -0
  89. data/tasks/load_extras.rake +21 -0
  90. data/tasks/mock.rake +348 -0
  91. data/tasks/nightly_repos.rake +335 -0
  92. data/tasks/pe_deb.rake +12 -0
  93. data/tasks/pe_rpm.rake +13 -0
  94. data/tasks/pe_ship.rake +221 -0
  95. data/tasks/pe_sign.rake +13 -0
  96. data/tasks/pe_tar.rake +5 -0
  97. data/tasks/retrieve.rake +45 -0
  98. data/tasks/rpm.rake +66 -0
  99. data/tasks/rpm_repos.rake +29 -0
  100. data/tasks/ship.rake +752 -0
  101. data/tasks/sign.rake +226 -0
  102. data/tasks/tag.rake +8 -0
  103. data/tasks/tar.rake +34 -0
  104. data/tasks/update.rake +16 -0
  105. data/tasks/vanagon.rake +35 -0
  106. data/tasks/vendor_gems.rake +117 -0
  107. data/tasks/version.rake +33 -0
  108. data/tasks/z_data_dump.rake +65 -0
  109. data/templates/README +1 -0
  110. data/templates/downstream.xml.erb +47 -0
  111. data/templates/msi.xml.erb +197 -0
  112. data/templates/packaging.xml.erb +344 -0
  113. data/templates/repo.xml.erb +114 -0
  114. metadata +234 -0
@@ -0,0 +1,13 @@
1
# PE signing tasks are only defined for PE builds. Each pe: task is a thin
# delegator to the matching pl: task, invoked through RakeUtils so the
# underlying task can be re-enabled/re-invoked safely.
if Pkg::Config.build_pe
  namespace :pe do
    {
      sign_rpms: ["Sign all staged in rpms in pkg", "pl:sign_rpms"],
      sign_deb_changes: ["Sign all debian changes files staged in pkg/pe", "pl:sign_deb_changes"],
    }.each do |task_name, (description, delegate)|
      desc description
      task task_name do
        Pkg::Util::RakeUtils.invoke_task(delegate)
      end
    end
  end
end
@@ -0,0 +1,5 @@
1
##
# An alias from pe:tar to package:tar, for easier automation in jenkins.rake
# (defines no body of its own; it only declares the prerequisite).
namespace :pe do
  task tar: %w[package:tar]
end
@@ -0,0 +1,45 @@
1
##
# This task is intended to retrieve packages from the distribution server that
# have been built by jenkins and placed in a specific location,
# /opt/jenkins-builds/$PROJECT/$SHA where $PROJECT is the build project as
# established in build_defaults.yaml and $SHA is the git sha/tag of the project that
# was built into packages. The current day is assumed, but an environment
# variable override exists to retrieve packages from another day. The sha/tag is
# assumed to be the current project's HEAD, e.g. to retrieve packages for a
# release of 3.1.0, checkout 3.1.0 locally before retrieving.
#


namespace :pl do
  namespace :jenkins do
    # NOTE: the desc previously contained a stray "\." escape; in a
    # double-quoted string that is a no-op, so the visible text is unchanged.
    desc "Retrieve packages from the distribution server. Check out commit to retrieve"
    task :retrieve, [:remote_target, :local_target] => 'pl:fetch' do |_t, args|
      remote_target = args.remote_target || "artifacts"
      local_target = args.local_target || "pkg"
      mkdir_p local_target
      build_url = "http://#{Pkg::Config.builds_server}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
      build_path = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
      if Pkg::Config.foss_only
        Pkg::Retrieve.foss_only_retrieve(build_url, local_target)
      else
        Pkg::Retrieve.retrieve_all(build_url, build_path, remote_target, local_target)
      end
      puts "Packages staged in #{local_target}"
    end
  end
end

if Pkg::Config.build_pe
  namespace :pe do
    namespace :jenkins do
      desc "Retrieve packages from the distribution server. Check out commit to retrieve"
      task :retrieve, [:remote_target, :local_target] => 'pl:fetch' do |_t, args|
        remote_target = args.remote_target || "artifacts"
        local_target = args.local_target || "pkg"
        # Ensure the destination exists, matching the pl:jenkins:retrieve
        # behavior above (previously only the pl variant created it).
        mkdir_p local_target
        build_url = "http://#{Pkg::Config.builds_server}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
        build_path = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
        Pkg::Retrieve.retrieve_all(build_url, build_path, remote_target, local_target)
      end
    end
  end
end
@@ -0,0 +1,66 @@
1
# Stage a temporary rpmbuild working directory for the current project.
#
# Creates a temp dir with SOURCES/ and SPECS/ subdirectories, copies the
# project tarball (plus any companion files such as signatures) into SOURCES,
# then locates a specfile:
#   1. ext/redhat/<project>.spec inside the tarball, if present; otherwise
#   2. ext/redhat/<project>.spec.erb in the working tree, rendered via ERB.
# Fails if neither can be found.
#
# Returns the path to the prepared temp build directory.
def prep_rpm_build_dir
  temp = Pkg::Util::File.mktemp
  tarball = "#{Pkg::Config.project}-#{Pkg::Config.version}.tar.gz"
  FileUtils.mkdir_p([temp, "#{temp}/SOURCES", "#{temp}/SPECS"])
  FileUtils.cp_r FileList["pkg/#{tarball}*"], "#{temp}/SOURCES", { :preserve => true }
  # If the file ext/redhat/<project>.spec exists in the tarball, we use it. If
  # it doesn't we try to 'erb' the file from a predicted template in source,
  # ext/redhat/<project>.spec.erb. If that doesn't exist, we fail. To do this,
  # we have to open the tarball.
  FileUtils.cp("pkg/#{tarball}", temp, { :preserve => true })

  # Test for specfile in tarball.
  # BUGFIX: previously the grep result was discarded and only $? (tar's exit
  # status) was checked, so the "spec in tarball" branch was taken whenever
  # tar succeeded, even when no spec was present. We now also require a match.
  spec_entries = %x(tar -tzf #{File.join(temp, tarball)}).split.grep(/\/ext\/redhat\/#{Pkg::Config.project}.spec$/)

  if $?.success? && !spec_entries.empty?
    sh "tar -C #{temp} -xzf #{File.join(temp, tarball)} #{Pkg::Config.project}-#{Pkg::Config.version}/ext/redhat/#{Pkg::Config.project}.spec"
    cp("#{temp}/#{Pkg::Config.project}-#{Pkg::Config.version}/ext/redhat/#{Pkg::Config.project}.spec", "#{temp}/SPECS/")
  elsif File.exist?("ext/redhat/#{Pkg::Config.project}.spec.erb")
    # File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
    Pkg::Util::File.erb_file("ext/redhat/#{Pkg::Config.project}.spec.erb", "#{temp}/SPECS/#{Pkg::Config.project}.spec", nil, :binding => Pkg::Config.get_binding)
  else
    fail "Could not locate redhat spec ext/redhat/#{Pkg::Config.project}.spec or ext/redhat/#{Pkg::Config.project}.spec.erb"
  end
  temp
end
25
+
26
# Run rpmbuild against the staged tree produced by prep_rpm_build_dir.
#
# buildarg - "-bs" (default) builds a source RPM only, landing in pkg/srpm;
#            "-ba" additionally builds binary RPMs, landing in pkg/rpm.
#
# The legacy-compatibility defines force RPM v3-era digests/payloads so the
# resulting packages install on older RPM versions.
def build_rpm(buildarg = "-bs")
  Pkg::Util::Tool.check_tool('rpmbuild')
  workdir = prep_rpm_build_dir
  rpm_define = "--define \"%_topdir #{workdir}\" "
  # BUGFIX: this was previously one single-quoted string containing literal
  # backslash-newline sequences, relying on the shell to treat them as line
  # continuations inside the rpmbuild command. Build a flat, single-line
  # argument string instead.
  rpm_old_version = [
    '--define "_source_filedigest_algorithm 1"',
    '--define "_binary_filedigest_algorithm 1"',
    '--define "_binary_payload w9.gzdio"',
    '--define "_source_payload w9.gzdio"',
    '--define "_default_patch_fuzz 2"',
  ].join(' ')
  args = rpm_define + ' ' + rpm_old_version
  FileUtils.mkdir_p('pkg/srpm')
  if buildarg == '-ba'
    FileUtils.mkdir_p('pkg/rpm')
  end
  if Pkg::Config.sign_tar
    Rake::Task["pl:sign_tar"].invoke
  end
  sh "rpmbuild #{args} #{buildarg} --nodeps #{workdir}/SPECS/#{Pkg::Config.project}.spec"
  mv FileList["#{workdir}/SRPMS/*.rpm"], "pkg/srpm"
  if buildarg == '-ba'
    mv FileList["#{workdir}/RPMS/*/*.rpm"], "pkg/rpm"
  end
  rm_rf workdir
  puts
  output = FileList['pkg/*/*.rpm']
  puts "Wrote:"
  # puts on an array prints one element per line.
  puts output
end
54
+
55
# Local (un-mocked, unsigned) RPM builds straight from this git checkout.
# Both tasks depend on :tar so the source tarball exists before rpmbuild runs.
namespace :package do
  desc "Create srpm from this git repository (unsigned)"
  task srpm: :tar do
    build_rpm("-bs")
  end

  desc "Create .rpm from this git repository (unsigned)"
  task rpm: :tar do
    build_rpm("-ba")
  end
end
66
+
@@ -0,0 +1,29 @@
1
##
#
# A set of functionality for creating yum rpm repositories throughout the
# standard pkg/ directory layout that the packaging repo creates. The standard
# layout is:
# pkg/{el,fedora}/{5,6,f16,f17,f18}/{products,devel,dependencies,extras}/{i386,x86_64,SRPMS}
#
# Because we'll likely be creating the repos on a server that is remote, e.g.
# the distribution server, the logic here assumes we'll be doing everything via
# ssh commands.
#
namespace :pl do
  namespace :jenkins do
    # Each task delegates to a single Pkg::Rpm::Repo class method; all of them
    # require config from pl:fetch first.
    {
      rpm_repos: ["Create yum repositories of built RPM packages for this SHA on the distribution server", :create_remote_repos],
      generate_rpm_repo_configs: ["Create yum repository configs for package repos for this sha/tag on the distribution server", :generate_repo_configs],
      rpm_repo_configs: ["Retrieve rpm yum repository configs from distribution server", :retrieve_repo_configs],
    }.each do |task_name, (description, repo_method)|
      desc description
      task task_name => "pl:fetch" do
        Pkg::Rpm::Repo.public_send(repo_method)
      end
    end
  end
end
@@ -0,0 +1,752 @@
1
+ namespace :pl do
2
+ namespace :remote do
3
+ # These hacky bits execute a pre-existing rake task on the Pkg::Config.apt_host
4
+ # The rake task takes packages in a specific directory and freights them
5
+ # to various target yum and apt repositories based on their specific type
6
+ # e.g., final vs devel vs PE vs FOSS packages
7
+
8
+ desc "Update remote yum repository on '#{Pkg::Config.yum_staging_server}'"
9
+ task update_yum_repo: 'pl:fetch' do
10
+ if Pkg::Util::Version.final?
11
+ path = Pkg::Config.yum_repo_path
12
+ else
13
+ path = Pkg::Config.nonfinal_yum_repo_path || Pkg::Config.yum_repo_path
14
+ end
15
+ yum_whitelist = {
16
+ __REPO_NAME__: Pkg::Paths.repo_name,
17
+ __REPO_PATH__: path,
18
+ __REPO_HOST__: Pkg::Config.yum_staging_server,
19
+ __GPG_KEY__: Pkg::Util::Gpg.key
20
+ }
21
+
22
+ $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
23
+ if Pkg::Util.ask_yes_or_no
24
+ if Pkg::Config.yum_repo_command
25
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.yum_staging_server, Pkg::Util::Misc.search_and_replace(Pkg::Config.yum_repo_command, yum_whitelist))
26
+ else
27
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.yum_staging_server, 'rake -f /opt/repository/Rakefile mk_repo')
28
+ end
29
+ end
30
+ end
31
+
32
+ task freight: :update_apt_repo
33
+
34
+ desc "Update remote apt repository on '#{Pkg::Config.apt_signing_server}'"
35
+ task update_apt_repo: 'pl:fetch' do
36
+ if Pkg::Util::Version.final?
37
+ path = Pkg::Config.apt_repo_path
38
+ cmd = Pkg::Config.apt_repo_command
39
+ else
40
+ path = Pkg::Config.nonfinal_apt_repo_path || Pkg::Config.apt_repo_path
41
+ cmd = Pkg::Config.nonfinal_apt_repo_command || Pkg::Config.apt_repo_command
42
+ end
43
+ apt_whitelist = {
44
+ __REPO_NAME__: Pkg::Paths.repo_name,
45
+ __REPO_PATH__: path,
46
+ __REPO_URL__: Pkg::Config.apt_repo_url,
47
+ __REPO_HOST__: Pkg::Config.apt_host,
48
+ __APT_PLATFORMS__: Pkg::Config.apt_releases.join(' '),
49
+ __GPG_KEY__: Pkg::Util::Gpg.key
50
+ }
51
+
52
+ $stdout.puts "Really run remote repo update on '#{Pkg::Config.apt_signing_server}'? [y,n]"
53
+ if Pkg::Util.ask_yes_or_no
54
+ if cmd
55
+ Pkg::Util::Net.remote_ssh_cmd(
56
+ Pkg::Config.apt_signing_server,
57
+ Pkg::Util::Misc.search_and_replace(
58
+ cmd,
59
+ apt_whitelist
60
+ )
61
+ )
62
+ else
63
+ warn %(Pkg::Config#apt_repo_command returned something unexpected, so no attempt will be made to update remote repos)
64
+ end
65
+ end
66
+ end
67
+
68
+ desc "Update apt and yum repos"
69
+ task :update_foss_repos => "pl:fetch" do
70
+ Rake::Task['pl:remote:update_apt_repo'].invoke
71
+ Rake::Task['pl:remote:update_yum_repo'].invoke
72
+ end
73
+
74
+ desc "Update remote ips repository on #{Pkg::Config.ips_host}"
75
+ task :update_ips_repo => 'pl:fetch' do
76
+ if Dir['pkg/ips/pkgs/**/*'].empty? && Dir['pkg/solaris/11/**/*'].empty?
77
+ $stdout.puts "There aren't any p5p packages in pkg/ips/pkgs or pkg/solaris/11. Maybe something went wrong?"
78
+ else
79
+
80
+ if !Dir['pkg/ips/pkgs/**/*'].empty?
81
+ source_dir = 'pkg/ips/pkgs/'
82
+ else
83
+ source_dir = 'pkg/solaris/11/'
84
+ end
85
+
86
+ tmpdir, _ = Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.ips_host, 'mktemp -d -p /var/tmp', true)
87
+ tmpdir.chomp!
88
+
89
+ Pkg::Util::Net.rsync_to(source_dir, Pkg::Config.ips_host, tmpdir)
90
+
91
+ remote_cmd = %(for pkg in #{tmpdir}/*.p5p; do
92
+ sudo pkgrecv -s $pkg -d #{Pkg::Config.ips_path} '*';
93
+ done)
94
+
95
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.ips_host, remote_cmd)
96
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.ips_host, "sudo pkgrepo refresh -s #{Pkg::Config.ips_path}")
97
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.ips_host, "sudo /usr/sbin/svcadm restart svc:/application/pkg/server:#{Pkg::Config.ips_repo || 'default'}")
98
+ end
99
+ end
100
+
101
+ desc "Move dmg repos from #{Pkg::Config.dmg_staging_server} to #{Pkg::Config.dmg_host}"
102
+ task deploy_dmg_repo: 'pl:fetch' do
103
+ puts "Really run remote rsync to deploy OS X repos from #{Pkg::Config.dmg_staging_server} to #{Pkg::Config.dmg_host}? [y,n]"
104
+ if Pkg::Util.ask_yes_or_no
105
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
106
+ cmd = Pkg::Util::Net.rsync_cmd(Pkg::Config.dmg_path, target_host: Pkg::Config.dmg_host, extra_flags: ['--update'])
107
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.dmg_staging_server, cmd)
108
+ end
109
+ end
110
+ end
111
+
112
+ desc "Move swix repos from #{Pkg::Config.swix_staging_server} to #{Pkg::Config.swix_host}"
113
+ task deploy_swix_repo: 'pl:fetch' do
114
+ puts "Really run remote rsync to deploy Arista repos from #{Pkg::Config.swix_staging_server} to #{Pkg::Config.swix_host}? [y,n]"
115
+ if Pkg::Util.ask_yes_or_no
116
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
117
+ cmd = Pkg::Util::Net.rsync_cmd(Pkg::Config.swix_path, target_host: Pkg::Config.swix_host, extra_flags: ['--update'])
118
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.swix_staging_server, cmd)
119
+ end
120
+ end
121
+ end
122
+
123
+ desc "Move tar repos from #{Pkg::Config.tar_staging_server} to #{Pkg::Config.tar_host}"
124
+ task deploy_tar_repo: 'pl:fetch' do
125
+ puts "Really run remote rsync to deploy source tarballs from #{Pkg::Config.tar_staging_server} to #{Pkg::Config.tar_host}? [y,n]"
126
+ if Pkg::Util.ask_yes_or_no
127
+ files = Dir.glob("pkg/#{Pkg::Config.project}-#{Pkg::Config.version}.tar.gz*")
128
+ if files.empty?
129
+ puts 'There are no tarballs to ship'
130
+ else
131
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
132
+ cmd = Pkg::Util::Net.rsync_cmd(Pkg::Config.tarball_path, target_host: Pkg::Config.tar_host, extra_flags: ['--update'])
133
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.tar_staging_server, cmd)
134
+ end
135
+ end
136
+ end
137
+ end
138
+
139
+ desc "Move MSI repos from #{Pkg::Config.msi_staging_server} to #{Pkg::Config.msi_host}"
140
+ task deploy_msi_repo: 'pl:fetch' do
141
+ puts "Really run remote rsync to deploy source MSIs from #{Pkg::Config.msi_staging_server} to #{Pkg::Config.msi_host}? [y,n]"
142
+ if Pkg::Util.ask_yes_or_no
143
+ files = Dir.glob('pkg/windows/**/*.msi')
144
+ if files.empty?
145
+ puts 'There are no MSIs to ship'
146
+ else
147
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
148
+ cmd = Pkg::Util::Net.rsync_cmd(Pkg::Config.msi_path, target_host: Pkg::Config.msi_host, extra_flags: ['--update'])
149
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.msi_staging_server, cmd)
150
+ end
151
+ end
152
+ end
153
+ end
154
+
155
+ desc "Move signed deb repos from #{Pkg::Config.apt_signing_server} to #{Pkg::Config.apt_host}"
156
+ task deploy_apt_repo: 'pl:fetch' do
157
+ puts "Really run remote rsync to deploy Debian repos from #{Pkg::Config.apt_signing_server} to #{Pkg::Config.apt_host}? [y,n]"
158
+ if Pkg::Util.ask_yes_or_no
159
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
160
+ Pkg::Deb::Repo.deploy_repos(
161
+ Pkg::Config.apt_repo_path,
162
+ Pkg::Config.apt_repo_staging_path,
163
+ Pkg::Config.apt_signing_server,
164
+ Pkg::Config.apt_host,
165
+ ENV['DRYRUN']
166
+ )
167
+ end
168
+ end
169
+ end
170
+
171
+ desc "Copy signed deb repos from #{Pkg::Config.apt_signing_server} to AWS S3"
172
+ task :deploy_apt_repo_to_s3 => 'pl:fetch' do
173
+ puts "Really run S3 sync to deploy Debian repos from #{Pkg::Config.apt_signing_server} to AWS S3? [y,n]"
174
+ if Pkg::Util.ask_yes_or_no
175
+ Pkg::Util::Execution.retry_on_fail(:times => 3) do
176
+ command = 'sudo /usr/local/bin/s3_repo_sync.sh apt.puppetlabs.com'
177
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.apt_signing_server, command)
178
+ end
179
+ end
180
+ end
181
+
182
+ desc "Copy rpm repos from #{Pkg::Config.yum_staging_server} to #{Pkg::Config.yum_host}"
183
+ task deploy_yum_repo: 'pl:fetch' do
184
+ puts "Really run remote rsync to deploy yum repos from #{Pkg::Config.yum_staging_server} to #{Pkg::Config.yum_host}? [y,n]"
185
+ if Pkg::Util.ask_yes_or_no
186
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
187
+ Pkg::Rpm::Repo.deploy_repos(
188
+ Pkg::Config.yum_repo_path,
189
+ Pkg::Config.yum_staging_server,
190
+ Pkg::Config.yum_host,
191
+ ENV['DRYRUN']
192
+ )
193
+ end
194
+ end
195
+ end
196
+
197
+ desc "Copy signed RPM repos from #{Pkg::Config.yum_staging_server} to AWS S3"
198
+ task :deploy_yum_repo_to_s3 => 'pl:fetch' do
199
+ puts "Really run S3 sync to deploy RPM repos from #{Pkg::Config.yum_staging_server} to AWS S3? [y,n]"
200
+ if Pkg::Util.ask_yes_or_no
201
+ Pkg::Util::Execution.retry_on_fail(:times => 3) do
202
+ command = 'sudo /usr/local/bin/s3_repo_sync.sh yum.puppetlabs.com'
203
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.yum_staging_server, command)
204
+ end
205
+ end
206
+ end
207
+
208
+ desc "Sync downloads.puppetlabs.com from #{Pkg::Config.staging_server} to AWS S3"
209
+ task :deploy_downloads_to_s3 => 'pl:fetch' do
210
+ puts "Really run S3 sync to sync downloads.puppetlabs.com from #{Pkg::Config.staging_server} to AWS S3? [y,n]"
211
+ if Pkg::Util.ask_yes_or_no
212
+ Pkg::Util::Execution.retry_on_fail(:times => 3) do
213
+ command = 'sudo /usr/local/bin/s3_repo_sync.sh downloads.puppetlabs.com'
214
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.staging_server, command)
215
+ end
216
+ end
217
+ end
218
+
219
+ desc "Sync apt, yum, and downloads.pl.com to AWS S3"
220
+ task :deploy_final_builds_to_s3 => "pl:fetch" do
221
+ Rake::Task['pl:remote:deploy_apt_repo_to_s3'].invoke
222
+ Rake::Task['pl:remote:deploy_yum_repo_to_s3'].invoke
223
+ Rake::Task['pl:remote:deploy_downloads_to_s3'].invoke
224
+ end
225
+
226
+ desc "Sync nightlies.puppetlabs.com from #{Pkg::Config.staging_server} to AWS S3"
227
+ task :deploy_nightlies_to_s3 => 'pl:fetch' do
228
+ puts "Deploying nightly builds from #{Pkg::Config.staging_server} to AWS S3..."
229
+ Pkg::Util::Execution.retry_on_fail(:times => 3) do
230
+ command = 'sudo /usr/local/bin/s3_repo_sync.sh nightlies.puppet.com'
231
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.staging_server, command)
232
+ end
233
+ end
234
+
235
+ desc "Sync yum and apt from #{Pkg::Config.staging_server} to rsync servers"
236
+ task :deploy_to_rsync_server => 'pl:fetch' do
237
+ # This task must run after the S3 sync has run, or else /opt/repo-s3-stage won't be up-to-date
238
+ puts "Really run rsync to sync apt and yum from #{Pkg::Config.staging_server} to rsync servers? Only say yes if the S3 sync task has run. [y,n]"
239
+ if Pkg::Util.ask_yes_or_no
240
+ Pkg::Util::Execution.retry_on_fail(:times => 3) do
241
+ Pkg::Config.rsync_servers.each do |rsync_server|
242
+ ['apt', 'yum'].each do |repo|
243
+ command = "sudo su - rsync --command 'rsync --verbose -a --exclude '*.html' --delete /opt/repo-s3-stage/repositories/#{repo}.puppetlabs.com/ rsync@#{rsync_server}:/opt/repository/#{repo}'"
244
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.staging_server, command)
245
+ end
246
+ end
247
+ end
248
+ end
249
+ end
250
+ end
251
+
252
+ desc "Ship mocked rpms to #{Pkg::Config.yum_staging_server}"
253
+ task ship_rpms: 'pl:fetch' do
254
+ if Pkg::Util::Version.final?
255
+ path = Pkg::Config.yum_repo_path
256
+ else
257
+ path = Pkg::Config.nonfinal_yum_repo_path || Pkg::Config.yum_repo_path
258
+ end
259
+ Pkg::Util::Ship.ship_pkgs(['pkg/**/*.rpm', 'pkg/**/*.srpm'], Pkg::Config.yum_staging_server, path)
260
+
261
+ # I really don't care which one we grab, it just has to be some supported
262
+ # version and architecture from the `el` hash. So here we're just grabbing
263
+ # the first one, parsing out some info, and breaking out of the loop. Not
264
+ # elegant, I know, but effective.
265
+ Pkg::Platforms::PLATFORM_INFO['el'].each do |key, value|
266
+ generic_platform_tag = "el-#{key}-#{value[:architectures][0]}"
267
+ Pkg::Util::Ship.create_rolling_repo_link(generic_platform_tag, Pkg::Config.yum_staging_server, path)
268
+ break
269
+ end
270
+ end
271
+
272
+ desc "Ship cow-built debs to #{Pkg::Config.apt_signing_server}"
273
+ task ship_debs: 'pl:fetch' do
274
+ if Pkg::Util::Version.final?
275
+ staging_path = Pkg::Config.apt_repo_staging_path
276
+ else
277
+ staging_path = Pkg::Config.nonfinal_apt_repo_staging_path || Pkg::Config.apt_repo_staging_path
278
+ end
279
+ Pkg::Util::Ship.ship_pkgs(['pkg/**/*.debian.tar.gz', 'pkg/**/*.orig.tar.gz', 'pkg/**/*.dsc', 'pkg/**/*.deb', 'pkg/**/*.changes'], Pkg::Config.apt_signing_server, staging_path, chattr: false)
280
+
281
+ # We need to iterate through all the supported platforms here because of
282
+ # how deb repos are set up. Each codename will have its own link from the
283
+ # current versioned repo (i.e., puppet5) to the rolling repo. The one thing
284
+ # we don't care about is architecture, so we just grab the first supported
285
+ # architecture for the codename we're working with at the moment.
286
+ Pkg::Platforms.codenames.each do |codename|
287
+ Pkg::Util::Ship.create_rolling_repo_link(Pkg::Platforms.codename_to_tags(codename)[0], Pkg::Config.apt_signing_server, staging_path)
288
+ end
289
+ end
290
+
291
+ desc 'Ship built gem to rubygems.org, internal Gem mirror, and public file server'
292
+ task ship_gem: 'pl:fetch' do
293
+ # We want to ship a Gem only for projects that build gems, so
294
+ # all of the Gem shipping tasks are wrapped in an `if`.
295
+ if Pkg::Config.build_gem
296
+ # Even if a project builds a gem, if it uses the odd_even or zero-based
297
+ # strategies, we only want to ship final gems because otherwise a
298
+ # development gem would be preferred over the last final gem
299
+ if Pkg::Util::Version.final?
300
+ FileList['pkg/*.gem'].each do |gem_file|
301
+ puts 'This will ship to an internal gem mirror, a public file server, and rubygems.org'
302
+ puts "Do you want to start shipping the rubygem '#{gem_file}'?"
303
+ next unless Pkg::Util.ask_yes_or_no
304
+ Rake::Task['pl:ship_gem_to_rubygems'].execute(file: gem_file)
305
+ Rake::Task['pl:ship_gem_to_internal_mirror'].execute(file: gem_file)
306
+ end
307
+
308
+ Rake::Task['pl:ship_gem_to_downloads'].invoke
309
+ else
310
+ $stderr.puts 'Not shipping development gem using odd_even strategy for the sake of your users.'
311
+ end
312
+ end
313
+ end
314
+
315
+ desc 'Ship built gem to rubygems.org'
316
+ task :ship_gem_to_rubygems, [:file] => 'pl:fetch' do |_t, args|
317
+ puts "Do you want to ship #{args[:file]} to rubygems.org?"
318
+ if Pkg::Util.ask_yes_or_no
319
+ puts "Shipping gem #{args[:file]} to rubygems.org"
320
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
321
+ Pkg::Gem.ship_to_rubygems(args[:file])
322
+ end
323
+ end
324
+ end
325
+
326
+ desc "Ship built gems to internal Gem server (#{Pkg::Config.internal_gem_host})"
327
+ task :ship_gem_to_internal_mirror, [:file] => 'pl:fetch' do |_t, args|
328
+ unless Pkg::Config.internal_gem_host
329
+ warn 'Value `Pkg::Config.internal_gem_host` not defined; skipping internal ship'
330
+ end
331
+
332
+ puts "Do you want to ship #{args[:file]} to the internal stickler server(#{Pkg::Config.internal_stickler_host})?"
333
+ if Pkg::Util.ask_yes_or_no
334
+ puts "Shipping gem #{args[:file]} to internal Gem server (#{Pkg::Config.internal_stickler_host})"
335
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
336
+ Pkg::Gem.ship_to_stickler(args[:file])
337
+ end
338
+ end
339
+
340
+ puts "Do you want to ship #{args[:file]} to the internal nexus server(#{Pkg::Config.internal_nexus_host})?"
341
+ if Pkg::Util.ask_yes_or_no
342
+ puts "Shipping gem #{args[:file]} to internal Gem server (#{Pkg::Config.internal_nexus_host})"
343
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
344
+ Pkg::Gem.ship_to_nexus(args[:file])
345
+ end
346
+ end
347
+ end
348
+
349
+ desc "Ship built gems to public Downloads server (#{Pkg::Config.gem_host})"
350
+ task :ship_gem_to_downloads => 'pl:fetch' do
351
+ unless Pkg::Config.gem_host
352
+ warn 'Value `Pkg::Config.gem_host` not defined; skipping shipping to public Download server'
353
+ end
354
+
355
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
356
+ Pkg::Util::Ship.ship_pkgs(['pkg/*.gem*'], Pkg::Config.gem_host, Pkg::Config.gem_path, platform_independent: true)
357
+ end
358
+ end
359
+
360
+ desc "Ship svr4 packages to #{Pkg::Config.svr4_host}"
361
+ task :ship_svr4 do
362
+ Pkg::Util::Execution.retry_on_fail(:times => 3) do
363
+ if File.directory?("pkg/solaris/10")
364
+ if Pkg::Util::Version.final?
365
+ path = Pkg::Config.svr4_path
366
+ else
367
+ path = Pkg::Config.nonfinal_svr4_path || Pkg::Config.svr4_path
368
+ end
369
+ Pkg::Util::Ship.ship_pkgs(['pkg/**/*.pkg.gz'], Pkg::Config.svr4_host, path)
370
+ end
371
+ end
372
+ end
373
+
374
+ desc "Ship p5p packages to #{Pkg::Config.p5p_host}"
375
+ task :ship_p5p do
376
+ Pkg::Util::Execution.retry_on_fail(:times => 3) do
377
+ if File.directory?("pkg/solaris/11")
378
+ if Pkg::Util::Version.final?
379
+ path = Pkg::Config.p5p_path
380
+ else
381
+ path = Pkg::Config.nonfinal_p5p_path || Pkg::Config.p5p_path
382
+ end
383
+ Pkg::Util::Ship.ship_pkgs(['pkg/**/*.p5p'], Pkg::Config.p5p_host, path)
384
+ end
385
+ end
386
+ end
387
+
388
+ desc "ship apple dmg to #{Pkg::Config.dmg_staging_server}"
389
+ task ship_dmg: 'pl:fetch' do
390
+ # TODO: realistically, this shouldn't be here. This block needs to be
391
+ # removed, but only when we can successfully modify all instances of
392
+ # this to be set to '/opt/downloads'. In the meantime, we need to write
393
+ # this terrible workaround to ensure backward compatibility.
394
+ #
395
+ # I'm so sorry
396
+ # ~MAS 2017-08-14
397
+ if Pkg::Config.dmg_path == "/opt/downloads/mac"
398
+ path = "/opt/downloads"
399
+ else
400
+ path = Pkg::Config.dmg_path
401
+ end
402
+ path = Pkg::Config.nonfinal_dmg_path if Pkg::Config.nonfinal_dmg_path && !Pkg::Util::Version.final?
403
+
404
+ Pkg::Util::Ship.ship_pkgs(['pkg/**/*.dmg'], Pkg::Config.dmg_staging_server, path)
405
+
406
+ # I really don't care which one we grab, it just has to be some supported
407
+ # version and architecture from the `osx` hash. So here we're just grabbing
408
+ # the first one, parsing out some info, and breaking out of the loop. Not
409
+ # elegant, I know, but effective.
410
+ Pkg::Platforms::PLATFORM_INFO['osx'].each do |key, value|
411
+ generic_platform_tag = "osx-#{key}-#{value[:architectures][0]}"
412
+ Pkg::Util::Ship.create_rolling_repo_link(generic_platform_tag, Pkg::Config.dmg_staging_server, path)
413
+ break
414
+ end
415
+
416
+ Pkg::Platforms.platform_tags_for_package_format('dmg').each do |platform_tag|
417
+ # TODO remove the PC1 links when we no longer need to maintain them
418
+ _, version, arch = Pkg::Platforms.parse_platform_tag(platform_tag)
419
+ Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', "/opt/downloads/mac/#{version}/PC1/#{arch}", 'dmg')
420
+
421
+ # Create the latest symlink for the current supported repo
422
+ Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', Pkg::Paths.artifacts_path(platform_tag, path), 'dmg')
423
+ end
424
+ end
425
+
426
+ desc "ship Arista EOS swix packages and signatures to #{Pkg::Config.swix_staging_server}"
427
+ task ship_swix: 'pl:fetch' do
428
+ # TODO: realistically, this shouldn't be here. This block needs to be
429
+ # removed, but only when we can successfully modify all instances of
430
+ # this to be set to '/opt/downloads'. In the meantime, we need to write
431
+ # this terrible workaround to ensure backward compatibility.
432
+ #
433
+ # I'm so sorry
434
+ # ~MAS 2017-08-14
435
+ if Pkg::Config.swix_path == "/opt/downloads/eos"
436
+ path = "/opt/downloads"
437
+ else
438
+ path = Pkg::Config.swix_path
439
+ end
440
+ path = Pkg::Config.nonfinal_swix_path if Pkg::Config.nonfinal_swix_path && !Pkg::Util::Version.final?
441
+
442
+ Pkg::Util::Ship.ship_pkgs(['pkg/**/*.swix*'], Pkg::Config.swix_staging_server, path)
443
+
444
+ # I really don't care which one we grab, it just has to be some supported
445
+ # version and architecture from the `eos` hash. So here we're just grabbing
446
+ # the first one, parsing out some info, and breaking out of the loop. Not
447
+ # elegant, I know, but effective.
448
+ Pkg::Platforms::PLATFORM_INFO['eos'].each do |key, value|
449
+ generic_platform_tag = "eos-#{key}-#{value[:architectures][0]}"
450
+ Pkg::Util::Ship.create_rolling_repo_link(generic_platform_tag, Pkg::Config.swix_staging_server, path)
451
+ break
452
+ end
453
+ end
454
+
455
+ desc "ship tarball and signature to #{Pkg::Config.tar_staging_server}"
456
+ task ship_tar: 'pl:fetch' do
457
+ if Pkg::Config.build_tar
458
+ Pkg::Util::Ship.ship_pkgs(['pkg/*.tar.gz*'], Pkg::Config.tar_staging_server, Pkg::Config.tarball_path, excludes: ['signing_bundle', 'packaging-bundle'], platform_independent: true)
459
+ end
460
+ end
461
+
462
+ desc "ship Windows nuget packages to #{Pkg::Config.nuget_host}"
463
+ task ship_nuget: 'pl:fetch' do
464
+ packages = Dir['pkg/**/*.nupkg']
465
+ if packages.empty?
466
+ $stdout.puts "There aren't any nuget packages in pkg/windows. Maybe something went wrong?"
467
+ else
468
+ Pkg::Nuget.ship(packages)
469
+ end
470
+ end
471
+
472
+ desc "Ship MSI packages to #{Pkg::Config.msi_staging_server}"
473
+ task ship_msi: 'pl:fetch' do
474
+ # TODO: realistically, this shouldn't be here. This block needs to be
475
+ # removed, but only when we can successfully modify all instances of
476
+ # this to be set to '/opt/downloads'. In the meantime, we need to write
477
+ # this terrible workaround to ensure backward compatibility.
478
+ #
479
+ # I'm so sorry
480
+ # ~MAS 2017-08-14
481
+ if Pkg::Config.msi_path == "/opt/downloads/windows"
482
+ path = "/opt/downloads"
483
+ else
484
+ path = Pkg::Config.msi_path
485
+ end
486
+ path = Pkg::Config.nonfinal_msi_path if Pkg::Config.nonfinal_msi_path && !Pkg::Util::Version.final?
487
+
488
+ Pkg::Util::Ship.ship_pkgs(['pkg/**/*.msi'], Pkg::Config.msi_staging_server, path, excludes: ["#{Pkg::Config.project}-x(86|64).msi"])
489
+
490
+ # I really don't care which one we grab, it just has to be some supported
491
+ # version and architecture from the `windows` hash. So here we're just grabbing
492
+ # the first one, parsing out some info, and breaking out of the loop. Not
493
+ # elegant, I know, but effective.
494
+ Pkg::Platforms::PLATFORM_INFO['windows'].each do |key, value|
495
+ generic_platform_tag = "windows-#{key}-#{value[:architectures][0]}"
496
+ Pkg::Util::Ship.create_rolling_repo_link(generic_platform_tag, Pkg::Config.msi_staging_server, path)
497
+
498
+ # Create the symlinks for the latest supported repo
499
+ Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', Pkg::Paths.artifacts_path(generic_platform_tag, path), 'msi', arch: 'x64')
500
+ Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', Pkg::Paths.artifacts_path(generic_platform_tag, path), 'msi', arch: 'x86')
501
+ break
502
+ end
503
+
504
+ # We provide symlinks to the latest package in a given directory. This
505
+ # allows users to upgrade more easily to the latest version that we release
506
+ # TODO remove the links to PC1 when we no longer ship to that repo
507
+ Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', '/opt/downloads/windows', 'msi', arch: 'x64')
508
+ Pkg::Util::Net.remote_create_latest_symlink('puppet-agent', '/opt/downloads/windows', 'msi', arch: 'x86')
509
+ end
510
+
511
+ desc 'UBER ship: ship all the things in pkg'
512
+ task uber_ship: 'pl:fetch' do
513
+ if Pkg::Util.confirm_ship(FileList['pkg/**/*'])
514
+ Rake::Task['pl:ship_rpms'].invoke
515
+ Rake::Task['pl:ship_debs'].invoke
516
+ Rake::Task['pl:ship_dmg'].invoke
517
+ Rake::Task['pl:ship_swix'].invoke
518
+ Rake::Task['pl:ship_nuget'].invoke
519
+ Rake::Task['pl:ship_tar'].invoke
520
+ Rake::Task['pl:ship_svr4'].invoke
521
+ Rake::Task['pl:ship_p5p'].invoke
522
+ Rake::Task['pl:ship_msi'].invoke
523
+ add_shipped_metrics(pe_version: ENV['PE_VER'], is_rc: !Pkg::Util::Version.final?) if Pkg::Config.benchmark
524
+ post_shipped_metrics if Pkg::Config.benchmark
525
+ else
526
+ puts 'Ship canceled'
527
+ exit
528
+ end
529
+ end
530
+
531
+ desc 'Test out the ship requirements'
532
+ task ship_check: 'pl:fetch' do
533
+ errs = []
534
+ ssh_errs = []
535
+ gpg_errs = []
536
+
537
+ if ENV['TEAM']
538
+ unless ENV['TEAM'] == 'release'
539
+ errs << "TEAM environment variable is #{ENV['TEAM']}. It should be 'release'"
540
+ end
541
+ else
542
+ errs << 'TEAM environment variable is not set. This should be set to release'
543
+ end
544
+ # Check SSH access to the staging servers
545
+ ssh_errs << Pkg::Util::Net.check_host_ssh(Pkg::Util.filter_configs('staging_server').values.uniq)
546
+ # Check SSH access to the signing servers, with some windows special-ness
547
+ ssh_errs << Pkg::Util::Net.check_host_ssh(Pkg::Util.filter_configs('signing_server').values.uniq - [Pkg::Config.msi_signing_server])
548
+ ssh_errs << Pkg::Util::Net.check_host_ssh("Administrator@#{Pkg::Config.msi_signing_server}")
549
+ # Check SSH access to the final shipped hosts
550
+ ssh_errs << Pkg::Util::Net.check_host_ssh(Pkg::Util.filter_configs('^(?!.*(?=build|internal)).*_host$').values.uniq)
551
+ ssh_errs.flatten!
552
+ unless ssh_errs.empty?
553
+ ssh_errs.each do |host|
554
+ errs << "Unable to ssh to #{host}"
555
+ end
556
+ end
557
+
558
+ # Check for GPG on linux-y systems
559
+ gpg_errs << Pkg::Util::Net.check_host_gpg(Pkg::Config.apt_signing_server, Pkg::Util::Gpg.key)
560
+ gpg_errs << Pkg::Util::Net.check_host_gpg(Pkg::Config.distribution_server, Pkg::Util::Gpg.key)
561
+ gpg_errs.flatten!
562
+ # ignore gpg errors for hosts we couldn't ssh into
563
+ gpg_errs -= ssh_errs
564
+ unless gpg_errs.empty?
565
+ gpg_errs.each do |host|
566
+ errs << "Secret key #{Pkg::Util::Gpg.key} not found on #{host}"
567
+ end
568
+ end
569
+
570
+ # For windows and solaris it looks like as long as you have ssh access
571
+ # to the signers you should be able to sign. If this changes in the future
572
+ # we should add more checks here, but for now it should be fine.
573
+ # Check for ability to sign OSX. Should just need to be able to unlock keychain
574
+ begin
575
+ unless ssh_errs.include?(Pkg::Config.osx_signing_server)
576
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.osx_signing_server, %(/usr/bin/security -q unlock-keychain -p "#{Pkg::Config.osx_signing_keychain_pw}" "#{Pkg::Config.osx_signing_keychain}"), false, '-oBatchMode=yes')
577
+ end
578
+ rescue
579
+ errs << "Unlocking the OSX keychain failed! Check the password in your .bashrc on #{Pkg::Config.osx_signing_server}"
580
+ end
581
+
582
+ if Pkg::Config.build_gem
583
+ # Do we have stickler and nexus?
584
+ if Pkg::Util::Misc.check_gem('stickler')
585
+ `stickler list --server #{Pkg::Config.internal_stickler_host} > /dev/null 2>&1`
586
+ unless $CHILD_STATUS.zero?
587
+ errs << "Listing gems at the stickler server #{Pkg::Config.internal_stickler_host} failed!"
588
+ end
589
+ else
590
+ errs << 'gem stickler not found'
591
+ end
592
+
593
+ errs << 'gem nexus not found' unless Pkg::Util::Misc.check_gem('nexus')
594
+ `gem list --source #{Pkg::Config.internal_nexus_host} > /dev/null 2>&1`
595
+ unless $CHILD_STATUS.zero?
596
+ errs << "Listing gems at the nexus server #{Pkg::Config.internal_nexus_host} failed!"
597
+ end
598
+
599
+ # Do we have rubygems access set up
600
+ if Pkg::Util::File.file_exists?("#{ENV['HOME']}/.gem/credentials")
601
+ # Do we have permissions to publish this gem on rubygems
602
+ unless Pkg::Util::Misc.check_rubygems_ownership(Pkg::Config.gem_name)
603
+ errs << "You don't own #{Pkg::Config.gem_name} on rubygems.org"
604
+ end
605
+ else
606
+ errs << "You haven't set up your .gem/credentials file for rubygems.org access"
607
+ end
608
+ end
609
+
610
+ puts "\n\n"
611
+ if errs.empty?
612
+ puts 'Hooray! You should be good for shipping!'
613
+ else
614
+ puts "Found #{errs.length} issues:"
615
+ errs.each do |err|
616
+ puts " * #{err}"
617
+ end
618
+ end
619
+ end
620
+
621
+ # It is odd to namespace this ship task under :jenkins, but this task is
622
+ # intended to be a component of the jenkins-based build workflow even if it
623
+ # doesn't interact with jenkins directly. The :target argument is so that we
624
+ # can invoke this task with a subdirectory of the standard distribution
625
+ # server path. That way we can separate out built artifacts from
626
+ # signed/actually shipped artifacts e.g. $path/shipped/ or $path/artifacts.
627
+ namespace :jenkins do
628
+ desc 'ship pkg directory contents to artifactory'
629
+ task :ship_to_artifactory, :local_dir do |_t, args|
630
+ Pkg::Util::RakeUtils.invoke_task('pl:fetch')
631
+ artifactory = Pkg::ManageArtifactory.new(Pkg::Config.project, Pkg::Config.ref)
632
+
633
+ local_dir = args.local_dir || 'pkg'
634
+ Dir.glob("#{local_dir}/**/*").reject { |e| File.directory? e }.each do |artifact|
635
+ artifactory.deploy_package(artifact)
636
+ end
637
+ end
638
+
639
+ desc 'Ship pkg directory contents to distribution server'
640
+ task :ship, :target, :local_dir do |_t, args|
641
+ Pkg::Util::RakeUtils.invoke_task('pl:fetch')
642
+ target = args.target || 'artifacts'
643
+ local_dir = args.local_dir || 'pkg'
644
+ project_basedir = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
645
+ artifact_dir = "#{project_basedir}/#{target}"
646
+
647
+ # For EZBake builds, we also want to include the ezbake.manifest file to
648
+ # get a snapshot of this build and all dependencies. We eventually will
649
+ # create a yaml version of this file, but until that point we want to
650
+ # make the original ezbake.manifest available
651
+ #
652
+ ezbake_manifest = File.join('ext', 'ezbake.manifest')
653
+ if File.exist?(ezbake_manifest)
654
+ cp(ezbake_manifest, File.join(local_dir, "#{Pkg::Config.ref}.ezbake.manifest"))
655
+ end
656
+ ezbake_yaml = File.join("ext", "ezbake.manifest.yaml")
657
+ if File.exists?(ezbake_yaml)
658
+ cp(ezbake_yaml, File.join(local_dir, "#{Pkg::Config.ref}.ezbake.manifest.yaml"))
659
+ end
660
+
661
+ # We are starting to collect additional metadata which contains
662
+ # information such as git ref and dependencies that are needed at build
663
+ # time. If this file exists we will make it available for downstream.
664
+ build_data_json = File.join("ext", "build_metadata.json")
665
+ if File.exists?(build_data_json)
666
+ cp(build_data_json, File.join(local_dir, "#{Pkg::Config.ref}.build_metadata.json"))
667
+ end
668
+
669
+ # Sadly, the packaging repo cannot yet act on its own, without living
670
+ # inside of a packaging-repo compatible project. This means in order to
671
+ # use the packaging repo for shipping and signing (things that really
672
+ # don't require build automation, specifically) we still need the project
673
+ # clone itself.
674
+ Pkg::Util::Git.bundle('HEAD', 'signing_bundle', local_dir)
675
+
676
+ # While we're bundling things, let's also make a git bundle of the
677
+ # packaging repo that we're using when we invoke pl:jenkins:ship. We can
678
+ # have a reasonable level of confidence, later on, that the git bundle on
679
+ # the distribution server was, in fact, the git bundle used to create the
680
+ # associated packages. This is because this ship task is automatically
681
+ # called upon completion each cell of the pl:jenkins:uber_build, and we
682
+ # have --ignore-existing set below. As such, the only git bundle that
683
+ # should possibly be on the distribution is the one used to create the
684
+ # packages.
685
+ # We're bundling the packaging repo because it allows us to keep an
686
+ # archive of the packaging source that was used to create the packages,
687
+ # so that later on if we need to rebuild an older package to audit it or
688
+ # for some other reason we're assured that the new package isn't
689
+ # different by virtue of the packaging automation.
690
+ if defined?(PACKAGING_ROOT)
691
+ packaging_bundle = ''
692
+ cd PACKAGING_ROOT do
693
+ packaging_bundle = Pkg::Util::Git.bundle('HEAD', 'packaging-bundle')
694
+ end
695
+ mv(packaging_bundle, local_dir)
696
+ end
697
+
698
+ # This is functionality to add the project-arch.msi links that have no
699
+ # version. The code itself looks for the link (if it's there already)
700
+ # and if the source package exists before linking. Searching for the
701
+ # packages has been restricted specifically to just the pkg/windows dir
702
+ # on purpose, as this is where we currently have all windows packages
703
+ # building to. Once we move the Metadata about the output location in
704
+ # to one source of truth we can refactor this to use that to search
705
+ # -Sean P. M. 08/12/16
706
+ packages = Dir["#{local_dir}/windows/*"]
707
+ ['x86', 'x64'].each do |arch|
708
+ package_version = Pkg::Util::Git.describe.tr('-', '.')
709
+ package_filename = File.join(local_dir, 'windows', "#{Pkg::Config.project}-#{package_version}-#{arch}.msi")
710
+ link_filename = File.join(local_dir, 'windows', "#{Pkg::Config.project}-#{arch}.msi")
711
+
712
+ next unless !packages.include?(link_filename) && packages.include?(package_filename)
713
+ # Dear future code spelunkers:
714
+ # Using symlinks instead of hard links causes failures when we try
715
+ # to set these files to be immutable. Also be wary of whether the
716
+ # linking utility you're using expects the source path to be relative
717
+ # to the link target or pwd.
718
+ #
719
+ FileUtils.ln(package_filename, link_filename)
720
+ end
721
+
722
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
723
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.distribution_server, "mkdir --mode=775 -p #{project_basedir}")
724
+ Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.distribution_server, "mkdir -p #{artifact_dir}")
725
+ Pkg::Util::Net.rsync_to("#{local_dir}/", Pkg::Config.distribution_server, "#{artifact_dir}/", extra_flags: ['--ignore-existing', '--exclude repo_configs'])
726
+ end
727
+
728
+ # In order to get a snapshot of what this build looked like at the time
729
+ # of shipping, we also generate and ship the params file
730
+ #
731
+ Pkg::Config.config_to_yaml(local_dir)
732
+ Pkg::Util::Execution.retry_on_fail(:times => 3) do
733
+ Pkg::Util::Net.rsync_to("#{local_dir}/#{Pkg::Config.ref}.yaml", Pkg::Config.distribution_server, "#{artifact_dir}/", extra_flags: ["--exclude repo_configs"])
734
+ end
735
+
736
+ # If we just shipped a tagged version, we want to make it immutable
737
+ files = Dir.glob("#{local_dir}/**/*").select { |f| File.file?(f) and !f.include? "#{Pkg::Config.ref}.yaml" }.map do |file|
738
+ "#{artifact_dir}/#{file.sub(/^#{local_dir}\//, '')}"
739
+ end
740
+
741
+ Pkg::Util::Net.remote_set_ownership(Pkg::Config.distribution_server, 'root', 'release', files)
742
+ Pkg::Util::Net.remote_set_permissions(Pkg::Config.distribution_server, '0664', files)
743
+ Pkg::Util::Net.remote_set_immutable(Pkg::Config.distribution_server, files)
744
+ end
745
+
746
+ desc 'Ship generated repository configs to the distribution server'
747
+ task :ship_repo_configs do
748
+ Pkg::Deb::Repo.ship_repo_configs
749
+ Pkg::Rpm::Repo.ship_repo_configs
750
+ end
751
+ end
752
+ end