packaging 0.88.77

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123)
  1. checksums.yaml +7 -0
  2. data/LICENSE +17 -0
  3. data/README-Solaris.md +117 -0
  4. data/README.md +977 -0
  5. data/lib/packaging.rb +32 -0
  6. data/lib/packaging/archive.rb +126 -0
  7. data/lib/packaging/artifactory.rb +651 -0
  8. data/lib/packaging/artifactory/extensions.rb +94 -0
  9. data/lib/packaging/config.rb +492 -0
  10. data/lib/packaging/config/params.rb +387 -0
  11. data/lib/packaging/config/validations.rb +13 -0
  12. data/lib/packaging/deb.rb +28 -0
  13. data/lib/packaging/deb/repo.rb +264 -0
  14. data/lib/packaging/gem.rb +70 -0
  15. data/lib/packaging/metrics.rb +15 -0
  16. data/lib/packaging/nuget.rb +39 -0
  17. data/lib/packaging/paths.rb +376 -0
  18. data/lib/packaging/platforms.rb +507 -0
  19. data/lib/packaging/repo.rb +155 -0
  20. data/lib/packaging/retrieve.rb +75 -0
  21. data/lib/packaging/rpm.rb +5 -0
  22. data/lib/packaging/rpm/repo.rb +254 -0
  23. data/lib/packaging/sign.rb +8 -0
  24. data/lib/packaging/sign/deb.rb +9 -0
  25. data/lib/packaging/sign/dmg.rb +41 -0
  26. data/lib/packaging/sign/ips.rb +57 -0
  27. data/lib/packaging/sign/msi.rb +124 -0
  28. data/lib/packaging/sign/rpm.rb +115 -0
  29. data/lib/packaging/tar.rb +163 -0
  30. data/lib/packaging/util.rb +146 -0
  31. data/lib/packaging/util/date.rb +20 -0
  32. data/lib/packaging/util/execution.rb +85 -0
  33. data/lib/packaging/util/file.rb +125 -0
  34. data/lib/packaging/util/git.rb +174 -0
  35. data/lib/packaging/util/git_tags.rb +73 -0
  36. data/lib/packaging/util/gpg.rb +66 -0
  37. data/lib/packaging/util/jenkins.rb +95 -0
  38. data/lib/packaging/util/misc.rb +69 -0
  39. data/lib/packaging/util/net.rb +410 -0
  40. data/lib/packaging/util/os.rb +17 -0
  41. data/lib/packaging/util/platform.rb +40 -0
  42. data/lib/packaging/util/rake_utils.rb +112 -0
  43. data/lib/packaging/util/serialization.rb +19 -0
  44. data/lib/packaging/util/ship.rb +300 -0
  45. data/lib/packaging/util/tool.rb +41 -0
  46. data/lib/packaging/util/version.rb +334 -0
  47. data/spec/fixtures/config/ext/build_defaults.yaml +2 -0
  48. data/spec/fixtures/config/ext/project_data.yaml +2 -0
  49. data/spec/fixtures/configs/components/test_file.json +1 -0
  50. data/spec/fixtures/configs/components/test_file_2.json +0 -0
  51. data/spec/fixtures/configs/components/test_file_not_tagged.json +1 -0
  52. data/spec/fixtures/configs/components/test_file_wrong_ext.txt +0 -0
  53. data/spec/fixtures/configs/components/test_file_wrong_ext.wrong +0 -0
  54. data/spec/fixtures/util/pre_tasks.yaml +4 -0
  55. data/spec/lib/packaging/artifactory_spec.rb +221 -0
  56. data/spec/lib/packaging/config_spec.rb +576 -0
  57. data/spec/lib/packaging/deb/repo_spec.rb +157 -0
  58. data/spec/lib/packaging/deb_spec.rb +52 -0
  59. data/spec/lib/packaging/gem_spec.rb +86 -0
  60. data/spec/lib/packaging/paths_spec.rb +418 -0
  61. data/spec/lib/packaging/platforms_spec.rb +178 -0
  62. data/spec/lib/packaging/repo_spec.rb +135 -0
  63. data/spec/lib/packaging/retrieve_spec.rb +100 -0
  64. data/spec/lib/packaging/rpm/repo_spec.rb +133 -0
  65. data/spec/lib/packaging/sign_spec.rb +133 -0
  66. data/spec/lib/packaging/tar_spec.rb +116 -0
  67. data/spec/lib/packaging/util/execution_spec.rb +56 -0
  68. data/spec/lib/packaging/util/file_spec.rb +139 -0
  69. data/spec/lib/packaging/util/git_spec.rb +160 -0
  70. data/spec/lib/packaging/util/git_tag_spec.rb +36 -0
  71. data/spec/lib/packaging/util/gpg_spec.rb +64 -0
  72. data/spec/lib/packaging/util/jenkins_spec.rb +112 -0
  73. data/spec/lib/packaging/util/misc_spec.rb +31 -0
  74. data/spec/lib/packaging/util/net_spec.rb +259 -0
  75. data/spec/lib/packaging/util/os_spec.rb +31 -0
  76. data/spec/lib/packaging/util/rake_utils_spec.rb +70 -0
  77. data/spec/lib/packaging/util/ship_spec.rb +199 -0
  78. data/spec/lib/packaging/util/version_spec.rb +123 -0
  79. data/spec/lib/packaging_spec.rb +19 -0
  80. data/spec/spec_helper.rb +22 -0
  81. data/static_artifacts/PackageInfo.plist +3 -0
  82. data/tasks/00_utils.rake +214 -0
  83. data/tasks/30_metrics.rake +33 -0
  84. data/tasks/apple.rake +268 -0
  85. data/tasks/archive.rake +69 -0
  86. data/tasks/build.rake +12 -0
  87. data/tasks/clean.rake +5 -0
  88. data/tasks/config.rake +35 -0
  89. data/tasks/deb.rake +129 -0
  90. data/tasks/deb_repos.rake +28 -0
  91. data/tasks/deprecated.rake +130 -0
  92. data/tasks/doc.rake +20 -0
  93. data/tasks/education.rake +57 -0
  94. data/tasks/fetch.rake +60 -0
  95. data/tasks/gem.rake +159 -0
  96. data/tasks/jenkins.rake +538 -0
  97. data/tasks/jenkins_dynamic.rake +202 -0
  98. data/tasks/load_extras.rake +21 -0
  99. data/tasks/mock.rake +348 -0
  100. data/tasks/nightly_repos.rake +286 -0
  101. data/tasks/pe_deb.rake +12 -0
  102. data/tasks/pe_rpm.rake +13 -0
  103. data/tasks/pe_ship.rake +226 -0
  104. data/tasks/pe_sign.rake +13 -0
  105. data/tasks/pe_tar.rake +5 -0
  106. data/tasks/retrieve.rake +52 -0
  107. data/tasks/rpm.rake +66 -0
  108. data/tasks/rpm_repos.rake +29 -0
  109. data/tasks/ship.rake +692 -0
  110. data/tasks/sign.rake +154 -0
  111. data/tasks/tag.rake +8 -0
  112. data/tasks/tar.rake +28 -0
  113. data/tasks/update.rake +16 -0
  114. data/tasks/vanagon.rake +35 -0
  115. data/tasks/vendor_gems.rake +117 -0
  116. data/tasks/version.rake +33 -0
  117. data/tasks/z_data_dump.rake +65 -0
  118. data/templates/README +1 -0
  119. data/templates/downstream.xml.erb +47 -0
  120. data/templates/msi.xml.erb +197 -0
  121. data/templates/packaging.xml.erb +346 -0
  122. data/templates/repo.xml.erb +117 -0
  123. metadata +287 -0
data/tasks/nightly_repos.rake ADDED
@@ -0,0 +1,286 @@
+ namespace :pl do
+   ##
+   # This crazy piece of work establishes a remote repo on the distribution
+   # server, ships our repos out to it, signs them, and brings them back.
+   # This is an INTERNAL rake task and should not be considered part of the packaging API.
+   # Please do not depend on it.
+   #
+   namespace :jenkins do
+     ##
+     # This is to enable the work in CPR-52 to support nightly repos. For this
+     # work we'll have signed repos for each package of a build.
+     #
+     task :remote_sign_repos, [:target_prefix] => "pl:fetch" do |t, args|
+       target_prefix = args.target_prefix or fail ":target_prefix is a required argument for #{t}"
+       target = "#{target_prefix}_repos/"
+       signing_server = Pkg::Config.signing_server
+       # Sign the repos please
+       Pkg::Util::File.empty_dir?("repos") and fail "There were no repos found in repos/. Maybe something in the pipeline failed?"
+       signing_bundle = ENV['SIGNING_BUNDLE']
+
+       remote_repo = Pkg::Util::Net.remote_unpack_git_bundle(signing_server, 'HEAD', nil, signing_bundle)
+       build_params = Pkg::Util::Net.remote_buildparams(signing_server, Pkg::Config)
+       Pkg::Util::Net.rsync_to('repos', signing_server, remote_repo)
+       rake_command = <<-DOC
+ cd #{remote_repo} ;
+ #{Pkg::Util::Net.remote_bundle_install_command}
+ bundle exec rake pl:jenkins:sign_repos GPG_KEY=#{Pkg::Util::Gpg.key} PARAMS_FILE=#{build_params}
+ DOC
+       Pkg::Util::Net.remote_execute(signing_server, rake_command)
+       Pkg::Util::Net.rsync_from("#{remote_repo}/repos/", signing_server, target)
+       Pkg::Util::Net.remote_execute(signing_server, "rm -rf #{remote_repo}")
+       Pkg::Util::Net.remote_execute(signing_server, "rm #{build_params}")
+       puts "Signed packages staged in '#{target}' directory"
+     end
+
+     task :sign_repos => "pl:fetch" do
+       Pkg::Util::RakeUtils.invoke_task("pl:sign_rpms", "repos")
+       Pkg::Rpm::Repo.create_local_repos('repos')
+       Pkg::Rpm::Repo.sign_repos('repos')
+       Pkg::Deb::Repo.sign_repos('repos', 'Apt repository for signed builds')
+       Pkg::Sign::Dmg.sign('repos') unless Dir['repos/apple/**/*.dmg'].empty?
+       Pkg::Sign::Ips.sign('repos') unless Dir['repos/solaris/11/**/*.p5p'].empty?
+       Pkg::Sign::Msi.sign('repos') unless Dir['repos/windows/**/*.msi'].empty?
+     end
+
+     task :ship_signed_repos, [:target_prefix] => "pl:fetch" do |t, args|
+       target_prefix = args.target_prefix or fail ":target_prefix is a required argument for #{t}"
+       target_dir = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}/#{target_prefix}_repos"
+       Pkg::Util::Execution.retry_on_fail(:times => 3) do
+         # Ship the now signed repos to the distribution server
+         Pkg::Util::Net.remote_execute(Pkg::Config.distribution_server, "mkdir -p #{target_dir}")
+         Pkg::Util::Net.rsync_to("#{target_prefix}_repos/", Pkg::Config.distribution_server, target_dir)
+       end
+     end
+
+     # This task should be invoked after prepare_signed_repos, so that there are repos to pack up.
+     task :pack_signed_repo, [:path_to_repo, :name_of_archive, :versioning] => ["pl:fetch"] do |t, args|
+       # path_to_repo should be relative to ./pkg
+       path_to_repo = args.path_to_repo or fail ":path_to_repo is a required argument for #{t}"
+       name_of_archive = args.name_of_archive or fail ":name_of_archive is a required argument for #{t}"
+       versioning = args.versioning or fail ":versioning is a required argument for #{t}"
+       Pkg::Repo.create_signed_repo_archive(path_to_repo, name_of_archive, versioning)
+     end
+
+     task :pack_all_signed_repos_individually, [:name_of_archive, :versioning] => ["pl:fetch"] do |t, args|
+       name_of_archive = args.name_of_archive or fail ":name_of_archive is a required argument for #{t}"
+       versioning = args.versioning or fail ":versioning is a required argument for #{t}"
+       Pkg::Repo.create_all_repo_archives(name_of_archive, versioning)
+     end
+
+     task :prepare_signed_repos, [:target_host, :target_prefix, :versioning] => ["clean", "pl:fetch"] do |t, args|
+       target_host = args.target_host or fail ":target_host is a required argument to #{t}"
+       target_prefix = args.target_prefix or fail ":target_prefix is a required argument for #{t}"
+       versioning = args.versioning or fail ":versioning is a required argument for #{t}"
+       mkdir("pkg")
+
+       Dir.chdir("pkg") do
+         if versioning == 'ref'
+           local_target = File.join(Pkg::Config.project, Pkg::Config.ref)
+         elsif versioning == 'version'
+           local_target = File.join(Pkg::Config.project, Pkg::Util::Version.dot_version)
+         end
+
+         FileUtils.mkdir_p([local_target, Pkg::Config.project + "-latest"])
+
+         # Rake task dependencies with arguments are nuts, so we just directly
+         # invoke them here. We want the signed_* directories staged as
+         # repos/repo_configs, because that's how we want them on the public
+         # server
+         Pkg::Util::RakeUtils.invoke_task("pl:jenkins:retrieve", "#{target_prefix}_repos", File.join(local_target, "repos"))
+         Pkg::Util::RakeUtils.invoke_task("pl:jenkins:retrieve", "#{target_prefix}_repo_configs", File.join(local_target, "repo_configs"))
+
+         # The repo configs have Pkg::Config.builds_server used in them, but that
+         # is internal, so we need to replace it with our public server. We also
+         # want them only to see repos, and not signed repos, since the host is
+         # called nightlies.puppetlabs.com. Here we replace those values in each
+         # config with the desired value.
+         Dir.glob("#{local_target}/repo_configs/**/*").select { |t_config| File.file?(t_config) }.each do |config|
+           new_contents = File.read(config).gsub(Pkg::Config.builds_server, target_host).gsub(/#{target_prefix}_repos/, "repos")
+           File.open(config, "w") { |file| file.puts new_contents }
+         end
+
+         # Latest repo work. This little section does some magic to munge the
+         # repo configs and link in the latest repos. The repo_configs are
+         # renamed to project-latest-$platform.{list,repo} to ensure that their
+         # names stay the same between runs. Their contents have the ref
+         # stripped off and the project replaced by $project-latest. Then the
+         # repos directory is a symlink to the last pushed ref's repos.
+         FileUtils.cp_r(File.join(local_target, "repo_configs"), Pkg::Config.project + "-latest", { :preserve => true })
+
+         # Now we need to remove the ref and replace $project with
+         # $project-latest so that it will work as a pinned latest repo
+         # Also rename the repo config to a static filename.
+         Dir.glob("#{Pkg::Config.project}-latest/repo_configs/**/*").select { |t_config| File.file?(t_config) }.each do |config|
+           new_contents = File.read(config)
+           new_contents.gsub!(%r{#{Pkg::Config.ref}/}, "")
+           new_contents.gsub!(%r{#{Pkg::Config.project}/}, Pkg::Config.project + "-latest/")
+           new_contents.gsub!(Pkg::Config.ref, "latest")
+
+           File.open(config, "w") { |file| file.puts new_contents }
+           FileUtils.mv(config, config.gsub(Pkg::Config.ref, "latest"))
+         end
+
+         # If we're using the version strategy instead of ref, here we shuffle
+         # around directories and munge repo_configs to replace the ref with the
+         # version. In the case that dot_version and ref are the same, we
+         # have nothing to do, so the conditional is skipped.
+         if versioning == 'version' && Pkg::Util::Version.dot_version != Pkg::Config.ref
+           Dir.glob("#{local_target}/repo_configs/**/*").select { |t_config| File.file?(t_config) }.each do |config|
+             new_contents = File.read(config)
+             new_contents.gsub!(%r{#{Pkg::Config.ref}}, Pkg::Util::Version.dot_version)
+
+             File.open(config, "w") { |file| file.puts new_contents }
+             FileUtils.mv(config, config.gsub(Pkg::Config.ref, Pkg::Util::Version.dot_version))
+           end
+         end
+
+         # Make a latest symlink for the project
+         FileUtils.ln_sf(File.join("..", local_target, "repos"), File.join(Pkg::Config.project + "-latest"), :verbose => true)
+       end
+     end
+
+     task :deploy_signed_repos, [:target_host, :target_basedir, :foss_only] => "pl:fetch" do |t, args|
+       target_host = args.target_host or fail ":target_host is a required argument to #{t}"
+       target_basedir = args.target_basedir or fail ":target_basedir is a required argument to #{t}"
+       include_paths = []
+
+       if args.foss_only && Pkg::Config.foss_platforms && !Pkg::Config.foss_platforms.empty?
+         Pkg::Config.foss_platforms.each do |platform|
+           include_paths << Pkg::Paths.repo_path(platform, legacy: true, nonfinal: true)
+           if Pkg::Paths.repo_config_path(platform)
+             include_paths << Pkg::Paths.repo_config_path(platform)
+           end
+         end
+       else
+         include_paths = ["./"]
+       end
+
+       # Get the directories together - we need to figure out which bits to ship based on the include_path
+       # First we get the build itself
+       Pkg::Util::Execution.capture3(%(find #{include_paths.map { |path| "pkg/#{Pkg::Config.project}/**/#{path}" }.join(' ') } | sort > include_file))
+       Pkg::Util::Execution.capture3(%(mkdir -p tmp && tar -T include_file -cf - | (cd ./tmp && tar -xf -)))
+
+       # Then we grab the appropriate metadata only
+       Pkg::Util::Execution.capture3(%(find #{include_paths.map { |path| "pkg/#{Pkg::Config.project}-latest/#{path}" unless path.include? "repos" }.join(' ') } | sort > include_file_latest))
+
+       # Include /repos in the include_file_latest so we correctly include the symlink in the final file list to ship
+       Pkg::Util::Execution.capture3(%(echo "pkg/#{Pkg::Config.project}-latest/repos" >> include_file_latest))
+       Pkg::Util::Execution.capture3(%(tar -T include_file_latest -cf - | (cd ./tmp && tar -xf -)))
+
+       Dir.chdir("tmp/pkg") do
+         # Ship it to the target for consumption
+         # First we ship the latest and clean up any repo-configs that are no longer valid with --delete-after
+         Pkg::Util::Net.rsync_to("#{Pkg::Config.project}-latest", target_host, target_basedir, extra_flags: ["--delete-after", "--keep-dirlinks"])
+         # Then we ship the sha version with default rsync flags
+         Pkg::Util::Net.rsync_to("#{Pkg::Config.project}", target_host, target_basedir)
+       end
+
+       puts "'#{Pkg::Config.ref}' of '#{Pkg::Config.project}' has been shipped to '#{target_host}:#{target_basedir}'"
+     end
+
+     task :deploy_signed_repos_to_s3, [:target_bucket] => "pl:fetch" do |t, args|
+       target_bucket = args.target_bucket or fail ":target_bucket is a required argument to #{t}"
+
+       # Ship it to the target for consumption
+       # First we ship the latest and clean up any repo-configs that are no longer valid with --delete-removed and --acl-public
+       Pkg::Util::Net.s3sync_to("pkg/#{Pkg::Config.project}-latest/", target_bucket, "#{Pkg::Config.project}-latest", ["--acl-public", "--delete-removed", "--follow-symlinks"])
+       # Then we ship the sha version with just --acl-public
+       Pkg::Util::Net.s3sync_to("pkg/#{Pkg::Config.project}/", target_bucket, Pkg::Config.project, ["--acl-public", "--follow-symlinks"])
+
+       puts "'#{Pkg::Config.ref}' of '#{Pkg::Config.project}' has been shipped via s3 to '#{target_bucket}'"
+     end
+
+     task :generate_signed_repo_configs, [:target_prefix] => "pl:fetch" do |t, args|
+       target_prefix = args.target_prefix or fail ":target_prefix is a required argument for #{t}"
+       Pkg::Rpm::Repo.generate_repo_configs("#{target_prefix}_repos", "#{target_prefix}_repo_configs", true)
+       Pkg::Deb::Repo.generate_repo_configs("#{target_prefix}_repos", "#{target_prefix}_repo_configs")
+     end
+
+     task :ship_signed_repo_configs, [:target_prefix] => "pl:fetch" do |t, args|
+       target_prefix = args.target_prefix or fail ":target_prefix is a required argument for #{t}"
+       Pkg::Rpm::Repo.ship_repo_configs("#{target_prefix}_repo_configs")
+       Pkg::Deb::Repo.ship_repo_configs("#{target_prefix}_repo_configs")
+     end
+
+     task :generate_signed_repos, [:target_prefix] => ["pl:fetch"] do |t, args|
+       target_prefix = args.target_prefix || 'nightly'
+       Dir.chdir("pkg") do
+         ["pl:jenkins:remote_sign_repos", "pl:jenkins:ship_signed_repos", "pl:jenkins:generate_signed_repo_configs", "pl:jenkins:ship_signed_repo_configs"].each do |task|
+           Pkg::Util::RakeUtils.invoke_task(task, target_prefix)
+         end
+         puts "Shipped '#{Pkg::Config.ref}' (#{Pkg::Config.version}) of '#{Pkg::Config.project}' into the puppet-agent repos."
+       end
+     end
+
+     # We want to keep the puppet-agent repos at a higher level and then link
+     # them into the correct version of PE. This is a private task and is
+     # called from the internal_puppet-agent-ship Jenkins job.
+     #
+     # @param target_host the remote host where the packages are being shipped
+     #        ex: agent-downloads.delivery.puppetlabs.net
+     # @param remote_dir the base path to deploy packages to
+     #        ex: /opt/puppet-agent
+     # @param versioning whether the puppet-agent version is a version string or
+     #        a github ref. Valid values are 'version' and 'ref'
+     # @param pe_version the PE version to deploy to.
+     #        ex: 2015.2
+     task :link_signed_repos, [:target_host, :remote_dir, :versioning, :pe_version] => ["pl:fetch"] do |t, args|
+       target_host = args.target_host or fail ":target_host is a required argument for #{t}"
+       remote_dir = args.remote_dir or fail ":remote_dir is a required argument for #{t}"
+       versioning = args.versioning or fail ":versioning is a required argument for #{t}"
+       pe_version = args.pe_version or fail ":pe_version is a required argument for #{t}"
+
+       if versioning == 'ref'
+         version_string = Pkg::Config.ref
+       elsif versioning == 'version'
+         version_string = Pkg::Util::Version.dot_version
+       end
+
+       pa_source = File.join(remote_dir, Pkg::Config.project)
+       pe_target = File.join(remote_dir, pe_version, Pkg::Config.project)
+       local_pa = File.join(pa_source, version_string)
+       local_pe = pe_target
+       local_pa_latest = "#{pa_source}-latest"
+       local_pe_latest = "#{pe_target}-latest"
+
+       Pkg::Util::Net.remote_execute(target_host, "mkdir -p '#{pe_target}'")
+       Pkg::Util::Net.remote_execute(target_host, "mkdir -p '#{local_pe_latest}'")
+       Pkg::Util::Net.remote_execute(target_host, "cp -r #{local_pa_latest}/* #{local_pe_latest}")
+       Pkg::Util::Net.remote_execute(target_host, "sed -i 's|/#{File.basename(local_pa_latest)}|/#{pe_version}/#{File.basename(local_pa_latest)}|' #{local_pe_latest}/repo_configs/*/*")
+       Pkg::Util::Net.remote_execute(target_host, "ln -sf '#{local_pa}' '#{local_pe}'")
+     end
+
+     task :nightly_repos => ["pl:fetch"] do
+       Pkg::Util::RakeUtils.invoke_task("pl:jenkins:generate_signed_repos", 'nightly')
+     end
+
+     task :deploy_nightly_repos, [:target_host, :target_basedir] => ["pl:fetch"] do |t, args|
+       target_host = args.target_host or fail ":target_host is a required argument to #{t}"
+       target_basedir = args.target_basedir or fail ":target_basedir is a required argument to #{t}"
+       Pkg::Util::RakeUtils.invoke_task("pl:jenkins:prepare_signed_repos", target_host, 'nightly', 'ref')
+       Pkg::Util::RakeUtils.invoke_task("pl:jenkins:deploy_signed_repos", target_host, target_basedir, true)
+     end
+
+     task :deploy_repos_to_s3, [:target_bucket] => ["pl:fetch"] do |t, args|
+       target_bucket = args.target_bucket or fail ":target_bucket is a required argument to #{t}"
+       target_host = "https://s3.amazonaws.com/#{target_bucket}"
+       Pkg::Util::RakeUtils.invoke_task("pl:jenkins:prepare_signed_repos", target_host, 'signed', 'version')
+       Pkg::Util::RakeUtils.invoke_task("pl:jenkins:deploy_signed_repos_to_s3", target_bucket)
+     end
+
+     task :update_release_versions do
+       target_bucket = ENV['TARGET_BUCKET'] or fail "TARGET_BUCKET must be specified to run the 'update_release_versions' task"
+       version = ENV['VERSION'] || Pkg::Util::Version.get_dot_version
+
+       tempdir = Pkg::Util::File.mktemp
+       latest_filepath = File.join(tempdir, "pkg")
+       FileUtils.mkdir_p(latest_filepath)
+
+       latest_filename = File.join(latest_filepath, "LATEST")
+       File.open(latest_filename, 'w') { |file| file.write(version) }
+       Pkg::Util::Net.s3sync_to(latest_filename, target_bucket, Pkg::Config.project, ["--acl-public", "--follow-symlinks"])
+       FileUtils.rm_rf latest_filepath
+     end
+   end
+ end
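
For orientation, the sketch below shows how the nightly flow defined in this file is typically chained together from another Rake context. It is a minimal illustration only: it assumes the gem has been required and its pl:* tasks are already loaded into the running Rake application, and the host and directory arguments are hypothetical placeholders rather than values defined by the gem.

    # Minimal sketch: drive the nightly pipeline defined in nightly_repos.rake.
    # Assumes `require 'packaging'` succeeds and the pl:* tasks are loaded;
    # the target host and basedir below are hypothetical examples.
    require 'packaging'

    # pl:jenkins:nightly_repos signs and ships repos and repo configs under the
    # 'nightly' prefix by invoking pl:jenkins:generate_signed_repos.
    Pkg::Util::RakeUtils.invoke_task('pl:jenkins:nightly_repos')

    # pl:jenkins:deploy_nightly_repos then stages the signed repos locally
    # (prepare_signed_repos) and rsyncs them to the given host and base
    # directory with foss_only set to true.
    Pkg::Util::RakeUtils.invoke_task(
      'pl:jenkins:deploy_nightly_repos',
      'nightlies.example.com',   # hypothetical target_host
      '/opt/repository'          # hypothetical target_basedir
    )
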
data/tasks/pe_deb.rake ADDED
@@ -0,0 +1,12 @@
+ # "Alias" tasks for PE - these just point at the standard pl: tasks. They exist
+ # for ease of aggregation with PE-specific tasks that _are_ actually different
+ # from their "pl" counterparts
+ if Pkg::Config.build_pe
+   namespace :pe do
+     desc "Create a PE deb from this repo using the default cow #{Pkg::Config.default_cow}."
+     task :deb => "pl:deb"
+
+     desc "Create PE debs from this git repository using all cows specified in build_defaults yaml"
+     task :deb_all => "pl:deb_all"
+   end
+ end
data/tasks/pe_rpm.rake ADDED
@@ -0,0 +1,13 @@
+ if Pkg::Config.build_pe
+   namespace :pe do
+     desc "Build a PE rpm using rpmbuild (requires all BuildRequires, rpmbuild, etc)"
+     task :rpm => "package:rpm"
+
+     desc "Build rpms using ALL final mocks in build_defaults yaml, keyed to PL infrastructure, pass MOCK to override"
+     task :mock_all => ["pl:fetch", "pl:mock_all"]
+
+     desc "Build a PE rpm using the default mock"
+     task :mock => ["pl:fetch", "pl:mock"]
+   end
+ end
+
data/tasks/pe_ship.rake ADDED
@@ -0,0 +1,226 @@
+ if Pkg::Config.build_pe
+   namespace :pe do
+     desc "Ship PE rpms to #{Pkg::Config.yum_host}"
+     task :ship_rpms => "pl:fetch" do
+       puts "Shipping packages to #{Pkg::Config.yum_target_path}"
+       Pkg::Util::File.empty_dir?("pkg/pe/rpm") and fail "The 'pkg/pe/rpm' directory has no packages. Did you run rake pe:rpm?"
+       Pkg::Util::Execution.retry_on_fail(:times => 3) do
+         if Pkg::Config.pe_feature_branch
+           puts "On a feature branch - overwrites allowed"
+           Pkg::Util::Net.rsync_to('pkg/pe/rpm/', Pkg::Config.yum_host, Pkg::Config.yum_target_path, extra_flags: [])
+         else
+           Pkg::Util::Net.rsync_to('pkg/pe/rpm/', Pkg::Config.yum_host, Pkg::Config.yum_target_path)
+         end
+       end
+       if Pkg::Config.team == 'release'
+
+         # If this is not a feature branch or release branch, we need to link the
+         # shipped packages into the feature repos and update their metadata.
+         unless Pkg::Config.pe_feature_branch || Pkg::Config.pe_release_branch
+           puts "Linking RPMs to feature repo"
+           Pkg::Util::RakeUtils.invoke_task("pe:remote:link_shipped_rpms_to_feature_repo")
+           Pkg::Util::RakeUtils.invoke_task("pe:remote:update_yum_repo")
+         end
+       end
+     end
+
+     desc "Ship PE debs to #{Pkg::Config.apt_host}"
+     task :ship_debs => "pl:fetch" do
+       Pkg::Util::File.empty_dir?("pkg/pe/deb") and fail "The 'pkg/pe/deb' directory has no packages!"
+       target_path = ENV['APT_REPO']
+
+       unless Pkg::Config.pe_feature_branch
+         # If APT_REPO isn't specified as an environment variable, we use a temporary one
+         # created for this specific deb ship. This enables us to escape the conflicts
+         # introduced with simultaneous deb ships.
+         #
+
+         # We are going to iterate over every set of packages, adding them to
+         # the repository set by set. This enables us to handle different
+         # repositories per distribution. "pkg/pe/deb/" contains directories
+         # named for every distribution, e.g. "lucid," "squeeze," etc.
+         #
+         Dir["pkg/pe/deb/*"].each do |dist|
+           dist = File.basename(dist)
+           unless target_path
+             puts "Creating temporary incoming dir on #{Pkg::Config.apt_host}"
+             target_path = %x(ssh -t #{Pkg::Config.apt_host} 'mktemp -d -t incoming-XXXXXX').chomp
+           end
+
+           # For aptly, we ship just the debs into an incoming dir. On the remote end,
+           # aptly will pull these debs in and add them to the repositories based on the
+           # dist, e.g. lucid, architecture notwithstanding.
+           #
+           # The layout that the aptly library will expect is:
+           #
+           # incoming_dir/{$dists}/*.deb
+           #
+           # ex:
+           # incoming_dir|
+           #   |_lucid/*.deb
+           #   |_squeeze/*.deb
+           #   |_precise/*.deb
+           #   |_wheezy/*.deb
+           #
+           puts "Shipping PE debs to apt repo 'incoming' dir on #{Pkg::Config.apt_host}"
+           Pkg::Util::Execution.retry_on_fail(:times => 3) do
+             Dir["pkg/pe/deb/#{dist}/*.deb"].each do |deb|
+               Pkg::Util::Net.remote_execute(Pkg::Config.apt_host, "mkdir -p '#{target_path}/#{dist}'")
+               Pkg::Util::Net.rsync_to(deb, Pkg::Config.apt_host, "#{target_path}/#{dist}/#{File.basename(deb)}")
+             end
+           end
+
+           if Pkg::Config.team == 'release'
+             Rake::Task["pe:remote:apt"].reenable
+             Rake::Task["pe:remote:apt"].invoke(target_path, dist)
+           end
+
+         end
+       end
+
+       # We also ship our PE artifacts to directories for archival purposes and to
+       # ease the gathering of both debs and sources when we do PE compose and ship. For
+       # this case, we ship everything to directories that mirror the legacy rpm
+       # directory format:
+       #
+       # repos/$dist-{$architecture|source}
+       #
+       # ex:
+       # repos|
+       #   |_squeeze-i386
+       #   |_squeeze-amd64
+       #   |_squeeze-source
+       #
+       # We also have concerns about shipped artifacts getting accidentally overwritten
+       # by newer ones. To handle this, we make everything we ship to the archive
+       # directories immutable, after rsyncing out.
+       #
+       base_path = Pkg::Config.apt_target_path
+
+       puts "Shipping all built artifacts to archive directories on #{Pkg::Config.apt_host}"
+
+
+       Pkg::Config.deb_build_targets.each do |target|
+         dist, arch = target.match(/(.*)-(.*)/)[1, 2]
+         unless Pkg::Util::File.empty_dir? "pkg/pe/deb/#{dist}"
+           archive_path = "#{base_path}/#{dist}-#{arch}"
+
+           # Ship arch-specific debs to correct dir, e.g. 'squeeze-i386'
+           unless Dir["pkg/pe/deb/#{dist}/*_#{arch}.deb"].empty?
+             Pkg::Util::Net.rsync_to("pkg/pe/deb/#{dist}/*_#{arch}.deb", Pkg::Config.apt_host, "#{archive_path}/")
+           end
+
+           unless Dir["pkg/pe/deb/#{dist}/*_all.deb"].empty?
+             Pkg::Platforms.arches_for_codename(dist).each do |arch|
+               Pkg::Util::Net.rsync_to("pkg/pe/deb/#{dist}/*_all.deb", Pkg::Config.apt_host, "#{base_path}/#{dist}-#{arch}/")
+             end
+           end
+
+           unless Dir["pkg/pe/deb/#{dist}/*"].select { |i| i !~ /^.*\.deb$/ }.empty?
+             # Ship source files to source dir, e.g. 'squeeze-source'
+             Pkg::Util::Net.rsync_to("pkg/pe/deb/#{dist}/*", Pkg::Config.apt_host, "#{base_path}/#{dist}-source", extra_flags: ["--exclude '*.deb'", "--ignore-existing"])
+           end
+
+           files = Dir["pkg/pe/deb/#{dist}/*{_#{arch},all}.deb"].map { |f| "#{archive_path}/#{File.basename(f)}" }
+
+           files += Dir["pkg/pe/deb/#{dist}/*"].select { |f| f !~ /^.*\.deb$/ }.map { |f| "#{base_path}/#{dist}-source/#{File.basename(f)}" }
+         end
+       end
+       # If this is not a feature branch or release branch, we need to link the
+       # shipped packages into the feature repos
+       unless Pkg::Config.pe_feature_branch || Pkg::Config.pe_release_branch
+         puts "Linking DEBs to feature repo"
+         Pkg::Util::RakeUtils.invoke_task("pe:remote:link_shipped_debs_to_feature_repo")
+       end
+     end
+
+     namespace :remote do
+       desc "Update remote rpm repodata for PE on #{Pkg::Config.yum_host}"
+       task :update_yum_repo => "pl:fetch" do
+
+         # Paths to the repos.
+         repo_base_path = Pkg::Config.yum_target_path
+
+         # This entire command is going to be passed across SSH, but it's unwieldy on a
+         # single line. By breaking it into a series of concatenated strings, we can maintain
+         # a semblance of formatting and structure (never mind readability).
+         command = %(for dir in #{repo_base_path}/{#{rpm_family_and_version.join(",")}}-*; do)
+         command += %( sudo createrepo --checksum=sha --checkts --update --delta-workers=0 --quiet --database --update $dir; )
+         command += %(done; )
+         command += %(sync)
+
+         Pkg::Util::Net.remote_execute(Pkg::Config.yum_host, command)
+       end
+
+       desc "Remotely add shipped packages to apt repo on #{Pkg::Config.apt_host}"
+       task :apt, :incoming, :dist do |t, args|
+         dist = args.dist
+         incoming_dir = args.incoming
+         incoming_dir or fail "Adding packages to apt repo requires an incoming directory"
+         Pkg::Util::RakeUtils.invoke_task("pl:fetch")
+
+         cmd = <<-eos
+           if ! flock --wait 1200 /opt/tools/aptly/db/LOCK --command /bin/true; then
+             echo "Unable to acquire aptly lock, giving up" 1>&2
+             exit 1
+           fi
+           aptly repo add -remove-files #{Pkg::Config::pe_version}-#{dist} #{incoming_dir}/#{dist}
+           if [ -d /opt/tools/aptly/public/#{Pkg::Config::pe_version}/dists/#{dist} ]; then
+             aptly publish update -gpg-key=\"8BBEB79B\" #{dist} #{Pkg::Config::pe_version}
+           else
+             aptly publish repo -gpg-key=\"8BBEB79B\" #{Pkg::Config::pe_version}-#{dist} #{Pkg::Config::pe_version}
+           fi
+         eos
+         stdout, stderr = Pkg::Util::Net.remote_execute(
+           Pkg::Config.apt_host,
+           cmd,
+           { capture_output: true }
+         )
+
+         output = stdout.to_s + stderr.to_s
+
+         if output.include?("ERROR:") || output.include?("There have been errors!")
+           # We shouldn't ever get here if aptly returns non-zero on failure, but just in case...
+           fail "Unable to add packages to debian repo. Hopefully the output has some helpful information. Output: #{output}"
+         end
+
+         puts
+         puts "Aptly output: #{output}"
+         puts
+
+         puts "Cleaning up apt repo 'incoming' dir on #{Pkg::Config.apt_host}"
+         Pkg::Util::Net.remote_execute(Pkg::Config.apt_host, "rm -r #{incoming_dir}")
+
+       end
+
+       # Throw more tires on the fire
+       desc "Remotely link shipped rpm packages into feature repo on #{Pkg::Config.yum_host}"
+       task :link_shipped_rpms_to_feature_repo => "pl:fetch" do
+         next if Pkg::Config.pe_feature_branch
+         repo_base_path = Pkg::Config.yum_target_path
+         feature_repo_path = Pkg::Config.yum_target_path(true)
+         pkgs = FileList['pkg/pe/rpm/**/*.rpm'].select { |path| path.gsub!('pkg/pe/rpm/', '') }
+         command = %(for pkg in #{pkgs.join(' ')}; do)
+         command += %( sudo ln -f "#{repo_base_path}/$( dirname ${pkg} )/$( basename ${pkg} )" "#{feature_repo_path}/$( dirname ${pkg} )/" ; )
+         command += %(done; )
+         command += %(sync)
+
+         Pkg::Util::Net.remote_execute(Pkg::Config.yum_host, command)
+       end
+
+       desc "Remotely link shipped deb packages into feature repo on #{Pkg::Config.apt_host}"
+       task :link_shipped_debs_to_feature_repo => "pl:fetch" do
+         next if Pkg::Config.pe_feature_branch
+         base_path = Pkg::Config.apt_target_path
+         feature_base_path = Pkg::Config.apt_target_path(true)
+         pkgs = FileList["pkg/pe/deb/**/*.deb"].select { |path| path.gsub!('pkg/pe/deb/', '') }
+         command = %(for pkg in #{pkgs.join(' ')}; do)
+         command += %( sudo ln -f "#{base_path}/$( dirname ${pkg} )-amd64/$( basename ${pkg} )" "#{feature_base_path}/$( dirname ${pkg} )-amd64/" ; )
+         command += %(done; )
+         command += %(sync)
+
+         Pkg::Util::Net.remote_execute(Pkg::Config.apt_host, command)
+       end
+     end
+   end
+ end
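
As a reading aid for the link_shipped_rpms_to_feature_repo task above, here is a small standalone sketch of the same command-building pattern. The two paths are hypothetical stand-ins for Pkg::Config.yum_target_path and Pkg::Config.yum_target_path(true); the real task hands the resulting string to Pkg::Util::Net.remote_execute rather than printing it.

    # Standalone sketch of the remote hard-link loop assembled by
    # link_shipped_rpms_to_feature_repo; paths here are hypothetical examples.
    require 'rake'  # provides FileList

    repo_base_path    = '/srv/yum/pe'          # stand-in for Pkg::Config.yum_target_path
    feature_repo_path = '/srv/yum/pe-feature'  # stand-in for Pkg::Config.yum_target_path(true)

    # Collect shipped rpms relative to pkg/pe/rpm/, exactly as the task does.
    pkgs = FileList['pkg/pe/rpm/**/*.rpm'].select { |path| path.gsub!('pkg/pe/rpm/', '') }

    # Build a single shell loop that hard-links each package into the feature repo.
    command = %(for pkg in #{pkgs.join(' ')}; do)
    command += %( sudo ln -f "#{repo_base_path}/$( dirname ${pkg} )/$( basename ${pkg} )" "#{feature_repo_path}/$( dirname ${pkg} )/" ; )
    command += %(done; )
    command += %(sync)

    puts command  # the task passes this string to Pkg::Util::Net.remote_execute
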