packaging 0.99.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (114)
  1. checksums.yaml +7 -0
  2. data/LICENSE +17 -0
  3. data/README-Solaris.md +117 -0
  4. data/README.md +1031 -0
  5. data/lib/packaging.rb +32 -0
  6. data/lib/packaging/artifactory.rb +278 -0
  7. data/lib/packaging/config.rb +392 -0
  8. data/lib/packaging/config/params.rb +366 -0
  9. data/lib/packaging/deb.rb +28 -0
  10. data/lib/packaging/deb/repo.rb +263 -0
  11. data/lib/packaging/gem.rb +112 -0
  12. data/lib/packaging/ips.rb +57 -0
  13. data/lib/packaging/msi.rb +89 -0
  14. data/lib/packaging/nuget.rb +39 -0
  15. data/lib/packaging/osx.rb +36 -0
  16. data/lib/packaging/paths.rb +238 -0
  17. data/lib/packaging/platforms.rb +480 -0
  18. data/lib/packaging/repo.rb +55 -0
  19. data/lib/packaging/retrieve.rb +46 -0
  20. data/lib/packaging/rpm.rb +5 -0
  21. data/lib/packaging/rpm/repo.rb +257 -0
  22. data/lib/packaging/tar.rb +154 -0
  23. data/lib/packaging/util.rb +146 -0
  24. data/lib/packaging/util/date.rb +15 -0
  25. data/lib/packaging/util/execution.rb +85 -0
  26. data/lib/packaging/util/file.rb +125 -0
  27. data/lib/packaging/util/git.rb +174 -0
  28. data/lib/packaging/util/git_tags.rb +73 -0
  29. data/lib/packaging/util/gpg.rb +62 -0
  30. data/lib/packaging/util/jenkins.rb +95 -0
  31. data/lib/packaging/util/misc.rb +69 -0
  32. data/lib/packaging/util/net.rb +368 -0
  33. data/lib/packaging/util/os.rb +17 -0
  34. data/lib/packaging/util/platform.rb +40 -0
  35. data/lib/packaging/util/rake_utils.rb +111 -0
  36. data/lib/packaging/util/serialization.rb +19 -0
  37. data/lib/packaging/util/ship.rb +171 -0
  38. data/lib/packaging/util/tool.rb +41 -0
  39. data/lib/packaging/util/version.rb +326 -0
  40. data/spec/fixtures/config/ext/build_defaults.yaml +2 -0
  41. data/spec/fixtures/config/ext/project_data.yaml +2 -0
  42. data/spec/fixtures/config/params.yaml +2 -0
  43. data/spec/fixtures/configs/components/test_file.json +1 -0
  44. data/spec/fixtures/configs/components/test_file_2.json +0 -0
  45. data/spec/fixtures/configs/components/test_file_not_tagged.json +1 -0
  46. data/spec/fixtures/configs/components/test_file_wrong_ext.txt +0 -0
  47. data/spec/fixtures/configs/components/test_file_wrong_ext.wrong +0 -0
  48. data/spec/fixtures/util/pre_tasks.yaml +4 -0
  49. data/spec/lib/packaging/artifactory_spec.rb +171 -0
  50. data/spec/lib/packaging/config_spec.rb +556 -0
  51. data/spec/lib/packaging/deb/repo_spec.rb +148 -0
  52. data/spec/lib/packaging/deb_spec.rb +52 -0
  53. data/spec/lib/packaging/paths_spec.rb +153 -0
  54. data/spec/lib/packaging/platforms_spec.rb +153 -0
  55. data/spec/lib/packaging/repo_spec.rb +97 -0
  56. data/spec/lib/packaging/retrieve_spec.rb +61 -0
  57. data/spec/lib/packaging/rpm/repo_spec.rb +133 -0
  58. data/spec/lib/packaging/tar_spec.rb +122 -0
  59. data/spec/lib/packaging/util/execution_spec.rb +56 -0
  60. data/spec/lib/packaging/util/file_spec.rb +139 -0
  61. data/spec/lib/packaging/util/git_spec.rb +160 -0
  62. data/spec/lib/packaging/util/git_tag_spec.rb +36 -0
  63. data/spec/lib/packaging/util/gpg_spec.rb +64 -0
  64. data/spec/lib/packaging/util/jenkins_spec.rb +112 -0
  65. data/spec/lib/packaging/util/misc_spec.rb +31 -0
  66. data/spec/lib/packaging/util/net_spec.rb +239 -0
  67. data/spec/lib/packaging/util/os_spec.rb +31 -0
  68. data/spec/lib/packaging/util/rake_utils_spec.rb +70 -0
  69. data/spec/lib/packaging/util/ship_spec.rb +117 -0
  70. data/spec/lib/packaging/util/version_spec.rb +123 -0
  71. data/spec/lib/packaging_spec.rb +19 -0
  72. data/spec/spec_helper.rb +36 -0
  73. data/static_artifacts/PackageInfo.plist +3 -0
  74. data/tasks/00_utils.rake +216 -0
  75. data/tasks/30_metrics.rake +33 -0
  76. data/tasks/apple.rake +266 -0
  77. data/tasks/build.rake +12 -0
  78. data/tasks/clean.rake +5 -0
  79. data/tasks/config.rake +30 -0
  80. data/tasks/deb.rake +129 -0
  81. data/tasks/deb_repos.rake +28 -0
  82. data/tasks/deprecated.rake +130 -0
  83. data/tasks/doc.rake +20 -0
  84. data/tasks/education.rake +57 -0
  85. data/tasks/fetch.rake +57 -0
  86. data/tasks/gem.rake +146 -0
  87. data/tasks/jenkins.rake +494 -0
  88. data/tasks/jenkins_dynamic.rake +202 -0
  89. data/tasks/load_extras.rake +21 -0
  90. data/tasks/mock.rake +348 -0
  91. data/tasks/nightly_repos.rake +335 -0
  92. data/tasks/pe_deb.rake +12 -0
  93. data/tasks/pe_rpm.rake +13 -0
  94. data/tasks/pe_ship.rake +221 -0
  95. data/tasks/pe_sign.rake +13 -0
  96. data/tasks/pe_tar.rake +5 -0
  97. data/tasks/retrieve.rake +45 -0
  98. data/tasks/rpm.rake +66 -0
  99. data/tasks/rpm_repos.rake +29 -0
  100. data/tasks/ship.rake +752 -0
  101. data/tasks/sign.rake +226 -0
  102. data/tasks/tag.rake +8 -0
  103. data/tasks/tar.rake +34 -0
  104. data/tasks/update.rake +16 -0
  105. data/tasks/vanagon.rake +35 -0
  106. data/tasks/vendor_gems.rake +117 -0
  107. data/tasks/version.rake +33 -0
  108. data/tasks/z_data_dump.rake +65 -0
  109. data/templates/README +1 -0
  110. data/templates/downstream.xml.erb +47 -0
  111. data/templates/msi.xml.erb +197 -0
  112. data/templates/packaging.xml.erb +344 -0
  113. data/templates/repo.xml.erb +114 -0
  114. metadata +234 -0
@@ -0,0 +1,335 @@
1
+ namespace :pl do
2
+ ##
3
+ # This crazy piece of work establishes a remote repo on the distribution
4
+ # server, ships our repos out to it, signs them, and brings them back.
5
+ # This is an INTERNAL rake task and should not be considered part of the packaging API.
6
+ # Please do not depend on it.
7
+ #
8
+ namespace :jenkins do
9
##
# This is to enable the work in CPR-52 to support nightly repos. For this
# work we'll have signed repos for each package of a build.
#
# Ships the local 'repos/' directory to the signing server, runs
# pl:jenkins:sign_repos there, then rsyncs the signed results back into
# "#{target_prefix}_repos/" locally. The remote checkout and params file
# are removed afterwards.
#
task :remote_sign_repos, [:target_prefix] => "pl:fetch" do |t, args|
  target_prefix = args.target_prefix or fail ":target_prefix is a required argument for #{t}"
  target = "#{target_prefix}_repos/"
  signing_server = Pkg::Config.signing_server
  # Sign the repos please
  Pkg::Util::File.empty_dir?("repos") and fail "There were no repos found in repos/. Maybe something in the pipeline failed?"
  # Optional bundle used to bootstrap the packaging repo on the signing host
  signing_bundle = ENV['SIGNING_BUNDLE']

  remote_repo = Pkg::Util::Net.remote_bootstrap(signing_server, 'HEAD', nil, signing_bundle)
  build_params = Pkg::Util::Net.remote_buildparams(signing_server, Pkg::Config)
  Pkg::Util::Net.rsync_to('repos', signing_server, remote_repo)
  Pkg::Util::Net.remote_ssh_cmd(signing_server, "cd #{remote_repo} ; rake pl:jenkins:sign_repos GPG_KEY=#{Pkg::Util::Gpg.key} PARAMS_FILE=#{build_params}")
  Pkg::Util::Net.rsync_from("#{remote_repo}/repos/", signing_server, target)
  # Clean up the remote bootstrap checkout and the params file
  Pkg::Util::Net.remote_ssh_cmd(signing_server, "rm -rf #{remote_repo}")
  Pkg::Util::Net.remote_ssh_cmd(signing_server, "rm #{build_params}")
  puts "Signed packages staged in '#{target}' directory"
end
30
+
31
# Sign everything under the local 'repos' directory in place: the rpms
# themselves, then the yum and apt repo metadata, then any OSX / Solaris 11 /
# Windows artifacts that happen to be present.
task :sign_repos => "pl:fetch" do
  Pkg::Util::RakeUtils.invoke_task("pl:sign_rpms", "repos")
  Pkg::Rpm::Repo.create_local_repos('repos')
  Pkg::Rpm::Repo.sign_repos('repos')
  Pkg::Deb::Repo.sign_repos('repos', 'Apt repository for signed builds')
  # Platform-specific signing only runs when matching artifacts exist
  Pkg::OSX.sign('repos') unless Dir['repos/apple/**/*.dmg'].empty?
  Pkg::IPS.sign('repos') unless Dir['repos/solaris/11/**/*.p5p'].empty?
  Pkg::MSI.sign('repos') unless Dir['repos/windows/**/*.msi'].empty?
end
40
+
41
# Rsync the locally staged "#{target_prefix}_repos/" directory up to the
# distribution server under <jenkins_repo_path>/<project>/<ref>/, retrying
# the whole mkdir+rsync sequence up to 3 times.
task :ship_signed_repos, [:target_prefix] => "pl:fetch" do |t, args|
  target_prefix = args.target_prefix or fail ":target_prefix is a required argument for #{t}"
  target_dir = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}/#{target_prefix}_repos"
  Pkg::Util::Execution.retry_on_fail(:times => 3) do
    # Ship the now signed repos to the distribution server
    Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.distribution_server, "mkdir -p #{target_dir}")
    Pkg::Util::Net.rsync_to("#{target_prefix}_repos/", Pkg::Config.distribution_server, target_dir)
  end
end
50
+
51
# This task should be invoked after prepare_signed_repos, so that there are repos to pack up.
# Builds one tarball of a single signed repo via Pkg::Repo.create_signed_repo_archive.
task :pack_signed_repo, [:path_to_repo, :name_of_archive, :versioning] => ["pl:fetch"] do |t, args|
  # path_to_repo should be relative to ./pkg
  path_to_repo = args.path_to_repo or fail ":path_to_repo is a required argument for #{t}"
  name_of_archive = args.name_of_archive or fail ":name_of_archive is a required argument for #{t}"
  versioning = args.versioning or fail ":versioning is a required argument for #{t}"
  Pkg::Repo.create_signed_repo_archive(path_to_repo, name_of_archive, versioning)
end
59
+
60
# Build a separate archive for every signed repo, delegating the actual
# tarball creation to Pkg::Repo.create_all_repo_archives.
task :pack_all_signed_repos_individually, [:name_of_archive, :versioning] => ["pl:fetch"] do |t, args|
  name_of_archive = args.name_of_archive
  fail ":name_of_archive is a required argument for #{t}" unless name_of_archive
  versioning = args.versioning
  fail ":versioning is a required argument for #{t}" unless versioning
  Pkg::Repo.create_all_repo_archives(name_of_archive, versioning)
end
65
+
66
# This is pretty similar to the 'pack_signed_repo' task. The difference here is that instead
# of creating a tarball for each repo passed, it adds each repo to a single archive, creating
# one 'all' tarball with all of the repos. This is useful for customers who have a PE master with
# no internet access. They can unpack the puppet-agent-all tarball into the location that
# pe_repo expects and use simplified agent install without needing internet access, or having to
# manually download each agent that they need to feed to pe_repo.
# This task should be invoked after prepare_signed_repos, so that there are repos to pack up.
task :pack_all_signed_repos, [:path_to_repo, :name_of_archive, :versioning] => ["pl:fetch"] do |t, args|
  # path_to_repo should be relative to ./pkg
  name_of_archive = args.name_of_archive or fail ":name_of_archive is a required argument for #{t}"
  versioning = args.versioning or fail ":versioning is a required argument for #{t}"
  tar = Pkg::Util::Tool.check_tool('tar')

  Dir.chdir("pkg") do
    if versioning == 'ref'
      local_target = File.join(Pkg::Config.project, Pkg::Config.ref, "repos")
    elsif versioning == 'version'
      local_target = File.join(Pkg::Config.project, Pkg::Util::Version.dot_version, "repos")
    else
      # Previously an unknown value left local_target nil and Dir.chdir(nil)
      # raised a confusing TypeError; fail with a clear message instead.
      fail ":versioning must be 'ref' or 'version', got '#{versioning}'"
    end

    Dir.chdir(local_target) do
      if Pkg::Util::File.exist?("#{name_of_archive}.tar.gz")
        # tar cannot --update a compressed archive, so the 'all' tarball stays
        # uncompressed here; compress_the_all_tarball gzips it afterwards.
        if File.exist?("#{Pkg::Config.project}-all.tar")
          tar_cmd = "--update"
        else
          tar_cmd = "--create"
        end
        Pkg::Util::Execution.ex("#{tar} --owner=0 --group=0 #{tar_cmd} --file #{Pkg::Config.project}-all.tar #{name_of_archive}.tar.gz")
      else
        warn "Skipping #{name_of_archive} because it (#{name_of_archive}.tar.gz) has no files"
      end
    end
  end
end
100
+
101
# tar does not support adding or updating files in a compressed archive, so
# we have a task to compress the "all" tarball from the 'pack_all_signed_repos'
# task
task :compress_the_all_tarball, [:versioning] => ["pl:fetch"] do |t, args|
  versioning = args.versioning or fail ":versioning is a required argument for #{t}"
  gzip = Pkg::Util::Tool.check_tool('gzip')
  Dir.chdir("pkg") do
    if versioning == 'ref'
      local_target = File.join(Pkg::Config.project, Pkg::Config.ref)
    elsif versioning == 'version'
      local_target = File.join(Pkg::Config.project, Pkg::Util::Version.dot_version)
    end
    # NOTE(review): local_target is nil for any other :versioning value, which
    # makes the Dir.chdir below raise — callers appear to pass only 'ref' or
    # 'version'; confirm before relying on other values.
    Dir.chdir(local_target) do
      Pkg::Util::Execution.ex("#{gzip} --fast #{File.join("repos", "#{Pkg::Config.project}-all.tar")}")
    end
  end
end
118
+
119
+
120
# Stage signed repos locally under pkg/ in the layout the public server
# expects: pkg/<project>/<ref-or-version>/{repos,repo_configs} plus a
# <project>-latest tree whose configs are rewritten to static 'latest' names
# and whose repos directory is a symlink to the last pushed ref's repos.
task :prepare_signed_repos, [:target_host, :target_prefix, :versioning] => ["clean", "pl:fetch"] do |t, args|
  target_host = args.target_host or fail ":target_host is a required argument to #{t}"
  target_prefix = args.target_prefix or fail ":target_prefix is a required argument for #{t}"
  versioning = args.versioning or fail ":versioning is a required argument for #{t}"
  mkdir("pkg")

  Dir.chdir("pkg") do
    if versioning == 'ref'
      local_target = File.join(Pkg::Config.project, Pkg::Config.ref)
    elsif versioning == 'version'
      local_target = File.join(Pkg::Config.project, Pkg::Util::Version.dot_version)
    end

    FileUtils.mkdir_p([local_target, Pkg::Config.project + "-latest"])

    # Rake task dependencies with arguments are nuts, so we just directly
    # invoke them here. We want the signed_* directories staged as
    # repos/repo_configs, because that's how we want them on the public
    # server
    Pkg::Util::RakeUtils.invoke_task("pl:jenkins:retrieve", "#{target_prefix}_repos", File.join(local_target, "repos"))
    Pkg::Util::RakeUtils.invoke_task("pl:jenkins:retrieve", "#{target_prefix}_repo_configs", File.join(local_target, "repo_configs"))

    # The repo configs have Pkg::Config.builds_server used in them, but that
    # is internal, so we need to replace it with our public server. We also
    # want them only to see repos, and not signed repos, since the host is
    # called nightlies.puppetlabs.com. Here we replace those values in each
    # config with the desired value.
    Dir.glob("#{local_target}/repo_configs/**/*").select { |t_config| File.file?(t_config) }.each do |config|
      new_contents = File.read(config).gsub(Pkg::Config.builds_server, target_host).gsub(/#{target_prefix}_repos/, "repos")
      File.open(config, "w") { |file| file.puts new_contents }
    end

    # Latest repo work. This little section does some magic to munge the
    # repo configs and link in the latest repos. The repo_configs are
    # renamed to project-latest-$platform.{list,repo} to ensure that their
    # names stay the same between runs. Their contents have the ref
    # stripped off and the project replaced by $project-latest. Then the
    # repos directory is a symlink to the last pushed ref's repos.
    FileUtils.cp_r(File.join(local_target, "repo_configs"), Pkg::Config.project + "-latest", { :preserve => true })

    # Now we need to remove the ref and replace $project with
    # $project-latest so that it will work as a pinned latest repo
    # Also rename the repo config to a static filename.
    Dir.glob("#{Pkg::Config.project}-latest/repo_configs/**/*").select { |t_config| File.file?(t_config) }.each do |config|
      new_contents = File.read(config)
      new_contents.gsub!(%r{#{Pkg::Config.ref}/}, "")
      new_contents.gsub!(%r{#{Pkg::Config.project}/}, Pkg::Config.project + "-latest/")
      new_contents.gsub!(Pkg::Config.ref, "latest")

      File.open(config, "w") { |file| file.puts new_contents }
      FileUtils.mv(config, config.gsub(Pkg::Config.ref, "latest"))
    end

    # If we're using the version strategy instead of ref, here we shuffle
    # around directories and munge repo_configs to replace the ref with the
    # version. In the case that dot_version and ref are the same, we
    # have nothing to do, so the conditional is skipped.
    if versioning == 'version' && Pkg::Util::Version.dot_version != Pkg::Config.ref
      Dir.glob("#{local_target}/repo_configs/**/*").select { |t_config| File.file?(t_config) }.each do |config|
        new_contents = File.read(config)
        new_contents.gsub!(%r{#{Pkg::Config.ref}}, Pkg::Util::Version.dot_version)

        File.open(config, "w") { |file| file.puts new_contents }
        FileUtils.mv(config, config.gsub(Pkg::Config.ref, Pkg::Util::Version.dot_version))
      end
    end

    # Make a latest symlink for the project
    FileUtils.ln_sf(File.join("..", local_target, "repos"), File.join(Pkg::Config.project + "-latest"), :verbose => true)
  end
end
191
+
192
# Deploy the repos staged by prepare_signed_repos to target_host. When
# foss_only is truthy and Pkg::Config.foss_platforms is set, only FOSS
# platform repos and repo configs are shipped; otherwise everything under
# pkg/<project> is.
task :deploy_signed_repos, [:target_host, :target_basedir, :foss_only] => "pl:fetch" do |t, args|
  target_host = args.target_host or fail ":target_host is a required argument to #{t}"
  target_basedir = args.target_basedir or fail ":target_basedir is a required argument to #{t}"
  include_paths = []

  if args.foss_only && Pkg::Config.foss_platforms && !Pkg::Config.foss_platforms.empty?
    Pkg::Config.foss_platforms.each do |platform|
      include_paths << Pkg::Paths.repo_path(platform, legacy: true)
      if Pkg::Paths.repo_config_path(platform)
        include_paths << Pkg::Paths.repo_config_path(platform)
      end
    end
  else
    include_paths = ["./"]
  end

  # Get the directories together - we need to figure out which bits to ship based on the include_path
  # First we get the build itself
  Pkg::Util::Execution.capture3(%(find #{include_paths.map { |path| "pkg/#{Pkg::Config.project}/**/#{path}" }.join(' ') } | sort > include_file))
  Pkg::Util::Execution.capture3(%(mkdir -p tmp && tar -T include_file -cf - | (cd ./tmp && tar -xf -)))

  # Then we grab the appropriate meta-data only (everything except repos/)
  Pkg::Util::Execution.capture3(%(find #{include_paths.map { |path| "pkg/#{Pkg::Config.project}-latest/#{path}" unless path.include? "repos" }.join(' ') } | sort > include_file_latest))

  # include /repos in the include_file_latest so we correctly include the symlink in the final file list to ship
  Pkg::Util::Execution.capture3(%(echo "pkg/#{Pkg::Config.project}-latest/repos" >> include_file_latest))
  Pkg::Util::Execution.capture3(%(tar -T include_file_latest -cf - | (cd ./tmp && tar -xf -)))

  Dir.chdir("tmp/pkg") do
    # Ship it to the target for consumption
    # First we ship the latest and clean up any repo-configs that are no longer valid with --delete-after
    Pkg::Util::Net.rsync_to("#{Pkg::Config.project}-latest", target_host, target_basedir, extra_flags: ["--delete-after", "--keep-dirlinks"])
    # Then we ship the sha version with default rsync flags
    Pkg::Util::Net.rsync_to("#{Pkg::Config.project}", target_host, target_basedir)
  end

  puts "'#{Pkg::Config.ref}' of '#{Pkg::Config.project}' has been shipped to '#{target_host}:#{target_basedir}'"
end
230
+
231
# Push the staged pkg/<project>-latest and pkg/<project> trees to an S3
# bucket via s3cmd-style sync.
task :deploy_signed_repos_to_s3, [:target_bucket] => "pl:fetch" do |t, args|
  target_bucket = args.target_bucket or fail ":target_bucket is a required argument to #{t}"

  # Ship it to the target for consumption
  # First we ship the latest and clean up any repo-configs that are no longer valid with --delete-removed and --acl-public
  Pkg::Util::Net.s3sync_to("pkg/#{Pkg::Config.project}-latest/", target_bucket, "#{Pkg::Config.project}-latest", ["--acl-public", "--delete-removed", "--follow-symlinks"])
  # Then we ship the sha version with just --acl-public
  Pkg::Util::Net.s3sync_to("pkg/#{Pkg::Config.project}/", target_bucket, Pkg::Config.project, ["--acl-public", "--follow-symlinks"])

  puts "'#{Pkg::Config.ref}' of '#{Pkg::Config.project}' has been shipped via s3 to '#{target_bucket}'"
end
242
+
243
# Generate yum and apt repo config files for the signed repos under
# "#{target_prefix}_repos", writing them to "#{target_prefix}_repo_configs".
task :generate_signed_repo_configs, [:target_prefix] => "pl:fetch" do |t, args|
  target_prefix = args.target_prefix
  fail ":target_prefix is a required argument for #{t}" unless target_prefix
  repos_dir = "#{target_prefix}_repos"
  configs_dir = "#{target_prefix}_repo_configs"
  Pkg::Rpm::Repo.generate_repo_configs(repos_dir, configs_dir, true)
  Pkg::Deb::Repo.generate_repo_configs(repos_dir, configs_dir)
end
248
+
249
# Ship the generated "#{target_prefix}_repo_configs" for both rpm and deb
# repos to their configured destinations.
task :ship_signed_repo_configs, [:target_prefix] => "pl:fetch" do |t, args|
  target_prefix = args.target_prefix
  fail ":target_prefix is a required argument for #{t}" unless target_prefix
  configs_dir = "#{target_prefix}_repo_configs"
  Pkg::Rpm::Repo.ship_repo_configs(configs_dir)
  Pkg::Deb::Repo.ship_repo_configs(configs_dir)
end
254
+
255
# End-to-end driver: sign repos remotely, ship them, generate their repo
# configs, and ship those too. target_prefix defaults to 'nightly'.
task :generate_signed_repos, [:target_prefix] => ["pl:fetch"] do |t, args|
  target_prefix = args.target_prefix || 'nightly'
  # Subtasks are invoked from within pkg/
  Dir.chdir("pkg") do
    ["pl:jenkins:remote_sign_repos", "pl:jenkins:ship_signed_repos", "pl:jenkins:generate_signed_repo_configs", "pl:jenkins:ship_signed_repo_configs"].each do |task|
      Pkg::Util::RakeUtils.invoke_task(task, target_prefix)
    end
    puts "Shipped '#{Pkg::Config.ref}' (#{Pkg::Config.version}) of '#{Pkg::Config.project}' into the puppet-agent repos."
  end
end
264
+
265
# We want to keep the puppet-agent repos at a higher level and then link
# them into the correct version of PE. This is a private method and is
# called from the internal_puppet-agent-ship jenkins job
#
# @param target_host the remote host where the packages are being shipped
#        ex: agent-downloads.delivery.puppetlabs.net
# @param remote_dir the base path to deploy packages to
#        ex: /opt/puppet-agent
# @param versioning whether the puppet-agent version is a version string or
#        a github ref. Valid values are 'version' and 'ref'
# @param pe_version the PE-version to deploy to.
#        ex: 2015.2
task :link_signed_repos, [:target_host, :remote_dir, :versioning, :pe_version] => ["pl:fetch"] do |t, args|
  target_host = args.target_host or fail ":target_host is a required argument for #{t}"
  remote_dir = args.remote_dir or fail ":remote_dir is a required argument for #{t}"
  versioning = args.versioning or fail ":versioning is a required argument for #{t}"
  pe_version = args.pe_version or fail ":pe_version is a required argument for #{t}"

  if versioning == 'ref'
    version_string = Pkg::Config.ref
  elsif versioning == 'version'
    version_string = Pkg::Util::Version.dot_version
  end

  pa_source = File.join(remote_dir, Pkg::Config.project)
  pe_target = File.join(remote_dir, pe_version, Pkg::Config.project)
  local_pa = File.join(pa_source, version_string)
  local_pe = pe_target
  local_pa_latest = "#{pa_source}-latest"
  local_pe_latest = "#{pe_target}-latest"

  # Copy the -latest tree under the PE version dir, rewrite its repo_config
  # paths to include the PE version, then symlink the versioned repos into
  # place. All of this runs on target_host over ssh.
  Pkg::Util::Net.remote_ssh_cmd(target_host, "mkdir -p '#{pe_target}'")
  Pkg::Util::Net.remote_ssh_cmd(target_host, "mkdir -p '#{local_pe_latest}'")
  Pkg::Util::Net.remote_ssh_cmd(target_host, "cp -r #{local_pa_latest}/* #{local_pe_latest}")
  Pkg::Util::Net.remote_ssh_cmd(target_host, "sed -i 's|/#{File.basename(local_pa_latest)}|/#{pe_version}/#{File.basename(local_pa_latest)}|' #{local_pe_latest}/repo_configs/*/*")
  Pkg::Util::Net.remote_ssh_cmd(target_host, "ln -sf '#{local_pa}' '#{local_pe}'")
end
302
+
303
# Convenience wrapper: run the full signed-repo pipeline with the 'nightly' prefix.
task :nightly_repos => ["pl:fetch"] do
  Pkg::Util::RakeUtils.invoke_task("pl:jenkins:generate_signed_repos", 'nightly')
end
306
+
307
# Stage the nightly repos locally (keyed by ref) and deploy the FOSS-only
# subset to target_host under target_basedir.
task :deploy_nightly_repos, [:target_host, :target_basedir] => ["pl:fetch"] do |t, args|
  target_host = args.target_host
  fail ":target_host is a required argument to #{t}" unless target_host
  target_basedir = args.target_basedir
  fail ":target_basedir is a required argument to #{t}" unless target_basedir
  Pkg::Util::RakeUtils.invoke_task('pl:jenkins:prepare_signed_repos', target_host, 'nightly', 'ref')
  Pkg::Util::RakeUtils.invoke_task('pl:jenkins:deploy_signed_repos', target_host, target_basedir, true)
end
313
+
314
# Stage the signed repos (keyed by version) and push them to the given S3
# bucket; the bucket's public URL is used as the host in repo configs.
task :deploy_repos_to_s3, [:target_bucket] => ["pl:fetch"] do |t, args|
  target_bucket = args.target_bucket
  fail ":target_bucket is a required argument to #{t}" unless target_bucket
  target_host = "https://s3.amazonaws.com/#{target_bucket}"
  Pkg::Util::RakeUtils.invoke_task('pl:jenkins:prepare_signed_repos', target_host, 'signed', 'version')
  Pkg::Util::RakeUtils.invoke_task('pl:jenkins:deploy_signed_repos_to_s3', target_bucket)
end
320
+
321
# Write a LATEST file containing the release version and publish it to
# s3://TARGET_BUCKET/<project>/LATEST. VERSION overrides the computed dot
# version when set.
task :update_release_versions do
  target_bucket = ENV['TARGET_BUCKET'] or fail "TARGET_BUCKET must be specified to run the 'update_release_versions' task"
  version = ENV['VERSION'] || Pkg::Util::Version.get_dot_version

  tempdir = Pkg::Util::File.mktemp
  begin
    latest_filepath = File.join(tempdir, "pkg")
    FileUtils.mkdir_p(latest_filepath)

    latest_filename = File.join(latest_filepath, "LATEST")
    File.open(latest_filename, 'w') { |file| file.write(version) }
    Pkg::Util::Net.s3sync_to(latest_filepath, target_bucket, Pkg::Config.project, ["--acl-public", "--follow-symlinks"])
  ensure
    # Remove the whole temp dir; previously only tempdir/pkg was removed,
    # leaking the tempdir itself on every run (and nothing was cleaned up if
    # the sync raised).
    FileUtils.rm_rf tempdir
  end
end
334
+ end
335
+ end
@@ -0,0 +1,12 @@
1
# "Alias" tasks for PE - these just point at the standard pl: tasks. They exist
# for ease of aggregation with PE-specific tasks that _are_ actually different
# from their "pl" counterparts
# Only defined when building PE.
if Pkg::Config.build_pe
  namespace :pe do
    desc "Create a PE deb from this repo using the default cow #{Pkg::Config.default_cow}."
    task :deb => "pl:deb"

    desc "Create PE debs from this git repository using all cows specified in build_defaults yaml"
    task :deb_all => "pl:deb_all"
  end
end
@@ -0,0 +1,13 @@
1
# PE rpm "alias" tasks - defined only when building PE; they delegate to the
# standard package:/pl: rpm and mock tasks.
if Pkg::Config.build_pe
  namespace :pe do
    desc "Build a PE rpm using rpmbuild (requires all BuildRequires, rpmbuild, etc)"
    task :rpm => "package:rpm"

    desc "Build rpms using ALL final mocks in build_defaults yaml, keyed to PL infrastructure, pass MOCK to override"
    task :mock_all => ["pl:fetch", "pl:mock_all"]

    desc "Build a PE rpm using the default mock"
    task :mock => ["pl:fetch", "pl:mock"]
  end
end
@@ -0,0 +1,221 @@
1
+ if Pkg::Config.build_pe
2
+ namespace :pe do
3
desc "ship PE rpms to #{Pkg::Config.yum_host}"
# Rsync pkg/pe/rpm up to the yum host (retrying up to 3 times); on the
# release team's runs, also link the packages into the feature repos and
# refresh repo metadata.
task :ship_rpms => "pl:fetch" do
  puts "Shipping packages to #{Pkg::Config.yum_target_path}"
  # Fixed error message: this is the rpm ship, so the hint is pe:rpm (was "pe:deb")
  Pkg::Util::File.empty_dir?("pkg/pe/rpm") and fail "The 'pkg/pe/rpm' directory has no packages. Did you run rake pe:rpm?"
  Pkg::Util::Execution.retry_on_fail(:times => 3) do
    if Pkg::Config.pe_feature_branch
      # Typo fixed: "alllowed" -> "allowed"
      puts "On a feature branch - overwrites allowed"
      # Empty extra_flags — presumably the default rsync flags prevent
      # overwrites; verify in Pkg::Util::Net.rsync_to.
      Pkg::Util::Net.rsync_to('pkg/pe/rpm/', Pkg::Config.yum_host, Pkg::Config.yum_target_path, extra_flags: [])
    else
      Pkg::Util::Net.rsync_to('pkg/pe/rpm/', Pkg::Config.yum_host, Pkg::Config.yum_target_path)
    end
  end
  if Pkg::Config.team == 'release'

    # If this is not a feature branch, we need to link the shipped packages into the feature repos,
    # then update their metadata as well.
    unless Pkg::Config.pe_feature_branch
      puts "Linking RPMs to feature repo"
      Pkg::Util::RakeUtils.invoke_task("pe:remote:link_shipped_rpms_to_feature_repo")
      Pkg::Util::RakeUtils.invoke_task("pe:remote:update_yum_repo")
    end
  end
end
26
+
27
desc "Ship PE debs to #{Pkg::Config.apt_host}"
# Ship built debs to the apt host. Two phases:
#  1. (non-feature-branch only) push debs into an aptly 'incoming' dir per
#     distribution and, for the release team, add them to the aptly repos.
#  2. archive all artifacts (debs and sources) into legacy-layout
#     directories ($dist-$arch / $dist-source) for later PE compose work.
task :ship_debs => "pl:fetch" do
  Pkg::Util::File.empty_dir?("pkg/pe/deb") and fail "The 'pkg/pe/deb' directory has no packages!"
  target_path = ENV['APT_REPO']

  unless Pkg::Config.pe_feature_branch
    # If APT_REPO isn't specified as an environment variable, we use a temporary one
    # created for this specific deb ship. This enables us to escape the conflicts
    # introduced with simultaneous deb ships.
    #

    # We are going to iterate over every set of packages, adding them to
    # the repository set by set. This enables us to handle different
    # repositories per distribution. "pkg/pe/deb/" contains directories
    # named for every distribution, e.g. "lucid," "squeeze," etc.
    #
    Dir["pkg/pe/deb/*"].each do |dist|
      dist = File.basename(dist)
      unless target_path
        puts "Creating temporary incoming dir on #{Pkg::Config.apt_host}"
        target_path = %x(ssh -t #{Pkg::Config.apt_host} 'mktemp -d -t incoming-XXXXXX').chomp
      end

      # For aptly, we ship just the debs into an incoming dir. On the remote end,
      # aptly will pull these debs in and add them to the repositories based on the
      # dist, e.g. lucid, architecture notwithstanding.
      #
      # The layout that the aptly library will expect is:
      #
      # incoming_dir/{$dists}/*.deb
      #
      # ex:
      # incoming_dir|
      #             |_lucid/*.deb
      #             |_squeeze/*.deb
      #             |_precise/*.deb
      #             |_wheezy/*.deb
      #
      puts "Shipping PE debs to apt repo 'incoming' dir on #{Pkg::Config.apt_host}"
      Pkg::Util::Execution.retry_on_fail(:times => 3) do
        Dir["pkg/pe/deb/#{dist}/*.deb"].each do |deb|
          Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.apt_host, "mkdir -p '#{target_path}/#{dist}'")
          Pkg::Util::Net.rsync_to(deb, Pkg::Config.apt_host, "#{target_path}/#{dist}/#{File.basename(deb)}")
        end
      end

      if Pkg::Config.team == 'release'
        # reenable so the task can be invoked once per distribution
        Rake::Task["pe:remote:apt"].reenable
        Rake::Task["pe:remote:apt"].invoke(target_path, dist)
      end

    end
  end

  # We also ship our PE artifacts to directories for archival purposes and to
  # ease the gathering of both debs and sources when we do PE compose and ship. For
  # this case, we ship everything to directories that mirror the legacy rpm
  # directory format:
  #
  # repos/$dist-{$architecture|source}
  #
  # ex:
  # repos|
  #      |_squeeze-i386
  #      |_squeeze-amd64
  #      |_squeeze-source
  #
  # We also have concerns about shipped artifacts getting accidentally overwritten
  # by newer ones. To handle this, we make everything we ship to the archive
  # directories immutable, after rsyncing out.
  #
  base_path = Pkg::Config.apt_target_path

  # Typo fixed: "to to archive" -> "to archive"
  puts "Shipping all built artifacts to archive directories on #{Pkg::Config.apt_host}"


  Pkg::Config.deb_build_targets.each do |target|
    dist, arch = target.match(/(.*)-(.*)/)[1, 2]
    unless Pkg::Util::File.empty_dir? "pkg/pe/deb/#{dist}"
      archive_path = "#{base_path}/#{dist}-#{arch}"

      # Ship arch-specific debs to correct dir, e.g. 'squeeze-i386'
      unless Dir["pkg/pe/deb/#{dist}/*_#{arch}.deb"].empty?
        Pkg::Util::Net.rsync_to("pkg/pe/deb/#{dist}/*_#{arch}.deb", Pkg::Config.apt_host, "#{archive_path}/")
      end

      unless Dir["pkg/pe/deb/#{dist}/*_all.deb"].empty?
        # Arch-independent debs are copied into every arch dir for the dist.
        # (Block param renamed from 'arch' to avoid shadowing the outer 'arch'.)
        Pkg::Platforms.arches_for_codename(dist).each do |codename_arch|
          Pkg::Util::Net.rsync_to("pkg/pe/deb/#{dist}/*_all.deb", Pkg::Config.apt_host, "#{base_path}/#{dist}-#{codename_arch}/")
        end
      end

      unless Dir["pkg/pe/deb/#{dist}/*"].select { |i| i !~ /^.*\.deb$/ }.empty?
        # Ship source files to source dir, e.g. 'squeeze-source'
        Pkg::Util::Net.rsync_to("pkg/pe/deb/#{dist}/*", Pkg::Config.apt_host, "#{base_path}/#{dist}-source", extra_flags: ["--exclude '*.deb'", "--ignore-existing"])
      end

      # NOTE(review): 'files' is built but never used below — possibly a
      # leftover from the immutability work described above; confirm before
      # removing.
      files = Dir["pkg/pe/deb/#{dist}/*{_#{arch},all}.deb"].map { |f| "#{archive_path}/#{File.basename(f)}" }

      files += Dir["pkg/pe/deb/#{dist}/*"].select { |f| f !~ /^.*\.deb$/ }.map { |f| "#{base_path}/#{dist}-source/#{File.basename(f)}" }
    end
  end
  # If this is not a feature branch, we need to link the shipped packages into the feature repos
  unless Pkg::Config.pe_feature_branch
    puts "Linking DEBs to feature repo"
    Pkg::Util::RakeUtils.invoke_task("pe:remote:link_shipped_debs_to_feature_repo")
  end
end
135
+
136
+ namespace :remote do
137
desc "Update remote rpm repodata for PE on #{Pkg::Config.yum_host}"
task :update_yum_repo => "pl:fetch" do

  # Paths to the repos.
  repo_base_path = Pkg::Config.yum_target_path

  # This entire command is going to be passed across SSH, but it's unwieldy on a
  # single line. By breaking it into a series of concatenated strings, we can maintain
  # a semblance of formatting and structure (nevermind readability).
  # NOTE(review): rpm_family_and_version is not defined in this file —
  # presumably a helper provided by the surrounding packaging tasks; verify
  # before refactoring.
  command = %(for dir in #{repo_base_path}/{#{rpm_family_and_version.join(",")}}-*; do)
  command += %( sudo createrepo --checksum=sha --checkts --update --delta-workers=0 --quiet --database --update $dir; )
  command += %(done; )
  command += %(sync)

  Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.yum_host, command)
end
153
+
154
desc "Remotely add shipped packages to apt repo on #{Pkg::Config.apt_host}"
# Adds the debs in incoming_dir/<dist> to the aptly repo for the current PE
# version, publishing (or republishing) the dist, then removes the incoming
# dir. The whole aptly interaction runs remotely under a 20-minute flock on
# aptly's database lock.
task :apt, :incoming, :dist do |t, args|
  dist = args.dist
  incoming_dir = args.incoming
  incoming_dir or fail "Adding packages to apt repo requires an incoming directory"
  Pkg::Util::RakeUtils.invoke_task("pl:fetch")

  cmd = <<-eos
if ! flock --wait 1200 /opt/tools/aptly/db/LOCK --command /bin/true; then
  echo "Unable to acquire aptly lock, giving up" 1>&2
  exit 1
fi
aptly repo add -remove-files #{Pkg::Config::pe_version}-#{dist} #{incoming_dir}/#{dist}
if [ -d /opt/tools/aptly/public/#{Pkg::Config::pe_version}/dists/#{dist} ]; then
  aptly publish update -gpg-key=\"8BBEB79B\" #{dist} #{Pkg::Config::pe_version}
else
  aptly publish repo -gpg-key=\"8BBEB79B\" #{Pkg::Config::pe_version}-#{dist} #{Pkg::Config::pe_version}
fi
  eos
  stdout, stderr = Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.apt_host, cmd, true)

  output = stdout.to_s + stderr.to_s

  if output.include?("ERROR:") || output.include?("There have been errors!")
    # We shouldn't ever get here if aptly returns non-zero on failure, but just in case...
    fail "Unable to add packages to debian repo. Hopefully the output has some helpful information. Output: #{output}"
  end

  puts
  puts "Aptly output: #{output}"
  puts

  puts "Cleaning up apt repo 'incoming' dir on #{Pkg::Config.apt_host}"
  Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.apt_host, "rm -r #{incoming_dir}")

end
190
+
191
# Throw more tires on the fire
desc "Remotely link shipped rpm packages into feature repo on #{Pkg::Config.yum_host}"
task :link_shipped_rpms_to_feature_repo => "pl:fetch" do
  next if Pkg::Config.pe_feature_branch
  repo_base_path = Pkg::Config.yum_target_path
  # yum_target_path(true) presumably returns the feature-branch variant of
  # the path — verify in Pkg::Config.
  feature_repo_path = Pkg::Config.yum_target_path(true)
  # select + destructive gsub!: keeps only paths under pkg/pe/rpm/ and
  # rewrites each to be relative to that directory in one pass.
  pkgs = FileList['pkg/pe/rpm/**/*.rpm'].select { |path| path.gsub!('pkg/pe/rpm/', '') }
  # Hard-link each shipped rpm into the matching feature repo directory.
  command = %(for pkg in #{pkgs.join(' ')}; do)
  command += %( sudo ln -f "#{repo_base_path}/$( dirname ${pkg} )/$( basename ${pkg} )" "#{feature_repo_path}/$( dirname ${pkg} )/" ; )
  command += %(done; )
  command += %(sync)

  Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.yum_host, command)
end
205
+
206
desc "Remotely link shipped deb packages into feature repo on #{Pkg::Config.apt_host}"
task :link_shipped_debs_to_feature_repo => "pl:fetch" do
  next if Pkg::Config.pe_feature_branch
  base_path = Pkg::Config.apt_target_path
  # apt_target_path(true) presumably returns the feature-branch variant of
  # the path — verify in Pkg::Config.
  feature_base_path = Pkg::Config.apt_target_path(true)
  # select + destructive gsub!: keeps only paths under pkg/pe/deb/ and
  # rewrites each to be relative to that directory in one pass.
  pkgs = FileList["pkg/pe/deb/**/*.deb"].select { |path| path.gsub!('pkg/pe/deb/', '') }
  # Hard-link each shipped deb into the matching "-amd64" feature repo dir.
  command = %(for pkg in #{pkgs.join(' ')}; do)
  command += %( sudo ln -f "#{base_path}/$( dirname ${pkg} )-amd64/$( basename ${pkg} )" "#{feature_base_path}/$( dirname ${pkg} )-amd64/" ; )
  command += %(done; )
  command += %(sync)

  Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.apt_host, command)
end
219
+ end
220
+ end
221
+ end