puppet 2.7.23 → 2.7.24
- data/Gemfile +23 -7
- data/ext/packaging/LICENSE +17 -0
- data/ext/packaging/README.md +92 -18
- data/ext/packaging/spec/tasks/00_utils_spec.rb +28 -21
- data/ext/packaging/spec/tasks/build_object_spec.rb +6 -4
- data/ext/packaging/static_artifacts/PackageInfo.plist +3 -0
- data/ext/packaging/tasks/00_utils.rake +91 -15
- data/ext/packaging/tasks/10_setupvars.rake +39 -24
- data/ext/packaging/tasks/20_setupextravars.rake +1 -5
- data/ext/packaging/tasks/30_metrics.rake +29 -37
- data/ext/packaging/tasks/apple.rake +8 -6
- data/ext/packaging/tasks/build.rake +6 -0
- data/ext/packaging/tasks/deb.rake +1 -4
- data/ext/packaging/tasks/fetch.rake +22 -12
- data/ext/packaging/tasks/gem.rake +88 -35
- data/ext/packaging/tasks/jenkins.rake +25 -1
- data/ext/packaging/tasks/jenkins_dynamic.rake +10 -1
- data/ext/packaging/tasks/mock.rake +37 -19
- data/ext/packaging/tasks/pe_ship.rake +108 -10
- data/ext/packaging/tasks/pe_sign.rake +3 -3
- data/ext/packaging/tasks/retrieve.rake +12 -0
- data/ext/packaging/tasks/rpm_repos.rake +2 -2
- data/ext/packaging/tasks/ship.rake +51 -12
- data/ext/packaging/tasks/sign.rake +42 -12
- data/ext/packaging/tasks/tar.rake +1 -1
- data/ext/packaging/tasks/template.rake +17 -3
- data/ext/packaging/tasks/vendor_gems.rake +1 -1
- data/ext/packaging/templates/downstream.xml.erb +15 -2
- data/ext/packaging/templates/packaging.xml.erb +143 -1
- data/ext/packaging/templates/repo.xml.erb +35 -24
- data/lib/puppet/transaction.rb +1 -1
- data/lib/puppet/type/file.rb +12 -23
- data/lib/puppet/type/file/source.rb +2 -2
- data/lib/puppet/type/service.rb +3 -2
- data/lib/puppet/util.rb +22 -41
- data/lib/puppet/version.rb +1 -1
- data/spec/integration/type/file_spec.rb +22 -35
- data/spec/spec_helper.rb +12 -0
- data/spec/unit/application/kick_spec.rb +9 -4
- data/spec/unit/indirector/catalog/static_compiler_spec.rb +1 -1
- data/spec/unit/type/file/source_spec.rb +8 -7
- data/spec/unit/type/file_spec.rb +0 -29
- metadata +64 -39
data/ext/packaging/tasks/jenkins.rake

@@ -115,6 +115,28 @@ namespace :pl do
       when /tar/ then "tar"
       else raise "Could not determine build type for #{build_task}"
     end
+
+    # Create a string of metrics to send to Jenkins for data analysis
+    dist = case build_type
+      when /deb/ then @build.default_cow.split('-')[1]
+      when /rpm/
+        if @build.pe_version
+          @build.final_mocks.split(' ')[0].split('-')[2]
+        else
+          @build.final_mocks.split(' ')[0].split('-')[1..2].join("")
+        end
+      when /dmg/ then "apple"
+      when /gem/ then "gem"
+      when /sles/ then "sles"
+      when /tar/ then "tar"
+      else raise "Could not determine build type for #{build_task}"
+    end
+
+    if @build.pe_version
+      metrics = "#{ENV['USER']}~#{@build.version}~#{@build.pe_version}~#{dist}~#{@build.team}"
+    else
+      metrics = "#{ENV['USER']}~#{@build.version}~N/A~#{dist}~#{@build.team}"
+    end
     #
     # Create the data files to send to jenkins
     properties = @build.params_to_yaml
@@ -124,7 +146,8 @@ namespace :pl do
     parameters = [{ "name" => "BUILD_PROPERTIES", "file" => "file0" },
                   { "name" => "PROJECT_BUNDLE", "file" => "file1" },
                   { "name" => "PROJECT", "value" => "#{@build.project}" },
-                  { "name" => "BUILD_TYPE", "label" => "#{build_type}" }
+                  { "name" => "BUILD_TYPE", "label" => "#{build_type}" },
+                  { "name" => "METRICS", "value" => "#{metrics}"}]
 
     # Initialize the args array that will hold all of the arguments we pass
     # to the curl utility method.
@@ -149,6 +172,7 @@ namespace :pl do
       "-Fname=PROJECT_BUNDLE" , "-Ffile1=@#{bundle}",
       "-Fname=PROJECT" , "-Fvalue=#{@build.project}",
       "-Fname=BUILD_TYPE" , "-Fvalue=#{build_type}",
+      "-Fname=METRICS" , "-Fvalue=#{metrics}",
       "-FSubmit=Build",
       "-Fjson=#{json.to_json}",
     ]
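For context on the new METRICS parameter, here is a small standalone sketch (not part of the diff; all values are hypothetical) of what the tilde-separated metrics string evaluates to for a FOSS build:

    # Hypothetical stand-ins for the @build attributes used above.
    user       = "jenkins"      # ENV['USER']
    version    = "2.7.24"       # @build.version
    pe_version = nil            # @build.pe_version (nil for FOSS builds)
    dist       = "el6"          # derived from a mock config name such as "pl-el-6-x86_64"
    team       = "dev"          # @build.team

    metrics = "#{user}~#{version}~#{pe_version || 'N/A'}~#{dist}~#{team}"
    puts metrics   # => "jenkins~2.7.24~N/A~el6~dev"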
data/ext/packaging/tasks/jenkins_dynamic.rake

@@ -65,10 +65,18 @@ namespace :pl do
     properties = @build.params_to_yaml
     bundle = git_bundle('HEAD')
 
+    # Create a string of metrics to send to Jenkins for data analysis
+    if @build.pe_version
+      metrics = "#{ENV['USER']}~#{@build.version}~#{@build.pe_version}~#{@build.team}"
+    else
+      metrics = "#{ENV['USER']}~#{@build.version}~N/A~#{@build.team}"
+    end
+
     # Construct the parameters, which is an array of hashes we turn into JSON
     parameters = [{ "name" => "BUILD_PROPERTIES", "file" => "file0" },
                   { "name" => "PROJECT_BUNDLE", "file" => "file1" },
-                  { "name" => "PROJECT", "value" => "#{@build.project}" }
+                  { "name" => "PROJECT", "value" => "#{@build.project}" },
+                  { "name" => "METRICS", "value" => "#{metrics}"}]
 
     # Contruct the json string
     json = JSON.generate("parameter" => parameters)
@@ -79,6 +87,7 @@ namespace :pl do
       "-Fname=BUILD_PROPERTIES", "-Ffile0=@#{properties}",
       "-Fname=PROJECT_BUNDLE" , "-Ffile1=@#{bundle}",
       "-Fname=PROJECT" , "-Fvalue=#{@build.project}",
+      "-Fname=METRICS" , "-Fvalue=#{metrics}",
      "-FSubmit=Build",
      "-Fjson=#{json.to_json}",
     ]
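The parameters array above is serialized with JSON.generate before being posted to Jenkins. A hedged, self-contained illustration of the resulting payload, using made-up project and metrics values:

    require 'json'

    metrics = "jenkins~2.7.24~N/A~dev"   # hypothetical value
    parameters = [{ "name" => "BUILD_PROPERTIES", "file" => "file0" },
                  { "name" => "PROJECT_BUNDLE", "file" => "file1" },
                  { "name" => "PROJECT", "value" => "puppet" },
                  { "name" => "METRICS", "value" => metrics }]

    json = JSON.generate("parameter" => parameters)
    puts json
    # {"parameter":[{"name":"BUILD_PROPERTIES","file":"file0"}, ... ,{"name":"METRICS","value":"jenkins~2.7.24~N/A~dev"}]}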
data/ext/packaging/tasks/mock.rake

@@ -30,12 +30,41 @@ def mock_artifact(mock_config, cmd_args)
     configdir_arg = " --configdir #{configdir}"
   end
 
-
-
-  rm_r configdir unless configdir.nil?
+  begin
+    sh "#{mock} -r #{mock_config} #{configdir_arg} #{cmd_args}"
 
-
-
+    # Return a FileList of the build artifacts
+    return FileList[File.join(basedir, mock_config, 'result','*.rpm')]
+
+  rescue RuntimeError => error
+    build_log = File.join(basedir, mock_config, 'result', 'build.log')
+    root_log = File.join(basedir, mock_config, 'result', 'root.log')
+    content = File.read(build_log) if File.readable?(build_log)
+
+    if File.readable?(root_log)
+      STDERR.puts File.read(root_log)
+    end
+    if content and content.lines.count > 2
+      STDERR.puts content
+    end
+
+    # Any useful info has now been gleaned from the logs in the case of a
+    # failure, so we can safely remove basedir if this is a randomized mockroot
+    # build. Scarily enough, because of mock permissions, we can't actually
+    # just remove it, we have to sudo remove it.
+
+    if randomize and basedir and File.directory?(basedir)
+      sh "sudo -n rm -r #{basedir}"
+    end
+
+    raise error
+  ensure
+    # Unlike basedir, which we keep in the success case, we don't need
+    # configdir anymore either way, so we always clean it up if we're using
+    # randomized mockroots.
+    #
+    rm_r configdir if randomize
+  end
 end
 
 # Use mock to build an SRPM
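The rewritten mock_artifact wraps the mock invocation in a begin/rescue/ensure block so that logs are surfaced on failure and temporary state is always cleaned up. A minimal sketch of that error-handling shape, with hypothetical paths and a generic command runner rather than the packaging repo's helpers:

    require 'fileutils'

    def run_build(cmd, result_dir, cleanup_dir)
      system(cmd) or raise "build command failed: #{cmd}"
      # On success, hand back whatever artifacts the build produced.
      Dir[File.join(result_dir, '*.rpm')]
    rescue RuntimeError => error
      # Surface any log that might explain the failure before re-raising.
      log = File.join(result_dir, 'build.log')
      STDERR.puts File.read(log) if File.readable?(log)
      raise error
    ensure
      # Temporary configuration is not needed in either outcome.
      FileUtils.rm_rf(cleanup_dir) if cleanup_dir && File.directory?(cleanup_dir)
    end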
@@ -107,7 +136,7 @@ def mock_defines(mock_config)
   family = mock_el_family(mock_config)
   version = mock_el_ver(mock_config)
   defines = ""
-  if version
+  if version =~ /^(4|5)$/ or family == "sles"
     defines = %Q{--define "%dist .#{family}#{version}" \
       --define "_source_filedigest_algorithm 1" \
       --define "_binary_filedigest_algorithm 1" \
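The narrowed condition above only emits the extra rpmbuild defines for EL4, EL5, and SLES mock configs. A rough sketch of the kind of define string being assembled (the family and version values are illustrative, and the rationale comment is an assumption about why these platforms need it):

    family  = "el"   # e.g. from a mock config name like "pl-el-5-x86_64"
    version = "5"

    # Assumption: these defines exist so that rpm on EL4/EL5 and SLES, which
    # predates SHA-256 file digests, can still verify the packages
    # (digest algorithm 1 is MD5).
    defines = [
      %Q{--define "%dist .#{family}#{version}"},
      %Q{--define "_source_filedigest_algorithm 1"},
      %Q{--define "_binary_filedigest_algorithm 1"},
    ].join(' ')

    puts defines
    # --define "%dist .el5" --define "_source_filedigest_algorithm 1" --define "_binary_filedigest_algorithm 1"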
@@ -122,7 +151,7 @@ def build_rpm_with_mock(mocks)
   mocks.split(' ').each do |mock_config|
     family = mock_el_family(mock_config)
     version = mock_el_ver(mock_config)
-    subdir =
+    subdir = is_final? ? 'products' : 'devel'
     bench = Benchmark.realtime do
       # Set up the rpmbuild dir in a temp space, with our tarball and spec
       workdir = prep_rpm_build_dir
@@ -138,15 +167,6 @@ def build_rpm_with_mock(mocks)
 
       rpms.each do |rpm|
         rpm.strip!
-        unless ENV['RC_OVERRIDE'] == '1'
-          if is_rc? == FALSE and rpm =~ /[0-9]+rc[0-9]+\./
-            puts "It looks like you might be trying to ship an RC to the production repos. Leaving #{rpm}. Pass RC_OVERRIDE=1 to override."
-            next
-          elsif is_rc? and rpm !~ /[0-9]+rc[0-9]+\./
-            puts "It looks like you might be trying to ship a production release to the development repos. Leaving #{rpm}. Pass RC_OVERRIDE=1 to override."
-            next
-          end
-        end
 
         if @build.build_pe
           %x{mkdir -p pkg/pe/rpm/#{family}-#{version}-{srpms,i386,x86_64}}
@@ -194,7 +214,7 @@ def build_rpm_with_mock(mocks)
         end
       end
     end
-
+    puts "Finished building in: #{bench}"
   end
 end
 
@@ -261,12 +281,10 @@ namespace :pl do
     # If default mock isn't specified, just take the first one in the @build.final_mocks list
     @build.default_mock ||= @build.final_mocks.split(' ')[0]
     build_rpm_with_mock(@build.default_mock)
-    post_metrics if @build.benchmark
   end
 
   desc "Use specified mocks to make rpms, keyed to PL infrastructure, pass MOCK to specifiy config"
   task :mock_all => "package:tar" do
     build_rpm_with_mock(@build.final_mocks)
-    post_metrics if @build.benchmark
   end
 end
data/ext/packaging/tasks/pe_ship.rake

@@ -15,27 +15,125 @@ if @build.build_pe
     desc "Ship PE debs to #{@build.apt_host}"
     task :ship_debs => "pl:fetch" do
       empty_dir?("pkg/pe/deb") and fail "The 'pkg/pe/deb' directory has no packages!"
-      target_path = ENV['APT_REPO']
+      target_path = ENV['APT_REPO']
+
+      # If APT_REPO isn't specified as an environment variable, we use a temporary one
+      # created for this specific deb ship. This enables us to escape the conflicts
+      # introduced with simultaneous deb ships.
+      #
+      unless target_path
+        puts "Creating temporary incoming dir on #{@build.apt_host}"
+        target_path = %x{ssh -t #{@build.apt_host} 'mktemp -d -t incoming-XXXXXX'}.chomp
+      end
+
+      # For reprepro, we ship just the debs into an incoming dir. On the remote end,
+      # reprepro will pull these debs in and add them to the repositories based on the
+      # dist, e.g. lucid, architecture notwithstanding.
+      #
+      # The layout that the reprepro library will expect is:
+      #
+      # incoming_dir/{$dists}/*.deb
+      #
+      # ex:
+      # incoming_dir|
+      #            |_lucid/*.deb
+      #            |_squeeze/*.deb
+      #            |_precise/*.deb
+      #            |_wheezy/*.deb
+      #
+      puts "Shipping PE debs to apt repo 'incoming' dir on #{@build.apt_host}"
       retry_on_fail(:times => 3) do
-
+        cd "pkg/pe/deb" do
+          Dir["**/*.deb"].each do |deb|
+            rsync_to(deb, @build.apt_host, "#{target_path}/#{File.dirname(deb)}/")
+          end
+        end
       end
+
+      # We also ship our PE artifacts to directories for archival purposes and to
+      # ease the gathering of both debs and sources when we do PE compose and ship. For
+      # this case, we ship everything to directories that mirror the legacy rpm
+      # directory format:
+      #
+      # repos/$dist-{$architecture|source}
+      #
+      # ex:
+      # repos|
+      #     |_squeeze-i386
+      #     |_squeeze-amd64
+      #     |_squeeze-source
+      #
+      # We also have concerns about shipped artifacts getting accidentally overwritten
+      # by newer ones. To handle this, we make everything we ship to the archive
+      # directories immutable, after rsyncing out.
+      #
+      base_path = "#{@build.apt_repo_path}/#{@build.pe_version}/repos"
+
+      puts "Shipping all built artifacts to to archive directories on #{@build.apt_host}"
+
+      @build.cows.split(' ').map { |i| i.sub('.cow','') }.each do |cow|
+        _base, dist, arch = cow.split('-')
+        unless empty_dir? "pkg/pe/deb/#{dist}"
+          archive_path = "#{base_path}/#{dist}-#{arch}"
+
+          # Ship arch-specific debs to correct dir, e.g. 'squeeze-i386'
+          unless Dir["pkg/pe/deb/#{dist}/pe-*_#{arch}.deb"].empty?
+            rsync_to("pkg/pe/deb/#{dist}/pe-*_#{arch}.deb --ignore-existing", @build.apt_host, "#{archive_path}/" )
+          end
+
+          # Ship all-arch debs to same place
+          unless Dir["pkg/pe/deb/#{dist}/pe-*_all.deb"].empty?
+            rsync_to("pkg/pe/deb/#{dist}/pe-*_all.deb --ignore-existing", @build.apt_host, "#{base_path}/#{dist}-i386/")
+            rsync_to("pkg/pe/deb/#{dist}/pe-*_all.deb --ignore-existing", @build.apt_host, "#{base_path}/#{dist}-amd64/")
+          end
+
+          unless Dir["pkg/pe/deb/#{dist}/pe-*"].select { |i| i !~ /^.*\.deb$/ }.empty?
+            # Ship source files to source dir, e.g. 'squeeze-source'
+            rsync_to("pkg/pe/deb/#{dist}/pe-* --exclude *.deb --ignore-existing", @build.apt_host, "#{base_path}/#{dist}-source")
+          end
+
+          files = Dir["pkg/pe/deb/#{dist}/pe-*{_#{arch},all}.deb"].map { |f| "#{archive_path}/#{File.basename(f)}" }
+
+          files += Dir["pkg/pe/deb/#{dist}/pe-*"].select { |f| f !~ /^.*\.deb$/ }.map { |f| "#{base_path}/#{dist}-source/#{File.basename(f)}" }
+
+          unless files.empty?
+            remote_set_immutable(@build.apt_host, files)
+          end
+        end
+      end
+
+
       if @build.team == 'release'
-        Rake::Task["pe:remote:
+        Rake::Task["pe:remote:apt"].invoke(target_path)
       end
     end
 
     namespace :remote do
       desc "Update remote rpm repodata for PE on #{@build.yum_host}"
       task :update_yum_repo => "pl:fetch" do
-        remote_ssh_cmd(@build.yum_host, "for dir in $(find #{@build.apt_repo_path}/#{@build.pe_version}/repos/el* -type d | grep -v repodata | grep -v cache | xargs) ; do
+        remote_ssh_cmd(@build.yum_host, "for dir in $(find #{@build.apt_repo_path}/#{@build.pe_version}/repos/{sles,el}* -type d | grep -v repodata | grep -v cache | xargs) ; do pushd $dir; sudo createrepo -q -d --update .; popd &> /dev/null ; done; sync")
       end
 
-      #
-      #
-      #
-
-
-
+      # the repsimple application is a small wrapper around reprepro, the purpose of
+      # which is largely to limit the surface area and functionality of reprepro to
+      # some very basic tasks - add, remove, and add all in a directory. The add_all
+      # command expects an incoming directory option containing .deb files.
+      # Per previous comments, the incoming directory must contain subdirectories named
+      # for debian distributions.
+      desc "Remotely add shipped packages to apt repo on #{@build.apt_host}"
+      task :apt, :incoming do |t, args|
+        incoming_dir = args.incoming
+        incoming_dir or fail "Adding packages to apt repo requires an incoming directory"
+        invoke_task("pl:fetch")
+        remote_ssh_cmd(@build.apt_host, "/usr/bin/repsimple add_all \
+                       --confdir /etc/reprepro/#{@build.pe_version} \
+                       --basedir #{@build.apt_repo_path}/#{@build.pe_version}/repos/debian \
+                       --databasedir /var/lib/reprepro/#{@build.pe_version} \
+                       --incomingdir #{incoming_dir}")
+
+        puts "Cleaning up apt repo 'incoming' dir on #{@build.apt_host}"
+        remote_ssh_cmd(@build.apt_host, "rm -r #{incoming_dir}")
+
      end
     end
   end
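To make the archive layout in the comments concrete, here is a small sketch (hypothetical cow names and repo path, not taken from the diff) of how the cow-name parsing maps packages onto the per-dist, per-arch archive directories:

    base_path = "/opt/enterprise/2.8/repos"   # hypothetical apt_repo_path/pe_version/repos

    %w[base-squeeze-i386.cow base-squeeze-amd64.cow base-precise-amd64.cow].each do |cow|
      _base, dist, arch = cow.sub('.cow', '').split('-')
      puts "pkg/pe/deb/#{dist}/*.deb  ->  #{base_path}/#{dist}-#{arch}/"
    end
    # pkg/pe/deb/squeeze/*.deb  ->  /opt/enterprise/2.8/repos/squeeze-i386/
    # pkg/pe/deb/squeeze/*.deb  ->  /opt/enterprise/2.8/repos/squeeze-amd64/
    # pkg/pe/deb/precise/*.deb  ->  /opt/enterprise/2.8/repos/precise-amd64/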
data/ext/packaging/tasks/pe_sign.rake

@@ -1,4 +1,4 @@
-# The
+# The sign_legacy_rpm and sign_rpm methods are defined in sign.rake
 # This is just adapted for the 'PE' layout
 
 if @build.build_pe
@@ -24,8 +24,8 @@ if @build.build_pe
         end
       end
     end
-
-
+    sign_legacy_rpm(old_rpms) unless old_rpms.empty?
+    sign_rpm(modern_rpms) unless modern_rpms.empty?
     # Now we hardlink them back in
     Dir["pkg/pe/rpm/*-*-i386/*.noarch.rpm"].each do |rpm|
       dir = rpm.split('/')[-2]
data/ext/packaging/tasks/retrieve.rake

@@ -31,3 +31,15 @@ namespace :pl do
     end
   end
 end
+
+if @build.build_pe
+  namespace :pe do
+    namespace :jenkins do
+      desc "Retrieve packages from the distribution server\. Check out commit to retrieve"
+      task :retrieve, :target do |t, args|
+        target = args.target || "artifacts"
+        invoke_task("pl:jenkins:retrieve", target)
+      end
+    end
+  end
+end
data/ext/packaging/tasks/rpm_repos.rake

@@ -105,7 +105,7 @@ namespace :pl do
         # ignore this one because its an extra
         next if url == "#{base_url}srpm/"
 
-        dist,version,
+        dist,version,_subdir,arch = url.split('/')[-4..-1]
 
         # Create an array of lines that will become our yum config
         #
@@ -117,7 +117,7 @@ namespace :pl do
 
         # Write the new config to a file under our repo configs dir
         #
-        config_file = File.join("pkg", "repo_configs", "rpm", "pl-#{@build.project}-#{@build.ref}-#{dist}-#{version}-#{arch}
+        config_file = File.join("pkg", "repo_configs", "rpm", "pl-#{@build.project}-#{@build.ref}-#{dist}-#{version}-#{arch}.repo")
         File.open(config_file, 'w') { |f| f.puts config }
       end
       puts "Wrote yum configuration files for #{@build.project} at #{@build.ref} to pkg/repo_configs/rpm"
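A hedged, runnable example (made-up URL, project, and ref) showing how the repaired line derives dist, version, and arch from a repo URL and builds the .repo filename:

    url     = "http://yum.example.com/el/6/products/x86_64/"   # hypothetical repo URL
    project = "puppet"
    ref     = "abc123"                                          # hypothetical git ref

    dist, version, _subdir, arch = url.split('/')[-4..-1]
    config_file = File.join("pkg", "repo_configs", "rpm",
                            "pl-#{project}-#{ref}-#{dist}-#{version}-#{arch}.repo")

    puts config_file   # => pkg/repo_configs/rpm/pl-puppet-abc123-el-6-x86_64.repo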
data/ext/packaging/tasks/ship.rake

@@ -1,11 +1,12 @@
 namespace :pl do
   desc "Ship mocked rpms to #{@build.yum_host}"
   task :ship_rpms do
-
-
-
-
-
+    ["el", "fedora"].each do |dist|
+      retry_on_fail(:times => 3) do
+        pkgs = Dir["pkg/#{dist}/**/*.rpm"].map { |f| "'#{f.gsub("pkg/#{dist}/", "#{@build.yum_repo_path}/#{dist}/")}'"}
+        rsync_to("pkg/#{dist}", @build.yum_host, @build.yum_repo_path)
+        remote_set_immutable(@build.yum_host, pkgs)
+      end
+    end
   end
 end
 
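A quick sketch (hypothetical repo path and package names) of the path rewrite used above to compute the remote file names handed to remote_set_immutable after the rsync:

    yum_repo_path = "/opt/repository/yum"    # hypothetical @build.yum_repo_path
    dist = "el"

    local = ["pkg/el/5/products/x86_64/puppet-2.7.24-1.el5.noarch.rpm",
             "pkg/el/6/products/x86_64/puppet-2.7.24-1.el6.noarch.rpm"]

    pkgs = local.map { |f| "'#{f.gsub("pkg/#{dist}/", "#{yum_repo_path}/#{dist}/")}'" }
    puts pkgs
    # '/opt/repository/yum/el/5/products/x86_64/puppet-2.7.24-1.el5.noarch.rpm'
    # '/opt/repository/yum/el/6/products/x86_64/puppet-2.7.24-1.el6.noarch.rpm'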
@@ -19,7 +20,7 @@ namespace :pl do
   task :update_yum_repo do
     STDOUT.puts "Really run remote repo update on #{@build.yum_host}? [y,n]"
     if ask_yes_or_no
-      remote_ssh_cmd(@build.yum_host, '
+      remote_ssh_cmd(@build.yum_host, 'rake -f /opt/repository/Rakefile mk_repo')
     end
   end
 
@@ -28,7 +29,7 @@ namespace :pl do
     STDOUT.puts "Really run remote freight command on #{@build.apt_host}? [y,n]"
     if ask_yes_or_no
       override = "OVERRIDE=1" if ENV['OVERRIDE']
-      remote_ssh_cmd(@build.apt_host, "
+      remote_ssh_cmd(@build.apt_host, "rake -f /opt/repository/Rakefile freight #{override}")
     end
   end
 end
@@ -60,10 +61,23 @@ namespace :pl do
     end
   end if @build.build_ips
 
-
-
-
-
+  # We want to ship a gem only for projects that build gems
+  if @build.build_gem
+    desc "Ship built gem to rubygems"
+    task :ship_gem do
+      # Even if a project builds a gem, if it uses the odd_even strategy, we only
+      # want to ship final gems because otherwise a development gem would be
+      # preferred over the last final gem
+      if @build.version_strategy != "odd_even" || is_final?
+        FileList["pkg/#{@build.project}-#{@build.gemversion}*.gem"].each do |f|
+          puts "Shipping gem #{f} to rubygems"
+          ship_gem(f)
+        end
+      else
+        STDERR.puts "Not shipping development gem using odd_even strategy for the sake of your users."
+      end
+    end
+  end
 
   desc "ship apple dmg to #{@build.yum_host}"
   task :ship_dmg => 'pl:fetch' do
@@ -89,6 +103,11 @@ namespace :pl do
       Rake::Task["pl:ship_dmg"].execute if @build.build_dmg
       Rake::Task["pl:ship_tar"].execute
       Rake::Task["pl:jenkins:ship"].invoke("shipped")
+      add_shipped_metrics(:pe_version => ENV['PE_VER'], :is_rc => (! is_final?)) if @build.benchmark
+      post_shipped_metrics if @build.benchmark
+    else
+      puts "Ship canceled"
+      exit
     end
   end
 
@@ -104,12 +123,32 @@ namespace :pl do
     invoke_task("pl:fetch")
     target = args.target || "artifacts"
     artifact_dir = "#{@build.jenkins_repo_path}/#{@build.project}/#{@build.ref}/#{target}"
+
+    # In order to get a snapshot of what this build looked like at the time
+    # of shipping, we also generate and ship the params file
+    #
+    @build.params_to_yaml('pkg')
+
+
+    # Sadly, the packaging repo cannot yet act on its own, without living
+    # inside of a packaging-repo compatible project. This means in order to
+    # use the packaging repo for shipping and signing (things that really
+    # don't require build automation, specifically) we still need the project
+    # clone itself.
+    git_bundle('HEAD', 'signing_bundle', 'pkg')
+
     retry_on_fail(:times => 3) do
       remote_ssh_cmd(@build.distribution_server, "mkdir -p #{artifact_dir}")
     end
     retry_on_fail(:times => 3) do
-
+      ignore_existing = "--ignore-existing"
+      rsync_to("pkg/", @build.distribution_server, "#{artifact_dir}/ #{ignore_existing} --exclude repo_configs")
+    end
+    # If we just shipped a tagged version, we want to make it immutable
+    files = Dir.glob("pkg/**/*").select { |f| File.file?(f) }.map do |file|
+      "#{artifact_dir}/#{file.sub(/^pkg\//,'')}"
     end
+    remote_set_immutable(@build.distribution_server, files)
   end
 
   desc "Ship generated repository configs to the distribution server"