puppet 2.7.23 → 2.7.24
- data/Gemfile +23 -7
- data/ext/packaging/LICENSE +17 -0
- data/ext/packaging/README.md +92 -18
- data/ext/packaging/spec/tasks/00_utils_spec.rb +28 -21
- data/ext/packaging/spec/tasks/build_object_spec.rb +6 -4
- data/ext/packaging/static_artifacts/PackageInfo.plist +3 -0
- data/ext/packaging/tasks/00_utils.rake +91 -15
- data/ext/packaging/tasks/10_setupvars.rake +39 -24
- data/ext/packaging/tasks/20_setupextravars.rake +1 -5
- data/ext/packaging/tasks/30_metrics.rake +29 -37
- data/ext/packaging/tasks/apple.rake +8 -6
- data/ext/packaging/tasks/build.rake +6 -0
- data/ext/packaging/tasks/deb.rake +1 -4
- data/ext/packaging/tasks/fetch.rake +22 -12
- data/ext/packaging/tasks/gem.rake +88 -35
- data/ext/packaging/tasks/jenkins.rake +25 -1
- data/ext/packaging/tasks/jenkins_dynamic.rake +10 -1
- data/ext/packaging/tasks/mock.rake +37 -19
- data/ext/packaging/tasks/pe_ship.rake +108 -10
- data/ext/packaging/tasks/pe_sign.rake +3 -3
- data/ext/packaging/tasks/retrieve.rake +12 -0
- data/ext/packaging/tasks/rpm_repos.rake +2 -2
- data/ext/packaging/tasks/ship.rake +51 -12
- data/ext/packaging/tasks/sign.rake +42 -12
- data/ext/packaging/tasks/tar.rake +1 -1
- data/ext/packaging/tasks/template.rake +17 -3
- data/ext/packaging/tasks/vendor_gems.rake +1 -1
- data/ext/packaging/templates/downstream.xml.erb +15 -2
- data/ext/packaging/templates/packaging.xml.erb +143 -1
- data/ext/packaging/templates/repo.xml.erb +35 -24
- data/lib/puppet/transaction.rb +1 -1
- data/lib/puppet/type/file.rb +12 -23
- data/lib/puppet/type/file/source.rb +2 -2
- data/lib/puppet/type/service.rb +3 -2
- data/lib/puppet/util.rb +22 -41
- data/lib/puppet/version.rb +1 -1
- data/spec/integration/type/file_spec.rb +22 -35
- data/spec/spec_helper.rb +12 -0
- data/spec/unit/application/kick_spec.rb +9 -4
- data/spec/unit/indirector/catalog/static_compiler_spec.rb +1 -1
- data/spec/unit/type/file/source_spec.rb +8 -7
- data/spec/unit/type/file_spec.rb +0 -29
- metadata +64 -39
data/ext/packaging/tasks/sign.rake
CHANGED
@@ -1,14 +1,36 @@
-def
+def sign_rpm(rpm, sign_flags = nil)
+
+  # To enable support for wrappers around rpm and thus support for gpg-agent
+  # rpm signing, we have to be able to tell the packaging repo what binary to
+  # use as the rpm signing tool.
+  #
+  rpm_cmd = ENV['RPM'] || find_tool('rpm')
+
+  # If we're using the gpg agent for rpm signing, we don't want to specify the
+  # input for the passphrase, which is what '--passphrase-fd 3' does. However,
+  # if we're not using the gpg agent, this is required, and is part of the
+  # defaults on modern rpm. The fun part of gpg-agent signing of rpms is
+  # specifying that the gpg check command always return true
+  #
+  if boolean_value(ENV['RPM_GPG_AGENT'])
+    gpg_check_cmd = "--define '%__gpg_check_password_cmd /bin/true'"
+  else
+    input_flag = "--passphrase-fd 3"
+  end
+
   # Try this up to 5 times, to allow for incorrect passwords
   retry_on_fail(:times => 5) do
-
+    # This definition of %__gpg_sign_cmd is the default on modern rpm. We
+    # accept extra flags to override certain signing behavior for older
+    # versions of rpm, e.g. specifying V3 signatures instead of V4.
+    #
+    sh "#{rpm_cmd} #{gpg_check_cmd} --define '%_gpg_name #{@build.gpg_name}' --define '%__gpg_sign_cmd %{__gpg} gpg #{sign_flags} #{input_flag} --batch --no-verbose --no-armor --no-secmem-warning -u %{_gpg_name} -sbo %{__signature_filename} %{__plaintext_filename}' --addsign #{rpm}"
   end
+
 end

-def
-
-  sh "rpm --define '%_gpg_name #{@build.gpg_name}' --addsign #{rpm} > /dev/null"
-  end
+def sign_legacy_rpm(rpm)
+  sign_rpm(rpm, "--force-v3-sigs --digest-algo=sha1")
 end

 def rpm_has_sig(rpm)
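Note: the new sign_rpm is driven entirely by two environment variables, RPM (which rpm binary or wrapper to invoke) and RPM_GPG_AGENT (whether to rely on a running gpg-agent instead of a passphrase file descriptor). A minimal stand-alone Ruby sketch of that switch, where truthy? is a stand-in for the packaging repo's boolean_value helper and the package path is purely illustrative:

    # truthy? is a stand-in for the packaging repo's boolean_value helper.
    def truthy?(value)
      %w[true yes 1].include?(value.to_s.downcase)
    end

    rpm_cmd    = ENV['RPM'] || 'rpm'                  # wrapper script or plain rpm
    input_flag = truthy?(ENV['RPM_GPG_AGENT']) ? '' : '--passphrase-fd 3'

    puts "#{rpm_cmd} #{input_flag} --addsign pkg/el/6/example-1.0-1.noarch.rpm"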
@@ -17,7 +39,9 @@ def rpm_has_sig(rpm)
 end

 def sign_deb_changes(file)
-
+  # Lazy lazy lazy lazy lazy
+  sign_program = "-p'gpg --use-agent --no-tty'" if ENV['RPM_GPG_AGENT']
+  sh "debsign #{sign_program} --re-sign -k#{@build.gpg_key} #{file}"
 end

 # requires atleast a self signed prvate key and certificate pair
@@ -46,17 +70,17 @@ namespace :pl do
     modern_rpms = (Dir["pkg/el/6/**/*.rpm"] + Dir["pkg/fedora/**/*.rpm"]).join(' ')
     unless el5_rpms.empty?
       puts "Signing el5 rpms..."
-
+      sign_legacy_rpm(el5_rpms)
     end

     unless modern_rpms.empty?
       puts "Signing el6 and fedora rpms..."
-
+      sign_rpm(modern_rpms)
     end
     # Now we hardlink them back in
     Dir["pkg/*/*/*/i386/*.noarch.rpm"].each do |rpm|
       cd File.dirname(rpm) do
-        ln rpm, File.join("..","x86_64")
+        ln File.basename(rpm), File.join("..","x86_64"), :force => true
       end
     end
   end
@@ -109,11 +133,17 @@ namespace :pl do
     # Because rpms and debs are laid out differently in PE under pkg/ they
     # have a different sign task to address this. Rather than create a whole
     # extra :jenkins task for signing PE, we determine which sign task to use
-    # based on if we're building PE
+    # based on if we're building PE.
+    # We also listen in on the environment variable SIGNING_BUNDLE. This is
+    # _NOT_ intended for public use, but rather with the internal promotion
+    # workflow for Puppet Enterprise. SIGNING_BUNDLE is the path to a tarball
+    # containing a git bundle to be used as the environment for the packaging
+    # repo in a signing operation.
+    signing_bundle = ENV['SIGNING_BUNDLE']
     rpm_sign_task = @build.build_pe ? "pe:sign_rpms" : "pl:sign_rpms"
     deb_sign_task = @build.build_pe ? "pe:sign_deb_changes" : "pl:sign_deb_changes"
     sign_tasks = ["pl:sign_tar", rpm_sign_task, deb_sign_task]
-    remote_repo = remote_bootstrap(@build.distribution_server, 'HEAD')
+    remote_repo = remote_bootstrap(@build.distribution_server, 'HEAD', nil, signing_bundle)
     build_params = remote_buildparams(@build.distribution_server, @build)
     rsync_to('pkg', @build.distribution_server, remote_repo)
     remote_ssh_cmd(@build.distribution_server, "cd #{remote_repo} ; rake #{sign_tasks.join(' ')} PARAMS_FILE=#{build_params}")
data/ext/packaging/tasks/tar.rake
CHANGED
@@ -72,7 +72,7 @@ namespace :package do
     #
     # If you set this the version will only be modified in the temporary copy,
     # with the intent that it never change the official source tree.
-
+    Rake::Task["package:versionbump"].invoke(workdir) if @build.update_version_file

     cd "pkg" do
       sh "#{tar} --exclude #{tar_excludes.join(" --exclude ")} -zcf '#{@build.project}-#{@build.version}.tar.gz' #{@build.project}-#{@build.version}"
data/ext/packaging/tasks/template.rake
CHANGED
@@ -3,10 +3,24 @@ namespace :package do
   task :template, :workdir do |t, args|
     workdir = args.workdir

-
+    if @build.templates
+      if @build.templates.is_a?(Array)
+        templates = FileList[@build.templates.map {|path| File.join(workdir, path)}]
+      else
+        STDERR.puts "templates must be an Array, not '#{@build.templates.class}'"
+      end
+    else
+      templates = FileList["#{workdir}/ext/**/*.erb"].exclude(/#{workdir}\/ext\/(packaging|osx)/)
+    end
+
+    templates.each do |template|
       # process the template, stripping off the ERB extension
-
-
+      if File.extname(template) == ".erb"
+        erb(template, template.gsub(/\.erb$/,""))
+        rm_f(template)
+      else
+        STDERR.puts "Skipping #{template} because it doesn't look like an erb template"
+      end
     end
   end
 end
data/ext/packaging/templates/downstream.xml.erb
CHANGED
@@ -13,7 +13,8 @@ When DOWNSTREAM_JOB is passed to the packaging repo, this job is created to wrap
   </jenkins.plugins.hipchat.HipChatNotifier_-HipChatJobProperty>
   </properties>
   <scm class="hudson.scm.NullSCM"/>
-  <
+  <assignedNode>downstream</assignedNode>
+  <canRoam>false</canRoam>
   <disabled>false</disabled>
   <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
   <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
@@ -22,9 +23,21 @@ When DOWNSTREAM_JOB is passed to the packaging repo, this job is created to wrap
   <builders>
     <hudson.tasks.Shell>
       <command>#!/bin/bash
+## We have to check if this has been triggered by a successful package and repo build
+curl -s "http://<%= "#{@build.jenkins_build_host}" %>/job/<%= "#{@build.project}-repo-#{@build.build_date}-#{@build.ref}" %>/lastBuild/api/json" | grep result\":\"SUCCESS\" > /dev/null
+
+UPSTREAM_BUILD_STATUS=$?
+
 set -e
+
+if [ $UPSTREAM_BUILD_STATUS -eq 0 ] ; then
+  UPSTREAM_BUILD_STATUS="success"
+else
+  UPSTREAM_BUILD_STATUS="failure"
+fi
+
 # This URI was passed in as an argument to the original rake package call
-curl -i &
+curl --fail -i "<%= escape_html(add_param_to_uri(ENV['DOWNSTREAM_JOB'], "PACKAGE_BUILD_STATUS=$UPSTREAM_BUILD_STATUS")) %>"</command>
     </hudson.tasks.Shell>
   </builders>
   <publishers/>
data/ext/packaging/templates/packaging.xml.erb
CHANGED
@@ -82,6 +82,11 @@ return labelMap.get(binding.getVariables().get("command"));</groovyScr
         <description></description>
         <defaultValue></defaultValue>
       </hudson.model.StringParameterDefinition>
+      <hudson.model.StringParameterDefinition>
+        <name>METRICS</name>
+        <description></description>
+        <defaultValue></defaultValue>
+      </hudson.model.StringParameterDefinition>
     </parameterDefinitions>
   </hudson.model.ParametersDefinitionProperty>
   </properties>
@@ -140,6 +145,143 @@ popd</command>
     </hudson.tasks.Shell>
   </builders>
   <publishers>
+    <org.jvnet.hudson.plugins.groovypostbuild.GroovyPostbuildRecorder plugin="groovy-postbuild@1.8">
+      <groovyScript>
+import java.util.regex.Matcher
+import java.util.regex.Pattern
+import java.net.HttpURLConnection
+import java.util.Date;
+
+def get_jenkins_build_time() {
+  start_time = manager.build.getStartTimeInMillis()
+  end_time = new Date().getTime()
+  return String.valueOf((end_time - start_time)/1000)
+}
+
+// Assemble metrics to post to build metrics server
+
+app_server = "<%= @build.metrics_url %>"
+task_metrics = manager.build.getEnvironment(manager.listener)['METRICS']
+charset = "UTF-8"
+
+// Maintain backwards compatibility
+if ( task_metrics == null) {
+  build_user = "N/A"
+  version = "N/A"
+  pe_version = "N/A"
+  build_team = "N/A"
+} else {
+  build_user = task_metrics.split("~")[0]
+  version = task_metrics.split("~")[1]
+  pe_version = task_metrics.split("~")[2]
+  build_team = task_metrics.split("~")[3]
+}
+
+matcher = manager.getLogMatcher(/(?:Finished building in:) ([\d]+\.?[\d]*)/)
+if (matcher != null) {
+  package_build_time = matcher[0][1]
+} else {
+  package_build_time = "N/A"
+}
+
+cmd_string = manager.build.getEnvironment(manager.listener)['command']
+if(cmd_string =~ /deb/) {
+  package_type = 'deb'
+} else if(cmd_string =~ /rpm|mock/) {
+  package_type = 'rpm'
+} else if(cmd_string =~ /gem/) {
+  package_type = 'gem'
+} else if(cmd_string =~ /apple/) {
+  package_type = 'dmg'
+} else if(cmd_string =~ /tar/) {
+  package_type = 'tar'
+} else {
+  package_type = 'N/A'
+}
+
+switch (package_type) {
+  case 'deb':
+    dist = cmd_string.split('-')[1]
+    break
+  case 'rpm':
+    if(pe_version != 'N/A') {
+      dist = cmd_string.split('-')[2]
+    } else {
+      dist = cmd_string.split('-')[1] + cmd_string.split('-')[2]
+    }
+    break
+  case 'gem':
+    dist = 'gem'
+    break
+  case 'dmg':
+    dist = 'apple'
+    break
+  case 'tar':
+    dist = 'tar'
+    break
+  default:
+    dist = 'N/A'
+}
+
+jenkins_build_time = get_jenkins_build_time()
+package_name = manager.build.getEnvironment(manager.listener)['PROJECT']
+build_loc = manager.build.getEnvironment(manager.listener)['NODE_NAME']
+build_log = "${manager.build.getEnvironment(manager.listener)['BUILD_URL']}" + "consoleText"
+success = String.valueOf(manager.build.result)
+
+String query = String.format("package_name=%s&dist=%s&package_type=%s&build_user=%s&build_team=%s&build_loc=%s&version=%s&pe_version=%s&success=%s&build_log=%s&jenkins_build_time=%s&package_build_time=%s",
+  URLEncoder.encode(package_name, charset),
+  URLEncoder.encode(dist, charset),
+  URLEncoder.encode(package_type, charset),
+  URLEncoder.encode(build_user, charset),
+  URLEncoder.encode(build_team, charset),
+  URLEncoder.encode(build_loc, charset),
+  URLEncoder.encode(version, charset),
+  URLEncoder.encode(pe_version, charset),
+  URLEncoder.encode(success, charset),
+  URLEncoder.encode(build_log, charset),
+  URLEncoder.encode(jenkins_build_time, charset),
+  URLEncoder.encode(package_build_time, charset))
+
+// Make sure the server is listening before attempting to post data
+
+URLConnection connection = null
+serverAlive = false
+try {
+  URL u = new URL(app_server);
+  connection = (HttpURLConnection) u.openConnection();
+  connection.setRequestMethod("GET");
+  int code = connection.getResponseCode();
+  serverAlive = true
+  connection.disconnect();
+
+} catch (MalformedURLException e) {
+  serverAlive = false
+  e.printStackTrace()
+
+} catch (IOException e) {
+  serverAlive = false
+  e.printStackTrace()
+
+} finally {
+  if (serverAlive == true) {
+    connection = new URL(app_server).openConnection()
+    connection.setDoOutput(true) // Triggers POST.
+    connection.setRequestProperty("Accept-Charset", charset);
+    connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded;charset=" + charset);
+    OutputStream output = null;
+
+    try {
+      output = connection.getOutputStream()
+      output.write(query.getBytes(charset))
+      InputStream response = connection.getInputStream()
+    } finally {
+      if (output != null) try { output.close(); } catch (IOException logOrIgnore) {}
+    }
+  }
+} </groovyScript>
+      <behavior>0</behavior>
+    </org.jvnet.hudson.plugins.groovypostbuild.GroovyPostbuildRecorder>
     <hudson.tasks.ArtifactArchiver>
       <artifacts>**/PROJECT_BUNDLE</artifacts>
       <latestOnly>false</latestOnly>
@@ -157,7 +299,7 @@ popd</command>
       <hudson.plugins.parameterizedtrigger.CurrentBuildParameters/>
     </configs>
     <projects><%= "#{@build.project}-repo-#{@build.build_date}-#{@build.ref}" %></projects>
-    <condition>
+    <condition>ALWAYS</condition>
     <triggerWithNoParameters>false</triggerWithNoParameters>
   </hudson.plugins.parameterizedtrigger.BuildTriggerConfig>
   </configs>
data/ext/packaging/templates/repo.xml.erb
CHANGED
@@ -41,36 +41,47 @@ This job will trigger the downstream job supplied with DOWNSTREAM_JOB if passed
     </hudson.plugins.copyartifact.CopyArtifact>
     <hudson.tasks.Shell>
       <command>#!/bin/bash
+## We have to check if this has been triggered by a successful package build
+curl -s "http://<%= "#{@build.jenkins_build_host}" %>/job/<%= "#{@build.project}-packaging-#{@build.build_date}-#{@build.ref}" %>/lastBuild/api/json" | grep result\":\"SUCCESS\" > /dev/null
+
+PACKAGE_BUILD_RESULT=$?
+
 set -e
-### We've retrieved the git bundle from the tarball build, so now we clone it
-### and use it to trigger our repo creation
-#
-# PROJECT_BUNDLE is a tarball containing a bundle file and git_repo is the
-# directory that will contain the git repository. First we untar the tarball
-# and then clone the git_bundle

-[ -
-
+if [ $PACKAGE_BUILD_RESULT -eq 0 ] ; then
+  echo "Detected upstream package build success. Building repos."
+  ### We've retrieved the git bundle from the tarball build, so now we clone it
+  ### and use it to trigger our repo creation
+  #
+  # PROJECT_BUNDLE is a tarball containing a bundle file and git_repo is the
+  # directory that will contain the git repository. First we untar the tarball
+  # and then clone the git_bundle
+
+  [ -f "PROJECT_BUNDLE" ] || exit 1
+
+  [ -d project ] && rm -rf project
+
+  mkdir project && tar -xzf PROJECT_BUNDLE -C project/

-pushd project
-
+  pushd project
+  git clone --recursive $(ls) git_repo

-
+  pushd git_repo

-
-
+  ### Clone the packaging repo
+  rake package:bootstrap --trace

-
-
-
+  ### Run repo creation
+  rake pl:jenkins:rpm_repos --trace
+  rake pl:jenkins:deb_repos --trace

+  popd
 popd
-
+else
+  echo "Detected upstream package build failure. Failing repo creation."
+  exit 1
+fi

-# Delete the upstream project now that repos have been created. This is largely
-# to avoid clutter and reclaim space
-#
-curl -i -X POST <%= "http://#{@build.jenkins_build_host}/job/#{@build.project}-packaging-#{@build.build_date}-#{@build.ref}" %>/doDelete
 </command>
     </hudson.tasks.Shell>
   </builders><% if ENV['DOWNSTREAM_JOB'] %>
@@ -78,9 +89,9 @@ curl -i -X POST <%= "http://#{@build.jenkins_build_host}/job/#{@build.project}-p
   <hudson.tasks.BuildTrigger>
     <childProjects><%= "#{@build.project}-downstream-#{@build.build_date}-#{@build.ref}" %></childProjects>
     <threshold>
-      <name>
-      <ordinal>
-      <color>
+      <name>FAILURE</name>
+      <ordinal>2</ordinal>
+      <color>RED</color>
     </threshold>
   </hudson.tasks.BuildTrigger>
   </publishers><% else %>
data/lib/puppet/transaction.rb
CHANGED
@@ -151,7 +151,7 @@ class Puppet::Transaction
       begin
         made = resource.eval_generate.uniq
         return false if made.empty?
-        made = made.inject({}) {|a,v| a
+        made = made.inject({}) {|a,v| a[v.name] = v; a}
       rescue => detail
         puts detail.backtrace if Puppet[:trace]
         resource.err "Failed to generate additional resources using 'eval_generate: #{detail}"
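Note: the transaction.rb fix rebuilds the generated-resource list into a hash keyed by resource name. The trailing "; a" matters because #inject feeds each block's return value back in as the accumulator, and Hash#[]= returns the assigned value rather than the hash. A toy illustration:

    Resource = Struct.new(:name)
    made = [Resource.new('web'), Resource.new('db')]

    by_name = made.inject({}) { |acc, r| acc[r.name] = r; acc }
    p by_name.keys   # => ["web", "db"]

    # Without the trailing "; acc", the second iteration would receive the
    # assigned Resource (the return value of []=) as its accumulator instead
    # of the hash being built.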
data/lib/puppet/type/file.rb
CHANGED
@@ -736,36 +736,25 @@ Puppet::Type.newtype(:file) do
   def write(property)
     remove_existing(:file)

-
-    if use_temporary_file
-      path = "#{self[:path]}.puppettmp_#{rand(10000)}"
-      path = "#{self[:path]}.puppettmp_#{rand(10000)}" while ::File.exists?(path) or ::File.symlink?(path)
-    else
-      path = self[:path]
-    end
+    assumed_default_mode = 0644

     mode = self.should(:mode) # might be nil
-
-
-
-
-
-
-
-
-      fail_if_checksum_is_wrong(path, content_checksum) if validate_checksum?
-      ::File.rename(path, self[:path])
-    rescue => detail
-      fail "Could not rename temporary file #{path} to #{self[:path]}: #{detail}"
-    ensure
-      # Make sure the created file gets removed
-      ::File.unlink(path) if FileTest.exists?(path)
+    mode_int = mode ? symbolic_mode_to_int(mode, assumed_default_mode) : nil
+
+    if write_temporary_file?
+      Puppet::Util.replace_file(self[:path], mode_int) do |file|
+        file.binmode
+        content_checksum = write_content(file)
+        file.flush
+        fail_if_checksum_is_wrong(file.path, content_checksum) if validate_checksum?
       end
+    else
+      umask = mode ? 000 : 022
+      Puppet::Util.withumask(umask) { ::File.open(self[:path], 'wb', mode_int ) { |f| write_content(f) } }
     end

     # make sure all of the modes are actually correct
     property_fix
-
   end

   private