puppet 2.7.22 → 2.7.23

Potentially problematic release.

Files changed (55)
  1. data/CHANGELOG +21 -0
  2. data/Rakefile +0 -2
  3. data/ext/build_defaults.yaml +1 -1
  4. data/ext/packaging/README.md +88 -33
  5. data/ext/packaging/packaging.rake +45 -0
  6. data/ext/packaging/spec/tasks/build_object_spec.rb +2 -0
  7. data/ext/packaging/tasks/00_utils.rake +94 -54
  8. data/ext/packaging/tasks/10_setupvars.rake +40 -28
  9. data/ext/packaging/tasks/apple.rake +20 -27
  10. data/ext/packaging/tasks/build.rake +3 -8
  11. data/ext/packaging/tasks/deb.rake +12 -36
  12. data/ext/packaging/tasks/deb_repos.rake +28 -36
  13. data/ext/packaging/tasks/fetch.rake +1 -2
  14. data/ext/packaging/tasks/jenkins.rake +22 -56
  15. data/ext/packaging/tasks/jenkins_dynamic.rake +111 -0
  16. data/ext/packaging/tasks/mock.rake +101 -56
  17. data/ext/packaging/tasks/pe_deb.rake +5 -7
  18. data/ext/packaging/tasks/pe_remote.rake +30 -42
  19. data/ext/packaging/tasks/pe_rpm.rake +3 -11
  20. data/ext/packaging/tasks/pe_ship.rake +14 -18
  21. data/ext/packaging/tasks/pe_sign.rake +11 -0
  22. data/ext/packaging/tasks/pe_tar.rake +1 -1
  23. data/ext/packaging/tasks/release.rake +0 -6
  24. data/ext/packaging/tasks/remote_build.rake +1 -13
  25. data/ext/packaging/tasks/retrieve.rake +1 -1
  26. data/ext/packaging/tasks/rpm.rake +15 -10
  27. data/ext/packaging/tasks/rpm_repos.rake +45 -56
  28. data/ext/packaging/tasks/ship.rake +46 -37
  29. data/ext/packaging/tasks/sign.rake +22 -14
  30. data/ext/packaging/tasks/tar.rake +1 -1
  31. data/ext/packaging/tasks/template.rake +1 -1
  32. data/ext/packaging/templates/README +1 -0
  33. data/ext/packaging/templates/downstream.xml.erb +32 -0
  34. data/ext/packaging/templates/packaging.xml.erb +182 -0
  35. data/ext/packaging/templates/repo.xml.erb +93 -0
  36. data/lib/puppet.rb +2 -0
  37. data/lib/puppet/file_system.rb +3 -0
  38. data/lib/puppet/file_system/path_pattern.rb +97 -0
  39. data/lib/puppet/module.rb +25 -4
  40. data/lib/puppet/module_tool/applications/unpacker.rb +5 -1
  41. data/lib/puppet/parser/files.rb +20 -15
  42. data/lib/puppet/parser/parser_support.rb +10 -1
  43. data/lib/puppet/parser/type_loader.rb +48 -28
  44. data/lib/puppet/version.rb +1 -1
  45. data/spec/unit/file_system/path_pattern_spec.rb +139 -0
  46. data/spec/unit/module_spec.rb +8 -1
  47. data/spec/unit/module_tool/applications/unpacker_spec.rb +6 -0
  48. data/spec/unit/parser/files_spec.rb +6 -67
  49. data/spec/unit/parser/parser_spec.rb +15 -5
  50. data/spec/unit/parser/type_loader_spec.rb +14 -33
  51. data/spec/unit/resource/type_collection_spec.rb +39 -55
  52. metadata +11 -5
  53. data/Gemfile.lock +0 -44
  54. data/ext/packaging/tasks/pe_sles.rake +0 -101
  55. data/ext/packaging/tasks/pre_tasks.rake +0 -0
@@ -26,36 +26,37 @@ else
  #
  @build.set_params_from_file('ext/project_data.yaml') if File.readable?('ext/project_data.yaml')
  @build.set_params_from_file('ext/build_defaults.yaml') if File.readable?('ext/build_defaults.yaml')
-
- # Allow environment variables to override the settings we just read in. These
- # variables are called out specifically because they are likely to require
- # overriding in at least some cases.
- #
- @build.sign_tar = boolean_value(ENV['SIGN_TAR']) if ENV['SIGN_TAR']
- @build.build_gem = boolean_value(ENV['GEM']) if ENV['GEM']
- @build.build_dmg = boolean_value(ENV['DMG']) if ENV['DMG']
- @build.build_ips = boolean_value(ENV['IPS']) if ENV['IPS']
- @build.build_doc = boolean_value(ENV['DOC']) if ENV['DOC']
- @build.build_pe = boolean_value(ENV['PE_BUILD']) if ENV['PE_BUILD']
- @build.debug = boolean_value(ENV['DEBUG']) if ENV['DEBUG']
- @build.default_cow = ENV['COW'] if ENV['COW']
- @build.cows = ENV['COW'] if ENV['COW']
- @build.pbuild_conf = ENV['PBUILDCONF'] if ENV['PBUILDCONF']
- @build.packager = ENV['PACKAGER'] if ENV['PACKAGER']
- @build.default_mock = ENV['MOCK'] if ENV['MOCK']
- @build.final_mocks = ENV['MOCK'] if ENV['MOCK']
- @build.rc_mocks = ENV['MOCK'] if ENV['MOCK']
- @build.gpg_name = ENV['GPG_NAME'] if ENV['GPG_NAME']
- @build.gpg_key = ENV['GPG_KEY'] if ENV['GPG_KEY']
- @build.certificate_pem = ENV['CERT_PEM'] if ENV['CERT_PEM']
- @build.privatekey_pem = ENV['PRIVATE_PEM'] if ENV['PRIVATE_PEM']
- @build.yum_host = ENV['YUM_HOST'] if ENV['YUM_HOST']
- @build.yum_repo_path = ENV['YUM_REPO'] if ENV['YUM_REPO']
- @build.apt_host = ENV['APT_HOST'] if ENV['APT_HOST']
- @build.apt_repo_path = ENV['APT_REPO'] if ENV['APT_REPO']
- @build.pe_version = ENV['PE_VER'] if ENV['PE_VER']
  end
 
+ # Allow environment variables to override the settings we just read in. These
+ # variables are called out specifically because they are likely to require
+ # overriding in at least some cases.
+ #
+ @build.sign_tar = boolean_value(ENV['SIGN_TAR']) if ENV['SIGN_TAR']
+ @build.build_gem = boolean_value(ENV['GEM']) if ENV['GEM']
+ @build.build_dmg = boolean_value(ENV['DMG']) if ENV['DMG']
+ @build.build_ips = boolean_value(ENV['IPS']) if ENV['IPS']
+ @build.build_doc = boolean_value(ENV['DOC']) if ENV['DOC']
+ @build.build_pe = boolean_value(ENV['PE_BUILD']) if ENV['PE_BUILD']
+ @build.debug = boolean_value(ENV['DEBUG']) if ENV['DEBUG']
+ @build.default_cow = ENV['COW'] if ENV['COW']
+ @build.cows = ENV['COW'] if ENV['COW']
+ @build.pbuild_conf = ENV['PBUILDCONF'] if ENV['PBUILDCONF']
+ @build.packager = ENV['PACKAGER'] if ENV['PACKAGER']
+ @build.default_mock = ENV['MOCK'] if ENV['MOCK']
+ @build.final_mocks = ENV['MOCK'] if ENV['MOCK']
+ @build.rc_mocks = ENV['MOCK'] if ENV['MOCK']
+ @build.gpg_name = ENV['GPG_NAME'] if ENV['GPG_NAME']
+ @build.gpg_key = ENV['GPG_KEY'] if ENV['GPG_KEY']
+ @build.certificate_pem = ENV['CERT_PEM'] if ENV['CERT_PEM']
+ @build.privatekey_pem = ENV['PRIVATE_PEM'] if ENV['PRIVATE_PEM']
+ @build.yum_host = ENV['YUM_HOST'] if ENV['YUM_HOST']
+ @build.yum_repo_path = ENV['YUM_REPO'] if ENV['YUM_REPO']
+ @build.apt_host = ENV['APT_HOST'] if ENV['APT_HOST']
+ @build.apt_repo_path = ENV['APT_REPO'] if ENV['APT_REPO']
+ @build.pe_version = ENV['PE_VER'] if ENV['PE_VER']
+ @build.notify = ENV['NOTIFY'] if ENV['NOTIFY']
+
  ##
  # These parameters are either generated dynamically by the project, or aren't
  # sufficiently generic/multi-purpose enough to justify being in
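
The hunk above moves the environment-variable overrides outside the enclosing block so they apply regardless of which branch ran, and adds a NOTIFY override; each assignment only fires when the variable is actually set. A minimal sketch of the idiom, assuming `boolean_value` coerces common truthy strings (its real definition lives in tasks/00_utils.rake and may differ):

    require 'ostruct'

    # Hypothetical helper: treat "true"/"yes"/"1" (any case) as true.
    def boolean_value(var)
      %w[true yes 1].include?(var.to_s.strip.downcase)
    end

    build = OpenStruct.new(build_dmg: false)
    # Override only when the variable is present; otherwise keep the YAML default.
    build.build_dmg = boolean_value(ENV['DMG']) if ENV['DMG']
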
@@ -74,6 +75,7 @@ end
  @build.random_mockroot = ENV['RANDOM_MOCKROOT'] ? boolean_value(ENV['RANDOM_MOCKROOT']) : true
  @keychain_loaded ||= FALSE
  @build_root ||= Dir.pwd
+ @build.build_date ||= timestamp('-')
  ##
  # For backwards compatibilty, we set build:@name to build:@project. @name was
  # renamed to @project in an effort to align the variable names with what has
@@ -106,3 +108,13 @@ end
  self.instance_variable_set("@#{param}", value)
  end
 
+ ##
+ # Issue a deprecation warning if the packaging repo wasn't loaded by the loader
+ unless @using_loader
+ warn "
+ DEPRECATED: The packaging repo tasks are now loaded by 'packaging.rake'.
+ Please update your Rakefile or loading task to load
+ 'ext/packaging/packaging.rake' instead of 'ext/packaging/tasks/*' (25-Jun-2013).
+ "
+ end
+
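
For projects that vendor the packaging repo, the deprecation warning above implies a one-line Rakefile change: stop globbing the individual task files and load the single entry point instead. A sketch of what that change might look like in a consuming Rakefile (the path comes from the warning text; the old glob is illustrative):

    # Old, now-deprecated style: load every task file directly.
    # Dir['ext/packaging/tasks/*.rake'].sort.each { |task| load task }

    # New style: let packaging.rake act as the loader.
    load 'ext/packaging/packaging.rake' if File.exist?('ext/packaging/packaging.rake')
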
@@ -9,12 +9,10 @@
  #
 
  # Path to Binaries (Constants)
- TAR = '/usr/bin/tar'
  CP = '/bin/cp'
  INSTALL = '/usr/bin/install'
  DITTO = '/usr/bin/ditto'
- PACKAGEMAKER = '/Developer/usr/bin/packagemaker'
- SED = '/usr/bin/sed'
+ PKGBUILD = '/usr/bin/pkgbuild'
 
  # Setup task to populate all the variables
  task :setup do
@@ -22,8 +20,7 @@ task :setup do
  begin
  @source_files = data_from_yaml('ext/osx/file_mapping.yaml')
  rescue
- STDERR.puts "Could not load Apple file mappings from 'ext/osx/file_mapping.yaml'"
- exit 1
+ fail "Could not load Apple file mappings from 'ext/osx/file_mapping.yaml'"
  end
  @package_name = @build.project
  @title = "#{@build.project}-#{@build.version}"
@@ -60,11 +57,11 @@ def make_directory_tree
  end
 
  if File.exists?('ext/osx/postflight.erb')
- erb 'ext/osx/postflight.erb', "#{@working_tree["scripts"]}/postflight"
+ erb 'ext/osx/postflight.erb', "#{@working_tree["scripts"]}/postinstall"
  end
 
  if File.exists?('ext/osx/preflight.erb')
- erb 'ext/osx/preflight.erb', "#{@working_tree["scripts"]}/preflight"
+ erb 'ext/osx/preflight.erb', "#{@working_tree["scripts"]}/preinstall"
  end
 
  if File.exists?('ext/osx/prototype.plist.erb')
@@ -89,20 +86,16 @@ def build_dmg
  dmg_format = "#{dmg_format_code} #{dmg_format_option}"
  dmg_file = "#{@title}.dmg"
  package_file = "#{@title}.pkg"
- pm_extra_args = '--verbose --no-recommend --no-relocate'
  package_target_os = '10.4'
 
  # Build .pkg file
- system("sudo #{PACKAGEMAKER} --root #{@working_tree['working']} \
- --id #{@reverse_domain} \
- --filter DS_Store \
- --target #{package_target_os} \
- --title #{@title} \
- --info #{@scratch}/prototype.plist \
+ system("sudo #{PKGBUILD} --root #{@working_tree['working']} \
  --scripts #{@working_tree['scripts']} \
- --resources #{@working_tree['resources']} \
+ --identifier #{@reverse_domain} \
  --version #{@version} \
- #{pm_extra_args} --out #{@working_tree['payload']}/#{package_file}")
+ --install-location / \
+ --ownership preserve \
+ #{@working_tree['payload']}/#{package_file}")
 
  # Build .dmg file
  system("sudo hdiutil create -volname #{@title} \
@@ -152,17 +145,17 @@ def pack_source
  end
  end
 
- # Setup a preflight script and replace variables in the files with
+ # Setup a preinstall script and replace variables in the files with
  # the correct paths.
- if File.exists?("#{@working_tree['scripts']}/preflight")
- chmod(0644, "#{@working_tree['scripts']}/preflight")
- sh "sudo chown root:wheel #{@working_tree['scripts']}/preflight"
+ if File.exists?("#{@working_tree['scripts']}/preinstall")
+ chmod(0755, "#{@working_tree['scripts']}/preinstall")
+ sh "sudo chown root:wheel #{@working_tree['scripts']}/preinstall"
  end
 
- # Setup a postflight from from the erb created earlier
- if File.exists?("#{@working_tree['scripts']}/postflight")
- chmod(0755, "#{@working_tree['scripts']}/postflight")
- sh "sudo chown root:wheel #{@working_tree['scripts']}/postflight"
+ # Setup a postinstall from from the erb created earlier
+ if File.exists?("#{@working_tree['scripts']}/postinstall")
+ chmod(0755, "#{@working_tree['scripts']}/postinstall")
+ sh "sudo chown root:wheel #{@working_tree['scripts']}/postinstall"
  end
 
  # Do a run through first setting the specified permissions then
@@ -234,9 +227,9 @@ if @build.build_dmg
  desc "Task for building an Apple Package"
  task :apple => [:setup] do
  bench = Benchmark.realtime do
- # Test for Packagemaker binary
- raise "Packagemaker must be installed. Please install XCode Tools" unless \
- File.exists?(PACKAGEMAKER)
+ # Test for pkgbuild binary
+ fail "pkgbuild must be installed." unless \
+ File.exists?(PKGBUILD)
 
  make_directory_tree
  pack_source
@@ -10,6 +10,7 @@ module Build
  :apt_repo_url,
  :author,
  :benchmark,
+ :build_date,
  :build_defaults,
  :build_dmg,
  :build_doc,
@@ -64,6 +65,7 @@ module Build
  :jenkins_repo_path,
  :metrics,
  :name,
+ :notify,
  :project,
  :origversion,
  :osx_build_host,
@@ -84,10 +86,6 @@ module Build
  :rpmversion,
  :ref,
  :sign_tar,
- :sles_build_host,
- :sles_repo_path,
- :sles_repo_host,
- :sles_arch_repos,
  :summary,
  :tar_excludes,
  :tar_host,
@@ -150,10 +148,7 @@ module Build
  #
  def params_to_yaml(output_dir=nil)
  dir = output_dir.nil? ? get_temp : output_dir
- unless File.writable?(dir)
- warn "#{dir} does not exist or is not writable, skipping build params write. Exiting.."
- exit 1
- end
+ File.writable?(dir) or fail "#{dir} does not exist or is not writable, skipping build params write. Exiting.."
  params_file = File.join(dir, "#{self.ref}.yaml")
  File.open(params_file, 'w') do |f|
  f.puts params.to_yaml
@@ -1,22 +1,16 @@
  def pdebuild args
  results_dir = args[:work_dir]
  cow = args[:cow]
- devel_repo = args[:devel]
  set_cow_envs(cow)
- update_cow(cow, devel_repo)
- begin
- sh "pdebuild --configfile #{@build.pbuild_conf} \
- --buildresult #{results_dir} \
- --pbuilder cowbuilder -- \
- --basepath /var/cache/pbuilder/#{cow}/"
- rescue Exception => e
- puts e
- handle_method_failure('pdebuild', args)
- end
+ update_cow(cow)
+ sh "pdebuild --configfile #{@build.pbuild_conf} \
+ --buildresult #{results_dir} \
+ --pbuilder cowbuilder -- \
+ --basepath /var/cache/pbuilder/#{cow}/"
+ $?.success? or fail "Failed to build deb with #{cow}!"
  end
 
- def update_cow(cow, is_rc = nil)
- ENV['FOSS_DEVEL'] = is_rc.to_s
+ def update_cow(cow)
  ENV['PATH'] = "/usr/sbin:#{ENV['PATH']}"
  set_cow_envs(cow)
  retry_on_fail(:times => 3) do
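
Several hunks in this release collapse warn-then-exit blocks into a one-line guard, and the rewritten pdebuild helper adds an explicit status check after the shell call. A minimal sketch of both idioms, with illustrative values:

    # "guard or fail": fail raises a RuntimeError, which aborts the rake task
    # with the given message instead of calling exit directly.
    dir = '/tmp/pkg-output'
    File.writable?(dir) or fail "#{dir} does not exist or is not writable"

    # Post-command status check: $? holds the status of the last child process.
    system('pdebuild --version')
    $?.success? or fail 'pdebuild exited non-zero!'
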
@@ -29,8 +23,7 @@ def debuild args
  begin
  sh "debuild --no-lintian -uc -us"
  rescue
- STDERR.puts "Something went wrong. Hopefully the backscroll or #{results_dir}/#{@build.project}_#{@build.debversion}.build file has a clue."
- exit 1
+ fail "Something went wrong. Hopefully the backscroll or #{results_dir}/#{@build.project}_#{@build.debversion}.build file has a clue."
  end
  end
 
@@ -44,17 +37,16 @@ task :prep_deb_tars, :work_dir do |t,args|
  end
  end
 
- task :build_deb, :deb_command, :cow, :devel do |t,args|
+ task :build_deb, :deb_command, :cow do |t,args|
  bench = Benchmark.realtime do
  deb_build = args.deb_command
  cow = args.cow
- devel = args.devel
  work_dir = get_temp
  subdir = 'pe/' if @build.build_pe
  dest_dir = "#{@build_root}/pkg/#{subdir}deb/#{cow.split('-')[1] unless cow.nil?}"
  check_tool(deb_build)
  mkdir_p dest_dir
- deb_args = { :work_dir => work_dir, :cow => cow, :devel => devel}
+ deb_args = { :work_dir => work_dir, :cow => cow}
  Rake::Task[:prep_deb_tars].reenable
  Rake::Task[:prep_deb_tars].invoke(work_dir)
  cd "#{work_dir}/#{@build.project}-#{@build.debversion}" do
@@ -80,13 +72,7 @@ namespace :pl do
  desc "Create a deb from this repo using the default cow #{@build.default_cow}."
  task :deb => "package:tar" do
  check_var('PE_VER', @build.pe_version) if @build.build_pe
- Rake::Task[:build_deb].invoke('pdebuild', @build.default_cow, is_rc?)
- post_metrics if @build.benchmark
- end
-
- task :deb_rc => "package:tar" do
- deprecate("pl:deb_rc", "pl:deb")
- Rake::Task[:build_deb].invoke('pdebuild', @build.default_cow, 'true')
+ Rake::Task[:build_deb].invoke('pdebuild', @build.default_cow)
  post_metrics if @build.benchmark
  end
 
@@ -96,18 +82,8 @@ namespace :pl do
  @build.cows.split(' ').each do |cow|
  Rake::Task["package:tar"].invoke
  Rake::Task[:build_deb].reenable
- Rake::Task[:build_deb].invoke('pdebuild', cow, is_rc?)
+ Rake::Task[:build_deb].invoke('pdebuild', cow)
  end
  post_metrics if @build.benchmark
  end
-
- task :deb_all_rc do
- deprecate("pl:deb_all_rc", "pl:deb_all")
- @build.cows.split(' ').each do |cow|
- Rake::Task["package:tar"].invoke
- Rake::Task[:build_deb].reenable
- Rake::Task[:build_deb].invoke('pdebuild', cow, 'true')
- end
- end
- post_metrics if @build.benchmark
  end
@@ -18,14 +18,14 @@ namespace :pl do
  artifact_directory = File.join(@build.jenkins_repo_path, @build.project, @build.ref)
 
  cmd = 'echo " Checking for deb build artifacts. Will exit if not found.." ; '
- cmd << "[ -d #{artifact_directory}/artifacts/#{prefix}deb ] || exit 0 ; "
+ cmd << "[ -d #{artifact_directory}/artifacts/#{prefix}deb ] || exit 1 ; "
  # Descend into the deb directory and obtain the list of distributions
  # we'll be building repos for
  cmd << "pushd #{artifact_directory}/artifacts/#{prefix}deb && dists=$(ls) && popd; "
  # We do one more check here to make sure we actually have distributions
  # to build for. If deb is empty we want to just exit.
  #
- cmd << '[ -n "$dists" ] || exit 0 ; '
+ cmd << '[ -n "$dists" ] || exit 1 ; '
  cmd << "pushd #{artifact_directory} ; "
 
  cmd << 'echo "Checking for running repo creation. Will wait if detected." ; '
@@ -52,17 +52,19 @@ Description: Apt repository for acceptance testing" >> conf/distributions ; '
  cmd << "$reprepro includedeb $dist ../../#{prefix}deb/$dist/*.deb ; popd ; done ; "
  cmd << "popd ; popd "
 
- remote_ssh_cmd(@build.distribution_server, cmd)
+ begin
+ remote_ssh_cmd(@build.distribution_server, cmd)
+ # Now that we've created our package repositories, we can generate repo
+ # configurations for use with downstream jobs, acceptance clients, etc.
+ Rake::Task["pl:jenkins:generate_deb_repo_configs"].execute
 
- # Always remove the lock file, even if we've failed
- remote_ssh_cmd(@build.distribution_server, "rm -f #{artifact_directory}/.lock")
-
- # Now that we've created our package repositories, we can generate repo
- # configurations for use with downstream jobs, acceptance clients, etc.
- Rake::Task["pl:jenkins:generate_deb_repo_configs"].execute
+ # Now that we've created the repo configs, we can ship them
+ Rake::Task["pl:jenkins:ship_repo_configs"].execute
+ ensure
+ # Always remove the lock file, even if we've failed
+ remote_ssh_cmd(@build.distribution_server, "rm -f #{artifact_directory}/.lock")
+ end
 
- # Now that we've created the repo configs, we can ship them
- Rake::Task["pl:jenkins:ship_repo_configs"].execute
  end
 
  # Generate apt configuration files that point to the repositories created
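
The begin/ensure block introduced above guarantees the remote .lock file is removed whether repo creation and config generation succeed or not. The general shape of the pattern, with hypothetical stand-ins for the real remote_ssh_cmd helper and its arguments:

    # Hypothetical stand-in for the packaging repo's remote_ssh_cmd helper.
    def remote_ssh_cmd(host, cmd)
      system('ssh', host, cmd) or fail "remote command failed on #{host}"
    end

    begin
      remote_ssh_cmd('builds.example.com', './create-repos.sh')
    ensure
      # Runs on success, on failure, and when an exception is raised above.
      remote_ssh_cmd('builds.example.com', 'rm -f /opt/artifacts/.lock')
    end
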
@@ -83,20 +85,15 @@ Description: Apt repository for acceptance testing" >> conf/distributions ; '
  # We use wget to obtain a directory listing of what are presumably our deb repos
  #
  repo_urls = []
- if wget = find_tool("wget")
- # First test if the directory even exists
- #
- wget_results = %x{#{wget} --spider -r -l 1 --no-parent #{base_url} 2>&1}
- if $?.success?
- # We want to exclude index and robots files and only include the http: prefixed elements
- repo_urls = wget_results.split.uniq.reject{|x| x=~ /\?|index|robots/}.select{|x| x =~ /http:/}.map{|x| x.chomp('/')}
- else
- puts "No debian repos available for #{@build.project} at #{@build.ref}."
- exit 0
- end
+ wget = find_tool("wget") or fail "Could not find `wget` tool. This is needed for composing the debian repo configurations. Install `wget` and try again."
+ # First test if the directory even exists
+ #
+ wget_results = %x{#{wget} --spider -r -l 1 --no-parent #{base_url} 2>&1}
+ if $?.success?
+ # We want to exclude index and robots files and only include the http: prefixed elements
+ repo_urls = wget_results.split.uniq.reject{|x| x=~ /\?|index|robots/}.select{|x| x =~ /http:/}.map{|x| x.chomp('/')}
  else
- warn "Could not find `wget` tool. This is needed for composing the debian repo configurations. Install `wget` and try again."
- exit 0
+ fail "No debian repos available for #{@build.project} at #{@build.ref}."
  end
 
  # Create apt sources.list files that can be added to hosts for installing
@@ -118,18 +115,13 @@ Description: Apt repository for acceptance testing" >> conf/distributions ; '
 
  desc "Retrieve debian apt repository configs for this sha"
  task :deb_repo_configs => "pl:fetch" do
- if wget = find_tool("wget")
- mkdir_p "pkg/repo_configs"
- config_url = "#{@build.builds_server}/#{@build.project}/#{@build.ref}/repo_configs/deb/"
- begin
- sh "#{wget} -r -np -nH --cut-dirs 3 -P pkg/repo_configs --reject 'index*' #{config_url}"
- rescue
- warn "Couldn't retrieve deb apt repo configs. See preceding http response for more info."
- exit 1
- end
- else
- warn "Could not find `wget` tool! wget is required to download the repository configs."
- exit 1
+ wget = find_tool("wget") or fail "Could not find `wget` tool. This is needed for composing the debian repo configurations. Install `wget` and try again."
+ mkdir_p "pkg/repo_configs"
+ config_url = "#{@build.builds_server}/#{@build.project}/#{@build.ref}/repo_configs/deb/"
+ begin
+ sh "#{wget} -r -np -nH --cut-dirs 3 -P pkg/repo_configs --reject 'index*' #{config_url}"
+ rescue
+ fail "Couldn't retrieve deb apt repo configs. See preceding http response for more info."
  end
  end
  end
@@ -42,8 +42,7 @@ namespace :pl do
  invoke_task("pl:load_extras", tempdir)
  rm_rf(tempdir)
  rescue
- STDERR.puts "There was an error fetching the builder extras data."
- exit 1
+ fail "There was an error fetching the builder extras data."
  end
  end
  end
@@ -91,18 +91,10 @@ namespace :pl do
  #
  task :post_build, :build_task do |t, args|
  # Check for a dirty tree before allowing a remote build that is doomed to unexpected results
- if source_dirty?
- warn "The source tree is dirty, e.g. there are uncommited changes. Please commit/discard changes and try again."
- exit 1
- end
+ fail_on_dirty_source
 
  # We use JSON for parsing the json part of the submission to JSON
- begin
- require 'json'
- rescue LoadError
- warn "Couldn't require 'json'. JSON is required for sanely generating the string we curl to Jenkins."
- exit 1
- end
+ require_library_or_fail 'json'
 
  build_task = args.build_task
  ##
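
The inline checks above are replaced by two helpers from the shared utilities (tasks/00_utils.rake also changed in this release, but its contents are not shown in this diff). Hypothetical sketches of what such helpers could look like; the real definitions may differ:

    # Hypothetical predicate; the packaging repo has its own implementation.
    def source_dirty?
      !`git status --porcelain 2>/dev/null`.empty?
    end

    # Hypothetical: abort the task when the working tree has local changes.
    def fail_on_dirty_source
      fail 'The source tree is dirty. Commit or discard changes and try again.' if source_dirty?
    end

    # Hypothetical: require a library, converting a LoadError into a task failure.
    def require_library_or_fail(library)
      require library
    rescue LoadError
      fail "Could not load #{library}. #{library} is required by the packaging repo."
    end
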
@@ -121,7 +113,6 @@ namespace :pl do
  when /dmg|apple/ then "dmg"
  when /gem/ then "gem"
  when /tar/ then "tar"
- when /sles/ then "sles"
  else raise "Could not determine build type for #{build_task}"
  end
  #
@@ -172,16 +163,18 @@ namespace :pl do
 
  # Call out to the curl_form_data utility method in 00_utils.rake
  #
- if curl_form_data(trigger_url, args)
- puts "Build submitted. To view your build results, go to #{job_url}"
- puts "Your packages will be available at #{@build.distribution_server}:#{@build.jenkins_repo_path}/#{@build.project}/#{@build.ref}"
- else
- warn "An error occurred submitting the job to jenkins. Take a look at the preceding http response for more info."
+ begin
+ if curl_form_data(trigger_url, args)
+ puts "Build submitted. To view your build results, go to #{job_url}"
+ puts "Your packages will be available at #{@build.distribution_server}:#{@build.jenkins_repo_path}/#{@build.project}/#{@build.ref}"
+ else
+ fail "An error occurred submitting the job to jenkins. Take a look at the preceding http response for more info."
+ end
+ ensure
+ # Clean up after ourselves
+ rm bundle
+ rm properties
  end
-
- # Clean up after ourselves
- rm bundle
- rm properties
  end
  end
  end
@@ -217,7 +210,7 @@ namespace :pl do
  end
 
  # This does the mocks in parallel
- desc "Queue pl:mock-all on jenkins builder"
+ desc "Queue pl:mock_all on jenkins builder"
  task :mock_all => "pl:fetch" do
  @build.final_mocks.split(' ').each do |mock|
  @build.default_mock = mock
@@ -226,17 +219,6 @@ namespace :pl do
  end
  end
 
- desc "Jenkins UBER build: build all the things with jenkins"
- task :uber_build do
- uber_tasks = ["jenkins:deb_all", "jenkins:mock_all", "jenkins:tar"]
- uber_tasks << "jenkins:dmg" if @build.build_dmg
- uber_tasks << "jenkins:gem" if @build.build_gem
- uber_tasks.map { |t| "pl:#{t}" }.each do |t|
- invoke_task(t)
- sleep 5
- end
- end
-
  desc "Retrieve packages built by jenkins, sign, and ship all!"
  task :uber_ship => "pl:fetch" do
  uber_tasks = ["jenkins:retrieve", "jenkins:sign_all", "uber_ship", "remote:freight", "remote:update_yum_repo" ]
@@ -247,21 +229,16 @@ namespace :pl do
  end
 
  ##
- # If this is a PE project, we want PE tasks as well. However, because the
- # PE tasks use :remote as their default (e.g., not namespaced under remote)
- # we have to explicily use the "local" tasks, since these will be local
- # builds on jenkins agents. Also, we support building on SLES for PE, so we
- # add a sles task.
+ # If this is a PE project, we want PE tasks as well.
  #
  if @build.build_pe
  namespace :pe do
  namespace :jenkins do
- tasks << "sles"
  tasks.each do |build_task|
  desc "Queue pe:#{build_task} build on jenkins builder"
  task build_task => "pl:fetch" do
  check_var("PE_VER", @build.pe_version)
- invoke_task("pl:jenkins:post_build", "pe:local_#{build_task}")
+ invoke_task("pl:jenkins:post_build", "pe:#{build_task}")
  end
  end
 
@@ -274,26 +251,17 @@ if @build.build_pe
  check_var("PE_VER", @build.pe_version)
  @build.cows.split(' ').each do |cow|
  @build.default_cow = cow
- invoke_task("pl:jenkins:post_build", "pe:local_deb")
+ invoke_task("pl:jenkins:post_build", "pe:deb")
  sleep 5
  end
  end
 
  # This does the mocks in parallel
- desc "Queue pe:mock-all on jenkins builder"
+ desc "Queue pe:mock_all on jenkins builder"
  task :mock_all => "pl:fetch" do
  @build.final_mocks.split(' ').each do |mock|
  @build.default_mock = mock
- invoke_task("pl:jenkins:post_build", "pe:local_mock")
- sleep 5
- end
- end
-
- desc "Queue builds of all PE packages for this project in Jenkins"
- task :uber_build => "pl:fetch" do
- check_var("PE_VER", @build.pe_version)
- ["tar", "deb_all", "mock_all", "sles"].each do |task|
- invoke_task("pe:jenkins:#{task}")
+ invoke_task("pl:jenkins:post_build", "pe:mock")
  sleep 5
  end
  end
@@ -333,15 +301,13 @@ namespace :pl do
  namespace :jenkins do
  desc "Trigger a jenkins uri with SHA of HEAD as a string param, requires \"URI\""
  task :post, :uri do |t, args|
- uri = args.uri || ENV['URI']
- raise "pl:jenkins:post requires a URI, either via URI= or pl:jenkin:post[URI]" if uri.nil?
+ uri = (args.uri or ENV['URI']) or fail "pl:jenkins:post requires a URI, either via URI= or pl:jenkin:post[URI]"
 
  # We use JSON for parsing the json part of the submission.
  begin
  require 'json'
  rescue LoadError
- warn "Couldn't require 'json'. JSON is required for sanely generating the string we curl to Jenkins."
- exit 1
+ fail "Couldn't require 'json'. JSON is required for sanely generating the string we curl to Jenkins."
  end
 
  # Assemble the JSON string for the JSON parameter
@@ -357,7 +323,7 @@ namespace :pl do
  if curl_form_data(uri, args)
  puts "Job triggered at #{uri}."
  else
- puts "An error occurred attempting to trigger the job at #{uri}. Please see the preceding http response for more info."
+ fail "An error occurred attempting to trigger the job at #{uri}. Please see the preceding http response for more info."
  end
  end
  end