packaging 0.103.0 → 0.104.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 5f86a8438548dce924f97c559885a4b5d9c94bc88449717e8781d1639683efb4
- data.tar.gz: 59ef7b771215acc69595e42f705ceca186f2467141ebda0a996ea8d52fc4945c
+ metadata.gz: '0489f147bdc92caf1f1047d3a0cfac453b125222e18dc9547b78c9aeb1e69122'
+ data.tar.gz: 298a35804a42f8eaaccaf1d740b0cb50ed3bc19a53aeb4c1b5689ec00a7bc747
  SHA512:
- metadata.gz: b2609ced8514dce238e41345e024a1740f1dd4e62441a6f6be9ed91645fbc8db90151f8e367c9d86bc0e5907621589981f94a6cbeaea60e4ad97496b8f2c19ab
- data.tar.gz: 1afba4a382a78490faee8e2f4b9974ea9ef80a9eec1a23c4733aaef86b8fe360902f440aea43daa307cf7eb84c3705db7d885ab7af2df948ebc5de1933956845
+ metadata.gz: a331668242a0d7c1ce4eb1190bdb5cf6decc557a4082e6ce833298e39bfb4b3da82ee78e6e7a8d6e4b429fcfab8cc34e051d7183a552dc985f90ff38bb200ad6
+ data.tar.gz: '084c6c13bec16b7714d16861e906e030f3abd0c6a576545a984cfaf72b9791bf4935f21d13d4817d652d755d91f1c65a292a30e4ff6d7af0296f5760a44fe9a8'
@@ -80,7 +80,7 @@ module Pkg
 
  dir = "/opt/jenkins-builds/#{self.project}/#{self.ref}"
  cmd = "if [ -s \"#{dir}/artifacts\" ]; then cd #{dir};"\
- "find ./artifacts/ -mindepth 2 -type f; fi"
+ "find ./artifacts -mindepth 2 -type f; fi"
  artifacts, _ = Pkg::Util::Net.remote_execute(
  self.builds_server,
  cmd,
@@ -95,6 +95,7 @@ module Pkg
  # the correct place. For 5.x and 6.x release streams the f prefix
  # has been removed and so tag will equal original_tag
  original_tag = Pkg::Paths.tag_from_artifact_path(artifact)
+ fail "Error: unrecognized artifact \"#{artifact}\"" if original_tag.nil?
 
  # Remove the f-prefix from the fedora platform tag keys so that
  # beaker can rely on consistent keys once we rip out the f for good
@@ -203,15 +204,18 @@ module Pkg
  # string. Accept an argument for the write target file. If not specified,
  # the name of the params file is the current git commit sha or tag.
  #
- def config_to_yaml(target = nil)
- file = "#{self.ref}.yaml"
- target = target.nil? ? File.join(Pkg::Util::File.mktemp, "#{self.ref}.yaml") : File.join(target, file)
- Pkg::Util::File.file_writable?(File.dirname(target), :required => true)
- File.open(target, 'w') do |f|
+ def config_to_yaml(destination_directory = nil)
+ destination_directory = Pkg::Util::File.mktemp if destination_directory.nil?
+ config_yaml_file_name = "#{self.ref}.yaml"
+
+ config_yaml_path = File.join(destination_directory, config_yaml_file_name)
+
+ Pkg::Util::File.file_writable?(File.dirname(config_yaml_path), :required => true)
+ File.open(config_yaml_path, 'w') do |f|
  f.puts self.config_to_hash.to_yaml
  end
- puts target
- target
+ puts config_yaml_path
+ return config_yaml_path
  end
 
  ##
@@ -36,7 +36,8 @@ module Pkg::Repo
  Dir.chdir(File.join('pkg', local_target)) do
  puts "Info: Archiving #{repo_location} as #{archive_name}"
  target_tarball = File.join('repos', "#{archive_name}.tar.gz")
- tar_command = "#{tar} --owner=0 --group=0 --create --gzip --file #{target_tarball} #{repo_location}"
+ tar_command = %W[#{tar} --owner=0 --group=0 --create --gzip
+ --file #{target_tarball} #{repo_location}].join(' ')
  stdout, _, _ = Pkg::Util::Execution.capture3(tar_command)
  return stdout
  end
@@ -62,12 +63,12 @@ module Pkg::Repo
  next
  end
 
- tar_action = "--create"
- if File.exist?(all_repos_tarball_name)
- tar_action = "--update"
- end
+ tar_action = '--create'
+ tar_action = '--update' if File.exist?(all_repos_tarball_name)
+
+ tar_command = %W[#{tar} --owner=0 --group=0 #{tar_action}
+ --file #{all_repos_tarball_name} #{repo_tarball_path}].join(' ')
 
- tar_command = "#{tar} --owner=0 --group=0 #{tar_action} --file #{all_repos_tarball_name} #{repo_tarball_path}"
  stdout, _, _ = Pkg::Util::Execution.capture3(tar_command)
  puts stdout
  end
@@ -117,7 +118,8 @@ module Pkg::Repo
  )
  return stdout.split
  rescue => e
- fail "Error: Could not retrieve directories that contain #{pkg_ext} packages in #{Pkg::Config.distribution_server}:#{artifact_directory}"
+ fail "Error: Could not retrieve directories that contain #{pkg_ext} " \
+ "packages in #{Pkg::Config.distribution_server}:#{artifact_directory}: #{e}"
  end
 
  def populate_repo_directory(artifact_parent_directory)
@@ -126,7 +128,8 @@ module Pkg::Repo
  cmd << 'rsync --archive --verbose --one-file-system --ignore-existing artifacts/ repos/ '
  Pkg::Util::Net.remote_execute(Pkg::Config.distribution_server, cmd)
  rescue => e
- fail "Error: Could not populate repos directory in #{Pkg::Config.distribution_server}:#{artifact_parent_directory}"
+ fail "Error: Could not populate repos directory in " \
+ "#{Pkg::Config.distribution_server}:#{artifact_parent_directory}: #{e}"
  end
 
  def argument_required?(argument_name, repo_command)
@@ -134,12 +137,12 @@ module Pkg::Repo
  end
 
  def update_repo(remote_host, command, options = {})
- fail_message = "Error: Missing required argument '%s', update your build_defaults?"
+ fail_message = "Error: Missing required argument '%s', perhaps update build_defaults?"
  [:repo_name, :repo_path, :repo_host, :repo_url].each do |option|
  fail fail_message % option.to_s if argument_required?(option.to_s, command) && !options[option]
  end
 
- whitelist = {
+ repo_configuration = {
  __REPO_NAME__: options[:repo_name],
  __REPO_PATH__: options[:repo_path],
  __REPO_HOST__: options[:repo_host],
@@ -149,7 +152,7 @@ module Pkg::Repo
  }
  Pkg::Util::Net.remote_execute(
  remote_host,
- Pkg::Util::Misc.search_and_replace(command, whitelist))
+ Pkg::Util::Misc.search_and_replace(command, repo_configuration))
  end
  end
  end
@@ -0,0 +1,8 @@
+ # Utility methods for handling Apt staging server.
+
+ module Pkg::Util::AptStagingServer
+ def self.send_packages(pkg_directory, apt_component = 'stable')
+ %x(apt-stage-artifacts --component=#{apt_component} #{pkg_directory})
+ fail 'APT artifact staging failed.' unless $CHILD_STATUS.success?
+ end
+ end
@@ -0,0 +1,17 @@
+ # Utility methods for handling miscellaneous build metadata
+
+ require 'fileutils'
+
+ module Pkg::Util::BuildMetadata
+ class << self
+ def add_misc_json_files(target_directory)
+ misc_json_files = Dir.glob('ext/build_metadata*.json')
+ misc_json_files.each do |source_file|
+ target_file = File.join(
+ target_directory, "#{Pkg::Config.ref}.#{File.basename(source_file)}"
+ )
+ FileUtils.cp(source_file, target_file)
+ end
+ end
+ end
+ end
@@ -0,0 +1,42 @@
+ # Utility methods for the older distribution server
+
+ require 'fileutils'
+
+ module Pkg::Util::DistributionServer
+ class << self
+ def send_packages(local_source_directory, remote_target_directory)
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
+ Pkg::Util::Net.remote_execute(
+ Pkg::Config.distribution_server,
+ "mkdir --mode=775 --parents #{remote_target_directory}"
+ )
+ Pkg::Util::Net.rsync_to(
+ "#{local_source_directory}/",
+ Pkg::Config.distribution_server, "#{remote_target_directory}/",
+ extra_flags: ['--ignore-existing', '--exclude repo_configs']
+ )
+ end
+
+ # In order to get a snapshot of what this build looked like at the time
+ # of shipping, we also generate and ship the params file
+ #
+ Pkg::Config.config_to_yaml(local_source_directory)
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
+ Pkg::Util::Net.rsync_to(
+ "#{local_source_directory}/#{Pkg::Config.ref}.yaml",
+ Pkg::Config.distribution_server, "#{remote_target_directory}/",
+ extra_flags: ["--exclude repo_configs"]
+ )
+ end
+
+ # If we just shipped a tagged version, we want to make it immutable
+ files = Dir.glob("#{local_source_directory}/**/*")
+ .select { |f| File.file?(f) and !f.include? "#{Pkg::Config.ref}.yaml" }
+ .map { |f| "#{remote_target_directory}/#{f.sub(/^#{local_source_directory}\//, '')}" }
+
+ Pkg::Util::Net.remote_set_ownership(Pkg::Config.distribution_server, 'root', 'release', files)
+ Pkg::Util::Net.remote_set_permissions(Pkg::Config.distribution_server, '0664', files)
+ Pkg::Util::Net.remote_set_immutable(Pkg::Config.distribution_server, files)
+ end
+ end
+ end
@@ -0,0 +1,26 @@
+ # Utility methods for handling ezbake
+
+ require 'fileutils'
+
+ module Pkg::Util::EZbake
+ class << self
+ def add_manifest(target_directory)
+ ezbake_manifest = File.join('ext', 'ezbake.manifest')
+ ezbake_yaml = File.join('ext', 'ezbake.manifest.yaml')
+
+ if File.exist?(ezbake_manifest)
+ FileUtils.cp(
+ ezbake_manifest,
+ File.join(target_directory, "#{Pkg::Config.ref}.ezbake.manifest")
+ )
+ end
+
+ if File.exists?(ezbake_yaml)
+ FileUtils.cp(
+ ezbake_yaml,
+ File.join(target_directory, "#{Pkg::Config.ref}.ezbake.manifest.yaml")
+ )
+ end
+ end
+ end
+ end
@@ -159,7 +159,7 @@ module Pkg::Util::Net
  raise(ArgumentError, "Cannot sync path '#{origin}' because both origin_host and target_host are nil. Perhaps you need to set TEAM=release ?") unless
  options[:origin_host] || options[:target_host]
 
- cmd = %W(
+ cmd = %W[
  #{options[:bin]}
  --recursive
  --hard-links
@@ -169,7 +169,7 @@ module Pkg::Util::Net
  --no-perms
  --no-owner
  --no-group
- ) + [*options[:extra_flags]]
+ ] + [*options[:extra_flags]]
 
  cmd << '--dry-run' if options[:dryrun]
  cmd << Pkg::Util.pseudo_uri(path: origin, host: options[:origin_host])
@@ -1,14 +1,20 @@
  # Module for shipping all packages to places
+
+ require 'English'
  require 'tmpdir'
+
  module Pkg::Util::Ship
  module_function
 
- def collect_packages(pkg_exts, excludes = []) # rubocop:disable Metrics/MethodLength
+ def collect_packages(pkg_exts, excludes = [])
  pkgs = pkg_exts.map { |ext| Dir.glob(ext) }.flatten
  return [] if pkgs.empty?
- excludes.each do |exclude|
- pkgs.delete_if { |p| p.match(exclude) }
- end if excludes
+
+ if excludes
+ excludes.each do |exclude|
+ pkgs.delete_if { |p| p.match(exclude) }
+ end
+ end
  if pkgs.empty?
  $stdout.puts "No packages with (#{pkg_exts.join(', ')}) extensions found staged in 'pkg'"
  $stdout.puts "Maybe your excludes argument (#{excludes}) is too restrictive?"
@@ -59,13 +65,13 @@ module Pkg::Util::Ship
  # false (most paths will be platform dependent), but set to true for gems
  # and tarballs since those just land directly under /opt/downloads/<project>
  #
- # rubocop:disable Metrics/MethodLength, Metrics/AbcSize
  def ship_pkgs(pkg_exts, staging_server, remote_path, opts = {})
  options = {
  excludes: [],
  chattr: true,
  platform_independent: false,
- nonfinal: false }.merge(opts)
+ nonfinal: false
+ }.merge(opts)
 
  # First find the packages to be shipped. We must find them before moving
  # to our temporary staging directory
@@ -73,35 +79,39 @@ module Pkg::Util::Ship
  return false if local_packages.empty?
 
  tmpdir = Dir.mktmpdir
- staged_pkgs = reorganize_packages(local_packages, tmpdir, options[:platform_independent], options[:nonfinal])
+ staged_pkgs = reorganize_packages(
+ local_packages, tmpdir, options[:platform_independent], options[:nonfinal]
+ )
 
  puts staged_pkgs.sort
  puts "Do you want to ship the above files to (#{staging_server})?"
- if Pkg::Util.ask_yes_or_no
- extra_flags = ['--ignore-existing', '--delay-updates']
- extra_flags << '--dry-run' if ENV['DRYRUN']
-
- staged_pkgs.each do |pkg|
- Pkg::Util::Execution.retry_on_fail(times: 3) do
- sub_string = 'pkg'
- remote_pkg = pkg.sub(sub_string, remote_path)
- remote_basepath = File.dirname(remote_pkg)
- Pkg::Util::Net.remote_execute(staging_server, "mkdir -p #{remote_basepath}")
- Pkg::Util::Net.rsync_to(
- File.join(tmpdir, pkg),
- staging_server,
- remote_basepath,
- extra_flags: extra_flags
- )
-
- Pkg::Util::Net.remote_set_ownership(staging_server, 'root', 'release', [remote_basepath, remote_pkg])
- Pkg::Util::Net.remote_set_permissions(staging_server, '775', [remote_basepath])
- Pkg::Util::Net.remote_set_permissions(staging_server, '0664', [remote_pkg])
- Pkg::Util::Net.remote_set_immutable(staging_server, [remote_pkg]) if options[:chattr]
- end
+ return false unless Pkg::Util.ask_yes_or_no
+
+ extra_flags = %w(--ignore-existing --delay-updates)
+ extra_flags << '--dry-run' if ENV['DRYRUN']
+
+ staged_pkgs.each do |pkg|
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
+ sub_string = 'pkg'
+ remote_pkg = pkg.sub(sub_string, remote_path)
+ remote_basepath = File.dirname(remote_pkg)
+ Pkg::Util::Net.remote_execute(staging_server, "mkdir -p #{remote_basepath}")
+ Pkg::Util::Net.rsync_to(
+ File.join(tmpdir, pkg),
+ staging_server,
+ remote_basepath,
+ extra_flags: extra_flags
+ )
+
+ Pkg::Util::Net.remote_set_ownership(
+ staging_server, 'root', 'release', [remote_basepath, remote_pkg]
+ )
+ Pkg::Util::Net.remote_set_permissions(staging_server, '775', [remote_basepath])
+ Pkg::Util::Net.remote_set_permissions(staging_server, '0664', [remote_pkg])
+ Pkg::Util::Net.remote_set_immutable(staging_server, [remote_pkg]) if options[:chattr]
  end
- return true
  end
+ return true
  end
 
  def ship_rpms(local_staging_directory, remote_path, opts = {})
@@ -123,6 +133,8 @@ module Pkg::Util::Ship
  ship_pkgs(things_to_ship, Pkg::Config.apt_signing_server, remote_path, opts)
  end
 
+
+
 
  def ship_svr4(local_staging_directory, remote_path, opts = {})
  ship_pkgs(["#{local_staging_directory}/**/*.pkg.gz"], Pkg::Config.svr4_host, remote_path, opts)
@@ -132,33 +144,63 @@ module Pkg::Util::Ship
  end
 
  def ship_dmg(local_staging_directory, remote_path, opts = {})
- packages_have_shipped = ship_pkgs(["#{local_staging_directory}/**/*.dmg"],
- Pkg::Config.dmg_staging_server, remote_path, opts)
-
- if packages_have_shipped
- Pkg::Platforms.platform_tags_for_package_format('dmg').each do |platform_tag|
- # Create the latest symlink for the current supported repo
- Pkg::Util::Net.remote_create_latest_symlink(
- Pkg::Config.project,
- Pkg::Paths.artifacts_path(platform_tag, remote_path, opts[:nonfinal]),
- 'dmg'
- )
- end
+ packages_have_shipped = ship_pkgs(
+ ["#{local_staging_directory}/**/*.dmg"],
+ Pkg::Config.dmg_staging_server, remote_path, opts
+ )
+
+ return unless packages_have_shipped
+
+ Pkg::Platforms.platform_tags_for_package_format('dmg').each do |platform_tag|
+ # Create the latest symlink for the current supported repo
+ Pkg::Util::Net.remote_create_latest_symlink(
+ Pkg::Config.project,
+ Pkg::Paths.artifacts_path(platform_tag, remote_path, opts[:nonfinal]),
+ 'dmg'
+ )
  end
  end
 
  def ship_swix(local_staging_directory, remote_path, opts = {})
- ship_pkgs(["#{local_staging_directory}/**/*.swix"], Pkg::Config.swix_staging_server, remote_path, opts)
+ ship_pkgs(
+ ["#{local_staging_directory}/**/*.swix"],
+ Pkg::Config.swix_staging_server,
+ remote_path,
+ opts
+ )
  end
 
  def ship_msi(local_staging_directory, remote_path, opts = {})
- packages_have_shipped = ship_pkgs(["#{local_staging_directory}/**/*.msi"], Pkg::Config.msi_staging_server, remote_path, opts)
+ packages_have_shipped = ship_pkgs(
+ ["#{local_staging_directory}/**/*.msi"],
+ Pkg::Config.msi_staging_server,
+ remote_path,
+ opts
+ )
+ return unless packages_have_shipped
 
- if packages_have_shipped
- # Create the symlinks for the latest supported repo
- Pkg::Util::Net.remote_create_latest_symlink(Pkg::Config.project, Pkg::Paths.artifacts_path(Pkg::Platforms.generic_platform_tag('windows'), remote_path, opts[:nonfinal]), 'msi', arch: 'x64')
- Pkg::Util::Net.remote_create_latest_symlink(Pkg::Config.project, Pkg::Paths.artifacts_path(Pkg::Platforms.generic_platform_tag('windows'), remote_path, opts[:nonfinal]), 'msi', arch: 'x86')
- end
+ # Create the symlinks for the latest supported repo
+ Pkg::Util::Net.remote_create_latest_symlink(
+ Pkg::Config.project,
+ Pkg::Paths.artifacts_path(
+ Pkg::Platforms.generic_platform_tag('windows'),
+ remote_path,
+ opts[:nonfinal]
+ ),
+ 'msi',
+ arch: 'x64'
+ )
+
+ Pkg::Util::Net.remote_create_latest_symlink(
+ Pkg::Config.project,
+ Pkg::Paths.artifacts_path(
+ Pkg::Platforms.generic_platform_tag('windows'),
+ remote_path,
+ opts[:nonfinal]
+ ),
+ 'msi',
+ arch: 'x86'
+ )
  end
 
  def ship_gem(local_staging_directory, remote_path, opts = {})
@@ -166,44 +208,32 @@ module Pkg::Util::Ship
  end
 
  def ship_tar(local_staging_directory, remote_path, opts = {})
- ship_pkgs(["#{local_staging_directory}/*.tar.gz*"], Pkg::Config.tar_staging_server, remote_path, opts)
+ ship_pkgs(
+ ["#{local_staging_directory}/*.tar.gz*"],
+ Pkg::Config.tar_staging_server,
+ remote_path,
+ opts
+ )
  end
 
  def rolling_repo_link_command(platform_tag, repo_path, nonfinal = false)
- base_path, link_path = Pkg::Paths.artifacts_base_path_and_link_path(platform_tag, repo_path, nonfinal)
+ base_path, link_path = Pkg::Paths.artifacts_base_path_and_link_path(
+ platform_tag,
+ repo_path,
+ nonfinal
+ )
 
  if link_path.nil?
  puts "No link target set, not creating rolling repo link for #{base_path}"
  return nil
  end
-
- cmd = <<-CMD
- if [ ! -d #{base_path} ] ; then
- echo "Link target '#{base_path}' does not exist; skipping"
- exit 0
- fi
- # If it's a link but pointing to the wrong place, remove the link
- # This is likely to happen around the transition times, like puppet5 -> puppet6
- if [ -L #{link_path} ] && [ ! #{base_path} -ef #{link_path} ] ; then
- rm #{link_path}
- # This is the link you're looking for, nothing to see here
- elif [ -L #{link_path} ] ; then
- exit 0
- # Don't want to delete it if it isn't a link, that could be destructive
- # So, fail!
- elif [ -e #{link_path} ] ; then
- echo "#{link_path} exists but isn't a link, I don't know what to do with this" >&2
- exit 1
- fi
- ln -s #{base_path} #{link_path}
- CMD
  end
 
  def create_rolling_repo_link(platform_tag, staging_server, repo_path, nonfinal = false)
  command = rolling_repo_link_command(platform_tag, repo_path, nonfinal)
 
  Pkg::Util::Net.remote_execute(staging_server, command) unless command.nil?
- rescue => e
+ rescue StandardError => e
  fail "Failed to create rolling repo link for '#{platform_tag}'.\n#{e}\n#{e.backtrace}"
  end
 
@@ -214,10 +244,33 @@ module Pkg::Util::Ship
  swix_path = Pkg::Paths.remote_repo_base(nonfinal: nonfinal, package_format: 'swix')
  msi_path = Pkg::Paths.remote_repo_base(nonfinal: nonfinal, package_format: 'msi')
 
- create_rolling_repo_link(Pkg::Platforms.generic_platform_tag('el'), Pkg::Config.yum_staging_server, yum_path, nonfinal)
- create_rolling_repo_link(Pkg::Platforms.generic_platform_tag('osx'), Pkg::Config.dmg_staging_server, dmg_path, nonfinal)
- create_rolling_repo_link(Pkg::Platforms.generic_platform_tag('eos'), Pkg::Config.swix_staging_server, swix_path, nonfinal)
- create_rolling_repo_link(Pkg::Platforms.generic_platform_tag('windows'), Pkg::Config.msi_staging_server, msi_path, nonfinal)
+ create_rolling_repo_link(
+ Pkg::Platforms.generic_platform_tag('el'),
+ Pkg::Config.yum_staging_server,
+ yum_path,
+ nonfinal
+ )
+
+ create_rolling_repo_link(
+ Pkg::Platforms.generic_platform_tag('osx'),
+ Pkg::Config.dmg_staging_server,
+ dmg_path,
+ nonfinal
+ )
+
+ create_rolling_repo_link(
+ Pkg::Platforms.generic_platform_tag('eos'),
+ Pkg::Config.swix_staging_server,
+ swix_path,
+ nonfinal
+ )
+
+ create_rolling_repo_link(
+ Pkg::Platforms.generic_platform_tag('windows'),
+ Pkg::Config.msi_staging_server,
+ msi_path,
+ nonfinal
+ )
 
  # We need to iterate through all the supported platforms here because of
  # how deb repos are set up. Each codename will have its own link from the
@@ -231,7 +284,12 @@ module Pkg::Util::Ship
  apt_path = Pkg::Config.nonfinal_apt_repo_staging_path
  end
  Pkg::Platforms.codenames.each do |codename|
- create_rolling_repo_link(Pkg::Platforms.codename_to_tags(codename)[0], Pkg::Config.apt_signing_server, apt_path, nonfinal)
+ create_rolling_repo_link(
+ Pkg::Platforms.codename_to_tags(codename)[0],
+ Pkg::Config.apt_signing_server,
+ apt_path,
+ nonfinal
+ )
  end
  end
 
@@ -0,0 +1,38 @@
+ # Utility methods for handling windows
+
+ require 'fileutils'
+
+ module Pkg::Util::Windows
+ class << self
+ def add_msi_links(local_source_directory)
+ {
+ 'windows' => ['x86', 'x64'],
+ 'windowsfips' => ['x64']
+ }.each_pair do |platform, archs|
+ packages = Dir["#{local_source_directory}/#{platform}/*"]
+
+ archs.each do |arch|
+ package_version = Pkg::Util::Git.describe.tr('-', '.')
+ package_filename = File.join(
+ local_source_directory, platform,
+ "#{Pkg::Config.project}-#{package_version}-#{arch}.msi"
+ )
+ link_filename = File.join(
+ local_source_directory,
+ platform,
+ "#{Pkg::Config.project}-#{arch}.msi"
+ )
+
+ next unless !packages.include?(link_filename) && packages.include?(package_filename)
+
+ # Dear future code spelunkers:
+ # Using symlinks instead of hard links causes failures when we try
+ # to set these files to be immutable. Also be wary of whether the
+ # linking utility you're using expects the source path to be relative
+ # to the link target or pwd.
+ FileUtils.ln(package_filename, link_filename)
+ end
+ end
+ end
+ end
+ end
@@ -4,8 +4,12 @@ module Pkg::Util
  require 'benchmark'
  require 'base64'
  require 'io/console'
+ require 'packaging/util/apt_staging_server'
+ require 'packaging/util/build_metadata'
  require 'packaging/util/date'
+ require 'packaging/util/distribution_server'
  require 'packaging/util/execution'
+ require 'packaging/util/ezbake'
  require 'packaging/util/file'
  require 'packaging/util/git'
  require 'packaging/util/gpg'
@@ -19,6 +23,7 @@ module Pkg::Util
  require 'packaging/util/tool'
  require 'packaging/util/rake_utils'
  require 'packaging/util/version'
+ require 'packaging/util/windows'
  require 'packaging/util/git_tags'
 
  def self.boolean_value(var)
data/tasks/jenkins.rake CHANGED
@@ -286,7 +286,7 @@ namespace :pl do
  ship_nightly_msi
  )
  tasks.map { |t| "pl:#{t}" }.each do |t|
- puts "Running #{t} . . ."
+ puts "Running #{t}:"
  Rake::Task[t].invoke
  end
  end
data/tasks/ship.rake CHANGED
@@ -1,44 +1,77 @@
  namespace :pl do
  namespace :remote do
- # These hacky bits execute a pre-existing rake task on the Pkg::Config.apt_host
- # The rake task takes packages in a specific directory and freights them
- # to various target yum and apt repositories based on their specific type
- # e.g., final vs devel vs PE vs FOSS packages
+ # Repo updates are the ways we convince the various repos to regenerate any repo-based
+ # metadata.
+ #
+ # It is broken up into various pieces and types to try to avoid too much redundant
+ # behavior.
 
  desc "Update '#{Pkg::Config.repo_name}' yum repository on '#{Pkg::Config.yum_staging_server}'"
  task update_yum_repo: 'pl:fetch' do
  command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository/Rakefile mk_repo'
  $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
- if Pkg::Util.ask_yes_or_no
- Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => Pkg::Paths.yum_repo_name, :repo_path => Pkg::Config.yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
- end
+ next unless Pkg::Util.ask_yes_or_no
+
+ Pkg::Repo.update_repo(
+ Pkg::Config.yum_staging_server,
+ command,
+ {
+ repo_name: Pkg::Paths.yum_repo_name,
+ repo_path: Pkg::Config.yum_repo_path,
+ repo_host: Pkg::Config.yum_staging_server
+ }
+ )
  end
 
  desc "Update all final yum repositories on '#{Pkg::Config.yum_staging_server}'"
  task update_all_final_yum_repos: 'pl:fetch' do
  command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository/Rakefile mk_repo'
  $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
- if Pkg::Util.ask_yes_or_no
- Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => '', :repo_path => Pkg::Config.yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
- end
+ next unless Pkg::Util.ask_yes_or_no
+
+ Pkg::Repo.update_repo(
+ Pkg::Config.yum_staging_server,
+ command,
+ {
+ repo_name: '',
+ repo_path: Pkg::Config.yum_repo_path,
+ repo_host: Pkg::Config.yum_staging_server
+ }
+ )
  end
 
  desc "Update '#{Pkg::Config.nonfinal_repo_name}' nightly yum repository on '#{Pkg::Config.yum_staging_server}'"
  task update_nightlies_yum_repo: 'pl:fetch' do
  command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository-nightlies/Rakefile mk_repo'
  $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
- if Pkg::Util.ask_yes_or_no
- Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => Pkg::Config.nonfinal_repo_name, :repo_path => Pkg::Config.nonfinal_yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
- end
+ next unless Pkg::Util.ask_yes_or_no
+
+ Pkg::Repo.update_repo(
+ Pkg::Config.yum_staging_server,
+ command,
+ {
+ repo_name: Pkg::Config.nonfinal_repo_name,
+ repo_path: Pkg::Config.nonfinal_yum_repo_path,
+ repo_host: Pkg::Config.yum_staging_server
+ }
+ )
  end
 
  desc "Update all nightly yum repositories on '#{Pkg::Config.yum_staging_server}'"
  task update_all_nightlies_yum_repos: 'pl:fetch' do
  command = Pkg::Config.yum_repo_command || 'rake -f /opt/repository-nightlies/Rakefile mk_repo'
  $stdout.puts "Really run remote repo update on '#{Pkg::Config.yum_staging_server}'? [y,n]"
- if Pkg::Util.ask_yes_or_no
- Pkg::Repo.update_repo(Pkg::Config.yum_staging_server, command, { :repo_name => '', :repo_path => Pkg::Config.nonfinal_yum_repo_path, :repo_host => Pkg::Config.yum_staging_server })
- end
+ next unless Pkg::Util.ask_yes_or_no
+
+ Pkg::Repo.update_repo(
+ Pkg::Config.yum_staging_server,
+ command,
+ {
+ repo_name: '',
+ repo_path: Pkg::Config.nonfinal_yum_repo_path,
+ repo_host: Pkg::Config.yum_staging_server
+ }
+ )
  end
 
  task freight: :update_apt_repo
@@ -46,17 +79,35 @@ namespace :pl do
  desc "Update remote apt repository on '#{Pkg::Config.apt_signing_server}'"
  task update_apt_repo: 'pl:fetch' do
  $stdout.puts "Really run remote repo update on '#{Pkg::Config.apt_signing_server}'? [y,n]"
- if Pkg::Util.ask_yes_or_no
- Pkg::Repo.update_repo(Pkg::Config.apt_signing_server, Pkg::Config.apt_repo_command, { :repo_name => Pkg::Paths.apt_repo_name, :repo_path => Pkg::Config.apt_repo_path, :repo_host => Pkg::Config.apt_host, :repo_url => Pkg::Config.apt_repo_url })
- end
+ next unless Pkg::Util.ask_yes_or_no
+
+ Pkg::Repo.update_repo(
+ Pkg::Config.apt_signing_server,
+ Pkg::Config.apt_repo_command,
+ {
+ repo_name: Pkg::Paths.apt_repo_name,
+ repo_path: Pkg::Config.apt_repo_path,
+ repo_host: Pkg::Config.apt_host,
+ repo_url: Pkg::Config.apt_repo_url
+ }
+ )
  end
 
  desc "Update nightlies apt repository on '#{Pkg::Config.apt_signing_server}'"
  task update_nightlies_apt_repo: 'pl:fetch' do
  $stdout.puts "Really run remote repo update on '#{Pkg::Config.apt_signing_server}'? [y,n]"
- if Pkg::Util.ask_yes_or_no
- Pkg::Repo.update_repo(Pkg::Config.apt_signing_server, Pkg::Config.nonfinal_apt_repo_command, { :repo_name => Pkg::Config.nonfinal_repo_name, :repo_path => Pkg::Config.nonfinal_apt_repo_path, :repo_host => Pkg::Config.apt_host, :repo_url => Pkg::Config.apt_repo_url })
- end
+ next unless Pkg::Util.ask_yes_or_no
+
+ Pkg::Repo.update_repo(
+ Pkg::Config.apt_signing_server,
+ Pkg::Config.nonfinal_apt_repo_command,
+ {
+ repo_name: Pkg::Config.nonfinal_repo_name,
+ repo_path: Pkg::Config.nonfinal_apt_repo_path,
+ repo_host: Pkg::Config.apt_host,
+ repo_url: Pkg::Config.apt_repo_url
+ }
+ )
  end
 
  desc "Update apt and yum repos"
@@ -74,32 +125,31 @@ namespace :pl do
  desc "Update remote ips repository on #{Pkg::Config.ips_host}"
  task :update_ips_repo => 'pl:fetch' do
  if Dir['pkg/ips/pkgs/**/*'].empty? && Dir['pkg/solaris/11/**/*'].empty?
- $stdout.puts "There aren't any p5p packages in pkg/ips/pkgs or pkg/solaris/11. Maybe something went wrong?"
- else
+ $stdout.puts "Error: there aren't any p5p packages in pkg/ips/pkgs or pkg/solaris/11."
+ next
+ end
 
- if !Dir['pkg/ips/pkgs/**/*'].empty?
- source_dir = 'pkg/ips/pkgs/'
- else
- source_dir = 'pkg/solaris/11/'
- end
+ source_dir = 'pkg/solaris/11/'
+ source_dir = 'pkg/ips/pkgs/' unless Dir['pkg/ips/pkgs/**/*'].empty?
 
- tmpdir, _ = Pkg::Util::Net.remote_execute(
- Pkg::Config.ips_host,
- 'mktemp -d -p /var/tmp',
- { capture_output: true }
- )
- tmpdir.chomp!
+ tmpdir, _ = Pkg::Util::Net.remote_execute(
+ Pkg::Config.ips_host,
+ 'mktemp -d -p /var/tmp',
+ { capture_output: true }
+ )
+ tmpdir.chomp!
 
- Pkg::Util::Net.rsync_to(source_dir, Pkg::Config.ips_host, tmpdir)
+ Pkg::Util::Net.rsync_to(source_dir, Pkg::Config.ips_host, tmpdir)
 
- remote_cmd = %(for pkg in #{tmpdir}/*.p5p; do
- sudo pkgrecv -s $pkg -d #{Pkg::Config.ips_path} '*';
+ remote_cmd = %(for pkg in #{tmpdir}/*.p5p; do
+ sudo pkgrecv -s $pkg -d #{Pkg::Config.ips_path} '*';
  done)
 
- Pkg::Util::Net.remote_execute(Pkg::Config.ips_host, remote_cmd)
- Pkg::Util::Net.remote_execute(Pkg::Config.ips_host, "sudo pkgrepo refresh -s #{Pkg::Config.ips_path}")
- Pkg::Util::Net.remote_execute(Pkg::Config.ips_host, "sudo /usr/sbin/svcadm restart svc:/application/pkg/server:#{Pkg::Config.ips_repo || 'default'}")
- end
+ Pkg::Util::Net.remote_execute(Pkg::Config.ips_host, remote_cmd)
+ Pkg::Util::Net.remote_execute(Pkg::Config.ips_host,
+ "sudo pkgrepo refresh -s #{Pkg::Config.ips_path}")
+ Pkg::Util::Net.remote_execute(Pkg::Config.ips_host,
+ "sudo /usr/sbin/svcadm restart svc:/application/pkg/server:#{Pkg::Config.ips_repo || 'default'}")
  end
 
  desc "Move dmg repos from #{Pkg::Config.dmg_staging_server} to #{Pkg::Config.dmg_host}"
@@ -235,6 +285,7 @@ namespace :pl do
 
  desc "Sync signed apt repos from #{Pkg::Config.apt_signing_server} to Google Cloud Platform"
  task :sync_apt_repo_to_gcp => 'pl:fetch' do
+ ssh = Pkg::Util::Tool.check_tool('ssh')
  target_site = 'apt.repos.puppetlabs.com'
  sync_command_puppet_6 = "#{GCP_REPO_SYNC} apt.repos.puppet.com puppet6"
  sync_command_puppet_7 = "#{GCP_REPO_SYNC} apt.repos.puppet.com puppet7"
@@ -243,8 +294,11 @@ namespace :pl do
  puts
 
  Pkg::Util::Execution.retry_on_fail(times: 3) do
- Pkg::Util::Net.remote_execute(Pkg::Config.apt_signing_server, sync_command_puppet_6)
- Pkg::Util::Net.remote_execute(Pkg::Config.apt_signing_server, sync_command_puppet_7)
+ %x(#{ssh} #{Pkg::Config.apt_signing_server} '/bin/bash -l -c "#{sync_command_puppet_6}"')
+ end
+
+ Pkg::Util::Execution.retry_on_fail(times: 3) do
+ %x(#{ssh} #{Pkg::Config.apt_signing_server} '/bin/bash -l -c "#{sync_command_puppet_7}"')
  end
  end
  # Keep 'deploy' for backward compatibility
@@ -292,6 +346,13 @@ namespace :pl do
  end
  end
 
+ ##
+ ## Here's where we start 'shipping' (old terminology) or 'staging' (current terminology)
+ ## by copying local 'pkg' directories to the staging server.
+ ##
+ ## Note, that for debs, we conflate 'staging server' with 'signing server' because we
+ ## must stage in th place where we sign.
+ ##
  desc "Ship mocked rpms to #{Pkg::Config.yum_staging_server}"
  task ship_rpms: 'pl:fetch' do
  Pkg::Util::Ship.ship_rpms('pkg', Pkg::Config.yum_repo_path)
@@ -302,6 +363,7 @@ namespace :pl do
  Pkg::Util::Ship.ship_rpms('pkg', Pkg::Config.nonfinal_yum_repo_path, nonfinal: true)
  end
 
+ ## This is the old-style deb shipping
  desc "Ship cow-built debs to #{Pkg::Config.apt_signing_server}"
  task ship_debs: 'pl:fetch' do
  Pkg::Util::Ship.ship_debs('pkg', Pkg::Config.apt_repo_staging_path, chattr: false)
@@ -309,7 +371,20 @@ namespace :pl do
 
  desc "Ship nightly debs to #{Pkg::Config.apt_signing_server}"
  task ship_nightly_debs: 'pl:fetch' do
- Pkg::Util::Ship.ship_debs('pkg', Pkg::Config.nonfinal_apt_repo_staging_path, chattr: false, nonfinal: true)
+ Pkg::Util::Ship.ship_debs(
+ 'pkg', Pkg::Config.nonfinal_apt_repo_staging_path, chattr: false, nonfinal: true)
+ end
+
+ ## This is the new-style apt stager
+ desc "Stage debs to #{Pkg::Config.apt_signing_server}"
+ task stage_stable_debs: 'pl:fetch' do
+ Pkg::Util::AptStagingServer.send_packages('pkg', 'stable')
+ end
+ task stage_debs: :stage_stable_debs
+
+ desc "Stage nightly debs to #{Pkg::Config.apt_signing_server}"
+ task stage_nightly_debs: 'pl:fetch' do
+ Pkg::Util::AptStagingServer.send_packages('pkg', 'nightly')
  end
 
  desc 'Ship built gem to rubygems.org, internal Gem mirror, and public file server'
@@ -325,12 +400,13 @@ namespace :pl do
  puts 'This will ship to an internal gem mirror, a public file server, and rubygems.org'
  puts "Do you want to start shipping the rubygem '#{gem_file}'?"
  next unless Pkg::Util.ask_yes_or_no
+
  Rake::Task['pl:ship_gem_to_rubygems'].execute(file: gem_file)
  end
 
  Rake::Task['pl:ship_gem_to_downloads'].invoke
  else
- $stderr.puts 'Not shipping development gem using odd_even strategy for the sake of your users.'
+ warn 'Not shipping development gem using odd_even strategy for the sake of your users.'
  end
  end
  end
@@ -342,6 +418,7 @@ namespace :pl do
  if Pkg::Config.build_gem
  fail 'Value `Pkg::Config.gem_host` not defined, skipping nightly ship' unless Pkg::Config.gem_host
  fail 'Value `Pkg::Config.nonfinal_gem_path` not defined, skipping nightly ship' unless Pkg::Config.nonfinal_gem_path
+
  FileList['pkg/*.gem'].each do |gem_file|
  Pkg::Gem.ship_to_internal_mirror(gem_file)
  end
@@ -451,22 +528,25 @@ namespace :pl do
 
  desc 'UBER ship: ship all the things in pkg'
  task uber_ship: 'pl:fetch' do
- if Pkg::Util.confirm_ship(FileList['pkg/**/*'])
- Rake::Task['pl:ship_rpms'].invoke
- Rake::Task['pl:ship_debs'].invoke
- Rake::Task['pl:ship_dmg'].invoke
- Rake::Task['pl:ship_swix'].invoke
- Rake::Task['pl:ship_nuget'].invoke
- Rake::Task['pl:ship_tar'].invoke
- Rake::Task['pl:ship_svr4'].invoke
- Rake::Task['pl:ship_p5p'].invoke
- Rake::Task['pl:ship_msi'].invoke
- add_shipped_metrics(pe_version: ENV['PE_VER'], is_rc: !Pkg::Util::Version.final?) if Pkg::Config.benchmark
- post_shipped_metrics if Pkg::Config.benchmark
- else
+ unless Pkg::Util.confirm_ship(FileList['pkg/**/*'])
  puts 'Ship canceled'
  exit
  end
+
+ Rake::Task['pl:ship_rpms'].invoke
+ Rake::Task['pl:ship_debs'].invoke
+ Rake::Task['pl:ship_dmg'].invoke
+ Rake::Task['pl:ship_swix'].invoke
+ Rake::Task['pl:ship_nuget'].invoke
+ Rake::Task['pl:ship_tar'].invoke
+ Rake::Task['pl:ship_svr4'].invoke
+ Rake::Task['pl:ship_p5p'].invoke
+ Rake::Task['pl:ship_msi'].invoke
+
+ if Pkg::Config.benchmark
+ add_shipped_metrics(pe_version: ENV['PE_VER'], is_rc: !Pkg::Util::Version.final?)
+ post_shipped_metrics
+ end
  end
 
  desc 'Create the rolling repo links'
@@ -530,7 +610,7 @@ namespace :pl do
  { extra_options: '-oBatchMode=yes' }
  )
  end
- rescue
+ rescue StandardError
  errs << "Unlocking the OSX keychain failed! Check the password in your .bashrc on #{Pkg::Config.osx_signing_server}"
  end
 
@@ -569,66 +649,56 @@ namespace :pl do
  task :ship_to_artifactory, :local_dir do |_t, args|
  Pkg::Util::RakeUtils.invoke_task('pl:fetch')
  unless Pkg::Config.project
- fail "You must set the 'project' in build_defaults.yaml or with the 'PROJECT_OVERRIDE' environment variable."
+ fail "Error: 'project' must be set in build_defaults.yaml or " \
+ "in the 'PROJECT_OVERRIDE' environment variable."
  end
+
  artifactory = Pkg::ManageArtifactory.new(Pkg::Config.project, Pkg::Config.ref)
 
  local_dir = args.local_dir || 'pkg'
- artifacts = Dir.glob("#{local_dir}/**/*").reject { |e| File.directory? e }
- artifacts.sort! do |a, b|
- if File.extname(a) =~ /(md5|sha\d+)/ && File.extname(b) !~ /(md5|sha\d+)/
- 1
- elsif File.extname(b) =~ /(md5|sha\d+)/ && File.extname(a) !~ /(md5|sha\d+)/
- -1
- else
- a <=> b
- end
- end
- artifacts.each do |artifact|
- if File.extname(artifact) == ".yaml" || File.extname(artifact) == ".json"
- artifactory.deploy_package(artifact)
- elsif artifactory.package_exists_on_artifactory?(artifact)
- warn "Attempt to upload '#{artifact}' failed. Package already exists!"
- else
+ Dir.glob("#{local_dir}/**/*").reject { |e| File.directory? e }.each do |artifact|
+ # Always deploy yamls and jsons
+ if artifact.end_with?('.yaml', '.json')
  artifactory.deploy_package(artifact)
+ next
  end
+
+ # Don't deploy if the package already exists
+ if artifactory.package_exists_on_artifactory?(artifact)
+ warn "Attempt to upload '#{artifact}' failed. Package already exists."
+ next
+ end
+
+ artifactory.deploy_package(artifact)
  end
  end
 
- desc 'Ship pkg directory contents to distribution server'
+ desc 'Ship "pkg" directory contents to distribution server'
  task :ship, :target, :local_dir do |_t, args|
  Pkg::Util::RakeUtils.invoke_task('pl:fetch')
  unless Pkg::Config.project
- fail "You must set the 'project' in build_defaults.yaml or with the 'PROJECT_OVERRIDE' environment variable."
+ fail "Error: 'project' must be set in build_defaults.yaml or " \
+ "in the 'PROJECT_OVERRIDE' environment variable."
  end
+
  target = args.target || 'artifacts'
  local_dir = args.local_dir || 'pkg'
- project_basedir = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
- artifact_dir = "#{project_basedir}/#{target}"
+ project_basedir = File.join(
+ Pkg::Config.jenkins_repo_path, Pkg::Config.project, Pkg::Config.ref
+ )
+ artifact_dir = File.join(project_basedir, target)
 
  # For EZBake builds, we also want to include the ezbake.manifest file to
  # get a snapshot of this build and all dependencies. We eventually will
  # create a yaml version of this file, but until that point we want to
  # make the original ezbake.manifest available
- #
- ezbake_manifest = File.join('ext', 'ezbake.manifest')
- if File.exist?(ezbake_manifest)
- cp(ezbake_manifest, File.join(local_dir, "#{Pkg::Config.ref}.ezbake.manifest"))
- end
- ezbake_yaml = File.join("ext", "ezbake.manifest.yaml")
- if File.exists?(ezbake_yaml)
- cp(ezbake_yaml, File.join(local_dir, "#{Pkg::Config.ref}.ezbake.manifest.yaml"))
- end
+ Pkg::Util::EZbake.add_manifest(local_dir)
 
  # Inside build_metadata*.json files there is additional metadata containing
  # information such as git ref and dependencies that are needed at build
  # time. If these files exist, copy them downstream.
  # Typically these files are named 'ext/build_metadata.<project>.<platform>.json'
- build_metadata_json_files = Dir.glob('ext/build_metadata*.json')
- build_metadata_json_files.each do |source_file|
- target_file = File.join(local_dir, "#{Pkg::Config.ref}.#{File.basename(source_file)}")
- cp(source_file, target_file)
- end
+ Pkg::Util::BuildMetadata.add_misc_json_files(local_dir)
 
  # Sadly, the packaging repo cannot yet act on its own, without living
  # inside of a packaging-repo compatible project. This means in order to
@@ -664,54 +734,11 @@ namespace :pl do
  # and if the source package exists before linking. Searching for the
  # packages has been restricted specifically to just the pkg/windows dir
  # on purpose, as this is where we currently have all windows packages
- # building to. Once we move the Metadata about the output location in
- # to one source of truth we can refactor this to use that to search
- # -Sean P. M. 08/12/16
-
- {
- 'windows' => ['x86', 'x64'],
- 'windowsfips' => ['x64']
- }.each_pair do |platform, archs|
- packages = Dir["#{local_dir}/#{platform}/*"]
-
- archs.each do |arch|
- package_version = Pkg::Util::Git.describe.tr('-', '.')
- package_filename = File.join(local_dir, platform, "#{Pkg::Config.project}-#{package_version}-#{arch}.msi")
- link_filename = File.join(local_dir, platform, "#{Pkg::Config.project}-#{arch}.msi")
-
- next unless !packages.include?(link_filename) && packages.include?(package_filename)
- # Dear future code spelunkers:
- # Using symlinks instead of hard links causes failures when we try
- # to set these files to be immutable. Also be wary of whether the
- # linking utility you're using expects the source path to be relative
- # to the link target or pwd.
- #
- FileUtils.ln(package_filename, link_filename)
- end
- end
-
- Pkg::Util::Execution.retry_on_fail(times: 3) do
- Pkg::Util::Net.remote_execute(Pkg::Config.distribution_server, "mkdir --mode=775 -p #{project_basedir}")
- Pkg::Util::Net.remote_execute(Pkg::Config.distribution_server, "mkdir -p #{artifact_dir}")
- Pkg::Util::Net.rsync_to("#{local_dir}/", Pkg::Config.distribution_server, "#{artifact_dir}/", extra_flags: ['--ignore-existing', '--exclude repo_configs'])
- end
-
- # In order to get a snapshot of what this build looked like at the time
- # of shipping, we also generate and ship the params file
- #
- Pkg::Config.config_to_yaml(local_dir)
- Pkg::Util::Execution.retry_on_fail(:times => 3) do
- Pkg::Util::Net.rsync_to("#{local_dir}/#{Pkg::Config.ref}.yaml", Pkg::Config.distribution_server, "#{artifact_dir}/", extra_flags: ["--exclude repo_configs"])
- end
-
- # If we just shipped a tagged version, we want to make it immutable
- files = Dir.glob("#{local_dir}/**/*").select { |f| File.file?(f) and !f.include? "#{Pkg::Config.ref}.yaml" }.map do |file|
- "#{artifact_dir}/#{file.sub(/^#{local_dir}\//, '')}"
- end
+ # building to.
+ Pkg::Util::Windows.add_msi_links(local_dir)
 
- Pkg::Util::Net.remote_set_ownership(Pkg::Config.distribution_server, 'root', 'release', files)
- Pkg::Util::Net.remote_set_permissions(Pkg::Config.distribution_server, '0664', files)
- Pkg::Util::Net.remote_set_immutable(Pkg::Config.distribution_server, files)
+ # Send packages to the distribution server.
+ Pkg::Util::DistributionServer.send_packages(local_dir, artifact_dir)
  end
 
  desc 'Ship generated repository configs to the distribution server'
metadata CHANGED
@@ -1,15 +1,29 @@
  --- !ruby/object:Gem::Specification
  name: packaging
  version: !ruby/object:Gem::Version
- version: 0.103.0
+ version: 0.104.0
  platform: ruby
  authors:
  - Puppet Labs
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-10-14 00:00:00.000000000 Z
+ date: 2021-11-16 00:00:00.000000000 Z
  dependencies:
+ - !ruby/object:Gem::Dependency
+ name: pry-byebug
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: '0'
+ type: :development
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: '0'
  - !ruby/object:Gem::Dependency
  name: rspec
  requirement: !ruby/object:Gem::Requirement
@@ -53,19 +67,19 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
- name: rake
+ name: apt_stage_artifacts
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: '12.3'
+ version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: '12.3'
+ version: '0'
  - !ruby/object:Gem::Dependency
  name: artifactory
  requirement: !ruby/object:Gem::Requirement
@@ -80,6 +94,20 @@ dependencies:
  - - "~>"
  - !ruby/object:Gem::Version
  version: '2'
+ - !ruby/object:Gem::Dependency
+ name: csv
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - '='
+ - !ruby/object:Gem::Version
+ version: 3.1.5
+ type: :runtime
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - '='
+ - !ruby/object:Gem::Version
+ version: 3.1.5
  - !ruby/object:Gem::Dependency
  name: release-metrics
  requirement: !ruby/object:Gem::Requirement
@@ -95,19 +123,19 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
- name: csv
+ name: rake
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - '='
+ - - ">="
  - !ruby/object:Gem::Version
- version: 3.1.5
+ version: '12.3'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - '='
+ - - ">="
  - !ruby/object:Gem::Version
- version: 3.1.5
+ version: '12.3'
  description: Packaging automation written in Rake and Ruby. Easily build native packages
  for most platforms with a few data files and git.
  email: info@puppetlabs.com
@@ -144,8 +172,12 @@ files:
  - lib/packaging/sign/rpm.rb
  - lib/packaging/tar.rb
  - lib/packaging/util.rb
+ - lib/packaging/util/apt_staging_server.rb
+ - lib/packaging/util/build_metadata.rb
  - lib/packaging/util/date.rb
+ - lib/packaging/util/distribution_server.rb
  - lib/packaging/util/execution.rb
+ - lib/packaging/util/ezbake.rb
  - lib/packaging/util/file.rb
  - lib/packaging/util/git.rb
  - lib/packaging/util/git_tags.rb
@@ -160,6 +192,7 @@ files:
  - lib/packaging/util/ship.rb
  - lib/packaging/util/tool.rb
  - lib/packaging/util/version.rb
+ - lib/packaging/util/windows.rb
  - spec/fixtures/config/ext/build_defaults.yaml
  - spec/fixtures/config/ext/project_data.yaml
  - spec/fixtures/configs/components/test_file.json
@@ -248,7 +281,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 2.0.0
+ version: 2.3.0
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -260,28 +293,28 @@ signing_key:
  specification_version: 4
  summary: Puppet Labs' packaging automation
  test_files:
- - spec/lib/packaging_spec.rb
- - spec/lib/packaging/config_spec.rb
  - spec/lib/packaging/repo_spec.rb
- - spec/lib/packaging/artifactory_spec.rb
- - spec/lib/packaging/paths_spec.rb
- - spec/lib/packaging/deb/repo_spec.rb
- - spec/lib/packaging/retrieve_spec.rb
  - spec/lib/packaging/gem_spec.rb
- - spec/lib/packaging/sign_spec.rb
- - spec/lib/packaging/platforms_spec.rb
- - spec/lib/packaging/deb_spec.rb
- - spec/lib/packaging/rpm/repo_spec.rb
- - spec/lib/packaging/util/git_spec.rb
+ - spec/lib/packaging/tar_spec.rb
  - spec/lib/packaging/util/rake_utils_spec.rb
- - spec/lib/packaging/util/misc_spec.rb
- - spec/lib/packaging/util/os_spec.rb
- - spec/lib/packaging/util/file_spec.rb
- - spec/lib/packaging/util/jenkins_spec.rb
+ - spec/lib/packaging/util/git_spec.rb
  - spec/lib/packaging/util/gpg_spec.rb
+ - spec/lib/packaging/util/execution_spec.rb
  - spec/lib/packaging/util/version_spec.rb
- - spec/lib/packaging/util/ship_spec.rb
  - spec/lib/packaging/util/net_spec.rb
  - spec/lib/packaging/util/git_tag_spec.rb
- - spec/lib/packaging/util/execution_spec.rb
- - spec/lib/packaging/tar_spec.rb
+ - spec/lib/packaging/util/os_spec.rb
+ - spec/lib/packaging/util/ship_spec.rb
+ - spec/lib/packaging/util/file_spec.rb
+ - spec/lib/packaging/util/jenkins_spec.rb
+ - spec/lib/packaging/util/misc_spec.rb
+ - spec/lib/packaging/rpm/repo_spec.rb
+ - spec/lib/packaging/paths_spec.rb
+ - spec/lib/packaging/platforms_spec.rb
+ - spec/lib/packaging/deb_spec.rb
+ - spec/lib/packaging/sign_spec.rb
+ - spec/lib/packaging/retrieve_spec.rb
+ - spec/lib/packaging/artifactory_spec.rb
+ - spec/lib/packaging/deb/repo_spec.rb
+ - spec/lib/packaging/config_spec.rb
+ - spec/lib/packaging_spec.rb