packaging 0.99.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE +17 -0
- data/README-Solaris.md +117 -0
- data/README.md +1031 -0
- data/lib/packaging.rb +32 -0
- data/lib/packaging/artifactory.rb +278 -0
- data/lib/packaging/config.rb +392 -0
- data/lib/packaging/config/params.rb +366 -0
- data/lib/packaging/deb.rb +28 -0
- data/lib/packaging/deb/repo.rb +263 -0
- data/lib/packaging/gem.rb +112 -0
- data/lib/packaging/ips.rb +57 -0
- data/lib/packaging/msi.rb +89 -0
- data/lib/packaging/nuget.rb +39 -0
- data/lib/packaging/osx.rb +36 -0
- data/lib/packaging/paths.rb +238 -0
- data/lib/packaging/platforms.rb +480 -0
- data/lib/packaging/repo.rb +55 -0
- data/lib/packaging/retrieve.rb +46 -0
- data/lib/packaging/rpm.rb +5 -0
- data/lib/packaging/rpm/repo.rb +257 -0
- data/lib/packaging/tar.rb +154 -0
- data/lib/packaging/util.rb +146 -0
- data/lib/packaging/util/date.rb +15 -0
- data/lib/packaging/util/execution.rb +85 -0
- data/lib/packaging/util/file.rb +125 -0
- data/lib/packaging/util/git.rb +174 -0
- data/lib/packaging/util/git_tags.rb +73 -0
- data/lib/packaging/util/gpg.rb +62 -0
- data/lib/packaging/util/jenkins.rb +95 -0
- data/lib/packaging/util/misc.rb +69 -0
- data/lib/packaging/util/net.rb +368 -0
- data/lib/packaging/util/os.rb +17 -0
- data/lib/packaging/util/platform.rb +40 -0
- data/lib/packaging/util/rake_utils.rb +111 -0
- data/lib/packaging/util/serialization.rb +19 -0
- data/lib/packaging/util/ship.rb +171 -0
- data/lib/packaging/util/tool.rb +41 -0
- data/lib/packaging/util/version.rb +326 -0
- data/spec/fixtures/config/ext/build_defaults.yaml +2 -0
- data/spec/fixtures/config/ext/project_data.yaml +2 -0
- data/spec/fixtures/config/params.yaml +2 -0
- data/spec/fixtures/configs/components/test_file.json +1 -0
- data/spec/fixtures/configs/components/test_file_2.json +0 -0
- data/spec/fixtures/configs/components/test_file_not_tagged.json +1 -0
- data/spec/fixtures/configs/components/test_file_wrong_ext.txt +0 -0
- data/spec/fixtures/configs/components/test_file_wrong_ext.wrong +0 -0
- data/spec/fixtures/util/pre_tasks.yaml +4 -0
- data/spec/lib/packaging/artifactory_spec.rb +171 -0
- data/spec/lib/packaging/config_spec.rb +556 -0
- data/spec/lib/packaging/deb/repo_spec.rb +148 -0
- data/spec/lib/packaging/deb_spec.rb +52 -0
- data/spec/lib/packaging/paths_spec.rb +153 -0
- data/spec/lib/packaging/platforms_spec.rb +153 -0
- data/spec/lib/packaging/repo_spec.rb +97 -0
- data/spec/lib/packaging/retrieve_spec.rb +61 -0
- data/spec/lib/packaging/rpm/repo_spec.rb +133 -0
- data/spec/lib/packaging/tar_spec.rb +122 -0
- data/spec/lib/packaging/util/execution_spec.rb +56 -0
- data/spec/lib/packaging/util/file_spec.rb +139 -0
- data/spec/lib/packaging/util/git_spec.rb +160 -0
- data/spec/lib/packaging/util/git_tag_spec.rb +36 -0
- data/spec/lib/packaging/util/gpg_spec.rb +64 -0
- data/spec/lib/packaging/util/jenkins_spec.rb +112 -0
- data/spec/lib/packaging/util/misc_spec.rb +31 -0
- data/spec/lib/packaging/util/net_spec.rb +239 -0
- data/spec/lib/packaging/util/os_spec.rb +31 -0
- data/spec/lib/packaging/util/rake_utils_spec.rb +70 -0
- data/spec/lib/packaging/util/ship_spec.rb +117 -0
- data/spec/lib/packaging/util/version_spec.rb +123 -0
- data/spec/lib/packaging_spec.rb +19 -0
- data/spec/spec_helper.rb +36 -0
- data/static_artifacts/PackageInfo.plist +3 -0
- data/tasks/00_utils.rake +216 -0
- data/tasks/30_metrics.rake +33 -0
- data/tasks/apple.rake +266 -0
- data/tasks/build.rake +12 -0
- data/tasks/clean.rake +5 -0
- data/tasks/config.rake +30 -0
- data/tasks/deb.rake +129 -0
- data/tasks/deb_repos.rake +28 -0
- data/tasks/deprecated.rake +130 -0
- data/tasks/doc.rake +20 -0
- data/tasks/education.rake +57 -0
- data/tasks/fetch.rake +57 -0
- data/tasks/gem.rake +146 -0
- data/tasks/jenkins.rake +494 -0
- data/tasks/jenkins_dynamic.rake +202 -0
- data/tasks/load_extras.rake +21 -0
- data/tasks/mock.rake +348 -0
- data/tasks/nightly_repos.rake +335 -0
- data/tasks/pe_deb.rake +12 -0
- data/tasks/pe_rpm.rake +13 -0
- data/tasks/pe_ship.rake +221 -0
- data/tasks/pe_sign.rake +13 -0
- data/tasks/pe_tar.rake +5 -0
- data/tasks/retrieve.rake +45 -0
- data/tasks/rpm.rake +66 -0
- data/tasks/rpm_repos.rake +29 -0
- data/tasks/ship.rake +752 -0
- data/tasks/sign.rake +226 -0
- data/tasks/tag.rake +8 -0
- data/tasks/tar.rake +34 -0
- data/tasks/update.rake +16 -0
- data/tasks/vanagon.rake +35 -0
- data/tasks/vendor_gems.rake +117 -0
- data/tasks/version.rake +33 -0
- data/tasks/z_data_dump.rake +65 -0
- data/templates/README +1 -0
- data/templates/downstream.xml.erb +47 -0
- data/templates/msi.xml.erb +197 -0
- data/templates/packaging.xml.erb +344 -0
- data/templates/repo.xml.erb +114 -0
- metadata +234 -0
data/lib/packaging/repo.rb
@@ -0,0 +1,55 @@
module Pkg::Repo

  class << self
    def create_signed_repo_archive(path_to_repo, name_of_archive, versioning)
      tar = Pkg::Util::Tool.check_tool('tar')
      Dir.chdir("pkg") do
        if versioning == 'ref'
          local_target = File.join(Pkg::Config.project, Pkg::Config.ref)
        elsif versioning == 'version'
          local_target = File.join(Pkg::Config.project, Pkg::Util::Version.dot_version)
        end
        Dir.chdir(local_target) do
          if Pkg::Util::File.empty_dir?(path_to_repo)
            if ENV['FAIL_ON_MISSING_TARGET'] == "true"
              raise "ERROR: missing packages under #{path_to_repo}"
            else
              warn "Skipping #{name_of_archive} because #{path_to_repo} has no files"
            end
          else
            puts "Archiving #{path_to_repo} as #{name_of_archive}"
            stdout, _, _ = Pkg::Util::Execution.capture3("#{tar} --owner=0 --group=0 --create --gzip --file #{File.join('repos', "#{name_of_archive}.tar.gz")} #{path_to_repo}")
            stdout
          end
        end
      end
    end

    def create_all_repo_archives(project, versioning)
      platforms = Pkg::Config.platform_repos
      platforms.each do |platform|
        archive_name = "#{project}-#{platform['name']}"
        create_signed_repo_archive(platform['repo_location'], archive_name, versioning)
      end
    end

    def directories_that_contain_packages(artifact_directory, pkg_ext)
      cmd = "[ -d #{artifact_directory} ] || exit 1 ; "
      cmd << "pushd #{artifact_directory} > /dev/null && "
      cmd << "find . -name '*.#{pkg_ext}' -print0 | xargs --no-run-if-empty -0 -I {} dirname {} "
      stdout, stderr = Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.distribution_server, cmd, true)
      return stdout.split
    rescue => e
      fail "Could not retrieve directories that contain #{pkg_ext} packages in #{Pkg::Config.distribution_server}:#{artifact_directory}"
    end

    def populate_repo_directory(artifact_parent_directory)
      cmd = "[ -d #{artifact_parent_directory}/artifacts ] || exit 1 ; "
      cmd << "pushd #{artifact_parent_directory} > /dev/null && "
      cmd << 'rsync --archive --verbose --one-file-system --ignore-existing artifacts/ repos/ '
      Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.distribution_server, cmd)
    rescue => e
      fail "Could not populate repos directory in #{Pkg::Config.distribution_server}:#{artifact_parent_directory}"
    end
  end
end
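For orientation, here is a hedged usage sketch (not part of the package diff) showing how these Pkg::Repo helpers are typically driven. It assumes Pkg::Config has already been populated (project, ref, platform_repos) and that the repo path shown is hypothetical:

# Illustrative sketch only -- 'repos/el-7-x86_64' is a hypothetical repo_location.
require 'packaging'

# Archive every platform repo listed in Pkg::Config.platform_repos:
Pkg::Repo.create_all_repo_archives(Pkg::Config.project, 'ref')

# Or archive a single signed repo by hand:
Pkg::Repo.create_signed_repo_archive('repos/el-7-x86_64',
                                     "#{Pkg::Config.project}-el-7-x86_64",
                                     'ref')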
data/lib/packaging/retrieve.rb
@@ -0,0 +1,46 @@
module Pkg::Retrieve
  module_function

  # --no-parent = Only descend when recursing, never ascend
  # --no-host-directories = Discard http://#{Pkg::Config.builds_server} when saving to disk
  # --level=0 = infinitely recurse, no limit
  # --cut-dirs 3 = will cut off #{Pkg::Config.project}, #{Pkg::Config.ref}, and the first directory in #{remote_target} from the url when saving to disk
  # --directory-prefix = where to save to disk (defaults to ./)
  # --reject = Reject all hits that match the supplied regex

  def default_wget(local_target, url)
    wget = Pkg::Util::Tool.check_tool('wget')
    wget_command = "#{wget} --quiet --recursive --no-parent --no-host-directories --level=0 --cut-dirs 3 --directory-prefix=#{local_target} --reject 'index*' #{url}"
    puts "Executing #{wget_command} . . ."
    %x(#{wget_command})
  end

  # This will always retrieve from under the 'artifacts' directory
  def foss_only_retrieve(build_url, local_target)
    unless Pkg::Config.foss_platforms
      fail "FOSS_ONLY specified, but I don't know anything about FOSS_PLATFORMS. Retrieve cancelled."
    end
    default_wget(local_target, "#{build_url}/artifacts/#{Pkg::Config.ref}.yaml")
    yaml_path = File.join(local_target, "#{Pkg::Config.ref}.yaml")
    unless File.readable?(yaml_path)
      fail "Couldn't read #{Pkg::Config.ref}.yaml, which is necessary for FOSS_ONLY. Retrieve cancelled."
    end
    platform_data = Pkg::Util::Serialization.load_yaml(yaml_path)[:platform_data]
    platform_data.each do |platform, paths|
      default_wget(local_target, "#{build_url}/artifacts/#{paths[:artifact]}") if Pkg::Config.foss_platforms.include?(platform)
    end
  end

  def retrieve_all(build_url, rsync_path, remote_target, local_target)
    if Pkg::Util::Tool.find_tool("wget")
      default_wget(local_target, "#{build_url}/#{remote_target}/")
    else
      warn "Could not find `wget` tool. Falling back to rsyncing from #{Pkg::Config.distribution_server}."
      begin
        Pkg::Util::Net.rsync_from("#{rsync_path}/#{remote_target}/", Pkg::Config.distribution_server, "#{local_target}/")
      rescue => e
        fail "Couldn't rsync packages from distribution server.\n#{e}"
      end
    end
  end
end
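A hedged sketch (not part of the package diff) of how Pkg::Retrieve might be invoked. The build_url and rsync_path values below are assumptions modeled on how the gem's rake tasks derive them from Pkg::Config:

# Illustrative sketch only -- URL and path construction here are assumptions.
build_url    = "http://#{Pkg::Config.builds_server}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
rsync_path   = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
local_target = 'pkg'

if ENV['FOSS_ONLY'] == 'true'
  # Only fetch artifacts for platforms listed in Pkg::Config.foss_platforms
  Pkg::Retrieve.foss_only_retrieve(build_url, local_target)
else
  # Fetch everything under the remote target, falling back to rsync when wget is absent
  Pkg::Retrieve.retrieve_all(build_url, rsync_path, 'artifacts', local_target)
end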
data/lib/packaging/rpm/repo.rb
@@ -0,0 +1,257 @@
# Utilities for working with rpm repos
require 'fileutils'
require 'find'

module Pkg::Rpm::Repo
  class << self
    def base_url
      "http://#{Pkg::Config.builds_server}/#{Pkg::Config.project}/#{Pkg::Config.ref}"
    end

    def ship_repo_configs(target = "repo_configs")
      if Pkg::Util::File.empty_dir?("pkg/#{target}/rpm")
        warn "No repo configs have been generated! Try pl:rpm_repo_configs."
        return
      end

      invoke_task("pl:fetch")
      repo_dir = "#{Pkg::Config.jenkins_repo_path}/#{Pkg::Config.project}/#{Pkg::Config.ref}/#{target}/rpm"
      Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.distribution_server, "mkdir -p #{repo_dir}")
      Pkg::Util::Execution.retry_on_fail(:times => 3) do
        Pkg::Util::Net.rsync_to("pkg/#{target}/rpm/", Pkg::Config.distribution_server, repo_dir)
      end
    end

    def repo_creation_command(repo_directory, artifact_paths = nil)
      cmd = "[ -d #{repo_directory} ] || exit 1 ; "
      cmd << "pushd #{repo_directory} > /dev/null && "
      cmd << 'echo "Checking for running repo creation. Will wait if detected." && '
      cmd << 'while [ -f .lock ] ; do sleep 1 ; echo -n "." ; done && '
      cmd << 'echo "Setting lock" && '
      cmd << 'touch .lock && '
      cmd << 'createrepo=$(which createrepo) ; '

      # Added for compatibility.
      # The nightly repo ships operate differently and do not want to be calculating
      # the correct paths based on which packages are available on the distribution
      # host, we just want to be `createrepo`ing for what we've staged locally
      #
      # We should only assume repo_directory exists locally if we didn't pass
      # artifact paths
      if artifact_paths.nil?
        # Since the command already has a `pushd #{repo_directory}` let's make sure
        # we're calculating artifact paths relative to that.
        Dir.chdir repo_directory do
          artifact_paths = Dir.glob('**/*.rpm').map { |package| File.dirname(package) }
        end
      end

      artifact_paths.each do |path|
        next if path.include? 'aix'
        cmd << "if [ -d #{path} ]; then "
        cmd << "pushd #{path} && "
        cmd << '$createrepo --checksum=sha --checkts --update --delta-workers=0 --database . && '
        cmd << 'popd ; '
        cmd << 'fi ;'
      end
      cmd
    end

    # @deprecated this command will die a painful death when we are
    #   able to sit down with Operations and refactor our distribution infra.
    #   At a minimum, it should be refactored alongside its Debian counterpart
    #   into something modestly more generic.
    #   - Ryan McKern 11/2015
    #
    # @param origin_path [String] path for RPM repos on local filesystem
    # @param destination_path [String] path for RPM repos on remote filesystem
    # @param destination [String] remote host to send rsynced content to. If
    #   nil will copy locally
    # @param dryrun [Boolean] whether or not to use '--dry-run'
    #
    # @return [String] an rsync command that can be executed on a remote host
    #   to copy local content from that host to a remote node.
    def repo_deployment_command(origin_path, destination_path, destination, dryrun = false)
      path = Pathname.new(origin_path)
      dest_path = Pathname.new(destination_path)

      # You may think "rsync doesn't actually remove the sticky bit, let's
      # remove the Dugo-s from the chmod". However, that will make your rsyncs
      # fail due to permission errors.
      options = %w(
        rsync
        --recursive
        --links
        --hard-links
        --update
        --human-readable
        --itemize-changes
        --progress
        --verbose
        --super
        --delay-updates
        --omit-dir-times
        --no-perms
        --no-owner
        --no-group
      )

      options << '--dry-run' if dryrun
      options << path

      if destination
        options << "#{destination}:#{dest_path.parent}"
      else
        options << "#{dest_path.parent}"
      end

      options.join("\s")
    end

    def sign_repos(directory)
      files_to_sign = Find.find(directory).select { |file| file.match(/repomd.xml$/) }
      files_to_sign.each do |file|
        Pkg::Util::Gpg.sign_file(file)
      end
    end

    def retrieve_repo_configs(target = "repo_configs")
      wget = Pkg::Util::Tool.check_tool("wget")
      FileUtils.mkdir_p("pkg/#{target}")
      config_url = "#{base_url}/#{target}/rpm/"
      begin
        stdout, _, _ = Pkg::Util::Execution.capture3("#{wget} -r -np -nH --cut-dirs 3 -P pkg/#{target} --reject 'index*' #{config_url}")
        stdout
      rescue => e
        fail "Couldn't retrieve rpm yum repo configs.\n#{e}"
      end
    end

    # Generate yum configuration files that point to the repositories created
    # on the distribution server with packages created from the current source
    # repo commit. There is one for each dist/version that is packaged (e.g.
    # el5, el6, etc). Files are created in pkg/repo_configs/rpm and are named
    # pl-$project-$sha.conf, and can be placed in /etc/yum.repos.d to enable
    # clients to install these packages.
    #
    def generate_repo_configs(source = "repos", target = "repo_configs", signed = false)
      # We have a hard requirement on wget because of all the download magicks
      # we have to do
      #
      wget = Pkg::Util::Tool.check_tool("wget")

      # This is the standard path to all build artifacts on the distribution
      # server for this commit
      #
      repo_base = "#{base_url}/#{source}/"

      # First check if the artifacts directory exists
      #

      # We have to do two checks here - first that there are directories with
      # repodata folders in them, and second that those same directories also
      # contain rpms
      #
      stdout, _, _ = Pkg::Util::Execution.capture3("#{wget} --spider -r -l 5 --no-parent #{repo_base} 2>&1")
      stdout = stdout.split.uniq.reject { |x| x =~ /\?|index/ }.select { |x| x =~ /http:.*repodata\/$/ }

      # RPMs will always exist at the same directory level as the repodata
      # folder, which means if we go up a level we should find rpms
      #
      yum_repos = []
      stdout.map { |x| x.chomp('repodata/') }.each do |url|
        output, _, _ = Pkg::Util::Execution.capture3("#{wget} --spider -r -l 1 --no-parent #{url} 2>&1")
        unless output.split.uniq.reject { |x| x =~ /\?|index/ }.select { |x| x =~ /http:.*\.rpm$/ }.empty?
          yum_repos << url
        end
      end

      if yum_repos.empty?
        warn "No rpm repos were found to generate configs from!"
        return
      end

      FileUtils.mkdir_p(File.join("pkg", target, "rpm"))

      # Parse the rpm configs file to generate repository configs. Each line in
      # the rpm_configs file corresponds with a repo directory on the
      # distribution server.
      #
      yum_repos.each do |url|
        # We ship a base 'srpm' that gets turned into a repo, but we want to
        # ignore this one because its an extra
        next if url == "#{repo_base}srpm/"

        platform_tag = Pkg::Paths.tag_from_artifact_path(url)
        platform, version, arch = Pkg::Platforms.parse_platform_tag(platform_tag)

        # Create an array of lines that will become our yum config
        #
        config = ["[pl-#{Pkg::Config.project}-#{Pkg::Config.ref}]"]
        config << ["name=PL Repo for #{Pkg::Config.project} at commit #{Pkg::Config.ref}"]
        config << ["baseurl=#{url}"]
        config << ["enabled=1"]
        if signed
          config << ["gpgcheck=1"]
          config << ["gpgkey=http://#{Pkg::Config.builds_server}/#{Pkg::Util::Gpg.key}"]
        else
          config << ["gpgcheck=0"]
        end

        # Write the new config to a file under our repo configs dir
        #
        config_file = File.join("pkg", target, "rpm", "pl-#{Pkg::Config.project}-#{Pkg::Config.ref}-#{platform}-#{version}-#{arch}.repo")
        File.open(config_file, 'w') { |f| f.puts config }
      end
      puts "Wrote yum configuration files for #{Pkg::Config.project} at #{Pkg::Config.ref} to pkg/#{target}/rpm"
    end

    def create_local_repos(directory = "repos")
      stdout, _, _ = Pkg::Util::Execution.capture3("bash -c '#{repo_creation_command(directory)}'")
      stdout
    end

    def create_remote_repos(directory = 'repos')
      artifact_directory = File.join(Pkg::Config.jenkins_repo_path, Pkg::Config.project, Pkg::Config.ref)
      artifact_paths = Pkg::Repo.directories_that_contain_packages(File.join(artifact_directory, 'artifacts'), 'rpm')
      Pkg::Repo.populate_repo_directory(artifact_directory)
      command = Pkg::Rpm::Repo.repo_creation_command(File.join(artifact_directory, directory), artifact_paths)

      begin
        Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.distribution_server, command)
        # Now that we've created our package repositories, we can generate repo
        # configurations for use with downstream jobs, acceptance clients, etc.
        Pkg::Rpm::Repo.generate_repo_configs

        # Now that we've created the repo configs, we can ship them
        Pkg::Rpm::Repo.ship_repo_configs
      ensure
        # Always remove the lock file, even if we've failed
        Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.distribution_server, "rm -f #{artifact_directory}/repos/.lock")
      end
    end

    def create_repos_from_artifacts(directory = "repos")
      Pkg::Util.deprecate('Pkg::Rpm::Repo.create_repos_from_artifacts', 'Pkg::Rpm::Repo.create_remote_repos')
      create_remote_repos(directory)
    end

    def create_repos(directory = "repos")
      Pkg::Util.deprecate('Pkg::Rpm::Repo.create_repos', 'Pkg::Rpm::Repo.create_local_repos')
      create_local_repos(directory)
    end

    # @deprecated this command is exactly as awful as you think it is.
    #   -- Ryan McKern 12/2015
    #
    # @param yum_path [String] path for rpm repos on local and remote filesystem
    # @param origin_server [String] remote host to start the rsync from
    # @param destination_server [String] remote host to send rsynced content to
    # @param dryrun [Boolean] whether or not to use '--dry-run'
    def deploy_repos(yum_path, origin_server, destination_server, dryrun = false)
      rsync_command = repo_deployment_command(yum_path, yum_path, destination_server, dryrun)

      Pkg::Util::Net.remote_ssh_cmd(origin_server, rsync_command)
    end
  end
end
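A hedged sketch (not part of the package diff) of the Pkg::Rpm::Repo flow; it assumes packages have already been shipped to the distribution server under Pkg::Config.jenkins_repo_path/<project>/<ref>, and the local './repos' directory in the second variant is hypothetical:

# Illustrative sketch only -- assumes Pkg::Config and ssh access are already set up.
# Build yum repos on the distribution server, then generate and ship .repo configs:
Pkg::Rpm::Repo.create_remote_repos

# Or work against a locally staged './repos' directory instead:
Pkg::Rpm::Repo.create_local_repos('repos')
Pkg::Rpm::Repo.sign_repos('repos')   # signs every repomd.xml found under ./repos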
data/lib/packaging/tar.rb
@@ -0,0 +1,154 @@
module Pkg
  class Tar
    require 'fileutils'
    require 'pathname'
    include FileUtils

    attr_accessor :files, :project, :version, :excludes, :target, :templates
    attr_reader :tar

    def initialize
      @tar = Pkg::Util::Tool.find_tool('tar', :required => true)
      @project = Pkg::Config.project
      @version = Pkg::Config.version
      @files = Pkg::Config.files
      @target = File.join(Pkg::Config.project_root, "pkg", "#{@project}-#{@version}.tar.gz")

      # We require that the excludes list be a string (which is space
      # separated, we hope)(deprecated) or an array.
      #
      if Pkg::Config.tar_excludes
        if Pkg::Config.tar_excludes.is_a?(String)
          warn "warning: `tar_excludes` should be an array, not a string"
          @excludes = Pkg::Config.tar_excludes.split(' ')
        elsif Pkg::Config.tar_excludes.is_a?(Array)
          @excludes = Pkg::Config.tar_excludes
        else
          fail "Tarball excludes must either be an array or a string, not #{@excludes.class}"
        end
      else
        @excludes = []
      end

      # On the other hand, support for explicit templates started with Arrays,
      # so that's all we support.
      #
      if Pkg::Config.templates
        @templates = Pkg::Config.templates.dup
        fail "templates must be an array" unless @templates.is_a?(Array)
        expand_templates
      end
    end

    def install_files_to(workdir)
      # It is nice to use arrays in YAML to represent array content, but we used
      # to support a mode where a space-separated string was used. Support both
      # to allow a gentle migration to a modern style...
      patterns =
        case @files
        when String
          $stderr.puts "warning: `files` should be an array, not a string"
          @files.split(' ')
        when Array
          @files
        else
          raise "`files` must be a string or an array!"
        end

      Pkg::Util::File.install_files_into_dir(patterns, workdir)
    end

    # The templates of a project can include globs, which may expand to an
    # arbitrary number of files. This method expands all of the templates using
    # Dir.glob and then filters out any templates that live in the packaging
    # tools themselves. If the template is a source/target combination, it is
    # returned to the array untouched.
    def expand_templates
      @templates.map! do |tempfile|
        if tempfile.is_a?(String)
          # Expand possible globs to all matching entries
          Dir.glob(File.join(Pkg::Config::project_root, tempfile))
        elsif tempfile.is_a?(Hash)
          tempfile
        end
      end
      @templates.flatten!

      # Reject matches that are templates from packaging itself. These will contain the packaging root.
      # These tend to come from the current tar.rake implementation.
      @templates.reject! { |temp| temp.is_a?(String) && temp.match(/#{Pkg::Config::packaging_root}/) }
    end

    # Given the tar object's template files (assumed to be in Pkg::Config.project_root), transform
    # them, removing the originals. If workdir is passed, assume Pkg::Config.project_root
    # exists in workdir
    def template(workdir = nil)
      workdir ||= Pkg::Config.project_root
      root = Pathname.new(Pkg::Config.project_root)

      # Templates can be either a string or a hash of source and target. If it
      # is a string, the target is assumed to be the same path as the
      # source,with the extension removed. If it is a hash, we assume nothing
      # and use the provided source and target.
      @templates.each do |cur_template|
        if cur_template.is_a?(String)
          template_file = File.expand_path(cur_template)
          target_file = template_file.sub(File.extname(template_file), "")
        elsif cur_template.is_a?(Hash)
          template_file = File.expand_path(cur_template["source"])
          target_file = File.expand_path(cur_template["target"])
        end

        # We construct paths to the erb template and its proposed target file
        # relative to the project root, *not* fully qualified. This allows us
        # to, given a temporary workdir containing a copy of the project,
        # construct the full path to the erb and target file inside the
        # temporary workdir.
        #
        rel_path_to_template = Pathname.new(template_file).relative_path_from(root).to_s
        rel_path_to_target = Pathname.new(target_file).relative_path_from(root).to_s

        # What we pass to Pkg::util::File.erb_file are the paths to the erb
        # and target inside of a temporary project directory. We are, in
        # essence, templating "in place." This is why we remove the original
        # files - they're not the originals in the authoritative project
        # directory, but the originals in the temporary working copy.
        if File.exist?(File.join(workdir, rel_path_to_template))
          mkpath(File.dirname(File.join(workdir, rel_path_to_target)), :verbose => false)
          Pkg::Util::File.erb_file(File.join(workdir, rel_path_to_template), File.join(workdir, rel_path_to_target), true, :binding => Pkg::Config.get_binding)
        elsif File.exist?(File.join(root, rel_path_to_template))
          mkpath(File.dirname(File.join(workdir, rel_path_to_target)), :verbose => false)
          Pkg::Util::File.erb_file(File.join(root, rel_path_to_template), File.join(workdir, rel_path_to_target), false, :binding => Pkg::Config.get_binding)
        else
          fail "Expected to find #{template_file} in #{root} for templating. But it was not there. Maybe you deleted it?"
        end
      end
    end

    def tar(target, source)
      mkpath File.dirname(target)
      cd File.dirname(source) do
        %x(#{@tar} #{@excludes.map { |x| (" --exclude #{x} ") }.join if @excludes} -zcf '#{File.basename(target)}' '#{File.basename(source)}')
        unless $?.success?
          fail "Failed to create .tar.gz archive with #{@tar}. Please ensure the tar command in your path accepts the flags '-c', '-z', and '-f'"
        end
        mv File.basename(target), target
      end
    end

    def clean_up(workdir)
      rm_rf workdir
    end

    def pkg!
      workdir = File.join(Pkg::Util::File.mktemp, "#{@project}-#{@version}")
      mkpath workdir
      self.install_files_to workdir
      self.template(workdir)
      self.tar(@target, workdir)
      self.clean_up workdir
    end

  end
end
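A hedged sketch (not part of the package diff) of how Pkg::Tar is expected to be used; it assumes Pkg::Config already supplies project, version, files, and project_root (normally loaded by the gem's rake tasks):

# Illustrative sketch only -- Pkg::Config must already be loaded.
tarball = Pkg::Tar.new
tarball.pkg!   # stage files, render ERB templates in place, write pkg/<project>-<version>.tar.gz
puts "Wrote #{tarball.target}"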