packaging 0.88.77
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE +17 -0
- data/README-Solaris.md +117 -0
- data/README.md +977 -0
- data/lib/packaging.rb +32 -0
- data/lib/packaging/archive.rb +126 -0
- data/lib/packaging/artifactory.rb +651 -0
- data/lib/packaging/artifactory/extensions.rb +94 -0
- data/lib/packaging/config.rb +492 -0
- data/lib/packaging/config/params.rb +387 -0
- data/lib/packaging/config/validations.rb +13 -0
- data/lib/packaging/deb.rb +28 -0
- data/lib/packaging/deb/repo.rb +264 -0
- data/lib/packaging/gem.rb +70 -0
- data/lib/packaging/metrics.rb +15 -0
- data/lib/packaging/nuget.rb +39 -0
- data/lib/packaging/paths.rb +376 -0
- data/lib/packaging/platforms.rb +507 -0
- data/lib/packaging/repo.rb +155 -0
- data/lib/packaging/retrieve.rb +75 -0
- data/lib/packaging/rpm.rb +5 -0
- data/lib/packaging/rpm/repo.rb +254 -0
- data/lib/packaging/sign.rb +8 -0
- data/lib/packaging/sign/deb.rb +9 -0
- data/lib/packaging/sign/dmg.rb +41 -0
- data/lib/packaging/sign/ips.rb +57 -0
- data/lib/packaging/sign/msi.rb +124 -0
- data/lib/packaging/sign/rpm.rb +115 -0
- data/lib/packaging/tar.rb +163 -0
- data/lib/packaging/util.rb +146 -0
- data/lib/packaging/util/date.rb +20 -0
- data/lib/packaging/util/execution.rb +85 -0
- data/lib/packaging/util/file.rb +125 -0
- data/lib/packaging/util/git.rb +174 -0
- data/lib/packaging/util/git_tags.rb +73 -0
- data/lib/packaging/util/gpg.rb +66 -0
- data/lib/packaging/util/jenkins.rb +95 -0
- data/lib/packaging/util/misc.rb +69 -0
- data/lib/packaging/util/net.rb +410 -0
- data/lib/packaging/util/os.rb +17 -0
- data/lib/packaging/util/platform.rb +40 -0
- data/lib/packaging/util/rake_utils.rb +112 -0
- data/lib/packaging/util/serialization.rb +19 -0
- data/lib/packaging/util/ship.rb +300 -0
- data/lib/packaging/util/tool.rb +41 -0
- data/lib/packaging/util/version.rb +334 -0
- data/spec/fixtures/config/ext/build_defaults.yaml +2 -0
- data/spec/fixtures/config/ext/project_data.yaml +2 -0
- data/spec/fixtures/configs/components/test_file.json +1 -0
- data/spec/fixtures/configs/components/test_file_2.json +0 -0
- data/spec/fixtures/configs/components/test_file_not_tagged.json +1 -0
- data/spec/fixtures/configs/components/test_file_wrong_ext.txt +0 -0
- data/spec/fixtures/configs/components/test_file_wrong_ext.wrong +0 -0
- data/spec/fixtures/util/pre_tasks.yaml +4 -0
- data/spec/lib/packaging/artifactory_spec.rb +221 -0
- data/spec/lib/packaging/config_spec.rb +576 -0
- data/spec/lib/packaging/deb/repo_spec.rb +157 -0
- data/spec/lib/packaging/deb_spec.rb +52 -0
- data/spec/lib/packaging/gem_spec.rb +86 -0
- data/spec/lib/packaging/paths_spec.rb +418 -0
- data/spec/lib/packaging/platforms_spec.rb +178 -0
- data/spec/lib/packaging/repo_spec.rb +135 -0
- data/spec/lib/packaging/retrieve_spec.rb +100 -0
- data/spec/lib/packaging/rpm/repo_spec.rb +133 -0
- data/spec/lib/packaging/sign_spec.rb +133 -0
- data/spec/lib/packaging/tar_spec.rb +116 -0
- data/spec/lib/packaging/util/execution_spec.rb +56 -0
- data/spec/lib/packaging/util/file_spec.rb +139 -0
- data/spec/lib/packaging/util/git_spec.rb +160 -0
- data/spec/lib/packaging/util/git_tag_spec.rb +36 -0
- data/spec/lib/packaging/util/gpg_spec.rb +64 -0
- data/spec/lib/packaging/util/jenkins_spec.rb +112 -0
- data/spec/lib/packaging/util/misc_spec.rb +31 -0
- data/spec/lib/packaging/util/net_spec.rb +259 -0
- data/spec/lib/packaging/util/os_spec.rb +31 -0
- data/spec/lib/packaging/util/rake_utils_spec.rb +70 -0
- data/spec/lib/packaging/util/ship_spec.rb +199 -0
- data/spec/lib/packaging/util/version_spec.rb +123 -0
- data/spec/lib/packaging_spec.rb +19 -0
- data/spec/spec_helper.rb +22 -0
- data/static_artifacts/PackageInfo.plist +3 -0
- data/tasks/00_utils.rake +214 -0
- data/tasks/30_metrics.rake +33 -0
- data/tasks/apple.rake +268 -0
- data/tasks/archive.rake +69 -0
- data/tasks/build.rake +12 -0
- data/tasks/clean.rake +5 -0
- data/tasks/config.rake +35 -0
- data/tasks/deb.rake +129 -0
- data/tasks/deb_repos.rake +28 -0
- data/tasks/deprecated.rake +130 -0
- data/tasks/doc.rake +20 -0
- data/tasks/education.rake +57 -0
- data/tasks/fetch.rake +60 -0
- data/tasks/gem.rake +159 -0
- data/tasks/jenkins.rake +538 -0
- data/tasks/jenkins_dynamic.rake +202 -0
- data/tasks/load_extras.rake +21 -0
- data/tasks/mock.rake +348 -0
- data/tasks/nightly_repos.rake +286 -0
- data/tasks/pe_deb.rake +12 -0
- data/tasks/pe_rpm.rake +13 -0
- data/tasks/pe_ship.rake +226 -0
- data/tasks/pe_sign.rake +13 -0
- data/tasks/pe_tar.rake +5 -0
- data/tasks/retrieve.rake +52 -0
- data/tasks/rpm.rake +66 -0
- data/tasks/rpm_repos.rake +29 -0
- data/tasks/ship.rake +692 -0
- data/tasks/sign.rake +154 -0
- data/tasks/tag.rake +8 -0
- data/tasks/tar.rake +28 -0
- data/tasks/update.rake +16 -0
- data/tasks/vanagon.rake +35 -0
- data/tasks/vendor_gems.rake +117 -0
- data/tasks/version.rake +33 -0
- data/tasks/z_data_dump.rake +65 -0
- data/templates/README +1 -0
- data/templates/downstream.xml.erb +47 -0
- data/templates/msi.xml.erb +197 -0
- data/templates/packaging.xml.erb +346 -0
- data/templates/repo.xml.erb +117 -0
- metadata +287 -0
data/lib/packaging.rb
ADDED
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
# Top-level namespace for the packaging library. Requiring this file pulls in
# every sub-library and then primes Pkg::Config from defaults, config files,
# version info, overrides, and environment variables (in that order).
module Pkg
  # Absolute path to the directory containing this file.
  LIBDIR = File.expand_path(File.dirname(__FILE__))

  # Make the library requirable without double-registering the path.
  unless $LOAD_PATH.include?(File.dirname(__FILE__)) || $LOAD_PATH.include?(LIBDIR)
    $LOAD_PATH.unshift(LIBDIR)
  end

  %w[
    packaging/platforms
    packaging/util
    packaging/config
    packaging/paths
    packaging/tar
    packaging/deb
    packaging/rpm
    packaging/nuget
    packaging/gem
    packaging/repo
    packaging/artifactory
    packaging/retrieve
    packaging/sign
    packaging/archive
    packaging/metrics
  ].each { |lib| require lib }

  # Load configuration defaults, then layer on project configs, versioning,
  # overrides and environment variables; finally surface any renamed or
  # deprecated settings to the user.
  Pkg::Config.load_defaults
  Pkg::Config.load_default_configs
  Pkg::Config.load_versioning
  Pkg::Config.load_overrides
  Pkg::Config.load_envvars
  Pkg::Config.issue_reassignments
  Pkg::Config.issue_deprecations
end
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
# Helpers for archiving released artifacts off of the staging server ("weth"):
# packages are moved from the live repo paths into archive staging paths, from
# which they are later synced to release-archives on s3. All shell work runs
# remotely on Pkg::Config.staging_server via Pkg::Util::Net.remote_execute.
module Pkg::Archive
  module_function

  # Array of base paths for foss artifacts on weth
  # (nil config entries are dropped).
  def base_paths
    [Pkg::Config.yum_repo_path, Pkg::Config.apt_repo_staging_path, Pkg::Config.apt_repo_path, '/opt/downloads'].compact.freeze
  end

  # Array of paths for temporarily staging artifacts before syncing to release-archives on s3
  # (nil config entries are dropped).
  def archive_paths
    [Pkg::Config.yum_archive_path, Pkg::Config.apt_archive_path, Pkg::Config.freight_archive_path, Pkg::Config.downloads_archive_path, '/opt/tmp-apt'].compact.freeze
  end

  # Move a yum directory from the repo path to the archive staging path.
  # Exits 0 remotely when the directory was already staged; fails when it
  # cannot be found in either location.
  def stage_yum_archives(directory)
    # /opt/repository/yum/#{directory}
    source = File.join(Pkg::Config.yum_repo_path, directory)
    destination = File.join(Pkg::Config.yum_archive_path, directory)
    command = <<-CMD
if [ ! -d #{source} ]; then
  if [ -d #{destination} ]; then
    echo "Directory #{source} has already been staged, skipping . . ."
    exit 0
  else
    echo "ERROR: Couldn't find directory #{source}, exiting . . ."
    exit 1
  fi
fi
find #{source} -type l -delete
sudo chattr -i -R #{source}
sudo mkdir --parents #{File.dirname(destination)}
sudo chown root:release -R #{Pkg::Config.yum_archive_path}
sudo chmod g+w -R #{Pkg::Config.yum_archive_path}
mv #{source} #{destination}
    CMD
    Pkg::Util::Net.remote_execute(Pkg::Config.staging_server, command)
  end

  # Move directories from freight path (aka repo staging path) to archive
  # staging paths. For 'main', loose files at the top of the staging path are
  # staged instead of a named directory.
  def stage_apt_archives(directory)
    find_command = "find #{Pkg::Config.apt_repo_staging_path} -type d -name #{directory}"
    find_command = "find #{Pkg::Config.apt_repo_staging_path} -maxdepth 2 -type f" if directory == 'main'
    command = <<-CMD
for stuff in $(#{find_command}); do
  find $stuff -type l -delete
  codename=$(dirname ${stuff##{Pkg::Config.apt_repo_staging_path}/})
  sudo mkdir --parents #{Pkg::Config.freight_archive_path}/$codename
  sudo chown root:release -R #{Pkg::Config.freight_archive_path}/$codename
  sudo chmod g+w -R #{Pkg::Config.freight_archive_path}/$codename
  mv $stuff #{Pkg::Config.freight_archive_path}/$codename

  pool_directory=#{Pkg::Config.apt_repo_path}/pool/$codename/#{directory}
  if [ ! -d $pool_directory ]; then
    echo "Can't find directory $pool_directory, it may have already been archived, skipping . . ."
    continue
  fi
  sudo mkdir --parents /opt/tmp-apt
  sudo chown root:release -R /opt/tmp-apt
  sudo chmod g+w -R /opt/tmp-apt
  mv $pool_directory /opt/tmp-apt
done
    CMD
    Pkg::Util::Net.remote_execute(Pkg::Config.staging_server, command)
  end

  # Move a downloads directory to the archive staging path. Mirrors the
  # idempotent behavior of stage_yum_archives.
  def stage_downloads_archives(directory)
    # /opt/downloads/#{directory}
    source = File.join('/', 'opt', 'downloads', directory)
    destination = File.join(Pkg::Config.downloads_archive_path, directory)
    command = <<-CMD
if [ ! -d #{source} ]; then
  if [ -d #{destination} ]; then
    echo "Directory #{source} has already been staged, skipping . . ."
    exit 0
  else
    echo "ERROR: Couldn't find directory #{source}, exiting . . ."
    exit 1
  fi
fi
find #{source} -type l -delete
sudo chattr -i -R #{source}
sudo mkdir --parents #{File.dirname(destination)}
sudo chown root:release -R #{Pkg::Config.downloads_archive_path}
sudo chmod g+w -R #{Pkg::Config.downloads_archive_path}
mv #{source} #{destination}
    CMD
    Pkg::Util::Net.remote_execute(Pkg::Config.staging_server, command)
  end

  # Delete empty directories from repo paths on weth.
  def remove_empty_directories
    base_paths.each do |path|
      command = <<-CMD
for directory in $(find #{path} -type d); do
  if [ ! -d $directory ]; then
    echo "Can't find directory $directory, it was probably already deleted, skipping . . ."
    continue
  fi
  files=$(find $directory -type f)
  if [ -z "$files" ]; then
    echo "No files in directory $directory, deleting . . ."
    sudo rm -rf $directory
  fi
done
      CMD
      Pkg::Util::Net.remote_execute(Pkg::Config.staging_server, command)
    end
  end

  # Delete broken symlinks from repo paths on weth.
  def remove_dead_symlinks
    base_paths.each do |path|
      command = "find #{path} -xtype l -delete"
      Pkg::Util::Net.remote_execute(Pkg::Config.staging_server, command)
    end
  end

  # Delete artifacts from archive staging paths (after they've been synced to s3).
  def delete_staged_archives
    archive_paths.each do |archive_path|
      command = "sudo rm -rf #{File.join(archive_path, '*')}"
      Pkg::Util::Net.remote_execute(Pkg::Config.staging_server, command)
    end
  end
end
|
|
@@ -0,0 +1,651 @@
|
|
|
1
|
+
require 'artifactory'
|
|
2
|
+
require 'uri'
|
|
3
|
+
require 'open-uri'
|
|
4
|
+
require 'digest'
|
|
5
|
+
require 'packaging/artifactory/extensions'
|
|
6
|
+
|
|
7
|
+
module Pkg
|
|
8
|
+
|
|
9
|
+
# The Artifactory class
|
|
10
|
+
# This class provides automation to access the artifactory repos maintained
|
|
11
|
+
# by the Release Engineering team at Puppet. It has the ability to both push
|
|
12
|
+
# artifacts to the repos, and to retrieve them back from the repos.
|
|
13
|
+
class ManageArtifactory
|
|
14
|
+
|
|
15
|
+
# The Artifactory property that the artifactCleanup user plugin
|
|
16
|
+
# {https://github.com/jfrog/artifactory-user-plugins/tree/master/cleanup/artifactCleanup}
|
|
17
|
+
# uses to tell it to not clean a particular artifact
|
|
18
|
+
# Frozen to prevent accidental in-place mutation of shared constant strings.
ARTIFACTORY_CLEANUP_SKIP_PROPERTY = 'cleanup.skip'.freeze

# Repo type/base used when no platform-specific repo applies.
DEFAULT_REPO_TYPE = 'generic'.freeze
DEFAULT_REPO_BASE = 'development'.freeze
|
|
22
|
+
|
|
23
|
+
# @param project [String] The name of the project this package is for
|
|
24
|
+
# @param project_version [String] The version of the project we want the
|
|
25
|
+
# package for. This can be one of three things:
|
|
26
|
+
# 1) the final tag of the project the packages were built from
|
|
27
|
+
# 2) the long git sha the project the packages were built from
|
|
28
|
+
# 3) the EZBake generated development sha where the packages live
|
|
29
|
+
# @option :artifactory_uri [String] the uri for the artifactory server.
|
|
30
|
+
# This currently defaults to 'https://artifactory.delivery.puppetlabs.net/artifactory'
|
|
31
|
+
# @option :repo_base [String] The base of all repos, set for consistency.
|
|
32
|
+
# This currently defaults to 'development'
|
|
33
|
+
# Build a manager scoped to a single project/version pair and point the
# Artifactory client at the configured endpoint.
#
# @param project [String] name of the project the packages belong to
# @param project_version [String] final tag, long git sha, or EZBake dev sha
# @param opts [Hash] optional :artifactory_uri and :repo_base overrides
def initialize(project, project_version, opts = {})
  @artifactory_uri = opts[:artifactory_uri] || 'https://artifactory.delivery.puppetlabs.net/artifactory'
  @repo_base = opts[:repo_base] || DEFAULT_REPO_BASE
  @project = project
  @project_version = project_version

  # Configure the artifactory gem globally for subsequent API calls.
  Artifactory.endpoint = @artifactory_uri
end
|
|
42
|
+
|
|
43
|
+
# @param platform_tag [String] The platform tag string for the repo we need
|
|
44
|
+
# information on. If generic information is needed, pass in `generic`
|
|
45
|
+
# @return [Array] An array containing three items, first being the main repo
|
|
46
|
+
# name for the platform_tag, the second being the subdirectories of the
|
|
47
|
+
# repo leading to the artifact we want to install, and the third being the
|
|
48
|
+
# alternate subdirectories for a given repo. This last option is only
|
|
49
|
+
# currently used for debian platforms, where the path to the repo
|
|
50
|
+
# specified in the list file is different than the full path to the repo.
|
|
51
|
+
# Resolve where artifacts for a platform tag live on Artifactory.
#
# @param platform_tag [String] platform tag, or `generic` for the generic repo
# @return [Array(String, String)] top-level repo name and the subdirectory
#   path under it that leads to this project/version's artifacts
def location_for(platform_tag)
  toplevel_repo = DEFAULT_REPO_TYPE
  repo_subdirectories = File.join(@repo_base, @project, @project_version)

  # For real platforms, look up the package format and tag components.
  # NOTE: format/platform/version/architecture stay nil for 'generic',
  # so the case below falls through and the defaults are returned.
  unless platform_tag == DEFAULT_REPO_TYPE
    format = Pkg::Platforms.package_format_for_tag(platform_tag)
    platform, version, architecture = Pkg::Platforms.parse_platform_tag(platform_tag)
  end

  case format
  when 'rpm'
    toplevel_repo = 'rpm'
    repo_subdirectories = File.join(repo_subdirectories, "#{platform}-#{version}-#{architecture}")
  when 'deb'
    toplevel_repo = 'debian__local'
    repo_subdirectories = File.join(repo_subdirectories, "#{platform}-#{version}")
  when 'swix', 'dmg', 'svr4', 'ips'
    repo_subdirectories = File.join(repo_subdirectories, "#{platform}-#{version}-#{architecture}")
  when 'msi'
    repo_subdirectories = File.join(repo_subdirectories, "#{platform}-#{architecture}")
  end

  [toplevel_repo, repo_subdirectories]
end
|
|
75
|
+
|
|
76
|
+
# @param platform_tag [String] The platform tag specific to the information
|
|
77
|
+
# we need. If only the generic information is needed, pass in `generic`
|
|
78
|
+
# @return [Hash] Returns a hash of data specific to this platform tag
|
|
79
|
+
# Gather everything we know about one platform tag into a single hash.
#
# @param platform_tag [String] platform tag, or `generic` for generic info
# @return [Hash] :platform, :platform_version, :architecture, :codename,
#   :package_format, :repo_name, :repo_subdirectories, :full_artifactory_path
#   (platform fields are nil for the 'generic' tag; :codename only set for deb)
def platform_specific_data(platform_tag)
  unless platform_tag == DEFAULT_REPO_TYPE
    platform, version, architecture = Pkg::Platforms.parse_platform_tag(platform_tag)
    package_format = Pkg::Platforms.package_format_for_tag(platform_tag)
    if package_format == 'deb'
      codename = Pkg::Platforms.codename_for_platform_version(platform, version)
    end
  end

  repo_name, repo_subdirectories = location_for(platform_tag)
  full_artifactory_path = File.join(repo_name, repo_subdirectories)

  {
    platform: platform,
    platform_version: version,
    architecture: architecture,
    codename: codename,
    package_format: package_format,
    repo_name: repo_name,
    repo_subdirectories: repo_subdirectories,
    full_artifactory_path: full_artifactory_path
  }
end
|
|
102
|
+
|
|
103
|
+
# @param platform_tag [String] The platform to generate the list contents
|
|
104
|
+
# for
|
|
105
|
+
# @return [String] The contents of the debian list file to enable the
|
|
106
|
+
# debian artifactory repos for the specified project and version
|
|
107
|
+
# Render the apt list-file line enabling this project/version's debian repo.
#
# @param platform_tag [String] the platform to generate the list contents for
# @return [String] a single `deb ...` source line
# @raise [RuntimeError] when the tag is not an apt-based platform
def deb_list_contents(platform_tag)
  data = platform_specific_data(platform_tag)
  unless data[:package_format] == 'deb'
    raise "The platform '#{platform_tag}' is not an apt-based system."
  end

  "deb #{@artifactory_uri}/#{data[:repo_name]} #{data[:codename]} #{data[:repo_subdirectories]}"
end
|
|
114
|
+
|
|
115
|
+
# @param platform_tag [String] The platform to generate the repo file
|
|
116
|
+
# contents for
|
|
117
|
+
# @return [String] The contents of the rpm repo file to enable the rpm
|
|
118
|
+
# artifactory repo for the specified project and version
|
|
119
|
+
# Render a yum .repo file enabling this project/version's rpm repo.
#
# @param platform_tag [String] the platform to generate the repo file for
# @return [String] the contents of the rpm repo file
# @raise [RuntimeError] when the tag is not a yum-based platform
def rpm_repo_contents(platform_tag)
  data = platform_specific_data(platform_tag)
  unless data[:package_format] == 'rpm'
    raise "The platform '#{platform_tag}' is not a yum-based system"
  end

  <<-DOC
[Artifactory #{@project} #{@project_version} for #{platform_tag}]
name=Artifactory Repository for #{@project} #{@project_version} for #{platform_tag}
baseurl=#{@artifactory_uri}/#{data[:repo_name]}/#{data[:repo_subdirectories]}
enabled=1
gpgcheck=0
#Optional - if you have GPG signing keys installed, use the below flags to verify the repository metadata signature:
#gpgkey=#{@artifactory_uri}/#{data[:repo_name]}/#{data[:repo_subdirectories]}/repomd.xml.key
#repo_gpgcheck=1
  DOC
end
|
|
135
|
+
|
|
136
|
+
# Verify the correct environment variables are set in order to process
|
|
137
|
+
# authorization to access the artifactory repos
|
|
138
|
+
# Verify the environment carries Artifactory credentials: either the
# ARTIFACTORY_USERNAME/ARTIFACTORY_PASSWORD pair or ARTIFACTORY_API_KEY.
#
# @raise [RuntimeError] with setup instructions when neither is present
def check_authorization
  basic_auth = ENV['ARTIFACTORY_USERNAME'] && ENV['ARTIFACTORY_PASSWORD']
  return if basic_auth || ENV['ARTIFACTORY_API_KEY']

  raise <<-DOC
Unable to determine credentials for Artifactory. Please set one of the
following environment variables:

For basic authentication, please set:
ARTIFACTORY_USERNAME
ARTIFACTORY_PASSWORD

If you would like to use the API key, ensure ARTIFACTORY_USERNAME and
ARTIFACTORY_PASSWORD are not set, as these take precedence. Instead, please
set:
ARTIFACTORY_API_KEY

You can also set the path to a pem file with your custom certificates with:
ARTIFACTORY_SSL_PEM_FILE
  DOC
end
|
|
158
|
+
|
|
159
|
+
# @param platform_tag [String] The platform tag to generate deploy
|
|
160
|
+
# properties for
|
|
161
|
+
# @return [String] Any required extra bits that we need for the curl
|
|
162
|
+
# command used to deploy packages to artifactory
|
|
163
|
+
#
|
|
164
|
+
# These are a few examples from chef/artifactory-client. These could
|
|
165
|
+
# potentially be very powerful, but we should decide how to use them.
|
|
166
|
+
# status: 'DEV',
|
|
167
|
+
# rating: 5,
|
|
168
|
+
# branch: 'master'
|
|
169
|
+
#
|
|
170
|
+
# Currently we are including everything that would be included in the yaml
|
|
171
|
+
# file that is generated at package build time.
|
|
172
|
+
# Compute the Artifactory deploy properties for an artifact upload.
# Debian packages get deb.distribution / deb.component / deb.architecture so
# the debian repo layout indexes them; other formats get no extra properties.
#
# @param platform_tag [String] platform tag the artifact belongs to
# @param file_name [String] basename of the artifact being deployed
# @return [Hash] property names to values
#
# TODO: This method should be returning the entire contents of the build-time
# yaml file in hash form as metadata for these artifacts. The previous attempt
# (Pkg::Config.config_to_hash) produced a malformed hash that made the deploy
# bail out, so it is disabled for now.
def deploy_properties(platform_tag, file_name)
  data = platform_specific_data(platform_tag)

  properties = {}
  if data[:package_format] == 'deb'
    architecture = data[:architecture]
    # noarch debian packages are named *_all.deb; report arch accordingly
    architecture = 'all' if file_name =~ /_all\.deb$/
    properties.merge!({
      'deb.distribution' => data[:codename],
      'deb.component' => data[:repo_subdirectories],
      'deb.architecture' => architecture,
    })
  end
  properties
end
|
|
196
|
+
|
|
197
|
+
# Basic method to check if a package exists on artifactory
|
|
198
|
+
# @param package [String] The full relative path to the package to be
|
|
199
|
+
# checked, relative from the current working directory
|
|
200
|
+
# Return true if package already exists on artifactory
|
|
201
|
+
# Basic check for whether a package already exists on Artifactory.
#
# @param package [String] relative path to the package to be checked
#   (only the basename is used for the search)
# @return [Boolean] true when at least one matching artifact is found
def package_exists_on_artifactory?(package)
  check_authorization
  artifacts = Artifactory::Resource::Artifact.search(
    name: File.basename(package),
    artifactory_uri: @artifactory_uri
  )
  # Idiomatic predicate: a non-empty search result means the package exists.
  !artifacts.empty?
end
|
|
210
|
+
|
|
211
|
+
# @param package [String] The full relative path to the package to be
|
|
212
|
+
# shipped, relative from the current working directory
|
|
213
|
+
# Upload a package to the repo location derived from its platform tag,
# attaching an MD5 checksum header and platform-specific deploy properties.
#
# @param package [String] relative path to the package to be shipped
# @raise [RuntimeError] wrapping any upload failure with the destination path
def deploy_package(package)
  platform_tag = Pkg::Paths.tag_from_artifact_path(package) || DEFAULT_REPO_TYPE
  data = platform_specific_data(platform_tag)

  check_authorization
  artifact = Artifactory::Resource::Artifact.new(local_path: package)
  artifact_md5 = Digest::MD5.file(package).hexdigest
  headers = { "X-Checksum-Md5" => artifact_md5 }
  artifact.upload(
    data[:repo_name],
    File.join(data[:repo_subdirectories], File.basename(package)),
    deploy_properties(platform_tag, File.basename(package)),
    headers
  )
rescue StandardError
  # Guard against `data` being nil when the failure happened before it was
  # assigned (e.g. platform_specific_data itself raised); the old handler
  # would crash with NoMethodError here and mask the real error.
  destination = data ? File.join(@artifactory_uri, data[:full_artifactory_path]) : @artifactory_uri
  raise "Attempt to upload '#{package}' to #{destination} failed"
end
|
|
230
|
+
|
|
231
|
+
# @param pkg [String] The package to download YAML for
|
|
232
|
+
# i.e. 'puppet-agent' or 'puppetdb'
|
|
233
|
+
# @param ref [String] The git ref (sha or tag) we want the YAML for
|
|
234
|
+
#
|
|
235
|
+
# @return [String] The contents of the YAML file
|
|
236
|
+
# Fetch the build-metadata YAML file for a package at a given git ref.
#
# @param pkg [String] the package to download YAML for
#   i.e. 'puppet-agent' or 'puppetdb'
# @param ref [String] the git ref (sha or tag) we want the YAML for
# @return [String] the contents of the YAML file
# @raise [RuntimeError] when the file cannot be fetched
def retrieve_yaml_data(pkg, ref)
  yaml_url = "#{@artifactory_uri}/#{DEFAULT_REPO_TYPE}/#{DEFAULT_REPO_BASE}/#{pkg}/#{ref}/#{ref}.yaml"
  # Kernel#open on a URL was deprecated in Ruby 2.7 and removed in 3.0;
  # URI.open is the supported open-uri entry point.
  URI.open(yaml_url, &:read)
rescue StandardError
  raise "Failed to load YAML data for #{pkg} at #{ref} from #{yaml_url}!"
end
|
|
242
|
+
|
|
243
|
+
# @param platform_data [Hash] The hash of the platform data that needs to be
|
|
244
|
+
# parsed
|
|
245
|
+
# @param platform_tag [String] The tag that the data we want belongs to
|
|
246
|
+
# @return [String] The name of the package for the given project,
|
|
247
|
+
# project_version, and platform_tag
|
|
248
|
+
# Look up the package basename for one platform tag in loaded yaml data.
#
# @param platform_data [Hash] platform data parsed from the build yaml
# @param platform_tag [String] the tag that the data we want belongs to
# @return [String] basename of the package for this project/version/tag
# @raise [RuntimeError] when the tag is absent from the dataset
def package_name(platform_data, platform_tag)
  File.basename(platform_data[platform_tag][:artifact])
rescue StandardError
  raise <<-DOC
Package name could not be found from loaded yaml data. Either this package
does not exist, or '#{platform_tag}' is not present in this dataset.

The following are available platform tags for '#{@project}' '#{@project_version}':
#{platform_data.keys.sort}
  DOC
end
|
|
260
|
+
|
|
261
|
+
# @param platform_data [Hash] The hash of the platform data that needs to be
|
|
262
|
+
# parsed
|
|
263
|
+
# @param platform_tag [String] The tag that the data we want belongs to
|
|
264
|
+
# @return [Array] An array containing all packages for the given project,
|
|
265
|
+
# project_version, and platform_tag
|
|
266
|
+
# Collect every package basename (primary and additional artifacts) for one
# platform tag in loaded yaml data.
#
# @param platform_data [Hash] platform data parsed from the build yaml
# @param platform_tag [String] the tag that the data we want belongs to
# @return [Array<String>] basenames of all packages for this tag
# @raise [RuntimeError] when the tag is absent from the dataset
def all_package_names(platform_data, platform_tag)
  tag_data = platform_data[platform_tag]
  # :additional_artifacts may be missing, nil, a string, or an array;
  # flatten normalizes all of those, then blanks are discarded.
  artifacts = [tag_data[:artifact], tag_data[:additional_artifacts]].flatten
  artifacts.reject! { |artifact| artifact.nil? || artifact.empty? }
  artifacts.map { |artifact| File.basename(artifact) }
rescue StandardError
  raise <<-DOC
Package name could not be found from loaded yaml data. Either this package
does not exist, or '#{platform_tag}' is not present in this dataset.

The following are available platform tags for '#{@project}' '#{@project_version}':
#{platform_data.keys.sort}
  DOC
end
|
|
282
|
+
|
|
283
|
+
# Promotes a build based on build SHA or tag (or SNAPSHOT version, for ezbake)
|
|
284
|
+
# Depending on if it's an RPM or Deb package promote accordingly
|
|
285
|
+
# 'promote' by copying the package(s) to the enterprise directory on artifactory
|
|
286
|
+
#
|
|
287
|
+
# @param pkg [String] the package name ex. puppet-agent
|
|
288
|
+
# @param ref [String] tag or SHA of package(s) to be promoted
|
|
289
|
+
# @param platform_tag [String] the platform tag of the artifact
|
|
290
|
+
# ex. el-7-x86_64, ubuntu-18.04-amd64
|
|
291
|
+
# @param repository [String] the repository to promote
|
|
292
|
+
# the artifact to. Will prepend 'rpm_' or 'debian_' to the repositories
|
|
293
|
+
# depending on package type
|
|
294
|
+
# @param debian_component [String] the debian component to promote packages
|
|
295
|
+
# into. Optional.
|
|
296
|
+
# Promotes a build based on build SHA or tag (or SNAPSHOT version, for ezbake).
# "Promotion" copies the package(s) into the enterprise repository on
# Artifactory; already-promoted packages are skipped rather than failing.
#
# @param pkg [String] the package name ex. puppet-agent
# @param ref [String] tag or SHA of package(s) to be promoted
# @param platform_tag [String] the platform tag of the artifact
#   ex. el-7-x86_64, ubuntu-18.04-amd64
# @param repository [String] the repository to promote the artifact to.
#   'rpm_' or 'debian_' is prepended depending on package type
# @param debian_component [String] the debian component to promote packages
#   into. Optional.
def promote_package(pkg, ref, platform_tag, repository, debian_component = nil)
  # Load the build metadata to discover which artifacts belong to this ref.
  yaml_data = YAML.load(retrieve_yaml_data(pkg, ref))

  artifact_names = all_package_names(yaml_data[:platform_data], platform_tag)
  artifact_names.each do |artifact_name|
    search_results = Artifactory::Resource::Artifact.search(
      name: artifact_name, artifactory_uri: @artifactory_uri)

    if search_results.empty?
      raise "Error: could not find PKG=#{pkg} at REF=#{ref} for #{platform_tag}"
    end
    artifact_to_promote = search_results.first

    # This makes an assumption that we're using some consistent repo names
    # but need to either prepend 'rpm_' or 'debian_' based on package type.
    case File.extname(artifact_name)
    when '.rpm'
      promotion_path = "rpm_#{repository}/#{platform_tag}/#{artifact_name}"
    when '.deb'
      promotion_path = "debian_#{repository}/#{platform_tag}/#{artifact_name}"
      properties = { 'deb.component' => debian_component } unless debian_component.nil?
    else
      raise "Error: Unknown promotion repository for #{artifact_name}! Only .rpm and .deb files are supported!"
    end

    begin
      source_path = artifact_to_promote.download_uri.sub(@artifactory_uri, '')
      puts "promoting #{artifact_name} from #{source_path} to #{promotion_path}"
      artifact_to_promote.copy(promotion_path)
      # Re-find the copied artifact so properties land on the promoted copy.
      unless properties.nil?
        copies = Artifactory::Resource::Artifact.search(name: artifact_name, artifactory_uri: @artifactory_uri)
        promoted_artifact = copies.find { |artifact| artifact.download_uri =~ %r{#{promotion_path}} }
        promoted_artifact.properties(properties)
      end
    rescue Artifactory::Error::HTTPError => e
      # Artifactory reports "already there" as an HTTP error; treat as a skip.
      if e.message =~ /(destination and source are the same|user doesn't have permissions to override)/i
        puts "Skipping promotion of #{artifact_name}; it has already been promoted"
      else
        puts e.message
        raise e
      end
    rescue StandardError => e
      puts "Something went wrong promoting #{artifact_name}!"
      raise e
    end
  end
end
|
|
346
|
+
|
|
347
|
+
# Using the manifest provided by enterprise-dist, grab the appropropriate packages from artifactory based on md5sum
|
|
348
|
+
# @param staging_directory [String] location to download packages to
|
|
349
|
+
# @param manifest [File] JSON file containing information about what packages to download and the corresponding md5sums
|
|
350
|
+
# @param remote_path [String] Optional partial path on the remote host containing packages
|
|
351
|
+
# Used to specify which subdirectories packages will be downloaded from.
|
|
352
|
+
# Using the manifest provided by enterprise-dist, grab the appropriate
# packages from artifactory based on md5sum.
#
# @param staging_directory [String] location to download packages to
# @param manifest [Hash] parsed JSON mapping dist => { name => info } where
#   info carries 'filename' and 'md5' for each package to download
# @param remote_path [String] Optional partial path on the remote host
#   containing packages. Used to specify which subdirectories packages
#   will be downloaded from.
# @raise [RuntimeError] when no artifact with the expected md5 can be found
def download_packages(staging_directory, manifest, remote_path = '')
  check_authorization
  manifest.each do |dist, packages|
    puts "Grabbing the #{dist} packages from artifactory"
    packages.each do |_name, info|
      filename = info['filename']
      artifacts = Artifactory::Resource::Artifact.checksum_search(md5: "#{info["md5"]}", repos: ["rpm_enterprise__local", "debian_enterprise__local"], name: filename)
      artifact_to_download = artifacts.select { |artifact| artifact.download_uri.include? remote_path }.first
      # If we found matching artifacts, but not in the correct path, copy the
      # artifact to the correct path. This should help us keep repos up to date
      # with the packages we are expecting to be there while helping us avoid
      # 'what the hell, could not find package' errors.
      if artifact_to_download.nil? && !artifacts.empty?
        artifact_to_copy = artifacts.first
        # NOTE(review): the destination interpolation was garbled in the diff
        # source; restored as the package filename — confirm against upstream.
        copy_artifact(artifact_to_copy, artifact_to_copy.repo, "#{remote_path}/#{dist}/#{filename}")
        artifacts = Artifactory::Resource::Artifact.checksum_search(md5: "#{info["md5"]}", repos: ["rpm_enterprise__local", "debian_enterprise__local"], name: filename)
        artifact_to_download = artifacts.select { |artifact| artifact.download_uri.include? remote_path }.first
      end

      if artifact_to_download.nil?
        message = "Error: what the hell, could not find package #{filename} with md5sum #{info["md5"]}"
        unless remote_path.empty?
          message += " in #{remote_path}"
        end
        raise message
      else
        full_staging_path = "#{staging_directory}/#{dist}"
        puts "downloading #{artifact_to_download.download_uri} to #{File.join(full_staging_path, filename)}"
        artifact_to_download.download(full_staging_path, filename: filename)
      end
    end
  end
end
|
|
384
|
+
|
|
385
|
+
# Ship PE tarballs to specified artifactory repo and paths
#
# @param local_tarball_directory [String] the local directory containing the tarballs
# @param target_repo [String] the artifactory repo to ship the tarballs to
# @param ship_paths [Array] the artifactory path(s) within target_repo to ship
#   each tarball to
def ship_pe_tarballs(local_tarball_directory, target_repo, ship_paths)
  check_authorization
  ship_paths.each do |remote_path|
    Dir.foreach(local_tarball_directory) do |tarball_name|
      # Dir.foreach also yields the '.' and '..' entries; ignore them
      next if ['.', '..'].include?(tarball_name)
      begin
        puts "Uploading #{tarball_name} to #{target_repo}/#{remote_path}#{tarball_name}"
        tarball_artifact = Artifactory::Resource::Artifact.new(
          local_path: "#{local_tarball_directory}/#{tarball_name}")
        tarball_artifact.upload(target_repo, "#{remote_path}#{tarball_name}")
      rescue Errno::EPIPE
        # Best-effort upload: a broken pipe warns and moves on to the next tarball
        STDERR.puts "Warning: Could not upload #{tarball_name} to #{target_repo}/#{remote_path}. Skipping."
        next
      end
    end
  end
end
|
|
407
|
+
|
|
408
|
+
# Upload file to Artifactory
#
# @param local_path [String] local path to file to upload
# @param target_repo [String] repo on artifactory to upload to
# @param target_path [String] path within target_repo to upload to
# @param properties [Hash] Optional property names and values to assign the uploaded file
#   For example, this would set both the 'cleanup.skip' and 'deb.component' properties:
#   \{ "cleanup.skip" => true, "deb.component" => 'bionic' \}
# @param headers [Hash] Optional upload headers, most likely checksums, for the upload request
#   "X-Checksum-Md5" and "X-Checksum-Sha1" are typical
# @raise [RuntimeError] if the local file is missing or the upload is rejected
def upload_file(local_path, target_repo, target_path, properties = {}, headers = {})
  fail "Error: Couldn't find file at #{local_path}." unless File.exist? local_path
  check_authorization
  artifact = Artifactory::Resource::Artifact.new(local_path: local_path)
  # The remote filename mirrors the local basename under target_path
  full_upload_path = File.join(target_path, File.basename(local_path))
  begin
    puts "Uploading #{local_path} to #{target_repo}/#{full_upload_path} . . ."
    artifact.upload(target_repo, full_upload_path, properties, headers)
  rescue Artifactory::Error::HTTPError
    # Previously bound the exception to an unused variable `e`; the binding is dropped
    fail "Error: Upload failed. Ensure path #{target_path} exists in the #{target_repo} repository."
  end
end
|
|
429
|
+
|
|
430
|
+
# Start by clearing the ARTIFACTORY_CLEANUP_SKIP_PROPERTY on all artifacts in a
# single repo/directory location. This allows all artifacts in the directory to be cleaned.
# Once cleared, set ARTIFACTORY_CLEANUP_SKIP_PROPERTY on those matching pe_build_version,
# presumably the latest. This prevents those artifacts from being deleted.
#
# @param repo [String] Artifactory repository that contains the specified directory
# @param directory [String] Artifactory directory in repo containing the artifacts from which to
#   set the 'cleanup.skip' property setting to false
# @param pe_build_version [String] Set 'cleanup.skip' property on artifacts that
#   contain this string in their file inside the directory.
def prevent_artifact_cleanup(repo, directory, pe_build_version)
  # Strip any trailing slashes without mutating the caller's string
  # (the previous `sub!` modified the argument in place).
  directory = directory.sub(%r{/+\z}, '')

  all_artifacts_pattern = "#{directory}/*"
  latest_artifacts_pattern = "#{directory}/*#{pe_build_version}*"

  all_artifacts = Artifactory::Resource::Artifact.pattern_search(
    repo: repo,
    pattern: all_artifacts_pattern
  )
  latest_artifacts = Artifactory::Resource::Artifact.pattern_search(
    repo: repo,
    pattern: latest_artifacts_pattern
  )

  # Clear cleanup.skip on all artifacts in directory
  puts "Clearing #{ARTIFACTORY_CLEANUP_SKIP_PROPERTY} in #{repo}/#{all_artifacts_pattern}"
  all_artifacts.each do |artifact|
    artifact.properties(ARTIFACTORY_CLEANUP_SKIP_PROPERTY => false)
  end

  # Set cleanup.skip on all artifacts in directory matching *pe_build_version*
  puts "Setting #{ARTIFACTORY_CLEANUP_SKIP_PROPERTY} in #{repo}/#{latest_artifacts_pattern}"
  latest_artifacts.each do |artifact|
    artifact.properties(ARTIFACTORY_CLEANUP_SKIP_PROPERTY => true)
  end
end
|
|
468
|
+
|
|
469
|
+
# Search for artifacts named `artifact_id` in `repo` whose download URI
# includes `path`.
#
# @param artifact_id [String] name of the artifact to search for
# @param repo [String] repo the artifact lives in
# @param path [String] path to the artifact within the repo
#
# @return [Array<Artifactory::Resource::Artifact>] A list of artifacts that
#   match the query
def search_with_path(artifact_id, repo, path)
  check_authorization
  Artifactory::Resource::Artifact
    .search(name: artifact_id, repos: repo)
    .select { |candidate| candidate.download_uri.include?(path) }
end
|
|
482
|
+
|
|
483
|
+
# Download an artifact based on name, repo, and path to artifact
#
# @param artifact_name [String] name of artifact to download
# @param repo [String] repo the artifact lives
# @param path [String] path to artifact in the repo
# @param target [String] directory to download artifact to. Defaults to '.'
# @param filename [String] Filename to save artifact as. Defaults to artifact_name
# @return [nil] when no matching artifact is found
def download_artifact(artifact_name, repo, path, target: '.', filename: nil)
  matches = search_with_path(artifact_name, repo, path)
  return nil if matches.empty?
  # All matches would be saved to the same location anyway, so downloading
  # only the first one is sufficient.
  matches.first.download(target, filename: filename || artifact_name)
end
|
|
497
|
+
|
|
498
|
+
# Download final pe tarballs to local path based on name, repo, and path on artifactory
#
# @param pe_version [String] pe final tag
# @param repo [String] repo the tarballs live
# @param remote_path [String] path to tarballs in the repo
# @param local_path [String] local path to download tarballs to
def download_final_pe_tarballs(pe_version, repo, remote_path, local_path)
  check_authorization
  tarballs = Artifactory::Resource::Artifact.search(name: pe_version, repos: repo, exact_match: false)
  tarballs.each do |tarball|
    uri = tarball.download_uri
    # Only fetch final artifacts under remote_path; skip release candidates
    next if !uri.include?(remote_path) || uri.include?("-rc")
    tarball.download(local_path)
  end
end
|
|
512
|
+
|
|
513
|
+
# Download beta pe tarballs to local path based on tag, repo, and path on artifactory
#
# @param beta_tag [String] rc tag of beta release ex. 2019.2.0-rc10
# @param repo [String] repo the tarballs live
# @param remote_path [String] path to tarballs in the repo
# @param local_path [String] local path to download tarballs to
def download_beta_pe_tarballs(beta_tag, repo, remote_path, local_path)
  check_authorization
  # Match any tarball under remote_path whose name embeds the beta tag
  beta_pattern = "#{remote_path}/*-#{beta_tag}-*"
  found = Artifactory::Resource::Artifact.pattern_search(repo: repo, pattern: beta_pattern)
  found.each { |tarball| tarball.download(local_path) }
end
|
|
526
|
+
|
|
527
|
+
# When we ship a new PE release we copy final tarballs to archives/releases
#
# @param pe_version [String] pe final tag
# @param repo [String] repo the tarballs live
# @param remote_path [String] path to tarballs in the repo
# @param target_path [String] path copy tarballs to, assumes same repo
def copy_final_pe_tarballs(pe_version, repo, remote_path, target_path)
  check_authorization
  final_tarballs = Artifactory::Resource::Artifact.search(name: pe_version, repos: repo, exact_match: false)
  final_tarballs.each do |artifact|
    next unless artifact.download_uri.include? remote_path
    # Skip release candidates; only final tarballs are archived
    next if artifact.download_uri.include? "-rc"
    filename = File.basename(artifact.download_uri)
    # Artifactory does NOT like when you use `File.join`, so let's concatenate!
    # (restored garbled interpolations: both were `#{filename}`)
    full_target_path = "#{repo}/#{target_path}/#{filename}"
    puts "INFO: Copying #{filename} to #{full_target_path} . . ."
    artifact.copy(full_target_path)
  end
end
|
|
545
|
+
|
|
546
|
+
# Copy an artifact to a target repo/path
#
# @param artifact [Artifactory::Resource::Artifact] The artifact to be copied
# @param target_repo [String] The repository to copy the artifact to
# @param target_path [String] The path in the target repository to copy the artifact to
# @param target_debian_component [String] `deb.component` property to set on the copied artifact
#   defaults to `Pkg::Paths.debian_component_from_path(target_path)`
def copy_artifact(artifact, target_repo, target_path, target_debian_component = nil)
  filename = File.basename(artifact.download_uri)
  artifactory_target_path = "#{target_repo}/#{target_path}"
  puts "Copying #{artifact.download_uri} to #{artifactory_target_path}"
  begin
    artifact.copy(artifactory_target_path)
  rescue Artifactory::Error::HTTPError
    # Copying onto itself fails with an HTTP error; treat it as a no-op
    STDERR.puts "Could not copy #{artifactory_target_path}. Source and destination are the same. Skipping..."
  end

  # Debian packages need their `deb.component` property set so the repo
  # metadata places them in the right component.
  if File.extname(filename) == '.deb'
    target_debian_component ||= Pkg::Paths.debian_component_from_path(target_path)
    copied_artifact_search = search_with_path(filename, target_repo, target_path)
    # restored garbled interpolation: message referenced `#{filename}`
    fail "Error: what the hell, could not find just-copied package #{filename} under #{target_repo}/#{target_path}" if copied_artifact_search.empty?
    copied_artifact = copied_artifact_search.first
    properties = { 'deb.component' => target_debian_component }
    copied_artifact.properties(properties)
  end
end
|
|
572
|
+
|
|
573
|
+
# When we cut a new PE branch, we need to copy the pe components into <pe_version>/{repos,feature,release}/<platform>
#
# @param manifest [File] JSON file containing information about what packages to download and the corresponding md5sums
# @param target_path [String] path on artifactory to copy components to, e.g. <pe_version>/release
# @raise [RuntimeError] when a manifest entry has no matching artifact by md5
def populate_pe_repos(manifest, target_path)
  check_authorization
  manifest.each do |dist, packages|
    puts "Copying #{dist} packages..."
    packages.each do |_package_name, info|
      filename = info["filename"]
      # Locate the package by md5 checksum across the enterprise repos
      artifact = Artifactory::Resource::Artifact.checksum_search(
        md5: "#{info["md5"]}",
        repos: ["rpm_enterprise__local", "debian_enterprise__local"],
        name: filename
      ).first
      if artifact.nil?
        # restored garbled interpolation: message referenced `#{filename}`
        raise "Error: what the hell, could not find package #{filename} with md5sum #{info["md5"]}"
      end
      copy_artifact(artifact, artifact.repo, "#{target_path}/#{dist}/#{filename}")
    end
  end
end
|
|
590
|
+
|
|
591
|
+
# Remove all artifacts in repo based on pattern, used when we purge all artifacts in release/ after PE release
#
# @param repos [Array] repos that we want to search for artifacts in
# @param pattern [String] pattern for artifacts that should be deleted ex. `2019.1/release/*/*`
def teardown_repo(repos, pattern)
  check_authorization
  repos.each do |repo|
    doomed_artifacts = Artifactory::Resource::Artifact.pattern_search(repo: repo, pattern: pattern)
    doomed_artifacts.each do |doomed|
      puts "Deleting #{doomed.download_uri}"
      doomed.delete
    end
  end
end
|
|
604
|
+
|
|
605
|
+
# Remove promoted artifacts if promotion is reverted, use information provided in manifest
#
# @param manifest [File] JSON file containing information about what packages to download and the corresponding md5sums
# @param remote_path [String] path on artifactory to promoted packages ex. 2019.1/repos/
# @param package [String] package name ex. puppet-agent
# @param repos [Array] the repos the promoted artifacts live
def remove_promoted_packages(manifest, remote_path, package, repos)
  check_authorization
  manifest.each do |_dist, packages|
    packages.each do |package_name, info|
      # Only the named package is reverted; leave everything else alone
      next unless package_name == package
      matching = Artifactory::Resource::Artifact.checksum_search(
        md5: "#{info["md5"]}", repos: repos, name: info["filename"]
      )
      matching.each do |artifact|
        next unless artifact.download_uri.include? remote_path
        puts "Removing reverted package #{artifact.download_uri}"
        artifact.delete
      end
    end
  end
end
|
|
625
|
+
|
|
626
|
+
# Remove shipped PE tarballs from artifactory
# Used when compose fails, we only want the tarball shipped to artifactory if all platforms succeed
# Identify which packages were created and shipped based on md5sum and remove them
#
# @param tarball_path [String] the local path to the tarballs that were shipped
# @param pe_repo [String] the artifactory repo the tarballs were shipped to
def purge_copied_pe_tarballs(tarball_path, pe_repo)
  check_authorization
  Dir.foreach("#{tarball_path}/") do |pe_tarball|
    next if pe_tarball == '.' || pe_tarball == ".."
    md5 = Digest::MD5.file("#{tarball_path}/#{pe_tarball}").hexdigest
    artifacts_to_delete = Artifactory::Resource::Artifact.checksum_search(md5: md5, repos: pe_repo, name: pe_tarball)
    # checksum_search returns an array, so a no-match result is `[]`, not nil;
    # the original nil-only guard never fired. Check both to actually skip.
    next if artifacts_to_delete.nil? || artifacts_to_delete.empty?
    begin
      artifacts_to_delete.each do |artifact|
        puts "Removing #{pe_tarball} from #{pe_repo}... "
        artifact.delete
      end
    rescue Artifactory::Error::HTTPError
      STDERR.puts "Error: cannot remove #{pe_tarball}, do you have the right permissions?"
    end
  end
end
|
|
648
|
+
|
|
649
|
+
private :check_authorization
|
|
650
|
+
end
|
|
651
|
+
end
|