packaging 0.99.40 → 0.99.45

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
- SHA1:
- metadata.gz: 29234ce00ec3d4e6b806de21de5146a170c5ae50
- data.tar.gz: 225084d9ba126f53ef36aab40c3a35a48696a5bd
+ SHA256:
+ metadata.gz: 4ac7070ff94ebd455cb676a068b288cf6a66bd67cb9b9fae886b939cd122f33e
+ data.tar.gz: 2f54c6d07e0c1dfa1a1a8e08e95d818727da0d2e3857f7673e3fa16f1aac7d3b
  SHA512:
- metadata.gz: 4f9ec133dc94d0cd44ff0c8a926ced515688a2bbfbf9810ab4cd7c5a7c9b3e0dabb2224c14814723965c3477fc0a0bc0381a6030519a46dfd70cb55f4f181ebd
- data.tar.gz: 0a030c97eb33c623b9852a0459a45a3e2ec0906cafd9d5a9536a5b433852687358f3444c183e524244f809422252db15b7c05ce7f8240a4e96a2bf14b729a10d
+ metadata.gz: 0ad7a909bf0daaa570ca54266efe75aff45e0b3f48b8a0553c26048d5f694c2db92f5e5571a8bc4ee142f1776d55b7ee3f7816fbd63ac4ec5d3eed4ac08c29f6
+ data.tar.gz: 515edd1fc993713e6c465ce085f97470208d070ca0bb16dd3e2ebc7f61b319b9edd60a8263a4ed5087574b6bfeb81e3dbb75a553a6860804370607ca2be20144
@@ -1,7 +1,69 @@
+ require 'artifactory'
  require 'uri'
  require 'open-uri'
  require 'digest'

+ #
+ # [eric.griswold] This is unfortunate. The 'pattern_search' class method really does belong in
+ # the Artifactory gem. However, because of some of Chef's social policies,
+ # I am unwilling to contribute this code there. If that changes, I'll submit a PR. Until
+ # then, it'll live here.
+ #
+ module Artifactory
+ class Resource::Artifact
+ #
+ # Search for an artifact in a repo using an Ant-like pattern.
+ # Unlike many Artifactory searches, this one is restricted to a single
+ # repository.
+ #
+ # @example Search in a repository named 'foo_local' for an artifact in a directory containing
+ # the word "recent", named "artifact[0-9].txt"
+ # Artifact.pattern_search(pattern: '*recent*/artifact[0-9].txt',
+ # repo: 'foo_local')
+ #
+ # @param [Hash] options
+ # A hash of options, as follows:
+ #
+ # @option options [Artifactory::Client] :client
+ # the client object to make the request with
+ # @option options [String] :pattern
+ # the Ant-like pattern to use for finding artifacts within the repos. Note that the
+ # Ant pattern '**' is barred in this case by JFrog.
+ # @option options [String] :repo
+ # the repo to search
+ #
+ # @return [Array<Resource::Artifact>]
+ # a list of artifacts that match the query
+ #
+ def self.pattern_search(options = {})
+ client = extract_client!(options)
+ params = Util.slice(options, :pattern, :repo)
+ pattern_search_parameter = { :pattern => "#{params[:repo]}:#{params[:pattern]}" }
+ response = client.get('/api/search/pattern', pattern_search_parameter)
+ return [] if response['files'].nil? || response['files'].empty?
+
+ # A typical response:
+ # {
+ # "repoUri"=>"https:<artifactory endpoint>/<repo>",
+ # "sourcePattern"=>"<repo>:<provided search pattern>",
+ # "files"=>[<filename that matched pattern>, ...]
+ # }
+ #
+ # Inserting '/api/storage' before the repo makes the 'from_url' call work correctly.
+ #
+ repo_uri = response['repoUri']
+ unless repo_uri.include?('/api/storage/')
+ # rubocop:disable Style/PercentLiteralDelimiters
+ repo_uri.sub!(%r(/#{params[:repo]}$), "/api/storage/#{params[:repo]}")
+ end
+ response['files'].map do |file_path|
+ from_url("#{repo_uri}/#{file_path}", client: client)
+ end
+ end
+ end
+ end
+
+
  module Pkg

  # The Artifactory class
@@ -10,6 +72,11 @@ module Pkg
  # artifacts to the repos, and to retrieve them back from the repos.
  class ManageArtifactory

+ # The Artifactory property that the artifactCleanup user plugin
+ # {https://github.com/jfrog/artifactory-user-plugins/tree/master/cleanup/artifactCleanup}
+ # uses to tell it to not clean a particular artifact
+ ARTIFACTORY_CLEANUP_SKIP_PROPERTY = 'cleanup.skip'
+
  DEFAULT_REPO_TYPE = 'generic'
  DEFAULT_REPO_BASE = 'development'

@@ -24,8 +91,6 @@ module Pkg
  # @option :repo_base [String] The base of all repos, set for consistency.
  # This currently defaults to 'development'
  def initialize(project, project_version, opts = {})
- require 'artifactory'
-
  @artifactory_uri = opts[:artifactory_uri] || 'https://artifactory.delivery.puppetlabs.net/artifactory'
  @repo_base = opts[:repo_base] || DEFAULT_REPO_BASE

@@ -297,17 +362,20 @@ module Pkg
  # get the artifact name
  artifact_names = all_package_names(yaml_data[:platform_data], platform_tag)
  artifact_names.each do |artifact_name|
- artifact_to_promote = Artifactory::Resource::Artifact.search(name: artifact_name, :artifactory_uri => @artifactory_uri)
+ artifact_search_results = Artifactory::Resource::Artifact.search(
+ name: artifact_name, :artifactory_uri => @artifactory_uri)

- if artifact_to_promote.empty?
+ if artifact_search_results.empty?
  raise "Error: could not find PKG=#{pkg} at REF=#{git_ref} for #{platform_tag}"
  end
+ artifact_to_promote = artifact_search_results[0]

  # This makes an assumption that we're using some consistent repo names
  # but need to either prepend 'rpm_' or 'debian_' based on package type
- if File.extname(artifact_name) == '.rpm'
+ case File.extname(artifact_name)
+ when '.rpm'
  promotion_path = "rpm_#{repository}/#{platform_tag}/#{artifact_name}"
- elsif File.extname(artifact_name) == '.deb'
+ when '.deb'
  promotion_path = "debian_#{repository}/#{platform_tag}/#{artifact_name}"
  properties = { 'deb.component' => debian_component } unless debian_component.nil?
  else
@@ -315,8 +383,9 @@ module Pkg
  end

  begin
- puts "promoting #{artifact_name} to #{promotion_path}"
- artifact_to_promote[0].copy(promotion_path)
+ source_path = artifact_to_promote.download_uri.sub(@artifactory_uri, '')
+ puts "promoting #{artifact_name} from #{source_path} to #{promotion_path}"
+ artifact_to_promote.copy(promotion_path)
  unless properties.nil?
  artifacts = Artifactory::Resource::Artifact.search(name: artifact_name, :artifactory_uri => @artifactory_uri)
  promoted_artifact = artifacts.select { |artifact| artifact.download_uri =~ %r{#{promotion_path}} }.first
@@ -356,21 +425,201 @@ module Pkg
  end

  # Ship PE tarballs to specified artifactory repo and paths
- # @param tarball_path [String] the path of the tarballs to ship
+ # @param local_tarball_directory [String] the local directory containing the tarballs
  # @param target_repo [String] the artifactory repo to ship the tarballs to
- # @param ship_paths [Array] the artifactory path(s) to ship the tarballs to within the target_repo
- def ship_pe_tarballs(tarball_path, target_repo, ship_paths)
+ # @param ship_paths [Array] the artifactory path(s) to ship the tarballs to within
+ # the target_repo
+ def ship_pe_tarballs(local_tarball_directory, target_repo, ship_paths)
  check_authorization
- Dir.foreach("#{tarball_path}/") do |pe_tarball|
- unless pe_tarball == '.' || pe_tarball == ".."
- ship_paths.each do |path|
- begin
- puts "Uploading #{pe_tarball} to #{target_repo}/#{path}... "
- artifact = Artifactory::Resource::Artifact.new(local_path: "#{tarball_path}/#{pe_tarball}")
- artifact.upload(target_repo, "/#{path}/#{pe_tarball}")
- rescue Errno::EPIPE
- STDERR.puts "Error: Could not upload #{pe_tarball} to #{path}"
+ ship_paths.each do |path|
+ unset_cleanup_skip_on_artifacts(target_repo, path)
+ Dir.foreach(local_tarball_directory) do |pe_tarball|
+ next if pe_tarball == '.' || pe_tarball == ".."
+ begin
+ puts "Uploading #{pe_tarball} to #{target_repo}/#{path}#{pe_tarball}"
+ artifact = Artifactory::Resource::Artifact.new(
+ local_path: "#{local_tarball_directory}/#{pe_tarball}")
+ uploaded_artifact = artifact.upload(target_repo, "#{path}#{pe_tarball}")
+ # The Artifactory gem property setter only works when '/api/storage' is used in
+ # the 'uri' field.
+ # Strangely, the above Artifactory::Resource::Artifact.new gives us the raw URI.
+ # Therefore we are forced to do some path surgery, inserting
+ # '/api/storage' before "/#{target_repo}" to make the property setting work.
+ storage_artifact = uploaded_artifact
+ unless storage_artifact.uri.include?("/api/storage")
+ storage_artifact.uri = storage_artifact.uri.sub(
+ "/#{target_repo}",
+ "/api/storage/#{target_repo}")
  end
+ storage_artifact.properties(ARTIFACTORY_CLEANUP_SKIP_PROPERTY => true)
+ rescue Errno::EPIPE
+ ## [eric.griswold] maybe this should be fatal?
+ STDERR.puts "Warning: Could not upload #{pe_tarball} to #{target_repo}/#{path}. Skipping."
+ next
+ end
+ end
+ end
+ end
+
+ # Upload file to Artifactory
+ # @param local_path [String] local path to file to upload
+ # @param target_repo [String] repo on artifactory to upload to
+ # @param target_path [String] path within target_repo to upload to
+ def upload_file(local_path, target_repo, target_path)
+ fail "Error: Couldn't find file at #{local_path}." unless File.exist? local_path
+ check_authorization
+ artifact = Artifactory::Resource::Artifact.new(local_path: local_path)
+ full_upload_path = File.join(target_path, File.basename(local_path))
+ begin
+ puts "Uploading #{local_path} to #{target_repo}/#{full_upload_path} . . ."
+ artifact.upload(target_repo, full_upload_path)
+ rescue Artifactory::Error::HTTPError => e
+ fail "Error: Upload failed. Ensure path #{target_path} exists in the #{target_repo} repository."
+ end
+ end
+
+ # Clear the ARTIFACTORY_CLEANUP_SKIP_PROPERTY on all artifacts in
+ # a specified directory in a given Artifactory repo that match
+ # /<directory>/*.tar. Use this before uploading newer tarballs to maintain
+ # 'cleanup.skip' on the latest tarballs only.
+ #
+ # @param repo [String] Artifactory repository that contains the specified directory
+ # @param directory [String] Artifactory directory in repo containing the artifacts from which to
+ # set the 'cleanup.skip' property setting to false
+ def unset_cleanup_skip_on_artifacts(repo, directory)
+ artifacts_with_cleanup_skip = Artifactory::Resource::Artifact.property_search(
+ ARTIFACTORY_CLEANUP_SKIP_PROPERTY => true,
+ "repos" => repo
+ )
+
+ # For the upcoming directory check, make sure we know where our trailing slashes are.
+ directory_no_trailing_slashes = directory.sub(/(\/)+$/, '')
+
+ # For all tarball artifacts in #{directory} that have the Artifactory property
+ # 'cleanup.skip' set to true, set it to 'false'
+ artifacts_with_cleanup_skip.each do |artifact|
+ next unless artifact.uri.include?("/#{directory_no_trailing_slashes}/")
+ artifact.properties(ARTIFACTORY_CLEANUP_SKIP_PROPERTY => false)
+ end
+ end
+
+ # Download an artifact based on name, repo, and path to artifact
+ # @param artifact_name [String] name of artifact to download
+ # @param repo [String] repo the artifact lives
+ # @param path [String] path to artifact in the repo
+ def download_artifact(artifact_name, repo, path)
+ check_authorization
+ artifacts = Artifactory::Resource::Artifact.search(name: artifact_name, repos: repo)
+ artifacts.each do |artifact|
+ if artifact.download_uri.include? path
+ artifact.download('.')
+ end
+ end
+ end
+
+ # Download final pe tarballs to local path based on name, repo, and path on artifactory
+ # @param pe_version [String] pe final tag
+ # @param repo [String] repo the tarballs live
+ # @param remote_path [String] path to tarballs in the repo
+ # @param local_path [String] local path to download tarballs to
+ def download_final_pe_tarballs(pe_version, repo, remote_path, local_path)
+ check_authorization
+ artifacts = Artifactory::Resource::Artifact.search(name: pe_version, repos: repo)
+ artifacts.each do |artifact|
+ next unless artifact.download_uri.include? remote_path
+ next if artifact.download_uri.include? "-rc"
+ artifact.download(local_path)
+ end
+ end
+
+ # Download beta pe tarballs to local path based on tag, repo, and path on artifactory
+ # @param beta_tag [String] rc tag of beta release ex. 2019.2.0-rc10
+ # @param repo [String] repo the tarballs live
+ # @param remote_path [String] path to tarballs in the repo
+ # @param local_path [String] local path to download tarballs to
+ def download_beta_pe_tarballs(beta_tag, repo, remote_path, local_path)
+ check_authorization
+ pattern = "#{remote_path}/*-#{beta_tag}-*"
+ artifacts = Artifactory::Resource::Artifact.pattern_search(repo: repo, pattern: pattern)
+ artifacts.each do |artifact|
+ artifact.download(local_path)
+ end
+ end
+
+ # When we ship a new PE release we copy final tarballs to archives/releases
+ # @param pe_version [String] pe final tag
+ # @param repo [String] repo the tarballs live
+ # @param remote_path [String] path to tarballs in the repo
+ # @param target_path [String] path copy tarballs to, assumes same repo
+ def copy_final_pe_tarballs(pe_version, repo, remote_path, target_path)
+ check_authorization
+ final_tarballs = Artifactory::Resource::Artifact.search(name: pe_version, repos: repo)
+ final_tarballs.each do |artifact|
+ next unless artifact.download_uri.include? remote_path
+ next if artifact.download_uri.include? "-rc"
+ artifact.copy("#{repo}/#{target_path}")
+ end
+ end
+
+ # When we cut a new PE branch, we need to copy the pe components into <pe_version>/{repos,feature,release}/<platform>
+ # @param manifest [File] JSON file containing information about what packages to download and the corresponding md5sums
+ # @param target_path [String] path on artifactory to copy components to, e.g. <pe_version>/release
+ def populate_pe_repos(manifest, target_path)
+ check_authorization
+ manifest.each do |dist, packages|
+ puts "Copying #{dist} packages..."
+ packages.each do |name, info|
+ artifact = Artifactory::Resource::Artifact.checksum_search(md5: "#{info["md5"]}", repos: ["rpm_enterprise__local", "debian_enterprise__local"]).first
+ if artifact.nil?
+ raise "Error: what the hell, could not find package #{info["filename"]} with md5sum #{info["md5"]}"
+ end
+ begin
+ artifact_target_path = "#{artifact.repo}/#{target_path}/#{dist}/#{info["filename"]}"
+ puts "Copying #{artifact.download_uri} to #{artifact_target_path}"
+ artifact.copy(artifact_target_path)
+ rescue Artifactory::Error::HTTPError
+ STDERR.puts "Could not copy #{artifact_target_path}. Source and destination are the same. Skipping..."
+ end
+ if File.extname(info["filename"]) == '.deb'
+ copied_artifact_search = Artifactory::Resource::Artifact.pattern_search(repo: 'debian_enterprise__local', pattern: "#{target_path}/*/#{info["filename"]}")
+ fail "Error: what the hell, could not find just-copied package #{info["filename"]} under debian_enterprise__local/#{target_path}" if copied_artifact_search.nil?
+ copied_artifact = copied_artifact_search.first
+ properties = { 'deb.component' => Pkg::Paths.two_digit_pe_version_from_path(target_path) }
+ copied_artifact.properties(properties)
+ end
+ end
+ end
+ end
+
+ # Remove all artifacts in repo based on pattern, used when we purge all artifacts in release/ after PE release
+ # @param repos [Array] repos that we want to search for artifacts in
+ # @param pattern [String] pattern for artifacts that should be deleted ex. `2019.1/release/*/*`
+ def teardown_repo(repos, pattern)
+ check_authorization
+ repos.each do |repo|
+ artifacts = Artifactory::Resource::Artifact.pattern_search(repo: repo, pattern: pattern)
+ artifacts.each do |artifact|
+ puts "Deleting #{artifact.download_uri}"
+ artifact.delete
+ end
+ end
+ end
+
+ # Remove promoted artifacts if promotion is reverted, use information provided in manifest
+ # @param manifest [File] JSON file containing information about what packages to download and the corresponding md5sums
+ # @param remote_path [String] path on artifactory to promoted packages ex. 2019.1/repos/
+ # @param package [String] package name ex. puppet-agent
+ # @param repos [Array] the repos the promoted artifacts live
+ def remove_promoted_packages(manifest, remote_path, package, repos)
+ check_authorization
+ manifest.each do |dist, packages|
+ packages.each do |package_name, info|
+ next unless package_name == package
+ artifacts = Artifactory::Resource::Artifact.checksum_search(md5: "#{info["md5"]}", repos: repos)
+ artifacts.each do |artifact|
+ next unless artifact.download_uri.include? remote_path
+ puts "Removing reverted package #{artifact.download_uri}"
+ artifact.delete
  end
  end
  end
@@ -99,7 +99,7 @@ module Pkg
  # beaker install the msi without having to know any version
  # information, but we should report the versioned artifact in
  # platform_data
- next if platform == 'windows' && File.basename(artifact) == "#{self.project}-#{arch}.#{package_format}"
+ next if platform =~ /^windows.*$/ && File.basename(artifact) == "#{self.project}-#{arch}.#{package_format}"

  # Sometimes we have source or debug packages. We don't want to save
  # these paths in favor of the artifact paths.
@@ -25,13 +25,15 @@ module Pkg::Paths
  # with the artifact and path
  def tag_from_artifact_path(path)
  platform = Pkg::Platforms.supported_platforms.find { |p| path =~ /(\/|\.)#{p}[^\.]/ }
+ platform = 'windowsfips' if path =~ /windowsfips/
+
  codename = Pkg::Platforms.codenames.find { |c| path =~ /\/#{c}/ }

  if codename
  platform, version = Pkg::Platforms.codename_to_platform_version(codename)
  end

- version = '2012' if platform == 'windows'
+ version = '2012' if platform =~ /^windows.*$/

  version ||= Pkg::Platforms.versions_for_platform(platform).find { |v| path =~ /#{platform}(\/|-)?#{v}/ }

@@ -152,7 +154,7 @@ module Pkg::Paths
  if is_legacy_repo?(repo_name(nonfinal))
  [File.join(path_prefix, 'windows'), nil]
  else
- [File.join(path_prefix, 'windows', repo_name(nonfinal)), link_name(nonfinal).nil? ? nil : File.join(path_prefix, 'windows', link_name(nonfinal))]
+ [File.join(path_prefix, platform, repo_name(nonfinal)), link_name(nonfinal).nil? ? nil : File.join(path_prefix, platform, link_name(nonfinal))]
  end
  else
  raise "Not sure where to find packages with a package format of '#{package_format}'"
@@ -242,7 +244,7 @@ module Pkg::Paths
  if options[:legacy]
  File.join('repos', 'windows')
  else
- File.join('repos', 'windows', repo_target)
+ File.join('repos', platform, repo_target)
  end
  else
  raise "Not sure what to do with a package format of '#{package_format}'"
@@ -300,4 +302,10 @@ module Pkg::Paths
  return nil
  end
  end
+
+ def two_digit_pe_version_from_path(path)
+ matches = path.match(/\d+\.\d+/)
+ fail "Error: Could not determine PE version from path #{path}" if matches.nil?
+ return matches[0]
+ end
  end
@@ -340,6 +340,13 @@ module Pkg
  repo: false,
  }
  },
+ 'windowsfips' => {
+ '2012' => {
+ architectures: ['x64'],
+ package_format: 'msi',
+ repo: false,
+ }
+ },
  }.freeze

  # @return [Array] An array of Strings, containing all of the supported
@@ -1,35 +1,108 @@
  module Pkg::Repo

  class << self
- def create_signed_repo_archive(path_to_repo, name_of_archive, versioning)
+
+ ##
+ ## Construct a local_target based upon the versioning style
+ ##
+ def construct_local_target_path(project, versioning)
+ case versioning
+ when 'ref'
+ return File.join(project, Pkg::Config.ref)
+ when 'version'
+ return File.join(project, Pkg::Util::Version.dot_version)
+ else
+ fail "Error: Unknown versioning argument: #{versioning}"
+ end
+ end
+
+ ##
+ ## Put a single signed repo into a tarball stored in
+ ## 'pkg/<local_target>/<archive_name>.tar.gz'
+ ##
+ def create_signed_repo_archive(repo_location, archive_name, versioning)
  tar = Pkg::Util::Tool.check_tool('tar')
- Dir.chdir("pkg") do
- if versioning == 'ref'
- local_target = File.join(Pkg::Config.project, Pkg::Config.ref)
- elsif versioning == 'version'
- local_target = File.join(Pkg::Config.project, Pkg::Util::Version.dot_version)
+
+ local_target = construct_local_target_path(Pkg::Config.project, versioning)
+
+ if Pkg::Util::File.empty_dir?(File.join('pkg', local_target, repo_location))
+ if ENV['FAIL_ON_MISSING_TARGET'] == "true"
+ raise "Error: missing packages under #{repo_location}"
  end
- Dir.chdir(local_target) do
- if Pkg::Util::File.empty_dir?(path_to_repo)
- if ENV['FAIL_ON_MISSING_TARGET'] == "true"
- raise "ERROR: missing packages under #{path_to_repo}"
- else
- warn "Skipping #{name_of_archive} because #{path_to_repo} has no files"
- end
- else
- puts "Archiving #{path_to_repo} as #{name_of_archive}"
- stdout, _, _ = Pkg::Util::Execution.capture3("#{tar} --owner=0 --group=0 --create --gzip --file #{File.join('repos', "#{name_of_archive}.tar.gz")} #{path_to_repo}")
- stdout
- end
+ warn "Warn: Skipping #{archive_name} because #{repo_location} has no files"
+ return
+ end
+
+ Dir.chdir(File.join('pkg', local_target)) do
+ puts "Info: Archiving #{repo_location} as #{archive_name}"
+ target_tarball = File.join('repos', "#{archive_name}.tar.gz")
+ tar_command = "#{tar} --owner=0 --group=0 --create --gzip --file #{target_tarball} #{repo_location}"
+ stdout, _, _ = Pkg::Util::Execution.capture3(tar_command)
+ return stdout
+ end
+ end
+
+ ##
+ ## Add a single repo tarball into the 'all' tarball located in
+ ## 'pkg/<local_target>/<project>-all.tar'
+ ## Create the 'all' tarball if needed.
+ ##
+ def update_tarball_of_all_repos(project, platform, versioning)
+ tar = Pkg::Util::Tool.check_tool('tar')
+
+ all_repos_tarball_name = "#{project}-all.tar"
+ archive_name = "#{project}-#{platform['name']}"
+ local_target = construct_local_target_path(project, versioning)
+ repo_tarball_name = "#{archive_name}.tar.gz"
+ repo_tarball_path = File.join('repos', repo_tarball_name)
+
+ Dir.chdir(File.join('pkg', local_target)) do
+ unless Pkg::Util::File.exist?(repo_tarball_path)
+ warn "Skipping #{archive_name} because it (#{repo_tarball_path}) contains no files"
+ next
+ end
+
+ tar_action = "--create"
+ if File.exist?(all_repos_tarball_name)
+ tar_action = "--update"
  end
+
+ tar_command = "#{tar} --owner=0 --group=0 #{tar_action} --file #{all_repos_tarball_name} #{repo_tarball_path}"
+ stdout, _, _ = Pkg::Util::Execution.capture3(tar_command)
+ puts stdout
  end
  end

+ ##
+ ## Invoke gzip to compress the 'all' tarball located in
+ ## 'pkg/<local_target>/<project>-all.tar'
+ ##
+ def compress_tarball_of_all_repos(all_repos_tarball_name)
+ gzip = Pkg::Util::Tool.check_tool('gzip')
+
+ gzip_command = "#{gzip} --fast #{all_repos_tarball_name}"
+ stdout, _, _ = Pkg::Util::Execution.capture3(gzip_command)
+ puts stdout
+ end
+
+ ##
+ ## Generate each of the repos listed in <Config.platform_repos>.
+ ## Update the 'all repos' tarball as we do each one.
+ ## Compress the 'all repos' tarball when all the repos have been generated
+ ##
  def create_all_repo_archives(project, versioning)
  platforms = Pkg::Config.platform_repos
+ local_target = construct_local_target_path(project, versioning)
+ all_repos_tarball_name = "#{project}-all.tar"
+
  platforms.each do |platform|
  archive_name = "#{project}-#{platform['name']}"
  create_signed_repo_archive(platform['repo_location'], archive_name, versioning)
+ update_tarball_of_all_repos(project, platform, versioning)
+ end
+
+ Dir.chdir(File.join('pkg', local_target)) do
+ compress_tarball_of_all_repos(all_repos_tarball_name)
  end
  end

@@ -40,7 +113,7 @@ module Pkg::Repo
  stdout, stderr = Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.distribution_server, cmd, true)
  return stdout.split
  rescue => e
- fail "Could not retrieve directories that contain #{pkg_ext} packages in #{Pkg::Config.distribution_server}:#{artifact_directory}"
+ fail "Error: Could not retrieve directories that contain #{pkg_ext} packages in #{Pkg::Config.distribution_server}:#{artifact_directory}"
  end

  def populate_repo_directory(artifact_parent_directory)
@@ -49,7 +122,7 @@ module Pkg::Repo
  cmd << 'rsync --archive --verbose --one-file-system --ignore-existing artifacts/ repos/ '
  Pkg::Util::Net.remote_ssh_cmd(Pkg::Config.distribution_server, cmd)
  rescue => e
- fail "Could not populate repos directory in #{Pkg::Config.distribution_server}:#{artifact_parent_directory}"
+ fail "Error: Could not populate repos directory in #{Pkg::Config.distribution_server}:#{artifact_parent_directory}"
  end

  def argument_required?(argument_name, repo_command)
@@ -57,7 +130,7 @@ module Pkg::Repo
  end

  def update_repo(remote_host, command, options = {})
- fail_message = "Missing required argument '%s', update your build_defaults?"
+ fail_message = "Error: Missing required argument '%s', update your build_defaults?"
  [:repo_name, :repo_path, :repo_host, :repo_url].each do |option|
  fail fail_message % option.to_s if argument_required?(option.to_s, command) && !options[option]
  end
@@ -9,7 +9,7 @@ module Pkg::Sign::Msi

  work_dir = "Windows/Temp/#{Pkg::Util.rand_string}"
  Pkg::Util::Net.remote_ssh_cmd(ssh_host_string, "mkdir -p C:/#{work_dir}")
- msis = Dir.glob("#{target_dir}/windows/**/*.msi")
+ msis = Dir.glob("#{target_dir}/windows*/**/*.msi")
  Pkg::Util::Net.rsync_to(msis.join(" "), rsync_host_string, "/cygdrive/c/#{work_dir}")

  # Please Note:
@@ -26,6 +26,11 @@ describe 'artifactory.rb' do
  :repo_config => '',
  :additional_artifacts => ["./windows/puppet-agent-extras-5.3.1.34-x86.msi"],
  },
+ 'windowsfips-2012-x64' => {
+ :artifact => "./windowsfips/puppet-agent-5.3.1.34-x64.msi",
+ :repo_config => '',
+ :additional_artifacts => ["./windowsfips/puppet-agent-extras-5.3.1.34-x64.msi"],
+ },
  'eos-4-i386' => {
  :artifact => "./eos/4/PC1/i386/puppet-agent-5.3.1.34.gf65f9ef-1.eos4.i386.swix",
  :repo_config => '',
@@ -65,6 +70,12 @@ describe 'artifactory.rb' do
  :package_name => 'path/to/a/windows/package/puppet-agent-5.3.1.34-x86.msi',
  :all_package_names => ['puppet-agent-5.3.1.34-x86.msi','puppet-agent-extras-5.3.1.34-x86.msi']
  },
+ 'windowsfips-2012-x64' => {
+ :toplevel_repo => 'generic',
+ :repo_subdirectories => "#{default_repo_name}/#{project}/#{project_version}/windowsfips-x64",
+ :package_name => 'path/to/a/windowsfips/package/puppet-agent-5.3.1.34-x64.msi',
+ :all_package_names => ['puppet-agent-5.3.1.34-x64.msi','puppet-agent-extras-5.3.1.34-x64.msi']
+ },
  'eos-4-i386' => {
  :toplevel_repo => 'generic',
  :repo_subdirectories => "#{default_repo_name}/#{project}/#{project_version}/eos-4-i386",
@@ -232,7 +232,9 @@ describe "Pkg::Config" do
  "./artifacts/windows/puppet-agent-x64.msi\n" \
  "./artifacts/windows/puppet-agent-5.3.2-x86.wixpdb\n" \
  "./artifacts/windows/puppet-agent-5.3.2-x86.msi\n" \
- "./artifacts/windows/puppet-agent-5.3.2-x64.msi"
+ "./artifacts/windows/puppet-agent-5.3.2-x64.msi\n"\
+ "./artifacts/windowsfips/puppet-agent-x64.msi\n" \
+ "./artifacts/windowsfips/puppet-agent-5.3.2-x64.msi"

  solaris_artifacts = \
  "./artifacts/solaris/11/PC1/puppet-agent@5.3.2,5.11-1.sparc.p5p\n" \
@@ -297,6 +299,7 @@ describe "Pkg::Config" do
  data = Pkg::Config.platform_data
  expect(data['windows-2012-x86']).to include(:artifact => './windows/puppet-agent-5.3.2-x86.msi')
  expect(data['windows-2012-x64']).to include(:artifact => './windows/puppet-agent-5.3.2-x64.msi')
+ expect(data['windowsfips-2012-x64']).to include(:artifact => './windowsfips/puppet-agent-5.3.2-x64.msi')
  end

  it "should not collect debug packages" do
@@ -19,7 +19,7 @@ describe 'Pkg::Platforms' do

  describe '#supported_platforms' do
  it 'should return all supported platforms' do
- platforms = ['aix', 'cisco-wrlinux', 'cumulus', 'debian', 'el', 'eos', 'fedora', 'osx', 'redhatfips', 'sles', 'solaris', 'ubuntu', 'windows']
+ platforms = ['aix', 'cisco-wrlinux', 'cumulus', 'debian', 'el', 'eos', 'fedora', 'osx', 'redhatfips', 'sles', 'solaris', 'ubuntu', 'windows', 'windowsfips']
  expect(Pkg::Platforms.supported_platforms).to match_array(platforms)
  end
  end
@@ -127,6 +127,7 @@ describe 'Pkg::Platforms' do
  test_cases = {
  'debian-9-amd64' => ['debian', '9', 'amd64'],
  'windows-2012-x86' => ['windows', '2012', 'x86'],
+ 'windowsfips-2012-x64' => ['windowsfips', '2012', 'x64'],
  'el-7-x86_64' => ['el', '7', 'x86_64'],
  'cisco-wrlinux-7-x86_64' => ['cisco-wrlinux', '7', 'x86_64'],
  'cisco-wrlinux-7' => ['cisco-wrlinux', '7', ''],
@@ -16,46 +16,45 @@ describe "#Pkg::Repo" do
  allow(Pkg::Util::File).to receive(:empty_dir?).and_return(false)
  allow(Pkg::Util::Execution).to receive(:capture3)

- expect(Dir).to receive(:chdir).with("pkg").and_yield
- expect(Dir).to receive(:chdir).with("project/1.1.1").and_yield
+ expect(Dir).to receive(:chdir).with('pkg/project/1.1.1').and_yield
  Pkg::Repo.create_signed_repo_archive("/path", "project-debian-6-i386", "version")
  end

- it "should use a ref if ref is specified as versioning" do
- allow(Pkg::Util::Tool).to receive(:check_tool).and_return("tarcommand")
- allow(Dir).to receive(:chdir).with("pkg").and_yield
+ it 'should use a ref if ref is specified as versioning' do
+ allow(Pkg::Util::Tool).to receive(:check_tool).and_return('tarcommand')
+ allow(Dir).to receive(:chdir).with('pkg').and_yield
  allow(Pkg::Util::File).to receive(:empty_dir?).and_return(false)
  allow(Pkg::Util::Execution).to receive(:capture3)

- expect(Pkg::Config).to receive(:project).and_return("project")
- expect(Pkg::Config).to receive(:ref).and_return("AAAAAAAAAAAAAAA")
- expect(Dir).to receive(:chdir).with("project/AAAAAAAAAAAAAAA").and_yield
- Pkg::Repo.create_signed_repo_archive("/path", "project-debian-6-i386", "ref")
+ expect(Pkg::Config).to receive(:project).and_return('project')
+ expect(Pkg::Config).to receive(:ref).and_return('AAAAAAAAAAAAAAA')
+ expect(Dir).to receive(:chdir).with('pkg/project/AAAAAAAAAAAAAAA').and_yield
+ Pkg::Repo.create_signed_repo_archive('/path', 'project-debian-6-i386', 'ref')
  end

- it "should use dot versions if version is specified as versioning" do
- allow(Pkg::Util::Tool).to receive(:check_tool).and_return("tarcommand")
- allow(Dir).to receive(:chdir).with("pkg").and_yield
+ it 'should use dot versions if version is specified as versioning' do
+ allow(Pkg::Util::Tool).to receive(:check_tool).and_return('tarcommand')
+ allow(Dir).to receive(:chdir).with('pkg').and_yield
  allow(Pkg::Util::File).to receive(:empty_dir?).and_return(false)
  allow(Pkg::Util::Execution).to receive(:capture3)

- expect(Pkg::Config).to receive(:project).and_return("project")
- expect(Pkg::Util::Version).to receive(:dot_version).and_return("1.1.1")
- expect(Dir).to receive(:chdir).with("project/1.1.1").and_yield
- Pkg::Repo.create_signed_repo_archive("/path", "project-debian-6-i386", "version")
+ expect(Pkg::Config).to receive(:project).and_return('project')
+ expect(Pkg::Util::Version).to receive(:dot_version).and_return('1.1.1')
+ expect(Dir).to receive(:chdir).with('pkg/project/1.1.1').and_yield
+ Pkg::Repo.create_signed_repo_archive('/path', 'project-debian-6-i386', 'version')
  end

- it "should fail if ENV['FAIL_ON_MISSING_TARGET'] is true and empty_dir? is also true" do
- allow(Pkg::Util::Tool).to receive(:check_tool).and_return("tarcommand")
- allow(Pkg::Config).to receive(:project).and_return("project")
- allow(Pkg::Util::Version).to receive(:dot_version).and_return("1.1.1")
+ it 'should fail if ENV["FAIL_ON_MISSING_TARGET"] is true and empty_dir? is also true' do
+ allow(Pkg::Util::Tool).to receive(:check_tool).and_return('tarcommand')
+ allow(Pkg::Config).to receive(:project).and_return('project')
+ allow(Pkg::Util::Version).to receive(:dot_version).and_return('1.1.1')
  allow(Pkg::Util::Execution).to receive(:capture3)
- allow(Dir).to receive(:chdir).with("pkg").and_yield
- allow(Dir).to receive(:chdir).with("project/1.1.1").and_yield
+ allow(Dir).to receive(:chdir).with('pkg').and_yield
+ allow(Dir).to receive(:chdir).with('project/1.1.1').and_yield
  allow(Pkg::Util::File).to receive(:empty_dir?).and_return(true)
- ENV['FAIL_ON_MISSING_TARGET'] = "true"
+ ENV['FAIL_ON_MISSING_TARGET'] = 'true'

- expect{Pkg::Repo.create_signed_repo_archive("/path", "project-debian-6-i386", "version")}.to raise_error(RuntimeError, "ERROR: missing packages under /path")
+ expect{Pkg::Repo.create_signed_repo_archive('/path', 'project-debian-6-i386', 'version')}.to raise_error(RuntimeError, 'Error: missing packages under /path')
  end

  it "should only warn if ENV['FAIL_ON_MISSING_TARGET'] is false and empty_dir? is true" do
@@ -63,35 +62,38 @@ describe "#Pkg::Repo" do
  allow(Pkg::Config).to receive(:project).and_return("project")
  allow(Pkg::Util::Version).to receive(:dot_version).and_return("1.1.1")
  allow(Pkg::Util::Execution).to receive(:capture3)
- allow(Dir).to receive(:chdir).with("pkg").and_yield
- allow(Dir).to receive(:chdir).with("project/1.1.1").and_yield
+ allow(Dir).to receive(:chdir).with("pkg/project/1.1.1").and_yield
  allow(Pkg::Util::File).to receive(:empty_dir?).and_return(true)
  ENV['FAIL_ON_MISSING_TARGET'] = "false"

  expect{Pkg::Repo.create_signed_repo_archive("/path", "project-debian-6-i386", "version")}.not_to raise_error
  end

- it "should invoke tar correctly" do
- allow(Pkg::Util::Tool).to receive(:check_tool).and_return("tarcommand")
- allow(Pkg::Config).to receive(:project).and_return("project")
- allow(Pkg::Util::Version).to receive(:dot_version).and_return("1.1.1")
- allow(Dir).to receive(:chdir).with("pkg").and_yield
- allow(Dir).to receive(:chdir).with("project/1.1.1").and_yield
+ it 'should invoke tar correctly' do
+ allow(Pkg::Util::Tool).to receive(:check_tool).and_return('tarcommand')
+ allow(Pkg::Config).to receive(:project).and_return('project')
+ allow(Pkg::Util::Version).to receive(:dot_version).and_return('1.1.1')
+ allow(Dir).to receive(:chdir).with('pkg/project/1.1.1').and_yield
  allow(Pkg::Util::File).to receive(:empty_dir?).and_return(false)

- expect(Pkg::Util::Execution).to receive(:capture3).with("tarcommand --owner=0 --group=0 --create --gzip --file repos/project-debian-6-i386.tar.gz /path")
- Pkg::Repo.create_signed_repo_archive("/path", "project-debian-6-i386", "version")
+ expect(Pkg::Util::Execution).to receive(:capture3).with('tarcommand --owner=0 --group=0 --create --gzip --file repos/project-debian-6-i386.tar.gz /path')
+ Pkg::Repo.create_signed_repo_archive('/path', 'project-debian-6-i386', 'version')
  end
  end

- describe "#create_signed_repo_archive" do
- it "should invoke create_signed_repo_archive correctly for multiple entries in platform_repos" do
+ describe '#create_signed_repo_archive' do
+ it 'should invoke create_signed_repo_archive correctly for multiple entries in platform_repos' do
  allow(Pkg::Config).to receive(:platform_repos).and_return(platform_repo_stub)
+ allow(Pkg::Config).to receive(:project).and_return('project')
+ allow(Pkg::Util::Version).to receive(:dot_version).and_return('1.1.1')
+ allow(Dir).to receive(:chdir).with('pkg/project/1.1.1').and_yield
+
+ expect(Pkg::Repo).to receive(:create_signed_repo_archive).with('repos/el/4/**/i386', 'project-el-4-i386', 'version')
+ expect(Pkg::Repo).to receive(:create_signed_repo_archive).with('repos/el/5/**/i386', 'project-el-5-i386', 'version')
+ expect(Pkg::Repo).to receive(:create_signed_repo_archive).with('repos/el/6/**/i386', 'project-el-6-i386', 'version')

- expect(Pkg::Repo).to receive(:create_signed_repo_archive).with("repos/el/4/**/i386", "project-el-4-i386", "version")
- expect(Pkg::Repo).to receive(:create_signed_repo_archive).with("repos/el/5/**/i386", "project-el-5-i386", "version")
- expect(Pkg::Repo).to receive(:create_signed_repo_archive).with("repos/el/6/**/i386", "project-el-6-i386", "version")
- Pkg::Repo.create_all_repo_archives("project", "version")
+ allow(Pkg::Util::Execution).to receive(:capture3)
+ Pkg::Repo.create_all_repo_archives('project', 'version')
  end
  end

@@ -4,7 +4,9 @@ describe '#Pkg::Util::Ship' do
  describe '#collect_packages' do
  msi_pkgs = [
  'pkg/windows/puppet5/puppet-agent-1.4.1.2904.g8023dd1-x86.msi',
- 'pkg/windows/puppet5/puppet-agent-x86.msi'
+ 'pkg/windows/puppet5/puppet-agent-x86.msi',
+ 'pkg/windowsfips/puppet5/puppet-agent-1.4.1.2904.g8023dd1-x64.msi',
+ 'pkg/windowsfips/puppet5/puppet-agent-x64.msi'
  ]
  swix_pkgs = [
  'pkg/eos/puppet5/4/i386/puppet-agent-1.4.1.2904.g8023dd1-1.eos4.i386.swix',
@@ -24,7 +26,13 @@ describe '#Pkg::Util::Ship' do
  expect(Pkg::Util::Ship.collect_packages(['pkg/**/*.msi'], ['puppet-agent-x(86|64).msi'])).not_to include('pkg/windows/puppet5/puppet-agent-x86.msi')
  end
  it 'correctly includes packages that do not match a passed excluded argument' do
- expect(Pkg::Util::Ship.collect_packages(['pkg/**/*.msi'], ['puppet-agent-x(86|64).msi'])).to include('pkg/windows/puppet5/puppet-agent-1.4.1.2904.g8023dd1-x86.msi')
+ expect(Pkg::Util::Ship.collect_packages(['pkg/**/*.msi'], ['puppet-agent-x(86|64).msi'])).to \
+ match_array(
+ [
+ 'pkg/windows/puppet5/puppet-agent-1.4.1.2904.g8023dd1-x86.msi',
+ 'pkg/windowsfips/puppet5/puppet-agent-1.4.1.2904.g8023dd1-x64.msi',
+ ]
+ )
  end
  end

@@ -68,60 +68,6 @@ DOC
  Pkg::Repo.create_all_repo_archives(name_of_archive, versioning)
  end

- # This is pretty similar to the 'pack_signed_repo' task. The difference here is that instead
- # of creating a tarball for each repo passed, it adds each repo to a single archive, creating
- # one 'all' tarball with all of the repos. This is useful for cutomers who have a PE master with
- # no internet access. They can unpack the puppet-agent-all tarball into the location that
- # pe_repo expects and use simplified agent install without needing internet access, or having to
- # manually download each agent that they need to feed to pe_repo.
- # This task should be invoked after prepare_signed_repos, so that there are repos to pack up.
- task :pack_all_signed_repos, [:path_to_repo, :name_of_archive, :versioning] => ["pl:fetch"] do |t, args|
- # path_to_repo should be relative to ./pkg
- name_of_archive = args.name_of_archive or fail ":name_of_archive is a required argument for #{t}"
- versioning = args.versioning or fail ":versioning is a required argument for #{t}"
- tar = Pkg::Util::Tool.check_tool('tar')
-
- Dir.chdir("pkg") do
- if versioning == 'ref'
- local_target = File.join(Pkg::Config.project, Pkg::Config.ref, "repos")
- elsif versioning == 'version'
- local_target = File.join(Pkg::Config.project, Pkg::Util::Version.dot_version, "repos")
- end
-
- Dir.chdir(local_target) do
- if !Pkg::Util::File.exist?("#{name_of_archive}.tar.gz")
- warn "Skipping #{name_of_archive} because it (#{name_of_archive}.tar.gz) has no files"
- else
- if File.exist?("#{Pkg::Config.project}-all.tar")
- tar_cmd = "--update"
- else
- tar_cmd = "--create"
- end
- Pkg::Util::Execution.ex("#{tar} --owner=0 --group=0 #{tar_cmd} --file #{Pkg::Config.project}-all.tar #{name_of_archive}.tar.gz")
- end
- end
- end
- end
-
- # tar does not support adding or updating files in a compressed archive, so
- # we have a task to compress the "all" tarball from the 'pack_all_signed_repos'
- # task
- task :compress_the_all_tarball, [:versioning] => ["pl:fetch"] do |t, args|
- versioning = args.versioning or fail ":versioning is a required argument for #{t}"
- gzip = Pkg::Util::Tool.check_tool('gzip')
- Dir.chdir("pkg") do
- if versioning == 'ref'
- local_target = File.join(Pkg::Config.project, Pkg::Config.ref)
- elsif versioning == 'version'
- local_target = File.join(Pkg::Config.project, Pkg::Util::Version.dot_version)
- end
- Dir.chdir(local_target) do
- Pkg::Util::Execution.ex("#{gzip} --fast #{File.join("repos", "#{Pkg::Config.project}-all.tar")}")
- end
- end
- end
-
-
  task :prepare_signed_repos, [:target_host, :target_prefix, :versioning] => ["clean", "pl:fetch"] do |t, args|
  target_host = args.target_host or fail ":target_host is a required argument to #{t}"
  target_prefix = args.target_prefix or fail ":target_prefix is a required argument for #{t}"
@@ -545,7 +545,9 @@ namespace :pl do

  local_dir = args.local_dir || 'pkg'
  Dir.glob("#{local_dir}/**/*").reject { |e| File.directory? e }.each do |artifact|
- if artifactory.package_exists_on_artifactory?(artifact)
+ if File.extname(artifact) == ".yaml" || File.extname(artifact) == ".json"
+ artifactory.deploy_package(artifact)
+ elsif artifactory.package_exists_on_artifactory?(artifact)
  warn "Attempt to upload '#{artifact}' failed. Package already exists!"
  else
  artifactory.deploy_package(artifact)
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: packaging
  version: !ruby/object:Gem::Version
- version: 0.99.40
+ version: 0.99.45
  platform: ruby
  authors:
  - Puppet Labs
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2019-08-28 00:00:00.000000000 Z
+ date: 2019-10-08 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: rspec
@@ -56,14 +56,14 @@ dependencies:
  name: rake
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - ">="
  - !ruby/object:Gem::Version
  version: '12.3'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - ">="
  - !ruby/object:Gem::Version
  version: '12.3'
  - !ruby/object:Gem::Dependency
@@ -224,34 +224,33 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubyforge_project:
- rubygems_version: 2.6.9
+ rubygems_version: 3.0.3
  signing_key:
  specification_version: 4
  summary: Puppet Labs' packaging automation
  test_files:
+ - spec/lib/packaging/retrieve_spec.rb
  - spec/lib/packaging/paths_spec.rb
+ - spec/lib/packaging/tar_spec.rb
+ - spec/lib/packaging/deb/repo_spec.rb
+ - spec/lib/packaging/rpm/repo_spec.rb
+ - spec/lib/packaging/artifactory_spec.rb
+ - spec/lib/packaging/repo_spec.rb
+ - spec/lib/packaging/deb_spec.rb
  - spec/lib/packaging/sign_spec.rb
- - spec/lib/packaging/config_spec.rb
- - spec/lib/packaging/util/execution_spec.rb
+ - spec/lib/packaging/gem_spec.rb
+ - spec/lib/packaging/util/rake_utils_spec.rb
+ - spec/lib/packaging/util/git_spec.rb
  - spec/lib/packaging/util/os_spec.rb
+ - spec/lib/packaging/util/misc_spec.rb
  - spec/lib/packaging/util/version_spec.rb
- - spec/lib/packaging/util/git_spec.rb
- - spec/lib/packaging/util/jenkins_spec.rb
- - spec/lib/packaging/util/rake_utils_spec.rb
+ - spec/lib/packaging/util/file_spec.rb
+ - spec/lib/packaging/util/execution_spec.rb
  - spec/lib/packaging/util/gpg_spec.rb
+ - spec/lib/packaging/util/jenkins_spec.rb
+ - spec/lib/packaging/util/ship_spec.rb
  - spec/lib/packaging/util/git_tag_spec.rb
- - spec/lib/packaging/util/file_spec.rb
  - spec/lib/packaging/util/net_spec.rb
- - spec/lib/packaging/util/misc_spec.rb
- - spec/lib/packaging/util/ship_spec.rb
- - spec/lib/packaging/repo_spec.rb
- - spec/lib/packaging/tar_spec.rb
- - spec/lib/packaging/retrieve_spec.rb
  - spec/lib/packaging/platforms_spec.rb
- - spec/lib/packaging/deb_spec.rb
- - spec/lib/packaging/deb/repo_spec.rb
- - spec/lib/packaging/rpm/repo_spec.rb
- - spec/lib/packaging/gem_spec.rb
+ - spec/lib/packaging/config_spec.rb
  - spec/lib/packaging_spec.rb