allure-report-publisher 1.2.0 → 1.3.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 1809950b5cb79015f9d4568c28eea62fea2ff4abb581ee762303684aa86859a2
-   data.tar.gz: e22f93a0a820aded1cd74bef9b3836867e6c25a0a454acd57343159ba19c780f
+   metadata.gz: 60bffdf8cfaef83d728fa0f0ddb3d5f87227f5fd3f3af1df048ac5859a7e21c7
+   data.tar.gz: d3a83dd85d3c3586db99d0c26bf6c5323e4bdc23beff83d80beae12ae0dfcdca
  SHA512:
-   metadata.gz: 7695a8fab036e58f775d1d6372ddcc054cd791212e278c840fb2642e4c4c29fb35409fe5cf9e30475217be471f564e68c231362305e89f3815bdb31c53f05ec1
-   data.tar.gz: c9a64aaf86756c14c2e735a9632a1024d178ea1b8e9e97a43d65ad2afd05ff4adaa38094a9f12e083a70d145006269defdb1252ef64991e6412913cd47e6ea5e
+   metadata.gz: 1811d5892f98e633ab10585425c359839da9bdef5828389b18c7e10591031b2dae88b920f7d291d72ddcf7b2894ad457ff975769e74e03f84e4baa0bd94a88ab
+   data.tar.gz: e27359fd763ce52f1ce55522681e06d446c44d815980b0e2777c9181073a5270d78148f1820afff893406483bd24732e778c0ccbff512d0537511efe00234e6e
@@ -29,15 +29,6 @@ module Publisher
  )
  end

- # Check if gsutil is installed and executable
- #
- # @return [Boolean]
- def gsutil?
-   execute_shell("which gsutil") && true
- rescue StandardError
-   false
- end
-
  # Debug logging session output
  #
  # @return [StringIO]
@@ -5,6 +5,8 @@ module Publisher
  module Uploaders
    class HistoryNotFoundError < StandardError; end

+   PARALLEL_THREADS = 8
+
    # Uploader implementation
    #
    class Uploader
@@ -65,7 +67,9 @@ module Publisher
  #
  # @return [void]
  def upload
-   run_uploads
+   upload_history unless !run_id || copy_latest
+   upload_report
+   upload_latest_copy if copy_latest
  end

  # Add allure report url to pull request description
@@ -234,15 +238,6 @@ module Publisher
  log_debug("Saved '#{EXECUTOR_JSON}' as '#{json_path}'\n#{JSON.pretty_generate(ci_provider.executor_info)}")
  end

- # Run upload commands
- #
- # @return [void]
- def run_uploads
-   upload_history unless !run_id || copy_latest
-   upload_report
-   upload_latest_copy if copy_latest
- end
-
  # Fetch allure report history
  #
  # @return [void]
@@ -14,13 +14,6 @@ module Publisher
  @client ||= Google::Cloud::Storage.new
  end

- # Gsutil class
- #
- # @return [Helpers::Gsutil]
- def gsutil
-   @gsutil ||= Helpers::Gsutil.init
- end
-
  # GCS bucket
  #
  # @return [Google::Cloud::Storage::Bucket]
@@ -62,7 +55,7 @@ module Publisher
  # @return [void]
  def upload_history
    log_debug("Uploading report history")
-   file_upload(report_files.select { |file| file.fnmatch?("*/history/*") }, prefix)
+   upload_to_gcs(report_files.select { |file| file.fnmatch?("*/history/*") }, prefix)
  end

  # Upload allure report
@@ -70,19 +63,28 @@ module Publisher
  # @return [void]
  def upload_report
    log_debug("Uploading report files")
-   return batch_upload(report_path, full_prefix) if gsutil.valid?
-
-   file_upload(report_files, full_prefix)
+   upload_to_gcs(report_files, full_prefix)
  end

  # Upload copy of latest run
  #
  # @return [void]
  def upload_latest_copy
-   log_debug("Uploading report copy as latest report")
-   return batch_copy(full_prefix, prefix, cache_control: 60) if gsutil.valid?
+   log_debug("Copying report as latest")

-   file_upload(report_files, prefix, cache_control: 60)
+   args = report_files.map do |file|
+     {
+       source_file: bucket.file(key(full_prefix, file.relative_path_from(report_path))),
+       destination: key(prefix, file.relative_path_from(report_path))
+     }
+   end
+
+   Parallel.each(args, in_threads: PARALLEL_THREADS) do |obj|
+     obj[:source_file].copy(obj[:destination], force_copy_metadata: true) do |f|
+       f.cache_control = "public, max-age=60"
+     end
+   end
+   log_debug("Finished latest report copy successfully")
  end

  # Upload files to gcs
@@ -90,53 +92,22 @@ module Publisher
  # @param [Array<Pathname>] files
  # @param [String] key_prefix
  # @param [Hash] params
- # @return [void]
- def file_upload(files, key_prefix, cache_control: 3600)
-   threads = 8
+ # @return [Array<Hash>]
+ def upload_to_gcs(files, key_prefix)
    args = files.map do |file|
      {
        file: file.to_s,
-       path: key(key_prefix, file.relative_path_from(report_path)),
-       cache_control: "public, max-age=#{cache_control}"
+       path: key(key_prefix, file.relative_path_from(report_path))
      }
    end

-   log_debug("Uploading '#{args.size}' files in '#{threads}' threads to bucker '#{bucket_name}'")
-   Parallel.each(args, in_threads: threads) do |obj|
-     bucket.create_file(*obj.slice(:file, :path).values, **obj.slice(:cache_control))
+   log_debug("Uploading '#{args.size}' files in '#{PARALLEL_THREADS}' threads")
+   Parallel.each(args, in_threads: PARALLEL_THREADS) do |obj|
+     bucket.create_file(*obj.slice(:file, :path).values, cache_control: "public, max-age=3600")
    end
    log_debug("Finished upload successfully")
  end

- # Upload directory recursively
- #
- # @param [String] source_dir
- # @param [String] destination_dir
- # @return [void]
- def batch_upload(source_dir, destination_dir, cache_control: 3600)
-   gsutil.batch_upload(
-     source_dir: source_dir,
-     destination_dir: destination_dir,
-     bucket: bucket_name,
-     cache_control: cache_control
-   )
- end
-
- # Copy directory within the bucket
- #
- # @param [String] source_dir
- # @param [String] destination_dir
- # @param [String] cache_control
- # @return [void]
- def batch_copy(source_dir, destination_dir, cache_control: 3600)
-   gsutil.batch_copy(
-     source_dir: source_dir,
-     destination_dir: destination_dir,
-     bucket: bucket_name,
-     cache_control: cache_control
-   )
- end
-
  # Fabricate key for s3 object
  #
  # @param [String] *args
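
Note: 1.3.0 drops the gsutil shell-out from the GCS uploader; the "latest" report is now produced by parallel server-side object copies through the google-cloud-storage client, with a short Cache-Control on the copies (max-age=60, versus max-age=3600 for regular uploads). A minimal standalone sketch of that pattern, using a placeholder bucket name and prefixes rather than values from the gem:

    require "google/cloud/storage"
    require "parallel"

    # Copy every object under a run prefix to a "latest" prefix, server side.
    storage = Google::Cloud::Storage.new
    bucket  = storage.bucket("my-allure-bucket")

    keys = bucket.files(prefix: "project/123/").all.map(&:name)

    Parallel.each(keys, in_threads: 8) do |name|
      destination = name.sub("project/123/", "project/")
      # Server-side copy; the block sets metadata on the new object only.
      bucket.file(name).copy(destination, force_copy_metadata: true) do |f|
        f.cache_control = "public, max-age=60"
      end
    end

Copying server side avoids re-uploading the report files and lets the latest copy carry a much shorter cache lifetime than the per-run objects.
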
@@ -81,8 +81,21 @@ module Publisher
  #
  # @return [void]
  def upload_latest_copy
-   log_debug("Uploading report copy as latest report")
-   upload_to_s3(report_files, prefix, cache_control: 60)
+   log_debug("Copying report as latest")
+
+   args = report_files.map do |file|
+     {
+       bucket: bucket_name,
+       copy_source: "/#{bucket_name}/#{key(full_prefix, file.relative_path_from(report_path))}",
+       key: key(prefix, file.relative_path_from(report_path)),
+       metadata_directive: "REPLACE",
+       content_type: MiniMime.lookup_by_filename(file).content_type,
+       cache_control: "max-age=60"
+     }
+   end
+
+   Parallel.each(args, in_threads: PARALLEL_THREADS) { |obj| client.copy_object(obj) }
+   log_debug("Finished latest report copy successfully")
  end

  # Upload files to s3
@@ -90,20 +103,19 @@ module Publisher
  # @param [Array<Pathname>] files
  # @param [String] key_prefix
  # @return [Array<Hash>]
- def upload_to_s3(files, key_prefix, cache_control: 3600)
-   threads = 8
+ def upload_to_s3(files, key_prefix)
    args = files.map do |file|
      {
        body: File.new(file),
        bucket: bucket_name,
        key: key(key_prefix, file.relative_path_from(report_path)),
        content_type: MiniMime.lookup_by_filename(file).content_type,
-       cache_control: "max-age=#{cache_control}"
+       cache_control: "max-age=3600"
      }
    end

-   log_debug("Uploading '#{args.size}' files in '#{threads}' threads")
-   Parallel.each(args, in_threads: threads) { |obj| client.put_object(obj) }
+   log_debug("Uploading '#{args.size}' files in '#{PARALLEL_THREADS}' threads")
+   Parallel.each(args, in_threads: PARALLEL_THREADS) { |obj| client.put_object(obj) }
    log_debug("Finished upload successfully")
  end

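The S3 uploader applies the same idea with Aws::S3::Client#copy_object; metadata_directive: "REPLACE" is what lets the copy carry its own cache_control and content_type. A minimal sketch with placeholder bucket and key names (the gem itself derives keys from the local report files and content types via MiniMime):

    require "aws-sdk-s3"
    require "parallel"

    # Copy run objects to a "latest" key in place, rewriting their metadata.
    client = Aws::S3::Client.new

    keys = client.list_objects_v2(bucket: "my-allure-bucket", prefix: "project/123/")
                 .contents.map(&:key)

    Parallel.each(keys, in_threads: 8) do |key|
      client.copy_object(
        bucket: "my-allure-bucket",
        copy_source: "/my-allure-bucket/#{key}",
        key: key.sub("project/123/", "project/"),
        metadata_directive: "REPLACE",
        content_type: "text/html",   # placeholder; a real run would look this up per file
        cache_control: "max-age=60"
      )
    end

Note that list_objects_v2 returns at most 1000 keys per call, so listing-based enumeration would need pagination; the uploader sidesteps this by mapping over the local report files instead.
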
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module Publisher
-   VERSION = "1.2.0"
+   VERSION = "1.3.0"
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: allure-report-publisher
  version: !ruby/object:Gem::Version
-   version: 1.2.0
+   version: 1.3.0
  platform: ruby
  authors:
  - Andrejs Cunskis
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-10-22 00:00:00.000000000 Z
+ date: 2022-11-07 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: aws-sdk-s3
@@ -19,7 +19,7 @@ dependencies:
          version: 1.93.1
      - - "<"
        - !ruby/object:Gem::Version
-         version: 1.116.0
+         version: 1.118.0
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
@@ -29,7 +29,7 @@ dependencies:
          version: 1.93.1
      - - "<"
        - !ruby/object:Gem::Version
-         version: 1.116.0
+         version: 1.118.0
  - !ruby/object:Gem::Dependency
    name: dry-cli
    requirement: !ruby/object:Gem::Requirement
@@ -403,7 +403,6 @@ files:
  - lib/allure_report_publisher.rb
  - lib/allure_report_publisher/commands/upload.rb
  - lib/allure_report_publisher/commands/version.rb
- - lib/allure_report_publisher/lib/helpers/gsutil.rb
  - lib/allure_report_publisher/lib/helpers/helpers.rb
  - lib/allure_report_publisher/lib/helpers/spinner.rb
  - lib/allure_report_publisher/lib/helpers/summary.rb
lib/allure_report_publisher/lib/helpers/gsutil.rb DELETED
@@ -1,141 +0,0 @@
- require "tempfile"
-
- module Publisher
-   module Helpers
-     # Helper class for gsutil cli utility
-     #
-     class Gsutil
-       class UnsupportedConfig < StandardError; end
-       class Uninitialised < StandardError; end
-
-       include Helpers
-
-       def self.init
-         new.init!
-       end
-
-       private_class_method :new
-
-       # Initialize gsutil
-       #
-       # @return [Gsutil]
-       def init!
-         log_debug("Setting up gsutil")
-         @valid = execute_shell("which gsutil") && true
-
-         log_debug("Checking google credentials")
-         check_credentials
-         log_debug("Credentials valid, gsutil initialized")
-         self
-       rescue StandardError => e
-         case e
-         when UnsupportedConfig
-           log_debug("credentials not compatible with gsutil! Falling back to google sdk client for batch uploads")
-         when ShellCommandFailure
-           log_debug("gsutil command not found, falling back to gcs client")
-         else
-           log_debug("gsutil init failed: error: #{e}\nbacktrace: #{e.backtrace&.join("\n")}")
-         end
-
-         @valid = false
-         self
-       end
-
-       # Check if gsutil is valid
-       #
-       # @return [Boolean]
-       def valid?
-         @valid
-       end
-
-       # Perform copy operation within a single bucket
-       #
-       # @param [String] source_dir
-       # @param [String] destination_dir
-       # @param [String] bucket
-       # @param [Integer] cache_control
-       # @return [void]
-       def batch_copy(source_dir:, destination_dir:, bucket:, cache_control: 3600)
-         batch_upload(
-           source_dir: "gs://#{bucket}/#{source_dir}",
-           destination_dir: destination_dir,
-           bucket: bucket,
-           cache_control: cache_control
-         )
-       end
-
-       # Perform batch upload operation
-       #
-       # @param [String] source_dir
-       # @param [String] destination_dir
-       # @param [String] bucket
-       # @param [String] cache_control
-       # @return [void]
-       def batch_upload(source_dir:, destination_dir:, bucket:, cache_control: 3600)
-         raise(Uninitialised, "gsutil has not been properly set up!") unless valid?
-
-         action = source_dir.start_with?("gs://") ? "Copying" : "Uploading"
-         destination = "gs://#{bucket}/#{destination_dir}"
-
-         log_debug("#{action} '#{source_dir}' to '#{destination}'")
-         with_credentials do |key_file|
-           execute_shell([
-             base_cmd(key_file),
-             "-h 'Cache-Control:private, max-age=#{cache_control}'",
-             "rsync",
-             "-j json,csv,txt,js,css",
-             "-r #{source_dir} #{destination}"
-           ].join(" "))
-         end
-         log_debug("Finished upload successfully")
-       end
-
-       private
-
-       # Execute block with gcs credentials
-       #
-       # @return [void]
-       def with_credentials
-         if json_key[:file]
-           yield(json_key[:key])
-         else
-           Tempfile.create("auth") do |f|
-             f.write(json_key[:key])
-             f.close
-
-             yield(f.path)
-           end
-         end
-       end
-
-       # Google auth default credentials
-       #
-       # @return [String, Hash]
-       def gcs_credentials
-         @gcs_credentials ||= Google::Cloud::Storage.default_credentials
-       end
-
-       # Google auth json key
-       #
-       # @return [Hash]
-       def json_key
-         @json_key ||= if gcs_credentials.is_a?(Hash)
-           { file: false, key: gcs_credentials.to_json }
-         elsif gcs_credentials.is_a?(String) && File.exist?(gcs_credentials)
-           { file: true, key: gcs_credentials.tap { |f| JSON.parse(File.read(f)) } }
-         else
-           raise(UnsupportedConfig, "only google key json credentials are supported for gsutil")
-         end
-       end
-       alias check_credentials json_key
-
-       # Base command
-       #
-       # @param [String] key_file
-       # @return [String]
-       def base_cmd(key_file)
-         "gsutil -o 'Credentials:gs_service_key_file=#{key_file}' -m"
-       end
-     end
-   end
- end
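
For reference, the deleted Publisher::Helpers::Gsutil class shelled out to a gsutil rsync roughly like the following, reconstructed from base_cmd and batch_upload above (the key file path, bucket and directories are placeholders):

    # Rough equivalent of the command Gsutil#batch_upload assembled and ran.
    cmd = [
      "gsutil -o 'Credentials:gs_service_key_file=/tmp/gcs-key.json' -m",
      "-h 'Cache-Control:private, max-age=3600'",
      "rsync",
      "-j json,csv,txt,js,css",
      "-r allure-report gs://my-allure-bucket/project/123"
    ].join(" ")

    system(cmd) # the helper used its own execute_shell wrapper here

With the helper gone, both uploaders rely solely on their SDK clients plus the parallel gem, so the gsutil binary is no longer needed for batch uploads or copies.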