allure-report-publisher 1.2.0 → 1.4.0

Sign up to get free protection for your applications and access to all of the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 1809950b5cb79015f9d4568c28eea62fea2ff4abb581ee762303684aa86859a2
4
- data.tar.gz: e22f93a0a820aded1cd74bef9b3836867e6c25a0a454acd57343159ba19c780f
3
+ metadata.gz: 43fbcf0c18cbd23c8e95fcd74bd1fe9719120528ec31d7be73d34c3fb1e2c61c
4
+ data.tar.gz: eee7966c985bf5841c48f27d82ed43677935c4dd4361dda817e89888f961998a
5
5
  SHA512:
6
- metadata.gz: 7695a8fab036e58f775d1d6372ddcc054cd791212e278c840fb2642e4c4c29fb35409fe5cf9e30475217be471f564e68c231362305e89f3815bdb31c53f05ec1
7
- data.tar.gz: c9a64aaf86756c14c2e735a9632a1024d178ea1b8e9e97a43d65ad2afd05ff4adaa38094a9f12e083a70d145006269defdb1252ef64991e6412913cd47e6ea5e
6
+ metadata.gz: 4c10ac6928132f16f6bbf13deca50d22841b489c9dd59edeef5a9c65eea079bf5c7a33c04e215129019d2ca3881e38ecc5f6ebc257ed944e0cade634c0880c31
7
+ data.tar.gz: 422fda1ca3805877a1c7c720f373a72fa5dce728d56a2ccac2ac842043705ae6a56e9d00e9101ed3a34d5150666d023392982fe846b34b8964589c6a046068b5
data/README.md CHANGED
@@ -49,7 +49,8 @@ Options:
49
49
  --update-pr=VALUE # Add report url to PR via comment or description update. Required: false: (comment/description/actions)
50
50
  --summary=VALUE # Additionally add summary table to PR comment or description. Required: false: (behaviors/suites/packages/total)
51
51
  --summary-table-type=VALUE # Summary table type. Required: false: (ascii/markdown), default: :ascii
52
- --[no-]collapse-summary # Create summary as a collapsable section, default: false
52
+ --base-url=VALUE # Use custom base url instead of default cloud provider one. Required: false
53
+ --[no-]collapse-summary # Create summary as a collapsible section, default: false
53
54
  --[no-]copy-latest # Keep copy of latest report at base prefix path, default: false
54
55
  --[no-]color # Force color output
55
56
  --[no-]ignore-missing-results # Ignore missing allure results, default: false
@@ -1,3 +1,5 @@
1
+ require "uri"
2
+
1
3
  module Publisher
2
4
  module Commands
3
5
  # Upload allure report
@@ -40,10 +42,13 @@ module Publisher
40
42
  Publisher::Helpers::Summary::ASCII,
41
43
  Publisher::Helpers::Summary::MARKDOWN
42
44
  ]
45
+ option :base_url,
46
+ type: :string,
47
+ desc: "Use custom base url instead of default cloud provider one. Required: false"
43
48
  option :collapse_summary,
44
49
  type: :boolean,
45
50
  default: false,
46
- desc: "Create summary as a collapsable section"
51
+ desc: "Create summary as a collapsible section"
47
52
  option :copy_latest,
48
53
  type: :boolean,
49
54
  default: false,
@@ -95,6 +100,7 @@ module Publisher
95
100
  **args.slice(
96
101
  :bucket,
97
102
  :prefix,
103
+ :base_url,
98
104
  :copy_latest,
99
105
  :update_pr,
100
106
  :collapse_summary,
@@ -120,6 +126,9 @@ module Publisher
120
126
  def validate_args
121
127
  error("Missing argument --results-glob!") unless args[:results_glob]
122
128
  error("Missing argument --bucket!") unless args[:bucket]
129
+ URI.parse(args[:base_url]) if args[:base_url]
130
+ rescue URI::InvalidURIError
131
+ error("Invalid --base-url value!")
123
132
  end
124
133
 
125
134
  # Scan for allure results paths
@@ -29,15 +29,6 @@ module Publisher
29
29
  )
30
30
  end
31
31
 
32
- # Check if gsutil is installed and executable
33
- #
34
- # @return [Boolean]
35
- def gsutil?
36
- execute_shell("which gsutil") && true
37
- rescue StandardError
38
- false
39
- end
40
-
41
32
  # Debug logging session output
42
33
  #
43
34
  # @return [StringIO]
@@ -5,6 +5,8 @@ module Publisher
5
5
  module Uploaders
6
6
  class HistoryNotFoundError < StandardError; end
7
7
 
8
+ PARALLEL_THREADS = 8
9
+
8
10
  # Uploader implementation
9
11
  #
10
12
  class Uploader
@@ -26,6 +28,7 @@ module Publisher
26
28
  # @option args [Array] :result_paths
27
29
  # @option args [String] :bucket
28
30
  # @option args [String] :prefix
31
+ # @option args [String] :base_url
29
32
  # @option args [Boolean] :update_pr
30
33
  # @option args [String] :summary_type
31
34
  # @option args [Symbol] :summary_table_type
@@ -35,6 +38,7 @@ module Publisher
35
38
  @result_paths = args[:result_paths]
36
39
  @bucket_name = args[:bucket]
37
40
  @prefix = args[:prefix]
41
+ @base_url = args[:base_url]
38
42
  @update_pr = args[:update_pr]
39
43
  @summary_type = args[:summary_type]
40
44
  @summary_table_type = args[:summary_table_type]
@@ -65,7 +69,9 @@ module Publisher
65
69
  #
66
70
  # @return [void]
67
71
  def upload
68
- run_uploads
72
+ upload_history unless !run_id || copy_latest
73
+ upload_report
74
+ upload_latest_copy if copy_latest
69
75
  end
70
76
 
71
77
  # Add allure report url to pull request description
@@ -100,6 +106,7 @@ module Publisher
100
106
  attr_reader :result_paths,
101
107
  :bucket_name,
102
108
  :prefix,
109
+ :base_url,
103
110
  :update_pr,
104
111
  :copy_latest,
105
112
  :summary_type,
@@ -234,15 +241,6 @@ module Publisher
234
241
  log_debug("Saved '#{EXECUTOR_JSON}' as '#{json_path}'\n#{JSON.pretty_generate(ci_provider.executor_info)}")
235
242
  end
236
243
 
237
- # Run upload commands
238
- #
239
- # @return [void]
240
- def run_uploads
241
- upload_history unless !run_id || copy_latest
242
- upload_report
243
- upload_latest_copy if copy_latest
244
- end
245
-
246
244
  # Fetch allure report history
247
245
  #
248
246
  # @return [void]
@@ -14,13 +14,6 @@ module Publisher
14
14
  @client ||= Google::Cloud::Storage.new
15
15
  end
16
16
 
17
- # Gsutil class
18
- #
19
- # @return [Helpers::Gsutil]
20
- def gsutil
21
- @gsutil ||= Helpers::Gsutil.init
22
- end
23
-
24
17
  # GCS bucket
25
18
  #
26
19
  # @return [Google::Cloud::Storage::Bucket]
@@ -62,7 +55,7 @@ module Publisher
62
55
  # @return [void]
63
56
  def upload_history
64
57
  log_debug("Uploading report history")
65
- file_upload(report_files.select { |file| file.fnmatch?("*/history/*") }, prefix)
58
+ upload_to_gcs(report_files.select { |file| file.fnmatch?("*/history/*") }, prefix)
66
59
  end
67
60
 
68
61
  # Upload allure report
@@ -70,19 +63,28 @@ module Publisher
70
63
  # @return [void]
71
64
  def upload_report
72
65
  log_debug("Uploading report files")
73
- return batch_upload(report_path, full_prefix) if gsutil.valid?
74
-
75
- file_upload(report_files, full_prefix)
66
+ upload_to_gcs(report_files, full_prefix)
76
67
  end
77
68
 
78
69
  # Upload copy of latest run
79
70
  #
80
71
  # @return [void]
81
72
  def upload_latest_copy
82
- log_debug("Uploading report copy as latest report")
83
- return batch_copy(full_prefix, prefix, cache_control: 60) if gsutil.valid?
73
+ log_debug("Copying report as latest")
84
74
 
85
- file_upload(report_files, prefix, cache_control: 60)
75
+ args = report_files.map do |file|
76
+ {
77
+ source_file: bucket.file(key(full_prefix, file.relative_path_from(report_path))),
78
+ destination: key(prefix, file.relative_path_from(report_path))
79
+ }
80
+ end
81
+
82
+ Parallel.each(args, in_threads: PARALLEL_THREADS) do |obj|
83
+ obj[:source_file].copy(obj[:destination], force_copy_metadata: true) do |f|
84
+ f.cache_control = "public, max-age=60"
85
+ end
86
+ end
87
+ log_debug("Finished latest report copy successfully")
86
88
  end
87
89
 
88
90
  # Upload files to gcs
@@ -90,53 +92,22 @@ module Publisher
90
92
  # @param [Array<Pathname>] files
91
93
  # @param [String] key_prefix
92
94
  # @param [Hash] params
93
- # @return [void]
94
- def file_upload(files, key_prefix, cache_control: 3600)
95
- threads = 8
95
+ # @return [Array<Hash>]
96
+ def upload_to_gcs(files, key_prefix)
96
97
  args = files.map do |file|
97
98
  {
98
99
  file: file.to_s,
99
- path: key(key_prefix, file.relative_path_from(report_path)),
100
- cache_control: "public, max-age=#{cache_control}"
100
+ path: key(key_prefix, file.relative_path_from(report_path))
101
101
  }
102
102
  end
103
103
 
104
- log_debug("Uploading '#{args.size}' files in '#{threads}' threads to bucker '#{bucket_name}'")
105
- Parallel.each(args, in_threads: threads) do |obj|
106
- bucket.create_file(*obj.slice(:file, :path).values, **obj.slice(:cache_control))
104
+ log_debug("Uploading '#{args.size}' files in '#{PARALLEL_THREADS}' threads")
105
+ Parallel.each(args, in_threads: PARALLEL_THREADS) do |obj|
106
+ bucket.create_file(*obj.slice(:file, :path).values, cache_control: "public, max-age=3600")
107
107
  end
108
108
  log_debug("Finished upload successfully")
109
109
  end
110
110
 
111
- # Upload directory recursively
112
- #
113
- # @param [String] source_dir
114
- # @param [String] destination_dir
115
- # @return [void]
116
- def batch_upload(source_dir, destination_dir, cache_control: 3600)
117
- gsutil.batch_upload(
118
- source_dir: source_dir,
119
- destination_dir: destination_dir,
120
- bucket: bucket_name,
121
- cache_control: cache_control
122
- )
123
- end
124
-
125
- # Copy directory within the bucket
126
- #
127
- # @param [String] source_dir
128
- # @param [String] destination_dir
129
- # @param [String] cache_control
130
- # @return [void]
131
- def batch_copy(source_dir, destination_dir, cache_control: 3600)
132
- gsutil.batch_copy(
133
- source_dir: source_dir,
134
- destination_dir: destination_dir,
135
- bucket: bucket_name,
136
- cache_control: cache_control
137
- )
138
- end
139
-
140
111
  # Fabricate key for s3 object
141
112
  #
142
113
  # @param [String] *args
@@ -150,7 +121,7 @@ module Publisher
150
121
  # @param [String] path_prefix
151
122
  # @return [String]
152
123
  def url(path_prefix)
153
- ["https://storage.googleapis.com", bucket_name, path_prefix, "index.html"].compact.join("/")
124
+ [base_url || "https://storage.googleapis.com", bucket_name, path_prefix, "index.html"].compact.join("/")
154
125
  end
155
126
  end
156
127
  end
@@ -81,8 +81,21 @@ module Publisher
81
81
  #
82
82
  # @return [void]
83
83
  def upload_latest_copy
84
- log_debug("Uploading report copy as latest report")
85
- upload_to_s3(report_files, prefix, cache_control: 60)
84
+ log_debug("Copying report as latest")
85
+
86
+ args = report_files.map do |file|
87
+ {
88
+ bucket: bucket_name,
89
+ copy_source: "/#{bucket_name}/#{key(full_prefix, file.relative_path_from(report_path))}",
90
+ key: key(prefix, file.relative_path_from(report_path)),
91
+ metadata_directive: "REPLACE",
92
+ content_type: MiniMime.lookup_by_filename(file).content_type,
93
+ cache_control: "max-age=60"
94
+ }
95
+ end
96
+
97
+ Parallel.each(args, in_threads: PARALLEL_THREADS) { |obj| client.copy_object(obj) }
98
+ log_debug("Finished latest report copy successfully")
86
99
  end
87
100
 
88
101
  # Upload files to s3
@@ -90,20 +103,19 @@ module Publisher
90
103
  # @param [Array<Pathname>] files
91
104
  # @param [String] key_prefix
92
105
  # @return [Array<Hash>]
93
- def upload_to_s3(files, key_prefix, cache_control: 3600)
94
- threads = 8
106
+ def upload_to_s3(files, key_prefix)
95
107
  args = files.map do |file|
96
108
  {
97
109
  body: File.new(file),
98
110
  bucket: bucket_name,
99
111
  key: key(key_prefix, file.relative_path_from(report_path)),
100
112
  content_type: MiniMime.lookup_by_filename(file).content_type,
101
- cache_control: "max-age=#{cache_control}"
113
+ cache_control: "max-age=3600"
102
114
  }
103
115
  end
104
116
 
105
- log_debug("Uploading '#{args.size}' files in '#{threads}' threads")
106
- Parallel.each(args, in_threads: threads) { |obj| client.put_object(obj) }
117
+ log_debug("Uploading '#{args.size}' files in '#{PARALLEL_THREADS}' threads")
118
+ Parallel.each(args, in_threads: PARALLEL_THREADS) { |obj| client.put_object(obj) }
107
119
  log_debug("Finished upload successfully")
108
120
  end
109
121
 
@@ -120,7 +132,7 @@ module Publisher
120
132
  # @param [String] path_prefix
121
133
  # @return [String]
122
134
  def url(path_prefix)
123
- ["http://#{bucket_name}.s3.amazonaws.com", path_prefix, "index.html"].compact.join("/")
135
+ [base_url || "http://#{bucket_name}.s3.amazonaws.com", path_prefix, "index.html"].compact.join("/")
124
136
  end
125
137
  end
126
138
  end
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module Publisher
4
- VERSION = "1.2.0"
4
+ VERSION = "1.4.0"
5
5
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: allure-report-publisher
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.2.0
4
+ version: 1.4.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Andrejs Cunskis
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2022-10-22 00:00:00.000000000 Z
11
+ date: 2022-12-20 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: aws-sdk-s3
@@ -19,7 +19,7 @@ dependencies:
19
19
  version: 1.93.1
20
20
  - - "<"
21
21
  - !ruby/object:Gem::Version
22
- version: 1.116.0
22
+ version: 1.118.0
23
23
  type: :runtime
24
24
  prerelease: false
25
25
  version_requirements: !ruby/object:Gem::Requirement
@@ -29,7 +29,7 @@ dependencies:
29
29
  version: 1.93.1
30
30
  - - "<"
31
31
  - !ruby/object:Gem::Version
32
- version: 1.116.0
32
+ version: 1.118.0
33
33
  - !ruby/object:Gem::Dependency
34
34
  name: dry-cli
35
35
  requirement: !ruby/object:Gem::Requirement
@@ -39,7 +39,7 @@ dependencies:
39
39
  version: '0.6'
40
40
  - - "<"
41
41
  - !ruby/object:Gem::Version
42
- version: '0.8'
42
+ version: '1.1'
43
43
  type: :runtime
44
44
  prerelease: false
45
45
  version_requirements: !ruby/object:Gem::Requirement
@@ -49,7 +49,7 @@ dependencies:
49
49
  version: '0.6'
50
50
  - - "<"
51
51
  - !ruby/object:Gem::Version
52
- version: '0.8'
52
+ version: '1.1'
53
53
  - !ruby/object:Gem::Dependency
54
54
  name: faraday-retry
55
55
  requirement: !ruby/object:Gem::Requirement
@@ -222,14 +222,14 @@ dependencies:
222
222
  requirements:
223
223
  - - "~>"
224
224
  - !ruby/object:Gem::Version
225
- version: 2.18.0
225
+ version: 2.20.0
226
226
  type: :development
227
227
  prerelease: false
228
228
  version_requirements: !ruby/object:Gem::Requirement
229
229
  requirements:
230
230
  - - "~>"
231
231
  - !ruby/object:Gem::Version
232
- version: 2.18.0
232
+ version: 2.20.0
233
233
  - !ruby/object:Gem::Dependency
234
234
  name: climate_control
235
235
  requirement: !ruby/object:Gem::Requirement
@@ -382,14 +382,14 @@ dependencies:
382
382
  requirements:
383
383
  - - "~>"
384
384
  - !ruby/object:Gem::Version
385
- version: 0.47.0
385
+ version: 0.48.0
386
386
  type: :development
387
387
  prerelease: false
388
388
  version_requirements: !ruby/object:Gem::Requirement
389
389
  requirements:
390
390
  - - "~>"
391
391
  - !ruby/object:Gem::Version
392
- version: 0.47.0
392
+ version: 0.48.0
393
393
  description: Upload allure reports to different file storage providers
394
394
  email:
395
395
  - andrejs.cunskis@gmail.com
@@ -403,7 +403,6 @@ files:
403
403
  - lib/allure_report_publisher.rb
404
404
  - lib/allure_report_publisher/commands/upload.rb
405
405
  - lib/allure_report_publisher/commands/version.rb
406
- - lib/allure_report_publisher/lib/helpers/gsutil.rb
407
406
  - lib/allure_report_publisher/lib/helpers/helpers.rb
408
407
  - lib/allure_report_publisher/lib/helpers/spinner.rb
409
408
  - lib/allure_report_publisher/lib/helpers/summary.rb
@@ -1,141 +0,0 @@
1
- require "tempfile"
2
-
3
- module Publisher
4
- module Helpers
5
- # Helper class for gsutil cli utility
6
- #
7
- class Gsutil
8
- class UnsupportedConfig < StandardError; end
9
- class Uninitialised < StandardError; end
10
-
11
- include Helpers
12
-
13
- def self.init
14
- new.init!
15
- end
16
-
17
- private_class_method :new
18
-
19
- # Initialize gsutil
20
- #
21
- # @return [Gsutil]
22
- def init!
23
- log_debug("Setting up gsutil")
24
- @valid = execute_shell("which gsutil") && true
25
-
26
- log_debug("Checking google credentials")
27
- check_credentials
28
- log_debug("Credentials valid, gsutil initialized")
29
- self
30
- rescue StandardError => e
31
- case e
32
- when UnsupportedConfig
33
- log_debug("credentials not compatible with gsutil! Falling back to google sdk client for batch uploads")
34
- when ShellCommandFailure
35
- log_debug("gsutil command not found, falling back to gcs client")
36
- else
37
- log_debug("gsutil init failed: error: #{e}\nbacktrace: #{e.backtrace&.join("\n")}")
38
- end
39
-
40
- @valid = false
41
- self
42
- end
43
-
44
- # Check if gsutil is valid
45
- #
46
- # @return [Boolean]
47
- def valid?
48
- @valid
49
- end
50
-
51
- # Perform copy operation within a single bucket
52
- #
53
- # @param [String] source_dir
54
- # @param [String] destination_dir
55
- # @param [String] bucket
56
- # @param [Integer] cache_control
57
- # @return [void]
58
- def batch_copy(source_dir:, destination_dir:, bucket:, cache_control: 3600)
59
- batch_upload(
60
- source_dir: "gs://#{bucket}/#{source_dir}",
61
- destination_dir: destination_dir,
62
- bucket: bucket,
63
- cache_control: cache_control
64
- )
65
- end
66
-
67
- # Perform batch upload operation
68
- #
69
- # @param [String] source_dir
70
- # @param [String] destination_dir
71
- # @param [String] bucket
72
- # @param [String] cache_control
73
- # @return [void]
74
- def batch_upload(source_dir:, destination_dir:, bucket:, cache_control: 3600)
75
- raise(Uninitialised, "gsutil has not been properly set up!") unless valid?
76
-
77
- action = source_dir.start_with?("gs://") ? "Copying" : "Uploading"
78
- destination = "gs://#{bucket}/#{destination_dir}"
79
-
80
- log_debug("#{action} '#{source_dir}' to '#{destination}'")
81
- with_credentials do |key_file|
82
- execute_shell([
83
- base_cmd(key_file),
84
- "-h 'Cache-Control:private, max-age=#{cache_control}'",
85
- "rsync",
86
- "-j json,csv,txt,js,css",
87
- "-r #{source_dir} #{destination}"
88
- ].join(" "))
89
- end
90
- log_debug("Finished upload successfully")
91
- end
92
-
93
- private
94
-
95
- # Execute block with gcs credentials
96
- #
97
- # @return [void]
98
- def with_credentials
99
- if json_key[:file]
100
- yield(json_key[:key])
101
- else
102
- Tempfile.create("auth") do |f|
103
- f.write(json_key[:key])
104
- f.close
105
-
106
- yield(f.path)
107
- end
108
- end
109
- end
110
-
111
- # Google auth default credentials
112
- #
113
- # @return [String, Hash]
114
- def gcs_credentials
115
- @gcs_credentials ||= Google::Cloud::Storage.default_credentials
116
- end
117
-
118
- # Google auth json key
119
- #
120
- # @return [Hash]
121
- def json_key
122
- @json_key ||= if gcs_credentials.is_a?(Hash)
123
- { file: false, key: gcs_credentials.to_json }
124
- elsif gcs_credentials.is_a?(String) && File.exist?(gcs_credentials)
125
- { file: true, key: gcs_credentials.tap { |f| JSON.parse(File.read(f)) } }
126
- else
127
- raise(UnsupportedConfig, "only google key json credentials are supported for gsutil")
128
- end
129
- end
130
- alias check_credentials json_key
131
-
132
- # Base command
133
- #
134
- # @param [String] key_file
135
- # @return [String]
136
- def base_cmd(key_file)
137
- "gsutil -o 'Credentials:gs_service_key_file=#{key_file}' -m"
138
- end
139
- end
140
- end
141
- end