jirametrics 2.16 → 2.17.1

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: ba8fee0ad6769e79ce646489af04aa0a6c5b551d5210b670aa96a4d10de45511
- data.tar.gz: 96d90aeb54d71daf99bfed389bd44c7783d53ddc03ffa707067ff9141d2df867
+ metadata.gz: 4e9534fd6ca22944557cfe40c63ebf5c30ff111458ec52dd3e35617246315ce8
+ data.tar.gz: 2feb93d3ae826f133902664751f3cee4e54de3512daaaa882ae78fed47605e65
  SHA512:
- metadata.gz: 681b65b97aa5d4431e764d03aae7a1e1bf8eacadea9e2aea3bb694ba45be6e2edf1fe9c5ded299fad874e721eeda4ce1dc7d7a8c0d03a0bb18eb9174a05730da
- data.tar.gz: 0c3550f70d8f164c1a16b4cf91e102c6a4eae403c376629fcdd4cf685f80a6a0d517151280a22e08665e64bca9679af25f25684ef22d4edd4bc7b2ac83e00bca
+ metadata.gz: 84942ea82c68aa66325299e5db0e5f5a4fe4af95eb909d1fde004435b53898c60ee7dde2251fec0e075f4b2db7ba2f1eade384795370f3b90d7230926e79d2b9
+ data.tar.gz: 9015a7c5ef2b60726dca272a0b5b07de9f62d76e076038e4c8d45cbae13facf37f7e8f45dac82639c72615bbee549de756d3fb4792d21345fb4d3dc3fb17a32e
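
These digests can be recomputed locally before trusting a downloaded gem. A minimal sketch using Ruby's standard Digest library, assuming the .gem archive has already been unpacked so that metadata.gz and data.tar.gz sit in the current directory (the paths are illustrative):

    require 'digest'

    %w[metadata.gz data.tar.gz].each do |name|
      bytes = File.binread(name)
      puts "#{name} SHA256: #{Digest::SHA256.hexdigest(bytes)}"
      puts "#{name} SHA512: #{Digest::SHA512.hexdigest(bytes)}"
    end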
@@ -2,11 +2,12 @@
 
  require 'random-word'
 
- class Anonymizer
+ class Anonymizer < ChartBase
  # needed for testing
  attr_reader :project_config, :issues
 
  def initialize project_config:, date_adjustment: -200
+ super()
  @project_config = project_config
  @issues = @project_config.issues
  @all_boards = @project_config.all_boards
@@ -130,18 +131,19 @@ class Anonymizer
  end
  end
 
- def shift_all_dates
- @file_system.log "Shifting all dates by #{@date_adjustment} days"
+ def shift_all_dates date_adjustment: @date_adjustment
+ adjustment_in_seconds = 60 * 60 * 24 * date_adjustment
+ @file_system.log "Shifting all dates by #{label_days date_adjustment}"
  @issues.each do |issue|
  issue.changes.each do |change|
- change.time = change.time + @date_adjustment
+ change.time = change.time + adjustment_in_seconds
  end
 
- issue.raw['fields']['updated'] = (issue.updated + @date_adjustment).to_s
+ issue.raw['fields']['updated'] = (issue.updated + adjustment_in_seconds).to_s
  end
 
  range = @project_config.time_range
- @project_config.time_range = (range.begin + @date_adjustment)..(range.end + @date_adjustment)
+ @project_config.time_range = (range.begin + date_adjustment)..(range.end + date_adjustment)
  end
 
  def random_name
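
Ruby's Time arithmetic works in seconds, which is why shift_all_dates converts the requested day count before adding it to each change timestamp and to the project's time range. A standalone sketch of the same conversion using plain Time objects (the values are illustrative):

    require 'time'

    date_adjustment = -200                                  # days, matching the anonymizer's default
    adjustment_in_seconds = 60 * 60 * 24 * date_adjustment  # Time + Integer adds seconds, not days

    original = Time.parse('2024-06-01T10:00:00+00:00')
    shifted  = original + adjustment_in_seconds             # roughly 200 days earlier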
@@ -1,8 +1,8 @@
  # frozen_string_literal: true
 
  class ChangeItem
- attr_reader :field, :value_id, :old_value_id, :raw, :time, :author_raw
- attr_accessor :value, :old_value
+ attr_reader :field, :value_id, :old_value_id, :raw, :author_raw
+ attr_accessor :value, :old_value, :time
 
  def initialize raw:, author_raw:, time:, artificial: false
  @raw = raw
@@ -181,7 +181,7 @@ class DailyView < ChartBase
  table = +''
  table << '<table>'
  history.each do |c|
- time = c.time.strftime '%b %d, %I:%M%P'
+ time = c.time.strftime '%b %d, %Y @ %I:%M%P'
 
  table << '<tr>'
  table << "<td><span class='time' title='Timestamp: #{c.time}'>#{time}</span></td>"
@@ -3,6 +3,27 @@
  require 'cgi'
  require 'json'
 
+ class DownloadIssueData
+ attr_accessor :key, :found_in_primary_query, :last_modified,
+ :up_to_date, :cache_path, :issue
+
+ def initialize(
+ key:,
+ found_in_primary_query: true,
+ last_modified: nil,
+ up_to_date: true,
+ cache_path: nil,
+ issue: nil
+ )
+ @key = key
+ @found_in_primary_query = found_in_primary_query
+ @last_modified = last_modified
+ @up_to_date = up_to_date
+ @cache_path = cache_path
+ @issue = issue
+ end
+ end
+
  class Downloader
  CURRENT_METADATA_VERSION = 4
 
@@ -12,6 +33,15 @@ class Downloader
  # For testing only
  attr_reader :start_date_in_query, :board_id_to_filter_id
 
+ def self.create download_config:, file_system:, jira_gateway:
+ is_cloud = jira_gateway.settings['jira_cloud'] || jira_gateway.cloud?
+ (is_cloud ? DownloaderForCloud : DownloaderForDataCenter).new(
+ download_config: download_config,
+ file_system: file_system,
+ jira_gateway: jira_gateway
+ )
+ end
+
  def initialize download_config:, file_system:, jira_gateway:
  @metadata = {}
  @download_config = download_config
@@ -28,7 +58,6 @@ class Downloader
  log '', both: true
  log @download_config.project_config.name, both: true
 
- init_gateway
  load_metadata
 
  if @metadata['no-download']
@@ -50,11 +79,6 @@ class Downloader
  save_metadata
  end
 
- def init_gateway
- @jira_gateway.load_jira_config(@download_config.project_config.jira_config)
- @jira_gateway.ignore_ssl_errors = @download_config.project_config.settings['ignore_ssl_errors']
- end
-
  def log text, both: false
  @file_system.log text, also_write_to_stderr: both
  end
@@ -66,93 +90,6 @@ class Downloader
  ids
  end
 
- def download_issues board:
- log " Downloading primary issues for board #{board.id}", both: true
- path = File.join(@target_path, "#{file_prefix}_issues/")
- unless Dir.exist?(path)
- log " Creating path #{path}"
- Dir.mkdir(path)
- end
-
- filter_id = @board_id_to_filter_id[board.id]
- jql = make_jql(filter_id: filter_id)
- jira_search_by_jql(jql: jql, initial_query: true, board: board, path: path)
-
- log " Downloading linked issues for board #{board.id}", both: true
- loop do
- @issue_keys_pending_download.reject! { |key| @issue_keys_downloaded_in_current_run.include? key }
- break if @issue_keys_pending_download.empty?
-
- keys_to_request = @issue_keys_pending_download[0..99]
- @issue_keys_pending_download.reject! { |key| keys_to_request.include? key }
- jql = "key in (#{keys_to_request.join(', ')})"
- jira_search_by_jql(jql: jql, initial_query: false, board: board, path: path)
- end
- end
-
- def jira_search_by_jql jql:, initial_query:, board:, path:
- intercept_jql = @download_config.project_config.settings['intercept_jql']
- jql = intercept_jql.call jql if intercept_jql
-
- log " JQL: #{jql}"
- escaped_jql = CGI.escape jql
-
- if @jira_gateway.cloud?
- max_results = 5_000 # The maximum allowed by Jira
- next_page_token = nil
- issue_count = 0
-
- loop do
- json = @jira_gateway.call_url relative_url: '/rest/api/3/search/jql' \
- "?jql=#{escaped_jql}&maxResults=#{max_results}&" \
- "nextPageToken=#{next_page_token}&expand=changelog&fields=*all"
- next_page_token = json['nextPageToken']
-
- json['issues'].each do |issue_json|
- issue_json['exporter'] = {
- 'in_initial_query' => initial_query
- }
- identify_other_issues_to_be_downloaded raw_issue: issue_json, board: board
- file = "#{issue_json['key']}-#{board.id}.json"
-
- @file_system.save_json(json: issue_json, filename: File.join(path, file))
- issue_count += 1
- end
-
- message = " Downloaded #{issue_count} issues"
- log message, both: true
-
- break unless next_page_token
- end
- else
- max_results = 100
- start_at = 0
- total = 1
- while start_at < total
- json = @jira_gateway.call_url relative_url: '/rest/api/2/search' \
- "?jql=#{escaped_jql}&maxResults=#{max_results}&startAt=#{start_at}&expand=changelog&fields=*all"
-
- json['issues'].each do |issue_json|
- issue_json['exporter'] = {
- 'in_initial_query' => initial_query
- }
- identify_other_issues_to_be_downloaded raw_issue: issue_json, board: board
- file = "#{issue_json['key']}-#{board.id}.json"
-
- @file_system.save_json(json: issue_json, filename: File.join(path, file))
- end
-
- total = json['total'].to_i
- max_results = json['maxResults']
-
- message = " Downloaded #{start_at + 1}-#{[start_at + max_results, total].min} of #{total} issues to #{path} "
- log message, both: true
-
- start_at += json['issues'].size
- end
- end
- end
-
  def identify_other_issues_to_be_downloaded raw_issue:, board:
  issue = Issue.new raw: raw_issue, board: board
  @issue_keys_downloaded_in_current_run << issue.key
@@ -327,11 +264,7 @@ class Downloader
 
  if start_date
  @download_date_range = start_date..today.to_date
-
- # For an incremental download, we want to query from the end of the previous one, not from the
- # beginning of the full range.
- @start_date_in_query = metadata['date_end'] || @download_date_range.begin
- log " Incremental download only. Pulling from #{@start_date_in_query}", both: true if metadata['date_end']
+ @start_date_in_query = @download_date_range.begin
 
  # Catch-all to pick up anything that's been around since before the range started but hasn't
  # had an update during the range.
@@ -351,4 +284,92 @@ class Downloader
  def file_prefix
  @download_config.project_config.get_file_prefix
  end
+
+ def download_issues board:
+ log " Downloading primary issues for board #{board.id} from #{jira_instance_type}", both: true
+ path = File.join(@target_path, "#{file_prefix}_issues/")
+ unless @file_system.dir_exist?(path)
+ log " Creating path #{path}"
+ @file_system.mkdir(path)
+ end
+
+ filter_id = @board_id_to_filter_id[board.id]
+ jql = make_jql(filter_id: filter_id)
+ intercept_jql = @download_config.project_config.settings['intercept_jql']
+ jql = intercept_jql.call jql if intercept_jql
+
+ issue_data_hash = search_for_issues jql: jql, board_id: board.id, path: path
+
+ loop do
+ related_issue_keys = Set.new
+ issue_data_hash
+ .values
+ .reject { |data| data.up_to_date }
+ .each_slice(100) do |slice|
+ slice = bulk_fetch_issues(
+ issue_datas: slice, board: board, in_initial_query: true
+ )
+ slice.each do |data|
+ @file_system.save_json(
+ json: data.issue.raw, filename: data.cache_path
+ )
+ # Set the timestamp on the file to match the updated one so that we don't have
+ # to parse the file just to find the timestamp
+ @file_system.utime time: data.issue.updated, file: data.cache_path
+
+ issue = data.issue
+ next unless issue
+
+ parent_key = issue.parent_key(project_config: @download_config.project_config)
+ related_issue_keys << parent_key if parent_key
+
+ # Sub-tasks
+ issue.raw['fields']['subtasks']&.each do |raw_subtask|
+ related_issue_keys << raw_subtask['key']
+ end
+ end
+ end
+
+ # Remove all the ones we already downloaded
+ related_issue_keys.reject! { |key| issue_data_hash[key] }
+
+ related_issue_keys.each do |key|
+ data = DownloadIssueData.new key: key
+ data.found_in_primary_query = false
+ data.up_to_date = false
+ data.cache_path = File.join(path, "#{key}-#{board.id}.json")
+ issue_data_hash[key] = data
+ end
+ break if related_issue_keys.empty?
+
+ log " Downloading linked issues for board #{board.id}", both: true
+ end
+
+ delete_issues_from_cache_that_are_not_in_server(
+ issue_data_hash: issue_data_hash, path: path
+ )
+ end
+
+ def delete_issues_from_cache_that_are_not_in_server issue_data_hash:, path:
+ # The gotcha with deleted issues is that they just stop being returned in queries
+ # and we have no way to know that they should be removed from our local cache.
+ # With the new approach, we ask for every issue that Jira knows about (within
+ # the parameters of the query) and then delete anything that's in our local cache
+ # but wasn't returned.
+ @file_system.foreach path do |file|
+ next if file.start_with? '.'
+ unless /^(?<key>\w+-\d+)-\d+\.json$/ =~ file
+ raise "Unexpected filename in #{path}: #{file}"
+ end
+ next if issue_data_hash[key] # Still in Jira
+
+ file_to_delete = File.join(path, file)
+ log " Removing #{file_to_delete} from local cache"
+ file_system.unlink file_to_delete
+ end
+ end
+
+ def last_modified filename:
+ File.mtime(filename) if File.exist?(filename)
+ end
  end
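
The cache-freshness check above works by stamping each cached JSON file with the issue's own updated time, so a later run can decide whether to re-download by comparing file mtimes rather than parsing JSON. A minimal standalone sketch of the idea, using assumed paths and plain File calls rather than the gem's FileSystem wrapper:

    require 'time'
    require 'json'

    def cache_issue(path, raw_issue)
      File.write(path, JSON.generate(raw_issue))
      updated = Time.parse(raw_issue['fields']['updated'])
      File.utime(updated, updated, path) # file mtime now mirrors Jira's 'updated' value
    end

    def up_to_date?(path, updated_from_search)
      File.exist?(path) && File.mtime(path) == updated_from_search
    end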
@@ -0,0 +1,114 @@
+ # frozen_string_literal: true
+
+ class DownloaderForCloud < Downloader
+ def jira_instance_type
+ 'Jira Cloud'
+ end
+
+ def search_for_issues jql:, board_id:, path:
+ log " JQL: #{jql}"
+ escaped_jql = CGI.escape jql
+
+ hash = {}
+ max_results = 5_000 # The maximum allowed by Jira
+ next_page_token = nil
+ issue_count = 0
+
+ loop do
+ relative_url = +''
+ relative_url << '/rest/api/3/search/jql'
+ relative_url << "?jql=#{escaped_jql}&maxResults=#{max_results}"
+ relative_url << "&nextPageToken=#{next_page_token}" if next_page_token
+ relative_url << '&fields=updated'
+
+ json = @jira_gateway.call_url relative_url: relative_url
+ next_page_token = json['nextPageToken']
+
+ json['issues'].each do |i|
+ key = i['key']
+ data = DownloadIssueData.new key: key
+ data.key = key
+ data.last_modified = Time.parse i['fields']['updated']
+ data.found_in_primary_query = true
+ data.cache_path = File.join(path, "#{key}-#{board_id}.json")
+ data.up_to_date = last_modified(filename: data.cache_path) == data.last_modified
+ hash[key] = data
+ issue_count += 1
+ end
+
+ message = " Found #{issue_count} issues"
+ log message, both: true
+
+ break unless next_page_token
+ end
+ hash
+ end
+
+ def bulk_fetch_issues issue_datas:, board:, in_initial_query:
+ # We used to use the expand option to pull in the changelog directly. Unfortunately
+ # that only returns the "recent" changes, not all of them. So now we get the issue
+ # without changes and then make a second call for that changes. Then we insert it
+ # into the raw issue as if it had been there all along.
+ log " Downloading #{issue_datas.size} issues", both: true
+ payload = {
+ 'fields' => ['*all'],
+ 'issueIdsOrKeys' => issue_datas.collect(&:key)
+ }
+ response = @jira_gateway.post_request(
+ relative_url: '/rest/api/3/issue/bulkfetch',
+ payload: JSON.generate(payload)
+ )
+
+ attach_changelog_to_issues issue_datas: issue_datas, issue_jsons: response['issues']
+
+ response['issues'].each do |issue_json|
+ issue_json['exporter'] = {
+ 'in_initial_query' => in_initial_query
+ }
+ issue = Issue.new(raw: issue_json, board: board)
+ data = issue_datas.find { |d| d.key == issue.key }
+ data.up_to_date = true
+ data.last_modified = issue.updated
+ data.issue = issue
+ end
+
+ issue_datas
+ end
+
+ def attach_changelog_to_issues issue_datas:, issue_jsons:
+ max_results = 10_000 # The max jira accepts is 10K
+ payload = {
+ 'issueIdsOrKeys' => issue_datas.collect(&:key),
+ 'maxResults' => max_results
+ }
+ loop do
+ response = @jira_gateway.post_request(
+ relative_url: '/rest/api/3/changelog/bulkfetch',
+ payload: JSON.generate(payload)
+ )
+
+ response['issueChangeLogs'].each do |issue_change_log|
+ issue_id = issue_change_log['issueId']
+ json = issue_jsons.find { |json| json['id'] == issue_id }
+
+ unless json['changelog']
+ # If this is our first time in, there won't be a changelog section
+ json['changelog'] = {
+ 'startAt' => 0,
+ 'maxResults' => max_results,
+ 'total' => 0,
+ 'histories' => []
+ }
+ end
+
+ new_changes = issue_change_log['changeHistories']
+ json['changelog']['total'] += new_changes.size
+ json['changelog']['histories'] += new_changes
+ end
+
+ next_page_token = response['nextPageToken']
+ payload['nextPageToken'] = next_page_token
+ break if next_page_token.nil?
+ end
+ end
+ end
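
Both the search and the changelog calls above follow the same Jira Cloud pagination contract: resubmit the request with the returned nextPageToken until no token comes back. A generic sketch of that loop (fetch_page is a hypothetical callable standing in for whatever HTTP call is being paged; it is not part of the gem):

    # fetch_page must accept a token (nil on the first call) and return the parsed JSON page.
    def each_page(fetch_page)
      token = nil
      loop do
        page = fetch_page.call(token)
        yield page
        token = page['nextPageToken']
        break unless token
      end
    end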
@@ -0,0 +1,68 @@
+ # frozen_string_literal: true
+
+ class DownloaderForDataCenter < Downloader
+ def jira_instance_type
+ 'Jira DataCenter'
+ end
+
+ def search_for_issues jql:, board_id:, path:
+ log " JQL: #{jql}"
+ escaped_jql = CGI.escape jql
+
+ hash = {}
+ max_results = 100
+ start_at = 0
+ total = 1
+ while start_at < total
+ json = @jira_gateway.call_url relative_url: '/rest/api/2/search' \
+ "?jql=#{escaped_jql}&maxResults=#{max_results}&startAt=#{start_at}&fields=updated"
+ json['issues'].each do |i|
+ key = i['key']
+ cache_path = File.join(path, "#{key}-#{board_id}.json")
+ last_modified = Time.parse(i['fields']['updated'])
+ data = DownloadIssueData.new(
+ key: key,
+ last_modified: last_modified,
+ found_in_primary_query: true,
+ cache_path: cache_path,
+ up_to_date: last_modified(filename: cache_path) == last_modified
+ )
+ hash[key] = data
+ end
+ total = json['total'].to_i
+ max_results = json['maxResults']
+
+ message = " Found #{json['issues'].count} issues"
+ log message, both: true
+
+ start_at += json['issues'].size
+ end
+ hash
+ end
+
+ def bulk_fetch_issues issue_datas:, board:, in_initial_query:
+ log " Downloading #{issue_datas.size} issues", both: true
+ payload = {
+ 'expand' => [
+ 'changelog'
+ ],
+ 'fields' => ['*all'],
+ 'issueIdsOrKeys' => issue_datas.collect(&:key)
+ }
+ response = @jira_gateway.post_request(
+ relative_url: '/rest/api/2/issue/bulkfetch',
+ payload: JSON.generate(payload)
+ )
+ response['issues'].each do |issue_json|
+ issue_json['exporter'] = {
+ 'in_initial_query' => in_initial_query
+ }
+ issue = Issue.new(raw: issue_json, board: board)
+ data = issue_datas.find { |d| d.key == issue.key }
+ data.up_to_date = true
+ data.last_modified = issue.updated
+ data.issue = issue
+ end
+ issue_datas
+ end
+ end
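
With both subclasses in place, Downloader.create picks between them based on the gateway's settings. A sketch of the wiring, following the same pattern the Exporter uses below (the project variable stands in for a configured project):

    gateway = JiraGateway.new(
      file_system: file_system,
      jira_config: project.jira_config,
      settings: project.settings
    )
    downloader = Downloader.create(
      download_config: project.download_config,
      file_system: file_system,
      jira_gateway: gateway
    )
    downloader.run # picks DownloaderForCloud when settings['jira_cloud'] or jira_gateway.cloud? is truthy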
@@ -50,24 +50,29 @@ class Exporter
  end
 
  project.download_config.run
- downloader = Downloader.new(
+ # load_jira_config(download_config.project_config.jira_config)
+ # @ignore_ssl_errors = download_config.project_config.settings['ignore_ssl_errors']
+ gateway = JiraGateway.new(
+ file_system: file_system, jira_config: project.jira_config, settings: project.settings
+ )
+ downloader = Downloader.create(
  download_config: project.download_config,
  file_system: file_system,
- jira_gateway: JiraGateway.new(file_system: file_system)
+ jira_gateway: gateway
  )
  downloader.run
  end
  puts "Full output from downloader in #{file_system.logfile_name}"
  end
 
- def info keys, name_filter:
+ def info key, name_filter:
  selected = []
  each_project_config(name_filter: name_filter) do |project|
  project.evaluate_next_level
 
  project.run load_only: true
  project.issues.each do |issue|
- selected << [project, issue] if keys.include? issue.key
+ selected << [project, issue] if key == issue.key
  end
  rescue => e # rubocop:disable Style/RescueStandardError
  # This happens when we're attempting to load an aggregated project because it hasn't been
@@ -76,7 +81,7 @@ class Exporter
  end
 
  if selected.empty?
- file_system.log "No issues found to match #{keys.collect(&:inspect).join(', ')}"
+ file_system.log "No issues found to match #{key.inspect}"
  else
  selected.each do |project, issue|
  file_system.log "\nProject #{project.name}", also_write_to_stderr: true
@@ -5,6 +5,13 @@ require 'json'
  class FileSystem
  attr_accessor :logfile, :logfile_name
 
+ def initialize
+ # In almost all cases, this will be immediately replaced in the Exporter
+ # but if we fail before we get that far, this will at least let a useful
+ # error show up on the console.
+ @logfile = $stdout
+ end
+
  # Effectively the same as File.read except it forces the encoding to UTF-8
  def load filename, supress_deprecation: false
  if filename.end_with?('.json') && !supress_deprecation
@@ -31,6 +38,14 @@ class FileSystem
  File.write(filename, content)
  end
 
+ def mkdir path
+ FileUtils.mkdir_p path
+ end
+
+ def utime file:, time:
+ File.utime time, time, file
+ end
+
  def warning message, more: nil
  log "Warning: #{message}", more: more, also_write_to_stderr: true
  end
@@ -66,7 +81,15 @@ class FileSystem
  end
 
  def file_exist? filename
- File.exist? filename
+ File.exist?(filename) && File.file?(filename)
+ end
+
+ def dir_exist? path
+ File.exist?(path) && File.directory?(path)
+ end
+
+ def unlink filename
+ File.unlink filename
  end
 
  def deprecated message:, date:, depth: 2
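
Routing every file operation through this wrapper is what makes it easy to substitute a test double. A minimal sketch of such a fake, covering only the methods visible in this diff (the class name and in-memory behaviour are illustrative, not part of the gem):

    class FakeFileSystem
      attr_reader :saved, :messages

      def initialize
        @saved = {}    # filename => json
        @messages = []
      end

      def log text, also_write_to_stderr: false
        @messages << text
      end

      def save_json json:, filename:
        @saved[filename] = json
      end

      def mkdir path
        # no-op: directories are implicit in the in-memory store
      end

      def utime file:, time:
        # no-op: timestamps are not tracked in this fake
      end

      def dir_exist? path
        true
      end

      def file_exist? filename
        @saved.key? filename
      end

      def unlink filename
        @saved.delete filename
      end
    end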
@@ -212,7 +212,11 @@ class Issue
  end
 
  def parse_time text
- Time.parse(text).getlocal(@timezone_offset)
+ if text.is_a? String
+ Time.parse(text).getlocal(@timezone_offset)
+ else
+ Time.at(text / 1000).getlocal(@timezone_offset)
+ end
  end
 
  def created
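
parse_time above accepts either a string timestamp or a numeric value that it treats as epoch milliseconds. A throwaway example showing both forms of the same instant (the values are made up):

    require 'time'

    iso    = '2025-09-10T14:30:00+00:00'
    millis = 1_757_514_600_000 # the same instant in epoch milliseconds

    Time.parse(iso) == Time.at(millis / 1000) # => true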
@@ -3,17 +3,47 @@
  require 'cgi'
  require 'json'
  require 'English'
+ require 'open3'
 
  class JiraGateway
- attr_accessor :ignore_ssl_errors, :jira_url
+ attr_accessor :ignore_ssl_errors
+ attr_reader :jira_url, :settings, :file_system
 
- def initialize file_system:
+ def initialize file_system:, jira_config:, settings:
  @file_system = file_system
+ load_jira_config(jira_config)
+ @settings = settings
+ @ignore_ssl_errors = settings['ignore_ssl_errors']
+ end
+
+ def post_request relative_url:, payload:
+ command = make_curl_command url: "#{@jira_url}#{relative_url}", method: 'POST'
+ log_entry = " #{command.gsub(/\s+/, ' ')}"
+ log_entry = sanitize_message log_entry
+ @file_system.log log_entry
+
+ stdout, stderr, status = Open3.capture3(command, stdin_data: payload)
+ unless status.success?
+ @file_system.log "Failed call with exit status #{status.exitstatus}!"
+ @file_system.log "Returned (stdout): #{stdout.inspect}"
+ @file_system.log "Returned (stderr): #{stderr.inspect}"
+ raise "Failed call with exit status #{status.exitstatus}. " \
+ "See #{@file_system.logfile_name} for details"
+ end
+
+ @file_system.log "Returned (stderr): #{stderr}" unless stderr == ''
+ raise 'no response from curl on stdout' if stdout == ''
+
+ parse_response(command: command, result: stdout)
  end
 
  def call_url relative_url:
  command = make_curl_command url: "#{@jira_url}#{relative_url}"
  result = call_command command
+ parse_response(command: command, result: result)
+ end
+
+ def parse_response command:, result:
  begin
  json = JSON.parse(result)
  rescue # rubocop:disable Style/RescueStandardError
@@ -31,12 +61,12 @@ class JiraGateway
  token = @jira_api_token || @jira_personal_access_token
  raise 'Neither Jira API Token or personal access token has been set' unless token
 
- message.gsub(@jira_api_token, '[API_TOKEN]')
+ message.gsub(token, '[API_TOKEN]')
  end
 
  def call_command command
  log_entry = " #{command.gsub(/\s+/, ' ')}"
- log_entry = sanitize_message log_entry if @jira_api_token
+ log_entry = sanitize_message log_entry
  @file_system.log log_entry
 
  result = `#{command}`
@@ -65,7 +95,7 @@ class JiraGateway
  @cookies = (jira_config['cookies'] || []).collect { |key, value| "#{key}=#{value}" }.join(';')
  end
 
- def make_curl_command url:
+ def make_curl_command url:, method: 'GET'
  command = +''
  command << 'curl'
  command << ' -L' # follow redirects
@@ -74,8 +104,13 @@ class JiraGateway
  command << " --cookie #{@cookies.inspect}" unless @cookies.empty?
  command << " --user #{@jira_email}:#{@jira_api_token}" if @jira_api_token
  command << " -H \"Authorization: Bearer #{@jira_personal_access_token}\"" if @jira_personal_access_token
- command << ' --request GET'
+ command << " --request #{method}"
+ if method == 'POST'
+ command << ' --data @-'
+ command << ' --header "Content-Type: application/json"'
+ end
  command << ' --header "Accept: application/json"'
+ command << ' --show-error --fail' # Better diagnostics when the server returns an error
  command << " --url \"#{url}\""
  command
  end
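
For a POST, make_curl_command produces a command of roughly the shape below, with the JSON payload piped to curl on stdin via --data @-. The URL is a placeholder, and flags that depend on configuration (authentication, cookies, SSL handling) are omitted here:

    require 'open3'
    require 'json'

    command = 'curl -L --request POST --data @- ' \
              '--header "Content-Type: application/json" ' \
              '--header "Accept: application/json" ' \
              '--show-error --fail ' \
              '--url "https://example.atlassian.net/rest/api/3/issue/bulkfetch"'
    payload = JSON.generate('fields' => ['*all'], 'issueIdsOrKeys' => ['ABC-123'])
    stdout, _stderr, _status = Open3.capture3(command, stdin_data: payload)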
data/lib/jirametrics.rb CHANGED
@@ -47,9 +47,9 @@ class JiraMetrics < Thor
 
  option :config
  desc 'info', 'Dump information about one issue'
- def info keys
+ def info key
  load_config options[:config]
- Exporter.instance.info(keys, name_filter: options[:name] || '*')
+ Exporter.instance.info(key, name_filter: options[:name] || '*')
  end
 
  no_commands do
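
info is a Thor command that takes a single issue key plus an optional --config, so a shell invocation looks roughly like this (the key and file name are made up):

    jirametrics info ABC-123 --config jirametrics_config.rb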
metadata CHANGED
@@ -1,13 +1,13 @@
  --- !ruby/object:Gem::Specification
  name: jirametrics
  version: !ruby/object:Gem::Version
- version: '2.16'
+ version: 2.17.1
  platform: ruby
  authors:
  - Mike Bowler
  bindir: bin
  cert_chain: []
- date: 2025-09-10 00:00:00.000000000 Z
+ date: 1980-01-02 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: random-word
@@ -87,6 +87,8 @@ files:
  - lib/jirametrics/dependency_chart.rb
  - lib/jirametrics/download_config.rb
  - lib/jirametrics/downloader.rb
+ - lib/jirametrics/downloader_for_cloud.rb
+ - lib/jirametrics/downloader_for_data_center.rb
  - lib/jirametrics/estimate_accuracy_chart.rb
  - lib/jirametrics/estimation_configuration.rb
  - lib/jirametrics/examples/aggregated_project.rb
@@ -157,7 +159,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.6.2
+ rubygems_version: 3.7.2
  specification_version: 4
  summary: Extract Jira metrics
  test_files: []