jirametrics 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/bin/jirametrics +4 -0
- data/lib/jirametrics/aggregate_config.rb +89 -0
- data/lib/jirametrics/aging_work_bar_chart.rb +235 -0
- data/lib/jirametrics/aging_work_in_progress_chart.rb +148 -0
- data/lib/jirametrics/aging_work_table.rb +149 -0
- data/lib/jirametrics/anonymizer.rb +186 -0
- data/lib/jirametrics/blocked_stalled_change.rb +43 -0
- data/lib/jirametrics/board.rb +85 -0
- data/lib/jirametrics/board_column.rb +14 -0
- data/lib/jirametrics/board_config.rb +31 -0
- data/lib/jirametrics/change_item.rb +80 -0
- data/lib/jirametrics/chart_base.rb +239 -0
- data/lib/jirametrics/columns_config.rb +42 -0
- data/lib/jirametrics/cycletime_config.rb +69 -0
- data/lib/jirametrics/cycletime_histogram.rb +74 -0
- data/lib/jirametrics/cycletime_scatterplot.rb +128 -0
- data/lib/jirametrics/daily_wip_by_age_chart.rb +88 -0
- data/lib/jirametrics/daily_wip_by_blocked_stalled_chart.rb +77 -0
- data/lib/jirametrics/daily_wip_chart.rb +123 -0
- data/lib/jirametrics/data_quality_report.rb +278 -0
- data/lib/jirametrics/dependency_chart.rb +217 -0
- data/lib/jirametrics/discard_changes_before.rb +37 -0
- data/lib/jirametrics/download_config.rb +41 -0
- data/lib/jirametrics/downloader.rb +337 -0
- data/lib/jirametrics/examples/aggregated_project.rb +36 -0
- data/lib/jirametrics/examples/standard_project.rb +111 -0
- data/lib/jirametrics/expedited_chart.rb +169 -0
- data/lib/jirametrics/experimental/generator.rb +209 -0
- data/lib/jirametrics/experimental/info.rb +77 -0
- data/lib/jirametrics/exporter.rb +127 -0
- data/lib/jirametrics/file_config.rb +119 -0
- data/lib/jirametrics/fix_version.rb +21 -0
- data/lib/jirametrics/groupable_issue_chart.rb +44 -0
- data/lib/jirametrics/grouping_rules.rb +13 -0
- data/lib/jirametrics/hierarchy_table.rb +31 -0
- data/lib/jirametrics/html/aging_work_bar_chart.erb +72 -0
- data/lib/jirametrics/html/aging_work_in_progress_chart.erb +52 -0
- data/lib/jirametrics/html/aging_work_table.erb +60 -0
- data/lib/jirametrics/html/collapsible_issues_panel.erb +32 -0
- data/lib/jirametrics/html/cycletime_histogram.erb +41 -0
- data/lib/jirametrics/html/cycletime_scatterplot.erb +103 -0
- data/lib/jirametrics/html/daily_wip_chart.erb +63 -0
- data/lib/jirametrics/html/data_quality_report.erb +126 -0
- data/lib/jirametrics/html/expedited_chart.erb +67 -0
- data/lib/jirametrics/html/hierarchy_table.erb +29 -0
- data/lib/jirametrics/html/index.erb +66 -0
- data/lib/jirametrics/html/sprint_burndown.erb +116 -0
- data/lib/jirametrics/html/story_point_accuracy_chart.erb +57 -0
- data/lib/jirametrics/html/throughput_chart.erb +65 -0
- data/lib/jirametrics/html_report_config.rb +217 -0
- data/lib/jirametrics/issue.rb +521 -0
- data/lib/jirametrics/issue_link.rb +60 -0
- data/lib/jirametrics/json_file_loader.rb +9 -0
- data/lib/jirametrics/project_config.rb +442 -0
- data/lib/jirametrics/rules.rb +34 -0
- data/lib/jirametrics/self_or_issue_dispatcher.rb +15 -0
- data/lib/jirametrics/sprint.rb +43 -0
- data/lib/jirametrics/sprint_burndown.rb +335 -0
- data/lib/jirametrics/sprint_issue_change_data.rb +31 -0
- data/lib/jirametrics/status.rb +26 -0
- data/lib/jirametrics/status_collection.rb +67 -0
- data/lib/jirametrics/story_point_accuracy_chart.rb +139 -0
- data/lib/jirametrics/throughput_chart.rb +91 -0
- data/lib/jirametrics/tree_organizer.rb +96 -0
- data/lib/jirametrics/trend_line_calculator.rb +74 -0
- data/lib/jirametrics.rb +85 -0
- metadata +167 -0
data/lib/jirametrics/downloader.rb
@@ -0,0 +1,337 @@
+# frozen_string_literal: true
+
+require 'cgi'
+require 'json'
+require 'english'
+
+class Downloader
+  CURRENT_METADATA_VERSION = 4
+
+  attr_accessor :metadata, :quiet_mode, :logfile, :logfile_name
+
+  # For testing only
+  attr_reader :start_date_in_query
+
+  def initialize download_config:, json_file_loader: JsonFileLoader.new
+    @metadata = {}
+    @download_config = download_config
+    @target_path = @download_config.project_config.target_path
+    @json_file_loader = json_file_loader
+    @board_id_to_filter_id = {}
+
+    @issue_keys_downloaded_in_current_run = []
+    @issue_keys_pending_download = []
+  end
+
+  def run
+    log '', both: true
+    log @download_config.project_config.name, both: true
+
+    load_jira_config(@download_config.project_config.jira_config)
+    load_metadata
+
+    if @metadata['no-download']
+      log ' Skipping download. Found no-download in meta file', both: true
+      return
+    end
+
+    # board_ids = @download_config.board_ids
+
+    remove_old_files
+    download_statuses
+    find_board_ids.each do |id|
+      download_board_configuration board_id: id
+      download_issues board_id: id
+    end
+
+    save_metadata
+  end
+
+  def log text, both: false
+    @logfile&.puts text
+    puts text if both
+  end
+
+  def find_board_ids
+    ids = @download_config.project_config.board_configs.collect(&:id)
+    if ids.empty?
+      deprecated message: 'board_ids in the download block have been deprecated. See https://github.com/mikebowler/jira-export/wiki/Deprecated'
+      ids = @download_config.board_ids
+    end
+    raise 'Board ids must be specified' if ids.empty?
+
+    ids
+  end
+
+  def load_jira_config jira_config
+    @jira_url = jira_config['url']
+    @jira_email = jira_config['email']
+    @jira_api_token = jira_config['api_token']
+    @jira_personal_access_token = jira_config['personal_access_token']
+
+    raise 'When specifying an api-token, you must also specify email' if @jira_api_token && !@jira_email
+
+    if @jira_api_token && @jira_personal_access_token
+      raise "You can't specify both an api-token and a personal-access-token. They don't work together."
+    end
+
+    @cookies = (jira_config['cookies'] || []).collect { |key, value| "#{key}=#{value}" }.join(';')
+  end
+
+  def call_command command
+    log " #{command.gsub(/\s+/, ' ')}"
+    result = `#{command}`
+    log result unless $CHILD_STATUS.success?
+    return result if $CHILD_STATUS.success?
+
+    log "Failed call with exit status #{$CHILD_STATUS.exitstatus}. See #{@logfile_name} for details", both: true
+    exit $CHILD_STATUS.exitstatus
+  end
+
+  def make_curl_command url:
+    command = 'curl'
+    command += ' -s'
+    command += ' -k' if @download_config.project_config.settings['ignore_ssl_errors']
+    command += " --cookie #{@cookies.inspect}" unless @cookies.empty?
+    command += " --user #{@jira_email}:#{@jira_api_token}" if @jira_api_token
+    command += " -H \"Authorization: Bearer #{@jira_personal_access_token}\"" if @jira_personal_access_token
+    command += ' --request GET'
+    command += ' --header "Accept: application/json"'
+    command += " --url \"#{url}\""
+    command
+  end
+
+  def download_issues board_id:
+    log " Downloading primary issues for board #{board_id}", both: true
+    path = "#{@target_path}#{@download_config.project_config.file_prefix}_issues/"
+    unless Dir.exist?(path)
+      log " Creating path #{path}"
+      Dir.mkdir(path)
+    end
+
+    filter_id = @board_id_to_filter_id[board_id]
+    jql = make_jql(filter_id: filter_id)
+    jira_search_by_jql(jql: jql, initial_query: true, board_id: board_id, path: path)
+
+    log " Downloading linked issues for board #{board_id}", both: true
+    loop do
+      @issue_keys_pending_download.reject! { |key| @issue_keys_downloaded_in_current_run.include? key }
+      break if @issue_keys_pending_download.empty?
+
+      keys_to_request = @issue_keys_pending_download[0..99]
+      @issue_keys_pending_download.reject! { |key| keys_to_request.include? key }
+      jql = "key in (#{keys_to_request.join(', ')})"
+      jira_search_by_jql(jql: jql, initial_query: false, board_id: board_id, path: path)
+    end
+  end
+
+  def jira_search_by_jql jql:, initial_query:, board_id:, path:
+    intercept_jql = @download_config.project_config.settings['intercept_jql']
+    jql = intercept_jql.call jql if intercept_jql
+
+    log " #{jql}"
+    escaped_jql = CGI.escape jql
+
+    max_results = 100
+    start_at = 0
+    total = 1
+    while start_at < total
+      command = make_curl_command url: "#{@jira_url}/rest/api/2/search" \
+        "?jql=#{escaped_jql}&maxResults=#{max_results}&startAt=#{start_at}&expand=changelog&fields=*all"
+
+      json = JSON.parse call_command(command)
+      exit_if_call_failed json
+
+      json['issues'].each do |issue_json|
+        issue_json['exporter'] = {
+          'in_initial_query' => initial_query
+        }
+        identify_other_issues_to_be_downloaded issue_json
+        file = "#{issue_json['key']}-#{board_id}.json"
+        write_json(issue_json, File.join(path, file))
+      end
+
+      total = json['total'].to_i
+      max_results = json['maxResults']
+
+      message = " Downloaded #{start_at + 1}-#{[start_at + max_results, total].min} of #{total} issues to #{path} "
+      log message, both: true
+
+      start_at += json['issues'].size
+    end
+  end
+
+  def identify_other_issues_to_be_downloaded raw_issue
+    issue = Issue.new raw: raw_issue, board: nil
+    @issue_keys_downloaded_in_current_run << issue.key
+
+    # Parent
+    parent_key = issue.parent_key(project_config: @download_config.project_config)
+    @issue_keys_pending_download << parent_key if parent_key
+
+    # Sub-tasks
+    issue.raw['fields']['subtasks'].each do |raw_subtask|
+      @issue_keys_pending_download << raw_subtask['key']
+    end
+
+    # Links
+    # We shouldn't blindly follow links as some, like cloners, aren't valuable and are just wasting time/effort
+    # to download
+    # issue.raw['fields']['issuelinks'].each do |raw_link|
+    #   @issue_keys_pending_download << IssueLink(raw: raw_link).other_issue.key
+    # end
+  end
+
+  def exit_if_call_failed json
+    # Sometimes Jira returns the singular form of errorMessage and sometimes the plural. Consistency FTW.
+    return unless json['errorMessages'] || json['errorMessage']
+
+    log "Download failed. See #{@logfile_name} for details.", both: true
+    log " #{JSON.pretty_generate(json)}"
+    exit 1
+  end
+
+  def download_statuses
+    log ' Downloading all statuses', both: true
+    command = make_curl_command url: "\"#{@jira_url}/rest/api/2/status\""
+    json = JSON.parse call_command(command)
+
+    write_json json, "#{@target_path}#{@download_config.project_config.file_prefix}_statuses.json"
+  end
+
+  def download_board_configuration board_id:
+    log " Downloading board configuration for board #{board_id}", both: true
+    command = make_curl_command url: "#{@jira_url}/rest/agile/1.0/board/#{board_id}/configuration"
+
+    json = JSON.parse call_command(command)
+    exit_if_call_failed json
+
+    @board_id_to_filter_id[board_id] = json['filter']['id'].to_i
+    # @board_configuration = json if @download_config.board_ids.size == 1
+
+    file_prefix = @download_config.project_config.file_prefix
+    write_json json, "#{@target_path}#{file_prefix}_board_#{board_id}_configuration.json"
+
+    download_sprints board_id: board_id if json['type'] == 'scrum'
+  end
+
+  def download_sprints board_id:
+    log " Downloading sprints for board #{board_id}", both: true
+    file_prefix = @download_config.project_config.file_prefix
+    max_results = 100
+    start_at = 0
+    is_last = false
+
+    while is_last == false
+      command = make_curl_command url: "#{@jira_url}/rest/agile/1.0/board/#{board_id}/sprint?" \
+        "maxResults=#{max_results}&startAt=#{start_at}"
+      json = JSON.parse call_command(command)
+      exit_if_call_failed json
+
+      write_json json, "#{@target_path}#{file_prefix}_board_#{board_id}_sprints_#{start_at}.json"
+      is_last = json['isLast']
+      max_results = json['maxResults']
+      start_at += json['values'].size
+    end
+  end
+
+  def write_json json, filename
+    file_path = File.dirname(filename)
+    FileUtils.mkdir_p file_path unless File.exist?(file_path)
+
+    File.write(filename, JSON.pretty_generate(json))
+  end
+
+  def metadata_pathname
+    "#{@target_path}#{@download_config.project_config.file_prefix}_meta.json"
+  end
+
+  def load_metadata
+    # If we've never done a download before then this file won't be there. That's ok.
+    return unless File.exist? metadata_pathname
+
+    hash = JSON.parse(File.read metadata_pathname)
+
+    # Only use the saved metadata if the version number is the same one that we're currently using.
+    # If the cached data is in an older format then we're going to throw most of it away.
+    @cached_data_format_is_current = (hash['version'] || 0) == CURRENT_METADATA_VERSION
+    if @cached_data_format_is_current
+      hash.each do |key, value|
+        value = Date.parse(value) if value.is_a?(String) && value =~ /^\d{4}-\d{2}-\d{2}$/
+        @metadata[key] = value
+      end
+    end
+
+    # Even if this is the old format, we want to obey this one tag
+    @metadata['no-download'] = hash['no-download'] if hash['no-download']
+  end
+
+  def save_metadata
+    @metadata['version'] = CURRENT_METADATA_VERSION
+    @metadata['date_start_from_last_query'] = @start_date_in_query if @start_date_in_query
+
+    if @download_date_range.nil?
+      log "Making up a date range in meta since one wasn't specified. You'll want to change that.", both: true
+      today = Date.today
+      @download_date_range = (today - 7)..today
+    end
+
+    @metadata['earliest_date_start'] = @download_date_range.begin if @metadata['earliest_date_start'].nil?
+
+    @metadata['date_start'] = @download_date_range.begin
+    @metadata['date_end'] = @download_date_range.end
+
+    @metadata['jira_url'] = @jira_url
+
+    write_json @metadata, metadata_pathname
+  end
+
+  def remove_old_files
+    file_prefix = @download_config.project_config.file_prefix
+    Dir.foreach @target_path do |file|
+      next unless file =~ /^#{file_prefix}_\d+\.json$/
+
+      File.unlink "#{@target_path}#{file}"
+    end
+
+    return if @cached_data_format_is_current
+
+    # Also throw away all the previously downloaded issues.
+    path = File.join @target_path, "#{file_prefix}_issues"
+    return unless File.exist? path
+
+    Dir.foreach path do |file|
+      next unless file =~ /\.json$/
+
+      File.unlink File.join(path, file)
+    end
+  end
+
+  def make_jql filter_id:, today: Date.today
+    segments = []
+    segments << "filter=#{filter_id}"
+
+    unless @download_config.rolling_date_count.nil?
+      @download_date_range = (today.to_date - @download_config.rolling_date_count)..today.to_date
+
+      # For an incremental download, we want to query from the end of the previous one, not from the
+      # beginning of the full range.
+      @start_date_in_query = metadata['date_end'] || @download_date_range.begin
+      log " Incremental download only. Pulling from #{@start_date_in_query}", both: true if metadata['date_end']
+
+      # Catch-all to pick up anything that's been around since before the range started but hasn't
+      # had an update during the range.
+      catch_all = '((status changed OR Sprint is not EMPTY) AND statusCategory != Done)'
+
+      # Pick up any issues that had a status change in the range
+      start_date_text = @start_date_in_query.strftime '%Y-%m-%d'
+      end_date_text = today.strftime '%Y-%m-%d'
+      # find_in_range = %((status changed DURING ("#{start_date_text} 00:00","#{end_date_text} 23:59")))
+      find_in_range = %((updated >= "#{start_date_text} 00:00" AND updated <= "#{end_date_text} 23:59"))
+
+      segments << "(#{find_in_range} OR #{catch_all})"
+    end
+
+    segments.join ' AND '
+  end
+end
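
The part of the downloader that usually takes the most interpretation is make_jql. The sketch below is illustrative only and is not part of the package: it mirrors the string assembly from make_jql above, with made-up values (filter id 123, a previous download whose metadata recorded a date_end of 2023-01-15, and a "today" of 2023-02-01), to show the shape of the incremental-download query.

# Illustrative sketch, not from the gem. All values are invented for the example.
filter_id = 123
start_date_text = '2023-01-15' # end of the previous download, read from the saved metadata
end_date_text = '2023-02-01'   # "today"
catch_all = '((status changed OR Sprint is not EMPTY) AND statusCategory != Done)'
find_in_range = %((updated >= "#{start_date_text} 00:00" AND updated <= "#{end_date_text} 23:59"))
puts ["filter=#{filter_id}", "(#{find_in_range} OR #{catch_all})"].join(' AND ')
# Prints (wrapped here for readability):
#   filter=123 AND ((updated >= "2023-01-15 00:00" AND updated <= "2023-02-01 23:59")
#     OR ((status changed OR Sprint is not EMPTY) AND statusCategory != Done))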
data/lib/jirametrics/examples/aggregated_project.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+class Exporter
+  def aggregated_project name:, project_names:
+    project name: name do
+      puts name
+      aggregate do
+        project_names.each do |project_name|
+          include_issues_from project_name
+        end
+      end
+
+      file_prefix name
+
+      file do
+        file_suffix '.html'
+        issues.reject! do |issue|
+          %w[Sub-task Epic].include? issue.type
+        end
+
+        html_report do
+          cycletime_scatterplot do
+            show_trend_lines
+            grouping_rules do |issue, rules|
+              rules.label = issue.board.name
+            end
+          end
+          aging_work_in_progress_chart
+          aging_work_table do
+            age_cutoff 21
+          end
+        end
+      end
+    end
+  end
+end
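
The file above defines a convenience method on Exporter rather than a standalone script. As a rough usage sketch (not taken from the package), assuming the usual jirametrics configuration entry point Exporter.configure and two projects named 'TeamA' and 'TeamB' defined elsewhere in the same configuration, it might be invoked like this:

# Hypothetical configuration sketch; the aggregated name and project names are invented.
Exporter.configure do
  aggregated_project name: 'all_teams', project_names: %w[TeamA TeamB]
end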
data/lib/jirametrics/examples/standard_project.rb
@@ -0,0 +1,111 @@
+# frozen_string_literal: true
+
+class Exporter
+  def standard_project name:, file_prefix:, ignore_issues: nil, starting_status: nil, boards: {}, default_board: nil
+    project name: name do
+      puts name
+
+      settings['blocked_link_text'] = ['is blocked by']
+      file_prefix file_prefix
+      download do
+        rolling_date_count 90
+      end
+
+      boards.each_key do |board_id|
+        block = boards[board_id]
+        if block == :default
+          block = lambda do |_|
+            start_at first_time_in_status_category('In Progress')
+            stop_at still_in_status_category('Done')
+          end
+        end
+        board id: board_id do
+          cycletime(&block)
+          expedited_priority_names 'Critical', 'Highest', 'Immediate Gating'
+        end
+      end
+
+      file do
+        file_suffix '.html'
+        issues.reject! do |issue|
+          %w[Sub-task Epic].include? issue.type
+        end
+
+        issues.reject! { |issue| ignore_issues.include? issue.key } if ignore_issues
+
+        html_report do
+          board_id default_board if default_board
+
+          html "<H1>#{file_prefix}</H1>", type: :header
+          boards.each_key do |id|
+            board = find_board id
+            html "<div><a href='#{board.url}'>#{id} #{board.name}</a></div>",
+              type: :header
+          end
+
+          discard_changes_before status_becomes: (starting_status || :backlog)
+
+          cycletime_scatterplot do
+            show_trend_lines
+          end
+          cycletime_scatterplot do # Epics
+            header_text 'Parents only'
+            filter_issues { |i| i.parent }
+          end
+          cycletime_histogram
+          cycletime_histogram do
+            grouping_rules do |issue, rules|
+              rules.label = issue.board.cycletime.stopped_time(issue).to_date.strftime('%b %Y')
+            end
+          end
+
+          throughput_chart do
+            description_text '<h2>Number of items completed, grouped by issue type</h2>'
+          end
+          throughput_chart do
+            header_text nil
+            description_text '<h2>Number of items completed, grouped by completion status and resolution</h2>'
+            grouping_rules do |issue, rules|
+              if issue.resolution
+                rules.label = "#{issue.status.name}:#{issue.resolution}"
+              else
+                rules.label = issue.status.name
+              end
+            end
+          end
+
+          aging_work_in_progress_chart
+          aging_work_bar_chart
+          aging_work_table
+          daily_wip_by_age_chart
+          daily_wip_by_blocked_stalled_chart
+          expedited_chart
+          sprint_burndown
+          story_point_accuracy_chart
+          # story_point_accuracy_chart do
+          #   header_text nil
+          #   description_text nil
+          #   y_axis(sort_order: %w[Story Task Defect], label: 'TShirt Sizes') { |issue, _started_time| issue.type }
+          # end
+
+          dependency_chart do
+            link_rules do |link, rules|
+              case link.name
+              when 'Cloners'
+                rules.ignore
+              when 'Dependency', 'Blocks', 'Parent/Child', 'Cause', 'Satisfy Requirement', 'Relates'
+                rules.merge_bidirectional keep: 'outward'
+                rules.merge_bidirectional keep: 'outward'
+              when 'Sync'
+                rules.use_bidirectional_arrows
+                # rules.line_color = 'red'
+              else
+                puts "name=#{link.name}, label=#{link.label}"
+              end
+            end
+          end
+        end
+      end
+    end
+  end
+end
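
Likewise, standard_project is meant to be called from a configuration file. A minimal usage sketch (not from the package), assuming the Exporter.configure entry point and a single board with id 1 that should use the :default cycletime block; the project name, file prefix, and board id are invented for illustration:

# Hypothetical configuration sketch; all values are invented.
Exporter.configure do
  standard_project name: 'Team A', file_prefix: 'teama', boards: { 1 => :default }
end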
data/lib/jirametrics/expedited_chart.rb
@@ -0,0 +1,169 @@
+# frozen_string_literal: true
+
+require 'jirametrics/chart_base'
+
+class ExpeditedChart < ChartBase
+  EXPEDITED_SEGMENT = Object.new.tap do |segment|
+    def segment.to_json *_args
+      <<~SNIPPET
+        {
+          borderColor: ctx => expedited(ctx, 'red') || notExpedited(ctx, 'gray'),
+          borderDash: ctx => notExpedited(ctx, [6, 6])
+        }
+      SNIPPET
+    end
+  end
+
+  attr_accessor :issues, :cycletime, :possible_statuses, :date_range
+  attr_reader :expedited_label
+
+  def initialize
+    super()
+
+    header_text 'Expedited work'
+    description_text <<-HTML
+      <p>
+        This chart only shows issues that have been expedited at some point. We care about these as
+        any form of expedited work will affect the entire system and will slow down non-expedited work.
+        Refer to this article on
+        <a href="https://improvingflow.com/2021/06/16/classes-of-service.html">classes of service</a>
+        for a longer explanation on why we want to avoid expedited work.
+      </p>
+      <p>
+        The lines indicate time that this issue was expedited. When the line is red then the issue was
+        expedited at that time. When it's gray then it wasn't. Orange dots indicate the date the work
+        was started and green dots represent the completion date. Lastly, the vertical height of the
+        lines/dots indicates how long it's been since this issue was created.
+      </p>
+    HTML
+  end
+
+  def run
+    data_sets = find_expedited_issues.collect do |issue|
+      make_expedite_lines_data_set(issue: issue, expedite_data: prepare_expedite_data(issue))
+    end.compact
+
+    if data_sets.empty?
+      '<h1>Expedited work</h1>There is no expedited work in this time period.'
+    else
+      wrap_and_render(binding, __FILE__)
+    end
+  end
+
+  def prepare_expedite_data issue
+    expedite_start = nil
+    result = []
+    expedited_priority_names = issue.board.expedited_priority_names
+
+    issue.changes.each do |change|
+      next unless change.priority?
+
+      if expedited_priority_names.include? change.value
+        expedite_start = change.time
+      elsif expedite_start
+        start_date = expedite_start.to_date
+        stop_date = change.time.to_date
+
+        if date_range.include?(start_date) || date_range.include?(stop_date) ||
+           (start_date < date_range.begin && stop_date > date_range.end)
+
+          result << [expedite_start, :expedite_start]
+          result << [change.time, :expedite_stop]
+        end
+        expedite_start = nil
+      end
+    end
+
+    # If expedite_start is still set then we never ended.
+    result << [expedite_start, :expedite_start] if expedite_start
+    result
+  end
+
+  def find_expedited_issues
+    expedited_issues = @issues.reject do |issue|
+      prepare_expedite_data(issue).empty?
+    end
+
+    expedited_issues.sort { |a, b| a.key_as_i <=> b.key_as_i }
+  end
+
+  def later_date date1, date2
+    return date1 if date2.nil?
+    return date2 if date1.nil?
+
+    [date1, date2].max
+  end
+
+  def make_point issue:, time:, label:, expedited:
+    {
+      y: (time.to_date - issue.created.to_date).to_i + 1,
+      x: time.to_date.to_s,
+      title: ["#{issue.key} #{label} : #{issue.summary}"],
+      expedited: (expedited ? 1 : 0)
+    }
+  end
+
+  def make_expedite_lines_data_set issue:, expedite_data:
+    cycletime = issue.board.cycletime
+    started_time = cycletime.started_time(issue)
+    stopped_time = cycletime.stopped_time(issue)
+
+    expedite_data << [started_time, :issue_started] if started_time
+    expedite_data << [stopped_time, :issue_stopped] if stopped_time
+    expedite_data.sort! { |a, b| a[0] <=> b[0] }
+
+    # If none of the data would be visible on the chart then skip it.
+    return nil unless expedite_data.any? { |time, _action| time.to_date >= date_range.begin }
+
+    data = []
+    dot_colors = []
+    point_styles = []
+    expedited = false
+
+    expedite_data.each do |time, action|
+      case action
+      when :issue_started
+        data << make_point(issue: issue, time: time, label: 'Started', expedited: expedited)
+        dot_colors << 'orange'
+        point_styles << 'rect'
+      when :issue_stopped
+        data << make_point(issue: issue, time: time, label: 'Completed', expedited: expedited)
+        dot_colors << 'green'
+        point_styles << 'rect'
+      when :expedite_start
+        data << make_point(issue: issue, time: time, label: 'Expedited', expedited: true)
+        dot_colors << 'red'
+        point_styles << 'circle'
+        expedited = true
+      when :expedite_stop
+        data << make_point(issue: issue, time: time, label: 'Not expedited', expedited: false)
+        dot_colors << 'gray'
+        point_styles << 'circle'
+        expedited = false
+      else
+        raise "Unexpected action: #{action}"
+      end
+    end
+
+    unless expedite_data.empty?
+      last_change_time = expedite_data[-1][0].to_date
+      if last_change_time && last_change_time <= date_range.end && stopped_time.nil?
+        data << make_point(issue: issue, time: date_range.end, label: 'Still ongoing', expedited: expedited)
+        dot_colors << 'blue' # It won't be visible so it doesn't matter
+        point_styles << 'dash'
+      end
+    end
+
+    {
+      type: 'line',
+      label: issue.key,
+      data: data,
+      fill: false,
+      showLine: true,
+      backgroundColor: dot_colors,
+      pointBorderColor: 'black',
+      pointStyle: point_styles,
+      segment: EXPEDITED_SEGMENT
+    }
+  end
+end
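
To make the chart data easier to picture, here is a small sketch (not from the gem) of the hash that make_point builds for a single dot: y is the issue's age in days since creation (inclusive), x is the calendar date, and the expedited flag is flattened to 1 or 0 so the segment styling can switch colours. The issue key, summary, and dates are invented.

# Illustrative sketch only; mirrors the shape of make_point's return value.
require 'date'

created = Date.new(2024, 1, 1)
time = Date.new(2024, 1, 10)
point = {
  y: (time - created).to_i + 1,                    # 10 days old on this date
  x: time.to_s,                                    # "2024-01-10"
  title: ['ABC-123 Expedited : Example summary'],  # tooltip text for the dot
  expedited: 1                                     # 1 while expedited, 0 otherwise
}
p point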