jirametrics 2.26.1 → 2.28

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 960ce78e94ec2c7e87875bd0dc6db2c16b11cee0e4c4dd5a22d8bf3bab1c893d
4
- data.tar.gz: f5b2d9aac4d9059654ad7a569d477b0b689c8d22f30760867aec9e13b3f1db61
3
+ metadata.gz: d93d43ce61e0fcec89c5ee737044cff4007b1f0079be2c6be774c56cb15130e8
4
+ data.tar.gz: ffbe1b15fc8aa08f928e7c322cf53dc5a1646b9da01b7b9e81e8b5bd2c44650d
5
5
  SHA512:
6
- metadata.gz: 727e2f5b3e57e937eacd05d868ff1f7a75e16210f0e46e75117a88e56287fb765238753710289d9fb2f09b38d6e940f47d0ae2689be313b1282238727ddae0eb
7
- data.tar.gz: d60771157926c74a7ac45d12ea023eef1808b4ea027f8edd6fba6108a79b2f0df8bdb054bf58049cc8a3f3de44e4926dd0af0a40b95b25fb07ed1391d829418b
6
+ metadata.gz: 66d0c9c0db9b11c4278935a8342cae5f9b6b4e34fffc46d1bad6b719b6ca779374191f05f4139caf28995e8014ab77cfea79945df9abe546405abf9e381e4311
7
+ data.tar.gz: 3f10707f9b44c14dd8dae67ce73cd208cb7554cee147d502f3b99315b49cb524aa6f12662c1624720f204e814a5686e37393f907da8ec43f1befc88398a79cc7
@@ -0,0 +1,5 @@
1
+ #!/usr/bin/env ruby
2
+ # frozen_string_literal: true
3
+
4
+ require 'jirametrics'
5
+ JiraMetrics.start(['mcp'] + ARGV)
@@ -377,7 +377,7 @@ class ChartBase
377
377
  end
378
378
 
379
379
  def seam_start type = 'chart'
380
- "\n<!-- seam-start | chart#{@@chart_counter} | #{self.class} | #{header_text} | #{type} -->"
380
+ "\n<!-- seam-start | chart#{@@chart_counter} | #{self.class} | #{header_text} | #{type} -->\n"
381
381
  end
382
382
 
383
383
  def seam_end type = 'chart'
@@ -69,6 +69,9 @@ class CumulativeFlowDiagram < ChartBase
69
69
  When the cursor is near the right edge and that point falls outside the visible date range,
70
70
  CT and TP cannot be calculated and are hidden; only WIP is shown.
71
71
  </div>
72
+ <div class="p">
73
+ See also: This article on <a href="https://blog.mikebowler.ca/2026/03/27/cumulative-flow-diagram/">how to read a CFD</a>.
74
+ </div>
72
75
  HTML
73
76
  instance_eval(&block)
74
77
  end
@@ -87,13 +87,14 @@ class DailyView < ChartBase
87
87
  lines << ["#{marker} Blocked by flag"] if blocked_stalled.flag
88
88
  lines << ["#{marker} Blocked by status: #{blocked_stalled.status}"] if blocked_stalled.blocked_by_status?
89
89
  blocked_stalled.blocking_issue_keys&.each do |key|
90
- blocking_issue = issues.find { |i| i.key == key }
90
+ blocking_issue = issues.find_by_key key: key, include_hidden: true
91
91
  if blocking_issue
92
- lines << "<section><div class=\"foldable startFolded\">#{marker} Blocked by issue: #{key}</div>"
92
+ lines << "<section><div class=\"foldable startFolded\">#{marker} Blocked by issue: " \
93
+ "#{make_issue_label issue: blocking_issue, done: blocking_issue.done?}</div>"
93
94
  lines << blocking_issue
94
95
  lines << '</section>'
95
96
  else
96
- lines << ["#{marker} Blocked by issue: #{key}"]
97
+ lines << ["#{marker} Blocked by issue: #{key} (no description found)"]
97
98
  end
98
99
  end
99
100
  elsif blocked_stalled.stalled_by_status?
@@ -108,11 +108,24 @@ class DownloaderForCloud < Downloader
108
108
  }
109
109
  issue = Issue.new(raw: issue_json, board: board)
110
110
  data = issue_datas.find { |d| d.key == issue.key }
111
+ unless data
112
+ log " Skipping #{issue.key}: returned by Jira but key not in request (issue may have been moved)"
113
+ next
114
+ end
111
115
  data.up_to_date = true
112
116
  data.last_modified = issue.updated
113
117
  data.issue = issue
114
118
  end
115
119
 
120
+ # Mark any unmatched requests as up_to_date to prevent infinite re-fetching.
121
+ # This happens when Jira returns a different key (moved issue) leaving the original unmatched.
122
+ issue_datas.each do |data|
123
+ next if data.up_to_date
124
+
125
+ log " Skipping #{data.key}: not returned by Jira (issue may have been deleted or moved)"
126
+ data.up_to_date = true
127
+ end
128
+
116
129
  issue_datas
117
130
  end
118
131
 
@@ -168,15 +181,20 @@ class DownloaderForCloud < Downloader
168
181
 
169
182
  issue_data_hash = search_for_issues jql: jql, board_id: board.id, path: path
170
183
 
184
+ checked_for_related = Set.new
185
+ in_related_phase = false
186
+
171
187
  loop do
172
188
  related_issue_keys = Set.new
173
189
  stale = issue_data_hash.values.reject { |data| data.up_to_date }
174
190
  unless stale.empty?
175
- log_start ' Downloading more issues '
191
+ log_start ' Downloading more issues ' unless in_related_phase
176
192
  stale.each_slice(100) do |slice|
177
193
  slice = bulk_fetch_issues(issue_datas: slice, board: board, in_initial_query: true)
178
194
  progress_dot
179
195
  slice.each do |data|
196
+ next unless data.issue
197
+
180
198
  @file_system.save_json(
181
199
  json: data.issue.raw, filename: data.cache_path
182
200
  )
@@ -184,22 +202,25 @@ class DownloaderForCloud < Downloader
184
202
  # to parse the file just to find the timestamp
185
203
  @file_system.utime time: data.issue.updated, file: data.cache_path
186
204
 
187
- issue = data.issue
188
- next unless issue
189
-
190
- parent_key = issue.parent_key(project_config: @download_config.project_config)
191
- related_issue_keys << parent_key if parent_key
192
-
193
- # Sub-tasks
194
- issue.raw['fields']['subtasks']&.each do |raw_subtask|
195
- related_issue_keys << raw_subtask['key']
196
- end
205
+ collect_related_issue_keys issue: data.issue, related_issue_keys: related_issue_keys
206
+ checked_for_related << data.key
197
207
  end
198
208
  end
199
- end_progress
209
+ end_progress unless in_related_phase
200
210
  end
201
211
 
202
- # Remove all the ones we already downloaded
212
+ # Also scan up-to-date cached issues we haven't checked yet — they may reference
213
+ # related issues that are not in the primary query result.
214
+ issue_data_hash.each_value do |data|
215
+ next if checked_for_related.include?(data.key)
216
+ next unless @file_system.file_exist?(data.cache_path)
217
+
218
+ checked_for_related << data.key
219
+ raw = @file_system.load_json(data.cache_path)
220
+ collect_related_issue_keys issue: Issue.new(raw: raw, board: board), related_issue_keys: related_issue_keys
221
+ end
222
+
223
+ # Remove all the ones we already have
203
224
  related_issue_keys.reject! { |key| issue_data_hash[key] }
204
225
 
205
226
  related_issue_keys.each do |key|
@@ -211,9 +232,15 @@ class DownloaderForCloud < Downloader
211
232
  end
212
233
  break if related_issue_keys.empty?
213
234
 
214
- log " Downloading linked issues for board #{board.id}", both: true
235
+ unless in_related_phase
236
+ in_related_phase = true
237
+ log " Identifying related issues (parents, subtasks, links) for board #{board.id}", both: true
238
+ log_start ' Downloading more issues '
239
+ end
215
240
  end
216
241
 
242
+ end_progress if in_related_phase
243
+
217
244
  delete_issues_from_cache_that_are_not_in_server(
218
245
  issue_data_hash: issue_data_hash, path: path
219
246
  )
@@ -238,6 +265,22 @@ class DownloaderForCloud < Downloader
238
265
  end
239
266
  end
240
267
 
268
+ def collect_related_issue_keys issue:, related_issue_keys:
269
+ parent_key = issue.parent_key(project_config: @download_config.project_config)
270
+ related_issue_keys << parent_key if parent_key
271
+
272
+ issue.raw['fields']['subtasks']&.each do |raw_subtask|
273
+ related_issue_keys << raw_subtask['key']
274
+ end
275
+
276
+ issue.raw['fields']['issuelinks']&.each do |link|
277
+ next if link['type']['name'] == 'Cloners'
278
+
279
+ linked = link['inwardIssue'] || link['outwardIssue']
280
+ related_issue_keys << linked['key'] if linked
281
+ end
282
+ end
283
+
241
284
  def last_modified filename:
242
285
  File.mtime(filename) if File.exist?(filename)
243
286
  end
@@ -10,7 +10,7 @@
10
10
  class Exporter
11
11
  def aggregated_project name:, project_names:, settings: {}
12
12
  project name: name do
13
- puts name
13
+ file_system.log name
14
14
  file_prefix name
15
15
  self.settings.merge! stringify_keys(settings)
16
16
 
@@ -9,7 +9,7 @@ class Exporter
9
9
  show_experimental_charts: false, github_repos: nil
10
10
  exporter = self
11
11
  project name: name do
12
- puts name
12
+ file_system.log name
13
13
  file_prefix file_prefix
14
14
 
15
15
  self.anonymize if anonymize
@@ -35,7 +35,7 @@ class Exporter
35
35
  download do
36
36
  self.rolling_date_count(rolling_date_count) if rolling_date_count
37
37
  self.no_earlier_than(no_earlier_than) if no_earlier_than
38
- github_repo github_repos if github_repos
38
+ github_repo *github_repos if github_repos
39
39
  end
40
40
 
41
41
  issues.reject! do |issue|
@@ -3,13 +3,14 @@
3
3
  require 'json'
4
4
 
5
5
  class FileSystem
6
- attr_accessor :logfile, :logfile_name
6
+ attr_accessor :logfile, :logfile_name, :log_only
7
7
 
8
8
  def initialize
9
9
  # In almost all cases, this will be immediately replaced in the Exporter
10
10
  # but if we fail before we get that far, this will at least let a useful
11
11
  # error show up on the console.
12
12
  @logfile = $stdout
13
+ @log_only = false
13
14
  end
14
15
 
15
16
  # Effectively the same as File.read except it forces the encoding to UTF-8
@@ -59,7 +60,7 @@ class FileSystem
59
60
 
60
61
  logfile.puts message
61
62
  logfile.puts more if more
62
- return unless also_write_to_stderr
63
+ return if log_only || !also_write_to_stderr
63
64
 
64
65
  # Obscure edge-case where we're trying to log something before logging is even
65
66
  # set up. Quick escape here so that we don't dump the error twice.
@@ -70,23 +71,29 @@ class FileSystem
70
71
 
71
72
  def log_start message
72
73
  logfile.puts message
73
- return if logfile == $stdout
74
+ return if log_only || logfile == $stdout
74
75
 
75
76
  $stderr.print message
76
77
  $stderr.flush
77
78
  end
78
79
 
79
80
  def start_progress
81
+ return if log_only
82
+
80
83
  $stderr.print ' '
81
84
  $stderr.flush
82
85
  end
83
86
 
84
87
  def progress_dot
88
+ return if log_only
89
+
85
90
  $stderr.print '.'
86
91
  $stderr.flush
87
92
  end
88
93
 
89
94
  def end_progress
95
+ return if log_only
96
+
90
97
  $stderr.puts '' # rubocop:disable Style/StderrPuts
91
98
  end
92
99
 
@@ -64,9 +64,10 @@ class GithubGateway
64
64
  raw_pr['body']
65
65
  ]
66
66
 
67
- sources.compact
68
- .flat_map { |s| s.scan(@issue_key_pattern) }
69
- .uniq
67
+ keys = sources.compact.flat_map { |s| s.scan(@issue_key_pattern) }.uniq
68
+ return keys unless keys.empty?
69
+
70
+ commit_messages_for(raw_pr['number']).flat_map { |msg| msg.scan(@issue_key_pattern) }.uniq
70
71
  end
71
72
 
72
73
  def extract_reviews raw_reviews
@@ -83,11 +84,19 @@ class GithubGateway
83
84
 
84
85
  private
85
86
 
87
+ def commit_messages_for pr_number
88
+ args = ['pr', 'view', pr_number.to_s, '--json', 'commits', '--repo', @repo]
89
+ result = run_command(args)
90
+ (result['commits'] || []).flat_map do |commit|
91
+ [commit['messageHeadline'], commit['messageBody']].compact
92
+ end
93
+ end
94
+
86
95
  def build_issue_key_pattern
87
96
  return nil if @project_keys.empty?
88
97
 
89
98
  keys_pattern = @project_keys.map { |k| Regexp.escape(k) }.join('|')
90
- Regexp.new("\\b(?:#{keys_pattern})-\\d+\\b")
99
+ Regexp.new("\\b(?:#{keys_pattern})-\\d+(?![A-Za-z0-9])")
91
100
  end
92
101
 
93
102
  def run_command args
@@ -35,7 +35,7 @@ function makeFoldable() {
35
35
  const toggleButton = document.createElement(element.tagName); //'button');
36
36
  toggleButton.id = toggleId;
37
37
  toggleButton.className = 'foldable-toggle-btn';
38
- toggleButton.innerHTML = '▼ ' + element.textContent;
38
+ toggleButton.innerHTML = '▼ ' + element.innerHTML;
39
39
 
40
40
  // Create a content container
41
41
  const contentContainer = document.createElement('div');
@@ -28,15 +28,20 @@ new Chart(document.getElementById('<%= chart_id %>').getContext('2d'), {
28
28
  max: "<%= (date_range.end + 1).to_s %>"
29
29
  },
30
30
  y: {
31
+ min: 0,
32
+ max: <%= (@highest_y_value * 1.1).ceil %>,
31
33
  scaleLabel: {
32
- display: true,
33
- min: 0,
34
- max: <%= @highest_y_value %>
34
+ display: true
35
35
  },
36
36
  <%= render_axis_title :y %>
37
37
  grid: {
38
38
  color: <%= CssVariable['--grid-line-color'].to_json %>
39
39
  },
40
+ ticks: {
41
+ callback: function(value, index, ticks) {
42
+ return index === ticks.length - 1 ? null : value;
43
+ }
44
+ }
40
45
  }
41
46
  },
42
47
  plugins: {
@@ -210,7 +210,25 @@ class Issue
210
210
  end
211
211
 
212
212
  def first_time_visible_on_board
213
- first_time_in_status(*board.visible_columns.collect(&:status_ids).flatten)
213
+ visible_status_ids = board.visible_columns.collect(&:status_ids).flatten
214
+ return first_time_in_status(*visible_status_ids) unless board.scrum?
215
+
216
+ # For scrum boards, an issue is only visible when BOTH conditions are true simultaneously:
217
+ # 1. Its status is in a visible column
218
+ # 2. It is in an active sprint
219
+ # At each moment one condition becomes true, check if the other is already true.
220
+ candidates = []
221
+
222
+ status_changes.each do |change|
223
+ next unless visible_status_ids.include?(change.value_id)
224
+ candidates << change if in_active_sprint_at?(change.time)
225
+ end
226
+
227
+ sprint_entry_events.each do |effective_time, representative_change|
228
+ candidates << representative_change if in_visible_status_at?(effective_time, visible_status_ids)
229
+ end
230
+
231
+ candidates.min_by(&:time)
214
232
  end
215
233
 
216
234
  def reasons_not_visible_on_board
@@ -815,6 +833,72 @@ class Issue
815
833
 
816
834
  private
817
835
 
836
+ # Returns [[effective_time, change_item]] for each moment the issue entered an active sprint.
837
+ # Skips sprints that were removed before they activated.
838
+ def sprint_entry_events
839
+ data_clazz = Struct.new(:sprint_id, :sprint_start, :add_time, :change)
840
+ events = []
841
+ in_sprint = []
842
+
843
+ @changes.each do |change|
844
+ next unless change.sprint?
845
+
846
+ (change.value_id - change.old_value_id).each do |sprint_id|
847
+ sprint_start, = find_sprint_start_end(sprint_id: sprint_id, change: change)
848
+ in_sprint << data_clazz.new(sprint_id, sprint_start, change.time, change) if sprint_start
849
+ end
850
+
851
+ (change.old_value_id - change.value_id).each do |sprint_id|
852
+ data = in_sprint.find { |d| d.sprint_id == sprint_id }
853
+ next unless data
854
+
855
+ in_sprint.delete(data)
856
+ next if data.sprint_start >= change.time # sprint hadn't activated before removal
857
+
858
+ effective_time = [data.add_time, data.sprint_start].max
859
+ events << [effective_time, sprint_change_at(effective_time, data.change)]
860
+ end
861
+ end
862
+
863
+ in_sprint.each do |data|
864
+ effective_time = [data.add_time, data.sprint_start].max
865
+ events << [effective_time, sprint_change_at(effective_time, data.change)]
866
+ end
867
+
868
+ events
869
+ end
870
+
871
+ def sprint_change_at effective_time, change
872
+ return change if effective_time == change.time
873
+
874
+ ChangeItem.new(
875
+ raw: { 'field' => 'Sprint', 'toString' => 'Sprint activated', 'to' => '0', 'from' => nil, 'fromString' => nil },
876
+ author_raw: nil,
877
+ time: effective_time,
878
+ artificial: true
879
+ )
880
+ end
881
+
882
+ def in_active_sprint_at? time
883
+ active_ids = []
884
+ @changes.each do |change|
885
+ break if change.time > time
886
+ next unless change.sprint?
887
+
888
+ (change.value_id - change.old_value_id).each do |sprint_id|
889
+ sprint_start, = find_sprint_start_end(sprint_id: sprint_id, change: change)
890
+ active_ids << sprint_id if sprint_start && sprint_start <= time
891
+ end
892
+ (change.old_value_id - change.value_id).each { |id| active_ids.delete(id) }
893
+ end
894
+ active_ids.any?
895
+ end
896
+
897
+ def in_visible_status_at? time, visible_status_ids
898
+ last = status_changes.reverse.find { |c| c.time <= time }
899
+ last && visible_status_ids.include?(last.value_id)
900
+ end
901
+
818
902
  def load_history_into_changes
819
903
  @raw['changelog']['histories']&.each do |history|
820
904
  created = parse_time(history['created'])
@@ -0,0 +1,531 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'mcp'
4
+ require 'mcp/server/transports/stdio_transport'
5
+
6
+ class McpServer
7
+ def initialize projects:, aggregates: {}, timezone_offset: '+00:00'
8
+ @projects = projects
9
+ @aggregates = aggregates
10
+ @timezone_offset = timezone_offset
11
+ end
12
+
13
+ def run
14
+ canonical_tools = [ListProjectsTool, AgingWorkTool, CompletedWorkTool, NotYetStartedTool, StatusTimeAnalysisTool]
15
+ alias_tools = ALIASES.map do |alias_name, canonical|
16
+ schema = canonical.input_schema
17
+ Class.new(canonical) do
18
+ tool_name alias_name
19
+ input_schema schema
20
+ end
21
+ end
22
+
23
+ server = MCP::Server.new(
24
+ name: 'jirametrics',
25
+ version: Gem.loaded_specs['jirametrics']&.version&.to_s || '0.0.0',
26
+ tools: canonical_tools + alias_tools,
27
+ server_context: { projects: @projects, aggregates: @aggregates, timezone_offset: @timezone_offset }
28
+ )
29
+
30
+ transport = MCP::Server::Transports::StdioTransport.new(server)
31
+ transport.open
32
+ end
33
+
34
+ HISTORY_FILTER_SCHEMA = {
35
+ history_field: {
36
+ type: 'string',
37
+ description: 'When combined with history_value, only return issues where this field ever had that value ' \
38
+ '(e.g. "priority", "status"). Both history_field and history_value must be provided together.'
39
+ },
40
+ history_value: {
41
+ type: 'string',
42
+ description: 'The value to look for in the change history of history_field (e.g. "Highest", "Done").'
43
+ },
44
+ ever_blocked: {
45
+ type: 'boolean',
46
+ description: 'When true, only return issues that were ever blocked. Blocked includes flagged items, ' \
47
+ 'issues in blocked statuses, and blocking issue links.'
48
+ },
49
+ ever_stalled: {
50
+ type: 'boolean',
51
+ description: 'When true, only return issues that were ever stalled. Stalled means the issue sat ' \
52
+ 'inactive for longer than the stalled threshold, or entered a stalled status.'
53
+ },
54
+ currently_blocked: {
55
+ type: 'boolean',
56
+ description: 'When true, only return issues that are currently blocked (as of the data end date).'
57
+ },
58
+ currently_stalled: {
59
+ type: 'boolean',
60
+ description: 'When true, only return issues that are currently stalled (as of the data end date).'
61
+ }
62
+ }.freeze
63
+
64
+ def self.resolve_projects server_context, project_filter
65
+ return nil if project_filter.nil?
66
+
67
+ aggregates = server_context[:aggregates] || {}
68
+ aggregates[project_filter] || [project_filter]
69
+ end
70
+
71
+ def self.column_name_for board, status_id
72
+ board.visible_columns.find { |c| c.status_ids.include?(status_id) }&.name
73
+ end
74
+
75
+ def self.time_per_column issue, end_time
76
+ changes = issue.status_changes
77
+ _, stopped = issue.started_stopped_times
78
+ effective_end = stopped && stopped < end_time ? stopped : end_time
79
+ board = issue.board
80
+
81
+ result = Hash.new(0.0)
82
+
83
+ if changes.empty?
84
+ col = column_name_for(board, issue.status.id) || issue.status.name
85
+ duration = effective_end - issue.created
86
+ result[col] += duration if duration.positive?
87
+ return result
88
+ end
89
+
90
+ first_change = changes.first
91
+ initial_col = column_name_for(board, first_change.old_value_id) || first_change.old_value
92
+ initial_duration = first_change.time - issue.created
93
+ result[initial_col] += initial_duration if initial_duration.positive?
94
+
95
+ changes.each_cons(2) do |prev_change, next_change|
96
+ col = column_name_for(board, prev_change.value_id) || prev_change.value
97
+ duration = next_change.time - prev_change.time
98
+ result[col] += duration if duration.positive?
99
+ end
100
+
101
+ last_change = changes.last
102
+ final_col = column_name_for(board, last_change.value_id) || last_change.value
103
+ final_duration = effective_end - last_change.time
104
+ result[final_col] += final_duration if final_duration.positive?
105
+
106
+ result
107
+ end
108
+
109
+ def self.time_per_status issue, end_time
110
+ changes = issue.status_changes
111
+ _, stopped = issue.started_stopped_times
112
+ effective_end = stopped && stopped < end_time ? stopped : end_time
113
+
114
+ result = Hash.new(0.0)
115
+
116
+ if changes.empty?
117
+ duration = effective_end - issue.created
118
+ result[issue.status.name] += duration if duration.positive?
119
+ return result
120
+ end
121
+
122
+ first_change = changes.first
123
+ initial_duration = first_change.time - issue.created
124
+ result[first_change.old_value] += initial_duration if initial_duration.positive?
125
+
126
+ changes.each_cons(2) do |prev_change, next_change|
127
+ duration = next_change.time - prev_change.time
128
+ result[prev_change.value] += duration if duration.positive?
129
+ end
130
+
131
+ last_change = changes.last
132
+ final_duration = effective_end - last_change.time
133
+ result[last_change.value] += final_duration if final_duration.positive?
134
+
135
+ result
136
+ end
137
+
138
+ def self.flow_efficiency_percent issue, end_time
139
+ active_time, total_time = issue.flow_efficiency_numbers(end_time: end_time)
140
+ total_time.positive? ? (active_time / total_time * 100).round(1) : nil
141
+ end
142
+
143
+ def self.matches_blocked_stalled?(bsc, ever_blocked, ever_stalled, currently_blocked, currently_stalled)
144
+ return false if ever_blocked && bsc.none?(&:blocked?)
145
+ return false if ever_stalled && bsc.none?(&:stalled?)
146
+ return false if currently_blocked && !bsc.last&.blocked?
147
+ return false if currently_stalled && !bsc.last&.stalled?
148
+
149
+ true
150
+ end
151
+
152
+ def self.matches_history?(issue, end_time, history_field, history_value,
153
+ ever_blocked, ever_stalled, currently_blocked, currently_stalled)
154
+ return false if history_field && history_value &&
155
+ issue.changes.none? { |c| c.field == history_field && c.value == history_value }
156
+
157
+ if ever_blocked || ever_stalled || currently_blocked || currently_stalled
158
+ bsc = issue.blocked_stalled_changes(end_time: end_time)
159
+ return false unless matches_blocked_stalled?(bsc, ever_blocked, ever_stalled,
160
+ currently_blocked, currently_stalled)
161
+ end
162
+
163
+ true
164
+ end
165
+
166
+ class ListProjectsTool < MCP::Tool
167
+ tool_name 'list_projects'
168
+ description 'Lists all available projects with basic metadata. Call this first when the user asks a ' \
169
+ 'question that could apply to multiple projects, so you can clarify which one they mean.'
170
+
171
+ input_schema(type: 'object', properties: {})
172
+
173
+ def self.call(server_context:, **)
174
+ lines = server_context[:projects].map do |project_name, project_data|
175
+ "#{project_name} | #{project_data[:issues].size} issues | Data through: #{project_data[:today]}"
176
+ end
177
+
178
+ aggregates = server_context[:aggregates] || {}
179
+ unless aggregates.empty?
180
+ lines << ''
181
+ lines << 'Aggregate groups (can be used as a project filter):'
182
+ aggregates.each do |name, constituent_names|
183
+ lines << "#{name} | includes: #{constituent_names.join(', ')}"
184
+ end
185
+ end
186
+
187
+ MCP::Tool::Response.new([{ type: 'text', text: lines.join("\n") }])
188
+ end
189
+ end
190
+
191
+ class AgingWorkTool < MCP::Tool
192
+ tool_name 'aging_work'
193
+ description 'Returns all issues that have been started but not yet completed (work in progress), ' \
194
+ 'sorted from oldest to newest. Age is the number of days since the issue was started.'
195
+
196
+ input_schema(
197
+ type: 'object',
198
+ properties: {
199
+ min_age_days: {
200
+ type: 'integer',
201
+ description: 'Only return issues at least this many days old. Omit to return all ages.'
202
+ },
203
+ project: {
204
+ type: 'string',
205
+ description: 'Only return issues from this project name. Omit to return all projects.'
206
+ },
207
+ current_status: {
208
+ type: 'string',
209
+ description: 'Only return issues currently in this status (e.g. "Review", "In Progress").'
210
+ },
211
+ current_column: {
212
+ type: 'string',
213
+ description: 'Only return issues whose current status maps to this board column (e.g. "In Progress").'
214
+ },
215
+ **HISTORY_FILTER_SCHEMA
216
+ }
217
+ )
218
+
219
+ def self.call(server_context:, min_age_days: nil, project: nil, project_name: nil,
220
+ current_status: nil, current_column: nil,
221
+ history_field: nil, history_value: nil, ever_blocked: nil, ever_stalled: nil,
222
+ currently_blocked: nil, currently_stalled: nil, **)
223
+ project ||= project_name
224
+ rows = []
225
+ allowed_projects = McpServer.resolve_projects(server_context, project)
226
+
227
+ server_context[:projects].each do |project_name, project_data|
228
+ next if allowed_projects && !allowed_projects.include?(project_name)
229
+
230
+ today = project_data[:today]
231
+ project_data[:issues].each do |issue|
232
+ started, stopped = issue.started_stopped_times
233
+ next unless started && !stopped
234
+ next if current_status && issue.status.name != current_status
235
+ next if current_column && McpServer.column_name_for(issue.board, issue.status.id) != current_column
236
+
237
+ age = (today - started.to_date).to_i + 1
238
+ next if min_age_days && age < min_age_days
239
+ unless McpServer.matches_history?(issue, project_data[:end_time],
240
+ history_field, history_value, ever_blocked, ever_stalled,
241
+ currently_blocked, currently_stalled)
242
+ next
243
+ end
244
+
245
+ rows << {
246
+ key: issue.key,
247
+ summary: issue.summary,
248
+ status: issue.status.name,
249
+ type: issue.type,
250
+ age_days: age,
251
+ flow_efficiency: McpServer.flow_efficiency_percent(issue, project_data[:end_time]),
252
+ project: project_name
253
+ }
254
+ end
255
+ end
256
+
257
+ rows.sort_by! { |r| -r[:age_days] }
258
+
259
+ if rows.empty?
260
+ text = 'No aging work found.'
261
+ else
262
+ lines = rows.map do |r|
263
+ fe = r[:flow_efficiency] ? " | FE: #{r[:flow_efficiency]}%" : ''
264
+ "#{r[:key]} | #{r[:project]} | #{r[:type]} | #{r[:status]} | Age: #{r[:age_days]}d#{fe} | #{r[:summary]}"
265
+ end
266
+ text = lines.join("\n")
267
+ end
268
+
269
+ MCP::Tool::Response.new([{ type: 'text', text: text }])
270
+ end
271
+ end
272
+
273
+ class CompletedWorkTool < MCP::Tool
274
+ tool_name 'completed_work'
275
+ description 'Returns issues that have been completed, sorted most recently completed first. ' \
276
+ 'Includes cycle time (days from start to completion).'
277
+
278
+ input_schema(
279
+ type: 'object',
280
+ properties: {
281
+ days_back: {
282
+ type: 'integer',
283
+ description: 'Only return issues completed within this many days of the data end date. Omit to return all.'
284
+ },
285
+ project: {
286
+ type: 'string',
287
+ description: 'Only return issues from this project name. Omit to return all projects.'
288
+ },
289
+ completed_status: {
290
+ type: 'string',
291
+ description: 'Only return issues whose status at completion matches this value (e.g. "Cancelled", "Done").'
292
+ },
293
+ completed_resolution: {
294
+ type: 'string',
295
+ description: 'Only return issues whose resolution at completion matches this value (e.g. "Won\'t Do").'
296
+ },
297
+ **HISTORY_FILTER_SCHEMA
298
+ }
299
+ )
300
+
301
+ def self.build_row issue, project_name, started, stopped, cutoff, completed_status, completed_resolution,
302
+ end_time, history_field, history_value, ever_blocked, ever_stalled,
303
+ currently_blocked, currently_stalled
304
+ completed_date = stopped.to_date
305
+ return nil if cutoff && completed_date < cutoff
306
+
307
+ status_at_done, resolution_at_done = issue.status_resolution_at_done
308
+ return nil if completed_status && status_at_done&.name != completed_status
309
+ return nil if completed_resolution && completed_resolution != resolution_at_done
310
+ return nil unless McpServer.matches_history?(issue, end_time,
311
+ history_field, history_value, ever_blocked, ever_stalled,
312
+ currently_blocked, currently_stalled)
313
+
314
+ cycle_time = started ? (completed_date - started.to_date).to_i + 1 : nil
315
+ {
316
+ key: issue.key,
317
+ summary: issue.summary,
318
+ type: issue.type,
319
+ completed_date: completed_date,
320
+ cycle_time_days: cycle_time,
321
+ flow_efficiency: McpServer.flow_efficiency_percent(issue, stopped),
322
+ status_at_done: status_at_done&.name,
323
+ resolution_at_done: resolution_at_done,
324
+ project: project_name
325
+ }
326
+ end
327
+
328
+ def self.call(server_context:, days_back: nil, project: nil, project_name: nil,
329
+ completed_status: nil, completed_resolution: nil,
330
+ history_field: nil, history_value: nil, ever_blocked: nil, ever_stalled: nil,
331
+ currently_blocked: nil, currently_stalled: nil, **)
332
+ project ||= project_name
333
+ rows = []
334
+ allowed_projects = McpServer.resolve_projects(server_context, project)
335
+
336
+ server_context[:projects].each do |project_name, project_data|
337
+ next if allowed_projects && !allowed_projects.include?(project_name)
338
+
339
+ today = project_data[:today]
340
+ cutoff = today - days_back if days_back
341
+
342
+ project_data[:issues].each do |issue|
343
+ started, stopped = issue.started_stopped_times
344
+ next unless stopped
345
+
346
+ row = build_row(issue, project_name, started, stopped, cutoff, completed_status, completed_resolution,
347
+ project_data[:end_time], history_field, history_value, ever_blocked, ever_stalled,
348
+ currently_blocked, currently_stalled)
349
+ rows << row if row
350
+ end
351
+ end
352
+
353
+ rows.sort_by! { |r| -r[:completed_date].to_time.to_i }
354
+
355
+ if rows.empty?
356
+ text = 'No completed work found.'
357
+ else
358
+ lines = rows.map do |r|
359
+ ct = r[:cycle_time_days] ? "#{r[:cycle_time_days]}d" : 'unknown'
360
+ fe = r[:flow_efficiency] ? " | FE: #{r[:flow_efficiency]}%" : ''
361
+ completion = [r[:status_at_done], r[:resolution_at_done]].compact.join(' / ')
362
+ "#{r[:key]} | #{r[:project]} | #{r[:type]} | #{r[:completed_date]} | " \
363
+ "Cycle time: #{ct}#{fe} | #{completion} | #{r[:summary]}"
364
+ end
365
+ text = lines.join("\n")
366
+ end
367
+
368
+ MCP::Tool::Response.new([{ type: 'text', text: text }])
369
+ end
370
+ end
371
+
372
+ class NotYetStartedTool < MCP::Tool
373
+ tool_name 'not_yet_started'
374
+ description 'Returns issues that have not yet been started (backlog items), sorted by creation date oldest first.'
375
+
376
+ input_schema(
377
+ type: 'object',
378
+ properties: {
379
+ project: {
380
+ type: 'string',
381
+ description: 'Only return issues from this project name. Omit to return all projects.'
382
+ },
383
+ current_status: {
384
+ type: 'string',
385
+ description: 'Only return issues currently in this status (e.g. "To Do", "Backlog").'
386
+ },
387
+ current_column: {
388
+ type: 'string',
389
+ description: 'Only return issues whose current status maps to this board column.'
390
+ },
391
+ **HISTORY_FILTER_SCHEMA
392
+ }
393
+ )
394
+
395
+ def self.call(server_context:, project: nil, project_name: nil, current_status: nil, current_column: nil,
396
+ history_field: nil, history_value: nil, ever_blocked: nil, ever_stalled: nil,
397
+ currently_blocked: nil, currently_stalled: nil, **)
398
+ project ||= project_name
399
+ rows = []
400
+ allowed_projects = McpServer.resolve_projects(server_context, project)
401
+
402
+ server_context[:projects].each do |project_name, project_data|
403
+ next if allowed_projects && !allowed_projects.include?(project_name)
404
+
405
+ project_data[:issues].each do |issue|
406
+ started, stopped = issue.started_stopped_times
407
+ next if started || stopped
408
+ next if current_status && issue.status.name != current_status
409
+ next if current_column && McpServer.column_name_for(issue.board, issue.status.id) != current_column
410
+ unless McpServer.matches_history?(issue, project_data[:end_time],
411
+ history_field, history_value, ever_blocked, ever_stalled,
412
+ currently_blocked, currently_stalled)
413
+ next
414
+ end
415
+
416
+ rows << {
417
+ key: issue.key,
418
+ summary: issue.summary,
419
+ status: issue.status.name,
420
+ type: issue.type,
421
+ created: issue.created.to_date,
422
+ project: project_name
423
+ }
424
+ end
425
+ end
426
+
427
+ rows.sort_by! { |r| r[:created] }
428
+
429
+ if rows.empty?
430
+ text = 'No unstarted work found.'
431
+ else
432
+ lines = rows.map do |r|
433
+ "#{r[:key]} | #{r[:project]} | #{r[:type]} | #{r[:status]} | Created: #{r[:created]} | #{r[:summary]}"
434
+ end
435
+ text = lines.join("\n")
436
+ end
437
+
438
+ MCP::Tool::Response.new([{ type: 'text', text: text }])
439
+ end
440
+ end
441
+
442
+ class StatusTimeAnalysisTool < MCP::Tool
443
+ tool_name 'status_time_analysis'
444
+ description 'Aggregates the time issues spend in each status or column, ranked by average days. ' \
445
+ 'Useful for identifying bottlenecks. Before calling this tool, always ask the user ' \
446
+ 'which issues they want to include: aging (in progress), completed, not yet started, ' \
447
+ 'or all. Do not assume — the answer changes the result significantly.'
448
+
449
+ input_schema(
450
+ type: 'object',
451
+ properties: {
452
+ project: {
453
+ type: 'string',
454
+ description: 'Only include issues from this project name. Omit to include all projects.'
455
+ },
456
+ issue_state: {
457
+ type: 'string',
458
+ enum: %w[all aging completed not_started],
459
+ description: 'Which issues to include: "aging" (in progress), "completed", ' \
460
+ '"not_started" (backlog), or "all" (default).'
461
+ },
462
+ group_by: {
463
+ type: 'string',
464
+ enum: %w[status column],
465
+ description: 'Whether to group results by status name (default) or board column.'
466
+ }
467
+ }
468
+ )
469
+
470
+ def self.select_issues issue, issue_state
471
+ started, stopped = issue.started_stopped_times
472
+ case issue_state
473
+ when 'aging' then started && !stopped
474
+ when 'completed' then !!stopped
475
+ when 'not_started' then !started && !stopped
476
+ else true
477
+ end
478
+ end
479
+
480
+ def self.call(server_context:, project: nil, project_name: nil, issue_state: 'all', group_by: 'status',
481
+ column: nil, **)
482
+ project ||= project_name
483
+ group_by = 'column' if column
484
+
485
+ totals = Hash.new { |h, k| h[k] = { total_seconds: 0.0, visit_count: 0 } }
486
+ allowed_projects = McpServer.resolve_projects(server_context, project)
487
+
488
+ server_context[:projects].each do |project_name, project_data|
489
+ next if allowed_projects && !allowed_projects.include?(project_name)
490
+
491
+ project_data[:issues].each do |issue|
492
+ next unless select_issues(issue, issue_state)
493
+
494
+ time_map = if group_by == 'column'
495
+ McpServer.time_per_column(issue, project_data[:end_time])
496
+ else
497
+ McpServer.time_per_status(issue, project_data[:end_time])
498
+ end
499
+
500
+ time_map.each do |name, seconds|
501
+ totals[name][:total_seconds] += seconds
502
+ totals[name][:visit_count] += 1
503
+ end
504
+ end
505
+ end
506
+
507
+ return MCP::Tool::Response.new([{ type: 'text', text: 'No data found.' }]) if totals.empty?
508
+
509
+ rows = totals.map do |name, data|
510
+ total_days = (data[:total_seconds] / 86_400.0).round(1)
511
+ avg_days = (data[:total_seconds] / data[:visit_count] / 86_400.0).round(1)
512
+ { name: name, total_days: total_days, avg_days: avg_days, visit_count: data[:visit_count] }
513
+ end
514
+ rows.sort_by! { |r| -r[:avg_days] }
515
+
516
+ label = group_by == 'column' ? 'Column' : 'Status'
517
+ lines = rows.map do |r|
518
+ "#{label}: #{r[:name]} | Avg: #{r[:avg_days]}d | Total: #{r[:total_days]}d | Issues: #{r[:visit_count]}"
519
+ end
520
+ MCP::Tool::Response.new([{ type: 'text', text: lines.join("\n") }])
521
+ end
522
+ end
523
+
524
  # Alternative tool names used by AI agents other than Claude.
  # Each entry maps an alias name to the canonical tool class it delegates to.
  # The alias inherits the canonical tool's schema and call behaviour automatically.
  # To add a new alias, append one line: 'alias_name' => CanonicalToolClass
  ALIASES = {
    'board_list' => ListProjectsTool
  }.freeze
531
+ end
@@ -98,6 +98,12 @@ class ProjectConfig
98
98
  !!@aggregate_config
99
99
  end
100
100
 
101
+ def aggregate_project_names
102
+ return [] unless aggregated_project?
103
+
104
+ @aggregate_config.included_projects.filter_map(&:name)
105
+ end
106
+
101
107
  def download &block
102
108
  raise 'Not allowed to have multiple download blocks in one project' if @download_config
103
109
  raise 'Not allowed to have both an aggregate and a download section. Pick only one.' if @aggregate_config
@@ -19,8 +19,8 @@ class PullRequestCycleTimeHistogram < TimeBasedHistogram
19
19
  HTML
20
20
 
21
21
  init_configuration_block(block) do
22
- grouping_rules do |pull_request, _rule|
23
- rules.label = pull_request.repo
22
+ grouping_rules do |pull_request, rule|
23
+ rule.label = pull_request.repo
24
24
  end
25
25
  end
26
26
  end
@@ -18,8 +18,8 @@ class PullRequestCycleTimeScatterplot < TimeBasedScatterplot
18
18
  HTML
19
19
 
20
20
  init_configuration_block(block) do
21
- grouping_rules do |pull_request, _rule|
22
- rules.label = pull_request.repo
21
+ grouping_rules do |pull_request, rule|
22
+ rule.label = pull_request.repo
23
23
  end
24
24
  end
25
25
  end
@@ -48,8 +48,15 @@ class PullRequestCycleTimeScatterplot < TimeBasedScatterplot
48
48
  end
49
49
 
50
50
  def y_value pull_request
51
- divisor = { minutes: 60, hours: 3600, days: 86_400 }[@cycletime_unit]
52
- ((pull_request.closed_at - pull_request.opened_at) / divisor).round
51
+ if @cycletime_unit == :days
52
+ tz = timezone_offset || '+00:00'
53
+ opened = pull_request.opened_at.getlocal(tz).to_date
54
+ closed = pull_request.closed_at.getlocal(tz).to_date
55
+ (closed - opened).to_i + 1
56
+ else
57
+ divisor = { minutes: 60, hours: 3600 }[@cycletime_unit]
58
+ ((pull_request.closed_at - pull_request.opened_at) / divisor).round
59
+ end
53
60
  end
54
61
 
55
62
  def label_cycletime value
@@ -62,7 +69,8 @@ class PullRequestCycleTimeScatterplot < TimeBasedScatterplot
62
69
 
63
70
  # Tooltip text for one scatterplot dot: the linked Jira issue keys, the
  # PR title, the grouping-rule label, the age label, and the lines-changed
  # summary appended by lines_changed_text.
  def title_value pull_request, rules: nil
    age_label = label_cycletime y_value(pull_request)
    keys = pull_request.issue_keys.join(', ')
    "#{keys} | #{pull_request.title} | #{rules.label} | Age:#{age_label}#{lines_changed_text(pull_request)}"
  end
67
75
 
68
76
  def lines_changed_text pull_request
@@ -60,6 +60,11 @@ class Stitcher < HtmlGenerator
60
60
  if matches[:seam] == 'start'
61
61
  content = +''
62
62
  else
63
+ if content.nil? || content.strip.empty?
64
+ file_system.warning "Seam found with no content in #{filename.inspect}: " \
65
+ "id=#{matches[:id].strip.inspect}, class=#{matches[:clazz].strip.inspect}, " \
66
+ "title=#{matches[:title].strip.inspect}"
67
+ end
63
68
  @all_stitches << Stitcher::StitchContent.new(
64
69
  file: filename, title: matches[:title], type: matches[:type], content: content
65
70
  )
data/lib/jirametrics.rb CHANGED
@@ -52,6 +52,47 @@ class JiraMetrics < Thor
52
52
  Exporter.instance.info(key, name_filter: options[:name] || '*')
53
53
  end
54
54
 
55
  option :config
  option :name
  desc 'mcp', 'Start in MCP (Model Context Protocol) server mode'
  # Thor command: load every configured project into memory, then hand
  # control to the MCP server. Stdout handling here is order-sensitive:
  # the JSON-RPC transport owns stdout once McpServer#run starts.
  def mcp
    # Redirect stdout to stderr for the entire startup phase so that any
    # incidental output (from config files, gem loading, etc.) does not
    # corrupt the JSON-RPC channel before the MCP transport takes over.
    original_stdout = $stdout.dup
    $stdout.reopen($stderr)

    load_config options[:config]
    require 'jirametrics/mcp_server'

    # Log to file only; console logging would also pollute the protocol stream.
    Exporter.instance.file_system.log_only = true

    projects = {}
    aggregates = {}
    Exporter.instance.each_project_config(name_filter: options[:name] || '*') do |project|
      project.evaluate_next_level
      project.run load_only: true
      projects[project.name || 'default'] = {
        issues: project.issues,
        today: project.time_range.end.to_date,
        end_time: project.time_range.end
      }
    rescue StandardError => e
      # Aggregated projects can't be loaded directly; remember the names of
      # the concrete projects they roll up and move on.
      # NOTE(review): matching on exception message text is fragile — consider
      # dedicated error classes so these branches can't break on a reworded message.
      if e.message.start_with? 'This is an aggregated project'
        names = project.aggregate_project_names
        aggregates[project.name] = names if names.any?
        next
      end
      next if e.message.start_with? 'No data found'

      raise
    end

    # Startup is complete; give the real stdout back to the JSON-RPC transport.
    $stdout.reopen(original_stdout)
    original_stdout.close
    McpServer.new(projects: projects, aggregates: aggregates, timezone_offset: Exporter.instance.timezone_offset).run
  end
95
+
55
96
  option :config
56
97
  desc 'stitch', 'Dump information about one issue'
57
98
  def stitch stitch_file = 'stitcher.erb'
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: jirametrics
3
3
  version: !ruby/object:Gem::Version
4
- version: 2.26.1
4
+ version: '2.28'
5
5
  platform: ruby
6
6
  authors:
7
7
  - Mike Bowler
@@ -9,6 +9,34 @@ bindir: bin
9
9
  cert_chain: []
10
10
  date: 1980-01-02 00:00:00.000000000 Z
11
11
  dependencies:
12
+ - !ruby/object:Gem::Dependency
13
+ name: mutant-rspec
14
+ requirement: !ruby/object:Gem::Requirement
15
+ requirements:
16
+ - - ">="
17
+ - !ruby/object:Gem::Version
18
+ version: '0'
19
+ type: :development
20
+ prerelease: false
21
+ version_requirements: !ruby/object:Gem::Requirement
22
+ requirements:
23
+ - - ">="
24
+ - !ruby/object:Gem::Version
25
+ version: '0'
26
+ - !ruby/object:Gem::Dependency
27
+ name: mcp
28
+ requirement: !ruby/object:Gem::Requirement
29
+ requirements:
30
+ - - ">="
31
+ - !ruby/object:Gem::Version
32
+ version: '0'
33
+ type: :runtime
34
+ prerelease: false
35
+ version_requirements: !ruby/object:Gem::Requirement
36
+ requirements:
37
+ - - ">="
38
+ - !ruby/object:Gem::Version
39
+ version: '0'
12
40
  - !ruby/object:Gem::Dependency
13
41
  name: random-word
14
42
  requirement: !ruby/object:Gem::Requirement
@@ -55,10 +83,12 @@ description: Extract metrics from Jira and export to either a report or to CSV f
55
83
  email: mbowler@gargoylesoftware.com
56
84
  executables:
57
85
  - jirametrics
86
+ - jirametrics-mcp
58
87
  extensions: []
59
88
  extra_rdoc_files: []
60
89
  files:
61
90
  - bin/jirametrics
91
+ - bin/jirametrics-mcp
62
92
  - lib/jirametrics.rb
63
93
  - lib/jirametrics/aggregate_config.rb
64
94
  - lib/jirametrics/aging_work_bar_chart.rb
@@ -131,6 +161,7 @@ files:
131
161
  - lib/jirametrics/issue_link.rb
132
162
  - lib/jirametrics/issue_printer.rb
133
163
  - lib/jirametrics/jira_gateway.rb
164
+ - lib/jirametrics/mcp_server.rb
134
165
  - lib/jirametrics/project_config.rb
135
166
  - lib/jirametrics/pull_request.rb
136
167
  - lib/jirametrics/pull_request_cycle_time_histogram.rb