work-chronicler 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +432 -0
- package/bin/mcp.js +6 -0
- package/bin/work-chronicler.js +3 -0
- package/dist/cli/analyzer/classifier.d.ts +31 -0
- package/dist/cli/analyzer/classifier.d.ts.map +1 -0
- package/dist/cli/analyzer/classifier.js +171 -0
- package/dist/cli/analyzer/index.d.ts +5 -0
- package/dist/cli/analyzer/index.d.ts.map +1 -0
- package/dist/cli/analyzer/index.js +4 -0
- package/dist/cli/analyzer/projects.d.ts +10 -0
- package/dist/cli/analyzer/projects.d.ts.map +1 -0
- package/dist/cli/analyzer/projects.js +228 -0
- package/dist/cli/analyzer/stats.d.ts +30 -0
- package/dist/cli/analyzer/stats.d.ts.map +1 -0
- package/dist/cli/analyzer/stats.js +80 -0
- package/dist/cli/analyzer/timeline.d.ts +6 -0
- package/dist/cli/analyzer/timeline.d.ts.map +1 -0
- package/dist/cli/analyzer/timeline.js +224 -0
- package/dist/cli/commands/analyze.d.ts +3 -0
- package/dist/cli/commands/analyze.d.ts.map +1 -0
- package/dist/cli/commands/analyze.js +216 -0
- package/dist/cli/commands/fetch/all.d.ts +3 -0
- package/dist/cli/commands/fetch/all.d.ts.map +1 -0
- package/dist/cli/commands/fetch/all.js +91 -0
- package/dist/cli/commands/fetch/github.d.ts +3 -0
- package/dist/cli/commands/fetch/github.d.ts.map +1 -0
- package/dist/cli/commands/fetch/github.js +39 -0
- package/dist/cli/commands/fetch/jira.d.ts +3 -0
- package/dist/cli/commands/fetch/jira.d.ts.map +1 -0
- package/dist/cli/commands/fetch/jira.js +39 -0
- package/dist/cli/commands/filter.d.ts +3 -0
- package/dist/cli/commands/filter.d.ts.map +1 -0
- package/dist/cli/commands/filter.js +247 -0
- package/dist/cli/commands/init.d.ts +3 -0
- package/dist/cli/commands/init.d.ts.map +1 -0
- package/dist/cli/commands/init.js +47 -0
- package/dist/cli/commands/link.d.ts +3 -0
- package/dist/cli/commands/link.d.ts.map +1 -0
- package/dist/cli/commands/link.js +25 -0
- package/dist/cli/commands/mcp.d.ts +3 -0
- package/dist/cli/commands/mcp.d.ts.map +1 -0
- package/dist/cli/commands/mcp.js +43 -0
- package/dist/cli/commands/status.d.ts +3 -0
- package/dist/cli/commands/status.d.ts.map +1 -0
- package/dist/cli/commands/status.js +28 -0
- package/dist/cli/fetchers/github.d.ts +20 -0
- package/dist/cli/fetchers/github.d.ts.map +1 -0
- package/dist/cli/fetchers/github.js +345 -0
- package/dist/cli/fetchers/jira.d.ts +20 -0
- package/dist/cli/fetchers/jira.d.ts.map +1 -0
- package/dist/cli/fetchers/jira.js +268 -0
- package/dist/cli/index.d.ts +3 -0
- package/dist/cli/index.d.ts.map +1 -0
- package/dist/cli/index.js +43 -0
- package/dist/cli/linker/index.d.ts +17 -0
- package/dist/cli/linker/index.d.ts.map +1 -0
- package/dist/cli/linker/index.js +129 -0
- package/dist/cli/prompts/index.d.ts +61 -0
- package/dist/cli/prompts/index.d.ts.map +1 -0
- package/dist/cli/prompts/index.js +258 -0
- package/dist/core/config/loader.d.ts +61 -0
- package/dist/core/config/loader.d.ts.map +1 -0
- package/dist/core/config/loader.js +146 -0
- package/dist/core/config/schema.d.ts +587 -0
- package/dist/core/config/schema.d.ts.map +1 -0
- package/dist/core/config/schema.js +95 -0
- package/dist/core/index.d.ts +5 -0
- package/dist/core/index.d.ts.map +1 -0
- package/dist/core/index.js +7 -0
- package/dist/core/storage/index.d.ts +4 -0
- package/dist/core/storage/index.d.ts.map +1 -0
- package/dist/core/storage/index.js +3 -0
- package/dist/core/storage/paths.d.ts +85 -0
- package/dist/core/storage/paths.d.ts.map +1 -0
- package/dist/core/storage/paths.js +110 -0
- package/dist/core/storage/reader.d.ts +69 -0
- package/dist/core/storage/reader.d.ts.map +1 -0
- package/dist/core/storage/reader.js +181 -0
- package/dist/core/storage/writer.d.ts +41 -0
- package/dist/core/storage/writer.d.ts.map +1 -0
- package/dist/core/storage/writer.js +50 -0
- package/dist/core/types/index.d.ts +5 -0
- package/dist/core/types/index.d.ts.map +1 -0
- package/dist/core/types/index.js +4 -0
- package/dist/core/types/pr.d.ts +75 -0
- package/dist/core/types/pr.d.ts.map +1 -0
- package/dist/core/types/pr.js +35 -0
- package/dist/core/types/project.d.ts +450 -0
- package/dist/core/types/project.d.ts.map +1 -0
- package/dist/core/types/project.js +75 -0
- package/dist/core/types/ticket.d.ts +51 -0
- package/dist/core/types/ticket.d.ts.map +1 -0
- package/dist/core/types/ticket.js +17 -0
- package/dist/core/types/timeline.d.ts +1177 -0
- package/dist/core/types/timeline.d.ts.map +1 -0
- package/dist/core/types/timeline.js +100 -0
- package/dist/mcp/index.d.ts +15 -0
- package/dist/mcp/index.d.ts.map +1 -0
- package/dist/mcp/index.js +26 -0
- package/dist/mcp/server.d.ts +22 -0
- package/dist/mcp/server.d.ts.map +1 -0
- package/dist/mcp/server.js +584 -0
- package/dist/mcp/tools/get-stats.d.ts +26 -0
- package/dist/mcp/tools/get-stats.d.ts.map +1 -0
- package/dist/mcp/tools/get-stats.js +64 -0
- package/dist/mcp/tools/search-prs.d.ts +18 -0
- package/dist/mcp/tools/search-prs.d.ts.map +1 -0
- package/dist/mcp/tools/search-prs.js +44 -0
- package/dist/mcp/tools/search-tickets.d.ts +19 -0
- package/dist/mcp/tools/search-tickets.d.ts.map +1 -0
- package/dist/mcp/tools/search-tickets.js +49 -0
- package/package.json +79 -0
|
@@ -0,0 +1,584 @@
|
|
|
1
|
+
import * as fs from 'node:fs/promises';
|
|
2
|
+
import { findConfigPath, getAnalysisFilePath, getEffectiveOutputDir, getOutputDirectory, loadConfig, readAllPRs, readAllTickets, } from '../core/index.js';
|
|
3
|
+
import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js';
|
|
4
|
+
import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
|
|
5
|
+
import { z } from 'zod';
|
|
6
|
+
/**
|
|
7
|
+
* Validate and parse a date string. Returns the Date if valid, throws if invalid.
|
|
8
|
+
*/
|
|
9
|
+
/**
 * Parse a date string into a `Date`.
 *
 * @param {string} dateStr - Raw date string (expected ISO, e.g. YYYY-MM-DD).
 * @param {string} fieldName - Field name used in the thrown error message.
 * @returns {Date} The parsed date.
 * @throws {Error} When the string does not parse to a valid date.
 */
function parseAndValidateDate(dateStr, fieldName) {
    const parsed = new Date(dateStr);
    // An unparseable string yields an "Invalid Date" whose time value is NaN.
    if (Number.isNaN(parsed.getTime())) {
        throw new Error(`Invalid ${fieldName} date format: "${dateStr}". Use ISO format (YYYY-MM-DD).`);
    }
    return parsed;
}
|
|
16
|
+
/**
|
|
17
|
+
* Create and configure the MCP server with all tools
|
|
18
|
+
*
|
|
19
|
+
* Config discovery order:
|
|
20
|
+
* 1. WORK_CHRONICLER_CONFIG environment variable (path to config file)
|
|
21
|
+
* 2. WORK_CHRONICLER_DIR environment variable (directory containing config)
|
|
22
|
+
* 3. Current working directory
|
|
23
|
+
* 4. ~/.config/work-chronicler/
|
|
24
|
+
*/
|
|
25
|
+
export async function createServer() {
    // Resolve an explicit config location from the environment, if any.
    // WORK_CHRONICLER_CONFIG names a config file directly; WORK_CHRONICLER_DIR
    // names a directory expected to contain work-chronicler.yaml.
    const { WORK_CHRONICLER_CONFIG: envConfigPath, WORK_CHRONICLER_DIR: envConfigDir } = process.env;
    let explicitPath;
    if (envConfigPath) {
        explicitPath = envConfigPath;
    }
    else if (envConfigDir) {
        explicitPath = `${envConfigDir}/work-chronicler.yaml`;
    }
    const configPath = findConfigPath(explicitPath);
    const config = await loadConfig(explicitPath);
    const baseOutputDir = getOutputDirectory(config, configPath ?? undefined);
    // Prefer the filtered data directory when one exists on disk.
    const { dir: outputDir, isFiltered } = getEffectiveOutputDir(baseOutputDir);
    const server = new McpServer({
        name: 'work-chronicler',
        version: '0.1.0',
    });
    // Log to stderr so the note is visible in debug logs without touching the
    // stdout channel used by the stdio transport.
    if (isFiltered) {
        console.error('[work-chronicler] Using filtered data from:', outputDir);
    }
    // Wire every tool against the resolved data directory.
    registerSearchPRsTool(server, outputDir);
    registerSearchTicketsTool(server, outputDir);
    registerGetLinkedWorkTool(server, outputDir);
    registerListReposTool(server, outputDir);
    registerGetStatsTool(server, outputDir);
    registerGetProjectsTool(server, outputDir);
    registerGetTimelineTool(server, outputDir);
    return {
        config,
        outputDir,
        server,
    };
}
|
|
64
|
+
/**
|
|
65
|
+
* Start the MCP server using stdio transport
|
|
66
|
+
*/
|
|
67
|
+
export async function startServer(ctx) {
    // stdio transport: the MCP host talks to us over stdin/stdout.
    const stdio = new StdioServerTransport();
    await ctx.server.connect(stdio);
}
|
|
71
|
+
// =============================================================================
|
|
72
|
+
// Tool Implementations
|
|
73
|
+
// =============================================================================
|
|
74
|
+
/**
 * Register the `search_prs` tool: filter stored pull requests by org, repo,
 * impact, state, date range, and free-text query, newest first.
 */
function registerSearchPRsTool(server, outputDir) {
    server.tool('search_prs', 'Search pull requests by date range, repository, keywords, or impact level', {
        query: z.string().optional().describe('Search term for title and body'),
        org: z.string().optional().describe('Filter by organization name'),
        repo: z.string().optional().describe('Filter by repository name'),
        impact: z
            .enum(['flagship', 'major', 'standard', 'minor'])
            .optional()
            .describe('Filter by impact level'),
        state: z
            .enum(['merged', 'open', 'closed'])
            .optional()
            .describe('Filter by PR state'),
        since: z
            .string()
            .optional()
            .describe('Start date (ISO format, e.g., 2025-01-01)'),
        until: z
            .string()
            .optional()
            .describe('End date (ISO format, e.g., 2025-12-31)'),
        limit: z.number().optional().describe('Maximum number of results'),
    }, async (input) => {
        const allPRs = await readAllPRs(outputDir);
        // Collect one predicate per active filter, then apply them in one pass.
        const predicates = [];
        if (input.org) {
            predicates.push((pr) => pr.frontmatter.org === input.org);
        }
        if (input.repo) {
            const repoNeedle = input.repo;
            predicates.push((pr) => pr.frontmatter.repository.includes(repoNeedle));
        }
        if (input.impact) {
            predicates.push((pr) => pr.frontmatter.impact === input.impact);
        }
        if (input.state) {
            predicates.push((pr) => pr.frontmatter.state === input.state);
        }
        if (input.since) {
            const lowerBound = parseAndValidateDate(input.since, 'since');
            predicates.push((pr) => new Date(pr.frontmatter.createdAt) >= lowerBound);
        }
        if (input.until) {
            const upperBound = parseAndValidateDate(input.until, 'until');
            predicates.push((pr) => new Date(pr.frontmatter.createdAt) <= upperBound);
        }
        if (input.query) {
            const needle = input.query.toLowerCase();
            predicates.push((pr) => pr.frontmatter.title.toLowerCase().includes(needle) ||
                pr.body.toLowerCase().includes(needle));
        }
        const matching = allPRs.filter((pr) => predicates.every((accept) => accept(pr)));
        // Newest first.
        matching.sort((a, b) => Date.parse(b.frontmatter.createdAt) - Date.parse(a.frontmatter.createdAt));
        const total = matching.length;
        const limited = input.limit && input.limit > 0 ? matching.slice(0, input.limit) : matching;
        // Truncate bodies to 500 chars so results stay digestible for the LLM.
        const results = limited.map(({ frontmatter, body }) => ({
            title: frontmatter.title,
            url: frontmatter.url,
            repository: frontmatter.repository,
            state: frontmatter.state,
            impact: frontmatter.impact,
            createdAt: frontmatter.createdAt,
            mergedAt: frontmatter.mergedAt,
            additions: frontmatter.additions,
            deletions: frontmatter.deletions,
            jiraTickets: frontmatter.jiraTickets,
            body: body.slice(0, 500) + (body.length > 500 ? '...' : ''),
        }));
        return {
            content: [
                {
                    type: 'text',
                    text: JSON.stringify({ results, total, returned: results.length }),
                },
            ],
        };
    });
}
|
|
156
|
+
/**
 * Register the `search_tickets` tool: filter stored JIRA tickets by org,
 * project, status, date range, and free-text query, newest first.
 */
function registerSearchTicketsTool(server, outputDir) {
    server.tool('search_tickets', 'Search JIRA tickets by project, status, or keywords', {
        query: z
            .string()
            .optional()
            .describe('Search term for key, summary, and body'),
        org: z.string().optional().describe('Filter by organization name'),
        project: z.string().optional().describe('Filter by JIRA project key'),
        status: z.string().optional().describe('Filter by ticket status'),
        since: z.string().optional().describe('Start date (ISO format)'),
        until: z.string().optional().describe('End date (ISO format)'),
        limit: z.number().optional().describe('Maximum number of results'),
    }, async (input) => {
        const allTickets = await readAllTickets(outputDir);
        // One predicate per active filter, applied in a single pass.
        const predicates = [];
        if (input.org) {
            predicates.push((t) => t.frontmatter.org === input.org);
        }
        if (input.project) {
            predicates.push((t) => t.frontmatter.project === input.project);
        }
        if (input.status) {
            // Status matching is a case-insensitive substring match.
            const statusNeedle = input.status.toLowerCase();
            predicates.push((t) => t.frontmatter.status.toLowerCase().includes(statusNeedle));
        }
        if (input.since) {
            const lowerBound = parseAndValidateDate(input.since, 'since');
            predicates.push((t) => new Date(t.frontmatter.createdAt) >= lowerBound);
        }
        if (input.until) {
            const upperBound = parseAndValidateDate(input.until, 'until');
            predicates.push((t) => new Date(t.frontmatter.createdAt) <= upperBound);
        }
        if (input.query) {
            const needle = input.query.toLowerCase();
            predicates.push((t) => t.frontmatter.summary.toLowerCase().includes(needle) ||
                t.frontmatter.key.toLowerCase().includes(needle) ||
                t.body.toLowerCase().includes(needle));
        }
        const matching = allTickets.filter((t) => predicates.every((accept) => accept(t)));
        // Newest first.
        matching.sort((a, b) => Date.parse(b.frontmatter.createdAt) - Date.parse(a.frontmatter.createdAt));
        const total = matching.length;
        const limited = input.limit && input.limit > 0 ? matching.slice(0, input.limit) : matching;
        // Truncate bodies to 500 chars so results stay digestible for the LLM.
        const results = limited.map(({ frontmatter, body }) => ({
            key: frontmatter.key,
            summary: frontmatter.summary,
            url: frontmatter.url,
            project: frontmatter.project,
            status: frontmatter.status,
            issueType: frontmatter.issueType,
            createdAt: frontmatter.createdAt,
            storyPoints: frontmatter.storyPoints,
            linkedPRs: frontmatter.linkedPRs,
            body: body.slice(0, 500) + (body.length > 500 ? '...' : ''),
        }));
        return {
            content: [
                {
                    type: 'text',
                    text: JSON.stringify({ results, total, returned: results.length }),
                },
            ],
        };
    });
}
|
|
224
|
+
/**
 * Register the `get_linked_work` tool: resolves the PR<->ticket links stored in
 * frontmatter (`jiraTickets` on PRs) in either direction. Exactly one of
 * `prUrl` / `ticketKey` should be supplied; `prUrl` wins when both are given.
 */
function registerGetLinkedWorkTool(server, outputDir) {
    server.tool('get_linked_work', 'Get a PR with its linked JIRA tickets, or a ticket with its linked PRs', {
        prUrl: z.string().optional().describe('GitHub PR URL to look up'),
        ticketKey: z.string().optional().describe('JIRA ticket key to look up'),
    }, async (input) => {
        if (!input.prUrl && !input.ticketKey) {
            return {
                content: [
                    {
                        type: 'text',
                        text: JSON.stringify({
                            error: 'Either prUrl or ticketKey must be provided',
                        }),
                    },
                ],
            };
        }
        const allPRs = await readAllPRs(outputDir);
        const allTickets = await readAllTickets(outputDir);
        if (input.prUrl) {
            const prUrl = input.prUrl;
            // Match on the exact URL, or fall back to the "/pull/<number>" suffix
            // so equivalent URLs (e.g. different host aliases) still resolve.
            // BUGFIX: the previous fallback used endsWith(<number>) alone, so a
            // query for PR #5 would also match #15, #105, or any URL whose last
            // character happened to be "5". Including "/pull/" in the suffix
            // makes the number match exact.
            const prNumber = prUrl.replace(/.*\/pull\//, '');
            const pr = allPRs.find((p) => p.frontmatter.url === prUrl ||
                p.frontmatter.url.endsWith(`/pull/${prNumber}`));
            if (!pr) {
                return {
                    content: [
                        {
                            type: 'text',
                            text: JSON.stringify({ error: 'PR not found' }),
                        },
                    ],
                };
            }
            // A ticket is "linked" when its key appears in the PR's jiraTickets.
            const linkedTickets = allTickets.filter((t) => pr.frontmatter.jiraTickets?.includes(t.frontmatter.key));
            return {
                content: [
                    {
                        type: 'text',
                        text: JSON.stringify({
                            pr: {
                                ...pr.frontmatter,
                                body: pr.body,
                            },
                            linkedTickets: linkedTickets.map((t) => ({
                                ...t.frontmatter,
                                body: t.body,
                            })),
                        }),
                    },
                ],
            };
        }
        if (input.ticketKey) {
            // Ticket keys are matched case-insensitively.
            const ticket = allTickets.find((t) => t.frontmatter.key.toLowerCase() === input.ticketKey?.toLowerCase());
            if (!ticket) {
                return {
                    content: [
                        {
                            type: 'text',
                            text: JSON.stringify({ error: 'Ticket not found' }),
                        },
                    ],
                };
            }
            // Reverse direction: PRs that reference this ticket's key.
            const linkedPRs = allPRs.filter((p) => p.frontmatter.jiraTickets?.includes(ticket.frontmatter.key));
            return {
                content: [
                    {
                        type: 'text',
                        text: JSON.stringify({
                            ticket: {
                                ...ticket.frontmatter,
                                body: ticket.body,
                            },
                            linkedPRs: linkedPRs.map((p) => ({
                                ...p.frontmatter,
                                body: p.body,
                            })),
                        }),
                    },
                ],
            };
        }
        // Unreachable: the guard above guarantees one of prUrl/ticketKey is set.
        return {
            content: [{ type: 'text', text: JSON.stringify({}) }],
        };
    });
}
|
|
312
|
+
/**
 * Register the `list_repos` tool: aggregate per-repository PR counts, line
 * deltas, date bounds, and impact breakdowns, sorted by PR count descending.
 */
function registerListReposTool(server, outputDir) {
    server.tool('list_repos', 'List all repositories with data and their statistics', {}, async () => {
        const allPRs = await readAllPRs(outputDir);
        // Aggregates keyed by the full "org/repo" repository string.
        const statsByRepo = {};
        for (const pr of allPRs) {
            const { repository, org, createdAt, additions, deletions } = pr.frontmatter;
            let entry = statsByRepo[repository];
            if (!entry) {
                entry = statsByRepo[repository] = {
                    org,
                    repo: repository.split('/')[1] || repository,
                    prCount: 0,
                    additions: 0,
                    deletions: 0,
                    earliestPR: null,
                    latestPR: null,
                    impactBreakdown: {},
                };
            }
            entry.prCount += 1;
            entry.additions += additions;
            entry.deletions += deletions;
            // ISO timestamps order correctly under plain string comparison.
            if (!entry.earliestPR || createdAt < entry.earliestPR) {
                entry.earliestPR = createdAt;
            }
            if (!entry.latestPR || createdAt > entry.latestPR) {
                entry.latestPR = createdAt;
            }
            const impact = pr.frontmatter.impact || 'standard';
            entry.impactBreakdown[impact] = (entry.impactBreakdown[impact] || 0) + 1;
        }
        // Most active repositories first.
        const repos = Object.values(statsByRepo).sort((a, b) => b.prCount - a.prCount);
        return {
            content: [
                {
                    type: 'text',
                    text: JSON.stringify({ repos, totalRepos: repos.length }),
                },
            ],
        };
    });
}
|
|
356
|
+
/**
 * Register the `get_stats` tool: summary statistics over all stored PRs and
 * tickets. With no date filter it serves the precomputed stats.json verbatim
 * when present; otherwise (or when stats.json is missing) it recomputes the
 * aggregates from the raw data.
 */
function registerGetStatsTool(server, outputDir) {
    server.tool('get_stats', 'Get summary statistics about work history, optionally filtered by date range', {
        since: z.string().optional().describe('Start date (ISO format)'),
        until: z.string().optional().describe('End date (ISO format)'),
    }, async (input) => {
        // First try to load from stats.json if no date filter
        // (the cached file covers the full history, so it is only valid
        // when the caller asked for everything).
        if (!input.since && !input.until) {
            try {
                const statsPath = getAnalysisFilePath(outputDir, 'stats.json');
                const statsContent = await fs.readFile(statsPath, 'utf-8');
                // Serve the cached JSON verbatim — it is already the payload shape.
                return {
                    content: [
                        {
                            type: 'text',
                            text: statsContent,
                        },
                    ],
                };
            }
            catch {
                // Fall through to compute stats
            }
        }
        // Compute stats from data
        // `let` because the date filters below reassign the arrays.
        let prs = await readAllPRs(outputDir);
        let tickets = await readAllTickets(outputDir);
        // Apply date filters
        // (invalid date strings throw out of parseAndValidateDate here)
        if (input.since) {
            const sinceDate = parseAndValidateDate(input.since, 'since');
            prs = prs.filter((pr) => new Date(pr.frontmatter.createdAt) >= sinceDate);
            tickets = tickets.filter((t) => new Date(t.frontmatter.createdAt) >= sinceDate);
        }
        if (input.until) {
            const untilDate = parseAndValidateDate(input.until, 'until');
            prs = prs.filter((pr) => new Date(pr.frontmatter.createdAt) <= untilDate);
            tickets = tickets.filter((t) => new Date(t.frontmatter.createdAt) <= untilDate);
        }
        // PR stats: count by org / repository / state / impact, and sum line deltas.
        const prsByOrg = {};
        const prsByRepo = {};
        const prsByState = {};
        const prsByImpact = {};
        let totalAdditions = 0;
        let totalDeletions = 0;
        for (const pr of prs) {
            prsByOrg[pr.frontmatter.org] = (prsByOrg[pr.frontmatter.org] ?? 0) + 1;
            prsByRepo[pr.frontmatter.repository] =
                (prsByRepo[pr.frontmatter.repository] ?? 0) + 1;
            prsByState[pr.frontmatter.state] =
                (prsByState[pr.frontmatter.state] ?? 0) + 1;
            // PRs without an explicit impact level count as 'standard'.
            const impact = pr.frontmatter.impact || 'standard';
            prsByImpact[impact] = (prsByImpact[impact] ?? 0) + 1;
            totalAdditions += pr.frontmatter.additions;
            totalDeletions += pr.frontmatter.deletions;
        }
        // Ticket stats: count by org / project / status, and sum story points.
        const ticketsByOrg = {};
        const ticketsByProject = {};
        const ticketsByStatus = {};
        let totalStoryPoints = 0;
        for (const ticket of tickets) {
            ticketsByOrg[ticket.frontmatter.org] =
                (ticketsByOrg[ticket.frontmatter.org] ?? 0) + 1;
            ticketsByProject[ticket.frontmatter.project] =
                (ticketsByProject[ticket.frontmatter.project] ?? 0) + 1;
            ticketsByStatus[ticket.frontmatter.status] =
                (ticketsByStatus[ticket.frontmatter.status] ?? 0) + 1;
            totalStoryPoints += ticket.frontmatter.storyPoints ?? 0;
        }
        // Date range
        // ISO timestamp strings order correctly under the default lexicographic sort.
        const allDates = [
            ...prs.map((pr) => pr.frontmatter.createdAt),
            ...tickets.map((t) => t.frontmatter.createdAt),
        ].filter(Boolean);
        const sortedDates = allDates.sort();
        // Linked work count: PRs with at least one JIRA ticket reference.
        const prsWithTickets = prs.filter((pr) => pr.frontmatter.jiraTickets?.length).length;
        const stats = {
            prs: {
                total: prs.length,
                byOrg: prsByOrg,
                byRepo: prsByRepo,
                byState: prsByState,
                byImpact: prsByImpact,
                linkedToTickets: prsWithTickets,
                totalAdditions,
                totalDeletions,
            },
            tickets: {
                total: tickets.length,
                byOrg: ticketsByOrg,
                byProject: ticketsByProject,
                byStatus: ticketsByStatus,
                totalStoryPoints,
            },
            dateRange: {
                earliest: sortedDates[0] ?? null,
                latest: sortedDates[sortedDates.length - 1] ?? null,
            },
        };
        return {
            content: [
                {
                    type: 'text',
                    text: JSON.stringify(stats),
                },
            ],
        };
    });
}
|
|
466
|
+
/**
 * Register the `get_projects` tool: serves pre-analyzed project groupings
 * from projects.json, optionally filtered by confidence and limited in count.
 */
function registerGetProjectsTool(server, outputDir) {
    server.tool('get_projects', 'Get detected project groupings that cluster related PRs and tickets', {
        confidence: z
            .enum(['high', 'medium', 'low'])
            .optional()
            .describe('Filter by confidence level'),
        limit: z.number().optional().describe('Maximum number of projects'),
    }, async (input) => {
        // Any failure (missing file, unreadable, malformed JSON) yields the
        // "run analyze first" error payload rather than crashing the tool.
        try {
            const projectsPath = getAnalysisFilePath(outputDir, 'projects.json');
            const projectsData = JSON.parse(await fs.readFile(projectsPath, 'utf-8'));
            const allProjects = projectsData.projects || [];
            let selected = input.confidence
                ? allProjects.filter((p) => p.confidence === input.confidence)
                : allProjects;
            if (input.limit && input.limit > 0) {
                selected = selected.slice(0, input.limit);
            }
            return {
                content: [
                    {
                        type: 'text',
                        text: JSON.stringify({
                            projects: selected,
                            total: projectsData.projects?.length || 0,
                            returned: selected.length,
                            summary: projectsData.summary,
                        }),
                    },
                ],
            };
        }
        catch {
            return {
                content: [
                    {
                        type: 'text',
                        text: JSON.stringify({
                            error: 'projects.json not found. Run `work-chronicler analyze --projects` first.',
                        }),
                    },
                ],
            };
        }
    });
}
|
|
513
|
+
/**
 * Register the `get_timeline` tool: serves pre-analyzed weekly/monthly work
 * periods from timeline.json, with optional date-range and count limits.
 */
function registerGetTimelineTool(server, outputDir) {
    server.tool('get_timeline', 'Get chronological timeline of work grouped by week or month', {
        granularity: z
            .enum(['week', 'month'])
            .optional()
            .describe('Time grouping (default: week)'),
        since: z.string().optional().describe('Start date (ISO format)'),
        until: z.string().optional().describe('End date (ISO format)'),
        limit: z.number().optional().describe('Maximum number of periods'),
    }, async (input) => {
        // The try covers the whole handler so a missing or malformed
        // timeline.json yields the "run analyze first" payload.
        try {
            const timelinePath = getAnalysisFilePath(outputDir, 'timeline.json');
            const timelineData = JSON.parse(await fs.readFile(timelinePath, 'utf-8'));
            const granularity = input.granularity || 'week';
            // Weeks are nested inside months in timeline.json, so the weekly
            // view needs flattening; the monthly view uses months directly.
            const byWeek = granularity === 'week';
            let periods = byWeek
                ? timelineData.months.flatMap((m) => m.weeks)
                : timelineData.months;
            const totalCount = byWeek
                ? timelineData.summary.totalWeeks
                : timelineData.summary.totalMonths;
            if (input.since || input.until) {
                periods = periods.filter((p) => {
                    // TimelineWeek has weekStart, TimelineMonth has month (YYYY-MM format)
                    const periodStart = 'weekStart' in p ? p.weekStart : `${p.month}-01`;
                    if (input.since && periodStart < input.since) {
                        return false;
                    }
                    if (input.until && periodStart > input.until) {
                        return false;
                    }
                    return true;
                });
            }
            if (input.limit && input.limit > 0) {
                periods = periods.slice(0, input.limit);
            }
            return {
                content: [
                    {
                        type: 'text',
                        text: JSON.stringify({
                            granularity,
                            periods,
                            total: totalCount,
                            returned: periods.length,
                            summary: timelineData.summary,
                        }),
                    },
                ],
            };
        }
        catch {
            return {
                content: [
                    {
                        type: 'text',
                        text: JSON.stringify({
                            error: 'timeline.json not found. Run `work-chronicler analyze --timeline` first.',
                        }),
                    },
                ],
            };
        }
    });
}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
/**
 * Aggregated statistics computed across all stored PRs and tickets.
 */
export interface WorkStats {
    /** Pull-request aggregates. */
    prs: {
        total: number;
        byOrg: Record<string, number>;
        byRepo: Record<string, number>;
        byState: Record<string, number>;
        totalAdditions: number;
        totalDeletions: number;
    };
    /** JIRA ticket aggregates. */
    tickets: {
        total: number;
        byOrg: Record<string, number>;
        byProject: Record<string, number>;
        byStatus: Record<string, number>;
        totalStoryPoints: number;
    };
    /** Earliest/latest createdAt across PRs and tickets; null when no data. */
    dateRange: {
        earliest: string | null;
        latest: string | null;
    };
}
/**
 * Get summary statistics about work history
 */
export declare function getStats(outputDir: string): Promise<WorkStats>;
|
|
26
|
+
//# sourceMappingURL=get-stats.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"get-stats.d.ts","sourceRoot":"","sources":["../../../src/mcp/tools/get-stats.ts"],"names":[],"mappings":"AAEA,MAAM,WAAW,SAAS;IACxB,GAAG,EAAE;QACH,KAAK,EAAE,MAAM,CAAC;QACd,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAC9B,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAC/B,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAChC,cAAc,EAAE,MAAM,CAAC;QACvB,cAAc,EAAE,MAAM,CAAC;KACxB,CAAC;IACF,OAAO,EAAE;QACP,KAAK,EAAE,MAAM,CAAC;QACd,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAC9B,SAAS,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QAClC,QAAQ,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACjC,gBAAgB,EAAE,MAAM,CAAC;KAC1B,CAAC;IACF,SAAS,EAAE;QACT,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;QACxB,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;KACvB,CAAC;CACH;AAED;;GAEG;AACH,wBAAsB,QAAQ,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,SAAS,CAAC,CAkEpE"}
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
import { readAllPRs, readAllTickets } from '../../core/index.js';
|
|
2
|
+
/**
|
|
3
|
+
* Get summary statistics about work history
|
|
4
|
+
*/
|
|
5
|
+
export async function getStats(outputDir) {
    const prs = await readAllPRs(outputDir);
    const tickets = await readAllTickets(outputDir);
    // Increment a named counter inside a tally object.
    const bump = (tally, key) => {
        tally[key] = (tally[key] ?? 0) + 1;
    };
    // PR aggregates: counts by org/repo/state plus total line deltas.
    const prsByOrg = {};
    const prsByRepo = {};
    const prsByState = {};
    let totalAdditions = 0;
    let totalDeletions = 0;
    for (const { frontmatter } of prs) {
        bump(prsByOrg, frontmatter.org);
        bump(prsByRepo, frontmatter.repository);
        bump(prsByState, frontmatter.state);
        totalAdditions += frontmatter.additions;
        totalDeletions += frontmatter.deletions;
    }
    // Ticket aggregates: counts by org/project/status plus story points.
    const ticketsByOrg = {};
    const ticketsByProject = {};
    const ticketsByStatus = {};
    let totalStoryPoints = 0;
    for (const { frontmatter } of tickets) {
        bump(ticketsByOrg, frontmatter.org);
        bump(ticketsByProject, frontmatter.project);
        bump(ticketsByStatus, frontmatter.status);
        totalStoryPoints += frontmatter.storyPoints ?? 0;
    }
    // Date range: ISO timestamps sort correctly as plain strings.
    const sortedDates = [
        ...prs.map((pr) => pr.frontmatter.createdAt),
        ...tickets.map((t) => t.frontmatter.createdAt),
    ]
        .filter(Boolean)
        .sort();
    return {
        prs: {
            total: prs.length,
            byOrg: prsByOrg,
            byRepo: prsByRepo,
            byState: prsByState,
            totalAdditions,
            totalDeletions,
        },
        tickets: {
            total: tickets.length,
            byOrg: ticketsByOrg,
            byProject: ticketsByProject,
            byStatus: ticketsByStatus,
            totalStoryPoints,
        },
        dateRange: {
            earliest: sortedDates[0] ?? null,
            latest: sortedDates[sortedDates.length - 1] ?? null,
        },
    };
}
|