@jussmor/commit-memory-mcp 0.3.5 → 0.3.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,13 @@
1
1
  import Database from "better-sqlite3";
2
- import type { CommitChunk } from "../types.js";
2
+ import type { CommitChunk, PullRequestCommentRecord, PullRequestDecisionRecord, PullRequestRecord, PullRequestReviewRecord, WorktreeSessionRecord } from "../types.js";
3
3
  export type RagDatabase = Database.Database;
4
4
  export declare function openDatabase(dbPath: string): RagDatabase;
5
5
  export declare function hasChunk(db: RagDatabase, chunkId: string): boolean;
6
6
  export declare function upsertChunk(db: RagDatabase, chunk: CommitChunk, embedding: number[]): void;
7
7
  export declare function touchIndexState(db: RagDatabase): void;
8
+ export declare function upsertPullRequest(db: RagDatabase, pr: PullRequestRecord): void;
9
+ export declare function replacePullRequestComments(db: RagDatabase, repoOwner: string, repoName: string, prNumber: number, comments: PullRequestCommentRecord[]): void;
10
+ export declare function replacePullRequestReviews(db: RagDatabase, repoOwner: string, repoName: string, prNumber: number, reviews: PullRequestReviewRecord[]): void;
11
+ export declare function replacePullRequestDecisions(db: RagDatabase, repoOwner: string, repoName: string, prNumber: number, decisions: PullRequestDecisionRecord[]): void;
12
+ export declare function touchPullRequestSyncState(db: RagDatabase, repoOwner: string, repoName: string): void;
13
+ export declare function upsertWorktreeSession(db: RagDatabase, session: WorktreeSessionRecord): void;
package/dist/db/client.js CHANGED
@@ -39,6 +39,76 @@ export function openDatabase(dbPath) {
39
39
  id INTEGER PRIMARY KEY CHECK (id = 1),
40
40
  last_indexed_at TEXT NOT NULL
41
41
  );
42
+
43
+ CREATE TABLE IF NOT EXISTS prs (
44
+ repo_owner TEXT NOT NULL,
45
+ repo_name TEXT NOT NULL,
46
+ pr_number INTEGER NOT NULL,
47
+ title TEXT NOT NULL,
48
+ body TEXT NOT NULL,
49
+ author TEXT NOT NULL,
50
+ state TEXT NOT NULL,
51
+ created_at TEXT NOT NULL,
52
+ updated_at TEXT NOT NULL,
53
+ merged_at TEXT,
54
+ url TEXT NOT NULL,
55
+ PRIMARY KEY (repo_owner, repo_name, pr_number)
56
+ );
57
+
58
+ CREATE TABLE IF NOT EXISTS pr_comments (
59
+ id TEXT PRIMARY KEY,
60
+ repo_owner TEXT NOT NULL,
61
+ repo_name TEXT NOT NULL,
62
+ pr_number INTEGER NOT NULL,
63
+ author TEXT NOT NULL,
64
+ body TEXT NOT NULL,
65
+ created_at TEXT NOT NULL,
66
+ updated_at TEXT NOT NULL,
67
+ url TEXT NOT NULL,
68
+ FOREIGN KEY (repo_owner, repo_name, pr_number)
69
+ REFERENCES prs(repo_owner, repo_name, pr_number)
70
+ );
71
+
72
+ CREATE TABLE IF NOT EXISTS pr_reviews (
73
+ id TEXT PRIMARY KEY,
74
+ repo_owner TEXT NOT NULL,
75
+ repo_name TEXT NOT NULL,
76
+ pr_number INTEGER NOT NULL,
77
+ author TEXT NOT NULL,
78
+ state TEXT NOT NULL,
79
+ body TEXT NOT NULL,
80
+ submitted_at TEXT NOT NULL,
81
+ FOREIGN KEY (repo_owner, repo_name, pr_number)
82
+ REFERENCES prs(repo_owner, repo_name, pr_number)
83
+ );
84
+
85
+ CREATE TABLE IF NOT EXISTS pr_decisions (
86
+ id TEXT PRIMARY KEY,
87
+ repo_owner TEXT NOT NULL,
88
+ repo_name TEXT NOT NULL,
89
+ pr_number INTEGER NOT NULL,
90
+ source TEXT NOT NULL,
91
+ author TEXT NOT NULL,
92
+ summary TEXT NOT NULL,
93
+ severity TEXT NOT NULL,
94
+ created_at TEXT NOT NULL,
95
+ FOREIGN KEY (repo_owner, repo_name, pr_number)
96
+ REFERENCES prs(repo_owner, repo_name, pr_number)
97
+ );
98
+
99
+ CREATE TABLE IF NOT EXISTS pr_sync_state (
100
+ repo_owner TEXT NOT NULL,
101
+ repo_name TEXT NOT NULL,
102
+ last_synced_at TEXT NOT NULL,
103
+ PRIMARY KEY (repo_owner, repo_name)
104
+ );
105
+
106
+ CREATE TABLE IF NOT EXISTS worktree_sessions (
107
+ path TEXT PRIMARY KEY,
108
+ branch TEXT NOT NULL,
109
+ base_branch TEXT NOT NULL,
110
+ last_synced_at TEXT NOT NULL
111
+ );
42
112
  `);
43
113
  return db;
44
114
  }
@@ -90,3 +160,113 @@ export function touchIndexState(db) {
90
160
  last_indexed_at = excluded.last_indexed_at
91
161
  `).run(now);
92
162
  }
163
+ export function upsertPullRequest(db, pr) {
164
+ db.prepare(`
165
+ INSERT INTO prs (
166
+ repo_owner,
167
+ repo_name,
168
+ pr_number,
169
+ title,
170
+ body,
171
+ author,
172
+ state,
173
+ created_at,
174
+ updated_at,
175
+ merged_at,
176
+ url
177
+ )
178
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
179
+ ON CONFLICT(repo_owner, repo_name, pr_number) DO UPDATE SET
180
+ title = excluded.title,
181
+ body = excluded.body,
182
+ author = excluded.author,
183
+ state = excluded.state,
184
+ created_at = excluded.created_at,
185
+ updated_at = excluded.updated_at,
186
+ merged_at = excluded.merged_at,
187
+ url = excluded.url
188
+ `).run(pr.repoOwner, pr.repoName, pr.number, pr.title, pr.body, pr.author, pr.state, pr.createdAt, pr.updatedAt, pr.mergedAt, pr.url);
189
+ }
190
+ export function replacePullRequestComments(db, repoOwner, repoName, prNumber, comments) {
191
+ db.prepare(`
192
+ DELETE FROM pr_comments
193
+ WHERE repo_owner = ? AND repo_name = ? AND pr_number = ?
194
+ `).run(repoOwner, repoName, prNumber);
195
+ const insert = db.prepare(`
196
+ INSERT INTO pr_comments (
197
+ id,
198
+ repo_owner,
199
+ repo_name,
200
+ pr_number,
201
+ author,
202
+ body,
203
+ created_at,
204
+ updated_at,
205
+ url
206
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
207
+ `);
208
+ for (const comment of comments) {
209
+ insert.run(comment.id, repoOwner, repoName, prNumber, comment.author, comment.body, comment.createdAt, comment.updatedAt, comment.url);
210
+ }
211
+ }
212
+ export function replacePullRequestReviews(db, repoOwner, repoName, prNumber, reviews) {
213
+ db.prepare(`
214
+ DELETE FROM pr_reviews
215
+ WHERE repo_owner = ? AND repo_name = ? AND pr_number = ?
216
+ `).run(repoOwner, repoName, prNumber);
217
+ const insert = db.prepare(`
218
+ INSERT INTO pr_reviews (
219
+ id,
220
+ repo_owner,
221
+ repo_name,
222
+ pr_number,
223
+ author,
224
+ state,
225
+ body,
226
+ submitted_at
227
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
228
+ `);
229
+ for (const review of reviews) {
230
+ insert.run(review.id, repoOwner, repoName, prNumber, review.author, review.state, review.body, review.submittedAt);
231
+ }
232
+ }
233
+ export function replacePullRequestDecisions(db, repoOwner, repoName, prNumber, decisions) {
234
+ db.prepare(`
235
+ DELETE FROM pr_decisions
236
+ WHERE repo_owner = ? AND repo_name = ? AND pr_number = ?
237
+ `).run(repoOwner, repoName, prNumber);
238
+ const insert = db.prepare(`
239
+ INSERT INTO pr_decisions (
240
+ id,
241
+ repo_owner,
242
+ repo_name,
243
+ pr_number,
244
+ source,
245
+ author,
246
+ summary,
247
+ severity,
248
+ created_at
249
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
250
+ `);
251
+ for (const decision of decisions) {
252
+ insert.run(decision.id, repoOwner, repoName, prNumber, decision.source, decision.author, decision.summary, decision.severity, decision.createdAt);
253
+ }
254
+ }
255
+ export function touchPullRequestSyncState(db, repoOwner, repoName) {
256
+ db.prepare(`
257
+ INSERT INTO pr_sync_state (repo_owner, repo_name, last_synced_at)
258
+ VALUES (?, ?, ?)
259
+ ON CONFLICT(repo_owner, repo_name) DO UPDATE SET
260
+ last_synced_at = excluded.last_synced_at
261
+ `).run(repoOwner, repoName, new Date().toISOString());
262
+ }
263
+ export function upsertWorktreeSession(db, session) {
264
+ db.prepare(`
265
+ INSERT INTO worktree_sessions (path, branch, base_branch, last_synced_at)
266
+ VALUES (?, ?, ?, ?)
267
+ ON CONFLICT(path) DO UPDATE SET
268
+ branch = excluded.branch,
269
+ base_branch = excluded.base_branch,
270
+ last_synced_at = excluded.last_synced_at
271
+ `).run(session.path, session.branch, session.baseBranch, session.lastSyncedAt);
272
+ }
@@ -0,0 +1,51 @@
1
+ export declare function whoChangedFile(options: {
2
+ repoPath: string;
3
+ filePath: string;
4
+ limit: number;
5
+ }): {
6
+ filePath: string;
7
+ commits: Array<{
8
+ sha: string;
9
+ author: string;
10
+ date: string;
11
+ subject: string;
12
+ }>;
13
+ authors: Array<{
14
+ author: string;
15
+ commitCount: number;
16
+ lastCommitAt: string;
17
+ }>;
18
+ };
19
+ export declare function latestCommitForFile(repoPath: string, filePath: string): string | null;
20
+ export declare function commitDetails(repoPath: string, sha: string): {
21
+ sha: string;
22
+ author: string;
23
+ date: string;
24
+ subject: string;
25
+ body: string;
26
+ };
27
+ export declare function mainBranchOvernightBrief(options: {
28
+ repoPath: string;
29
+ baseBranch: string;
30
+ sinceHours: number;
31
+ limit: number;
32
+ }): {
33
+ baseBranch: string;
34
+ sinceHours: number;
35
+ commits: Array<{
36
+ sha: string;
37
+ author: string;
38
+ date: string;
39
+ subject: string;
40
+ }>;
41
+ };
42
+ export declare function resumeFeatureSessionBrief(options: {
43
+ worktreePath: string;
44
+ baseBranch: string;
45
+ }): {
46
+ worktreePath: string;
47
+ branch: string;
48
+ ahead: number;
49
+ behind: number;
50
+ overlapFiles: string[];
51
+ };
@@ -0,0 +1,146 @@
1
+ import { execFileSync } from "node:child_process";
2
+ import path from "node:path";
3
+ function runGit(repoPath, args) {
4
+ return execFileSync("git", ["-C", repoPath, ...args], {
5
+ encoding: "utf8",
6
+ stdio: ["ignore", "pipe", "pipe"],
7
+ });
8
+ }
9
+ export function whoChangedFile(options) {
10
+ const repoPath = path.resolve(options.repoPath);
11
+ const output = runGit(repoPath, [
12
+ "log",
13
+ `-n${options.limit}`,
14
+ "--format=%H%x1f%an%x1f%aI%x1f%s",
15
+ "--",
16
+ options.filePath,
17
+ ]).trim();
18
+ if (!output) {
19
+ return { filePath: options.filePath, commits: [], authors: [] };
20
+ }
21
+ const commits = output.split("\n").map((line) => {
22
+ const [sha = "", author = "", date = "", subject = ""] = line.split("\x1f");
23
+ return { sha, author, date, subject };
24
+ });
25
+ const authorMap = new Map();
26
+ for (const commit of commits) {
27
+ const existing = authorMap.get(commit.author);
28
+ if (!existing) {
29
+ authorMap.set(commit.author, {
30
+ commitCount: 1,
31
+ lastCommitAt: commit.date,
32
+ });
33
+ continue;
34
+ }
35
+ existing.commitCount += 1;
36
+ if (commit.date > existing.lastCommitAt) {
37
+ existing.lastCommitAt = commit.date;
38
+ }
39
+ }
40
+ const authors = Array.from(authorMap.entries())
41
+ .map(([author, value]) => ({ author, ...value }))
42
+ .sort((a, b) => b.commitCount - a.commitCount);
43
+ return {
44
+ filePath: options.filePath,
45
+ commits,
46
+ authors,
47
+ };
48
+ }
49
+ export function latestCommitForFile(repoPath, filePath) {
50
+ const output = runGit(path.resolve(repoPath), [
51
+ "log",
52
+ "-n1",
53
+ "--format=%H",
54
+ "--",
55
+ filePath,
56
+ ]).trim();
57
+ return output || null;
58
+ }
59
+ export function commitDetails(repoPath, sha) {
60
+ const output = runGit(path.resolve(repoPath), [
61
+ "show",
62
+ "--no-color",
63
+ "--format=%H%x1f%an%x1f%aI%x1f%s%x1f%b",
64
+ "--no-patch",
65
+ sha,
66
+ ]).trimEnd();
67
+ const [commitSha = sha, author = "", date = "", subject = "", body = ""] = output.split("\x1f");
68
+ return { sha: commitSha, author, date, subject, body };
69
+ }
70
+ export function mainBranchOvernightBrief(options) {
71
+ const repoPath = path.resolve(options.repoPath);
72
+ const since = `${Math.max(1, options.sinceHours)} hours ago`;
73
+ const output = runGit(repoPath, [
74
+ "log",
75
+ `-n${Math.max(1, options.limit)}`,
76
+ `--since=${since}`,
77
+ "--format=%H%x1f%an%x1f%aI%x1f%s",
78
+ `origin/${options.baseBranch}`,
79
+ ]).trim();
80
+ if (!output) {
81
+ return {
82
+ baseBranch: options.baseBranch,
83
+ sinceHours: options.sinceHours,
84
+ commits: [],
85
+ };
86
+ }
87
+ const commits = output.split("\n").map((line) => {
88
+ const [sha = "", author = "", date = "", subject = ""] = line.split("\x1f");
89
+ return { sha, author, date, subject };
90
+ });
91
+ return {
92
+ baseBranch: options.baseBranch,
93
+ sinceHours: options.sinceHours,
94
+ commits,
95
+ };
96
+ }
97
+ export function resumeFeatureSessionBrief(options) {
98
+ const worktreePath = path.resolve(options.worktreePath);
99
+ const branch = runGit(worktreePath, [
100
+ "rev-parse",
101
+ "--abbrev-ref",
102
+ "HEAD",
103
+ ]).trim();
104
+ const countRaw = runGit(worktreePath, [
105
+ "rev-list",
106
+ "--left-right",
107
+ "--count",
108
+ `origin/${options.baseBranch}...HEAD`,
109
+ ]).trim();
110
+ const [behindRaw = "0", aheadRaw = "0"] = countRaw.split(/\s+/);
111
+ const behind = Number.parseInt(behindRaw, 10) || 0;
112
+ const ahead = Number.parseInt(aheadRaw, 10) || 0;
113
+ const mergeBase = runGit(worktreePath, [
114
+ "merge-base",
115
+ "HEAD",
116
+ `origin/${options.baseBranch}`,
117
+ ]).trim();
118
+ const baseFilesRaw = runGit(worktreePath, [
119
+ "diff",
120
+ "--name-only",
121
+ `${mergeBase}..origin/${options.baseBranch}`,
122
+ ]).trim();
123
+ const featureFilesRaw = runGit(worktreePath, [
124
+ "diff",
125
+ "--name-only",
126
+ `${mergeBase}..HEAD`,
127
+ ]).trim();
128
+ const baseFiles = new Set(baseFilesRaw
129
+ .split("\n")
130
+ .map((line) => line.trim())
131
+ .filter(Boolean));
132
+ const featureFiles = new Set(featureFilesRaw
133
+ .split("\n")
134
+ .map((line) => line.trim())
135
+ .filter(Boolean));
136
+ const overlapFiles = Array.from(featureFiles)
137
+ .filter((file) => baseFiles.has(file))
138
+ .slice(0, 40);
139
+ return {
140
+ worktreePath,
141
+ branch,
142
+ ahead,
143
+ behind,
144
+ overlapFiles,
145
+ };
146
+ }
@@ -0,0 +1,3 @@
1
+ import type { WorktreeRecord } from "../types.js";
2
+ export declare function listActiveWorktrees(repoPath: string): WorktreeRecord[];
3
+ export declare function currentBranch(repoPath: string): string;
@@ -0,0 +1,52 @@
1
+ import { execFileSync } from "node:child_process";
2
+ import path from "node:path";
3
+ function runGit(repoPath, args) {
4
+ return execFileSync("git", ["-C", repoPath, ...args], {
5
+ encoding: "utf8",
6
+ stdio: ["ignore", "pipe", "pipe"],
7
+ });
8
+ }
9
+ export function listActiveWorktrees(repoPath) {
10
+ const root = path.resolve(repoPath);
11
+ const output = runGit(root, ["worktree", "list", "--porcelain"]);
12
+ const records = [];
13
+ const blocks = output
14
+ .split("\n\n")
15
+ .map((block) => block.trim())
16
+ .filter(Boolean);
17
+ for (const block of blocks) {
18
+ const lines = block.split("\n");
19
+ let recordPath = "";
20
+ let headSha = "";
21
+ let branch = "detached";
22
+ for (const line of lines) {
23
+ if (line.startsWith("worktree ")) {
24
+ recordPath = line.slice("worktree ".length).trim();
25
+ }
26
+ if (line.startsWith("HEAD ")) {
27
+ headSha = line.slice("HEAD ".length).trim();
28
+ }
29
+ if (line.startsWith("branch ")) {
30
+ const fullRef = line.slice("branch ".length).trim();
31
+ branch = fullRef.replace("refs/heads/", "");
32
+ }
33
+ }
34
+ if (!recordPath) {
35
+ continue;
36
+ }
37
+ records.push({
38
+ path: recordPath,
39
+ branch,
40
+ headSha,
41
+ isCurrent: path.resolve(recordPath) === root,
42
+ });
43
+ }
44
+ return records;
45
+ }
46
+ export function currentBranch(repoPath) {
47
+ return runGit(path.resolve(repoPath), [
48
+ "rev-parse",
49
+ "--abbrev-ref",
50
+ "HEAD",
51
+ ]).trim();
52
+ }
@@ -6,14 +6,59 @@ import { execFileSync } from "node:child_process";
6
6
  import fs from "node:fs";
7
7
  import path from "node:path";
8
8
  import { pathToFileURL } from "node:url";
9
- import { openDatabase } from "../db/client.js";
10
- import { indexRepository } from "../indexer.js";
11
- import { explainCommitMatch, searchRelatedCommits } from "../search/query.js";
12
- function runGitDiff(repoPath, sha) {
13
- return execFileSync("git", ["-C", repoPath, "show", "--no-color", "--stat", "--patch", sha], {
14
- encoding: "utf8",
9
+ import { openDatabase, upsertWorktreeSession } from "../db/client.js";
10
+ import { commitDetails, latestCommitForFile, mainBranchOvernightBrief, resumeFeatureSessionBrief, whoChangedFile, } from "../git/insights.js";
11
+ import { listActiveWorktrees } from "../git/worktree.js";
12
+ import { syncPullRequestContext } from "../pr/sync.js";
13
+ function fetchRemote(repoPath) {
14
+ execFileSync("git", ["-C", repoPath, "fetch", "--all", "--prune"], {
15
+ stdio: ["ignore", "pipe", "pipe"],
15
16
  });
16
17
  }
18
+ function detectReferencedPrNumber(text) {
19
+ const match = text.match(/#(\d{1,8})\b/);
20
+ if (!match) {
21
+ return null;
22
+ }
23
+ const value = Number.parseInt(match[1] ?? "", 10);
24
+ if (!Number.isFinite(value) || value <= 0) {
25
+ return null;
26
+ }
27
+ return value;
28
+ }
29
+ function loadPullRequestContext(db, prNumber, repoOwner, repoName) {
30
+ const pr = repoOwner && repoName
31
+ ? (db
32
+ .prepare(`
33
+ SELECT repo_owner, repo_name, pr_number, title, body, author, state, created_at, updated_at, merged_at, url
34
+ FROM prs
35
+ WHERE repo_owner = ? AND repo_name = ? AND pr_number = ?
36
+ LIMIT 1
37
+ `)
38
+ .get(repoOwner, repoName, prNumber) ?? null)
39
+ : (db
40
+ .prepare(`
41
+ SELECT repo_owner, repo_name, pr_number, title, body, author, state, created_at, updated_at, merged_at, url
42
+ FROM prs
43
+ WHERE pr_number = ?
44
+ ORDER BY updated_at DESC
45
+ LIMIT 1
46
+ `)
47
+ .get(prNumber) ?? null);
48
+ if (!pr) {
49
+ return { pr: null, decisions: [] };
50
+ }
51
+ const decisions = db
52
+ .prepare(`
53
+ SELECT id, source, author, summary, severity, created_at
54
+ FROM pr_decisions
55
+ WHERE repo_owner = ? AND repo_name = ? AND pr_number = ?
56
+ ORDER BY created_at DESC
57
+ LIMIT 50
58
+ `)
59
+ .all(pr.repo_owner, pr.repo_name, pr.pr_number);
60
+ return { pr, decisions };
61
+ }
17
62
  function getConfig() {
18
63
  const repoPath = path.resolve(process.env.COMMIT_RAG_REPO ?? process.cwd());
19
64
  const dbPath = path.resolve(process.env.COMMIT_RAG_DB ?? path.join(repoPath, ".commit-rag.db"));
@@ -23,7 +68,7 @@ function getConfig() {
23
68
  export async function startMcpServer() {
24
69
  const server = new Server({
25
70
  name: "commit-memory-mcp",
26
- version: "0.3.1",
71
+ version: "0.4.0",
27
72
  }, {
28
73
  capabilities: {
29
74
  tools: {},
@@ -32,117 +77,309 @@ export async function startMcpServer() {
32
77
  server.setRequestHandler(ListToolsRequestSchema, async () => ({
33
78
  tools: [
34
79
  {
35
- name: "search_related_commits",
36
- description: "Find commit chunks semantically related to current work context.",
80
+ name: "sync_pr_context",
81
+ description: "Sync pull request description/comments/reviews from GitHub CLI into local context DB.",
37
82
  inputSchema: {
38
83
  type: "object",
39
84
  properties: {
40
- query: { type: "string" },
41
- activeFile: { type: "string" },
85
+ owner: { type: "string" },
86
+ repo: { type: "string" },
87
+ prNumbers: {
88
+ type: "array",
89
+ items: { type: "number" },
90
+ },
42
91
  limit: { type: "number" },
43
92
  },
44
- required: ["query"],
93
+ required: ["owner", "repo"],
94
+ },
95
+ },
96
+ {
97
+ name: "list_active_worktrees",
98
+ description: "List active git worktrees for multi-session feature work.",
99
+ inputSchema: {
100
+ type: "object",
101
+ properties: {
102
+ baseBranch: { type: "string" },
103
+ },
104
+ required: [],
45
105
  },
46
106
  },
47
107
  {
48
- name: "explain_commit_match",
49
- description: "Return contextual details for a chunk match.",
108
+ name: "who_changed_this",
109
+ description: "Show who changed a file recently and summarize top authors.",
50
110
  inputSchema: {
51
111
  type: "object",
52
112
  properties: {
53
- chunkId: { type: "string" },
113
+ filePath: { type: "string" },
114
+ limit: { type: "number" },
54
115
  },
55
- required: ["chunkId"],
116
+ required: ["filePath"],
56
117
  },
57
118
  },
58
119
  {
59
- name: "get_commit_diff",
60
- description: "Get full git show output for a commit SHA.",
120
+ name: "why_was_this_changed",
121
+ description: "Explain intent for a commit or file using git history and synced PR decisions.",
61
122
  inputSchema: {
62
123
  type: "object",
63
124
  properties: {
64
125
  sha: { type: "string" },
126
+ filePath: { type: "string" },
127
+ owner: { type: "string" },
128
+ repo: { type: "string" },
65
129
  },
66
- required: ["sha"],
130
+ required: [],
67
131
  },
68
132
  },
69
133
  {
70
- name: "reindex_commits",
71
- description: "Refresh commit index from git history.",
134
+ name: "get_main_branch_overnight_brief",
135
+ description: "Summarize what changed recently on main branch while you were offline.",
72
136
  inputSchema: {
73
137
  type: "object",
74
138
  properties: {
139
+ baseBranch: { type: "string" },
140
+ sinceHours: { type: "number" },
75
141
  limit: { type: "number" },
76
142
  },
77
143
  required: [],
78
144
  },
79
145
  },
146
+ {
147
+ name: "resume_feature_session_brief",
148
+ description: "Brief branch divergence and overlap risk for a feature worktree.",
149
+ inputSchema: {
150
+ type: "object",
151
+ properties: {
152
+ worktreePath: { type: "string" },
153
+ baseBranch: { type: "string" },
154
+ },
155
+ required: [],
156
+ },
157
+ },
158
+ {
159
+ name: "pre_plan_sync_brief",
160
+ description: "Run sync + overnight + feature resume analysis before planning work.",
161
+ inputSchema: {
162
+ type: "object",
163
+ properties: {
164
+ owner: { type: "string" },
165
+ repo: { type: "string" },
166
+ baseBranch: { type: "string" },
167
+ worktreePath: { type: "string" },
168
+ filePath: { type: "string" },
169
+ sinceHours: { type: "number" },
170
+ limit: { type: "number" },
171
+ },
172
+ required: ["owner", "repo"],
173
+ },
174
+ },
80
175
  ],
81
176
  }));
82
177
  server.setRequestHandler(CallToolRequestSchema, async (request) => {
83
178
  const { repoPath, dbPath, limit: defaultLimit } = getConfig();
84
- if (request.params.name === "reindex_commits") {
85
- const limit = Number(request.params.arguments?.limit ?? defaultLimit);
86
- const summary = await indexRepository({ repoPath, dbPath, limit });
179
+ if (request.params.name === "sync_pr_context") {
180
+ const owner = String(request.params.arguments?.owner ?? "").trim();
181
+ const repo = String(request.params.arguments?.repo ?? "").trim();
182
+ if (!owner || !repo) {
183
+ return {
184
+ content: [{ type: "text", text: "owner and repo are required" }],
185
+ isError: true,
186
+ };
187
+ }
188
+ const numbersRaw = request.params.arguments?.prNumbers;
189
+ const prNumbers = Array.isArray(numbersRaw)
190
+ ? numbersRaw
191
+ .map((value) => Number(value))
192
+ .filter((value) => Number.isFinite(value) && value > 0)
193
+ : undefined;
194
+ const limit = Number(request.params.arguments?.limit ?? 25);
195
+ const summary = await syncPullRequestContext({
196
+ repoPath,
197
+ dbPath,
198
+ repoOwner: owner,
199
+ repoName: repo,
200
+ prNumbers,
201
+ limit,
202
+ });
87
203
  return {
88
204
  content: [{ type: "text", text: JSON.stringify(summary, null, 2) }],
89
205
  };
90
206
  }
91
- const db = openDatabase(dbPath);
92
- try {
93
- if (request.params.name === "search_related_commits") {
94
- const query = String(request.params.arguments?.query ?? "").trim();
95
- const activeFile = request.params.arguments?.activeFile
96
- ? String(request.params.arguments.activeFile)
97
- : undefined;
98
- const limit = Number(request.params.arguments?.limit ?? 8);
99
- if (!query) {
100
- return {
101
- content: [{ type: "text", text: "query is required" }],
102
- isError: true,
103
- };
207
+ if (request.params.name === "list_active_worktrees") {
208
+ const baseBranch = String(request.params.arguments?.baseBranch ?? "").trim() || "main";
209
+ const worktrees = listActiveWorktrees(repoPath);
210
+ const db = openDatabase(dbPath);
211
+ try {
212
+ for (const worktree of worktrees) {
213
+ upsertWorktreeSession(db, {
214
+ path: worktree.path,
215
+ branch: worktree.branch,
216
+ baseBranch,
217
+ lastSyncedAt: new Date().toISOString(),
218
+ });
104
219
  }
105
- const results = await searchRelatedCommits(db, query, limit, activeFile);
220
+ }
221
+ finally {
222
+ db.close();
223
+ }
224
+ return {
225
+ content: [{ type: "text", text: JSON.stringify(worktrees, null, 2) }],
226
+ };
227
+ }
228
+ if (request.params.name === "who_changed_this") {
229
+ const filePath = String(request.params.arguments?.filePath ?? "").trim();
230
+ const limit = Number(request.params.arguments?.limit ?? 20);
231
+ if (!filePath) {
106
232
  return {
107
- content: [{ type: "text", text: JSON.stringify(results, null, 2) }],
233
+ content: [{ type: "text", text: "filePath is required" }],
234
+ isError: true,
108
235
  };
109
236
  }
110
- if (request.params.name === "explain_commit_match") {
111
- const chunkId = String(request.params.arguments?.chunkId ?? "").trim();
112
- if (!chunkId) {
113
- return {
114
- content: [{ type: "text", text: "chunkId is required" }],
115
- isError: true,
116
- };
117
- }
118
- const result = explainCommitMatch(db, chunkId);
237
+ const output = whoChangedFile({
238
+ repoPath,
239
+ filePath,
240
+ limit: Number.isFinite(limit) && limit > 0 ? limit : 20,
241
+ });
242
+ return {
243
+ content: [{ type: "text", text: JSON.stringify(output, null, 2) }],
244
+ };
245
+ }
246
+ if (request.params.name === "why_was_this_changed") {
247
+ const owner = String(request.params.arguments?.owner ?? "").trim();
248
+ const repo = String(request.params.arguments?.repo ?? "").trim();
249
+ const filePath = String(request.params.arguments?.filePath ?? "").trim();
250
+ const rawSha = String(request.params.arguments?.sha ?? "").trim();
251
+ const sha = rawSha || (filePath ? latestCommitForFile(repoPath, filePath) : null);
252
+ if (!sha) {
119
253
  return {
120
- content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
254
+ content: [
255
+ {
256
+ type: "text",
257
+ text: "Provide sha or a filePath that has commit history.",
258
+ },
259
+ ],
260
+ isError: true,
121
261
  };
122
262
  }
123
- if (request.params.name === "get_commit_diff") {
124
- const sha = String(request.params.arguments?.sha ?? "").trim();
125
- if (!sha) {
126
- return {
127
- content: [{ type: "text", text: "sha is required" }],
128
- isError: true,
129
- };
263
+ const commit = commitDetails(repoPath, sha);
264
+ const prNumber = detectReferencedPrNumber(`${commit.subject}\n${commit.body}`);
265
+ let prContext = { pr: null, decisions: [] };
266
+ if (prNumber) {
267
+ const db = openDatabase(dbPath);
268
+ try {
269
+ prContext = loadPullRequestContext(db, prNumber, owner || undefined, repo || undefined);
130
270
  }
131
- const output = runGitDiff(repoPath, sha);
271
+ finally {
272
+ db.close();
273
+ }
274
+ }
275
+ const result = {
276
+ filePath: filePath || null,
277
+ commit,
278
+ referencedPullRequestNumber: prNumber,
279
+ pullRequest: prContext.pr,
280
+ decisions: prContext.decisions,
281
+ };
282
+ return {
283
+ content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
284
+ };
285
+ }
286
+ if (request.params.name === "get_main_branch_overnight_brief") {
287
+ const baseBranch = String(request.params.arguments?.baseBranch ?? "").trim() || "main";
288
+ const sinceHours = Number(request.params.arguments?.sinceHours ?? 12);
289
+ const limit = Number(request.params.arguments?.limit ?? defaultLimit);
290
+ fetchRemote(repoPath);
291
+ const brief = mainBranchOvernightBrief({
292
+ repoPath,
293
+ baseBranch,
294
+ sinceHours: Number.isFinite(sinceHours) ? sinceHours : 12,
295
+ limit: Number.isFinite(limit) ? limit : defaultLimit,
296
+ });
297
+ return {
298
+ content: [{ type: "text", text: JSON.stringify(brief, null, 2) }],
299
+ };
300
+ }
301
+ if (request.params.name === "resume_feature_session_brief") {
302
+ const worktreePath = String(request.params.arguments?.worktreePath ?? "").trim() || repoPath;
303
+ const baseBranch = String(request.params.arguments?.baseBranch ?? "").trim() || "main";
304
+ fetchRemote(repoPath);
305
+ const brief = resumeFeatureSessionBrief({
306
+ worktreePath,
307
+ baseBranch,
308
+ });
309
+ const db = openDatabase(dbPath);
310
+ try {
311
+ upsertWorktreeSession(db, {
312
+ path: brief.worktreePath,
313
+ branch: brief.branch,
314
+ baseBranch,
315
+ lastSyncedAt: new Date().toISOString(),
316
+ });
317
+ }
318
+ finally {
319
+ db.close();
320
+ }
321
+ return {
322
+ content: [{ type: "text", text: JSON.stringify(brief, null, 2) }],
323
+ };
324
+ }
325
+ if (request.params.name === "pre_plan_sync_brief") {
326
+ const owner = String(request.params.arguments?.owner ?? "").trim();
327
+ const repo = String(request.params.arguments?.repo ?? "").trim();
328
+ const baseBranch = String(request.params.arguments?.baseBranch ?? "").trim() || "main";
329
+ const worktreePath = String(request.params.arguments?.worktreePath ?? "").trim() || repoPath;
330
+ const filePath = String(request.params.arguments?.filePath ?? "").trim();
331
+ const sinceHours = Number(request.params.arguments?.sinceHours ?? 12);
332
+ const limit = Number(request.params.arguments?.limit ?? 25);
333
+ if (!owner || !repo) {
132
334
  return {
133
- content: [{ type: "text", text: output }],
335
+ content: [{ type: "text", text: "owner and repo are required" }],
336
+ isError: true,
134
337
  };
135
338
  }
136
- return {
137
- content: [
138
- { type: "text", text: `Unknown tool: ${request.params.name}` },
339
+ fetchRemote(repoPath);
340
+ const syncSummary = await syncPullRequestContext({
341
+ repoPath,
342
+ dbPath,
343
+ repoOwner: owner,
344
+ repoName: repo,
345
+ limit,
346
+ });
347
+ const overnight = mainBranchOvernightBrief({
348
+ repoPath,
349
+ baseBranch,
350
+ sinceHours: Number.isFinite(sinceHours) ? sinceHours : 12,
351
+ limit,
352
+ });
353
+ const resume = resumeFeatureSessionBrief({
354
+ worktreePath,
355
+ baseBranch,
356
+ });
357
+ const fileFocus = filePath
358
+ ? whoChangedFile({
359
+ repoPath,
360
+ filePath,
361
+ limit: 10,
362
+ })
363
+ : null;
364
+ const prePlan = {
365
+ syncSummary,
366
+ overnight,
367
+ resume,
368
+ fileFocus,
369
+ recommendations: [
370
+ "Review blocker-level decisions from synced PR context first.",
371
+ "Rebase or merge main if behind is non-zero before coding.",
372
+ "Resolve overlap files before expanding feature scope.",
139
373
  ],
140
- isError: true,
374
+ };
375
+ return {
376
+ content: [{ type: "text", text: JSON.stringify(prePlan, null, 2) }],
141
377
  };
142
378
  }
143
- finally {
144
- db.close();
145
- }
379
+ return {
380
+ content: [{ type: "text", text: `Unknown tool: ${request.params.name}` }],
381
+ isError: true,
382
+ };
146
383
  });
147
384
  const transport = new StdioServerTransport();
148
385
  await server.connect(transport);
@@ -0,0 +1,9 @@
1
+ import type { PullRequestSyncSummary } from "../types.js";
2
+ export declare function syncPullRequestContext(options: {
3
+ repoPath: string;
4
+ dbPath: string;
5
+ repoOwner: string;
6
+ repoName: string;
7
+ prNumbers?: number[];
8
+ limit?: number;
9
+ }): Promise<PullRequestSyncSummary>;
@@ -0,0 +1,201 @@
1
+ import { execFileSync } from "node:child_process";
2
+ import path from "node:path";
3
+ import { openDatabase, replacePullRequestComments, replacePullRequestDecisions, replacePullRequestReviews, touchPullRequestSyncState, upsertPullRequest, } from "../db/client.js";
4
/**
 * Runs the GitHub CLI (`gh`) synchronously inside the given repository and
 * returns its stdout as a UTF-8 string. stdin is ignored and stderr is
 * captured (surfaced on the thrown error if the command fails).
 */
function runGh(repoPath, args) {
    const execOptions = {
        cwd: repoPath,
        encoding: "utf8",
        stdio: ["ignore", "pipe", "pipe"],
    };
    return execFileSync("gh", args, execOptions);
}
11
/**
 * Collapses all whitespace runs in `text` to single spaces, trims it, and
 * caps the result at 280 characters (truncated to 277 plus "...").
 * Returns a fixed placeholder when nothing remains after trimming.
 */
function summarize(text) {
    const compact = text.replace(/\s+/g, " ").trim();
    if (compact.length === 0) {
        return "No summary available.";
    }
    return compact.length > 280 ? `${compact.slice(0, 277)}...` : compact;
}
21
/**
 * Heuristically classifies free-form text as "blocker", "warning", or "info"
 * based on keyword matches. Blocker keywords take precedence over warnings.
 */
function classifySeverity(text) {
    const blockerPattern = /\b(block|blocking|must|required|cannot|can't|broken|fail)\b/i;
    const warningPattern = /\b(should|consider|follow\s*up|todo|risk|later)\b/i;
    if (blockerPattern.test(text)) {
        return "blocker";
    }
    return warningPattern.test(text) ? "warning" : "info";
}
30
/**
 * Returns true when the text contains a decision-style keyword
 * (decide/agreed/resolved/approved/final/ship/merge(d)/...).
 */
function isDecisionSignal(text) {
    const decisionPattern = /\b(decision|decide|decided|agreed|resolved|approved|final|ship|merged?)\b/i;
    return decisionPattern.test(text);
}
33
/**
 * Normalizes raw `gh pr view --json comments` entries into comment records.
 * Non-array input yields an empty list; missing fields fall back to
 * deterministic defaults (synthetic id, "unknown" author, epoch timestamps).
 */
function parseComments(prNumber, comments) {
    if (!Array.isArray(comments)) {
        return [];
    }
    const epoch = new Date(0).toISOString();
    const records = [];
    comments.forEach((comment, index) => {
        const createdAt = comment.createdAt ?? epoch;
        records.push({
            id: String(comment.id ?? `${prNumber}-comment-${index + 1}`),
            prNumber,
            author: comment.author?.login ?? "unknown",
            body: comment.body?.trim() ?? "",
            createdAt,
            // Fall back to creation time when GitHub reports no update time.
            updatedAt: comment.updatedAt ?? createdAt,
            url: comment.url ?? "",
        });
    });
    return records;
}
47
/**
 * Normalizes raw `gh pr view --json reviews` entries into review records.
 * Non-array input yields an empty list; missing fields fall back to
 * deterministic defaults (synthetic id, "unknown" author, COMMENTED state,
 * epoch timestamp).
 */
function parseReviews(prNumber, reviews) {
    if (!Array.isArray(reviews)) {
        return [];
    }
    const epoch = new Date(0).toISOString();
    const records = [];
    for (let i = 0; i < reviews.length; i += 1) {
        const review = reviews[i];
        records.push({
            id: String(review.id ?? `${prNumber}-review-${i + 1}`),
            prNumber,
            author: review.author?.login ?? "unknown",
            state: review.state ?? "COMMENTED",
            body: review.body?.trim() ?? "",
            submittedAt: review.submittedAt ?? epoch,
        });
    }
    return records;
}
60
/**
 * Promotes decision-like content from a PR into decision records, in order:
 * 1. the PR description (when non-empty),
 * 2. reviews that are explicit verdicts (CHANGES_REQUESTED / APPROVED) or
 *    whose body carries a decision keyword — CHANGES_REQUESTED is always a
 *    blocker, otherwise severity comes from the keyword heuristic,
 * 3. comments whose body carries a decision keyword.
 */
function createDecisionRecords(options) {
    const { pr, comments, reviews } = options;
    const records = [];
    if (pr.body.trim()) {
        records.push({
            id: `pr-${pr.number}-description`,
            prNumber: pr.number,
            source: "description",
            author: pr.author,
            summary: summarize(pr.body),
            severity: classifySeverity(pr.body),
            createdAt: pr.updatedAt,
        });
    }
    for (const review of reviews) {
        // Skip plain COMMENTED reviews that have no text at all.
        if (review.state === "COMMENTED" && !review.body) {
            continue;
        }
        const isExplicitVerdict = review.state === "CHANGES_REQUESTED" || review.state === "APPROVED";
        if (!isExplicitVerdict && !isDecisionSignal(review.body)) {
            continue;
        }
        const text = [review.state, review.body].filter(Boolean).join(" - ");
        records.push({
            id: `pr-${pr.number}-review-${review.id}`,
            prNumber: pr.number,
            source: "review",
            author: review.author,
            summary: summarize(text),
            severity: review.state === "CHANGES_REQUESTED" ? "blocker" : classifySeverity(text),
            createdAt: review.submittedAt,
        });
    }
    for (const comment of comments) {
        if (!comment.body || !isDecisionSignal(comment.body)) {
            continue;
        }
        records.push({
            id: `pr-${pr.number}-comment-${comment.id}`,
            prNumber: pr.number,
            source: "comment",
            author: comment.author,
            summary: summarize(comment.body),
            severity: classifySeverity(comment.body),
            createdAt: comment.updatedAt,
        });
    }
    return records;
}
110
/**
 * Converts the raw JSON payload from `gh pr view --json ...` into the
 * normalized { pr, comments, reviews, decisions } bundle.
 * Throws when the payload lacks a positive numeric PR number.
 */
function parsePullRequest(repoOwner, repoName, raw) {
    const number = Number(raw.number ?? 0);
    if (!(Number.isFinite(number) && number > 0)) {
        throw new Error("Invalid pull request number returned by gh");
    }
    const epoch = new Date(0).toISOString();
    const createdAt = raw.createdAt ?? epoch;
    const pr = {
        repoOwner,
        repoName,
        number,
        title: raw.title?.trim() ?? "",
        body: raw.body?.trim() ?? "",
        author: raw.author?.login ?? "unknown",
        state: raw.state ?? "UNKNOWN",
        createdAt,
        // Fall back to creation time when GitHub reports no update time.
        updatedAt: raw.updatedAt ?? createdAt,
        mergedAt: raw.mergedAt ?? null,
        url: raw.url ?? "",
    };
    const comments = parseComments(number, raw.comments);
    const reviews = parseReviews(number, raw.reviews);
    const decisions = createDecisionRecords({ pr, comments, reviews });
    return { pr, comments, reviews, decisions };
}
133
/**
 * Lists the numbers of the most recently merged PRs via `gh pr list`,
 * keeping only positive, finite values from the JSON response.
 */
function listRecentPullRequestNumbers(repoPath, repoOwner, repoName, limit) {
    const args = [
        "pr",
        "list",
        "-R",
        `${repoOwner}/${repoName}`,
        "--state",
        "merged",
        "--limit",
        String(limit),
        "--json",
        "number",
    ];
    const rows = JSON.parse(runGh(repoPath, args));
    const numbers = [];
    for (const row of rows) {
        const value = Number(row.number ?? 0);
        if (Number.isFinite(value) && value > 0) {
            numbers.push(value);
        }
    }
    return numbers;
}
151
/**
 * Fetches one pull request (metadata plus comments and reviews) as raw JSON
 * via `gh pr view --json`.
 */
function fetchPullRequest(repoPath, repoOwner, repoName, prNumber) {
    const fields = "number,title,body,author,state,createdAt,updatedAt,mergedAt,url,comments,reviews";
    const output = runGh(repoPath, [
        "pr",
        "view",
        String(prNumber),
        "-R",
        `${repoOwner}/${repoName}`,
        "--json",
        fields,
    ]);
    return JSON.parse(output);
}
163
/**
 * Pulls merged-PR metadata, comments, and reviews from GitHub via the `gh`
 * CLI, promotes decision-like content into decision records, and persists
 * everything into the local SQLite database in a single transaction.
 *
 * @param options.repoPath  Local checkout; working directory for `gh`.
 * @param options.dbPath    SQLite database file to write to.
 * @param options.repoOwner GitHub repository owner/organization.
 * @param options.repoName  GitHub repository name.
 * @param options.prNumbers Explicit PR numbers to sync; when omitted, the
 *                          most recently merged PRs are listed instead.
 * @param options.limit     How many recent merged PRs to list (default 20).
 * @returns Summary counts plus the sync timestamp.
 */
export async function syncPullRequestContext(options) {
    const repoPath = path.resolve(options.repoPath);
    const dbPath = path.resolve(options.dbPath);
    const limit = Number.isFinite(options.limit) ? Number(options.limit) : 20;
    const prNumbers = options.prNumbers && options.prNumbers.length > 0
        ? options.prNumbers
        : listRecentPullRequestNumbers(repoPath, options.repoOwner, options.repoName, limit);
    // Fetch and parse everything BEFORE opening the database transaction:
    // `gh` performs network I/O, and running it inside a write transaction
    // would hold the SQLite write lock across every HTTP round-trip.
    const parsedPrs = prNumbers.map((prNumber) => parsePullRequest(options.repoOwner, options.repoName, fetchPullRequest(repoPath, options.repoOwner, options.repoName, prNumber)));
    const db = openDatabase(dbPath);
    let syncedPrs = 0;
    let syncedComments = 0;
    let syncedReviews = 0;
    let promotedDecisions = 0;
    try {
        const tx = db.transaction(() => {
            for (const parsed of parsedPrs) {
                upsertPullRequest(db, parsed.pr);
                replacePullRequestComments(db, options.repoOwner, options.repoName, parsed.pr.number, parsed.comments);
                replacePullRequestReviews(db, options.repoOwner, options.repoName, parsed.pr.number, parsed.reviews);
                replacePullRequestDecisions(db, options.repoOwner, options.repoName, parsed.pr.number, parsed.decisions);
                syncedPrs += 1;
                syncedComments += parsed.comments.length;
                syncedReviews += parsed.reviews.length;
                promotedDecisions += parsed.decisions.length;
            }
            touchPullRequestSyncState(db, options.repoOwner, options.repoName);
        });
        tx();
    }
    finally {
        // Always release the handle: the previous version leaked the open
        // database connection whenever a write inside tx() threw.
        db.close();
    }
    return {
        syncedPrs,
        syncedComments,
        syncedReviews,
        promotedDecisions,
        repoOwner: options.repoOwner,
        repoName: options.repoName,
        syncedAt: new Date().toISOString(),
    };
}
package/dist/types.d.ts CHANGED
@@ -24,3 +24,63 @@ export type IndexSummary = {
24
24
  indexedChunks: number;
25
25
  skippedChunks: number;
26
26
  };
27
/** A pull request as persisted locally (shape mirrors `gh pr view --json`). */
export type PullRequestRecord = {
    repoOwner: string;
    repoName: string;
    number: number;
    title: string;
    body: string;
    /** GitHub login of the PR author; "unknown" when missing from the payload. */
    author: string;
    state: string;
    /** ISO-8601 timestamp. */
    createdAt: string;
    /** ISO-8601 timestamp; falls back to createdAt when GitHub omits it. */
    updatedAt: string;
    /** ISO-8601 timestamp, or null when the PR has not been merged. */
    mergedAt: string | null;
    url: string;
};
/** A single PR conversation comment, normalized from the `gh` JSON payload. */
export type PullRequestCommentRecord = {
    /** GitHub comment id as a string, or a synthetic "<pr>-comment-<n>" id. */
    id: string;
    prNumber: number;
    author: string;
    body: string;
    createdAt: string;
    updatedAt: string;
    url: string;
};
/** A single PR review, normalized from the `gh` JSON payload. */
export type PullRequestReviewRecord = {
    /** GitHub review id as a string, or a synthetic "<pr>-review-<n>" id. */
    id: string;
    prNumber: number;
    author: string;
    /** Review verdict, e.g. APPROVED / CHANGES_REQUESTED / COMMENTED. */
    state: string;
    body: string;
    submittedAt: string;
};
/** Decision-like content promoted from a PR description, comment, or review. */
export type PullRequestDecisionRecord = {
    id: string;
    prNumber: number;
    /** Where the decision text was extracted from. */
    source: "description" | "comment" | "review";
    author: string;
    /** Whitespace-collapsed summary, capped at 280 characters. */
    summary: string;
    /** Heuristic severity; CHANGES_REQUESTED reviews are always "blocker". */
    severity: "info" | "warning" | "blocker";
    createdAt: string;
};
/** Aggregate counts returned by a pull-request context sync run. */
export type PullRequestSyncSummary = {
    syncedPrs: number;
    syncedComments: number;
    syncedReviews: number;
    promotedDecisions: number;
    repoOwner: string;
    repoName: string;
    /** ISO-8601 timestamp of when the sync completed. */
    syncedAt: string;
};
/** A git worktree as reported by the repository. */
export type WorktreeRecord = {
    path: string;
    branch: string;
    headSha: string;
    /** True when this worktree is the one the tool is running in. */
    isCurrent: boolean;
};
/** Persisted sync state for a feature-branch worktree session. */
export type WorktreeSessionRecord = {
    path: string;
    branch: string;
    /** ISO-8601 timestamp of the last sync for this worktree. */
    lastSyncedAt: string;
    /** Branch the feature session is based on (e.g. main). */
    baseBranch: string;
};
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@jussmor/commit-memory-mcp",
3
- "version": "0.3.5",
3
+ "version": "0.3.6",
4
4
  "mcpName": "io.github.jussmor/commit-memory",
5
5
  "description": "Commit-aware RAG with sqlite-vec and MCP tools for local agent workflows",
6
6
  "license": "MIT",