@jussmor/commit-memory-mcp 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,49 @@
1
+ # @jussmor/commit-memory-mcp
2
+
3
+ Local commit-aware RAG package powered by sqlite-vec with MCP tool endpoints.
4
+
5
+ ## Features
6
+
7
+ - Indexes git commits across branches into commit-file chunks
8
+ - Embeds chunks using a local embedding source (Ollama when configured)
9
+ - Stores vectors in sqlite-vec (SQLite)
10
+ - Exposes MCP tools for agent workflows
11
+
12
+ ## MCP tools
13
+
14
+ - `search_related_commits`
15
+ - `explain_commit_match`
16
+ - `get_commit_diff`
17
+
18
+ ## Quick start
19
+
20
+ ```bash
21
+ npm install @jussmor/commit-memory-mcp
22
+ npx commit-memory-index --repo /path/to/repo --db /path/to/repo/.commit-rag.db --limit 400
23
+ npx commit-memory-mcp
24
+ ```
25
+
26
+ ## Publish
27
+
28
+ ```bash
29
+ npm run build
30
+ npm publish --access public
31
+ mcp-publisher login github
32
+ mcp-publisher publish
33
+ ```
34
+
35
+ ## VS Code MCP registration
36
+
37
+ Copy `mcp.config.example.json` entries into your user MCP config and adjust paths/env values.
38
+
39
+ For MCP Registry publication, keep `package.json` `mcpName` and `server.json` `name` in sync.
40
+
41
+ ## Environment
42
+
43
+ - `COMMIT_RAG_REPO` default repository path for MCP
44
+ - `COMMIT_RAG_DB` sqlite db path
45
+ - `COMMIT_RAG_LIMIT` max commits to index per run
46
+ - `OLLAMA_BASE_URL` local ollama URL (default `http://127.0.0.1:11434`)
47
+ - `OLLAMA_EMBED_MODEL` local embedding model name
48
+
49
+ If `OLLAMA_EMBED_MODEL` is not set, the package uses deterministic local fallback embeddings.
@@ -0,0 +1,2 @@
#!/usr/bin/env node
// Intentionally empty module stub; `export {}` marks this file as an ES module.
export {};
@@ -0,0 +1,44 @@
1
+ #!/usr/bin/env node
2
+ import path from "node:path";
3
+ import { indexRepository } from "../indexer.js";
/**
 * Parse CLI flags for the indexer.
 *
 * Supported flags: --repo <path>, --db <path>, --limit <positive int>.
 * Throws on unknown flags, on flags missing their value, and on a
 * non-positive --limit. Defaults: cwd repo, ./.commit-rag.db, 400 commits.
 */
function parseArgs(argv) {
  const args = {
    repoPath: process.cwd(),
    dbPath: path.resolve(process.cwd(), ".commit-rag.db"),
    limit: 400,
  };
  // Fail loudly when a flag has no value instead of silently using a default
  // (previously `--repo` as the last argument quietly resolved to cwd).
  const takeValue = (flag, index) => {
    const value = argv[index + 1];
    if (value === undefined) {
      throw new Error(`Missing value for ${flag}`);
    }
    return value;
  };
  for (let i = 0; i < argv.length; i += 1) {
    const arg = argv[i];
    if (arg === "--repo") {
      args.repoPath = path.resolve(takeValue(arg, i));
      i += 1;
      continue;
    }
    if (arg === "--db") {
      args.dbPath = path.resolve(takeValue(arg, i));
      i += 1;
      continue;
    }
    if (arg === "--limit") {
      const value = Number.parseInt(takeValue(arg, i), 10);
      if (!Number.isFinite(value) || value <= 0) {
        throw new Error("Invalid --limit value");
      }
      args.limit = value;
      i += 1;
      continue;
    }
    throw new Error(`Unknown argument: ${arg}`);
  }
  return args;
}
// CLI entry point: parse flags, run the indexer, and print the summary as JSON.
async function main() {
  const options = parseArgs(process.argv.slice(2));
  const summary = await indexRepository(options);
  process.stdout.write(JSON.stringify(summary, null, 2) + "\n");
}

// Report failures on stderr and set a non-zero exit code, letting the
// event loop drain instead of calling process.exit().
main().catch((error) => {
  const message = error instanceof Error ? error.message : "Unknown error";
  process.stderr.write(message + "\n");
  process.exitCode = 1;
});
@@ -0,0 +1,7 @@
// Type declarations for the SQLite storage layer (dist/db/client.js).
import Database from "better-sqlite3";
import type { CommitChunk } from "../types.js";
// Alias so callers do not depend on better-sqlite3 types directly.
export type RagDatabase = Database.Database;
// Opens (or creates) the database at dbPath with the full schema applied.
export declare function openDatabase(dbPath: string): RagDatabase;
// True when a chunk with this id has already been indexed.
export declare function hasChunk(db: RagDatabase, chunkId: string): boolean;
// Inserts or refreshes a chunk's commit row, chunk row, and embedding vector.
export declare function upsertChunk(db: RagDatabase, chunk: CommitChunk, embedding: number[]): void;
// Records the current time as the last successful indexing run.
export declare function touchIndexState(db: RagDatabase): void;
@@ -0,0 +1,92 @@
1
+ import Database from "better-sqlite3";
2
+ import fs from "node:fs";
3
+ import path from "node:path";
4
+ import { load } from "sqlite-vec";
// Open (or create) the SQLite database at dbPath, load the sqlite-vec
// extension, and ensure the full schema exists. Returns the live handle.
export function openDatabase(dbPath) {
  const absolutePath = path.resolve(dbPath);
  // SQLite will not create missing parent directories itself.
  fs.mkdirSync(path.dirname(absolutePath), { recursive: true });
  const db = new Database(absolutePath);
  load(db);
  // Schema: commit metadata, per-file chunks, a vec0 virtual table for
  // embeddings, a chunk_id <-> vec rowid mapping, and a single-row state table.
  db.exec(`
    CREATE TABLE IF NOT EXISTS commits (
      sha TEXT PRIMARY KEY,
      author TEXT NOT NULL,
      date TEXT NOT NULL,
      subject TEXT NOT NULL,
      body TEXT NOT NULL
    );

    CREATE TABLE IF NOT EXISTS commit_chunks (
      chunk_id TEXT PRIMARY KEY,
      sha TEXT NOT NULL,
      file_path TEXT NOT NULL,
      hunk_text TEXT NOT NULL,
      indexed_text TEXT NOT NULL,
      FOREIGN KEY (sha) REFERENCES commits(sha)
    );

    CREATE VIRTUAL TABLE IF NOT EXISTS chunk_vectors USING vec0(
      embedding FLOAT[384]
    );

    CREATE TABLE IF NOT EXISTS chunk_vector_map (
      chunk_id TEXT PRIMARY KEY,
      vec_rowid INTEGER NOT NULL UNIQUE,
      FOREIGN KEY (chunk_id) REFERENCES commit_chunks(chunk_id)
    );

    CREATE TABLE IF NOT EXISTS index_state (
      id INTEGER PRIMARY KEY CHECK (id = 1),
      last_indexed_at TEXT NOT NULL
    );
  `);
  return db;
}
// True when a chunk with the given id is already stored.
export function hasChunk(db, chunkId) {
  const stmt = db.prepare("SELECT 1 AS ok FROM commit_chunks WHERE chunk_id = ? LIMIT 1");
  return stmt.get(chunkId) !== undefined;
}
// Insert or refresh a chunk: its commit metadata, the chunk row itself, and
// the embedding vector (stored as JSON text for sqlite-vec).
export function upsertChunk(db, chunk, embedding) {
  db.prepare(`
    INSERT INTO commits (sha, author, date, subject, body)
    VALUES (?, ?, ?, ?, ?)
    ON CONFLICT(sha) DO UPDATE SET
      author = excluded.author,
      date = excluded.date,
      subject = excluded.subject,
      body = excluded.body
  `).run(chunk.sha, chunk.author, chunk.date, chunk.subject, chunk.body);

  db.prepare(`
    INSERT INTO commit_chunks (chunk_id, sha, file_path, hunk_text, indexed_text)
    VALUES (?, ?, ?, ?, ?)
    ON CONFLICT(chunk_id) DO UPDATE SET
      sha = excluded.sha,
      file_path = excluded.file_path,
      hunk_text = excluded.hunk_text,
      indexed_text = excluded.indexed_text
  `).run(chunk.chunkId, chunk.sha, chunk.filePath, chunk.hunkText, chunk.indexedText);

  const vectorJson = JSON.stringify(embedding);
  const mapping = db
    .prepare("SELECT vec_rowid FROM chunk_vector_map WHERE chunk_id = ?")
    .get(chunk.chunkId);
  if (mapping) {
    // Reuse the existing vec0 row so chunk_vector_map stays consistent.
    db.prepare("UPDATE chunk_vectors SET embedding = ? WHERE rowid = ?").run(vectorJson, mapping.vec_rowid);
    return;
  }
  const inserted = db
    .prepare("INSERT INTO chunk_vectors (embedding) VALUES (?)")
    .run(vectorJson);
  db.prepare("INSERT INTO chunk_vector_map (chunk_id, vec_rowid) VALUES (?, ?)").run(chunk.chunkId, Number(inserted.lastInsertRowid));
}
// Stamp the single index_state row with the current time (last indexing run).
export function touchIndexState(db) {
  db.prepare(`
    INSERT INTO index_state (id, last_indexed_at)
    VALUES (1, ?)
    ON CONFLICT(id) DO UPDATE SET
      last_indexed_at = excluded.last_indexed_at
  `).run(new Date().toISOString());
}
@@ -0,0 +1,2 @@
import type { CommitChunk } from "../types.js";
// Walks up to `limit` commits across all refs and returns one chunk per
// (commit, file) pair that changed lines.
export declare function extractCommitChunks(repoPath: string, limit: number): CommitChunk[];
@@ -0,0 +1,97 @@
1
+ import { execFileSync } from "node:child_process";
2
+ import crypto from "node:crypto";
// Run a git subcommand inside repoPath and return its stdout as UTF-8 text.
// Throws (from execFileSync) when git exits non-zero.
function runGit(repoPath, args) {
  const gitArgs = ["-C", repoPath, ...args];
  return execFileSync("git", gitArgs, {
    encoding: "utf8",
    stdio: ["ignore", "pipe", "pipe"],
  });
}
/**
 * Extract the added/removed lines from a unified diff, capped at 120 lines
 * to keep chunks bounded.
 *
 * Only real file-header lines ("+++ b/...", "--- a/...", "--- /dev/null") are
 * excluded; the previous blanket startsWith("+++")/startsWith("---") check
 * also discarded genuinely changed lines such as "+++i;" (an added "++i;").
 */
function getChangedLines(patch) {
  // NOTE(review): assumes the default a/ b/ path prefixes from `git show`;
  // diffs produced with --no-prefix would need a different header pattern.
  const headerPattern = /^(\+\+\+|---) (a\/|b\/|\/dev\/null)/;
  return patch
    .split("\n")
    .filter((line) => (line.startsWith("+") || line.startsWith("-")) && !headerPattern.test(line))
    .slice(0, 120);
}
// Stable chunk identifier: "<sha>:<filePath>:<16 hex chars of sha256(text)>".
function createChunkId(sha, filePath, text) {
  const digest = crypto.createHash("sha256").update(text).digest("hex");
  return `${sha}:${filePath}:${digest.slice(0, 16)}`;
}
// Walk up to `limit` commits across all refs and produce one CommitChunk per
// (commit, file) pair that actually changed lines.
export function extractCommitChunks(repoPath, limit) {
  const logOutput = runGit(repoPath, [
    "log",
    "--all",
    "--format=%H",
    `-n${limit}`,
  ]).trim();
  if (!logOutput) {
    return [];
  }
  const shas = logOutput.split("\n").map((line) => line.trim()).filter(Boolean);
  const chunks = [];
  for (const sha of shas) {
    // %x1f (unit separator) is a safe delimiter for the metadata fields.
    const metaLine = runGit(repoPath, [
      "show",
      "--no-color",
      "--format=%an%x1f%aI%x1f%s%x1f%b",
      "--no-patch",
      sha,
    ]).trimEnd();
    const [author = "", date = "", subject = "", body = ""] = metaLine.split("\x1f");
    const nameOutput = runGit(repoPath, [
      "show",
      "--no-color",
      "--pretty=format:",
      "--name-only",
      sha,
    ]).trim();
    const uniqueFiles = new Set(nameOutput.split("\n").map((line) => line.trim()).filter(Boolean));
    for (const filePath of uniqueFiles) {
      // --unified=0 keeps only the changed lines themselves, no context.
      const patch = runGit(repoPath, [
        "show",
        "--no-color",
        "--pretty=format:",
        "--unified=0",
        sha,
        "--",
        filePath,
      ]).trim();
      const hunkText = getChangedLines(patch).join("\n");
      if (!hunkText) {
        continue; // e.g. renames or mode-only changes with no line edits
      }
      const indexedText = [
        `subject: ${subject}`,
        `body: ${body}`,
        `file: ${filePath}`,
        "changes:",
        hunkText,
      ].join("\n");
      chunks.push({
        chunkId: createChunkId(sha, filePath, indexedText),
        sha,
        author,
        date,
        subject,
        body,
        filePath,
        hunkText,
        indexedText,
      });
    }
  }
  return chunks;
}
@@ -0,0 +1,4 @@
// Public package surface: storage, indexing, and search APIs plus their types.
export { openDatabase } from "./db/client.js";
export { indexRepository } from "./indexer.js";
export { explainCommitMatch, searchRelatedCommits } from "./search/query.js";
export type { CommitChunk, IndexSummary, SearchResult } from "./types.js";
package/dist/index.js ADDED
@@ -0,0 +1,3 @@
// Public package surface: storage, indexing, and search APIs.
export { openDatabase } from "./db/client.js";
export { indexRepository } from "./indexer.js";
export { explainCommitMatch, searchRelatedCommits } from "./search/query.js";
@@ -0,0 +1,6 @@
import type { IndexSummary } from "./types.js";
/**
 * Index new commit chunks from the repository at `repoPath` into the SQLite
 * database at `dbPath`, reading at most `limit` commits per run.
 */
export declare function indexRepository(options: {
    repoPath: string;
    dbPath: string;
    limit: number;
}): Promise<IndexSummary>;
@@ -0,0 +1,37 @@
1
+ import path from "node:path";
2
+ import { hasChunk, openDatabase, touchIndexState, upsertChunk, } from "./db/client.js";
3
+ import { extractCommitChunks } from "./git/extract.js";
4
+ import { embedText } from "./search/embeddings.js";
/**
 * Index new commit chunks from the repository into the SQLite store.
 *
 * Embeddings are computed before the write transaction (they may hit the
 * network via Ollama); all rows are then committed atomically. Chunks whose
 * id already exists are skipped — ids hash the indexed text, so an existing
 * id means identical content. Returns {indexedCommits, indexedChunks,
 * skippedChunks}.
 *
 * Fix: the database handle is now closed in a finally block, so it no longer
 * leaks when extraction or an embedding request throws.
 */
export async function indexRepository(options) {
  const repoPath = path.resolve(options.repoPath);
  const dbPath = path.resolve(options.dbPath);
  const chunks = extractCommitChunks(repoPath, options.limit);
  const db = openDatabase(dbPath);
  try {
    let indexedChunks = 0;
    let skippedChunks = 0;
    const indexedCommits = new Set();
    const pendingInserts = [];
    for (const chunk of chunks) {
      if (hasChunk(db, chunk.chunkId)) {
        skippedChunks += 1;
        continue;
      }
      const embedding = await embedText(chunk.indexedText);
      pendingInserts.push({ chunk, embedding });
      indexedChunks += 1;
      indexedCommits.add(chunk.sha);
    }
    // Single transaction keeps chunk rows and vectors consistent on failure.
    const writeTransaction = db.transaction((rows) => {
      for (const row of rows) {
        upsertChunk(db, row.chunk, row.embedding);
      }
    });
    writeTransaction(pendingInserts);
    touchIndexState(db);
    return {
      indexedCommits: indexedCommits.size,
      indexedChunks,
      skippedChunks,
    };
  } finally {
    db.close();
  }
}
@@ -0,0 +1,2 @@
#!/usr/bin/env node
// Starts the MCP server over stdio; the promise resolves once the transport
// is connected.
export declare function startMcpServer(): Promise<void>;
@@ -0,0 +1,154 @@
#!/usr/bin/env node
import { execFileSync } from "node:child_process";
import path from "node:path";
import { pathToFileURL } from "node:url";
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import { CallToolRequestSchema, ListToolsRequestSchema, } from "@modelcontextprotocol/sdk/types.js";
import { openDatabase } from "../db/client.js";
import { indexRepository } from "../indexer.js";
import { explainCommitMatch, searchRelatedCommits } from "../search/query.js";
// Full `git show` output (stat + patch) for one commit in repoPath.
function runGitDiff(repoPath, sha) {
  const args = ["-C", repoPath, "show", "--no-color", "--stat", "--patch", sha];
  return execFileSync("git", args, { encoding: "utf8" });
}
/**
 * Resolve runtime configuration from environment variables, with defaults
 * rooted at the target repository: COMMIT_RAG_REPO (default cwd),
 * COMMIT_RAG_DB (default <repo>/.commit-rag.db), COMMIT_RAG_LIMIT (default 400).
 *
 * Fix: a zero, negative, or non-numeric COMMIT_RAG_LIMIT now falls back to
 * 400 instead of being passed through to `git log -n<value>`.
 */
function getConfig() {
  const repoPath = path.resolve(process.env.COMMIT_RAG_REPO ?? process.cwd());
  const dbPath = path.resolve(process.env.COMMIT_RAG_DB ?? path.join(repoPath, ".commit-rag.db"));
  const parsedLimit = Number.parseInt(process.env.COMMIT_RAG_LIMIT ?? "", 10);
  const limit = Number.isFinite(parsedLimit) && parsedLimit > 0 ? parsedLimit : 400;
  return { repoPath, dbPath, limit };
}
// Launch the MCP server over stdio, advertising and dispatching the
// commit-memory tools: search, explain, diff, and reindex.
export async function startMcpServer() {
  const server = new Server(
    { name: "commit-memory-mcp", version: "0.3.1" },
    { capabilities: { tools: {} } },
  );

  // Static tool catalogue advertised to clients via tools/list.
  const toolDefinitions = [
    {
      name: "search_related_commits",
      description: "Find commit chunks semantically related to current work context.",
      inputSchema: {
        type: "object",
        properties: {
          query: { type: "string" },
          activeFile: { type: "string" },
          limit: { type: "number" },
        },
        required: ["query"],
      },
    },
    {
      name: "explain_commit_match",
      description: "Return contextual details for a chunk match.",
      inputSchema: {
        type: "object",
        properties: {
          chunkId: { type: "string" },
        },
        required: ["chunkId"],
      },
    },
    {
      name: "get_commit_diff",
      description: "Get full git show output for a commit SHA.",
      inputSchema: {
        type: "object",
        properties: {
          sha: { type: "string" },
        },
        required: ["sha"],
      },
    },
    {
      name: "reindex_commits",
      description: "Refresh commit index from git history.",
      inputSchema: {
        type: "object",
        properties: {
          limit: { type: "number" },
        },
        required: [],
      },
    },
  ];
  server.setRequestHandler(ListToolsRequestSchema, async () => ({
    tools: toolDefinitions,
  }));

  server.setRequestHandler(CallToolRequestSchema, async (request) => {
    const { repoPath, dbPath, limit: defaultLimit } = getConfig();
    const toolName = request.params.name;
    const toolArgs = request.params.arguments;

    // Reindexing manages its own database handle inside indexRepository.
    if (toolName === "reindex_commits") {
      const limit = Number(toolArgs?.limit ?? defaultLimit);
      const summary = await indexRepository({ repoPath, dbPath, limit });
      return {
        content: [{ type: "text", text: JSON.stringify(summary, null, 2) }],
      };
    }

    // The remaining tools share a short-lived read handle.
    const db = openDatabase(dbPath);
    try {
      if (toolName === "search_related_commits") {
        const query = String(toolArgs?.query ?? "").trim();
        if (!query) {
          return {
            content: [{ type: "text", text: "query is required" }],
            isError: true,
          };
        }
        const activeFile = toolArgs?.activeFile
          ? String(toolArgs.activeFile)
          : undefined;
        const limit = Number(toolArgs?.limit ?? 8);
        const results = await searchRelatedCommits(db, query, limit, activeFile);
        return {
          content: [{ type: "text", text: JSON.stringify(results, null, 2) }],
        };
      }
      if (toolName === "explain_commit_match") {
        const chunkId = String(toolArgs?.chunkId ?? "").trim();
        if (!chunkId) {
          return {
            content: [{ type: "text", text: "chunkId is required" }],
            isError: true,
          };
        }
        const result = explainCommitMatch(db, chunkId);
        return {
          content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
        };
      }
      if (toolName === "get_commit_diff") {
        const sha = String(toolArgs?.sha ?? "").trim();
        if (!sha) {
          return {
            content: [{ type: "text", text: "sha is required" }],
            isError: true,
          };
        }
        return {
          content: [{ type: "text", text: runGitDiff(repoPath, sha) }],
        };
      }
      return {
        content: [
          { type: "text", text: `Unknown tool: ${toolName}` },
        ],
        isError: true,
      };
    } finally {
      db.close();
    }
  });

  const transport = new StdioServerTransport();
  await server.connect(transport);
}
// Run the server when this module is executed directly (e.g. via the
// `commit-memory-mcp` bin), but not when imported as a library.
//
// Fix: compare against pathToFileURL(argv[1]) instead of the hand-built
// `file://${argv[1]}`, which never matches on Windows drive paths or any
// path containing percent-encoded characters (e.g. spaces).
const invokedScript = process.argv[1];
if (invokedScript && import.meta.url === pathToFileURL(invokedScript).href) {
  startMcpServer().catch((error) => {
    const message = error instanceof Error ? error.message : "Unknown error";
    process.stderr.write(`${message}\n`);
    process.exitCode = 1;
  });
}
@@ -0,0 +1,2 @@
// Embeds text via Ollama when OLLAMA_EMBED_MODEL is set, otherwise with a
// deterministic local fallback embedding.
export declare function embedText(text: string): Promise<number[]>;
// Vector dimension the caller should expect from embedText.
export declare function getExpectedDimension(): number;
@@ -0,0 +1,60 @@
1
+ const FALLBACK_DIMENSION = 384;
// FNV-style 32-bit hash of a token; always returns a non-negative integer.
// (`>>> 0` already coerces into [0, 2^32), so no abs() is needed.)
function hashToken(token) {
  let hash = 2166136261;
  for (let i = 0; i < token.length; i += 1) {
    hash ^= token.charCodeAt(i);
    hash += (hash << 1) + (hash << 4) + (hash << 7) + (hash << 8) + (hash << 24);
  }
  return hash >>> 0;
}
// Scale a vector to unit length; zero or degenerate vectors are returned
// unchanged.
function normalize(values) {
  let sumOfSquares = 0;
  for (const value of values) {
    sumOfSquares += value * value;
  }
  const magnitude = Math.sqrt(sumOfSquares);
  if (!Number.isFinite(magnitude) || magnitude === 0) {
    return values;
  }
  return values.map((value) => value / magnitude);
}
// Deterministic bag-of-words embedding used when no Ollama model is
// configured: hash each lowercase token into one of FALLBACK_DIMENSION
// buckets, count occurrences, then L2-normalize.
function fallbackEmbedding(text) {
  const vector = new Array(FALLBACK_DIMENSION).fill(0);
  const tokens = text.toLowerCase().split(/[^a-z0-9_]+/).filter(Boolean);
  for (const token of tokens) {
    vector[hashToken(token) % FALLBACK_DIMENSION] += 1;
  }
  return normalize(vector);
}
// Embed text with the configured Ollama model, or fall back to the local
// deterministic embedding when OLLAMA_EMBED_MODEL is unset. The result is
// always L2-normalized. Throws on HTTP failure or a malformed response.
export async function embedText(text) {
  const model = process.env.OLLAMA_EMBED_MODEL;
  if (!model) {
    return fallbackEmbedding(text);
  }
  const baseUrl = process.env.OLLAMA_BASE_URL ?? "http://127.0.0.1:11434";
  const response = await fetch(`${baseUrl}/api/embeddings`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ model, prompt: text }),
  });
  if (!response.ok) {
    throw new Error(`Embedding request failed: ${response.status}`);
  }
  const payload = (await response.json());
  if (!payload.embedding || !Array.isArray(payload.embedding)) {
    throw new Error("Embedding response missing vector");
  }
  return normalize(payload.embedding);
}
// Expected embedding dimension: COMMIT_RAG_DIMENSION (when an Ollama model
// is configured and the value parses truthy), otherwise the fallback size.
export function getExpectedDimension() {
  if (!process.env.OLLAMA_EMBED_MODEL) {
    return FALLBACK_DIMENSION;
  }
  const configured = Number.parseInt(process.env.COMMIT_RAG_DIMENSION ?? "", 10);
  return configured || FALLBACK_DIMENSION;
}
@@ -0,0 +1,4 @@
import type { Database } from "better-sqlite3";
import type { SearchResult } from "../types.js";
// Vector search over indexed commit chunks (with a substring fallback when
// the vector query fails); results are boosted toward activeFile.
export declare function searchRelatedCommits(db: Database, query: string, limit: number, activeFile?: string): Promise<SearchResult[]>;
// Full details for a single chunk id, or null when it is unknown.
export declare function explainCommitMatch(db: Database, chunkId: string): SearchResult | null;
@@ -0,0 +1,115 @@
1
+ import { embedText } from "./embeddings.js";
/**
 * Boost a base similarity score using the caller's active file:
 * +0.2 for an exact path match, +0.05 for a file under the same directory.
 *
 * Fix: the directory check previously used a bare prefix test, so an active
 * file in "src/foo/" also boosted rows under "src/foobar/". Comparing
 * against "dir/" restricts the boost to the actual directory subtree.
 */
function scoreWithBoost(base, row, activeFile) {
  let score = base;
  if (!activeFile) {
    return score;
  }
  if (row.file_path === activeFile) {
    score += 0.2;
  }
  const parentDir = activeFile.split("/").slice(0, -1).join("/");
  if (parentDir && row.file_path.startsWith(`${parentDir}/`)) {
    score += 0.05;
  }
  return score;
}
// Short result preview: the first six lines of the hunk.
function createPreview(hunkText) {
  const lines = hunkText.split("\n");
  return lines.slice(0, 6).join("\n");
}
// KNN search over chunk embeddings via sqlite-vec. When the vector query
// fails (extension unavailable, empty index, ...), fall back to a substring
// match ordered by commit date with rank-decayed scores.
export async function searchRelatedCommits(db, query, limit, activeFile) {
  const queryVector = JSON.stringify(await embedText(query));
  try {
    const rows = db
      .prepare(`
        SELECT
          v.distance AS distance,
          c.chunk_id AS chunk_id,
          c.sha AS sha,
          c.file_path AS file_path,
          cm.subject AS subject,
          cm.date AS date,
          cm.author AS author,
          c.hunk_text AS hunk_text
        FROM chunk_vectors v
        JOIN chunk_vector_map m ON m.vec_rowid = v.rowid
        JOIN commit_chunks c ON c.chunk_id = m.chunk_id
        JOIN commits cm ON cm.sha = c.sha
        WHERE v.embedding MATCH ? AND k = ?
      `)
      .all(queryVector, limit);
    return rows.map((row) => ({
      chunkId: row.chunk_id,
      sha: row.sha,
      filePath: row.file_path,
      subject: row.subject,
      // Map distance (0 = identical) into a (0, 1] score before boosting.
      score: scoreWithBoost(1 / (1 + Math.max(0, row.distance)), row, activeFile),
      date: row.date,
      author: row.author,
      preview: createPreview(row.hunk_text),
    }));
  }
  catch {
    const rows = db
      .prepare(`
        SELECT
          c.chunk_id AS chunk_id,
          c.sha AS sha,
          c.file_path AS file_path,
          cm.subject AS subject,
          cm.date AS date,
          cm.author AS author,
          c.hunk_text AS hunk_text
        FROM commit_chunks c
        JOIN commits cm ON cm.sha = c.sha
        WHERE c.indexed_text LIKE ?
        ORDER BY cm.date DESC
        LIMIT ?
      `)
      .all(`%${query}%`, limit);
    return rows.map((row, idx) => ({
      chunkId: row.chunk_id,
      sha: row.sha,
      filePath: row.file_path,
      subject: row.subject,
      score: scoreWithBoost(Math.max(0.01, 1 - idx / (limit + 1)), row, activeFile),
      date: row.date,
      author: row.author,
      preview: createPreview(row.hunk_text),
    }));
  }
}
// Look up a single chunk by id and return its full context with a fixed
// score of 1, or null when the id is unknown.
export function explainCommitMatch(db, chunkId) {
  const row = db
    .prepare(`
      SELECT
        c.chunk_id AS chunk_id,
        c.sha AS sha,
        c.file_path AS file_path,
        cm.subject AS subject,
        cm.date AS date,
        cm.author AS author,
        c.hunk_text AS hunk_text
      FROM commit_chunks c
      JOIN commits cm ON cm.sha = c.sha
      WHERE c.chunk_id = ?
      LIMIT 1
    `)
    .get(chunkId);
  if (!row) {
    return null;
  }
  return {
    chunkId: row.chunk_id,
    sha: row.sha,
    filePath: row.file_path,
    subject: row.subject,
    score: 1,
    date: row.date,
    author: row.author,
    preview: createPreview(row.hunk_text),
  };
}
@@ -0,0 +1,26 @@
/** One indexed unit: the changed lines of a single file in a single commit. */
export type CommitChunk = {
    chunkId: string; // "<sha>:<filePath>:<16-hex content hash>" — stable id
    sha: string;
    author: string;
    date: string; // ISO-8601 author date (git %aI)
    subject: string;
    body: string;
    filePath: string;
    hunkText: string; // changed lines only ("+"/"-" prefixed)
    indexedText: string; // text that is embedded: subject/body/file + changes
};
/** A scored hit returned by the search layer. */
export type SearchResult = {
    chunkId: string;
    sha: string;
    filePath: string;
    subject: string;
    score: number; // similarity plus active-file boosts; higher is better
    date: string;
    author: string;
    preview: string; // first few lines of the hunk
};
/** Counters reported by indexRepository. */
export type IndexSummary = {
    indexedCommits: number;
    indexedChunks: number;
    skippedChunks: number; // chunks that were already in the database
};
package/dist/types.js ADDED
@@ -0,0 +1 @@
// Runtime stub for a types-only module; `export {}` marks it as an ES module.
export {};
package/package.json ADDED
@@ -0,0 +1,55 @@
1
+ {
2
+ "name": "@jussmor/commit-memory-mcp",
3
+ "version": "0.3.1",
4
+ "mcpName": "io.github.jussmor/commit-memory",
5
+ "description": "Commit-aware RAG with sqlite-vec and MCP tools for local agent workflows",
6
+ "license": "MIT",
7
+ "repository": {
8
+ "type": "git",
9
+ "url": "https://github.com/JussMor/commit-memory-mcp.git"
10
+ },
11
+ "homepage": "https://github.com/JussMor/commit-memory-mcp",
12
+ "publishConfig": {
13
+ "access": "public"
14
+ },
15
+ "type": "module",
16
+ "main": "./dist/index.js",
17
+ "types": "./dist/index.d.ts",
18
+ "exports": {
19
+ ".": {
20
+ "types": "./dist/index.d.ts",
21
+ "import": "./dist/index.js"
22
+ },
23
+ "./mcp": {
24
+ "types": "./dist/mcp/server.d.ts",
25
+ "import": "./dist/mcp/server.js"
26
+ }
27
+ },
28
+ "bin": {
29
+ "commit-memory-mcp": "./dist/mcp/server.js",
30
+ "commit-memory-index": "./dist/cli/index.js"
31
+ },
32
+ "files": [
33
+ "dist",
34
+ "server.json"
35
+ ],
36
+ "engines": {
37
+ "node": ">=20"
38
+ },
39
+ "scripts": {
40
+ "build": "tsc -p tsconfig.json",
41
+ "clean": "rm -rf dist",
42
+ "typecheck": "tsc --noEmit -p tsconfig.json",
43
+ "prepublishOnly": "npm run clean && npm run build"
44
+ },
45
+ "dependencies": {
46
+ "@modelcontextprotocol/sdk": "^1.11.0",
47
+ "better-sqlite3": "^11.10.0",
48
+ "sqlite-vec": "^0.1.6"
49
+ },
50
+ "devDependencies": {
51
+ "@types/better-sqlite3": "^7.6.13",
52
+ "@types/node": "^22.15.3",
53
+ "typescript": "^5.8.3"
54
+ }
55
+ }
package/server.json ADDED
@@ -0,0 +1,58 @@
1
+ {
2
+ "$schema": "https://static.modelcontextprotocol.io/schemas/2025-12-11/server.schema.json",
3
+ "name": "io.github.jussmor/commit-memory",
4
+ "description": "Local commit-aware RAG MCP server for semantic git history search using SQLite vectors.",
5
+ "repository": {
6
+ "url": "https://github.com/JussMor/commit-memory-mcp",
7
+ "source": "github"
8
+ },
9
+ "version": "0.3.1",
10
+ "packages": [
11
+ {
12
+ "registryType": "npm",
13
+ "identifier": "@jussmor/commit-memory-mcp",
14
+ "version": "0.3.1",
15
+ "transport": {
16
+ "type": "stdio"
17
+ },
18
+ "runtimeHint": "npx",
19
+ "environmentVariables": [
20
+ {
21
+ "name": "COMMIT_RAG_REPO",
22
+ "description": "Absolute path to the target git repository.",
23
+ "format": "string",
24
+ "isRequired": true,
25
+ "isSecret": false
26
+ },
27
+ {
28
+ "name": "COMMIT_RAG_DB",
29
+ "description": "Absolute path to the SQLite database file.",
30
+ "format": "string",
31
+ "isRequired": false,
32
+ "isSecret": false
33
+ },
34
+ {
35
+ "name": "COMMIT_RAG_LIMIT",
36
+ "description": "Maximum number of commits to index per run.",
37
+ "format": "string",
38
+ "isRequired": false,
39
+ "isSecret": false
40
+ },
41
+ {
42
+ "name": "OLLAMA_BASE_URL",
43
+ "description": "Ollama base URL for embedding requests.",
44
+ "format": "string",
45
+ "isRequired": false,
46
+ "isSecret": false
47
+ },
48
+ {
49
+ "name": "OLLAMA_EMBED_MODEL",
50
+ "description": "Embedding model name to use with Ollama.",
51
+ "format": "string",
52
+ "isRequired": false,
53
+ "isSecret": false
54
+ }
55
+ ]
56
+ }
57
+ ]
58
+ }