@hir4ta/mneme 0.17.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/plugin.json +29 -0
- package/.mcp.json +18 -0
- package/README.ja.md +400 -0
- package/README.md +410 -0
- package/bin/mneme.js +203 -0
- package/dist/lib/db.js +340 -0
- package/dist/lib/fuzzy-search.js +214 -0
- package/dist/lib/github.js +121 -0
- package/dist/lib/similarity.js +193 -0
- package/dist/lib/utils.js +62 -0
- package/dist/public/apple-touch-icon.png +0 -0
- package/dist/public/assets/index-BgqCALAg.css +1 -0
- package/dist/public/assets/index-EMvn4VEa.js +330 -0
- package/dist/public/assets/react-force-graph-2d-DWoBaKmT.js +46 -0
- package/dist/public/favicon-128-max.png +0 -0
- package/dist/public/favicon-256-max.png +0 -0
- package/dist/public/favicon-32-max.png +0 -0
- package/dist/public/favicon-512-max.png +0 -0
- package/dist/public/favicon-64-max.png +0 -0
- package/dist/public/index.html +15 -0
- package/dist/server.js +4791 -0
- package/dist/servers/db-server.js +30558 -0
- package/dist/servers/search-server.js +30366 -0
- package/hooks/default-tags.json +1055 -0
- package/hooks/hooks.json +61 -0
- package/hooks/post-tool-use.sh +96 -0
- package/hooks/pre-compact.sh +187 -0
- package/hooks/session-end.sh +567 -0
- package/hooks/session-start.sh +380 -0
- package/hooks/user-prompt-submit.sh +253 -0
- package/package.json +77 -0
- package/servers/db-server.ts +993 -0
- package/servers/search-server.ts +675 -0
- package/skills/AGENTS.override.md +5 -0
- package/skills/harvest/skill.md +295 -0
- package/skills/init-mneme/skill.md +101 -0
- package/skills/plan/skill.md +422 -0
- package/skills/report/skill.md +74 -0
- package/skills/resume/skill.md +278 -0
- package/skills/review/skill.md +419 -0
- package/skills/save/skill.md +482 -0
- package/skills/search/skill.md +175 -0
- package/skills/using-mneme/skill.md +185 -0
package/dist/lib/db.js
ADDED
|
@@ -0,0 +1,340 @@
|
|
|
1
|
+
// lib/db.ts
|
|
2
|
+
import { execSync } from "node:child_process";
|
|
3
|
+
import { existsSync, mkdirSync, readFileSync } from "node:fs";
|
|
4
|
+
import { dirname, join } from "node:path";
|
|
5
|
+
import { fileURLToPath } from "node:url";
|
|
6
|
+
// Suppress Node's ExperimentalWarning for node:sqlite so CLI/hook output
// stays clean; every other event and warning is forwarded untouched.
var originalEmit = process.emit;
process.emit = (event, ...args) => {
  if (event === "warning" && typeof args[0] === "object" && args[0] !== null && "name" in args[0] && args[0].name === "ExperimentalWarning" && "message" in args[0] && typeof args[0].message === "string" && args[0].message.includes("SQLite")) {
    // Swallow only the SQLite experimental warning.
    return false;
  }
  return originalEmit.apply(process, [event, ...args]);
};
// Imported dynamically AFTER the filter is installed so the import itself
// cannot print the warning (top-level await — this file must load as ESM).
var { DatabaseSync } = await import("node:sqlite");
// ESM has no __filename/__dirname; reconstruct them for schema lookup below.
var __filename = fileURLToPath(import.meta.url);
var __dirname = dirname(__filename);
|
|
16
|
+
/**
 * Best-effort identity of the current user: git user.name first, then the
 * OS username, then the literal "unknown" when both commands fail.
 */
function getCurrentUser() {
  for (const cmd of ["git config user.name", "whoami"]) {
    try {
      return execSync(cmd, { encoding: "utf-8" }).trim();
    } catch {
      // fall through to the next source
    }
  }
  return "unknown";
}
|
|
27
|
+
/** Absolute path of the project-local SQLite database file. */
function getLocalDbPath(projectPath) {
  return join(projectPath, ".mneme", "local.db");
}

/** Apply connection pragmas: WAL journaling, 5s busy timeout, NORMAL sync. */
function configurePragmas(db) {
  const pragmas = [
    "PRAGMA journal_mode = WAL",
    "PRAGMA busy_timeout = 5000",
    "PRAGMA synchronous = NORMAL"
  ];
  for (const pragma of pragmas) {
    db.exec(pragma);
  }
}
|
|
35
|
+
/**
 * Create or open the project-local database at <project>/.mneme/local.db.
 * Ensures the .mneme directory exists, applies connection pragmas, and
 * executes the bundled schema.sql (located next to this compiled file)
 * when present.
 * NOTE(review): if schema.sql is absent the DB is returned without any
 * tables being created here — presumably the schema ships with the package
 * and its statements are idempotent; confirm against the bundle.
 */
function initLocalDatabase(projectPath) {
  const mnemeDir = join(projectPath, ".mneme");
  if (!existsSync(mnemeDir)) {
    mkdirSync(mnemeDir, { recursive: true });
  }
  const dbPath = getLocalDbPath(projectPath);
  const db = new DatabaseSync(dbPath);
  configurePragmas(db);
  // schema.sql sits alongside the compiled db.js inside the package.
  const schemaPath = join(__dirname, "schema.sql");
  if (existsSync(schemaPath)) {
    const schema = readFileSync(schemaPath, "utf-8");
    db.exec(schema);
  }
  return db;
}
|
|
50
|
+
/**
 * Open the project-local database if its file already exists.
 * Returns null (rather than creating the file) when it does not.
 */
function openLocalDatabase(projectPath) {
  const file = getLocalDbPath(projectPath);
  if (!existsSync(file)) {
    return null;
  }
  const handle = new DatabaseSync(file);
  configurePragmas(handle);
  return handle;
}
|
|
59
|
+
/**
 * Collect git metadata for a project directory.
 * Returns { repository, repository_url, repository_root }; each field is
 * null when unavailable (not a git repo, or no "origin" remote).
 */
function getRepositoryInfo(projectPath) {
  const result = {
    repository: null,
    repository_url: null,
    repository_root: null
  };
  try {
    // Throws when projectPath is not inside a git work tree; stderr is
    // piped so the probe stays silent.
    execSync("git rev-parse --git-dir", {
      cwd: projectPath,
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"]
    });
    result.repository_root = execSync("git rev-parse --show-toplevel", {
      cwd: projectPath,
      encoding: "utf-8"
    }).trim();
    try {
      result.repository_url = execSync("git remote get-url origin", {
        cwd: projectPath,
        encoding: "utf-8"
      }).trim();
      // Extract "owner/repo" from both SSH (git@host:owner/repo.git) and
      // HTTPS (https://host/owner/repo.git) remote URLs.
      const match = result.repository_url.match(/[:/]([^/]+\/[^/]+?)(\.git)?$/);
      if (match) {
        result.repository = match[1].replace(/\.git$/, "");
      }
    } catch {
      // No "origin" remote — repository/repository_url stay null.
    }
  } catch {
    // Not a git repository — all fields stay null.
  }
  return result;
}
|
|
90
|
+
/**
 * Bulk-insert interaction rows inside one explicit transaction.
 * Rolls back and rethrows on the first failing row, so either every row
 * is persisted or none are.
 */
function insertInteractions(db, interactions) {
  const stmt = db.prepare(`
    INSERT INTO interactions (session_id, project_path, repository, repository_url, repository_root, owner, role, content, thinking, tool_calls, timestamp, is_compact_summary)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
  `);
  db.exec("BEGIN TRANSACTION");
  try {
    for (const row of interactions) {
      const values = [
        row.session_id,
        row.project_path,
        row.repository || null,
        row.repository_url || null,
        row.repository_root || null,
        row.owner,
        row.role,
        row.content,
        row.thinking || null,
        row.tool_calls || null,
        row.timestamp,
        row.is_compact_summary || 0
      ];
      stmt.run(...values);
    }
    db.exec("COMMIT");
  } catch (err) {
    db.exec("ROLLBACK");
    throw err;
  }
}
|
|
119
|
+
/** All interactions for a session, oldest first. */
function getInteractions(db, sessionId) {
  const sql = `
    SELECT * FROM interactions
    WHERE session_id = ?
    ORDER BY timestamp ASC
  `;
  return db.prepare(sql).all(sessionId);
}

/** Interactions for a session restricted to one owner, oldest first. */
function getInteractionsByOwner(db, sessionId, owner) {
  const sql = `
    SELECT * FROM interactions
    WHERE session_id = ? AND owner = ?
    ORDER BY timestamp ASC
  `;
  return db.prepare(sql).all(sessionId, owner);
}

/** Interactions across several sessions, ordered by timestamp then id. */
function getInteractionsBySessionIds(db, sessionIds) {
  if (sessionIds.length === 0) {
    return [];
  }
  const placeholders = sessionIds.map(() => "?").join(", ");
  const sql = `
    SELECT * FROM interactions
    WHERE session_id IN (${placeholders})
    ORDER BY timestamp ASC, id ASC
  `;
  return db.prepare(sql).all(...sessionIds);
}

/** Same as getInteractionsBySessionIds but additionally filtered by owner. */
function getInteractionsBySessionIdsAndOwner(db, sessionIds, owner) {
  if (sessionIds.length === 0) {
    return [];
  }
  const placeholders = sessionIds.map(() => "?").join(", ");
  const sql = `
    SELECT * FROM interactions
    WHERE session_id IN (${placeholders}) AND owner = ?
    ORDER BY timestamp ASC, id ASC
  `;
  return db.prepare(sql).all(...sessionIds, owner);
}
|
|
159
|
+
/**
 * True when any interaction exists for the given sessions and owner.
 * Uses EXISTS so SQLite can stop at the first matching row instead of
 * counting every match (the original COUNT(*) scanned all of them).
 */
function hasInteractionsForSessionIds(db, sessionIds, owner) {
  if (sessionIds.length === 0) {
    return false;
  }
  const placeholders = sessionIds.map(() => "?").join(", ");
  const stmt = db.prepare(`
    SELECT EXISTS(
      SELECT 1 FROM interactions
      WHERE session_id IN (${placeholders}) AND owner = ?
    ) AS found
  `);
  const result = stmt.get(...sessionIds, owner);
  return Boolean(result.found);
}

/** True when any interaction exists for one session and owner. */
function hasInteractions(db, sessionId, owner) {
  const stmt = db.prepare(`
    SELECT EXISTS(
      SELECT 1 FROM interactions
      WHERE session_id = ? AND owner = ?
    ) AS found
  `);
  const result = stmt.get(sessionId, owner);
  return Boolean(result.found);
}
|
|
179
|
+
/** Persist one pre-compact backup snapshot (interactions is a serialized blob). */
function insertPreCompactBackup(db, backup) {
  const sql = `
    INSERT INTO pre_compact_backups (session_id, project_path, owner, interactions)
    VALUES (?, ?, ?, ?)
  `;
  db.prepare(sql).run(
    backup.session_id,
    backup.project_path,
    backup.owner,
    backup.interactions
  );
}

/** Most recent backup for a session, or null when none exist. */
function getLatestBackup(db, sessionId) {
  const sql = `
    SELECT * FROM pre_compact_backups
    WHERE session_id = ?
    ORDER BY created_at DESC
    LIMIT 1
  `;
  const row = db.prepare(sql).get(sessionId);
  return row === undefined ? null : row;
}

/** Every backup for a session, oldest first. */
function getAllBackups(db, sessionId) {
  const sql = `
    SELECT * FROM pre_compact_backups
    WHERE session_id = ?
    ORDER BY created_at ASC
  `;
  return db.prepare(sql).all(sessionId);
}
|
|
208
|
+
/**
 * Full-text search over interactions via the interactions_fts FTS table.
 * Returns up to `limit` rows of { session_id, content, thinking }.
 * NOTE(review): `query` is passed verbatim to MATCH — FTS query syntax in
 * user input (unbalanced quotes, operators) can make prepare/step throw;
 * confirm callers sanitize or tolerate the error.
 */
function searchInteractions(db, query, limit = 10) {
  const stmt = db.prepare(`
    SELECT i.session_id, i.content, i.thinking
    FROM interactions_fts fts
    JOIN interactions i ON fts.rowid = i.id
    WHERE interactions_fts MATCH ?
    LIMIT ?
  `);
  return stmt.all(query, limit);
}
|
|
218
|
+
/**
 * Delete every interaction for a session.
 * Returns the number of rows removed — consistent with
 * deleteInteractionsByProject/deleteInteractionsBefore, which already do.
 * (Backward-compatible: previous callers ignored the undefined return.)
 */
function deleteInteractions(db, sessionId) {
  const stmt = db.prepare("DELETE FROM interactions WHERE session_id = ?");
  const result = stmt.run(sessionId);
  return Number(result.changes);
}

/**
 * Delete every pre-compact backup for a session; returns rows removed.
 */
function deleteBackups(db, sessionId) {
  const stmt = db.prepare(
    "DELETE FROM pre_compact_backups WHERE session_id = ?"
  );
  const result = stmt.run(sessionId);
  return Number(result.changes);
}
|
|
228
|
+
/** Row counts for the two tables, as { interactions, backups }. */
function getDbStats(db) {
  const countOf = (sql) => db.prepare(sql).get().count;
  return {
    interactions: countOf("SELECT COUNT(*) as count FROM interactions"),
    backups: countOf("SELECT COUNT(*) as count FROM pre_compact_backups")
  };
}
|
|
236
|
+
/** All interactions recorded under one project path, oldest first. */
function getInteractionsByProject(db, projectPath) {
  const sql = `
    SELECT * FROM interactions
    WHERE project_path = ?
    ORDER BY timestamp ASC
  `;
  return db.prepare(sql).all(projectPath);
}

/** All interactions recorded for one "owner/repo" slug, oldest first. */
function getInteractionsByRepository(db, repository) {
  const sql = `
    SELECT * FROM interactions
    WHERE repository = ?
    ORDER BY timestamp ASC
  `;
  return db.prepare(sql).all(repository);
}

/** Sorted list of distinct project paths present in the database. */
function getUniqueProjects(db) {
  const sql = `
    SELECT DISTINCT project_path FROM interactions
    ORDER BY project_path
  `;
  return db.prepare(sql).all().map((row) => row.project_path);
}

/** Sorted list of distinct non-null repository slugs present in the database. */
function getUniqueRepositories(db) {
  const sql = `
    SELECT DISTINCT repository FROM interactions
    WHERE repository IS NOT NULL
    ORDER BY repository
  `;
  return db.prepare(sql).all().map((row) => row.repository);
}
|
|
269
|
+
/** Delete every interaction for a project path; returns rows removed. */
function deleteInteractionsByProject(db, projectPath) {
  const { changes } = db
    .prepare("DELETE FROM interactions WHERE project_path = ?")
    .run(projectPath);
  return Number(changes);
}

/** Delete interactions with timestamp earlier than `beforeDate`; returns rows removed. */
function deleteInteractionsBefore(db, beforeDate) {
  const { changes } = db
    .prepare("DELETE FROM interactions WHERE timestamp < ?")
    .run(beforeDate);
  return Number(changes);
}

/** Delete every pre-compact backup for a project path; returns rows removed. */
function deleteBackupsByProject(db, projectPath) {
  const { changes } = db
    .prepare("DELETE FROM pre_compact_backups WHERE project_path = ?")
    .run(projectPath);
  return Number(changes);
}
|
|
286
|
+
/**
 * Count interactions matching an optional filter.
 * Supported filter keys (all optional): sessionId, projectPath,
 * repository, before (exclusive upper timestamp bound).
 */
function countInteractions(db, filter) {
  const conditions = [];
  const params = [];
  const addClause = (clause, value) => {
    conditions.push(clause);
    params.push(value);
  };
  if (filter.sessionId) addClause("session_id = ?", filter.sessionId);
  if (filter.projectPath) addClause("project_path = ?", filter.projectPath);
  if (filter.repository) addClause("repository = ?", filter.repository);
  if (filter.before) addClause("timestamp < ?", filter.before);
  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
  const stmt = db.prepare(
    `SELECT COUNT(*) as count FROM interactions ${whereClause}`
  );
  const result = stmt.get(...params);
  return result.count;
}
|
|
312
|
+
// Public API of the local-DB helper module (consumed by the hook scripts
// and MCP servers bundled in this package).
export {
  countInteractions,
  deleteBackups,
  deleteBackupsByProject,
  deleteInteractions,
  deleteInteractionsBefore,
  deleteInteractionsByProject,
  getAllBackups,
  getCurrentUser,
  getDbStats,
  getInteractions,
  getInteractionsByOwner,
  getInteractionsByProject,
  getInteractionsByRepository,
  getInteractionsBySessionIds,
  getInteractionsBySessionIdsAndOwner,
  getLatestBackup,
  getLocalDbPath,
  getRepositoryInfo,
  getUniqueProjects,
  getUniqueRepositories,
  hasInteractions,
  hasInteractionsForSessionIds,
  initLocalDatabase,
  insertInteractions,
  insertPreCompactBackup,
  openLocalDatabase,
  searchInteractions
};
|
|
@@ -0,0 +1,214 @@
|
|
|
1
|
+
// lib/fuzzy-search.ts
|
|
2
|
+
import * as fs2 from "node:fs";
|
|
3
|
+
import * as path2 from "node:path";
|
|
4
|
+
|
|
5
|
+
// lib/utils.ts
|
|
6
|
+
import * as fs from "node:fs";
|
|
7
|
+
import * as path from "node:path";
|
|
8
|
+
/** Read and parse a JSON file, returning `fallback` on any read/parse error. */
function safeReadJson(filePath, fallback) {
  try {
    return JSON.parse(fs.readFileSync(filePath, "utf-8"));
  } catch {
    return fallback;
  }
}

/** Recursively collect every *.json file under `dir` (empty if dir is missing). */
function findJsonFiles(dir) {
  if (!fs.existsSync(dir)) {
    return [];
  }
  return fs.readdirSync(dir, { withFileTypes: true }).flatMap((entry) => {
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      return findJsonFiles(fullPath);
    }
    return entry.name.endsWith(".json") ? [fullPath] : [];
  });
}
|
|
30
|
+
|
|
31
|
+
// lib/fuzzy-search.ts
|
|
32
|
+
/**
 * Levenshtein edit distance (insert/delete/substitute, unit costs).
 * Rolling single-row DP — same results as the full matrix, O(|b|) space.
 */
function levenshtein(a, b) {
  let prev = Array.from({ length: b.length + 1 }, (_, j) => j);
  for (let i = 1; i <= a.length; i++) {
    const curr = [i];
    for (let j = 1; j <= b.length; j++) {
      const substitutionCost = a[i - 1] === b[j - 1] ? 0 : 1;
      curr[j] = Math.min(
        prev[j] + 1, // deletion
        curr[j - 1] + 1, // insertion
        prev[j - 1] + substitutionCost // substitution
      );
    }
    prev = curr;
  }
  return prev[b.length];
}
|
|
55
|
+
/**
 * Expand a query term to the full alias group of any tag whose id, label,
 * or alias matches it case-insensitively. The returned list always starts
 * with the original query itself.
 */
function expandAliases(query, tags) {
  const expanded = new Set([query]);
  const needle = query.toLowerCase();
  for (const tag of tags) {
    const terms = [tag.id, tag.label, ...tag.aliases];
    if (terms.some((term) => term.toLowerCase() === needle)) {
      for (const term of terms) {
        expanded.add(term);
      }
    }
  }
  return [...expanded];
}
|
|
72
|
+
/**
 * Tiered, case-insensitive similarity score between a candidate text and a
 * query: 10 exact, 5 text-contains-query, 3 query-contains-text, then 2 or
 * 1 for edit distance <= 2 / <= 3, and 0 otherwise.
 */
function calculateSimilarity(text, query) {
  const t = text.toLowerCase();
  const q = query.toLowerCase();
  if (t === q) {
    return 10;
  }
  if (t.includes(q)) {
    return 5;
  }
  if (q.includes(t)) {
    return 3;
  }
  const distance = levenshtein(t, q);
  if (distance <= 2) {
    return 2;
  }
  return distance <= 3 ? 1 : 0;
}
|
|
83
|
+
/**
 * Fuzzy search across the mneme JSON stores.
 * @param {object} options - { query, mnemeDir, targets?, limit?, timeout? }.
 *   targets defaults to ["sessions", "decisions"]; "patterns" is opt-in.
 * @returns {Promise<Array>} top `limit` matches sorted by descending score,
 *   each { type, id, score, title, highlights }.
 *
 * The original had three near-identical directory-scan branches; they are
 * factored into one helper. Timeout semantics are preserved: once the
 * deadline passes, each remaining directory stops before scoring its next
 * file (as the original per-directory `break` did).
 */
async function search(options) {
  const {
    query,
    mnemeDir,
    targets = ["sessions", "decisions"],
    limit = 20,
    timeout = 1e4
  } = options;
  const startTime = Date.now();
  const results = [];
  const tagsPath = path2.join(mnemeDir, "tags.json");
  const tagsData = safeReadJson(tagsPath, { tags: [] });
  const expandedQueries = expandAliases(query, tagsData.tags);
  const timedOut = () => Date.now() - startTime > timeout;
  // Scan one directory of JSON documents; `extract` maps a parsed file to
  // the candidate entries ({ doc, fields, type, id, title }) to score.
  const scanDir = (dirName, extract) => {
    const dir = path2.join(mnemeDir, dirName);
    if (!fs2.existsSync(dir)) return;
    for (const file of findJsonFiles(dir)) {
      if (timedOut()) return;
      for (const candidate of extract(safeReadJson(file, {}), file)) {
        const score = scoreDocument(candidate.doc, expandedQueries, candidate.fields);
        if (score > 0) {
          results.push({
            type: candidate.type,
            id: candidate.id,
            score,
            title: candidate.title,
            highlights: []
          });
        }
      }
    }
  };
  if (targets.includes("sessions")) {
    scanDir("sessions", (session, file) => [{
      doc: session,
      fields: ["title", "goal", "tags"],
      type: "session",
      id: session.id || path2.basename(file, ".json"),
      title: session.title || "Untitled"
    }]);
  }
  if (targets.includes("decisions")) {
    scanDir("decisions", (decision, file) => [{
      doc: decision,
      fields: ["title", "decision", "tags"],
      type: "decision",
      id: decision.id || path2.basename(file, ".json"),
      title: decision.title || "Untitled"
    }]);
  }
  if (targets.includes("patterns")) {
    // Pattern files hold an array of patterns; each entry is scored separately.
    scanDir("patterns", (parsed, file) => (parsed.patterns || []).map((p) => ({
      doc: p,
      fields: ["description", "errorPattern", "tags"],
      type: "pattern",
      id: `${path2.basename(file, ".json")}-${p.type || "unknown"}`,
      title: p.description || "Untitled pattern"
    })));
  }
  return results.sort((a, b) => b.score - a.score).slice(0, limit);
}
|
|
173
|
+
/**
 * Sum similarity of every query variant against the named fields of a
 * document. String fields are scored directly; array fields score each
 * string element; other value types contribute nothing.
 */
function scoreDocument(doc, queries, fields) {
  const scoreText = (text) =>
    queries.reduce((acc, q) => acc + calculateSimilarity(text, q), 0);
  let total = 0;
  for (const field of fields) {
    const value = doc[field];
    if (typeof value === "string") {
      total += scoreText(value);
    } else if (Array.isArray(value)) {
      for (const entry of value) {
        if (typeof entry === "string") {
          total += scoreText(entry);
        }
      }
    }
  }
  return total;
}
|
|
193
|
+
// CLI entry point: `node fuzzy-search.js --query <text>` searches the
// .mneme directory under the current working directory and prints one
// JSON object to stdout (success) or stderr (failure).
var isMain = process.argv[1]?.endsWith("fuzzy-search.js") || process.argv[1]?.endsWith("fuzzy-search.ts");
if (isMain && process.argv.length > 2) {
  const args = process.argv.slice(2);
  const queryIndex = args.indexOf("--query");
  const query = queryIndex !== -1 ? args[queryIndex + 1] : "";
  const mnemeDir = `${process.cwd()}/.mneme`;
  if (!query) {
    // NOTE(review): failures are reported in the JSON payload but the
    // process exits 0 — presumably deliberate so Claude hooks are never
    // blocked by a search error; confirm before changing exit codes.
    console.error(JSON.stringify({ success: false, error: "Missing --query" }));
    process.exit(0);
  }
  search({ query, mnemeDir }).then((results) => {
    console.log(JSON.stringify({ success: true, results }));
  }).catch((error) => {
    // Same convention: failure is signaled in the payload, not the exit code.
    console.error(JSON.stringify({ success: false, error: String(error) }));
  });
}
// Public API of the fuzzy-search module.
export {
  calculateSimilarity,
  expandAliases,
  levenshtein,
  search
};
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
// lib/github.ts
|
|
2
|
+
/**
 * Parse a GitHub pull-request URL into { owner, repo, prNumber, url },
 * or null when the URL is not a recognizable PR link. The returned `url`
 * is normalized to the canonical https form.
 */
function parsePRUrl(url) {
  const patterns = [
    // Standard PR URL: https://github.com/owner/repo/pull/123
    /^https?:\/\/github\.com\/([^/]+)\/([^/]+)\/pull\/(\d+)/
  ];
  for (const pattern of patterns) {
    const match = url.match(pattern);
    if (!match) {
      continue;
    }
    const [, owner, repo, prNumber] = match;
    return {
      owner,
      repo,
      prNumber: Number.parseInt(prNumber, 10),
      url: `https://github.com/${owner}/${repo}/pull/${prNumber}`
    };
  }
  return null;
}
|
|
21
|
+
/**
 * Fetch the unified diff of a PR via the `gh` CLI.
 * Uses execFileSync with an argument vector (no shell) so owner/repo values
 * parsed out of a user-supplied URL cannot be interpreted as shell syntax.
 * @returns {Promise<string>} the raw diff text.
 * @throws {Error} when `gh` is missing, unauthenticated, or the fetch fails.
 */
async function fetchPRDiff(prSource) {
  const { owner, repo, prNumber } = prSource;
  const { execFileSync } = await import("node:child_process");
  try {
    const diff = execFileSync(
      "gh",
      ["pr", "diff", String(prNumber), "--repo", `${owner}/${repo}`],
      {
        encoding: "utf-8",
        maxBuffer: 10 * 1024 * 1024
      }
    );
    return diff;
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    throw new Error(`Failed to fetch PR diff: ${message}`);
  }
}
|
|
35
|
+
/**
 * Fetch the inline (file-anchored) review comments of a PR via `gh api`.
 * Uses execFileSync with an argument vector (no shell) so URL-derived
 * owner/repo values cannot inject shell syntax.
 * NOTE(review): with --paginate, multi-page results arrive as concatenated
 * JSON arrays which JSON.parse cannot handle — confirm comment lists stay
 * within one page or add gh's `--slurp` flag.
 */
async function fetchPRComments(prSource) {
  const { owner, repo, prNumber } = prSource;
  const { execFileSync } = await import("node:child_process");
  try {
    const reviewCommentsJson = execFileSync(
      "gh",
      ["api", `repos/${owner}/${repo}/pulls/${prNumber}/comments`, "--paginate"],
      { encoding: "utf-8", maxBuffer: 10 * 1024 * 1024 }
    );
    const reviewComments = JSON.parse(reviewCommentsJson);
    return reviewComments.map(
      (comment) => ({
        id: comment.id,
        body: comment.body,
        path: comment.path,
        line: comment.line ?? comment.original_line,
        // NOTE(review): comment.user can be null for deleted accounts —
        // this would throw here, as it did in the original; confirm.
        user: { login: comment.user.login },
        createdAt: comment.created_at,
        url: comment.html_url
      })
    );
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    throw new Error(`Failed to fetch PR comments: ${message}`);
  }
}

/**
 * Fetch the conversation (issue-level) comments of a PR, normalized to the
 * same shape as review comments (path is "" and line is null).
 */
async function fetchPRIssueComments(prSource) {
  const { owner, repo, prNumber } = prSource;
  const { execFileSync } = await import("node:child_process");
  try {
    const issueCommentsJson = execFileSync(
      "gh",
      ["api", `repos/${owner}/${repo}/issues/${prNumber}/comments`, "--paginate"],
      { encoding: "utf-8", maxBuffer: 10 * 1024 * 1024 }
    );
    const issueComments = JSON.parse(issueCommentsJson);
    return issueComments.map(
      (comment) => ({
        id: comment.id,
        body: comment.body,
        path: "",
        line: null,
        user: { login: comment.user.login },
        createdAt: comment.created_at,
        url: comment.html_url
      })
    );
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    throw new Error(`Failed to fetch PR issue comments: ${message}`);
  }
}
|
|
85
|
+
/**
 * Fetch inline review comments and conversation comments in parallel and
 * return them as one list (review comments first).
 */
async function fetchAllPRComments(prSource) {
  const commentLists = await Promise.all([
    fetchPRComments(prSource),
    fetchPRIssueComments(prSource)
  ]);
  return commentLists.flat();
}
|
|
92
|
+
/**
 * Fetch PR title/body/state/author/timestamps via `gh pr view --json`.
 * Uses execFileSync with an argument vector (no shell) so URL-derived
 * owner/repo values cannot inject shell syntax.
 * @throws {Error} when the `gh` invocation fails.
 */
async function fetchPRMetadata(prSource) {
  const { owner, repo, prNumber } = prSource;
  const { execFileSync } = await import("node:child_process");
  try {
    const prJson = execFileSync(
      "gh",
      [
        "pr",
        "view",
        String(prNumber),
        "--repo",
        `${owner}/${repo}`,
        "--json",
        "title,body,state,author,createdAt,mergedAt"
      ],
      { encoding: "utf-8" }
    );
    const pr = JSON.parse(prJson);
    return {
      title: pr.title,
      body: pr.body || "",
      state: pr.state,
      author: pr.author.login,
      createdAt: pr.createdAt,
      mergedAt: pr.mergedAt
    };
  } catch (error) {
    const message = error instanceof Error ? error.message : String(error);
    throw new Error(`Failed to fetch PR metadata: ${message}`);
  }
}
|
|
114
|
+
// Public API of the GitHub helper module (all fetchers shell out to the
// `gh` CLI and require it to be installed and authenticated).
export {
  fetchAllPRComments,
  fetchPRComments,
  fetchPRDiff,
  fetchPRIssueComments,
  fetchPRMetadata,
  parsePRUrl
};
|