pkm-mcp-server 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +52 -0
- package/LICENSE +21 -0
- package/README.md +246 -0
- package/activity.js +147 -0
- package/embeddings.js +672 -0
- package/graph.js +340 -0
- package/handlers.js +871 -0
- package/helpers.js +855 -0
- package/index.js +498 -0
- package/package.json +63 -0
- package/sample-project/CLAUDE.md +193 -0
- package/templates/adr.md +52 -0
- package/templates/daily-note.md +19 -0
- package/templates/devlog.md +35 -0
- package/templates/fleeting-note.md +11 -0
- package/templates/literature-note.md +25 -0
- package/templates/meeting-notes.md +28 -0
- package/templates/moc.md +22 -0
- package/templates/permanent-note.md +26 -0
- package/templates/project-index.md +38 -0
- package/templates/research-note.md +35 -0
- package/templates/task.md +22 -0
- package/templates/troubleshooting-log.md +32 -0
- package/utils.js +31 -0
package/handlers.js
ADDED
|
@@ -0,0 +1,871 @@
|
|
|
1
|
+
import fs from "fs/promises";
|
|
2
|
+
import path from "path";
|
|
3
|
+
import {
|
|
4
|
+
resolvePath as resolvePathBase,
|
|
5
|
+
matchesFilters,
|
|
6
|
+
formatMetadata,
|
|
7
|
+
countOccurrences,
|
|
8
|
+
substituteTemplateVariables,
|
|
9
|
+
validateFrontmatterStrict,
|
|
10
|
+
extractInlineTags,
|
|
11
|
+
matchesTagPattern,
|
|
12
|
+
findSectionRange,
|
|
13
|
+
listHeadings,
|
|
14
|
+
extractTailSections,
|
|
15
|
+
buildBasenameMap,
|
|
16
|
+
resolveFuzzyPath,
|
|
17
|
+
resolveFuzzyFolder,
|
|
18
|
+
computePeek,
|
|
19
|
+
formatPeek,
|
|
20
|
+
updateFrontmatter,
|
|
21
|
+
compareFrontmatterValues,
|
|
22
|
+
AUTO_REDIRECT_THRESHOLD,
|
|
23
|
+
FORCE_HARD_CAP,
|
|
24
|
+
CHUNK_SIZE,
|
|
25
|
+
} from "./helpers.js";
|
|
26
|
+
import { exploreNeighborhood, formatNeighborhood, findFilesLinkingTo, rewriteWikilinks } from "./graph.js";
|
|
27
|
+
import { getAllMarkdownFiles, extractFrontmatter } from "./utils.js";
|
|
28
|
+
|
|
29
|
+
/**
|
|
30
|
+
* Create all tool handler functions with shared context.
|
|
31
|
+
* @param {Object} ctx
|
|
32
|
+
* @param {string} ctx.vaultPath - absolute path to vault root
|
|
33
|
+
* @param {Map} ctx.templateRegistry - loaded templates
|
|
34
|
+
* @param {Object|null} ctx.semanticIndex - SemanticIndex instance (null if no API key)
|
|
35
|
+
* @param {Object|null} ctx.activityLog - ActivityLog instance
|
|
36
|
+
* @param {string} ctx.sessionId - current session UUID
|
|
37
|
+
* @returns {Map<string, function>} tool name to handler function
|
|
38
|
+
*/
|
|
39
|
+
export async function createHandlers({ vaultPath, templateRegistry, semanticIndex, activityLog, sessionId }) {
|
|
40
|
+
// Bind the vault root once so every handler only passes the relative part.
const resolvePath = (relativePath) => {
  return resolvePathBase(relativePath, vaultPath);
};
|
|
41
|
+
|
|
42
|
+
// Build basename map for fuzzy path resolution (read-only tools)
|
|
43
|
+
const allFiles = await getAllMarkdownFiles(vaultPath);
|
|
44
|
+
const { basenameMap, allFilesSet } = buildBasenameMap(allFiles);
|
|
45
|
+
|
|
46
|
+
/** Resolve a file path with fuzzy fallback (for read-only tools). */
|
|
47
|
+
/** Resolve a file path with fuzzy fallback (for read-only tools). */
const resolveFile = (inputPath) =>
  resolvePath(resolveFuzzyPath(inputPath, basenameMap, allFilesSet));
|
|
51
|
+
|
|
52
|
+
/** Resolve a folder path with fuzzy fallback. */
|
|
53
|
+
/**
 * Resolve a folder path with fuzzy fallback.
 * The security check runs first — resolvePath rejects traversal attempts
 * before any fuzzy matching happens.
 */
const resolveFolder = (folder) => {
  const exactResolved = resolvePath(folder);

  // A folder is "known" when at least one indexed file lives under it
  // (checked with both "/" and the platform separator).
  const knownFiles = Array.from(allFilesSet);
  const slashPrefix = folder + "/";
  const sepPrefix = folder + path.sep;
  const isKnownDir = knownFiles.some(
    (f) => f.startsWith(slashPrefix) || f.startsWith(sepPrefix)
  );
  if (isKnownDir) {
    return exactResolved;
  }

  // Unknown directory — fall back to fuzzy matching against the file index.
  return resolvePath(resolveFuzzyFolder(folder, knownFiles));
};
|
|
65
|
+
|
|
66
|
+
/**
 * Read a note, optionally restricted to one heading section, a line tail,
 * trailing sections, a fixed-size chunk, or an explicit line range.
 * The five modes are mutually exclusive. Large files requested without an
 * explicit mode are redirected to a peek summary unless force=true, and
 * force itself is still capped at FORCE_HARD_CAP.
 */
async function handleRead(args) {
  const filePath = resolveFile(args.path);
  const content = await fs.readFile(filePath, "utf-8");

  // Each mode flag is tested once with !== undefined. The previous dispatch
  // used truthiness, so a present-but-falsy value (heading: "", tail: 0)
  // counted as an explicit mode for validation and redirect purposes but
  // then silently fell through to returning the whole file. Now every
  // present mode is actually honored.
  const modes = {
    heading: args.heading !== undefined,
    tail: args.tail !== undefined,
    tailSections: args.tail_sections !== undefined,
    chunk: args.chunk !== undefined,
    lines: args.lines !== undefined,
  };
  const activeModes = Object.values(modes).filter(Boolean).length;
  if (activeModes > 1) {
    throw new Error("Only one of 'heading', 'tail', 'tail_sections', 'chunk', or 'lines' can be specified at a time");
  }
  const hasExplicitMode = activeModes === 1;

  // Auto-redirect: return peek data for large files without explicit pagination.
  if (!hasExplicitMode && !args.force && content.length > AUTO_REDIRECT_THRESHOLD) {
    const relativePath = path.relative(vaultPath, filePath);
    const peekData = computePeek(content, relativePath);
    return { content: [{ type: "text", text: formatPeek(peekData, { redirected: true }) }] };
  }

  // force=true still refuses to dump truly huge files in one response.
  if (args.force && content.length > FORCE_HARD_CAP) {
    const relativePath = path.relative(vaultPath, filePath);
    const peekData = computePeek(content, relativePath);
    const text = formatPeek(peekData, { redirected: true }) +
      `\n\n**Hard cap reached.** File is ${content.length.toLocaleString()} chars, exceeding the ~400k char limit even with force=true. Use heading, chunk, or lines params to read portions.`;
    return { content: [{ type: "text", text }] };
  }

  let text = content;

  if (modes.heading) {
    const range = findSectionRange(content, args.heading);
    if (!range) {
      // Help the caller recover by listing what headings do exist.
      const available = listHeadings(content);
      const list = available.length > 0
        ? `Available headings:\n${available.join("\n")}`
        : "No headings found in file";
      throw new Error(`Heading not found: ${args.heading}\n${list}`);
    }
    text = content.slice(range.headingStart, range.sectionEnd);
  } else if (modes.tail) {
    // Keep YAML frontmatter so the caller retains metadata context.
    let frontmatter = "";
    let body = content;
    if (content.startsWith("---")) {
      const endIndex = content.indexOf("\n---", 3);
      if (endIndex !== -1) {
        frontmatter = content.slice(0, endIndex + 4);
        body = content.slice(endIndex + 4);
      }
    }
    const lines = body.split("\n");
    const tailLines = lines.slice(-args.tail);
    text = frontmatter + (frontmatter && !frontmatter.endsWith("\n") ? "\n" : "") + tailLines.join("\n");
  } else if (modes.tailSections) {
    const level = args.section_level || 2;
    text = extractTailSections(content, args.tail_sections, level);
  } else if (modes.chunk) {
    const totalChunks = Math.ceil(content.length / CHUNK_SIZE);
    if (args.chunk < 1 || args.chunk > totalChunks) {
      throw new Error(`Invalid chunk: ${args.chunk}. File has ${totalChunks} chunk${totalChunks === 1 ? "" : "s"} (1-indexed).`);
    }
    const start = (args.chunk - 1) * CHUNK_SIZE;
    const end = Math.min(start + CHUNK_SIZE, content.length);
    text = `[Chunk ${args.chunk} of ${totalChunks}, chars ${start + 1}-${end} of ${content.length}]\n\n` + content.slice(start, end);
  } else if (modes.lines) {
    const allLines = content.split("\n");
    const { start, end } = args.lines;
    if (start < 1 || end < start || start > allLines.length) {
      throw new Error(`Invalid line range: ${start}-${end}. File has ${allLines.length} lines.`);
    }
    const clampedEnd = Math.min(end, allLines.length);
    text = `[Lines ${start}-${clampedEnd} of ${allLines.length}]\n\n` + allLines.slice(start - 1, clampedEnd).join("\n");
  }

  return { content: [{ type: "text", text }] };
}
|
|
149
|
+
|
|
150
|
+
/** Return a peek summary for a note instead of its full contents. */
async function handlePeek(args) {
  const absolute = resolveFile(args.path);
  const raw = await fs.readFile(absolute, "utf-8");
  const rel = path.relative(vaultPath, absolute);
  return {
    content: [{ type: "text", text: formatPeek(computePeek(raw, rel)) }],
  };
}
|
|
157
|
+
|
|
158
|
+
/**
 * Create a new note from a registered template.
 * Fails when the template is unknown, strict frontmatter validation fails,
 * or the target already exists (creation uses the atomic "wx" flag, so
 * there is no check-then-write race).
 */
async function handleWrite(args) {
  const { template: templateName, path: outputPath, variables = {}, frontmatter = {}, createDirs = true } = args;

  const templateInfo = templateRegistry.get(templateName);
  if (!templateInfo) {
    const available = Array.from(templateRegistry.keys()).join(", ");
    throw new Error(`Template "${templateName}" not found. Available templates: ${available || "(none)"}`);
  }

  const filePath = resolvePath(outputPath);

  // The note title defaults to the file's basename without extension.
  const title = path.basename(outputPath, ".md");
  const substituted = substituteTemplateVariables(templateInfo.content, {
    title,
    custom: variables,
    frontmatter
  });

  const validation = validateFrontmatterStrict(substituted);
  if (!validation.valid) {
    throw new Error(`Template validation failed:\n${validation.errors.join("\n")}`);
  }

  if (createDirs) {
    await fs.mkdir(path.dirname(filePath), { recursive: true });
  }

  // Atomic create — "wx" fails with EEXIST if the file is already there.
  try {
    await fs.writeFile(filePath, substituted, { encoding: "utf-8", flag: "wx" });
  } catch (e) {
    if (e.code !== "EEXIST") throw e;
    throw new Error(`File already exists: ${outputPath}. Use vault_edit or vault_append to modify existing files.`, { cause: e });
  }

  // Keep the in-memory fuzzy-resolution index in sync with the new file.
  const newBasename = path.basename(outputPath, ".md").toLowerCase();
  const bucket = basenameMap.get(newBasename) ?? [];
  if (!basenameMap.has(newBasename)) {
    basenameMap.set(newBasename, bucket);
  }
  bucket.push(outputPath);
  allFilesSet.add(outputPath);

  const fm = validation.frontmatter;
  const createdStr = fm.created instanceof Date
    ? fm.created.toISOString().split("T")[0]
    : fm.created;
  return {
    content: [{
      type: "text",
      text: `Created ${outputPath} from template "${templateName}"\n\nFrontmatter:\n- type: ${fm.type}\n- created: ${createdStr}\n- tags: ${(fm.tags || []).filter(Boolean).join(", ")}`
    }]
  };
}
|
|
214
|
+
|
|
215
|
+
/**
 * Append content to an existing note, optionally positioned relative to a
 * heading: before it, directly after its heading line, or at the end of its
 * section. With only `heading`, appends after the heading if found and falls
 * back to end-of-file; with neither, appends at end-of-file.
 * Throws if the file does not exist (vault_write creates new files) or if
 * `position` is given without `heading` / with an unknown value.
 */
async function handleAppend(args) {
  const filePath = resolvePath(args.path);
  let existing;
  try {
    existing = await fs.readFile(filePath, "utf-8");
  } catch (e) {
    if (e.code === "ENOENT") {
      throw new Error(`File not found: ${args.path}. Use vault_write to create new files.`, { cause: e });
    }
    throw e;
  }

  let newContent;
  if (args.position) {
    // position only makes sense relative to a named heading.
    if (!args.heading) {
      throw new Error("'heading' is required when 'position' is specified");
    }
    const range = findSectionRange(existing, args.heading);
    if (!range) {
      throw new Error(`Heading not found in ${args.path}: ${args.heading}`);
    }

    if (args.position === "before_heading") {
      // Insert just above the heading line itself.
      newContent = existing.slice(0, range.headingStart) + args.content + "\n" + existing.slice(range.headingStart);
    } else if (args.position === "after_heading") {
      // Insert immediately after the heading line, before the section body.
      newContent = existing.slice(0, range.afterHeading) + args.content + "\n" + existing.slice(range.afterHeading);
    } else if (args.position === "end_of_section") {
      // Insert at the section boundary; add a newline separator only when
      // the preceding text doesn't already end with one.
      const before = existing.slice(0, range.sectionEnd);
      const after = existing.slice(range.sectionEnd);
      const separator = before.length > 0 && !before.endsWith("\n") ? "\n" : "";
      newContent = before + separator + args.content + "\n" + after;
    } else {
      throw new Error(`Unknown position: ${args.position}`);
    }
  } else if (args.heading) {
    // Best-effort placement: after the heading if present, else end-of-file.
    const range = findSectionRange(existing, args.heading);
    if (range) {
      newContent = existing.slice(0, range.afterHeading) + args.content + "\n" + existing.slice(range.afterHeading);
    } else {
      newContent = existing + "\n" + args.content;
    }
  } else {
    // Default: plain end-of-file append.
    newContent = existing + "\n" + args.content;
  }

  await fs.writeFile(filePath, newContent, "utf-8");
  return { content: [{ type: "text", text: `Appended to ${args.path}${args.position ? ` (${args.position})` : ""}` }] };
}
|
|
263
|
+
|
|
264
|
+
/**
 * Replace a unique occurrence of old_string with new_string in a note.
 * Zero or multiple matches produce isError results (not throws) so the
 * caller can refine the search string and retry.
 */
async function handleEdit(args) {
  const filePath = resolvePath(args.path);
  let content;
  try {
    content = await fs.readFile(filePath, "utf-8");
  } catch (e) {
    if (e.code === "ENOENT") {
      throw new Error(`File not found: ${args.path}`, { cause: e });
    }
    throw e;
  }

  const count = countOccurrences(content, args.old_string);
  if (count === 0) {
    return {
      content: [{ type: "text", text: `No match found for the specified old_string in ${args.path}` }],
      isError: true,
    };
  }
  if (count > 1) {
    return {
      content: [{ type: "text", text: `Found ${count} matches for old_string in ${args.path}. Please provide a more specific string that matches exactly once.` }],
      isError: true,
    };
  }

  // Function replacer sidesteps $-pattern expansion in new_string.
  const updated = content.replace(args.old_string, () => args.new_string);
  await fs.writeFile(filePath, updated, "utf-8");
  return { content: [{ type: "text", text: `Successfully edited ${args.path}` }] };
}
|
|
295
|
+
|
|
296
|
+
/**
 * Case-insensitive substring search across markdown files.
 * Scanning stops once the result limit is reached; each hit reports up to
 * three matching lines, trimmed to 100 characters.
 */
async function handleSearch(args) {
  const searchDir = args.folder ? resolveFolder(args.folder) : vaultPath;
  const files = await getAllMarkdownFiles(searchDir);
  const needle = args.query.toLowerCase();
  const maxResults = args.limit || 10;
  const hits = [];

  for (const file of files) {
    if (hits.length >= maxResults) break;
    const raw = await fs.readFile(path.join(searchDir, file), "utf-8");
    if (!raw.toLowerCase().includes(needle)) continue;

    const snippets = [];
    const lines = raw.split("\n");
    for (let i = 0; i < lines.length && snippets.length < 3; i++) {
      if (lines[i].toLowerCase().includes(needle)) {
        snippets.push(`L${i + 1}: ${lines[i].trim().slice(0, 100)}`);
      }
    }
    hits.push({ path: file, matches: snippets });
  }

  const text = hits.length > 0
    ? hits.map((h) => `**${h.path}**\n${h.matches.join("\n")}`).join("\n\n")
    : "No matches found";
  return { content: [{ type: "text", text }] };
}
|
|
330
|
+
|
|
331
|
+
/**
 * Match str against a glob pattern where '*' matches any run of characters.
 * Linear-time: segments between wildcards are located left-to-right with
 * indexOf — no regex, no backtracking.
 */
function globMatch(pattern, str) {
  const segments = pattern.split("*");

  // No wildcard at all — require an exact match.
  if (segments.length === 1) return str === pattern;

  const first = segments[0];
  const last = segments[segments.length - 1];
  if (!str.startsWith(first) || !str.endsWith(last)) return false;

  // Interior segments must appear in order, strictly between the end of
  // the prefix and the start of the anchored suffix.
  const suffixStart = str.length - last.length;
  let cursor = first.length;
  for (const segment of segments.slice(1, -1)) {
    if (segment === "") continue; // consecutive wildcards collapse
    const found = str.indexOf(segment, cursor);
    if (found === -1 || found + segment.length > suffixStart) return false;
    cursor = found + segment.length;
  }
  return cursor <= suffixStart;
}
|
|
350
|
+
|
|
351
|
+
/** List directory entries, optionally recursing and filtering by glob. */
async function handleList(args) {
  const listPath = resolvePath(args.path || "");
  const entries = await fs.readdir(listPath, { withFileTypes: true });

  const items = [];
  for (const entry of entries) {
    // Hidden entries (dotfiles, .trash, ...) are never listed.
    if (entry.name.startsWith(".")) continue;

    const itemPath = path.join(args.path || "", entry.name);
    if (entry.isDirectory()) {
      items.push(`[dir] ${itemPath}/`);
      if (args.recursive) {
        const nested = await getAllMarkdownFiles(path.join(listPath, entry.name));
        for (const f of nested) {
          items.push(`  ${path.join(itemPath, f)}`);
        }
      }
    } else if (!args.pattern || globMatch(args.pattern, entry.name)) {
      items.push(itemPath);
    }
  }

  return { content: [{ type: "text", text: items.join("\n") || "Empty directory" }] };
}
|
|
373
|
+
|
|
374
|
+
/** Report the most recently modified notes, newest first. */
async function handleRecent(args) {
  const searchDir = args.folder ? resolveFolder(args.folder) : vaultPath;
  const files = await getAllMarkdownFiles(searchDir);
  const maxResults = args.limit || 10;

  // Stat all files in parallel, then order by mtime descending.
  const stats = await Promise.all(
    files.map(async (file) => {
      const { mtime } = await fs.stat(path.join(searchDir, file));
      return { path: file, mtime };
    })
  );
  stats.sort((a, b) => b.mtime - a.mtime);

  const text = stats
    .slice(0, maxResults)
    .map((f) => `${f.path} (${f.mtime.toISOString().split("T")[0]})`)
    .join("\n");
  return { content: [{ type: "text", text }] };
}
|
|
397
|
+
|
|
398
|
+
/**
 * Report outgoing wikilinks and/or incoming backlinks for a note,
 * controlled by args.direction ("incoming", "outgoing", or both).
 */
async function handleLinks(args) {
  const resolvedVaultRelative = resolveFuzzyPath(args.path, basenameMap, allFilesSet);
  const filePath = resolvePath(resolvedVaultRelative);
  const content = await fs.readFile(filePath, "utf-8");

  const outgoing = [];
  let incoming = [];

  if (args.direction !== "incoming") {
    // [[target]] or [[target|alias]] — capture only the target.
    for (const m of content.matchAll(/\[\[([^\]|]+)(?:\|[^\]]+)?\]\]/g)) {
      outgoing.push(m[1]);
    }
  }

  if (args.direction !== "outgoing") {
    const linkingFiles = await findFilesLinkingTo(
      resolvedVaultRelative, vaultPath, Array.from(allFilesSet), basenameMap, allFilesSet
    );
    incoming = linkingFiles.map(({ file }) => file);
  }

  let output = "";
  if (outgoing.length > 0) {
    output += `**Outgoing links:**\n${outgoing.map((l) => `- [[${l}]]`).join("\n")}\n\n`;
  }
  if (incoming.length > 0) {
    output += `**Incoming links:**\n${incoming.map((l) => `- ${l}`).join("\n")}`;
  }

  return { content: [{ type: "text", text: output || "No links found" }] };
}
|
|
429
|
+
|
|
430
|
+
/** Explore the link graph around a note up to a bounded depth. */
async function handleNeighborhood(args) {
  const startPath = resolveFuzzyPath(args.path, basenameMap, allFilesSet);
  // Depth is clamped to 5 to keep graph traversal bounded.
  const depth = Math.min(args.depth || 2, 5);
  const direction = args.direction || "both";

  const neighborhood = await exploreNeighborhood({ startPath, vaultPath, depth, direction });
  const text = formatNeighborhood(neighborhood, { startPath, depth, direction });
  return { content: [{ type: "text", text }] };
}
|
|
450
|
+
|
|
451
|
+
/**
 * Filter notes by frontmatter (type/status/tags/dates/custom fields),
 * optionally sorted by a frontmatter field. When sorting, every file must
 * be scanned before the limit applies; otherwise scanning stops early.
 */
async function handleQuery(args) {
  const searchDir = args.folder ? resolveFolder(args.folder) : vaultPath;
  const files = await getAllMarkdownFiles(searchDir);
  const limit = args.limit || 50;

  const filters = {
    type: args.type,
    status: args.status,
    tags: args.tags,
    tags_any: args.tags_any,
    created_after: args.created_after,
    created_before: args.created_before,
    custom_fields: args.custom_fields,
  };

  const matches = [];
  for (const file of files) {
    // Early exit is only safe when no global ordering is requested.
    if (!args.sort_by && matches.length >= limit) break;

    const content = await fs.readFile(path.join(searchDir, file), "utf-8");
    const metadata = extractFrontmatter(content);
    if (!matchesFilters(metadata, filters)) continue;

    const { summary, tagLine } = formatMetadata(metadata);
    const relativePath = args.folder ? path.join(args.folder, file) : file;
    matches.push({ path: relativePath, summary, tagLine, metadata });
  }

  if (args.sort_by) {
    const field = args.sort_by;
    const sign = args.sort_order === "desc" ? -1 : 1;
    matches.sort(
      (a, b) => sign * compareFrontmatterValues(a.metadata[field], b.metadata[field], field)
    );
  }

  // Limit applies after sorting so "top N by field" works.
  const limited = matches.slice(0, limit);

  if (limited.length === 0) {
    return { content: [{ type: "text", text: "No notes found matching the query." }] };
  }

  const body = limited.map((r) => {
    let entry = `**${r.path}**\n${r.summary}`;
    if (r.tagLine) entry += `\n${r.tagLine}`;
    return entry;
  }).join("\n\n");

  const header = `Found ${limited.length} note${limited.length === 1 ? "" : "s"} matching query:\n\n`;
  return { content: [{ type: "text", text: header + body }] };
}
|
|
513
|
+
|
|
514
|
+
/**
 * Aggregate tag usage counts across notes (frontmatter tags, plus inline
 * tags when include_inline is set), optionally filtered by a tag pattern.
 */
async function handleTags(args) {
  const searchDir = args.folder ? resolveFolder(args.folder) : vaultPath;
  const files = await getAllMarkdownFiles(searchDir);
  const tagCounts = new Map();
  let notesWithTags = 0;

  for (const file of files) {
    const content = await fs.readFile(path.join(searchDir, file), "utf-8");

    // Collect this file's tags into a Set so each counts once per note.
    const fileTags = new Set();
    const metadata = extractFrontmatter(content);
    if (Array.isArray(metadata?.tags)) {
      for (const tag of metadata.tags) {
        if (tag) fileTags.add(String(tag).toLowerCase());
      }
    }
    if (args.include_inline) {
      for (const tag of extractInlineTags(content)) {
        fileTags.add(tag);
      }
    }

    if (fileTags.size === 0) continue;
    notesWithTags++;
    for (const tag of fileTags) {
      tagCounts.set(tag, (tagCounts.get(tag) || 0) + 1);
    }
  }

  let rows = [...tagCounts.entries()];
  if (args.pattern) {
    rows = rows.filter(([tag]) => matchesTagPattern(tag, args.pattern));
  }
  // Count descending, alphabetical for ties.
  rows.sort((a, b) => b[1] - a[1] || a[0].localeCompare(b[0]));

  if (rows.length === 0) {
    return {
      content: [{ type: "text", text: "No tags found matching criteria." }]
    };
  }

  const header = `Found ${rows.length} unique tag${rows.length === 1 ? "" : "s"} across ${notesWithTags} note${notesWithTags === 1 ? "" : "s"}\n`;
  const lines = rows.map(([tag, count]) => `${tag} (${count})`);
  return {
    content: [{ type: "text", text: header + "\n" + lines.join("\n") }]
  };
}
|
|
567
|
+
|
|
568
|
+
/** Query or clear the tool-activity log (action: "query" | "clear"). */
async function handleActivity(args) {
  const action = args.action || "query";

  switch (action) {
    case "query": {
      // activityLog may be null (logging disabled) — treat as empty.
      const entries = activityLog?.query({
        limit: args.limit || 50,
        tool: args.tool,
        session: args.session,
        since: args.since,
        before: args.before,
        path: args.path
      }) || [];

      if (entries.length === 0) {
        return {
          content: [{ type: "text", text: `No activity entries found. (current session: ${sessionId.slice(0, 8)})` }]
        };
      }

      const formatted = entries.map((e) => {
        const ts = e.timestamp.replace("T", " ").slice(0, 19);
        return `[${ts}] [${e.session_id.slice(0, 8)}] ${e.tool_name}\n${e.args_json}`;
      }).join("\n\n");

      return {
        content: [{
          type: "text",
          text: `Activity log (${entries.length} entr${entries.length === 1 ? "y" : "ies"}, current session: ${sessionId.slice(0, 8)}):\n\n${formatted}`
        }]
      };
    }
    case "clear": {
      const deleted = activityLog?.clear({
        session: args.session,
        tool: args.tool,
        before: args.before
      }) || 0;

      return {
        content: [{
          type: "text",
          text: `Cleared ${deleted} activity entr${deleted === 1 ? "y" : "ies"}.`
        }]
      };
    }
    default:
      throw new Error(`Unknown action: ${action}. Use 'query' or 'clear'.`);
  }
}
|
|
618
|
+
|
|
619
|
+
/** Embedding-based search; requires a configured semantic index. */
async function handleSemanticSearch(args) {
  if (!semanticIndex?.isAvailable) {
    throw new Error("Semantic search not available (OPENAI_API_KEY not set)");
  }
  const { query, folder, threshold } = args;
  const text = await semanticIndex.search({
    query,
    limit: args.limit || 5,
    folder,
    threshold
  });
  return { content: [{ type: "text", text }] };
}
|
|
631
|
+
|
|
632
|
+
/**
 * Suggest related notes to link from the given content (or an existing
 * note), skipping targets that are already wikilinked and the source note
 * itself. Requires the semantic index.
 */
async function handleSuggestLinks(args) {
  if (!semanticIndex?.isAvailable) {
    throw new Error("Link suggestions not available (OPENAI_API_KEY not set)");
  }

  const sourcePath = args.path;
  let inputText = args.content;
  if (!inputText && !sourcePath) {
    throw new Error("Either 'content' or 'path' must be provided");
  }
  if (!inputText) {
    inputText = await fs.readFile(resolveFile(sourcePath), "utf-8");
  }

  // Strip YAML frontmatter — only the body is meaningful for similarity.
  let body = inputText;
  if (body.startsWith("---")) {
    const endIdx = body.indexOf("\n---", 3);
    if (endIdx !== -1) body = body.slice(endIdx + 4).trim();
  }
  if (!body) throw new Error("No content to analyze");

  // Basenames already wikilinked in the text are never suggested.
  const linkedNames = new Set();
  for (const m of inputText.matchAll(/\[\[([^\]|]+)(?:\|[^\]]+)?\]\]/g)) {
    linkedNames.add(path.basename(m[1], ".md").toLowerCase());
  }

  const excludeFiles = new Set();
  if (sourcePath) excludeFiles.add(sourcePath);

  const wanted = args.limit || 5;
  // Over-fetch 3x so filtering already-linked notes still yields enough.
  const candidates = await semanticIndex.searchRaw({
    query: body.slice(0, 8000),
    limit: wanted * 3,
    folder: args.folder,
    threshold: args.threshold,
    excludeFiles
  });

  const suggestions = [];
  for (const candidate of candidates) {
    if (suggestions.length >= wanted) break;
    const basename = path.basename(candidate.path, ".md").toLowerCase();
    if (!linkedNames.has(basename)) suggestions.push(candidate);
  }

  if (suggestions.length === 0) {
    return { content: [{ type: "text", text: "No link suggestions found." }] };
  }

  const formatted = suggestions
    .map((r) => `**${r.path}** (score: ${r.score})\n${r.preview}`)
    .join("\n\n");
  return {
    content: [{ type: "text", text: `Found ${suggestions.length} link suggestion${suggestions.length === 1 ? "" : "s"}:\n\n${formatted}` }]
  };
}
|
|
693
|
+
|
|
694
|
+
/**
 * Soft-delete a note by moving it into the vault's .trash/ folder,
 * preserving its relative path. Takes an exact path (no fuzzy resolution —
 * this is a destructive operation). On a name collision inside .trash/, a
 * timestamp suffix is appended. The in-memory file index is updated and the
 * response warns about notes whose wikilinks now dangle.
 */
async function handleTrash(args) {
  const resolvedRelative = args.path;
  const filePath = resolvePath(resolvedRelative);

  // Verify file exists (fs.access throws ENOENT otherwise)
  await fs.access(filePath);

  // Find incoming links for warning output (done before the move so the
  // link scan still sees the file in place)
  const allFilesList = Array.from(allFilesSet);
  const linkingFiles = await findFilesLinkingTo(resolvedRelative, vaultPath, allFilesList, basenameMap, allFilesSet);

  // Determine trash destination: .trash/<original-relative-path>
  let trashRelative = path.join(".trash", resolvedRelative);
  let trashAbsolute = path.join(vaultPath, trashRelative);

  // Handle collision: append timestamp suffix
  try {
    await fs.access(trashAbsolute);
    // Collision — add timestamp (ISO timestamp with ":"/"." replaced so it
    // is filesystem-safe), inserted before the original extension
    const ext = path.extname(resolvedRelative);
    const base = resolvedRelative.slice(0, -ext.length);
    const timestamp = new Date().toISOString().replace(/[:.]/g, "-").slice(0, 19);
    trashRelative = path.join(".trash", `${base}.${timestamp}${ext}`);
    trashAbsolute = path.join(vaultPath, trashRelative);
  } catch (e) {
    if (e.code !== "ENOENT") throw e;
    // No collision — use original path
  }

  // Create trash directory and move file
  await fs.mkdir(path.dirname(trashAbsolute), { recursive: true });
  await fs.rename(filePath, trashAbsolute);

  // Update in-memory basename map so fuzzy resolution stops offering the
  // trashed note; drop the bucket entirely when it empties
  allFilesSet.delete(resolvedRelative);
  const oldBasename = path.basename(resolvedRelative, ".md").toLowerCase();
  const entries = basenameMap.get(oldBasename);
  if (entries) {
    const idx = entries.indexOf(resolvedRelative);
    if (idx !== -1) entries.splice(idx, 1);
    if (entries.length === 0) basenameMap.delete(oldBasename);
  }

  // Build output, including a broken-backlinks warning when applicable
  let text = `Trashed ${resolvedRelative} → ${trashRelative}`;
  if (linkingFiles.length > 0) {
    text += `\n\n**Warning:** ${linkingFiles.length} file${linkingFiles.length === 1 ? "" : "s"} had links to this note (now broken):`;
    for (const { file } of linkingFiles) {
      text += `\n- ${file}`;
    }
  }

  return { content: [{ type: "text", text }] };
}
|
|
748
|
+
|
|
749
|
+
/**
 * Move/rename a note and (by default) rewrite wikilinks in every note that
 * referenced it. Both paths must be exact — this is a destructive operation,
 * so no fuzzy resolution is applied.
 *
 * @param {{ old_path: string, new_path: string, update_links?: boolean }} args
 *   `update_links` defaults to true; pass false to skip link rewriting.
 * @returns {Promise<{content: Array<{type: string, text: string}>}>} MCP text result.
 * @throws If the source is missing or the destination already exists.
 */
async function handleMove(args) {
  // Both source and destination require exact paths (destructive operation).
  const oldRelative = args.old_path;
  const oldAbsolute = resolvePath(oldRelative);
  const newRelative = args.new_path;
  const newAbsolute = resolvePath(newRelative);

  // Verify source exists.
  await fs.access(oldAbsolute);

  // Verify destination does NOT exist (refuse to clobber).
  try {
    await fs.access(newAbsolute);
    throw new Error(`Destination already exists: ${newRelative}. Use vault_edit or vault_trash + vault_write instead.`);
  } catch (e) {
    // Our own Error has no `code`, so it is rethrown here; only ENOENT
    // (destination absent) is the success path.
    if (e.code !== "ENOENT") throw e;
  }

  // Find files linking to the source BEFORE the move, while the index is fresh.
  const allFilesList = Array.from(allFilesSet);
  const linkingFiles = args.update_links !== false
    ? await findFilesLinkingTo(oldRelative, vaultPath, allFilesList, basenameMap, allFilesSet)
    : [];

  // Create destination directory and move the file.
  await fs.mkdir(path.dirname(newAbsolute), { recursive: true });
  await fs.rename(oldAbsolute, newAbsolute);

  // Update in-memory indexes: remove the old entry...
  allFilesSet.delete(oldRelative);
  const oldBasename = path.basename(oldRelative, ".md").toLowerCase();
  const oldEntries = basenameMap.get(oldBasename);
  if (oldEntries) {
    const idx = oldEntries.indexOf(oldRelative);
    if (idx !== -1) oldEntries.splice(idx, 1);
    if (oldEntries.length === 0) basenameMap.delete(oldBasename);
  }

  // ...and register the new one.
  allFilesSet.add(newRelative);
  const newBasename = path.basename(newRelative, ".md").toLowerCase();
  if (!basenameMap.has(newBasename)) {
    basenameMap.set(newBasename, []);
  }
  basenameMap.get(newBasename).push(newRelative);

  // Determine the new link target — use the full path if the basename is now
  // shared by more than one note, so rewritten links stay unambiguous.
  const newEntries = basenameMap.get(newBasename);
  const isAmbiguous = newEntries && newEntries.length > 1;
  const newLinkTarget = isAmbiguous
    ? newRelative.replace(/\.md$/, "")
    : path.basename(newRelative, ".md");

  // Old link target — pass the full path so both [[basename]] and
  // [[folder/name]] forms of the link are matched.
  const oldLinkTarget = oldRelative.replace(/\.md$/, "");

  // Rewrite wikilinks in referring files.
  let updatedCount = 0;
  if (args.update_links !== false) {
    for (const { file, content } of linkingFiles) {
      const updated = rewriteWikilinks(content, oldLinkTarget, newLinkTarget);
      if (updated !== content) {
        await fs.writeFile(path.join(vaultPath, file), updated, "utf-8");
        updatedCount++;
      }
    }
  }

  // Build output.
  let text = `Moved ${oldRelative} → ${newRelative}`;
  if (updatedCount > 0) {
    text += `\nUpdated wikilinks in ${updatedCount} file${updatedCount === 1 ? "" : "s"}`;
  }
  if (isAmbiguous) {
    text += `\n\n**Note:** Basename "${path.basename(newRelative, ".md")}" is ambiguous (${newEntries.length} files).`;
    // Bug fix: only claim links were rewritten when some actually were —
    // previously this message was emitted even with update_links === false.
    if (updatedCount > 0) {
      text += ` Links were rewritten using full paths.`;
    }
  }

  return { content: [{ type: "text", text }] };
}
|
|
827
|
+
|
|
828
|
+
/**
 * Merge the supplied fields into a note's YAML frontmatter, persist the
 * updated file, and report the resulting frontmatter back to the caller.
 *
 * @param {{ path: string, fields?: Object }} args - vault-relative path plus
 *   a map of frontmatter keys to set (omitted/empty means no field changes).
 * @returns {Promise<{content: Array<{type: string, text: string}>}>} MCP text result.
 * @throws {Error} With a friendly message (and the original error as `cause`)
 *   when the file does not exist.
 */
async function handleUpdateFrontmatter(args) {
  const absolutePath = resolvePath(args.path);

  let original;
  try {
    original = await fs.readFile(absolutePath, "utf-8");
  } catch (readErr) {
    // Translate only the missing-file case into a user-facing message.
    if (readErr.code !== "ENOENT") throw readErr;
    throw new Error(`File not found: ${args.path}`, { cause: readErr });
  }

  const result = updateFrontmatter(original, args.fields || {});
  await fs.writeFile(absolutePath, result.content, "utf-8");

  // Render each frontmatter entry as "key: value", with arrays bracketed.
  const summary = [];
  for (const [key, value] of Object.entries(result.frontmatter)) {
    const rendered = Array.isArray(value) ? `[${value.join(", ")}]` : String(value);
    summary.push(`${key}: ${rendered}`);
  }

  return {
    content: [{ type: "text", text: `Updated frontmatter in ${args.path}:\n${summary.join("\n")}` }]
  };
}
|
|
850
|
+
|
|
851
|
+
return new Map([
|
|
852
|
+
["vault_read", handleRead],
|
|
853
|
+
["vault_write", handleWrite],
|
|
854
|
+
["vault_append", handleAppend],
|
|
855
|
+
["vault_edit", handleEdit],
|
|
856
|
+
["vault_search", handleSearch],
|
|
857
|
+
["vault_list", handleList],
|
|
858
|
+
["vault_recent", handleRecent],
|
|
859
|
+
["vault_links", handleLinks],
|
|
860
|
+
["vault_neighborhood", handleNeighborhood],
|
|
861
|
+
["vault_query", handleQuery],
|
|
862
|
+
["vault_tags", handleTags],
|
|
863
|
+
["vault_activity", handleActivity],
|
|
864
|
+
["vault_semantic_search", handleSemanticSearch],
|
|
865
|
+
["vault_suggest_links", handleSuggestLinks],
|
|
866
|
+
["vault_peek", handlePeek],
|
|
867
|
+
["vault_trash", handleTrash],
|
|
868
|
+
["vault_move", handleMove],
|
|
869
|
+
["vault_update_frontmatter", handleUpdateFrontmatter],
|
|
870
|
+
]);
|
|
871
|
+
}
|