@oomkapwn/enquire-mcp 1.6.0 → 1.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +57 -0
- package/README.md +8 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +35 -2
- package/dist/index.js.map +1 -1
- package/dist/tools.d.ts +106 -0
- package/dist/tools.d.ts.map +1 -1
- package/dist/tools.js +375 -0
- package/dist/tools.js.map +1 -1
- package/dist/vault.d.ts +8 -0
- package/dist/vault.d.ts.map +1 -1
- package/dist/vault.js +75 -0
- package/dist/vault.js.map +1 -1
- package/docs/api.md +52 -1
- package/package.json +1 -1
package/dist/tools.js
CHANGED
|
@@ -1445,6 +1445,381 @@ export async function openInUi(vault, args) {
|
|
|
1445
1445
|
title: stripMd(target.basename)
|
|
1446
1446
|
};
|
|
1447
1447
|
}
|
|
1448
|
+
/**
 * List `.canvas` files in the vault (optionally restricted to a folder),
 * newest-modified first.
 *
 * Each entry includes node/edge counts parsed from the canvas JSON; a
 * malformed or unreadable canvas is still listed, with zeroed counts.
 *
 * @param {object} vault - Vault accessor (ensureExists, listFilesByExtension, readBinaryFile).
 * @param {{ folder?: string, limit?: number }} args - Optional folder filter and result cap (default 100).
 * @returns {Promise<Array<{path: string, name: string, size_bytes: number, mtime: string, node_count: number, edge_count: number}>>}
 */
export async function listCanvases(vault, args) {
    await vault.ensureExists();
    const limit = args.limit ?? 100;
    const all = await vault.listFilesByExtension(".canvas", args.folder);
    const out = [];
    for (const e of all) {
        if (out.length >= limit)
            break;
        let nodeCount = 0;
        let edgeCount = 0;
        // Default to 0 so a failed read reports an honest size. (Previously this
        // was initialized to e.mtimeMs as a "placeholder", which leaked a
        // timestamp into size_bytes whenever readBinaryFile threw.)
        let size = 0;
        try {
            const buf = await vault.readBinaryFile(e.absPath);
            size = buf.byteLength;
            const txt = buf.toString("utf8");
            const parsed = JSON.parse(txt);
            nodeCount = Array.isArray(parsed.nodes) ? parsed.nodes.length : 0;
            edgeCount = Array.isArray(parsed.edges) ? parsed.edges.length : 0;
        }
        catch {
            // Malformed canvas — fall through with 0 counts. Don't poison the listing.
        }
        out.push({
            path: e.relPath,
            name: e.basename.replace(/\.canvas$/i, ""),
            size_bytes: size,
            mtime: new Date(e.mtimeMs).toISOString(),
            node_count: nodeCount,
            edge_count: edgeCount
        });
    }
    // ISO-8601 timestamps sort chronologically as strings; descending = newest first.
    out.sort((a, b) => b.mtime.localeCompare(a.mtime));
    return out;
}
|
|
1482
|
+
/**
 * Read a `.canvas` (Obsidian JSON Canvas) file and normalize it into a
 * typed node/edge structure.
 *
 * Nodes are classified as text/file/link/group (anything else is kept as
 * `unknown` with its raw payload). `file:` node references are resolved
 * against the vault's current markdown index; unresolvable references are
 * collected in `broken_file_refs`.
 *
 * @param {object} vault - Vault accessor (ensureExists, resolveInside, stat, toRel, readBinaryFile, listMarkdown).
 * @param {{ path: string }} args - Canvas path; the `.canvas` extension is appended if missing.
 * @returns {Promise<object>} Normalized canvas: path, name, size_bytes, mtime, nodes, edges, summary, broken_file_refs.
 * @throws {Error} If `path` is missing, the file is absent/excluded, or the file is not valid JSON.
 */
export async function readCanvas(vault, args) {
    await vault.ensureExists();
    if (!args.path)
        throw new Error("path is required");
    const normalized = args.path.toLowerCase().endsWith(".canvas") ? args.path : `${args.path}.canvas`;
    const abs = vault.resolveInside(normalized);
    // Single stat: fails fast if the file is missing or excluded, and also
    // supplies the size/mtime metadata used in the result below (the previous
    // version stat'd the same path twice).
    const st = await vault.stat(abs);
    const rel = vault.toRel(abs);
    const buf = await vault.readBinaryFile(abs);
    let parsed;
    try {
        parsed = JSON.parse(buf.toString("utf8"));
    }
    catch (err) {
        throw new Error(`Canvas file is not valid JSON: ${rel} — ${err instanceof Error ? err.message : String(err)}`);
    }
    // Resolve each `file:` node's reference against the vault's current
    // markdown index — surfaces broken canvas links the same way
    // get_unresolved_wikilinks does for note bodies.
    const allMarkdown = await vault.listMarkdown();
    const nodes = [];
    const summary = { text: 0, file: 0, link: 0, group: 0, unknown: 0 };
    const brokenRefs = [];
    if (Array.isArray(parsed.nodes)) {
        for (const raw of parsed.nodes) {
            if (!raw || typeof raw !== "object")
                continue;
            const n = raw;
            // Defensive field extraction: canvas files are hand-editable JSON,
            // so every field is type-checked with a safe fallback.
            const id = typeof n.id === "string" ? n.id : "";
            const x = typeof n.x === "number" ? n.x : 0;
            const y = typeof n.y === "number" ? n.y : 0;
            const width = typeof n.width === "number" ? n.width : 0;
            const height = typeof n.height === "number" ? n.height : 0;
            const color = typeof n.color === "string" ? n.color : undefined;
            const type = typeof n.type === "string" ? n.type : "unknown";
            switch (type) {
                case "text":
                    nodes.push({
                        kind: "text",
                        id,
                        x,
                        y,
                        width,
                        height,
                        text: typeof n.text === "string" ? n.text : "",
                        ...(color !== undefined ? { color } : {})
                    });
                    summary.text += 1;
                    break;
                case "file": {
                    const fileRef = typeof n.file === "string" ? n.file : "";
                    // Strip leading slash so `findBestMatch` treats it as relative.
                    const cleaned = fileRef.replace(/^\/+/, "");
                    // findBestMatch only looks at the basename; for canvases we have a full
                    // vault-relative path, so try direct match first. Fall through to
                    // findBestMatch (basename) for the path-stripped case.
                    const direct = cleaned.length > 0 ? allMarkdown.find((m) => m.relPath.replace(/\\/g, "/") === cleaned) : undefined;
                    const resolved = direct ?? (cleaned ? findBestMatch(allMarkdown, cleaned) : null);
                    if (cleaned && !resolved)
                        brokenRefs.push(cleaned);
                    nodes.push({
                        kind: "file",
                        id,
                        x,
                        y,
                        width,
                        height,
                        file: fileRef,
                        file_resolved: resolved ? resolved.relPath : null,
                        ...(typeof n.subpath === "string" ? { subpath: n.subpath } : {}),
                        ...(color !== undefined ? { color } : {})
                    });
                    summary.file += 1;
                    break;
                }
                case "link":
                    nodes.push({
                        kind: "link",
                        id,
                        x,
                        y,
                        width,
                        height,
                        url: typeof n.url === "string" ? n.url : "",
                        ...(color !== undefined ? { color } : {})
                    });
                    summary.link += 1;
                    break;
                case "group":
                    nodes.push({
                        kind: "group",
                        id,
                        x,
                        y,
                        width,
                        height,
                        ...(typeof n.label === "string" ? { label: n.label } : {}),
                        ...(color !== undefined ? { color } : {})
                    });
                    summary.group += 1;
                    break;
                default:
                    // Preserve unrecognized node types verbatim so nothing is lost.
                    nodes.push({ kind: "unknown", id, raw_type: type, raw: n });
                    summary.unknown += 1;
            }
        }
    }
    const edges = [];
    if (Array.isArray(parsed.edges)) {
        for (const raw of parsed.edges) {
            if (!raw || typeof raw !== "object")
                continue;
            const e = raw;
            const id = typeof e.id === "string" ? e.id : "";
            const fromNode = typeof e.fromNode === "string" ? e.fromNode : "";
            const toNode = typeof e.toNode === "string" ? e.toNode : "";
            // An edge without both endpoints is meaningless — drop it.
            if (!fromNode || !toNode)
                continue;
            edges.push({
                id,
                from_node: fromNode,
                ...(typeof e.fromSide === "string" ? { from_side: e.fromSide } : {}),
                to_node: toNode,
                ...(typeof e.toSide === "string" ? { to_side: e.toSide } : {}),
                ...(typeof e.label === "string" ? { label: e.label } : {}),
                ...(typeof e.color === "string" ? { color: e.color } : {})
            });
        }
    }
    return {
        path: rel,
        name: path.basename(rel).replace(/\.canvas$/i, ""),
        size_bytes: st.size,
        mtime: new Date(st.mtimeMs).toISOString(),
        nodes,
        edges,
        summary,
        broken_file_refs: brokenRefs
    };
}
|
|
1623
|
+
// Per-vault TF-IDF index cache. A WeakMap keyed by the vault instance lets the
// cached index be garbage-collected together with the vault object itself.
const tfidfCache = new WeakMap();
|
|
1624
|
+
// Common English function words excluded from TF-IDF tokenization — they carry
// almost no topical signal and would otherwise dominate term frequencies.
const STOP_WORDS = new Set((
    "a an and are as at be but by for from has have if in is it its of on or " +
    "that the this to was were will with i you we they he she not no do does did " +
    "had been being so than then there their them these those what when where which who why how"
).split(" "));
|
|
1684
|
+
/**
 * Tokenize text for TF-IDF: lowercase, extract alphanumeric tokens (with
 * interior `_`/`-`), then drop tokens shorter than 2 or longer than 40
 * characters and any stop word.
 *
 * @param {string} text - Raw note body or query text.
 * @returns {string[]} Filtered tokens in document order.
 */
function tokenizeForTfidf(text) {
    const raw = text.toLowerCase().match(/[a-z0-9][a-z0-9_-]*/g) ?? [];
    return raw.filter((t) => t.length >= 2 && t.length <= 40 && !STOP_WORDS.has(t));
}
|
|
1699
|
+
/**
 * Build (or reuse) a TF-IDF index over all markdown notes in the vault.
 *
 * The index is cached per vault instance in `tfidfCache` and reused verbatim
 * as long as the markdown listing is unchanged: same number of entries, and
 * each entry's relPath and mtimeMs match positionally. Any added, removed,
 * reordered, or modified note invalidates the cache and triggers a full
 * rebuild.
 *
 * Weighting: log-scaled term frequency (1 + ln tf) times smoothed IDF, with
 * each document vector L2-normalized so cosine similarity reduces to a dot
 * product in `semanticSearch`.
 *
 * @param {object} vault - Vault accessor (listMarkdown, readNote).
 * @returns {Promise<{docs: Array<{relPath, basename, mtimeMs, weights: Map}>, idf: Map, entriesRef: Array}>}
 */
async function buildTfidfIndex(vault) {
    const entries = await vault.listMarkdown();
    const cached = tfidfCache.get(vault);
    // Cache hit requires a positionally identical listing (path + mtime).
    if (cached &&
        cached.entriesRef.length === entries.length &&
        cached.entriesRef.every((e, i) => entries[i]?.relPath === e.relPath && entries[i]?.mtimeMs === e.mtimeMs)) {
        return cached;
    }
    // Pass 1: per-document raw term frequencies plus corpus document frequency.
    const rawDocs = [];
    const docFreq = new Map();
    for (const e of entries) {
        const { parsed } = await vault.readNote(e.absPath, e.mtimeMs);
        const tokens = tokenizeForTfidf(parsed.body);
        const tf = new Map();
        for (const t of tokens)
            tf.set(t, (tf.get(t) ?? 0) + 1);
        rawDocs.push({ entry: e, tf });
        // df counts documents containing the term, not total occurrences.
        for (const t of tf.keys())
            docFreq.set(t, (docFreq.get(t) ?? 0) + 1);
    }
    // Smoothed IDF: ln(1 + N / (1 + df)). Smoothing keeps every-doc terms
    // non-zero and tames inflation on small vaults.
    const N = rawDocs.length || 1;
    const idf = new Map();
    for (const [term, df] of docFreq) {
        idf.set(term, Math.log(1 + N / (1 + df)));
    }
    // Pass 2: weight each document vector (1 + ln tf) * idf and L2-normalize.
    const docs = [];
    for (const r of rawDocs) {
        const weights = new Map();
        let normSq = 0;
        for (const [term, count] of r.tf) {
            const w = (1 + Math.log(count)) * (idf.get(term) ?? 0);
            // Zero-weight terms contribute nothing to cosine; skip them.
            if (w === 0)
                continue;
            weights.set(term, w);
            normSq += w * w;
        }
        const norm = Math.sqrt(normSq);
        if (norm > 0) {
            for (const [t, w] of weights)
                weights.set(t, w / norm);
        }
        docs.push({
            relPath: r.entry.relPath,
            basename: r.entry.basename,
            mtimeMs: r.entry.mtimeMs,
            weights
        });
    }
    // entriesRef is kept solely for the cache-validity check above.
    const result = { docs, idf, entriesRef: entries };
    tfidfCache.set(vault, result);
    return result;
}
|
|
1753
|
+
/**
 * Rank markdown notes against a free-text query using TF-IDF cosine
 * similarity over the vault's cached index.
 *
 * The query is vectorized with the same tokenizer and corpus IDF as the
 * documents, L2-normalized, and scored against every (optionally
 * folder-filtered) document. Top results get a snippet centered on their
 * most distinctive (highest-IDF) matched term.
 *
 * @param {object} vault - Vault accessor (ensureExists, readNote, resolveInside).
 * @param {{ query: string, limit?: number, min_score?: number, folder?: string }} args
 * @returns {Promise<{query: string, total_docs: number, method: string, matches: Array}>}
 * @throws {Error} If the query is empty/whitespace.
 */
export async function semanticSearch(vault, args) {
    await vault.ensureExists();
    const limit = args.limit ?? 10;
    const minScore = args.min_score ?? 0.05;
    if (!args.query.trim())
        throw new Error("query must not be empty");
    const { docs, idf } = await buildTfidfIndex(vault);
    // Vectorize query: same tokenization, IDF from the corpus, L2 normalize.
    const qTokens = tokenizeForTfidf(args.query);
    const qTf = new Map();
    for (const t of qTokens)
        qTf.set(t, (qTf.get(t) ?? 0) + 1);
    const qWeights = new Map();
    let qNormSq = 0;
    for (const [t, count] of qTf) {
        const w = (1 + Math.log(count)) * (idf.get(t) ?? 0);
        // Terms absent from the corpus (idf 0) can never match a document.
        if (w === 0)
            continue;
        qWeights.set(t, w);
        qNormSq += w * w;
    }
    const qNorm = Math.sqrt(qNormSq);
    if (qNorm > 0) {
        for (const [t, w] of qWeights)
            qWeights.set(t, w / qNorm);
    }
    // Cosine = Σ q[t]·d[t] over shared terms (both vectors are L2-normed).
    const folderPrefix = args.folder ? `${args.folder.replace(/\/+$/, "")}/` : null;
    const scored = [];
    for (const doc of docs) {
        if (folderPrefix && !doc.relPath.startsWith(folderPrefix) && doc.relPath !== args.folder)
            continue;
        let s = 0;
        const matched = [];
        for (const [t, qw] of qWeights) {
            const dw = doc.weights.get(t);
            if (dw !== undefined) {
                s += qw * dw;
                matched.push(t);
            }
        }
        if (s < minScore)
            continue;
        scored.push({ doc, score: s, matchedTerms: matched });
    }
    scored.sort((a, b) => b.score - a.score);
    const matches = [];
    for (const { doc, score, matchedTerms } of scored.slice(0, limit)) {
        // Most distinctive (highest-IDF) matched term drives the snippet.
        matchedTerms.sort((a, b) => (idf.get(b) ?? 0) - (idf.get(a) ?? 0));
        const { content } = await vault.readNote(vault.resolveInside(doc.relPath), doc.mtimeMs);
        // Lowercase once per document — the original re-lowered the full note
        // body on every matched-term iteration (O(terms × content)).
        const lowered = content.toLowerCase();
        let snippetText = "";
        for (const t of matchedTerms) {
            const idx = lowered.indexOf(t);
            if (idx >= 0) {
                const { snippet } = sliceSnippet(content, idx, t.length);
                snippetText = snippet;
                break;
            }
        }
        matches.push({
            path: doc.relPath,
            title: stripMd(doc.basename),
            score: Math.round(score * 10000) / 10000,
            snippet: snippetText,
            matched_terms: matchedTerms.slice(0, 8),
            mtime: new Date(doc.mtimeMs).toISOString()
        });
    }
    return { query: args.query, total_docs: docs.length, method: "tfidf-cosine", matches };
}
|
|
1448
1823
|
// ─── small set / string helpers shared by find_similar / get_note_neighbors ─
|
|
1449
1824
|
function jaccard(a, b) {
|
|
1450
1825
|
if (a.size === 0 && b.size === 0)
|