@liendev/lien 0.18.0 → 0.19.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -56,7 +56,7 @@ var init_constants = __esm({
  DEFAULT_GIT_POLL_INTERVAL_MS = 1e4;
  DEFAULT_DEBOUNCE_MS = 1e3;
  CURRENT_CONFIG_VERSION = getPackageVersion();
- INDEX_FORMAT_VERSION = 2;
+ INDEX_FORMAT_VERSION = 4;
  }
  });
@@ -100,6 +100,19 @@ var init_schema = __esm({
  // Enabled by default (fast with incremental indexing!)
  debounceMs: DEFAULT_DEBOUNCE_MS
  },
+ complexity: {
+ enabled: true,
+ thresholds: {
+ testPaths: 15,
+ // 🔀 Max test paths per function
+ mentalLoad: 15,
+ // 🧠 Max mental load score
+ timeToUnderstandMinutes: 60,
+ // ⏱️ Functions taking >1 hour to understand
+ estimatedBugs: 1.5
+ // 🐛 Functions estimated to have >1.5 bugs
+ }
+ },
  frameworks: []
  // Will be populated by lien init via framework detection
  };
@@ -260,6 +273,13 @@ function deepMergeConfig(defaults, user) {
  ...defaults.fileWatching,
  ...user.fileWatching
  },
+ complexity: user.complexity ? {
+ enabled: user.complexity.enabled ?? defaults.complexity?.enabled ?? true,
+ thresholds: {
+ ...defaults.complexity?.thresholds,
+ ...user.complexity.thresholds || {}
+ }
+ } : defaults.complexity,
  frameworks: user.frameworks ?? defaults.frameworks
  };
  }
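
A minimal sketch of the merge behaviour above (standalone TypeScript; the ComplexityConfig shape is simplified from the defaults in this diff, not the package's exported types): a user who overrides a single threshold keeps every other default.

    interface ComplexityConfig {
      enabled: boolean;
      thresholds: Record<string, number>;
    }

    const defaults: ComplexityConfig = {
      enabled: true,
      thresholds: { testPaths: 15, mentalLoad: 15, timeToUnderstandMinutes: 60, estimatedBugs: 1.5 },
    };

    // Partial user config: only one threshold overridden, `enabled` omitted.
    const user: Partial<ComplexityConfig> = { thresholds: { testPaths: 10 } };

    const merged: ComplexityConfig = {
      enabled: user.enabled ?? defaults.enabled,
      thresholds: { ...defaults.thresholds, ...(user.thresholds ?? {}) },
    };
    // merged.thresholds => { testPaths: 10, mentalLoad: 15, timeToUnderstandMinutes: 60, estimatedBugs: 1.5 }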
@@ -1457,6 +1477,613 @@ var init_parser = __esm({
  }
  });

+ // src/indexer/ast/complexity/cyclomatic.ts
+ function calculateComplexity(node) {
+ let complexity = 1;
+ function traverse(n) {
+ if (DECISION_POINTS.includes(n.type)) {
+ if (n.type === "binary_expression") {
+ const operator = n.childForFieldName("operator");
+ if (operator && (operator.text === "&&" || operator.text === "||")) {
+ complexity++;
+ }
+ } else {
+ complexity++;
+ }
+ }
+ for (let i = 0; i < n.namedChildCount; i++) {
+ const child = n.namedChild(i);
+ if (child) traverse(child);
+ }
+ }
+ traverse(node);
+ return complexity;
+ }
+ var DECISION_POINTS;
+ var init_cyclomatic = __esm({
+ "src/indexer/ast/complexity/cyclomatic.ts"() {
+ "use strict";
+ DECISION_POINTS = [
+ // Common across languages (TypeScript/JavaScript/Python/PHP)
+ "if_statement",
+ // if conditions
+ "while_statement",
+ // while loops
+ "for_statement",
+ // for loops
+ "switch_case",
+ // switch/case statements
+ "catch_clause",
+ // try/catch error handling
+ "ternary_expression",
+ // Ternary operator (a ? b : c)
+ "binary_expression",
+ // For && and || logical operators
+ // TypeScript/JavaScript specific
+ "do_statement",
+ // do...while loops
+ "for_in_statement",
+ // for...in loops
+ "for_of_statement",
+ // for...of loops
+ // PHP specific
+ "foreach_statement",
+ // PHP foreach loops
+ // Python specific
+ "elif_clause",
+ // Python elif (adds decision point)
+ // Note: 'else_clause' is NOT a decision point (it's the default path)
+ "except_clause",
+ // Python except (try/except)
+ "conditional_expression"
+ // Python ternary (x if cond else y)
+ ];
+ }
+ });
+
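
Worked example of the cyclomatic rule above (hypothetical input, counted by hand): the score starts at 1 and each decision point adds 1, but a binary_expression only counts when its operator is && or ||.

    function f(a: number, b: number): number {
      if (a > 0 && b > 0) return a + b; // if_statement +1, "&&" +1 (the "+" operator is skipped)
      for (let i = 0; i < a; i++) b++;  // for_statement +1 ("<" comparison is skipped)
      return a > b ? a : b;             // ternary_expression +1
    }
    // Base 1 + 4 decision points => cyclomatic complexity 5.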
+ // src/indexer/ast/complexity/cognitive.ts
+ function getLogicalOperator(node) {
+ if (node.type !== "binary_expression" && node.type !== "boolean_operator") {
+ return null;
+ }
+ const operator = node.childForFieldName("operator");
+ const opText = operator?.text;
+ if (opText === "&&" || opText === "and") return "&&";
+ if (opText === "||" || opText === "or") return "||";
+ return null;
+ }
+ function getChildNestingLevel(parent, child, currentLevel) {
+ const isCondition = parent.childForFieldName("condition") === child;
+ const isElseClause = NON_NESTING_TYPES.has(child.type);
+ return !isCondition && !isElseClause ? currentLevel + 1 : currentLevel;
+ }
+ function getNestedLambdaIncrement(nodeType, nestingLevel) {
+ return LAMBDA_TYPES.has(nodeType) && nestingLevel > 0 ? 1 : 0;
+ }
+ function traverseLogicalChildren(n, level, op, ctx) {
+ const operator = n.childForFieldName("operator");
+ for (let i = 0; i < n.namedChildCount; i++) {
+ const child = n.namedChild(i);
+ if (child && child !== operator) ctx.traverse(child, level, op);
+ }
+ }
+ function traverseNestingChildren(n, level, ctx) {
+ for (let i = 0; i < n.namedChildCount; i++) {
+ const child = n.namedChild(i);
+ if (child) ctx.traverse(child, getChildNestingLevel(n, child, level), null);
+ }
+ }
+ function traverseAllChildren(n, level, ctx) {
+ for (let i = 0; i < n.namedChildCount; i++) {
+ const child = n.namedChild(i);
+ if (child) ctx.traverse(child, level, null);
+ }
+ }
+ function calculateCognitiveComplexity(node) {
+ let complexity = 0;
+ const ctx = { traverse };
+ function traverse(n, nestingLevel, lastLogicalOp) {
+ const logicalOp = getLogicalOperator(n);
+ if (logicalOp) {
+ complexity += lastLogicalOp !== logicalOp ? 1 : 0;
+ traverseLogicalChildren(n, nestingLevel, logicalOp, ctx);
+ return;
+ }
+ if (NESTING_TYPES.has(n.type)) {
+ complexity += 1 + nestingLevel;
+ traverseNestingChildren(n, nestingLevel, ctx);
+ return;
+ }
+ if (NON_NESTING_TYPES.has(n.type)) {
+ complexity += 1;
+ traverseAllChildren(n, nestingLevel + 1, ctx);
+ return;
+ }
+ complexity += getNestedLambdaIncrement(n.type, nestingLevel);
+ traverseAllChildren(n, nestingLevel, ctx);
+ }
+ traverse(node, 0, null);
+ return complexity;
+ }
+ var NESTING_TYPES, NON_NESTING_TYPES, LAMBDA_TYPES;
+ var init_cognitive = __esm({
+ "src/indexer/ast/complexity/cognitive.ts"() {
+ "use strict";
+ NESTING_TYPES = /* @__PURE__ */ new Set([
+ "if_statement",
+ "for_statement",
+ "while_statement",
+ "switch_statement",
+ "catch_clause",
+ "except_clause",
+ "do_statement",
+ "for_in_statement",
+ "for_of_statement",
+ "foreach_statement",
+ "match_statement"
+ ]);
+ NON_NESTING_TYPES = /* @__PURE__ */ new Set([
+ "else_clause",
+ "elif_clause",
+ "ternary_expression",
+ "conditional_expression"
+ ]);
+ LAMBDA_TYPES = /* @__PURE__ */ new Set(["arrow_function", "function_expression", "lambda"]);
+ }
+ });
+
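
Worked example of the cognitive scoring above (hypothetical input, counted by hand): nesting constructs cost 1 + current nesting level, else-like clauses cost a flat 1, and each boolean chain costs 1 per change of logical operator.

    function g(items: number[]): void {
      for (const x of items) {       // nesting construct at level 0: +1
        if (x > 0) {                 // nesting construct at level 1: +2
          if (x > 10 && x < 100) {   // level 2: +3, plus +1 for the "&&" chain
            console.log(x);
          }
        } else {                     // non-nesting else_clause: +1
          console.log(-x);
        }
      }
    }
    // Total cognitive complexity: 1 + 2 + 3 + 1 + 1 = 8.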
+ // src/indexer/ast/complexity/halstead.ts
+ function getOperatorSymbols(language) {
+ return OPERATOR_SYMBOLS[language] || OPERATOR_SYMBOLS.typescript;
+ }
+ function getOperatorKeywords(language) {
+ return OPERATOR_KEYWORDS[language] || OPERATOR_KEYWORDS.typescript;
+ }
+ function isOperator(node, language) {
+ const nodeType = node.type;
+ const nodeText = node.text;
+ if (OPERATOR_NODE_TYPES.has(nodeType)) {
+ return true;
+ }
+ const symbols = getOperatorSymbols(language);
+ const keywords = getOperatorKeywords(language);
+ return symbols.has(nodeText) || keywords.has(nodeText);
+ }
+ function isOperand(node) {
+ return OPERAND_NODE_TYPES.has(node.type);
+ }
+ function getOperatorKey(node) {
+ if (OPERATOR_NODE_TYPES.has(node.type)) {
+ const operator = node.childForFieldName("operator");
+ if (operator) {
+ return operator.text;
+ }
+ return node.type;
+ }
+ return node.text;
+ }
+ function getOperandKey(node) {
+ return node.text;
+ }
+ function sumValues(map) {
+ let sum = 0;
+ for (const count of map.values()) {
+ sum += count;
+ }
+ return sum;
+ }
+ function countHalstead(node, language) {
+ const operators = /* @__PURE__ */ new Map();
+ const operands = /* @__PURE__ */ new Map();
+ function traverse(n) {
+ if (isOperator(n, language)) {
+ const key = getOperatorKey(n);
+ operators.set(key, (operators.get(key) || 0) + 1);
+ }
+ if (isOperand(n)) {
+ const key = getOperandKey(n);
+ operands.set(key, (operands.get(key) || 0) + 1);
+ }
+ for (const child of n.children) {
+ traverse(child);
+ }
+ }
+ traverse(node);
+ return {
+ n1: operators.size,
+ n2: operands.size,
+ N1: sumValues(operators),
+ N2: sumValues(operands),
+ operators,
+ operands
+ };
+ }
+ function calculateHalsteadMetrics(counts) {
+ const { n1, n2, N1, N2 } = counts;
+ const vocabulary = n1 + n2;
+ const length = N1 + N2;
+ const volume = vocabulary > 0 ? length * Math.log2(vocabulary) : 0;
+ const difficulty = n2 > 0 ? n1 / 2 * (N2 / n2) : 0;
+ const effort = difficulty * volume;
+ const time = effort / 18;
+ const bugs = volume / 3e3;
+ return {
+ vocabulary: Math.round(vocabulary),
+ length: Math.round(length),
+ volume: Math.round(volume * 100) / 100,
+ difficulty: Math.round(difficulty * 100) / 100,
+ effort: Math.round(effort),
+ time: Math.round(time),
+ bugs: Math.round(bugs * 1e3) / 1e3
+ };
+ }
+ function calculateHalstead(node, language) {
+ const counts = countHalstead(node, language);
+ return calculateHalsteadMetrics(counts);
+ }
+ var OPERATOR_SYMBOLS, OPERATOR_KEYWORDS, OPERATOR_NODE_TYPES, OPERAND_NODE_TYPES;
+ var init_halstead = __esm({
+ "src/indexer/ast/complexity/halstead.ts"() {
+ "use strict";
+ OPERATOR_SYMBOLS = {
+ typescript: /* @__PURE__ */ new Set([
+ // Arithmetic
+ "+",
+ "-",
+ "*",
+ "/",
+ "%",
+ "**",
+ // Comparison
+ "==",
+ "===",
+ "!=",
+ "!==",
+ "<",
+ ">",
+ "<=",
+ ">=",
+ // Logical
+ "&&",
+ "||",
+ "!",
+ "??",
+ // Assignment
+ "=",
+ "+=",
+ "-=",
+ "*=",
+ "/=",
+ "%=",
+ "**=",
+ "&&=",
+ "||=",
+ "??=",
+ // Bitwise
+ "&",
+ "|",
+ "^",
+ "~",
+ "<<",
+ ">>",
+ ">>>",
+ "&=",
+ "|=",
+ "^=",
+ "<<=",
+ ">>=",
+ ">>>=",
+ // Other
+ "?",
+ ":",
+ ".",
+ "?.",
+ "++",
+ "--",
+ "...",
+ "=>",
+ // Brackets/parens (counted as operators)
+ "(",
+ ")",
+ "[",
+ "]",
+ "{",
+ "}"
+ ]),
+ python: /* @__PURE__ */ new Set([
+ // Arithmetic
+ "+",
+ "-",
+ "*",
+ "/",
+ "%",
+ "**",
+ "//",
+ // Comparison
+ "==",
+ "!=",
+ "<",
+ ">",
+ "<=",
+ ">=",
+ // Logical (handled via keywords below)
+ // Assignment
+ "=",
+ "+=",
+ "-=",
+ "*=",
+ "/=",
+ "%=",
+ "**=",
+ "//=",
+ "&=",
+ "|=",
+ "^=",
+ "<<=",
+ ">>=",
+ // Bitwise
+ "&",
+ "|",
+ "^",
+ "~",
+ "<<",
+ ">>",
+ // Other
+ ".",
+ ":",
+ "->",
+ "@",
+ "(",
+ ")",
+ "[",
+ "]",
+ "{",
+ "}"
+ ]),
+ php: /* @__PURE__ */ new Set([
+ // Arithmetic
+ "+",
+ "-",
+ "*",
+ "/",
+ "%",
+ "**",
+ // Comparison
+ "==",
+ "===",
+ "!=",
+ "!==",
+ "<>",
+ "<",
+ ">",
+ "<=",
+ ">=",
+ "<=>",
+ // Logical
+ "&&",
+ "||",
+ "!",
+ "and",
+ "or",
+ "xor",
+ // Assignment
+ "=",
+ "+=",
+ "-=",
+ "*=",
+ "/=",
+ "%=",
+ "**=",
+ ".=",
+ "&=",
+ "|=",
+ "^=",
+ "<<=",
+ ">>=",
+ "??=",
+ // Bitwise
+ "&",
+ "|",
+ "^",
+ "~",
+ "<<",
+ ">>",
+ // String
+ ".",
+ // Other
+ "?",
+ ":",
+ "::",
+ "->",
+ "=>",
+ "??",
+ "@",
+ "(",
+ ")",
+ "[",
+ "]",
+ "{",
+ "}"
+ ])
+ };
+ OPERATOR_KEYWORDS = {
+ typescript: /* @__PURE__ */ new Set([
+ "if",
+ "else",
+ "for",
+ "while",
+ "do",
+ "switch",
+ "case",
+ "default",
+ "return",
+ "throw",
+ "try",
+ "catch",
+ "finally",
+ "new",
+ "delete",
+ "typeof",
+ "instanceof",
+ "in",
+ "of",
+ "await",
+ "yield",
+ "break",
+ "continue",
+ "const",
+ "let",
+ "var",
+ "function",
+ "class",
+ "extends",
+ "implements",
+ "import",
+ "export",
+ "from",
+ "as"
+ ]),
+ python: /* @__PURE__ */ new Set([
+ "if",
+ "elif",
+ "else",
+ "for",
+ "while",
+ "match",
+ "case",
+ "return",
+ "raise",
+ "try",
+ "except",
+ "finally",
+ "and",
+ "or",
+ "not",
+ "is",
+ "in",
+ "await",
+ "yield",
+ "break",
+ "continue",
+ "pass",
+ "def",
+ "class",
+ "lambda",
+ "async",
+ "import",
+ "from",
+ "as",
+ "with",
+ "global",
+ "nonlocal",
+ "del",
+ "assert"
+ ]),
+ php: /* @__PURE__ */ new Set([
+ "if",
+ "elseif",
+ "else",
+ "for",
+ "foreach",
+ "while",
+ "do",
+ "switch",
+ "case",
+ "default",
+ "match",
+ "return",
+ "throw",
+ "try",
+ "catch",
+ "finally",
+ "new",
+ "clone",
+ "instanceof",
+ "yield",
+ "break",
+ "continue",
+ "function",
+ "class",
+ "extends",
+ "implements",
+ "trait",
+ "interface",
+ "use",
+ "namespace",
+ "as",
+ "echo",
+ "print",
+ "include",
+ "require",
+ "include_once",
+ "require_once",
+ "global",
+ "static",
+ "const",
+ "public",
+ "private",
+ "protected",
+ "readonly"
+ ])
+ };
+ OPERATOR_NODE_TYPES = /* @__PURE__ */ new Set([
+ // Expression operators
+ "binary_expression",
+ "unary_expression",
+ "update_expression",
+ "assignment_expression",
+ "augmented_assignment_expression",
+ "ternary_expression",
+ "conditional_expression",
+ // Call/access operators
+ "call_expression",
+ "method_call",
+ "member_expression",
+ "subscript_expression",
+ "attribute",
+ // Object/array literals ([] and {} are operators)
+ "array",
+ "object",
+ "dictionary",
+ "list"
+ ]);
+ OPERAND_NODE_TYPES = /* @__PURE__ */ new Set([
+ // Identifiers
+ "identifier",
+ "property_identifier",
+ "shorthand_property_identifier",
+ "variable_name",
+ "name",
+ // Literals
+ "number",
+ "integer",
+ "float",
+ "string",
+ "string_fragment",
+ "template_string",
+ "true",
+ "false",
+ "null",
+ "undefined",
+ "none",
+ // Special
+ "this",
+ "self",
+ "super"
+ ]);
+ }
+ });
+
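
The metrics above are the standard Halstead formulas: with n1 distinct operators, n2 distinct operands, N1 total operator occurrences, and N2 total operand occurrences, vocabulary n = n1 + n2, length N = N1 + N2, volume V = N · log2(n), difficulty D = (n1 / 2) · (N2 / n2), effort E = D · V, time T = E / 18 (conventionally seconds, via Stroud's number), and bugs B = V / 3000. A hand-computed example with made-up counts:

    const n1 = 8, n2 = 6, N1 = 20, N2 = 15;       // made-up counts for illustration
    const vocabulary = n1 + n2;                    // 14
    const length = N1 + N2;                        // 35
    const volume = length * Math.log2(vocabulary); // ≈ 133.26
    const difficulty = (n1 / 2) * (N2 / n2);       // 10
    const effort = difficulty * volume;            // ≈ 1332.6
    const time = effort / 18;                      // ≈ 74
    const bugs = volume / 3000;                    // ≈ 0.044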
+ // src/indexer/ast/complexity/index.ts
+ var init_complexity = __esm({
+ "src/indexer/ast/complexity/index.ts"() {
+ "use strict";
+ init_cyclomatic();
+ init_cognitive();
+ init_halstead();
+ }
+ });
+
  // src/indexer/ast/symbols.ts
  function extractFunctionInfo(node, content, parentClass) {
  const nameNode = node.childForFieldName("name");
@@ -1592,62 +2219,6 @@ function extractReturnType(node, _content) {
  if (!returnTypeNode) return void 0;
  return returnTypeNode.text;
  }
- function calculateComplexity(node) {
- let complexity = 1;
- const decisionPoints = [
- // Common across languages (TypeScript/JavaScript/Python/PHP)
- "if_statement",
- // if conditions
- "while_statement",
- // while loops
- "for_statement",
- // for loops
- "switch_case",
- // switch/case statements
- "catch_clause",
- // try/catch error handling
- "ternary_expression",
- // Ternary operator (a ? b : c)
- "binary_expression",
- // For && and || logical operators
- // TypeScript/JavaScript specific
- "do_statement",
- // do...while loops
- "for_in_statement",
- // for...in loops
- "for_of_statement",
- // for...of loops
- // PHP specific
- "foreach_statement",
- // PHP foreach loops
- // Python specific
- "elif_clause",
- // Python elif (adds decision point)
- // Note: 'else_clause' is NOT a decision point (it's the default path)
- "except_clause",
- // Python except (try/except)
- "conditional_expression"
- // Python ternary (x if cond else y)
- ];
- function traverse(n) {
- if (decisionPoints.includes(n.type)) {
- if (n.type === "binary_expression") {
- const operator = n.childForFieldName("operator");
- if (operator && (operator.text === "&&" || operator.text === "||")) {
- complexity++;
- }
- } else {
- complexity++;
- }
- }
- for (let i = 0; i < n.namedChildCount; i++) {
- const child = n.namedChild(i);
- if (child) traverse(child);
- }
- }
- traverse(node);
- return complexity;
- }
  function extractImports(rootNode) {
  const imports = [];
  function traverse(node) {
@@ -1678,6 +2249,7 @@ var symbolExtractors;
  var init_symbols = __esm({
  "src/indexer/ast/symbols.ts"() {
  "use strict";
+ init_complexity();
  symbolExtractors = {
  // TypeScript/JavaScript
  "function_declaration": extractFunctionInfo,
@@ -1989,32 +2561,29 @@ function chunkByAST(filepath, content, options = {}) {
  chunks.sort((a, b) => a.metadata.startLine - b.metadata.startLine);
  return chunks;
  }
+ function isFunctionDeclaration(node, depth, traverser) {
+ if (depth !== 0 || !traverser.isDeclarationWithFunction(node)) return false;
+ return traverser.findFunctionInDeclaration(node).hasFunction;
+ }
+ function isTargetNode(node, depth, traverser) {
+ return depth <= 1 && traverser.targetNodeTypes.includes(node.type);
+ }
  function findTopLevelNodes(rootNode, traverser) {
  const nodes = [];
  function traverse(node, depth) {
- if (traverser.isDeclarationWithFunction(node) && depth === 0) {
- const declInfo = traverser.findFunctionInDeclaration(node);
- if (declInfo.hasFunction) {
- nodes.push(node);
- return;
- }
- }
- if (depth <= 1 && traverser.targetNodeTypes.includes(node.type)) {
+ if (isFunctionDeclaration(node, depth, traverser) || isTargetNode(node, depth, traverser)) {
  nodes.push(node);
  return;
  }
  if (traverser.shouldExtractChildren(node)) {
  const body = traverser.getContainerBody(node);
- if (body) {
- traverse(body, depth + 1);
- }
+ if (body) traverse(body, depth + 1);
  return;
  }
- if (traverser.shouldTraverseChildren(node)) {
- for (let i = 0; i < node.namedChildCount; i++) {
- const child = node.namedChild(i);
- if (child) traverse(child, depth);
- }
+ if (!traverser.shouldTraverseChildren(node)) return;
+ for (let i = 0; i < node.namedChildCount; i++) {
+ const child = node.namedChild(i);
+ if (child) traverse(child, depth);
  }
  }
  traverse(rootNode, 0);
@@ -2025,39 +2594,45 @@ function getNodeContent(node, lines) {
  const endLine = node.endPosition.row;
  return lines.slice(startLine, endLine + 1).join("\n");
  }
- function createChunk(filepath, node, content, symbolInfo, imports, language) {
- const symbols = {
- functions: [],
- classes: [],
- interfaces: []
- };
- if (symbolInfo?.name) {
- if (symbolInfo.type === "function" || symbolInfo.type === "method") {
- symbols.functions.push(symbolInfo.name);
- } else if (symbolInfo.type === "class") {
- symbols.classes.push(symbolInfo.name);
- } else if (symbolInfo.type === "interface") {
- symbols.interfaces.push(symbolInfo.name);
- }
+ function buildLegacySymbols(symbolInfo) {
+ const symbols = { functions: [], classes: [], interfaces: [] };
+ if (symbolInfo?.name && symbolInfo.type) {
+ const arrayKey = SYMBOL_TYPE_TO_ARRAY[symbolInfo.type];
+ if (arrayKey) symbols[arrayKey].push(symbolInfo.name);
  }
+ return symbols;
+ }
+ function getChunkType(symbolInfo) {
+ if (!symbolInfo) return "block";
+ return symbolInfo.type === "class" ? "class" : "function";
+ }
+ function createChunk(filepath, node, content, symbolInfo, imports, language) {
+ const symbols = buildLegacySymbols(symbolInfo);
+ const shouldCalcComplexity = symbolInfo?.type && COMPLEXITY_SYMBOL_TYPES.has(symbolInfo.type);
+ const cognitiveComplexity = shouldCalcComplexity ? calculateCognitiveComplexity(node) : void 0;
+ const halstead = shouldCalcComplexity ? calculateHalstead(node, language) : void 0;
  return {
  content,
  metadata: {
  file: filepath,
  startLine: node.startPosition.row + 1,
  endLine: node.endPosition.row + 1,
- type: symbolInfo == null ? "block" : symbolInfo.type === "class" ? "class" : "function",
+ type: getChunkType(symbolInfo),
  language,
- // Legacy symbols field for backward compatibility
  symbols,
- // New AST-derived metadata
  symbolName: symbolInfo?.name,
  symbolType: symbolInfo?.type,
  parentClass: symbolInfo?.parentClass,
  complexity: symbolInfo?.complexity,
+ cognitiveComplexity,
  parameters: symbolInfo?.parameters,
  signature: symbolInfo?.signature,
- imports
+ imports,
+ // Halstead metrics
+ halsteadVolume: halstead?.volume,
+ halsteadDifficulty: halstead?.difficulty,
+ halsteadEffort: halstead?.effort,
+ halsteadBugs: halstead?.bugs
  }
  };
  }
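
After this change a function or method chunk carries both complexity scores plus the four Halstead fields. A hypothetical metadata object (field names from this diff; values illustrative only):

    const metadata = {
      file: "src/auth/login.ts",
      startLine: 12,
      endLine: 48,
      type: "function",
      language: "typescript",
      symbols: { functions: ["login"], classes: [], interfaces: [] },
      symbolName: "login",
      symbolType: "function",
      complexity: 7,            // cyclomatic
      cognitiveComplexity: 9,
      halsteadVolume: 612.4,
      halsteadDifficulty: 14.2,
      halsteadEffort: 8696,
      halsteadBugs: 0.204,
    };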
@@ -2110,12 +2685,21 @@ function extractUncoveredCode(lines, coveredRanges, filepath, minChunkSize, impo
  function shouldUseAST(filepath) {
  return isASTSupported(filepath);
  }
+ var SYMBOL_TYPE_TO_ARRAY, COMPLEXITY_SYMBOL_TYPES;
  var init_chunker = __esm({
  "src/indexer/ast/chunker.ts"() {
  "use strict";
  init_parser();
  init_symbols();
+ init_complexity();
  init_traversers();
+ SYMBOL_TYPE_TO_ARRAY = {
+ function: "functions",
+ method: "functions",
+ class: "classes",
+ interface: "interfaces"
+ };
+ COMPLEXITY_SYMBOL_TYPES = /* @__PURE__ */ new Set(["function", "method"]);
  }
  });
@@ -2778,6 +3362,46 @@ var init_boosting = __esm({
  });

  // src/vectordb/query.ts
+ function isValidRecord(r) {
+ return Boolean(
+ r.content && r.content.trim().length > 0 && r.file && r.file.length > 0
+ );
+ }
+ function hasValidArrayEntries(arr) {
+ return Boolean(arr && arr.length > 0 && arr[0] !== "");
+ }
+ function getSymbolsForType(r, symbolType) {
+ if (symbolType === "function") return r.functionNames || [];
+ if (symbolType === "class") return r.classNames || [];
+ if (symbolType === "interface") return r.interfaceNames || [];
+ return [
+ ...r.functionNames || [],
+ ...r.classNames || [],
+ ...r.interfaceNames || []
+ ];
+ }
+ function buildSearchResultMetadata(r) {
+ return {
+ file: r.file,
+ startLine: r.startLine,
+ endLine: r.endLine,
+ type: r.type,
+ language: r.language,
+ symbolName: r.symbolName || void 0,
+ symbolType: r.symbolType,
+ parentClass: r.parentClass || void 0,
+ complexity: r.complexity || void 0,
+ cognitiveComplexity: r.cognitiveComplexity || void 0,
+ parameters: hasValidArrayEntries(r.parameters) ? r.parameters : void 0,
+ signature: r.signature || void 0,
+ imports: hasValidArrayEntries(r.imports) ? r.imports : void 0,
+ // Halstead metrics (v0.19.0) - use explicit null check to preserve valid 0 values
+ halsteadVolume: r.halsteadVolume != null ? r.halsteadVolume : void 0,
+ halsteadDifficulty: r.halsteadDifficulty != null ? r.halsteadDifficulty : void 0,
+ halsteadEffort: r.halsteadEffort != null ? r.halsteadEffort : void 0,
+ halsteadBugs: r.halsteadBugs != null ? r.halsteadBugs : void 0
+ };
+ }
  function applyRelevanceBoosting(query, filepath, baseScore) {
  if (!query) {
  return baseScore;
@@ -2790,21 +3414,7 @@ function dbRecordToSearchResult(r, query) {
  const boostedScore = applyRelevanceBoosting(query, r.file, baseScore);
  return {
  content: r.content,
- metadata: {
- file: r.file,
- startLine: r.startLine,
- endLine: r.endLine,
- type: r.type,
- language: r.language,
- // AST-derived metadata (v0.13.0)
- symbolName: r.symbolName || void 0,
- symbolType: r.symbolType,
- parentClass: r.parentClass || void 0,
- complexity: r.complexity || void 0,
- parameters: r.parameters && r.parameters.length > 0 && r.parameters[0] !== "" ? r.parameters : void 0,
- signature: r.signature || void 0,
- imports: r.imports && r.imports.length > 0 && r.imports[0] !== "" ? r.imports : void 0
- },
+ metadata: buildSearchResultMetadata(r),
  score: boostedScore,
  relevance: calculateRelevance(boostedScore)
  };
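
The explicit null checks in buildSearchResultMetadata matter because the older `||` pattern would also discard legitimate zeros, and a Halstead value of 0 is meaningful. A minimal TypeScript sketch of the difference:

    const r = { halsteadVolume: 0 };
    const viaOr = r.halsteadVolume || undefined;                       // undefined — the 0 is lost
    const viaNull = r.halsteadVolume != null ? r.halsteadVolume : undefined; // 0 is preserved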
@@ -2814,10 +3424,8 @@ async function search(table, queryVector, limit = 5, query) {
  throw new DatabaseError("Vector database not initialized");
  }
  try {
- const results = await table.search(Array.from(queryVector)).limit(limit + 20).execute();
- const filtered = results.filter(
- (r) => r.content && r.content.trim().length > 0 && r.file && r.file.length > 0
- ).map((r) => dbRecordToSearchResult(r, query)).sort((a, b) => a.score - b.score).slice(0, limit);
+ const results = await table.search(Array.from(queryVector)).limit(limit + 20).toArray();
+ const filtered = results.filter(isValidRecord).map((r) => dbRecordToSearchResult(r, query)).sort((a, b) => a.score - b.score).slice(0, limit);
  return filtered;
  } catch (error) {
  const errorMsg = String(error);
@@ -2838,10 +3446,8 @@ async function scanWithFilter(table, options) {
  try {
  const zeroVector = Array(EMBEDDING_DIMENSION).fill(0);
  const query = table.search(zeroVector).where('file != ""').limit(Math.max(limit * 5, 200));
- const results = await query.execute();
- let filtered = results.filter(
- (r) => r.content && r.content.trim().length > 0 && r.file && r.file.length > 0
- );
+ const results = await query.toArray();
+ let filtered = results.filter(isValidRecord);
  if (language) {
  filtered = filtered.filter(
  (r) => r.language && r.language.toLowerCase() === language.toLowerCase()
@@ -2853,119 +3459,89 @@ async function scanWithFilter(table, options) {
  (r) => regex.test(r.content) || regex.test(r.file)
  );
  }
- return filtered.slice(0, limit).map((r) => {
- const score = 0;
- return {
- content: r.content,
- metadata: {
- file: r.file,
- startLine: r.startLine,
- endLine: r.endLine,
- type: r.type,
- language: r.language,
- // AST-derived metadata (v0.13.0)
- symbolName: r.symbolName || void 0,
- symbolType: r.symbolType,
- parentClass: r.parentClass || void 0,
- complexity: r.complexity || void 0,
- parameters: r.parameters && r.parameters.length > 0 && r.parameters[0] !== "" ? r.parameters : void 0,
- signature: r.signature || void 0,
- imports: r.imports && r.imports.length > 0 && r.imports[0] !== "" ? r.imports : void 0
- },
- score,
- relevance: calculateRelevance(score)
- };
- });
+ return filtered.slice(0, limit).map((r) => ({
+ content: r.content,
+ metadata: buildSearchResultMetadata(r),
+ score: 0,
+ relevance: calculateRelevance(0)
+ }));
  } catch (error) {
  throw wrapError(error, "Failed to scan with filter");
  }
  }
  function matchesSymbolType(record, symbolType, symbols) {
  if (record.symbolType) {
- if (symbolType === "function") {
- return record.symbolType === "function" || record.symbolType === "method";
- } else if (symbolType === "class") {
- return record.symbolType === "class";
- } else if (symbolType === "interface") {
- return record.symbolType === "interface";
- }
- return false;
+ return SYMBOL_TYPE_MATCHES[symbolType]?.has(record.symbolType) ?? false;
  }
  return symbols.length > 0 && symbols.some((s) => s.length > 0 && s !== "");
  }
+ function matchesSymbolFilter(r, { language, pattern, symbolType }) {
+ if (language && (!r.language || r.language.toLowerCase() !== language.toLowerCase())) {
+ return false;
+ }
+ const symbols = getSymbolsForType(r, symbolType);
+ const astSymbolName = r.symbolName || "";
+ if (symbols.length === 0 && !astSymbolName) {
+ return false;
+ }
+ if (pattern) {
+ const regex = new RegExp(pattern, "i");
+ const nameMatches = symbols.some((s) => regex.test(s)) || regex.test(astSymbolName);
+ if (!nameMatches) return false;
+ }
+ if (symbolType) {
+ return matchesSymbolType(r, symbolType, symbols);
+ }
+ return true;
+ }
+ function buildLegacySymbols2(r) {
+ return {
+ functions: hasValidArrayEntries(r.functionNames) ? r.functionNames : [],
+ classes: hasValidArrayEntries(r.classNames) ? r.classNames : [],
+ interfaces: hasValidArrayEntries(r.interfaceNames) ? r.interfaceNames : []
+ };
+ }
  async function querySymbols(table, options) {
  if (!table) {
  throw new DatabaseError("Vector database not initialized");
  }
  const { language, pattern, symbolType, limit = 50 } = options;
+ const filterOpts = { language, pattern, symbolType };
  try {
  const zeroVector = Array(EMBEDDING_DIMENSION).fill(0);
  const query = table.search(zeroVector).where('file != ""').limit(Math.max(limit * 10, 500));
- const results = await query.execute();
- let filtered = results.filter((r) => {
- if (!r.content || r.content.trim().length === 0) {
- return false;
- }
- if (!r.file || r.file.length === 0) {
- return false;
- }
- if (language && (!r.language || r.language.toLowerCase() !== language.toLowerCase())) {
- return false;
- }
- const symbols = symbolType === "function" ? r.functionNames || [] : symbolType === "class" ? r.classNames || [] : symbolType === "interface" ? r.interfaceNames || [] : [...r.functionNames || [], ...r.classNames || [], ...r.interfaceNames || []];
- const astSymbolName = r.symbolName || "";
- if (symbols.length === 0 && !astSymbolName) {
- return false;
- }
- if (pattern) {
- const regex = new RegExp(pattern, "i");
- const matchesOldSymbols = symbols.some((s) => regex.test(s));
- const matchesASTSymbol = regex.test(astSymbolName);
- const nameMatches = matchesOldSymbols || matchesASTSymbol;
- if (!nameMatches) return false;
- if (symbolType) {
- return matchesSymbolType(r, symbolType, symbols);
- }
- return nameMatches;
- }
- if (symbolType) {
- return matchesSymbolType(r, symbolType, symbols);
- }
- return true;
- });
- return filtered.slice(0, limit).map((r) => {
- const score = 0;
- return {
- content: r.content,
- metadata: {
- file: r.file,
- startLine: r.startLine,
- endLine: r.endLine,
- type: r.type,
- language: r.language,
- symbols: {
- functions: r.functionNames && r.functionNames.length > 0 && r.functionNames[0] !== "" ? r.functionNames : [],
- classes: r.classNames && r.classNames.length > 0 && r.classNames[0] !== "" ? r.classNames : [],
- interfaces: r.interfaceNames && r.interfaceNames.length > 0 && r.interfaceNames[0] !== "" ? r.interfaceNames : []
- },
- // AST-derived metadata (v0.13.0)
- symbolName: r.symbolName || void 0,
- symbolType: r.symbolType,
- parentClass: r.parentClass || void 0,
- complexity: r.complexity || void 0,
- parameters: r.parameters && r.parameters.length > 0 && r.parameters[0] !== "" ? r.parameters : void 0,
- signature: r.signature || void 0,
- imports: r.imports && r.imports.length > 0 && r.imports[0] !== "" ? r.imports : void 0
- },
- score,
- relevance: calculateRelevance(score)
- };
- });
+ const results = await query.toArray();
+ const filtered = results.filter((r) => isValidRecord(r) && matchesSymbolFilter(r, filterOpts));
+ return filtered.slice(0, limit).map((r) => ({
+ content: r.content,
+ metadata: {
+ ...buildSearchResultMetadata(r),
+ symbols: buildLegacySymbols2(r)
+ },
+ score: 0,
+ relevance: calculateRelevance(0)
+ }));
  } catch (error) {
  throw wrapError(error, "Failed to query symbols");
  }
  }
- var PATH_STRATEGY, FILENAME_STRATEGY, FILE_TYPE_STRATEGIES, BOOSTING_COMPOSERS;
+ async function scanAll(table, options = {}) {
+ if (!table) {
+ throw new DatabaseError("Vector database not initialized");
+ }
+ try {
+ const totalRows = await table.countRows();
+ const MIN_SCAN_LIMIT = 1e3;
+ const results = await scanWithFilter(table, {
+ ...options,
+ limit: Math.max(totalRows, MIN_SCAN_LIMIT)
+ });
+ return results;
+ } catch (error) {
+ throw wrapError(error, "Failed to scan all chunks");
+ }
+ }
+ var PATH_STRATEGY, FILENAME_STRATEGY, FILE_TYPE_STRATEGIES, BOOSTING_COMPOSERS, SYMBOL_TYPE_MATCHES;
  var init_query = __esm({
  "src/vectordb/query.ts"() {
  "use strict";
@@ -2986,10 +3562,67 @@ var init_query = __esm({
  ["conceptual" /* CONCEPTUAL */]: new BoostingComposer().addStrategy(PATH_STRATEGY).addStrategy(FILENAME_STRATEGY).addStrategy(FILE_TYPE_STRATEGIES["conceptual" /* CONCEPTUAL */]),
  ["implementation" /* IMPLEMENTATION */]: new BoostingComposer().addStrategy(PATH_STRATEGY).addStrategy(FILENAME_STRATEGY).addStrategy(FILE_TYPE_STRATEGIES["implementation" /* IMPLEMENTATION */])
  };
+ SYMBOL_TYPE_MATCHES = {
+ function: /* @__PURE__ */ new Set(["function", "method"]),
+ class: /* @__PURE__ */ new Set(["class"]),
+ interface: /* @__PURE__ */ new Set(["interface"])
+ };
  }
  });

  // src/vectordb/batch-insert.ts
+ function transformChunkToRecord(vector, content, metadata) {
+ return {
+ vector: Array.from(vector),
+ content,
+ file: metadata.file,
+ startLine: metadata.startLine,
+ endLine: metadata.endLine,
+ type: metadata.type,
+ language: metadata.language,
+ // Ensure arrays have at least empty string for Arrow type inference
+ functionNames: getNonEmptyArray(metadata.symbols?.functions),
+ classNames: getNonEmptyArray(metadata.symbols?.classes),
+ interfaceNames: getNonEmptyArray(metadata.symbols?.interfaces),
+ // AST-derived metadata (v0.13.0)
+ symbolName: metadata.symbolName || "",
+ symbolType: metadata.symbolType || "",
+ parentClass: metadata.parentClass || "",
+ complexity: metadata.complexity || 0,
+ cognitiveComplexity: metadata.cognitiveComplexity || 0,
+ parameters: getNonEmptyArray(metadata.parameters),
+ signature: metadata.signature || "",
+ imports: getNonEmptyArray(metadata.imports),
+ // Halstead metrics (v0.19.0)
+ halsteadVolume: metadata.halsteadVolume || 0,
+ halsteadDifficulty: metadata.halsteadDifficulty || 0,
+ halsteadEffort: metadata.halsteadEffort || 0,
+ halsteadBugs: metadata.halsteadBugs || 0
+ };
+ }
+ function getNonEmptyArray(arr) {
+ return arr && arr.length > 0 ? arr : [""];
+ }
+ function splitBatchInHalf(batch) {
+ const half = Math.floor(batch.vectors.length / 2);
+ return [
+ {
+ vectors: batch.vectors.slice(0, half),
+ metadatas: batch.metadatas.slice(0, half),
+ contents: batch.contents.slice(0, half)
+ },
+ {
+ vectors: batch.vectors.slice(half),
+ metadatas: batch.metadatas.slice(half),
+ contents: batch.contents.slice(half)
+ }
+ ];
+ }
+ function transformBatchToRecords(batch) {
+ return batch.vectors.map(
+ (vector, i) => transformChunkToRecord(vector, batch.contents[i], batch.metadatas[i])
+ );
+ }
  async function insertBatch(db, table, tableName, vectors, metadatas, contents) {
  if (!db) {
  throw new DatabaseError("Vector database not initialized");
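
The empty-string placeholder exists because Arrow cannot infer a list column's element type from an empty array. getNonEmptyArray applies the trick on write; hasValidArrayEntries (in query.ts above) reverses it on read. A standalone round-trip sketch:

    function getNonEmptyArray(arr?: string[]): string[] {
      return arr && arr.length > 0 ? arr : [""]; // never hand Arrow an empty list
    }
    function hasValidArrayEntries(arr?: string[]): boolean {
      return Boolean(arr && arr.length > 0 && arr[0] !== ""); // [""] means "empty"
    }
    hasValidArrayEntries(getNonEmptyArray(undefined)); // false — round-trips as empty
    hasValidArrayEntries(getNonEmptyArray(["login"])); // true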
@@ -3022,72 +3655,60 @@ async function insertBatch(db, table, tableName, vectors, metadatas, contents) {
  }
  async function insertBatchInternal(db, table, tableName, vectors, metadatas, contents) {
  const queue = [{ vectors, metadatas, contents }];
- const failedRecords = [];
+ const failedBatches = [];
  let currentTable = table;
+ let lastError;
  while (queue.length > 0) {
  const batch = queue.shift();
- if (!batch) break;
- try {
- const records = batch.vectors.map((vector, i) => ({
- vector: Array.from(vector),
- content: batch.contents[i],
- file: batch.metadatas[i].file,
- startLine: batch.metadatas[i].startLine,
- endLine: batch.metadatas[i].endLine,
- type: batch.metadatas[i].type,
- language: batch.metadatas[i].language,
- // Ensure arrays have at least empty string for Arrow type inference
- functionNames: batch.metadatas[i].symbols?.functions && batch.metadatas[i].symbols.functions.length > 0 ? batch.metadatas[i].symbols.functions : [""],
- classNames: batch.metadatas[i].symbols?.classes && batch.metadatas[i].symbols.classes.length > 0 ? batch.metadatas[i].symbols.classes : [""],
- interfaceNames: batch.metadatas[i].symbols?.interfaces && batch.metadatas[i].symbols.interfaces.length > 0 ? batch.metadatas[i].symbols.interfaces : [""],
- // AST-derived metadata (v0.13.0)
- symbolName: batch.metadatas[i].symbolName || "",
- symbolType: batch.metadatas[i].symbolType || "",
- parentClass: batch.metadatas[i].parentClass || "",
- complexity: batch.metadatas[i].complexity || 0,
- parameters: batch.metadatas[i].parameters && batch.metadatas[i].parameters.length > 0 ? batch.metadatas[i].parameters : [""],
- signature: batch.metadatas[i].signature || "",
- imports: batch.metadatas[i].imports && batch.metadatas[i].imports.length > 0 ? batch.metadatas[i].imports : [""]
- }));
- if (!currentTable) {
- currentTable = await db.createTable(tableName, records);
- } else {
- await currentTable.add(records);
- }
- } catch (error) {
- if (batch.vectors.length > VECTOR_DB_MIN_BATCH_SIZE) {
- const half = Math.floor(batch.vectors.length / 2);
- queue.push({
- vectors: batch.vectors.slice(0, half),
- metadatas: batch.metadatas.slice(0, half),
- contents: batch.contents.slice(0, half)
- });
- queue.push({
- vectors: batch.vectors.slice(half),
- metadatas: batch.metadatas.slice(half),
- contents: batch.contents.slice(half)
- });
- } else {
- failedRecords.push(batch);
- }
+ const insertResult = await tryInsertBatch(db, currentTable, tableName, batch);
+ if (insertResult.success) {
+ currentTable = insertResult.table;
+ } else {
+ lastError = insertResult.error;
+ handleBatchFailure(batch, queue, failedBatches);
  }
  }
- if (failedRecords.length > 0) {
- const totalFailed = failedRecords.reduce((sum, batch) => sum + batch.vectors.length, 0);
- throw new DatabaseError(
- `Failed to insert ${totalFailed} record(s) after retry attempts`,
- {
- failedBatches: failedRecords.length,
- totalRecords: totalFailed,
- sampleFile: failedRecords[0].metadatas[0].file
- }
- );
- }
+ throwIfBatchesFailed(failedBatches, lastError);
  if (!currentTable) {
  throw new DatabaseError("Failed to create table during batch insert");
  }
  return currentTable;
  }
+ async function tryInsertBatch(db, currentTable, tableName, batch) {
+ try {
+ const records = transformBatchToRecords(batch);
+ if (!currentTable) {
+ const newTable = await db.createTable(tableName, records);
+ return { success: true, table: newTable };
+ } else {
+ await currentTable.add(records);
+ return { success: true, table: currentTable };
+ }
+ } catch (error) {
+ return { success: false, table: currentTable, error };
+ }
+ }
+ function handleBatchFailure(batch, queue, failedBatches) {
+ if (batch.vectors.length > VECTOR_DB_MIN_BATCH_SIZE) {
+ const [firstHalf, secondHalf] = splitBatchInHalf(batch);
+ queue.push(firstHalf, secondHalf);
+ } else {
+ failedBatches.push(batch);
+ }
+ }
+ function throwIfBatchesFailed(failedBatches, lastError) {
+ if (failedBatches.length === 0) return;
+ const totalFailed = failedBatches.reduce((sum, batch) => sum + batch.vectors.length, 0);
+ throw new DatabaseError(
+ `Failed to insert ${totalFailed} record(s) after retry attempts`,
+ {
+ failedBatches: failedBatches.length,
+ totalRecords: totalFailed,
+ sampleFile: failedBatches[0].metadatas[0].file,
+ lastError: lastError?.message
+ }
+ );
+ }
  var init_batch_insert = __esm({
  "src/vectordb/batch-insert.ts"() {
  "use strict";
@@ -3097,7 +3718,9 @@ var init_batch_insert = __esm({
  });

  // src/vectordb/maintenance.ts
- async function clear(db, table, tableName) {
+ import fs14 from "fs/promises";
+ import path15 from "path";
+ async function clear(db, table, tableName, dbPath) {
  if (!db) {
  throw new DatabaseError("Vector database not initialized");
  }
@@ -3105,6 +3728,13 @@ async function clear(db, table, tableName) {
  if (table) {
  await db.dropTable(tableName);
  }
+ if (dbPath) {
+ const lanceDir = path15.join(dbPath, `${tableName}.lance`);
+ try {
+ await fs14.rm(lanceDir, { recursive: true, force: true });
+ } catch {
+ }
+ }
  } catch (error) {
  throw wrapError(error, "Failed to clear vector database");
  }
@@ -3152,8 +3782,8 @@ var lancedb_exports = {};
  __export(lancedb_exports, {
  VectorDB: () => VectorDB
  });
- import * as lancedb from "vectordb";
- import path15 from "path";
+ import * as lancedb from "@lancedb/lancedb";
+ import path16 from "path";
  import os2 from "os";
  import crypto2 from "crypto";
  var VectorDB;
@@ -3174,9 +3804,9 @@ var init_lancedb = __esm({
  lastVersionCheck = 0;
  currentVersion = 0;
  constructor(projectRoot) {
- const projectName = path15.basename(projectRoot);
+ const projectName = path16.basename(projectRoot);
  const pathHash = crypto2.createHash("md5").update(projectRoot).digest("hex").substring(0, 8);
- this.dbPath = path15.join(
+ this.dbPath = path16.join(
  os2.homedir(),
  ".lien",
  "indices",
@@ -3244,6 +3874,18 @@ var init_lancedb = __esm({
  }
  return scanWithFilter(this.table, options);
  }
+ /**
+ * Scan all chunks in the database
+ * Fetches total count first, then retrieves all chunks in a single optimized query
+ * @param options - Filter options (language, pattern)
+ * @returns All matching chunks
+ */
+ async scanAll(options = {}) {
+ if (!this.table) {
+ throw new DatabaseError("Vector database not initialized");
+ }
+ return scanAll(this.table, options);
+ }
  async querySymbols(options) {
  if (!this.table) {
  throw new DatabaseError("Vector database not initialized");
@@ -3254,7 +3896,7 @@ var init_lancedb = __esm({
  if (!this.db) {
  throw new DatabaseError("Vector database not initialized");
  }
- await clear(this.db, this.table, this.tableName);
+ await clear(this.db, this.table, this.tableName, this.dbPath);
  this.table = null;
  }
  async deleteByFile(filepath) {
@@ -3325,7 +3967,7 @@ var init_lancedb = __esm({
  if (count === 0) {
  return false;
  }
- const sample = await this.table.search(Array(EMBEDDING_DIMENSION).fill(0)).limit(Math.min(count, 5)).execute();
+ const sample = await this.table.search(Array(EMBEDDING_DIMENSION).fill(0)).limit(Math.min(count, 5)).toArray();
  const hasRealData = sample.some(
  (r) => r.content && r.content.trim().length > 0
  );
@@ -3348,8 +3990,8 @@ var manifest_exports = {};
  __export(manifest_exports, {
  ManifestManager: () => ManifestManager
  });
- import fs14 from "fs/promises";
- import path16 from "path";
+ import fs15 from "fs/promises";
+ import path17 from "path";
  var MANIFEST_FILE, ManifestManager;
  var init_manifest = __esm({
  "src/indexer/manifest.ts"() {
@@ -3371,7 +4013,7 @@ var init_manifest = __esm({
  */
  constructor(indexPath) {
  this.indexPath = indexPath;
- this.manifestPath = path16.join(indexPath, MANIFEST_FILE);
+ this.manifestPath = path17.join(indexPath, MANIFEST_FILE);
  }
  /**
  * Loads the manifest from disk.
@@ -3384,7 +4026,7 @@ var init_manifest = __esm({
  */
  async load() {
  try {
- const content = await fs14.readFile(this.manifestPath, "utf-8");
+ const content = await fs15.readFile(this.manifestPath, "utf-8");
  const manifest = JSON.parse(content);
  if (manifest.formatVersion !== INDEX_FORMAT_VERSION) {
  console.error(
@@ -3411,7 +4053,7 @@ var init_manifest = __esm({
  */
  async save(manifest) {
  try {
- await fs14.mkdir(this.indexPath, { recursive: true });
+ await fs15.mkdir(this.indexPath, { recursive: true });
  const manifestToSave = {
  ...manifest,
  formatVersion: INDEX_FORMAT_VERSION,
@@ -3419,7 +4061,7 @@ var init_manifest = __esm({
  lastIndexed: Date.now()
  };
  const content = JSON.stringify(manifestToSave, null, 2);
- await fs14.writeFile(this.manifestPath, content, "utf-8");
+ await fs15.writeFile(this.manifestPath, content, "utf-8");
  } catch (error) {
  console.error(`[Lien] Warning: Failed to save manifest: ${error}`);
  }
@@ -3556,7 +4198,7 @@ var init_manifest = __esm({
  */
  async clear() {
  try {
- await fs14.unlink(this.manifestPath);
+ await fs15.unlink(this.manifestPath);
  } catch (error) {
  if (error.code !== "ENOENT") {
  console.error(`[Lien] Warning: Failed to clear manifest: ${error}`);
@@ -3585,9 +4227,9 @@ var tracker_exports = {};
  __export(tracker_exports, {
  GitStateTracker: () => GitStateTracker
  });
- import fs15 from "fs/promises";
- import path17 from "path";
- var GitStateTracker;
+ import fs16 from "fs/promises";
+ import path18 from "path";
+ var GitStateTracker;
  var init_tracker = __esm({
  "src/git/tracker.ts"() {
  "use strict";
@@ -3598,7 +4240,7 @@ var init_tracker = __esm({
  currentState = null;
  constructor(rootDir, indexPath) {
  this.rootDir = rootDir;
- this.stateFile = path17.join(indexPath, ".git-state.json");
+ this.stateFile = path18.join(indexPath, ".git-state.json");
  }
  /**
  * Loads the last known git state from disk.
@@ -3606,7 +4248,7 @@ var init_tracker = __esm({
  */
  async loadState() {
  try {
- const content = await fs15.readFile(this.stateFile, "utf-8");
+ const content = await fs16.readFile(this.stateFile, "utf-8");
  return JSON.parse(content);
  } catch {
  return null;
@@ -3618,7 +4260,7 @@ var init_tracker = __esm({
  async saveState(state) {
  try {
  const content = JSON.stringify(state, null, 2);
- await fs15.writeFile(this.stateFile, content, "utf-8");
+ await fs16.writeFile(this.stateFile, content, "utf-8");
  } catch (error) {
  console.error(`[Lien] Warning: Failed to save git state: ${error}`);
  }
@@ -3768,148 +4410,6 @@ var init_tracker = __esm({
  }
  });

- // src/indexer/change-detector.ts
- import fs16 from "fs/promises";
- async function detectChanges(rootDir, vectorDB, config) {
- const manifest = new ManifestManager(vectorDB.dbPath);
- const savedManifest = await manifest.load();
- if (!savedManifest) {
- const allFiles = await getAllFiles(rootDir, config);
- return {
- added: allFiles,
- modified: [],
- deleted: [],
- reason: "full"
- };
- }
- const gitAvailable = await isGitAvailable();
- const isRepo = await isGitRepo(rootDir);
- if (gitAvailable && isRepo && savedManifest.gitState) {
- const gitTracker = new GitStateTracker(rootDir, vectorDB.dbPath);
- await gitTracker.initialize();
- const currentState = gitTracker.getState();
- if (currentState && (currentState.branch !== savedManifest.gitState.branch || currentState.commit !== savedManifest.gitState.commit)) {
- try {
- const changedFilesPaths = await getChangedFiles(
- rootDir,
- savedManifest.gitState.commit,
- currentState.commit
- );
- const changedFilesSet = new Set(changedFilesPaths);
- const allFiles = await getAllFiles(rootDir, config);
- const currentFileSet = new Set(allFiles);
- const added = [];
- const modified = [];
- const deleted = [];
- for (const filepath of changedFilesPaths) {
- if (currentFileSet.has(filepath)) {
- if (savedManifest.files[filepath]) {
- modified.push(filepath);
- } else {
- added.push(filepath);
- }
- }
- }
- for (const filepath of allFiles) {
- if (!savedManifest.files[filepath] && !changedFilesSet.has(filepath)) {
- added.push(filepath);
- }
- }
- for (const filepath of Object.keys(savedManifest.files)) {
- if (!currentFileSet.has(filepath)) {
- deleted.push(filepath);
- }
- }
- return {
- added,
- modified,
- deleted,
- reason: "git-state-changed"
- };
- } catch (error) {
- console.warn(`[Lien] Git diff failed, falling back to full reindex: ${error}`);
- const allFiles = await getAllFiles(rootDir, config);
- const currentFileSet = new Set(allFiles);
- const deleted = [];
- for (const filepath of Object.keys(savedManifest.files)) {
- if (!currentFileSet.has(filepath)) {
- deleted.push(filepath);
- }
- }
- return {
- added: allFiles,
- modified: [],
- deleted,
- reason: "git-state-changed"
- };
- }
- }
- }
- return await mtimeBasedDetection(rootDir, savedManifest, config);
- }
- async function getAllFiles(rootDir, config) {
- if (isModernConfig(config) && config.frameworks.length > 0) {
- return await scanCodebaseWithFrameworks(rootDir, config);
- } else if (isLegacyConfig(config)) {
- return await scanCodebase({
- rootDir,
- includePatterns: config.indexing.include,
- excludePatterns: config.indexing.exclude
- });
- } else {
- return await scanCodebase({
- rootDir,
- includePatterns: [],
- excludePatterns: []
- });
- }
- }
- async function mtimeBasedDetection(rootDir, savedManifest, config) {
- const added = [];
- const modified = [];
- const deleted = [];
- const currentFiles = await getAllFiles(rootDir, config);
- const currentFileSet = new Set(currentFiles);
- const fileStats = /* @__PURE__ */ new Map();
- for (const filepath of currentFiles) {
- try {
- const stats = await fs16.stat(filepath);
- fileStats.set(filepath, stats.mtimeMs);
- } catch {
- continue;
- }
- }
- for (const [filepath, mtime] of fileStats) {
- const entry = savedManifest.files[filepath];
- if (!entry) {
- added.push(filepath);
- } else if (entry.lastModified < mtime) {
- modified.push(filepath);
- }
- }
- for (const filepath of Object.keys(savedManifest.files)) {
- if (!currentFileSet.has(filepath)) {
- deleted.push(filepath);
- }
- }
- return {
- added,
- modified,
- deleted,
- reason: "mtime"
- };
- }
- var init_change_detector = __esm({
- "src/indexer/change-detector.ts"() {
- "use strict";
- init_manifest();
- init_scanner();
- init_schema();
- init_tracker();
- init_utils();
- }
- });
-
  // src/utils/result.ts
  function Ok(value) {
  return { ok: true, value };
@@ -3928,6 +4428,21 @@ var init_result = __esm({
  // src/indexer/incremental.ts
  import fs17 from "fs/promises";
+ import path19 from "path";
+ function normalizeToRelativePath(filepath, rootDir) {
+ const root = (rootDir || process.cwd()).replace(/\\/g, "/").replace(/\/$/, "");
+ const normalized = filepath.replace(/\\/g, "/");
+ if (!path19.isAbsolute(filepath)) {
+ return normalized;
+ }
+ if (normalized.startsWith(root + "/")) {
+ return normalized.slice(root.length + 1);
+ }
+ if (normalized.startsWith(root)) {
+ return normalized.slice(root.length);
+ }
+ return path19.relative(root, filepath).replace(/\\/g, "/");
+ }
  async function processFileContent(filepath, content, embeddings, config, verbose) {
  const chunkSize = isModernConfig(config) ? config.core.chunkSize : isLegacyConfig(config) ? config.indexing.chunkSize : 75;
  const chunkOverlap = isModernConfig(config) ? config.core.chunkOverlap : isLegacyConfig(config) ? config.indexing.chunkOverlap : 10;
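
normalizeToRelativePath converts whatever callers pass into forward-slash, root-relative form, so index entries, deletes, and manifest keys agree across platforms. Hand-worked examples (assuming a POSIX process with rootDir = "/home/user/project"):

    normalizeToRelativePath("/home/user/project/src/app.ts"); // "src/app.ts"
    normalizeToRelativePath("src\\app.ts");                   // "src/app.ts" (relative input, slashes normalized)
    normalizeToRelativePath("/elsewhere/file.ts");            // "../../../elsewhere/file.ts" via path.relative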
@@ -3964,61 +4479,63 @@ async function processFileContent(filepath, content, embeddings, config, verbose
3964
4479
  }
3965
4480
  async function indexSingleFile(filepath, vectorDB, embeddings, config, options = {}) {
3966
4481
  const { verbose } = options;
4482
+ const normalizedPath = normalizeToRelativePath(filepath);
3967
4483
  try {
3968
4484
  try {
3969
4485
  await fs17.access(filepath);
3970
4486
  } catch {
3971
4487
  if (verbose) {
3972
- console.error(`[Lien] File deleted: ${filepath}`);
4488
+ console.error(`[Lien] File deleted: ${normalizedPath}`);
3973
4489
  }
3974
- await vectorDB.deleteByFile(filepath);
4490
+ await vectorDB.deleteByFile(normalizedPath);
3975
4491
  const manifest2 = new ManifestManager(vectorDB.dbPath);
3976
- await manifest2.removeFile(filepath);
4492
+ await manifest2.removeFile(normalizedPath);
3977
4493
  return;
3978
4494
  }
3979
4495
  const content = await fs17.readFile(filepath, "utf-8");
3980
- const result = await processFileContent(filepath, content, embeddings, config, verbose || false);
4496
+ const result = await processFileContent(normalizedPath, content, embeddings, config, verbose || false);
3981
4497
  const stats = await fs17.stat(filepath);
3982
4498
  const manifest = new ManifestManager(vectorDB.dbPath);
3983
4499
  if (result === null) {
3984
- await vectorDB.deleteByFile(filepath);
3985
- await manifest.updateFile(filepath, {
3986
- filepath,
4500
+ await vectorDB.deleteByFile(normalizedPath);
4501
+ await manifest.updateFile(normalizedPath, {
4502
+ filepath: normalizedPath,
3987
4503
  lastModified: stats.mtimeMs,
3988
4504
  chunkCount: 0
3989
4505
  });
3990
4506
  return;
3991
4507
  }
3992
4508
  await vectorDB.updateFile(
3993
- filepath,
4509
+ normalizedPath,
3994
4510
  result.vectors,
3995
4511
  result.chunks.map((c) => c.metadata),
3996
4512
  result.texts
3997
4513
  );
3998
- await manifest.updateFile(filepath, {
3999
- filepath,
4514
+ await manifest.updateFile(normalizedPath, {
4515
+ filepath: normalizedPath,
4000
4516
  lastModified: stats.mtimeMs,
4001
4517
  chunkCount: result.chunkCount
4002
4518
  });
4003
4519
  if (verbose) {
4004
- console.error(`[Lien] \u2713 Updated ${filepath} (${result.chunkCount} chunks)`);
4520
+ console.error(`[Lien] \u2713 Updated ${normalizedPath} (${result.chunkCount} chunks)`);
4005
4521
  }
4006
4522
  } catch (error) {
4007
- console.error(`[Lien] \u26A0\uFE0F Failed to index ${filepath}: ${error}`);
4523
+ console.error(`[Lien] \u26A0\uFE0F Failed to index ${normalizedPath}: ${error}`);
4008
4524
  }
4009
4525
  }
4010
- async function processSingleFileForIndexing(filepath, embeddings, config, verbose) {
4526
+ async function processSingleFileForIndexing(filepath, normalizedPath, embeddings, config, verbose) {
4011
4527
  try {
4012
4528
  const stats = await fs17.stat(filepath);
4013
4529
  const content = await fs17.readFile(filepath, "utf-8");
4014
- const result = await processFileContent(filepath, content, embeddings, config, verbose);
4530
+ const result = await processFileContent(normalizedPath, content, embeddings, config, verbose);
4015
4531
  return Ok({
4016
- filepath,
4532
+ filepath: normalizedPath,
4533
+ // Store normalized path
4017
4534
  result,
4018
4535
  mtime: stats.mtimeMs
4019
4536
  });
4020
4537
  } catch (error) {
4021
- return Err(`Failed to process ${filepath}: ${error}`);
4538
+ return Err(`Failed to process ${normalizedPath}: ${error}`);
4022
4539
  }
4023
4540
  }
4024
4541
  async function indexMultipleFiles(filepaths, vectorDB, embeddings, config, options = {}) {
@@ -4026,17 +4543,18 @@ async function indexMultipleFiles(filepaths, vectorDB, embeddings, config, optio
4026
4543
  let processedCount = 0;
4027
4544
  const manifestEntries = [];
4028
4545
  for (const filepath of filepaths) {
4029
- const result = await processSingleFileForIndexing(filepath, embeddings, config, verbose || false);
4546
+ const normalizedPath = normalizeToRelativePath(filepath);
4547
+ const result = await processSingleFileForIndexing(filepath, normalizedPath, embeddings, config, verbose || false);
4030
4548
  if (isOk(result)) {
4031
- const { result: processResult, mtime } = result.value;
4549
+ const { filepath: storedPath, result: processResult, mtime } = result.value;
4032
4550
  if (processResult === null) {
4033
4551
  try {
4034
- await vectorDB.deleteByFile(filepath);
4552
+ await vectorDB.deleteByFile(storedPath);
4035
4553
  } catch (error) {
4036
4554
  }
4037
4555
  const manifest = new ManifestManager(vectorDB.dbPath);
4038
- await manifest.updateFile(filepath, {
4039
- filepath,
4556
+ await manifest.updateFile(storedPath, {
4557
+ filepath: storedPath,
4040
4558
  lastModified: mtime,
4041
4559
  chunkCount: 0
4042
4560
  });
@@ -4044,7 +4562,7 @@ async function indexMultipleFiles(filepaths, vectorDB, embeddings, config, optio
4044
4562
  continue;
4045
4563
  }
4046
4564
  try {
4047
- await vectorDB.deleteByFile(filepath);
4565
+ await vectorDB.deleteByFile(storedPath);
4048
4566
  } catch (error) {
4049
4567
  }
4050
4568
  await vectorDB.insertBatch(
@@ -4053,12 +4571,12 @@ async function indexMultipleFiles(filepaths, vectorDB, embeddings, config, optio
4053
4571
  processResult.texts
4054
4572
  );
4055
4573
  manifestEntries.push({
4056
- filepath,
4574
+ filepath: storedPath,
4057
4575
  chunkCount: processResult.chunkCount,
4058
4576
  mtime
4059
4577
  });
4060
4578
  if (verbose) {
4061
- console.error(`[Lien] \u2713 Updated ${filepath} (${processResult.chunkCount} chunks)`);
4579
+ console.error(`[Lien] \u2713 Updated ${storedPath} (${processResult.chunkCount} chunks)`);
4062
4580
  }
4063
4581
  processedCount++;
4064
4582
  } else {
@@ -4066,12 +4584,12 @@ async function indexMultipleFiles(filepaths, vectorDB, embeddings, config, optio
4066
4584
  console.error(`[Lien] ${result.error}`);
4067
4585
  }
4068
4586
  try {
4069
- await vectorDB.deleteByFile(filepath);
4587
+ await vectorDB.deleteByFile(normalizedPath);
4070
4588
  const manifest = new ManifestManager(vectorDB.dbPath);
4071
- await manifest.removeFile(filepath);
4589
+ await manifest.removeFile(normalizedPath);
4072
4590
  } catch (error) {
4073
4591
  if (verbose) {
4074
- console.error(`[Lien] Note: ${filepath} not in index`);
4592
+ console.error(`[Lien] Note: ${normalizedPath} not in index`);
4075
4593
  }
4076
4594
  }
4077
4595
  processedCount++;
@@ -4101,6 +4619,174 @@ var init_incremental = __esm({
4101
4619
  }
4102
4620
  });
4103
4621
 
4622
+ // src/indexer/change-detector.ts
4623
+ import fs18 from "fs/promises";
4624
+ import path20 from "path";
4625
+ async function hasGitStateChanged(rootDir, dbPath, savedGitState) {
4626
+ if (!savedGitState) return { changed: false };
4627
+ const gitAvailable = await isGitAvailable();
4628
+ const isRepo = await isGitRepo(rootDir);
4629
+ if (!gitAvailable || !isRepo) return { changed: false };
4630
+ const gitTracker = new GitStateTracker(rootDir, dbPath);
4631
+ await gitTracker.initialize();
4632
+ const currentState = gitTracker.getState();
4633
+ if (!currentState) return { changed: false };
4634
+ const changed = currentState.branch !== savedGitState.branch || currentState.commit !== savedGitState.commit;
4635
+ return { changed, currentState };
4636
+ }
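In short: the check reports { changed: false } whenever git state cannot be compared (no saved state, git unavailable, not a repo, or no current state), and flags a change only when the branch or commit differs. A sketch of the call-site pattern used below:

  const gitCheck = await hasGitStateChanged(rootDir, dbPath, savedManifest.gitState);
  if (gitCheck.changed && gitCheck.currentState) {
    // branch or commit moved since the last index; try git-diff-based detection
  }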
4637
+ function categorizeChangedFiles(changedFilesPaths, currentFileSet, normalizedManifestFiles, allFiles) {
4638
+ const changedFilesSet = new Set(changedFilesPaths);
4639
+ const added = [];
4640
+ const modified = [];
4641
+ const deleted = [];
4642
+ for (const filepath of changedFilesPaths) {
4643
+ if (currentFileSet.has(filepath)) {
4644
+ if (normalizedManifestFiles.has(filepath)) {
4645
+ modified.push(filepath);
4646
+ } else {
4647
+ added.push(filepath);
4648
+ }
4649
+ }
4650
+ }
4651
+ for (const filepath of allFiles) {
4652
+ if (!normalizedManifestFiles.has(filepath) && !changedFilesSet.has(filepath)) {
4653
+ added.push(filepath);
4654
+ }
4655
+ }
4656
+ for (const normalizedPath of normalizedManifestFiles) {
4657
+ if (!currentFileSet.has(normalizedPath)) {
4658
+ deleted.push(normalizedPath);
4659
+ }
4660
+ }
4661
+ return { added, modified, deleted };
4662
+ }
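A worked example with made-up paths: a git-reported file still on disk is modified if the manifest knows it and added otherwise; scanned files unknown to both git and the manifest are also added; manifest entries missing from disk are deleted:

  const { added, modified, deleted } = categorizeChangedFiles(
    ["src/a.ts", "src/b.ts"],            // git-reported changes (hypothetical)
    new Set(["src/a.ts", "src/b.ts"]),   // files currently on disk
    new Set(["src/a.ts", "src/old.ts"]), // normalized manifest entries
    ["src/a.ts", "src/b.ts"]             // full scan result
  );
  // added: ["src/b.ts"], modified: ["src/a.ts"], deleted: ["src/old.ts"]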
4663
+ function normalizeManifestPaths(manifestFiles, rootDir) {
4664
+ const normalized = /* @__PURE__ */ new Set();
4665
+ for (const filepath of Object.keys(manifestFiles)) {
4666
+ normalized.add(normalizeToRelativePath(filepath, rootDir));
4667
+ }
4668
+ return normalized;
4669
+ }
4670
+ async function detectGitBasedChanges(rootDir, savedManifest, currentCommit, config) {
4671
+ const changedFilesAbsolute = await getChangedFiles(
4672
+ rootDir,
4673
+ savedManifest.gitState.commit,
4674
+ currentCommit
4675
+ );
4676
+ const changedFilesPaths = changedFilesAbsolute.map((fp) => normalizeToRelativePath(fp, rootDir));
4677
+ const allFiles = await getAllFiles(rootDir, config);
4678
+ const currentFileSet = new Set(allFiles);
4679
+ const normalizedManifestFiles = normalizeManifestPaths(savedManifest.files, rootDir);
4680
+ const { added, modified, deleted } = categorizeChangedFiles(
4681
+ changedFilesPaths,
4682
+ currentFileSet,
4683
+ normalizedManifestFiles,
4684
+ allFiles
4685
+ );
4686
+ return { added, modified, deleted, reason: "git-state-changed" };
4687
+ }
4688
+ async function fallbackToFullReindex(rootDir, savedManifest, config) {
4689
+ const allFiles = await getAllFiles(rootDir, config);
4690
+ const currentFileSet = new Set(allFiles);
4691
+ const deleted = [];
4692
+ for (const filepath of Object.keys(savedManifest.files)) {
4693
+ const normalizedPath = normalizeToRelativePath(filepath, rootDir);
4694
+ if (!currentFileSet.has(normalizedPath)) {
4695
+ deleted.push(normalizedPath);
4696
+ }
4697
+ }
4698
+ return { added: allFiles, modified: [], deleted, reason: "git-state-changed" };
4699
+ }
4700
+ async function detectChanges(rootDir, vectorDB, config) {
4701
+ const manifest = new ManifestManager(vectorDB.dbPath);
4702
+ const savedManifest = await manifest.load();
4703
+ if (!savedManifest) {
4704
+ const allFiles = await getAllFiles(rootDir, config);
4705
+ return { added: allFiles, modified: [], deleted: [], reason: "full" };
4706
+ }
4707
+ const gitCheck = await hasGitStateChanged(rootDir, vectorDB.dbPath, savedManifest.gitState);
4708
+ if (gitCheck.changed && gitCheck.currentState) {
4709
+ try {
4710
+ return await detectGitBasedChanges(rootDir, savedManifest, gitCheck.currentState.commit, config);
4711
+ } catch (error) {
4712
+ console.warn(`[Lien] Git diff failed, falling back to full reindex: ${error}`);
4713
+ return await fallbackToFullReindex(rootDir, savedManifest, config);
4714
+ }
4715
+ }
4716
+ return await mtimeBasedDetection(rootDir, savedManifest, config);
4717
+ }
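The strategy cascades: no saved manifest forces a full index; a moved git branch or commit triggers diff-based detection, falling back to a full reindex if the git diff fails; otherwise mtime comparison runs. A hypothetical invocation, assuming an initialized vectorDB and loaded config:

  const changes = await detectChanges("/home/user/project", vectorDB, config);
  // => { added: [...], modified: [...], deleted: [...],
  //      reason: "full" | "git-state-changed" | "mtime" }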
4718
+ async function getAllFiles(rootDir, config) {
4719
+ let files;
4720
+ if (isModernConfig(config) && config.frameworks.length > 0) {
4721
+ files = await scanCodebaseWithFrameworks(rootDir, config);
4722
+ } else if (isLegacyConfig(config)) {
4723
+ files = await scanCodebase({
4724
+ rootDir,
4725
+ includePatterns: config.indexing.include,
4726
+ excludePatterns: config.indexing.exclude
4727
+ });
4728
+ } else {
4729
+ files = await scanCodebase({
4730
+ rootDir,
4731
+ includePatterns: [],
4732
+ excludePatterns: []
4733
+ });
4734
+ }
4735
+ return files.map((fp) => normalizeToRelativePath(fp, rootDir));
4736
+ }
4737
+ async function mtimeBasedDetection(rootDir, savedManifest, config) {
4738
+ const added = [];
4739
+ const modified = [];
4740
+ const deleted = [];
4741
+ const currentFiles = await getAllFiles(rootDir, config);
4742
+ const currentFileSet = new Set(currentFiles);
4743
+ const normalizedManifestFiles = /* @__PURE__ */ new Map();
4744
+ for (const [filepath, entry] of Object.entries(savedManifest.files)) {
4745
+ const normalizedPath = normalizeToRelativePath(filepath, rootDir);
4746
+ normalizedManifestFiles.set(normalizedPath, entry);
4747
+ }
4748
+ const fileStats = /* @__PURE__ */ new Map();
4749
+ for (const filepath of currentFiles) {
4750
+ try {
4751
+ const absolutePath = path20.isAbsolute(filepath) ? filepath : path20.join(rootDir, filepath);
4752
+ const stats = await fs18.stat(absolutePath);
4753
+ fileStats.set(filepath, stats.mtimeMs);
4754
+ } catch {
4755
+ continue;
4756
+ }
4757
+ }
4758
+ for (const [filepath, mtime] of fileStats) {
4759
+ const entry = normalizedManifestFiles.get(filepath);
4760
+ if (!entry) {
4761
+ added.push(filepath);
4762
+ } else if (entry.lastModified < mtime) {
4763
+ modified.push(filepath);
4764
+ }
4765
+ }
4766
+ for (const normalizedPath of normalizedManifestFiles.keys()) {
4767
+ if (!currentFileSet.has(normalizedPath)) {
4768
+ deleted.push(normalizedPath);
4769
+ }
4770
+ }
4771
+ return {
4772
+ added,
4773
+ modified,
4774
+ deleted,
4775
+ reason: "mtime"
4776
+ };
4777
+ }
4778
+ var init_change_detector = __esm({
4779
+ "src/indexer/change-detector.ts"() {
4780
+ "use strict";
4781
+ init_manifest();
4782
+ init_scanner();
4783
+ init_schema();
4784
+ init_tracker();
4785
+ init_utils();
4786
+ init_incremental();
4787
+ }
4788
+ });
4789
+
4104
4790
  // src/utils/loading-messages.ts
4105
4791
  function getIndexingMessage() {
4106
4792
  const message = INDEXING_MESSAGES[currentIndexingIndex % INDEXING_MESSAGES.length];
@@ -4281,10 +4967,53 @@ var indexer_exports = {};
4281
4967
  __export(indexer_exports, {
4282
4968
  indexCodebase: () => indexCodebase
4283
4969
  });
4284
- import fs18 from "fs/promises";
4970
+ import fs19 from "fs/promises";
4285
4971
  import ora from "ora";
4286
4972
  import chalk5 from "chalk";
4287
4973
  import pLimit from "p-limit";
4974
+ function getIndexingConfig(config) {
4975
+ if (isModernConfig(config)) {
4976
+ return {
4977
+ concurrency: config.core.concurrency,
4978
+ embeddingBatchSize: config.core.embeddingBatchSize,
4979
+ chunkSize: config.core.chunkSize,
4980
+ chunkOverlap: config.core.chunkOverlap,
4981
+ useAST: config.chunking.useAST,
4982
+ astFallback: config.chunking.astFallback
4983
+ };
4984
+ }
4985
+ return {
4986
+ concurrency: 4,
4987
+ embeddingBatchSize: 50,
4988
+ chunkSize: 75,
4989
+ chunkOverlap: 10,
4990
+ useAST: true,
4991
+ astFallback: "line-based"
4992
+ };
4993
+ }
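Anything that fails the isModernConfig check gets the fixed fallback values; a sketch (assuming isModernConfig rejects an empty object):

  getIndexingConfig({});
  // => { concurrency: 4, embeddingBatchSize: 50, chunkSize: 75,
  //      chunkOverlap: 10, useAST: true, astFallback: "line-based" }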
4994
+ async function scanFilesToIndex(rootDir, config) {
4995
+ if (isModernConfig(config) && config.frameworks.length > 0) {
4996
+ return scanCodebaseWithFrameworks(rootDir, config);
4997
+ }
4998
+ if (isLegacyConfig(config)) {
4999
+ return scanCodebase({
5000
+ rootDir,
5001
+ includePatterns: config.indexing.include,
5002
+ excludePatterns: config.indexing.exclude
5003
+ });
5004
+ }
5005
+ return scanCodebase({ rootDir, includePatterns: [], excludePatterns: [] });
5006
+ }
5007
+ async function processEmbeddingMicroBatches(texts, embeddings) {
5008
+ const results = [];
5009
+ for (let j = 0; j < texts.length; j += EMBEDDING_MICRO_BATCH_SIZE) {
5010
+ const microBatch = texts.slice(j, Math.min(j + EMBEDDING_MICRO_BATCH_SIZE, texts.length));
5011
+ const microResults = await embeddings.embedBatch(microBatch);
5012
+ results.push(...microResults);
5013
+ await new Promise((resolve) => setImmediate(resolve));
5014
+ }
5015
+ return results;
5016
+ }
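The micro-batching keeps the event loop responsive during long embedding runs: each EMBEDDING_MICRO_BATCH_SIZE slice is embedded, then control yields via setImmediate before the next slice. The same pattern as a standalone sketch, with an assumed batch size:

  async function embedInMicroBatches(texts, embedBatch, microBatchSize = 8) {
    const out = [];
    for (let i = 0; i < texts.length; i += microBatchSize) {
      out.push(...await embedBatch(texts.slice(i, i + microBatchSize)));
      await new Promise((resolve) => setImmediate(resolve)); // let pending I/O run
    }
    return out;
  }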
4288
5017
  async function updateGitState(rootDir, vectorDB, manifest) {
4289
5018
  const { isGitAvailable: isGitAvailable2, isGitRepo: isGitRepo2 } = await Promise.resolve().then(() => (init_utils(), utils_exports));
4290
5019
  const { GitStateTracker: GitStateTracker2 } = await Promise.resolve().then(() => (init_tracker(), tracker_exports));
@@ -4369,23 +5098,10 @@ async function tryIncrementalIndex(rootDir, vectorDB, config, options, spinner)
4369
5098
  return true;
4370
5099
  }
4371
5100
  async function performFullIndex(rootDir, vectorDB, config, options, spinner) {
5101
+ spinner.text = "Clearing existing index...";
5102
+ await vectorDB.clear();
4372
5103
  spinner.text = "Scanning codebase...";
4373
- let files;
4374
- if (isModernConfig(config) && config.frameworks.length > 0) {
4375
- files = await scanCodebaseWithFrameworks(rootDir, config);
4376
- } else if (isLegacyConfig(config)) {
4377
- files = await scanCodebase({
4378
- rootDir,
4379
- includePatterns: config.indexing.include,
4380
- excludePatterns: config.indexing.exclude
4381
- });
4382
- } else {
4383
- files = await scanCodebase({
4384
- rootDir,
4385
- includePatterns: [],
4386
- excludePatterns: []
4387
- });
4388
- }
5104
+ const files = await scanFilesToIndex(rootDir, config);
4389
5105
  if (files.length === 0) {
4390
5106
  spinner.fail("No files found to index");
4391
5107
  return;
@@ -4395,14 +5111,13 @@ async function performFullIndex(rootDir, vectorDB, config, options, spinner) {
4395
5111
  const embeddings = new LocalEmbeddings();
4396
5112
  await embeddings.initialize();
4397
5113
  spinner.succeed("Embedding model loaded");
4398
- const concurrency = isModernConfig(config) ? config.core.concurrency : 4;
4399
- const embeddingBatchSize = isModernConfig(config) ? config.core.embeddingBatchSize : 50;
5114
+ const indexConfig = getIndexingConfig(config);
4400
5115
  const vectorDBBatchSize = 100;
4401
- spinner.start(`Processing files with ${concurrency}x concurrency...`);
5116
+ spinner.start(`Processing files with ${indexConfig.concurrency}x concurrency...`);
4402
5117
  const startTime = Date.now();
4403
5118
  let processedChunks = 0;
4404
5119
  const chunkAccumulator = [];
4405
- const limit = pLimit(concurrency);
5120
+ const limit = pLimit(indexConfig.concurrency);
4406
5121
  const indexedFileEntries = [];
4407
5122
  const progressTracker = new IndexingProgressTracker(files.length, spinner);
4408
5123
  progressTracker.start();
@@ -4418,53 +5133,35 @@ async function performFullIndex(rootDir, vectorDB, config, options, spinner) {
4418
5133
  return processingQueue;
4419
5134
  };
4420
5135
  const doProcessChunks = async () => {
4421
- if (chunkAccumulator.length === 0) {
4422
- return;
4423
- }
5136
+ if (chunkAccumulator.length === 0) return;
4424
5137
  const currentPromise = processingQueue;
4425
5138
  try {
4426
5139
  const toProcess = chunkAccumulator.splice(0, chunkAccumulator.length);
4427
- for (let i = 0; i < toProcess.length; i += embeddingBatchSize) {
4428
- const batch = toProcess.slice(i, Math.min(i + embeddingBatchSize, toProcess.length));
4429
- progressTracker.setMessage(getEmbeddingMessage());
5140
+ for (let i = 0; i < toProcess.length; i += indexConfig.embeddingBatchSize) {
5141
+ const batch = toProcess.slice(i, Math.min(i + indexConfig.embeddingBatchSize, toProcess.length));
4430
5142
  const texts = batch.map((item) => item.content);
4431
- const embeddingVectors = [];
4432
- for (let j = 0; j < texts.length; j += EMBEDDING_MICRO_BATCH_SIZE) {
4433
- const microBatch = texts.slice(j, Math.min(j + EMBEDDING_MICRO_BATCH_SIZE, texts.length));
4434
- const microResults = await embeddings.embedBatch(microBatch);
4435
- embeddingVectors.push(...microResults);
4436
- await new Promise((resolve) => setImmediate(resolve));
4437
- }
5143
+ progressTracker.setMessage(getEmbeddingMessage());
5144
+ const embeddingVectors = await processEmbeddingMicroBatches(texts, embeddings);
4438
5145
  processedChunks += batch.length;
4439
5146
  progressTracker.setMessage(`Inserting ${batch.length} chunks into vector space...`);
4440
- await vectorDB.insertBatch(
4441
- embeddingVectors,
4442
- batch.map((item) => item.chunk.metadata),
4443
- texts
4444
- );
5147
+ await vectorDB.insertBatch(embeddingVectors, batch.map((item) => item.chunk.metadata), texts);
4445
5148
  await new Promise((resolve) => setImmediate(resolve));
4446
5149
  }
4447
5150
  progressTracker.setMessage(getIndexingMessage());
4448
5151
  } finally {
4449
- if (processingQueue === currentPromise) {
4450
- processingQueue = null;
4451
- }
5152
+ if (processingQueue === currentPromise) processingQueue = null;
4452
5153
  }
4453
5154
  };
4454
5155
  const filePromises = files.map(
4455
5156
  (file) => limit(async () => {
4456
5157
  try {
4457
- const stats = await fs18.stat(file);
4458
- const content = await fs18.readFile(file, "utf-8");
4459
- const chunkSize = isModernConfig(config) ? config.core.chunkSize : 75;
4460
- const chunkOverlap = isModernConfig(config) ? config.core.chunkOverlap : 10;
4461
- const useAST = isModernConfig(config) ? config.chunking.useAST : true;
4462
- const astFallback = isModernConfig(config) ? config.chunking.astFallback : "line-based";
5158
+ const stats = await fs19.stat(file);
5159
+ const content = await fs19.readFile(file, "utf-8");
4463
5160
  const chunks = chunkFile(file, content, {
4464
- chunkSize,
4465
- chunkOverlap,
4466
- useAST,
4467
- astFallback
5161
+ chunkSize: indexConfig.chunkSize,
5162
+ chunkOverlap: indexConfig.chunkOverlap,
5163
+ useAST: indexConfig.useAST,
5164
+ astFallback: indexConfig.astFallback
4468
5165
  });
4469
5166
  if (chunks.length === 0) {
4470
5167
  progressTracker.incrementFiles();
@@ -4529,7 +5226,7 @@ async function performFullIndex(rootDir, vectorDB, config, options, spinner) {
4529
5226
  await writeVersionFile(vectorDB.dbPath);
4530
5227
  const totalTime = ((Date.now() - startTime) / 1e3).toFixed(1);
4531
5228
  spinner.succeed(
4532
- `Indexed ${progressTracker.getProcessedCount()} files (${processedChunks} chunks) in ${totalTime}s using ${concurrency}x concurrency`
5229
+ `Indexed ${progressTracker.getProcessedCount()} files (${processedChunks} chunks) in ${totalTime}s using ${indexConfig.concurrency}x concurrency`
4533
5230
  );
4534
5231
  console.log(chalk5.dim("\nNext step: Run"), chalk5.bold("lien serve"), chalk5.dim("to start the MCP server"));
4535
5232
  }
@@ -4794,19 +5491,27 @@ async function generateNodeJsConfig(_rootDir, _relativePath) {
4794
5491
  "**/*.mdx"
4795
5492
  ],
4796
5493
  exclude: [
5494
+ // Node.js dependencies (with ** prefix for nested projects)
5495
+ "**/node_modules/**",
4797
5496
  "node_modules/**",
5497
+ // PHP/Composer dependencies (for monorepos with PHP)
5498
+ "**/vendor/**",
5499
+ "vendor/**",
5500
+ // Build outputs
5501
+ "**/dist/**",
4798
5502
  "dist/**",
5503
+ "**/build/**",
4799
5504
  "build/**",
4800
- "coverage/**",
5505
+ "**/public/build/**",
5506
+ "public/build/**",
5507
+ "out/**",
5508
+ // Framework build caches
4801
5509
  ".next/**",
4802
5510
  ".nuxt/**",
4803
5511
  ".vite/**",
4804
5512
  ".lien/**",
4805
- "out/**",
4806
- "*.min.js",
4807
- "*.min.css",
4808
- "*.bundle.js",
4809
- // Test artifacts (source files are indexed, but not output)
5513
+ // Test artifacts
5514
+ "coverage/**",
4810
5515
  "playwright-report/**",
4811
5516
  "test-results/**",
4812
5517
  // Build/generated artifacts
@@ -4815,7 +5520,11 @@ async function generateNodeJsConfig(_rootDir, _relativePath) {
4815
5520
  ".cache/**",
4816
5521
  ".turbo/**",
4817
5522
  ".vercel/**",
4818
- ".netlify/**"
5523
+ ".netlify/**",
5524
+ // Minified/bundled files
5525
+ "**/*.min.js",
5526
+ "**/*.min.css",
5527
+ "**/*.bundle.js"
4819
5528
  ]
4820
5529
  };
4821
5530
  }
@@ -4909,18 +5618,32 @@ async function generatePhpConfig(_rootDir, _relativePath) {
4909
5618
  "CHANGELOG.md"
4910
5619
  ],
4911
5620
  exclude: [
5621
+ // Composer dependencies (CRITICAL)
5622
+ "**/vendor/**",
4912
5623
  "vendor/**",
5624
+ // Node.js dependencies
5625
+ "**/node_modules/**",
4913
5626
  "node_modules/**",
5627
+ // Build outputs
5628
+ "**/dist/**",
4914
5629
  "dist/**",
5630
+ "**/build/**",
4915
5631
  "build/**",
5632
+ "**/public/build/**",
5633
+ "public/build/**",
5634
+ // Laravel/PHP system directories
4916
5635
  "storage/**",
4917
5636
  "cache/**",
5637
+ "bootstrap/cache/**",
4918
5638
  // Test artifacts
4919
5639
  "coverage/**",
4920
5640
  "test-results/**",
4921
5641
  ".phpunit.cache/**",
4922
5642
  // Build outputs
4923
- "__generated__/**"
5643
+ "__generated__/**",
5644
+ // Minified files
5645
+ "**/*.min.js",
5646
+ "**/*.min.css"
4924
5647
  ]
4925
5648
  };
4926
5649
  }
@@ -5022,30 +5745,42 @@ async function generateLaravelConfig(_rootDir, _relativePath) {
5022
5745
  "resources/**/*.php",
5023
5746
  "tests/**/*.php",
5024
5747
  "*.php",
5025
- // Frontend assets (Vue/React/Inertia) - Broadened for flexibility
5026
- "**/*.js",
5027
- "**/*.ts",
5028
- "**/*.jsx",
5029
- "**/*.tsx",
5030
- "**/*.vue",
5748
+ // Frontend assets (Vue/React/Inertia) - Scoped to resources/ to avoid build output
5749
+ "resources/**/*.js",
5750
+ "resources/**/*.ts",
5751
+ "resources/**/*.jsx",
5752
+ "resources/**/*.tsx",
5753
+ "resources/**/*.vue",
5031
5754
  // Blade templates
5032
5755
  "resources/views/**/*.blade.php",
5033
5756
  // Documentation
5034
- "**/*.md",
5035
- "**/*.mdx",
5036
5757
  "docs/**/*.md",
5037
5758
  "README.md",
5038
5759
  "CHANGELOG.md"
5039
5760
  ],
5040
5761
  exclude: [
5762
+ // Composer dependencies (CRITICAL: exclude before any include patterns)
5763
+ "**/vendor/**",
5041
5764
  "vendor/**",
5765
+ // Build outputs (Vite/Mix compiled assets)
5766
+ "**/public/build/**",
5767
+ "public/build/**",
5768
+ "public/hot",
5769
+ "**/dist/**",
5770
+ "dist/**",
5771
+ "**/build/**",
5772
+ "build/**",
5773
+ // Laravel system directories
5042
5774
  "storage/**",
5043
5775
  "bootstrap/cache/**",
5044
- "public/**",
5776
+ "public/**/*.js",
5777
+ // Compiled JS in public
5778
+ "public/**/*.css",
5779
+ // Compiled CSS in public
5780
+ // Node.js dependencies
5781
+ "**/node_modules/**",
5045
5782
  "node_modules/**",
5046
- "dist/**",
5047
- "build/**",
5048
- // Test artifacts (source files are indexed, but not output)
5783
+ // Test artifacts
5049
5784
  "playwright-report/**",
5050
5785
  "test-results/**",
5051
5786
  "coverage/**",
@@ -5054,7 +5789,10 @@ async function generateLaravelConfig(_rootDir, _relativePath) {
5054
5789
  // Frontend build outputs
5055
5790
  ".vite/**",
5056
5791
  ".nuxt/**",
5057
- ".next/**"
5792
+ ".next/**",
5793
+ // Minified files
5794
+ "**/*.min.js",
5795
+ "**/*.min.css"
5058
5796
  ]
5059
5797
  };
5060
5798
  }
@@ -5737,8 +6475,8 @@ async function indexCommand(options) {
5737
6475
 
5738
6476
  // src/cli/serve.ts
5739
6477
  import chalk7 from "chalk";
5740
- import fs19 from "fs/promises";
5741
- import path18 from "path";
6478
+ import fs20 from "fs/promises";
6479
+ import path21 from "path";
5742
6480
 
5743
6481
  // src/mcp/server.ts
5744
6482
  import { Server } from "@modelcontextprotocol/sdk/server/index.js";
@@ -5822,6 +6560,20 @@ var GetDependentsSchema = z5.object({
5822
6560
  )
5823
6561
  });
5824
6562
 
6563
+ // src/mcp/schemas/complexity.schema.ts
6564
+ import { z as z6 } from "zod";
6565
+ var GetComplexitySchema = z6.object({
6566
+ files: z6.array(z6.string().min(1, "Filepath cannot be empty")).optional().describe(
6567
+ "Specific files to analyze. If omitted, analyzes entire codebase.\n\nExample: ['src/auth.ts', 'src/api/user.ts']"
6568
+ ),
6569
+ top: z6.number().int().min(1, "Top must be at least 1").max(50, "Top cannot exceed 50").default(10).describe(
6570
+ "Return top N most complex functions. Default: 10\n\nUse higher values to see more violations."
6571
+ ),
6572
+ threshold: z6.number().int().min(1, "Threshold must be at least 1").optional().describe(
6573
+ "Only return functions above this complexity threshold.\n\nNote: Violations are first identified using the threshold from lien.config.json (default: 15). This parameter filters those violations to show only items above the specified value. Setting threshold below the config threshold will not show additional functions."
6574
+ )
6575
+ });
6576
+
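Arguments that would validate against this schema (filenames hypothetical); top defaults to 10 when omitted and is capped at 50:

  GetComplexitySchema.parse({});                                 // => { top: 10 }
  GetComplexitySchema.parse({ files: ["src/auth.ts"], top: 5 }); // per-file analysis
  GetComplexitySchema.parse({ threshold: 20 });                  // filter reported violations
  GetComplexitySchema.parse({ top: 100 });                       // throws: "Top cannot exceed 50"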
5825
6577
  // src/mcp/tools.ts
5826
6578
  var tools = [
5827
6579
  toMCPToolSchema(
@@ -5915,149 +6667,33 @@ Returns:
5915
6667
  - Risk level (low/medium/high/critical) based on dependent count and complexity
5916
6668
 
5917
6669
  Example: get_dependents({ filepath: "src/utils/validate.ts" })`
5918
- )
5919
- ];
6670
+ ),
6671
+ toMCPToolSchema(
6672
+ GetComplexitySchema,
6673
+ "get_complexity",
6674
+ `Get complexity analysis for files or the entire codebase.
5920
6675
 
5921
- // src/mcp/server.ts
5922
- init_lancedb();
5923
- init_local();
5924
- init_tracker();
5925
- init_incremental();
5926
- init_service();
5927
- init_manifest();
5928
- init_utils();
6676
+ Analyzes multiple complexity metrics:
6677
+ - **Test paths**: Number of test cases needed for full coverage (cyclomatic)
6678
+ - **Mental load**: How hard to follow - penalizes nesting (cognitive)
6679
+ - **Time to understand**: Estimated reading time based on Halstead effort
6680
+ - **Estimated bugs**: Predicted bug count based on Halstead volume
5929
6681
 
5930
- // src/watcher/index.ts
5931
- init_schema();
5932
- import chokidar from "chokidar";
5933
- var FileWatcher = class {
5934
- watcher = null;
5935
- debounceTimers = /* @__PURE__ */ new Map();
5936
- config;
5937
- rootDir;
5938
- onChangeHandler = null;
5939
- constructor(rootDir, config) {
5940
- this.rootDir = rootDir;
5941
- this.config = config;
5942
- }
5943
- /**
5944
- * Starts watching files for changes.
5945
- *
5946
- * @param handler - Callback function called when files change
5947
- */
5948
- async start(handler) {
5949
- if (this.watcher) {
5950
- throw new Error("File watcher is already running");
5951
- }
5952
- this.onChangeHandler = handler;
5953
- let includePatterns;
5954
- let excludePatterns;
5955
- if (isLegacyConfig(this.config)) {
5956
- includePatterns = this.config.indexing.include;
5957
- excludePatterns = this.config.indexing.exclude;
5958
- } else if (isModernConfig(this.config)) {
5959
- includePatterns = this.config.frameworks.flatMap((f) => f.config.include);
5960
- excludePatterns = this.config.frameworks.flatMap((f) => f.config.exclude);
5961
- } else {
5962
- includePatterns = ["**/*"];
5963
- excludePatterns = [];
5964
- }
5965
- this.watcher = chokidar.watch(includePatterns, {
5966
- cwd: this.rootDir,
5967
- ignored: excludePatterns,
5968
- persistent: true,
5969
- ignoreInitial: true,
5970
- // Don't trigger for existing files
5971
- awaitWriteFinish: {
5972
- stabilityThreshold: 500,
5973
- // Wait 500ms for file to stop changing
5974
- pollInterval: 100
5975
- },
5976
- // Performance optimizations
5977
- usePolling: false,
5978
- interval: 100,
5979
- binaryInterval: 300
5980
- });
5981
- this.watcher.on("add", (filepath) => this.handleChange("add", filepath)).on("change", (filepath) => this.handleChange("change", filepath)).on("unlink", (filepath) => this.handleChange("unlink", filepath)).on("error", (error) => {
5982
- console.error(`[Lien] File watcher error: ${error}`);
5983
- });
5984
- await new Promise((resolve) => {
5985
- this.watcher.on("ready", () => {
5986
- resolve();
5987
- });
5988
- });
5989
- }
5990
- /**
5991
- * Handles a file change event with debouncing.
5992
- * Debouncing prevents rapid reindexing when files are saved multiple times quickly.
5993
- */
5994
- handleChange(type, filepath) {
5995
- const existingTimer = this.debounceTimers.get(filepath);
5996
- if (existingTimer) {
5997
- clearTimeout(existingTimer);
5998
- }
5999
- const timer = setTimeout(() => {
6000
- this.debounceTimers.delete(filepath);
6001
- if (this.onChangeHandler) {
6002
- const absolutePath = filepath.startsWith("/") ? filepath : `${this.rootDir}/${filepath}`;
6003
- try {
6004
- const result = this.onChangeHandler({
6005
- type,
6006
- filepath: absolutePath
6007
- });
6008
- if (result instanceof Promise) {
6009
- result.catch((error) => {
6010
- console.error(`[Lien] Error handling file change: ${error}`);
6011
- });
6012
- }
6013
- } catch (error) {
6014
- console.error(`[Lien] Error handling file change: ${error}`);
6015
- }
6016
- }
6017
- }, this.config.fileWatching.debounceMs);
6018
- this.debounceTimers.set(filepath, timer);
6019
- }
6020
- /**
6021
- * Stops the file watcher and cleans up resources.
6022
- */
6023
- async stop() {
6024
- if (!this.watcher) {
6025
- return;
6026
- }
6027
- for (const timer of this.debounceTimers.values()) {
6028
- clearTimeout(timer);
6029
- }
6030
- this.debounceTimers.clear();
6031
- await this.watcher.close();
6032
- this.watcher = null;
6033
- this.onChangeHandler = null;
6034
- }
6035
- /**
6036
- * Gets the list of files currently being watched.
6037
- */
6038
- getWatchedFiles() {
6039
- if (!this.watcher) {
6040
- return [];
6041
- }
6042
- const watched = this.watcher.getWatched();
6043
- const files = [];
6044
- for (const [dir, filenames] of Object.entries(watched)) {
6045
- for (const filename of filenames) {
6046
- files.push(`${dir}/${filename}`);
6047
- }
6048
- }
6049
- return files;
6050
- }
6051
- /**
6052
- * Checks if the watcher is currently running.
6053
- */
6054
- isRunning() {
6055
- return this.watcher !== null;
6056
- }
6057
- };
6682
+ Use for tech debt analysis and refactoring prioritization:
6683
+ - "What are the most complex functions?"
6684
+ - "Show me tech debt hotspots"
6685
+ - "What should I refactor?"
6058
6686
 
6059
- // src/mcp/server.ts
6060
- init_constants();
6687
+ Examples:
6688
+ get_complexity({ top: 10 })
6689
+ get_complexity({ files: ["src/auth.ts", "src/api/user.ts"] })
6690
+ get_complexity({ threshold: 15 })
6691
+
6692
+ Returns violations with metricType ('cyclomatic', 'cognitive', 'halstead_effort',
6693
+ or 'halstead_bugs'), risk levels, and dependent counts.
6694
+ Human-readable output: "23 (needs ~23 tests)", "\u{1F9E0} 45", "~2h 30m", "2.27 bugs".`
6695
+ )
6696
+ ];
6061
6697
 
6062
6698
  // src/mcp/utils/tool-wrapper.ts
6063
6699
  init_errors();
@@ -6114,9 +6750,47 @@ function wrapToolHandler(schema, handler) {
6114
6750
  };
6115
6751
  }
6116
6752
 
6753
+ // src/mcp/handlers/semantic-search.ts
6754
+ async function handleSemanticSearch(args, ctx) {
6755
+ const { vectorDB, embeddings, log, checkAndReconnect, getIndexMetadata } = ctx;
6756
+ return await wrapToolHandler(
6757
+ SemanticSearchSchema,
6758
+ async (validatedArgs) => {
6759
+ log(`Searching for: "${validatedArgs.query}"`);
6760
+ await checkAndReconnect();
6761
+ const queryEmbedding = await embeddings.embed(validatedArgs.query);
6762
+ const results = await vectorDB.search(queryEmbedding, validatedArgs.limit, validatedArgs.query);
6763
+ log(`Found ${results.length} results`);
6764
+ return {
6765
+ indexInfo: getIndexMetadata(),
6766
+ results
6767
+ };
6768
+ }
6769
+ )(args);
6770
+ }
6771
+
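Each handler follows the same shape: validate through wrapToolHandler, reconnect if the index changed on disk, embed, search, and return results alongside index metadata. A hypothetical invocation with stand-in ctx wiring (the real server supplies these):

  const response = await handleSemanticSearch(
    { query: "where is authentication handled?", limit: 5 },
    {
      vectorDB,                          // assumed initialized VectorDB
      embeddings,                        // assumed initialized LocalEmbeddings
      log: (msg) => console.error(`[Lien MCP] ${msg}`),
      checkAndReconnect: async () => {}, // no-op stand-in
      getIndexMetadata: () => ({})       // stand-in metadata provider
    }
  );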
6772
+ // src/mcp/handlers/find-similar.ts
6773
+ async function handleFindSimilar(args, ctx) {
6774
+ const { vectorDB, embeddings, log, checkAndReconnect, getIndexMetadata } = ctx;
6775
+ return await wrapToolHandler(
6776
+ FindSimilarSchema,
6777
+ async (validatedArgs) => {
6778
+ log(`Finding similar code...`);
6779
+ await checkAndReconnect();
6780
+ const codeEmbedding = await embeddings.embed(validatedArgs.code);
6781
+ const results = await vectorDB.search(codeEmbedding, validatedArgs.limit, validatedArgs.code);
6782
+ log(`Found ${results.length} similar chunks`);
6783
+ return {
6784
+ indexInfo: getIndexMetadata(),
6785
+ results
6786
+ };
6787
+ }
6788
+ )(args);
6789
+ }
6790
+
6117
6791
  // src/mcp/utils/path-matching.ts
6118
- function normalizePath(path19, workspaceRoot) {
6119
- let normalized = path19.replace(/['"]/g, "").trim().replace(/\\/g, "/");
6792
+ function normalizePath(path23, workspaceRoot) {
6793
+ let normalized = path23.replace(/['"]/g, "").trim().replace(/\\/g, "/");
6120
6794
  normalized = normalized.replace(/\.(ts|tsx|js|jsx)$/, "");
6121
6795
  if (normalized.startsWith(workspaceRoot + "/")) {
6122
6796
  normalized = normalized.substring(workspaceRoot.length + 1);
@@ -6158,17 +6832,165 @@ function isTestFile2(filepath) {
6158
6832
  return /\.(test|spec)\.[^/]+$/.test(filepath) || /(^|[/\\])(test|tests|__tests__)[/\\]/.test(filepath);
6159
6833
  }
6160
6834
 
6161
- // src/mcp/server.ts
6162
- init_errors();
6163
- var __filename4 = fileURLToPath4(import.meta.url);
6164
- var __dirname4 = dirname3(__filename4);
6165
- var require4 = createRequire3(import.meta.url);
6166
- var packageJson3;
6167
- try {
6168
- packageJson3 = require4(join3(__dirname4, "../package.json"));
6169
- } catch {
6170
- packageJson3 = require4(join3(__dirname4, "../../package.json"));
6835
+ // src/mcp/handlers/get-files-context.ts
6836
+ var SCAN_LIMIT = 1e4;
6837
+ async function handleGetFilesContext(args, ctx) {
6838
+ const { vectorDB, embeddings, log, checkAndReconnect, getIndexMetadata } = ctx;
6839
+ return await wrapToolHandler(
6840
+ GetFilesContextSchema,
6841
+ async (validatedArgs) => {
6842
+ const filepaths = Array.isArray(validatedArgs.filepaths) ? validatedArgs.filepaths : [validatedArgs.filepaths];
6843
+ const isSingleFile = !Array.isArray(validatedArgs.filepaths);
6844
+ log(`Getting context for: ${filepaths.join(", ")}`);
6845
+ await checkAndReconnect();
6846
+ const workspaceRoot = process.cwd().replace(/\\/g, "/");
6847
+ const fileEmbeddings = await Promise.all(filepaths.map((fp) => embeddings.embed(fp)));
6848
+ const allFileSearches = await Promise.all(
6849
+ fileEmbeddings.map(
6850
+ (embedding, i) => vectorDB.search(embedding, 50, filepaths[i])
6851
+ )
6852
+ );
6853
+ const fileChunksMap = filepaths.map((filepath, i) => {
6854
+ const allResults = allFileSearches[i];
6855
+ const targetCanonical = getCanonicalPath(filepath, workspaceRoot);
6856
+ return allResults.filter((r) => {
6857
+ const chunkCanonical = getCanonicalPath(r.metadata.file, workspaceRoot);
6858
+ return chunkCanonical === targetCanonical;
6859
+ });
6860
+ });
6861
+ let relatedChunksMap = [];
6862
+ if (validatedArgs.includeRelated) {
6863
+ const filesWithChunks = fileChunksMap.map((chunks, i) => ({ chunks, filepath: filepaths[i], index: i })).filter(({ chunks }) => chunks.length > 0);
6864
+ if (filesWithChunks.length > 0) {
6865
+ const relatedEmbeddings = await Promise.all(
6866
+ filesWithChunks.map(({ chunks }) => embeddings.embed(chunks[0].content))
6867
+ );
6868
+ const relatedSearches = await Promise.all(
6869
+ relatedEmbeddings.map(
6870
+ (embedding, i) => vectorDB.search(embedding, 5, filesWithChunks[i].chunks[0].content)
6871
+ )
6872
+ );
6873
+ relatedChunksMap = Array.from({ length: filepaths.length }, () => []);
6874
+ filesWithChunks.forEach(({ filepath, index }, i) => {
6875
+ const related = relatedSearches[i];
6876
+ const targetCanonical = getCanonicalPath(filepath, workspaceRoot);
6877
+ relatedChunksMap[index] = related.filter((r) => {
6878
+ const chunkCanonical = getCanonicalPath(r.metadata.file, workspaceRoot);
6879
+ return chunkCanonical !== targetCanonical;
6880
+ });
6881
+ });
6882
+ }
6883
+ }
6884
+ const allChunks = await vectorDB.scanWithFilter({ limit: SCAN_LIMIT });
6885
+ if (allChunks.length === SCAN_LIMIT) {
6886
+ log(`Scanned ${SCAN_LIMIT} chunks (limit reached). Test associations may be incomplete for large codebases.`, "warning");
6887
+ }
6888
+ const pathCache = /* @__PURE__ */ new Map();
6889
+ const normalizePathCached = (path23) => {
6890
+ if (pathCache.has(path23)) return pathCache.get(path23);
6891
+ const normalized = normalizePath(path23, workspaceRoot);
6892
+ pathCache.set(path23, normalized);
6893
+ return normalized;
6894
+ };
6895
+ const testAssociationsMap = filepaths.map((filepath) => {
6896
+ const normalizedTarget = normalizePathCached(filepath);
6897
+ const testFiles = /* @__PURE__ */ new Set();
6898
+ for (const chunk of allChunks) {
6899
+ const chunkFile2 = getCanonicalPath(chunk.metadata.file, workspaceRoot);
6900
+ if (!isTestFile2(chunkFile2)) continue;
6901
+ const imports = chunk.metadata.imports || [];
6902
+ for (const imp of imports) {
6903
+ const normalizedImport = normalizePathCached(imp);
6904
+ if (matchesFile(normalizedImport, normalizedTarget)) {
6905
+ testFiles.add(chunkFile2);
6906
+ break;
6907
+ }
6908
+ }
6909
+ }
6910
+ return Array.from(testFiles);
6911
+ });
6912
+ const filesData = {};
6913
+ filepaths.forEach((filepath, i) => {
6914
+ const fileChunks = fileChunksMap[i];
6915
+ const relatedChunks = relatedChunksMap[i] || [];
6916
+ const seenChunks = /* @__PURE__ */ new Set();
6917
+ const dedupedChunks = [...fileChunks, ...relatedChunks].filter((chunk) => {
6918
+ const canonicalFile = getCanonicalPath(chunk.metadata.file, workspaceRoot);
6919
+ const chunkId = `${canonicalFile}:${chunk.metadata.startLine}-${chunk.metadata.endLine}`;
6920
+ if (seenChunks.has(chunkId)) return false;
6921
+ seenChunks.add(chunkId);
6922
+ return true;
6923
+ });
6924
+ filesData[filepath] = {
6925
+ chunks: dedupedChunks,
6926
+ testAssociations: testAssociationsMap[i]
6927
+ };
6928
+ });
6929
+ log(`Found ${Object.values(filesData).reduce((sum, f) => sum + f.chunks.length, 0)} total chunks`);
6930
+ if (isSingleFile) {
6931
+ const filepath = filepaths[0];
6932
+ return {
6933
+ indexInfo: getIndexMetadata(),
6934
+ file: filepath,
6935
+ chunks: filesData[filepath].chunks,
6936
+ testAssociations: filesData[filepath].testAssociations
6937
+ };
6938
+ } else {
6939
+ return {
6940
+ indexInfo: getIndexMetadata(),
6941
+ files: filesData
6942
+ };
6943
+ }
6944
+ }
6945
+ )(args);
6946
+ }
6947
+
6948
+ // src/mcp/handlers/list-functions.ts
6949
+ async function handleListFunctions(args, ctx) {
6950
+ const { vectorDB, log, checkAndReconnect, getIndexMetadata } = ctx;
6951
+ return await wrapToolHandler(
6952
+ ListFunctionsSchema,
6953
+ async (validatedArgs) => {
6954
+ log("Listing functions with symbol metadata...");
6955
+ await checkAndReconnect();
6956
+ let results;
6957
+ let usedMethod = "symbols";
6958
+ try {
6959
+ results = await vectorDB.querySymbols({
6960
+ language: validatedArgs.language,
6961
+ pattern: validatedArgs.pattern,
6962
+ limit: 50
6963
+ });
6964
+ if (results.length === 0 && (validatedArgs.language || validatedArgs.pattern)) {
6965
+ log("No symbol results, falling back to content scan...");
6966
+ results = await vectorDB.scanWithFilter({
6967
+ language: validatedArgs.language,
6968
+ pattern: validatedArgs.pattern,
6969
+ limit: 50
6970
+ });
6971
+ usedMethod = "content";
6972
+ }
6973
+ } catch (error) {
6974
+ log(`Symbol query failed, falling back to content scan: ${error}`);
6975
+ results = await vectorDB.scanWithFilter({
6976
+ language: validatedArgs.language,
6977
+ pattern: validatedArgs.pattern,
6978
+ limit: 50
6979
+ });
6980
+ usedMethod = "content";
6981
+ }
6982
+ log(`Found ${results.length} matches using ${usedMethod} method`);
6983
+ return {
6984
+ indexInfo: getIndexMetadata(),
6985
+ method: usedMethod,
6986
+ results,
6987
+ note: usedMethod === "content" ? 'Using content search. Run "lien reindex" to enable faster symbol-based queries.' : void 0
6988
+ };
6989
+ }
6990
+ )(args);
6171
6991
  }
6992
+
6993
+ // src/mcp/handlers/get-dependents.ts
6172
6994
  var DEPENDENT_COUNT_THRESHOLDS = {
6173
6995
  LOW: 5,
6174
6996
  // Few dependents, safe to change
@@ -6193,559 +7015,1053 @@ var COMPLEXITY_THRESHOLDS = {
6193
7015
  MEDIUM_MAX: 15
6194
7016
  // Occasional branching
6195
7017
  };
6196
- var SCAN_LIMIT = 1e4;
6197
- async function startMCPServer(options) {
6198
- const { rootDir, verbose, watch } = options;
6199
- const log = (message) => {
6200
- if (verbose) {
6201
- console.error(`[Lien MCP] ${message}`);
7018
+ var SCAN_LIMIT2 = 1e4;
7019
+ var RISK_ORDER = { low: 0, medium: 1, high: 2, critical: 3 };
7020
+ function buildImportIndex(allChunks, normalizePathCached) {
7021
+ const importIndex = /* @__PURE__ */ new Map();
7022
+ for (const chunk of allChunks) {
7023
+ const imports = chunk.metadata.imports || [];
7024
+ for (const imp of imports) {
7025
+ const normalizedImport = normalizePathCached(imp);
7026
+ if (!importIndex.has(normalizedImport)) {
7027
+ importIndex.set(normalizedImport, []);
7028
+ }
7029
+ importIndex.get(normalizedImport).push(chunk);
7030
+ }
7031
+ }
7032
+ return importIndex;
7033
+ }
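The result is a reverse-import index mapping a normalized import path to the chunks that import it. A toy example with a stand-in normalizer that collapses both specifiers to one key:

  const idx = buildImportIndex(
    [
      { metadata: { file: "src/a.ts", startLine: 1, endLine: 10, imports: ["../utils"] } },
      { metadata: { file: "src/b.ts", startLine: 1, endLine: 8, imports: ["./utils"] } }
    ],
    () => "src/utils" // hypothetical normalizer mapping both specifiers to one path
  );
  idx.get("src/utils").length; // => 2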
7034
+ function findDependentChunks(importIndex, normalizedTarget) {
7035
+ const dependentChunks = [];
7036
+ const seenChunkIds = /* @__PURE__ */ new Set();
7037
+ const addChunk = (chunk) => {
7038
+ const chunkId = `${chunk.metadata.file}:${chunk.metadata.startLine}-${chunk.metadata.endLine}`;
7039
+ if (!seenChunkIds.has(chunkId)) {
7040
+ dependentChunks.push(chunk);
7041
+ seenChunkIds.add(chunkId);
6202
7042
  }
6203
7043
  };
6204
- log("Initializing MCP server...");
6205
- const embeddings = new LocalEmbeddings();
6206
- const vectorDB = new VectorDB(rootDir);
6207
- try {
6208
- log("Loading embedding model...");
6209
- await embeddings.initialize();
6210
- log("Loading vector database...");
6211
- await vectorDB.initialize();
6212
- log("Embeddings and vector DB ready");
6213
- } catch (error) {
6214
- console.error(`Failed to initialize: ${error}`);
6215
- process.exit(1);
7044
+ if (importIndex.has(normalizedTarget)) {
7045
+ for (const chunk of importIndex.get(normalizedTarget)) {
7046
+ addChunk(chunk);
7047
+ }
6216
7048
  }
6217
- const server = new Server(
6218
- {
6219
- name: "lien",
6220
- version: packageJson3.version
6221
- },
6222
- {
6223
- capabilities: {
6224
- tools: {}
7049
+ for (const [normalizedImport, chunks] of importIndex.entries()) {
7050
+ if (normalizedImport !== normalizedTarget && matchesFile(normalizedImport, normalizedTarget)) {
7051
+ for (const chunk of chunks) {
7052
+ addChunk(chunk);
6225
7053
  }
6226
7054
  }
6227
- );
6228
- server.setRequestHandler(ListToolsRequestSchema, async () => ({
6229
- tools
6230
- }));
6231
- const checkAndReconnect = async () => {
7055
+ }
7056
+ return dependentChunks;
7057
+ }
7058
+ function calculateFileComplexities(chunksByFile) {
7059
+ const fileComplexities = [];
7060
+ for (const [filepath, chunks] of chunksByFile.entries()) {
7061
+ const complexities = chunks.map((c) => c.metadata.complexity).filter((c) => typeof c === "number" && c > 0);
7062
+ if (complexities.length > 0) {
7063
+ const sum = complexities.reduce((a, b) => a + b, 0);
7064
+ fileComplexities.push({
7065
+ filepath,
7066
+ avgComplexity: Math.round(sum / complexities.length * 10) / 10,
7067
+ maxComplexity: Math.max(...complexities),
7068
+ complexityScore: sum,
7069
+ chunksWithComplexity: complexities.length
7070
+ });
7071
+ }
7072
+ }
7073
+ return fileComplexities;
7074
+ }
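Per file, only chunks with a positive numeric complexity count: the average is rounded to one decimal, and the max and sum are kept alongside. With hypothetical chunk complexities [4, 9, 2]:

  calculateFileComplexities(new Map([
    ["src/a.ts", [
      { metadata: { complexity: 4 } },
      { metadata: { complexity: 9 } },
      { metadata: { complexity: 2 } }
    ]]
  ]));
  // => [{ filepath: "src/a.ts", avgComplexity: 5, maxComplexity: 9,
  //       complexityScore: 15, chunksWithComplexity: 3 }]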
7075
+ function calculateOverallComplexityMetrics(fileComplexities) {
7076
+ if (fileComplexities.length === 0) {
7077
+ return {
7078
+ averageComplexity: 0,
7079
+ maxComplexity: 0,
7080
+ filesWithComplexityData: 0,
7081
+ highComplexityDependents: [],
7082
+ complexityRiskBoost: "low"
7083
+ };
7084
+ }
7085
+ const allAvgs = fileComplexities.map((f) => f.avgComplexity);
7086
+ const allMaxes = fileComplexities.map((f) => f.maxComplexity);
7087
+ const totalAvg = allAvgs.reduce((a, b) => a + b, 0) / allAvgs.length;
7088
+ const globalMax = Math.max(...allMaxes);
7089
+ const highComplexityDependents = fileComplexities.filter((f) => f.maxComplexity > COMPLEXITY_THRESHOLDS.HIGH_COMPLEXITY_DEPENDENT).sort((a, b) => b.maxComplexity - a.maxComplexity).slice(0, 5).map((f) => ({ filepath: f.filepath, maxComplexity: f.maxComplexity, avgComplexity: f.avgComplexity }));
7090
+ const complexityRiskBoost = calculateComplexityRiskBoost(totalAvg, globalMax);
7091
+ return {
7092
+ averageComplexity: Math.round(totalAvg * 10) / 10,
7093
+ maxComplexity: globalMax,
7094
+ filesWithComplexityData: fileComplexities.length,
7095
+ highComplexityDependents,
7096
+ complexityRiskBoost
7097
+ };
7098
+ }
7099
+ function calculateComplexityRiskBoost(avgComplexity, maxComplexity) {
7100
+ if (avgComplexity > COMPLEXITY_THRESHOLDS.CRITICAL_AVG || maxComplexity > COMPLEXITY_THRESHOLDS.CRITICAL_MAX) {
7101
+ return "critical";
7102
+ }
7103
+ if (avgComplexity > COMPLEXITY_THRESHOLDS.HIGH_AVG || maxComplexity > COMPLEXITY_THRESHOLDS.HIGH_MAX) {
7104
+ return "high";
7105
+ }
7106
+ if (avgComplexity > COMPLEXITY_THRESHOLDS.MEDIUM_AVG || maxComplexity > COMPLEXITY_THRESHOLDS.MEDIUM_MAX) {
7107
+ return "medium";
7108
+ }
7109
+ return "low";
7110
+ }
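Either the average or the peak can trigger a boost; a few worked cases against the thresholds above:

  calculateComplexityRiskBoost(7, 12);  // => "medium"   (avg > MEDIUM_AVG of 6)
  calculateComplexityRiskBoost(12, 18); // => "high"     (avg > HIGH_AVG of 10)
  calculateComplexityRiskBoost(8, 30);  // => "critical" (max > CRITICAL_MAX of 25)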
7111
+ function calculateRiskLevel(dependentCount, complexityRiskBoost) {
7112
+ let riskLevel = dependentCount === 0 ? "low" : dependentCount <= DEPENDENT_COUNT_THRESHOLDS.LOW ? "low" : dependentCount <= DEPENDENT_COUNT_THRESHOLDS.MEDIUM ? "medium" : dependentCount <= DEPENDENT_COUNT_THRESHOLDS.HIGH ? "high" : "critical";
7113
+ if (RISK_ORDER[complexityRiskBoost] > RISK_ORDER[riskLevel]) {
7114
+ riskLevel = complexityRiskBoost;
7115
+ }
7116
+ return riskLevel;
7117
+ }
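The complexity boost can only raise the count-based level, never lower it:

  calculateRiskLevel(3, "low");     // => "low"      (3 dependents, no boost)
  calculateRiskLevel(3, "high");    // => "high"     (boost outranks the count-based level)
  calculateRiskLevel(40, "medium"); // => "critical" (40 > HIGH threshold of 30; boost cannot lower it)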
7118
+ async function handleGetDependents(args, ctx) {
7119
+ const { vectorDB, log, checkAndReconnect, getIndexMetadata } = ctx;
7120
+ return await wrapToolHandler(
7121
+ GetDependentsSchema,
7122
+ async (validatedArgs) => {
7123
+ log(`Finding dependents of: ${validatedArgs.filepath}`);
7124
+ await checkAndReconnect();
7125
+ const allChunks = await vectorDB.scanWithFilter({ limit: SCAN_LIMIT2 });
7126
+ const hitLimit = allChunks.length === SCAN_LIMIT2;
7127
+ if (hitLimit) {
7128
+ log(`Scanned ${SCAN_LIMIT2} chunks (limit reached). Results may be incomplete.`, "warning");
7129
+ }
7130
+ log(`Scanning ${allChunks.length} chunks for imports...`);
7131
+ const workspaceRoot = process.cwd().replace(/\\/g, "/");
7132
+ const pathCache = /* @__PURE__ */ new Map();
7133
+ const normalizePathCached = (path23) => {
7134
+ if (!pathCache.has(path23)) pathCache.set(path23, normalizePath(path23, workspaceRoot));
7135
+ return pathCache.get(path23);
7136
+ };
7137
+ const importIndex = buildImportIndex(allChunks, normalizePathCached);
7138
+ const normalizedTarget = normalizePathCached(validatedArgs.filepath);
7139
+ const dependentChunks = findDependentChunks(importIndex, normalizedTarget);
7140
+ const chunksByFile = /* @__PURE__ */ new Map();
7141
+ for (const chunk of dependentChunks) {
7142
+ const canonical = getCanonicalPath(chunk.metadata.file, workspaceRoot);
7143
+ const existing = chunksByFile.get(canonical) || [];
7144
+ existing.push(chunk);
7145
+ chunksByFile.set(canonical, existing);
7146
+ }
7147
+ const fileComplexities = calculateFileComplexities(chunksByFile);
7148
+ const complexityMetrics = calculateOverallComplexityMetrics(fileComplexities);
7149
+ const uniqueFiles = Array.from(chunksByFile.keys()).map((filepath) => ({
7150
+ filepath,
7151
+ isTestFile: isTestFile2(filepath)
7152
+ }));
7153
+ const riskLevel = calculateRiskLevel(uniqueFiles.length, complexityMetrics.complexityRiskBoost);
7154
+ log(`Found ${uniqueFiles.length} dependent files (risk: ${riskLevel}${complexityMetrics.filesWithComplexityData > 0 ? ", complexity-boosted" : ""})`);
7155
+ return {
7156
+ indexInfo: getIndexMetadata(),
7157
+ filepath: validatedArgs.filepath,
7158
+ dependentCount: uniqueFiles.length,
7159
+ riskLevel,
7160
+ dependents: uniqueFiles,
7161
+ complexityMetrics,
7162
+ note: hitLimit ? `Warning: Scanned ${SCAN_LIMIT2} chunks (limit reached). Results may be incomplete.` : void 0
7163
+ };
7164
+ }
7165
+ )(args);
7166
+ }
7167
+
7168
+ // src/mcp/handlers/get-complexity.ts
7169
+ import collect from "collect.js";
7170
+
7171
+ // src/insights/types.ts
7172
+ var RISK_ORDER2 = { low: 0, medium: 1, high: 2, critical: 3 };
7173
+
7174
+ // src/indexer/dependency-analyzer.ts
7175
+ var DEPENDENT_COUNT_THRESHOLDS2 = {
7176
+ LOW: 5,
7177
+ // Few dependents, safe to change
7178
+ MEDIUM: 15,
7179
+ // Moderate impact, review dependents
7180
+ HIGH: 30
7181
+ // High impact, careful planning needed
7182
+ };
7183
+ var COMPLEXITY_THRESHOLDS2 = {
7184
+ HIGH_COMPLEXITY_DEPENDENT: 10,
7185
+ // Individual file is complex
7186
+ CRITICAL_AVG: 15,
7187
+ // Average complexity indicates systemic complexity
7188
+ CRITICAL_MAX: 25,
7189
+ // Peak complexity indicates hotspot
7190
+ HIGH_AVG: 10,
7191
+ // Moderately complex on average
7192
+ HIGH_MAX: 20,
7193
+ // Some complex functions exist
7194
+ MEDIUM_AVG: 6,
7195
+ // Slightly above simple code
7196
+ MEDIUM_MAX: 15
7197
+ // Occasional branching
7198
+ };
7199
+ function createPathNormalizer(workspaceRoot) {
7200
+ const cache = /* @__PURE__ */ new Map();
7201
+ return (path23) => {
7202
+ const cached = cache.get(path23);
7203
+ if (cached !== void 0) return cached;
7204
+ const normalized = normalizePath(path23, workspaceRoot);
7205
+ cache.set(path23, normalized);
7206
+ return normalized;
7207
+ };
7208
+ }
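A memoizing wrapper over normalizePath: repeated lookups of the same raw path are served from a per-analysis cache (paths hypothetical):

  const normalize = createPathNormalizer("/home/user/project");
  normalize("/home/user/project/src/a.ts"); // computed via normalizePath once
  normalize("/home/user/project/src/a.ts"); // second call hits the cache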
7209
+ function buildImportIndex2(chunks, normalizePathCached) {
7210
+ const importIndex = /* @__PURE__ */ new Map();
7211
+ for (const chunk of chunks) {
7212
+ const imports = chunk.metadata.imports || [];
7213
+ for (const imp of imports) {
7214
+ const normalizedImport = normalizePathCached(imp);
7215
+ let chunkList = importIndex.get(normalizedImport);
7216
+ if (!chunkList) {
7217
+ chunkList = [];
7218
+ importIndex.set(normalizedImport, chunkList);
7219
+ }
7220
+ chunkList.push(chunk);
7221
+ }
7222
+ }
7223
+ return importIndex;
7224
+ }
7225
+ function findDependentChunks2(normalizedTarget, importIndex) {
7226
+ const dependentChunks = [];
7227
+ const seenChunkIds = /* @__PURE__ */ new Set();
7228
+ const addChunk = (chunk) => {
7229
+ const chunkId = `${chunk.metadata.file}:${chunk.metadata.startLine}-${chunk.metadata.endLine}`;
7230
+ if (!seenChunkIds.has(chunkId)) {
7231
+ dependentChunks.push(chunk);
7232
+ seenChunkIds.add(chunkId);
7233
+ }
7234
+ };
7235
+ const directMatches = importIndex.get(normalizedTarget);
7236
+ if (directMatches) {
7237
+ for (const chunk of directMatches) {
7238
+ addChunk(chunk);
7239
+ }
7240
+ }
7241
+ for (const [normalizedImport, chunks] of importIndex.entries()) {
7242
+ if (normalizedImport !== normalizedTarget && matchesFile(normalizedImport, normalizedTarget)) {
7243
+ for (const chunk of chunks) {
7244
+ addChunk(chunk);
7245
+ }
7246
+ }
7247
+ }
7248
+ return dependentChunks;
7249
+ }
7250
+ function groupChunksByFile(chunks, workspaceRoot) {
7251
+ const chunksByFile = /* @__PURE__ */ new Map();
7252
+ for (const chunk of chunks) {
7253
+ const canonical = getCanonicalPath(chunk.metadata.file, workspaceRoot);
7254
+ let existing = chunksByFile.get(canonical);
7255
+ if (!existing) {
7256
+ existing = [];
7257
+ chunksByFile.set(canonical, existing);
7258
+ }
7259
+ existing.push(chunk);
7260
+ }
7261
+ return chunksByFile;
7262
+ }
7263
+ function calculateFileComplexities2(chunksByFile) {
7264
+ const fileComplexities = [];
7265
+ for (const [filepath, chunks] of chunksByFile.entries()) {
7266
+ const complexities = chunks.map((c) => c.metadata.complexity).filter((c) => typeof c === "number" && c > 0);
7267
+ if (complexities.length > 0) {
7268
+ const sum = complexities.reduce((a, b) => a + b, 0);
7269
+ const avg = sum / complexities.length;
7270
+ const max = Math.max(...complexities);
7271
+ fileComplexities.push({
7272
+ filepath,
7273
+ avgComplexity: Math.round(avg * 10) / 10,
7274
+ maxComplexity: max,
7275
+ complexityScore: sum,
7276
+ chunksWithComplexity: complexities.length
7277
+ });
7278
+ }
7279
+ }
7280
+ return fileComplexities;
7281
+ }
7282
+ function calculateOverallComplexityMetrics2(fileComplexities) {
7283
+ if (fileComplexities.length === 0) {
7284
+ return void 0;
7285
+ }
7286
+ const allAvgs = fileComplexities.map((f) => f.avgComplexity);
7287
+ const allMaxes = fileComplexities.map((f) => f.maxComplexity);
7288
+ const totalAvg = allAvgs.reduce((a, b) => a + b, 0) / allAvgs.length;
7289
+ const globalMax = Math.max(...allMaxes);
7290
+ const highComplexityDependents = fileComplexities.filter((f) => f.maxComplexity > COMPLEXITY_THRESHOLDS2.HIGH_COMPLEXITY_DEPENDENT).sort((a, b) => b.maxComplexity - a.maxComplexity).slice(0, 5).map((f) => ({
7291
+ filepath: f.filepath,
7292
+ maxComplexity: f.maxComplexity,
7293
+ avgComplexity: f.avgComplexity
7294
+ }));
7295
+ const complexityRiskBoost = calculateComplexityRiskBoost2(totalAvg, globalMax);
7296
+ return {
7297
+ averageComplexity: Math.round(totalAvg * 10) / 10,
7298
+ maxComplexity: globalMax,
7299
+ filesWithComplexityData: fileComplexities.length,
7300
+ highComplexityDependents,
7301
+ complexityRiskBoost
7302
+ };
7303
+ }
7304
+ function calculateComplexityRiskBoost2(avgComplexity, maxComplexity) {
7305
+ if (avgComplexity > COMPLEXITY_THRESHOLDS2.CRITICAL_AVG || maxComplexity > COMPLEXITY_THRESHOLDS2.CRITICAL_MAX) {
7306
+ return "critical";
7307
+ }
7308
+ if (avgComplexity > COMPLEXITY_THRESHOLDS2.HIGH_AVG || maxComplexity > COMPLEXITY_THRESHOLDS2.HIGH_MAX) {
7309
+ return "high";
7310
+ }
7311
+ if (avgComplexity > COMPLEXITY_THRESHOLDS2.MEDIUM_AVG || maxComplexity > COMPLEXITY_THRESHOLDS2.MEDIUM_MAX) {
7312
+ return "medium";
7313
+ }
7314
+ return "low";
7315
+ }
7316
+ function calculateRiskLevelFromCount(count) {
7317
+ if (count <= DEPENDENT_COUNT_THRESHOLDS2.LOW) {
7318
+ return "low";
7319
+ }
7320
+ if (count <= DEPENDENT_COUNT_THRESHOLDS2.MEDIUM) {
7321
+ return "medium";
7322
+ }
7323
+ if (count <= DEPENDENT_COUNT_THRESHOLDS2.HIGH) {
7324
+ return "high";
7325
+ }
7326
+ return "critical";
7327
+ }
7328
+ function analyzeDependencies(targetFilepath, allChunks, workspaceRoot) {
7329
+ const normalizePathCached = createPathNormalizer(workspaceRoot);
7330
+ const importIndex = buildImportIndex2(allChunks, normalizePathCached);
7331
+ const normalizedTarget = normalizePathCached(targetFilepath);
7332
+ const dependentChunks = findDependentChunks2(normalizedTarget, importIndex);
7333
+ const chunksByFile = groupChunksByFile(dependentChunks, workspaceRoot);
7334
+ const fileComplexities = calculateFileComplexities2(chunksByFile);
7335
+ const complexityMetrics = calculateOverallComplexityMetrics2(fileComplexities);
7336
+ const dependents = Array.from(chunksByFile.keys()).map((filepath) => ({
7337
+ filepath,
7338
+ isTestFile: isTestFile2(filepath)
7339
+ }));
7340
+ let riskLevel = calculateRiskLevelFromCount(dependents.length);
7341
+ if (complexityMetrics?.complexityRiskBoost) {
7342
+ if (RISK_ORDER2[complexityMetrics.complexityRiskBoost] > RISK_ORDER2[riskLevel]) {
7343
+ riskLevel = complexityMetrics.complexityRiskBoost;
7344
+ }
7345
+ }
7346
+ return {
7347
+ dependents,
7348
+ dependentCount: dependents.length,
7349
+ riskLevel,
7350
+ complexityMetrics
7351
+ };
7352
+ }
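
Since analyzeDependencies is pure over its inputs, it is straightforward to call directly. A minimal sketch of a caller that flags risky files before a refactor (the target path is illustrative; the chunk list comes from a vectorDB.scanAll(), as the analyzer below also does, and this runs inside an async context):

const allChunks = await vectorDB.scanAll();
const dep = analyzeDependencies("src/config/schema.ts", allChunks, process.cwd());
if (dep.riskLevel === "high" || dep.riskLevel === "critical") {
  console.warn(`${dep.dependentCount} dependents import this file - refactor with care`);
}
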
7353
+
7354
+ // src/insights/complexity-analyzer.ts
7355
+ var SEVERITY = { warning: 1, error: 2 };
7356
+ var ComplexityAnalyzer = class {
7357
+ constructor(vectorDB, config) {
7358
+ this.vectorDB = vectorDB;
7359
+ this.config = config;
7360
+ }
7361
+ /**
7362
+ * Analyze complexity of codebase or specific files
7363
+ * @param files - Optional list of specific files to analyze
7364
+ * @returns Complexity report with violations and summary
7365
+ */
7366
+ async analyze(files) {
7367
+ const allChunks = await this.vectorDB.scanAll();
7368
+ const chunks = files ? allChunks.filter((c) => this.matchesAnyFile(c.metadata.file, files)) : allChunks;
7369
+ const violations = this.findViolations(chunks);
7370
+ const report = this.buildReport(violations, chunks);
7371
+ this.enrichWithDependencies(report, allChunks);
7372
+ return report;
7373
+ }
7374
+ /**
7375
+ * Normalize a file path to a consistent relative format
7376
+ * Converts absolute paths to paths relative to the workspace root
7377
+ */
7378
+ normalizeFilePath(filepath) {
7379
+ const workspaceRoot = process.cwd();
7380
+ const normalized = filepath.replace(/\\/g, "/");
7381
+ const normalizedRoot = workspaceRoot.replace(/\\/g, "/");
7382
+ if (normalized.startsWith(normalizedRoot + "/")) {
7383
+ return normalized.slice(normalizedRoot.length + 1);
7384
+ }
7385
+ if (normalized.startsWith(normalizedRoot)) {
7386
+ return normalized.slice(normalizedRoot.length);
7387
+ }
7388
+ return normalized;
7389
+ }
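
A few worked cases for normalizeFilePath, assuming the process cwd is /home/dev/proj (backslashes are converted first, so Windows-style input behaves the same):

// normalizeFilePath("/home/dev/proj/src/a.ts") -> "src/a.ts"
// normalizeFilePath("src/a.ts")                -> "src/a.ts"           (already relative)
// normalizeFilePath("/other/root/b.ts")        -> "/other/root/b.ts"  (different root, left as-is)
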
7390
+ /**
7391
+ * Check if a chunk's file matches any of the target files
7392
+ * Uses exact match or "/"-anchored suffix matching to avoid partial-name matches
7393
+ */
7394
+ matchesAnyFile(chunkFile2, targetFiles) {
7395
+ const normalizedChunkFile = chunkFile2.replace(/\\/g, "/");
7396
+ return targetFiles.some((target) => {
7397
+ const normalizedTarget = target.replace(/\\/g, "/");
7398
+ return normalizedChunkFile === normalizedTarget || normalizedChunkFile.endsWith("/" + normalizedTarget);
7399
+ });
7400
+ }
7401
+ /**
7402
+ * Create a violation if complexity exceeds threshold
7403
+ */
7404
+ createViolation(metadata, complexity, baseThreshold, metricType) {
7405
+ const warningThreshold = baseThreshold * SEVERITY.warning;
7406
+ const errorThreshold = baseThreshold * SEVERITY.error;
7407
+ if (complexity < warningThreshold) return null;
7408
+ const violationSeverity = complexity >= errorThreshold ? "error" : "warning";
7409
+ const effectiveThreshold = violationSeverity === "error" ? errorThreshold : warningThreshold;
7410
+ const message = metricType === "cyclomatic" ? `Needs ~${complexity} test cases for full coverage (threshold: ${Math.round(effectiveThreshold)})` : `Mental load ${complexity} exceeds threshold ${Math.round(effectiveThreshold)} (hard to follow)`;
7411
+ return {
7412
+ filepath: metadata.file,
7413
+ startLine: metadata.startLine,
7414
+ endLine: metadata.endLine,
7415
+ symbolName: metadata.symbolName || "unknown",
7416
+ symbolType: metadata.symbolType,
7417
+ language: metadata.language,
7418
+ complexity,
7419
+ threshold: Math.round(effectiveThreshold),
7420
+ severity: violationSeverity,
7421
+ message,
7422
+ metricType
7423
+ };
7424
+ }
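
The SEVERITY multipliers give each metric a warning band at 1x the configured threshold and an error band at 2x. With the default testPaths threshold of 15, the banding works out as:

// createViolation(meta, 12, 15, "cyclomatic") -> null       (below the warning band)
// createViolation(meta, 22, 15, "cyclomatic") -> "warning", reported threshold 15
// createViolation(meta, 31, 15, "cyclomatic") -> "error",   reported threshold 30
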
7425
+ /**
7426
+ * Deduplicate and filter chunks to only function/method types.
7427
+ * Handles potential index duplicates by tracking file+line ranges.
7428
+ */
7429
+ getUniqueFunctionChunks(chunks) {
7430
+ const seen = /* @__PURE__ */ new Set();
7431
+ const result = [];
7432
+ for (const { metadata } of chunks) {
7433
+ if (metadata.symbolType !== "function" && metadata.symbolType !== "method") continue;
7434
+ const key = `${metadata.file}:${metadata.startLine}-${metadata.endLine}`;
7435
+ if (seen.has(key)) continue;
7436
+ seen.add(key);
7437
+ result.push(metadata);
7438
+ }
7439
+ return result;
7440
+ }
7441
+ /**
7442
+ * Convert Halstead effort to time in minutes.
7443
+ * Formula: Time (seconds) = Effort / 18 (Stroud number for mental discrimination)
7444
+ * Time (minutes) = Effort / (18 * 60) = Effort / 1080
7445
+ */
7446
+ effortToMinutes(effort) {
7447
+ return effort / 1080;
7448
+ }
7449
+ /**
7450
+ * Format minutes as human-readable time (e.g., "2h 30m" or "45m")
7451
+ */
7452
+ formatTime(minutes) {
7453
+ if (minutes >= 60) {
7454
+ const hours = Math.floor(minutes / 60);
7455
+ const mins = Math.round(minutes % 60);
7456
+ return mins > 0 ? `${hours}h ${mins}m` : `${hours}h`;
7457
+ }
7458
+ return `${Math.round(minutes)}m`;
7459
+ }
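
Putting the two helpers together, a worked conversion (the 1080 divisor is 18 mental discriminations per second times 60 seconds):

// effortToMinutes(97_200) = 97_200 / 1080 = 90
// formatTime(90)          = "1h 30m"
// i.e. a chunk with Halstead effort ~97k is estimated at roughly
// an hour and a half of reading time.
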
7460
+ /**
7461
+ * Create a Halstead violation if metrics exceed thresholds
7462
+ */
7463
+ createHalsteadViolation(metadata, metricValue, threshold, metricType) {
7464
+ const warningThreshold = threshold * SEVERITY.warning;
7465
+ const errorThreshold = threshold * SEVERITY.error;
7466
+ if (metricValue < warningThreshold) return null;
7467
+ const violationSeverity = metricValue >= errorThreshold ? "error" : "warning";
7468
+ const effectiveThreshold = violationSeverity === "error" ? errorThreshold : warningThreshold;
7469
+ let message;
7470
+ if (metricType === "halstead_effort") {
7471
+ const timeMinutes = this.effortToMinutes(metricValue);
7472
+ const thresholdMinutes = this.effortToMinutes(effectiveThreshold);
7473
+ message = `Time to understand ~${this.formatTime(timeMinutes)} exceeds threshold ${this.formatTime(thresholdMinutes)}`;
7474
+ } else {
7475
+ message = `Estimated bugs ${metricValue.toFixed(2)} exceeds threshold ${effectiveThreshold.toFixed(1)}`;
7476
+ }
7477
+ const halsteadDetails = {
7478
+ volume: metadata.halsteadVolume || 0,
7479
+ difficulty: metadata.halsteadDifficulty || 0,
7480
+ effort: metadata.halsteadEffort || 0,
7481
+ bugs: metadata.halsteadBugs || 0
7482
+ };
7483
+ let complexity;
7484
+ let displayThreshold;
7485
+ if (metricType === "halstead_effort") {
7486
+ complexity = Math.round(this.effortToMinutes(metricValue));
7487
+ displayThreshold = Math.round(this.effortToMinutes(effectiveThreshold));
7488
+ } else {
7489
+ complexity = metricValue;
7490
+ displayThreshold = effectiveThreshold;
7491
+ }
7492
+ return {
7493
+ filepath: metadata.file,
7494
+ startLine: metadata.startLine,
7495
+ endLine: metadata.endLine,
7496
+ symbolName: metadata.symbolName || "unknown",
7497
+ symbolType: metadata.symbolType,
7498
+ language: metadata.language,
7499
+ complexity,
7500
+ threshold: displayThreshold,
7501
+ severity: violationSeverity,
7502
+ message,
7503
+ metricType,
7504
+ halsteadDetails
7505
+ };
7506
+ }
7507
+ /**
7508
+ * Check complexity metrics and create violations for a single chunk.
7509
+ */
7510
+ checkChunkComplexity(metadata, thresholds) {
7511
+ const violations = [];
7512
+ if (metadata.complexity) {
7513
+ const v = this.createViolation(metadata, metadata.complexity, thresholds.testPaths, "cyclomatic");
7514
+ if (v) violations.push(v);
7515
+ }
7516
+ if (metadata.cognitiveComplexity) {
7517
+ const v = this.createViolation(metadata, metadata.cognitiveComplexity, thresholds.mentalLoad, "cognitive");
7518
+ if (v) violations.push(v);
7519
+ }
7520
+ if (thresholds.halsteadEffort && metadata.halsteadEffort) {
7521
+ const v = this.createHalsteadViolation(metadata, metadata.halsteadEffort, thresholds.halsteadEffort, "halstead_effort");
7522
+ if (v) violations.push(v);
7523
+ }
7524
+ if (thresholds.estimatedBugs && metadata.halsteadBugs) {
7525
+ const v = this.createHalsteadViolation(metadata, metadata.halsteadBugs, thresholds.estimatedBugs, "halstead_bugs");
7526
+ if (v) violations.push(v);
7527
+ }
7528
+ return violations;
7529
+ }
7530
+ /**
7531
+ * Convert time in minutes to Halstead effort.
7532
+ * This is the inverse of effortToMinutes().
7533
+ * Formula: Time (seconds) = Effort / 18 (Stroud number)
7534
+ * So: Effort = Time (minutes) * 60 * 18 = Time * 1080
7535
+ */
7536
+ minutesToEffort(minutes) {
7537
+ return minutes * 1080;
7538
+ }
7539
+ /**
7540
+ * Find all complexity violations based on thresholds.
7541
+ * Checks cyclomatic, cognitive, and Halstead complexity.
7542
+ */
7543
+ findViolations(chunks) {
7544
+ const configThresholds = this.config.complexity?.thresholds;
7545
+ const halsteadEffort = configThresholds?.timeToUnderstandMinutes ? this.minutesToEffort(configThresholds.timeToUnderstandMinutes) : this.minutesToEffort(60);
7546
+ const thresholds = {
7547
+ testPaths: configThresholds?.testPaths ?? 15,
7548
+ mentalLoad: configThresholds?.mentalLoad ?? 15,
7549
+ halsteadEffort,
7550
+ // Converted from minutes to effort internally (see above)
7551
+ estimatedBugs: configThresholds?.estimatedBugs ?? 1.5
7552
+ // Direct decimal value (no conversion needed)
7553
+ };
7554
+ const functionChunks = this.getUniqueFunctionChunks(chunks);
7555
+ return functionChunks.flatMap(
7556
+ (metadata) => this.checkChunkComplexity(metadata, thresholds)
7557
+ );
7558
+ }
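
Only the thresholds a user actually sets need to appear in the config; findViolations falls back to the defaults (15 test paths, 15 mental load, 60 minutes, 1.5 estimated bugs) for the rest. A hedged sketch of an override object in the shape read above (the key names are the ones dereferenced from this.config.complexity):

const config = {
  complexity: {
    enabled: true,
    thresholds: {
      testPaths: 10,                 // stricter than the default 15
      timeToUnderstandMinutes: 45,   // converted to Halstead effort internally
    },
  },
};
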
7559
+ /**
7560
+ * Build the final report with summary and per-file data
7561
+ */
7562
+ buildReport(violations, allChunks) {
7563
+ const fileViolationsMap = /* @__PURE__ */ new Map();
7564
+ for (const violation of violations) {
7565
+ const normalizedPath = this.normalizeFilePath(violation.filepath);
7566
+ violation.filepath = normalizedPath;
7567
+ const existing = fileViolationsMap.get(normalizedPath) || [];
7568
+ existing.push(violation);
7569
+ fileViolationsMap.set(normalizedPath, existing);
7570
+ }
7571
+ const analyzedFiles = new Set(allChunks.map((c) => this.normalizeFilePath(c.metadata.file)));
7572
+ const files = {};
7573
+ for (const filepath of analyzedFiles) {
7574
+ const fileViolations = fileViolationsMap.get(filepath) || [];
7575
+ files[filepath] = {
7576
+ violations: fileViolations,
7577
+ dependents: [],
7578
+ // Will be enriched later if needed
7579
+ testAssociations: [],
7580
+ // Will be enriched later if needed
7581
+ riskLevel: this.calculateRiskLevel(fileViolations)
7582
+ };
7583
+ }
7584
+ const errorCount = violations.filter((v) => v.severity === "error").length;
7585
+ const warningCount = violations.filter((v) => v.severity === "warning").length;
7586
+ const complexityValues = allChunks.filter((c) => c.metadata.complexity !== void 0 && c.metadata.complexity > 0).map((c) => c.metadata.complexity);
7587
+ const avgComplexity = complexityValues.length > 0 ? complexityValues.reduce((sum, val) => sum + val, 0) / complexityValues.length : 0;
7588
+ const maxComplexity = complexityValues.length > 0 ? Math.max(...complexityValues) : 0;
7589
+ return {
7590
+ summary: {
7591
+ filesAnalyzed: analyzedFiles.size,
7592
+ totalViolations: violations.length,
7593
+ bySeverity: { error: errorCount, warning: warningCount },
7594
+ avgComplexity: Math.round(avgComplexity * 10) / 10,
7595
+ // Round to 1 decimal
7596
+ maxComplexity
7597
+ },
7598
+ files
7599
+ };
7600
+ }
7601
+ /**
7602
+ * Calculate risk level based on violations
7603
+ */
7604
+ calculateRiskLevel(violations) {
7605
+ if (violations.length === 0) return "low";
7606
+ const hasErrors = violations.some((v) => v.severity === "error");
7607
+ const errorCount = violations.filter((v) => v.severity === "error").length;
7608
+ if (errorCount >= 3) return "critical";
7609
+ if (hasErrors) return "high";
7610
+ if (violations.length >= 3) return "medium";
7611
+ return "low";
7612
+ }
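
By example, the risk ladder above maps violation sets as follows:

// []                          -> "low"
// [warning]                   -> "low"
// [warning, warning, warning] -> "medium"
// [error]                     -> "high"
// [error, error, error]       -> "critical"
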
7613
+ /**
7614
+ * Enrich files that have violations with dependency data
7615
+ * This adds:
7616
+ * - List of dependent files (who imports this?)
7617
+ * - Boosted risk level based on dependents + complexity
7618
+ */
7619
+ enrichWithDependencies(report, allChunks) {
7620
+ const workspaceRoot = process.cwd();
7621
+ const filesWithViolations = Object.entries(report.files).filter(([_, data]) => data.violations.length > 0).map(([filepath, _]) => filepath);
7622
+ for (const filepath of filesWithViolations) {
7623
+ const fileData = report.files[filepath];
7624
+ const depAnalysis = analyzeDependencies(filepath, allChunks, workspaceRoot);
7625
+ fileData.dependents = depAnalysis.dependents.map((d) => d.filepath);
7626
+ fileData.dependentCount = depAnalysis.dependentCount;
7627
+ if (RISK_ORDER2[depAnalysis.riskLevel] > RISK_ORDER2[fileData.riskLevel]) {
7628
+ fileData.riskLevel = depAnalysis.riskLevel;
7629
+ }
7630
+ if (depAnalysis.complexityMetrics) {
7631
+ fileData.dependentComplexityMetrics = {
7632
+ averageComplexity: depAnalysis.complexityMetrics.averageComplexity,
7633
+ maxComplexity: depAnalysis.complexityMetrics.maxComplexity,
7634
+ filesWithComplexityData: depAnalysis.complexityMetrics.filesWithComplexityData
7635
+ };
7636
+ }
7637
+ }
7638
+ }
7639
+ };
7640
+
7641
+ // src/mcp/handlers/get-complexity.ts
7642
+ function transformViolation(v, fileData) {
7643
+ return {
7644
+ filepath: v.filepath,
7645
+ symbolName: v.symbolName,
7646
+ symbolType: v.symbolType,
7647
+ startLine: v.startLine,
7648
+ endLine: v.endLine,
7649
+ complexity: v.complexity,
7650
+ metricType: v.metricType,
7651
+ threshold: v.threshold,
7652
+ severity: v.severity,
7653
+ language: v.language,
7654
+ message: v.message,
7655
+ dependentCount: fileData.dependentCount || 0,
7656
+ riskLevel: fileData.riskLevel,
7657
+ ...v.halsteadDetails && { halsteadDetails: v.halsteadDetails }
7658
+ };
7659
+ }
7660
+ async function handleGetComplexity(args, ctx) {
7661
+ const { vectorDB, config, log, checkAndReconnect, getIndexMetadata } = ctx;
7662
+ return await wrapToolHandler(
7663
+ GetComplexitySchema,
7664
+ async (validatedArgs) => {
7665
+ log("Analyzing complexity...");
7666
+ await checkAndReconnect();
7667
+ const analyzer = new ComplexityAnalyzer(vectorDB, config);
7668
+ const report = await analyzer.analyze(validatedArgs.files);
7669
+ log(`Analyzed ${report.summary.filesAnalyzed} files`);
7670
+ const allViolations = collect(Object.entries(report.files)).flatMap(
7671
+ ([, fileData]) => fileData.violations.map((v) => transformViolation(v, fileData))
7672
+ ).sortByDesc("complexity").all();
7673
+ const violations = validatedArgs.threshold !== void 0 ? allViolations.filter((v) => v.complexity >= validatedArgs.threshold) : allViolations;
7674
+ const topViolations = violations.slice(0, validatedArgs.top);
7675
+ const bySeverity = collect(violations).countBy("severity").all();
7676
+ return {
7677
+ indexInfo: getIndexMetadata(),
7678
+ summary: {
7679
+ filesAnalyzed: report.summary.filesAnalyzed,
7680
+ avgComplexity: report.summary.avgComplexity,
7681
+ maxComplexity: report.summary.maxComplexity,
7682
+ violationCount: violations.length,
7683
+ bySeverity: {
7684
+ error: bySeverity["error"] || 0,
7685
+ warning: bySeverity["warning"] || 0
7686
+ }
7687
+ },
7688
+ violations: topViolations
7689
+ };
7690
+ }
7691
+ )(args);
7692
+ }
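
A hedged sketch of invoking the handler directly (argument names follow GetComplexitySchema as used above; the file path is illustrative, and ctx needs the fields destructured at the top of handleGetComplexity):

const result = await handleGetComplexity(
  { files: ["src/indexer/ast/parser.ts"], threshold: 20, top: 5 },
  ctx
);
// result.summary.bySeverity -> { error: n, warning: m }
// result.violations         -> at most 5 entries, sorted by complexity descending
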
7693
+
7694
+ // src/mcp/handlers/index.ts
7695
+ var toolHandlers = {
7696
+ "semantic_search": handleSemanticSearch,
7697
+ "find_similar": handleFindSimilar,
7698
+ "get_files_context": handleGetFilesContext,
7699
+ "list_functions": handleListFunctions,
7700
+ "get_dependents": handleGetDependents,
7701
+ "get_complexity": handleGetComplexity
7702
+ };
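
This registry replaces the switch statement visible in the removed hunk further down: adding a tool is now a handler with the shared (args, ctx) signature plus one entry here, and the dispatch in registerToolCallHandler needs no changes. A sketch (handleGetHotspots is hypothetical):

const extendedHandlers = {
  ...toolHandlers,
  "get_hotspots": handleGetHotspots,
};
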
7703
+
7704
+ // src/mcp/server.ts
7705
+ init_lancedb();
7706
+ init_local();
7707
+ init_tracker();
7708
+ init_incremental();
7709
+ init_service();
7710
+ init_manifest();
7711
+ init_utils();
7712
+
7713
+ // src/watcher/index.ts
7714
+ init_schema();
7715
+ import chokidar from "chokidar";
7716
+ var FileWatcher = class {
7717
+ watcher = null;
7718
+ debounceTimers = /* @__PURE__ */ new Map();
7719
+ config;
7720
+ rootDir;
7721
+ onChangeHandler = null;
7722
+ constructor(rootDir, config) {
7723
+ this.rootDir = rootDir;
7724
+ this.config = config;
7725
+ }
7726
+ /**
7727
+ * Starts watching files for changes.
7728
+ *
7729
+ * @param handler - Callback function called when files change
7730
+ */
7731
+ async start(handler) {
7732
+ if (this.watcher) {
7733
+ throw new Error("File watcher is already running");
7734
+ }
7735
+ this.onChangeHandler = handler;
7736
+ let includePatterns;
7737
+ let excludePatterns;
7738
+ if (isLegacyConfig(this.config)) {
7739
+ includePatterns = this.config.indexing.include;
7740
+ excludePatterns = this.config.indexing.exclude;
7741
+ } else if (isModernConfig(this.config)) {
7742
+ includePatterns = this.config.frameworks.flatMap((f) => f.config.include);
7743
+ excludePatterns = this.config.frameworks.flatMap((f) => f.config.exclude);
7744
+ } else {
7745
+ includePatterns = ["**/*"];
7746
+ excludePatterns = [];
7747
+ }
7748
+ this.watcher = chokidar.watch(includePatterns, {
7749
+ cwd: this.rootDir,
7750
+ ignored: excludePatterns,
7751
+ persistent: true,
7752
+ ignoreInitial: true,
7753
+ // Don't trigger for existing files
7754
+ awaitWriteFinish: {
7755
+ stabilityThreshold: 500,
7756
+ // Wait 500ms for file to stop changing
7757
+ pollInterval: 100
7758
+ },
7759
+ // Performance optimizations
7760
+ usePolling: false,
7761
+ interval: 100,
7762
+ binaryInterval: 300
7763
+ });
7764
+ this.watcher.on("add", (filepath) => this.handleChange("add", filepath)).on("change", (filepath) => this.handleChange("change", filepath)).on("unlink", (filepath) => this.handleChange("unlink", filepath)).on("error", (error) => {
7765
+ console.error(`[Lien] File watcher error: ${error}`);
7766
+ });
7767
+ await new Promise((resolve) => {
7768
+ this.watcher.on("ready", () => {
7769
+ resolve();
7770
+ });
7771
+ });
7772
+ }
7773
+ /**
7774
+ * Handles a file change event with debouncing.
7775
+ * Debouncing prevents repeated reindexing when a file is saved several times in quick succession.
7776
+ */
7777
+ handleChange(type, filepath) {
7778
+ const existingTimer = this.debounceTimers.get(filepath);
7779
+ if (existingTimer) {
7780
+ clearTimeout(existingTimer);
7781
+ }
7782
+ const timer = setTimeout(() => {
7783
+ this.debounceTimers.delete(filepath);
7784
+ if (this.onChangeHandler) {
7785
+ const absolutePath = filepath.startsWith("/") ? filepath : `${this.rootDir}/${filepath}`;
7786
+ try {
7787
+ const result = this.onChangeHandler({
7788
+ type,
7789
+ filepath: absolutePath
7790
+ });
7791
+ if (result instanceof Promise) {
7792
+ result.catch((error) => {
7793
+ console.error(`[Lien] Error handling file change: ${error}`);
7794
+ });
7795
+ }
7796
+ } catch (error) {
7797
+ console.error(`[Lien] Error handling file change: ${error}`);
7798
+ }
7799
+ }
7800
+ }, this.config.fileWatching.debounceMs);
7801
+ this.debounceTimers.set(filepath, timer);
7802
+ }
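
The method above is a keyed debounce: one timer per file path, reset on every new event for that path. Reduced to a standalone sketch:

const timers = new Map();
function debounced(key, fn, ms) {
  const prev = timers.get(key);
  if (prev) clearTimeout(prev);             // a new event resets this key's timer
  timers.set(key, setTimeout(() => {
    timers.delete(key);                     // clean up before firing
    fn();
  }, ms));
}
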
7803
+ /**
7804
+ * Stops the file watcher and cleans up resources.
7805
+ */
7806
+ async stop() {
7807
+ if (!this.watcher) {
7808
+ return;
7809
+ }
7810
+ for (const timer of this.debounceTimers.values()) {
7811
+ clearTimeout(timer);
7812
+ }
7813
+ this.debounceTimers.clear();
7814
+ await this.watcher.close();
7815
+ this.watcher = null;
7816
+ this.onChangeHandler = null;
7817
+ }
7818
+ /**
7819
+ * Gets the list of files currently being watched.
7820
+ */
7821
+ getWatchedFiles() {
7822
+ if (!this.watcher) {
7823
+ return [];
7824
+ }
7825
+ const watched = this.watcher.getWatched();
7826
+ const files = [];
7827
+ for (const [dir, filenames] of Object.entries(watched)) {
7828
+ for (const filename of filenames) {
7829
+ files.push(`${dir}/${filename}`);
7830
+ }
7831
+ }
7832
+ return files;
7833
+ }
7834
+ /**
7835
+ * Checks if the watcher is currently running.
7836
+ */
7837
+ isRunning() {
7838
+ return this.watcher !== null;
7839
+ }
7840
+ };
7841
+
7842
+ // src/mcp/server.ts
7843
+ init_constants();
7844
+ init_errors();
7845
+ var __filename4 = fileURLToPath4(import.meta.url);
7846
+ var __dirname4 = dirname3(__filename4);
7847
+ var require4 = createRequire3(import.meta.url);
7848
+ var packageJson3;
7849
+ try {
7850
+ packageJson3 = require4(join3(__dirname4, "../package.json"));
7851
+ } catch {
7852
+ packageJson3 = require4(join3(__dirname4, "../../package.json"));
7853
+ }
7854
+ async function initializeDatabase(rootDir, log) {
7855
+ const embeddings = new LocalEmbeddings();
7856
+ const vectorDB = new VectorDB(rootDir);
7857
+ log("Loading embedding model...");
7858
+ await embeddings.initialize();
7859
+ log("Loading vector database...");
7860
+ await vectorDB.initialize();
7861
+ log("Embeddings and vector DB ready");
7862
+ return { embeddings, vectorDB };
7863
+ }
7864
+ async function handleAutoIndexing(vectorDB, config, rootDir, log) {
7865
+ const hasIndex = await vectorDB.hasData();
7866
+ if (!hasIndex && config.mcp.autoIndexOnFirstRun) {
7867
+ log("\u{1F4E6} No index found - running initial indexing...");
7868
+ log("\u23F1\uFE0F This may take 5-20 minutes depending on project size");
6232
7869
  try {
6233
- const versionChanged = await vectorDB.checkVersion();
6234
- if (versionChanged) {
6235
- log("Index version changed, reconnecting to database...");
6236
- await vectorDB.reconnect();
6237
- log("Reconnected to updated index");
7870
+ const { indexCodebase: indexCodebase2 } = await Promise.resolve().then(() => (init_indexer(), indexer_exports));
7871
+ await indexCodebase2({ rootDir, verbose: true });
7872
+ log("\u2705 Initial indexing complete!");
7873
+ } catch (error) {
7874
+ log(`\u26A0\uFE0F Initial indexing failed: ${error}`, "warning");
7875
+ log("You can manually run: lien index", "warning");
7876
+ }
7877
+ } else if (!hasIndex) {
7878
+ log("\u26A0\uFE0F No index found. Auto-indexing is disabled in config.", "warning");
7879
+ log('Run "lien index" to index your codebase.', "warning");
7880
+ }
7881
+ }
7882
+ async function setupGitDetection(config, rootDir, vectorDB, embeddings, verbose, log) {
7883
+ if (!config.gitDetection.enabled) {
7884
+ log("Git detection disabled by configuration");
7885
+ return { gitTracker: null, gitPollInterval: null };
7886
+ }
7887
+ const gitAvailable = await isGitAvailable();
7888
+ const isRepo = await isGitRepo(rootDir);
7889
+ if (!gitAvailable) {
7890
+ log("Git not available - git detection disabled");
7891
+ return { gitTracker: null, gitPollInterval: null };
7892
+ }
7893
+ if (!isRepo) {
7894
+ log("Not a git repository - git detection disabled");
7895
+ return { gitTracker: null, gitPollInterval: null };
7896
+ }
7897
+ log("\u2713 Detected git repository");
7898
+ const gitTracker = new GitStateTracker(rootDir, vectorDB.dbPath);
7899
+ try {
7900
+ log("Checking for git changes...");
7901
+ const changedFiles = await gitTracker.initialize();
7902
+ if (changedFiles && changedFiles.length > 0) {
7903
+ log(`\u{1F33F} Git changes detected: ${changedFiles.length} files changed`);
7904
+ const count = await indexMultipleFiles(changedFiles, vectorDB, embeddings, config, { verbose });
7905
+ log(`\u2713 Reindexed ${count} files`);
7906
+ } else {
7907
+ log("\u2713 Index is up to date with git state");
7908
+ }
7909
+ } catch (error) {
7910
+ log(`Failed to check git state on startup: ${error}`, "warning");
7911
+ }
7912
+ log(`\u2713 Git detection enabled (checking every ${config.gitDetection.pollIntervalMs / 1e3}s)`);
7913
+ const gitPollInterval = setInterval(async () => {
7914
+ try {
7915
+ const changedFiles = await gitTracker.detectChanges();
7916
+ if (changedFiles && changedFiles.length > 0) {
7917
+ log(`\u{1F33F} Git change detected: ${changedFiles.length} files changed`);
7918
+ indexMultipleFiles(changedFiles, vectorDB, embeddings, config, { verbose }).then((count) => log(`\u2713 Background reindex complete: ${count} files`)).catch((error) => log(`Background reindex failed: ${error}`, "warning"));
6238
7919
  }
6239
7920
  } catch (error) {
6240
- log(`Version check failed: ${error}`);
7921
+ log(`Git detection check failed: ${error}`, "warning");
6241
7922
  }
6242
- };
6243
- const getIndexMetadata = () => ({
6244
- indexVersion: vectorDB.getCurrentVersion(),
6245
- indexDate: vectorDB.getVersionDate()
6246
- });
6247
- const versionCheckInterval = setInterval(async () => {
6248
- await checkAndReconnect();
6249
- }, VERSION_CHECK_INTERVAL_MS);
7923
+ }, config.gitDetection.pollIntervalMs);
7924
+ return { gitTracker, gitPollInterval };
7925
+ }
7926
+ async function setupFileWatching(watch, config, rootDir, vectorDB, embeddings, verbose, log) {
7927
+ const fileWatchingEnabled = watch !== void 0 ? watch : config.fileWatching.enabled;
7928
+ if (!fileWatchingEnabled) return null;
7929
+ log("\u{1F440} Starting file watcher...");
7930
+ const fileWatcher = new FileWatcher(rootDir, config);
7931
+ try {
7932
+ await fileWatcher.start(async (event) => {
7933
+ const { type, filepath } = event;
7934
+ if (type === "unlink") {
7935
+ log(`\u{1F5D1}\uFE0F File deleted: ${filepath}`);
7936
+ try {
7937
+ await vectorDB.deleteByFile(filepath);
7938
+ const manifest = new ManifestManager(vectorDB.dbPath);
7939
+ await manifest.removeFile(filepath);
7940
+ log(`\u2713 Removed ${filepath} from index`);
7941
+ } catch (error) {
7942
+ log(`Failed to remove ${filepath}: ${error}`, "warning");
7943
+ }
7944
+ } else {
7945
+ const action = type === "add" ? "added" : "changed";
7946
+ log(`\u{1F4DD} File ${action}: ${filepath}`);
7947
+ indexSingleFile(filepath, vectorDB, embeddings, config, { verbose }).catch((error) => log(`Failed to reindex ${filepath}: ${error}`, "warning"));
7948
+ }
7949
+ });
7950
+ log(`\u2713 File watching enabled (watching ${fileWatcher.getWatchedFiles().length} files)`);
7951
+ return fileWatcher;
7952
+ } catch (error) {
7953
+ log(`Failed to start file watcher: ${error}`, "warning");
7954
+ return null;
7955
+ }
7956
+ }
7957
+ function registerToolCallHandler(server, toolContext, log) {
6250
7958
  server.setRequestHandler(CallToolRequestSchema, async (request) => {
6251
7959
  const { name, arguments: args } = request.params;
6252
7960
  log(`Handling tool call: ${name}`);
7961
+ const handler = toolHandlers[name];
7962
+ if (!handler) {
7963
+ const error = new LienError(
7964
+ `Unknown tool: ${name}`,
7965
+ "INVALID_INPUT" /* INVALID_INPUT */,
7966
+ { requestedTool: name, availableTools: tools.map((t) => t.name) },
7967
+ "medium",
7968
+ false,
7969
+ false
7970
+ );
7971
+ return { isError: true, content: [{ type: "text", text: JSON.stringify(error.toJSON(), null, 2) }] };
7972
+ }
6253
7973
  try {
6254
- switch (name) {
6255
- case "semantic_search":
6256
- return await wrapToolHandler(
6257
- SemanticSearchSchema,
6258
- async (validatedArgs) => {
6259
- log(`Searching for: "${validatedArgs.query}"`);
6260
- await checkAndReconnect();
6261
- const queryEmbedding = await embeddings.embed(validatedArgs.query);
6262
- const results = await vectorDB.search(queryEmbedding, validatedArgs.limit, validatedArgs.query);
6263
- log(`Found ${results.length} results`);
6264
- return {
6265
- indexInfo: getIndexMetadata(),
6266
- results
6267
- };
6268
- }
6269
- )(args);
6270
- case "find_similar":
6271
- return await wrapToolHandler(
6272
- FindSimilarSchema,
6273
- async (validatedArgs) => {
6274
- log(`Finding similar code...`);
6275
- await checkAndReconnect();
6276
- const codeEmbedding = await embeddings.embed(validatedArgs.code);
6277
- const results = await vectorDB.search(codeEmbedding, validatedArgs.limit, validatedArgs.code);
6278
- log(`Found ${results.length} similar chunks`);
6279
- return {
6280
- indexInfo: getIndexMetadata(),
6281
- results
6282
- };
6283
- }
6284
- )(args);
6285
- case "get_files_context":
6286
- return await wrapToolHandler(
6287
- GetFilesContextSchema,
6288
- async (validatedArgs) => {
6289
- const filepaths = Array.isArray(validatedArgs.filepaths) ? validatedArgs.filepaths : [validatedArgs.filepaths];
6290
- const isSingleFile = !Array.isArray(validatedArgs.filepaths);
6291
- log(`Getting context for: ${filepaths.join(", ")}`);
6292
- await checkAndReconnect();
6293
- const workspaceRoot = process.cwd().replace(/\\/g, "/");
6294
- const fileEmbeddings = await Promise.all(filepaths.map((fp) => embeddings.embed(fp)));
6295
- const allFileSearches = await Promise.all(
6296
- fileEmbeddings.map(
6297
- (embedding, i) => vectorDB.search(embedding, 50, filepaths[i])
6298
- )
6299
- );
6300
- const fileChunksMap = filepaths.map((filepath, i) => {
6301
- const allResults = allFileSearches[i];
6302
- const targetCanonical = getCanonicalPath(filepath, workspaceRoot);
6303
- return allResults.filter((r) => {
6304
- const chunkCanonical = getCanonicalPath(r.metadata.file, workspaceRoot);
6305
- return chunkCanonical === targetCanonical;
6306
- });
6307
- });
6308
- let relatedChunksMap = [];
6309
- if (validatedArgs.includeRelated) {
6310
- const filesWithChunks = fileChunksMap.map((chunks, i) => ({ chunks, filepath: filepaths[i], index: i })).filter(({ chunks }) => chunks.length > 0);
6311
- if (filesWithChunks.length > 0) {
6312
- const relatedEmbeddings = await Promise.all(
6313
- filesWithChunks.map(({ chunks }) => embeddings.embed(chunks[0].content))
6314
- );
6315
- const relatedSearches = await Promise.all(
6316
- relatedEmbeddings.map(
6317
- (embedding, i) => vectorDB.search(embedding, 5, filesWithChunks[i].chunks[0].content)
6318
- )
6319
- );
6320
- relatedChunksMap = Array.from({ length: filepaths.length }, () => []);
6321
- filesWithChunks.forEach(({ filepath, index }, i) => {
6322
- const related = relatedSearches[i];
6323
- const targetCanonical = getCanonicalPath(filepath, workspaceRoot);
6324
- relatedChunksMap[index] = related.filter((r) => {
6325
- const chunkCanonical = getCanonicalPath(r.metadata.file, workspaceRoot);
6326
- return chunkCanonical !== targetCanonical;
6327
- });
6328
- });
6329
- }
6330
- }
6331
- const allChunks = await vectorDB.scanWithFilter({ limit: SCAN_LIMIT });
6332
- if (allChunks.length === SCAN_LIMIT) {
6333
- log(`WARNING: Scanned ${SCAN_LIMIT} chunks (limit reached). Test associations may be incomplete for large codebases.`);
6334
- }
6335
- const pathCache = /* @__PURE__ */ new Map();
6336
- const normalizePathCached = (path19) => {
6337
- if (pathCache.has(path19)) return pathCache.get(path19);
6338
- const normalized = normalizePath(path19, workspaceRoot);
6339
- pathCache.set(path19, normalized);
6340
- return normalized;
6341
- };
6342
- const testAssociationsMap = filepaths.map((filepath) => {
6343
- const normalizedTarget = normalizePathCached(filepath);
6344
- const testFiles = /* @__PURE__ */ new Set();
6345
- for (const chunk of allChunks) {
6346
- const chunkFile2 = getCanonicalPath(chunk.metadata.file, workspaceRoot);
6347
- if (!isTestFile2(chunkFile2)) continue;
6348
- const imports = chunk.metadata.imports || [];
6349
- for (const imp of imports) {
6350
- const normalizedImport = normalizePathCached(imp);
6351
- if (matchesFile(normalizedImport, normalizedTarget)) {
6352
- testFiles.add(chunkFile2);
6353
- break;
6354
- }
6355
- }
6356
- }
6357
- return Array.from(testFiles);
6358
- });
6359
- const filesData = {};
6360
- filepaths.forEach((filepath, i) => {
6361
- const fileChunks = fileChunksMap[i];
6362
- const relatedChunks = relatedChunksMap[i] || [];
6363
- const seenChunks = /* @__PURE__ */ new Set();
6364
- const dedupedChunks = [...fileChunks, ...relatedChunks].filter((chunk) => {
6365
- const canonicalFile = getCanonicalPath(chunk.metadata.file, workspaceRoot);
6366
- const chunkId = `${canonicalFile}:${chunk.metadata.startLine}-${chunk.metadata.endLine}`;
6367
- if (seenChunks.has(chunkId)) return false;
6368
- seenChunks.add(chunkId);
6369
- return true;
6370
- });
6371
- filesData[filepath] = {
6372
- chunks: dedupedChunks,
6373
- testAssociations: testAssociationsMap[i]
6374
- };
6375
- });
6376
- log(`Found ${Object.values(filesData).reduce((sum, f) => sum + f.chunks.length, 0)} total chunks`);
6377
- if (isSingleFile) {
6378
- const filepath = filepaths[0];
6379
- return {
6380
- indexInfo: getIndexMetadata(),
6381
- file: filepath,
6382
- chunks: filesData[filepath].chunks,
6383
- testAssociations: filesData[filepath].testAssociations
6384
- };
6385
- } else {
6386
- return {
6387
- indexInfo: getIndexMetadata(),
6388
- files: filesData
6389
- };
6390
- }
6391
- }
6392
- )(args);
6393
- case "list_functions":
6394
- return await wrapToolHandler(
6395
- ListFunctionsSchema,
6396
- async (validatedArgs) => {
6397
- log("Listing functions with symbol metadata...");
6398
- await checkAndReconnect();
6399
- let results;
6400
- let usedMethod = "symbols";
6401
- try {
6402
- results = await vectorDB.querySymbols({
6403
- language: validatedArgs.language,
6404
- pattern: validatedArgs.pattern,
6405
- limit: 50
6406
- });
6407
- if (results.length === 0 && (validatedArgs.language || validatedArgs.pattern)) {
6408
- log("No symbol results, falling back to content scan...");
6409
- results = await vectorDB.scanWithFilter({
6410
- language: validatedArgs.language,
6411
- pattern: validatedArgs.pattern,
6412
- limit: 50
6413
- });
6414
- usedMethod = "content";
6415
- }
6416
- } catch (error) {
6417
- log(`Symbol query failed, falling back to content scan: ${error}`);
6418
- results = await vectorDB.scanWithFilter({
6419
- language: validatedArgs.language,
6420
- pattern: validatedArgs.pattern,
6421
- limit: 50
6422
- });
6423
- usedMethod = "content";
6424
- }
6425
- log(`Found ${results.length} matches using ${usedMethod} method`);
6426
- return {
6427
- indexInfo: getIndexMetadata(),
6428
- method: usedMethod,
6429
- results,
6430
- note: usedMethod === "content" ? 'Using content search. Run "lien reindex" to enable faster symbol-based queries.' : void 0
6431
- };
6432
- }
6433
- )(args);
6434
- case "get_dependents":
6435
- return await wrapToolHandler(
6436
- GetDependentsSchema,
6437
- async (validatedArgs) => {
6438
- log(`Finding dependents of: ${validatedArgs.filepath}`);
6439
- await checkAndReconnect();
6440
- const allChunks = await vectorDB.scanWithFilter({ limit: SCAN_LIMIT });
6441
- if (allChunks.length === SCAN_LIMIT) {
6442
- log(`WARNING: Scanned ${SCAN_LIMIT} chunks (limit reached). Results may be incomplete for large codebases.`);
6443
- }
6444
- log(`Scanning ${allChunks.length} chunks for imports...`);
6445
- const workspaceRoot = process.cwd().replace(/\\/g, "/");
6446
- const pathCache = /* @__PURE__ */ new Map();
6447
- const normalizePathCached = (path19) => {
6448
- if (pathCache.has(path19)) return pathCache.get(path19);
6449
- const normalized = normalizePath(path19, workspaceRoot);
6450
- pathCache.set(path19, normalized);
6451
- return normalized;
6452
- };
6453
- const importIndex = /* @__PURE__ */ new Map();
6454
- for (const chunk of allChunks) {
6455
- const imports = chunk.metadata.imports || [];
6456
- for (const imp of imports) {
6457
- const normalizedImport = normalizePathCached(imp);
6458
- if (!importIndex.has(normalizedImport)) {
6459
- importIndex.set(normalizedImport, []);
6460
- }
6461
- importIndex.get(normalizedImport).push(chunk);
6462
- }
6463
- }
6464
- const normalizedTarget = normalizePathCached(validatedArgs.filepath);
6465
- const dependentChunks = [];
6466
- const seenChunkIds = /* @__PURE__ */ new Set();
6467
- if (importIndex.has(normalizedTarget)) {
6468
- for (const chunk of importIndex.get(normalizedTarget)) {
6469
- const chunkId = `${chunk.metadata.file}:${chunk.metadata.startLine}-${chunk.metadata.endLine}`;
6470
- if (!seenChunkIds.has(chunkId)) {
6471
- dependentChunks.push(chunk);
6472
- seenChunkIds.add(chunkId);
6473
- }
6474
- }
6475
- }
6476
- for (const [normalizedImport, chunks] of importIndex.entries()) {
6477
- if (normalizedImport !== normalizedTarget && matchesFile(normalizedImport, normalizedTarget)) {
6478
- for (const chunk of chunks) {
6479
- const chunkId = `${chunk.metadata.file}:${chunk.metadata.startLine}-${chunk.metadata.endLine}`;
6480
- if (!seenChunkIds.has(chunkId)) {
6481
- dependentChunks.push(chunk);
6482
- seenChunkIds.add(chunkId);
6483
- }
6484
- }
6485
- }
6486
- }
6487
- const chunksByFile = /* @__PURE__ */ new Map();
6488
- for (const chunk of dependentChunks) {
6489
- const canonical = getCanonicalPath(chunk.metadata.file, workspaceRoot);
6490
- const existing = chunksByFile.get(canonical) || [];
6491
- existing.push(chunk);
6492
- chunksByFile.set(canonical, existing);
6493
- }
6494
- const fileComplexities = [];
6495
- for (const [filepath, chunks] of chunksByFile.entries()) {
6496
- const complexities = chunks.map((c) => c.metadata.complexity).filter((c) => typeof c === "number" && c > 0);
6497
- if (complexities.length > 0) {
6498
- const sum = complexities.reduce((a, b) => a + b, 0);
6499
- const avg = sum / complexities.length;
6500
- const max = Math.max(...complexities);
6501
- fileComplexities.push({
6502
- filepath,
6503
- avgComplexity: Math.round(avg * 10) / 10,
6504
- // Round to 1 decimal
6505
- maxComplexity: max,
6506
- complexityScore: sum,
6507
- chunksWithComplexity: complexities.length
6508
- });
6509
- }
6510
- }
6511
- let complexityMetrics;
6512
- if (fileComplexities.length > 0) {
6513
- const allAvgs = fileComplexities.map((f) => f.avgComplexity);
6514
- const allMaxes = fileComplexities.map((f) => f.maxComplexity);
6515
- const totalAvg = allAvgs.reduce((a, b) => a + b, 0) / allAvgs.length;
6516
- const globalMax = Math.max(...allMaxes);
6517
- const highComplexityDependents = fileComplexities.filter((f) => f.maxComplexity > COMPLEXITY_THRESHOLDS.HIGH_COMPLEXITY_DEPENDENT).sort((a, b) => b.maxComplexity - a.maxComplexity).slice(0, 5).map((f) => ({
6518
- filepath: f.filepath,
6519
- maxComplexity: f.maxComplexity,
6520
- avgComplexity: f.avgComplexity
6521
- }));
6522
- let complexityRiskBoost = "low";
6523
- if (totalAvg > COMPLEXITY_THRESHOLDS.CRITICAL_AVG || globalMax > COMPLEXITY_THRESHOLDS.CRITICAL_MAX) {
6524
- complexityRiskBoost = "critical";
6525
- } else if (totalAvg > COMPLEXITY_THRESHOLDS.HIGH_AVG || globalMax > COMPLEXITY_THRESHOLDS.HIGH_MAX) {
6526
- complexityRiskBoost = "high";
6527
- } else if (totalAvg > COMPLEXITY_THRESHOLDS.MEDIUM_AVG || globalMax > COMPLEXITY_THRESHOLDS.MEDIUM_MAX) {
6528
- complexityRiskBoost = "medium";
6529
- }
6530
- complexityMetrics = {
6531
- averageComplexity: Math.round(totalAvg * 10) / 10,
6532
- maxComplexity: globalMax,
6533
- filesWithComplexityData: fileComplexities.length,
6534
- highComplexityDependents,
6535
- complexityRiskBoost
6536
- };
6537
- } else {
6538
- complexityMetrics = {
6539
- averageComplexity: 0,
6540
- maxComplexity: 0,
6541
- filesWithComplexityData: 0,
6542
- highComplexityDependents: [],
6543
- complexityRiskBoost: "low"
6544
- };
6545
- }
6546
- const uniqueFiles = Array.from(chunksByFile.keys()).map((filepath) => ({
6547
- filepath,
6548
- isTestFile: isTestFile2(filepath)
6549
- }));
6550
- const count = uniqueFiles.length;
6551
- let riskLevel = count === 0 ? "low" : count <= DEPENDENT_COUNT_THRESHOLDS.LOW ? "low" : count <= DEPENDENT_COUNT_THRESHOLDS.MEDIUM ? "medium" : count <= DEPENDENT_COUNT_THRESHOLDS.HIGH ? "high" : "critical";
6552
- const RISK_ORDER = { low: 0, medium: 1, high: 2, critical: 3 };
6553
- if (RISK_ORDER[complexityMetrics.complexityRiskBoost] > RISK_ORDER[riskLevel]) {
6554
- riskLevel = complexityMetrics.complexityRiskBoost;
6555
- }
6556
- log(`Found ${count} dependent files (risk: ${riskLevel}${complexityMetrics.filesWithComplexityData > 0 ? ", complexity-boosted" : ""})`);
6557
- let note;
6558
- if (allChunks.length === SCAN_LIMIT) {
6559
- note = `Warning: Scanned ${SCAN_LIMIT} chunks (limit reached). Results may be incomplete for large codebases. Some dependents might not be listed.`;
6560
- }
6561
- return {
6562
- indexInfo: getIndexMetadata(),
6563
- filepath: validatedArgs.filepath,
6564
- dependentCount: count,
6565
- riskLevel,
6566
- dependents: uniqueFiles,
6567
- complexityMetrics,
6568
- note
6569
- };
6570
- }
6571
- )(args);
6572
- default:
6573
- throw new LienError(
6574
- `Unknown tool: ${name}`,
6575
- "INVALID_INPUT" /* INVALID_INPUT */,
6576
- { requestedTool: name, availableTools: tools.map((t) => t.name) },
6577
- "medium",
6578
- false,
6579
- false
6580
- );
6581
- }
7974
+ return await handler(args, toolContext);
6582
7975
  } catch (error) {
6583
7976
  if (error instanceof LienError) {
6584
- return {
6585
- isError: true,
6586
- content: [{
6587
- type: "text",
6588
- text: JSON.stringify(error.toJSON(), null, 2)
6589
- }]
6590
- };
7977
+ return { isError: true, content: [{ type: "text", text: JSON.stringify(error.toJSON(), null, 2) }] };
6591
7978
  }
6592
7979
  console.error(`Unexpected error handling tool call ${name}:`, error);
6593
7980
  return {
6594
7981
  isError: true,
6595
7982
  content: [{
6596
7983
  type: "text",
6597
- text: JSON.stringify({
6598
- error: error instanceof Error ? error.message : "Unknown error",
6599
- code: "INTERNAL_ERROR" /* INTERNAL_ERROR */,
6600
- tool: name
6601
- }, null, 2)
7984
+ text: JSON.stringify({ error: error instanceof Error ? error.message : "Unknown error", code: "INTERNAL_ERROR" /* INTERNAL_ERROR */, tool: name }, null, 2)
6602
7985
  }]
6603
7986
  };
6604
7987
  }
6605
7988
  });
6606
- const config = await configService.load(rootDir);
6607
- const hasIndex = await vectorDB.hasData();
6608
- if (!hasIndex && config.mcp.autoIndexOnFirstRun) {
6609
- log("\u{1F4E6} No index found - running initial indexing...");
6610
- log("\u23F1\uFE0F This may take 5-20 minutes depending on project size");
6611
- try {
6612
- const { indexCodebase: indexCodebase2 } = await Promise.resolve().then(() => (init_indexer(), indexer_exports));
6613
- await indexCodebase2({ rootDir, verbose: true });
6614
- log("\u2705 Initial indexing complete!");
6615
- } catch (error) {
6616
- log(`\u26A0\uFE0F Initial indexing failed: ${error}`);
6617
- log("You can manually run: lien index");
7989
+ }
7990
+ async function startMCPServer(options) {
7991
+ const { rootDir, verbose, watch } = options;
7992
+ const earlyLog = (message, level = "info") => {
7993
+ if (verbose || level === "warning" || level === "error") {
7994
+ console.error(`[Lien MCP] [${level}] ${message}`);
6618
7995
  }
6619
- } else if (!hasIndex) {
6620
- log("\u26A0\uFE0F No index found. Auto-indexing is disabled in config.");
6621
- log('Run "lien index" to index your codebase.');
6622
- }
6623
- let gitTracker = null;
6624
- let gitPollInterval = null;
6625
- let fileWatcher = null;
6626
- if (config.gitDetection.enabled) {
6627
- const gitAvailable = await isGitAvailable();
6628
- const isRepo = await isGitRepo(rootDir);
6629
- if (gitAvailable && isRepo) {
6630
- log("\u2713 Detected git repository");
6631
- gitTracker = new GitStateTracker(rootDir, vectorDB.dbPath);
6632
- try {
6633
- log("Checking for git changes...");
6634
- const changedFiles = await gitTracker.initialize();
6635
- if (changedFiles && changedFiles.length > 0) {
6636
- log(`\u{1F33F} Git changes detected: ${changedFiles.length} files changed`);
6637
- log("Reindexing changed files...");
6638
- const count = await indexMultipleFiles(
6639
- changedFiles,
6640
- vectorDB,
6641
- embeddings,
6642
- config,
6643
- { verbose }
6644
- );
6645
- log(`\u2713 Reindexed ${count} files`);
6646
- } else {
6647
- log("\u2713 Index is up to date with git state");
6648
- }
6649
- } catch (error) {
6650
- log(`Warning: Failed to check git state on startup: ${error}`);
6651
- }
6652
- log(`\u2713 Git detection enabled (checking every ${config.gitDetection.pollIntervalMs / 1e3}s)`);
6653
- gitPollInterval = setInterval(async () => {
6654
- try {
6655
- const changedFiles = await gitTracker.detectChanges();
6656
- if (changedFiles && changedFiles.length > 0) {
6657
- log(`\u{1F33F} Git change detected: ${changedFiles.length} files changed`);
6658
- log("Reindexing in background...");
6659
- indexMultipleFiles(
6660
- changedFiles,
6661
- vectorDB,
6662
- embeddings,
6663
- config,
6664
- { verbose }
6665
- ).then((count) => {
6666
- log(`\u2713 Background reindex complete: ${count} files`);
6667
- }).catch((error) => {
6668
- log(`Warning: Background reindex failed: ${error}`);
6669
- });
6670
- }
6671
- } catch (error) {
6672
- log(`Warning: Git detection check failed: ${error}`);
6673
- }
6674
- }, config.gitDetection.pollIntervalMs);
6675
- } else {
6676
- if (!gitAvailable) {
6677
- log("Git not available - git detection disabled");
6678
- } else if (!isRepo) {
6679
- log("Not a git repository - git detection disabled");
6680
- }
7996
+ };
7997
+ earlyLog("Initializing MCP server...");
7998
+ const { embeddings, vectorDB } = await initializeDatabase(rootDir, earlyLog).catch((error) => {
7999
+ console.error(`Failed to initialize: ${error}`);
8000
+ process.exit(1);
8001
+ });
8002
+ const server = new Server(
8003
+ { name: "lien", version: packageJson3.version },
8004
+ { capabilities: { tools: {}, logging: {} } }
8005
+ );
8006
+ const log = (message, level = "info") => {
8007
+ if (verbose || level === "warning" || level === "error") {
8008
+ server.sendLoggingMessage({
8009
+ level,
8010
+ logger: "lien",
8011
+ data: message
8012
+ }).catch(() => {
8013
+ console.error(`[Lien MCP] [${level}] ${message}`);
8014
+ });
6681
8015
  }
6682
- } else {
6683
- log("Git detection disabled by configuration");
6684
- }
6685
- const fileWatchingEnabled = watch !== void 0 ? watch : config.fileWatching.enabled;
6686
- if (fileWatchingEnabled) {
6687
- log("\u{1F440} Starting file watcher...");
6688
- fileWatcher = new FileWatcher(rootDir, config);
8016
+ };
8017
+ server.setRequestHandler(ListToolsRequestSchema, async () => ({ tools }));
8018
+ const checkAndReconnect = async () => {
6689
8019
  try {
6690
- await fileWatcher.start(async (event) => {
6691
- const { type, filepath } = event;
6692
- if (type === "unlink") {
6693
- log(`\u{1F5D1}\uFE0F File deleted: ${filepath}`);
6694
- try {
6695
- await vectorDB.deleteByFile(filepath);
6696
- const manifest = new ManifestManager(vectorDB.dbPath);
6697
- await manifest.removeFile(filepath);
6698
- log(`\u2713 Removed ${filepath} from index`);
6699
- } catch (error) {
6700
- log(`Warning: Failed to remove ${filepath}: ${error}`);
6701
- }
6702
- } else {
6703
- const action = type === "add" ? "added" : "changed";
6704
- log(`\u{1F4DD} File ${action}: ${filepath}`);
6705
- indexSingleFile(filepath, vectorDB, embeddings, config, { verbose }).catch((error) => {
6706
- log(`Warning: Failed to reindex ${filepath}: ${error}`);
6707
- });
6708
- }
6709
- });
6710
- const watchedCount = fileWatcher.getWatchedFiles().length;
6711
- log(`\u2713 File watching enabled (watching ${watchedCount} files)`);
8020
+ if (await vectorDB.checkVersion()) {
8021
+ log("Index version changed, reconnecting...");
8022
+ await vectorDB.reconnect();
8023
+ }
6712
8024
  } catch (error) {
6713
- log(`Warning: Failed to start file watcher: ${error}`);
6714
- fileWatcher = null;
8025
+ log(`Version check failed: ${error}`, "warning");
6715
8026
  }
6716
- }
8027
+ };
8028
+ const getIndexMetadata = () => ({
8029
+ indexVersion: vectorDB.getCurrentVersion(),
8030
+ indexDate: vectorDB.getVersionDate()
8031
+ });
8032
+ const versionCheckInterval = setInterval(checkAndReconnect, VERSION_CHECK_INTERVAL_MS);
8033
+ const config = await configService.load(rootDir);
8034
+ const toolContext = { vectorDB, embeddings, config, rootDir, log, checkAndReconnect, getIndexMetadata };
8035
+ registerToolCallHandler(server, toolContext, log);
8036
+ await handleAutoIndexing(vectorDB, config, rootDir, log);
8037
+ const { gitPollInterval } = await setupGitDetection(config, rootDir, vectorDB, embeddings, verbose, log);
8038
+ const fileWatcher = await setupFileWatching(watch, config, rootDir, vectorDB, embeddings, verbose, log);
6717
8039
  const cleanup = async () => {
6718
8040
  log("Shutting down MCP server...");
6719
8041
  clearInterval(versionCheckInterval);
6720
- if (gitPollInterval) {
6721
- clearInterval(gitPollInterval);
6722
- }
6723
- if (fileWatcher) {
6724
- await fileWatcher.stop();
6725
- }
8042
+ if (gitPollInterval) clearInterval(gitPollInterval);
8043
+ if (fileWatcher) await fileWatcher.stop();
6726
8044
  process.exit(0);
6727
8045
  };
6728
8046
  process.on("SIGINT", cleanup);
6729
8047
  process.on("SIGTERM", cleanup);
6730
8048
  const transport = new StdioServerTransport();
6731
8049
  transport.onclose = () => {
6732
- log("Transport closed, parent process likely terminated");
8050
+ log("Transport closed");
6733
8051
  cleanup().catch(() => process.exit(0));
6734
8052
  };
6735
- transport.onerror = (error) => {
6736
- log(`Transport error: ${error}`);
6737
- };
8053
+ transport.onerror = (error) => log(`Transport error: ${error}`);
6738
8054
  await server.connect(transport);
6739
8055
  log("MCP server started and listening on stdio");
6740
8056
  }
6741
8057
 
6742
8058
  // src/cli/serve.ts
6743
8059
  async function serveCommand(options) {
6744
- const rootDir = options.root ? path18.resolve(options.root) : process.cwd();
8060
+ const rootDir = options.root ? path21.resolve(options.root) : process.cwd();
6745
8061
  try {
6746
8062
  if (options.root) {
6747
8063
  try {
6748
- const stats = await fs19.stat(rootDir);
8064
+ const stats = await fs20.stat(rootDir);
6749
8065
  if (!stats.isDirectory()) {
6750
8066
  console.error(chalk7.red(`Error: --root path is not a directory: ${rootDir}`));
6751
8067
  process.exit(1);
@@ -6784,6 +8100,370 @@ async function serveCommand(options) {
6784
8100
  }
6785
8101
  }
6786
8102
 
8103
+ // src/cli/complexity.ts
8104
+ init_lancedb();
8105
+ init_service();
8106
+ import chalk9 from "chalk";
8107
+ import fs21 from "fs";
8108
+ import path22 from "path";
8109
+
8110
+ // src/insights/formatters/text.ts
8111
+ import chalk8 from "chalk";
8112
+ function getMetricLabel(metricType) {
8113
+ switch (metricType) {
8114
+ case "cognitive":
8115
+ return "\u{1F9E0} Mental load";
8116
+ case "cyclomatic":
8117
+ return "\u{1F500} Test paths";
8118
+ case "halstead_effort":
8119
+ return "\u23F1\uFE0F Time to understand";
8120
+ case "halstead_bugs":
8121
+ return "\u{1F41B} Estimated bugs";
8122
+ default:
8123
+ return "Complexity";
8124
+ }
8125
+ }
8126
+ function effortToMinutes(effort) {
8127
+ return effort / 1080;
8128
+ }
8129
+ function formatTime(minutes) {
8130
+ if (minutes >= 60) {
8131
+ const hours = Math.floor(minutes / 60);
8132
+ const mins = Math.round(minutes % 60);
8133
+ return mins > 0 ? `${hours}h ${mins}m` : `${hours}h`;
8134
+ }
8135
+ return `${Math.round(minutes)}m`;
8136
+ }
8137
+ function formatHalsteadDetails(violation) {
8138
+ if (!violation.halsteadDetails) return [];
8139
+ const { volume, difficulty, effort, bugs } = violation.halsteadDetails;
8140
+ const timeStr = formatTime(effortToMinutes(effort));
8141
+ return [
8142
+ chalk8.dim(` \u{1F4CA} Volume: ${Math.round(volume).toLocaleString()}, Difficulty: ${difficulty.toFixed(1)}`),
8143
+ chalk8.dim(` \u23F1\uFE0F Time: ~${timeStr}, Est. bugs: ${bugs.toFixed(2)}`)
8144
+ ];
8145
+ }
8146
+ var metricFormatters = {
8147
+ halstead_effort: (val, thresh) => ({
8148
+ // val/thresh are already in minutes (human-scale)
8149
+ complexity: "~" + formatTime(val),
8150
+ threshold: formatTime(thresh)
8151
+ }),
8152
+ halstead_bugs: (val, thresh) => ({
8153
+ complexity: val.toFixed(2),
8154
+ threshold: thresh.toFixed(1)
8155
+ }),
8156
+ cyclomatic: (val, thresh) => ({
8157
+ complexity: `${val} (needs ~${val} tests)`,
8158
+ threshold: thresh.toString()
8159
+ })
8160
+ };
8161
+ var defaultFormatter = (val, thresh) => ({
8162
+ complexity: val.toString(),
8163
+ threshold: thresh.toString()
8164
+ });
8165
+ function formatSymbolDisplay(violation, isBold) {
8166
+ const display = ["function", "method"].includes(violation.symbolType) ? `${violation.symbolName}()` : violation.symbolName;
8167
+ return isBold ? chalk8.bold(display) : display;
8168
+ }
8169
+ function formatDependencyInfo(fileData) {
8170
+ const depCount = fileData.dependentCount ?? fileData.dependents.length;
8171
+ if (depCount === 0) return [];
8172
+ const lines = [chalk8.dim(` \u{1F4E6} Imported by ${depCount} file${depCount !== 1 ? "s" : ""}`)];
8173
+ if (fileData.dependentComplexityMetrics) {
8174
+ const { averageComplexity, maxComplexity } = fileData.dependentComplexityMetrics;
8175
+ lines.push(chalk8.dim(` - Dependent avg complexity: ${averageComplexity}, max: ${maxComplexity}`));
8176
+ }
8177
+ return lines;
8178
+ }
8179
+ function formatPercentageOver(complexity, threshold) {
8180
+ if (threshold <= 0) return "N/A (invalid threshold)";
8181
+ return `${Math.round((complexity - threshold) / threshold * 100)}% over threshold`;
8182
+ }
8183
+ function formatViolation(violation, fileData, colorFn, isBold) {
8184
+ const symbolText = formatSymbolDisplay(violation, isBold);
8185
+ const metricLabel = getMetricLabel(violation.metricType);
8186
+ const formatter = metricFormatters[violation.metricType] || defaultFormatter;
8187
+ const { complexity: complexityDisplay, threshold: thresholdDisplay } = formatter(violation.complexity, violation.threshold);
8188
+ return [
8189
+ colorFn(` ${violation.file}:${violation.startLine}`) + chalk8.dim(" - ") + symbolText,
8190
+ chalk8.dim(` ${metricLabel}: ${complexityDisplay} (threshold: ${thresholdDisplay})`),
8191
+ chalk8.dim(` \u2B06\uFE0F ${formatPercentageOver(violation.complexity, violation.threshold)}`),
8192
+ ...formatHalsteadDetails(violation),
8193
+ ...formatDependencyInfo(fileData),
8194
+ chalk8.dim(` \u26A0\uFE0F Risk: ${fileData.riskLevel.toUpperCase()}`),
8195
+ ""
8196
+ ];
8197
+ }
8198
+ function formatTextReport(report) {
8199
+ const lines = [];
8200
+ lines.push(chalk8.bold("\u{1F50D} Complexity Analysis\n"));
8201
+ lines.push(chalk8.bold("Summary:"));
8202
+ lines.push(chalk8.dim(" Files analyzed: ") + report.summary.filesAnalyzed.toString());
8203
+ const errorText = `${report.summary.bySeverity.error} error${report.summary.bySeverity.error !== 1 ? "s" : ""}`;
8204
+ const warningText = `${report.summary.bySeverity.warning} warning${report.summary.bySeverity.warning !== 1 ? "s" : ""}`;
8205
+ lines.push(chalk8.dim(" Violations: ") + `${report.summary.totalViolations} (${errorText}, ${warningText})`);
8206
+ lines.push(chalk8.dim(" Average complexity: ") + report.summary.avgComplexity.toString());
8207
+ lines.push(chalk8.dim(" Max complexity: ") + report.summary.maxComplexity.toString());
8208
+ lines.push("");
8209
+ const filesWithViolations = Object.entries(report.files).filter(([_, data]) => data.violations.length > 0).sort((a, b) => b[1].violations.length - a[1].violations.length);
8210
+ if (filesWithViolations.length === 0) {
8211
+ lines.push(chalk8.green("\u2713 No violations found!"));
8212
+ return lines.join("\n");
8213
+ }
8214
+ const errors = filesWithViolations.flatMap(
8215
+ ([file, data]) => data.violations.filter((v) => v.severity === "error").map((v) => ({ file, ...v }))
8216
+ );
8217
+ if (errors.length > 0) {
8218
+ lines.push(chalk8.red.bold("\u274C Errors:\n"));
8219
+ for (const error of errors) {
8220
+ lines.push(...formatViolation(error, report.files[error.file], chalk8.red, true));
8221
+ }
8222
+ }
8223
+ const warnings = filesWithViolations.flatMap(
8224
+ ([file, data]) => data.violations.filter((v) => v.severity === "warning").map((v) => ({ file, ...v }))
8225
+ );
8226
+ if (warnings.length > 0) {
8227
+ lines.push(chalk8.yellow.bold("\u26A0\uFE0F Warnings:\n"));
8228
+ for (const warning of warnings) {
8229
+ lines.push(...formatViolation(warning, report.files[warning.file], chalk8.yellow, false));
8230
+ }
8231
+ }
8232
+ return lines.join("\n");
8233
+ }
8234
+
8235
+ // src/insights/formatters/json.ts
8236
+ function formatJsonReport(report) {
8237
+ return JSON.stringify(report, null, 2);
8238
+ }
8239
+
8240
+ // src/insights/formatters/sarif.ts
+ function getRuleId(metricType) {
+ switch (metricType) {
+ case "cognitive":
+ return "lien/high-cognitive-complexity";
+ case "cyclomatic":
+ return "lien/high-cyclomatic-complexity";
+ case "halstead_effort":
+ return "lien/high-halstead-effort";
+ case "halstead_bugs":
+ return "lien/high-estimated-bugs";
+ default:
+ return "lien/high-complexity";
+ }
+ }
+ function formatSarifReport(report) {
+ const rules = [
+ {
+ id: "lien/high-cyclomatic-complexity",
+ shortDescription: {
+ text: "Too many test paths"
+ },
+ fullDescription: {
+ text: "Function or method requires too many test cases to achieve full branch coverage. Each decision point (if, switch, loop) adds a path that needs testing."
+ },
+ help: {
+ text: "Consider refactoring by extracting methods, using early returns, or simplifying conditional logic to reduce the number of test paths."
+ }
+ },
+ {
+ id: "lien/high-cognitive-complexity",
+ shortDescription: {
+ text: "High mental load"
+ },
+ fullDescription: {
+ text: "Function or method has high mental load (deeply nested or hard to follow), requiring too much mental effort to understand and maintain."
+ },
+ help: {
+ text: "Consider flattening nested conditionals, extracting helper functions, or using guard clauses to reduce mental load."
+ }
+ },
+ {
+ id: "lien/high-halstead-effort",
+ shortDescription: {
+ text: "Long time to understand"
+ },
+ fullDescription: {
+ text: "Function or method takes too long to understand, based on Halstead metrics (operators and operands count)."
+ },
+ help: {
+ text: "Consider simplifying expressions, reducing variable count, or breaking into smaller functions."
+ }
+ },
+ {
+ id: "lien/high-estimated-bugs",
+ shortDescription: {
+ text: "High estimated bug count"
+ },
+ fullDescription: {
+ text: "Function or method is likely to contain bugs based on Halstead metrics (Volume / 3000), which estimates bug count from code complexity."
+ },
+ help: {
+ text: "Consider simplifying the function, breaking into smaller units, or adding thorough test coverage."
+ }
+ }
+ ];
+ const results = [];
+ for (const [filepath, fileData] of Object.entries(report.files)) {
+ for (const violation of fileData.violations) {
+ const ruleId = getRuleId(violation.metricType);
+ results.push({
+ ruleId,
+ level: violation.severity,
+ message: {
+ text: `${violation.symbolName}: ${violation.message}`
+ },
+ locations: [
+ {
+ physicalLocation: {
+ artifactLocation: {
+ uri: filepath
+ },
+ region: {
+ startLine: violation.startLine,
+ endLine: violation.endLine
+ }
+ }
+ }
+ ]
+ });
+ }
+ }
+ const sarifReport = {
+ $schema: "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
+ version: "2.1.0",
+ runs: [
+ {
+ tool: {
+ driver: {
+ name: "Lien Complexity Analyzer",
+ version: "1.0.0",
+ informationUri: "https://github.com/liendev/lien",
+ rules
+ }
+ },
+ results
+ }
+ ]
+ };
+ return JSON.stringify(sarifReport, null, 2);
+ }
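Every violation becomes one SARIF result, so a single entry in the emitted runs[0].results array looks roughly like this (values illustrative; note that the severity strings "error" and "warning" double as valid SARIF levels, which is why level is assigned directly from violation.severity):

    {
      "ruleId": "lien/high-cyclomatic-complexity",
      "level": "error",
      "message": { "text": "processOrder: ..." },
      "locations": [{
        "physicalLocation": {
          "artifactLocation": { "uri": "src/example.ts" },
          "region": { "startLine": 42, "endLine": 98 }
        }
      }]
    }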
+
+ // src/insights/formatters/index.ts
+ function formatReport(report, format) {
+ switch (format) {
+ case "json":
+ return formatJsonReport(report);
+ case "sarif":
+ return formatSarifReport(report);
+ case "text":
+ default:
+ return formatTextReport(report);
+ }
+ }
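The dispatcher falls back to the text formatter for anything unrecognized, although validateFormat in the CLI code below rejects unknown values before formatReport is ever reached. A usage sketch (hypothetical report variable):

    console.log(formatReport(report, "text"));      // human-readable, chalk-colored
    const sarif = formatReport(report, "sarif");    // SARIF 2.1.0 JSON string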
+
+ // src/cli/complexity.ts
+ var VALID_FAIL_ON = ["error", "warning"];
+ var VALID_FORMATS = ["text", "json", "sarif"];
+ function validateFailOn(failOn) {
+ if (failOn && !VALID_FAIL_ON.includes(failOn)) {
+ console.error(chalk9.red(`Error: Invalid --fail-on value "${failOn}". Must be either 'error' or 'warning'`));
+ process.exit(1);
+ }
+ }
+ function validateFormat(format) {
+ if (!VALID_FORMATS.includes(format)) {
+ console.error(chalk9.red(`Error: Invalid --format value "${format}". Must be one of: text, json, sarif`));
+ process.exit(1);
+ }
+ }
+ function validateFilesExist(files, rootDir) {
+ if (!files || files.length === 0) return;
+ const missingFiles = files.filter((file) => {
+ const fullPath = path22.isAbsolute(file) ? file : path22.join(rootDir, file);
+ return !fs21.existsSync(fullPath);
+ });
+ if (missingFiles.length > 0) {
+ console.error(chalk9.red(`Error: File${missingFiles.length > 1 ? "s" : ""} not found:`));
+ missingFiles.forEach((file) => console.error(chalk9.red(` - ${file}`)));
+ process.exit(1);
+ }
+ }
+ function parseThresholdValue(value, flagName) {
+ if (!value) return null;
+ const parsed = parseInt(value, 10);
+ if (isNaN(parsed)) {
+ console.error(chalk9.red(`Error: Invalid ${flagName} value "${value}". Must be a number`));
+ process.exit(1);
+ }
+ if (parsed <= 0) {
+ console.error(chalk9.red(`Error: Invalid ${flagName} value "${value}". Must be a positive number`));
+ process.exit(1);
+ }
+ return parsed;
+ }
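parseThresholdValue distinguishes "flag not given" (null, fall through to config) from "flag given but malformed" (hard exit). The contract, illustrated:

    // parseThresholdValue(undefined, "--threshold") -> null (use config value)
    // parseThresholdValue("20", "--threshold")      -> 20
    // parseThresholdValue("15.5", "--threshold")    -> 15 (parseInt truncates)
    // parseThresholdValue("abc", "--threshold")     -> error + process.exit(1)
    // parseThresholdValue("0", "--threshold")       -> error + process.exit(1)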
+ function parseThresholdOverrides(options) {
+ const baseThreshold = parseThresholdValue(options.threshold, "--threshold");
+ const cyclomaticOverride = parseThresholdValue(options.cyclomaticThreshold, "--cyclomatic-threshold");
+ const cognitiveOverride = parseThresholdValue(options.cognitiveThreshold, "--cognitive-threshold");
+ return {
+ // Specific flags take precedence over --threshold
+ cyclomatic: cyclomaticOverride ?? baseThreshold,
+ cognitive: cognitiveOverride ?? baseThreshold
+ };
+ }
+ function applyThresholdOverrides(config, overrides) {
+ if (overrides.cyclomatic === null && overrides.cognitive === null) return;
+ const cfg = config;
+ if (!cfg.complexity) {
+ cfg.complexity = {
+ enabled: true,
+ thresholds: { testPaths: 15, mentalLoad: 15 }
+ };
+ } else if (!cfg.complexity.thresholds) {
+ cfg.complexity.thresholds = { testPaths: 15, mentalLoad: 15 };
+ }
+ if (overrides.cyclomatic !== null) {
+ cfg.complexity.thresholds.testPaths = overrides.cyclomatic;
+ }
+ if (overrides.cognitive !== null) {
+ cfg.complexity.thresholds.mentalLoad = overrides.cognitive;
+ }
+ }
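Precedence is therefore: metric-specific flag over the blanket --threshold over the config file. For example (illustrative invocation):

    // lien complexity --threshold 20 --cognitive-threshold 25
    // => thresholds.testPaths  = 20 (from --threshold)
    //    thresholds.mentalLoad = 25 (the specific flag wins)
    // Only testPaths and mentalLoad are overridable here; the other thresholds
    // (timeToUnderstandMinutes, estimatedBugs) are never touched by these flags.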
+ async function ensureIndexExists(vectorDB) {
+ try {
+ await vectorDB.scanWithFilter({ limit: 1 });
+ } catch {
+ console.error(chalk9.red("Error: Index not found"));
+ console.log(chalk9.yellow("\nRun"), chalk9.bold("lien index"), chalk9.yellow("to index your codebase first"));
+ process.exit(1);
+ }
+ }
+ async function complexityCommand(options) {
+ const rootDir = process.cwd();
+ try {
+ validateFailOn(options.failOn);
+ validateFormat(options.format);
+ validateFilesExist(options.files, rootDir);
+ const thresholdOverrides = parseThresholdOverrides(options);
+ const config = await configService.load(rootDir);
+ const vectorDB = new VectorDB(rootDir);
+ await vectorDB.initialize();
+ await ensureIndexExists(vectorDB);
+ applyThresholdOverrides(config, thresholdOverrides);
+ const analyzer = new ComplexityAnalyzer(vectorDB, config);
+ const report = await analyzer.analyze(options.files);
+ console.log(formatReport(report, options.format));
+ if (options.failOn) {
+ const hasViolations = options.failOn === "error" ? report.summary.bySeverity.error > 0 : report.summary.totalViolations > 0;
+ if (hasViolations) process.exit(1);
+ }
+ } catch (error) {
+ console.error(chalk9.red("Error analyzing complexity:"), error);
+ process.exit(1);
+ }
+ }
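End to end, the command validates flags, loads config, opens the index (failing with a pointer to lien index if it is absent), applies any CLI threshold overrides, analyzes, prints in the chosen format, and optionally converts violations into a CI-friendly exit code. Note the asymmetry in the exit logic:

    // --fail-on error   -> exit 1 only when bySeverity.error > 0
    // --fail-on warning -> exit 1 when totalViolations > 0 (errors count too)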
+
  // src/cli/index.ts
  var __filename5 = fileURLToPath5(import.meta.url);
  var __dirname5 = dirname4(__filename5);
@@ -6800,6 +8480,7 @@ program.command("init").description("Initialize Lien in the current directory").
  program.command("index").description("Index the codebase for semantic search").option("-f, --force", "Force full reindex (skip incremental)").option("-w, --watch", "Watch for changes and re-index automatically").option("-v, --verbose", "Show detailed logging during indexing").action(indexCommand);
  program.command("serve").description("Start the MCP server for Cursor integration").option("-p, --port <port>", "Port number (for future use)", "7133").option("--no-watch", "Disable file watching for this session").option("-w, --watch", "[DEPRECATED] File watching is now enabled by default").option("-r, --root <path>", "Root directory to serve (defaults to current directory)").action(serveCommand);
  program.command("status").description("Show indexing status and statistics").action(statusCommand);
+ program.command("complexity").description("Analyze code complexity").option("--files <paths...>", "Specific files to analyze").option("--format <type>", "Output format: text, json, sarif", "text").option("--threshold <n>", "Override both complexity thresholds (cyclomatic & cognitive)").option("--cyclomatic-threshold <n>", "Override cyclomatic complexity threshold only").option("--cognitive-threshold <n>", "Override cognitive complexity threshold only").option("--fail-on <severity>", "Exit 1 if violations: error, warning").action(complexityCommand);
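The registration above is the only user-facing surface of the new feature. An example invocation, using the flags as defined in that registration:

    lien complexity --files src/app.ts --cognitive-threshold 20 --format sarif --fail-on error

This would analyze a single file, emit SARIF, and exit non-zero on any error-severity violation.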
 
  // src/index.ts
  program.parse();