@liendev/lien 0.10.0 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +1382 -571
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/index.js
CHANGED
@@ -10,7 +10,7 @@ var __export = (target, all) => {
 };
 
 // src/constants.ts
-var DEFAULT_CHUNK_SIZE, DEFAULT_CHUNK_OVERLAP, DEFAULT_CONCURRENCY, DEFAULT_EMBEDDING_BATCH_SIZE, EMBEDDING_DIMENSIONS, DEFAULT_EMBEDDING_MODEL, DEFAULT_PORT, VERSION_CHECK_INTERVAL_MS, DEFAULT_GIT_POLL_INTERVAL_MS, DEFAULT_DEBOUNCE_MS, CURRENT_CONFIG_VERSION;
+var DEFAULT_CHUNK_SIZE, DEFAULT_CHUNK_OVERLAP, DEFAULT_CONCURRENCY, DEFAULT_EMBEDDING_BATCH_SIZE, EMBEDDING_MICRO_BATCH_SIZE, VECTOR_DB_MAX_BATCH_SIZE, VECTOR_DB_MIN_BATCH_SIZE, EMBEDDING_DIMENSIONS, DEFAULT_EMBEDDING_MODEL, DEFAULT_PORT, VERSION_CHECK_INTERVAL_MS, DEFAULT_GIT_POLL_INTERVAL_MS, DEFAULT_DEBOUNCE_MS, CURRENT_CONFIG_VERSION, INDEX_FORMAT_VERSION;
 var init_constants = __esm({
 "src/constants.ts"() {
 "use strict";
@@ -18,6 +18,9 @@ var init_constants = __esm({
 DEFAULT_CHUNK_OVERLAP = 10;
 DEFAULT_CONCURRENCY = 4;
 DEFAULT_EMBEDDING_BATCH_SIZE = 50;
+EMBEDDING_MICRO_BATCH_SIZE = 10;
+VECTOR_DB_MAX_BATCH_SIZE = 1e3;
+VECTOR_DB_MIN_BATCH_SIZE = 10;
 EMBEDDING_DIMENSIONS = 384;
 DEFAULT_EMBEDDING_MODEL = "Xenova/all-MiniLM-L6-v2";
 DEFAULT_PORT = 7133;
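The three new constants govern the batching introduced throughout this release: embeddings are generated in micro-batches of 10, and vector-DB inserts are clamped between 10 and 1000 rows. As a rough illustration (not code from the package), this is the micro-batching pattern the indexer code later in this diff applies with EMBEDDING_MICRO_BATCH_SIZE; the embedBatch callback is a stand-in for the embedding model's batch method:

async function embedInMicroBatches(
  texts: string[],
  embedBatch: (batch: string[]) => Promise<number[][]>
): Promise<number[][]> {
  const EMBEDDING_MICRO_BATCH_SIZE = 10; // mirrors the constant added above
  const vectors: number[][] = [];
  for (let i = 0; i < texts.length; i += EMBEDDING_MICRO_BATCH_SIZE) {
    const slice = texts.slice(i, i + EMBEDDING_MICRO_BATCH_SIZE);
    vectors.push(...(await embedBatch(slice)));
    // yield to the event loop between micro-batches, as the new indexer code does
    await new Promise((resolve) => setImmediate(resolve));
  }
  return vectors;
}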
@@ -25,6 +28,7 @@ var init_constants = __esm({
 DEFAULT_GIT_POLL_INTERVAL_MS = 1e4;
 DEFAULT_DEBOUNCE_MS = 1e3;
 CURRENT_CONFIG_VERSION = "0.3.0";
+INDEX_FORMAT_VERSION = 1;
 }
 });
 
@@ -58,8 +62,8 @@ var init_schema = __esm({
 pollIntervalMs: DEFAULT_GIT_POLL_INTERVAL_MS
 },
 fileWatching: {
-enabled:
-//
+enabled: true,
+// Enabled by default (fast with incremental indexing!)
 debounceMs: DEFAULT_DEBOUNCE_MS
 },
 frameworks: []
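The net effect of this hunk is that file watching now ships turned on. A sketch of the resulting default block (field names from the diff, the debounce value from DEFAULT_DEBOUNCE_MS above; other schema defaults omitted):

const defaultFileWatching = {
  enabled: true,   // was previously opt-in
  debounceMs: 1e3  // DEFAULT_DEBOUNCE_MS
};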
@@ -116,75 +120,6 @@ var init_merge = __esm({
 }
 });
 
-// src/utils/banner.ts
-var banner_exports = {};
-__export(banner_exports, {
-showBanner: () => showBanner,
-showCompactBanner: () => showCompactBanner
-});
-import figlet from "figlet";
-import chalk from "chalk";
-import { createRequire } from "module";
-import { fileURLToPath } from "url";
-import { dirname, join } from "path";
-function wrapInBox(text, footer, padding = 1) {
-const lines = text.split("\n").filter((line) => line.trim().length > 0);
-const maxLength = Math.max(...lines.map((line) => line.length));
-const horizontalBorder = "\u2500".repeat(maxLength + padding * 2);
-const top = `\u250C${horizontalBorder}\u2510`;
-const bottom = `\u2514${horizontalBorder}\u2518`;
-const separator = `\u251C${horizontalBorder}\u2524`;
-const paddedLines = lines.map((line) => {
-const padRight = " ".repeat(maxLength - line.length + padding);
-const padLeft = " ".repeat(padding);
-return `\u2502${padLeft}${line}${padRight}\u2502`;
-});
-const totalPad = maxLength - footer.length;
-const leftPad = Math.floor(totalPad / 2);
-const rightPad = totalPad - leftPad;
-const centeredFooter = " ".repeat(leftPad) + footer + " ".repeat(rightPad);
-const paddedFooter = `\u2502${" ".repeat(padding)}${centeredFooter}${" ".repeat(padding)}\u2502`;
-return [top, ...paddedLines, separator, paddedFooter, bottom].join("\n");
-}
-function showBanner() {
-const banner = figlet.textSync("LIEN", {
-font: "ANSI Shadow",
-horizontalLayout: "fitted",
-verticalLayout: "fitted"
-});
-const footer = `${PACKAGE_NAME} - v${VERSION}`;
-const boxedBanner = wrapInBox(banner.trim(), footer);
-console.error(chalk.cyan(boxedBanner));
-console.error();
-}
-function showCompactBanner() {
-const banner = figlet.textSync("LIEN", {
-font: "ANSI Shadow",
-horizontalLayout: "fitted",
-verticalLayout: "fitted"
-});
-const footer = `${PACKAGE_NAME} - v${VERSION}`;
-const boxedBanner = wrapInBox(banner.trim(), footer);
-console.log(chalk.cyan(boxedBanner));
-console.log();
-}
-var __filename, __dirname, require2, packageJson, PACKAGE_NAME, VERSION;
-var init_banner = __esm({
-"src/utils/banner.ts"() {
-"use strict";
-__filename = fileURLToPath(import.meta.url);
-__dirname = dirname(__filename);
-require2 = createRequire(import.meta.url);
-try {
-packageJson = require2(join(__dirname, "../package.json"));
-} catch {
-packageJson = require2(join(__dirname, "../../package.json"));
-}
-PACKAGE_NAME = packageJson.name;
-VERSION = packageJson.version;
-}
-});
-
 // src/config/migration.ts
 function needsMigration(config) {
 if (!config) {
@@ -772,6 +707,115 @@ ${validation.errors.join("\n")}`,
 }
 });
 
+// src/git/utils.ts
+var utils_exports = {};
+__export(utils_exports, {
+getChangedFiles: () => getChangedFiles,
+getChangedFilesBetweenCommits: () => getChangedFilesBetweenCommits,
+getChangedFilesInCommit: () => getChangedFilesInCommit,
+getCurrentBranch: () => getCurrentBranch,
+getCurrentCommit: () => getCurrentCommit,
+isGitAvailable: () => isGitAvailable,
+isGitRepo: () => isGitRepo
+});
+import { exec } from "child_process";
+import { promisify } from "util";
+import fs7 from "fs/promises";
+import path7 from "path";
+async function isGitRepo(rootDir) {
+try {
+const gitDir = path7.join(rootDir, ".git");
+await fs7.access(gitDir);
+return true;
+} catch {
+return false;
+}
+}
+async function getCurrentBranch(rootDir) {
+try {
+const { stdout } = await execAsync("git rev-parse --abbrev-ref HEAD", {
+cwd: rootDir,
+timeout: 5e3
+// 5 second timeout
+});
+return stdout.trim();
+} catch (error) {
+throw new Error(`Failed to get current branch: ${error}`);
+}
+}
+async function getCurrentCommit(rootDir) {
+try {
+const { stdout } = await execAsync("git rev-parse HEAD", {
+cwd: rootDir,
+timeout: 5e3
+});
+return stdout.trim();
+} catch (error) {
+throw new Error(`Failed to get current commit: ${error}`);
+}
+}
+async function getChangedFiles(rootDir, fromRef, toRef) {
+try {
+const { stdout } = await execAsync(
+`git diff --name-only ${fromRef}...${toRef}`,
+{
+cwd: rootDir,
+timeout: 1e4
+// 10 second timeout for diffs
+}
+);
+const files = stdout.trim().split("\n").filter(Boolean).map((file) => path7.join(rootDir, file));
+return files;
+} catch (error) {
+throw new Error(`Failed to get changed files: ${error}`);
+}
+}
+async function getChangedFilesInCommit(rootDir, commitSha) {
+try {
+const { stdout } = await execAsync(
+`git diff-tree --no-commit-id --name-only -r ${commitSha}`,
+{
+cwd: rootDir,
+timeout: 1e4
+}
+);
+const files = stdout.trim().split("\n").filter(Boolean).map((file) => path7.join(rootDir, file));
+return files;
+} catch (error) {
+throw new Error(`Failed to get changed files in commit: ${error}`);
+}
+}
+async function getChangedFilesBetweenCommits(rootDir, fromCommit, toCommit) {
+try {
+const { stdout } = await execAsync(
+`git diff --name-only ${fromCommit} ${toCommit}`,
+{
+cwd: rootDir,
+timeout: 1e4
+}
+);
+const files = stdout.trim().split("\n").filter(Boolean).map((file) => path7.join(rootDir, file));
+return files;
+} catch (error) {
+throw new Error(`Failed to get changed files between commits: ${error}`);
+}
+}
+async function isGitAvailable() {
+try {
+await execAsync("git --version", { timeout: 3e3 });
+return true;
+} catch {
+return false;
+}
+}
+var execAsync;
+var init_utils = __esm({
+"src/git/utils.ts"() {
+"use strict";
+execAsync = promisify(exec);
+}
+});
+
 // src/vectordb/version.ts
 import fs8 from "fs/promises";
 import path8 from "path";
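These helpers are the building blocks for the git-based change detection added later in this diff. As a minimal sketch of how they compose (the import path and the orchestration are illustrative, not the package's wiring; function names and behavior are taken from the hunk above):

import { isGitAvailable, isGitRepo, getCurrentCommit, getChangedFilesBetweenCommits } from "./git/utils"; // hypothetical path

async function filesChangedSince(rootDir: string, lastCommit: string): Promise<string[] | null> {
  // null means "cannot use git here"; callers fall back to mtime-based detection
  if (!(await isGitAvailable()) || !(await isGitRepo(rootDir))) return null;
  const head = await getCurrentCommit(rootDir);
  if (head === lastCommit) return [];
  return getChangedFilesBetweenCommits(rootDir, lastCommit, head); // absolute paths, per the helpers above
}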
@@ -1530,6 +1574,7 @@ var init_lancedb = __esm({
 init_errors();
 init_relevance();
 init_intent_classifier();
+init_constants();
 VectorDB = class _VectorDB {
 db = null;
 table = null;
@@ -1575,27 +1620,76 @@ var init_lancedb = __esm({
 contentsLength: contents.length
 });
 }
-
-
-
-
-
-
-
-
-
-// Ensure arrays have at least empty string for Arrow type inference
-functionNames: metadatas[i].symbols?.functions && metadatas[i].symbols.functions.length > 0 ? metadatas[i].symbols.functions : [""],
-classNames: metadatas[i].symbols?.classes && metadatas[i].symbols.classes.length > 0 ? metadatas[i].symbols.classes : [""],
-interfaceNames: metadatas[i].symbols?.interfaces && metadatas[i].symbols.interfaces.length > 0 ? metadatas[i].symbols.interfaces : [""]
-}));
-if (!this.table) {
-this.table = await this.db.createTable(this.tableName, records);
-} else {
-await this.table.add(records);
+if (vectors.length === 0) {
+return;
+}
+if (vectors.length > VECTOR_DB_MAX_BATCH_SIZE) {
+for (let i = 0; i < vectors.length; i += VECTOR_DB_MAX_BATCH_SIZE) {
+const batchVectors = vectors.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));
+const batchMetadata = metadatas.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));
+const batchContents = contents.slice(i, Math.min(i + VECTOR_DB_MAX_BATCH_SIZE, vectors.length));
+await this._insertBatchInternal(batchVectors, batchMetadata, batchContents);
 }
-}
-
+} else {
+await this._insertBatchInternal(vectors, metadatas, contents);
+}
+}
+/**
+* Internal method to insert a single batch with iterative retry logic.
+* Uses a queue-based approach to avoid deep recursion on large batch failures.
+*/
+async _insertBatchInternal(vectors, metadatas, contents) {
+const queue = [{ vectors, metadatas, contents }];
+const failedRecords = [];
+while (queue.length > 0) {
+const batch = queue.shift();
+try {
+const records = batch.vectors.map((vector, i) => ({
+vector: Array.from(vector),
+content: batch.contents[i],
+file: batch.metadatas[i].file,
+startLine: batch.metadatas[i].startLine,
+endLine: batch.metadatas[i].endLine,
+type: batch.metadatas[i].type,
+language: batch.metadatas[i].language,
+// Ensure arrays have at least empty string for Arrow type inference
+functionNames: batch.metadatas[i].symbols?.functions && batch.metadatas[i].symbols.functions.length > 0 ? batch.metadatas[i].symbols.functions : [""],
+classNames: batch.metadatas[i].symbols?.classes && batch.metadatas[i].symbols.classes.length > 0 ? batch.metadatas[i].symbols.classes : [""],
+interfaceNames: batch.metadatas[i].symbols?.interfaces && batch.metadatas[i].symbols.interfaces.length > 0 ? batch.metadatas[i].symbols.interfaces : [""]
+}));
+if (!this.table) {
+this.table = await this.db.createTable(this.tableName, records);
+} else {
+await this.table.add(records);
+}
+} catch (error) {
+if (batch.vectors.length > VECTOR_DB_MIN_BATCH_SIZE) {
+const half = Math.floor(batch.vectors.length / 2);
+queue.push({
+vectors: batch.vectors.slice(0, half),
+metadatas: batch.metadatas.slice(0, half),
+contents: batch.contents.slice(0, half)
+});
+queue.push({
+vectors: batch.vectors.slice(half),
+metadatas: batch.metadatas.slice(half),
+contents: batch.contents.slice(half)
+});
+} else {
+failedRecords.push(batch);
+}
+}
+}
+if (failedRecords.length > 0) {
+const totalFailed = failedRecords.reduce((sum, batch) => sum + batch.vectors.length, 0);
+throw new DatabaseError(
+`Failed to insert ${totalFailed} record(s) after retry attempts`,
+{
+failedBatches: failedRecords.length,
+totalRecords: totalFailed,
+sampleFile: failedRecords[0].metadatas[0].file
+}
+);
 }
 }
 async search(queryVector, limit = 5, query) {
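The hunk above replaces the single-shot insert with a clamp-then-bisect strategy: oversized batches are split at VECTOR_DB_MAX_BATCH_SIZE, and a failing batch is halved and requeued until it shrinks to VECTOR_DB_MIN_BATCH_SIZE, at which point it is recorded as failed. A generic sketch of that queue-based bisection (insert and minSize are stand-ins for the LanceDB call and the constant; this is the pattern, not the package's code):

async function insertWithBisection<T>(
  items: T[],
  insert: (batch: T[]) => Promise<void>,
  minSize = 10
): Promise<T[]> {
  const queue: T[][] = [items];
  const failed: T[] = [];
  while (queue.length > 0) {
    const batch = queue.shift()!;
    try {
      await insert(batch);
    } catch {
      if (batch.length > minSize) {
        const half = Math.floor(batch.length / 2);
        queue.push(batch.slice(0, half), batch.slice(half)); // retry both halves
      } else {
        failed.push(...batch); // a small batch that still fails is given up on
      }
    }
  }
  return failed; // the caller can throw, as insertBatch does with DatabaseError
}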
@@ -1893,79 +1987,1019 @@ var init_lancedb = __esm({
 }
 });
 
-// src/
-
-
-
+// src/utils/version.ts
+import { createRequire as createRequire2 } from "module";
+import { fileURLToPath as fileURLToPath3 } from "url";
+import { dirname as dirname2, join as join2 } from "path";
+function getPackageVersion() {
+return packageJson2.version;
+}
+var __filename3, __dirname3, require3, packageJson2;
+var init_version2 = __esm({
+"src/utils/version.ts"() {
+"use strict";
+__filename3 = fileURLToPath3(import.meta.url);
+__dirname3 = dirname2(__filename3);
+require3 = createRequire2(import.meta.url);
+try {
+packageJson2 = require3(join2(__dirname3, "../package.json"));
+} catch {
+try {
+packageJson2 = require3(join2(__dirname3, "../../package.json"));
+} catch {
+console.warn("[Lien] Warning: Could not load package.json, using fallback version");
+packageJson2 = { version: "0.0.0-unknown" };
+}
+}
+}
+});
+
|
|
2017
|
+
// src/indexer/manifest.ts
|
|
2018
|
+
var manifest_exports = {};
|
|
2019
|
+
__export(manifest_exports, {
|
|
2020
|
+
ManifestManager: () => ManifestManager
|
|
1900
2021
|
});
|
|
1901
2022
|
import fs11 from "fs/promises";
|
|
1902
|
-
import
|
|
1903
|
-
|
|
1904
|
-
|
|
1905
|
-
|
|
1906
|
-
|
|
1907
|
-
|
|
1908
|
-
|
|
1909
|
-
|
|
1910
|
-
|
|
1911
|
-
|
|
1912
|
-
|
|
1913
|
-
|
|
1914
|
-
|
|
1915
|
-
|
|
1916
|
-
|
|
1917
|
-
|
|
1918
|
-
|
|
1919
|
-
|
|
1920
|
-
|
|
1921
|
-
|
|
1922
|
-
|
|
1923
|
-
|
|
1924
|
-
|
|
1925
|
-
excludePatterns: []
|
|
1926
|
-
});
|
|
1927
|
-
}
|
|
1928
|
-
if (files.length === 0) {
|
|
1929
|
-
spinner.fail("No files found to index");
|
|
1930
|
-
return;
|
|
1931
|
-
}
|
|
1932
|
-
spinner.text = `Found ${files.length} files`;
|
|
1933
|
-
spinner.text = "Loading embedding model (this may take a minute on first run)...";
|
|
1934
|
-
const embeddings = new LocalEmbeddings();
|
|
1935
|
-
await embeddings.initialize();
|
|
1936
|
-
spinner.succeed("Embedding model loaded");
|
|
1937
|
-
spinner.start("Initializing vector database...");
|
|
1938
|
-
const vectorDB = new VectorDB(rootDir);
|
|
1939
|
-
await vectorDB.initialize();
|
|
1940
|
-
spinner.succeed("Vector database initialized");
|
|
1941
|
-
const concurrency = isModernConfig(config) ? config.core.concurrency : 4;
|
|
1942
|
-
const batchSize = isModernConfig(config) ? config.core.embeddingBatchSize : 50;
|
|
1943
|
-
spinner.start(`Processing files with ${concurrency}x concurrency...`);
|
|
1944
|
-
const startTime = Date.now();
|
|
1945
|
-
let processedFiles = 0;
|
|
1946
|
-
let processedChunks = 0;
|
|
1947
|
-
const chunkAccumulator = [];
|
|
1948
|
-
const limit = pLimit(concurrency);
|
|
1949
|
-
const processAccumulatedChunks = async () => {
|
|
1950
|
-
if (chunkAccumulator.length === 0) return;
|
|
1951
|
-
const toProcess = chunkAccumulator.splice(0, chunkAccumulator.length);
|
|
1952
|
-
for (let i = 0; i < toProcess.length; i += batchSize) {
|
|
1953
|
-
const batch = toProcess.slice(i, Math.min(i + batchSize, toProcess.length));
|
|
1954
|
-
const texts = batch.map((item) => item.content);
|
|
1955
|
-
const embeddingVectors = await embeddings.embedBatch(texts);
|
|
1956
|
-
await vectorDB.insertBatch(
|
|
1957
|
-
embeddingVectors,
|
|
1958
|
-
batch.map((item) => item.chunk.metadata),
|
|
1959
|
-
texts
|
|
1960
|
-
);
|
|
1961
|
-
processedChunks += batch.length;
|
|
2023
|
+
import path12 from "path";
|
|
2024
|
+
var MANIFEST_FILE, ManifestManager;
|
|
2025
|
+
var init_manifest = __esm({
|
|
2026
|
+
"src/indexer/manifest.ts"() {
|
|
2027
|
+
"use strict";
|
|
2028
|
+
init_constants();
|
|
2029
|
+
init_version2();
|
|
2030
|
+
MANIFEST_FILE = "manifest.json";
|
|
2031
|
+
ManifestManager = class {
|
|
2032
|
+
manifestPath;
|
|
2033
|
+
indexPath;
|
|
2034
|
+
/**
|
|
2035
|
+
* Promise-based lock to prevent race conditions during concurrent updates.
|
|
2036
|
+
* Ensures read-modify-write operations are atomic.
|
|
2037
|
+
*/
|
|
2038
|
+
updateLock = Promise.resolve();
|
|
2039
|
+
/**
|
|
2040
|
+
* Creates a new ManifestManager
|
|
2041
|
+
* @param indexPath - Path to the index directory (same as VectorDB path)
|
|
2042
|
+
*/
|
|
2043
|
+
constructor(indexPath) {
|
|
2044
|
+
this.indexPath = indexPath;
|
|
2045
|
+
this.manifestPath = path12.join(indexPath, MANIFEST_FILE);
|
|
1962
2046
|
}
|
|
1963
|
-
|
|
1964
|
-
|
|
1965
|
-
|
|
2047
|
+
/**
|
|
2048
|
+
* Loads the manifest from disk.
|
|
2049
|
+
* Returns null if:
|
|
2050
|
+
* - Manifest doesn't exist (first run)
|
|
2051
|
+
* - Manifest is corrupt
|
|
2052
|
+
* - Format version is incompatible (triggers full reindex)
|
|
2053
|
+
*
|
|
2054
|
+
* @returns Loaded manifest or null
|
|
2055
|
+
*/
|
|
2056
|
+
async load() {
|
|
1966
2057
|
try {
|
|
1967
|
-
const content = await fs11.readFile(
|
|
1968
|
-
const
|
|
2058
|
+
const content = await fs11.readFile(this.manifestPath, "utf-8");
|
|
2059
|
+
const manifest = JSON.parse(content);
|
|
2060
|
+
if (manifest.formatVersion !== INDEX_FORMAT_VERSION) {
|
|
2061
|
+
console.error(
|
|
2062
|
+
`[Lien] Index format v${manifest.formatVersion} is incompatible with current v${INDEX_FORMAT_VERSION}`
|
|
2063
|
+
);
|
|
2064
|
+
console.error(`[Lien] Full reindex required after Lien upgrade`);
|
|
2065
|
+
await this.clear();
|
|
2066
|
+
return null;
|
|
2067
|
+
}
|
|
2068
|
+
return manifest;
|
|
2069
|
+
} catch (error) {
|
|
2070
|
+
if (error.code === "ENOENT") {
|
|
2071
|
+
return null;
|
|
2072
|
+
}
|
|
2073
|
+
console.error(`[Lien] Warning: Failed to load manifest: ${error}`);
|
|
2074
|
+
return null;
|
|
2075
|
+
}
|
|
2076
|
+
}
|
|
2077
|
+
/**
|
|
2078
|
+
* Saves the manifest to disk.
|
|
2079
|
+
* Always saves with current format and package versions.
|
|
2080
|
+
*
|
|
2081
|
+
* @param manifest - Manifest to save
|
|
2082
|
+
*/
|
|
2083
|
+
async save(manifest) {
|
|
2084
|
+
try {
|
|
2085
|
+
await fs11.mkdir(this.indexPath, { recursive: true });
|
|
2086
|
+
const manifestToSave = {
|
|
2087
|
+
...manifest,
|
|
2088
|
+
formatVersion: INDEX_FORMAT_VERSION,
|
|
2089
|
+
lienVersion: getPackageVersion(),
|
|
2090
|
+
lastIndexed: Date.now()
|
|
2091
|
+
};
|
|
2092
|
+
const content = JSON.stringify(manifestToSave, null, 2);
|
|
2093
|
+
await fs11.writeFile(this.manifestPath, content, "utf-8");
|
|
2094
|
+
} catch (error) {
|
|
2095
|
+
console.error(`[Lien] Warning: Failed to save manifest: ${error}`);
|
|
2096
|
+
}
|
|
2097
|
+
}
|
|
2098
|
+
/**
|
|
2099
|
+
* Adds or updates a file entry in the manifest.
|
|
2100
|
+
* Protected by lock to prevent race conditions during concurrent updates.
|
|
2101
|
+
*
|
|
2102
|
+
* @param filepath - Path to the file
|
|
2103
|
+
* @param entry - File entry metadata
|
|
2104
|
+
*/
|
|
2105
|
+
async updateFile(filepath, entry) {
|
|
2106
|
+
this.updateLock = this.updateLock.then(async () => {
|
|
2107
|
+
const manifest = await this.load() || this.createEmpty();
|
|
2108
|
+
manifest.files[filepath] = entry;
|
|
2109
|
+
await this.save(manifest);
|
|
2110
|
+
}).catch((error) => {
|
|
2111
|
+
console.error(`[Lien] Failed to update manifest for ${filepath}: ${error}`);
|
|
2112
|
+
return void 0;
|
|
2113
|
+
});
|
|
2114
|
+
await this.updateLock;
|
|
2115
|
+
}
|
|
2116
|
+
/**
|
|
2117
|
+
* Removes a file entry from the manifest.
|
|
2118
|
+
* Protected by lock to prevent race conditions during concurrent updates.
|
|
2119
|
+
*
|
|
2120
|
+
* Note: If the manifest doesn't exist, this is a no-op (not an error).
|
|
2121
|
+
* This can happen legitimately after clearing the index or on fresh installs.
|
|
2122
|
+
*
|
|
2123
|
+
* @param filepath - Path to the file to remove
|
|
2124
|
+
*/
|
|
2125
|
+
async removeFile(filepath) {
|
|
2126
|
+
this.updateLock = this.updateLock.then(async () => {
|
|
2127
|
+
const manifest = await this.load();
|
|
2128
|
+
if (!manifest) {
|
|
2129
|
+
return;
|
|
2130
|
+
}
|
|
2131
|
+
delete manifest.files[filepath];
|
|
2132
|
+
await this.save(manifest);
|
|
2133
|
+
}).catch((error) => {
|
|
2134
|
+
console.error(`[Lien] Failed to remove manifest entry for ${filepath}: ${error}`);
|
|
2135
|
+
return void 0;
|
|
2136
|
+
});
|
|
2137
|
+
await this.updateLock;
|
|
2138
|
+
}
|
|
2139
|
+
/**
|
|
2140
|
+
* Updates multiple files at once (more efficient than individual updates).
|
|
2141
|
+
* Protected by lock to prevent race conditions during concurrent updates.
|
|
2142
|
+
*
|
|
2143
|
+
* @param entries - Array of file entries to update
|
|
2144
|
+
*/
|
|
2145
|
+
async updateFiles(entries) {
|
|
2146
|
+
this.updateLock = this.updateLock.then(async () => {
|
|
2147
|
+
const manifest = await this.load() || this.createEmpty();
|
|
2148
|
+
for (const entry of entries) {
|
|
2149
|
+
manifest.files[entry.filepath] = entry;
|
|
2150
|
+
}
|
|
2151
|
+
await this.save(manifest);
|
|
2152
|
+
}).catch((error) => {
|
|
2153
|
+
console.error(`[Lien] Failed to update manifest for ${entries.length} files: ${error}`);
|
|
2154
|
+
return void 0;
|
|
2155
|
+
});
|
|
2156
|
+
await this.updateLock;
|
|
2157
|
+
}
|
|
2158
|
+
/**
|
|
2159
|
+
* Updates the git state in the manifest.
|
|
2160
|
+
* Protected by lock to prevent race conditions during concurrent updates.
|
|
2161
|
+
*
|
|
2162
|
+
* @param gitState - Current git state
|
|
2163
|
+
*/
|
|
2164
|
+
async updateGitState(gitState) {
|
|
2165
|
+
this.updateLock = this.updateLock.then(async () => {
|
|
2166
|
+
const manifest = await this.load() || this.createEmpty();
|
|
2167
|
+
manifest.gitState = gitState;
|
|
2168
|
+
await this.save(manifest);
|
|
2169
|
+
}).catch((error) => {
|
|
2170
|
+
console.error(`[Lien] Failed to update git state in manifest: ${error}`);
|
|
2171
|
+
return void 0;
|
|
2172
|
+
});
|
|
2173
|
+
await this.updateLock;
|
|
2174
|
+
}
|
|
2175
|
+
/**
|
|
2176
|
+
* Gets the list of files currently in the manifest
|
|
2177
|
+
*
|
|
2178
|
+
* @returns Array of filepaths
|
|
2179
|
+
*/
|
|
2180
|
+
async getIndexedFiles() {
|
|
2181
|
+
const manifest = await this.load();
|
|
2182
|
+
if (!manifest) return [];
|
|
2183
|
+
return Object.keys(manifest.files);
|
|
2184
|
+
}
|
|
2185
|
+
/**
|
|
2186
|
+
* Detects which files have changed based on mtime comparison
|
|
2187
|
+
*
|
|
2188
|
+
* @param currentFiles - Map of current files with their mtimes
|
|
2189
|
+
* @returns Array of filepaths that have changed
|
|
2190
|
+
*/
|
|
2191
|
+
async getChangedFiles(currentFiles) {
|
|
2192
|
+
const manifest = await this.load();
|
|
2193
|
+
if (!manifest) {
|
|
2194
|
+
return Array.from(currentFiles.keys());
|
|
2195
|
+
}
|
|
2196
|
+
const changedFiles = [];
|
|
2197
|
+
for (const [filepath, mtime] of currentFiles) {
|
|
2198
|
+
const entry = manifest.files[filepath];
|
|
2199
|
+
if (!entry) {
|
|
2200
|
+
changedFiles.push(filepath);
|
|
2201
|
+
} else if (entry.lastModified < mtime) {
|
|
2202
|
+
changedFiles.push(filepath);
|
|
2203
|
+
}
|
|
2204
|
+
}
|
|
2205
|
+
return changedFiles;
|
|
2206
|
+
}
|
|
2207
|
+
/**
|
|
2208
|
+
* Gets files that are in the manifest but not in the current file list
|
|
2209
|
+
* (i.e., deleted files)
|
|
2210
|
+
*
|
|
2211
|
+
* @param currentFiles - Set of current file paths
|
|
2212
|
+
* @returns Array of deleted file paths
|
|
2213
|
+
*/
|
|
2214
|
+
async getDeletedFiles(currentFiles) {
|
|
2215
|
+
const manifest = await this.load();
|
|
2216
|
+
if (!manifest) return [];
|
|
2217
|
+
const deletedFiles = [];
|
|
2218
|
+
for (const filepath of Object.keys(manifest.files)) {
|
|
2219
|
+
if (!currentFiles.has(filepath)) {
|
|
2220
|
+
deletedFiles.push(filepath);
|
|
2221
|
+
}
|
|
2222
|
+
}
|
|
2223
|
+
return deletedFiles;
|
|
2224
|
+
}
|
|
2225
|
+
/**
|
|
2226
|
+
* Clears the manifest file
|
|
2227
|
+
*/
|
|
2228
|
+
async clear() {
|
|
2229
|
+
try {
|
|
2230
|
+
await fs11.unlink(this.manifestPath);
|
|
2231
|
+
} catch (error) {
|
|
2232
|
+
if (error.code !== "ENOENT") {
|
|
2233
|
+
console.error(`[Lien] Warning: Failed to clear manifest: ${error}`);
|
|
2234
|
+
}
|
|
2235
|
+
}
|
|
2236
|
+
}
|
|
2237
|
+
/**
|
|
2238
|
+
* Creates an empty manifest with current version information
|
|
2239
|
+
*
|
|
2240
|
+
* @returns Empty manifest
|
|
2241
|
+
*/
|
|
2242
|
+
createEmpty() {
|
|
2243
|
+
return {
|
|
2244
|
+
formatVersion: INDEX_FORMAT_VERSION,
|
|
2245
|
+
lienVersion: getPackageVersion(),
|
|
2246
|
+
lastIndexed: Date.now(),
|
|
2247
|
+
files: {}
|
|
2248
|
+
};
|
|
2249
|
+
}
|
|
2250
|
+
};
|
|
2251
|
+
}
|
|
2252
|
+
});
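The ManifestManager above is the bookkeeping layer for incremental indexing: it records per-file mtimes and chunk counts, and compares them against the files currently on disk. A minimal sketch of driving it for mtime-based change detection (method names and signatures are taken from the class above; the stat loop and the caller are illustrative):

import fs from "fs/promises";

async function detectStaleFiles(indexPath: string, files: string[]) {
  const manifest = new ManifestManager(indexPath);
  const current = new Map<string, number>();
  for (const f of files) current.set(f, (await fs.stat(f)).mtimeMs);
  const changed = await manifest.getChangedFiles(current);        // new files or newer mtimes
  const deleted = await manifest.getDeletedFiles(new Set(files)); // in the manifest but gone on disk
  return { changed, deleted };
}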
|
|
2253
|
+
|
|
2254
|
+
// src/git/tracker.ts
|
|
2255
|
+
var tracker_exports = {};
|
|
2256
|
+
__export(tracker_exports, {
|
|
2257
|
+
GitStateTracker: () => GitStateTracker
|
|
2258
|
+
});
|
|
2259
|
+
import fs12 from "fs/promises";
|
|
2260
|
+
import path13 from "path";
|
|
2261
|
+
var GitStateTracker;
|
|
2262
|
+
var init_tracker = __esm({
|
|
2263
|
+
"src/git/tracker.ts"() {
|
|
2264
|
+
"use strict";
|
|
2265
|
+
init_utils();
|
|
2266
|
+
GitStateTracker = class {
|
|
2267
|
+
stateFile;
|
|
2268
|
+
rootDir;
|
|
2269
|
+
currentState = null;
|
|
2270
|
+
constructor(rootDir, indexPath) {
|
|
2271
|
+
this.rootDir = rootDir;
|
|
2272
|
+
this.stateFile = path13.join(indexPath, ".git-state.json");
|
|
2273
|
+
}
|
|
2274
|
+
/**
|
|
2275
|
+
* Loads the last known git state from disk.
|
|
2276
|
+
* Returns null if no state file exists (first run).
|
|
2277
|
+
*/
|
|
2278
|
+
async loadState() {
|
|
2279
|
+
try {
|
|
2280
|
+
const content = await fs12.readFile(this.stateFile, "utf-8");
|
|
2281
|
+
return JSON.parse(content);
|
|
2282
|
+
} catch {
|
|
2283
|
+
return null;
|
|
2284
|
+
}
|
|
2285
|
+
}
|
|
2286
|
+
/**
|
|
2287
|
+
* Saves the current git state to disk.
|
|
2288
|
+
*/
|
|
2289
|
+
async saveState(state) {
|
|
2290
|
+
try {
|
|
2291
|
+
const content = JSON.stringify(state, null, 2);
|
|
2292
|
+
await fs12.writeFile(this.stateFile, content, "utf-8");
|
|
2293
|
+
} catch (error) {
|
|
2294
|
+
console.error(`[Lien] Warning: Failed to save git state: ${error}`);
|
|
2295
|
+
}
|
|
2296
|
+
}
|
|
2297
|
+
/**
|
|
2298
|
+
* Gets the current git state from the repository.
|
|
2299
|
+
*
|
|
2300
|
+
* @returns Current git state
|
|
2301
|
+
* @throws Error if git commands fail
|
|
2302
|
+
*/
|
|
2303
|
+
async getCurrentGitState() {
|
|
2304
|
+
const branch = await getCurrentBranch(this.rootDir);
|
|
2305
|
+
const commit = await getCurrentCommit(this.rootDir);
|
|
2306
|
+
return {
|
|
2307
|
+
branch,
|
|
2308
|
+
commit,
|
|
2309
|
+
timestamp: Date.now()
|
|
2310
|
+
};
|
|
2311
|
+
}
|
|
2312
|
+
/**
|
|
2313
|
+
* Initializes the tracker by loading saved state and checking current state.
|
|
2314
|
+
* Should be called once when MCP server starts.
|
|
2315
|
+
*
|
|
2316
|
+
* @returns Array of changed files if state changed, null if no changes or first run
|
|
2317
|
+
*/
|
|
2318
|
+
async initialize() {
|
|
2319
|
+
const isRepo = await isGitRepo(this.rootDir);
|
|
2320
|
+
if (!isRepo) {
|
|
2321
|
+
return null;
|
|
2322
|
+
}
|
|
2323
|
+
try {
|
|
2324
|
+
this.currentState = await this.getCurrentGitState();
|
|
2325
|
+
const previousState = await this.loadState();
|
|
2326
|
+
if (!previousState) {
|
|
2327
|
+
await this.saveState(this.currentState);
|
|
2328
|
+
return null;
|
|
2329
|
+
}
|
|
2330
|
+
const branchChanged = previousState.branch !== this.currentState.branch;
|
|
2331
|
+
const commitChanged = previousState.commit !== this.currentState.commit;
|
|
2332
|
+
if (!branchChanged && !commitChanged) {
|
|
2333
|
+
return null;
|
|
2334
|
+
}
|
|
2335
|
+
let changedFiles = [];
|
|
2336
|
+
if (branchChanged) {
|
|
2337
|
+
try {
|
|
2338
|
+
changedFiles = await getChangedFiles(
|
|
2339
|
+
this.rootDir,
|
|
2340
|
+
previousState.branch,
|
|
2341
|
+
this.currentState.branch
|
|
2342
|
+
);
|
|
2343
|
+
} catch (error) {
|
|
2344
|
+
console.error(`[Lien] Branch diff failed, using commit diff: ${error}`);
|
|
2345
|
+
changedFiles = await getChangedFilesBetweenCommits(
|
|
2346
|
+
this.rootDir,
|
|
2347
|
+
previousState.commit,
|
|
2348
|
+
this.currentState.commit
|
|
2349
|
+
);
|
|
2350
|
+
}
|
|
2351
|
+
} else if (commitChanged) {
|
|
2352
|
+
changedFiles = await getChangedFilesBetweenCommits(
|
|
2353
|
+
this.rootDir,
|
|
2354
|
+
previousState.commit,
|
|
2355
|
+
this.currentState.commit
|
|
2356
|
+
);
|
|
2357
|
+
}
|
|
2358
|
+
await this.saveState(this.currentState);
|
|
2359
|
+
return changedFiles;
|
|
2360
|
+
} catch (error) {
|
|
2361
|
+
console.error(`[Lien] Failed to initialize git tracker: ${error}`);
|
|
2362
|
+
return null;
|
|
2363
|
+
}
|
|
2364
|
+
}
|
|
2365
|
+
/**
|
|
2366
|
+
* Checks for git state changes since last check.
|
|
2367
|
+
* This is called periodically by the MCP server.
|
|
2368
|
+
*
|
|
2369
|
+
* @returns Array of changed files if state changed, null if no changes
|
|
2370
|
+
*/
|
|
2371
|
+
async detectChanges() {
|
|
2372
|
+
const isRepo = await isGitRepo(this.rootDir);
|
|
2373
|
+
if (!isRepo) {
|
|
2374
|
+
return null;
|
|
2375
|
+
}
|
|
2376
|
+
try {
|
|
2377
|
+
const newState = await this.getCurrentGitState();
|
|
2378
|
+
if (!this.currentState) {
|
|
2379
|
+
this.currentState = newState;
|
|
2380
|
+
await this.saveState(newState);
|
|
2381
|
+
return null;
|
|
2382
|
+
}
|
|
2383
|
+
const branchChanged = this.currentState.branch !== newState.branch;
|
|
2384
|
+
const commitChanged = this.currentState.commit !== newState.commit;
|
|
2385
|
+
if (!branchChanged && !commitChanged) {
|
|
2386
|
+
return null;
|
|
2387
|
+
}
|
|
2388
|
+
let changedFiles = [];
|
|
2389
|
+
if (branchChanged) {
|
|
2390
|
+
try {
|
|
2391
|
+
changedFiles = await getChangedFiles(
|
|
2392
|
+
this.rootDir,
|
|
2393
|
+
this.currentState.branch,
|
|
2394
|
+
newState.branch
|
|
2395
|
+
);
|
|
2396
|
+
} catch (error) {
|
|
2397
|
+
console.error(`[Lien] Branch diff failed, using commit diff: ${error}`);
|
|
2398
|
+
changedFiles = await getChangedFilesBetweenCommits(
|
|
2399
|
+
this.rootDir,
|
|
2400
|
+
this.currentState.commit,
|
|
2401
|
+
newState.commit
|
|
2402
|
+
);
|
|
2403
|
+
}
|
|
2404
|
+
} else if (commitChanged) {
|
|
2405
|
+
changedFiles = await getChangedFilesBetweenCommits(
|
|
2406
|
+
this.rootDir,
|
|
2407
|
+
this.currentState.commit,
|
|
2408
|
+
newState.commit
|
|
2409
|
+
);
|
|
2410
|
+
}
|
|
2411
|
+
this.currentState = newState;
|
|
2412
|
+
await this.saveState(newState);
|
|
2413
|
+
return changedFiles;
|
|
2414
|
+
} catch (error) {
|
|
2415
|
+
console.error(`[Lien] Failed to detect git changes: ${error}`);
|
|
2416
|
+
return null;
|
|
2417
|
+
}
|
|
2418
|
+
}
|
|
2419
|
+
/**
|
|
2420
|
+
* Gets the current git state.
|
|
2421
|
+
* Useful for status display.
|
|
2422
|
+
*/
|
|
2423
|
+
getState() {
|
|
2424
|
+
return this.currentState;
|
|
2425
|
+
}
|
|
2426
|
+
/**
|
|
2427
|
+
* Manually updates the saved state.
|
|
2428
|
+
* Useful after manual reindexing to sync state.
|
|
2429
|
+
*/
|
|
2430
|
+
async updateState() {
|
|
2431
|
+
try {
|
|
2432
|
+
this.currentState = await this.getCurrentGitState();
|
|
2433
|
+
await this.saveState(this.currentState);
|
|
2434
|
+
} catch (error) {
|
|
2435
|
+
console.error(`[Lien] Failed to update git state: ${error}`);
|
|
2436
|
+
}
|
|
2437
|
+
}
|
|
2438
|
+
};
|
|
2439
|
+
}
|
|
2440
|
+
});
|
|
2441
|
+
|
|
2442
|
+
// src/indexer/change-detector.ts
|
|
2443
|
+
import fs13 from "fs/promises";
|
|
2444
|
+
async function detectChanges(rootDir, vectorDB, config) {
|
|
2445
|
+
const manifest = new ManifestManager(vectorDB.dbPath);
|
|
2446
|
+
const savedManifest = await manifest.load();
|
|
2447
|
+
if (!savedManifest) {
|
|
2448
|
+
const allFiles = await getAllFiles(rootDir, config);
|
|
2449
|
+
return {
|
|
2450
|
+
added: allFiles,
|
|
2451
|
+
modified: [],
|
|
2452
|
+
deleted: [],
|
|
2453
|
+
reason: "full"
|
|
2454
|
+
};
|
|
2455
|
+
}
|
|
2456
|
+
const gitAvailable = await isGitAvailable();
|
|
2457
|
+
const isRepo = await isGitRepo(rootDir);
|
|
2458
|
+
if (gitAvailable && isRepo && savedManifest.gitState) {
|
|
2459
|
+
const gitTracker = new GitStateTracker(rootDir, vectorDB.dbPath);
|
|
2460
|
+
await gitTracker.initialize();
|
|
2461
|
+
const currentState = gitTracker.getState();
|
|
2462
|
+
if (currentState && (currentState.branch !== savedManifest.gitState.branch || currentState.commit !== savedManifest.gitState.commit)) {
|
|
2463
|
+
try {
|
|
2464
|
+
const changedFilesPaths = await getChangedFiles(
|
|
2465
|
+
rootDir,
|
|
2466
|
+
savedManifest.gitState.commit,
|
|
2467
|
+
currentState.commit
|
|
2468
|
+
);
|
|
2469
|
+
const changedFilesSet = new Set(changedFilesPaths);
|
|
2470
|
+
const allFiles = await getAllFiles(rootDir, config);
|
|
2471
|
+
const currentFileSet = new Set(allFiles);
|
|
2472
|
+
const added = [];
|
|
2473
|
+
const modified = [];
|
|
2474
|
+
const deleted = [];
|
|
2475
|
+
for (const filepath of changedFilesPaths) {
|
|
2476
|
+
if (currentFileSet.has(filepath)) {
|
|
2477
|
+
if (savedManifest.files[filepath]) {
|
|
2478
|
+
modified.push(filepath);
|
|
2479
|
+
} else {
|
|
2480
|
+
added.push(filepath);
|
|
2481
|
+
}
|
|
2482
|
+
}
|
|
2483
|
+
}
|
|
2484
|
+
for (const filepath of allFiles) {
|
|
2485
|
+
if (!savedManifest.files[filepath] && !changedFilesSet.has(filepath)) {
|
|
2486
|
+
added.push(filepath);
|
|
2487
|
+
}
|
|
2488
|
+
}
|
|
2489
|
+
for (const filepath of Object.keys(savedManifest.files)) {
|
|
2490
|
+
if (!currentFileSet.has(filepath)) {
|
|
2491
|
+
deleted.push(filepath);
|
|
2492
|
+
}
|
|
2493
|
+
}
|
|
2494
|
+
return {
|
|
2495
|
+
added,
|
|
2496
|
+
modified,
|
|
2497
|
+
deleted,
|
|
2498
|
+
reason: "git-state-changed"
|
|
2499
|
+
};
|
|
2500
|
+
} catch (error) {
|
|
2501
|
+
console.warn(`[Lien] Git diff failed, falling back to full reindex: ${error}`);
|
|
2502
|
+
const allFiles = await getAllFiles(rootDir, config);
|
|
2503
|
+
const currentFileSet = new Set(allFiles);
|
|
2504
|
+
const deleted = [];
|
|
2505
|
+
for (const filepath of Object.keys(savedManifest.files)) {
|
|
2506
|
+
if (!currentFileSet.has(filepath)) {
|
|
2507
|
+
deleted.push(filepath);
|
|
2508
|
+
}
|
|
2509
|
+
}
|
|
2510
|
+
return {
|
|
2511
|
+
added: allFiles,
|
|
2512
|
+
modified: [],
|
|
2513
|
+
deleted,
|
|
2514
|
+
reason: "git-state-changed"
|
|
2515
|
+
};
|
|
2516
|
+
}
|
|
2517
|
+
}
|
|
2518
|
+
}
|
|
2519
|
+
return await mtimeBasedDetection(rootDir, savedManifest, config);
|
|
2520
|
+
}
|
|
2521
|
+
async function getAllFiles(rootDir, config) {
|
|
2522
|
+
if (isModernConfig(config) && config.frameworks.length > 0) {
|
|
2523
|
+
return await scanCodebaseWithFrameworks(rootDir, config);
|
|
2524
|
+
} else if (isLegacyConfig(config)) {
|
|
2525
|
+
return await scanCodebase({
|
|
2526
|
+
rootDir,
|
|
2527
|
+
includePatterns: config.indexing.include,
|
|
2528
|
+
excludePatterns: config.indexing.exclude
|
|
2529
|
+
});
|
|
2530
|
+
} else {
|
|
2531
|
+
return await scanCodebase({
|
|
2532
|
+
rootDir,
|
|
2533
|
+
includePatterns: [],
|
|
2534
|
+
excludePatterns: []
|
|
2535
|
+
});
|
|
2536
|
+
}
|
|
2537
|
+
}
|
|
2538
|
+
async function mtimeBasedDetection(rootDir, savedManifest, config) {
|
|
2539
|
+
const added = [];
|
|
2540
|
+
const modified = [];
|
|
2541
|
+
const deleted = [];
|
|
2542
|
+
const currentFiles = await getAllFiles(rootDir, config);
|
|
2543
|
+
const currentFileSet = new Set(currentFiles);
|
|
2544
|
+
const fileStats = /* @__PURE__ */ new Map();
|
|
2545
|
+
for (const filepath of currentFiles) {
|
|
2546
|
+
try {
|
|
2547
|
+
const stats = await fs13.stat(filepath);
|
|
2548
|
+
fileStats.set(filepath, stats.mtimeMs);
|
|
2549
|
+
} catch {
|
|
2550
|
+
continue;
|
|
2551
|
+
}
|
|
2552
|
+
}
|
|
2553
|
+
for (const [filepath, mtime] of fileStats) {
|
|
2554
|
+
const entry = savedManifest.files[filepath];
|
|
2555
|
+
if (!entry) {
|
|
2556
|
+
added.push(filepath);
|
|
2557
|
+
} else if (entry.lastModified < mtime) {
|
|
2558
|
+
modified.push(filepath);
|
|
2559
|
+
}
|
|
2560
|
+
}
|
|
2561
|
+
for (const filepath of Object.keys(savedManifest.files)) {
|
|
2562
|
+
if (!currentFileSet.has(filepath)) {
|
|
2563
|
+
deleted.push(filepath);
|
|
2564
|
+
}
|
|
2565
|
+
}
|
|
2566
|
+
return {
|
|
2567
|
+
added,
|
|
2568
|
+
modified,
|
|
2569
|
+
deleted,
|
|
2570
|
+
reason: "mtime"
|
|
2571
|
+
};
|
|
2572
|
+
}
|
|
2573
|
+
var init_change_detector = __esm({
|
|
2574
|
+
"src/indexer/change-detector.ts"() {
|
|
2575
|
+
"use strict";
|
|
2576
|
+
init_manifest();
|
|
2577
|
+
init_scanner();
|
|
2578
|
+
init_schema();
|
|
2579
|
+
init_tracker();
|
|
2580
|
+
init_utils();
|
|
2581
|
+
}
|
|
2582
|
+
});
|
|
2583
|
+
|
|
2584
|
+
// src/indexer/incremental.ts
|
|
2585
|
+
import fs14 from "fs/promises";
|
|
2586
|
+
async function processFileContent(filepath, content, embeddings, config, verbose) {
|
|
2587
|
+
const chunkSize = isModernConfig(config) ? config.core.chunkSize : isLegacyConfig(config) ? config.indexing.chunkSize : 75;
|
|
2588
|
+
const chunkOverlap = isModernConfig(config) ? config.core.chunkOverlap : isLegacyConfig(config) ? config.indexing.chunkOverlap : 10;
|
|
2589
|
+
const chunks = chunkFile(filepath, content, {
|
|
2590
|
+
chunkSize,
|
|
2591
|
+
chunkOverlap
|
|
2592
|
+
});
|
|
2593
|
+
if (chunks.length === 0) {
|
|
2594
|
+
if (verbose) {
|
|
2595
|
+
console.error(`[Lien] Empty file: ${filepath}`);
|
|
2596
|
+
}
|
|
2597
|
+
return null;
|
|
2598
|
+
}
|
|
2599
|
+
const texts = chunks.map((c) => c.content);
|
|
2600
|
+
const vectors = [];
|
|
2601
|
+
for (let j = 0; j < texts.length; j += EMBEDDING_MICRO_BATCH_SIZE) {
|
|
2602
|
+
const microBatch = texts.slice(j, Math.min(j + EMBEDDING_MICRO_BATCH_SIZE, texts.length));
|
|
2603
|
+
const microResults = await embeddings.embedBatch(microBatch);
|
|
2604
|
+
vectors.push(...microResults);
|
|
2605
|
+
if (texts.length > EMBEDDING_MICRO_BATCH_SIZE) {
|
|
2606
|
+
await new Promise((resolve) => setImmediate(resolve));
|
|
2607
|
+
}
|
|
2608
|
+
}
|
|
2609
|
+
return {
|
|
2610
|
+
chunkCount: chunks.length,
|
|
2611
|
+
vectors,
|
|
2612
|
+
chunks,
|
|
2613
|
+
texts
|
|
2614
|
+
};
|
|
2615
|
+
}
|
|
2616
|
+
async function indexSingleFile(filepath, vectorDB, embeddings, config, options = {}) {
|
|
2617
|
+
const { verbose } = options;
|
|
2618
|
+
try {
|
|
2619
|
+
try {
|
|
2620
|
+
await fs14.access(filepath);
|
|
2621
|
+
} catch {
|
|
2622
|
+
if (verbose) {
|
|
2623
|
+
console.error(`[Lien] File deleted: ${filepath}`);
|
|
2624
|
+
}
|
|
2625
|
+
await vectorDB.deleteByFile(filepath);
|
|
2626
|
+
const manifest2 = new ManifestManager(vectorDB.dbPath);
|
|
2627
|
+
await manifest2.removeFile(filepath);
|
|
2628
|
+
return;
|
|
2629
|
+
}
|
|
2630
|
+
const content = await fs14.readFile(filepath, "utf-8");
|
|
2631
|
+
const result = await processFileContent(filepath, content, embeddings, config, verbose || false);
|
|
2632
|
+
const stats = await fs14.stat(filepath);
|
|
2633
|
+
const manifest = new ManifestManager(vectorDB.dbPath);
|
|
2634
|
+
if (result === null) {
|
|
2635
|
+
await vectorDB.deleteByFile(filepath);
|
|
2636
|
+
await manifest.updateFile(filepath, {
|
|
2637
|
+
filepath,
|
|
2638
|
+
lastModified: stats.mtimeMs,
|
|
2639
|
+
chunkCount: 0
|
|
2640
|
+
});
|
|
2641
|
+
return;
|
|
2642
|
+
}
|
|
2643
|
+
await vectorDB.updateFile(
|
|
2644
|
+
filepath,
|
|
2645
|
+
result.vectors,
|
|
2646
|
+
result.chunks.map((c) => c.metadata),
|
|
2647
|
+
result.texts
|
|
2648
|
+
);
|
|
2649
|
+
await manifest.updateFile(filepath, {
|
|
2650
|
+
filepath,
|
|
2651
|
+
lastModified: stats.mtimeMs,
|
|
2652
|
+
chunkCount: result.chunkCount
|
|
2653
|
+
});
|
|
2654
|
+
if (verbose) {
|
|
2655
|
+
console.error(`[Lien] \u2713 Updated ${filepath} (${result.chunkCount} chunks)`);
|
|
2656
|
+
}
|
|
2657
|
+
} catch (error) {
|
|
2658
|
+
console.error(`[Lien] \u26A0\uFE0F Failed to index ${filepath}: ${error}`);
|
|
2659
|
+
}
|
|
2660
|
+
}
|
|
2661
|
+
async function indexMultipleFiles(filepaths, vectorDB, embeddings, config, options = {}) {
|
|
2662
|
+
const { verbose } = options;
|
|
2663
|
+
let processedCount = 0;
|
|
2664
|
+
const manifestEntries = [];
|
|
2665
|
+
for (const filepath of filepaths) {
|
|
2666
|
+
let content;
|
|
2667
|
+
let fileMtime;
|
|
2668
|
+
try {
|
|
2669
|
+
const stats = await fs14.stat(filepath);
|
|
2670
|
+
fileMtime = stats.mtimeMs;
|
|
2671
|
+
content = await fs14.readFile(filepath, "utf-8");
|
|
2672
|
+
} catch (error) {
|
|
2673
|
+
if (verbose) {
|
|
2674
|
+
console.error(`[Lien] File not readable: ${filepath}`);
|
|
2675
|
+
}
|
|
2676
|
+
try {
|
|
2677
|
+
await vectorDB.deleteByFile(filepath);
|
|
2678
|
+
const manifest = new ManifestManager(vectorDB.dbPath);
|
|
2679
|
+
await manifest.removeFile(filepath);
|
|
2680
|
+
} catch (error2) {
|
|
2681
|
+
if (verbose) {
|
|
2682
|
+
console.error(`[Lien] Note: ${filepath} not in index`);
|
|
2683
|
+
}
|
|
2684
|
+
}
|
|
2685
|
+
processedCount++;
|
|
2686
|
+
continue;
|
|
2687
|
+
}
|
|
2688
|
+
try {
|
|
2689
|
+
const result = await processFileContent(filepath, content, embeddings, config, verbose || false);
|
|
2690
|
+
if (result === null) {
|
|
2691
|
+
try {
|
|
2692
|
+
await vectorDB.deleteByFile(filepath);
|
|
2693
|
+
} catch (error) {
|
|
2694
|
+
}
|
|
2695
|
+
const manifest = new ManifestManager(vectorDB.dbPath);
|
|
2696
|
+
await manifest.updateFile(filepath, {
|
|
2697
|
+
filepath,
|
|
2698
|
+
lastModified: fileMtime,
|
|
2699
|
+
chunkCount: 0
|
|
2700
|
+
});
|
|
2701
|
+
processedCount++;
|
|
2702
|
+
continue;
|
|
2703
|
+
}
|
|
2704
|
+
try {
|
|
2705
|
+
await vectorDB.deleteByFile(filepath);
|
|
2706
|
+
} catch (error) {
|
|
2707
|
+
}
|
|
2708
|
+
await vectorDB.insertBatch(
|
|
2709
|
+
result.vectors,
|
|
2710
|
+
result.chunks.map((c) => c.metadata),
|
|
2711
|
+
result.texts
|
|
2712
|
+
);
|
|
2713
|
+
manifestEntries.push({
|
|
2714
|
+
filepath,
|
|
2715
|
+
chunkCount: result.chunkCount,
|
|
2716
|
+
mtime: fileMtime
|
|
2717
|
+
});
|
|
2718
|
+
if (verbose) {
|
|
2719
|
+
console.error(`[Lien] \u2713 Updated ${filepath} (${result.chunkCount} chunks)`);
|
|
2720
|
+
}
|
|
2721
|
+
processedCount++;
|
|
2722
|
+
} catch (error) {
|
|
2723
|
+
console.error(`[Lien] \u26A0\uFE0F Failed to index ${filepath}: ${error}`);
|
|
2724
|
+
}
|
|
2725
|
+
}
|
|
2726
|
+
if (manifestEntries.length > 0) {
|
|
2727
|
+
const manifest = new ManifestManager(vectorDB.dbPath);
|
|
2728
|
+
await manifest.updateFiles(
|
|
2729
|
+
manifestEntries.map((entry) => ({
|
|
2730
|
+
filepath: entry.filepath,
|
|
2731
|
+
lastModified: entry.mtime,
|
|
2732
|
+
// Use actual file mtime for accurate change detection
|
|
2733
|
+
chunkCount: entry.chunkCount
|
|
2734
|
+
}))
|
|
2735
|
+
);
|
|
2736
|
+
}
|
|
2737
|
+
return processedCount;
|
|
2738
|
+
}
|
|
2739
|
+
var init_incremental = __esm({
|
|
2740
|
+
"src/indexer/incremental.ts"() {
|
|
2741
|
+
"use strict";
|
|
2742
|
+
init_chunker();
|
|
2743
|
+
init_schema();
|
|
2744
|
+
init_manifest();
|
|
2745
|
+
init_constants();
|
|
2746
|
+
}
|
|
2747
|
+
});
|
|
2748
|
+
|
|
2749
|
+
// src/utils/loading-messages.ts
|
|
2750
|
+
function getIndexingMessage() {
|
|
2751
|
+
const message = INDEXING_MESSAGES[currentIndexingIndex % INDEXING_MESSAGES.length];
|
|
2752
|
+
currentIndexingIndex++;
|
|
2753
|
+
return message;
|
|
2754
|
+
}
|
|
2755
|
+
function getEmbeddingMessage() {
|
|
2756
|
+
const message = EMBEDDING_MESSAGES[currentEmbeddingIndex % EMBEDDING_MESSAGES.length];
|
|
2757
|
+
currentEmbeddingIndex++;
|
|
2758
|
+
return message;
|
|
2759
|
+
}
|
|
2760
|
+
function getModelLoadingMessage() {
|
|
2761
|
+
const message = MODEL_LOADING_MESSAGES[currentModelIndex % MODEL_LOADING_MESSAGES.length];
|
|
2762
|
+
currentModelIndex++;
|
|
2763
|
+
return message;
|
|
2764
|
+
}
|
|
2765
|
+
var INDEXING_MESSAGES, EMBEDDING_MESSAGES, MODEL_LOADING_MESSAGES, currentIndexingIndex, currentEmbeddingIndex, currentModelIndex;
|
|
2766
|
+
var init_loading_messages = __esm({
|
|
2767
|
+
"src/utils/loading-messages.ts"() {
|
|
2768
|
+
"use strict";
|
|
2769
|
+
INDEXING_MESSAGES = [
|
|
2770
|
+
"Teaching AI to read your spaghetti code...",
|
|
2771
|
+
"Convincing the LLM that your variable names make sense...",
|
|
2772
|
+
"Indexing your TODO comments (so many TODOs)...",
|
|
2773
|
+
'Building semantic links faster than you can say "grep"...',
|
|
2774
|
+
"Making your codebase searchable (the good, the bad, and the ugly)...",
|
|
2775
|
+
"Chunking code like a boss...",
|
|
2776
|
+
"Feeding your code to the neural network (it's hungry)...",
|
|
2777
|
+
"Creating embeddings (it's like compression, but fancier)...",
|
|
2778
|
+
"Teaching machines to understand your midnight commits...",
|
|
2779
|
+
"Vectorizing your technical debt...",
|
|
2780
|
+
"Indexing... because Ctrl+F wasn't cutting it anymore...",
|
|
2781
|
+
"Making semantic connections (unlike your last refactor)...",
|
|
2782
|
+
"Processing files faster than your CI pipeline...",
|
|
2783
|
+
"Embedding wisdom from your comments (all 3 of them)...",
|
|
2784
|
+
"Analyzing code semantics (yes, even that one function)...",
|
|
2785
|
+
"Building search index (now with 100% more AI)...",
|
|
2786
|
+
"Crunching vectors like it's nobody's business...",
|
|
2787
|
+
"Linking code fragments across the spacetime continuum...",
|
|
2788
|
+
"Teaching transformers about your coding style...",
|
|
2789
|
+
"Preparing for semantic search domination...",
|
|
2790
|
+
"Indexing your genius (and that hacky workaround from 2019)...",
|
|
2791
|
+
"Making your codebase AI-readable (you're welcome, future you)...",
|
|
2792
|
+
"Converting code to math (engineers love this trick)...",
|
|
2793
|
+
"Building the neural net's mental model of your app...",
|
|
2794
|
+
"Chunking files like a lumberjack, but for code..."
|
|
2795
|
+
];
|
|
2796
|
+
EMBEDDING_MESSAGES = [
|
|
2797
|
+
"Generating embeddings (math is happening)...",
|
|
2798
|
+
"Teaching transformers about your forEach loops...",
|
|
2799
|
+
"Converting code to 384-dimensional space (wild, right?)...",
|
|
2800
|
+
"Running the neural network (the Matrix, but for code)...",
|
|
2801
|
+
"Creating semantic vectors (fancy word for AI magic)...",
|
|
2802
|
+
"Embedding your code into hyperspace...",
|
|
2803
|
+
'Teaching the model what "clean code" means in your codebase...',
|
|
2804
|
+
'Generating vectors faster than you can say "AI"...',
|
|
2805
|
+
"Making math from your methods...",
|
|
2806
|
+
"Transforming code into numbers (the AI way)...",
|
|
2807
|
+
"Processing with transformers.js (yes, it runs locally!)...",
|
|
2808
|
+
"Embedding semantics (your code's hidden meaning)...",
|
|
2809
|
+
"Vectorizing variables (alliteration achieved)...",
|
|
2810
|
+
"Teaching AI the difference between foo and bar...",
|
|
2811
|
+
"Creating embeddings (384 dimensions of awesome)..."
|
|
2812
|
+
];
|
|
2813
|
+
MODEL_LOADING_MESSAGES = [
|
|
2814
|
+
"Waking up the neural network...",
|
|
2815
|
+
"Loading transformer model (patience, young padawan)...",
|
|
2816
|
+
"Downloading AI brain (first run only, promise!)...",
|
|
2817
|
+
"Initializing the semantic search engine...",
|
|
2818
|
+
"Booting up the language model (coffee break recommended)...",
|
|
2819
|
+
"Loading 100MB of pure AI goodness...",
|
|
2820
|
+
"Preparing the transformer for action...",
|
|
2821
|
+
"Model loading (this is why we run locally)...",
|
|
2822
|
+
"Spinning up the embedding generator...",
|
|
2823
|
+
"Getting the AI ready for your codebase..."
|
|
2824
|
+
];
|
|
2825
|
+
currentIndexingIndex = 0;
|
|
2826
|
+
currentEmbeddingIndex = 0;
|
|
2827
|
+
currentModelIndex = 0;
|
|
2828
|
+
}
|
|
2829
|
+
});
|
|
2830
|
+
|
|
2831
|
+
// src/indexer/index.ts
|
|
2832
|
+
var indexer_exports = {};
|
|
2833
|
+
__export(indexer_exports, {
|
|
2834
|
+
indexCodebase: () => indexCodebase
|
|
2835
|
+
});
|
|
2836
|
+
import fs15 from "fs/promises";
|
|
2837
|
+
import ora from "ora";
|
|
2838
|
+
import chalk4 from "chalk";
|
|
2839
|
+
import pLimit from "p-limit";
|
|
2840
|
+
async function indexCodebase(options = {}) {
|
|
2841
|
+
const rootDir = options.rootDir ?? process.cwd();
|
|
2842
|
+
const spinner = ora("Starting indexing process...").start();
|
|
2843
|
+
let updateInterval;
|
|
2844
|
+
try {
|
|
2845
|
+
spinner.text = "Loading configuration...";
|
|
2846
|
+
const config = await configService.load(rootDir);
|
|
2847
|
+
spinner.text = "Initializing vector database...";
|
|
2848
|
+
const vectorDB = new VectorDB(rootDir);
|
|
2849
|
+
await vectorDB.initialize();
|
|
2850
|
+
if (!options.force) {
|
|
2851
|
+
spinner.text = "Checking for changes...";
|
|
2852
|
+
const manifest2 = new ManifestManager(vectorDB.dbPath);
|
|
2853
|
+
const savedManifest = await manifest2.load();
|
|
2854
|
+
if (savedManifest) {
|
|
2855
|
+
const changes = await detectChanges(rootDir, vectorDB, config);
|
|
2856
|
+
if (changes.reason !== "full") {
|
|
2857
|
+
const totalChanges = changes.added.length + changes.modified.length;
|
|
2858
|
+
const totalDeleted = changes.deleted.length;
|
|
2859
|
+
if (totalChanges === 0 && totalDeleted === 0) {
|
|
2860
|
+
spinner.succeed("No changes detected - index is up to date!");
|
|
2861
|
+
return;
|
|
2862
|
+
}
|
|
2863
|
+
spinner.succeed(
|
|
2864
|
+
`Detected changes: ${totalChanges} files to index, ${totalDeleted} to remove (${changes.reason} detection)`
|
|
2865
|
+
);
|
|
2866
|
+
spinner.start(getModelLoadingMessage());
|
|
2867
|
+
const embeddings2 = new LocalEmbeddings();
|
|
2868
|
+
await embeddings2.initialize();
|
|
2869
|
+
spinner.succeed("Embedding model loaded");
|
|
2870
|
+
if (totalDeleted > 0) {
|
|
2871
|
+
spinner.start(`Removing ${totalDeleted} deleted files...`);
|
|
2872
|
+
let removedCount = 0;
|
|
2873
|
+
for (const filepath of changes.deleted) {
|
|
2874
|
+
try {
|
|
2875
|
+
+ await vectorDB.deleteByFile(filepath);
+ await manifest2.removeFile(filepath);
+ removedCount++;
+ } catch (err) {
+ spinner.warn(`Failed to remove file "${filepath}": ${err instanceof Error ? err.message : String(err)}`);
+ }
+ }
+ spinner.succeed(`Removed ${removedCount}/${totalDeleted} deleted files`);
+ }
+ if (totalChanges > 0) {
+ spinner.start(`Reindexing ${totalChanges} changed files...`);
+ const filesToIndex = [...changes.added, ...changes.modified];
+ const count = await indexMultipleFiles(
+ filesToIndex,
+ vectorDB,
+ embeddings2,
+ config,
+ { verbose: options.verbose }
+ );
+ await writeVersionFile(vectorDB.dbPath);
+ spinner.succeed(
+ `Incremental reindex complete: ${count}/${totalChanges} files indexed successfully`
+ );
+ }
+ const { isGitAvailable: isGitAvailable3, isGitRepo: isGitRepo3 } = await Promise.resolve().then(() => (init_utils(), utils_exports));
+ const { GitStateTracker: GitStateTracker3 } = await Promise.resolve().then(() => (init_tracker(), tracker_exports));
+ const gitAvailable2 = await isGitAvailable3();
+ const isRepo2 = await isGitRepo3(rootDir);
+ if (gitAvailable2 && isRepo2) {
+ const gitTracker = new GitStateTracker3(rootDir, vectorDB.dbPath);
+ await gitTracker.initialize();
+ const gitState = gitTracker.getState();
+ if (gitState) {
+ await manifest2.updateGitState(gitState);
+ }
+ }
+ console.log(chalk4.dim("\nNext step: Run"), chalk4.bold("lien serve"), chalk4.dim("to start the MCP server"));
+ return;
+ }
+ spinner.text = "Full reindex required...";
+ }
+ } else {
+ spinner.text = "Force flag enabled, performing full reindex...";
+ }
+ spinner.text = "Scanning codebase...";
+ let files;
+ if (isModernConfig(config) && config.frameworks.length > 0) {
+ files = await scanCodebaseWithFrameworks(rootDir, config);
+ } else if (isLegacyConfig(config)) {
+ files = await scanCodebase({
+ rootDir,
+ includePatterns: config.indexing.include,
+ excludePatterns: config.indexing.exclude
+ });
+ } else {
+ files = await scanCodebase({
+ rootDir,
+ includePatterns: [],
+ excludePatterns: []
+ });
+ }
+ if (files.length === 0) {
+ spinner.fail("No files found to index");
+ return;
+ }
+ spinner.text = `Found ${files.length} files`;
+ spinner.text = getModelLoadingMessage();
+ const embeddings = new LocalEmbeddings();
+ await embeddings.initialize();
+ spinner.succeed("Embedding model loaded");
+ const concurrency = isModernConfig(config) ? config.core.concurrency : 4;
+ const embeddingBatchSize = isModernConfig(config) ? config.core.embeddingBatchSize : 50;
+ const vectorDBBatchSize = 100;
+ spinner.start(`Processing files with ${concurrency}x concurrency...`);
+ const startTime = Date.now();
+ let processedFiles = 0;
+ let processedChunks = 0;
+ const chunkAccumulator = [];
+ const limit = pLimit(concurrency);
+ const indexedFileEntries = [];
+ const progressState = {
+ processedFiles: 0,
+ totalFiles: files.length,
+ wittyMessage: getIndexingMessage()
+ };
+ const SPINNER_UPDATE_INTERVAL_MS = 200;
+ const MESSAGE_ROTATION_INTERVAL_MS = 8e3;
+ const MESSAGE_ROTATION_TICKS = Math.floor(MESSAGE_ROTATION_INTERVAL_MS / SPINNER_UPDATE_INTERVAL_MS);
+ let spinnerTick = 0;
+ updateInterval = setInterval(() => {
+ spinnerTick++;
+ if (spinnerTick >= MESSAGE_ROTATION_TICKS) {
+ progressState.wittyMessage = getIndexingMessage();
+ spinnerTick = 0;
+ }
+ spinner.text = `${progressState.processedFiles}/${progressState.totalFiles} files | ${progressState.wittyMessage}`;
+ }, SPINNER_UPDATE_INTERVAL_MS);
+ const processAccumulatedChunks = async () => {
+ if (chunkAccumulator.length === 0) return;
+ const toProcess = chunkAccumulator.splice(0, chunkAccumulator.length);
+ for (let i = 0; i < toProcess.length; i += embeddingBatchSize) {
+ const batch = toProcess.slice(i, Math.min(i + embeddingBatchSize, toProcess.length));
+ progressState.wittyMessage = getEmbeddingMessage();
+ const texts = batch.map((item) => item.content);
+ const embeddingVectors = [];
+ for (let j = 0; j < texts.length; j += EMBEDDING_MICRO_BATCH_SIZE) {
+ const microBatch = texts.slice(j, Math.min(j + EMBEDDING_MICRO_BATCH_SIZE, texts.length));
+ const microResults = await embeddings.embedBatch(microBatch);
+ embeddingVectors.push(...microResults);
+ await new Promise((resolve) => setImmediate(resolve));
+ }
+ processedChunks += batch.length;
+ progressState.wittyMessage = `Inserting ${batch.length} chunks into vector space...`;
+ await vectorDB.insertBatch(
+ embeddingVectors,
+ batch.map((item) => item.chunk.metadata),
+ texts
+ );
+ await new Promise((resolve) => setImmediate(resolve));
+ }
+ progressState.wittyMessage = getIndexingMessage();
+ };
+ const filePromises = files.map(
+ (file) => limit(async () => {
+ try {
+ const stats = await fs15.stat(file);
+ const content = await fs15.readFile(file, "utf-8");
+ const chunkSize = isModernConfig(config) ? config.core.chunkSize : 75;
  const chunkOverlap = isModernConfig(config) ? config.core.chunkOverlap : 10;
  const chunks = chunkFile(file, content, {
  chunkSize,
@@ -1973,6 +3007,7 @@ async function indexCodebase(options = {}) {
  });
  if (chunks.length === 0) {
  processedFiles++;
+ progressState.processedFiles = processedFiles;
  return;
  }
  for (const chunk of chunks) {
@@ -1981,25 +3016,53 @@ async function indexCodebase(options = {}) {
  content: chunk.content
  });
  }
-
+ indexedFileEntries.push({
+ filepath: file,
+ chunkCount: chunks.length,
+ mtime: stats.mtimeMs
+ });
+ processedFiles++;
+ progressState.processedFiles = processedFiles;
+ if (chunkAccumulator.length >= vectorDBBatchSize) {
  await processAccumulatedChunks();
  }
- processedFiles++;
- const elapsed = (Date.now() - startTime) / 1e3;
- const rate = processedFiles / elapsed;
- const eta = rate > 0 ? Math.round((files.length - processedFiles) / rate) : 0;
- spinner.text = `Indexed ${processedFiles}/${files.length} files (${processedChunks} chunks) | ${concurrency}x concurrency | ETA: ${eta}s`;
  } catch (error) {
  if (options.verbose) {
  console.error(chalk4.yellow(`
  \u26A0\uFE0F Skipping ${file}: ${error}`));
  }
  processedFiles++;
+ progressState.processedFiles = processedFiles;
  }
  })
  );
  await Promise.all(filePromises);
+ progressState.wittyMessage = "Processing final chunks...";
  await processAccumulatedChunks();
+ clearInterval(updateInterval);
+ spinner.start("Saving index manifest...");
+ const manifest = new ManifestManager(vectorDB.dbPath);
+ await manifest.updateFiles(
+ indexedFileEntries.map((entry) => ({
+ filepath: entry.filepath,
+ lastModified: entry.mtime,
+ // Use actual file mtime for accurate change detection
+ chunkCount: entry.chunkCount
+ }))
+ );
+ const { isGitAvailable: isGitAvailable2, isGitRepo: isGitRepo2 } = await Promise.resolve().then(() => (init_utils(), utils_exports));
+ const { GitStateTracker: GitStateTracker2 } = await Promise.resolve().then(() => (init_tracker(), tracker_exports));
+ const gitAvailable = await isGitAvailable2();
+ const isRepo = await isGitRepo2(rootDir);
+ if (gitAvailable && isRepo) {
+ const gitTracker = new GitStateTracker2(rootDir, vectorDB.dbPath);
+ await gitTracker.initialize();
+ const gitState = gitTracker.getState();
+ if (gitState) {
+ await manifest.updateGitState(gitState);
+ }
+ }
+ spinner.succeed("Manifest saved");
  await writeVersionFile(vectorDB.dbPath);
  const totalTime = ((Date.now() - startTime) / 1e3).toFixed(1);
  spinner.succeed(
@@ -2007,39 +3070,107 @@ async function indexCodebase(options = {}) {
  );
  console.log(chalk4.dim("\nNext step: Run"), chalk4.bold("lien serve"), chalk4.dim("to start the MCP server"));
  } catch (error) {
+ if (updateInterval) {
+ clearInterval(updateInterval);
+ }
  spinner.fail(`Indexing failed: ${error}`);
  throw error;
  }
  }
- var init_indexer = __esm({
- "src/indexer/index.ts"() {
- "use strict";
- init_scanner();
- init_chunker();
- init_local();
- init_lancedb();
- init_service();
- init_version();
- init_schema();
-
-
-
-
-
-
-
-
+ var init_indexer = __esm({
+ "src/indexer/index.ts"() {
+ "use strict";
+ init_scanner();
+ init_chunker();
+ init_local();
+ init_lancedb();
+ init_service();
+ init_version();
+ init_schema();
+ init_manifest();
+ init_change_detector();
+ init_incremental();
+ init_loading_messages();
+ init_constants();
+ }
+ });
+
+ // src/cli/index.ts
+ import { Command } from "commander";
+ import { createRequire as createRequire4 } from "module";
+ import { fileURLToPath as fileURLToPath5 } from "url";
+ import { dirname as dirname4, join as join4 } from "path";
+
+ // src/cli/init.ts
+ init_schema();
+ init_merge();
+ import fs5 from "fs/promises";
+ import path5 from "path";
+ import { fileURLToPath as fileURLToPath2 } from "url";
+ import chalk2 from "chalk";
+ import inquirer from "inquirer";
+
+ // src/utils/banner.ts
+ import figlet from "figlet";
+ import chalk from "chalk";
+ import { createRequire } from "module";
+ import { fileURLToPath } from "url";
+ import { dirname, join } from "path";
+ var __filename = fileURLToPath(import.meta.url);
+ var __dirname = dirname(__filename);
+ var require2 = createRequire(import.meta.url);
+ var packageJson;
+ try {
+ packageJson = require2(join(__dirname, "../package.json"));
+ } catch {
+ packageJson = require2(join(__dirname, "../../package.json"));
+ }
+ var PACKAGE_NAME = packageJson.name;
+ var VERSION = packageJson.version;
+ function wrapInBox(text, footer, padding = 1) {
+ const lines = text.split("\n").filter((line) => line.trim().length > 0);
+ const maxLength = Math.max(...lines.map((line) => line.length));
+ const horizontalBorder = "\u2500".repeat(maxLength + padding * 2);
+ const top = `\u250C${horizontalBorder}\u2510`;
+ const bottom = `\u2514${horizontalBorder}\u2518`;
+ const separator = `\u251C${horizontalBorder}\u2524`;
+ const paddedLines = lines.map((line) => {
+ const padRight = " ".repeat(maxLength - line.length + padding);
+ const padLeft = " ".repeat(padding);
+ return `\u2502${padLeft}${line}${padRight}\u2502`;
+ });
+ const totalPad = maxLength - footer.length;
+ const leftPad = Math.floor(totalPad / 2);
+ const rightPad = totalPad - leftPad;
+ const centeredFooter = " ".repeat(leftPad) + footer + " ".repeat(rightPad);
+ const paddedFooter = `\u2502${" ".repeat(padding)}${centeredFooter}${" ".repeat(padding)}\u2502`;
+ return [top, ...paddedLines, separator, paddedFooter, bottom].join("\n");
+ }
+ function showBanner() {
+ const banner = figlet.textSync("LIEN", {
+ font: "ANSI Shadow",
+ horizontalLayout: "fitted",
+ verticalLayout: "fitted"
+ });
+ const footer = `${PACKAGE_NAME} - v${VERSION}`;
+ const boxedBanner = wrapInBox(banner.trim(), footer);
+ console.error(chalk.cyan(boxedBanner));
+ console.error();
+ }
+ function showCompactBanner() {
+ const banner = figlet.textSync("LIEN", {
+ font: "ANSI Shadow",
+ horizontalLayout: "fitted",
+ verticalLayout: "fitted"
+ });
+ const footer = `${PACKAGE_NAME} - v${VERSION}`;
+ const boxedBanner = wrapInBox(banner.trim(), footer);
+ console.log(chalk.cyan(boxedBanner));
+ console.log();
+ }

  // src/cli/init.ts
- init_schema();
- init_merge();
- init_banner();
  init_migration();
- import fs5 from "fs/promises";
- import path5 from "path";
- import { fileURLToPath as fileURLToPath2 } from "url";
- import chalk2 from "chalk";
- import inquirer from "inquirer";

  // src/frameworks/detector-service.ts
  import fs4 from "fs/promises";
@@ -2126,17 +3257,17 @@ var nodejsDetector = {
  evidence: []
  };
  const packageJsonPath = path.join(fullPath, "package.json");
- let
+ let packageJson5 = null;
  try {
  const content = await fs.readFile(packageJsonPath, "utf-8");
-
+ packageJson5 = JSON.parse(content);
  result.evidence.push("Found package.json");
  } catch {
  return result;
  }
  result.detected = true;
  result.confidence = "high";
- if (
+ if (packageJson5.devDependencies?.typescript || packageJson5.dependencies?.typescript) {
  result.evidence.push("TypeScript detected");
  }
  const testFrameworks = [
@@ -2147,7 +3278,7 @@ var nodejsDetector = {
  { name: "@playwright/test", display: "Playwright" }
  ];
  for (const framework of testFrameworks) {
- if (
+ if (packageJson5.devDependencies?.[framework.name] || packageJson5.dependencies?.[framework.name]) {
  result.evidence.push(`${framework.display} test framework detected`);
  break;
  }
@@ -2160,13 +3291,13 @@ var nodejsDetector = {
  { name: "@nestjs/core", display: "NestJS" }
  ];
  for (const fw of frameworks) {
- if (
+ if (packageJson5.dependencies?.[fw.name]) {
  result.evidence.push(`${fw.display} detected`);
  break;
  }
  }
- if (
- result.version =
+ if (packageJson5.engines?.node) {
+ result.version = packageJson5.engines.node;
  }
  return result;
  },
@@ -2814,93 +3945,13 @@ async function upgradeConfig(configPath) {

  // src/cli/status.ts
  init_service();
+ init_utils();
+ init_version();
  import chalk3 from "chalk";
  import fs9 from "fs/promises";
  import path9 from "path";
  import os from "os";
  import crypto from "crypto";
-
- // src/git/utils.ts
- import { exec } from "child_process";
- import { promisify } from "util";
- import fs7 from "fs/promises";
- import path7 from "path";
- var execAsync = promisify(exec);
- async function isGitRepo(rootDir) {
- try {
- const gitDir = path7.join(rootDir, ".git");
- await fs7.access(gitDir);
- return true;
- } catch {
- return false;
- }
- }
- async function getCurrentBranch(rootDir) {
- try {
- const { stdout } = await execAsync("git rev-parse --abbrev-ref HEAD", {
- cwd: rootDir,
- timeout: 5e3
- // 5 second timeout
- });
- return stdout.trim();
- } catch (error) {
- throw new Error(`Failed to get current branch: ${error}`);
- }
- }
- async function getCurrentCommit(rootDir) {
- try {
- const { stdout } = await execAsync("git rev-parse HEAD", {
- cwd: rootDir,
- timeout: 5e3
- });
- return stdout.trim();
- } catch (error) {
- throw new Error(`Failed to get current commit: ${error}`);
- }
- }
- async function getChangedFiles(rootDir, fromRef, toRef) {
- try {
- const { stdout } = await execAsync(
- `git diff --name-only ${fromRef}...${toRef}`,
- {
- cwd: rootDir,
- timeout: 1e4
- // 10 second timeout for diffs
- }
- );
- const files = stdout.trim().split("\n").filter(Boolean).map((file) => path7.join(rootDir, file));
- return files;
- } catch (error) {
- throw new Error(`Failed to get changed files: ${error}`);
- }
- }
- async function getChangedFilesBetweenCommits(rootDir, fromCommit, toCommit) {
- try {
- const { stdout } = await execAsync(
- `git diff --name-only ${fromCommit} ${toCommit}`,
- {
- cwd: rootDir,
- timeout: 1e4
- }
- );
- const files = stdout.trim().split("\n").filter(Boolean).map((file) => path7.join(rootDir, file));
- return files;
- } catch (error) {
- throw new Error(`Failed to get changed files between commits: ${error}`);
- }
- }
- async function isGitAvailable() {
- try {
- await execAsync("git --version", { timeout: 3e3 });
- return true;
- } catch {
- return false;
- }
- }
-
- // src/cli/status.ts
- init_version();
- init_banner();
  init_schema();
  async function statusCommand() {
  const rootDir = process.cwd();
@@ -2986,14 +4037,25 @@ async function statusCommand() {

  // src/cli/index-cmd.ts
  init_indexer();
- init_banner();
  import chalk5 from "chalk";
  async function indexCommand(options) {
  showCompactBanner();
  try {
+ if (options.force) {
+ const { VectorDB: VectorDB2 } = await Promise.resolve().then(() => (init_lancedb(), lancedb_exports));
+ const { ManifestManager: ManifestManager2 } = await Promise.resolve().then(() => (init_manifest(), manifest_exports));
+ console.log(chalk5.yellow("Clearing existing index and manifest..."));
+ const vectorDB = new VectorDB2(process.cwd());
+ await vectorDB.initialize();
+ await vectorDB.clear();
+ const manifest = new ManifestManager2(vectorDB.dbPath);
+ await manifest.clear();
+ console.log(chalk5.green("\u2713 Index and manifest cleared\n"));
+ }
  await indexCodebase({
  rootDir: process.cwd(),
- verbose: options.verbose || false
+ verbose: options.verbose || false,
+ force: options.force || false
  });
  if (options.watch) {
  console.log(chalk5.yellow("\n\u26A0\uFE0F Watch mode not yet implemented"));
@@ -3006,8 +4068,8 @@ async function indexCommand(options) {

  // src/cli/serve.ts
  import chalk6 from "chalk";
- import
- import
+ import fs16 from "fs/promises";
+ import path14 from "path";

  // src/mcp/server.ts
  import { Server } from "@modelcontextprotocol/sdk/server/index.js";
@@ -3016,9 +4078,9 @@ import {
  CallToolRequestSchema,
  ListToolsRequestSchema
  } from "@modelcontextprotocol/sdk/types.js";
- import { createRequire as
- import { fileURLToPath as
- import { dirname as
+ import { createRequire as createRequire3 } from "module";
+ import { fileURLToPath as fileURLToPath4 } from "url";
+ import { dirname as dirname3, join as join3 } from "path";

  // src/mcp/tools.ts
  var tools = [
@@ -3101,247 +4163,11 @@ var tools = [
  // src/mcp/server.ts
  init_lancedb();
  init_local();
-
-
- import fs12 from "fs/promises";
- import path12 from "path";
- var GitStateTracker = class {
- stateFile;
- rootDir;
- currentState = null;
- constructor(rootDir, indexPath) {
- this.rootDir = rootDir;
- this.stateFile = path12.join(indexPath, ".git-state.json");
- }
- /**
- * Loads the last known git state from disk.
- * Returns null if no state file exists (first run).
- */
- async loadState() {
- try {
- const content = await fs12.readFile(this.stateFile, "utf-8");
- return JSON.parse(content);
- } catch {
- return null;
- }
- }
- /**
- * Saves the current git state to disk.
- */
- async saveState(state) {
- try {
- const content = JSON.stringify(state, null, 2);
- await fs12.writeFile(this.stateFile, content, "utf-8");
- } catch (error) {
- console.error(`[Lien] Warning: Failed to save git state: ${error}`);
- }
- }
- /**
- * Gets the current git state from the repository.
- *
- * @returns Current git state
- * @throws Error if git commands fail
- */
- async getCurrentGitState() {
- const branch = await getCurrentBranch(this.rootDir);
- const commit = await getCurrentCommit(this.rootDir);
- return {
- branch,
- commit,
- timestamp: Date.now()
- };
- }
- /**
- * Initializes the tracker by loading saved state and checking current state.
- * Should be called once when MCP server starts.
- *
- * @returns Array of changed files if state changed, null if no changes or first run
- */
- async initialize() {
- const isRepo = await isGitRepo(this.rootDir);
- if (!isRepo) {
- return null;
- }
- try {
- this.currentState = await this.getCurrentGitState();
- const previousState = await this.loadState();
- if (!previousState) {
- await this.saveState(this.currentState);
- return null;
- }
- const branchChanged = previousState.branch !== this.currentState.branch;
- const commitChanged = previousState.commit !== this.currentState.commit;
- if (!branchChanged && !commitChanged) {
- return null;
- }
- let changedFiles = [];
- if (branchChanged) {
- try {
- changedFiles = await getChangedFiles(
- this.rootDir,
- previousState.branch,
- this.currentState.branch
- );
- } catch (error) {
- console.error(`[Lien] Branch diff failed, using commit diff: ${error}`);
- changedFiles = await getChangedFilesBetweenCommits(
- this.rootDir,
- previousState.commit,
- this.currentState.commit
- );
- }
- } else if (commitChanged) {
- changedFiles = await getChangedFilesBetweenCommits(
- this.rootDir,
- previousState.commit,
- this.currentState.commit
- );
- }
- await this.saveState(this.currentState);
- return changedFiles;
- } catch (error) {
- console.error(`[Lien] Failed to initialize git tracker: ${error}`);
- return null;
- }
- }
- /**
- * Checks for git state changes since last check.
- * This is called periodically by the MCP server.
- *
- * @returns Array of changed files if state changed, null if no changes
- */
- async detectChanges() {
- const isRepo = await isGitRepo(this.rootDir);
- if (!isRepo) {
- return null;
- }
- try {
- const newState = await this.getCurrentGitState();
- if (!this.currentState) {
- this.currentState = newState;
- await this.saveState(newState);
- return null;
- }
- const branchChanged = this.currentState.branch !== newState.branch;
- const commitChanged = this.currentState.commit !== newState.commit;
- if (!branchChanged && !commitChanged) {
- return null;
- }
- let changedFiles = [];
- if (branchChanged) {
- try {
- changedFiles = await getChangedFiles(
- this.rootDir,
- this.currentState.branch,
- newState.branch
- );
- } catch (error) {
- console.error(`[Lien] Branch diff failed, using commit diff: ${error}`);
- changedFiles = await getChangedFilesBetweenCommits(
- this.rootDir,
- this.currentState.commit,
- newState.commit
- );
- }
- } else if (commitChanged) {
- changedFiles = await getChangedFilesBetweenCommits(
- this.rootDir,
- this.currentState.commit,
- newState.commit
- );
- }
- this.currentState = newState;
- await this.saveState(newState);
- return changedFiles;
- } catch (error) {
- console.error(`[Lien] Failed to detect git changes: ${error}`);
- return null;
- }
- }
- /**
- * Gets the current git state.
- * Useful for status display.
- */
- getState() {
- return this.currentState;
- }
- /**
- * Manually updates the saved state.
- * Useful after manual reindexing to sync state.
- */
- async updateState() {
- try {
- this.currentState = await this.getCurrentGitState();
- await this.saveState(this.currentState);
- } catch (error) {
- console.error(`[Lien] Failed to update git state: ${error}`);
- }
- }
- };
-
- // src/indexer/incremental.ts
- init_chunker();
- init_schema();
- import fs13 from "fs/promises";
- async function indexSingleFile(filepath, vectorDB, embeddings, config, options = {}) {
- const { verbose } = options;
- try {
- try {
- await fs13.access(filepath);
- } catch {
- if (verbose) {
- console.error(`[Lien] File deleted: ${filepath}`);
- }
- await vectorDB.deleteByFile(filepath);
- return;
- }
- const content = await fs13.readFile(filepath, "utf-8");
- const chunkSize = isModernConfig(config) ? config.core.chunkSize : isLegacyConfig(config) ? config.indexing.chunkSize : 75;
- const chunkOverlap = isModernConfig(config) ? config.core.chunkOverlap : isLegacyConfig(config) ? config.indexing.chunkOverlap : 10;
- const chunks = chunkFile(filepath, content, {
- chunkSize,
- chunkOverlap
- });
- if (chunks.length === 0) {
- if (verbose) {
- console.error(`[Lien] Empty file: ${filepath}`);
- }
- await vectorDB.deleteByFile(filepath);
- return;
- }
- const texts = chunks.map((c) => c.content);
- const vectors = await embeddings.embedBatch(texts);
- await vectorDB.updateFile(
- filepath,
- vectors,
- chunks.map((c) => c.metadata),
- texts
- );
- if (verbose) {
- console.error(`[Lien] \u2713 Updated ${filepath} (${chunks.length} chunks)`);
- }
- } catch (error) {
- console.error(`[Lien] \u26A0\uFE0F Failed to index ${filepath}: ${error}`);
- }
- }
- async function indexMultipleFiles(filepaths, vectorDB, embeddings, config, options = {}) {
- const { verbose } = options;
- let successCount = 0;
- for (const filepath of filepaths) {
- try {
- await indexSingleFile(filepath, vectorDB, embeddings, config, options);
- successCount++;
- } catch (error) {
- if (verbose) {
- console.error(`[Lien] Failed to process ${filepath}`);
- }
- }
- }
- return successCount;
- }
-
- // src/mcp/server.ts
+ init_tracker();
+ init_incremental();
  init_service();
+ init_manifest();
+ init_utils();

  // src/watcher/index.ts
  init_schema();
@@ -3474,14 +4300,14 @@ var FileWatcher = class {

  // src/mcp/server.ts
  init_constants();
- var
- var
- var
- var
+ var __filename4 = fileURLToPath4(import.meta.url);
+ var __dirname4 = dirname3(__filename4);
+ var require4 = createRequire3(import.meta.url);
+ var packageJson3;
  try {
-
+ packageJson3 = require4(join3(__dirname4, "../package.json"));
  } catch {
-
+ packageJson3 = require4(join3(__dirname4, "../../package.json"));
  }
  async function startMCPServer(options) {
  const { rootDir, verbose, watch } = options;
@@ -3506,7 +4332,7 @@ async function startMCPServer(options) {
  const server = new Server(
  {
  name: "lien",
- version:
+ version: packageJson3.version
  },
  {
  capabilities: {
@@ -3762,7 +4588,7 @@ async function startMCPServer(options) {
  } else {
  log("Git detection disabled by configuration");
  }
- const fileWatchingEnabled = watch
+ const fileWatchingEnabled = watch !== void 0 ? watch : config.fileWatching.enabled;
  if (fileWatchingEnabled) {
  log("\u{1F440} Starting file watcher...");
  fileWatcher = new FileWatcher(rootDir, config);
@@ -3773,6 +4599,8 @@ async function startMCPServer(options) {
  log(`\u{1F5D1}\uFE0F File deleted: ${filepath}`);
  try {
  await vectorDB.deleteByFile(filepath);
+ const manifest = new ManifestManager(vectorDB.dbPath);
+ await manifest.removeFile(filepath);
  log(`\u2713 Removed ${filepath} from index`);
  } catch (error) {
  log(`Warning: Failed to remove ${filepath}: ${error}`);
@@ -3818,13 +4646,12 @@ async function startMCPServer(options) {
  }

  // src/cli/serve.ts
- init_banner();
  async function serveCommand(options) {
- const rootDir = options.root ?
+ const rootDir = options.root ? path14.resolve(options.root) : process.cwd();
  try {
  if (options.root) {
  try {
- const stats = await
+ const stats = await fs16.stat(rootDir);
  if (!stats.isDirectory()) {
  console.error(chalk6.red(`Error: --root path is not a directory: ${rootDir}`));
  process.exit(1);
@@ -3847,10 +4674,15 @@ async function serveCommand(options) {
  console.error(chalk6.dim(`Serving from: ${rootDir}
  `));
  }
+ if (options.watch) {
+ console.error(chalk6.yellow("\u26A0\uFE0F --watch flag is deprecated (file watching is now default)"));
+ console.error(chalk6.dim(" Use --no-watch to disable file watching\n"));
+ }
+ const watch = options.noWatch ? false : options.watch ? true : void 0;
  await startMCPServer({
  rootDir,
  verbose: true,
- watch
+ watch
  });
  } catch (error) {
  console.error(chalk6.red("Failed to start MCP server:"), error);
@@ -3859,42 +4691,21 @@ async function serveCommand(options) {
  }

  // src/cli/index.ts
- var
- var
- var
- var
+ var __filename5 = fileURLToPath5(import.meta.url);
+ var __dirname5 = dirname4(__filename5);
+ var require5 = createRequire4(import.meta.url);
+ var packageJson4;
  try {
-
+ packageJson4 = require5(join4(__dirname5, "../package.json"));
  } catch {
-
+ packageJson4 = require5(join4(__dirname5, "../../package.json"));
  }
  var program = new Command();
- program.name("lien").description("Local semantic code search for AI assistants via MCP").version(
+ program.name("lien").description("Local semantic code search for AI assistants via MCP").version(packageJson4.version);
  program.command("init").description("Initialize Lien in the current directory").option("-u, --upgrade", "Upgrade existing config with new options").option("-y, --yes", "Skip interactive prompts and use defaults").option("-p, --path <path>", "Path to initialize (defaults to current directory)").action(initCommand);
- program.command("index").description("Index the codebase for semantic search").option("-w, --watch", "Watch for changes and re-index automatically").option("-v, --verbose", "Show detailed logging during indexing").action(indexCommand);
- program.command("serve").description("Start the MCP server for Cursor integration").option("-p, --port <port>", "Port number (for future use)", "7133").option("-w, --watch", "
+ program.command("index").description("Index the codebase for semantic search").option("-f, --force", "Force full reindex (skip incremental)").option("-w, --watch", "Watch for changes and re-index automatically").option("-v, --verbose", "Show detailed logging during indexing").action(indexCommand);
+ program.command("serve").description("Start the MCP server for Cursor integration").option("-p, --port <port>", "Port number (for future use)", "7133").option("--no-watch", "Disable file watching for this session").option("-w, --watch", "[DEPRECATED] File watching is now enabled by default").option("-r, --root <path>", "Root directory to serve (defaults to current directory)").action(serveCommand);
  program.command("status").description("Show indexing status and statistics").action(statusCommand);
- program.command("reindex").description("Clear index and re-index the entire codebase").option("-v, --verbose", "Show detailed logging during indexing").action(async (options) => {
- const { showCompactBanner: showCompactBanner2 } = await Promise.resolve().then(() => (init_banner(), banner_exports));
- const chalk7 = (await import("chalk")).default;
- const { VectorDB: VectorDB2 } = await Promise.resolve().then(() => (init_lancedb(), lancedb_exports));
- const { indexCodebase: indexCodebase2 } = await Promise.resolve().then(() => (init_indexer(), indexer_exports));
- showCompactBanner2();
- try {
- console.log(chalk7.yellow("Clearing existing index..."));
- const vectorDB = new VectorDB2(process.cwd());
- await vectorDB.initialize();
- await vectorDB.clear();
- console.log(chalk7.green("\u2713 Index cleared\n"));
- await indexCodebase2({
- rootDir: process.cwd(),
- verbose: options.verbose || false
- });
- } catch (error) {
- console.error(chalk7.red("Error during re-indexing:"), error);
- process.exit(1);
- }
- });

  // src/index.ts
  program.parse();
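The hunks above replace the standalone `reindex` command with `lien index --force` and introduce a per-file manifest (`manifest.updateFiles(...)`, `manifest.removeFile(...)`) so that `lien index` only re-embeds files whose mtime changed. A minimal TypeScript sketch of that manifest-based change detection, for orientation only: the `ManifestEntry` fields (`filepath`, `lastModified`, `chunkCount`) are taken from the `updateFiles` call in the diff, while `detectChanges` and its parameter names are hypothetical and not Lien's actual API.

// Hypothetical sketch (not Lien's API): classify files against the saved manifest.
import { stat } from "fs/promises";

interface ManifestEntry {
  filepath: string;
  lastModified: number; // file mtime in ms, as written by manifest.updateFiles above
  chunkCount: number;
}

async function detectChanges(previous: Map<string, ManifestEntry>, files: string[]) {
  const added: string[] = [];
  const modified: string[] = [];
  const seen = new Set<string>();
  for (const file of files) {
    seen.add(file);
    const entry = previous.get(file);
    if (!entry) {
      added.push(file); // never indexed: embed all of its chunks
      continue;
    }
    const { mtimeMs } = await stat(file);
    if (mtimeMs !== entry.lastModified) {
      modified.push(file); // re-embed only files whose mtime changed
    }
  }
  // Files still in the manifest but gone from disk are dropped from the vector DB.
  const deleted = [...previous.keys()].filter((f) => !seen.has(f));
  return { added, modified, deleted };
}

Under this scheme, a forced run (`lien index --force`) simply clears both the vector DB and the manifest first, which is exactly what the new `options.force` branch in `indexCommand` does.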