@liendev/lien 0.26.0 → 0.27.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -8334,6 +8334,9 @@ var GetDependentsSchema = external_exports.object({
8334
8334
  filepath: external_exports.string().min(1, "Filepath cannot be empty").describe(
8335
8335
  "Path to file to find dependents for (relative to workspace root).\n\nExample: 'src/utils/validate.ts'\n\nReturns all files that import or depend on this file.\n\nNote: Scans up to 10,000 code chunks. For very large codebases,\nresults may be incomplete (a warning will be included if truncated)."
8336
8336
  ),
8337
+ symbol: external_exports.string().min(1, "Symbol cannot be an empty string").optional().describe(
8338
+ "Optional: specific exported symbol to find usages of.\n\nWhen provided, returns call sites instead of just importing files.\n\nExample: 'validateEmail' to find where validateEmail() is called.\n\nResponse includes 'usages' array showing which functions call this symbol."
8339
+ ),
8337
8340
  depth: external_exports.number().int().min(1).max(1).default(1).describe(
8338
8341
  "Depth of transitive dependencies. Only depth=1 (direct dependents) is currently supported.\n\n1 = Direct dependents only"
8339
8342
  ),
@@ -8643,7 +8646,7 @@ async function handleFindSimilar(args, ctx) {
8643
8646
  // src/mcp/utils/path-matching.ts
8644
8647
  function normalizePath(path6, workspaceRoot) {
8645
8648
  let normalized = path6.replace(/['"]/g, "").trim().replace(/\\/g, "/");
8646
- normalized = normalized.replace(/\.(ts|tsx|js|jsx)$/, "");
8649
+ normalized = normalized.replace(/\.(ts|tsx|js|jsx|php|py)$/, "");
8647
8650
  if (normalized.startsWith(workspaceRoot + "/")) {
8648
8651
  normalized = normalized.substring(workspaceRoot.length + 1);
8649
8652
  }
@@ -8671,8 +8674,56 @@ function matchesFile(normalizedImport, normalizedTarget) {
8671
8674
  if (matchesAtBoundary(cleanedImport, normalizedTarget) || matchesAtBoundary(normalizedTarget, cleanedImport)) {
8672
8675
  return true;
8673
8676
  }
8677
+ if (matchesPHPNamespace(normalizedImport, normalizedTarget)) {
8678
+ return true;
8679
+ }
8680
+ if (matchesPythonModule(normalizedImport, normalizedTarget)) {
8681
+ return true;
8682
+ }
8674
8683
  return false;
8675
8684
  }
8685
// Python import matching: maps a dotted module path ("pkg.mod", already
// converted to slash form by the caller) onto a file path whose ".py"
// suffix has been stripped.

// Exact module match: "pkg/mod" matches target "pkg/mod" or the package
// entry file "pkg/mod/__init__".
// (A former third clause — strip "/__init__" then compare — was exactly the
// union of these two checks and has been removed as redundant.)
function matchesDirectPythonModule(moduleAsPath, targetWithoutPy) {
  return (
    targetWithoutPy === moduleAsPath ||
    targetWithoutPy === moduleAsPath + "/__init__"
  );
}

// True when the target file lives anywhere inside the imported package,
// e.g. module "pkg" vs target "pkg/sub/mod".
function matchesParentPythonPackage(moduleAsPath, targetWithoutPy) {
  return targetWithoutPy.startsWith(moduleAsPath + "/");
}

// True when the module path is a trailing component sequence of the target,
// e.g. module "pkg/mod" vs target "src/pkg/mod" (or its "__init__").
function matchesSuffixPythonModule(moduleAsPath, targetWithoutPy) {
  return (
    targetWithoutPy.endsWith("/" + moduleAsPath) ||
    targetWithoutPy.endsWith("/" + moduleAsPath + "/__init__")
  );
}

// Allows at most one source-root directory prefix (e.g. "src/") before the
// module path. The prefix must end at a component boundary, and the match
// itself must END at a component boundary ("/" or end of string) — without
// that check, module "foo/bar" would falsely match "src/foo/barbaz".
function matchesWithSourcePrefix(moduleAsPath, targetWithoutPy) {
  const moduleIndex = targetWithoutPy.indexOf(moduleAsPath);
  if (moduleIndex < 0) return false;
  const endIndex = moduleIndex + moduleAsPath.length;
  const endsAtBoundary =
    endIndex === targetWithoutPy.length || targetWithoutPy[endIndex] === "/";
  if (!endsAtBoundary) return false;
  const prefix = targetWithoutPy.substring(0, moduleIndex);
  const prefixSlashes = (prefix.match(/\//g) || []).length;
  return prefixSlashes <= 1 && (prefix === "" || prefix.endsWith("/"));
}

// Entry point: does the dotted Python import `importPath` refer to the file
// `targetPath`? Dotless specifiers are rejected up front so plain JS-style
// imports never fall through to the Python heuristics.
function matchesPythonModule(importPath, targetPath) {
  if (!importPath.includes(".")) {
    return false;
  }
  const moduleAsPath = importPath.replace(/\./g, "/");
  const targetWithoutPy = targetPath.replace(/\.py$/, "");
  return (
    matchesDirectPythonModule(moduleAsPath, targetWithoutPy) ||
    matchesParentPythonPackage(moduleAsPath, targetWithoutPy) ||
    matchesSuffixPythonModule(moduleAsPath, targetWithoutPy) ||
    matchesWithSourcePrefix(moduleAsPath, targetWithoutPy)
  );
}
8709
// Matches a PHP-style namespace import against a file path by comparing the
// trailing path components case-insensitively: every component of the import
// must equal the corresponding tail component of the target, e.g.
// "App/Models/User" matches "src/app/models/user".
function matchesPHPNamespace(importPath, targetPath) {
  const importParts = importPath.split("/").filter(Boolean);
  const targetParts = targetPath.split("/").filter(Boolean);
  if (
    importParts.length === 0 ||
    targetParts.length === 0 ||
    importParts.length > targetParts.length
  ) {
    return false;
  }
  // Compare the import against the same-length tail of the target.
  const targetTail = targetParts.slice(-importParts.length);
  return importParts.every(
    (part, idx) => part.toLowerCase() === targetTail[idx].toLowerCase()
  );
}
8676
8727
  function getCanonicalPath(filepath, workspaceRoot) {
8677
8728
  let canonical = filepath.replace(/\\/g, "/");
8678
8729
  if (canonical.startsWith(workspaceRoot + "/")) {
@@ -8942,7 +8993,7 @@ var COMPLEXITY_THRESHOLDS = {
8942
8993
  MEDIUM_MAX: 15
8943
8994
  // Occasional branching
8944
8995
  };
8945
- async function findDependents(vectorDB, filepath, crossRepo, log) {
8996
+ async function scanChunks(vectorDB, crossRepo, log) {
8946
8997
  let allChunks;
8947
8998
  if (crossRepo && vectorDB instanceof QdrantDB2) {
8948
8999
  allChunks = await vectorDB.scanCrossRepo({ limit: SCAN_LIMIT2 });
@@ -8956,48 +9007,105 @@ async function findDependents(vectorDB, filepath, crossRepo, log) {
8956
9007
  if (hitLimit) {
8957
9008
  log(`Scanned ${SCAN_LIMIT2} chunks (limit reached). Results may be incomplete.`, "warning");
8958
9009
  }
8959
- log(`Scanning ${allChunks.length} chunks for imports...`);
9010
+ return { allChunks, hitLimit };
9011
+ }
9012
// Returns a memoizing wrapper around normalizePath, bound to the current
// working directory (with backslashes normalized to forward slashes).
// Repeated lookups of the same raw path hit the cache instead of
// re-normalizing.
function createPathNormalizer() {
  const workspaceRoot = process.cwd().replace(/\\/g, "/");
  const memo = new Map();
  return (rawPath) => {
    const cached = memo.get(rawPath);
    // `has` guards the (unlikely) case where a cached value is undefined.
    if (cached !== undefined || memo.has(rawPath)) {
      return cached;
    }
    const normalized = normalizePath(rawPath, workspaceRoot);
    memo.set(rawPath, normalized);
    return normalized;
  };
}
9022
// Buckets chunks by the canonical (workspace-relative) path of the file they
// came from. Returns a Map of canonical path -> array of chunks.
function groupChunksByFile(chunks) {
  const workspaceRoot = process.cwd().replace(/\\/g, "/");
  const chunksByFile = new Map();
  for (const chunk of chunks) {
    const canonical = getCanonicalPath(chunk.metadata.file, workspaceRoot);
    const bucket = chunksByFile.get(canonical);
    if (bucket) {
      bucket.push(chunk);
    } else {
      chunksByFile.set(canonical, [chunk]);
    }
  }
  return chunksByFile;
}
9033
// Produces the dependents list for the response. Without a symbol, every
// file that imports the target is a dependent (no usage counting). With a
// symbol, warns if the target doesn't export it, then narrows to files that
// import that specific symbol and collects call-site usages.
function buildDependentsList(chunksByFile, symbol, normalizedTarget, normalizePathCached, allChunks, filepath, log) {
  if (!symbol) {
    const dependents = [...chunksByFile.keys()].map((dependentPath) => ({
      filepath: dependentPath,
      isTestFile: isTestFile(dependentPath)
    }));
    return { dependents, totalUsageCount: void 0 };
  }
  validateSymbolExport(allChunks, normalizedTarget, normalizePathCached, symbol, filepath, log);
  return findSymbolUsages(chunksByFile, symbol, normalizedTarget, normalizePathCached);
}
9044
// Best-effort sanity check: logs a warning when no scanned chunk of the
// target file lists `symbol` among its exports. Does not abort the search —
// export metadata may simply be missing for these chunks.
function validateSymbolExport(allChunks, normalizedTarget, normalizePathCached, symbol, filepath, log) {
  for (const chunk of allChunks) {
    const chunkFile = normalizePathCached(chunk.metadata.file);
    if (matchesFile(chunkFile, normalizedTarget) && chunk.metadata.exports?.includes(symbol)) {
      return;
    }
  }
  log(`Warning: Symbol "${symbol}" not found in exports of ${filepath}`, "warning");
}
9053
// Scans indexed chunks for files that import `filepath` (optionally
// narrowing to usages of a specific exported `symbol`) and computes
// complexity metrics over the dependent files. Production dependents are
// sorted ahead of test files, and prod/test counts are returned alongside
// the grouped chunks for downstream risk scoring.
async function findDependents(vectorDB, filepath, crossRepo, log, symbol) {
  const { allChunks, hitLimit } = await scanChunks(vectorDB, crossRepo, log);
  log(`Scanning ${allChunks.length} chunks for imports...`);
  const toNormalized = createPathNormalizer();
  const normalizedTarget = toNormalized(filepath);
  // Index chunks by each import path they declare, then select the chunks
  // whose imports resolve to the target file.
  const importIndex = buildImportIndex(allChunks, toNormalized);
  const dependentChunks = findDependentChunks(importIndex, normalizedTarget);
  const chunksByFile = groupChunksByFile(dependentChunks);
  const fileComplexities = calculateFileComplexities(chunksByFile);
  const complexityMetrics = calculateOverallComplexityMetrics(fileComplexities);
  const { dependents, totalUsageCount } = buildDependentsList(
    chunksByFile,
    symbol,
    normalizedTarget,
    toNormalized,
    allChunks,
    filepath,
    log
  );
  // Production files first, test files last (stable within each group).
  dependents.sort((a, b) => Number(a.isTestFile) - Number(b.isTestFile));
  const testDependentCount = dependents.reduce(
    (count, entry) => (entry.isTestFile ? count + 1 : count),
    0
  );
  const productionDependentCount = dependents.length - testDependentCount;
  return {
    dependents,
    productionDependentCount,
    testDependentCount,
    chunksByFile,
    fileComplexities,
    complexityMetrics,
    hitLimit,
    allChunks,
    totalUsageCount
  };
}
8991
9090
  function buildImportIndex(allChunks, normalizePathCached) {
8992
9091
  const importIndex = /* @__PURE__ */ new Map();
9092
+ const addToIndex = (importPath, chunk) => {
9093
+ const normalizedImport = normalizePathCached(importPath);
9094
+ if (!importIndex.has(normalizedImport)) {
9095
+ importIndex.set(normalizedImport, []);
9096
+ }
9097
+ importIndex.get(normalizedImport).push(chunk);
9098
+ };
8993
9099
  for (const chunk of allChunks) {
8994
9100
  const imports = chunk.metadata.imports || [];
8995
9101
  for (const imp of imports) {
8996
- const normalizedImport = normalizePathCached(imp);
8997
- if (!importIndex.has(normalizedImport)) {
8998
- importIndex.set(normalizedImport, []);
9102
+ addToIndex(imp, chunk);
9103
+ }
9104
+ const importedSymbols = chunk.metadata.importedSymbols;
9105
+ if (importedSymbols && typeof importedSymbols === "object") {
9106
+ for (const modulePath of Object.keys(importedSymbols)) {
9107
+ addToIndex(modulePath, chunk);
8999
9108
  }
9000
- importIndex.get(normalizedImport).push(chunk);
9001
9109
  }
9002
9110
  }
9003
9111
  return importIndex;
@@ -9079,14 +9187,15 @@ function calculateComplexityRiskBoost(avgComplexity, maxComplexity) {
9079
9187
  }
9080
9188
  return "low";
9081
9189
  }
9082
- function calculateRiskLevel(dependentCount, complexityRiskBoost) {
9190
+ function calculateRiskLevel(dependentCount, complexityRiskBoost, productionDependentCount) {
9083
9191
  const DEPENDENT_COUNT_THRESHOLDS = {
9084
9192
  LOW: 5,
9085
9193
  MEDIUM: 15,
9086
9194
  HIGH: 30
9087
9195
  };
9088
9196
  const RISK_ORDER = { low: 0, medium: 1, high: 2, critical: 3 };
9089
- let riskLevel = dependentCount === 0 ? "low" : dependentCount <= DEPENDENT_COUNT_THRESHOLDS.LOW ? "low" : dependentCount <= DEPENDENT_COUNT_THRESHOLDS.MEDIUM ? "medium" : dependentCount <= DEPENDENT_COUNT_THRESHOLDS.HIGH ? "high" : "critical";
9197
+ const effectiveCount = productionDependentCount ?? dependentCount;
9198
+ let riskLevel = effectiveCount === 0 ? "low" : effectiveCount <= DEPENDENT_COUNT_THRESHOLDS.LOW ? "low" : effectiveCount <= DEPENDENT_COUNT_THRESHOLDS.MEDIUM ? "medium" : effectiveCount <= DEPENDENT_COUNT_THRESHOLDS.HIGH ? "high" : "critical";
9090
9199
  if (RISK_ORDER[complexityRiskBoost] > RISK_ORDER[riskLevel]) {
9091
9200
  riskLevel = complexityRiskBoost;
9092
9201
  }
@@ -9118,45 +9227,167 @@ function groupDependentsByRepo(dependents, chunks) {
9118
9227
  }
9119
9228
  return grouped;
9120
9229
  }
9230
// For each candidate dependent file, keeps only those that actually import
// `targetSymbol` from the target module, and attaches any tracked call-site
// usages (omitted entirely when none were tracked). Also totals usages
// across all files.
function findSymbolUsages(chunksByFile, targetSymbol, normalizedTarget, normalizePathCached) {
  const dependents = [];
  let totalUsageCount = 0;
  for (const [dependentPath, fileChunks] of chunksByFile.entries()) {
    const importsSymbol = fileImportsSymbol(
      fileChunks,
      targetSymbol,
      normalizedTarget,
      normalizePathCached
    );
    if (!importsSymbol) continue;
    const usages = extractSymbolUsagesFromChunks(fileChunks, targetSymbol);
    totalUsageCount += usages.length;
    dependents.push({
      filepath: dependentPath,
      isTestFile: isTestFile(dependentPath),
      // Leave the field undefined (not []) when no call sites were tracked.
      usages: usages.length > 0 ? usages : void 0
    });
  }
  return { dependents, totalUsageCount };
}
9247
// True when any chunk of the file records an import of `targetSymbol` from
// the target module — either by name, or via a namespace import
// ("* as ..."), which makes every export reachable.
function fileImportsSymbol(chunks, targetSymbol, normalizedTarget, normalizePathCached) {
  const chunkImportsSymbol = (chunk) => {
    const importedSymbols = chunk.metadata.importedSymbols;
    if (!importedSymbols) return false;
    for (const [importPath, symbols] of Object.entries(importedSymbols)) {
      if (!matchesFile(normalizePathCached(importPath), normalizedTarget)) {
        continue;
      }
      if (symbols.includes(targetSymbol)) return true;
      if (symbols.some((s) => s.startsWith("* as "))) return true;
    }
    return false;
  };
  return chunks.some(chunkImportsSymbol);
}
9261
// Collects call-site records for `targetSymbol` from the chunks of one file.
// Each usage carries the calling symbol's name (or "unknown"), the absolute
// line of the call, and a source snippet pulled from the chunk content.
function extractSymbolUsagesFromChunks(chunks, targetSymbol) {
  const collected = [];
  for (const chunk of chunks) {
    const { callSites, symbolName, startLine } = chunk.metadata;
    if (!callSites) continue;
    const contentLines = chunk.content.split("\n");
    const matchingSites = callSites.filter((site) => site.symbol === targetSymbol);
    for (const site of matchingSites) {
      collected.push({
        callerSymbol: symbolName || "unknown",
        line: site.line,
        snippet: extractSnippet(contentLines, site.line, startLine, targetSymbol)
      });
    }
  }
  return collected;
}
9279
// Returns a one-line snippet for a call site. `callLine` is absolute;
// `startLine` is the chunk's first absolute line, so the call maps to index
// callLine - startLine in `lines`. If that line is blank, searches up to 5
// lines backward, then forward, for the nearest non-blank line; falls back
// to a "symbol(...)" placeholder when nothing usable is found.
function extractSnippet(lines, callLine, startLine, symbolName) {
  const fallback = `${symbolName}(...)`;
  const index = callLine - startLine;
  if (index < 0 || index >= lines.length) {
    return fallback;
  }
  const exact = lines[index].trim();
  if (exact !== "") {
    return exact;
  }
  const RADIUS = 5;
  const lowest = Math.max(0, index - RADIUS);
  for (let i = index - 1; i >= lowest; i--) {
    const text = lines[i].trim();
    if (text !== "") return text;
  }
  const highest = Math.min(lines.length - 1, index + RADIUS);
  for (let i = index + 1; i <= highest; i++) {
    const text = lines[i].trim();
    if (text !== "") return text;
  }
  return fallback;
}
9121
9304
 
9122
9305
  // src/mcp/handlers/get-dependents.ts
9306
// Cross-repo search needs the Qdrant backend; report whether a cross-repo
// request will silently fall back to single-repo scanning.
function checkCrossRepoFallback(crossRepo, vectorDB) {
  if (!crossRepo) {
    return false;
  }
  return !(vectorDB instanceof QdrantDB3);
}
9309
// Assembles user-facing caveat notes for the response: backend fallback
// and/or scan-limit truncation.
function buildNotes(crossRepoFallback, hitLimit) {
  const candidates = [
    [crossRepoFallback, "Cross-repo search requires Qdrant backend. Fell back to single-repo search."],
    [hitLimit, "Scanned 10,000 chunks (limit reached). Results may be incomplete."]
  ];
  return candidates
    .filter(([applies]) => applies)
    .map(([, message]) => message);
}
9319
// Logs a one-line summary of the dependents analysis. Symbol-mode messages
// distinguish "tracked call sites found" from "imports found but no call
// sites tracked"; file-mode falls back to a plain dependents count.
function logRiskAssessment(analysis, riskLevel, symbol, log) {
  const prodTest = `(${analysis.productionDependentCount} prod, ${analysis.testDependentCount} test)`;
  const fileCount = analysis.dependents.length;
  if (!symbol || analysis.totalUsageCount === void 0) {
    log(`Found ${fileCount} dependents ${prodTest} - risk: ${riskLevel}`);
    return;
  }
  if (analysis.totalUsageCount > 0) {
    log(`Found ${analysis.totalUsageCount} tracked call sites across ${fileCount} files ${prodTest} - risk: ${riskLevel}`);
  } else {
    log(`Found ${fileCount} files importing '${symbol}' ${prodTest} - risk: ${riskLevel} (Note: Call site tracking unavailable for these chunks)`);
  }
}
9337
// Shapes the MCP tool response. Optional fields (symbol, totalUsageCount,
// note, groupedByRepo) are only present when they carry information; the
// cross-repo grouping is computed lazily and only on a Qdrant backend.
function buildDependentsResponse(analysis, args, riskLevel, indexInfo, notes, crossRepo, vectorDB) {
  const { symbol, filepath } = args;
  const response = {
    indexInfo,
    filepath,
    dependentCount: analysis.dependents.length,
    productionDependentCount: analysis.productionDependentCount,
    testDependentCount: analysis.testDependentCount,
    riskLevel,
    dependents: analysis.dependents,
    complexityMetrics: analysis.complexityMetrics,
    ...(symbol ? { symbol } : {}),
    ...(analysis.totalUsageCount !== void 0
      ? { totalUsageCount: analysis.totalUsageCount }
      : {}),
    ...(notes.length > 0 ? { note: notes.join(" ") } : {})
  };
  if (crossRepo && vectorDB instanceof QdrantDB3) {
    response.groupedByRepo = groupDependentsByRepo(analysis.dependents, analysis.allChunks);
  }
  return response;
}
9123
9363
// MCP handler for the get_dependents tool. Validates args against
// GetDependentsSchema, runs the dependents analysis (optionally scoped to a
// symbol and/or cross-repo), scores risk from production dependents plus the
// complexity boost, and returns the shaped response.
async function handleGetDependents(args, ctx) {
  const { vectorDB, log, checkAndReconnect, getIndexMetadata } = ctx;
  return await wrapToolHandler(
    GetDependentsSchema,
    async (validatedArgs) => {
      const { crossRepo, filepath, symbol } = validatedArgs;
      const suffixes = [
        symbol ? ` (symbol: ${symbol})` : "",
        crossRepo ? " (cross-repo)" : ""
      ].join("");
      log(`Finding dependents of: ${filepath}${suffixes}`);
      await checkAndReconnect();
      const analysis = await findDependents(vectorDB, filepath, crossRepo ?? false, log, symbol);
      // Risk is driven by production dependents; test-only dependents don't
      // escalate it, but high complexity in dependents can.
      const riskLevel = calculateRiskLevel(
        analysis.dependents.length,
        analysis.complexityMetrics.complexityRiskBoost,
        analysis.productionDependentCount
      );
      logRiskAssessment(analysis, riskLevel, symbol, log);
      const fellBack = checkCrossRepoFallback(crossRepo, vectorDB);
      const notes = buildNotes(fellBack, analysis.hitLimit);
      return buildDependentsResponse(
        analysis,
        validatedArgs,
        riskLevel,
        getIndexMetadata(),
        notes,
        crossRepo,
        vectorDB
      );
    }
  )(args);
}