@liendev/lien 0.25.0 → 0.27.0

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their public registries.
package/dist/index.js CHANGED
@@ -8334,6 +8334,9 @@ var GetDependentsSchema = external_exports.object({
  filepath: external_exports.string().min(1, "Filepath cannot be empty").describe(
  "Path to file to find dependents for (relative to workspace root).\n\nExample: 'src/utils/validate.ts'\n\nReturns all files that import or depend on this file.\n\nNote: Scans up to 10,000 code chunks. For very large codebases,\nresults may be incomplete (a warning will be included if truncated)."
  ),
+ symbol: external_exports.string().min(1, "Symbol cannot be an empty string").optional().describe(
+ "Optional: specific exported symbol to find usages of.\n\nWhen provided, returns call sites instead of just importing files.\n\nExample: 'validateEmail' to find where validateEmail() is called.\n\nResponse includes 'usages' array showing which functions call this symbol."
+ ),
  depth: external_exports.number().int().min(1).max(1).default(1).describe(
  "Depth of transitive dependencies. Only depth=1 (direct dependents) is currently supported.\n\n1 = Direct dependents only"
  ),
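
The new optional `symbol` field turns get_dependents from a file-level query into a symbol-level one. A minimal sketch of the difference from the caller's side, assuming a generic MCP client helper named callTool (hypothetical; only the argument names come from the schema above):

// File-level: which files import this module?
const fileLevel = await callTool("get_dependents", {
  filepath: "src/utils/validate.ts"
});

// Symbol-level: where is this export actually called?
const symbolLevel = await callTool("get_dependents", {
  filepath: "src/utils/validate.ts",
  symbol: "validateEmail" // optional; must be non-empty when present
});
// symbolLevel.dependents[i].usages lists call sites; totalUsageCount sums them.
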
@@ -8565,12 +8568,14 @@ async function handleSemanticSearch(args, ctx) {
  await checkAndReconnect();
  const queryEmbedding = await embeddings.embed(query);
  let results;
+ let crossRepoFallback = false;
  if (crossRepo && vectorDB instanceof QdrantDB) {
  results = await vectorDB.searchCrossRepo(queryEmbedding, limit, { repoIds });
  log(`Found ${results.length} results across ${Object.keys(groupResultsByRepo(results)).length} repos`);
  } else {
  if (crossRepo) {
  log("Warning: crossRepo=true requires Qdrant backend. Falling back to single-repo search.");
+ crossRepoFallback = true;
  }
  results = await vectorDB.search(queryEmbedding, limit, query);
  log(`Found ${results.length} results`);
@@ -8582,6 +8587,9 @@ async function handleSemanticSearch(args, ctx) {
  if (crossRepo && vectorDB instanceof QdrantDB) {
  response.groupedByRepo = groupResultsByRepo(results);
  }
+ if (crossRepoFallback) {
+ response.note = "Cross-repo search requires Qdrant backend. Fell back to single-repo search.";
+ }
  return response;
  }
  )(args);
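
The fallback is now reported to the caller, not just logged: the flag set beside the warning is surfaced as response.note. A sketch of the response a client sees when crossRepo is requested against a non-Qdrant backend (values illustrative):

const response = {
  results: [ /* single-repo matches */ ],
  note: "Cross-repo search requires Qdrant backend. Fell back to single-repo search."
  // groupedByRepo is absent: it is only attached on the Qdrant path
};
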
@@ -8638,7 +8646,7 @@ async function handleFindSimilar(args, ctx) {
  // src/mcp/utils/path-matching.ts
  function normalizePath(path6, workspaceRoot) {
  let normalized = path6.replace(/['"]/g, "").trim().replace(/\\/g, "/");
- normalized = normalized.replace(/\.(ts|tsx|js|jsx)$/, "");
+ normalized = normalized.replace(/\.(ts|tsx|js|jsx|php|py)$/, "");
  if (normalized.startsWith(workspaceRoot + "/")) {
  normalized = normalized.substring(workspaceRoot.length + 1);
  }
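
normalizePath now also strips .php and .py, so PHP and Python targets normalize the same way TS/JS ones do. Illustrative inputs and outputs, assuming workspaceRoot is "/home/user/app":

normalizePath("'src/utils/validate.ts'", "/home/user/app"); // -> "src/utils/validate" (quotes and extension stripped)
normalizePath("app\\Services\\Mailer.php", "/home/user/app"); // -> "app/Services/Mailer" (backslashes normalized)
normalizePath("/home/user/app/pkg/mod.py", "/home/user/app"); // -> "pkg/mod" (workspace prefix removed)
// Extensions outside (ts|tsx|js|jsx|php|py) are left intact.
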
@@ -8666,8 +8674,56 @@ function matchesFile(normalizedImport, normalizedTarget) {
  if (matchesAtBoundary(cleanedImport, normalizedTarget) || matchesAtBoundary(normalizedTarget, cleanedImport)) {
  return true;
  }
+ if (matchesPHPNamespace(normalizedImport, normalizedTarget)) {
+ return true;
+ }
+ if (matchesPythonModule(normalizedImport, normalizedTarget)) {
+ return true;
+ }
  return false;
  }
+ function matchesDirectPythonModule(moduleAsPath, targetWithoutPy) {
+ return targetWithoutPy === moduleAsPath || targetWithoutPy === moduleAsPath + "/__init__" || targetWithoutPy.replace(/\/__init__$/, "") === moduleAsPath;
+ }
+ function matchesParentPythonPackage(moduleAsPath, targetWithoutPy) {
+ return targetWithoutPy.startsWith(moduleAsPath + "/");
+ }
+ function matchesSuffixPythonModule(moduleAsPath, targetWithoutPy) {
+ return targetWithoutPy.endsWith("/" + moduleAsPath) || targetWithoutPy.endsWith("/" + moduleAsPath + "/__init__");
+ }
+ function matchesWithSourcePrefix(moduleAsPath, targetWithoutPy) {
+ const moduleIndex = targetWithoutPy.indexOf(moduleAsPath);
+ if (moduleIndex < 0) return false;
+ const prefix = targetWithoutPy.substring(0, moduleIndex);
+ const prefixSlashes = (prefix.match(/\//g) || []).length;
+ return prefixSlashes <= 1 && (prefix === "" || prefix.endsWith("/"));
+ }
+ function matchesPythonModule(importPath, targetPath) {
+ if (!importPath.includes(".")) {
+ return false;
+ }
+ const moduleAsPath = importPath.replace(/\./g, "/");
+ const targetWithoutPy = targetPath.replace(/\.py$/, "");
+ return matchesDirectPythonModule(moduleAsPath, targetWithoutPy) || matchesParentPythonPackage(moduleAsPath, targetWithoutPy) || matchesSuffixPythonModule(moduleAsPath, targetWithoutPy) || matchesWithSourcePrefix(moduleAsPath, targetWithoutPy);
+ }
+ function matchesPHPNamespace(importPath, targetPath) {
+ const importComponents = importPath.split("/").filter(Boolean);
+ const targetComponents = targetPath.split("/").filter(Boolean);
+ if (importComponents.length === 0 || targetComponents.length === 0) {
+ return false;
+ }
+ let matched = 0;
+ for (let i = 1; i <= importComponents.length && i <= targetComponents.length; i++) {
+ const impComp = importComponents[importComponents.length - i].toLowerCase();
+ const targetComp = targetComponents[targetComponents.length - i].toLowerCase();
+ if (impComp === targetComp) {
+ matched++;
+ } else {
+ break;
+ }
+ }
+ return matched === importComponents.length;
+ }
  function getCanonicalPath(filepath, workspaceRoot) {
  let canonical = filepath.replace(/\\/g, "/");
  if (canonical.startsWith(workspaceRoot + "/")) {
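
Two language-specific matchers extend matchesFile. PHP namespace paths are compared component-by-component from the right, case-insensitively, against an already extension-stripped target; Python dotted imports are converted to paths and tried as direct modules, packages (__init__), suffixes, and with a single source-directory prefix (the matcher strips a trailing .py itself). Illustrative calls:

matchesPHPNamespace("App/Services/Mailer", "src/app/services/mailer"); // true: all 3 import components match right-to-left
matchesPHPNamespace("App/Services/Mailer", "src/app/services/logger"); // false: last component differs
matchesPythonModule("app.services.mailer", "src/app/services/mailer.py"); // true (suffix match)
matchesPythonModule("app.services", "app/services/__init__.py"); // true (__init__ resolution)
matchesPythonModule("mailer", "app/services/mailer.py"); // false: dot-free imports are skipped
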
@@ -8784,6 +8840,25 @@ function buildFilesData(filepaths, fileChunksMap, relatedChunksMap, testAssociat
  });
  return filesData;
  }
+ function buildScanLimitNote(hitScanLimit) {
+ return hitScanLimit ? "Scanned 10,000 chunks (limit reached). Test associations may be incomplete for large codebases." : void 0;
+ }
+ function buildSingleFileResponse(filepath, filesData, indexInfo, note) {
+ return {
+ indexInfo,
+ file: filepath,
+ chunks: filesData[filepath].chunks,
+ testAssociations: filesData[filepath].testAssociations,
+ ...note && { note }
+ };
+ }
+ function buildMultiFileResponse(filesData, indexInfo, note) {
+ return {
+ indexInfo,
+ files: filesData,
+ ...note && { note }
+ };
+ }
  async function handleGetFilesContext(args, ctx) {
  const { vectorDB, embeddings, log, checkAndReconnect, getIndexMetadata } = ctx;
  return await wrapToolHandler(
@@ -8810,7 +8885,8 @@ async function handleGetFilesContext(args, ctx) {
  );
  }
  const allChunks = await vectorDB.scanWithFilter({ limit: SCAN_LIMIT });
- if (allChunks.length === SCAN_LIMIT) {
+ const hitScanLimit = allChunks.length === SCAN_LIMIT;
+ if (hitScanLimit) {
  log(
  `Scanned ${SCAN_LIMIT} chunks (limit reached). Test associations may be incomplete for large codebases.`,
  "warning"
@@ -8833,20 +8909,9 @@ async function handleGetFilesContext(args, ctx) {
  0
  );
  log(`Found ${totalChunks} total chunks`);
- if (isSingleFile) {
- const filepath = filepaths[0];
- return {
- indexInfo: getIndexMetadata(),
- file: filepath,
- chunks: filesData[filepath].chunks,
- testAssociations: filesData[filepath].testAssociations
- };
- } else {
- return {
- indexInfo: getIndexMetadata(),
- files: filesData
- };
- }
+ const note = buildScanLimitNote(hitScanLimit);
+ const indexInfo = getIndexMetadata();
+ return isSingleFile ? buildSingleFileResponse(filepaths[0], filesData, indexInfo, note) : buildMultiFileResponse(filesData, indexInfo, note);
  }
  )(args);
  }
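
The single-file and multi-file branches now share one note-propagation pattern: buildScanLimitNote returns undefined when the scan completed, and the `...note && { note }` spread only adds the key when a note exists. A small sketch of that spread idiom:

const note = undefined;
const response = { indexInfo: {}, files: {}, ...note && { note } };
// -> { indexInfo: {}, files: {} }  (spreading undefined is a no-op)
// With hitScanLimit=true, note becomes the "Scanned 10,000 chunks..." string and the key appears.
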
@@ -8928,7 +8993,7 @@ var COMPLEXITY_THRESHOLDS = {
  MEDIUM_MAX: 15
  // Occasional branching
  };
- async function findDependents(vectorDB, filepath, crossRepo, log) {
+ async function scanChunks(vectorDB, crossRepo, log) {
  let allChunks;
  if (crossRepo && vectorDB instanceof QdrantDB2) {
  allChunks = await vectorDB.scanCrossRepo({ limit: SCAN_LIMIT2 });
@@ -8942,48 +9007,105 @@ async function findDependents(vectorDB, filepath, crossRepo, log) {
  if (hitLimit) {
  log(`Scanned ${SCAN_LIMIT2} chunks (limit reached). Results may be incomplete.`, "warning");
  }
- log(`Scanning ${allChunks.length} chunks for imports...`);
+ return { allChunks, hitLimit };
+ }
+ function createPathNormalizer() {
  const workspaceRoot = process.cwd().replace(/\\/g, "/");
- const pathCache = /* @__PURE__ */ new Map();
- const normalizePathCached = (path6) => {
- if (!pathCache.has(path6)) pathCache.set(path6, normalizePath(path6, workspaceRoot));
- return pathCache.get(path6);
+ const cache = /* @__PURE__ */ new Map();
+ return (path6) => {
+ if (!cache.has(path6)) {
+ cache.set(path6, normalizePath(path6, workspaceRoot));
+ }
+ return cache.get(path6);
  };
- const importIndex = buildImportIndex(allChunks, normalizePathCached);
- const normalizedTarget = normalizePathCached(filepath);
- const dependentChunks = findDependentChunks(importIndex, normalizedTarget);
+ }
+ function groupChunksByFile(chunks) {
+ const workspaceRoot = process.cwd().replace(/\\/g, "/");
  const chunksByFile = /* @__PURE__ */ new Map();
- for (const chunk of dependentChunks) {
+ for (const chunk of chunks) {
  const canonical = getCanonicalPath(chunk.metadata.file, workspaceRoot);
  const existing = chunksByFile.get(canonical) || [];
  existing.push(chunk);
  chunksByFile.set(canonical, existing);
  }
+ return chunksByFile;
+ }
+ function buildDependentsList(chunksByFile, symbol, normalizedTarget, normalizePathCached, allChunks, filepath, log) {
+ if (symbol) {
+ validateSymbolExport(allChunks, normalizedTarget, normalizePathCached, symbol, filepath, log);
+ return findSymbolUsages(chunksByFile, symbol, normalizedTarget, normalizePathCached);
+ }
+ const dependents = Array.from(chunksByFile.keys()).map((fp) => ({
+ filepath: fp,
+ isTestFile: isTestFile(fp)
+ }));
+ return { dependents, totalUsageCount: void 0 };
+ }
+ function validateSymbolExport(allChunks, normalizedTarget, normalizePathCached, symbol, filepath, log) {
+ const targetFileExportsSymbol = allChunks.some((chunk) => {
+ const chunkFile = normalizePathCached(chunk.metadata.file);
+ return matchesFile(chunkFile, normalizedTarget) && chunk.metadata.exports?.includes(symbol);
+ });
+ if (!targetFileExportsSymbol) {
+ log(`Warning: Symbol "${symbol}" not found in exports of ${filepath}`, "warning");
+ }
+ }
+ async function findDependents(vectorDB, filepath, crossRepo, log, symbol) {
+ const { allChunks, hitLimit } = await scanChunks(vectorDB, crossRepo, log);
+ log(`Scanning ${allChunks.length} chunks for imports...`);
+ const normalizePathCached = createPathNormalizer();
+ const normalizedTarget = normalizePathCached(filepath);
+ const importIndex = buildImportIndex(allChunks, normalizePathCached);
+ const dependentChunks = findDependentChunks(importIndex, normalizedTarget);
+ const chunksByFile = groupChunksByFile(dependentChunks);
  const fileComplexities = calculateFileComplexities(chunksByFile);
  const complexityMetrics = calculateOverallComplexityMetrics(fileComplexities);
- const uniqueFiles = Array.from(chunksByFile.keys()).map((filepath2) => ({
- filepath: filepath2,
- isTestFile: isTestFile(filepath2)
- }));
+ const { dependents, totalUsageCount } = buildDependentsList(
+ chunksByFile,
+ symbol,
+ normalizedTarget,
+ normalizePathCached,
+ allChunks,
+ filepath,
+ log
+ );
+ dependents.sort((a, b) => {
+ if (a.isTestFile === b.isTestFile) return 0;
+ return a.isTestFile ? 1 : -1;
+ });
+ const testDependentCount = dependents.filter((f) => f.isTestFile).length;
+ const productionDependentCount = dependents.length - testDependentCount;
  return {
- dependents: uniqueFiles,
+ dependents,
+ productionDependentCount,
+ testDependentCount,
  chunksByFile,
  fileComplexities,
  complexityMetrics,
  hitLimit,
- allChunks
+ allChunks,
+ totalUsageCount
  };
  }
  function buildImportIndex(allChunks, normalizePathCached) {
  const importIndex = /* @__PURE__ */ new Map();
+ const addToIndex = (importPath, chunk) => {
+ const normalizedImport = normalizePathCached(importPath);
+ if (!importIndex.has(normalizedImport)) {
+ importIndex.set(normalizedImport, []);
+ }
+ importIndex.get(normalizedImport).push(chunk);
+ };
  for (const chunk of allChunks) {
  const imports = chunk.metadata.imports || [];
  for (const imp of imports) {
- const normalizedImport = normalizePathCached(imp);
- if (!importIndex.has(normalizedImport)) {
- importIndex.set(normalizedImport, []);
+ addToIndex(imp, chunk);
+ }
+ const importedSymbols = chunk.metadata.importedSymbols;
+ if (importedSymbols && typeof importedSymbols === "object") {
+ for (const modulePath of Object.keys(importedSymbols)) {
+ addToIndex(modulePath, chunk);
  }
- importIndex.get(normalizedImport).push(chunk);
  }
  }
  return importIndex;
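
buildImportIndex now indexes a chunk under both its plain imports and the module keys of importedSymbols, so a file reachable only via named imports is still found. A sketch of the chunk metadata shape this consumes (field names taken from this diff; values illustrative):

const chunk = {
  content: "export function handleSignup() { /* ... */ }",
  metadata: {
    file: "src/app/signup.ts",
    imports: [], // may be empty
    importedSymbols: { "src/utils/validate": ["validateEmail"] } // module path -> imported names
  }
};
// addToIndex registers this chunk under the normalized "src/utils/validate",
// so get_dependents on that file finds signup.ts even with an empty imports list.
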
@@ -9065,14 +9187,15 @@ function calculateComplexityRiskBoost(avgComplexity, maxComplexity) {
  }
  return "low";
  }
- function calculateRiskLevel(dependentCount, complexityRiskBoost) {
+ function calculateRiskLevel(dependentCount, complexityRiskBoost, productionDependentCount) {
  const DEPENDENT_COUNT_THRESHOLDS = {
  LOW: 5,
  MEDIUM: 15,
  HIGH: 30
  };
  const RISK_ORDER = { low: 0, medium: 1, high: 2, critical: 3 };
- let riskLevel = dependentCount === 0 ? "low" : dependentCount <= DEPENDENT_COUNT_THRESHOLDS.LOW ? "low" : dependentCount <= DEPENDENT_COUNT_THRESHOLDS.MEDIUM ? "medium" : dependentCount <= DEPENDENT_COUNT_THRESHOLDS.HIGH ? "high" : "critical";
+ const effectiveCount = productionDependentCount ?? dependentCount;
+ let riskLevel = effectiveCount === 0 ? "low" : effectiveCount <= DEPENDENT_COUNT_THRESHOLDS.LOW ? "low" : effectiveCount <= DEPENDENT_COUNT_THRESHOLDS.MEDIUM ? "medium" : effectiveCount <= DEPENDENT_COUNT_THRESHOLDS.HIGH ? "high" : "critical";
  if (RISK_ORDER[complexityRiskBoost] > RISK_ORDER[riskLevel]) {
  riskLevel = complexityRiskBoost;
  }
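
Risk is now keyed to production dependents when that count is available; the complexity boost can still raise it. Worked examples against the thresholds above (<=5 low, <=15 medium, <=30 high, else critical):

calculateRiskLevel(20, "low");         // "high": all 20 dependents counted
calculateRiskLevel(20, "low", 4);      // "low": only the 4 production dependents count
calculateRiskLevel(20, "critical", 4); // "critical": the complexity boost outranks the count
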
@@ -9104,37 +9227,167 @@ function groupDependentsByRepo(dependents, chunks) {
  }
  return grouped;
  }
+ function findSymbolUsages(chunksByFile, targetSymbol, normalizedTarget, normalizePathCached) {
+ const dependents = [];
+ let totalUsageCount = 0;
+ for (const [filepath, chunks] of chunksByFile.entries()) {
+ if (!fileImportsSymbol(chunks, targetSymbol, normalizedTarget, normalizePathCached)) {
+ continue;
+ }
+ const usages = extractSymbolUsagesFromChunks(chunks, targetSymbol);
+ dependents.push({
+ filepath,
+ isTestFile: isTestFile(filepath),
+ usages: usages.length > 0 ? usages : void 0
+ });
+ totalUsageCount += usages.length;
+ }
+ return { dependents, totalUsageCount };
+ }
+ function fileImportsSymbol(chunks, targetSymbol, normalizedTarget, normalizePathCached) {
+ return chunks.some((chunk) => {
+ const importedSymbols = chunk.metadata.importedSymbols;
+ if (!importedSymbols) return false;
+ for (const [importPath, symbols] of Object.entries(importedSymbols)) {
+ const normalizedImport = normalizePathCached(importPath);
+ if (matchesFile(normalizedImport, normalizedTarget)) {
+ if (symbols.includes(targetSymbol)) return true;
+ if (symbols.some((s) => s.startsWith("* as "))) return true;
+ }
+ }
+ return false;
+ });
+ }
+ function extractSymbolUsagesFromChunks(chunks, targetSymbol) {
+ const usages = [];
+ for (const chunk of chunks) {
+ const callSites = chunk.metadata.callSites;
+ if (!callSites) continue;
+ const lines = chunk.content.split("\n");
+ for (const call of callSites) {
+ if (call.symbol === targetSymbol) {
+ usages.push({
+ callerSymbol: chunk.metadata.symbolName || "unknown",
+ line: call.line,
+ snippet: extractSnippet(lines, call.line, chunk.metadata.startLine, targetSymbol)
+ });
+ }
+ }
+ }
+ return usages;
+ }
+ function extractSnippet(lines, callLine, startLine, symbolName) {
+ const lineIndex = callLine - startLine;
+ const placeholder = `${symbolName}(...)`;
+ if (lineIndex < 0 || lineIndex >= lines.length) {
+ return placeholder;
+ }
+ const directLine = lines[lineIndex].trim();
+ if (directLine) {
+ return directLine;
+ }
+ const searchRadius = 5;
+ for (let i = lineIndex - 1; i >= Math.max(0, lineIndex - searchRadius); i--) {
+ const candidate = lines[i].trim();
+ if (candidate) {
+ return candidate;
+ }
+ }
+ for (let i = lineIndex + 1; i < Math.min(lines.length, lineIndex + searchRadius + 1); i++) {
+ const candidate = lines[i].trim();
+ if (candidate) {
+ return candidate;
+ }
+ }
+ return placeholder;
+ }

  // src/mcp/handlers/get-dependents.ts
+ function checkCrossRepoFallback(crossRepo, vectorDB) {
+ return Boolean(crossRepo && !(vectorDB instanceof QdrantDB3));
+ }
+ function buildNotes(crossRepoFallback, hitLimit) {
+ const notes = [];
+ if (crossRepoFallback) {
+ notes.push("Cross-repo search requires Qdrant backend. Fell back to single-repo search.");
+ }
+ if (hitLimit) {
+ notes.push("Scanned 10,000 chunks (limit reached). Results may be incomplete.");
+ }
+ return notes;
+ }
+ function logRiskAssessment(analysis, riskLevel, symbol, log) {
+ const prodTest = `(${analysis.productionDependentCount} prod, ${analysis.testDependentCount} test)`;
+ if (symbol && analysis.totalUsageCount !== void 0) {
+ if (analysis.totalUsageCount > 0) {
+ log(
+ `Found ${analysis.totalUsageCount} tracked call sites across ${analysis.dependents.length} files ${prodTest} - risk: ${riskLevel}`
+ );
+ } else {
+ log(
+ `Found ${analysis.dependents.length} files importing '${symbol}' ${prodTest} - risk: ${riskLevel} (Note: Call site tracking unavailable for these chunks)`
+ );
+ }
+ } else {
+ log(
+ `Found ${analysis.dependents.length} dependents ${prodTest} - risk: ${riskLevel}`
+ );
+ }
+ }
+ function buildDependentsResponse(analysis, args, riskLevel, indexInfo, notes, crossRepo, vectorDB) {
+ const { symbol, filepath } = args;
+ const response = {
+ indexInfo,
+ filepath,
+ dependentCount: analysis.dependents.length,
+ productionDependentCount: analysis.productionDependentCount,
+ testDependentCount: analysis.testDependentCount,
+ riskLevel,
+ dependents: analysis.dependents,
+ complexityMetrics: analysis.complexityMetrics
+ };
+ if (symbol) {
+ response.symbol = symbol;
+ }
+ if (analysis.totalUsageCount !== void 0) {
+ response.totalUsageCount = analysis.totalUsageCount;
+ }
+ if (notes.length > 0) {
+ response.note = notes.join(" ");
+ }
+ if (crossRepo && vectorDB instanceof QdrantDB3) {
+ response.groupedByRepo = groupDependentsByRepo(analysis.dependents, analysis.allChunks);
+ }
+ return response;
+ }
  async function handleGetDependents(args, ctx) {
  const { vectorDB, log, checkAndReconnect, getIndexMetadata } = ctx;
  return await wrapToolHandler(
  GetDependentsSchema,
  async (validatedArgs) => {
- const { crossRepo, filepath } = validatedArgs;
- log(`Finding dependents of: ${filepath}${crossRepo ? " (cross-repo)" : ""}`);
+ const { crossRepo, filepath, symbol } = validatedArgs;
+ const symbolSuffix = symbol ? ` (symbol: ${symbol})` : "";
+ const crossRepoSuffix = crossRepo ? " (cross-repo)" : "";
+ log(`Finding dependents of: ${filepath}${symbolSuffix}${crossRepoSuffix}`);
  await checkAndReconnect();
- const analysis = await findDependents(vectorDB, filepath, crossRepo ?? false, log);
+ const analysis = await findDependents(vectorDB, filepath, crossRepo ?? false, log, symbol);
  const riskLevel = calculateRiskLevel(
  analysis.dependents.length,
- analysis.complexityMetrics.complexityRiskBoost
+ analysis.complexityMetrics.complexityRiskBoost,
+ analysis.productionDependentCount
  );
- log(
- `Found ${analysis.dependents.length} dependent files (risk: ${riskLevel}${analysis.complexityMetrics.filesWithComplexityData > 0 ? ", complexity-boosted" : ""})`
- );
- const response = {
- indexInfo: getIndexMetadata(),
- filepath: validatedArgs.filepath,
- dependentCount: analysis.dependents.length,
+ logRiskAssessment(analysis, riskLevel, symbol, log);
+ const crossRepoFallback = checkCrossRepoFallback(crossRepo, vectorDB);
+ const notes = buildNotes(crossRepoFallback, analysis.hitLimit);
+ return buildDependentsResponse(
+ analysis,
+ validatedArgs,
  riskLevel,
- dependents: analysis.dependents,
- complexityMetrics: analysis.complexityMetrics,
- note: analysis.hitLimit ? `Warning: Scanned 10000 chunks (limit reached). Results may be incomplete.` : void 0
- };
- if (crossRepo && vectorDB instanceof QdrantDB3) {
- response.groupedByRepo = groupDependentsByRepo(analysis.dependents, analysis.allChunks);
- }
- return response;
+ getIndexMetadata(),
+ notes,
+ crossRepo,
+ vectorDB
+ );
  }
  )(args);
  }
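
Put together, a symbol-level query now returns call sites rather than a bare file list. A sketch of the shape buildDependentsResponse assembles (all values illustrative):

const exampleResponse = {
  indexInfo: { /* ... */ },
  filepath: "src/utils/validate.ts",
  symbol: "validateEmail",
  dependentCount: 2,
  productionDependentCount: 1,
  testDependentCount: 1,
  totalUsageCount: 3,
  riskLevel: "low",
  dependents: [
    {
      filepath: "src/app/signup.ts",
      isTestFile: false,
      usages: [
        { callerSymbol: "handleSignup", line: 42, snippet: "if (!validateEmail(input.email)) {" }
      ]
    },
    { filepath: "src/app/signup.test.ts", isTestFile: true, usages: [ /* ... */ ] }
  ],
  complexityMetrics: { /* ... */ }
};
// Production dependents sort before test dependents; extractSnippet falls back
// to `validateEmail(...)` when the call line is blank or outside the chunk.
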
@@ -9176,6 +9429,35 @@ function groupViolationsByRepo(violations, allChunks) {
  }
  return grouped;
  }
+ async function fetchCrossRepoChunks(vectorDB, crossRepo, repoIds, log) {
+ if (!crossRepo) {
+ return { chunks: [], fallback: false };
+ }
+ if (vectorDB instanceof QdrantDB4) {
+ const chunks = await vectorDB.scanCrossRepo({ limit: 1e5, repoIds });
+ log(`Scanned ${chunks.length} chunks across repos`);
+ return { chunks, fallback: false };
+ }
+ return { chunks: [], fallback: true };
+ }
+ function processViolations(report, threshold, top) {
+ const allViolations = (0, import_collect.default)(Object.entries(report.files)).flatMap(
+ ([, fileData]) => fileData.violations.map((v) => transformViolation(v, fileData))
+ ).sortByDesc("complexity").all();
+ const violations = threshold !== void 0 ? allViolations.filter((v) => v.complexity >= threshold) : allViolations;
+ const severityCounts = (0, import_collect.default)(violations).countBy("severity").all();
+ return {
+ violations,
+ topViolations: violations.slice(0, top),
+ bySeverity: {
+ error: severityCounts["error"] || 0,
+ warning: severityCounts["warning"] || 0
+ }
+ };
+ }
+ function buildCrossRepoFallbackNote(fallback) {
+ return fallback ? "Cross-repo analysis requires Qdrant backend. Fell back to single-repo analysis." : void 0;
+ }
  async function handleGetComplexity(args, ctx) {
  const { vectorDB, log, checkAndReconnect, getIndexMetadata } = ctx;
  return await wrapToolHandler(
@@ -9184,23 +9466,20 @@ async function handleGetComplexity(args, ctx) {
  const { crossRepo, repoIds, files, top, threshold } = validatedArgs;
  log(`Analyzing complexity${crossRepo ? " (cross-repo)" : ""}...`);
  await checkAndReconnect();
- let allChunks = [];
- if (crossRepo && vectorDB instanceof QdrantDB4) {
- allChunks = await vectorDB.scanCrossRepo({
- limit: 1e5,
- repoIds
- });
- log(`Scanned ${allChunks.length} chunks across repos`);
- }
+ const { chunks: allChunks, fallback } = await fetchCrossRepoChunks(
+ vectorDB,
+ crossRepo,
+ repoIds,
+ log
+ );
  const analyzer = new ComplexityAnalyzer(vectorDB);
- const report = await analyzer.analyze(files, crossRepo && vectorDB instanceof QdrantDB4 ? crossRepo : false, repoIds);
+ const report = await analyzer.analyze(files, crossRepo && !fallback, repoIds);
  log(`Analyzed ${report.summary.filesAnalyzed} files`);
- const allViolations = (0, import_collect.default)(Object.entries(report.files)).flatMap(
- ([, fileData]) => fileData.violations.map((v) => transformViolation(v, fileData))
- ).sortByDesc("complexity").all();
- const violations = threshold !== void 0 ? allViolations.filter((v) => v.complexity >= threshold) : allViolations;
- const topViolations = violations.slice(0, top);
- const bySeverity = (0, import_collect.default)(violations).countBy("severity").all();
+ const { violations, topViolations, bySeverity } = processViolations(
+ report,
+ threshold,
+ top ?? 10
+ );
  const response = {
  indexInfo: getIndexMetadata(),
  summary: {
@@ -9208,17 +9487,17 @@ async function handleGetComplexity(args, ctx) {
  avgComplexity: report.summary.avgComplexity,
  maxComplexity: report.summary.maxComplexity,
  violationCount: violations.length,
- bySeverity: {
- error: bySeverity["error"] || 0,
- warning: bySeverity["warning"] || 0
- }
+ bySeverity
  },
  violations: topViolations
  };
- if (crossRepo && vectorDB instanceof QdrantDB4 && allChunks.length > 0) {
+ if (crossRepo && !fallback && allChunks.length > 0) {
  response.groupedByRepo = groupViolationsByRepo(topViolations, allChunks);
- } else if (crossRepo) {
+ }
+ const note = buildCrossRepoFallbackNote(fallback);
+ if (note) {
  log("Warning: crossRepo=true requires Qdrant backend. Falling back to single-repo analysis.", "warning");
+ response.note = note;
  }
  return response;
  }
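
processViolations keeps the existing collect.js pipeline (sortByDesc, countBy) but makes the threshold filter and severity tally reusable. A standalone sketch of the same pipeline with illustrative data:

const collect = require("collect.js");

const all = collect([
  { fn: "parse", complexity: 22, severity: "error" },
  { fn: "render", complexity: 12, severity: "warning" }
]).sortByDesc("complexity").all();

const threshold = 15;
const violations = all.filter((v) => v.complexity >= threshold);
const bySeverity = collect(violations).countBy("severity").all();
// -> violations: [{ fn: "parse", ... }], bySeverity: { error: 1 }
// handleGetComplexity now passes `top ?? 10`, so the slice has an explicit default.
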