@liendev/lien 0.25.0 → 0.26.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -8565,12 +8565,14 @@ async function handleSemanticSearch(args, ctx) {
8565
8565
  await checkAndReconnect();
8566
8566
  const queryEmbedding = await embeddings.embed(query);
8567
8567
  let results;
8568
+ let crossRepoFallback = false;
8568
8569
  if (crossRepo && vectorDB instanceof QdrantDB) {
8569
8570
  results = await vectorDB.searchCrossRepo(queryEmbedding, limit, { repoIds });
8570
8571
  log(`Found ${results.length} results across ${Object.keys(groupResultsByRepo(results)).length} repos`);
8571
8572
  } else {
8572
8573
  if (crossRepo) {
8573
8574
  log("Warning: crossRepo=true requires Qdrant backend. Falling back to single-repo search.");
8575
+ crossRepoFallback = true;
8574
8576
  }
8575
8577
  results = await vectorDB.search(queryEmbedding, limit, query);
8576
8578
  log(`Found ${results.length} results`);
@@ -8582,6 +8584,9 @@ async function handleSemanticSearch(args, ctx) {
8582
8584
  if (crossRepo && vectorDB instanceof QdrantDB) {
8583
8585
  response.groupedByRepo = groupResultsByRepo(results);
8584
8586
  }
8587
+ if (crossRepoFallback) {
8588
+ response.note = "Cross-repo search requires Qdrant backend. Fell back to single-repo search.";
8589
+ }
8585
8590
  return response;
8586
8591
  }
8587
8592
  )(args);
@@ -8784,6 +8789,25 @@ function buildFilesData(filepaths, fileChunksMap, relatedChunksMap, testAssociat
8784
8789
  });
8785
8790
  return filesData;
8786
8791
  }
8792
/**
 * Builds the user-facing warning note emitted when the chunk scan stopped at
 * its limit, meaning test associations may be missing for large codebases.
 *
 * The limit was previously hard-coded as "10,000" in the message text even
 * though the actual scan limit is a constant elsewhere; it is now an optional
 * parameter so the message always reflects the real limit.
 *
 * @param {boolean} hitScanLimit - Whether the scan stopped because the limit was reached.
 * @param {number} [scanLimit=10000] - The scan limit to report in the message.
 * @returns {string|undefined} The warning text, or undefined when the limit was not hit.
 */
function buildScanLimitNote(hitScanLimit, scanLimit = 10000) {
  if (!hitScanLimit) {
    return void 0;
  }
  // en-US digit grouping reproduces the original hard-coded "10,000" rendering.
  const formattedLimit = scanLimit.toLocaleString("en-US");
  return `Scanned ${formattedLimit} chunks (limit reached). Test associations may be incomplete for large codebases.`;
}
8795
/**
 * Assembles the response payload for a single-file context request.
 *
 * @param {string} filepath - The file whose context was requested.
 * @param {Object} filesData - Map of filepath -> { chunks, testAssociations }; must contain filepath.
 * @param {Object} indexInfo - Index metadata to echo back to the caller.
 * @param {string|undefined} note - Optional warning note; only attached when truthy.
 * @returns {Object} Response with indexInfo, file, chunks, testAssociations, and an optional note.
 */
function buildSingleFileResponse(filepath, filesData, indexInfo, note) {
  const fileData = filesData[filepath];
  const response = {
    indexInfo,
    file: filepath,
    chunks: fileData.chunks,
    testAssociations: fileData.testAssociations
  };
  if (note) {
    response.note = note;
  }
  return response;
}
8804
/**
 * Assembles the response payload for a multi-file context request.
 *
 * @param {Object} filesData - Map of filepath -> per-file context data, returned as-is.
 * @param {Object} indexInfo - Index metadata to echo back to the caller.
 * @param {string|undefined} note - Optional warning note; only attached when truthy.
 * @returns {Object} Response with indexInfo, files, and an optional note.
 */
function buildMultiFileResponse(filesData, indexInfo, note) {
  const response = {
    indexInfo,
    files: filesData
  };
  if (note) {
    response.note = note;
  }
  return response;
}
8787
8811
  async function handleGetFilesContext(args, ctx) {
8788
8812
  const { vectorDB, embeddings, log, checkAndReconnect, getIndexMetadata } = ctx;
8789
8813
  return await wrapToolHandler(
@@ -8810,7 +8834,8 @@ async function handleGetFilesContext(args, ctx) {
8810
8834
  );
8811
8835
  }
8812
8836
  const allChunks = await vectorDB.scanWithFilter({ limit: SCAN_LIMIT });
8813
- if (allChunks.length === SCAN_LIMIT) {
8837
+ const hitScanLimit = allChunks.length === SCAN_LIMIT;
8838
+ if (hitScanLimit) {
8814
8839
  log(
8815
8840
  `Scanned ${SCAN_LIMIT} chunks (limit reached). Test associations may be incomplete for large codebases.`,
8816
8841
  "warning"
@@ -8833,20 +8858,9 @@ async function handleGetFilesContext(args, ctx) {
8833
8858
  0
8834
8859
  );
8835
8860
  log(`Found ${totalChunks} total chunks`);
8836
- if (isSingleFile) {
8837
- const filepath = filepaths[0];
8838
- return {
8839
- indexInfo: getIndexMetadata(),
8840
- file: filepath,
8841
- chunks: filesData[filepath].chunks,
8842
- testAssociations: filesData[filepath].testAssociations
8843
- };
8844
- } else {
8845
- return {
8846
- indexInfo: getIndexMetadata(),
8847
- files: filesData
8848
- };
8849
- }
8861
+ const note = buildScanLimitNote(hitScanLimit);
8862
+ const indexInfo = getIndexMetadata();
8863
+ return isSingleFile ? buildSingleFileResponse(filepaths[0], filesData, indexInfo, note) : buildMultiFileResponse(filesData, indexInfo, note);
8850
8864
  }
8851
8865
  )(args);
8852
8866
  }
@@ -9122,6 +9136,14 @@ async function handleGetDependents(args, ctx) {
9122
9136
  log(
9123
9137
  `Found ${analysis.dependents.length} dependent files (risk: ${riskLevel}${analysis.complexityMetrics.filesWithComplexityData > 0 ? ", complexity-boosted" : ""})`
9124
9138
  );
9139
+ const notes = [];
9140
+ const crossRepoFallback = crossRepo && !(vectorDB instanceof QdrantDB3);
9141
+ if (crossRepoFallback) {
9142
+ notes.push("Cross-repo search requires Qdrant backend. Fell back to single-repo search.");
9143
+ }
9144
+ if (analysis.hitLimit) {
9145
+ notes.push("Scanned 10,000 chunks (limit reached). Results may be incomplete.");
9146
+ }
9125
9147
  const response = {
9126
9148
  indexInfo: getIndexMetadata(),
9127
9149
  filepath: validatedArgs.filepath,
@@ -9129,7 +9151,7 @@ async function handleGetDependents(args, ctx) {
9129
9151
  riskLevel,
9130
9152
  dependents: analysis.dependents,
9131
9153
  complexityMetrics: analysis.complexityMetrics,
9132
- note: analysis.hitLimit ? `Warning: Scanned 10000 chunks (limit reached). Results may be incomplete.` : void 0
9154
+ ...notes.length > 0 && { note: notes.join(" ") }
9133
9155
  };
9134
9156
  if (crossRepo && vectorDB instanceof QdrantDB3) {
9135
9157
  response.groupedByRepo = groupDependentsByRepo(analysis.dependents, analysis.allChunks);
@@ -9176,6 +9198,35 @@ function groupViolationsByRepo(violations, allChunks) {
9176
9198
  }
9177
9199
  return grouped;
9178
9200
  }
9201
/**
 * Retrieves chunks across repositories when cross-repo mode is requested.
 *
 * Returns no chunks (fallback: false) when crossRepo is disabled, a full
 * cross-repo scan (fallback: false) when the backend is Qdrant, and an empty
 * result flagged fallback: true when cross-repo was asked for but the backend
 * does not support it.
 *
 * @param {Object} vectorDB - The vector database backend.
 * @param {boolean} crossRepo - Whether cross-repo scanning was requested.
 * @param {string[]|undefined} repoIds - Optional repo filter passed to the scan.
 * @param {Function} log - Logger for progress messages.
 * @returns {Promise<{chunks: Array, fallback: boolean}>}
 */
async function fetchCrossRepoChunks(vectorDB, crossRepo, repoIds, log) {
  if (!crossRepo) {
    return { chunks: [], fallback: false };
  }
  const isQdrant = vectorDB instanceof QdrantDB4;
  if (!isQdrant) {
    // Cross-repo was requested but the backend cannot serve it.
    return { chunks: [], fallback: true };
  }
  const scanned = await vectorDB.scanCrossRepo({ limit: 1e5, repoIds });
  log(`Scanned ${scanned.length} chunks across repos`);
  return { chunks: scanned, fallback: false };
}
9212
/**
 * Flattens per-file complexity violations into a single ranked list and
 * summarizes them.
 *
 * Violations from every file are transformed via transformViolation, sorted
 * by descending complexity, optionally filtered by a minimum complexity
 * threshold, and counted by severity.
 *
 * @param {Object} report - Analyzer report with a `files` map of { violations, ... }.
 * @param {number|undefined} threshold - Minimum complexity to keep; undefined keeps all.
 * @param {number} top - How many of the highest-complexity violations to surface.
 * @returns {{violations: Array, topViolations: Array, bySeverity: {error: number, warning: number}}}
 */
function processViolations(report, threshold, top) {
  // Bind the collect.js factory once; the original's (0, fn)(...) comma call
  // only served to strip `this`, which a plain local binding also does.
  const collect = import_collect.default;
  const fileEntries = Object.entries(report.files);
  const ranked = collect(fileEntries)
    .flatMap(([, fileData]) => fileData.violations.map((violation) => transformViolation(violation, fileData)))
    .sortByDesc("complexity")
    .all();
  let violations = ranked;
  if (threshold !== void 0) {
    violations = ranked.filter((entry) => entry.complexity >= threshold);
  }
  const severityCounts = collect(violations).countBy("severity").all();
  return {
    violations,
    topViolations: violations.slice(0, top),
    bySeverity: {
      error: severityCounts.error || 0,
      warning: severityCounts.warning || 0
    }
  };
}
9227
/**
 * Returns the note shown when cross-repo analysis was requested but the
 * vector backend could not serve it.
 *
 * @param {boolean} fallback - Whether the single-repo fallback was taken.
 * @returns {string|undefined} The fallback note, or undefined when no fallback occurred.
 */
function buildCrossRepoFallbackNote(fallback) {
  if (!fallback) {
    return void 0;
  }
  return "Cross-repo analysis requires Qdrant backend. Fell back to single-repo analysis.";
}
9179
9230
  async function handleGetComplexity(args, ctx) {
9180
9231
  const { vectorDB, log, checkAndReconnect, getIndexMetadata } = ctx;
9181
9232
  return await wrapToolHandler(
@@ -9184,23 +9235,20 @@ async function handleGetComplexity(args, ctx) {
9184
9235
  const { crossRepo, repoIds, files, top, threshold } = validatedArgs;
9185
9236
  log(`Analyzing complexity${crossRepo ? " (cross-repo)" : ""}...`);
9186
9237
  await checkAndReconnect();
9187
- let allChunks = [];
9188
- if (crossRepo && vectorDB instanceof QdrantDB4) {
9189
- allChunks = await vectorDB.scanCrossRepo({
9190
- limit: 1e5,
9191
- repoIds
9192
- });
9193
- log(`Scanned ${allChunks.length} chunks across repos`);
9194
- }
9238
+ const { chunks: allChunks, fallback } = await fetchCrossRepoChunks(
9239
+ vectorDB,
9240
+ crossRepo,
9241
+ repoIds,
9242
+ log
9243
+ );
9195
9244
  const analyzer = new ComplexityAnalyzer(vectorDB);
9196
- const report = await analyzer.analyze(files, crossRepo && vectorDB instanceof QdrantDB4 ? crossRepo : false, repoIds);
9245
+ const report = await analyzer.analyze(files, crossRepo && !fallback, repoIds);
9197
9246
  log(`Analyzed ${report.summary.filesAnalyzed} files`);
9198
- const allViolations = (0, import_collect.default)(Object.entries(report.files)).flatMap(
9199
- ([, fileData]) => fileData.violations.map((v) => transformViolation(v, fileData))
9200
- ).sortByDesc("complexity").all();
9201
- const violations = threshold !== void 0 ? allViolations.filter((v) => v.complexity >= threshold) : allViolations;
9202
- const topViolations = violations.slice(0, top);
9203
- const bySeverity = (0, import_collect.default)(violations).countBy("severity").all();
9247
+ const { violations, topViolations, bySeverity } = processViolations(
9248
+ report,
9249
+ threshold,
9250
+ top ?? 10
9251
+ );
9204
9252
  const response = {
9205
9253
  indexInfo: getIndexMetadata(),
9206
9254
  summary: {
@@ -9208,17 +9256,17 @@ async function handleGetComplexity(args, ctx) {
9208
9256
  avgComplexity: report.summary.avgComplexity,
9209
9257
  maxComplexity: report.summary.maxComplexity,
9210
9258
  violationCount: violations.length,
9211
- bySeverity: {
9212
- error: bySeverity["error"] || 0,
9213
- warning: bySeverity["warning"] || 0
9214
- }
9259
+ bySeverity
9215
9260
  },
9216
9261
  violations: topViolations
9217
9262
  };
9218
- if (crossRepo && vectorDB instanceof QdrantDB4 && allChunks.length > 0) {
9263
+ if (crossRepo && !fallback && allChunks.length > 0) {
9219
9264
  response.groupedByRepo = groupViolationsByRepo(topViolations, allChunks);
9220
- } else if (crossRepo) {
9265
+ }
9266
+ const note = buildCrossRepoFallbackNote(fallback);
9267
+ if (note) {
9221
9268
  log("Warning: crossRepo=true requires Qdrant backend. Falling back to single-repo analysis.", "warning");
9269
+ response.note = note;
9222
9270
  }
9223
9271
  return response;
9224
9272
  }