@liendev/lien 0.16.0 → 0.17.0

This diff shows the changes between publicly available package versions as they were published to their respective supported registries. It is provided for informational purposes only.
@@ -17,6 +17,7 @@ You have access to Lien semantic search tools. USE THEM INSTEAD OF grep/ripgrep/
17
17
  | "Find all Controllers" | `list_functions` | grep |
18
18
  | Edit a file | `get_files_context` FIRST | direct edit |
19
19
  | Find similar code | `find_similar` | manual search |
20
+ | "What depends on this file?" | `get_dependents` | manual grep |
20
21
 
21
22
  ## Before ANY Code Change
22
23
 
@@ -35,9 +36,11 @@ REQUIRED sequence:
35
36
 
36
37
  **`get_files_context({ filepaths: "path/to/file.ts" })`** or **`get_files_context({ filepaths: ["file1.ts", "file2.ts"] })`**
37
38
  - MANDATORY before editing any file
38
- - Returns `testAssociations`: which tests cover this file
39
+ - Returns `testAssociations`: which tests import/cover this file (reverse dependency lookup)
39
40
  - Shows file dependencies and relationships
40
41
  - Accepts single filepath or array of filepaths for batch operations
42
+ - Single file returns: `{ file: string, chunks: [], testAssociations: [] }`
43
+ - Multiple files returns: `{ files: { [path]: { chunks: [], testAssociations: [] } } }`
41
44
 
42
45
  **`list_functions({ pattern: ".*Controller.*" })`**
43
46
  - Fast symbol lookup by naming pattern
@@ -48,11 +51,30 @@ REQUIRED sequence:
48
51
  - Find similar implementations for consistency
49
52
  - Use when refactoring or detecting duplication
50
53
 
54
+ **`get_dependents({ filepath: "path/to/file.ts", depth: 1 })`**
55
+ - Find all files that import/depend on a target file
56
+ - Use for impact analysis: "What breaks if I change this?"
57
+ - Returns risk level (low/medium/high/critical) based on:
58
+ - Dependency count (how many files import it)
59
+ - Complexity metrics (how complex the dependent code is)
60
+ - Highlights top 5 most complex dependents when complexity data available
61
+
51
62
  ## Test Associations
52
63
 
53
- `get_files_context` returns `testAssociations` showing which tests cover the file.
54
- ALWAYS check this before modifying source code.
55
- After changes, remind the user: "This file is covered by [test files] - run these to verify."
64
+ `get_files_context` returns `testAssociations` showing which tests import/cover the file.
65
+ - Uses reverse dependency lookup to find test files that import the source file
66
+ - Returns array of test file paths for each requested file
67
+ - ALWAYS check this before modifying source code
68
+ - After changes, remind the user: "This file is covered by [test files] - run these to verify."
69
+
70
+ Example:
71
+ ```typescript
72
+ get_files_context({ filepaths: "src/auth.ts" })
73
+ // Returns: { file: "src/auth.ts", chunks: [...], testAssociations: ["src/__tests__/auth.test.ts"] }
74
+
75
+ get_files_context({ filepaths: ["src/auth.ts", "src/user.ts"] })
76
+ // Returns: { files: { "src/auth.ts": { chunks: [...], testAssociations: [...] }, ... } }
77
+ ```
56
78
 
57
79
  ## Workflow Patterns
58
80
 
@@ -73,6 +95,15 @@ After changes, remind the user: "This file is covered by [test files] - run thes
73
95
  5. Tell user which tests to run
74
96
  ```
75
97
 
98
+ ### Pattern 3: Impact Analysis Before Refactoring
99
+ ```
100
+ 1. get_dependents({ filepath: "target/file.ts" })
101
+ 2. Review risk level and dependent count
102
+ 3. Check highComplexityDependents (if any)
103
+ 4. Use get_files_context on high-risk dependents
104
+ 5. Plan refactoring strategy based on impact
105
+ ```
106
+
76
107
  ## Query Construction
77
108
 
78
109
  ### Good Queries (DO THIS)
package/dist/index.js CHANGED
@@ -5688,6 +5688,17 @@ var ListFunctionsSchema = z4.object({
5688
5688
  )
5689
5689
  });
5690
5690
 
5691
+ // src/mcp/schemas/dependents.schema.ts
5692
+ import { z as z5 } from "zod";
5693
+ var GetDependentsSchema = z5.object({
5694
+ filepath: z5.string().min(1, "Filepath cannot be empty").describe(
5695
+ "Path to file to find dependents for (relative to workspace root).\n\nExample: 'src/utils/validate.ts'\n\nReturns all files that import or depend on this file.\n\nNote: Scans up to 10,000 code chunks. For very large codebases,\nresults may be incomplete (a warning will be included if truncated)."
5696
+ ),
5697
+ depth: z5.number().int().min(1).max(1).default(1).describe(
5698
+ "Depth of transitive dependencies. Only depth=1 (direct dependents) is currently supported.\n\n1 = Direct dependents only"
5699
+ )
5700
+ });
5701
+
5691
5702
  // src/mcp/tools.ts
5692
5703
  var tools = [
5693
5704
  toMCPToolSchema(
@@ -5722,15 +5733,39 @@ MANDATORY: Call this BEFORE editing any file. Accepts single path or array of pa
5722
5733
 
5723
5734
  Single file:
5724
5735
  get_files_context({ filepaths: "src/auth.ts" })
5736
+
5737
+ Returns:
5738
+ {
5739
+ file: "src/auth.ts",
5740
+ chunks: [...],
5741
+ testAssociations: ["src/__tests__/auth.test.ts"]
5742
+ }
5725
5743
 
5726
5744
  Multiple files (batch):
5727
5745
  get_files_context({ filepaths: ["src/auth.ts", "src/user.ts"] })
5746
+
5747
+ Returns:
5748
+ {
5749
+ files: {
5750
+ "src/auth.ts": {
5751
+ chunks: [...],
5752
+ testAssociations: ["src/__tests__/auth.test.ts"]
5753
+ },
5754
+ "src/user.ts": {
5755
+ chunks: [...],
5756
+ testAssociations: ["src/__tests__/user.test.ts"]
5757
+ }
5758
+ }
5759
+ }
5728
5760
 
5729
5761
  Returns for each file:
5730
5762
  - All chunks and related code
5731
- - testAssociations (which tests cover this file)
5763
+ - testAssociations: Array of test files that import this file (reverse dependency lookup)
5732
5764
  - Relevance scoring
5733
5765
 
5766
+ ALWAYS check testAssociations before modifying source code.
5767
+ After changes, remind the user to run the associated tests.
5768
+
5734
5769
  Batch calls are more efficient than multiple single-file calls.`
5735
5770
  ),
5736
5771
  toMCPToolSchema(
@@ -5743,6 +5778,20 @@ Examples:
5743
5778
  - "Find service classes" \u2192 list_functions({ pattern: ".*Service$" })
5744
5779
 
5745
5780
  10x faster than semantic_search for structural/architectural queries. Use semantic_search instead when searching by what code DOES.`
5781
+ ),
5782
+ toMCPToolSchema(
5783
+ GetDependentsSchema,
5784
+ "get_dependents",
5785
+ `Find all code that depends on a file (reverse dependency lookup). Use for impact analysis:
5786
+ - "What breaks if I change this?"
5787
+ - "Is this safe to delete?"
5788
+ - "What imports this module?"
5789
+
5790
+ Returns:
5791
+ - List of files that import the target
5792
+ - Risk level (low/medium/high/critical) based on dependent count and complexity
5793
+
5794
+ Example: get_dependents({ filepath: "src/utils/validate.ts" })`
5746
5795
  )
5747
5796
  ];
5748
5797
 
@@ -5942,6 +5991,50 @@ function wrapToolHandler(schema, handler) {
5942
5991
  };
5943
5992
  }
5944
5993
 
5994
+ // src/mcp/utils/path-matching.ts
5995
+ function normalizePath(path18, workspaceRoot) {
5996
+ let normalized = path18.replace(/['"]/g, "").trim().replace(/\\/g, "/");
5997
+ normalized = normalized.replace(/\.(ts|tsx|js|jsx)$/, "");
5998
+ if (normalized.startsWith(workspaceRoot + "/")) {
5999
+ normalized = normalized.substring(workspaceRoot.length + 1);
6000
+ }
6001
+ return normalized;
6002
+ }
6003
+ function matchesAtBoundary(str, pattern) {
6004
+ const index = str.indexOf(pattern);
6005
+ if (index === -1) return false;
6006
+ const charBefore = index > 0 ? str[index - 1] : "/";
6007
+ if (charBefore !== "/" && index !== 0) return false;
6008
+ const endIndex = index + pattern.length;
6009
+ if (endIndex === str.length) return true;
6010
+ const charAfter = str[endIndex];
6011
+ return charAfter === "/";
6012
+ }
6013
+ function matchesFile(normalizedImport, normalizedTarget) {
6014
+ if (normalizedImport === normalizedTarget) return true;
6015
+ if (matchesAtBoundary(normalizedImport, normalizedTarget)) {
6016
+ return true;
6017
+ }
6018
+ if (matchesAtBoundary(normalizedTarget, normalizedImport)) {
6019
+ return true;
6020
+ }
6021
+ const cleanedImport = normalizedImport.replace(/^(\.\.?\/)+/, "");
6022
+ if (matchesAtBoundary(cleanedImport, normalizedTarget) || matchesAtBoundary(normalizedTarget, cleanedImport)) {
6023
+ return true;
6024
+ }
6025
+ return false;
6026
+ }
6027
+ function getCanonicalPath(filepath, workspaceRoot) {
6028
+ let canonical = filepath.replace(/\\/g, "/");
6029
+ if (canonical.startsWith(workspaceRoot + "/")) {
6030
+ canonical = canonical.substring(workspaceRoot.length + 1);
6031
+ }
6032
+ return canonical;
6033
+ }
6034
+ function isTestFile2(filepath) {
6035
+ return /\.(test|spec)\.[^/]+$/.test(filepath) || /(^|[/\\])(test|tests|__tests__)[/\\]/.test(filepath);
6036
+ }
6037
+
5945
6038
  // src/mcp/server.ts
5946
6039
  init_errors();
5947
6040
  var __filename4 = fileURLToPath4(import.meta.url);
@@ -5953,6 +6046,31 @@ try {
5953
6046
  } catch {
5954
6047
  packageJson3 = require4(join3(__dirname4, "../../package.json"));
5955
6048
  }
6049
+ var DEPENDENT_COUNT_THRESHOLDS = {
6050
+ LOW: 5,
6051
+ // Few dependents, safe to change
6052
+ MEDIUM: 15,
6053
+ // Moderate impact, review dependents
6054
+ HIGH: 30
6055
+ // High impact, careful planning needed
6056
+ };
6057
+ var COMPLEXITY_THRESHOLDS = {
6058
+ HIGH_COMPLEXITY_DEPENDENT: 10,
6059
+ // Individual file is complex
6060
+ CRITICAL_AVG: 15,
6061
+ // Average complexity indicates systemic complexity
6062
+ CRITICAL_MAX: 25,
6063
+ // Peak complexity indicates hotspot
6064
+ HIGH_AVG: 10,
6065
+ // Moderately complex on average
6066
+ HIGH_MAX: 20,
6067
+ // Some complex functions exist
6068
+ MEDIUM_AVG: 6,
6069
+ // Slightly above simple code
6070
+ MEDIUM_MAX: 15
6071
+ // Occasional branching
6072
+ };
6073
+ var SCAN_LIMIT = 1e4;
5956
6074
  async function startMCPServer(options) {
5957
6075
  const { rootDir, verbose, watch } = options;
5958
6076
  const log = (message) => {
@@ -6049,6 +6167,7 @@ async function startMCPServer(options) {
6049
6167
  const isSingleFile = !Array.isArray(validatedArgs.filepaths);
6050
6168
  log(`Getting context for: ${filepaths.join(", ")}`);
6051
6169
  await checkAndReconnect();
6170
+ const workspaceRoot = process.cwd().replace(/\\/g, "/");
6052
6171
  const fileEmbeddings = await Promise.all(filepaths.map((fp) => embeddings.embed(fp)));
6053
6172
  const allFileSearches = await Promise.all(
6054
6173
  fileEmbeddings.map(
@@ -6057,9 +6176,11 @@ async function startMCPServer(options) {
6057
6176
  );
6058
6177
  const fileChunksMap = filepaths.map((filepath, i) => {
6059
6178
  const allResults = allFileSearches[i];
6060
- return allResults.filter(
6061
- (r) => r.metadata.file.includes(filepath) || filepath.includes(r.metadata.file)
6062
- );
6179
+ const targetCanonical = getCanonicalPath(filepath, workspaceRoot);
6180
+ return allResults.filter((r) => {
6181
+ const chunkCanonical = getCanonicalPath(r.metadata.file, workspaceRoot);
6182
+ return chunkCanonical === targetCanonical;
6183
+ });
6063
6184
  });
6064
6185
  let relatedChunksMap = [];
6065
6186
  if (validatedArgs.includeRelated) {
@@ -6076,18 +6197,57 @@ async function startMCPServer(options) {
6076
6197
  relatedChunksMap = Array.from({ length: filepaths.length }, () => []);
6077
6198
  filesWithChunks.forEach(({ filepath, index }, i) => {
6078
6199
  const related = relatedSearches[i];
6079
- relatedChunksMap[index] = related.filter(
6080
- (r) => !r.metadata.file.includes(filepath) && !filepath.includes(r.metadata.file)
6081
- );
6200
+ const targetCanonical = getCanonicalPath(filepath, workspaceRoot);
6201
+ relatedChunksMap[index] = related.filter((r) => {
6202
+ const chunkCanonical = getCanonicalPath(r.metadata.file, workspaceRoot);
6203
+ return chunkCanonical !== targetCanonical;
6204
+ });
6082
6205
  });
6083
6206
  }
6084
6207
  }
6208
+ const allChunks = await vectorDB.scanWithFilter({ limit: SCAN_LIMIT });
6209
+ if (allChunks.length === SCAN_LIMIT) {
6210
+ log(`WARNING: Scanned ${SCAN_LIMIT} chunks (limit reached). Test associations may be incomplete for large codebases.`);
6211
+ }
6212
+ const pathCache = /* @__PURE__ */ new Map();
6213
+ const normalizePathCached = (path18) => {
6214
+ if (pathCache.has(path18)) return pathCache.get(path18);
6215
+ const normalized = normalizePath(path18, workspaceRoot);
6216
+ pathCache.set(path18, normalized);
6217
+ return normalized;
6218
+ };
6219
+ const testAssociationsMap = filepaths.map((filepath) => {
6220
+ const normalizedTarget = normalizePathCached(filepath);
6221
+ const testFiles = /* @__PURE__ */ new Set();
6222
+ for (const chunk of allChunks) {
6223
+ const chunkFile2 = getCanonicalPath(chunk.metadata.file, workspaceRoot);
6224
+ if (!isTestFile2(chunkFile2)) continue;
6225
+ const imports = chunk.metadata.imports || [];
6226
+ for (const imp of imports) {
6227
+ const normalizedImport = normalizePathCached(imp);
6228
+ if (matchesFile(normalizedImport, normalizedTarget)) {
6229
+ testFiles.add(chunkFile2);
6230
+ break;
6231
+ }
6232
+ }
6233
+ }
6234
+ return Array.from(testFiles);
6235
+ });
6085
6236
  const filesData = {};
6086
6237
  filepaths.forEach((filepath, i) => {
6087
6238
  const fileChunks = fileChunksMap[i];
6088
6239
  const relatedChunks = relatedChunksMap[i] || [];
6240
+ const seenChunks = /* @__PURE__ */ new Set();
6241
+ const dedupedChunks = [...fileChunks, ...relatedChunks].filter((chunk) => {
6242
+ const canonicalFile = getCanonicalPath(chunk.metadata.file, workspaceRoot);
6243
+ const chunkId = `${canonicalFile}:${chunk.metadata.startLine}-${chunk.metadata.endLine}`;
6244
+ if (seenChunks.has(chunkId)) return false;
6245
+ seenChunks.add(chunkId);
6246
+ return true;
6247
+ });
6089
6248
  filesData[filepath] = {
6090
- chunks: [...fileChunks, ...relatedChunks]
6249
+ chunks: dedupedChunks,
6250
+ testAssociations: testAssociationsMap[i]
6091
6251
  };
6092
6252
  });
6093
6253
  log(`Found ${Object.values(filesData).reduce((sum, f) => sum + f.chunks.length, 0)} total chunks`);
@@ -6096,7 +6256,8 @@ async function startMCPServer(options) {
6096
6256
  return {
6097
6257
  indexInfo: getIndexMetadata(),
6098
6258
  file: filepath,
6099
- chunks: filesData[filepath].chunks
6259
+ chunks: filesData[filepath].chunks,
6260
+ testAssociations: filesData[filepath].testAssociations
6100
6261
  };
6101
6262
  } else {
6102
6263
  return {
@@ -6147,6 +6308,144 @@ async function startMCPServer(options) {
6147
6308
  };
6148
6309
  }
6149
6310
  )(args);
6311
+ case "get_dependents":
6312
+ return await wrapToolHandler(
6313
+ GetDependentsSchema,
6314
+ async (validatedArgs) => {
6315
+ log(`Finding dependents of: ${validatedArgs.filepath}`);
6316
+ await checkAndReconnect();
6317
+ const allChunks = await vectorDB.scanWithFilter({ limit: SCAN_LIMIT });
6318
+ if (allChunks.length === SCAN_LIMIT) {
6319
+ log(`WARNING: Scanned ${SCAN_LIMIT} chunks (limit reached). Results may be incomplete for large codebases.`);
6320
+ }
6321
+ log(`Scanning ${allChunks.length} chunks for imports...`);
6322
+ const workspaceRoot = process.cwd().replace(/\\/g, "/");
6323
+ const pathCache = /* @__PURE__ */ new Map();
6324
+ const normalizePathCached = (path18) => {
6325
+ if (pathCache.has(path18)) return pathCache.get(path18);
6326
+ const normalized = normalizePath(path18, workspaceRoot);
6327
+ pathCache.set(path18, normalized);
6328
+ return normalized;
6329
+ };
6330
+ const importIndex = /* @__PURE__ */ new Map();
6331
+ for (const chunk of allChunks) {
6332
+ const imports = chunk.metadata.imports || [];
6333
+ for (const imp of imports) {
6334
+ const normalizedImport = normalizePathCached(imp);
6335
+ if (!importIndex.has(normalizedImport)) {
6336
+ importIndex.set(normalizedImport, []);
6337
+ }
6338
+ importIndex.get(normalizedImport).push(chunk);
6339
+ }
6340
+ }
6341
+ const normalizedTarget = normalizePathCached(validatedArgs.filepath);
6342
+ const dependentChunks = [];
6343
+ const seenChunkIds = /* @__PURE__ */ new Set();
6344
+ if (importIndex.has(normalizedTarget)) {
6345
+ for (const chunk of importIndex.get(normalizedTarget)) {
6346
+ const chunkId = `${chunk.metadata.file}:${chunk.metadata.startLine}-${chunk.metadata.endLine}`;
6347
+ if (!seenChunkIds.has(chunkId)) {
6348
+ dependentChunks.push(chunk);
6349
+ seenChunkIds.add(chunkId);
6350
+ }
6351
+ }
6352
+ }
6353
+ for (const [normalizedImport, chunks] of importIndex.entries()) {
6354
+ if (normalizedImport !== normalizedTarget && matchesFile(normalizedImport, normalizedTarget)) {
6355
+ for (const chunk of chunks) {
6356
+ const chunkId = `${chunk.metadata.file}:${chunk.metadata.startLine}-${chunk.metadata.endLine}`;
6357
+ if (!seenChunkIds.has(chunkId)) {
6358
+ dependentChunks.push(chunk);
6359
+ seenChunkIds.add(chunkId);
6360
+ }
6361
+ }
6362
+ }
6363
+ }
6364
+ const chunksByFile = /* @__PURE__ */ new Map();
6365
+ for (const chunk of dependentChunks) {
6366
+ const canonical = getCanonicalPath(chunk.metadata.file, workspaceRoot);
6367
+ const existing = chunksByFile.get(canonical) || [];
6368
+ existing.push(chunk);
6369
+ chunksByFile.set(canonical, existing);
6370
+ }
6371
+ const fileComplexities = [];
6372
+ for (const [filepath, chunks] of chunksByFile.entries()) {
6373
+ const complexities = chunks.map((c) => c.metadata.complexity).filter((c) => typeof c === "number" && c > 0);
6374
+ if (complexities.length > 0) {
6375
+ const sum = complexities.reduce((a, b) => a + b, 0);
6376
+ const avg = sum / complexities.length;
6377
+ const max = Math.max(...complexities);
6378
+ fileComplexities.push({
6379
+ filepath,
6380
+ avgComplexity: Math.round(avg * 10) / 10,
6381
+ // Round to 1 decimal
6382
+ maxComplexity: max,
6383
+ complexityScore: sum,
6384
+ chunksWithComplexity: complexities.length
6385
+ });
6386
+ }
6387
+ }
6388
+ let complexityMetrics;
6389
+ if (fileComplexities.length > 0) {
6390
+ const allAvgs = fileComplexities.map((f) => f.avgComplexity);
6391
+ const allMaxes = fileComplexities.map((f) => f.maxComplexity);
6392
+ const totalAvg = allAvgs.reduce((a, b) => a + b, 0) / allAvgs.length;
6393
+ const globalMax = Math.max(...allMaxes);
6394
+ const highComplexityDependents = fileComplexities.filter((f) => f.maxComplexity > COMPLEXITY_THRESHOLDS.HIGH_COMPLEXITY_DEPENDENT).sort((a, b) => b.maxComplexity - a.maxComplexity).slice(0, 5).map((f) => ({
6395
+ filepath: f.filepath,
6396
+ maxComplexity: f.maxComplexity,
6397
+ avgComplexity: f.avgComplexity
6398
+ }));
6399
+ let complexityRiskBoost = "low";
6400
+ if (totalAvg > COMPLEXITY_THRESHOLDS.CRITICAL_AVG || globalMax > COMPLEXITY_THRESHOLDS.CRITICAL_MAX) {
6401
+ complexityRiskBoost = "critical";
6402
+ } else if (totalAvg > COMPLEXITY_THRESHOLDS.HIGH_AVG || globalMax > COMPLEXITY_THRESHOLDS.HIGH_MAX) {
6403
+ complexityRiskBoost = "high";
6404
+ } else if (totalAvg > COMPLEXITY_THRESHOLDS.MEDIUM_AVG || globalMax > COMPLEXITY_THRESHOLDS.MEDIUM_MAX) {
6405
+ complexityRiskBoost = "medium";
6406
+ }
6407
+ complexityMetrics = {
6408
+ averageComplexity: Math.round(totalAvg * 10) / 10,
6409
+ maxComplexity: globalMax,
6410
+ filesWithComplexityData: fileComplexities.length,
6411
+ highComplexityDependents,
6412
+ complexityRiskBoost
6413
+ };
6414
+ } else {
6415
+ complexityMetrics = {
6416
+ averageComplexity: 0,
6417
+ maxComplexity: 0,
6418
+ filesWithComplexityData: 0,
6419
+ highComplexityDependents: [],
6420
+ complexityRiskBoost: "low"
6421
+ };
6422
+ }
6423
+ const uniqueFiles = Array.from(chunksByFile.keys()).map((filepath) => ({
6424
+ filepath,
6425
+ isTestFile: isTestFile2(filepath)
6426
+ }));
6427
+ const count = uniqueFiles.length;
6428
+ let riskLevel = count === 0 ? "low" : count <= DEPENDENT_COUNT_THRESHOLDS.LOW ? "low" : count <= DEPENDENT_COUNT_THRESHOLDS.MEDIUM ? "medium" : count <= DEPENDENT_COUNT_THRESHOLDS.HIGH ? "high" : "critical";
6429
+ const RISK_ORDER = { low: 0, medium: 1, high: 2, critical: 3 };
6430
+ if (RISK_ORDER[complexityMetrics.complexityRiskBoost] > RISK_ORDER[riskLevel]) {
6431
+ riskLevel = complexityMetrics.complexityRiskBoost;
6432
+ }
6433
+ log(`Found ${count} dependent files (risk: ${riskLevel}${complexityMetrics.filesWithComplexityData > 0 ? ", complexity-boosted" : ""})`);
6434
+ let note;
6435
+ if (allChunks.length === SCAN_LIMIT) {
6436
+ note = `Warning: Scanned ${SCAN_LIMIT} chunks (limit reached). Results may be incomplete for large codebases. Some dependents might not be listed.`;
6437
+ }
6438
+ return {
6439
+ indexInfo: getIndexMetadata(),
6440
+ filepath: validatedArgs.filepath,
6441
+ dependentCount: count,
6442
+ riskLevel,
6443
+ dependents: uniqueFiles,
6444
+ complexityMetrics,
6445
+ note
6446
+ };
6447
+ }
6448
+ )(args);
6150
6449
  default:
6151
6450
  throw new LienError(
6152
6451
  `Unknown tool: ${name}`,