@levnikolaevich/hex-line-mcp 1.9.0 → 1.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -97,13 +97,15 @@ If a project already has `.hex-skills/codegraph/index.db`, `hex-line` can add li
97
97
 
98
98
  - Graph enrichment is optional. If `.hex-skills/codegraph/index.db` is missing, `hex-line` falls back to standard behavior silently.
99
99
  - `better-sqlite3` is optional. If it is unavailable, `hex-line` still works without graph hints.
100
- - `edit_file` reports **Call impact**, not full semantic blast radius. The warning uses call-graph callers only.
100
+ - `edit_file` reports **Semantic impact** using explainable graph facts: external callers, downstream return/property flow, and clone peers when present.
101
101
 
102
102
  `hex-line` does not read `hex-graph` internals directly anymore. The integration uses a small read-only contract exposed by `hex-graph-mcp`:
103
103
 
104
- - `hex_line_contract`
105
- - `hex_line_symbol_annotations`
106
- - `hex_line_call_edges`
104
+ - `hex_line_symbols`
105
+ - `hex_line_line_facts`
106
+ - `hex_line_edit_impacts`
107
+ - `hex_line_edit_impact_facts`
108
+ - `hex_line_clone_siblings`
107
109
 
108
110
  ## Tools Reference
109
111
 
package/dist/hook.mjs CHANGED
@@ -54,7 +54,7 @@ function normalizeOutput(text, opts = {}) {
54
54
  }
55
55
 
56
56
  // hook.mjs
57
- import { readFileSync, statSync } from "node:fs";
57
+ import { readFileSync, statSync, writeSync } from "node:fs";
58
58
  import { resolve } from "node:path";
59
59
  import { homedir } from "node:os";
60
60
  import { fileURLToPath } from "node:url";
@@ -141,6 +141,7 @@ var TOOL_HINTS = {
141
141
  changes: "mcp__hex-line__changes (git diff with change symbols)",
142
142
  bulk: "mcp__hex-line__bulk_replace (multi-file search-replace)"
143
143
  };
144
+ var DEFERRED_HINT = "If schemas not loaded: ToolSearch('+hex-line read edit')";
144
145
  var BASH_REDIRECTS = [
145
146
  { regex: /^cat\s+\S+/, key: "cat" },
146
147
  { regex: /^head\s+/, key: "head" },
@@ -282,16 +283,25 @@ function getHookMode() {
282
283
  }
283
284
  return _hookMode;
284
285
  }
286
+ function safeExit(fd, data, code) {
287
+ writeSync(fd, data);
288
+ process.exit(code);
289
+ }
290
+ function debugLog(action, reason) {
291
+ writeSync(2, `[hex-hook] ${action}: ${reason}
292
+ `);
293
+ }
285
294
  function block(reason, context) {
295
+ const msg = context ? `${reason}
296
+ ${context}` : reason;
286
297
  const output = {
287
298
  hookSpecificOutput: {
288
299
  permissionDecision: "deny"
289
300
  },
290
- systemMessage: context ? `${reason}
291
- ${context}` : reason
301
+ systemMessage: msg
292
302
  };
293
- process.stdout.write(JSON.stringify(output));
294
- process.exit(2);
303
+ debugLog("BLOCK", reason);
304
+ safeExit(1, JSON.stringify(output), 2);
295
305
  }
296
306
  function advise(reason, context) {
297
307
  const output = {
@@ -301,8 +311,7 @@ function advise(reason, context) {
301
311
  systemMessage: context ? `${reason}
302
312
  ${context}` : reason
303
313
  };
304
- process.stdout.write(JSON.stringify(output));
305
- process.exit(0);
314
+ safeExit(1, JSON.stringify(output), 0);
306
315
  }
307
316
  function redirect(reason, context) {
308
317
  if (getHookMode() === "advisory") {
@@ -324,24 +333,20 @@ function handlePreToolUse(data) {
324
333
  if (BINARY_EXT.has(extOf(filePath))) {
325
334
  process.exit(0);
326
335
  }
327
- const normalPath = filePath.replace(/\\/g, "/");
328
- if (normalPath.includes(".claude/plans/") || normalPath.includes("AppData")) {
336
+ const resolvedNorm = resolveToolPath(filePath).replace(/\\/g, "/");
337
+ const cwdNorm = process.cwd().replace(/\\/g, "/");
338
+ const homeNorm = homedir().replace(/\\/g, "/");
339
+ const claudeAllow = [
340
+ cwdNorm + "/.claude/settings.json",
341
+ cwdNorm + "/.claude/settings.local.json",
342
+ homeNorm + "/.claude/settings.json",
343
+ homeNorm + "/.claude/settings.local.json"
344
+ ];
345
+ if (claudeAllow.some((p) => resolvedNorm.toLowerCase() === p.toLowerCase())) {
329
346
  process.exit(0);
330
347
  }
331
- const ALLOWED_CONFIGS = /* @__PURE__ */ new Set(["settings.json", "settings.local.json"]);
332
- const fileName = normalPath.split("/").pop();
333
- if (ALLOWED_CONFIGS.has(fileName)) {
334
- let candidate = filePath;
335
- if (candidate.startsWith("~/")) {
336
- candidate = homedir().replace(/\\/g, "/") + candidate.slice(1);
337
- }
338
- const absPath = resolve(process.cwd(), candidate).replace(/\\/g, "/");
339
- const projectClaude = resolve(process.cwd(), ".claude").replace(/\\/g, "/") + "/";
340
- const globalClaude = resolve(homedir(), ".claude").replace(/\\/g, "/") + "/";
341
- const cmp = process.platform === "win32" ? (a, b) => a.toLowerCase().startsWith(b.toLowerCase()) : (a, b) => a.startsWith(b);
342
- if (cmp(absPath, projectClaude) || cmp(absPath, globalClaude)) {
343
- process.exit(0);
344
- }
348
+ if (resolvedNorm.includes("/.claude/")) {
349
+ redirect("Protected .claude/ path. Use built-in tools for .claude/ config files.");
345
350
  }
346
351
  if (toolName === "Read") {
347
352
  if (isPartialRead(toolInput)) {
@@ -349,29 +354,30 @@ function handlePreToolUse(data) {
349
354
  }
350
355
  if (fileSize !== null && fileSize <= LARGE_FILE_BYTES) {
351
356
  const ext2 = filePath ? extOf(filePath) : "";
352
- const hint = filePath && OUTLINEABLE_EXT.has(ext2) ? `mcp__hex-line__outline(path="${filePath}") gives a compact structural map. For edits, use mcp__hex-line__read_file(path="${filePath}") with ranges.` : filePath ? `NEXT READ: use mcp__hex-line__read_file(path="${filePath}"). Built-in Read allowed this time but wastes edit context.` : "NEXT READ: use mcp__hex-line__read_file. Built-in Read allowed this time but wastes edit context.";
353
- advise(hint);
357
+ const hint = filePath && OUTLINEABLE_EXT.has(ext2) ? `Use mcp__hex-line__outline(path="${filePath}") for structure, then mcp__hex-line__read_file(path="${filePath}") with ranges.` : filePath ? `Use mcp__hex-line__read_file(path="${filePath}"). Built-in Read wastes edit context.` : "Use mcp__hex-line__read_file. Built-in Read wastes edit context.";
358
+ advise(hint, DEFERRED_HINT);
354
359
  }
355
360
  const ext = filePath ? extOf(filePath) : "";
356
361
  const outlineHint = filePath && OUTLINEABLE_EXT.has(ext) ? `Use mcp__hex-line__outline(path="${filePath}") for structure, then mcp__hex-line__read_file(path="${filePath}") with ranges to read only what you need.` : filePath ? `Use mcp__hex-line__read_file(path="${filePath}") with ranges or offset/limit` : "Use mcp__hex-line__directory_tree or mcp__hex-line__read_file";
357
- redirect(outlineHint, "Do not use built-in Read for full reads of large files.");
362
+ redirect(outlineHint, "Do not use built-in Read for full reads of large files.\n" + DEFERRED_HINT);
358
363
  }
359
364
  if (toolName === "Edit") {
360
365
  const oldText = String(toolInput.old_string || "");
361
366
  const isLargeEdit = Boolean(toolInput.replace_all) || oldText.length > LARGE_EDIT_CHARS || fileSize !== null && fileSize > LARGE_FILE_BYTES;
362
367
  if (!isLargeEdit) {
363
- process.exit(0);
368
+ const editHint = filePath ? `Prefer mcp__hex-line__edit_file(path="${filePath}") for hash-verified edits.` : "Prefer mcp__hex-line__edit_file for hash-verified edits.";
369
+ advise(editHint);
364
370
  }
365
371
  const target = filePath ? `Use mcp__hex-line__grep_search or mcp__hex-line__read_file, then mcp__hex-line__edit_file with path="${filePath}"` : "Use mcp__hex-line__grep_search or mcp__hex-line__read_file, then mcp__hex-line__edit_file";
366
- redirect(target, "For large or repeated edits: locate anchors/checksums first, then call edit_file once with batched edits.");
372
+ redirect(target, "For large or repeated edits: locate anchors/checksums first, then call edit_file once with batched edits.\n" + DEFERRED_HINT);
367
373
  }
368
374
  if (toolName === "Write") {
369
375
  const pathNote = filePath ? ` with path="${filePath}"` : "";
370
- redirect(`Use mcp__hex-line__write_file${pathNote}`, TOOL_HINTS.Write);
376
+ redirect(`Use mcp__hex-line__write_file${pathNote}`, TOOL_HINTS.Write + "\n" + DEFERRED_HINT);
371
377
  }
372
378
  if (toolName === "Grep") {
373
379
  const pathNote = filePath ? ` with path="${filePath}"` : "";
374
- redirect(`Use mcp__hex-line__grep_search${pathNote}`, TOOL_HINTS.Grep);
380
+ redirect(`Use mcp__hex-line__grep_search${pathNote}`, TOOL_HINTS.Grep + "\n" + DEFERRED_HINT);
375
381
  }
376
382
  }
377
383
  if (toolName === "Bash") {
@@ -457,8 +463,7 @@ function handlePostToolUse(data) {
457
463
  `Original: ${originalCount} lines | Filtered: ${filteredCount} lines`,
458
464
  "=".repeat(50)
459
465
  ].join("\n");
460
- process.stderr.write(output);
461
- process.exit(2);
466
+ safeExit(2, output, 2);
462
467
  }
463
468
  function handleSessionStart() {
464
469
  const settingsFiles = [
@@ -479,9 +484,8 @@ function handleSessionStart() {
479
484
  }
480
485
  }
481
486
  const prefix = styleActive ? "Hex-line MCP available. Output style active.\n" : "Hex-line MCP available.\n";
482
- const msg = prefix + "Call hex-line tools directly. Do not use ToolSearch for hex-line tools.\nWorkflow:\n- Discovery: outline for code and markdown files, read_file for targeted reads, grep_search for symbol/text lookup\n- Read cheaply: prefer offset/limit or ranges; avoid full-file Read on large files\n- Edit safely: read/grep first, then one batched edit_file call per file with base_revision when available\n- Verify before reread: use verify to check checksums or revision freshness\n- Multi-file rename/refactor: use bulk_replace\n- New files: use write_file\nExceptions: images, PDFs, notebooks, .claude/settings.json, .claude/settings.local.json use built-in Read. Glob is always OK.";
483
- process.stdout.write(JSON.stringify({ systemMessage: msg }));
484
- process.exit(0);
487
+ const msg = prefix + "<hex-line_instructions>\n <deferred_loading>If hex-line schemas not loaded, run: ToolSearch('+hex-line read edit')</deferred_loading>\n <exploration>\n <rule>Use outline for structure (code + markdown), not Read. ~10-20 lines vs hundreds.</rule>\n <rule>Use read_file with offset/limit or ranges for targeted reads.</rule>\n <rule>Use grep_search before editing to get hash anchors.</rule>\n </exploration>\n <editing>\n <path name='surgical'>grep_search \u2192 edit_file (fastest: hash-verified, no full read needed)</path>\n <path name='exploratory'>outline \u2192 read_file (ranges) \u2192 edit_file with base_revision</path>\n <path name='multi-file'>bulk_replace for text rename/refactor across files</path>\n </editing>\n <tips>\n <tip>Carry revision from read_file into base_revision on edit_file.</tip>\n <tip>If edit returns CONFLICT, call verify \u2014 only reread when STALE.</tip>\n <tip>Batch multiple edits to same file in one edit_file call.</tip>\n <tip>Use write_file for new files (no prior Read needed).</tip>\n </tips>\n <exceptions>Built-in Read OK for: images, PDFs, notebooks, Glob (always), .claude/settings.json</exceptions>\n</hex-line_instructions>";
488
+ safeExit(1, JSON.stringify({ systemMessage: msg }), 0);
485
489
  }
486
490
  var _norm = (p) => p.replace(/\\/g, "/");
487
491
  if (_norm(process.argv[1]) === _norm(fileURLToPath(import.meta.url))) {
package/dist/server.mjs CHANGED
@@ -281,7 +281,23 @@ function validateWritePath(filePath) {
281
281
  import { existsSync as existsSync2 } from "node:fs";
282
282
  import { join as join3, dirname as dirname2, relative } from "node:path";
283
283
  import { createRequire } from "node:module";
284
- var HEX_LINE_CONTRACT_VERSION = 2;
284
+ var REQUIRED_VIEWS = [
285
+ "hex_line_symbols",
286
+ "hex_line_line_facts",
287
+ "hex_line_edit_impacts",
288
+ "hex_line_edit_impact_facts",
289
+ "hex_line_clone_siblings"
290
+ ];
291
+ var FACT_PRIORITY = /* @__PURE__ */ new Map([
292
+ ["definition", 0],
293
+ ["through_flow", 1],
294
+ ["outgoing_flow", 2],
295
+ ["incoming_flow", 3],
296
+ ["callee", 4],
297
+ ["caller", 5],
298
+ ["clone", 6],
299
+ ["hotspot", 7]
300
+ ]);
285
301
  var _dbs = /* @__PURE__ */ new Map();
286
302
  var _driverUnavailable = false;
287
303
  function getGraphDB(filePath) {
@@ -295,7 +311,7 @@ function getGraphDB(filePath) {
295
311
  const require2 = createRequire(import.meta.url);
296
312
  const Database = require2("better-sqlite3");
297
313
  const db = new Database(dbPath, { readonly: true });
298
- if (!validateHexLineContract(db)) {
314
+ if (!validateContract(db)) {
299
315
  db.close();
300
316
  return null;
301
317
  }
@@ -306,25 +322,51 @@ function getGraphDB(filePath) {
306
322
  return null;
307
323
  }
308
324
  }
309
- function validateHexLineContract(db) {
325
+ function validateContract(db) {
310
326
  try {
311
- const contract = db.prepare("SELECT contract_version FROM hex_line_contract LIMIT 1").get();
312
- if (!contract || contract.contract_version !== HEX_LINE_CONTRACT_VERSION) return false;
313
- db.prepare("SELECT node_id, file, line_start, line_end, display_name, kind, callees, callers FROM hex_line_symbol_annotations LIMIT 1").all();
314
- db.prepare("SELECT source_id, target_id, source_file, source_line, source_display_name, target_file, target_line, target_display_name, confidence FROM hex_line_call_edges LIMIT 1").all();
327
+ for (const viewName of REQUIRED_VIEWS) {
328
+ const row = db.prepare(
329
+ "SELECT name FROM sqlite_master WHERE type = 'view' AND name = ? LIMIT 1"
330
+ ).get(viewName);
331
+ if (!row) return false;
332
+ }
333
+ db.prepare("SELECT node_id, file, line_start, line_end, display_name, kind FROM hex_line_symbols LIMIT 1").all();
334
+ db.prepare("SELECT fact_kind, related_display_name, confidence, origin FROM hex_line_line_facts LIMIT 1").all();
335
+ db.prepare("SELECT symbol_node_id, external_callers_count, downstream_return_flow_count, downstream_property_flow_count, sink_reach_count FROM hex_line_edit_impacts LIMIT 1").all();
336
+ db.prepare("SELECT edited_symbol_id, fact_kind, target_display_name, path_kind, flow_hops FROM hex_line_edit_impact_facts LIMIT 1").all();
315
337
  return true;
316
338
  } catch {
317
339
  return false;
318
340
  }
319
341
  }
342
+ function shortKind(kind) {
343
+ return { function: "fn", class: "cls", method: "mtd", variable: "var" }[kind] || kind;
344
+ }
345
+ function compactSymbolCounts(node) {
346
+ const parts = [];
347
+ if ((node.callees_exact || 0) > 0 || (node.callers_exact || 0) > 0) {
348
+ parts.push(`${node.callees_exact}\u2193 ${node.callers_exact}\u2191`);
349
+ }
350
+ const flowParts = [];
351
+ if ((node.incoming_flow_count || 0) > 0) flowParts.push(`${node.incoming_flow_count}in`);
352
+ if ((node.outgoing_flow_count || 0) > 0) flowParts.push(`${node.outgoing_flow_count}out`);
353
+ if ((node.through_flow_count || 0) > 0) flowParts.push(`${node.through_flow_count}thru`);
354
+ if (flowParts.length > 0) parts.push(`flow ${flowParts.join(" ")}`);
355
+ if ((node.clone_sibling_count || 0) > 0) parts.push(`clone ${node.clone_sibling_count}`);
356
+ return parts;
357
+ }
320
358
  function symbolAnnotation(db, file, name) {
321
359
  try {
322
360
  const node = db.prepare(
323
- "SELECT callees, callers FROM hex_line_symbol_annotations WHERE file = ? AND name = ? LIMIT 1"
361
+ `SELECT display_name, kind, callers_exact, callees_exact, incoming_flow_count, outgoing_flow_count, through_flow_count, clone_sibling_count
362
+ FROM hex_line_symbols
363
+ WHERE file = ? AND name = ?
364
+ LIMIT 1`
324
365
  ).get(file, name);
325
366
  if (!node) return null;
326
- if (node.callees === 0 && node.callers === 0) return null;
327
- return `[${node.callees}\u2193 ${node.callers}\u2191]`;
367
+ const parts = compactSymbolCounts(node);
368
+ const prefix = shortKind(node.kind);
369
+ return parts.length > 0 ? `[${prefix} ${parts.join(" | ")}]` : `[${prefix}]`;
328
370
  } catch {
329
371
  return null;
330
372
  }
@@ -332,58 +374,85 @@ function symbolAnnotation(db, file, name) {
332
374
  function fileAnnotations(db, file) {
333
375
  try {
334
376
  const nodes = db.prepare(
335
- `SELECT display_name, kind, callees, callers
336
- FROM hex_line_symbol_annotations
377
+ `SELECT display_name, kind, callers_exact, callees_exact, incoming_flow_count, outgoing_flow_count, through_flow_count, clone_sibling_count
378
+ FROM hex_line_symbols
337
379
  WHERE file = ?
338
380
  ORDER BY line_start`
339
381
  ).all(file);
340
382
  return nodes.map((node) => ({
341
383
  name: node.display_name,
342
384
  kind: node.kind,
343
- callees: node.callees,
344
- callers: node.callers
385
+ callers_exact: node.callers_exact,
386
+ callees_exact: node.callees_exact,
387
+ incoming_flow_count: node.incoming_flow_count,
388
+ outgoing_flow_count: node.outgoing_flow_count,
389
+ through_flow_count: node.through_flow_count,
390
+ clone_sibling_count: node.clone_sibling_count
345
391
  }));
346
392
  } catch {
347
393
  return [];
348
394
  }
349
395
  }
350
- function callImpact(db, file, startLine, endLine) {
396
+ function formatLineFact(fact) {
397
+ const countParts = compactSymbolCounts(fact);
398
+ const suffix = countParts.length > 0 ? ` | ${countParts.join(" | ")}` : "";
399
+ switch (fact.fact_kind) {
400
+ case "definition":
401
+ return `[${shortKind(fact.kind)}${suffix}]`;
402
+ case "callee":
403
+ return fact.related_display_name ? `[callee:${fact.related_display_name}${suffix}]` : `[callee${suffix}]`;
404
+ case "caller":
405
+ return fact.related_display_name ? `[caller:${fact.related_display_name}${suffix}]` : `[caller${suffix}]`;
406
+ case "outgoing_flow":
407
+ return `[flow-out:${fact.target_anchor_kind || "?"}${suffix}]`;
408
+ case "incoming_flow":
409
+ return `[flow-in:${fact.target_anchor_kind || "?"}${suffix}]`;
410
+ case "through_flow":
411
+ return `[flow-through${suffix}]`;
412
+ case "clone":
413
+ return `[clone${suffix}]`;
414
+ case "hotspot":
415
+ return `[hotspot${suffix}]`;
416
+ default:
417
+ return `[${fact.fact_kind}${suffix}]`;
418
+ }
419
+ }
420
+ function priorityForFact(factKind) {
421
+ return FACT_PRIORITY.get(factKind) ?? 99;
422
+ }
423
+ function matchAnnotation(db, file, line) {
351
424
  try {
352
- const modified = db.prepare(
353
- `SELECT node_id
354
- FROM hex_line_symbol_annotations
355
- WHERE file = ?
356
- AND line_start <= ?
357
- AND line_end >= ?`
358
- ).all(file, endLine, startLine);
359
- if (modified.length === 0) return [];
360
- const affected = [];
361
- const seen = /* @__PURE__ */ new Set();
362
- for (const node of modified) {
363
- const dependents = db.prepare(
364
- `SELECT source_display_name AS name, source_file AS file, source_line AS line
365
- FROM hex_line_call_edges
366
- WHERE target_id = ?
367
- AND confidence IN ('exact', 'precise')`
368
- ).all(node.node_id);
369
- for (const dep of dependents) {
370
- const key = `${dep.file}:${dep.name}`;
371
- if (!seen.has(key) && dep.file !== file) {
372
- seen.add(key);
373
- affected.push({ name: dep.name, file: dep.file, line: dep.line });
374
- }
375
- }
376
- }
377
- return affected.slice(0, 10);
425
+ const facts = db.prepare(
426
+ `SELECT
427
+ lf.display_name,
428
+ lf.kind,
429
+ lf.fact_kind,
430
+ lf.related_display_name,
431
+ lf.source_anchor_kind,
432
+ lf.target_anchor_kind,
433
+ hs.callers_exact,
434
+ hs.callees_exact,
435
+ hs.incoming_flow_count,
436
+ hs.outgoing_flow_count,
437
+ hs.through_flow_count,
438
+ hs.clone_sibling_count
439
+ FROM hex_line_line_facts lf
440
+ LEFT JOIN hex_line_symbols hs ON hs.node_id = lf.symbol_node_id
441
+ WHERE lf.file = ? AND lf.line_start <= ? AND lf.line_end >= ?
442
+ ORDER BY lf.line_start DESC`
443
+ ).all(file, line, line);
444
+ if (facts.length === 0) return null;
445
+ facts.sort((left, right) => priorityForFact(left.fact_kind) - priorityForFact(right.fact_kind));
446
+ return formatLineFact(facts[0]);
378
447
  } catch {
379
- return [];
448
+ return null;
380
449
  }
381
450
  }
382
451
  function cloneWarning(db, file, startLine, endLine) {
383
452
  try {
384
453
  const modified = db.prepare(
385
454
  `SELECT node_id
386
- FROM hex_line_symbol_annotations
455
+ FROM hex_line_symbols
387
456
  WHERE file = ?
388
457
  AND line_start <= ?
389
458
  AND line_end >= ?`
@@ -393,17 +462,20 @@ function cloneWarning(db, file, startLine, endLine) {
393
462
  const seen = /* @__PURE__ */ new Set();
394
463
  for (const node of modified) {
395
464
  const siblings = db.prepare(
396
- `SELECT s2.file, s2.line_start, s2.display_name
397
- FROM hex_line_clone_siblings s1
398
- JOIN hex_line_clone_siblings s2 ON s2.norm_hash = s1.norm_hash AND s2.node_id != s1.node_id
399
- WHERE s1.node_id = ?`
465
+ `SELECT clone_peer_name, clone_peer_file, clone_peer_line, clone_type
466
+ FROM hex_line_clone_siblings
467
+ WHERE node_id = ?`
400
468
  ).all(node.node_id);
401
- for (const sib of siblings) {
402
- const key = `${sib.file}:${sib.display_name}`;
403
- if (!seen.has(key)) {
404
- seen.add(key);
405
- clones.push({ name: sib.display_name, file: sib.file, line: sib.line_start });
406
- }
469
+ for (const sibling of siblings) {
470
+ const key = `${sibling.clone_peer_file}:${sibling.clone_peer_name}:${sibling.clone_peer_line}`;
471
+ if (seen.has(key)) continue;
472
+ seen.add(key);
473
+ clones.push({
474
+ name: sibling.clone_peer_name,
475
+ file: sibling.clone_peer_file,
476
+ line: sibling.clone_peer_line,
477
+ cloneType: sibling.clone_type
478
+ });
407
479
  }
408
480
  }
409
481
  return clones.slice(0, 10);
@@ -411,21 +483,64 @@ function cloneWarning(db, file, startLine, endLine) {
411
483
  return [];
412
484
  }
413
485
  }
414
- function matchAnnotation(db, file, line) {
486
+ function semanticImpact(db, file, startLine, endLine) {
415
487
  try {
416
- const node = db.prepare(
417
- `SELECT display_name, kind, callees, callers
418
- FROM hex_line_symbol_annotations
419
- WHERE file = ? AND line_start <= ? AND line_end >= ?
420
- ORDER BY line_start DESC
421
- LIMIT 1`
422
- ).get(file, line, line);
423
- if (!node) return null;
424
- const kindShort = { function: "fn", class: "cls", method: "mtd", variable: "var" }[node.kind] || node.kind;
425
- if (node.callees === 0 && node.callers === 0) return `[${kindShort}]`;
426
- return `[${kindShort} ${node.callees}\u2193 ${node.callers}\u2191]`;
488
+ const modified = db.prepare(
489
+ `SELECT symbol_node_id, display_name, external_callers_count, downstream_return_flow_count, downstream_property_flow_count, sink_reach_count, clone_sibling_count
490
+ FROM hex_line_edit_impacts
491
+ WHERE file = ?
492
+ AND line_start <= ?
493
+ AND line_end >= ?`
494
+ ).all(file, endLine, startLine);
495
+ if (modified.length === 0) return [];
496
+ return modified.map((item) => {
497
+ const facts = db.prepare(
498
+ `SELECT fact_kind, target_display_name, target_file, target_line, intermediate_display_name, path_kind, flow_hops, source_anchor_kind, target_anchor_kind, access_path_json
499
+ FROM hex_line_edit_impact_facts
500
+ WHERE edited_symbol_id = ?
501
+ ORDER BY
502
+ CASE fact_kind
503
+ WHEN 'external_caller' THEN 0
504
+ WHEN 'return_flow_to_symbol' THEN 1
505
+ WHEN 'property_flow_to_symbol' THEN 2
506
+ WHEN 'flow_reaches_terminal_anchor' THEN 3
507
+ WHEN 'clone_sibling' THEN 4
508
+ ELSE 9
509
+ END,
510
+ target_file,
511
+ target_line`
512
+ ).all(item.symbol_node_id);
513
+ const seen = /* @__PURE__ */ new Set();
514
+ const dedupedFacts = facts.filter((fact) => {
515
+ const key = [
516
+ fact.fact_kind,
517
+ fact.target_display_name || "",
518
+ fact.target_file || "",
519
+ fact.target_line || "",
520
+ fact.path_kind || "",
521
+ fact.flow_hops || "",
522
+ fact.source_anchor_kind || "",
523
+ fact.target_anchor_kind || "",
524
+ fact.access_path_json || ""
525
+ ].join("|");
526
+ if (seen.has(key)) return false;
527
+ seen.add(key);
528
+ return true;
529
+ });
530
+ return {
531
+ symbol: item.display_name,
532
+ counts: {
533
+ externalCallers: item.external_callers_count,
534
+ downstreamReturnFlow: item.downstream_return_flow_count,
535
+ downstreamPropertyFlow: item.downstream_property_flow_count,
536
+ sinkReach: item.sink_reach_count,
537
+ cloneSiblings: item.clone_sibling_count
538
+ },
539
+ facts: dedupedFacts
540
+ };
541
+ });
427
542
  } catch {
428
- return null;
543
+ return [];
429
544
  }
430
545
  }
431
546
  function getRelativePath(filePath) {
@@ -977,8 +1092,17 @@ ${text}
977
1092
  const annos = fileAnnotations(db, relFile);
978
1093
  if (annos.length > 0) {
979
1094
  const items = annos.map((a) => {
980
- const counts = a.callees || a.callers ? ` ${a.callees}\u2193 ${a.callers}\u2191` : "";
981
- return `${a.name} [${a.kind}${counts}]`;
1095
+ const parts = [];
1096
+ if ((a.callees_exact || 0) > 0 || (a.callers_exact || 0) > 0) {
1097
+ parts.push(`${a.callees_exact}\u2193 ${a.callers_exact}\u2191`);
1098
+ }
1099
+ const flow = [];
1100
+ if ((a.incoming_flow_count || 0) > 0) flow.push(`${a.incoming_flow_count}in`);
1101
+ if ((a.outgoing_flow_count || 0) > 0) flow.push(`${a.outgoing_flow_count}out`);
1102
+ if ((a.through_flow_count || 0) > 0) flow.push(`${a.through_flow_count}thru`);
1103
+ if (flow.length > 0) parts.push(`flow ${flow.join(" ")}`);
1104
+ if ((a.clone_sibling_count || 0) > 0) parts.push(`clone ${a.clone_sibling_count}`);
1105
+ return parts.length > 0 ? `${a.name} [${a.kind} ${parts.join(" | ")}]` : `${a.name} [${a.kind}]`;
982
1106
  });
983
1107
  graphLine = `
984
1108
  Graph: ${items.join(" | ")}`;
@@ -1348,13 +1472,14 @@ function applyReplaceLinesEdit(edit, ctx) {
1348
1472
  const { start: csStart, end: csEnd, hex: csHex } = parseChecksum(rangeChecksum2);
1349
1473
  if (!coverage.ok) {
1350
1474
  const snip = buildErrorSnippet(origLines, actualStart - 1);
1475
+ const retryChecksum = buildRangeChecksum(currentSnapshot, actualStart, actualEnd);
1351
1476
  throw new Error(
1352
1477
  `${coverage.reason}
1353
1478
 
1354
1479
  Current content (lines ${snip.start}-${snip.end}):
1355
1480
  ${snip.text}
1356
1481
 
1357
- Tip: Use updated hashes above for retry.`
1482
+ ` + (retryChecksum ? `Retry checksum: ${retryChecksum}` : "Tip: Use updated hashes above for retry.")
1358
1483
  );
1359
1484
  }
1360
1485
  const actual = buildRangeChecksum(currentSnapshot, csStart, csEnd);
@@ -1413,7 +1538,7 @@ function editFile(filePath, edits, opts = {}) {
1413
1538
  const currentSnapshot = readSnapshot(real);
1414
1539
  const baseSnapshot = opts.baseRevision ? getSnapshotByRevision(opts.baseRevision) : null;
1415
1540
  const hasBaseSnapshot = !!(baseSnapshot && baseSnapshot.path === real);
1416
- const staleRevision = !!opts.baseRevision && opts.baseRevision !== currentSnapshot.revision;
1541
+ const staleRevision = !!opts.baseRevision && opts.baseRevision !== currentSnapshot.revision && hasBaseSnapshot;
1417
1542
  const changedRanges = staleRevision && hasBaseSnapshot ? computeChangedRanges(baseSnapshot.lines, currentSnapshot.lines) : [];
1418
1543
  const conflictPolicy = opts.conflictPolicy || "conservative";
1419
1544
  const original = currentSnapshot.content;
@@ -1594,13 +1719,30 @@ ${serializeReadBlock(block)}`;
1594
1719
  const db = getGraphDB(real);
1595
1720
  const relFile = db ? getRelativePath(real) : null;
1596
1721
  if (db && relFile && fullDiff && minLine <= maxLine) {
1597
- const affected = callImpact(db, relFile, minLine, maxLine);
1598
- if (affected.length > 0) {
1599
- const list = affected.map((a) => `${a.name} (${a.file}:${a.line})`).join(", ");
1722
+ const impacts = semanticImpact(db, relFile, minLine, maxLine);
1723
+ if (impacts.length > 0) {
1724
+ const sections = impacts.map((impact) => {
1725
+ const totals = [];
1726
+ if (impact.counts.externalCallers > 0) totals.push(`${impact.counts.externalCallers} external callers`);
1727
+ if (impact.counts.downstreamReturnFlow > 0) totals.push(`${impact.counts.downstreamReturnFlow} downstream return-flow`);
1728
+ if (impact.counts.downstreamPropertyFlow > 0) totals.push(`${impact.counts.downstreamPropertyFlow} property-flow`);
1729
+ if (impact.counts.sinkReach > 0) totals.push(`${impact.counts.sinkReach} terminal flow reach`);
1730
+ if (impact.counts.cloneSiblings > 0) totals.push(`${impact.counts.cloneSiblings} clone siblings`);
1731
+ const headline = totals.length > 0 ? totals.join(", ") : "no downstream graph facts";
1732
+ const factLines = impact.facts.slice(0, 5).map((fact) => {
1733
+ const target = fact.target_display_name ? `${fact.target_display_name} (${fact.target_file}:${fact.target_line})` : `${fact.target_file}:${fact.target_line}`;
1734
+ const via = fact.path_kind ? ` via ${fact.path_kind}` : "";
1735
+ return `${fact.fact_kind}: ${target}${via}`;
1736
+ });
1737
+ return [
1738
+ `${impact.symbol}: ${headline}`,
1739
+ ...factLines.map((line) => ` ${line}`)
1740
+ ].join("\n");
1741
+ });
1600
1742
  msg += `
1601
1743
 
1602
- \u26A0 Call impact: ${affected.length} callers in other files
1603
- ${list}`;
1744
+ \u26A0 Semantic impact:
1745
+ ${sections.join("\n")}`;
1604
1746
  }
1605
1747
  const clones = cloneWarning(db, relFile, minLine, maxLine);
1606
1748
  if (clones.length > 0) {
@@ -2846,7 +2988,7 @@ OUTPUT_CAPPED: Output exceeded ${MAX_BULK_OUTPUT_CHARS} chars.`;
2846
2988
  }
2847
2989
 
2848
2990
  // server.mjs
2849
- var version = true ? "1.9.0" : (await null).createRequire(import.meta.url)("./package.json").version;
2991
+ var version = true ? "1.10.0" : (await null).createRequire(import.meta.url)("./package.json").version;
2850
2992
  var { server, StdioServerTransport } = await createServerRuntime({
2851
2993
  name: "hex-line-mcp",
2852
2994
  version
@@ -2907,7 +3049,7 @@ ERROR: ${e.message}`);
2907
3049
  });
2908
3050
  server.registerTool("edit_file", {
2909
3051
  title: "Edit File",
2910
- description: "Apply verified partial edits to one file.",
3052
+ description: "Apply hash-verified partial edits to one file. Batch multiple edits in one call. Carry base_revision from prior read/edit for auto-rebase on concurrent changes.",
2911
3053
  inputSchema: z2.object({
2912
3054
  path: z2.string().describe("File to edit"),
2913
3055
  edits: z2.union([z2.string(), z2.array(z2.any())]).describe(
@@ -3042,7 +3184,7 @@ server.registerTool("outline", {
3042
3184
  });
3043
3185
  server.registerTool("verify", {
3044
3186
  title: "Verify Checksums",
3045
- description: "Verify held checksums without rereading the file.",
3187
+ description: "Check if held checksums are still valid without rereading. Use after edit_file returns CONFLICT to decide: VALID (retry), STALE (reread ranges), INVALID (reread file).",
3046
3188
  inputSchema: z2.object({
3047
3189
  path: z2.string().describe("File path"),
3048
3190
  checksums: z2.array(z2.string()).describe('Checksum strings, e.g. ["1-50:f7e2a1b0", "51-100:abcd1234"]'),
@@ -3062,7 +3204,7 @@ server.registerTool("verify", {
3062
3204
  });
3063
3205
  server.registerTool("directory_tree", {
3064
3206
  title: "Directory Tree",
3065
- description: "Directory tree with .gitignore support. Pattern glob to find files/dirs by name. Skips node_modules, .git, dist.",
3207
+ description: "Gitignore-aware directory tree. Use pattern glob to find files/dirs by name instead of Bash find/ls. Skips node_modules, .git, dist.",
3066
3208
  inputSchema: z2.object({
3067
3209
  path: z2.string().describe("Directory path"),
3068
3210
  pattern: z2.string().optional().describe('Glob filter on names (e.g. "*-mcp", "*.mjs"). Returns flat match list instead of tree'),
@@ -3082,7 +3224,7 @@ server.registerTool("directory_tree", {
3082
3224
  });
3083
3225
  server.registerTool("get_file_info", {
3084
3226
  title: "File Info",
3085
- description: "File metadata without reading content: size, line count, modification time, type, binary detection. Use before reading large files to check size.",
3227
+ description: "File metadata without reading content: size, line count, mtime, binary detection. Use before read_file on unknown files to decide offset/limit strategy.",
3086
3228
  inputSchema: z2.object({
3087
3229
  path: z2.string().describe("File path")
3088
3230
  }),
@@ -3097,7 +3239,7 @@ server.registerTool("get_file_info", {
3097
3239
  });
3098
3240
  server.registerTool("changes", {
3099
3241
  title: "Semantic Diff",
3100
- description: "Compare file or directory against git ref (default: HEAD). Shows added/removed/modified symbols or file stats.",
3242
+ description: "Semantic diff against git ref (default: HEAD). Shows added/removed/modified symbols. Use to review changes before commit.",
3101
3243
  inputSchema: z2.object({
3102
3244
  path: z2.string().describe("File or directory path"),
3103
3245
  compare_against: z2.string().optional().describe('Git ref to compare against (default: "HEAD")')
@@ -3113,7 +3255,7 @@ server.registerTool("changes", {
3113
3255
  });
3114
3256
  server.registerTool("bulk_replace", {
3115
3257
  title: "Bulk Replace",
3116
- description: "Search-and-replace across multiple files with compact or full diff output.",
3258
+ description: "Search-and-replace text across multiple files. Use for renames, refactors. Compact or full diff output.",
3117
3259
  inputSchema: z2.object({
3118
3260
  replacements: z2.union([z2.string(), replacementPairsSchema]).describe('JSON array of {old, new} pairs: [{"old":"foo","new":"bar"}]'),
3119
3261
  glob: z2.string().optional().describe('File glob (default: "**/*.{md,mjs,json,yml,ts,js}")'),
package/output-style.md CHANGED
@@ -16,35 +16,30 @@ keep-coding-instructions: true
16
16
  | Grep | `mcp__hex-line__grep_search` | Edit-ready matches |
17
17
  | Edit (text rename) | `mcp__hex-line__bulk_replace` | Multi-file text rename/refactor |
18
18
  | Bash `find`/`tree` | `mcp__hex-line__directory_tree` | Pattern search, gitignore-aware |
19
+ | Full code read | `mcp__hex-line__outline` then `read_file` with ranges | Structure first, read targeted |
19
20
 
20
- ## Efficient File Reading
21
+ **Bootstrap**: if hex-line calls fail, load schemas: `ToolSearch('+hex-line read edit')`
21
22
 
22
- For unfamiliar code files >100 lines, prefer:
23
- 1. `outline` first
24
- 2. `read_file` with `offset`/`limit` or `ranges`
25
- 3. `paths` or `ranges` when batching several targets
23
+ ## Workflow Paths
26
24
 
27
- Avoid reading a large file in full. Prefer compact, targeted reads.
25
+ | Path | When | Flow |
26
+ |------|------|------|
27
+ | Surgical | Know the target | `grep_search` → `edit_file` |
28
+ | Exploratory | Need context first | `outline` → `read_file` (ranges) → `edit_file` |
29
+ | Multi-file | Text rename/refactor | `bulk_replace` |
30
+ | Verify | Check freshness | `verify` → reread only if STALE |
28
31
 
29
32
  Bash OK for: npm/node/git/docker/curl, pipes, compound commands.
30
33
  **Built-in OK for:** images, PDFs, notebooks, Glob (always), `.claude/settings.json`, `.claude/settings.local.json`.
31
34
 
32
35
  ## Edit Workflow
33
36
 
34
- Prefer:
35
- 1. collect all known hunks for one file
36
- 2. send one `edit_file` call with batched edits
37
- 3. carry `revision` from `read_file` into `base_revision` on follow-up edits
38
- 4. use `set_line`, `replace_lines`, `insert_after`, `replace_between` based on scope
39
- 5. if edit returns CONFLICT, call `verify` with stale checksum it reports VALID/STALE/INVALID without rereading the whole file
40
- 6. only reread (`read_file`) when `verify` confirms STALE
41
-
42
- Post-edit output uses `block: post_edit` with checksum — use it directly for follow-up edits or verify.
43
-
44
- Avoid:
45
- - chained same-file `edit_file` calls when all edits are already known
46
- - full-file rewrites for local changes
47
- - using `bulk_replace` for structural block rewrites
37
+ | Do | Don't |
38
+ |----|-------|
39
+ | Batch all hunks in one `edit_file` | Chain same-file `edit_file` calls |
40
+ | Carry `revision` → `base_revision` | Full-file rewrite for local changes |
41
+ | `verify` before reread | `bulk_replace` for block rewrites |
42
+ | `post_edit` checksum for follow-up | |
48
43
 
49
44
 
50
45
  ## hex-graph — Code Analysis
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@levnikolaevich/hex-line-mcp",
3
- "version": "1.9.0",
3
+ "version": "1.10.0",
4
4
  "mcpName": "io.github.levnikolaevich/hex-line-mcp",
5
5
  "type": "module",
6
6
  "description": "Hash-verified file editing MCP + token efficiency hook for AI coding agents. 10 tools: read, edit, write, grep, outline, verify, directory_tree, file_info, changes, bulk_replace.",
@@ -73,6 +73,7 @@
73
73
  },
74
74
  "homepage": "https://github.com/levnikolaevich/claude-code-skills/tree/master/mcp/hex-line-mcp",
75
75
  "devDependencies": {
76
- "eslint": "^10.1.0"
76
+ "eslint": "^10.1.0",
77
+ "typescript": "^6.0.2"
77
78
  }
78
79
  }