@levnikolaevich/hex-line-mcp 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/search.mjs ADDED
@@ -0,0 +1,132 @@
1
+ /**
2
+ * File search via ripgrep with hash-annotated results.
3
+ * Uses spawn with arg arrays (no shell string interpolation).
4
+ */
5
+
6
+ import { spawn } from "node:child_process";
7
+ import { resolve } from "node:path";
8
+ import { fnv1a, lineTag } from "./hash.mjs";
9
+ import { getGraphDB, matchAnnotation, getRelativePath } from "./graph-enrich.mjs";
10
+
11
+ const DEFAULT_LIMIT = 100;
12
+ const MAX_OUTPUT = 10 * 1024 * 1024; // 10 MB
13
+ const TIMEOUT = 30000; // 30s
14
+
15
/**
 * Search files using ripgrep (`rg`).
 *
 * Spawns rg with an argument array (no shell interpolation), accumulates up
 * to MAX_OUTPUT bytes of stdout, then reformats each output line:
 *   match lines   "file:NN:content"  -> "file:>>TAG.NN\tcontent[ annotation]"
 *   context lines "file-NN-content"  -> "file: TAG.NN\tcontent"
 * where TAG comes from the FNV-1a hash of the line content (hash.mjs).
 * With opts.plain, lines are passed through untouched.
 *
 * @param {string} pattern - regex pattern (passed to rg after "--")
 * @param {object} opts - { path, glob, type, caseInsensitive, context, limit, plain }
 * @returns {Promise<string>} formatted results wrapped in a ``` code fence
 */
export function grepSearch(pattern, opts = {}) {
  // Resolver is named resolve_ to avoid shadowing path.resolve imported above.
  return new Promise((resolve_, reject) => {
    // Convert Git Bash /c/path → c:/path on Windows
    const rawPath = opts.path || "";
    const normPath = (process.platform === "win32" && /^\/[a-zA-Z]\//.test(rawPath))
      ? rawPath[1] + ":" + rawPath.slice(2) : rawPath;
    const target = normPath ? resolve(normPath) : process.cwd();
    // Line numbers + filenames are mandatory: the formatter regexes below rely on them.
    const args = ["-n", "--no-heading", "--with-filename"];
    const plain = !!opts.plain;

    if (opts.caseInsensitive) args.push("-i");
    if (opts.context && opts.context > 0) args.push("-C", String(opts.context));
    if (opts.glob) args.push("--glob", opts.glob);
    if (opts.type) args.push("--type", opts.type);

    // NOTE(review): rg's -m caps matches PER FILE, not in total — confirm that
    // is the intended meaning of opts.limit.
    const limit = (opts.limit && opts.limit > 0) ? opts.limit : DEFAULT_LIMIT;
    args.push("-m", String(limit));
    // "--" ends option parsing so a pattern starting with "-" is not read as a flag.
    args.push("--", pattern, target);

    let stdout = "";
    let totalBytes = 0;
    let killed = false;

    const child = spawn("rg", args, { timeout: TIMEOUT });

    child.stdout.on("data", (chunk) => {
      totalBytes += chunk.length;
      if (totalBytes > MAX_OUTPUT) {
        // Stop accumulating and kill rg; the "close" handler reports truncation.
        killed = true;
        child.kill();
        return;
      }
      stdout += chunk.toString("utf-8");
    });

    let stderrBuf = "";
    child.stderr.on("data", (chunk) => { stderrBuf += chunk.toString("utf-8"); });

    // "error" fires when the process could not be spawned at all.
    child.on("error", (err) => {
      if (err.code === "ENOENT") {
        reject(new Error("ripgrep (rg) not found. Install: https://github.com/BurntSushi/ripgrep#installation"));
      } else {
        reject(new Error(`rg spawn error: ${err.message}`));
      }
    });

    child.on("close", (code) => {
      if (killed) {
        resolve_("GREP_OUTPUT_TRUNCATED: exceeded 10MB. Use specific glob/path.");
        return;
      }
      // rg exit code 1 means "ran fine, nothing matched".
      if (code === 1) {
        resolve_("No matches found.");
        return;
      }
      // code === null: terminated by a signal (e.g. the spawn timeout) — fall through.
      if (code !== 0 && code !== null) {
        const reason = stderrBuf.trim() || "unknown error";
        reject(new Error(`GREP_ERROR: rg exit ${code} — ${reason}`));
        return;
      }

      // Format results with hash tags
      const resultLines = stdout.trimEnd().split("\n");
      const formatted = [];
      const db = getGraphDB(target);
      const relCache = new Map(); // absolute file path → relative path, memoized per call

      // Match line: file:42:content (optional Windows drive prefix)
      const matchRe = /^((?:[A-Za-z]:)?[^:]*):(\d+):(.*)$/;
      // Context line: file-42-content
      // NOTE(review): [^-]* stops at the first "-", so filenames containing
      // hyphens fall through unformatted (pushed as-is below) — confirm acceptable.
      const ctxRe = /^((?:[A-Za-z]:)?[^-]*)-(\d+)-(.*)$/;

      if (plain) {
        // Plain mode: file:line:content without hash tags
        for (const rl of resultLines) {
          formatted.push(rl);
        }
      } else {
        for (const rl of resultLines) {
          // "--" is rg's separator between non-adjacent context groups.
          if (!rl || rl === "--") { formatted.push(rl); continue; }
          // Normalize backslashes for consistent regex matching on Windows
          const nl = rl.replace(/\\/g, "/");

          const m = matchRe.exec(nl);
          if (m) {
            // Tag from the hashed line content; optionally append a graph annotation.
            const tag = lineTag(fnv1a(m[3]));
            let anno = "";
            if (db) {
              let rel = relCache.get(m[1]);
              if (rel === undefined) { rel = getRelativePath(resolve(m[1])) || ""; relCache.set(m[1], rel); }
              if (rel) { const a = matchAnnotation(db, rel, +m[2]); if (a) anno = ` ${a}`; }
            }
            formatted.push(`${m[1]}:>>${tag}.${m[2]}\t${m[3]}${anno}`);
            continue;
          }

          const c = ctxRe.exec(nl);
          if (c) {
            const tag = lineTag(fnv1a(c[3]));
            formatted.push(`${c[1]}: ${tag}.${c[2]}\t${c[3]}`);
            continue;
          }

          // Unrecognized line shape: pass through unchanged.
          formatted.push(rl);
        }
      }

      resolve_(`\`\`\`\n${formatted.join("\n")}\n\`\`\``);
    });
  });
}
@@ -0,0 +1,114 @@
1
+ /**
2
+ * Security boundaries for file operations.
3
+ *
4
+ * Claude Code provides its own sandbox (permissions, project scope).
5
+ * This module handles: path canonicalization, symlink resolution,
6
+ * binary file detection, and size limits.
7
+ */
8
+
9
+ import { realpathSync, statSync, existsSync, readdirSync, openSync, readSync, closeSync } from "node:fs";
10
+ import { resolve, isAbsolute, dirname } from "node:path";
11
+
12
+ const MAX_FILE_SIZE = 10 * 1024 * 1024; // 10 MB
13
+
14
/**
 * Convert a Git Bash drive path (/c/Users/...) to c:/Users/... on Windows.
 * Node's resolve() would otherwise anchor /c/ to the current drive root,
 * producing paths like D:\c\Users.
 */
function normalizePath(p) {
  const isGitBashDrive = process.platform === "win32" && /^\/[a-zA-Z]\//.test(p);
  if (!isGitBashDrive) return p;
  return `${p[1]}:${p.slice(2)}`;
}
24
+
25
/**
 * Validate a file path against security boundaries and return the
 * canonicalized absolute path (symlinks resolved).
 *
 * Checks, in order: existence (with a parent-directory listing as a hint),
 * canonicalization via realpath, regular-file type, size limit, and a
 * null-byte probe of the first 8KB to reject binary files.
 *
 * @param {string} filePath - absolute, relative, or Git Bash style path
 * @returns {string} canonical absolute path (directories returned as-is)
 * @throws {Error} FILE_NOT_FOUND / NOT_REGULAR_FILE / FILE_TOO_LARGE / BINARY_FILE
 */
export function validatePath(filePath) {
  if (!filePath) throw new Error("Empty file path");

  const normalized = normalizePath(filePath);
  const abs = isAbsolute(normalized) ? normalized : resolve(process.cwd(), normalized);

  // Check existence — show parent directory contents as fallback
  if (!existsSync(abs)) {
    let hint = "";
    try {
      const parent = dirname(abs);
      if (existsSync(parent)) {
        const entries = readdirSync(parent, { withFileTypes: true });
        const listing = entries.slice(0, 20).map(e =>
          `  ${e.isDirectory() ? "d" : "f"} ${e.name}`
        ).join("\n");
        hint = `\n\nParent directory ${parent} contains:\n${listing}`;
        if (entries.length > 20) hint += `\n  ... (${entries.length - 20} more)`;
      }
    } catch {} // hint is best-effort; never mask the FILE_NOT_FOUND error
    throw new Error(`FILE_NOT_FOUND: ${abs}${hint}`);
  }

  // Canonicalize (resolves symlinks)
  let real;
  try {
    real = realpathSync(abs);
  } catch (e) {
    throw new Error(`Cannot resolve path: ${abs} (${e.message})`);
  }

  // Check file type
  const stat = statSync(real);
  if (stat.isDirectory()) return real; // directories allowed for listing
  if (!stat.isFile()) {
    // After realpath, statSync follows no further links; non-regular here
    // means FIFO/socket/device — the "symlink" label is kept for compatibility.
    const type = stat.isSymbolicLink() ? "symlink" : "special";
    throw new Error(`NOT_REGULAR_FILE: ${real} (${type}). Cannot read special files.`);
  }

  // Size check
  if (stat.size > MAX_FILE_SIZE) {
    throw new Error(`FILE_TOO_LARGE: ${real} (${(stat.size / 1024 / 1024).toFixed(1)}MB, max ${MAX_FILE_SIZE / 1024 / 1024}MB). Use offset/limit to read a range.`);
  }

  // Binary detection (check first 8KB for null bytes — only read 8KB, not whole file)
  const bfd = openSync(real, "r");
  const probe = Buffer.alloc(8192);
  let bytesRead = 0;
  try {
    bytesRead = readSync(bfd, probe, 0, 8192, 0);
  } finally {
    closeSync(bfd); // always release the fd, even if readSync throws
  }
  if (probe.subarray(0, bytesRead).includes(0)) {
    throw new Error(`BINARY_FILE: ${real}. Use built-in Read tool (supports images, PDFs, notebooks).`);
  }

  return real;
}
87
+
88
/**
 * Validate a path for writing (the file itself need not exist yet).
 * Resolves to an absolute path; when the file is missing, requires that
 * some ancestor directory exists (intermediate directories are created
 * later by write_file).
 */
export function validateWritePath(filePath) {
  if (!filePath) throw new Error("Empty file path");

  const normalized = normalizePath(filePath);
  const abs = isAbsolute(normalized) ? normalized : resolve(process.cwd(), normalized);

  // Existing target (file or dir): nothing more to verify.
  if (existsSync(abs)) return abs;

  // Direct parent exists: write can proceed straight away.
  const parent = resolve(abs, "..");
  if (existsSync(parent)) return abs;

  // Walk upward from the grandparent until an existing ancestor or the
  // filesystem root (where resolve(dir, "..") === dir) is reached.
  let ancestor = resolve(parent, "..");
  while (!existsSync(ancestor) && ancestor !== resolve(ancestor, "..")) {
    ancestor = resolve(ancestor, "..");
  }
  if (!existsSync(ancestor)) {
    throw new Error(`No existing ancestor directory for: ${abs}`);
  }

  return abs;
}
package/lib/setup.mjs ADDED
@@ -0,0 +1,132 @@
1
+ /**
2
+ * Setup hex-line hooks for CLI agents.
3
+ *
4
+ * Idempotent: re-running with same config produces no changes.
5
+ * Supports: claude (hooks in settings.local.json), gemini, codex (info only).
6
+ */
7
+
8
+ import { readFileSync, writeFileSync, existsSync, mkdirSync } from "node:fs";
9
+ import { resolve, dirname } from "node:path";
10
+
11
// Command Claude Code runs for every configured hook event (path is relative
// to the project root where the settings file lives).
const HOOK_COMMAND = "node mcp/hex-line-mcp/hook.mjs";

// Desired hook wiring for Claude Code, keyed by event name. Each entry pairs
// a tool matcher with the hook command and a timeout (seconds).
const CLAUDE_HOOKS = {
  // Runs on every session start (matcher "*").
  SessionStart: {
    matcher: "*",
    hooks: [{ type: "command", command: HOOK_COMMAND, timeout: 5 }],
  },
  // Runs before the listed file/search/shell tools execute.
  PreToolUse: {
    matcher: "Read|Edit|Write|Grep|Bash",
    hooks: [{ type: "command", command: HOOK_COMMAND, timeout: 5 }],
  },
  // Runs after Bash tool calls; given a longer timeout than the pre-hooks.
  PostToolUse: {
    matcher: "Bash",
    hooks: [{ type: "command", command: HOOK_COMMAND, timeout: 10 }],
  },
};
27
+
28
+ // ---- Helpers ----
29
+
30
// Read and parse a JSON file; returns null when the file does not exist.
// Malformed JSON still throws, surfacing the error to the caller.
function readJson(filePath) {
  if (!existsSync(filePath)) {
    return null;
  }
  const raw = readFileSync(filePath, "utf-8");
  return JSON.parse(raw);
}
34
+
35
// Serialize `data` as pretty-printed JSON (2-space indent, trailing newline),
// creating any missing parent directories first.
function writeJson(filePath, data) {
  const dir = dirname(filePath);
  mkdirSync(dir, { recursive: true });
  const text = `${JSON.stringify(data, null, 2)}\n`;
  writeFileSync(filePath, text, "utf-8");
}
39
+
40
/**
 * Locate the hook entry whose hooks[] contains a hook with the given command.
 * Entries with a missing or non-array hooks field are skipped.
 * @param {Array} entries - Array of {matcher, hooks[]} objects
 * @param {string} command - Exact command string to look for
 * @returns {number} Index of the first matching entry, or -1
 */
function findEntryByCommand(entries, command) {
  for (let i = 0; i < entries.length; i++) {
    const { hooks } = entries[i];
    if (Array.isArray(hooks) && hooks.some((h) => h.command === command)) {
      return i;
    }
  }
  return -1;
}
51
+
52
+ // ---- Agent configurators ----
53
+
54
// Install hex-line hooks into .claude/settings.local.json in the current
// working directory. Idempotent: when the config already matches, nothing
// is written and a "no changes" status is returned.
function setupClaude() {
  const settingsPath = resolve(process.cwd(), ".claude/settings.local.json");
  const config = readJson(settingsPath) || {};

  // Ensure a hooks object exists without clobbering unrelated settings.
  if (!config.hooks || typeof config.hooks !== "object") {
    config.hooks = {};
  }

  let changed = false;

  for (const [event, desired] of Object.entries(CLAUDE_HOOKS)) {
    if (!Array.isArray(config.hooks[event])) {
      config.hooks[event] = [];
    }

    const entries = config.hooks[event];
    const idx = findEntryByCommand(entries, HOOK_COMMAND);

    if (idx >= 0) {
      // Entry exists — check if matcher and timeout match.
      // findEntryByCommand only returns entries whose hooks[] holds our
      // command, so existing.hooks[0] below is safe to dereference.
      // NOTE(review): only the FIRST hook's timeout is compared — confirm
      // entries never carry more than one hook per command.
      const existing = entries[idx];
      if (existing.matcher === desired.matcher &&
          existing.hooks.length === desired.hooks.length &&
          existing.hooks[0].timeout === desired.hooks[0].timeout) {
        continue; // Already configured exactly
      }
      // Update in place
      entries[idx] = { matcher: desired.matcher, hooks: [...desired.hooks] };
      changed = true;
    } else {
      entries.push({ matcher: desired.matcher, hooks: [...desired.hooks] });
      changed = true;
    }
  }

  // Hooks must be globally enabled for the entries above to fire.
  if (config.disableAllHooks !== false) {
    config.disableAllHooks = false;
    changed = true;
  }

  if (!changed) {
    return "Claude: already configured, no changes";
  }

  writeJson(settingsPath, config);
  return "Claude: PreToolUse + PostToolUse -> mcp/hex-line-mcp/hook.mjs OK";
}
101
+
102
// Gemini CLI offers no hook mechanism; report that instead of configuring.
function setupGemini() {
  const message = "Gemini: Not supported (Gemini CLI does not support hooks. Add MCP Tool Preferences to GEMINI.md instead)";
  return message;
}
105
+
106
// Codex CLI offers no hook mechanism; report that instead of configuring.
function setupCodex() {
  const message = "Codex: Not supported (Codex CLI does not support hooks. Add MCP Tool Preferences to AGENTS.md instead)";
  return message;
}
109
+
110
+ // ---- Public API ----
111
+
112
// Dispatch table: agent name → configurator; each returns a status string.
const AGENTS = { claude: setupClaude, gemini: setupGemini, codex: setupCodex };
113
+
114
/**
 * Configure hex-line hooks for one or all supported agents.
 * @param {string} [agent="all"] - "claude", "gemini", "codex", or "all"
 * @returns {string} Human-readable status report
 * @throws {Error} UNKNOWN_AGENT for unrecognized agent names
 */
export function setupHooks(agent = "all") {
  const target = (agent || "all").toLowerCase();

  const known = target === "all" || Boolean(AGENTS[target]);
  if (!known) {
    throw new Error(`UNKNOWN_AGENT: '${agent}'. Supported: claude, gemini, codex, all`);
  }

  // Run each configurator, indenting its status line under the header.
  const names = target === "all" ? Object.keys(AGENTS) : [target];
  const lines = [`Hooks configured for ${target}:`];
  for (const name of names) {
    lines.push(" " + AGENTS[name]());
  }
  lines.push("\nRestart Claude Code to apply hook changes.");
  return lines.join("\n");
}
package/lib/tree.mjs ADDED
@@ -0,0 +1,162 @@
1
+ /**
2
+ * Compact directory tree with .gitignore support.
3
+ *
4
+ * Skips common build/cache dirs by default.
5
+ * Parses .gitignore patterns (simple subset: globs, comments, negation).
6
+ */
7
+
8
+ import { readdirSync, readFileSync, statSync, existsSync } from "node:fs";
9
+ import { resolve, basename, join } from "node:path";
10
+
11
// Build/cache directories always pruned from the tree, independent of .gitignore.
const SKIP_DIRS = new Set([
  "node_modules", ".git", "dist", "build", "__pycache__", ".next", "coverage",
]);
14
+
15
/**
 * Parse .gitignore content into an array of matcher objects.
 * Supported subset: comments (#), negation (!), wildcards (* / ** / ?),
 * and directory-only patterns (trailing /).
 * @param {string} content - raw .gitignore text
 * @returns {{re: RegExp, negate: boolean, dirOnly: boolean}[]}
 */
function parseGitignore(content) {
  const patterns = [];
  for (const raw of content.replace(/\r\n/g, "\n").split("\n")) {
    const line = raw.trim();
    if (line === "" || line.startsWith("#")) continue;

    const negate = line.startsWith("!");
    let pat = negate ? line.slice(1) : line;
    if (pat.startsWith("/")) pat = pat.slice(1); // leading anchor is dropped
    const dirOnly = pat.endsWith("/");
    if (dirOnly) pat = pat.slice(0, -1);

    // Glob → regex: escape regex specials, let ** cross "/" while * stays
    // within a segment, and ? matches one character. "\0" temporarily
    // stands in for ** so the single-* replacement cannot touch it.
    const body = pat
      .replace(/[.+^${}()|[\]\\]/g, "\\$&")
      .replace(/\*\*/g, "\0")
      .replace(/\*/g, "[^/]*")
      .replace(/\0/g, ".*")
      .replace(/\?/g, ".");
    patterns.push({ re: new RegExp(`^${body}$`), negate, dirOnly });
  }
  return patterns;
}
40
+
41
// Apply gitignore patterns to one entry name; the LAST matching pattern wins,
// mirroring gitignore semantics where later rules override earlier ones.
// Directory-only patterns are skipped for non-directories.
function isIgnored(name, isDir, patterns) {
  let verdict = false;
  for (const pattern of patterns) {
    if (pattern.dirOnly && !isDir) continue;
    if (pattern.re.test(name)) {
      verdict = !pattern.negate;
    }
  }
  return verdict;
}
49
+
50
// Human-readable byte size: MB or KB with one decimal, raw bytes below 1KB.
function formatSize(bytes) {
  const KB = 1024;
  const MB = KB * 1024;
  if (bytes >= MB) return `${(bytes / MB).toFixed(1)}MB`;
  if (bytes >= KB) return `${(bytes / KB).toFixed(1)}KB`;
  return `${bytes}B`;
}
55
+
56
/**
 * Build directory tree recursively.
 * The root's children are rendered at depth 1; entries deeper than maxDepth
 * are pruned. Compact format shows only the first level and omits sizes.
 * @param {string} dirPath - Absolute directory path
 * @param {object} opts - { max_depth, gitignore, format }
 * @returns {string} Formatted tree
 * @throws {Error} DIRECTORY_NOT_FOUND / "Not a directory" on bad input
 */
export function directoryTree(dirPath, opts = {}) {
  const compact = opts.format === "compact";
  // Compact mode forces a single level regardless of max_depth.
  const maxDepth = compact ? 1 : (opts.max_depth ?? 3);
  const useGitignore = opts.gitignore ?? true; // on by default

  // Convert Git Bash /c/path → c:/path on Windows
  const normalized = (process.platform === "win32" && /^\/[a-zA-Z]\//.test(dirPath))
    ? dirPath[1] + ":" + dirPath.slice(2) : dirPath;
  const abs = resolve(normalized);
  if (!existsSync(abs)) throw new Error(`DIRECTORY_NOT_FOUND: ${abs}. Check path or use directory_tree on parent directory.`);
  const rootStat = statSync(abs);
  if (!rootStat.isDirectory()) throw new Error(`Not a directory: ${abs}`);

  // Load .gitignore — only the root-level file; nested .gitignores are not read.
  let patterns = [];
  if (useGitignore) {
    const gi = join(abs, ".gitignore");
    if (existsSync(gi)) {
      try { patterns = parseGitignore(readFileSync(gi, "utf-8")); } catch { /* skip */ }
    }
  }

  let totalFiles = 0; // NOTE(review): files below maxDepth are NOT counted here
  let totalSize = 0;  // accumulated only in non-compact mode
  const lines = [];

  // Depth-first walk; `prefix` carries the indentation for this level.
  function walk(dir, prefix, depth) {
    if (depth > maxDepth) return;
    let entries;
    try {
      entries = readdirSync(dir, { withFileTypes: true });
    } catch { return; } // unreadable directory: silently skipped

    // Sort: directories first, then files, alphabetical
    entries.sort((a, b) => {
      const aDir = a.isDirectory() ? 0 : 1;
      const bDir = b.isDirectory() ? 0 : 1;
      if (aDir !== bDir) return aDir - bDir;
      return a.name.localeCompare(b.name);
    });

    for (const entry of entries) {
      const name = entry.name;
      const isDir = entry.isDirectory();

      if (SKIP_DIRS.has(name) && isDir) continue;
      if (isIgnored(name, isDir, patterns)) continue;

      const full = join(dir, name);

      if (isDir) {
        if (compact) {
          lines.push(`${prefix}${name}/`);
        } else {
          // Count files in subdirectory (recursive, independent of maxDepth)
          const subInfo = { files: 0 };
          countFiles(full, subInfo);
          lines.push(`${prefix}${name}/ (${subInfo.files} files)`);
        }
        walk(full, prefix + " ", depth + 1);
      } else {
        totalFiles++;
        if (compact) {
          lines.push(`${prefix}${name}`);
        } else {
          let size = 0;
          try { size = statSync(full).size; } catch { /* skip */ }
          totalSize += size;
          // Only annotate sizes of 1KB or more to keep the listing compact.
          if (size >= 1024) {
            lines.push(`${prefix}${name} (${formatSize(size)})`);
          } else {
            lines.push(`${prefix}${name}`);
          }
        }
      }
    }
  }

  // Recursive file count for one directory, honoring SKIP_DIRS and .gitignore.
  function countFiles(dir, info, depth = 0) {
    if (depth > 10) return; // safety limit for deep trees
    let entries;
    try { entries = readdirSync(dir, { withFileTypes: true }); } catch { return; }
    for (const entry of entries) {
      if (SKIP_DIRS.has(entry.name) && entry.isDirectory()) continue;
      if (isIgnored(entry.name, entry.isDirectory(), patterns)) continue;
      if (entry.isDirectory()) {
        countFiles(join(dir, entry.name), info, depth + 1);
      } else {
        info.files++;
      }
    }
  }

  const rootName = basename(abs);
  walk(abs, " ", 1);

  const header = compact
    ? `Directory: ${rootName}/ (${totalFiles} files)`
    : `Directory: ${rootName}/ (${totalFiles} files, ${formatSize(totalSize)})`;
  return `${header}\n\n${rootName}/\n${lines.join("\n")}`;
}
@@ -0,0 +1,56 @@
1
+ import { readFile, writeFile } from "node:fs/promises";
2
+ import { join } from "node:path";
3
+ import { tmpdir } from "node:os";
4
+
5
// Cross-run cache of the last registry lookup, stored in the OS temp dir.
const CACHE_FILE = join(tmpdir(), "hex-line-mcp-update.json");
const CHECK_INTERVAL = 24 * 60 * 60 * 1000; // re-query the registry at most once per 24 hours
const TIMEOUT = 3000; // abort the registry request after 3 seconds
8
+
9
// Load the cached update-check entry. Any failure (missing file, bad JSON,
// permission error) is treated as "no cache".
async function readCache() {
  try {
    const raw = await readFile(CACHE_FILE, "utf-8");
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
14
+
15
// Persist the update-check entry; failures are ignored (best-effort cache).
async function writeCache(entry) {
  const payload = JSON.stringify(entry);
  await writeFile(CACHE_FILE, payload).catch(() => {});
}
18
+
19
// Ask the npm registry for the latest published version of a package.
// Returns the version string, or null on any failure (network error,
// timeout abort, or non-2xx response).
async function fetchLatest(packageName) {
  try {
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), TIMEOUT);
    const url = `https://registry.npmjs.org/${packageName}/latest`;
    const res = await fetch(url, { signal: controller.signal });
    clearTimeout(timer);
    if (!res.ok) return null;
    const body = await res.json();
    return body.version ?? null;
  } catch {
    return null;
  }
}
30
+
31
/**
 * Compare two dotted version strings on their first three numeric parts.
 * Missing or non-numeric parts fall back to 0 (NaN is falsy).
 * @returns {number} -1 if a < b, 1 if a > b, 0 if equal
 */
function compareVersions(a, b) {
  const partsA = a.split(".").map(Number);
  const partsB = b.split(".").map(Number);
  for (let i = 0; i < 3; i++) {
    const x = partsA[i] || 0;
    const y = partsB[i] || 0;
    if (x < y) return -1;
    if (x > y) return 1;
  }
  return 0;
}
40
+
41
/**
 * Check npm for a newer version of `packageName` and print a one-line
 * upgrade notice to stderr when one exists.
 *
 * The last registry answer is cached in CACHE_FILE for CHECK_INTERVAL, so the
 * network is hit at most once per interval. All failures are silent
 * (best-effort: an update check must never break the host process).
 *
 * @param {string} packageName - npm package to check
 * @param {string} currentVersion - currently installed version
 * @returns {Promise<void>}
 */
export async function checkForUpdates(packageName, currentVersion) {
  // Single notification path shared by the cached and fresh branches
  // (previously duplicated inline).
  const notifyIfNewer = (latest) => {
    if (latest && compareVersions(currentVersion, latest) < 0) {
      process.stderr.write(`${packageName} update: ${currentVersion} → ${latest}. Run: npm install -g ${packageName}\n`);
    }
  };

  const cached = await readCache();
  if (cached && Date.now() - cached.timestamp < CHECK_INTERVAL) {
    notifyIfNewer(cached.latest);
    return;
  }

  const latest = await fetchLatest(packageName);
  if (latest) {
    await writeCache({ timestamp: Date.now(), latest });
    notifyIfNewer(latest);
  }
}
package/lib/verify.mjs ADDED
@@ -0,0 +1,54 @@
1
+ /**
2
+ * Checksum verification without re-reading full file.
3
+ * Validates range checksums from prior reads.
4
+ */
5
+
6
+ import { readFileSync } from "node:fs";
7
+ import { fnv1a, rangeChecksum, parseChecksum } from "./hash.mjs";
8
+ import { validatePath } from "./security.mjs";
9
+
10
/**
 * Verify previously issued range checksums against the current file state
 * without re-reading line ranges per checksum (line hashes are computed once).
 *
 * @param {string} filePath - file to verify (validated via validatePath)
 * @param {string[]} checksums - array of "start-end:8hex" strings
 * @returns {string} summary line when all checksums are valid, otherwise a
 *   per-checksum report marking entries valid / STALE / INVALID
 */
export function verifyChecksums(filePath, checksums) {
  const real = validatePath(filePath);
  // Normalize CRLF so hashes are stable across platforms.
  const content = readFileSync(real, "utf-8").replace(/\r\n/g, "\n");
  const lines = content.split("\n");

  // Robustness: an empty request previously fell through to join([]) and
  // returned an empty string; report it explicitly instead.
  if (!checksums || checksums.length === 0) {
    return "No checksums provided.";
  }

  // Pre-compute all line hashes once; each range below reuses the slices.
  const lineHashes = lines.map((l) => fnv1a(l));

  const results = [];
  let allValid = true;

  for (const cs of checksums) {
    const parsed = parseChecksum(cs);

    // Ranges are 1-indexed and inclusive; reject anything outside the file.
    if (parsed.start < 1 || parsed.end > lines.length) {
      results.push(`${cs}: INVALID (range ${parsed.start}-${parsed.end} exceeds file length ${lines.length})`);
      allValid = false;
      continue;
    }

    const currentHashes = lineHashes.slice(parsed.start - 1, parsed.end);
    const current = rangeChecksum(currentHashes, parsed.start, parsed.end);
    const currentHex = current.split(":")[1];

    if (currentHex === parsed.hex) {
      results.push(`${cs}: valid`);
    } else {
      results.push(`${cs}: STALE → current: ${current}`);
      allValid = false;
    }
  }

  if (allValid) {
    return `All ${checksums.length} checksum(s) valid for ${filePath}`;
  }

  return results.join("\n");
}