wolverine-ai 4.7.0 → 4.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/agent/agent-engine.js +98 -2
- package/src/brain/brain.js +5 -1
- package/src/brain/tool-router.js +211 -0
- package/src/core/ai-client.js +5 -0
- package/src/core/config.js +6 -2
- package/src/core/runner.js +36 -27
- package/src/core/wolverine.js +24 -17
- package/src/skills/deps.js +5 -0
- package/src/vault/vault-manager.js +37 -41
package/package.json CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "wolverine-ai",
-  "version": "4.7.0",
+  "version": "4.9.0",
   "description": "Self-healing Node.js server framework powered by AI. Catches crashes, diagnoses errors, generates fixes, verifies, and restarts — automatically.",
   "main": "src/index.js",
   "bin": {
package/src/agent/agent-engine.js CHANGED

@@ -121,7 +121,7 @@ const TOOL_DEFINITIONS = [
       type: "object",
       properties: {
         command: { type: "string", description: "Shell command to execute" },
-        timeout: { type: "number", description: "Timeout in ms (default:
+        timeout: { type: "number", description: "Timeout in ms (default: 30000, max: 60000)" },
       },
       required: ["command"],
     },

@@ -442,6 +442,22 @@ const TOOL_DEFINITIONS = [
       },
     },
   },
+  // ── ENVIRONMENT ──
+  {
+    type: "function",
+    function: {
+      name: "add_env_var",
+      description: "Append a key=value pair to .env.local. Only adds if the key does not already exist. Use for missing environment variable errors.",
+      parameters: {
+        type: "object",
+        properties: {
+          key: { type: "string", description: "Environment variable name (e.g. DATABASE_URL)" },
+          value: { type: "string", description: "Value to set" },
+        },
+        required: ["key", "value"],
+      },
+    },
+  },
   // ── TASK MANAGEMENT ──
   {
     type: "function",
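
For orientation, here is roughly what a model-emitted call to the new tool could look like, assuming the OpenAI-style function-calling shape these TOOL_DEFINITIONS follow (the id and argument values below are invented for illustration):

    // Hypothetical assistant tool call; arguments typically arrive as a JSON string.
    const toolCall = {
      id: "call_1",
      type: "function",
      function: {
        name: "add_env_var",
        arguments: JSON.stringify({ key: "DATABASE_URL", value: "postgres://localhost:5432/app" }),
      },
    };
    const args = JSON.parse(toolCall.function.arguments);
    // AgentEngine then dispatches via: case "add_env_var": return this._addEnvVar(args);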
@@ -814,6 +830,7 @@ class AgentEngine {
       case "check_file_descriptors": return this._checkFileDescriptors(args);
       case "check_event_loop": return this._checkEventLoop(args);
       case "check_websocket": return this._checkWebsocket(args);
+      case "add_env_var": return this._addEnvVar(args);
       case "done": return this._done(args);
       // Legacy aliases
       case "list_files": return this._globFiles({ pattern: (args.dir || ".") + "/*" + (args.pattern || "") });

@@ -1204,7 +1221,8 @@ class AgentEngine {
     try {
       let Database;
       try { Database = require("better-sqlite3"); } catch {
-
+        // Fallback: try PostgreSQL via pg if available
+        return this._inspectDbPg(args);
       }
       const db = new Database(dbPath, { readonly: true });
       let result;
@@ -1236,6 +1254,55 @@ class AgentEngine {
     } catch (e) { return { content: `DB error: ${e.message}` }; }
   }

+  async _inspectDbPg(args) {
+    let pg;
+    try { pg = require("pg"); } catch {
+      return { content: "Neither better-sqlite3 nor pg is installed. Run: npm install better-sqlite3 (for SQLite) or npm install pg (for PostgreSQL)" };
+    }
+    // db_path is treated as a connection string or uses DATABASE_URL
+    const connectionString = args.db_path.startsWith("postgres")
+      ? args.db_path
+      : process.env.DATABASE_URL;
+    if (!connectionString) {
+      return { content: "PostgreSQL: no connection string. Set DATABASE_URL or pass a postgres:// URI as db_path." };
+    }
+    const client = new pg.Client({ connectionString, statement_timeout: 10000 });
+    try {
+      await client.connect();
+      let result;
+      if (args.action === "tables") {
+        const res = await client.query("SELECT tablename FROM pg_tables WHERE schemaname = 'public' ORDER BY tablename");
+        result = res.rows.map(r => r.tablename).join("\n") || "(no tables)";
+      } else if (args.action === "schema") {
+        const res = await client.query(`SELECT table_name, column_name, data_type, is_nullable FROM information_schema.columns WHERE table_schema = 'public' ORDER BY table_name, ordinal_position`);
+        const tables = {};
+        for (const row of res.rows) {
+          if (!tables[row.table_name]) tables[row.table_name] = [];
+          tables[row.table_name].push(`  ${row.column_name} ${row.data_type}${row.is_nullable === "NO" ? " NOT NULL" : ""}`);
+        }
+        result = Object.entries(tables).map(([t, cols]) => `TABLE ${t}:\n${cols.join("\n")}`).join("\n\n") || "(no tables)";
+      } else if (args.action === "query") {
+        if (!args.sql) return { content: "Error: sql required for query action" };
+        const upper = args.sql.trim().toUpperCase();
+        if (!upper.startsWith("SELECT") && !upper.startsWith("SHOW")) {
+          return { content: "BLOCKED: inspect_db only allows SELECT/SHOW for PostgreSQL. Use run_db_fix for writes." };
+        }
+        const res = await client.query(args.sql);
+        result = JSON.stringify(res.rows.slice(0, 50), null, 2);
+        if (res.rows.length > 50) result += `\n... (${res.rows.length} total rows, showing first 50)`;
+      } else {
+        result = "Unknown action. Use: tables, schema, or query";
+      }
+      const { redact } = require("../security/secret-redactor");
+      console.log(chalk.gray(`  🗃️ DB (pg) ${args.action}: ${args.db_path}`));
+      return { content: redact(result) };
+    } catch (e) {
+      return { content: `PostgreSQL error: ${e.message}` };
+    } finally {
+      try { await client.end(); } catch {}
+    }
+  }
+
   _runDbFix(args) {
     const dbPath = path.resolve(this.cwd, args.db_path);
     try {
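
Putting the three branches together, a minimal sketch of the argument shapes the PostgreSQL fallback accepts (the engine instance, connection string, and SQL here are invented for illustration):

    // inside an async context:
    await engine._inspectDbPg({ db_path: "postgres://localhost/app", action: "tables" });  // list public-schema tables
    await engine._inspectDbPg({ db_path: "postgres://localhost/app", action: "schema" });  // columns per table, NOT NULL flagged
    await engine._inspectDbPg({
      db_path: "postgres://localhost/app",
      action: "query",
      sql: "SELECT id, email FROM users LIMIT 5",  // must start with SELECT or SHOW; writes are blocked
    });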
@@ -1752,6 +1819,35 @@ class AgentEngine {
     };
   }

+  // ── Environment variable tool ──
+  // Controlled exception to _isProtectedPath: allows APPENDING to .env.local only.
+  _addEnvVar(args) {
+    const key = (args.key || "").trim();
+    const value = args.value || "";
+    if (!key || !/^[A-Za-z_][A-Za-z0-9_]*$/.test(key)) {
+      return { content: "Error: invalid env var name. Must match [A-Za-z_][A-Za-z0-9_]*" };
+    }
+    const envPath = path.resolve(this.cwd, ".env.local");
+    try {
+      // Check if key already exists
+      if (fs.existsSync(envPath)) {
+        const existing = fs.readFileSync(envPath, "utf-8");
+        const keyRegex = new RegExp(`^${key}=`, "m");
+        if (keyRegex.test(existing)) {
+          return { content: `${key} already exists in .env.local — not overwriting.` };
+        }
+      }
+      // Append the key=value pair
+      const line = `${key}=${value}\n`;
+      fs.appendFileSync(envPath, line, "utf-8");
+      console.log(chalk.green(`  🔑 Added ${key} to .env.local`));
+      if (this.logger) this.logger.info("agent.env_var_add", `Added ${key} to .env.local`);
+      return { content: `Successfully added ${key} to .env.local` };
+    } catch (err) {
+      return { content: `Error adding env var: ${err.message}` };
+    }
+  }
+
   // ── Protected path guard ──
   // Wolverine's own source code is off-limits to the agent.
   // The agent should build/fix the USER's project, not modify itself.
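
The duplicate-key check builds a RegExp from the key name; a standalone illustration of why that is safe here (the file contents are invented):

    const existing = "PORT=3000\nDATABASE_URL=postgres://old\n";
    new RegExp("^DATABASE_URL=", "m").test(existing); // true  → _addEnvVar refuses to overwrite
    new RegExp("^API_KEY=", "m").test(existing);      // false → the append proceeds
    // Keys are pre-validated against /^[A-Za-z_][A-Za-z0-9_]*$/, so no regex
    // metacharacters can leak into the constructed pattern.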
package/src/brain/brain.js CHANGED

@@ -145,6 +145,10 @@ const SEED_DOCS = [
     text: "Server context scanner (wolverine --init): scans server/ directory on every startup to build .wolverine/server-context.json. Extracts routes (HTTP methods + paths from fastify/express), middleware stack, database type + tables, config structure, dependencies, env vars used (process.env.X patterns), and full file tree. Context summary auto-injected into agent's heal prompt so it knows the server's route map, DB schema, and dependencies without re-scanning. Manual scan: wolverine --init. Auto-scan: runs silently on every boot. The context is read-only — never modified by the agent.",
     metadata: { topic: "server-context" },
   },
+  {
+    text: "Tool router (src/brain/tool-router.js): maps error types to recommended tool chains. Injected into agent prompt automatically — agent sees 'TOOL ROUTE: diagnose with [X, Y], fix with [Z]. Strategy: ...' for every error it heals. 25+ error categories mapped: TypeError→read_file+edit_file, ENOENT→list_dir+write_file, ECONNREFUSED→check_network+check_port+inspect_cache, EMFILE→check_file_descriptors+disk_cleanup, ENOSPC→disk_cleanup, certificate→inspect_certificate, OOM→check_memory+check_event_loop, websocket→check_websocket, database→inspect_db+run_db_fix (with prereq: inspect_db FIRST), missing_module→audit_deps+verify_node_modules, env_missing→inspect_env+add_env_var. Lookup is O(1) by error type + regex fallback. No AI tokens used for routing.",
+    metadata: { topic: "tool-router" },
+  },
   {
     text: "Telemetry architecture: 4 files, ~250 lines total. heartbeat.js sends one HTTP POST every 60s (5s timeout, non-blocking). register.js auto-registers and caches key in memory + disk. queue.js appends to JSONL file only on failure, trims lazily. telemetry.js collects from subsystems using optional chaining (no crashes if subsystem missing). All secrets redacted before sending. Response bodies drained immediately (res.resume). No blocking, no delays, no busy waits.",
     metadata: { topic: "telemetry-architecture" },

@@ -154,7 +158,7 @@ const SEED_DOCS = [
     metadata: { topic: "fastify" },
   },
   {
-    text: "npm package: wolverine-ai on npmjs.com (
+    text: "npm package: wolverine-ai on npmjs.com (latest). Install: npm i wolverine-ai. CLI: npx wolverine server/index.js. 85 files, 190KB compressed. Includes src/, bin/, examples/. Server directory created from src/templates/server/ on first run (never overwritten). GitHub: https://github.com/bobbyswhip/Wolverine. Unified billing: all AI calls route through inference proxy with credit-based billing. WOLVERINE_API_KEY authenticates through billing proxy, WOLVERINE_GPU_KEY for direct GPU access. 3 providers: openai, anthropic, wolverine (self-hosted GPU via Vast.ai).",
     metadata: { topic: "npm-package" },
   },
   {
package/src/brain/tool-router.js ADDED

@@ -0,0 +1,211 @@
+/**
+ * Tool Router — maps error types to recommended tool chains.
+ *
+ * Lightweight adjacency graph that tells the agent which tools to use
+ * for each error category. Injected into the agent prompt so it doesn't
+ * waste turns guessing.
+ *
+ * Structure per error type:
+ *   diagnose: tools to understand the problem (read-only)
+ *   fix: tools to apply the solution (write)
+ *   hint: one-line strategy for the AI
+ *   prereq: tools that must run BEFORE fix tools
+ *
+ * Lookup: O(1) by error classifier output or regex match on error message.
+ */
+
+const TOOL_ROUTES = {
+  // ── Code Errors ──
+  "TypeError": {
+    diagnose: ["read_file", "grep_code"],
+    fix: ["edit_file"],
+    hint: "Read the file at the error line. Find the undefined/null variable. Add a guard or fix the assignment.",
+  },
+  "ReferenceError": {
+    diagnose: ["read_file", "grep_code"],
+    fix: ["edit_file"],
+    hint: "Variable is not defined. Check for typos, missing imports, or scope issues.",
+  },
+  "SyntaxError": {
+    diagnose: ["read_file"],
+    fix: ["edit_file"],
+    hint: "Parse error in source. Read the file and fix the syntax at the reported line.",
+  },
+  "RangeError": {
+    diagnose: ["read_file", "check_memory"],
+    fix: ["edit_file"],
+    hint: "Stack overflow or invalid array length. Check for infinite recursion or unbounded growth.",
+  },
+
+  // ── Module/Dependency Errors ──
+  "missing_module": {
+    diagnose: ["audit_deps", "verify_node_modules"],
+    fix: ["bash_exec"],
+    hint: "Run npm install <module>. If in package.json but missing from disk, run npm ci.",
+  },
+  "missing_file": {
+    diagnose: ["read_file", "list_dir", "inspect_env"],
+    fix: ["write_file", "bash_exec"],
+    hint: "Check if the path is wrong (edit require) or if the file should exist (create it with expected content).",
+  },
+  "version_conflict": {
+    diagnose: ["audit_deps", "check_migration", "verify_node_modules"],
+    fix: ["bash_exec"],
+    hint: "Check peer deps and version compatibility. May need npm install package@version or full reinstall.",
+  },
+
+  // ── File System Errors ──
+  "ENOENT": {
+    diagnose: ["list_dir", "inspect_env", "read_file"],
+    fix: ["write_file", "bash_exec"],
+    prereq: ["read_file"],
+    hint: "File or directory doesn't exist. Check if path is correct, create if missing.",
+  },
+  "EACCES": {
+    diagnose: ["bash_exec", "list_dir"],
+    fix: ["bash_exec"],
+    hint: "Permission denied. Run chmod to fix permissions on the target file/directory.",
+  },
+  "ENOSPC": {
+    diagnose: ["disk_cleanup", "check_memory"],
+    fix: ["disk_cleanup"],
+    hint: "Disk full. Run disk_cleanup with dry_run=false to clear old backups and caches.",
+  },
+  "EMFILE": {
+    diagnose: ["check_file_descriptors", "list_processes", "check_event_loop"],
+    fix: ["disk_cleanup", "restart_service"],
+    hint: "Too many open files. Check for FD leaks, clear caches, consider raising ulimit in system profile.",
+  },
+
+  // ── Network Errors ──
+  "ECONNREFUSED": {
+    diagnose: ["check_network", "check_port", "inspect_cache", "inspect_env"],
+    fix: ["edit_file", "restart_service"],
+    hint: "Target service is down or wrong host/port. Check config, verify service is running.",
+  },
+  "ECONNRESET": {
+    diagnose: ["check_network", "check_logs"],
+    fix: ["edit_file"],
+    hint: "Connection reset by remote. Check for timeout settings, proxy config, or unstable network.",
+  },
+  "ETIMEDOUT": {
+    diagnose: ["check_network", "check_logs", "inspect_certificate"],
+    fix: ["edit_file"],
+    hint: "Connection timed out. Increase timeout, check DNS, verify endpoint is reachable.",
+  },
+  "EADDRINUSE": {
+    diagnose: ["check_port", "list_processes"],
+    fix: ["bash_exec"],
+    hint: "Port already in use. Find and kill the stale process holding the port.",
+  },
+
+  // ── Database Errors ──
+  "database": {
+    diagnose: ["inspect_db", "inspect_cache", "read_file"],
+    fix: ["run_db_fix", "edit_file"],
+    prereq: ["inspect_db"],
+    hint: "ALWAYS inspect_db before run_db_fix. Check table schema, query the affected data, then fix.",
+  },
+  "pool_exhaustion": {
+    diagnose: ["inspect_cache", "check_network", "check_logs"],
+    fix: ["edit_file", "restart_service"],
+    hint: "DB connection pool exhausted. Check for connection leaks, increase pool size, or restart.",
+  },
+
+  // ── SSL/TLS Errors ──
+  "certificate": {
+    diagnose: ["inspect_certificate", "check_network", "inspect_env"],
+    fix: ["bash_exec", "edit_file", "add_env_var"],
+    hint: "Check cert expiry, SAN list, and chain. May need renewal, hostname fix, or CA bundle.",
+  },
+
+  // ── Memory/Performance Errors ──
+  "OOM": {
+    diagnose: ["check_memory", "check_event_loop", "list_processes"],
+    fix: ["edit_file", "restart_service"],
+    hint: "Out of memory. Check for memory leaks (growing arrays, unclosed streams), reduce batch sizes.",
+  },
+  "event_loop_blocked": {
+    diagnose: ["check_event_loop", "check_logs", "check_memory"],
+    fix: ["edit_file"],
+    hint: "Synchronous operation blocking event loop. Replace readFileSync→readFile, execSync→exec, etc.",
+  },
+
+  // ── WebSocket Errors ──
+  "websocket": {
+    diagnose: ["check_websocket", "check_network", "check_port"],
+    fix: ["edit_file"],
+    hint: "Test WS handshake. Check upgrade headers, proxy config, and connection timeout settings.",
+  },
+
+  // ── Config/Environment Errors ──
+  "config": {
+    diagnose: ["read_file", "inspect_env", "list_dir"],
+    fix: ["write_file", "edit_file", "add_env_var"],
+    hint: "Check if config file exists and has expected structure. Check if required env vars are set.",
+  },
+  "env_missing": {
+    diagnose: ["inspect_env", "read_file"],
+    fix: ["add_env_var"],
+    hint: "Required environment variable not set. Add it to .env.local with the correct value.",
+  },
+};
+
+/**
+ * Find tool recommendations for an error.
+ * @param {string} errorMessage — the raw error message
+ * @param {string} errorType — from error-parser classifyError() (optional)
+ * @returns {{ diagnose: string[], fix: string[], prereq?: string[], hint: string } | null}
+ */
+function route(errorMessage, errorType) {
+  const msg = (errorMessage || "").toLowerCase();
+
+  // 1. Try exact error type match
+  if (errorType && TOOL_ROUTES[errorType]) return TOOL_ROUTES[errorType];
+
+  // 2. Try error class match
+  if (/typeerror/i.test(msg)) return TOOL_ROUTES.TypeError;
+  if (/referenceerror/i.test(msg)) return TOOL_ROUTES.ReferenceError;
+  if (/syntaxerror|unexpected token/i.test(msg)) return TOOL_ROUTES.SyntaxError;
+  if (/rangeerror/i.test(msg)) return TOOL_ROUTES.RangeError;
+
+  // 3. Try errno/code match
+  if (/enoent|no such file/i.test(msg)) return TOOL_ROUTES.ENOENT;
+  if (/eacces|eperm/i.test(msg)) return TOOL_ROUTES.EACCES;
+  if (/enospc/i.test(msg)) return TOOL_ROUTES.ENOSPC;
+  if (/emfile|enfile/i.test(msg)) return TOOL_ROUTES.EMFILE;
+  if (/econnrefused/i.test(msg)) return TOOL_ROUTES.ECONNREFUSED;
+  if (/econnreset/i.test(msg)) return TOOL_ROUTES.ECONNRESET;
+  if (/etimedout/i.test(msg)) return TOOL_ROUTES.ETIMEDOUT;
+  if (/eaddrinuse/i.test(msg)) return TOOL_ROUTES.EADDRINUSE;
+
+  // 4. Try pattern match
+  if (/cannot find module/i.test(msg)) return /['"][./\\]/.test(msg) ? TOOL_ROUTES.missing_file : TOOL_ROUTES.missing_module;
+  if (/cert|ssl|tls|self.signed/i.test(msg)) return TOOL_ROUTES.certificate;
+  if (/pool.*exhaust|pool.*timeout|acquire.*timeout/i.test(msg)) return TOOL_ROUTES.pool_exhaustion;
+  if (/websocket|ws.*close|transport.*close/i.test(msg)) return TOOL_ROUTES.websocket;
+  if (/out of memory|heap.*limit|allocation.*failed/i.test(msg)) return TOOL_ROUTES.OOM;
+  if (/not.set|undefined.*env|missing.*env/i.test(msg)) return TOOL_ROUTES.env_missing;
+  if (/missing.*config|invalid.*json|invalid.*config/i.test(msg)) return TOOL_ROUTES.config;
+  if (/sqlite|postgres|mysql|mongo|sequelize|prisma|knex/i.test(msg)) return TOOL_ROUTES.database;
+  if (/peer dep|eresolve|version.*mismatch/i.test(msg)) return TOOL_ROUTES.version_conflict;
+
+  return null; // unknown — let the AI figure it out
+}
+
+/**
+ * Get a compact prompt injection for the agent.
+ * Returns a string like "TOOL ROUTE: diagnose with [check_port, list_processes], fix with [bash_exec]. Hint: ..."
+ */
+function getRoutePrompt(errorMessage, errorType) {
+  const r = route(errorMessage, errorType);
+  if (!r) return "";
+  const parts = [`TOOL ROUTE for this error:`];
+  parts.push(`  Diagnose: ${r.diagnose.join(", ")}`);
+  if (r.prereq) parts.push(`  Prerequisites: ${r.prereq.join(", ")} (run these FIRST)`);
+  parts.push(`  Fix: ${r.fix.join(", ")}`);
+  parts.push(`  Strategy: ${r.hint}`);
+  return parts.join("\n");
+}
+
+module.exports = { route, getRoutePrompt, TOOL_ROUTES };
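
Based on the exports above, a short usage sketch (the error strings are invented):

    const { route, getRoutePrompt } = require("./src/brain/tool-router");

    route("Error: listen EADDRINUSE :::3000");
    // → { diagnose: ["check_port", "list_processes"], fix: ["bash_exec"],
    //     hint: "Port already in use. Find and kill the stale process holding the port." }

    getRoutePrompt("connect ECONNREFUSED 127.0.0.1:6379");
    // → "TOOL ROUTE for this error:\n  Diagnose: check_network, check_port, inspect_cache, inspect_env\n
    //    Fix: edit_file, restart_service\n  Strategy: Target service is down or wrong host/port. ..."

    route("some error no category matches"); // → null; the agent falls back to its own judgment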
package/src/core/ai-client.js CHANGED

@@ -659,6 +659,11 @@ Include both if needed, or just one.`;
   const result = await aiCall({ model, systemPrompt, userPrompt, maxTokens: 2048, category: "reasoning" });
   const content = (result.content || "").trim();

+  // Guard: cap length before regex extraction to prevent catastrophic backtracking
+  if (content.length > 50000) {
+    throw new Error("AI response too large for regex extraction (>50K chars)");
+  }
+
   // Strip thinking tags (Gemma), markdown fences, and any prefix text
   let cleaned = content
     .replace(/<\|channel>.*?<channel\|>/gs, "")
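
The 50K cap is a blunt but effective ReDoS guard. The failure mode it defends against looks like this classic nested-quantifier pattern (illustrative only, not a regex from this file):

    const evil = /(a+)+$/;
    // evil.test("a".repeat(40) + "b") forces the backtracking engine through
    // roughly 2^40 candidate paths before failing; runtime grows exponentially
    // with input length, so a hard length cap bounds the worst case regardless
    // of which extraction patterns run afterwards.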
package/src/core/config.js CHANGED

@@ -15,11 +15,14 @@ const path = require("path");
  */

 let _config = null;
+let _configRoot = null;
+
+function setConfigRoot(root) { _configRoot = root; }

 function loadConfig() {
   if (_config) return _config;

-  const configPath = path.join(process.cwd(), "server", "config", "settings.json");
+  const configPath = path.join(_configRoot || process.cwd(), "server", "config", "settings.json");
   let fileConfig = {};
   if (fs.existsSync(configPath)) {
     try {

@@ -109,6 +112,7 @@ function getConfig(dotPath) {
 }

 function resetConfig() { _config = null; }
+function resetConfigRoot() { _configRoot = null; }

 /**
  * Migrate old provider-based config to new flat models format.

@@ -153,4 +157,4 @@ function _migrateAndEnsureDefaults(fileConfig, configPath) {
   }
 }

-module.exports = { loadConfig, getConfig, resetConfig };
+module.exports = { loadConfig, getConfig, resetConfig, setConfigRoot, resetConfigRoot };
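
With the new exports, callers and tests can point config resolution at another project root; a sketch assuming this require path:

    const { setConfigRoot, loadConfig, resetConfig, resetConfigRoot } = require("./src/core/config");

    setConfigRoot("/srv/my-app");  // settings now resolve to /srv/my-app/server/config/settings.json
    resetConfig();                 // drop any cached config so the new root takes effect
    const cfg = loadConfig();

    resetConfigRoot();             // later loads fall back to process.cwd()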
package/src/core/runner.js CHANGED

@@ -45,14 +45,30 @@ class WolverineRunner {
     this.child = null;
     this.running = false;

+    // Stability tracking
+    this._lastStartTime = null;
+    this._lastBackupId = null;
+    this._stabilityTimer = null;
+    this._stderrBuffer = "";
+    this._healInProgress = false;
+    this._healStatus = null; // { active, file, error, phase, startedAt, iteration }
+
+    this._initSubsystems(options);
+  }
+
+  /**
+   * Initialize all subsystems — extracted from constructor for readability.
+   */
+  _initSubsystems(options) {
     // Core subsystems
     this.sandbox = new Sandbox(this.cwd);
     this.redactor = initRedactor(this.cwd);
+    const cfg = loadConfig();
     this.rateLimiter = new RateLimiter({
-      maxCallsPerWindow:
-      windowMs:
-      minGapMs:
-      maxTokensPerHour:
+      maxCallsPerWindow: cfg.rateLimiting.maxCallsPerWindow,
+      windowMs: cfg.rateLimiting.windowMs,
+      minGapMs: cfg.rateLimiting.minGapMs,
+      maxTokensPerHour: cfg.rateLimiting.maxTokensPerHour,
       maxGlobalHealsPerWindow: parseInt(process.env.WOLVERINE_RATE_MAX_GLOBAL_HEALS, 10) || 5,
       globalWindowMs: parseInt(process.env.WOLVERINE_RATE_GLOBAL_WINDOW_MS, 10) || 300000,
     });

@@ -68,14 +84,14 @@ class WolverineRunner {
     });

     // Health monitoring
-    const port =
+    const port = cfg.server.port;
     this.healthMonitor = new HealthMonitor({
       port,
       path: options.healthPath || "/health",
-      intervalMs:
-      timeoutMs:
-      failThreshold:
-      startDelayMs:
+      intervalMs: cfg.healthCheck.intervalMs,
+      timeoutMs: cfg.healthCheck.timeoutMs,
+      failThreshold: cfg.healthCheck.failThreshold,
+      startDelayMs: cfg.healthCheck.startDelayMs,
     });

     // Performance monitoring

@@ -89,6 +105,9 @@ class WolverineRunner {
     // Process monitor — heartbeat, memory, CPU, leak detection
     this.processMonitor = new ProcessMonitor({ logger: this.logger });

+    // Brain — semantic memory + project context
+    this.brain = new Brain(this.cwd);
+
     // Route prober — tests all routes periodically
     this.routeProber = new RouteProber({
       port,

@@ -98,9 +117,9 @@ class WolverineRunner {

     // Error monitor — detects caught 500 errors without process crash
     this.errorMonitor = new ErrorMonitor({
-      threshold:
-      windowMs:
-      cooldownMs:
+      threshold: cfg.errorMonitor.threshold,
+      windowMs: cfg.errorMonitor.windowMs,
+      cooldownMs: cfg.errorMonitor.cooldownMs,
       logger: this.logger,
       onError: (routePath, errorDetails) => this._healFromError(routePath, errorDetails),
     });

@@ -111,9 +130,6 @@ class WolverineRunner {
       windowMs: parseInt(process.env.WOLVERINE_LOOP_WINDOW_MS, 10) || 600000,
     });

-    // Brain — semantic memory + project context
-    this.brain = new Brain(this.cwd);
-
     // Skills — discoverable capabilities
     this.skills = new SkillRegistry();
     this.skills.load();

@@ -143,14 +159,6 @@ class WolverineRunner {
       routeProber: this.routeProber,
       errorMonitor: this.errorMonitor,
     });
-
-    // Stability tracking
-    this._lastStartTime = null;
-    this._lastBackupId = null;
-    this._stabilityTimer = null;
-    this._stderrBuffer = "";
-    this._healInProgress = false;
-    this._healStatus = null; // { active, file, error, phase, startedAt, iteration }
   }

   async start() {

@@ -465,9 +473,10 @@ class WolverineRunner {
         this._healInProgress = false;
         return;
       }
-      //
+      // Pass through directly — _healAndRestart checks _healInProgress internally,
+      // so release it just before the call to avoid a race window
       this._healInProgress = false;
-      await this._healAndRestart();
+      await this._healAndRestart({ skipHealLockCheck: true });
     } catch (err) {
       // #5: Prevent unhandled errors in health callback from crashing the parent
       console.log(chalk.red(`  ⚠️ Health callback error: ${err.message}`));

@@ -570,8 +579,8 @@ class WolverineRunner {
     this.errorMonitor.reset();
   }

-  async _healAndRestart() {
-    if (this._healInProgress) return;
+  async _healAndRestart(options) {
+    if (this._healInProgress && !options?.skipHealLockCheck) return;
     // #9: Bail if stop() was called during the window between crash and heal
     if (this._shuttingDown) return;
     this._healInProgress = true;
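
The subsystems above now pull their tuning knobs from loadConfig() instead of inline values; the implied settings.json shape, with the key names taken from the code and the values invented:

    {
      "server": { "port": 3000 },
      "rateLimiting": { "maxCallsPerWindow": 10, "windowMs": 60000, "minGapMs": 2000, "maxTokensPerHour": 200000 },
      "healthCheck": { "intervalMs": 15000, "timeoutMs": 5000, "failThreshold": 3, "startDelayMs": 10000 },
      "errorMonitor": { "threshold": 5, "windowMs": 60000, "cooldownMs": 120000 }
    }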
package/src/core/wolverine.js CHANGED

@@ -1,19 +1,26 @@
+const fs = require("fs");
+const path = require("path");
+const os = require("os");
+const { execSync } = require("child_process");
 const chalk = require("chalk");
 const { parseError } = require("./error-parser");
-const { requestRepair, getClient, aiCall, _trackOp } = require("./ai-client");
+const { requestRepair, getClient, aiCall, _trackOp, getTrackerSnapshot } = require("./ai-client");
 const { getModel } = require("./models");
 const { applyPatch } = require("./patcher");
 const { verifyFix } = require("./verifier");
 const { Sandbox, SandboxViolationError } = require("../security/sandbox");
 const { RateLimiter } = require("../security/rate-limiter");
 const { detectInjection } = require("../security/injection-detector");
+const { redact, hasSecrets } = require("../security/secret-redactor");
 const { BackupManager } = require("../backup/backup-manager");
+const { getRoutePrompt } = require("../brain/tool-router");
 const { AgentEngine } = require("../agent/agent-engine");
 const { ResearchAgent } = require("../agent/research-agent");
 const { GoalLoop } = require("../agent/goal-loop");
 const { exploreAndFix, spawnParallel } = require("../agent/sub-agents");
 const { EVENT_TYPES } = require("../logger/event-logger");
 const { diagnose: diagnoseDeps } = require("../skills/deps");
+const { getSummary: getServerContextSummary } = require("./server-context");

 /**
  * The Wolverine healing engine — v3.

@@ -55,9 +62,7 @@ async function heal(opts) {
 async function _healImpl({ stderr, cwd, sandbox, notifier, rateLimiter, backupManager, logger, brain, mcp, skills, repairHistory, routeContext }) {
   const healStartTime = Date.now();
   // Snapshot token tracker at heal start — diff at end = FULL pipeline cost
-  const { getTrackerSnapshot } = require("./ai-client");
   const _snapshot = getTrackerSnapshot();
-  const { redact, hasSecrets } = require("../security/secret-redactor");

   // Guard: don't burn tokens on empty stderr (signal kills, clean shutdowns, etc.)
   if (!stderr || stderr.trim().length < 10) {

@@ -191,8 +196,6 @@ async function _healImpl({ stderr, cwd, sandbox, notifier, rateLimiter, backupMa
   let backupSourceCode = "";
   if (hasFile && backupManager) {
     try {
-      const fs = require("fs");
-      const path = require("path");
       const stableBackups = backupManager.getAll().filter(b => b.status === "stable" || b.status === "verified");
       if (stableBackups.length > 0) {
         const latest = stableBackups[stableBackups.length - 1];

@@ -210,10 +213,15 @@ async function _healImpl({ stderr, cwd, sandbox, notifier, rateLimiter, backupMa
   let brainContext = "";
   // Inject server context (routes, DB, config, deps) if available
   try {
-    const
-    const serverCtx = getSummary(cwd);
+    const serverCtx = getServerContextSummary(cwd);
     if (serverCtx) brainContext += serverCtx + "\n\n";
   } catch {}
+  // Inject tool routing — tells agent exactly which tools to use for this error type
+  const toolRoute = getRoutePrompt(parsed.errorMessage, parsed.errorType);
+  if (toolRoute) {
+    brainContext += toolRoute + "\n\n";
+    console.log(chalk.gray(`  🗺️ Tool route: ${toolRoute.split("\n")[1]?.trim() || "matched"}`));
+  }
   // Inject relevant skill context (claw-code: pre-enrich prompt with matched tools)
   if (skills) {
     const skillCtx = skills.buildContext(parsed.errorMessage);

@@ -348,7 +356,6 @@ async function _healImpl({ stderr, cwd, sandbox, notifier, rateLimiter, backupMa

   // Execute shell commands first (npm install, mkdir, etc.)
   if (repair.commands && Array.isArray(repair.commands)) {
-    const { execSync } = require("child_process");
     for (const cmd of repair.commands) {
       // Block dangerous commands
       if (/rm\s+-rf\s+[/\\]|format\s+c:|mkfs/i.test(cmd)) {

@@ -554,7 +561,6 @@ async function _healImpl({ stderr, cwd, sandbox, notifier, rateLimiter, backupMa
  * Returns { fixed: boolean, action: string }
  */
 async function tryOperationalFix(parsed, cwd, logger, sandbox) {
-  const { execSync } = require("child_process");
   const msg = parsed.errorMessage || "";

   // Pattern 1: Dependency issues — use deps skill for structured diagnosis

@@ -581,8 +587,6 @@ async function tryOperationalFix(parsed, cwd, logger, sandbox) {
       || msg.match(/cannot find.*?'([^']+\.\w+)'/i);
   if (enoent) {
     const missingFile = enoent[1];
-    const fs = require("fs");
-    const path = require("path");

     // Only auto-create if it's inside the project and looks like a config/data file
     const rel = path.relative(cwd, missingFile).replace(/\\/g, "/");

@@ -639,7 +643,6 @@ async function tryOperationalFix(parsed, cwd, logger, sandbox) {
   const permFile = msg.match(/(?:EACCES|EPERM).*?'([^']+)'/);
   if (permFile) {
     try {
-      const fs = require("fs");
       fs.chmodSync(permFile[1], 0o755);
       console.log(chalk.blue(`  🔑 Fixed permissions on: ${permFile[1]}`));
       return { fixed: true, action: `Fixed permissions (chmod 755) on: ${permFile[1]}` };

@@ -671,7 +674,6 @@ async function tryOperationalFix(parsed, cwd, logger, sandbox) {
   // Pattern 5: ENOSPC — disk full, try automated cleanup
   if (/ENOSPC/.test(msg)) {
     try {
-      const os = require("os");
       const backupDir = path.join(os.homedir(), ".wolverine-safe-backups", "snapshots");
       let cleaned = 0;
       if (fs.existsSync(backupDir)) {

@@ -717,9 +719,6 @@ async function tryOperationalFix(parsed, cwd, logger, sandbox) {
  * Returns a JSON string with empty/default values, or null if can't infer.
  */
 function _inferJsonConfig(missingFile, cwd, parsed) {
-  const fs = require("fs");
-  const path = require("path");
-
   // Find which source file loads the missing config
   const basename = path.basename(missingFile);
   const sourceFile = parsed.filePath;

@@ -728,10 +727,18 @@ function _inferJsonConfig(missingFile, cwd, parsed) {
   // #17: Escape all regex special characters in basename to prevent regex injection
   const escapedBasename = basename.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");

+  // #23: Guard against regex construction failure on unusual filenames
+  let configVarRegex;
+  try {
+    configVarRegex = new RegExp(`(?:const|let|var)\\s+(\\w+)\\s*=\\s*(?:require|JSON\\.parse).*${escapedBasename}`);
+  } catch {
+    return null;
+  }
+
   try {
     const source = fs.readFileSync(sourceFile, "utf-8");
     // Look for property accesses on the loaded config: config.apiUrl, config.timeout, etc.
-    const configVarMatch = source.match(
+    const configVarMatch = source.match(configVarRegex);
     if (!configVarMatch) return null;

     const varName = configVarMatch[1];
package/src/skills/deps.js CHANGED

@@ -161,14 +161,19 @@ function findUnused(cwd) {

   // Scan all .js/.ts/.mjs/.cjs files for require/import statements
   const usedPackages = new Set();
+  let _fileCount = 0;
+  const FILE_SCAN_CAP = 5000;
   const scanDir = (dir) => {
+    if (_fileCount >= FILE_SCAN_CAP) return;
     let entries;
     try { entries = fs.readdirSync(dir, { withFileTypes: true }); } catch { return; }
     for (const entry of entries) {
+      if (_fileCount >= FILE_SCAN_CAP) return;
       if (entry.name === "node_modules" || entry.name === ".git" || entry.name === ".wolverine") continue;
       const fullPath = path.join(dir, entry.name);
       if (entry.isDirectory()) { scanDir(fullPath); continue; }
       if (!/\.(js|ts|mjs|cjs|jsx|tsx)$/.test(entry.name)) continue;
+      _fileCount++;
       try {
         const content = fs.readFileSync(fullPath, "utf-8");
         // Match require("X") and import ... from "X"
package/src/vault/vault-manager.js CHANGED

@@ -16,9 +16,9 @@ const path = require("path");
  * - Survives git pull, npm install, auto-update (lives in .wolverine/)
  */

-const VAULT_DIR = () => path.join(process.cwd(), ".wolverine", "vault");
-const MASTER_KEY_PATH = () => path.join(VAULT_DIR(), "master.key");
-const ETH_VAULT_PATH = () => path.join(VAULT_DIR(), "eth.vault");
+const VAULT_DIR = (projectRoot) => path.join(projectRoot || process.cwd(), ".wolverine", "vault");
+const MASTER_KEY_PATH = (projectRoot) => path.join(VAULT_DIR(projectRoot), "master.key");
+const ETH_VAULT_PATH = (projectRoot) => path.join(VAULT_DIR(projectRoot), "eth.vault");

 const ALGORITHM = "aes-256-gcm";
 const IV_LENGTH = 16;

@@ -28,26 +28,26 @@ const AUTH_TAG_LENGTH = 16;
  * Initialize the vault. Idempotent — creates keys only if missing.
  * Called during runner startup before any server code runs.
  */
-async function initVault() {
-  const vaultDir = VAULT_DIR();
+async function initVault(projectRoot) {
+  const vaultDir = VAULT_DIR(projectRoot);
   fs.mkdirSync(vaultDir, { recursive: true });

   let created = false;

   // Master encryption key
-  if (!fs.existsSync(MASTER_KEY_PATH())) {
+  if (!fs.existsSync(MASTER_KEY_PATH(projectRoot))) {
     const masterKey = crypto.randomBytes(32);
-    fs.writeFileSync(MASTER_KEY_PATH(), masterKey);
-    try { fs.chmodSync(MASTER_KEY_PATH(), 0o600); } catch {}
+    fs.writeFileSync(MASTER_KEY_PATH(projectRoot), masterKey);
+    try { fs.chmodSync(MASTER_KEY_PATH(projectRoot), 0o600); } catch {}
     masterKey.fill(0);
     created = true;
     console.log("  🔐 Vault: master encryption key generated");
   }

   // Ethereum private key (encrypted)
-  if (!fs.existsSync(ETH_VAULT_PATH())) {
+  if (!fs.existsSync(ETH_VAULT_PATH(projectRoot))) {
     const ethKey = crypto.randomBytes(32);
-    await encryptAndStore(ethKey);
+    await encryptAndStore(ethKey, { projectRoot });
     ethKey.fill(0);
     created = true;
     console.log("  🔐 Vault: ethereum wallet created");

@@ -59,18 +59,19 @@ async function initVault() {
 /**
  * Check if vault is fully initialized.
  */
-function isVaultInitialized() {
-  return fs.existsSync(MASTER_KEY_PATH()) && fs.existsSync(ETH_VAULT_PATH());
+function isVaultInitialized(projectRoot) {
+  return fs.existsSync(MASTER_KEY_PATH(projectRoot)) && fs.existsSync(ETH_VAULT_PATH(projectRoot));
 }

 /**
  * Encrypt a private key Buffer and write to eth.vault.
  * Wipes the master key from memory after use.
  */
-async function encryptAndStore(keyBuf) {
+async function encryptAndStore(keyBuf, options) {
+  const projectRoot = options?.projectRoot;
   let masterKey = null;
   try {
-    masterKey = fs.readFileSync(MASTER_KEY_PATH());
+    masterKey = fs.readFileSync(MASTER_KEY_PATH(projectRoot));
     const iv = crypto.randomBytes(IV_LENGTH);
     const cipher = crypto.createCipheriv(ALGORITHM, masterKey, iv);
     const encrypted = Buffer.concat([cipher.update(keyBuf), cipher.final()]);

@@ -83,13 +84,13 @@ async function encryptAndStore(keyBuf) {
       authTag: authTag.toString("hex"),
       ciphertext: encrypted.toString("hex"),
       created: new Date().toISOString(),
-      rotated: null,
+      rotated: options?.rotated || null,
     };

-    const tmpPath = ETH_VAULT_PATH() + ".tmp";
+    const tmpPath = ETH_VAULT_PATH(projectRoot) + ".tmp";
     fs.writeFileSync(tmpPath, JSON.stringify(vault, null, 2), "utf-8");
-    fs.renameSync(tmpPath, ETH_VAULT_PATH());
-    try { fs.chmodSync(ETH_VAULT_PATH(), 0o600); } catch {}
+    fs.renameSync(tmpPath, ETH_VAULT_PATH(projectRoot));
+    try { fs.chmodSync(ETH_VAULT_PATH(projectRoot), 0o600); } catch {}
   } finally {
     if (masterKey) masterKey.fill(0);
   }

@@ -99,15 +100,15 @@ async function encryptAndStore(keyBuf) {
  * Decrypt the Ethereum private key. Returns a Buffer.
  * CALLER MUST call .fill(0) on the returned Buffer when done.
  */
-function decryptPrivateKey() {
-  if (!isVaultInitialized()) {
+function decryptPrivateKey(projectRoot) {
+  if (!isVaultInitialized(projectRoot)) {
     throw new Error("vault not initialized");
   }

   let masterKey = null;
   try {
-    masterKey = fs.readFileSync(MASTER_KEY_PATH());
-    const vault = JSON.parse(fs.readFileSync(ETH_VAULT_PATH(), "utf-8"));
+    masterKey = fs.readFileSync(MASTER_KEY_PATH(projectRoot));
+    const vault = JSON.parse(fs.readFileSync(ETH_VAULT_PATH(projectRoot), "utf-8"));

     if (vault.version !== 1) throw new Error("unsupported vault version");

@@ -128,16 +129,11 @@ function decryptPrivateKey() {
  * Re-encrypt with a fresh IV. Defensive measure if key material was
  * potentially exposed in an error message.
  */
-async function rotateEncryption() {
+async function rotateEncryption(projectRoot) {
   let keyBuf = null;
   try {
-    keyBuf = decryptPrivateKey();
-    await encryptAndStore(keyBuf);
-
-    // Update rotated timestamp
-    const vault = JSON.parse(fs.readFileSync(ETH_VAULT_PATH(), "utf-8"));
-    vault.rotated = new Date().toISOString();
-    fs.writeFileSync(ETH_VAULT_PATH(), JSON.stringify(vault, null, 2), "utf-8");
+    keyBuf = decryptPrivateKey(projectRoot);
+    await encryptAndStore(keyBuf, { rotated: new Date().toISOString(), projectRoot });
   } finally {
     if (keyBuf) keyBuf.fill(0);
   }

@@ -147,11 +143,11 @@ async function rotateEncryption() {
  * Export vault contents for backup. Returns raw Buffers.
  * Caller MUST wipe masterKey after writing to backup.
  */
-function exportVaultForBackup() {
-  if (!isVaultInitialized()) return null;
+function exportVaultForBackup(projectRoot) {
+  if (!isVaultInitialized(projectRoot)) return null;
   return {
-    masterKey: fs.readFileSync(MASTER_KEY_PATH()),
-    vaultFile: fs.readFileSync(ETH_VAULT_PATH(), "utf-8"),
+    masterKey: fs.readFileSync(MASTER_KEY_PATH(projectRoot)),
+    vaultFile: fs.readFileSync(ETH_VAULT_PATH(projectRoot), "utf-8"),
   };
 }

@@ -159,18 +155,18 @@ function exportVaultForBackup() {
  * Import vault from backup. Only used during catastrophic recovery
  * when both vault files are missing.
  */
-function importVaultFromBackup(masterKeyBuf, vaultFileStr) {
-  const vaultDir = VAULT_DIR();
+function importVaultFromBackup(masterKeyBuf, vaultFileStr, projectRoot) {
+  const vaultDir = VAULT_DIR(projectRoot);
   fs.mkdirSync(vaultDir, { recursive: true });

-  fs.writeFileSync(MASTER_KEY_PATH(), masterKeyBuf);
-  try { fs.chmodSync(MASTER_KEY_PATH(), 0o600); } catch {}
+  fs.writeFileSync(MASTER_KEY_PATH(projectRoot), masterKeyBuf);
+  try { fs.chmodSync(MASTER_KEY_PATH(projectRoot), 0o600); } catch {}

-  fs.writeFileSync(ETH_VAULT_PATH(), vaultFileStr, "utf-8");
-  try { fs.chmodSync(ETH_VAULT_PATH(), 0o600); } catch {}
+  fs.writeFileSync(ETH_VAULT_PATH(projectRoot), vaultFileStr, "utf-8");
+  try { fs.chmodSync(ETH_VAULT_PATH(projectRoot), 0o600); } catch {}
 }

-function getVaultPath() { return VAULT_DIR(); }
+function getVaultPath(projectRoot) { return VAULT_DIR(projectRoot); }

 module.exports = {
   initVault,