wolverine-ai 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. package/PLATFORM.md +442 -0
  2. package/README.md +475 -0
  3. package/SERVER_BEST_PRACTICES.md +62 -0
  4. package/TELEMETRY.md +108 -0
  5. package/bin/wolverine.js +95 -0
  6. package/examples/01-basic-typo.js +31 -0
  7. package/examples/02-multi-file/routes/users.js +15 -0
  8. package/examples/02-multi-file/server.js +25 -0
  9. package/examples/03-syntax-error.js +23 -0
  10. package/examples/04-secret-leak.js +14 -0
  11. package/examples/05-expired-key.js +27 -0
  12. package/examples/06-json-config/config.json +13 -0
  13. package/examples/06-json-config/server.js +28 -0
  14. package/examples/07-rate-limit-loop.js +11 -0
  15. package/examples/08-sandbox-escape.js +20 -0
  16. package/examples/buggy-server.js +39 -0
  17. package/examples/demos/01-basic-typo/index.js +20 -0
  18. package/examples/demos/01-basic-typo/routes/api.js +13 -0
  19. package/examples/demos/01-basic-typo/routes/health.js +4 -0
  20. package/examples/demos/02-multi-file/index.js +24 -0
  21. package/examples/demos/02-multi-file/routes/api.js +13 -0
  22. package/examples/demos/02-multi-file/routes/health.js +4 -0
  23. package/examples/demos/03-syntax-error/index.js +18 -0
  24. package/examples/demos/04-secret-leak/index.js +16 -0
  25. package/examples/demos/05-expired-key/index.js +21 -0
  26. package/examples/demos/06-json-config/config.json +9 -0
  27. package/examples/demos/06-json-config/index.js +20 -0
  28. package/examples/demos/07-null-crash/index.js +16 -0
  29. package/examples/run-demo.js +110 -0
  30. package/package.json +67 -0
  31. package/server/config/settings.json +62 -0
  32. package/server/index.js +33 -0
  33. package/server/routes/api.js +12 -0
  34. package/server/routes/health.js +16 -0
  35. package/server/routes/time.js +12 -0
  36. package/src/agent/agent-engine.js +727 -0
  37. package/src/agent/goal-loop.js +140 -0
  38. package/src/agent/research-agent.js +120 -0
  39. package/src/agent/sub-agents.js +176 -0
  40. package/src/backup/backup-manager.js +321 -0
  41. package/src/brain/brain.js +315 -0
  42. package/src/brain/embedder.js +131 -0
  43. package/src/brain/function-map.js +263 -0
  44. package/src/brain/vector-store.js +267 -0
  45. package/src/core/ai-client.js +387 -0
  46. package/src/core/cluster-manager.js +144 -0
  47. package/src/core/config.js +89 -0
  48. package/src/core/error-parser.js +87 -0
  49. package/src/core/health-monitor.js +129 -0
  50. package/src/core/models.js +132 -0
  51. package/src/core/patcher.js +55 -0
  52. package/src/core/runner.js +464 -0
  53. package/src/core/system-info.js +141 -0
  54. package/src/core/verifier.js +146 -0
  55. package/src/core/wolverine.js +290 -0
  56. package/src/dashboard/server.js +1332 -0
  57. package/src/index.js +94 -0
  58. package/src/logger/event-logger.js +237 -0
  59. package/src/logger/pricing.js +96 -0
  60. package/src/logger/repair-history.js +109 -0
  61. package/src/logger/token-tracker.js +277 -0
  62. package/src/mcp/mcp-client.js +224 -0
  63. package/src/mcp/mcp-registry.js +228 -0
  64. package/src/mcp/mcp-security.js +152 -0
  65. package/src/monitor/perf-monitor.js +300 -0
  66. package/src/monitor/process-monitor.js +231 -0
  67. package/src/monitor/route-prober.js +191 -0
  68. package/src/notifications/notifier.js +227 -0
  69. package/src/platform/heartbeat.js +93 -0
  70. package/src/platform/queue.js +53 -0
  71. package/src/platform/register.js +64 -0
  72. package/src/platform/telemetry.js +76 -0
  73. package/src/security/admin-auth.js +150 -0
  74. package/src/security/injection-detector.js +174 -0
  75. package/src/security/rate-limiter.js +152 -0
  76. package/src/security/sandbox.js +128 -0
  77. package/src/security/secret-redactor.js +217 -0
  78. package/src/skills/skill-registry.js +129 -0
  79. package/src/skills/sql.js +375 -0
@@ -0,0 +1,321 @@
1
+ const fs = require("fs");
2
+ const path = require("path");
3
+ const chalk = require("chalk");
4
+
5
+ /**
6
+ * Smart Backup Manager — manages versioned backups with stability tracking.
7
+ *
8
+ * Backup lifecycle:
9
+ * 1. UNSTABLE: Created when a fix is applied, before verification
10
+ * 2. VERIFIED: The fix ran without immediately crashing (same error)
11
+ * 3. STABLE: The server ran successfully for STABILITY_THRESHOLD without crashing
12
+ *
13
+ * Retention policy:
14
+ * - Unstable backups: deleted after 7 days
15
+ * - Verified backups: kept for 7 days, then pruned unless promoted to stable
16
+ * - Stable backups: after 7 days, keep only 1 per day (most recent each day)
17
+ *
18
+ * Storage layout:
19
+ * .wolverine/
20
+ * backups/
21
+ * manifest.json — tracks all backups with metadata
22
+ * <timestamp>/ — one directory per backup event
23
+ * <filename>.bak — the original file content
24
+ */
25
+
26
+ const WOLVERINE_DIR = ".wolverine";
27
+ const BACKUPS_DIR = path.join(WOLVERINE_DIR, "backups");
28
+ const MANIFEST_FILE = path.join(BACKUPS_DIR, "manifest.json");
29
+
30
+ // Stability threshold: how long the server must run without the same crash
31
+ // to consider a fix "stable" (default: 30 minutes)
32
+ const STABILITY_THRESHOLD_MS = 30 * 60 * 1000;
33
+
34
+ // Retention: unstable/verified backups older than this are pruned
35
+ const RETENTION_UNSTABLE_MS = 7 * 24 * 60 * 60 * 1000; // 7 days
36
+
37
+ class BackupManager {
38
+ constructor(projectRoot) {
39
+ this.projectRoot = path.resolve(projectRoot);
40
+ this.backupsDir = path.join(this.projectRoot, BACKUPS_DIR);
41
+ this.manifestPath = path.join(this.projectRoot, MANIFEST_FILE);
42
+ this._ensureDirs();
43
+ this.manifest = this._loadManifest();
44
+ }
45
+
46
+ /**
47
+ * Create a backup of specific files or the entire server/ directory.
48
+ * Returns a backupId that can be used to rollback or promote.
49
+ *
50
+ * @param {string[]|null} filePaths — specific files, or null to backup entire server/
51
+ */
52
+ createBackup(filePaths) {
53
+ const backupId = Date.now().toString(36) + "-" + Math.random().toString(36).slice(2, 6);
54
+ const timestamp = Date.now();
55
+ const backupDir = path.join(this.backupsDir, backupId);
56
+ fs.mkdirSync(backupDir, { recursive: true });
57
+
58
+ // If no specific files, backup the entire server/ directory
59
+ if (!filePaths || filePaths.length === 0) {
60
+ filePaths = this._collectServerFiles();
61
+ }
62
+
63
+ const files = [];
64
+ for (const filePath of filePaths) {
65
+ const absPath = path.isAbsolute(filePath) ? filePath : path.resolve(this.projectRoot, filePath);
66
+ if (!fs.existsSync(absPath)) continue;
67
+ // Skip large files (>10MB) and binary blobs
68
+ try {
69
+ const stat = fs.statSync(absPath);
70
+ if (stat.size > 10 * 1024 * 1024) continue;
71
+ } catch { continue; }
72
+
73
+ const relativePath = path.relative(this.projectRoot, absPath);
74
+ const backupFile = path.join(backupDir, relativePath.replace(/[/\\]/g, "__"));
75
+ fs.copyFileSync(absPath, backupFile);
76
+
77
+ files.push({
78
+ original: absPath,
79
+ relative: relativePath,
80
+ backup: backupFile,
81
+ });
82
+ }
83
+
84
+ const entry = {
85
+ id: backupId,
86
+ timestamp,
87
+ status: "unstable",
88
+ files,
89
+ errorSignature: null,
90
+ promotedAt: null,
91
+ verifiedAt: null,
92
+ };
93
+
94
+ this.manifest.backups.push(entry);
95
+ this._saveManifest();
96
+
97
+ return backupId;
98
+ }
99
+
100
+ /**
101
+ * Rollback to a specific backup.
102
+ */
103
+ rollbackTo(backupId) {
104
+ const entry = this.manifest.backups.find(b => b.id === backupId);
105
+ if (!entry) {
106
+ console.log(chalk.red(`Backup ${backupId} not found.`));
107
+ return false;
108
+ }
109
+
110
+ let allRestored = true;
111
+ for (const file of entry.files) {
112
+ if (fs.existsSync(file.backup)) {
113
+ fs.copyFileSync(file.backup, file.original);
114
+ console.log(chalk.yellow(` ↩️ Restored: ${file.relative}`));
115
+ } else {
116
+ console.log(chalk.red(` ❌ Backup file missing: ${file.backup}`));
117
+ allRestored = false;
118
+ }
119
+ }
120
+
121
+ return allRestored;
122
+ }
123
+
124
+ /**
125
+ * Rollback to the most recent backup (any status).
126
+ */
127
+ rollbackLatest() {
128
+ if (this.manifest.backups.length === 0) return false;
129
+ const latest = this.manifest.backups[this.manifest.backups.length - 1];
130
+ console.log(chalk.yellow(`\n↩️ Rolling back to backup ${latest.id} (${new Date(latest.timestamp).toISOString()})...`));
131
+ return this.rollbackTo(latest.id);
132
+ }
133
+
134
+ /**
135
+ * Mark a backup as verified (fix didn't immediately reproduce the error).
136
+ */
137
+ markVerified(backupId) {
138
+ const entry = this.manifest.backups.find(b => b.id === backupId);
139
+ if (entry && entry.status === "unstable") {
140
+ entry.status = "verified";
141
+ entry.verifiedAt = Date.now();
142
+ this._saveManifest();
143
+ }
144
+ }
145
+
146
+ /**
147
+ * Mark a backup as stable (server ran for the full stability threshold).
148
+ */
149
+ markStable(backupId) {
150
+ const entry = this.manifest.backups.find(b => b.id === backupId);
151
+ if (entry && (entry.status === "verified" || entry.status === "unstable")) {
152
+ entry.status = "stable";
153
+ entry.promotedAt = Date.now();
154
+ this._saveManifest();
155
+ console.log(chalk.green(` 🏆 Backup ${backupId} promoted to STABLE.`));
156
+ }
157
+ }
158
+
159
+ /**
160
+ * Set the error signature on a backup (for tracking what error this fix addressed).
161
+ */
162
+ setErrorSignature(backupId, signature) {
163
+ const entry = this.manifest.backups.find(b => b.id === backupId);
164
+ if (entry) {
165
+ entry.errorSignature = signature;
166
+ this._saveManifest();
167
+ }
168
+ }
169
+
170
+ /**
171
+ * Run the retention policy — prune old backups.
172
+ *
173
+ * Rules:
174
+ * 1. Unstable/verified backups older than 7 days → delete
175
+ * 2. Stable backups older than 7 days → keep only 1 per day (most recent each day)
176
+ * 3. All stable backups within 7 days → keep
177
+ */
178
+ prune() {
179
+ const now = Date.now();
180
+ const cutoff = now - RETENTION_UNSTABLE_MS;
181
+ let pruned = 0;
182
+
183
+ // Separate backups by status
184
+ const toKeep = [];
185
+ const stableOld = [];
186
+
187
+ for (const entry of this.manifest.backups) {
188
+ if (entry.status === "stable") {
189
+ if (entry.timestamp < cutoff) {
190
+ stableOld.push(entry);
191
+ } else {
192
+ toKeep.push(entry);
193
+ }
194
+ } else {
195
+ // Unstable or verified
196
+ if (entry.timestamp < cutoff) {
197
+ this._deleteBackupFiles(entry);
198
+ pruned++;
199
+ } else {
200
+ toKeep.push(entry);
201
+ }
202
+ }
203
+ }
204
+
205
+ // For old stable backups: keep 1 per day
206
+ if (stableOld.length > 0) {
207
+ const byDay = new Map();
208
+ for (const entry of stableOld) {
209
+ const dayKey = new Date(entry.timestamp).toISOString().slice(0, 10);
210
+ if (!byDay.has(dayKey)) {
211
+ byDay.set(dayKey, []);
212
+ }
213
+ byDay.get(dayKey).push(entry);
214
+ }
215
+
216
+ for (const [, dayEntries] of byDay) {
217
+ // Sort by timestamp descending, keep the newest per day
218
+ dayEntries.sort((a, b) => b.timestamp - a.timestamp);
219
+ toKeep.push(dayEntries[0]); // keep the most recent
220
+ for (let i = 1; i < dayEntries.length; i++) {
221
+ this._deleteBackupFiles(dayEntries[i]);
222
+ pruned++;
223
+ }
224
+ }
225
+ }
226
+
227
+ this.manifest.backups = toKeep;
228
+ this._saveManifest();
229
+
230
+ if (pruned > 0) {
231
+ console.log(chalk.gray(` 🧹 Pruned ${pruned} old backup(s).`));
232
+ }
233
+
234
+ return pruned;
235
+ }
236
+
237
+ /**
238
+ * Get summary stats for logging.
239
+ */
240
+ getStats() {
241
+ const counts = { unstable: 0, verified: 0, stable: 0 };
242
+ for (const entry of this.manifest.backups) {
243
+ counts[entry.status] = (counts[entry.status] || 0) + 1;
244
+ }
245
+ return {
246
+ total: this.manifest.backups.length,
247
+ ...counts,
248
+ };
249
+ }
250
+
251
+ // -- Private --
252
+
253
+ _ensureDirs() {
254
+ fs.mkdirSync(this.backupsDir, { recursive: true });
255
+ }
256
+
257
+ _loadManifest() {
258
+ if (fs.existsSync(this.manifestPath)) {
259
+ try {
260
+ return JSON.parse(fs.readFileSync(this.manifestPath, "utf-8"));
261
+ } catch {
262
+ return { version: 1, backups: [] };
263
+ }
264
+ }
265
+ return { version: 1, backups: [] };
266
+ }
267
+
268
+ _saveManifest() {
269
+ fs.writeFileSync(this.manifestPath, JSON.stringify(this.manifest, null, 2), "utf-8");
270
+ }
271
+
272
+ _deleteBackupFiles(entry) {
273
+ const backupDir = path.join(this.backupsDir, entry.id);
274
+ if (fs.existsSync(backupDir)) {
275
+ for (const file of fs.readdirSync(backupDir)) {
276
+ fs.unlinkSync(path.join(backupDir, file));
277
+ }
278
+ fs.rmdirSync(backupDir);
279
+ }
280
+ }
281
+
282
+ /**
283
+ * Collect all files in the server/ directory for full backup.
284
+ * Includes: .js, .json, .sql, .db, .sqlite, .yaml, .yml, .env, .html, .css
285
+ * Excludes: node_modules, .git, large binaries
286
+ */
287
+ _collectServerFiles() {
288
+ const serverDir = path.join(this.projectRoot, "server");
289
+ if (!fs.existsSync(serverDir)) return [];
290
+
291
+ const files = [];
292
+ const SKIP = new Set(["node_modules", ".git", ".wolverine"]);
293
+ const MAX_FILE_SIZE = 10 * 1024 * 1024; // 10MB
294
+
295
+ const walk = (dir) => {
296
+ let entries;
297
+ try { entries = fs.readdirSync(dir, { withFileTypes: true }); } catch { return; }
298
+
299
+ for (const entry of entries) {
300
+ if (SKIP.has(entry.name)) continue;
301
+
302
+ const fullPath = path.join(dir, entry.name);
303
+ if (entry.isDirectory()) {
304
+ walk(fullPath);
305
+ } else {
306
+ try {
307
+ const stat = fs.statSync(fullPath);
308
+ if (stat.size <= MAX_FILE_SIZE) {
309
+ files.push(fullPath);
310
+ }
311
+ } catch {}
312
+ }
313
+ }
314
+ };
315
+
316
+ walk(serverDir);
317
+ return files;
318
+ }
319
+ }
320
+
321
+ module.exports = { BackupManager, STABILITY_THRESHOLD_MS };
@@ -0,0 +1,315 @@
1
+ const chalk = require("chalk");
2
+ const { VectorStore } = require("./vector-store");
3
+ const { embed, embedBatch, compactAndEmbed } = require("./embedder");
4
+ const { scanProject, mapToChunks } = require("./function-map");
5
+
6
+ /**
7
+ * The Wolverine Brain — semantic memory + project context.
8
+ *
9
+ * On startup:
10
+ * 1. Load persisted vector store from .wolverine/brain/
11
+ * 2. Scan the project to build a live function map
12
+ * 3. Seed wolverine documentation (only on first init)
13
+ * 4. Embed function map chunks into the "functions" namespace
14
+ *
15
+ * During operation:
16
+ * - remember(): Compact + embed + store a new memory
17
+ * - recall(): Semantic search for relevant context
18
+ * - getContext(): Build a context string for the AI agent
19
+ *
20
+ * Namespaces:
21
+ * - "docs" — wolverine documentation, how self-healing works
22
+ * - "functions" — live function map (routes, exports, classes)
23
+ * - "errors" — past errors and their contexts
24
+ * - "fixes" — successful fixes and their explanations
25
+ * - "learnings" — patterns learned from fix successes/failures
26
+ */
27
+
28
+ // Seed documents — wolverine's knowledge about itself
29
+ const SEED_DOCS = [
30
+ {
31
+ text: "Wolverine Node.js is an autonomous self-healing server agent. It monitors a Node.js process, catches crashes, analyzes errors using AI, generates code fixes, verifies them, and restarts the server. It operates as a process manager similar to PM2 but with AI-powered repair capabilities.",
32
+ metadata: { topic: "overview" },
33
+ },
34
+ {
35
+ text: "Wolverine heal pipeline: crash detected → error parsed (file, line, message) → prompt injection scan (AUDIT_MODEL) → rate limit check → fast path repair (CODING_MODEL via Responses/Chat API) → if fast path fails verification → escalate to multi-file agent (REASONING_MODEL with tools: read_file, write_file, list_files, search_files) → verify fix (syntax check + boot probe) → rollback on failure.",
36
+ metadata: { topic: "heal-pipeline" },
37
+ },
38
+ {
39
+ text: "Wolverine backup system: every fix creates a backup before patching. Status lifecycle: UNSTABLE (just created) → VERIFIED (fix passed boot probe) → STABLE (server ran 30min+ without crash). Retention: unstable backups pruned after 7 days. Stable backups older than 7 days keep 1 per day.",
40
+ metadata: { topic: "backup-system" },
41
+ },
42
+ {
43
+ text: "Wolverine security: file sandbox restricts all reads/writes to the project directory. Prompt injection detection runs on every error (regex layer + AI audit via AUDIT_MODEL). Rate limiter prevents error explosion cost with sliding window, min gap, hourly token budget, and exponential backoff for error loops.",
44
+ metadata: { topic: "security" },
45
+ },
46
+ {
47
+ text: "Wolverine model tiers: REASONING_MODEL for deep multi-step debugging. CODING_MODEL for code repair generation. CHAT_MODEL for explanations and summaries. AUDIT_MODEL for security scans (runs every error, keep cheap). UTILITY_MODEL for JSON formatting and thought compaction. TEXT_EMBEDDING_MODEL for brain vector embeddings.",
48
+ metadata: { topic: "model-config" },
49
+ },
50
+ {
51
+ text: "Wolverine performance monitoring: tracks HTTP endpoint response times, detects slow endpoints (>2s avg), identifies spam/DDoS patterns (>100 req/min to one endpoint), flags response time spikes (5x normal), detects high error rates (>20%). Triggers AI analysis for optimization suggestions.",
52
+ metadata: { topic: "perf-monitoring" },
53
+ },
54
+ {
55
+ text: "Wolverine brain: semantic vector database for long-term memory. Stores project function maps, past errors, successful fixes, and learned patterns. Uses TEXT_EMBEDDING_MODEL for embeddings and UTILITY_MODEL to compact thoughts before embedding. In-memory cosine similarity search for speed. Persisted to .wolverine/brain/.",
56
+ metadata: { topic: "brain" },
57
+ },
58
+ {
59
+ text: "Wolverine health checks: periodically pings the server's /health endpoint. After 3 consecutive failures, force-kills and triggers heal cycle. Configurable interval, timeout, and fail threshold. Prevents undetected hangs and frozen servers.",
60
+ metadata: { topic: "health-checks" },
61
+ },
62
+ {
63
+ text: "Wolverine fix verification: after applying a patch, runs a 2-step validation. Step 1: node --check for syntax errors. Step 2: boot probe — starts the server on an ephemeral port for 10s. If same error recurs: fix didn't work, rollback. If different error: fix broke something else, rollback. If alive: fix works, proceed.",
64
+ metadata: { topic: "verification" },
65
+ },
66
+ {
67
+ text: "Wolverine multi-file agent: 15-turn agent loop with tools. Can read any file, write any file type (js, json, sql, yaml, env, dockerfile), list directories, and search across the codebase. Used when the fast path single-file fix fails. Tracks token budget (50k max) to control costs.",
68
+ metadata: { topic: "agent" },
69
+ },
70
+ {
71
+ text: "Wolverine supports the Responses API for codex models and Chat Completions API for standard models. Auto-detects based on model name. Codex models use openai.responses.create() with input/instructions/tools. Standard models use openai.chat.completions.create() with messages/tools.",
72
+ metadata: { topic: "api-support" },
73
+ },
74
+ {
75
+ text: "Common Node.js errors Wolverine fixes: ReferenceError (undefined variables, typos), TypeError (calling methods on wrong types, null access), SyntaxError (missing brackets, invalid JSON), EADDRINUSE (port already in use), MODULE_NOT_FOUND (missing dependencies), ECONNREFUSED (database/service connection failures), unhandled promise rejections.",
76
+ metadata: { topic: "common-errors" },
77
+ },
78
+ {
79
+ text: "Server uses Fastify (5.6x faster than Express, 114k req/s). server/index.js wires routes with fastify.register(require('./routes/X'), {prefix:'/X'}). Route files: async function routes(fastify) { fastify.get('/', async () => ({...})); } module.exports = routes. server/routes/ has one file per resource. server/config/settings.json for all settings.",
80
+ metadata: { topic: "server-structure" },
81
+ },
82
+ {
83
+ text: "Server best practices: validate all input, use express.json() with size limits. Never expose secrets in responses. Use env vars for config. Add /health endpoint returning status+uptime+memory. Keep routes thin — logic in services. Use async/await never block event loop. Global error handler middleware. Consistent error format: {error:'message'}. One route file per resource. Rate limit public endpoints.",
84
+ metadata: { topic: "server-best-practices" },
85
+ },
86
+ {
87
+ text: "Wolverine editable scope: only files inside server/ can be modified by the agent. src/, bin/, tests/, .env, package.json, node_modules/ are all protected. The agent's _isProtectedPath guard blocks writes to anything outside server/. Direct edits target the script wolverine was launched with (server/index.js by default).",
88
+ metadata: { topic: "editable-scope" },
89
+ },
90
+ {
91
+ text: "SQL Skill: wolverine has a SQL skill at src/skills/sql.js providing: (1) sqlGuard() middleware that blocks SQL injection attacks on all endpoints by scanning query params, body, URL params, and headers for injection patterns like UNION SELECT, stacked queries, tautologies, comment bypass, hex encoding, timing attacks. (2) SafeDB class for parameterized-only database queries — blocks string concatenation in SQL. Usage: const {db,sqlGuard}=require('../src/skills/sql'); app.use(sqlGuard()); db.all('SELECT * FROM users WHERE id=?',[id]);",
92
+ metadata: { topic: "skill-sql" },
93
+ },
94
+ {
95
+ text: "SQL injection patterns wolverine detects: OR/AND tautologies ('1'='1'), UNION SELECT, stacked queries (;DROP TABLE), comment bypass (-- or #), hex encoding (0x), CHAR() encoding, SLEEP/BENCHMARK timing attacks, INFORMATION_SCHEMA probing, LOAD_FILE/INTO OUTFILE data exfiltration. All blocked with 403 and logged as security.sqli_blocked events.",
96
+ metadata: { topic: "skill-sql-patterns" },
97
+ },
98
+ {
99
+ text: "Database best practices: SafeDB uses split connections — separate read connection (concurrent, never waits) and write connection (single writer, FIFO queue). Write queue drains synchronously in one microtask, zero delays. WAL mode means readers never block writers. Each write is microseconds. db.transaction(fn) queues as single atomic unit. No busy_timeout, no blocking, no IPC. Reads: db.get(), db.all() are instant. Writes: db.run(), db.exec() go through queue.",
100
+ metadata: { topic: "skill-sql-best-practices" },
101
+ },
102
+ {
103
+ text: "Sub-agent system: wolverine can spawn specialized sub-agents for divide-and-conquer. 7 types: explore (read-only, investigates codebase), plan (read-only, proposes fix strategy), fix (read+write, applies targeted fix), verify (read-only, checks if fix works), research (searches brain+web for solutions), security (audits for vulnerabilities), database (handles DB issues with SQL skill). Each type has restricted tools and a specific model.",
104
+ metadata: { topic: "sub-agents" },
105
+ },
106
+ {
107
+ text: "Sub-agent workflow: explore→plan→fix. Explorer finds relevant files, Planner proposes a strategy using exploration results, Fixer executes the plan. Used automatically on goal loop iteration 3+ and dashboard LARGE tier commands. Sub-agents can also run in parallel via spawnParallel() for independent tasks like running security audit + explore simultaneously.",
108
+ metadata: { topic: "sub-agent-workflow" },
109
+ },
110
+ {
111
+ text: "Sub-agent tool restrictions (claw-code pattern): explore gets read_file/glob/grep/git. plan gets read_file/glob/grep/brain. fix gets read_file/write_file/edit_file/glob/grep. verify gets read_file/glob/grep/bash. research gets read_file/grep/web_fetch/brain. security gets read_file/glob/grep. database gets read_file/write_file/edit_file/glob/grep/bash. No agent gets tools it doesn't need.",
112
+ metadata: { topic: "sub-agent-tools" },
113
+ },
114
+ {
115
+ text: "Heal pipeline escalation: Iteration 1 uses fast path (CODING_MODEL, single file, cheapest). Iteration 2 uses single agent (REASONING_MODEL, multi-file, 8 turns). Iteration 3+ uses sub-agents (explore→plan→fix, 3 specialized agents with restricted tools). Each iteration gets context from previous failures. Deep research (RESEARCH_MODEL) triggers after 2+ failures.",
116
+ metadata: { topic: "heal-escalation" },
117
+ },
118
+ {
119
+ text: "Process manager: wolverine monitors memory (RSS/heap) every 10s, detects memory leaks (N consecutive growth samples → auto-restart), enforces memory limit (default 512MB), tracks CPU%, probes all routes every 30s, detects response time degradation trends (stable/degrading/improving). Analytics dashboard shows memory/CPU charts and per-route health.",
120
+ metadata: { topic: "process-manager" },
121
+ },
122
+ {
123
+ text: "Auto-clustering: wolverine detects machine capabilities (cores, RAM, disk, platform, Docker/K8s, cloud provider) and forks optimal workers. 2 cores = 2 workers, 3-4 = cores-1, 5-8 = cores-1 cap 6, 9+ = cores/2 cap 16. Workers auto-respawn on crash with exponential backoff. CLI: --single (no cluster), --workers N (fixed), --info (show system). Settings in server/config/settings.json cluster.mode.",
124
+ metadata: { topic: "clustering" },
125
+ },
126
+ {
127
+ text: "System detection: wolverine --info shows CPU cores/model/speed, total/free RAM, disk space, Node version, platform, container environment (Docker, Kubernetes), cloud provider (AWS, GCP, Azure, Railway, Fly, Render, Heroku). Used by ClusterManager to auto-scale worker count. Dashboard API: GET /api/system returns full machine info.",
128
+ metadata: { topic: "system-detection" },
129
+ },
130
+ {
131
+ text: "Configuration: all settings in server/config/settings.json (models, port, telemetry, rate limits, health checks, clustering, cors, logging). Secrets only in .env.local (API keys, admin key). Config loader priority: env vars > settings.json > defaults. Agent can read and edit settings.json since it's inside server/.",
132
+ metadata: { topic: "configuration" },
133
+ },
134
+ {
135
+ text: "Platform telemetry: lightweight background process, zero-config. Default platform: api.wolverinenode.xyz. Auto-registers on first run (retries every 60s until platform responds), saves key to .wolverine/platform-key. Heartbeat payload matches PLATFORM.md spec: instanceId, server (name/port/uptime/status/pid), process (memoryMB/cpuPercent), routes, repairs, usage (tokens/cost/calls/byCategory), brain, backups. Offline-resilient: queues up to 1440 heartbeats locally, drains on reconnect. No chalk dependency, cached version/key in memory, minimal IO. Opt out: WOLVERINE_TELEMETRY=false. Override URL: WOLVERINE_PLATFORM_URL.",
136
+ metadata: { topic: "platform-telemetry" },
137
+ },
138
+ {
139
+ text: "Telemetry architecture: 4 files, ~250 lines total. heartbeat.js sends one HTTP POST every 60s (5s timeout, non-blocking). register.js auto-registers and caches key in memory + disk. queue.js appends to JSONL file only on failure, trims lazily. telemetry.js collects from subsystems using optional chaining (no crashes if subsystem missing). All secrets redacted before sending. Response bodies drained immediately (res.resume). No blocking, no delays, no busy waits.",
140
+ metadata: { topic: "telemetry-architecture" },
141
+ },
142
+ ];
143
+
144
+ class Brain {
145
+ constructor(projectRoot) {
146
+ this.projectRoot = projectRoot;
147
+ this.store = new VectorStore(projectRoot);
148
+ this.functionMap = null;
149
+ this._initialized = false;
150
+ }
151
+
152
+ /**
153
+ * Initialize the brain. Call once on startup.
154
+ * Scans project, seeds docs if needed, embeds function map.
155
+ */
156
+ async init() {
157
+ const stats = this.store.getStats();
158
+ const isFirstRun = stats.totalEntries === 0;
159
+
160
+ console.log(chalk.gray(` 🧠 Brain: ${stats.totalEntries} memories loaded`));
161
+
162
+ // 1. Seed wolverine docs on first run
163
+ if (isFirstRun) {
164
+ console.log(chalk.gray(" 🧠 First run — seeding wolverine documentation..."));
165
+ await this._seedDocs();
166
+ }
167
+
168
+ // 2. Scan project for live function map
169
+ console.log(chalk.gray(" 🧠 Scanning project for function map..."));
170
+ this.functionMap = scanProject(this.projectRoot);
171
+ console.log(chalk.gray(` 🧠 Found: ${this.functionMap.routes.length} routes, ${this.functionMap.functions.length} functions, ${this.functionMap.classes.length} classes`));
172
+
173
+ // 3. Embed function map (replace old "functions" entries)
174
+ await this._embedFunctionMap();
175
+
176
+ // 4. Save
177
+ this.store.save();
178
+
179
+ this._initialized = true;
180
+ const finalStats = this.store.getStats();
181
+ console.log(chalk.gray(` 🧠 Brain ready: ${finalStats.totalEntries} total memories`));
182
+ if (finalStats.namespaces) {
183
+ const ns = Object.entries(finalStats.namespaces).map(([k, v]) => `${k}:${v}`).join(", ");
184
+ console.log(chalk.gray(` 🧠 Namespaces: ${ns}`));
185
+ }
186
+ }
187
+
188
+ /**
189
+ * Remember something — compact, embed, and store.
190
+ *
191
+ * @param {string} namespace - "errors", "fixes", "learnings"
192
+ * @param {string} rawText - The raw text to remember
193
+ * @param {object} metadata - Structured metadata
194
+ */
195
+ async remember(namespace, rawText, metadata = {}) {
196
+ const { compacted, embedding } = await compactAndEmbed(rawText);
197
+ const id = this.store.add(namespace, compacted, embedding, metadata);
198
+ this.store.save();
199
+ return id;
200
+ }
201
+
202
+ /**
203
+ * Recall relevant memories — two-tier search for speed.
204
+ *
205
+ * Tier 1: Fast keyword search (instant, no API call)
206
+ * Tier 2: Semantic embedding search (API call, only if keywords miss)
207
+ */
208
+ async recall(query, options = {}) {
209
+ const topK = options.topK || 5;
210
+
211
+ // Tier 1: keyword search (instant)
212
+ const keywordResults = this.store.keywordSearch(query, { topK, namespace: options.namespace, minTokens: 1 });
213
+ if (keywordResults.length >= topK) {
214
+ return keywordResults;
215
+ }
216
+
217
+ // Tier 2: semantic search (API call — only if keyword search didn't find enough)
218
+ try {
219
+ const queryEmbedding = await embed(query);
220
+ const semanticResults = this.store.search(queryEmbedding, { topK, ...options });
221
+
222
+ // Merge: keyword results first (they're more precise), then semantic
223
+ const seen = new Set(keywordResults.map(r => r.id));
224
+ const merged = [...keywordResults];
225
+ for (const r of semanticResults) {
226
+ if (!seen.has(r.id)) {
227
+ merged.push(r);
228
+ seen.add(r.id);
229
+ }
230
+ }
231
+ return merged.slice(0, topK);
232
+ } catch {
233
+ // If embedding API fails, return keyword results only
234
+ return keywordResults;
235
+ }
236
+ }
237
+
238
+ /**
239
+ * Build a full context string for the agent.
240
+ * Includes: function map summary + relevant memories.
241
+ */
242
+ async getContext(errorMessage) {
243
+ const parts = [];
244
+
245
+ // Function map summary (always included — fast, no API call)
246
+ if (this.functionMap) {
247
+ parts.push("## Server Function Map\n" + this.functionMap.summary);
248
+ }
249
+
250
+ // Two-tier recall: keyword first, semantic fallback
251
+ if (errorMessage) {
252
+ const memories = await this.recall(errorMessage, { topK: 5, minScore: 0.3 });
253
+ if (memories.length > 0) {
254
+ parts.push("\n## Relevant Context from Brain");
255
+ for (const mem of memories) {
256
+ const nsLabel = mem.namespace.toUpperCase();
257
+ parts.push(`[${nsLabel}] ${mem.text}`);
258
+ }
259
+ }
260
+ }
261
+
262
+ return parts.join("\n");
263
+ }
264
+
265
+ /**
266
+ * Get stats for dashboard/logging.
267
+ */
268
+ getStats() {
269
+ return {
270
+ ...this.store.getStats(),
271
+ functionMap: this.functionMap ? {
272
+ routes: this.functionMap.routes.length,
273
+ functions: this.functionMap.functions.length,
274
+ classes: this.functionMap.classes.length,
275
+ files: this.functionMap.files.length,
276
+ } : null,
277
+ };
278
+ }
279
+
280
+ // -- Private --
281
+
282
+ async _seedDocs() {
283
+ const texts = SEED_DOCS.map(d => d.text);
284
+ const embeddings = await embedBatch(texts);
285
+
286
+ for (let i = 0; i < SEED_DOCS.length; i++) {
287
+ this.store.add("docs", SEED_DOCS[i].text, embeddings[i], SEED_DOCS[i].metadata);
288
+ }
289
+
290
+ console.log(chalk.gray(` 🧠 Seeded ${SEED_DOCS.length} documentation entries`));
291
+ }
292
+
293
+ async _embedFunctionMap() {
294
+ // Clear old function map entries
295
+ const oldEntries = this.store.getNamespace("functions");
296
+ for (const entry of oldEntries) {
297
+ this.store.delete(entry.id);
298
+ }
299
+
300
+ // Generate chunks from the live map
301
+ const chunks = mapToChunks(this.functionMap);
302
+ if (chunks.length === 0) return;
303
+
304
+ const texts = chunks.map(c => c.text);
305
+ const embeddings = await embedBatch(texts);
306
+
307
+ for (let i = 0; i < chunks.length; i++) {
308
+ this.store.add("functions", chunks[i].text, embeddings[i], chunks[i].metadata);
309
+ }
310
+
311
+ console.log(chalk.gray(` 🧠 Indexed ${chunks.length} function map chunks`));
312
+ }
313
+ }
314
+
315
+ module.exports = { Brain, SEED_DOCS };