@stackmemoryai/stackmemory 0.3.14 → 0.3.16

This diff reflects the contents of publicly available package versions released to a supported public registry, and is provided for informational purposes only.
@@ -0,0 +1,747 @@
+ import { Command } from "commander";
+ import { join } from "path";
+ import { existsSync } from "fs";
+ import { logger } from "../../core/monitoring/logger.js";
+ function createContextRehydrateCommand() {
+ const command = new Command("rehydrate");
+ command.description("Enhanced context rehydration after Claude compaction").option("-c, --checkpoint <id>", "Use specific checkpoint ID").option("--create", "Create new rehydration checkpoint").option("-l, --list", "List available checkpoints").option("-v, --verbose", "Verbose output").option("--verify", "Verify checkpoint contents and integrity").option("--with-traces", "Include stack trace context in output").option("--traces", "Show recent stack traces from database").option("--trace-stats", "Show stack trace statistics and patterns").action(async (options) => {
+ await handleContextRehydrate(options);
+ });
+ return command;
+ }
+ async function handleContextRehydrate(options) {
+ const projectRoot = process.cwd();
+ const dbPath = join(projectRoot, ".stackmemory", "context.db");
+ if (!existsSync(dbPath)) {
+ console.log('\u274C StackMemory not initialized. Run "stackmemory init" first.');
+ return;
+ }
+ try {
+ console.log("\u{1F504} Enhanced Context Rehydration System");
+ console.log("\u{1F4DA} This system preserves rich context across Claude compactions\n");
+ if (options.list) {
+ await listCheckpoints();
+ return;
+ }
+ if (options.create) {
+ console.log("\u{1F504} Creating rehydration checkpoint...");
+ await createRehydrationCheckpoint(options.withTraces);
+ return;
+ }
+ if (options.verify) {
+ await verifyCheckpoints(options.checkpoint);
+ return;
+ }
+ if (options.traces) {
+ await showStackTraces();
+ return;
+ }
+ if (options.traceStats) {
+ await showStackTraceStats();
+ return;
+ }
+ console.log("\u{1F4BE} Starting context rehydration...");
+ if (options.verbose) {
+ console.log("\u{1F4CB} Analyzing current session state...");
+ }
+ const success = await performRehydration(options.checkpoint);
+ if (success) {
+ console.log("\u2705 Context successfully rehydrated");
+ console.log("\u{1F4CA} Rich context has been injected into current session");
+ if (options.verbose) {
+ console.log("\n\u{1F4C1} Context includes:");
+ console.log(" \u2022 File snapshots with content previews");
+ console.log(" \u2022 Project structure mapping");
+ console.log(" \u2022 Previous decisions and reasoning");
+ console.log(" \u2022 Active workflow detection");
+ console.log(" \u2022 User preferences and pain points");
+ }
+ } else {
+ console.log("\u26A0\uFE0F Context rehydration failed");
+ console.log("\u{1F4A1} Try creating a checkpoint first with --create");
+ }
+ } catch (error) {
+ logger.error("Context rehydration error:", error);
+ console.error("\u274C Failed to rehydrate context:", error instanceof Error ? error.message : error);
+ process.exit(1);
+ }
+ }
+ async function createRehydrationCheckpoint(withTraces = false) {
+ const fs = await import("fs/promises");
+ const checkpointDir = join(process.cwd(), ".stackmemory", "rehydration");
+ try {
+ await fs.mkdir(checkpointDir, { recursive: true });
+ const checkpointId = `checkpoint_${Date.now()}`;
+ const checkpoint = {
+ id: checkpointId,
+ timestamp: Date.now(),
+ created_at: (/* @__PURE__ */ new Date()).toISOString(),
+ working_directory: process.cwd(),
+ recent_files: await getRecentFiles(),
+ project_context: await analyzeProjectContext(),
+ session_info: {
+ pid: process.pid,
+ env: {
+ NODE_ENV: process.env.NODE_ENV,
+ PWD: process.env.PWD
+ }
+ },
+ stack_traces: withTraces ? await captureStackTraces() : [],
+ error_patterns: withTraces ? await detectErrorPatterns() : [],
+ verification: {
+ files_captured: 0,
+ total_size: 0,
+ integrity_hash: ""
+ }
+ };
+ const checkpointPath = join(checkpointDir, `${checkpointId}.json`);
+ await fs.writeFile(checkpointPath, JSON.stringify(checkpoint, null, 2));
+ checkpoint.verification.files_captured = checkpoint.recent_files.length;
+ checkpoint.verification.total_size = checkpoint.recent_files.reduce((sum, file) => sum + file.size, 0);
+ checkpoint.verification.integrity_hash = await calculateCheckpointHash(checkpoint);
+ await fs.writeFile(checkpointPath, JSON.stringify(checkpoint, null, 2));
+ console.log(`\u2705 Created checkpoint: ${checkpointId}`);
+ console.log(`\u{1F4C1} Saved to: ${checkpointPath}`);
+ console.log(`\u{1F4CA} Captured ${checkpoint.recent_files.length} recent files`);
+ if (withTraces) {
+ console.log(`\u{1F41B} Captured ${checkpoint.stack_traces.length} stack traces`);
+ console.log(`\u{1F50D} Detected ${checkpoint.error_patterns.length} error patterns`);
+ }
+ } catch (error) {
+ console.error("\u274C Failed to create checkpoint:", error);
+ throw error;
+ }
+ }
+ async function performRehydration(checkpointId) {
+ const fs = await import("fs/promises");
+ const checkpointDir = join(process.cwd(), ".stackmemory", "rehydration");
+ try {
+ let checkpoint;
+ if (checkpointId) {
+ const checkpointPath = join(checkpointDir, `${checkpointId}.json`);
+ const content = await fs.readFile(checkpointPath, "utf8");
+ checkpoint = JSON.parse(content);
+ } else {
+ const files = await fs.readdir(checkpointDir);
+ const checkpointFiles = files.filter((f) => f.endsWith(".json"));
+ if (checkpointFiles.length === 0) {
+ console.log("\u{1F4ED} No checkpoints found");
+ return false;
+ }
+ checkpointFiles.sort((a, b) => b.localeCompare(a));
+ const latestFile = checkpointFiles[0];
+ const content = await fs.readFile(join(checkpointDir, latestFile), "utf8");
+ checkpoint = JSON.parse(content);
+ }
+ console.log(`\u{1F504} Rehydrating from checkpoint: ${checkpoint.id}`);
+ console.log(`\u{1F4C5} Created: ${checkpoint.created_at}`);
+ console.log(`\u{1F4C1} Working directory: ${checkpoint.working_directory}`);
+ console.log(`\u{1F4CB} Recent files: ${checkpoint.recent_files.length}`);
+ console.log("\n\u{1F4CA} Context Summary:");
+ checkpoint.recent_files.slice(0, 5).forEach((file, i) => {
+ console.log(` ${i + 1}. ${file.path} (${file.size} bytes, modified ${new Date(file.mtime).toLocaleString()})`);
+ });
+ if (checkpoint.project_context.key_files.length > 0) {
+ console.log(`
+ \u{1F511} Key project files: ${checkpoint.project_context.key_files.join(", ")}`);
+ }
+ return true;
+ } catch (error) {
+ console.error("\u274C Failed to rehydrate:", error);
+ return false;
+ }
+ }
+ async function getRecentFiles() {
+ const fs = await import("fs/promises");
+ try {
+ const files = await fs.readdir(".", { withFileTypes: true });
+ const recentFiles = [];
+ for (const file of files.slice(0, 20)) {
+ if (file.isFile() && !file.name.startsWith(".")) {
+ try {
+ const stats = await fs.stat(file.name);
+ recentFiles.push({
+ path: file.name,
+ size: stats.size,
+ mtime: stats.mtimeMs
+ });
+ } catch {
+ }
+ }
+ }
+ return recentFiles.sort((a, b) => b.mtime - a.mtime);
+ } catch (error) {
+ console.warn("Could not analyze recent files:", error);
+ return [];
+ }
+ }
+ async function analyzeProjectContext() {
+ const fs = await import("fs/promises");
+ const context = {
+ key_files: [],
+ project_type: "unknown",
+ framework: "unknown"
+ };
+ try {
+ const projectFiles = ["package.json", "tsconfig.json", "README.md", "docker-compose.yml"];
+ for (const file of projectFiles) {
+ try {
+ await fs.access(file);
+ context.key_files.push(file);
+ } catch {
+ }
+ }
+ if (context.key_files.includes("package.json")) {
+ context.project_type = "node";
+ try {
+ const packageContent = await fs.readFile("package.json", "utf8");
+ const packageJson = JSON.parse(packageContent);
+ if (packageJson.dependencies?.react) {
+ context.framework = "react";
+ } else if (packageJson.dependencies?.vue) {
+ context.framework = "vue";
+ } else if (packageJson.dependencies?.next) {
+ context.framework = "next";
+ }
+ } catch {
+ }
+ }
+ } catch (error) {
+ console.warn("Could not analyze project context:", error);
+ }
+ return context;
+ }
+ async function listCheckpoints() {
+ try {
+ const checkpointDir = "./.stackmemory/rehydration";
+ const fs = await import("fs/promises");
+ try {
+ const files = await fs.readdir(checkpointDir);
+ const checkpoints = files.filter((f) => f.endsWith(".json"));
+ if (checkpoints.length === 0) {
+ console.log("\u{1F4ED} No rehydration checkpoints found");
+ console.log("\u{1F4A1} Create one with: stackmemory context rehydrate --create");
+ return;
+ }
+ console.log(`\u{1F4CB} Found ${checkpoints.length} rehydration checkpoint(s):
+ `);
+ for (const file of checkpoints) {
+ const id = file.replace(".json", "");
+ const stats = await fs.stat(`${checkpointDir}/${file}`);
+ try {
+ const content = await fs.readFile(`${checkpointDir}/${file}`, "utf8");
+ const checkpoint = JSON.parse(content);
+ console.log(`\u{1F516} ${id}`);
+ console.log(` Created: ${stats.birthtime.toISOString()}`);
+ console.log(` Size: ${(stats.size / 1024).toFixed(1)} KB`);
+ console.log(` Files: ${checkpoint.verification?.files_captured || checkpoint.recent_files?.length || 0}`);
+ if (checkpoint.stack_traces?.length > 0) {
+ console.log(` Stack traces: ${checkpoint.stack_traces.length}`);
+ }
+ if (checkpoint.error_patterns?.length > 0) {
+ console.log(` Error patterns: ${checkpoint.error_patterns.length}`);
+ }
+ console.log("");
+ } catch {
+ console.log(`\u{1F516} ${id}`);
+ console.log(` Created: ${stats.birthtime.toISOString()}`);
+ console.log(` Size: ${(stats.size / 1024).toFixed(1)} KB
+ `);
+ }
+ }
+ console.log("\u{1F4A1} Use: stackmemory context rehydrate -c <checkpoint-id>");
+ console.log("\u{1F4A1} Verify: stackmemory context rehydrate --verify -c <checkpoint-id>");
+ } catch (err) {
+ console.log("\u{1F4ED} No rehydration checkpoints directory found");
+ console.log("\u{1F4A1} Create first checkpoint with: stackmemory context rehydrate --create");
+ }
+ } catch (error) {
+ console.error("\u274C Failed to list checkpoints:", error);
+ }
+ }
+ async function verifyCheckpoints(checkpointId) {
+ try {
+ const checkpointDir = "./.stackmemory/rehydration";
+ const fs = await import("fs/promises");
+ if (checkpointId) {
+ const checkpointPath = `${checkpointDir}/${checkpointId}.json`;
+ await verifyCheckpoint(checkpointPath);
+ } else {
+ const files = await fs.readdir(checkpointDir);
+ const checkpoints = files.filter((f) => f.endsWith(".json"));
+ console.log(`\u{1F50D} Verifying ${checkpoints.length} checkpoint(s)...
+ `);
+ for (const file of checkpoints) {
+ await verifyCheckpoint(`${checkpointDir}/${file}`);
+ console.log("");
+ }
+ }
+ } catch (error) {
+ console.error("\u274C Failed to verify checkpoints:", error);
+ }
+ }
+ async function verifyCheckpoint(checkpointPath) {
+ const fs = await import("fs/promises");
+ try {
+ const content = await fs.readFile(checkpointPath, "utf8");
+ const checkpoint = JSON.parse(content);
+ const fileName = checkpointPath.split("/").pop()?.replace(".json", "") || "unknown";
+ console.log(`\u{1F50D} Verifying checkpoint: ${fileName}`);
+ const requiredFields = ["id", "timestamp", "working_directory", "recent_files"];
+ const missingFields = requiredFields.filter((field) => !checkpoint[field]);
+ if (missingFields.length > 0) {
+ console.log(`\u274C Missing required fields: ${missingFields.join(", ")}`);
+ return;
+ }
+ console.log(`\u{1F4C1} Files captured: ${checkpoint.recent_files?.length || 0}`);
+ if (checkpoint.verification) {
+ console.log(`\u{1F4CA} Total size: ${(checkpoint.verification.total_size / 1024).toFixed(1)} KB`);
+ console.log(`\u{1F512} Integrity hash: ${checkpoint.verification.integrity_hash.slice(0, 12)}...`);
+ const recalculatedHash = await calculateCheckpointHash(checkpoint);
+ if (recalculatedHash === checkpoint.verification.integrity_hash) {
+ console.log(`\u2705 Integrity check: PASSED`);
+ } else {
+ console.log(`\u274C Integrity check: FAILED (data may be corrupted)`);
+ }
+ }
+ if (checkpoint.stack_traces) {
+ console.log(`\u{1F41B} Stack traces: ${checkpoint.stack_traces.length}`);
+ const pendingTraces = checkpoint.stack_traces.filter((t) => t.resolution_status === "pending");
+ const resolvedTraces = checkpoint.stack_traces.filter((t) => t.resolution_status === "resolved");
+ if (pendingTraces.length > 0) {
+ console.log(` \u23F3 Pending resolution: ${pendingTraces.length}`);
+ }
+ if (resolvedTraces.length > 0) {
+ console.log(` \u2705 Resolved: ${resolvedTraces.length}`);
+ }
+ }
+ if (checkpoint.error_patterns?.length > 0) {
+ console.log(`\u{1F50D} Error patterns detected: ${checkpoint.error_patterns.join(", ")}`);
+ }
+ console.log(`\u2705 Checkpoint verification complete`);
+ } catch (error) {
+ console.log(`\u274C Failed to verify checkpoint: ${error}`);
+ }
+ }
+ async function captureStackTraces() {
+ try {
+ const traces = [];
+ const fs = await import("fs/promises");
+ const { execSync } = await import("child_process");
+ const stackMemoryLogs = [
+ ".stackmemory/error.log",
+ ".stackmemory/compaction.log",
+ ".stackmemory/trace.log",
+ ".stackmemory/debug.log"
+ ];
+ const nodeLogs = [
+ "npm-debug.log",
+ "error.log",
+ "debug.log",
+ "yarn-error.log",
+ "pnpm-debug.log"
+ ];
+ const buildLogs = [
+ "build-errors.log",
+ "webpack-errors.log",
+ "vite-errors.log",
+ "jest-errors.log",
+ "test-results.log"
+ ];
+ const frameworkLogs = [
+ ".next/trace",
+ "logs/error.log",
+ "tmp/cache/error.log"
+ ];
+ const allLogFiles = [...stackMemoryLogs, ...nodeLogs, ...buildLogs, ...frameworkLogs];
+ for (const logFile of allLogFiles) {
+ await extractTracesFromLogFile(logFile, traces, fs);
+ }
+ await extractFromTerminalHistory(traces);
+ await extractFromClaudeSession(traces, fs);
+ await extractFromBuildCommands(traces);
+ await extractFromGitLogs(traces);
+ await extractFromBrowserLogs(traces, fs);
+ return traces;
+ } catch {
+ return [];
+ }
+ }
+ async function detectErrorPatterns() {
+ const traces = await captureStackTraces();
+ const patterns = /* @__PURE__ */ new Map();
+ for (const trace of traces) {
+ const errorType = trace.error_message.split(":")[0].trim();
+ patterns.set(errorType, (patterns.get(errorType) || 0) + 1);
+ }
+ return Array.from(patterns.entries()).filter(([, count]) => count > 1).map(([pattern]) => pattern);
+ }
+ async function extractTracesFromLogFile(logFile, traces, fs) {
+ try {
+ const logContent = await fs.readFile(logFile, "utf8");
+ const lines = logContent.split("\n");
+ const errorPatterns = [
+ /Error:/i,
+ /TypeError:/i,
+ /ReferenceError:/i,
+ /SyntaxError:/i,
+ /RangeError:/i,
+ /URIError:/i,
+ /EvalError:/i,
+ /UnhandledPromiseRejectionWarning:/i,
+ /DeprecationWarning:/i,
+ /\s+at\s+/,
+ // Stack trace lines
+ /Failed to compile/i,
+ /Build failed/i,
+ /Test failed/i
+ ];
+ let currentError = null;
+ let stackFrames = [];
+ for (let i = 0; i < lines.length; i++) {
+ const line = lines[i];
+ if (errorPatterns.some((pattern) => pattern.test(line))) {
+ if (currentError && stackFrames.length > 0) {
+ traces.push({
+ ...currentError,
+ stack_frames: [...stackFrames],
+ file_path: logFile,
+ timestamp: Date.now(),
+ context: `Extracted from ${logFile} around line ${i}`,
+ resolution_status: "pending"
+ });
+ }
+ if (line.includes("Error:") || line.includes("TypeError:")) {
+ currentError = {
+ error_message: line.trim()
+ };
+ stackFrames = [line.trim()];
+ } else if (line.includes("at ")) {
+ stackFrames.push(line.trim());
+ }
+ } else if (currentError && line.includes("at ")) {
+ stackFrames.push(line.trim());
+ }
+ }
+ if (currentError && stackFrames.length > 0) {
+ traces.push({
+ ...currentError,
+ stack_frames: [...stackFrames],
+ file_path: logFile,
+ timestamp: Date.now(),
+ context: `Extracted from ${logFile}`,
+ resolution_status: "pending"
+ });
+ }
+ } catch {
+ }
+ }
+ async function extractFromTerminalHistory(traces) {
+ try {
+ const { execSync } = await import("child_process");
+ const historyCommands = [
+ "npm run build 2>&1 | tail -50",
+ "npm test 2>&1 | tail -50",
+ "npm start 2>&1 | tail -50"
+ ];
+ for (const cmd of historyCommands) {
+ try {
+ const output = execSync(cmd, { encoding: "utf8", timeout: 5e3 });
+ if (output.includes("Error:") || output.includes("failed")) {
+ const errorLines = output.split("\n").filter(
+ (line) => line.includes("Error:") || line.includes("at ") || line.includes("failed")
+ );
+ if (errorLines.length > 0) {
+ traces.push({
+ error_message: errorLines[0],
+ stack_frames: errorLines,
+ file_path: "terminal_output",
+ timestamp: Date.now(),
+ context: `Recent command: ${cmd}`,
+ resolution_status: "pending"
+ });
+ }
+ }
+ } catch {
+ }
+ }
+ } catch {
+ }
+ }
+ async function extractFromClaudeSession(traces, fs) {
+ try {
+ const claudePaths = [
+ "~/.claude/logs",
+ "~/.local/share/claude/logs",
+ "/tmp/claude-logs",
+ ".claude-logs"
+ ];
+ for (const logPath of claudePaths) {
+ try {
+ const files = await fs.readdir(logPath);
+ const recentLogs = files.filter((f) => f.endsWith(".log")).slice(-5);
+ for (const logFile of recentLogs) {
+ await extractTracesFromLogFile(`${logPath}/${logFile}`, traces, fs);
+ }
+ } catch {
+ }
+ }
+ } catch {
+ }
+ }
+ async function extractFromBuildCommands(traces) {
+ try {
+ const { execSync } = await import("child_process");
+ const buildCommands = [
+ "npm run lint --silent",
+ "npm run typecheck --silent",
+ "npx tsc --noEmit --skipLibCheck"
+ ];
+ for (const cmd of buildCommands) {
+ try {
+ execSync(cmd, { encoding: "utf8", timeout: 1e4 });
+ } catch (error) {
+ if (error.stdout || error.stderr) {
+ const output = error.stdout + error.stderr;
+ const errorLines = output.split("\n").filter(
+ (line) => line.includes("Error:") || line.includes("at ") || line.includes("error TS")
+ );
+ if (errorLines.length > 0) {
+ traces.push({
+ error_message: errorLines[0] || `Build command failed: ${cmd}`,
+ stack_frames: errorLines,
+ file_path: "build_output",
+ timestamp: Date.now(),
+ context: `Build command: ${cmd}`,
+ resolution_status: "pending"
+ });
+ }
+ }
+ }
+ }
+ } catch {
+ }
+ }
+ async function extractFromGitLogs(traces) {
+ try {
+ const { execSync } = await import("child_process");
+ const gitOutput = execSync(
+ 'git log --oneline -10 --grep="fix\\|error\\|bug" 2>/dev/null || echo "No git history"',
+ { encoding: "utf8", timeout: 5e3 }
+ );
+ if (gitOutput.includes("fix") || gitOutput.includes("error")) {
+ traces.push({
+ error_message: "Recent git commits indicate error fixes",
+ stack_frames: gitOutput.split("\n").filter((line) => line.trim()),
+ file_path: "git_history",
+ timestamp: Date.now(),
+ context: "Git commit history analysis",
+ resolution_status: "resolved"
+ // These are likely fixed
+ });
+ }
+ } catch {
+ }
+ }
+ async function extractFromBrowserLogs(traces, fs) {
+ try {
+ const browserLogPaths = [
+ "console.log",
+ "browser-errors.log",
+ "dev-server.log",
+ ".vscode/dev-console.log",
+ "tmp/browser-console.log"
+ ];
+ for (const logPath of browserLogPaths) {
+ try {
+ const logContent = await fs.readFile(logPath, "utf8");
+ const browserPatterns = [
+ /console\.error/i,
+ /Uncaught \w+Error/i,
+ /Promise rejection/i,
+ /React\s+Warning/i,
+ /Failed to load/i
+ ];
+ const lines = logContent.split("\n");
+ for (const line of lines) {
+ if (browserPatterns.some((pattern) => pattern.test(line))) {
+ traces.push({
+ error_message: line.trim(),
+ stack_frames: [line.trim()],
+ file_path: logPath,
+ timestamp: Date.now(),
+ context: "Browser console error",
+ resolution_status: "pending"
+ });
+ }
+ }
+ } catch {
+ }
+ }
+ } catch {
+ }
+ }
+ async function showStackTraces() {
+ try {
+ const projectRoot = process.cwd();
+ const dbPath = join(projectRoot, ".stackmemory", "context.db");
+ if (!existsSync(dbPath)) {
+ console.log('\u274C StackMemory not initialized. Run "stackmemory init" first.');
+ return;
+ }
+ const Database = (await import("better-sqlite3")).default;
+ const db = new Database(dbPath);
+ try {
+ const tableExists = db.prepare(`
+ SELECT name FROM sqlite_master
+ WHERE type='table' AND name='stack_traces'
+ `).get();
+ if (!tableExists) {
+ console.log("\u{1F4ED} No stack traces found in database");
+ console.log("\u{1F4A1} Stack traces are stored when using enhanced rehydration features");
+ return;
+ }
+ const traces = db.prepare(`
+ SELECT * FROM stack_traces
+ ORDER BY created_at DESC
+ LIMIT 20
+ `).all();
+ if (traces.length === 0) {
+ console.log("\u{1F4ED} No stack traces found in database");
+ return;
+ }
+ console.log(`\u{1F41B} Recent Stack Traces (${traces.length} found)
+ `);
+ for (const trace of traces) {
+ const createdAt = new Date(trace.created_at * 1e3).toLocaleString();
+ const severity = trace.error_severity || "medium";
+ const severityIcon = severity === "high" ? "\u{1F534}" : severity === "low" ? "\u{1F7E1}" : "\u{1F7E0}";
+ console.log(`${severityIcon} ${trace.error_type || "Error"} - ${severity.toUpperCase()}`);
+ console.log(` Message: ${trace.error_message}`);
+ console.log(` File: ${trace.file_path || "unknown"}${trace.line_number ? `:${trace.line_number}` : ""}`);
+ console.log(` Function: ${trace.function_name || "unknown"}`);
+ console.log(` Status: ${trace.resolution_status}`);
+ console.log(` Created: ${createdAt}`);
+ console.log(` Context: ${trace.context || "No context"}`);
+ const stackFrames = JSON.parse(trace.stack_frames || "[]");
+ if (stackFrames.length > 0) {
+ console.log(` Stack (first 3 lines):`);
+ stackFrames.slice(0, 3).forEach((frame) => {
+ console.log(` ${frame.trim()}`);
+ });
+ }
+ console.log("");
+ }
+ console.log("\u{1F4A1} Use --trace-stats for statistics and patterns");
+ } finally {
+ db.close();
+ }
+ } catch (error) {
+ console.error("\u274C Failed to show stack traces:", error);
+ }
+ }
+ async function showStackTraceStats() {
+ try {
+ const projectRoot = process.cwd();
+ const dbPath = join(projectRoot, ".stackmemory", "context.db");
+ if (!existsSync(dbPath)) {
+ console.log('\u274C StackMemory not initialized. Run "stackmemory init" first.');
+ return;
+ }
+ const Database = (await import("better-sqlite3")).default;
+ const db = new Database(dbPath);
+ try {
+ const tableExists = db.prepare(`
+ SELECT name FROM sqlite_master
+ WHERE type='table' AND name='stack_traces'
+ `).get();
+ if (!tableExists) {
+ console.log("\u{1F4ED} No stack trace data available");
+ return;
+ }
+ console.log("\u{1F4CA} Stack Trace Statistics\n");
+ const totalTraces = db.prepare("SELECT COUNT(*) as count FROM stack_traces").get().count;
+ console.log(`Total traces: ${totalTraces}`);
+ const statusStats = db.prepare(`
+ SELECT resolution_status, COUNT(*) as count
+ FROM stack_traces
+ GROUP BY resolution_status
+ ORDER BY count DESC
+ `).all();
+ console.log("\n\u{1F4C8} By Resolution Status:");
+ for (const stat of statusStats) {
+ const percentage = (stat.count / totalTraces * 100).toFixed(1);
+ console.log(` ${stat.resolution_status}: ${stat.count} (${percentage}%)`);
+ }
+ const typeStats = db.prepare(`
+ SELECT error_type, COUNT(*) as count
+ FROM stack_traces
+ GROUP BY error_type
+ ORDER BY count DESC
+ LIMIT 10
+ `).all();
+ console.log("\n\u{1F50D} Top Error Types:");
+ for (const stat of typeStats) {
+ const percentage = (stat.count / totalTraces * 100).toFixed(1);
+ console.log(` ${stat.error_type}: ${stat.count} (${percentage}%)`);
+ }
+ const severityStats = db.prepare(`
+ SELECT error_severity, COUNT(*) as count
+ FROM stack_traces
+ GROUP BY error_severity
+ ORDER BY
+ CASE error_severity
+ WHEN 'high' THEN 1
+ WHEN 'medium' THEN 2
+ WHEN 'low' THEN 3
+ END
+ `).all();
+ console.log("\n\u26A0\uFE0F By Severity:");
+ for (const stat of severityStats) {
+ const percentage = (stat.count / totalTraces * 100).toFixed(1);
+ const icon = stat.error_severity === "high" ? "\u{1F534}" : stat.error_severity === "low" ? "\u{1F7E1}" : "\u{1F7E0}";
+ console.log(` ${icon} ${stat.error_severity}: ${stat.count} (${percentage}%)`);
+ }
+ const recentTraces = db.prepare(`
+ SELECT COUNT(*) as count
+ FROM stack_traces
+ WHERE created_at > (unixepoch() - 86400)
+ `).get().count;
+ console.log(`
+ \u{1F4C5} Recent Activity (24 hours): ${recentTraces} traces`);
+ const fileStats = db.prepare(`
+ SELECT file_path, COUNT(*) as count
+ FROM stack_traces
+ WHERE file_path IS NOT NULL
+ GROUP BY file_path
+ ORDER BY count DESC
+ LIMIT 5
+ `).all();
+ if (fileStats.length > 0) {
+ console.log("\n\u{1F5C2}\uFE0F Most Problematic Files:");
+ for (const stat of fileStats) {
+ console.log(` ${stat.file_path}: ${stat.count} errors`);
+ }
+ }
+ } finally {
+ db.close();
+ }
+ } catch (error) {
+ console.error("\u274C Failed to show stack trace statistics:", error);
+ }
+ }
+ async function calculateCheckpointHash(checkpoint) {
+ const crypto = await import("crypto");
+ const data = JSON.stringify({
+ id: checkpoint.id,
+ timestamp: checkpoint.timestamp,
+ files_count: checkpoint.recent_files?.length || 0,
+ project_context: checkpoint.project_context,
+ stack_traces_count: checkpoint.stack_traces?.length || 0
+ });
+ return crypto.createHash("sha256").update(data).digest("hex");
+ }
+ export {
+ createContextRehydrateCommand
+ };
+ //# sourceMappingURL=context-rehydrate.js.map
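For reference, a minimal sketch of how the exported factory could be wired into a commander-based CLI. The "stackmemory" and "context" command names are assumptions inferred from the usage hints printed by the code above (e.g. "stackmemory context rehydrate --create"), not taken from the package's actual entry point.

import { Command } from "commander";
import { createContextRehydrateCommand } from "./context-rehydrate.js";

// Hypothetical wiring: mount the new "rehydrate" subcommand under "stackmemory context",
// mirroring the usage strings the command prints.
const program = new Command("stackmemory");
const context = new Command("context");
context.addCommand(createContextRehydrateCommand());
program.addCommand(context);
program.parse(process.argv);

With that wiring, invocations such as "stackmemory context rehydrate --create" or "stackmemory context rehydrate --verify -c <checkpoint-id>" would reach handleContextRehydrate with the corresponding options.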