@triedotdev/mcp 1.0.113 → 1.0.115

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75) hide show
  1. package/dist/auto-fix-apply-PCAHWLXF.js +10 -0
  2. package/dist/autonomy-config-JXB7WCZ2.js +30 -0
  3. package/dist/chunk-2GIAROBF.js +173 -0
  4. package/dist/chunk-2GIAROBF.js.map +1 -0
  5. package/dist/{chunk-33WL3D7A.js → chunk-2SIFK7OW.js} +7 -419
  6. package/dist/chunk-2SIFK7OW.js.map +1 -0
  7. package/dist/chunk-43X6JBEM.js +36 -0
  8. package/dist/chunk-43X6JBEM.js.map +1 -0
  9. package/dist/chunk-55DOQNHJ.js +772 -0
  10. package/dist/chunk-55DOQNHJ.js.map +1 -0
  11. package/dist/chunk-6LXSA2OZ.js +425 -0
  12. package/dist/chunk-6LXSA2OZ.js.map +1 -0
  13. package/dist/{chunk-SDS3UVFY.js → chunk-AOFYU6T3.js} +113 -559
  14. package/dist/chunk-AOFYU6T3.js.map +1 -0
  15. package/dist/{chunk-6QR6QZIX.js → chunk-D3EXBJE2.js} +25 -658
  16. package/dist/chunk-D3EXBJE2.js.map +1 -0
  17. package/dist/chunk-DJ2YAGHK.js +50 -0
  18. package/dist/chunk-DJ2YAGHK.js.map +1 -0
  19. package/dist/chunk-DZREHOGW.js +706 -0
  20. package/dist/chunk-DZREHOGW.js.map +1 -0
  21. package/dist/chunk-I2GFI3AM.js +340 -0
  22. package/dist/chunk-I2GFI3AM.js.map +1 -0
  23. package/dist/chunk-KRH642MT.js +947 -0
  24. package/dist/chunk-KRH642MT.js.map +1 -0
  25. package/dist/{chunk-QYOACM2C.js → chunk-MVNJPJBK.js} +22 -252
  26. package/dist/chunk-MVNJPJBK.js.map +1 -0
  27. package/dist/chunk-NS2MSZMB.js +394 -0
  28. package/dist/chunk-NS2MSZMB.js.map +1 -0
  29. package/dist/chunk-SWSK7ANT.js +340 -0
  30. package/dist/chunk-SWSK7ANT.js.map +1 -0
  31. package/dist/chunk-VRLMTOB6.js +566 -0
  32. package/dist/chunk-VRLMTOB6.js.map +1 -0
  33. package/dist/chunk-YR4BMGYO.js +130 -0
  34. package/dist/chunk-YR4BMGYO.js.map +1 -0
  35. package/dist/chunk-ZV2K6M7T.js +74 -0
  36. package/dist/chunk-ZV2K6M7T.js.map +1 -0
  37. package/dist/{chunk-2764KZZQ.js → chunk-ZYKEILVK.js} +451 -1069
  38. package/dist/chunk-ZYKEILVK.js.map +1 -0
  39. package/dist/cli/main.js +107 -375
  40. package/dist/cli/main.js.map +1 -1
  41. package/dist/cli/yolo-daemon.js +18 -8
  42. package/dist/cli/yolo-daemon.js.map +1 -1
  43. package/dist/client-7XZHCMD3.js +28 -0
  44. package/dist/client-7XZHCMD3.js.map +1 -0
  45. package/dist/{goal-manager-AP4LTE6U.js → goal-manager-LMS6ZJB7.js} +7 -3
  46. package/dist/goal-manager-LMS6ZJB7.js.map +1 -0
  47. package/dist/goal-validator-T5HEYBC5.js +186 -0
  48. package/dist/goal-validator-T5HEYBC5.js.map +1 -0
  49. package/dist/graph-U5JWSAB5.js +10 -0
  50. package/dist/graph-U5JWSAB5.js.map +1 -0
  51. package/dist/guardian-agent-EXP7APLC.js +25 -0
  52. package/dist/guardian-agent-EXP7APLC.js.map +1 -0
  53. package/dist/hypothesis-KGC3P54C.js +19 -0
  54. package/dist/hypothesis-KGC3P54C.js.map +1 -0
  55. package/dist/incident-index-PNIVT47T.js +11 -0
  56. package/dist/incident-index-PNIVT47T.js.map +1 -0
  57. package/dist/index.js +369 -43
  58. package/dist/index.js.map +1 -1
  59. package/dist/ledger-SR6OEBLO.js +15 -0
  60. package/dist/ledger-SR6OEBLO.js.map +1 -0
  61. package/dist/output-manager-BOTMXSND.js +13 -0
  62. package/dist/output-manager-BOTMXSND.js.map +1 -0
  63. package/dist/pattern-discovery-F7LU5K6E.js +8 -0
  64. package/dist/pattern-discovery-F7LU5K6E.js.map +1 -0
  65. package/package.json +1 -1
  66. package/dist/chunk-2764KZZQ.js.map +0 -1
  67. package/dist/chunk-33WL3D7A.js.map +0 -1
  68. package/dist/chunk-6JPPYG7F.js +0 -1813
  69. package/dist/chunk-6JPPYG7F.js.map +0 -1
  70. package/dist/chunk-6QR6QZIX.js.map +0 -1
  71. package/dist/chunk-QYOACM2C.js.map +0 -1
  72. package/dist/chunk-SDS3UVFY.js.map +0 -1
  73. package/dist/guardian-agent-XEYNG7RH.js +0 -18
  74. /package/dist/{goal-manager-AP4LTE6U.js.map → auto-fix-apply-PCAHWLXF.js.map} +0 -0
  75. /package/dist/{guardian-agent-XEYNG7RH.js.map → autonomy-config-JXB7WCZ2.js.map} +0 -0
@@ -0,0 +1,772 @@
1
+ import {
2
+ BackupManager,
3
+ CompactedSummariesIndexSchema,
4
+ IssueIndexSchema,
5
+ safeParseAndValidate
6
+ } from "./chunk-KRH642MT.js";
7
+ import {
8
+ appendIssuesToLedger
9
+ } from "./chunk-YR4BMGYO.js";
10
+ import {
11
+ atomicWriteJSON
12
+ } from "./chunk-43X6JBEM.js";
13
+ import {
14
+ getTrieDirectory,
15
+ getWorkingDirectory
16
+ } from "./chunk-R4AAPFXC.js";
17
+
18
+ // src/memory/compactor.ts
19
+ import { mkdir, readFile } from "fs/promises";
20
+ import { existsSync } from "fs";
21
+ import { join } from "path";
22
// Split `issues` at a retention cutoff and, once enough stale entries have
// accumulated, fold the stale ones into a compact summary.
// Returns { summary, remaining }: summary is null (and `remaining` is the
// untouched input) when fewer than `minIssuesToCompact` issues are stale.
async function compactOldIssues(issues, options = {}) {
  const retainDays = options.keepDays ?? 30;
  const compactionThreshold = options.minIssuesToCompact ?? 100;
  const cutoff = new Date();
  cutoff.setDate(cutoff.getDate() - retainDays);
  // Single-pass partition into stale vs. fresh by timestamp.
  const stale = [];
  const fresh = [];
  for (const entry of issues) {
    (new Date(entry.timestamp) < cutoff ? stale : fresh).push(entry);
  }
  if (stale.length < compactionThreshold) {
    return { summary: null, remaining: issues };
  }
  return { summary: buildSummary(stale), remaining: fresh };
}
35
/**
 * Aggregate a batch of (old) issues into a compact statistical summary:
 * severity/agent histograms, top 10 recurring patterns, top 10 hot files,
 * plus the covered date range.
 *
 * Fix: the original called `issues.sort(...)`, which mutates the caller's
 * array in place; we now sort a copy.
 */
function buildSummary(issues) {
  // Chronological copy — `sort` mutates, so never sort the parameter directly.
  const sorted = [...issues].sort(
    (a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime()
  );
  const bySeverity = {};
  const byAgent = {};
  const patternMap = new Map();
  const fileCount = new Map();
  for (const issue of issues) {
    bySeverity[issue.severity] = (bySeverity[issue.severity] || 0) + 1;
    byAgent[issue.agent] = (byAgent[issue.agent] || 0) + 1;
    // Group textually-similar issues under a normalized key.
    const patternKey = normalizePattern(issue.issue);
    const existing = patternMap.get(patternKey);
    if (existing) {
      existing.count++;
    } else {
      patternMap.set(patternKey, { count: 1, issue });
    }
    // Hot-file stats are keyed by basename only.
    const fileName = issue.file.split("/").pop() || issue.file;
    fileCount.set(fileName, (fileCount.get(fileName) || 0) + 1);
  }
  const topPatterns = Array.from(patternMap.entries())
    .sort((a, b) => b[1].count - a[1].count)
    .slice(0, 10)
    .map(([pattern, data]) => ({
      pattern: pattern.slice(0, 100),
      count: data.count,
      severity: data.issue.severity,
      agent: data.issue.agent,
      exampleFix: data.issue.fix.slice(0, 200)
    }));
  const hotFiles = Array.from(fileCount.entries())
    .sort((a, b) => b[1] - a[1])
    .slice(0, 10)
    .map(([file, count]) => ({ file, count }));
  return {
    period: `${sorted[0]?.timestamp.split("T")[0]} to ${sorted[sorted.length - 1]?.timestamp.split("T")[0]}`,
    startDate: sorted[0]?.timestamp || "",
    endDate: sorted[sorted.length - 1]?.timestamp || "",
    totalIssues: issues.length,
    resolvedCount: issues.filter((i) => i.resolved).length,
    bySeverity,
    byAgent,
    topPatterns,
    hotFiles,
    compactedAt: new Date().toISOString()
  };
}
77
// Collapse concrete details (code spans, numbers, quotes, whitespace) so that
// textually-similar issue descriptions normalize to the same pattern key.
function normalizePattern(text) {
  let key = text.toLowerCase();
  key = key.replace(/`[^`]+`/g, "CODE"); // inline code spans -> placeholder
  key = key.replace(/\b\d+\b/g, "N");    // standalone numbers -> placeholder
  key = key.replace(/["']/g, "");        // strip quoting
  key = key.replace(/\s+/g, " ");        // squeeze runs of whitespace
  return key.trim().slice(0, 150);
}
80
// Append one compaction summary to <trie>/memory/compacted-summaries.json,
// keeping only the 12 most recent summaries. A backup is taken before the
// atomic write so a crash mid-write is recoverable.
async function saveCompactedSummary(summary, projectDir) {
  const memoryDir = join(getTrieDirectory(projectDir), "memory");
  await mkdir(memoryDir, { recursive: true });
  const summaryPath = join(memoryDir, "compacted-summaries.json");
  let summaries = [];
  try {
    if (existsSync(summaryPath)) {
      const content = await readFile(summaryPath, "utf-8");
      // Schema-validate the existing file; on failure we silently start over
      // with an empty list (best-effort persistence).
      const result = safeParseAndValidate(content, CompactedSummariesIndexSchema);
      if (result.success) {
        summaries = result.data;
      }
    }
  } catch {
    summaries = [];
  }
  summaries.push(summary);
  // Bounded history: keep only the newest 12 summaries.
  if (summaries.length > 12) {
    summaries = summaries.slice(-12);
  }
  // Backup BEFORE writing so the previous good state can be restored.
  const backupManager = new BackupManager(summaryPath);
  await backupManager.createBackup();
  await atomicWriteJSON(summaryPath, summaries);
}
104
// Load the compacted-summaries index. If the file fails schema validation,
// attempt a one-shot recovery from its backup; any unrecoverable state
// (missing file, bad backup, I/O error) yields an empty list.
async function loadCompactedSummaries(projectDir) {
  const summaryPath = join(getTrieDirectory(projectDir), "memory", "compacted-summaries.json");
  try {
    if (existsSync(summaryPath)) {
      const content = await readFile(summaryPath, "utf-8");
      const result = safeParseAndValidate(content, CompactedSummariesIndexSchema);
      if (result.success) {
        return result.data;
      }
      // Primary file corrupted — try restoring the backup in place, then
      // re-read and re-validate it.
      const backupManager = new BackupManager(summaryPath);
      if (await backupManager.recoverFromBackup()) {
        const recovered = await readFile(summaryPath, "utf-8");
        const recoveredResult = safeParseAndValidate(recovered, CompactedSummariesIndexSchema);
        if (recoveredResult.success) {
          return recoveredResult.data;
        }
      }
    }
  } catch {
    // Deliberate best-effort: fall through to the empty default.
  }
  return [];
}
126
// Derive long-term insights from the stored compaction summaries:
// total historical issue count, patterns recurring across >= 2 summaries
// (top 5 by count), and a coarse trend from the last two summaries
// (+/-20% band => stable).
async function getHistoricalInsights(projectDir) {
  const summaries = await loadCompactedSummaries(projectDir);
  if (summaries.length === 0) {
    return {
      totalHistoricalIssues: 0,
      recurringPatterns: [],
      improvementTrend: "unknown"
    };
  }
  let totalHistoricalIssues = 0;
  const aggregated = new Map();
  for (const summary of summaries) {
    totalHistoricalIssues += summary.totalIssues;
    for (const pattern of summary.topPatterns) {
      const entry = aggregated.get(pattern.pattern);
      if (entry) {
        entry.count += pattern.count;
        entry.appearances++;
      } else {
        aggregated.set(pattern.pattern, { ...pattern, appearances: 1 });
      }
    }
  }
  const recurringPatterns = [...aggregated.values()]
    .filter((p) => p.appearances >= 2)
    .sort((a, b) => b.count - a.count)
    .slice(0, 5);
  let improvementTrend = "unknown";
  if (summaries.length >= 2) {
    const priorCount = summaries[summaries.length - 2]?.totalIssues || 0;
    const latestCount = summaries[summaries.length - 1]?.totalIssues || 0;
    if (latestCount < priorCount * 0.8) {
      improvementTrend = "improving";
    } else if (latestCount > priorCount * 1.2) {
      improvementTrend = "declining";
    } else {
      improvementTrend = "stable";
    }
  }
  return {
    totalHistoricalIssues,
    recurringPatterns,
    improvementTrend
  };
}
169
+
170
+ // src/memory/issue-store.ts
171
+ import { mkdir as mkdir2, writeFile, readFile as readFile2, readdir } from "fs/promises";
172
+ import { createHash } from "crypto";
173
+ import { existsSync as existsSync2 } from "fs";
174
+ import { join as join2 } from "path";
175
+
176
+ // src/memory/bm25.ts
177
+ var BM25Index = class _BM25Index {
178
+ documents = /* @__PURE__ */ new Map();
179
+ termFrequencies = /* @__PURE__ */ new Map();
180
+ documentFrequencies = /* @__PURE__ */ new Map();
181
+ documentLengths = /* @__PURE__ */ new Map();
182
+ avgDocLength = 0;
183
+ k1 = 1.5;
184
+ b = 0.75;
185
+ /**
186
+ * Add a document to the index
187
+ */
188
+ addDocument(doc) {
189
+ const tokens = this.tokenize(doc.text);
190
+ this.documents.set(doc.id, doc);
191
+ this.documentLengths.set(doc.id, tokens.length);
192
+ const termFreq = /* @__PURE__ */ new Map();
193
+ const seenTerms = /* @__PURE__ */ new Set();
194
+ for (const token of tokens) {
195
+ termFreq.set(token, (termFreq.get(token) || 0) + 1);
196
+ if (!seenTerms.has(token)) {
197
+ seenTerms.add(token);
198
+ this.documentFrequencies.set(token, (this.documentFrequencies.get(token) || 0) + 1);
199
+ }
200
+ }
201
+ this.termFrequencies.set(doc.id, termFreq);
202
+ this.updateAvgDocLength();
203
+ }
204
+ /**
205
+ * Add multiple documents
206
+ */
207
+ addDocuments(docs) {
208
+ for (const doc of docs) {
209
+ this.addDocument(doc);
210
+ }
211
+ }
212
+ /**
213
+ * Search the index
214
+ */
215
+ search(query, limit = 10) {
216
+ const queryTokens = this.tokenize(query);
217
+ const scores = /* @__PURE__ */ new Map();
218
+ const N = this.documents.size;
219
+ for (const [docId] of this.documents) {
220
+ let score = 0;
221
+ const docLength = this.documentLengths.get(docId) || 0;
222
+ const termFreqs = this.termFrequencies.get(docId);
223
+ if (!termFreqs) continue;
224
+ for (const term of queryTokens) {
225
+ const tf = termFreqs.get(term) || 0;
226
+ if (tf === 0) continue;
227
+ const df = this.documentFrequencies.get(term) || 0;
228
+ const idf = Math.log((N - df + 0.5) / (df + 0.5) + 1);
229
+ const numerator = tf * (this.k1 + 1);
230
+ const denominator = tf + this.k1 * (1 - this.b + this.b * (docLength / this.avgDocLength));
231
+ score += idf * (numerator / denominator);
232
+ }
233
+ if (score > 0) {
234
+ scores.set(docId, score);
235
+ }
236
+ }
237
+ return Array.from(scores.entries()).sort((a, b) => b[1] - a[1]).slice(0, limit).map(([id, score]) => {
238
+ const metadata = this.documents.get(id)?.metadata;
239
+ const result = { id, score };
240
+ if (metadata !== void 0) {
241
+ result.metadata = metadata;
242
+ }
243
+ return result;
244
+ });
245
+ }
246
+ /**
247
+ * Get document count
248
+ */
249
+ get size() {
250
+ return this.documents.size;
251
+ }
252
+ /**
253
+ * Clear the index
254
+ */
255
+ clear() {
256
+ this.documents.clear();
257
+ this.termFrequencies.clear();
258
+ this.documentFrequencies.clear();
259
+ this.documentLengths.clear();
260
+ this.avgDocLength = 0;
261
+ }
262
+ /**
263
+ * Serialize the index to JSON
264
+ */
265
+ serialize() {
266
+ return JSON.stringify({
267
+ documents: Array.from(this.documents.entries()),
268
+ termFrequencies: Array.from(this.termFrequencies.entries()).map(([k, v]) => [k, Array.from(v.entries())]),
269
+ documentFrequencies: Array.from(this.documentFrequencies.entries()),
270
+ documentLengths: Array.from(this.documentLengths.entries()),
271
+ avgDocLength: this.avgDocLength
272
+ });
273
+ }
274
+ /**
275
+ * Load from serialized JSON
276
+ */
277
+ static deserialize(json) {
278
+ const data = JSON.parse(json);
279
+ const index = new _BM25Index();
280
+ index.documents = new Map(data.documents);
281
+ index.termFrequencies = new Map(data.termFrequencies.map(([k, v]) => [k, new Map(v)]));
282
+ index.documentFrequencies = new Map(data.documentFrequencies);
283
+ index.documentLengths = new Map(data.documentLengths);
284
+ index.avgDocLength = data.avgDocLength;
285
+ return index;
286
+ }
287
+ tokenize(text) {
288
+ return text.toLowerCase().replace(/[^\w\s]/g, " ").split(/\s+/).filter((token) => token.length > 2 && !this.isStopWord(token));
289
+ }
290
+ isStopWord(word) {
291
+ const stopWords = /* @__PURE__ */ new Set([
292
+ "the",
293
+ "be",
294
+ "to",
295
+ "of",
296
+ "and",
297
+ "a",
298
+ "in",
299
+ "that",
300
+ "have",
301
+ "i",
302
+ "it",
303
+ "for",
304
+ "not",
305
+ "on",
306
+ "with",
307
+ "he",
308
+ "as",
309
+ "you",
310
+ "do",
311
+ "at",
312
+ "this",
313
+ "but",
314
+ "his",
315
+ "by",
316
+ "from",
317
+ "they",
318
+ "we",
319
+ "say",
320
+ "her",
321
+ "she",
322
+ "or",
323
+ "an",
324
+ "will",
325
+ "my",
326
+ "one",
327
+ "all",
328
+ "would",
329
+ "there",
330
+ "their",
331
+ "what",
332
+ "so",
333
+ "up",
334
+ "out",
335
+ "if",
336
+ "about",
337
+ "who",
338
+ "get",
339
+ "which",
340
+ "go",
341
+ "me",
342
+ "when",
343
+ "make",
344
+ "can",
345
+ "like",
346
+ "time",
347
+ "no",
348
+ "just",
349
+ "him",
350
+ "know",
351
+ "take",
352
+ "into",
353
+ "year",
354
+ "your",
355
+ "some",
356
+ "could",
357
+ "them",
358
+ "see",
359
+ "other",
360
+ "than",
361
+ "then",
362
+ "now",
363
+ "look",
364
+ "only",
365
+ "come",
366
+ "its",
367
+ "over",
368
+ "also",
369
+ "back",
370
+ "after",
371
+ "use",
372
+ "two",
373
+ "how",
374
+ "our",
375
+ "first",
376
+ "way",
377
+ "even",
378
+ "new",
379
+ "want",
380
+ "because",
381
+ "any",
382
+ "these",
383
+ "give",
384
+ "day",
385
+ "most",
386
+ "us",
387
+ "should",
388
+ "been",
389
+ "has",
390
+ "was",
391
+ "are"
392
+ ]);
393
+ return stopWords.has(word);
394
+ }
395
+ updateAvgDocLength() {
396
+ if (this.documentLengths.size === 0) {
397
+ this.avgDocLength = 0;
398
+ return;
399
+ }
400
+ const total = Array.from(this.documentLengths.values()).reduce((a, b) => a + b, 0);
401
+ this.avgDocLength = total / this.documentLengths.size;
402
+ }
403
+ };
404
+
405
+ // src/memory/issue-store.ts
406
// Persist a batch of freshly-detected issues: de-duplicate within the batch
// by content hash, append the survivors to the human-readable daily log and
// the ledger, then merge them into the searchable issue index.
// Returns { stored, duplicates }, where `duplicates` counts both in-batch
// repeats and issues already present in the index.
async function storeIssues(issues, project, workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const memoryDir = join2(getTrieDirectory(projectDir), "memory");
  await mkdir2(memoryDir, { recursive: true });
  const stored = [];
  // Single timestamp for the whole batch.
  const now = (/* @__PURE__ */ new Date()).toISOString();
  const seenHashes = /* @__PURE__ */ new Set();
  let duplicates = 0;
  for (const issue of issues) {
    const hash = hashIssue(issue);
    // In-batch de-duplication: identical issue/file/severity/agent collapses.
    if (seenHashes.has(hash)) {
      duplicates++;
      continue;
    }
    seenHashes.add(hash);
    const storedIssue = {
      id: issue.id,
      hash,
      severity: issue.severity,
      issue: issue.issue,
      fix: issue.fix,
      file: issue.file,
      line: issue.line,
      agent: issue.agent,
      category: issue.category,
      timestamp: now,
      project,
      resolved: false,
      resolvedAt: void 0
    };
    stored.push(storedIssue);
  }
  await appendToDailyLog(stored, projectDir);
  await appendIssuesToLedger(stored, projectDir);
  // updateIssueIndex performs a second, cross-run de-duplication pass and
  // reports how many entries were genuinely new.
  const dedupedCount = await updateIssueIndex(stored, projectDir);
  return { stored: dedupedCount, duplicates: duplicates + (stored.length - dedupedCount) };
}
443
// Rank stored issues against a free-text query with BM25, after applying
// the structured filters in `options` (project, severity list, agent,
// includeResolved). Returns up to `options.limit` (default 10) results as
// { issue, score, matchType: "bm25" }.
async function searchIssues(query, options = {}) {
  const projectDir = options.workDir || getWorkingDirectory(void 0, true);
  const maxResults = options.limit || 10;
  const corpus = await loadIssueIndex(projectDir);
  if (corpus.length === 0) {
    return [];
  }
  // Structured filtering happens before ranking.
  const matchesFilters = (issue) =>
    (!options.project || issue.project === options.project) &&
    (!options.severity || options.severity.includes(issue.severity)) &&
    (!options.agent || issue.agent === options.agent) &&
    (options.includeResolved || !issue.resolved);
  const candidates = corpus.filter(matchesFilters);
  if (candidates.length === 0) {
    return [];
  }
  const ranker = new BM25Index();
  const byId = new Map();
  for (const issue of candidates) {
    byId.set(issue.id, issue);
    ranker.addDocument({
      id: issue.id,
      text: `${issue.issue} ${issue.fix} ${issue.file} ${issue.agent} ${issue.category || ""} ${issue.severity}`
    });
  }
  return ranker.search(query, maxResults).map(({ id, score }) => ({
    issue: byId.get(id),
    score,
    matchType: "bm25"
  }));
}
477
// Find up to `options.limit` (default 5) issues similar to `issue`, using
// its description, fix, and agent as the search query. The issue itself is
// excluded, and optionally anything from the same file.
async function findSimilarIssues(issue, options = {}) {
  const wanted = options.limit || 5;
  const searchOptions = {
    // Over-fetch so post-filtering can still satisfy `wanted`.
    limit: wanted + 5,
    includeResolved: true
  };
  if (options.workDir !== void 0) {
    searchOptions.workDir = options.workDir;
  }
  const hits = await searchIssues(`${issue.issue} ${issue.fix} ${issue.agent}`, searchOptions);
  const similar = hits.filter(
    (hit) => hit.issue.id !== issue.id && (!options.excludeSameFile || hit.issue.file !== issue.file)
  );
  return similar.slice(0, wanted);
}
494
// Mark a single issue as resolved by id and persist the index.
// Returns false (without writing) when the id is unknown.
async function markIssueResolved(issueId, workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const index = await loadIssueIndex(projectDir);
  const target = index.find((entry) => entry.id === issueId);
  if (!target) {
    return false;
  }
  target.resolved = true;
  target.resolvedAt = new Date().toISOString();
  await saveIssueIndex(index, projectDir);
  return true;
}
504
// After a scan, auto-resolve index entries whose file was re-scanned but
// whose hash no longer appears among the new findings. Entries in files
// outside the scan are left active (we have no evidence either way).
// Persists only if something was resolved; returns { resolved, stillActive }.
async function autoResolveIssues(newIssueHashes, scannedFiles, workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const index = await loadIssueIndex(projectDir);
  // Normalize to forward slashes so Windows and POSIX paths compare equal.
  const scanned = new Set(scannedFiles.map((file) => file.replace(/\\/g, "/")));
  const resolvedAt = new Date().toISOString();
  let resolved = 0;
  let stillActive = 0;
  for (const entry of index) {
    if (entry.resolved) continue;
    const normalizedPath = entry.file.replace(/\\/g, "/");
    if (scanned.has(normalizedPath) && !newIssueHashes.has(entry.hash)) {
      entry.resolved = true;
      entry.resolvedAt = resolvedAt;
      resolved++;
    } else {
      stillActive++;
    }
  }
  if (resolved > 0) {
    await saveIssueIndex(index, projectDir);
  }
  return { resolved, stillActive };
}
531
// Public alias for the internal content hash, exported for callers that
// pre-compute hashes (e.g. to build the set passed to autoResolveIssues).
function getIssueHash(issue) {
  const contentHash = hashIssue(issue);
  return contentHash;
}
534
// Build an aggregate statistics snapshot of the issue memory: counts by
// agent/severity (total and active-only), resolution counts, capacity
// against the 10k hard cap, de-duplication stats, historical totals and
// trend from compaction summaries, and the oldest/newest timestamps.
async function getMemoryStats(workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const index = await loadIssueIndex(projectDir);
  const historical = await getHistoricalInsights(projectDir);
  // Hard cap enforced elsewhere by intelligentPrune (10,000 issues).
  const MAX_ISSUES = 1e4;
  const uniqueHashes = new Set(index.map((i) => i.hash));
  const stats = {
    totalIssues: index.length,
    activeIssues: 0,
    issuesByAgent: {},
    issuesBySeverity: {},
    activeIssuesBySeverity: {},
    oldestIssue: void 0,
    newestIssue: void 0,
    resolvedCount: 0,
    historicalIssues: historical.totalHistoricalIssues,
    improvementTrend: historical.improvementTrend,
    capacityInfo: {
      current: index.length,
      max: MAX_ISSUES,
      percentFull: Math.round(index.length / MAX_ISSUES * 100),
      isAtCap: index.length >= MAX_ISSUES
    },
    deduplicationStats: {
      // Entries sharing a hash with another entry (should normally be 0,
      // since updateIssueIndex de-duplicates on insert).
      duplicatesAvoided: index.length - uniqueHashes.size,
      uniquePatterns: uniqueHashes.size
    }
  };
  // Single pass: histograms plus resolved/active tallies.
  for (const issue of index) {
    stats.issuesByAgent[issue.agent] = (stats.issuesByAgent[issue.agent] || 0) + 1;
    stats.issuesBySeverity[issue.severity] = (stats.issuesBySeverity[issue.severity] || 0) + 1;
    if (issue.resolved) {
      stats.resolvedCount++;
    } else {
      stats.activeIssues++;
      stats.activeIssuesBySeverity[issue.severity] = (stats.activeIssuesBySeverity[issue.severity] || 0) + 1;
    }
  }
  if (index.length > 0) {
    // Sort a copy chronologically to find the extremes without mutating.
    const sorted = [...index].sort(
      (a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime()
    );
    const oldest = sorted[0]?.timestamp;
    const newest = sorted[sorted.length - 1]?.timestamp;
    if (oldest !== void 0) {
      stats.oldestIssue = oldest;
    }
    if (newest !== void 0) {
      stats.newestIssue = newest;
    }
  }
  return stats;
}
587
// List issues newer than `daysBack` days (default 7), newest first, up to
// `limit` (default 20). Resolved issues are excluded unless includeResolved.
async function getRecentIssues(options = {}) {
  const projectDir = options.workDir || getWorkingDirectory(void 0, true);
  const index = await loadIssueIndex(projectDir);
  const limit = options.limit || 20;
  const daysBack = options.daysBack || 7;
  const includeResolved = options.includeResolved ?? false;
  const cutoff = new Date();
  cutoff.setDate(cutoff.getDate() - daysBack);
  const isRecent = (entry) => new Date(entry.timestamp) >= cutoff;
  const isVisible = (entry) => includeResolved || !entry.resolved;
  return index
    .filter((entry) => isRecent(entry) && isVisible(entry))
    .sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime())
    .slice(0, limit);
}
601
/**
 * Remove issues from the persisted index according to `strategy`:
 *  - "smart":    keep issues that are recent (< 30 days) OR critical/high OR unresolved
 *  - "resolved": drop only resolved issues
 *  - "old":      drop issues older than options.daysOld (default 90) days
 *  - "all":      wipe the index
 * Returns { removed, remaining, strategy }.
 *
 * Fixes: case clauses are now braced (the bare `const` declarations were
 * scoped to the whole switch — a TDZ hazard flagged by no-case-declarations),
 * and an unrecognized strategy is a no-op instead of silently behaving like
 * "all" and wiping the entire index.
 */
async function purgeIssues(strategy, options = {}) {
  const projectDir = options.workDir || getWorkingDirectory(void 0, true);
  const index = await loadIssueIndex(projectDir);
  const originalCount = index.length;
  let remaining = [];
  switch (strategy) {
    case "smart": {
      const thirtyDaysAgo = new Date();
      thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30);
      remaining = index.filter((i) => {
        const isRecent = new Date(i.timestamp) >= thirtyDaysAgo;
        const isImportant = ["critical", "high"].includes(i.severity);
        const isUnresolved = !i.resolved;
        return isRecent || isImportant || isUnresolved;
      });
      break;
    }
    case "resolved": {
      remaining = index.filter((i) => !i.resolved);
      break;
    }
    case "old": {
      const daysOld = options.daysOld || 90;
      const cutoffDate = new Date();
      cutoffDate.setDate(cutoffDate.getDate() - daysOld);
      remaining = index.filter((i) => new Date(i.timestamp) >= cutoffDate);
      break;
    }
    case "all": {
      remaining = [];
      break;
    }
    default: {
      // Guard against typos: an unknown strategy must not destroy data.
      remaining = index;
      break;
    }
  }
  await saveIssueIndex(remaining, projectDir);
  return {
    removed: originalCount - remaining.length,
    remaining: remaining.length,
    strategy
  };
}
637
// List daily log filenames (YYYY-MM-DD.md) in the memory directory,
// newest first. Missing directory or read errors yield an empty list.
async function getDailyLogs(workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const memoryDir = join2(getTrieDirectory(projectDir), "memory");
  try {
    if (!existsSync2(memoryDir)) {
      return [];
    }
    const entries = await readdir(memoryDir);
    const dailyLogName = /^\d{4}-\d{2}-\d{2}\.md$/;
    return entries.filter((name) => dailyLogName.test(name)).sort().reverse();
  } catch {
    return [];
  }
}
648
// Append human-readable markdown entries for a batch of issues to today's
// log file (<memory>/<YYYY-MM-DD>.md), creating it with a header when absent.
async function appendToDailyLog(issues, projectDir) {
  const memoryDir = join2(getTrieDirectory(projectDir), "memory");
  const today = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
  const logPath = join2(memoryDir, `${today}.md`);
  let content = "";
  try {
    if (existsSync2(logPath)) {
      content = await readFile2(logPath, "utf-8");
    } else {
      content = `# Issue Log: ${today}

`;
    }
  } catch {
    // Unreadable existing log: start over with a fresh header rather than fail.
    content = `# Issue Log: ${today}

`;
  }
  // HH:MM:SS in local time, shared by every entry in this batch.
  const time = (/* @__PURE__ */ new Date()).toTimeString().split(" ")[0];
  const newEntries = issues.map(
    (i) => `## [${time}] ${i.severity.toUpperCase()}: ${i.issue.slice(0, 80)}${i.issue.length > 80 ? "..." : ""}
- **File:** \`${i.file}\`${i.line ? `:${i.line}` : ""}
- **Agent:** ${i.agent}
- **Fix:** ${i.fix.slice(0, 200)}${i.fix.length > 200 ? "..." : ""}
`
  ).join("\n");
  content += newEntries + "\n";
  // Whole-file rewrite (read-modify-write), not an O_APPEND append.
  await writeFile(logPath, content);
}
677
// Load and schema-validate the persisted issue index (issues.json).
// On validation failure, attempt recovery from the backup; any
// unrecoverable state returns an empty index so callers can proceed.
async function loadIssueIndex(projectDir) {
  const indexPath = join2(getTrieDirectory(projectDir), "memory", "issues.json");
  try {
    if (existsSync2(indexPath)) {
      const content = await readFile2(indexPath, "utf-8");
      const result = safeParseAndValidate(content, IssueIndexSchema);
      if (result.success) {
        return result.data;
      }
      console.error(` Issue index corrupted: ${result.error}`);
      // Try restoring the backup in place, then re-read and re-validate.
      const backupManager = new BackupManager(indexPath);
      if (await backupManager.recoverFromBackup()) {
        console.error(" \u2705 Recovered from backup");
        const recovered = await readFile2(indexPath, "utf-8");
        const recoveredResult = safeParseAndValidate(recovered, IssueIndexSchema);
        if (recoveredResult.success) {
          return recoveredResult.data;
        }
      }
      console.error(" No valid backup found, starting fresh");
    }
  } catch {
    // Deliberate best-effort: I/O errors fall through to the empty default.
  }
  return [];
}
702
// Merge `newIssues` into the persisted index, skipping entries whose hash
// is already stored. Above 500 entries, old issues are compacted into a
// summary; above 10k, the index is score-pruned to the cap. Returns the
// number of genuinely new entries added.
async function updateIssueIndex(newIssues, projectDir) {
  const memoryDir = join2(getTrieDirectory(projectDir), "memory");
  await mkdir2(memoryDir, { recursive: true });
  let merged = await loadIssueIndex(projectDir);
  const knownHashes = new Set(merged.map((entry) => entry.hash));
  const additions = newIssues.filter((entry) => !knownHashes.has(entry.hash));
  merged = merged.concat(additions);
  if (merged.length > 500) {
    const { summary, remaining } = await compactOldIssues(merged, {
      keepDays: 30,
      minIssuesToCompact: 100
    });
    if (summary) {
      await saveCompactedSummary(summary, projectDir);
      merged = remaining;
    }
  }
  if (merged.length > 1e4) {
    merged = intelligentPrune(merged, 1e4);
  }
  await saveIssueIndex(merged, projectDir);
  return additions.length;
}
726
// Trim `issues` to `targetCount` entries, keeping the highest-value ones.
// Value = recency (100 at age 0, -2/day, floored at 0) + severity weight
// (unknown severities score 10), minus 50 if already resolved.
function intelligentPrune(issues, targetCount) {
  const severityWeight = {
    critical: 100,
    high: 50,
    moderate: 20,
    low: 10,
    info: 5
  };
  const scoreOf = (issue) => {
    const ageInDays = (Date.now() - new Date(issue.timestamp).getTime()) / (1e3 * 60 * 60 * 24);
    const recency = Math.max(0, 100 - ageInDays * 2);
    const severity = severityWeight[issue.severity] || 10;
    const resolvedPenalty = issue.resolved ? -50 : 0;
    return recency + severity + resolvedPenalty;
  };
  return issues
    .map((issue) => ({ issue, score: scoreOf(issue) }))
    .sort((a, b) => b.score - a.score)
    .slice(0, targetCount)
    .map((entry) => entry.issue);
}
746
// Persist the issue index: back up the current file first, then replace it
// with an atomic JSON write so readers never observe a partial file.
async function saveIssueIndex(issues, projectDir) {
  const memoryDir = join2(getTrieDirectory(projectDir), "memory");
  await mkdir2(memoryDir, { recursive: true });
  const indexPath = join2(memoryDir, "issues.json");
  // Backup BEFORE writing so loadIssueIndex can recover the previous state.
  const backupManager = new BackupManager(indexPath);
  await backupManager.createBackup();
  await atomicWriteJSON(indexPath, issues);
}
754
// Content hash used for de-duplication: first 16 hex chars of SHA-256 over
// the issue text, file, severity, and agent (line number intentionally
// excluded so the same finding at a shifted line still matches).
function hashIssue(issue) {
  const fingerprint = [issue.issue, issue.file, issue.severity, issue.agent].join("|");
  return createHash("sha256").update(fingerprint).digest("hex").slice(0, 16);
}
758
+
759
+ export {
760
+ getHistoricalInsights,
761
+ storeIssues,
762
+ searchIssues,
763
+ findSimilarIssues,
764
+ markIssueResolved,
765
+ autoResolveIssues,
766
+ getIssueHash,
767
+ getMemoryStats,
768
+ getRecentIssues,
769
+ purgeIssues,
770
+ getDailyLogs
771
+ };
772
+ //# sourceMappingURL=chunk-55DOQNHJ.js.map