@triedotdev/mcp 1.0.62 → 1.0.64

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56) hide show
  1. package/README.md +591 -52
  2. package/dist/agent-smith-W4HUCFGC.js +14 -0
  3. package/dist/{agent-smith-runner-ZU4R3I2Z.js → agent-smith-runner-QRVOEOBE.js} +13 -7
  4. package/dist/agent-smith-runner-QRVOEOBE.js.map +1 -0
  5. package/dist/chunk-4YSLDGBL.js +674 -0
  6. package/dist/chunk-4YSLDGBL.js.map +1 -0
  7. package/dist/chunk-7KHT2NKR.js +212 -0
  8. package/dist/chunk-7KHT2NKR.js.map +1 -0
  9. package/dist/{chunk-XSPS463E.js → chunk-ALA6733H.js} +492 -14
  10. package/dist/chunk-ALA6733H.js.map +1 -0
  11. package/dist/chunk-AQCAMIQQ.js +139 -0
  12. package/dist/chunk-AQCAMIQQ.js.map +1 -0
  13. package/dist/chunk-D3DMONAJ.js +904 -0
  14. package/dist/chunk-D3DMONAJ.js.map +1 -0
  15. package/dist/{chunk-KB5ZN6K2.js → chunk-GWSNINKX.js} +2 -2
  16. package/dist/{chunk-32WLOG6E.js → chunk-K6BQBKIR.js} +662 -633
  17. package/dist/chunk-K6BQBKIR.js.map +1 -0
  18. package/dist/{chunk-ASGSTVVF.js → chunk-KOFQ47YW.js} +10 -6
  19. package/dist/chunk-KOFQ47YW.js.map +1 -0
  20. package/dist/{chunk-XXNE6HBE.js → chunk-N2AZH3EQ.js} +7697 -4803
  21. package/dist/chunk-N2AZH3EQ.js.map +1 -0
  22. package/dist/chunk-PBOVCPKE.js +2566 -0
  23. package/dist/chunk-PBOVCPKE.js.map +1 -0
  24. package/dist/{chunk-NUT4G5AY.js → chunk-R7Z7OHTJ.js} +493 -650
  25. package/dist/chunk-R7Z7OHTJ.js.map +1 -0
  26. package/dist/chunk-TSHZQKCM.js +933 -0
  27. package/dist/chunk-TSHZQKCM.js.map +1 -0
  28. package/dist/{chunk-S4VGGLXF.js → chunk-X2PABPBH.js} +461 -892
  29. package/dist/chunk-X2PABPBH.js.map +1 -0
  30. package/dist/cli/create-agent.js +3 -2
  31. package/dist/cli/create-agent.js.map +1 -1
  32. package/dist/cli/main.js +1120 -70
  33. package/dist/cli/main.js.map +1 -1
  34. package/dist/cli/yolo-daemon.js +151 -41
  35. package/dist/cli/yolo-daemon.js.map +1 -1
  36. package/dist/goal-manager-KFBOAP4X.js +20 -0
  37. package/dist/goal-manager-KFBOAP4X.js.map +1 -0
  38. package/dist/guardian-agent-PULK546O.js +17 -0
  39. package/dist/guardian-agent-PULK546O.js.map +1 -0
  40. package/dist/index.js +329 -74
  41. package/dist/index.js.map +1 -1
  42. package/dist/issue-store-QRDF3X55.js +22 -0
  43. package/dist/issue-store-QRDF3X55.js.map +1 -0
  44. package/dist/workers/agent-worker.js +6 -3
  45. package/dist/workers/agent-worker.js.map +1 -1
  46. package/package.json +1 -1
  47. package/dist/agent-smith-57MKX5QC.js +0 -13
  48. package/dist/agent-smith-runner-ZU4R3I2Z.js.map +0 -1
  49. package/dist/chunk-32WLOG6E.js.map +0 -1
  50. package/dist/chunk-ASGSTVVF.js.map +0 -1
  51. package/dist/chunk-NUT4G5AY.js.map +0 -1
  52. package/dist/chunk-S4VGGLXF.js.map +0 -1
  53. package/dist/chunk-XSPS463E.js.map +0 -1
  54. package/dist/chunk-XXNE6HBE.js.map +0 -1
  55. /package/dist/{agent-smith-57MKX5QC.js.map → agent-smith-W4HUCFGC.js.map} +0 -0
  56. /package/dist/{chunk-KB5ZN6K2.js.map → chunk-GWSNINKX.js.map} +0 -0
@@ -0,0 +1,933 @@
1
+ import {
2
+ getWorkingDirectory
3
+ } from "./chunk-KOFQ47YW.js";
4
+
5
+ // src/memory/issue-store.ts
6
+ import { mkdir as mkdir3, writeFile as writeFile2, readFile as readFile3, readdir as readdir2 } from "fs/promises";
7
+ import { createHash } from "crypto";
8
+ import { existsSync as existsSync3 } from "fs";
9
+ import { join as join3 } from "path";
10
+
11
+ // src/memory/bm25.ts
12
// src/memory/bm25.ts
/**
 * In-memory BM25 (Okapi) ranking index over small text documents.
 * Documents are tokenized into lowercase word tokens (length > 2, stop
 * words removed); scores follow the standard BM25 formula with k1/b tuning.
 */
var BM25Index = class _BM25Index {
  // Hoisted stop-word table: previously this Set was rebuilt inside
  // isStopWord() for every token, making indexing O(tokens * stopwords).
  static #STOP_WORDS = new Set([
    "the", "be", "to", "of", "and", "a", "in", "that", "have", "i",
    "it", "for", "not", "on", "with", "he", "as", "you", "do", "at",
    "this", "but", "his", "by", "from", "they", "we", "say", "her", "she",
    "or", "an", "will", "my", "one", "all", "would", "there", "their", "what",
    "so", "up", "out", "if", "about", "who", "get", "which", "go", "me",
    "when", "make", "can", "like", "time", "no", "just", "him", "know", "take",
    "into", "year", "your", "some", "could", "them", "see", "other", "than", "then",
    "now", "look", "only", "come", "its", "over", "also", "back", "after", "use",
    "two", "how", "our", "first", "way", "even", "new", "want", "because", "any",
    "these", "give", "day", "most", "us", "should", "been", "has", "was", "are"
  ]);
  documents = /* @__PURE__ */ new Map();
  termFrequencies = /* @__PURE__ */ new Map();
  documentFrequencies = /* @__PURE__ */ new Map();
  documentLengths = /* @__PURE__ */ new Map();
  avgDocLength = 0;
  k1 = 1.5;
  b = 0.75;
  /**
   * Add a document ({ id, text, metadata? }) to the index.
   * Note: re-adding an existing id inflates document frequencies; callers
   * are expected to add each id once.
   */
  addDocument(doc) {
    const tokens = this.tokenize(doc.text);
    this.documents.set(doc.id, doc);
    this.documentLengths.set(doc.id, tokens.length);
    const termFreq = /* @__PURE__ */ new Map();
    const seenTerms = /* @__PURE__ */ new Set();
    for (const token of tokens) {
      termFreq.set(token, (termFreq.get(token) || 0) + 1);
      if (!seenTerms.has(token)) {
        seenTerms.add(token);
        this.documentFrequencies.set(token, (this.documentFrequencies.get(token) || 0) + 1);
      }
    }
    this.termFrequencies.set(doc.id, termFreq);
    this.updateAvgDocLength();
  }
  /**
   * Add multiple documents
   */
  addDocuments(docs) {
    for (const doc of docs) {
      this.addDocument(doc);
    }
  }
  /**
   * Rank indexed documents against `query`.
   * @returns [{ id, score, metadata? }] sorted by descending score;
   *          zero-score documents are omitted.
   */
  search(query, limit = 10) {
    const queryTokens = this.tokenize(query);
    const scores = /* @__PURE__ */ new Map();
    const N = this.documents.size;
    // Guard: if every indexed document tokenized to nothing, avgDocLength
    // is 0 and the length normalization below would divide by zero (NaN).
    const avgLen = this.avgDocLength || 1;
    for (const [docId] of this.documents) {
      let score = 0;
      const docLength = this.documentLengths.get(docId) || 0;
      const termFreqs = this.termFrequencies.get(docId);
      if (!termFreqs) continue;
      for (const term of queryTokens) {
        const tf = termFreqs.get(term) || 0;
        if (tf === 0) continue;
        const df = this.documentFrequencies.get(term) || 0;
        // BM25+-style idf; the +1 keeps it positive even for very common terms.
        const idf = Math.log((N - df + 0.5) / (df + 0.5) + 1);
        const numerator = tf * (this.k1 + 1);
        const denominator = tf + this.k1 * (1 - this.b + this.b * (docLength / avgLen));
        score += idf * (numerator / denominator);
      }
      if (score > 0) {
        scores.set(docId, score);
      }
    }
    return Array.from(scores.entries()).sort((a, b) => b[1] - a[1]).slice(0, limit).map(([id, score]) => {
      const metadata = this.documents.get(id)?.metadata;
      const result = { id, score };
      if (metadata !== void 0) {
        result.metadata = metadata;
      }
      return result;
    });
  }
  /**
   * Get document count
   */
  get size() {
    return this.documents.size;
  }
  /**
   * Clear the index
   */
  clear() {
    this.documents.clear();
    this.termFrequencies.clear();
    this.documentFrequencies.clear();
    this.documentLengths.clear();
    this.avgDocLength = 0;
  }
  /**
   * Serialize the full index state to a JSON string (see deserialize).
   */
  serialize() {
    return JSON.stringify({
      documents: Array.from(this.documents.entries()),
      termFrequencies: Array.from(this.termFrequencies.entries()).map(([k, v]) => [k, Array.from(v.entries())]),
      documentFrequencies: Array.from(this.documentFrequencies.entries()),
      documentLengths: Array.from(this.documentLengths.entries()),
      avgDocLength: this.avgDocLength
    });
  }
  /**
   * Load from serialized JSON. Throws on malformed input (JSON.parse).
   */
  static deserialize(json) {
    const data = JSON.parse(json);
    const index = new _BM25Index();
    index.documents = new Map(data.documents);
    index.termFrequencies = new Map(data.termFrequencies.map(([k, v]) => [k, new Map(v)]));
    index.documentFrequencies = new Map(data.documentFrequencies);
    index.documentLengths = new Map(data.documentLengths);
    index.avgDocLength = data.avgDocLength;
    return index;
  }
  // Lowercase word tokens; punctuation stripped, short/stop words dropped.
  tokenize(text) {
    return text.toLowerCase().replace(/[^\w\s]/g, " ").split(/\s+/).filter((token) => token.length > 2 && !this.isStopWord(token));
  }
  // True when `word` is in the shared English stop-word table.
  isStopWord(word) {
    return _BM25Index.#STOP_WORDS.has(word);
  }
  // Recompute the mean document length after an insertion.
  updateAvgDocLength() {
    if (this.documentLengths.size === 0) {
      this.avgDocLength = 0;
      return;
    }
    const total = Array.from(this.documentLengths.values()).reduce((a, b) => a + b, 0);
    this.avgDocLength = total / this.documentLengths.size;
  }
};
239
+
240
+ // src/memory/compactor.ts
241
+ import { mkdir as mkdir2, readFile as readFile2 } from "fs/promises";
242
+ import { existsSync as existsSync2 } from "fs";
243
+ import { join as join2 } from "path";
244
+
245
+ // src/utils/atomic-write.ts
246
+ import { writeFile, rename, unlink, mkdir } from "fs/promises";
247
+ import { randomBytes } from "crypto";
248
+ import { dirname } from "path";
249
/**
 * Write `data` to `filePath` atomically: the payload goes to a random
 * sibling `.tmp` file first and is then renamed over the target, so readers
 * never observe a partially written file. The temp file is cleaned up
 * (best effort) if anything fails.
 *
 * @param options `createDir` (default true) creates the parent directory;
 *                `encoding` (default "utf-8") applies to string payloads.
 */
async function atomicWriteFile(filePath, data, options = {}) {
  const { createDir = true, encoding = "utf-8" } = options;
  const tempPath = `${filePath}.${randomBytes(6).toString("hex")}.tmp`;
  try {
    if (createDir) {
      await mkdir(dirname(filePath), { recursive: true });
    }
    await (typeof data === "string"
      ? writeFile(tempPath, data, { encoding })
      : writeFile(tempPath, data));
    // rename() is atomic on the same filesystem.
    await rename(tempPath, filePath);
  } catch (error) {
    await unlink(tempPath).catch(() => {
      // Temp file may never have been created; nothing to clean up.
    });
    throw error;
  }
}
/**
 * JSON convenience wrapper around atomicWriteFile.
 *
 * @param options `spaces` (default 2) controls indentation; remaining keys
 *                are forwarded to atomicWriteFile.
 */
async function atomicWriteJSON(filePath, data, options = {}) {
  const { spaces = 2, ...writeOptions } = options;
  await atomicWriteFile(filePath, JSON.stringify(data, null, spaces), writeOptions);
}
276
+
277
+ // src/utils/backup-manager.ts
278
+ import { copyFile, readdir, unlink as unlink2, readFile, stat } from "fs/promises";
279
+ import { existsSync } from "fs";
280
+ import { dirname as dirname2, basename, join } from "path";
281
/**
 * Manages timestamped backup copies of a single file. Backups are stored as
 * `<name>.backup.<ms-timestamp>` siblings in the same directory, capped at
 * `maxBackups`, and can be validated and restored after corruption.
 */
var BackupManager = class {
  filePath;
  maxBackups;
  validator;
  backupDir;
  baseFileName;
  /**
   * @param filePath File to protect; backups live beside it.
   * @param options  `maxBackups` (default 5) caps retained copies;
   *                 `validator(content)` overrides the default validity check.
   */
  constructor(filePath, options = {}) {
    this.filePath = filePath;
    this.maxBackups = options.maxBackups ?? 5;
    this.validator = options.validator;
    this.backupDir = dirname2(filePath);
    this.baseFileName = basename(filePath);
  }
  /**
   * Snapshot the current file and prune old copies.
   * @returns The backup file path, or null if the source doesn't exist.
   */
  async createBackup() {
    if (!existsSync(this.filePath)) {
      return null;
    }
    const backupPath = this.getBackupPath(Date.now());
    await copyFile(this.filePath, backupPath);
    await this.pruneOldBackups();
    return backupPath;
  }
  /** Enumerate backups of this file, sorted newest first. */
  async listBackups() {
    if (!existsSync(this.backupDir)) {
      return [];
    }
    const pattern = new RegExp(
      `^${this.escapeRegex(this.baseFileName)}\\.backup\\.(\\d+)$`
    );
    const found = [];
    for (const entry of await readdir(this.backupDir)) {
      const match = pattern.exec(entry);
      if (!match) continue;
      const backupPath = join(this.backupDir, entry);
      try {
        const info = await stat(backupPath);
        found.push({ path: backupPath, timestamp: parseInt(match[1], 10), size: info.size });
      } catch {
        // Raced with deletion between readdir and stat; skip the entry.
      }
    }
    return found.sort((a, b) => b.timestamp - a.timestamp);
  }
  /**
   * Walk backups newest-to-oldest and return the first that validates.
   * @returns Path to a valid backup, or null if none found.
   */
  async findValidBackup() {
    for (const backup of await this.listBackups()) {
      if (await this.validateBackup(backup.path)) {
        return backup.path;
      }
    }
    return null;
  }
  /**
   * Validate one backup file: a custom validator wins; otherwise `.json`
   * targets must parse, and anything else just has to be non-empty.
   * @returns true if the backup is valid.
   */
  async validateBackup(backupPath) {
    try {
      const content = await readFile(backupPath, "utf-8");
      if (this.validator) {
        return this.validator(content);
      }
      if (this.filePath.endsWith(".json")) {
        JSON.parse(content);
        return true;
      }
      return content.length > 0;
    } catch {
      return false;
    }
  }
  /**
   * Restore the newest valid backup over the main file.
   * @returns true if recovery was successful.
   */
  async recoverFromBackup() {
    const source = await this.findValidBackup();
    if (!source) {
      return false;
    }
    await copyFile(source, this.filePath);
    return true;
  }
  /** Drop backups beyond `maxBackups` (oldest removed first). */
  async pruneOldBackups() {
    const excess = (await this.listBackups()).slice(this.maxBackups);
    for (const backup of excess) {
      try {
        await unlink2(backup.path);
      } catch {
        // Best effort: a file that's already gone is already pruned.
      }
    }
  }
  /** Backup path for a specific millisecond timestamp. */
  getBackupPath(timestamp) {
    return join(this.backupDir, `${this.baseFileName}.backup.${timestamp}`);
  }
  /** Escape literal text for safe embedding in a RegExp. */
  escapeRegex(str) {
    return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  }
  /** Number of backups currently on disk. */
  async getBackupCount() {
    return (await this.listBackups()).length;
  }
  /** Timestamp of the newest backup, or null when there are none. */
  async getLatestBackupTime() {
    const [newest] = await this.listBackups();
    return newest?.timestamp ?? null;
  }
  /** Delete every backup. @returns how many were removed. */
  async clearBackups() {
    let deleted = 0;
    for (const backup of await this.listBackups()) {
      try {
        await unlink2(backup.path);
        deleted++;
      } catch {
        // Skip entries that vanished concurrently.
      }
    }
    return deleted;
  }
};
447
+
448
+ // src/memory/validation.ts
449
+ import { z } from "zod";
450
// Zod schemas guarding the on-disk JSON memory files. Index files are
// validated on load (see safeParseAndValidate below) so that a corrupt or
// partial write never poisons the in-memory state.

// One persisted issue record as written by the issue store.
var StoredIssueSchema = z.object({
  id: z.string(),
  hash: z.string(),
  severity: z.string(),
  issue: z.string(),
  fix: z.string(),
  file: z.string(),
  line: z.number().optional(),
  agent: z.string(),
  category: z.string().optional(),
  timestamp: z.string(),
  project: z.string(),
  resolved: z.boolean().optional(),
  resolvedAt: z.string().optional()
});
// issues.json is a flat array of issue records.
var IssueIndexSchema = z.array(StoredIssueSchema);
// Marks that a fix for a global pattern was applied in some project.
var FixAppliedSchema = z.object({
  project: z.string(),
  timestamp: z.string(),
  fix: z.string()
});
// A recurring issue pattern tracked across projects.
var GlobalPatternSchema = z.object({
  id: z.string(),
  pattern: z.string(),
  description: z.string(),
  severity: z.string(),
  agent: z.string(),
  occurrences: z.number(),
  projects: z.array(z.string()),
  firstSeen: z.string(),
  lastSeen: z.string(),
  fixApplied: FixAppliedSchema.optional()
});
var GlobalPatternsIndexSchema = z.array(GlobalPatternSchema);
// Per-project roll-up (name/path/health) used in pattern reporting.
var ProjectSummarySchema = z.object({
  name: z.string(),
  path: z.string(),
  lastScan: z.string(),
  healthScore: z.number(),
  totalIssues: z.number(),
  patterns: z.array(z.string())
});
// One aggregated pattern inside a compacted summary.
var PatternSummarySchema = z.object({
  pattern: z.string(),
  count: z.number(),
  severity: z.string(),
  agent: z.string(),
  exampleFix: z.string()
});
// File-name → issue-count pair for "hot file" reporting.
var HotFileSchema = z.object({
  file: z.string(),
  count: z.number()
});
// A time-window summary produced when old issues are compacted
// (see compactOldIssues / saveCompactedSummary).
var CompactedSummarySchema = z.object({
  period: z.string(),
  startDate: z.string(),
  endDate: z.string(),
  totalIssues: z.number(),
  resolvedCount: z.number(),
  bySeverity: z.record(z.string(), z.number()),
  byAgent: z.record(z.string(), z.number()),
  topPatterns: z.array(PatternSummarySchema),
  hotFiles: z.array(HotFileSchema),
  compactedAt: z.string()
});
var CompactedSummariesIndexSchema = z.array(CompactedSummarySchema);
516
/**
 * Parse JSON text and validate the result against a zod schema.
 * Never throws: both parse and validation failures are reported as values.
 *
 * @returns {{ success: true, data: any } | { success: false, error: string }}
 */
function safeParseAndValidate(content, schema) {
  let parsed;
  try {
    parsed = JSON.parse(content);
  } catch (error) {
    const reason = error instanceof Error ? error.message : "Unknown error";
    return { success: false, error: `JSON parse failed: ${reason}` };
  }
  const result = schema.safeParse(parsed);
  if (result.success) {
    return { success: true, data: result.data };
  }
  const reason = result.error.issues[0]?.message || "Unknown error";
  return { success: false, error: `Validation failed: ${reason}` };
}
534
+
535
+ // src/memory/compactor.ts
536
/**
 * Split `issues` into a compacted summary of entries older than `keepDays`
 * plus the remaining recent entries. Compaction only happens once at least
 * `minIssuesToCompact` old entries exist; below that threshold the input
 * array is returned unchanged with a null summary.
 */
async function compactOldIssues(issues, options = {}) {
  const keepDays = options.keepDays ?? 30;
  const minIssues = options.minIssuesToCompact ?? 100;
  const cutoff = new Date();
  cutoff.setDate(cutoff.getDate() - keepDays);
  const oldIssues = issues.filter((entry) => new Date(entry.timestamp) < cutoff);
  if (oldIssues.length < minIssues) {
    return { summary: null, remaining: issues };
  }
  const recentIssues = issues.filter((entry) => new Date(entry.timestamp) >= cutoff);
  return { summary: buildSummary(oldIssues), remaining: recentIssues };
}
549
/**
 * Aggregate a batch of (typically old) issues into a compacted summary:
 * severity/agent histograms, the 10 most frequent normalized issue
 * patterns, and the 10 files with the most issues.
 *
 * Fix: sorts a copy of `issues` — Array#sort mutates in place, and the
 * caller's array order must not change as a side effect of summarizing.
 */
function buildSummary(issues) {
  const sorted = [...issues].sort(
    (a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime()
  );
  const bySeverity = {};
  const byAgent = {};
  const patternMap = new Map();
  const fileCount = new Map();
  for (const issue of issues) {
    bySeverity[issue.severity] = (bySeverity[issue.severity] || 0) + 1;
    byAgent[issue.agent] = (byAgent[issue.agent] || 0) + 1;
    const patternKey = normalizePattern(issue.issue);
    const existing = patternMap.get(patternKey);
    if (existing) {
      existing.count++;
    } else {
      // Keep the first issue seen for each pattern as its example.
      patternMap.set(patternKey, { count: 1, issue });
    }
    const fileName = issue.file.split("/").pop() || issue.file;
    fileCount.set(fileName, (fileCount.get(fileName) || 0) + 1);
  }
  const topPatterns = Array.from(patternMap.entries()).sort((a, b) => b[1].count - a[1].count).slice(0, 10).map(([pattern, data]) => ({
    pattern: pattern.slice(0, 100),
    count: data.count,
    severity: data.issue.severity,
    agent: data.issue.agent,
    exampleFix: data.issue.fix.slice(0, 200)
  }));
  const hotFiles = Array.from(fileCount.entries()).sort((a, b) => b[1] - a[1]).slice(0, 10).map(([file, count]) => ({ file, count }));
  return {
    period: `${sorted[0]?.timestamp.split("T")[0]} to ${sorted[sorted.length - 1]?.timestamp.split("T")[0]}`,
    startDate: sorted[0]?.timestamp || "",
    endDate: sorted[sorted.length - 1]?.timestamp || "",
    totalIssues: issues.length,
    resolvedCount: issues.filter((i) => i.resolved).length,
    bySeverity,
    byAgent,
    topPatterns,
    hotFiles,
    compactedAt: new Date().toISOString()
  };
}
/**
 * Canonicalize issue text for grouping: code spans become "CODE", numbers
 * become "N", quotes are stripped, whitespace collapsed, capped at 150 chars.
 */
function normalizePattern(text) {
  return text.toLowerCase().replace(/`[^`]+`/g, "CODE").replace(/\b\d+\b/g, "N").replace(/["']/g, "").replace(/\s+/g, " ").trim().slice(0, 150);
}
594
/**
 * Append a compacted summary to `.trie/memory/compacted-summaries.json`,
 * keeping at most the 12 newest summaries. The previous file is backed up
 * before an atomic rewrite; an unreadable existing file is treated as empty.
 */
async function saveCompactedSummary(summary, projectDir) {
  const memoryDir = join2(projectDir, ".trie", "memory");
  await mkdir2(memoryDir, { recursive: true });
  const summaryPath = join2(memoryDir, "compacted-summaries.json");
  let summaries = [];
  try {
    if (existsSync2(summaryPath)) {
      const prior = safeParseAndValidate(
        await readFile2(summaryPath, "utf-8"),
        CompactedSummariesIndexSchema
      );
      if (prior.success) {
        summaries = prior.data;
      }
    }
  } catch {
    summaries = [];
  }
  // Append and cap at the 12 most recent entries.
  summaries = [...summaries, summary].slice(-12);
  await new BackupManager(summaryPath).createBackup();
  await atomicWriteJSON(summaryPath, summaries);
}
618
/**
 * Load the compacted-summaries index for a project. On validation failure,
 * attempts a backup recovery once; any unrecoverable state yields [].
 */
async function loadCompactedSummaries(projectDir) {
  const summaryPath = join2(projectDir, ".trie", "memory", "compacted-summaries.json");
  try {
    if (!existsSync2(summaryPath)) {
      return [];
    }
    const first = safeParseAndValidate(
      await readFile2(summaryPath, "utf-8"),
      CompactedSummariesIndexSchema
    );
    if (first.success) {
      return first.data;
    }
    // Corrupt index: try to roll back to the newest valid backup.
    const backupManager = new BackupManager(summaryPath);
    if (await backupManager.recoverFromBackup()) {
      const second = safeParseAndValidate(
        await readFile2(summaryPath, "utf-8"),
        CompactedSummariesIndexSchema
      );
      if (second.success) {
        return second.data;
      }
    }
    return [];
  } catch {
    return [];
  }
}
640
/**
 * Derive cross-summary insights: total historical issue count, patterns
 * that recur in 2+ summaries (top 5 by count), and a coarse trend computed
 * from the last two summaries (±20% band = "stable").
 */
async function getHistoricalInsights(projectDir) {
  const summaries = await loadCompactedSummaries(projectDir);
  if (summaries.length === 0) {
    return {
      totalHistoricalIssues: 0,
      recurringPatterns: [],
      improvementTrend: "unknown"
    };
  }
  let totalHistoricalIssues = 0;
  const patternCounts = new Map();
  for (const summary of summaries) {
    totalHistoricalIssues += summary.totalIssues;
    for (const pattern of summary.topPatterns) {
      const prior = patternCounts.get(pattern.pattern);
      if (prior) {
        prior.count += pattern.count;
        prior.appearances += 1;
      } else {
        patternCounts.set(pattern.pattern, { ...pattern, appearances: 1 });
      }
    }
  }
  const recurringPatterns = [...patternCounts.values()]
    .filter((p) => p.appearances >= 2)
    .sort((a, b) => b.count - a.count)
    .slice(0, 5);
  let improvementTrend = "unknown";
  if (summaries.length >= 2) {
    const olderCount = summaries[summaries.length - 2]?.totalIssues || 0;
    const newerCount = summaries[summaries.length - 1]?.totalIssues || 0;
    if (newerCount < olderCount * 0.8) {
      improvementTrend = "improving";
    } else if (newerCount > olderCount * 1.2) {
      improvementTrend = "declining";
    } else {
      improvementTrend = "stable";
    }
  }
  return { totalHistoricalIssues, recurringPatterns, improvementTrend };
}
683
+
684
+ // src/memory/issue-store.ts
685
/**
 * Persist a batch of agent-reported issues for `project`: stamps each with
 * a dedup hash and the current time, appends them to today's markdown log,
 * and merges them into the JSON issue index.
 *
 * @returns The number of issues processed (before index-level dedup).
 */
async function storeIssues(issues, project, workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  await mkdir3(join3(projectDir, ".trie", "memory"), { recursive: true });
  const now = new Date().toISOString();
  const stored = issues.map((issue) => ({
    id: issue.id,
    hash: hashIssue(issue),
    severity: issue.severity,
    issue: issue.issue,
    fix: issue.fix,
    file: issue.file,
    line: issue.line,
    agent: issue.agent,
    category: issue.category,
    timestamp: now,
    project,
    resolved: false,
    resolvedAt: void 0
  }));
  await appendToDailyLog(stored, projectDir);
  await updateIssueIndex(stored, projectDir);
  return stored.length;
}
714
/**
 * BM25 full-text search over the stored issue index.
 * Filters (project / severity list / agent / resolved) are applied before
 * ranking; resolved issues are excluded unless `includeResolved` is set.
 *
 * @returns [{ issue, score, matchType: "bm25" }] best-first.
 */
async function searchIssues(query, options = {}) {
  const projectDir = options.workDir || getWorkingDirectory(void 0, true);
  const limit = options.limit || 10;
  const allIssues = await loadIssueIndex(projectDir);
  const matchesFilters = (issue) => {
    if (options.project && issue.project !== options.project) return false;
    if (options.severity && !options.severity.includes(issue.severity)) return false;
    if (options.agent && issue.agent !== options.agent) return false;
    if (!options.includeResolved && issue.resolved) return false;
    return true;
  };
  const candidates = allIssues.filter(matchesFilters);
  if (candidates.length === 0) {
    return [];
  }
  const bm25 = new BM25Index();
  const byId = new Map();
  for (const issue of candidates) {
    // Index a concatenation of every searchable field.
    const text = [issue.issue, issue.fix, issue.file, issue.agent, issue.category || "", issue.severity].join(" ");
    bm25.addDocument({ id: issue.id, text });
    byId.set(issue.id, issue);
  }
  return bm25.search(query, limit).map(({ id, score }) => ({
    issue: byId.get(id),
    score,
    matchType: "bm25"
  }));
}
748
/**
 * Find stored issues similar to `issue` (itself excluded), optionally
 * skipping matches from the same file. Over-fetches by 5 so post-filtering
 * can still fill the requested limit.
 */
async function findSimilarIssues(issue, options = {}) {
  const limit = options.limit || 5;
  const query = [issue.issue, issue.fix, issue.agent].join(" ");
  const searchOptions = {
    limit: limit + 5,
    includeResolved: true
  };
  if (options.workDir !== void 0) {
    searchOptions.workDir = options.workDir;
  }
  const results = await searchIssues(query, searchOptions);
  const keep = results.filter(
    (r) => r.issue.id !== issue.id && (!options.excludeSameFile || r.issue.file !== issue.file)
  );
  return keep.slice(0, limit);
}
765
/**
 * Flag one stored issue as resolved (with a resolution timestamp) and
 * persist the updated index.
 *
 * @returns true when the issue was found and updated, false otherwise.
 */
async function markIssueResolved(issueId, workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const index = await loadIssueIndex(projectDir);
  const target = index.find((entry) => entry.id === issueId);
  if (!target) {
    return false;
  }
  target.resolved = true;
  target.resolvedAt = new Date().toISOString();
  await saveIssueIndex(index, projectDir);
  return true;
}
775
/**
 * Summarize the current issue index plus historical (compacted) data:
 * counts by agent and severity, resolved total, oldest/newest timestamps,
 * and the improvement trend from getHistoricalInsights.
 */
async function getMemoryStats(workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const index = await loadIssueIndex(projectDir);
  const historical = await getHistoricalInsights(projectDir);
  const stats = {
    totalIssues: index.length,
    issuesByAgent: {},
    issuesBySeverity: {},
    oldestIssue: void 0,
    newestIssue: void 0,
    resolvedCount: 0,
    historicalIssues: historical.totalHistoricalIssues,
    improvementTrend: historical.improvementTrend
  };
  for (const { agent, severity, resolved } of index) {
    stats.issuesByAgent[agent] = (stats.issuesByAgent[agent] || 0) + 1;
    stats.issuesBySeverity[severity] = (stats.issuesBySeverity[severity] || 0) + 1;
    if (resolved) {
      stats.resolvedCount += 1;
    }
  }
  if (index.length > 0) {
    const chronological = [...index].sort(
      (a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime()
    );
    const oldest = chronological[0]?.timestamp;
    const newest = chronological[chronological.length - 1]?.timestamp;
    if (oldest !== void 0) {
      stats.oldestIssue = oldest;
    }
    if (newest !== void 0) {
      stats.newestIssue = newest;
    }
  }
  return stats;
}
809
/**
 * Return up to `limit` (default 20) issues from the last `daysBack`
 * (default 7) days, newest first.
 */
async function getRecentIssues(options = {}) {
  const projectDir = options.workDir || getWorkingDirectory(void 0, true);
  const index = await loadIssueIndex(projectDir);
  const limit = options.limit || 20;
  const cutoff = new Date();
  cutoff.setDate(cutoff.getDate() - (options.daysBack || 7));
  const recent = index.filter((entry) => new Date(entry.timestamp) >= cutoff);
  recent.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime());
  return recent.slice(0, limit);
}
818
/**
 * List daily markdown log files (YYYY-MM-DD.md) in `.trie/memory`,
 * newest date first. Missing directory or read errors yield [].
 */
async function getDailyLogs(workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const memoryDir = join3(projectDir, ".trie", "memory");
  try {
    if (!existsSync3(memoryDir)) {
      return [];
    }
    const dailyLogName = /^\d{4}-\d{2}-\d{2}\.md$/;
    const files = await readdir2(memoryDir);
    return files.filter((name) => dailyLogName.test(name)).sort().reverse();
  } catch {
    return [];
  }
}
829
/**
 * Append formatted issue entries to today's markdown log
 * (`.trie/memory/YYYY-MM-DD.md`), creating the file with a dated header
 * when absent. Assumes the memory directory already exists (storeIssues
 * creates it before calling).
 */
async function appendToDailyLog(issues, projectDir) {
  const memoryDir = join3(projectDir, ".trie", "memory");
  const today = new Date().toISOString().split("T")[0];
  const logPath = join3(memoryDir, `${today}.md`);
  const header = `# Issue Log: ${today}\n\n`;
  let content;
  try {
    content = existsSync3(logPath) ? await readFile3(logPath, "utf-8") : header;
  } catch {
    content = header;
  }
  const time = new Date().toTimeString().split(" ")[0];
  // Truncate long fields with an ellipsis marker.
  const clip = (text, max) => text.length > max ? `${text.slice(0, max)}...` : text;
  const newEntries = issues.map((i) => [
    `## [${time}] ${i.severity.toUpperCase()}: ${clip(i.issue, 80)}`,
    `- **File:** \`${i.file}\`${i.line ? `:${i.line}` : ""}`,
    `- **Agent:** ${i.agent}`,
    `- **Fix:** ${clip(i.fix, 200)}`,
    ""
  ].join("\n")).join("\n");
  content += newEntries + "\n";
  await writeFile2(logPath, content);
}
858
/**
 * Load and validate `.trie/memory/issues.json`. A corrupted index triggers
 * a one-shot backup recovery; anything unrecoverable yields [] (fresh start).
 */
async function loadIssueIndex(projectDir) {
  const indexPath = join3(projectDir, ".trie", "memory", "issues.json");
  try {
    if (!existsSync3(indexPath)) {
      return [];
    }
    const first = safeParseAndValidate(await readFile3(indexPath, "utf-8"), IssueIndexSchema);
    if (first.success) {
      return first.data;
    }
    console.error(` \u26A0\uFE0F Issue index corrupted: ${first.error}`);
    const backupManager = new BackupManager(indexPath);
    if (await backupManager.recoverFromBackup()) {
      console.error(" \u2705 Recovered from backup");
      const second = safeParseAndValidate(await readFile3(indexPath, "utf-8"), IssueIndexSchema);
      if (second.success) {
        return second.data;
      }
    }
    console.error(" \u274C No valid backup found, starting fresh");
    return [];
  } catch {
    return [];
  }
}
883
/**
 * Merge `newIssues` into the persisted index, de-duplicating by content
 * hash. Above 500 entries, old issues are compacted into a summary; as a
 * hard cap the index is trimmed to the 1000 newest entries before saving.
 */
async function updateIssueIndex(newIssues, projectDir) {
  await mkdir3(join3(projectDir, ".trie", "memory"), { recursive: true });
  let existing = await loadIssueIndex(projectDir);
  const knownHashes = new Set(existing.map((entry) => entry.hash));
  const toAdd = newIssues.filter((entry) => !knownHashes.has(entry.hash));
  existing = existing.concat(toAdd);
  if (existing.length > 500) {
    const { summary, remaining } = await compactOldIssues(existing, {
      keepDays: 30,
      minIssuesToCompact: 100
    });
    if (summary) {
      await saveCompactedSummary(summary, projectDir);
      existing = remaining;
    }
  }
  if (existing.length > 1e3) {
    existing = [...existing]
      .sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime())
      .slice(0, 1e3);
  }
  await saveIssueIndex(existing, projectDir);
}
905
/**
 * Persist the issue index to `.trie/memory/issues.json`: back up the
 * current file first, then rewrite it atomically.
 */
async function saveIssueIndex(issues, projectDir) {
  const memoryDir = join3(projectDir, ".trie", "memory");
  await mkdir3(memoryDir, { recursive: true });
  const indexPath = join3(memoryDir, "issues.json");
  await new BackupManager(indexPath).createBackup();
  await atomicWriteJSON(indexPath, issues);
}
913
/**
 * Stable 16-hex-char fingerprint of an issue (text, file, severity, agent),
 * used to de-duplicate the same finding across repeated scans.
 */
function hashIssue(issue) {
  const fingerprint = [issue.issue, issue.file, issue.severity, issue.agent].join("|");
  return createHash("sha256").update(fingerprint).digest("hex").slice(0, 16);
}
917
+
918
+ export {
919
+ atomicWriteFile,
920
+ atomicWriteJSON,
921
+ BackupManager,
922
+ GlobalPatternsIndexSchema,
923
+ safeParseAndValidate,
924
+ getHistoricalInsights,
925
+ storeIssues,
926
+ searchIssues,
927
+ findSimilarIssues,
928
+ markIssueResolved,
929
+ getMemoryStats,
930
+ getRecentIssues,
931
+ getDailyLogs
932
+ };
933
+ //# sourceMappingURL=chunk-TSHZQKCM.js.map