@triedotdev/mcp 1.0.138 → 1.0.140

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. package/README.md +184 -38
  2. package/dist/{autonomy-config-TZ6HF4FA.js → autonomy-config-ZCOSTMPD.js} +2 -2
  3. package/dist/{chunk-X3F5QDER.js → chunk-4O2KRHK4.js} +934 -132
  4. package/dist/chunk-4O2KRHK4.js.map +1 -0
  5. package/dist/{chunk-J5EMP4XW.js → chunk-5KJ4UJOY.js} +9 -4
  6. package/dist/chunk-5KJ4UJOY.js.map +1 -0
  7. package/dist/chunk-62POBLFC.js +1925 -0
  8. package/dist/chunk-62POBLFC.js.map +1 -0
  9. package/dist/{chunk-GFFUDJMK.js → chunk-75ADWWUF.js} +13 -13
  10. package/dist/chunk-75ADWWUF.js.map +1 -0
  11. package/dist/{chunk-D3AS5LY7.js → chunk-7OJ6JIPL.js} +39 -604
  12. package/dist/chunk-7OJ6JIPL.js.map +1 -0
  13. package/dist/{chunk-3RRXWX3V.js → chunk-AF2APASP.js} +38 -4
  14. package/dist/{chunk-3RRXWX3V.js.map → chunk-AF2APASP.js.map} +1 -1
  15. package/dist/{chunk-QSWUPSLK.js → chunk-FH335WL5.js} +9 -1
  16. package/dist/chunk-FH335WL5.js.map +1 -0
  17. package/dist/{chunk-Y32FM3MR.js → chunk-FPEMP54L.js} +21 -15
  18. package/dist/chunk-FPEMP54L.js.map +1 -0
  19. package/dist/{chunk-EDDT4ZIH.js → chunk-GXF6JOCN.js} +21 -323
  20. package/dist/chunk-GXF6JOCN.js.map +1 -0
  21. package/dist/chunk-LD7ZEFNY.js +132 -0
  22. package/dist/chunk-LD7ZEFNY.js.map +1 -0
  23. package/dist/chunk-NKHO34UZ.js +467 -0
  24. package/dist/chunk-NKHO34UZ.js.map +1 -0
  25. package/dist/{chunk-YOKQ25IW.js → chunk-OQ4A3RDY.js} +14 -14
  26. package/dist/{chunk-6LLH3TBZ.js → chunk-UOSTOLU7.js} +12 -12
  27. package/dist/{chunk-67GSG2ST.js → chunk-XTTZAQWJ.js} +18 -15
  28. package/dist/chunk-XTTZAQWJ.js.map +1 -0
  29. package/dist/{chunk-FOCXXIXY.js → chunk-YEIJW6X6.js} +2 -2
  30. package/dist/chunk-YOJGSRZK.js +216 -0
  31. package/dist/chunk-YOJGSRZK.js.map +1 -0
  32. package/dist/cli/main.js +573 -59
  33. package/dist/cli/main.js.map +1 -1
  34. package/dist/cli/yolo-daemon.js +15 -13
  35. package/dist/cli/yolo-daemon.js.map +1 -1
  36. package/dist/{client-JTU5TRLB.js → client-INNE2GGZ.js} +2 -2
  37. package/dist/{codebase-index-FNJ4GCBE.js → codebase-index-5SEOESWM.js} +3 -3
  38. package/dist/fast-analyzer-AYLZB5TW.js +216 -0
  39. package/dist/fast-analyzer-AYLZB5TW.js.map +1 -0
  40. package/dist/github-ingester-J2ZFYXVE.js +11 -0
  41. package/dist/{goal-manager-6BJQ36AH.js → goal-manager-ZBWKWEML.js} +3 -3
  42. package/dist/{goal-validator-GISXYANK.js → goal-validator-HNXXUCPW.js} +3 -3
  43. package/dist/{graph-X2FMRQLG.js → graph-J4OGTYCO.js} +2 -2
  44. package/dist/{hypothesis-K3KQJOXJ.js → hypothesis-JCUMZKTG.js} +3 -3
  45. package/dist/index.js +1090 -108
  46. package/dist/index.js.map +1 -1
  47. package/dist/{issue-store-BO5OWLJW.js → issue-store-LZWZIGM7.js} +2 -2
  48. package/dist/linear-ingester-JRDQAIAA.js +11 -0
  49. package/dist/linear-ingester-JRDQAIAA.js.map +1 -0
  50. package/dist/{trie-agent-XMSGMD7E.js → trie-agent-M6PHM6UD.js} +10 -10
  51. package/dist/trie-agent-M6PHM6UD.js.map +1 -0
  52. package/package.json +15 -8
  53. package/dist/chunk-67GSG2ST.js.map +0 -1
  54. package/dist/chunk-D3AS5LY7.js.map +0 -1
  55. package/dist/chunk-EDDT4ZIH.js.map +0 -1
  56. package/dist/chunk-GFFUDJMK.js.map +0 -1
  57. package/dist/chunk-J5EMP4XW.js.map +0 -1
  58. package/dist/chunk-QSWUPSLK.js.map +0 -1
  59. package/dist/chunk-X3F5QDER.js.map +0 -1
  60. package/dist/chunk-Y32FM3MR.js.map +0 -1
  61. package/dist/chunk-Z2P4WST6.js +0 -883
  62. package/dist/chunk-Z2P4WST6.js.map +0 -1
  63. /package/dist/{autonomy-config-TZ6HF4FA.js.map → autonomy-config-ZCOSTMPD.js.map} +0 -0
  64. /package/dist/{chunk-YOKQ25IW.js.map → chunk-OQ4A3RDY.js.map} +0 -0
  65. /package/dist/{chunk-6LLH3TBZ.js.map → chunk-UOSTOLU7.js.map} +0 -0
  66. /package/dist/{chunk-FOCXXIXY.js.map → chunk-YEIJW6X6.js.map} +0 -0
  67. /package/dist/{client-JTU5TRLB.js.map → client-INNE2GGZ.js.map} +0 -0
  68. /package/dist/{codebase-index-FNJ4GCBE.js.map → codebase-index-5SEOESWM.js.map} +0 -0
  69. /package/dist/{goal-manager-6BJQ36AH.js.map → github-ingester-J2ZFYXVE.js.map} +0 -0
  70. /package/dist/{goal-validator-GISXYANK.js.map → goal-manager-ZBWKWEML.js.map} +0 -0
  71. /package/dist/{graph-X2FMRQLG.js.map → goal-validator-HNXXUCPW.js.map} +0 -0
  72. /package/dist/{hypothesis-K3KQJOXJ.js.map → graph-J4OGTYCO.js.map} +0 -0
  73. /package/dist/{issue-store-BO5OWLJW.js.map → hypothesis-JCUMZKTG.js.map} +0 -0
  74. /package/dist/{trie-agent-XMSGMD7E.js.map → issue-store-LZWZIGM7.js.map} +0 -0
@@ -0,0 +1,1925 @@
1
+ import {
2
+ BackupManager,
3
+ CompactedSummariesIndexSchema,
4
+ IssueIndexSchema,
5
+ safeParseAndValidate
6
+ } from "./chunk-4MJ52WBH.js";
7
+ import {
8
+ atomicWriteJSON
9
+ } from "./chunk-43X6JBEM.js";
10
+ import {
11
+ getTrieDirectory,
12
+ getWorkingDirectory
13
+ } from "./chunk-SH7H3WRU.js";
14
+
15
+ // src/memory/issue-store.ts
16
+ import { mkdir as mkdir3, writeFile as writeFile2, readFile as readFile3, readdir as readdir2 } from "fs/promises";
17
+ import { createHash as createHash2 } from "crypto";
18
+ import { existsSync as existsSync4 } from "fs";
19
+ import { join as join3 } from "path";
20
+
21
+ // src/memory/bm25.ts
22
// Stop-word set hoisted to module scope so it is built once, not rebuilt for
// every token (the original allocated a ~100-entry Set per isStopWord call).
var BM25_STOP_WORDS = /* @__PURE__ */ new Set([
  "the", "be", "to", "of", "and", "a", "in", "that", "have", "i",
  "it", "for", "not", "on", "with", "he", "as", "you", "do", "at",
  "this", "but", "his", "by", "from", "they", "we", "say", "her", "she",
  "or", "an", "will", "my", "one", "all", "would", "there", "their", "what",
  "so", "up", "out", "if", "about", "who", "get", "which", "go", "me",
  "when", "make", "can", "like", "time", "no", "just", "him", "know", "take",
  "into", "year", "your", "some", "could", "them", "see", "other", "than", "then",
  "now", "look", "only", "come", "its", "over", "also", "back", "after", "use",
  "two", "how", "our", "first", "way", "even", "new", "want", "because", "any",
  "these", "give", "day", "most", "us", "should", "been", "has", "was", "are"
]);
var BM25Index = class _BM25Index {
  // In-memory Okapi BM25 index: documents by id, per-document term
  // frequencies, corpus-wide document frequencies, and per-document lengths.
  documents = /* @__PURE__ */ new Map();
  termFrequencies = /* @__PURE__ */ new Map();
  documentFrequencies = /* @__PURE__ */ new Map();
  documentLengths = /* @__PURE__ */ new Map();
  avgDocLength = 0;
  // Conventional BM25 tuning: k1 = term-frequency saturation, b = length normalization.
  k1 = 1.5;
  b = 0.75;
  /**
   * Add a document to the index. If a document with the same id already
   * exists it is replaced.
   * Fix: the original left the old document's terms counted in
   * documentFrequencies on re-add, permanently skewing IDF.
   */
  addDocument(doc) {
    if (this.documents.has(doc.id)) {
      this.removeDocument(doc.id);
    }
    const tokens = this.tokenize(doc.text);
    this.documents.set(doc.id, doc);
    this.documentLengths.set(doc.id, tokens.length);
    const termFreq = /* @__PURE__ */ new Map();
    const seenTerms = /* @__PURE__ */ new Set();
    for (const token of tokens) {
      termFreq.set(token, (termFreq.get(token) || 0) + 1);
      if (!seenTerms.has(token)) {
        seenTerms.add(token);
        // Count each term once per document for the DF statistic.
        this.documentFrequencies.set(token, (this.documentFrequencies.get(token) || 0) + 1);
      }
    }
    this.termFrequencies.set(doc.id, termFreq);
    this.updateAvgDocLength();
  }
  /**
   * Remove a document and roll its term statistics back out of the corpus.
   * No-op for an unknown id.
   */
  removeDocument(id) {
    const termFreq = this.termFrequencies.get(id);
    if (termFreq) {
      for (const term of termFreq.keys()) {
        const df = this.documentFrequencies.get(term) || 0;
        if (df <= 1) {
          this.documentFrequencies.delete(term);
        } else {
          this.documentFrequencies.set(term, df - 1);
        }
      }
    }
    this.documents.delete(id);
    this.termFrequencies.delete(id);
    this.documentLengths.delete(id);
    this.updateAvgDocLength();
  }
  /**
   * Add multiple documents.
   */
  addDocuments(docs) {
    for (const doc of docs) {
      this.addDocument(doc);
    }
  }
  /**
   * Search the index. Returns up to `limit` results sorted by descending
   * BM25 score; only documents with a positive score are included.
   */
  search(query, limit = 10) {
    const queryTokens = this.tokenize(query);
    const scores = /* @__PURE__ */ new Map();
    const N = this.documents.size;
    for (const [docId] of this.documents) {
      let score = 0;
      const docLength = this.documentLengths.get(docId) || 0;
      const termFreqs = this.termFrequencies.get(docId);
      if (!termFreqs) continue;
      for (const term of queryTokens) {
        const tf = termFreqs.get(term) || 0;
        if (tf === 0) continue;
        const df = this.documentFrequencies.get(term) || 0;
        // "+1" IDF variant keeps the value positive even when df > N/2.
        const idf = Math.log((N - df + 0.5) / (df + 0.5) + 1);
        const numerator = tf * (this.k1 + 1);
        const denominator = tf + this.k1 * (1 - this.b + this.b * (docLength / this.avgDocLength));
        score += idf * (numerator / denominator);
      }
      if (score > 0) {
        scores.set(docId, score);
      }
    }
    return Array.from(scores.entries()).sort((a, b) => b[1] - a[1]).slice(0, limit).map(([id, score]) => {
      const metadata = this.documents.get(id)?.metadata;
      const result = { id, score };
      // Omit the metadata key entirely when the document carries none.
      if (metadata !== void 0) {
        result.metadata = metadata;
      }
      return result;
    });
  }
  /**
   * Get document count
   */
  get size() {
    return this.documents.size;
  }
  /**
   * Clear the index
   */
  clear() {
    this.documents.clear();
    this.termFrequencies.clear();
    this.documentFrequencies.clear();
    this.documentLengths.clear();
    this.avgDocLength = 0;
  }
  /**
   * Serialize the full index state to a JSON string (see deserialize).
   */
  serialize() {
    return JSON.stringify({
      documents: Array.from(this.documents.entries()),
      termFrequencies: Array.from(this.termFrequencies.entries()).map(([k, v]) => [k, Array.from(v.entries())]),
      documentFrequencies: Array.from(this.documentFrequencies.entries()),
      documentLengths: Array.from(this.documentLengths.entries()),
      avgDocLength: this.avgDocLength
    });
  }
  /**
   * Rebuild an index from a string produced by serialize().
   */
  static deserialize(json) {
    const data = JSON.parse(json);
    const index = new _BM25Index();
    index.documents = new Map(data.documents);
    index.termFrequencies = new Map(data.termFrequencies.map(([k, v]) => [k, new Map(v)]));
    index.documentFrequencies = new Map(data.documentFrequencies);
    index.documentLengths = new Map(data.documentLengths);
    index.avgDocLength = data.avgDocLength;
    return index;
  }
  // Lowercase, replace punctuation with spaces, drop tokens of length <= 2
  // and stop words.
  tokenize(text) {
    return text.toLowerCase().replace(/[^\w\s]/g, " ").split(/\s+/).filter((token) => token.length > 2 && !this.isStopWord(token));
  }
  isStopWord(word) {
    return BM25_STOP_WORDS.has(word);
  }
  // Keep avgDocLength in sync after any add/remove.
  updateAvgDocLength() {
    if (this.documentLengths.size === 0) {
      this.avgDocLength = 0;
      return;
    }
    const total = Array.from(this.documentLengths.values()).reduce((a, b) => a + b, 0);
    this.avgDocLength = total / this.documentLengths.size;
  }
};
249
+
250
+ // src/memory/compactor.ts
251
+ import { mkdir, readFile } from "fs/promises";
252
+ import { existsSync } from "fs";
253
+ import { join } from "path";
254
async function compactOldIssues(issues, options = {}) {
  // Split the issue list at a retention cutoff; once enough issues have aged
  // out, replace them with a compact summary and keep only the recent ones.
  const keepDays = options.keepDays ?? 30;
  const minIssues = options.minIssuesToCompact ?? 100;
  const cutoffDate = new Date();
  cutoffDate.setDate(cutoffDate.getDate() - keepDays);
  const agedOut = issues.filter((issue) => new Date(issue.timestamp) < cutoffDate);
  const retained = issues.filter((issue) => new Date(issue.timestamp) >= cutoffDate);
  // Below the compaction threshold: leave the caller's list untouched.
  if (agedOut.length < minIssues) {
    return { summary: null, remaining: issues };
  }
  return { summary: buildSummary(agedOut), remaining: retained };
}
267
function buildSummary(issues) {
  // Aggregate a batch of aged-out issues into a compact summary: chronological
  // range, counts per severity/agent, recurring issue patterns, and the files
  // that accumulated the most issues.
  // Fix: sort a copy -- Array.prototype.sort mutates in place and `issues`
  // belongs to the caller.
  const sorted = [...issues].sort(
    (a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime()
  );
  const bySeverity = {};
  const byAgent = {};
  const patternMap = /* @__PURE__ */ new Map();
  const fileCount = /* @__PURE__ */ new Map();
  for (const issue of issues) {
    bySeverity[issue.severity] = (bySeverity[issue.severity] || 0) + 1;
    byAgent[issue.agent] = (byAgent[issue.agent] || 0) + 1;
    // Bucket near-identical issue texts under a normalized key; the first
    // occurrence is kept as the representative example.
    const patternKey = normalizePattern(issue.issue);
    const existing = patternMap.get(patternKey);
    if (existing) {
      existing.count++;
    } else {
      patternMap.set(patternKey, { count: 1, issue });
    }
    // Group by basename only.
    const fileName = issue.file.split("/").pop() || issue.file;
    fileCount.set(fileName, (fileCount.get(fileName) || 0) + 1);
  }
  const topPatterns = Array.from(patternMap.entries()).sort((a, b) => b[1].count - a[1].count).slice(0, 10).map(([pattern, data]) => ({
    pattern: pattern.slice(0, 100),
    count: data.count,
    severity: data.issue.severity,
    agent: data.issue.agent,
    exampleFix: data.issue.fix.slice(0, 200)
  }));
  const hotFiles = Array.from(fileCount.entries()).sort((a, b) => b[1] - a[1]).slice(0, 10).map(([file, count]) => ({ file, count }));
  return {
    // Human-readable date range, e.g. "2024-01-01 to 2024-01-31".
    period: `${sorted[0]?.timestamp.split("T")[0]} to ${sorted[sorted.length - 1]?.timestamp.split("T")[0]}`,
    startDate: sorted[0]?.timestamp || "",
    endDate: sorted[sorted.length - 1]?.timestamp || "",
    totalIssues: issues.length,
    resolvedCount: issues.filter((i) => i.resolved).length,
    bySeverity,
    byAgent,
    topPatterns,
    hotFiles,
    compactedAt: (/* @__PURE__ */ new Date()).toISOString()
  };
}
function normalizePattern(text) {
  // Canonicalize issue text so cosmetic differences (inline code spans,
  // numbers, quotes, whitespace) collapse to one pattern key, capped at 150 chars.
  return text.toLowerCase().replace(/`[^`]+`/g, "CODE").replace(/\b\d+\b/g, "N").replace(/["']/g, "").replace(/\s+/g, " ").trim().slice(0, 150);
}
312
// Append one compaction summary to <trie>/memory/compacted-summaries.json,
// keeping a bounded history. Backs the file up before the atomic overwrite.
async function saveCompactedSummary(summary, projectDir) {
  const memoryDir = join(getTrieDirectory(projectDir), "memory");
  await mkdir(memoryDir, { recursive: true });
  const summaryPath = join(memoryDir, "compacted-summaries.json");
  let summaries = [];
  try {
    if (existsSync(summaryPath)) {
      const content = await readFile(summaryPath, "utf-8");
      // Schema-validate the existing index; invalid content is silently
      // discarded and a fresh index started.
      const result = safeParseAndValidate(content, CompactedSummariesIndexSchema);
      if (result.success) {
        summaries = result.data;
      }
    }
  } catch {
    // Unreadable file: start over rather than failing the save.
    summaries = [];
  }
  summaries.push(summary);
  // Retain only the 12 most recent summaries (presumably ~a year of monthly
  // compactions -- TODO confirm the intended cadence).
  if (summaries.length > 12) {
    summaries = summaries.slice(-12);
  }
  // Snapshot the current file first so a failed/interrupted write is recoverable
  // (loadCompactedSummaries uses recoverFromBackup on parse failure).
  const backupManager = new BackupManager(summaryPath);
  await backupManager.createBackup();
  await atomicWriteJSON(summaryPath, summaries);
}
336
// Load the compacted-summaries index, falling back to the backup copy when the
// primary file fails schema validation. Any error yields an empty list.
async function loadCompactedSummaries(projectDir) {
  const summaryPath = join(getTrieDirectory(projectDir), "memory", "compacted-summaries.json");
  try {
    if (existsSync(summaryPath)) {
      const content = await readFile(summaryPath, "utf-8");
      const result = safeParseAndValidate(content, CompactedSummariesIndexSchema);
      if (result.success) {
        return result.data;
      }
      // Primary file is corrupt: try restoring the backup written by
      // saveCompactedSummary, then re-read and re-validate it.
      const backupManager = new BackupManager(summaryPath);
      if (await backupManager.recoverFromBackup()) {
        const recovered = await readFile(summaryPath, "utf-8");
        const recoveredResult = safeParseAndValidate(recovered, CompactedSummariesIndexSchema);
        if (recoveredResult.success) {
          return recoveredResult.data;
        }
      }
    }
  } catch {
    // Best-effort read: all failures degrade to "no history".
  }
  return [];
}
358
async function getHistoricalInsights(projectDir) {
  // Roll the stored compaction summaries up into cross-period insights:
  // lifetime issue total, patterns recurring across periods, and a trend.
  const summaries = await loadCompactedSummaries(projectDir);
  if (summaries.length === 0) {
    return {
      totalHistoricalIssues: 0,
      recurringPatterns: [],
      improvementTrend: "unknown"
    };
  }
  let totalHistoricalIssues = 0;
  const mergedPatterns = new Map();
  for (const summary of summaries) {
    totalHistoricalIssues += summary.totalIssues;
    // Merge each period's top patterns, tracking in how many periods each appeared.
    for (const pattern of summary.topPatterns) {
      const prior = mergedPatterns.get(pattern.pattern);
      if (prior) {
        prior.count += pattern.count;
        prior.appearances += 1;
      } else {
        mergedPatterns.set(pattern.pattern, { ...pattern, appearances: 1 });
      }
    }
  }
  // "Recurring" = seen in at least two compaction periods.
  const recurringPatterns = [...mergedPatterns.values()]
    .filter((entry) => entry.appearances >= 2)
    .sort((a, b) => b.count - a.count)
    .slice(0, 5);
  // Compare the two most recent periods with a +/-20% dead band.
  let improvementTrend = "unknown";
  if (summaries.length >= 2) {
    const olderCount = summaries[summaries.length - 2]?.totalIssues || 0;
    const newerCount = summaries[summaries.length - 1]?.totalIssues || 0;
    if (newerCount < olderCount * 0.8) {
      improvementTrend = "improving";
    } else if (newerCount > olderCount * 1.2) {
      improvementTrend = "declining";
    } else {
      improvementTrend = "stable";
    }
  }
  return {
    totalHistoricalIssues,
    recurringPatterns,
    improvementTrend
  };
}
401
+
402
+ // src/memory/ledger.ts
403
+ import { createHash } from "crypto";
404
+ import { mkdir as mkdir2, readFile as readFile2, writeFile, stat, unlink, readdir } from "fs/promises";
405
+ import { existsSync as existsSync3 } from "fs";
406
+ import { createGzip, createGunzip } from "zlib";
407
+ import { pipeline } from "stream/promises";
408
+ import { createReadStream, createWriteStream } from "fs";
409
+ import { join as join2 } from "path";
410
+
411
+ // src/agent/git.ts
412
+ import { existsSync as existsSync2 } from "fs";
413
+ import path from "path";
414
+
415
+ // src/utils/command-runner.ts
416
+ import { exec, execFile, execSync } from "child_process";
417
+ import { promisify } from "util";
418
+
419
+ // src/skills/audit-logger.ts
420
function formatAuditLog(_entry) {
  // Formatting is intentionally a stub now; the entry argument is ignored.
  const notice = "Audit logging has been integrated into the decision ledger";
  return notice;
}
423
function getAuditStatistics() {
  // Returns an all-zero counter set; standalone audit statistics are no
  // longer accumulated (see formatAuditLog).
  const zeroed = {
    totalScans: 0,
    totalIssues: 0,
    criticalCount: 0,
    seriousCount: 0,
    moderateCount: 0,
    lowCount: 0,
    totalExecutions: 0,
    successfulExecutions: 0,
    failedExecutions: 0,
    uniqueSkills: 0,
    totalCommands: 0,
    blockedCommands: 0,
    totalNetworkCalls: 0,
    blockedNetworkCalls: 0
  };
  return zeroed;
}
441
function createAuditEntry(skillName, skillSource, triggeredBy, targetPath) {
  // Open a fresh audit record: start timestamp now, empty command log to
  // append to as the skill executes.
  const startedAt = new Date().toISOString();
  return {
    skillName,
    skillSource,
    triggeredBy,
    targetPath,
    startedAt,
    commands: []
  };
}
451
function completeAuditEntry(entry, success, error) {
  // Close out an audit record: stamp completion time and outcome.
  const completed = {
    ...entry,
    completedAt: new Date().toISOString(),
    success
  };
  // Attach `error` only when one was actually supplied, keeping the record clean.
  if (error !== undefined) {
    completed.error = error;
  }
  return completed;
}
462
// Persistence is a deliberate no-op: per formatAuditLog in this module, audit
// logging has been integrated into the decision ledger.
async function logSkillExecution(_execution) {
}
// Kept for API compatibility; separate audit logs are no longer stored.
async function getRecentAuditLogs(_limit = 10) {
  return [];
}
// Kept for API compatibility; always resolves to an empty list.
async function getSkillAuditLogs(_skillName) {
  return [];
}
470
+
471
+ // src/utils/command-runner.ts
472
// Promisified child_process wrappers used by the audited runners below.
// NOTE(review): execAsync is not referenced in this chunk -- presumably used
// elsewhere in the bundle; verify before removing.
var execAsync = promisify(exec);
var execFileAsync = promisify(execFile);
474
function redact(text) {
  // Scrub common credential shapes (env-style API keys, bearer tokens, GitHub
  // PATs, Slack tokens, AWS access key ids) before text is persisted.
  let scrubbed = text;
  scrubbed = scrubbed.replace(/\b(AWS|ANTHROPIC|OPENAI|GITHUB)_[A-Z0-9_]*\s*=\s*([^\s"'`]+)/gi, "$1_<REDACTED>=<REDACTED>");
  scrubbed = scrubbed.replace(/\bBearer\s+[A-Za-z0-9\-._~+/]+=*\b/g, "Bearer <REDACTED>");
  scrubbed = scrubbed.replace(/\bghp_[A-Za-z0-9]{20,}\b/g, "ghp_<REDACTED>");
  scrubbed = scrubbed.replace(/\b(?:xox[baprs]-)[A-Za-z0-9-]{10,}\b/g, "<REDACTED_SLACK_TOKEN>");
  scrubbed = scrubbed.replace(/\bAKIA[0-9A-Z]{16}\b/g, "AKIA<REDACTED>");
  return scrubbed;
}
477
function clampOutput(text, maxChars) {
  // Short output passes through untouched.
  if (text.length <= maxChars) {
    return text;
  }
  // Truncate and append a marker noting how many characters were dropped.
  const omitted = text.length - maxChars;
  return `${text.slice(0, maxChars)}\n\u2026(truncated ${omitted} chars)`;
}
482
function buildCommandRecord(command) {
  // Minimal audit record for a single command invocation; duration/exit code
  // are filled in later by finalizeAndWrite.
  const timestamp = new Date().toISOString();
  return { command, timestamp };
}
488
async function finalizeAndWrite(entry, cmd, outcome, options) {
  // Stamp timing and exit information onto the command record.
  cmd.duration = Date.now() - outcome.startedAt;
  if (outcome.exitCode !== void 0) {
    cmd.exitCode = outcome.exitCode;
  }
  const captureOutput = options?.captureOutput ?? false;
  const redactOutput = options?.redactOutput ?? true;
  const maxOutputChars = options?.maxOutputChars ?? 2e3;
  if (captureOutput) {
    // Clamp first, then optionally redact, so secrets never land in the log.
    const clamp = (raw) => clampOutput(raw ?? "", maxOutputChars);
    const sanitize = (clamped) => redactOutput ? redact(clamped) : clamped;
    cmd.stdout = sanitize(clamp(outcome.stdout));
    cmd.stderr = sanitize(clamp(outcome.stderr));
  }
  const completed = completeAuditEntry(entry, outcome.success, outcome.error);
  await logSkillExecution(completed);
}
506
// Blocking shell execution with audit logging; synchronous counterpart of
// runExecFile. Never throws: failures are reported via the exit code.
function runShellCommandSync(command, audit, options) {
  const startedAt = Date.now();
  const entry = createAuditEntry(audit.actor, audit.source ?? "trie", audit.triggeredBy, audit.targetPath);
  const cmd = buildCommandRecord(command);
  entry.commands?.push(cmd);
  try {
    const stdout = execSync(command, {
      cwd: options?.cwd,
      timeout: options?.timeoutMs,
      maxBuffer: options?.maxBuffer,
      encoding: "utf-8",
      stdio: ["pipe", "pipe", "pipe"]
    });
    // Fire-and-forget: audit persistence must not block the synchronous return.
    void finalizeAndWrite(entry, cmd, { success: true, exitCode: 0, stdout, stderr: "", startedAt }, options);
    return { stdout: stdout ?? "", exitCode: 0 };
  } catch (e) {
    const err = e;
    const stdout = typeof err.stdout === "string" ? err.stdout : "";
    const stderr = typeof err.stderr === "string" ? err.stderr : "";
    // execSync puts the child's exit code on `status`; non-numeric values
    // (timeouts, signals) default to 1.
    const exitCode = typeof err.status === "number" ? err.status : 1;
    // Failure paths capture output by default so the audit trail shows what went wrong.
    void finalizeAndWrite(
      entry,
      cmd,
      { success: false, exitCode, stdout, stderr, error: err.message, startedAt },
      { ...options, captureOutput: options?.captureOutput ?? true }
    );
    return { stdout, exitCode };
  }
}
535
// Audited wrapper around execFile (no shell, so args are not interpolated).
// Resolves with { stdout, stderr, exitCode } instead of rejecting on failure.
async function runExecFile(file, args, audit, options) {
  const startedAt = Date.now();
  // Joined only for display in the audit record.
  const command = [file, ...args].join(" ");
  const entry = createAuditEntry(audit.actor, audit.source ?? "trie", audit.triggeredBy, audit.targetPath);
  const cmd = buildCommandRecord(command);
  entry.commands?.push(cmd);
  try {
    const { stdout, stderr } = await execFileAsync(file, args, {
      cwd: options?.cwd,
      timeout: options?.timeoutMs,
      maxBuffer: options?.maxBuffer
    });
    await finalizeAndWrite(entry, cmd, { success: true, exitCode: 0, stdout: String(stdout ?? ""), stderr: String(stderr ?? ""), startedAt }, options);
    return { stdout: String(stdout ?? ""), stderr: String(stderr ?? ""), exitCode: 0 };
  } catch (e) {
    const err = e;
    const stdout = typeof err.stdout === "string" ? err.stdout : "";
    const stderr = typeof err.stderr === "string" ? err.stderr : "";
    // child_process sets `code` to the exit status; non-numeric codes
    // (e.g. "ENOENT") map to 1.
    const exitCode = typeof err.code === "number" ? err.code : 1;
    // Failure paths capture output by default for the audit trail.
    await finalizeAndWrite(
      entry,
      cmd,
      { success: false, exitCode, stdout, stderr, error: err.message, startedAt },
      { ...options, captureOutput: options?.captureOutput ?? true }
    );
    return { stdout, stderr, exitCode };
  }
}
563
+
564
+ // src/agent/git.ts
565
// Run `git -C <cwd> <args>` through the audited runner. Output capture is
// disabled so command output stays out of the audit log. Returns trimmed
// stdout, or null for expected "not a repo / no commits" states.
async function execGit(args, cwd) {
  try {
    const { stdout } = await runExecFile(
      "git",
      ["-C", cwd, ...args],
      { actor: "internal:git", triggeredBy: "manual", targetPath: cwd },
      { maxBuffer: 10 * 1024 * 1024, captureOutput: false }
    );
    return stdout.trim();
  } catch (error) {
    // NOTE(review): runExecFile above resolves (never rejects) on non-zero
    // exit, so this catch likely only fires on unexpected runner failures --
    // verify whether the stderr-based detection below is still reachable.
    const stderr = error?.stderr?.toString();
    if (stderr?.includes("not a git repository") || stderr?.includes("does not have any commits")) {
      return null;
    }
    throw error;
  }
}
582
async function ensureRepo(projectPath) {
  // `git rev-parse --is-inside-work-tree` prints the literal string "true"
  // when run inside a working tree; anything else means "not a repo".
  return (await execGit(["rev-parse", "--is-inside-work-tree"], projectPath)) === "true";
}
586
function parseNameStatus(output) {
  // Parse `git ... --name-status` output. Git's fields are TAB-separated:
  //   "M<TAB>path"   or, for renames/copies,   "R100<TAB>old<TAB>new".
  // Fix: split on TAB (git's actual delimiter) rather than a single space,
  // and for R*/C* lines report the live (new) path as `path` with the
  // original path as `oldPath` -- the previous code had them inverted.
  return output
    .split("\n")
    .map((line) => line.trim())
    .filter(Boolean)
    .map((line) => {
      const parts = line.split("\t");
      const status = parts[0] ?? "";
      const renameOrCopy = (status.startsWith("R") || status.startsWith("C")) && parts.length >= 3;
      const filePath = (renameOrCopy ? parts[2] : parts[1]) ?? "";
      const oldPath = renameOrCopy ? parts[1] : parts[2];
      const change = { status, path: filePath };
      if (oldPath) change.oldPath = oldPath;
      return change;
    })
    .filter((entry) => entry.path.length > 0);
}
597
async function getRecentCommits(projectPath, limit) {
  // List the `limit` most recent commits as { hash, author, date, message }.
  const isRepo = await ensureRepo(projectPath);
  if (!isRepo) return [];
  const output = await execGit(
    ["log", "-n", String(limit), "--pretty=format:%H%x09%an%x09%ad%x09%s", "--date=iso"],
    projectPath
  );
  if (!output) return [];
  return output.split("\n").map((line) => {
    // Fix: %x09 emits TAB separators, so split on TAB -- splitting on a space
    // broke on author names and ISO dates, which both contain spaces. The
    // subject is everything after the third TAB (it may itself contain tabs).
    const [hash, author, date, ...rest] = line.split("\t");
    return { hash, author, date, message: rest.join("\t") };
  });
}
610
async function getLastCommit(projectPath) {
  // Most recent commit, or null for an empty/non-repo directory.
  const [latest] = await getRecentCommits(projectPath, 1);
  return latest ?? null;
}
614
async function getStagedChanges(projectPath) {
  // Changes staged in the index (diff against HEAD), as parsed name-status records.
  if (!(await ensureRepo(projectPath))) return [];
  const output = await execGit(["diff", "--cached", "--name-status"], projectPath);
  return output ? parseNameStatus(output) : [];
}
621
async function getUncommittedChanges(projectPath) {
  // Unstaged modifications plus untracked files (the latter with status "??",
  // mirroring `git status --porcelain` notation).
  if (!(await ensureRepo(projectPath))) return [];
  const changes = [];
  const unstaged = await execGit(["diff", "--name-status"], projectPath);
  if (unstaged) {
    changes.push(...parseNameStatus(unstaged));
  }
  const untracked = await execGit(["ls-files", "--others", "--exclude-standard"], projectPath);
  if (untracked) {
    for (const raw of untracked.split("\n")) {
      const candidate = raw.trim();
      if (candidate) {
        changes.push({ status: "??", path: candidate });
      }
    }
  }
  return changes;
}
637
async function getGitChangedFiles(projectPath) {
  // Deduplicated union of staged and uncommitted paths (both sides of a
  // rename included). Returns null when the directory is not a git repo.
  if (!(await ensureRepo(projectPath))) return null;
  const [staged, uncommitted] = await Promise.all([
    getStagedChanges(projectPath).catch(() => []),
    getUncommittedChanges(projectPath).catch(() => [])
  ]);
  const seen = new Set();
  for (const change of [...staged, ...uncommitted]) {
    if (change.path) seen.add(change.path);
    if (change.oldPath) seen.add(change.oldPath);
  }
  return [...seen];
}
651
async function getDiff(projectPath, commitHash) {
  // Full patch for a single commit (`git show`), or "" outside a repo.
  if (!(await ensureRepo(projectPath))) return "";
  const diff = await execGit(["show", commitHash, "--unified=3", "--no-color"], projectPath);
  return diff ?? "";
}
657
async function getWorkingTreeDiff(projectPath, stagedOnly = false) {
  // Working-tree patch; with stagedOnly, only what is staged in the index.
  if (!(await ensureRepo(projectPath))) return "";
  const args = ["diff", ...(stagedOnly ? ["--cached"] : []), "--unified=3", "--no-color"];
  const diff = await execGit(args, projectPath);
  return diff ?? "";
}
664
async function isGitRepo(projectPath) {
  // NOTE(review): behaviorally identical to ensureRepo above; kept as a
  // separate export for compatibility.
  const flag = await execGit(["rev-parse", "--is-inside-work-tree"], projectPath);
  return flag === "true";
}
668
// Union of files committed since `timestamp` plus staged, unstaged, and
// untracked files, returned as absolute paths. Returns null when the
// directory is not a repo or when git exceeds the 5s budget.
async function getChangedFilesSinceTimestamp(projectPath, timestamp) {
  const isRepo = await isGitRepo(projectPath);
  if (!isRepo) return null;
  try {
    const sinceDate = new Date(timestamp).toISOString();
    const GIT_TIMEOUT_MS = 5e3;
    const startTime = Date.now();
    // Empty --pretty format leaves only the file names in the log output.
    const committedChangesPromise = execGit(
      ["log", `--since=${sinceDate}`, "--name-only", "--pretty=format:"],
      projectPath
    );
    // NOTE(review): these timeout timers are never cleared, so they can keep
    // the event loop alive for up to 5s after completion -- consider
    // clearTimeout/unref.
    const committedChangesTimeout = new Promise((resolve) => {
      setTimeout(() => resolve(null), GIT_TIMEOUT_MS);
    });
    const committedChanges = await Promise.race([committedChangesPromise, committedChangesTimeout]);
    if (Date.now() - startTime > GIT_TIMEOUT_MS) {
      // Budget exhausted: report "unknown" rather than partial data.
      return null;
    }
    const stagedPromise = execGit(["diff", "--cached", "--name-only"], projectPath);
    const unstagedPromise = execGit(["diff", "--name-only"], projectPath);
    const untrackedPromise = execGit(
      ["ls-files", "--others", "--exclude-standard"],
      projectPath
    );
    // The three queries run in parallel and share whatever budget remains.
    const timeoutPromise = new Promise((resolve) => {
      setTimeout(() => resolve(null), Math.max(0, GIT_TIMEOUT_MS - (Date.now() - startTime)));
    });
    const [stagedChanges, unstagedChanges, untrackedFiles] = await Promise.race([
      Promise.all([stagedPromise, unstagedPromise, untrackedPromise]),
      timeoutPromise.then(() => [null, null, null])
    ]);
    const changedFiles = /* @__PURE__ */ new Set();
    // Normalize each newline-separated listing into absolute paths; null
    // (timed-out or empty) listings are skipped.
    const addFiles = (output) => {
      if (output) {
        output.split("\n").map((f) => f.trim()).filter(Boolean).forEach((f) => changedFiles.add(path.join(projectPath, f)));
      }
    };
    addFiles(committedChanges);
    addFiles(stagedChanges);
    addFiles(unstagedChanges);
    addFiles(untrackedFiles);
    return Array.from(changedFiles);
  } catch {
    // Any git failure degrades to "unknown".
    return null;
  }
}
714
+
715
+ // src/memory/ledger.ts
716
// On-disk names and chain parameters for the issue ledger.
var LEDGER_FILENAME = "ledger.json";
// Directory name for ledger data shared between collaborators -- presumably
// synced via the manifest/sync-state files below (TODO confirm; the consumers
// are outside this chunk).
var SHARED_LEDGER_DIR = ".trie-shared";
var MANIFEST_FILENAME = "ledger-manifest.json";
var SYNC_STATE_FILENAME = "ledger-sync.json";
// 64 zero hex digits (SHA-256 width): previousHash sentinel for block 0.
var GENESIS_HASH = "0".repeat(64);
var LEDGER_VERSION = 2;
722
// Append issue entries to today's ledger block, opening a new chained block
// when the date rolled over. Recomputes the block's integrity fields and
// persists the whole chain. Returns the touched block, or null for no issues.
async function appendIssuesToLedger(issues, workDir, author) {
  if (issues.length === 0) return null;
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const memoryDir = join2(getTrieDirectory(projectDir), "memory");
  await mkdir2(memoryDir, { recursive: true });
  const isRepo = await isGitRepo(projectDir);
  const lastCommit = isRepo ? await getLastCommit(projectDir) : null;
  // Attribution preference: explicit author > last commit author > "unknown".
  const blockAuthor = author || lastCommit?.author || "unknown";
  const blocks = await loadLedger(projectDir);
  const today = (/* @__PURE__ */ new Date()).toISOString().slice(0, 10);
  const now = (/* @__PURE__ */ new Date()).toISOString();
  // Project each issue down to the fields the ledger records.
  const entries = issues.map((issue) => ({
    id: issue.id,
    hash: issue.hash,
    severity: issue.severity,
    file: issue.file,
    agent: issue.agent,
    timestamp: issue.timestamp
  }));
  const previousBlock = blocks[blocks.length - 1];
  // Reuse today's open block if one exists; otherwise chain a new block off
  // the previous block's hash (or the genesis hash for an empty chain).
  const block = previousBlock && previousBlock.date === today ? previousBlock : createSyncableBlock(today, now, previousBlock?.blockHash ?? GENESIS_HASH, blockAuthor, lastCommit?.hash, blocks.length);
  if (block !== previousBlock) {
    blocks.push(block);
  }
  block.entries = [...block.entries, ...entries];
  // Recompute integrity fields after the append; verifyLedger checks exactly these.
  block.merkleRoot = computeMerkleRoot(block.entries.map((entry) => entry.hash));
  block.blockHash = computeBlockHash(block.previousHash, block.merkleRoot, block.date, block.version);
  block.updatedAt = now;
  await saveLedger(blocks, projectDir);
  return block;
}
753
// Walk the chain verifying hash linkage, merkle roots, and block hashes.
// Returns { valid: true } or { valid: false, error } for the first failure.
async function verifyLedger(workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const blocks = await loadLedger(projectDir);
  if (blocks.length === 0) {
    // An empty ledger is trivially valid.
    return { valid: true };
  }
  for (let i = 0; i < blocks.length; i += 1) {
    const block = blocks[i];
    if (!block) {
      return { valid: false, error: `Block ${i} missing` };
    }
    // Block 0 must chain off the all-zero genesis hash; every other block
    // must reference its predecessor's blockHash.
    const expectedPreviousHash = i === 0 ? GENESIS_HASH : blocks[i - 1]?.blockHash;
    if (!expectedPreviousHash) {
      return { valid: false, error: `Block ${i} missing previous block` };
    }
    if (block.previousHash !== expectedPreviousHash) {
      return { valid: false, error: `Block ${i} previous hash mismatch` };
    }
    // Entries are tamper-evident via the merkle root over their hashes.
    const computedMerkleRoot = computeMerkleRoot(block.entries.map((entry) => entry.hash));
    if (block.merkleRoot !== computedMerkleRoot) {
      return { valid: false, error: `Block ${i} merkle root mismatch` };
    }
    // Finally, the block hash must match its recorded fields.
    const computedBlockHash = computeBlockHash(block.previousHash, block.merkleRoot, block.date, block.version);
    if (block.blockHash !== computedBlockHash) {
      return { valid: false, error: `Block ${i} hash mismatch` };
    }
  }
  return { valid: true };
}
782
function computeMerkleRoot(hashes) {
  // Classic pairwise merkle fold: hash adjacent pairs level by level,
  // duplicating the trailing hash when a level has odd length.
  // An empty entry list hashes the empty string.
  if (hashes.length === 0) {
    return sha256("");
  }
  let current = [...hashes];
  while (current.length > 1) {
    const parents = [];
    for (let i = 0; i < current.length; i += 2) {
      const left = current[i];
      const right = current[i + 1] ?? left;
      parents.push(sha256(`${left}:${right}`));
    }
    current = parents;
  }
  return current[0];
}
798
function computeBlockHash(previousHash, merkleRoot, date, version) {
  // Hash of the block header fields in fixed, colon-delimited order.
  const header = [version, date, previousHash, merkleRoot].join(":");
  return sha256(header);
}
801
// Build a fresh, empty ledger block for `date`, linked to `previousHash`.
// merkleRoot/blockHash start empty: the caller recomputes them after
// appending entries.
function createSyncableBlock(date, now, previousHash, author, gitCommit, chainHeight = 0) {
  return {
    version: LEDGER_VERSION,
    date,
    entries: [],
    previousHash,
    merkleRoot: "",
    blockHash: "",
    createdAt: now,
    updatedAt: now,
    author,
    gitCommit,
    chainHeight
  };
}
816
async function loadLedger(projectDir) {
  // Read the local ledger JSON under .trie/memory.
  // Any problem (missing file, bad JSON, non-array payload) yields [].
  const ledgerPath = join2(getTrieDirectory(projectDir), "memory", LEDGER_FILENAME);
  if (!existsSync3(ledgerPath)) {
    return [];
  }
  try {
    const raw = await readFile2(ledgerPath, "utf-8");
    const parsed = JSON.parse(raw);
    return Array.isArray(parsed) ? parsed : [];
  } catch {
    return [];
  }
}
828
async function getLedgerBlocks(workDir) {
  // Public read-only accessor for the locally stored chain.
  const dir = workDir || getWorkingDirectory(void 0, true);
  return loadLedger(dir);
}
832
async function saveLedger(blocks, projectDir) {
  // Atomically persist the full chain under .trie/memory.
  const target = join2(getTrieDirectory(projectDir), "memory", LEDGER_FILENAME);
  await atomicWriteJSON(target, blocks);
}
836
function sha256(input) {
  // Hex-encoded SHA-256 digest of the given string.
  const hasher = createHash("sha256");
  hasher.update(input);
  return hasher.digest("hex");
}
839
// Root directory of the git-shareable ledger storage for this project.
function getSharedLedgerDir(projectDir) {
  return join2(projectDir, SHARED_LEDGER_DIR);
}
842
// Directory holding uncompressed ("hot") per-date block files.
function getActiveBlocksDir(projectDir) {
  return join2(getSharedLedgerDir(projectDir), "active");
}
845
// Directory holding compressed monthly archives of old blocks.
function getArchivedBlocksDir(projectDir) {
  return join2(getSharedLedgerDir(projectDir), "archived");
}
848
// Path of the shared-ledger manifest file.
function getManifestPath(projectDir) {
  return join2(getSharedLedgerDir(projectDir), MANIFEST_FILENAME);
}
851
// Path of the per-checkout (non-shared) sync-state file under .trie/memory.
function getSyncStatePath(projectDir) {
  return join2(getTrieDirectory(projectDir), "memory", SYNC_STATE_FILENAME);
}
854
async function ensureSharedStorageStructure(projectDir) {
  // Make sure the shared ledger root plus its active/ and archived/
  // subdirectories exist. mkdir with { recursive: true } is idempotent.
  const sharedDir = getSharedLedgerDir(projectDir);
  await mkdir2(sharedDir, { recursive: true });
  // The two leaf directories are independent of each other — create them
  // in parallel instead of the original's three serial awaits.
  await Promise.all([
    mkdir2(getActiveBlocksDir(projectDir), { recursive: true }),
    mkdir2(getArchivedBlocksDir(projectDir), { recursive: true })
  ]);
}
862
async function loadManifest(projectDir) {
  // Read the shared-ledger manifest; null when absent or unreadable.
  const manifestPath = getManifestPath(projectDir);
  if (!existsSync3(manifestPath)) {
    return null;
  }
  try {
    const raw = await readFile2(manifestPath, "utf-8");
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
872
// Atomically persist the shared-ledger manifest.
async function saveManifest(manifest, projectDir) {
  const manifestPath = getManifestPath(projectDir);
  await atomicWriteJSON(manifestPath, manifest);
}
876
// Default manifest for a freshly initialized shared ledger: empty block
// lists/indexes plus the default compression policy.
// NOTE(review): `projectDir` is currently unused here; kept for call-site symmetry.
async function createDefaultManifest(projectDir) {
  const now = (/* @__PURE__ */ new Date()).toISOString();
  return {
    version: LEDGER_VERSION,
    created: now,
    lastSync: now,
    totalBlocks: 0,
    totalEntries: 0,
    activeBlocks: [],
    archivedBlocks: [],
    index: {
      byDate: {},
      byAuthor: {},
      bySeverity: {}
    },
    compressionConfig: {
      enabled: true,
      archiveAfterDays: 30,
      compressionLevel: 6,
      maxHotStorageSize: 50 * 1024 * 1024
      // 50MB
    }
  };
}
900
async function loadSyncState(projectDir) {
  // Read the per-checkout sync state; null when absent or unparsable.
  const statePath = getSyncStatePath(projectDir);
  if (!existsSync3(statePath)) {
    return null;
  }
  try {
    const raw = await readFile2(statePath, "utf-8");
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
910
// Persist sync state under .trie/memory, creating the directory if needed.
async function saveSyncState(syncState, projectDir) {
  const syncStatePath = getSyncStatePath(projectDir);
  const memoryDir = join2(getTrieDirectory(projectDir), "memory");
  await mkdir2(memoryDir, { recursive: true });
  await atomicWriteJSON(syncStatePath, syncState);
}
916
// Idempotently set up shared-ledger storage: directory tree, default
// manifest, and local sync state. Existing files are left untouched.
async function initializeSharedLedger(workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  await ensureSharedStorageStructure(projectDir);
  const existingManifest = await loadManifest(projectDir);
  if (!existingManifest) {
    const manifest = await createDefaultManifest(projectDir);
    await saveManifest(manifest, projectDir);
  }
  const existingSyncState = await loadSyncState(projectDir);
  if (!existingSyncState) {
    const now = (/* @__PURE__ */ new Date()).toISOString();
    const syncState = {
      lastSyncTimestamp: now,
      conflicts: [],
      localChanges: false,
      sharedChanges: false
    };
    await saveSyncState(syncState, projectDir);
  }
}
936
// Pull shared blocks and merge them into the local chain using the
// "timestamp" conflict strategy. Persists the merged chain and records
// the outcome (including conflicts) in sync state. Returns the merge result.
async function syncLedgerFromShared(workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  await initializeSharedLedger(projectDir);
  const manifest = await loadManifest(projectDir);
  const localBlocks = await loadLedger(projectDir);
  const sharedBlocks = await loadSharedBlocks(projectDir);
  // NOTE(review): this null-check could run right after loadManifest to
  // skip the two ledger reads above when the manifest is unreadable.
  if (!manifest) {
    throw new Error("Failed to load ledger manifest");
  }
  const mergeResult = await mergeChains(localBlocks, sharedBlocks, "timestamp");
  await saveLedger(mergeResult.mergedChain, projectDir);
  const syncState = {
    lastSyncTimestamp: (/* @__PURE__ */ new Date()).toISOString(),
    conflicts: mergeResult.conflicts,
    localChanges: false,
    sharedChanges: false
  };
  await saveSyncState(syncState, projectDir);
  return mergeResult;
}
956
// Write local blocks into shared storage (one JSON file per date) and
// refresh the manifest's indexes and counters. Only blocks that are new
// on disk or updated since the manifest's lastSync are written.
async function pushLedgerToShared(workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  await initializeSharedLedger(projectDir);
  const localBlocks = await loadLedger(projectDir);
  const manifest = await loadManifest(projectDir) || await createDefaultManifest(projectDir);
  const activeDir = getActiveBlocksDir(projectDir);
  for (const block of localBlocks) {
    const blockFilename = `${block.date}.json`;
    const blockPath = join2(activeDir, blockFilename);
    // ISO-8601 timestamps compare correctly as strings.
    if (!existsSync3(blockPath) || block.updatedAt > manifest.lastSync) {
      await atomicWriteJSON(blockPath, block);
      manifest.index.byDate[block.date] = `active/${blockFilename}`;
      if (block.author) {
        if (!manifest.index.byAuthor[block.author]) {
          manifest.index.byAuthor[block.author] = [];
        }
        if (!manifest.index.byAuthor[block.author].includes(`active/${blockFilename}`)) {
          manifest.index.byAuthor[block.author].push(`active/${blockFilename}`);
        }
      }
      if (!manifest.activeBlocks.includes(blockFilename)) {
        manifest.activeBlocks.push(blockFilename);
      }
    }
  }
  // Refresh aggregate counters from the authoritative local chain.
  manifest.lastSync = (/* @__PURE__ */ new Date()).toISOString();
  manifest.totalBlocks = manifest.activeBlocks.length + manifest.archivedBlocks.length;
  manifest.totalEntries = localBlocks.reduce((sum, block) => sum + block.entries.length, 0);
  await saveManifest(manifest, projectDir);
}
986
async function loadSharedBlocks(projectDir) {
  // Load every active block listed in the manifest, sorted by date.
  // Missing or unparsable block files are logged and skipped.
  const manifest = await loadManifest(projectDir);
  if (!manifest) return [];
  const blocks = [];
  const activeDir = getActiveBlocksDir(projectDir);
  for (const filename of manifest.activeBlocks) {
    const blockPath = join2(activeDir, filename);
    try {
      if (existsSync3(blockPath)) {
        const content = await readFile2(blockPath, "utf-8");
        const block = JSON.parse(content);
        blocks.push(block);
      }
    } catch (error) {
      // BUG FIX: the message used "$(unknown)", which is not JS interpolation
      // and was logged literally; interpolate the actual filename instead.
      console.warn(`Failed to load shared block ${filename}:`, error);
    }
  }
  return blocks.sort((a, b) => a.date.localeCompare(b.date));
}
1005
// Merge two block chains keyed by date. Same-date blocks are either
// union-merged (no conflict) or resolved via `strategy`; dates present on
// only one side are taken as-is. The result is re-sorted by date and
// chainHeight is renumbered. Returns { mergedChain, conflicts,
// resolutionStrategy, stats }.
async function mergeChains(localBlocks, remoteBlocks, strategy = "timestamp") {
  const conflicts = [];
  const mergedBlocks = /* @__PURE__ */ new Map();
  const stats = {
    localBlocks: localBlocks.length,
    remoteBlocks: remoteBlocks.length,
    mergedBlocks: 0,
    duplicatesRemoved: 0
  };
  // Index both sides by date for O(1) pairing.
  const localByDate = /* @__PURE__ */ new Map();
  for (const block of localBlocks) {
    localByDate.set(block.date, block);
  }
  const remoteByDate = /* @__PURE__ */ new Map();
  for (const block of remoteBlocks) {
    remoteByDate.set(block.date, block);
  }
  const allDates = /* @__PURE__ */ new Set([...localByDate.keys(), ...remoteByDate.keys()]);
  for (const date of allDates) {
    const localBlock = localByDate.get(date);
    const remoteBlock = remoteByDate.get(date);
    if (localBlock && remoteBlock) {
      const conflict = detectBlockConflict(localBlock, remoteBlock);
      if (conflict) {
        conflicts.push(conflict);
        const resolvedBlock = resolveConflict(conflict, strategy);
        if (resolvedBlock) {
          mergedBlocks.set(date, resolvedBlock);
        }
      } else {
        const mergedBlock = mergeBlockEntries(localBlock, remoteBlock);
        mergedBlocks.set(date, mergedBlock);
      }
    } else if (localBlock) {
      mergedBlocks.set(date, localBlock);
    } else if (remoteBlock) {
      mergedBlocks.set(date, remoteBlock);
    }
  }
  // Rebuild a date-ordered chain and renumber heights from 0.
  const resultBlocks = Array.from(mergedBlocks.values()).sort((a, b) => a.date.localeCompare(b.date)).map((block, index) => ({
    ...block,
    chainHeight: index
  }));
  stats.mergedBlocks = resultBlocks.length;
  stats.duplicatesRemoved = stats.localBlocks + stats.remoteBlocks - stats.mergedBlocks;
  return {
    mergedChain: resultBlocks,
    conflicts,
    resolutionStrategy: strategy,
    stats
  };
}
1057
function detectBlockConflict(localBlock, remoteBlock) {
  // Detect whether two same-date blocks disagree. Returns a conflict
  // descriptor ({ type, description, localBlock, remoteBlock }) or null
  // when the blocks can be merged cleanly.
  if (localBlock.blockHash !== remoteBlock.blockHash) {
    return {
      type: "hash_mismatch",
      description: `Different block content for date ${localBlock.date}`,
      localBlock,
      remoteBlock
    };
  }
  // Index remote entry hashes by entry id once. The original re-scanned the
  // remote entries for every local entry (O(n*m)) and also built two Sets
  // that were never used — both removed here.
  const remoteHashesById = new Map();
  for (const entry of remoteBlock.entries) {
    const hashes = remoteHashesById.get(entry.id);
    if (hashes) {
      hashes.push(entry.hash);
    } else {
      remoteHashesById.set(entry.id, [entry.hash]);
    }
  }
  // Conflict: some entry id exists on both sides with different content hashes.
  const hasConflictingEntries = localBlock.entries.some((localEntry) => {
    const remoteHashes = remoteHashesById.get(localEntry.id);
    return remoteHashes !== void 0 && remoteHashes.some((hash) => hash !== localEntry.hash);
  });
  if (hasConflictingEntries) {
    return {
      type: "duplicate_entry",
      description: `Conflicting entries found for date ${localBlock.date}`,
      localBlock,
      remoteBlock
    };
  }
  return null;
}
1084
function resolveConflict(conflict, strategy) {
  // Pick a winning block for a detected conflict according to the strategy.
  const { localBlock, remoteBlock } = conflict;
  if (!localBlock || !remoteBlock) {
    return null;
  }
  if (strategy === "longest") {
    // Prefer the block holding more entries; ties go to local.
    return localBlock.entries.length >= remoteBlock.entries.length ? localBlock : remoteBlock;
  }
  if (strategy === "timestamp") {
    // Prefer the most recently updated block; ties go to local.
    return localBlock.updatedAt >= remoteBlock.updatedAt ? localBlock : remoteBlock;
  }
  if (strategy === "manual") {
    // Keep local content but flag it for human resolution.
    return { ...localBlock, conflictResolved: false };
  }
  // Unknown strategy: fall back to local.
  return localBlock;
}
1099
function mergeBlockEntries(localBlock, remoteBlock) {
  // Union the entries of two non-conflicting same-date blocks (deduped by
  // entry hash; remote wins on hash collision, which is harmless since the
  // hash identifies the content) and recompute the block's hashes.
  const entryMap = /* @__PURE__ */ new Map();
  for (const entry of localBlock.entries) {
    entryMap.set(entry.hash, entry);
  }
  for (const entry of remoteBlock.entries) {
    entryMap.set(entry.hash, entry);
  }
  const mergedEntries = Array.from(entryMap.values());
  const now = (/* @__PURE__ */ new Date()).toISOString();
  const authors = [localBlock.author, remoteBlock.author].filter(Boolean);
  const combinedAuthor = authors.length > 1 ? authors.join("+") : authors[0] || "unknown";
  // Compute the merkle root once — the original recomputed it a second
  // time just for the block hash.
  const merkleRoot = computeMerkleRoot(mergedEntries.map((e) => e.hash));
  return {
    ...localBlock,
    entries: mergedEntries,
    author: combinedAuthor,
    updatedAt: now,
    merkleRoot,
    blockHash: computeBlockHash(
      localBlock.previousHash,
      merkleRoot,
      localBlock.date,
      localBlock.version
    )
  };
}
1125
// One-time upgrade of a pre-sync ("legacy") ledger: stamps author/gitCommit/
// chainHeight/syncedAt onto every block, pushes the result to shared storage,
// and writes a timestamped backup of the original file.
// Returns true when a migration actually ran, false otherwise.
async function migrateLegacyLedger(workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const legacyLedgerPath = join2(getTrieDirectory(projectDir), "memory", LEDGER_FILENAME);
  if (!existsSync3(legacyLedgerPath)) {
    return false;
  }
  try {
    const legacyBlocks = await loadLedger(projectDir);
    // A present `author` on the first block means the ledger is already
    // in the new format — nothing to migrate.
    if (legacyBlocks.length > 0 && legacyBlocks[0].author !== void 0) {
      return false;
    }
    console.log(`Migrating legacy ledger with ${legacyBlocks.length} blocks...`);
    const isRepo = await isGitRepo(projectDir);
    const lastCommit = isRepo ? await getLastCommit(projectDir) : null;
    const defaultAuthor = lastCommit?.author || "legacy-migration";
    const migratedBlocks = legacyBlocks.map((block, index) => ({
      ...block,
      version: LEDGER_VERSION,
      author: defaultAuthor,
      gitCommit: lastCommit?.hash,
      chainHeight: index,
      syncedAt: (/* @__PURE__ */ new Date()).toISOString()
    }));
    await initializeSharedLedger(projectDir);
    await saveLedger(migratedBlocks, projectDir);
    await pushLedgerToShared(projectDir);
    // Keep the pre-migration data recoverable.
    const backupPath = `${legacyLedgerPath}.backup.${Date.now()}`;
    await writeFile(backupPath, JSON.stringify(legacyBlocks, null, 2));
    console.log(`\u2713 Migration complete. Backup saved to ${backupPath}`);
    return true;
  } catch (error) {
    console.error("Failed to migrate legacy ledger:", error);
    return false;
  }
}
1160
async function detectLegacyLedger(workDir) {
  // A ledger is "legacy" when it exists, is non-empty, and its first
  // block predates the `author` field introduced by the sync format.
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const ledgerPath = join2(getTrieDirectory(projectDir), "memory", LEDGER_FILENAME);
  if (!existsSync3(ledgerPath)) {
    return false;
  }
  try {
    const chain = await loadLedger(projectDir);
    if (chain.length === 0) {
      return false;
    }
    return chain[0].author === void 0;
  } catch {
    return false;
  }
}
1173
async function getLedgerSyncStatus(workDir) {
  // Snapshot of sync health: initialization state, legacy detection,
  // block counts on both sides, and outstanding conflict count.
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  // All five reads are independent of each other — run them in parallel
  // instead of the original's serial awaits.
  const [hasLegacyLedger, manifest, syncState, localBlocks, sharedBlocks] = await Promise.all([
    detectLegacyLedger(projectDir),
    loadManifest(projectDir),
    loadSyncState(projectDir),
    loadLedger(projectDir),
    loadSharedBlocks(projectDir)
  ]);
  return {
    isInitialized: !!manifest,
    hasLegacyLedger,
    syncState,
    manifest,
    localBlocks: localBlocks.length,
    sharedBlocks: sharedBlocks.length,
    conflicts: syncState?.conflicts.length || 0
  };
}
1190
async function compressOldBlocks(workDir) {
  // Archive active block files older than the configured cutoff into
  // per-month gzip bundles under the archived directory, then update the
  // manifest. Returns { archived, sizeReduction } (reduction in percent).
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const manifest = await loadManifest(projectDir);
  if (!manifest || !manifest.compressionConfig.enabled) {
    return { archived: 0, sizeReduction: 0 };
  }
  const activeDir = getActiveBlocksDir(projectDir);
  const archivedDir = getArchivedBlocksDir(projectDir);
  const cutoffDate = /* @__PURE__ */ new Date();
  cutoffDate.setDate(cutoffDate.getDate() - manifest.compressionConfig.archiveAfterDays);
  let archivedCount = 0;
  let originalSize = 0;
  let compressedSize = 0;
  // Group expired block files by YYYY-MM so each month becomes one archive.
  const blocksByMonth = /* @__PURE__ */ new Map();
  for (const blockFile of manifest.activeBlocks) {
    const blockDate = blockFile.replace(".json", "");
    if (new Date(blockDate) < cutoffDate) {
      const monthKey = blockDate.slice(0, 7);
      if (!blocksByMonth.has(monthKey)) {
        blocksByMonth.set(monthKey, []);
      }
      blocksByMonth.get(monthKey).push(blockFile);
    }
  }
  for (const [monthKey, blockFiles] of blocksByMonth) {
    const archivePath = join2(archivedDir, `${monthKey}.tar.gz`);
    if (existsSync3(archivePath)) {
      continue; // this month was already archived
    }
    console.log(`Archiving ${blockFiles.length} blocks for ${monthKey}...`);
    const monthlyBlocks = [];
    for (const blockFile of blockFiles) {
      const blockPath = join2(activeDir, blockFile);
      try {
        const stats = await stat(blockPath);
        originalSize += stats.size;
        const content = await readFile2(blockPath, "utf-8");
        const block = JSON.parse(content);
        monthlyBlocks.push(block);
      } catch (error) {
        console.warn(`Failed to read block ${blockFile}:`, error);
      }
    }
    if (monthlyBlocks.length > 0) {
      const archiveData = JSON.stringify(monthlyBlocks);
      const tempPath = `${archivePath}.tmp`;
      // BUG FIX: pipeline() cannot take a bare Buffer as its source —
      // iterating a Buffer yields individual byte values (numbers), not
      // chunks, so the gzip stream rejects them at runtime. Wrap it in a
      // one-element array so the iterable source yields a single Buffer chunk.
      await pipeline(
        [Buffer.from(archiveData)],
        createGzip({ level: manifest.compressionConfig.compressionLevel }),
        createWriteStream(tempPath)
      );
      const compressedStats = await stat(tempPath);
      compressedSize += compressedStats.size;
      // Publish the finished archive, then drop the temp file.
      await writeFile(archivePath, await readFile2(tempPath));
      await unlink(tempPath);
      for (const blockFile of blockFiles) {
        await unlink(join2(activeDir, blockFile));
        const index = manifest.activeBlocks.indexOf(blockFile);
        if (index > -1) {
          manifest.activeBlocks.splice(index, 1);
        }
      }
      manifest.archivedBlocks.push(`${monthKey}.tar.gz`);
      archivedCount += blockFiles.length;
    }
  }
  if (archivedCount > 0) {
    await saveManifest(manifest, projectDir);
  }
  return {
    archived: archivedCount,
    sizeReduction: originalSize > 0 ? Math.round((originalSize - compressedSize) / originalSize * 100) : 0
  };
}
1266
async function getStorageStats(workDir) {
  // Report block counts, on-disk sizes, and an estimated compression ratio
  // for shared ledger storage. Returns all-zero stats when no manifest exists.
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const manifest = await loadManifest(projectDir);
  if (!manifest) {
    return {
      activeBlocks: 0,
      archivedBlocks: 0,
      activeSize: 0,
      archivedSize: 0,
      compressionRatio: 0,
      totalEntries: 0
    };
  }
  // Sum file sizes for a list of files in a directory; unreadable or
  // missing entries are skipped (best effort). Factored out of the two
  // duplicated loops in the original.
  const sumSizes = async (dir, files) => {
    let total = 0;
    for (const file of files) {
      const filePath = join2(dir, file);
      try {
        if (existsSync3(filePath)) {
          const stats = await stat(filePath);
          total += stats.size;
        }
      } catch {
      }
    }
    return total;
  };
  const activeSize = await sumSizes(getActiveBlocksDir(projectDir), manifest.activeBlocks);
  const archivedSize = await sumSizes(getArchivedBlocksDir(projectDir), manifest.archivedBlocks);
  // (Removed the original's unused `totalSize` local.)
  // Rough heuristic: assume archived JSON compressed ~5x.
  const estimatedUncompressed = archivedSize * 5;
  const compressionRatio = estimatedUncompressed > 0 ? Math.round((1 - archivedSize / estimatedUncompressed) * 100) : 0;
  return {
    activeBlocks: manifest.activeBlocks.length,
    archivedBlocks: manifest.archivedBlocks.length,
    activeSize,
    archivedSize,
    compressionRatio,
    totalEntries: manifest.totalEntries
  };
}
1315
async function shouldCompress(workDir) {
  // Decide whether archival compression should run: hot storage exceeds the
  // configured cap, or any active block file is older than the archive cutoff.
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const manifest = await loadManifest(projectDir);
  if (!manifest || !manifest.compressionConfig.enabled) {
    return false;
  }
  const stats = await getStorageStats(projectDir);
  const exceedsSize = stats.activeSize > manifest.compressionConfig.maxHotStorageSize;
  // Hoisted out of the callback: the cutoff is loop-invariant (the original
  // rebuilt the Date for every block file).
  const cutoffDate = /* @__PURE__ */ new Date();
  cutoffDate.setDate(cutoffDate.getDate() - manifest.compressionConfig.archiveAfterDays);
  const hasOldBlocks = manifest.activeBlocks.some((blockFile) => {
    const blockDate = blockFile.replace(".json", "");
    return new Date(blockDate) < cutoffDate;
  });
  return exceedsSize || hasOldBlocks;
}
1331
// Permanently delete the blocks with the given dates from the local ledger
// and shared storage. Refuses to run without explicit confirmation, and
// refuses when deletion would break the hash chain. Returns a
// { success, deletedBlocks, error?, warning? } summary.
async function deleteBlocks(blockDates, workDir, confirmDeletion = false) {
  if (!confirmDeletion) {
    return {
      success: false,
      deletedBlocks: 0,
      error: "Deletion not confirmed. This is a permanent operation that cannot be undone."
    };
  }
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const blocks = await loadLedger(projectDir);
  if (blocks.length === 0) {
    return {
      success: false,
      deletedBlocks: 0,
      error: "No blocks found in ledger"
    };
  }
  const blocksToDelete = blocks.filter((block) => blockDates.includes(block.date));
  const remainingBlocks = blocks.filter((block) => !blockDates.includes(block.date));
  if (blocksToDelete.length === 0) {
    return {
      success: false,
      deletedBlocks: 0,
      error: "No matching blocks found for the specified dates"
    };
  }
  // Refuse deletions that would leave a kept block referencing a removed one.
  const integrityIssue = checkChainIntegrityAfterDeletion(blocks, blockDates);
  if (integrityIssue) {
    return {
      success: false,
      deletedBlocks: 0,
      error: `Cannot delete blocks: ${integrityIssue}`
    };
  }
  try {
    // Re-link and re-hash the survivors before persisting.
    const repairedChain = repairChainAfterDeletion(remainingBlocks);
    await saveLedger(repairedChain, projectDir);
    await cleanupSharedStorage(blockDates, projectDir);
    return {
      success: true,
      deletedBlocks: blocksToDelete.length,
      warning: remainingBlocks.length === 0 ? "All blocks have been deleted. Ledger is now empty." : void 0
    };
  } catch (error) {
    return {
      success: false,
      deletedBlocks: 0,
      error: `Failed to delete blocks: ${error instanceof Error ? error.message : "Unknown error"}`
    };
  }
}
1382
// Permanently wipe the entire ledger (local chain plus shared storage).
// Requires explicit confirmation. Returns a { success, deletedBlocks,
// error?, warning? } summary.
async function deleteAllBlocks(workDir, confirmDeletion = false) {
  if (!confirmDeletion) {
    return {
      success: false,
      deletedBlocks: 0,
      error: "Deletion not confirmed. This will permanently delete ALL blocks and cannot be undone."
    };
  }
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const blocks = await loadLedger(projectDir);
  if (blocks.length === 0) {
    return {
      success: true,
      deletedBlocks: 0,
      warning: "Ledger is already empty"
    };
  }
  try {
    await saveLedger([], projectDir);
    await cleanupAllSharedStorage(projectDir);
    return {
      success: true,
      deletedBlocks: blocks.length,
      warning: "All blocks have been permanently deleted. Ledger is now empty."
    };
  } catch (error) {
    return {
      success: false,
      deletedBlocks: 0,
      error: `Failed to delete all blocks: ${error instanceof Error ? error.message : "Unknown error"}`
    };
  }
}
1415
function checkChainIntegrityAfterDeletion(blocks, datesToDelete) {
  // Verify that deleting the blocks with the given dates would not break
  // the hash chain. Returns an error message, or null when deletion is safe.
  // BUG FIX: sort a copy — Array#sort mutates in place and `blocks`
  // belongs to the caller.
  const sortedBlocks = [...blocks].sort((a, b) => a.date.localeCompare(b.date));
  const deleteIndices = /* @__PURE__ */ new Set();
  sortedBlocks.forEach((block, index) => {
    if (datesToDelete.includes(block.date)) {
      deleteIndices.add(index);
    }
  });
  if (deleteIndices.has(0) && sortedBlocks.length > 1) {
    return "Cannot delete the genesis block when other blocks exist. This would break the chain.";
  }
  for (let i = 1; i < sortedBlocks.length; i++) {
    const currentBlock = sortedBlocks[i];
    const previousBlock = sortedBlocks[i - 1];
    // A kept block directly after a deleted run must chain back to the
    // closest earlier surviving block, otherwise deletion severs the chain.
    if (!deleteIndices.has(i) && deleteIndices.has(i - 1)) {
      let lastValidIndex = i - 2;
      while (lastValidIndex >= 0 && deleteIndices.has(lastValidIndex)) {
        lastValidIndex--;
      }
      if (lastValidIndex >= 0) {
        const expectedPreviousHash = sortedBlocks[lastValidIndex].blockHash;
        if (currentBlock.previousHash !== expectedPreviousHash) {
          return `Deleting block ${previousBlock.date} would break the chain. Block ${currentBlock.date} references it.`;
        }
      }
    }
  }
  return null;
}
1444
function repairChainAfterDeletion(remainingBlocks) {
  // Re-link and re-hash the surviving blocks into a valid chain:
  // previousHash pointers, block hashes, chainHeight and updatedAt are all
  // rebuilt in date order.
  // BUG FIX: operate on copies — the original sorted the caller's array in
  // place and mutated the caller's block objects.
  if (remainingBlocks.length === 0) {
    return [];
  }
  const sorted = [...remainingBlocks].sort((a, b) => a.date.localeCompare(b.date));
  const now = (/* @__PURE__ */ new Date()).toISOString();
  let previousHash = GENESIS_HASH;
  return sorted.map((original, index) => {
    const block = { ...original };
    block.previousHash = previousHash;
    block.blockHash = computeBlockHash(
      block.previousHash,
      block.merkleRoot,
      block.date,
      block.version
    );
    block.chainHeight = index;
    block.updatedAt = now;
    previousHash = block.blockHash;
    return block;
  });
}
1464
// Remove deleted block files from shared storage and drop them from the
// manifest's active list and by-date index. File removal is best-effort;
// failures are ignored.
// NOTE(review): `archivedDir` is computed but unused — archives are not
// touched here.
async function cleanupSharedStorage(deletedDates, projectDir) {
  const manifest = await loadManifest(projectDir);
  if (!manifest) return;
  const activeDir = getActiveBlocksDir(projectDir);
  const archivedDir = getArchivedBlocksDir(projectDir);
  for (const date of deletedDates) {
    const blockFile = `${date}.json`;
    const blockPath = join2(activeDir, blockFile);
    if (existsSync3(blockPath)) {
      try {
        await unlink(blockPath);
      } catch {
      }
    }
    const index = manifest.activeBlocks.indexOf(blockFile);
    if (index > -1) {
      manifest.activeBlocks.splice(index, 1);
    }
    delete manifest.index.byDate[date];
  }
  manifest.totalBlocks = manifest.activeBlocks.length + manifest.archivedBlocks.length;
  await saveManifest(manifest, projectDir);
}
1487
// Reset shared storage: write a fresh default manifest and remove every
// active block file. All failures are intentionally swallowed — this is
// best-effort cleanup after a full ledger wipe.
async function cleanupAllSharedStorage(projectDir) {
  const sharedDir = getSharedLedgerDir(projectDir);
  if (existsSync3(sharedDir)) {
    try {
      const manifest = await createDefaultManifest(projectDir);
      await saveManifest(manifest, projectDir);
      const activeDir = getActiveBlocksDir(projectDir);
      if (existsSync3(activeDir)) {
        const files = await readdir(activeDir).catch(() => []);
        for (const file of files) {
          if (file.endsWith(".json")) {
            try {
              await unlink(join2(activeDir, file));
            } catch {
            }
          }
        }
      }
    } catch {
    }
  }
}
1509
+
1510
+ // src/memory/issue-store.ts
1511
// Persist a batch of review issues: dedupe within the batch by content
// hash, append to the daily log and the hash-chained ledger, then update
// the searchable index. Returns { stored, duplicates }.
async function storeIssues(issues, project, workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const memoryDir = join3(getTrieDirectory(projectDir), "memory");
  await mkdir3(memoryDir, { recursive: true });
  const stored = [];
  const now = (/* @__PURE__ */ new Date()).toISOString();
  // Intra-batch dedupe: identical issues within one call count as duplicates.
  const seenHashes = /* @__PURE__ */ new Set();
  let duplicates = 0;
  for (const issue of issues) {
    const hash = hashIssue(issue);
    if (seenHashes.has(hash)) {
      duplicates++;
      continue;
    }
    seenHashes.add(hash);
    const storedIssue = {
      id: issue.id,
      hash,
      severity: issue.severity,
      issue: issue.issue,
      fix: issue.fix,
      file: issue.file,
      line: issue.line,
      agent: issue.agent,
      category: issue.category,
      timestamp: now,
      project,
      resolved: false,
      resolvedAt: void 0
    };
    stored.push(storedIssue);
  }
  await appendToDailyLog(stored, projectDir);
  await appendIssuesToLedger(stored, projectDir);
  // The index applies cross-batch dedupe; anything it drops is also
  // reported to the caller as a duplicate.
  const dedupedCount = await updateIssueIndex(stored, projectDir);
  return { stored: dedupedCount, duplicates: duplicates + (stored.length - dedupedCount) };
}
1548
// BM25 keyword search over the stored issue index, with optional
// project/severity/agent filters. Resolved issues are excluded unless
// options.includeResolved is set. Returns up to `limit` scored matches
// of shape { issue, score, matchType: "bm25" }.
async function searchIssues(query, options = {}) {
  const projectDir = options.workDir || getWorkingDirectory(void 0, true);
  const limit = options.limit || 10;
  const allIssues = await loadIssueIndex(projectDir);
  if (allIssues.length === 0) {
    return [];
  }
  // Apply structural filters before building the text index.
  const filteredIssues = allIssues.filter((issue) => {
    if (options.project && issue.project !== options.project) return false;
    if (options.severity && !options.severity.includes(issue.severity)) return false;
    if (options.agent && issue.agent !== options.agent) return false;
    if (!options.includeResolved && issue.resolved) return false;
    return true;
  });
  if (filteredIssues.length === 0) {
    return [];
  }
  // Build an in-memory BM25 index over the surviving issues' text fields.
  const bm25 = new BM25Index();
  const issueMap = /* @__PURE__ */ new Map();
  for (const issue of filteredIssues) {
    const searchText = `${issue.issue} ${issue.fix} ${issue.file} ${issue.agent} ${issue.category || ""} ${issue.severity}`;
    bm25.addDocument({
      id: issue.id,
      text: searchText
    });
    issueMap.set(issue.id, issue);
  }
  const bm25Results = bm25.search(query, limit);
  return bm25Results.map((result) => ({
    issue: issueMap.get(result.id),
    score: result.score,
    matchType: "bm25"
  }));
}
1582
async function findSimilarIssues(issue, options = {}) {
  // Search memory for issues resembling the given one, excluding itself
  // (and optionally anything from the same file).
  const query = [issue.issue, issue.fix, issue.agent].join(" ");
  const searchOptions = {
    // Fetch extras so post-filtering can still fill the requested limit.
    limit: (options.limit || 5) + 5,
    includeResolved: true
  };
  if (options.workDir !== void 0) {
    searchOptions.workDir = options.workDir;
  }
  const matches = await searchIssues(query, searchOptions);
  const kept = matches.filter((match) => {
    if (match.issue.id === issue.id) return false;
    if (options.excludeSameFile && match.issue.file === issue.file) return false;
    return true;
  });
  return kept.slice(0, options.limit || 5);
}
1599
async function markIssueResolved(issueId, workDir) {
  // Flag a stored issue as resolved and persist the index.
  // Returns false when no issue with that id exists.
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const issues = await loadIssueIndex(projectDir);
  const target = issues.find((candidate) => candidate.id === issueId);
  if (!target) {
    return false;
  }
  target.resolved = true;
  target.resolvedAt = new Date().toISOString();
  await saveIssueIndex(issues, projectDir);
  return true;
}
1609
async function autoResolveIssues(newIssueHashes, scannedFiles, workDir) {
  // After a rescan, resolve open issues whose file was scanned again but
  // whose hash no longer appears. Issues in unscanned files, or whose hash
  // is still present, stay active. Returns { resolved, stillActive }.
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const issues = await loadIssueIndex(projectDir);
  // Normalize to forward slashes so Windows and POSIX paths compare equal.
  const rescanned = new Set(scannedFiles.map((file) => file.replace(/\\/g, "/")));
  const resolvedAt = new Date().toISOString();
  let resolved = 0;
  let stillActive = 0;
  for (const candidate of issues) {
    if (candidate.resolved) continue;
    const candidateFile = candidate.file.replace(/\\/g, "/");
    if (!rescanned.has(candidateFile) || newIssueHashes.has(candidate.hash)) {
      stillActive += 1;
      continue;
    }
    candidate.resolved = true;
    candidate.resolvedAt = resolvedAt;
    resolved += 1;
  }
  if (resolved > 0) {
    await saveIssueIndex(issues, projectDir);
  }
  return { resolved, stillActive };
}
1636
/**
 * Resolve all active goal-violation issues recorded for a given file and goal.
 * Matches issues whose agent is "goal-violation", whose (slash-normalized)
 * file equals the target, and whose text mentions the quoted goal description.
 * @param {string} file - File path the goal violation was reported against.
 * @param {string} goalDescription - Goal text embedded in the issue message.
 * @param {string} [workDir] - Project directory override.
 * @returns {Promise<number>} Number of issues resolved (index saved only if > 0).
 */
async function resolveGoalViolation(file, goalDescription, workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const index = await loadIssueIndex(projectDir);
  const now = new Date().toISOString();
  // Hoisted: these were recomputed on every loop iteration though invariant.
  const normalizedTarget = file.replace(/\\/g, "/");
  const goalMarker = `Goal "${goalDescription}"`;
  let resolvedCount = 0;
  for (const issue of index) {
    if (issue.resolved) continue;
    if (issue.agent !== "goal-violation") continue;
    const normalizedFile = issue.file.replace(/\\/g, "/");
    if (normalizedFile === normalizedTarget && issue.issue.includes(goalMarker)) {
      issue.resolved = true;
      issue.resolvedAt = now;
      resolvedCount++;
    }
  }
  if (resolvedCount > 0) {
    await saveIssueIndex(index, projectDir);
  }
  return resolvedCount;
}
1657
/**
 * Public wrapper around the internal issue fingerprint.
 * @param {object} issue - Issue record (reads issue, file, severity, agent).
 * @returns {string} 16-char hex hash identifying this issue pattern.
 */
function getIssueHash(issue) {
  const fingerprint = hashIssue(issue);
  return fingerprint;
}
1660
/**
 * Build an aggregate report of the persisted issue memory.
 * Counts totals, active/resolved splits, per-agent and per-severity breakdowns,
 * oldest/newest timestamps, dedup stats and capacity relative to the 10k cap.
 * @param {string} [workDir] - Project directory override.
 * @returns {Promise<object>} Stats object (see field initializers below).
 */
async function getMemoryStats(workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const index = await loadIssueIndex(projectDir);
  const historical = await getHistoricalInsights(projectDir);
  const MAX_ISSUES = 1e4;
  const uniqueHashes = new Set(index.map((entry) => entry.hash));
  const stats = {
    totalIssues: index.length,
    activeIssues: 0,
    issuesByAgent: {},
    issuesBySeverity: {},
    activeIssuesBySeverity: {},
    oldestIssue: void 0,
    newestIssue: void 0,
    resolvedCount: 0,
    historicalIssues: historical.totalHistoricalIssues,
    improvementTrend: historical.improvementTrend,
    capacityInfo: {
      current: index.length,
      max: MAX_ISSUES,
      percentFull: Math.round(index.length / MAX_ISSUES * 100),
      isAtCap: index.length >= MAX_ISSUES
    },
    deduplicationStats: {
      duplicatesAvoided: index.length - uniqueHashes.size,
      uniquePatterns: uniqueHashes.size
    }
  };
  for (const entry of index) {
    const byAgent = stats.issuesByAgent;
    byAgent[entry.agent] = (byAgent[entry.agent] || 0) + 1;
    const bySeverity = stats.issuesBySeverity;
    bySeverity[entry.severity] = (bySeverity[entry.severity] || 0) + 1;
    if (entry.resolved) {
      stats.resolvedCount += 1;
    } else {
      stats.activeIssues += 1;
      const activeBySeverity = stats.activeIssuesBySeverity;
      activeBySeverity[entry.severity] = (activeBySeverity[entry.severity] || 0) + 1;
    }
  }
  if (index.length > 0) {
    // Sort a copy chronologically; the endpoints give oldest/newest.
    const byAge = [...index].sort(
      (a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime()
    );
    const oldest = byAge[0]?.timestamp;
    if (oldest !== void 0) {
      stats.oldestIssue = oldest;
    }
    const newest = byAge[byAge.length - 1]?.timestamp;
    if (newest !== void 0) {
      stats.newestIssue = newest;
    }
  }
  return stats;
}
1713
/**
 * List issues recorded within the last N days, newest first.
 * Resolved issues are excluded unless `options.includeResolved` is true.
 * @param {object} [options] - { workDir?, limit? (default 20), daysBack? (default 7), includeResolved? }
 * @returns {Promise<Array>} Up to `limit` issues sorted by timestamp descending.
 */
async function getRecentIssues(options = {}) {
  const projectDir = options.workDir || getWorkingDirectory(void 0, true);
  const index = await loadIssueIndex(projectDir);
  const limit = options.limit || 20;
  const daysBack = options.daysBack || 7;
  const includeResolved = options.includeResolved ?? false;
  const cutoff = new Date();
  cutoff.setDate(cutoff.getDate() - daysBack);
  const isWanted = (entry) => {
    if (new Date(entry.timestamp) < cutoff) return false;
    return includeResolved || !entry.resolved;
  };
  return index
    .filter(isWanted)
    .sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime())
    .slice(0, limit);
}
1727
/**
 * Purge stored issues according to a strategy and persist the survivors.
 * Strategies:
 *  - "smart":    keep issues that are recent (< 30 days), important
 *                (critical/high) or still unresolved.
 *  - "resolved": drop resolved issues.
 *  - "old":      drop issues older than `options.daysOld` (default 90) days.
 *  - "all":      drop everything.
 * @param {string} strategy - One of "smart" | "resolved" | "old" | "all".
 * @param {object} [options] - { workDir?: string, daysOld?: number }
 * @returns {Promise<{removed: number, remaining: number, strategy: string}>}
 */
async function purgeIssues(strategy, options = {}) {
  const projectDir = options.workDir || getWorkingDirectory(void 0, true);
  const index = await loadIssueIndex(projectDir);
  const originalCount = index.length;
  let remaining = [];
  switch (strategy) {
    case "smart": {
      const thirtyDaysAgo = new Date();
      thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30);
      remaining = index.filter((i) => {
        const isRecent = new Date(i.timestamp) >= thirtyDaysAgo;
        const isImportant = ["critical", "high"].includes(i.severity);
        const isUnresolved = !i.resolved;
        return isRecent || isImportant || isUnresolved;
      });
      break;
    }
    case "resolved":
      remaining = index.filter((i) => !i.resolved);
      break;
    case "old": {
      const daysOld = options.daysOld || 90;
      const cutoffDate = new Date();
      cutoffDate.setDate(cutoffDate.getDate() - daysOld);
      remaining = index.filter((i) => new Date(i.timestamp) >= cutoffDate);
      break;
    }
    case "all":
      remaining = [];
      break;
    default:
      // Defensive fix: an unrecognized strategy previously fell through with
      // `remaining = []`, silently wiping the entire index like "all".
      // Treat unknown strategies as a no-op purge instead.
      remaining = index;
      break;
  }
  await saveIssueIndex(remaining, projectDir);
  return {
    removed: originalCount - remaining.length,
    remaining: remaining.length,
    strategy
  };
}
1763
/**
 * List daily markdown issue logs (YYYY-MM-DD.md) from the memory directory,
 * newest date first. Any filesystem failure yields an empty list.
 * @param {string} [workDir] - Project directory override.
 * @returns {Promise<string[]>} Log filenames sorted descending by date.
 */
async function getDailyLogs(workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const memoryDir = join3(getTrieDirectory(projectDir), "memory");
  try {
    if (!existsSync4(memoryDir)) return [];
    const entries = await readdir2(memoryDir);
    const datePattern = /^\d{4}-\d{2}-\d{2}\.md$/;
    return entries.filter((name) => datePattern.test(name)).sort().reverse();
  } catch {
    return [];
  }
}
1774
/**
 * Append markdown entries for the given issues to today's daily log file,
 * creating the file with a "# Issue Log: <date>" header if needed.
 * Issue text is truncated to 80 chars and fix text to 200 chars.
 * @param {Array} issues - Issue records to log (reads severity, issue, file, line, agent, fix).
 * @param {string} projectDir - Project directory whose memory dir holds the logs.
 * @returns {Promise<void>}
 */
async function appendToDailyLog(issues, projectDir) {
  const memoryDir = join3(getTrieDirectory(projectDir), "memory");
  const today = new Date().toISOString().split("T")[0];
  const logPath = join3(memoryDir, `${today}.md`);
  const header = `# Issue Log: ${today}\n\n`;
  let content;
  try {
    content = existsSync4(logPath) ? await readFile3(logPath, "utf-8") : header;
  } catch {
    // Unreadable existing log: start over with a fresh header.
    content = header;
  }
  const time = new Date().toTimeString().split(" ")[0];
  const formatEntry = (entry) => {
    const issueSummary = `${entry.issue.slice(0, 80)}${entry.issue.length > 80 ? "..." : ""}`;
    const fixSummary = `${entry.fix.slice(0, 200)}${entry.fix.length > 200 ? "..." : ""}`;
    const location = `\`${entry.file}\`${entry.line ? `:${entry.line}` : ""}`;
    return [
      `## [${time}] ${entry.severity.toUpperCase()}: ${issueSummary}`,
      `- **File:** ${location}`,
      `- **Agent:** ${entry.agent}`,
      `- **Fix:** ${fixSummary}`,
      ""
    ].join("\n");
  };
  content += issues.map(formatEntry).join("\n") + "\n";
  await writeFile2(logPath, content);
}
1803
/**
 * Load and validate the persisted issue index (memory/issues.json).
 * On schema-validation failure it attempts recovery from the most recent
 * backup; on any unrecoverable or I/O failure it returns an empty index.
 * @param {string} projectDir - Project directory containing the .trie folder.
 * @returns {Promise<Array>} Validated issue records, or [] when unavailable.
 */
async function loadIssueIndex(projectDir) {
  const indexPath = join3(getTrieDirectory(projectDir), "memory", "issues.json");
  try {
    if (existsSync4(indexPath)) {
      const raw = await readFile3(indexPath, "utf-8");
      const parsed = safeParseAndValidate(raw, IssueIndexSchema);
      if (parsed.success) {
        return parsed.data;
      }
      console.error(` Issue index corrupted: ${parsed.error}`);
      const backupManager = new BackupManager(indexPath);
      if (await backupManager.recoverFromBackup()) {
        console.error(" \u2705 Recovered from backup");
        const restoredRaw = await readFile3(indexPath, "utf-8");
        const restoredParsed = safeParseAndValidate(restoredRaw, IssueIndexSchema);
        if (restoredParsed.success) {
          return restoredParsed.data;
        }
      }
      console.error(" No valid backup found, starting fresh");
    }
  } catch {
    // Best-effort: any I/O error means "no usable index yet".
  }
  return [];
}
1828
/**
 * Merge new issues into the persisted index, skipping hash-duplicates.
 * Above 500 entries the index is compacted (issues older than 30 days are
 * summarized away when at least 100 qualify); above 10k it is score-pruned.
 * @param {Array} newIssues - Candidate issues (deduped by `hash`).
 * @param {string} projectDir - Project directory holding the memory store.
 * @returns {Promise<number>} Count of genuinely new issues added.
 */
async function updateIssueIndex(newIssues, projectDir) {
  const memoryDir = join3(getTrieDirectory(projectDir), "memory");
  await mkdir3(memoryDir, { recursive: true });
  let issues = await loadIssueIndex(projectDir);
  const knownHashes = new Set(issues.map((entry) => entry.hash));
  const freshIssues = newIssues.filter((entry) => !knownHashes.has(entry.hash));
  const dedupedCount = freshIssues.length;
  issues = [...issues, ...freshIssues];
  if (issues.length > 500) {
    const { summary, remaining } = await compactOldIssues(issues, {
      keepDays: 30,
      minIssuesToCompact: 100
    });
    if (summary) {
      await saveCompactedSummary(summary, projectDir);
      issues = remaining;
    }
  }
  if (issues.length > 1e4) {
    issues = intelligentPrune(issues, 1e4);
  }
  await saveIssueIndex(issues, projectDir);
  return dedupedCount;
}
1852
/**
 * Keep the `targetCount` highest-value issues, scored by recency, severity
 * and resolution state: newer issues score up to 100 (decaying 2 pts/day),
 * severities add a fixed weight (critical 100 ... info 5, unknown 10), and
 * resolved issues take a flat -50 penalty.
 * @param {Array} issues - Issue records with timestamp/severity/resolved.
 * @param {number} targetCount - Maximum number of issues to retain.
 * @returns {Array} The best-scoring issues, highest score first.
 */
function intelligentPrune(issues, targetCount) {
  const severityWeight = {
    critical: 100,
    high: 50,
    moderate: 20,
    low: 10,
    info: 5
  };
  const MS_PER_DAY = 1e3 * 60 * 60 * 24;
  const scoreOf = (issue) => {
    const ageInDays = (Date.now() - new Date(issue.timestamp).getTime()) / MS_PER_DAY;
    const recencyScore = Math.max(0, 100 - ageInDays * 2);
    const severityScore = severityWeight[issue.severity] || 10;
    const resolvedPenalty = issue.resolved ? -50 : 0;
    return recencyScore + severityScore + resolvedPenalty;
  };
  return issues
    .map((issue) => ({ issue, score: scoreOf(issue) }))
    .sort((a, b) => b.score - a.score)
    .slice(0, targetCount)
    .map((entry) => entry.issue);
}
1872
/**
 * Persist the issue index to memory/issues.json, creating the memory
 * directory if needed and snapshotting the previous file first so a
 * corrupted write can be recovered by loadIssueIndex.
 * @param {Array} issues - Full issue index to write.
 * @param {string} projectDir - Project directory holding the memory store.
 * @returns {Promise<void>}
 */
async function saveIssueIndex(issues, projectDir) {
  const memoryDir = join3(getTrieDirectory(projectDir), "memory");
  await mkdir3(memoryDir, { recursive: true });
  const indexPath = join3(memoryDir, "issues.json");
  // Back up the current index before the atomic overwrite.
  const backupManager = new BackupManager(indexPath);
  await backupManager.createBackup();
  await atomicWriteJSON(indexPath, issues);
}
1880
/**
 * Compute a stable 16-char hex fingerprint for deduplicating issues,
 * derived from the issue text, file, severity and agent ("|"-joined),
 * hashed with SHA-256 and truncated.
 * @param {object} issue - Issue record (reads issue, file, severity, agent).
 * @returns {string} First 16 hex chars of the SHA-256 digest.
 */
function hashIssue(issue) {
  const fingerprint = `${issue.issue}|${issue.file}|${issue.severity}|${issue.agent}`;
  const digest = createHash2("sha256").update(fingerprint).digest("hex");
  return digest.slice(0, 16);
}
1884
+
1885
+ export {
1886
+ getHistoricalInsights,
1887
+ formatAuditLog,
1888
+ getAuditStatistics,
1889
+ getRecentAuditLogs,
1890
+ getSkillAuditLogs,
1891
+ runShellCommandSync,
1892
+ getRecentCommits,
1893
+ getStagedChanges,
1894
+ getUncommittedChanges,
1895
+ getGitChangedFiles,
1896
+ getDiff,
1897
+ getWorkingTreeDiff,
1898
+ isGitRepo,
1899
+ getChangedFilesSinceTimestamp,
1900
+ verifyLedger,
1901
+ getLedgerBlocks,
1902
+ initializeSharedLedger,
1903
+ syncLedgerFromShared,
1904
+ pushLedgerToShared,
1905
+ migrateLegacyLedger,
1906
+ detectLegacyLedger,
1907
+ getLedgerSyncStatus,
1908
+ compressOldBlocks,
1909
+ getStorageStats,
1910
+ shouldCompress,
1911
+ deleteBlocks,
1912
+ deleteAllBlocks,
1913
+ storeIssues,
1914
+ searchIssues,
1915
+ findSimilarIssues,
1916
+ markIssueResolved,
1917
+ autoResolveIssues,
1918
+ resolveGoalViolation,
1919
+ getIssueHash,
1920
+ getMemoryStats,
1921
+ getRecentIssues,
1922
+ purgeIssues,
1923
+ getDailyLogs
1924
+ };
1925
+ //# sourceMappingURL=chunk-62POBLFC.js.map