@snapback/cli 1.0.11 → 1.1.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -9
- package/dist/{analysis-Z53F5FT2.js → analysis-C6XVLBAL.js} +3 -3
- package/dist/{analysis-Z53F5FT2.js.map → analysis-C6XVLBAL.js.map} +1 -1
- package/dist/{chunk-SVJ67PPQ.js → chunk-2TOJVUVJ.js} +296 -33
- package/dist/chunk-2TOJVUVJ.js.map +1 -0
- package/dist/chunk-5EQLSU5B.js +385 -0
- package/dist/chunk-5EQLSU5B.js.map +1 -0
- package/dist/{chunk-YOVA65PS.js → chunk-A3TUM7U4.js} +320 -63
- package/dist/chunk-A3TUM7U4.js.map +1 -0
- package/dist/{chunk-ISVRGBWT.js → chunk-LEXNOXPV.js} +6030 -632
- package/dist/chunk-LEXNOXPV.js.map +1 -0
- package/dist/{chunk-G7QXHNGB.js → chunk-OJNDAPC2.js} +41 -15
- package/dist/chunk-OJNDAPC2.js.map +1 -0
- package/dist/{chunk-NKBZIXCN.js → chunk-Q5XZ3DCB.js} +5 -5
- package/dist/{chunk-NKBZIXCN.js.map → chunk-Q5XZ3DCB.js.map} +1 -1
- package/dist/chunk-QLCHTUT5.js +1067 -0
- package/dist/chunk-QLCHTUT5.js.map +1 -0
- package/dist/dist-D2SHOZMS.js +8 -0
- package/dist/{dist-7UKXVKH3.js.map → dist-D2SHOZMS.js.map} +1 -1
- package/dist/{dist-7UKXVKH3.js → dist-L76VXYJ5.js} +3 -3
- package/dist/{dist-QFS5YG5L.js.map → dist-L76VXYJ5.js.map} +1 -1
- package/dist/dist-RPM72FHJ.js +5 -0
- package/dist/{dist-WKLJSPJT.js.map → dist-RPM72FHJ.js.map} +1 -1
- package/dist/index.js +30953 -15593
- package/dist/index.js.map +1 -1
- package/dist/learning-pruner-YSZSOOOC.js +7 -0
- package/dist/learning-pruner-YSZSOOOC.js.map +1 -0
- package/dist/{secure-credentials-6UMEU22H.js → secure-credentials-A4QHHOE2.js} +14 -6
- package/dist/secure-credentials-A4QHHOE2.js.map +1 -0
- package/dist/{snapback-dir-T3CRQRY6.js → snapback-dir-6QUSO6Y3.js} +3 -3
- package/dist/{snapback-dir-T3CRQRY6.js.map → snapback-dir-6QUSO6Y3.js.map} +1 -1
- package/dist/storage-H366UNAR.js +6 -0
- package/dist/storage-H366UNAR.js.map +1 -0
- package/package.json +8 -9
- package/dist/chunk-G7QXHNGB.js.map +0 -1
- package/dist/chunk-ISVRGBWT.js.map +0 -1
- package/dist/chunk-SVJ67PPQ.js.map +0 -1
- package/dist/chunk-YOVA65PS.js.map +0 -1
- package/dist/dist-QFS5YG5L.js +0 -5
- package/dist/dist-WKLJSPJT.js +0 -8
- package/dist/secure-credentials-6UMEU22H.js.map +0 -1
|
@@ -0,0 +1,385 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { StateStore } from './chunk-QLCHTUT5.js';
|
|
3
|
+
import { __name } from './chunk-BW7RALUZ.js';
|
|
4
|
+
import { existsSync } from 'fs';
|
|
5
|
+
import { mkdir, writeFile } from 'fs/promises';
|
|
6
|
+
import { join } from 'path';
|
|
7
|
+
|
|
8
|
+
var AutomatedLearningPruner = class {
  static {
    __name(this, "AutomatedLearningPruner");
  }
  // Resolved configuration; every optional field is defaulted in the constructor.
  config;
  // Persistent store for violations/learnings under <workspaceRoot>/.snapback.
  stateStore;
  /**
   * @param {object} config
   * @param {string} config.workspaceRoot  Absolute path of the workspace.
   * @param {boolean} [config.dryRun=false]  Report what would change without persisting.
   * @param {number} [config.maxAgeDays=90]  Age after which unused items count as stale.
   * @param {number} [config.minUsageCount=3]  Usage count that earns the full usage score.
   * @param {string} [config.archiveDir=".snapback/archive"]  Relative to workspaceRoot.
   */
  constructor(config) {
    this.config = {
      workspaceRoot: config.workspaceRoot,
      dryRun: config.dryRun ?? false,
      maxAgeDays: config.maxAgeDays ?? 90,
      minUsageCount: config.minUsageCount ?? 3,
      archiveDir: config.archiveDir ?? ".snapback/archive"
    };
    this.stateStore = new StateStore({
      snapbackDir: join(this.config.workspaceRoot, ".snapback")
    });
  }
  /**
   * Initialize pruner (loads state from disk).
   */
  async initialize() {
    await this.stateStore.load();
  }
  /**
   * Prune stale violations (file existence + pattern validation).
   *
   * Validates:
   * 1. File referenced in violation still exists
   * 2. Pattern mentioned in violation still exists in code
   *
   * Stale violations are archived to a JSONL file and removed from the
   * store, unless dryRun is set (then they are only counted).
   */
  async pruneStaleViolations() {
    const violations = this.stateStore.getViolations();
    const staleViolations = [];
    for (const violation of violations) {
      const filePath = join(this.config.workspaceRoot, violation.file);
      if (!existsSync(filePath)) {
        staleViolations.push(violation);
        continue;
      }
      const patternExists = await this.checkPatternExists(violation);
      if (!patternExists) {
        staleViolations.push(violation);
      }
    }
    const archivedFiles = [];
    if (!this.config.dryRun && staleViolations.length > 0) {
      // Archive first so nothing is lost if removal/save fails midway.
      const archivePath = await this.archiveViolations(staleViolations);
      archivedFiles.push(archivePath);
      for (const violation of staleViolations) {
        this.stateStore.removeViolation(violation.id);
      }
      await this.stateStore.save();
    }
    return {
      totalChecked: violations.length,
      staleCount: staleViolations.length,
      archivedCount: this.config.dryRun ? 0 : staleViolations.length,
      archivedFiles,
      dryRun: this.config.dryRun
    };
  }
  /**
   * Update learning confidence scores.
   *
   * Scoring formula:
   * - Age score:   max(0, 1 - daysSinceCreated / maxAgeDays) * 0.3
   * - Usage score: min(usageCount / minUsageCount, 1) * 0.7
   * - Final: ageScore + usageScore (clamped to 0-1)
   */
  async updateLearningScores() {
    const learnings = this.stateStore.getLearnings();
    let updatedCount = 0;
    let totalConfidence = 0;
    let lowConfidenceCount = 0;
    const now = Date.now();
    const MS_PER_DAY = 1e3 * 60 * 60 * 24;
    for (const learning of learnings) {
      const createdAt = new Date(learning.createdAt).getTime();
      const daysSinceCreated = (now - createdAt) / MS_PER_DAY;
      const ageScore = Math.max(0, 1 - daysSinceCreated / this.config.maxAgeDays) * 0.3;
      const usageCount = (learning.accessCount || 0) + (learning.appliedCount || 0);
      const useScore = Math.min(usageCount / this.config.minUsageCount, 1) * 0.7;
      const newScore = Math.max(0, Math.min(1, ageScore + useScore));
      const currentScore = learning.relevanceScore ?? 1;
      // Persist when the score moved meaningfully, or when the learning still
      // carries the default score of 1 (i.e. it has never been scored).
      if (Math.abs(newScore - currentScore) > 1e-3 || currentScore === 1) {
        this.stateStore.updateLearning(learning.id, {
          relevanceScore: newScore
        });
        updatedCount++;
      }
      totalConfidence += newScore;
      if (newScore < 0.3) {
        lowConfidenceCount++;
      }
    }
    if (updatedCount > 0 && !this.config.dryRun) {
      await this.stateStore.save();
    }
    return {
      totalScored: learnings.length,
      updatedCount,
      avgConfidence: learnings.length > 0 ? totalConfidence / learnings.length : 0,
      lowConfidenceCount
    };
  }
  /**
   * Deduplicate learnings (merge similar entries).
   *
   * Similarity algorithm:
   * - Exact type match
   * - Normalized Levenshtein similarity of trigger+action >= 0.6
   *   (see areSimilarLearnings)
   * - Merge: keep the entry with the highest usage count as primary and
   *   fold the duplicates' keywords into it. Duplicates are counted as
   *   merged but are not removed from the store.
   */
  async deduplicateLearnings() {
    const learnings = this.stateStore.getLearnings();
    const groups = this.findDuplicateGroups(learnings);
    let mergedCount = 0;
    for (const group of groups) {
      if (group.length < 2) {
        continue;
      }
      // Highest combined usage first; that entry becomes the primary.
      const sorted = group.sort((a, b) => {
        const aUsage = (a.accessCount || 0) + (a.appliedCount || 0);
        const bUsage = (b.accessCount || 0) + (b.appliedCount || 0);
        return bUsage - aUsage;
      });
      const primary = sorted[0];
      const duplicates = sorted.slice(1);
      if (!this.config.dryRun) {
        const combinedKeywords = /* @__PURE__ */ new Set(primary.keywords || []);
        for (const dup of duplicates) {
          for (const kw of dup.keywords || []) {
            combinedKeywords.add(kw);
          }
        }
        this.stateStore.updateLearning(primary.id, {
          keywords: Array.from(combinedKeywords)
        });
      }
      mergedCount += duplicates.length;
    }
    if (mergedCount > 0 && !this.config.dryRun) {
      await this.stateStore.save();
    }
    return {
      totalChecked: learnings.length,
      duplicateGroups: groups.length,
      mergedCount,
      dryRun: this.config.dryRun
    };
  }
  /**
   * Archive stale learnings and violations.
   *
   * TWO-PHASE DECAY LIFECYCLE (consolidated from LearningGCService):
   * Phase 1: Archive (30d unused, usageCount <3) - set archived flag in StateStore
   * Phase 2: Delete (90d archived) - permanently remove from StateStore
   *
   * Archives:
   * - Learnings with relevanceScore < 0.3
   * - Learnings older than maxAgeDays with no usage (file-based fallback for migration)
   */
  async archiveStaleItems() {
    const learnings = this.stateStore.getLearnings();
    const staleLearnings = [];
    const now = Date.now();
    const MS_PER_DAY = 1e3 * 60 * 60 * 24;
    for (const learning of learnings) {
      if (learning.archived) {
        continue; // already in Phase 1
      }
      const score = learning.relevanceScore ?? 1;
      const createdAt = new Date(learning.createdAt).getTime();
      const daysSinceCreated = (now - createdAt) / MS_PER_DAY;
      const usageCount = (learning.accessCount || 0) + (learning.appliedCount || 0);
      if (score < 0.3 || (daysSinceCreated > this.config.maxAgeDays && usageCount === 0)) {
        staleLearnings.push(learning);
      }
    }
    const archiveDir = join(this.config.workspaceRoot, this.config.archiveDir);
    let archivedLearnings = 0;
    // Violation archiving is handled by pruneStaleViolations(); the field is
    // kept at 0 here for backward compatibility of the result shape.
    const archivedViolations = 0;
    if (!this.config.dryRun && staleLearnings.length > 0) {
      await mkdir(archiveDir, {
        recursive: true
      });
      for (const learning of staleLearnings) {
        const success = this.stateStore.archiveLearning(learning.id);
        if (success) {
          archivedLearnings++;
        }
      }
      // JSONL snapshot so archived entries survive even after a later
      // permanent deletion from the store.
      const archivePath = join(archiveDir, `learnings_${Date.now()}.jsonl`);
      const content = staleLearnings.map((l) => JSON.stringify(l)).join("\n");
      await writeFile(archivePath, content, "utf-8");
      if (archivedLearnings > 0) {
        await this.stateStore.save();
      }
    }
    return {
      archived: {
        learnings: this.config.dryRun ? staleLearnings.length : archivedLearnings,
        violations: this.config.dryRun ? 0 : archivedViolations
      },
      archivePath: archiveDir,
      dryRun: this.config.dryRun
    };
  }
  /**
   * Delete permanently archived learnings (Phase 2 of two-phase decay).
   *
   * Deletes learnings that have been:
   * - Archived for > 90 days (default)
   * - Confirmed as no longer relevant
   *
   * Safety: Requires explicit call, not part of default archive flow
   */
  async deletePermanentlyArchived() {
    const learnings = this.stateStore.getLearnings();
    const now = Date.now();
    const MS_PER_DAY = 1e3 * 60 * 60 * 24;
    const deleteThresholdDays = 90;
    const deleteCandidates = [];
    for (const learning of learnings) {
      // Only entries that completed Phase 1 (flag + timestamp) qualify.
      if (!learning.archived || !learning.archivedAt) {
        continue;
      }
      const archivedAt = new Date(learning.archivedAt).getTime();
      const daysSinceArchived = (now - archivedAt) / MS_PER_DAY;
      if (daysSinceArchived > deleteThresholdDays) {
        deleteCandidates.push(learning);
      }
    }
    let deletedCount = 0;
    if (!this.config.dryRun && deleteCandidates.length > 0) {
      for (const learning of deleteCandidates) {
        const success = this.stateStore.deleteLearning(learning.id);
        if (success) {
          deletedCount++;
        }
      }
      if (deletedCount > 0) {
        await this.stateStore.save();
      }
    }
    return {
      deletedCount: this.config.dryRun ? deleteCandidates.length : deletedCount,
      dryRun: this.config.dryRun
    };
  }
  // -------------------------------------------------------------------------
  // PRIVATE HELPERS
  // -------------------------------------------------------------------------
  /**
   * Check if violation pattern still exists in code.
   * Unknown violation types conservatively return true (never pruned on
   * pattern grounds).
   */
  async checkPatternExists(violation) {
    const filePath = join(this.config.workspaceRoot, violation.file);
    switch (violation.type) {
      case "silent_catch":
      case "silent-error-swallowing":
        return this.checkRegexInFile(filePath, /catch\s*\([^)]*\)\s*\{\s*\}/);
      case "hash-duplication":
        return this.checkRegexInFile(filePath, /createHash\s*\(\s*['"]sha256['"]\s*\)/);
      case "missing_defensive_check":
      case "missing-null-check":
        return this.checkRegexInFile(filePath, /\.(map|filter|forEach)\s*\(/);
      case "dead_code":
      case "unused_constant":
        return true;
      default:
        return true;
    }
  }
  /**
   * Check if regex pattern exists in file.
   * Unreadable/missing files report false (pattern treated as gone).
   */
  async checkRegexInFile(filePath, pattern) {
    try {
      const { readFile } = await import('fs/promises');
      const content = await readFile(filePath, "utf-8");
      return pattern.test(content);
    } catch {
      return false;
    }
  }
  /**
   * Find duplicate learning groups (greedy pairwise clustering around the
   * first unprocessed entry). Only groups with >= 2 members are returned.
   */
  findDuplicateGroups(learnings) {
    const groups = [];
    const processed = /* @__PURE__ */ new Set();
    for (let i = 0; i < learnings.length; i++) {
      if (processed.has(learnings[i].id)) {
        continue;
      }
      const group = [learnings[i]];
      processed.add(learnings[i].id);
      for (let j = i + 1; j < learnings.length; j++) {
        if (processed.has(learnings[j].id)) {
          continue;
        }
        if (this.areSimilarLearnings(learnings[i], learnings[j])) {
          group.push(learnings[j]);
          processed.add(learnings[j].id);
        }
      }
      if (group.length > 1) {
        groups.push(group);
      }
    }
    return groups;
  }
  /**
   * Check if two learnings are similar: identical type AND normalized
   * Levenshtein similarity of their "trigger action" text >= 0.6.
   */
  areSimilarLearnings(a, b) {
    if (a.type !== b.type) {
      return false;
    }
    const aText = `${a.trigger} ${a.action}`.toLowerCase();
    const bText = `${b.trigger} ${b.action}`.toLowerCase();
    const distance = this.levenshteinDistance(aText, bText);
    // maxLen >= 1 always: the template literal inserts a space even when
    // trigger/action are empty, so the division is safe.
    const maxLen = Math.max(aText.length, bText.length);
    const similarity = 1 - distance / maxLen;
    return similarity >= 0.6;
  }
  /**
   * Calculate Levenshtein distance (classic DP; insertions, deletions and
   * substitutions all cost 1).
   */
  levenshteinDistance(a, b) {
    const matrix = [];
    for (let i = 0; i <= b.length; i++) {
      matrix[i] = [i];
    }
    for (let j = 0; j <= a.length; j++) {
      matrix[0][j] = j;
    }
    for (let i = 1; i <= b.length; i++) {
      for (let j = 1; j <= a.length; j++) {
        if (b.charAt(i - 1) === a.charAt(j - 1)) {
          matrix[i][j] = matrix[i - 1][j - 1];
        } else {
          matrix[i][j] = Math.min(
            matrix[i - 1][j - 1] + 1, // substitution
            matrix[i][j - 1] + 1,     // insertion
            matrix[i - 1][j] + 1      // deletion
          );
        }
      }
    }
    return matrix[b.length][a.length];
  }
  /**
   * Archive violations to a timestamped JSONL file under archiveDir and
   * return the file path.
   */
  async archiveViolations(violations) {
    const archiveDir = join(this.config.workspaceRoot, this.config.archiveDir);
    await mkdir(archiveDir, {
      recursive: true
    });
    const timestamp = Date.now();
    const archivePath = join(archiveDir, `violations_${timestamp}.jsonl`);
    const content = violations.map((v) => JSON.stringify(v)).join("\n");
    await writeFile(archivePath, content, "utf-8");
    return archivePath;
  }
};
|
|
382
|
+
|
|
383
|
+
export { AutomatedLearningPruner };
|
|
384
|
+
//# sourceMappingURL=chunk-5EQLSU5B.js.map
|
|
385
|
+
//# sourceMappingURL=chunk-5EQLSU5B.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/services/learning-pruner.ts"],"names":["AutomatedLearningPruner","config","stateStore","workspaceRoot","dryRun","maxAgeDays","minUsageCount","archiveDir","StateStore","snapbackDir","join","initialize","load","pruneStaleViolations","violations","getViolations","staleViolations","violation","filePath","file","existsSync","push","patternExists","checkPatternExists","archivedFiles","length","archivePath","archiveViolations","removeViolation","id","save","totalChecked","staleCount","archivedCount","updateLearningScores","learnings","getLearnings","updatedCount","totalConfidence","lowConfidenceCount","now","Date","learning","createdAt","getTime","daysSinceCreated","ageScore","Math","max","usageCount","accessCount","appliedCount","useScore","min","newScore","currentScore","relevanceScore","abs","updateLearning","totalScored","avgConfidence","deduplicateLearnings","groups","findDuplicateGroups","mergedCount","group","sorted","sort","a","b","aUsage","bUsage","primary","duplicates","slice","combinedKeywords","Set","keywords","dup","kw","add","Array","from","duplicateGroups","archiveStaleItems","staleLearnings","archived","score","archivedLearnings","archivedViolations","mkdir","recursive","success","archiveLearning","content","map","l","JSON","stringify","writeFile","deletePermanentlyArchived","deleteThresholdDays","deleteCandidates","archivedAt","daysSinceArchived","deletedCount","deleteLearning","type","checkRegexInFile","pattern","readFile","test","processed","i","has","j","areSimilarLearnings","aText","trigger","action","toLowerCase","bText","distance","levenshteinDistance","maxLen","similarity","matrix","charAt","timestamp","v"],"mappings":";;;;;;;AAwGO,IAAMA,0BAAN,MAAMA;EAxGb;;;AAyGkBC,EAAAA,MAAAA;AACAC,EAAAA,UAAAA;AAEjB,EAAA,WAAA,CAAYD,MAAAA,EAAsB;AACjC,IAAA,IAAA,CAAKA,MAAAA,GAAS;AACbE,MAAAA,aAAAA,EAAeF,MAAAA,CAAOE,aAAAA;AACtBC,MAAAA,MAAAA,EAAQH,OAAOG,MAAAA,IAAU,KAAA;AACzBC,MAAAA,UAAAA,EAAYJ,OAAOI,UAAAA,IAAc,EAAA;AACjCC,MAAAA,aAAAA,EAAeL
,OAAOK,aAAAA,IAAiB,CAAA;AACvCC,MAAAA,UAAAA,EAAYN,OAAOM,UAAAA,IAAc;AAClC,KAAA;AAGA,IAAA,IAAA,CAAKL,UAAAA,GAAa,IAAIM,UAAAA,CAAW;AAChCC,MAAAA,WAAAA,EAAaC,IAAAA,CAAK,IAAA,CAAKT,MAAAA,CAAOE,aAAAA,EAAe,WAAA;KAC9C,CAAA;AACD,EAAA;;;;AAKA,EAAA,MAAMQ,UAAAA,GAA4B;AACjC,IAAA,MAAM,IAAA,CAAKT,WAAWU,IAAAA,EAAI;AAC3B,EAAA;;;;;;;;;;AAWA,EAAA,MAAMC,oBAAAA,GAA6C;AAClD,IAAA,MAAMC,UAAAA,GAAa,IAAA,CAAKZ,UAAAA,CAAWa,aAAAA,EAAa;AAChD,IAAA,MAAMC,kBAAqC,EAAA;AAE3C,IAAA,KAAA,MAAWC,aAAaH,UAAAA,EAAY;AAEnC,MAAA,MAAMI,WAAWR,IAAAA,CAAK,IAAA,CAAKT,MAAAA,CAAOE,aAAAA,EAAec,UAAUE,IAAI,CAAA;AAC/D,MAAA,IAAI,CAACC,UAAAA,CAAWF,QAAAA,CAAAA,EAAW;AAC1BF,QAAAA,eAAAA,CAAgBK,KAAKJ,SAAAA,CAAAA;AACrB,QAAA;AACD,MAAA;AAGA,MAAA,MAAMK,aAAAA,GAAgB,MAAM,IAAA,CAAKC,kBAAAA,CAAmBN,SAAAA,CAAAA;AACpD,MAAA,IAAI,CAACK,aAAAA,EAAe;AACnBN,QAAAA,eAAAA,CAAgBK,KAAKJ,SAAAA,CAAAA;AACtB,MAAA;AACD,IAAA;AAGA,IAAA,MAAMO,gBAA0B,EAAA;AAChC,IAAA,IAAI,CAAC,IAAA,CAAKvB,MAAAA,CAAOG,MAAAA,IAAUY,eAAAA,CAAgBS,SAAS,CAAA,EAAG;AACtD,MAAA,MAAMC,WAAAA,GAAc,MAAM,IAAA,CAAKC,iBAAAA,CAAkBX,eAAAA,CAAAA;AACjDQ,MAAAA,aAAAA,CAAcH,KAAKK,WAAAA,CAAAA;AAGnB,MAAA,KAAA,MAAWT,aAAaD,eAAAA,EAAiB;AACxC,QAAA,IAAA,CAAKd,UAAAA,CAAW0B,eAAAA,CAAgBX,SAAAA,CAAUY,EAAE,CAAA;AAC7C,MAAA;AAGA,MAAA,MAAM,IAAA,CAAK3B,WAAW4B,IAAAA,EAAI;AAC3B,IAAA;AAEA,IAAA,OAAO;AACNC,MAAAA,YAAAA,EAAcjB,UAAAA,CAAWW,MAAAA;AACzBO,MAAAA,UAAAA,EAAYhB,eAAAA,CAAgBS,MAAAA;AAC5BQ,MAAAA,aAAAA,EAAe,IAAA,CAAKhC,MAAAA,CAAOG,MAAAA,GAAS,CAAA,GAAIY,eAAAA,CAAgBS,MAAAA;AACxDD,MAAAA,aAAAA;AACApB,MAAAA,MAAAA,EAAQ,KAAKH,MAAAA,CAAOG;AACrB,KAAA;AACD,EAAA;;;;;;;;;AAUA,EAAA,MAAM8B,oBAAAA,GAAmD;AACxD,IAAA,MAAMC,SAAAA,GAAY,IAAA,CAAKjC,UAAAA,CAAWkC,YAAAA,EAAY;AAC9C,IAAA,IAAIC,YAAAA,GAAe,CAAA;AACnB,IAAA,IAAIC,eAAAA,GAAkB,CAAA;AACtB,IAAA,IAAIC,kBAAAA,GAAqB,CAAA;AAEzB,IAAA,MAAMC,GAAAA,GAAMC,KAAKD,GAAAA,EAAG;AAEpB,IAAA,KAAA,MAAWE,YAAYP,SAAAA,EAAW;AACjC,MAAA,MAAMQ,YAAY,IAAIF,IAAAA,CAAKC,QAAAA,CAASC,SAAS,EAAEC,OAAAA,EAAO;AACtD,MAAA,MAAMC,gBAAAA,GAAAA,CAAoBL,GAAAA,GAAMG,SAAAA,KAAc,GAAA,GAAO,KAAK,EAAA,GAAK,EAAA,CAAA;AAG/D,MAAA,MAAMG,QAA
AA,GAAWC,KAAKC,GAAAA,CAAI,CAAA,EAAG,IAAIH,gBAAAA,GAAmB,IAAA,CAAK5C,MAAAA,CAAOI,UAAU,CAAA,GAAI,GAAA;AAG9E,MAAA,MAAM4C,UAAAA,GAAAA,CAAcP,QAAAA,CAASQ,WAAAA,IAAe,CAAA,KAAMR,SAASS,YAAAA,IAAgB,CAAA,CAAA;AAC3E,MAAA,MAAMC,QAAAA,GAAWL,KAAKM,GAAAA,CAAIJ,UAAAA,GAAa,KAAKhD,MAAAA,CAAOK,aAAAA,EAAe,CAAA,CAAA,GAAK,GAAA;AAGvE,MAAA,MAAMgD,QAAAA,GAAWP,KAAKC,GAAAA,CAAI,CAAA,EAAGD,KAAKM,GAAAA,CAAI,CAAA,EAAGP,QAAAA,GAAWM,QAAAA,CAAAA,CAAAA;AAIpD,MAAA,MAAMG,YAAAA,GAAeb,SAASc,cAAAA,IAAkB,CAAA;AAChD,MAAA,IAAIT,KAAKU,GAAAA,CAAIH,QAAAA,GAAWC,YAAAA,CAAAA,GAAgB,IAAA,IAASA,iBAAiB,CAAA,EAAK;AACtE,QAAA,IAAA,CAAKrD,UAAAA,CAAWwD,cAAAA,CAAehB,QAAAA,CAASb,EAAAA,EAAI;UAAE2B,cAAAA,EAAgBF;SAAS,CAAA;AACvEjB,QAAAA,YAAAA,EAAAA;AACD,MAAA;AAEAC,MAAAA,eAAAA,IAAmBgB,QAAAA;AACnB,MAAA,IAAIA,WAAW,GAAA,EAAK;AACnBf,QAAAA,kBAAAA,EAAAA;AACD,MAAA;AACD,IAAA;AAGA,IAAA,IAAIF,YAAAA,GAAe,CAAA,IAAK,CAAC,IAAA,CAAKpC,OAAOG,MAAAA,EAAQ;AAC5C,MAAA,MAAM,IAAA,CAAKF,WAAW4B,IAAAA,EAAI;AAC3B,IAAA;AAEA,IAAA,OAAO;AACN6B,MAAAA,WAAAA,EAAaxB,SAAAA,CAAUV,MAAAA;AACvBY,MAAAA,YAAAA;AACAuB,MAAAA,aAAAA,EAAezB,SAAAA,CAAUV,MAAAA,GAAS,CAAA,GAAIa,eAAAA,GAAkBH,UAAUV,MAAAA,GAAS,CAAA;AAC3Ec,MAAAA;AACD,KAAA;AACD,EAAA;;;;;;;;;AAUA,EAAA,MAAMsB,oBAAAA,GAA8C;AACnD,IAAA,MAAM1B,SAAAA,GAAY,IAAA,CAAKjC,UAAAA,CAAWkC,YAAAA,EAAY;AAC9C,IAAA,MAAM0B,MAAAA,GAAS,IAAA,CAAKC,mBAAAA,CAAoB5B,SAAAA,CAAAA;AACxC,IAAA,IAAI6B,WAAAA,GAAc,CAAA;AAElB,IAAA,KAAA,MAAWC,SAASH,MAAAA,EAAQ;AAC3B,MAAA,IAAIG,KAAAA,CAAMxC,SAAS,CAAA,EAAG;AACrB,QAAA;AACD,MAAA;AAGA,MAAA,MAAMyC,MAAAA,GAASD,KAAAA,CAAME,IAAAA,CAAK,CAACC,GAAGC,CAAAA,KAAAA;AAC7B,QAAA,MAAMC,MAAAA,GAAAA,CAAUF,CAAAA,CAAElB,WAAAA,IAAe,CAAA,KAAMkB,EAAEjB,YAAAA,IAAgB,CAAA,CAAA;AACzD,QAAA,MAAMoB,MAAAA,GAAAA,CAAUF,CAAAA,CAAEnB,WAAAA,IAAe,CAAA,KAAMmB,EAAElB,YAAAA,IAAgB,CAAA,CAAA;AACzD,QAAA,OAAOoB,MAAAA,GAASD,MAAAA;MACjB,CAAA,CAAA;AAEA,MAAA,MAAME,OAAAA,GAAUN,OAAO,CAAA,CAAA;AACvB,MAAA,MAAMO,UAAAA,GAAaP,MAAAA,CAAOQ,KAAAA,CAAM,CAAA,CAAA;AAGhC,MAAA,IAAI,CAAC,IAAA,CAAKzE,MAAAA,CAAOG,MAAAA,EAAQ;AACxB,QAAA,MAAMuE,gBAAAA,uBAAuBC,GAAAA,CAAI;AAAKJ,UAAA
A,GAAAA,OAAAA,CAAQK,YAAY;AAAI,SAAA,CAAA;AAC9D,QAAA,KAAA,MAAWC,OAAOL,UAAAA,EAAY;AAE7B,UAAA,KAAA,MAAWM,EAAAA,IAAMD,GAAAA,CAAID,QAAAA,IAAY,EAAA,EAAI;AACpCF,YAAAA,gBAAAA,CAAiBK,IAAID,EAAAA,CAAAA;AACtB,UAAA;AAGD,QAAA;AAEA,QAAA,IAAA,CAAK7E,UAAAA,CAAWwD,cAAAA,CAAec,OAAAA,CAAQ3C,EAAAA,EAAI;UAC1CgD,QAAAA,EAAUI,KAAAA,CAAMC,KAAKP,gBAAAA;SACtB,CAAA;AAEAX,QAAAA,WAAAA,IAAeS,UAAAA,CAAWhD,MAAAA;MAC3B,CAAA,MAAO;AACNuC,QAAAA,WAAAA,IAAeS,UAAAA,CAAWhD,MAAAA;AAC3B,MAAA;AACD,IAAA;AAEA,IAAA,IAAIuC,WAAAA,GAAc,CAAA,IAAK,CAAC,IAAA,CAAK/D,OAAOG,MAAAA,EAAQ;AAC3C,MAAA,MAAM,IAAA,CAAKF,WAAW4B,IAAAA,EAAI;AAC3B,IAAA;AAEA,IAAA,OAAO;AACNC,MAAAA,YAAAA,EAAcI,SAAAA,CAAUV,MAAAA;AACxB0D,MAAAA,eAAAA,EAAiBrB,MAAAA,CAAOrC,MAAAA;AACxBuC,MAAAA,WAAAA;AACA5D,MAAAA,MAAAA,EAAQ,KAAKH,MAAAA,CAAOG;AACrB,KAAA;AACD,EAAA;;;;;;;;;;;;AAaA,EAAA,MAAMgF,iBAAAA,GAA4C;AACjD,IAAA,MAAMjD,SAAAA,GAAY,IAAA,CAAKjC,UAAAA,CAAWkC,YAAAA,EAAY;AAE9C,IAAA,MAAMiD,iBAAmC,EAAA;AACzC,IAAA,MAAM7C,GAAAA,GAAMC,KAAKD,GAAAA,EAAG;AAGpB,IAAA,KAAA,MAAWE,YAAYP,SAAAA,EAAW;AAEjC,MAAA,IAAIO,SAAS4C,QAAAA,EAAU;AACtB,QAAA;AACD,MAAA;AAEA,MAAA,MAAMC,KAAAA,GAAQ7C,SAASc,cAAAA,IAAkB,CAAA;AACzC,MAAA,MAAMb,YAAY,IAAIF,IAAAA,CAAKC,QAAAA,CAASC,SAAS,EAAEC,OAAAA,EAAO;AACtD,MAAA,MAAMC,gBAAAA,GAAAA,CAAoBL,GAAAA,GAAMG,SAAAA,KAAc,GAAA,GAAO,KAAK,EAAA,GAAK,EAAA,CAAA;AAC/D,MAAA,MAAMM,UAAAA,GAAAA,CAAcP,QAAAA,CAASQ,WAAAA,IAAe,CAAA,KAAMR,SAASS,YAAAA,IAAgB,CAAA,CAAA;AAG3E,MAAA,IAAIoC,QAAQ,GAAA,IAAQ1C,gBAAAA,GAAmB,KAAK5C,MAAAA,CAAOI,UAAAA,IAAc4C,eAAe,CAAA,EAAI;AACnFoC,QAAAA,cAAAA,CAAehE,KAAKqB,QAAAA,CAAAA;AACrB,MAAA;AACD,IAAA;AAEA,IAAA,MAAMnC,aAAaG,IAAAA,CAAK,IAAA,CAAKT,OAAOE,aAAAA,EAAe,IAAA,CAAKF,OAAOM,UAAU,CAAA;AACzE,IAAA,IAAIiF,iBAAAA,GAAoB,CAAA;AACxB,IAAA,MAAMC,kBAAAA,GAAqB,CAAA;AAE3B,IAAA,IAAI,CAAC,IAAA,CAAKxF,MAAAA,CAAOG,MAAAA,IAAUiF,cAAAA,CAAe5D,SAAS,CAAA,EAAG;AACrD,MAAA,MAAMiE,MAAMnF,UAAAA,EAAY;QAAEoF,SAAAA,EAAW;OAAK,CAAA;AAG1C,MAAA,KAAA,MAAWjD,YAAY2C,cAAAA,EAAgB;AACtC,QAAA,MAAMO,OAAAA,GAAU,IAAA,CAAK1F,UAAAA,CAAW2F,eAAAA,CAAgBnD,SAASb,EAAE,CAAA;AAC3D,QAAA,IAAI+D,OAAAA,EAAS;
AACZJ,UAAAA,iBAAAA,EAAAA;AACD,QAAA;AACD,MAAA;AAGA,MAAA,IAAIH,cAAAA,CAAe5D,SAAS,CAAA,EAAG;AAC9B,QAAA,MAAMC,cAAchB,IAAAA,CAAKH,UAAAA,EAAY,aAAakC,IAAAA,CAAKD,GAAAA,EAAG,CAAA,MAAA,CAAU,CAAA;AACpE,QAAA,MAAMsD,OAAAA,GAAUT,cAAAA,CAAeU,GAAAA,CAAI,CAACC,CAAAA,KAAMC,IAAAA,CAAKC,SAAAA,CAAUF,CAAAA,CAAAA,CAAAA,CAAItF,IAAAA,CAAK,IAAA,CAAA;AAClE,QAAA,MAAMyF,SAAAA,CAAUzE,WAAAA,EAAaoE,OAAAA,EAAS,OAAA,CAAA;AACvC,MAAA;AAGA,MAAA,IAAIN,oBAAoB,CAAA,EAAG;AAC1B,QAAA,MAAM,IAAA,CAAKtF,WAAW4B,IAAAA,EAAI;AAC3B,MAAA;AAGD,IAAA;AAEA,IAAA,OAAO;MACNwD,QAAAA,EAAU;AACTnD,QAAAA,SAAAA,EAAW,IAAA,CAAKlC,MAAAA,CAAOG,MAAAA,GAASiF,cAAAA,CAAe5D,MAAAA,GAAS+D,iBAAAA;QACxD1E,UAAAA,EAAY,IAAA,CAAKb,MAAAA,CAAOG,MAAAA,GAAS,CAAA,GAAIqF;AACtC,OAAA;MACA/D,WAAAA,EAAanB,UAAAA;AACbH,MAAAA,MAAAA,EAAQ,KAAKH,MAAAA,CAAOG;AACrB,KAAA;AACD,EAAA;;;;;;;;;;AAWA,EAAA,MAAMgG,yBAAAA,GAAgF;AACrF,IAAA,MAAMjE,SAAAA,GAAY,IAAA,CAAKjC,UAAAA,CAAWkC,YAAAA,EAAY;AAC9C,IAAA,MAAMI,GAAAA,GAAMC,KAAKD,GAAAA,EAAG;AACpB,IAAA,MAAM6D,mBAAAA,GAAsB,EAAA;AAE5B,IAAA,MAAMC,mBAAqC,EAAA;AAE3C,IAAA,KAAA,MAAW5D,YAAYP,SAAAA,EAAW;AACjC,MAAA,IAAI,CAACO,QAAAA,CAAS4C,QAAAA,IAAY,CAAC5C,SAAS6D,UAAAA,EAAY;AAC/C,QAAA;AACD,MAAA;AAEA,MAAA,MAAMA,aAAa,IAAI9D,IAAAA,CAAKC,QAAAA,CAAS6D,UAAU,EAAE3D,OAAAA,EAAO;AACxD,MAAA,MAAM4D,iBAAAA,GAAAA,CAAqBhE,GAAAA,GAAM+D,UAAAA,KAAe,GAAA,GAAO,KAAK,EAAA,GAAK,EAAA,CAAA;AAEjE,MAAA,IAAIC,oBAAoBH,mBAAAA,EAAqB;AAC5CC,QAAAA,gBAAAA,CAAiBjF,KAAKqB,QAAAA,CAAAA;AACvB,MAAA;AACD,IAAA;AAEA,IAAA,IAAI+D,YAAAA,GAAe,CAAA;AAEnB,IAAA,IAAI,CAAC,IAAA,CAAKxG,MAAAA,CAAOG,MAAAA,IAAUkG,gBAAAA,CAAiB7E,SAAS,CAAA,EAAG;AACvD,MAAA,KAAA,MAAWiB,YAAY4D,gBAAAA,EAAkB;AACxC,QAAA,MAAMV,OAAAA,GAAU,IAAA,CAAK1F,UAAAA,CAAWwG,cAAAA,CAAehE,SAASb,EAAE,CAAA;AAC1D,QAAA,IAAI+D,OAAAA,EAAS;AACZa,UAAAA,YAAAA,EAAAA;AACD,QAAA;AACD,MAAA;AAGA,MAAA,IAAIA,eAAe,CAAA,EAAG;AACrB,QAAA,MAAM,IAAA,CAAKvG,WAAW4B,IAAAA,EAAI;AAC3B,MAAA;AACD,IAAA;AAEA,IAAA,OAAO;AACN2E,MAAAA,YAAAA,EAAc,IAAA,CAAKxG,MAAAA,CAAOG,MAAAA,GAASkG,gBAAAA,CAAiB7E,MAAAA,GAASgF,YAAAA;AAC7DrG,MAAAA,MAAAA,EAAQ,KAAKH,MAAAA,CAAOG;AACrB,KAA
A;AACD,EAAA;;;;;;;AASA,EAAA,MAAcmB,mBAAmBN,SAAAA,EAA8C;AAC9E,IAAA,MAAMC,WAAWR,IAAAA,CAAK,IAAA,CAAKT,MAAAA,CAAOE,aAAAA,EAAec,UAAUE,IAAI,CAAA;AAG/D,IAAA,QAAQF,UAAU0F,IAAAA;MACjB,KAAK,cAAA;MACL,KAAK,yBAAA;AACJ,QAAA,OAAO,IAAA,CAAKC,gBAAAA,CAAiB1F,QAAAA,EAAU,6BAAA,CAAA;MAExC,KAAK,kBAAA;AACJ,QAAA,OAAO,IAAA,CAAK0F,gBAAAA,CAAiB1F,QAAAA,EAAU,uCAAA,CAAA;MAExC,KAAK,yBAAA;MACL,KAAK,oBAAA;AAEJ,QAAA,OAAO,IAAA,CAAK0F,gBAAAA,CAAiB1F,QAAAA,EAAU,6BAAA,CAAA;MAExC,KAAK,WAAA;MACL,KAAK,iBAAA;AAGJ,QAAA,OAAO,IAAA;AAER,MAAA;AAEC,QAAA,OAAO,IAAA;AACT;AACD,EAAA;;;;EAKA,MAAc0F,gBAAAA,CAAiB1F,UAAkB2F,OAAAA,EAAmC;AACnF,IAAA,IAAI;AACH,MAAA,MAAM,EAAEC,QAAAA,EAAQ,GAAK,MAAM,OAAO,aAAA,CAAA;AAClC,MAAA,MAAMhB,OAAAA,GAAU,MAAMgB,QAAAA,CAAS5F,QAAAA,EAAU,OAAA,CAAA;AACzC,MAAA,OAAO2F,OAAAA,CAAQE,KAAKjB,OAAAA,CAAAA;IACrB,CAAA,CAAA,MAAQ;AACP,MAAA,OAAO,KAAA;AACR,IAAA;AACD,EAAA;;;;AAKQ/B,EAAAA,mBAAAA,CAAoB5B,SAAAA,EAAiD;AAC5E,IAAA,MAAM2B,SAA6B,EAAA;AACnC,IAAA,MAAMkD,SAAAA,uBAAgBpC,GAAAA,EAAAA;AAEtB,IAAA,KAAA,IAASqC,CAAAA,GAAI,CAAA,EAAGA,CAAAA,GAAI9E,SAAAA,CAAUV,QAAQwF,CAAAA,EAAAA,EAAK;AAC1C,MAAA,IAAID,UAAUE,GAAAA,CAAI/E,SAAAA,CAAU8E,CAAAA,CAAAA,CAAGpF,EAAE,CAAA,EAAG;AACnC,QAAA;AACD,MAAA;AAEA,MAAA,MAAMoC,KAAAA,GAA0B;AAAC9B,QAAAA,SAAAA,CAAU8E,CAAAA;;AAC3CD,MAAAA,SAAAA,CAAUhC,GAAAA,CAAI7C,SAAAA,CAAU8E,CAAAA,CAAAA,CAAGpF,EAAE,CAAA;AAE7B,MAAA,KAAA,IAASsF,IAAIF,CAAAA,GAAI,CAAA,EAAGE,CAAAA,GAAIhF,SAAAA,CAAUV,QAAQ0F,CAAAA,EAAAA,EAAK;AAC9C,QAAA,IAAIH,UAAUE,GAAAA,CAAI/E,SAAAA,CAAUgF,CAAAA,CAAAA,CAAGtF,EAAE,CAAA,EAAG;AACnC,UAAA;AACD,QAAA;AAEA,QAAA,IAAI,IAAA,CAAKuF,oBAAoBjF,SAAAA,CAAU8E,CAAAA,GAAI9E,SAAAA,CAAUgF,CAAAA,CAAE,CAAA,EAAG;AACzDlD,UAAAA,KAAAA,CAAM5C,IAAAA,CAAKc,SAAAA,CAAUgF,CAAAA,CAAE,CAAA;AACvBH,UAAAA,SAAAA,CAAUhC,GAAAA,CAAI7C,SAAAA,CAAUgF,CAAAA,CAAAA,CAAGtF,EAAE,CAAA;AAC9B,QAAA;AACD,MAAA;AAEA,MAAA,IAAIoC,KAAAA,CAAMxC,SAAS,CAAA,EAAG;AACrBqC,QAAAA,MAAAA,CAAOzC,KAAK4C,KAAAA,CAAAA;AACb,MAAA;AACD,IAAA;AAEA,IAAA,OAAOH,MAAAA;AACR,EAAA;;;;AAKQsD,EAAAA,mBAAAA,CAAoBhD,GAAmBC,CAAAA,EAA4B;AAE1E,IAAA,IAAID,CAAAA,CAAEuC,IAAAA,
KAAStC,CAAAA,CAAEsC,IAAAA,EAAM;AACtB,MAAA,OAAO,KAAA;AACR,IAAA;AAGA,IAAA,MAAMU,KAAAA,GAAQ,GAAGjD,CAAAA,CAAEkD,OAAO,IAAIlD,CAAAA,CAAEmD,MAAM,GAAGC,WAAAA,EAAW;AACpD,IAAA,MAAMC,KAAAA,GAAQ,GAAGpD,CAAAA,CAAEiD,OAAO,IAAIjD,CAAAA,CAAEkD,MAAM,GAAGC,WAAAA,EAAW;AAGpD,IAAA,MAAME,QAAAA,GAAW,IAAA,CAAKC,mBAAAA,CAAoBN,KAAAA,EAAOI,KAAAA,CAAAA;AACjD,IAAA,MAAMG,SAAS7E,IAAAA,CAAKC,GAAAA,CAAIqE,KAAAA,CAAM5F,MAAAA,EAAQgG,MAAMhG,MAAM,CAAA;AAClD,IAAA,MAAMoG,UAAAA,GAAa,IAAIH,QAAAA,GAAWE,MAAAA;AAElC,IAAA,OAAOC,UAAAA,IAAc,GAAA;AACtB,EAAA;;;;AAKQF,EAAAA,mBAAAA,CAAoBvD,GAAWC,CAAAA,EAAmB;AACzD,IAAA,MAAMyD,SAAqB,EAAA;AAE3B,IAAA,KAAA,IAASb,CAAAA,GAAI,CAAA,EAAGA,CAAAA,IAAK5C,CAAAA,CAAE5C,QAAQwF,CAAAA,EAAAA,EAAK;AACnCa,MAAAA,MAAAA,CAAOb,CAAAA,CAAAA,GAAK;AAACA,QAAAA;;AACd,IAAA;AAEA,IAAA,KAAA,IAASE,CAAAA,GAAI,CAAA,EAAGA,CAAAA,IAAK/C,CAAAA,CAAE3C,QAAQ0F,CAAAA,EAAAA,EAAK;AACnCW,MAAAA,MAAAA,CAAO,CAAA,CAAA,CAAGX,CAAAA,CAAAA,GAAKA,CAAAA;AAChB,IAAA;AAEA,IAAA,KAAA,IAASF,CAAAA,GAAI,CAAA,EAAGA,CAAAA,IAAK5C,CAAAA,CAAE5C,QAAQwF,CAAAA,EAAAA,EAAK;AACnC,MAAA,KAAA,IAASE,CAAAA,GAAI,CAAA,EAAGA,CAAAA,IAAK/C,CAAAA,CAAE3C,QAAQ0F,CAAAA,EAAAA,EAAK;AACnC,QAAA,IAAI9C,CAAAA,CAAE0D,OAAOd,CAAAA,GAAI,CAAA,MAAO7C,CAAAA,CAAE2D,MAAAA,CAAOZ,CAAAA,GAAI,CAAA,CAAA,EAAI;AACxCW,UAAAA,MAAAA,CAAOb,CAAAA,EAAGE,CAAAA,CAAAA,GAAKW,OAAOb,CAAAA,GAAI,CAAA,CAAA,CAAGE,CAAAA,GAAI,CAAA,CAAA;QAClC,CAAA,MAAO;AACNW,UAAAA,MAAAA,CAAOb,CAAAA,CAAAA,CAAGE,CAAAA,CAAAA,GAAKpE,IAAAA,CAAKM,GAAAA,CACnByE,MAAAA,CAAOb,CAAAA,GAAI,CAAA,CAAA,CAAGE,CAAAA,GAAI,CAAA,CAAA,GAAK,CAAA,EACvBW,MAAAA,CAAOb,CAAAA,CAAAA,CAAGE,CAAAA,GAAI,CAAA,CAAA,GAAK,CAAA,EACnBW,MAAAA,CAAOb,CAAAA,GAAI,CAAA,CAAA,CAAGE,CAAAA,CAAAA,GAAK,CAAA,CAAA;AAErB,QAAA;AACD,MAAA;AACD,IAAA;AAEA,IAAA,OAAOW,MAAAA,CAAOzD,CAAAA,CAAE5C,MAAM,CAAA,CAAE2C,EAAE3C,MAAM,CAAA;AACjC,EAAA;;;;AAKA,EAAA,MAAcE,kBAAkBb,UAAAA,EAAgD;AAC/E,IAAA,MAAMP,aAAaG,IAAAA,CAAK,IAAA,CAAKT,OAAOE,aAAAA,EAAe,IAAA,CAAKF,OAAOM,UAAU,CAAA;AACzE,IAAA,MAAMmF,MAAMnF,UAAAA,EAAY;MAAEoF,SAAAA,EAAW;KAAK,CAAA;AAE1C,IAAA,MAAMqC,SAAAA,GAAYvF,KAAKD,GAAAA,EAAG;
AAC1B,IAAA,MAAMd,WAAAA,GAAchB,IAAAA,CAAKH,UAAAA,EAAY,CAAA,WAAA,EAAcyH,SAAAA,CAAAA,MAAAA,CAAiB,CAAA;AACpE,IAAA,MAAMlC,OAAAA,GAAUhF,UAAAA,CAAWiF,GAAAA,CAAI,CAACkC,CAAAA,KAAMhC,IAAAA,CAAKC,SAAAA,CAAU+B,CAAAA,CAAAA,CAAAA,CAAIvH,IAAAA,CAAK,IAAA,CAAA;AAE9D,IAAA,MAAMyF,SAAAA,CAAUzE,WAAAA,EAAaoE,OAAAA,EAAS,OAAA,CAAA;AACtC,IAAA,OAAOpE,WAAAA;AACR,EAAA;AACD","file":"chunk-5EQLSU5B.js","sourcesContent":["/**\n * Automated Learning Pruner\n *\n * Core engine for automated learning and violation lifecycle management.\n * Features:\n * - File existence validation for violations\n * - Age-based + usage-based confidence scoring\n * - Pattern detection (validates if code patterns still exist)\n * - Deduplication (merges similar learnings)\n * - Archive mechanism (safe removal with rollback capability)\n *\n * @module services/learning-pruner\n */\n\nimport { existsSync } from \"node:fs\";\nimport { mkdir, writeFile } from \"node:fs/promises\";\nimport { join } from \"node:path\";\nimport { StateStore, type StoredLearning, type StoredViolation } from \"@snapback/intelligence/storage\";\n\n// =============================================================================\n// TYPES\n// =============================================================================\n\nexport interface PrunerConfig {\n\t/** Workspace root directory */\n\tworkspaceRoot: string;\n\t/** Enable dry-run mode (no actual changes) */\n\tdryRun?: boolean;\n\t/** Maximum age in days before archiving stale learnings */\n\tmaxAgeDays?: number;\n\t/** Minimum usage count to keep high-value learnings */\n\tminUsageCount?: number;\n\t/** Archive directory (relative to workspace) */\n\tarchiveDir?: string;\n}\n\nexport interface PruneResult {\n\t/** Total violations checked */\n\ttotalChecked: number;\n\t/** Violations marked for archival */\n\tstaleCount: number;\n\t/** Violations archived (0 if dryRun) */\n\tarchivedCount: number;\n\t/** File paths of archived violations */\n\tarchivedFiles: string[];\n\t/** Dry run mode? 
*/\n\tdryRun: boolean;\n}\n\nexport interface ScoreUpdateResult {\n\t/** Total learnings scored */\n\ttotalScored: number;\n\t/** Learnings with updated scores */\n\tupdatedCount: number;\n\t/** Average confidence score */\n\tavgConfidence: number;\n\t/** Low confidence learnings (< 0.3) */\n\tlowConfidenceCount: number;\n}\n\nexport interface DedupeResult {\n\t/** Total learnings checked */\n\ttotalChecked: number;\n\t/** Duplicate groups found */\n\tduplicateGroups: number;\n\t/** Learnings merged */\n\tmergedCount: number;\n\t/** Dry run mode? */\n\tdryRun: boolean;\n}\n\nexport interface ArchiveResult {\n\t/** Items archived */\n\tarchived: {\n\t\tlearnings: number;\n\t\tviolations: number;\n\t};\n\t/** Archive location */\n\tarchivePath: string;\n\t/** Dry run mode? */\n\tdryRun: boolean;\n}\n\n// =============================================================================\n// AUTOMATED LEARNING PRUNER\n// =============================================================================\n\n/**\n * Automated Learning Pruner\n *\n * Manages learning and violation lifecycle with intelligent pruning.\n *\n * @example\n * ```typescript\n * const pruner = new AutomatedLearningPruner({\n * workspaceRoot: '/path/to/project',\n * dryRun: true,\n * maxAgeDays: 90\n * });\n *\n * const result = await pruner.pruneStaleViolations();\n * console.log(`Archived ${result.archivedCount} violations`);\n * ```\n */\nexport class AutomatedLearningPruner {\n\tprivate readonly config: Required<PrunerConfig>;\n\tprivate readonly stateStore: StateStore;\n\n\tconstructor(config: PrunerConfig) {\n\t\tthis.config = {\n\t\t\tworkspaceRoot: config.workspaceRoot,\n\t\t\tdryRun: config.dryRun ?? false,\n\t\t\tmaxAgeDays: config.maxAgeDays ?? 90,\n\t\t\tminUsageCount: config.minUsageCount ?? 3,\n\t\t\tarchiveDir: config.archiveDir ?? 
\".snapback/archive\",\n\t\t};\n\n\t\t// Initialize StateStore (leverages existing intelligence storage)\n\t\tthis.stateStore = new StateStore({\n\t\t\tsnapbackDir: join(this.config.workspaceRoot, \".snapback\"),\n\t\t});\n\t}\n\n\t/**\n\t * Initialize pruner (loads state)\n\t */\n\tasync initialize(): Promise<void> {\n\t\tawait this.stateStore.load();\n\t}\n\n\t/**\n\t * Prune stale violations (file existence + pattern validation)\n\t *\n\t * Validates:\n\t * 1. File referenced in violation still exists\n\t * 2. Pattern mentioned in violation still exists in code\n\t *\n\t * Archives violations that fail validation.\n\t */\n\tasync pruneStaleViolations(): Promise<PruneResult> {\n\t\tconst violations = this.stateStore.getViolations();\n\t\tconst staleViolations: StoredViolation[] = [];\n\n\t\tfor (const violation of violations) {\n\t\t\t// Check 1: File existence\n\t\t\tconst filePath = join(this.config.workspaceRoot, violation.file);\n\t\t\tif (!existsSync(filePath)) {\n\t\t\t\tstaleViolations.push(violation);\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\t// Check 2: Pattern still exists in code\n\t\t\tconst patternExists = await this.checkPatternExists(violation);\n\t\t\tif (!patternExists) {\n\t\t\t\tstaleViolations.push(violation);\n\t\t\t}\n\t\t}\n\n\t\t// Archive stale violations\n\t\tconst archivedFiles: string[] = [];\n\t\tif (!this.config.dryRun && staleViolations.length > 0) {\n\t\t\tconst archivePath = await this.archiveViolations(staleViolations);\n\t\t\tarchivedFiles.push(archivePath);\n\n\t\t\t// Remove violations from StateStore\n\t\t\tfor (const violation of staleViolations) {\n\t\t\t\tthis.stateStore.removeViolation(violation.id);\n\t\t\t}\n\n\t\t\t// Persist changes\n\t\t\tawait this.stateStore.save();\n\t\t}\n\n\t\treturn {\n\t\t\ttotalChecked: violations.length,\n\t\t\tstaleCount: staleViolations.length,\n\t\t\tarchivedCount: this.config.dryRun ? 
0 : staleViolations.length,\n\t\t\tarchivedFiles,\n\t\t\tdryRun: this.config.dryRun,\n\t\t};\n\t}\n\n\t/**\n\t * Update learning confidence scores\n\t *\n\t * Scoring formula:\n\t * - Age score: (1 - daysSinceCreated / maxAgeDays) * 0.3\n\t * - Usage score: (usageCount / minUsageCount) * 0.7\n\t * - Final: ageScore + usageScore (clamped to 0-1)\n\t */\n\tasync updateLearningScores(): Promise<ScoreUpdateResult> {\n\t\tconst learnings = this.stateStore.getLearnings();\n\t\tlet updatedCount = 0;\n\t\tlet totalConfidence = 0;\n\t\tlet lowConfidenceCount = 0;\n\n\t\tconst now = Date.now();\n\n\t\tfor (const learning of learnings) {\n\t\t\tconst createdAt = new Date(learning.createdAt).getTime();\n\t\t\tconst daysSinceCreated = (now - createdAt) / (1000 * 60 * 60 * 24);\n\n\t\t\t// Age score (newer = higher score)\n\t\t\tconst ageScore = Math.max(0, 1 - daysSinceCreated / this.config.maxAgeDays) * 0.3;\n\n\t\t\t// Usage score (more used = higher score)\n\t\t\tconst usageCount = (learning.accessCount || 0) + (learning.appliedCount || 0);\n\t\t\tconst useScore = Math.min(usageCount / this.config.minUsageCount, 1) * 0.7;\n\n\t\t\t// Final score\n\t\t\tconst newScore = Math.max(0, Math.min(1, ageScore + useScore));\n\n\t\t\t// Always update score (even if similar to existing)\n\t\t\t// This ensures scores are recalculated based on current config\n\t\t\tconst currentScore = learning.relevanceScore ?? 1.0;\n\t\t\tif (Math.abs(newScore - currentScore) > 0.001 || currentScore === 1.0) {\n\t\t\t\tthis.stateStore.updateLearning(learning.id, { relevanceScore: newScore });\n\t\t\t\tupdatedCount++;\n\t\t\t}\n\n\t\t\ttotalConfidence += newScore;\n\t\t\tif (newScore < 0.3) {\n\t\t\t\tlowConfidenceCount++;\n\t\t\t}\n\t\t}\n\n\t\t// Persist changes\n\t\tif (updatedCount > 0 && !this.config.dryRun) {\n\t\t\tawait this.stateStore.save();\n\t\t}\n\n\t\treturn {\n\t\t\ttotalScored: learnings.length,\n\t\t\tupdatedCount,\n\t\t\tavgConfidence: learnings.length > 0 ? 
totalConfidence / learnings.length : 0,\n\t\t\tlowConfidenceCount,\n\t\t};\n\t}\n\n\t/**\n\t * Deduplicate learnings (merge similar entries)\n\t *\n\t * Similarity algorithm:\n\t * - Exact type match\n\t * - Levenshtein distance on trigger+action < 0.2 (80% similar)\n\t * - Merge: keep higher usage count, combine keywords\n\t */\n\tasync deduplicateLearnings(): Promise<DedupeResult> {\n\t\tconst learnings = this.stateStore.getLearnings();\n\t\tconst groups = this.findDuplicateGroups(learnings);\n\t\tlet mergedCount = 0;\n\n\t\tfor (const group of groups) {\n\t\t\tif (group.length < 2) {\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\t// Sort by usage (highest first)\n\t\t\tconst sorted = group.sort((a, b) => {\n\t\t\t\tconst aUsage = (a.accessCount || 0) + (a.appliedCount || 0);\n\t\t\t\tconst bUsage = (b.accessCount || 0) + (b.appliedCount || 0);\n\t\t\t\treturn bUsage - aUsage;\n\t\t\t});\n\n\t\t\tconst primary = sorted[0];\n\t\t\tconst duplicates = sorted.slice(1);\n\n\t\t\t// Merge duplicates into primary\n\t\t\tif (!this.config.dryRun) {\n\t\t\t\tconst combinedKeywords = new Set([...(primary.keywords || [])]);\n\t\t\t\tfor (const dup of duplicates) {\n\t\t\t\t\t// Combine keywords\n\t\t\t\t\tfor (const kw of dup.keywords || []) {\n\t\t\t\t\t\tcombinedKeywords.add(kw);\n\t\t\t\t\t}\n\t\t\t\t\t// Note: Need to add removeL learning method to StateStore\n\t\t\t\t\t// For now, just track what would be merged\n\t\t\t\t}\n\n\t\t\t\tthis.stateStore.updateLearning(primary.id, {\n\t\t\t\t\tkeywords: Array.from(combinedKeywords),\n\t\t\t\t});\n\n\t\t\t\tmergedCount += duplicates.length;\n\t\t\t} else {\n\t\t\t\tmergedCount += duplicates.length;\n\t\t\t}\n\t\t}\n\n\t\tif (mergedCount > 0 && !this.config.dryRun) {\n\t\t\tawait this.stateStore.save();\n\t\t}\n\n\t\treturn {\n\t\t\ttotalChecked: learnings.length,\n\t\t\tduplicateGroups: groups.length,\n\t\t\tmergedCount,\n\t\t\tdryRun: this.config.dryRun,\n\t\t};\n\t}\n\n\t/**\n\t * Archive stale learnings and violations\n\t *\n\t * 
TWO-PHASE DECAY LIFECYCLE (consolidated from LearningGCService):\n\t * Phase 1: Archive (30d unused, usageCount <3) - set archived flag in StateStore\n\t * Phase 2: Delete (90d archived) - permanently remove from StateStore\n\t *\n\t * Archives:\n\t * - Learnings with relevanceScore < 0.3\n\t * - Learnings older than maxAgeDays with no usage (file-based fallback for migration)\n\t */\n\tasync archiveStaleItems(): Promise<ArchiveResult> {\n\t\tconst learnings = this.stateStore.getLearnings();\n\n\t\tconst staleLearnings: StoredLearning[] = [];\n\t\tconst now = Date.now();\n\n\t\t// Phase 1: Identify archive candidates (30d unused, <3 usage)\n\t\tfor (const learning of learnings) {\n\t\t\t// Skip already archived\n\t\t\tif (learning.archived) {\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tconst score = learning.relevanceScore ?? 1.0;\n\t\t\tconst createdAt = new Date(learning.createdAt).getTime();\n\t\t\tconst daysSinceCreated = (now - createdAt) / (1000 * 60 * 60 * 24);\n\t\t\tconst usageCount = (learning.accessCount || 0) + (learning.appliedCount || 0);\n\n\t\t\t// Archive if low confidence OR (old + unused)\n\t\t\tif (score < 0.3 || (daysSinceCreated > this.config.maxAgeDays && usageCount === 0)) {\n\t\t\t\tstaleLearnings.push(learning);\n\t\t\t}\n\t\t}\n\n\t\tconst archiveDir = join(this.config.workspaceRoot, this.config.archiveDir);\n\t\tlet archivedLearnings = 0;\n\t\tconst archivedViolations = 0;\n\n\t\tif (!this.config.dryRun && staleLearnings.length > 0) {\n\t\t\tawait mkdir(archiveDir, { recursive: true });\n\n\t\t\t// USE STATESTORE FLAGS (Phase 2.6b consolidation)\n\t\t\tfor (const learning of staleLearnings) {\n\t\t\t\tconst success = this.stateStore.archiveLearning(learning.id);\n\t\t\t\tif (success) {\n\t\t\t\t\tarchivedLearnings++;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Fallback: Also write to file for backup/migration (legacy support)\n\t\t\tif (staleLearnings.length > 0) {\n\t\t\t\tconst archivePath = join(archiveDir, 
`learnings_${Date.now()}.jsonl`);\n\t\t\t\tconst content = staleLearnings.map((l) => JSON.stringify(l)).join(\"\\n\");\n\t\t\t\tawait writeFile(archivePath, content, \"utf-8\");\n\t\t\t}\n\n\t\t\t// Persist StateStore changes\n\t\t\tif (archivedLearnings > 0) {\n\t\t\t\tawait this.stateStore.save();\n\t\t\t}\n\n\t\t\t// Note: Violations are handled by pruneStaleViolations()\n\t\t}\n\n\t\treturn {\n\t\t\tarchived: {\n\t\t\t\tlearnings: this.config.dryRun ? staleLearnings.length : archivedLearnings,\n\t\t\t\tviolations: this.config.dryRun ? 0 : archivedViolations,\n\t\t\t},\n\t\t\tarchivePath: archiveDir,\n\t\t\tdryRun: this.config.dryRun,\n\t\t};\n\t}\n\n\t/**\n\t * Delete permanently archived learnings (Phase 2 of two-phase decay)\n\t *\n\t * Deletes learnings that have been:\n\t * - Archived for > 90 days (default)\n\t * - Confirmed as no longer relevant\n\t *\n\t * Safety: Requires explicit call, not part of default archive flow\n\t */\n\tasync deletePermanentlyArchived(): Promise<{ deletedCount: number; dryRun: boolean }> {\n\t\tconst learnings = this.stateStore.getLearnings();\n\t\tconst now = Date.now();\n\t\tconst deleteThresholdDays = 90; // Fixed: 90 days after archival\n\n\t\tconst deleteCandidates: StoredLearning[] = [];\n\n\t\tfor (const learning of learnings) {\n\t\t\tif (!learning.archived || !learning.archivedAt) {\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tconst archivedAt = new Date(learning.archivedAt).getTime();\n\t\t\tconst daysSinceArchived = (now - archivedAt) / (1000 * 60 * 60 * 24);\n\n\t\t\tif (daysSinceArchived > deleteThresholdDays) {\n\t\t\t\tdeleteCandidates.push(learning);\n\t\t\t}\n\t\t}\n\n\t\tlet deletedCount = 0;\n\n\t\tif (!this.config.dryRun && deleteCandidates.length > 0) {\n\t\t\tfor (const learning of deleteCandidates) {\n\t\t\t\tconst success = this.stateStore.deleteLearning(learning.id);\n\t\t\t\tif (success) {\n\t\t\t\t\tdeletedCount++;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Persist StateStore changes\n\t\t\tif (deletedCount > 0) 
{\n\t\t\t\tawait this.stateStore.save();\n\t\t\t}\n\t\t}\n\n\t\treturn {\n\t\t\tdeletedCount: this.config.dryRun ? deleteCandidates.length : deletedCount,\n\t\t\tdryRun: this.config.dryRun,\n\t\t};\n\t}\n\n\t// -------------------------------------------------------------------------\n\t// PRIVATE HELPERS\n\t// -------------------------------------------------------------------------\n\n\t/**\n\t * Check if violation pattern still exists in code\n\t */\n\tprivate async checkPatternExists(violation: StoredViolation): Promise<boolean> {\n\t\tconst filePath = join(this.config.workspaceRoot, violation.file);\n\n\t\t// Pattern detection based on violation type\n\t\tswitch (violation.type) {\n\t\t\tcase \"silent_catch\":\n\t\t\tcase \"silent-error-swallowing\":\n\t\t\t\treturn this.checkRegexInFile(filePath, /catch\\s*\\([^)]*\\)\\s*\\{\\s*\\}/);\n\n\t\t\tcase \"hash-duplication\":\n\t\t\t\treturn this.checkRegexInFile(filePath, /createHash\\s*\\(\\s*['\"]sha256['\"]\\s*\\)/);\n\n\t\t\tcase \"missing_defensive_check\":\n\t\t\tcase \"missing-null-check\":\n\t\t\t\t// Look for array operations without null checks\n\t\t\t\treturn this.checkRegexInFile(filePath, /\\.(map|filter|forEach)\\s*\\(/);\n\n\t\t\tcase \"dead_code\":\n\t\t\tcase \"unused_constant\":\n\t\t\t\t// Check if identifier is referenced elsewhere\n\t\t\t\t// This is complex, for now assume pattern exists if file exists\n\t\t\t\treturn true;\n\n\t\t\tdefault:\n\t\t\t\t// Unknown pattern type, assume it exists (conservative)\n\t\t\t\treturn true;\n\t\t}\n\t}\n\n\t/**\n\t * Check if regex pattern exists in file\n\t */\n\tprivate async checkRegexInFile(filePath: string, pattern: RegExp): Promise<boolean> {\n\t\ttry {\n\t\t\tconst { readFile } = await import(\"node:fs/promises\");\n\t\t\tconst content = await readFile(filePath, \"utf-8\");\n\t\t\treturn pattern.test(content);\n\t\t} catch {\n\t\t\treturn false; // File read error = pattern doesn't exist\n\t\t}\n\t}\n\n\t/**\n\t * Find duplicate learning groups\n\t 
*/\n\tprivate findDuplicateGroups(learnings: StoredLearning[]): StoredLearning[][] {\n\t\tconst groups: StoredLearning[][] = [];\n\t\tconst processed = new Set<string>();\n\n\t\tfor (let i = 0; i < learnings.length; i++) {\n\t\t\tif (processed.has(learnings[i].id)) {\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tconst group: StoredLearning[] = [learnings[i]];\n\t\t\tprocessed.add(learnings[i].id);\n\n\t\t\tfor (let j = i + 1; j < learnings.length; j++) {\n\t\t\t\tif (processed.has(learnings[j].id)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\tif (this.areSimilarLearnings(learnings[i], learnings[j])) {\n\t\t\t\t\tgroup.push(learnings[j]);\n\t\t\t\t\tprocessed.add(learnings[j].id);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (group.length > 1) {\n\t\t\t\tgroups.push(group);\n\t\t\t}\n\t\t}\n\n\t\treturn groups;\n\t}\n\n\t/**\n\t * Check if two learnings are similar\n\t */\n\tprivate areSimilarLearnings(a: StoredLearning, b: StoredLearning): boolean {\n\t\t// Type must match\n\t\tif (a.type !== b.type) {\n\t\t\treturn false;\n\t\t}\n\n\t\t// Combine trigger + action for similarity check\n\t\tconst aText = `${a.trigger} ${a.action}`.toLowerCase();\n\t\tconst bText = `${b.trigger} ${b.action}`.toLowerCase();\n\n\t\t// Simple similarity: check for 60% overlap (lowered for fuzzy matching of similar learnings)\n\t\tconst distance = this.levenshteinDistance(aText, bText);\n\t\tconst maxLen = Math.max(aText.length, bText.length);\n\t\tconst similarity = 1 - distance / maxLen;\n\n\t\treturn similarity >= 0.6;\n\t}\n\n\t/**\n\t * Calculate Levenshtein distance\n\t */\n\tprivate levenshteinDistance(a: string, b: string): number {\n\t\tconst matrix: number[][] = [];\n\n\t\tfor (let i = 0; i <= b.length; i++) {\n\t\t\tmatrix[i] = [i];\n\t\t}\n\n\t\tfor (let j = 0; j <= a.length; j++) {\n\t\t\tmatrix[0][j] = j;\n\t\t}\n\n\t\tfor (let i = 1; i <= b.length; i++) {\n\t\t\tfor (let j = 1; j <= a.length; j++) {\n\t\t\t\tif (b.charAt(i - 1) === a.charAt(j - 1)) {\n\t\t\t\t\tmatrix[i][j] = matrix[i - 1][j - 
1];\n\t\t\t\t} else {\n\t\t\t\t\tmatrix[i][j] = Math.min(\n\t\t\t\t\t\tmatrix[i - 1][j - 1] + 1, // substitution\n\t\t\t\t\t\tmatrix[i][j - 1] + 1, // insertion\n\t\t\t\t\t\tmatrix[i - 1][j] + 1, // deletion\n\t\t\t\t\t);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\treturn matrix[b.length][a.length];\n\t}\n\n\t/**\n\t * Archive violations to file\n\t */\n\tprivate async archiveViolations(violations: StoredViolation[]): Promise<string> {\n\t\tconst archiveDir = join(this.config.workspaceRoot, this.config.archiveDir);\n\t\tawait mkdir(archiveDir, { recursive: true });\n\n\t\tconst timestamp = Date.now();\n\t\tconst archivePath = join(archiveDir, `violations_${timestamp}.jsonl`);\n\t\tconst content = violations.map((v) => JSON.stringify(v)).join(\"\\n\");\n\n\t\tawait writeFile(archivePath, content, \"utf-8\");\n\t\treturn archivePath;\n\t}\n}\n"]}
|