@triedotdev/mcp 1.0.74 → 1.0.75
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +60 -5
- package/dist/{chunk-COXK23KW.js → chunk-35FAFFHE.js} +2 -2
- package/dist/{chunk-E2ASFZMF.js → chunk-3RKY55HZ.js} +3 -3
- package/dist/{chunk-BN5DLROX.js → chunk-53URTRWH.js} +2 -2
- package/dist/{chunk-2KLYR5GW.js → chunk-6QKDEGWR.js} +83 -5
- package/dist/chunk-6QKDEGWR.js.map +1 -0
- package/dist/{chunk-BHIKY5PW.js → chunk-AIC4HOOQ.js} +3 -3
- package/dist/{chunk-3AKE4M4Z.js → chunk-LNLLZQWH.js} +4 -4
- package/dist/{chunk-6TSYRIQS.js → chunk-P6VLSYXN.js} +59 -5
- package/dist/chunk-P6VLSYXN.js.map +1 -0
- package/dist/{chunk-LWT7XFDD.js → chunk-RRDDAD5N.js} +6 -6
- package/dist/{chunk-OAICCSDL.js → chunk-UPKBO5EM.js} +8 -8
- package/dist/cli/main.js +102 -7
- package/dist/cli/main.js.map +1 -1
- package/dist/cli/yolo-daemon.js +8 -8
- package/dist/{goal-manager-5FLR7IS2.js → goal-manager-NI4LJ2SX.js} +4 -4
- package/dist/{guardian-agent-QFREM3AR.js → guardian-agent-R5HX7UWJ.js} +6 -6
- package/dist/index.js +112 -15
- package/dist/index.js.map +1 -1
- package/dist/{issue-store-SHQZICED.js → issue-store-MULGOF6B.js} +4 -2
- package/dist/workers/agent-worker.js +3 -3
- package/package.json +1 -1
- package/dist/chunk-2KLYR5GW.js.map +0 -1
- package/dist/chunk-6TSYRIQS.js.map +0 -1
- /package/dist/{chunk-COXK23KW.js.map → chunk-35FAFFHE.js.map} +0 -0
- /package/dist/{chunk-E2ASFZMF.js.map → chunk-3RKY55HZ.js.map} +0 -0
- /package/dist/{chunk-BN5DLROX.js.map → chunk-53URTRWH.js.map} +0 -0
- /package/dist/{chunk-BHIKY5PW.js.map → chunk-AIC4HOOQ.js.map} +0 -0
- /package/dist/{chunk-3AKE4M4Z.js.map → chunk-LNLLZQWH.js.map} +0 -0
- /package/dist/{chunk-LWT7XFDD.js.map → chunk-RRDDAD5N.js.map} +0 -0
- /package/dist/{chunk-OAICCSDL.js.map → chunk-UPKBO5EM.js.map} +0 -0
- /package/dist/{goal-manager-5FLR7IS2.js.map → goal-manager-NI4LJ2SX.js.map} +0 -0
- /package/dist/{guardian-agent-QFREM3AR.js.map → guardian-agent-R5HX7UWJ.js.map} +0 -0
- /package/dist/{issue-store-SHQZICED.js.map → issue-store-MULGOF6B.js.map} +0 -0
package/README.md
CHANGED
@@ -252,11 +252,16 @@ Trie Guardian: "auth/login.ts has 5 past incidents (5x above average).
 🔮 Hypothesis validated: Friday deploys cause issues"
 ```

-### Memory System Hardening
+### Memory System Hardening & Management
 - **Atomic writes**: Temp file + rename pattern prevents data corruption on crash/interrupt
 - **SHA256 hashing**: Cryptographic deduplication (replaced collision-prone bit-shift hash)
+- **Intelligent deduplication**: Same issue pattern stored once with occurrence count (e.g., 2,481 `parseInt` issues → 1 pattern)
+- **Capacity management**: 1,000 issue cap with intelligent pruning (prioritizes recent, high-severity, unresolved)
+- **User notifications**: Clear warnings at 80%/100% capacity with actionable purge recommendations
 - **Backup rotation**: Automated 5-backup rotation with recovery commands
 - **Zod validation**: Schema validation for all memory data structures
+- **Memory purge**: Four strategies (smart/resolved/old/all) for managing storage via CLI and MCP
+- **Deduplication metrics**: See duplicates avoided and unique patterns in stats
 - **Why Phase 1 only**: JSON performs well at Trie's scale (1K-10K issues). SQLite, embeddings, and session management add complexity without proportional value for a security scanning CLI tool.

 ### Guardian + Visual QA Integration
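The new deduplication bullets above are backed by a small content-hashing routine in the bundled source (the full TypeScript appears in the chunk-6QKDEGWR.js.map entry at the end of this diff). A minimal sketch of that key derivation:

```ts
// Sketch of the SHA256 dedup key behind "Intelligent deduplication"
// (mirrors hashIssue in the bundled source; StoredIssue field names as in the package).
import { createHash } from "crypto";

interface IssueLike {
  issue: string;    // finding text
  file: string;     // affected file
  severity: string; // e.g. "critical" | "high" | ...
  agent: string;    // scanner that reported it
}

// Same text + file + severity + agent => same 16-char key, so repeated findings
// (e.g. thousands of identical `parseInt` hits) collapse into one stored pattern.
function hashIssue(issue: IssueLike): string {
  const content = `${issue.issue}|${issue.file}|${issue.severity}|${issue.agent}`;
  return createHash("sha256").update(content).digest("hex").slice(0, 16);
}
```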
@@ -1147,6 +1152,15 @@ trie checkpoint "finished auth flow"
 # Search issue memory
 trie memory search "SQL injection"

+# View memory statistics and capacity
+trie memory stats
+
+# Manage memory capacity (NEW!)
+trie memory purge smart # Remove resolved & old low-priority (recommended)
+trie memory purge resolved # Remove all resolved issues
+trie memory purge old 60 # Remove issues older than 60 days
+trie memory purge all # Clear all issues (keeps summaries)
+
 # View cross-project patterns
 trie memory global patterns

@@ -2011,13 +2025,18 @@ trie://team # Team ownership info

 ## Issue Memory

-Trie stores all detected incidents for search, pattern discovery, and cross-project learning. Uses BM25 ranking (same algorithm as Elasticsearch) for intelligent search.
+Trie stores all detected incidents for search, pattern discovery, and cross-project learning. Uses BM25 ranking (same algorithm as Elasticsearch) for intelligent search with intelligent deduplication and capacity management.

 ### Memory System

-**Data Integrity
+**Data Integrity & Capacity Management:**
 - **Atomic writes**: Temp file + rename pattern prevents corruption on crash/interrupt
 - **SHA256 hashing**: Cryptographic deduplication (no collision risk)
+- **Intelligent deduplication**: Same pattern stored once with occurrence count (e.g., 2,481 `parseInt` issues → 1 pattern with count)
+- **1,000 issue cap**: Automatic intelligent pruning prioritizes recent, high-severity, unresolved issues
+- **Capacity warnings**: Clear notifications at 80%/100% with actionable recommendations
+- **Memory purge**: Four strategies (smart/resolved/old/all) for managing storage
+- **Deduplication metrics**: Track duplicates avoided and unique patterns
 - **Backup rotation**: 5 automated backups with recovery commands
 - **Zod validation**: Schema validation catches malformed data early

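The capacity and deduplication bullets correspond to two new fields returned by the memory stats (see the getMemoryStats hunk in chunk-6QKDEGWR.js further down). A type-level sketch; the warning helper at the end is illustrative, not package code:

```ts
// New stats fields per the IssueMemoryStats interface in the bundled source.
interface CapacityInfo {
  current: number;      // issues currently in issues.json
  max: number;          // hard cap (1000)
  percentFull: number;  // Math.round(current / max * 100)
  isAtCap: boolean;     // current >= max
}

interface DeduplicationStats {
  duplicatesAvoided: number; // index entries minus distinct hashes
  uniquePatterns: number;    // distinct SHA256 issue hashes
}

// Illustrative consumer of the documented 80%/100% thresholds (message text assumed).
function capacityWarning(info: CapacityInfo): string | undefined {
  if (info.isAtCap) return "Memory at 100% - run `trie memory purge smart`";
  if (info.percentFull >= 80) return "Memory above 80% - consider `trie memory purge smart`";
  return undefined;
}
```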
@@ -2026,10 +2045,46 @@ JSON files perform well at Trie's scale (1,000-10,000 issues). SQLite, vector em
 ### Local Memory (`.trie/memory/`)

 Incidents from each `trie tell` command are stored locally:
-- `issues.json` - Searchable incident index with BM25 ranking
+- `issues.json` - Searchable incident index with BM25 ranking (max 1,000 active issues)
+- `compacted-summaries.json` - Historical summaries from auto-compaction
 - `patterns.json` - Discovered patterns (3+ incidents with confidence scores)
 - `YYYY-MM-DD.md` - Daily incident logs (human-readable)

+### Memory Capacity Management
+
+When approaching the 1,000 issue cap, Trie provides clear warnings and purge options:
+
+**CLI:**
+```bash
+# Check memory capacity and stats
+trie memory stats
+
+# Smart purge (recommended): Remove resolved & old low-priority issues
+trie memory purge smart
+
+# Remove all resolved issues
+trie memory purge resolved
+
+# Remove issues older than N days (default 90)
+trie memory purge old 60
+
+# Clear all issues (keeps historical summaries, requires --confirm)
+trie memory purge all --confirm
+```
+
+**MCP:**
+```
+trie_memory action="stats" # View capacity info
+trie_memory action="purge" purgeStrategy="smart" # Smart purge
+trie_memory action="purge" purgeStrategy="old" daysOld=60
+```
+
+**What happens at capacity:**
+- **At 80%**: Info message suggesting smart purge
+- **At 100%**: Warning with all purge options and automatic intelligent pruning
+- **Automatic compaction**: Old issues (>30 days) automatically compacted into summaries
+- **Intelligent pruning**: If still >1,000, keeps recent, high-severity, and unresolved issues
+
 ### Search Incidents

 **CLI:**
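The `smart` strategy's keep/drop rule can be read directly from the purgeIssues hunk in chunk-6QKDEGWR.js below; as a standalone sketch of that predicate:

```ts
// An issue survives a smart purge if it is recent (< 30 days old), critical/high
// severity, or still unresolved; only old, low-priority, resolved issues are removed.
function keptBySmartPurge(i: { timestamp: string; severity: string; resolved?: boolean }): boolean {
  const thirtyDaysAgo = new Date();
  thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30);
  const isRecent = new Date(i.timestamp) >= thirtyDaysAgo;
  const isImportant = ["critical", "high"].includes(i.severity);
  const isUnresolved = !i.resolved;
  return isRecent || isImportant || isUnresolved;
}
```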
@@ -2040,7 +2095,7 @@ trie memory search "SQL injection"
 # View recent incidents
 trie memory recent

-# Show statistics
+# Show statistics and capacity
 trie memory stats
 ```

package/dist/chunk-35FAFFHE.js
CHANGED
@@ -2,7 +2,7 @@ import {
   BackupManager,
   atomicWriteJSON,
   safeParseAndValidate
-} from "./chunk-
+} from "./chunk-6QKDEGWR.js";

 // src/guardian/guardian-state.ts
 import { mkdir, readFile } from "fs/promises";
@@ -676,4 +676,4 @@ function getGuardianState(projectPath) {
 export {
   getGuardianState
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-35FAFFHE.js.map
package/dist/chunk-3RKY55HZ.js
CHANGED
@@ -1,7 +1,7 @@
 import {
   recordToGlobalMemory,
   updateGlobalMemoryMd
-} from "./chunk-
+} from "./chunk-P6VLSYXN.js";
 import {
   checkFileLevelIssues,
   getVibeCodeTrie,
@@ -20,7 +20,7 @@ import {
   atomicWriteFile,
   atomicWriteJSON,
   storeIssues
-} from "./chunk-
+} from "./chunk-6QKDEGWR.js";
 import {
   getWorkingDirectory
 } from "./chunk-CM7EHNQK.js";
@@ -10451,4 +10451,4 @@ export {
   CustomSkill,
   getSkillRegistry
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-3RKY55HZ.js.map
package/dist/chunk-53URTRWH.js
CHANGED
@@ -5,7 +5,7 @@ import {
   getStagedChanges,
   getUncommittedChanges,
   getWorkingTreeDiff
-} from "./chunk-
+} from "./chunk-LNLLZQWH.js";
 import {
   Trie
 } from "./chunk-6NLHFIYA.js";
@@ -1828,4 +1828,4 @@ export {
   IncidentIndex,
   TrieFeedbackTool
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-53URTRWH.js.map
package/dist/chunk-6QKDEGWR.js
CHANGED
@@ -688,8 +688,15 @@ async function storeIssues(issues, project, workDir) {
   await mkdir3(memoryDir, { recursive: true });
   const stored = [];
   const now = (/* @__PURE__ */ new Date()).toISOString();
+  const seenHashes = /* @__PURE__ */ new Set();
+  let duplicates = 0;
   for (const issue of issues) {
     const hash = hashIssue(issue);
+    if (seenHashes.has(hash)) {
+      duplicates++;
+      continue;
+    }
+    seenHashes.add(hash);
     const storedIssue = {
       id: issue.id,
       hash,
@@ -708,8 +715,8 @@ async function storeIssues(issues, project, workDir) {
     stored.push(storedIssue);
   }
   await appendToDailyLog(stored, projectDir);
-  await updateIssueIndex(stored, projectDir);
-  return stored.length;
+  const dedupedCount = await updateIssueIndex(stored, projectDir);
+  return { stored: dedupedCount, duplicates: duplicates + (stored.length - dedupedCount) };
 }
 async function searchIssues(query, options = {}) {
   const projectDir = options.workDir || getWorkingDirectory(void 0, true);
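Note the return-shape change above: storeIssues previously resolved to a plain count and now resolves to an object, so existing callers need updating. A sketch of the new shape (the helper name is illustrative):

```ts
// New storeIssues result per the diff above.
interface StoreIssuesResult {
  stored: number;     // unique issues actually added to the index (dedupedCount)
  duplicates: number; // in-scan duplicates plus issues already present in the index
}

// Hypothetical reporting helper for the new shape.
function describeStoreResult(r: StoreIssuesResult): string {
  return `${r.stored} unique issue(s) stored, ${r.duplicates} duplicate(s) skipped`;
}
```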
@@ -776,6 +783,8 @@ async function getMemoryStats(workDir) {
   const projectDir = workDir || getWorkingDirectory(void 0, true);
   const index = await loadIssueIndex(projectDir);
   const historical = await getHistoricalInsights(projectDir);
+  const MAX_ISSUES = 1e3;
+  const uniqueHashes = new Set(index.map((i) => i.hash));
   const stats = {
     totalIssues: index.length,
     issuesByAgent: {},
@@ -784,7 +793,17 @@ async function getMemoryStats(workDir) {
     newestIssue: void 0,
     resolvedCount: 0,
     historicalIssues: historical.totalHistoricalIssues,
-    improvementTrend: historical.improvementTrend
+    improvementTrend: historical.improvementTrend,
+    capacityInfo: {
+      current: index.length,
+      max: MAX_ISSUES,
+      percentFull: Math.round(index.length / MAX_ISSUES * 100),
+      isAtCap: index.length >= MAX_ISSUES
+    },
+    deduplicationStats: {
+      duplicatesAvoided: index.length - uniqueHashes.size,
+      uniquePatterns: uniqueHashes.size
+    }
   };
   for (const issue of index) {
     stats.issuesByAgent[issue.agent] = (stats.issuesByAgent[issue.agent] || 0) + 1;
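A quick worked check of the arithmetic added above (numbers are hypothetical, not package output):

```ts
// With 850 index entries of which 720 carry distinct hashes:
const current = 850, max = 1000, unique = 720;
const percentFull = Math.round((current / max) * 100); // 85 -> past the 80% warning threshold
const duplicatesAvoided = current - unique;            // 130
const uniquePatterns = unique;                         // 720
```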
@@ -815,6 +834,42 @@ async function getRecentIssues(options = {}) {
   cutoff.setDate(cutoff.getDate() - daysBack);
   return index.filter((i) => new Date(i.timestamp) >= cutoff).sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime()).slice(0, limit);
 }
+async function purgeIssues(strategy, options = {}) {
+  const projectDir = options.workDir || getWorkingDirectory(void 0, true);
+  const index = await loadIssueIndex(projectDir);
+  const originalCount = index.length;
+  let remaining = [];
+  switch (strategy) {
+    case "smart":
+      const thirtyDaysAgo = /* @__PURE__ */ new Date();
+      thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30);
+      remaining = index.filter((i) => {
+        const isRecent = new Date(i.timestamp) >= thirtyDaysAgo;
+        const isImportant = ["critical", "high"].includes(i.severity);
+        const isUnresolved = !i.resolved;
+        return isRecent || isImportant || isUnresolved;
+      });
+      break;
+    case "resolved":
+      remaining = index.filter((i) => !i.resolved);
+      break;
+    case "old":
+      const daysOld = options.daysOld || 90;
+      const cutoffDate = /* @__PURE__ */ new Date();
+      cutoffDate.setDate(cutoffDate.getDate() - daysOld);
+      remaining = index.filter((i) => new Date(i.timestamp) >= cutoffDate);
+      break;
+    case "all":
+      remaining = [];
+      break;
+  }
+  await saveIssueIndex(remaining, projectDir);
+  return {
+    removed: originalCount - remaining.length,
+    remaining: remaining.length,
+    strategy
+  };
+}
 async function getDailyLogs(workDir) {
   const projectDir = workDir || getWorkingDirectory(void 0, true);
   const memoryDir = join3(projectDir, ".trie", "memory");
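A type-level summary of the purgeIssues API added in this hunk (matching the TypeScript source embedded in the source map at the end of this diff):

```ts
// purgeIssues(strategy, options) strategies and result shape.
type PurgeStrategy = "smart" | "resolved" | "old" | "all";

interface PurgeOptions {
  workDir?: string;
  daysOld?: number; // only used by "old"; defaults to 90
}

interface PurgeResult {
  removed: number;   // originalCount - remaining.length
  remaining: number; // issues left in issues.json
  strategy: string;  // the strategy that was applied
}
```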
@@ -886,6 +941,7 @@ async function updateIssueIndex(newIssues, projectDir) {
   let existing = await loadIssueIndex(projectDir);
   const hashSet = new Set(existing.map((i) => i.hash));
   const toAdd = newIssues.filter((i) => !hashSet.has(i.hash));
+  const dedupedCount = toAdd.length;
   existing = [...existing, ...toAdd];
   if (existing.length > 500) {
     const { summary, remaining } = await compactOldIssues(existing, {
@@ -898,9 +954,30 @@ async function updateIssueIndex(newIssues, projectDir) {
     }
   }
   if (existing.length > 1e3) {
-    existing = existing
+    existing = intelligentPrune(existing, 1e3);
   }
   await saveIssueIndex(existing, projectDir);
+  return dedupedCount;
+}
+function intelligentPrune(issues, targetCount) {
+  const severityWeight = {
+    critical: 100,
+    high: 50,
+    moderate: 20,
+    low: 10,
+    info: 5
+  };
+  const scored = issues.map((issue) => {
+    const ageInDays = (Date.now() - new Date(issue.timestamp).getTime()) / (1e3 * 60 * 60 * 24);
+    const recencyScore = Math.max(0, 100 - ageInDays * 2);
+    const severityScore = severityWeight[issue.severity] || 10;
+    const resolvedPenalty = issue.resolved ? -50 : 0;
+    return {
+      issue,
+      score: recencyScore + severityScore + resolvedPenalty
+    };
+  });
+  return scored.sort((a, b) => b.score - a.score).slice(0, targetCount).map((s) => s.issue);
 }
 async function saveIssueIndex(issues, projectDir) {
   const memoryDir = join3(projectDir, ".trie", "memory");
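The pruning score above combines recency, severity, and resolution status. A worked sketch of the same formula:

```ts
// Score used by intelligentPrune: recency + severity weight - resolved penalty.
const severityWeight: Record<string, number> = { critical: 100, high: 50, moderate: 20, low: 10, info: 5 };

function pruneScore(ageInDays: number, severity: string, resolved: boolean): number {
  const recencyScore = Math.max(0, 100 - ageInDays * 2); // hits 0 after ~50 days
  const severityScore = severityWeight[severity] ?? 10;
  const resolvedPenalty = resolved ? -50 : 0;
  return recencyScore + severityScore + resolvedPenalty;
}

// e.g. a 10-day-old unresolved "high" issue scores 80 + 50 + 0 = 130, while a
// 60-day-old resolved "low" issue scores 0 + 10 - 50 = -40, so when the index is
// trimmed to 1,000 the former is kept long before the latter.
```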
@@ -928,6 +1005,7 @@ export {
   markIssueResolved,
   getMemoryStats,
   getRecentIssues,
+  purgeIssues,
   getDailyLogs
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-6QKDEGWR.js.map
package/dist/chunk-6QKDEGWR.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../src/memory/issue-store.ts","../src/memory/bm25.ts","../src/memory/compactor.ts","../src/utils/atomic-write.ts","../src/utils/backup-manager.ts","../src/memory/validation.ts"],"sourcesContent":["/**\n * Issue Memory Store\n * \n * Stores issues for semantic search and pattern detection.\n * Uses BM25 ranking for search (same algorithm as Elasticsearch).\n * Local JSON storage with daily logs.\n * \n * Phase 1 Hardening:\n * - SHA256 hashing for proper deduplication\n * - Atomic writes to prevent corruption\n * - Rotational backups for recovery\n * - Zod validation for data integrity\n */\n\nimport { mkdir, writeFile, readFile, readdir } from 'fs/promises';\nimport { createHash } from 'crypto';\nimport { existsSync } from 'fs';\nimport { join } from 'path';\nimport { getWorkingDirectory } from '../utils/workspace.js';\nimport type { Issue } from '../types/index.js';\nimport { BM25Index } from './bm25.js';\nimport { compactOldIssues, saveCompactedSummary, getHistoricalInsights } from './compactor.js';\nimport { atomicWriteJSON } from '../utils/atomic-write.js';\nimport { BackupManager } from '../utils/backup-manager.js';\nimport { IssueIndexSchema, safeParseAndValidate } from './validation.js';\n\nexport interface StoredIssue {\n id: string;\n hash: string;\n severity: string;\n issue: string;\n fix: string;\n file: string;\n line: number | undefined;\n agent: string;\n category: string | undefined;\n timestamp: string;\n project: string;\n resolved: boolean | undefined;\n resolvedAt: string | undefined;\n}\n\nexport interface IssueSearchResult {\n issue: StoredIssue;\n score: number;\n matchType: 'bm25' | 'keyword' | 'fts5';\n}\n\nexport interface IssueMemoryStats {\n totalIssues: number;\n issuesByAgent: Record<string, number>;\n issuesBySeverity: Record<string, number>;\n oldestIssue: string | undefined;\n newestIssue: string | undefined;\n resolvedCount: number;\n historicalIssues: number;\n improvementTrend: 'improving' | 'stable' | 'declining' | 'unknown';\n capacityInfo: {\n current: number;\n max: number;\n percentFull: number;\n isAtCap: boolean;\n };\n deduplicationStats: {\n duplicatesAvoided: number;\n uniquePatterns: number;\n };\n}\n\n/**\n * Store issues from a scan\n * Returns number of unique issues added (after deduplication)\n */\nexport async function storeIssues(\n issues: Issue[],\n project: string,\n workDir?: string\n): Promise<{ stored: number; duplicates: number }> {\n const projectDir = workDir || getWorkingDirectory(undefined, true);\n const memoryDir = join(projectDir, '.trie', 'memory');\n await mkdir(memoryDir, { recursive: true });\n \n const stored: StoredIssue[] = [];\n const now = new Date().toISOString();\n const seenHashes = new Set<string>();\n let duplicates = 0;\n \n for (const issue of issues) {\n const hash = hashIssue(issue);\n \n // Skip duplicates within the same scan\n if (seenHashes.has(hash)) {\n duplicates++;\n continue;\n }\n seenHashes.add(hash);\n \n const storedIssue: StoredIssue = {\n id: issue.id,\n hash,\n severity: issue.severity,\n issue: issue.issue,\n fix: issue.fix,\n file: issue.file,\n line: issue.line,\n agent: issue.agent,\n category: issue.category,\n timestamp: now,\n project,\n resolved: false,\n resolvedAt: undefined,\n };\n stored.push(storedIssue);\n }\n\n await appendToDailyLog(stored, projectDir);\n const dedupedCount = await updateIssueIndex(stored, projectDir);\n \n return { stored: dedupedCount, duplicates: duplicates + (stored.length - dedupedCount) };\n}\n\n/**\n * Search issues using 
BM25 ranking (same algorithm as Elasticsearch)\n */\nexport async function searchIssues(\n query: string,\n options: {\n workDir?: string;\n limit?: number;\n project?: string;\n severity?: string[];\n agent?: string;\n includeResolved?: boolean;\n } = {}\n): Promise<IssueSearchResult[]> {\n const projectDir = options.workDir || getWorkingDirectory(undefined, true);\n const limit = options.limit || 10;\n const allIssues = await loadIssueIndex(projectDir);\n \n if (allIssues.length === 0) {\n return [];\n }\n\n // Filter issues first\n const filteredIssues = allIssues.filter(issue => {\n if (options.project && issue.project !== options.project) return false;\n if (options.severity && !options.severity.includes(issue.severity)) return false;\n if (options.agent && issue.agent !== options.agent) return false;\n if (!options.includeResolved && issue.resolved) return false;\n return true;\n });\n\n if (filteredIssues.length === 0) {\n return [];\n }\n\n // Build BM25 index\n const bm25 = new BM25Index();\n const issueMap = new Map<string, StoredIssue>();\n \n for (const issue of filteredIssues) {\n const searchText = `${issue.issue} ${issue.fix} ${issue.file} ${issue.agent} ${issue.category || ''} ${issue.severity}`;\n bm25.addDocument({\n id: issue.id,\n text: searchText,\n });\n issueMap.set(issue.id, issue);\n }\n\n // Search with BM25\n const bm25Results = bm25.search(query, limit);\n \n return bm25Results.map(result => ({\n issue: issueMap.get(result.id)!,\n score: result.score,\n matchType: 'bm25' as const,\n }));\n}\n\n/**\n * Find similar issues using BM25 similarity\n */\nexport async function findSimilarIssues(\n issue: Issue,\n options: {\n workDir?: string;\n limit?: number;\n excludeSameFile?: boolean;\n } = {}\n): Promise<IssueSearchResult[]> {\n // Use the issue description and fix as the query for similarity\n const query = `${issue.issue} ${issue.fix} ${issue.agent}`;\n const searchOptions: Parameters<typeof searchIssues>[1] = {\n limit: (options.limit || 5) + 5, // Get extra to account for filtering\n includeResolved: true,\n };\n if (options.workDir !== undefined) {\n searchOptions.workDir = options.workDir;\n }\n const results = await searchIssues(query, searchOptions);\n\n let filtered = results.filter(r => r.issue.id !== issue.id);\n \n if (options.excludeSameFile) {\n filtered = filtered.filter(r => r.issue.file !== issue.file);\n }\n \n return filtered.slice(0, options.limit || 5);\n}\n\n/**\n * Mark an issue as resolved\n */\nexport async function markIssueResolved(\n issueId: string,\n workDir?: string\n): Promise<boolean> {\n const projectDir = workDir || getWorkingDirectory(undefined, true);\n const index = await loadIssueIndex(projectDir);\n \n const issue = index.find(i => i.id === issueId);\n if (!issue) return false;\n \n issue.resolved = true;\n issue.resolvedAt = new Date().toISOString();\n \n await saveIssueIndex(index, projectDir);\n return true;\n}\n\n/**\n * Get memory statistics including historical insights\n */\nexport async function getMemoryStats(workDir?: string): Promise<IssueMemoryStats> {\n const projectDir = workDir || getWorkingDirectory(undefined, true);\n const index = await loadIssueIndex(projectDir);\n const historical = await getHistoricalInsights(projectDir);\n \n const MAX_ISSUES = 1000;\n const uniqueHashes = new Set(index.map(i => i.hash));\n \n const stats: IssueMemoryStats = {\n totalIssues: index.length,\n issuesByAgent: {},\n issuesBySeverity: {},\n oldestIssue: undefined,\n newestIssue: undefined,\n resolvedCount: 0,\n 
historicalIssues: historical.totalHistoricalIssues,\n improvementTrend: historical.improvementTrend,\n capacityInfo: {\n current: index.length,\n max: MAX_ISSUES,\n percentFull: Math.round((index.length / MAX_ISSUES) * 100),\n isAtCap: index.length >= MAX_ISSUES,\n },\n deduplicationStats: {\n duplicatesAvoided: index.length - uniqueHashes.size,\n uniquePatterns: uniqueHashes.size,\n },\n };\n\n for (const issue of index) {\n stats.issuesByAgent[issue.agent] = (stats.issuesByAgent[issue.agent] || 0) + 1;\n stats.issuesBySeverity[issue.severity] = (stats.issuesBySeverity[issue.severity] || 0) + 1;\n if (issue.resolved) stats.resolvedCount++;\n }\n\n if (index.length > 0) {\n const sorted = [...index].sort((a, b) => \n new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime()\n );\n const oldest = sorted[0]?.timestamp;\n const newest = sorted[sorted.length - 1]?.timestamp;\n if (oldest !== undefined) {\n stats.oldestIssue = oldest;\n }\n if (newest !== undefined) {\n stats.newestIssue = newest;\n }\n }\n\n return stats;\n}\n\n/**\n * Get recent issues\n */\nexport async function getRecentIssues(\n options: {\n workDir?: string;\n limit?: number;\n daysBack?: number;\n } = {}\n): Promise<StoredIssue[]> {\n const projectDir = options.workDir || getWorkingDirectory(undefined, true);\n const index = await loadIssueIndex(projectDir);\n const limit = options.limit || 20;\n const daysBack = options.daysBack || 7;\n \n const cutoff = new Date();\n cutoff.setDate(cutoff.getDate() - daysBack);\n \n return index\n .filter(i => new Date(i.timestamp) >= cutoff)\n .sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime())\n .slice(0, limit);\n}\n\n/**\n * Purge issues from memory\n * Offers different strategies for managing memory capacity\n */\nexport async function purgeIssues(\n strategy: 'smart' | 'resolved' | 'old' | 'all',\n options: {\n workDir?: string;\n daysOld?: number;\n } = {}\n): Promise<{ removed: number; remaining: number; strategy: string }> {\n const projectDir = options.workDir || getWorkingDirectory(undefined, true);\n const index = await loadIssueIndex(projectDir);\n const originalCount = index.length;\n \n let remaining: StoredIssue[] = [];\n \n switch (strategy) {\n case 'smart':\n // Keep: critical/high severity, recent (< 30 days), unresolved\n const thirtyDaysAgo = new Date();\n thirtyDaysAgo.setDate(thirtyDaysAgo.getDate() - 30);\n \n remaining = index.filter(i => {\n const isRecent = new Date(i.timestamp) >= thirtyDaysAgo;\n const isImportant = ['critical', 'high'].includes(i.severity);\n const isUnresolved = !i.resolved;\n \n return isRecent || isImportant || isUnresolved;\n });\n break;\n \n case 'resolved':\n // Remove all resolved issues\n remaining = index.filter(i => !i.resolved);\n break;\n \n case 'old':\n // Remove issues older than specified days (default 90)\n const daysOld = options.daysOld || 90;\n const cutoffDate = new Date();\n cutoffDate.setDate(cutoffDate.getDate() - daysOld);\n \n remaining = index.filter(i => new Date(i.timestamp) >= cutoffDate);\n break;\n \n case 'all':\n // Clear all issues (keeps compacted summaries)\n remaining = [];\n break;\n }\n \n await saveIssueIndex(remaining, projectDir);\n \n return {\n removed: originalCount - remaining.length,\n remaining: remaining.length,\n strategy,\n };\n}\n\n/**\n * Get daily log files\n */\nexport async function getDailyLogs(workDir?: string): Promise<string[]> {\n const projectDir = workDir || getWorkingDirectory(undefined, true);\n const memoryDir = join(projectDir, 
'.trie', 'memory');\n \n try {\n if (!existsSync(memoryDir)) return [];\n const files = await readdir(memoryDir);\n return files\n .filter(f => /^\\d{4}-\\d{2}-\\d{2}\\.md$/.test(f))\n .sort()\n .reverse();\n } catch {\n return [];\n }\n}\n\n// Private helpers\n\nasync function appendToDailyLog(issues: StoredIssue[], projectDir: string): Promise<void> {\n const memoryDir = join(projectDir, '.trie', 'memory');\n const today = new Date().toISOString().split('T')[0];\n const logPath = join(memoryDir, `${today}.md`);\n\n let content = '';\n \n try {\n if (existsSync(logPath)) {\n content = await readFile(logPath, 'utf-8');\n } else {\n content = `# Issue Log: ${today}\\n\\n`;\n }\n } catch {\n content = `# Issue Log: ${today}\\n\\n`;\n }\n\n const time = new Date().toTimeString().split(' ')[0];\n \n const newEntries = issues.map(i => \n `## [${time}] ${i.severity.toUpperCase()}: ${i.issue.slice(0, 80)}${i.issue.length > 80 ? '...' : ''}\\n` +\n `- **File:** \\`${i.file}\\`${i.line ? `:${i.line}` : ''}\\n` +\n `- **Agent:** ${i.agent}\\n` +\n `- **Fix:** ${i.fix.slice(0, 200)}${i.fix.length > 200 ? '...' : ''}\\n`\n ).join('\\n');\n\n content += newEntries + '\\n';\n \n await writeFile(logPath, content);\n}\n\n/**\n * Load issue index with validation and auto-recovery\n * \n * If the file is corrupted:\n * 1. Attempts to recover from the most recent valid backup\n * 2. Returns empty array if no valid backup exists\n */\nasync function loadIssueIndex(projectDir: string): Promise<StoredIssue[]> {\n const indexPath = join(projectDir, '.trie', 'memory', 'issues.json');\n \n try {\n if (existsSync(indexPath)) {\n const content = await readFile(indexPath, 'utf-8');\n const result = safeParseAndValidate(content, IssueIndexSchema);\n \n if (result.success) {\n return result.data as StoredIssue[];\n }\n \n // Validation failed - attempt recovery from backup\n console.error(` Issue index corrupted: ${result.error}`);\n const backupManager = new BackupManager(indexPath);\n \n if (await backupManager.recoverFromBackup()) {\n console.error(' ✅ Recovered from backup');\n const recovered = await readFile(indexPath, 'utf-8');\n const recoveredResult = safeParseAndValidate(recovered, IssueIndexSchema);\n if (recoveredResult.success) {\n return recoveredResult.data as StoredIssue[];\n }\n }\n \n console.error(' No valid backup found, starting fresh');\n }\n } catch {\n // Index doesn't exist or recovery failed\n }\n \n return [];\n}\n\nasync function updateIssueIndex(newIssues: StoredIssue[], projectDir: string): Promise<number> {\n const memoryDir = join(projectDir, '.trie', 'memory');\n await mkdir(memoryDir, { recursive: true });\n \n let existing = await loadIssueIndex(projectDir);\n \n // Intelligent deduplication: only add truly unique issues\n // Issues are unique if they have different hash (content + file + severity + agent)\n const hashSet = new Set(existing.map(i => i.hash));\n const toAdd = newIssues.filter(i => !hashSet.has(i.hash));\n const dedupedCount = toAdd.length;\n \n existing = [...existing, ...toAdd];\n \n // Intelligent compaction: summarize old issues instead of deleting\n if (existing.length > 500) {\n const { summary, remaining } = await compactOldIssues(existing, {\n keepDays: 30,\n minIssuesToCompact: 100,\n });\n \n if (summary) {\n await saveCompactedSummary(summary, projectDir);\n existing = remaining;\n }\n }\n \n // Hard cap: prune to 1000 if still too large\n // Prioritize: 1) Recent issues, 2) High severity, 3) Unresolved\n if (existing.length > 1000) {\n existing = 
intelligentPrune(existing, 1000);\n }\n \n await saveIssueIndex(existing, projectDir);\n return dedupedCount;\n}\n\n/**\n * Intelligently prune issues to target count\n * Prioritizes: recent, high severity, unresolved\n */\nfunction intelligentPrune(issues: StoredIssue[], targetCount: number): StoredIssue[] {\n const severityWeight: Record<string, number> = {\n critical: 100,\n high: 50,\n moderate: 20,\n low: 10,\n info: 5,\n };\n \n const scored = issues.map(issue => {\n const ageInDays = (Date.now() - new Date(issue.timestamp).getTime()) / (1000 * 60 * 60 * 24);\n const recencyScore = Math.max(0, 100 - ageInDays * 2); // Newer = higher score\n const severityScore = severityWeight[issue.severity] || 10;\n const resolvedPenalty = issue.resolved ? -50 : 0;\n \n return {\n issue,\n score: recencyScore + severityScore + resolvedPenalty,\n };\n });\n \n return scored\n .sort((a, b) => b.score - a.score)\n .slice(0, targetCount)\n .map(s => s.issue);\n}\n\n/**\n * Save issue index with backup and atomic write\n * \n * 1. Creates a backup of the existing file\n * 2. Writes the new data atomically (temp file + rename)\n * 3. Maintains up to 5 rotational backups\n */\nasync function saveIssueIndex(issues: StoredIssue[], projectDir: string): Promise<void> {\n const memoryDir = join(projectDir, '.trie', 'memory');\n await mkdir(memoryDir, { recursive: true });\n \n const indexPath = join(memoryDir, 'issues.json');\n \n // Create backup before writing\n const backupManager = new BackupManager(indexPath);\n await backupManager.createBackup();\n \n // Atomic write\n await atomicWriteJSON(indexPath, issues);\n}\n\n/**\n * Hash an issue using SHA256 for proper deduplication\n * \n * Uses cryptographic hashing to eliminate collision risk.\n * The hash is truncated to 16 characters for storage efficiency\n * while still providing effectively zero collision probability.\n */\nfunction hashIssue(issue: Issue): string {\n const content = `${issue.issue}|${issue.file}|${issue.severity}|${issue.agent}`;\n return createHash('sha256').update(content).digest('hex').slice(0, 16);\n}\n\n","/**\n * BM25 Search Implementation\n * \n * BM25 (Best Match 25) is a ranking function used by search engines.\n * It's more sophisticated than TF-IDF and handles term frequency saturation.\n * \n * This is the same algorithm used by Elasticsearch.\n */\n\nexport interface BM25Document {\n id: string;\n text: string;\n metadata?: Record<string, unknown>;\n}\n\nexport interface BM25Result {\n id: string;\n score: number;\n metadata?: Record<string, unknown>;\n}\n\nexport class BM25Index {\n private documents: Map<string, BM25Document> = new Map();\n private termFrequencies: Map<string, Map<string, number>> = new Map();\n private documentFrequencies: Map<string, number> = new Map();\n private documentLengths: Map<string, number> = new Map();\n private avgDocLength: number = 0;\n private k1: number = 1.5;\n private b: number = 0.75;\n\n /**\n * Add a document to the index\n */\n addDocument(doc: BM25Document): void {\n const tokens = this.tokenize(doc.text);\n this.documents.set(doc.id, doc);\n this.documentLengths.set(doc.id, tokens.length);\n\n const termFreq = new Map<string, number>();\n const seenTerms = new Set<string>();\n\n for (const token of tokens) {\n termFreq.set(token, (termFreq.get(token) || 0) + 1);\n \n if (!seenTerms.has(token)) {\n seenTerms.add(token);\n this.documentFrequencies.set(token, (this.documentFrequencies.get(token) || 0) + 1);\n }\n }\n\n this.termFrequencies.set(doc.id, termFreq);\n 
this.updateAvgDocLength();\n }\n\n /**\n * Add multiple documents\n */\n addDocuments(docs: BM25Document[]): void {\n for (const doc of docs) {\n this.addDocument(doc);\n }\n }\n\n /**\n * Search the index\n */\n search(query: string, limit: number = 10): BM25Result[] {\n const queryTokens = this.tokenize(query);\n const scores: Map<string, number> = new Map();\n const N = this.documents.size;\n\n for (const [docId] of this.documents) {\n let score = 0;\n const docLength = this.documentLengths.get(docId) || 0;\n const termFreqs = this.termFrequencies.get(docId);\n\n if (!termFreqs) continue;\n\n for (const term of queryTokens) {\n const tf = termFreqs.get(term) || 0;\n if (tf === 0) continue;\n\n const df = this.documentFrequencies.get(term) || 0;\n const idf = Math.log((N - df + 0.5) / (df + 0.5) + 1);\n\n const numerator = tf * (this.k1 + 1);\n const denominator = tf + this.k1 * (1 - this.b + this.b * (docLength / this.avgDocLength));\n \n score += idf * (numerator / denominator);\n }\n\n if (score > 0) {\n scores.set(docId, score);\n }\n }\n\n return Array.from(scores.entries())\n .sort((a, b) => b[1] - a[1])\n .slice(0, limit)\n .map(([id, score]) => {\n const metadata = this.documents.get(id)?.metadata;\n const result: BM25Result = { id, score };\n if (metadata !== undefined) {\n result.metadata = metadata;\n }\n return result;\n });\n }\n\n /**\n * Get document count\n */\n get size(): number {\n return this.documents.size;\n }\n\n /**\n * Clear the index\n */\n clear(): void {\n this.documents.clear();\n this.termFrequencies.clear();\n this.documentFrequencies.clear();\n this.documentLengths.clear();\n this.avgDocLength = 0;\n }\n\n /**\n * Serialize the index to JSON\n */\n serialize(): string {\n return JSON.stringify({\n documents: Array.from(this.documents.entries()),\n termFrequencies: Array.from(this.termFrequencies.entries()).map(([k, v]) => [k, Array.from(v.entries())]),\n documentFrequencies: Array.from(this.documentFrequencies.entries()),\n documentLengths: Array.from(this.documentLengths.entries()),\n avgDocLength: this.avgDocLength,\n });\n }\n\n /**\n * Load from serialized JSON\n */\n static deserialize(json: string): BM25Index {\n const data = JSON.parse(json);\n const index = new BM25Index();\n \n index.documents = new Map(data.documents);\n index.termFrequencies = new Map(data.termFrequencies.map(([k, v]: [string, [string, number][]]) => [k, new Map(v)]));\n index.documentFrequencies = new Map(data.documentFrequencies);\n index.documentLengths = new Map(data.documentLengths);\n index.avgDocLength = data.avgDocLength;\n \n return index;\n }\n\n private tokenize(text: string): string[] {\n return text\n .toLowerCase()\n .replace(/[^\\w\\s]/g, ' ')\n .split(/\\s+/)\n .filter(token => token.length > 2 && !this.isStopWord(token));\n }\n\n private isStopWord(word: string): boolean {\n const stopWords = new Set([\n 'the', 'be', 'to', 'of', 'and', 'a', 'in', 'that', 'have', 'i',\n 'it', 'for', 'not', 'on', 'with', 'he', 'as', 'you', 'do', 'at',\n 'this', 'but', 'his', 'by', 'from', 'they', 'we', 'say', 'her', 'she',\n 'or', 'an', 'will', 'my', 'one', 'all', 'would', 'there', 'their', 'what',\n 'so', 'up', 'out', 'if', 'about', 'who', 'get', 'which', 'go', 'me',\n 'when', 'make', 'can', 'like', 'time', 'no', 'just', 'him', 'know', 'take',\n 'into', 'year', 'your', 'some', 'could', 'them', 'see', 'other', 'than', 'then',\n 'now', 'look', 'only', 'come', 'its', 'over', 'also', 'back', 'after', 'use',\n 'two', 'how', 'our', 'first', 'way', 'even', 'new', 'want', 'because', 
'any',\n 'these', 'give', 'day', 'most', 'us', 'should', 'been', 'has', 'was', 'are',\n ]);\n return stopWords.has(word);\n }\n\n private updateAvgDocLength(): void {\n if (this.documentLengths.size === 0) {\n this.avgDocLength = 0;\n return;\n }\n const total = Array.from(this.documentLengths.values()).reduce((a, b) => a + b, 0);\n this.avgDocLength = total / this.documentLengths.size;\n }\n}\n","/**\n * Memory Compactor\n * \n * Intelligently compacts old issues into summaries instead of deleting them.\n * Preserves patterns and insights while reducing storage.\n * \n * Phase 1 Hardening:\n * - Atomic writes to prevent corruption\n * - Backup rotation for recovery\n * - Zod validation for data integrity\n */\n\nimport { mkdir, readFile } from 'fs/promises';\nimport { existsSync } from 'fs';\nimport { join } from 'path';\nimport type { StoredIssue } from './issue-store.js';\nimport { atomicWriteJSON } from '../utils/atomic-write.js';\nimport { BackupManager } from '../utils/backup-manager.js';\nimport { CompactedSummariesIndexSchema, safeParseAndValidate } from './validation.js';\n\nexport interface CompactedSummary {\n period: string;\n startDate: string;\n endDate: string;\n totalIssues: number;\n resolvedCount: number;\n bySeverity: Record<string, number>;\n byAgent: Record<string, number>;\n topPatterns: PatternSummary[];\n hotFiles: { file: string; count: number }[];\n compactedAt: string;\n}\n\nexport interface PatternSummary {\n pattern: string;\n count: number;\n severity: string;\n agent: string;\n exampleFix: string;\n}\n\n/**\n * Compact old issues into summaries\n * Returns the compacted summary and the remaining (recent) issues\n */\nexport async function compactOldIssues(\n issues: StoredIssue[],\n options: {\n keepDays?: number;\n minIssuesToCompact?: number;\n } = {}\n): Promise<{ summary: CompactedSummary | null; remaining: StoredIssue[] }> {\n const keepDays = options.keepDays ?? 30;\n const minIssues = options.minIssuesToCompact ?? 
100;\n \n const cutoffDate = new Date();\n cutoffDate.setDate(cutoffDate.getDate() - keepDays);\n \n const oldIssues = issues.filter(i => new Date(i.timestamp) < cutoffDate);\n const recentIssues = issues.filter(i => new Date(i.timestamp) >= cutoffDate);\n \n // Only compact if we have enough old issues\n if (oldIssues.length < minIssues) {\n return { summary: null, remaining: issues };\n }\n \n // Build summary\n const summary = buildSummary(oldIssues);\n \n return { summary, remaining: recentIssues };\n}\n\n/**\n * Build a summary from a set of issues\n */\nfunction buildSummary(issues: StoredIssue[]): CompactedSummary {\n const sorted = issues.sort((a, b) => \n new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime()\n );\n \n const bySeverity: Record<string, number> = {};\n const byAgent: Record<string, number> = {};\n const patternMap: Map<string, { count: number; issue: StoredIssue }> = new Map();\n const fileCount: Map<string, number> = new Map();\n \n for (const issue of issues) {\n // Count by severity\n bySeverity[issue.severity] = (bySeverity[issue.severity] || 0) + 1;\n \n // Count by agent\n byAgent[issue.agent] = (byAgent[issue.agent] || 0) + 1;\n \n // Track patterns (normalized issue text)\n const patternKey = normalizePattern(issue.issue);\n const existing = patternMap.get(patternKey);\n if (existing) {\n existing.count++;\n } else {\n patternMap.set(patternKey, { count: 1, issue });\n }\n \n // Count files\n const fileName = issue.file.split('/').pop() || issue.file;\n fileCount.set(fileName, (fileCount.get(fileName) || 0) + 1);\n }\n \n // Get top patterns\n const topPatterns = Array.from(patternMap.entries())\n .sort((a, b) => b[1].count - a[1].count)\n .slice(0, 10)\n .map(([pattern, data]) => ({\n pattern: pattern.slice(0, 100),\n count: data.count,\n severity: data.issue.severity,\n agent: data.issue.agent,\n exampleFix: data.issue.fix.slice(0, 200),\n }));\n \n // Get hot files\n const hotFiles = Array.from(fileCount.entries())\n .sort((a, b) => b[1] - a[1])\n .slice(0, 10)\n .map(([file, count]) => ({ file, count }));\n \n return {\n period: `${sorted[0]?.timestamp.split('T')[0]} to ${sorted[sorted.length - 1]?.timestamp.split('T')[0]}`,\n startDate: sorted[0]?.timestamp || '',\n endDate: sorted[sorted.length - 1]?.timestamp || '',\n totalIssues: issues.length,\n resolvedCount: issues.filter(i => i.resolved).length,\n bySeverity,\n byAgent,\n topPatterns,\n hotFiles,\n compactedAt: new Date().toISOString(),\n };\n}\n\n/**\n * Normalize issue text for pattern matching\n */\nfunction normalizePattern(text: string): string {\n return text\n .toLowerCase()\n .replace(/`[^`]+`/g, 'CODE')\n .replace(/\\b\\d+\\b/g, 'N')\n .replace(/[\"']/g, '')\n .replace(/\\s+/g, ' ')\n .trim()\n .slice(0, 150);\n}\n\n/**\n * Save compacted summary to disk with atomic write and backup\n */\nexport async function saveCompactedSummary(\n summary: CompactedSummary,\n projectDir: string\n): Promise<void> {\n const memoryDir = join(projectDir, '.trie', 'memory');\n await mkdir(memoryDir, { recursive: true });\n \n const summaryPath = join(memoryDir, 'compacted-summaries.json');\n \n let summaries: CompactedSummary[] = [];\n try {\n if (existsSync(summaryPath)) {\n const content = await readFile(summaryPath, 'utf-8');\n const result = safeParseAndValidate(content, CompactedSummariesIndexSchema);\n if (result.success) {\n summaries = result.data as CompactedSummary[];\n }\n }\n } catch {\n summaries = [];\n }\n \n summaries.push(summary);\n \n // Keep only last 12 summaries (1 year of 
monthly summaries)\n if (summaries.length > 12) {\n summaries = summaries.slice(-12);\n }\n \n // Create backup before writing\n const backupManager = new BackupManager(summaryPath);\n await backupManager.createBackup();\n \n // Atomic write\n await atomicWriteJSON(summaryPath, summaries);\n}\n\n/**\n * Load compacted summaries with validation and auto-recovery\n */\nexport async function loadCompactedSummaries(projectDir: string): Promise<CompactedSummary[]> {\n const summaryPath = join(projectDir, '.trie', 'memory', 'compacted-summaries.json');\n \n try {\n if (existsSync(summaryPath)) {\n const content = await readFile(summaryPath, 'utf-8');\n const result = safeParseAndValidate(content, CompactedSummariesIndexSchema);\n \n if (result.success) {\n return result.data as CompactedSummary[];\n }\n \n // Validation failed - attempt recovery\n const backupManager = new BackupManager(summaryPath);\n if (await backupManager.recoverFromBackup()) {\n const recovered = await readFile(summaryPath, 'utf-8');\n const recoveredResult = safeParseAndValidate(recovered, CompactedSummariesIndexSchema);\n if (recoveredResult.success) {\n return recoveredResult.data as CompactedSummary[];\n }\n }\n }\n } catch {\n // File doesn't exist or recovery failed\n }\n \n return [];\n}\n\n/**\n * Generate a markdown summary of compacted history\n */\nexport function formatCompactedSummary(summary: CompactedSummary): string {\n const lines: string[] = [\n `## Compacted Summary: ${summary.period}`,\n '',\n `**Total Issues:** ${summary.totalIssues} (${summary.resolvedCount} resolved)`,\n '',\n '### By Severity',\n ...Object.entries(summary.bySeverity).map(([s, c]) => `- ${s}: ${c}`),\n '',\n '### Top Patterns',\n ...summary.topPatterns.slice(0, 5).map(p => \n `- **${p.pattern.slice(0, 50)}...** (${p.count}x, ${p.severity})`\n ),\n '',\n '### Hot Files',\n ...summary.hotFiles.slice(0, 5).map(f => `- ${f.file}: ${f.count} issues`),\n ];\n \n return lines.join('\\n');\n}\n\n/**\n * Get insights from compacted history\n */\nexport async function getHistoricalInsights(projectDir: string): Promise<{\n totalHistoricalIssues: number;\n recurringPatterns: PatternSummary[];\n improvementTrend: 'improving' | 'stable' | 'declining' | 'unknown';\n}> {\n const summaries = await loadCompactedSummaries(projectDir);\n \n if (summaries.length === 0) {\n return {\n totalHistoricalIssues: 0,\n recurringPatterns: [],\n improvementTrend: 'unknown',\n };\n }\n \n const totalHistoricalIssues = summaries.reduce((sum, s) => sum + s.totalIssues, 0);\n \n // Find patterns that appear across multiple summaries\n const patternCounts: Map<string, PatternSummary & { appearances: number }> = new Map();\n \n for (const summary of summaries) {\n for (const pattern of summary.topPatterns) {\n const key = pattern.pattern;\n const existing = patternCounts.get(key);\n if (existing) {\n existing.count += pattern.count;\n existing.appearances++;\n } else {\n patternCounts.set(key, { ...pattern, appearances: 1 });\n }\n }\n }\n \n const recurringPatterns = Array.from(patternCounts.values())\n .filter(p => p.appearances >= 2)\n .sort((a, b) => b.count - a.count)\n .slice(0, 5);\n \n // Calculate improvement trend\n let improvementTrend: 'improving' | 'stable' | 'declining' | 'unknown' = 'unknown';\n \n if (summaries.length >= 2) {\n const recent = summaries.slice(-2);\n const olderCount = recent[0]?.totalIssues || 0;\n const newerCount = recent[1]?.totalIssues || 0;\n \n if (newerCount < olderCount * 0.8) {\n improvementTrend = 'improving';\n } else if 
(newerCount > olderCount * 1.2) {\n improvementTrend = 'declining';\n } else {\n improvementTrend = 'stable';\n }\n }\n \n return {\n totalHistoricalIssues,\n recurringPatterns,\n improvementTrend,\n };\n}\n","/**\n * Atomic File Write Utility\n * \n * Provides safe file writes using the temp file + rename pattern.\n * Prevents data corruption from interrupted writes or crashes.\n * \n * Pattern:\n * 1. Write to a temporary file with random suffix\n * 2. Rename temp file to target path (atomic on POSIX systems)\n * 3. Clean up temp file on failure\n */\n\nimport { writeFile, rename, unlink, mkdir } from 'fs/promises';\nimport { randomBytes } from 'crypto';\nimport { dirname } from 'path';\n\nexport interface AtomicWriteOptions {\n /**\n * Create parent directories if they don't exist\n * @default true\n */\n createDir?: boolean;\n \n /**\n * File encoding for string data\n * @default 'utf-8'\n */\n encoding?: BufferEncoding;\n}\n\n/**\n * Write data to a file atomically\n * \n * Uses a temp file + rename pattern to ensure the target file\n * is never in a partially written state. On POSIX systems,\n * rename() is atomic, so readers will always see either the\n * old complete file or the new complete file.\n * \n * @param filePath - Target file path\n * @param data - Data to write (string or Buffer)\n * @param options - Write options\n * @throws Error if write or rename fails\n * \n * @example\n * ```typescript\n * await atomicWriteFile('/path/to/data.json', JSON.stringify(data, null, 2));\n * ```\n */\nexport async function atomicWriteFile(\n filePath: string,\n data: string | Buffer,\n options: AtomicWriteOptions = {}\n): Promise<void> {\n const { createDir = true, encoding = 'utf-8' } = options;\n \n // Generate a unique temp file path\n const tempSuffix = randomBytes(6).toString('hex');\n const tempPath = `${filePath}.${tempSuffix}.tmp`;\n \n try {\n // Ensure parent directory exists\n if (createDir) {\n await mkdir(dirname(filePath), { recursive: true });\n }\n \n // Write to temp file\n if (typeof data === 'string') {\n await writeFile(tempPath, data, { encoding });\n } else {\n await writeFile(tempPath, data);\n }\n \n // Atomic rename to target path\n await rename(tempPath, filePath);\n } catch (error) {\n // Clean up temp file on failure\n try {\n await unlink(tempPath);\n } catch {\n // Ignore cleanup errors - temp file might not exist\n }\n throw error;\n }\n}\n\n/**\n * Write JSON data to a file atomically\n * \n * Convenience wrapper that handles JSON serialization\n * with optional pretty printing.\n * \n * @param filePath - Target file path\n * @param data - Data to serialize as JSON\n * @param options - Write options and JSON formatting\n * @throws Error if serialization, write, or rename fails\n * \n * @example\n * ```typescript\n * await atomicWriteJSON('/path/to/config.json', { key: 'value' });\n * ```\n */\nexport async function atomicWriteJSON(\n filePath: string,\n data: unknown,\n options: AtomicWriteOptions & {\n /**\n * Spaces for JSON pretty printing\n * @default 2\n */\n spaces?: number;\n } = {}\n): Promise<void> {\n const { spaces = 2, ...writeOptions } = options;\n const json = JSON.stringify(data, null, spaces);\n await atomicWriteFile(filePath, json, writeOptions);\n}\n","/**\n * Backup Manager\n * \n * Maintains rotational backups of critical data files.\n * Provides automatic recovery from corrupted files.\n * \n * Features:\n * - N rotational backups (default: 5)\n * - Automatic pruning of old backups\n * - Validation before recovery\n * - Timestamp-based 
backup naming\n */\n\nimport { copyFile, readdir, unlink, readFile, stat } from 'fs/promises';\nimport { existsSync } from 'fs';\nimport { dirname, basename, join } from 'path';\n\nexport interface BackupOptions {\n /**\n * Maximum number of backups to keep\n * @default 5\n */\n maxBackups?: number;\n \n /**\n * Custom validator function for backup validation\n * Returns true if the backup is valid\n */\n validator?: (content: string) => boolean;\n}\n\nexport interface BackupInfo {\n path: string;\n timestamp: number;\n size: number;\n}\n\n/**\n * Manages rotational backups for a file\n * \n * @example\n * ```typescript\n * const manager = new BackupManager('/path/to/issues.json');\n * \n * // Create backup before modifying\n * await manager.createBackup();\n * \n * // If corruption detected, recover\n * if (await manager.recoverFromBackup()) {\n * console.log('Recovered from backup');\n * }\n * ```\n */\nexport class BackupManager {\n private readonly filePath: string;\n private readonly maxBackups: number;\n private readonly validator: ((content: string) => boolean) | undefined;\n private readonly backupDir: string;\n private readonly baseFileName: string;\n\n constructor(filePath: string, options: BackupOptions = {}) {\n this.filePath = filePath;\n this.maxBackups = options.maxBackups ?? 5;\n this.validator = options.validator;\n this.backupDir = dirname(filePath);\n this.baseFileName = basename(filePath);\n }\n\n /**\n * Create a backup of the current file\n * \n * @returns The backup file path, or null if source doesn't exist\n */\n async createBackup(): Promise<string | null> {\n if (!existsSync(this.filePath)) {\n return null;\n }\n\n const timestamp = Date.now();\n const backupPath = this.getBackupPath(timestamp);\n\n await copyFile(this.filePath, backupPath);\n await this.pruneOldBackups();\n\n return backupPath;\n }\n\n /**\n * List all backups sorted by timestamp (newest first)\n */\n async listBackups(): Promise<BackupInfo[]> {\n if (!existsSync(this.backupDir)) {\n return [];\n }\n\n const files = await readdir(this.backupDir);\n const backupPattern = new RegExp(\n `^${this.escapeRegex(this.baseFileName)}\\\\.backup\\\\.(\\\\d+)$`\n );\n\n const backups: BackupInfo[] = [];\n\n for (const file of files) {\n const match = file.match(backupPattern);\n if (match) {\n const timestamp = parseInt(match[1]!, 10);\n const backupPath = join(this.backupDir, file);\n \n try {\n const stats = await stat(backupPath);\n backups.push({\n path: backupPath,\n timestamp,\n size: stats.size,\n });\n } catch {\n // Skip files we can't stat\n }\n }\n }\n\n // Sort by timestamp, newest first\n return backups.sort((a, b) => b.timestamp - a.timestamp);\n }\n\n /**\n * Find the first valid backup\n * \n * Iterates through backups from newest to oldest,\n * returning the first one that passes validation.\n * \n * @returns Path to valid backup, or null if none found\n */\n async findValidBackup(): Promise<string | null> {\n const backups = await this.listBackups();\n\n for (const backup of backups) {\n if (await this.validateBackup(backup.path)) {\n return backup.path;\n }\n }\n\n return null;\n }\n\n /**\n * Validate a backup file\n * \n * If a custom validator was provided, uses that.\n * Otherwise, attempts JSON parse for .json files.\n * \n * @returns true if backup is valid\n */\n async validateBackup(backupPath: string): Promise<boolean> {\n try {\n const content = await readFile(backupPath, 'utf-8');\n\n // Use custom validator if provided\n if (this.validator) {\n return 
this.validator(content);\n }\n\n // Default validation: check if it's valid JSON for .json files\n if (this.filePath.endsWith('.json')) {\n JSON.parse(content);\n return true;\n }\n\n // For non-JSON files, just check it's readable\n return content.length > 0;\n } catch {\n return false;\n }\n }\n\n /**\n * Recover from the most recent valid backup\n * \n * @returns true if recovery was successful\n */\n async recoverFromBackup(): Promise<boolean> {\n const validBackup = await this.findValidBackup();\n \n if (!validBackup) {\n return false;\n }\n\n await copyFile(validBackup, this.filePath);\n return true;\n }\n\n /**\n * Remove old backups beyond the max limit\n */\n private async pruneOldBackups(): Promise<void> {\n const backups = await this.listBackups();\n\n // Remove backups beyond the limit\n const toRemove = backups.slice(this.maxBackups);\n \n for (const backup of toRemove) {\n try {\n await unlink(backup.path);\n } catch {\n // Ignore errors - backup might already be removed\n }\n }\n }\n\n /**\n * Get the backup path for a given timestamp\n */\n private getBackupPath(timestamp: number): string {\n return join(this.backupDir, `${this.baseFileName}.backup.${timestamp}`);\n }\n\n /**\n * Escape special regex characters in a string\n */\n private escapeRegex(str: string): string {\n return str.replace(/[.*+?^${}()|[\\]\\\\]/g, '\\\\$&');\n }\n\n /**\n * Get the number of existing backups\n */\n async getBackupCount(): Promise<number> {\n const backups = await this.listBackups();\n return backups.length;\n }\n\n /**\n * Get the most recent backup timestamp\n */\n async getLatestBackupTime(): Promise<number | null> {\n const backups = await this.listBackups();\n return backups[0]?.timestamp ?? null;\n }\n\n /**\n * Delete all backups\n */\n async clearBackups(): Promise<number> {\n const backups = await this.listBackups();\n let deleted = 0;\n\n for (const backup of backups) {\n try {\n await unlink(backup.path);\n deleted++;\n } catch {\n // Ignore errors\n }\n }\n\n return deleted;\n }\n}\n\n/**\n * Default JSON validator\n * \n * Validates that content is valid JSON and optionally\n * checks for required fields.\n */\nexport function createJSONValidator(requiredFields?: string[]): (content: string) => boolean {\n return (content: string): boolean => {\n try {\n const parsed = JSON.parse(content);\n \n if (requiredFields && Array.isArray(parsed)) {\n // For arrays, check first element has required fields\n if (parsed.length > 0) {\n const first = parsed[0];\n return requiredFields.every(field => field in first);\n }\n // Empty array is valid\n return true;\n }\n \n if (requiredFields && typeof parsed === 'object' && parsed !== null) {\n return requiredFields.every(field => field in parsed);\n }\n \n return true;\n } catch {\n return false;\n }\n };\n}\n","/**\n * Memory Validation\n * \n * Schema validation for memory storage files using Zod.\n * Catches corruption early and provides clear error messages.\n * \n * Used by:\n * - issue-store.ts for issues.json\n * - global-memory.ts for global-patterns.json\n * - compactor.ts for compacted-summaries.json\n */\n\nimport { z } from 'zod';\n\n// ============================================================================\n// Issue Schemas\n// ============================================================================\n\n/**\n * Schema for a stored issue in issues.json\n */\nexport const StoredIssueSchema = z.object({\n id: z.string(),\n hash: z.string(),\n severity: z.string(),\n issue: z.string(),\n fix: z.string(),\n file: 
z.string(),\n line: z.number().optional(),\n agent: z.string(),\n category: z.string().optional(),\n timestamp: z.string(),\n project: z.string(),\n resolved: z.boolean().optional(),\n resolvedAt: z.string().optional(),\n});\n\nexport type ValidatedStoredIssue = z.infer<typeof StoredIssueSchema>;\n\n/**\n * Schema for the issues.json file (array of issues)\n */\nexport const IssueIndexSchema = z.array(StoredIssueSchema);\n\n// ============================================================================\n// Global Pattern Schemas\n// ============================================================================\n\n/**\n * Schema for a fix applied record\n */\nexport const FixAppliedSchema = z.object({\n project: z.string(),\n timestamp: z.string(),\n fix: z.string(),\n});\n\n/**\n * Schema for a global pattern in global-patterns.json\n */\nexport const GlobalPatternSchema = z.object({\n id: z.string(),\n pattern: z.string(),\n description: z.string(),\n severity: z.string(),\n agent: z.string(),\n occurrences: z.number(),\n projects: z.array(z.string()),\n firstSeen: z.string(),\n lastSeen: z.string(),\n fixApplied: FixAppliedSchema.optional(),\n});\n\nexport type ValidatedGlobalPattern = z.infer<typeof GlobalPatternSchema>;\n\n/**\n * Schema for global-patterns.json (array of patterns)\n */\nexport const GlobalPatternsIndexSchema = z.array(GlobalPatternSchema);\n\n// ============================================================================\n// Project Summary Schemas\n// ============================================================================\n\n/**\n * Schema for a project summary\n */\nexport const ProjectSummarySchema = z.object({\n name: z.string(),\n path: z.string(),\n lastScan: z.string(),\n healthScore: z.number(),\n totalIssues: z.number(),\n patterns: z.array(z.string()),\n});\n\nexport type ValidatedProjectSummary = z.infer<typeof ProjectSummarySchema>;\n\n// ============================================================================\n// Compacted Summary Schemas\n// ============================================================================\n\n/**\n * Schema for a pattern summary in compacted data\n */\nexport const PatternSummarySchema = z.object({\n pattern: z.string(),\n count: z.number(),\n severity: z.string(),\n agent: z.string(),\n exampleFix: z.string(),\n});\n\n/**\n * Schema for a hot file entry\n */\nexport const HotFileSchema = z.object({\n file: z.string(),\n count: z.number(),\n});\n\n/**\n * Schema for a compacted summary\n */\nexport const CompactedSummarySchema = z.object({\n period: z.string(),\n startDate: z.string(),\n endDate: z.string(),\n totalIssues: z.number(),\n resolvedCount: z.number(),\n bySeverity: z.record(z.string(), z.number()),\n byAgent: z.record(z.string(), z.number()),\n topPatterns: z.array(PatternSummarySchema),\n hotFiles: z.array(HotFileSchema),\n compactedAt: z.string(),\n});\n\nexport type ValidatedCompactedSummary = z.infer<typeof CompactedSummarySchema>;\n\n/**\n * Schema for compacted-summaries.json (array of summaries)\n */\nexport const CompactedSummariesIndexSchema = z.array(CompactedSummarySchema);\n\n// ============================================================================\n// Validation Error\n// ============================================================================\n\n/**\n * Custom error for validation failures\n */\nexport class ValidationError extends Error {\n constructor(\n message: string,\n public readonly zodError: z.ZodError,\n public readonly filePath?: string\n ) {\n super(message);\n this.name = 
'ValidationError';\n }\n\n /**\n * Get a human-readable summary of validation errors\n */\n getSummary(): string {\n const issues = this.zodError.issues.slice(0, 5);\n const lines = issues.map(issue => {\n const path = issue.path.join('.');\n return ` - ${path}: ${issue.message}`;\n });\n \n if (this.zodError.issues.length > 5) {\n lines.push(` ... and ${this.zodError.issues.length - 5} more errors`);\n }\n \n return lines.join('\\n');\n }\n}\n\n// ============================================================================\n// Validation Functions\n// ============================================================================\n\n/**\n * Validate issue index data\n * \n * @throws ValidationError if data is invalid\n * @returns Validated issue array\n */\nexport function validateIssueIndex(\n data: unknown,\n filePath?: string\n): ValidatedStoredIssue[] {\n const result = IssueIndexSchema.safeParse(data);\n \n if (!result.success) {\n throw new ValidationError(\n `Issue index validation failed${filePath ? ` (${filePath})` : ''}`,\n result.error,\n filePath\n );\n }\n \n return result.data;\n}\n\n/**\n * Validate global patterns data\n * \n * @throws ValidationError if data is invalid\n * @returns Validated patterns array\n */\nexport function validateGlobalPatterns(\n data: unknown,\n filePath?: string\n): ValidatedGlobalPattern[] {\n const result = GlobalPatternsIndexSchema.safeParse(data);\n \n if (!result.success) {\n throw new ValidationError(\n `Global patterns validation failed${filePath ? ` (${filePath})` : ''}`,\n result.error,\n filePath\n );\n }\n \n return result.data;\n}\n\n/**\n * Validate project summary data\n * \n * @throws ValidationError if data is invalid\n * @returns Validated project summary\n */\nexport function validateProjectSummary(\n data: unknown,\n filePath?: string\n): ValidatedProjectSummary {\n const result = ProjectSummarySchema.safeParse(data);\n \n if (!result.success) {\n throw new ValidationError(\n `Project summary validation failed${filePath ? ` (${filePath})` : ''}`,\n result.error,\n filePath\n );\n }\n \n return result.data;\n}\n\n/**\n * Validate compacted summaries data\n * \n * @throws ValidationError if data is invalid\n * @returns Validated summaries array\n */\nexport function validateCompactedSummaries(\n data: unknown,\n filePath?: string\n): ValidatedCompactedSummary[] {\n const result = CompactedSummariesIndexSchema.safeParse(data);\n \n if (!result.success) {\n throw new ValidationError(\n `Compacted summaries validation failed${filePath ? ` (${filePath})` : ''}`,\n result.error,\n filePath\n );\n }\n \n return result.data;\n}\n\n/**\n * Safely parse and validate JSON\n * \n * Combines JSON parsing with schema validation.\n * Returns null on any error instead of throwing.\n */\nexport function safeParseAndValidate<T>(\n content: string,\n schema: z.ZodSchema<T>\n): { success: true; data: T } | { success: false; error: string } {\n try {\n const parsed = JSON.parse(content);\n const result = schema.safeParse(parsed);\n \n if (result.success) {\n return { success: true, data: result.data };\n }\n \n return {\n success: false,\n error: `Validation failed: ${result.error.issues[0]?.message || 'Unknown error'}`,\n };\n } catch (error) {\n return {\n success: false,\n error: `JSON parse failed: ${error instanceof Error ? 
error.message : 'Unknown error'}`,\n };\n }\n}\n\n/**\n * Check if data matches a schema (non-throwing)\n */\nexport function isValidSchema<T>(data: unknown, schema: z.ZodSchema<T>): boolean {\n return schema.safeParse(data).success;\n}\n"],"mappings":";;;;;AAcA,SAAS,SAAAA,QAAO,aAAAC,YAAW,YAAAC,WAAU,WAAAC,gBAAe;AACpD,SAAS,kBAAkB;AAC3B,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,QAAAC,aAAY;;;ACId,IAAM,YAAN,MAAM,WAAU;AAAA,EACb,YAAuC,oBAAI,IAAI;AAAA,EAC/C,kBAAoD,oBAAI,IAAI;AAAA,EAC5D,sBAA2C,oBAAI,IAAI;AAAA,EACnD,kBAAuC,oBAAI,IAAI;AAAA,EAC/C,eAAuB;AAAA,EACvB,KAAa;AAAA,EACb,IAAY;AAAA;AAAA;AAAA;AAAA,EAKpB,YAAY,KAAyB;AACnC,UAAM,SAAS,KAAK,SAAS,IAAI,IAAI;AACrC,SAAK,UAAU,IAAI,IAAI,IAAI,GAAG;AAC9B,SAAK,gBAAgB,IAAI,IAAI,IAAI,OAAO,MAAM;AAE9C,UAAM,WAAW,oBAAI,IAAoB;AACzC,UAAM,YAAY,oBAAI,IAAY;AAElC,eAAW,SAAS,QAAQ;AAC1B,eAAS,IAAI,QAAQ,SAAS,IAAI,KAAK,KAAK,KAAK,CAAC;AAElD,UAAI,CAAC,UAAU,IAAI,KAAK,GAAG;AACzB,kBAAU,IAAI,KAAK;AACnB,aAAK,oBAAoB,IAAI,QAAQ,KAAK,oBAAoB,IAAI,KAAK,KAAK,KAAK,CAAC;AAAA,MACpF;AAAA,IACF;AAEA,SAAK,gBAAgB,IAAI,IAAI,IAAI,QAAQ;AACzC,SAAK,mBAAmB;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA,EAKA,aAAa,MAA4B;AACvC,eAAW,OAAO,MAAM;AACtB,WAAK,YAAY,GAAG;AAAA,IACtB;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,OAAe,QAAgB,IAAkB;AACtD,UAAM,cAAc,KAAK,SAAS,KAAK;AACvC,UAAM,SAA8B,oBAAI,IAAI;AAC5C,UAAM,IAAI,KAAK,UAAU;AAEzB,eAAW,CAAC,KAAK,KAAK,KAAK,WAAW;AACpC,UAAI,QAAQ;AACZ,YAAM,YAAY,KAAK,gBAAgB,IAAI,KAAK,KAAK;AACrD,YAAM,YAAY,KAAK,gBAAgB,IAAI,KAAK;AAEhD,UAAI,CAAC,UAAW;AAEhB,iBAAW,QAAQ,aAAa;AAC9B,cAAM,KAAK,UAAU,IAAI,IAAI,KAAK;AAClC,YAAI,OAAO,EAAG;AAEd,cAAM,KAAK,KAAK,oBAAoB,IAAI,IAAI,KAAK;AACjD,cAAM,MAAM,KAAK,KAAK,IAAI,KAAK,QAAQ,KAAK,OAAO,CAAC;AAEpD,cAAM,YAAY,MAAM,KAAK,KAAK;AAClC,cAAM,cAAc,KAAK,KAAK,MAAM,IAAI,KAAK,IAAI,KAAK,KAAK,YAAY,KAAK;AAE5E,iBAAS,OAAO,YAAY;AAAA,MAC9B;AAEA,UAAI,QAAQ,GAAG;AACb,eAAO,IAAI,OAAO,KAAK;AAAA,MACzB;AAAA,IACF;AAEA,WAAO,MAAM,KAAK,OAAO,QAAQ,CAAC,EAC/B,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,EAC1B,MAAM,GAAG,KAAK,EACd,IAAI,CAAC,CAAC,IAAI,KAAK,MAAM;AACpB,YAAM,WAAW,KAAK,UAAU,IAAI,EAAE,GAAG;AACzC,YAAM,SAAqB,EAAE,IAAI,MAAM;AACvC,UAAI,aAAa,QAAW;AAC1B,eAAO,WAAW;AAAA,MACpB;AACA,aAAO;AAAA,IACT,CAAC;AAAA,EACL;AAAA;AAAA;AAAA;AAAA,EAKA,IAAI,OAAe;AACjB,WAAO,KAAK,UAAU;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA,EAKA,QAAc;AACZ,SAAK,UAAU,MAAM;AACrB,SAAK,gBAAgB,MAAM;AAC3B,SAAK,oBAAoB,MAAM;AAC/B,SAAK,gBAAgB,MAAM;AAC3B,SAAK,eAAe;AAAA,EACtB;AAAA;AAAA;AAAA;AAAA,EAKA,YAAoB;AAClB,WAAO,KAAK,UAAU;AAAA,MACpB,WAAW,MAAM,KAAK,KAAK,UAAU,QAAQ,CAAC;AAAA,MAC9C,iBAAiB,MAAM,KAAK,KAAK,gBAAgB,QAAQ,CAAC,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,MAAM,KAAK,EAAE,QAAQ,CAAC,CAAC,CAAC;AAAA,MACxG,qBAAqB,MAAM,KAAK,KAAK,oBAAoB,QAAQ,CAAC;AAAA,MAClE,iBAAiB,MAAM,KAAK,KAAK,gBAAgB,QAAQ,CAAC;AAAA,MAC1D,cAAc,KAAK;AAAA,IACrB,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA,EAKA,OAAO,YAAY,MAAyB;AAC1C,UAAM,OAAO,KAAK,MAAM,IAAI;AAC5B,UAAM,QAAQ,IAAI,WAAU;AAE5B,UAAM,YAAY,IAAI,IAAI,KAAK,SAAS;AACxC,UAAM,kBAAkB,IAAI,IAAI,KAAK,gBAAgB,IAAI,CAAC,CAAC,GAAG,CAAC,MAAoC,CAAC,GAAG,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC;AACnH,UAAM,sBAAsB,IAAI,IAAI,KAAK,mBAAmB;AAC5D,UAAM,kBAAkB,IAAI,IAAI,KAAK,eAAe;AACpD,UAAM,eAAe,KAAK;AAE1B,WAAO;AAAA,EACT;AAAA,EAEQ,SAAS,MAAwB;AACvC,WAAO,KACJ,YAAY,EACZ,QAAQ,YAAY,GAAG,EACvB,MAAM,KAAK,EACX,OAAO,WAAS,MAAM,SAAS,KAAK,CAAC,KAAK,WAAW,KAAK,CAAC;AAAA,EAChE;AAAA,EAEQ,WAAW,MAAuB;AACxC,UAAM,YAAY,oBAAI,IAAI;AAAA,MACxB;AAAA,MAAO;AAAA,MAAM;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAK;AAAA,MAAM;AAAA,MAAQ;AAAA,MAAQ;AAAA,MAC3D;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAM;AAAA,MAAQ;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAM;AAAA,MAC3D;AAAA,MAAQ;AAAA,MAAO;AAAA,MAAO;AAAA,MAAM;AAAA,MAAQ;AAAA,MAAQ;AAAA,MAAM;AAAA,MAAO;AA
AA,MAAO;AAAA,MAChE;AAAA,MAAM;AAAA,MAAM;AAAA,MAAQ;AAAA,MAAM;AAAA,MAAO;AAAA,MAAO;AAAA,MAAS;AAAA,MAAS;AAAA,MAAS;AAAA,MACnE;AAAA,MAAM;AAAA,MAAM;AAAA,MAAO;AAAA,MAAM;AAAA,MAAS;AAAA,MAAO;AAAA,MAAO;AAAA,MAAS;AAAA,MAAM;AAAA,MAC/D;AAAA,MAAQ;AAAA,MAAQ;AAAA,MAAO;AAAA,MAAQ;AAAA,MAAQ;AAAA,MAAM;AAAA,MAAQ;AAAA,MAAO;AAAA,MAAQ;AAAA,MACpE;AAAA,MAAQ;AAAA,MAAQ;AAAA,MAAQ;AAAA,MAAQ;AAAA,MAAS;AAAA,MAAQ;AAAA,MAAO;AAAA,MAAS;AAAA,MAAQ;AAAA,MACzE;AAAA,MAAO;AAAA,MAAQ;AAAA,MAAQ;AAAA,MAAQ;AAAA,MAAO;AAAA,MAAQ;AAAA,MAAQ;AAAA,MAAQ;AAAA,MAAS;AAAA,MACvE;AAAA,MAAO;AAAA,MAAO;AAAA,MAAO;AAAA,MAAS;AAAA,MAAO;AAAA,MAAQ;AAAA,MAAO;AAAA,MAAQ;AAAA,MAAW;AAAA,MACvE;AAAA,MAAS;AAAA,MAAQ;AAAA,MAAO;AAAA,MAAQ;AAAA,MAAM;AAAA,MAAU;AAAA,MAAQ;AAAA,MAAO;AAAA,MAAO;AAAA,IACxE,CAAC;AACD,WAAO,UAAU,IAAI,IAAI;AAAA,EAC3B;AAAA,EAEQ,qBAA2B;AACjC,QAAI,KAAK,gBAAgB,SAAS,GAAG;AACnC,WAAK,eAAe;AACpB;AAAA,IACF;AACA,UAAM,QAAQ,MAAM,KAAK,KAAK,gBAAgB,OAAO,CAAC,EAAE,OAAO,CAAC,GAAG,MAAM,IAAI,GAAG,CAAC;AACjF,SAAK,eAAe,QAAQ,KAAK,gBAAgB;AAAA,EACnD;AACF;;;AChLA,SAAS,SAAAC,QAAO,YAAAC,iBAAgB;AAChC,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,QAAAC,aAAY;;;ACFrB,SAAS,WAAW,QAAQ,QAAQ,aAAa;AACjD,SAAS,mBAAmB;AAC5B,SAAS,eAAe;AAkCxB,eAAsB,gBACpB,UACA,MACA,UAA8B,CAAC,GAChB;AACf,QAAM,EAAE,YAAY,MAAM,WAAW,QAAQ,IAAI;AAGjD,QAAM,aAAa,YAAY,CAAC,EAAE,SAAS,KAAK;AAChD,QAAM,WAAW,GAAG,QAAQ,IAAI,UAAU;AAE1C,MAAI;AAEF,QAAI,WAAW;AACb,YAAM,MAAM,QAAQ,QAAQ,GAAG,EAAE,WAAW,KAAK,CAAC;AAAA,IACpD;AAGA,QAAI,OAAO,SAAS,UAAU;AAC5B,YAAM,UAAU,UAAU,MAAM,EAAE,SAAS,CAAC;AAAA,IAC9C,OAAO;AACL,YAAM,UAAU,UAAU,IAAI;AAAA,IAChC;AAGA,UAAM,OAAO,UAAU,QAAQ;AAAA,EACjC,SAAS,OAAO;AAEd,QAAI;AACF,YAAM,OAAO,QAAQ;AAAA,IACvB,QAAQ;AAAA,IAER;AACA,UAAM;AAAA,EACR;AACF;AAkBA,eAAsB,gBACpB,UACA,MACA,UAMI,CAAC,GACU;AACf,QAAM,EAAE,SAAS,GAAG,GAAG,aAAa,IAAI;AACxC,QAAM,OAAO,KAAK,UAAU,MAAM,MAAM,MAAM;AAC9C,QAAM,gBAAgB,UAAU,MAAM,YAAY;AACpD;;;ACtGA,SAAS,UAAU,SAAS,UAAAC,SAAQ,UAAU,YAAY;AAC1D,SAAS,kBAAkB;AAC3B,SAAS,WAAAC,UAAS,UAAU,YAAY;AAsCjC,IAAM,gBAAN,MAAoB;AAAA,EACR;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EAEjB,YAAY,UAAkB,UAAyB,CAAC,GAAG;AACzD,SAAK,WAAW;AAChB,SAAK,aAAa,QAAQ,cAAc;AACxC,SAAK,YAAY,QAAQ;AACzB,SAAK,YAAYA,SAAQ,QAAQ;AACjC,SAAK,eAAe,SAAS,QAAQ;AAAA,EACvC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,eAAuC;AAC3C,QAAI,CAAC,WAAW,KAAK,QAAQ,GAAG;AAC9B,aAAO;AAAA,IACT;AAEA,UAAM,YAAY,KAAK,IAAI;AAC3B,UAAM,aAAa,KAAK,cAAc,SAAS;AAE/C,UAAM,SAAS,KAAK,UAAU,UAAU;AACxC,UAAM,KAAK,gBAAgB;AAE3B,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAqC;AACzC,QAAI,CAAC,WAAW,KAAK,SAAS,GAAG;AAC/B,aAAO,CAAC;AAAA,IACV;AAEA,UAAM,QAAQ,MAAM,QAAQ,KAAK,SAAS;AAC1C,UAAM,gBAAgB,IAAI;AAAA,MACxB,IAAI,KAAK,YAAY,KAAK,YAAY,CAAC;AAAA,IACzC;AAEA,UAAM,UAAwB,CAAC;AAE/B,eAAW,QAAQ,OAAO;AACxB,YAAM,QAAQ,KAAK,MAAM,aAAa;AACtC,UAAI,OAAO;AACT,cAAM,YAAY,SAAS,MAAM,CAAC,GAAI,EAAE;AACxC,cAAM,aAAa,KAAK,KAAK,WAAW,IAAI;AAE5C,YAAI;AACF,gBAAM,QAAQ,MAAM,KAAK,UAAU;AACnC,kBAAQ,KAAK;AAAA,YACX,MAAM;AAAA,YACN;AAAA,YACA,MAAM,MAAM;AAAA,UACd,CAAC;AAAA,QACH,QAAQ;AAAA,QAER;AAAA,MACF;AAAA,IACF;AAGA,WAAO,QAAQ,KAAK,CAAC,GAAG,MAAM,EAAE,YAAY,EAAE,SAAS;AAAA,EACzD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,kBAA0C;AAC9C,UAAM,UAAU,MAAM,KAAK,YAAY;AAEvC,eAAW,UAAU,SAAS;AAC5B,UAAI,MAAM,KAAK,eAAe,OAAO,IAAI,GAAG;AAC1C,eAAO,OAAO;AAAA,MAChB;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,eAAe,YAAsC;AACzD,QAAI;AACF,YAAM,UAAU,MAAM,SAAS,YAAY,OAAO;AAGlD,UAAI,KAAK,WAAW;AAClB,eAAO,KAAK,UAAU,OAAO;AAAA,MAC/B;AAGA,UAAI,KAAK,SAAS,SAAS,OAAO,GAAG;AACnC,aAAK,MAAM,OAAO;AAClB,eAAO;AAAA,MACT;AAGA,aAAO,QAAQ,SAAS;AAAA,IAC1B,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,oBAAsC;AAC1C,UAAM,cAAc,MA
AM,KAAK,gBAAgB;AAE/C,QAAI,CAAC,aAAa;AAChB,aAAO;AAAA,IACT;AAEA,UAAM,SAAS,aAAa,KAAK,QAAQ;AACzC,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,kBAAiC;AAC7C,UAAM,UAAU,MAAM,KAAK,YAAY;AAGvC,UAAM,WAAW,QAAQ,MAAM,KAAK,UAAU;AAE9C,eAAW,UAAU,UAAU;AAC7B,UAAI;AACF,cAAMD,QAAO,OAAO,IAAI;AAAA,MAC1B,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,cAAc,WAA2B;AAC/C,WAAO,KAAK,KAAK,WAAW,GAAG,KAAK,YAAY,WAAW,SAAS,EAAE;AAAA,EACxE;AAAA;AAAA;AAAA;AAAA,EAKQ,YAAY,KAAqB;AACvC,WAAO,IAAI,QAAQ,uBAAuB,MAAM;AAAA,EAClD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBAAkC;AACtC,UAAM,UAAU,MAAM,KAAK,YAAY;AACvC,WAAO,QAAQ;AAAA,EACjB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,sBAA8C;AAClD,UAAM,UAAU,MAAM,KAAK,YAAY;AACvC,WAAO,QAAQ,CAAC,GAAG,aAAa;AAAA,EAClC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,eAAgC;AACpC,UAAM,UAAU,MAAM,KAAK,YAAY;AACvC,QAAI,UAAU;AAEd,eAAW,UAAU,SAAS;AAC5B,UAAI;AACF,cAAMA,QAAO,OAAO,IAAI;AACxB;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AACF;;;ACrPA,SAAS,SAAS;AASX,IAAM,oBAAoB,EAAE,OAAO;AAAA,EACxC,IAAI,EAAE,OAAO;AAAA,EACb,MAAM,EAAE,OAAO;AAAA,EACf,UAAU,EAAE,OAAO;AAAA,EACnB,OAAO,EAAE,OAAO;AAAA,EAChB,KAAK,EAAE,OAAO;AAAA,EACd,MAAM,EAAE,OAAO;AAAA,EACf,MAAM,EAAE,OAAO,EAAE,SAAS;AAAA,EAC1B,OAAO,EAAE,OAAO;AAAA,EAChB,UAAU,EAAE,OAAO,EAAE,SAAS;AAAA,EAC9B,WAAW,EAAE,OAAO;AAAA,EACpB,SAAS,EAAE,OAAO;AAAA,EAClB,UAAU,EAAE,QAAQ,EAAE,SAAS;AAAA,EAC/B,YAAY,EAAE,OAAO,EAAE,SAAS;AAClC,CAAC;AAOM,IAAM,mBAAmB,EAAE,MAAM,iBAAiB;AASlD,IAAM,mBAAmB,EAAE,OAAO;AAAA,EACvC,SAAS,EAAE,OAAO;AAAA,EAClB,WAAW,EAAE,OAAO;AAAA,EACpB,KAAK,EAAE,OAAO;AAChB,CAAC;AAKM,IAAM,sBAAsB,EAAE,OAAO;AAAA,EAC1C,IAAI,EAAE,OAAO;AAAA,EACb,SAAS,EAAE,OAAO;AAAA,EAClB,aAAa,EAAE,OAAO;AAAA,EACtB,UAAU,EAAE,OAAO;AAAA,EACnB,OAAO,EAAE,OAAO;AAAA,EAChB,aAAa,EAAE,OAAO;AAAA,EACtB,UAAU,EAAE,MAAM,EAAE,OAAO,CAAC;AAAA,EAC5B,WAAW,EAAE,OAAO;AAAA,EACpB,UAAU,EAAE,OAAO;AAAA,EACnB,YAAY,iBAAiB,SAAS;AACxC,CAAC;AAOM,IAAM,4BAA4B,EAAE,MAAM,mBAAmB;AAS7D,IAAM,uBAAuB,EAAE,OAAO;AAAA,EAC3C,MAAM,EAAE,OAAO;AAAA,EACf,MAAM,EAAE,OAAO;AAAA,EACf,UAAU,EAAE,OAAO;AAAA,EACnB,aAAa,EAAE,OAAO;AAAA,EACtB,aAAa,EAAE,OAAO;AAAA,EACtB,UAAU,EAAE,MAAM,EAAE,OAAO,CAAC;AAC9B,CAAC;AAWM,IAAM,uBAAuB,EAAE,OAAO;AAAA,EAC3C,SAAS,EAAE,OAAO;AAAA,EAClB,OAAO,EAAE,OAAO;AAAA,EAChB,UAAU,EAAE,OAAO;AAAA,EACnB,OAAO,EAAE,OAAO;AAAA,EAChB,YAAY,EAAE,OAAO;AACvB,CAAC;AAKM,IAAM,gBAAgB,EAAE,OAAO;AAAA,EACpC,MAAM,EAAE,OAAO;AAAA,EACf,OAAO,EAAE,OAAO;AAClB,CAAC;AAKM,IAAM,yBAAyB,EAAE,OAAO;AAAA,EAC7C,QAAQ,EAAE,OAAO;AAAA,EACjB,WAAW,EAAE,OAAO;AAAA,EACpB,SAAS,EAAE,OAAO;AAAA,EAClB,aAAa,EAAE,OAAO;AAAA,EACtB,eAAe,EAAE,OAAO;AAAA,EACxB,YAAY,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE,OAAO,CAAC;AAAA,EAC3C,SAAS,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE,OAAO,CAAC;AAAA,EACxC,aAAa,EAAE,MAAM,oBAAoB;AAAA,EACzC,UAAU,EAAE,MAAM,aAAa;AAAA,EAC/B,aAAa,EAAE,OAAO;AACxB,CAAC;AAOM,IAAM,gCAAgC,EAAE,MAAM,sBAAsB;AA2IpE,SAAS,qBACd,SACA,QACgE;AAChE,MAAI;AACF,UAAM,SAAS,KAAK,MAAM,OAAO;AACjC,UAAM,SAAS,OAAO,UAAU,MAAM;AAEtC,QAAI,OAAO,SAAS;AAClB,aAAO,EAAE,SAAS,MAAM,MAAM,OAAO,KAAK;AAAA,IAC5C;AAEA,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO,sBAAsB,OAAO,MAAM,OAAO,CAAC,GAAG,WAAW,eAAe;AAAA,IACjF;AAAA,EACF,SAAS,OAAO;AACd,WAAO;AAAA,MACL,SAAS;AAAA,MACT,OAAO,sBAAsB,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,IACvF;AAAA,EACF;AACF;;;AHlQA,eAAsB,iBACpB,QACA,UAGI,CAAC,GACoE;AACzE,QAAM,WAAW,QAAQ,YAAY;AACrC,QAAM,YAAY,QAAQ,sBAAsB;AAEhD,QAAM,aAAa,oBAAI,KAAK;AAC5B,aAAW,QAAQ,WAAW,QAAQ,IAAI,QAAQ;AAElD,QAAM,YAAY,OAAO,OAAO,OAAK,IAAI,KAAK,EAAE,SAAS,IAAI,UAAU;AACvE,QAAM,eAAe,OAAO,OAAO,OAAK,IAAI,KAAK,EAAE,SAAS,KAAK,UAAU;AAG3E,MAAI,UAAU,SAAS,WAAW;AAChC,WAAO,EAAE,SAAS,MAAM,WAAW,OAAO;AAAA,EAC5C;AAGA,QAAM,UAAU,aAAa,SAAS;AAEtC,SAAO,EAAE,SAAS,WAAW,aAAa;AAC5C;AAKA,SAAS,aAAa,QAAyC;AAC7D,QAAM,SAAS,OAAO;AA
AA,IAAK,CAAC,GAAG,MAC7B,IAAI,KAAK,EAAE,SAAS,EAAE,QAAQ,IAAI,IAAI,KAAK,EAAE,SAAS,EAAE,QAAQ;AAAA,EAClE;AAEA,QAAM,aAAqC,CAAC;AAC5C,QAAM,UAAkC,CAAC;AACzC,QAAM,aAAiE,oBAAI,IAAI;AAC/E,QAAM,YAAiC,oBAAI,IAAI;AAE/C,aAAW,SAAS,QAAQ;AAE1B,eAAW,MAAM,QAAQ,KAAK,WAAW,MAAM,QAAQ,KAAK,KAAK;AAGjE,YAAQ,MAAM,KAAK,KAAK,QAAQ,MAAM,KAAK,KAAK,KAAK;AAGrD,UAAM,aAAa,iBAAiB,MAAM,KAAK;AAC/C,UAAM,WAAW,WAAW,IAAI,UAAU;AAC1C,QAAI,UAAU;AACZ,eAAS;AAAA,IACX,OAAO;AACL,iBAAW,IAAI,YAAY,EAAE,OAAO,GAAG,MAAM,CAAC;AAAA,IAChD;AAGA,UAAM,WAAW,MAAM,KAAK,MAAM,GAAG,EAAE,IAAI,KAAK,MAAM;AACtD,cAAU,IAAI,WAAW,UAAU,IAAI,QAAQ,KAAK,KAAK,CAAC;AAAA,EAC5D;AAGA,QAAM,cAAc,MAAM,KAAK,WAAW,QAAQ,CAAC,EAChD,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,EAAE,QAAQ,EAAE,CAAC,EAAE,KAAK,EACtC,MAAM,GAAG,EAAE,EACX,IAAI,CAAC,CAAC,SAAS,IAAI,OAAO;AAAA,IACzB,SAAS,QAAQ,MAAM,GAAG,GAAG;AAAA,IAC7B,OAAO,KAAK;AAAA,IACZ,UAAU,KAAK,MAAM;AAAA,IACrB,OAAO,KAAK,MAAM;AAAA,IAClB,YAAY,KAAK,MAAM,IAAI,MAAM,GAAG,GAAG;AAAA,EACzC,EAAE;AAGJ,QAAM,WAAW,MAAM,KAAK,UAAU,QAAQ,CAAC,EAC5C,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,EAC1B,MAAM,GAAG,EAAE,EACX,IAAI,CAAC,CAAC,MAAM,KAAK,OAAO,EAAE,MAAM,MAAM,EAAE;AAE3C,SAAO;AAAA,IACL,QAAQ,GAAG,OAAO,CAAC,GAAG,UAAU,MAAM,GAAG,EAAE,CAAC,CAAC,OAAO,OAAO,OAAO,SAAS,CAAC,GAAG,UAAU,MAAM,GAAG,EAAE,CAAC,CAAC;AAAA,IACtG,WAAW,OAAO,CAAC,GAAG,aAAa;AAAA,IACnC,SAAS,OAAO,OAAO,SAAS,CAAC,GAAG,aAAa;AAAA,IACjD,aAAa,OAAO;AAAA,IACpB,eAAe,OAAO,OAAO,OAAK,EAAE,QAAQ,EAAE;AAAA,IAC9C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,cAAa,oBAAI,KAAK,GAAE,YAAY;AAAA,EACtC;AACF;AAKA,SAAS,iBAAiB,MAAsB;AAC9C,SAAO,KACJ,YAAY,EACZ,QAAQ,YAAY,MAAM,EAC1B,QAAQ,YAAY,GAAG,EACvB,QAAQ,SAAS,EAAE,EACnB,QAAQ,QAAQ,GAAG,EACnB,KAAK,EACL,MAAM,GAAG,GAAG;AACjB;AAKA,eAAsB,qBACpB,SACA,YACe;AACf,QAAM,YAAYE,MAAK,YAAY,SAAS,QAAQ;AACpD,QAAMC,OAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAE1C,QAAM,cAAcD,MAAK,WAAW,0BAA0B;AAE9D,MAAI,YAAgC,CAAC;AACrC,MAAI;AACF,QAAIE,YAAW,WAAW,GAAG;AAC3B,YAAM,UAAU,MAAMC,UAAS,aAAa,OAAO;AACnD,YAAM,SAAS,qBAAqB,SAAS,6BAA6B;AAC1E,UAAI,OAAO,SAAS;AAClB,oBAAY,OAAO;AAAA,MACrB;AAAA,IACF;AAAA,EACF,QAAQ;AACN,gBAAY,CAAC;AAAA,EACf;AAEA,YAAU,KAAK,OAAO;AAGtB,MAAI,UAAU,SAAS,IAAI;AACzB,gBAAY,UAAU,MAAM,GAAG;AAAA,EACjC;AAGA,QAAM,gBAAgB,IAAI,cAAc,WAAW;AACnD,QAAM,cAAc,aAAa;AAGjC,QAAM,gBAAgB,aAAa,SAAS;AAC9C;AAKA,eAAsB,uBAAuB,YAAiD;AAC5F,QAAM,cAAcH,MAAK,YAAY,SAAS,UAAU,0BAA0B;AAElF,MAAI;AACF,QAAIE,YAAW,WAAW,GAAG;AAC3B,YAAM,UAAU,MAAMC,UAAS,aAAa,OAAO;AACnD,YAAM,SAAS,qBAAqB,SAAS,6BAA6B;AAE1E,UAAI,OAAO,SAAS;AAClB,eAAO,OAAO;AAAA,MAChB;AAGA,YAAM,gBAAgB,IAAI,cAAc,WAAW;AACnD,UAAI,MAAM,cAAc,kBAAkB,GAAG;AAC3C,cAAM,YAAY,MAAMA,UAAS,aAAa,OAAO;AACrD,cAAM,kBAAkB,qBAAqB,WAAW,6BAA6B;AACrF,YAAI,gBAAgB,SAAS;AAC3B,iBAAO,gBAAgB;AAAA,QACzB;AAAA,MACF;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO,CAAC;AACV;AA6BA,eAAsB,sBAAsB,YAIzC;AACD,QAAM,YAAY,MAAM,uBAAuB,UAAU;AAEzD,MAAI,UAAU,WAAW,GAAG;AAC1B,WAAO;AAAA,MACL,uBAAuB;AAAA,MACvB,mBAAmB,CAAC;AAAA,MACpB,kBAAkB;AAAA,IACpB;AAAA,EACF;AAEA,QAAM,wBAAwB,UAAU,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,aAAa,CAAC;AAGjF,QAAM,gBAAuE,oBAAI,IAAI;AAErF,aAAW,WAAW,WAAW;AAC/B,eAAW,WAAW,QAAQ,aAAa;AACzC,YAAM,MAAM,QAAQ;AACpB,YAAM,WAAW,cAAc,IAAI,GAAG;AACtC,UAAI,UAAU;AACZ,iBAAS,SAAS,QAAQ;AAC1B,iBAAS;AAAA,MACX,OAAO;AACL,sBAAc,IAAI,KAAK,EAAE,GAAG,SAAS,aAAa,EAAE,CAAC;AAAA,MACvD;AAAA,IACF;AAAA,EACF;AAEA,QAAM,oBAAoB,MAAM,KAAK,cAAc,OAAO,CAAC,EACxD,OAAO,OAAK,EAAE,eAAe,CAAC,EAC9B,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAChC,MAAM,GAAG,CAAC;AAGb,MAAI,mBAAqE;AAEzE,MAAI,UAAU,UAAU,GAAG;AACzB,UAAM,SAAS,UAAU,MAAM,EAAE;AACjC,UAAM,aAAa,OAAO,CAAC,GAAG,eAAe;AAC7C,UAAM,aAAa,OAAO,CAAC,GAAG,eAAe;AAE7C,QAAI,aAAa,aAAa,KAAK;AACjC,yBAAmB;AAAA,IACrB,WAAW,aAAa,aAAa,KAAK;AACxC,yBAAmB;AAAA,I
ACrB,OAAO;AACL,yBAAmB;AAAA,IACrB;AAAA,EACF;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;AF9OA,eAAsB,YACpB,QACA,SACA,SACiD;AACjD,QAAM,aAAa,WAAW,oBAAoB,QAAW,IAAI;AACjE,QAAM,YAAYC,MAAK,YAAY,SAAS,QAAQ;AACpD,QAAMC,OAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAE1C,QAAM,SAAwB,CAAC;AAC/B,QAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,QAAM,aAAa,oBAAI,IAAY;AACnC,MAAI,aAAa;AAEjB,aAAW,SAAS,QAAQ;AAC1B,UAAM,OAAO,UAAU,KAAK;AAG5B,QAAI,WAAW,IAAI,IAAI,GAAG;AACxB;AACA;AAAA,IACF;AACA,eAAW,IAAI,IAAI;AAEnB,UAAM,cAA2B;AAAA,MAC/B,IAAI,MAAM;AAAA,MACV;AAAA,MACA,UAAU,MAAM;AAAA,MAChB,OAAO,MAAM;AAAA,MACb,KAAK,MAAM;AAAA,MACX,MAAM,MAAM;AAAA,MACZ,MAAM,MAAM;AAAA,MACZ,OAAO,MAAM;AAAA,MACb,UAAU,MAAM;AAAA,MAChB,WAAW;AAAA,MACX;AAAA,MACA,UAAU;AAAA,MACV,YAAY;AAAA,IACd;AACA,WAAO,KAAK,WAAW;AAAA,EACzB;AAEA,QAAM,iBAAiB,QAAQ,UAAU;AACzC,QAAM,eAAe,MAAM,iBAAiB,QAAQ,UAAU;AAE9D,SAAO,EAAE,QAAQ,cAAc,YAAY,cAAc,OAAO,SAAS,cAAc;AACzF;AAKA,eAAsB,aACpB,OACA,UAOI,CAAC,GACyB;AAC9B,QAAM,aAAa,QAAQ,WAAW,oBAAoB,QAAW,IAAI;AACzE,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,YAAY,MAAM,eAAe,UAAU;AAEjD,MAAI,UAAU,WAAW,GAAG;AAC1B,WAAO,CAAC;AAAA,EACV;AAGA,QAAM,iBAAiB,UAAU,OAAO,WAAS;AAC/C,QAAI,QAAQ,WAAW,MAAM,YAAY,QAAQ,QAAS,QAAO;AACjE,QAAI,QAAQ,YAAY,CAAC,QAAQ,SAAS,SAAS,MAAM,QAAQ,EAAG,QAAO;AAC3E,QAAI,QAAQ,SAAS,MAAM,UAAU,QAAQ,MAAO,QAAO;AAC3D,QAAI,CAAC,QAAQ,mBAAmB,MAAM,SAAU,QAAO;AACvD,WAAO;AAAA,EACT,CAAC;AAED,MAAI,eAAe,WAAW,GAAG;AAC/B,WAAO,CAAC;AAAA,EACV;AAGA,QAAM,OAAO,IAAI,UAAU;AAC3B,QAAM,WAAW,oBAAI,IAAyB;AAE9C,aAAW,SAAS,gBAAgB;AAClC,UAAM,aAAa,GAAG,MAAM,KAAK,IAAI,MAAM,GAAG,IAAI,MAAM,IAAI,IAAI,MAAM,KAAK,IAAI,MAAM,YAAY,EAAE,IAAI,MAAM,QAAQ;AACrH,SAAK,YAAY;AAAA,MACf,IAAI,MAAM;AAAA,MACV,MAAM;AAAA,IACR,CAAC;AACD,aAAS,IAAI,MAAM,IAAI,KAAK;AAAA,EAC9B;AAGA,QAAM,cAAc,KAAK,OAAO,OAAO,KAAK;AAE5C,SAAO,YAAY,IAAI,aAAW;AAAA,IAChC,OAAO,SAAS,IAAI,OAAO,EAAE;AAAA,IAC7B,OAAO,OAAO;AAAA,IACd,WAAW;AAAA,EACb,EAAE;AACJ;AAKA,eAAsB,kBACpB,OACA,UAII,CAAC,GACyB;AAE9B,QAAM,QAAQ,GAAG,MAAM,KAAK,IAAI,MAAM,GAAG,IAAI,MAAM,KAAK;AACxD,QAAM,gBAAoD;AAAA,IACxD,QAAQ,QAAQ,SAAS,KAAK;AAAA;AAAA,IAC9B,iBAAiB;AAAA,EACnB;AACA,MAAI,QAAQ,YAAY,QAAW;AACjC,kBAAc,UAAU,QAAQ;AAAA,EAClC;AACA,QAAM,UAAU,MAAM,aAAa,OAAO,aAAa;AAEvD,MAAI,WAAW,QAAQ,OAAO,OAAK,EAAE,MAAM,OAAO,MAAM,EAAE;AAE1D,MAAI,QAAQ,iBAAiB;AAC3B,eAAW,SAAS,OAAO,OAAK,EAAE,MAAM,SAAS,MAAM,IAAI;AAAA,EAC7D;AAEA,SAAO,SAAS,MAAM,GAAG,QAAQ,SAAS,CAAC;AAC7C;AAKA,eAAsB,kBACpB,SACA,SACkB;AAClB,QAAM,aAAa,WAAW,oBAAoB,QAAW,IAAI;AACjE,QAAM,QAAQ,MAAM,eAAe,UAAU;AAE7C,QAAM,QAAQ,MAAM,KAAK,OAAK,EAAE,OAAO,OAAO;AAC9C,MAAI,CAAC,MAAO,QAAO;AAEnB,QAAM,WAAW;AACjB,QAAM,cAAa,oBAAI,KAAK,GAAE,YAAY;AAE1C,QAAM,eAAe,OAAO,UAAU;AACtC,SAAO;AACT;AAKA,eAAsB,eAAe,SAA6C;AAChF,QAAM,aAAa,WAAW,oBAAoB,QAAW,IAAI;AACjE,QAAM,QAAQ,MAAM,eAAe,UAAU;AAC7C,QAAM,aAAa,MAAM,sBAAsB,UAAU;AAEzD,QAAM,aAAa;AACnB,QAAM,eAAe,IAAI,IAAI,MAAM,IAAI,OAAK,EAAE,IAAI,CAAC;AAEnD,QAAM,QAA0B;AAAA,IAC9B,aAAa,MAAM;AAAA,IACnB,eAAe,CAAC;AAAA,IAChB,kBAAkB,CAAC;AAAA,IACnB,aAAa;AAAA,IACb,aAAa;AAAA,IACb,eAAe;AAAA,IACf,kBAAkB,WAAW;AAAA,IAC7B,kBAAkB,WAAW;AAAA,IAC7B,cAAc;AAAA,MACZ,SAAS,MAAM;AAAA,MACf,KAAK;AAAA,MACL,aAAa,KAAK,MAAO,MAAM,SAAS,aAAc,GAAG;AAAA,MACzD,SAAS,MAAM,UAAU;AAAA,IAC3B;AAAA,IACA,oBAAoB;AAAA,MAClB,mBAAmB,MAAM,SAAS,aAAa;AAAA,MAC/C,gBAAgB,aAAa;AAAA,IAC/B;AAAA,EACF;AAEA,aAAW,SAAS,OAAO;AACzB,UAAM,cAAc,MAAM,KAAK,KAAK,MAAM,cAAc,MAAM,KAAK,KAAK,KAAK;AAC7E,UAAM,iBAAiB,MAAM,QAAQ,KAAK,MAAM,iBAAiB,MAAM,QAAQ,KAAK,KAAK;AACzF,QAAI,MAAM,SAAU,OAAM;AAAA,EAC5B;AAEA,MAAI,MAAM,SAAS,GAAG;AACpB,UAAM,SAAS,CAAC,GAAG,KAAK,EAAE;AAAA,MAAK,CAAC,GAAG,MACjC,IAAI,KAAK,EAAE,SAAS,EAAE,QAAQ,IAAI,IAAI,KAAK,EAAE,SAAS,EAAE,QAAQ;AAAA,IAClE;AACA,UAAM,SAAS,OAAO,CAAC,GAAG;AAC1B,UAAM,SAAS,OAAO,OAAO,SAAS,CA
AC,GAAG;AAC1C,QAAI,WAAW,QAAW;AACxB,YAAM,cAAc;AAAA,IACtB;AACA,QAAI,WAAW,QAAW;AACxB,YAAM,cAAc;AAAA,IACtB;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAsB,gBACpB,UAII,CAAC,GACmB;AACxB,QAAM,aAAa,QAAQ,WAAW,oBAAoB,QAAW,IAAI;AACzE,QAAM,QAAQ,MAAM,eAAe,UAAU;AAC7C,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,WAAW,QAAQ,YAAY;AAErC,QAAM,SAAS,oBAAI,KAAK;AACxB,SAAO,QAAQ,OAAO,QAAQ,IAAI,QAAQ;AAE1C,SAAO,MACJ,OAAO,OAAK,IAAI,KAAK,EAAE,SAAS,KAAK,MAAM,EAC3C,KAAK,CAAC,GAAG,MAAM,IAAI,KAAK,EAAE,SAAS,EAAE,QAAQ,IAAI,IAAI,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAChF,MAAM,GAAG,KAAK;AACnB;AAMA,eAAsB,YACpB,UACA,UAGI,CAAC,GAC8D;AACnE,QAAM,aAAa,QAAQ,WAAW,oBAAoB,QAAW,IAAI;AACzE,QAAM,QAAQ,MAAM,eAAe,UAAU;AAC7C,QAAM,gBAAgB,MAAM;AAE5B,MAAI,YAA2B,CAAC;AAEhC,UAAQ,UAAU;AAAA,IAChB,KAAK;AAEH,YAAM,gBAAgB,oBAAI,KAAK;AAC/B,oBAAc,QAAQ,cAAc,QAAQ,IAAI,EAAE;AAElD,kBAAY,MAAM,OAAO,OAAK;AAC5B,cAAM,WAAW,IAAI,KAAK,EAAE,SAAS,KAAK;AAC1C,cAAM,cAAc,CAAC,YAAY,MAAM,EAAE,SAAS,EAAE,QAAQ;AAC5D,cAAM,eAAe,CAAC,EAAE;AAExB,eAAO,YAAY,eAAe;AAAA,MACpC,CAAC;AACD;AAAA,IAEF,KAAK;AAEH,kBAAY,MAAM,OAAO,OAAK,CAAC,EAAE,QAAQ;AACzC;AAAA,IAEF,KAAK;AAEH,YAAM,UAAU,QAAQ,WAAW;AACnC,YAAM,aAAa,oBAAI,KAAK;AAC5B,iBAAW,QAAQ,WAAW,QAAQ,IAAI,OAAO;AAEjD,kBAAY,MAAM,OAAO,OAAK,IAAI,KAAK,EAAE,SAAS,KAAK,UAAU;AACjE;AAAA,IAEF,KAAK;AAEH,kBAAY,CAAC;AACb;AAAA,EACJ;AAEA,QAAM,eAAe,WAAW,UAAU;AAE1C,SAAO;AAAA,IACL,SAAS,gBAAgB,UAAU;AAAA,IACnC,WAAW,UAAU;AAAA,IACrB;AAAA,EACF;AACF;AAKA,eAAsB,aAAa,SAAqC;AACtE,QAAM,aAAa,WAAW,oBAAoB,QAAW,IAAI;AACjE,QAAM,YAAYD,MAAK,YAAY,SAAS,QAAQ;AAEpD,MAAI;AACF,QAAI,CAACE,YAAW,SAAS,EAAG,QAAO,CAAC;AACpC,UAAM,QAAQ,MAAMC,SAAQ,SAAS;AACrC,WAAO,MACJ,OAAO,OAAK,0BAA0B,KAAK,CAAC,CAAC,EAC7C,KAAK,EACL,QAAQ;AAAA,EACb,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;AAIA,eAAe,iBAAiB,QAAuB,YAAmC;AACxF,QAAM,YAAYH,MAAK,YAAY,SAAS,QAAQ;AACpD,QAAM,SAAQ,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AACnD,QAAM,UAAUA,MAAK,WAAW,GAAG,KAAK,KAAK;AAE7C,MAAI,UAAU;AAEd,MAAI;AACF,QAAIE,YAAW,OAAO,GAAG;AACvB,gBAAU,MAAME,UAAS,SAAS,OAAO;AAAA,IAC3C,OAAO;AACL,gBAAU,gBAAgB,KAAK;AAAA;AAAA;AAAA,IACjC;AAAA,EACF,QAAQ;AACN,cAAU,gBAAgB,KAAK;AAAA;AAAA;AAAA,EACjC;AAEA,QAAM,QAAO,oBAAI,KAAK,GAAE,aAAa,EAAE,MAAM,GAAG,EAAE,CAAC;AAEnD,QAAM,aAAa,OAAO;AAAA,IAAI,OAC5B,OAAO,IAAI,KAAK,EAAE,SAAS,YAAY,CAAC,KAAK,EAAE,MAAM,MAAM,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,SAAS,KAAK,QAAQ,EAAE;AAAA,gBACnF,EAAE,IAAI,KAAK,EAAE,OAAO,IAAI,EAAE,IAAI,KAAK,EAAE;AAAA,eACtC,EAAE,KAAK;AAAA,aACT,EAAE,IAAI,MAAM,GAAG,GAAG,CAAC,GAAG,EAAE,IAAI,SAAS,MAAM,QAAQ,EAAE;AAAA;AAAA,EACrE,EAAE,KAAK,IAAI;AAEX,aAAW,aAAa;AAExB,QAAMC,WAAU,SAAS,OAAO;AAClC;AASA,eAAe,eAAe,YAA4C;AACxE,QAAM,YAAYL,MAAK,YAAY,SAAS,UAAU,aAAa;AAEnE,MAAI;AACF,QAAIE,YAAW,SAAS,GAAG;AACzB,YAAM,UAAU,MAAME,UAAS,WAAW,OAAO;AACjD,YAAM,SAAS,qBAAqB,SAAS,gBAAgB;AAE7D,UAAI,OAAO,SAAS;AAClB,eAAO,OAAO;AAAA,MAChB;AAGA,cAAQ,MAAM,6BAA6B,OAAO,KAAK,EAAE;AACzD,YAAM,gBAAgB,IAAI,cAAc,SAAS;AAEjD,UAAI,MAAM,cAAc,kBAAkB,GAAG;AAC3C,gBAAQ,MAAM,iCAA4B;AAC1C,cAAM,YAAY,MAAMA,UAAS,WAAW,OAAO;AACnD,cAAM,kBAAkB,qBAAqB,WAAW,gBAAgB;AACxE,YAAI,gBAAgB,SAAS;AAC3B,iBAAO,gBAAgB;AAAA,QACzB;AAAA,MACF;AAEA,cAAQ,MAAM,0CAA0C;AAAA,IAC1D;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO,CAAC;AACV;AAEA,eAAe,iBAAiB,WAA0B,YAAqC;AAC7F,QAAM,YAAYJ,MAAK,YAAY,SAAS,QAAQ;AACpD,QAAMC,OAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAE1C,MAAI,WAAW,MAAM,eAAe,UAAU;AAI9C,QAAM,UAAU,IAAI,IAAI,SAAS,IAAI,OAAK,EAAE,IAAI,CAAC;AACjD,QAAM,QAAQ,UAAU,OAAO,OAAK,CAAC,QAAQ,IAAI,EAAE,IAAI,CAAC;AACxD,QAAM,eAAe,MAAM;AAE3B,aAAW,CAAC,GAAG,UAAU,GAAG,KAAK;AAGjC,MAAI,SAAS,SAAS,KAAK;AACzB,UAAM,EAAE,SAAS,UAAU,IAAI,MAAM,iBAAiB,UAAU;AAAA,MAC9D,UAAU;AAAA,MACV,oBAAoB;AAAA,IACtB,CAAC;AAED,QAAI,SAAS;AACX,YAAM,qBAAqB,SAAS,UAAU;AAC9C,iBAAW;AAAA,IACb;AAAA,EACF;AAIA,MAAI,SAAS,SAAS
,KAAM;AAC1B,eAAW,iBAAiB,UAAU,GAAI;AAAA,EAC5C;AAEA,QAAM,eAAe,UAAU,UAAU;AACzC,SAAO;AACT;AAMA,SAAS,iBAAiB,QAAuB,aAAoC;AACnF,QAAM,iBAAyC;AAAA,IAC7C,UAAU;AAAA,IACV,MAAM;AAAA,IACN,UAAU;AAAA,IACV,KAAK;AAAA,IACL,MAAM;AAAA,EACR;AAEA,QAAM,SAAS,OAAO,IAAI,WAAS;AACjC,UAAM,aAAa,KAAK,IAAI,IAAI,IAAI,KAAK,MAAM,SAAS,EAAE,QAAQ,MAAM,MAAO,KAAK,KAAK;AACzF,UAAM,eAAe,KAAK,IAAI,GAAG,MAAM,YAAY,CAAC;AACpD,UAAM,gBAAgB,eAAe,MAAM,QAAQ,KAAK;AACxD,UAAM,kBAAkB,MAAM,WAAW,MAAM;AAE/C,WAAO;AAAA,MACL;AAAA,MACA,OAAO,eAAe,gBAAgB;AAAA,IACxC;AAAA,EACF,CAAC;AAED,SAAO,OACJ,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAChC,MAAM,GAAG,WAAW,EACpB,IAAI,OAAK,EAAE,KAAK;AACrB;AASA,eAAe,eAAe,QAAuB,YAAmC;AACtF,QAAM,YAAYD,MAAK,YAAY,SAAS,QAAQ;AACpD,QAAMC,OAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAE1C,QAAM,YAAYD,MAAK,WAAW,aAAa;AAG/C,QAAM,gBAAgB,IAAI,cAAc,SAAS;AACjD,QAAM,cAAc,aAAa;AAGjC,QAAM,gBAAgB,WAAW,MAAM;AACzC;AASA,SAAS,UAAU,OAAsB;AACvC,QAAM,UAAU,GAAG,MAAM,KAAK,IAAI,MAAM,IAAI,IAAI,MAAM,QAAQ,IAAI,MAAM,KAAK;AAC7E,SAAO,WAAW,QAAQ,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK,EAAE,MAAM,GAAG,EAAE;AACvE;","names":["mkdir","writeFile","readFile","readdir","existsSync","join","mkdir","readFile","existsSync","join","unlink","dirname","join","mkdir","existsSync","readFile","join","mkdir","existsSync","readdir","readFile","writeFile"]}
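
The sources embedded in the map above document the new `BackupManager` and `createJSONValidator` helpers (rotating backups plus content validation before recovery). A minimal usage sketch based on those embedded sources; the `./backup-manager` import path and the required-field list are assumptions for illustration:

```typescript
// Sketch only: exercises the BackupManager API as documented in the embedded
// sources above; the "./backup-manager" import path is an assumption.
import { BackupManager, createJSONValidator } from './backup-manager';

async function safeRewrite(issuesPath: string): Promise<void> {
  // Treat a backup as valid only if it parses as JSON and its entries carry
  // these fields (the field list here is illustrative).
  const manager = new BackupManager(issuesPath, {
    maxBackups: 5,
    validator: createJSONValidator(['id', 'hash', 'severity']),
  });

  // Snapshot the current file before mutating it; older backups rotate out.
  await manager.createBackup();

  try {
    // ... rewrite issues.json here ...
  } catch {
    // On a failed or interrupted write, fall back to the newest valid backup.
    const recovered = await manager.recoverFromBackup();
    if (!recovered) {
      console.warn(`No valid backup found for ${issuesPath}`);
    }
  }
}
```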
@@ -1,13 +1,13 @@
 import {
   getGuardianState
-} from "./chunk-
+} from "./chunk-35FAFFHE.js";
 import {
   BackupManager,
   atomicWriteJSON,
   getMemoryStats,
   safeParseAndValidate,
   searchIssues
-} from "./chunk-
+} from "./chunk-6QKDEGWR.js";
 
 // src/guardian/insight-store.ts
 import { mkdir, readFile } from "fs/promises";
@@ -1039,4 +1039,4 @@ export {
   getGoalManager,
   clearGoalManagers
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-AIC4HOOQ.js.map
@@ -2,15 +2,15 @@ import {
   CustomSkill,
   getSkillRegistry,
   loadContextState
-} from "./chunk-
+} from "./chunk-3RKY55HZ.js";
 import {
   getGlobalMemoryStats
-} from "./chunk-
+} from "./chunk-P6VLSYXN.js";
 import {
   getHistoricalInsights,
   getMemoryStats,
   getRecentIssues
-} from "./chunk-
+} from "./chunk-6QKDEGWR.js";
 import {
   getWorkingDirectory
 } from "./chunk-CM7EHNQK.js";
@@ -1779,4 +1779,4 @@ export {
   getSkillsByCategory,
   getSkillCategories
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-LNLLZQWH.js.map
@@ -3,7 +3,7 @@ import {
   GlobalPatternsIndexSchema,
   atomicWriteJSON,
   safeParseAndValidate
-} from "./chunk-
+} from "./chunk-6QKDEGWR.js";
 
 // src/memory/global-memory.ts
 import { mkdir, writeFile, readFile, readdir } from "fs/promises";
@@ -81,17 +81,30 @@ async function listTrackedProjects() {
 async function getGlobalMemoryStats() {
   const patterns = await loadGlobalPatterns();
   const projects = await listTrackedProjects();
+  const MAX_PATTERNS = 500;
   const patternsByAgent = {};
   for (const pattern of patterns) {
     patternsByAgent[pattern.agent] = (patternsByAgent[pattern.agent] || 0) + 1;
   }
+  const totalOccurrences = patterns.reduce((sum, p) => sum + p.occurrences, 0);
+  const avgOccurrences = patterns.length > 0 ? totalOccurrences / patterns.length : 0;
   return {
     totalPatterns: patterns.length,
     crossProjectPatterns: patterns.filter((p) => p.projects.length >= 2).length,
     trackedProjects: projects.length,
-    totalOccurrences
+    totalOccurrences,
     fixedPatterns: patterns.filter((p) => p.fixApplied).length,
-    patternsByAgent
+    patternsByAgent,
+    capacityInfo: {
+      current: patterns.length,
+      max: MAX_PATTERNS,
+      percentFull: Math.round(patterns.length / MAX_PATTERNS * 100),
+      isAtCap: patterns.length >= MAX_PATTERNS
+    },
+    deduplicationStats: {
+      uniquePatterns: patterns.length,
+      averageOccurrences: Math.round(avgOccurrences * 10) / 10
+    }
   };
 }
 async function updateGlobalMemoryMd() {
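
The hunk above extends the object returned by `getGlobalMemoryStats()` with `capacityInfo` and `deduplicationStats`, so callers can observe the 500-pattern cap. A minimal consumer sketch assuming those field shapes; the 80% warning threshold and log wording are illustrative, not taken from the package:

```typescript
// Sketch: reading the extended stats shape added in the hunk above.
interface GlobalMemoryStats {
  totalPatterns: number;
  capacityInfo: { current: number; max: number; percentFull: number; isAtCap: boolean };
  deduplicationStats: { uniquePatterns: number; averageOccurrences: number };
}

function reportCapacity(stats: GlobalMemoryStats): void {
  const { current, max, percentFull, isAtCap } = stats.capacityInfo;
  if (isAtCap) {
    // At the cap, saveGlobalPatterns prunes low-scoring patterns on the next write.
    console.warn(`Pattern store full (${current}/${max}).`);
  } else if (percentFull >= 80) {
    // 80% is an assumed threshold for this sketch.
    console.warn(`Pattern store at ${percentFull}% capacity (${current}/${max}).`);
  }
  console.log(
    `Average occurrences per unique pattern: ${stats.deduplicationStats.averageOccurrences}`
  );
}
```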
@@ -187,11 +200,52 @@ async function loadGlobalPatterns() {
 async function saveGlobalPatterns(patterns) {
   await mkdir(GLOBAL_MEMORY_DIR, { recursive: true });
   const patternsPath = join(GLOBAL_MEMORY_DIR, "global-patterns.json");
-  const
+  const patternMap = /* @__PURE__ */ new Map();
+  for (const pattern of patterns) {
+    const existing = patternMap.get(pattern.id);
+    if (existing) {
+      existing.occurrences += pattern.occurrences;
+      for (const proj of pattern.projects) {
+        if (!existing.projects.includes(proj)) {
+          existing.projects.push(proj);
+        }
+      }
+      existing.lastSeen = pattern.lastSeen > existing.lastSeen ? pattern.lastSeen : existing.lastSeen;
+    } else {
+      patternMap.set(pattern.id, pattern);
+    }
+  }
+  const deduplicated = Array.from(patternMap.values());
+  const pruned = intelligentPruneGlobalPatterns(deduplicated, 500);
   const backupManager = new BackupManager(patternsPath);
   await backupManager.createBackup();
   await atomicWriteJSON(patternsPath, pruned);
 }
+function intelligentPruneGlobalPatterns(patterns, targetCount) {
+  if (patterns.length <= targetCount) {
+    return patterns;
+  }
+  const severityWeight = {
+    critical: 100,
+    high: 50,
+    moderate: 20,
+    low: 10,
+    info: 5
+  };
+  const scored = patterns.map((pattern) => {
+    const ageInDays = (Date.now() - new Date(pattern.lastSeen).getTime()) / (1e3 * 60 * 60 * 24);
+    const recencyScore = Math.max(0, 100 - ageInDays * 2);
+    const severityScore = severityWeight[pattern.severity] || 10;
+    const crossProjectBonus = (pattern.projects.length - 1) * 30;
+    const fixedBonus = pattern.fixApplied ? 20 : 0;
+    const occurrenceScore = Math.min(pattern.occurrences * 2, 100);
+    return {
+      pattern,
+      score: recencyScore + severityScore + crossProjectBonus + fixedBonus + occurrenceScore
+    };
+  });
+  return scored.sort((a, b) => b.score - a.score).slice(0, targetCount).map((s) => s.pattern);
+}
 function extractPatternId(issue) {
   const normalized = issue.issue.toLowerCase().replace(/`[^`]+`/g, "CODE").replace(/\b\d+\b/g, "N").replace(/['"]/g, "").slice(0, 100);
   const hash = createHash("sha256").update(normalized).digest("hex").slice(0, 12);
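
The `intelligentPruneGlobalPatterns` helper added above keeps the highest-scoring patterns once the store exceeds its cap, weighting recency, severity, cross-project spread, applied fixes, and occurrence count. A worked sketch of that score on an invented sample pattern:

```typescript
// Worked example of the scoring formula used by intelligentPruneGlobalPatterns
// above. The sample pattern and its values are invented for illustration.
const severityWeight: Record<string, number> = { critical: 100, high: 50, moderate: 20, low: 10, info: 5 };

const sample = {
  severity: 'high',
  occurrences: 12,
  projects: ['api', 'web'],                                                 // seen in two projects
  fixApplied: true,
  lastSeen: new Date(Date.now() - 10 * 24 * 60 * 60 * 1000).toISOString(),  // ~10 days ago
};

const ageInDays = (Date.now() - new Date(sample.lastSeen).getTime()) / (1000 * 60 * 60 * 24);
const score =
  Math.max(0, 100 - ageInDays * 2) +         // recency: ~80
  (severityWeight[sample.severity] ?? 10) +  // severity: 50
  (sample.projects.length - 1) * 30 +        // cross-project bonus: 30
  (sample.fixApplied ? 20 : 0) +             // fixed bonus: 20
  Math.min(sample.occurrences * 2, 100);     // occurrences: 24

console.log(Math.round(score)); // ~204; higher-scoring patterns survive the cut
```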
@@ -209,4 +263,4 @@ export {
   updateGlobalMemoryMd,
   searchGlobalPatterns
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-P6VLSYXN.js.map
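
For reference, the `extractPatternId` context lines in the previous hunk show how pattern identity is derived: the issue text is normalized (inline code spans, numbers, and quotes collapsed) and the first 12 hex characters of its SHA-256 digest become the id, so repeated findings collapse onto one pattern. A standalone sketch of that normalization, assuming Node's built-in `crypto`; the `patternIdFor` helper name and the sample strings are hypothetical:

```typescript
// Sketch of the id derivation shown in extractPatternId above: normalize the
// issue text, then take the first 12 hex chars of its SHA-256 digest.
import { createHash } from 'crypto';

function patternIdFor(issueText: string): string {
  const normalized = issueText
    .toLowerCase()
    .replace(/`[^`]+`/g, 'CODE')  // collapse inline code spans
    .replace(/\b\d+\b/g, 'N')     // collapse literal numbers
    .replace(/['"]/g, '')         // drop quotes
    .slice(0, 100);
  return createHash('sha256').update(normalized).digest('hex').slice(0, 12);
}

// Both findings normalize to the same text, so they share one pattern id:
console.log(patternIdFor('Use of `parseInt` without radix on line 42'));
console.log(patternIdFor('Use of `parseInt` without radix on line 7'));
```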