@triedotdev/mcp 1.0.113 → 1.0.115
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/auto-fix-apply-PCAHWLXF.js +10 -0
- package/dist/autonomy-config-JXB7WCZ2.js +30 -0
- package/dist/chunk-2GIAROBF.js +173 -0
- package/dist/chunk-2GIAROBF.js.map +1 -0
- package/dist/{chunk-33WL3D7A.js → chunk-2SIFK7OW.js} +7 -419
- package/dist/chunk-2SIFK7OW.js.map +1 -0
- package/dist/chunk-43X6JBEM.js +36 -0
- package/dist/chunk-43X6JBEM.js.map +1 -0
- package/dist/chunk-55DOQNHJ.js +772 -0
- package/dist/chunk-55DOQNHJ.js.map +1 -0
- package/dist/chunk-6LXSA2OZ.js +425 -0
- package/dist/chunk-6LXSA2OZ.js.map +1 -0
- package/dist/{chunk-SDS3UVFY.js → chunk-AOFYU6T3.js} +113 -559
- package/dist/chunk-AOFYU6T3.js.map +1 -0
- package/dist/{chunk-6QR6QZIX.js → chunk-D3EXBJE2.js} +25 -658
- package/dist/chunk-D3EXBJE2.js.map +1 -0
- package/dist/chunk-DJ2YAGHK.js +50 -0
- package/dist/chunk-DJ2YAGHK.js.map +1 -0
- package/dist/chunk-DZREHOGW.js +706 -0
- package/dist/chunk-DZREHOGW.js.map +1 -0
- package/dist/chunk-I2GFI3AM.js +340 -0
- package/dist/chunk-I2GFI3AM.js.map +1 -0
- package/dist/chunk-KRH642MT.js +947 -0
- package/dist/chunk-KRH642MT.js.map +1 -0
- package/dist/{chunk-QYOACM2C.js → chunk-MVNJPJBK.js} +22 -252
- package/dist/chunk-MVNJPJBK.js.map +1 -0
- package/dist/chunk-NS2MSZMB.js +394 -0
- package/dist/chunk-NS2MSZMB.js.map +1 -0
- package/dist/chunk-SWSK7ANT.js +340 -0
- package/dist/chunk-SWSK7ANT.js.map +1 -0
- package/dist/chunk-VRLMTOB6.js +566 -0
- package/dist/chunk-VRLMTOB6.js.map +1 -0
- package/dist/chunk-YR4BMGYO.js +130 -0
- package/dist/chunk-YR4BMGYO.js.map +1 -0
- package/dist/chunk-ZV2K6M7T.js +74 -0
- package/dist/chunk-ZV2K6M7T.js.map +1 -0
- package/dist/{chunk-2764KZZQ.js → chunk-ZYKEILVK.js} +451 -1069
- package/dist/chunk-ZYKEILVK.js.map +1 -0
- package/dist/cli/main.js +107 -375
- package/dist/cli/main.js.map +1 -1
- package/dist/cli/yolo-daemon.js +18 -8
- package/dist/cli/yolo-daemon.js.map +1 -1
- package/dist/client-7XZHCMD3.js +28 -0
- package/dist/client-7XZHCMD3.js.map +1 -0
- package/dist/{goal-manager-AP4LTE6U.js → goal-manager-LMS6ZJB7.js} +7 -3
- package/dist/goal-manager-LMS6ZJB7.js.map +1 -0
- package/dist/goal-validator-T5HEYBC5.js +186 -0
- package/dist/goal-validator-T5HEYBC5.js.map +1 -0
- package/dist/graph-U5JWSAB5.js +10 -0
- package/dist/graph-U5JWSAB5.js.map +1 -0
- package/dist/guardian-agent-EXP7APLC.js +25 -0
- package/dist/guardian-agent-EXP7APLC.js.map +1 -0
- package/dist/hypothesis-KGC3P54C.js +19 -0
- package/dist/hypothesis-KGC3P54C.js.map +1 -0
- package/dist/incident-index-PNIVT47T.js +11 -0
- package/dist/incident-index-PNIVT47T.js.map +1 -0
- package/dist/index.js +369 -43
- package/dist/index.js.map +1 -1
- package/dist/ledger-SR6OEBLO.js +15 -0
- package/dist/ledger-SR6OEBLO.js.map +1 -0
- package/dist/output-manager-BOTMXSND.js +13 -0
- package/dist/output-manager-BOTMXSND.js.map +1 -0
- package/dist/pattern-discovery-F7LU5K6E.js +8 -0
- package/dist/pattern-discovery-F7LU5K6E.js.map +1 -0
- package/package.json +1 -1
- package/dist/chunk-2764KZZQ.js.map +0 -1
- package/dist/chunk-33WL3D7A.js.map +0 -1
- package/dist/chunk-6JPPYG7F.js +0 -1813
- package/dist/chunk-6JPPYG7F.js.map +0 -1
- package/dist/chunk-6QR6QZIX.js.map +0 -1
- package/dist/chunk-QYOACM2C.js.map +0 -1
- package/dist/chunk-SDS3UVFY.js.map +0 -1
- package/dist/guardian-agent-XEYNG7RH.js +0 -18
- /package/dist/{goal-manager-AP4LTE6U.js.map → auto-fix-apply-PCAHWLXF.js.map} +0 -0
- /package/dist/{guardian-agent-XEYNG7RH.js.map → autonomy-config-JXB7WCZ2.js.map} +0 -0
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
import {
|
|
2
|
+
atomicWriteJSON
|
|
3
|
+
} from "./chunk-43X6JBEM.js";
|
|
4
|
+
import {
|
|
5
|
+
getTrieDirectory,
|
|
6
|
+
getWorkingDirectory
|
|
7
|
+
} from "./chunk-R4AAPFXC.js";
|
|
8
|
+
|
|
9
|
+
// src/memory/ledger.ts
|
|
10
|
+
import { createHash } from "crypto";
|
|
11
|
+
import { mkdir, readFile } from "fs/promises";
|
|
12
|
+
import { existsSync } from "fs";
|
|
13
|
+
import { join } from "path";
|
|
14
|
+
// Ledger file name, stored under <trie-dir>/memory/.
var LEDGER_FILENAME = "ledger.json";
// previousHash value for the first block in the chain: 64 zero hex chars.
var GENESIS_HASH = "0".repeat(64);
// Schema version written into every block and folded into its block hash.
var LEDGER_VERSION = 1;
|
|
17
|
+
/**
 * Append a batch of issues to the hash-chained issue ledger.
 *
 * Issues are grouped into one block per calendar day (UTC, ISO date). The
 * current day's block is extended when it is already the chain tip;
 * otherwise a new block is created, chained to the previous block's hash
 * (or GENESIS_HASH for an empty ledger). The block's merkle root and
 * block hash are recomputed after appending, then the full chain is saved.
 *
 * @param {Array<{id:string,hash:string,severity:string,file:string,agent:string,timestamp:string}>} issues
 * @param {string} [workDir] - project directory; defaults to the resolved working directory
 * @returns {Promise<object|null>} the updated or newly created block, or null when `issues` is empty
 */
async function appendIssuesToLedger(issues, workDir) {
  if (issues.length === 0) return null;
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const memoryDir = join(getTrieDirectory(projectDir), "memory");
  await mkdir(memoryDir, { recursive: true });
  const blocks = await loadLedger(projectDir);
  // Derive the day key and the timestamp from ONE clock reading: two
  // separate `new Date()` calls could straddle midnight and produce a
  // block whose `date` disagrees with its createdAt/updatedAt.
  const now = new Date().toISOString();
  const today = now.slice(0, 10);
  const entries = issues.map((issue) => ({
    id: issue.id,
    hash: issue.hash,
    severity: issue.severity,
    file: issue.file,
    agent: issue.agent,
    timestamp: issue.timestamp
  }));
  const previousBlock = blocks[blocks.length - 1];
  // Reuse today's block when it is the chain tip; otherwise start a new
  // block linked to the tip's hash (genesis hash when the chain is empty).
  const block = previousBlock && previousBlock.date === today ? previousBlock : createBlock(today, now, previousBlock?.blockHash ?? GENESIS_HASH);
  if (block !== previousBlock) {
    blocks.push(block);
  }
  block.entries = [...block.entries, ...entries];
  // Recompute the commitments: merkle root over all entry hashes, then the
  // block hash binding version/date/previousHash/merkleRoot together.
  block.merkleRoot = computeMerkleRoot(block.entries.map((entry) => entry.hash));
  block.blockHash = computeBlockHash(block.previousHash, block.merkleRoot, block.date, block.version);
  block.updatedAt = now;
  await saveLedger(blocks, projectDir);
  return block;
}
|
|
45
|
+
/**
 * Walk the whole ledger and re-derive every hash to confirm integrity.
 * Per block it checks: presence, the previous-hash link, the merkle root
 * over the entry hashes, and the block hash itself.
 *
 * @param {string} [workDir] - project directory override
 * @returns {Promise<{valid: boolean, error?: string}>} first failure found, or valid
 */
async function verifyLedger(workDir) {
  const projectDir = workDir || getWorkingDirectory(void 0, true);
  const blocks = await loadLedger(projectDir);
  if (blocks.length === 0) return { valid: true };
  // Small helper so every failure path builds the message the same way.
  const fail = (index, reason) => ({ valid: false, error: `Block ${index} ${reason}` });
  for (let index = 0; index < blocks.length; index += 1) {
    const current = blocks[index];
    if (!current) return fail(index, "missing");
    const wantPrev = index === 0 ? GENESIS_HASH : blocks[index - 1]?.blockHash;
    if (!wantPrev) return fail(index, "missing previous block");
    if (current.previousHash !== wantPrev) return fail(index, "previous hash mismatch");
    const root = computeMerkleRoot(current.entries.map((entry) => entry.hash));
    if (current.merkleRoot !== root) return fail(index, "merkle root mismatch");
    const digest = computeBlockHash(current.previousHash, current.merkleRoot, current.date, current.version);
    if (current.blockHash !== digest) return fail(index, "hash mismatch");
  }
  return { valid: true };
}
|
|
74
|
+
/**
 * Reduce a list of leaf hashes to a single merkle root.
 * Each pass pairs adjacent nodes (an odd trailing node is paired with
 * itself) and hashes "left:right". An empty list yields sha256("").
 *
 * @param {string[]} hashes - leaf hashes
 * @returns {string} hex merkle root
 */
function computeMerkleRoot(hashes) {
  if (hashes.length === 0) return sha256("");
  let layer = [...hashes];
  while (layer.length > 1) {
    const parents = [];
    for (let i = 0; i < layer.length; i += 2) {
      const left = layer[i];
      const right = i + 1 < layer.length ? layer[i + 1] : left;
      parents.push(sha256(`${left}:${right}`));
    }
    layer = parents;
  }
  return layer[0];
}
|
|
90
|
+
/**
 * Hash the block header fields as "version:date:previousHash:merkleRoot".
 *
 * @param {string} previousHash
 * @param {string} merkleRoot
 * @param {string} date - ISO day (YYYY-MM-DD)
 * @param {number} version - ledger schema version
 * @returns {string} hex block hash
 */
function computeBlockHash(previousHash, merkleRoot, date, version) {
  const header = [version, date, previousHash, merkleRoot].join(":");
  return sha256(header);
}
|
|
93
|
+
/**
 * Build an empty ledger block for `date`, chained to `previousHash`.
 * `merkleRoot` and `blockHash` start empty; the caller fills them in after
 * appending entries. Property order is kept stable because the chain is
 * serialized to JSON as-is.
 *
 * @param {string} date - ISO day (YYYY-MM-DD)
 * @param {string} now - ISO timestamp used for createdAt/updatedAt
 * @param {string} previousHash - hash of the previous block (or genesis)
 * @returns {object} new, empty block
 */
function createBlock(date, now, previousHash) {
  const block = {
    version: LEDGER_VERSION,
    date,
    entries: [],
    previousHash,
    merkleRoot: "",
    blockHash: "",
    createdAt: now,
    updatedAt: now
  };
  return block;
}
|
|
105
|
+
/**
 * Read and parse the ledger file for a project.
 * A missing, unreadable, or malformed file is treated as an empty chain —
 * the ledger is best-effort and must not break callers.
 *
 * @param {string} projectDir - project root directory
 * @returns {Promise<object[]>} parsed blocks, or [] on any failure
 */
async function loadLedger(projectDir) {
  const ledgerPath = join(getTrieDirectory(projectDir), "memory", LEDGER_FILENAME);
  try {
    if (!existsSync(ledgerPath)) return [];
    const raw = await readFile(ledgerPath, "utf-8");
    const blocks = JSON.parse(raw);
    return Array.isArray(blocks) ? blocks : [];
  } catch {
    return [];
  }
}
|
|
117
|
+
/**
 * Atomically persist the full block chain to the ledger file.
 *
 * @param {object[]} blocks - complete chain to write
 * @param {string} projectDir - project root directory
 * @returns {Promise<void>}
 */
async function saveLedger(blocks, projectDir) {
  const target = join(getTrieDirectory(projectDir), "memory", LEDGER_FILENAME);
  await atomicWriteJSON(target, blocks);
}
|
|
121
|
+
/**
 * Hex-encoded SHA-256 digest of a string.
 *
 * @param {string} input - data to hash (UTF-8)
 * @returns {string} 64-char lowercase hex digest
 */
function sha256(input) {
  const hasher = createHash("sha256");
  hasher.update(input);
  return hasher.digest("hex");
}
|
|
124
|
+
|
|
125
|
+
export {
|
|
126
|
+
appendIssuesToLedger,
|
|
127
|
+
verifyLedger,
|
|
128
|
+
computeMerkleRoot
|
|
129
|
+
};
|
|
130
|
+
//# sourceMappingURL=chunk-YR4BMGYO.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/memory/ledger.ts"],"sourcesContent":["import { createHash } from 'crypto';\nimport { mkdir, readFile } from 'fs/promises';\nimport { existsSync } from 'fs';\nimport { join } from 'path';\nimport { atomicWriteJSON } from '../utils/atomic-write.js';\nimport { getWorkingDirectory, getTrieDirectory } from '../utils/workspace.js';\nimport type { StoredIssue } from './issue-store.js';\n\nconst LEDGER_FILENAME = 'ledger.json';\nconst GENESIS_HASH = '0'.repeat(64);\nconst LEDGER_VERSION = 1;\n\nexport interface LedgerEntry {\n id: string;\n hash: string;\n severity: string;\n file: string;\n agent: string;\n timestamp: string;\n}\n\nexport interface LedgerBlock {\n version: number;\n date: string;\n entries: LedgerEntry[];\n previousHash: string;\n merkleRoot: string;\n blockHash: string;\n createdAt: string;\n updatedAt: string;\n}\n\nexport interface LedgerVerificationResult {\n valid: boolean;\n error?: string;\n}\n\nexport async function appendIssuesToLedger(\n issues: StoredIssue[],\n workDir?: string\n): Promise<LedgerBlock | null> {\n if (issues.length === 0) return null;\n\n const projectDir = workDir || getWorkingDirectory(undefined, true);\n const memoryDir = join(getTrieDirectory(projectDir), 'memory');\n await mkdir(memoryDir, { recursive: true });\n\n const blocks = await loadLedger(projectDir);\n const today = new Date().toISOString().slice(0, 10);\n const now = new Date().toISOString();\n\n const entries: LedgerEntry[] = issues.map(issue => ({\n id: issue.id,\n hash: issue.hash,\n severity: issue.severity,\n file: issue.file,\n agent: issue.agent,\n timestamp: issue.timestamp,\n }));\n\n const previousBlock = blocks[blocks.length - 1];\n const block = previousBlock && previousBlock.date === today\n ? previousBlock\n : createBlock(today, now, previousBlock?.blockHash ?? 
GENESIS_HASH);\n\n if (block !== previousBlock) {\n blocks.push(block);\n }\n\n block.entries = [...block.entries, ...entries];\n block.merkleRoot = computeMerkleRoot(block.entries.map(entry => entry.hash));\n block.blockHash = computeBlockHash(block.previousHash, block.merkleRoot, block.date, block.version);\n block.updatedAt = now;\n\n await saveLedger(blocks, projectDir);\n return block;\n}\n\nexport async function verifyLedger(workDir?: string): Promise<LedgerVerificationResult> {\n const projectDir = workDir || getWorkingDirectory(undefined, true);\n const blocks = await loadLedger(projectDir);\n\n if (blocks.length === 0) {\n return { valid: true };\n }\n\n for (let i = 0; i < blocks.length; i += 1) {\n const block = blocks[i];\n if (!block) {\n return { valid: false, error: `Block ${i} missing` };\n }\n const expectedPreviousHash = i === 0\n ? GENESIS_HASH\n : blocks[i - 1]?.blockHash;\n\n if (!expectedPreviousHash) {\n return { valid: false, error: `Block ${i} missing previous block` };\n }\n\n if (block.previousHash !== expectedPreviousHash) {\n return { valid: false, error: `Block ${i} previous hash mismatch` };\n }\n\n const computedMerkleRoot = computeMerkleRoot(block.entries.map(entry => entry.hash));\n if (block.merkleRoot !== computedMerkleRoot) {\n return { valid: false, error: `Block ${i} merkle root mismatch` };\n }\n\n const computedBlockHash = computeBlockHash(block.previousHash, block.merkleRoot, block.date, block.version);\n if (block.blockHash !== computedBlockHash) {\n return { valid: false, error: `Block ${i} hash mismatch` };\n }\n }\n\n return { valid: true };\n}\n\nexport function computeMerkleRoot(hashes: string[]): string {\n if (hashes.length === 0) {\n return sha256('');\n }\n\n let level = hashes.slice();\n while (level.length > 1) {\n const nextLevel: string[] = [];\n for (let i = 0; i < level.length; i += 2) {\n const left = level[i];\n const right = level[i + 1] ?? 
left;\n nextLevel.push(sha256(`${left}:${right}`));\n }\n level = nextLevel;\n }\n\n return level[0]!;\n}\n\nfunction computeBlockHash(previousHash: string, merkleRoot: string, date: string, version: number): string {\n return sha256(`${version}:${date}:${previousHash}:${merkleRoot}`);\n}\n\nfunction createBlock(date: string, now: string, previousHash: string): LedgerBlock {\n return {\n version: LEDGER_VERSION,\n date,\n entries: [],\n previousHash,\n merkleRoot: '',\n blockHash: '',\n createdAt: now,\n updatedAt: now,\n };\n}\n\nasync function loadLedger(projectDir: string): Promise<LedgerBlock[]> {\n const ledgerPath = join(getTrieDirectory(projectDir), 'memory', LEDGER_FILENAME);\n try {\n if (!existsSync(ledgerPath)) return [];\n const content = await readFile(ledgerPath, 'utf-8');\n const parsed = JSON.parse(content);\n if (!Array.isArray(parsed)) return [];\n return parsed as LedgerBlock[];\n } catch {\n return [];\n }\n}\n\nasync function saveLedger(blocks: LedgerBlock[], projectDir: string): Promise<void> {\n const ledgerPath = join(getTrieDirectory(projectDir), 'memory', LEDGER_FILENAME);\n await atomicWriteJSON(ledgerPath, blocks);\n}\n\nfunction sha256(input: string): string {\n return 
createHash('sha256').update(input).digest('hex');\n}\n"],"mappings":";;;;;;;;;AAAA,SAAS,kBAAkB;AAC3B,SAAS,OAAO,gBAAgB;AAChC,SAAS,kBAAkB;AAC3B,SAAS,YAAY;AAKrB,IAAM,kBAAkB;AACxB,IAAM,eAAe,IAAI,OAAO,EAAE;AAClC,IAAM,iBAAiB;AA2BvB,eAAsB,qBACpB,QACA,SAC6B;AAC7B,MAAI,OAAO,WAAW,EAAG,QAAO;AAEhC,QAAM,aAAa,WAAW,oBAAoB,QAAW,IAAI;AACjE,QAAM,YAAY,KAAK,iBAAiB,UAAU,GAAG,QAAQ;AAC7D,QAAM,MAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAE1C,QAAM,SAAS,MAAM,WAAW,UAAU;AAC1C,QAAM,SAAQ,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE;AAClD,QAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AAEnC,QAAM,UAAyB,OAAO,IAAI,YAAU;AAAA,IAClD,IAAI,MAAM;AAAA,IACV,MAAM,MAAM;AAAA,IACZ,UAAU,MAAM;AAAA,IAChB,MAAM,MAAM;AAAA,IACZ,OAAO,MAAM;AAAA,IACb,WAAW,MAAM;AAAA,EACnB,EAAE;AAEF,QAAM,gBAAgB,OAAO,OAAO,SAAS,CAAC;AAC9C,QAAM,QAAQ,iBAAiB,cAAc,SAAS,QAClD,gBACA,YAAY,OAAO,KAAK,eAAe,aAAa,YAAY;AAEpE,MAAI,UAAU,eAAe;AAC3B,WAAO,KAAK,KAAK;AAAA,EACnB;AAEA,QAAM,UAAU,CAAC,GAAG,MAAM,SAAS,GAAG,OAAO;AAC7C,QAAM,aAAa,kBAAkB,MAAM,QAAQ,IAAI,WAAS,MAAM,IAAI,CAAC;AAC3E,QAAM,YAAY,iBAAiB,MAAM,cAAc,MAAM,YAAY,MAAM,MAAM,MAAM,OAAO;AAClG,QAAM,YAAY;AAElB,QAAM,WAAW,QAAQ,UAAU;AACnC,SAAO;AACT;AAEA,eAAsB,aAAa,SAAqD;AACtF,QAAM,aAAa,WAAW,oBAAoB,QAAW,IAAI;AACjE,QAAM,SAAS,MAAM,WAAW,UAAU;AAE1C,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO,EAAE,OAAO,KAAK;AAAA,EACvB;AAEA,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK,GAAG;AACzC,UAAM,QAAQ,OAAO,CAAC;AACtB,QAAI,CAAC,OAAO;AACV,aAAO,EAAE,OAAO,OAAO,OAAO,SAAS,CAAC,WAAW;AAAA,IACrD;AACA,UAAM,uBAAuB,MAAM,IAC/B,eACA,OAAO,IAAI,CAAC,GAAG;AAEnB,QAAI,CAAC,sBAAsB;AACzB,aAAO,EAAE,OAAO,OAAO,OAAO,SAAS,CAAC,0BAA0B;AAAA,IACpE;AAEA,QAAI,MAAM,iBAAiB,sBAAsB;AAC/C,aAAO,EAAE,OAAO,OAAO,OAAO,SAAS,CAAC,0BAA0B;AAAA,IACpE;AAEA,UAAM,qBAAqB,kBAAkB,MAAM,QAAQ,IAAI,WAAS,MAAM,IAAI,CAAC;AACnF,QAAI,MAAM,eAAe,oBAAoB;AAC3C,aAAO,EAAE,OAAO,OAAO,OAAO,SAAS,CAAC,wBAAwB;AAAA,IAClE;AAEA,UAAM,oBAAoB,iBAAiB,MAAM,cAAc,MAAM,YAAY,MAAM,MAAM,MAAM,OAAO;AAC1G,QAAI,MAAM,cAAc,mBAAmB;AACzC,aAAO,EAAE,OAAO,OAAO,OAAO,SAAS,CAAC,iBAAiB;AAAA,IAC3D;AAAA,EACF;AAEA,SAAO,EAAE,OAAO,KAAK;AACvB;AAEO,SAAS,kBAAkB,QAA0B;AAC1D,MAAI,OAAO,WAAW,GAAG;A
ACvB,WAAO,OAAO,EAAE;AAAA,EAClB;AAEA,MAAI,QAAQ,OAAO,MAAM;AACzB,SAAO,MAAM,SAAS,GAAG;AACvB,UAAM,YAAsB,CAAC;AAC7B,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,GAAG;AACxC,YAAM,OAAO,MAAM,CAAC;AACpB,YAAM,QAAQ,MAAM,IAAI,CAAC,KAAK;AAC9B,gBAAU,KAAK,OAAO,GAAG,IAAI,IAAI,KAAK,EAAE,CAAC;AAAA,IAC3C;AACA,YAAQ;AAAA,EACV;AAEA,SAAO,MAAM,CAAC;AAChB;AAEA,SAAS,iBAAiB,cAAsB,YAAoB,MAAc,SAAyB;AACzG,SAAO,OAAO,GAAG,OAAO,IAAI,IAAI,IAAI,YAAY,IAAI,UAAU,EAAE;AAClE;AAEA,SAAS,YAAY,MAAc,KAAa,cAAmC;AACjF,SAAO;AAAA,IACL,SAAS;AAAA,IACT;AAAA,IACA,SAAS,CAAC;AAAA,IACV;AAAA,IACA,YAAY;AAAA,IACZ,WAAW;AAAA,IACX,WAAW;AAAA,IACX,WAAW;AAAA,EACb;AACF;AAEA,eAAe,WAAW,YAA4C;AACpE,QAAM,aAAa,KAAK,iBAAiB,UAAU,GAAG,UAAU,eAAe;AAC/E,MAAI;AACF,QAAI,CAAC,WAAW,UAAU,EAAG,QAAO,CAAC;AACrC,UAAM,UAAU,MAAM,SAAS,YAAY,OAAO;AAClD,UAAM,SAAS,KAAK,MAAM,OAAO;AACjC,QAAI,CAAC,MAAM,QAAQ,MAAM,EAAG,QAAO,CAAC;AACpC,WAAO;AAAA,EACT,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;AAEA,eAAe,WAAW,QAAuB,YAAmC;AAClF,QAAM,aAAa,KAAK,iBAAiB,UAAU,GAAG,UAAU,eAAe;AAC/E,QAAM,gBAAgB,YAAY,MAAM;AAC1C;AAEA,SAAS,OAAO,OAAuB;AACrC,SAAO,WAAW,QAAQ,EAAE,OAAO,KAAK,EAAE,OAAO,KAAK;AACxD;","names":[]}
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
// src/agent/pattern-discovery.ts
|
|
2
|
+
// src/agent/pattern-discovery.ts
/**
 * Discovers recurring incident patterns from the context graph and the
 * incident file-trie: "hot" paths accumulating incidents, and pairs of
 * files that repeatedly appear together in incident-linked changes.
 */
var TriePatternDiscovery = class {
  constructor(graph, incidentIndex) {
    this.graph = graph;
    this.incidentIndex = incidentIndex;
  }
  /**
   * List trie hot zones with at least `threshold` incidents.
   * Paths ending in "/" are reported as directories, all others as files.
   */
  discoverHotPatterns(threshold = 3) {
    const trie = this.incidentIndex.getFileTrie();
    const patterns = [];
    for (const zone of trie.getHotZones(threshold)) {
      patterns.push({
        type: zone.path.endsWith("/") ? "directory" : "file",
        path: zone.path,
        incidentCount: zone.incidentCount,
        confidence: zone.confidence,
        relatedFiles: trie.getDirectoryIncidents(zone.path).map((i) => i.file)
      });
    }
    return patterns;
  }
  /**
   * Find ordered file pairs that co-occur in incident-linked changes at
   * least `minCount` times. Confidence is the pair count relative to the
   * rarer file's incident total, capped at 1; results sort by confidence
   * descending.
   */
  async discoverCoOccurrences(minCount = 3) {
    const incidents = await this.getAllIncidents();
    const pairCounts = new Map();
    for (const incident of incidents) {
      const files = await this.getFilesForIncident(incident);
      for (let first = 0; first < files.length; first++) {
        for (let second = first + 1; second < files.length; second++) {
          const a = files[first];
          const b = files[second];
          let inner = pairCounts.get(a);
          if (!inner) {
            inner = new Map();
            pairCounts.set(a, inner);
          }
          inner.set(b, (inner.get(b) || 0) + 1);
        }
      }
    }
    const patterns = [];
    for (const [a, inner] of pairCounts.entries()) {
      for (const [b, count] of inner.entries()) {
        if (count < minCount) continue;
        const trie = this.incidentIndex.getFileTrie();
        const denom = Math.min(
          trie.getIncidents(a).length || 1,
          trie.getIncidents(b).length || 1
        );
        patterns.push({
          files: [a, b],
          coOccurrences: count,
          confidence: Math.min(1, count / denom)
        });
      }
    }
    return patterns.sort((x, y) => y.confidence - x.confidence);
  }
  /** All graph nodes of type "incident". */
  async getAllIncidents() {
    const nodes = await this.graph.listNodes();
    return nodes.filter((n) => n.type === "incident");
  }
  /**
   * Collect file paths (normalized to forward slashes) from change nodes
   * linked to the incident via "causedBy"/"leadTo" edges in either
   * direction.
   */
  async getFilesForIncident(incident) {
    const seen = new Set();
    const edges = await this.graph.getEdges(incident.id, "both");
    for (const edge of edges) {
      if (edge.type !== "causedBy" && edge.type !== "leadTo") continue;
      const changeId = edge.type === "causedBy" ? edge.to_id : edge.from_id;
      const change = await this.graph.getNode("change", changeId);
      if (change?.data?.files && Array.isArray(change.data.files)) {
        for (const f of change.data.files) seen.add(f);
      }
    }
    return Array.from(seen).map((f) => f.replace(/\\/g, "/"));
  }
};
|
|
70
|
+
|
|
71
|
+
export {
|
|
72
|
+
TriePatternDiscovery
|
|
73
|
+
};
|
|
74
|
+
//# sourceMappingURL=chunk-ZV2K6M7T.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/agent/pattern-discovery.ts"],"sourcesContent":["import { IncidentIndex } from '../context/incident-index.js';\nimport type { ContextGraph } from '../context/graph.js';\nimport type { ChangeNode, IncidentNode } from '../context/nodes.js';\n\nexport interface HotPattern {\n type: 'file' | 'directory';\n path: string;\n incidentCount: number;\n confidence: number;\n relatedFiles?: string[];\n}\n\nexport interface CoOccurrencePattern {\n files: [string, string];\n coOccurrences: number;\n confidence: number;\n}\n\nexport class TriePatternDiscovery {\n constructor(\n private graph: ContextGraph,\n private incidentIndex: IncidentIndex\n ) {}\n\n discoverHotPatterns(threshold = 3): HotPattern[] {\n const trie = this.incidentIndex.getFileTrie();\n const hotZones = trie.getHotZones(threshold);\n\n return hotZones.map((zone) => ({\n type: zone.path.endsWith('/') ? 'directory' : 'file',\n path: zone.path,\n incidentCount: zone.incidentCount,\n confidence: zone.confidence,\n relatedFiles: trie.getDirectoryIncidents(zone.path).map((i) => i.file)\n }));\n }\n\n async discoverCoOccurrences(minCount = 3): Promise<CoOccurrencePattern[]> {\n const incidents = await this.getAllIncidents();\n const coOccurrences: Map<string, Map<string, number>> = new Map();\n\n for (const inc of incidents) {\n const files = await this.getFilesForIncident(inc);\n for (let i = 0; i < files.length; i++) {\n for (let j = i + 1; j < files.length; j++) {\n const a = files[i]!;\n const b = files[j]!;\n if (!coOccurrences.has(a)) coOccurrences.set(a, new Map());\n const counts = coOccurrences.get(a)!;\n counts.set(b, (counts.get(b) || 0) + 1);\n }\n }\n }\n\n const patterns: CoOccurrencePattern[] = [];\n for (const [a, map] of coOccurrences.entries()) {\n for (const [b, count] of map.entries()) {\n if (count >= minCount) {\n const denom = Math.min(\n this.incidentIndex.getFileTrie().getIncidents(a).length || 1,\n this.incidentIndex.getFileTrie().getIncidents(b).length || 1\n 
);\n patterns.push({\n files: [a, b],\n coOccurrences: count,\n confidence: Math.min(1, count / denom)\n });\n }\n }\n }\n\n return patterns.sort((x, y) => y.confidence - x.confidence);\n }\n\n private async getAllIncidents(): Promise<IncidentNode[]> {\n const nodes = await this.graph.listNodes();\n return nodes.filter((n) => n.type === 'incident') as IncidentNode[];\n }\n\n private async getFilesForIncident(incident: IncidentNode): Promise<string[]> {\n const files = new Set<string>();\n const edges = await this.graph.getEdges(incident.id, 'both');\n\n for (const edge of edges) {\n if (edge.type === 'causedBy' || edge.type === 'leadTo') {\n const changeId = edge.type === 'causedBy' ? edge.to_id : edge.from_id;\n const change = await this.graph.getNode('change', changeId) as ChangeNode | null;\n if (change?.data?.files && Array.isArray(change.data.files)) {\n change.data.files.forEach((f: string) => files.add(f));\n }\n }\n }\n\n return Array.from(files).map((f) => f.replace(/\\\\/g, '/'));\n 
}\n}\n"],"mappings":";AAkBO,IAAM,uBAAN,MAA2B;AAAA,EAChC,YACU,OACA,eACR;AAFQ;AACA;AAAA,EACP;AAAA,EAEH,oBAAoB,YAAY,GAAiB;AAC/C,UAAM,OAAO,KAAK,cAAc,YAAY;AAC5C,UAAM,WAAW,KAAK,YAAY,SAAS;AAE3C,WAAO,SAAS,IAAI,CAAC,UAAU;AAAA,MAC7B,MAAM,KAAK,KAAK,SAAS,GAAG,IAAI,cAAc;AAAA,MAC9C,MAAM,KAAK;AAAA,MACX,eAAe,KAAK;AAAA,MACpB,YAAY,KAAK;AAAA,MACjB,cAAc,KAAK,sBAAsB,KAAK,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,IACvE,EAAE;AAAA,EACJ;AAAA,EAEA,MAAM,sBAAsB,WAAW,GAAmC;AACxE,UAAM,YAAY,MAAM,KAAK,gBAAgB;AAC7C,UAAM,gBAAkD,oBAAI,IAAI;AAEhE,eAAW,OAAO,WAAW;AAC3B,YAAM,QAAQ,MAAM,KAAK,oBAAoB,GAAG;AAChD,eAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,iBAAS,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACzC,gBAAM,IAAI,MAAM,CAAC;AACjB,gBAAM,IAAI,MAAM,CAAC;AACjB,cAAI,CAAC,cAAc,IAAI,CAAC,EAAG,eAAc,IAAI,GAAG,oBAAI,IAAI,CAAC;AACzD,gBAAM,SAAS,cAAc,IAAI,CAAC;AAClC,iBAAO,IAAI,IAAI,OAAO,IAAI,CAAC,KAAK,KAAK,CAAC;AAAA,QACxC;AAAA,MACF;AAAA,IACF;AAEA,UAAM,WAAkC,CAAC;AACzC,eAAW,CAAC,GAAG,GAAG,KAAK,cAAc,QAAQ,GAAG;AAC9C,iBAAW,CAAC,GAAG,KAAK,KAAK,IAAI,QAAQ,GAAG;AACtC,YAAI,SAAS,UAAU;AACrB,gBAAM,QAAQ,KAAK;AAAA,YACjB,KAAK,cAAc,YAAY,EAAE,aAAa,CAAC,EAAE,UAAU;AAAA,YAC3D,KAAK,cAAc,YAAY,EAAE,aAAa,CAAC,EAAE,UAAU;AAAA,UAC7D;AACA,mBAAS,KAAK;AAAA,YACZ,OAAO,CAAC,GAAG,CAAC;AAAA,YACZ,eAAe;AAAA,YACf,YAAY,KAAK,IAAI,GAAG,QAAQ,KAAK;AAAA,UACvC,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF;AAEA,WAAO,SAAS,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU;AAAA,EAC5D;AAAA,EAEA,MAAc,kBAA2C;AACvD,UAAM,QAAQ,MAAM,KAAK,MAAM,UAAU;AACzC,WAAO,MAAM,OAAO,CAAC,MAAM,EAAE,SAAS,UAAU;AAAA,EAClD;AAAA,EAEA,MAAc,oBAAoB,UAA2C;AAC3E,UAAM,QAAQ,oBAAI,IAAY;AAC9B,UAAM,QAAQ,MAAM,KAAK,MAAM,SAAS,SAAS,IAAI,MAAM;AAE3D,eAAW,QAAQ,OAAO;AACxB,UAAI,KAAK,SAAS,cAAc,KAAK,SAAS,UAAU;AACtD,cAAM,WAAW,KAAK,SAAS,aAAa,KAAK,QAAQ,KAAK;AAC9D,cAAM,SAAS,MAAM,KAAK,MAAM,QAAQ,UAAU,QAAQ;AAC1D,YAAI,QAAQ,MAAM,SAAS,MAAM,QAAQ,OAAO,KAAK,KAAK,GAAG;AAC3D,iBAAO,KAAK,MAAM,QAAQ,CAAC,MAAc,MAAM,IAAI,CAAC,CAAC;AAAA,QACvD;AAAA,MACF;AAAA,IACF;AAEA,WAAO,MAAM,KAAK,KAAK,EAAE,IAAI,CAAC,MAAM,EAAE,QAAQ,OAAO,GAAG,CAAC;AAAA,EAC3D;AACF;","names":[]}
|