@triedotdev/mcp 1.0.94 → 1.0.99

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83) hide show
  1. package/README.md +145 -137
  2. package/dist/{chunk-JAAIHNOE.js → chunk-APMV77PU.js} +21 -6
  3. package/dist/chunk-APMV77PU.js.map +1 -0
  4. package/dist/{chunk-HLSBTOVE.js → chunk-B3MNN3XB.js} +13 -18
  5. package/dist/{chunk-HLSBTOVE.js.map → chunk-B3MNN3XB.js.map} +1 -1
  6. package/dist/{chunk-IIF5XDCJ.js → chunk-DIZFGLXE.js} +787 -4696
  7. package/dist/chunk-DIZFGLXE.js.map +1 -0
  8. package/dist/{chunk-JO6RVXS6.js → chunk-F4NJ4CBP.js} +2 -2
  9. package/dist/{chunk-AZRCKBGF.js → chunk-FNCCZ3XB.js} +1222 -75
  10. package/dist/chunk-FNCCZ3XB.js.map +1 -0
  11. package/dist/chunk-G76DYVGX.js +136 -0
  12. package/dist/chunk-G76DYVGX.js.map +1 -0
  13. package/dist/chunk-HSNE46VE.js +956 -0
  14. package/dist/chunk-HSNE46VE.js.map +1 -0
  15. package/dist/{chunk-STEFLYPR.js → chunk-IXO4G4D3.js} +2 -2
  16. package/dist/{chunk-OEYIOOYB.js → chunk-JDHR5BDR.js} +2 -3
  17. package/dist/chunk-NIASHOAB.js +1304 -0
  18. package/dist/chunk-NIASHOAB.js.map +1 -0
  19. package/dist/{chunk-CKM6A3G6.js → chunk-OVRG5RP3.js} +6 -7
  20. package/dist/chunk-OVRG5RP3.js.map +1 -0
  21. package/dist/{chunk-RYRVEO2B.js → chunk-R3I2GCZC.js} +3 -3
  22. package/dist/{chunk-WT3XQCG2.js → chunk-R4AAPFXC.js} +2 -2
  23. package/dist/cli/create-agent.js +931 -7
  24. package/dist/cli/create-agent.js.map +1 -1
  25. package/dist/cli/main.js +151 -383
  26. package/dist/cli/main.js.map +1 -1
  27. package/dist/cli/yolo-daemon.js +13 -20
  28. package/dist/cli/yolo-daemon.js.map +1 -1
  29. package/dist/{goal-manager-HOZ7R2QV.js → goal-manager-LAOT4QQX.js} +6 -6
  30. package/dist/guardian-agent-M352CBE5.js +19 -0
  31. package/dist/index.js +1025 -1550
  32. package/dist/index.js.map +1 -1
  33. package/dist/{issue-store-DXIOP6AK.js → issue-store-W2X33X2X.js} +4 -4
  34. package/dist/{progress-LHI66U7B.js → progress-PQVEM7BR.js} +2 -2
  35. package/dist/{vibe-code-signatures-C5A4BHXD.js → vibe-code-signatures-ELEWJFGZ.js} +3 -3
  36. package/dist/{vulnerability-signatures-SVIHJQO5.js → vulnerability-signatures-EIJQX2TS.js} +3 -3
  37. package/dist/workers/agent-worker.js +2 -11
  38. package/dist/workers/agent-worker.js.map +1 -1
  39. package/package.json +2 -2
  40. package/dist/agent-smith-MYQ35URL.js +0 -14
  41. package/dist/agent-smith-runner-4TBONXCP.js +0 -573
  42. package/dist/agent-smith-runner-4TBONXCP.js.map +0 -1
  43. package/dist/cache-manager-RMPRPD5T.js +0 -10
  44. package/dist/chunk-AZRCKBGF.js.map +0 -1
  45. package/dist/chunk-CKM6A3G6.js.map +0 -1
  46. package/dist/chunk-E2ZATINO.js +0 -10879
  47. package/dist/chunk-E2ZATINO.js.map +0 -1
  48. package/dist/chunk-FFWNZUG2.js +0 -266
  49. package/dist/chunk-FFWNZUG2.js.map +0 -1
  50. package/dist/chunk-FK6DQKDY.js +0 -175
  51. package/dist/chunk-FK6DQKDY.js.map +0 -1
  52. package/dist/chunk-IFGF33R5.js +0 -279
  53. package/dist/chunk-IFGF33R5.js.map +0 -1
  54. package/dist/chunk-IIF5XDCJ.js.map +0 -1
  55. package/dist/chunk-JAAIHNOE.js.map +0 -1
  56. package/dist/chunk-ODWDESYP.js +0 -141
  57. package/dist/chunk-ODWDESYP.js.map +0 -1
  58. package/dist/chunk-OWBWNXSC.js +0 -955
  59. package/dist/chunk-OWBWNXSC.js.map +0 -1
  60. package/dist/chunk-Q764X2WD.js +0 -2124
  61. package/dist/chunk-Q764X2WD.js.map +0 -1
  62. package/dist/chunk-RE6ZWXJC.js +0 -279
  63. package/dist/chunk-RE6ZWXJC.js.map +0 -1
  64. package/dist/chunk-RNJ6JKMA.js +0 -2270
  65. package/dist/chunk-RNJ6JKMA.js.map +0 -1
  66. package/dist/chunk-Y62VM3ER.js +0 -536
  67. package/dist/chunk-Y62VM3ER.js.map +0 -1
  68. package/dist/git-45LZUUYA.js +0 -29
  69. package/dist/guardian-agent-RB2UQP5V.js +0 -21
  70. package/dist/progress-LHI66U7B.js.map +0 -1
  71. package/dist/vibe-code-signatures-C5A4BHXD.js.map +0 -1
  72. package/dist/vulnerability-signatures-SVIHJQO5.js.map +0 -1
  73. /package/dist/{chunk-JO6RVXS6.js.map → chunk-F4NJ4CBP.js.map} +0 -0
  74. /package/dist/{chunk-STEFLYPR.js.map → chunk-IXO4G4D3.js.map} +0 -0
  75. /package/dist/{chunk-OEYIOOYB.js.map → chunk-JDHR5BDR.js.map} +0 -0
  76. /package/dist/{chunk-RYRVEO2B.js.map → chunk-R3I2GCZC.js.map} +0 -0
  77. /package/dist/{chunk-WT3XQCG2.js.map → chunk-R4AAPFXC.js.map} +0 -0
  78. /package/dist/{agent-smith-MYQ35URL.js.map → goal-manager-LAOT4QQX.js.map} +0 -0
  79. /package/dist/{cache-manager-RMPRPD5T.js.map → guardian-agent-M352CBE5.js.map} +0 -0
  80. /package/dist/{git-45LZUUYA.js.map → issue-store-W2X33X2X.js.map} +0 -0
  81. /package/dist/{goal-manager-HOZ7R2QV.js.map → progress-PQVEM7BR.js.map} +0 -0
  82. /package/dist/{guardian-agent-RB2UQP5V.js.map → vibe-code-signatures-ELEWJFGZ.js.map} +0 -0
  83. /package/dist/{issue-store-DXIOP6AK.js.map → vulnerability-signatures-EIJQX2TS.js.map} +0 -0
@@ -1,14 +1,938 @@
1
1
  #!/usr/bin/env node
2
2
  import {
3
- buildAgentFromDocument
4
- } from "../chunk-OWBWNXSC.js";
5
- import "../chunk-WT3XQCG2.js";
6
- import "../chunk-JAAIHNOE.js";
3
+ getTrieDirectory,
4
+ getWorkingDirectory
5
+ } from "../chunk-R4AAPFXC.js";
6
+ import "../chunk-APMV77PU.js";
7
7
  import "../chunk-DGUM43GV.js";
8
8
 
9
+ // src/ingest/agent-builder.ts
10
+ import Anthropic2 from "@anthropic-ai/sdk";
11
+
12
+ // src/ingest/document-parser.ts
13
+ import { readFile } from "fs/promises";
14
+ import { extname, basename } from "path";
15
+ async function parseDocument(filePath) {
16
+ const ext = extname(filePath).toLowerCase();
17
+ const fileType = getFileType(ext);
18
+ if (!fileType) {
19
+ throw new Error(`Unsupported file type: ${ext}. Supported: .pdf, .txt, .md, .rtf`);
20
+ }
21
+ let rawText;
22
+ let metadata = {
23
+ fileType,
24
+ originalPath: filePath,
25
+ parsedAt: (/* @__PURE__ */ new Date()).toISOString()
26
+ };
27
+ switch (fileType) {
28
+ case "pdf":
29
+ const pdfResult = await parsePDF(filePath);
30
+ rawText = pdfResult.text;
31
+ metadata.pageCount = pdfResult.pageCount;
32
+ if (pdfResult.title !== void 0) {
33
+ metadata.title = pdfResult.title;
34
+ }
35
+ break;
36
+ case "txt":
37
+ rawText = await parseTXT(filePath);
38
+ break;
39
+ case "md":
40
+ rawText = await parseMarkdown(filePath);
41
+ break;
42
+ case "rtf":
43
+ rawText = await parseRTF(filePath);
44
+ break;
45
+ default:
46
+ throw new Error(`Unsupported file type: ${fileType}`);
47
+ }
48
+ metadata.wordCount = countWords(rawText);
49
+ const sections = extractSections(rawText, fileType);
50
+ if (!metadata.title) {
51
+ metadata.title = extractTitle(rawText, sections) || basename(filePath, ext);
52
+ }
53
+ return {
54
+ rawText,
55
+ metadata,
56
+ sections
57
+ };
58
+ }
59
+ function getFileType(ext) {
60
+ const typeMap = {
61
+ ".pdf": "pdf",
62
+ ".txt": "txt",
63
+ ".md": "md",
64
+ ".markdown": "md",
65
+ ".rtf": "rtf"
66
+ };
67
+ return typeMap[ext] || null;
68
+ }
69
+ async function parsePDF(filePath) {
70
+ try {
71
+ const pdfParse = (await import("pdf-parse")).default;
72
+ const dataBuffer = await readFile(filePath);
73
+ const data = await pdfParse(dataBuffer);
74
+ const result = {
75
+ text: data.text,
76
+ pageCount: data.numpages
77
+ };
78
+ if (data.info?.Title) {
79
+ result.title = data.info.Title;
80
+ }
81
+ return result;
82
+ } catch (error) {
83
+ if (error.code === "MODULE_NOT_FOUND") {
84
+ throw new Error(
85
+ "PDF parsing requires the pdf-parse package. Install it with: npm install pdf-parse"
86
+ );
87
+ }
88
+ throw error;
89
+ }
90
+ }
91
+ async function parseTXT(filePath) {
92
+ return await readFile(filePath, "utf-8");
93
+ }
94
+ async function parseMarkdown(filePath) {
95
+ const content = await readFile(filePath, "utf-8");
96
+ return content.replace(/^[-*_]{3,}$/gm, "").replace(/\n{3,}/g, "\n\n").trim();
97
+ }
98
+ async function parseRTF(filePath) {
99
+ const content = await readFile(filePath, "utf-8");
100
+ return stripRTF(content);
101
+ }
102
+ function stripRTF(rtf) {
103
+ let text = rtf.replace(/^{\\rtf1[^}]*}/i, "");
104
+ text = text.replace(/\\[a-z]+(-?\d+)?[ ]?/gi, "");
105
+ text = text.replace(/{[^{}]*}/g, "");
106
+ text = text.replace(/\\'([0-9a-f]{2})/gi, (_, hex) => String.fromCharCode(parseInt(hex, 16))).replace(/\\par\b/g, "\n").replace(/\\tab\b/g, " ").replace(/\\line\b/g, "\n").replace(/[{}\\]/g, "");
107
+ text = text.replace(/\r\n/g, "\n").replace(/\n{3,}/g, "\n\n").trim();
108
+ return text;
109
+ }
110
+ function countWords(text) {
111
+ return text.split(/\s+/).filter((word) => word.length > 0).length;
112
+ }
113
+ function extractSections(text, fileType) {
114
+ const sections = [];
115
+ if (fileType === "md") {
116
+ const headingRegex = /^(#{1,6})\s+(.+)$/gm;
117
+ let match;
118
+ while ((match = headingRegex.exec(text)) !== null) {
119
+ const level = match[1].length;
120
+ const title = match[2].trim();
121
+ const startIndex = match.index;
122
+ if (sections.length > 0) {
123
+ const lastSection = sections[sections.length - 1];
124
+ lastSection.endIndex = startIndex;
125
+ lastSection.content = text.slice(
126
+ lastSection.startIndex,
127
+ startIndex
128
+ ).trim();
129
+ }
130
+ sections.push({
131
+ title,
132
+ level,
133
+ startIndex,
134
+ endIndex: text.length,
135
+ content: ""
136
+ });
137
+ }
138
+ if (sections.length > 0) {
139
+ const lastSection = sections[sections.length - 1];
140
+ lastSection.content = text.slice(
141
+ lastSection.startIndex
142
+ ).trim();
143
+ }
144
+ } else {
145
+ const chapterPatterns = [
146
+ /^chapter\s+(\d+|[ivxlc]+)[:\.\s]+(.*)$/gim,
147
+ /^section\s+(\d+|[ivxlc]+)[:\.\s]+(.*)$/gim,
148
+ /^part\s+(\d+|[ivxlc]+)[:\.\s]+(.*)$/gim,
149
+ /^(\d+)\.\s+([A-Z][^.]+)$/gm,
150
+ /^([A-Z][A-Z\s]+)$/gm
151
+ // ALL CAPS headings
152
+ ];
153
+ for (const pattern of chapterPatterns) {
154
+ let match;
155
+ pattern.lastIndex = 0;
156
+ while ((match = pattern.exec(text)) !== null) {
157
+ const title = match[2] || match[1] || match[0];
158
+ sections.push({
159
+ title: title.trim(),
160
+ level: 1,
161
+ startIndex: match.index,
162
+ endIndex: text.length,
163
+ content: ""
164
+ });
165
+ }
166
+ if (sections.length > 0) break;
167
+ }
168
+ sections.sort((a, b) => a.startIndex - b.startIndex);
169
+ for (let i = 0; i < sections.length; i++) {
170
+ const section = sections[i];
171
+ const nextSection = sections[i + 1];
172
+ const endIndex = nextSection !== void 0 ? nextSection.startIndex : text.length;
173
+ section.endIndex = endIndex;
174
+ section.content = text.slice(section.startIndex, endIndex).trim();
175
+ }
176
+ }
177
+ if (sections.length === 0) {
178
+ sections.push({
179
+ title: "Document Content",
180
+ level: 1,
181
+ startIndex: 0,
182
+ endIndex: text.length,
183
+ content: text.trim()
184
+ });
185
+ }
186
+ return sections;
187
+ }
188
+ function extractTitle(text, sections) {
189
+ const firstSection = sections[0];
190
+ if (firstSection !== void 0 && firstSection.title !== "Document Content") {
191
+ return firstSection.title;
192
+ }
193
+ const firstLine = text.split("\n")[0]?.trim();
194
+ if (firstLine && firstLine.length < 100 && !firstLine.includes(".")) {
195
+ return firstLine;
196
+ }
197
+ return null;
198
+ }
199
+ function chunkDocument(result, maxChunkSize = 4e3) {
200
+ const chunks = [];
201
+ if (result.rawText.length <= maxChunkSize) {
202
+ return [result.rawText];
203
+ }
204
+ if (result.sections.length > 1) {
205
+ let currentChunk = "";
206
+ for (const section of result.sections) {
207
+ const sectionText = `## ${section.title}
208
+
209
+ ${section.content}
210
+
211
+ `;
212
+ if (currentChunk.length + sectionText.length > maxChunkSize) {
213
+ if (currentChunk) chunks.push(currentChunk.trim());
214
+ if (sectionText.length > maxChunkSize) {
215
+ chunks.push(...splitByParagraphs(sectionText, maxChunkSize));
216
+ currentChunk = "";
217
+ } else {
218
+ currentChunk = sectionText;
219
+ }
220
+ } else {
221
+ currentChunk += sectionText;
222
+ }
223
+ }
224
+ if (currentChunk) chunks.push(currentChunk.trim());
225
+ } else {
226
+ chunks.push(...splitByParagraphs(result.rawText, maxChunkSize));
227
+ }
228
+ return chunks;
229
+ }
230
+ function splitByParagraphs(text, maxSize) {
231
+ const chunks = [];
232
+ const paragraphs = text.split(/\n\s*\n/);
233
+ let currentChunk = "";
234
+ for (const para of paragraphs) {
235
+ if (currentChunk.length + para.length + 2 > maxSize) {
236
+ if (currentChunk) chunks.push(currentChunk.trim());
237
+ if (para.length > maxSize) {
238
+ chunks.push(...splitBySentences(para, maxSize));
239
+ currentChunk = "";
240
+ } else {
241
+ currentChunk = para;
242
+ }
243
+ } else {
244
+ currentChunk += (currentChunk ? "\n\n" : "") + para;
245
+ }
246
+ }
247
+ if (currentChunk) chunks.push(currentChunk.trim());
248
+ return chunks;
249
+ }
250
+ function splitBySentences(text, maxSize) {
251
+ const chunks = [];
252
+ const sentences = text.match(/[^.!?]+[.!?]+/g) || [text];
253
+ let currentChunk = "";
254
+ for (const sentence of sentences) {
255
+ if (currentChunk.length + sentence.length > maxSize) {
256
+ if (currentChunk) chunks.push(currentChunk.trim());
257
+ currentChunk = sentence;
258
+ } else {
259
+ currentChunk += sentence;
260
+ }
261
+ }
262
+ if (currentChunk) chunks.push(currentChunk.trim());
263
+ return chunks;
264
+ }
265
+
266
+ // src/ingest/knowledge-compressor.ts
267
+ import Anthropic from "@anthropic-ai/sdk";
268
+
269
+ // src/ingest/compression-prompts.ts
270
+ var COMPRESSION_PROMPTS = {
271
+ /**
272
+ * System prompt for the knowledge extractor
273
+ */
274
+ system: `You are an expert knowledge extraction system. Your job is to analyze documents and extract structured, actionable knowledge that can be used by a code review agent.
275
+
276
+ You must output valid JSON that matches the required schema exactly. Be thorough but concise - extract the essence of the knowledge without unnecessary verbosity.
277
+
278
+ Focus on:
279
+ 1. Core concepts that are fundamental to understand the material
280
+ 2. Best practices that should be followed
281
+ 3. Anti-patterns and mistakes to avoid
282
+ 4. Detection patterns that could identify issues in code
283
+ 5. Key terminology and definitions`,
284
+ /**
285
+ * Prompt for extracting knowledge from a chunk
286
+ */
287
+ extractChunk: `Analyze this document chunk and extract structured knowledge.
288
+
289
+ ## Document Chunk:
290
+ {{chunk}}
291
+
292
+ ## Instructions:
293
+ Extract the following from this chunk:
294
+
295
+ 1. **Core Concepts**: Key ideas, principles, or rules that are taught
296
+ 2. **Best Practices**: Recommended approaches or patterns
297
+ 3. **Anti-Patterns**: Things to avoid, common mistakes
298
+ 4. **Code Patterns**: Any code patterns or detection rules that could identify issues
299
+ 5. **Terminology**: Important terms and their definitions
300
+
301
+ Output as JSON:
302
+ {
303
+ "coreConcepts": [
304
+ {
305
+ "name": "string",
306
+ "description": "string",
307
+ "importance": "critical" | "important" | "supplementary",
308
+ "keywords": ["string"]
309
+ }
310
+ ],
311
+ "bestPractices": [
312
+ {
313
+ "name": "string",
314
+ "description": "string",
315
+ "rationale": "string",
316
+ "codeExample": "string or null"
317
+ }
318
+ ],
319
+ "antiPatterns": [
320
+ {
321
+ "name": "string",
322
+ "description": "string",
323
+ "whyBad": "string",
324
+ "betterAlternative": "string"
325
+ }
326
+ ],
327
+ "codePatterns": [
328
+ {
329
+ "name": "string",
330
+ "description": "string",
331
+ "type": "best-practice" | "anti-pattern" | "security" | "compliance",
332
+ "regexHint": "string (a regex pattern that might detect this, or null)",
333
+ "keywords": ["string"]
334
+ }
335
+ ],
336
+ "terminology": {
337
+ "term": "definition"
338
+ }
339
+ }
340
+
341
+ Only include items that are clearly present in the chunk. Quality over quantity.`,
342
+ /**
343
+ * Prompt for merging extracted knowledge
344
+ */
345
+ mergeKnowledge: `Merge and deduplicate these knowledge extractions into a cohesive summary.
346
+
347
+ ## Extractions to Merge:
348
+ {{extractions}}
349
+
350
+ ## Instructions:
351
+ 1. Combine similar concepts
352
+ 2. Remove duplicates
353
+ 3. Prioritize the most important items
354
+ 4. Ensure consistency in terminology
355
+ 5. Rank items by importance
356
+
357
+ Output a single merged JSON with the same structure, keeping only the most valuable and distinct items.
358
+ Limit to top 20 core concepts, 15 best practices, 15 anti-patterns, and 25 code patterns.`,
359
+ /**
360
+ * Prompt for generating detection rules
361
+ */
362
+ generateDetectionRules: `Based on this knowledge base, generate detection rules for a code review agent.
363
+
364
+ ## Knowledge Base:
365
+ {{knowledge}}
366
+
367
+ ## Document Context:
368
+ - Title: {{title}}
369
+ - Domain: {{domain}}
370
+ - Word Count: {{wordCount}}
371
+
372
+ ## Instructions:
373
+ For each anti-pattern and best practice, generate detection rules that could find violations in code.
374
+
375
+ For each rule, provide:
376
+ 1. A unique ID (format: {{prefix}}-XXX)
377
+ 2. Clear name and description
378
+ 3. Severity (critical, serious, moderate, low, info)
379
+ 4. Detection patterns:
380
+ - regex: Array of regex patterns (JavaScript-compatible)
381
+ - keywords: Words that might indicate this issue
382
+ - semantic: Natural language description for AI-based detection
383
+ 5. Fix information:
384
+ - description: How to fix
385
+ - example: Code example if applicable
386
+ - autoFixable: boolean
387
+
388
+ Output as JSON array of detection rules:
389
+ [
390
+ {
391
+ "id": "string",
392
+ "name": "string",
393
+ "description": "string",
394
+ "severity": "critical" | "serious" | "moderate" | "low" | "info",
395
+ "patterns": {
396
+ "regex": ["string"],
397
+ "keywords": ["string"],
398
+ "semantic": "string"
399
+ },
400
+ "fix": {
401
+ "description": "string",
402
+ "example": "string or null",
403
+ "autoFixable": boolean
404
+ },
405
+ "regulation": "string or null (for legal/compliance rules)",
406
+ "category": "string"
407
+ }
408
+ ]
409
+
410
+ Generate 15-30 detection rules prioritizing the most impactful issues.`,
411
+ /**
412
+ * Prompt for generating agent prompts
413
+ */
414
+ generateAgentPrompts: `Generate system and analysis prompts for a code review agent based on this knowledge.
415
+
416
+ ## Knowledge Base Summary:
417
+ {{summary}}
418
+
419
+ ## Core Concepts:
420
+ {{concepts}}
421
+
422
+ ## Detection Focus:
423
+ {{patterns}}
424
+
425
+ ## Agent Info:
426
+ - Name: {{agentName}}
427
+ - Category: {{category}}
428
+ - Domain: {{domain}}
429
+
430
+ ## Instructions:
431
+ Generate:
432
+ 1. A system prompt that gives the agent its persona and expertise
433
+ 2. An analysis prompt template for reviewing code
434
+ 3. A fix prompt template for suggesting fixes
435
+
436
+ The prompts should:
437
+ - Reference the specific knowledge from the document
438
+ - Be authoritative but helpful
439
+ - Include the key concepts and terminology
440
+ - Guide the agent to look for the specific patterns
441
+
442
+ Output as JSON:
443
+ {
444
+ "systemPrompt": "string",
445
+ "analysisPrompt": "string (use {{code}}, {{filePath}}, {{language}} as placeholders)",
446
+ "fixPrompt": "string (use {{issue}}, {{code}}, {{filePath}} as placeholders)"
447
+ }`,
448
+ /**
449
+ * Prompt for detecting document domain
450
+ */
451
+ detectDomain: `Analyze this document and determine its primary domain/category.
452
+
453
+ ## Document Title:
454
+ {{title}}
455
+
456
+ ## Sample Content:
457
+ {{sample}}
458
+
459
+ ## Instructions:
460
+ Determine the primary domain of this document. Choose ONE:
461
+ - "technical": Programming, frameworks, libraries, code patterns
462
+ - "legal": Laws, regulations, compliance (GDPR, HIPAA, etc.)
463
+ - "policy": Company policies, internal rules, guidelines
464
+ - "security": Security practices, vulnerability prevention
465
+ - "architecture": System design, patterns, architecture principles
466
+ - "general": General knowledge, doesn't fit other categories
467
+
468
+ Also determine:
469
+ 1. What types of code/files this knowledge applies to
470
+ 2. What context signals should trigger this agent
471
+ 3. Key content patterns to look for
472
+
473
+ Output as JSON:
474
+ {
475
+ "domain": "technical" | "legal" | "policy" | "security" | "architecture" | "general",
476
+ "filePatterns": ["*.ext", ...],
477
+ "contentPatterns": ["regex pattern", ...],
478
+ "contextSignals": ["touchesAuth", "touchesUI", ...],
479
+ "reasoning": "Brief explanation of why this domain was chosen"
480
+ }`,
481
+ /**
482
+ * Prompt for generating a summary
483
+ */
484
+ generateSummary: `Create a concise executive summary of this document for use in an AI agent's context.
485
+
486
+ ## Document:
487
+ {{content}}
488
+
489
+ ## Instructions:
490
+ Write a 2-3 paragraph summary that:
491
+ 1. Explains what the document covers
492
+ 2. Highlights the most important takeaways
493
+ 3. Describes how this knowledge applies to code review
494
+
495
+ The summary will be used as context for an AI code review agent, so focus on actionable insights.
496
+
497
+ Keep it under 500 words.`
498
+ };
499
+
500
+ // src/ingest/knowledge-compressor.ts
501
+ async function compressKnowledge(document, options) {
502
+ const { agentName, maxChunkSize = 4e3, verbose = false } = options;
503
+ const client = new Anthropic();
504
+ const log = verbose ? console.error.bind(console) : () => {
505
+ };
506
+ log("\u{1F4DA} Starting knowledge compression...");
507
+ log(" \u251C\u2500 Detecting document domain...");
508
+ const domainInfo = await detectDomain(client, document);
509
+ log(` \u2502 \u2514\u2500 Domain: ${domainInfo.domain}`);
510
+ const chunks = chunkDocument(document, maxChunkSize);
511
+ log(` \u251C\u2500 Document chunked into ${chunks.length} pieces`);
512
+ log(" \u251C\u2500 Extracting knowledge from chunks...");
513
+ const extractions = [];
514
+ for (let i = 0; i < chunks.length; i++) {
515
+ log(` \u2502 \u251C\u2500 Processing chunk ${i + 1}/${chunks.length}...`);
516
+ try {
517
+ const chunk = chunks[i];
518
+ if (chunk !== void 0) {
519
+ const extraction = await extractFromChunk(client, chunk);
520
+ extractions.push(extraction);
521
+ }
522
+ } catch (error) {
523
+ log(` \u2502 \u2502 \u2514\u2500 Warning: Failed to extract from chunk ${i + 1}`);
524
+ }
525
+ }
526
+ log(" \u251C\u2500 Merging and deduplicating knowledge...");
527
+ const mergedKnowledge = await mergeExtractions(client, extractions);
528
+ log(" \u251C\u2500 Generating detection rules...");
529
+ const detectionRules = await generateDetectionRules(
530
+ client,
531
+ mergedKnowledge,
532
+ document.metadata.title || agentName,
533
+ domainInfo.domain,
534
+ agentName
535
+ );
536
+ log(` \u2502 \u2514\u2500 Generated ${detectionRules.length} detection rules`);
537
+ log(" \u2514\u2500 Generating knowledge summary...");
538
+ const summary = await generateSummary(client, document.rawText.slice(0, 8e3));
539
+ const compressed = {
540
+ domain: domainInfo.domain,
541
+ summary,
542
+ coreConcepts: mergedKnowledge.coreConcepts,
543
+ bestPractices: mergedKnowledge.bestPractices,
544
+ antiPatterns: mergedKnowledge.antiPatterns,
545
+ detectionRules,
546
+ glossary: mergedKnowledge.terminology,
547
+ sourceDocument: {
548
+ title: document.metadata.title || agentName,
549
+ wordCount: document.metadata.wordCount,
550
+ compressionRatio: Math.round(document.metadata.wordCount / (summary.length + JSON.stringify(detectionRules).length / 5))
551
+ }
552
+ };
553
+ return compressed;
554
+ }
555
+ async function detectDomain(client, document) {
556
+ const sampleSize = Math.min(document.rawText.length, 3e3);
557
+ const sample = document.rawText.slice(0, sampleSize);
558
+ const prompt = COMPRESSION_PROMPTS.detectDomain.replace("{{title}}", document.metadata.title || "Unknown").replace("{{sample}}", sample);
559
+ const response = await client.messages.create({
560
+ model: "claude-sonnet-4-20250514",
561
+ max_tokens: 1e3,
562
+ system: COMPRESSION_PROMPTS.system,
563
+ messages: [{ role: "user", content: prompt }]
564
+ });
565
+ const firstContent = response.content[0];
566
+ const text = firstContent !== void 0 && firstContent.type === "text" ? firstContent.text : "";
567
+ try {
568
+ const jsonMatch = text.match(/\{[\s\S]*\}/);
569
+ if (jsonMatch) {
570
+ return JSON.parse(jsonMatch[0]);
571
+ }
572
+ } catch (e) {
573
+ }
574
+ return {
575
+ domain: "general",
576
+ filePatterns: ["*"],
577
+ contentPatterns: [],
578
+ contextSignals: [],
579
+ reasoning: "Could not determine domain, using general"
580
+ };
581
+ }
582
+ async function extractFromChunk(client, chunk) {
583
+ const prompt = COMPRESSION_PROMPTS.extractChunk.replace("{{chunk}}", chunk);
584
+ const response = await client.messages.create({
585
+ model: "claude-sonnet-4-20250514",
586
+ max_tokens: 4e3,
587
+ system: COMPRESSION_PROMPTS.system,
588
+ messages: [{ role: "user", content: prompt }]
589
+ });
590
+ const firstContent = response.content[0];
591
+ const text = firstContent !== void 0 && firstContent.type === "text" ? firstContent.text : "";
592
+ try {
593
+ const jsonMatch = text.match(/\{[\s\S]*\}/);
594
+ if (jsonMatch) {
595
+ const parsed = JSON.parse(jsonMatch[0]);
596
+ return {
597
+ coreConcepts: parsed.coreConcepts || [],
598
+ bestPractices: parsed.bestPractices || [],
599
+ antiPatterns: parsed.antiPatterns || [],
600
+ codePatterns: parsed.codePatterns || [],
601
+ terminology: parsed.terminology || {}
602
+ };
603
+ }
604
+ } catch (e) {
605
+ }
606
+ return {
607
+ coreConcepts: [],
608
+ bestPractices: [],
609
+ antiPatterns: [],
610
+ codePatterns: [],
611
+ terminology: {}
612
+ };
613
+ }
614
+ async function mergeExtractions(client, extractions) {
615
+ if (extractions.length <= 2) {
616
+ return combineExtractions(extractions);
617
+ }
618
+ const prompt = COMPRESSION_PROMPTS.mergeKnowledge.replace("{{extractions}}", JSON.stringify(extractions, null, 2));
619
+ const response = await client.messages.create({
620
+ model: "claude-sonnet-4-20250514",
621
+ max_tokens: 8e3,
622
+ system: COMPRESSION_PROMPTS.system,
623
+ messages: [{ role: "user", content: prompt }]
624
+ });
625
+ const firstContent = response.content[0];
626
+ const text = firstContent !== void 0 && firstContent.type === "text" ? firstContent.text : "";
627
+ try {
628
+ const jsonMatch = text.match(/\{[\s\S]*\}/);
629
+ if (jsonMatch) {
630
+ return JSON.parse(jsonMatch[0]);
631
+ }
632
+ } catch (e) {
633
+ }
634
+ return combineExtractions(extractions);
635
+ }
636
+ function combineExtractions(extractions) {
637
+ const combined = {
638
+ coreConcepts: [],
639
+ bestPractices: [],
640
+ antiPatterns: [],
641
+ codePatterns: [],
642
+ terminology: {}
643
+ };
644
+ const seenConcepts = /* @__PURE__ */ new Set();
645
+ const seenPractices = /* @__PURE__ */ new Set();
646
+ const seenAntiPatterns = /* @__PURE__ */ new Set();
647
+ const seenPatterns = /* @__PURE__ */ new Set();
648
+ for (const extraction of extractions) {
649
+ for (const concept of extraction.coreConcepts) {
650
+ const key = concept.name.toLowerCase();
651
+ if (!seenConcepts.has(key)) {
652
+ seenConcepts.add(key);
653
+ combined.coreConcepts.push(concept);
654
+ }
655
+ }
656
+ for (const practice of extraction.bestPractices) {
657
+ const key = practice.name.toLowerCase();
658
+ if (!seenPractices.has(key)) {
659
+ seenPractices.add(key);
660
+ combined.bestPractices.push(practice);
661
+ }
662
+ }
663
+ for (const anti of extraction.antiPatterns) {
664
+ const key = anti.name.toLowerCase();
665
+ if (!seenAntiPatterns.has(key)) {
666
+ seenAntiPatterns.add(key);
667
+ combined.antiPatterns.push(anti);
668
+ }
669
+ }
670
+ for (const pattern of extraction.codePatterns) {
671
+ const key = pattern.name.toLowerCase();
672
+ if (!seenPatterns.has(key)) {
673
+ seenPatterns.add(key);
674
+ combined.codePatterns.push(pattern);
675
+ }
676
+ }
677
+ Object.assign(combined.terminology, extraction.terminology);
678
+ }
679
+ return combined;
680
+ }
681
+ async function generateDetectionRules(client, knowledge, title, domain, agentName) {
682
+ const prefix = agentName.toUpperCase().replace(/[^A-Z]/g, "").slice(0, 4) || "CUST";
683
+ const prompt = COMPRESSION_PROMPTS.generateDetectionRules.replace("{{knowledge}}", JSON.stringify(knowledge, null, 2)).replace("{{title}}", title).replace("{{domain}}", domain).replace("{{wordCount}}", String(knowledge.coreConcepts.length * 100)).replace(/\{\{prefix\}\}/g, prefix);
684
+ const response = await client.messages.create({
685
+ model: "claude-sonnet-4-20250514",
686
+ max_tokens: 8e3,
687
+ system: COMPRESSION_PROMPTS.system,
688
+ messages: [{ role: "user", content: prompt }]
689
+ });
690
+ const firstContent = response.content[0];
691
+ const text = firstContent !== void 0 && firstContent.type === "text" ? firstContent.text : "";
692
+ try {
693
+ const jsonMatch = text.match(/\[[\s\S]*\]/);
694
+ if (jsonMatch) {
695
+ const rules = JSON.parse(jsonMatch[0]);
696
+ return rules.map((rule, i) => ({
697
+ id: rule.id || `${prefix}-${String(i + 1).padStart(3, "0")}`,
698
+ name: rule.name || "Unknown Rule",
699
+ description: rule.description || "",
700
+ severity: rule.severity || "moderate",
701
+ patterns: {
702
+ regex: rule.patterns?.regex || [],
703
+ keywords: rule.patterns?.keywords || [],
704
+ semantic: rule.patterns?.semantic || ""
705
+ },
706
+ fix: {
707
+ description: rule.fix?.description || "Review and fix manually",
708
+ example: rule.fix?.example || void 0,
709
+ autoFixable: rule.fix?.autoFixable || false
710
+ },
711
+ regulation: rule.regulation || void 0,
712
+ category: rule.category || domain
713
+ }));
714
+ }
715
+ } catch (e) {
716
+ console.error("Failed to parse detection rules:", e);
717
+ }
718
+ return [];
719
+ }
720
+ async function generateSummary(client, content) {
721
+ const prompt = COMPRESSION_PROMPTS.generateSummary.replace("{{content}}", content);
722
+ const response = await client.messages.create({
723
+ model: "claude-sonnet-4-20250514",
724
+ max_tokens: 1e3,
725
+ system: COMPRESSION_PROMPTS.system,
726
+ messages: [{ role: "user", content: prompt }]
727
+ });
728
+ const firstContent = response.content[0];
729
+ return firstContent !== void 0 && firstContent.type === "text" ? firstContent.text : "";
730
+ }
731
/**
 * Ask the model to generate the three prompt templates (system / analysis /
 * fix) for a new review agent, seeded with the compressed knowledge.
 *
 * The model is asked for a JSON object; if the response contains no
 * parseable JSON we fall back to generic templates built from the agent
 * name and category so agent creation never fails at this step.
 */
async function generateAgentPrompts(client, knowledge, agentName, category) {
  const prompt = COMPRESSION_PROMPTS.generateAgentPrompts
    .replace("{{summary}}", knowledge.summary)
    .replace("{{concepts}}", JSON.stringify(knowledge.coreConcepts.slice(0, 10), null, 2))
    .replace("{{patterns}}", JSON.stringify(knowledge.detectionRules.slice(0, 10), null, 2))
    .replace("{{agentName}}", agentName)
    .replace("{{category}}", category)
    .replace("{{domain}}", knowledge.domain);
  const response = await client.messages.create({
    model: "claude-sonnet-4-20250514",
    max_tokens: 4e3,
    system: COMPRESSION_PROMPTS.system,
    messages: [{ role: "user", content: prompt }]
  });
  const firstContent = response.content[0];
  const text = firstContent !== void 0 && firstContent.type === "text" ? firstContent.text : "";
  try {
    // Greedy match: spans from the first "{" to the last "}", so prose
    // around the JSON object is tolerated.
    const jsonMatch = text.match(/\{[\s\S]*\}/);
    if (jsonMatch) {
      return JSON.parse(jsonMatch[0]);
    }
  } catch (e) {
    // Don't swallow the failure silently: surface it (matching the
    // error-logging style used elsewhere in this module) before falling
    // back to the generic templates below.
    console.error("Failed to parse generated agent prompts:", e);
  }
  return {
    systemPrompt: `You are an expert code reviewer specializing in ${category}. Review code based on best practices and patterns from "${agentName}".`,
    analysisPrompt: `Review this code for issues related to ${category}:

\`\`\`{{language}}
{{code}}
\`\`\`

File: {{filePath}}`,
    fixPrompt: `Fix this issue: {{issue}}

Code:
\`\`\`{{language}}
{{code}}
\`\`\`

File: {{filePath}}`
  };
}
767
+
768
+ // src/ingest/agent-builder.ts
769
+ import { mkdir, writeFile, readFile as readFile2 } from "fs/promises";
770
+ import { join } from "path";
771
/**
 * End-to-end pipeline that turns a source document into a review-agent
 * configuration: parse -> compress knowledge -> generate prompts -> build
 * config -> persist. Never throws; failures are reported through the
 * returned result object (`success: false` plus an `error` message).
 *
 * @param options  { filePath, agentName, category?, ... } forwarded to buildAgentConfig
 * @param verbose  when true (default), progress is logged to stderr
 */
async function buildAgentFromDocument(options, verbose = true) {
  const { filePath, agentName, category } = options;
  const noop = () => {
  };
  const log = verbose ? console.error.bind(console) : noop;
  try {
    log("\u{1F4DA} Parsing document...");
    const doc = await parseDocument(filePath);
    log(` \u251C\u2500 File type: ${doc.metadata.fileType}`);
    log(` \u251C\u2500 Words: ${doc.metadata.wordCount.toLocaleString()}`);
    log(` \u2514\u2500 Sections: ${doc.sections.length}`);
    log("\n\u{1F9E0} Compressing knowledge...");
    // Only forward `category` when the caller actually supplied one.
    const compressOptions = {
      agentName,
      verbose,
      ...category !== void 0 && { category }
    };
    const knowledge = await compressKnowledge(doc, compressOptions);
    log(` \u251C\u2500 Core concepts: ${knowledge.coreConcepts.length}`);
    log(` \u251C\u2500 Best practices: ${knowledge.bestPractices.length}`);
    log(` \u251C\u2500 Anti-patterns: ${knowledge.antiPatterns.length}`);
    log(` \u2514\u2500 Detection rules: ${knowledge.detectionRules.length}`);
    log("\n\u{1F4DD} Generating agent prompts...");
    const client = new Anthropic2();
    const prompts = await generateAgentPrompts(client, knowledge, agentName, category || knowledge.domain);
    log("\n\u{1F916} Building agent configuration...");
    const config = buildAgentConfig(doc, knowledge, prompts, options);
    const savedPath = await saveAgentConfig(config);
    log(` \u2514\u2500 Saved to: ${savedPath}`);
    return {
      success: true,
      agentName: config.name,
      configPath: savedPath,
      stats: {
        documentWords: doc.metadata.wordCount,
        conceptsExtracted: knowledge.coreConcepts.length,
        patternsGenerated: knowledge.detectionRules.length,
        compressionRatio: knowledge.sourceDocument.compressionRatio
      }
    };
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    log(`
\u274C Error: ${errorMessage}`);
    return {
      success: false,
      agentName,
      configPath: "",
      stats: {
        documentWords: 0,
        conceptsExtracted: 0,
        patternsGenerated: 0,
        compressionRatio: 0
      },
      error: errorMessage
    };
  }
}
840
/**
 * Assemble the persisted agent configuration object from the parsed
 * document, the compressed knowledge, the generated prompts, and the
 * caller-supplied options (which may override display name, description
 * and category).
 */
function buildAgentConfig(document, knowledge, prompts, options) {
  const { agentName, displayName, description, category } = options;
  const docTitle = document.metadata.title;
  // Record where this agent came from; `documentTitle` is only present
  // when the parser extracted one.
  const source = {
    type: "document",
    originalFile: document.metadata.originalPath,
    fileType: document.metadata.fileType,
    compressedAt: (/* @__PURE__ */ new Date()).toISOString()
  };
  if (docTitle !== void 0) {
    source.documentTitle = docTitle;
  }
  return {
    name: sanitizeAgentName(agentName),
    displayName: displayName || formatDisplayName(agentName),
    description: description || `Code review agent based on "${document.metadata.title || agentName}"`,
    version: "1.0.0",
    category: category || knowledge.domain,
    source,
    systemPrompt: prompts.systemPrompt,
    analysisPrompt: prompts.analysisPrompt,
    fixPrompt: prompts.fixPrompt,
    activationRules: buildActivationRules(knowledge),
    patterns: knowledge.detectionRules,
    knowledge
  };
}
865
/**
 * Derive activation rules for a generated agent from its compressed
 * knowledge: which file globs it applies to, which content keywords should
 * trigger it, and how it ranks against other agents (lower `priority`
 * runs first).
 */
function buildActivationRules(knowledge) {
  // Per-domain defaults for file globs, context signals and priority.
  const domainRules = {
    technical: {
      filePatterns: ["*.ts", "*.tsx", "*.js", "*.jsx", "*.py", "*.go", "*.rs"],
      contextSignals: ["touchesUI", "touchesAPI"],
      priority: 2
    },
    legal: {
      filePatterns: ["*"],
      contextSignals: ["touchesUserData", "touchesAuth", "touchesPayments"],
      priority: 2
    },
    policy: {
      filePatterns: ["*"],
      contextSignals: ["touchesAuth", "touchesAPI", "touchesDatabase"],
      priority: 3
    },
    security: {
      filePatterns: ["*"],
      contextSignals: ["touchesAuth", "touchesCrypto", "touchesAPI", "touchesDatabase"],
      priority: 1
    },
    architecture: {
      filePatterns: ["*.ts", "*.tsx", "*.js", "*.jsx", "*.py", "*.go"],
      contextSignals: ["touchesAPI", "touchesDatabase"],
      priority: 2
    },
    general: {
      filePatterns: ["*"],
      contextSignals: [],
      priority: 3
    }
  };
  // Guard with hasOwnProperty: `knowledge.domain` is model-derived, and a
  // value such as "toString" or "constructor" would otherwise resolve to an
  // Object.prototype member (truthy, so `??` would not fall back) instead
  // of the general defaults.
  const domainDefaults = Object.prototype.hasOwnProperty.call(domainRules, knowledge.domain)
    ? domainRules[knowledge.domain]
    : domainRules.general;
  // Seed trigger keywords from each detection rule (top 3 keywords) ...
  const contentPatterns = [];
  for (const rule of knowledge.detectionRules) {
    if (rule.patterns.keywords) {
      contentPatterns.push(...rule.patterns.keywords.slice(0, 3));
    }
  }
  // ... and from the first 5 core concepts (top 2 keywords each).
  for (const concept of knowledge.coreConcepts.slice(0, 5)) {
    if (concept.keywords) {
      contentPatterns.push(...concept.keywords.slice(0, 2));
    }
  }
  // Dedupe (insertion order preserved) and cap the trigger list at 20.
  const uniquePatterns = [...new Set(contentPatterns)].slice(0, 20);
  return {
    filePatterns: domainDefaults.filePatterns ?? ["*"],
    contentPatterns: uniquePatterns,
    contextSignals: domainDefaults.contextSignals ?? [],
    minConfidence: 0.3,
    priority: domainDefaults.priority ?? 2
  };
}
919
/**
 * Persist an agent configuration as pretty-printed JSON under
 * `<trie dir>/agents/<name>.json`, creating the directory if needed.
 * Returns the path of the written file.
 */
async function saveAgentConfig(config) {
  const workingDir = getWorkingDirectory(void 0, true);
  const agentsDir = join(getTrieDirectory(workingDir), "agents");
  await mkdir(agentsDir, { recursive: true });
  const targetPath = join(agentsDir, `${config.name}.json`);
  const serialized = JSON.stringify(config, null, 2);
  await writeFile(targetPath, serialized);
  return targetPath;
}
926
/**
 * Normalize a free-form agent name into a slug: lowercase, any run of
 * characters outside [a-z0-9-] collapsed to a single hyphen, and no
 * leading or trailing hyphen.
 */
function sanitizeAgentName(name) {
  const lowered = name.toLowerCase();
  const hyphenated = lowered.replace(/[^a-z0-9-]/g, "-");
  const collapsed = hyphenated.replace(/-+/g, "-");
  return collapsed.replace(/^-|-$/g, "");
}
929
/**
 * Turn a slug-like name ("my-cool_agent") into a human-readable title
 * ("My Cool Agent"): split on hyphens/underscores and capitalize the
 * first character of each piece.
 */
function formatDisplayName(name) {
  const capitalized = [];
  for (const word of name.split(/[-_]/)) {
    capitalized.push(word.charAt(0).toUpperCase() + word.slice(1));
  }
  return capitalized.join(" ");
}
932
+
9
933
  // src/cli/create-agent.ts
10
934
  import { existsSync } from "fs";
11
- import { resolve, basename, extname } from "path";
935
+ import { resolve, basename as basename2, extname as extname2 } from "path";
12
936
  function parseArgs() {
13
937
  const args = process.argv.slice(2);
14
938
  if (args.length === 0 || args.includes("--help") || args.includes("-h")) {
@@ -25,7 +949,7 @@ function parseArgs() {
25
949
  filePath = resolve(filePath);
26
950
  let agentName = args[1];
27
951
  if (!agentName || agentName.startsWith("--")) {
28
- const filename = basename(filePath, extname(filePath));
952
+ const filename = basename2(filePath, extname2(filePath));
29
953
  agentName = filename.toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-|-$/g, "");
30
954
  console.log(`Auto-generated agent name: ${agentName}`);
31
955
  }
@@ -109,7 +1033,7 @@ async function main() {
109
1033
  console.error("\nTip: Drag & drop the file directly into the terminal!");
110
1034
  process.exit(1);
111
1035
  }
112
- const ext = extname(options.filePath).toLowerCase();
1036
+ const ext = extname2(options.filePath).toLowerCase();
113
1037
  const supportedTypes = [".pdf", ".txt", ".md", ".markdown", ".rtf"];
114
1038
  if (!supportedTypes.includes(ext)) {
115
1039
  console.error(`Unsupported file type: ${ext}`);