@triedotdev/mcp 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/QUICK_START.md +230 -0
- package/README.md +235 -0
- package/dist/chunk-3CS6Z2SL.js +927 -0
- package/dist/chunk-3CS6Z2SL.js.map +1 -0
- package/dist/chunk-6NLHFIYA.js +344 -0
- package/dist/chunk-6NLHFIYA.js.map +1 -0
- package/dist/chunk-DGUM43GV.js +11 -0
- package/dist/chunk-DGUM43GV.js.map +1 -0
- package/dist/chunk-E7CKHS3R.js +7615 -0
- package/dist/chunk-E7CKHS3R.js.map +1 -0
- package/dist/chunk-EYNAGEQK.js +950 -0
- package/dist/chunk-EYNAGEQK.js.map +1 -0
- package/dist/chunk-MR755QGT.js +927 -0
- package/dist/chunk-MR755QGT.js.map +1 -0
- package/dist/cli/create-agent.d.ts +1 -0
- package/dist/cli/create-agent.js +156 -0
- package/dist/cli/create-agent.js.map +1 -0
- package/dist/cli/main.d.ts +1 -0
- package/dist/cli/main.js +280 -0
- package/dist/cli/main.js.map +1 -0
- package/dist/cli/yolo-daemon.d.ts +1 -0
- package/dist/cli/yolo-daemon.js +326 -0
- package/dist/cli/yolo-daemon.js.map +1 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +3882 -0
- package/dist/index.js.map +1 -0
- package/dist/vibe-code-signatures-4CBHUSI7.js +15 -0
- package/dist/vibe-code-signatures-4CBHUSI7.js.map +1 -0
- package/dist/vulnerability-signatures-J3CUQ7VR.js +17 -0
- package/dist/vulnerability-signatures-J3CUQ7VR.js.map +1 -0
- package/package.json +77 -0
@@ -0,0 +1 @@
{"version":3,"sources":["../src/ingest/agent-builder.ts","../src/ingest/document-parser.ts","../src/ingest/knowledge-compressor.ts","../src/ingest/compression-prompts.ts"],"sourcesContent":["/**\n * Agent Builder - Generates complete agent configs from compressed knowledge\n */\n\nimport Anthropic from '@anthropic-ai/sdk';\nimport type { \n DocumentParserResult, \n CompressedKnowledge, \n GeneratedAgentConfig,\n CreateAgentOptions,\n CreateAgentResult \n} from '../types/custom-agent.js';\nimport { parseDocument } from './document-parser.js';\nimport { compressKnowledge, generateAgentPrompts } from './knowledge-compressor.js';\nimport { mkdir, writeFile, readFile } from 'fs/promises';\nimport { join } from 'path';\n\n/**\n * Build a complete agent from a document file\n */\nexport async function buildAgentFromDocument(\n options: CreateAgentOptions,\n verbose: boolean = true\n): Promise<CreateAgentResult> {\n const { filePath, agentName, category } = options;\n \n const log = verbose ? console.error.bind(console) : () => {};\n \n try {\n // Step 1: Parse the document\n log('š Parsing document...');\n const document = await parseDocument(filePath);\n log(` āā File type: ${document.metadata.fileType}`);\n log(` āā Words: ${document.metadata.wordCount.toLocaleString()}`);\n log(` āā Sections: ${document.sections.length}`);\n \n // Step 2: Compress knowledge\n log('\\nš§ Compressing knowledge...');\n const compressOptions: { agentName: string; category?: string; verbose: boolean } = {\n agentName,\n verbose,\n };\n if (category !== undefined) {\n compressOptions.category = category;\n }\n const knowledge = await compressKnowledge(document, compressOptions);\n log(` āā Core concepts: ${knowledge.coreConcepts.length}`);\n log(` āā Best practices: ${knowledge.bestPractices.length}`);\n log(` āā Anti-patterns: ${knowledge.antiPatterns.length}`);\n log(` āā Detection rules: ${knowledge.detectionRules.length}`);\n \n // Step 3: Generate agent prompts\n log('\\nš Generating agent prompts...');\n const client = new Anthropic();\n const prompts = await generateAgentPrompts(\n client,\n knowledge,\n agentName,\n category || knowledge.domain\n );\n \n // Step 4: Build agent config\n log('\\nš¤ Building agent configuration...');\n const agentConfig = buildAgentConfig(\n document,\n knowledge,\n prompts,\n options\n );\n \n // Step 5: Save agent config\n const configPath = await saveAgentConfig(agentConfig);\n log(` āā Saved to: ${configPath}`);\n \n return {\n success: true,\n agentName: agentConfig.name,\n configPath,\n stats: {\n documentWords: document.metadata.wordCount,\n conceptsExtracted: knowledge.coreConcepts.length,\n patternsGenerated: knowledge.detectionRules.length,\n compressionRatio: knowledge.sourceDocument.compressionRatio,\n },\n };\n } catch (error) {\n const errorMessage = error instanceof Error ? 
error.message : String(error);\n log(`\\nā Error: ${errorMessage}`);\n \n return {\n success: false,\n agentName,\n configPath: '',\n stats: {\n documentWords: 0,\n conceptsExtracted: 0,\n patternsGenerated: 0,\n compressionRatio: 0,\n },\n error: errorMessage,\n };\n }\n}\n\n/**\n * Build the agent configuration object\n */\nfunction buildAgentConfig(\n document: DocumentParserResult,\n knowledge: CompressedKnowledge,\n prompts: { systemPrompt: string; analysisPrompt: string; fixPrompt: string },\n options: CreateAgentOptions\n): GeneratedAgentConfig {\n const { agentName, displayName, description, category } = options;\n const docTitle = document.metadata.title;\n \n // Determine activation rules based on domain\n const activationRules = buildActivationRules(knowledge);\n \n return {\n name: sanitizeAgentName(agentName),\n displayName: displayName || formatDisplayName(agentName),\n description: description || `Code review agent based on \"${document.metadata.title || agentName}\"`,\n version: '1.0.0',\n category: category || knowledge.domain,\n \n source: {\n type: 'document',\n originalFile: document.metadata.originalPath,\n fileType: document.metadata.fileType,\n compressedAt: new Date().toISOString(),\n ...(docTitle !== undefined && { documentTitle: docTitle }),\n },\n \n systemPrompt: prompts.systemPrompt,\n analysisPrompt: prompts.analysisPrompt,\n fixPrompt: prompts.fixPrompt,\n \n activationRules,\n patterns: knowledge.detectionRules,\n knowledge,\n };\n}\n\n/**\n * Build activation rules based on domain\n */\nfunction buildActivationRules(knowledge: CompressedKnowledge): GeneratedAgentConfig['activationRules'] {\n const domainRules: Record<string, Partial<GeneratedAgentConfig['activationRules']>> = {\n technical: {\n filePatterns: ['*.ts', '*.tsx', '*.js', '*.jsx', '*.py', '*.go', '*.rs'],\n contextSignals: ['touchesUI', 'touchesAPI'],\n priority: 2,\n },\n legal: {\n filePatterns: ['*'],\n contextSignals: ['touchesUserData', 'touchesAuth', 'touchesPayments'],\n priority: 2,\n },\n policy: {\n filePatterns: ['*'],\n contextSignals: ['touchesAuth', 'touchesAPI', 'touchesDatabase'],\n priority: 3,\n },\n security: {\n filePatterns: ['*'],\n contextSignals: ['touchesAuth', 'touchesCrypto', 'touchesAPI', 'touchesDatabase'],\n priority: 1,\n },\n architecture: {\n filePatterns: ['*.ts', '*.tsx', '*.js', '*.jsx', '*.py', '*.go'],\n contextSignals: ['touchesAPI', 'touchesDatabase'],\n priority: 2,\n },\n general: {\n filePatterns: ['*'],\n contextSignals: [],\n priority: 3,\n },\n };\n \n const domainDefaults = domainRules[knowledge.domain] ?? domainRules.general!;\n \n // Extract content patterns from detection rules\n const contentPatterns: string[] = [];\n for (const rule of knowledge.detectionRules) {\n if (rule.patterns.keywords) {\n contentPatterns.push(...rule.patterns.keywords.slice(0, 3));\n }\n }\n \n // Add patterns from core concepts\n for (const concept of knowledge.coreConcepts.slice(0, 5)) {\n if (concept.keywords) {\n contentPatterns.push(...concept.keywords.slice(0, 2));\n }\n }\n \n // Deduplicate\n const uniquePatterns = [...new Set(contentPatterns)].slice(0, 20);\n \n return {\n filePatterns: domainDefaults.filePatterns ?? ['*'],\n contentPatterns: uniquePatterns,\n contextSignals: domainDefaults.contextSignals ?? [],\n minConfidence: 0.3,\n priority: domainDefaults.priority ?? 
2,\n };\n}\n\n/**\n * Save agent config to .trie/agents/\n */\nasync function saveAgentConfig(config: GeneratedAgentConfig): Promise<string> {\n const trieDir = join(process.cwd(), '.trie', 'agents');\n \n // Ensure directory exists\n await mkdir(trieDir, { recursive: true });\n \n const configPath = join(trieDir, `${config.name}.json`);\n await writeFile(configPath, JSON.stringify(config, null, 2));\n \n return configPath;\n}\n\n/**\n * Load an existing agent config\n */\nexport async function loadAgentConfig(name: string): Promise<GeneratedAgentConfig | null> {\n try {\n const configPath = join(process.cwd(), '.trie', 'agents', `${name}.json`);\n const content = await readFile(configPath, 'utf-8');\n return JSON.parse(content);\n } catch {\n return null;\n }\n}\n\n/**\n * List all custom agents\n */\nexport async function listCustomAgents(): Promise<string[]> {\n try {\n const { readdir } = await import('fs/promises');\n const trieDir = join(process.cwd(), '.trie', 'agents');\n const files = await readdir(trieDir);\n return files\n .filter(f => f.endsWith('.json'))\n .map(f => f.replace('.json', ''));\n } catch {\n return [];\n }\n}\n\n/**\n * Sanitize agent name\n */\nfunction sanitizeAgentName(name: string): string {\n return name\n .toLowerCase()\n .replace(/[^a-z0-9-]/g, '-')\n .replace(/-+/g, '-')\n .replace(/^-|-$/g, '');\n}\n\n/**\n * Format display name from agent name\n */\nfunction formatDisplayName(name: string): string {\n return name\n .split(/[-_]/)\n .map(word => word.charAt(0).toUpperCase() + word.slice(1))\n .join(' ');\n}\n\n","/**\n * Document Parser - Extracts text from PDF, TXT, MD, and RTF files\n */\n\nimport { readFile } from 'fs/promises';\nimport { extname, basename } from 'path';\nimport type { DocumentParserResult, DocumentSection, DocumentMetadata } from '../types/custom-agent.js';\n\n/**\n * Parse a document file and extract text content\n */\nexport async function parseDocument(filePath: string): Promise<DocumentParserResult> {\n const ext = extname(filePath).toLowerCase();\n const fileType = getFileType(ext);\n \n if (!fileType) {\n throw new Error(`Unsupported file type: ${ext}. 
Supported: .pdf, .txt, .md, .rtf`);\n }\n \n let rawText: string;\n let metadata: Partial<DocumentMetadata> = {\n fileType,\n originalPath: filePath,\n parsedAt: new Date().toISOString(),\n };\n \n switch (fileType) {\n case 'pdf':\n const pdfResult = await parsePDF(filePath);\n rawText = pdfResult.text;\n metadata.pageCount = pdfResult.pageCount;\n if (pdfResult.title !== undefined) {\n metadata.title = pdfResult.title;\n }\n break;\n \n case 'txt':\n rawText = await parseTXT(filePath);\n break;\n \n case 'md':\n rawText = await parseMarkdown(filePath);\n break;\n \n case 'rtf':\n rawText = await parseRTF(filePath);\n break;\n \n default:\n throw new Error(`Unsupported file type: ${fileType}`);\n }\n \n // Count words\n metadata.wordCount = countWords(rawText);\n \n // Extract sections\n const sections = extractSections(rawText, fileType);\n \n // Try to extract title from content if not found\n if (!metadata.title) {\n metadata.title = extractTitle(rawText, sections) || basename(filePath, ext);\n }\n \n return {\n rawText,\n metadata: metadata as DocumentMetadata,\n sections,\n };\n}\n\nfunction getFileType(ext: string): 'pdf' | 'txt' | 'md' | 'rtf' | null {\n const typeMap: Record<string, 'pdf' | 'txt' | 'md' | 'rtf'> = {\n '.pdf': 'pdf',\n '.txt': 'txt',\n '.md': 'md',\n '.markdown': 'md',\n '.rtf': 'rtf',\n };\n return typeMap[ext] || null;\n}\n\n/**\n * Parse PDF files using pdf-parse\n */\nasync function parsePDF(filePath: string): Promise<{ text: string; pageCount: number; title?: string }> {\n try {\n // Dynamic import to handle optional dependency\n // eslint-disable-next-line @typescript-eslint/no-require-imports\n const pdfParse = (await import('pdf-parse' as string)).default as (data: Buffer) => Promise<{ text: string; numpages: number; info?: { Title?: string } }>;\n const dataBuffer = await readFile(filePath);\n const data = await pdfParse(dataBuffer);\n \n const result: { text: string; pageCount: number; title?: string } = {\n text: data.text,\n pageCount: data.numpages,\n };\n if (data.info?.Title) {\n result.title = data.info.Title;\n }\n return result;\n } catch (error) {\n // If pdf-parse is not installed, provide helpful error\n if ((error as NodeJS.ErrnoException).code === 'MODULE_NOT_FOUND') {\n throw new Error(\n 'PDF parsing requires the pdf-parse package. 
Install it with: npm install pdf-parse'\n );\n }\n throw error;\n }\n}\n\n/**\n * Parse plain text files\n */\nasync function parseTXT(filePath: string): Promise<string> {\n return await readFile(filePath, 'utf-8');\n}\n\n/**\n * Parse Markdown files (strip some formatting for cleaner text)\n */\nasync function parseMarkdown(filePath: string): Promise<string> {\n const content = await readFile(filePath, 'utf-8');\n \n // Keep the markdown mostly intact but clean up some elements\n return content\n // Remove horizontal rules\n .replace(/^[-*_]{3,}$/gm, '')\n // Clean up excessive whitespace\n .replace(/\\n{3,}/g, '\\n\\n')\n .trim();\n}\n\n/**\n * Parse RTF files (basic RTF stripping)\n */\nasync function parseRTF(filePath: string): Promise<string> {\n const content = await readFile(filePath, 'utf-8');\n \n // Basic RTF to text conversion\n return stripRTF(content);\n}\n\n/**\n * Strip RTF formatting to get plain text\n */\nfunction stripRTF(rtf: string): string {\n // Remove RTF header\n let text = rtf.replace(/^{\\\\rtf1[^}]*}/i, '');\n \n // Remove RTF control words\n text = text.replace(/\\\\[a-z]+(-?\\d+)?[ ]?/gi, '');\n \n // Remove groups\n text = text.replace(/{[^{}]*}/g, '');\n \n // Handle special characters\n text = text\n .replace(/\\\\'([0-9a-f]{2})/gi, (_, hex) => String.fromCharCode(parseInt(hex, 16)))\n .replace(/\\\\par\\b/g, '\\n')\n .replace(/\\\\tab\\b/g, '\\t')\n .replace(/\\\\line\\b/g, '\\n')\n .replace(/[{}\\\\]/g, '');\n \n // Clean up whitespace\n text = text.replace(/\\r\\n/g, '\\n').replace(/\\n{3,}/g, '\\n\\n').trim();\n \n return text;\n}\n\n/**\n * Count words in text\n */\nfunction countWords(text: string): number {\n return text.split(/\\s+/).filter(word => word.length > 0).length;\n}\n\n/**\n * Extract sections/chapters from document\n */\nfunction extractSections(text: string, fileType: 'pdf' | 'txt' | 'md' | 'rtf'): DocumentSection[] {\n const sections: DocumentSection[] = [];\n \n if (fileType === 'md') {\n // Parse Markdown headings\n const headingRegex = /^(#{1,6})\\s+(.+)$/gm;\n let match: RegExpExecArray | null;\n \n while ((match = headingRegex.exec(text)) !== null) {\n const level = match[1]!.length;\n const title = match[2]!.trim();\n const startIndex = match.index;\n \n // Close previous section\n if (sections.length > 0) {\n const lastSection = sections[sections.length - 1]!;\n lastSection.endIndex = startIndex;\n lastSection.content = text.slice(\n lastSection.startIndex,\n startIndex\n ).trim();\n }\n \n sections.push({\n title,\n level,\n startIndex,\n endIndex: text.length,\n content: '',\n });\n }\n \n // Set content for last section\n if (sections.length > 0) {\n const lastSection = sections[sections.length - 1]!;\n lastSection.content = text.slice(\n lastSection.startIndex\n ).trim();\n }\n } else {\n // For other formats, try to detect chapters/sections\n const chapterPatterns = [\n /^chapter\\s+(\\d+|[ivxlc]+)[:\\.\\s]+(.*)$/gim,\n /^section\\s+(\\d+|[ivxlc]+)[:\\.\\s]+(.*)$/gim,\n /^part\\s+(\\d+|[ivxlc]+)[:\\.\\s]+(.*)$/gim,\n /^(\\d+)\\.\\s+([A-Z][^.]+)$/gm,\n /^([A-Z][A-Z\\s]+)$/gm, // ALL CAPS headings\n ];\n \n for (const pattern of chapterPatterns) {\n let match: RegExpExecArray | null;\n pattern.lastIndex = 0;\n \n while ((match = pattern.exec(text)) !== null) {\n const title = match[2] || match[1] || match[0];\n \n sections.push({\n title: title.trim(),\n level: 1,\n startIndex: match.index,\n endIndex: text.length,\n content: '',\n });\n }\n \n if (sections.length > 0) break;\n }\n \n // Sort by position and calculate 
content\n sections.sort((a, b) => a.startIndex - b.startIndex);\n \n for (let i = 0; i < sections.length; i++) {\n const section = sections[i]!;\n const nextSection = sections[i + 1];\n const endIndex = nextSection !== undefined\n ? nextSection.startIndex \n : text.length;\n section.endIndex = endIndex;\n section.content = text.slice(section.startIndex, endIndex).trim();\n }\n }\n \n // If no sections found, create one for the whole document\n if (sections.length === 0) {\n sections.push({\n title: 'Document Content',\n level: 1,\n startIndex: 0,\n endIndex: text.length,\n content: text.trim(),\n });\n }\n \n return sections;\n}\n\n/**\n * Try to extract document title from content\n */\nfunction extractTitle(text: string, sections: DocumentSection[]): string | null {\n // Try first section heading\n const firstSection = sections[0];\n if (firstSection !== undefined && firstSection.title !== 'Document Content') {\n return firstSection.title;\n }\n \n // Try first line if it looks like a title\n const firstLine = text.split('\\n')[0]?.trim();\n if (firstLine && firstLine.length < 100 && !firstLine.includes('.')) {\n return firstLine;\n }\n \n return null;\n}\n\n/**\n * Chunk document into smaller pieces for processing\n */\nexport function chunkDocument(\n result: DocumentParserResult, \n maxChunkSize: number = 4000\n): string[] {\n const chunks: string[] = [];\n \n // If document is small enough, return as single chunk\n if (result.rawText.length <= maxChunkSize) {\n return [result.rawText];\n }\n \n // Try to chunk by sections first\n if (result.sections.length > 1) {\n let currentChunk = '';\n \n for (const section of result.sections) {\n const sectionText = `## ${section.title}\\n\\n${section.content}\\n\\n`;\n \n if (currentChunk.length + sectionText.length > maxChunkSize) {\n if (currentChunk) chunks.push(currentChunk.trim());\n \n // If section itself is too large, split it\n if (sectionText.length > maxChunkSize) {\n chunks.push(...splitByParagraphs(sectionText, maxChunkSize));\n currentChunk = '';\n } else {\n currentChunk = sectionText;\n }\n } else {\n currentChunk += sectionText;\n }\n }\n \n if (currentChunk) chunks.push(currentChunk.trim());\n } else {\n // Split by paragraphs\n chunks.push(...splitByParagraphs(result.rawText, maxChunkSize));\n }\n \n return chunks;\n}\n\n/**\n * Split text by paragraphs\n */\nfunction splitByParagraphs(text: string, maxSize: number): string[] {\n const chunks: string[] = [];\n const paragraphs = text.split(/\\n\\s*\\n/);\n let currentChunk = '';\n \n for (const para of paragraphs) {\n if (currentChunk.length + para.length + 2 > maxSize) {\n if (currentChunk) chunks.push(currentChunk.trim());\n \n // If paragraph itself is too large, split by sentences\n if (para.length > maxSize) {\n chunks.push(...splitBySentences(para, maxSize));\n currentChunk = '';\n } else {\n currentChunk = para;\n }\n } else {\n currentChunk += (currentChunk ? 
'\\n\\n' : '') + para;\n }\n }\n \n if (currentChunk) chunks.push(currentChunk.trim());\n return chunks;\n}\n\n/**\n * Split text by sentences\n */\nfunction splitBySentences(text: string, maxSize: number): string[] {\n const chunks: string[] = [];\n const sentences = text.match(/[^.!?]+[.!?]+/g) || [text];\n let currentChunk = '';\n \n for (const sentence of sentences) {\n if (currentChunk.length + sentence.length > maxSize) {\n if (currentChunk) chunks.push(currentChunk.trim());\n currentChunk = sentence;\n } else {\n currentChunk += sentence;\n }\n }\n \n if (currentChunk) chunks.push(currentChunk.trim());\n return chunks;\n}\n\n","/**\n * Knowledge Compressor - AI-powered knowledge extraction and compression\n */\n\nimport Anthropic from '@anthropic-ai/sdk';\nimport { COMPRESSION_PROMPTS } from './compression-prompts.js';\nimport type { \n DocumentParserResult, \n CompressedKnowledge, \n CoreConcept, \n BestPractice, \n AntiPattern, \n DetectionRule \n} from '../types/custom-agent.js';\nimport { chunkDocument } from './document-parser.js';\n\ninterface ChunkExtraction {\n coreConcepts: CoreConcept[];\n bestPractices: BestPractice[];\n antiPatterns: AntiPattern[];\n codePatterns: {\n name: string;\n description: string;\n type: string;\n regexHint?: string;\n keywords: string[];\n }[];\n terminology: Record<string, string>;\n}\n\ninterface DomainDetection {\n domain: CompressedKnowledge['domain'];\n filePatterns: string[];\n contentPatterns: string[];\n contextSignals: string[];\n reasoning: string;\n}\n\n/**\n * Compress document knowledge using AI\n */\nexport async function compressKnowledge(\n document: DocumentParserResult,\n options: {\n agentName: string;\n category?: string;\n maxChunkSize?: number;\n verbose?: boolean;\n }\n): Promise<CompressedKnowledge> {\n const { agentName, maxChunkSize = 4000, verbose = false } = options;\n \n const client = new Anthropic();\n \n const log = verbose ? 
console.error.bind(console) : () => {};\n \n log('š Starting knowledge compression...');\n \n // Step 1: Detect domain\n log(' āā Detecting document domain...');\n const domainInfo = await detectDomain(client, document);\n log(` ā āā Domain: ${domainInfo.domain}`);\n \n // Step 2: Chunk the document\n const chunks = chunkDocument(document, maxChunkSize);\n log(` āā Document chunked into ${chunks.length} pieces`);\n \n // Step 3: Extract knowledge from each chunk\n log(' āā Extracting knowledge from chunks...');\n const extractions: ChunkExtraction[] = [];\n \n for (let i = 0; i < chunks.length; i++) {\n log(` ā āā Processing chunk ${i + 1}/${chunks.length}...`);\n try {\n const chunk = chunks[i];\n if (chunk !== undefined) {\n const extraction = await extractFromChunk(client, chunk);\n extractions.push(extraction);\n }\n } catch (error) {\n log(` ā ā āā Warning: Failed to extract from chunk ${i + 1}`);\n }\n }\n \n // Step 4: Merge extractions\n log(' āā Merging and deduplicating knowledge...');\n const mergedKnowledge = await mergeExtractions(client, extractions);\n \n // Step 5: Generate detection rules\n log(' āā Generating detection rules...');\n const detectionRules = await generateDetectionRules(\n client, \n mergedKnowledge, \n document.metadata.title || agentName,\n domainInfo.domain,\n agentName\n );\n log(` ā āā Generated ${detectionRules.length} detection rules`);\n \n // Step 6: Generate summary\n log(' āā Generating knowledge summary...');\n const summary = await generateSummary(client, document.rawText.slice(0, 8000));\n \n const compressed: CompressedKnowledge = {\n domain: domainInfo.domain,\n summary,\n coreConcepts: mergedKnowledge.coreConcepts,\n bestPractices: mergedKnowledge.bestPractices,\n antiPatterns: mergedKnowledge.antiPatterns,\n detectionRules,\n glossary: mergedKnowledge.terminology,\n sourceDocument: {\n title: document.metadata.title || agentName,\n wordCount: document.metadata.wordCount,\n compressionRatio: Math.round(document.metadata.wordCount / (summary.length + JSON.stringify(detectionRules).length / 5)),\n },\n };\n \n return compressed;\n}\n\n/**\n * Detect document domain\n */\nasync function detectDomain(\n client: Anthropic, \n document: DocumentParserResult\n): Promise<DomainDetection> {\n const sampleSize = Math.min(document.rawText.length, 3000);\n const sample = document.rawText.slice(0, sampleSize);\n \n const prompt = COMPRESSION_PROMPTS.detectDomain\n .replace('{{title}}', document.metadata.title || 'Unknown')\n .replace('{{sample}}', sample);\n \n const response = await client.messages.create({\n model: 'claude-sonnet-4-20250514',\n max_tokens: 1000,\n system: COMPRESSION_PROMPTS.system,\n messages: [{ role: 'user', content: prompt }],\n });\n \n const firstContent = response.content[0];\n const text = firstContent !== undefined && firstContent.type === 'text' ? 
firstContent.text : '';\n \n try {\n // Extract JSON from response\n const jsonMatch = text.match(/\\{[\\s\\S]*\\}/);\n if (jsonMatch) {\n return JSON.parse(jsonMatch[0]);\n }\n } catch (e) {\n // Fall back to defaults\n }\n \n return {\n domain: 'general',\n filePatterns: ['*'],\n contentPatterns: [],\n contextSignals: [],\n reasoning: 'Could not determine domain, using general',\n };\n}\n\n/**\n * Extract knowledge from a single chunk\n */\nasync function extractFromChunk(\n client: Anthropic, \n chunk: string\n): Promise<ChunkExtraction> {\n const prompt = COMPRESSION_PROMPTS.extractChunk.replace('{{chunk}}', chunk);\n \n const response = await client.messages.create({\n model: 'claude-sonnet-4-20250514',\n max_tokens: 4000,\n system: COMPRESSION_PROMPTS.system,\n messages: [{ role: 'user', content: prompt }],\n });\n \n const firstContent = response.content[0];\n const text = firstContent !== undefined && firstContent.type === 'text' ? firstContent.text : '';\n \n try {\n const jsonMatch = text.match(/\\{[\\s\\S]*\\}/);\n if (jsonMatch) {\n const parsed = JSON.parse(jsonMatch[0]);\n return {\n coreConcepts: parsed.coreConcepts || [],\n bestPractices: parsed.bestPractices || [],\n antiPatterns: parsed.antiPatterns || [],\n codePatterns: parsed.codePatterns || [],\n terminology: parsed.terminology || {},\n };\n }\n } catch (e) {\n // Return empty extraction\n }\n \n return {\n coreConcepts: [],\n bestPractices: [],\n antiPatterns: [],\n codePatterns: [],\n terminology: {},\n };\n}\n\n/**\n * Merge multiple extractions\n */\nasync function mergeExtractions(\n client: Anthropic,\n extractions: ChunkExtraction[]\n): Promise<ChunkExtraction> {\n // If only one or two extractions, just combine them\n if (extractions.length <= 2) {\n return combineExtractions(extractions);\n }\n \n // For larger sets, use AI to merge\n const prompt = COMPRESSION_PROMPTS.mergeKnowledge\n .replace('{{extractions}}', JSON.stringify(extractions, null, 2));\n \n const response = await client.messages.create({\n model: 'claude-sonnet-4-20250514',\n max_tokens: 8000,\n system: COMPRESSION_PROMPTS.system,\n messages: [{ role: 'user', content: prompt }],\n });\n \n const firstContent = response.content[0];\n const text = firstContent !== undefined && firstContent.type === 'text' ? 
firstContent.text : '';\n \n try {\n const jsonMatch = text.match(/\\{[\\s\\S]*\\}/);\n if (jsonMatch) {\n return JSON.parse(jsonMatch[0]);\n }\n } catch (e) {\n // Fall back to simple combine\n }\n \n return combineExtractions(extractions);\n}\n\n/**\n * Simple combination of extractions\n */\nfunction combineExtractions(extractions: ChunkExtraction[]): ChunkExtraction {\n const combined: ChunkExtraction = {\n coreConcepts: [],\n bestPractices: [],\n antiPatterns: [],\n codePatterns: [],\n terminology: {},\n };\n \n const seenConcepts = new Set<string>();\n const seenPractices = new Set<string>();\n const seenAntiPatterns = new Set<string>();\n const seenPatterns = new Set<string>();\n \n for (const extraction of extractions) {\n for (const concept of extraction.coreConcepts) {\n const key = concept.name.toLowerCase();\n if (!seenConcepts.has(key)) {\n seenConcepts.add(key);\n combined.coreConcepts.push(concept);\n }\n }\n \n for (const practice of extraction.bestPractices) {\n const key = practice.name.toLowerCase();\n if (!seenPractices.has(key)) {\n seenPractices.add(key);\n combined.bestPractices.push(practice);\n }\n }\n \n for (const anti of extraction.antiPatterns) {\n const key = anti.name.toLowerCase();\n if (!seenAntiPatterns.has(key)) {\n seenAntiPatterns.add(key);\n combined.antiPatterns.push(anti);\n }\n }\n \n for (const pattern of extraction.codePatterns) {\n const key = pattern.name.toLowerCase();\n if (!seenPatterns.has(key)) {\n seenPatterns.add(key);\n combined.codePatterns.push(pattern);\n }\n }\n \n Object.assign(combined.terminology, extraction.terminology);\n }\n \n return combined;\n}\n\n/**\n * Generate detection rules from knowledge\n */\nasync function generateDetectionRules(\n client: Anthropic,\n knowledge: ChunkExtraction,\n title: string,\n domain: string,\n agentName: string\n): Promise<DetectionRule[]> {\n const prefix = agentName.toUpperCase().replace(/[^A-Z]/g, '').slice(0, 4) || 'CUST';\n \n const prompt = COMPRESSION_PROMPTS.generateDetectionRules\n .replace('{{knowledge}}', JSON.stringify(knowledge, null, 2))\n .replace('{{title}}', title)\n .replace('{{domain}}', domain)\n .replace('{{wordCount}}', String(knowledge.coreConcepts.length * 100))\n .replace(/\\{\\{prefix\\}\\}/g, prefix);\n \n const response = await client.messages.create({\n model: 'claude-sonnet-4-20250514',\n max_tokens: 8000,\n system: COMPRESSION_PROMPTS.system,\n messages: [{ role: 'user', content: prompt }],\n });\n \n const firstContent = response.content[0];\n const text = firstContent !== undefined && firstContent.type === 'text' ? 
firstContent.text : '';\n \n try {\n const jsonMatch = text.match(/\\[[\\s\\S]*\\]/);\n if (jsonMatch) {\n const rules = JSON.parse(jsonMatch[0]);\n // Validate and clean up rules\n return rules.map((rule: Partial<DetectionRule>, i: number) => ({\n id: rule.id || `${prefix}-${String(i + 1).padStart(3, '0')}`,\n name: rule.name || 'Unknown Rule',\n description: rule.description || '',\n severity: rule.severity || 'moderate',\n patterns: {\n regex: rule.patterns?.regex || [],\n keywords: rule.patterns?.keywords || [],\n semantic: rule.patterns?.semantic || '',\n },\n fix: {\n description: rule.fix?.description || 'Review and fix manually',\n example: rule.fix?.example || undefined,\n autoFixable: rule.fix?.autoFixable || false,\n },\n regulation: rule.regulation || undefined,\n category: rule.category || domain,\n }));\n }\n } catch (e) {\n console.error('Failed to parse detection rules:', e);\n }\n \n return [];\n}\n\n/**\n * Generate document summary\n */\nasync function generateSummary(client: Anthropic, content: string): Promise<string> {\n const prompt = COMPRESSION_PROMPTS.generateSummary\n .replace('{{content}}', content);\n \n const response = await client.messages.create({\n model: 'claude-sonnet-4-20250514',\n max_tokens: 1000,\n system: COMPRESSION_PROMPTS.system,\n messages: [{ role: 'user', content: prompt }],\n });\n \n const firstContent = response.content[0];\n return firstContent !== undefined && firstContent.type === 'text' ? firstContent.text : '';\n}\n\n/**\n * Generate agent prompts from knowledge\n */\nexport async function generateAgentPrompts(\n client: Anthropic,\n knowledge: CompressedKnowledge,\n agentName: string,\n category: string\n): Promise<{ systemPrompt: string; analysisPrompt: string; fixPrompt: string }> {\n const prompt = COMPRESSION_PROMPTS.generateAgentPrompts\n .replace('{{summary}}', knowledge.summary)\n .replace('{{concepts}}', JSON.stringify(knowledge.coreConcepts.slice(0, 10), null, 2))\n .replace('{{patterns}}', JSON.stringify(knowledge.detectionRules.slice(0, 10), null, 2))\n .replace('{{agentName}}', agentName)\n .replace('{{category}}', category)\n .replace('{{domain}}', knowledge.domain);\n \n const response = await client.messages.create({\n model: 'claude-sonnet-4-20250514',\n max_tokens: 4000,\n system: COMPRESSION_PROMPTS.system,\n messages: [{ role: 'user', content: prompt }],\n });\n \n const firstContent = response.content[0];\n const text = firstContent !== undefined && firstContent.type === 'text' ? firstContent.text : '';\n \n try {\n const jsonMatch = text.match(/\\{[\\s\\S]*\\}/);\n if (jsonMatch) {\n return JSON.parse(jsonMatch[0]);\n }\n } catch (e) {\n // Return defaults\n }\n \n return {\n systemPrompt: `You are an expert code reviewer specializing in ${category}. Review code based on best practices and patterns from \"${agentName}\".`,\n analysisPrompt: `Review this code for issues related to ${category}:\\n\\n\\`\\`\\`{{language}}\\n{{code}}\\n\\`\\`\\`\\n\\nFile: {{filePath}}`,\n fixPrompt: `Fix this issue: {{issue}}\\n\\nCode:\\n\\`\\`\\`{{language}}\\n{{code}}\\n\\`\\`\\`\\n\\nFile: {{filePath}}`,\n };\n}\n\n","/**\n * Prompts for AI-powered knowledge compression\n */\n\nexport const COMPRESSION_PROMPTS = {\n /**\n * System prompt for the knowledge extractor\n */\n system: `You are an expert knowledge extraction system. Your job is to analyze documents and extract structured, actionable knowledge that can be used by a code review agent.\n\nYou must output valid JSON that matches the required schema exactly. 
Be thorough but concise - extract the essence of the knowledge without unnecessary verbosity.\n\nFocus on:\n1. Core concepts that are fundamental to understand the material\n2. Best practices that should be followed\n3. Anti-patterns and mistakes to avoid\n4. Detection patterns that could identify issues in code\n5. Key terminology and definitions`,\n\n /**\n * Prompt for extracting knowledge from a chunk\n */\n extractChunk: `Analyze this document chunk and extract structured knowledge.\n\n## Document Chunk:\n{{chunk}}\n\n## Instructions:\nExtract the following from this chunk:\n\n1. **Core Concepts**: Key ideas, principles, or rules that are taught\n2. **Best Practices**: Recommended approaches or patterns\n3. **Anti-Patterns**: Things to avoid, common mistakes\n4. **Code Patterns**: Any code patterns or detection rules that could identify issues\n5. **Terminology**: Important terms and their definitions\n\nOutput as JSON:\n{\n \"coreConcepts\": [\n {\n \"name\": \"string\",\n \"description\": \"string\",\n \"importance\": \"critical\" | \"important\" | \"supplementary\",\n \"keywords\": [\"string\"]\n }\n ],\n \"bestPractices\": [\n {\n \"name\": \"string\",\n \"description\": \"string\",\n \"rationale\": \"string\",\n \"codeExample\": \"string or null\"\n }\n ],\n \"antiPatterns\": [\n {\n \"name\": \"string\",\n \"description\": \"string\",\n \"whyBad\": \"string\",\n \"betterAlternative\": \"string\"\n }\n ],\n \"codePatterns\": [\n {\n \"name\": \"string\",\n \"description\": \"string\",\n \"type\": \"best-practice\" | \"anti-pattern\" | \"security\" | \"compliance\",\n \"regexHint\": \"string (a regex pattern that might detect this, or null)\",\n \"keywords\": [\"string\"]\n }\n ],\n \"terminology\": {\n \"term\": \"definition\"\n }\n}\n\nOnly include items that are clearly present in the chunk. Quality over quantity.`,\n\n /**\n * Prompt for merging extracted knowledge\n */\n mergeKnowledge: `Merge and deduplicate these knowledge extractions into a cohesive summary.\n\n## Extractions to Merge:\n{{extractions}}\n\n## Instructions:\n1. Combine similar concepts\n2. Remove duplicates\n3. Prioritize the most important items\n4. Ensure consistency in terminology\n5. Rank items by importance\n\nOutput a single merged JSON with the same structure, keeping only the most valuable and distinct items.\nLimit to top 20 core concepts, 15 best practices, 15 anti-patterns, and 25 code patterns.`,\n\n /**\n * Prompt for generating detection rules\n */\n generateDetectionRules: `Based on this knowledge base, generate detection rules for a code review agent.\n\n## Knowledge Base:\n{{knowledge}}\n\n## Document Context:\n- Title: {{title}}\n- Domain: {{domain}}\n- Word Count: {{wordCount}}\n\n## Instructions:\nFor each anti-pattern and best practice, generate detection rules that could find violations in code.\n\nFor each rule, provide:\n1. A unique ID (format: {{prefix}}-XXX)\n2. Clear name and description\n3. Severity (critical, serious, moderate, low, info)\n4. Detection patterns:\n - regex: Array of regex patterns (JavaScript-compatible)\n - keywords: Words that might indicate this issue\n - semantic: Natural language description for AI-based detection\n5. 
Fix information:\n - description: How to fix\n - example: Code example if applicable\n - autoFixable: boolean\n\nOutput as JSON array of detection rules:\n[\n {\n \"id\": \"string\",\n \"name\": \"string\",\n \"description\": \"string\",\n \"severity\": \"critical\" | \"serious\" | \"moderate\" | \"low\" | \"info\",\n \"patterns\": {\n \"regex\": [\"string\"],\n \"keywords\": [\"string\"],\n \"semantic\": \"string\"\n },\n \"fix\": {\n \"description\": \"string\",\n \"example\": \"string or null\",\n \"autoFixable\": boolean\n },\n \"regulation\": \"string or null (for legal/compliance rules)\",\n \"category\": \"string\"\n }\n]\n\nGenerate 15-30 detection rules prioritizing the most impactful issues.`,\n\n /**\n * Prompt for generating agent prompts\n */\n generateAgentPrompts: `Generate system and analysis prompts for a code review agent based on this knowledge.\n\n## Knowledge Base Summary:\n{{summary}}\n\n## Core Concepts:\n{{concepts}}\n\n## Detection Focus:\n{{patterns}}\n\n## Agent Info:\n- Name: {{agentName}}\n- Category: {{category}}\n- Domain: {{domain}}\n\n## Instructions:\nGenerate:\n1. A system prompt that gives the agent its persona and expertise\n2. An analysis prompt template for reviewing code\n3. A fix prompt template for suggesting fixes\n\nThe prompts should:\n- Reference the specific knowledge from the document\n- Be authoritative but helpful\n- Include the key concepts and terminology\n- Guide the agent to look for the specific patterns\n\nOutput as JSON:\n{\n \"systemPrompt\": \"string\",\n \"analysisPrompt\": \"string (use {{code}}, {{filePath}}, {{language}} as placeholders)\",\n \"fixPrompt\": \"string (use {{issue}}, {{code}}, {{filePath}} as placeholders)\"\n}`,\n\n /**\n * Prompt for detecting document domain\n */\n detectDomain: `Analyze this document and determine its primary domain/category.\n\n## Document Title:\n{{title}}\n\n## Sample Content:\n{{sample}}\n\n## Instructions:\nDetermine the primary domain of this document. Choose ONE:\n- \"technical\": Programming, frameworks, libraries, code patterns\n- \"legal\": Laws, regulations, compliance (GDPR, HIPAA, etc.)\n- \"policy\": Company policies, internal rules, guidelines\n- \"security\": Security practices, vulnerability prevention\n- \"architecture\": System design, patterns, architecture principles\n- \"general\": General knowledge, doesn't fit other categories\n\nAlso determine:\n1. What types of code/files this knowledge applies to\n2. What context signals should trigger this agent\n3. Key content patterns to look for\n\nOutput as JSON:\n{\n \"domain\": \"technical\" | \"legal\" | \"policy\" | \"security\" | \"architecture\" | \"general\",\n \"filePatterns\": [\"*.ext\", ...],\n \"contentPatterns\": [\"regex pattern\", ...],\n \"contextSignals\": [\"touchesAuth\", \"touchesUI\", ...],\n \"reasoning\": \"Brief explanation of why this domain was chosen\"\n}`,\n\n /**\n * Prompt for generating a summary\n */\n generateSummary: `Create a concise executive summary of this document for use in an AI agent's context.\n\n## Document:\n{{content}}\n\n## Instructions:\nWrite a 2-3 paragraph summary that:\n1. Explains what the document covers\n2. Highlights the most important takeaways\n3. 
Describes how this knowledge applies to code review\n\nThe summary will be used as context for an AI code review agent, so focus on actionable insights.\n\nKeep it under 500 words.`\n};\n\nexport type CompressionPromptKey = keyof typeof COMPRESSION_PROMPTS;\n\n"],"mappings":";AAIA,OAAOA,gBAAe;;;ACAtB,SAAS,gBAAgB;AACzB,SAAS,SAAS,gBAAgB;AAMlC,eAAsB,cAAc,UAAiD;AACnF,QAAM,MAAM,QAAQ,QAAQ,EAAE,YAAY;AAC1C,QAAM,WAAW,YAAY,GAAG;AAEhC,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,0BAA0B,GAAG,oCAAoC;AAAA,EACnF;AAEA,MAAI;AACJ,MAAI,WAAsC;AAAA,IACxC;AAAA,IACA,cAAc;AAAA,IACd,WAAU,oBAAI,KAAK,GAAE,YAAY;AAAA,EACnC;AAEA,UAAQ,UAAU;AAAA,IAChB,KAAK;AACH,YAAM,YAAY,MAAM,SAAS,QAAQ;AACzC,gBAAU,UAAU;AACpB,eAAS,YAAY,UAAU;AAC/B,UAAI,UAAU,UAAU,QAAW;AACjC,iBAAS,QAAQ,UAAU;AAAA,MAC7B;AACA;AAAA,IAEF,KAAK;AACH,gBAAU,MAAM,SAAS,QAAQ;AACjC;AAAA,IAEF,KAAK;AACH,gBAAU,MAAM,cAAc,QAAQ;AACtC;AAAA,IAEF,KAAK;AACH,gBAAU,MAAM,SAAS,QAAQ;AACjC;AAAA,IAEF;AACE,YAAM,IAAI,MAAM,0BAA0B,QAAQ,EAAE;AAAA,EACxD;AAGA,WAAS,YAAY,WAAW,OAAO;AAGvC,QAAM,WAAW,gBAAgB,SAAS,QAAQ;AAGlD,MAAI,CAAC,SAAS,OAAO;AACnB,aAAS,QAAQ,aAAa,SAAS,QAAQ,KAAK,SAAS,UAAU,GAAG;AAAA,EAC5E;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEA,SAAS,YAAY,KAAkD;AACrE,QAAM,UAAwD;AAAA,IAC5D,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,OAAO;AAAA,IACP,aAAa;AAAA,IACb,QAAQ;AAAA,EACV;AACA,SAAO,QAAQ,GAAG,KAAK;AACzB;AAKA,eAAe,SAAS,UAAgF;AACtG,MAAI;AAGF,UAAM,YAAY,MAAM,OAAO,WAAqB,GAAG;AACvD,UAAM,aAAa,MAAM,SAAS,QAAQ;AAC1C,UAAM,OAAO,MAAM,SAAS,UAAU;AAEtC,UAAM,SAA8D;AAAA,MAClE,MAAM,KAAK;AAAA,MACX,WAAW,KAAK;AAAA,IAClB;AACA,QAAI,KAAK,MAAM,OAAO;AACpB,aAAO,QAAQ,KAAK,KAAK;AAAA,IAC3B;AACA,WAAO;AAAA,EACT,SAAS,OAAO;AAEd,QAAK,MAAgC,SAAS,oBAAoB;AAChE,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AACA,UAAM;AAAA,EACR;AACF;AAKA,eAAe,SAAS,UAAmC;AACzD,SAAO,MAAM,SAAS,UAAU,OAAO;AACzC;AAKA,eAAe,cAAc,UAAmC;AAC9D,QAAM,UAAU,MAAM,SAAS,UAAU,OAAO;AAGhD,SAAO,QAEJ,QAAQ,iBAAiB,EAAE,EAE3B,QAAQ,WAAW,MAAM,EACzB,KAAK;AACV;AAKA,eAAe,SAAS,UAAmC;AACzD,QAAM,UAAU,MAAM,SAAS,UAAU,OAAO;AAGhD,SAAO,SAAS,OAAO;AACzB;AAKA,SAAS,SAAS,KAAqB;AAErC,MAAI,OAAO,IAAI,QAAQ,mBAAmB,EAAE;AAG5C,SAAO,KAAK,QAAQ,0BAA0B,EAAE;AAGhD,SAAO,KAAK,QAAQ,aAAa,EAAE;AAGnC,SAAO,KACJ,QAAQ,sBAAsB,CAAC,GAAG,QAAQ,OAAO,aAAa,SAAS,KAAK,EAAE,CAAC,CAAC,EAChF,QAAQ,YAAY,IAAI,EACxB,QAAQ,YAAY,GAAI,EACxB,QAAQ,aAAa,IAAI,EACzB,QAAQ,WAAW,EAAE;AAGxB,SAAO,KAAK,QAAQ,SAAS,IAAI,EAAE,QAAQ,WAAW,MAAM,EAAE,KAAK;AAEnE,SAAO;AACT;AAKA,SAAS,WAAW,MAAsB;AACxC,SAAO,KAAK,MAAM,KAAK,EAAE,OAAO,UAAQ,KAAK,SAAS,CAAC,EAAE;AAC3D;AAKA,SAAS,gBAAgB,MAAc,UAA2D;AAChG,QAAM,WAA8B,CAAC;AAErC,MAAI,aAAa,MAAM;AAErB,UAAM,eAAe;AACrB,QAAI;AAEJ,YAAQ,QAAQ,aAAa,KAAK,IAAI,OAAO,MAAM;AACjD,YAAM,QAAQ,MAAM,CAAC,EAAG;AACxB,YAAM,QAAQ,MAAM,CAAC,EAAG,KAAK;AAC7B,YAAM,aAAa,MAAM;AAGzB,UAAI,SAAS,SAAS,GAAG;AACvB,cAAM,cAAc,SAAS,SAAS,SAAS,CAAC;AAChD,oBAAY,WAAW;AACvB,oBAAY,UAAU,KAAK;AAAA,UACzB,YAAY;AAAA,UACZ;AAAA,QACF,EAAE,KAAK;AAAA,MACT;AAEA,eAAS,KAAK;AAAA,QACZ;AAAA,QACA;AAAA,QACA;AAAA,QACA,UAAU,KAAK;AAAA,QACf,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAGA,QAAI,SAAS,SAAS,GAAG;AACvB,YAAM,cAAc,SAAS,SAAS,SAAS,CAAC;AAChD,kBAAY,UAAU,KAAK;AAAA,QACzB,YAAY;AAAA,MACd,EAAE,KAAK;AAAA,IACT;AAAA,EACF,OAAO;AAEL,UAAM,kBAAkB;AAAA,MACtB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA;AAAA,IACF;AAEA,eAAW,WAAW,iBAAiB;AACrC,UAAI;AACJ,cAAQ,YAAY;AAEpB,cAAQ,QAAQ,QAAQ,KAAK,IAAI,OAAO,MAAM;AAC5C,cAAM,QAAQ,MAAM,CAAC,KAAK,MAAM,CAAC,KAAK,MAAM,CAAC;AAE7C,iBAAS,KAAK;AAAA,UACZ,OAAO,MAAM,KAAK;AAAA,UAClB,OAAO;AAAA,UACP,YAAY,MAAM;AAAA,UAClB,UAAU,KAAK;AAAA,UACf,SAAS;AAAA,QACX,CAAC;AAAA,MACH;AAEA,UAAI,SAAS,SAAS,EAAG;AAAA,IAC3B;AAGA,aAAS,KAAK,CAAC,GAAG,MAAM,EAAE,aAAa,EAAE,UAAU;AAEnD,aAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,YAAM
,UAAU,SAAS,CAAC;AAC1B,YAAM,cAAc,SAAS,IAAI,CAAC;AAClC,YAAM,WAAW,gBAAgB,SAC7B,YAAY,aACZ,KAAK;AACT,cAAQ,WAAW;AACnB,cAAQ,UAAU,KAAK,MAAM,QAAQ,YAAY,QAAQ,EAAE,KAAK;AAAA,IAClE;AAAA,EACF;AAGA,MAAI,SAAS,WAAW,GAAG;AACzB,aAAS,KAAK;AAAA,MACZ,OAAO;AAAA,MACP,OAAO;AAAA,MACP,YAAY;AAAA,MACZ,UAAU,KAAK;AAAA,MACf,SAAS,KAAK,KAAK;AAAA,IACrB,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAKA,SAAS,aAAa,MAAc,UAA4C;AAE9E,QAAM,eAAe,SAAS,CAAC;AAC/B,MAAI,iBAAiB,UAAa,aAAa,UAAU,oBAAoB;AAC3E,WAAO,aAAa;AAAA,EACtB;AAGA,QAAM,YAAY,KAAK,MAAM,IAAI,EAAE,CAAC,GAAG,KAAK;AAC5C,MAAI,aAAa,UAAU,SAAS,OAAO,CAAC,UAAU,SAAS,GAAG,GAAG;AACnE,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAKO,SAAS,cACd,QACA,eAAuB,KACb;AACV,QAAM,SAAmB,CAAC;AAG1B,MAAI,OAAO,QAAQ,UAAU,cAAc;AACzC,WAAO,CAAC,OAAO,OAAO;AAAA,EACxB;AAGA,MAAI,OAAO,SAAS,SAAS,GAAG;AAC9B,QAAI,eAAe;AAEnB,eAAW,WAAW,OAAO,UAAU;AACrC,YAAM,cAAc,MAAM,QAAQ,KAAK;AAAA;AAAA,EAAO,QAAQ,OAAO;AAAA;AAAA;AAE7D,UAAI,aAAa,SAAS,YAAY,SAAS,cAAc;AAC3D,YAAI,aAAc,QAAO,KAAK,aAAa,KAAK,CAAC;AAGjD,YAAI,YAAY,SAAS,cAAc;AACrC,iBAAO,KAAK,GAAG,kBAAkB,aAAa,YAAY,CAAC;AAC3D,yBAAe;AAAA,QACjB,OAAO;AACL,yBAAe;AAAA,QACjB;AAAA,MACF,OAAO;AACL,wBAAgB;AAAA,MAClB;AAAA,IACF;AAEA,QAAI,aAAc,QAAO,KAAK,aAAa,KAAK,CAAC;AAAA,EACnD,OAAO;AAEL,WAAO,KAAK,GAAG,kBAAkB,OAAO,SAAS,YAAY,CAAC;AAAA,EAChE;AAEA,SAAO;AACT;AAKA,SAAS,kBAAkB,MAAc,SAA2B;AAClE,QAAM,SAAmB,CAAC;AAC1B,QAAM,aAAa,KAAK,MAAM,SAAS;AACvC,MAAI,eAAe;AAEnB,aAAW,QAAQ,YAAY;AAC7B,QAAI,aAAa,SAAS,KAAK,SAAS,IAAI,SAAS;AACnD,UAAI,aAAc,QAAO,KAAK,aAAa,KAAK,CAAC;AAGjD,UAAI,KAAK,SAAS,SAAS;AACzB,eAAO,KAAK,GAAG,iBAAiB,MAAM,OAAO,CAAC;AAC9C,uBAAe;AAAA,MACjB,OAAO;AACL,uBAAe;AAAA,MACjB;AAAA,IACF,OAAO;AACL,uBAAiB,eAAe,SAAS,MAAM;AAAA,IACjD;AAAA,EACF;AAEA,MAAI,aAAc,QAAO,KAAK,aAAa,KAAK,CAAC;AACjD,SAAO;AACT;AAKA,SAAS,iBAAiB,MAAc,SAA2B;AACjE,QAAM,SAAmB,CAAC;AAC1B,QAAM,YAAY,KAAK,MAAM,gBAAgB,KAAK,CAAC,IAAI;AACvD,MAAI,eAAe;AAEnB,aAAW,YAAY,WAAW;AAChC,QAAI,aAAa,SAAS,SAAS,SAAS,SAAS;AACnD,UAAI,aAAc,QAAO,KAAK,aAAa,KAAK,CAAC;AACjD,qBAAe;AAAA,IACjB,OAAO;AACL,sBAAgB;AAAA,IAClB;AAAA,EACF;AAEA,MAAI,aAAc,QAAO,KAAK,aAAa,KAAK,CAAC;AACjD,SAAO;AACT;;;AC/XA,OAAO,eAAe;;;ACAf,IAAM,sBAAsB;AAAA;AAAA;AAAA;AAAA,EAIjC,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAcR,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EA2Dd,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBhB,wBAAwB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAqDxB,sBAAsB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAsCtB,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkCd,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAcnB;;;ADrMA,eAAsB,kBACpB,UACA,SAM8B;AAC9B,QAAM,EAAE,WAAW,eAAe,KAAM,UAAU,MAAM,IAAI;AAE5D,QAAM,SAAS,IAAI,UAAU;AAE7B,QAAM,MAAM,UAAU,QAAQ,MAAM,KAAK,OAAO,IAAI,MAAM;AAAA,EAAC;AAE3D,MAAI,6CAAsC;AAG1C,MAAI,8CAAoC;AACxC,QAAM,aAAa,MAAM,aAAa,QAAQ,QAAQ;AACtD,MAAI,mCAAoB,WAAW,MAAM,EAAE;AAG3C,QAAM
,SAAS,cAAc,UAAU,YAAY;AACnD,MAAI,yCAA+B,OAAO,MAAM,SAAS;AAGzD,MAAI,qDAA2C;AAC/C,QAAM,cAAiC,CAAC;AAExC,WAAS,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;AACtC,QAAI,4CAA6B,IAAI,CAAC,IAAI,OAAO,MAAM,KAAK;AAC5D,QAAI;AACF,YAAM,QAAQ,OAAO,CAAC;AACtB,UAAI,UAAU,QAAW;AACvB,cAAM,aAAa,MAAM,iBAAiB,QAAQ,KAAK;AACvD,oBAAY,KAAK,UAAU;AAAA,MAC7B;AAAA,IACF,SAAS,OAAO;AACd,UAAI,yEAAqD,IAAI,CAAC,EAAE;AAAA,IAClE;AAAA,EACF;AAGA,MAAI,wDAA8C;AAClD,QAAM,kBAAkB,MAAM,iBAAiB,QAAQ,WAAW;AAGlE,MAAI,+CAAqC;AACzC,QAAM,iBAAiB,MAAM;AAAA,IAC3B;AAAA,IACA;AAAA,IACA,SAAS,SAAS,SAAS;AAAA,IAC3B,WAAW;AAAA,IACX;AAAA,EACF;AACA,MAAI,qCAAsB,eAAe,MAAM,kBAAkB;AAGjE,MAAI,iDAAuC;AAC3C,QAAM,UAAU,MAAM,gBAAgB,QAAQ,SAAS,QAAQ,MAAM,GAAG,GAAI,CAAC;AAE7E,QAAM,aAAkC;AAAA,IACtC,QAAQ,WAAW;AAAA,IACnB;AAAA,IACA,cAAc,gBAAgB;AAAA,IAC9B,eAAe,gBAAgB;AAAA,IAC/B,cAAc,gBAAgB;AAAA,IAC9B;AAAA,IACA,UAAU,gBAAgB;AAAA,IAC1B,gBAAgB;AAAA,MACd,OAAO,SAAS,SAAS,SAAS;AAAA,MAClC,WAAW,SAAS,SAAS;AAAA,MAC7B,kBAAkB,KAAK,MAAM,SAAS,SAAS,aAAa,QAAQ,SAAS,KAAK,UAAU,cAAc,EAAE,SAAS,EAAE;AAAA,IACzH;AAAA,EACF;AAEA,SAAO;AACT;AAKA,eAAe,aACb,QACA,UAC0B;AAC1B,QAAM,aAAa,KAAK,IAAI,SAAS,QAAQ,QAAQ,GAAI;AACzD,QAAM,SAAS,SAAS,QAAQ,MAAM,GAAG,UAAU;AAEnD,QAAM,SAAS,oBAAoB,aAChC,QAAQ,aAAa,SAAS,SAAS,SAAS,SAAS,EACzD,QAAQ,cAAc,MAAM;AAE/B,QAAM,WAAW,MAAM,OAAO,SAAS,OAAO;AAAA,IAC5C,OAAO;AAAA,IACP,YAAY;AAAA,IACZ,QAAQ,oBAAoB;AAAA,IAC5B,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,EAC9C,CAAC;AAED,QAAM,eAAe,SAAS,QAAQ,CAAC;AACvC,QAAM,OAAO,iBAAiB,UAAa,aAAa,SAAS,SAAS,aAAa,OAAO;AAE9F,MAAI;AAEF,UAAM,YAAY,KAAK,MAAM,aAAa;AAC1C,QAAI,WAAW;AACb,aAAO,KAAK,MAAM,UAAU,CAAC,CAAC;AAAA,IAChC;AAAA,EACF,SAAS,GAAG;AAAA,EAEZ;AAEA,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,cAAc,CAAC,GAAG;AAAA,IAClB,iBAAiB,CAAC;AAAA,IAClB,gBAAgB,CAAC;AAAA,IACjB,WAAW;AAAA,EACb;AACF;AAKA,eAAe,iBACb,QACA,OAC0B;AAC1B,QAAM,SAAS,oBAAoB,aAAa,QAAQ,aAAa,KAAK;AAE1E,QAAM,WAAW,MAAM,OAAO,SAAS,OAAO;AAAA,IAC5C,OAAO;AAAA,IACP,YAAY;AAAA,IACZ,QAAQ,oBAAoB;AAAA,IAC5B,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,EAC9C,CAAC;AAED,QAAM,eAAe,SAAS,QAAQ,CAAC;AACvC,QAAM,OAAO,iBAAiB,UAAa,aAAa,SAAS,SAAS,aAAa,OAAO;AAE9F,MAAI;AACF,UAAM,YAAY,KAAK,MAAM,aAAa;AAC1C,QAAI,WAAW;AACb,YAAM,SAAS,KAAK,MAAM,UAAU,CAAC,CAAC;AACtC,aAAO;AAAA,QACL,cAAc,OAAO,gBAAgB,CAAC;AAAA,QACtC,eAAe,OAAO,iBAAiB,CAAC;AAAA,QACxC,cAAc,OAAO,gBAAgB,CAAC;AAAA,QACtC,cAAc,OAAO,gBAAgB,CAAC;AAAA,QACtC,aAAa,OAAO,eAAe,CAAC;AAAA,MACtC;AAAA,IACF;AAAA,EACF,SAAS,GAAG;AAAA,EAEZ;AAEA,SAAO;AAAA,IACL,cAAc,CAAC;AAAA,IACf,eAAe,CAAC;AAAA,IAChB,cAAc,CAAC;AAAA,IACf,cAAc,CAAC;AAAA,IACf,aAAa,CAAC;AAAA,EAChB;AACF;AAKA,eAAe,iBACb,QACA,aAC0B;AAE1B,MAAI,YAAY,UAAU,GAAG;AAC3B,WAAO,mBAAmB,WAAW;AAAA,EACvC;AAGA,QAAM,SAAS,oBAAoB,eAChC,QAAQ,mBAAmB,KAAK,UAAU,aAAa,MAAM,CAAC,CAAC;AAElE,QAAM,WAAW,MAAM,OAAO,SAAS,OAAO;AAAA,IAC5C,OAAO;AAAA,IACP,YAAY;AAAA,IACZ,QAAQ,oBAAoB;AAAA,IAC5B,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,EAC9C,CAAC;AAED,QAAM,eAAe,SAAS,QAAQ,CAAC;AACvC,QAAM,OAAO,iBAAiB,UAAa,aAAa,SAAS,SAAS,aAAa,OAAO;AAE9F,MAAI;AACF,UAAM,YAAY,KAAK,MAAM,aAAa;AAC1C,QAAI,WAAW;AACb,aAAO,KAAK,MAAM,UAAU,CAAC,CAAC;AAAA,IAChC;AAAA,EACF,SAAS,GAAG;AAAA,EAEZ;AAEA,SAAO,mBAAmB,WAAW;AACvC;AAKA,SAAS,mBAAmB,aAAiD;AAC3E,QAAM,WAA4B;AAAA,IAChC,cAAc,CAAC;AAAA,IACf,eAAe,CAAC;AAAA,IAChB,cAAc,CAAC;AAAA,IACf,cAAc,CAAC;AAAA,IACf,aAAa,CAAC;AAAA,EAChB;AAEA,QAAM,eAAe,oBAAI,IAAY;AACrC,QAAM,gBAAgB,oBAAI,IAAY;AACtC,QAAM,mBAAmB,oBAAI,IAAY;AACzC,QAAM,eAAe,oBAAI,IAAY;AAErC,aAAW,cAAc,aAAa;AACpC,eAAW,WAAW,WAAW,cAAc;AAC7C,YAAM,MAAM,QAAQ,KAAK,YAAY;AACrC,UAAI,CAAC,aAAa,IAAI,GAAG,GAAG;AAC1B,qBAAa,IAAI,GAAG;AACpB,iBAAS,aAAa,KAAK,OAAO;AAAA,MACpC;AAAA,IACF;AAEA,eAAW,YAAY,WAAW,eAAe;AAC/C,YAAM,MAAM,SAAS,KAAK,YAAY;AACtC,UAAI,CAAC,cAAc,IA
AI,GAAG,GAAG;AAC3B,sBAAc,IAAI,GAAG;AACrB,iBAAS,cAAc,KAAK,QAAQ;AAAA,MACtC;AAAA,IACF;AAEA,eAAW,QAAQ,WAAW,cAAc;AAC1C,YAAM,MAAM,KAAK,KAAK,YAAY;AAClC,UAAI,CAAC,iBAAiB,IAAI,GAAG,GAAG;AAC9B,yBAAiB,IAAI,GAAG;AACxB,iBAAS,aAAa,KAAK,IAAI;AAAA,MACjC;AAAA,IACF;AAEA,eAAW,WAAW,WAAW,cAAc;AAC7C,YAAM,MAAM,QAAQ,KAAK,YAAY;AACrC,UAAI,CAAC,aAAa,IAAI,GAAG,GAAG;AAC1B,qBAAa,IAAI,GAAG;AACpB,iBAAS,aAAa,KAAK,OAAO;AAAA,MACpC;AAAA,IACF;AAEA,WAAO,OAAO,SAAS,aAAa,WAAW,WAAW;AAAA,EAC5D;AAEA,SAAO;AACT;AAKA,eAAe,uBACb,QACA,WACA,OACA,QACA,WAC0B;AAC1B,QAAM,SAAS,UAAU,YAAY,EAAE,QAAQ,WAAW,EAAE,EAAE,MAAM,GAAG,CAAC,KAAK;AAE7E,QAAM,SAAS,oBAAoB,uBAChC,QAAQ,iBAAiB,KAAK,UAAU,WAAW,MAAM,CAAC,CAAC,EAC3D,QAAQ,aAAa,KAAK,EAC1B,QAAQ,cAAc,MAAM,EAC5B,QAAQ,iBAAiB,OAAO,UAAU,aAAa,SAAS,GAAG,CAAC,EACpE,QAAQ,mBAAmB,MAAM;AAEpC,QAAM,WAAW,MAAM,OAAO,SAAS,OAAO;AAAA,IAC5C,OAAO;AAAA,IACP,YAAY;AAAA,IACZ,QAAQ,oBAAoB;AAAA,IAC5B,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,EAC9C,CAAC;AAED,QAAM,eAAe,SAAS,QAAQ,CAAC;AACvC,QAAM,OAAO,iBAAiB,UAAa,aAAa,SAAS,SAAS,aAAa,OAAO;AAE9F,MAAI;AACF,UAAM,YAAY,KAAK,MAAM,aAAa;AAC1C,QAAI,WAAW;AACb,YAAM,QAAQ,KAAK,MAAM,UAAU,CAAC,CAAC;AAErC,aAAO,MAAM,IAAI,CAAC,MAA8B,OAAe;AAAA,QAC7D,IAAI,KAAK,MAAM,GAAG,MAAM,IAAI,OAAO,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG,CAAC;AAAA,QAC1D,MAAM,KAAK,QAAQ;AAAA,QACnB,aAAa,KAAK,eAAe;AAAA,QACjC,UAAU,KAAK,YAAY;AAAA,QAC3B,UAAU;AAAA,UACR,OAAO,KAAK,UAAU,SAAS,CAAC;AAAA,UAChC,UAAU,KAAK,UAAU,YAAY,CAAC;AAAA,UACtC,UAAU,KAAK,UAAU,YAAY;AAAA,QACvC;AAAA,QACA,KAAK;AAAA,UACH,aAAa,KAAK,KAAK,eAAe;AAAA,UACtC,SAAS,KAAK,KAAK,WAAW;AAAA,UAC9B,aAAa,KAAK,KAAK,eAAe;AAAA,QACxC;AAAA,QACA,YAAY,KAAK,cAAc;AAAA,QAC/B,UAAU,KAAK,YAAY;AAAA,MAC7B,EAAE;AAAA,IACJ;AAAA,EACF,SAAS,GAAG;AACV,YAAQ,MAAM,oCAAoC,CAAC;AAAA,EACrD;AAEA,SAAO,CAAC;AACV;AAKA,eAAe,gBAAgB,QAAmB,SAAkC;AAClF,QAAM,SAAS,oBAAoB,gBAChC,QAAQ,eAAe,OAAO;AAEjC,QAAM,WAAW,MAAM,OAAO,SAAS,OAAO;AAAA,IAC5C,OAAO;AAAA,IACP,YAAY;AAAA,IACZ,QAAQ,oBAAoB;AAAA,IAC5B,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,EAC9C,CAAC;AAED,QAAM,eAAe,SAAS,QAAQ,CAAC;AACvC,SAAO,iBAAiB,UAAa,aAAa,SAAS,SAAS,aAAa,OAAO;AAC1F;AAKA,eAAsB,qBACpB,QACA,WACA,WACA,UAC8E;AAC9E,QAAM,SAAS,oBAAoB,qBAChC,QAAQ,eAAe,UAAU,OAAO,EACxC,QAAQ,gBAAgB,KAAK,UAAU,UAAU,aAAa,MAAM,GAAG,EAAE,GAAG,MAAM,CAAC,CAAC,EACpF,QAAQ,gBAAgB,KAAK,UAAU,UAAU,eAAe,MAAM,GAAG,EAAE,GAAG,MAAM,CAAC,CAAC,EACtF,QAAQ,iBAAiB,SAAS,EAClC,QAAQ,gBAAgB,QAAQ,EAChC,QAAQ,cAAc,UAAU,MAAM;AAEzC,QAAM,WAAW,MAAM,OAAO,SAAS,OAAO;AAAA,IAC5C,OAAO;AAAA,IACP,YAAY;AAAA,IACZ,QAAQ,oBAAoB;AAAA,IAC5B,UAAU,CAAC,EAAE,MAAM,QAAQ,SAAS,OAAO,CAAC;AAAA,EAC9C,CAAC;AAED,QAAM,eAAe,SAAS,QAAQ,CAAC;AACvC,QAAM,OAAO,iBAAiB,UAAa,aAAa,SAAS,SAAS,aAAa,OAAO;AAE9F,MAAI;AACF,UAAM,YAAY,KAAK,MAAM,aAAa;AAC1C,QAAI,WAAW;AACb,aAAO,KAAK,MAAM,UAAU,CAAC,CAAC;AAAA,IAChC;AAAA,EACF,SAAS,GAAG;AAAA,EAEZ;AAEA,SAAO;AAAA,IACL,cAAc,mDAAmD,QAAQ,4DAA4D,SAAS;AAAA,IAC9I,gBAAgB,0CAA0C,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAClE,WAAW;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EACb;AACF;;;AFvZA,SAAS,OAAO,WAAW,YAAAC,iBAAgB;AAC3C,SAAS,YAAY;AAKrB,eAAsB,uBACpB,SACA,UAAmB,MACS;AAC5B,QAAM,EAAE,UAAU,WAAW,SAAS,IAAI;AAE1C,QAAM,MAAM,UAAU,QAAQ,MAAM,KAAK,OAAO,IAAI,MAAM;AAAA,EAAC;AAE3D,MAAI;AAEF,QAAI,+BAAwB;AAC5B,UAAM,WAAW,MAAM,cAAc,QAAQ;AAC7C,QAAI,8BAAoB,SAAS,SAAS,QAAQ,EAAE;AACpD,QAAI,0BAAgB,SAAS,SAAS,UAAU,eAAe,CAAC,EAAE;AAClE,QAAI,6BAAmB,SAAS,SAAS,MAAM,EAAE;AAGjD,QAAI,sCAA+B;AACnC,UAAM,kBAA8E;AAAA,MAClF;AAAA,MACA;AAAA,IACF;AACA,QAAI,aAAa,QAAW;AAC1B,sBAAgB,WAAW;AAAA,IAC7B;AACA,UAAM,YAAY,MAAM,kBAAkB,UAAU,eAAe;AACnE,QAAI,kCAAwB,UAAU,aAAa,MAAM,EAAE;AAC3D,QAAI,mCAAyB,UAAU,cAAc,MAAM,EAAE;AAC7D,QAAI,kCAAwB,UAAU,aAAa,MAAM,EAAE;AAC3D,QAAI,oCAA0B,UAAU,eAAe,MAAM,EAAE;AAG/D,QAAI,y
CAAkC;AACtC,UAAM,SAAS,IAAIC,WAAU;AAC7B,UAAM,UAAU,MAAM;AAAA,MACpB;AAAA,MACA;AAAA,MACA;AAAA,MACA,YAAY,UAAU;AAAA,IACxB;AAGA,QAAI,6CAAsC;AAC1C,UAAM,cAAc;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAGA,UAAM,aAAa,MAAM,gBAAgB,WAAW;AACpD,QAAI,6BAAmB,UAAU,EAAE;AAEnC,WAAO;AAAA,MACL,SAAS;AAAA,MACT,WAAW,YAAY;AAAA,MACvB;AAAA,MACA,OAAO;AAAA,QACL,eAAe,SAAS,SAAS;AAAA,QACjC,mBAAmB,UAAU,aAAa;AAAA,QAC1C,mBAAmB,UAAU,eAAe;AAAA,QAC5C,kBAAkB,UAAU,eAAe;AAAA,MAC7C;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,UAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC1E,QAAI;AAAA,gBAAc,YAAY,EAAE;AAEhC,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,MACA,YAAY;AAAA,MACZ,OAAO;AAAA,QACL,eAAe;AAAA,QACf,mBAAmB;AAAA,QACnB,mBAAmB;AAAA,QACnB,kBAAkB;AAAA,MACpB;AAAA,MACA,OAAO;AAAA,IACT;AAAA,EACF;AACF;AAKA,SAAS,iBACP,UACA,WACA,SACA,SACsB;AACtB,QAAM,EAAE,WAAW,aAAa,aAAa,SAAS,IAAI;AAC1D,QAAM,WAAW,SAAS,SAAS;AAGnC,QAAM,kBAAkB,qBAAqB,SAAS;AAEtD,SAAO;AAAA,IACL,MAAM,kBAAkB,SAAS;AAAA,IACjC,aAAa,eAAe,kBAAkB,SAAS;AAAA,IACvD,aAAa,eAAe,+BAA+B,SAAS,SAAS,SAAS,SAAS;AAAA,IAC/F,SAAS;AAAA,IACT,UAAU,YAAY,UAAU;AAAA,IAEhC,QAAQ;AAAA,MACN,MAAM;AAAA,MACN,cAAc,SAAS,SAAS;AAAA,MAChC,UAAU,SAAS,SAAS;AAAA,MAC5B,eAAc,oBAAI,KAAK,GAAE,YAAY;AAAA,MACrC,GAAI,aAAa,UAAa,EAAE,eAAe,SAAS;AAAA,IAC1D;AAAA,IAEA,cAAc,QAAQ;AAAA,IACtB,gBAAgB,QAAQ;AAAA,IACxB,WAAW,QAAQ;AAAA,IAEnB;AAAA,IACA,UAAU,UAAU;AAAA,IACpB;AAAA,EACF;AACF;AAKA,SAAS,qBAAqB,WAAyE;AACrG,QAAM,cAAgF;AAAA,IACpF,WAAW;AAAA,MACT,cAAc,CAAC,QAAQ,SAAS,QAAQ,SAAS,QAAQ,QAAQ,MAAM;AAAA,MACvE,gBAAgB,CAAC,aAAa,YAAY;AAAA,MAC1C,UAAU;AAAA,IACZ;AAAA,IACA,OAAO;AAAA,MACL,cAAc,CAAC,GAAG;AAAA,MAClB,gBAAgB,CAAC,mBAAmB,eAAe,iBAAiB;AAAA,MACpE,UAAU;AAAA,IACZ;AAAA,IACA,QAAQ;AAAA,MACN,cAAc,CAAC,GAAG;AAAA,MAClB,gBAAgB,CAAC,eAAe,cAAc,iBAAiB;AAAA,MAC/D,UAAU;AAAA,IACZ;AAAA,IACA,UAAU;AAAA,MACR,cAAc,CAAC,GAAG;AAAA,MAClB,gBAAgB,CAAC,eAAe,iBAAiB,cAAc,iBAAiB;AAAA,MAChF,UAAU;AAAA,IACZ;AAAA,IACA,cAAc;AAAA,MACZ,cAAc,CAAC,QAAQ,SAAS,QAAQ,SAAS,QAAQ,MAAM;AAAA,MAC/D,gBAAgB,CAAC,cAAc,iBAAiB;AAAA,MAChD,UAAU;AAAA,IACZ;AAAA,IACA,SAAS;AAAA,MACP,cAAc,CAAC,GAAG;AAAA,MAClB,gBAAgB,CAAC;AAAA,MACjB,UAAU;AAAA,IACZ;AAAA,EACF;AAEA,QAAM,iBAAiB,YAAY,UAAU,MAAM,KAAK,YAAY;AAGpE,QAAM,kBAA4B,CAAC;AACnC,aAAW,QAAQ,UAAU,gBAAgB;AAC3C,QAAI,KAAK,SAAS,UAAU;AAC1B,sBAAgB,KAAK,GAAG,KAAK,SAAS,SAAS,MAAM,GAAG,CAAC,CAAC;AAAA,IAC5D;AAAA,EACF;AAGA,aAAW,WAAW,UAAU,aAAa,MAAM,GAAG,CAAC,GAAG;AACxD,QAAI,QAAQ,UAAU;AACpB,sBAAgB,KAAK,GAAG,QAAQ,SAAS,MAAM,GAAG,CAAC,CAAC;AAAA,IACtD;AAAA,EACF;AAGA,QAAM,iBAAiB,CAAC,GAAG,IAAI,IAAI,eAAe,CAAC,EAAE,MAAM,GAAG,EAAE;AAEhE,SAAO;AAAA,IACL,cAAc,eAAe,gBAAgB,CAAC,GAAG;AAAA,IACjD,iBAAiB;AAAA,IACjB,gBAAgB,eAAe,kBAAkB,CAAC;AAAA,IAClD,eAAe;AAAA,IACf,UAAU,eAAe,YAAY;AAAA,EACvC;AACF;AAKA,eAAe,gBAAgB,QAA+C;AAC5E,QAAM,UAAU,KAAK,QAAQ,IAAI,GAAG,SAAS,QAAQ;AAGrD,QAAM,MAAM,SAAS,EAAE,WAAW,KAAK,CAAC;AAExC,QAAM,aAAa,KAAK,SAAS,GAAG,OAAO,IAAI,OAAO;AACtD,QAAM,UAAU,YAAY,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AAE3D,SAAO;AACT;AAKA,eAAsB,gBAAgB,MAAoD;AACxF,MAAI;AACF,UAAM,aAAa,KAAK,QAAQ,IAAI,GAAG,SAAS,UAAU,GAAG,IAAI,OAAO;AACxE,UAAM,UAAU,MAAMD,UAAS,YAAY,OAAO;AAClD,WAAO,KAAK,MAAM,OAAO;AAAA,EAC3B,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAKA,eAAsB,mBAAsC;AAC1D,MAAI;AACF,UAAM,EAAE,QAAQ,IAAI,MAAM,OAAO,aAAa;AAC9C,UAAM,UAAU,KAAK,QAAQ,IAAI,GAAG,SAAS,QAAQ;AACrD,UAAM,QAAQ,MAAM,QAAQ,OAAO;AACnC,WAAO,MACJ,OAAO,OAAK,EAAE,SAAS,OAAO,CAAC,EAC/B,IAAI,OAAK,EAAE,QAAQ,SAAS,EAAE,CAAC;AAAA,EACpC,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;AAKA,SAAS,kBAAkB,MAAsB;AAC/C,SAAO,KACJ,YAAY,EACZ,QAAQ,eAAe,GAAG,EAC1B,QAAQ,OAAO,GAAG,EAClB,QAAQ,UAAU,EAAE;AACzB;AAKA,SAAS,kBAAkB,MAAsB;AAC/C,SAAO,KACJ,MAAM,MAAM,EACZ,IAAI,UAAQ,KAAK,OAAO,CAAC,EAAE,YAAY,IAAI,KAAK,MAAM,CAAC,CAAC,EACxD,KAAK,
GAAG;AACb;","names":["Anthropic","readFile","Anthropic"]}
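Read together, the decoded sources describe one pipeline: `parseDocument` extracts text and sections, `compressKnowledge` chunks the text and distills it through the `COMPRESSION_PROMPTS` templates, `generateAgentPrompts` turns the result into agent prompts, and `buildAgentFromDocument` ties the steps together and writes the config to `.trie/agents/<name>.json`. A minimal sketch of driving that entry point follows; the root import path and the input file are assumptions not confirmed by this diff, and `new Anthropic()` reads `ANTHROPIC_API_KEY` from the environment:

```ts
// Sketch only: the import specifier and the input document are hypothetical.
// Run as an ES module with ANTHROPIC_API_KEY set.
import { buildAgentFromDocument } from '@triedotdev/mcp';

const result = await buildAgentFromDocument(
  {
    filePath: './docs/secure-coding-handbook.pdf', // hypothetical document
    agentName: 'secure-coding-handbook',
    category: 'security', // optional; otherwise the detected domain is used
  },
  true // verbose: progress is logged to stderr via console.error
);

if (result.success) {
  // saveAgentConfig wrote the generated config under .trie/agents/
  console.log(`Agent config: ${result.configPath}`);
  console.log(`Rules generated: ${result.stats.patternsGenerated}`);
} else {
  console.error(`Ingest failed: ${result.error}`);
}
```

Note that `parsePDF` lazily imports `pdf-parse`, so PDF input additionally requires that package to be installed; `.txt`, `.md`, and `.rtf` inputs have no extra dependency.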