@triedotdev/mcp 1.0.168 → 1.0.170

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (149)
  1. package/README.md +54 -500
  2. package/dist/chunk-2YXOBNKW.js +619 -0
  3. package/dist/chunk-2YXOBNKW.js.map +1 -0
  4. package/dist/chunk-QR64Y5TI.js +363 -0
  5. package/dist/chunk-QR64Y5TI.js.map +1 -0
  6. package/dist/cli/main.d.ts +0 -15
  7. package/dist/cli/main.js +356 -3100
  8. package/dist/cli/main.js.map +1 -1
  9. package/dist/index.js +2 -36
  10. package/dist/index.js.map +1 -1
  11. package/dist/server/mcp-server.js +2 -36
  12. package/package.json +8 -31
  13. package/dist/autonomy-config-FSERX3O3.js +0 -30
  14. package/dist/autonomy-config-FSERX3O3.js.map +0 -1
  15. package/dist/chat-store-JNGNTDSN.js +0 -15
  16. package/dist/chat-store-JNGNTDSN.js.map +0 -1
  17. package/dist/chunk-2HF65EHQ.js +0 -311
  18. package/dist/chunk-2HF65EHQ.js.map +0 -1
  19. package/dist/chunk-43X6JBEM.js +0 -36
  20. package/dist/chunk-43X6JBEM.js.map +0 -1
  21. package/dist/chunk-4MXH2ZPT.js +0 -1827
  22. package/dist/chunk-4MXH2ZPT.js.map +0 -1
  23. package/dist/chunk-575YT2SD.js +0 -737
  24. package/dist/chunk-575YT2SD.js.map +0 -1
  25. package/dist/chunk-5BRRRTN6.js +0 -354
  26. package/dist/chunk-5BRRRTN6.js.map +0 -1
  27. package/dist/chunk-6NLHFIYA.js +0 -344
  28. package/dist/chunk-6NLHFIYA.js.map +0 -1
  29. package/dist/chunk-7WITSO22.js +0 -824
  30. package/dist/chunk-7WITSO22.js.map +0 -1
  31. package/dist/chunk-DGUM43GV.js +0 -11
  32. package/dist/chunk-DGUM43GV.js.map +0 -1
  33. package/dist/chunk-EFWVF6TI.js +0 -267
  34. package/dist/chunk-EFWVF6TI.js.map +0 -1
  35. package/dist/chunk-F6WFNUAY.js +0 -216
  36. package/dist/chunk-F6WFNUAY.js.map +0 -1
  37. package/dist/chunk-FQ45QP5A.js +0 -361
  38. package/dist/chunk-FQ45QP5A.js.map +0 -1
  39. package/dist/chunk-G2TGF6TR.js +0 -573
  40. package/dist/chunk-G2TGF6TR.js.map +0 -1
  41. package/dist/chunk-GTKYBOXL.js +0 -700
  42. package/dist/chunk-GTKYBOXL.js.map +0 -1
  43. package/dist/chunk-HVCDY3AK.js +0 -850
  44. package/dist/chunk-HVCDY3AK.js.map +0 -1
  45. package/dist/chunk-JVMBCWKS.js +0 -348
  46. package/dist/chunk-JVMBCWKS.js.map +0 -1
  47. package/dist/chunk-KDHN2ZQE.js +0 -313
  48. package/dist/chunk-KDHN2ZQE.js.map +0 -1
  49. package/dist/chunk-LQIMKE3P.js +0 -12524
  50. package/dist/chunk-LQIMKE3P.js.map +0 -1
  51. package/dist/chunk-ME2OERF5.js +0 -345
  52. package/dist/chunk-ME2OERF5.js.map +0 -1
  53. package/dist/chunk-MRHKX5M5.js +0 -662
  54. package/dist/chunk-MRHKX5M5.js.map +0 -1
  55. package/dist/chunk-OBQ74FOU.js +0 -27
  56. package/dist/chunk-OBQ74FOU.js.map +0 -1
  57. package/dist/chunk-OMR4YCBS.js +0 -987
  58. package/dist/chunk-OMR4YCBS.js.map +0 -1
  59. package/dist/chunk-Q5EKA5YA.js +0 -254
  60. package/dist/chunk-Q5EKA5YA.js.map +0 -1
  61. package/dist/chunk-Q63FFI6D.js +0 -132
  62. package/dist/chunk-Q63FFI6D.js.map +0 -1
  63. package/dist/chunk-SY6KQG44.js +0 -983
  64. package/dist/chunk-SY6KQG44.js.map +0 -1
  65. package/dist/chunk-T63OHG4Q.js +0 -440
  66. package/dist/chunk-T63OHG4Q.js.map +0 -1
  67. package/dist/chunk-TN5WEKWI.js +0 -173
  68. package/dist/chunk-TN5WEKWI.js.map +0 -1
  69. package/dist/chunk-VUL52BQL.js +0 -402
  70. package/dist/chunk-VUL52BQL.js.map +0 -1
  71. package/dist/chunk-VVITXIHN.js +0 -189
  72. package/dist/chunk-VVITXIHN.js.map +0 -1
  73. package/dist/chunk-WCN7S3EI.js +0 -14
  74. package/dist/chunk-WCN7S3EI.js.map +0 -1
  75. package/dist/chunk-XE6KQRKZ.js +0 -816
  76. package/dist/chunk-XE6KQRKZ.js.map +0 -1
  77. package/dist/chunk-XPZZFPBZ.js +0 -491
  78. package/dist/chunk-XPZZFPBZ.js.map +0 -1
  79. package/dist/chunk-XTFWT2XM.js +0 -727
  80. package/dist/chunk-XTFWT2XM.js.map +0 -1
  81. package/dist/chunk-YDHUCDHM.js +0 -4011
  82. package/dist/chunk-YDHUCDHM.js.map +0 -1
  83. package/dist/chunk-YZ6Y2H3P.js +0 -1289
  84. package/dist/chunk-YZ6Y2H3P.js.map +0 -1
  85. package/dist/chunk-ZJF5FTBX.js +0 -1396
  86. package/dist/chunk-ZJF5FTBX.js.map +0 -1
  87. package/dist/chunk-ZV2K6M7T.js +0 -74
  88. package/dist/chunk-ZV2K6M7T.js.map +0 -1
  89. package/dist/cli/create-agent.d.ts +0 -1
  90. package/dist/cli/create-agent.js +0 -1050
  91. package/dist/cli/create-agent.js.map +0 -1
  92. package/dist/cli/yolo-daemon.d.ts +0 -1
  93. package/dist/cli/yolo-daemon.js +0 -423
  94. package/dist/cli/yolo-daemon.js.map +0 -1
  95. package/dist/client-NJPZE5JT.js +0 -28
  96. package/dist/client-NJPZE5JT.js.map +0 -1
  97. package/dist/codebase-index-VAPF32XX.js +0 -12
  98. package/dist/codebase-index-VAPF32XX.js.map +0 -1
  99. package/dist/fast-analyzer-XXYMOXRK.js +0 -216
  100. package/dist/fast-analyzer-XXYMOXRK.js.map +0 -1
  101. package/dist/git-EO5SRFMN.js +0 -28
  102. package/dist/git-EO5SRFMN.js.map +0 -1
  103. package/dist/github-ingester-ZOKK6GRS.js +0 -11
  104. package/dist/github-ingester-ZOKK6GRS.js.map +0 -1
  105. package/dist/goal-manager-YOB7VWK7.js +0 -25
  106. package/dist/goal-manager-YOB7VWK7.js.map +0 -1
  107. package/dist/goal-validator-ULKIBDPX.js +0 -24
  108. package/dist/goal-validator-ULKIBDPX.js.map +0 -1
  109. package/dist/graph-B3NA4S7I.js +0 -10
  110. package/dist/graph-B3NA4S7I.js.map +0 -1
  111. package/dist/hypothesis-7BFFT5JY.js +0 -23
  112. package/dist/hypothesis-7BFFT5JY.js.map +0 -1
  113. package/dist/incident-index-EFNUSGWL.js +0 -11
  114. package/dist/incident-index-EFNUSGWL.js.map +0 -1
  115. package/dist/insight-store-EC4PLSAW.js +0 -22
  116. package/dist/insight-store-EC4PLSAW.js.map +0 -1
  117. package/dist/issue-store-ZIRP23EP.js +0 -36
  118. package/dist/issue-store-ZIRP23EP.js.map +0 -1
  119. package/dist/ledger-TWZTGDFA.js +0 -58
  120. package/dist/ledger-TWZTGDFA.js.map +0 -1
  121. package/dist/linear-ingester-XXPAZZRW.js +0 -11
  122. package/dist/linear-ingester-XXPAZZRW.js.map +0 -1
  123. package/dist/output-manager-RVJ37XKA.js +0 -13
  124. package/dist/output-manager-RVJ37XKA.js.map +0 -1
  125. package/dist/parse-goal-violation-SACGFG3C.js +0 -8
  126. package/dist/parse-goal-violation-SACGFG3C.js.map +0 -1
  127. package/dist/pattern-discovery-F7LU5K6E.js +0 -8
  128. package/dist/pattern-discovery-F7LU5K6E.js.map +0 -1
  129. package/dist/progress-SRQ2V3BP.js +0 -18
  130. package/dist/progress-SRQ2V3BP.js.map +0 -1
  131. package/dist/project-state-AHPA77SM.js +0 -28
  132. package/dist/project-state-AHPA77SM.js.map +0 -1
  133. package/dist/sync-M2FSWPBC.js +0 -12
  134. package/dist/sync-M2FSWPBC.js.map +0 -1
  135. package/dist/terminal-spawn-5YXDMUCF.js +0 -157
  136. package/dist/terminal-spawn-5YXDMUCF.js.map +0 -1
  137. package/dist/tiered-storage-Z3YCR465.js +0 -12
  138. package/dist/tiered-storage-Z3YCR465.js.map +0 -1
  139. package/dist/trie-agent-3YDPEGHJ.js +0 -28
  140. package/dist/trie-agent-3YDPEGHJ.js.map +0 -1
  141. package/dist/ui/chat.html +0 -1014
  142. package/dist/ui/goals.html +0 -967
  143. package/dist/ui/hypotheses.html +0 -1011
  144. package/dist/ui/ledger.html +0 -954
  145. package/dist/ui/nudges.html +0 -995
  146. package/dist/vibe-code-signatures-F6URTBW3.js +0 -16
  147. package/dist/vibe-code-signatures-F6URTBW3.js.map +0 -1
  148. package/dist/vulnerability-signatures-T7SKHORW.js +0 -18
  149. package/dist/vulnerability-signatures-T7SKHORW.js.map +0 -1
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/memory/issue-store.ts","../src/memory/bm25.ts","../src/memory/compactor.ts"], …}  (removed single-line source map: JSON with embedded sourcesContent for the issue-store, BM25 index, and memory-compactor modules plus VLQ "mappings"; content truncated in this view)
M,WAAW,aAAa,UAAU,CAAC,OAAO,UAAW,OAAe,QAAQ;AAClF,cAAQ,MAAM,6BAA6B,QAAQ,EAAE;AACrD,YAAM,gBAAgB,IAAI,cAAc,SAAS;AAEjD,UAAI,MAAM,cAAc,kBAAkB,GAAG;AAC3C,gBAAQ,MAAM,iCAA4B;AAC1C,cAAM,YAAY,MAAMA,UAAS,WAAW,OAAO;AACnD,cAAM,kBAAkB,qBAAqB,WAAW,gBAAgB;AACxE,YAAI,gBAAgB,SAAS;AAC3B,iBAAO,gBAAgB;AAAA,QACzB;AAAA,MACF;AAEA,cAAQ,MAAM,0CAA0C;AAAA,IAC1D;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO,CAAC;AACV;AAEA,eAAe,iBAAiB,WAA0B,YAAqC;AAC7F,QAAM,YAAYH,MAAK,iBAAiB,UAAU,GAAG,QAAQ;AAC7D,QAAMC,OAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAE1C,QAAM,YAAYD,MAAK,WAAW,aAAa;AAG/C,SAAO,aAAa,WAAW,YAAY;AACzC,QAAI,WAAW,MAAM,eAAe,UAAU;AAI9C,UAAM,UAAU,IAAI,IAAI,SAAS,IAAI,OAAK,EAAE,IAAI,CAAC;AACjD,UAAM,QAAQ,UAAU,OAAO,OAAK,CAAC,QAAQ,IAAI,EAAE,IAAI,CAAC;AACxD,UAAM,eAAe,MAAM;AAE3B,eAAW,CAAC,GAAG,UAAU,GAAG,KAAK;AAGjC,QAAI,SAAS,SAAS,KAAK;AACzB,YAAM,EAAE,SAAS,UAAU,IAAI,MAAM,iBAAiB,UAAU;AAAA,QAC9D,UAAU;AAAA,QACV,oBAAoB;AAAA,MACtB,CAAC;AAED,UAAI,SAAS;AACX,cAAM,qBAAqB,SAAS,UAAU;AAC9C,mBAAW;AAAA,MACb;AAAA,IACF;AAIA,QAAI,SAAS,SAAS,KAAO;AAC3B,iBAAW,iBAAiB,UAAU,GAAK;AAAA,IAC7C;AAGA,UAAM,uBAAuB,UAAU,UAAU;AACjD,WAAO;AAAA,EACT,GAAG,EAAE,SAAS,KAAM,CAAC;AACvB;AAMA,SAAS,iBAAiB,QAAuB,aAAoC;AACnF,QAAM,iBAAyC;AAAA,IAC7C,UAAU;AAAA,IACV,MAAM;AAAA,IACN,UAAU;AAAA,IACV,KAAK;AAAA,IACL,MAAM;AAAA,EACR;AAEA,QAAM,SAAS,OAAO,IAAI,WAAS;AACjC,UAAM,aAAa,KAAK,IAAI,IAAI,IAAI,KAAK,MAAM,SAAS,EAAE,QAAQ,MAAM,MAAO,KAAK,KAAK;AACzF,UAAM,eAAe,KAAK,IAAI,GAAG,MAAM,YAAY,CAAC;AACpD,UAAM,gBAAgB,eAAe,MAAM,QAAQ,KAAK;AACxD,UAAM,kBAAkB,MAAM,WAAW,MAAM;AAE/C,WAAO;AAAA,MACL;AAAA,MACA,OAAO,eAAe,gBAAgB;AAAA,IACxC;AAAA,EACF,CAAC;AAED,SAAO,OACJ,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK,EAChC,MAAM,GAAG,WAAW,EACpB,IAAI,OAAK,EAAE,KAAK;AACrB;AAYA,eAAe,uBAAuB,QAAuB,YAAmC;AAC9F,QAAM,YAAYA,MAAK,iBAAiB,UAAU,GAAG,QAAQ;AAC7D,QAAMC,OAAM,WAAW,EAAE,WAAW,KAAK,CAAC;AAE1C,QAAM,YAAYD,MAAK,WAAW,aAAa;AAG/C,QAAM,gBAAgB,IAAI,cAAc,SAAS;AACjD,QAAM,cAAc,aAAa;AAGjC,QAAM,gBAAgB,WAAW,MAAM;AACzC;AASA,SAAS,UAAU,OAAsB;AACvC,QAAM,UAAU,GAAG,MAAM,KAAK,IAAI,MAAM,IAAI,IAAI,MAAM,QAAQ,IAAI,MAAM,KAAK;AAC7E,SAAO,WAAW,QAAQ,EAAE,OAAO,OAAO,EAAE,OAAO,KAAK,EAAE,MAAM,GAAG,EAAE;AACvE;","names":["mkdir","readFile","existsSync","join","join","mkdir","existsSync","readFile"]}
@@ -1,491 +0,0 @@
- import {
- getTrieDirectory,
- getWorkingDirectory
- } from "./chunk-VVITXIHN.js";
- import {
- isInteractiveMode
- } from "./chunk-KDHN2ZQE.js";
-
- // src/config/loader.ts
- import { readFile, writeFile, mkdir } from "fs/promises";
- import { existsSync as existsSync2 } from "fs";
- import { join as join2, dirname } from "path";
-
- // src/config/validation.ts
- import { z } from "zod";
- import { existsSync, readFileSync, mkdirSync } from "fs";
- import { resolve, join } from "path";
- var API_KEY_PATTERNS = {
- anthropic: /^sk-ant-api\d{2}-[\w-]{95}$/,
- openai: /^sk-[\w]{48}$/,
- github: /^ghp_[\w]{36}$/,
- vercel: /^[\w]{24}$/,
- linear: /^lin_api_[\w]{40,60}$/
- };
- var ApiKeysSchema = z.object({
- anthropic: z.string().regex(API_KEY_PATTERNS.anthropic, "Invalid Anthropic API key format").optional(),
- openai: z.string().regex(API_KEY_PATTERNS.openai, "Invalid OpenAI API key format").optional(),
- // Lenient: accept GitHub classic (ghp_*), fine-grained (github_pat_*), or any string 10+ chars
- github: z.string().min(10).optional(),
- vercel: z.string().regex(API_KEY_PATTERNS.vercel, "Invalid Vercel token format").optional(),
- linear: z.string().optional(),
- cursor: z.string().optional()
- });
- var AgentConfigSchema = z.object({
- enabled: z.array(z.string()).optional().default([]),
- disabled: z.array(z.string()).optional().default([]),
- parallel: z.boolean().optional().default(true),
- maxConcurrency: z.number().int().min(1).max(20).optional().default(4),
- timeout: z.number().int().min(1e3).max(3e5).optional().default(12e4),
- // 2 minutes
- cache: z.boolean().optional().default(true)
- });
- var ComplianceSchema = z.object({
- standards: z.array(z.enum(["SOC2", "GDPR", "HIPAA", "CCPA", "PCI-DSS", "ISO27001", "FDA21CFR11", "SOX", "ICFR", "GLBA", "FFIEC", "FINRA"])).optional().default(["SOC2"]),
- enforceCompliance: z.boolean().optional().default(false),
- reportFormat: z.enum(["json", "sarif", "csv", "html"]).optional().default("json"),
- // Enhanced validation to prevent false negatives
- continuousValidation: z.boolean().optional().default(true),
- mandatoryAuditTrail: z.boolean().optional().default(true),
- falseNegativeDetection: z.object({
- enabled: z.boolean().optional().default(true),
- crossValidation: z.boolean().optional().default(true),
- humanReviewRequired: z.boolean().optional().default(false),
- samplingRate: z.number().min(0).max(1).optional().default(0.1)
- // 10% sampling
- }).optional().default({
- enabled: true,
- crossValidation: true,
- humanReviewRequired: false,
- samplingRate: 0.1
- })
- });
- var OutputSchema = z.object({
- format: z.enum(["console", "json", "sarif", "junit"]).optional().default("console"),
- level: z.enum(["critical", "serious", "moderate", "low", "all"]).optional().default("all"),
- interactive: z.boolean().optional().default(false),
- streaming: z.boolean().optional().default(true),
- colors: z.boolean().optional().default(true)
- });
- var PathsSchema = z.object({
- include: z.array(z.string()).optional().default([]),
- exclude: z.array(z.string()).optional().default(["node_modules", "dist", "build", ".git"]),
- configDir: z.string().optional().default(".trie"),
- outputDir: z.string().optional().default("trie-reports")
- });
- var IntegrationsSchema = z.object({
- github: z.object({
- enabled: z.boolean().optional().default(false),
- token: z.string().optional(),
- webhook: z.string().url().optional()
- }).optional(),
- slack: z.object({
- enabled: z.boolean().optional().default(false),
- webhook: z.string().url().optional(),
- channel: z.string().optional()
- }).optional(),
- jira: z.object({
- enabled: z.boolean().optional().default(false),
- url: z.string().url().optional(),
- token: z.string().optional(),
- project: z.string().optional()
- }).optional()
- });
- var UserSchema = z.object({
- name: z.string().min(1).optional(),
- email: z.string().email().optional(),
- role: z.enum([
- "developer",
- "designer",
- "qa",
- "devops",
- "security",
- "architect",
- "manager",
- "contributor"
- ]).optional().default("developer"),
- github: z.string().optional(),
- // GitHub username
- url: z.string().url().optional()
- // Personal/portfolio URL
- });
- var TrieConfigSchema = z.object({
- version: z.string().optional().default("1.0.0"),
- apiKeys: ApiKeysSchema.optional(),
- agents: AgentConfigSchema.optional(),
- compliance: ComplianceSchema.optional(),
- output: OutputSchema.optional(),
- paths: PathsSchema.optional(),
- integrations: IntegrationsSchema.optional(),
- user: UserSchema.optional()
- // User identity for attribution
- });
- var ConfigValidator = class {
- /**
- * Validate configuration object
- */
- validateConfig(config) {
- try {
- const validated = TrieConfigSchema.parse(config);
- const businessErrors = this.validateBusinessLogic(validated);
- if (businessErrors.length > 0) {
- return { success: false, errors: businessErrors };
- }
- return { success: true, data: validated };
- } catch (error) {
- if (error instanceof z.ZodError) {
- const errors = error.errors.map(
- (err) => `${err.path.join(".")}: ${err.message}`
- );
- return { success: false, errors };
- }
- return {
- success: false,
- errors: [`Configuration validation failed: ${error instanceof Error ? error.message : "Unknown error"}`]
- };
- }
- }
- /**
- * Validate environment variables for API keys
- */
- validateEnvironment() {
- const warnings = [];
- const errors = [];
- const exposedPatterns = [
- "NEXT_PUBLIC_ANTHROPIC",
- "REACT_APP_ANTHROPIC",
- "VITE_ANTHROPIC",
- "PUBLIC_ANTHROPIC"
- ];
- for (const pattern of exposedPatterns) {
- const envVars = Object.keys(process.env).filter((key) => key.includes(pattern));
- for (const envVar of envVars) {
- errors.push(`[!] Security risk: API key in client-side environment variable: ${envVar}`);
- }
- }
- let anthropicKey = process.env.ANTHROPIC_API_KEY;
- if (!anthropicKey) {
- try {
- const configPath = join(getTrieDirectory(getWorkingDirectory(void 0, true)), "config.json");
- if (existsSync(configPath)) {
- const config = JSON.parse(readFileSync(configPath, "utf-8"));
- anthropicKey = config.apiKeys?.anthropic;
- }
- } catch {
- }
- }
- if (anthropicKey && !API_KEY_PATTERNS.anthropic.test(anthropicKey)) {
- errors.push("ANTHROPIC_API_KEY does not match expected format");
- }
- if (!anthropicKey) {
- warnings.push("ANTHROPIC_API_KEY not set - AI features will be disabled. Set in environment, .trie/config.json, or .env file");
- }
- if (!process.env.GITHUB_TOKEN && process.env.CI) {
- warnings.push("GITHUB_TOKEN not set - GitHub integration disabled");
- }
- return {
- valid: errors.length === 0,
- warnings,
- errors
- };
- }
- /**
- * Validate file paths in configuration
- */
- validatePaths(paths) {
- const errors = [];
- if (paths?.include) {
- for (const path of paths.include) {
- const resolvedPath = resolve(path);
- if (!existsSync(resolvedPath)) {
- errors.push(`Include path does not exist: ${path}`);
- }
- }
- }
- if (paths?.configDir) {
- const configPath = resolve(paths.configDir);
- if (!existsSync(configPath)) {
- try {
- mkdirSync(configPath, { recursive: true });
- } catch {
- errors.push(`Cannot create config directory: ${paths.configDir}`);
- }
- }
- }
- return {
- valid: errors.length === 0,
- errors
- };
- }
- /**
- * Validate integration configurations
- */
- validateIntegrations(integrations) {
- const errors = [];
- if (integrations?.github?.enabled) {
- if (!integrations.github.token) {
- errors.push("GitHub integration enabled but no token provided");
- }
- }
- if (integrations?.slack?.enabled) {
- if (!integrations.slack.webhook) {
- errors.push("Slack integration enabled but no webhook URL provided");
- }
- }
- if (integrations?.jira?.enabled) {
- if (!integrations.jira.url || !integrations.jira.token || !integrations.jira.project) {
- errors.push("JIRA integration enabled but missing required fields (url, token, project)");
- }
- }
- return {
- valid: errors.length === 0,
- errors
- };
- }
- /**
- * Business logic validation
- */
- validateBusinessLogic(config) {
- const errors = [];
- if (config.agents?.enabled && config.agents?.disabled) {
- const overlap = config.agents.enabled.filter(
- (agent) => config.agents?.disabled?.includes(agent)
- );
- if (overlap.length > 0) {
- errors.push(`Agents cannot be both enabled and disabled: ${overlap.join(", ")}`);
- }
- }
- if (config.agents?.maxConcurrency && config.agents.maxConcurrency > 10) {
- errors.push("maxConcurrency should not exceed 10 for optimal performance");
- }
- if (config.compliance?.standards) {
- const validStandards = ["SOC2", "GDPR", "HIPAA", "CCPA", "PCI-DSS", "ISO27001", "FDA21CFR11", "SOX", "ICFR", "GLBA", "FFIEC", "FINRA"];
- const invalidStandards = config.compliance.standards.filter(
- (standard) => !validStandards.includes(standard)
- );
- if (invalidStandards.length > 0) {
- errors.push(`Invalid compliance standards: ${invalidStandards.join(", ")}. Valid standards: ${validStandards.join(", ")}`);
- }
- }
- if (config.paths) {
- const pathValidation = this.validatePaths(config.paths);
- errors.push(...pathValidation.errors);
- }
- if (config.integrations) {
- const integrationValidation = this.validateIntegrations(config.integrations);
- errors.push(...integrationValidation.errors);
- }
- return errors;
- }
- /**
- * Generate configuration template
- */
- generateTemplate() {
- return {
- version: "1.0.0",
- agents: {
- enabled: ["security", "bugs", "types"],
- disabled: [],
- parallel: true,
- maxConcurrency: 4,
- timeout: 12e4,
- cache: true
- },
- compliance: {
- standards: ["SOC2"],
- enforceCompliance: false,
- reportFormat: "json"
- },
- output: {
- format: "console",
- level: "all",
- interactive: false,
- streaming: true,
- colors: true
- },
- paths: {
- include: [],
- exclude: ["node_modules", "dist", "build", ".git"],
- configDir: ".trie",
- outputDir: "trie-reports"
- }
- };
- }
- /**
- * Validate and provide suggestions for improvement
- */
- analyze(config) {
- const suggestions = [];
- const securityIssues = [];
- const optimizations = [];
- let score = 100;
- let hasApiKey = Boolean(config.apiKeys?.anthropic || process.env.ANTHROPIC_API_KEY);
- if (!hasApiKey) {
- try {
- const workDir = getWorkingDirectory(void 0, true);
- const envFiles = [".env", ".env.local", ".env.production"];
- for (const envFile of envFiles) {
- const envPath = join(workDir, envFile);
- if (existsSync(envPath)) {
- const envContent = readFileSync(envPath, "utf-8");
- if (envContent.includes("ANTHROPIC_API_KEY=")) {
- hasApiKey = true;
- break;
- }
- }
- }
- } catch {
- }
- }
- if (!hasApiKey) {
- suggestions.push("Add ANTHROPIC_API_KEY to enable AI-powered analysis for better issue detection. Set in environment, .trie/config.json, or .env file");
- score -= 10;
- }
- if (config.agents?.parallel === false) {
- optimizations.push("Enable parallel agent execution for 3-5x faster scans");
- score -= 15;
- }
- if (config.agents?.cache === false) {
- optimizations.push("Enable result caching to speed up repeated scans");
- score -= 10;
- }
- if (!config.compliance?.standards || config.compliance.standards.length === 0) {
- suggestions.push("Configure compliance standards (SOC2, GDPR, HIPAA, ISO27001, PCI-DSS, SOX, ICFR, etc.) for regulatory requirements");
- score -= 5;
- }
- if (!config.compliance?.falseNegativeDetection?.enabled) {
- suggestions.push("Enable false negative detection to prevent compliance gaps");
- score -= 3;
- }
- if (!config.compliance?.continuousValidation) {
- suggestions.push("Enable continuous validation for ongoing compliance monitoring");
- score -= 2;
- }
- const hasIntegrations = config.integrations && (config.integrations.github?.enabled || config.integrations.slack?.enabled || config.integrations.jira?.enabled);
- if (!hasIntegrations) {
- suggestions.push("Consider enabling GitHub/Slack/JIRA integrations for better team collaboration");
- score -= 5;
- }
- if (config.apiKeys) {
- securityIssues.push("API keys in config file - consider using environment variables instead");
- score -= 20;
- }
- return {
- score: Math.max(0, score),
- suggestions,
- securityIssues,
- optimizations
- };
- }
- };
- var DEFAULT_CONFIG = {
- version: "1.0.0",
- agents: {
- enabled: [],
- disabled: [],
- parallel: true,
- maxConcurrency: 4,
- timeout: 12e4,
- cache: true
- },
- compliance: {
- standards: ["SOC2"],
- enforceCompliance: false,
- reportFormat: "json"
- },
- output: {
- format: "console",
- level: "all",
- interactive: false,
- streaming: true,
- colors: true
- },
- paths: {
- include: [],
- exclude: ["node_modules", "dist", "build", ".git", ".next", ".nuxt", "coverage"],
- configDir: ".trie",
- outputDir: "trie-reports"
- }
- };
-
- // src/config/loader.ts
- async function loadConfig() {
- const validator = new ConfigValidator();
- const configPath = join2(getTrieDirectory(getWorkingDirectory(void 0, true)), "config.json");
- try {
- if (!existsSync2(configPath)) {
- return DEFAULT_CONFIG;
- }
- const configFile = await readFile(configPath, "utf-8");
- const userConfig = JSON.parse(configFile);
- const merged = mergeConfig(DEFAULT_CONFIG, userConfig);
- const result = validator.validateConfig(merged);
- if (!result.success) {
- if (!isInteractiveMode()) {
- console.error("Configuration validation failed:");
- for (const error of result.errors) {
- console.error(` - ${error}`);
- }
- }
- return DEFAULT_CONFIG;
- }
- if (!isInteractiveMode()) {
- const envValidation = validator.validateEnvironment();
- for (const warning of envValidation.warnings) {
- console.warn(warning);
- }
- for (const error of envValidation.errors) {
- console.error(error);
- }
- }
- return result.data;
- } catch (error) {
- if (!isInteractiveMode()) {
- console.error("Failed to load config, using defaults:", error);
- }
- return DEFAULT_CONFIG;
- }
- }
- async function saveConfig(config) {
- const configPath = join2(getTrieDirectory(getWorkingDirectory(void 0, true)), "config.json");
- const dir = dirname(configPath);
- if (!existsSync2(dir)) {
- await mkdir(dir, { recursive: true });
- }
- let existing = {};
- if (existsSync2(configPath)) {
- try {
- const content = await readFile(configPath, "utf-8");
- existing = JSON.parse(content);
- } catch {
- }
- }
- const toWrite = {
- ...existing,
- ...config,
- // Preserve autonomy block (used by Cursor key, etc.) - TrieConfig doesn't include it
- autonomy: existing.autonomy ?? void 0
- };
- await writeFile(configPath, JSON.stringify(toWrite, null, 2), "utf-8");
- }
- function mergeConfig(defaults, user) {
- if (typeof user !== "object" || user === null || Array.isArray(user)) {
- return { ...defaults };
- }
- const result = { ...defaults };
- for (const [key, value] of Object.entries(user)) {
- const defaultValue = defaults[key];
- if (typeof value === "object" && value !== null && !Array.isArray(value) && typeof defaultValue === "object" && defaultValue !== null) {
- result[key] = mergeConfig(defaultValue, value);
- } else {
- result[key] = value;
- }
- }
- return result;
- }
-
- export {
- loadConfig,
- saveConfig
- };
- //# sourceMappingURL=chunk-XPZZFPBZ.js.map