@agntk/agent-harness 0.1.4 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (178)
  1. package/README.md +104 -43
  2. package/dist/{analytics-RPT73WNM.js → analytics-L24W3B7U.js} +1 -2
  3. package/dist/auto-processor-QIRUOGEI.js +12 -0
  4. package/dist/{chunk-UPLBF4RZ.js → chunk-2UVWCTAY.js} +2 -2
  5. package/dist/{chunk-CSL3ERUI.js → chunk-4P6TRFPZ.js} +3 -3
  6. package/dist/chunk-4P6TRFPZ.js.map +1 -0
  7. package/dist/{chunk-4CWAGBNS.js → chunk-4TQQZILG.js} +73 -3
  8. package/dist/chunk-4TQQZILG.js.map +1 -0
  9. package/dist/{chunk-DA7IKHC4.js → chunk-5CO5JTYT.js} +2 -2
  10. package/dist/chunk-5CO5JTYT.js.map +1 -0
  11. package/dist/{chunk-A7BJPQQ6.js → chunk-5O5OGOOQ.js} +2 -2
  12. package/dist/{chunk-UWQTZMNI.js → chunk-7GZ4D6V6.js} +2 -2
  13. package/dist/{chunk-FLZU44SV.js → chunk-AN6Y4MDD.js} +6 -6
  14. package/dist/{chunk-4FDUOGSZ.js → chunk-D7AWV24Z.js} +3 -3
  15. package/dist/{chunk-CHJ5GNZC.js → chunk-EC42HQQH.js} +2 -2
  16. package/dist/{chunk-YIJY5DBV.js → chunk-KLYMGWQJ.js} +4 -4
  17. package/dist/chunk-KLYMGWQJ.js.map +1 -0
  18. package/dist/{chunk-YUFNYN2H.js → chunk-M62KLIEK.js} +4 -4
  19. package/dist/chunk-M62KLIEK.js.map +1 -0
  20. package/dist/{chunk-GJNNR2RA.js → chunk-M6PDMK2O.js} +3 -3
  21. package/dist/{chunk-M7NXUK55.js → chunk-NVC2WY4K.js} +2 -2
  22. package/dist/{chunk-XTBKL5BI.js → chunk-PMFAYKBD.js} +2 -2
  23. package/dist/chunk-PMFAYKBD.js.map +1 -0
  24. package/dist/{chunk-274RV3YO.js → chunk-QMOIVORH.js} +3 -3
  25. package/dist/chunk-QMOIVORH.js.map +1 -0
  26. package/dist/{chunk-RY3ZFII7.js → chunk-SEHAQTBO.js} +6 -6
  27. package/dist/{chunk-MPZ3BPUI.js → chunk-UMXPOYZR.js} +4 -4
  28. package/dist/{chunk-W4T7PGI2.js → chunk-UXCHAS3Z.js} +4 -4
  29. package/dist/cli/index.js +153 -167
  30. package/dist/cli/index.js.map +1 -1
  31. package/dist/config-PYSS3QY6.js +12 -0
  32. package/dist/context-loader-RSXXFW5R.js +12 -0
  33. package/dist/{conversation-QDEIDQPH.js → conversation-TBTFIJVU.js} +6 -7
  34. package/dist/{cost-tracker-RS3W7SVY.js → cost-tracker-NZRZEHVA.js} +1 -2
  35. package/dist/{delegate-VJCJLYEK.js → delegate-3KJAL4NZ.js} +7 -8
  36. package/dist/{emotional-state-VQVRA6ED.js → emotional-state-IN4ZUL2Q.js} +1 -2
  37. package/dist/{emotional-state-VQVRA6ED.js.map → emotional-state-IN4ZUL2Q.js.map} +1 -1
  38. package/dist/{env-discovery-2BLVMAIM.js → env-discovery-PXBRE5FX.js} +1 -2
  39. package/dist/{env-discovery-2BLVMAIM.js.map → env-discovery-PXBRE5FX.js.map} +1 -1
  40. package/dist/{export-6GCYHEHQ.js → export-GYLWROMB.js} +3 -4
  41. package/dist/{export-6GCYHEHQ.js.map → export-GYLWROMB.js.map} +1 -1
  42. package/dist/graph-LEEO37L3.js +13 -0
  43. package/dist/{harness-WE4SLCML.js → harness-R5FKRICG.js} +8 -9
  44. package/dist/{health-NZ6WNIMV.js → health-HL2JYHIY.js} +1 -2
  45. package/dist/indexer-L5UC6J2V.js +15 -0
  46. package/dist/{instinct-learner-SRM72DHF.js → instinct-learner-QGAMIS3X.js} +5 -6
  47. package/dist/{intake-4M3HNU43.js → intake-SVJKFHTL.js} +5 -6
  48. package/dist/{intelligence-HJOCA4SJ.js → intelligence-XPV3MC5U.js} +10 -13
  49. package/dist/intelligence-XPV3MC5U.js.map +1 -0
  50. package/dist/{journal-WANJL3MI.js → journal-ITUMKT6U.js} +5 -6
  51. package/dist/{loader-C3TKIKZR.js → loader-27PLDCOJ.js} +3 -4
  52. package/dist/{mcp-WTQJJZAO.js → mcp-JSIUJJZV.js} +1 -2
  53. package/dist/{mcp-discovery-WPAQFL6S.js → mcp-discovery-DG3RQYLM.js} +1 -2
  54. package/dist/{mcp-discovery-WPAQFL6S.js.map → mcp-discovery-DG3RQYLM.js.map} +1 -1
  55. package/dist/{mcp-installer-6O2XXD3V.js → mcp-installer-X2TJ2S2G.js} +3 -4
  56. package/dist/{mcp-installer-6O2XXD3V.js.map → mcp-installer-X2TJ2S2G.js.map} +1 -1
  57. package/dist/{metrics-KXGNFAAB.js → metrics-2MNINXNQ.js} +1 -2
  58. package/dist/{primitive-registry-I6VTIR4W.js → primitive-registry-ZMGGXSO5.js} +3 -4
  59. package/dist/{primitive-registry-I6VTIR4W.js.map → primitive-registry-ZMGGXSO5.js.map} +1 -1
  60. package/dist/{project-discovery-C4UMD7JI.js → project-discovery-FQLAZKEM.js} +1 -2
  61. package/dist/project-discovery-FQLAZKEM.js.map +1 -0
  62. package/dist/{provider-SXPQZ74H.js → provider-HQY6SPZI.js} +1 -2
  63. package/dist/{rate-limiter-RLRVM325.js → rate-limiter-PH5DCVU4.js} +1 -2
  64. package/dist/{rule-engine-YGQ3RYZM.js → rule-engine-DM26S77N.js} +3 -4
  65. package/dist/{rule-engine-YGQ3RYZM.js.map → rule-engine-DM26S77N.js.map} +1 -1
  66. package/dist/{scaffold-A3VRRCBV.js → scaffold-2F36YVW6.js} +5 -6
  67. package/dist/{scaffold-A3VRRCBV.js.map → scaffold-2F36YVW6.js.map} +1 -1
  68. package/dist/{scheduler-XHHIVHRI.js → scheduler-Q7GB2KCW.js} +11 -12
  69. package/dist/{scheduler-XHHIVHRI.js.map → scheduler-Q7GB2KCW.js.map} +1 -1
  70. package/dist/{search-V3W5JMJG.js → search-6Y6NCOLQ.js} +3 -4
  71. package/dist/search-6Y6NCOLQ.js.map +1 -0
  72. package/dist/{semantic-search-2DTOO5UX.js → semantic-search-FN6FZIXI.js} +3 -4
  73. package/dist/semantic-search-FN6FZIXI.js.map +1 -0
  74. package/dist/{serve-DTQ3HENY.js → serve-MXRTP2HE.js} +10 -11
  75. package/dist/serve-MXRTP2HE.js.map +1 -0
  76. package/dist/{sessions-CZGVXKQE.js → sessions-G6SZZXWS.js} +1 -2
  77. package/dist/{sources-RW5DT56F.js → sources-7LDYO5GK.js} +1 -2
  78. package/dist/{starter-packs-76YUVHEU.js → starter-packs-OR7NI5NA.js} +1 -2
  79. package/dist/{starter-packs-76YUVHEU.js.map → starter-packs-OR7NI5NA.js.map} +1 -1
  80. package/dist/{state-GMXILIHW.js → state-25IQEC5C.js} +1 -2
  81. package/dist/{state-merge-NKO5FRBA.js → state-merge-E333OEIQ.js} +1 -2
  82. package/dist/{state-merge-NKO5FRBA.js.map → state-merge-E333OEIQ.js.map} +1 -1
  83. package/dist/{telemetry-UC6PBXC7.js → telemetry-RS2JZUZP.js} +4 -5
  84. package/dist/{tool-executor-MJ7IG7PQ.js → tool-executor-6I5PHQDY.js} +5 -6
  85. package/dist/{tools-DZ4KETET.js → tools-NDFJNVHK.js} +4 -5
  86. package/dist/{types-EW7AIB3R.js → types-NPJZAI72.js} +2 -3
  87. package/dist/{universal-installer-AAXXYM5A.js → universal-installer-LCAZHFZR.js} +91 -7
  88. package/dist/universal-installer-LCAZHFZR.js.map +1 -0
  89. package/dist/validator-LM7RZWSH.js +21 -0
  90. package/dist/{verification-gate-FYXUX6LH.js → verification-gate-2O6DF2B7.js} +3 -4
  91. package/dist/verification-gate-2O6DF2B7.js.map +1 -0
  92. package/dist/{versioning-Z3XNE2Q2.js → versioning-WEGF6KJG.js} +1 -2
  93. package/dist/versioning-WEGF6KJG.js.map +1 -0
  94. package/dist/{watcher-ISJC7YKL.js → watcher-GZWQSWZ6.js} +5 -6
  95. package/dist/{watcher-ISJC7YKL.js.map → watcher-GZWQSWZ6.js.map} +1 -1
  96. package/dist/{web-server-DD7ZOP46.js → web-server-2Y4CHD2W.js} +8 -9
  97. package/package.json +1 -9
  98. package/dist/agent-framework-K4GUIICH.js +0 -344
  99. package/dist/agent-framework-K4GUIICH.js.map +0 -1
  100. package/dist/auto-processor-OLE45UI3.js +0 -13
  101. package/dist/chunk-274RV3YO.js.map +0 -1
  102. package/dist/chunk-4CWAGBNS.js.map +0 -1
  103. package/dist/chunk-CSL3ERUI.js.map +0 -1
  104. package/dist/chunk-DA7IKHC4.js.map +0 -1
  105. package/dist/chunk-DGUM43GV.js +0 -11
  106. package/dist/chunk-FD55B3IO.js +0 -204
  107. package/dist/chunk-FD55B3IO.js.map +0 -1
  108. package/dist/chunk-GUJTBGVS.js +0 -2212
  109. package/dist/chunk-GUJTBGVS.js.map +0 -1
  110. package/dist/chunk-KFX54TQM.js +0 -165
  111. package/dist/chunk-KFX54TQM.js.map +0 -1
  112. package/dist/chunk-XTBKL5BI.js.map +0 -1
  113. package/dist/chunk-YIJY5DBV.js.map +0 -1
  114. package/dist/chunk-YUFNYN2H.js.map +0 -1
  115. package/dist/chunk-ZZJOFKAT.js +0 -13
  116. package/dist/config-WVMRUOCA.js +0 -13
  117. package/dist/context-loader-3ORBPMHJ.js +0 -13
  118. package/dist/graph-YUIPOSOO.js +0 -14
  119. package/dist/harness-LCHA3DWP.js +0 -10
  120. package/dist/index.d.ts +0 -3612
  121. package/dist/index.js +0 -13713
  122. package/dist/index.js.map +0 -1
  123. package/dist/indexer-LONANRRM.js +0 -16
  124. package/dist/intelligence-HJOCA4SJ.js.map +0 -1
  125. package/dist/project-discovery-C4UMD7JI.js.map +0 -1
  126. package/dist/provider-LQHQX7Z7.js +0 -26
  127. package/dist/search-V3W5JMJG.js.map +0 -1
  128. package/dist/semantic-search-2DTOO5UX.js.map +0 -1
  129. package/dist/serve-DTQ3HENY.js.map +0 -1
  130. package/dist/tools-DZ4KETET.js.map +0 -1
  131. package/dist/types-EW7AIB3R.js.map +0 -1
  132. package/dist/types-WGDLSPO6.js +0 -16
  133. package/dist/types-WGDLSPO6.js.map +0 -1
  134. package/dist/universal-installer-AAXXYM5A.js.map +0 -1
  135. package/dist/validator-7WXMDIHH.js +0 -22
  136. package/dist/validator-7WXMDIHH.js.map +0 -1
  137. package/dist/verification-gate-FYXUX6LH.js.map +0 -1
  138. package/dist/versioning-Z3XNE2Q2.js.map +0 -1
  139. package/dist/web-server-DD7ZOP46.js.map +0 -1
  140. /package/dist/{analytics-RPT73WNM.js.map → analytics-L24W3B7U.js.map} +0 -0
  141. /package/dist/{auto-processor-OLE45UI3.js.map → auto-processor-QIRUOGEI.js.map} +0 -0
  142. /package/dist/{chunk-UPLBF4RZ.js.map → chunk-2UVWCTAY.js.map} +0 -0
  143. /package/dist/{chunk-A7BJPQQ6.js.map → chunk-5O5OGOOQ.js.map} +0 -0
  144. /package/dist/{chunk-UWQTZMNI.js.map → chunk-7GZ4D6V6.js.map} +0 -0
  145. /package/dist/{chunk-FLZU44SV.js.map → chunk-AN6Y4MDD.js.map} +0 -0
  146. /package/dist/{chunk-4FDUOGSZ.js.map → chunk-D7AWV24Z.js.map} +0 -0
  147. /package/dist/{chunk-CHJ5GNZC.js.map → chunk-EC42HQQH.js.map} +0 -0
  148. /package/dist/{chunk-GJNNR2RA.js.map → chunk-M6PDMK2O.js.map} +0 -0
  149. /package/dist/{chunk-M7NXUK55.js.map → chunk-NVC2WY4K.js.map} +0 -0
  150. /package/dist/{chunk-RY3ZFII7.js.map → chunk-SEHAQTBO.js.map} +0 -0
  151. /package/dist/{chunk-MPZ3BPUI.js.map → chunk-UMXPOYZR.js.map} +0 -0
  152. /package/dist/{chunk-W4T7PGI2.js.map → chunk-UXCHAS3Z.js.map} +0 -0
  153. /package/dist/{chunk-DGUM43GV.js.map → config-PYSS3QY6.js.map} +0 -0
  154. /package/dist/{chunk-ZZJOFKAT.js.map → context-loader-RSXXFW5R.js.map} +0 -0
  155. /package/dist/{config-WVMRUOCA.js.map → conversation-TBTFIJVU.js.map} +0 -0
  156. /package/dist/{context-loader-3ORBPMHJ.js.map → cost-tracker-NZRZEHVA.js.map} +0 -0
  157. /package/dist/{conversation-QDEIDQPH.js.map → delegate-3KJAL4NZ.js.map} +0 -0
  158. /package/dist/{cost-tracker-RS3W7SVY.js.map → graph-LEEO37L3.js.map} +0 -0
  159. /package/dist/{delegate-VJCJLYEK.js.map → harness-R5FKRICG.js.map} +0 -0
  160. /package/dist/{graph-YUIPOSOO.js.map → health-HL2JYHIY.js.map} +0 -0
  161. /package/dist/{harness-LCHA3DWP.js.map → indexer-L5UC6J2V.js.map} +0 -0
  162. /package/dist/{harness-WE4SLCML.js.map → instinct-learner-QGAMIS3X.js.map} +0 -0
  163. /package/dist/{health-NZ6WNIMV.js.map → intake-SVJKFHTL.js.map} +0 -0
  164. /package/dist/{indexer-LONANRRM.js.map → journal-ITUMKT6U.js.map} +0 -0
  165. /package/dist/{instinct-learner-SRM72DHF.js.map → loader-27PLDCOJ.js.map} +0 -0
  166. /package/dist/{intake-4M3HNU43.js.map → mcp-JSIUJJZV.js.map} +0 -0
  167. /package/dist/{journal-WANJL3MI.js.map → metrics-2MNINXNQ.js.map} +0 -0
  168. /package/dist/{loader-C3TKIKZR.js.map → provider-HQY6SPZI.js.map} +0 -0
  169. /package/dist/{mcp-WTQJJZAO.js.map → rate-limiter-PH5DCVU4.js.map} +0 -0
  170. /package/dist/{metrics-KXGNFAAB.js.map → sessions-G6SZZXWS.js.map} +0 -0
  171. /package/dist/{provider-LQHQX7Z7.js.map → sources-7LDYO5GK.js.map} +0 -0
  172. /package/dist/{provider-SXPQZ74H.js.map → state-25IQEC5C.js.map} +0 -0
  173. /package/dist/{rate-limiter-RLRVM325.js.map → telemetry-RS2JZUZP.js.map} +0 -0
  174. /package/dist/{sessions-CZGVXKQE.js.map → tool-executor-6I5PHQDY.js.map} +0 -0
  175. /package/dist/{sources-RW5DT56F.js.map → tools-NDFJNVHK.js.map} +0 -0
  176. /package/dist/{state-GMXILIHW.js.map → types-NPJZAI72.js.map} +0 -0
  177. /package/dist/{telemetry-UC6PBXC7.js.map → validator-LM7RZWSH.js.map} +0 -0
  178. /package/dist/{tool-executor-MJ7IG7PQ.js.map → web-server-2Y4CHD2W.js.map} +0 -0
package/dist/indexer-LONANRRM.js
@@ -1,16 +0,0 @@
- #!/usr/bin/env node
-
- import {
-   buildIndex,
-   rebuildAllIndexes,
-   writeIndexFile
- } from "./chunk-4FDUOGSZ.js";
- import "./chunk-UPLBF4RZ.js";
- import "./chunk-4CWAGBNS.js";
- import "./chunk-ZZJOFKAT.js";
- export {
-   buildIndex,
-   rebuildAllIndexes,
-   writeIndexFile
- };
- //# sourceMappingURL=indexer-LONANRRM.js.map
package/dist/intelligence-HJOCA4SJ.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/runtime/intelligence.ts"],"sourcesContent":["…"],"mappings":"…
  (single-line minified source map, truncated in this view; its sourcesContent embeds the full src/runtime/intelligence.ts: instinct auto-promotion, dead-primitive detection, contradiction detection, session enrichment, capability suggestions, the failure-mode taxonomy, and the built-in verification gates)
Ac;AAC7C;AA4BO,SAAS,qBACd,YACqB;AACrB,QAAM,WAAW,KAAK,YAAY,OAAO;AACzC,QAAM,eAAe,KAAK,YAAY,WAAW;AAEjD,QAAM,QAA2B,WAAW,QAAQ,IAAI,cAAc,QAAQ,IAAI,CAAC;AACnF,QAAM,YAA+B,WAAW,YAAY,IAAI,cAAc,YAAY,IAAI,CAAC;AAE/F,QAAM,iBAAkC,CAAC;AAGzC,QAAM,iBAAiB,MAAM,IAAI,CAAC,SAAS;AAAA,IACzC;AAAA,IACA,YAAY,kBAAkB,GAAG;AAAA,IACjC,QAAQ,cAAc,GAAG;AAAA,EAC3B,EAAE;AAEF,QAAM,qBAAqB,UAAU,IAAI,CAAC,SAAS;AAAA,IACjD;AAAA,IACA,YAAY,kBAAkB,GAAG;AAAA,IACjC,QAAQ,cAAc,GAAG;AAAA,EAC3B,EAAE;AAGF,aAAW,QAAQ,gBAAgB;AACjC,eAAW,YAAY,oBAAoB;AAEzC,iBAAW,MAAM,KAAK,YAAY;AAChC,mBAAW,MAAM,SAAS,YAAY;AACpC,gBAAM,WAAW,cAAc,IAAI,EAAE;AACrC,cAAI,UAAU;AACZ,2BAAe,KAAK;AAAA,cAClB,YAAY;AAAA,gBACV,IAAI,KAAK,IAAI,YAAY;AAAA,gBACzB,MAAM,SAAS,YAAY,KAAK,IAAI,IAAI;AAAA,gBACxC,MAAM;AAAA,gBACN,MAAM,GAAG;AAAA,cACX;AAAA,cACA,YAAY;AAAA,gBACV,IAAI,SAAS,IAAI,YAAY;AAAA,gBAC7B,MAAM,SAAS,YAAY,SAAS,IAAI,IAAI;AAAA,gBAC5C,MAAM;AAAA,gBACN,MAAM,GAAG;AAAA,cACX;AAAA,cACA,QAAQ;AAAA,cACR,UAAU;AAAA,YACZ,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAGA,YAAM,eAAe,KAAK,OAAO,OAAO,CAAC,MAAM,SAAS,OAAO,SAAS,CAAC,CAAC;AAC1E,UAAI,aAAa,SAAS,GAAG;AAE3B,cAAM,YAAY,KAAK,IAAI,KAAK,MAAM,KAAK,IAAI,MAAM,YAAY;AACjE,cAAM,gBAAgB,SAAS,IAAI,KAAK,MAAM,SAAS,IAAI,MAAM,YAAY;AAE7E,mBAAW,SAAS,cAAc;AAChC,gBAAM,gBAAgB,qBAAqB,UAAU,KAAK;AAC1D,gBAAM,mBAAmB,qBAAqB,cAAc,KAAK;AACjE,gBAAM,eAAe,qBAAqB,UAAU,KAAK;AACzD,gBAAM,oBAAoB,qBAAqB,cAAc,KAAK;AAElE,cAAK,iBAAiB,oBAAsB,gBAAgB,mBAAoB;AAE9E,kBAAM,gBAAgB,eAAe;AAAA,cACnC,CAAC,MACC,EAAE,WAAW,OAAO,KAAK,IAAI,YAAY,MACzC,EAAE,WAAW,OAAO,SAAS,IAAI,YAAY;AAAA,YACjD;AACA,gBAAI,CAAC,eAAe;AAClB,6BAAe,KAAK;AAAA,gBAClB,YAAY;AAAA,kBACV,IAAI,KAAK,IAAI,YAAY;AAAA,kBACzB,MAAM,SAAS,YAAY,KAAK,IAAI,IAAI;AAAA,kBACxC,MAAM;AAAA,kBACN,MAAM,KAAK,IAAI,MAAM,KAAK,IAAI,YAAY;AAAA,gBAC5C;AAAA,gBACA,YAAY;AAAA,kBACV,IAAI,SAAS,IAAI,YAAY;AAAA,kBAC7B,MAAM,SAAS,YAAY,SAAS,IAAI,IAAI;AAAA,kBAC5C,MAAM;AAAA,kBACN,MAAM,SAAS,IAAI,MAAM,SAAS,IAAI,YAAY;AAAA,gBACpD;AAAA,gBACA,QAAQ,iCAAiC,KAAK;AAAA,gBAC9C,UAAU;AAAA,cACZ,CAAC;AAAA,YACH;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAGA,gCAA8B,gBAAgB,QAAQ,YAAY,cAAc;AAChF,gCAA8B,oBAAoB,YAAY,YAAY,cAAc;AAExF,SAAO;AAAA,IACL;AAAA,IACA,cAAc,MAAM;AAAA,IACpB,kBAAkB,UAAU;AAAA,EAC9B;AACF;AAaA,SAAS,kBAAkB,KAAmC;AAC5D,QAAM,aAA0B,CAAC;AACjC,QAAM,QAAQ,IAAI,KAAK,OAAO,IAAI,MAAM,KAAK;AAG7C,aAAW,QAAQ,KAAK,MAAM,IAAI,GAAG;AACnC,UAAM,UAAU,KAAK,KAAK,EAAE,YAAY;AACxC,QAAI,CAAC,WAAW,QAAQ,WAAW,GAAG,EAAG;AAGzC,UAAM,UAAU,QAAQ,QAAQ,YAAY,EAAE,EAAE,QAAQ,aAAa,EAAE;AAGvE,UAAM,gBAAgB,QAAQ;AAAA,MAC5B;AAAA,IACF;AACA,QAAI,eAAe;AACjB,iBAAW,KAAK;AAAA,QACd,QAAQ;AAAA,QACR,MAAM,cAAc,CAAC;AAAA,QACrB,SAAS,cAAc,CAAC,EAAE,QAAQ,SAAS,EAAE;AAAA,QAC7C,KAAK;AAAA,MACP,CAAC;AACD;AAAA,IACF;AAGA,UAAM,gBAAgB,QAAQ;AAAA,MAC5B;AAAA,IACF;AACA,QAAI,eAAe;AACjB,iBAAW,KAAK;AAAA,QACd,QAAQ;AAAA,QACR,MAAM,cAAc,CAAC;AAAA,QACrB,SAAS,cAAc,CAAC,EAAE,QAAQ,SAAS,EAAE;AAAA,QAC7C,KAAK;AAAA,MACP,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,cAAc,KAAgC;AACrD,QAAM,SAAmB,CAAC;AAG1B,aAAW,OAAO,IAAI,YAAY,MAAM;AACtC,WAAO,KAAK,IAAI,YAAY,CAAC;AAAA,EAC/B;AAGA,QAAM,UAAU,IAAI,YAAY,GAAG,MAAM,GAAG,EAAE,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AACxE,SAAO,KAAK,GAAG,QAAQ,IAAI,CAAC,MAAM,EAAE,YAAY,CAAC,CAAC;AAElD,SAAO,CAAC,GAAG,IAAI,IAAI,MAAM,CAAC;AAC5B;AAKA,SAAS,cAAc,GAAc,GAA6B;AAEhE,MAAI,EAAE,WAAW,EAAE,OAAQ,QAAO;AAGlC,QAAM,OAAO,EAAE,QAAQ,YAAY,EAAE,QAAQ,QAAQ,GAAG,EAAE,KAAK;AAC/D,QAAM,OAAO,EAAE,QAAQ,YAAY,EAAE,QAAQ,QAAQ,GAAG,EAAE,KAAK;AAG/D,MAAI,SAAS,MAAM;AACjB,WAAO,0BAA0B,EAAE,GAAG,SAAS,EAAE,GAAG;AAAA,EACtD;AAGA,QAAM,SAAS,KAAK,MAAM,GAAG,EAAE,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AACzD,QAAM,SAAS,KAAK,MAAM,GAAG,EAAE,OAAO,CAAC,MAAM,EAAE,SAAS,CAAC;AACzD,QAAM,UAAU,OAAO,OAAO,CAAC,MAAM,OA
AO,SAAS,CAAC,CAAC;AAEvD,MAAI,QAAQ,UAAU,KAAK,QAAQ,UAAU,KAAK,IAAI,OAAO,QAAQ,OAAO,MAAM,IAAI,KAAK;AACzF,WAAO,uCAAuC,QAAQ,KAAK,IAAI,CAAC,OAAO,EAAE,GAAG,SAAS,EAAE,GAAG;AAAA,EAC5F;AAEA,SAAO;AACT;AAEA,SAAS,qBAAqB,MAAc,OAAwB;AAClE,QAAM,WAAW;AAAA,IACf,IAAI,OAAO,iBAAiB,KAAK,IAAI,GAAG;AAAA,IACxC,IAAI,OAAO,eAAe,KAAK,IAAI,GAAG;AAAA,IACtC,IAAI,OAAO,iBAAiB,KAAK,IAAI,GAAG;AAAA,IACxC,IAAI,OAAO,iBAAiB,KAAK,IAAI,GAAG;AAAA,IACxC,IAAI,OAAO,cAAc,KAAK,IAAI,GAAG;AAAA,EACvC;AACA,SAAO,SAAS,KAAK,CAAC,MAAM,EAAE,KAAK,IAAI,CAAC;AAC1C;AAEA,SAAS,qBAAqB,MAAc,OAAwB;AAClE,QAAM,WAAW;AAAA,IACf,IAAI,OAAO,gBAAgB,KAAK,IAAI,GAAG;AAAA,IACvC,IAAI,OAAO,gBAAgB,KAAK,IAAI,GAAG;AAAA,IACvC,IAAI,OAAO,iBAAiB,KAAK,IAAI,GAAG;AAAA,IACxC,IAAI,OAAO,iBAAiB,KAAK,IAAI,GAAG;AAAA,EAC1C;AACA,SAAO,SAAS,KAAK,CAAC,MAAM,EAAE,KAAK,IAAI,CAAC;AAC1C;AAEA,SAAS,8BACP,OACA,MACA,YACA,gBACM;AACN,WAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,aAAS,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACzC,YAAM,IAAI,MAAM,CAAC;AACjB,YAAM,IAAI,MAAM,CAAC;AAEjB,iBAAW,MAAM,EAAE,YAAY;AAC7B,mBAAW,MAAM,EAAE,YAAY;AAC7B,gBAAM,WAAW,cAAc,IAAI,EAAE;AACrC,cAAI,UAAU;AACZ,2BAAe,KAAK;AAAA,cAClB,YAAY;AAAA,gBACV,IAAI,EAAE,IAAI,YAAY;AAAA,gBACtB,MAAM,SAAS,YAAY,EAAE,IAAI,IAAI;AAAA,gBACrC;AAAA,gBACA,MAAM,GAAG;AAAA,cACX;AAAA,cACA,YAAY;AAAA,gBACV,IAAI,EAAE,IAAI,YAAY;AAAA,gBACtB,MAAM,SAAS,YAAY,EAAE,IAAI,IAAI;AAAA,gBACrC;AAAA,gBACA,MAAM,GAAG;AAAA,cACX;AAAA,cACA,QAAQ;AAAA,cACR,UAAU;AAAA,YACZ,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AA+BO,SAAS,eACd,YACA,QACA,SACkB;AAClB,QAAM,cAAc,KAAK,YAAY,UAAU,UAAU;AACzD,MAAI,CAAC,WAAW,WAAW,GAAG;AAC5B,WAAO,EAAE,UAAU,CAAC,GAAG,iBAAiB,EAAE;AAAA,EAC5C;AAGA,QAAM,eAAe,oBAAI,IAAY;AACrC,QAAM,OAAO,iBAAiB,MAAM;AACpC,aAAW,OAAO,MAAM;AACtB,UAAM,WAAW,KAAK,YAAY,GAAG;AACrC,QAAI,CAAC,WAAW,QAAQ,EAAG;AAC3B,UAAM,EAAE,KAAK,IAAI,wBAAwB,QAAQ;AACjD,eAAW,OAAO,MAAM;AACtB,mBAAa,IAAI,IAAI,YAAY,EAAE;AAAA,IACrC;AAAA,EACF;AAEA,QAAM,QAAQ,YAAY,WAAW,EAClC,OAAO,CAAC,MAAM,EAAE,SAAS,KAAK,KAAK,CAAC,EAAE,WAAW,GAAG,KAAK,CAAC,EAAE,WAAW,GAAG,CAAC,EAC3E,KAAK;AAGR,QAAM,WAAW,MAAM,OAAO,CAAC,MAAM;AACnC,QAAI,CAAC,SAAS,QAAQ,CAAC,SAAS,GAAI,QAAO;AAC3C,UAAM,YAAY,EAAE,MAAM,sBAAsB;AAChD,QAAI,CAAC,UAAW,QAAO;AACvB,UAAM,IAAI,UAAU,CAAC;AACrB,QAAI,SAAS,QAAQ,IAAI,QAAQ,KAAM,QAAO;AAC9C,QAAI,SAAS,MAAM,IAAI,QAAQ,GAAI,QAAO;AAC1C,WAAO;AAAA,EACT,CAAC;AAED,QAAM,WAAgC,CAAC;AAEvC,aAAW,QAAQ,UAAU;AAC3B,UAAM,UAAU,aAAa,KAAK,aAAa,IAAI,GAAG,OAAO;AAC7D,UAAM,YAAY,KAAK,QAAQ,SAAS,EAAE;AAE1C,UAAM,aAAa,cAAc,SAAS,WAAW,YAAY;AACjE,aAAS,KAAK,UAAU;AAAA,EAC1B;AAEA,SAAO,EAAE,UAAU,iBAAiB,SAAS,OAAO;AACtD;AAEA,SAAS,cACP,SACA,WACA,cACmB;AAEnB,QAAM,cAAc,QAAQ,MAAM,4BAA4B;AAC9D,QAAM,aAAa,cAAc,SAAS,YAAY,CAAC,EAAE,QAAQ,MAAM,EAAE,GAAG,EAAE,IAAI;AAElF,QAAM,aAAa,QAAQ,MAAM,sBAAsB;AACvD,QAAM,YAAY,aAAa,SAAS,WAAW,CAAC,GAAG,EAAE,IAAI;AAE7D,QAAM,aAAa,QAAQ,MAAM,wBAAwB;AACzD,QAAM,QAAQ,aAAa,WAAW,CAAC,EAAE,KAAK,IAAI;AAElD,QAAM,gBAAgB,QAAQ,MAAM,2BAA2B;AAC/D,QAAM,WAAW,gBAAgB,cAAc,CAAC,EAAE,KAAK,IAAI;AAG3D,QAAM,YAAsB,CAAC;AAC7B,QAAM,cAAc,QAAQ,SAAS,gCAAgC;AACrE,aAAW,SAAS,aAAa;AAC/B,UAAM,WAAW,MAAM,CAAC;AACxB,QAAI,CAAC,UAAU,SAAS,QAAQ,GAAG;AACjC,gBAAU,KAAK,QAAQ;AAAA,IACzB;AAAA,EACF;AAGA,QAAM,kBAAkB,QAAQ,SAAS,gCAAgC;AACzE,aAAW,SAAS,iBAAiB;AACnC,UAAM,WAAW,MAAM,CAAC;AACxB,QAAI,CAAC,UAAU,SAAS,QAAQ,GAAG;AACjC,gBAAU,KAAK,QAAQ;AAAA,IACzB;AAAA,EACF;AAGA,QAAM,uBAAiC,CAAC;AACxC,aAAW,MAAM,cAAc;AAC7B,QAAI,GAAG,SAAS,EAAG;AACnB,QAAI,QAAQ,SAAS,EAAE,GAAG;AACxB,2BAAqB,KAAK,EAAE;AAAA,IAC9B;AAAA,EACF;AAGA,QAAM,SAAS,qBAAqB,OAAO;AAE3C,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAMA,SAAS,qBAAqB,SAA2B;AAEvD,QAAM,cAAc,QAAQ,MAAM,kCAAkC;AACpE,QAAM,aA
Aa,cAAc,YAAY,CAAC,IAAI;AAGlD,QAAM,eAAe,QAAQ,MAAM,mCAAmC;AACtE,QAAM,cAAc,eAAe,aAAa,CAAC,IAAI;AAErD,QAAM,QAAQ,aAAa,MAAM,aAAa,YAAY;AAG1D,QAAM,YAAY,oBAAI,IAAI;AAAA,IACxB;AAAA,IAAO;AAAA,IAAK;AAAA,IAAM;AAAA,IAAM;AAAA,IAAO;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAM;AAAA,IAAQ;AAAA,IAC5D;AAAA,IAAQ;AAAA,IAAO;AAAA,IAAO;AAAA,IAAM;AAAA,IAAQ;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAS;AAAA,IAC5D;AAAA,IAAU;AAAA,IAAO;AAAA,IAAS;AAAA,IAAS;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAAM;AAAA,IAChE;AAAA,IAAM;AAAA,IAAO;AAAA,IAAM;AAAA,IAAQ;AAAA,IAAM;AAAA,IAAM;AAAA,IAAQ;AAAA,IAAM;AAAA,IAAQ;AAAA,IAC7D;AAAA,IAAQ;AAAA,IAAW;AAAA,IAAS;AAAA,IAAU;AAAA,IAAW;AAAA,IAAS;AAAA,IAC1D;AAAA,IAAO;AAAA,IAAM;AAAA,IAAO;AAAA,IAAO;AAAA,IAAM;AAAA,IAAO;AAAA,IAAM;AAAA,IAAO;AAAA,IAAQ;AAAA,IAC7D;AAAA,IAAW;AAAA,IAAQ;AAAA,IAAS;AAAA,IAAO;AAAA,IAAO;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAQ;AAAA,IACjE;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAC/D;AAAA,IAAS;AAAA,IAAS;AAAA,IAAM;AAAA,IAAO;AAAA,IAAK;AAAA,IAAM;AAAA,IAAM;AAAA,IAAM;AAAA,IAAO;AAAA,IAC7D;AAAA,IAAQ;AAAA,IAAM;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAAS;AAAA,IAAQ;AAAA,IAAS;AAAA,IAC/D;AAAA,IAAQ;AAAA,IAAS;AAAA,IAAO;AAAA,IAAO;AAAA,IAAM;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAAS;AAAA,IAC9D;AAAA,IAAO;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAQ;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAO;AAAA,IAAO;AAAA,IAC3D;AAAA,IAAQ;AAAA,IAAO;AAAA,IAAQ;AAAA,IAAS;AAAA,IAAO;AAAA,IAAO;AAAA,EAChD,CAAC;AAGD,QAAM,QAAQ,KACX,QAAQ,iBAAiB,EAAE,EAC3B,MAAM,KAAK,EACX,OAAO,CAAC,MAAM,EAAE,SAAS,KAAK,CAAC,UAAU,IAAI,CAAC,CAAC;AAElD,QAAM,OAAO,oBAAI,IAAoB;AACrC,aAAW,QAAQ,OAAO;AACxB,SAAK,IAAI,OAAO,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC;AAAA,EAC1C;AAGA,SAAO,MAAM,KAAK,KAAK,QAAQ,CAAC,EAC7B,OAAO,CAAC,CAAC,EAAE,KAAK,MAAM,SAAS,CAAC,EAChC,KAAK,CAAC,GAAG,MAAM,EAAE,CAAC,IAAI,EAAE,CAAC,CAAC,EAC1B,MAAM,GAAG,CAAC,EACV,IAAI,CAAC,CAAC,IAAI,MAAM,IAAI;AACzB;AAyBO,SAAS,oBACd,YACA,QACA,SAC4B;AAC5B,QAAM,eAAe,SAAS,gBAAgB;AAG9C,QAAM,EAAE,UAAU,gBAAgB,IAAI,eAAe,YAAY,MAAM;AAGvE,QAAM,mBAAmB,oBAAI,IAAyB;AACtD,aAAW,WAAW,UAAU;AAC9B,UAAM,YAAY,QAAQ,UAAU,MAAM,sBAAsB;AAChE,UAAM,OAAO,YAAY,UAAU,CAAC,IAAI,QAAQ;AAEhD,eAAW,SAAS,QAAQ,QAAQ;AAClC,UAAI,CAAC,iBAAiB,IAAI,KAAK,GAAG;AAChC,yBAAiB,IAAI,OAAO,oBAAI,IAAI,CAAC;AAAA,MACvC;AACA,uBAAiB,IAAI,KAAK,EAAG,IAAI,IAAI;AAAA,IACvC;AAAA,EACF;AAGA,QAAM,gBAAgB,oBAAI,IAAY;AACtC,QAAM,YAAY,KAAK,YAAY,QAAQ;AAC3C,QAAM,eAAe,KAAK,YAAY,WAAW;AAEjD,aAAW,OAAO,CAAC,WAAW,YAAY,GAAG;AAC3C,QAAI,CAAC,WAAW,GAAG,EAAG;AACtB,UAAM,OAAO,cAAc,GAAG;AAC9B,eAAW,OAAO,MAAM;AAEtB,iBAAW,QAAQ,IAAI,YAAY,GAAG,MAAM,GAAG,GAAG;AAChD,YAAI,KAAK,SAAS,EAAG,eAAc,IAAI,KAAK,YAAY,CAAC;AAAA,MAC3D;AAEA,iBAAW,OAAO,IAAI,YAAY,MAAM;AACtC,sBAAc,IAAI,IAAI,YAAY,CAAC;AAAA,MACrC;AAAA,IACF;AAAA,EACF;AAGA,QAAM,cAAsC,CAAC;AAE7C,aAAW,CAAC,OAAO,KAAK,KAAK,kBAAkB;AAC7C,QAAI,MAAM,OAAO,aAAc;AAC/B,QAAI,cAAc,IAAI,KAAK,EAAG;AAE9B,UAAM,gBAAgB,MAAM,QAAQ,IAAI,aAAa;AACrD,gBAAY,KAAK;AAAA,MACf;AAAA,MACA,WAAW,MAAM;AAAA,MACjB,cAAc,CAAC,GAAG,KAAK,EAAE,KAAK;AAAA,MAC9B,YAAY,YAAY,aAAa,SAAS,KAAK,wBAAmB,MAAM,IAAI;AAAA,MAChF;AAAA,IACF,CAAC;AAAA,EACH;AAGA,cAAY,KAAK,CAAC,GAAG,MAAM,EAAE,YAAY,EAAE,SAAS;AAEpD,SAAO;AAAA,IACL;AAAA,IACA,gBAAgB,iBAAiB;AAAA,IACjC;AAAA,EACF;AACF;AAkDO,IAAM,mBAAoC;AAAA,EAC/C,OAAO;AAAA,IACL,kBAAkB;AAAA,MAChB,aAAa;AAAA,MACb,UAAU;AAAA,MACV,oBAAoB;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,IACnB;AAAA,IACA,sBAAsB;AAAA,MACpB,aAAa;AAAA,MACb,UAAU;AAAA,MACV,oBAAoB;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,IACnB;AAAA,IACA,kBAAkB;AAAA,MAChB,aAAa;AAAA,MACb,UAAU;AAAA,MACV,oBAAoB;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,IACnB;AAAA,IACA,cAAc;AAAA,MACZ,aAAa;AAAA,MACb,UA
AU;AAAA,MACV,oBAAoB;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,IACnB;AAAA,IACA,aAAa;AAAA,MACX,aAAa;AAAA,MACb,UAAU;AAAA,MACV,oBAAoB;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,IACnB;AAAA,IACA,WAAW;AAAA,MACT,aAAa;AAAA,MACb,UAAU;AAAA,MACV,oBAAoB;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,IACnB;AAAA,IACA,wBAAwB;AAAA,MACtB,aAAa;AAAA,MACb,UAAU;AAAA,MACV,oBAAoB;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,IACnB;AAAA,IACA,iBAAiB;AAAA,MACf,aAAa;AAAA,MACb,UAAU;AAAA,MACV,oBAAoB;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,IACnB;AAAA,IACA,qBAAqB;AAAA,MACnB,aAAa;AAAA,MACb,UAAU;AAAA,MACV,oBAAoB;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,IACnB;AAAA,IACA,oBAAoB;AAAA,MAClB,aAAa;AAAA,MACb,UAAU;AAAA,MACV,oBAAoB;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,IACnB;AAAA,IACA,aAAa;AAAA,MACX,aAAa;AAAA,MACb,UAAU;AAAA,MACV,oBAAoB;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,IACnB;AAAA,IACA,gBAAgB;AAAA,MACd,aAAa;AAAA,MACb,UAAU;AAAA,MACV,oBAAoB;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,IACnB;AAAA,IACA,uBAAuB;AAAA,MACrB,aAAa;AAAA,MACb,UAAU;AAAA,MACV,oBAAoB;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,IACnB;AAAA,IACA,kBAAkB;AAAA,MAChB,aAAa;AAAA,MACb,UAAU;AAAA,MACV,oBAAoB;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,IACnB;AAAA,IACA,SAAS;AAAA,MACP,aAAa;AAAA,MACb,UAAU;AAAA,MACV,oBAAoB;AAAA,QAClB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,MACA,iBAAiB;AAAA,IACnB;AAAA,EACF;AACF;AAaO,SAAS,gBAAgB,OAAuB,SAAgD;AACrG,QAAM,MAAM,OAAO,UAAU,WAAW,MAAM,YAAY,IAAI,MAAM,QAAQ,YAAY;AAExF,MAAI,IAAI,SAAS,SAAS,MAAM,IAAI,SAAS,UAAU,KAAK,IAAI,SAAS,UAAU,KAAK,IAAI,SAAS,QAAQ,IAAI;AAC/G,WAAO;AAAA,EACT;AACA,MAAI,IAAI,SAAS,MAAM,MAAM,IAAI,SAAS,MAAM,KAAK,IAAI,SAAS,OAAO,KAAK,IAAI,SAAS,SAAS,IAAI;AACtG,WAAO;AAAA,EACT;AACA,MAAI,IAAI,SAAS,QAAQ,KAAK,IAAI,SAAS,UAAU,KAAK,IAAI,SAAS,gBAAgB,GAAG;AACxF,WAAO;AAAA,EACT;AACA,MAAI,IAAI,SAAS,YAAY,KAAK,IAAI,SAAS,KAAK,KAAK,IAAI,SAAS,mBAAmB,GAAG;AAC1F,WAAO;AAAA,EACT;AACA,MAAI,IAAI,SAAS,SAAS,KAAK,IAAI,SAAS,WAAW,KAAK,IAAI,SAAS,WAAW,GAAG;AACrF,WAAO;AAAA,EACT;AACA,MAAI,IAAI,SAAS,KAAK,MAAM,IAAI,SAAS,SAAS,KAAK,IAAI,SAAS,OAAO,KAAK,IAAI,SAAS,QAAQ,IAAI;AACvG,WAAO;AAAA,EACT;AACA,MAAI,IAAI,SAAS,OAAO,KAAK,IAAI,SAAS,MAAM,KAAK,IAAI,SAAS,aAAa,KAAK,IAAI,SAAS,WAAW,GAAG;AAC7G,WAAO;AAAA,EACT;AACA,MAAI,IAAI,SAAS,QAAQ,MAAM,IAAI,SAAS,SAAS,KAAK,IAAI,SAAS,YAAY,IAAI;AACrF,WAAO;AAAA,EACT;AACA,MAAI,IAAI,SAAS,OAAO,MAAM,IAAI,SAAS,SAAS,KAAK,IAAI,SAAS,SAAS,KAAK,IAAI,SAAS,YAAY,IAAI;AAC/G,WAAO;AAAA,EACT;AACA,MAAI,IAAI,SAAS,UAAU,KAAK,IAAI,SAAS,iBAAiB,KAAK,IAAI,SAAS,OAAO,GAAG;AACxF,WAAO;AAAA,EACT;AACA,MAAI,IAAI,SAAS,SAAS,KAAK,IAAI,SAAS,WAAW,KAAK,IAAI,SAAS,YAAY,GAAG;AACtF,WAAO;AAAA,EACT;AACA,MAAI,IAAI,SAAS,KAAK,KAAK,IAAI,SAAS,KAAK,KAAK,IAAI,SAAS,KAAK,KAAK,IAAI,SAAS,KAAK,KAAK,IAAI,SAAS,KAAK,GAAG;AACnH,WAAO;AAAA,EACT;AAEA,SAAO;AACT;AAKO,SAAS,sBAAsB,MAA6B;AACjE,SAAO,iBAAiB,MAAM,IAAI,GAAG,sBAAsB,CAAC,8BAA8B;AAC5F;AAMO,SAAS,gBACd,YACA,SACiB;AACjB,QAAM,OAAO,SAAS,QAAQ;AAC9B,QAAM,MAAM,KAAK,IAAI;AACrB,QAAM,WAAW,OAAO,KAAK,KAAK,KAAK;AAEvC,QAAM,iBAAkC,CAAC;AAGzC,QAAM,aAAa,KAAK,YAAY,UAAU,aAAa;AAC3D,MAAI,WAAW,UAAU,GAAG;AAC1B,QAAI;AACF,YAAM,SAAS,KAAK,MAAM,aAAa,YAAY,OAAO,CAAC;AAC3D,UAAI,OAAO,WAAW;AACpB,cAAM,OAAO,gBAAgB,OAAO,SAAS;AAC7C,uBAAe,KAAK;AAAA,UAClB;AAAA,UACA,WAAW,OAAO,gBAAe,oBAAI,KAAK,GAAE,YAAY;AAAA,UACxD,SAAS,OAAO;AAAA,UAChB,WAAW,OAAO,wBAAwB;AAAA,QAC5C,CAAC;AAAA,MACH;AAAA,IACF,QAAQ;A
AAA,IAER;AAAA,EACF;AAGA,QAAM,cAAc,KAAK,YAAY,UAAU,UAAU;AACzD,MAAI,WAAW,WAAW,GAAG;AAC3B,UAAM,QAAQ,YAAY,WAAW,EAClC,OAAO,CAAC,MAAM,EAAE,SAAS,KAAK,KAAK,CAAC,EAAE,WAAW,GAAG,CAAC,EACrD,KAAK,EACL,QAAQ;AAEX,eAAW,QAAQ,OAAO;AACxB,YAAM,YAAY,KAAK,MAAM,sBAAsB;AACnD,UAAI,WAAW;AACb,cAAM,WAAW,IAAI,KAAK,UAAU,CAAC,CAAC,EAAE,QAAQ;AAChD,YAAI,MAAM,WAAW,SAAU;AAAA,MACjC;AAEA,UAAI;AACF,cAAM,UAAU,aAAa,KAAK,aAAa,IAAI,GAAG,OAAO;AAE7D,cAAM,aAAa,QAAQ,MAAM,IAAI,EAAE;AAAA,UAAO,CAAC,MAC7C,EAAE,YAAY,EAAE,SAAS,OAAO,KAChC,EAAE,YAAY,EAAE,SAAS,QAAQ,KACjC,EAAE,YAAY,EAAE,SAAS,SAAS;AAAA,QACpC;AAEA,mBAAW,QAAQ,WAAW,MAAM,GAAG,CAAC,GAAG;AACzC,gBAAM,OAAO,gBAAgB,IAAI;AACjC,cAAI,SAAS,WAAW;AACtB,2BAAe,KAAK;AAAA,cAClB;AAAA,cACA,WAAW,YAAY,CAAC,KAAK;AAAA,cAC7B,WAAW,KAAK,QAAQ,OAAO,EAAE;AAAA,cACjC,SAAS,KAAK,KAAK,EAAE,MAAM,GAAG,GAAG;AAAA,cACjC,WAAW;AAAA,YACb,CAAC;AAAA,UACH;AAAA,QACF;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF;AAAA,EACF;AAGA,QAAM,gBAAwC,CAAC;AAC/C,aAAW,KAAK,gBAAgB;AAC9B,kBAAc,EAAE,IAAI,KAAK,cAAc,EAAE,IAAI,KAAK,KAAK;AAAA,EACzD;AAGA,MAAI,iBAAqC;AACzC,MAAI,UAAU;AACd,aAAW,CAAC,MAAM,KAAK,KAAK,OAAO,QAAQ,aAAa,GAAG;AACzD,QAAI,QAAQ,SAAS;AACnB,gBAAU;AACV,uBAAiB;AAAA,IACnB;AAAA,EACF;AAGA,QAAM,oBAAoB,iBACtB,sBAAsB,cAAc,IACpC,CAAC;AAGL,MAAI,oBAA0D;AAC9D,MAAI,eAAe,SAAS,GAAG;AAC7B,wBAAoB;AAAA,EACtB,WAAW,eAAe,SAAS,GAAG;AACpC,wBAAoB;AAAA,EACtB;AAEA,SAAO;AAAA,IACL,gBAAgB,eAAe,MAAM,GAAG,EAAE;AAAA,IAC1C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AA+BO,IAAM,gBAAkC;AAAA,EAC7C;AAAA,IACE,MAAM;AAAA,IACN,aAAa;AAAA,IACb,OAAO,CAAC,eAAuB;AAC7B,YAAM,SAAsB,CAAC;AAG7B,aAAO,KAAK,WAAW,KAAK,YAAY,SAAS,CAAC,IAC9C,EAAE,MAAM,WAAW,aAAa,kBAAkB,QAAQ,QAAQ,SAAS,kBAAkB,IAC7F,EAAE,MAAM,WAAW,aAAa,kBAAkB,QAAQ,QAAQ,SAAS,qDAAgD,CAAC;AAGhI,UAAI;AACF,mBAAW,UAAU;AACrB,eAAO,KAAK,EAAE,MAAM,gBAAgB,aAAa,qBAAqB,QAAQ,QAAQ,SAAS,6BAA6B,CAAC;AAAA,MAC/H,SAAS,KAAK;AACZ,eAAO,KAAK,EAAE,MAAM,gBAAgB,aAAa,qBAAqB,QAAQ,QAAQ,SAAS,iBAAiB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,GAAG,CAAC;AAAA,MACtK;AAGA,YAAM,SAAS,CAAC,EAAE,QAAQ,IAAI,sBAAsB,QAAQ,IAAI,kBAAkB,QAAQ,IAAI;AAC9F,aAAO,KAAK,SACR,EAAE,MAAM,WAAW,aAAa,qBAAqB,QAAQ,QAAQ,SAAS,+BAA+B,IAC7G,EAAE,MAAM,WAAW,aAAa,qBAAqB,QAAQ,QAAQ,SAAS,4DAAuD,CAAC;AAG1I,YAAM,SAAS,KAAK,YAAY,QAAQ;AACxC,aAAO,KAAK,WAAW,MAAM,IACzB,EAAE,MAAM,cAAc,aAAa,2BAA2B,QAAQ,QAAQ,SAAS,4BAA4B,IACnH,EAAE,MAAM,cAAc,aAAa,2BAA2B,QAAQ,QAAQ,SAAS,gEAA2D,CAAC;AAEvJ,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,aAAa;AAAA,IACb,OAAO,CAAC,eAAuB;AAC7B,YAAM,SAAsB,CAAC;AAE7B,UAAI;AACJ,UAAI;AACF,iBAAS,WAAW,UAAU;AAAA,MAChC,QAAQ;AACN,eAAO,KAAK,EAAE,MAAM,eAAe,aAAa,mBAAmB,QAAQ,QAAQ,SAAS,qBAAqB,CAAC;AAClH,eAAO;AAAA,MACT;AAGA,UAAI;AACF,cAAM,eAAe,YAAY,YAAY,OAAO,MAAM;AAC1D,cAAM,WAAY,aAAa,wBAAwB,QAAQ,aAAa,uBAAuB,KAChG,aAAa,0BAA0B,QAAQ,aAAa,yBAAyB;AACxF,YAAI,UAAU;AACZ,iBAAO,KAAK,EAAE,MAAM,UAAU,aAAa,uBAAuB,QAAQ,QAAQ,SAAS,yBAAyB,SAAS,EAAE,GAAG,aAAa,EAAE,CAAC;AAAA,QACpJ,OAAO;AACL,iBAAO,KAAK,EAAE,MAAM,UAAU,aAAa,uBAAuB,QAAQ,QAAQ,SAAS,gBAAgB,CAAC;AAAA,QAC9G;AAAA,MACF,SAAS,KAAK;AACZ,eAAO,KAAK,EAAE,MAAM,UAAU,aAAa,uBAAuB,QAAQ,QAAQ,SAAS,6BAA6B,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,GAAG,CAAC;AAAA,MAC9K;AAGA,UAAI;AACF,cAAM,SAAS,gBAAgB,MAAM;AACrC,YAAI,OAAO,WAAW,GAAG;AACvB,iBAAO,KAAK,EAAE,MAAM,cAAc,aAAa,sBAAsB,QAAQ,QAAQ,SAAS,4BAA4B,CAAC;AAAA,QAC7H,OAAO;AACL,cAAI,UAAU;AACd,qBAAW,SAAS,QAAQ;AAC1B,kBAAM,YAAY,eAAe,YAAY,KAAK;AAClD,gBAAI,CAAC,UAAU,SAAS;AACtB,oBAAM,cAAc,MAAM,aAAa,MAAS,WAAW,MAAM,aAAa,OAAY,SAAS;AACnG,qBAAO,KAAK,EAAE,MAAM,cAAc,aAAa,sBAAsB,QAAQ,QAAQ,SAAS,iBAAiB,WAAW,MAAM,UAAU,OAAO,IAAI,UAAU,GAAG,iBAAiB,KAAK,KAAK,UAAU,iBAAiB,GAAI,CAAC,IAAI,CAAC;AAClO,wBAAU;AACV;AAAA,YACF;AAAA,UACF;AACA,cAAI,CAAC,SAAS;AACZ,mBAAO,KAAK,EAAE,MAAM,
cAAc,aAAa,sBAAsB,QAAQ,QAAQ,SAAS,qBAAqB,CAAC;AAAA,UACtH;AAAA,QACF;AAAA,MACF,SAAS,KAAK;AACZ,eAAO,KAAK,EAAE,MAAM,cAAc,aAAa,sBAAsB,QAAQ,QAAQ,SAAS,iCAAiC,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,GAAG,CAAC;AAAA,MACrL;AAGA,YAAM,aAAa,KAAK,YAAY,UAAU,aAAa;AAC3D,UAAI,WAAW,UAAU,GAAG;AAC1B,YAAI;AACF,gBAAM,SAAS,KAAK,MAAM,aAAa,YAAY,OAAO,CAAC;AAC3D,cAAI,OAAO,uBAAuB,GAAG;AACnC,mBAAO,KAAK,EAAE,MAAM,UAAU,aAAa,iBAAiB,QAAQ,QAAQ,SAAS,GAAG,OAAO,mBAAmB,iCAAiC,CAAC;AAAA,UACtJ,OAAO;AACL,mBAAO,KAAK,EAAE,MAAM,UAAU,aAAa,iBAAiB,QAAQ,QAAQ,SAAS,4BAA4B,CAAC;AAAA,UACpH;AAAA,QACF,QAAQ;AACN,iBAAO,KAAK,EAAE,MAAM,UAAU,aAAa,iBAAiB,QAAQ,QAAQ,SAAS,0BAA0B,CAAC;AAAA,QAClH;AAAA,MACF;AAEA,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,aAAa;AAAA,IACb,OAAO,CAAC,eAAuB;AAC7B,YAAM,SAAsB,CAAC;AAG7B,YAAM,cAAc,KAAK,YAAY,UAAU,UAAU;AACzD,UAAI,WAAW,WAAW,GAAG;AAC3B,cAAM,QAAQ,YAAY,WAAW,EAAE,OAAO,CAAC,MAAM,EAAE,SAAS,KAAK,KAAK,CAAC,EAAE,WAAW,GAAG,CAAC;AAC5F,eAAO,KAAK;AAAA,UACV,MAAM;AAAA,UACN,aAAa;AAAA,UACb,QAAQ,MAAM,SAAS,IAAI,SAAS;AAAA,UACpC,SAAS,GAAG,MAAM,MAAM;AAAA,QAC1B,CAAC;AAAA,MACH,OAAO;AACL,eAAO,KAAK,EAAE,MAAM,qBAAqB,aAAa,2BAA2B,QAAQ,QAAQ,SAAS,wBAAwB,CAAC;AAAA,MACrI;AAGA,YAAM,OAAO,iBAAiB;AAC9B,UAAI,mBAAmB;AACvB,iBAAW,OAAO,MAAM;AACtB,cAAM,WAAW,KAAK,YAAY,GAAG;AACrC,YAAI,CAAC,WAAW,QAAQ,EAAG;AAC3B,cAAM,EAAE,OAAO,IAAI,wBAAwB,QAAQ;AACnD,4BAAoB,OAAO;AAAA,MAC7B;AAEA,aAAO,KAAK,qBAAqB,IAC7B,EAAE,MAAM,gBAAgB,aAAa,6BAA6B,QAAQ,QAAQ,SAAS,+BAA+B,IAC1H,EAAE,MAAM,gBAAgB,aAAa,6BAA6B,QAAQ,QAAQ,SAAS,GAAG,gBAAgB,gCAAgC,CAAC;AAEnJ,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,aAAa;AAAA,IACb,OAAO,CAAC,YAAoB,WAA2B;AACrD,YAAM,SAAsB,CAAC;AAG7B,UAAI;AACF,cAAM,aAAa,gBAAgB,UAAU;AAC7C,YAAI,WAAW,OAAO,SAAS,GAAG;AAChC,iBAAO,KAAK,EAAE,MAAM,aAAa,aAAa,oBAAoB,QAAQ,QAAQ,SAAS,GAAG,WAAW,OAAO,MAAM,aAAa,SAAS,EAAE,QAAQ,WAAW,OAAO,EAAE,CAAC;AAAA,QAC7K,WAAW,WAAW,SAAS,SAAS,GAAG;AACzC,iBAAO,KAAK,EAAE,MAAM,aAAa,aAAa,oBAAoB,QAAQ,QAAQ,SAAS,GAAG,WAAW,SAAS,MAAM,cAAc,CAAC;AAAA,QACzI,OAAO;AACL,iBAAO,KAAK,EAAE,MAAM,aAAa,aAAa,oBAAoB,QAAQ,QAAQ,SAAS,mBAAmB,CAAC;AAAA,QACjH;AAAA,MACF,SAAS,KAAK;AACZ,eAAO,KAAK,EAAE,MAAM,aAAa,aAAa,oBAAoB,QAAQ,QAAQ,SAAS,oBAAoB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,GAAG,CAAC;AAAA,MACrK;AAGA,YAAM,aAAa,qBAAqB,YAAY,MAAM;AAC1D,aAAO,KAAK,WAAW,KAAK,WAAW,IACnC,EAAE,MAAM,mBAAmB,aAAa,sBAAsB,QAAQ,QAAQ,SAAS,iDAAiD,IACxI,EAAE,MAAM,mBAAmB,aAAa,sBAAsB,QAAQ,QAAQ,SAAS,GAAG,WAAW,KAAK,MAAM,4BAA4B,SAAS,EAAE,MAAM,WAAW,KAAK,IAAI,CAAC,MAAM,EAAE,EAAE,EAAE,EAAE,CAAC;AAGrM,YAAM,sBAAsB,qBAAqB,UAAU;AAC3D,aAAO,KAAK,oBAAoB,eAAe,WAAW,IACtD,EAAE,MAAM,kBAAkB,aAAa,qBAAqB,QAAQ,QAAQ,SAAS,iCAAiC,IACtH,EAAE,MAAM,kBAAkB,aAAa,qBAAqB,QAAQ,QAAQ,SAAS,GAAG,oBAAoB,eAAe,MAAM,8BAA8B,CAAC;AAEpK,aAAO;AAAA,IACT;AAAA,EACF;AACF;AAMO,SAAS,QACd,UACA,YACA,QACwB;AACxB,QAAM,OAAO,cAAc,KAAK,CAAC,MAAM,EAAE,SAAS,QAAQ;AAC1D,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,MACL;AAAA,MACA,QAAQ;AAAA,MACR,QAAQ,CAAC,EAAE,MAAM,kBAAkB,aAAa,eAAe,QAAQ,QAAQ,SAAS,iBAAiB,QAAQ,GAAG,CAAC;AAAA,MACrH,SAAS,SAAS,QAAQ,2BAA2B,cAAc,IAAI,CAAC,MAAM,EAAE,IAAI,EAAE,KAAK,IAAI,CAAC;AAAA,IAClG;AAAA,EACF;AAEA,QAAM,SAAS,KAAK,MAAM,YAAY,MAAM;AAC5C,QAAM,WAAW,OAAO,KAAK,CAAC,MAAM,EAAE,WAAW,MAAM;AACvD,QAAM,WAAW,OAAO,KAAK,CAAC,MAAM,EAAE,WAAW,MAAM;AAEvD,QAAM,SAAS,CAAC;AAChB,QAAM,YAAY,OAAO,OAAO,CAAC,MAAM,EAAE,WAAW,MAAM,EAAE;AAC5D,QAAM,YAAY,OAAO,OAAO,CAAC,MAAM,EAAE,WAAW,MAAM,EAAE;AAC5D,QAAM,YAAY,OAAO,OAAO,CAAC,MAAM,EAAE,WAAW,MAAM,EAAE;AAE5D,MAAI,UAAU,GAAG,KAAK,IAAI,KAAK,SAAS;AACxC,MAAI,YAAY,EAAG,YAAW,KAAK,SAAS;AAC5C,MAAI,YAAY,EAAG,YAAW,KAAK,SAAS;AAE5C,SAAO,EAAE,UAAU,QAAQ,QAAQ,QAAQ;AAC7C;AAKO,SAAS,YACd,YACA,QAC0B;AAC1B,SAAO,cAAc,IAAI,CAAC,SAAS,QAAQ,KAAK,MAAM,YAAY,MAAM,CAAC;AA
C3E;AAKO,SAAS,YAA0D;AACxE,SAAO,cAAc,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,EAAE,YAAY,EAAE;AAChF;","names":[]}
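A note on the map above: it embeds the verification-gates source in full. The runnable surface is small. `runGate(gateName, harnessDir, config?)` returns `{ gateName, passed, checks, summary }`, and `passed` flips to false only when some check reports `status: 'fail'`; warnings and skips never block. A minimal driving sketch (the import specifier is an assumption, not something this diff confirms):

```ts
// Sketch only: the module path is hypothetical; the exports and result
// shapes match the source embedded in the map above.
import { runGate, listGates } from './runtime/verification-gates.js';

const result = runGate('pre-run', './.harness');
console.log(result.summary); // e.g. "pre-run: 3 passed, 1 warnings"

if (!result.passed) {
  // Only 'fail' checks block; 'warn' and 'skip' leave `passed` true.
  for (const check of result.checks) {
    if (check.status === 'fail') console.error(`${check.name}: ${check.message}`);
  }
  process.exit(1);
}

// listGates() enumerates the built-in gates with their descriptions.
console.log(listGates().map((g) => g.name).join(', '));
```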
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/runtime/project-discovery.ts"],"sourcesContent":["import { existsSync, readFileSync, readdirSync } from 'fs';\nimport { join, basename } from 'path';\n\n// --- Types ---\n\n/** A detected project characteristic */\nexport interface ProjectSignal {\n /** What was detected (e.g. \"TypeScript\", \"React\", \"Docker\") */\n name: string;\n /** Category of signal */\n category: 'language' | 'framework' | 'tool' | 'runtime' | 'database' | 'cloud' | 'testing';\n /** Source file that triggered the detection */\n source: string;\n /** Additional details */\n details?: string;\n}\n\n/** Suggested rule, skill, or MCP server */\nexport interface ProjectSuggestion {\n /** What type of thing to add */\n type: 'rule' | 'skill' | 'mcp-server';\n /** Human-readable suggestion */\n message: string;\n /** File to create (for rules/skills) or server query (for MCP) */\n target: string;\n /** Triggered by these signals */\n signals: string[];\n}\n\n/** Full project discovery result */\nexport interface ProjectDiscoveryResult {\n /** Detected project signals */\n signals: ProjectSignal[];\n /** Files that were examined */\n filesExamined: string[];\n /** Suggestions based on signals */\n suggestions: ProjectSuggestion[];\n}\n\n// --- Detection Functions ---\n\ninterface DetectionRule {\n file: string;\n detect: (content: string, dir: string) => ProjectSignal[];\n}\n\nfunction detectPackageJson(content: string): ProjectSignal[] {\n const signals: ProjectSignal[] = [];\n\n let pkg: Record<string, unknown>;\n try {\n pkg = JSON.parse(content);\n } catch {\n return signals;\n }\n\n const allDeps = {\n ...(pkg.dependencies as Record<string, string> | undefined),\n ...(pkg.devDependencies as Record<string, string> | undefined),\n };\n\n // Language\n if (allDeps['typescript'] || existsSync('tsconfig.json')) {\n signals.push({ name: 'TypeScript', category: 'language', source: 'package.json' });\n }\n\n // Frameworks\n const frameworks: Record<string, { name: string; category: ProjectSignal['category'] }> = {\n 'react': { name: 'React', category: 'framework' },\n 'next': { name: 'Next.js', category: 'framework' },\n 'vue': { name: 'Vue', category: 'framework' },\n 'nuxt': { name: 'Nuxt', category: 'framework' },\n 'svelte': { name: 'Svelte', category: 'framework' },\n '@angular/core': { name: 'Angular', category: 'framework' },\n 'express': { name: 'Express', category: 'framework' },\n 'fastify': { name: 'Fastify', category: 'framework' },\n 'hono': { name: 'Hono', category: 'framework' },\n 'astro': { name: 'Astro', category: 'framework' },\n 'remix': { name: 'Remix', category: 'framework' },\n '@remix-run/node': { name: 'Remix', category: 'framework' },\n 'electron': { name: 'Electron', category: 'framework' },\n };\n\n for (const [dep, info] of Object.entries(frameworks)) {\n if (allDeps[dep]) {\n signals.push({ name: info.name, category: info.category, source: 'package.json', details: `v${allDeps[dep]}` });\n }\n }\n\n // Testing\n const testLibs: Record<string, string> = {\n 'vitest': 'Vitest',\n 'jest': 'Jest',\n 'mocha': 'Mocha',\n '@playwright/test': 'Playwright',\n 'cypress': 'Cypress',\n };\n\n for (const [dep, name] of Object.entries(testLibs)) {\n if (allDeps[dep]) {\n signals.push({ name, category: 'testing', source: 'package.json' });\n }\n }\n\n // Databases\n const dbLibs: Record<string, string> = {\n 'prisma': 'Prisma',\n '@prisma/client': 'Prisma',\n 'drizzle-orm': 'Drizzle',\n 'mongoose': 'MongoDB (Mongoose)',\n 'pg': 'PostgreSQL',\n 'mysql2': 'MySQL',\n 
'better-sqlite3': 'SQLite',\n 'redis': 'Redis',\n 'ioredis': 'Redis',\n };\n\n for (const [dep, name] of Object.entries(dbLibs)) {\n if (allDeps[dep]) {\n signals.push({ name, category: 'database', source: 'package.json' });\n }\n }\n\n // Tools\n const tools: Record<string, string> = {\n 'eslint': 'ESLint',\n 'prettier': 'Prettier',\n 'tailwindcss': 'Tailwind CSS',\n 'storybook': 'Storybook',\n '@storybook/react': 'Storybook',\n 'docker-compose': 'Docker Compose',\n };\n\n for (const [dep, name] of Object.entries(tools)) {\n if (allDeps[dep]) {\n signals.push({ name, category: 'tool', source: 'package.json' });\n }\n }\n\n return signals;\n}\n\nfunction detectFromFiles(dir: string): ProjectSignal[] {\n const signals: ProjectSignal[] = [];\n const entries = new Set<string>();\n\n try {\n for (const e of readdirSync(dir)) {\n entries.add(e);\n }\n } catch {\n return signals;\n }\n\n // Config files\n if (entries.has('Dockerfile') || entries.has('docker-compose.yml') || entries.has('docker-compose.yaml')) {\n signals.push({ name: 'Docker', category: 'runtime', source: 'Dockerfile' });\n }\n\n if (entries.has('.github')) {\n signals.push({ name: 'GitHub Actions', category: 'tool', source: '.github/' });\n }\n\n if (entries.has('Makefile')) {\n signals.push({ name: 'Make', category: 'tool', source: 'Makefile' });\n }\n\n if (entries.has('pyproject.toml') || entries.has('setup.py') || entries.has('requirements.txt')) {\n signals.push({ name: 'Python', category: 'language', source: 'pyproject.toml' });\n }\n\n if (entries.has('Cargo.toml')) {\n signals.push({ name: 'Rust', category: 'language', source: 'Cargo.toml' });\n }\n\n if (entries.has('go.mod')) {\n signals.push({ name: 'Go', category: 'language', source: 'go.mod' });\n }\n\n if (entries.has('Gemfile')) {\n signals.push({ name: 'Ruby', category: 'language', source: 'Gemfile' });\n }\n\n if (entries.has('.terraform') || entries.has('main.tf')) {\n signals.push({ name: 'Terraform', category: 'cloud', source: 'main.tf' });\n }\n\n if (entries.has('serverless.yml') || entries.has('serverless.yaml')) {\n signals.push({ name: 'Serverless Framework', category: 'cloud', source: 'serverless.yml' });\n }\n\n if (entries.has('vercel.json')) {\n signals.push({ name: 'Vercel', category: 'cloud', source: 'vercel.json' });\n }\n\n if (entries.has('netlify.toml')) {\n signals.push({ name: 'Netlify', category: 'cloud', source: 'netlify.toml' });\n }\n\n if (entries.has('wrangler.toml') || entries.has('wrangler.jsonc')) {\n signals.push({ name: 'Cloudflare Workers', category: 'cloud', source: 'wrangler.toml' });\n }\n\n if (entries.has('.prisma') || entries.has('prisma')) {\n signals.push({ name: 'Prisma', category: 'database', source: 'prisma/' });\n }\n\n if (entries.has('supabase')) {\n signals.push({ name: 'Supabase', category: 'database', source: 'supabase/' });\n }\n\n return signals;\n}\n\n// --- Suggestion Engine ---\n\ninterface SuggestionRule {\n signals: string[];\n type: ProjectSuggestion['type'];\n message: string;\n target: string;\n}\n\nconst SUGGESTION_RULES: SuggestionRule[] = [\n {\n signals: ['TypeScript'],\n type: 'rule',\n message: 'Add a TypeScript coding standards rule',\n target: 'rules/typescript-standards.md',\n },\n {\n signals: ['React'],\n type: 'rule',\n message: 'Add React component patterns rule',\n target: 'rules/react-patterns.md',\n },\n {\n signals: ['Next.js'],\n type: 'skill',\n message: 'Add Next.js development skill',\n target: 'skills/nextjs.md',\n },\n {\n signals: ['Docker'],\n type: 'rule',\n message: 'Add 
Docker/containerization rule',\n target: 'rules/docker.md',\n },\n {\n signals: ['GitHub Actions'],\n type: 'skill',\n message: 'Add CI/CD pipeline skill',\n target: 'skills/ci-cd.md',\n },\n {\n signals: ['PostgreSQL', 'Prisma'],\n type: 'mcp-server',\n message: 'Install PostgreSQL MCP server for database access',\n target: 'postgres',\n },\n {\n signals: ['Supabase'],\n type: 'mcp-server',\n message: 'Install Supabase MCP server',\n target: 'supabase',\n },\n {\n signals: ['ESLint'],\n type: 'rule',\n message: 'Add linting standards rule',\n target: 'rules/linting.md',\n },\n {\n signals: ['Vitest', 'Jest'],\n type: 'rule',\n message: 'Add testing standards rule',\n target: 'rules/testing.md',\n },\n {\n signals: ['Tailwind CSS'],\n type: 'rule',\n message: 'Add styling conventions rule',\n target: 'rules/styling.md',\n },\n];\n\nfunction generateSuggestions(signals: ProjectSignal[]): ProjectSuggestion[] {\n const signalNames = new Set(signals.map((s) => s.name));\n const suggestions: ProjectSuggestion[] = [];\n\n for (const rule of SUGGESTION_RULES) {\n // Check if ANY of the required signals are present\n const matchedSignals = rule.signals.filter((s) => signalNames.has(s));\n if (matchedSignals.length > 0) {\n suggestions.push({\n type: rule.type,\n message: rule.message,\n target: rule.target,\n signals: matchedSignals,\n });\n }\n }\n\n return suggestions;\n}\n\n// --- Main Discovery ---\n\n/** Options for project discovery */\nexport interface ProjectDiscoveryOptions {\n /** Project directory to scan */\n dir?: string;\n}\n\n/**\n * Scan a project directory to detect its technology stack and suggest\n * rules, skills, and MCP servers.\n */\nexport function discoverProjectContext(options?: ProjectDiscoveryOptions): ProjectDiscoveryResult {\n const dir = options?.dir ?? 
process.cwd();\n const signals: ProjectSignal[] = [];\n const filesExamined: string[] = [];\n\n // Scan package.json\n const packageJsonPath = join(dir, 'package.json');\n if (existsSync(packageJsonPath)) {\n filesExamined.push(packageJsonPath);\n const content = readFileSync(packageJsonPath, 'utf-8');\n signals.push(...detectPackageJson(content));\n }\n\n // Scan directory for files/folders\n filesExamined.push(dir);\n signals.push(...detectFromFiles(dir));\n\n // Deduplicate signals by name\n const seen = new Set<string>();\n const uniqueSignals = signals.filter((s) => {\n if (seen.has(s.name)) return false;\n seen.add(s.name);\n return true;\n });\n\n // Generate suggestions\n const suggestions = generateSuggestions(uniqueSignals);\n\n return {\n signals: uniqueSignals,\n filesExamined,\n suggestions,\n };\n}\n"],"mappings":";;;;;AAAA,SAAS,YAAY,cAAc,mBAAmB;AACtD,SAAS,YAAsB;AA6C/B,SAAS,kBAAkB,SAAkC;AAC3D,QAAM,UAA2B,CAAC;AAElC,MAAI;AACJ,MAAI;AACF,UAAM,KAAK,MAAM,OAAO;AAAA,EAC1B,QAAQ;AACN,WAAO;AAAA,EACT;AAEA,QAAM,UAAU;AAAA,IACd,GAAI,IAAI;AAAA,IACR,GAAI,IAAI;AAAA,EACV;AAGA,MAAI,QAAQ,YAAY,KAAK,WAAW,eAAe,GAAG;AACxD,YAAQ,KAAK,EAAE,MAAM,cAAc,UAAU,YAAY,QAAQ,eAAe,CAAC;AAAA,EACnF;AAGA,QAAM,aAAoF;AAAA,IACxF,SAAS,EAAE,MAAM,SAAS,UAAU,YAAY;AAAA,IAChD,QAAQ,EAAE,MAAM,WAAW,UAAU,YAAY;AAAA,IACjD,OAAO,EAAE,MAAM,OAAO,UAAU,YAAY;AAAA,IAC5C,QAAQ,EAAE,MAAM,QAAQ,UAAU,YAAY;AAAA,IAC9C,UAAU,EAAE,MAAM,UAAU,UAAU,YAAY;AAAA,IAClD,iBAAiB,EAAE,MAAM,WAAW,UAAU,YAAY;AAAA,IAC1D,WAAW,EAAE,MAAM,WAAW,UAAU,YAAY;AAAA,IACpD,WAAW,EAAE,MAAM,WAAW,UAAU,YAAY;AAAA,IACpD,QAAQ,EAAE,MAAM,QAAQ,UAAU,YAAY;AAAA,IAC9C,SAAS,EAAE,MAAM,SAAS,UAAU,YAAY;AAAA,IAChD,SAAS,EAAE,MAAM,SAAS,UAAU,YAAY;AAAA,IAChD,mBAAmB,EAAE,MAAM,SAAS,UAAU,YAAY;AAAA,IAC1D,YAAY,EAAE,MAAM,YAAY,UAAU,YAAY;AAAA,EACxD;AAEA,aAAW,CAAC,KAAK,IAAI,KAAK,OAAO,QAAQ,UAAU,GAAG;AACpD,QAAI,QAAQ,GAAG,GAAG;AAChB,cAAQ,KAAK,EAAE,MAAM,KAAK,MAAM,UAAU,KAAK,UAAU,QAAQ,gBAAgB,SAAS,IAAI,QAAQ,GAAG,CAAC,GAAG,CAAC;AAAA,IAChH;AAAA,EACF;AAGA,QAAM,WAAmC;AAAA,IACvC,UAAU;AAAA,IACV,QAAQ;AAAA,IACR,SAAS;AAAA,IACT,oBAAoB;AAAA,IACpB,WAAW;AAAA,EACb;AAEA,aAAW,CAAC,KAAK,IAAI,KAAK,OAAO,QAAQ,QAAQ,GAAG;AAClD,QAAI,QAAQ,GAAG,GAAG;AAChB,cAAQ,KAAK,EAAE,MAAM,UAAU,WAAW,QAAQ,eAAe,CAAC;AAAA,IACpE;AAAA,EACF;AAGA,QAAM,SAAiC;AAAA,IACrC,UAAU;AAAA,IACV,kBAAkB;AAAA,IAClB,eAAe;AAAA,IACf,YAAY;AAAA,IACZ,MAAM;AAAA,IACN,UAAU;AAAA,IACV,kBAAkB;AAAA,IAClB,SAAS;AAAA,IACT,WAAW;AAAA,EACb;AAEA,aAAW,CAAC,KAAK,IAAI,KAAK,OAAO,QAAQ,MAAM,GAAG;AAChD,QAAI,QAAQ,GAAG,GAAG;AAChB,cAAQ,KAAK,EAAE,MAAM,UAAU,YAAY,QAAQ,eAAe,CAAC;AAAA,IACrE;AAAA,EACF;AAGA,QAAM,QAAgC;AAAA,IACpC,UAAU;AAAA,IACV,YAAY;AAAA,IACZ,eAAe;AAAA,IACf,aAAa;AAAA,IACb,oBAAoB;AAAA,IACpB,kBAAkB;AAAA,EACpB;AAEA,aAAW,CAAC,KAAK,IAAI,KAAK,OAAO,QAAQ,KAAK,GAAG;AAC/C,QAAI,QAAQ,GAAG,GAAG;AAChB,cAAQ,KAAK,EAAE,MAAM,UAAU,QAAQ,QAAQ,eAAe,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,gBAAgB,KAA8B;AACrD,QAAM,UAA2B,CAAC;AAClC,QAAM,UAAU,oBAAI,IAAY;AAEhC,MAAI;AACF,eAAW,KAAK,YAAY,GAAG,GAAG;AAChC,cAAQ,IAAI,CAAC;AAAA,IACf;AAAA,EACF,QAAQ;AACN,WAAO;AAAA,EACT;AAGA,MAAI,QAAQ,IAAI,YAAY,KAAK,QAAQ,IAAI,oBAAoB,KAAK,QAAQ,IAAI,qBAAqB,GAAG;AACxG,YAAQ,KAAK,EAAE,MAAM,UAAU,UAAU,WAAW,QAAQ,aAAa,CAAC;AAAA,EAC5E;AAEA,MAAI,QAAQ,IAAI,SAAS,GAAG;AAC1B,YAAQ,KAAK,EAAE,MAAM,kBAAkB,UAAU,QAAQ,QAAQ,WAAW,CAAC;AAAA,EAC/E;AAEA,MAAI,QAAQ,IAAI,UAAU,GAAG;AAC3B,YAAQ,KAAK,EAAE,MAAM,QAAQ,UAAU,QAAQ,QAAQ,WAAW,CAAC;AAAA,EACrE;AAEA,MAAI,QAAQ,IAAI,gBAAgB,KAAK,QAAQ,IAAI,UAAU,KAAK,QAAQ,IAAI,kBAAkB,GAAG;AAC/F,YAAQ,KAAK,EAAE,MAAM,UAAU,UAAU,YAAY,QAAQ,iBAAiB,CAAC;AAAA,EACjF;AAEA,MAAI,QAAQ,IAAI,YAAY,GAAG;AAC7B,YAAQ,KAAK,EAAE,MAAM,QAAQ,UAAU,YAAY,QAAQ,aAAa,CAAC;AAAA,E
AC3E;AAEA,MAAI,QAAQ,IAAI,QAAQ,GAAG;AACzB,YAAQ,KAAK,EAAE,MAAM,MAAM,UAAU,YAAY,QAAQ,SAAS,CAAC;AAAA,EACrE;AAEA,MAAI,QAAQ,IAAI,SAAS,GAAG;AAC1B,YAAQ,KAAK,EAAE,MAAM,QAAQ,UAAU,YAAY,QAAQ,UAAU,CAAC;AAAA,EACxE;AAEA,MAAI,QAAQ,IAAI,YAAY,KAAK,QAAQ,IAAI,SAAS,GAAG;AACvD,YAAQ,KAAK,EAAE,MAAM,aAAa,UAAU,SAAS,QAAQ,UAAU,CAAC;AAAA,EAC1E;AAEA,MAAI,QAAQ,IAAI,gBAAgB,KAAK,QAAQ,IAAI,iBAAiB,GAAG;AACnE,YAAQ,KAAK,EAAE,MAAM,wBAAwB,UAAU,SAAS,QAAQ,iBAAiB,CAAC;AAAA,EAC5F;AAEA,MAAI,QAAQ,IAAI,aAAa,GAAG;AAC9B,YAAQ,KAAK,EAAE,MAAM,UAAU,UAAU,SAAS,QAAQ,cAAc,CAAC;AAAA,EAC3E;AAEA,MAAI,QAAQ,IAAI,cAAc,GAAG;AAC/B,YAAQ,KAAK,EAAE,MAAM,WAAW,UAAU,SAAS,QAAQ,eAAe,CAAC;AAAA,EAC7E;AAEA,MAAI,QAAQ,IAAI,eAAe,KAAK,QAAQ,IAAI,gBAAgB,GAAG;AACjE,YAAQ,KAAK,EAAE,MAAM,sBAAsB,UAAU,SAAS,QAAQ,gBAAgB,CAAC;AAAA,EACzF;AAEA,MAAI,QAAQ,IAAI,SAAS,KAAK,QAAQ,IAAI,QAAQ,GAAG;AACnD,YAAQ,KAAK,EAAE,MAAM,UAAU,UAAU,YAAY,QAAQ,UAAU,CAAC;AAAA,EAC1E;AAEA,MAAI,QAAQ,IAAI,UAAU,GAAG;AAC3B,YAAQ,KAAK,EAAE,MAAM,YAAY,UAAU,YAAY,QAAQ,YAAY,CAAC;AAAA,EAC9E;AAEA,SAAO;AACT;AAWA,IAAM,mBAAqC;AAAA,EACzC;AAAA,IACE,SAAS,CAAC,YAAY;AAAA,IACtB,MAAM;AAAA,IACN,SAAS;AAAA,IACT,QAAQ;AAAA,EACV;AAAA,EACA;AAAA,IACE,SAAS,CAAC,OAAO;AAAA,IACjB,MAAM;AAAA,IACN,SAAS;AAAA,IACT,QAAQ;AAAA,EACV;AAAA,EACA;AAAA,IACE,SAAS,CAAC,SAAS;AAAA,IACnB,MAAM;AAAA,IACN,SAAS;AAAA,IACT,QAAQ;AAAA,EACV;AAAA,EACA;AAAA,IACE,SAAS,CAAC,QAAQ;AAAA,IAClB,MAAM;AAAA,IACN,SAAS;AAAA,IACT,QAAQ;AAAA,EACV;AAAA,EACA;AAAA,IACE,SAAS,CAAC,gBAAgB;AAAA,IAC1B,MAAM;AAAA,IACN,SAAS;AAAA,IACT,QAAQ;AAAA,EACV;AAAA,EACA;AAAA,IACE,SAAS,CAAC,cAAc,QAAQ;AAAA,IAChC,MAAM;AAAA,IACN,SAAS;AAAA,IACT,QAAQ;AAAA,EACV;AAAA,EACA;AAAA,IACE,SAAS,CAAC,UAAU;AAAA,IACpB,MAAM;AAAA,IACN,SAAS;AAAA,IACT,QAAQ;AAAA,EACV;AAAA,EACA;AAAA,IACE,SAAS,CAAC,QAAQ;AAAA,IAClB,MAAM;AAAA,IACN,SAAS;AAAA,IACT,QAAQ;AAAA,EACV;AAAA,EACA;AAAA,IACE,SAAS,CAAC,UAAU,MAAM;AAAA,IAC1B,MAAM;AAAA,IACN,SAAS;AAAA,IACT,QAAQ;AAAA,EACV;AAAA,EACA;AAAA,IACE,SAAS,CAAC,cAAc;AAAA,IACxB,MAAM;AAAA,IACN,SAAS;AAAA,IACT,QAAQ;AAAA,EACV;AACF;AAEA,SAAS,oBAAoB,SAA+C;AAC1E,QAAM,cAAc,IAAI,IAAI,QAAQ,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC;AACtD,QAAM,cAAmC,CAAC;AAE1C,aAAW,QAAQ,kBAAkB;AAEnC,UAAM,iBAAiB,KAAK,QAAQ,OAAO,CAAC,MAAM,YAAY,IAAI,CAAC,CAAC;AACpE,QAAI,eAAe,SAAS,GAAG;AAC7B,kBAAY,KAAK;AAAA,QACf,MAAM,KAAK;AAAA,QACX,SAAS,KAAK;AAAA,QACd,QAAQ,KAAK;AAAA,QACb,SAAS;AAAA,MACX,CAAC;AAAA,IACH;AAAA,EACF;AAEA,SAAO;AACT;AAcO,SAAS,uBAAuB,SAA2D;AAChG,QAAM,MAAM,SAAS,OAAO,QAAQ,IAAI;AACxC,QAAM,UAA2B,CAAC;AAClC,QAAM,gBAA0B,CAAC;AAGjC,QAAM,kBAAkB,KAAK,KAAK,cAAc;AAChD,MAAI,WAAW,eAAe,GAAG;AAC/B,kBAAc,KAAK,eAAe;AAClC,UAAM,UAAU,aAAa,iBAAiB,OAAO;AACrD,YAAQ,KAAK,GAAG,kBAAkB,OAAO,CAAC;AAAA,EAC5C;AAGA,gBAAc,KAAK,GAAG;AACtB,UAAQ,KAAK,GAAG,gBAAgB,GAAG,CAAC;AAGpC,QAAM,OAAO,oBAAI,IAAY;AAC7B,QAAM,gBAAgB,QAAQ,OAAO,CAAC,MAAM;AAC1C,QAAI,KAAK,IAAI,EAAE,IAAI,EAAG,QAAO;AAC7B,SAAK,IAAI,EAAE,IAAI;AACf,WAAO;AAAA,EACT,CAAC;AAGD,QAAM,cAAc,oBAAoB,aAAa;AAErD,SAAO;AAAA,IACL,SAAS;AAAA,IACT;AAAA,IACA;AAAA,EACF;AACF;","names":[]}
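The deleted map above carries `src/runtime/project-discovery.ts` verbatim: `discoverProjectContext` scans a directory for stack signals (package.json dependencies plus marker files such as Dockerfile or go.mod), deduplicates them by name, and turns them into rule/skill/MCP-server suggestions, where a suggestion fires if any one of its trigger signals is present. A usage sketch, with the module path assumed from the map's `sources` entry:

```ts
// Sketch: specifier assumed from the map's "sources" field; the
// result shape matches the embedded source.
import { discoverProjectContext } from './runtime/project-discovery.js';

const { signals, suggestions } = discoverProjectContext({
  dir: process.cwd(), // also the default when the option is omitted
});

// Signals are deduplicated by name, e.g.
// { name: 'TypeScript', category: 'language', source: 'package.json' }
for (const s of signals) {
  console.log(`${s.category}: ${s.name} (via ${s.source})`);
}

// A suggestion fires when ANY of its trigger signals matched.
for (const s of suggestions) {
  console.log(`${s.type} -> ${s.target}: ${s.message}`);
}
```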
@@ -1,26 +0,0 @@
- import {
-   generate,
-   generateWithMessages,
-   getFastModel,
-   getModel,
-   getProvider,
-   getSummaryModel,
-   resetProvider,
-   streamGenerate,
-   streamGenerateWithDetails,
-   streamWithMessages
- } from "./chunk-FD55B3IO.js";
- import "./chunk-DGUM43GV.js";
- export {
-   generate,
-   generateWithMessages,
-   getFastModel,
-   getModel,
-   getProvider,
-   getSummaryModel,
-   resetProvider,
-   streamGenerate,
-   streamGenerateWithDetails,
-   streamWithMessages
- };
- //# sourceMappingURL=provider-LQHQX7Z7.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/runtime/search.ts"],"sourcesContent":["import { existsSync } from 'fs';\nimport { join, basename, relative } from 'path';\nimport { loadDirectory } from '../primitives/loader.js';\nimport { getPrimitiveDirs } from '../core/types.js';\nimport type { HarnessConfig, HarnessDocument } from '../core/types.js';\n\nexport interface SearchOptions {\n /** Filter by tag (case-insensitive) */\n tag?: string;\n /** Filter by primitive type directory (e.g., \"rules\", \"skills\") */\n type?: string;\n /** Filter by status (e.g., \"active\", \"draft\") */\n status?: string;\n /** Filter by author (e.g., \"human\", \"agent\") */\n author?: string;\n}\n\nexport interface SearchResult {\n doc: HarnessDocument;\n directory: string;\n matchReason: string;\n}\n\n/**\n * Search primitives across all directories by query text and/or filters.\n * Query matches against: id, tags, L0 summary, L1 summary, body content.\n */\nexport function searchPrimitives(\n harnessDir: string,\n query?: string,\n options?: SearchOptions,\n config?: HarnessConfig,\n): SearchResult[] {\n const results: SearchResult[] = [];\n const dirs = getPrimitiveDirs(config);\n const queryLower = query?.toLowerCase();\n\n for (const dir of dirs) {\n // Filter by type directory if specified\n if (options?.type) {\n const typeNorm = options.type.toLowerCase();\n // Accept both singular (\"rule\") and plural (\"rules\")\n if (dir !== typeNorm && dir !== typeNorm + 's' && dir.replace(/s$/, '') !== typeNorm) {\n continue;\n }\n }\n\n const fullPath = join(harnessDir, dir);\n if (!existsSync(fullPath)) continue;\n\n const docs = loadDirectory(fullPath);\n\n for (const doc of docs) {\n // Filter by status\n if (options?.status && doc.frontmatter.status !== options.status) continue;\n\n // Filter by author\n if (options?.author && doc.frontmatter.author !== options.author) continue;\n\n // Filter by tag\n if (options?.tag) {\n const tagLower = options.tag.toLowerCase();\n const hasTag = doc.frontmatter.tags.some((t) => t.toLowerCase() === tagLower);\n if (!hasTag) continue;\n }\n\n // Match query text\n if (queryLower) {\n const matchReason = matchDocument(doc, queryLower);\n if (!matchReason) continue;\n results.push({ doc, directory: dir, matchReason });\n } else {\n // No query — return all matching filters\n results.push({ doc, directory: dir, matchReason: 'filter match' });\n }\n }\n }\n\n return results;\n}\n\nfunction matchDocument(doc: HarnessDocument, queryLower: string): string | null {\n // Check id\n if (doc.frontmatter.id.toLowerCase().includes(queryLower)) {\n return `id: ${doc.frontmatter.id}`;\n }\n\n // Check tags\n for (const tag of doc.frontmatter.tags) {\n if (tag.toLowerCase().includes(queryLower)) {\n return `tag: ${tag}`;\n }\n }\n\n // Check L0\n if (doc.l0.toLowerCase().includes(queryLower)) {\n return `L0: ${doc.l0.slice(0, 80)}`;\n }\n\n // Check L1\n if (doc.l1.toLowerCase().includes(queryLower)) {\n return `L1 match`;\n }\n\n // Check body content\n const bodyLower = doc.body.toLowerCase();\n const idx = bodyLower.indexOf(queryLower);\n if (idx !== -1) {\n const start = Math.max(0, idx - 20);\n const end = Math.min(bodyLower.length, idx + queryLower.length + 30);\n const snippet = doc.body.slice(start, end).replace(/\\n/g, ' ').trim();\n return `body: ...${snippet}...`;\n }\n\n return 
null;\n}\n"],"mappings":";;;;;;;;;;;AAAA,SAAS,kBAAkB;AAC3B,SAAS,YAAgC;AA0BlC,SAAS,iBACd,YACA,OACA,SACA,QACgB;AAChB,QAAM,UAA0B,CAAC;AACjC,QAAM,OAAO,iBAAiB,MAAM;AACpC,QAAM,aAAa,OAAO,YAAY;AAEtC,aAAW,OAAO,MAAM;AAEtB,QAAI,SAAS,MAAM;AACjB,YAAM,WAAW,QAAQ,KAAK,YAAY;AAE1C,UAAI,QAAQ,YAAY,QAAQ,WAAW,OAAO,IAAI,QAAQ,MAAM,EAAE,MAAM,UAAU;AACpF;AAAA,MACF;AAAA,IACF;AAEA,UAAM,WAAW,KAAK,YAAY,GAAG;AACrC,QAAI,CAAC,WAAW,QAAQ,EAAG;AAE3B,UAAM,OAAO,cAAc,QAAQ;AAEnC,eAAW,OAAO,MAAM;AAEtB,UAAI,SAAS,UAAU,IAAI,YAAY,WAAW,QAAQ,OAAQ;AAGlE,UAAI,SAAS,UAAU,IAAI,YAAY,WAAW,QAAQ,OAAQ;AAGlE,UAAI,SAAS,KAAK;AAChB,cAAM,WAAW,QAAQ,IAAI,YAAY;AACzC,cAAM,SAAS,IAAI,YAAY,KAAK,KAAK,CAAC,MAAM,EAAE,YAAY,MAAM,QAAQ;AAC5E,YAAI,CAAC,OAAQ;AAAA,MACf;AAGA,UAAI,YAAY;AACd,cAAM,cAAc,cAAc,KAAK,UAAU;AACjD,YAAI,CAAC,YAAa;AAClB,gBAAQ,KAAK,EAAE,KAAK,WAAW,KAAK,YAAY,CAAC;AAAA,MACnD,OAAO;AAEL,gBAAQ,KAAK,EAAE,KAAK,WAAW,KAAK,aAAa,eAAe,CAAC;AAAA,MACnE;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,SAAS,cAAc,KAAsB,YAAmC;AAE9E,MAAI,IAAI,YAAY,GAAG,YAAY,EAAE,SAAS,UAAU,GAAG;AACzD,WAAO,OAAO,IAAI,YAAY,EAAE;AAAA,EAClC;AAGA,aAAW,OAAO,IAAI,YAAY,MAAM;AACtC,QAAI,IAAI,YAAY,EAAE,SAAS,UAAU,GAAG;AAC1C,aAAO,QAAQ,GAAG;AAAA,IACpB;AAAA,EACF;AAGA,MAAI,IAAI,GAAG,YAAY,EAAE,SAAS,UAAU,GAAG;AAC7C,WAAO,OAAO,IAAI,GAAG,MAAM,GAAG,EAAE,CAAC;AAAA,EACnC;AAGA,MAAI,IAAI,GAAG,YAAY,EAAE,SAAS,UAAU,GAAG;AAC7C,WAAO;AAAA,EACT;AAGA,QAAM,YAAY,IAAI,KAAK,YAAY;AACvC,QAAM,MAAM,UAAU,QAAQ,UAAU;AACxC,MAAI,QAAQ,IAAI;AACd,UAAM,QAAQ,KAAK,IAAI,GAAG,MAAM,EAAE;AAClC,UAAM,MAAM,KAAK,IAAI,UAAU,QAAQ,MAAM,WAAW,SAAS,EAAE;AACnE,UAAM,UAAU,IAAI,KAAK,MAAM,OAAO,GAAG,EAAE,QAAQ,OAAO,GAAG,EAAE,KAAK;AACpE,WAAO,YAAY,OAAO;AAAA,EAC5B;AAEA,SAAO;AACT;","names":[]}
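This map embeds `src/runtime/search.ts`. `searchPrimitives` is the plain-text counterpart to the semantic index: a query is matched against id, tags, the L0 and L1 summaries, and finally the body (which yields a snippet as the match reason), and the options filter by tag, status, author, or type, where type accepts both singular and plural directory names. A sketch, with the module path again assumed from the map:

```ts
// Sketch: module path assumed from the map's "sources" field.
import { searchPrimitives } from './runtime/search.js';

// Matches are tried in order: id, tags, L0, L1, then body (the body
// case returns a "...snippet..." as the matchReason).
const hits = searchPrimitives('./.harness', 'retry', {
  type: 'rule',   // singular or plural both accepted
  status: 'active',
});

for (const { doc, directory, matchReason } of hits) {
  console.log(`${directory}/${doc.frontmatter.id}: ${matchReason}`);
}
```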
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/runtime/semantic-search.ts"],"sourcesContent":["import { existsSync, readFileSync, writeFileSync, mkdirSync, statSync } from 'fs';\nimport { join } from 'path';\nimport { loadAllPrimitives, estimateTokens, getAtLevel } from '../primitives/loader.js';\nimport type { HarnessDocument, HarnessConfig } from '../core/types.js';\nimport { withFileLockSync } from './file-lock.js';\n\n// ─── Types ───────────────────────────────────────────────────────────────────\n\n/** A stored embedding for a single primitive document. */\nexport interface EmbeddingRecord {\n /** Document ID from frontmatter */\n id: string;\n /** Path to the source markdown file */\n path: string;\n /** Primitive directory (rules, skills, etc.) */\n directory: string;\n /** Text that was embedded (L0 + L1 + tags) */\n embeddedText: string;\n /** The embedding vector */\n vector: number[];\n /** File modification time (to detect stale embeddings) */\n mtime: string;\n /** When the embedding was generated */\n createdAt: string;\n}\n\n/** Embedding store format — persisted as JSON. */\nexport interface EmbeddingStore {\n /** Model ID used for embeddings (invalidate cache if changed) */\n modelId: string;\n /** Embedding vector dimension */\n dimensions: number;\n /** Map of document ID → embedding record */\n records: Record<string, EmbeddingRecord>;\n /** Last full index time */\n lastIndexedAt: string;\n}\n\n/** Result of a semantic search query. */\nexport interface SemanticSearchResult {\n doc: HarnessDocument;\n directory: string;\n /** Cosine similarity score (0-1, higher is more relevant) */\n score: number;\n /** The embedded text that matched */\n embeddedText: string;\n}\n\n/** Function signature for embedding text → vector. */\nexport type EmbedFunction = (texts: string[]) => Promise<number[][]>;\n\n/** Configuration for the semantic search module. */\nexport interface SemanticSearchConfig {\n /** Function to embed text (wraps Vercel AI SDK embed/embedMany) */\n embed: EmbedFunction;\n /** Embedding model identifier (for cache invalidation) */\n modelId: string;\n /** Maximum results to return (default: 10) */\n maxResults?: number;\n /** Minimum similarity threshold (default: 0.3) */\n minScore?: number;\n}\n\n// ─── Constants ───────────────────────────────────────────────────────────────\n\nconst STORE_FILE = 'embeddings.json';\nconst STORE_DIR = 'memory';\n\n// ─── Store Management ────────────────────────────────────────────────────────\n\n/** Load the embedding store from disk. Returns null if not found or invalid. */\nexport function loadEmbeddingStore(harnessDir: string): EmbeddingStore | null {\n const storePath = join(harnessDir, STORE_DIR, STORE_FILE);\n if (!existsSync(storePath)) return null;\n\n try {\n const raw = readFileSync(storePath, 'utf-8');\n return JSON.parse(raw) as EmbeddingStore;\n } catch {\n return null;\n }\n}\n\n/** Save the embedding store to disk. 
*/\nexport function saveEmbeddingStore(harnessDir: string, store: EmbeddingStore): void {\n const storeDir = join(harnessDir, STORE_DIR);\n if (!existsSync(storeDir)) {\n mkdirSync(storeDir, { recursive: true });\n }\n\n const storePath = join(storeDir, STORE_FILE);\n withFileLockSync(harnessDir, storePath, () => {\n writeFileSync(storePath, JSON.stringify(store), 'utf-8');\n });\n}\n\n// ─── Text Extraction ─────────────────────────────────────────────────────────\n\n/**\n * Extract embeddable text from a document.\n * Combines: tags, L0 summary, L1 summary, and first 500 chars of body.\n * This gives a compact representation for embedding.\n */\nexport function extractEmbeddableText(doc: HarnessDocument): string {\n const parts: string[] = [];\n\n // Tags provide topical context\n if (doc.frontmatter.tags.length > 0) {\n parts.push(`Tags: ${doc.frontmatter.tags.join(', ')}`);\n }\n\n // L0 — one-liner\n if (doc.l0) {\n parts.push(doc.l0);\n }\n\n // L1 — paragraph summary\n if (doc.l1) {\n parts.push(doc.l1);\n }\n\n // Truncated body for additional context\n const bodyPreview = doc.body.slice(0, 500).trim();\n if (bodyPreview) {\n parts.push(bodyPreview);\n }\n\n return parts.join('\\n').trim();\n}\n\n// ─── Indexing ────────────────────────────────────────────────────────────────\n\n/**\n * Detect which primitives need re-embedding.\n * A primitive is stale if:\n * - It doesn't exist in the store\n * - Its file mtime has changed since last embedding\n * - The embedding model has changed\n */\nexport function detectStalePrimitives(\n harnessDir: string,\n store: EmbeddingStore | null,\n modelId: string,\n config?: HarnessConfig,\n): Array<{ doc: HarnessDocument; directory: string }> {\n const stale: Array<{ doc: HarnessDocument; directory: string }> = [];\n const allPrimitives = loadAllPrimitives(harnessDir, config?.extensions?.directories);\n\n // If model changed, everything is stale\n const modelChanged = store !== null && store.modelId !== modelId;\n\n for (const [directory, docs] of allPrimitives) {\n for (const doc of docs) {\n if (doc.frontmatter.status !== 'active') continue;\n\n const id = doc.frontmatter.id;\n\n if (modelChanged || !store) {\n stale.push({ doc, directory });\n continue;\n }\n\n const existing = store.records[id];\n if (!existing) {\n stale.push({ doc, directory });\n continue;\n }\n\n // Check if file changed\n try {\n const stat = statSync(doc.path);\n if (stat.mtime.toISOString() !== existing.mtime) {\n stale.push({ doc, directory });\n }\n } catch {\n stale.push({ doc, directory });\n }\n }\n }\n\n return stale;\n}\n\n/**\n * Index (or re-index) all primitives that need embeddings.\n * Incrementally updates the store — only re-embeds stale documents.\n *\n * @param harnessDir - Harness directory path\n * @param config - Semantic search configuration with embed function\n * @param harnessConfig - Optional harness config for extension directories\n * @returns Updated embedding store\n */\nexport async function indexPrimitives(\n harnessDir: string,\n searchConfig: SemanticSearchConfig,\n harnessConfig?: HarnessConfig,\n): Promise<EmbeddingStore> {\n let store = loadEmbeddingStore(harnessDir);\n\n const stale = detectStalePrimitives(harnessDir, store, searchConfig.modelId, harnessConfig);\n\n if (stale.length === 0 && store) {\n return store;\n }\n\n // Initialize store if needed\n if (!store || store.modelId !== searchConfig.modelId) {\n store = {\n modelId: searchConfig.modelId,\n dimensions: 0,\n records: {},\n lastIndexedAt: new Date().toISOString(),\n 
};\n }\n\n // Extract texts to embed\n const textsToEmbed: string[] = [];\n const docInfos: Array<{ doc: HarnessDocument; directory: string }> = [];\n\n for (const item of stale) {\n const text = extractEmbeddableText(item.doc);\n if (!text) continue;\n textsToEmbed.push(text);\n docInfos.push(item);\n }\n\n if (textsToEmbed.length === 0) {\n return store;\n }\n\n // Batch embed (chunked to avoid hitting rate limits)\n const batchSize = 50;\n for (let i = 0; i < textsToEmbed.length; i += batchSize) {\n const batch = textsToEmbed.slice(i, i + batchSize);\n const batchDocs = docInfos.slice(i, i + batchSize);\n\n const vectors = await searchConfig.embed(batch);\n\n for (let j = 0; j < vectors.length; j++) {\n const doc = batchDocs[j].doc;\n const vector = vectors[j];\n\n if (store.dimensions === 0 && vector.length > 0) {\n store.dimensions = vector.length;\n }\n\n let mtime: string;\n try {\n const stat = statSync(doc.path);\n mtime = stat.mtime.toISOString();\n } catch {\n mtime = new Date().toISOString();\n }\n\n store.records[doc.frontmatter.id] = {\n id: doc.frontmatter.id,\n path: doc.path,\n directory: batchDocs[j].directory,\n embeddedText: batch[j],\n vector,\n mtime,\n createdAt: new Date().toISOString(),\n };\n }\n }\n\n store.lastIndexedAt = new Date().toISOString();\n\n // Clean up deleted docs\n const allIds = new Set<string>();\n const allPrimitives = loadAllPrimitives(harnessDir, harnessConfig?.extensions?.directories);\n for (const [, docs] of allPrimitives) {\n for (const doc of docs) {\n allIds.add(doc.frontmatter.id);\n }\n }\n\n for (const id of Object.keys(store.records)) {\n if (!allIds.has(id)) {\n delete store.records[id];\n }\n }\n\n saveEmbeddingStore(harnessDir, store);\n return store;\n}\n\n// ─── Search ──────────────────────────────────────────────────────────────────\n\n/**\n * Compute cosine similarity between two vectors.\n * Returns a value between -1 and 1 (1 = identical, 0 = orthogonal).\n */\nexport function cosineSimilarity(a: number[], b: number[]): number {\n if (a.length !== b.length || a.length === 0) return 0;\n\n let dotProduct = 0;\n let normA = 0;\n let normB = 0;\n\n for (let i = 0; i < a.length; i++) {\n dotProduct += a[i] * b[i];\n normA += a[i] * a[i];\n normB += b[i] * b[i];\n }\n\n const denominator = Math.sqrt(normA) * Math.sqrt(normB);\n if (denominator === 0) return 0;\n\n return dotProduct / denominator;\n}\n\n/**\n * Perform semantic search over indexed primitives.\n *\n * @param harnessDir - Harness directory path\n * @param query - Natural language query\n * @param searchConfig - Search configuration with embed function\n * @param harnessConfig - Optional harness config\n * @returns Ranked search results by cosine similarity\n */\nexport async function semanticSearch(\n harnessDir: string,\n query: string,\n searchConfig: SemanticSearchConfig,\n harnessConfig?: HarnessConfig,\n): Promise<SemanticSearchResult[]> {\n const store = loadEmbeddingStore(harnessDir);\n if (!store || Object.keys(store.records).length === 0) {\n return [];\n }\n\n const maxResults = searchConfig.maxResults ?? 10;\n const minScore = searchConfig.minScore ?? 
0.3;\n\n // Embed the query\n const [queryVector] = await searchConfig.embed([query]);\n if (!queryVector || queryVector.length === 0) {\n return [];\n }\n\n // Score all documents\n const scored: Array<{ record: EmbeddingRecord; score: number }> = [];\n\n for (const record of Object.values(store.records)) {\n const score = cosineSimilarity(queryVector, record.vector);\n if (score >= minScore) {\n scored.push({ record, score });\n }\n }\n\n // Sort by score descending\n scored.sort((a, b) => b.score - a.score);\n\n // Load the actual documents for results\n const allPrimitives = loadAllPrimitives(harnessDir, harnessConfig?.extensions?.directories);\n const docMap = new Map<string, { doc: HarnessDocument; directory: string }>();\n for (const [directory, docs] of allPrimitives) {\n for (const doc of docs) {\n docMap.set(doc.frontmatter.id, { doc, directory });\n }\n }\n\n const results: SemanticSearchResult[] = [];\n\n for (const { record, score } of scored.slice(0, maxResults)) {\n const entry = docMap.get(record.id);\n if (!entry) continue;\n\n results.push({\n doc: entry.doc,\n directory: entry.directory,\n score,\n embeddedText: record.embeddedText,\n });\n }\n\n return results;\n}\n\n/**\n * Get embedding stats for the harness.\n */\nexport function getEmbeddingStats(harnessDir: string): {\n indexed: number;\n modelId: string | null;\n dimensions: number;\n lastIndexedAt: string | null;\n storeSize: number;\n} {\n const store = loadEmbeddingStore(harnessDir);\n if (!store) {\n return {\n indexed: 0,\n modelId: null,\n dimensions: 0,\n lastIndexedAt: null,\n storeSize: 0,\n };\n }\n\n const storePath = join(harnessDir, STORE_DIR, STORE_FILE);\n let storeSize = 0;\n try {\n storeSize = statSync(storePath).size;\n } catch {\n // Ignore\n }\n\n return {\n indexed: Object.keys(store.records).length,\n modelId: store.modelId,\n dimensions: store.dimensions,\n lastIndexedAt: store.lastIndexedAt,\n storeSize,\n 
};\n}\n"],"mappings":";;;;;;;;;;;;AAAA,SAAS,YAAY,cAAc,eAAe,WAAW,gBAAgB;AAC7E,SAAS,YAAY;AAgErB,IAAM,aAAa;AACnB,IAAM,YAAY;AAKX,SAAS,mBAAmB,YAA2C;AAC5E,QAAM,YAAY,KAAK,YAAY,WAAW,UAAU;AACxD,MAAI,CAAC,WAAW,SAAS,EAAG,QAAO;AAEnC,MAAI;AACF,UAAM,MAAM,aAAa,WAAW,OAAO;AAC3C,WAAO,KAAK,MAAM,GAAG;AAAA,EACvB,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAGO,SAAS,mBAAmB,YAAoB,OAA6B;AAClF,QAAM,WAAW,KAAK,YAAY,SAAS;AAC3C,MAAI,CAAC,WAAW,QAAQ,GAAG;AACzB,cAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAAA,EACzC;AAEA,QAAM,YAAY,KAAK,UAAU,UAAU;AAC3C,mBAAiB,YAAY,WAAW,MAAM;AAC5C,kBAAc,WAAW,KAAK,UAAU,KAAK,GAAG,OAAO;AAAA,EACzD,CAAC;AACH;AASO,SAAS,sBAAsB,KAA8B;AAClE,QAAM,QAAkB,CAAC;AAGzB,MAAI,IAAI,YAAY,KAAK,SAAS,GAAG;AACnC,UAAM,KAAK,SAAS,IAAI,YAAY,KAAK,KAAK,IAAI,CAAC,EAAE;AAAA,EACvD;AAGA,MAAI,IAAI,IAAI;AACV,UAAM,KAAK,IAAI,EAAE;AAAA,EACnB;AAGA,MAAI,IAAI,IAAI;AACV,UAAM,KAAK,IAAI,EAAE;AAAA,EACnB;AAGA,QAAM,cAAc,IAAI,KAAK,MAAM,GAAG,GAAG,EAAE,KAAK;AAChD,MAAI,aAAa;AACf,UAAM,KAAK,WAAW;AAAA,EACxB;AAEA,SAAO,MAAM,KAAK,IAAI,EAAE,KAAK;AAC/B;AAWO,SAAS,sBACd,YACA,OACA,SACA,QACoD;AACpD,QAAM,QAA4D,CAAC;AACnE,QAAM,gBAAgB,kBAAkB,YAAY,QAAQ,YAAY,WAAW;AAGnF,QAAM,eAAe,UAAU,QAAQ,MAAM,YAAY;AAEzD,aAAW,CAAC,WAAW,IAAI,KAAK,eAAe;AAC7C,eAAW,OAAO,MAAM;AACtB,UAAI,IAAI,YAAY,WAAW,SAAU;AAEzC,YAAM,KAAK,IAAI,YAAY;AAE3B,UAAI,gBAAgB,CAAC,OAAO;AAC1B,cAAM,KAAK,EAAE,KAAK,UAAU,CAAC;AAC7B;AAAA,MACF;AAEA,YAAM,WAAW,MAAM,QAAQ,EAAE;AACjC,UAAI,CAAC,UAAU;AACb,cAAM,KAAK,EAAE,KAAK,UAAU,CAAC;AAC7B;AAAA,MACF;AAGA,UAAI;AACF,cAAM,OAAO,SAAS,IAAI,IAAI;AAC9B,YAAI,KAAK,MAAM,YAAY,MAAM,SAAS,OAAO;AAC/C,gBAAM,KAAK,EAAE,KAAK,UAAU,CAAC;AAAA,QAC/B;AAAA,MACF,QAAQ;AACN,cAAM,KAAK,EAAE,KAAK,UAAU,CAAC;AAAA,MAC/B;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAWA,eAAsB,gBACpB,YACA,cACA,eACyB;AACzB,MAAI,QAAQ,mBAAmB,UAAU;AAEzC,QAAM,QAAQ,sBAAsB,YAAY,OAAO,aAAa,SAAS,aAAa;AAE1F,MAAI,MAAM,WAAW,KAAK,OAAO;AAC/B,WAAO;AAAA,EACT;AAGA,MAAI,CAAC,SAAS,MAAM,YAAY,aAAa,SAAS;AACpD,YAAQ;AAAA,MACN,SAAS,aAAa;AAAA,MACtB,YAAY;AAAA,MACZ,SAAS,CAAC;AAAA,MACV,gBAAe,oBAAI,KAAK,GAAE,YAAY;AAAA,IACxC;AAAA,EACF;AAGA,QAAM,eAAyB,CAAC;AAChC,QAAM,WAA+D,CAAC;AAEtE,aAAW,QAAQ,OAAO;AACxB,UAAM,OAAO,sBAAsB,KAAK,GAAG;AAC3C,QAAI,CAAC,KAAM;AACX,iBAAa,KAAK,IAAI;AACtB,aAAS,KAAK,IAAI;AAAA,EACpB;AAEA,MAAI,aAAa,WAAW,GAAG;AAC7B,WAAO;AAAA,EACT;AAGA,QAAM,YAAY;AAClB,WAAS,IAAI,GAAG,IAAI,aAAa,QAAQ,KAAK,WAAW;AACvD,UAAM,QAAQ,aAAa,MAAM,GAAG,IAAI,SAAS;AACjD,UAAM,YAAY,SAAS,MAAM,GAAG,IAAI,SAAS;AAEjD,UAAM,UAAU,MAAM,aAAa,MAAM,KAAK;AAE9C,aAAS,IAAI,GAAG,IAAI,QAAQ,QAAQ,KAAK;AACvC,YAAM,MAAM,UAAU,CAAC,EAAE;AACzB,YAAM,SAAS,QAAQ,CAAC;AAExB,UAAI,MAAM,eAAe,KAAK,OAAO,SAAS,GAAG;AAC/C,cAAM,aAAa,OAAO;AAAA,MAC5B;AAEA,UAAI;AACJ,UAAI;AACF,cAAM,OAAO,SAAS,IAAI,IAAI;AAC9B,gBAAQ,KAAK,MAAM,YAAY;AAAA,MACjC,QAAQ;AACN,iBAAQ,oBAAI,KAAK,GAAE,YAAY;AAAA,MACjC;AAEA,YAAM,QAAQ,IAAI,YAAY,EAAE,IAAI;AAAA,QAClC,IAAI,IAAI,YAAY;AAAA,QACpB,MAAM,IAAI;AAAA,QACV,WAAW,UAAU,CAAC,EAAE;AAAA,QACxB,cAAc,MAAM,CAAC;AAAA,QACrB;AAAA,QACA;AAAA,QACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MACpC;AAAA,IACF;AAAA,EACF;AAEA,QAAM,iBAAgB,oBAAI,KAAK,GAAE,YAAY;AAG7C,QAAM,SAAS,oBAAI,IAAY;AAC/B,QAAM,gBAAgB,kBAAkB,YAAY,eAAe,YAAY,WAAW;AAC1F,aAAW,CAAC,EAAE,IAAI,KAAK,eAAe;AACpC,eAAW,OAAO,MAAM;AACtB,aAAO,IAAI,IAAI,YAAY,EAAE;AAAA,IAC/B;AAAA,EACF;AAEA,aAAW,MAAM,OAAO,KAAK,MAAM,OAAO,GAAG;AAC3C,QAAI,CAAC,OAAO,IAAI,EAAE,GAAG;AACnB,aAAO,MAAM,QAAQ,EAAE;AAAA,IACzB;AAAA,EACF;AAEA,qBAAmB,YAAY,KAAK;AACpC,SAAO;AACT;AAQO,SAAS,iBAAiB,GAAa,GAAqB;AACjE,MAAI,EAAE,WAAW,EAAE,UAAU,EAAE,WAAW,EAAG,QAAO;AAEpD,MAAI,aAAa;AACjB,MAAI,QAAQ;AACZ,MAAI,QAAQ;AAEZ,WAAS,IAAI,GAAG,IAAI,EAAE,QAAQ,KAAK;AACjC,kBAAc,EAAE,CAAC,IAAI,EAAE,CAAC;AACxB,aAAS,EAAE,CAAC,IAAI,EAAE,CAAC;AACnB,aAAS,EAAE,CAAC,IAAI,EAAE,CAAC;AAA
A,EACrB;AAEA,QAAM,cAAc,KAAK,KAAK,KAAK,IAAI,KAAK,KAAK,KAAK;AACtD,MAAI,gBAAgB,EAAG,QAAO;AAE9B,SAAO,aAAa;AACtB;AAWA,eAAsB,eACpB,YACA,OACA,cACA,eACiC;AACjC,QAAM,QAAQ,mBAAmB,UAAU;AAC3C,MAAI,CAAC,SAAS,OAAO,KAAK,MAAM,OAAO,EAAE,WAAW,GAAG;AACrD,WAAO,CAAC;AAAA,EACV;AAEA,QAAM,aAAa,aAAa,cAAc;AAC9C,QAAM,WAAW,aAAa,YAAY;AAG1C,QAAM,CAAC,WAAW,IAAI,MAAM,aAAa,MAAM,CAAC,KAAK,CAAC;AACtD,MAAI,CAAC,eAAe,YAAY,WAAW,GAAG;AAC5C,WAAO,CAAC;AAAA,EACV;AAGA,QAAM,SAA4D,CAAC;AAEnE,aAAW,UAAU,OAAO,OAAO,MAAM,OAAO,GAAG;AACjD,UAAM,QAAQ,iBAAiB,aAAa,OAAO,MAAM;AACzD,QAAI,SAAS,UAAU;AACrB,aAAO,KAAK,EAAE,QAAQ,MAAM,CAAC;AAAA,IAC/B;AAAA,EACF;AAGA,SAAO,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAGvC,QAAM,gBAAgB,kBAAkB,YAAY,eAAe,YAAY,WAAW;AAC1F,QAAM,SAAS,oBAAI,IAAyD;AAC5E,aAAW,CAAC,WAAW,IAAI,KAAK,eAAe;AAC7C,eAAW,OAAO,MAAM;AACtB,aAAO,IAAI,IAAI,YAAY,IAAI,EAAE,KAAK,UAAU,CAAC;AAAA,IACnD;AAAA,EACF;AAEA,QAAM,UAAkC,CAAC;AAEzC,aAAW,EAAE,QAAQ,MAAM,KAAK,OAAO,MAAM,GAAG,UAAU,GAAG;AAC3D,UAAM,QAAQ,OAAO,IAAI,OAAO,EAAE;AAClC,QAAI,CAAC,MAAO;AAEZ,YAAQ,KAAK;AAAA,MACX,KAAK,MAAM;AAAA,MACX,WAAW,MAAM;AAAA,MACjB;AAAA,MACA,cAAc,OAAO;AAAA,IACvB,CAAC;AAAA,EACH;AAEA,SAAO;AACT;AAKO,SAAS,kBAAkB,YAMhC;AACA,QAAM,QAAQ,mBAAmB,UAAU;AAC3C,MAAI,CAAC,OAAO;AACV,WAAO;AAAA,MACL,SAAS;AAAA,MACT,SAAS;AAAA,MACT,YAAY;AAAA,MACZ,eAAe;AAAA,MACf,WAAW;AAAA,IACb;AAAA,EACF;AAEA,QAAM,YAAY,KAAK,YAAY,WAAW,UAAU;AACxD,MAAI,YAAY;AAChB,MAAI;AACF,gBAAY,SAAS,SAAS,EAAE;AAAA,EAClC,QAAQ;AAAA,EAER;AAEA,SAAO;AAAA,IACL,SAAS,OAAO,KAAK,MAAM,OAAO,EAAE;AAAA,IACpC,SAAS,MAAM;AAAA,IACf,YAAY,MAAM;AAAA,IAClB,eAAe,MAAM;AAAA,IACrB;AAAA,EACF;AACF;","names":[]}
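The sourcemap above embeds the full source of the semantic-search module: an on-disk embedding store, incremental (mtime-based) re-indexing, and cosine-similarity ranking. A minimal usage sketch, assuming the package re-exports `indexPrimitives` and `semanticSearch` from its root (the public entry point is not shown in this diff) and substituting a toy hash-based embedder for a real embedding model:

```typescript
import { indexPrimitives, semanticSearch } from '@agntk/agent-harness';

// Toy embedder for illustration only — the module just needs an
// async (texts) => vectors function; a real setup would call an
// embedding model API here.
async function embed(texts: string[]): Promise<number[][]> {
  return texts.map((text) => {
    const vector = new Array(16).fill(0);
    for (let i = 0; i < text.length; i++) {
      vector[i % 16] += text.charCodeAt(i) / 100;
    }
    return vector;
  });
}

async function main(): Promise<void> {
  const searchConfig = { modelId: 'toy-hash-v1', embed, maxResults: 5, minScore: 0.3 };

  // Incremental: only documents whose mtime changed are re-embedded
  // (or the whole set, if modelId changed since the last index run).
  await indexPrimitives('./my-harness', searchConfig);

  const results = await semanticSearch('./my-harness', 'rate limit handling', searchConfig);
  for (const r of results) {
    console.log(r.score.toFixed(3), r.directory, r.doc.frontmatter.id);
  }
}

main().catch(console.error);
```

Note that `minScore` defaults to 0.3 and results are capped at `maxResults` (default 10), so an empty result list can mean either that nothing has been indexed yet or that no document scored above the threshold.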
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/runtime/serve.ts"],"sourcesContent":["import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';\nimport { join } from 'path';\nimport { Hono } from 'hono';\nimport { cors } from 'hono/cors';\nimport { serve as honoServe } from '@hono/node-server';\nimport type { Server } from 'http';\nimport { createWebApp } from './web-server.js';\nimport { log } from '../core/logger.js';\nimport { loadConfig } from '../core/config.js';\nimport { Conversation } from './conversation.js';\nimport { withFileLockSync } from './file-lock.js';\nimport type { HarnessConfig } from '../core/types.js';\n\n// ─── Types ───────────────────────────────────────────────────────────────────\n\nexport interface ServeOptions {\n harnessDir: string;\n port?: number;\n apiKey?: string;\n /** Secret for authenticating incoming webhooks */\n webhookSecret?: string;\n /** Enable CORS for all origins (default: true) */\n corsEnabled?: boolean;\n}\n\nexport interface WebhookRegistration {\n /** Unique webhook ID */\n id: string;\n /** URL to send events to */\n url: string;\n /** Events to subscribe to (e.g., ['session_end', 'state_change']) */\n events: string[];\n /** Optional secret for signing payloads */\n secret?: string;\n /** Whether this webhook is active */\n active: boolean;\n /** Created timestamp */\n createdAt: string;\n}\n\nexport interface WebhookPayload {\n event: string;\n timestamp: string;\n data: unknown;\n webhookId: string;\n}\n\nexport interface WebhookStore {\n webhooks: WebhookRegistration[];\n}\n\nexport interface ServeResult {\n server: Server;\n port: number;\n /** Function to fire a webhook event */\n fireEvent: (event: string, data: unknown) => Promise<void>;\n /** Function to stop the server */\n stop: () => void;\n}\n\n// ─── Webhook Store ──────────────────────────────────────────────────────────\n\nconst WEBHOOK_FILE = 'webhooks.json';\n\nfunction loadWebhooks(harnessDir: string): WebhookStore {\n const filePath = join(harnessDir, 'memory', WEBHOOK_FILE);\n if (!existsSync(filePath)) return { webhooks: [] };\n\n try {\n const raw = readFileSync(filePath, 'utf-8');\n return JSON.parse(raw) as WebhookStore;\n } catch {\n return { webhooks: [] };\n }\n}\n\nfunction saveWebhooks(harnessDir: string, store: WebhookStore): void {\n const memDir = join(harnessDir, 'memory');\n if (!existsSync(memDir)) mkdirSync(memDir, { recursive: true });\n\n const filePath = join(memDir, WEBHOOK_FILE);\n withFileLockSync(harnessDir, filePath, () => {\n writeFileSync(filePath, JSON.stringify(store, null, 2), 'utf-8');\n });\n}\n\n// ─── Webhook Delivery ───────────────────────────────────────────────────────\n\n/**\n * Fire an event to all subscribed webhooks.\n * Non-blocking — logs failures but never throws.\n */\nasync function fireWebhookEvent(\n harnessDir: string,\n event: string,\n data: unknown,\n): Promise<void> {\n const store = loadWebhooks(harnessDir);\n const subscribers = store.webhooks.filter(\n (w) => w.active && (w.events.includes('*') || w.events.includes(event)),\n );\n\n if (subscribers.length === 0) return;\n\n const payload: Omit<WebhookPayload, 'webhookId'> = {\n event,\n timestamp: new Date().toISOString(),\n data,\n };\n\n const deliveries = subscribers.map(async (webhook) => {\n try {\n const body = JSON.stringify({ ...payload, webhookId: webhook.id });\n const headers: Record<string, string> = {\n 'Content-Type': 'application/json',\n 'X-Harness-Event': event,\n 'X-Webhook-ID': webhook.id,\n };\n\n // HMAC signing if secret is configured\n if 
(webhook.secret) {\n const crypto = await import('crypto');\n const hmac = crypto.createHmac('sha256', webhook.secret);\n hmac.update(body);\n headers['X-Harness-Signature'] = `sha256=${hmac.digest('hex')}`;\n }\n\n const controller = new AbortController();\n const timer = setTimeout(() => controller.abort(), 10000);\n\n const response = await fetch(webhook.url, {\n method: 'POST',\n headers,\n body,\n signal: controller.signal,\n });\n\n clearTimeout(timer);\n\n if (!response.ok) {\n log.warn(`Webhook ${webhook.id} delivery failed: HTTP ${response.status}`);\n }\n } catch (err) {\n log.warn(`Webhook ${webhook.id} delivery failed: ${err instanceof Error ? err.message : String(err)}`);\n }\n });\n\n await Promise.allSettled(deliveries);\n}\n\n// ─── Server Factory ─────────────────────────────────────────────────────────\n\n/**\n * Create and start the harness API server.\n *\n * Includes:\n * - All dashboard endpoints from web-server.ts\n * - Webhook registration and management API\n * - Prompt execution endpoint (POST /api/run)\n * - Health check endpoint (GET /api/health)\n * - Version information endpoint (GET /api/info)\n *\n * Usage:\n * ```typescript\n * const result = startServe({\n * harnessDir: './my-harness',\n * port: 8080,\n * webhookSecret: 'my-secret',\n * });\n *\n * // Fire events to registered webhooks\n * await result.fireEvent('custom_event', { key: 'value' });\n *\n * // Stop the server\n * result.stop();\n * ```\n */\nexport function startServe(options: ServeOptions): ServeResult {\n const {\n harnessDir,\n port = 8080,\n apiKey,\n webhookSecret,\n } = options;\n\n // Build the base web app (dashboard + primitives + sessions + chat + SSE)\n const { app: baseApp, broadcaster } = createWebApp(harnessDir, { apiKey });\n\n // Create a new Hono app that wraps the base with additional endpoints\n const app = new Hono();\n app.use('*', cors());\n\n // ── Authentication middleware for webhook management ──\n const requireAuth = (secret: string | undefined) => {\n return async (c: { req: { header: (name: string) => string | undefined }; json: (body: unknown, status: number) => Response }, next: () => Promise<void>): Promise<Response | void> => {\n if (!secret) return next();\n const auth = c.req.header('Authorization');\n if (!auth || auth !== `Bearer ${secret}`) {\n return c.json({ error: 'Unauthorized' }, 401);\n }\n return next();\n };\n };\n\n // ── Health Check ──\n app.get('/api/health', (c) => {\n return c.json({\n status: 'ok',\n timestamp: new Date().toISOString(),\n harnessDir,\n });\n });\n\n // ── Agent Info ──\n app.get('/api/info', (c) => {\n try {\n const config = loadConfig(harnessDir);\n return c.json({\n name: config.agent.name,\n version: config.agent.version,\n model: config.model.id,\n provider: config.model.provider,\n });\n } catch {\n return c.json({ error: 'Failed to load config' }, 500);\n }\n });\n\n // ── Run prompt ──\n app.post('/api/run', async (c) => {\n const body = await c.req.json<{ prompt?: string; model?: string }>().catch(() => ({} as { prompt?: string; model?: string }));\n if (!body.prompt || body.prompt.trim().length === 0) {\n return c.json({ error: 'prompt is required' }, 400);\n }\n\n try {\n const { createHarness } = await import('../core/harness.js');\n const harness = createHarness({\n dir: harnessDir,\n model: body.model,\n apiKey,\n });\n\n await harness.boot();\n const result = await harness.run(body.prompt);\n await harness.shutdown();\n\n // Fire webhook\n await fireWebhookEvent(harnessDir, 'run_complete', {\n prompt: 
body.prompt,\n text: result.text,\n });\n\n return c.json({\n text: result.text,\n usage: result.usage,\n steps: result.steps,\n });\n } catch (err) {\n const message = err instanceof Error ? err.message : String(err);\n await fireWebhookEvent(harnessDir, 'run_error', {\n prompt: body.prompt,\n error: message,\n });\n return c.json({ error: message }, 500);\n }\n });\n\n // ── Webhook Registration API ──\n\n // List registered webhooks\n app.get('/api/webhooks', requireAuth(webhookSecret) as never, (c) => {\n const store = loadWebhooks(harnessDir);\n return c.json(store.webhooks.map((w) => ({\n id: w.id,\n url: w.url,\n events: w.events,\n active: w.active,\n createdAt: w.createdAt,\n })));\n });\n\n // Register a new webhook\n app.post('/api/webhooks', requireAuth(webhookSecret) as never, async (c) => {\n const body = await c.req.json<{\n url?: string;\n events?: string[];\n secret?: string;\n }>().catch(() => ({} as { url?: string; events?: string[]; secret?: string }));\n\n if (!body.url) {\n return c.json({ error: 'url is required' }, 400);\n }\n\n // Validate URL\n try {\n new URL(body.url);\n } catch {\n return c.json({ error: 'Invalid URL' }, 400);\n }\n\n const store = loadWebhooks(harnessDir);\n const id = `wh_${Date.now().toString(36)}_${Math.random().toString(36).slice(2, 8)}`;\n\n const webhook: WebhookRegistration = {\n id,\n url: body.url,\n events: body.events ?? ['*'],\n secret: body.secret,\n active: true,\n createdAt: new Date().toISOString(),\n };\n\n store.webhooks.push(webhook);\n saveWebhooks(harnessDir, store);\n\n return c.json({ id, url: webhook.url, events: webhook.events }, 201);\n });\n\n // Delete a webhook\n app.delete('/api/webhooks/:id', requireAuth(webhookSecret) as never, (c) => {\n const id = c.req.param('id');\n const store = loadWebhooks(harnessDir);\n const index = store.webhooks.findIndex((w) => w.id === id);\n\n if (index === -1) {\n return c.json({ error: 'Webhook not found' }, 404);\n }\n\n store.webhooks.splice(index, 1);\n saveWebhooks(harnessDir, store);\n\n return c.json({ deleted: id });\n });\n\n // Toggle webhook active/inactive\n app.patch('/api/webhooks/:id', requireAuth(webhookSecret) as never, async (c) => {\n const id = c.req.param('id');\n const body = await c.req.json<{ active?: boolean }>().catch(() => ({} as { active?: boolean }));\n\n const store = loadWebhooks(harnessDir);\n const webhook = store.webhooks.find((w) => w.id === id);\n\n if (!webhook) {\n return c.json({ error: 'Webhook not found' }, 404);\n }\n\n if (body.active !== undefined) {\n webhook.active = body.active;\n }\n\n saveWebhooks(harnessDir, store);\n return c.json({ id, active: webhook.active });\n });\n\n // Test a webhook (sends a test event)\n app.post('/api/webhooks/:id/test', requireAuth(webhookSecret) as never, async (c) => {\n const id = c.req.param('id');\n const store = loadWebhooks(harnessDir);\n const webhook = store.webhooks.find((w) => w.id === id);\n\n if (!webhook) {\n return c.json({ error: 'Webhook not found' }, 404);\n }\n\n try {\n const body = JSON.stringify({\n event: 'test',\n timestamp: new Date().toISOString(),\n data: { message: 'Webhook test from harness serve' },\n webhookId: webhook.id,\n });\n\n const headers: Record<string, string> = {\n 'Content-Type': 'application/json',\n 'X-Harness-Event': 'test',\n 'X-Webhook-ID': webhook.id,\n };\n\n if (webhook.secret) {\n const crypto = await import('crypto');\n const hmac = crypto.createHmac('sha256', webhook.secret);\n hmac.update(body);\n headers['X-Harness-Signature'] = 
`sha256=${hmac.digest('hex')}`;\n }\n\n const controller = new AbortController();\n const timer = setTimeout(() => controller.abort(), 10000);\n\n const response = await fetch(webhook.url, {\n method: 'POST',\n headers,\n body,\n signal: controller.signal,\n });\n\n clearTimeout(timer);\n\n return c.json({\n success: response.ok,\n status: response.status,\n statusText: response.statusText,\n });\n } catch (err) {\n return c.json({\n success: false,\n error: err instanceof Error ? err.message : String(err),\n });\n }\n });\n\n // ── Mount base web app routes ──\n app.route('/', baseApp);\n\n // Start server\n const server = honoServe({ fetch: app.fetch, port }) as Server;\n\n const stop = (): void => {\n server.close();\n };\n\n return {\n server,\n port,\n fireEvent: (event: string, data: unknown) => fireWebhookEvent(harnessDir, event, data),\n stop,\n };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,SAAS,YAAY,cAAc,eAAe,iBAAiB;AACnE,SAAS,YAAY;AA6DrB,IAAM,eAAe;AAErB,SAAS,aAAa,YAAkC;AACtD,QAAM,WAAW,KAAK,YAAY,UAAU,YAAY;AACxD,MAAI,CAAC,WAAW,QAAQ,EAAG,QAAO,EAAE,UAAU,CAAC,EAAE;AAEjD,MAAI;AACF,UAAM,MAAM,aAAa,UAAU,OAAO;AAC1C,WAAO,KAAK,MAAM,GAAG;AAAA,EACvB,QAAQ;AACN,WAAO,EAAE,UAAU,CAAC,EAAE;AAAA,EACxB;AACF;AAEA,SAAS,aAAa,YAAoB,OAA2B;AACnE,QAAM,SAAS,KAAK,YAAY,QAAQ;AACxC,MAAI,CAAC,WAAW,MAAM,EAAG,WAAU,QAAQ,EAAE,WAAW,KAAK,CAAC;AAE9D,QAAM,WAAW,KAAK,QAAQ,YAAY;AAC1C,mBAAiB,YAAY,UAAU,MAAM;AAC3C,kBAAc,UAAU,KAAK,UAAU,OAAO,MAAM,CAAC,GAAG,OAAO;AAAA,EACjE,CAAC;AACH;AAQA,eAAe,iBACb,YACA,OACA,MACe;AACf,QAAM,QAAQ,aAAa,UAAU;AACrC,QAAM,cAAc,MAAM,SAAS;AAAA,IACjC,CAAC,MAAM,EAAE,WAAW,EAAE,OAAO,SAAS,GAAG,KAAK,EAAE,OAAO,SAAS,KAAK;AAAA,EACvE;AAEA,MAAI,YAAY,WAAW,EAAG;AAE9B,QAAM,UAA6C;AAAA,IACjD;AAAA,IACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IAClC;AAAA,EACF;AAEA,QAAM,aAAa,YAAY,IAAI,OAAO,YAAY;AACpD,QAAI;AACF,YAAM,OAAO,KAAK,UAAU,EAAE,GAAG,SAAS,WAAW,QAAQ,GAAG,CAAC;AACjE,YAAM,UAAkC;AAAA,QACtC,gBAAgB;AAAA,QAChB,mBAAmB;AAAA,QACnB,gBAAgB,QAAQ;AAAA,MAC1B;AAGA,UAAI,QAAQ,QAAQ;AAClB,cAAM,SAAS,MAAM,OAAO,QAAQ;AACpC,cAAM,OAAO,OAAO,WAAW,UAAU,QAAQ,MAAM;AACvD,aAAK,OAAO,IAAI;AAChB,gBAAQ,qBAAqB,IAAI,UAAU,KAAK,OAAO,KAAK,CAAC;AAAA,MAC/D;AAEA,YAAM,aAAa,IAAI,gBAAgB;AACvC,YAAM,QAAQ,WAAW,MAAM,WAAW,MAAM,GAAG,GAAK;AAExD,YAAM,WAAW,MAAM,MAAM,QAAQ,KAAK;AAAA,QACxC,QAAQ;AAAA,QACR;AAAA,QACA;AAAA,QACA,QAAQ,WAAW;AAAA,MACrB,CAAC;AAED,mBAAa,KAAK;AAElB,UAAI,CAAC,SAAS,IAAI;AAChB,YAAI,KAAK,WAAW,QAAQ,EAAE,0BAA0B,SAAS,MAAM,EAAE;AAAA,MAC3E;AAAA,IACF,SAAS,KAAK;AACZ,UAAI,KAAK,WAAW,QAAQ,EAAE,qBAAqB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,IACvG;AAAA,EACF,CAAC;AAED,QAAM,QAAQ,WAAW,UAAU;AACrC;AA6BO,SAAS,WAAW,SAAoC;AAC7D,QAAM;AAAA,IACJ;AAAA,IACA,OAAO;AAAA,IACP;AAAA,IACA;AAAA,EACF,IAAI;AAGJ,QAAM,EAAE,KAAK,SAAS,YAAY,IAAI,aAAa,YAAY,EAAE,OAAO,CAAC;AAGzE,QAAM,MAAM,IAAI,KAAK;AACrB,MAAI,IAAI,KAAK,KAAK,CAAC;AAGnB,QAAM,cAAc,CAAC,WAA+B;AAClD,WAAO,OAAO,GAAiH,SAAwD;AACrL,UAAI,CAAC,OAAQ,QAAO,KAAK;AACzB,YAAM,OAAO,EAAE,IAAI,OAAO,eAAe;AACzC,UAAI,CAAC,QAAQ,SAAS,UAAU,MAAM,IAAI;AACxC,eAAO,EAAE,KAAK,EAAE,OAAO,eAAe,GAAG,GAAG;AAAA,MAC9C;AACA,aAAO,KAAK;AAAA,IACd;AAAA,EACF;AAGA,MAAI,IAAI,eAAe,CAAC,MAAM;AAC5B,WAAO,EAAE,KAAK;AAAA,MACZ,QAAQ;AAAA,MACR,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MAClC;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AAGD,MAAI,IAAI,aAAa,CAAC,MAAM;AAC1B,QAAI;AACF,YAAM,SAAS,WAAW,UAAU;AACpC,aAAO,EAAE,KAAK;AAAA,QACZ,MAAM,OAAO,MAAM;AAAA,QACnB,SAAS,OAAO,MAAM;AAAA,QACtB,OAAO,OAAO,MAAM;AAAA,QACpB,UAAU,OAAO,MAAM;AAAA,MACzB,CAAC;AAAA,IACH,QAAQ;AACN,aAAO,EAAE,KAAK,EAAE,OAAO,wBAAwB,GAAG,GAAG;AAAA,IACvD;AAAA,EACF,CAAC;AAGD,MAAI,KAAK,YAAY,OAAO,MAAM;AAChC,UAAM,OAAO,MAAM,EAAE,IAAI,KAA0C,EAAE,MAAM,OAAO,CAAC,
EAAyC;AAC5H,QAAI,CAAC,KAAK,UAAU,KAAK,OAAO,KAAK,EAAE,WAAW,GAAG;AACnD,aAAO,EAAE,KAAK,EAAE,OAAO,qBAAqB,GAAG,GAAG;AAAA,IACpD;AAEA,QAAI;AACF,YAAM,EAAE,cAAc,IAAI,MAAM,OAAO,uBAAoB;AAC3D,YAAM,UAAU,cAAc;AAAA,QAC5B,KAAK;AAAA,QACL,OAAO,KAAK;AAAA,QACZ;AAAA,MACF,CAAC;AAED,YAAM,QAAQ,KAAK;AACnB,YAAM,SAAS,MAAM,QAAQ,IAAI,KAAK,MAAM;AAC5C,YAAM,QAAQ,SAAS;AAGvB,YAAM,iBAAiB,YAAY,gBAAgB;AAAA,QACjD,QAAQ,KAAK;AAAA,QACb,MAAM,OAAO;AAAA,MACf,CAAC;AAED,aAAO,EAAE,KAAK;AAAA,QACZ,MAAM,OAAO;AAAA,QACb,OAAO,OAAO;AAAA,QACd,OAAO,OAAO;AAAA,MAChB,CAAC;AAAA,IACH,SAAS,KAAK;AACZ,YAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,YAAM,iBAAiB,YAAY,aAAa;AAAA,QAC9C,QAAQ,KAAK;AAAA,QACb,OAAO;AAAA,MACT,CAAC;AACD,aAAO,EAAE,KAAK,EAAE,OAAO,QAAQ,GAAG,GAAG;AAAA,IACvC;AAAA,EACF,CAAC;AAKD,MAAI,IAAI,iBAAiB,YAAY,aAAa,GAAY,CAAC,MAAM;AACnE,UAAM,QAAQ,aAAa,UAAU;AACrC,WAAO,EAAE,KAAK,MAAM,SAAS,IAAI,CAAC,OAAO;AAAA,MACvC,IAAI,EAAE;AAAA,MACN,KAAK,EAAE;AAAA,MACP,QAAQ,EAAE;AAAA,MACV,QAAQ,EAAE;AAAA,MACV,WAAW,EAAE;AAAA,IACf,EAAE,CAAC;AAAA,EACL,CAAC;AAGD,MAAI,KAAK,iBAAiB,YAAY,aAAa,GAAY,OAAO,MAAM;AAC1E,UAAM,OAAO,MAAM,EAAE,IAAI,KAItB,EAAE,MAAM,OAAO,CAAC,EAA0D;AAE7E,QAAI,CAAC,KAAK,KAAK;AACb,aAAO,EAAE,KAAK,EAAE,OAAO,kBAAkB,GAAG,GAAG;AAAA,IACjD;AAGA,QAAI;AACF,UAAI,IAAI,KAAK,GAAG;AAAA,IAClB,QAAQ;AACN,aAAO,EAAE,KAAK,EAAE,OAAO,cAAc,GAAG,GAAG;AAAA,IAC7C;AAEA,UAAM,QAAQ,aAAa,UAAU;AACrC,UAAM,KAAK,MAAM,KAAK,IAAI,EAAE,SAAS,EAAE,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,MAAM,GAAG,CAAC,CAAC;AAElF,UAAM,UAA+B;AAAA,MACnC;AAAA,MACA,KAAK,KAAK;AAAA,MACV,QAAQ,KAAK,UAAU,CAAC,GAAG;AAAA,MAC3B,QAAQ,KAAK;AAAA,MACb,QAAQ;AAAA,MACR,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,IACpC;AAEA,UAAM,SAAS,KAAK,OAAO;AAC3B,iBAAa,YAAY,KAAK;AAE9B,WAAO,EAAE,KAAK,EAAE,IAAI,KAAK,QAAQ,KAAK,QAAQ,QAAQ,OAAO,GAAG,GAAG;AAAA,EACrE,CAAC;AAGD,MAAI,OAAO,qBAAqB,YAAY,aAAa,GAAY,CAAC,MAAM;AAC1E,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,UAAM,QAAQ,aAAa,UAAU;AACrC,UAAM,QAAQ,MAAM,SAAS,UAAU,CAAC,MAAM,EAAE,OAAO,EAAE;AAEzD,QAAI,UAAU,IAAI;AAChB,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAEA,UAAM,SAAS,OAAO,OAAO,CAAC;AAC9B,iBAAa,YAAY,KAAK;AAE9B,WAAO,EAAE,KAAK,EAAE,SAAS,GAAG,CAAC;AAAA,EAC/B,CAAC;AAGD,MAAI,MAAM,qBAAqB,YAAY,aAAa,GAAY,OAAO,MAAM;AAC/E,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,UAAM,OAAO,MAAM,EAAE,IAAI,KAA2B,EAAE,MAAM,OAAO,CAAC,EAA0B;AAE9F,UAAM,QAAQ,aAAa,UAAU;AACrC,UAAM,UAAU,MAAM,SAAS,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE;AAEtD,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAEA,QAAI,KAAK,WAAW,QAAW;AAC7B,cAAQ,SAAS,KAAK;AAAA,IACxB;AAEA,iBAAa,YAAY,KAAK;AAC9B,WAAO,EAAE,KAAK,EAAE,IAAI,QAAQ,QAAQ,OAAO,CAAC;AAAA,EAC9C,CAAC;AAGD,MAAI,KAAK,0BAA0B,YAAY,aAAa,GAAY,OAAO,MAAM;AACnF,UAAM,KAAK,EAAE,IAAI,MAAM,IAAI;AAC3B,UAAM,QAAQ,aAAa,UAAU;AACrC,UAAM,UAAU,MAAM,SAAS,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE;AAEtD,QAAI,CAAC,SAAS;AACZ,aAAO,EAAE,KAAK,EAAE,OAAO,oBAAoB,GAAG,GAAG;AAAA,IACnD;AAEA,QAAI;AACF,YAAM,OAAO,KAAK,UAAU;AAAA,QAC1B,OAAO;AAAA,QACP,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,QAClC,MAAM,EAAE,SAAS,kCAAkC;AAAA,QACnD,WAAW,QAAQ;AAAA,MACrB,CAAC;AAED,YAAM,UAAkC;AAAA,QACtC,gBAAgB;AAAA,QAChB,mBAAmB;AAAA,QACnB,gBAAgB,QAAQ;AAAA,MAC1B;AAEA,UAAI,QAAQ,QAAQ;AAClB,cAAM,SAAS,MAAM,OAAO,QAAQ;AACpC,cAAM,OAAO,OAAO,WAAW,UAAU,QAAQ,MAAM;AACvD,aAAK,OAAO,IAAI;AAChB,gBAAQ,qBAAqB,IAAI,UAAU,KAAK,OAAO,KAAK,CAAC;AAAA,MAC/D;AAEA,YAAM,aAAa,IAAI,gBAAgB;AACvC,YAAM,QAAQ,WAAW,MAAM,WAAW,MAAM,GAAG,GAAK;AAExD,YAAM,WAAW,MAAM,MAAM,QAAQ,KAAK;AAAA,QACxC,QAAQ;AAAA,QACR;AAAA,QACA;AAAA,QACA,QAAQ,WAAW;AAAA,MACrB,CAAC;AAED,mBAAa,KAAK;AAElB,aAAO,EAAE,KAAK;AAAA,QACZ,SAAS,SAAS;AAAA,QAClB,QAAQ,SAAS;AAAA,QACjB,YAAY,SAAS;AAAA,MACvB,CAAC;AAAA,IACH,SAAS,KAAK;AACZ,aAAO,EAAE,KAAK;AAAA,QACZ,SAAS;AAAA,QACT,OAAO,eA
Ae,QAAQ,IAAI,UAAU,OAAO,GAAG;AAAA,MACxD,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AAGD,MAAI,MAAM,KAAK,OAAO;AAGtB,QAAM,SAAS,MAAU,EAAE,OAAO,IAAI,OAAO,KAAK,CAAC;AAEnD,QAAM,OAAO,MAAY;AACvB,WAAO,MAAM;AAAA,EACf;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA,WAAW,CAAC,OAAe,SAAkB,iBAAiB,YAAY,OAAO,IAAI;AAAA,IACrF;AAAA,EACF;AACF;","names":[]}
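The serve-module sourcemap removed above shows webhook payloads being signed with HMAC-SHA256 over the raw JSON body and delivered with an `X-Harness-Signature: sha256=<hex>` header. A receiver-side verification sketch under that contract — the handler shape is assumed; only the header name and signing scheme come from the source:

```typescript
import { createHmac, timingSafeEqual } from 'crypto';

// Verify an incoming harness webhook. `rawBody` must be the exact bytes
// that were sent — re-serializing parsed JSON can change key order or
// whitespace and break the comparison.
function verifyHarnessSignature(
  rawBody: string,
  signatureHeader: string | undefined,
  secret: string,
): boolean {
  if (!signatureHeader || !signatureHeader.startsWith('sha256=')) return false;
  const expected = createHmac('sha256', secret).update(rawBody).digest('hex');
  const received = signatureHeader.slice('sha256='.length);
  const a = Buffer.from(expected, 'hex');
  const b = Buffer.from(received, 'hex');
  // timingSafeEqual throws on length mismatch, so guard first.
  return a.length === b.length && timingSafeEqual(a, b);
}
```

Deliveries in the source time out after 10 seconds and failures are only logged, so a receiver that needs reliability should respond 2xx quickly and do its own retry-safe processing.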
@@ -1 +0,0 @@
- {"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
@@ -1 +0,0 @@
- {"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
@@ -1,16 +0,0 @@
- import {
-   CONFIG_DEFAULTS,
-   CORE_PRIMITIVE_DIRS,
-   FrontmatterSchema,
-   HarnessConfigSchema,
-   getPrimitiveDirs
- } from "./chunk-KFX54TQM.js";
- import "./chunk-DGUM43GV.js";
- export {
-   CONFIG_DEFAULTS,
-   CORE_PRIMITIVE_DIRS,
-   FrontmatterSchema,
-   HarnessConfigSchema,
-   getPrimitiveDirs
- };
- //# sourceMappingURL=types-WGDLSPO6.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/runtime/universal-installer.ts"],"sourcesContent":["import { existsSync, readFileSync, writeFileSync, mkdirSync, copyFileSync } from 'fs';\nimport { join, basename, extname } from 'path';\nimport { tmpdir } from 'os';\nimport { createRequire } from 'module';\nimport matter from 'gray-matter';\nimport { parse as parseYaml } from 'yaml';\nimport { fixCapability, installCapability, downloadCapability } from './intake.js';\nimport { autoProcessFile } from './auto-processor.js';\nimport { discoverSources, loadAllSources } from './sources.js';\nimport type { Source, SourceDiscoveryResult } from './sources.js';\nimport { log } from '../core/logger.js';\n\n// ─── Provenance ──────────────────────────────────────────────────────────────\n\n/**\n * Read the harness's own package.json version for the `installed_by` field.\n *\n * Has to handle three possible runtime layouts because tsup bundles flat:\n * - Dev/test: src/runtime/universal-installer.ts → ../../package.json\n * - Built bin: dist/cli/index.js → ../../package.json\n * - Built lib: dist/<bundle>.js → ../package.json\n *\n * Walks up one directory at a time, requires `package.json`, and returns\n * the version of the FIRST one whose name is `@agntk/agent-harness`. Stops\n * after a few levels so a broken environment never causes an infinite loop.\n * Returns \"unknown\" on any failure so an install never blocks on this.\n */\nfunction getHarnessVersion(): string {\n try {\n const require = createRequire(import.meta.url);\n const candidates = [\n '../package.json',\n '../../package.json',\n '../../../package.json',\n ];\n for (const candidate of candidates) {\n try {\n const pkg = require(candidate) as { name?: string; version?: string };\n if (pkg.name === '@agntk/agent-harness' && pkg.version) {\n return pkg.version;\n }\n } catch {\n // Candidate didn't resolve — try the next one.\n }\n }\n return 'unknown';\n } catch {\n return 'unknown';\n }\n}\n\n/**\n * Resolve a commit SHA for a GitHub raw URL by calling the GitHub Contents API.\n *\n * Input URL shape:\n * https://raw.githubusercontent.com/{owner}/{repo}/{ref}/{path}\n * where {ref} is either a 40-char commit SHA or a branch/tag name.\n *\n * Returns the SHA (either the one already in the URL, or the one resolved from\n * a branch name via the Contents API). 
Returns `null` on any failure — network\n * error, timeout, 404, non-github host, unparseable URL — so the install can\n * proceed without source_commit.\n */\nasync function resolveGithubCommitSha(url: string): Promise<string | null> {\n // Only handle raw.githubusercontent.com URLs\n const match = url.match(\n /^https?:\\/\\/raw\\.githubusercontent\\.com\\/([^/]+)\\/([^/]+)\\/([^/]+)\\/(.+)$/,\n );\n if (!match) return null;\n const [, owner, repo, ref, path] = match;\n\n // If ref is already a 40-char hex SHA, just return it\n if (/^[0-9a-f]{40}$/i.test(ref)) return ref;\n\n // Otherwise resolve via the Contents API\n const apiUrl = `https://api.github.com/repos/${owner}/${repo}/contents/${path}?ref=${ref}`;\n const controller = new AbortController();\n const timeout = setTimeout(() => controller.abort(), 5000);\n try {\n const response = await fetch(apiUrl, {\n signal: controller.signal,\n headers: { 'Accept': 'application/vnd.github+json' },\n });\n if (!response.ok) return null;\n const data = (await response.json()) as { sha?: string };\n if (typeof data.sha === 'string' && /^[0-9a-f]{40}$/i.test(data.sha)) {\n return data.sha;\n }\n return null;\n } catch {\n return null;\n } finally {\n clearTimeout(timeout);\n }\n}\n\n// ─── License Detection (Level 2 of task 12.14) ──────────────────────────────\n\n/**\n * Result of license detection. `spdxId` is one of:\n * - A standard SPDX identifier (\"MIT\", \"Apache-2.0\", etc.)\n * - \"PROPRIETARY\" — text says \"all rights reserved\" or no permission grant\n * - \"UNKNOWN\" — no LICENSE file found, or text doesn't match any pattern\n */\nexport interface LicenseInfo {\n /** SPDX id, \"PROPRIETARY\", or \"UNKNOWN\" */\n spdxId: string;\n /** First copyright line found in the LICENSE text, if any */\n copyright?: string;\n /** URL to the license file the detector actually found, if any */\n licenseSource?: string;\n}\n\n/** Sibling LICENSE filenames to probe in the same directory as the installed file. */\nconst SIBLING_LICENSE_NAMES = [\n 'LICENSE',\n 'LICENSE.txt',\n 'LICENSE.md',\n 'COPYING',\n 'COPYING.txt',\n] as const;\n\n/**\n * Classify a LICENSE file's body text into an SPDX id, \"PROPRIETARY\", or \"UNKNOWN\".\n * Substring-based detection — not a full parser. Good enough for the common cases\n * (MIT, Apache-2.0, BSD, ISC, GPL, MPL, CC) and the proprietary \"all rights reserved\"\n * pattern that bit us in v0.1.0.\n */\nfunction classifyLicenseText(text: string): string {\n const lower = text.toLowerCase();\n // PROPRIETARY check first — overrides any false-positive substring match below.\n if (lower.includes('all rights reserved')) {\n return 'PROPRIETARY';\n }\n // Then SPDX-by-substring. Order matters: check more-specific patterns first\n // (e.g. 
AGPL before GPL, LGPL before GPL).\n if (lower.includes('mit license')) return 'MIT';\n if (lower.includes('apache license, version 2.0') || lower.includes('apache-2.0'))\n return 'Apache-2.0';\n if (lower.includes('mozilla public license version 2.0') || lower.includes('mpl-2.0'))\n return 'MPL-2.0';\n if (lower.includes('gnu affero general public license')) return 'AGPL-3.0';\n if (lower.includes('gnu lesser general public license')) {\n if (lower.includes('version 3')) return 'LGPL-3.0';\n if (lower.includes('version 2')) return 'LGPL-2.1';\n }\n if (lower.includes('gnu general public license')) {\n if (lower.includes('version 3')) return 'GPL-3.0';\n if (lower.includes('version 2')) return 'GPL-2.0';\n }\n if (lower.includes('isc license')) return 'ISC';\n if (lower.includes('cc0 1.0 universal') || lower.includes('cc0-1.0')) return 'CC0-1.0';\n if (lower.includes('creative commons attribution-sharealike 4.0')) return 'CC-BY-SA-4.0';\n if (lower.includes('creative commons attribution 4.0') || lower.includes('cc-by-4.0'))\n return 'CC-BY-4.0';\n if (\n lower.includes('redistribution and use in source and binary forms') &&\n lower.includes('neither the name of')\n ) {\n return 'BSD-3-Clause';\n }\n if (lower.includes('redistribution and use in source and binary forms')) {\n return 'BSD-2-Clause';\n }\n if (lower.includes('this is free and unencumbered software released into the public domain')) {\n return 'Unlicense';\n }\n return 'UNKNOWN';\n}\n\n/**\n * Extract the first `Copyright (c) YEAR ...` line from a license body.\n * Returns the trimmed line, or undefined if no copyright line is found.\n */\nfunction extractCopyright(text: string): string | undefined {\n // Match lines starting with \"©\" or \"Copyright\" (any case) and containing a\n // 4-digit year. The leading \"©\" can be followed immediately by space/digit\n // (it's a non-word char so we don't put a \\b after it). \"Copyright\" can be\n // followed by anything as long as a year appears later in the line.\n const lines = text.split(/\\r?\\n/);\n for (const line of lines) {\n const trimmed = line.trim();\n if (/^©\\s*\\d{4}/.test(trimmed) || /^copyright\\b.*\\d{4}/i.test(trimmed)) {\n return trimmed;\n }\n }\n return undefined;\n}\n\n/**\n * Try to fetch a single sibling LICENSE file next to the installed file.\n * Returns the body text on success, null on any failure (404, network, timeout).\n */\nasync function fetchSiblingLicense(\n siblingUrl: string,\n): Promise<string | null> {\n const controller = new AbortController();\n const timeout = setTimeout(() => controller.abort(), 5000);\n try {\n const response = await fetch(siblingUrl, { signal: controller.signal });\n if (!response.ok) return null;\n return await response.text();\n } catch {\n return null;\n } finally {\n clearTimeout(timeout);\n }\n}\n\n/**\n * Try to fetch the repo-root LICENSE via the GitHub License API.\n * Returns the SPDX id, html_url, and (when available) the decoded body text\n * so the caller can extract a copyright line. 
Null on any failure.\n *\n * The API response shape:\n * {\n * license: { spdx_id: \"MIT\" },\n * html_url: \"https://github.com/owner/repo/blob/main/LICENSE\",\n * content: \"<base64>\",\n * encoding: \"base64\"\n * }\n *\n * https://docs.github.com/en/rest/licenses/licenses#get-the-license-for-a-repository\n */\nasync function fetchGithubRepoLicense(\n owner: string,\n repo: string,\n): Promise<{ spdxId: string; htmlUrl: string; body?: string } | null> {\n const apiUrl = `https://api.github.com/repos/${owner}/${repo}/license`;\n const controller = new AbortController();\n const timeout = setTimeout(() => controller.abort(), 5000);\n try {\n const response = await fetch(apiUrl, {\n signal: controller.signal,\n headers: { 'Accept': 'application/vnd.github+json' },\n });\n if (!response.ok) return null;\n const data = (await response.json()) as {\n license?: { spdx_id?: string };\n html_url?: string;\n content?: string;\n encoding?: string;\n };\n const spdxId = data.license?.spdx_id;\n if (!spdxId || spdxId === 'NOASSERTION') return null;\n\n // Decode the base64-encoded body so the caller can extract copyright.\n // Tolerant of failures — if decoding throws, just omit the body.\n let body: string | undefined;\n if (data.content && data.encoding === 'base64') {\n try {\n body = Buffer.from(data.content, 'base64').toString('utf-8');\n } catch {\n body = undefined;\n }\n }\n\n return { spdxId, htmlUrl: data.html_url ?? '', body };\n } catch {\n return null;\n } finally {\n clearTimeout(timeout);\n }\n}\n\n/**\n * Detect the license of a file at a given URL.\n *\n * Lookup order, strictest finding wins:\n * 1. Per-file LICENSE sibling in the same directory as the file.\n * Catches the v0.1.0 case where each anthropics/skills/<skill>/\n * directory contained its own proprietary LICENSE.txt.\n * 2. Repository root LICENSE via the GitHub License API. Returns SPDX id.\n * 3. Caller falls back to the source file's own frontmatter (handled in\n * recordProvenance, not here).\n *\n * Strictness rule: PROPRIETARY > UNKNOWN > permissive SPDX. If a per-file\n * LICENSE says \"All rights reserved\" we never look at the repo root —\n * proprietary always wins.\n *\n * Non-github URLs return immediately with `{ spdxId: 'UNKNOWN' }` since we\n * have no way to look up a license. The caller can still use frontmatter.\n *\n * @param sourceUrl The URL the user passed to `harness install`\n * @returns LicenseInfo. Always returns an object — never throws or returns null.\n */\nexport async function detectLicense(sourceUrl: string): Promise<LicenseInfo> {\n // Only github raw URLs have a structure we can probe.\n const match = sourceUrl.match(\n /^https?:\\/\\/raw\\.githubusercontent\\.com\\/([^/]+)\\/([^/]+)\\/([^/]+)\\/(.+)$/,\n );\n if (!match) {\n return { spdxId: 'UNKNOWN' };\n }\n const [, owner, repo, ref, path] = match;\n\n // 1. Per-file LICENSE sibling — split path into dir, then probe each filename.\n const lastSlash = path.lastIndexOf('/');\n const dir = lastSlash >= 0 ? path.slice(0, lastSlash) : '';\n const dirPrefix = dir ? `${dir}/` : '';\n\n for (const siblingName of SIBLING_LICENSE_NAMES) {\n const siblingUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${ref}/${dirPrefix}${siblingName}`;\n const text = await fetchSiblingLicense(siblingUrl);\n if (text) {\n const spdxId = classifyLicenseText(text);\n const copyright = extractCopyright(text);\n return { spdxId, copyright, licenseSource: siblingUrl };\n }\n }\n\n // 2. Repository root LICENSE via the GitHub License API. 
The API returns\n // the SPDX id directly, plus a base64-encoded body we can use to extract\n // a copyright line.\n const repoLicense = await fetchGithubRepoLicense(owner, repo);\n if (repoLicense) {\n const copyright = repoLicense.body\n ? extractCopyright(repoLicense.body)\n : undefined;\n return {\n spdxId: repoLicense.spdxId,\n copyright,\n licenseSource: repoLicense.htmlUrl || undefined,\n };\n }\n\n return { spdxId: 'UNKNOWN' };\n}\n\n/**\n * Inject provenance and license fields into a normalized markdown file's\n * frontmatter.\n *\n * Provenance rules (Level 1 of task 12.14):\n * - `source` and `source_commit` are preserved if already present (idempotent)\n * - `installed_at` and `installed_by` are always updated to reflect the most\n * recent install action\n * - `source_commit` is only written when a SHA could be resolved\n *\n * License rules (Level 2 of task 12.14):\n * - `license`, `copyright`, `license_source` are preserved if already present\n * in the source file's frontmatter (idempotent — author intent wins)\n * - Otherwise detected via detectLicense() and merged in\n * - License detection NEVER blocks the install. Failures result in\n * `license: UNKNOWN` rather than an error.\n *\n * @param content Normalized markdown content with existing frontmatter\n * @param originalSource The exact URL the user passed to `harness install`\n * @returns The content with provenance + license fields merged into frontmatter\n */\nexport async function recordProvenance(\n content: string,\n originalSource: string,\n): Promise<string> {\n let parsed: ReturnType<typeof matter>;\n try {\n parsed = matter(content);\n } catch {\n return content;\n }\n\n const data = parsed.data as Record<string, unknown>;\n\n // Preserve existing source — idempotency rule\n if (!data.source) {\n data.source = originalSource;\n }\n\n // Preserve existing source_commit; only resolve if missing AND URL is github raw\n if (!data.source_commit) {\n const sha = await resolveGithubCommitSha(originalSource);\n if (sha) {\n data.source_commit = sha;\n }\n }\n\n // License detection (Level 2). Idempotent: don't overwrite author-set fields.\n // Only run detection if at least one license-related field is missing — saves\n // the network calls when re-installing files that already carry their license.\n if (!data.license || !data.copyright || !data.license_source) {\n const license = await detectLicense(originalSource);\n if (!data.license) {\n data.license = license.spdxId;\n }\n if (!data.copyright && license.copyright) {\n data.copyright = license.copyright;\n }\n if (!data.license_source && license.licenseSource) {\n data.license_source = license.licenseSource;\n }\n }\n\n // Always update these to reflect the most recent install\n data.installed_at = new Date().toISOString();\n data.installed_by = `agent-harness@${getHarnessVersion()}`;\n\n return matter.stringify(parsed.content, data);\n}\n\n// ─── Types ───────────────────────────────────────────────────────────────────\n\n/** Detected source format of a file to be installed. */\nexport type SourceFormat =\n | 'harness' // Already harness convention (frontmatter + L0/L1)\n | 'claude-skill' // Claude Code SKILL.md (plain markdown, no frontmatter)\n | 'faf-yaml' // .faf YAML format\n | 'raw-markdown' // Plain markdown with no harness structure\n | 'bash-hook' // Bash/shell script (hook or workflow)\n | 'mcp-config' // MCP server configuration (JSON/YAML)\n | 'unknown';\n\n/** Result of format detection. 
*/\nexport interface FormatDetection {\n /** Detected format */\n format: SourceFormat;\n /** Inferred primitive type (skill, agent, rule, etc.) */\n primitiveType: string | null;\n /** Confidence score (0-1) */\n confidence: number;\n /** Reasons for the detection */\n reasons: string[];\n}\n\n/** Result of a universal install operation. */\nexport interface UniversalInstallResult {\n /** Whether installation succeeded */\n installed: boolean;\n /** Source reference that was resolved */\n source: string;\n /** Detected format */\n format: FormatDetection;\n /** Path where the file was installed */\n destination: string;\n /** Fixes applied during normalization */\n fixes: string[];\n /** Errors encountered */\n errors: string[];\n /** Suggested dependencies to install */\n suggestedDependencies: string[];\n}\n\n/** Options for the universal installer. */\nexport interface UniversalInstallOptions {\n /** Override the detected primitive type (skill, rule, agent, etc.) */\n type?: string;\n /** Override the generated ID */\n id?: string;\n /** Force install even if validation has warnings */\n force?: boolean;\n /** Skip auto-fix (frontmatter, L0/L1 generation) */\n skipFix?: boolean;\n /** Additional tags to add */\n tags?: string[];\n}\n\n// ─── Constants ───────────────────────────────────────────────────────────────\n\nconst VALID_TYPES = ['rule', 'instinct', 'skill', 'playbook', 'workflow', 'tool', 'agent'];\n\nconst TYPE_DIRS: Record<string, string> = {\n rule: 'rules',\n instinct: 'instincts',\n skill: 'skills',\n playbook: 'playbooks',\n workflow: 'workflows',\n tool: 'tools',\n agent: 'agents',\n};\n\n// ─── Format Detection ────────────────────────────────────────────────────────\n\n/**\n * Detect the format of a file based on its content and extension.\n *\n * Detection heuristics:\n * - Has `---` frontmatter with `id:` + `status:` → harness convention\n * - Has `---` frontmatter but missing harness fields → raw-markdown\n * - `.faf` or `.yaml`/`.yml` with `type:` + `content:` keys → faf-yaml\n * - `.sh`/`.bash` or starts with `#!/` → bash-hook\n * - JSON/YAML with `mcpServers` or `servers` → mcp-config\n * - Plain markdown with no frontmatter → claude-skill or raw-markdown\n */\nexport function detectFormat(content: string, filename: string): FormatDetection {\n const ext = extname(filename).toLowerCase();\n const reasons: string[] = [];\n let format: SourceFormat = 'unknown';\n let primitiveType: string | null = null;\n let confidence = 0;\n\n // Check for bash/shell scripts\n if (ext === '.sh' || ext === '.bash' || content.trimStart().startsWith('#!/')) {\n format = 'bash-hook';\n primitiveType = 'workflow';\n confidence = 0.9;\n reasons.push('Shell script detected (shebang or .sh extension)');\n\n // Hooks are typically short scripts with specific patterns\n if (content.includes('hook') || content.includes('pre-commit') || content.includes('post-')) {\n primitiveType = 'workflow';\n reasons.push('Hook pattern detected in content');\n }\n\n return { format, primitiveType, confidence, reasons };\n }\n\n // Check for JSON/YAML MCP configs\n if (ext === '.json') {\n try {\n const parsed = JSON.parse(content) as Record<string, unknown>;\n if (parsed.mcpServers || parsed.servers || parsed.command || parsed.args) {\n format = 'mcp-config';\n primitiveType = 'tool';\n confidence = 0.9;\n reasons.push('MCP configuration JSON detected');\n return { format, primitiveType, confidence, reasons };\n }\n } catch {\n // Not valid JSON, continue\n }\n }\n\n // Check for .faf YAML format\n if 
(ext === '.faf' || ext === '.yaml' || ext === '.yml') {\n try {\n const parsed = parseYaml(content) as Record<string, unknown>;\n if (parsed.type && parsed.content) {\n format = 'faf-yaml';\n primitiveType = inferTypeFromFafType(String(parsed.type));\n confidence = 0.9;\n reasons.push(`.faf YAML format with type: ${parsed.type}`);\n return { format, primitiveType, confidence, reasons };\n }\n // YAML with mcpServers\n if (parsed.mcpServers || parsed.servers) {\n format = 'mcp-config';\n primitiveType = 'tool';\n confidence = 0.85;\n reasons.push('MCP configuration YAML detected');\n return { format, primitiveType, confidence, reasons };\n }\n } catch {\n // Not valid YAML, continue\n }\n }\n\n // Check for markdown content\n if (ext === '.md' || ext === '' || !ext) {\n // Try to parse frontmatter\n try {\n const parsed = matter(content);\n const data = parsed.data as Record<string, unknown>;\n\n if (data.id && data.status) {\n // Has harness-style frontmatter\n format = 'harness';\n confidence = 0.95;\n reasons.push('Harness frontmatter detected (id + status fields)');\n\n // Detect type from tags\n const tags = Array.isArray(data.tags)\n ? (data.tags as string[]).map((t) => String(t).toLowerCase())\n : [];\n for (const type of VALID_TYPES) {\n if (tags.includes(type)) {\n primitiveType = type;\n break;\n }\n }\n\n return { format, primitiveType, confidence, reasons };\n }\n\n if (Object.keys(data).length > 0) {\n // Has some frontmatter but not harness convention\n format = 'raw-markdown';\n confidence = 0.7;\n reasons.push('Markdown with non-harness frontmatter');\n }\n } catch {\n // No frontmatter or parse error\n }\n\n // Check for Claude Code SKILL.md patterns\n if (format === 'unknown' || format === 'raw-markdown') {\n const isClaudeSkill = detectClaudeSkillPattern(content, filename);\n if (isClaudeSkill) {\n format = 'claude-skill';\n primitiveType = 'skill';\n confidence = 0.8;\n reasons.push('Claude Code SKILL.md pattern detected');\n return { format, primitiveType, confidence, reasons };\n }\n }\n\n // Plain markdown — infer type from content\n if (format === 'unknown') {\n format = 'raw-markdown';\n confidence = 0.5;\n reasons.push('Plain markdown without frontmatter');\n }\n\n // Try to infer type from content/filename\n if (!primitiveType) {\n primitiveType = inferTypeFromContent(content, filename);\n if (primitiveType) {\n reasons.push(`Type inferred from content/filename: ${primitiveType}`);\n }\n }\n\n return { format, primitiveType, confidence, reasons };\n }\n\n return { format, primitiveType, confidence, reasons };\n}\n\n// ─── Format Normalization ────────────────────────────────────────────────────\n\n/**\n * Normalize content from any detected format to harness convention.\n * Returns the normalized markdown content ready for writing.\n */\nexport function normalizeToHarness(\n content: string,\n filename: string,\n detection: FormatDetection,\n options?: UniversalInstallOptions,\n): { content: string; filename: string; fixes: string[] } {\n const fixes: string[] = [];\n const type = options?.type ?? 
detection.primitiveType;\n\n switch (detection.format) {\n case 'harness':\n // Already in harness format — just pass through\n return { content, filename, fixes: ['Already in harness format'] };\n\n case 'claude-skill':\n return normalizeClaudeSkill(content, filename, type, options, fixes);\n\n case 'faf-yaml':\n return normalizeFafYaml(content, filename, type, options, fixes);\n\n case 'raw-markdown':\n return normalizeRawMarkdown(content, filename, type, options, fixes);\n\n case 'bash-hook':\n return normalizeBashHook(content, filename, type, options, fixes);\n\n case 'mcp-config':\n return normalizeMcpConfig(content, filename, options, fixes);\n\n default:\n return normalizeRawMarkdown(content, filename, type, options, fixes);\n }\n}\n\n/**\n * Convert Claude Code SKILL.md to harness convention.\n * Claude skills are plain markdown — add frontmatter + L0/L1.\n */\nfunction normalizeClaudeSkill(\n content: string,\n filename: string,\n type: string | null,\n options: UniversalInstallOptions | undefined,\n fixes: string[],\n): { content: string; filename: string; fixes: string[] } {\n const id = options?.id ?? deriveId(filename);\n const primitiveType = type ?? 'skill';\n const tags = [primitiveType, ...(options?.tags ?? [])];\n\n // Extract first heading as title\n const headingMatch = content.match(/^#\\s+(.+)$/m);\n const title = headingMatch ? headingMatch[1].trim() : id;\n\n const frontmatter: Record<string, unknown> = {\n id,\n created: new Date().toISOString().split('T')[0],\n author: 'human',\n status: 'active',\n tags,\n };\n\n // Generate L0 from title/first heading\n const l0 = title.length > 120 ? title.slice(0, 117) + '...' : title;\n\n // Generate L1 from first paragraph\n const paragraphs = content.split(/\\n{2,}/).filter((p) => {\n const trimmed = p.trim();\n return trimmed.length > 0 && !trimmed.startsWith('#') && !trimmed.startsWith('<!--');\n });\n const l1 = paragraphs.length > 0\n ? paragraphs[0].replace(/\\n/g, ' ').trim().slice(0, 300)\n : '';\n\n let body = `<!-- L0: ${l0} -->\\n`;\n if (l1) {\n body += `<!-- L1: ${l1} -->\\n`;\n }\n body += '\\n' + content;\n\n const result = matter.stringify(body, frontmatter);\n fixes.push('Added harness frontmatter (id, status, tags)');\n fixes.push(`Generated L0 from heading: \"${l0}\"`);\n if (l1) fixes.push('Generated L1 from first paragraph');\n\n const outFilename = ensureMdExtension(filename);\n return { content: result, filename: outFilename, fixes };\n}\n\n/**\n * Convert .faf YAML format to harness markdown.\n */\nfunction normalizeFafYaml(\n content: string,\n filename: string,\n type: string | null,\n options: UniversalInstallOptions | undefined,\n fixes: string[],\n): { content: string; filename: string; fixes: string[] } {\n let parsed: Record<string, unknown>;\n try {\n parsed = parseYaml(content) as Record<string, unknown>;\n } catch {\n fixes.push('Failed to parse YAML — treating as raw markdown');\n return normalizeRawMarkdown(content, filename, type, options, fixes);\n }\n\n const id = options?.id ?? String(parsed.id ?? deriveId(filename));\n const fafType = String(parsed.type ?? 'skill');\n const primitiveType = type ?? inferTypeFromFafType(fafType) ?? 'skill';\n const title = String(parsed.title ?? parsed.name ?? id);\n const description = String(parsed.description ?? '');\n const fafContent = String(parsed.content ?? '');\n const fafTags = Array.isArray(parsed.tags)\n ? (parsed.tags as string[]).map(String)\n : [];\n\n const tags = [primitiveType, ...fafTags, ...(options?.tags ?? 
[])];\n\n const frontmatter: Record<string, unknown> = {\n id,\n created: new Date().toISOString().split('T')[0],\n author: 'human',\n status: 'active',\n tags: [...new Set(tags)],\n };\n\n const l0 = title.length > 120 ? title.slice(0, 117) + '...' : title;\n const l1 = description.length > 300 ? description.slice(0, 297) + '...' : description;\n\n let body = `<!-- L0: ${l0} -->\\n`;\n if (l1) body += `<!-- L1: ${l1} -->\\n`;\n body += `\\n# ${title}\\n\\n`;\n if (description) body += `${description}\\n\\n`;\n if (fafContent) body += fafContent + '\\n';\n\n const result = matter.stringify(body, frontmatter);\n fixes.push('Converted .faf YAML to harness markdown');\n fixes.push(`Added frontmatter (id: ${id}, type: ${primitiveType})`);\n\n const outFilename = deriveId(filename) + '.md';\n return { content: result, filename: outFilename, fixes };\n}\n\n/**\n * Normalize raw markdown (no frontmatter or non-harness frontmatter).\n */\nfunction normalizeRawMarkdown(\n content: string,\n filename: string,\n type: string | null,\n options: UniversalInstallOptions | undefined,\n fixes: string[],\n): { content: string; filename: string; fixes: string[] } {\n const id = options?.id ?? deriveId(filename);\n const primitiveType = type ?? 'skill';\n const tags = [primitiveType, ...(options?.tags ?? [])];\n\n // Try to preserve any existing frontmatter\n let parsed: ReturnType<typeof matter>;\n try {\n parsed = matter(content);\n } catch {\n parsed = { data: {}, content, orig: '', excerpt: '', language: '', matter: '', stringify: () => '' } as ReturnType<typeof matter>;\n }\n\n const data = parsed.data as Record<string, unknown>;\n\n // Set required harness fields — options override existing values\n if (options?.id || !data.id) {\n data.id = id;\n fixes.push(`Set id: \"${id}\"`);\n }\n if (!data.status) {\n data.status = 'active';\n fixes.push('Added status: \"active\"');\n }\n if (!data.created) {\n data.created = new Date().toISOString().split('T')[0];\n fixes.push('Added created date');\n }\n if (!data.author || !['human', 'agent', 'infrastructure'].includes(String(data.author))) {\n data.author = 'human';\n fixes.push('Added author: \"human\"');\n }\n if (!Array.isArray(data.tags) || data.tags.length === 0) {\n data.tags = [...new Set(tags)];\n fixes.push(`Added tags: [${(data.tags as string[]).join(', ')}]`);\n }\n\n let body = parsed.content;\n\n // Add L0 if missing\n const l0Regex = /<!--\\s*L0:\\s*(.*?)\\s*-->/;\n if (!l0Regex.test(body)) {\n const headingMatch = body.match(/^#\\s+(.+)$/m);\n const firstLine = body.split('\\n').find((line) => line.trim().length > 0);\n const summary = headingMatch ? headingMatch[1].trim() : (firstLine?.trim() ?? id);\n const l0 = summary.length > 120 ? summary.slice(0, 117) + '...' : summary;\n body = `<!-- L0: ${l0} -->\\n${body}`;\n fixes.push(`Generated L0: \"${l0}\"`);\n }\n\n // Add L1 if missing\n const l1Regex = /<!--\\s*L1:\\s*([\\s\\S]*?)\\s*-->/;\n if (!l1Regex.test(body)) {\n const paragraphs = body.split(/\\n{2,}/).filter((p) => {\n const trimmed = p.trim();\n return trimmed.length > 0 && !trimmed.startsWith('<!--') && !trimmed.startsWith('#');\n });\n if (paragraphs.length > 0) {\n const para = paragraphs[0].replace(/\\n/g, ' ').trim();\n const l1 = para.length > 300 ? para.slice(0, 297) + '...' 
: para;\n const l0Pos = body.indexOf('-->');\n if (l0Pos !== -1) {\n const insertPos = l0Pos + 3;\n body = body.slice(0, insertPos) + `\\n<!-- L1: ${l1} -->` + body.slice(insertPos);\n } else {\n body = `<!-- L1: ${l1} -->\\n${body}`;\n }\n fixes.push('Generated L1 from first paragraph');\n }\n }\n\n const result = matter.stringify(body, data);\n const outFilename = ensureMdExtension(filename);\n return { content: result, filename: outFilename, fixes };\n}\n\n/**\n * Wrap a bash hook script in harness markdown.\n */\nfunction normalizeBashHook(\n content: string,\n filename: string,\n type: string | null,\n options: UniversalInstallOptions | undefined,\n fixes: string[],\n): { content: string; filename: string; fixes: string[] } {\n const id = options?.id ?? deriveId(filename);\n const primitiveType = type ?? 'workflow';\n const tags = [primitiveType, 'hook', ...(options?.tags ?? [])];\n\n // Extract description from comments at top of script\n const commentLines = content.split('\\n')\n .filter((line) => line.startsWith('#') && !line.startsWith('#!'))\n .map((line) => line.replace(/^#\\s?/, '').trim())\n .filter((line) => line.length > 0);\n\n const description = commentLines.length > 0\n ? commentLines.slice(0, 3).join(' ')\n : `Bash hook: ${id}`;\n\n const frontmatter: Record<string, unknown> = {\n id,\n created: new Date().toISOString().split('T')[0],\n author: 'human',\n status: 'active',\n tags: [...new Set(tags)],\n };\n\n const l0 = description.length > 120 ? description.slice(0, 117) + '...' : description;\n\n let body = `<!-- L0: ${l0} -->\\n\\n`;\n body += `# ${id}\\n\\n`;\n body += `${description}\\n\\n`;\n body += '```bash\\n';\n body += content;\n if (!content.endsWith('\\n')) body += '\\n';\n body += '```\\n';\n\n const result = matter.stringify(body, frontmatter);\n fixes.push('Wrapped bash script in harness markdown');\n fixes.push(`Added frontmatter (id: ${id}, type: ${primitiveType})`);\n\n const outFilename = deriveId(filename) + '.md';\n return { content: result, filename: outFilename, fixes };\n}\n\n/**\n * Convert an MCP config to harness tool documentation.\n */\nfunction normalizeMcpConfig(\n content: string,\n filename: string,\n options: UniversalInstallOptions | undefined,\n fixes: string[],\n): { content: string; filename: string; fixes: string[] } {\n const id = options?.id ?? deriveId(filename);\n const tags = ['tool', 'mcp', ...(options?.tags ?? [])];\n\n // Try to parse config\n let config: Record<string, unknown> = {};\n const ext = extname(filename).toLowerCase();\n try {\n if (ext === '.json') {\n config = JSON.parse(content) as Record<string, unknown>;\n } else {\n config = parseYaml(content) as Record<string, unknown>;\n }\n } catch {\n fixes.push('Failed to parse MCP config');\n }\n\n const serverName = String(config.name ?? config.command ?? id);\n const description = String(config.description ?? `MCP server: ${serverName}`);\n\n const frontmatter: Record<string, unknown> = {\n id,\n created: new Date().toISOString().split('T')[0],\n author: 'human',\n status: 'active',\n tags: [...new Set(tags)],\n };\n\n const l0 = description.length > 120 ? description.slice(0, 117) + '...' 
: description;\n\n let body = `<!-- L0: ${l0} -->\\n\\n`;\n body += `# MCP Server: ${serverName}\\n\\n`;\n body += `${description}\\n\\n`;\n body += '## Configuration\\n\\n';\n body += '```json\\n';\n body += JSON.stringify(config, null, 2);\n body += '\\n```\\n';\n\n const result = matter.stringify(body, frontmatter);\n fixes.push('Converted MCP config to harness tool documentation');\n fixes.push(`Added frontmatter (id: ${id})`);\n\n const outFilename = deriveId(filename) + '.md';\n return { content: result, filename: outFilename, fixes };\n}\n\n// ─── Source Resolution ───────────────────────────────────────────────────────\n\n/**\n * Resolve a source reference to a local file path.\n *\n * Supports:\n * - Local file paths (absolute or relative)\n * - HTTPS URLs (GitHub raw, any markdown URL)\n * - Source query (searches registered sources)\n *\n * @returns Path to a local file (downloaded if remote)\n */\nexport async function resolveSource(\n source: string,\n harnessDir: string,\n): Promise<{ localPath: string; originalSource: string; error?: string }> {\n // Case 1: Local file path\n if (existsSync(source)) {\n return { localPath: source, originalSource: source };\n }\n\n // Case 2: URL\n if (source.startsWith('https://') || source.startsWith('http://')) {\n // Convert GitHub URL to raw if needed\n const rawUrl = convertToRawUrl(source);\n const result = await downloadCapability(rawUrl);\n if (result.downloaded) {\n return { localPath: result.localPath, originalSource: source };\n }\n return { localPath: '', originalSource: source, error: result.error };\n }\n\n // Case 3: Source registry lookup — search known sources\n const results = discoverSources(harnessDir, source, { maxResults: 1 });\n if (results.length > 0) {\n const hit = results[0];\n // If the source is a GitHub source, construct a raw URL\n if (hit.source.type === 'github') {\n const rawUrl = convertToRawUrl(hit.url);\n const result = await downloadCapability(rawUrl);\n if (result.downloaded) {\n return { localPath: result.localPath, originalSource: source };\n }\n return { localPath: '', originalSource: source, error: result.error };\n }\n return { localPath: '', originalSource: source, error: `Source \"${hit.source.name}\" is type \"${hit.source.type}\" — direct install not yet supported for this type` };\n }\n\n return { localPath: '', originalSource: source, error: `Could not resolve \"${source}\" — not a local file, URL, or known source` };\n}\n\n// ─── Main Install Function ───────────────────────────────────────────────────\n\n/**\n * Universal install: resolve → detect → normalize → fix → install.\n *\n * Accepts a local path, URL, or search query. 
Detects the format,\n * normalizes to harness convention, applies auto-fixes, and installs\n * to the correct directory.\n *\n * @param harnessDir - Harness directory\n * @param source - File path, URL, or name to install\n * @param options - Installation options\n * @returns Install result with status, fixes, errors, dependency hints\n */\nexport async function universalInstall(\n harnessDir: string,\n source: string,\n options?: UniversalInstallOptions,\n): Promise<UniversalInstallResult> {\n const result: UniversalInstallResult = {\n installed: false,\n source,\n format: { format: 'unknown', primitiveType: null, confidence: 0, reasons: [] },\n destination: '',\n fixes: [],\n errors: [],\n suggestedDependencies: [],\n };\n\n // Step 1: Resolve source to local file\n const resolved = await resolveSource(source, harnessDir);\n if (resolved.error || !resolved.localPath) {\n result.errors.push(resolved.error ?? 'Failed to resolve source');\n return result;\n }\n\n // Step 2: Read content\n let content: string;\n try {\n content = readFileSync(resolved.localPath, 'utf-8');\n } catch (err) {\n result.errors.push(`Failed to read file: ${err instanceof Error ? err.message : String(err)}`);\n return result;\n }\n\n if (content.trim().length === 0) {\n result.errors.push('File is empty');\n return result;\n }\n\n // Step 3: Detect format\n const filename = basename(resolved.localPath);\n const detection = detectFormat(content, filename);\n result.format = detection;\n\n // Step 4: Normalize to harness convention\n const normalized = normalizeToHarness(content, filename, detection, options);\n result.fixes.push(...normalized.fixes);\n\n // Step 4b: Record provenance for URL installs so every installed file is\n // traceable back to its source. Local-path installs are skipped — the path\n // on disk is not a stable identifier.\n let finalContent = normalized.content;\n if (source.startsWith('http://') || source.startsWith('https://')) {\n finalContent = await recordProvenance(finalContent, source);\n result.fixes.push('Recorded provenance (source, installed_at, installed_by)');\n }\n\n // Step 5: Write normalized content to temp file for installation\n const tempDir = join(tmpdir(), 'harness-install');\n mkdirSync(tempDir, { recursive: true });\n const tempPath = join(tempDir, normalized.filename);\n writeFileSync(tempPath, finalContent, 'utf-8');\n\n // Step 6: Apply auto-fix if not skipped\n if (!options?.skipFix) {\n const fixResult = fixCapability(tempPath);\n result.fixes.push(...fixResult.fixes_applied);\n\n if (!fixResult.valid && !options?.force) {\n result.errors.push(...fixResult.errors);\n return result;\n }\n }\n\n // Step 7: Install via existing pipeline\n const installResult = installCapability(harnessDir, tempPath);\n result.installed = installResult.installed;\n result.destination = installResult.destination;\n\n if (!installResult.installed) {\n result.errors.push(...installResult.evalResult.errors);\n // If force mode, try direct copy\n if (options?.force && detection.primitiveType) {\n const targetDir = join(harnessDir, TYPE_DIRS[detection.primitiveType] ?? 
'skills');\n if (!existsSync(targetDir)) mkdirSync(targetDir, { recursive: true });\n const dest = join(targetDir, normalized.filename);\n copyFileSync(tempPath, dest);\n result.installed = true;\n result.destination = dest;\n result.fixes.push('Force-installed despite validation errors');\n }\n }\n\n // Step 8: Scan for dependency hints\n result.suggestedDependencies = extractDependencyHints(normalized.content);\n\n return result;\n}\n\n/**\n * Install from a URL (convenience wrapper).\n */\nexport async function installFromUrl(\n harnessDir: string,\n url: string,\n options?: UniversalInstallOptions,\n): Promise<UniversalInstallResult> {\n return universalInstall(harnessDir, url, options);\n}\n\n/**\n * Install from a local file path (convenience wrapper).\n */\nexport async function installFromFile(\n harnessDir: string,\n filePath: string,\n options?: UniversalInstallOptions,\n): Promise<UniversalInstallResult> {\n return universalInstall(harnessDir, filePath, options);\n}\n\n// ─── Helpers ─────────────────────────────────────────────────────────────────\n\nfunction deriveId(filename: string): string {\n const base = basename(filename).replace(/\\.(md|faf|yaml|yml|json|sh|bash)$/i, '');\n return base.replace(/[^a-z0-9-]/gi, '-').toLowerCase();\n}\n\nfunction ensureMdExtension(filename: string): string {\n if (filename.endsWith('.md')) return filename;\n return deriveId(filename) + '.md';\n}\n\n/**\n * Convert a GitHub URL to its raw content URL.\n *\n * Handles:\n * - github.com/owner/repo/blob/branch/path → raw.githubusercontent.com/owner/repo/branch/path\n * - Already raw.githubusercontent.com URLs → pass through\n * - Other URLs → pass through\n */\nexport function convertToRawUrl(url: string): string {\n // Already raw\n if (url.includes('raw.githubusercontent.com')) return url;\n\n // GitHub blob URL → raw\n const blobMatch = url.match(\n /^https?:\\/\\/github\\.com\\/([^/]+)\\/([^/]+)\\/blob\\/(.+)$/,\n );\n if (blobMatch) {\n const [, owner, repo, rest] = blobMatch;\n return `https://raw.githubusercontent.com/${owner}/${repo}/${rest}`;\n }\n\n return url;\n}\n\n/**\n * Detect if content matches Claude Code SKILL.md patterns.\n * Claude skills are plain markdown with specific structural patterns.\n */\nfunction detectClaudeSkillPattern(content: string, filename: string): boolean {\n const nameLower = filename.toLowerCase();\n\n // Filename patterns\n if (nameLower === 'skill.md' || nameLower.endsWith('-skill.md') || nameLower.endsWith('_skill.md')) {\n return true;\n }\n\n // Content patterns common in Claude Code skills\n const patterns = [\n /^#\\s+.+skill/im,\n /instructions?\\s+for\\s+/i,\n /when\\s+(the\\s+)?user\\s+(asks?|wants?|needs?|requests?)/i,\n /you\\s+(should|must|will)\\s+/i,\n ];\n\n let matches = 0;\n for (const pattern of patterns) {\n if (pattern.test(content)) matches++;\n }\n\n // Need at least 2 pattern matches to classify as Claude skill\n // (plain markdown + instructional tone)\n return matches >= 2 && !content.startsWith('---');\n}\n\nfunction inferTypeFromFafType(fafType: string): string | null {\n const typeMap: Record<string, string> = {\n skill: 'skill',\n agent: 'agent',\n rule: 'rule',\n playbook: 'playbook',\n workflow: 'workflow',\n tool: 'tool',\n instinct: 'instinct',\n hook: 'workflow',\n template: 'skill',\n plugin: 'skill',\n };\n\n return typeMap[fafType.toLowerCase()] ?? 
null;\n}\n\nfunction inferTypeFromContent(content: string, filename: string): string | null {\n const lower = content.toLowerCase();\n const nameLower = filename.toLowerCase();\n\n // From filename\n if (nameLower.includes('rule')) return 'rule';\n if (nameLower.includes('agent')) return 'agent';\n if (nameLower.includes('playbook')) return 'playbook';\n if (nameLower.includes('workflow')) return 'workflow';\n if (nameLower.includes('instinct')) return 'instinct';\n if (nameLower.includes('tool')) return 'tool';\n if (nameLower.includes('skill')) return 'skill';\n\n // From content patterns\n if (lower.includes('# rule:') || lower.includes('## rules')) return 'rule';\n if (lower.includes('# agent:') || lower.includes('## agent')) return 'agent';\n if (lower.includes('# playbook:') || lower.includes('## playbook')) return 'playbook';\n if (lower.includes('# skill:') || lower.includes('## skill')) return 'skill';\n if (lower.includes('# workflow:') || lower.includes('## workflow')) return 'workflow';\n if (lower.includes('# tool:') || lower.includes('## tool')) return 'tool';\n\n // Default for markdown without clear type\n return null;\n}\n\n/**\n * Extract dependency hints from content.\n * Looks for references to tools, skills, or other primitives.\n */\nfunction extractDependencyHints(content: string): string[] {\n const hints: string[] = [];\n const seen = new Set<string>();\n\n // Look for \"requires:\" or \"depends:\" in frontmatter\n try {\n const parsed = matter(content);\n const data = parsed.data as Record<string, unknown>;\n if (Array.isArray(data.requires)) {\n for (const dep of data.requires as string[]) {\n if (!seen.has(dep)) {\n hints.push(dep);\n seen.add(dep);\n }\n }\n }\n if (Array.isArray(data.depends)) {\n for (const dep of data.depends as string[]) {\n if (!seen.has(dep)) {\n hints.push(dep);\n seen.add(dep);\n }\n }\n }\n if (Array.isArray(data.related)) {\n for (const dep of data.related as string[]) {\n if (!seen.has(dep)) {\n hints.push(dep);\n seen.add(dep);\n }\n }\n }\n } catch {\n // Ignore parse errors\n }\n\n return 
hints;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA,SAAS,YAAY,cAAc,eAAe,WAAW,oBAAoB;AACjF,SAAS,MAAM,UAAU,eAAe;AACxC,SAAS,cAAc;AACvB,SAAS,qBAAqB;AAC9B,OAAO,YAAY;AACnB,SAAS,SAAS,iBAAiB;AAsBnC,SAAS,oBAA4B;AACnC,MAAI;AACF,UAAMA,WAAU,cAAc,YAAY,GAAG;AAC7C,UAAM,aAAa;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,eAAW,aAAa,YAAY;AAClC,UAAI;AACF,cAAM,MAAMA,SAAQ,SAAS;AAC7B,YAAI,IAAI,SAAS,0BAA0B,IAAI,SAAS;AACtD,iBAAO,IAAI;AAAA,QACb;AAAA,MACF,QAAQ;AAAA,MAER;AAAA,IACF;AACA,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAcA,eAAe,uBAAuB,KAAqC;AAEzE,QAAM,QAAQ,IAAI;AAAA,IAChB;AAAA,EACF;AACA,MAAI,CAAC,MAAO,QAAO;AACnB,QAAM,CAAC,EAAE,OAAO,MAAM,KAAK,IAAI,IAAI;AAGnC,MAAI,kBAAkB,KAAK,GAAG,EAAG,QAAO;AAGxC,QAAM,SAAS,gCAAgC,KAAK,IAAI,IAAI,aAAa,IAAI,QAAQ,GAAG;AACxF,QAAM,aAAa,IAAI,gBAAgB;AACvC,QAAM,UAAU,WAAW,MAAM,WAAW,MAAM,GAAG,GAAI;AACzD,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,QAAQ;AAAA,MACnC,QAAQ,WAAW;AAAA,MACnB,SAAS,EAAE,UAAU,8BAA8B;AAAA,IACrD,CAAC;AACD,QAAI,CAAC,SAAS,GAAI,QAAO;AACzB,UAAM,OAAQ,MAAM,SAAS,KAAK;AAClC,QAAI,OAAO,KAAK,QAAQ,YAAY,kBAAkB,KAAK,KAAK,GAAG,GAAG;AACpE,aAAO,KAAK;AAAA,IACd;AACA,WAAO;AAAA,EACT,QAAQ;AACN,WAAO;AAAA,EACT,UAAE;AACA,iBAAa,OAAO;AAAA,EACtB;AACF;AAoBA,IAAM,wBAAwB;AAAA,EAC5B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAQA,SAAS,oBAAoB,MAAsB;AACjD,QAAM,QAAQ,KAAK,YAAY;AAE/B,MAAI,MAAM,SAAS,qBAAqB,GAAG;AACzC,WAAO;AAAA,EACT;AAGA,MAAI,MAAM,SAAS,aAAa,EAAG,QAAO;AAC1C,MAAI,MAAM,SAAS,6BAA6B,KAAK,MAAM,SAAS,YAAY;AAC9E,WAAO;AACT,MAAI,MAAM,SAAS,oCAAoC,KAAK,MAAM,SAAS,SAAS;AAClF,WAAO;AACT,MAAI,MAAM,SAAS,mCAAmC,EAAG,QAAO;AAChE,MAAI,MAAM,SAAS,mCAAmC,GAAG;AACvD,QAAI,MAAM,SAAS,WAAW,EAAG,QAAO;AACxC,QAAI,MAAM,SAAS,WAAW,EAAG,QAAO;AAAA,EAC1C;AACA,MAAI,MAAM,SAAS,4BAA4B,GAAG;AAChD,QAAI,MAAM,SAAS,WAAW,EAAG,QAAO;AACxC,QAAI,MAAM,SAAS,WAAW,EAAG,QAAO;AAAA,EAC1C;AACA,MAAI,MAAM,SAAS,aAAa,EAAG,QAAO;AAC1C,MAAI,MAAM,SAAS,mBAAmB,KAAK,MAAM,SAAS,SAAS,EAAG,QAAO;AAC7E,MAAI,MAAM,SAAS,6CAA6C,EAAG,QAAO;AAC1E,MAAI,MAAM,SAAS,kCAAkC,KAAK,MAAM,SAAS,WAAW;AAClF,WAAO;AACT,MACE,MAAM,SAAS,mDAAmD,KAClE,MAAM,SAAS,qBAAqB,GACpC;AACA,WAAO;AAAA,EACT;AACA,MAAI,MAAM,SAAS,mDAAmD,GAAG;AACvE,WAAO;AAAA,EACT;AACA,MAAI,MAAM,SAAS,wEAAwE,GAAG;AAC5F,WAAO;AAAA,EACT;AACA,SAAO;AACT;AAMA,SAAS,iBAAiB,MAAkC;AAK1D,QAAM,QAAQ,KAAK,MAAM,OAAO;AAChC,aAAW,QAAQ,OAAO;AACxB,UAAM,UAAU,KAAK,KAAK;AAC1B,QAAI,aAAa,KAAK,OAAO,KAAK,uBAAuB,KAAK,OAAO,GAAG;AACtE,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO;AACT;AAMA,eAAe,oBACb,YACwB;AACxB,QAAM,aAAa,IAAI,gBAAgB;AACvC,QAAM,UAAU,WAAW,MAAM,WAAW,MAAM,GAAG,GAAI;AACzD,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,YAAY,EAAE,QAAQ,WAAW,OAAO,CAAC;AACtE,QAAI,CAAC,SAAS,GAAI,QAAO;AACzB,WAAO,MAAM,SAAS,KAAK;AAAA,EAC7B,QAAQ;AACN,WAAO;AAAA,EACT,UAAE;AACA,iBAAa,OAAO;AAAA,EACtB;AACF;AAiBA,eAAe,uBACb,OACA,MACoE;AACpE,QAAM,SAAS,gCAAgC,KAAK,IAAI,IAAI;AAC5D,QAAM,aAAa,IAAI,gBAAgB;AACvC,QAAM,UAAU,WAAW,MAAM,WAAW,MAAM,GAAG,GAAI;AACzD,MAAI;AACF,UAAM,WAAW,MAAM,MAAM,QAAQ;AAAA,MACnC,QAAQ,WAAW;AAAA,MACnB,SAAS,EAAE,UAAU,8BAA8B;AAAA,IACrD,CAAC;AACD,QAAI,CAAC,SAAS,GAAI,QAAO;AACzB,UAAM,OAAQ,MAAM,SAAS,KAAK;AAMlC,UAAM,SAAS,KAAK,SAAS;AAC7B,QAAI,CAAC,UAAU,WAAW,cAAe,QAAO;AAIhD,QAAI;AACJ,QAAI,KAAK,WAAW,KAAK,aAAa,UAAU;AAC9C,UAAI;AACF,eAAO,OAAO,KAAK,KAAK,SAAS,QAAQ,EAAE,SAAS,OAAO;AAAA,MAC7D,QAAQ;AACN,eAAO;AAAA,MACT;AAAA,IACF;AAEA,WAAO,EAAE,QAAQ,SAAS,KAAK,YAAY,IAAI,KAAK;AAAA,EACtD,QAAQ;AACN,WAAO;AAAA,EACT,UAAE;AACA,iBAAa,OAAO;AAAA,EACtB;AACF;AAuBA,eAAsB,cAAc,WAAyC;AAE3E,QAAM,QAAQ,UAAU;AAAA,IACtB;AAAA,EACF;AACA,MAAI,CAAC,OAAO;AACV,WAAO,EAAE,QAAQ,UAAU;AAAA,EAC7B;AACA,QAAM,CAAC,EAAE,OAAO,MAAM,KAAK,IAAI,IAAI;AAGnC,QAAM,YAAY,KAAK,YAAY,GAAG;AACtC,QAAM,MAAM,aAAa,IAAI,KAAK,MAAM,GAAG,SAAS,IAAI;AACxD,QAAM,YAAY,MAAM,GAAG,GAAG,MAAM;A
AEpC,aAAW,eAAe,uBAAuB;AAC/C,UAAM,aAAa,qCAAqC,KAAK,IAAI,IAAI,IAAI,GAAG,IAAI,SAAS,GAAG,WAAW;AACvG,UAAM,OAAO,MAAM,oBAAoB,UAAU;AACjD,QAAI,MAAM;AACR,YAAM,SAAS,oBAAoB,IAAI;AACvC,YAAM,YAAY,iBAAiB,IAAI;AACvC,aAAO,EAAE,QAAQ,WAAW,eAAe,WAAW;AAAA,IACxD;AAAA,EACF;AAKA,QAAM,cAAc,MAAM,uBAAuB,OAAO,IAAI;AAC5D,MAAI,aAAa;AACf,UAAM,YAAY,YAAY,OAC1B,iBAAiB,YAAY,IAAI,IACjC;AACJ,WAAO;AAAA,MACL,QAAQ,YAAY;AAAA,MACpB;AAAA,MACA,eAAe,YAAY,WAAW;AAAA,IACxC;AAAA,EACF;AAEA,SAAO,EAAE,QAAQ,UAAU;AAC7B;AAuBA,eAAsB,iBACpB,SACA,gBACiB;AACjB,MAAI;AACJ,MAAI;AACF,aAAS,OAAO,OAAO;AAAA,EACzB,QAAQ;AACN,WAAO;AAAA,EACT;AAEA,QAAM,OAAO,OAAO;AAGpB,MAAI,CAAC,KAAK,QAAQ;AAChB,SAAK,SAAS;AAAA,EAChB;AAGA,MAAI,CAAC,KAAK,eAAe;AACvB,UAAM,MAAM,MAAM,uBAAuB,cAAc;AACvD,QAAI,KAAK;AACP,WAAK,gBAAgB;AAAA,IACvB;AAAA,EACF;AAKA,MAAI,CAAC,KAAK,WAAW,CAAC,KAAK,aAAa,CAAC,KAAK,gBAAgB;AAC5D,UAAM,UAAU,MAAM,cAAc,cAAc;AAClD,QAAI,CAAC,KAAK,SAAS;AACjB,WAAK,UAAU,QAAQ;AAAA,IACzB;AACA,QAAI,CAAC,KAAK,aAAa,QAAQ,WAAW;AACxC,WAAK,YAAY,QAAQ;AAAA,IAC3B;AACA,QAAI,CAAC,KAAK,kBAAkB,QAAQ,eAAe;AACjD,WAAK,iBAAiB,QAAQ;AAAA,IAChC;AAAA,EACF;AAGA,OAAK,gBAAe,oBAAI,KAAK,GAAE,YAAY;AAC3C,OAAK,eAAe,iBAAiB,kBAAkB,CAAC;AAExD,SAAO,OAAO,UAAU,OAAO,SAAS,IAAI;AAC9C;AA4DA,IAAM,cAAc,CAAC,QAAQ,YAAY,SAAS,YAAY,YAAY,QAAQ,OAAO;AAEzF,IAAM,YAAoC;AAAA,EACxC,MAAM;AAAA,EACN,UAAU;AAAA,EACV,OAAO;AAAA,EACP,UAAU;AAAA,EACV,UAAU;AAAA,EACV,MAAM;AAAA,EACN,OAAO;AACT;AAeO,SAAS,aAAa,SAAiB,UAAmC;AAC/E,QAAM,MAAM,QAAQ,QAAQ,EAAE,YAAY;AAC1C,QAAM,UAAoB,CAAC;AAC3B,MAAI,SAAuB;AAC3B,MAAI,gBAA+B;AACnC,MAAI,aAAa;AAGjB,MAAI,QAAQ,SAAS,QAAQ,WAAW,QAAQ,UAAU,EAAE,WAAW,KAAK,GAAG;AAC7E,aAAS;AACT,oBAAgB;AAChB,iBAAa;AACb,YAAQ,KAAK,kDAAkD;AAG/D,QAAI,QAAQ,SAAS,MAAM,KAAK,QAAQ,SAAS,YAAY,KAAK,QAAQ,SAAS,OAAO,GAAG;AAC3F,sBAAgB;AAChB,cAAQ,KAAK,kCAAkC;AAAA,IACjD;AAEA,WAAO,EAAE,QAAQ,eAAe,YAAY,QAAQ;AAAA,EACtD;AAGA,MAAI,QAAQ,SAAS;AACnB,QAAI;AACF,YAAM,SAAS,KAAK,MAAM,OAAO;AACjC,UAAI,OAAO,cAAc,OAAO,WAAW,OAAO,WAAW,OAAO,MAAM;AACxE,iBAAS;AACT,wBAAgB;AAChB,qBAAa;AACb,gBAAQ,KAAK,iCAAiC;AAC9C,eAAO,EAAE,QAAQ,eAAe,YAAY,QAAQ;AAAA,MACtD;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,MAAI,QAAQ,UAAU,QAAQ,WAAW,QAAQ,QAAQ;AACvD,QAAI;AACF,YAAM,SAAS,UAAU,OAAO;AAChC,UAAI,OAAO,QAAQ,OAAO,SAAS;AACjC,iBAAS;AACT,wBAAgB,qBAAqB,OAAO,OAAO,IAAI,CAAC;AACxD,qBAAa;AACb,gBAAQ,KAAK,+BAA+B,OAAO,IAAI,EAAE;AACzD,eAAO,EAAE,QAAQ,eAAe,YAAY,QAAQ;AAAA,MACtD;AAEA,UAAI,OAAO,cAAc,OAAO,SAAS;AACvC,iBAAS;AACT,wBAAgB;AAChB,qBAAa;AACb,gBAAQ,KAAK,iCAAiC;AAC9C,eAAO,EAAE,QAAQ,eAAe,YAAY,QAAQ;AAAA,MACtD;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AAGA,MAAI,QAAQ,SAAS,QAAQ,MAAM,CAAC,KAAK;AAEvC,QAAI;AACF,YAAM,SAAS,OAAO,OAAO;AAC7B,YAAM,OAAO,OAAO;AAEpB,UAAI,KAAK,MAAM,KAAK,QAAQ;AAE1B,iBAAS;AACT,qBAAa;AACb,gBAAQ,KAAK,mDAAmD;AAGhE,cAAM,OAAO,MAAM,QAAQ,KAAK,IAAI,IAC/B,KAAK,KAAkB,IAAI,CAAC,MAAM,OAAO,CAAC,EAAE,YAAY,CAAC,IAC1D,CAAC;AACL,mBAAW,QAAQ,aAAa;AAC9B,cAAI,KAAK,SAAS,IAAI,GAAG;AACvB,4BAAgB;AAChB;AAAA,UACF;AAAA,QACF;AAEA,eAAO,EAAE,QAAQ,eAAe,YAAY,QAAQ;AAAA,MACtD;AAEA,UAAI,OAAO,KAAK,IAAI,EAAE,SAAS,GAAG;AAEhC,iBAAS;AACT,qBAAa;AACb,gBAAQ,KAAK,uCAAuC;AAAA,MACtD;AAAA,IACF,QAAQ;AAAA,IAER;AAGA,QAAI,WAAW,aAAa,WAAW,gBAAgB;AACrD,YAAM,gBAAgB,yBAAyB,SAAS,QAAQ;AAChE,UAAI,eAAe;AACjB,iBAAS;AACT,wBAAgB;AAChB,qBAAa;AACb,gBAAQ,KAAK,uCAAuC;AACpD,eAAO,EAAE,QAAQ,eAAe,YAAY,QAAQ;AAAA,MACtD;AAAA,IACF;AAGA,QAAI,WAAW,WAAW;AACxB,eAAS;AACT,mBAAa;AACb,cAAQ,KAAK,oCAAoC;AAAA,IACnD;AAGA,QAAI,CAAC,eAAe;AAClB,sBAAgB,qBAAqB,SAAS,QAAQ;AACtD,UAAI,eAAe;AACjB,gBAAQ,KAAK,wCAAwC,aAAa,EAAE;AAAA,MACtE;AAAA,IACF;AAEA,WAAO,EAAE,QAAQ,eAAe,YAAY,QAAQ;AAAA,EACtD;AAEA,SAAO,EAAE,QAAQ,eAAe,YAAY,QAAQ;AACtD;AAQO,SAAS,mBACd,SACA,UACA,WACA,SACwD;AACxD,QAAM,QAAkB,CAAC;AACzB,QAAM,OAAO,SAAS,QAA
Q,UAAU;AAExC,UAAQ,UAAU,QAAQ;AAAA,IACxB,KAAK;AAEH,aAAO,EAAE,SAAS,UAAU,OAAO,CAAC,2BAA2B,EAAE;AAAA,IAEnE,KAAK;AACH,aAAO,qBAAqB,SAAS,UAAU,MAAM,SAAS,KAAK;AAAA,IAErE,KAAK;AACH,aAAO,iBAAiB,SAAS,UAAU,MAAM,SAAS,KAAK;AAAA,IAEjE,KAAK;AACH,aAAO,qBAAqB,SAAS,UAAU,MAAM,SAAS,KAAK;AAAA,IAErE,KAAK;AACH,aAAO,kBAAkB,SAAS,UAAU,MAAM,SAAS,KAAK;AAAA,IAElE,KAAK;AACH,aAAO,mBAAmB,SAAS,UAAU,SAAS,KAAK;AAAA,IAE7D;AACE,aAAO,qBAAqB,SAAS,UAAU,MAAM,SAAS,KAAK;AAAA,EACvE;AACF;AAMA,SAAS,qBACP,SACA,UACA,MACA,SACA,OACwD;AACxD,QAAM,KAAK,SAAS,MAAM,SAAS,QAAQ;AAC3C,QAAM,gBAAgB,QAAQ;AAC9B,QAAM,OAAO,CAAC,eAAe,GAAI,SAAS,QAAQ,CAAC,CAAE;AAGrD,QAAM,eAAe,QAAQ,MAAM,aAAa;AAChD,QAAM,QAAQ,eAAe,aAAa,CAAC,EAAE,KAAK,IAAI;AAEtD,QAAM,cAAuC;AAAA,IAC3C;AAAA,IACA,UAAS,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,IAC9C,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR;AAAA,EACF;AAGA,QAAM,KAAK,MAAM,SAAS,MAAM,MAAM,MAAM,GAAG,GAAG,IAAI,QAAQ;AAG9D,QAAM,aAAa,QAAQ,MAAM,QAAQ,EAAE,OAAO,CAAC,MAAM;AACvD,UAAM,UAAU,EAAE,KAAK;AACvB,WAAO,QAAQ,SAAS,KAAK,CAAC,QAAQ,WAAW,GAAG,KAAK,CAAC,QAAQ,WAAW,MAAM;AAAA,EACrF,CAAC;AACD,QAAM,KAAK,WAAW,SAAS,IAC3B,WAAW,CAAC,EAAE,QAAQ,OAAO,GAAG,EAAE,KAAK,EAAE,MAAM,GAAG,GAAG,IACrD;AAEJ,MAAI,OAAO,YAAY,EAAE;AAAA;AACzB,MAAI,IAAI;AACN,YAAQ,YAAY,EAAE;AAAA;AAAA,EACxB;AACA,UAAQ,OAAO;AAEf,QAAM,SAAS,OAAO,UAAU,MAAM,WAAW;AACjD,QAAM,KAAK,8CAA8C;AACzD,QAAM,KAAK,+BAA+B,EAAE,GAAG;AAC/C,MAAI,GAAI,OAAM,KAAK,mCAAmC;AAEtD,QAAM,cAAc,kBAAkB,QAAQ;AAC9C,SAAO,EAAE,SAAS,QAAQ,UAAU,aAAa,MAAM;AACzD;AAKA,SAAS,iBACP,SACA,UACA,MACA,SACA,OACwD;AACxD,MAAI;AACJ,MAAI;AACF,aAAS,UAAU,OAAO;AAAA,EAC5B,QAAQ;AACN,UAAM,KAAK,sDAAiD;AAC5D,WAAO,qBAAqB,SAAS,UAAU,MAAM,SAAS,KAAK;AAAA,EACrE;AAEA,QAAM,KAAK,SAAS,MAAM,OAAO,OAAO,MAAM,SAAS,QAAQ,CAAC;AAChE,QAAM,UAAU,OAAO,OAAO,QAAQ,OAAO;AAC7C,QAAM,gBAAgB,QAAQ,qBAAqB,OAAO,KAAK;AAC/D,QAAM,QAAQ,OAAO,OAAO,SAAS,OAAO,QAAQ,EAAE;AACtD,QAAM,cAAc,OAAO,OAAO,eAAe,EAAE;AACnD,QAAM,aAAa,OAAO,OAAO,WAAW,EAAE;AAC9C,QAAM,UAAU,MAAM,QAAQ,OAAO,IAAI,IACpC,OAAO,KAAkB,IAAI,MAAM,IACpC,CAAC;AAEL,QAAM,OAAO,CAAC,eAAe,GAAG,SAAS,GAAI,SAAS,QAAQ,CAAC,CAAE;AAEjE,QAAM,cAAuC;AAAA,IAC3C;AAAA,IACA,UAAS,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,IAC9C,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,MAAM,CAAC,GAAG,IAAI,IAAI,IAAI,CAAC;AAAA,EACzB;AAEA,QAAM,KAAK,MAAM,SAAS,MAAM,MAAM,MAAM,GAAG,GAAG,IAAI,QAAQ;AAC9D,QAAM,KAAK,YAAY,SAAS,MAAM,YAAY,MAAM,GAAG,GAAG,IAAI,QAAQ;AAE1E,MAAI,OAAO,YAAY,EAAE;AAAA;AACzB,MAAI,GAAI,SAAQ,YAAY,EAAE;AAAA;AAC9B,UAAQ;AAAA,IAAO,KAAK;AAAA;AAAA;AACpB,MAAI,YAAa,SAAQ,GAAG,WAAW;AAAA;AAAA;AACvC,MAAI,WAAY,SAAQ,aAAa;AAErC,QAAM,SAAS,OAAO,UAAU,MAAM,WAAW;AACjD,QAAM,KAAK,yCAAyC;AACpD,QAAM,KAAK,0BAA0B,EAAE,WAAW,aAAa,GAAG;AAElE,QAAM,cAAc,SAAS,QAAQ,IAAI;AACzC,SAAO,EAAE,SAAS,QAAQ,UAAU,aAAa,MAAM;AACzD;AAKA,SAAS,qBACP,SACA,UACA,MACA,SACA,OACwD;AACxD,QAAM,KAAK,SAAS,MAAM,SAAS,QAAQ;AAC3C,QAAM,gBAAgB,QAAQ;AAC9B,QAAM,OAAO,CAAC,eAAe,GAAI,SAAS,QAAQ,CAAC,CAAE;AAGrD,MAAI;AACJ,MAAI;AACF,aAAS,OAAO,OAAO;AAAA,EACzB,QAAQ;AACN,aAAS,EAAE,MAAM,CAAC,GAAG,SAAS,MAAM,IAAI,SAAS,IAAI,UAAU,IAAI,QAAQ,IAAI,WAAW,MAAM,GAAG;AAAA,EACrG;AAEA,QAAM,OAAO,OAAO;AAGpB,MAAI,SAAS,MAAM,CAAC,KAAK,IAAI;AAC3B,SAAK,KAAK;AACV,UAAM,KAAK,YAAY,EAAE,GAAG;AAAA,EAC9B;AACA,MAAI,CAAC,KAAK,QAAQ;AAChB,SAAK,SAAS;AACd,UAAM,KAAK,wBAAwB;AAAA,EACrC;AACA,MAAI,CAAC,KAAK,SAAS;AACjB,SAAK,WAAU,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AACpD,UAAM,KAAK,oBAAoB;AAAA,EACjC;AACA,MAAI,CAAC,KAAK,UAAU,CAAC,CAAC,SAAS,SAAS,gBAAgB,EAAE,SAAS,OAAO,KAAK,MAAM,CAAC,GAAG;AACvF,SAAK,SAAS;AACd,UAAM,KAAK,uBAAuB;AAAA,EACpC;AACA,MAAI,CAAC,MAAM,QAAQ,KAAK,IAAI,KAAK,KAAK,KAAK,WAAW,GAAG;AACvD,SAAK,OAAO,CAAC,GAAG,IAAI,IAAI,IAAI,CAAC;AAC7B,UAAM,KAAK,gBAAiB,KAAK,KAAkB,KAAK,IAAI,CAAC,GAAG;AAAA,
EAClE;AAEA,MAAI,OAAO,OAAO;AAGlB,QAAM,UAAU;AAChB,MAAI,CAAC,QAAQ,KAAK,IAAI,GAAG;AACvB,UAAM,eAAe,KAAK,MAAM,aAAa;AAC7C,UAAM,YAAY,KAAK,MAAM,IAAI,EAAE,KAAK,CAAC,SAAS,KAAK,KAAK,EAAE,SAAS,CAAC;AACxE,UAAM,UAAU,eAAe,aAAa,CAAC,EAAE,KAAK,IAAK,WAAW,KAAK,KAAK;AAC9E,UAAM,KAAK,QAAQ,SAAS,MAAM,QAAQ,MAAM,GAAG,GAAG,IAAI,QAAQ;AAClE,WAAO,YAAY,EAAE;AAAA,EAAS,IAAI;AAClC,UAAM,KAAK,kBAAkB,EAAE,GAAG;AAAA,EACpC;AAGA,QAAM,UAAU;AAChB,MAAI,CAAC,QAAQ,KAAK,IAAI,GAAG;AACvB,UAAM,aAAa,KAAK,MAAM,QAAQ,EAAE,OAAO,CAAC,MAAM;AACpD,YAAM,UAAU,EAAE,KAAK;AACvB,aAAO,QAAQ,SAAS,KAAK,CAAC,QAAQ,WAAW,MAAM,KAAK,CAAC,QAAQ,WAAW,GAAG;AAAA,IACrF,CAAC;AACD,QAAI,WAAW,SAAS,GAAG;AACzB,YAAM,OAAO,WAAW,CAAC,EAAE,QAAQ,OAAO,GAAG,EAAE,KAAK;AACpD,YAAM,KAAK,KAAK,SAAS,MAAM,KAAK,MAAM,GAAG,GAAG,IAAI,QAAQ;AAC5D,YAAM,QAAQ,KAAK,QAAQ,KAAK;AAChC,UAAI,UAAU,IAAI;AAChB,cAAM,YAAY,QAAQ;AAC1B,eAAO,KAAK,MAAM,GAAG,SAAS,IAAI;AAAA,WAAc,EAAE,SAAS,KAAK,MAAM,SAAS;AAAA,MACjF,OAAO;AACL,eAAO,YAAY,EAAE;AAAA,EAAS,IAAI;AAAA,MACpC;AACA,YAAM,KAAK,mCAAmC;AAAA,IAChD;AAAA,EACF;AAEA,QAAM,SAAS,OAAO,UAAU,MAAM,IAAI;AAC1C,QAAM,cAAc,kBAAkB,QAAQ;AAC9C,SAAO,EAAE,SAAS,QAAQ,UAAU,aAAa,MAAM;AACzD;AAKA,SAAS,kBACP,SACA,UACA,MACA,SACA,OACwD;AACxD,QAAM,KAAK,SAAS,MAAM,SAAS,QAAQ;AAC3C,QAAM,gBAAgB,QAAQ;AAC9B,QAAM,OAAO,CAAC,eAAe,QAAQ,GAAI,SAAS,QAAQ,CAAC,CAAE;AAG7D,QAAM,eAAe,QAAQ,MAAM,IAAI,EACpC,OAAO,CAAC,SAAS,KAAK,WAAW,GAAG,KAAK,CAAC,KAAK,WAAW,IAAI,CAAC,EAC/D,IAAI,CAAC,SAAS,KAAK,QAAQ,SAAS,EAAE,EAAE,KAAK,CAAC,EAC9C,OAAO,CAAC,SAAS,KAAK,SAAS,CAAC;AAEnC,QAAM,cAAc,aAAa,SAAS,IACtC,aAAa,MAAM,GAAG,CAAC,EAAE,KAAK,GAAG,IACjC,cAAc,EAAE;AAEpB,QAAM,cAAuC;AAAA,IAC3C;AAAA,IACA,UAAS,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,IAC9C,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,MAAM,CAAC,GAAG,IAAI,IAAI,IAAI,CAAC;AAAA,EACzB;AAEA,QAAM,KAAK,YAAY,SAAS,MAAM,YAAY,MAAM,GAAG,GAAG,IAAI,QAAQ;AAE1E,MAAI,OAAO,YAAY,EAAE;AAAA;AAAA;AACzB,UAAQ,KAAK,EAAE;AAAA;AAAA;AACf,UAAQ,GAAG,WAAW;AAAA;AAAA;AACtB,UAAQ;AACR,UAAQ;AACR,MAAI,CAAC,QAAQ,SAAS,IAAI,EAAG,SAAQ;AACrC,UAAQ;AAER,QAAM,SAAS,OAAO,UAAU,MAAM,WAAW;AACjD,QAAM,KAAK,yCAAyC;AACpD,QAAM,KAAK,0BAA0B,EAAE,WAAW,aAAa,GAAG;AAElE,QAAM,cAAc,SAAS,QAAQ,IAAI;AACzC,SAAO,EAAE,SAAS,QAAQ,UAAU,aAAa,MAAM;AACzD;AAKA,SAAS,mBACP,SACA,UACA,SACA,OACwD;AACxD,QAAM,KAAK,SAAS,MAAM,SAAS,QAAQ;AAC3C,QAAM,OAAO,CAAC,QAAQ,OAAO,GAAI,SAAS,QAAQ,CAAC,CAAE;AAGrD,MAAI,SAAkC,CAAC;AACvC,QAAM,MAAM,QAAQ,QAAQ,EAAE,YAAY;AAC1C,MAAI;AACF,QAAI,QAAQ,SAAS;AACnB,eAAS,KAAK,MAAM,OAAO;AAAA,IAC7B,OAAO;AACL,eAAS,UAAU,OAAO;AAAA,IAC5B;AAAA,EACF,QAAQ;AACN,UAAM,KAAK,4BAA4B;AAAA,EACzC;AAEA,QAAM,aAAa,OAAO,OAAO,QAAQ,OAAO,WAAW,EAAE;AAC7D,QAAM,cAAc,OAAO,OAAO,eAAe,eAAe,UAAU,EAAE;AAE5E,QAAM,cAAuC;AAAA,IAC3C;AAAA,IACA,UAAS,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,IAC9C,QAAQ;AAAA,IACR,QAAQ;AAAA,IACR,MAAM,CAAC,GAAG,IAAI,IAAI,IAAI,CAAC;AAAA,EACzB;AAEA,QAAM,KAAK,YAAY,SAAS,MAAM,YAAY,MAAM,GAAG,GAAG,IAAI,QAAQ;AAE1E,MAAI,OAAO,YAAY,EAAE;AAAA;AAAA;AACzB,UAAQ,iBAAiB,UAAU;AAAA;AAAA;AACnC,UAAQ,GAAG,WAAW;AAAA;AAAA;AACtB,UAAQ;AACR,UAAQ;AACR,UAAQ,KAAK,UAAU,QAAQ,MAAM,CAAC;AACtC,UAAQ;AAER,QAAM,SAAS,OAAO,UAAU,MAAM,WAAW;AACjD,QAAM,KAAK,oDAAoD;AAC/D,QAAM,KAAK,0BAA0B,EAAE,GAAG;AAE1C,QAAM,cAAc,SAAS,QAAQ,IAAI;AACzC,SAAO,EAAE,SAAS,QAAQ,UAAU,aAAa,MAAM;AACzD;AAcA,eAAsB,cACpB,QACA,YACwE;AAExE,MAAI,WAAW,MAAM,GAAG;AACtB,WAAO,EAAE,WAAW,QAAQ,gBAAgB,OAAO;AAAA,EACrD;AAGA,MAAI,OAAO,WAAW,UAAU,KAAK,OAAO,WAAW,SAAS,GAAG;AAEjE,UAAM,SAAS,gBAAgB,MAAM;AACrC,UAAM,SAAS,MAAM,mBAAmB,MAAM;AAC9C,QAAI,OAAO,YAAY;AACrB,aAAO,EAAE,WAAW,OAAO,WAAW,gBAAgB,OAAO;AAAA,IAC/D;AACA,WAAO,EAAE,WAAW,IAAI,gBAAgB,QAAQ,OAAO,OAAO,MAAM;AAAA,EACtE;AAGA,QAAM,UAAU,gBAAgB,YAAY,QAAQ,EAAE,YAAY,EAAE,CAAC;AACrE,MAAI,QAAQ,SAAS,GAAG;AACtB,U
AAM,MAAM,QAAQ,CAAC;AAErB,QAAI,IAAI,OAAO,SAAS,UAAU;AAChC,YAAM,SAAS,gBAAgB,IAAI,GAAG;AACtC,YAAM,SAAS,MAAM,mBAAmB,MAAM;AAC9C,UAAI,OAAO,YAAY;AACrB,eAAO,EAAE,WAAW,OAAO,WAAW,gBAAgB,OAAO;AAAA,MAC/D;AACA,aAAO,EAAE,WAAW,IAAI,gBAAgB,QAAQ,OAAO,OAAO,MAAM;AAAA,IACtE;AACA,WAAO,EAAE,WAAW,IAAI,gBAAgB,QAAQ,OAAO,WAAW,IAAI,OAAO,IAAI,cAAc,IAAI,OAAO,IAAI,0DAAqD;AAAA,EACrK;AAEA,SAAO,EAAE,WAAW,IAAI,gBAAgB,QAAQ,OAAO,sBAAsB,MAAM,kDAA6C;AAClI;AAgBA,eAAsB,iBACpB,YACA,QACA,SACiC;AACjC,QAAM,SAAiC;AAAA,IACrC,WAAW;AAAA,IACX;AAAA,IACA,QAAQ,EAAE,QAAQ,WAAW,eAAe,MAAM,YAAY,GAAG,SAAS,CAAC,EAAE;AAAA,IAC7E,aAAa;AAAA,IACb,OAAO,CAAC;AAAA,IACR,QAAQ,CAAC;AAAA,IACT,uBAAuB,CAAC;AAAA,EAC1B;AAGA,QAAM,WAAW,MAAM,cAAc,QAAQ,UAAU;AACvD,MAAI,SAAS,SAAS,CAAC,SAAS,WAAW;AACzC,WAAO,OAAO,KAAK,SAAS,SAAS,0BAA0B;AAC/D,WAAO;AAAA,EACT;AAGA,MAAI;AACJ,MAAI;AACF,cAAU,aAAa,SAAS,WAAW,OAAO;AAAA,EACpD,SAAS,KAAK;AACZ,WAAO,OAAO,KAAK,wBAAwB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAC7F,WAAO;AAAA,EACT;AAEA,MAAI,QAAQ,KAAK,EAAE,WAAW,GAAG;AAC/B,WAAO,OAAO,KAAK,eAAe;AAClC,WAAO;AAAA,EACT;AAGA,QAAM,WAAW,SAAS,SAAS,SAAS;AAC5C,QAAM,YAAY,aAAa,SAAS,QAAQ;AAChD,SAAO,SAAS;AAGhB,QAAM,aAAa,mBAAmB,SAAS,UAAU,WAAW,OAAO;AAC3E,SAAO,MAAM,KAAK,GAAG,WAAW,KAAK;AAKrC,MAAI,eAAe,WAAW;AAC9B,MAAI,OAAO,WAAW,SAAS,KAAK,OAAO,WAAW,UAAU,GAAG;AACjE,mBAAe,MAAM,iBAAiB,cAAc,MAAM;AAC1D,WAAO,MAAM,KAAK,0DAA0D;AAAA,EAC9E;AAGA,QAAM,UAAU,KAAK,OAAO,GAAG,iBAAiB;AAChD,YAAU,SAAS,EAAE,WAAW,KAAK,CAAC;AACtC,QAAM,WAAW,KAAK,SAAS,WAAW,QAAQ;AAClD,gBAAc,UAAU,cAAc,OAAO;AAG7C,MAAI,CAAC,SAAS,SAAS;AACrB,UAAM,YAAY,cAAc,QAAQ;AACxC,WAAO,MAAM,KAAK,GAAG,UAAU,aAAa;AAE5C,QAAI,CAAC,UAAU,SAAS,CAAC,SAAS,OAAO;AACvC,aAAO,OAAO,KAAK,GAAG,UAAU,MAAM;AACtC,aAAO;AAAA,IACT;AAAA,EACF;AAGA,QAAM,gBAAgB,kBAAkB,YAAY,QAAQ;AAC5D,SAAO,YAAY,cAAc;AACjC,SAAO,cAAc,cAAc;AAEnC,MAAI,CAAC,cAAc,WAAW;AAC5B,WAAO,OAAO,KAAK,GAAG,cAAc,WAAW,MAAM;AAErD,QAAI,SAAS,SAAS,UAAU,eAAe;AAC7C,YAAM,YAAY,KAAK,YAAY,UAAU,UAAU,aAAa,KAAK,QAAQ;AACjF,UAAI,CAAC,WAAW,SAAS,EAAG,WAAU,WAAW,EAAE,WAAW,KAAK,CAAC;AACpE,YAAM,OAAO,KAAK,WAAW,WAAW,QAAQ;AAChD,mBAAa,UAAU,IAAI;AAC3B,aAAO,YAAY;AACnB,aAAO,cAAc;AACrB,aAAO,MAAM,KAAK,2CAA2C;AAAA,IAC/D;AAAA,EACF;AAGA,SAAO,wBAAwB,uBAAuB,WAAW,OAAO;AAExE,SAAO;AACT;AAKA,eAAsB,eACpB,YACA,KACA,SACiC;AACjC,SAAO,iBAAiB,YAAY,KAAK,OAAO;AAClD;AAKA,eAAsB,gBACpB,YACA,UACA,SACiC;AACjC,SAAO,iBAAiB,YAAY,UAAU,OAAO;AACvD;AAIA,SAAS,SAAS,UAA0B;AAC1C,QAAM,OAAO,SAAS,QAAQ,EAAE,QAAQ,sCAAsC,EAAE;AAChF,SAAO,KAAK,QAAQ,gBAAgB,GAAG,EAAE,YAAY;AACvD;AAEA,SAAS,kBAAkB,UAA0B;AACnD,MAAI,SAAS,SAAS,KAAK,EAAG,QAAO;AACrC,SAAO,SAAS,QAAQ,IAAI;AAC9B;AAUO,SAAS,gBAAgB,KAAqB;AAEnD,MAAI,IAAI,SAAS,2BAA2B,EAAG,QAAO;AAGtD,QAAM,YAAY,IAAI;AAAA,IACpB;AAAA,EACF;AACA,MAAI,WAAW;AACb,UAAM,CAAC,EAAE,OAAO,MAAM,IAAI,IAAI;AAC9B,WAAO,qCAAqC,KAAK,IAAI,IAAI,IAAI,IAAI;AAAA,EACnE;AAEA,SAAO;AACT;AAMA,SAAS,yBAAyB,SAAiB,UAA2B;AAC5E,QAAM,YAAY,SAAS,YAAY;AAGvC,MAAI,cAAc,cAAc,UAAU,SAAS,WAAW,KAAK,UAAU,SAAS,WAAW,GAAG;AAClG,WAAO;AAAA,EACT;AAGA,QAAM,WAAW;AAAA,IACf;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,MAAI,UAAU;AACd,aAAW,WAAW,UAAU;AAC9B,QAAI,QAAQ,KAAK,OAAO,EAAG;AAAA,EAC7B;AAIA,SAAO,WAAW,KAAK,CAAC,QAAQ,WAAW,KAAK;AAClD;AAEA,SAAS,qBAAqB,SAAgC;AAC5D,QAAM,UAAkC;AAAA,IACtC,OAAO;AAAA,IACP,OAAO;AAAA,IACP,MAAM;AAAA,IACN,UAAU;AAAA,IACV,UAAU;AAAA,IACV,MAAM;AAAA,IACN,UAAU;AAAA,IACV,MAAM;AAAA,IACN,UAAU;AAAA,IACV,QAAQ;AAAA,EACV;AAEA,SAAO,QAAQ,QAAQ,YAAY,CAAC,KAAK;AAC3C;AAEA,SAAS,qBAAqB,SAAiB,UAAiC;AAC9E,QAAM,QAAQ,QAAQ,YAAY;AAClC,QAAM,YAAY,SAAS,YAAY;AAGvC,MAAI,UAAU,SAAS,MAAM,EAAG,QAAO;AACvC,MAAI,UAAU,SAAS,OAAO,EAAG,QAAO;AACxC,MAAI,UAAU,SAAS,UAAU,EAAG,QAAO;AAC3C,MAAI,UAAU,SAAS,UAAU,EAAG,QAAO;AAC3C,MAAI,UAAU,SAAS,UAAU,EAAG,QAAO;AAC3C,MAAI,UAA
U,SAAS,MAAM,EAAG,QAAO;AACvC,MAAI,UAAU,SAAS,OAAO,EAAG,QAAO;AAGxC,MAAI,MAAM,SAAS,SAAS,KAAK,MAAM,SAAS,UAAU,EAAG,QAAO;AACpE,MAAI,MAAM,SAAS,UAAU,KAAK,MAAM,SAAS,UAAU,EAAG,QAAO;AACrE,MAAI,MAAM,SAAS,aAAa,KAAK,MAAM,SAAS,aAAa,EAAG,QAAO;AAC3E,MAAI,MAAM,SAAS,UAAU,KAAK,MAAM,SAAS,UAAU,EAAG,QAAO;AACrE,MAAI,MAAM,SAAS,aAAa,KAAK,MAAM,SAAS,aAAa,EAAG,QAAO;AAC3E,MAAI,MAAM,SAAS,SAAS,KAAK,MAAM,SAAS,SAAS,EAAG,QAAO;AAGnE,SAAO;AACT;AAMA,SAAS,uBAAuB,SAA2B;AACzD,QAAM,QAAkB,CAAC;AACzB,QAAM,OAAO,oBAAI,IAAY;AAG7B,MAAI;AACF,UAAM,SAAS,OAAO,OAAO;AAC7B,UAAM,OAAO,OAAO;AACpB,QAAI,MAAM,QAAQ,KAAK,QAAQ,GAAG;AAChC,iBAAW,OAAO,KAAK,UAAsB;AAC3C,YAAI,CAAC,KAAK,IAAI,GAAG,GAAG;AAClB,gBAAM,KAAK,GAAG;AACd,eAAK,IAAI,GAAG;AAAA,QACd;AAAA,MACF;AAAA,IACF;AACA,QAAI,MAAM,QAAQ,KAAK,OAAO,GAAG;AAC/B,iBAAW,OAAO,KAAK,SAAqB;AAC1C,YAAI,CAAC,KAAK,IAAI,GAAG,GAAG;AAClB,gBAAM,KAAK,GAAG;AACd,eAAK,IAAI,GAAG;AAAA,QACd;AAAA,MACF;AAAA,IACF;AACA,QAAI,MAAM,QAAQ,KAAK,OAAO,GAAG;AAC/B,iBAAW,OAAO,KAAK,SAAqB;AAC1C,YAAI,CAAC,KAAK,IAAI,GAAG,GAAG;AAClB,gBAAM,KAAK,GAAG;AACd,eAAK,IAAI,GAAG;AAAA,QACd;AAAA,MACF;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;","names":["require"]}