@agntk/agent-harness 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (212) hide show
  1. package/LICENSE +21 -0
  2. package/NOTICE +41 -0
  3. package/README.md +445 -0
  4. package/defaults/agents/summarizer.md +49 -0
  5. package/defaults/instincts/lead-with-answer.md +24 -0
  6. package/defaults/instincts/qualify-before-recommending.md +40 -0
  7. package/defaults/instincts/read-before-edit.md +23 -0
  8. package/defaults/instincts/search-before-create.md +23 -0
  9. package/defaults/playbooks/ship-feature.md +31 -0
  10. package/defaults/rules/ask-before-assuming.md +35 -0
  11. package/defaults/rules/operations.md +35 -0
  12. package/defaults/rules/respect-the-user.md +39 -0
  13. package/defaults/skills/business-analyst.md +181 -0
  14. package/defaults/skills/content-marketer.md +184 -0
  15. package/defaults/skills/research.md +34 -0
  16. package/defaults/tools/example-web-search.md +60 -0
  17. package/defaults/workflows/daily-reflection.md +54 -0
  18. package/dist/agent-framework-K4GUIICH.js +344 -0
  19. package/dist/agent-framework-K4GUIICH.js.map +1 -0
  20. package/dist/analytics-RPT73WNM.js +12 -0
  21. package/dist/analytics-RPT73WNM.js.map +1 -0
  22. package/dist/auto-processor-OLE45UI3.js +13 -0
  23. package/dist/auto-processor-OLE45UI3.js.map +1 -0
  24. package/dist/chunk-274RV3YO.js +162 -0
  25. package/dist/chunk-274RV3YO.js.map +1 -0
  26. package/dist/chunk-4CWAGBNS.js +168 -0
  27. package/dist/chunk-4CWAGBNS.js.map +1 -0
  28. package/dist/chunk-4FDUOGSZ.js +69 -0
  29. package/dist/chunk-4FDUOGSZ.js.map +1 -0
  30. package/dist/chunk-5H34JPMB.js +199 -0
  31. package/dist/chunk-5H34JPMB.js.map +1 -0
  32. package/dist/chunk-6EMOEYGU.js +102 -0
  33. package/dist/chunk-6EMOEYGU.js.map +1 -0
  34. package/dist/chunk-A7BJPQQ6.js +236 -0
  35. package/dist/chunk-A7BJPQQ6.js.map +1 -0
  36. package/dist/chunk-AGAAFJEO.js +76 -0
  37. package/dist/chunk-AGAAFJEO.js.map +1 -0
  38. package/dist/chunk-BSKDOFRT.js +65 -0
  39. package/dist/chunk-BSKDOFRT.js.map +1 -0
  40. package/dist/chunk-CHJ5GNZC.js +100 -0
  41. package/dist/chunk-CHJ5GNZC.js.map +1 -0
  42. package/dist/chunk-CSL3ERUI.js +307 -0
  43. package/dist/chunk-CSL3ERUI.js.map +1 -0
  44. package/dist/chunk-DA7IKHC4.js +229 -0
  45. package/dist/chunk-DA7IKHC4.js.map +1 -0
  46. package/dist/chunk-DGUM43GV.js +11 -0
  47. package/dist/chunk-DGUM43GV.js.map +1 -0
  48. package/dist/chunk-DTTXPHFW.js +211 -0
  49. package/dist/chunk-DTTXPHFW.js.map +1 -0
  50. package/dist/chunk-FD55B3IO.js +204 -0
  51. package/dist/chunk-FD55B3IO.js.map +1 -0
  52. package/dist/chunk-FLZU44SV.js +230 -0
  53. package/dist/chunk-FLZU44SV.js.map +1 -0
  54. package/dist/chunk-GJNNR2RA.js +200 -0
  55. package/dist/chunk-GJNNR2RA.js.map +1 -0
  56. package/dist/chunk-GNUSHD2Y.js +111 -0
  57. package/dist/chunk-GNUSHD2Y.js.map +1 -0
  58. package/dist/chunk-GUJTBGVS.js +2212 -0
  59. package/dist/chunk-GUJTBGVS.js.map +1 -0
  60. package/dist/chunk-IZ6UZ3ZL.js +207 -0
  61. package/dist/chunk-IZ6UZ3ZL.js.map +1 -0
  62. package/dist/chunk-JKMGYWXB.js +197 -0
  63. package/dist/chunk-JKMGYWXB.js.map +1 -0
  64. package/dist/chunk-KFX54TQM.js +165 -0
  65. package/dist/chunk-KFX54TQM.js.map +1 -0
  66. package/dist/chunk-M7NXUK55.js +199 -0
  67. package/dist/chunk-M7NXUK55.js.map +1 -0
  68. package/dist/chunk-MPZ3BPUI.js +374 -0
  69. package/dist/chunk-MPZ3BPUI.js.map +1 -0
  70. package/dist/chunk-OC6YSTDX.js +119 -0
  71. package/dist/chunk-OC6YSTDX.js.map +1 -0
  72. package/dist/chunk-RC6MEZB6.js +469 -0
  73. package/dist/chunk-RC6MEZB6.js.map +1 -0
  74. package/dist/chunk-RY3ZFII7.js +3440 -0
  75. package/dist/chunk-RY3ZFII7.js.map +1 -0
  76. package/dist/chunk-TAT6JU3X.js +167 -0
  77. package/dist/chunk-TAT6JU3X.js.map +1 -0
  78. package/dist/chunk-UDZIS2AQ.js +79 -0
  79. package/dist/chunk-UDZIS2AQ.js.map +1 -0
  80. package/dist/chunk-UPLBF4RZ.js +115 -0
  81. package/dist/chunk-UPLBF4RZ.js.map +1 -0
  82. package/dist/chunk-UWQTZMNI.js +154 -0
  83. package/dist/chunk-UWQTZMNI.js.map +1 -0
  84. package/dist/chunk-W4T7PGI2.js +346 -0
  85. package/dist/chunk-W4T7PGI2.js.map +1 -0
  86. package/dist/chunk-XTBKL5BI.js +111 -0
  87. package/dist/chunk-XTBKL5BI.js.map +1 -0
  88. package/dist/chunk-YIJY5DBV.js +399 -0
  89. package/dist/chunk-YIJY5DBV.js.map +1 -0
  90. package/dist/chunk-YUFNYN2H.js +242 -0
  91. package/dist/chunk-YUFNYN2H.js.map +1 -0
  92. package/dist/chunk-Z2PUCXTZ.js +94 -0
  93. package/dist/chunk-Z2PUCXTZ.js.map +1 -0
  94. package/dist/chunk-ZZJOFKAT.js +13 -0
  95. package/dist/chunk-ZZJOFKAT.js.map +1 -0
  96. package/dist/cli/index.js +3661 -0
  97. package/dist/cli/index.js.map +1 -0
  98. package/dist/config-WVMRUOCA.js +13 -0
  99. package/dist/config-WVMRUOCA.js.map +1 -0
  100. package/dist/context-loader-3ORBPMHJ.js +13 -0
  101. package/dist/context-loader-3ORBPMHJ.js.map +1 -0
  102. package/dist/conversation-QDEIDQPH.js +22 -0
  103. package/dist/conversation-QDEIDQPH.js.map +1 -0
  104. package/dist/cost-tracker-RS3W7SVY.js +24 -0
  105. package/dist/cost-tracker-RS3W7SVY.js.map +1 -0
  106. package/dist/delegate-VJCJLYEK.js +29 -0
  107. package/dist/delegate-VJCJLYEK.js.map +1 -0
  108. package/dist/emotional-state-VQVRA6ED.js +206 -0
  109. package/dist/emotional-state-VQVRA6ED.js.map +1 -0
  110. package/dist/env-discovery-2BLVMAIM.js +251 -0
  111. package/dist/env-discovery-2BLVMAIM.js.map +1 -0
  112. package/dist/export-6GCYHEHQ.js +165 -0
  113. package/dist/export-6GCYHEHQ.js.map +1 -0
  114. package/dist/graph-YUIPOSOO.js +14 -0
  115. package/dist/graph-YUIPOSOO.js.map +1 -0
  116. package/dist/harness-LCHA3DWP.js +10 -0
  117. package/dist/harness-LCHA3DWP.js.map +1 -0
  118. package/dist/harness-WE4SLCML.js +26 -0
  119. package/dist/harness-WE4SLCML.js.map +1 -0
  120. package/dist/health-NZ6WNIMV.js +23 -0
  121. package/dist/health-NZ6WNIMV.js.map +1 -0
  122. package/dist/index.d.ts +3612 -0
  123. package/dist/index.js +13501 -0
  124. package/dist/index.js.map +1 -0
  125. package/dist/indexer-LONANRRM.js +16 -0
  126. package/dist/indexer-LONANRRM.js.map +1 -0
  127. package/dist/instinct-learner-SRM72DHF.js +20 -0
  128. package/dist/instinct-learner-SRM72DHF.js.map +1 -0
  129. package/dist/intake-4M3HNU43.js +21 -0
  130. package/dist/intake-4M3HNU43.js.map +1 -0
  131. package/dist/intelligence-HJOCA4SJ.js +1081 -0
  132. package/dist/intelligence-HJOCA4SJ.js.map +1 -0
  133. package/dist/journal-WANJL3MI.js +24 -0
  134. package/dist/journal-WANJL3MI.js.map +1 -0
  135. package/dist/loader-C3TKIKZR.js +23 -0
  136. package/dist/loader-C3TKIKZR.js.map +1 -0
  137. package/dist/mcp-WTQJJZAO.js +15 -0
  138. package/dist/mcp-WTQJJZAO.js.map +1 -0
  139. package/dist/mcp-discovery-WPAQFL6S.js +377 -0
  140. package/dist/mcp-discovery-WPAQFL6S.js.map +1 -0
  141. package/dist/mcp-installer-6O2XXD3V.js +394 -0
  142. package/dist/mcp-installer-6O2XXD3V.js.map +1 -0
  143. package/dist/metrics-KXGNFAAB.js +20 -0
  144. package/dist/metrics-KXGNFAAB.js.map +1 -0
  145. package/dist/primitive-registry-I6VTIR4W.js +512 -0
  146. package/dist/primitive-registry-I6VTIR4W.js.map +1 -0
  147. package/dist/project-discovery-C4UMD7JI.js +246 -0
  148. package/dist/project-discovery-C4UMD7JI.js.map +1 -0
  149. package/dist/provider-LQHQX7Z7.js +26 -0
  150. package/dist/provider-LQHQX7Z7.js.map +1 -0
  151. package/dist/provider-SXPQZ74H.js +28 -0
  152. package/dist/provider-SXPQZ74H.js.map +1 -0
  153. package/dist/rate-limiter-RLRVM325.js +22 -0
  154. package/dist/rate-limiter-RLRVM325.js.map +1 -0
  155. package/dist/rule-engine-YGQ3RYZM.js +182 -0
  156. package/dist/rule-engine-YGQ3RYZM.js.map +1 -0
  157. package/dist/scaffold-A3VRRCBV.js +347 -0
  158. package/dist/scaffold-A3VRRCBV.js.map +1 -0
  159. package/dist/scheduler-XHHIVHRI.js +397 -0
  160. package/dist/scheduler-XHHIVHRI.js.map +1 -0
  161. package/dist/search-V3W5JMJG.js +75 -0
  162. package/dist/search-V3W5JMJG.js.map +1 -0
  163. package/dist/semantic-search-2DTOO5UX.js +241 -0
  164. package/dist/semantic-search-2DTOO5UX.js.map +1 -0
  165. package/dist/serve-DTQ3HENY.js +291 -0
  166. package/dist/serve-DTQ3HENY.js.map +1 -0
  167. package/dist/sessions-CZGVXKQE.js +21 -0
  168. package/dist/sessions-CZGVXKQE.js.map +1 -0
  169. package/dist/sources-RW5DT56F.js +32 -0
  170. package/dist/sources-RW5DT56F.js.map +1 -0
  171. package/dist/starter-packs-76YUVHEU.js +893 -0
  172. package/dist/starter-packs-76YUVHEU.js.map +1 -0
  173. package/dist/state-GMXILIHW.js +13 -0
  174. package/dist/state-GMXILIHW.js.map +1 -0
  175. package/dist/state-merge-NKO5FRBA.js +174 -0
  176. package/dist/state-merge-NKO5FRBA.js.map +1 -0
  177. package/dist/telemetry-UC6PBXC7.js +22 -0
  178. package/dist/telemetry-UC6PBXC7.js.map +1 -0
  179. package/dist/tool-executor-MJ7IG7PQ.js +28 -0
  180. package/dist/tool-executor-MJ7IG7PQ.js.map +1 -0
  181. package/dist/tools-DZ4KETET.js +20 -0
  182. package/dist/tools-DZ4KETET.js.map +1 -0
  183. package/dist/types-EW7AIB3R.js +18 -0
  184. package/dist/types-EW7AIB3R.js.map +1 -0
  185. package/dist/types-WGDLSPO6.js +16 -0
  186. package/dist/types-WGDLSPO6.js.map +1 -0
  187. package/dist/universal-installer-QGS4SJGX.js +578 -0
  188. package/dist/universal-installer-QGS4SJGX.js.map +1 -0
  189. package/dist/validator-7WXMDIHH.js +22 -0
  190. package/dist/validator-7WXMDIHH.js.map +1 -0
  191. package/dist/verification-gate-FYXUX6LH.js +246 -0
  192. package/dist/verification-gate-FYXUX6LH.js.map +1 -0
  193. package/dist/versioning-Z3XNE2Q2.js +271 -0
  194. package/dist/versioning-Z3XNE2Q2.js.map +1 -0
  195. package/dist/watcher-ISJC7YKL.js +109 -0
  196. package/dist/watcher-ISJC7YKL.js.map +1 -0
  197. package/dist/web-server-DD7ZOP46.js +28 -0
  198. package/dist/web-server-DD7ZOP46.js.map +1 -0
  199. package/package.json +76 -0
  200. package/sources.yaml +121 -0
  201. package/templates/assistant/CORE.md +24 -0
  202. package/templates/assistant/SYSTEM.md +24 -0
  203. package/templates/assistant/config.yaml +51 -0
  204. package/templates/base/CORE.md +17 -0
  205. package/templates/base/SYSTEM.md +24 -0
  206. package/templates/base/config.yaml +51 -0
  207. package/templates/claude-opus/config.yaml +51 -0
  208. package/templates/code-reviewer/CORE.md +25 -0
  209. package/templates/code-reviewer/SYSTEM.md +30 -0
  210. package/templates/code-reviewer/config.yaml +51 -0
  211. package/templates/gpt4/config.yaml +51 -0
  212. package/templates/local/config.yaml +51 -0
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/runtime/auto-processor.ts"],"sourcesContent":["import { readFileSync, writeFileSync, existsSync, readdirSync } from 'fs';\nimport { basename, relative, join } from 'path';\nimport matter from 'gray-matter';\nimport { getPrimitiveDirs } from '../core/types.js';\n\n// --- Types ---\n\n/** Result of auto-processing a file */\nexport interface AutoProcessResult {\n /** Path that was processed */\n path: string;\n /** Whether any changes were made */\n modified: boolean;\n /** List of fixes applied */\n fixes: string[];\n /** Errors encountered */\n errors: string[];\n}\n\n/** Options for auto-processing */\nexport interface AutoProcessOptions {\n /** Harness directory (for inferring primitive type from path) */\n harnessDir: string;\n /** Whether to generate frontmatter (default: true) */\n generateFrontmatter?: boolean;\n /** Whether to generate L0/L1 summaries (default: true) */\n generateSummaries?: boolean;\n}\n\n// --- L0/L1 Regex ---\n\nconst L0_REGEX = /<!--\\s*L0:\\s*(.*?)\\s*-->/;\nconst L1_REGEX = /<!--\\s*L1:\\s*([\\s\\S]*?)\\s*-->/;\n\n// --- Frontmatter detection ---\n\n/**\n * Infer the primitive type from the file's directory relative to harness root.\n * e.g., \"rules/my-rule.md\" → \"rule\", \"skills/coding.md\" → \"skill\"\n */\nfunction inferTypeFromPath(filePath: string, harnessDir: string): string | null {\n const rel = relative(harnessDir, filePath);\n const topDir = rel.split('/')[0];\n\n const dirToType: Record<string, string> = {\n rules: 'rule',\n instincts: 'instinct',\n skills: 'skill',\n playbooks: 'playbook',\n workflows: 'workflow',\n tools: 'tool',\n agents: 'agent',\n };\n\n return dirToType[topDir] ?? 
null;\n}\n\n/**\n * Derive an id from a filename.\n * \"my-cool-rule.md\" → \"my-cool-rule\"\n */\nfunction deriveId(filePath: string): string {\n return basename(filePath, '.md').replace(/[^a-z0-9-]/gi, '-').toLowerCase();\n}\n\n/**\n * Generate L0 summary from content (first heading or first non-empty line).\n */\nfunction generateL0(content: string): string | null {\n // Try first markdown heading\n const headingMatch = content.match(/^#\\s+(.+)$/m);\n if (headingMatch) {\n const text = headingMatch[1].trim();\n return text.length > 120 ? text.slice(0, 117) + '...' : text;\n }\n\n // Fall back to first non-empty, non-comment line\n const lines = content.split('\\n');\n for (const line of lines) {\n const trimmed = line.trim();\n if (trimmed.length > 0 && !trimmed.startsWith('<!--') && !trimmed.startsWith('---')) {\n return trimmed.length > 120 ? trimmed.slice(0, 117) + '...' : trimmed;\n }\n }\n\n return null;\n}\n\n/**\n * Generate L1 summary from content (first substantial paragraph).\n */\nfunction generateL1(content: string): string | null {\n const paragraphs = content.split(/\\n{2,}/).filter((p) => {\n const trimmed = p.trim();\n return (\n trimmed.length > 0 &&\n !trimmed.startsWith('<!--') &&\n !trimmed.startsWith('#') &&\n !trimmed.startsWith('---')\n );\n });\n\n if (paragraphs.length === 0) return null;\n\n const para = paragraphs[0].replace(/\\n/g, ' ').trim();\n return para.length > 300 ? para.slice(0, 297) + '...' : para;\n}\n\n// --- Main Auto-Processor ---\n\n/**\n * Auto-process a markdown primitive file:\n * 1. Generate frontmatter if missing (id from filename, created, status, tags, author)\n * 2. Generate L0 summary if missing (from heading or first line)\n * 3. 
Generate L1 summary if missing (from first paragraph)\n *\n * This is designed to run on file save (via watcher) to ensure\n * all primitives have valid structure without user intervention.\n */\nexport function autoProcessFile(\n filePath: string,\n options: AutoProcessOptions,\n): AutoProcessResult {\n const result: AutoProcessResult = {\n path: filePath,\n modified: false,\n fixes: [],\n errors: [],\n };\n\n if (!existsSync(filePath) || !filePath.endsWith('.md')) {\n return result;\n }\n\n // Skip index files and special files\n const filename = basename(filePath);\n if (filename.startsWith('_') || filename === 'CORE.md' || filename === 'SYSTEM.md' || filename === 'state.md') {\n return result;\n }\n\n let raw: string;\n try {\n raw = readFileSync(filePath, 'utf-8');\n } catch (err) {\n result.errors.push(`Failed to read: ${err instanceof Error ? err.message : String(err)}`);\n return result;\n }\n\n // Empty file — nothing to process\n if (raw.trim().length === 0) {\n return result;\n }\n\n const generateFrontmatter = options.generateFrontmatter !== false;\n const generateSummaries = options.generateSummaries !== false;\n\n let parsed: ReturnType<typeof matter>;\n try {\n parsed = matter(raw);\n } catch {\n // If frontmatter parsing fails, try to add basic frontmatter\n if (generateFrontmatter) {\n const id = deriveId(filePath);\n const type = inferTypeFromPath(filePath, options.harnessDir);\n const tags = type ? 
[type] : [];\n const data = {\n id,\n created: new Date().toISOString().split('T')[0],\n author: 'infrastructure' as const,\n status: 'active' as const,\n tags,\n };\n const newContent = matter.stringify(raw, data);\n writeFileSync(filePath, newContent, 'utf-8');\n result.modified = true;\n result.fixes.push(`Added frontmatter (id: ${id})`);\n }\n return result;\n }\n\n const data = { ...parsed.data } as Record<string, unknown>;\n let content = parsed.content;\n let modified = false;\n\n // --- Frontmatter fixes ---\n if (generateFrontmatter) {\n // Fix: Missing id\n if (!data.id) {\n data.id = deriveId(filePath);\n result.fixes.push(`Added id: \"${data.id}\"`);\n modified = true;\n }\n\n // Fix: Missing created\n if (!data.created) {\n data.created = new Date().toISOString().split('T')[0];\n result.fixes.push('Added created date');\n modified = true;\n }\n\n // Fix: Missing author — infrastructure for auto-generated\n if (!data.author) {\n data.author = 'human';\n result.fixes.push('Added author: \"human\"');\n modified = true;\n }\n\n // Fix: Missing status\n if (!data.status) {\n data.status = 'active';\n result.fixes.push('Added status: \"active\"');\n modified = true;\n }\n\n // Fix: Missing tags — add type tag from directory\n if (!Array.isArray(data.tags) || data.tags.length === 0) {\n const type = inferTypeFromPath(filePath, options.harnessDir);\n data.tags = type ? 
[type] : [];\n if (type) {\n result.fixes.push(`Added tag: \"${type}\"`);\n modified = true;\n }\n }\n }\n\n // --- L0/L1 summary fixes ---\n if (generateSummaries) {\n // Fix: Missing L0\n if (!L0_REGEX.test(content)) {\n const l0 = generateL0(content);\n if (l0) {\n content = `<!-- L0: ${l0} -->\\n${content}`;\n result.fixes.push('Generated L0 summary');\n modified = true;\n }\n }\n\n // Fix: Missing L1\n if (!L1_REGEX.test(content)) {\n const l1 = generateL1(content);\n if (l1) {\n const l0Pos = content.indexOf('-->');\n if (l0Pos !== -1) {\n const insertPos = l0Pos + 3;\n content = content.slice(0, insertPos) + `\\n<!-- L1: ${l1} -->` + content.slice(insertPos);\n } else {\n content = `<!-- L1: ${l1} -->\\n${content}`;\n }\n result.fixes.push('Generated L1 summary');\n modified = true;\n }\n }\n }\n\n // Write back if modified\n if (modified) {\n try {\n const newRaw = matter.stringify(content, data);\n writeFileSync(filePath, newRaw, 'utf-8');\n result.modified = true;\n } catch (err) {\n result.errors.push(`Failed to write: ${err instanceof Error ? 
err.message : String(err)}`);\n }\n }\n\n return result;\n}\n\n/**\n * Auto-process all primitives in a harness directory.\n * Useful for batch processing after init or on first dev startup.\n */\nexport function autoProcessAll(\n harnessDir: string,\n options?: { generateFrontmatter?: boolean; generateSummaries?: boolean },\n): AutoProcessResult[] {\n const results: AutoProcessResult[] = [];\n const dirs = getPrimitiveDirs();\n\n for (const dir of dirs) {\n const dirPath = join(harnessDir, dir);\n if (!existsSync(dirPath)) continue;\n\n const files = readdirSync(dirPath).filter((f: string) => f.endsWith('.md') && !f.startsWith('_'));\n for (const file of files) {\n const filePath = join(dirPath, file);\n const result = autoProcessFile(filePath, {\n harnessDir,\n generateFrontmatter: options?.generateFrontmatter,\n generateSummaries: options?.generateSummaries,\n });\n if (result.modified || result.errors.length > 0) {\n results.push(result);\n }\n }\n }\n\n return results;\n}\n"],"mappings":";;;;;;;AAAA,SAAS,cAAc,eAAe,YAAY,mBAAmB;AACrE,SAAS,UAAU,UAAU,YAAY;AACzC,OAAO,YAAY;AA6BnB,IAAM,WAAW;AACjB,IAAM,WAAW;AAQjB,SAAS,kBAAkB,UAAkB,YAAmC;AAC9E,QAAM,MAAM,SAAS,YAAY,QAAQ;AACzC,QAAM,SAAS,IAAI,MAAM,GAAG,EAAE,CAAC;AAE/B,QAAM,YAAoC;AAAA,IACxC,OAAO;AAAA,IACP,WAAW;AAAA,IACX,QAAQ;AAAA,IACR,WAAW;AAAA,IACX,WAAW;AAAA,IACX,OAAO;AAAA,IACP,QAAQ;AAAA,EACV;AAEA,SAAO,UAAU,MAAM,KAAK;AAC9B;AAMA,SAAS,SAAS,UAA0B;AAC1C,SAAO,SAAS,UAAU,KAAK,EAAE,QAAQ,gBAAgB,GAAG,EAAE,YAAY;AAC5E;AAKA,SAAS,WAAW,SAAgC;AAElD,QAAM,eAAe,QAAQ,MAAM,aAAa;AAChD,MAAI,cAAc;AAChB,UAAM,OAAO,aAAa,CAAC,EAAE,KAAK;AAClC,WAAO,KAAK,SAAS,MAAM,KAAK,MAAM,GAAG,GAAG,IAAI,QAAQ;AAAA,EAC1D;AAGA,QAAM,QAAQ,QAAQ,MAAM,IAAI;AAChC,aAAW,QAAQ,OAAO;AACxB,UAAM,UAAU,KAAK,KAAK;AAC1B,QAAI,QAAQ,SAAS,KAAK,CAAC,QAAQ,WAAW,MAAM,KAAK,CAAC,QAAQ,WAAW,KAAK,GAAG;AACnF,aAAO,QAAQ,SAAS,MAAM,QAAQ,MAAM,GAAG,GAAG,IAAI,QAAQ;AAAA,IAChE;AAAA,EACF;AAEA,SAAO;AACT;AAKA,SAAS,WAAW,SAAgC;AAClD,QAAM,aAAa,QAAQ,MAAM,QAAQ,EAAE,OAAO,CAAC,MAAM;AACvD,UAAM,UAAU,EAAE,KAAK;AACvB,WACE,QA
AQ,SAAS,KACjB,CAAC,QAAQ,WAAW,MAAM,KAC1B,CAAC,QAAQ,WAAW,GAAG,KACvB,CAAC,QAAQ,WAAW,KAAK;AAAA,EAE7B,CAAC;AAED,MAAI,WAAW,WAAW,EAAG,QAAO;AAEpC,QAAM,OAAO,WAAW,CAAC,EAAE,QAAQ,OAAO,GAAG,EAAE,KAAK;AACpD,SAAO,KAAK,SAAS,MAAM,KAAK,MAAM,GAAG,GAAG,IAAI,QAAQ;AAC1D;AAaO,SAAS,gBACd,UACA,SACmB;AACnB,QAAM,SAA4B;AAAA,IAChC,MAAM;AAAA,IACN,UAAU;AAAA,IACV,OAAO,CAAC;AAAA,IACR,QAAQ,CAAC;AAAA,EACX;AAEA,MAAI,CAAC,WAAW,QAAQ,KAAK,CAAC,SAAS,SAAS,KAAK,GAAG;AACtD,WAAO;AAAA,EACT;AAGA,QAAM,WAAW,SAAS,QAAQ;AAClC,MAAI,SAAS,WAAW,GAAG,KAAK,aAAa,aAAa,aAAa,eAAe,aAAa,YAAY;AAC7G,WAAO;AAAA,EACT;AAEA,MAAI;AACJ,MAAI;AACF,UAAM,aAAa,UAAU,OAAO;AAAA,EACtC,SAAS,KAAK;AACZ,WAAO,OAAO,KAAK,mBAAmB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AACxF,WAAO;AAAA,EACT;AAGA,MAAI,IAAI,KAAK,EAAE,WAAW,GAAG;AAC3B,WAAO;AAAA,EACT;AAEA,QAAM,sBAAsB,QAAQ,wBAAwB;AAC5D,QAAM,oBAAoB,QAAQ,sBAAsB;AAExD,MAAI;AACJ,MAAI;AACF,aAAS,OAAO,GAAG;AAAA,EACrB,QAAQ;AAEN,QAAI,qBAAqB;AACvB,YAAM,KAAK,SAAS,QAAQ;AAC5B,YAAM,OAAO,kBAAkB,UAAU,QAAQ,UAAU;AAC3D,YAAM,OAAO,OAAO,CAAC,IAAI,IAAI,CAAC;AAC9B,YAAMA,QAAO;AAAA,QACX;AAAA,QACA,UAAS,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AAAA,QAC9C,QAAQ;AAAA,QACR,QAAQ;AAAA,QACR;AAAA,MACF;AACA,YAAM,aAAa,OAAO,UAAU,KAAKA,KAAI;AAC7C,oBAAc,UAAU,YAAY,OAAO;AAC3C,aAAO,WAAW;AAClB,aAAO,MAAM,KAAK,0BAA0B,EAAE,GAAG;AAAA,IACnD;AACA,WAAO;AAAA,EACT;AAEA,QAAM,OAAO,EAAE,GAAG,OAAO,KAAK;AAC9B,MAAI,UAAU,OAAO;AACrB,MAAI,WAAW;AAGf,MAAI,qBAAqB;AAEvB,QAAI,CAAC,KAAK,IAAI;AACZ,WAAK,KAAK,SAAS,QAAQ;AAC3B,aAAO,MAAM,KAAK,cAAc,KAAK,EAAE,GAAG;AAC1C,iBAAW;AAAA,IACb;AAGA,QAAI,CAAC,KAAK,SAAS;AACjB,WAAK,WAAU,oBAAI,KAAK,GAAE,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC;AACpD,aAAO,MAAM,KAAK,oBAAoB;AACtC,iBAAW;AAAA,IACb;AAGA,QAAI,CAAC,KAAK,QAAQ;AAChB,WAAK,SAAS;AACd,aAAO,MAAM,KAAK,uBAAuB;AACzC,iBAAW;AAAA,IACb;AAGA,QAAI,CAAC,KAAK,QAAQ;AAChB,WAAK,SAAS;AACd,aAAO,MAAM,KAAK,wBAAwB;AAC1C,iBAAW;AAAA,IACb;AAGA,QAAI,CAAC,MAAM,QAAQ,KAAK,IAAI,KAAK,KAAK,KAAK,WAAW,GAAG;AACvD,YAAM,OAAO,kBAAkB,UAAU,QAAQ,UAAU;AAC3D,WAAK,OAAO,OAAO,CAAC,IAAI,IAAI,CAAC;AAC7B,UAAI,MAAM;AACR,eAAO,MAAM,KAAK,eAAe,IAAI,GA
AG;AACxC,mBAAW;AAAA,MACb;AAAA,IACF;AAAA,EACF;AAGA,MAAI,mBAAmB;AAErB,QAAI,CAAC,SAAS,KAAK,OAAO,GAAG;AAC3B,YAAM,KAAK,WAAW,OAAO;AAC7B,UAAI,IAAI;AACN,kBAAU,YAAY,EAAE;AAAA,EAAS,OAAO;AACxC,eAAO,MAAM,KAAK,sBAAsB;AACxC,mBAAW;AAAA,MACb;AAAA,IACF;AAGA,QAAI,CAAC,SAAS,KAAK,OAAO,GAAG;AAC3B,YAAM,KAAK,WAAW,OAAO;AAC7B,UAAI,IAAI;AACN,cAAM,QAAQ,QAAQ,QAAQ,KAAK;AACnC,YAAI,UAAU,IAAI;AAChB,gBAAM,YAAY,QAAQ;AAC1B,oBAAU,QAAQ,MAAM,GAAG,SAAS,IAAI;AAAA,WAAc,EAAE,SAAS,QAAQ,MAAM,SAAS;AAAA,QAC1F,OAAO;AACL,oBAAU,YAAY,EAAE;AAAA,EAAS,OAAO;AAAA,QAC1C;AACA,eAAO,MAAM,KAAK,sBAAsB;AACxC,mBAAW;AAAA,MACb;AAAA,IACF;AAAA,EACF;AAGA,MAAI,UAAU;AACZ,QAAI;AACF,YAAM,SAAS,OAAO,UAAU,SAAS,IAAI;AAC7C,oBAAc,UAAU,QAAQ,OAAO;AACvC,aAAO,WAAW;AAAA,IACpB,SAAS,KAAK;AACZ,aAAO,OAAO,KAAK,oBAAoB,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,IAC3F;AAAA,EACF;AAEA,SAAO;AACT;AAMO,SAAS,eACd,YACA,SACqB;AACrB,QAAM,UAA+B,CAAC;AACtC,QAAM,OAAO,iBAAiB;AAE9B,aAAW,OAAO,MAAM;AACtB,UAAM,UAAU,KAAK,YAAY,GAAG;AACpC,QAAI,CAAC,WAAW,OAAO,EAAG;AAE1B,UAAM,QAAQ,YAAY,OAAO,EAAE,OAAO,CAAC,MAAc,EAAE,SAAS,KAAK,KAAK,CAAC,EAAE,WAAW,GAAG,CAAC;AAChG,eAAW,QAAQ,OAAO;AACxB,YAAM,WAAW,KAAK,SAAS,IAAI;AACnC,YAAM,SAAS,gBAAgB,UAAU;AAAA,QACvC;AAAA,QACA,qBAAqB,SAAS;AAAA,QAC9B,mBAAmB,SAAS;AAAA,MAC9B,CAAC;AACD,UAAI,OAAO,YAAY,OAAO,OAAO,SAAS,GAAG;AAC/C,gBAAQ,KAAK,MAAM;AAAA,MACrB;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;","names":["data"]}
@@ -0,0 +1,374 @@
1
+ #!/usr/bin/env node
2
+
3
+ import {
4
+ buildSystemPrompt
5
+ } from "./chunk-UWQTZMNI.js";
6
+ import {
7
+ createSessionId,
8
+ writeSession
9
+ } from "./chunk-DTTXPHFW.js";
10
+ import {
11
+ withFileLockSync
12
+ } from "./chunk-Z2PUCXTZ.js";
13
+ import {
14
+ estimateTokens
15
+ } from "./chunk-UPLBF4RZ.js";
16
+ import {
17
+ log
18
+ } from "./chunk-BSKDOFRT.js";
19
+ import {
20
+ generateWithMessages,
21
+ getModel,
22
+ streamWithMessages
23
+ } from "./chunk-IZ6UZ3ZL.js";
24
+ import {
25
+ loadConfig
26
+ } from "./chunk-CHJ5GNZC.js";
27
+
28
+ // src/runtime/conversation.ts
29
+ import { readFileSync, writeFileSync, existsSync, mkdirSync } from "fs";
30
+ import { join } from "path";
31
+ var CONVERSATION_BUDGET_RATIO = 0.5;
32
+ var MIN_MESSAGES = 2;
33
+ var MAX_MESSAGES = 100;
34
+ var Conversation = class {
35
+ messages = [];
36
+ harnessDir;
37
+ apiKey;
38
+ systemPrompt = "";
39
+ systemPromptTokens = 0;
40
+ maxContextTokens = 2e5;
41
+ modelOverride;
42
+ providerOverride;
43
+ recordSessions = true;
44
+ tools;
45
+ maxToolSteps;
46
+ constructor(harnessDir, apiKey, options) {
47
+ this.harnessDir = harnessDir;
48
+ this.apiKey = apiKey;
49
+ this.tools = options?.tools ?? {};
50
+ this.maxToolSteps = options?.maxToolSteps ?? 5;
51
+ if (options?.recordSessions !== void 0) {
52
+ this.recordSessions = options.recordSessions;
53
+ }
54
+ }
55
+ /** Update the tool set (e.g., after MCP servers connect) */
56
+ setTools(tools) {
57
+ this.tools = tools;
58
+ }
59
+ setModelOverride(modelId) {
60
+ if (!modelId || !modelId.trim()) {
61
+ throw new Error("modelId cannot be empty");
62
+ }
63
+ this.modelOverride = modelId.trim();
64
+ }
65
+ setProviderOverride(provider) {
66
+ if (!provider || !provider.trim()) {
67
+ throw new Error("provider cannot be empty");
68
+ }
69
+ this.providerOverride = provider.trim();
70
+ }
71
+ async init() {
72
+ const config = this.getConfig();
73
+ const ctx = buildSystemPrompt(this.harnessDir, config);
74
+ this.systemPrompt = ctx.systemPrompt;
75
+ this.systemPromptTokens = ctx.budget.used_tokens;
76
+ this.maxContextTokens = config.model.max_tokens;
77
+ const jsonlPath = join(this.harnessDir, "memory", "context.jsonl");
78
+ const legacyPath = join(this.harnessDir, "memory", "context.md");
79
+ if (existsSync(jsonlPath)) {
80
+ const raw = readFileSync(jsonlPath, "utf-8");
81
+ this.messages = parseJsonlContext(raw);
82
+ } else if (existsSync(legacyPath)) {
83
+ const raw = readFileSync(legacyPath, "utf-8");
84
+ this.messages = parseLegacyContext(raw);
85
+ this.save();
86
+ }
87
+ }
88
+ getConfig() {
89
+ const config = loadConfig(this.harnessDir);
90
+ if (this.modelOverride || this.providerOverride) {
91
+ return {
92
+ ...config,
93
+ model: {
94
+ ...config.model,
95
+ ...this.modelOverride ? { id: this.modelOverride } : {},
96
+ ...this.providerOverride ? { provider: this.providerOverride } : {}
97
+ }
98
+ };
99
+ }
100
+ return config;
101
+ }
102
+ /**
103
+ * Token budget available for conversation messages.
104
+ * Allocates CONVERSATION_BUDGET_RATIO of (max_tokens - system_prompt) to messages.
105
+ */
106
+ getMessageBudget() {
107
+ const available = this.maxContextTokens - this.systemPromptTokens;
108
+ return Math.floor(available * CONVERSATION_BUDGET_RATIO);
109
+ }
110
+ /**
111
+ * Trim oldest messages until token budget is satisfied.
112
+ * Always retains at least MIN_MESSAGES.
113
+ */
114
+ trimToTokenBudget() {
115
+ const budget = this.getMessageBudget();
116
+ while (this.messages.length > MAX_MESSAGES) {
117
+ this.messages.shift();
118
+ }
119
+ let totalTokens = this.messages.reduce((sum, m) => sum + m.tokens, 0);
120
+ while (totalTokens > budget && this.messages.length > MIN_MESSAGES) {
121
+ const removed = this.messages.shift();
122
+ if (removed) {
123
+ totalTokens -= removed.tokens;
124
+ }
125
+ }
126
+ }
127
+ toModelMessages() {
128
+ return this.messages.map((m) => ({
129
+ role: m.role,
130
+ content: m.content
131
+ }));
132
+ }
133
+ async send(userMessage) {
134
+ if (!userMessage || !userMessage.trim()) {
135
+ throw new Error("Message cannot be empty");
136
+ }
137
+ this.messages.push({
138
+ role: "user",
139
+ content: userMessage,
140
+ tokens: estimateTokens(userMessage)
141
+ });
142
+ this.trimToTokenBudget();
143
+ const config = this.getConfig();
144
+ const model = getModel(config, this.apiKey);
145
+ const started = (/* @__PURE__ */ new Date()).toISOString();
146
+ const hasTools = Object.keys(this.tools).length > 0;
147
+ const result = await generateWithMessages({
148
+ model,
149
+ system: this.systemPrompt,
150
+ messages: this.toModelMessages(),
151
+ ...hasTools ? { tools: this.tools, maxToolSteps: this.maxToolSteps } : {}
152
+ });
153
+ this.messages.push({
154
+ role: "assistant",
155
+ content: result.text,
156
+ tokens: estimateTokens(result.text)
157
+ });
158
+ try {
159
+ this.save();
160
+ } catch (err) {
161
+ log.warn(`Failed to save conversation context: ${err instanceof Error ? err.message : String(err)}`);
162
+ }
163
+ if (this.recordSessions) {
164
+ try {
165
+ this.writeSessionRecord(
166
+ config,
167
+ userMessage,
168
+ result.text,
169
+ result.usage.totalTokens,
170
+ started,
171
+ result.steps,
172
+ result.toolCalls
173
+ );
174
+ } catch (err) {
175
+ log.warn(`Failed to record chat session: ${err instanceof Error ? err.message : String(err)}`);
176
+ }
177
+ }
178
+ return {
179
+ text: result.text,
180
+ usage: result.usage,
181
+ steps: result.steps,
182
+ toolCalls: result.toolCalls
183
+ };
184
+ }
185
+ sendStream(userMessage) {
186
+ if (!userMessage || !userMessage.trim()) {
187
+ throw new Error("Message cannot be empty");
188
+ }
189
+ this.messages.push({
190
+ role: "user",
191
+ content: userMessage,
192
+ tokens: estimateTokens(userMessage)
193
+ });
194
+ this.trimToTokenBudget();
195
+ const config = this.getConfig();
196
+ const model = getModel(config, this.apiKey);
197
+ const started = (/* @__PURE__ */ new Date()).toISOString();
198
+ const hasTools = Object.keys(this.tools).length > 0;
199
+ const streamResult = streamWithMessages({
200
+ model,
201
+ system: this.systemPrompt,
202
+ messages: this.toModelMessages(),
203
+ ...hasTools ? { tools: this.tools, maxToolSteps: this.maxToolSteps } : {}
204
+ });
205
+ let resolveResult;
206
+ const resultPromise = new Promise((res) => {
207
+ resolveResult = res;
208
+ });
209
+ const self = this;
210
+ async function* generateStream() {
211
+ let fullResponse = "";
212
+ for await (const chunk of streamResult.textStream) {
213
+ fullResponse += chunk;
214
+ yield chunk;
215
+ }
216
+ self.messages.push({
217
+ role: "assistant",
218
+ content: fullResponse,
219
+ tokens: estimateTokens(fullResponse)
220
+ });
221
+ try {
222
+ self.save();
223
+ } catch (err) {
224
+ log.warn(`Failed to save conversation context: ${err instanceof Error ? err.message : String(err)}`);
225
+ }
226
+ let usageResult = { totalTokens: 0 };
227
+ let stepsResult = 1;
228
+ let toolCallsResult = [];
229
+ try {
230
+ [usageResult, stepsResult, toolCallsResult] = await Promise.all([
231
+ streamResult.usage,
232
+ streamResult.steps,
233
+ streamResult.toolCalls
234
+ ]);
235
+ } catch (err) {
236
+ log.warn(`Failed to resolve stream metadata: ${err instanceof Error ? err.message : String(err)}`);
237
+ }
238
+ if (self.recordSessions) {
239
+ try {
240
+ self.writeSessionRecord(
241
+ config,
242
+ userMessage,
243
+ fullResponse,
244
+ usageResult.totalTokens,
245
+ started,
246
+ stepsResult,
247
+ toolCallsResult
248
+ );
249
+ } catch (err) {
250
+ log.warn(`Failed to record chat session: ${err instanceof Error ? err.message : String(err)}`);
251
+ }
252
+ }
253
+ resolveResult({
254
+ text: fullResponse,
255
+ usage: usageResult,
256
+ steps: stepsResult,
257
+ toolCalls: toolCallsResult
258
+ });
259
+ }
260
+ return {
261
+ textStream: generateStream(),
262
+ result: resultPromise
263
+ };
264
+ }
265
+ save() {
266
+ const memoryDir = join(this.harnessDir, "memory");
267
+ if (!existsSync(memoryDir)) {
268
+ mkdirSync(memoryDir, { recursive: true });
269
+ }
270
+ const jsonlPath = join(memoryDir, "context.jsonl");
271
+ const lines = this.messages.map(
272
+ (m) => JSON.stringify({ role: m.role, content: m.content })
273
+ );
274
+ withFileLockSync(this.harnessDir, "context.jsonl", () => {
275
+ writeFileSync(jsonlPath, lines.join("\n"), "utf-8");
276
+ });
277
+ }
278
+ clear() {
279
+ this.messages = [];
280
+ const jsonlPath = join(this.harnessDir, "memory", "context.jsonl");
281
+ const legacyPath = join(this.harnessDir, "memory", "context.md");
282
+ if (existsSync(jsonlPath)) {
283
+ writeFileSync(jsonlPath, "", "utf-8");
284
+ }
285
+ if (existsSync(legacyPath)) {
286
+ writeFileSync(legacyPath, "", "utf-8");
287
+ }
288
+ }
289
+ writeSessionRecord(config, prompt, response, totalTokens, started, steps, toolCalls) {
290
+ const sessionId = createSessionId();
291
+ const ended = (/* @__PURE__ */ new Date()).toISOString();
292
+ const session = {
293
+ id: sessionId,
294
+ started,
295
+ ended,
296
+ prompt: prompt.slice(0, 500),
297
+ summary: response.slice(0, 200),
298
+ tokens_used: totalTokens,
299
+ model_id: config.model.id,
300
+ steps: steps ?? 1,
301
+ tool_calls: toolCalls && toolCalls.length > 0 ? toolCalls : void 0
302
+ };
303
+ writeSession(this.harnessDir, session);
304
+ }
305
+ getHistory() {
306
+ return this.messages.map((m) => ({ role: m.role, content: m.content }));
307
+ }
308
+ /** Token usage stats for the conversation window */
309
+ getTokenStats() {
310
+ const messageTokens = this.messages.reduce((sum, m) => sum + m.tokens, 0);
311
+ return {
312
+ messageTokens,
313
+ budget: this.getMessageBudget(),
314
+ messageCount: this.messages.length
315
+ };
316
+ }
317
+ };
318
/**
 * Parse the JSON-lines context format: one { role, content } object
 * per line. Blank and malformed lines are skipped silently; token
 * counts are re-estimated on load rather than persisted.
 */
function parseJsonlContext(raw) {
  if (!raw.trim()) return [];
  const messages = [];
  for (const line of raw.split("\n")) {
    const candidate = line.trim();
    if (!candidate) continue;
    let parsed;
    try {
      parsed = JSON.parse(candidate);
    } catch {
      continue; // skip malformed lines
    }
    // Guard against null / non-object JSON values as well as missing fields.
    if (parsed && parsed.role && parsed.content) {
      messages.push({
        role: parsed.role,
        content: parsed.content,
        tokens: estimateTokens(parsed.content)
      });
    }
  }
  return messages;
}
338
/**
 * Parse the legacy context.md format ("### User" / "### Assistant"
 * section headers). Kept for backward compatibility — callers migrate
 * the result to the JSON-lines format on first load.
 *
 * Improvement: the section-flush logic was duplicated verbatim three
 * times; it is now a single internal closure, so the flush condition
 * cannot drift between the call sites.
 *
 * @param raw full text of the legacy file
 * @returns ordered messages with token counts re-estimated
 */
function parseLegacyContext(raw) {
  const messages = [];
  let currentRole = null;
  let currentContent = [];
  // Close out the section currently being accumulated, if any content exists.
  const flush = () => {
    if (currentRole && currentContent.length > 0) {
      const content = currentContent.join("\n").trim();
      messages.push({ role: currentRole, content, tokens: estimateTokens(content) });
    }
  };
  for (const line of raw.split("\n")) {
    if (line.startsWith("### User")) {
      flush();
      currentRole = "user";
      currentContent = [];
    } else if (line.startsWith("### Assistant")) {
      flush();
      currentRole = "assistant";
      currentContent = [];
    } else if (currentRole) {
      // Lines before the first header are intentionally discarded.
      currentContent.push(line);
    }
  }
  flush();
  return messages;
}
368
+
369
+ export {
370
+ Conversation,
371
+ parseJsonlContext,
372
+ parseLegacyContext
373
+ };
374
+ //# sourceMappingURL=chunk-MPZ3BPUI.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/runtime/conversation.ts"],"sourcesContent":["import { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs';\nimport { join } from 'path';\nimport { getModel, generateWithMessages, streamWithMessages } from '../llm/provider.js';\nimport { loadConfig } from '../core/config.js';\nimport { log } from '../core/logger.js';\nimport { buildSystemPrompt } from './context-loader.js';\nimport { estimateTokens } from '../primitives/loader.js';\nimport { createSessionId, writeSession, type SessionRecord } from './sessions.js';\nimport { withFileLockSync } from './file-lock.js';\nimport type { AIToolSet } from './tool-executor.js';\nimport type { ModelMessage } from '@ai-sdk/provider-utils';\nimport type { HarnessConfig, ToolCallInfo } from '../core/types.js';\n\ninterface Message {\n role: 'user' | 'assistant';\n content: string;\n tokens: number;\n}\n\n/** Persisted context format — one JSON object per line */\ninterface PersistedMessage {\n role: 'user' | 'assistant';\n content: string;\n}\n\n// Reserve 50% of remaining context (after system prompt) for conversation history\nconst CONVERSATION_BUDGET_RATIO = 0.50;\n// Minimum messages to always keep (latest exchange)\nconst MIN_MESSAGES = 2;\n// Hard cap on message count regardless of tokens\nconst MAX_MESSAGES = 100;\n\nexport interface ConversationOptions {\n recordSessions?: boolean;\n /** AI SDK tools available during conversation */\n tools?: AIToolSet;\n /** Maximum tool-use roundtrips per message (default: 5) */\n maxToolSteps?: number;\n}\n\nexport interface ConversationSendResult {\n text: string;\n usage: { totalTokens: number };\n steps: number;\n toolCalls: ToolCallInfo[];\n}\n\nexport interface ConversationStreamResult {\n /** Async iterable of text chunks — consume with for-await */\n textStream: AsyncIterable<string>;\n /** Resolves after the stream is fully consumed with turn metadata */\n result: Promise<{ text: string; usage: { totalTokens: number }; steps: number; 
toolCalls: ToolCallInfo[] }>;\n}\n\nexport class Conversation {\n private messages: Message[] = [];\n private harnessDir: string;\n private apiKey?: string;\n private systemPrompt: string = '';\n private systemPromptTokens: number = 0;\n private maxContextTokens: number = 200000;\n private modelOverride?: string;\n private providerOverride?: string;\n private recordSessions: boolean = true;\n private tools: AIToolSet;\n private maxToolSteps: number;\n\n constructor(harnessDir: string, apiKey?: string, options?: ConversationOptions) {\n this.harnessDir = harnessDir;\n this.apiKey = apiKey;\n this.tools = options?.tools ?? {};\n this.maxToolSteps = options?.maxToolSteps ?? 5;\n if (options?.recordSessions !== undefined) {\n this.recordSessions = options.recordSessions;\n }\n }\n\n /** Update the tool set (e.g., after MCP servers connect) */\n setTools(tools: AIToolSet): void {\n this.tools = tools;\n }\n\n setModelOverride(modelId: string): void {\n if (!modelId || !modelId.trim()) {\n throw new Error('modelId cannot be empty');\n }\n this.modelOverride = modelId.trim();\n }\n\n setProviderOverride(provider: string): void {\n if (!provider || !provider.trim()) {\n throw new Error('provider cannot be empty');\n }\n this.providerOverride = provider.trim();\n }\n\n async init(): Promise<void> {\n const config = this.getConfig();\n const ctx = buildSystemPrompt(this.harnessDir, config);\n this.systemPrompt = ctx.systemPrompt;\n this.systemPromptTokens = ctx.budget.used_tokens;\n this.maxContextTokens = config.model.max_tokens;\n\n // Load persisted context — try JSON-lines first, fall back to legacy markdown\n const jsonlPath = join(this.harnessDir, 'memory', 'context.jsonl');\n const legacyPath = join(this.harnessDir, 'memory', 'context.md');\n\n if (existsSync(jsonlPath)) {\n const raw = readFileSync(jsonlPath, 'utf-8');\n this.messages = parseJsonlContext(raw);\n } else if (existsSync(legacyPath)) {\n const raw = readFileSync(legacyPath, 'utf-8');\n this.messages = 
parseLegacyContext(raw);\n // Migrate: save in new format immediately\n this.save();\n }\n }\n\n private getConfig(): HarnessConfig {\n const config = loadConfig(this.harnessDir);\n if (this.modelOverride || this.providerOverride) {\n return {\n ...config,\n model: {\n ...config.model,\n ...(this.modelOverride ? { id: this.modelOverride } : {}),\n ...(this.providerOverride ? { provider: this.providerOverride } : {}),\n },\n };\n }\n return config;\n }\n\n /**\n * Token budget available for conversation messages.\n * Allocates CONVERSATION_BUDGET_RATIO of (max_tokens - system_prompt) to messages.\n */\n private getMessageBudget(): number {\n const available = this.maxContextTokens - this.systemPromptTokens;\n return Math.floor(available * CONVERSATION_BUDGET_RATIO);\n }\n\n /**\n * Trim oldest messages until token budget is satisfied.\n * Always retains at least MIN_MESSAGES.\n */\n private trimToTokenBudget(): void {\n const budget = this.getMessageBudget();\n\n // Hard cap on count\n while (this.messages.length > MAX_MESSAGES) {\n this.messages.shift();\n }\n\n // Trim by token budget — drop oldest messages first\n let totalTokens = this.messages.reduce((sum, m) => sum + m.tokens, 0);\n while (totalTokens > budget && this.messages.length > MIN_MESSAGES) {\n const removed = this.messages.shift();\n if (removed) {\n totalTokens -= removed.tokens;\n }\n }\n }\n\n private toModelMessages(): ModelMessage[] {\n return this.messages.map((m): ModelMessage => ({\n role: m.role,\n content: m.content,\n }));\n }\n\n async send(userMessage: string): Promise<ConversationSendResult> {\n if (!userMessage || !userMessage.trim()) {\n throw new Error('Message cannot be empty');\n }\n\n this.messages.push({\n role: 'user',\n content: userMessage,\n tokens: estimateTokens(userMessage),\n });\n\n this.trimToTokenBudget();\n\n const config = this.getConfig();\n const model = getModel(config, this.apiKey);\n\n const started = new Date().toISOString();\n const hasTools = 
Object.keys(this.tools).length > 0;\n\n const result = await generateWithMessages({\n model,\n system: this.systemPrompt,\n messages: this.toModelMessages(),\n ...(hasTools ? { tools: this.tools, maxToolSteps: this.maxToolSteps } : {}),\n });\n\n this.messages.push({\n role: 'assistant',\n content: result.text,\n tokens: estimateTokens(result.text),\n });\n\n try {\n this.save();\n } catch (err) {\n log.warn(`Failed to save conversation context: ${err instanceof Error ? err.message : String(err)}`);\n }\n\n // Record session for this chat turn\n if (this.recordSessions) {\n try {\n this.writeSessionRecord(\n config, userMessage, result.text,\n result.usage.totalTokens, started,\n result.steps, result.toolCalls,\n );\n } catch (err) {\n log.warn(`Failed to record chat session: ${err instanceof Error ? err.message : String(err)}`);\n }\n }\n\n return {\n text: result.text,\n usage: result.usage,\n steps: result.steps,\n toolCalls: result.toolCalls,\n };\n }\n\n sendStream(userMessage: string): ConversationStreamResult {\n if (!userMessage || !userMessage.trim()) {\n throw new Error('Message cannot be empty');\n }\n\n this.messages.push({\n role: 'user',\n content: userMessage,\n tokens: estimateTokens(userMessage),\n });\n\n this.trimToTokenBudget();\n\n const config = this.getConfig();\n const model = getModel(config, this.apiKey);\n const started = new Date().toISOString();\n const hasTools = Object.keys(this.tools).length > 0;\n\n const streamResult = streamWithMessages({\n model,\n system: this.systemPrompt,\n messages: this.toModelMessages(),\n ...(hasTools ? { tools: this.tools, maxToolSteps: this.maxToolSteps } : {}),\n });\n\n // Deferred result — resolves after stream is fully consumed\n let resolveResult: (r: ConversationStreamResult['result'] extends Promise<infer T> ? T : never) => void;\n const resultPromise = new Promise<ConversationStreamResult['result'] extends Promise<infer T> ? 
T : never>((res) => {\n resolveResult = res;\n });\n\n const self = this;\n\n async function* generateStream(): AsyncIterable<string> {\n let fullResponse = '';\n\n for await (const chunk of streamResult.textStream) {\n fullResponse += chunk;\n yield chunk;\n }\n\n self.messages.push({\n role: 'assistant',\n content: fullResponse,\n tokens: estimateTokens(fullResponse),\n });\n\n try {\n self.save();\n } catch (err) {\n log.warn(`Failed to save conversation context: ${err instanceof Error ? err.message : String(err)}`);\n }\n\n // Resolve post-stream metadata\n let usageResult = { totalTokens: 0 };\n let stepsResult = 1;\n let toolCallsResult: ToolCallInfo[] = [];\n try {\n [usageResult, stepsResult, toolCallsResult] = await Promise.all([\n streamResult.usage,\n streamResult.steps,\n streamResult.toolCalls,\n ]);\n } catch (err) {\n log.warn(`Failed to resolve stream metadata: ${err instanceof Error ? err.message : String(err)}`);\n }\n\n // Record session for this chat turn\n if (self.recordSessions) {\n try {\n self.writeSessionRecord(\n config, userMessage, fullResponse,\n usageResult.totalTokens, started,\n stepsResult, toolCallsResult,\n );\n } catch (err) {\n log.warn(`Failed to record chat session: ${err instanceof Error ? 
err.message : String(err)}`);\n }\n }\n\n resolveResult({\n text: fullResponse,\n usage: usageResult,\n steps: stepsResult,\n toolCalls: toolCallsResult,\n });\n }\n\n return {\n textStream: generateStream(),\n result: resultPromise,\n };\n }\n\n save(): void {\n const memoryDir = join(this.harnessDir, 'memory');\n if (!existsSync(memoryDir)) {\n mkdirSync(memoryDir, { recursive: true });\n }\n\n // Write JSON-lines format — one JSON object per line, with file lock\n const jsonlPath = join(memoryDir, 'context.jsonl');\n const lines = this.messages.map((m): string =>\n JSON.stringify({ role: m.role, content: m.content } satisfies PersistedMessage)\n );\n withFileLockSync(this.harnessDir, 'context.jsonl', () => {\n writeFileSync(jsonlPath, lines.join('\\n'), 'utf-8');\n });\n }\n\n clear(): void {\n this.messages = [];\n const jsonlPath = join(this.harnessDir, 'memory', 'context.jsonl');\n const legacyPath = join(this.harnessDir, 'memory', 'context.md');\n if (existsSync(jsonlPath)) {\n writeFileSync(jsonlPath, '', 'utf-8');\n }\n if (existsSync(legacyPath)) {\n writeFileSync(legacyPath, '', 'utf-8');\n }\n }\n\n private writeSessionRecord(\n config: HarnessConfig,\n prompt: string,\n response: string,\n totalTokens: number,\n started: string,\n steps?: number,\n toolCalls?: ToolCallInfo[],\n ): void {\n const sessionId = createSessionId();\n const ended = new Date().toISOString();\n\n const session: SessionRecord = {\n id: sessionId,\n started,\n ended,\n prompt: prompt.slice(0, 500),\n summary: response.slice(0, 200),\n tokens_used: totalTokens,\n model_id: config.model.id,\n steps: steps ?? 1,\n tool_calls: toolCalls && toolCalls.length > 0 ? 
toolCalls : undefined,\n };\n\n writeSession(this.harnessDir, session);\n }\n\n getHistory(): Array<{ role: string; content: string }> {\n return this.messages.map((m) => ({ role: m.role, content: m.content }));\n }\n\n /** Token usage stats for the conversation window */\n getTokenStats(): { messageTokens: number; budget: number; messageCount: number } {\n const messageTokens = this.messages.reduce((sum, m) => sum + m.tokens, 0);\n return {\n messageTokens,\n budget: this.getMessageBudget(),\n messageCount: this.messages.length,\n };\n }\n}\n\n/**\n * Parse JSON-lines context format.\n * Each line is a JSON object: { role, content }\n */\nfunction parseJsonlContext(raw: string): Message[] {\n if (!raw.trim()) return [];\n\n const messages: Message[] = [];\n for (const line of raw.split('\\n')) {\n const trimmed = line.trim();\n if (!trimmed) continue;\n try {\n const parsed = JSON.parse(trimmed) as PersistedMessage;\n if (parsed.role && parsed.content) {\n messages.push({\n role: parsed.role,\n content: parsed.content,\n tokens: estimateTokens(parsed.content),\n });\n }\n } catch {\n // Skip malformed lines\n }\n }\n return messages;\n}\n\n/**\n * Parse legacy context.md format (### User / ### Assistant sections).\n * Kept for backward compatibility — auto-migrates on first load.\n */\nfunction parseLegacyContext(raw: string): Message[] {\n const messages: Message[] = [];\n const lines = raw.split('\\n');\n let currentRole: 'user' | 'assistant' | null = null;\n let currentContent: string[] = [];\n\n for (const line of lines) {\n if (line.startsWith('### User')) {\n if (currentRole && currentContent.length > 0) {\n const content = currentContent.join('\\n').trim();\n messages.push({ role: currentRole, content, tokens: estimateTokens(content) });\n }\n currentRole = 'user';\n currentContent = [];\n } else if (line.startsWith('### Assistant')) {\n if (currentRole && currentContent.length > 0) {\n const content = currentContent.join('\\n').trim();\n messages.push({ 
role: currentRole, content, tokens: estimateTokens(content) });\n }\n currentRole = 'assistant';\n currentContent = [];\n } else if (currentRole) {\n currentContent.push(line);\n }\n }\n if (currentRole && currentContent.length > 0) {\n const content = currentContent.join('\\n').trim();\n messages.push({ role: currentRole, content, tokens: estimateTokens(content) });\n }\n\n return messages;\n}\n\n// Export parsers for testing\nexport { parseJsonlContext, parseLegacyContext };\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,SAAS,cAAc,eAAe,YAAY,iBAAiB;AACnE,SAAS,YAAY;AAyBrB,IAAM,4BAA4B;AAElC,IAAM,eAAe;AAErB,IAAM,eAAe;AAwBd,IAAM,eAAN,MAAmB;AAAA,EAChB,WAAsB,CAAC;AAAA,EACvB;AAAA,EACA;AAAA,EACA,eAAuB;AAAA,EACvB,qBAA6B;AAAA,EAC7B,mBAA2B;AAAA,EAC3B;AAAA,EACA;AAAA,EACA,iBAA0B;AAAA,EAC1B;AAAA,EACA;AAAA,EAER,YAAY,YAAoB,QAAiB,SAA+B;AAC9E,SAAK,aAAa;AAClB,SAAK,SAAS;AACd,SAAK,QAAQ,SAAS,SAAS,CAAC;AAChC,SAAK,eAAe,SAAS,gBAAgB;AAC7C,QAAI,SAAS,mBAAmB,QAAW;AACzC,WAAK,iBAAiB,QAAQ;AAAA,IAChC;AAAA,EACF;AAAA;AAAA,EAGA,SAAS,OAAwB;AAC/B,SAAK,QAAQ;AAAA,EACf;AAAA,EAEA,iBAAiB,SAAuB;AACtC,QAAI,CAAC,WAAW,CAAC,QAAQ,KAAK,GAAG;AAC/B,YAAM,IAAI,MAAM,yBAAyB;AAAA,IAC3C;AACA,SAAK,gBAAgB,QAAQ,KAAK;AAAA,EACpC;AAAA,EAEA,oBAAoB,UAAwB;AAC1C,QAAI,CAAC,YAAY,CAAC,SAAS,KAAK,GAAG;AACjC,YAAM,IAAI,MAAM,0BAA0B;AAAA,IAC5C;AACA,SAAK,mBAAmB,SAAS,KAAK;AAAA,EACxC;AAAA,EAEA,MAAM,OAAsB;AAC1B,UAAM,SAAS,KAAK,UAAU;AAC9B,UAAM,MAAM,kBAAkB,KAAK,YAAY,MAAM;AACrD,SAAK,eAAe,IAAI;AACxB,SAAK,qBAAqB,IAAI,OAAO;AACrC,SAAK,mBAAmB,OAAO,MAAM;AAGrC,UAAM,YAAY,KAAK,KAAK,YAAY,UAAU,eAAe;AACjE,UAAM,aAAa,KAAK,KAAK,YAAY,UAAU,YAAY;AAE/D,QAAI,WAAW,SAAS,GAAG;AACzB,YAAM,MAAM,aAAa,WAAW,OAAO;AAC3C,WAAK,WAAW,kBAAkB,GAAG;AAAA,IACvC,WAAW,WAAW,UAAU,GAAG;AACjC,YAAM,MAAM,aAAa,YAAY,OAAO;AAC5C,WAAK,WAAW,mBAAmB,GAAG;AAEtC,WAAK,KAAK;AAAA,IACZ;AAAA,EACF;AAAA,EAEQ,YAA2B;AACjC,UAAM,SAAS,WAAW,KAAK,UAAU;AACzC,QAAI,KAAK,iBAAiB,KAAK,kBAAkB;AAC/C,aAAO;AAAA,QACL,GAAG;AAAA,QACH,OAAO;AAAA,UACL,GAAG,OAAO;AAAA,UACV,GAAI,KAAK,gBAAgB,EAAE,IAAI,KAAK,cAAc,IAAI,CAAC;AAAA,UACvD,GAAI
,KAAK,mBAAmB,EAAE,UAAU,KAAK,iBAAiB,IAAI,CAAC;AAAA,QACrE;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,mBAA2B;AACjC,UAAM,YAAY,KAAK,mBAAmB,KAAK;AAC/C,WAAO,KAAK,MAAM,YAAY,yBAAyB;AAAA,EACzD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,oBAA0B;AAChC,UAAM,SAAS,KAAK,iBAAiB;AAGrC,WAAO,KAAK,SAAS,SAAS,cAAc;AAC1C,WAAK,SAAS,MAAM;AAAA,IACtB;AAGA,QAAI,cAAc,KAAK,SAAS,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,QAAQ,CAAC;AACpE,WAAO,cAAc,UAAU,KAAK,SAAS,SAAS,cAAc;AAClE,YAAM,UAAU,KAAK,SAAS,MAAM;AACpC,UAAI,SAAS;AACX,uBAAe,QAAQ;AAAA,MACzB;AAAA,IACF;AAAA,EACF;AAAA,EAEQ,kBAAkC;AACxC,WAAO,KAAK,SAAS,IAAI,CAAC,OAAqB;AAAA,MAC7C,MAAM,EAAE;AAAA,MACR,SAAS,EAAE;AAAA,IACb,EAAE;AAAA,EACJ;AAAA,EAEA,MAAM,KAAK,aAAsD;AAC/D,QAAI,CAAC,eAAe,CAAC,YAAY,KAAK,GAAG;AACvC,YAAM,IAAI,MAAM,yBAAyB;AAAA,IAC3C;AAEA,SAAK,SAAS,KAAK;AAAA,MACjB,MAAM;AAAA,MACN,SAAS;AAAA,MACT,QAAQ,eAAe,WAAW;AAAA,IACpC,CAAC;AAED,SAAK,kBAAkB;AAEvB,UAAM,SAAS,KAAK,UAAU;AAC9B,UAAM,QAAQ,SAAS,QAAQ,KAAK,MAAM;AAE1C,UAAM,WAAU,oBAAI,KAAK,GAAE,YAAY;AACvC,UAAM,WAAW,OAAO,KAAK,KAAK,KAAK,EAAE,SAAS;AAElD,UAAM,SAAS,MAAM,qBAAqB;AAAA,MACxC;AAAA,MACA,QAAQ,KAAK;AAAA,MACb,UAAU,KAAK,gBAAgB;AAAA,MAC/B,GAAI,WAAW,EAAE,OAAO,KAAK,OAAO,cAAc,KAAK,aAAa,IAAI,CAAC;AAAA,IAC3E,CAAC;AAED,SAAK,SAAS,KAAK;AAAA,MACjB,MAAM;AAAA,MACN,SAAS,OAAO;AAAA,MAChB,QAAQ,eAAe,OAAO,IAAI;AAAA,IACpC,CAAC;AAED,QAAI;AACF,WAAK,KAAK;AAAA,IACZ,SAAS,KAAK;AACZ,UAAI,KAAK,wCAAwC,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,IACrG;AAGA,QAAI,KAAK,gBAAgB;AACvB,UAAI;AACF,aAAK;AAAA,UACH;AAAA,UAAQ;AAAA,UAAa,OAAO;AAAA,UAC5B,OAAO,MAAM;AAAA,UAAa;AAAA,UAC1B,OAAO;AAAA,UAAO,OAAO;AAAA,QACvB;AAAA,MACF,SAAS,KAAK;AACZ,YAAI,KAAK,kCAAkC,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,MAC/F;AAAA,IACF;AAEA,WAAO;AAAA,MACL,MAAM,OAAO;AAAA,MACb,OAAO,OAAO;AAAA,MACd,OAAO,OAAO;AAAA,MACd,WAAW,OAAO;AAAA,IACpB;AAAA,EACF;AAAA,EAEA,WAAW,aAA+C;AACxD,QAAI,CAAC,eAAe,CAAC,YAAY,KAAK,GAAG;AACvC,YAAM,IAAI,MAAM,yBAAyB;AAAA,IAC3C;AAEA,SAAK,SAAS,KAAK;AAAA,MACjB,MAAM;AAAA,MACN,SAAS;AAAA,MACT,QAAQ,eAAe,WAAW;AAAA,IACpC,CAAC;AAED,SAAK,kBAAkB;AAEvB,UAAM,SAAS,KA
AK,UAAU;AAC9B,UAAM,QAAQ,SAAS,QAAQ,KAAK,MAAM;AAC1C,UAAM,WAAU,oBAAI,KAAK,GAAE,YAAY;AACvC,UAAM,WAAW,OAAO,KAAK,KAAK,KAAK,EAAE,SAAS;AAElD,UAAM,eAAe,mBAAmB;AAAA,MACtC;AAAA,MACA,QAAQ,KAAK;AAAA,MACb,UAAU,KAAK,gBAAgB;AAAA,MAC/B,GAAI,WAAW,EAAE,OAAO,KAAK,OAAO,cAAc,KAAK,aAAa,IAAI,CAAC;AAAA,IAC3E,CAAC;AAGD,QAAI;AACJ,UAAM,gBAAgB,IAAI,QAAiF,CAAC,QAAQ;AAClH,sBAAgB;AAAA,IAClB,CAAC;AAED,UAAM,OAAO;AAEb,oBAAgB,iBAAwC;AACtD,UAAI,eAAe;AAEnB,uBAAiB,SAAS,aAAa,YAAY;AACjD,wBAAgB;AAChB,cAAM;AAAA,MACR;AAEA,WAAK,SAAS,KAAK;AAAA,QACjB,MAAM;AAAA,QACN,SAAS;AAAA,QACT,QAAQ,eAAe,YAAY;AAAA,MACrC,CAAC;AAED,UAAI;AACF,aAAK,KAAK;AAAA,MACZ,SAAS,KAAK;AACZ,YAAI,KAAK,wCAAwC,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,MACrG;AAGA,UAAI,cAAc,EAAE,aAAa,EAAE;AACnC,UAAI,cAAc;AAClB,UAAI,kBAAkC,CAAC;AACvC,UAAI;AACF,SAAC,aAAa,aAAa,eAAe,IAAI,MAAM,QAAQ,IAAI;AAAA,UAC9D,aAAa;AAAA,UACb,aAAa;AAAA,UACb,aAAa;AAAA,QACf,CAAC;AAAA,MACH,SAAS,KAAK;AACZ,YAAI,KAAK,sCAAsC,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,MACnG;AAGA,UAAI,KAAK,gBAAgB;AACvB,YAAI;AACF,eAAK;AAAA,YACH;AAAA,YAAQ;AAAA,YAAa;AAAA,YACrB,YAAY;AAAA,YAAa;AAAA,YACzB;AAAA,YAAa;AAAA,UACf;AAAA,QACF,SAAS,KAAK;AACZ,cAAI,KAAK,kCAAkC,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC,EAAE;AAAA,QAC/F;AAAA,MACF;AAEA,oBAAc;AAAA,QACZ,MAAM;AAAA,QACN,OAAO;AAAA,QACP,OAAO;AAAA,QACP,WAAW;AAAA,MACb,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,MACL,YAAY,eAAe;AAAA,MAC3B,QAAQ;AAAA,IACV;AAAA,EACF;AAAA,EAEA,OAAa;AACX,UAAM,YAAY,KAAK,KAAK,YAAY,QAAQ;AAChD,QAAI,CAAC,WAAW,SAAS,GAAG;AAC1B,gBAAU,WAAW,EAAE,WAAW,KAAK,CAAC;AAAA,IAC1C;AAGA,UAAM,YAAY,KAAK,WAAW,eAAe;AACjD,UAAM,QAAQ,KAAK,SAAS;AAAA,MAAI,CAAC,MAC/B,KAAK,UAAU,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,QAAQ,CAA4B;AAAA,IAChF;AACA,qBAAiB,KAAK,YAAY,iBAAiB,MAAM;AACvD,oBAAc,WAAW,MAAM,KAAK,IAAI,GAAG,OAAO;AAAA,IACpD,CAAC;AAAA,EACH;AAAA,EAEA,QAAc;AACZ,SAAK,WAAW,CAAC;AACjB,UAAM,YAAY,KAAK,KAAK,YAAY,UAAU,eAAe;AACjE,UAAM,aAAa,KAAK,KAAK,YAAY,UAAU,YAAY;AAC/D,QAAI,WAAW,SAAS,GAAG;AACzB,oBAAc,WAAW,IAAI,OAAO;AAAA,IACtC;AACA,QAAI,WAAW,UAAU,GAAG;AAC1B,oBAAc,YAAY,IAAI,OAAO;AAAA,IACvC;AAAA,EACF;AAAA,EAEQ,mBA
CN,QACA,QACA,UACA,aACA,SACA,OACA,WACM;AACN,UAAM,YAAY,gBAAgB;AAClC,UAAM,SAAQ,oBAAI,KAAK,GAAE,YAAY;AAErC,UAAM,UAAyB;AAAA,MAC7B,IAAI;AAAA,MACJ;AAAA,MACA;AAAA,MACA,QAAQ,OAAO,MAAM,GAAG,GAAG;AAAA,MAC3B,SAAS,SAAS,MAAM,GAAG,GAAG;AAAA,MAC9B,aAAa;AAAA,MACb,UAAU,OAAO,MAAM;AAAA,MACvB,OAAO,SAAS;AAAA,MAChB,YAAY,aAAa,UAAU,SAAS,IAAI,YAAY;AAAA,IAC9D;AAEA,iBAAa,KAAK,YAAY,OAAO;AAAA,EACvC;AAAA,EAEA,aAAuD;AACrD,WAAO,KAAK,SAAS,IAAI,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,SAAS,EAAE,QAAQ,EAAE;AAAA,EACxE;AAAA;AAAA,EAGA,gBAAiF;AAC/E,UAAM,gBAAgB,KAAK,SAAS,OAAO,CAAC,KAAK,MAAM,MAAM,EAAE,QAAQ,CAAC;AACxE,WAAO;AAAA,MACL;AAAA,MACA,QAAQ,KAAK,iBAAiB;AAAA,MAC9B,cAAc,KAAK,SAAS;AAAA,IAC9B;AAAA,EACF;AACF;AAMA,SAAS,kBAAkB,KAAwB;AACjD,MAAI,CAAC,IAAI,KAAK,EAAG,QAAO,CAAC;AAEzB,QAAM,WAAsB,CAAC;AAC7B,aAAW,QAAQ,IAAI,MAAM,IAAI,GAAG;AAClC,UAAM,UAAU,KAAK,KAAK;AAC1B,QAAI,CAAC,QAAS;AACd,QAAI;AACF,YAAM,SAAS,KAAK,MAAM,OAAO;AACjC,UAAI,OAAO,QAAQ,OAAO,SAAS;AACjC,iBAAS,KAAK;AAAA,UACZ,MAAM,OAAO;AAAA,UACb,SAAS,OAAO;AAAA,UAChB,QAAQ,eAAe,OAAO,OAAO;AAAA,QACvC,CAAC;AAAA,MACH;AAAA,IACF,QAAQ;AAAA,IAER;AAAA,EACF;AACA,SAAO;AACT;AAMA,SAAS,mBAAmB,KAAwB;AAClD,QAAM,WAAsB,CAAC;AAC7B,QAAM,QAAQ,IAAI,MAAM,IAAI;AAC5B,MAAI,cAA2C;AAC/C,MAAI,iBAA2B,CAAC;AAEhC,aAAW,QAAQ,OAAO;AACxB,QAAI,KAAK,WAAW,UAAU,GAAG;AAC/B,UAAI,eAAe,eAAe,SAAS,GAAG;AAC5C,cAAM,UAAU,eAAe,KAAK,IAAI,EAAE,KAAK;AAC/C,iBAAS,KAAK,EAAE,MAAM,aAAa,SAAS,QAAQ,eAAe,OAAO,EAAE,CAAC;AAAA,MAC/E;AACA,oBAAc;AACd,uBAAiB,CAAC;AAAA,IACpB,WAAW,KAAK,WAAW,eAAe,GAAG;AAC3C,UAAI,eAAe,eAAe,SAAS,GAAG;AAC5C,cAAM,UAAU,eAAe,KAAK,IAAI,EAAE,KAAK;AAC/C,iBAAS,KAAK,EAAE,MAAM,aAAa,SAAS,QAAQ,eAAe,OAAO,EAAE,CAAC;AAAA,MAC/E;AACA,oBAAc;AACd,uBAAiB,CAAC;AAAA,IACpB,WAAW,aAAa;AACtB,qBAAe,KAAK,IAAI;AAAA,IAC1B;AAAA,EACF;AACA,MAAI,eAAe,eAAe,SAAS,GAAG;AAC5C,UAAM,UAAU,eAAe,KAAK,IAAI,EAAE,KAAK;AAC/C,aAAS,KAAK,EAAE,MAAM,aAAa,SAAS,QAAQ,eAAe,OAAO,EAAE,CAAC;AAAA,EAC/E;AAEA,SAAO;AACT;","names":[]}
@@ -0,0 +1,119 @@
1
+ #!/usr/bin/env node
2
+
3
+
4
+ // src/runtime/rate-limiter.ts
5
+ import { readFileSync, writeFileSync, existsSync, mkdirSync } from "fs";
6
+ import { join } from "path";
7
// On-disk store filename and hard cap on retained events.
var RATE_FILE = "rate-limits.json";
var MAX_EVENTS = 10000;

/** Absolute path of the rate-limit store inside the harness memory dir. */
function getStorePath(harnessDir) {
  return join(harnessDir, "memory", RATE_FILE);
}
12
/**
 * Read the persisted rate-limit store from disk.
 * Falls back to a fresh empty store when the file is missing,
 * unreadable, or does not have the expected { events: [...] } shape.
 */
function loadRateLimits(harnessDir) {
  const storePath = getStorePath(harnessDir);
  const emptyStore = () => ({ events: [], updated: new Date().toISOString() });
  if (!existsSync(storePath)) {
    return emptyStore();
  }
  try {
    const parsed = JSON.parse(readFileSync(storePath, "utf-8"));
    const looksValid =
      typeof parsed === "object" &&
      parsed !== null &&
      "events" in parsed &&
      Array.isArray(parsed.events);
    return looksValid ? parsed : emptyStore();
  } catch {
    // Corrupt JSON — start over rather than crash.
    return emptyStore();
  }
}
28
/**
 * Persist the rate-limit store, creating memory/ on demand.
 * Mutates `store`: trims the event list to MAX_EVENTS (keeping the
 * newest entries) and stamps `updated` before writing.
 */
function saveRateLimits(harnessDir, store) {
  const memoryDir = join(harnessDir, "memory");
  if (!existsSync(memoryDir)) {
    mkdirSync(memoryDir, { recursive: true });
  }
  const overflow = store.events.length - MAX_EVENTS;
  if (overflow > 0) {
    store.events = store.events.slice(overflow);
  }
  store.updated = new Date().toISOString();
  writeFileSync(getStorePath(harnessDir), JSON.stringify(store, null, 2), "utf-8");
}
39
/**
 * Drop events strictly older than `maxWindowMs` before `now`.
 * Mutates `store.events` in place (events exactly at the cutoff are dropped).
 */
function pruneExpired(store, now, maxWindowMs) {
  const cutoff = now - maxWindowMs;
  const kept = [];
  for (const event of store.events) {
    if (event.timestamp > cutoff) {
      kept.push(event);
    }
  }
  store.events = kept;
}
43
/**
 * Evaluate `limit` against recorded events without consuming a slot.
 * Callers that proceed should invoke recordEvent() themselves; use
 * tryAcquire() for the combined check-and-record.
 * Returns { allowed, key, current, max, window_ms, retry_after_ms } where
 * retry_after_ms is the time until the oldest in-window event expires
 * (0 when the request is allowed).
 */
function checkRateLimit(harnessDir, limit, now) {
  const currentTime = now ?? Date.now();
  const store = loadRateLimits(harnessDir);
  const windowStart = currentTime - limit.window_ms;
  const inWindow = store.events.filter(
    (e) => e.key === limit.key && e.timestamp > windowStart
  );
  const allowed = inWindow.length < limit.max_requests;
  let retryAfterMs = 0;
  if (!allowed && inWindow.length > 0) {
    let oldest = Infinity;
    for (const event of inWindow) {
      if (event.timestamp < oldest) {
        oldest = event.timestamp;
      }
    }
    retryAfterMs = Math.max(0, oldest + limit.window_ms - currentTime);
  }
  return {
    allowed,
    key: limit.key,
    current: inWindow.length,
    max: limit.max_requests,
    window_ms: limit.window_ms,
    retry_after_ms: retryAfterMs
  };
}
66
/**
 * Append a rate-limit event for `key`, prune stale events, and persist.
 *
 * Generalization: the prune horizon was hard-coded to one hour (36e5 ms),
 * which silently discards events for any limit whose window exceeds an
 * hour before that window closes. It is now a parameter defaulting to the
 * previous value, so existing callers behave identically while longer
 * windows can opt in to a larger retention horizon.
 *
 * @param harnessDir harness root containing the memory/ dir
 * @param key rate-limit bucket the event counts against
 * @param now optional timestamp in ms (for deterministic testing)
 * @param maxWindowMs retention horizon for pruning; default 3600000 (1h)
 */
function recordEvent(harnessDir, key, now, maxWindowMs = 3600000) {
  const currentTime = now ?? Date.now();
  const store = loadRateLimits(harnessDir);
  store.events.push({ key, timestamp: currentTime });
  pruneExpired(store, currentTime, maxWindowMs);
  saveRateLimits(harnessDir, store);
}
73
/**
 * Check-and-consume: records the event only when the limit allows it.
 * Returns the same verdict shape as checkRateLimit().
 */
function tryAcquire(harnessDir, limit, now) {
  const currentTime = now ?? Date.now();
  const verdict = checkRateLimit(harnessDir, limit, currentTime);
  if (verdict.allowed) {
    recordEvent(harnessDir, limit.key, currentTime);
  }
  return verdict;
}
81
/**
 * Usage stats for `key` within the trailing `windowMs` window:
 * { count, oldest, newest } — timestamps are null when no events match.
 * Computed in a single pass over the stored events.
 */
function getUsage(harnessDir, key, windowMs, now) {
  const currentTime = now ?? Date.now();
  const windowStart = currentTime - windowMs;
  let count = 0;
  let oldest = null;
  let newest = null;
  for (const event of loadRateLimits(harnessDir).events) {
    if (event.key !== key || event.timestamp <= windowStart) {
      continue;
    }
    count += 1;
    if (oldest === null || event.timestamp < oldest) {
      oldest = event.timestamp;
    }
    if (newest === null || event.timestamp > newest) {
      newest = event.timestamp;
    }
  }
  return { count, oldest, newest };
}
98
/**
 * Remove recorded events — all of them, or only those matching `key`
 * when one is given. Persists the store and returns the number removed.
 */
function clearRateLimits(harnessDir, key) {
  const store = loadRateLimits(harnessDir);
  const before = store.events.length;
  store.events = key ? store.events.filter((e) => e.key !== key) : [];
  saveRateLimits(harnessDir, store);
  return before - store.events.length;
}
109
+
110
+ export {
111
+ loadRateLimits,
112
+ saveRateLimits,
113
+ checkRateLimit,
114
+ recordEvent,
115
+ tryAcquire,
116
+ getUsage,
117
+ clearRateLimits
118
+ };
119
+ //# sourceMappingURL=chunk-OC6YSTDX.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/runtime/rate-limiter.ts"],"sourcesContent":["import { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs';\nimport { join } from 'path';\n\n/** A recorded event in the sliding window */\nexport interface RateEvent {\n key: string;\n timestamp: number;\n}\n\n/** Rate limit rule: max requests per window_ms */\nexport interface RateLimit {\n key: string;\n max_requests: number;\n window_ms: number;\n}\n\n/** Result of a rate limit check */\nexport interface RateLimitCheck {\n allowed: boolean;\n key: string;\n current: number;\n max: number;\n window_ms: number;\n /** ms until oldest event expires (0 if allowed) */\n retry_after_ms: number;\n}\n\n/** Persisted rate limit state */\nexport interface RateLimitStore {\n events: RateEvent[];\n updated: string;\n}\n\nconst RATE_FILE = 'rate-limits.json';\nconst MAX_EVENTS = 10000;\n\nfunction getStorePath(harnessDir: string): string {\n return join(harnessDir, 'memory', RATE_FILE);\n}\n\n/**\n * Load rate limit events from disk.\n * Returns empty store if file doesn't exist or is corrupt.\n */\nexport function loadRateLimits(harnessDir: string): RateLimitStore {\n const storePath = getStorePath(harnessDir);\n if (!existsSync(storePath)) {\n return { events: [], updated: new Date().toISOString() };\n }\n\n try {\n const content = readFileSync(storePath, 'utf-8');\n const parsed: unknown = JSON.parse(content);\n if (\n typeof parsed === 'object' &&\n parsed !== null &&\n 'events' in parsed &&\n Array.isArray((parsed as RateLimitStore).events)\n ) {\n return parsed as RateLimitStore;\n }\n return { events: [], updated: new Date().toISOString() };\n } catch {\n return { events: [], updated: new Date().toISOString() };\n }\n}\n\n/**\n * Save rate limit events to disk. 
Trims to MAX_EVENTS.\n */\nexport function saveRateLimits(harnessDir: string, store: RateLimitStore): void {\n const memoryDir = join(harnessDir, 'memory');\n if (!existsSync(memoryDir)) {\n mkdirSync(memoryDir, { recursive: true });\n }\n\n if (store.events.length > MAX_EVENTS) {\n store.events = store.events.slice(store.events.length - MAX_EVENTS);\n }\n\n store.updated = new Date().toISOString();\n writeFileSync(getStorePath(harnessDir), JSON.stringify(store, null, 2), 'utf-8');\n}\n\n/**\n * Prune expired events from the store (older than the largest known window).\n */\nfunction pruneExpired(store: RateLimitStore, now: number, maxWindowMs: number): void {\n const cutoff = now - maxWindowMs;\n store.events = store.events.filter((e) => e.timestamp > cutoff);\n}\n\n/**\n * Check if a request is allowed under the given rate limit.\n * Does NOT record the event — call recordEvent() separately on success.\n */\nexport function checkRateLimit(\n harnessDir: string,\n limit: RateLimit,\n now?: number,\n): RateLimitCheck {\n const currentTime = now ?? Date.now();\n const store = loadRateLimits(harnessDir);\n\n const windowStart = currentTime - limit.window_ms;\n const eventsInWindow = store.events.filter(\n (e) => e.key === limit.key && e.timestamp > windowStart,\n );\n\n const current = eventsInWindow.length;\n const allowed = current < limit.max_requests;\n\n let retryAfterMs = 0;\n if (!allowed && eventsInWindow.length > 0) {\n // Time until the oldest event in the window expires\n const oldest = eventsInWindow.reduce((min, e) => Math.min(min, e.timestamp), Infinity);\n retryAfterMs = Math.max(0, oldest + limit.window_ms - currentTime);\n }\n\n return {\n allowed,\n key: limit.key,\n current,\n max: limit.max_requests,\n window_ms: limit.window_ms,\n retry_after_ms: retryAfterMs,\n };\n}\n\n/**\n * Record a rate limit event for a key.\n */\nexport function recordEvent(harnessDir: string, key: string, now?: number): void {\n const currentTime = now ?? 
Date.now();\n const store = loadRateLimits(harnessDir);\n\n store.events.push({ key, timestamp: currentTime });\n\n // Prune events older than 1 hour to keep the store manageable\n pruneExpired(store, currentTime, 3600000);\n\n saveRateLimits(harnessDir, store);\n}\n\n/**\n * Check rate limit AND record the event if allowed.\n * Returns the check result. If not allowed, does NOT record.\n */\nexport function tryAcquire(\n harnessDir: string,\n limit: RateLimit,\n now?: number,\n): RateLimitCheck {\n const currentTime = now ?? Date.now();\n const check = checkRateLimit(harnessDir, limit, currentTime);\n\n if (check.allowed) {\n recordEvent(harnessDir, limit.key, currentTime);\n }\n\n return check;\n}\n\n/**\n * Get current usage for a key within a window.\n */\nexport function getUsage(\n harnessDir: string,\n key: string,\n windowMs: number,\n now?: number,\n): { count: number; oldest: number | null; newest: number | null } {\n const currentTime = now ?? Date.now();\n const store = loadRateLimits(harnessDir);\n\n const windowStart = currentTime - windowMs;\n const eventsInWindow = store.events.filter(\n (e) => e.key === key && e.timestamp > windowStart,\n );\n\n if (eventsInWindow.length === 0) {\n return { count: 0, oldest: null, newest: null };\n }\n\n const timestamps = eventsInWindow.map((e) => e.timestamp);\n return {\n count: eventsInWindow.length,\n oldest: Math.min(...timestamps),\n newest: Math.max(...timestamps),\n };\n}\n\n/**\n * Clear all rate limit events for a key, or all keys.\n */\nexport function clearRateLimits(harnessDir: string, key?: string): number {\n const store = loadRateLimits(harnessDir);\n const before = store.events.length;\n\n if (key) {\n store.events = store.events.filter((e) => e.key !== key);\n } else {\n store.events = [];\n }\n\n saveRateLimits(harnessDir, store);\n return before - 
store.events.length;\n}\n"],"mappings":";;;;AAAA,SAAS,cAAc,eAAe,YAAY,iBAAiB;AACnE,SAAS,YAAY;AAgCrB,IAAM,YAAY;AAClB,IAAM,aAAa;AAEnB,SAAS,aAAa,YAA4B;AAChD,SAAO,KAAK,YAAY,UAAU,SAAS;AAC7C;AAMO,SAAS,eAAe,YAAoC;AACjE,QAAM,YAAY,aAAa,UAAU;AACzC,MAAI,CAAC,WAAW,SAAS,GAAG;AAC1B,WAAO,EAAE,QAAQ,CAAC,GAAG,UAAS,oBAAI,KAAK,GAAE,YAAY,EAAE;AAAA,EACzD;AAEA,MAAI;AACF,UAAM,UAAU,aAAa,WAAW,OAAO;AAC/C,UAAM,SAAkB,KAAK,MAAM,OAAO;AAC1C,QACE,OAAO,WAAW,YAClB,WAAW,QACX,YAAY,UACZ,MAAM,QAAS,OAA0B,MAAM,GAC/C;AACA,aAAO;AAAA,IACT;AACA,WAAO,EAAE,QAAQ,CAAC,GAAG,UAAS,oBAAI,KAAK,GAAE,YAAY,EAAE;AAAA,EACzD,QAAQ;AACN,WAAO,EAAE,QAAQ,CAAC,GAAG,UAAS,oBAAI,KAAK,GAAE,YAAY,EAAE;AAAA,EACzD;AACF;AAKO,SAAS,eAAe,YAAoB,OAA6B;AAC9E,QAAM,YAAY,KAAK,YAAY,QAAQ;AAC3C,MAAI,CAAC,WAAW,SAAS,GAAG;AAC1B,cAAU,WAAW,EAAE,WAAW,KAAK,CAAC;AAAA,EAC1C;AAEA,MAAI,MAAM,OAAO,SAAS,YAAY;AACpC,UAAM,SAAS,MAAM,OAAO,MAAM,MAAM,OAAO,SAAS,UAAU;AAAA,EACpE;AAEA,QAAM,WAAU,oBAAI,KAAK,GAAE,YAAY;AACvC,gBAAc,aAAa,UAAU,GAAG,KAAK,UAAU,OAAO,MAAM,CAAC,GAAG,OAAO;AACjF;AAKA,SAAS,aAAa,OAAuB,KAAa,aAA2B;AACnF,QAAM,SAAS,MAAM;AACrB,QAAM,SAAS,MAAM,OAAO,OAAO,CAAC,MAAM,EAAE,YAAY,MAAM;AAChE;AAMO,SAAS,eACd,YACA,OACA,KACgB;AAChB,QAAM,cAAc,OAAO,KAAK,IAAI;AACpC,QAAM,QAAQ,eAAe,UAAU;AAEvC,QAAM,cAAc,cAAc,MAAM;AACxC,QAAM,iBAAiB,MAAM,OAAO;AAAA,IAClC,CAAC,MAAM,EAAE,QAAQ,MAAM,OAAO,EAAE,YAAY;AAAA,EAC9C;AAEA,QAAM,UAAU,eAAe;AAC/B,QAAM,UAAU,UAAU,MAAM;AAEhC,MAAI,eAAe;AACnB,MAAI,CAAC,WAAW,eAAe,SAAS,GAAG;AAEzC,UAAM,SAAS,eAAe,OAAO,CAAC,KAAK,MAAM,KAAK,IAAI,KAAK,EAAE,SAAS,GAAG,QAAQ;AACrF,mBAAe,KAAK,IAAI,GAAG,SAAS,MAAM,YAAY,WAAW;AAAA,EACnE;AAEA,SAAO;AAAA,IACL;AAAA,IACA,KAAK,MAAM;AAAA,IACX;AAAA,IACA,KAAK,MAAM;AAAA,IACX,WAAW,MAAM;AAAA,IACjB,gBAAgB;AAAA,EAClB;AACF;AAKO,SAAS,YAAY,YAAoB,KAAa,KAAoB;AAC/E,QAAM,cAAc,OAAO,KAAK,IAAI;AACpC,QAAM,QAAQ,eAAe,UAAU;AAEvC,QAAM,OAAO,KAAK,EAAE,KAAK,WAAW,YAAY,CAAC;AAGjD,eAAa,OAAO,aAAa,IAAO;AAExC,iBAAe,YAAY,KAAK;AAClC;AAMO,SAAS,WACd,YACA,OACA,KACgB;AAChB,QAAM,cAAc,OAAO,KAAK,IAAI;AACpC,QAAM,QAAQ,eAAe,YAAY,OAAO,WAAW;AAE3D,MAAI,MAAM,SAAS;AACjB,gBAAY,YAAY,MAAM,KAAK,WAA
W;AAAA,EAChD;AAEA,SAAO;AACT;AAKO,SAAS,SACd,YACA,KACA,UACA,KACiE;AACjE,QAAM,cAAc,OAAO,KAAK,IAAI;AACpC,QAAM,QAAQ,eAAe,UAAU;AAEvC,QAAM,cAAc,cAAc;AAClC,QAAM,iBAAiB,MAAM,OAAO;AAAA,IAClC,CAAC,MAAM,EAAE,QAAQ,OAAO,EAAE,YAAY;AAAA,EACxC;AAEA,MAAI,eAAe,WAAW,GAAG;AAC/B,WAAO,EAAE,OAAO,GAAG,QAAQ,MAAM,QAAQ,KAAK;AAAA,EAChD;AAEA,QAAM,aAAa,eAAe,IAAI,CAAC,MAAM,EAAE,SAAS;AACxD,SAAO;AAAA,IACL,OAAO,eAAe;AAAA,IACtB,QAAQ,KAAK,IAAI,GAAG,UAAU;AAAA,IAC9B,QAAQ,KAAK,IAAI,GAAG,UAAU;AAAA,EAChC;AACF;AAKO,SAAS,gBAAgB,YAAoB,KAAsB;AACxE,QAAM,QAAQ,eAAe,UAAU;AACvC,QAAM,SAAS,MAAM,OAAO;AAE5B,MAAI,KAAK;AACP,UAAM,SAAS,MAAM,OAAO,OAAO,CAAC,MAAM,EAAE,QAAQ,GAAG;AAAA,EACzD,OAAO;AACL,UAAM,SAAS,CAAC;AAAA,EAClB;AAEA,iBAAe,YAAY,KAAK;AAChC,SAAO,SAAS,MAAM,OAAO;AAC/B;","names":[]}