@mariozechner/pi-coding-agent 0.45.7 → 0.47.0

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package versions as they appear in their respective public registries.
Files changed (169)
  1. package/CHANGELOG.md +59 -0
  2. package/README.md +24 -3
  3. package/dist/cli.d.ts.map +1 -1
  4. package/dist/cli.js +1 -0
  5. package/dist/cli.js.map +1 -1
  6. package/dist/config.d.ts +2 -0
  7. package/dist/config.d.ts.map +1 -1
  8. package/dist/config.js +2 -0
  9. package/dist/config.js.map +1 -1
  10. package/dist/core/agent-session.d.ts +11 -3
  11. package/dist/core/agent-session.d.ts.map +1 -1
  12. package/dist/core/agent-session.js +81 -14
  13. package/dist/core/agent-session.js.map +1 -1
  14. package/dist/core/compaction/compaction.d.ts.map +1 -1
  15. package/dist/core/compaction/compaction.js +6 -5
  16. package/dist/core/compaction/compaction.js.map +1 -1
  17. package/dist/core/export-html/ansi-to-html.d.ts +22 -0
  18. package/dist/core/export-html/ansi-to-html.d.ts.map +1 -0
  19. package/dist/core/export-html/ansi-to-html.js +249 -0
  20. package/dist/core/export-html/ansi-to-html.js.map +1 -0
  21. package/dist/core/export-html/index.d.ts +17 -0
  22. package/dist/core/export-html/index.d.ts.map +1 -1
  23. package/dist/core/export-html/index.js +52 -23
  24. package/dist/core/export-html/index.js.map +1 -1
  25. package/dist/core/export-html/template.css +0 -33
  26. package/dist/core/export-html/template.js +171 -18
  27. package/dist/core/export-html/tool-renderer.d.ts +35 -0
  28. package/dist/core/export-html/tool-renderer.d.ts.map +1 -0
  29. package/dist/core/export-html/tool-renderer.js +57 -0
  30. package/dist/core/export-html/tool-renderer.js.map +1 -0
  31. package/dist/core/extensions/index.d.ts +1 -1
  32. package/dist/core/extensions/index.d.ts.map +1 -1
  33. package/dist/core/extensions/index.js.map +1 -1
  34. package/dist/core/extensions/runner.d.ts +5 -1
  35. package/dist/core/extensions/runner.d.ts.map +1 -1
  36. package/dist/core/extensions/runner.js +41 -0
  37. package/dist/core/extensions/runner.js.map +1 -1
  38. package/dist/core/extensions/types.d.ts +24 -1
  39. package/dist/core/extensions/types.d.ts.map +1 -1
  40. package/dist/core/extensions/types.js.map +1 -1
  41. package/dist/core/keybindings.d.ts +1 -5
  42. package/dist/core/keybindings.d.ts.map +1 -1
  43. package/dist/core/keybindings.js +4 -12
  44. package/dist/core/keybindings.js.map +1 -1
  45. package/dist/core/model-resolver.d.ts.map +1 -1
  46. package/dist/core/model-resolver.js +1 -0
  47. package/dist/core/model-resolver.js.map +1 -1
  48. package/dist/core/prompt-templates.d.ts.map +1 -1
  49. package/dist/core/prompt-templates.js +4 -27
  50. package/dist/core/prompt-templates.js.map +1 -1
  51. package/dist/core/skills.d.ts.map +1 -1
  52. package/dist/core/skills.js +6 -37
  53. package/dist/core/skills.js.map +1 -1
  54. package/dist/core/system-prompt.d.ts.map +1 -1
  55. package/dist/core/system-prompt.js +19 -14
  56. package/dist/core/system-prompt.js.map +1 -1
  57. package/dist/core/tools/edit-diff.d.ts +30 -0
  58. package/dist/core/tools/edit-diff.d.ts.map +1 -1
  59. package/dist/core/tools/edit-diff.js +82 -10
  60. package/dist/core/tools/edit-diff.js.map +1 -1
  61. package/dist/core/tools/edit.d.ts.map +1 -1
  62. package/dist/core/tools/edit.js +16 -13
  63. package/dist/core/tools/edit.js.map +1 -1
  64. package/dist/core/tools/path-utils.d.ts +1 -0
  65. package/dist/core/tools/path-utils.d.ts.map +1 -1
  66. package/dist/core/tools/path-utils.js +7 -0
  67. package/dist/core/tools/path-utils.js.map +1 -1
  68. package/dist/core/tools/read.d.ts.map +1 -1
  69. package/dist/core/tools/read.js +13 -2
  70. package/dist/core/tools/read.js.map +1 -1
  71. package/dist/index.d.ts +3 -1
  72. package/dist/index.d.ts.map +1 -1
  73. package/dist/index.js +3 -0
  74. package/dist/index.js.map +1 -1
  75. package/dist/main.d.ts.map +1 -1
  76. package/dist/main.js +70 -9
  77. package/dist/main.js.map +1 -1
  78. package/dist/modes/interactive/components/bash-execution.d.ts.map +1 -1
  79. package/dist/modes/interactive/components/bash-execution.js +4 -3
  80. package/dist/modes/interactive/components/bash-execution.js.map +1 -1
  81. package/dist/modes/interactive/components/bordered-loader.d.ts.map +1 -1
  82. package/dist/modes/interactive/components/bordered-loader.js +2 -1
  83. package/dist/modes/interactive/components/bordered-loader.js.map +1 -1
  84. package/dist/modes/interactive/components/branch-summary-message.d.ts.map +1 -1
  85. package/dist/modes/interactive/components/branch-summary-message.js +4 -1
  86. package/dist/modes/interactive/components/branch-summary-message.js.map +1 -1
  87. package/dist/modes/interactive/components/compaction-summary-message.d.ts.map +1 -1
  88. package/dist/modes/interactive/components/compaction-summary-message.js +4 -1
  89. package/dist/modes/interactive/components/compaction-summary-message.js.map +1 -1
  90. package/dist/modes/interactive/components/custom-editor.d.ts +2 -2
  91. package/dist/modes/interactive/components/custom-editor.d.ts.map +1 -1
  92. package/dist/modes/interactive/components/custom-editor.js +5 -5
  93. package/dist/modes/interactive/components/custom-editor.js.map +1 -1
  94. package/dist/modes/interactive/components/extension-editor.d.ts +3 -1
  95. package/dist/modes/interactive/components/extension-editor.d.ts.map +1 -1
  96. package/dist/modes/interactive/components/extension-editor.js +15 -9
  97. package/dist/modes/interactive/components/extension-editor.js.map +1 -1
  98. package/dist/modes/interactive/components/extension-input.d.ts.map +1 -1
  99. package/dist/modes/interactive/components/extension-input.js +2 -1
  100. package/dist/modes/interactive/components/extension-input.js.map +1 -1
  101. package/dist/modes/interactive/components/extension-selector.d.ts.map +1 -1
  102. package/dist/modes/interactive/components/extension-selector.js +6 -1
  103. package/dist/modes/interactive/components/extension-selector.js.map +1 -1
  104. package/dist/modes/interactive/components/index.d.ts +1 -0
  105. package/dist/modes/interactive/components/index.d.ts.map +1 -1
  106. package/dist/modes/interactive/components/index.js +1 -0
  107. package/dist/modes/interactive/components/index.js.map +1 -1
  108. package/dist/modes/interactive/components/keybinding-hints.d.ts +41 -0
  109. package/dist/modes/interactive/components/keybinding-hints.d.ts.map +1 -0
  110. package/dist/modes/interactive/components/keybinding-hints.js +61 -0
  111. package/dist/modes/interactive/components/keybinding-hints.js.map +1 -0
  112. package/dist/modes/interactive/components/login-dialog.d.ts.map +1 -1
  113. package/dist/modes/interactive/components/login-dialog.js +4 -3
  114. package/dist/modes/interactive/components/login-dialog.js.map +1 -1
  115. package/dist/modes/interactive/components/session-selector-search.d.ts +21 -0
  116. package/dist/modes/interactive/components/session-selector-search.d.ts.map +1 -0
  117. package/dist/modes/interactive/components/session-selector-search.js +146 -0
  118. package/dist/modes/interactive/components/session-selector-search.js.map +1 -0
  119. package/dist/modes/interactive/components/session-selector.d.ts +7 -1
  120. package/dist/modes/interactive/components/session-selector.d.ts.map +1 -1
  121. package/dist/modes/interactive/components/session-selector.js +35 -8
  122. package/dist/modes/interactive/components/session-selector.js.map +1 -1
  123. package/dist/modes/interactive/components/tool-execution.d.ts.map +1 -1
  124. package/dist/modes/interactive/components/tool-execution.js +14 -8
  125. package/dist/modes/interactive/components/tool-execution.js.map +1 -1
  126. package/dist/modes/interactive/components/tree-selector.d.ts +7 -0
  127. package/dist/modes/interactive/components/tree-selector.d.ts.map +1 -1
  128. package/dist/modes/interactive/components/tree-selector.js +143 -4
  129. package/dist/modes/interactive/components/tree-selector.js.map +1 -1
  130. package/dist/modes/interactive/interactive-mode.d.ts +4 -5
  131. package/dist/modes/interactive/interactive-mode.d.ts.map +1 -1
  132. package/dist/modes/interactive/interactive-mode.js +63 -126
  133. package/dist/modes/interactive/interactive-mode.js.map +1 -1
  134. package/dist/modes/print-mode.d.ts.map +1 -1
  135. package/dist/modes/print-mode.js +1 -1
  136. package/dist/modes/print-mode.js.map +1 -1
  137. package/dist/modes/rpc/rpc-mode.d.ts.map +1 -1
  138. package/dist/modes/rpc/rpc-mode.js +1 -0
  139. package/dist/modes/rpc/rpc-mode.js.map +1 -1
  140. package/dist/utils/frontmatter.d.ts +8 -0
  141. package/dist/utils/frontmatter.d.ts.map +1 -0
  142. package/dist/utils/frontmatter.js +26 -0
  143. package/dist/utils/frontmatter.js.map +1 -0
  144. package/dist/utils/image-convert.d.ts.map +1 -1
  145. package/dist/utils/image-convert.js +12 -14
  146. package/dist/utils/image-convert.js.map +1 -1
  147. package/dist/utils/image-resize.d.ts +2 -2
  148. package/dist/utils/image-resize.d.ts.map +1 -1
  149. package/dist/utils/image-resize.js +102 -122
  150. package/dist/utils/image-resize.js.map +1 -1
  151. package/docs/extensions.md +51 -0
  152. package/docs/rpc.md +15 -15
  153. package/docs/tui.md +26 -0
  154. package/examples/extensions/input-transform.ts +43 -0
  155. package/examples/extensions/modal-editor.ts +1 -1
  156. package/examples/extensions/overlay-test.ts +8 -3
  157. package/examples/extensions/plan-mode/README.md +1 -1
  158. package/examples/extensions/plan-mode/index.ts +2 -2
  159. package/examples/extensions/question.ts +1 -1
  160. package/examples/extensions/questionnaire.ts +1 -1
  161. package/examples/extensions/rainbow-editor.ts +1 -8
  162. package/examples/extensions/subagent/agents.ts +3 -32
  163. package/examples/extensions/with-deps/package-lock.json +2 -2
  164. package/examples/extensions/with-deps/package.json +1 -1
  165. package/package.json +6 -5
  166. package/dist/utils/vips.d.ts +0 -11
  167. package/dist/utils/vips.d.ts.map +0 -1
  168. package/dist/utils/vips.js +0 -35
  169. package/dist/utils/vips.js.map +0 -1
package/dist/core/compaction/compaction.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"compaction.d.ts","sourceRoot":"","sources":["../../../src/core/compaction/compaction.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAChE,OAAO,KAAK,EAAoB,KAAK,EAAE,KAAK,EAAE,MAAM,qBAAqB,CAAC;AAG1E,OAAO,KAAK,EAAmB,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAC3E,OAAO,EAIN,KAAK,cAAc,EAInB,MAAM,YAAY,CAAC;AAMpB,kEAAkE;AAClE,MAAM,WAAW,iBAAiB;IACjC,SAAS,EAAE,MAAM,EAAE,CAAC;IACpB,aAAa,EAAE,MAAM,EAAE,CAAC;CACxB;AAwDD,8EAA8E;AAC9E,MAAM,WAAW,gBAAgB,CAAC,CAAC,GAAG,OAAO;IAC5C,OAAO,EAAE,MAAM,CAAC;IAChB,gBAAgB,EAAE,MAAM,CAAC;IACzB,YAAY,EAAE,MAAM,CAAC;IACrB,+FAA+F;IAC/F,OAAO,CAAC,EAAE,CAAC,CAAC;CACZ;AAMD,MAAM,WAAW,kBAAkB;IAClC,OAAO,EAAE,OAAO,CAAC;IACjB,aAAa,EAAE,MAAM,CAAC;IACtB,gBAAgB,EAAE,MAAM,CAAC;CACzB;AAED,eAAO,MAAM,2BAA2B,EAAE,kBAIzC,CAAC;AAMF;;;GAGG;AACH,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,KAAK,GAAG,MAAM,CAE3D;AAgBD;;GAEG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,EAAE,YAAY,EAAE,GAAG,KAAK,GAAG,SAAS,CAShF;AAED;;GAEG;AACH,wBAAgB,aAAa,CAAC,aAAa,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,QAAQ,EAAE,kBAAkB,GAAG,OAAO,CAGjH;AAMD;;;GAGG;AACH,wBAAgB,cAAc,CAAC,OAAO,EAAE,YAAY,GAAG,MAAM,CA0D5D;AA8CD;;;;GAIG;AACH,wBAAgB,kBAAkB,CAAC,OAAO,EAAE,YAAY,EAAE,EAAE,UAAU,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,GAAG,MAAM,CAe1G;AAED,MAAM,WAAW,cAAc;IAC9B,mCAAmC;IACnC,mBAAmB,EAAE,MAAM,CAAC;IAC5B,qFAAqF;IACrF,cAAc,EAAE,MAAM,CAAC;IACvB,uEAAuE;IACvE,WAAW,EAAE,OAAO,CAAC;CACrB;AAED;;;;;;;;;;;;;;;GAeG;AACH,wBAAgB,YAAY,CAC3B,OAAO,EAAE,YAAY,EAAE,EACvB,UAAU,EAAE,MAAM,EAClB,QAAQ,EAAE,MAAM,EAChB,gBAAgB,EAAE,MAAM,GACtB,cAAc,CAyDhB;AA8ED;;;GAGG;AACH,wBAAsB,eAAe,CACpC,eAAe,EAAE,YAAY,EAAE,EAC/B,KAAK,EAAE,KAAK,CAAC,GAAG,CAAC,EACjB,aAAa,EAAE,MAAM,EACrB,MAAM,EAAE,MAAM,EACd,MAAM,CAAC,EAAE,WAAW,EACpB,kBAAkB,CAAC,EAAE,MAAM,EAC3B,eAAe,CAAC,EAAE,MAAM,GACtB,OAAO,CAAC,MAAM,CAAC,CA6CjB;AAMD,MAAM,WAAW,qBAAqB;IACrC,kCAAkC;IAClC,gBAAgB,EAAE,MAAM,CAAC;IACzB,qDAAqD;IACrD,mBAAmB,EAAE,YAAY,EAAE,CAAC;IACpC,2EAA2E;IAC3E,kBAAkB,EAAE,YAAY,EAAE,CAAC;IACnC,iEAAiE;IACjE,WAAW,EAAE,OAAO,CAAC;IACrB,YAAY,EAAE,MAAM,CAAC;IACrB,6DAA6D;IAC7D,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,yDAAyD;IACzD,OAAO,EAAE,cAAc,CAAC;IACxB,8CAA8C;IAC9C,QAAQ,EAAE,kBAAkB,CAAC;CAC7B;AAED,wBAAgB,iBAAiB,CAChC,WAAW,EAAE,YAAY,EAAE,EAC3B,QAAQ,EAAE,kBAAkB,GAC1B,qBAAqB,GAAG,SAAS,CAwEnC;AAqBD;;;;;;GAMG;AACH,wBAAsB,OAAO,CAC5B,WAAW,EAAE,qBAAqB,EAClC,KAAK,EAAE,KAAK,CAAC,GAAG,CAAC,EACjB,MAAM,EAAE,MAAM,EACd,kBAAkB,CAAC,EAAE,MAAM,EAC3B,MAAM,CAAC,EAAE,WAAW,GAClB,OAAO,CAAC,gBAAgB,CAAC,CA4D3B","sourcesContent":["/**\n * Context compaction for long sessions.\n *\n * Pure functions for compaction logic. 
The session manager handles I/O,\n * and after compaction the session is reloaded.\n */\n\nimport type { AgentMessage } from \"@mariozechner/pi-agent-core\";\nimport type { AssistantMessage, Model, Usage } from \"@mariozechner/pi-ai\";\nimport { complete, completeSimple } from \"@mariozechner/pi-ai\";\nimport { convertToLlm, createBranchSummaryMessage, createCustomMessage } from \"../messages.js\";\nimport type { CompactionEntry, SessionEntry } from \"../session-manager.js\";\nimport {\n\tcomputeFileLists,\n\tcreateFileOps,\n\textractFileOpsFromMessage,\n\ttype FileOperations,\n\tformatFileOperations,\n\tSUMMARIZATION_SYSTEM_PROMPT,\n\tserializeConversation,\n} from \"./utils.js\";\n\n// ============================================================================\n// File Operation Tracking\n// ============================================================================\n\n/** Details stored in CompactionEntry.details for file tracking */\nexport interface CompactionDetails {\n\treadFiles: string[];\n\tmodifiedFiles: string[];\n}\n\n/**\n * Extract file operations from messages and previous compaction entries.\n */\nfunction extractFileOperations(\n\tmessages: AgentMessage[],\n\tentries: SessionEntry[],\n\tprevCompactionIndex: number,\n): FileOperations {\n\tconst fileOps = createFileOps();\n\n\t// Collect from previous compaction's details (if pi-generated)\n\tif (prevCompactionIndex >= 0) {\n\t\tconst prevCompaction = entries[prevCompactionIndex] as CompactionEntry;\n\t\tif (!prevCompaction.fromHook && prevCompaction.details) {\n\t\t\t// fromHook field kept for session file compatibility\n\t\t\tconst details = prevCompaction.details as CompactionDetails;\n\t\t\tif (Array.isArray(details.readFiles)) {\n\t\t\t\tfor (const f of details.readFiles) fileOps.read.add(f);\n\t\t\t}\n\t\t\tif (Array.isArray(details.modifiedFiles)) {\n\t\t\t\tfor (const f of details.modifiedFiles) fileOps.edited.add(f);\n\t\t\t}\n\t\t}\n\t}\n\n\t// Extract from tool calls in messages\n\tfor (const msg of messages) {\n\t\textractFileOpsFromMessage(msg, fileOps);\n\t}\n\n\treturn fileOps;\n}\n\n// ============================================================================\n// Message Extraction\n// ============================================================================\n\n/**\n * Extract AgentMessage from an entry if it produces one.\n * Returns undefined for entries that don't contribute to LLM context.\n */\nfunction getMessageFromEntry(entry: SessionEntry): AgentMessage | undefined {\n\tif (entry.type === \"message\") {\n\t\treturn entry.message;\n\t}\n\tif (entry.type === \"custom_message\") {\n\t\treturn createCustomMessage(entry.customType, entry.content, entry.display, entry.details, entry.timestamp);\n\t}\n\tif (entry.type === \"branch_summary\") {\n\t\treturn createBranchSummaryMessage(entry.summary, entry.fromId, entry.timestamp);\n\t}\n\treturn undefined;\n}\n\n/** Result from compact() - SessionManager adds uuid/parentUuid when saving */\nexport interface CompactionResult<T = unknown> {\n\tsummary: string;\n\tfirstKeptEntryId: string;\n\ttokensBefore: number;\n\t/** Extension-specific data (e.g., ArtifactIndex, version markers for structured compaction) */\n\tdetails?: T;\n}\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface CompactionSettings {\n\tenabled: boolean;\n\treserveTokens: number;\n\tkeepRecentTokens: number;\n}\n\nexport const 
DEFAULT_COMPACTION_SETTINGS: CompactionSettings = {\n\tenabled: true,\n\treserveTokens: 16384,\n\tkeepRecentTokens: 20000,\n};\n\n// ============================================================================\n// Token calculation\n// ============================================================================\n\n/**\n * Calculate total context tokens from usage.\n * Uses the native totalTokens field when available, falls back to computing from components.\n */\nexport function calculateContextTokens(usage: Usage): number {\n\treturn usage.totalTokens || usage.input + usage.output + usage.cacheRead + usage.cacheWrite;\n}\n\n/**\n * Get usage from an assistant message if available.\n * Skips aborted and error messages as they don't have valid usage data.\n */\nfunction getAssistantUsage(msg: AgentMessage): Usage | undefined {\n\tif (msg.role === \"assistant\" && \"usage\" in msg) {\n\t\tconst assistantMsg = msg as AssistantMessage;\n\t\tif (assistantMsg.stopReason !== \"aborted\" && assistantMsg.stopReason !== \"error\" && assistantMsg.usage) {\n\t\t\treturn assistantMsg.usage;\n\t\t}\n\t}\n\treturn undefined;\n}\n\n/**\n * Find the last non-aborted assistant message usage from session entries.\n */\nexport function getLastAssistantUsage(entries: SessionEntry[]): Usage | undefined {\n\tfor (let i = entries.length - 1; i >= 0; i--) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\") {\n\t\t\tconst usage = getAssistantUsage(entry.message);\n\t\t\tif (usage) return usage;\n\t\t}\n\t}\n\treturn undefined;\n}\n\n/**\n * Check if compaction should trigger based on context usage.\n */\nexport function shouldCompact(contextTokens: number, contextWindow: number, settings: CompactionSettings): boolean {\n\tif (!settings.enabled) return false;\n\treturn contextTokens > contextWindow - settings.reserveTokens;\n}\n\n// ============================================================================\n// Cut point detection\n// ============================================================================\n\n/**\n * Estimate token count for a message using chars/4 heuristic.\n * This is conservative (overestimates tokens).\n */\nexport function estimateTokens(message: AgentMessage): number {\n\tlet chars = 0;\n\n\tswitch (message.role) {\n\t\tcase \"user\": {\n\t\t\tconst content = (message as { content: string | Array<{ type: string; text?: string }> }).content;\n\t\t\tif (typeof content === \"string\") {\n\t\t\t\tchars = content.length;\n\t\t\t} else if (Array.isArray(content)) {\n\t\t\t\tfor (const block of content) {\n\t\t\t\t\tif (block.type === \"text\" && block.text) {\n\t\t\t\t\t\tchars += block.text.length;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t\tcase \"assistant\": {\n\t\t\tconst assistant = message as AssistantMessage;\n\t\t\tfor (const block of assistant.content) {\n\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\tchars += block.text.length;\n\t\t\t\t} else if (block.type === \"thinking\") {\n\t\t\t\t\tchars += block.thinking.length;\n\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\tchars += block.name.length + JSON.stringify(block.arguments).length;\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t\tcase \"custom\":\n\t\tcase \"toolResult\": {\n\t\t\tif (typeof message.content === \"string\") {\n\t\t\t\tchars = message.content.length;\n\t\t\t} else {\n\t\t\t\tfor (const block of message.content) {\n\t\t\t\t\tif (block.type === \"text\" && block.text) {\n\t\t\t\t\t\tchars += 
block.text.length;\n\t\t\t\t\t}\n\t\t\t\t\tif (block.type === \"image\") {\n\t\t\t\t\t\tchars += 4800; // Estimate images as 4000 chars, or 1200 tokens\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t\tcase \"bashExecution\": {\n\t\t\tchars = message.command.length + message.output.length;\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t\tcase \"branchSummary\":\n\t\tcase \"compactionSummary\": {\n\t\t\tchars = message.summary.length;\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t}\n\n\treturn 0;\n}\n\n/**\n * Find valid cut points: indices of user, assistant, custom, or bashExecution messages.\n * Never cut at tool results (they must follow their tool call).\n * When we cut at an assistant message with tool calls, its tool results follow it\n * and will be kept.\n * BashExecutionMessage is treated like a user message (user-initiated context).\n */\nfunction findValidCutPoints(entries: SessionEntry[], startIndex: number, endIndex: number): number[] {\n\tconst cutPoints: number[] = [];\n\tfor (let i = startIndex; i < endIndex; i++) {\n\t\tconst entry = entries[i];\n\t\tswitch (entry.type) {\n\t\t\tcase \"message\": {\n\t\t\t\tconst role = entry.message.role;\n\t\t\t\tswitch (role) {\n\t\t\t\t\tcase \"bashExecution\":\n\t\t\t\t\tcase \"custom\":\n\t\t\t\t\tcase \"branchSummary\":\n\t\t\t\t\tcase \"compactionSummary\":\n\t\t\t\t\tcase \"user\":\n\t\t\t\t\tcase \"assistant\":\n\t\t\t\t\t\tcutPoints.push(i);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase \"toolResult\":\n\t\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tcase \"thinking_level_change\":\n\t\t\tcase \"model_change\":\n\t\t\tcase \"compaction\":\n\t\t\tcase \"branch_summary\":\n\t\t\tcase \"custom\":\n\t\t\tcase \"custom_message\":\n\t\t\tcase \"label\":\n\t\t}\n\t\t// branch_summary and custom_message are user-role messages, valid cut points\n\t\tif (entry.type === \"branch_summary\" || entry.type === \"custom_message\") {\n\t\t\tcutPoints.push(i);\n\t\t}\n\t}\n\treturn cutPoints;\n}\n\n/**\n * Find the user message (or bashExecution) that starts the turn containing the given entry index.\n * Returns -1 if no turn start found before the index.\n * BashExecutionMessage is treated like a user message for turn boundaries.\n */\nexport function findTurnStartIndex(entries: SessionEntry[], entryIndex: number, startIndex: number): number {\n\tfor (let i = entryIndex; i >= startIndex; i--) {\n\t\tconst entry = entries[i];\n\t\t// branch_summary and custom_message are user-role messages, can start a turn\n\t\tif (entry.type === \"branch_summary\" || entry.type === \"custom_message\") {\n\t\t\treturn i;\n\t\t}\n\t\tif (entry.type === \"message\") {\n\t\t\tconst role = entry.message.role;\n\t\t\tif (role === \"user\" || role === \"bashExecution\") {\n\t\t\t\treturn i;\n\t\t\t}\n\t\t}\n\t}\n\treturn -1;\n}\n\nexport interface CutPointResult {\n\t/** Index of first entry to keep */\n\tfirstKeptEntryIndex: number;\n\t/** Index of user message that starts the turn being split, or -1 if not splitting */\n\tturnStartIndex: number;\n\t/** Whether this cut splits a turn (cut point is not a user message) */\n\tisSplitTurn: boolean;\n}\n\n/**\n * Find the cut point in session entries that keeps approximately `keepRecentTokens`.\n *\n * Algorithm: Walk backwards from newest, accumulating estimated message sizes.\n * Stop when we've accumulated >= keepRecentTokens. Cut at that point.\n *\n * Can cut at user OR assistant messages (never tool results). 
When cutting at an\n * assistant message with tool calls, its tool results come after and will be kept.\n *\n * Returns CutPointResult with:\n * - firstKeptEntryIndex: the entry index to start keeping from\n * - turnStartIndex: if cutting mid-turn, the user message that started that turn\n * - isSplitTurn: whether we're cutting in the middle of a turn\n *\n * Only considers entries between `startIndex` and `endIndex` (exclusive).\n */\nexport function findCutPoint(\n\tentries: SessionEntry[],\n\tstartIndex: number,\n\tendIndex: number,\n\tkeepRecentTokens: number,\n): CutPointResult {\n\tconst cutPoints = findValidCutPoints(entries, startIndex, endIndex);\n\n\tif (cutPoints.length === 0) {\n\t\treturn { firstKeptEntryIndex: startIndex, turnStartIndex: -1, isSplitTurn: false };\n\t}\n\n\t// Walk backwards from newest, accumulating estimated message sizes\n\tlet accumulatedTokens = 0;\n\tlet cutIndex = cutPoints[0]; // Default: keep from first message (not header)\n\n\tfor (let i = endIndex - 1; i >= startIndex; i--) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type !== \"message\") continue;\n\n\t\t// Estimate this message's size\n\t\tconst messageTokens = estimateTokens(entry.message);\n\t\taccumulatedTokens += messageTokens;\n\n\t\t// Check if we've exceeded the budget\n\t\tif (accumulatedTokens >= keepRecentTokens) {\n\t\t\t// Find the closest valid cut point at or after this entry\n\t\t\tfor (let c = 0; c < cutPoints.length; c++) {\n\t\t\t\tif (cutPoints[c] >= i) {\n\t\t\t\t\tcutIndex = cutPoints[c];\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t}\n\n\t// Scan backwards from cutIndex to include any non-message entries (bash, settings, etc.)\n\twhile (cutIndex > startIndex) {\n\t\tconst prevEntry = entries[cutIndex - 1];\n\t\t// Stop at session header or compaction boundaries\n\t\tif (prevEntry.type === \"compaction\") {\n\t\t\tbreak;\n\t\t}\n\t\tif (prevEntry.type === \"message\") {\n\t\t\t// Stop if we hit any message\n\t\t\tbreak;\n\t\t}\n\t\t// Include this non-message entry (bash, settings change, etc.)\n\t\tcutIndex--;\n\t}\n\n\t// Determine if this is a split turn\n\tconst cutEntry = entries[cutIndex];\n\tconst isUserMessage = cutEntry.type === \"message\" && cutEntry.message.role === \"user\";\n\tconst turnStartIndex = isUserMessage ? -1 : findTurnStartIndex(entries, cutIndex, startIndex);\n\n\treturn {\n\t\tfirstKeptEntryIndex: cutIndex,\n\t\tturnStartIndex,\n\t\tisSplitTurn: !isUserMessage && turnStartIndex !== -1,\n\t};\n}\n\n// ============================================================================\n// Summarization\n// ============================================================================\n\nconst SUMMARIZATION_PROMPT = `The messages above are a conversation to summarize. Create a structured context checkpoint summary that another LLM will use to continue the work.\n\nUse this EXACT format:\n\n## Goal\n[What is the user trying to accomplish? Can be multiple items if the session covers different tasks.]\n\n## Constraints & Preferences\n- [Any constraints, preferences, or requirements mentioned by user]\n- [Or \"(none)\" if none were mentioned]\n\n## Progress\n### Done\n- [x] [Completed tasks/changes]\n\n### In Progress\n- [ ] [Current work]\n\n### Blocked\n- [Issues preventing progress, if any]\n\n## Key Decisions\n- **[Decision]**: [Brief rationale]\n\n## Next Steps\n1. 
[Ordered list of what should happen next]\n\n## Critical Context\n- [Any data, examples, or references needed to continue]\n- [Or \"(none)\" if not applicable]\n\nKeep each section concise. Preserve exact file paths, function names, and error messages.`;\n\nconst UPDATE_SUMMARIZATION_PROMPT = `The messages above are NEW conversation messages to incorporate into the existing summary provided in <previous-summary> tags.\n\nUpdate the existing structured summary with new information. RULES:\n- PRESERVE all existing information from the previous summary\n- ADD new progress, decisions, and context from the new messages\n- UPDATE the Progress section: move items from \"In Progress\" to \"Done\" when completed\n- UPDATE \"Next Steps\" based on what was accomplished\n- PRESERVE exact file paths, function names, and error messages\n- If something is no longer relevant, you may remove it\n\nUse this EXACT format:\n\n## Goal\n[Preserve existing goals, add new ones if the task expanded]\n\n## Constraints & Preferences\n- [Preserve existing, add new ones discovered]\n\n## Progress\n### Done\n- [x] [Include previously done items AND newly completed items]\n\n### In Progress\n- [ ] [Current work - update based on progress]\n\n### Blocked\n- [Current blockers - remove if resolved]\n\n## Key Decisions\n- **[Decision]**: [Brief rationale] (preserve all previous, add new)\n\n## Next Steps\n1. [Update based on current state]\n\n## Critical Context\n- [Preserve important context, add new if needed]\n\nKeep each section concise. Preserve exact file paths, function names, and error messages.`;\n\n/**\n * Generate a summary of the conversation using the LLM.\n * If previousSummary is provided, uses the update prompt to merge.\n */\nexport async function generateSummary(\n\tcurrentMessages: AgentMessage[],\n\tmodel: Model<any>,\n\treserveTokens: number,\n\tapiKey: string,\n\tsignal?: AbortSignal,\n\tcustomInstructions?: string,\n\tpreviousSummary?: string,\n): Promise<string> {\n\tconst maxTokens = Math.floor(0.8 * reserveTokens);\n\n\t// Use update prompt if we have a previous summary, otherwise initial prompt\n\tlet basePrompt = previousSummary ? 
UPDATE_SUMMARIZATION_PROMPT : SUMMARIZATION_PROMPT;\n\tif (customInstructions) {\n\t\tbasePrompt = `${basePrompt}\\n\\nAdditional focus: ${customInstructions}`;\n\t}\n\n\t// Serialize conversation to text so model doesn't try to continue it\n\t// Convert to LLM messages first (handles custom types like bashExecution, custom, etc.)\n\tconst llmMessages = convertToLlm(currentMessages);\n\tconst conversationText = serializeConversation(llmMessages);\n\n\t// Build the prompt with conversation wrapped in tags\n\tlet promptText = `<conversation>\\n${conversationText}\\n</conversation>\\n\\n`;\n\tif (previousSummary) {\n\t\tpromptText += `<previous-summary>\\n${previousSummary}\\n</previous-summary>\\n\\n`;\n\t}\n\tpromptText += basePrompt;\n\n\tconst summarizationMessages = [\n\t\t{\n\t\t\trole: \"user\" as const,\n\t\t\tcontent: [{ type: \"text\" as const, text: promptText }],\n\t\t\ttimestamp: Date.now(),\n\t\t},\n\t];\n\n\tconst response = await completeSimple(\n\t\tmodel,\n\t\t{ systemPrompt: SUMMARIZATION_SYSTEM_PROMPT, messages: summarizationMessages },\n\t\t{ maxTokens, signal, apiKey, reasoning: \"high\" },\n\t);\n\n\tif (response.stopReason === \"error\") {\n\t\tthrow new Error(`Summarization failed: ${response.errorMessage || \"Unknown error\"}`);\n\t}\n\n\tconst textContent = response.content\n\t\t.filter((c): c is { type: \"text\"; text: string } => c.type === \"text\")\n\t\t.map((c) => c.text)\n\t\t.join(\"\\n\");\n\n\treturn textContent;\n}\n\n// ============================================================================\n// Compaction Preparation (for extensions)\n// ============================================================================\n\nexport interface CompactionPreparation {\n\t/** UUID of first entry to keep */\n\tfirstKeptEntryId: string;\n\t/** Messages that will be summarized and discarded */\n\tmessagesToSummarize: AgentMessage[];\n\t/** Messages that will be turned into turn prefix summary (if splitting) */\n\tturnPrefixMessages: AgentMessage[];\n\t/** Whether this is a split turn (cut point in middle of turn) */\n\tisSplitTurn: boolean;\n\ttokensBefore: number;\n\t/** Summary from previous compaction, for iterative update */\n\tpreviousSummary?: string;\n\t/** File operations extracted from messagesToSummarize */\n\tfileOps: FileOperations;\n\t/** Compaction settions from settings.jsonl\t*/\n\tsettings: CompactionSettings;\n}\n\nexport function prepareCompaction(\n\tpathEntries: SessionEntry[],\n\tsettings: CompactionSettings,\n): CompactionPreparation | undefined {\n\tif (pathEntries.length > 0 && pathEntries[pathEntries.length - 1].type === \"compaction\") {\n\t\treturn undefined;\n\t}\n\n\tlet prevCompactionIndex = -1;\n\tfor (let i = pathEntries.length - 1; i >= 0; i--) {\n\t\tif (pathEntries[i].type === \"compaction\") {\n\t\t\tprevCompactionIndex = i;\n\t\t\tbreak;\n\t\t}\n\t}\n\tconst boundaryStart = prevCompactionIndex + 1;\n\tconst boundaryEnd = pathEntries.length;\n\n\tconst lastUsage = getLastAssistantUsage(pathEntries);\n\tconst tokensBefore = lastUsage ? calculateContextTokens(lastUsage) : 0;\n\n\tconst cutPoint = findCutPoint(pathEntries, boundaryStart, boundaryEnd, settings.keepRecentTokens);\n\n\t// Get UUID of first kept entry\n\tconst firstKeptEntry = pathEntries[cutPoint.firstKeptEntryIndex];\n\tif (!firstKeptEntry?.id) {\n\t\treturn undefined; // Session needs migration\n\t}\n\tconst firstKeptEntryId = firstKeptEntry.id;\n\n\tconst historyEnd = cutPoint.isSplitTurn ? 
cutPoint.turnStartIndex : cutPoint.firstKeptEntryIndex;\n\n\t// Messages to summarize (will be discarded after summary)\n\tconst messagesToSummarize: AgentMessage[] = [];\n\tfor (let i = boundaryStart; i < historyEnd; i++) {\n\t\tconst msg = getMessageFromEntry(pathEntries[i]);\n\t\tif (msg) messagesToSummarize.push(msg);\n\t}\n\n\t// Messages for turn prefix summary (if splitting a turn)\n\tconst turnPrefixMessages: AgentMessage[] = [];\n\tif (cutPoint.isSplitTurn) {\n\t\tfor (let i = cutPoint.turnStartIndex; i < cutPoint.firstKeptEntryIndex; i++) {\n\t\t\tconst msg = getMessageFromEntry(pathEntries[i]);\n\t\t\tif (msg) turnPrefixMessages.push(msg);\n\t\t}\n\t}\n\n\t// Get previous summary for iterative update\n\tlet previousSummary: string | undefined;\n\tif (prevCompactionIndex >= 0) {\n\t\tconst prevCompaction = pathEntries[prevCompactionIndex] as CompactionEntry;\n\t\tpreviousSummary = prevCompaction.summary;\n\t}\n\n\t// Extract file operations from messages and previous compaction\n\tconst fileOps = extractFileOperations(messagesToSummarize, pathEntries, prevCompactionIndex);\n\n\t// Also extract file ops from turn prefix if splitting\n\tif (cutPoint.isSplitTurn) {\n\t\tfor (const msg of turnPrefixMessages) {\n\t\t\textractFileOpsFromMessage(msg, fileOps);\n\t\t}\n\t}\n\n\treturn {\n\t\tfirstKeptEntryId,\n\t\tmessagesToSummarize,\n\t\tturnPrefixMessages,\n\t\tisSplitTurn: cutPoint.isSplitTurn,\n\t\ttokensBefore,\n\t\tpreviousSummary,\n\t\tfileOps,\n\t\tsettings,\n\t};\n}\n\n// ============================================================================\n// Main compaction function\n// ============================================================================\n\nconst TURN_PREFIX_SUMMARIZATION_PROMPT = `This is the PREFIX of a turn that was too large to keep. The SUFFIX (recent work) is retained.\n\nSummarize the prefix to provide context for the retained suffix:\n\n## Original Request\n[What did the user ask for in this turn?]\n\n## Early Progress\n- [Key decisions and work done in the prefix]\n\n## Context for Suffix\n- [Information needed to understand the retained recent work]\n\nBe concise. Focus on what's needed to understand the kept suffix.`;\n\n/**\n * Generate summaries for compaction using prepared data.\n * Returns CompactionResult - SessionManager adds uuid/parentUuid when saving.\n *\n * @param preparation - Pre-calculated preparation from prepareCompaction()\n * @param customInstructions - Optional custom focus for the summary\n */\nexport async function compact(\n\tpreparation: CompactionPreparation,\n\tmodel: Model<any>,\n\tapiKey: string,\n\tcustomInstructions?: string,\n\tsignal?: AbortSignal,\n): Promise<CompactionResult> {\n\tconst {\n\t\tfirstKeptEntryId,\n\t\tmessagesToSummarize,\n\t\tturnPrefixMessages,\n\t\tisSplitTurn,\n\t\ttokensBefore,\n\t\tpreviousSummary,\n\t\tfileOps,\n\t\tsettings,\n\t} = preparation;\n\n\t// Generate summaries (can be parallel if both needed) and merge into one\n\tlet summary: string;\n\n\tif (isSplitTurn && turnPrefixMessages.length > 0) {\n\t\t// Generate both summaries in parallel\n\t\tconst [historyResult, turnPrefixResult] = await Promise.all([\n\t\t\tmessagesToSummarize.length > 0\n\t\t\t\t? 
generateSummary(\n\t\t\t\t\t\tmessagesToSummarize,\n\t\t\t\t\t\tmodel,\n\t\t\t\t\t\tsettings.reserveTokens,\n\t\t\t\t\t\tapiKey,\n\t\t\t\t\t\tsignal,\n\t\t\t\t\t\tcustomInstructions,\n\t\t\t\t\t\tpreviousSummary,\n\t\t\t\t\t)\n\t\t\t\t: Promise.resolve(\"No prior history.\"),\n\t\t\tgenerateTurnPrefixSummary(turnPrefixMessages, model, settings.reserveTokens, apiKey, signal),\n\t\t]);\n\t\t// Merge into single summary\n\t\tsummary = `${historyResult}\\n\\n---\\n\\n**Turn Context (split turn):**\\n\\n${turnPrefixResult}`;\n\t} else {\n\t\t// Just generate history summary\n\t\tsummary = await generateSummary(\n\t\t\tmessagesToSummarize,\n\t\t\tmodel,\n\t\t\tsettings.reserveTokens,\n\t\t\tapiKey,\n\t\t\tsignal,\n\t\t\tcustomInstructions,\n\t\t\tpreviousSummary,\n\t\t);\n\t}\n\n\t// Compute file lists and append to summary\n\tconst { readFiles, modifiedFiles } = computeFileLists(fileOps);\n\tsummary += formatFileOperations(readFiles, modifiedFiles);\n\n\tif (!firstKeptEntryId) {\n\t\tthrow new Error(\"First kept entry has no UUID - session may need migration\");\n\t}\n\n\treturn {\n\t\tsummary,\n\t\tfirstKeptEntryId,\n\t\ttokensBefore,\n\t\tdetails: { readFiles, modifiedFiles } as CompactionDetails,\n\t};\n}\n\n/**\n * Generate a summary for a turn prefix (when splitting a turn).\n */\nasync function generateTurnPrefixSummary(\n\tmessages: AgentMessage[],\n\tmodel: Model<any>,\n\treserveTokens: number,\n\tapiKey: string,\n\tsignal?: AbortSignal,\n): Promise<string> {\n\tconst maxTokens = Math.floor(0.5 * reserveTokens); // Smaller budget for turn prefix\n\n\tconst transformedMessages = convertToLlm(messages);\n\tconst summarizationMessages = [\n\t\t...transformedMessages,\n\t\t{\n\t\t\trole: \"user\" as const,\n\t\t\tcontent: [{ type: \"text\" as const, text: TURN_PREFIX_SUMMARIZATION_PROMPT }],\n\t\t\ttimestamp: Date.now(),\n\t\t},\n\t];\n\n\tconst response = await complete(model, { messages: summarizationMessages }, { maxTokens, signal, apiKey });\n\n\tif (response.stopReason === \"error\") {\n\t\tthrow new Error(`Turn prefix summarization failed: ${response.errorMessage || \"Unknown error\"}`);\n\t}\n\n\treturn response.content\n\t\t.filter((c): c is { type: \"text\"; text: string } => c.type === \"text\")\n\t\t.map((c) => c.text)\n\t\t.join(\"\\n\");\n}\n"]}
+ {"version":3,"file":"compaction.d.ts","sourceRoot":"","sources":["../../../src/core/compaction/compaction.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAChE,OAAO,KAAK,EAAoB,KAAK,EAAE,KAAK,EAAE,MAAM,qBAAqB,CAAC;AAG1E,OAAO,KAAK,EAAmB,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAC3E,OAAO,EAIN,KAAK,cAAc,EAInB,MAAM,YAAY,CAAC;AAMpB,kEAAkE;AAClE,MAAM,WAAW,iBAAiB;IACjC,SAAS,EAAE,MAAM,EAAE,CAAC;IACpB,aAAa,EAAE,MAAM,EAAE,CAAC;CACxB;AAwDD,8EAA8E;AAC9E,MAAM,WAAW,gBAAgB,CAAC,CAAC,GAAG,OAAO;IAC5C,OAAO,EAAE,MAAM,CAAC;IAChB,gBAAgB,EAAE,MAAM,CAAC;IACzB,YAAY,EAAE,MAAM,CAAC;IACrB,+FAA+F;IAC/F,OAAO,CAAC,EAAE,CAAC,CAAC;CACZ;AAMD,MAAM,WAAW,kBAAkB;IAClC,OAAO,EAAE,OAAO,CAAC;IACjB,aAAa,EAAE,MAAM,CAAC;IACtB,gBAAgB,EAAE,MAAM,CAAC;CACzB;AAED,eAAO,MAAM,2BAA2B,EAAE,kBAIzC,CAAC;AAMF;;;GAGG;AACH,wBAAgB,sBAAsB,CAAC,KAAK,EAAE,KAAK,GAAG,MAAM,CAE3D;AAgBD;;GAEG;AACH,wBAAgB,qBAAqB,CAAC,OAAO,EAAE,YAAY,EAAE,GAAG,KAAK,GAAG,SAAS,CAShF;AAED;;GAEG;AACH,wBAAgB,aAAa,CAAC,aAAa,EAAE,MAAM,EAAE,aAAa,EAAE,MAAM,EAAE,QAAQ,EAAE,kBAAkB,GAAG,OAAO,CAGjH;AAMD;;;GAGG;AACH,wBAAgB,cAAc,CAAC,OAAO,EAAE,YAAY,GAAG,MAAM,CA0D5D;AA8CD;;;;GAIG;AACH,wBAAgB,kBAAkB,CAAC,OAAO,EAAE,YAAY,EAAE,EAAE,UAAU,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,GAAG,MAAM,CAe1G;AAED,MAAM,WAAW,cAAc;IAC9B,mCAAmC;IACnC,mBAAmB,EAAE,MAAM,CAAC;IAC5B,qFAAqF;IACrF,cAAc,EAAE,MAAM,CAAC;IACvB,uEAAuE;IACvE,WAAW,EAAE,OAAO,CAAC;CACrB;AAED;;;;;;;;;;;;;;;GAeG;AACH,wBAAgB,YAAY,CAC3B,OAAO,EAAE,YAAY,EAAE,EACvB,UAAU,EAAE,MAAM,EAClB,QAAQ,EAAE,MAAM,EAChB,gBAAgB,EAAE,MAAM,GACtB,cAAc,CAyDhB;AA8ED;;;GAGG;AACH,wBAAsB,eAAe,CACpC,eAAe,EAAE,YAAY,EAAE,EAC/B,KAAK,EAAE,KAAK,CAAC,GAAG,CAAC,EACjB,aAAa,EAAE,MAAM,EACrB,MAAM,EAAE,MAAM,EACd,MAAM,CAAC,EAAE,WAAW,EACpB,kBAAkB,CAAC,EAAE,MAAM,EAC3B,eAAe,CAAC,EAAE,MAAM,GACtB,OAAO,CAAC,MAAM,CAAC,CA6CjB;AAMD,MAAM,WAAW,qBAAqB;IACrC,kCAAkC;IAClC,gBAAgB,EAAE,MAAM,CAAC;IACzB,qDAAqD;IACrD,mBAAmB,EAAE,YAAY,EAAE,CAAC;IACpC,2EAA2E;IAC3E,kBAAkB,EAAE,YAAY,EAAE,CAAC;IACnC,iEAAiE;IACjE,WAAW,EAAE,OAAO,CAAC;IACrB,YAAY,EAAE,MAAM,CAAC;IACrB,6DAA6D;IAC7D,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,yDAAyD;IACzD,OAAO,EAAE,cAAc,CAAC;IACxB,8CAA8C;IAC9C,QAAQ,EAAE,kBAAkB,CAAC;CAC7B;AAED,wBAAgB,iBAAiB,CAChC,WAAW,EAAE,YAAY,EAAE,EAC3B,QAAQ,EAAE,kBAAkB,GAC1B,qBAAqB,GAAG,SAAS,CAwEnC;AAqBD;;;;;;GAMG;AACH,wBAAsB,OAAO,CAC5B,WAAW,EAAE,qBAAqB,EAClC,KAAK,EAAE,KAAK,CAAC,GAAG,CAAC,EACjB,MAAM,EAAE,MAAM,EACd,kBAAkB,CAAC,EAAE,MAAM,EAC3B,MAAM,CAAC,EAAE,WAAW,GAClB,OAAO,CAAC,gBAAgB,CAAC,CA4D3B","sourcesContent":["/**\n * Context compaction for long sessions.\n *\n * Pure functions for compaction logic. 
The session manager handles I/O,\n * and after compaction the session is reloaded.\n */\n\nimport type { AgentMessage } from \"@mariozechner/pi-agent-core\";\nimport type { AssistantMessage, Model, Usage } from \"@mariozechner/pi-ai\";\nimport { completeSimple } from \"@mariozechner/pi-ai\";\nimport { convertToLlm, createBranchSummaryMessage, createCustomMessage } from \"../messages.js\";\nimport type { CompactionEntry, SessionEntry } from \"../session-manager.js\";\nimport {\n\tcomputeFileLists,\n\tcreateFileOps,\n\textractFileOpsFromMessage,\n\ttype FileOperations,\n\tformatFileOperations,\n\tSUMMARIZATION_SYSTEM_PROMPT,\n\tserializeConversation,\n} from \"./utils.js\";\n\n// ============================================================================\n// File Operation Tracking\n// ============================================================================\n\n/** Details stored in CompactionEntry.details for file tracking */\nexport interface CompactionDetails {\n\treadFiles: string[];\n\tmodifiedFiles: string[];\n}\n\n/**\n * Extract file operations from messages and previous compaction entries.\n */\nfunction extractFileOperations(\n\tmessages: AgentMessage[],\n\tentries: SessionEntry[],\n\tprevCompactionIndex: number,\n): FileOperations {\n\tconst fileOps = createFileOps();\n\n\t// Collect from previous compaction's details (if pi-generated)\n\tif (prevCompactionIndex >= 0) {\n\t\tconst prevCompaction = entries[prevCompactionIndex] as CompactionEntry;\n\t\tif (!prevCompaction.fromHook && prevCompaction.details) {\n\t\t\t// fromHook field kept for session file compatibility\n\t\t\tconst details = prevCompaction.details as CompactionDetails;\n\t\t\tif (Array.isArray(details.readFiles)) {\n\t\t\t\tfor (const f of details.readFiles) fileOps.read.add(f);\n\t\t\t}\n\t\t\tif (Array.isArray(details.modifiedFiles)) {\n\t\t\t\tfor (const f of details.modifiedFiles) fileOps.edited.add(f);\n\t\t\t}\n\t\t}\n\t}\n\n\t// Extract from tool calls in messages\n\tfor (const msg of messages) {\n\t\textractFileOpsFromMessage(msg, fileOps);\n\t}\n\n\treturn fileOps;\n}\n\n// ============================================================================\n// Message Extraction\n// ============================================================================\n\n/**\n * Extract AgentMessage from an entry if it produces one.\n * Returns undefined for entries that don't contribute to LLM context.\n */\nfunction getMessageFromEntry(entry: SessionEntry): AgentMessage | undefined {\n\tif (entry.type === \"message\") {\n\t\treturn entry.message;\n\t}\n\tif (entry.type === \"custom_message\") {\n\t\treturn createCustomMessage(entry.customType, entry.content, entry.display, entry.details, entry.timestamp);\n\t}\n\tif (entry.type === \"branch_summary\") {\n\t\treturn createBranchSummaryMessage(entry.summary, entry.fromId, entry.timestamp);\n\t}\n\treturn undefined;\n}\n\n/** Result from compact() - SessionManager adds uuid/parentUuid when saving */\nexport interface CompactionResult<T = unknown> {\n\tsummary: string;\n\tfirstKeptEntryId: string;\n\ttokensBefore: number;\n\t/** Extension-specific data (e.g., ArtifactIndex, version markers for structured compaction) */\n\tdetails?: T;\n}\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface CompactionSettings {\n\tenabled: boolean;\n\treserveTokens: number;\n\tkeepRecentTokens: number;\n}\n\nexport const DEFAULT_COMPACTION_SETTINGS: 
CompactionSettings = {\n\tenabled: true,\n\treserveTokens: 16384,\n\tkeepRecentTokens: 20000,\n};\n\n// ============================================================================\n// Token calculation\n// ============================================================================\n\n/**\n * Calculate total context tokens from usage.\n * Uses the native totalTokens field when available, falls back to computing from components.\n */\nexport function calculateContextTokens(usage: Usage): number {\n\treturn usage.totalTokens || usage.input + usage.output + usage.cacheRead + usage.cacheWrite;\n}\n\n/**\n * Get usage from an assistant message if available.\n * Skips aborted and error messages as they don't have valid usage data.\n */\nfunction getAssistantUsage(msg: AgentMessage): Usage | undefined {\n\tif (msg.role === \"assistant\" && \"usage\" in msg) {\n\t\tconst assistantMsg = msg as AssistantMessage;\n\t\tif (assistantMsg.stopReason !== \"aborted\" && assistantMsg.stopReason !== \"error\" && assistantMsg.usage) {\n\t\t\treturn assistantMsg.usage;\n\t\t}\n\t}\n\treturn undefined;\n}\n\n/**\n * Find the last non-aborted assistant message usage from session entries.\n */\nexport function getLastAssistantUsage(entries: SessionEntry[]): Usage | undefined {\n\tfor (let i = entries.length - 1; i >= 0; i--) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\") {\n\t\t\tconst usage = getAssistantUsage(entry.message);\n\t\t\tif (usage) return usage;\n\t\t}\n\t}\n\treturn undefined;\n}\n\n/**\n * Check if compaction should trigger based on context usage.\n */\nexport function shouldCompact(contextTokens: number, contextWindow: number, settings: CompactionSettings): boolean {\n\tif (!settings.enabled) return false;\n\treturn contextTokens > contextWindow - settings.reserveTokens;\n}\n\n// ============================================================================\n// Cut point detection\n// ============================================================================\n\n/**\n * Estimate token count for a message using chars/4 heuristic.\n * This is conservative (overestimates tokens).\n */\nexport function estimateTokens(message: AgentMessage): number {\n\tlet chars = 0;\n\n\tswitch (message.role) {\n\t\tcase \"user\": {\n\t\t\tconst content = (message as { content: string | Array<{ type: string; text?: string }> }).content;\n\t\t\tif (typeof content === \"string\") {\n\t\t\t\tchars = content.length;\n\t\t\t} else if (Array.isArray(content)) {\n\t\t\t\tfor (const block of content) {\n\t\t\t\t\tif (block.type === \"text\" && block.text) {\n\t\t\t\t\t\tchars += block.text.length;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t\tcase \"assistant\": {\n\t\t\tconst assistant = message as AssistantMessage;\n\t\t\tfor (const block of assistant.content) {\n\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\tchars += block.text.length;\n\t\t\t\t} else if (block.type === \"thinking\") {\n\t\t\t\t\tchars += block.thinking.length;\n\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\tchars += block.name.length + JSON.stringify(block.arguments).length;\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t\tcase \"custom\":\n\t\tcase \"toolResult\": {\n\t\t\tif (typeof message.content === \"string\") {\n\t\t\t\tchars = message.content.length;\n\t\t\t} else {\n\t\t\t\tfor (const block of message.content) {\n\t\t\t\t\tif (block.type === \"text\" && block.text) {\n\t\t\t\t\t\tchars += block.text.length;\n\t\t\t\t\t}\n\t\t\t\t\tif (block.type === 
\"image\") {\n\t\t\t\t\t\tchars += 4800; // Estimate images as 4000 chars, or 1200 tokens\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t\tcase \"bashExecution\": {\n\t\t\tchars = message.command.length + message.output.length;\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t\tcase \"branchSummary\":\n\t\tcase \"compactionSummary\": {\n\t\t\tchars = message.summary.length;\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t}\n\n\treturn 0;\n}\n\n/**\n * Find valid cut points: indices of user, assistant, custom, or bashExecution messages.\n * Never cut at tool results (they must follow their tool call).\n * When we cut at an assistant message with tool calls, its tool results follow it\n * and will be kept.\n * BashExecutionMessage is treated like a user message (user-initiated context).\n */\nfunction findValidCutPoints(entries: SessionEntry[], startIndex: number, endIndex: number): number[] {\n\tconst cutPoints: number[] = [];\n\tfor (let i = startIndex; i < endIndex; i++) {\n\t\tconst entry = entries[i];\n\t\tswitch (entry.type) {\n\t\t\tcase \"message\": {\n\t\t\t\tconst role = entry.message.role;\n\t\t\t\tswitch (role) {\n\t\t\t\t\tcase \"bashExecution\":\n\t\t\t\t\tcase \"custom\":\n\t\t\t\t\tcase \"branchSummary\":\n\t\t\t\t\tcase \"compactionSummary\":\n\t\t\t\t\tcase \"user\":\n\t\t\t\t\tcase \"assistant\":\n\t\t\t\t\t\tcutPoints.push(i);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase \"toolResult\":\n\t\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tcase \"thinking_level_change\":\n\t\t\tcase \"model_change\":\n\t\t\tcase \"compaction\":\n\t\t\tcase \"branch_summary\":\n\t\t\tcase \"custom\":\n\t\t\tcase \"custom_message\":\n\t\t\tcase \"label\":\n\t\t}\n\t\t// branch_summary and custom_message are user-role messages, valid cut points\n\t\tif (entry.type === \"branch_summary\" || entry.type === \"custom_message\") {\n\t\t\tcutPoints.push(i);\n\t\t}\n\t}\n\treturn cutPoints;\n}\n\n/**\n * Find the user message (or bashExecution) that starts the turn containing the given entry index.\n * Returns -1 if no turn start found before the index.\n * BashExecutionMessage is treated like a user message for turn boundaries.\n */\nexport function findTurnStartIndex(entries: SessionEntry[], entryIndex: number, startIndex: number): number {\n\tfor (let i = entryIndex; i >= startIndex; i--) {\n\t\tconst entry = entries[i];\n\t\t// branch_summary and custom_message are user-role messages, can start a turn\n\t\tif (entry.type === \"branch_summary\" || entry.type === \"custom_message\") {\n\t\t\treturn i;\n\t\t}\n\t\tif (entry.type === \"message\") {\n\t\t\tconst role = entry.message.role;\n\t\t\tif (role === \"user\" || role === \"bashExecution\") {\n\t\t\t\treturn i;\n\t\t\t}\n\t\t}\n\t}\n\treturn -1;\n}\n\nexport interface CutPointResult {\n\t/** Index of first entry to keep */\n\tfirstKeptEntryIndex: number;\n\t/** Index of user message that starts the turn being split, or -1 if not splitting */\n\tturnStartIndex: number;\n\t/** Whether this cut splits a turn (cut point is not a user message) */\n\tisSplitTurn: boolean;\n}\n\n/**\n * Find the cut point in session entries that keeps approximately `keepRecentTokens`.\n *\n * Algorithm: Walk backwards from newest, accumulating estimated message sizes.\n * Stop when we've accumulated >= keepRecentTokens. Cut at that point.\n *\n * Can cut at user OR assistant messages (never tool results). 
When cutting at an\n * assistant message with tool calls, its tool results come after and will be kept.\n *\n * Returns CutPointResult with:\n * - firstKeptEntryIndex: the entry index to start keeping from\n * - turnStartIndex: if cutting mid-turn, the user message that started that turn\n * - isSplitTurn: whether we're cutting in the middle of a turn\n *\n * Only considers entries between `startIndex` and `endIndex` (exclusive).\n */\nexport function findCutPoint(\n\tentries: SessionEntry[],\n\tstartIndex: number,\n\tendIndex: number,\n\tkeepRecentTokens: number,\n): CutPointResult {\n\tconst cutPoints = findValidCutPoints(entries, startIndex, endIndex);\n\n\tif (cutPoints.length === 0) {\n\t\treturn { firstKeptEntryIndex: startIndex, turnStartIndex: -1, isSplitTurn: false };\n\t}\n\n\t// Walk backwards from newest, accumulating estimated message sizes\n\tlet accumulatedTokens = 0;\n\tlet cutIndex = cutPoints[0]; // Default: keep from first message (not header)\n\n\tfor (let i = endIndex - 1; i >= startIndex; i--) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type !== \"message\") continue;\n\n\t\t// Estimate this message's size\n\t\tconst messageTokens = estimateTokens(entry.message);\n\t\taccumulatedTokens += messageTokens;\n\n\t\t// Check if we've exceeded the budget\n\t\tif (accumulatedTokens >= keepRecentTokens) {\n\t\t\t// Find the closest valid cut point at or after this entry\n\t\t\tfor (let c = 0; c < cutPoints.length; c++) {\n\t\t\t\tif (cutPoints[c] >= i) {\n\t\t\t\t\tcutIndex = cutPoints[c];\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t}\n\n\t// Scan backwards from cutIndex to include any non-message entries (bash, settings, etc.)\n\twhile (cutIndex > startIndex) {\n\t\tconst prevEntry = entries[cutIndex - 1];\n\t\t// Stop at session header or compaction boundaries\n\t\tif (prevEntry.type === \"compaction\") {\n\t\t\tbreak;\n\t\t}\n\t\tif (prevEntry.type === \"message\") {\n\t\t\t// Stop if we hit any message\n\t\t\tbreak;\n\t\t}\n\t\t// Include this non-message entry (bash, settings change, etc.)\n\t\tcutIndex--;\n\t}\n\n\t// Determine if this is a split turn\n\tconst cutEntry = entries[cutIndex];\n\tconst isUserMessage = cutEntry.type === \"message\" && cutEntry.message.role === \"user\";\n\tconst turnStartIndex = isUserMessage ? -1 : findTurnStartIndex(entries, cutIndex, startIndex);\n\n\treturn {\n\t\tfirstKeptEntryIndex: cutIndex,\n\t\tturnStartIndex,\n\t\tisSplitTurn: !isUserMessage && turnStartIndex !== -1,\n\t};\n}\n\n// ============================================================================\n// Summarization\n// ============================================================================\n\nconst SUMMARIZATION_PROMPT = `The messages above are a conversation to summarize. Create a structured context checkpoint summary that another LLM will use to continue the work.\n\nUse this EXACT format:\n\n## Goal\n[What is the user trying to accomplish? Can be multiple items if the session covers different tasks.]\n\n## Constraints & Preferences\n- [Any constraints, preferences, or requirements mentioned by user]\n- [Or \"(none)\" if none were mentioned]\n\n## Progress\n### Done\n- [x] [Completed tasks/changes]\n\n### In Progress\n- [ ] [Current work]\n\n### Blocked\n- [Issues preventing progress, if any]\n\n## Key Decisions\n- **[Decision]**: [Brief rationale]\n\n## Next Steps\n1. 
[Ordered list of what should happen next]\n\n## Critical Context\n- [Any data, examples, or references needed to continue]\n- [Or \"(none)\" if not applicable]\n\nKeep each section concise. Preserve exact file paths, function names, and error messages.`;\n\nconst UPDATE_SUMMARIZATION_PROMPT = `The messages above are NEW conversation messages to incorporate into the existing summary provided in <previous-summary> tags.\n\nUpdate the existing structured summary with new information. RULES:\n- PRESERVE all existing information from the previous summary\n- ADD new progress, decisions, and context from the new messages\n- UPDATE the Progress section: move items from \"In Progress\" to \"Done\" when completed\n- UPDATE \"Next Steps\" based on what was accomplished\n- PRESERVE exact file paths, function names, and error messages\n- If something is no longer relevant, you may remove it\n\nUse this EXACT format:\n\n## Goal\n[Preserve existing goals, add new ones if the task expanded]\n\n## Constraints & Preferences\n- [Preserve existing, add new ones discovered]\n\n## Progress\n### Done\n- [x] [Include previously done items AND newly completed items]\n\n### In Progress\n- [ ] [Current work - update based on progress]\n\n### Blocked\n- [Current blockers - remove if resolved]\n\n## Key Decisions\n- **[Decision]**: [Brief rationale] (preserve all previous, add new)\n\n## Next Steps\n1. [Update based on current state]\n\n## Critical Context\n- [Preserve important context, add new if needed]\n\nKeep each section concise. Preserve exact file paths, function names, and error messages.`;\n\n/**\n * Generate a summary of the conversation using the LLM.\n * If previousSummary is provided, uses the update prompt to merge.\n */\nexport async function generateSummary(\n\tcurrentMessages: AgentMessage[],\n\tmodel: Model<any>,\n\treserveTokens: number,\n\tapiKey: string,\n\tsignal?: AbortSignal,\n\tcustomInstructions?: string,\n\tpreviousSummary?: string,\n): Promise<string> {\n\tconst maxTokens = Math.floor(0.8 * reserveTokens);\n\n\t// Use update prompt if we have a previous summary, otherwise initial prompt\n\tlet basePrompt = previousSummary ? 
UPDATE_SUMMARIZATION_PROMPT : SUMMARIZATION_PROMPT;\n\tif (customInstructions) {\n\t\tbasePrompt = `${basePrompt}\\n\\nAdditional focus: ${customInstructions}`;\n\t}\n\n\t// Serialize conversation to text so model doesn't try to continue it\n\t// Convert to LLM messages first (handles custom types like bashExecution, custom, etc.)\n\tconst llmMessages = convertToLlm(currentMessages);\n\tconst conversationText = serializeConversation(llmMessages);\n\n\t// Build the prompt with conversation wrapped in tags\n\tlet promptText = `<conversation>\\n${conversationText}\\n</conversation>\\n\\n`;\n\tif (previousSummary) {\n\t\tpromptText += `<previous-summary>\\n${previousSummary}\\n</previous-summary>\\n\\n`;\n\t}\n\tpromptText += basePrompt;\n\n\tconst summarizationMessages = [\n\t\t{\n\t\t\trole: \"user\" as const,\n\t\t\tcontent: [{ type: \"text\" as const, text: promptText }],\n\t\t\ttimestamp: Date.now(),\n\t\t},\n\t];\n\n\tconst response = await completeSimple(\n\t\tmodel,\n\t\t{ systemPrompt: SUMMARIZATION_SYSTEM_PROMPT, messages: summarizationMessages },\n\t\t{ maxTokens, signal, apiKey, reasoning: \"high\" },\n\t);\n\n\tif (response.stopReason === \"error\") {\n\t\tthrow new Error(`Summarization failed: ${response.errorMessage || \"Unknown error\"}`);\n\t}\n\n\tconst textContent = response.content\n\t\t.filter((c): c is { type: \"text\"; text: string } => c.type === \"text\")\n\t\t.map((c) => c.text)\n\t\t.join(\"\\n\");\n\n\treturn textContent;\n}\n\n// ============================================================================\n// Compaction Preparation (for extensions)\n// ============================================================================\n\nexport interface CompactionPreparation {\n\t/** UUID of first entry to keep */\n\tfirstKeptEntryId: string;\n\t/** Messages that will be summarized and discarded */\n\tmessagesToSummarize: AgentMessage[];\n\t/** Messages that will be turned into turn prefix summary (if splitting) */\n\tturnPrefixMessages: AgentMessage[];\n\t/** Whether this is a split turn (cut point in middle of turn) */\n\tisSplitTurn: boolean;\n\ttokensBefore: number;\n\t/** Summary from previous compaction, for iterative update */\n\tpreviousSummary?: string;\n\t/** File operations extracted from messagesToSummarize */\n\tfileOps: FileOperations;\n\t/** Compaction settions from settings.jsonl\t*/\n\tsettings: CompactionSettings;\n}\n\nexport function prepareCompaction(\n\tpathEntries: SessionEntry[],\n\tsettings: CompactionSettings,\n): CompactionPreparation | undefined {\n\tif (pathEntries.length > 0 && pathEntries[pathEntries.length - 1].type === \"compaction\") {\n\t\treturn undefined;\n\t}\n\n\tlet prevCompactionIndex = -1;\n\tfor (let i = pathEntries.length - 1; i >= 0; i--) {\n\t\tif (pathEntries[i].type === \"compaction\") {\n\t\t\tprevCompactionIndex = i;\n\t\t\tbreak;\n\t\t}\n\t}\n\tconst boundaryStart = prevCompactionIndex + 1;\n\tconst boundaryEnd = pathEntries.length;\n\n\tconst lastUsage = getLastAssistantUsage(pathEntries);\n\tconst tokensBefore = lastUsage ? calculateContextTokens(lastUsage) : 0;\n\n\tconst cutPoint = findCutPoint(pathEntries, boundaryStart, boundaryEnd, settings.keepRecentTokens);\n\n\t// Get UUID of first kept entry\n\tconst firstKeptEntry = pathEntries[cutPoint.firstKeptEntryIndex];\n\tif (!firstKeptEntry?.id) {\n\t\treturn undefined; // Session needs migration\n\t}\n\tconst firstKeptEntryId = firstKeptEntry.id;\n\n\tconst historyEnd = cutPoint.isSplitTurn ? 
cutPoint.turnStartIndex : cutPoint.firstKeptEntryIndex;\n\n\t// Messages to summarize (will be discarded after summary)\n\tconst messagesToSummarize: AgentMessage[] = [];\n\tfor (let i = boundaryStart; i < historyEnd; i++) {\n\t\tconst msg = getMessageFromEntry(pathEntries[i]);\n\t\tif (msg) messagesToSummarize.push(msg);\n\t}\n\n\t// Messages for turn prefix summary (if splitting a turn)\n\tconst turnPrefixMessages: AgentMessage[] = [];\n\tif (cutPoint.isSplitTurn) {\n\t\tfor (let i = cutPoint.turnStartIndex; i < cutPoint.firstKeptEntryIndex; i++) {\n\t\t\tconst msg = getMessageFromEntry(pathEntries[i]);\n\t\t\tif (msg) turnPrefixMessages.push(msg);\n\t\t}\n\t}\n\n\t// Get previous summary for iterative update\n\tlet previousSummary: string | undefined;\n\tif (prevCompactionIndex >= 0) {\n\t\tconst prevCompaction = pathEntries[prevCompactionIndex] as CompactionEntry;\n\t\tpreviousSummary = prevCompaction.summary;\n\t}\n\n\t// Extract file operations from messages and previous compaction\n\tconst fileOps = extractFileOperations(messagesToSummarize, pathEntries, prevCompactionIndex);\n\n\t// Also extract file ops from turn prefix if splitting\n\tif (cutPoint.isSplitTurn) {\n\t\tfor (const msg of turnPrefixMessages) {\n\t\t\textractFileOpsFromMessage(msg, fileOps);\n\t\t}\n\t}\n\n\treturn {\n\t\tfirstKeptEntryId,\n\t\tmessagesToSummarize,\n\t\tturnPrefixMessages,\n\t\tisSplitTurn: cutPoint.isSplitTurn,\n\t\ttokensBefore,\n\t\tpreviousSummary,\n\t\tfileOps,\n\t\tsettings,\n\t};\n}\n\n// ============================================================================\n// Main compaction function\n// ============================================================================\n\nconst TURN_PREFIX_SUMMARIZATION_PROMPT = `This is the PREFIX of a turn that was too large to keep. The SUFFIX (recent work) is retained.\n\nSummarize the prefix to provide context for the retained suffix:\n\n## Original Request\n[What did the user ask for in this turn?]\n\n## Early Progress\n- [Key decisions and work done in the prefix]\n\n## Context for Suffix\n- [Information needed to understand the retained recent work]\n\nBe concise. Focus on what's needed to understand the kept suffix.`;\n\n/**\n * Generate summaries for compaction using prepared data.\n * Returns CompactionResult - SessionManager adds uuid/parentUuid when saving.\n *\n * @param preparation - Pre-calculated preparation from prepareCompaction()\n * @param customInstructions - Optional custom focus for the summary\n */\nexport async function compact(\n\tpreparation: CompactionPreparation,\n\tmodel: Model<any>,\n\tapiKey: string,\n\tcustomInstructions?: string,\n\tsignal?: AbortSignal,\n): Promise<CompactionResult> {\n\tconst {\n\t\tfirstKeptEntryId,\n\t\tmessagesToSummarize,\n\t\tturnPrefixMessages,\n\t\tisSplitTurn,\n\t\ttokensBefore,\n\t\tpreviousSummary,\n\t\tfileOps,\n\t\tsettings,\n\t} = preparation;\n\n\t// Generate summaries (can be parallel if both needed) and merge into one\n\tlet summary: string;\n\n\tif (isSplitTurn && turnPrefixMessages.length > 0) {\n\t\t// Generate both summaries in parallel\n\t\tconst [historyResult, turnPrefixResult] = await Promise.all([\n\t\t\tmessagesToSummarize.length > 0\n\t\t\t\t? 
generateSummary(\n\t\t\t\t\t\tmessagesToSummarize,\n\t\t\t\t\t\tmodel,\n\t\t\t\t\t\tsettings.reserveTokens,\n\t\t\t\t\t\tapiKey,\n\t\t\t\t\t\tsignal,\n\t\t\t\t\t\tcustomInstructions,\n\t\t\t\t\t\tpreviousSummary,\n\t\t\t\t\t)\n\t\t\t\t: Promise.resolve(\"No prior history.\"),\n\t\t\tgenerateTurnPrefixSummary(turnPrefixMessages, model, settings.reserveTokens, apiKey, signal),\n\t\t]);\n\t\t// Merge into single summary\n\t\tsummary = `${historyResult}\\n\\n---\\n\\n**Turn Context (split turn):**\\n\\n${turnPrefixResult}`;\n\t} else {\n\t\t// Just generate history summary\n\t\tsummary = await generateSummary(\n\t\t\tmessagesToSummarize,\n\t\t\tmodel,\n\t\t\tsettings.reserveTokens,\n\t\t\tapiKey,\n\t\t\tsignal,\n\t\t\tcustomInstructions,\n\t\t\tpreviousSummary,\n\t\t);\n\t}\n\n\t// Compute file lists and append to summary\n\tconst { readFiles, modifiedFiles } = computeFileLists(fileOps);\n\tsummary += formatFileOperations(readFiles, modifiedFiles);\n\n\tif (!firstKeptEntryId) {\n\t\tthrow new Error(\"First kept entry has no UUID - session may need migration\");\n\t}\n\n\treturn {\n\t\tsummary,\n\t\tfirstKeptEntryId,\n\t\ttokensBefore,\n\t\tdetails: { readFiles, modifiedFiles } as CompactionDetails,\n\t};\n}\n\n/**\n * Generate a summary for a turn prefix (when splitting a turn).\n */\nasync function generateTurnPrefixSummary(\n\tmessages: AgentMessage[],\n\tmodel: Model<any>,\n\treserveTokens: number,\n\tapiKey: string,\n\tsignal?: AbortSignal,\n): Promise<string> {\n\tconst maxTokens = Math.floor(0.5 * reserveTokens); // Smaller budget for turn prefix\n\tconst llmMessages = convertToLlm(messages);\n\tconst conversationText = serializeConversation(llmMessages);\n\tconst promptText = `<conversation>\\n${conversationText}\\n</conversation>\\n\\n${TURN_PREFIX_SUMMARIZATION_PROMPT}`;\n\tconst summarizationMessages = [\n\t\t{\n\t\t\trole: \"user\" as const,\n\t\t\tcontent: [{ type: \"text\" as const, text: promptText }],\n\t\t\ttimestamp: Date.now(),\n\t\t},\n\t];\n\n\tconst response = await completeSimple(\n\t\tmodel,\n\t\t{ systemPrompt: SUMMARIZATION_SYSTEM_PROMPT, messages: summarizationMessages },\n\t\t{ maxTokens, signal, apiKey },\n\t);\n\n\tif (response.stopReason === \"error\") {\n\t\tthrow new Error(`Turn prefix summarization failed: ${response.errorMessage || \"Unknown error\"}`);\n\t}\n\n\treturn response.content\n\t\t.filter((c): c is { type: \"text\"; text: string } => c.type === \"text\")\n\t\t.map((c) => c.text)\n\t\t.join(\"\\n\");\n}\n"]}
@@ -4,7 +4,7 @@
  * Pure functions for compaction logic. The session manager handles I/O,
  * and after compaction the session is reloaded.
  */
- import { complete, completeSimple } from "@mariozechner/pi-ai";
+ import { completeSimple } from "@mariozechner/pi-ai";
  import { convertToLlm, createBranchSummaryMessage, createCustomMessage } from "../messages.js";
  import { computeFileLists, createFileOps, extractFileOpsFromMessage, formatFileOperations, SUMMARIZATION_SYSTEM_PROMPT, serializeConversation, } from "./utils.js";
  /**
@@ -538,16 +538,17 @@ export async function compact(preparation, model, apiKey, customInstructions, si
  */
  async function generateTurnPrefixSummary(messages, model, reserveTokens, apiKey, signal) {
  const maxTokens = Math.floor(0.5 * reserveTokens); // Smaller budget for turn prefix
- const transformedMessages = convertToLlm(messages);
+ const llmMessages = convertToLlm(messages);
+ const conversationText = serializeConversation(llmMessages);
+ const promptText = `<conversation>\n${conversationText}\n</conversation>\n\n${TURN_PREFIX_SUMMARIZATION_PROMPT}`;
  const summarizationMessages = [
- ...transformedMessages,
  {
  role: "user",
- content: [{ type: "text", text: TURN_PREFIX_SUMMARIZATION_PROMPT }],
+ content: [{ type: "text", text: promptText }],
  timestamp: Date.now(),
  },
  ];
- const response = await complete(model, { messages: summarizationMessages }, { maxTokens, signal, apiKey });
+ const response = await completeSimple(model, { systemPrompt: SUMMARIZATION_SYSTEM_PROMPT, messages: summarizationMessages }, { maxTokens, signal, apiKey });
  if (response.stopReason === "error") {
  throw new Error(`Turn prefix summarization failed: ${response.errorMessage || "Unknown error"}`);
  }
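For context on the hunk above: generateTurnPrefixSummary no longer appends the prompt after the converted messages and calls complete; it now serializes the turn prefix into a <conversation> block and sends a single user message through completeSimple with the shared summarization system prompt. A minimal sketch of that flow, assembled only from the lines in this diff (the AgentMessage type and the module-level TURN_PREFIX_SUMMARIZATION_PROMPT constant are assumed from the surrounding source, not shown here):

```typescript
// Sketch only: mirrors the post-change generateTurnPrefixSummary from the hunk above.
import type { AgentMessage } from "@mariozechner/pi-agent-core";
import { completeSimple, type Model } from "@mariozechner/pi-ai";
import { convertToLlm } from "../messages.js";
import { SUMMARIZATION_SYSTEM_PROMPT, serializeConversation } from "./utils.js";

// Assumption: the prompt constant referenced in the diff is defined in the same module.
declare const TURN_PREFIX_SUMMARIZATION_PROMPT: string;

async function turnPrefixSummarySketch(
	messages: AgentMessage[],
	model: Model<any>,
	reserveTokens: number,
	apiKey: string,
	signal?: AbortSignal,
): Promise<string> {
	const maxTokens = Math.floor(0.5 * reserveTokens); // smaller budget than the main history summary

	// Serialize the prefix to text and wrap it in <conversation> tags so the model
	// summarizes the transcript instead of trying to continue it.
	const conversationText = serializeConversation(convertToLlm(messages));
	const promptText = `<conversation>\n${conversationText}\n</conversation>\n\n${TURN_PREFIX_SUMMARIZATION_PROMPT}`;

	// Single user message plus the shared summarization system prompt, via completeSimple
	// (previously the converted messages were passed directly to complete).
	const response = await completeSimple(
		model,
		{
			systemPrompt: SUMMARIZATION_SYSTEM_PROMPT,
			messages: [
				{ role: "user" as const, content: [{ type: "text" as const, text: promptText }], timestamp: Date.now() },
			],
		},
		{ maxTokens, signal, apiKey },
	);

	if (response.stopReason === "error") {
		throw new Error(`Turn prefix summarization failed: ${response.errorMessage || "Unknown error"}`);
	}
	return response.content
		.filter((c): c is { type: "text"; text: string } => c.type === "text")
		.map((c) => c.text)
		.join("\n");
}
```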
@@ -1 +1 @@
- {"version":3,"file":"compaction.js","sourceRoot":"","sources":["../../../src/core/compaction/compaction.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAIH,OAAO,EAAE,QAAQ,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AAC/D,OAAO,EAAE,YAAY,EAAE,0BAA0B,EAAE,mBAAmB,EAAE,MAAM,gBAAgB,CAAC;AAE/F,OAAO,EACN,gBAAgB,EAChB,aAAa,EACb,yBAAyB,EAEzB,oBAAoB,EACpB,2BAA2B,EAC3B,qBAAqB,GACrB,MAAM,YAAY,CAAC;AAYpB;;GAEG;AACH,SAAS,qBAAqB,CAC7B,QAAwB,EACxB,OAAuB,EACvB,mBAA2B,EACV;IACjB,MAAM,OAAO,GAAG,aAAa,EAAE,CAAC;IAEhC,+DAA+D;IAC/D,IAAI,mBAAmB,IAAI,CAAC,EAAE,CAAC;QAC9B,MAAM,cAAc,GAAG,OAAO,CAAC,mBAAmB,CAAoB,CAAC;QACvE,IAAI,CAAC,cAAc,CAAC,QAAQ,IAAI,cAAc,CAAC,OAAO,EAAE,CAAC;YACxD,qDAAqD;YACrD,MAAM,OAAO,GAAG,cAAc,CAAC,OAA4B,CAAC;YAC5D,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,CAAC;gBACtC,KAAK,MAAM,CAAC,IAAI,OAAO,CAAC,SAAS;oBAAE,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACxD,CAAC;YACD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE,CAAC;gBAC1C,KAAK,MAAM,CAAC,IAAI,OAAO,CAAC,aAAa;oBAAE,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YAC9D,CAAC;QACF,CAAC;IACF,CAAC;IAED,sCAAsC;IACtC,KAAK,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;QAC5B,yBAAyB,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;IACzC,CAAC;IAED,OAAO,OAAO,CAAC;AAAA,CACf;AAED,+EAA+E;AAC/E,qBAAqB;AACrB,+EAA+E;AAE/E;;;GAGG;AACH,SAAS,mBAAmB,CAAC,KAAmB,EAA4B;IAC3E,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;QAC9B,OAAO,KAAK,CAAC,OAAO,CAAC;IACtB,CAAC;IACD,IAAI,KAAK,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;QACrC,OAAO,mBAAmB,CAAC,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC;IAC5G,CAAC;IACD,IAAI,KAAK,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;QACrC,OAAO,0BAA0B,CAAC,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC;IACjF,CAAC;IACD,OAAO,SAAS,CAAC;AAAA,CACjB;AAqBD,MAAM,CAAC,MAAM,2BAA2B,GAAuB;IAC9D,OAAO,EAAE,IAAI;IACb,aAAa,EAAE,KAAK;IACpB,gBAAgB,EAAE,KAAK;CACvB,CAAC;AAEF,+EAA+E;AAC/E,oBAAoB;AACpB,+EAA+E;AAE/E;;;GAGG;AACH,MAAM,UAAU,sBAAsB,CAAC,KAAY,EAAU;IAC5D,OAAO,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC,UAAU,CAAC;AAAA,CAC5F;AAED;;;GAGG;AACH,SAAS,iBAAiB,CAAC,GAAiB,EAAqB;IAChE,IAAI,GAAG,CAAC,IAAI,KAAK,WAAW,IAAI,OAAO,IAAI,GAAG,EAAE,CAAC;QAChD,MAAM,YAAY,GAAG,GAAuB,CAAC;QAC7C,IAAI,YAAY,CAAC,UAAU,KAAK,SAAS,IAAI,YAAY,CAAC,UAAU,KAAK,OAAO,IAAI,YAAY,CAAC,KAAK,EAAE,CAAC;YACxG,OAAO,YAAY,CAAC,KAAK,CAAC;QAC3B,CAAC;IACF,CAAC;IACD,OAAO,SAAS,CAAC;AAAA,CACjB;AAED;;GAEG;AACH,MAAM,UAAU,qBAAqB,CAAC,OAAuB,EAAqB;IACjF,KAAK,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC9C,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YAC/C,IAAI,KAAK;gBAAE,OAAO,KAAK,CAAC;QACzB,CAAC;IACF,CAAC;IACD,OAAO,SAAS,CAAC;AAAA,CACjB;AAED;;GAEG;AACH,MAAM,UAAU,aAAa,CAAC,aAAqB,EAAE,aAAqB,EAAE,QAA4B,EAAW;IAClH,IAAI,CAAC,QAAQ,CAAC,OAAO;QAAE,OAAO,KAAK,CAAC;IACpC,OAAO,aAAa,GAAG,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC;AAAA,CAC9D;AAED,+EAA+E;AAC/E,sBAAsB;AACtB,+EAA+E;AAE/E;;;GAGG;AACH,MAAM,UAAU,cAAc,CAAC,OAAqB,EAAU;IAC7D,IAAI,KAAK,GAAG,CAAC,CAAC;IAEd,QAAQ,OAAO,CAAC,IAAI,EAAE,CAAC;QACtB,KAAK,MAAM,EAAE,CAAC;YACb,MAAM,OAAO,GAAI,OAAwE,CAAC,OAAO,CAAC;YAClG,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE,CAAC;gBACjC,KAAK,GAAG,OAAO,CAAC,MAAM,CAAC;YACxB,CAAC;iBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;gBACnC,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE,CAAC;oBAC7B,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,IAAI,KAAK,CAAC,IAAI,EAAE,CAAC;wBACzC,KAAK,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC;oBAC5B,CAAC;gBACF,CAAC;YACF,CAAC;YACD,OAAO,IAAI,CAAC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QAC7B,CAAC;QACD,KAAK,WAA
W,EAAE,CAAC;YAClB,MAAM,SAAS,GAAG,OAA2B,CAAC;YAC9C,KAAK,MAAM,KAAK,IAAI,SAAS,CAAC,OAAO,EAAE,CAAC;gBACvC,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;oBAC3B,KAAK,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC;gBAC5B,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;oBACtC,KAAK,IAAI,KAAK,CAAC,QAAQ,CAAC,MAAM,CAAC;gBAChC,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;oBACtC,KAAK,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC;gBACrE,CAAC;YACF,CAAC;YACD,OAAO,IAAI,CAAC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QAC7B,CAAC;QACD,KAAK,QAAQ,CAAC;QACd,KAAK,YAAY,EAAE,CAAC;YACnB,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,EAAE,CAAC;gBACzC,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC;YAChC,CAAC;iBAAM,CAAC;gBACP,KAAK,MAAM,KAAK,IAAI,OAAO,CAAC,OAAO,EAAE,CAAC;oBACrC,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,IAAI,KAAK,CAAC,IAAI,EAAE,CAAC;wBACzC,KAAK,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC;oBAC5B,CAAC;oBACD,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE,CAAC;wBAC5B,KAAK,IAAI,IAAI,CAAC,CAAC,gDAAgD;oBAChE,CAAC;gBACF,CAAC;YACF,CAAC;YACD,OAAO,IAAI,CAAC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QAC7B,CAAC;QACD,KAAK,eAAe,EAAE,CAAC;YACtB,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC;YACvD,OAAO,IAAI,CAAC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QAC7B,CAAC;QACD,KAAK,eAAe,CAAC;QACrB,KAAK,mBAAmB,EAAE,CAAC;YAC1B,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC;YAC/B,OAAO,IAAI,CAAC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QAC7B,CAAC;IACF,CAAC;IAED,OAAO,CAAC,CAAC;AAAA,CACT;AAED;;;;;;GAMG;AACH,SAAS,kBAAkB,CAAC,OAAuB,EAAE,UAAkB,EAAE,QAAgB,EAAY;IACpG,MAAM,SAAS,GAAa,EAAE,CAAC;IAC/B,KAAK,IAAI,CAAC,GAAG,UAAU,EAAE,CAAC,GAAG,QAAQ,EAAE,CAAC,EAAE,EAAE,CAAC;QAC5C,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,QAAQ,KAAK,CAAC,IAAI,EAAE,CAAC;YACpB,KAAK,SAAS,EAAE,CAAC;gBAChB,MAAM,IAAI,GAAG,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC;gBAChC,QAAQ,IAAI,EAAE,CAAC;oBACd,KAAK,eAAe,CAAC;oBACrB,KAAK,QAAQ,CAAC;oBACd,KAAK,eAAe,CAAC;oBACrB,KAAK,mBAAmB,CAAC;oBACzB,KAAK,MAAM,CAAC;oBACZ,KAAK,WAAW;wBACf,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;wBAClB,MAAM;oBACP,KAAK,YAAY;wBAChB,MAAM;gBACR,CAAC;gBACD,MAAM;YACP,CAAC;YACD,KAAK,uBAAuB,CAAC;YAC7B,KAAK,cAAc,CAAC;YACpB,KAAK,YAAY,CAAC;YAClB,KAAK,gBAAgB,CAAC;YACtB,KAAK,QAAQ,CAAC;YACd,KAAK,gBAAgB,CAAC;YACtB,KAAK,OAAO,CAAC;QACd,CAAC;QACD,6EAA6E;QAC7E,IAAI,KAAK,CAAC,IAAI,KAAK,gBAAgB,IAAI,KAAK,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;YACxE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QACnB,CAAC;IACF,CAAC;IACD,OAAO,SAAS,CAAC;AAAA,CACjB;AAED;;;;GAIG;AACH,MAAM,UAAU,kBAAkB,CAAC,OAAuB,EAAE,UAAkB,EAAE,UAAkB,EAAU;IAC3G,KAAK,IAAI,CAAC,GAAG,UAAU,EAAE,CAAC,IAAI,UAAU,EAAE,CAAC,EAAE,EAAE,CAAC;QAC/C,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,6EAA6E;QAC7E,IAAI,KAAK,CAAC,IAAI,KAAK,gBAAgB,IAAI,KAAK,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;YACxE,OAAO,CAAC,CAAC;QACV,CAAC;QACD,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,IAAI,GAAG,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC;YAChC,IAAI,IAAI,KAAK,MAAM,IAAI,IAAI,KAAK,eAAe,EAAE,CAAC;gBACjD,OAAO,CAAC,CAAC;YACV,CAAC;QACF,CAAC;IACF,CAAC;IACD,OAAO,CAAC,CAAC,CAAC;AAAA,CACV;AAWD;;;;;;;;;;;;;;;GAeG;AACH,MAAM,UAAU,YAAY,CAC3B,OAAuB,EACvB,UAAkB,EAClB,QAAgB,EAChB,gBAAwB,EACP;IACjB,MAAM,SAAS,GAAG,kBAAkB,CAAC,OAAO,EAAE,UAAU,EAAE,QAAQ,CAAC,CAAC;IAEpE,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC5B,OAAO,EAAE,mBAAmB,EAAE,UAAU,EAAE,cAAc,EAAE,CAAC,CAAC,EAAE,WAAW,EAAE,KAAK,EAAE,CAAC;IACpF,CAAC;IAED,mEAAmE;IACnE,IAAI,iBAAiB,GAAG,CAAC,CAAC;IAC1B,IAAI,QAAQ,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,gDAAgD;IAE7E,KAAK,IAAI,CAAC,GAAG,QAAQ,GAAG,CAAC,EAAE,CAAC,IAAI,UAAU,EAAE,CAAC,EAAE,EAAE,CAAC;QACjD,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS;YAAE,SAAS;QAEvC,+BAA+B;QAC/B,
MAAM,aAAa,GAAG,cAAc,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACpD,iBAAiB,IAAI,aAAa,CAAC;QAEnC,qCAAqC;QACrC,IAAI,iBAAiB,IAAI,gBAAgB,EAAE,CAAC;YAC3C,0DAA0D;YAC1D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;gBAC3C,IAAI,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC;oBACvB,QAAQ,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC;oBACxB,MAAM;gBACP,CAAC;YACF,CAAC;YACD,MAAM;QACP,CAAC;IACF,CAAC;IAED,yFAAyF;IACzF,OAAO,QAAQ,GAAG,UAAU,EAAE,CAAC;QAC9B,MAAM,SAAS,GAAG,OAAO,CAAC,QAAQ,GAAG,CAAC,CAAC,CAAC;QACxC,kDAAkD;QAClD,IAAI,SAAS,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;YACrC,MAAM;QACP,CAAC;QACD,IAAI,SAAS,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;YAClC,6BAA6B;YAC7B,MAAM;QACP,CAAC;QACD,+DAA+D;QAC/D,QAAQ,EAAE,CAAC;IACZ,CAAC;IAED,oCAAoC;IACpC,MAAM,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;IACnC,MAAM,aAAa,GAAG,QAAQ,CAAC,IAAI,KAAK,SAAS,IAAI,QAAQ,CAAC,OAAO,CAAC,IAAI,KAAK,MAAM,CAAC;IACtF,MAAM,cAAc,GAAG,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,kBAAkB,CAAC,OAAO,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC;IAE9F,OAAO;QACN,mBAAmB,EAAE,QAAQ;QAC7B,cAAc;QACd,WAAW,EAAE,CAAC,aAAa,IAAI,cAAc,KAAK,CAAC,CAAC;KACpD,CAAC;AAAA,CACF;AAED,+EAA+E;AAC/E,gBAAgB;AAChB,+EAA+E;AAE/E,MAAM,oBAAoB,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0FA+B6D,CAAC;AAE3F,MAAM,2BAA2B,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0FAqCsD,CAAC;AAE3F;;;GAGG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CACpC,eAA+B,EAC/B,KAAiB,EACjB,aAAqB,EACrB,MAAc,EACd,MAAoB,EACpB,kBAA2B,EAC3B,eAAwB,EACN;IAClB,MAAM,SAAS,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,GAAG,aAAa,CAAC,CAAC;IAElD,4EAA4E;IAC5E,IAAI,UAAU,GAAG,eAAe,CAAC,CAAC,CAAC,2BAA2B,CAAC,CAAC,CAAC,oBAAoB,CAAC;IACtF,IAAI,kBAAkB,EAAE,CAAC;QACxB,UAAU,GAAG,GAAG,UAAU,yBAAyB,kBAAkB,EAAE,CAAC;IACzE,CAAC;IAED,qEAAqE;IACrE,wFAAwF;IACxF,MAAM,WAAW,GAAG,YAAY,CAAC,eAAe,CAAC,CAAC;IAClD,MAAM,gBAAgB,GAAG,qBAAqB,CAAC,WAAW,CAAC,CAAC;IAE5D,qDAAqD;IACrD,IAAI,UAAU,GAAG,mBAAmB,gBAAgB,uBAAuB,CAAC;IAC5E,IAAI,eAAe,EAAE,CAAC;QACrB,UAAU,IAAI,uBAAuB,eAAe,2BAA2B,CAAC;IACjF,CAAC;IACD,UAAU,IAAI,UAAU,CAAC;IAEzB,MAAM,qBAAqB,GAAG;QAC7B;YACC,IAAI,EAAE,MAAe;YACrB,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAe,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC;YACtD,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB;KACD,CAAC;IAEF,MAAM,QAAQ,GAAG,MAAM,cAAc,CACpC,KAAK,EACL,EAAE,YAAY,EAAE,2BAA2B,EAAE,QAAQ,EAAE,qBAAqB,EAAE,EAC9E,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,CAChD,CAAC;IAEF,IAAI,QAAQ,CAAC,UAAU,KAAK,OAAO,EAAE,CAAC;QACrC,MAAM,IAAI,KAAK,CAAC,yBAAyB,QAAQ,CAAC,YAAY,IAAI,eAAe,EAAE,CAAC,CAAC;IACtF,CAAC;IAED,MAAM,WAAW,GAAG,QAAQ,CAAC,OAAO;SAClC,MAAM,CAAC,CAAC,CAAC,EAAuC,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,MAAM,CAAC;SACrE,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC;SAClB,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,OAAO,WAAW,CAAC;AAAA,CACnB;AAwBD,MAAM,UAAU,iBAAiB,CAChC,WAA2B,EAC3B,QAA4B,EACQ;IACpC,IAAI,WAAW,CAAC,MAAM,GAAG,CAAC,IAAI,WAAW,CAAC,WAAW,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;QACzF,OAAO,SAAS,CAAC;IAClB,CAAC;IAED,IAAI,mBAAmB,GAAG,CAAC,CAAC,CAAC;IAC7B,KAAK,IAAI,CAAC,GAAG,WAAW,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAClD,IAAI,WAAW,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;YAC1C,mBAAmB,GAAG,CAAC,CAAC;YACxB,MAAM;QACP,CAAC;IACF,CAAC;IACD,MAAM,aAAa,GAAG,mBAAmB,GAAG,CAAC,CAAC;IAC9C,MAAM,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC;IAEvC,MAAM,SAAS,GAAG,qBAAqB,CAAC,WAAW,CAAC,CAAC;IACrD,MAAM,YAAY,GAAG,SAAS,CAAC,CAAC,CAAC,sBAAsB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAEvE,MAAM,QAAQ,GAAG,YAAY,CAAC,WAAW,EAAE,aAAa,EAAE,WAAW,EAAE,QAAQ,CAAC,gBAAgB,CAAC,CAAC;IAElG,+BAA+B;IAC/B,MAAM,cAAc,GAAG,WAAW,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CAAC;IACjE,IAAI,CAAC,cAAc,EAAE,EAAE,EAAE,CAAC;QACzB,OAAO,SAAS,CAAC,CAAC,0BAA0B;IAC7C,CAAC;IACD,MAAM,gBAAgB,GAAG,cAAc,CAAC,EAAE,CAAC;I
AE3C,MAAM,UAAU,GAAG,QAAQ,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,mBAAmB,CAAC;IAEjG,0DAA0D;IAC1D,MAAM,mBAAmB,GAAmB,EAAE,CAAC;IAC/C,KAAK,IAAI,CAAC,GAAG,aAAa,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE,CAAC;QACjD,MAAM,GAAG,GAAG,mBAAmB,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;QAChD,IAAI,GAAG;YAAE,mBAAmB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACxC,CAAC;IAED,yDAAyD;IACzD,MAAM,kBAAkB,GAAmB,EAAE,CAAC;IAC9C,IAAI,QAAQ,CAAC,WAAW,EAAE,CAAC;QAC1B,KAAK,IAAI,CAAC,GAAG,QAAQ,CAAC,cAAc,EAAE,CAAC,GAAG,QAAQ,CAAC,mBAAmB,EAAE,CAAC,EAAE,EAAE,CAAC;YAC7E,MAAM,GAAG,GAAG,mBAAmB,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;YAChD,IAAI,GAAG;gBAAE,kBAAkB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QACvC,CAAC;IACF,CAAC;IAED,4CAA4C;IAC5C,IAAI,eAAmC,CAAC;IACxC,IAAI,mBAAmB,IAAI,CAAC,EAAE,CAAC;QAC9B,MAAM,cAAc,GAAG,WAAW,CAAC,mBAAmB,CAAoB,CAAC;QAC3E,eAAe,GAAG,cAAc,CAAC,OAAO,CAAC;IAC1C,CAAC;IAED,gEAAgE;IAChE,MAAM,OAAO,GAAG,qBAAqB,CAAC,mBAAmB,EAAE,WAAW,EAAE,mBAAmB,CAAC,CAAC;IAE7F,sDAAsD;IACtD,IAAI,QAAQ,CAAC,WAAW,EAAE,CAAC;QAC1B,KAAK,MAAM,GAAG,IAAI,kBAAkB,EAAE,CAAC;YACtC,yBAAyB,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;QACzC,CAAC;IACF,CAAC;IAED,OAAO;QACN,gBAAgB;QAChB,mBAAmB;QACnB,kBAAkB;QAClB,WAAW,EAAE,QAAQ,CAAC,WAAW;QACjC,YAAY;QACZ,eAAe;QACf,OAAO;QACP,QAAQ;KACR,CAAC;AAAA,CACF;AAED,+EAA+E;AAC/E,2BAA2B;AAC3B,+EAA+E;AAE/E,MAAM,gCAAgC,GAAG;;;;;;;;;;;;;kEAayB,CAAC;AAEnE;;;;;;GAMG;AACH,MAAM,CAAC,KAAK,UAAU,OAAO,CAC5B,WAAkC,EAClC,KAAiB,EACjB,MAAc,EACd,kBAA2B,EAC3B,MAAoB,EACQ;IAC5B,MAAM,EACL,gBAAgB,EAChB,mBAAmB,EACnB,kBAAkB,EAClB,WAAW,EACX,YAAY,EACZ,eAAe,EACf,OAAO,EACP,QAAQ,GACR,GAAG,WAAW,CAAC;IAEhB,yEAAyE;IACzE,IAAI,OAAe,CAAC;IAEpB,IAAI,WAAW,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAClD,sCAAsC;QACtC,MAAM,CAAC,aAAa,EAAE,gBAAgB,CAAC,GAAG,MAAM,OAAO,CAAC,GAAG,CAAC;YAC3D,mBAAmB,CAAC,MAAM,GAAG,CAAC;gBAC7B,CAAC,CAAC,eAAe,CACf,mBAAmB,EACnB,KAAK,EACL,QAAQ,CAAC,aAAa,EACtB,MAAM,EACN,MAAM,EACN,kBAAkB,EAClB,eAAe,CACf;gBACF,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,mBAAmB,CAAC;YACvC,yBAAyB,CAAC,kBAAkB,EAAE,KAAK,EAAE,QAAQ,CAAC,aAAa,EAAE,MAAM,EAAE,MAAM,CAAC;SAC5F,CAAC,CAAC;QACH,4BAA4B;QAC5B,OAAO,GAAG,GAAG,aAAa,gDAAgD,gBAAgB,EAAE,CAAC;IAC9F,CAAC;SAAM,CAAC;QACP,gCAAgC;QAChC,OAAO,GAAG,MAAM,eAAe,CAC9B,mBAAmB,EACnB,KAAK,EACL,QAAQ,CAAC,aAAa,EACtB,MAAM,EACN,MAAM,EACN,kBAAkB,EAClB,eAAe,CACf,CAAC;IACH,CAAC;IAED,2CAA2C;IAC3C,MAAM,EAAE,SAAS,EAAE,aAAa,EAAE,GAAG,gBAAgB,CAAC,OAAO,CAAC,CAAC;IAC/D,OAAO,IAAI,oBAAoB,CAAC,SAAS,EAAE,aAAa,CAAC,CAAC;IAE1D,IAAI,CAAC,gBAAgB,EAAE,CAAC;QACvB,MAAM,IAAI,KAAK,CAAC,2DAA2D,CAAC,CAAC;IAC9E,CAAC;IAED,OAAO;QACN,OAAO;QACP,gBAAgB;QAChB,YAAY;QACZ,OAAO,EAAE,EAAE,SAAS,EAAE,aAAa,EAAuB;KAC1D,CAAC;AAAA,CACF;AAED;;GAEG;AACH,KAAK,UAAU,yBAAyB,CACvC,QAAwB,EACxB,KAAiB,EACjB,aAAqB,EACrB,MAAc,EACd,MAAoB,EACF;IAClB,MAAM,SAAS,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,GAAG,aAAa,CAAC,CAAC,CAAC,iCAAiC;IAEpF,MAAM,mBAAmB,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;IACnD,MAAM,qBAAqB,GAAG;QAC7B,GAAG,mBAAmB;QACtB;YACC,IAAI,EAAE,MAAe;YACrB,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAe,EAAE,IAAI,EAAE,gCAAgC,EAAE,CAAC;YAC5E,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB;KACD,CAAC;IAEF,MAAM,QAAQ,GAAG,MAAM,QAAQ,CAAC,KAAK,EAAE,EAAE,QAAQ,EAAE,qBAAqB,EAAE,EAAE,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;IAE3G,IAAI,QAAQ,CAAC,UAAU,KAAK,OAAO,EAAE,CAAC;QACrC,MAAM,IAAI,KAAK,CAAC,qCAAqC,QAAQ,CAAC,YAAY,IAAI,eAAe,EAAE,CAAC,CAAC;IAClG,CAAC;IAED,OAAO,QAAQ,CAAC,OAAO;SACrB,MAAM,CAAC,CAAC,CAAC,EAAuC,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,MAAM,CAAC;SACrE,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC;SAClB,IAAI,CAAC,IAAI,CAAC,CAAC;AAAA,CACb","sourcesContent":["/**\n * Context compaction for long sessions.\n *\n * Pure functions for compaction logic. 
The session manager handles I/O,\n * and after compaction the session is reloaded.\n */\n\nimport type { AgentMessage } from \"@mariozechner/pi-agent-core\";\nimport type { AssistantMessage, Model, Usage } from \"@mariozechner/pi-ai\";\nimport { complete, completeSimple } from \"@mariozechner/pi-ai\";\nimport { convertToLlm, createBranchSummaryMessage, createCustomMessage } from \"../messages.js\";\nimport type { CompactionEntry, SessionEntry } from \"../session-manager.js\";\nimport {\n\tcomputeFileLists,\n\tcreateFileOps,\n\textractFileOpsFromMessage,\n\ttype FileOperations,\n\tformatFileOperations,\n\tSUMMARIZATION_SYSTEM_PROMPT,\n\tserializeConversation,\n} from \"./utils.js\";\n\n// ============================================================================\n// File Operation Tracking\n// ============================================================================\n\n/** Details stored in CompactionEntry.details for file tracking */\nexport interface CompactionDetails {\n\treadFiles: string[];\n\tmodifiedFiles: string[];\n}\n\n/**\n * Extract file operations from messages and previous compaction entries.\n */\nfunction extractFileOperations(\n\tmessages: AgentMessage[],\n\tentries: SessionEntry[],\n\tprevCompactionIndex: number,\n): FileOperations {\n\tconst fileOps = createFileOps();\n\n\t// Collect from previous compaction's details (if pi-generated)\n\tif (prevCompactionIndex >= 0) {\n\t\tconst prevCompaction = entries[prevCompactionIndex] as CompactionEntry;\n\t\tif (!prevCompaction.fromHook && prevCompaction.details) {\n\t\t\t// fromHook field kept for session file compatibility\n\t\t\tconst details = prevCompaction.details as CompactionDetails;\n\t\t\tif (Array.isArray(details.readFiles)) {\n\t\t\t\tfor (const f of details.readFiles) fileOps.read.add(f);\n\t\t\t}\n\t\t\tif (Array.isArray(details.modifiedFiles)) {\n\t\t\t\tfor (const f of details.modifiedFiles) fileOps.edited.add(f);\n\t\t\t}\n\t\t}\n\t}\n\n\t// Extract from tool calls in messages\n\tfor (const msg of messages) {\n\t\textractFileOpsFromMessage(msg, fileOps);\n\t}\n\n\treturn fileOps;\n}\n\n// ============================================================================\n// Message Extraction\n// ============================================================================\n\n/**\n * Extract AgentMessage from an entry if it produces one.\n * Returns undefined for entries that don't contribute to LLM context.\n */\nfunction getMessageFromEntry(entry: SessionEntry): AgentMessage | undefined {\n\tif (entry.type === \"message\") {\n\t\treturn entry.message;\n\t}\n\tif (entry.type === \"custom_message\") {\n\t\treturn createCustomMessage(entry.customType, entry.content, entry.display, entry.details, entry.timestamp);\n\t}\n\tif (entry.type === \"branch_summary\") {\n\t\treturn createBranchSummaryMessage(entry.summary, entry.fromId, entry.timestamp);\n\t}\n\treturn undefined;\n}\n\n/** Result from compact() - SessionManager adds uuid/parentUuid when saving */\nexport interface CompactionResult<T = unknown> {\n\tsummary: string;\n\tfirstKeptEntryId: string;\n\ttokensBefore: number;\n\t/** Extension-specific data (e.g., ArtifactIndex, version markers for structured compaction) */\n\tdetails?: T;\n}\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface CompactionSettings {\n\tenabled: boolean;\n\treserveTokens: number;\n\tkeepRecentTokens: number;\n}\n\nexport const 
DEFAULT_COMPACTION_SETTINGS: CompactionSettings = {\n\tenabled: true,\n\treserveTokens: 16384,\n\tkeepRecentTokens: 20000,\n};\n\n// ============================================================================\n// Token calculation\n// ============================================================================\n\n/**\n * Calculate total context tokens from usage.\n * Uses the native totalTokens field when available, falls back to computing from components.\n */\nexport function calculateContextTokens(usage: Usage): number {\n\treturn usage.totalTokens || usage.input + usage.output + usage.cacheRead + usage.cacheWrite;\n}\n\n/**\n * Get usage from an assistant message if available.\n * Skips aborted and error messages as they don't have valid usage data.\n */\nfunction getAssistantUsage(msg: AgentMessage): Usage | undefined {\n\tif (msg.role === \"assistant\" && \"usage\" in msg) {\n\t\tconst assistantMsg = msg as AssistantMessage;\n\t\tif (assistantMsg.stopReason !== \"aborted\" && assistantMsg.stopReason !== \"error\" && assistantMsg.usage) {\n\t\t\treturn assistantMsg.usage;\n\t\t}\n\t}\n\treturn undefined;\n}\n\n/**\n * Find the last non-aborted assistant message usage from session entries.\n */\nexport function getLastAssistantUsage(entries: SessionEntry[]): Usage | undefined {\n\tfor (let i = entries.length - 1; i >= 0; i--) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\") {\n\t\t\tconst usage = getAssistantUsage(entry.message);\n\t\t\tif (usage) return usage;\n\t\t}\n\t}\n\treturn undefined;\n}\n\n/**\n * Check if compaction should trigger based on context usage.\n */\nexport function shouldCompact(contextTokens: number, contextWindow: number, settings: CompactionSettings): boolean {\n\tif (!settings.enabled) return false;\n\treturn contextTokens > contextWindow - settings.reserveTokens;\n}\n\n// ============================================================================\n// Cut point detection\n// ============================================================================\n\n/**\n * Estimate token count for a message using chars/4 heuristic.\n * This is conservative (overestimates tokens).\n */\nexport function estimateTokens(message: AgentMessage): number {\n\tlet chars = 0;\n\n\tswitch (message.role) {\n\t\tcase \"user\": {\n\t\t\tconst content = (message as { content: string | Array<{ type: string; text?: string }> }).content;\n\t\t\tif (typeof content === \"string\") {\n\t\t\t\tchars = content.length;\n\t\t\t} else if (Array.isArray(content)) {\n\t\t\t\tfor (const block of content) {\n\t\t\t\t\tif (block.type === \"text\" && block.text) {\n\t\t\t\t\t\tchars += block.text.length;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t\tcase \"assistant\": {\n\t\t\tconst assistant = message as AssistantMessage;\n\t\t\tfor (const block of assistant.content) {\n\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\tchars += block.text.length;\n\t\t\t\t} else if (block.type === \"thinking\") {\n\t\t\t\t\tchars += block.thinking.length;\n\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\tchars += block.name.length + JSON.stringify(block.arguments).length;\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t\tcase \"custom\":\n\t\tcase \"toolResult\": {\n\t\t\tif (typeof message.content === \"string\") {\n\t\t\t\tchars = message.content.length;\n\t\t\t} else {\n\t\t\t\tfor (const block of message.content) {\n\t\t\t\t\tif (block.type === \"text\" && block.text) {\n\t\t\t\t\t\tchars += 
block.text.length;\n\t\t\t\t\t}\n\t\t\t\t\tif (block.type === \"image\") {\n\t\t\t\t\t\tchars += 4800; // Estimate images as 4000 chars, or 1200 tokens\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t\tcase \"bashExecution\": {\n\t\t\tchars = message.command.length + message.output.length;\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t\tcase \"branchSummary\":\n\t\tcase \"compactionSummary\": {\n\t\t\tchars = message.summary.length;\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t}\n\n\treturn 0;\n}\n\n/**\n * Find valid cut points: indices of user, assistant, custom, or bashExecution messages.\n * Never cut at tool results (they must follow their tool call).\n * When we cut at an assistant message with tool calls, its tool results follow it\n * and will be kept.\n * BashExecutionMessage is treated like a user message (user-initiated context).\n */\nfunction findValidCutPoints(entries: SessionEntry[], startIndex: number, endIndex: number): number[] {\n\tconst cutPoints: number[] = [];\n\tfor (let i = startIndex; i < endIndex; i++) {\n\t\tconst entry = entries[i];\n\t\tswitch (entry.type) {\n\t\t\tcase \"message\": {\n\t\t\t\tconst role = entry.message.role;\n\t\t\t\tswitch (role) {\n\t\t\t\t\tcase \"bashExecution\":\n\t\t\t\t\tcase \"custom\":\n\t\t\t\t\tcase \"branchSummary\":\n\t\t\t\t\tcase \"compactionSummary\":\n\t\t\t\t\tcase \"user\":\n\t\t\t\t\tcase \"assistant\":\n\t\t\t\t\t\tcutPoints.push(i);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase \"toolResult\":\n\t\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tcase \"thinking_level_change\":\n\t\t\tcase \"model_change\":\n\t\t\tcase \"compaction\":\n\t\t\tcase \"branch_summary\":\n\t\t\tcase \"custom\":\n\t\t\tcase \"custom_message\":\n\t\t\tcase \"label\":\n\t\t}\n\t\t// branch_summary and custom_message are user-role messages, valid cut points\n\t\tif (entry.type === \"branch_summary\" || entry.type === \"custom_message\") {\n\t\t\tcutPoints.push(i);\n\t\t}\n\t}\n\treturn cutPoints;\n}\n\n/**\n * Find the user message (or bashExecution) that starts the turn containing the given entry index.\n * Returns -1 if no turn start found before the index.\n * BashExecutionMessage is treated like a user message for turn boundaries.\n */\nexport function findTurnStartIndex(entries: SessionEntry[], entryIndex: number, startIndex: number): number {\n\tfor (let i = entryIndex; i >= startIndex; i--) {\n\t\tconst entry = entries[i];\n\t\t// branch_summary and custom_message are user-role messages, can start a turn\n\t\tif (entry.type === \"branch_summary\" || entry.type === \"custom_message\") {\n\t\t\treturn i;\n\t\t}\n\t\tif (entry.type === \"message\") {\n\t\t\tconst role = entry.message.role;\n\t\t\tif (role === \"user\" || role === \"bashExecution\") {\n\t\t\t\treturn i;\n\t\t\t}\n\t\t}\n\t}\n\treturn -1;\n}\n\nexport interface CutPointResult {\n\t/** Index of first entry to keep */\n\tfirstKeptEntryIndex: number;\n\t/** Index of user message that starts the turn being split, or -1 if not splitting */\n\tturnStartIndex: number;\n\t/** Whether this cut splits a turn (cut point is not a user message) */\n\tisSplitTurn: boolean;\n}\n\n/**\n * Find the cut point in session entries that keeps approximately `keepRecentTokens`.\n *\n * Algorithm: Walk backwards from newest, accumulating estimated message sizes.\n * Stop when we've accumulated >= keepRecentTokens. Cut at that point.\n *\n * Can cut at user OR assistant messages (never tool results). 
When cutting at an\n * assistant message with tool calls, its tool results come after and will be kept.\n *\n * Returns CutPointResult with:\n * - firstKeptEntryIndex: the entry index to start keeping from\n * - turnStartIndex: if cutting mid-turn, the user message that started that turn\n * - isSplitTurn: whether we're cutting in the middle of a turn\n *\n * Only considers entries between `startIndex` and `endIndex` (exclusive).\n */\nexport function findCutPoint(\n\tentries: SessionEntry[],\n\tstartIndex: number,\n\tendIndex: number,\n\tkeepRecentTokens: number,\n): CutPointResult {\n\tconst cutPoints = findValidCutPoints(entries, startIndex, endIndex);\n\n\tif (cutPoints.length === 0) {\n\t\treturn { firstKeptEntryIndex: startIndex, turnStartIndex: -1, isSplitTurn: false };\n\t}\n\n\t// Walk backwards from newest, accumulating estimated message sizes\n\tlet accumulatedTokens = 0;\n\tlet cutIndex = cutPoints[0]; // Default: keep from first message (not header)\n\n\tfor (let i = endIndex - 1; i >= startIndex; i--) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type !== \"message\") continue;\n\n\t\t// Estimate this message's size\n\t\tconst messageTokens = estimateTokens(entry.message);\n\t\taccumulatedTokens += messageTokens;\n\n\t\t// Check if we've exceeded the budget\n\t\tif (accumulatedTokens >= keepRecentTokens) {\n\t\t\t// Find the closest valid cut point at or after this entry\n\t\t\tfor (let c = 0; c < cutPoints.length; c++) {\n\t\t\t\tif (cutPoints[c] >= i) {\n\t\t\t\t\tcutIndex = cutPoints[c];\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t}\n\n\t// Scan backwards from cutIndex to include any non-message entries (bash, settings, etc.)\n\twhile (cutIndex > startIndex) {\n\t\tconst prevEntry = entries[cutIndex - 1];\n\t\t// Stop at session header or compaction boundaries\n\t\tif (prevEntry.type === \"compaction\") {\n\t\t\tbreak;\n\t\t}\n\t\tif (prevEntry.type === \"message\") {\n\t\t\t// Stop if we hit any message\n\t\t\tbreak;\n\t\t}\n\t\t// Include this non-message entry (bash, settings change, etc.)\n\t\tcutIndex--;\n\t}\n\n\t// Determine if this is a split turn\n\tconst cutEntry = entries[cutIndex];\n\tconst isUserMessage = cutEntry.type === \"message\" && cutEntry.message.role === \"user\";\n\tconst turnStartIndex = isUserMessage ? -1 : findTurnStartIndex(entries, cutIndex, startIndex);\n\n\treturn {\n\t\tfirstKeptEntryIndex: cutIndex,\n\t\tturnStartIndex,\n\t\tisSplitTurn: !isUserMessage && turnStartIndex !== -1,\n\t};\n}\n\n// ============================================================================\n// Summarization\n// ============================================================================\n\nconst SUMMARIZATION_PROMPT = `The messages above are a conversation to summarize. Create a structured context checkpoint summary that another LLM will use to continue the work.\n\nUse this EXACT format:\n\n## Goal\n[What is the user trying to accomplish? Can be multiple items if the session covers different tasks.]\n\n## Constraints & Preferences\n- [Any constraints, preferences, or requirements mentioned by user]\n- [Or \"(none)\" if none were mentioned]\n\n## Progress\n### Done\n- [x] [Completed tasks/changes]\n\n### In Progress\n- [ ] [Current work]\n\n### Blocked\n- [Issues preventing progress, if any]\n\n## Key Decisions\n- **[Decision]**: [Brief rationale]\n\n## Next Steps\n1. 
[Ordered list of what should happen next]\n\n## Critical Context\n- [Any data, examples, or references needed to continue]\n- [Or \"(none)\" if not applicable]\n\nKeep each section concise. Preserve exact file paths, function names, and error messages.`;\n\nconst UPDATE_SUMMARIZATION_PROMPT = `The messages above are NEW conversation messages to incorporate into the existing summary provided in <previous-summary> tags.\n\nUpdate the existing structured summary with new information. RULES:\n- PRESERVE all existing information from the previous summary\n- ADD new progress, decisions, and context from the new messages\n- UPDATE the Progress section: move items from \"In Progress\" to \"Done\" when completed\n- UPDATE \"Next Steps\" based on what was accomplished\n- PRESERVE exact file paths, function names, and error messages\n- If something is no longer relevant, you may remove it\n\nUse this EXACT format:\n\n## Goal\n[Preserve existing goals, add new ones if the task expanded]\n\n## Constraints & Preferences\n- [Preserve existing, add new ones discovered]\n\n## Progress\n### Done\n- [x] [Include previously done items AND newly completed items]\n\n### In Progress\n- [ ] [Current work - update based on progress]\n\n### Blocked\n- [Current blockers - remove if resolved]\n\n## Key Decisions\n- **[Decision]**: [Brief rationale] (preserve all previous, add new)\n\n## Next Steps\n1. [Update based on current state]\n\n## Critical Context\n- [Preserve important context, add new if needed]\n\nKeep each section concise. Preserve exact file paths, function names, and error messages.`;\n\n/**\n * Generate a summary of the conversation using the LLM.\n * If previousSummary is provided, uses the update prompt to merge.\n */\nexport async function generateSummary(\n\tcurrentMessages: AgentMessage[],\n\tmodel: Model<any>,\n\treserveTokens: number,\n\tapiKey: string,\n\tsignal?: AbortSignal,\n\tcustomInstructions?: string,\n\tpreviousSummary?: string,\n): Promise<string> {\n\tconst maxTokens = Math.floor(0.8 * reserveTokens);\n\n\t// Use update prompt if we have a previous summary, otherwise initial prompt\n\tlet basePrompt = previousSummary ? 
UPDATE_SUMMARIZATION_PROMPT : SUMMARIZATION_PROMPT;\n\tif (customInstructions) {\n\t\tbasePrompt = `${basePrompt}\\n\\nAdditional focus: ${customInstructions}`;\n\t}\n\n\t// Serialize conversation to text so model doesn't try to continue it\n\t// Convert to LLM messages first (handles custom types like bashExecution, custom, etc.)\n\tconst llmMessages = convertToLlm(currentMessages);\n\tconst conversationText = serializeConversation(llmMessages);\n\n\t// Build the prompt with conversation wrapped in tags\n\tlet promptText = `<conversation>\\n${conversationText}\\n</conversation>\\n\\n`;\n\tif (previousSummary) {\n\t\tpromptText += `<previous-summary>\\n${previousSummary}\\n</previous-summary>\\n\\n`;\n\t}\n\tpromptText += basePrompt;\n\n\tconst summarizationMessages = [\n\t\t{\n\t\t\trole: \"user\" as const,\n\t\t\tcontent: [{ type: \"text\" as const, text: promptText }],\n\t\t\ttimestamp: Date.now(),\n\t\t},\n\t];\n\n\tconst response = await completeSimple(\n\t\tmodel,\n\t\t{ systemPrompt: SUMMARIZATION_SYSTEM_PROMPT, messages: summarizationMessages },\n\t\t{ maxTokens, signal, apiKey, reasoning: \"high\" },\n\t);\n\n\tif (response.stopReason === \"error\") {\n\t\tthrow new Error(`Summarization failed: ${response.errorMessage || \"Unknown error\"}`);\n\t}\n\n\tconst textContent = response.content\n\t\t.filter((c): c is { type: \"text\"; text: string } => c.type === \"text\")\n\t\t.map((c) => c.text)\n\t\t.join(\"\\n\");\n\n\treturn textContent;\n}\n\n// ============================================================================\n// Compaction Preparation (for extensions)\n// ============================================================================\n\nexport interface CompactionPreparation {\n\t/** UUID of first entry to keep */\n\tfirstKeptEntryId: string;\n\t/** Messages that will be summarized and discarded */\n\tmessagesToSummarize: AgentMessage[];\n\t/** Messages that will be turned into turn prefix summary (if splitting) */\n\tturnPrefixMessages: AgentMessage[];\n\t/** Whether this is a split turn (cut point in middle of turn) */\n\tisSplitTurn: boolean;\n\ttokensBefore: number;\n\t/** Summary from previous compaction, for iterative update */\n\tpreviousSummary?: string;\n\t/** File operations extracted from messagesToSummarize */\n\tfileOps: FileOperations;\n\t/** Compaction settions from settings.jsonl\t*/\n\tsettings: CompactionSettings;\n}\n\nexport function prepareCompaction(\n\tpathEntries: SessionEntry[],\n\tsettings: CompactionSettings,\n): CompactionPreparation | undefined {\n\tif (pathEntries.length > 0 && pathEntries[pathEntries.length - 1].type === \"compaction\") {\n\t\treturn undefined;\n\t}\n\n\tlet prevCompactionIndex = -1;\n\tfor (let i = pathEntries.length - 1; i >= 0; i--) {\n\t\tif (pathEntries[i].type === \"compaction\") {\n\t\t\tprevCompactionIndex = i;\n\t\t\tbreak;\n\t\t}\n\t}\n\tconst boundaryStart = prevCompactionIndex + 1;\n\tconst boundaryEnd = pathEntries.length;\n\n\tconst lastUsage = getLastAssistantUsage(pathEntries);\n\tconst tokensBefore = lastUsage ? calculateContextTokens(lastUsage) : 0;\n\n\tconst cutPoint = findCutPoint(pathEntries, boundaryStart, boundaryEnd, settings.keepRecentTokens);\n\n\t// Get UUID of first kept entry\n\tconst firstKeptEntry = pathEntries[cutPoint.firstKeptEntryIndex];\n\tif (!firstKeptEntry?.id) {\n\t\treturn undefined; // Session needs migration\n\t}\n\tconst firstKeptEntryId = firstKeptEntry.id;\n\n\tconst historyEnd = cutPoint.isSplitTurn ? 
cutPoint.turnStartIndex : cutPoint.firstKeptEntryIndex;\n\n\t// Messages to summarize (will be discarded after summary)\n\tconst messagesToSummarize: AgentMessage[] = [];\n\tfor (let i = boundaryStart; i < historyEnd; i++) {\n\t\tconst msg = getMessageFromEntry(pathEntries[i]);\n\t\tif (msg) messagesToSummarize.push(msg);\n\t}\n\n\t// Messages for turn prefix summary (if splitting a turn)\n\tconst turnPrefixMessages: AgentMessage[] = [];\n\tif (cutPoint.isSplitTurn) {\n\t\tfor (let i = cutPoint.turnStartIndex; i < cutPoint.firstKeptEntryIndex; i++) {\n\t\t\tconst msg = getMessageFromEntry(pathEntries[i]);\n\t\t\tif (msg) turnPrefixMessages.push(msg);\n\t\t}\n\t}\n\n\t// Get previous summary for iterative update\n\tlet previousSummary: string | undefined;\n\tif (prevCompactionIndex >= 0) {\n\t\tconst prevCompaction = pathEntries[prevCompactionIndex] as CompactionEntry;\n\t\tpreviousSummary = prevCompaction.summary;\n\t}\n\n\t// Extract file operations from messages and previous compaction\n\tconst fileOps = extractFileOperations(messagesToSummarize, pathEntries, prevCompactionIndex);\n\n\t// Also extract file ops from turn prefix if splitting\n\tif (cutPoint.isSplitTurn) {\n\t\tfor (const msg of turnPrefixMessages) {\n\t\t\textractFileOpsFromMessage(msg, fileOps);\n\t\t}\n\t}\n\n\treturn {\n\t\tfirstKeptEntryId,\n\t\tmessagesToSummarize,\n\t\tturnPrefixMessages,\n\t\tisSplitTurn: cutPoint.isSplitTurn,\n\t\ttokensBefore,\n\t\tpreviousSummary,\n\t\tfileOps,\n\t\tsettings,\n\t};\n}\n\n// ============================================================================\n// Main compaction function\n// ============================================================================\n\nconst TURN_PREFIX_SUMMARIZATION_PROMPT = `This is the PREFIX of a turn that was too large to keep. The SUFFIX (recent work) is retained.\n\nSummarize the prefix to provide context for the retained suffix:\n\n## Original Request\n[What did the user ask for in this turn?]\n\n## Early Progress\n- [Key decisions and work done in the prefix]\n\n## Context for Suffix\n- [Information needed to understand the retained recent work]\n\nBe concise. Focus on what's needed to understand the kept suffix.`;\n\n/**\n * Generate summaries for compaction using prepared data.\n * Returns CompactionResult - SessionManager adds uuid/parentUuid when saving.\n *\n * @param preparation - Pre-calculated preparation from prepareCompaction()\n * @param customInstructions - Optional custom focus for the summary\n */\nexport async function compact(\n\tpreparation: CompactionPreparation,\n\tmodel: Model<any>,\n\tapiKey: string,\n\tcustomInstructions?: string,\n\tsignal?: AbortSignal,\n): Promise<CompactionResult> {\n\tconst {\n\t\tfirstKeptEntryId,\n\t\tmessagesToSummarize,\n\t\tturnPrefixMessages,\n\t\tisSplitTurn,\n\t\ttokensBefore,\n\t\tpreviousSummary,\n\t\tfileOps,\n\t\tsettings,\n\t} = preparation;\n\n\t// Generate summaries (can be parallel if both needed) and merge into one\n\tlet summary: string;\n\n\tif (isSplitTurn && turnPrefixMessages.length > 0) {\n\t\t// Generate both summaries in parallel\n\t\tconst [historyResult, turnPrefixResult] = await Promise.all([\n\t\t\tmessagesToSummarize.length > 0\n\t\t\t\t? 
generateSummary(\n\t\t\t\t\t\tmessagesToSummarize,\n\t\t\t\t\t\tmodel,\n\t\t\t\t\t\tsettings.reserveTokens,\n\t\t\t\t\t\tapiKey,\n\t\t\t\t\t\tsignal,\n\t\t\t\t\t\tcustomInstructions,\n\t\t\t\t\t\tpreviousSummary,\n\t\t\t\t\t)\n\t\t\t\t: Promise.resolve(\"No prior history.\"),\n\t\t\tgenerateTurnPrefixSummary(turnPrefixMessages, model, settings.reserveTokens, apiKey, signal),\n\t\t]);\n\t\t// Merge into single summary\n\t\tsummary = `${historyResult}\\n\\n---\\n\\n**Turn Context (split turn):**\\n\\n${turnPrefixResult}`;\n\t} else {\n\t\t// Just generate history summary\n\t\tsummary = await generateSummary(\n\t\t\tmessagesToSummarize,\n\t\t\tmodel,\n\t\t\tsettings.reserveTokens,\n\t\t\tapiKey,\n\t\t\tsignal,\n\t\t\tcustomInstructions,\n\t\t\tpreviousSummary,\n\t\t);\n\t}\n\n\t// Compute file lists and append to summary\n\tconst { readFiles, modifiedFiles } = computeFileLists(fileOps);\n\tsummary += formatFileOperations(readFiles, modifiedFiles);\n\n\tif (!firstKeptEntryId) {\n\t\tthrow new Error(\"First kept entry has no UUID - session may need migration\");\n\t}\n\n\treturn {\n\t\tsummary,\n\t\tfirstKeptEntryId,\n\t\ttokensBefore,\n\t\tdetails: { readFiles, modifiedFiles } as CompactionDetails,\n\t};\n}\n\n/**\n * Generate a summary for a turn prefix (when splitting a turn).\n */\nasync function generateTurnPrefixSummary(\n\tmessages: AgentMessage[],\n\tmodel: Model<any>,\n\treserveTokens: number,\n\tapiKey: string,\n\tsignal?: AbortSignal,\n): Promise<string> {\n\tconst maxTokens = Math.floor(0.5 * reserveTokens); // Smaller budget for turn prefix\n\n\tconst transformedMessages = convertToLlm(messages);\n\tconst summarizationMessages = [\n\t\t...transformedMessages,\n\t\t{\n\t\t\trole: \"user\" as const,\n\t\t\tcontent: [{ type: \"text\" as const, text: TURN_PREFIX_SUMMARIZATION_PROMPT }],\n\t\t\ttimestamp: Date.now(),\n\t\t},\n\t];\n\n\tconst response = await complete(model, { messages: summarizationMessages }, { maxTokens, signal, apiKey });\n\n\tif (response.stopReason === \"error\") {\n\t\tthrow new Error(`Turn prefix summarization failed: ${response.errorMessage || \"Unknown error\"}`);\n\t}\n\n\treturn response.content\n\t\t.filter((c): c is { type: \"text\"; text: string } => c.type === \"text\")\n\t\t.map((c) => c.text)\n\t\t.join(\"\\n\");\n}\n"]}
+ {"version":3,"file":"compaction.js","sourceRoot":"","sources":["../../../src/core/compaction/compaction.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAIH,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EAAE,YAAY,EAAE,0BAA0B,EAAE,mBAAmB,EAAE,MAAM,gBAAgB,CAAC;AAE/F,OAAO,EACN,gBAAgB,EAChB,aAAa,EACb,yBAAyB,EAEzB,oBAAoB,EACpB,2BAA2B,EAC3B,qBAAqB,GACrB,MAAM,YAAY,CAAC;AAYpB;;GAEG;AACH,SAAS,qBAAqB,CAC7B,QAAwB,EACxB,OAAuB,EACvB,mBAA2B,EACV;IACjB,MAAM,OAAO,GAAG,aAAa,EAAE,CAAC;IAEhC,+DAA+D;IAC/D,IAAI,mBAAmB,IAAI,CAAC,EAAE,CAAC;QAC9B,MAAM,cAAc,GAAG,OAAO,CAAC,mBAAmB,CAAoB,CAAC;QACvE,IAAI,CAAC,cAAc,CAAC,QAAQ,IAAI,cAAc,CAAC,OAAO,EAAE,CAAC;YACxD,qDAAqD;YACrD,MAAM,OAAO,GAAG,cAAc,CAAC,OAA4B,CAAC;YAC5D,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,CAAC;gBACtC,KAAK,MAAM,CAAC,IAAI,OAAO,CAAC,SAAS;oBAAE,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACxD,CAAC;YACD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE,CAAC;gBAC1C,KAAK,MAAM,CAAC,IAAI,OAAO,CAAC,aAAa;oBAAE,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YAC9D,CAAC;QACF,CAAC;IACF,CAAC;IAED,sCAAsC;IACtC,KAAK,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;QAC5B,yBAAyB,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;IACzC,CAAC;IAED,OAAO,OAAO,CAAC;AAAA,CACf;AAED,+EAA+E;AAC/E,qBAAqB;AACrB,+EAA+E;AAE/E;;;GAGG;AACH,SAAS,mBAAmB,CAAC,KAAmB,EAA4B;IAC3E,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;QAC9B,OAAO,KAAK,CAAC,OAAO,CAAC;IACtB,CAAC;IACD,IAAI,KAAK,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;QACrC,OAAO,mBAAmB,CAAC,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC;IAC5G,CAAC;IACD,IAAI,KAAK,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;QACrC,OAAO,0BAA0B,CAAC,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC;IACjF,CAAC;IACD,OAAO,SAAS,CAAC;AAAA,CACjB;AAqBD,MAAM,CAAC,MAAM,2BAA2B,GAAuB;IAC9D,OAAO,EAAE,IAAI;IACb,aAAa,EAAE,KAAK;IACpB,gBAAgB,EAAE,KAAK;CACvB,CAAC;AAEF,+EAA+E;AAC/E,oBAAoB;AACpB,+EAA+E;AAE/E;;;GAGG;AACH,MAAM,UAAU,sBAAsB,CAAC,KAAY,EAAU;IAC5D,OAAO,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,SAAS,GAAG,KAAK,CAAC,UAAU,CAAC;AAAA,CAC5F;AAED;;;GAGG;AACH,SAAS,iBAAiB,CAAC,GAAiB,EAAqB;IAChE,IAAI,GAAG,CAAC,IAAI,KAAK,WAAW,IAAI,OAAO,IAAI,GAAG,EAAE,CAAC;QAChD,MAAM,YAAY,GAAG,GAAuB,CAAC;QAC7C,IAAI,YAAY,CAAC,UAAU,KAAK,SAAS,IAAI,YAAY,CAAC,UAAU,KAAK,OAAO,IAAI,YAAY,CAAC,KAAK,EAAE,CAAC;YACxG,OAAO,YAAY,CAAC,KAAK,CAAC;QAC3B,CAAC;IACF,CAAC;IACD,OAAO,SAAS,CAAC;AAAA,CACjB;AAED;;GAEG;AACH,MAAM,UAAU,qBAAqB,CAAC,OAAuB,EAAqB;IACjF,KAAK,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC9C,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YAC/C,IAAI,KAAK;gBAAE,OAAO,KAAK,CAAC;QACzB,CAAC;IACF,CAAC;IACD,OAAO,SAAS,CAAC;AAAA,CACjB;AAED;;GAEG;AACH,MAAM,UAAU,aAAa,CAAC,aAAqB,EAAE,aAAqB,EAAE,QAA4B,EAAW;IAClH,IAAI,CAAC,QAAQ,CAAC,OAAO;QAAE,OAAO,KAAK,CAAC;IACpC,OAAO,aAAa,GAAG,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC;AAAA,CAC9D;AAED,+EAA+E;AAC/E,sBAAsB;AACtB,+EAA+E;AAE/E;;;GAGG;AACH,MAAM,UAAU,cAAc,CAAC,OAAqB,EAAU;IAC7D,IAAI,KAAK,GAAG,CAAC,CAAC;IAEd,QAAQ,OAAO,CAAC,IAAI,EAAE,CAAC;QACtB,KAAK,MAAM,EAAE,CAAC;YACb,MAAM,OAAO,GAAI,OAAwE,CAAC,OAAO,CAAC;YAClG,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE,CAAC;gBACjC,KAAK,GAAG,OAAO,CAAC,MAAM,CAAC;YACxB,CAAC;iBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;gBACnC,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE,CAAC;oBAC7B,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,IAAI,KAAK,CAAC,IAAI,EAAE,CAAC;wBACzC,KAAK,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC;oBAC5B,CAAC;gBACF,CAAC;YACF,CAAC;YACD,OAAO,IAAI,CAAC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QAC7B,CAAC;QACD,KAAK,WAAW,EAAE,CAA
C;YAClB,MAAM,SAAS,GAAG,OAA2B,CAAC;YAC9C,KAAK,MAAM,KAAK,IAAI,SAAS,CAAC,OAAO,EAAE,CAAC;gBACvC,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,EAAE,CAAC;oBAC3B,KAAK,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC;gBAC5B,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;oBACtC,KAAK,IAAI,KAAK,CAAC,QAAQ,CAAC,MAAM,CAAC;gBAChC,CAAC;qBAAM,IAAI,KAAK,CAAC,IAAI,KAAK,UAAU,EAAE,CAAC;oBACtC,KAAK,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC;gBACrE,CAAC;YACF,CAAC;YACD,OAAO,IAAI,CAAC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QAC7B,CAAC;QACD,KAAK,QAAQ,CAAC;QACd,KAAK,YAAY,EAAE,CAAC;YACnB,IAAI,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,EAAE,CAAC;gBACzC,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC;YAChC,CAAC;iBAAM,CAAC;gBACP,KAAK,MAAM,KAAK,IAAI,OAAO,CAAC,OAAO,EAAE,CAAC;oBACrC,IAAI,KAAK,CAAC,IAAI,KAAK,MAAM,IAAI,KAAK,CAAC,IAAI,EAAE,CAAC;wBACzC,KAAK,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC;oBAC5B,CAAC;oBACD,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE,CAAC;wBAC5B,KAAK,IAAI,IAAI,CAAC,CAAC,gDAAgD;oBAChE,CAAC;gBACF,CAAC;YACF,CAAC;YACD,OAAO,IAAI,CAAC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QAC7B,CAAC;QACD,KAAK,eAAe,EAAE,CAAC;YACtB,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC;YACvD,OAAO,IAAI,CAAC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QAC7B,CAAC;QACD,KAAK,eAAe,CAAC;QACrB,KAAK,mBAAmB,EAAE,CAAC;YAC1B,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC;YAC/B,OAAO,IAAI,CAAC,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QAC7B,CAAC;IACF,CAAC;IAED,OAAO,CAAC,CAAC;AAAA,CACT;AAED;;;;;;GAMG;AACH,SAAS,kBAAkB,CAAC,OAAuB,EAAE,UAAkB,EAAE,QAAgB,EAAY;IACpG,MAAM,SAAS,GAAa,EAAE,CAAC;IAC/B,KAAK,IAAI,CAAC,GAAG,UAAU,EAAE,CAAC,GAAG,QAAQ,EAAE,CAAC,EAAE,EAAE,CAAC;QAC5C,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,QAAQ,KAAK,CAAC,IAAI,EAAE,CAAC;YACpB,KAAK,SAAS,EAAE,CAAC;gBAChB,MAAM,IAAI,GAAG,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC;gBAChC,QAAQ,IAAI,EAAE,CAAC;oBACd,KAAK,eAAe,CAAC;oBACrB,KAAK,QAAQ,CAAC;oBACd,KAAK,eAAe,CAAC;oBACrB,KAAK,mBAAmB,CAAC;oBACzB,KAAK,MAAM,CAAC;oBACZ,KAAK,WAAW;wBACf,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;wBAClB,MAAM;oBACP,KAAK,YAAY;wBAChB,MAAM;gBACR,CAAC;gBACD,MAAM;YACP,CAAC;YACD,KAAK,uBAAuB,CAAC;YAC7B,KAAK,cAAc,CAAC;YACpB,KAAK,YAAY,CAAC;YAClB,KAAK,gBAAgB,CAAC;YACtB,KAAK,QAAQ,CAAC;YACd,KAAK,gBAAgB,CAAC;YACtB,KAAK,OAAO,CAAC;QACd,CAAC;QACD,6EAA6E;QAC7E,IAAI,KAAK,CAAC,IAAI,KAAK,gBAAgB,IAAI,KAAK,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;YACxE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QACnB,CAAC;IACF,CAAC;IACD,OAAO,SAAS,CAAC;AAAA,CACjB;AAED;;;;GAIG;AACH,MAAM,UAAU,kBAAkB,CAAC,OAAuB,EAAE,UAAkB,EAAE,UAAkB,EAAU;IAC3G,KAAK,IAAI,CAAC,GAAG,UAAU,EAAE,CAAC,IAAI,UAAU,EAAE,CAAC,EAAE,EAAE,CAAC;QAC/C,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,6EAA6E;QAC7E,IAAI,KAAK,CAAC,IAAI,KAAK,gBAAgB,IAAI,KAAK,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;YACxE,OAAO,CAAC,CAAC;QACV,CAAC;QACD,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;YAC9B,MAAM,IAAI,GAAG,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC;YAChC,IAAI,IAAI,KAAK,MAAM,IAAI,IAAI,KAAK,eAAe,EAAE,CAAC;gBACjD,OAAO,CAAC,CAAC;YACV,CAAC;QACF,CAAC;IACF,CAAC;IACD,OAAO,CAAC,CAAC,CAAC;AAAA,CACV;AAWD;;;;;;;;;;;;;;;GAeG;AACH,MAAM,UAAU,YAAY,CAC3B,OAAuB,EACvB,UAAkB,EAClB,QAAgB,EAChB,gBAAwB,EACP;IACjB,MAAM,SAAS,GAAG,kBAAkB,CAAC,OAAO,EAAE,UAAU,EAAE,QAAQ,CAAC,CAAC;IAEpE,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC5B,OAAO,EAAE,mBAAmB,EAAE,UAAU,EAAE,cAAc,EAAE,CAAC,CAAC,EAAE,WAAW,EAAE,KAAK,EAAE,CAAC;IACpF,CAAC;IAED,mEAAmE;IACnE,IAAI,iBAAiB,GAAG,CAAC,CAAC;IAC1B,IAAI,QAAQ,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,gDAAgD;IAE7E,KAAK,IAAI,CAAC,GAAG,QAAQ,GAAG,CAAC,EAAE,CAAC,IAAI,UAAU,EAAE,CAAC,EAAE,EAAE,CAAC;QACjD,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,IAAI,KAAK,CAAC,IAAI,KAAK,SAAS;YAAE,SAAS;QAEvC,+BAA+B;QAC/B,MAAM,aAAa,
GAAG,cAAc,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;QACpD,iBAAiB,IAAI,aAAa,CAAC;QAEnC,qCAAqC;QACrC,IAAI,iBAAiB,IAAI,gBAAgB,EAAE,CAAC;YAC3C,0DAA0D;YAC1D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;gBAC3C,IAAI,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC;oBACvB,QAAQ,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC;oBACxB,MAAM;gBACP,CAAC;YACF,CAAC;YACD,MAAM;QACP,CAAC;IACF,CAAC;IAED,yFAAyF;IACzF,OAAO,QAAQ,GAAG,UAAU,EAAE,CAAC;QAC9B,MAAM,SAAS,GAAG,OAAO,CAAC,QAAQ,GAAG,CAAC,CAAC,CAAC;QACxC,kDAAkD;QAClD,IAAI,SAAS,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;YACrC,MAAM;QACP,CAAC;QACD,IAAI,SAAS,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;YAClC,6BAA6B;YAC7B,MAAM;QACP,CAAC;QACD,+DAA+D;QAC/D,QAAQ,EAAE,CAAC;IACZ,CAAC;IAED,oCAAoC;IACpC,MAAM,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;IACnC,MAAM,aAAa,GAAG,QAAQ,CAAC,IAAI,KAAK,SAAS,IAAI,QAAQ,CAAC,OAAO,CAAC,IAAI,KAAK,MAAM,CAAC;IACtF,MAAM,cAAc,GAAG,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,kBAAkB,CAAC,OAAO,EAAE,QAAQ,EAAE,UAAU,CAAC,CAAC;IAE9F,OAAO;QACN,mBAAmB,EAAE,QAAQ;QAC7B,cAAc;QACd,WAAW,EAAE,CAAC,aAAa,IAAI,cAAc,KAAK,CAAC,CAAC;KACpD,CAAC;AAAA,CACF;AAED,+EAA+E;AAC/E,gBAAgB;AAChB,+EAA+E;AAE/E,MAAM,oBAAoB,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0FA+B6D,CAAC;AAE3F,MAAM,2BAA2B,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;0FAqCsD,CAAC;AAE3F;;;GAGG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CACpC,eAA+B,EAC/B,KAAiB,EACjB,aAAqB,EACrB,MAAc,EACd,MAAoB,EACpB,kBAA2B,EAC3B,eAAwB,EACN;IAClB,MAAM,SAAS,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,GAAG,aAAa,CAAC,CAAC;IAElD,4EAA4E;IAC5E,IAAI,UAAU,GAAG,eAAe,CAAC,CAAC,CAAC,2BAA2B,CAAC,CAAC,CAAC,oBAAoB,CAAC;IACtF,IAAI,kBAAkB,EAAE,CAAC;QACxB,UAAU,GAAG,GAAG,UAAU,yBAAyB,kBAAkB,EAAE,CAAC;IACzE,CAAC;IAED,qEAAqE;IACrE,wFAAwF;IACxF,MAAM,WAAW,GAAG,YAAY,CAAC,eAAe,CAAC,CAAC;IAClD,MAAM,gBAAgB,GAAG,qBAAqB,CAAC,WAAW,CAAC,CAAC;IAE5D,qDAAqD;IACrD,IAAI,UAAU,GAAG,mBAAmB,gBAAgB,uBAAuB,CAAC;IAC5E,IAAI,eAAe,EAAE,CAAC;QACrB,UAAU,IAAI,uBAAuB,eAAe,2BAA2B,CAAC;IACjF,CAAC;IACD,UAAU,IAAI,UAAU,CAAC;IAEzB,MAAM,qBAAqB,GAAG;QAC7B;YACC,IAAI,EAAE,MAAe;YACrB,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAe,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC;YACtD,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB;KACD,CAAC;IAEF,MAAM,QAAQ,GAAG,MAAM,cAAc,CACpC,KAAK,EACL,EAAE,YAAY,EAAE,2BAA2B,EAAE,QAAQ,EAAE,qBAAqB,EAAE,EAC9E,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,CAChD,CAAC;IAEF,IAAI,QAAQ,CAAC,UAAU,KAAK,OAAO,EAAE,CAAC;QACrC,MAAM,IAAI,KAAK,CAAC,yBAAyB,QAAQ,CAAC,YAAY,IAAI,eAAe,EAAE,CAAC,CAAC;IACtF,CAAC;IAED,MAAM,WAAW,GAAG,QAAQ,CAAC,OAAO;SAClC,MAAM,CAAC,CAAC,CAAC,EAAuC,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,MAAM,CAAC;SACrE,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC;SAClB,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,OAAO,WAAW,CAAC;AAAA,CACnB;AAwBD,MAAM,UAAU,iBAAiB,CAChC,WAA2B,EAC3B,QAA4B,EACQ;IACpC,IAAI,WAAW,CAAC,MAAM,GAAG,CAAC,IAAI,WAAW,CAAC,WAAW,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;QACzF,OAAO,SAAS,CAAC;IAClB,CAAC;IAED,IAAI,mBAAmB,GAAG,CAAC,CAAC,CAAC;IAC7B,KAAK,IAAI,CAAC,GAAG,WAAW,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAClD,IAAI,WAAW,CAAC,CAAC,CAAC,CAAC,IAAI,KAAK,YAAY,EAAE,CAAC;YAC1C,mBAAmB,GAAG,CAAC,CAAC;YACxB,MAAM;QACP,CAAC;IACF,CAAC;IACD,MAAM,aAAa,GAAG,mBAAmB,GAAG,CAAC,CAAC;IAC9C,MAAM,WAAW,GAAG,WAAW,CAAC,MAAM,CAAC;IAEvC,MAAM,SAAS,GAAG,qBAAqB,CAAC,WAAW,CAAC,CAAC;IACrD,MAAM,YAAY,GAAG,SAAS,CAAC,CAAC,CAAC,sBAAsB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAEvE,MAAM,QAAQ,GAAG,YAAY,CAAC,WAAW,EAAE,aAAa,EAAE,WAAW,EAAE,QAAQ,CAAC,gBAAgB,CAAC,CAAC;IAElG,+BAA+B;IAC/B,MAAM,cAAc,GAAG,WAAW,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CAAC;IACjE,IAAI,CAAC,cAAc,EAAE,EAAE,EAAE,CAAC;QACzB,OAAO,SAAS,CAAC,CAAC,0BAA0B;IAC7C,CAAC;IACD,MAAM,gBAAgB,GAAG,cAAc,CAAC,EAAE,CAAC;IAE3C,MAAM,
UAAU,GAAG,QAAQ,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,mBAAmB,CAAC;IAEjG,0DAA0D;IAC1D,MAAM,mBAAmB,GAAmB,EAAE,CAAC;IAC/C,KAAK,IAAI,CAAC,GAAG,aAAa,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE,CAAC;QACjD,MAAM,GAAG,GAAG,mBAAmB,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;QAChD,IAAI,GAAG;YAAE,mBAAmB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACxC,CAAC;IAED,yDAAyD;IACzD,MAAM,kBAAkB,GAAmB,EAAE,CAAC;IAC9C,IAAI,QAAQ,CAAC,WAAW,EAAE,CAAC;QAC1B,KAAK,IAAI,CAAC,GAAG,QAAQ,CAAC,cAAc,EAAE,CAAC,GAAG,QAAQ,CAAC,mBAAmB,EAAE,CAAC,EAAE,EAAE,CAAC;YAC7E,MAAM,GAAG,GAAG,mBAAmB,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;YAChD,IAAI,GAAG;gBAAE,kBAAkB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QACvC,CAAC;IACF,CAAC;IAED,4CAA4C;IAC5C,IAAI,eAAmC,CAAC;IACxC,IAAI,mBAAmB,IAAI,CAAC,EAAE,CAAC;QAC9B,MAAM,cAAc,GAAG,WAAW,CAAC,mBAAmB,CAAoB,CAAC;QAC3E,eAAe,GAAG,cAAc,CAAC,OAAO,CAAC;IAC1C,CAAC;IAED,gEAAgE;IAChE,MAAM,OAAO,GAAG,qBAAqB,CAAC,mBAAmB,EAAE,WAAW,EAAE,mBAAmB,CAAC,CAAC;IAE7F,sDAAsD;IACtD,IAAI,QAAQ,CAAC,WAAW,EAAE,CAAC;QAC1B,KAAK,MAAM,GAAG,IAAI,kBAAkB,EAAE,CAAC;YACtC,yBAAyB,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;QACzC,CAAC;IACF,CAAC;IAED,OAAO;QACN,gBAAgB;QAChB,mBAAmB;QACnB,kBAAkB;QAClB,WAAW,EAAE,QAAQ,CAAC,WAAW;QACjC,YAAY;QACZ,eAAe;QACf,OAAO;QACP,QAAQ;KACR,CAAC;AAAA,CACF;AAED,+EAA+E;AAC/E,2BAA2B;AAC3B,+EAA+E;AAE/E,MAAM,gCAAgC,GAAG;;;;;;;;;;;;;kEAayB,CAAC;AAEnE;;;;;;GAMG;AACH,MAAM,CAAC,KAAK,UAAU,OAAO,CAC5B,WAAkC,EAClC,KAAiB,EACjB,MAAc,EACd,kBAA2B,EAC3B,MAAoB,EACQ;IAC5B,MAAM,EACL,gBAAgB,EAChB,mBAAmB,EACnB,kBAAkB,EAClB,WAAW,EACX,YAAY,EACZ,eAAe,EACf,OAAO,EACP,QAAQ,GACR,GAAG,WAAW,CAAC;IAEhB,yEAAyE;IACzE,IAAI,OAAe,CAAC;IAEpB,IAAI,WAAW,IAAI,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAClD,sCAAsC;QACtC,MAAM,CAAC,aAAa,EAAE,gBAAgB,CAAC,GAAG,MAAM,OAAO,CAAC,GAAG,CAAC;YAC3D,mBAAmB,CAAC,MAAM,GAAG,CAAC;gBAC7B,CAAC,CAAC,eAAe,CACf,mBAAmB,EACnB,KAAK,EACL,QAAQ,CAAC,aAAa,EACtB,MAAM,EACN,MAAM,EACN,kBAAkB,EAClB,eAAe,CACf;gBACF,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,mBAAmB,CAAC;YACvC,yBAAyB,CAAC,kBAAkB,EAAE,KAAK,EAAE,QAAQ,CAAC,aAAa,EAAE,MAAM,EAAE,MAAM,CAAC;SAC5F,CAAC,CAAC;QACH,4BAA4B;QAC5B,OAAO,GAAG,GAAG,aAAa,gDAAgD,gBAAgB,EAAE,CAAC;IAC9F,CAAC;SAAM,CAAC;QACP,gCAAgC;QAChC,OAAO,GAAG,MAAM,eAAe,CAC9B,mBAAmB,EACnB,KAAK,EACL,QAAQ,CAAC,aAAa,EACtB,MAAM,EACN,MAAM,EACN,kBAAkB,EAClB,eAAe,CACf,CAAC;IACH,CAAC;IAED,2CAA2C;IAC3C,MAAM,EAAE,SAAS,EAAE,aAAa,EAAE,GAAG,gBAAgB,CAAC,OAAO,CAAC,CAAC;IAC/D,OAAO,IAAI,oBAAoB,CAAC,SAAS,EAAE,aAAa,CAAC,CAAC;IAE1D,IAAI,CAAC,gBAAgB,EAAE,CAAC;QACvB,MAAM,IAAI,KAAK,CAAC,2DAA2D,CAAC,CAAC;IAC9E,CAAC;IAED,OAAO;QACN,OAAO;QACP,gBAAgB;QAChB,YAAY;QACZ,OAAO,EAAE,EAAE,SAAS,EAAE,aAAa,EAAuB;KAC1D,CAAC;AAAA,CACF;AAED;;GAEG;AACH,KAAK,UAAU,yBAAyB,CACvC,QAAwB,EACxB,KAAiB,EACjB,aAAqB,EACrB,MAAc,EACd,MAAoB,EACF;IAClB,MAAM,SAAS,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,GAAG,aAAa,CAAC,CAAC,CAAC,iCAAiC;IACpF,MAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;IAC3C,MAAM,gBAAgB,GAAG,qBAAqB,CAAC,WAAW,CAAC,CAAC;IAC5D,MAAM,UAAU,GAAG,mBAAmB,gBAAgB,wBAAwB,gCAAgC,EAAE,CAAC;IACjH,MAAM,qBAAqB,GAAG;QAC7B;YACC,IAAI,EAAE,MAAe;YACrB,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAe,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC;YACtD,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB;KACD,CAAC;IAEF,MAAM,QAAQ,GAAG,MAAM,cAAc,CACpC,KAAK,EACL,EAAE,YAAY,EAAE,2BAA2B,EAAE,QAAQ,EAAE,qBAAqB,EAAE,EAC9E,EAAE,SAAS,EAAE,MAAM,EAAE,MAAM,EAAE,CAC7B,CAAC;IAEF,IAAI,QAAQ,CAAC,UAAU,KAAK,OAAO,EAAE,CAAC;QACrC,MAAM,IAAI,KAAK,CAAC,qCAAqC,QAAQ,CAAC,YAAY,IAAI,eAAe,EAAE,CAAC,CAAC;IAClG,CAAC;IAED,OAAO,QAAQ,CAAC,OAAO;SACrB,MAAM,CAAC,CAAC,CAAC,EAAuC,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,MAAM,CAAC;SACrE,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC;SAClB,IAAI,CAAC,IAAI,CAAC,CAAC;AAAA,CACb","sourcesContent":["/**\n * 
Context compaction for long sessions.\n *\n * Pure functions for compaction logic. The session manager handles I/O,\n * and after compaction the session is reloaded.\n */\n\nimport type { AgentMessage } from \"@mariozechner/pi-agent-core\";\nimport type { AssistantMessage, Model, Usage } from \"@mariozechner/pi-ai\";\nimport { completeSimple } from \"@mariozechner/pi-ai\";\nimport { convertToLlm, createBranchSummaryMessage, createCustomMessage } from \"../messages.js\";\nimport type { CompactionEntry, SessionEntry } from \"../session-manager.js\";\nimport {\n\tcomputeFileLists,\n\tcreateFileOps,\n\textractFileOpsFromMessage,\n\ttype FileOperations,\n\tformatFileOperations,\n\tSUMMARIZATION_SYSTEM_PROMPT,\n\tserializeConversation,\n} from \"./utils.js\";\n\n// ============================================================================\n// File Operation Tracking\n// ============================================================================\n\n/** Details stored in CompactionEntry.details for file tracking */\nexport interface CompactionDetails {\n\treadFiles: string[];\n\tmodifiedFiles: string[];\n}\n\n/**\n * Extract file operations from messages and previous compaction entries.\n */\nfunction extractFileOperations(\n\tmessages: AgentMessage[],\n\tentries: SessionEntry[],\n\tprevCompactionIndex: number,\n): FileOperations {\n\tconst fileOps = createFileOps();\n\n\t// Collect from previous compaction's details (if pi-generated)\n\tif (prevCompactionIndex >= 0) {\n\t\tconst prevCompaction = entries[prevCompactionIndex] as CompactionEntry;\n\t\tif (!prevCompaction.fromHook && prevCompaction.details) {\n\t\t\t// fromHook field kept for session file compatibility\n\t\t\tconst details = prevCompaction.details as CompactionDetails;\n\t\t\tif (Array.isArray(details.readFiles)) {\n\t\t\t\tfor (const f of details.readFiles) fileOps.read.add(f);\n\t\t\t}\n\t\t\tif (Array.isArray(details.modifiedFiles)) {\n\t\t\t\tfor (const f of details.modifiedFiles) fileOps.edited.add(f);\n\t\t\t}\n\t\t}\n\t}\n\n\t// Extract from tool calls in messages\n\tfor (const msg of messages) {\n\t\textractFileOpsFromMessage(msg, fileOps);\n\t}\n\n\treturn fileOps;\n}\n\n// ============================================================================\n// Message Extraction\n// ============================================================================\n\n/**\n * Extract AgentMessage from an entry if it produces one.\n * Returns undefined for entries that don't contribute to LLM context.\n */\nfunction getMessageFromEntry(entry: SessionEntry): AgentMessage | undefined {\n\tif (entry.type === \"message\") {\n\t\treturn entry.message;\n\t}\n\tif (entry.type === \"custom_message\") {\n\t\treturn createCustomMessage(entry.customType, entry.content, entry.display, entry.details, entry.timestamp);\n\t}\n\tif (entry.type === \"branch_summary\") {\n\t\treturn createBranchSummaryMessage(entry.summary, entry.fromId, entry.timestamp);\n\t}\n\treturn undefined;\n}\n\n/** Result from compact() - SessionManager adds uuid/parentUuid when saving */\nexport interface CompactionResult<T = unknown> {\n\tsummary: string;\n\tfirstKeptEntryId: string;\n\ttokensBefore: number;\n\t/** Extension-specific data (e.g., ArtifactIndex, version markers for structured compaction) */\n\tdetails?: T;\n}\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface CompactionSettings {\n\tenabled: boolean;\n\treserveTokens: 
number;\n\tkeepRecentTokens: number;\n}\n\nexport const DEFAULT_COMPACTION_SETTINGS: CompactionSettings = {\n\tenabled: true,\n\treserveTokens: 16384,\n\tkeepRecentTokens: 20000,\n};\n\n// ============================================================================\n// Token calculation\n// ============================================================================\n\n/**\n * Calculate total context tokens from usage.\n * Uses the native totalTokens field when available, falls back to computing from components.\n */\nexport function calculateContextTokens(usage: Usage): number {\n\treturn usage.totalTokens || usage.input + usage.output + usage.cacheRead + usage.cacheWrite;\n}\n\n/**\n * Get usage from an assistant message if available.\n * Skips aborted and error messages as they don't have valid usage data.\n */\nfunction getAssistantUsage(msg: AgentMessage): Usage | undefined {\n\tif (msg.role === \"assistant\" && \"usage\" in msg) {\n\t\tconst assistantMsg = msg as AssistantMessage;\n\t\tif (assistantMsg.stopReason !== \"aborted\" && assistantMsg.stopReason !== \"error\" && assistantMsg.usage) {\n\t\t\treturn assistantMsg.usage;\n\t\t}\n\t}\n\treturn undefined;\n}\n\n/**\n * Find the last non-aborted assistant message usage from session entries.\n */\nexport function getLastAssistantUsage(entries: SessionEntry[]): Usage | undefined {\n\tfor (let i = entries.length - 1; i >= 0; i--) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type === \"message\") {\n\t\t\tconst usage = getAssistantUsage(entry.message);\n\t\t\tif (usage) return usage;\n\t\t}\n\t}\n\treturn undefined;\n}\n\n/**\n * Check if compaction should trigger based on context usage.\n */\nexport function shouldCompact(contextTokens: number, contextWindow: number, settings: CompactionSettings): boolean {\n\tif (!settings.enabled) return false;\n\treturn contextTokens > contextWindow - settings.reserveTokens;\n}\n\n// ============================================================================\n// Cut point detection\n// ============================================================================\n\n/**\n * Estimate token count for a message using chars/4 heuristic.\n * This is conservative (overestimates tokens).\n */\nexport function estimateTokens(message: AgentMessage): number {\n\tlet chars = 0;\n\n\tswitch (message.role) {\n\t\tcase \"user\": {\n\t\t\tconst content = (message as { content: string | Array<{ type: string; text?: string }> }).content;\n\t\t\tif (typeof content === \"string\") {\n\t\t\t\tchars = content.length;\n\t\t\t} else if (Array.isArray(content)) {\n\t\t\t\tfor (const block of content) {\n\t\t\t\t\tif (block.type === \"text\" && block.text) {\n\t\t\t\t\t\tchars += block.text.length;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t\tcase \"assistant\": {\n\t\t\tconst assistant = message as AssistantMessage;\n\t\t\tfor (const block of assistant.content) {\n\t\t\t\tif (block.type === \"text\") {\n\t\t\t\t\tchars += block.text.length;\n\t\t\t\t} else if (block.type === \"thinking\") {\n\t\t\t\t\tchars += block.thinking.length;\n\t\t\t\t} else if (block.type === \"toolCall\") {\n\t\t\t\t\tchars += block.name.length + JSON.stringify(block.arguments).length;\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t\tcase \"custom\":\n\t\tcase \"toolResult\": {\n\t\t\tif (typeof message.content === \"string\") {\n\t\t\t\tchars = message.content.length;\n\t\t\t} else {\n\t\t\t\tfor (const block of message.content) {\n\t\t\t\t\tif (block.type === \"text\" && block.text) 
{\n\t\t\t\t\t\tchars += block.text.length;\n\t\t\t\t\t}\n\t\t\t\t\tif (block.type === \"image\") {\n\t\t\t\t\t\tchars += 4800; // Estimate images as 4000 chars, or 1200 tokens\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t\tcase \"bashExecution\": {\n\t\t\tchars = message.command.length + message.output.length;\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t\tcase \"branchSummary\":\n\t\tcase \"compactionSummary\": {\n\t\t\tchars = message.summary.length;\n\t\t\treturn Math.ceil(chars / 4);\n\t\t}\n\t}\n\n\treturn 0;\n}\n\n/**\n * Find valid cut points: indices of user, assistant, custom, or bashExecution messages.\n * Never cut at tool results (they must follow their tool call).\n * When we cut at an assistant message with tool calls, its tool results follow it\n * and will be kept.\n * BashExecutionMessage is treated like a user message (user-initiated context).\n */\nfunction findValidCutPoints(entries: SessionEntry[], startIndex: number, endIndex: number): number[] {\n\tconst cutPoints: number[] = [];\n\tfor (let i = startIndex; i < endIndex; i++) {\n\t\tconst entry = entries[i];\n\t\tswitch (entry.type) {\n\t\t\tcase \"message\": {\n\t\t\t\tconst role = entry.message.role;\n\t\t\t\tswitch (role) {\n\t\t\t\t\tcase \"bashExecution\":\n\t\t\t\t\tcase \"custom\":\n\t\t\t\t\tcase \"branchSummary\":\n\t\t\t\t\tcase \"compactionSummary\":\n\t\t\t\t\tcase \"user\":\n\t\t\t\t\tcase \"assistant\":\n\t\t\t\t\t\tcutPoints.push(i);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase \"toolResult\":\n\t\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tcase \"thinking_level_change\":\n\t\t\tcase \"model_change\":\n\t\t\tcase \"compaction\":\n\t\t\tcase \"branch_summary\":\n\t\t\tcase \"custom\":\n\t\t\tcase \"custom_message\":\n\t\t\tcase \"label\":\n\t\t}\n\t\t// branch_summary and custom_message are user-role messages, valid cut points\n\t\tif (entry.type === \"branch_summary\" || entry.type === \"custom_message\") {\n\t\t\tcutPoints.push(i);\n\t\t}\n\t}\n\treturn cutPoints;\n}\n\n/**\n * Find the user message (or bashExecution) that starts the turn containing the given entry index.\n * Returns -1 if no turn start found before the index.\n * BashExecutionMessage is treated like a user message for turn boundaries.\n */\nexport function findTurnStartIndex(entries: SessionEntry[], entryIndex: number, startIndex: number): number {\n\tfor (let i = entryIndex; i >= startIndex; i--) {\n\t\tconst entry = entries[i];\n\t\t// branch_summary and custom_message are user-role messages, can start a turn\n\t\tif (entry.type === \"branch_summary\" || entry.type === \"custom_message\") {\n\t\t\treturn i;\n\t\t}\n\t\tif (entry.type === \"message\") {\n\t\t\tconst role = entry.message.role;\n\t\t\tif (role === \"user\" || role === \"bashExecution\") {\n\t\t\t\treturn i;\n\t\t\t}\n\t\t}\n\t}\n\treturn -1;\n}\n\nexport interface CutPointResult {\n\t/** Index of first entry to keep */\n\tfirstKeptEntryIndex: number;\n\t/** Index of user message that starts the turn being split, or -1 if not splitting */\n\tturnStartIndex: number;\n\t/** Whether this cut splits a turn (cut point is not a user message) */\n\tisSplitTurn: boolean;\n}\n\n/**\n * Find the cut point in session entries that keeps approximately `keepRecentTokens`.\n *\n * Algorithm: Walk backwards from newest, accumulating estimated message sizes.\n * Stop when we've accumulated >= keepRecentTokens. Cut at that point.\n *\n * Can cut at user OR assistant messages (never tool results). 
When cutting at an\n * assistant message with tool calls, its tool results come after and will be kept.\n *\n * Returns CutPointResult with:\n * - firstKeptEntryIndex: the entry index to start keeping from\n * - turnStartIndex: if cutting mid-turn, the user message that started that turn\n * - isSplitTurn: whether we're cutting in the middle of a turn\n *\n * Only considers entries between `startIndex` and `endIndex` (exclusive).\n */\nexport function findCutPoint(\n\tentries: SessionEntry[],\n\tstartIndex: number,\n\tendIndex: number,\n\tkeepRecentTokens: number,\n): CutPointResult {\n\tconst cutPoints = findValidCutPoints(entries, startIndex, endIndex);\n\n\tif (cutPoints.length === 0) {\n\t\treturn { firstKeptEntryIndex: startIndex, turnStartIndex: -1, isSplitTurn: false };\n\t}\n\n\t// Walk backwards from newest, accumulating estimated message sizes\n\tlet accumulatedTokens = 0;\n\tlet cutIndex = cutPoints[0]; // Default: keep from first message (not header)\n\n\tfor (let i = endIndex - 1; i >= startIndex; i--) {\n\t\tconst entry = entries[i];\n\t\tif (entry.type !== \"message\") continue;\n\n\t\t// Estimate this message's size\n\t\tconst messageTokens = estimateTokens(entry.message);\n\t\taccumulatedTokens += messageTokens;\n\n\t\t// Check if we've exceeded the budget\n\t\tif (accumulatedTokens >= keepRecentTokens) {\n\t\t\t// Find the closest valid cut point at or after this entry\n\t\t\tfor (let c = 0; c < cutPoints.length; c++) {\n\t\t\t\tif (cutPoints[c] >= i) {\n\t\t\t\t\tcutIndex = cutPoints[c];\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t\tbreak;\n\t\t}\n\t}\n\n\t// Scan backwards from cutIndex to include any non-message entries (bash, settings, etc.)\n\twhile (cutIndex > startIndex) {\n\t\tconst prevEntry = entries[cutIndex - 1];\n\t\t// Stop at session header or compaction boundaries\n\t\tif (prevEntry.type === \"compaction\") {\n\t\t\tbreak;\n\t\t}\n\t\tif (prevEntry.type === \"message\") {\n\t\t\t// Stop if we hit any message\n\t\t\tbreak;\n\t\t}\n\t\t// Include this non-message entry (bash, settings change, etc.)\n\t\tcutIndex--;\n\t}\n\n\t// Determine if this is a split turn\n\tconst cutEntry = entries[cutIndex];\n\tconst isUserMessage = cutEntry.type === \"message\" && cutEntry.message.role === \"user\";\n\tconst turnStartIndex = isUserMessage ? -1 : findTurnStartIndex(entries, cutIndex, startIndex);\n\n\treturn {\n\t\tfirstKeptEntryIndex: cutIndex,\n\t\tturnStartIndex,\n\t\tisSplitTurn: !isUserMessage && turnStartIndex !== -1,\n\t};\n}\n\n// ============================================================================\n// Summarization\n// ============================================================================\n\nconst SUMMARIZATION_PROMPT = `The messages above are a conversation to summarize. Create a structured context checkpoint summary that another LLM will use to continue the work.\n\nUse this EXACT format:\n\n## Goal\n[What is the user trying to accomplish? Can be multiple items if the session covers different tasks.]\n\n## Constraints & Preferences\n- [Any constraints, preferences, or requirements mentioned by user]\n- [Or \"(none)\" if none were mentioned]\n\n## Progress\n### Done\n- [x] [Completed tasks/changes]\n\n### In Progress\n- [ ] [Current work]\n\n### Blocked\n- [Issues preventing progress, if any]\n\n## Key Decisions\n- **[Decision]**: [Brief rationale]\n\n## Next Steps\n1. 
[Ordered list of what should happen next]\n\n## Critical Context\n- [Any data, examples, or references needed to continue]\n- [Or \"(none)\" if not applicable]\n\nKeep each section concise. Preserve exact file paths, function names, and error messages.`;\n\nconst UPDATE_SUMMARIZATION_PROMPT = `The messages above are NEW conversation messages to incorporate into the existing summary provided in <previous-summary> tags.\n\nUpdate the existing structured summary with new information. RULES:\n- PRESERVE all existing information from the previous summary\n- ADD new progress, decisions, and context from the new messages\n- UPDATE the Progress section: move items from \"In Progress\" to \"Done\" when completed\n- UPDATE \"Next Steps\" based on what was accomplished\n- PRESERVE exact file paths, function names, and error messages\n- If something is no longer relevant, you may remove it\n\nUse this EXACT format:\n\n## Goal\n[Preserve existing goals, add new ones if the task expanded]\n\n## Constraints & Preferences\n- [Preserve existing, add new ones discovered]\n\n## Progress\n### Done\n- [x] [Include previously done items AND newly completed items]\n\n### In Progress\n- [ ] [Current work - update based on progress]\n\n### Blocked\n- [Current blockers - remove if resolved]\n\n## Key Decisions\n- **[Decision]**: [Brief rationale] (preserve all previous, add new)\n\n## Next Steps\n1. [Update based on current state]\n\n## Critical Context\n- [Preserve important context, add new if needed]\n\nKeep each section concise. Preserve exact file paths, function names, and error messages.`;\n\n/**\n * Generate a summary of the conversation using the LLM.\n * If previousSummary is provided, uses the update prompt to merge.\n */\nexport async function generateSummary(\n\tcurrentMessages: AgentMessage[],\n\tmodel: Model<any>,\n\treserveTokens: number,\n\tapiKey: string,\n\tsignal?: AbortSignal,\n\tcustomInstructions?: string,\n\tpreviousSummary?: string,\n): Promise<string> {\n\tconst maxTokens = Math.floor(0.8 * reserveTokens);\n\n\t// Use update prompt if we have a previous summary, otherwise initial prompt\n\tlet basePrompt = previousSummary ? 
UPDATE_SUMMARIZATION_PROMPT : SUMMARIZATION_PROMPT;\n\tif (customInstructions) {\n\t\tbasePrompt = `${basePrompt}\\n\\nAdditional focus: ${customInstructions}`;\n\t}\n\n\t// Serialize conversation to text so model doesn't try to continue it\n\t// Convert to LLM messages first (handles custom types like bashExecution, custom, etc.)\n\tconst llmMessages = convertToLlm(currentMessages);\n\tconst conversationText = serializeConversation(llmMessages);\n\n\t// Build the prompt with conversation wrapped in tags\n\tlet promptText = `<conversation>\\n${conversationText}\\n</conversation>\\n\\n`;\n\tif (previousSummary) {\n\t\tpromptText += `<previous-summary>\\n${previousSummary}\\n</previous-summary>\\n\\n`;\n\t}\n\tpromptText += basePrompt;\n\n\tconst summarizationMessages = [\n\t\t{\n\t\t\trole: \"user\" as const,\n\t\t\tcontent: [{ type: \"text\" as const, text: promptText }],\n\t\t\ttimestamp: Date.now(),\n\t\t},\n\t];\n\n\tconst response = await completeSimple(\n\t\tmodel,\n\t\t{ systemPrompt: SUMMARIZATION_SYSTEM_PROMPT, messages: summarizationMessages },\n\t\t{ maxTokens, signal, apiKey, reasoning: \"high\" },\n\t);\n\n\tif (response.stopReason === \"error\") {\n\t\tthrow new Error(`Summarization failed: ${response.errorMessage || \"Unknown error\"}`);\n\t}\n\n\tconst textContent = response.content\n\t\t.filter((c): c is { type: \"text\"; text: string } => c.type === \"text\")\n\t\t.map((c) => c.text)\n\t\t.join(\"\\n\");\n\n\treturn textContent;\n}\n\n// ============================================================================\n// Compaction Preparation (for extensions)\n// ============================================================================\n\nexport interface CompactionPreparation {\n\t/** UUID of first entry to keep */\n\tfirstKeptEntryId: string;\n\t/** Messages that will be summarized and discarded */\n\tmessagesToSummarize: AgentMessage[];\n\t/** Messages that will be turned into turn prefix summary (if splitting) */\n\tturnPrefixMessages: AgentMessage[];\n\t/** Whether this is a split turn (cut point in middle of turn) */\n\tisSplitTurn: boolean;\n\ttokensBefore: number;\n\t/** Summary from previous compaction, for iterative update */\n\tpreviousSummary?: string;\n\t/** File operations extracted from messagesToSummarize */\n\tfileOps: FileOperations;\n\t/** Compaction settions from settings.jsonl\t*/\n\tsettings: CompactionSettings;\n}\n\nexport function prepareCompaction(\n\tpathEntries: SessionEntry[],\n\tsettings: CompactionSettings,\n): CompactionPreparation | undefined {\n\tif (pathEntries.length > 0 && pathEntries[pathEntries.length - 1].type === \"compaction\") {\n\t\treturn undefined;\n\t}\n\n\tlet prevCompactionIndex = -1;\n\tfor (let i = pathEntries.length - 1; i >= 0; i--) {\n\t\tif (pathEntries[i].type === \"compaction\") {\n\t\t\tprevCompactionIndex = i;\n\t\t\tbreak;\n\t\t}\n\t}\n\tconst boundaryStart = prevCompactionIndex + 1;\n\tconst boundaryEnd = pathEntries.length;\n\n\tconst lastUsage = getLastAssistantUsage(pathEntries);\n\tconst tokensBefore = lastUsage ? calculateContextTokens(lastUsage) : 0;\n\n\tconst cutPoint = findCutPoint(pathEntries, boundaryStart, boundaryEnd, settings.keepRecentTokens);\n\n\t// Get UUID of first kept entry\n\tconst firstKeptEntry = pathEntries[cutPoint.firstKeptEntryIndex];\n\tif (!firstKeptEntry?.id) {\n\t\treturn undefined; // Session needs migration\n\t}\n\tconst firstKeptEntryId = firstKeptEntry.id;\n\n\tconst historyEnd = cutPoint.isSplitTurn ? 
cutPoint.turnStartIndex : cutPoint.firstKeptEntryIndex;\n\n\t// Messages to summarize (will be discarded after summary)\n\tconst messagesToSummarize: AgentMessage[] = [];\n\tfor (let i = boundaryStart; i < historyEnd; i++) {\n\t\tconst msg = getMessageFromEntry(pathEntries[i]);\n\t\tif (msg) messagesToSummarize.push(msg);\n\t}\n\n\t// Messages for turn prefix summary (if splitting a turn)\n\tconst turnPrefixMessages: AgentMessage[] = [];\n\tif (cutPoint.isSplitTurn) {\n\t\tfor (let i = cutPoint.turnStartIndex; i < cutPoint.firstKeptEntryIndex; i++) {\n\t\t\tconst msg = getMessageFromEntry(pathEntries[i]);\n\t\t\tif (msg) turnPrefixMessages.push(msg);\n\t\t}\n\t}\n\n\t// Get previous summary for iterative update\n\tlet previousSummary: string | undefined;\n\tif (prevCompactionIndex >= 0) {\n\t\tconst prevCompaction = pathEntries[prevCompactionIndex] as CompactionEntry;\n\t\tpreviousSummary = prevCompaction.summary;\n\t}\n\n\t// Extract file operations from messages and previous compaction\n\tconst fileOps = extractFileOperations(messagesToSummarize, pathEntries, prevCompactionIndex);\n\n\t// Also extract file ops from turn prefix if splitting\n\tif (cutPoint.isSplitTurn) {\n\t\tfor (const msg of turnPrefixMessages) {\n\t\t\textractFileOpsFromMessage(msg, fileOps);\n\t\t}\n\t}\n\n\treturn {\n\t\tfirstKeptEntryId,\n\t\tmessagesToSummarize,\n\t\tturnPrefixMessages,\n\t\tisSplitTurn: cutPoint.isSplitTurn,\n\t\ttokensBefore,\n\t\tpreviousSummary,\n\t\tfileOps,\n\t\tsettings,\n\t};\n}\n\n// ============================================================================\n// Main compaction function\n// ============================================================================\n\nconst TURN_PREFIX_SUMMARIZATION_PROMPT = `This is the PREFIX of a turn that was too large to keep. The SUFFIX (recent work) is retained.\n\nSummarize the prefix to provide context for the retained suffix:\n\n## Original Request\n[What did the user ask for in this turn?]\n\n## Early Progress\n- [Key decisions and work done in the prefix]\n\n## Context for Suffix\n- [Information needed to understand the retained recent work]\n\nBe concise. Focus on what's needed to understand the kept suffix.`;\n\n/**\n * Generate summaries for compaction using prepared data.\n * Returns CompactionResult - SessionManager adds uuid/parentUuid when saving.\n *\n * @param preparation - Pre-calculated preparation from prepareCompaction()\n * @param customInstructions - Optional custom focus for the summary\n */\nexport async function compact(\n\tpreparation: CompactionPreparation,\n\tmodel: Model<any>,\n\tapiKey: string,\n\tcustomInstructions?: string,\n\tsignal?: AbortSignal,\n): Promise<CompactionResult> {\n\tconst {\n\t\tfirstKeptEntryId,\n\t\tmessagesToSummarize,\n\t\tturnPrefixMessages,\n\t\tisSplitTurn,\n\t\ttokensBefore,\n\t\tpreviousSummary,\n\t\tfileOps,\n\t\tsettings,\n\t} = preparation;\n\n\t// Generate summaries (can be parallel if both needed) and merge into one\n\tlet summary: string;\n\n\tif (isSplitTurn && turnPrefixMessages.length > 0) {\n\t\t// Generate both summaries in parallel\n\t\tconst [historyResult, turnPrefixResult] = await Promise.all([\n\t\t\tmessagesToSummarize.length > 0\n\t\t\t\t? 
generateSummary(\n\t\t\t\t\t\tmessagesToSummarize,\n\t\t\t\t\t\tmodel,\n\t\t\t\t\t\tsettings.reserveTokens,\n\t\t\t\t\t\tapiKey,\n\t\t\t\t\t\tsignal,\n\t\t\t\t\t\tcustomInstructions,\n\t\t\t\t\t\tpreviousSummary,\n\t\t\t\t\t)\n\t\t\t\t: Promise.resolve(\"No prior history.\"),\n\t\t\tgenerateTurnPrefixSummary(turnPrefixMessages, model, settings.reserveTokens, apiKey, signal),\n\t\t]);\n\t\t// Merge into single summary\n\t\tsummary = `${historyResult}\\n\\n---\\n\\n**Turn Context (split turn):**\\n\\n${turnPrefixResult}`;\n\t} else {\n\t\t// Just generate history summary\n\t\tsummary = await generateSummary(\n\t\t\tmessagesToSummarize,\n\t\t\tmodel,\n\t\t\tsettings.reserveTokens,\n\t\t\tapiKey,\n\t\t\tsignal,\n\t\t\tcustomInstructions,\n\t\t\tpreviousSummary,\n\t\t);\n\t}\n\n\t// Compute file lists and append to summary\n\tconst { readFiles, modifiedFiles } = computeFileLists(fileOps);\n\tsummary += formatFileOperations(readFiles, modifiedFiles);\n\n\tif (!firstKeptEntryId) {\n\t\tthrow new Error(\"First kept entry has no UUID - session may need migration\");\n\t}\n\n\treturn {\n\t\tsummary,\n\t\tfirstKeptEntryId,\n\t\ttokensBefore,\n\t\tdetails: { readFiles, modifiedFiles } as CompactionDetails,\n\t};\n}\n\n/**\n * Generate a summary for a turn prefix (when splitting a turn).\n */\nasync function generateTurnPrefixSummary(\n\tmessages: AgentMessage[],\n\tmodel: Model<any>,\n\treserveTokens: number,\n\tapiKey: string,\n\tsignal?: AbortSignal,\n): Promise<string> {\n\tconst maxTokens = Math.floor(0.5 * reserveTokens); // Smaller budget for turn prefix\n\tconst llmMessages = convertToLlm(messages);\n\tconst conversationText = serializeConversation(llmMessages);\n\tconst promptText = `<conversation>\\n${conversationText}\\n</conversation>\\n\\n${TURN_PREFIX_SUMMARIZATION_PROMPT}`;\n\tconst summarizationMessages = [\n\t\t{\n\t\t\trole: \"user\" as const,\n\t\t\tcontent: [{ type: \"text\" as const, text: promptText }],\n\t\t\ttimestamp: Date.now(),\n\t\t},\n\t];\n\n\tconst response = await completeSimple(\n\t\tmodel,\n\t\t{ systemPrompt: SUMMARIZATION_SYSTEM_PROMPT, messages: summarizationMessages },\n\t\t{ maxTokens, signal, apiKey },\n\t);\n\n\tif (response.stopReason === \"error\") {\n\t\tthrow new Error(`Turn prefix summarization failed: ${response.errorMessage || \"Unknown error\"}`);\n\t}\n\n\treturn response.content\n\t\t.filter((c): c is { type: \"text\"; text: string } => c.type === \"text\")\n\t\t.map((c) => c.text)\n\t\t.join(\"\\n\");\n}\n"]}
@@ -0,0 +1,22 @@
+ /**
+ * ANSI escape code to HTML converter.
+ *
+ * Converts terminal ANSI color/style codes to HTML with inline styles.
+ * Supports:
+ * - Standard foreground colors (30-37) and bright variants (90-97)
+ * - Standard background colors (40-47) and bright variants (100-107)
+ * - 256-color palette (38;5;N and 48;5;N)
+ * - RGB true color (38;2;R;G;B and 48;2;R;G;B)
+ * - Text styles: bold (1), dim (2), italic (3), underline (4)
+ * - Reset (0)
+ */
+ /**
+ * Convert ANSI-escaped text to HTML with inline styles.
+ */
+ export declare function ansiToHtml(text: string): string;
+ /**
+ * Convert array of ANSI-escaped lines to HTML.
+ * Each line is wrapped in a div element.
+ */
+ export declare function ansiLinesToHtml(lines: string[]): string;
+ //# sourceMappingURL=ansi-to-html.d.ts.map
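The new declaration file above exposes the two helpers used by the HTML export path. A small usage sketch follows; the deep import from `dist/core/export-html/` is assumed from the file layout in this diff, not a documented entry point, and the commented output is derived from the converter source further below.

```ts
// Sketch: rendering captured terminal output for the HTML export.
import { ansiToHtml, ansiLinesToHtml } from "@mariozechner/pi-coding-agent/dist/core/export-html/ansi-to-html.js";

// A single string: ANSI SGR codes become <span style="..."> runs.
const html = ansiToHtml("\x1b[1;32mPASS\x1b[0m 12 tests");
// -> '<span style="color:#008000;font-weight:bold">PASS</span> 12 tests'

// An array of lines: each line is wrapped in <div class="ansi-line">,
// with &nbsp; substituted for empty lines so they keep their height.
const block = ansiLinesToHtml(["\x1b[31merror\x1b[0m: build failed", ""]);
```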
@@ -0,0 +1 @@
+ {"version":3,"file":"ansi-to-html.d.ts","sourceRoot":"","sources":["../../../src/core/export-html/ansi-to-html.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAuLH;;GAEG;AACH,wBAAgB,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,CAoD/C;AAED;;;GAGG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,MAAM,CAEvD","sourcesContent":["/**\n * ANSI escape code to HTML converter.\n *\n * Converts terminal ANSI color/style codes to HTML with inline styles.\n * Supports:\n * - Standard foreground colors (30-37) and bright variants (90-97)\n * - Standard background colors (40-47) and bright variants (100-107)\n * - 256-color palette (38;5;N and 48;5;N)\n * - RGB true color (38;2;R;G;B and 48;2;R;G;B)\n * - Text styles: bold (1), dim (2), italic (3), underline (4)\n * - Reset (0)\n */\n\n// Standard ANSI color palette (0-15)\nconst ANSI_COLORS = [\n\t\"#000000\", // 0: black\n\t\"#800000\", // 1: red\n\t\"#008000\", // 2: green\n\t\"#808000\", // 3: yellow\n\t\"#000080\", // 4: blue\n\t\"#800080\", // 5: magenta\n\t\"#008080\", // 6: cyan\n\t\"#c0c0c0\", // 7: white\n\t\"#808080\", // 8: bright black\n\t\"#ff0000\", // 9: bright red\n\t\"#00ff00\", // 10: bright green\n\t\"#ffff00\", // 11: bright yellow\n\t\"#0000ff\", // 12: bright blue\n\t\"#ff00ff\", // 13: bright magenta\n\t\"#00ffff\", // 14: bright cyan\n\t\"#ffffff\", // 15: bright white\n];\n\n/**\n * Convert 256-color index to hex.\n */\nfunction color256ToHex(index: number): string {\n\t// Standard colors (0-15)\n\tif (index < 16) {\n\t\treturn ANSI_COLORS[index];\n\t}\n\n\t// Color cube (16-231): 6x6x6 = 216 colors\n\tif (index < 232) {\n\t\tconst cubeIndex = index - 16;\n\t\tconst r = Math.floor(cubeIndex / 36);\n\t\tconst g = Math.floor((cubeIndex % 36) / 6);\n\t\tconst b = cubeIndex % 6;\n\t\tconst toComponent = (n: number) => (n === 0 ? 
0 : 55 + n * 40);\n\t\tconst toHex = (n: number) => toComponent(n).toString(16).padStart(2, \"0\");\n\t\treturn `#${toHex(r)}${toHex(g)}${toHex(b)}`;\n\t}\n\n\t// Grayscale (232-255): 24 shades\n\tconst gray = 8 + (index - 232) * 10;\n\tconst grayHex = gray.toString(16).padStart(2, \"0\");\n\treturn `#${grayHex}${grayHex}${grayHex}`;\n}\n\n/**\n * Escape HTML special characters.\n */\nfunction escapeHtml(text: string): string {\n\treturn text\n\t\t.replace(/&/g, \"&amp;\")\n\t\t.replace(/</g, \"&lt;\")\n\t\t.replace(/>/g, \"&gt;\")\n\t\t.replace(/\"/g, \"&quot;\")\n\t\t.replace(/'/g, \"&#039;\");\n}\n\ninterface TextStyle {\n\tfg: string | null;\n\tbg: string | null;\n\tbold: boolean;\n\tdim: boolean;\n\titalic: boolean;\n\tunderline: boolean;\n}\n\nfunction createEmptyStyle(): TextStyle {\n\treturn {\n\t\tfg: null,\n\t\tbg: null,\n\t\tbold: false,\n\t\tdim: false,\n\t\titalic: false,\n\t\tunderline: false,\n\t};\n}\n\nfunction styleToInlineCSS(style: TextStyle): string {\n\tconst parts: string[] = [];\n\tif (style.fg) parts.push(`color:${style.fg}`);\n\tif (style.bg) parts.push(`background-color:${style.bg}`);\n\tif (style.bold) parts.push(\"font-weight:bold\");\n\tif (style.dim) parts.push(\"opacity:0.6\");\n\tif (style.italic) parts.push(\"font-style:italic\");\n\tif (style.underline) parts.push(\"text-decoration:underline\");\n\treturn parts.join(\";\");\n}\n\nfunction hasStyle(style: TextStyle): boolean {\n\treturn style.fg !== null || style.bg !== null || style.bold || style.dim || style.italic || style.underline;\n}\n\n/**\n * Parse ANSI SGR (Select Graphic Rendition) codes and update style.\n */\nfunction applySgrCode(params: number[], style: TextStyle): void {\n\tlet i = 0;\n\twhile (i < params.length) {\n\t\tconst code = params[i];\n\n\t\tif (code === 0) {\n\t\t\t// Reset all\n\t\t\tstyle.fg = null;\n\t\t\tstyle.bg = null;\n\t\t\tstyle.bold = false;\n\t\t\tstyle.dim = false;\n\t\t\tstyle.italic = false;\n\t\t\tstyle.underline = false;\n\t\t} else if (code === 1) {\n\t\t\tstyle.bold = true;\n\t\t} else if (code === 2) {\n\t\t\tstyle.dim = true;\n\t\t} else if (code === 3) {\n\t\t\tstyle.italic = true;\n\t\t} else if (code === 4) {\n\t\t\tstyle.underline = true;\n\t\t} else if (code === 22) {\n\t\t\t// Reset bold/dim\n\t\t\tstyle.bold = false;\n\t\t\tstyle.dim = false;\n\t\t} else if (code === 23) {\n\t\t\tstyle.italic = false;\n\t\t} else if (code === 24) {\n\t\t\tstyle.underline = false;\n\t\t} else if (code >= 30 && code <= 37) {\n\t\t\t// Standard foreground colors\n\t\t\tstyle.fg = ANSI_COLORS[code - 30];\n\t\t} else if (code === 38) {\n\t\t\t// Extended foreground color\n\t\t\tif (params[i + 1] === 5 && params.length > i + 2) {\n\t\t\t\t// 256-color: 38;5;N\n\t\t\t\tstyle.fg = color256ToHex(params[i + 2]);\n\t\t\t\ti += 2;\n\t\t\t} else if (params[i + 1] === 2 && params.length > i + 4) {\n\t\t\t\t// RGB: 38;2;R;G;B\n\t\t\t\tconst r = params[i + 2];\n\t\t\t\tconst g = params[i + 3];\n\t\t\t\tconst b = params[i + 4];\n\t\t\t\tstyle.fg = `rgb(${r},${g},${b})`;\n\t\t\t\ti += 4;\n\t\t\t}\n\t\t} else if (code === 39) {\n\t\t\t// Default foreground\n\t\t\tstyle.fg = null;\n\t\t} else if (code >= 40 && code <= 47) {\n\t\t\t// Standard background colors\n\t\t\tstyle.bg = ANSI_COLORS[code - 40];\n\t\t} else if (code === 48) {\n\t\t\t// Extended background color\n\t\t\tif (params[i + 1] === 5 && params.length > i + 2) {\n\t\t\t\t// 256-color: 48;5;N\n\t\t\t\tstyle.bg = color256ToHex(params[i + 2]);\n\t\t\t\ti += 2;\n\t\t\t} else if (params[i + 1] === 2 && params.length > i + 4) 
{\n\t\t\t\t// RGB: 48;2;R;G;B\n\t\t\t\tconst r = params[i + 2];\n\t\t\t\tconst g = params[i + 3];\n\t\t\t\tconst b = params[i + 4];\n\t\t\t\tstyle.bg = `rgb(${r},${g},${b})`;\n\t\t\t\ti += 4;\n\t\t\t}\n\t\t} else if (code === 49) {\n\t\t\t// Default background\n\t\t\tstyle.bg = null;\n\t\t} else if (code >= 90 && code <= 97) {\n\t\t\t// Bright foreground colors\n\t\t\tstyle.fg = ANSI_COLORS[code - 90 + 8];\n\t\t} else if (code >= 100 && code <= 107) {\n\t\t\t// Bright background colors\n\t\t\tstyle.bg = ANSI_COLORS[code - 100 + 8];\n\t\t}\n\t\t// Ignore unrecognized codes\n\n\t\ti++;\n\t}\n}\n\n// Match ANSI escape sequences: ESC[ followed by params and ending with 'm'\nconst ANSI_REGEX = /\\x1b\\[([\\d;]*)m/g;\n\n/**\n * Convert ANSI-escaped text to HTML with inline styles.\n */\nexport function ansiToHtml(text: string): string {\n\tconst style = createEmptyStyle();\n\tlet result = \"\";\n\tlet lastIndex = 0;\n\tlet inSpan = false;\n\n\t// Reset regex state\n\tANSI_REGEX.lastIndex = 0;\n\n\tlet match = ANSI_REGEX.exec(text);\n\twhile (match !== null) {\n\t\t// Add text before this escape sequence\n\t\tconst beforeText = text.slice(lastIndex, match.index);\n\t\tif (beforeText) {\n\t\t\tresult += escapeHtml(beforeText);\n\t\t}\n\n\t\t// Parse SGR parameters\n\t\tconst paramStr = match[1];\n\t\tconst params = paramStr ? paramStr.split(\";\").map((p) => parseInt(p, 10) || 0) : [0];\n\n\t\t// Close existing span if we have one\n\t\tif (inSpan) {\n\t\t\tresult += \"</span>\";\n\t\t\tinSpan = false;\n\t\t}\n\n\t\t// Apply the codes\n\t\tapplySgrCode(params, style);\n\n\t\t// Open new span if we have any styling\n\t\tif (hasStyle(style)) {\n\t\t\tresult += `<span style=\"${styleToInlineCSS(style)}\">`;\n\t\t\tinSpan = true;\n\t\t}\n\n\t\tlastIndex = match.index + match[0].length;\n\t\tmatch = ANSI_REGEX.exec(text);\n\t}\n\n\t// Add remaining text\n\tconst remainingText = text.slice(lastIndex);\n\tif (remainingText) {\n\t\tresult += escapeHtml(remainingText);\n\t}\n\n\t// Close any open span\n\tif (inSpan) {\n\t\tresult += \"</span>\";\n\t}\n\n\treturn result;\n}\n\n/**\n * Convert array of ANSI-escaped lines to HTML.\n * Each line is wrapped in a div element.\n */\nexport function ansiLinesToHtml(lines: string[]): string {\n\treturn lines.map((line) => `<div class=\"ansi-line\">${ansiToHtml(line) || \"&nbsp;\"}</div>`).join(\"\\n\");\n}\n"]}
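The converter source embedded above resolves the xterm 256-color palette with the standard 6x6x6 cube formula (a cube coordinate of 0 maps to 00, otherwise 55 + 40·n) plus a 24-step grayscale ramp, and passes 24-bit colors through as `rgb(...)`. The sketch below spot-checks that behavior using only the exported `ansiToHtml`; the import path is again an assumption, and the expected strings are worked out by hand from the palette math shown in the source.

```ts
// Sketch: spot-checking extended-color handling of the published converter.
import { ansiToHtml } from "@mariozechner/pi-coding-agent/dist/core/export-html/ansi-to-html.js";

// 38;5;208 selects xterm color 208 from the 6x6x6 cube:
// cubeIndex = 192 -> r=5, g=2, b=0 -> #ff8700.
console.log(ansiToHtml("\x1b[38;5;208morange\x1b[0m"));
// expected: <span style="color:#ff8700">orange</span>

// 38;2;R;G;B carries a true-color value straight through as rgb(...).
console.log(ansiToHtml("\x1b[38;2;18;52;86mtruecolor\x1b[0m"));
// expected: <span style="color:rgb(18,52,86)">truecolor</span>
```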