@mariozechner/pi-coding-agent 0.30.2 → 0.31.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (297)
  1. package/CHANGELOG.md +251 -1
  2. package/README.md +105 -84
  3. package/dist/cli/args.d.ts.map +1 -1
  4. package/dist/cli/args.js +5 -1
  5. package/dist/cli/args.js.map +1 -1
  6. package/dist/cli/file-processor.d.ts +3 -3
  7. package/dist/cli/file-processor.d.ts.map +1 -1
  8. package/dist/cli/file-processor.js +7 -10
  9. package/dist/cli/file-processor.js.map +1 -1
  10. package/dist/config.d.ts +9 -0
  11. package/dist/config.d.ts.map +1 -1
  12. package/dist/config.js +18 -0
  13. package/dist/config.js.map +1 -1
  14. package/dist/core/agent-session.d.ts +73 -34
  15. package/dist/core/agent-session.d.ts.map +1 -1
  16. package/dist/core/agent-session.js +464 -210
  17. package/dist/core/agent-session.js.map +1 -1
  18. package/dist/core/auth-storage.d.ts +2 -2
  19. package/dist/core/auth-storage.d.ts.map +1 -1
  20. package/dist/core/auth-storage.js +2 -2
  21. package/dist/core/auth-storage.js.map +1 -1
  22. package/dist/core/bash-executor.d.ts +2 -2
  23. package/dist/core/bash-executor.d.ts.map +1 -1
  24. package/dist/core/bash-executor.js +2 -2
  25. package/dist/core/bash-executor.js.map +1 -1
  26. package/dist/core/compaction/branch-summarization.d.ts +84 -0
  27. package/dist/core/compaction/branch-summarization.d.ts.map +1 -0
  28. package/dist/core/compaction/branch-summarization.js +233 -0
  29. package/dist/core/compaction/branch-summarization.js.map +1 -0
  30. package/dist/core/{compaction.d.ts → compaction/compaction.d.ts} +38 -19
  31. package/dist/core/compaction/compaction.d.ts.map +1 -0
  32. package/dist/core/compaction/compaction.js +558 -0
  33. package/dist/core/compaction/compaction.js.map +1 -0
  34. package/dist/core/compaction/index.d.ts +7 -0
  35. package/dist/core/compaction/index.d.ts.map +1 -0
  36. package/dist/core/compaction/index.js +7 -0
  37. package/dist/core/compaction/index.js.map +1 -0
  38. package/dist/core/compaction/utils.d.ts +35 -0
  39. package/dist/core/compaction/utils.d.ts.map +1 -0
  40. package/dist/core/compaction/utils.js +138 -0
  41. package/dist/core/compaction/utils.js.map +1 -0
  42. package/dist/core/custom-tools/index.d.ts +2 -1
  43. package/dist/core/custom-tools/index.d.ts.map +1 -1
  44. package/dist/core/custom-tools/index.js +1 -0
  45. package/dist/core/custom-tools/index.js.map +1 -1
  46. package/dist/core/custom-tools/loader.d.ts.map +1 -1
  47. package/dist/core/custom-tools/loader.js +13 -80
  48. package/dist/core/custom-tools/loader.js.map +1 -1
  49. package/dist/core/custom-tools/types.d.ts +84 -59
  50. package/dist/core/custom-tools/types.d.ts.map +1 -1
  51. package/dist/core/custom-tools/types.js.map +1 -1
  52. package/dist/core/custom-tools/wrapper.d.ts +15 -0
  53. package/dist/core/custom-tools/wrapper.d.ts.map +1 -0
  54. package/dist/core/custom-tools/wrapper.js +23 -0
  55. package/dist/core/custom-tools/wrapper.js.map +1 -0
  56. package/dist/core/exec.d.ts +29 -0
  57. package/dist/core/exec.d.ts.map +1 -0
  58. package/dist/core/exec.js +71 -0
  59. package/dist/core/exec.js.map +1 -0
  60. package/dist/core/export-html/index.d.ts +17 -0
  61. package/dist/core/export-html/index.d.ts.map +1 -0
  62. package/dist/core/export-html/index.js +171 -0
  63. package/dist/core/export-html/index.js.map +1 -0
  64. package/dist/core/export-html/template.css +781 -0
  65. package/dist/core/export-html/template.html +54 -0
  66. package/dist/core/export-html/template.js +1185 -0
  67. package/dist/core/export-html/vendor/highlight.min.js +1213 -0
  68. package/dist/core/export-html/vendor/marked.min.js +6 -0
  69. package/dist/core/hooks/index.d.ts +4 -4
  70. package/dist/core/hooks/index.d.ts.map +1 -1
  71. package/dist/core/hooks/index.js +4 -3
  72. package/dist/core/hooks/index.js.map +1 -1
  73. package/dist/core/hooks/loader.d.ts +40 -5
  74. package/dist/core/hooks/loader.d.ts.map +1 -1
  75. package/dist/core/hooks/loader.js +43 -10
  76. package/dist/core/hooks/loader.js.map +1 -1
  77. package/dist/core/hooks/runner.d.ts +94 -18
  78. package/dist/core/hooks/runner.d.ts.map +1 -1
  79. package/dist/core/hooks/runner.js +199 -120
  80. package/dist/core/hooks/runner.js.map +1 -1
  81. package/dist/core/hooks/tool-wrapper.d.ts +1 -1
  82. package/dist/core/hooks/tool-wrapper.d.ts.map +1 -1
  83. package/dist/core/hooks/tool-wrapper.js +36 -19
  84. package/dist/core/hooks/tool-wrapper.js.map +1 -1
  85. package/dist/core/hooks/types.d.ts +407 -96
  86. package/dist/core/hooks/types.d.ts.map +1 -1
  87. package/dist/core/hooks/types.js.map +1 -1
  88. package/dist/core/index.d.ts +4 -3
  89. package/dist/core/index.d.ts.map +1 -1
  90. package/dist/core/index.js.map +1 -1
  91. package/dist/core/messages.d.ts +44 -12
  92. package/dist/core/messages.d.ts.map +1 -1
  93. package/dist/core/messages.js +82 -34
  94. package/dist/core/messages.js.map +1 -1
  95. package/dist/core/model-registry.d.ts +5 -5
  96. package/dist/core/model-registry.d.ts.map +1 -1
  97. package/dist/core/model-registry.js +7 -7
  98. package/dist/core/model-registry.js.map +1 -1
  99. package/dist/core/model-resolver.d.ts +7 -7
  100. package/dist/core/model-resolver.d.ts.map +1 -1
  101. package/dist/core/model-resolver.js +45 -14
  102. package/dist/core/model-resolver.js.map +1 -1
  103. package/dist/core/sdk.d.ts +7 -10
  104. package/dist/core/sdk.d.ts.map +1 -1
  105. package/dist/core/sdk.js +88 -32
  106. package/dist/core/sdk.js.map +1 -1
  107. package/dist/core/session-manager.d.ts +202 -36
  108. package/dist/core/session-manager.d.ts.map +1 -1
  109. package/dist/core/session-manager.js +565 -133
  110. package/dist/core/session-manager.js.map +1 -1
  111. package/dist/core/settings-manager.d.ts +9 -3
  112. package/dist/core/settings-manager.d.ts.map +1 -1
  113. package/dist/core/settings-manager.js +13 -12
  114. package/dist/core/settings-manager.js.map +1 -1
  115. package/dist/core/system-prompt.d.ts.map +1 -1
  116. package/dist/core/system-prompt.js +6 -3
  117. package/dist/core/system-prompt.js.map +1 -1
  118. package/dist/core/tools/bash.d.ts +1 -1
  119. package/dist/core/tools/bash.d.ts.map +1 -1
  120. package/dist/core/tools/bash.js.map +1 -1
  121. package/dist/core/tools/edit-diff.d.ts +33 -0
  122. package/dist/core/tools/edit-diff.d.ts.map +1 -0
  123. package/dist/core/tools/edit-diff.js +171 -0
  124. package/dist/core/tools/edit-diff.js.map +1 -0
  125. package/dist/core/tools/edit.d.ts +7 -1
  126. package/dist/core/tools/edit.d.ts.map +1 -1
  127. package/dist/core/tools/edit.js +20 -95
  128. package/dist/core/tools/edit.js.map +1 -1
  129. package/dist/core/tools/find.d.ts +1 -1
  130. package/dist/core/tools/find.d.ts.map +1 -1
  131. package/dist/core/tools/find.js.map +1 -1
  132. package/dist/core/tools/grep.d.ts +1 -1
  133. package/dist/core/tools/grep.d.ts.map +1 -1
  134. package/dist/core/tools/grep.js.map +1 -1
  135. package/dist/core/tools/index.d.ts +1 -1
  136. package/dist/core/tools/index.d.ts.map +1 -1
  137. package/dist/core/tools/index.js.map +1 -1
  138. package/dist/core/tools/ls.d.ts +1 -1
  139. package/dist/core/tools/ls.d.ts.map +1 -1
  140. package/dist/core/tools/ls.js.map +1 -1
  141. package/dist/core/tools/read.d.ts +1 -1
  142. package/dist/core/tools/read.d.ts.map +1 -1
  143. package/dist/core/tools/read.js.map +1 -1
  144. package/dist/core/tools/write.d.ts +1 -1
  145. package/dist/core/tools/write.d.ts.map +1 -1
  146. package/dist/core/tools/write.js.map +1 -1
  147. package/dist/index.d.ts +8 -7
  148. package/dist/index.d.ts.map +1 -1
  149. package/dist/index.js +5 -3
  150. package/dist/index.js.map +1 -1
  151. package/dist/main.d.ts.map +1 -1
  152. package/dist/main.js +22 -21
  153. package/dist/main.js.map +1 -1
  154. package/dist/modes/interactive/components/assistant-message.d.ts.map +1 -1
  155. package/dist/modes/interactive/components/assistant-message.js +3 -4
  156. package/dist/modes/interactive/components/assistant-message.js.map +1 -1
  157. package/dist/modes/interactive/components/bash-execution.d.ts +1 -1
  158. package/dist/modes/interactive/components/bash-execution.d.ts.map +1 -1
  159. package/dist/modes/interactive/components/bash-execution.js +6 -2
  160. package/dist/modes/interactive/components/bash-execution.js.map +1 -1
  161. package/dist/modes/interactive/components/bordered-loader.d.ts +12 -0
  162. package/dist/modes/interactive/components/bordered-loader.d.ts.map +1 -0
  163. package/dist/modes/interactive/components/bordered-loader.js +30 -0
  164. package/dist/modes/interactive/components/bordered-loader.js.map +1 -0
  165. package/dist/modes/interactive/components/branch-summary-message.d.ts +14 -0
  166. package/dist/modes/interactive/components/branch-summary-message.d.ts.map +1 -0
  167. package/dist/modes/interactive/components/branch-summary-message.js +35 -0
  168. package/dist/modes/interactive/components/branch-summary-message.js.map +1 -0
  169. package/dist/modes/interactive/components/compaction-summary-message.d.ts +14 -0
  170. package/dist/modes/interactive/components/compaction-summary-message.d.ts.map +1 -0
  171. package/dist/modes/interactive/components/compaction-summary-message.js +36 -0
  172. package/dist/modes/interactive/components/compaction-summary-message.js.map +1 -0
  173. package/dist/modes/interactive/components/dynamic-border.d.ts +5 -1
  174. package/dist/modes/interactive/components/dynamic-border.d.ts.map +1 -1
  175. package/dist/modes/interactive/components/dynamic-border.js +5 -1
  176. package/dist/modes/interactive/components/dynamic-border.js.map +1 -1
  177. package/dist/modes/interactive/components/footer.d.ts +12 -6
  178. package/dist/modes/interactive/components/footer.d.ts.map +1 -1
  179. package/dist/modes/interactive/components/footer.js +57 -25
  180. package/dist/modes/interactive/components/footer.js.map +1 -1
  181. package/dist/modes/interactive/components/hook-editor.d.ts +15 -0
  182. package/dist/modes/interactive/components/hook-editor.d.ts.map +1 -0
  183. package/dist/modes/interactive/components/hook-editor.js +95 -0
  184. package/dist/modes/interactive/components/hook-editor.js.map +1 -0
  185. package/dist/modes/interactive/components/hook-message.d.ts +18 -0
  186. package/dist/modes/interactive/components/hook-message.d.ts.map +1 -0
  187. package/dist/modes/interactive/components/hook-message.js +80 -0
  188. package/dist/modes/interactive/components/hook-message.js.map +1 -0
  189. package/dist/modes/interactive/components/model-selector.d.ts +3 -3
  190. package/dist/modes/interactive/components/model-selector.d.ts.map +1 -1
  191. package/dist/modes/interactive/components/model-selector.js +6 -1
  192. package/dist/modes/interactive/components/model-selector.js.map +1 -1
  193. package/dist/modes/interactive/components/tool-execution.d.ts +15 -2
  194. package/dist/modes/interactive/components/tool-execution.d.ts.map +1 -1
  195. package/dist/modes/interactive/components/tool-execution.js +70 -21
  196. package/dist/modes/interactive/components/tool-execution.js.map +1 -1
  197. package/dist/modes/interactive/components/tree-selector.d.ts +52 -0
  198. package/dist/modes/interactive/components/tree-selector.d.ts.map +1 -0
  199. package/dist/modes/interactive/components/tree-selector.js +745 -0
  200. package/dist/modes/interactive/components/tree-selector.js.map +1 -0
  201. package/dist/modes/interactive/components/user-message-selector.d.ts +3 -3
  202. package/dist/modes/interactive/components/user-message-selector.d.ts.map +1 -1
  203. package/dist/modes/interactive/components/user-message-selector.js +1 -1
  204. package/dist/modes/interactive/components/user-message-selector.js.map +1 -1
  205. package/dist/modes/interactive/components/user-message.d.ts +1 -1
  206. package/dist/modes/interactive/components/user-message.d.ts.map +1 -1
  207. package/dist/modes/interactive/components/user-message.js +2 -5
  208. package/dist/modes/interactive/components/user-message.js.map +1 -1
  209. package/dist/modes/interactive/interactive-mode.d.ts +29 -12
  210. package/dist/modes/interactive/interactive-mode.d.ts.map +1 -1
  211. package/dist/modes/interactive/interactive-mode.js +589 -208
  212. package/dist/modes/interactive/interactive-mode.js.map +1 -1
  213. package/dist/modes/interactive/theme/dark.json +13 -1
  214. package/dist/modes/interactive/theme/light.json +13 -1
  215. package/dist/modes/interactive/theme/theme-schema.json +34 -0
  216. package/dist/modes/interactive/theme/theme.d.ts +20 -2
  217. package/dist/modes/interactive/theme/theme.d.ts.map +1 -1
  218. package/dist/modes/interactive/theme/theme.js +135 -2
  219. package/dist/modes/interactive/theme/theme.js.map +1 -1
  220. package/dist/modes/print-mode.d.ts +3 -3
  221. package/dist/modes/print-mode.d.ts.map +1 -1
  222. package/dist/modes/print-mode.js +26 -20
  223. package/dist/modes/print-mode.js.map +1 -1
  224. package/dist/modes/rpc/rpc-client.d.ts +13 -10
  225. package/dist/modes/rpc/rpc-client.d.ts.map +1 -1
  226. package/dist/modes/rpc/rpc-client.js +11 -10
  227. package/dist/modes/rpc/rpc-client.js.map +1 -1
  228. package/dist/modes/rpc/rpc-mode.d.ts.map +1 -1
  229. package/dist/modes/rpc/rpc-mode.js +88 -35
  230. package/dist/modes/rpc/rpc-mode.js.map +1 -1
  231. package/dist/modes/rpc/rpc-types.d.ts +30 -11
  232. package/dist/modes/rpc/rpc-types.d.ts.map +1 -1
  233. package/dist/modes/rpc/rpc-types.js.map +1 -1
  234. package/dist/utils/shell.d.ts +4 -2
  235. package/dist/utils/shell.d.ts.map +1 -1
  236. package/dist/utils/shell.js +36 -7
  237. package/dist/utils/shell.js.map +1 -1
  238. package/dist/utils/tools-manager.d.ts +1 -1
  239. package/dist/utils/tools-manager.d.ts.map +1 -1
  240. package/dist/utils/tools-manager.js +2 -2
  241. package/dist/utils/tools-manager.js.map +1 -1
  242. package/docs/compaction.md +388 -0
  243. package/docs/custom-tools.md +146 -43
  244. package/docs/extension-loading.md +1004 -0
  245. package/docs/hooks.md +562 -596
  246. package/docs/rpc.md +33 -19
  247. package/docs/sdk.md +93 -21
  248. package/docs/session-tree-plan.md +441 -0
  249. package/docs/session.md +172 -21
  250. package/docs/skills.md +2 -0
  251. package/docs/theme.md +31 -2
  252. package/docs/tree.md +197 -0
  253. package/docs/tui.md +343 -0
  254. package/examples/README.md +1 -9
  255. package/examples/custom-tools/hello/index.ts +4 -3
  256. package/examples/custom-tools/question/index.ts +4 -4
  257. package/examples/custom-tools/subagent/index.ts +7 -6
  258. package/examples/custom-tools/todo/index.ts +11 -5
  259. package/examples/hooks/README.md +29 -71
  260. package/examples/hooks/auto-commit-on-exit.ts +8 -9
  261. package/examples/hooks/confirm-destructive.ts +29 -30
  262. package/examples/hooks/custom-compaction.ts +20 -21
  263. package/examples/hooks/dirty-repo-guard.ts +41 -40
  264. package/examples/hooks/file-trigger.ts +10 -5
  265. package/examples/hooks/git-checkpoint.ts +16 -12
  266. package/examples/hooks/handoff.ts +150 -0
  267. package/examples/hooks/permission-gate.ts +1 -1
  268. package/examples/hooks/protected-paths.ts +1 -1
  269. package/examples/hooks/qna.ts +119 -0
  270. package/examples/hooks/snake.ts +343 -0
  271. package/examples/hooks/status-line.ts +40 -0
  272. package/examples/sdk/01-minimal.ts +1 -1
  273. package/examples/sdk/02-custom-model.ts +1 -1
  274. package/examples/sdk/03-custom-prompt.ts +1 -1
  275. package/examples/sdk/04-skills.ts +1 -1
  276. package/examples/sdk/05-tools.ts +4 -4
  277. package/examples/sdk/06-hooks.ts +1 -1
  278. package/examples/sdk/07-context-files.ts +1 -1
  279. package/examples/sdk/08-slash-commands.ts +6 -1
  280. package/examples/sdk/09-api-keys-and-oauth.ts +1 -1
  281. package/examples/sdk/10-settings.ts +1 -1
  282. package/examples/sdk/11-sessions.ts +1 -1
  283. package/examples/sdk/12-full-control.ts +4 -7
  284. package/package.json +6 -6
  285. package/dist/core/compaction.d.ts.map +0 -1
  286. package/dist/core/compaction.js +0 -412
  287. package/dist/core/compaction.js.map +0 -1
  288. package/dist/core/export-html.d.ts +0 -23
  289. package/dist/core/export-html.d.ts.map +0 -1
  290. package/dist/core/export-html.js +0 -1185
  291. package/dist/core/export-html.js.map +0 -1
  292. package/dist/modes/interactive/components/compaction.d.ts +0 -15
  293. package/dist/modes/interactive/components/compaction.d.ts.map +0 -1
  294. package/dist/modes/interactive/components/compaction.js +0 -41
  295. package/dist/modes/interactive/components/compaction.js.map +0 -1
  296. package/docs/hooks-v2.md +0 -385
  297. package/docs/session-tree.md +0 -452
@@ -1 +1 @@
- {"version":3,"file":"auth-storage.js","sourceRoot":"","sources":["../../src/core/auth-storage.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EACN,YAAY,EACZ,cAAc,EACd,cAAc,EACd,gBAAgB,EAChB,cAAc,EACd,kBAAkB,GAGlB,MAAM,qBAAqB,CAAC;AAC7B,OAAO,EAAE,SAAS,EAAE,UAAU,EAAE,SAAS,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,IAAI,CAAC;AACnF,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAC;AAe/B;;GAEG;AACH,MAAM,OAAO,WAAW;IAKH,QAAQ;IAJpB,IAAI,GAAoB,EAAE,CAAC;IAC3B,gBAAgB,GAAwB,IAAI,GAAG,EAAE,CAAC;IAClD,gBAAgB,CAA4C;IAEpE,YAAoB,QAAgB,EAAE;wBAAlB,QAAQ;QAC3B,IAAI,CAAC,MAAM,EAAE,CAAC;IAAA,CACd;IAED;;;OAGG;IACH,gBAAgB,CAAC,QAAgB,EAAE,MAAc,EAAQ;QACxD,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;IAAA,CAC5C;IAED;;OAEG;IACH,mBAAmB,CAAC,QAAgB,EAAQ;QAC3C,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IAAA,CACvC;IAED;;;OAGG;IACH,mBAAmB,CAAC,QAAkD,EAAQ;QAC7E,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC;IAAA,CACjC;IAED;;OAEG;IACH,MAAM,GAAS;QACd,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC;YAChC,IAAI,CAAC,IAAI,GAAG,EAAE,CAAC;YACf,OAAO;QACR,CAAC;QACD,IAAI,CAAC;YACJ,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAC;QAC9D,CAAC;QAAC,MAAM,CAAC;YACR,IAAI,CAAC,IAAI,GAAG,EAAE,CAAC;QAChB,CAAC;IAAA,CACD;IAED;;OAEG;IACK,IAAI,GAAS;QACpB,MAAM,GAAG,GAAG,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QACnC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,CAAC;YACtB,SAAS,CAAC,GAAG,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC;QAClD,CAAC;QACD,aAAa,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QAC1E,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;IAAA,CAChC;IAED;;OAEG;IACH,GAAG,CAAC,QAAgB,EAAyB;QAC5C,OAAO,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC;IAAA,CACnC;IAED;;OAEG;IACH,GAAG,CAAC,QAAgB,EAAE,UAA0B,EAAQ;QACvD,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,UAAU,CAAC;QACjC,IAAI,CAAC,IAAI,EAAE,CAAC;IAAA,CACZ;IAED;;OAEG;IACH,MAAM,CAAC,QAAgB,EAAQ;QAC9B,OAAO,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAC3B,IAAI,CAAC,IAAI,EAAE,CAAC;IAAA,CACZ;IAED;;OAEG;IACH,IAAI,GAAa;QAChB,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAAA,CAC9B;IAED;;OAEG;IACH,GAAG,CAAC,QAAgB,EAAW;QAC9B,OAAO,QAAQ,IAAI,IAAI,CAAC,IAAI,CAAC;IAAA,CAC7B;IAED;;OAEG;IACH,MAAM,GAAoB;QACzB,OAAO,EAAE,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;IAAA,CACxB;IAED;;OAEG;IACH,KAAK,CAAC,KAAK,CACV,QAAuB,EACvB,SAIC,EACe;QAChB,IAAI,WAA6B,CAAC;QAElC,QAAQ,QAAQ,EAAE,CAAC;YAClB,KAAK,WAAW;gBACf,WAAW,GAAG,MAAM,cAAc,CACjC,CAAC,GAAG,EAAE,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,EAAE,GAAG,EAAE,CAAC,EAClC,GAAG,EAAE,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE,OAAO,EAAE,+BAA+B,EAAE,CAAC,CACtE,CAAC;gBACF,MAAM;YACP,KAAK,gBAAgB;gBACpB,WAAW,GAAG,MAAM,kBAAkB,CAAC;oBACtC,MAAM,EAAE,CAAC,GAAG,EAAE,YAAY,EAAE,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,EAAE,GAAG,EAAE,YAAY,EAAE,CAAC;oBACtE,QAAQ,EAAE,SAAS,CAAC,QAAQ;oBAC5B,UAAU,EAAE,SAAS,CAAC,UAAU;iBAChC,CAAC,CAAC;gBACH,MAAM;YACP,KAAK,mBAAmB;gBACvB,WAAW,GAAG,MAAM,cAAc,CAAC,SAAS,CAAC,MAAM,EAAE,SAAS,CAAC,UAAU,CAAC,CAAC;gBAC3E,MAAM;YACP,KAAK,oBAAoB;gBACxB,WAAW,GAAG,MAAM,gBAAgB,CAAC,SAAS,CAAC,MAAM,EAAE,SAAS,CAAC,UAAU,CAAC,CAAC;gBAC7E,MAAM;YACP;gBACC,MAAM,IAAI,KAAK,CAAC,2BAA2B,QAAQ,EAAE,CAAC,CAAC;QACzD,CAAC;QAED,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,WAAW,EAAE,CAAC,CAAC;IAAA,CACtD;IAED;;OAEG;IACH,MAAM,CAAC,QAAgB,EAAQ;QAC9B,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IAAA,CACtB;IAED;;;;;;;;OAQG;IACH,KAAK,CAAC,SAAS,CAAC,QAAgB,EAA0B;QACzD,0CAA0C;QAC1C,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;QACvD,IAAI,UAAU,EAAE,CAAC;YAChB,OAAO,UAAU,CAAC;QACnB,CAAC;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAEjC,IAAI,IAAI,EAAE,IAAI,KAAK,SAAS,EAAE,CAA
C;YAC9B,OAAO,IAAI,CAAC,GAAG,CAAC;QACjB,CAAC;QAED,IAAI,IAAI,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YAC5B,sDAAsD;YACtD,MAAM,UAAU,GAAqC,EAAE,CAAC;YACxD,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;gBACtD,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE,CAAC;oBAC5B,UAAU,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;gBACzB,CAAC;YACF,CAAC;YAED,IAAI,CAAC;gBACJ,MAAM,MAAM,GAAG,MAAM,cAAc,CAAC,QAAyB,EAAE,UAAU,CAAC,CAAC;gBAC3E,IAAI,MAAM,EAAE,CAAC;oBACZ,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC,cAAc,EAAE,CAAC;oBAClE,IAAI,CAAC,IAAI,EAAE,CAAC;oBACZ,OAAO,MAAM,CAAC,MAAM,CAAC;gBACtB,CAAC;YACF,CAAC;YAAC,MAAM,CAAC;gBACR,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;YACvB,CAAC;QACF,CAAC;QAED,oCAAoC;QACpC,MAAM,MAAM,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;QACtC,IAAI,MAAM;YAAE,OAAO,MAAM,CAAC;QAE1B,oEAAoE;QACpE,OAAO,IAAI,CAAC,gBAAgB,EAAE,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC;IAAA,CACjD;CACD","sourcesContent":["/**\n * Credential storage for API keys and OAuth tokens.\n * Handles loading, saving, and refreshing credentials from auth.json.\n */\n\nimport {\n\tgetEnvApiKey,\n\tgetOAuthApiKey,\n\tloginAnthropic,\n\tloginAntigravity,\n\tloginGeminiCli,\n\tloginGitHubCopilot,\n\ttype OAuthCredentials,\n\ttype OAuthProvider,\n} from \"@mariozechner/pi-ai\";\nimport { chmodSync, existsSync, mkdirSync, readFileSync, writeFileSync } from \"fs\";\nimport { dirname } from \"path\";\n\nexport type ApiKeyCredential = {\n\ttype: \"api_key\";\n\tkey: string;\n};\n\nexport type OAuthCredential = {\n\ttype: \"oauth\";\n} & OAuthCredentials;\n\nexport type AuthCredential = ApiKeyCredential | OAuthCredential;\n\nexport type AuthStorageData = Record<string, AuthCredential>;\n\n/**\n * Credential storage backed by a JSON file.\n */\nexport class AuthStorage {\n\tprivate data: AuthStorageData = {};\n\tprivate runtimeOverrides: Map<string, string> = new Map();\n\tprivate fallbackResolver?: (provider: string) => string | undefined;\n\n\tconstructor(private authPath: string) {\n\t\tthis.reload();\n\t}\n\n\t/**\n\t * Set a runtime API key override (not persisted to disk).\n\t * Used for CLI --api-key flag.\n\t */\n\tsetRuntimeApiKey(provider: string, apiKey: string): void {\n\t\tthis.runtimeOverrides.set(provider, apiKey);\n\t}\n\n\t/**\n\t * Remove a runtime API key override.\n\t */\n\tremoveRuntimeApiKey(provider: string): void {\n\t\tthis.runtimeOverrides.delete(provider);\n\t}\n\n\t/**\n\t * Set a fallback resolver for API keys not found in auth.json or env vars.\n\t * Used for custom provider keys from models.json.\n\t */\n\tsetFallbackResolver(resolver: (provider: string) => string | undefined): void {\n\t\tthis.fallbackResolver = resolver;\n\t}\n\n\t/**\n\t * Reload credentials from disk.\n\t */\n\treload(): void {\n\t\tif (!existsSync(this.authPath)) {\n\t\t\tthis.data = {};\n\t\t\treturn;\n\t\t}\n\t\ttry {\n\t\t\tthis.data = JSON.parse(readFileSync(this.authPath, \"utf-8\"));\n\t\t} catch {\n\t\t\tthis.data = {};\n\t\t}\n\t}\n\n\t/**\n\t * Save credentials to disk.\n\t */\n\tprivate save(): void {\n\t\tconst dir = dirname(this.authPath);\n\t\tif (!existsSync(dir)) {\n\t\t\tmkdirSync(dir, { recursive: true, mode: 0o700 });\n\t\t}\n\t\twriteFileSync(this.authPath, JSON.stringify(this.data, null, 2), \"utf-8\");\n\t\tchmodSync(this.authPath, 0o600);\n\t}\n\n\t/**\n\t * Get credential for a provider.\n\t */\n\tget(provider: string): AuthCredential | null {\n\t\treturn this.data[provider] ?? 
null;\n\t}\n\n\t/**\n\t * Set credential for a provider.\n\t */\n\tset(provider: string, credential: AuthCredential): void {\n\t\tthis.data[provider] = credential;\n\t\tthis.save();\n\t}\n\n\t/**\n\t * Remove credential for a provider.\n\t */\n\tremove(provider: string): void {\n\t\tdelete this.data[provider];\n\t\tthis.save();\n\t}\n\n\t/**\n\t * List all providers with credentials.\n\t */\n\tlist(): string[] {\n\t\treturn Object.keys(this.data);\n\t}\n\n\t/**\n\t * Check if credentials exist for a provider.\n\t */\n\thas(provider: string): boolean {\n\t\treturn provider in this.data;\n\t}\n\n\t/**\n\t * Get all credentials (for passing to getOAuthApiKey).\n\t */\n\tgetAll(): AuthStorageData {\n\t\treturn { ...this.data };\n\t}\n\n\t/**\n\t * Login to an OAuth provider.\n\t */\n\tasync login(\n\t\tprovider: OAuthProvider,\n\t\tcallbacks: {\n\t\t\tonAuth: (info: { url: string; instructions?: string }) => void;\n\t\t\tonPrompt: (prompt: { message: string; placeholder?: string }) => Promise<string>;\n\t\t\tonProgress?: (message: string) => void;\n\t\t},\n\t): Promise<void> {\n\t\tlet credentials: OAuthCredentials;\n\n\t\tswitch (provider) {\n\t\t\tcase \"anthropic\":\n\t\t\t\tcredentials = await loginAnthropic(\n\t\t\t\t\t(url) => callbacks.onAuth({ url }),\n\t\t\t\t\t() => callbacks.onPrompt({ message: \"Paste the authorization code:\" }),\n\t\t\t\t);\n\t\t\t\tbreak;\n\t\t\tcase \"github-copilot\":\n\t\t\t\tcredentials = await loginGitHubCopilot({\n\t\t\t\t\tonAuth: (url, instructions) => callbacks.onAuth({ url, instructions }),\n\t\t\t\t\tonPrompt: callbacks.onPrompt,\n\t\t\t\t\tonProgress: callbacks.onProgress,\n\t\t\t\t});\n\t\t\t\tbreak;\n\t\t\tcase \"google-gemini-cli\":\n\t\t\t\tcredentials = await loginGeminiCli(callbacks.onAuth, callbacks.onProgress);\n\t\t\t\tbreak;\n\t\t\tcase \"google-antigravity\":\n\t\t\t\tcredentials = await loginAntigravity(callbacks.onAuth, callbacks.onProgress);\n\t\t\t\tbreak;\n\t\t\tdefault:\n\t\t\t\tthrow new Error(`Unknown OAuth provider: ${provider}`);\n\t\t}\n\n\t\tthis.set(provider, { type: \"oauth\", ...credentials });\n\t}\n\n\t/**\n\t * Logout from a provider.\n\t */\n\tlogout(provider: string): void {\n\t\tthis.remove(provider);\n\t}\n\n\t/**\n\t * Get API key for a provider.\n\t * Priority:\n\t * 1. Runtime override (CLI --api-key)\n\t * 2. API key from auth.json\n\t * 3. OAuth token from auth.json (auto-refreshed)\n\t * 4. Environment variable\n\t * 5. 
Fallback resolver (models.json custom providers)\n\t */\n\tasync getApiKey(provider: string): Promise<string | null> {\n\t\t// Runtime override takes highest priority\n\t\tconst runtimeKey = this.runtimeOverrides.get(provider);\n\t\tif (runtimeKey) {\n\t\t\treturn runtimeKey;\n\t\t}\n\n\t\tconst cred = this.data[provider];\n\n\t\tif (cred?.type === \"api_key\") {\n\t\t\treturn cred.key;\n\t\t}\n\n\t\tif (cred?.type === \"oauth\") {\n\t\t\t// Filter to only oauth credentials for getOAuthApiKey\n\t\t\tconst oauthCreds: Record<string, OAuthCredentials> = {};\n\t\t\tfor (const [key, value] of Object.entries(this.data)) {\n\t\t\t\tif (value.type === \"oauth\") {\n\t\t\t\t\toauthCreds[key] = value;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\ttry {\n\t\t\t\tconst result = await getOAuthApiKey(provider as OAuthProvider, oauthCreds);\n\t\t\t\tif (result) {\n\t\t\t\t\tthis.data[provider] = { type: \"oauth\", ...result.newCredentials };\n\t\t\t\t\tthis.save();\n\t\t\t\t\treturn result.apiKey;\n\t\t\t\t}\n\t\t\t} catch {\n\t\t\t\tthis.remove(provider);\n\t\t\t}\n\t\t}\n\n\t\t// Fall back to environment variable\n\t\tconst envKey = getEnvApiKey(provider);\n\t\tif (envKey) return envKey;\n\n\t\t// Fall back to custom resolver (e.g., models.json custom providers)\n\t\treturn this.fallbackResolver?.(provider) ?? null;\n\t}\n}\n"]}
+ {"version":3,"file":"auth-storage.js","sourceRoot":"","sources":["../../src/core/auth-storage.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EACN,YAAY,EACZ,cAAc,EACd,cAAc,EACd,gBAAgB,EAChB,cAAc,EACd,kBAAkB,GAGlB,MAAM,qBAAqB,CAAC;AAC7B,OAAO,EAAE,SAAS,EAAE,UAAU,EAAE,SAAS,EAAE,YAAY,EAAE,aAAa,EAAE,MAAM,IAAI,CAAC;AACnF,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAC;AAe/B;;GAEG;AACH,MAAM,OAAO,WAAW;IAKH,QAAQ;IAJpB,IAAI,GAAoB,EAAE,CAAC;IAC3B,gBAAgB,GAAwB,IAAI,GAAG,EAAE,CAAC;IAClD,gBAAgB,CAA4C;IAEpE,YAAoB,QAAgB,EAAE;wBAAlB,QAAQ;QAC3B,IAAI,CAAC,MAAM,EAAE,CAAC;IAAA,CACd;IAED;;;OAGG;IACH,gBAAgB,CAAC,QAAgB,EAAE,MAAc,EAAQ;QACxD,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;IAAA,CAC5C;IAED;;OAEG;IACH,mBAAmB,CAAC,QAAgB,EAAQ;QAC3C,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IAAA,CACvC;IAED;;;OAGG;IACH,mBAAmB,CAAC,QAAkD,EAAQ;QAC7E,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC;IAAA,CACjC;IAED;;OAEG;IACH,MAAM,GAAS;QACd,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC;YAChC,IAAI,CAAC,IAAI,GAAG,EAAE,CAAC;YACf,OAAO;QACR,CAAC;QACD,IAAI,CAAC;YACJ,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAC;QAC9D,CAAC;QAAC,MAAM,CAAC;YACR,IAAI,CAAC,IAAI,GAAG,EAAE,CAAC;QAChB,CAAC;IAAA,CACD;IAED;;OAEG;IACK,IAAI,GAAS;QACpB,MAAM,GAAG,GAAG,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QACnC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE,CAAC;YACtB,SAAS,CAAC,GAAG,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC;QAClD,CAAC;QACD,aAAa,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC;QAC1E,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC;IAAA,CAChC;IAED;;OAEG;IACH,GAAG,CAAC,QAAgB,EAA8B;QACjD,OAAO,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,SAAS,CAAC;IAAA,CACxC;IAED;;OAEG;IACH,GAAG,CAAC,QAAgB,EAAE,UAA0B,EAAQ;QACvD,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,UAAU,CAAC;QACjC,IAAI,CAAC,IAAI,EAAE,CAAC;IAAA,CACZ;IAED;;OAEG;IACH,MAAM,CAAC,QAAgB,EAAQ;QAC9B,OAAO,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAC3B,IAAI,CAAC,IAAI,EAAE,CAAC;IAAA,CACZ;IAED;;OAEG;IACH,IAAI,GAAa;QAChB,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAAA,CAC9B;IAED;;OAEG;IACH,GAAG,CAAC,QAAgB,EAAW;QAC9B,OAAO,QAAQ,IAAI,IAAI,CAAC,IAAI,CAAC;IAAA,CAC7B;IAED;;OAEG;IACH,MAAM,GAAoB;QACzB,OAAO,EAAE,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;IAAA,CACxB;IAED;;OAEG;IACH,KAAK,CAAC,KAAK,CACV,QAAuB,EACvB,SAIC,EACe;QAChB,IAAI,WAA6B,CAAC;QAElC,QAAQ,QAAQ,EAAE,CAAC;YAClB,KAAK,WAAW;gBACf,WAAW,GAAG,MAAM,cAAc,CACjC,CAAC,GAAG,EAAE,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,EAAE,GAAG,EAAE,CAAC,EAClC,GAAG,EAAE,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE,OAAO,EAAE,+BAA+B,EAAE,CAAC,CACtE,CAAC;gBACF,MAAM;YACP,KAAK,gBAAgB;gBACpB,WAAW,GAAG,MAAM,kBAAkB,CAAC;oBACtC,MAAM,EAAE,CAAC,GAAG,EAAE,YAAY,EAAE,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,EAAE,GAAG,EAAE,YAAY,EAAE,CAAC;oBACtE,QAAQ,EAAE,SAAS,CAAC,QAAQ;oBAC5B,UAAU,EAAE,SAAS,CAAC,UAAU;iBAChC,CAAC,CAAC;gBACH,MAAM;YACP,KAAK,mBAAmB;gBACvB,WAAW,GAAG,MAAM,cAAc,CAAC,SAAS,CAAC,MAAM,EAAE,SAAS,CAAC,UAAU,CAAC,CAAC;gBAC3E,MAAM;YACP,KAAK,oBAAoB;gBACxB,WAAW,GAAG,MAAM,gBAAgB,CAAC,SAAS,CAAC,MAAM,EAAE,SAAS,CAAC,UAAU,CAAC,CAAC;gBAC7E,MAAM;YACP;gBACC,MAAM,IAAI,KAAK,CAAC,2BAA2B,QAAQ,EAAE,CAAC,CAAC;QACzD,CAAC;QAED,IAAI,CAAC,GAAG,CAAC,QAAQ,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,WAAW,EAAE,CAAC,CAAC;IAAA,CACtD;IAED;;OAEG;IACH,MAAM,CAAC,QAAgB,EAAQ;QAC9B,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IAAA,CACtB;IAED;;;;;;;;OAQG;IACH,KAAK,CAAC,SAAS,CAAC,QAAgB,EAA+B;QAC9D,0CAA0C;QAC1C,MAAM,UAAU,GAAG,IAAI,CAAC,gBAAgB,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;QACvD,IAAI,UAAU,EAAE,CAAC;YAChB,OAAO,UAAU,CAAC;QACnB,CAAC;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAEjC,IAAI,IAAI,EAAE,IAAI,KAAK,SAAS,EAAE,CAA
C;YAC9B,OAAO,IAAI,CAAC,GAAG,CAAC;QACjB,CAAC;QAED,IAAI,IAAI,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YAC5B,sDAAsD;YACtD,MAAM,UAAU,GAAqC,EAAE,CAAC;YACxD,KAAK,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;gBACtD,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE,CAAC;oBAC5B,UAAU,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;gBACzB,CAAC;YACF,CAAC;YAED,IAAI,CAAC;gBACJ,MAAM,MAAM,GAAG,MAAM,cAAc,CAAC,QAAyB,EAAE,UAAU,CAAC,CAAC;gBAC3E,IAAI,MAAM,EAAE,CAAC;oBACZ,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,MAAM,CAAC,cAAc,EAAE,CAAC;oBAClE,IAAI,CAAC,IAAI,EAAE,CAAC;oBACZ,OAAO,MAAM,CAAC,MAAM,CAAC;gBACtB,CAAC;YACF,CAAC;YAAC,MAAM,CAAC;gBACR,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;YACvB,CAAC;QACF,CAAC;QAED,oCAAoC;QACpC,MAAM,MAAM,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;QACtC,IAAI,MAAM;YAAE,OAAO,MAAM,CAAC;QAE1B,oEAAoE;QACpE,OAAO,IAAI,CAAC,gBAAgB,EAAE,CAAC,QAAQ,CAAC,IAAI,SAAS,CAAC;IAAA,CACtD;CACD","sourcesContent":["/**\n * Credential storage for API keys and OAuth tokens.\n * Handles loading, saving, and refreshing credentials from auth.json.\n */\n\nimport {\n\tgetEnvApiKey,\n\tgetOAuthApiKey,\n\tloginAnthropic,\n\tloginAntigravity,\n\tloginGeminiCli,\n\tloginGitHubCopilot,\n\ttype OAuthCredentials,\n\ttype OAuthProvider,\n} from \"@mariozechner/pi-ai\";\nimport { chmodSync, existsSync, mkdirSync, readFileSync, writeFileSync } from \"fs\";\nimport { dirname } from \"path\";\n\nexport type ApiKeyCredential = {\n\ttype: \"api_key\";\n\tkey: string;\n};\n\nexport type OAuthCredential = {\n\ttype: \"oauth\";\n} & OAuthCredentials;\n\nexport type AuthCredential = ApiKeyCredential | OAuthCredential;\n\nexport type AuthStorageData = Record<string, AuthCredential>;\n\n/**\n * Credential storage backed by a JSON file.\n */\nexport class AuthStorage {\n\tprivate data: AuthStorageData = {};\n\tprivate runtimeOverrides: Map<string, string> = new Map();\n\tprivate fallbackResolver?: (provider: string) => string | undefined;\n\n\tconstructor(private authPath: string) {\n\t\tthis.reload();\n\t}\n\n\t/**\n\t * Set a runtime API key override (not persisted to disk).\n\t * Used for CLI --api-key flag.\n\t */\n\tsetRuntimeApiKey(provider: string, apiKey: string): void {\n\t\tthis.runtimeOverrides.set(provider, apiKey);\n\t}\n\n\t/**\n\t * Remove a runtime API key override.\n\t */\n\tremoveRuntimeApiKey(provider: string): void {\n\t\tthis.runtimeOverrides.delete(provider);\n\t}\n\n\t/**\n\t * Set a fallback resolver for API keys not found in auth.json or env vars.\n\t * Used for custom provider keys from models.json.\n\t */\n\tsetFallbackResolver(resolver: (provider: string) => string | undefined): void {\n\t\tthis.fallbackResolver = resolver;\n\t}\n\n\t/**\n\t * Reload credentials from disk.\n\t */\n\treload(): void {\n\t\tif (!existsSync(this.authPath)) {\n\t\t\tthis.data = {};\n\t\t\treturn;\n\t\t}\n\t\ttry {\n\t\t\tthis.data = JSON.parse(readFileSync(this.authPath, \"utf-8\"));\n\t\t} catch {\n\t\t\tthis.data = {};\n\t\t}\n\t}\n\n\t/**\n\t * Save credentials to disk.\n\t */\n\tprivate save(): void {\n\t\tconst dir = dirname(this.authPath);\n\t\tif (!existsSync(dir)) {\n\t\t\tmkdirSync(dir, { recursive: true, mode: 0o700 });\n\t\t}\n\t\twriteFileSync(this.authPath, JSON.stringify(this.data, null, 2), \"utf-8\");\n\t\tchmodSync(this.authPath, 0o600);\n\t}\n\n\t/**\n\t * Get credential for a provider.\n\t */\n\tget(provider: string): AuthCredential | undefined {\n\t\treturn this.data[provider] ?? 
undefined;\n\t}\n\n\t/**\n\t * Set credential for a provider.\n\t */\n\tset(provider: string, credential: AuthCredential): void {\n\t\tthis.data[provider] = credential;\n\t\tthis.save();\n\t}\n\n\t/**\n\t * Remove credential for a provider.\n\t */\n\tremove(provider: string): void {\n\t\tdelete this.data[provider];\n\t\tthis.save();\n\t}\n\n\t/**\n\t * List all providers with credentials.\n\t */\n\tlist(): string[] {\n\t\treturn Object.keys(this.data);\n\t}\n\n\t/**\n\t * Check if credentials exist for a provider.\n\t */\n\thas(provider: string): boolean {\n\t\treturn provider in this.data;\n\t}\n\n\t/**\n\t * Get all credentials (for passing to getOAuthApiKey).\n\t */\n\tgetAll(): AuthStorageData {\n\t\treturn { ...this.data };\n\t}\n\n\t/**\n\t * Login to an OAuth provider.\n\t */\n\tasync login(\n\t\tprovider: OAuthProvider,\n\t\tcallbacks: {\n\t\t\tonAuth: (info: { url: string; instructions?: string }) => void;\n\t\t\tonPrompt: (prompt: { message: string; placeholder?: string }) => Promise<string>;\n\t\t\tonProgress?: (message: string) => void;\n\t\t},\n\t): Promise<void> {\n\t\tlet credentials: OAuthCredentials;\n\n\t\tswitch (provider) {\n\t\t\tcase \"anthropic\":\n\t\t\t\tcredentials = await loginAnthropic(\n\t\t\t\t\t(url) => callbacks.onAuth({ url }),\n\t\t\t\t\t() => callbacks.onPrompt({ message: \"Paste the authorization code:\" }),\n\t\t\t\t);\n\t\t\t\tbreak;\n\t\t\tcase \"github-copilot\":\n\t\t\t\tcredentials = await loginGitHubCopilot({\n\t\t\t\t\tonAuth: (url, instructions) => callbacks.onAuth({ url, instructions }),\n\t\t\t\t\tonPrompt: callbacks.onPrompt,\n\t\t\t\t\tonProgress: callbacks.onProgress,\n\t\t\t\t});\n\t\t\t\tbreak;\n\t\t\tcase \"google-gemini-cli\":\n\t\t\t\tcredentials = await loginGeminiCli(callbacks.onAuth, callbacks.onProgress);\n\t\t\t\tbreak;\n\t\t\tcase \"google-antigravity\":\n\t\t\t\tcredentials = await loginAntigravity(callbacks.onAuth, callbacks.onProgress);\n\t\t\t\tbreak;\n\t\t\tdefault:\n\t\t\t\tthrow new Error(`Unknown OAuth provider: ${provider}`);\n\t\t}\n\n\t\tthis.set(provider, { type: \"oauth\", ...credentials });\n\t}\n\n\t/**\n\t * Logout from a provider.\n\t */\n\tlogout(provider: string): void {\n\t\tthis.remove(provider);\n\t}\n\n\t/**\n\t * Get API key for a provider.\n\t * Priority:\n\t * 1. Runtime override (CLI --api-key)\n\t * 2. API key from auth.json\n\t * 3. OAuth token from auth.json (auto-refreshed)\n\t * 4. Environment variable\n\t * 5. 
Fallback resolver (models.json custom providers)\n\t */\n\tasync getApiKey(provider: string): Promise<string | undefined> {\n\t\t// Runtime override takes highest priority\n\t\tconst runtimeKey = this.runtimeOverrides.get(provider);\n\t\tif (runtimeKey) {\n\t\t\treturn runtimeKey;\n\t\t}\n\n\t\tconst cred = this.data[provider];\n\n\t\tif (cred?.type === \"api_key\") {\n\t\t\treturn cred.key;\n\t\t}\n\n\t\tif (cred?.type === \"oauth\") {\n\t\t\t// Filter to only oauth credentials for getOAuthApiKey\n\t\t\tconst oauthCreds: Record<string, OAuthCredentials> = {};\n\t\t\tfor (const [key, value] of Object.entries(this.data)) {\n\t\t\t\tif (value.type === \"oauth\") {\n\t\t\t\t\toauthCreds[key] = value;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\ttry {\n\t\t\t\tconst result = await getOAuthApiKey(provider as OAuthProvider, oauthCreds);\n\t\t\t\tif (result) {\n\t\t\t\t\tthis.data[provider] = { type: \"oauth\", ...result.newCredentials };\n\t\t\t\t\tthis.save();\n\t\t\t\t\treturn result.apiKey;\n\t\t\t\t}\n\t\t\t} catch {\n\t\t\t\tthis.remove(provider);\n\t\t\t}\n\t\t}\n\n\t\t// Fall back to environment variable\n\t\tconst envKey = getEnvApiKey(provider);\n\t\tif (envKey) return envKey;\n\n\t\t// Fall back to custom resolver (e.g., models.json custom providers)\n\t\treturn this.fallbackResolver?.(provider) ?? undefined;\n\t}\n}\n"]}
@@ -14,8 +14,8 @@ export interface BashExecutorOptions {
  export interface BashResult {
  /** Combined stdout + stderr output (sanitized, possibly truncated) */
  output: string;
- /** Process exit code (null if killed/cancelled) */
- exitCode: number | null;
+ /** Process exit code (undefined if killed/cancelled) */
+ exitCode: number | undefined;
  /** Whether the command was cancelled via signal */
  cancelled: boolean;
  /** Whether the output was truncated */
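For consumers of BashResult, this is a behavioral change: a killed or cancelled command now reports exitCode as undefined instead of null. A minimal sketch of how a caller might handle the new shape (the root import path is an assumption; executeBash is defined in dist/core/bash-executor):

```ts
// Hypothetical consumer of the 0.31.x BashResult shape.
// Assumes executeBash is re-exported from the package root.
import { executeBash } from "@mariozechner/pi-coding-agent";

async function runAndReport(command: string): Promise<void> {
	const result = await executeBash(command);
	if (result.cancelled || result.exitCode === undefined) {
		// Killed or aborted: no exit code is available (was `null` in 0.30.x).
		console.log("command did not complete");
		return;
	}
	console.log(`exit code: ${result.exitCode}`);
}

void runAndReport("echo hello");
```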
@@ -1 +1 @@
- {"version":3,"file":"bash-executor.d.ts","sourceRoot":"","sources":["../../src/core/bash-executor.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAeH,MAAM,WAAW,mBAAmB;IACnC,+DAA+D;IAC/D,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IAClC,mCAAmC;IACnC,MAAM,CAAC,EAAE,WAAW,CAAC;CACrB;AAED,MAAM,WAAW,UAAU;IAC1B,sEAAsE;IACtE,MAAM,EAAE,MAAM,CAAC;IACf,mDAAmD;IACnD,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,mDAAmD;IACnD,SAAS,EAAE,OAAO,CAAC;IACnB,uCAAuC;IACvC,SAAS,EAAE,OAAO,CAAC;IACnB,yFAAyF;IACzF,cAAc,CAAC,EAAE,MAAM,CAAC;CACxB;AAMD;;;;;;;;;;;;;GAaG;AACH,wBAAgB,WAAW,CAAC,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,mBAAmB,GAAG,OAAO,CAAC,UAAU,CAAC,CAqH/F","sourcesContent":["/**\n * Bash command execution with streaming support and cancellation.\n *\n * This module provides a unified bash execution implementation used by:\n * - AgentSession.executeBash() for interactive and RPC modes\n * - Direct calls from modes that need bash execution\n */\n\nimport { randomBytes } from \"node:crypto\";\nimport { createWriteStream, type WriteStream } from \"node:fs\";\nimport { tmpdir } from \"node:os\";\nimport { join } from \"node:path\";\nimport { type ChildProcess, spawn } from \"child_process\";\nimport stripAnsi from \"strip-ansi\";\nimport { getShellConfig, killProcessTree, sanitizeBinaryOutput } from \"../utils/shell.js\";\nimport { DEFAULT_MAX_BYTES, truncateTail } from \"./tools/truncate.js\";\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface BashExecutorOptions {\n\t/** Callback for streaming output chunks (already sanitized) */\n\tonChunk?: (chunk: string) => void;\n\t/** AbortSignal for cancellation */\n\tsignal?: AbortSignal;\n}\n\nexport interface BashResult {\n\t/** Combined stdout + stderr output (sanitized, possibly truncated) */\n\toutput: string;\n\t/** Process exit code (null if killed/cancelled) */\n\texitCode: number | null;\n\t/** Whether the command was cancelled via signal */\n\tcancelled: boolean;\n\t/** Whether the output was truncated */\n\ttruncated: boolean;\n\t/** Path to temp file containing full output (if output exceeded truncation threshold) */\n\tfullOutputPath?: string;\n}\n\n// ============================================================================\n// Implementation\n// ============================================================================\n\n/**\n * Execute a bash command with optional streaming and cancellation support.\n *\n * Features:\n * - Streams sanitized output via onChunk callback\n * - Writes large output to temp file for later retrieval\n * - Supports cancellation via AbortSignal\n * - Sanitizes output (strips ANSI, removes binary garbage, normalizes newlines)\n * - Truncates output if it exceeds the default max bytes\n *\n * @param command - The bash command to execute\n * @param options - Optional streaming callback and abort signal\n * @returns Promise resolving to execution result\n */\nexport function executeBash(command: string, options?: BashExecutorOptions): Promise<BashResult> {\n\treturn new Promise((resolve, reject) => {\n\t\tconst { shell, args } = getShellConfig();\n\t\tconst child: ChildProcess = spawn(shell, [...args, command], {\n\t\t\tdetached: true,\n\t\t\tstdio: [\"ignore\", \"pipe\", \"pipe\"],\n\t\t});\n\n\t\t// Track sanitized output for truncation\n\t\tconst outputChunks: string[] = [];\n\t\tlet outputBytes = 0;\n\t\tconst maxOutputBytes = DEFAULT_MAX_BYTES * 2;\n\n\t\t// Temp file for large 
output\n\t\tlet tempFilePath: string | undefined;\n\t\tlet tempFileStream: WriteStream | undefined;\n\t\tlet totalBytes = 0;\n\n\t\t// Handle abort signal\n\t\tconst abortHandler = () => {\n\t\t\tif (child.pid) {\n\t\t\t\tkillProcessTree(child.pid);\n\t\t\t}\n\t\t};\n\n\t\tif (options?.signal) {\n\t\t\tif (options.signal.aborted) {\n\t\t\t\t// Already aborted, don't even start\n\t\t\t\tchild.kill();\n\t\t\t\tresolve({\n\t\t\t\t\toutput: \"\",\n\t\t\t\t\texitCode: null,\n\t\t\t\t\tcancelled: true,\n\t\t\t\t\ttruncated: false,\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\t\t\toptions.signal.addEventListener(\"abort\", abortHandler, { once: true });\n\t\t}\n\n\t\tconst handleData = (data: Buffer) => {\n\t\t\ttotalBytes += data.length;\n\n\t\t\t// Sanitize once at the source: strip ANSI, replace binary garbage, normalize newlines\n\t\t\tconst text = sanitizeBinaryOutput(stripAnsi(data.toString())).replace(/\\r/g, \"\");\n\n\t\t\t// Start writing to temp file if exceeds threshold\n\t\t\tif (totalBytes > DEFAULT_MAX_BYTES && !tempFilePath) {\n\t\t\t\tconst id = randomBytes(8).toString(\"hex\");\n\t\t\t\ttempFilePath = join(tmpdir(), `pi-bash-${id}.log`);\n\t\t\t\ttempFileStream = createWriteStream(tempFilePath);\n\t\t\t\t// Write already-buffered chunks to temp file\n\t\t\t\tfor (const chunk of outputChunks) {\n\t\t\t\t\ttempFileStream.write(chunk);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (tempFileStream) {\n\t\t\t\ttempFileStream.write(text);\n\t\t\t}\n\n\t\t\t// Keep rolling buffer of sanitized text\n\t\t\toutputChunks.push(text);\n\t\t\toutputBytes += text.length;\n\t\t\twhile (outputBytes > maxOutputBytes && outputChunks.length > 1) {\n\t\t\t\tconst removed = outputChunks.shift()!;\n\t\t\t\toutputBytes -= removed.length;\n\t\t\t}\n\n\t\t\t// Stream to callback if provided\n\t\t\tif (options?.onChunk) {\n\t\t\t\toptions.onChunk(text);\n\t\t\t}\n\t\t};\n\n\t\tchild.stdout?.on(\"data\", handleData);\n\t\tchild.stderr?.on(\"data\", handleData);\n\n\t\tchild.on(\"close\", (code) => {\n\t\t\t// Clean up abort listener\n\t\t\tif (options?.signal) {\n\t\t\t\toptions.signal.removeEventListener(\"abort\", abortHandler);\n\t\t\t}\n\n\t\t\tif (tempFileStream) {\n\t\t\t\ttempFileStream.end();\n\t\t\t}\n\n\t\t\t// Combine buffered chunks for truncation (already sanitized)\n\t\t\tconst fullOutput = outputChunks.join(\"\");\n\t\t\tconst truncationResult = truncateTail(fullOutput);\n\n\t\t\t// code === null means killed (cancelled)\n\t\t\tconst cancelled = code === null;\n\n\t\t\tresolve({\n\t\t\t\toutput: truncationResult.truncated ? truncationResult.content : fullOutput,\n\t\t\t\texitCode: code,\n\t\t\t\tcancelled,\n\t\t\t\ttruncated: truncationResult.truncated,\n\t\t\t\tfullOutputPath: tempFilePath,\n\t\t\t});\n\t\t});\n\n\t\tchild.on(\"error\", (err) => {\n\t\t\t// Clean up abort listener\n\t\t\tif (options?.signal) {\n\t\t\t\toptions.signal.removeEventListener(\"abort\", abortHandler);\n\t\t\t}\n\n\t\t\tif (tempFileStream) {\n\t\t\t\ttempFileStream.end();\n\t\t\t}\n\n\t\t\treject(err);\n\t\t});\n\t});\n}\n"]}
+ {"version":3,"file":"bash-executor.d.ts","sourceRoot":"","sources":["../../src/core/bash-executor.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAeH,MAAM,WAAW,mBAAmB;IACnC,+DAA+D;IAC/D,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,IAAI,CAAC;IAClC,mCAAmC;IACnC,MAAM,CAAC,EAAE,WAAW,CAAC;CACrB;AAED,MAAM,WAAW,UAAU;IAC1B,sEAAsE;IACtE,MAAM,EAAE,MAAM,CAAC;IACf,wDAAwD;IACxD,QAAQ,EAAE,MAAM,GAAG,SAAS,CAAC;IAC7B,mDAAmD;IACnD,SAAS,EAAE,OAAO,CAAC;IACnB,uCAAuC;IACvC,SAAS,EAAE,OAAO,CAAC;IACnB,yFAAyF;IACzF,cAAc,CAAC,EAAE,MAAM,CAAC;CACxB;AAMD;;;;;;;;;;;;;GAaG;AACH,wBAAgB,WAAW,CAAC,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,mBAAmB,GAAG,OAAO,CAAC,UAAU,CAAC,CAqH/F","sourcesContent":["/**\n * Bash command execution with streaming support and cancellation.\n *\n * This module provides a unified bash execution implementation used by:\n * - AgentSession.executeBash() for interactive and RPC modes\n * - Direct calls from modes that need bash execution\n */\n\nimport { randomBytes } from \"node:crypto\";\nimport { createWriteStream, type WriteStream } from \"node:fs\";\nimport { tmpdir } from \"node:os\";\nimport { join } from \"node:path\";\nimport { type ChildProcess, spawn } from \"child_process\";\nimport stripAnsi from \"strip-ansi\";\nimport { getShellConfig, killProcessTree, sanitizeBinaryOutput } from \"../utils/shell.js\";\nimport { DEFAULT_MAX_BYTES, truncateTail } from \"./tools/truncate.js\";\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface BashExecutorOptions {\n\t/** Callback for streaming output chunks (already sanitized) */\n\tonChunk?: (chunk: string) => void;\n\t/** AbortSignal for cancellation */\n\tsignal?: AbortSignal;\n}\n\nexport interface BashResult {\n\t/** Combined stdout + stderr output (sanitized, possibly truncated) */\n\toutput: string;\n\t/** Process exit code (undefined if killed/cancelled) */\n\texitCode: number | undefined;\n\t/** Whether the command was cancelled via signal */\n\tcancelled: boolean;\n\t/** Whether the output was truncated */\n\ttruncated: boolean;\n\t/** Path to temp file containing full output (if output exceeded truncation threshold) */\n\tfullOutputPath?: string;\n}\n\n// ============================================================================\n// Implementation\n// ============================================================================\n\n/**\n * Execute a bash command with optional streaming and cancellation support.\n *\n * Features:\n * - Streams sanitized output via onChunk callback\n * - Writes large output to temp file for later retrieval\n * - Supports cancellation via AbortSignal\n * - Sanitizes output (strips ANSI, removes binary garbage, normalizes newlines)\n * - Truncates output if it exceeds the default max bytes\n *\n * @param command - The bash command to execute\n * @param options - Optional streaming callback and abort signal\n * @returns Promise resolving to execution result\n */\nexport function executeBash(command: string, options?: BashExecutorOptions): Promise<BashResult> {\n\treturn new Promise((resolve, reject) => {\n\t\tconst { shell, args } = getShellConfig();\n\t\tconst child: ChildProcess = spawn(shell, [...args, command], {\n\t\t\tdetached: true,\n\t\t\tstdio: [\"ignore\", \"pipe\", \"pipe\"],\n\t\t});\n\n\t\t// Track sanitized output for truncation\n\t\tconst outputChunks: string[] = [];\n\t\tlet outputBytes = 0;\n\t\tconst maxOutputBytes = DEFAULT_MAX_BYTES * 2;\n\n\t\t// Temp file for large 
output\n\t\tlet tempFilePath: string | undefined;\n\t\tlet tempFileStream: WriteStream | undefined;\n\t\tlet totalBytes = 0;\n\n\t\t// Handle abort signal\n\t\tconst abortHandler = () => {\n\t\t\tif (child.pid) {\n\t\t\t\tkillProcessTree(child.pid);\n\t\t\t}\n\t\t};\n\n\t\tif (options?.signal) {\n\t\t\tif (options.signal.aborted) {\n\t\t\t\t// Already aborted, don't even start\n\t\t\t\tchild.kill();\n\t\t\t\tresolve({\n\t\t\t\t\toutput: \"\",\n\t\t\t\t\texitCode: undefined,\n\t\t\t\t\tcancelled: true,\n\t\t\t\t\ttruncated: false,\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\t\t\toptions.signal.addEventListener(\"abort\", abortHandler, { once: true });\n\t\t}\n\n\t\tconst handleData = (data: Buffer) => {\n\t\t\ttotalBytes += data.length;\n\n\t\t\t// Sanitize once at the source: strip ANSI, replace binary garbage, normalize newlines\n\t\t\tconst text = sanitizeBinaryOutput(stripAnsi(data.toString())).replace(/\\r/g, \"\");\n\n\t\t\t// Start writing to temp file if exceeds threshold\n\t\t\tif (totalBytes > DEFAULT_MAX_BYTES && !tempFilePath) {\n\t\t\t\tconst id = randomBytes(8).toString(\"hex\");\n\t\t\t\ttempFilePath = join(tmpdir(), `pi-bash-${id}.log`);\n\t\t\t\ttempFileStream = createWriteStream(tempFilePath);\n\t\t\t\t// Write already-buffered chunks to temp file\n\t\t\t\tfor (const chunk of outputChunks) {\n\t\t\t\t\ttempFileStream.write(chunk);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (tempFileStream) {\n\t\t\t\ttempFileStream.write(text);\n\t\t\t}\n\n\t\t\t// Keep rolling buffer of sanitized text\n\t\t\toutputChunks.push(text);\n\t\t\toutputBytes += text.length;\n\t\t\twhile (outputBytes > maxOutputBytes && outputChunks.length > 1) {\n\t\t\t\tconst removed = outputChunks.shift()!;\n\t\t\t\toutputBytes -= removed.length;\n\t\t\t}\n\n\t\t\t// Stream to callback if provided\n\t\t\tif (options?.onChunk) {\n\t\t\t\toptions.onChunk(text);\n\t\t\t}\n\t\t};\n\n\t\tchild.stdout?.on(\"data\", handleData);\n\t\tchild.stderr?.on(\"data\", handleData);\n\n\t\tchild.on(\"close\", (code) => {\n\t\t\t// Clean up abort listener\n\t\t\tif (options?.signal) {\n\t\t\t\toptions.signal.removeEventListener(\"abort\", abortHandler);\n\t\t\t}\n\n\t\t\tif (tempFileStream) {\n\t\t\t\ttempFileStream.end();\n\t\t\t}\n\n\t\t\t// Combine buffered chunks for truncation (already sanitized)\n\t\t\tconst fullOutput = outputChunks.join(\"\");\n\t\t\tconst truncationResult = truncateTail(fullOutput);\n\n\t\t\t// code === null means killed (cancelled)\n\t\t\tconst cancelled = code === null;\n\n\t\t\tresolve({\n\t\t\t\toutput: truncationResult.truncated ? truncationResult.content : fullOutput,\n\t\t\t\texitCode: cancelled ? undefined : code,\n\t\t\t\tcancelled,\n\t\t\t\ttruncated: truncationResult.truncated,\n\t\t\t\tfullOutputPath: tempFilePath,\n\t\t\t});\n\t\t});\n\n\t\tchild.on(\"error\", (err) => {\n\t\t\t// Clean up abort listener\n\t\t\tif (options?.signal) {\n\t\t\t\toptions.signal.removeEventListener(\"abort\", abortHandler);\n\t\t\t}\n\n\t\t\tif (tempFileStream) {\n\t\t\t\ttempFileStream.end();\n\t\t\t}\n\n\t\t\treject(err);\n\t\t});\n\t});\n}\n"]}
@@ -57,7 +57,7 @@ export function executeBash(command, options) {
  child.kill();
  resolve({
  output: "",
- exitCode: null,
+ exitCode: undefined,
  cancelled: true,
  truncated: false,
  });
@@ -111,7 +111,7 @@ export function executeBash(command, options) {
  const cancelled = code === null;
  resolve({
  output: truncationResult.truncated ? truncationResult.content : fullOutput,
- exitCode: code,
+ exitCode: cancelled ? undefined : code,
  cancelled,
  truncated: truncationResult.truncated,
  fullOutputPath: tempFilePath,
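Together with the hunk above, cancellation now consistently maps to exitCode: undefined (the child's close code of null is no longer passed through). A short sketch of aborting a run via AbortSignal, using the BashExecutorOptions shape shown earlier; the import path is again an assumption:

```ts
import { executeBash } from "@mariozechner/pi-coding-agent"; // assumed root re-export

async function demoCancellation(): Promise<void> {
	const controller = new AbortController();
	// Abort the command after one second.
	setTimeout(() => controller.abort(), 1000);

	const result = await executeBash("sleep 30 && echo done", { signal: controller.signal });
	// In 0.31.x a cancelled run reports `cancelled: true` and `exitCode: undefined`
	// (previously `exitCode: null`).
	console.log(result.cancelled, result.exitCode);
}

void demoCancellation();
```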
@@ -1 +1 @@
- {"version":3,"file":"bash-executor.js","sourceRoot":"","sources":["../../src/core/bash-executor.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAC1C,OAAO,EAAE,iBAAiB,EAAoB,MAAM,SAAS,CAAC;AAC9D,OAAO,EAAE,MAAM,EAAE,MAAM,SAAS,CAAC;AACjC,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAqB,KAAK,EAAE,MAAM,eAAe,CAAC;AACzD,OAAO,SAAS,MAAM,YAAY,CAAC;AACnC,OAAO,EAAE,cAAc,EAAE,eAAe,EAAE,oBAAoB,EAAE,MAAM,mBAAmB,CAAC;AAC1F,OAAO,EAAE,iBAAiB,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAC;AA0BtE,+EAA+E;AAC/E,iBAAiB;AACjB,+EAA+E;AAE/E;;;;;;;;;;;;;GAaG;AACH,MAAM,UAAU,WAAW,CAAC,OAAe,EAAE,OAA6B,EAAuB;IAChG,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE,CAAC;QACvC,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,GAAG,cAAc,EAAE,CAAC;QACzC,MAAM,KAAK,GAAiB,KAAK,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,EAAE,OAAO,CAAC,EAAE;YAC5D,QAAQ,EAAE,IAAI;YACd,KAAK,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC;SACjC,CAAC,CAAC;QAEH,wCAAwC;QACxC,MAAM,YAAY,GAAa,EAAE,CAAC;QAClC,IAAI,WAAW,GAAG,CAAC,CAAC;QACpB,MAAM,cAAc,GAAG,iBAAiB,GAAG,CAAC,CAAC;QAE7C,6BAA6B;QAC7B,IAAI,YAAgC,CAAC;QACrC,IAAI,cAAuC,CAAC;QAC5C,IAAI,UAAU,GAAG,CAAC,CAAC;QAEnB,sBAAsB;QACtB,MAAM,YAAY,GAAG,GAAG,EAAE,CAAC;YAC1B,IAAI,KAAK,CAAC,GAAG,EAAE,CAAC;gBACf,eAAe,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YAC5B,CAAC;QAAA,CACD,CAAC;QAEF,IAAI,OAAO,EAAE,MAAM,EAAE,CAAC;YACrB,IAAI,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;gBAC5B,oCAAoC;gBACpC,KAAK,CAAC,IAAI,EAAE,CAAC;gBACb,OAAO,CAAC;oBACP,MAAM,EAAE,EAAE;oBACV,QAAQ,EAAE,IAAI;oBACd,SAAS,EAAE,IAAI;oBACf,SAAS,EAAE,KAAK;iBAChB,CAAC,CAAC;gBACH,OAAO;YACR,CAAC;YACD,OAAO,CAAC,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,YAAY,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;QACxE,CAAC;QAED,MAAM,UAAU,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC;YACpC,UAAU,IAAI,IAAI,CAAC,MAAM,CAAC;YAE1B,sFAAsF;YACtF,MAAM,IAAI,GAAG,oBAAoB,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;YAEjF,kDAAkD;YAClD,IAAI,UAAU,GAAG,iBAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;gBACrD,MAAM,EAAE,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;gBAC1C,YAAY,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,WAAW,EAAE,MAAM,CAAC,CAAC;gBACnD,cAAc,GAAG,iBAAiB,CAAC,YAAY,CAAC,CAAC;gBACjD,6CAA6C;gBAC7C,KAAK,MAAM,KAAK,IAAI,YAAY,EAAE,CAAC;oBAClC,cAAc,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;gBAC7B,CAAC;YACF,CAAC;YAED,IAAI,cAAc,EAAE,CAAC;gBACpB,cAAc,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;YAC5B,CAAC;YAED,wCAAwC;YACxC,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACxB,WAAW,IAAI,IAAI,CAAC,MAAM,CAAC;YAC3B,OAAO,WAAW,GAAG,cAAc,IAAI,YAAY,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAChE,MAAM,OAAO,GAAG,YAAY,CAAC,KAAK,EAAG,CAAC;gBACtC,WAAW,IAAI,OAAO,CAAC,MAAM,CAAC;YAC/B,CAAC;YAED,iCAAiC;YACjC,IAAI,OAAO,EAAE,OAAO,EAAE,CAAC;gBACtB,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;YACvB,CAAC;QAAA,CACD,CAAC;QAEF,KAAK,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;QACrC,KAAK,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;QAErC,KAAK,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,IAAI,EAAE,EAAE,CAAC;YAC3B,0BAA0B;YAC1B,IAAI,OAAO,EAAE,MAAM,EAAE,CAAC;gBACrB,OAAO,CAAC,MAAM,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;YAC3D,CAAC;YAED,IAAI,cAAc,EAAE,CAAC;gBACpB,cAAc,CAAC,GAAG,EAAE,CAAC;YACtB,CAAC;YAED,6DAA6D;YAC7D,MAAM,UAAU,GAAG,YAAY,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;YACzC,MAAM,gBAAgB,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC;YAElD,yCAAyC;YACzC,MAAM,SAAS,GAAG,IAAI,KAAK,IAAI,CAAC;YAEhC,OAAO,CAAC;gBACP,MAAM,EAAE,gBAAgB,CAAC,SAAS,CAAC,CAAC,CAAC,gBAAgB,CAAC,OAAO,CAAC,CAAC,CAAC,UAAU;gBAC1E,QAAQ,EAAE,IAAI;gBACd,SAAS;gBACT,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,cAAc,EAAE,YAAY;aAC5B,CAAC,CAAC;QAAA,CACH,CAAC,CAAC;QAEH,KAAK,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC;YAC1B,0BAA0B;YAC1B,IAAI,OAAO,EAAE,MAAM,EAAE,CAAC;gBACrB,OAAO,CAAC,MAAM,CAAC,mBAAmB,CAAC,OA
AO,EAAE,YAAY,CAAC,CAAC;YAC3D,CAAC;YAED,IAAI,cAAc,EAAE,CAAC;gBACpB,cAAc,CAAC,GAAG,EAAE,CAAC;YACtB,CAAC;YAED,MAAM,CAAC,GAAG,CAAC,CAAC;QAAA,CACZ,CAAC,CAAC;IAAA,CACH,CAAC,CAAC;AAAA,CACH","sourcesContent":["/**\n * Bash command execution with streaming support and cancellation.\n *\n * This module provides a unified bash execution implementation used by:\n * - AgentSession.executeBash() for interactive and RPC modes\n * - Direct calls from modes that need bash execution\n */\n\nimport { randomBytes } from \"node:crypto\";\nimport { createWriteStream, type WriteStream } from \"node:fs\";\nimport { tmpdir } from \"node:os\";\nimport { join } from \"node:path\";\nimport { type ChildProcess, spawn } from \"child_process\";\nimport stripAnsi from \"strip-ansi\";\nimport { getShellConfig, killProcessTree, sanitizeBinaryOutput } from \"../utils/shell.js\";\nimport { DEFAULT_MAX_BYTES, truncateTail } from \"./tools/truncate.js\";\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface BashExecutorOptions {\n\t/** Callback for streaming output chunks (already sanitized) */\n\tonChunk?: (chunk: string) => void;\n\t/** AbortSignal for cancellation */\n\tsignal?: AbortSignal;\n}\n\nexport interface BashResult {\n\t/** Combined stdout + stderr output (sanitized, possibly truncated) */\n\toutput: string;\n\t/** Process exit code (null if killed/cancelled) */\n\texitCode: number | null;\n\t/** Whether the command was cancelled via signal */\n\tcancelled: boolean;\n\t/** Whether the output was truncated */\n\ttruncated: boolean;\n\t/** Path to temp file containing full output (if output exceeded truncation threshold) */\n\tfullOutputPath?: string;\n}\n\n// ============================================================================\n// Implementation\n// ============================================================================\n\n/**\n * Execute a bash command with optional streaming and cancellation support.\n *\n * Features:\n * - Streams sanitized output via onChunk callback\n * - Writes large output to temp file for later retrieval\n * - Supports cancellation via AbortSignal\n * - Sanitizes output (strips ANSI, removes binary garbage, normalizes newlines)\n * - Truncates output if it exceeds the default max bytes\n *\n * @param command - The bash command to execute\n * @param options - Optional streaming callback and abort signal\n * @returns Promise resolving to execution result\n */\nexport function executeBash(command: string, options?: BashExecutorOptions): Promise<BashResult> {\n\treturn new Promise((resolve, reject) => {\n\t\tconst { shell, args } = getShellConfig();\n\t\tconst child: ChildProcess = spawn(shell, [...args, command], {\n\t\t\tdetached: true,\n\t\t\tstdio: [\"ignore\", \"pipe\", \"pipe\"],\n\t\t});\n\n\t\t// Track sanitized output for truncation\n\t\tconst outputChunks: string[] = [];\n\t\tlet outputBytes = 0;\n\t\tconst maxOutputBytes = DEFAULT_MAX_BYTES * 2;\n\n\t\t// Temp file for large output\n\t\tlet tempFilePath: string | undefined;\n\t\tlet tempFileStream: WriteStream | undefined;\n\t\tlet totalBytes = 0;\n\n\t\t// Handle abort signal\n\t\tconst abortHandler = () => {\n\t\t\tif (child.pid) {\n\t\t\t\tkillProcessTree(child.pid);\n\t\t\t}\n\t\t};\n\n\t\tif (options?.signal) {\n\t\t\tif (options.signal.aborted) {\n\t\t\t\t// Already aborted, don't even start\n\t\t\t\tchild.kill();\n\t\t\t\tresolve({\n\t\t\t\t\toutput: \"\",\n\t\t\t\t\texitCode: 
null,\n\t\t\t\t\tcancelled: true,\n\t\t\t\t\ttruncated: false,\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\t\t\toptions.signal.addEventListener(\"abort\", abortHandler, { once: true });\n\t\t}\n\n\t\tconst handleData = (data: Buffer) => {\n\t\t\ttotalBytes += data.length;\n\n\t\t\t// Sanitize once at the source: strip ANSI, replace binary garbage, normalize newlines\n\t\t\tconst text = sanitizeBinaryOutput(stripAnsi(data.toString())).replace(/\\r/g, \"\");\n\n\t\t\t// Start writing to temp file if exceeds threshold\n\t\t\tif (totalBytes > DEFAULT_MAX_BYTES && !tempFilePath) {\n\t\t\t\tconst id = randomBytes(8).toString(\"hex\");\n\t\t\t\ttempFilePath = join(tmpdir(), `pi-bash-${id}.log`);\n\t\t\t\ttempFileStream = createWriteStream(tempFilePath);\n\t\t\t\t// Write already-buffered chunks to temp file\n\t\t\t\tfor (const chunk of outputChunks) {\n\t\t\t\t\ttempFileStream.write(chunk);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (tempFileStream) {\n\t\t\t\ttempFileStream.write(text);\n\t\t\t}\n\n\t\t\t// Keep rolling buffer of sanitized text\n\t\t\toutputChunks.push(text);\n\t\t\toutputBytes += text.length;\n\t\t\twhile (outputBytes > maxOutputBytes && outputChunks.length > 1) {\n\t\t\t\tconst removed = outputChunks.shift()!;\n\t\t\t\toutputBytes -= removed.length;\n\t\t\t}\n\n\t\t\t// Stream to callback if provided\n\t\t\tif (options?.onChunk) {\n\t\t\t\toptions.onChunk(text);\n\t\t\t}\n\t\t};\n\n\t\tchild.stdout?.on(\"data\", handleData);\n\t\tchild.stderr?.on(\"data\", handleData);\n\n\t\tchild.on(\"close\", (code) => {\n\t\t\t// Clean up abort listener\n\t\t\tif (options?.signal) {\n\t\t\t\toptions.signal.removeEventListener(\"abort\", abortHandler);\n\t\t\t}\n\n\t\t\tif (tempFileStream) {\n\t\t\t\ttempFileStream.end();\n\t\t\t}\n\n\t\t\t// Combine buffered chunks for truncation (already sanitized)\n\t\t\tconst fullOutput = outputChunks.join(\"\");\n\t\t\tconst truncationResult = truncateTail(fullOutput);\n\n\t\t\t// code === null means killed (cancelled)\n\t\t\tconst cancelled = code === null;\n\n\t\t\tresolve({\n\t\t\t\toutput: truncationResult.truncated ? truncationResult.content : fullOutput,\n\t\t\t\texitCode: code,\n\t\t\t\tcancelled,\n\t\t\t\ttruncated: truncationResult.truncated,\n\t\t\t\tfullOutputPath: tempFilePath,\n\t\t\t});\n\t\t});\n\n\t\tchild.on(\"error\", (err) => {\n\t\t\t// Clean up abort listener\n\t\t\tif (options?.signal) {\n\t\t\t\toptions.signal.removeEventListener(\"abort\", abortHandler);\n\t\t\t}\n\n\t\t\tif (tempFileStream) {\n\t\t\t\ttempFileStream.end();\n\t\t\t}\n\n\t\t\treject(err);\n\t\t});\n\t});\n}\n"]}
+ {"version":3,"file":"bash-executor.js","sourceRoot":"","sources":["../../src/core/bash-executor.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAC1C,OAAO,EAAE,iBAAiB,EAAoB,MAAM,SAAS,CAAC;AAC9D,OAAO,EAAE,MAAM,EAAE,MAAM,SAAS,CAAC;AACjC,OAAO,EAAE,IAAI,EAAE,MAAM,WAAW,CAAC;AACjC,OAAO,EAAqB,KAAK,EAAE,MAAM,eAAe,CAAC;AACzD,OAAO,SAAS,MAAM,YAAY,CAAC;AACnC,OAAO,EAAE,cAAc,EAAE,eAAe,EAAE,oBAAoB,EAAE,MAAM,mBAAmB,CAAC;AAC1F,OAAO,EAAE,iBAAiB,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAC;AA0BtE,+EAA+E;AAC/E,iBAAiB;AACjB,+EAA+E;AAE/E;;;;;;;;;;;;;GAaG;AACH,MAAM,UAAU,WAAW,CAAC,OAAe,EAAE,OAA6B,EAAuB;IAChG,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE,CAAC;QACvC,MAAM,EAAE,KAAK,EAAE,IAAI,EAAE,GAAG,cAAc,EAAE,CAAC;QACzC,MAAM,KAAK,GAAiB,KAAK,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,EAAE,OAAO,CAAC,EAAE;YAC5D,QAAQ,EAAE,IAAI;YACd,KAAK,EAAE,CAAC,QAAQ,EAAE,MAAM,EAAE,MAAM,CAAC;SACjC,CAAC,CAAC;QAEH,wCAAwC;QACxC,MAAM,YAAY,GAAa,EAAE,CAAC;QAClC,IAAI,WAAW,GAAG,CAAC,CAAC;QACpB,MAAM,cAAc,GAAG,iBAAiB,GAAG,CAAC,CAAC;QAE7C,6BAA6B;QAC7B,IAAI,YAAgC,CAAC;QACrC,IAAI,cAAuC,CAAC;QAC5C,IAAI,UAAU,GAAG,CAAC,CAAC;QAEnB,sBAAsB;QACtB,MAAM,YAAY,GAAG,GAAG,EAAE,CAAC;YAC1B,IAAI,KAAK,CAAC,GAAG,EAAE,CAAC;gBACf,eAAe,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YAC5B,CAAC;QAAA,CACD,CAAC;QAEF,IAAI,OAAO,EAAE,MAAM,EAAE,CAAC;YACrB,IAAI,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,CAAC;gBAC5B,oCAAoC;gBACpC,KAAK,CAAC,IAAI,EAAE,CAAC;gBACb,OAAO,CAAC;oBACP,MAAM,EAAE,EAAE;oBACV,QAAQ,EAAE,SAAS;oBACnB,SAAS,EAAE,IAAI;oBACf,SAAS,EAAE,KAAK;iBAChB,CAAC,CAAC;gBACH,OAAO;YACR,CAAC;YACD,OAAO,CAAC,MAAM,CAAC,gBAAgB,CAAC,OAAO,EAAE,YAAY,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC,CAAC;QACxE,CAAC;QAED,MAAM,UAAU,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC;YACpC,UAAU,IAAI,IAAI,CAAC,MAAM,CAAC;YAE1B,sFAAsF;YACtF,MAAM,IAAI,GAAG,oBAAoB,CAAC,SAAS,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;YAEjF,kDAAkD;YAClD,IAAI,UAAU,GAAG,iBAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;gBACrD,MAAM,EAAE,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;gBAC1C,YAAY,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,WAAW,EAAE,MAAM,CAAC,CAAC;gBACnD,cAAc,GAAG,iBAAiB,CAAC,YAAY,CAAC,CAAC;gBACjD,6CAA6C;gBAC7C,KAAK,MAAM,KAAK,IAAI,YAAY,EAAE,CAAC;oBAClC,cAAc,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;gBAC7B,CAAC;YACF,CAAC;YAED,IAAI,cAAc,EAAE,CAAC;gBACpB,cAAc,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;YAC5B,CAAC;YAED,wCAAwC;YACxC,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACxB,WAAW,IAAI,IAAI,CAAC,MAAM,CAAC;YAC3B,OAAO,WAAW,GAAG,cAAc,IAAI,YAAY,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAChE,MAAM,OAAO,GAAG,YAAY,CAAC,KAAK,EAAG,CAAC;gBACtC,WAAW,IAAI,OAAO,CAAC,MAAM,CAAC;YAC/B,CAAC;YAED,iCAAiC;YACjC,IAAI,OAAO,EAAE,OAAO,EAAE,CAAC;gBACtB,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;YACvB,CAAC;QAAA,CACD,CAAC;QAEF,KAAK,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;QACrC,KAAK,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC,CAAC;QAErC,KAAK,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,IAAI,EAAE,EAAE,CAAC;YAC3B,0BAA0B;YAC1B,IAAI,OAAO,EAAE,MAAM,EAAE,CAAC;gBACrB,OAAO,CAAC,MAAM,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;YAC3D,CAAC;YAED,IAAI,cAAc,EAAE,CAAC;gBACpB,cAAc,CAAC,GAAG,EAAE,CAAC;YACtB,CAAC;YAED,6DAA6D;YAC7D,MAAM,UAAU,GAAG,YAAY,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;YACzC,MAAM,gBAAgB,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC;YAElD,yCAAyC;YACzC,MAAM,SAAS,GAAG,IAAI,KAAK,IAAI,CAAC;YAEhC,OAAO,CAAC;gBACP,MAAM,EAAE,gBAAgB,CAAC,SAAS,CAAC,CAAC,CAAC,gBAAgB,CAAC,OAAO,CAAC,CAAC,CAAC,UAAU;gBAC1E,QAAQ,EAAE,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI;gBACtC,SAAS;gBACT,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,cAAc,EAAE,YAAY;aAC5B,CAAC,CAAC;QAAA,CACH,CAAC,CAAC;QAEH,KAAK,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC;YAC1B,0BAA0B;YAC1B,IAAI,OAAO,EAAE,MAAM,EAAE,CAAC
;gBACrB,OAAO,CAAC,MAAM,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;YAC3D,CAAC;YAED,IAAI,cAAc,EAAE,CAAC;gBACpB,cAAc,CAAC,GAAG,EAAE,CAAC;YACtB,CAAC;YAED,MAAM,CAAC,GAAG,CAAC,CAAC;QAAA,CACZ,CAAC,CAAC;IAAA,CACH,CAAC,CAAC;AAAA,CACH","sourcesContent":["/**\n * Bash command execution with streaming support and cancellation.\n *\n * This module provides a unified bash execution implementation used by:\n * - AgentSession.executeBash() for interactive and RPC modes\n * - Direct calls from modes that need bash execution\n */\n\nimport { randomBytes } from \"node:crypto\";\nimport { createWriteStream, type WriteStream } from \"node:fs\";\nimport { tmpdir } from \"node:os\";\nimport { join } from \"node:path\";\nimport { type ChildProcess, spawn } from \"child_process\";\nimport stripAnsi from \"strip-ansi\";\nimport { getShellConfig, killProcessTree, sanitizeBinaryOutput } from \"../utils/shell.js\";\nimport { DEFAULT_MAX_BYTES, truncateTail } from \"./tools/truncate.js\";\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface BashExecutorOptions {\n\t/** Callback for streaming output chunks (already sanitized) */\n\tonChunk?: (chunk: string) => void;\n\t/** AbortSignal for cancellation */\n\tsignal?: AbortSignal;\n}\n\nexport interface BashResult {\n\t/** Combined stdout + stderr output (sanitized, possibly truncated) */\n\toutput: string;\n\t/** Process exit code (undefined if killed/cancelled) */\n\texitCode: number | undefined;\n\t/** Whether the command was cancelled via signal */\n\tcancelled: boolean;\n\t/** Whether the output was truncated */\n\ttruncated: boolean;\n\t/** Path to temp file containing full output (if output exceeded truncation threshold) */\n\tfullOutputPath?: string;\n}\n\n// ============================================================================\n// Implementation\n// ============================================================================\n\n/**\n * Execute a bash command with optional streaming and cancellation support.\n *\n * Features:\n * - Streams sanitized output via onChunk callback\n * - Writes large output to temp file for later retrieval\n * - Supports cancellation via AbortSignal\n * - Sanitizes output (strips ANSI, removes binary garbage, normalizes newlines)\n * - Truncates output if it exceeds the default max bytes\n *\n * @param command - The bash command to execute\n * @param options - Optional streaming callback and abort signal\n * @returns Promise resolving to execution result\n */\nexport function executeBash(command: string, options?: BashExecutorOptions): Promise<BashResult> {\n\treturn new Promise((resolve, reject) => {\n\t\tconst { shell, args } = getShellConfig();\n\t\tconst child: ChildProcess = spawn(shell, [...args, command], {\n\t\t\tdetached: true,\n\t\t\tstdio: [\"ignore\", \"pipe\", \"pipe\"],\n\t\t});\n\n\t\t// Track sanitized output for truncation\n\t\tconst outputChunks: string[] = [];\n\t\tlet outputBytes = 0;\n\t\tconst maxOutputBytes = DEFAULT_MAX_BYTES * 2;\n\n\t\t// Temp file for large output\n\t\tlet tempFilePath: string | undefined;\n\t\tlet tempFileStream: WriteStream | undefined;\n\t\tlet totalBytes = 0;\n\n\t\t// Handle abort signal\n\t\tconst abortHandler = () => {\n\t\t\tif (child.pid) {\n\t\t\t\tkillProcessTree(child.pid);\n\t\t\t}\n\t\t};\n\n\t\tif (options?.signal) {\n\t\t\tif (options.signal.aborted) {\n\t\t\t\t// Already aborted, don't even 
start\n\t\t\t\tchild.kill();\n\t\t\t\tresolve({\n\t\t\t\t\toutput: \"\",\n\t\t\t\t\texitCode: undefined,\n\t\t\t\t\tcancelled: true,\n\t\t\t\t\ttruncated: false,\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\t\t\toptions.signal.addEventListener(\"abort\", abortHandler, { once: true });\n\t\t}\n\n\t\tconst handleData = (data: Buffer) => {\n\t\t\ttotalBytes += data.length;\n\n\t\t\t// Sanitize once at the source: strip ANSI, replace binary garbage, normalize newlines\n\t\t\tconst text = sanitizeBinaryOutput(stripAnsi(data.toString())).replace(/\\r/g, \"\");\n\n\t\t\t// Start writing to temp file if exceeds threshold\n\t\t\tif (totalBytes > DEFAULT_MAX_BYTES && !tempFilePath) {\n\t\t\t\tconst id = randomBytes(8).toString(\"hex\");\n\t\t\t\ttempFilePath = join(tmpdir(), `pi-bash-${id}.log`);\n\t\t\t\ttempFileStream = createWriteStream(tempFilePath);\n\t\t\t\t// Write already-buffered chunks to temp file\n\t\t\t\tfor (const chunk of outputChunks) {\n\t\t\t\t\ttempFileStream.write(chunk);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tif (tempFileStream) {\n\t\t\t\ttempFileStream.write(text);\n\t\t\t}\n\n\t\t\t// Keep rolling buffer of sanitized text\n\t\t\toutputChunks.push(text);\n\t\t\toutputBytes += text.length;\n\t\t\twhile (outputBytes > maxOutputBytes && outputChunks.length > 1) {\n\t\t\t\tconst removed = outputChunks.shift()!;\n\t\t\t\toutputBytes -= removed.length;\n\t\t\t}\n\n\t\t\t// Stream to callback if provided\n\t\t\tif (options?.onChunk) {\n\t\t\t\toptions.onChunk(text);\n\t\t\t}\n\t\t};\n\n\t\tchild.stdout?.on(\"data\", handleData);\n\t\tchild.stderr?.on(\"data\", handleData);\n\n\t\tchild.on(\"close\", (code) => {\n\t\t\t// Clean up abort listener\n\t\t\tif (options?.signal) {\n\t\t\t\toptions.signal.removeEventListener(\"abort\", abortHandler);\n\t\t\t}\n\n\t\t\tif (tempFileStream) {\n\t\t\t\ttempFileStream.end();\n\t\t\t}\n\n\t\t\t// Combine buffered chunks for truncation (already sanitized)\n\t\t\tconst fullOutput = outputChunks.join(\"\");\n\t\t\tconst truncationResult = truncateTail(fullOutput);\n\n\t\t\t// code === null means killed (cancelled)\n\t\t\tconst cancelled = code === null;\n\n\t\t\tresolve({\n\t\t\t\toutput: truncationResult.truncated ? truncationResult.content : fullOutput,\n\t\t\t\texitCode: cancelled ? undefined : code,\n\t\t\t\tcancelled,\n\t\t\t\ttruncated: truncationResult.truncated,\n\t\t\t\tfullOutputPath: tempFilePath,\n\t\t\t});\n\t\t});\n\n\t\tchild.on(\"error\", (err) => {\n\t\t\t// Clean up abort listener\n\t\t\tif (options?.signal) {\n\t\t\t\toptions.signal.removeEventListener(\"abort\", abortHandler);\n\t\t\t}\n\n\t\t\tif (tempFileStream) {\n\t\t\t\ttempFileStream.end();\n\t\t\t}\n\n\t\t\treject(err);\n\t\t});\n\t});\n}\n"]}
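
The bash-executor source embedded in the map above shows the reworked executeBash(command, options?) API: output is sanitized and streamed through onChunk, large output spills to a temp file, and a cancelled command now reports exitCode as undefined (previously null) together with cancelled: true. A minimal usage sketch, assuming the module is imported from the package's dist/core/bash-executor.js build output; the import path and the sample command are illustrative and not taken from the diff:

import { executeBash } from "@mariozechner/pi-coding-agent/dist/core/bash-executor.js"; // import path is an assumption

async function demo() {
	// Abort the command if it takes longer than 10 seconds.
	const controller = new AbortController();
	const timer = setTimeout(() => controller.abort(), 10_000);

	const result = await executeBash("ls -la", {
		signal: controller.signal,
		// Chunks arrive already stripped of ANSI escapes and binary garbage.
		onChunk: (chunk) => process.stdout.write(chunk),
	});
	clearTimeout(timer);

	if (result.cancelled) {
		console.log("cancelled; exitCode is undefined");
	} else {
		console.log(`exit code: ${result.exitCode}, truncated: ${result.truncated}`);
		// When output exceeded the truncation threshold, the full log is kept on disk.
		if (result.fullOutputPath) console.log(`full output at ${result.fullOutputPath}`);
	}
}

demo();
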
@@ -0,0 +1,84 @@
+ /**
+ * Branch summarization for tree navigation.
+ *
+ * When navigating to a different point in the session tree, this generates
+ * a summary of the branch being left so context isn't lost.
+ */
+ import type { AgentMessage } from "@mariozechner/pi-agent-core";
+ import type { Model } from "@mariozechner/pi-ai";
+ import type { ReadonlySessionManager, SessionEntry } from "../session-manager.js";
+ import { type FileOperations } from "./utils.js";
+ export interface BranchSummaryResult {
+ summary?: string;
+ readFiles?: string[];
+ modifiedFiles?: string[];
+ aborted?: boolean;
+ error?: string;
+ }
+ /** Details stored in BranchSummaryEntry.details for file tracking */
+ export interface BranchSummaryDetails {
+ readFiles: string[];
+ modifiedFiles: string[];
+ }
+ export type { FileOperations } from "./utils.js";
+ export interface BranchPreparation {
+ /** Messages extracted for summarization, in chronological order */
+ messages: AgentMessage[];
+ /** File operations extracted from tool calls */
+ fileOps: FileOperations;
+ /** Total estimated tokens in messages */
+ totalTokens: number;
+ }
+ export interface CollectEntriesResult {
+ /** Entries to summarize, in chronological order */
+ entries: SessionEntry[];
+ /** Common ancestor between old and new position, if any */
+ commonAncestorId: string | null;
+ }
+ export interface GenerateBranchSummaryOptions {
+ /** Model to use for summarization */
+ model: Model<any>;
+ /** API key for the model */
+ apiKey: string;
+ /** Abort signal for cancellation */
+ signal: AbortSignal;
+ /** Optional custom instructions for summarization */
+ customInstructions?: string;
+ /** Tokens reserved for prompt + LLM response (default 16384) */
+ reserveTokens?: number;
+ }
+ /**
+ * Collect entries that should be summarized when navigating from one position to another.
+ *
+ * Walks from oldLeafId back to the common ancestor with targetId, collecting entries
+ * along the way. Does NOT stop at compaction boundaries - those are included and their
+ * summaries become context.
+ *
+ * @param session - Session manager (read-only access)
+ * @param oldLeafId - Current position (where we're navigating from)
+ * @param targetId - Target position (where we're navigating to)
+ * @returns Entries to summarize and the common ancestor
+ */
+ export declare function collectEntriesForBranchSummary(session: ReadonlySessionManager, oldLeafId: string | null, targetId: string): CollectEntriesResult;
+ /**
+ * Prepare entries for summarization with token budget.
+ *
+ * Walks entries from NEWEST to OLDEST, adding messages until we hit the token budget.
+ * This ensures we keep the most recent context when the branch is too long.
+ *
+ * Also collects file operations from:
+ * - Tool calls in assistant messages
+ * - Existing branch_summary entries' details (for cumulative tracking)
+ *
+ * @param entries - Entries in chronological order
+ * @param tokenBudget - Maximum tokens to include (0 = no limit)
+ */
+ export declare function prepareBranchEntries(entries: SessionEntry[], tokenBudget?: number): BranchPreparation;
+ /**
+ * Generate a summary of abandoned branch entries.
+ *
+ * @param entries - Session entries to summarize (chronological order)
+ * @param options - Generation options
+ */
+ export declare function generateBranchSummary(entries: SessionEntry[], options: GenerateBranchSummaryOptions): Promise<BranchSummaryResult>;
+ //# sourceMappingURL=branch-summarization.d.ts.map
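
These declarations describe the first half of the branch-summarization flow: collect the entries of the branch being abandoned, then pack them into a token budget while accumulating file operations. A sketch of that collection step, assuming the host application already holds a session manager and the two positions involved in the navigation (the declare const placeholders and the import subpath are assumptions, not part of the diff); generating the summary itself is sketched after the implementation hunk further down:

import {
	collectEntriesForBranchSummary,
	prepareBranchEntries,
} from "@mariozechner/pi-coding-agent/dist/core/compaction/branch-summarization.js"; // subpath is an assumption

// Placeholders for values the caller already has:
declare const session: any; // ReadonlySessionManager
declare const oldLeafId: string | null; // leaf we are navigating away from
declare const targetId: string; // entry we are navigating to

// Entries between the old leaf and the common ancestor with the target, oldest first.
const { entries, commonAncestorId } = collectEntriesForBranchSummary(session, oldLeafId, targetId);
console.log(`summarizing ${entries.length} entries above ancestor ${commonAncestorId}`);

// Preview what would fit into a 100k-token budget and which files the branch touched.
const { messages, fileOps, totalTokens } = prepareBranchEntries(entries, 100_000);
console.log(`${messages.length} messages, ~${totalTokens} tokens`, fileOps);
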
@@ -0,0 +1 @@
+ {"version":3,"file":"branch-summarization.d.ts","sourceRoot":"","sources":["../../../src/core/compaction/branch-summarization.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAEH,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,6BAA6B,CAAC;AAChE,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,qBAAqB,CAAC;AAQjD,OAAO,KAAK,EAAE,sBAAsB,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAElF,OAAO,EAIN,KAAK,cAAc,EAInB,MAAM,YAAY,CAAC;AAMpB,MAAM,WAAW,mBAAmB;IACnC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,SAAS,CAAC,EAAE,MAAM,EAAE,CAAC;IACrB,aAAa,CAAC,EAAE,MAAM,EAAE,CAAC;IACzB,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,KAAK,CAAC,EAAE,MAAM,CAAC;CACf;AAED,qEAAqE;AACrE,MAAM,WAAW,oBAAoB;IACpC,SAAS,EAAE,MAAM,EAAE,CAAC;IACpB,aAAa,EAAE,MAAM,EAAE,CAAC;CACxB;AAED,YAAY,EAAE,cAAc,EAAE,MAAM,YAAY,CAAC;AAEjD,MAAM,WAAW,iBAAiB;IACjC,mEAAmE;IACnE,QAAQ,EAAE,YAAY,EAAE,CAAC;IACzB,gDAAgD;IAChD,OAAO,EAAE,cAAc,CAAC;IACxB,yCAAyC;IACzC,WAAW,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,oBAAoB;IACpC,mDAAmD;IACnD,OAAO,EAAE,YAAY,EAAE,CAAC;IACxB,2DAA2D;IAC3D,gBAAgB,EAAE,MAAM,GAAG,IAAI,CAAC;CAChC;AAED,MAAM,WAAW,4BAA4B;IAC5C,qCAAqC;IACrC,KAAK,EAAE,KAAK,CAAC,GAAG,CAAC,CAAC;IAClB,4BAA4B;IAC5B,MAAM,EAAE,MAAM,CAAC;IACf,oCAAoC;IACpC,MAAM,EAAE,WAAW,CAAC;IACpB,qDAAqD;IACrD,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,gEAAgE;IAChE,aAAa,CAAC,EAAE,MAAM,CAAC;CACvB;AAMD;;;;;;;;;;;GAWG;AACH,wBAAgB,8BAA8B,CAC7C,OAAO,EAAE,sBAAsB,EAC/B,SAAS,EAAE,MAAM,GAAG,IAAI,EACxB,QAAQ,EAAE,MAAM,GACd,oBAAoB,CAkCtB;AAmCD;;;;;;;;;;;;GAYG;AACH,wBAAgB,oBAAoB,CAAC,OAAO,EAAE,YAAY,EAAE,EAAE,WAAW,GAAE,MAAU,GAAG,iBAAiB,CAoDxG;AAwCD;;;;;GAKG;AACH,wBAAsB,qBAAqB,CAC1C,OAAO,EAAE,YAAY,EAAE,EACvB,OAAO,EAAE,4BAA4B,GACnC,OAAO,CAAC,mBAAmB,CAAC,CA8D9B","sourcesContent":["/**\n * Branch summarization for tree navigation.\n *\n * When navigating to a different point in the session tree, this generates\n * a summary of the branch being left so context isn't lost.\n */\n\nimport type { AgentMessage } from \"@mariozechner/pi-agent-core\";\nimport type { Model } from \"@mariozechner/pi-ai\";\nimport { completeSimple } from \"@mariozechner/pi-ai\";\nimport {\n\tconvertToLlm,\n\tcreateBranchSummaryMessage,\n\tcreateCompactionSummaryMessage,\n\tcreateHookMessage,\n} from \"../messages.js\";\nimport type { ReadonlySessionManager, SessionEntry } from \"../session-manager.js\";\nimport { estimateTokens } from \"./compaction.js\";\nimport {\n\tcomputeFileLists,\n\tcreateFileOps,\n\textractFileOpsFromMessage,\n\ttype FileOperations,\n\tformatFileOperations,\n\tSUMMARIZATION_SYSTEM_PROMPT,\n\tserializeConversation,\n} from \"./utils.js\";\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface BranchSummaryResult {\n\tsummary?: string;\n\treadFiles?: string[];\n\tmodifiedFiles?: string[];\n\taborted?: boolean;\n\terror?: string;\n}\n\n/** Details stored in BranchSummaryEntry.details for file tracking */\nexport interface BranchSummaryDetails {\n\treadFiles: string[];\n\tmodifiedFiles: string[];\n}\n\nexport type { FileOperations } from \"./utils.js\";\n\nexport interface BranchPreparation {\n\t/** Messages extracted for summarization, in chronological order */\n\tmessages: AgentMessage[];\n\t/** File operations extracted from tool calls */\n\tfileOps: FileOperations;\n\t/** Total estimated tokens in messages */\n\ttotalTokens: number;\n}\n\nexport interface CollectEntriesResult {\n\t/** Entries to summarize, in chronological order */\n\tentries: SessionEntry[];\n\t/** Common ancestor between old and new position, if any */\n\tcommonAncestorId: string | 
null;\n}\n\nexport interface GenerateBranchSummaryOptions {\n\t/** Model to use for summarization */\n\tmodel: Model<any>;\n\t/** API key for the model */\n\tapiKey: string;\n\t/** Abort signal for cancellation */\n\tsignal: AbortSignal;\n\t/** Optional custom instructions for summarization */\n\tcustomInstructions?: string;\n\t/** Tokens reserved for prompt + LLM response (default 16384) */\n\treserveTokens?: number;\n}\n\n// ============================================================================\n// Entry Collection\n// ============================================================================\n\n/**\n * Collect entries that should be summarized when navigating from one position to another.\n *\n * Walks from oldLeafId back to the common ancestor with targetId, collecting entries\n * along the way. Does NOT stop at compaction boundaries - those are included and their\n * summaries become context.\n *\n * @param session - Session manager (read-only access)\n * @param oldLeafId - Current position (where we're navigating from)\n * @param targetId - Target position (where we're navigating to)\n * @returns Entries to summarize and the common ancestor\n */\nexport function collectEntriesForBranchSummary(\n\tsession: ReadonlySessionManager,\n\toldLeafId: string | null,\n\ttargetId: string,\n): CollectEntriesResult {\n\t// If no old position, nothing to summarize\n\tif (!oldLeafId) {\n\t\treturn { entries: [], commonAncestorId: null };\n\t}\n\n\t// Find common ancestor (deepest node that's on both paths)\n\tconst oldPath = new Set(session.getBranch(oldLeafId).map((e) => e.id));\n\tconst targetPath = session.getBranch(targetId);\n\n\t// targetPath is root-first, so iterate backwards to find deepest common ancestor\n\tlet commonAncestorId: string | null = null;\n\tfor (let i = targetPath.length - 1; i >= 0; i--) {\n\t\tif (oldPath.has(targetPath[i].id)) {\n\t\t\tcommonAncestorId = targetPath[i].id;\n\t\t\tbreak;\n\t\t}\n\t}\n\n\t// Collect entries from old leaf back to common ancestor\n\tconst entries: SessionEntry[] = [];\n\tlet current: string | null = oldLeafId;\n\n\twhile (current && current !== commonAncestorId) {\n\t\tconst entry = session.getEntry(current);\n\t\tif (!entry) break;\n\t\tentries.push(entry);\n\t\tcurrent = entry.parentId;\n\t}\n\n\t// Reverse to get chronological order\n\tentries.reverse();\n\n\treturn { entries, commonAncestorId };\n}\n\n// ============================================================================\n// Entry to Message Conversion\n// ============================================================================\n\n/**\n * Extract AgentMessage from a session entry.\n * Similar to getMessageFromEntry in compaction.ts but also handles compaction entries.\n */\nfunction getMessageFromEntry(entry: SessionEntry): AgentMessage | undefined {\n\tswitch (entry.type) {\n\t\tcase \"message\":\n\t\t\t// Skip tool results - context is in assistant's tool call\n\t\t\tif (entry.message.role === \"toolResult\") return undefined;\n\t\t\treturn entry.message;\n\n\t\tcase \"custom_message\":\n\t\t\treturn createHookMessage(entry.customType, entry.content, entry.display, entry.details, entry.timestamp);\n\n\t\tcase \"branch_summary\":\n\t\t\treturn createBranchSummaryMessage(entry.summary, entry.fromId, entry.timestamp);\n\n\t\tcase \"compaction\":\n\t\t\treturn createCompactionSummaryMessage(entry.summary, entry.tokensBefore, entry.timestamp);\n\n\t\t// These don't contribute to conversation content\n\t\tcase \"thinking_level_change\":\n\t\tcase 
\"model_change\":\n\t\tcase \"custom\":\n\t\tcase \"label\":\n\t\t\treturn undefined;\n\t}\n}\n\n/**\n * Prepare entries for summarization with token budget.\n *\n * Walks entries from NEWEST to OLDEST, adding messages until we hit the token budget.\n * This ensures we keep the most recent context when the branch is too long.\n *\n * Also collects file operations from:\n * - Tool calls in assistant messages\n * - Existing branch_summary entries' details (for cumulative tracking)\n *\n * @param entries - Entries in chronological order\n * @param tokenBudget - Maximum tokens to include (0 = no limit)\n */\nexport function prepareBranchEntries(entries: SessionEntry[], tokenBudget: number = 0): BranchPreparation {\n\tconst messages: AgentMessage[] = [];\n\tconst fileOps = createFileOps();\n\tlet totalTokens = 0;\n\n\t// First pass: collect file ops from ALL entries (even if they don't fit in token budget)\n\t// This ensures we capture cumulative file tracking from nested branch summaries\n\t// Only extract from pi-generated summaries (fromHook !== true), not hook-generated ones\n\tfor (const entry of entries) {\n\t\tif (entry.type === \"branch_summary\" && !entry.fromHook && entry.details) {\n\t\t\tconst details = entry.details as BranchSummaryDetails;\n\t\t\tif (Array.isArray(details.readFiles)) {\n\t\t\t\tfor (const f of details.readFiles) fileOps.read.add(f);\n\t\t\t}\n\t\t\tif (Array.isArray(details.modifiedFiles)) {\n\t\t\t\t// Modified files go into both edited and written for proper deduplication\n\t\t\t\tfor (const f of details.modifiedFiles) {\n\t\t\t\t\tfileOps.edited.add(f);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Second pass: walk from newest to oldest, adding messages until token budget\n\tfor (let i = entries.length - 1; i >= 0; i--) {\n\t\tconst entry = entries[i];\n\t\tconst message = getMessageFromEntry(entry);\n\t\tif (!message) continue;\n\n\t\t// Extract file ops from assistant messages (tool calls)\n\t\textractFileOpsFromMessage(message, fileOps);\n\n\t\tconst tokens = estimateTokens(message);\n\n\t\t// Check budget before adding\n\t\tif (tokenBudget > 0 && totalTokens + tokens > tokenBudget) {\n\t\t\t// If this is a summary entry, try to fit it anyway as it's important context\n\t\t\tif (entry.type === \"compaction\" || entry.type === \"branch_summary\") {\n\t\t\t\tif (totalTokens < tokenBudget * 0.9) {\n\t\t\t\t\tmessages.unshift(message);\n\t\t\t\t\ttotalTokens += tokens;\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Stop - we've hit the budget\n\t\t\tbreak;\n\t\t}\n\n\t\tmessages.unshift(message);\n\t\ttotalTokens += tokens;\n\t}\n\n\treturn { messages, fileOps, totalTokens };\n}\n\n// ============================================================================\n// Summary Generation\n// ============================================================================\n\nconst BRANCH_SUMMARY_PREAMBLE = `The user explored a different conversation branch before returning here.\nSummary of that exploration:\n\n`;\n\nconst BRANCH_SUMMARY_PROMPT = `Create a structured summary of this conversation branch for context when returning later.\n\nUse this EXACT format:\n\n## Goal\n[What was the user trying to accomplish in this branch?]\n\n## Constraints & Preferences\n- [Any constraints, preferences, or requirements mentioned]\n- [Or \"(none)\" if none were mentioned]\n\n## Progress\n### Done\n- [x] [Completed tasks/changes]\n\n### In Progress\n- [ ] [Work that was started but not finished]\n\n### Blocked\n- [Issues preventing progress, if any]\n\n## Key Decisions\n- **[Decision]**: [Brief 
rationale]\n\n## Next Steps\n1. [What should happen next to continue this work]\n\nKeep each section concise. Preserve exact file paths, function names, and error messages.`;\n\n/**\n * Generate a summary of abandoned branch entries.\n *\n * @param entries - Session entries to summarize (chronological order)\n * @param options - Generation options\n */\nexport async function generateBranchSummary(\n\tentries: SessionEntry[],\n\toptions: GenerateBranchSummaryOptions,\n): Promise<BranchSummaryResult> {\n\tconst { model, apiKey, signal, customInstructions, reserveTokens = 16384 } = options;\n\n\t// Token budget = context window minus reserved space for prompt + response\n\tconst contextWindow = model.contextWindow || 128000;\n\tconst tokenBudget = contextWindow - reserveTokens;\n\n\tconst { messages, fileOps } = prepareBranchEntries(entries, tokenBudget);\n\n\tif (messages.length === 0) {\n\t\treturn { summary: \"No content to summarize\" };\n\t}\n\n\t// Transform to LLM-compatible messages, then serialize to text\n\t// Serialization prevents the model from treating it as a conversation to continue\n\tconst llmMessages = convertToLlm(messages);\n\tconst conversationText = serializeConversation(llmMessages);\n\n\t// Build prompt\n\tconst instructions = customInstructions || BRANCH_SUMMARY_PROMPT;\n\tconst promptText = `<conversation>\\n${conversationText}\\n</conversation>\\n\\n${instructions}`;\n\n\tconst summarizationMessages = [\n\t\t{\n\t\t\trole: \"user\" as const,\n\t\t\tcontent: [{ type: \"text\" as const, text: promptText }],\n\t\t\ttimestamp: Date.now(),\n\t\t},\n\t];\n\n\t// Call LLM for summarization\n\tconst response = await completeSimple(\n\t\tmodel,\n\t\t{ systemPrompt: SUMMARIZATION_SYSTEM_PROMPT, messages: summarizationMessages },\n\t\t{ apiKey, signal, maxTokens: 2048 },\n\t);\n\n\t// Check if aborted or errored\n\tif (response.stopReason === \"aborted\") {\n\t\treturn { aborted: true };\n\t}\n\tif (response.stopReason === \"error\") {\n\t\treturn { error: response.errorMessage || \"Summarization failed\" };\n\t}\n\n\tlet summary = response.content\n\t\t.filter((c): c is { type: \"text\"; text: string } => c.type === \"text\")\n\t\t.map((c) => c.text)\n\t\t.join(\"\\n\");\n\n\t// Prepend preamble to provide context about the branch summary\n\tsummary = BRANCH_SUMMARY_PREAMBLE + summary;\n\n\t// Compute file lists and append to summary\n\tconst { readFiles, modifiedFiles } = computeFileLists(fileOps);\n\tsummary += formatFileOperations(readFiles, modifiedFiles);\n\n\treturn {\n\t\tsummary: summary || \"No summary generated\",\n\t\treadFiles,\n\t\tmodifiedFiles,\n\t};\n}\n"]}
@@ -0,0 +1,233 @@
+ /**
+ * Branch summarization for tree navigation.
+ *
+ * When navigating to a different point in the session tree, this generates
+ * a summary of the branch being left so context isn't lost.
+ */
+ import { completeSimple } from "@mariozechner/pi-ai";
+ import { convertToLlm, createBranchSummaryMessage, createCompactionSummaryMessage, createHookMessage, } from "../messages.js";
+ import { estimateTokens } from "./compaction.js";
+ import { computeFileLists, createFileOps, extractFileOpsFromMessage, formatFileOperations, SUMMARIZATION_SYSTEM_PROMPT, serializeConversation, } from "./utils.js";
+ // ============================================================================
+ // Entry Collection
+ // ============================================================================
+ /**
+ * Collect entries that should be summarized when navigating from one position to another.
+ *
+ * Walks from oldLeafId back to the common ancestor with targetId, collecting entries
+ * along the way. Does NOT stop at compaction boundaries - those are included and their
+ * summaries become context.
+ *
+ * @param session - Session manager (read-only access)
+ * @param oldLeafId - Current position (where we're navigating from)
+ * @param targetId - Target position (where we're navigating to)
+ * @returns Entries to summarize and the common ancestor
+ */
+ export function collectEntriesForBranchSummary(session, oldLeafId, targetId) {
+ // If no old position, nothing to summarize
+ if (!oldLeafId) {
+ return { entries: [], commonAncestorId: null };
+ }
+ // Find common ancestor (deepest node that's on both paths)
+ const oldPath = new Set(session.getBranch(oldLeafId).map((e) => e.id));
+ const targetPath = session.getBranch(targetId);
+ // targetPath is root-first, so iterate backwards to find deepest common ancestor
+ let commonAncestorId = null;
+ for (let i = targetPath.length - 1; i >= 0; i--) {
+ if (oldPath.has(targetPath[i].id)) {
+ commonAncestorId = targetPath[i].id;
+ break;
+ }
+ }
+ // Collect entries from old leaf back to common ancestor
+ const entries = [];
+ let current = oldLeafId;
+ while (current && current !== commonAncestorId) {
+ const entry = session.getEntry(current);
+ if (!entry)
+ break;
+ entries.push(entry);
+ current = entry.parentId;
+ }
+ // Reverse to get chronological order
+ entries.reverse();
+ return { entries, commonAncestorId };
+ }
+ // ============================================================================
+ // Entry to Message Conversion
+ // ============================================================================
+ /**
+ * Extract AgentMessage from a session entry.
+ * Similar to getMessageFromEntry in compaction.ts but also handles compaction entries.
+ */
+ function getMessageFromEntry(entry) {
+ switch (entry.type) {
+ case "message":
+ // Skip tool results - context is in assistant's tool call
+ if (entry.message.role === "toolResult")
+ return undefined;
+ return entry.message;
+ case "custom_message":
+ return createHookMessage(entry.customType, entry.content, entry.display, entry.details, entry.timestamp);
+ case "branch_summary":
+ return createBranchSummaryMessage(entry.summary, entry.fromId, entry.timestamp);
+ case "compaction":
+ return createCompactionSummaryMessage(entry.summary, entry.tokensBefore, entry.timestamp);
+ // These don't contribute to conversation content
+ case "thinking_level_change":
+ case "model_change":
+ case "custom":
+ case "label":
+ return undefined;
+ }
+ }
+ /**
+ * Prepare entries for summarization with token budget.
+ *
+ * Walks entries from NEWEST to OLDEST, adding messages until we hit the token budget.
+ * This ensures we keep the most recent context when the branch is too long.
+ *
+ * Also collects file operations from:
+ * - Tool calls in assistant messages
+ * - Existing branch_summary entries' details (for cumulative tracking)
+ *
+ * @param entries - Entries in chronological order
+ * @param tokenBudget - Maximum tokens to include (0 = no limit)
+ */
+ export function prepareBranchEntries(entries, tokenBudget = 0) {
+ const messages = [];
+ const fileOps = createFileOps();
+ let totalTokens = 0;
+ // First pass: collect file ops from ALL entries (even if they don't fit in token budget)
+ // This ensures we capture cumulative file tracking from nested branch summaries
+ // Only extract from pi-generated summaries (fromHook !== true), not hook-generated ones
+ for (const entry of entries) {
+ if (entry.type === "branch_summary" && !entry.fromHook && entry.details) {
+ const details = entry.details;
+ if (Array.isArray(details.readFiles)) {
+ for (const f of details.readFiles)
+ fileOps.read.add(f);
+ }
+ if (Array.isArray(details.modifiedFiles)) {
+ // Modified files go into both edited and written for proper deduplication
+ for (const f of details.modifiedFiles) {
+ fileOps.edited.add(f);
+ }
+ }
+ }
+ }
+ // Second pass: walk from newest to oldest, adding messages until token budget
+ for (let i = entries.length - 1; i >= 0; i--) {
+ const entry = entries[i];
+ const message = getMessageFromEntry(entry);
+ if (!message)
+ continue;
+ // Extract file ops from assistant messages (tool calls)
+ extractFileOpsFromMessage(message, fileOps);
+ const tokens = estimateTokens(message);
+ // Check budget before adding
+ if (tokenBudget > 0 && totalTokens + tokens > tokenBudget) {
+ // If this is a summary entry, try to fit it anyway as it's important context
+ if (entry.type === "compaction" || entry.type === "branch_summary") {
+ if (totalTokens < tokenBudget * 0.9) {
+ messages.unshift(message);
+ totalTokens += tokens;
+ }
+ }
+ // Stop - we've hit the budget
+ break;
+ }
+ messages.unshift(message);
+ totalTokens += tokens;
+ }
+ return { messages, fileOps, totalTokens };
+ }
+ // ============================================================================
+ // Summary Generation
+ // ============================================================================
+ const BRANCH_SUMMARY_PREAMBLE = `The user explored a different conversation branch before returning here.
+ Summary of that exploration:
+
+ `;
+ const BRANCH_SUMMARY_PROMPT = `Create a structured summary of this conversation branch for context when returning later.
+
+ Use this EXACT format:
+
+ ## Goal
+ [What was the user trying to accomplish in this branch?]
+
+ ## Constraints & Preferences
+ - [Any constraints, preferences, or requirements mentioned]
+ - [Or "(none)" if none were mentioned]
+
+ ## Progress
+ ### Done
+ - [x] [Completed tasks/changes]
+
+ ### In Progress
+ - [ ] [Work that was started but not finished]
+
+ ### Blocked
+ - [Issues preventing progress, if any]
+
+ ## Key Decisions
+ - **[Decision]**: [Brief rationale]
+
+ ## Next Steps
+ 1. [What should happen next to continue this work]
+
+ Keep each section concise. Preserve exact file paths, function names, and error messages.`;
+ /**
+ * Generate a summary of abandoned branch entries.
+ *
+ * @param entries - Session entries to summarize (chronological order)
+ * @param options - Generation options
+ */
+ export async function generateBranchSummary(entries, options) {
+ const { model, apiKey, signal, customInstructions, reserveTokens = 16384 } = options;
+ // Token budget = context window minus reserved space for prompt + response
+ const contextWindow = model.contextWindow || 128000;
+ const tokenBudget = contextWindow - reserveTokens;
+ const { messages, fileOps } = prepareBranchEntries(entries, tokenBudget);
+ if (messages.length === 0) {
+ return { summary: "No content to summarize" };
+ }
+ // Transform to LLM-compatible messages, then serialize to text
+ // Serialization prevents the model from treating it as a conversation to continue
+ const llmMessages = convertToLlm(messages);
+ const conversationText = serializeConversation(llmMessages);
+ // Build prompt
+ const instructions = customInstructions || BRANCH_SUMMARY_PROMPT;
+ const promptText = `<conversation>\n${conversationText}\n</conversation>\n\n${instructions}`;
+ const summarizationMessages = [
+ {
+ role: "user",
+ content: [{ type: "text", text: promptText }],
+ timestamp: Date.now(),
+ },
+ ];
+ // Call LLM for summarization
+ const response = await completeSimple(model, { systemPrompt: SUMMARIZATION_SYSTEM_PROMPT, messages: summarizationMessages }, { apiKey, signal, maxTokens: 2048 });
+ // Check if aborted or errored
+ if (response.stopReason === "aborted") {
+ return { aborted: true };
+ }
+ if (response.stopReason === "error") {
+ return { error: response.errorMessage || "Summarization failed" };
+ }
+ let summary = response.content
+ .filter((c) => c.type === "text")
+ .map((c) => c.text)
+ .join("\n");
+ // Prepend preamble to provide context about the branch summary
+ summary = BRANCH_SUMMARY_PREAMBLE + summary;
+ // Compute file lists and append to summary
+ const { readFiles, modifiedFiles } = computeFileLists(fileOps);
+ summary += formatFileOperations(readFiles, modifiedFiles);
+ return {
+ summary: summary || "No summary generated",
+ readFiles,
+ modifiedFiles,
+ };
+ }
+ //# sourceMappingURL=branch-summarization.js.map
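
As the implementation shows, generateBranchSummary derives its token budget as model.contextWindow minus reserveTokens, serializes the branch into a <conversation> block, and reports cancellation and failures through the aborted and error fields rather than by throwing. A sketch of how a caller might wire an AbortSignal and branch on the three outcomes; the declare const placeholders, the import subpath, and the persistBranchSummary helper are hypothetical, not part of the diff:

import {
	collectEntriesForBranchSummary,
	generateBranchSummary,
} from "@mariozechner/pi-coding-agent/dist/core/compaction/branch-summarization.js"; // subpath is an assumption

declare const session: any; // ReadonlySessionManager
declare const model: any; // Model<any> from @mariozechner/pi-ai
declare const apiKey: string;
declare const oldLeafId: string | null;
declare const targetId: string;
// Hypothetical helper that writes a branch_summary entry into the session tree.
declare function persistBranchSummary(summary: string, details: { readFiles: string[]; modifiedFiles: string[] }): void;

const controller = new AbortController();
const { entries } = collectEntriesForBranchSummary(session, oldLeafId, targetId);

const result = await generateBranchSummary(entries, {
	model,
	apiKey,
	signal: controller.signal,
	// reserveTokens defaults to 16384; customInstructions would override the built-in prompt.
});

if (result.aborted) {
	// Navigation was cancelled; nothing should be written.
} else if (result.error) {
	console.error(`branch summarization failed: ${result.error}`);
} else if (result.summary) {
	// The summary already includes the preamble and the read/modified file lists.
	persistBranchSummary(result.summary, {
		readFiles: result.readFiles ?? [],
		modifiedFiles: result.modifiedFiles ?? [],
	});
}
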
@@ -0,0 +1 @@
+ {"version":3,"file":"branch-summarization.js","sourceRoot":"","sources":["../../../src/core/compaction/branch-summarization.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAIH,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EACN,YAAY,EACZ,0BAA0B,EAC1B,8BAA8B,EAC9B,iBAAiB,GACjB,MAAM,gBAAgB,CAAC;AAExB,OAAO,EAAE,cAAc,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,EACN,gBAAgB,EAChB,aAAa,EACb,yBAAyB,EAEzB,oBAAoB,EACpB,2BAA2B,EAC3B,qBAAqB,GACrB,MAAM,YAAY,CAAC;AAmDpB,+EAA+E;AAC/E,mBAAmB;AACnB,+EAA+E;AAE/E;;;;;;;;;;;GAWG;AACH,MAAM,UAAU,8BAA8B,CAC7C,OAA+B,EAC/B,SAAwB,EACxB,QAAgB,EACO;IACvB,2CAA2C;IAC3C,IAAI,CAAC,SAAS,EAAE,CAAC;QAChB,OAAO,EAAE,OAAO,EAAE,EAAE,EAAE,gBAAgB,EAAE,IAAI,EAAE,CAAC;IAChD,CAAC;IAED,2DAA2D;IAC3D,MAAM,OAAO,GAAG,IAAI,GAAG,CAAC,OAAO,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IACvE,MAAM,UAAU,GAAG,OAAO,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IAE/C,iFAAiF;IACjF,IAAI,gBAAgB,GAAkB,IAAI,CAAC;IAC3C,KAAK,IAAI,CAAC,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QACjD,IAAI,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC;YACnC,gBAAgB,GAAG,UAAU,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;YACpC,MAAM;QACP,CAAC;IACF,CAAC;IAED,wDAAwD;IACxD,MAAM,OAAO,GAAmB,EAAE,CAAC;IACnC,IAAI,OAAO,GAAkB,SAAS,CAAC;IAEvC,OAAO,OAAO,IAAI,OAAO,KAAK,gBAAgB,EAAE,CAAC;QAChD,MAAM,KAAK,GAAG,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;QACxC,IAAI,CAAC,KAAK;YAAE,MAAM;QAClB,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACpB,OAAO,GAAG,KAAK,CAAC,QAAQ,CAAC;IAC1B,CAAC;IAED,qCAAqC;IACrC,OAAO,CAAC,OAAO,EAAE,CAAC;IAElB,OAAO,EAAE,OAAO,EAAE,gBAAgB,EAAE,CAAC;AAAA,CACrC;AAED,+EAA+E;AAC/E,8BAA8B;AAC9B,+EAA+E;AAE/E;;;GAGG;AACH,SAAS,mBAAmB,CAAC,KAAmB,EAA4B;IAC3E,QAAQ,KAAK,CAAC,IAAI,EAAE,CAAC;QACpB,KAAK,SAAS;YACb,0DAA0D;YAC1D,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,KAAK,YAAY;gBAAE,OAAO,SAAS,CAAC;YAC1D,OAAO,KAAK,CAAC,OAAO,CAAC;QAEtB,KAAK,gBAAgB;YACpB,OAAO,iBAAiB,CAAC,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC;QAE1G,KAAK,gBAAgB;YACpB,OAAO,0BAA0B,CAAC,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC;QAEjF,KAAK,YAAY;YAChB,OAAO,8BAA8B,CAAC,KAAK,CAAC,OAAO,EAAE,KAAK,CAAC,YAAY,EAAE,KAAK,CAAC,SAAS,CAAC,CAAC;QAE3F,iDAAiD;QACjD,KAAK,uBAAuB,CAAC;QAC7B,KAAK,cAAc,CAAC;QACpB,KAAK,QAAQ,CAAC;QACd,KAAK,OAAO;YACX,OAAO,SAAS,CAAC;IACnB,CAAC;AAAA,CACD;AAED;;;;;;;;;;;;GAYG;AACH,MAAM,UAAU,oBAAoB,CAAC,OAAuB,EAAE,WAAW,GAAW,CAAC,EAAqB;IACzG,MAAM,QAAQ,GAAmB,EAAE,CAAC;IACpC,MAAM,OAAO,GAAG,aAAa,EAAE,CAAC;IAChC,IAAI,WAAW,GAAG,CAAC,CAAC;IAEpB,yFAAyF;IACzF,gFAAgF;IAChF,wFAAwF;IACxF,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE,CAAC;QAC7B,IAAI,KAAK,CAAC,IAAI,KAAK,gBAAgB,IAAI,CAAC,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,OAAO,EAAE,CAAC;YACzE,MAAM,OAAO,GAAG,KAAK,CAAC,OAA+B,CAAC;YACtD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE,CAAC;gBACtC,KAAK,MAAM,CAAC,IAAI,OAAO,CAAC,SAAS;oBAAE,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;YACxD,CAAC;YACD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE,CAAC;gBAC1C,0EAA0E;gBAC1E,KAAK,MAAM,CAAC,IAAI,OAAO,CAAC,aAAa,EAAE,CAAC;oBACvC,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;gBACvB,CAAC;YACF,CAAC;QACF,CAAC;IACF,CAAC;IAED,8EAA8E;IAC9E,KAAK,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;QAC9C,MAAM,KAAK,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QACzB,MAAM,OAAO,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAC;QAC3C,IAAI,CAAC,OAAO;YAAE,SAAS;QAEvB,wDAAwD;QACxD,yBAAyB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAE5C,MAAM,MAAM,GAAG,cAAc,CAAC,OAAO,CAAC,CAAC;QAEvC,6BAA6B;QAC7B,IAAI,WAAW,GAAG,CAAC,IAAI,WAAW,GAAG,MAAM,GAAG,WAAW,EAAE,CAAC;YAC3D,6EAA6E;YAC7E,IAAI,KAAK,CAAC,IAAI,
KAAK,YAAY,IAAI,KAAK,CAAC,IAAI,KAAK,gBAAgB,EAAE,CAAC;gBACpE,IAAI,WAAW,GAAG,WAAW,GAAG,GAAG,EAAE,CAAC;oBACrC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;oBAC1B,WAAW,IAAI,MAAM,CAAC;gBACvB,CAAC;YACF,CAAC;YACD,8BAA8B;YAC9B,MAAM;QACP,CAAC;QAED,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;QAC1B,WAAW,IAAI,MAAM,CAAC;IACvB,CAAC;IAED,OAAO,EAAE,QAAQ,EAAE,OAAO,EAAE,WAAW,EAAE,CAAC;AAAA,CAC1C;AAED,+EAA+E;AAC/E,qBAAqB;AACrB,+EAA+E;AAE/E,MAAM,uBAAuB,GAAG;;;CAG/B,CAAC;AAEF,MAAM,qBAAqB,GAAG;;;;;;;;;;;;;;;;;;;;;;;;;;;0FA2B4D,CAAC;AAE3F;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,qBAAqB,CAC1C,OAAuB,EACvB,OAAqC,EACN;IAC/B,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,kBAAkB,EAAE,aAAa,GAAG,KAAK,EAAE,GAAG,OAAO,CAAC;IAErF,2EAA2E;IAC3E,MAAM,aAAa,GAAG,KAAK,CAAC,aAAa,IAAI,MAAM,CAAC;IACpD,MAAM,WAAW,GAAG,aAAa,GAAG,aAAa,CAAC;IAElD,MAAM,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,oBAAoB,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;IAEzE,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QAC3B,OAAO,EAAE,OAAO,EAAE,yBAAyB,EAAE,CAAC;IAC/C,CAAC;IAED,+DAA+D;IAC/D,kFAAkF;IAClF,MAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;IAC3C,MAAM,gBAAgB,GAAG,qBAAqB,CAAC,WAAW,CAAC,CAAC;IAE5D,eAAe;IACf,MAAM,YAAY,GAAG,kBAAkB,IAAI,qBAAqB,CAAC;IACjE,MAAM,UAAU,GAAG,mBAAmB,gBAAgB,wBAAwB,YAAY,EAAE,CAAC;IAE7F,MAAM,qBAAqB,GAAG;QAC7B;YACC,IAAI,EAAE,MAAe;YACrB,OAAO,EAAE,CAAC,EAAE,IAAI,EAAE,MAAe,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC;YACtD,SAAS,EAAE,IAAI,CAAC,GAAG,EAAE;SACrB;KACD,CAAC;IAEF,6BAA6B;IAC7B,MAAM,QAAQ,GAAG,MAAM,cAAc,CACpC,KAAK,EACL,EAAE,YAAY,EAAE,2BAA2B,EAAE,QAAQ,EAAE,qBAAqB,EAAE,EAC9E,EAAE,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,CACnC,CAAC;IAEF,8BAA8B;IAC9B,IAAI,QAAQ,CAAC,UAAU,KAAK,SAAS,EAAE,CAAC;QACvC,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC;IAC1B,CAAC;IACD,IAAI,QAAQ,CAAC,UAAU,KAAK,OAAO,EAAE,CAAC;QACrC,OAAO,EAAE,KAAK,EAAE,QAAQ,CAAC,YAAY,IAAI,sBAAsB,EAAE,CAAC;IACnE,CAAC;IAED,IAAI,OAAO,GAAG,QAAQ,CAAC,OAAO;SAC5B,MAAM,CAAC,CAAC,CAAC,EAAuC,EAAE,CAAC,CAAC,CAAC,IAAI,KAAK,MAAM,CAAC;SACrE,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC;SAClB,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,+DAA+D;IAC/D,OAAO,GAAG,uBAAuB,GAAG,OAAO,CAAC;IAE5C,2CAA2C;IAC3C,MAAM,EAAE,SAAS,EAAE,aAAa,EAAE,GAAG,gBAAgB,CAAC,OAAO,CAAC,CAAC;IAC/D,OAAO,IAAI,oBAAoB,CAAC,SAAS,EAAE,aAAa,CAAC,CAAC;IAE1D,OAAO;QACN,OAAO,EAAE,OAAO,IAAI,sBAAsB;QAC1C,SAAS;QACT,aAAa;KACb,CAAC;AAAA,CACF","sourcesContent":["/**\n * Branch summarization for tree navigation.\n *\n * When navigating to a different point in the session tree, this generates\n * a summary of the branch being left so context isn't lost.\n */\n\nimport type { AgentMessage } from \"@mariozechner/pi-agent-core\";\nimport type { Model } from \"@mariozechner/pi-ai\";\nimport { completeSimple } from \"@mariozechner/pi-ai\";\nimport {\n\tconvertToLlm,\n\tcreateBranchSummaryMessage,\n\tcreateCompactionSummaryMessage,\n\tcreateHookMessage,\n} from \"../messages.js\";\nimport type { ReadonlySessionManager, SessionEntry } from \"../session-manager.js\";\nimport { estimateTokens } from \"./compaction.js\";\nimport {\n\tcomputeFileLists,\n\tcreateFileOps,\n\textractFileOpsFromMessage,\n\ttype FileOperations,\n\tformatFileOperations,\n\tSUMMARIZATION_SYSTEM_PROMPT,\n\tserializeConversation,\n} from \"./utils.js\";\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface BranchSummaryResult {\n\tsummary?: string;\n\treadFiles?: string[];\n\tmodifiedFiles?: string[];\n\taborted?: boolean;\n\terror?: string;\n}\n\n/** Details stored in BranchSummaryEntry.details for file tracking */\nexport interface 
BranchSummaryDetails {\n\treadFiles: string[];\n\tmodifiedFiles: string[];\n}\n\nexport type { FileOperations } from \"./utils.js\";\n\nexport interface BranchPreparation {\n\t/** Messages extracted for summarization, in chronological order */\n\tmessages: AgentMessage[];\n\t/** File operations extracted from tool calls */\n\tfileOps: FileOperations;\n\t/** Total estimated tokens in messages */\n\ttotalTokens: number;\n}\n\nexport interface CollectEntriesResult {\n\t/** Entries to summarize, in chronological order */\n\tentries: SessionEntry[];\n\t/** Common ancestor between old and new position, if any */\n\tcommonAncestorId: string | null;\n}\n\nexport interface GenerateBranchSummaryOptions {\n\t/** Model to use for summarization */\n\tmodel: Model<any>;\n\t/** API key for the model */\n\tapiKey: string;\n\t/** Abort signal for cancellation */\n\tsignal: AbortSignal;\n\t/** Optional custom instructions for summarization */\n\tcustomInstructions?: string;\n\t/** Tokens reserved for prompt + LLM response (default 16384) */\n\treserveTokens?: number;\n}\n\n// ============================================================================\n// Entry Collection\n// ============================================================================\n\n/**\n * Collect entries that should be summarized when navigating from one position to another.\n *\n * Walks from oldLeafId back to the common ancestor with targetId, collecting entries\n * along the way. Does NOT stop at compaction boundaries - those are included and their\n * summaries become context.\n *\n * @param session - Session manager (read-only access)\n * @param oldLeafId - Current position (where we're navigating from)\n * @param targetId - Target position (where we're navigating to)\n * @returns Entries to summarize and the common ancestor\n */\nexport function collectEntriesForBranchSummary(\n\tsession: ReadonlySessionManager,\n\toldLeafId: string | null,\n\ttargetId: string,\n): CollectEntriesResult {\n\t// If no old position, nothing to summarize\n\tif (!oldLeafId) {\n\t\treturn { entries: [], commonAncestorId: null };\n\t}\n\n\t// Find common ancestor (deepest node that's on both paths)\n\tconst oldPath = new Set(session.getBranch(oldLeafId).map((e) => e.id));\n\tconst targetPath = session.getBranch(targetId);\n\n\t// targetPath is root-first, so iterate backwards to find deepest common ancestor\n\tlet commonAncestorId: string | null = null;\n\tfor (let i = targetPath.length - 1; i >= 0; i--) {\n\t\tif (oldPath.has(targetPath[i].id)) {\n\t\t\tcommonAncestorId = targetPath[i].id;\n\t\t\tbreak;\n\t\t}\n\t}\n\n\t// Collect entries from old leaf back to common ancestor\n\tconst entries: SessionEntry[] = [];\n\tlet current: string | null = oldLeafId;\n\n\twhile (current && current !== commonAncestorId) {\n\t\tconst entry = session.getEntry(current);\n\t\tif (!entry) break;\n\t\tentries.push(entry);\n\t\tcurrent = entry.parentId;\n\t}\n\n\t// Reverse to get chronological order\n\tentries.reverse();\n\n\treturn { entries, commonAncestorId };\n}\n\n// ============================================================================\n// Entry to Message Conversion\n// ============================================================================\n\n/**\n * Extract AgentMessage from a session entry.\n * Similar to getMessageFromEntry in compaction.ts but also handles compaction entries.\n */\nfunction getMessageFromEntry(entry: SessionEntry): AgentMessage | undefined {\n\tswitch (entry.type) {\n\t\tcase \"message\":\n\t\t\t// Skip tool results - context is 
in assistant's tool call\n\t\t\tif (entry.message.role === \"toolResult\") return undefined;\n\t\t\treturn entry.message;\n\n\t\tcase \"custom_message\":\n\t\t\treturn createHookMessage(entry.customType, entry.content, entry.display, entry.details, entry.timestamp);\n\n\t\tcase \"branch_summary\":\n\t\t\treturn createBranchSummaryMessage(entry.summary, entry.fromId, entry.timestamp);\n\n\t\tcase \"compaction\":\n\t\t\treturn createCompactionSummaryMessage(entry.summary, entry.tokensBefore, entry.timestamp);\n\n\t\t// These don't contribute to conversation content\n\t\tcase \"thinking_level_change\":\n\t\tcase \"model_change\":\n\t\tcase \"custom\":\n\t\tcase \"label\":\n\t\t\treturn undefined;\n\t}\n}\n\n/**\n * Prepare entries for summarization with token budget.\n *\n * Walks entries from NEWEST to OLDEST, adding messages until we hit the token budget.\n * This ensures we keep the most recent context when the branch is too long.\n *\n * Also collects file operations from:\n * - Tool calls in assistant messages\n * - Existing branch_summary entries' details (for cumulative tracking)\n *\n * @param entries - Entries in chronological order\n * @param tokenBudget - Maximum tokens to include (0 = no limit)\n */\nexport function prepareBranchEntries(entries: SessionEntry[], tokenBudget: number = 0): BranchPreparation {\n\tconst messages: AgentMessage[] = [];\n\tconst fileOps = createFileOps();\n\tlet totalTokens = 0;\n\n\t// First pass: collect file ops from ALL entries (even if they don't fit in token budget)\n\t// This ensures we capture cumulative file tracking from nested branch summaries\n\t// Only extract from pi-generated summaries (fromHook !== true), not hook-generated ones\n\tfor (const entry of entries) {\n\t\tif (entry.type === \"branch_summary\" && !entry.fromHook && entry.details) {\n\t\t\tconst details = entry.details as BranchSummaryDetails;\n\t\t\tif (Array.isArray(details.readFiles)) {\n\t\t\t\tfor (const f of details.readFiles) fileOps.read.add(f);\n\t\t\t}\n\t\t\tif (Array.isArray(details.modifiedFiles)) {\n\t\t\t\t// Modified files go into both edited and written for proper deduplication\n\t\t\t\tfor (const f of details.modifiedFiles) {\n\t\t\t\t\tfileOps.edited.add(f);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\t// Second pass: walk from newest to oldest, adding messages until token budget\n\tfor (let i = entries.length - 1; i >= 0; i--) {\n\t\tconst entry = entries[i];\n\t\tconst message = getMessageFromEntry(entry);\n\t\tif (!message) continue;\n\n\t\t// Extract file ops from assistant messages (tool calls)\n\t\textractFileOpsFromMessage(message, fileOps);\n\n\t\tconst tokens = estimateTokens(message);\n\n\t\t// Check budget before adding\n\t\tif (tokenBudget > 0 && totalTokens + tokens > tokenBudget) {\n\t\t\t// If this is a summary entry, try to fit it anyway as it's important context\n\t\t\tif (entry.type === \"compaction\" || entry.type === \"branch_summary\") {\n\t\t\t\tif (totalTokens < tokenBudget * 0.9) {\n\t\t\t\t\tmessages.unshift(message);\n\t\t\t\t\ttotalTokens += tokens;\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Stop - we've hit the budget\n\t\t\tbreak;\n\t\t}\n\n\t\tmessages.unshift(message);\n\t\ttotalTokens += tokens;\n\t}\n\n\treturn { messages, fileOps, totalTokens };\n}\n\n// ============================================================================\n// Summary Generation\n// ============================================================================\n\nconst BRANCH_SUMMARY_PREAMBLE = `The user explored a different conversation branch before returning here.\nSummary 
of that exploration:\n\n`;\n\nconst BRANCH_SUMMARY_PROMPT = `Create a structured summary of this conversation branch for context when returning later.\n\nUse this EXACT format:\n\n## Goal\n[What was the user trying to accomplish in this branch?]\n\n## Constraints & Preferences\n- [Any constraints, preferences, or requirements mentioned]\n- [Or \"(none)\" if none were mentioned]\n\n## Progress\n### Done\n- [x] [Completed tasks/changes]\n\n### In Progress\n- [ ] [Work that was started but not finished]\n\n### Blocked\n- [Issues preventing progress, if any]\n\n## Key Decisions\n- **[Decision]**: [Brief rationale]\n\n## Next Steps\n1. [What should happen next to continue this work]\n\nKeep each section concise. Preserve exact file paths, function names, and error messages.`;\n\n/**\n * Generate a summary of abandoned branch entries.\n *\n * @param entries - Session entries to summarize (chronological order)\n * @param options - Generation options\n */\nexport async function generateBranchSummary(\n\tentries: SessionEntry[],\n\toptions: GenerateBranchSummaryOptions,\n): Promise<BranchSummaryResult> {\n\tconst { model, apiKey, signal, customInstructions, reserveTokens = 16384 } = options;\n\n\t// Token budget = context window minus reserved space for prompt + response\n\tconst contextWindow = model.contextWindow || 128000;\n\tconst tokenBudget = contextWindow - reserveTokens;\n\n\tconst { messages, fileOps } = prepareBranchEntries(entries, tokenBudget);\n\n\tif (messages.length === 0) {\n\t\treturn { summary: \"No content to summarize\" };\n\t}\n\n\t// Transform to LLM-compatible messages, then serialize to text\n\t// Serialization prevents the model from treating it as a conversation to continue\n\tconst llmMessages = convertToLlm(messages);\n\tconst conversationText = serializeConversation(llmMessages);\n\n\t// Build prompt\n\tconst instructions = customInstructions || BRANCH_SUMMARY_PROMPT;\n\tconst promptText = `<conversation>\\n${conversationText}\\n</conversation>\\n\\n${instructions}`;\n\n\tconst summarizationMessages = [\n\t\t{\n\t\t\trole: \"user\" as const,\n\t\t\tcontent: [{ type: \"text\" as const, text: promptText }],\n\t\t\ttimestamp: Date.now(),\n\t\t},\n\t];\n\n\t// Call LLM for summarization\n\tconst response = await completeSimple(\n\t\tmodel,\n\t\t{ systemPrompt: SUMMARIZATION_SYSTEM_PROMPT, messages: summarizationMessages },\n\t\t{ apiKey, signal, maxTokens: 2048 },\n\t);\n\n\t// Check if aborted or errored\n\tif (response.stopReason === \"aborted\") {\n\t\treturn { aborted: true };\n\t}\n\tif (response.stopReason === \"error\") {\n\t\treturn { error: response.errorMessage || \"Summarization failed\" };\n\t}\n\n\tlet summary = response.content\n\t\t.filter((c): c is { type: \"text\"; text: string } => c.type === \"text\")\n\t\t.map((c) => c.text)\n\t\t.join(\"\\n\");\n\n\t// Prepend preamble to provide context about the branch summary\n\tsummary = BRANCH_SUMMARY_PREAMBLE + summary;\n\n\t// Compute file lists and append to summary\n\tconst { readFiles, modifiedFiles } = computeFileLists(fileOps);\n\tsummary += formatFileOperations(readFiles, modifiedFiles);\n\n\treturn {\n\t\tsummary: summary || \"No summary generated\",\n\t\treadFiles,\n\t\tmodifiedFiles,\n\t};\n}\n"]}