panopticon-cli 0.6.4 → 0.6.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (311) hide show
  1. package/README.md +2 -2
  2. package/dist/{agents-DfYify9s.js → agents-CfFDs52G.js} +14 -14
  3. package/dist/{agents-DfYify9s.js.map → agents-CfFDs52G.js.map} +1 -1
  4. package/dist/{agents-BKsVoIc9.js → agents-D_2oRFVf.js} +1 -1
  5. package/dist/{archive-planning-BJrZ3tmN.js → archive-planning-D97ziGec.js} +3 -3
  6. package/dist/{archive-planning-BJrZ3tmN.js.map → archive-planning-D97ziGec.js.map} +1 -1
  7. package/dist/{archive-planning-C3m3hfa5.js → archive-planning-DK90wn9Q.js} +1 -1
  8. package/dist/{browser-Cvdznzc0.js → browser-CX7jXfXX.js} +1 -1
  9. package/dist/{browser-Cvdznzc0.js.map → browser-CX7jXfXX.js.map} +1 -1
  10. package/dist/{clean-planning-DvhZAUv4.js → clean-planning-D_lz4aQq.js} +2 -2
  11. package/dist/{clean-planning-DvhZAUv4.js.map → clean-planning-D_lz4aQq.js.map} +1 -1
  12. package/dist/clean-planning-x1S-JdmO.js +2 -0
  13. package/dist/cli/index.js +291 -760
  14. package/dist/cli/index.js.map +1 -1
  15. package/dist/{close-issue-Dr7yZmrr.js → close-issue-CaFE0stN.js} +11 -7
  16. package/dist/close-issue-CaFE0stN.js.map +1 -0
  17. package/dist/close-issue-CjcfZI9s.js +2 -0
  18. package/dist/compact-beads-B0_qE1w3.js +2 -0
  19. package/dist/{compact-beads-BCOtIIRl.js → compact-beads-CjFkteSU.js} +2 -2
  20. package/dist/{compact-beads-BCOtIIRl.js.map → compact-beads-CjFkteSU.js.map} +1 -1
  21. package/dist/{config-CRzMQRgA.js → config-BQNKsi9G.js} +2 -2
  22. package/dist/{config-CRzMQRgA.js.map → config-BQNKsi9G.js.map} +1 -1
  23. package/dist/{config-BYgUzQ21.js → config-agyKgF5C.js} +1 -1
  24. package/dist/{config-yaml-BgOACZAB.js → config-yaml-DGbLSMCa.js} +1 -1
  25. package/dist/{config-yaml-BgOACZAB.js.map → config-yaml-DGbLSMCa.js.map} +1 -1
  26. package/dist/{config-yaml-fdyvyL0S.js → config-yaml-Dqt4FWQH.js} +1 -1
  27. package/dist/dashboard/{acceptance-criteria-e5iiHlRx.js → acceptance-criteria-Dk9hhiYj.js} +1 -1
  28. package/dist/dashboard/{acceptance-criteria-e5iiHlRx.js.map → acceptance-criteria-Dk9hhiYj.js.map} +1 -1
  29. package/dist/dashboard/{agent-enrichment-C67LJBgD.js → agent-enrichment-DdO7ZqjI.js} +11 -7
  30. package/dist/dashboard/agent-enrichment-DdO7ZqjI.js.map +1 -0
  31. package/dist/dashboard/{agent-enrichment-Cq0P1cNZ.js → agent-enrichment-dLeGE1fX.js} +1 -1
  32. package/dist/dashboard/{agents-YyO6t5Xa.js → agents-DCpQQ_W5.js} +14 -14
  33. package/dist/dashboard/{agents-YyO6t5Xa.js.map → agents-DCpQQ_W5.js.map} +1 -1
  34. package/dist/dashboard/{agents-BVBVCyat.js → agents-Dgh2TjSp.js} +1 -1
  35. package/dist/dashboard/{archive-planning-h-hAjk0P.js → archive-planning-BmW9UDTr.js} +3 -3
  36. package/dist/dashboard/{archive-planning-h-hAjk0P.js.map → archive-planning-BmW9UDTr.js.map} +1 -1
  37. package/dist/dashboard/{archive-planning-CScs1MOC.js → archive-planning-C3Ebf9yC.js} +1 -1
  38. package/dist/dashboard/{beads-qNB0yAHV.js → beads-Bv-AdX7G.js} +3 -3
  39. package/dist/dashboard/{beads-qNB0yAHV.js.map → beads-Bv-AdX7G.js.map} +1 -1
  40. package/dist/dashboard/{beads-D_FRedEJ.js → beads-By6-X07V.js} +1 -1
  41. package/dist/dashboard/clean-planning-D60L8rPY.js +2 -0
  42. package/dist/dashboard/{clean-planning-qafw99vY.js → clean-planning-VEJu5suh.js} +2 -2
  43. package/dist/dashboard/{clean-planning-qafw99vY.js.map → clean-planning-VEJu5suh.js.map} +1 -1
  44. package/dist/dashboard/close-issue-C2KeSKKJ.js +2 -0
  45. package/dist/dashboard/{close-issue-DfIggeZD.js → close-issue-DtKdsSTm.js} +11 -7
  46. package/dist/dashboard/close-issue-DtKdsSTm.js.map +1 -0
  47. package/dist/dashboard/compact-beads-C7BN5N11.js +2 -0
  48. package/dist/dashboard/{compact-beads-Dt0qTqsC.js → compact-beads-D8Vt3qyv.js} +2 -2
  49. package/dist/dashboard/{compact-beads-Dt0qTqsC.js.map → compact-beads-D8Vt3qyv.js.map} +1 -1
  50. package/dist/dashboard/{config-CUREjHP7.js → config-CDkGjnwy.js} +2 -2
  51. package/dist/dashboard/{config-CUREjHP7.js.map → config-CDkGjnwy.js.map} +1 -1
  52. package/dist/dashboard/{config-BeI3uy-8.js → config-CTXkBATQ.js} +1 -1
  53. package/dist/dashboard/{database-CozA13Wy.js → database-DhqASALP.js} +1 -1
  54. package/dist/dashboard/{database-C0y0hXBx.js → database-cxmQryoh.js} +2 -2
  55. package/dist/dashboard/{database-C0y0hXBx.js.map → database-cxmQryoh.js.map} +1 -1
  56. package/dist/dashboard/{dist-src-oG2iHzgI.js → dist-src-DTm11oQr.js} +1 -1
  57. package/dist/dashboard/{dist-src-oG2iHzgI.js.map → dist-src-DTm11oQr.js.map} +1 -1
  58. package/dist/dashboard/{event-store-D7kLBd07.js → event-store-VWWUmOfn.js} +1 -1
  59. package/dist/dashboard/{event-store-O9q0Gweh.js → event-store-vSmAA3Zp.js} +9 -4
  60. package/dist/dashboard/event-store-vSmAA3Zp.js.map +1 -0
  61. package/dist/dashboard/{factory-BnLdiQW-.js → factory-C8nhLGHB.js} +3 -3
  62. package/dist/dashboard/{factory-BnLdiQW-.js.map → factory-C8nhLGHB.js.map} +1 -1
  63. package/dist/dashboard/{feedback-writer-DyovUANg.js → feedback-writer-CudSe1WK.js} +2 -2
  64. package/dist/dashboard/{feedback-writer-DyovUANg.js.map → feedback-writer-CudSe1WK.js.map} +1 -1
  65. package/dist/dashboard/{feedback-writer-gSUv_W0h.js → feedback-writer-Wgv1cd1r.js} +1 -1
  66. package/dist/dashboard/{git-utils-BJRioREj.js → git-utils-C1m4SwAe.js} +1 -1
  67. package/dist/dashboard/{git-utils-BJRioREj.js.map → git-utils-C1m4SwAe.js.map} +1 -1
  68. package/dist/dashboard/{git-utils-BtCRddq3.js → git-utils-DQI8EYoj.js} +1 -1
  69. package/dist/dashboard/{github-app-XO-LBUGk.js → github-app-DClWjjHr.js} +1 -1
  70. package/dist/dashboard/{github-app-XO-LBUGk.js.map → github-app-DClWjjHr.js.map} +1 -1
  71. package/dist/dashboard/{health-events-db-584nYgJB.js → health-events-db-BMXQfInV.js} +1 -1
  72. package/dist/dashboard/{health-events-db-B3ChzN65.js → health-events-db-Do4NrOhC.js} +2 -2
  73. package/dist/dashboard/{health-events-db-B3ChzN65.js.map → health-events-db-Do4NrOhC.js.map} +1 -1
  74. package/dist/dashboard/{hooks-CKhs3N68.js → hooks-CB4T47NC.js} +1 -1
  75. package/dist/dashboard/{hooks-CErbP8Oq.js → hooks-CjqXOlNb.js} +2 -2
  76. package/dist/dashboard/{hooks-CErbP8Oq.js.map → hooks-CjqXOlNb.js.map} +1 -1
  77. package/dist/dashboard/hume-CA2pftu_.js +3 -0
  78. package/dist/dashboard/{hume-CX_U3Qha.js → hume-JsAlMOJC.js} +2 -2
  79. package/dist/dashboard/{hume-CX_U3Qha.js.map → hume-JsAlMOJC.js.map} +1 -1
  80. package/dist/dashboard/{inspect-agent-B57kGDUV.js → inspect-agent-7eour7EA.js} +3 -3
  81. package/dist/dashboard/{inspect-agent-B57kGDUV.js.map → inspect-agent-7eour7EA.js.map} +1 -1
  82. package/dist/dashboard/{io-yGovuG4U.js → io-CWlFW78i.js} +1 -1
  83. package/dist/dashboard/{io-AJg-mzFi.js → io-DKS6359z.js} +1 -1
  84. package/dist/dashboard/{io-AJg-mzFi.js.map → io-DKS6359z.js.map} +1 -1
  85. package/dist/dashboard/issue-id-vwYJdsf8.js +62 -0
  86. package/dist/dashboard/issue-id-vwYJdsf8.js.map +1 -0
  87. package/dist/dashboard/{issue-service-singleton-DQK42EqH.js → issue-service-singleton-Co__-6kL.js} +1 -1
  88. package/dist/dashboard/{issue-service-singleton-sb2HkB9f.js → issue-service-singleton-Wv4xBm3y.js} +7 -7
  89. package/dist/dashboard/{issue-service-singleton-sb2HkB9f.js.map → issue-service-singleton-Wv4xBm3y.js.map} +1 -1
  90. package/dist/dashboard/{label-cleanup-CZEsbtq9.js → label-cleanup-nVKTmIIW.js} +7 -4
  91. package/dist/dashboard/label-cleanup-nVKTmIIW.js.map +1 -0
  92. package/dist/dashboard/lifecycle-BcUmtkR4.js +7 -0
  93. package/dist/dashboard/{merge-agent-GLtMEsTu.js → merge-agent-CGN3TT0a.js} +1 -1
  94. package/dist/dashboard/{merge-agent-twroFuAh.js → merge-agent-yudQOPZc.js} +148 -46
  95. package/dist/dashboard/merge-agent-yudQOPZc.js.map +1 -0
  96. package/dist/dashboard/{paths-COdEvoXR.js → paths-BDyJ7BiV.js} +19 -2
  97. package/dist/dashboard/{paths-COdEvoXR.js.map → paths-BDyJ7BiV.js.map} +1 -1
  98. package/dist/dashboard/{pipeline-notifier-DM5AHG5Q.js → pipeline-notifier-CCSN-jar.js} +1 -1
  99. package/dist/dashboard/{pipeline-notifier-DM5AHG5Q.js.map → pipeline-notifier-CCSN-jar.js.map} +1 -1
  100. package/dist/dashboard/{plan-utils-BkCIhn3B.js → plan-utils-Bkcsqr_s.js} +3 -3
  101. package/dist/dashboard/{plan-utils-BkCIhn3B.js.map → plan-utils-Bkcsqr_s.js.map} +1 -1
  102. package/dist/dashboard/{prd-draft-D09Afalc.js → prd-draft-BD8oMkZ1.js} +2 -2
  103. package/dist/dashboard/{prd-draft-D09Afalc.js.map → prd-draft-BD8oMkZ1.js.map} +1 -1
  104. package/dist/dashboard/{projection-cache-DQ9zegkK.js → projection-cache-C0EL8s8h.js} +1 -1
  105. package/dist/dashboard/{projection-cache-DQ9zegkK.js.map → projection-cache-C0EL8s8h.js.map} +1 -1
  106. package/dist/dashboard/{projects-DyT3vSy-.js → projects-C5ozxjwP.js} +1 -1
  107. package/dist/dashboard/{projects-Cq3TWdPS.js → projects-CFVl4oHn.js} +25 -13
  108. package/dist/dashboard/projects-CFVl4oHn.js.map +1 -0
  109. package/dist/dashboard/{providers-Ck2sQd_F.js → providers-B5Y4H2Mg.js} +4 -4
  110. package/dist/dashboard/providers-B5Y4H2Mg.js.map +1 -0
  111. package/dist/dashboard/{providers-DVQnDekG.js → providers-csVZVPkE.js} +1 -1
  112. package/dist/dashboard/public/assets/{dist-CCJbQrSB.js → dist-BaQPC-c6.js} +1 -1
  113. package/dist/dashboard/public/assets/index-ByLmYGhW.js +212 -0
  114. package/dist/dashboard/public/assets/index-OEEbThNN.css +1 -0
  115. package/dist/dashboard/public/index.html +2 -2
  116. package/dist/dashboard/rally-6McpKKRa.js +3 -0
  117. package/dist/dashboard/{rally-Cwuae-4C.js → rally-YjFRxIiC.js} +2 -2
  118. package/dist/dashboard/{rally-Cwuae-4C.js.map → rally-YjFRxIiC.js.map} +1 -1
  119. package/dist/dashboard/{rally-api-DSUxm7EO.js → rally-api-C0WqCSkT.js} +1 -1
  120. package/dist/dashboard/{rally-api-DSUxm7EO.js.map → rally-api-C0WqCSkT.js.map} +1 -1
  121. package/dist/dashboard/{rally-api-CEH5KZi4.js → rally-api-DNttdCW4.js} +1 -1
  122. package/dist/dashboard/{remote-BHTTMpJJ.js → remote-Cigqjj3f.js} +2 -2
  123. package/dist/dashboard/{remote-BXo_iIku.js → remote-ObpNZ7hF.js} +2 -2
  124. package/dist/dashboard/{remote-BXo_iIku.js.map → remote-ObpNZ7hF.js.map} +1 -1
  125. package/dist/dashboard/{remote-agents-CTKVhFFY.js → remote-agents-Bf3GuM7t.js} +1 -1
  126. package/dist/dashboard/{remote-agents-C0_0LLNd.js → remote-agents-DFyjT1Le.js} +1 -1
  127. package/dist/dashboard/{remote-agents-C0_0LLNd.js.map → remote-agents-DFyjT1Le.js.map} +1 -1
  128. package/dist/dashboard/{review-status-CK3eBGyb.js → review-status-BtXqWBhS.js} +1 -1
  129. package/dist/dashboard/{review-status-CV55Tl-n.js → review-status-Bymwzh2i.js} +44 -4
  130. package/dist/dashboard/{review-status-CV55Tl-n.js.map → review-status-Bymwzh2i.js.map} +1 -1
  131. package/dist/dashboard/server.js +565 -265
  132. package/dist/dashboard/server.js.map +1 -1
  133. package/dist/dashboard/{settings-CuHV-wcv.js → settings-BHlDG7TK.js} +2 -2
  134. package/dist/dashboard/settings-BHlDG7TK.js.map +1 -0
  135. package/dist/dashboard/settings-XWvDcj-D.js +2 -0
  136. package/dist/dashboard/{shadow-engineering-BUeZunaE.js → shadow-engineering-lIn1W_95.js} +1 -1
  137. package/dist/dashboard/{shadow-engineering-BUeZunaE.js.map → shadow-engineering-lIn1W_95.js.map} +1 -1
  138. package/dist/dashboard/{shadow-state-DHQ-kASN.js → shadow-state-BIexcxkv.js} +1 -1
  139. package/dist/dashboard/{shadow-state-DHQ-kASN.js.map → shadow-state-BIexcxkv.js.map} +1 -1
  140. package/dist/dashboard/{spawn-planning-session-8FFAqLdK.js → spawn-planning-session-33Jf-d5T.js} +6 -6
  141. package/dist/dashboard/{spawn-planning-session-8FFAqLdK.js.map → spawn-planning-session-33Jf-d5T.js.map} +1 -1
  142. package/dist/dashboard/{spawn-planning-session-U0Lqpjen.js → spawn-planning-session-D5hrVdWM.js} +1 -1
  143. package/dist/dashboard/{specialist-context-ColzlmGE.js → specialist-context-DGukHSn8.js} +6 -6
  144. package/dist/dashboard/{specialist-context-ColzlmGE.js.map → specialist-context-DGukHSn8.js.map} +1 -1
  145. package/dist/dashboard/{specialist-logs-BhmDpFIq.js → specialist-logs-CIw4qfTy.js} +1 -1
  146. package/dist/dashboard/{specialists-C6s3U6tX.js → specialists-B_zrayaP.js} +37 -36
  147. package/dist/dashboard/specialists-B_zrayaP.js.map +1 -0
  148. package/dist/dashboard/{specialists-Cny632-T.js → specialists-Cp-PgspS.js} +1 -1
  149. package/dist/dashboard/{test-agent-queue-tqI4VDsu.js → test-agent-queue-ypF_ecHo.js} +4 -4
  150. package/dist/dashboard/{test-agent-queue-tqI4VDsu.js.map → test-agent-queue-ypF_ecHo.js.map} +1 -1
  151. package/dist/dashboard/{tldr-daemon-BNFyS7W_.js → tldr-daemon-B_oLRD8z.js} +2 -2
  152. package/dist/dashboard/{tldr-daemon-BNFyS7W_.js.map → tldr-daemon-B_oLRD8z.js.map} +1 -1
  153. package/dist/dashboard/{tldr-daemon-A6JqC59u.js → tldr-daemon-Cfs0bXTi.js} +1 -1
  154. package/dist/dashboard/{tmux-DYGAVJfb.js → tmux-BzxdKItf.js} +1 -1
  155. package/dist/dashboard/{tmux-IlN1Slv-.js → tmux-LwG0tHhU.js} +2 -2
  156. package/dist/dashboard/{tmux-IlN1Slv-.js.map → tmux-LwG0tHhU.js.map} +1 -1
  157. package/dist/dashboard/{tracker-config-BzNLnmcE.js → tracker-config-BP59uH4V.js} +1 -1
  158. package/dist/dashboard/{tracker-config-CNM_5rEf.js → tracker-config-e7ph1QqT.js} +2 -2
  159. package/dist/dashboard/{tracker-config-CNM_5rEf.js.map → tracker-config-e7ph1QqT.js.map} +1 -1
  160. package/dist/dashboard/{tunnel-D2BkwU7k.js → tunnel-0RzzuXPf.js} +1 -1
  161. package/dist/dashboard/{tunnel-Dub2hiAA.js → tunnel-DldbBPWL.js} +2 -2
  162. package/dist/dashboard/{tunnel-Dub2hiAA.js.map → tunnel-DldbBPWL.js.map} +1 -1
  163. package/dist/dashboard/{types-CWA-o4UN.js → types-RKZjGE5N.js} +1 -1
  164. package/dist/dashboard/{types-CWA-o4UN.js.map → types-RKZjGE5N.js.map} +1 -1
  165. package/dist/dashboard/{vtt-parser-BAXygRf0.js → vtt-parser-99vFekRQ.js} +1 -1
  166. package/dist/dashboard/{vtt-parser-BAXygRf0.js.map → vtt-parser-99vFekRQ.js.map} +1 -1
  167. package/dist/dashboard/{work-agent-prompt-JYq_OugP.js → work-agent-prompt-fCg67nyo.js} +65 -10
  168. package/dist/dashboard/{work-agent-prompt-JYq_OugP.js.map → work-agent-prompt-fCg67nyo.js.map} +1 -1
  169. package/dist/dashboard/{work-type-router-Cxp8_ur2.js → work-type-router-CWVW2Wk_.js} +1 -1
  170. package/dist/dashboard/{work-type-router-Cxp8_ur2.js.map → work-type-router-CWVW2Wk_.js.map} +1 -1
  171. package/dist/dashboard/{work-type-router-Com2amST.js → work-type-router-Di5gCQwh.js} +1 -1
  172. package/dist/dashboard/{workflows-N1UTipYl.js → workflows-BSMipN07.js} +35 -17
  173. package/dist/dashboard/workflows-BSMipN07.js.map +1 -0
  174. package/dist/dashboard/workflows-DaYWQIS2.js +2 -0
  175. package/dist/dashboard/{workspace-config-cmp5_ipD.js → workspace-config-DVDR-Ukh.js} +1 -1
  176. package/dist/dashboard/workspace-config-DVDR-Ukh.js.map +1 -0
  177. package/dist/dashboard/{workspace-manager-CjpWPgzL.js → workspace-manager-BYfzs_t2.js} +1 -1
  178. package/dist/dashboard/{workspace-manager-D_y9ZmW_.js → workspace-manager-C7OfT62A.js} +44 -24
  179. package/dist/dashboard/workspace-manager-C7OfT62A.js.map +1 -0
  180. package/dist/{dns-BKzHm-2q.js → dns-D_aKQJjb.js} +1 -1
  181. package/dist/{dns-DZwOWvVO.js → dns-Yxq4NNS7.js} +1 -1
  182. package/dist/{dns-DZwOWvVO.js.map → dns-Yxq4NNS7.js.map} +1 -1
  183. package/dist/{factory-DFu3IT4r.js → factory-BRBGw6OB.js} +1 -1
  184. package/dist/{factory-DfzczxN1.js → factory-DzsOiZVc.js} +3 -3
  185. package/dist/{factory-DfzczxN1.js.map → factory-DzsOiZVc.js.map} +1 -1
  186. package/dist/{feedback-writer-CwdnOkPO.js → feedback-writer-ygXN5F9N.js} +2 -2
  187. package/dist/{feedback-writer-CwdnOkPO.js.map → feedback-writer-ygXN5F9N.js.map} +1 -1
  188. package/dist/{github-app-CHKwxOeQ.js → github-app-DykduJ0X.js} +1 -1
  189. package/dist/{github-app-CHKwxOeQ.js.map → github-app-DykduJ0X.js.map} +1 -1
  190. package/dist/hume-9nv1VmMV.js +3 -0
  191. package/dist/{hume-DnV-tDsh.js → hume-DoCbph2h.js} +2 -2
  192. package/dist/{hume-DnV-tDsh.js.map → hume-DoCbph2h.js.map} +1 -1
  193. package/dist/index.d.ts +17 -2
  194. package/dist/index.d.ts.map +1 -1
  195. package/dist/index.js +8 -7
  196. package/dist/issue-id-CAcekoIw.js +62 -0
  197. package/dist/issue-id-CAcekoIw.js.map +1 -0
  198. package/dist/{label-cleanup-31ElPqqv.js → label-cleanup-C8R9Rspn.js} +7 -4
  199. package/dist/label-cleanup-C8R9Rspn.js.map +1 -0
  200. package/dist/{manifest-DL0oDbpv.js → manifest-B4ghOD-V.js} +1 -1
  201. package/dist/{manifest-DL0oDbpv.js.map → manifest-B4ghOD-V.js.map} +1 -1
  202. package/dist/{merge-agent-VQH9z9t8.js → merge-agent-DlUiUanN.js} +86 -33
  203. package/dist/merge-agent-DlUiUanN.js.map +1 -0
  204. package/dist/{paths-lMaxrYtT.js → paths-CDJ_HsbN.js} +19 -2
  205. package/dist/{paths-lMaxrYtT.js.map → paths-CDJ_HsbN.js.map} +1 -1
  206. package/dist/{pipeline-notifier-OJ-d3Y60.js → pipeline-notifier-XgDdCdvT.js} +1 -1
  207. package/dist/{pipeline-notifier-OJ-d3Y60.js.map → pipeline-notifier-XgDdCdvT.js.map} +1 -1
  208. package/dist/{projects-CvLepaxC.js → projects-Bk-5QhFQ.js} +25 -13
  209. package/dist/projects-Bk-5QhFQ.js.map +1 -0
  210. package/dist/{projects-DMWmPeIU.js → projects-DhU7rAVN.js} +1 -1
  211. package/dist/{providers-DcCPZ5K4.js → providers-DSU1vfQF.js} +4 -4
  212. package/dist/providers-DSU1vfQF.js.map +1 -0
  213. package/dist/rally-DdPvGa-w.js +3 -0
  214. package/dist/{rally-uUUZXp1h.js → rally-Dy00NElU.js} +1 -1
  215. package/dist/{rally-uUUZXp1h.js.map → rally-Dy00NElU.js.map} +1 -1
  216. package/dist/{remote-CkLBqLJc.js → remote-CYiOJg0q.js} +2 -2
  217. package/dist/{remote-CkLBqLJc.js.map → remote-CYiOJg0q.js.map} +1 -1
  218. package/dist/{remote-agents-C5Bd2fgt.js → remote-agents-CZXrUF4f.js} +1 -1
  219. package/dist/{remote-agents-C5Bd2fgt.js.map → remote-agents-CZXrUF4f.js.map} +1 -1
  220. package/dist/{remote-agents-BTzD-wMQ.js → remote-agents-ycHHVsgf.js} +1 -1
  221. package/dist/{remote-workspace-Dxghqiti.js → remote-workspace-CA33UuVI.js} +4 -4
  222. package/dist/{remote-workspace-Dxghqiti.js.map → remote-workspace-CA33UuVI.js.map} +1 -1
  223. package/dist/{review-status-2TdtHNcs.js → review-status-D6H2WOw8.js} +1 -1
  224. package/dist/{review-status-Bm1bWNEa.js → review-status-DEDvCKMP.js} +44 -4
  225. package/dist/{review-status-Bm1bWNEa.js.map → review-status-DEDvCKMP.js.map} +1 -1
  226. package/dist/{tracker-C_62ukEq.js → settings-BcWPTrua.js} +7 -199
  227. package/dist/settings-BcWPTrua.js.map +1 -0
  228. package/dist/shadow-state-BZzxfEGw.js +2 -0
  229. package/dist/{shadow-state-CFFHf05M.js → shadow-state-CE3dQfll.js} +1 -1
  230. package/dist/{shadow-state-CFFHf05M.js.map → shadow-state-CE3dQfll.js.map} +1 -1
  231. package/dist/{specialist-context-BdNFsfMG.js → specialist-context-BAUWL1Fl.js} +6 -6
  232. package/dist/{specialist-context-BdNFsfMG.js.map → specialist-context-BAUWL1Fl.js.map} +1 -1
  233. package/dist/{specialist-logs-CLztE_bE.js → specialist-logs-DQKKQV9B.js} +1 -1
  234. package/dist/{specialists-aUoUVWsN.js → specialists-Bfb9ATzw.js} +1 -1
  235. package/dist/{specialists-DEKqgkxp.js → specialists-D7Kj5o6s.js} +35 -34
  236. package/dist/specialists-D7Kj5o6s.js.map +1 -0
  237. package/dist/sync-DMfgd389.js +693 -0
  238. package/dist/sync-DMfgd389.js.map +1 -0
  239. package/dist/sync-TL6y-8K6.js +2 -0
  240. package/dist/{tldr-daemon-BCEFPItr.js → tldr-daemon-CFx4LXAl.js} +2 -2
  241. package/dist/{tldr-daemon-BCEFPItr.js.map → tldr-daemon-CFx4LXAl.js.map} +1 -1
  242. package/dist/{tldr-daemon-xBAx4cBE.js → tldr-daemon-D_EooADG.js} +1 -1
  243. package/dist/{tmux-DN6H886Y.js → tmux-CBtui_Cl.js} +1 -1
  244. package/dist/{tmux-CKdNxxJx.js → tmux-D6Ah4I8z.js} +2 -2
  245. package/dist/{tmux-CKdNxxJx.js.map → tmux-D6Ah4I8z.js.map} +1 -1
  246. package/dist/tracker-BhYYvU3p.js +198 -0
  247. package/dist/tracker-BhYYvU3p.js.map +1 -0
  248. package/dist/{tracker-utils-CVU2W1sX.js → tracker-utils-ChQyut8w.js} +34 -12
  249. package/dist/tracker-utils-ChQyut8w.js.map +1 -0
  250. package/dist/{traefik-DHgBoWXX.js → traefik-C80EbDu_.js} +4 -4
  251. package/dist/{traefik-DHgBoWXX.js.map → traefik-C80EbDu_.js.map} +1 -1
  252. package/dist/{traefik-BR-edbZv.js → traefik-CgHl7Bge.js} +1 -1
  253. package/dist/{tunnel-BZO9Q5oe.js → tunnel-DXOJ1wMM.js} +1 -1
  254. package/dist/{tunnel-Bl1qNSyQ.js → tunnel-DzXEPwIc.js} +2 -2
  255. package/dist/{tunnel-Bl1qNSyQ.js.map → tunnel-DzXEPwIc.js.map} +1 -1
  256. package/dist/{types-DewGdaIP.js → types-BhJj1SP1.js} +1 -1
  257. package/dist/{types-DewGdaIP.js.map → types-BhJj1SP1.js.map} +1 -1
  258. package/dist/{work-type-router-CS2BB1vS.js → work-type-router-CHjciPyS.js} +3 -3
  259. package/dist/{work-type-router-CS2BB1vS.js.map → work-type-router-CHjciPyS.js.map} +1 -1
  260. package/dist/{workspace-config-CNXOpKuj.js → workspace-config-fUafvYMp.js} +1 -1
  261. package/dist/workspace-config-fUafvYMp.js.map +1 -0
  262. package/dist/workspace-manager-B9jS4Dsq.js +3 -0
  263. package/dist/{workspace-manager-CncdZkIy.js → workspace-manager-DuLhnzJV.js} +112 -27
  264. package/dist/workspace-manager-DuLhnzJV.js.map +1 -0
  265. package/package.json +2 -1
  266. package/scripts/post-merge-deploy.sh +25 -5
  267. package/scripts/record-cost-event.js +57 -7
  268. package/scripts/record-cost-event.js.map +1 -1
  269. package/skills/pan-help/SKILL.md +1 -1
  270. package/skills/pan-sync/SKILL.md +6 -6
  271. package/skills/workspace-add-repo/skill.md +46 -0
  272. package/templates/claude-md/sections/warnings.md +15 -2
  273. package/dist/clean-planning-sZXvy3Y5.js +0 -2
  274. package/dist/close-issue-Dml437qV.js +0 -2
  275. package/dist/close-issue-Dr7yZmrr.js.map +0 -1
  276. package/dist/compact-beads-iu218JcO.js +0 -2
  277. package/dist/dashboard/agent-enrichment-C67LJBgD.js.map +0 -1
  278. package/dist/dashboard/clean-planning-DCu3cOTu.js +0 -2
  279. package/dist/dashboard/close-issue-DfIggeZD.js.map +0 -1
  280. package/dist/dashboard/close-issue-DwdwYtar.js +0 -2
  281. package/dist/dashboard/compact-beads-DXY2fK2s.js +0 -2
  282. package/dist/dashboard/event-store-O9q0Gweh.js.map +0 -1
  283. package/dist/dashboard/hume-MZndNDVU.js +0 -3
  284. package/dist/dashboard/label-cleanup-CZEsbtq9.js.map +0 -1
  285. package/dist/dashboard/lifecycle-ZTYdrr2O.js +0 -7
  286. package/dist/dashboard/merge-agent-twroFuAh.js.map +0 -1
  287. package/dist/dashboard/projects-Cq3TWdPS.js.map +0 -1
  288. package/dist/dashboard/providers-Ck2sQd_F.js.map +0 -1
  289. package/dist/dashboard/public/assets/index-CpSmB2ts.css +0 -1
  290. package/dist/dashboard/public/assets/index-yarWhi0M.js +0 -214
  291. package/dist/dashboard/rally-CQ1OBJrJ.js +0 -3
  292. package/dist/dashboard/settings-CuHV-wcv.js.map +0 -1
  293. package/dist/dashboard/settings-DMeGBRsk.js +0 -2
  294. package/dist/dashboard/specialists-C6s3U6tX.js.map +0 -1
  295. package/dist/dashboard/workflows-B2ARUpOa.js +0 -2
  296. package/dist/dashboard/workflows-N1UTipYl.js.map +0 -1
  297. package/dist/dashboard/workspace-config-cmp5_ipD.js.map +0 -1
  298. package/dist/dashboard/workspace-manager-D_y9ZmW_.js.map +0 -1
  299. package/dist/hume-BjmwmJ9E.js +0 -3
  300. package/dist/label-cleanup-31ElPqqv.js.map +0 -1
  301. package/dist/merge-agent-VQH9z9t8.js.map +0 -1
  302. package/dist/projects-CvLepaxC.js.map +0 -1
  303. package/dist/providers-DcCPZ5K4.js.map +0 -1
  304. package/dist/rally-DR9x8--6.js +0 -3
  305. package/dist/shadow-state-p3jpGRPJ.js +0 -2
  306. package/dist/specialists-DEKqgkxp.js.map +0 -1
  307. package/dist/tracker-C_62ukEq.js.map +0 -1
  308. package/dist/tracker-utils-CVU2W1sX.js.map +0 -1
  309. package/dist/workspace-config-CNXOpKuj.js.map +0 -1
  310. package/dist/workspace-manager-CncdZkIy.js.map +0 -1
  311. package/dist/workspace-manager-Cx0r2Jnv.js +0 -3
@@ -0,0 +1 @@
1
+ {"version":3,"file":"merge-agent-DlUiUanN.js","names":["execAsync","execAsync"],"sources":["../src/dashboard/server/event-store.ts","../src/lib/activity-logger.ts","../src/lib/cloister/validation.ts","../src/lib/git-utils.ts","../src/lib/cloister/merge-agent.ts"],"sourcesContent":["/**\n * Event Store — SQLite-backed append-only event log with PubSub (PAN-428)\n *\n * - Persists domain events to panopticon.db `events` table\n * - In-memory PubSub for live streaming to WebSocket clients\n * - Monotonic, gap-free sequence numbers (SQLite AUTOINCREMENT)\n * - 7-day retention with startup compaction\n * - Dual-runtime: bun:sqlite on Bun, better-sqlite3 on Node\n *\n * Usage:\n * const store = await initEventStore();\n * const seq = store.append({ type: 'agent.started', ... });\n * const past = store.readFrom(0);\n * const unsub = store.subscribe(event => console.log(event));\n */\n\nimport { EventEmitter } from 'node:events';\nimport { existsSync, mkdirSync } from 'node:fs';\nimport { join } from 'node:path';\nimport { getPanopticonHome } from '../../lib/paths.js';\nimport type { DomainEvent } from '@panopticon/contracts';\n\n// ─── Types ────────────────────────────────────────────────────────────────────\n\nexport interface StoredEvent {\n sequence: number;\n type: string;\n timestamp: string;\n payload: unknown;\n}\n\nexport type EventSubscriber = (event: StoredEvent) => void;\nexport type Unsubscribe = () => void;\n\nexport interface EventStore {\n /** Append a domain event. Returns the assigned sequence number. */\n append(event: Omit<DomainEvent, 'sequence'>): number;\n /** Return all events with sequence > fromSequence (exclusive lower bound). */\n readFrom(fromSequence: number): StoredEvent[];\n /** Return events of a given type, most recent first, capped at limit. */\n queryByType(type: string, limit?: number): StoredEvent[];\n /** Subscribe to live events. Returns an unsubscribe function. 
*/\n subscribe(fn: EventSubscriber): Unsubscribe;\n /** Run 7-day retention compaction. Called at startup. */\n compact(): void;\n /** Return the highest sequence number in the store (0 if empty). */\n getLatestSequence(): number;\n}\n\n// ─── Minimal DB interface (compatible with bun:sqlite and better-sqlite3) ────\n//\n// Uses positional parameters (?) in all SQL to avoid runtime differences in\n// named-parameter binding syntax:\n// - bun:sqlite requires sigil in binding keys: { $name: value }\n// - better-sqlite3 requires no sigil: { name: value }\n// Positional parameters + arrays work identically in both runtimes.\n\ninterface PreparedStatement<R = Record<string, unknown>> {\n run(params?: unknown[]): { changes: number };\n get(params?: unknown[]): R | undefined | null;\n all(params?: unknown[]): R[];\n}\n\nexport interface DbAdapter {\n prepare<R = Record<string, unknown>>(sql: string): PreparedStatement<R>;\n exec(sql: string): void;\n}\n\n// ─── Row shape from SQLite ─────────────────────────────────────────────────────\n\ninterface EventRow {\n sequence: number;\n type: string;\n timestamp: string;\n payload: string;\n}\n\nfunction rowToStored(row: EventRow): StoredEvent {\n return {\n sequence: row.sequence,\n type: row.type,\n timestamp: row.timestamp,\n payload: JSON.parse(row.payload),\n };\n}\n\n// ─── Runtime-aware DB initializer ────────────────────────────────────────────\n\ndeclare const Bun: unknown;\n\n/**\n * Open the panopticon.db database using the appropriate driver for the runtime.\n * Under Bun: uses bun:sqlite (native, no native addons needed).\n * Under Node: uses the shared getDatabase() which applies migrations.\n */\nexport async function openEventDb(): Promise<DbAdapter> {\n const home = getPanopticonHome();\n if (!existsSync(home)) {\n mkdirSync(home, { recursive: true });\n }\n const dbPath = join(home, 'panopticon.db');\n\n if (typeof Bun !== 'undefined') {\n const { Database } = await import('bun:sqlite');\n const db = new 
Database(dbPath, { create: true });\n db.exec('PRAGMA journal_mode = WAL');\n db.exec('PRAGMA foreign_keys = ON');\n db.exec('PRAGMA synchronous = NORMAL');\n // Ensure required tables exist (Bun doesn't run the shared schema migrations)\n db.exec(`\n CREATE TABLE IF NOT EXISTS events (\n sequence INTEGER PRIMARY KEY AUTOINCREMENT,\n type TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n payload TEXT NOT NULL DEFAULT '{}'\n )\n `);\n db.exec(`CREATE INDEX IF NOT EXISTS events_timestamp_idx ON events (timestamp)`);\n db.exec(`\n CREATE TABLE IF NOT EXISTS projection_cache (\n key TEXT PRIMARY KEY,\n data TEXT NOT NULL,\n sequence INTEGER NOT NULL,\n updated_at TEXT NOT NULL\n )\n `);\n return db as unknown as DbAdapter;\n } else {\n // Node.js: use shared database connection — migrations run there\n const { getDatabase } = await import('../../lib/database/index.js');\n return getDatabase() as unknown as DbAdapter;\n }\n}\n\n// ─── Factory ──────────────────────────────────────────────────────────────────\n\n/**\n * Create an EventStore from a pre-opened DbAdapter.\n * Call openEventDb() first to get the adapter.\n */\nexport function createEventStore(db: DbAdapter): EventStore {\n const emitter = new EventEmitter();\n // Allow many subscribers (one per WebSocket connection)\n emitter.setMaxListeners(0);\n\n // Prepared statements for hot path performance.\n // All SQL uses positional parameters (?) — both bun:sqlite and better-sqlite3\n // accept arrays for positional bindings with no runtime-specific differences.\n const insertStmt = db.prepare<void>(\n `INSERT INTO events (type, timestamp, payload) VALUES (?, ?, ?)`,\n );\n const readFromStmt = db.prepare<EventRow>(\n `SELECT sequence, type, timestamp, payload FROM events WHERE sequence > ? 
ORDER BY sequence ASC`,\n );\n const compactStmt = db.prepare<void>(\n `DELETE FROM events WHERE timestamp < ?`,\n );\n const latestSeqStmt = db.prepare<{ seq: number | null }>(\n `SELECT MAX(sequence) AS seq FROM events`,\n );\n const lastRowIdStmt = db.prepare<{ sequence: number }>(\n `SELECT last_insert_rowid() AS sequence`,\n );\n const queryByTypeStmt = db.prepare<EventRow>(\n `SELECT sequence, type, timestamp, payload FROM events WHERE type = ? ORDER BY sequence DESC LIMIT ?`,\n );\n\n function append(event: Omit<DomainEvent, 'sequence'>): number {\n const timestamp =\n (event as Record<string, unknown>)['timestamp'] as string ?? new Date().toISOString();\n const payload = JSON.stringify((event as Record<string, unknown>)['payload'] ?? {});\n\n insertStmt.run([event.type, timestamp, payload]);\n\n const row = lastRowIdStmt.get();\n const sequence = row?.sequence ?? 0;\n\n const stored: StoredEvent = {\n sequence,\n type: event.type,\n timestamp,\n payload: (event as Record<string, unknown>)['payload'] ?? {},\n };\n\n emitter.emit('event', stored);\n return sequence;\n }\n\n function readFrom(fromSequence: number): StoredEvent[] {\n const rows = readFromStmt.all([fromSequence]);\n return rows.map(rowToStored);\n }\n\n function subscribe(fn: EventSubscriber): Unsubscribe {\n emitter.on('event', fn);\n return () => emitter.off('event', fn);\n }\n\n function compact(): void {\n const sevenDaysAgo = new Date(Date.now() - 7 * 24 * 60 * 60 * 1000).toISOString();\n const result = compactStmt.run([sevenDaysAgo]);\n if (result.changes > 0) {\n console.log(`[event-store] Compacted ${result.changes} events older than 7 days`);\n }\n }\n\n function getLatestSequence(): number {\n const row = latestSeqStmt.get();\n return row?.seq ?? 
0;\n }\n\n function queryByType(type: string, limit = 100): StoredEvent[] {\n const rows = queryByTypeStmt.all([type, limit]);\n // Return most-recent-first (ORDER BY sequence DESC)\n return rows.map(rowToStored).reverse();\n }\n\n return { append, readFrom, queryByType, subscribe, compact, getLatestSequence };\n}\n\n// ─── Module-level singleton ───────────────────────────────────────────────────\n\nlet _store: EventStore | null = null;\nlet _db: DbAdapter | null = null;\nlet _initPromise: Promise<EventStore> | null = null;\n\n/**\n * Initialize and return the process-singleton EventStore (async, Bun-compatible).\n * Idempotent — returns the same store on subsequent calls.\n */\nexport async function initEventStore(): Promise<EventStore> {\n if (_store) return _store;\n if (_initPromise) return _initPromise;\n\n _initPromise = openEventDb().then((db) => {\n _db = db;\n const store = createEventStore(db);\n store.compact();\n _store = store;\n // Initialize projection cache with same DB connection\n import('./services/projection-cache.js').then(({ initProjectionCache }) => {\n initProjectionCache(db);\n }).catch(() => { /* module not available yet */ });\n return store;\n });\n\n return _initPromise;\n}\n\n/**\n * Return the shared DbAdapter after initEventStore() has resolved.\n * Used by services that need access to the same DB connection.\n */\nexport function getSharedDb(): DbAdapter {\n if (!_db) {\n throw new Error('[event-store] getSharedDb() called before initEventStore() resolved.');\n }\n return _db;\n}\n\n/**\n * Synchronous accessor — returns the store if already initialized, throws otherwise.\n * Used by legacy callers that expect a sync API (event-store unit tests, etc.)\n */\nexport function getEventStore(): EventStore {\n if (!_store) {\n throw new Error(\n '[event-store] getEventStore() called before initEventStore() resolved. 
' +\n 'Use initEventStore() for async initialization.',\n );\n }\n return _store;\n}\n","/**\n * Shared activity logger — emits activity.entry events to the SQLite event store.\n *\n * Replaces flat-file logActivity() in merge-agent.ts and provides a unified\n * activity logging API for all Panopticon components (merge-agent, cloister,\n * specialists, dashboard).\n *\n * Activity entries are persisted to the event store and flow through:\n * event store → PubSub → WebSocket → EventRouter → Zustand store → ActivityPanel\n *\n * Usage:\n * import { emitActivityEntry } from '../lib/activity-logger.js';\n * emitActivityEntry({ source: 'merge-agent', level: 'info', message: '...', issueId: 'PAN-123' });\n */\n\nimport { randomUUID } from 'crypto';\nimport { getEventStore } from '../dashboard/server/event-store.js';\n\nexport type ActivityLevel = 'info' | 'warn' | 'error' | 'success';\nexport type ActivitySource = 'merge-agent' | 'cloister' | 'review-specialist' | 'test-specialist' | 'dashboard' | 'deploy-script';\n\nexport interface EmitActivityOptions {\n source: ActivitySource;\n level: ActivityLevel;\n message: string;\n details?: string;\n issueId?: string;\n}\n\n/**\n * Emit an activity.entry domain event to the SQLite event store.\n * Non-blocking — throws silently if event store is not yet initialized.\n *\n * The event is persisted to SQLite immediately and PubSub notifies all\n * WebSocket subscribers so the ActivityPanel updates in real-time.\n */\nexport function emitActivityEntry(options: EmitActivityOptions): void {\n try {\n const store = getEventStore();\n const entry = {\n type: 'activity.entry' as const,\n timestamp: new Date().toISOString(),\n payload: {\n id: randomUUID(),\n source: options.source,\n level: options.level,\n message: options.message,\n details: options.details ?? null,\n issueId: options.issueId ?? 
null,\n },\n };\n store.append(entry);\n } catch {\n // Non-fatal — event store may not be initialized during early boot\n }\n}\n\n/**\n * Emit a dashboard lifecycle event (started, completed, failed).\n * Used by pending-lifecycle.ts and merge-agent.ts.\n */\nexport function emitDashboardLifecycle(\n status: 'started' | 'completed' | 'failed',\n options: {\n reason: string;\n issueId?: string;\n trigger?: string;\n durationMs?: number;\n error?: string;\n },\n): void {\n try {\n const store = getEventStore();\n let event: Record<string, unknown>;\n\n if (status === 'started') {\n event = {\n type: 'dashboard.lifecycle_started' as const,\n timestamp: new Date().toISOString(),\n payload: {\n reason: options.reason,\n issueId: options.issueId ?? null,\n trigger: options.trigger ?? 'unknown',\n },\n };\n } else if (status === 'completed') {\n event = {\n type: 'dashboard.lifecycle_completed' as const,\n timestamp: new Date().toISOString(),\n payload: {\n reason: options.reason,\n issueId: options.issueId ?? null,\n durationMs: options.durationMs ?? 0,\n },\n };\n } else {\n event = {\n type: 'dashboard.lifecycle_failed' as const,\n timestamp: new Date().toISOString(),\n payload: {\n reason: options.reason,\n issueId: options.issueId ?? null,\n error: options.error ?? 
'unknown error',\n },\n };\n }\n\n store.append(event);\n } catch {\n // Non-fatal\n }\n}\n","/**\n * Merge Validation - Validation utilities for merge completeness\n *\n * Validates that merged code:\n * - Has no conflict markers\n * - Builds successfully\n * - Passes all tests\n */\n\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport { join } from 'path';\nimport { existsSync } from 'fs';\nimport type { QualityGateConfig, TemplatePlaceholders } from '../workspace-config.js';\nimport { replacePlaceholders } from '../workspace-config.js';\nimport { loadConfig } from '../config.js';\n\nconst execAsync = promisify(exec);\n\n/**\n * Context for validation execution\n */\nexport interface ValidationContext {\n /** Project root path */\n projectPath: string;\n /** Issue ID for logging */\n issueId?: string;\n /** Custom validation script path (defaults to scripts/validate-merge.sh) */\n validationScript?: string;\n /** Baseline test failure count for comparison mode (pre-existing failures) */\n baselineTestFailures?: number;\n}\n\n/**\n * Detailed validation failure information\n */\nexport interface ValidationFailure {\n /** Type of failure: conflict, build, or test */\n type: 'conflict' | 'build' | 'test';\n /** Files affected (for conflicts) */\n files?: string[];\n /** Error message or output */\n message: string;\n}\n\n/**\n * Result of validation execution\n */\nexport interface ValidationResult {\n /** Overall validation success */\n success: boolean;\n /** Validation passed (or skipped — check `skipped` to distinguish) */\n valid: boolean;\n /** Validation was skipped (no validation script found) */\n skipped?: boolean;\n /** Conflict markers detected */\n conflictMarkersFound: boolean;\n /** Build result */\n buildPassed: boolean | null; // null if not run\n /** Test result */\n testsPassed: boolean | null; // null if not run\n /** List of failures */\n failures: ValidationFailure[];\n /** Raw validation output */\n output: 
string;\n /** Error message if validation script itself failed */\n error?: string;\n}\n\n/**\n * Parse validation script output to extract structured results\n */\nfunction parseValidationOutput(output: string, exitCode: number): ValidationResult {\n const lines = output.split('\\n');\n\n const failures: ValidationFailure[] = [];\n let conflictMarkersFound = false;\n let buildPassed: boolean | null = null;\n let testsPassed: boolean | null = null;\n\n // Track what stage we're in\n let inConflictCheck = false;\n let inBuildCheck = false;\n let inTestCheck = false;\n\n const conflictFiles: string[] = [];\n\n for (const line of lines) {\n const trimmed = line.trim();\n\n // Detect stages\n if (trimmed.startsWith('Checking for conflict markers')) {\n inConflictCheck = true;\n inBuildCheck = false;\n inTestCheck = false;\n } else if (trimmed.startsWith('Running build')) {\n inConflictCheck = false;\n inBuildCheck = true;\n inTestCheck = false;\n } else if (trimmed.startsWith('Running tests')) {\n inConflictCheck = false;\n inBuildCheck = false;\n inTestCheck = true;\n }\n\n // Parse conflict markers\n if (inConflictCheck) {\n if (trimmed.startsWith('ERROR: Conflict')) {\n conflictMarkersFound = true;\n } else if (trimmed.includes('/') && !trimmed.startsWith('ERROR')) {\n // File path listed\n conflictFiles.push(trimmed);\n } else if (trimmed.startsWith('✓ No conflict markers found')) {\n conflictMarkersFound = false;\n }\n }\n\n // Parse build result\n if (inBuildCheck) {\n if (trimmed.startsWith('✓ Build passed')) {\n buildPassed = true;\n } else if (trimmed.startsWith('ERROR: Build failed') ||\n trimmed.includes('VALIDATION FAILED: Build errors detected')) {\n buildPassed = false;\n } else if (trimmed.includes('skipping build check')) {\n buildPassed = null; // Not applicable\n }\n }\n\n // Parse test result\n if (inTestCheck) {\n if (trimmed.startsWith('✓ Tests passed')) {\n testsPassed = true;\n } else if (trimmed.startsWith('ERROR: Tests failed') ||\n 
trimmed.includes('VALIDATION FAILED: Test failures detected')) {\n testsPassed = false;\n } else if (trimmed.includes('skipping test check')) {\n testsPassed = null; // Not applicable\n }\n }\n }\n\n // Build failures list\n if (conflictMarkersFound) {\n failures.push({\n type: 'conflict',\n files: conflictFiles.length > 0 ? conflictFiles : undefined,\n message: 'Conflict markers detected in merged code',\n });\n }\n\n if (buildPassed === false) {\n failures.push({\n type: 'build',\n message: 'Build failed after merge',\n });\n }\n\n if (testsPassed === false) {\n failures.push({\n type: 'test',\n message: 'Tests failed after merge',\n });\n }\n\n // Determine overall validity\n const valid = exitCode === 0 &&\n !conflictMarkersFound &&\n (buildPassed === null || buildPassed === true) &&\n (testsPassed === null || testsPassed === true);\n\n return {\n success: true, // Script ran successfully\n valid,\n conflictMarkersFound,\n buildPassed,\n testsPassed,\n failures,\n output,\n };\n}\n\n/**\n * Run merge validation on a project\n *\n * @param context - Validation context\n * @returns Promise resolving to validation result\n */\nexport async function runMergeValidation(\n context: ValidationContext\n): Promise<ValidationResult> {\n const { projectPath, validationScript } = context;\n\n // Determine validation script path\n const scriptPath = validationScript || join(projectPath, 'scripts', 'validate-merge.sh');\n\n // No validation script = skip validation (specialist already ran build + tests)\n if (!existsSync(scriptPath)) {\n console.log(`[validation] No validation script at ${scriptPath}, skipping (specialist already validated)`);\n return {\n success: true,\n valid: true,\n skipped: true,\n conflictMarkersFound: false,\n buildPassed: null,\n testsPassed: null,\n failures: [],\n output: '',\n };\n }\n\n console.log(`[validation] Running validation script: ${scriptPath}`);\n console.log(`[validation] Project path: ${projectPath}`);\n\n try {\n // Run validation 
script\n // Pass baseline failures as env var for baseline comparison mode\n const env = { ...process.env };\n if (context.baselineTestFailures !== undefined) {\n env.BASELINE_FAILURES = String(context.baselineTestFailures);\n console.log(`[validation] Baseline comparison mode: ${context.baselineTestFailures} pre-existing failures`);\n }\n\n const { stdout, stderr } = await execAsync(\n `bash \"${scriptPath}\" \"${projectPath}\"`,\n {\n cwd: projectPath,\n env,\n maxBuffer: 10 * 1024 * 1024, // 10MB buffer for large outputs\n timeout: 10 * 60 * 1000, // 10 minute timeout\n }\n );\n\n const output = stdout + stderr;\n\n console.log(`[validation] ✓ Validation passed`);\n\n return parseValidationOutput(output, 0);\n } catch (error: any) {\n // Validation script exited with non-zero code (validation failed)\n const exitCode = error.code || 1;\n const output = (error.stdout || '') + (error.stderr || '');\n\n console.log(`[validation] ✗ Validation failed (exit code ${exitCode})`);\n\n // Parse the output to understand what failed\n const result = parseValidationOutput(output, exitCode);\n\n return result;\n }\n}\n\n/**\n * Auto-revert a merge if validation fails\n *\n * Uses ORIG_HEAD which git sets automatically at merge time to the commit\n * HEAD pointed to right before the merge. 
This is always correct regardless\n * of commits added between task start and merge execution.\n *\n * @param projectPath - Project root path\n * @returns Promise resolving to success status\n */\nexport async function autoRevertMerge(projectPath: string): Promise<boolean> {\n console.log(`[validation] Auto-reverting merge in ${projectPath}`);\n\n try {\n // Get current commit before revert (for logging)\n const { stdout: beforeCommit } = await execAsync('git rev-parse HEAD', {\n cwd: projectPath,\n });\n\n // Use ORIG_HEAD — git sets this to pre-merge HEAD at merge time.\n // Handles fast-forwards, multi-commit merges, and any commits\n // added to main between task start and merge execution.\n await execAsync('git reset --hard ORIG_HEAD', {\n cwd: projectPath,\n });\n\n // Get new HEAD after revert (for logging)\n const { stdout: afterCommit } = await execAsync('git rev-parse HEAD', {\n cwd: projectPath,\n });\n\n console.log(\n `[validation] ✓ Auto-revert successful: ${beforeCommit.trim()} -> ${afterCommit.trim()} (via ORIG_HEAD)`\n );\n\n return true;\n } catch (error: any) {\n console.error(`[validation] ✗ Auto-revert failed:`, error.message);\n return false;\n }\n}\n\n/**\n * Result of a single quality gate execution\n */\nexport interface QualityGateResult {\n /** Gate name from projects.yaml */\n name: string;\n /** Whether the gate passed */\n passed: boolean;\n /** Whether the gate was required */\n required: boolean;\n /** Gate output (stdout + stderr) */\n output: string;\n /** Duration in milliseconds */\n durationMs: number;\n /** Error message if gate failed */\n error?: string;\n}\n\n/**\n * Options for running quality gates\n */\nexport interface QualityGateRunOptions {\n /** Whether the workspace is remote (SSH) */\n isRemote?: boolean;\n /** VM name for SSH connections (required when isRemote is true) */\n vmName?: string;\n /** Template placeholders for resolving container names (e.g., {{FEATURE_FOLDER}}) */\n placeholders?: 
TemplatePlaceholders;\n}\n\n/**\n * Default quality gates used when no quality_gates config exists in projects.yaml.\n * Runs typecheck → lint → test sequentially (bail on first failure).\n */\nexport const DEFAULT_GATES: Record<string, QualityGateConfig> = {\n typecheck: { command: 'npm run typecheck 2>&1' },\n lint: { command: 'npm run lint 2>&1' },\n test: { command: 'npm test 2>&1' },\n};\n\n/**\n * Run all quality gates for a project\n *\n * Executes each gate in declaration order, stopping on first required failure.\n * Returns results for all gates that were run.\n *\n * Supports both local and remote (SSH) workspaces. For remote workspaces,\n * commands are wrapped with SSH and run on the specified VM.\n *\n * @param gates - Quality gate configs from projects.yaml (or DEFAULT_GATES)\n * @param projectPath - Project root (or workspace root)\n * @param phase - Which phase to run ('pre_push' or 'post_push')\n * @param opts - Optional remote workspace options\n * @returns Array of gate results\n */\nexport async function runQualityGates(\n gates: Record<string, QualityGateConfig>,\n projectPath: string,\n phase: 'pre_push' | 'post_push' = 'pre_push',\n opts: QualityGateRunOptions = {}\n): Promise<QualityGateResult[]> {\n if (opts.isRemote && !opts.vmName) {\n throw new Error('Remote workspace requires vmName');\n }\n if (opts.isRemote && opts.vmName) {\n // Validate vmName and projectPath to prevent shell injection.\n // Both are controlled by Panopticon config, but explicit validation\n // catches any accidental or malicious values before they reach the shell.\n if (!/^[a-z0-9][a-z0-9-]*$/.test(opts.vmName)) {\n throw new Error(`Invalid vmName for SSH: ${opts.vmName}`);\n }\n if (!/^[a-zA-Z0-9/_\\-.]+$/.test(projectPath)) {\n throw new Error(`Workspace path contains unsafe characters: ${projectPath}`);\n }\n }\n const results: QualityGateResult[] = [];\n\n for (const [name, gate] of Object.entries(gates)) {\n const gatePhase = gate.phase || 'pre_push';\n if 
(gatePhase !== phase) continue;\n\n const required = gate.required !== false; // default true\n const cwd = gate.path ? join(projectPath, gate.path) : projectPath;\n\n console.log(`[quality-gate] Running \"${name}\" (${required ? 'required' : 'optional'}) in ${cwd}`);\n const startTime = Date.now();\n\n if (gate.type === 'http_health') {\n // HTTP health check gate\n const result = await runHttpHealthGate(name, gate, required);\n results.push(result);\n if (!result.passed && required) {\n console.log(`[quality-gate] ✗ Required gate \"${name}\" failed — stopping`);\n break;\n }\n continue;\n }\n\n // Command gate (default)\n\n // For remote workspaces, build and validate the SSH command BEFORE entering\n // the try/catch so validation errors propagate as real errors (not gate failures).\n const isRemote = opts.isRemote && opts.vmName;\n let resolvedCommand: string;\n if (isRemote) {\n // Validate cwd (which may include gate.path) — not just the base projectPath.\n // A gate.path like \"frontend;rm -rf /\" would produce an unsafe cwd after join.\n if (!/^[a-zA-Z0-9/_\\-.]+$/.test(cwd)) {\n throw new Error(`Gate \"${name}\" path resolves to unsafe characters for SSH: ${cwd}`);\n }\n // Validate gate.command doesn't contain double quotes — a \" in the command would\n // end the SSH double-quoted string and allow local command injection:\n // ssh host \"cd /path && legit; injected\" ← breaks when command contains \"\n if (gate.command.includes('\"')) {\n throw new Error(`Gate \"${name}\" command contains double quotes which are unsafe in SSH context`);\n }\n const flyAppName = loadConfig().remote?.fly?.app ?? 
'pan-workspaces';\n resolvedCommand = `fly ssh console -a ${flyAppName} -C \"cd ${cwd} && ${gate.command}\"`;\n } else if (gate.container && gate.container_name) {\n // Run inside Docker container — resolve container name from placeholders\n let containerName = gate.container_name;\n if (opts.placeholders) {\n containerName = replacePlaceholders(containerName, opts.placeholders);\n }\n // Use -w to set working directory inside the container.\n // The container mounts workspace code at /workspaces/feature/<subdir>,\n // so map the gate.path (e.g., 'fe') to the container's working directory.\n const containerWorkdir = gate.path ? `/workspaces/feature/${gate.path}` : '/workspaces/feature';\n // Pass gate.env as -e flags so env vars reach the container process\n const envFlags = gate.env\n ? Object.entries(gate.env).map(([k, v]) => `-e ${k}=\"${v}\"`).join(' ')\n : '';\n resolvedCommand = `docker exec ${envFlags} -w \"${containerWorkdir}\" \"${containerName}\" ${gate.command}`;\n console.log(`[quality-gate] Running in container: ${containerName} (workdir: ${containerWorkdir})`);\n } else {\n resolvedCommand = gate.command;\n }\n\n try {\n // When running in container, don't set host cwd (irrelevant)\n const useHostCwd = !isRemote && !(gate.container && gate.container_name);\n const env = { ...process.env, ...gate.env };\n const { stdout, stderr } = await execAsync(resolvedCommand, {\n cwd: useHostCwd ? 
cwd : undefined,\n env,\n maxBuffer: 10 * 1024 * 1024, // 10MB\n timeout: 5 * 60 * 1000, // 5 minute timeout per gate\n });\n\n const durationMs = Date.now() - startTime;\n console.log(`[quality-gate] ✓ \"${name}\" passed (${durationMs}ms)`);\n results.push({\n name,\n passed: true,\n required,\n output: (stdout + stderr).slice(-2000), // keep last 2KB\n durationMs,\n });\n } catch (error: any) {\n const durationMs = Date.now() - startTime;\n const output = ((error.stdout || '') + (error.stderr || '')).slice(-2000);\n console.log(`[quality-gate] ✗ \"${name}\" failed (${durationMs}ms): ${error.message?.slice(0, 200)}`);\n results.push({\n name,\n passed: false,\n required,\n output,\n durationMs,\n error: error.message?.slice(0, 500),\n });\n\n if (required) {\n console.log(`[quality-gate] ✗ Required gate \"${name}\" failed — stopping`);\n break;\n }\n }\n }\n\n const passed = results.filter(r => r.passed).length;\n const failed = results.filter(r => !r.passed).length;\n console.log(`[quality-gate] Complete: ${passed} passed, ${failed} failed out of ${results.length} gates`);\n\n return results;\n}\n\n/**\n * Run an HTTP health check gate (for post-push deployment verification)\n */\nasync function runHttpHealthGate(\n name: string,\n gate: QualityGateConfig,\n required: boolean\n): Promise<QualityGateResult> {\n const url = gate.url;\n if (!url) {\n return {\n name,\n passed: false,\n required,\n output: '',\n durationMs: 0,\n error: 'http_health gate missing url',\n };\n }\n\n const waitSeconds = gate.wait || 120;\n const expectStatus = gate.expect_status || 200;\n const startTime = Date.now();\n\n console.log(`[quality-gate] Waiting ${waitSeconds}s for deployment, then checking ${url}`);\n\n // Wait for deployment\n await new Promise(resolve => setTimeout(resolve, waitSeconds * 1000));\n\n try {\n const { stdout } = await execAsync(\n `curl -sL -o /dev/null -w '%{http_code}' --max-time 30 '${url}'`,\n { timeout: 60 * 1000 }\n );\n\n const statusCode = 
parseInt(stdout.trim(), 10);\n const passed = statusCode === expectStatus;\n const durationMs = Date.now() - startTime;\n\n console.log(`[quality-gate] Health check ${url}: ${statusCode} (expected ${expectStatus}) — ${passed ? 'PASS' : 'FAIL'}`);\n\n return {\n name,\n passed,\n required,\n output: `HTTP ${statusCode} from ${url}`,\n durationMs,\n error: passed ? undefined : `Expected HTTP ${expectStatus}, got ${statusCode}`,\n };\n } catch (error: any) {\n return {\n name,\n passed: false,\n required,\n output: error.message || '',\n durationMs: Date.now() - startTime,\n error: `Health check failed: ${error.message?.slice(0, 200)}`,\n };\n }\n}\n","/**\n * Git utilities for handling common git operations and recovery\n */\n\nimport { existsSync, unlinkSync, readdirSync } from 'fs';\nimport { join } from 'path';\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\n\nconst execAsync = promisify(exec);\n\n/**\n * Check if any git processes are currently running in a specific repository\n *\n * This checks if there are git processes with the repository path in their command line.\n * If we can't determine repository-specific processes, we conservatively return false\n * (no processes detected) to allow cleanup to proceed.\n */\nasync function hasRunningGitProcesses(repoPath: string): Promise<boolean> {\n try {\n // Try to find git processes that reference this specific repository\n // Use fuser to check if any process has the .git directory open (more reliable)\n try {\n const gitDir = join(repoPath, '.git');\n const { stdout } = await execAsync(`fuser \"${gitDir}\" 2>/dev/null`, {\n encoding: 'utf-8',\n });\n // fuser returns PIDs if any process has the directory open\n return stdout.trim().length > 0;\n } catch {\n // fuser not available or no processes found\n // Fall back to checking ps for git processes in this directory\n try {\n const { stdout } = await execAsync(\n `ps aux | grep -E \"git.*${repoPath.replace(/[.*+?^${}()|[\\]\\\\]/g, 
'\\\\$&')}\" | grep -v grep`,\n { encoding: 'utf-8' }\n );\n return stdout.trim().length > 0;\n } catch {\n // No git processes found for this repo\n return false;\n }\n }\n } catch {\n // Error checking - conservatively assume no processes\n return false;\n }\n}\n\n/**\n * Find all git lock files in a repository\n */\nfunction findGitLockFiles(repoPath: string): string[] {\n const lockFiles: string[] = [];\n\n // Check for index.lock in .git directory\n const indexLock = join(repoPath, '.git', 'index.lock');\n if (existsSync(indexLock)) {\n lockFiles.push(indexLock);\n }\n\n // Check for ref locks in .git/refs\n const refsDir = join(repoPath, '.git', 'refs');\n if (existsSync(refsDir)) {\n const findLocksRecursive = (dir: string) => {\n const entries = readdirSync(dir, { withFileTypes: true });\n for (const entry of entries) {\n const fullPath = join(dir, entry.name);\n if (entry.isDirectory()) {\n findLocksRecursive(fullPath);\n } else if (entry.name.endsWith('.lock')) {\n lockFiles.push(fullPath);\n }\n }\n };\n try {\n findLocksRecursive(refsDir);\n } catch {\n // Ignore errors reading refs directory\n }\n }\n\n return lockFiles;\n}\n\n/**\n * Check for and clean up stale git lock files\n *\n * A lock file is considered stale if:\n * 1. It exists\n * 2. 
No git processes are currently running\n *\n * @param repoPath - Path to the git repository\n * @returns Object with cleanup results\n */\nexport async function cleanupStaleLocks(repoPath: string): Promise<{\n found: string[];\n removed: string[];\n errors: Array<{ file: string; error: string }>;\n}> {\n const result = {\n found: [] as string[],\n removed: [] as string[],\n errors: [] as Array<{ file: string; error: string }>,\n };\n\n // Find all lock files\n const lockFiles = findGitLockFiles(repoPath);\n result.found = lockFiles;\n\n if (lockFiles.length === 0) {\n return result;\n }\n\n // Check if git processes are running for this repository\n const hasGitProcesses = await hasRunningGitProcesses(repoPath);\n\n if (hasGitProcesses) {\n // Don't remove locks if git is actively running\n result.errors.push({\n file: 'N/A',\n error: 'Git processes are running - not safe to remove locks',\n });\n return result;\n }\n\n // Remove stale lock files\n for (const lockFile of lockFiles) {\n try {\n unlinkSync(lockFile);\n result.removed.push(lockFile);\n } catch (error: unknown) {\n const msg = error instanceof Error ? error.message : String(error);\n result.errors.push({ file: lockFile, error: msg });\n }\n }\n\n return result;\n}\n\n/**\n * Result of getWorkspaceGitInfo.\n * Note: `branch` is the branch name (not a hash) despite the parent function name.\n */\nexport interface WorkspaceCommitInfo {\n /** Full SHA of the HEAD commit */\n HEAD: string;\n /** Current branch name (e.g. \"feature/pan-342\") */\n branch: string;\n}\n\n/**\n * Get the current HEAD commit SHA and branch name for a workspace.\n *\n * Note: the return value includes `branch` (a name, not a hash) alongside `HEAD` (a SHA).\n * The function name reflects its primary use-case of snapshotting commit state for review.\n *\n * @param workspacePath - Path to the git workspace\n * @returns WorkspaceCommitInfo with HEAD SHA and branch name\n * @throws Error if git commands fail (e.g. 
path is not a git repository)\n */\nexport async function getWorkspaceGitInfo(workspacePath: string): Promise<WorkspaceCommitInfo> {\n try {\n const [headResult, branchResult] = await Promise.all([\n execAsync('git rev-parse HEAD', { cwd: workspacePath }),\n execAsync('git rev-parse --abbrev-ref HEAD', { cwd: workspacePath }),\n ]);\n return {\n HEAD: headResult.stdout.trim(),\n branch: branchResult.stdout.trim(),\n };\n } catch (err: unknown) {\n const msg = err instanceof Error ? err.message : String(err);\n throw new Error(`getWorkspaceGitInfo failed for ${workspacePath}: ${msg}`);\n }\n}\n\n/**\n * Check if a repository has stale lock files\n *\n * @param repoPath - Path to the git repository\n * @returns True if stale locks exist\n */\nexport async function hasStaleLocks(repoPath: string): Promise<boolean> {\n const lockFiles = findGitLockFiles(repoPath);\n if (lockFiles.length === 0) {\n return false;\n }\n\n const hasGitProcesses = await hasRunningGitProcesses(repoPath);\n return !hasGitProcesses;\n}\n","/**\n * Merge Agent - Automatic merge conflict resolution using Claude Code\n */\n\nimport { readFileSync, writeFileSync, existsSync, mkdirSync, appendFileSync } from 'fs';\nimport { writeFile } from 'fs/promises';\nimport { join, dirname, basename, relative } from 'path';\nimport { fileURLToPath } from 'url';\nimport { spawn, exec } from 'child_process';\nimport { promisify } from 'util';\nimport { sendKeysAsync, sessionExists } from '../tmux.js';\nimport { emitActivityEntry, emitDashboardLifecycle } from '../activity-logger.js';\n\nconst execAsync = promisify(exec);\n\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\nimport {\n PANOPTICON_HOME,\n} from '../paths.js';\nimport { resolveGitHubIssue } from '../tracker-utils.js';\n\nimport {\n getSessionId,\n recordWake,\n getTmuxSessionName,\n wakeSpecialist,\n spawnEphemeralSpecialist,\n isRunning,\n} from './specialists.js';\nimport { resolveProjectFromIssue } from 
'../projects.js';\nimport { runMergeValidation, autoRevertMerge, runQualityGates } from './validation.js';\nimport { loadProjectsConfig } from '../projects.js';\nimport { cleanupStaleLocks } from '../git-utils.js';\n\nconst SPECIALISTS_DIR = join(PANOPTICON_HOME, 'specialists');\nconst MERGE_HISTORY_DIR = join(SPECIALISTS_DIR, 'merge-agent');\nconst MERGE_HISTORY_FILE = join(MERGE_HISTORY_DIR, 'history.jsonl');\n\n/**\n * Context for a merge conflict resolution request\n */\nexport interface MergeConflictContext {\n projectPath: string;\n sourceBranch: string;\n targetBranch: string;\n conflictFiles: string[];\n issueId: string;\n testCommand?: string;\n}\n\n/**\n * Result of merge agent execution\n */\nexport interface MergeResult {\n success: boolean;\n resolvedFiles?: string[];\n failedFiles?: string[];\n testsStatus?: 'PASS' | 'FAIL' | 'SKIP';\n validationStatus?: 'PASS' | 'FAIL' | 'NOT_RUN';\n reason?: string;\n notes?: string;\n output?: string;\n}\n\n/**\n * Merge history entry\n */\ninterface MergeHistoryEntry {\n timestamp: string;\n issueId: string;\n sourceBranch: string;\n targetBranch: string;\n conflictFiles: string[];\n result: MergeResult;\n sessionId?: string;\n}\n\n/**\n * Timeout for merge agent in milliseconds (15 minutes)\n */\nconst MERGE_TIMEOUT_MS = 15 * 60 * 1000;\n\n/**\n * Build the prompt for merge-agent\n */\nfunction buildMergePrompt(context: MergeConflictContext): string {\n const templatePath = join(__dirname, 'prompts', 'merge-agent.md');\n\n if (!existsSync(templatePath)) {\n throw new Error(`Merge agent prompt template not found at ${templatePath}`);\n }\n\n const template = readFileSync(templatePath, 'utf-8');\n\n // Replace template variables\n const prompt = template\n .replace(/\\{\\{projectPath\\}\\}/g, context.projectPath)\n .replace(/\\{\\{sourceBranch\\}\\}/g, context.sourceBranch)\n .replace(/\\{\\{targetBranch\\}\\}/g, context.targetBranch)\n .replace(/\\{\\{issueId\\}\\}/g, context.issueId)\n .replace(\n 
/\\{\\{conflictFiles\\}\\}/g,\n context.conflictFiles.map((f) => ` - ${f}`).join('\\n')\n )\n .replace(/\\{\\{testCommand\\}\\}/g, context.testCommand || 'skip')\n .replace(/\\{\\{apiUrl\\}\\}/g, process.env.DASHBOARD_URL || `http://localhost:${process.env.API_PORT || process.env.PORT || '3011'}`);\n\n // Wrap in orchestration markers for context delineation\n return `<!-- panopticon:orchestration-context-start -->\\n${prompt}\\n<!-- panopticon:orchestration-context-end -->`;\n}\n\n/**\n * Detect test command from project structure\n */\nfunction detectTestCommand(projectPath: string): string {\n // Check for package.json (Node.js)\n const packageJsonPath = join(projectPath, 'package.json');\n if (existsSync(packageJsonPath)) {\n try {\n const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf-8'));\n if (packageJson.scripts?.test) {\n return 'npm test';\n }\n } catch {\n // Ignore parse errors\n }\n }\n\n // Check for pom.xml (Java/Maven)\n if (existsSync(join(projectPath, 'pom.xml'))) {\n return 'mvn test';\n }\n\n // Check for Cargo.toml (Rust)\n if (existsSync(join(projectPath, 'Cargo.toml'))) {\n return 'cargo test';\n }\n\n // Check for pytest (Python)\n if (existsSync(join(projectPath, 'pytest.ini')) || existsSync(join(projectPath, 'setup.py'))) {\n return 'pytest';\n }\n\n // No test command detected\n return 'skip';\n}\n\n/**\n * Notify TLDR daemon to reindex changed files after merge\n */\nexport async function notifyTldrDaemon(projectPath: string, sourceBranch: string): Promise<void> {\n try {\n console.log(`[merge-agent] Notifying TLDR daemon to reindex changed files...`);\n\n // Check if TLDR daemon is available\n const venvPath = join(projectPath, '.venv');\n if (!existsSync(venvPath)) {\n console.log(`[merge-agent] No .venv found, skipping TLDR notification`);\n return;\n }\n\n // Get changed files from the merge\n const { stdout } = await execAsync(`git diff --name-only HEAD~1 HEAD`, {\n cwd: projectPath,\n encoding: 'utf-8'\n });\n\n const 
changedFiles = stdout\n .trim()\n .split('\\n')\n .filter(f => f.trim().length > 0)\n .filter(f => {\n // Only include source code files (skip docs, configs, etc)\n const ext = f.split('.').pop()?.toLowerCase();\n return ext && ['ts', 'js', 'tsx', 'jsx', 'py', 'java', 'go', 'rs', 'cpp', 'c', 'h'].includes(ext);\n });\n\n if (changedFiles.length === 0) {\n console.log(`[merge-agent] No source files changed, skipping TLDR notification`);\n return;\n }\n\n console.log(`[merge-agent] Found ${changedFiles.length} changed source files to reindex`);\n\n // Get TLDR daemon service\n const { getTldrDaemonService } = await import('../tldr-daemon.js');\n const tldrService = getTldrDaemonService(projectPath, venvPath);\n\n // Check if daemon is running\n const status = await tldrService.getStatus();\n if (!status.running) {\n console.log(`[merge-agent] TLDR daemon not running, skipping notification`);\n return;\n }\n\n // Trigger warm to reindex (this will update the index incrementally)\n console.log(`[merge-agent] Triggering TLDR index warm...`);\n await tldrService.warm(true); // background mode\n\n console.log(`[merge-agent] ✓ TLDR daemon notified to reindex`);\n logActivity('tldr_notified', `Notified TLDR daemon to reindex ${changedFiles.length} files`);\n } catch (error: any) {\n // Non-fatal - log warning and continue\n console.warn(`[merge-agent] Failed to notify TLDR daemon: ${error.message}`);\n logActivity('tldr_notify_error', `TLDR notification failed: ${error.message}`);\n }\n}\n\n/**\n * Post-merge cleanup: move PRD, close PR, move issue to Done, report merge, compact beads.\n *\n * Moves the issue to Done on the tracker so it appears in the Done column.\n * Does NOT tear down the workspace or apply the closed-out label — the human\n * close-out ceremony handles that separately.\n *\n * IDEMPOTENT: Safe to call multiple times for the same issueId. Tracks completed\n * issues and returns immediately on re-entry. 
This is defense-in-depth against\n * the infinite loop that burned 24,626 Linear API calls (PAN-328).\n */\n\n// Defense-in-depth: track issues that have completed postMergeLifecycle.\n// Prevents re-execution even if caller guards fail. Persists for server lifetime.\nconst _completedPostMerge = new Set<string>();\n\n// Circuit breaker for issue tracker close operations.\n// After MAX_CLOSE_RETRIES consecutive failures, stop trying to close the issue\n// on the tracker. The issue can be closed manually via the dashboard close-out ceremony.\nconst _closeIssueFailures = new Map<string, number>();\nconst MAX_CLOSE_RETRIES = 3;\n\nexport async function postMergeLifecycle(issueId: string, projectPath: string, sourceBranch?: string, options?: { skipDeploy?: boolean }): Promise<void> {\n // Guard 1: skip if already completed (defense-in-depth against infinite loops)\n if (_completedPostMerge.has(issueId)) {\n console.log(`[merge-agent] postMergeLifecycle already completed for ${issueId}, skipping`);\n return;\n }\n\n // Step 0: Write pending lifecycle file and spawn detached deploy script.\n // The deploy script rebuilds dist/, kills this server, and starts a fresh process.\n // The fresh process reads the pending file on startup and runs the lifecycle steps\n // with correct module chunk references (no ERR_MODULE_NOT_FOUND after merge).\n //\n // Skip this step when we ARE the fresh process (called from processPendingLifecycle) —\n // dynamic imports already resolve correctly and spawning again would create an infinite loop.\n if (!options?.skipDeploy) {\n const pendingFile = join(PANOPTICON_HOME, 'pending-post-merge.json');\n const repoRoot = __dirname.includes('/src/')\n ? __dirname.replace(/\\/src\\/.*$/, '')\n : __dirname.replace(/\\/dist\\/.*$/, '').replace(/\\/lib\\/.*$/, '');\n const deployScript = join(repoRoot, 'scripts', 'post-merge-deploy.sh');\n\n try {\n const pendingData = JSON.stringify({\n issueId,\n projectPath,\n sourceBranch: sourceBranch ?? 
'',\n timestamp: Date.now(),\n reason: 'post-merge',\n trigger: 'merge-agent',\n });\n await writeFile(pendingFile, pendingData, 'utf-8');\n console.log(`[merge-agent] Wrote pending lifecycle file: ${pendingFile}`);\n\n // Pass 'post-merge' as the reason to the deploy script so it writes the\n // restart marker. We spawn detached and return immediately — the deploy script\n // kills this server. The new server reads the pending file on boot,\n // emits lifecycle_started, and after processing emits lifecycle_complete/failed.\n const child = spawn(deployScript, [repoRoot, issueId, projectPath, sourceBranch ?? '', 'post-merge'], {\n detached: true,\n stdio: 'ignore',\n });\n child.unref();\n console.log(`[merge-agent] Spawned detached deploy script (pid ${child.pid}) — server will restart with new build`);\n return;\n } catch (err: any) {\n console.warn(`[merge-agent] Failed to spawn deploy script: ${err.message}. Falling through to in-process lifecycle (may fail on stale chunks).`);\n }\n }\n\n console.log(`[merge-agent] Running post-merge cleanup for ${issueId}`);\n\n // 1. Move PRD from active to completed (via lifecycle module)\n try {\n const { movePrd } = await import('../lifecycle/archive-planning.js');\n const prdResult = await movePrd({ issueId, projectPath });\n if (prdResult.success && !prdResult.skipped) {\n console.log(`[merge-agent] ✓ ${prdResult.details?.join('; ')}`);\n logActivity('prd_moved', `Moved ${issueId} PRD to completed directory`);\n } else if (prdResult.skipped) {\n console.log(`[merge-agent] PRD move skipped: ${prdResult.details?.join('; ')}`);\n } else {\n console.warn(`[merge-agent] PRD move failed: ${prdResult.error}`);\n }\n } catch (err) {\n console.warn(`[merge-agent] Could not move PRD: ${err}`);\n }\n\n // 2. 
Remove ephemeral planning artifacts from main (via lifecycle module)\n try {\n const { cleanPlanningArtifacts } = await import('../lifecycle/clean-planning.js');\n const cleanResult = await cleanPlanningArtifacts({ issueId, projectPath });\n if (cleanResult.success && !cleanResult.skipped) {\n console.log(`[merge-agent] ✓ ${cleanResult.details?.join('; ')}`);\n logActivity('planning_artifacts_cleaned', cleanResult.details?.join('; ') || 'Planning artifacts removed');\n } else if (cleanResult.skipped) {\n console.log(`[merge-agent] Planning artifact cleanup skipped: ${cleanResult.details?.join('; ')}`);\n } else {\n console.warn(`[merge-agent] Planning artifact cleanup failed: ${cleanResult.error}`);\n }\n } catch (err) {\n console.warn(`[merge-agent] Could not clean planning artifacts: ${err}`);\n }\n\n // 3. Clean up workflow labels + apply 'merged' label (non-fatal)\n // MUST run BEFORE closing the issue — once closed on GitHub, label edits fail silently.\n // This was the root cause of in-review labels persisting after merge (PAN-453 incident).\n try {\n const { cleanupMergedLabels } = await import('../lifecycle/label-cleanup.js');\n const ghResolved = resolveGitHubIssue(issueId);\n const labelCtx = ghResolved.isGitHub\n ? { issueId, projectPath, github: { owner: ghResolved.owner, repo: ghResolved.repo, number: ghResolved.number } }\n : { issueId, projectPath };\n const labelResult = await cleanupMergedLabels(labelCtx);\n if (labelResult.success && !labelResult.skipped) {\n console.log(`[merge-agent] ✓ ${labelResult.details?.join('; ')}`);\n logActivity('labels_cleaned', labelResult.details?.join('; ') || 'Labels cleaned');\n } else if (labelResult.skipped) {\n console.log(`[merge-agent] Label cleanup skipped: ${labelResult.details?.join('; ')}`);\n } else {\n console.warn(`[merge-agent] Label cleanup failed (non-fatal): ${labelResult.error}`);\n }\n } catch (err) {\n console.warn(`[merge-agent] Could not clean labels: ${err}`);\n }\n\n // 3b. 
Close issue on tracker (fire-and-forget with circuit breaker)\n // This is decoupled from the merge lifecycle: failure to close the issue on the\n // tracker does NOT block the merge or cause retries. The close-out ceremony handles\n // any issues that weren't auto-closed.\n closeIssueWithCircuitBreaker(issueId, projectPath);\n\n // 4. Compact old beads (via lifecycle module)\n try {\n const { compactBeads } = await import('../lifecycle/compact-beads.js');\n const beadsResult = await compactBeads({ issueId, projectPath });\n if (beadsResult.success && !beadsResult.skipped) {\n console.log(`[merge-agent] ✓ ${beadsResult.details?.join('; ')}`);\n logActivity('beads_compaction_complete', beadsResult.details?.join('; ') || 'Beads compacted');\n }\n } catch (err) {\n console.warn(`[merge-agent] Beads compaction failed: ${err}`);\n }\n\n // 5. Kill work agent tmux session to free resources (non-fatal)\n // Stopped agents with live tmux sessions leak memory (Claude + MCP processes stay resident)\n try {\n const { getAgentState, saveAgentState } = await import('../agents.js');\n const { killSession, sessionExists } = await import('../tmux.js');\n const agentId = `agent-${issueId.toLowerCase()}`;\n const agentState = getAgentState(agentId);\n if (agentState && sessionExists(agentId)) {\n killSession(agentId);\n agentState.status = 'stopped';\n saveAgentState(agentState);\n console.log(`[merge-agent] ✓ Killed work agent session ${agentId} to free resources`);\n logActivity('agent_session_killed', `Freed resources: killed tmux session for ${agentId}`);\n }\n // Also kill planning agent if it exists\n const planningId = `planning-${issueId.toLowerCase()}`;\n if (sessionExists(planningId)) {\n killSession(planningId);\n console.log(`[merge-agent] ✓ Killed planning agent session ${planningId}`);\n }\n } catch (err) {\n console.warn(`[merge-agent] Could not kill agent sessions: ${err}`);\n }\n\n // 6. 
Stop Docker containers + networks to prevent network pool exhaustion (non-fatal)\n // Orphaned Docker networks accumulate when workspaces are merged but containers are never\n // torn down, eventually exhausting Docker's address pool and blocking new workspace creation.\n try {\n const { findWorkspacePath } = await import('../lifecycle/archive-planning.js');\n const { stopWorkspaceDocker } = await import('../workspace-manager.js');\n const issueLower = issueId.toLowerCase();\n const workspacePath = findWorkspacePath(projectPath, issueLower);\n if (workspacePath) {\n const projName = basename(projectPath);\n const dockerResult = await stopWorkspaceDocker(workspacePath, projName, issueLower);\n if (dockerResult.containersFound) {\n console.log(`[merge-agent] ✓ Stopped Docker containers: ${dockerResult.steps.join('; ')}`);\n logActivity('docker_cleanup', `Stopped Docker for ${issueId}: ${dockerResult.steps.join('; ')}`);\n }\n }\n } catch (err) {\n console.warn(`[merge-agent] Docker cleanup failed (non-fatal): ${err}`);\n }\n\n // Mark completed BEFORE logging — prevents re-entry even if the log line triggers something\n _completedPostMerge.add(issueId);\n\n console.log(`[merge-agent] Post-merge cleanup completed for ${issueId}. Issue moved to Done — awaiting close-out.`);\n logActivity('merge_complete', `Merged ${issueId}. Issue moved to Done — awaiting close-out.`);\n}\n\n/**\n * Close issue on tracker with circuit breaker protection.\n * Fire-and-forget: runs asynchronously, never blocks the caller.\n * Stops retrying after MAX_CLOSE_RETRIES consecutive failures per issue.\n */\nfunction closeIssueWithCircuitBreaker(issueId: string, projectPath: string): void {\n const failures = _closeIssueFailures.get(issueId) || 0;\n if (failures >= MAX_CLOSE_RETRIES) {\n console.log(`[merge-agent] Circuit breaker open for ${issueId} issue close (${failures} failures). 
Will be closed during close-out ceremony.`);\n return;\n }\n\n // Fire-and-forget — errors are caught and logged, never propagated\n (async () => {\n try {\n const { closeIssue } = await import('../lifecycle/close-issue.js');\n const ghResolved = resolveGitHubIssue(issueId);\n const ctx = ghResolved.isGitHub\n ? { issueId, projectPath, github: { owner: ghResolved.owner, repo: ghResolved.repo, number: ghResolved.number } }\n : { issueId, projectPath };\n const results = await closeIssue(ctx, { applyLabel: false, comment: 'Merged to main via Panopticon merge-agent' });\n\n let anyFailure = false;\n for (const r of results) {\n if (r.success && !r.skipped) {\n console.log(`[merge-agent] ✓ ${r.details?.join('; ')}`);\n logActivity(r.step, r.details?.join('; ') || r.step);\n } else if (!r.skipped) {\n console.warn(`[merge-agent] ✗ ${r.step} failed: ${r.error}`);\n anyFailure = true;\n }\n }\n\n if (anyFailure) {\n const newCount = (_closeIssueFailures.get(issueId) || 0) + 1;\n _closeIssueFailures.set(issueId, newCount);\n if (newCount >= MAX_CLOSE_RETRIES) {\n console.warn(`[merge-agent] Circuit breaker tripped for ${issueId} after ${newCount} failures. 
Issue close deferred to close-out ceremony.`);\n }\n } else {\n // Success — clear failure counter\n _closeIssueFailures.delete(issueId);\n }\n } catch (err) {\n const newCount = (_closeIssueFailures.get(issueId) || 0) + 1;\n _closeIssueFailures.set(issueId, newCount);\n console.warn(`[merge-agent] Could not move issue to Done (attempt ${newCount}/${MAX_CLOSE_RETRIES}): ${err}`);\n }\n })();\n}\n\n/**\n * Reset postMergeLifecycle completion tracking for an issue (used by reopen).\n */\nexport function resetPostMergeState(issueId: string): void {\n _completedPostMerge.delete(issueId);\n _closeIssueFailures.delete(issueId);\n}\n\n/**\n * Parse result markers from agent output\n */\nfunction parseAgentOutput(output: string): MergeResult {\n const lines = output.split('\\n');\n\n let mergeResult: 'SUCCESS' | 'FAILURE' | null = null;\n let resolvedFiles: string[] = [];\n let failedFiles: string[] = [];\n let testsStatus: 'PASS' | 'FAIL' | 'SKIP' | null = null;\n let validationStatus: 'PASS' | 'FAIL' | null = null;\n let reason = '';\n let notes = '';\n\n for (const line of lines) {\n const trimmed = line.trim();\n\n // Match MERGE_RESULT\n if (trimmed.startsWith('MERGE_RESULT:')) {\n const value = trimmed.substring('MERGE_RESULT:'.length).trim();\n if (value === 'SUCCESS' || value === 'FAILURE') {\n mergeResult = value;\n }\n }\n\n // Match RESOLVED_FILES\n if (trimmed.startsWith('RESOLVED_FILES:')) {\n const value = trimmed.substring('RESOLVED_FILES:'.length).trim();\n resolvedFiles = value\n .split(',')\n .map((f) => f.trim())\n .filter((f) => f.length > 0);\n }\n\n // Match FAILED_FILES\n if (trimmed.startsWith('FAILED_FILES:')) {\n const value = trimmed.substring('FAILED_FILES:'.length).trim();\n failedFiles = value\n .split(',')\n .map((f) => f.trim())\n .filter((f) => f.length > 0);\n }\n\n // Match TESTS\n if (trimmed.startsWith('TESTS:')) {\n const value = trimmed.substring('TESTS:'.length).trim();\n if (value === 'PASS' || value === 'FAIL' || value === 'SKIP') 
{\n testsStatus = value;\n }\n }\n\n // Match VALIDATION\n if (trimmed.startsWith('VALIDATION:')) {\n const value = trimmed.substring('VALIDATION:'.length).trim();\n if (value === 'PASS' || value === 'FAIL') {\n validationStatus = value;\n }\n }\n\n // Match REASON\n if (trimmed.startsWith('REASON:')) {\n reason = trimmed.substring('REASON:'.length).trim();\n }\n\n // Match NOTES\n if (trimmed.startsWith('NOTES:')) {\n notes = trimmed.substring('NOTES:'.length).trim();\n }\n }\n\n // Build result\n if (mergeResult === 'SUCCESS') {\n return {\n success: true,\n resolvedFiles,\n testsStatus: testsStatus || 'SKIP',\n validationStatus: validationStatus || 'NOT_RUN',\n notes,\n output,\n };\n } else if (mergeResult === 'FAILURE') {\n return {\n success: false,\n failedFiles,\n validationStatus: validationStatus || 'NOT_RUN',\n reason,\n notes,\n output,\n };\n } else {\n // No structured result markers found - try to detect human-readable format\n // Agents sometimes output \"MERGE TASK COMPLETE\" instead of \"MERGE_RESULT: SUCCESS\"\n const lowerOutput = output.toLowerCase();\n\n // Check for success indicators\n const successIndicators = [\n 'merge task complete',\n 'successfully merged',\n 'merge complete',\n 'pushed merge commit',\n 'successfully merged and pushed',\n ];\n\n const failureIndicators = [\n 'merge failed',\n 'merge task failed',\n 'could not merge',\n 'conflict not resolved',\n ];\n\n const hasSuccessIndicator = successIndicators.some(i => lowerOutput.includes(i));\n const hasFailureIndicator = failureIndicators.some(i => lowerOutput.includes(i));\n\n if (hasSuccessIndicator && !hasFailureIndicator) {\n // Extract test status from output if mentioned\n let detectedTestStatus: 'PASS' | 'FAIL' | 'SKIP' = 'SKIP';\n if (lowerOutput.includes('tests: pass') || lowerOutput.includes('tests passed') ||\n output.match(/\\d+ passed/)) {\n detectedTestStatus = 'PASS';\n } else if (lowerOutput.includes('tests: fail') || lowerOutput.includes('tests failed')) {\n 
detectedTestStatus = 'FAIL';\n }\n\n console.log('[merge-agent] Detected success from human-readable output');\n return {\n success: true,\n testsStatus: detectedTestStatus,\n validationStatus: 'PASS',\n notes: 'Detected from human-readable output (agent did not use structured format)',\n output,\n };\n }\n\n if (hasFailureIndicator) {\n console.log('[merge-agent] Detected failure from human-readable output');\n return {\n success: false,\n validationStatus: 'NOT_RUN',\n reason: 'Detected merge failure from agent output',\n output,\n };\n }\n\n // Truly unrecognized output\n return {\n success: false,\n validationStatus: 'NOT_RUN',\n reason: 'Agent did not report result in expected format',\n output,\n };\n }\n}\n\n/**\n * Get conflict files from git status (async)\n */\nasync function getConflictFiles(projectPath: string): Promise<string[]> {\n try {\n const { stdout: status } = await execAsync('git diff --name-only --diff-filter=U', {\n cwd: projectPath,\n encoding: 'utf-8',\n });\n\n return status\n .split('\\n')\n .map((line) => line.trim())\n .filter((line) => line.length > 0);\n } catch (error) {\n console.error('Failed to get conflict files:', error);\n return [];\n }\n}\n\n/**\n * Log merge to history\n */\nfunction logMergeHistory(context: MergeConflictContext, result: MergeResult, sessionId?: string): void {\n // Ensure history directory exists\n if (!existsSync(MERGE_HISTORY_DIR)) {\n mkdirSync(MERGE_HISTORY_DIR, { recursive: true });\n }\n\n const entry: MergeHistoryEntry = {\n timestamp: new Date().toISOString(),\n issueId: context.issueId,\n sourceBranch: context.sourceBranch,\n targetBranch: context.targetBranch,\n conflictFiles: context.conflictFiles,\n result: {\n ...result,\n output: undefined, // Don't store full output in history\n },\n sessionId,\n };\n\n appendFileSync(MERGE_HISTORY_FILE, JSON.stringify(entry) + '\\n', 'utf-8');\n}\n\n/**\n * Log activity to the dashboard activity log (event-sourced via emitActivityEntry)\n */\nfunction 
logActivity(action: string, details: string, issueId?: string): void {\n emitActivityEntry({\n source: 'merge-agent',\n level: action.includes('fail') || action.includes('error') ? 'error' : action.includes('warn') ? 'warn' : 'success',\n message: `[merge-agent] ${action}: ${details}`,\n issueId,\n });\n}\n\n/**\n * Capture tmux output and look for result markers (async)\n */\nasync function captureTmuxOutput(sessionName: string): Promise<string> {\n try {\n const { stdout } = await execAsync(`tmux capture-pane -t \"${sessionName}\" -p`, { encoding: 'utf-8' });\n return stdout;\n } catch {\n return '';\n }\n}\n\n/**\n * Check if specialist-merge-agent tmux session is running (async)\n */\nasync function isMergeAgentRunning(): Promise<boolean> {\n try {\n await execAsync(`tmux has-session -t specialist-merge-agent 2>/dev/null`, { encoding: 'utf-8' });\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Send a message to an agent's tmux session (async)\n */\nasync function sendMessageToAgent(issueId: string, message: string): Promise<boolean> {\n // Agent sessions are typically named agent-{issueId} (lowercase)\n const sessionName = `agent-${issueId.toLowerCase()}`;\n\n try {\n // Check if session exists\n if (!sessionExists(sessionName)) {\n console.log(`[merge-agent] Could not send message to ${sessionName} (session does not exist)`);\n return false;\n }\n\n // Send the message using centralized sendKeys\n await sendKeysAsync(sessionName, message);\n\n console.log(`[merge-agent] Sent message to ${sessionName}`);\n logActivity('agent_message', `Sent to ${sessionName}: ${message.slice(0, 100)}...`);\n return true;\n } catch {\n console.log(`[merge-agent] Could not send message to ${sessionName} (session may not exist)`);\n return false;\n }\n}\n\n/**\n * Spawn merge-agent to resolve conflicts using the tmux session\n *\n * @param context - Merge conflict context\n * @returns Promise that resolves with merge result\n */\nexport async function 
spawnMergeAgent(context: MergeConflictContext): Promise<MergeResult> {\n console.log(`[merge-agent] Starting conflict resolution for ${context.issueId}`);\n logActivity('merge_start', `Starting merge for ${context.issueId}: ${context.conflictFiles.join(', ')}`);\n\n // Detect test command if not provided\n if (!context.testCommand) {\n context.testCommand = detectTestCommand(context.projectPath);\n }\n\n const tmuxSession = getTmuxSessionName('merge-agent');\n console.log(`[merge-agent] Using tmux session: ${tmuxSession}`);\n console.log(`[merge-agent] Test command: ${context.testCommand}`);\n\n // Check if merge-agent session is running\n if (!(await isMergeAgentRunning())) {\n console.log(`[merge-agent] Session not running, cannot proceed`);\n logActivity('merge_error', `Session ${tmuxSession} not running`);\n return {\n success: false,\n reason: `Specialist ${tmuxSession} is not running. Start Cloister first.`,\n };\n }\n\n // Build prompt\n const prompt = buildMergePrompt(context);\n\n try {\n // Send prompt to tmux session using centralized sendKeys\n console.log(`[merge-agent] Sending task to ${tmuxSession}...`);\n await sendKeysAsync(tmuxSession, prompt);\n\n // Record wake event\n recordWake('merge-agent');\n logActivity('merge_task_sent', `Task sent to ${tmuxSession}`);\n\n console.log(`[merge-agent] Task sent, waiting for completion...`);\n\n // Poll for result with timeout\n const startTime = Date.now();\n const POLL_INTERVAL = 5000; // 5 seconds\n let lastOutput = '';\n\n while (Date.now() - startTime < MERGE_TIMEOUT_MS) {\n await new Promise(resolve => setTimeout(resolve, POLL_INTERVAL));\n\n const output = await captureTmuxOutput(tmuxSession);\n\n // Check if we have new output with result markers\n if (output !== lastOutput) {\n lastOutput = output;\n const lowerOutput = output.toLowerCase();\n\n // Look for result markers in the output (structured or human-readable)\n const hasStructuredResult = output.includes('MERGE_RESULT:');\n const 
hasHumanReadableResult =\n lowerOutput.includes('merge task complete') ||\n lowerOutput.includes('successfully merged') ||\n lowerOutput.includes('merge complete') ||\n lowerOutput.includes('merge failed') ||\n lowerOutput.includes('merge task failed');\n\n if (hasStructuredResult || hasHumanReadableResult) {\n console.log(`[merge-agent] Found result markers in output (structured: ${hasStructuredResult}, human-readable: ${hasHumanReadableResult})`);\n\n const result = parseAgentOutput(output);\n\n // If agent reports success, run post-merge validation\n if (result.success) {\n console.log(`[merge-agent] Agent reported success, running post-merge validation...`);\n logActivity('merge_validation_start', `Running validation for ${context.issueId}`);\n\n // Extract baseline failure count from agent output for baseline comparison\n // Agent output contains a table like: │ Failed │ 18 │ 18 │ 0 ✅ │\n const baselineMatch = output.match(/Failed\\s*│\\s*(\\d+)\\s*│/);\n const baselineTestFailures = baselineMatch ? 
parseInt(baselineMatch[1], 10) : undefined;\n if (baselineTestFailures !== undefined) {\n console.log(`[merge-agent] Extracted baseline failure count from agent: ${baselineTestFailures}`);\n }\n\n const validationResult = await runMergeValidation({\n projectPath: context.projectPath,\n issueId: context.issueId,\n baselineTestFailures,\n });\n\n if (validationResult.valid) {\n // Validation passed — now run quality gates if configured\n console.log(`[merge-agent] ✓ Validation passed`);\n\n const gateResults = await runProjectQualityGates(context.projectPath, 'pre_push');\n const failedRequired = gateResults.filter(g => !g.passed && g.required);\n if (failedRequired.length > 0) {\n const failedNames = failedRequired.map(g => g.name).join(', ');\n console.log(`[merge-agent] ✗ Quality gates failed: ${failedNames}`);\n logActivity('merge_quality_gate_fail', `Quality gates failed for ${context.issueId}: ${failedNames}`);\n\n const revertSuccess = await autoRevertMerge(context.projectPath);\n const revertNote = revertSuccess\n ? 'Merge auto-reverted to clean state'\n : 'WARNING: Auto-revert failed - manual cleanup required';\n\n const failedResult: MergeResult = {\n success: false,\n validationStatus: 'FAIL',\n reason: `Quality gate(s) failed: ${failedNames}. 
${revertNote}`,\n notes: result.notes,\n output,\n };\n logMergeHistory(context, failedResult);\n return failedResult;\n }\n\n logActivity('merge_success', `Merge and validation completed for ${context.issueId}`);\n\n // Update result with validation status\n result.validationStatus = 'PASS';\n logMergeHistory(context, result);\n\n // Run post-merge cleanup (move PRD, update issue status)\n await postMergeLifecycle(context.issueId, context.projectPath, context.sourceBranch);\n\n // Notify TLDR daemon to reindex changed files\n await notifyTldrDaemon(context.projectPath, context.sourceBranch);\n\n return result;\n } else {\n // Validation failed - auto-revert\n console.log(`[merge-agent] ✗ Validation failed:`, validationResult.failures);\n logActivity('merge_validation_fail', `Validation failed for ${context.issueId}: ${validationResult.failures.map(f => f.type).join(', ')}`);\n\n // Revert to ORIG_HEAD (set by git at merge time)\n const revertSuccess = await autoRevertMerge(context.projectPath);\n\n const failureReason = validationResult.failures.map(f => `${f.type}: ${f.message}`).join('; ');\n const revertNote = revertSuccess\n ? 'Merge auto-reverted to clean state'\n : 'WARNING: Auto-revert failed - manual cleanup required';\n\n console.log(`[merge-agent] ${revertNote}`);\n logActivity('merge_auto_revert', revertNote);\n\n // Return failure with validation details\n const failedResult: MergeResult = {\n success: false,\n validationStatus: 'FAIL',\n reason: `Validation failed: ${failureReason}. 
${revertNote}`,\n notes: result.notes,\n output,\n };\n\n logMergeHistory(context, failedResult);\n return failedResult;\n }\n } else {\n // Agent reported failure\n logActivity('merge_failure', `Merge failed for ${context.issueId}: ${result.reason}`);\n logMergeHistory(context, result);\n return result;\n }\n }\n }\n\n // Log progress periodically\n const elapsed = Math.round((Date.now() - startTime) / 1000);\n if (elapsed % 30 === 0) {\n console.log(`[merge-agent] Still working... (${elapsed}s elapsed)`);\n }\n }\n\n // Timeout\n console.log(`[merge-agent] Timeout after ${MERGE_TIMEOUT_MS / 1000} seconds`);\n logActivity('merge_timeout', `Merge timed out for ${context.issueId}`);\n\n return {\n success: false,\n reason: `Timeout after ${MERGE_TIMEOUT_MS / 60000} minutes`,\n output: lastOutput,\n };\n } catch (error: any) {\n console.error(`[merge-agent] Failed:`, error);\n logActivity('merge_error', `Error: ${error.message}`);\n\n const result: MergeResult = {\n success: false,\n reason: error.message || 'Unknown error',\n };\n\n logMergeHistory(context, result);\n return result;\n }\n}\n\n/**\n * Attempt merge and handle result (clean merge, conflicts, or failure)\n *\n * This function:\n * 1. Attempts to merge sourceBranch into current branch\n * 2. If clean merge: commits and optionally runs tests\n * 3. If conflicts: spawns merge-agent to resolve them\n * 4. 
If failure: returns error\n *\n * @param projectPath - Project root path\n * @param sourceBranch - Feature branch to merge\n * @param targetBranch - Target branch (usually main)\n * @param issueId - Issue identifier\n * @returns Promise that resolves with merge result\n */\nexport async function spawnMergeAgentForBranches(\n projectPath: string,\n sourceBranch: string,\n targetBranch: string,\n issueId: string,\n options?: { skipDoneReport?: boolean }\n): Promise<MergeResult> {\n console.log(`[merge-agent] Waking specialist for merge of ${sourceBranch} into ${targetBranch}`);\n logActivity('merge_attempt', `Waking specialist for merge: ${sourceBranch} -> ${targetBranch}`);\n\n // Pre-flight checks (quick validation before waking specialist)\n try {\n // 1. Check for and clean up stale git lock files\n console.log(`[merge-agent] Checking for stale git lock files...`);\n const lockCleanup = await cleanupStaleLocks(projectPath);\n\n if (lockCleanup.found.length > 0) {\n console.log(`[merge-agent] Found ${lockCleanup.found.length} lock file(s)`);\n\n if (lockCleanup.removed.length > 0) {\n console.log(`[merge-agent] ✓ Cleaned up ${lockCleanup.removed.length} stale lock file(s):`);\n lockCleanup.removed.forEach(f => console.log(` - ${f}`));\n logActivity('git_lock_cleanup', `Removed ${lockCleanup.removed.length} stale lock file(s)`);\n }\n\n if (lockCleanup.errors.length > 0) {\n console.warn(`[merge-agent] ⚠️ Failed to clean up some locks:`, lockCleanup.errors);\n if (lockCleanup.errors.some(e => e.error.includes('Git processes are running'))) {\n const message = 'Git processes are still running - cannot safely start merge';\n console.error(`[merge-agent] ${message}`);\n logActivity('merge_blocked', message);\n return { success: false, reason: message };\n }\n }\n }\n\n // 2. 
Check that source branch is pushed to remote\n try {\n const { stdout: remoteBranches } = await execAsync(`git ls-remote --heads origin ${sourceBranch}`, {\n cwd: projectPath,\n encoding: 'utf-8',\n });\n\n if (!remoteBranches.trim()) {\n const message = `Branch ${sourceBranch} is not pushed to remote.`;\n console.error(`[merge-agent] ${message}`);\n logActivity('merge_blocked', message);\n // Write feedback file and send short reference\n const { writeFeedbackFile } = await import('./feedback-writer.js');\n const blockMsg = `# Merge Blocked\\n\\nBranch \"${sourceBranch}\" is not pushed to remote.\\n\\n## Required Action\\n\\nRun: \\`git push -u origin ${sourceBranch}\\``;\n const fileResult = await writeFeedbackFile({\n issueId,\n specialist: 'merge-agent',\n outcome: 'blocked',\n summary: `Branch ${sourceBranch} not pushed`,\n markdownBody: blockMsg,\n });\n if (fileResult.success) {\n await sendMessageToAgent(issueId, `SPECIALIST FEEDBACK: merge-agent reported BLOCKED for ${issueId}.\\nRead and address: ${fileResult.relativePath}`);\n } else {\n console.error(`[merge-agent] Failed to write feedback file for ${issueId}: ${fileResult.error}`);\n }\n return { success: false, reason: message };\n }\n } catch {\n const message = `Cannot verify remote branch ${sourceBranch}.`;\n console.error(`[merge-agent] ${message}`);\n logActivity('merge_blocked', message);\n return { success: false, reason: message };\n }\n\n // NOTE: We don't check for uncommitted changes in the main repo here.\n // The merge happens via git merge which will fail if there are conflicts.\n // Uncommitted changes in main are the user's own work and shouldn't block\n // merging a feature branch. The dashboard server already checks the\n // workspace for uncommitted changes before initiating the merge.\n } catch (error: any) {\n return { success: false, reason: `Pre-flight check failed: ${error.message}` };\n }\n\n // 3. 
No-op check: if sourceBranch is already an ancestor of targetBranch, skip the merge\n try {\n await execAsync(`git fetch origin ${sourceBranch} ${targetBranch}`, {\n cwd: projectPath,\n encoding: 'utf-8',\n });\n let isAlreadyMerged = false;\n try {\n await execAsync(\n `git merge-base --is-ancestor origin/${sourceBranch} origin/${targetBranch}`,\n { cwd: projectPath, encoding: 'utf-8' }\n );\n isAlreadyMerged = true;\n } catch (e: any) {\n // exit code 1 means not an ancestor — proceed with merge\n // any other exit code is a real error; propagate it\n if (e.code !== 1) {\n throw e;\n }\n }\n if (isAlreadyMerged) {\n const message = `Branch ${sourceBranch} is already integrated into ${targetBranch} — no merge needed`;\n console.log(`[merge-agent] ${message}`);\n logActivity('merge_skipped', message);\n return { success: true, reason: message };\n }\n } catch (ancestorErr: any) {\n console.warn(`[merge-agent] Ancestor check failed: ${ancestorErr.message} (continuing)`);\n }\n\n // Record current HEAD to detect when merge happens (polling compares against this)\n const { stdout: headBeforeRaw } = await execAsync('git rev-parse HEAD', {\n cwd: projectPath,\n encoding: 'utf-8',\n });\n const headBefore = headBeforeRaw.trim();\n\n // Stash any uncommitted changes so the merge starts from a clean state\n // We restore the stash after completion (success or rollback)\n let stashCreated = false;\n try {\n const { stdout: statusOut } = await execAsync('git status --porcelain', {\n cwd: projectPath,\n encoding: 'utf-8',\n });\n if (statusOut.trim()) {\n await execAsync('git stash push -u -m \"Pre-merge stash for ' + issueId + '\"', {\n cwd: projectPath,\n encoding: 'utf-8',\n });\n stashCreated = true;\n console.log(`[merge-agent] Stashed uncommitted changes before merge`);\n }\n } catch (stashErr: any) {\n console.warn(`[merge-agent] Failed to stash: ${stashErr.message} (continuing anyway)`);\n }\n\n // Build the task prompt for the merge-agent specialist\n const apiPort = 
process.env.API_PORT || process.env.PORT || '3011';\n const apiUrl = process.env.DASHBOARD_URL || `http://localhost:${apiPort}`;\n const skipDoneReport = options?.skipDoneReport ?? false;\n\n // When called from the polyrepo merge loop, the server manages overall status.\n // The merge-agent should NOT call /api/specialists/done — doing so would\n // prematurely set the issue's overall mergeStatus to 'merged' after one repo,\n // even if other repos haven't been merged yet.\n const doneReportInstructions = skipDoneReport\n ? `DO NOT call /api/specialists/done — the server manages status for this merge.\n After pushing, simply STOP. If you need to rollback, rollback and STOP.`\n : `Then report by calling the Panopticon API:\n curl -s -X POST ${apiUrl}/api/specialists/done \\\\\n -H \"Content-Type: application/json\" \\\\\n -d '{\"specialist\":\"merge\",\"issueId\":\"${issueId}\",\"status\":\"<passed or failed>\",\"notes\":\"<reason if failed>\"}'\n\nCRITICAL: You MUST call the /api/specialists/done endpoint whether you succeed or fail.`;\n\n const taskPrompt = `MERGE TASK for ${issueId}:\n\nPROJECT: ${projectPath}\nSOURCE BRANCH: ${sourceBranch}\nTARGET BRANCH: ${targetBranch}\n\nINSTRUCTIONS:\n\nPHASE 1 — SYNC & BASELINE (before merge):\n1. cd ${projectPath}\n2. git checkout ${targetBranch}\n3. git fetch origin ${targetBranch}\n4. Sync local ${targetBranch} with origin/${targetBranch}:\n Run: git rev-list --left-right --count ${targetBranch}...origin/${targetBranch}\n (Output: \"LOCAL_AHEAD REMOTE_AHEAD\". If REMOTE_AHEAD > 0, local is behind origin.)\n If local is behind origin (REMOTE_AHEAD > 0):\n a. git rebase origin/${targetBranch}\n (Replays local commits on top of origin — preserves linear history, no merge commits, no data loss)\n b. If rebase conflicts: abort with git rebase --abort, then STOP — human intervention needed.\n c. If rebase succeeds: continue to next step\n If local is up-to-date or ahead-only (REMOTE_AHEAD = 0): continue to next step\n5. 
Run tests on the CURRENT ${targetBranch} to establish a baseline:\n - Use the Task tool with subagent_type=\"Bash\" to run: npm test 2>&1 || true\n - Record the number of passing and failing tests as BASELINE_PASS and BASELINE_FAIL\n - This baseline is critical — you will compare post-merge results against it\n\nPHASE 2 — MERGE:\n6. git merge ${sourceBranch}\n7. If clean merge: the merge commit is auto-created (or fast-forward). Skip to Phase 3.\n8. If conflicts:\n a. Immediately abort: git merge --abort\n b. ROLLBACK — report FAILURE with note \"Merge conflicts detected — work agent must rebase before merge\"\n c. Do NOT attempt to manually resolve conflicts. The work agent or human must handle this.\n\nPHASE 3 — VERIFY:\n9. Build the project to verify no compile errors:\n - Use the Task tool with subagent_type=\"Bash\" to run the build command\n - For Node.js: NODE_OPTIONS=\"--max-old-space-size=8192\" npm run build\n - For Java/Maven: ./mvnw compile\n - Check package.json or pom.xml to determine the right command\n10. Run tests using the Task tool with subagent_type=\"Bash\":\n - For Node.js: npm test\n - Record the number of passing and failing tests as MERGE_PASS and MERGE_FAIL\n\nPHASE 4 — DECIDE:\n11. Compare results:\n - If build failed: ROLLBACK (go to step 12)\n - If MERGE_FAIL > BASELINE_FAIL (NEW test failures introduced): ROLLBACK (go to step 12)\n - If MERGE_FAIL <= BASELINE_FAIL (no new failures): PUSH (go to step 13)\n - Pre-existing failures on ${targetBranch} are NOT a reason to rollback\n12. ROLLBACK: git reset --hard ORIG_HEAD\n (ORIG_HEAD is set by git at merge time — always points to pre-merge state)\n ${doneReportInstructions.includes('DO NOT') ? 'Then STOP.' 
: `Then report failure by calling the Panopticon API:\n curl -s -X POST ${apiUrl}/api/specialists/done \\\\\n -H \"Content-Type: application/json\" \\\\\n -d '{\"specialist\":\"merge\",\"issueId\":\"${issueId}\",\"status\":\"failed\",\"notes\":\"<reason for rollback>\"}'\n Then STOP.`}\n13. PUSH: git push origin ${targetBranch}\n If push is rejected (non-fast-forward / \"tip of your current branch is behind\"):\n a. git fetch origin ${targetBranch}\n b. git rebase origin/${targetBranch}\n (Replay on top of any new remote commits — safe, no data loss)\n c. If rebase conflicts: abort with git rebase --abort, ROLLBACK (go to step 12)\n d. If rebase succeeds: retry git push origin ${targetBranch}\n e. If push fails again after one retry: ROLLBACK (go to step 12)\n ${doneReportInstructions}\n\nCRITICAL: You MUST complete this merge. The approve operation is waiting.\n\nWHY USE SUBAGENTS FOR BUILD/TEST:\n- Subagents have isolated context and won't pollute your working memory\n- Build and test output can be verbose - subagents handle this cleanly\n- If tests fail, the subagent returns a clear summary\n\nDO NOT:\n- Delete the feature branch (locally or remotely)\n- Clean up workspaces\n- Use git push --force or --force-with-lease — NEVER force-push under any circumstances\n- Skip the build step - compile errors after merge are common\n- Skip the baseline test run — without it you cannot distinguish new failures from pre-existing ones\n- Use HEAD~1 for rollback — use ORIG_HEAD which git sets automatically at merge time\n- Run git stash — the TypeScript layer handles stash/restore automatically\n- Do anything beyond the sync, merge, build, test, and push steps above\n\nReport any issues or conflicts you encountered.`;\n\n // Resolve project key for per-project ephemeral lifecycle (PAN-300)\n const resolvedProject = resolveProjectFromIssue(issueId);\n const mergeProjectKey = resolvedProject?.projectKey ?? 
null;\n const mergeSession = getTmuxSessionName('merge-agent', mergeProjectKey ?? undefined);\n\n if (!resolvedProject) {\n console.warn(`[merge-agent] Could not resolve project for ${issueId} — falling back to global specialist. Check projects.yaml configuration.`);\n }\n\n // Wait for the per-project merge-agent to be idle before sending a new task.\n // Only applies to the per-project ephemeral path — the legacy wakeSpecialist\n // path manages its own ready-wait internally via waitForReady: true.\n // Without this, sending a task to a busy specialist causes Claude's\n // \"Interrupted\" behavior — the running tool gets cancelled and the\n // previous merge is abandoned mid-flight.\n if (mergeProjectKey) {\n const { getAgentRuntimeState, saveAgentRuntimeState } = await import('../agents.js');\n const IDLE_POLL_INTERVAL = 3000; // 3 seconds\n const IDLE_MAX_WAIT = 360000; // 6 minutes (slightly longer than specialist timeout)\n const idleStart = Date.now();\n\n while (Date.now() - idleStart < IDLE_MAX_WAIT) {\n const state = getAgentRuntimeState(mergeSession);\n if (!state || state.state === 'idle' || state.state === 'suspended') {\n break; // Specialist is idle, safe to send\n }\n // Dead-session check: if runtime.json says active but tmux session is gone,\n // the specialist died without resetting state. 
Reset to idle and proceed immediately.\n try {\n await execAsync(`tmux has-session -t \"${mergeSession}\" 2>/dev/null`);\n } catch {\n // tmux has-session exits non-zero when the session does not exist\n console.log(`[merge-agent] Specialist session ${mergeSession} is dead (state was ${state.state}), resetting to idle`);\n saveAgentRuntimeState(mergeSession, { state: 'idle', lastActivity: new Date().toISOString() });\n break;\n }\n console.log(`[merge-agent] Specialist busy (state: ${state.state}, issue: ${state.currentIssue}), waiting...`);\n await new Promise(resolve => setTimeout(resolve, IDLE_POLL_INTERVAL));\n }\n\n // Final check after loop\n const finalState = getAgentRuntimeState(mergeSession);\n if (finalState && finalState.state !== 'idle' && finalState.state !== 'suspended') {\n console.warn(`[merge-agent] Specialist still busy after ${IDLE_MAX_WAIT / 1000}s, proceeding anyway`);\n }\n }\n\n // Wake the merge-agent specialist using per-project ephemeral lifecycle when possible\n let wakeResult: { success: boolean; message: string; tmuxSession?: string; error?: string };\n if (mergeProjectKey) {\n console.log(`[merge-agent] Using per-project ephemeral specialist for ${issueId} (${mergeProjectKey})`);\n wakeResult = await spawnEphemeralSpecialist(mergeProjectKey, 'merge-agent', {\n issueId,\n branch: sourceBranch,\n workspace: projectPath,\n promptOverride: taskPrompt,\n });\n } else {\n console.log(`[merge-agent] Project resolution failed, falling back to legacy global specialist for ${issueId}`);\n wakeResult = await wakeSpecialist('merge-agent', taskPrompt, {\n waitForReady: true,\n startIfNotRunning: true,\n issueId,\n });\n }\n\n if (!wakeResult.success) {\n console.error(`[merge-agent] Failed to wake specialist: ${wakeResult.message}`);\n logActivity('merge_error', `Failed to wake specialist: ${wakeResult.message}`);\n return {\n success: false,\n reason: `Failed to wake merge-agent specialist: ${wakeResult.message}`,\n };\n }\n\n 
console.log(`[merge-agent] Specialist woken, waiting for merge completion...`);\n logActivity('merge_specialist_woken', `Specialist woken, task sent`);\n\n // Poll for merge completion (check if HEAD has changed and been pushed)\n const POLL_INTERVAL = 5000; // 5 seconds\n const MAX_WAIT = 15 * 60 * 1000; // 15 minutes (match MERGE_TIMEOUT_MS)\n const startTime = Date.now();\n\n while (Date.now() - startTime < MAX_WAIT) {\n await new Promise(resolve => setTimeout(resolve, POLL_INTERVAL));\n\n try {\n // Check if we're still on target branch\n const { stdout: currentBranchRaw } = await execAsync('git branch --show-current', {\n cwd: projectPath,\n encoding: 'utf-8',\n });\n const currentBranch = currentBranchRaw.trim();\n\n if (currentBranch !== targetBranch) {\n // Specialist might still be working, continue polling\n continue;\n }\n\n // Check if HEAD has changed (merge happened)\n const { stdout: currentHeadRaw } = await execAsync('git rev-parse HEAD', {\n cwd: projectPath,\n encoding: 'utf-8',\n });\n const currentHead = currentHeadRaw.trim();\n\n if (currentHead !== headBefore) {\n // HEAD changed — the merge happened (could be merge commit OR fast-forward)\n // For merge commits: message contains \"merge\" or branch name\n // For fast-forward: message is the original commit message (no \"merge\" keyword)\n // In BOTH cases, HEAD changing means the merge is done — verify it's pushed\n {\n // Verify it's pushed — fetch first to refresh stale tracking refs\n // (the push happens in the merge-agent's tmux session, which may not\n // update the tracking ref visible to this process)\n try {\n await execAsync(`git fetch origin ${targetBranch}`, {\n cwd: projectPath,\n encoding: 'utf-8',\n timeout: 10000,\n }).catch(() => {}); // Non-fatal — fall through to rev-parse\n const { stdout: remoteHeadRaw } = await execAsync(`git rev-parse origin/${targetBranch}`, {\n cwd: projectPath,\n encoding: 'utf-8',\n });\n const remoteHead = remoteHeadRaw.trim();\n\n if (remoteHead 
=== currentHead) {\n console.log(`[merge-agent] Merge completed and pushed, running validation...`);\n logActivity('merge_validation_start', `Running post-merge validation for ${issueId}`);\n\n // Extract baseline from specialist output if available\n let specialistBaseline: number | undefined;\n try {\n const specialistOutput = await captureTmuxOutput(mergeSession);\n const baselineMatch = specialistOutput.match(/Failed\\s*│\\s*(\\d+)\\s*│/);\n specialistBaseline = baselineMatch ? parseInt(baselineMatch[1], 10) : undefined;\n if (specialistBaseline !== undefined) {\n console.log(`[merge-agent] Extracted baseline from specialist: ${specialistBaseline}`);\n }\n } catch { /* ignore */ }\n\n // Run validation\n const validationResult = await runMergeValidation({\n projectPath,\n issueId,\n baselineTestFailures: specialistBaseline,\n });\n\n if (validationResult.valid) {\n // Validation passed — now run quality gates if configured\n const skipNote = validationResult.skipped ? ' (no validation script, specialist already validated)' : '';\n console.log(`[merge-agent] ✓ Merge validation passed${skipNote}`);\n\n const gateResults = await runProjectQualityGates(projectPath, 'pre_push');\n const failedRequired = gateResults.filter(g => !g.passed && g.required);\n if (failedRequired.length > 0) {\n const failedNames = failedRequired.map(g => g.name).join(', ');\n console.log(`[merge-agent] ✗ Quality gates failed: ${failedNames}`);\n logActivity('merge_quality_gate_fail', `Quality gates failed for ${issueId}: ${failedNames}`);\n\n const revertSuccess = await autoRevertMerge(projectPath);\n const revertNote = revertSuccess\n ? 'Merge auto-reverted to clean state'\n : 'WARNING: Auto-revert failed';\n\n return {\n success: false,\n validationStatus: 'FAIL',\n reason: `Quality gate(s) failed: ${failedNames}. 
${revertNote}`,\n };\n }\n\n logActivity('merge_complete', `Merge completed by specialist${skipNote}`);\n\n // Run post-merge cleanup (move PRD, update issue status)\n await postMergeLifecycle(issueId, projectPath, sourceBranch);\n\n // Restore stashed changes\n if (stashCreated) {\n try {\n await execAsync('git stash pop', { cwd: projectPath, encoding: 'utf-8' });\n console.log(`[merge-agent] ✓ Restored stashed changes after successful merge`);\n } catch (popErr: any) {\n console.warn(`[merge-agent] ⚠ Failed to restore stash after merge: ${popErr.message}`);\n }\n }\n\n return {\n success: true,\n validationStatus: 'PASS',\n testsStatus: 'SKIP', // Specialist ran tests, we trust the result\n notes: 'Merge completed by merge-agent specialist and validation passed',\n };\n } else {\n // Validation failed - auto-revert\n console.log(`[merge-agent] ✗ Validation failed:`, validationResult.failures);\n logActivity('merge_validation_fail', `Validation failed: ${validationResult.failures.map(f => f.type).join(', ')}`);\n\n // Revert to ORIG_HEAD (set by git at merge time)\n const revertSuccess = await autoRevertMerge(projectPath);\n\n // Force push to revert the remote as well\n if (revertSuccess) {\n try {\n await execAsync(`git push --force-with-lease origin ${targetBranch}`, {\n cwd: projectPath,\n encoding: 'utf-8',\n });\n console.log(`[merge-agent] ✓ Auto-revert pushed to remote`);\n logActivity('merge_auto_revert', 'Merge auto-reverted and pushed to remote');\n } catch (pushError: any) {\n console.error(`[merge-agent] ✗ Failed to push revert: ${pushError.message}`);\n logActivity('merge_revert_push_fail', 'Auto-revert successful but push failed');\n }\n }\n\n // Restore stashed changes after revert\n if (stashCreated) {\n try {\n await execAsync('git stash pop', { cwd: projectPath, encoding: 'utf-8' });\n console.log(`[merge-agent] ✓ Restored stashed changes after revert`);\n } catch (popErr: any) {\n console.warn(`[merge-agent] ⚠ Failed to restore stash after 
revert: ${popErr.message}`);\n }\n }\n\n const failureReason = validationResult.failures.map(f => `${f.type}: ${f.message}`).join('; ');\n const revertNote = revertSuccess\n ? 'Merge auto-reverted and force-pushed to remote'\n : 'WARNING: Auto-revert failed - manual cleanup required';\n\n return {\n success: false,\n validationStatus: 'FAIL',\n reason: `Validation failed: ${failureReason}. ${revertNote}`,\n notes: 'Merge completed but validation failed, auto-reverted',\n };\n }\n }\n } catch {\n // Remote check failed, but local merge is done\n console.log(`[merge-agent] Merge completed locally, push status unknown`);\n }\n\n // Local merge done but not pushed yet - keep polling\n console.log(`[merge-agent] Merge commit detected, waiting for push...`);\n }\n }\n\n // Check if merge-agent is still running\n if (!(await isRunning('merge-agent', mergeProjectKey ?? undefined))) {\n console.error(`[merge-agent] Specialist stopped unexpectedly — checking for stranded merge commit`);\n logActivity('merge_error', 'Specialist stopped unexpectedly');\n\n // Salvage: if the specialist merged locally but died before pushing, push it ourselves\n const salvageResult = await salvageStrandedMerge(projectPath, targetBranch, headBefore, issueId, logActivity);\n if (salvageResult) return salvageResult;\n\n return {\n success: false,\n reason: 'merge-agent specialist stopped before completing the merge',\n };\n }\n\n } catch (pollError: any) {\n console.warn(`[merge-agent] Poll error: ${pollError.message}`);\n // Continue polling\n }\n }\n\n // Timeout — same salvage check\n console.error(`[merge-agent] Timeout waiting for merge completion — checking for stranded merge commit`);\n logActivity('merge_timeout', 'Timeout waiting for specialist to complete merge');\n\n const salvageResult = await salvageStrandedMerge(projectPath, targetBranch, headBefore, issueId, logActivity);\n if (salvageResult) return salvageResult;\n\n return {\n success: false,\n reason: 'Timeout waiting for 
merge-agent specialist to complete merge (15 minutes)',\n };\n}\n\n/**\n * Rebase a feature branch onto a base branch and push, using the merge-agent\n * specialist for conflict resolution.\n *\n * Used by the PR-based merge flow: triggerMerge() calls this to prepare the\n * feature branch, then calls `gh pr merge --squash` once the rebase is done.\n */\nexport async function spawnRebaseAgentForBranch(\n workspacePath: string,\n featureBranch: string,\n baseBranch: string,\n issueId: string,\n): Promise<MergeResult> {\n console.log(`[merge-agent] Starting rebase of ${featureBranch} onto ${baseBranch} for ${issueId}`);\n logActivity('rebase_start', `Rebasing ${featureBranch} onto ${baseBranch} for ${issueId}`);\n\n // Pre-flight: verify feature branch is pushed to remote\n try {\n const { stdout: remoteBranches } = await execAsync(\n `git ls-remote --heads origin ${featureBranch}`,\n { cwd: workspacePath, encoding: 'utf-8' },\n );\n if (!remoteBranches.trim()) {\n const message = `Branch ${featureBranch} is not pushed to remote`;\n console.error(`[merge-agent] ${message}`);\n return { success: false, reason: message };\n }\n } catch {\n const message = `Cannot verify remote branch ${featureBranch}`;\n console.error(`[merge-agent] ${message}`);\n return { success: false, reason: message };\n }\n\n // Record current remote HEAD of feature branch to detect rebase completion\n let headBefore: string;\n try {\n await execAsync(`git fetch origin ${featureBranch}`, { cwd: workspacePath, encoding: 'utf-8' });\n const { stdout } = await execAsync(`git rev-parse origin/${featureBranch}`, {\n cwd: workspacePath,\n encoding: 'utf-8',\n });\n headBefore = stdout.trim();\n } catch (err: any) {\n return { success: false, reason: `Failed to get remote HEAD: ${err.message}` };\n }\n\n // Build rebase task prompt\n const apiPort = process.env.API_PORT || process.env.PORT || '3011';\n const apiUrl = process.env.DASHBOARD_URL || `http://localhost:${apiPort}`;\n\n const taskPrompt = 
`REBASE TASK for ${issueId}:

WORKSPACE: ${workspacePath}
FEATURE BRANCH: ${featureBranch}
BASE BRANCH: ${baseBranch}

INSTRUCTIONS:

1. cd ${workspacePath}
2. git fetch origin ${baseBranch}
3. Check if rebase is needed:
   \`\`\`bash
   BEHIND=$(git rev-list --count HEAD..origin/${baseBranch})
   echo "Commits behind origin/${baseBranch}: $BEHIND"
   \`\`\`
4. If BEHIND is 0: skip rebase entirely — branch is already up to date. Go to step 7.
5. If BEHIND > 0: Remove .planning/ first (ephemeral artifacts always conflict), then rebase:
   \`\`\`bash
   git rm -rf .planning/ 2>/dev/null && git commit -m "chore: remove planning artifacts before rebase" 2>/dev/null
   git rebase origin/${baseBranch}
   \`\`\`
6. If rebase has conflicts:
   a. Immediately abort: git rebase --abort
   b. Report FAILURE — do NOT attempt to resolve conflicts manually
7. git push --force-with-lease origin ${featureBranch}
8. Merge the PR via GitHub CLI (this is the ACTUAL merge to main):
   \`\`\`bash
   gh pr merge ${featureBranch} --squash
   \`\`\`
   Do NOT use --auto or --admin flags. Panopticon reports commit statuses via GitHub App,
   so branch protection checks will pass automatically.
   If this fails, report FAILURE — do NOT report success without a merged PR.
9. Report completion by calling the Panopticon API:
   curl -s -X POST ${apiUrl}/api/specialists/done \\
     -H "Content-Type: application/json" \\
     -d '{"specialist":"merge","issueId":"${issueId}","status":"passed","notes":"Rebased and merged PR via gh pr merge --squash"}'

IMPORTANT:
- Work ONLY in ${workspacePath} — do NOT modify the main repo
- Do NOT run git merge locally — use gh pr merge --squash to merge via GitHub
- Do NOT run build or tests — CI handles validation after PR merge
- Use --force-with-lease (never --force) for the push
- The PR MUST be merged via gh pr merge before reporting success
- Report completion immediately after the PR merge

IF REBASE FAILS (conflicts):
After aborting, report failure so the work agent can fix it:
\`\`\`bash
curl -s -X POST ${apiUrl}/api/specialists/done \\
  -H "Content-Type: application/json" \\
  -d '{"specialist":"merge","issueId":"${issueId}","status":"failed","notes":"Rebase conflicts with main — work agent must run: git fetch origin main && git rebase origin/main, resolve conflicts, then resubmit"}'
\`\`\`

IF gh pr merge FAILS:
Report failure — do NOT report success:
\`\`\`bash
curl -s -X POST ${apiUrl}/api/specialists/done \\
  -H "Content-Type: application/json" \\
  -d '{"specialist":"merge","issueId":"${issueId}","status":"failed","notes":"Rebase succeeded but gh pr merge --squash failed"}'
\`\`\`

CRITICAL: You MUST call the /api/specialists/done endpoint whether you succeed or fail.
CRITICAL: Success means the PR is MERGED on GitHub. Rebase alone is NOT success.`;

  // Resolve project for per-project ephemeral specialist
  const resolvedProject = resolveProjectFromIssue(issueId);
  const mergeProjectKey = resolvedProject?.projectKey ?? null;
  const mergeSession = getTmuxSessionName('merge-agent', mergeProjectKey ?? undefined);

  if (!resolvedProject) {
    console.warn(`[merge-agent] Could not resolve project for ${issueId} — using global specialist`);
  }

  // Wait for specialist to be idle (same as spawnMergeAgentForBranches)
  if (mergeProjectKey) {
    const { getAgentRuntimeState, saveAgentRuntimeState } = await import('../agents.js');
    const IDLE_POLL_INTERVAL = 3000;
    const IDLE_MAX_WAIT = 360000;
    const idleStart = Date.now();

    while (Date.now() - idleStart < IDLE_MAX_WAIT) {
      const state = getAgentRuntimeState(mergeSession);
      // Idle/suspended (or no recorded state) means it is safe to send a task.
      if (!state || state.state === 'idle' || state.state === 'suspended') break;
      try {
        // Probe the tmux session; has-session exits non-zero when it is gone.
        await execAsync(`tmux has-session -t "${mergeSession}" 2>/dev/null`);
      } catch {
        // Session died without resetting its runtime state — reset and proceed.
        saveAgentRuntimeState(mergeSession, { state: 'idle', lastActivity: new Date().toISOString() });
        break;
      }
      await new Promise(resolve => setTimeout(resolve, IDLE_POLL_INTERVAL));
    }
  }

  // Wake the merge-agent specialist
  let wakeResult: { success: boolean; message: string };
  if (mergeProjectKey) {
    // Per-project ephemeral lifecycle when the project could be resolved.
    wakeResult = await spawnEphemeralSpecialist(mergeProjectKey, 'merge-agent', {
      issueId,
      branch: featureBranch,
      workspace: workspacePath,
      promptOverride: taskPrompt,
    });
  } else {
    // Legacy global specialist fallback.
    wakeResult = await wakeSpecialist('merge-agent', taskPrompt, {
      waitForReady: true,
      startIfNotRunning: true,
      issueId,
    });
  }

  if (!wakeResult.success) {
    return {
      success: false,
      reason: `Failed to wake merge-agent specialist: ${wakeResult.message}`,
    };
  }

  console.log(`[merge-agent] Rebase specialist woken for ${issueId}, polling for completion...`);

  // Poll for rebase completion: remote feature branch HEAD should change after rebase + push
  const POLL_INTERVAL = 5000;
  const MAX_WAIT = 10 * 60 * 1000; // 10 minutes

  const startTime = Date.now();
  while (Date.now() - startTime < MAX_WAIT) {
    await new Promise(resolve => setTimeout(resolve, POLL_INTERVAL));

    try {
      // Refresh the tracking ref; fetch failure is non-fatal (rev-parse below decides).
      await execAsync(`git fetch origin ${featureBranch}`, {
        cwd: workspacePath,
        encoding: 'utf-8',
        timeout: 10000,
      }).catch(() => {});

      const { stdout: remoteHeadRaw } = await execAsync(
        `git rev-parse origin/${featureBranch}`,
        { cwd: workspacePath, encoding: 'utf-8' },
      );
      const remoteHead = remoteHeadRaw.trim();

      // A changed remote HEAD means the specialist rebased and force-pushed.
      if (remoteHead !== headBefore) {
        console.log(`[merge-agent] Rebase complete for ${issueId}, new remote HEAD: ${remoteHead}`);
        logActivity('rebase_complete', `Rebase completed for ${issueId}`);
        return { success: true, reason: 'Rebase completed successfully' };
      }
    } catch {
      // Continue polling
    }

    // Check if specialist stopped
    if (!(await isRunning('merge-agent', mergeProjectKey ?? undefined))) {
      // Final check: maybe rebase succeeded just before specialist stopped
      try {
        await execAsync(`git fetch origin ${featureBranch}`, {
          cwd: workspacePath,
          encoding: 'utf-8',
        }).catch(() => {});
        const { stdout } = await execAsync(`git rev-parse origin/${featureBranch}`, {
          cwd: workspacePath,
          encoding: 'utf-8',
        });
        if (stdout.trim() !== headBefore) {
          console.log(`[merge-agent] Rebase detected after specialist stopped for ${issueId}`);
          return { success: true, reason: 'Rebase completed (detected after specialist stopped)' };
        }
      } catch {}

      return {
        success: false,
        reason: 'merge-agent specialist stopped before completing rebase',
      };
    }
  }

  logActivity('rebase_timeout', `Rebase timed out for ${issueId}`);
  return {
    success: false,
    reason: 'Timeout waiting for rebase to complete (10 minutes)',
  };
}

/**
 * Salvage a stranded merge commit — if the specialist merged locally but died
 * before pushing, detect the unpushed merge and push it ourselves.
 *
 * Returns a success result if salvaged, or null if nothing to salvage.
 */
async function salvageStrandedMerge(
  projectPath: string,
  targetBranch: string,
  headBefore: string,
  issueId: string,
  logActivity: (action: string, detail: string) => void,
): Promise<{ success: boolean; reason?: 
string } | null> {\n try {\n const { stdout: currentHeadRaw } = await execAsync('git rev-parse HEAD', {\n cwd: projectPath,\n encoding: 'utf-8',\n });\n const currentHead = currentHeadRaw.trim();\n\n if (currentHead === headBefore) {\n // No local merge happened — nothing to salvage\n return null;\n }\n\n // Local HEAD changed — check if it's ahead of remote\n await execAsync(`git fetch origin ${targetBranch}`, {\n cwd: projectPath,\n encoding: 'utf-8',\n timeout: 10000,\n }).catch(() => {});\n\n const { stdout: remoteHeadRaw } = await execAsync(`git rev-parse origin/${targetBranch}`, {\n cwd: projectPath,\n encoding: 'utf-8',\n });\n\n if (remoteHeadRaw.trim() === currentHead) {\n // Already pushed (maybe by another process)\n console.log(`[merge-agent] Salvage check: merge already pushed`);\n return { success: true };\n }\n\n // Stranded merge detected — push it\n console.log(`[merge-agent] SALVAGING stranded merge for ${issueId}: local HEAD ${currentHead.slice(0, 8)} != remote ${remoteHeadRaw.trim().slice(0, 8)}`);\n logActivity('merge_salvage', `Pushing stranded merge commit ${currentHead.slice(0, 8)} for ${issueId}`);\n\n await execAsync(`git push origin ${targetBranch}`, {\n cwd: projectPath,\n encoding: 'utf-8',\n timeout: 30000,\n });\n\n console.log(`[merge-agent] Salvage push successful for ${issueId}`);\n logActivity('merge_salvage_success', `Stranded merge pushed successfully`);\n return { success: true };\n } catch (error: any) {\n console.error(`[merge-agent] Salvage failed: ${error.message}`);\n logActivity('merge_salvage_failed', `Salvage push failed: ${error.message}`);\n return null;\n }\n}\n\n/**\n * Result of syncing main into a workspace branch\n */\nexport interface SyncMainResult {\n success: boolean;\n alreadyUpToDate?: boolean;\n commitCount?: number;\n changedFiles?: string[];\n conflictFiles?: string[];\n reason?: string;\n}\n\n/**\n * Scan workspace for leftover git conflict markers (async)\n */\nexport async function 
scanForConflictMarkers(projectPath: string): Promise<string[]> {\n try {\n // git diff --check exits non-zero and prints filenames when conflict markers exist\n const { stdout } = await execAsync('git diff --check 2>&1 || true', {\n cwd: projectPath,\n encoding: 'utf-8',\n });\n const files = stdout\n .split('\\n')\n .filter(line => line.includes('leftover conflict marker'))\n .map(line => line.split(':')[0].trim())\n .filter(f => f.length > 0);\n return [...new Set(files)];\n } catch {\n return [];\n }\n}\n\n/**\n * Sync the latest main branch into a workspace's feature branch.\n *\n * This performs a `git merge origin/main` in the workspace. If the merge is clean\n * it returns immediately. If conflicts arise, the merge-agent specialist is woken\n * to resolve them. The merge is never pushed — this is a local workspace operation.\n *\n * Auto-commits any uncommitted changes before merging (with safety verification).\n */\nexport async function syncMainIntoWorkspace(\n projectPath: string,\n issueId: string,\n): Promise<SyncMainResult> {\n console.log(`[sync-main] Starting sync of main into workspace for ${issueId}`);\n logActivity('sync_main_start', `Starting sync for ${issueId}`);\n\n // Pre-flight: auto-commit uncommitted changes before merge\n try {\n const { stdout: statusOut } = await execAsync('git status --porcelain', {\n cwd: projectPath,\n encoding: 'utf-8',\n });\n if (statusOut.trim()) {\n console.log(`[sync-main] Uncommitted changes detected, auto-committing...`);\n logActivity('sync_main_auto_commit', `Auto-committing uncommitted changes before sync`);\n try {\n await execAsync('git add -A && git commit -m \"WIP: auto-commit before sync with main\"', {\n cwd: projectPath,\n encoding: 'utf-8',\n });\n console.log(`[sync-main] Auto-commit successful`);\n } catch (commitErr: any) {\n const message = `Failed to auto-commit uncommitted changes: ${commitErr.message}`;\n console.error(`[sync-main] ${message}`);\n logActivity('sync_main_blocked', message);\n 
return { success: false, reason: message };\n }\n\n // Verify commit succeeded — abort if uncommitted changes still exist\n const { stdout: postCommitStatus } = await execAsync('git status --porcelain', {\n cwd: projectPath,\n encoding: 'utf-8',\n });\n if (postCommitStatus.trim()) {\n const message = 'Uncommitted changes remain after auto-commit — aborting sync';\n console.error(`[sync-main] ${message}`);\n logActivity('sync_main_blocked', message);\n return { success: false, reason: message };\n }\n }\n } catch (error: any) {\n return { success: false, reason: `Failed to check git status: ${error.message}` };\n }\n\n // Clean up stale git lock files\n try {\n const lockCleanup = await cleanupStaleLocks(projectPath);\n if (lockCleanup.found.length > 0) {\n console.log(`[sync-main] Found ${lockCleanup.found.length} lock file(s)`);\n if (lockCleanup.removed.length > 0) {\n console.log(`[sync-main] Cleaned up ${lockCleanup.removed.length} stale lock file(s)`);\n logActivity('git_lock_cleanup', `Removed ${lockCleanup.removed.length} stale lock file(s)`);\n }\n if (lockCleanup.errors.some((e: { file: string; error: string }) => e.error.includes('Git processes are running'))) {\n const message = 'Git processes are still running — cannot safely start sync';\n console.error(`[sync-main] ${message}`);\n logActivity('sync_main_blocked', message);\n return { success: false, reason: message };\n }\n }\n } catch (lockErr: any) {\n console.warn(`[sync-main] Lock cleanup warning: ${lockErr.message} (continuing)`);\n }\n\n // Fetch latest main\n try {\n console.log(`[sync-main] Fetching origin/main...`);\n await execAsync('git fetch origin main', { cwd: projectPath, encoding: 'utf-8' });\n } catch (error: any) {\n return { success: false, reason: `Failed to fetch origin/main: ${error.message}` };\n }\n\n // Attempt the merge\n let mergeOutput = '';\n let hasConflicts = false;\n try {\n const result = await execAsync('git merge origin/main', { cwd: projectPath, encoding: 'utf-8' 
});\n mergeOutput = (result.stdout || '') + (result.stderr || '');\n } catch (error: any) {\n mergeOutput = (error.stdout || '') + (error.stderr || '');\n hasConflicts = true;\n }\n\n // Already up to date?\n if (mergeOutput.includes('Already up to date') || mergeOutput.includes('Already up-to-date')) {\n console.log(`[sync-main] Already up to date`);\n logActivity('sync_main_noop', `${issueId} already up to date with main`);\n return { success: true, alreadyUpToDate: true };\n }\n\n if (!hasConflicts) {\n // Clean merge — collect stats\n console.log(`[sync-main] Clean merge completed`);\n logActivity('sync_main_success', `Clean merge of main into ${issueId}`);\n\n let changedFiles: string[] = [];\n let commitCount = 0;\n try {\n const { stdout: diffFiles } = await execAsync(\n 'git diff --name-only ORIG_HEAD HEAD 2>/dev/null || git diff --name-only HEAD~1 HEAD',\n { cwd: projectPath, encoding: 'utf-8' },\n );\n changedFiles = diffFiles.trim().split('\\n').filter(f => f.length > 0);\n } catch { /* non-fatal */ }\n try {\n const { stdout: logOut } = await execAsync(\n 'git log ORIG_HEAD..HEAD --oneline 2>/dev/null || echo \"\"',\n { cwd: projectPath, encoding: 'utf-8' },\n );\n commitCount = logOut.trim().split('\\n').filter(l => l.length > 0).length;\n } catch { /* non-fatal */ }\n\n return { success: true, commitCount, changedFiles };\n }\n\n // Conflict case — delegate to merge-agent specialist\n const conflictFiles = await getConflictFiles(projectPath);\n console.log(`[sync-main] ${conflictFiles.length} conflict(s), waking merge-agent...`);\n logActivity('sync_main_conflicts', `${conflictFiles.length} conflict(s) in ${issueId}: ${conflictFiles.join(', ')}`);\n\n const workspaceBranch = await execAsync('git branch --show-current', { cwd: projectPath, encoding: 'utf-8' })\n .then(r => r.stdout.trim())\n .catch(() => `feature/${issueId.toLowerCase()}`);\n\n // Build prompt from template\n const promptPath = join(__dirname, 'prompts', 'sync-main.md');\n let 
taskPrompt: string;\n try {\n const template = readFileSync(promptPath, 'utf-8');\n taskPrompt = template\n .replace(/{{projectPath}}/g, projectPath)\n .replace(/{{workspaceBranch}}/g, workspaceBranch)\n .replace(/{{issueId}}/g, issueId)\n .replace(/{{conflictFiles}}/g, conflictFiles.map(f => `- ${f}`).join('\\n'));\n } catch (templateErr: any) {\n console.error(`[sync-main] Could not load sync-main.md template: ${templateErr.message}`);\n logActivity('sync_main_error', `Template load failed: ${templateErr.message}`);\n try { await execAsync('git merge --abort', { cwd: projectPath, encoding: 'utf-8' }); } catch {}\n return { success: false, conflictFiles, reason: 'Internal error: sync-main prompt template not found' };\n }\n\n // Wake the merge-agent specialist using per-project ephemeral lifecycle when possible\n const syncResolvedProject = resolveProjectFromIssue(issueId);\n const syncProjectKey = syncResolvedProject?.projectKey ?? null;\n let syncWakeResult: { success: boolean; message: string; tmuxSession?: string; error?: string };\n if (syncProjectKey) {\n syncWakeResult = await spawnEphemeralSpecialist(syncProjectKey, 'merge-agent', {\n issueId,\n branch: workspaceBranch,\n workspace: projectPath,\n promptOverride: taskPrompt,\n });\n } else {\n syncWakeResult = await wakeSpecialist('merge-agent', taskPrompt, {\n waitForReady: true,\n startIfNotRunning: true,\n issueId,\n });\n }\n\n if (!syncWakeResult.success) {\n try { await execAsync('git merge --abort', { cwd: projectPath, encoding: 'utf-8' }); } catch {}\n const message = `Failed to wake merge-agent specialist: ${syncWakeResult.message}`;\n console.error(`[sync-main] ${message}`);\n logActivity('sync_main_error', message);\n return { success: false, conflictFiles, reason: message };\n }\n\n console.log(`[sync-main] Specialist woken, waiting for conflict resolution...`);\n logActivity('sync_main_agent_woken', `Agent resolving ${conflictFiles.length} conflict(s) for ${issueId}`);\n\n // Poll tmux output 
for MERGE_RESULT markers\n const tmuxSession = getTmuxSessionName('merge-agent', syncProjectKey ?? undefined);\n const startTime = Date.now();\n const POLL_INTERVAL = 5000;\n const SYNC_TIMEOUT_MS = 15 * 60 * 1000; // 15 minutes\n let lastOutput = '';\n\n while (Date.now() - startTime < SYNC_TIMEOUT_MS) {\n await new Promise(resolve => setTimeout(resolve, POLL_INTERVAL));\n\n const output = await captureTmuxOutput(tmuxSession);\n if (output !== lastOutput) {\n lastOutput = output;\n const hasStructured = output.includes('MERGE_RESULT:');\n const lowerOutput = output.toLowerCase();\n const hasHumanReadable =\n lowerOutput.includes('merge task complete') ||\n lowerOutput.includes('successfully merged') ||\n lowerOutput.includes('merge complete') ||\n lowerOutput.includes('merge failed') ||\n lowerOutput.includes('merge task failed');\n\n if (hasStructured || hasHumanReadable) {\n const agentResult = parseAgentOutput(output);\n\n if (agentResult.success) {\n // Verify no leftover conflict markers\n const remaining = await scanForConflictMarkers(projectPath);\n if (remaining.length > 0) {\n try { await execAsync('git merge --abort', { cwd: projectPath, encoding: 'utf-8' }); } catch {}\n const msg = `Agent reported success but ${remaining.length} conflict marker(s) remain in: ${remaining.join(', ')}`;\n console.error(`[sync-main] ${msg}`);\n logActivity('sync_main_markers_remain', msg);\n return { success: false, conflictFiles, reason: msg };\n }\n\n console.log(`[sync-main] ✓ Conflicts resolved by agent`);\n logActivity('sync_main_success', `Merge agent resolved conflicts for ${issueId}`);\n\n // Collect stats\n let changedFiles: string[] = [];\n let commitCount = 0;\n try {\n const { stdout: diffFiles } = await execAsync(\n 'git diff --name-only ORIG_HEAD HEAD',\n { cwd: projectPath, encoding: 'utf-8' },\n );\n changedFiles = diffFiles.trim().split('\\n').filter(f => f.length > 0);\n const { stdout: logOut } = await execAsync(\n 'git log ORIG_HEAD..HEAD --oneline',\n 
{ cwd: projectPath, encoding: 'utf-8' },\n );\n commitCount = logOut.trim().split('\\n').filter(l => l.length > 0).length;\n } catch { /* non-fatal */ }\n\n return { success: true, commitCount, changedFiles };\n } else {\n // Agent failed — ensure merge is aborted\n try { await execAsync('git merge --abort', { cwd: projectPath, encoding: 'utf-8' }); } catch {}\n console.log(`[sync-main] ✗ Agent could not resolve conflicts`);\n logActivity('sync_main_agent_failed', `Agent failed to resolve conflicts for ${issueId}`);\n return {\n success: false,\n conflictFiles,\n reason: agentResult.reason || 'Merge agent could not resolve conflicts',\n };\n }\n }\n }\n\n const elapsed = Math.round((Date.now() - startTime) / 1000);\n if (elapsed % 30 === 0) {\n console.log(`[sync-main] Still waiting for agent... (${elapsed}s elapsed)`);\n }\n }\n\n // Timeout\n try { await execAsync('git merge --abort', { cwd: projectPath, encoding: 'utf-8' }); } catch {}\n logActivity('sync_main_timeout', `Sync timed out for ${issueId}`);\n return {\n success: false,\n conflictFiles,\n reason: `Timeout: merge agent did not complete within ${SYNC_TIMEOUT_MS / 60000} minutes`,\n };\n}\n\n/**\n * Look up and run quality gates for the project at projectPath.\n * Returns empty array if no quality gates are configured.\n *\n * In polyrepo mode (projectPath is a sub-repo of project.path), only gates\n * whose `path` field matches the relative sub-repo path are run. 
Gates with\n * no `path` field are skipped in polyrepo context.\n */\nexport async function runProjectQualityGates(\n projectPath: string,\n phase: 'pre_push' | 'post_push'\n): Promise<import('./validation.js').QualityGateResult[]> {\n try {\n const config = loadProjectsConfig();\n // Find the project whose path matches\n const project = Object.values(config.projects).find(p => projectPath.startsWith(p.path));\n if (!project?.quality_gates || Object.keys(project.quality_gates).length === 0) {\n console.log(`[merge-agent] No quality gates configured for ${projectPath}`);\n return [];\n }\n\n // Detect polyrepo context: if projectPath is a subdirectory of project.path,\n // repoRelPath is non-empty (e.g., 'frontend' or 'backend').\n const repoRelPath = relative(project.path, projectPath);\n\n let gatesToRun = project.quality_gates;\n if (repoRelPath && !repoRelPath.startsWith('..')) {\n // Polyrepo: only run gates whose path matches this sub-repo\n const filtered = Object.entries(project.quality_gates).filter(\n ([, gate]) => gate.path === repoRelPath\n );\n if (filtered.length === 0) {\n console.log(`[merge-agent] No quality gates configured for repo path \"${repoRelPath}\"`);\n return [];\n }\n gatesToRun = Object.fromEntries(filtered);\n console.log(\n `[merge-agent] Polyrepo: running ${Object.keys(gatesToRun).length} gate(s) for path \"${repoRelPath}\"`\n );\n }\n\n console.log(`[merge-agent] Running ${phase} quality gates for project \"${project.name}\"`);\n return await runQualityGates(gatesToRun, projectPath, phase);\n } catch (error: any) {\n console.error(`[merge-agent] Failed to load quality gates: ${error.message}`);\n return [];\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAwQA,SAAA,gBAAA;AACE,OAAA,IAAA,MAAA,wHAAA;;;;;;;;;;;;;;;;;;;;;;;;;ACrOF,SAAgB,kBAAkB,SAAoC;AACpE,KAAI;EACF,MAAM,QAAQ,eAAe;EAC7B,MAAM,QAAQ;GACZ,MAAM;GACN,4BAAW,IAAI,MAAM,EAAC,aAAa;GACnC,SAAS;IACP,IAAI,YAAY;IAChB,QAAQ,QAAQ;IAChB,OAAO,QAAQ;IACf,SAAS,QAAQ;IACjB,SAAS,QAAQ,WAAW;IAC5B,SAAS,QAAQ,WAAW;IAC7B;GACF;AACD,QAAM,OAAO,MAAM;SACb;;;;;;;;;;;;;;;ACnCV,UAAA,KAAA;ACRkB,UAAU,KAAK;;;YCUZ;AANH,UAAU,KAAK;AAGjC,MAAM,YAAY,QADC,cAAc,OAAO,KAAK,IAAI,CACZ;AAqBV,KADD,KADF,KAAK,iBAAiB,cAAc,EACZ,cAAc,EACX,gBAAgB;;;;;;;;;;;;AA0LnE,MAAM,sCAAsB,IAAI,KAAa;AAK7C,MAAM,sCAAsB,IAAI,KAAqB;AACrD,MAAM,oBAAoB;AAE1B,eAAsB,mBAAmB,SAAiB,aAAqB,cAAuB,SAAmD;AAEvJ,KAAI,oBAAoB,IAAI,QAAQ,EAAE;AACpC,UAAQ,IAAI,0DAA0D,QAAQ,YAAY;AAC1F;;AAUF,KAAI,CAAC,SAAS,YAAY;EACxB,MAAM,cAAc,KAAK,iBAAiB,0BAA0B;EACpE,MAAM,WAAW,UAAU,SAAS,QAAQ,GACxC,UAAU,QAAQ,cAAc,GAAG,GACnC,UAAU,QAAQ,eAAe,GAAG,CAAC,QAAQ,cAAc,GAAG;EAClE,MAAM,eAAe,KAAK,UAAU,WAAW,uBAAuB;AAEtE,MAAI;AASF,SAAM,UAAU,aARI,KAAK,UAAU;IACjC;IACA;IACA,cAAc,gBAAgB;IAC9B,WAAW,KAAK,KAAK;IACrB,QAAQ;IACR,SAAS;IACV,CAAC,EACwC,QAAQ;AAClD,WAAQ,IAAI,+CAA+C,cAAc;GAMzE,MAAM,QAAQ,MAAM,cAAc;IAAC;IAAU;IAAS;IAAa,gBAAgB;IAAI;IAAa,EAAE;IACpG,UAAU;IACV,OAAO;IACR,CAAC;AACF,SAAM,OAAO;AACb,WAAQ,IAAI,qDAAqD,MAAM,IAAI,wCAAwC;AACnH;WACO,KAAU;AACjB,WAAQ,KAAK,gDAAgD,IAAI,QAAQ,uEAAuE;;;AAIpJ,SAAQ,IAAI,gDAAgD,UAAU;AAGtE,KAAI;EACF,MAAM,EAAE,YAAY,MAAM,OAAO;EACjC,MAAM,YAAY,MAAM,QAAQ;GAAE;GAAS;GAAa,CAAC;AACzD,MAAI,UAAU,WAAW,CAAC,UAAU,SAAS;AAC3C,WAAQ,IAAI,mBAAmB,UAAU,SAAS,KAAK,KAAK,GAAG;AAC/D,eAAY,aAAa,SAAS,QAAQ,6BAA6B;aAC9D,UAAU,QACnB,SAAQ,IAAI,mCAAmC,UAAU,SAAS,KAAK,KAAK,GAAG;MAE/E,SAAQ,KAAK,kCAAkC,UAAU,QAAQ;UAE5D,KAAK;AACZ,UAAQ,KAAK,qCAAqC,MAAM;;AAI1D,KAAI;EACF,MAAM,EAAE,2BAA2B,MAAM,OAAO;EAChD,MAAM,cAAc,MAAM,uBAAuB;GAAE;GAAS;GAAa,CAAC;AAC1E,MAAI,YAAY,WAAW,CAAC,YAAY,SAAS;AAC/C,WAAQ,IAAI,mBAAmB,YAAY,SAAS,KAAK,KAAK,GAAG;AACjE,eAAY,8BAA8B,YAAY,SAAS,KAAK,KAAK,IAAI,6BAA6B;aACjG,YAAY,QACrB,SAAQ,IAAI,oDAAoD,YAAY,SAAS,KAAK,KAAK,GAAG;MAElG,SAAQ,KAAK,mDAAmD,YAAY,QAAQ;UAE/
E,KAAK;AACZ,UAAQ,KAAK,qDAAqD,MAAM;;AAM1E,KAAI;EACF,MAAM,EAAE,wBAAwB,MAAM,OAAO;EAC7C,MAAM,aAAa,mBAAmB,QAAQ;EAI9C,MAAM,cAAc,MAAM,oBAHT,WAAW,WACxB;GAAE;GAAS;GAAa,QAAQ;IAAE,OAAO,WAAW;IAAO,MAAM,WAAW;IAAM,QAAQ,WAAW;IAAQ;GAAE,GAC/G;GAAE;GAAS;GAAa,CAC2B;AACvD,MAAI,YAAY,WAAW,CAAC,YAAY,SAAS;AAC/C,WAAQ,IAAI,mBAAmB,YAAY,SAAS,KAAK,KAAK,GAAG;AACjE,eAAY,kBAAkB,YAAY,SAAS,KAAK,KAAK,IAAI,iBAAiB;aACzE,YAAY,QACrB,SAAQ,IAAI,wCAAwC,YAAY,SAAS,KAAK,KAAK,GAAG;MAEtF,SAAQ,KAAK,mDAAmD,YAAY,QAAQ;UAE/E,KAAK;AACZ,UAAQ,KAAK,yCAAyC,MAAM;;AAO9D,8BAA6B,SAAS,YAAY;AAGlD,KAAI;EACF,MAAM,EAAE,iBAAiB,MAAM,OAAO;EACtC,MAAM,cAAc,MAAM,aAAa;GAAE;GAAS;GAAa,CAAC;AAChE,MAAI,YAAY,WAAW,CAAC,YAAY,SAAS;AAC/C,WAAQ,IAAI,mBAAmB,YAAY,SAAS,KAAK,KAAK,GAAG;AACjE,eAAY,6BAA6B,YAAY,SAAS,KAAK,KAAK,IAAI,kBAAkB;;UAEzF,KAAK;AACZ,UAAQ,KAAK,0CAA0C,MAAM;;AAK/D,KAAI;EACF,MAAM,EAAE,eAAe,mBAAmB,MAAM,OAAO;EACvD,MAAM,EAAE,aAAa,kBAAkB,MAAM,OAAO;EACpD,MAAM,UAAU,SAAS,QAAQ,aAAa;EAC9C,MAAM,aAAa,cAAc,QAAQ;AACzC,MAAI,cAAc,cAAc,QAAQ,EAAE;AACxC,eAAY,QAAQ;AACpB,cAAW,SAAS;AACpB,kBAAe,WAAW;AAC1B,WAAQ,IAAI,6CAA6C,QAAQ,oBAAoB;AACrF,eAAY,wBAAwB,4CAA4C,UAAU;;EAG5F,MAAM,aAAa,YAAY,QAAQ,aAAa;AACpD,MAAI,cAAc,WAAW,EAAE;AAC7B,eAAY,WAAW;AACvB,WAAQ,IAAI,iDAAiD,aAAa;;UAErE,KAAK;AACZ,UAAQ,KAAK,gDAAgD,MAAM;;AAMrE,KAAI;EACF,MAAM,EAAE,sBAAsB,MAAM,OAAO;EAC3C,MAAM,EAAE,wBAAwB,MAAM,OAAO;EAC7C,MAAM,aAAa,QAAQ,aAAa;EACxC,MAAM,gBAAgB,kBAAkB,aAAa,WAAW;AAChE,MAAI,eAAe;GAEjB,MAAM,eAAe,MAAM,oBAAoB,eAD9B,SAAS,YAAY,EACkC,WAAW;AACnF,OAAI,aAAa,iBAAiB;AAChC,YAAQ,IAAI,8CAA8C,aAAa,MAAM,KAAK,KAAK,GAAG;AAC1F,gBAAY,kBAAkB,sBAAsB,QAAQ,IAAI,aAAa,MAAM,KAAK,KAAK,GAAG;;;UAG7F,KAAK;AACZ,UAAQ,KAAK,oDAAoD,MAAM;;AAIzE,qBAAoB,IAAI,QAAQ;AAEhC,SAAQ,IAAI,kDAAkD,QAAQ,6CAA6C;AACnH,aAAY,kBAAkB,UAAU,QAAQ,6CAA6C;;;;;;;AAQ/F,SAAS,6BAA6B,SAAiB,aAA2B;CAChF,MAAM,WAAW,oBAAoB,IAAI,QAAQ,IAAI;AACrD,KAAI,YAAY,mBAAmB;AACjC,UAAQ,IAAI,0CAA0C,QAAQ,gBAAgB,SAAS,uDAAuD;AAC9I;;AAIF,EAAC,YAAY;AACX,MAAI;GACF,MAAM,EAAE,eAAe,MAAM,OAAO;GACpC,MAAM,aAAa,mBAAmB,QAAQ;GAI9C,MAAM,UAAU,MAAM,WAHV,WAAW,WACnB;IAAE;IAAS;IAAa,QAAQ;KAAE,OAA
O,WAAW;KAAO,MAAM,WAAW;KAAM,QAAQ,WAAW;KAAQ;IAAE,GAC/G;IAAE;IAAS;IAAa,EACU;IAAE,YAAY;IAAO,SAAS;IAA6C,CAAC;GAElH,IAAI,aAAa;AACjB,QAAK,MAAM,KAAK,QACd,KAAI,EAAE,WAAW,CAAC,EAAE,SAAS;AAC3B,YAAQ,IAAI,mBAAmB,EAAE,SAAS,KAAK,KAAK,GAAG;AACvD,gBAAY,EAAE,MAAM,EAAE,SAAS,KAAK,KAAK,IAAI,EAAE,KAAK;cAC3C,CAAC,EAAE,SAAS;AACrB,YAAQ,KAAK,mBAAmB,EAAE,KAAK,WAAW,EAAE,QAAQ;AAC5D,iBAAa;;AAIjB,OAAI,YAAY;IACd,MAAM,YAAY,oBAAoB,IAAI,QAAQ,IAAI,KAAK;AAC3D,wBAAoB,IAAI,SAAS,SAAS;AAC1C,QAAI,YAAY,kBACd,SAAQ,KAAK,6CAA6C,QAAQ,SAAS,SAAS,wDAAwD;SAI9I,qBAAoB,OAAO,QAAQ;WAE9B,KAAK;GACZ,MAAM,YAAY,oBAAoB,IAAI,QAAQ,IAAI,KAAK;AAC3D,uBAAoB,IAAI,SAAS,SAAS;AAC1C,WAAQ,KAAK,uDAAuD,SAAS,GAAG,kBAAkB,KAAK,MAAM;;KAE7G;;;;;AAoNN,SAAS,YAAY,QAAgB,SAAiB,SAAwB;AAC5E,mBAAkB;EAChB,QAAQ;EACR,OAAO,OAAO,SAAS,OAAO,IAAI,OAAO,SAAS,QAAQ,GAAG,UAAU,OAAO,SAAS,OAAO,GAAG,SAAS;EAC1G,SAAS,iBAAiB,OAAO,IAAI;EACrC;EACD,CAAC"}
@@ -22,6 +22,23 @@ function isDevMode() {
22
22
  return false;
23
23
  }
24
24
  }
25
+ /**
26
+ * Encode a filesystem path to match Claude Code's project directory naming.
27
+ *
28
+ * Claude Code replaces ALL non-alphanumeric characters (except hyphens) with
29
+ * hyphens when encoding the CWD into the project directory name under
30
+ * ~/.claude/projects/. For example:
31
+ *
32
+ * /Users/edward.becker/Projects → -Users-edward-becker-Projects
33
+ * /home/eltmon/Projects → -home-eltmon-Projects
34
+ * /tmp/test_under.dot+plus@at → -tmp-test-under-dot-plus-at
35
+ *
36
+ * This is critical for session file lookup — a mismatch means JSONL files
37
+ * are never found and conversation messages appear permanently empty.
38
+ */
39
+ function encodeClaudeProjectDir(cwdPath) {
40
+ return cwdPath.replace(/[^a-zA-Z0-9-]/g, "-");
41
+ }
25
42
  var PANOPTICON_HOME, CONFIG_DIR, SKILLS_DIR, COMMANDS_DIR, AGENTS_DIR, BIN_DIR, BACKUPS_DIR, COSTS_DIR, HEARTBEATS_DIR, ARCHIVES_DIR, TRAEFIK_DIR, TRAEFIK_DYNAMIC_DIR, TRAEFIK_CERTS_DIR, CERTS_DIR, CONFIG_FILE, SETTINGS_FILE, CLAUDE_DIR, LEGACY_RUNTIME_DIRS, SYNC_TARGET, TEMPLATES_DIR, CLAUDE_MD_TEMPLATES, currentDir, packageRoot, SOURCE_TEMPLATES_DIR, SOURCE_TRAEFIK_TEMPLATES, SOURCE_SCRIPTS_DIR, SOURCE_SKILLS_DIR, SOURCE_DEV_SKILLS_DIR, SOURCE_AGENTS_DIR, SOURCE_RULES_DIR, CACHE_SKILLS_DIR, CACHE_AGENTS_DIR, CACHE_RULES_DIR, CACHE_MANIFEST, DOCS_DIR, PRDS_DIR, PRD_DRAFTS_DIR, PRD_PUBLISHED_DIR, PROJECT_DOCS_SUBDIR, PROJECT_PRDS_SUBDIR, PROJECT_PRDS_ACTIVE_SUBDIR, PROJECT_PRDS_PLANNED_SUBDIR, PROJECT_PRDS_COMPLETED_SUBDIR, INIT_DIRS;
26
43
  var init_paths = __esmMin((() => {
27
44
  PANOPTICON_HOME = process.env.PANOPTICON_HOME || join(homedir(), ".panopticon");
@@ -101,6 +118,6 @@ var init_paths = __esmMin((() => {
101
118
  ];
102
119
  }));
103
120
  //#endregion
104
- export { SKILLS_DIR as A, TRAEFIK_CERTS_DIR as B, PRD_PUBLISHED_DIR as C, PROJECT_PRDS_PLANNED_SUBDIR as D, PROJECT_PRDS_COMPLETED_SUBDIR as E, SOURCE_SKILLS_DIR as F, isDevMode as G, TRAEFIK_DYNAMIC_DIR as H, SOURCE_TEMPLATES_DIR as I, SOURCE_TRAEFIK_TEMPLATES as L, SOURCE_DEV_SKILLS_DIR as M, SOURCE_RULES_DIR as N, PROJECT_PRDS_SUBDIR as O, SOURCE_SCRIPTS_DIR as P, SYNC_TARGET as R, PRD_DRAFTS_DIR as S, PROJECT_PRDS_ACTIVE_SUBDIR as T, getPanopticonHome as U, TRAEFIK_DIR as V, init_paths as W, HEARTBEATS_DIR as _, CACHE_AGENTS_DIR as a, PANOPTICON_HOME as b, CACHE_SKILLS_DIR as c, CLAUDE_MD_TEMPLATES as d, COMMANDS_DIR as f, DOCS_DIR as g, COSTS_DIR as h, BIN_DIR as i, SOURCE_AGENTS_DIR as j, SETTINGS_FILE as k, CERTS_DIR as l, CONFIG_FILE as m, ARCHIVES_DIR as n, CACHE_MANIFEST as o, CONFIG_DIR as p, BACKUPS_DIR as r, CACHE_RULES_DIR as s, AGENTS_DIR as t, CLAUDE_DIR as u, INIT_DIRS as v, PROJECT_DOCS_SUBDIR as w, PRDS_DIR as x, LEGACY_RUNTIME_DIRS as y, TEMPLATES_DIR as z };
121
+ export { SKILLS_DIR as A, TRAEFIK_CERTS_DIR as B, PRD_PUBLISHED_DIR as C, PROJECT_PRDS_PLANNED_SUBDIR as D, PROJECT_PRDS_COMPLETED_SUBDIR as E, SOURCE_SKILLS_DIR as F, init_paths as G, TRAEFIK_DYNAMIC_DIR as H, SOURCE_TEMPLATES_DIR as I, isDevMode as K, SOURCE_TRAEFIK_TEMPLATES as L, SOURCE_DEV_SKILLS_DIR as M, SOURCE_RULES_DIR as N, PROJECT_PRDS_SUBDIR as O, SOURCE_SCRIPTS_DIR as P, SYNC_TARGET as R, PRD_DRAFTS_DIR as S, PROJECT_PRDS_ACTIVE_SUBDIR as T, encodeClaudeProjectDir as U, TRAEFIK_DIR as V, getPanopticonHome as W, HEARTBEATS_DIR as _, CACHE_AGENTS_DIR as a, PANOPTICON_HOME as b, CACHE_SKILLS_DIR as c, CLAUDE_MD_TEMPLATES as d, COMMANDS_DIR as f, DOCS_DIR as g, COSTS_DIR as h, BIN_DIR as i, SOURCE_AGENTS_DIR as j, SETTINGS_FILE as k, CERTS_DIR as l, CONFIG_FILE as m, ARCHIVES_DIR as n, CACHE_MANIFEST as o, CONFIG_DIR as p, BACKUPS_DIR as r, CACHE_RULES_DIR as s, AGENTS_DIR as t, CLAUDE_DIR as u, INIT_DIRS as v, PROJECT_DOCS_SUBDIR as w, PRDS_DIR as x, LEGACY_RUNTIME_DIRS as y, TEMPLATES_DIR as z };
105
122
 
106
- //# sourceMappingURL=paths-lMaxrYtT.js.map
123
+ //# sourceMappingURL=paths-CDJ_HsbN.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"paths-lMaxrYtT.js","names":[],"sources":["../src/lib/paths.ts"],"sourcesContent":["import { homedir } from 'os';\nimport { join } from 'path';\nimport { existsSync } from 'fs';\n\n// Panopticon home directory (can be overridden for testing)\nexport const PANOPTICON_HOME = process.env.PANOPTICON_HOME || join(homedir(), '.panopticon');\n\n/** Get PANOPTICON_HOME dynamically (reads env var on each call, useful for testing) */\nexport function getPanopticonHome(): string {\n return process.env.PANOPTICON_HOME || join(homedir(), '.panopticon');\n}\n\n// Subdirectories\nexport const CONFIG_DIR = PANOPTICON_HOME;\nexport const SKILLS_DIR = join(PANOPTICON_HOME, 'skills');\nexport const COMMANDS_DIR = join(PANOPTICON_HOME, 'commands');\nexport const AGENTS_DIR = join(PANOPTICON_HOME, 'agents');\nexport const BIN_DIR = join(PANOPTICON_HOME, 'bin');\nexport const BACKUPS_DIR = join(PANOPTICON_HOME, 'backups');\nexport const COSTS_DIR = join(PANOPTICON_HOME, 'costs');\nexport const HEARTBEATS_DIR = join(PANOPTICON_HOME, 'heartbeats');\nexport const ARCHIVES_DIR = join(PANOPTICON_HOME, 'archives');\n\n// Traefik directories\nexport const TRAEFIK_DIR = join(PANOPTICON_HOME, 'traefik');\nexport const TRAEFIK_DYNAMIC_DIR = join(TRAEFIK_DIR, 'dynamic');\nexport const TRAEFIK_CERTS_DIR = join(TRAEFIK_DIR, 'certs');\n\n// Legacy certs directory (for backwards compatibility)\nexport const CERTS_DIR = join(PANOPTICON_HOME, 'certs');\n\n// Config files\nexport const CONFIG_FILE = join(CONFIG_DIR, 'config.toml');\nexport const SETTINGS_FILE = join(CONFIG_DIR, 'settings.json');\n\n// AI tool directory (Claude Code is the sole supported runtime)\nexport const CLAUDE_DIR = join(homedir(), '.claude');\n\n// Legacy runtime directories (kept for symlink cleanup migration)\nexport const LEGACY_RUNTIME_DIRS = {\n codex: join(homedir(), '.codex'),\n cursor: join(homedir(), '.cursor'),\n gemini: join(homedir(), '.gemini'),\n opencode: join(homedir(), '.opencode'),\n} as 
const;\n\n// Sync target (Claude Code only)\nexport const SYNC_TARGET = {\n skills: join(CLAUDE_DIR, 'skills'),\n commands: join(CLAUDE_DIR, 'commands'),\n agents: join(CLAUDE_DIR, 'agents'),\n} as const;\n\n// Templates directory (in user's ~/.panopticon)\nexport const TEMPLATES_DIR = join(PANOPTICON_HOME, 'templates');\nexport const CLAUDE_MD_TEMPLATES = join(TEMPLATES_DIR, 'claude-md', 'sections');\n\n// Source templates directory (bundled with the package)\n// This is resolved at runtime from the package root\nimport { fileURLToPath } from 'url';\nimport { dirname } from 'path';\n\nconst currentFile = fileURLToPath(import.meta.url);\nconst currentDir = dirname(currentFile);\n\n// Handle both development (src/lib/) and production (dist/) modes\n// In dev: /path/to/panopticon/src/lib/paths.ts -> /path/to/panopticon\n// In prod: /path/to/panopticon/dist/lib/paths.js -> /path/to/panopticon\nlet packageRoot: string;\nif (currentDir.includes('/src/')) {\n // Development mode - go up from src/lib to package root\n packageRoot = dirname(dirname(currentDir));\n} else {\n // Production mode - go up from dist (or dist/lib) to package root\n packageRoot = currentDir.endsWith('/lib')\n ? 
dirname(dirname(currentDir))\n : dirname(currentDir);\n}\n\nexport const SOURCE_TEMPLATES_DIR = join(packageRoot, 'templates');\nexport const SOURCE_TRAEFIK_TEMPLATES = join(SOURCE_TEMPLATES_DIR, 'traefik');\nexport const SOURCE_SCRIPTS_DIR = join(packageRoot, 'scripts');\nexport const SOURCE_SKILLS_DIR = join(packageRoot, 'skills');\nexport const SOURCE_DEV_SKILLS_DIR = join(packageRoot, 'dev-skills');\nexport const SOURCE_AGENTS_DIR = join(packageRoot, 'agents');\nexport const SOURCE_RULES_DIR = join(packageRoot, 'rules');\n\n// Cache directories (where Panopticon keeps its copy of distributed content)\nexport const CACHE_SKILLS_DIR = SKILLS_DIR; // ~/.panopticon/skills/\nexport const CACHE_AGENTS_DIR = join(PANOPTICON_HOME, 'agent-definitions'); // separate from agent state\nexport const CACHE_RULES_DIR = join(PANOPTICON_HOME, 'rules');\nexport const CACHE_MANIFEST = join(PANOPTICON_HOME, '.manifest.json');\n\n// Pre-workspace PRD directory (for PRDs created before workspace exists)\nexport const DOCS_DIR = join(PANOPTICON_HOME, 'docs');\nexport const PRDS_DIR = join(DOCS_DIR, 'prds');\nexport const PRD_DRAFTS_DIR = join(PRDS_DIR, 'drafts');\nexport const PRD_PUBLISHED_DIR = join(PRDS_DIR, 'published');\n\n// Project-relative docs paths (subdirectory names for project-level docs)\nexport const PROJECT_DOCS_SUBDIR = 'docs';\nexport const PROJECT_PRDS_SUBDIR = 'prds';\nexport const PROJECT_PRDS_ACTIVE_SUBDIR = 'active';\nexport const PROJECT_PRDS_PLANNED_SUBDIR = 'planned';\nexport const PROJECT_PRDS_COMPLETED_SUBDIR = 'completed';\n\n/**\n * Detect if running in development mode (from npm link or panopticon repo)\n *\n * Dev mode is detected if:\n * 1. Running from the panopticon source directory (npm link)\n * 2. 
The SOURCE_DEV_SKILLS_DIR exists (only present in repo, not in npm package)\n */\nexport function isDevMode(): boolean {\n try {\n // Check if dev-skills directory exists - this is only in the repo, not npm package\n return existsSync(SOURCE_DEV_SKILLS_DIR);\n } catch {\n return false;\n }\n}\n\n// All directories to create on init\nexport const INIT_DIRS = [\n PANOPTICON_HOME,\n SKILLS_DIR,\n COMMANDS_DIR,\n AGENTS_DIR,\n BIN_DIR,\n BACKUPS_DIR,\n COSTS_DIR,\n HEARTBEATS_DIR,\n TEMPLATES_DIR,\n CLAUDE_MD_TEMPLATES,\n CERTS_DIR,\n CACHE_AGENTS_DIR,\n CACHE_RULES_DIR,\n TRAEFIK_DIR,\n TRAEFIK_DYNAMIC_DIR,\n TRAEFIK_CERTS_DIR,\n DOCS_DIR,\n PRDS_DIR,\n PRD_DRAFTS_DIR,\n PRD_PUBLISHED_DIR,\n];\n"],"mappings":";;;;;;;AAQA,SAAgB,oBAA4B;AAC1C,QAAO,QAAQ,IAAI,mBAAmB,KAAK,SAAS,EAAE,cAAc;;;;;;;;;AAwGtE,SAAgB,YAAqB;AACnC,KAAI;AAEF,SAAO,WAAW,sBAAsB;SAClC;AACN,SAAO;;;;;AAjHE,mBAAkB,QAAQ,IAAI,mBAAmB,KAAK,SAAS,EAAE,cAAc;AAQ/E,cAAa;AACb,cAAa,KAAK,iBAAiB,SAAS;AAC5C,gBAAe,KAAK,iBAAiB,WAAW;AAChD,cAAa,KAAK,iBAAiB,SAAS;AAC5C,WAAU,KAAK,iBAAiB,MAAM;AACtC,eAAc,KAAK,iBAAiB,UAAU;AAC9C,aAAY,KAAK,iBAAiB,QAAQ;AAC1C,kBAAiB,KAAK,iBAAiB,aAAa;AACpD,gBAAe,KAAK,iBAAiB,WAAW;AAGhD,eAAc,KAAK,iBAAiB,UAAU;AAC9C,uBAAsB,KAAK,aAAa,UAAU;AAClD,qBAAoB,KAAK,aAAa,QAAQ;AAG9C,aAAY,KAAK,iBAAiB,QAAQ;AAG1C,eAAc,KAAK,YAAY,cAAc;AAC7C,iBAAgB,KAAK,YAAY,gBAAgB;AAGjD,cAAa,KAAK,SAAS,EAAE,UAAU;AAGvC,uBAAsB;EACjC,OAAO,KAAK,SAAS,EAAE,SAAS;EAChC,QAAQ,KAAK,SAAS,EAAE,UAAU;EAClC,QAAQ,KAAK,SAAS,EAAE,UAAU;EAClC,UAAU,KAAK,SAAS,EAAE,YAAY;EACvC;AAGY,eAAc;EACzB,QAAQ,KAAK,YAAY,SAAS;EAClC,UAAU,KAAK,YAAY,WAAW;EACtC,QAAQ,KAAK,YAAY,SAAS;EACnC;AAGY,iBAAgB,KAAK,iBAAiB,YAAY;AAClD,uBAAsB,KAAK,eAAe,aAAa,WAAW;AAQzE,cAAa,QADC,cAAc,OAAO,KAAK,IAAI,CACX;AAMvC,KAAI,WAAW,SAAS,QAAQ,CAE9B,eAAc,QAAQ,QAAQ,WAAW,CAAC;KAG1C,eAAc,WAAW,SAAS,OAAO,GACrC,QAAQ,QAAQ,WAAW,CAAC,GAC5B,QAAQ,WAAW;AAGZ,wBAAuB,KAAK,aAAa,YAAY;AACrD,4BAA2B,KAAK,sBAAsB,UAAU;AAChE,sBAAqB,KAAK,aAAa,UAAU;AACjD,qBAAoB,KAAK,aAAa,SAAS;AAC/C,yBAAwB,KAAK,aAAa,aAAa;AACvD,qBAAoB,KAAK,aAAa,SAAS;AAC/C,oBAA
mB,KAAK,aAAa,QAAQ;AAG7C,oBAAmB;AACnB,oBAAmB,KAAK,iBAAiB,oBAAoB;AAC7D,mBAAkB,KAAK,iBAAiB,QAAQ;AAChD,kBAAiB,KAAK,iBAAiB,iBAAiB;AAGxD,YAAW,KAAK,iBAAiB,OAAO;AACxC,YAAW,KAAK,UAAU,OAAO;AACjC,kBAAiB,KAAK,UAAU,SAAS;AACzC,qBAAoB,KAAK,UAAU,YAAY;AAG/C,uBAAsB;AACtB,uBAAsB;AACtB,8BAA6B;AAC7B,+BAA8B;AAC9B,iCAAgC;AAmBhC,aAAY;EACvB;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACD"}
1
+ {"version":3,"file":"paths-CDJ_HsbN.js","names":[],"sources":["../src/lib/paths.ts"],"sourcesContent":["import { homedir } from 'os';\nimport { join } from 'path';\nimport { existsSync } from 'fs';\n\n// Panopticon home directory (can be overridden for testing)\nexport const PANOPTICON_HOME = process.env.PANOPTICON_HOME || join(homedir(), '.panopticon');\n\n/** Get PANOPTICON_HOME dynamically (reads env var on each call, useful for testing) */\nexport function getPanopticonHome(): string {\n return process.env.PANOPTICON_HOME || join(homedir(), '.panopticon');\n}\n\n// Subdirectories\nexport const CONFIG_DIR = PANOPTICON_HOME;\nexport const SKILLS_DIR = join(PANOPTICON_HOME, 'skills');\nexport const COMMANDS_DIR = join(PANOPTICON_HOME, 'commands');\nexport const AGENTS_DIR = join(PANOPTICON_HOME, 'agents');\nexport const BIN_DIR = join(PANOPTICON_HOME, 'bin');\nexport const BACKUPS_DIR = join(PANOPTICON_HOME, 'backups');\nexport const COSTS_DIR = join(PANOPTICON_HOME, 'costs');\nexport const HEARTBEATS_DIR = join(PANOPTICON_HOME, 'heartbeats');\nexport const ARCHIVES_DIR = join(PANOPTICON_HOME, 'archives');\n\n// Traefik directories\nexport const TRAEFIK_DIR = join(PANOPTICON_HOME, 'traefik');\nexport const TRAEFIK_DYNAMIC_DIR = join(TRAEFIK_DIR, 'dynamic');\nexport const TRAEFIK_CERTS_DIR = join(TRAEFIK_DIR, 'certs');\n\n// Legacy certs directory (for backwards compatibility)\nexport const CERTS_DIR = join(PANOPTICON_HOME, 'certs');\n\n// Config files\nexport const CONFIG_FILE = join(CONFIG_DIR, 'config.toml');\nexport const SETTINGS_FILE = join(CONFIG_DIR, 'settings.json');\n\n// AI tool directory (Claude Code is the sole supported runtime)\nexport const CLAUDE_DIR = join(homedir(), '.claude');\n\n// Legacy runtime directories (kept for symlink cleanup migration)\nexport const LEGACY_RUNTIME_DIRS = {\n codex: join(homedir(), '.codex'),\n cursor: join(homedir(), '.cursor'),\n gemini: join(homedir(), '.gemini'),\n opencode: join(homedir(), '.opencode'),\n} as 
const;\n\n// Sync target (Claude Code only)\nexport const SYNC_TARGET = {\n skills: join(CLAUDE_DIR, 'skills'),\n commands: join(CLAUDE_DIR, 'commands'),\n agents: join(CLAUDE_DIR, 'agents'),\n} as const;\n\n// Templates directory (in user's ~/.panopticon)\nexport const TEMPLATES_DIR = join(PANOPTICON_HOME, 'templates');\nexport const CLAUDE_MD_TEMPLATES = join(TEMPLATES_DIR, 'claude-md', 'sections');\n\n// Source templates directory (bundled with the package)\n// This is resolved at runtime from the package root\nimport { fileURLToPath } from 'url';\nimport { dirname } from 'path';\n\nconst currentFile = fileURLToPath(import.meta.url);\nconst currentDir = dirname(currentFile);\n\n// Handle both development (src/lib/) and production (dist/) modes\n// In dev: /path/to/panopticon/src/lib/paths.ts -> /path/to/panopticon\n// In prod: /path/to/panopticon/dist/lib/paths.js -> /path/to/panopticon\nlet packageRoot: string;\nif (currentDir.includes('/src/')) {\n // Development mode - go up from src/lib to package root\n packageRoot = dirname(dirname(currentDir));\n} else {\n // Production mode - go up from dist (or dist/lib) to package root\n packageRoot = currentDir.endsWith('/lib')\n ? 
dirname(dirname(currentDir))\n : dirname(currentDir);\n}\n\nexport const SOURCE_TEMPLATES_DIR = join(packageRoot, 'templates');\nexport const SOURCE_TRAEFIK_TEMPLATES = join(SOURCE_TEMPLATES_DIR, 'traefik');\nexport const SOURCE_SCRIPTS_DIR = join(packageRoot, 'scripts');\nexport const SOURCE_SKILLS_DIR = join(packageRoot, 'skills');\nexport const SOURCE_DEV_SKILLS_DIR = join(packageRoot, 'dev-skills');\nexport const SOURCE_AGENTS_DIR = join(packageRoot, 'agents');\nexport const SOURCE_RULES_DIR = join(packageRoot, 'rules');\n\n// Cache directories (where Panopticon keeps its copy of distributed content)\nexport const CACHE_SKILLS_DIR = SKILLS_DIR; // ~/.panopticon/skills/\nexport const CACHE_AGENTS_DIR = join(PANOPTICON_HOME, 'agent-definitions'); // separate from agent state\nexport const CACHE_RULES_DIR = join(PANOPTICON_HOME, 'rules');\nexport const CACHE_MANIFEST = join(PANOPTICON_HOME, '.manifest.json');\n\n// Pre-workspace PRD directory (for PRDs created before workspace exists)\nexport const DOCS_DIR = join(PANOPTICON_HOME, 'docs');\nexport const PRDS_DIR = join(DOCS_DIR, 'prds');\nexport const PRD_DRAFTS_DIR = join(PRDS_DIR, 'drafts');\nexport const PRD_PUBLISHED_DIR = join(PRDS_DIR, 'published');\n\n// Project-relative docs paths (subdirectory names for project-level docs)\nexport const PROJECT_DOCS_SUBDIR = 'docs';\nexport const PROJECT_PRDS_SUBDIR = 'prds';\nexport const PROJECT_PRDS_ACTIVE_SUBDIR = 'active';\nexport const PROJECT_PRDS_PLANNED_SUBDIR = 'planned';\nexport const PROJECT_PRDS_COMPLETED_SUBDIR = 'completed';\n\n/**\n * Detect if running in development mode (from npm link or panopticon repo)\n *\n * Dev mode is detected if:\n * 1. Running from the panopticon source directory (npm link)\n * 2. 
The SOURCE_DEV_SKILLS_DIR exists (only present in repo, not in npm package)\n */\nexport function isDevMode(): boolean {\n try {\n // Check if dev-skills directory exists - this is only in the repo, not npm package\n return existsSync(SOURCE_DEV_SKILLS_DIR);\n } catch {\n return false;\n }\n}\n\n/**\n * Encode a filesystem path to match Claude Code's project directory naming.\n *\n * Claude Code replaces ALL non-alphanumeric characters (except hyphens) with\n * hyphens when encoding the CWD into the project directory name under\n * ~/.claude/projects/. For example:\n *\n * /Users/edward.becker/Projects → -Users-edward-becker-Projects\n * /home/eltmon/Projects → -home-eltmon-Projects\n * /tmp/test_under.dot+plus@at → -tmp-test-under-dot-plus-at\n *\n * This is critical for session file lookup — a mismatch means JSONL files\n * are never found and conversation messages appear permanently empty.\n */\nexport function encodeClaudeProjectDir(cwdPath: string): string {\n return cwdPath.replace(/[^a-zA-Z0-9-]/g, '-');\n}\n\n// All directories to create on init\nexport const INIT_DIRS = [\n PANOPTICON_HOME,\n SKILLS_DIR,\n COMMANDS_DIR,\n AGENTS_DIR,\n BIN_DIR,\n BACKUPS_DIR,\n COSTS_DIR,\n HEARTBEATS_DIR,\n TEMPLATES_DIR,\n CLAUDE_MD_TEMPLATES,\n CERTS_DIR,\n CACHE_AGENTS_DIR,\n CACHE_RULES_DIR,\n TRAEFIK_DIR,\n TRAEFIK_DYNAMIC_DIR,\n TRAEFIK_CERTS_DIR,\n DOCS_DIR,\n PRDS_DIR,\n PRD_DRAFTS_DIR,\n 
PRD_PUBLISHED_DIR,\n];\n"],"mappings":";;;;;;;AAQA,SAAgB,oBAA4B;AAC1C,QAAO,QAAQ,IAAI,mBAAmB,KAAK,SAAS,EAAE,cAAc;;;;;;;;;AAwGtE,SAAgB,YAAqB;AACnC,KAAI;AAEF,SAAO,WAAW,sBAAsB;SAClC;AACN,SAAO;;;;;;;;;;;;;;;;;AAkBX,SAAgB,uBAAuB,SAAyB;AAC9D,QAAO,QAAQ,QAAQ,kBAAkB,IAAI;;;;AApIlC,mBAAkB,QAAQ,IAAI,mBAAmB,KAAK,SAAS,EAAE,cAAc;AAQ/E,cAAa;AACb,cAAa,KAAK,iBAAiB,SAAS;AAC5C,gBAAe,KAAK,iBAAiB,WAAW;AAChD,cAAa,KAAK,iBAAiB,SAAS;AAC5C,WAAU,KAAK,iBAAiB,MAAM;AACtC,eAAc,KAAK,iBAAiB,UAAU;AAC9C,aAAY,KAAK,iBAAiB,QAAQ;AAC1C,kBAAiB,KAAK,iBAAiB,aAAa;AACpD,gBAAe,KAAK,iBAAiB,WAAW;AAGhD,eAAc,KAAK,iBAAiB,UAAU;AAC9C,uBAAsB,KAAK,aAAa,UAAU;AAClD,qBAAoB,KAAK,aAAa,QAAQ;AAG9C,aAAY,KAAK,iBAAiB,QAAQ;AAG1C,eAAc,KAAK,YAAY,cAAc;AAC7C,iBAAgB,KAAK,YAAY,gBAAgB;AAGjD,cAAa,KAAK,SAAS,EAAE,UAAU;AAGvC,uBAAsB;EACjC,OAAO,KAAK,SAAS,EAAE,SAAS;EAChC,QAAQ,KAAK,SAAS,EAAE,UAAU;EAClC,QAAQ,KAAK,SAAS,EAAE,UAAU;EAClC,UAAU,KAAK,SAAS,EAAE,YAAY;EACvC;AAGY,eAAc;EACzB,QAAQ,KAAK,YAAY,SAAS;EAClC,UAAU,KAAK,YAAY,WAAW;EACtC,QAAQ,KAAK,YAAY,SAAS;EACnC;AAGY,iBAAgB,KAAK,iBAAiB,YAAY;AAClD,uBAAsB,KAAK,eAAe,aAAa,WAAW;AAQzE,cAAa,QADC,cAAc,OAAO,KAAK,IAAI,CACX;AAMvC,KAAI,WAAW,SAAS,QAAQ,CAE9B,eAAc,QAAQ,QAAQ,WAAW,CAAC;KAG1C,eAAc,WAAW,SAAS,OAAO,GACrC,QAAQ,QAAQ,WAAW,CAAC,GAC5B,QAAQ,WAAW;AAGZ,wBAAuB,KAAK,aAAa,YAAY;AACrD,4BAA2B,KAAK,sBAAsB,UAAU;AAChE,sBAAqB,KAAK,aAAa,UAAU;AACjD,qBAAoB,KAAK,aAAa,SAAS;AAC/C,yBAAwB,KAAK,aAAa,aAAa;AACvD,qBAAoB,KAAK,aAAa,SAAS;AAC/C,oBAAmB,KAAK,aAAa,QAAQ;AAG7C,oBAAmB;AACnB,oBAAmB,KAAK,iBAAiB,oBAAoB;AAC7D,mBAAkB,KAAK,iBAAiB,QAAQ;AAChD,kBAAiB,KAAK,iBAAiB,iBAAiB;AAGxD,YAAW,KAAK,iBAAiB,OAAO;AACxC,YAAW,KAAK,UAAU,OAAO;AACjC,kBAAiB,KAAK,UAAU,SAAS;AACzC,qBAAoB,KAAK,UAAU,YAAY;AAG/C,uBAAsB;AACtB,uBAAsB;AACtB,8BAA6B;AAC7B,+BAA8B;AAC9B,iCAAgC;AAqChC,aAAY;EACvB;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACD"}
@@ -14,4 +14,4 @@ var init_pipeline_notifier = __esmMin((() => {
14
14
  //#endregion
15
15
  export { notifyPipeline as n, init_pipeline_notifier as t };
16
16
 
17
- //# sourceMappingURL=pipeline-notifier-OJ-d3Y60.js.map
17
+ //# sourceMappingURL=pipeline-notifier-XgDdCdvT.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"pipeline-notifier-OJ-d3Y60.js","names":[],"sources":["../src/lib/pipeline-notifier.ts"],"sourcesContent":["/**\n * Pipeline Notifier — event bridge between library code and Socket.io\n *\n * Lightweight singleton that decouples state mutations (review-status, specialist queue)\n * from the dashboard's Socket.io server. Library code calls notifyPipeline() which is\n * fire-and-forget — if no handler is registered (CLI context), events are silently dropped.\n * File-based persistence remains the source of truth.\n */\n\nimport type { ReviewStatus } from './review-status.js';\n\nexport type PipelineEvent =\n | { type: 'status_changed'; issueId: string; status: ReviewStatus }\n | { type: 'task_queued'; specialist: string; issueId: string };\n\ntype Handler = (event: PipelineEvent) => void;\nlet handler: Handler | null = null;\n\nexport function setPipelineHandler(fn: Handler): void {\n handler = fn;\n}\n\nexport function notifyPipeline(event: PipelineEvent): void {\n if (handler) {\n try {\n handler(event);\n } catch (e) {\n console.error('[pipeline] handler error:', e);\n }\n }\n}\n"],"mappings":";;AAsBA,SAAgB,eAAe,OAA4B;AACzD,KAAI,QACF,KAAI;AACF,UAAQ,MAAM;UACP,GAAG;AACV,UAAQ,MAAM,6BAA6B,EAAE;;;;;AAX/C,WAA0B"}
1
+ {"version":3,"file":"pipeline-notifier-XgDdCdvT.js","names":[],"sources":["../src/lib/pipeline-notifier.ts"],"sourcesContent":["/**\n * Pipeline Notifier — event bridge between library code and Socket.io\n *\n * Lightweight singleton that decouples state mutations (review-status, specialist queue)\n * from the dashboard's Socket.io server. Library code calls notifyPipeline() which is\n * fire-and-forget — if no handler is registered (CLI context), events are silently dropped.\n * File-based persistence remains the source of truth.\n */\n\nimport type { ReviewStatus } from './review-status.js';\n\nexport type PipelineEvent =\n | { type: 'status_changed'; issueId: string; status: ReviewStatus }\n | { type: 'task_queued'; specialist: string; issueId: string };\n\ntype Handler = (event: PipelineEvent) => void;\nlet handler: Handler | null = null;\n\nexport function setPipelineHandler(fn: Handler): void {\n handler = fn;\n}\n\nexport function notifyPipeline(event: PipelineEvent): void {\n if (handler) {\n try {\n handler(event);\n } catch (e) {\n console.error('[pipeline] handler error:', e);\n }\n }\n}\n"],"mappings":";;AAsBA,SAAgB,eAAe,OAA4B;AACzD,KAAI,QACF,KAAI;AACF,UAAQ,MAAM;UACP,GAAG;AACV,UAAQ,MAAM,6BAA6B,EAAE;;;;;AAX/C,WAA0B"}
@@ -1,5 +1,6 @@
1
1
  import { n as __exportAll, t as __esmMin } from "./chunk-ruWRV7i3.js";
2
- import { W as init_paths, b as PANOPTICON_HOME } from "./paths-lMaxrYtT.js";
2
+ import { G as init_paths, b as PANOPTICON_HOME } from "./paths-CDJ_HsbN.js";
3
+ import { i as parseIssueId, n as extractPrefix, r as init_issue_id } from "./issue-id-CAcekoIw.js";
3
4
  import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
4
5
  import { join, resolve } from "path";
5
6
  import { parse as parse$1, stringify } from "yaml";
@@ -86,12 +87,12 @@ function unregisterProject(key) {
86
87
  return false;
87
88
  }
88
89
  /**
89
- * Extract Linear team prefix from an issue ID
90
- * E.g., "MIN-123" -> "MIN", "PAN-456" -> "PAN"
90
+ * Extract Linear team prefix from an issue ID.
91
+ * Supports standard (MIN-123), Rally (F29698), and custom formats.
92
+ * @deprecated Use extractPrefix from issue-id.ts for unified parsing
91
93
  */
92
94
  function extractTeamPrefix(issueId) {
93
- const match = issueId.match(/^([A-Z]+)-\d+$/i);
94
- return match ? match[1].toUpperCase() : null;
95
+ return extractPrefix(issueId);
95
96
  }
96
97
  /**
97
98
  * Find project by Linear team prefix
@@ -134,26 +135,36 @@ function resolveProjectPath(project, labels = []) {
134
135
  /**
135
136
  * Resolve project from an issue ID (and optional labels)
136
137
  *
137
- * @param issueId - Linear issue ID (e.g., "MIN-123")
138
+ * @param issueId - Issue ID in any supported format (e.g., "MIN-123", "F29698")
138
139
  * @param labels - Optional array of label names
139
140
  * @returns Resolved project info or null if not found
140
141
  */
141
142
  function resolveProjectFromIssue(issueId, labels = []) {
142
- const teamPrefix = extractTeamPrefix(issueId);
143
- if (!teamPrefix) return null;
143
+ const parsed = parseIssueId(issueId);
144
+ if (!parsed) return null;
144
145
  const config = loadProjectsConfig();
145
146
  for (const [key, projectConfig] of Object.entries(config.projects)) {
146
- if (getIssuePrefix(projectConfig)?.toUpperCase() === teamPrefix) {
147
+ const singlePrefix = getIssuePrefix(projectConfig);
148
+ if (singlePrefix?.toUpperCase() === parsed.prefix) {
147
149
  const resolvedPath = resolveProjectPath(projectConfig, labels);
148
150
  return {
149
151
  projectKey: key,
150
152
  projectName: projectConfig.name,
151
153
  projectPath: resolvedPath,
152
- linearTeam: getIssuePrefix(projectConfig)
154
+ linearTeam: singlePrefix
153
155
  };
154
156
  }
155
- if (!getIssuePrefix(projectConfig) && (projectConfig.github_repo || projectConfig.rally_project)) {
156
- if (key.toUpperCase().replace(/-/g, "") === teamPrefix) {
157
+ if (projectConfig.issue_prefixes?.some((p) => p.toUpperCase() === parsed.prefix)) {
158
+ const resolvedPath = resolveProjectPath(projectConfig, labels);
159
+ return {
160
+ projectKey: key,
161
+ projectName: projectConfig.name,
162
+ projectPath: resolvedPath,
163
+ linearTeam: projectConfig.issue_prefixes?.find((p) => p.toUpperCase() === parsed.prefix)
164
+ };
165
+ }
166
+ if (!singlePrefix && !projectConfig.issue_prefixes) {
167
+ if (key.toUpperCase().replace(/-/g, "") === parsed.prefix) {
157
168
  const resolvedPath = resolveProjectPath(projectConfig, labels);
158
169
  return {
159
170
  projectKey: key,
@@ -284,6 +295,7 @@ function getSpecialistPromptOverride(projectKey, specialistType) {
284
295
  var PROJECTS_CONFIG_FILE, DEFAULT_SPECIALIST_CONFIG;
285
296
  var init_projects = __esmMin((() => {
286
297
  init_paths();
298
+ init_issue_id();
287
299
  PROJECTS_CONFIG_FILE = join(PANOPTICON_HOME, "projects.yaml");
288
300
  DEFAULT_SPECIALIST_CONFIG = {
289
301
  context_runs: 5,
@@ -298,4 +310,4 @@ var init_projects = __esmMin((() => {
298
310
  //#endregion
299
311
  export { unregisterProject as S, projects_exports as _, findProjectByTeam as a, resolveProjectPath as b, getProject as c, getSpecialistRetention as d, hasProjects as f, loadProjectsConfig as g, listProjects as h, findProjectByPath as i, getSpecialistConfig as l, initializeProjectsConfig as m, createDefaultProjectsConfig as n, findProjectsByRallyProject as o, init_projects as p, extractTeamPrefix as r, getIssuePrefix as s, PROJECTS_CONFIG_FILE as t, getSpecialistPromptOverride as u, registerProject as v, saveProjectsConfig as x, resolveProjectFromIssue as y };
300
312
 
301
- //# sourceMappingURL=projects-CvLepaxC.js.map
313
+ //# sourceMappingURL=projects-Bk-5QhFQ.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"projects-Bk-5QhFQ.js","names":["parseYaml","stringifyYaml"],"sources":["../src/lib/projects.ts"],"sourcesContent":["/**\n * Project Registry - Multi-project support for Panopticon\n *\n * Maps Linear team prefixes and labels to project paths for workspace creation.\n */\n\nimport { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';\nimport { join, resolve } from 'path';\nimport { parse as parseYaml, stringify as stringifyYaml } from 'yaml';\nimport { PANOPTICON_HOME } from './paths.js';\nimport { extractPrefix, parseIssueId } from './issue-id.js';\nimport type { QualityGateConfig } from './workspace-config.js';\n\nexport const PROJECTS_CONFIG_FILE = join(PANOPTICON_HOME, 'projects.yaml');\n\n/**\n * Issue routing rule - routes issues with certain labels to specific paths\n */\nexport interface IssueRoutingRule {\n labels?: string[];\n default?: boolean;\n path: string;\n}\n\n/**\n * Workspace configuration (imported from workspace-config.ts for full details)\n */\nexport interface WorkspaceConfig {\n type?: 'polyrepo' | 'monorepo';\n workspaces_dir?: string;\n repos?: Array<{ name: string; path: string; branch_prefix?: string }>;\n dns?: { domain: string; entries: string[]; sync_method?: 'wsl2hosts' | 'hosts_file' | 'dnsmasq' };\n ports?: Record<string, { range: [number, number] }>;\n docker?: { traefik?: string; compose_template?: string };\n database?: { seed_file?: string; container_name?: string; [key: string]: any };\n agent?: { template_dir: string; templates?: Array<{ source: string; target: string }>; copy_dirs?: string[]; symlinks?: string[] };\n env?: { template?: string; secrets_file?: string };\n services?: Array<{ name: string; path: string; start_command: string; docker_command?: string; health_url?: string; port?: number }>;\n}\n\n/**\n * Test configuration\n */\nexport interface TestConfig {\n type: string;\n path: string;\n command: string;\n container?: boolean;\n container_name?: string;\n env?: Record<string, 
string>;\n}\n\n/**\n * Specialist configuration for per-project specialists\n */\nexport interface SpecialistConfig {\n /** Number of recent runs to include in context digest (default: 5) */\n context_runs?: number;\n /** Model to use for generating context digests (null = same as specialist) */\n digest_model?: string | null;\n /** Log retention policy */\n retention?: {\n /** Maximum days to keep logs */\n max_days: number;\n /** Maximum number of runs to keep (whichever is more permissive) */\n max_runs: number;\n };\n /** Per-specialist prompt overrides */\n prompts?: {\n 'review-agent'?: string;\n 'test-agent'?: string;\n 'merge-agent'?: string;\n };\n}\n\n/**\n * Project configuration\n */\nexport interface ProjectConfig {\n name: string;\n path: string;\n /** Issue prefix for identifier construction (e.g., \"PAN\" → PAN-123) */\n issue_prefix?: string;\n github_repo?: string; // e.g. \"owner/repo\"\n gitlab_repo?: string; // e.g. \"group/repo\"\n /** Tracker type for this project. Affects ID parsing and state management. */\n tracker?: 'linear' | 'github' | 'gitlab' | 'rally';\n /**\n * Custom regex pattern for issue ID parsing. Must have two capture groups:\n * group 1 = prefix, group 2 = number. 
Example: \"^(PROJ)-(\\\\d+)$\"\n */\n issue_pattern?: string;\n /**\n * Multiple prefixes that map to this project.\n * For Rally: ['F', 'US', 'DE', 'TA'] — all artifact types route here.\n * For standard trackers: usually just one prefix via issue_prefix.\n */\n issue_prefixes?: string[];\n issue_routing?: IssueRoutingRule[];\n /** Workspace configuration */\n workspace?: WorkspaceConfig;\n /** Test configuration by name */\n tests?: Record<string, TestConfig>;\n /** Custom command to create workspaces (e.g., infra/new-feature for MYN) */\n workspace_command?: string;\n /** Custom command to remove workspaces */\n workspace_remove_command?: string;\n /** Rally project OID (e.g., \"/project/822404704163\") for per-project Rally scoping */\n rally_project?: string;\n /** Specialist agent configuration */\n specialists?: SpecialistConfig;\n /** Quality gates run by merge-agent before pushing (lint, typecheck, prod build, etc.) */\n quality_gates?: Record<string, QualityGateConfig>;\n /** Package manager for dependency installation in workspaces (bun, npm, pnpm) */\n package_manager?: 'bun' | 'npm' | 'pnpm';\n /** Local workspace packages that need building before quality gates (e.g., @panopticon/contracts) */\n workspace_packages?: Array<{ path: string; build_command: string }>;\n /**\n * Path to the repo where per-project cost WAL files live.\n * Defaults to `path` (the project repo itself).\n * For polyrepo setups, point this at the docs/shared repo.\n */\n events_repo?: string;\n /**\n * Subdirectory within events_repo where cost JSONL files are stored.\n * Defaults to \".pan/events\".\n */\n events_path?: string;\n}\n\n/** Resolve the issue prefix for a project. 
*/\nexport function getIssuePrefix(config: ProjectConfig): string | undefined {\n return config.issue_prefix;\n}\n\n/**\n * Full projects configuration file\n */\nexport interface ProjectsConfig {\n projects: Record<string, ProjectConfig>;\n}\n\n/**\n * Resolved project info for workspace creation\n */\nexport interface ResolvedProject {\n projectKey: string;\n projectName: string;\n projectPath: string;\n linearTeam?: string;\n}\n\n/**\n * Load projects configuration from ~/.panopticon/projects.yaml\n */\nexport function loadProjectsConfig(): ProjectsConfig {\n if (!existsSync(PROJECTS_CONFIG_FILE)) {\n return { projects: {} };\n }\n\n try {\n const content = readFileSync(PROJECTS_CONFIG_FILE, 'utf-8');\n const config = parseYaml(content) as ProjectsConfig;\n return config || { projects: {} };\n } catch (error: any) {\n console.error(`Failed to parse projects.yaml: ${error.message}`);\n return { projects: {} };\n }\n}\n\n/**\n * Save projects configuration\n */\nexport function saveProjectsConfig(config: ProjectsConfig): void {\n const dir = PANOPTICON_HOME;\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n\n const yaml = stringifyYaml(config, { indent: 2 });\n writeFileSync(PROJECTS_CONFIG_FILE, yaml, 'utf-8');\n}\n\n/**\n * Get a list of all registered projects\n */\nexport function listProjects(): Array<{ key: string; config: ProjectConfig }> {\n const config = loadProjectsConfig();\n return Object.entries(config.projects).map(([key, projectConfig]) => ({\n key,\n config: projectConfig,\n }));\n}\n\n/**\n * Add or update a project in the registry\n */\nexport function registerProject(key: string, projectConfig: ProjectConfig): void {\n const config = loadProjectsConfig();\n config.projects[key] = projectConfig;\n saveProjectsConfig(config);\n}\n\n/**\n * Remove a project from the registry\n */\nexport function unregisterProject(key: string): boolean {\n const config = loadProjectsConfig();\n if (config.projects[key]) {\n delete 
config.projects[key];\n saveProjectsConfig(config);\n return true;\n }\n return false;\n}\n\n/**\n * Extract Linear team prefix from an issue ID.\n * Supports standard (MIN-123), Rally (F29698), and custom formats.\n * @deprecated Use extractPrefix from issue-id.ts for unified parsing\n */\nexport function extractTeamPrefix(issueId: string): string | null {\n return extractPrefix(issueId);\n}\n\n/**\n * Find project by Linear team prefix\n */\nexport function findProjectByTeam(teamPrefix: string): ProjectConfig | null {\n const config = loadProjectsConfig();\n\n for (const [, projectConfig] of Object.entries(config.projects)) {\n if (getIssuePrefix(projectConfig)?.toUpperCase() === teamPrefix.toUpperCase()) {\n return projectConfig;\n }\n }\n\n return null;\n}\n\n/**\n * Find project by workspace path.\n * Matches any project whose root path is an ancestor of the given path.\n * Used to resolve the tracker (GitHub/GitLab) from a workspace directory.\n */\nexport function findProjectByPath(workspacePath: string): ProjectConfig | null {\n const config = loadProjectsConfig();\n const normalizedTarget = resolve(workspacePath);\n\n for (const [, projectConfig] of Object.entries(config.projects)) {\n const normalizedProject = resolve(projectConfig.path);\n if (normalizedTarget === normalizedProject || normalizedTarget.startsWith(normalizedProject + '/')) {\n return projectConfig;\n }\n }\n\n return null;\n}\n\n\n/**\n * Resolve the correct project path for an issue based on labels\n *\n * @param project - The project config\n * @param labels - Array of label names from the Linear issue\n * @returns The resolved path (may differ from project.path based on routing rules)\n */\nexport function resolveProjectPath(project: ProjectConfig, labels: string[] = []): string {\n if (!project.issue_routing || project.issue_routing.length === 0) {\n return project.path;\n }\n\n // Normalize labels to lowercase for comparison\n const normalizedLabels = labels.map(l => 
l.toLowerCase());\n\n // First, check label-based routing rules\n for (const rule of project.issue_routing) {\n if (rule.labels && rule.labels.length > 0) {\n const ruleLabels = rule.labels.map(l => l.toLowerCase());\n const hasMatch = ruleLabels.some(label => normalizedLabels.includes(label));\n if (hasMatch) {\n return rule.path;\n }\n }\n }\n\n // Then, find default rule\n for (const rule of project.issue_routing) {\n if (rule.default) {\n return rule.path;\n }\n }\n\n // Fall back to project path\n return project.path;\n}\n\n/**\n * Resolve project from an issue ID (and optional labels)\n *\n * @param issueId - Issue ID in any supported format (e.g., \"MIN-123\", \"F29698\")\n * @param labels - Optional array of label names\n * @returns Resolved project info or null if not found\n */\nexport function resolveProjectFromIssue(\n issueId: string,\n labels: string[] = []\n): ResolvedProject | null {\n const parsed = parseIssueId(issueId);\n if (!parsed) {\n return null;\n }\n\n const config = loadProjectsConfig();\n\n for (const [key, projectConfig] of Object.entries(config.projects)) {\n // Check single issue_prefix (existing behavior)\n const singlePrefix = getIssuePrefix(projectConfig);\n if (singlePrefix?.toUpperCase() === parsed.prefix) {\n const resolvedPath = resolveProjectPath(projectConfig, labels);\n return {\n projectKey: key,\n projectName: projectConfig.name,\n projectPath: resolvedPath,\n linearTeam: singlePrefix,\n };\n }\n\n // Check issue_prefixes array (new: multiple prefixes per project)\n if (projectConfig.issue_prefixes?.some(p => p.toUpperCase() === parsed.prefix)) {\n const resolvedPath = resolveProjectPath(projectConfig, labels);\n return {\n projectKey: key,\n projectName: projectConfig.name,\n projectPath: resolvedPath,\n linearTeam: projectConfig.issue_prefixes?.find(p => p.toUpperCase() === parsed.prefix),\n };\n }\n\n // Fallback: derive prefix from project key for projects without explicit prefixes\n if (!singlePrefix && 
!projectConfig.issue_prefixes) {\n const derivedPrefix = key.toUpperCase().replace(/-/g, '');\n if (derivedPrefix === parsed.prefix) {\n const resolvedPath = resolveProjectPath(projectConfig, labels);\n return {\n projectKey: key,\n projectName: projectConfig.name,\n projectPath: resolvedPath,\n linearTeam: undefined,\n };\n }\n }\n }\n\n return null;\n}\n\n/**\n * Get a project by key\n */\nexport function getProject(key: string): ProjectConfig | null {\n const config = loadProjectsConfig();\n return config.projects[key] || null;\n}\n\n/**\n * Check if projects.yaml exists and has any projects\n */\nexport function hasProjects(): boolean {\n const config = loadProjectsConfig();\n return Object.keys(config.projects).length > 0;\n}\n\n/**\n * Create a default projects.yaml with example structure\n */\nexport function createDefaultProjectsConfig(): ProjectsConfig {\n const defaultConfig: ProjectsConfig = {\n projects: {\n // Example project - commented out in actual file\n },\n };\n\n return defaultConfig;\n}\n\n/**\n * Initialize projects.yaml with example configuration\n */\nexport function initializeProjectsConfig(): void {\n if (existsSync(PROJECTS_CONFIG_FILE)) {\n console.log(`Projects config already exists at ${PROJECTS_CONFIG_FILE}`);\n return;\n }\n\n const exampleYaml = `# Panopticon Project Registry\n# Maps Linear teams to project paths for workspace creation\n\nprojects:\n # Example: Mind Your Now project\n # myn:\n # name: \"Mind Your Now\"\n # path: /home/user/projects/myn\n # linear_team: MIN\n # issue_routing:\n # # Route docs/marketing issues to docs repo\n # - labels: [docs, marketing, seo, landing-pages]\n # path: /home/user/projects/myn/docs\n # # Default: main repo\n # - default: true\n # path: /home/user/projects/myn\n # specialists:\n # context_runs: 5\n # digest_model: null # Use same model as specialist\n # retention:\n # max_days: 30\n # max_runs: 50\n # prompts:\n # review-agent: |\n # Pay special attention to:\n # - Database migration 
safety\n # - API backward compatibility\n\n # Example: Panopticon itself\n # panopticon:\n # name: \"Panopticon\"\n # path: /home/user/projects/panopticon\n # linear_team: PAN\n`;\n\n const dir = PANOPTICON_HOME;\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n\n writeFileSync(PROJECTS_CONFIG_FILE, exampleYaml, 'utf-8');\n console.log(`Created example projects config at ${PROJECTS_CONFIG_FILE}`);\n}\n\n/**\n * Default specialist configuration values\n */\nconst DEFAULT_SPECIALIST_CONFIG: Required<SpecialistConfig> = {\n context_runs: 5,\n digest_model: null,\n retention: {\n max_days: 30,\n max_runs: 50,\n },\n prompts: {},\n};\n\n/**\n * Get specialist configuration for a project with defaults\n *\n * @param projectKey - Project key\n * @returns Specialist config with defaults applied\n */\nexport function getSpecialistConfig(projectKey: string): Required<SpecialistConfig> {\n const project = getProject(projectKey);\n\n if (!project || !project.specialists) {\n return DEFAULT_SPECIALIST_CONFIG;\n }\n\n return {\n context_runs: project.specialists.context_runs ?? DEFAULT_SPECIALIST_CONFIG.context_runs,\n digest_model: project.specialists.digest_model ?? DEFAULT_SPECIALIST_CONFIG.digest_model,\n retention: {\n max_days: project.specialists.retention?.max_days ?? DEFAULT_SPECIALIST_CONFIG.retention.max_days,\n max_runs: project.specialists.retention?.max_runs ?? DEFAULT_SPECIALIST_CONFIG.retention.max_runs,\n },\n prompts: project.specialists.prompts ?? 
DEFAULT_SPECIALIST_CONFIG.prompts,\n };\n}\n\n/**\n * Get retention policy for a project's specialists\n *\n * @param projectKey - Project key\n * @returns Retention policy\n */\nexport function getSpecialistRetention(projectKey: string): { max_days: number; max_runs: number } {\n const config = getSpecialistConfig(projectKey);\n return config.retention;\n}\n\n/**\n * Find all projects that have a rally_project configured.\n * Returns array of { key, config } for projects with Rally project OIDs.\n */\nexport function findProjectsByRallyProject(): Array<{ key: string; config: ProjectConfig }> {\n const config = loadProjectsConfig();\n return Object.entries(config.projects)\n .filter(([, projectConfig]) => !!projectConfig.rally_project)\n .map(([key, projectConfig]) => ({ key, config: projectConfig }));\n}\n\n/**\n * Get custom prompt override for a specialist (if configured)\n *\n * @param projectKey - Project key\n * @param specialistType - Specialist type\n * @returns Custom prompt or null if not configured\n */\nexport function getSpecialistPromptOverride(\n projectKey: string,\n specialistType: string\n): string | null {\n const config = getSpecialistConfig(projectKey);\n return (config.prompts as Record<string, string | undefined>)[specialistType] || 
null;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAmIA,SAAgB,eAAe,QAA2C;AACxE,QAAO,OAAO;;;;;AAuBhB,SAAgB,qBAAqC;AACnD,KAAI,CAAC,WAAW,qBAAqB,CACnC,QAAO,EAAE,UAAU,EAAE,EAAE;AAGzB,KAAI;AAGF,SADeA,QADC,aAAa,sBAAsB,QAAQ,CAC1B,IAChB,EAAE,UAAU,EAAE,EAAE;UAC1B,OAAY;AACnB,UAAQ,MAAM,kCAAkC,MAAM,UAAU;AAChE,SAAO,EAAE,UAAU,EAAE,EAAE;;;;;;AAO3B,SAAgB,mBAAmB,QAA8B;CAC/D,MAAM,MAAM;AACZ,KAAI,CAAC,WAAW,IAAI,CAClB,WAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAIrC,eAAc,sBADDC,UAAc,QAAQ,EAAE,QAAQ,GAAG,CAAC,EACP,QAAQ;;;;;AAMpD,SAAgB,eAA8D;CAC5E,MAAM,SAAS,oBAAoB;AACnC,QAAO,OAAO,QAAQ,OAAO,SAAS,CAAC,KAAK,CAAC,KAAK,oBAAoB;EACpE;EACA,QAAQ;EACT,EAAE;;;;;AAML,SAAgB,gBAAgB,KAAa,eAAoC;CAC/E,MAAM,SAAS,oBAAoB;AACnC,QAAO,SAAS,OAAO;AACvB,oBAAmB,OAAO;;;;;AAM5B,SAAgB,kBAAkB,KAAsB;CACtD,MAAM,SAAS,oBAAoB;AACnC,KAAI,OAAO,SAAS,MAAM;AACxB,SAAO,OAAO,SAAS;AACvB,qBAAmB,OAAO;AAC1B,SAAO;;AAET,QAAO;;;;;;;AAQT,SAAgB,kBAAkB,SAAgC;AAChE,QAAO,cAAc,QAAQ;;;;;AAM/B,SAAgB,kBAAkB,YAA0C;CAC1E,MAAM,SAAS,oBAAoB;AAEnC,MAAK,MAAM,GAAG,kBAAkB,OAAO,QAAQ,OAAO,SAAS,CAC7D,KAAI,eAAe,cAAc,EAAE,aAAa,KAAK,WAAW,aAAa,CAC3E,QAAO;AAIX,QAAO;;;;;;;AAQT,SAAgB,kBAAkB,eAA6C;CAC7E,MAAM,SAAS,oBAAoB;CACnC,MAAM,mBAAmB,QAAQ,cAAc;AAE/C,MAAK,MAAM,GAAG,kBAAkB,OAAO,QAAQ,OAAO,SAAS,EAAE;EAC/D,MAAM,oBAAoB,QAAQ,cAAc,KAAK;AACrD,MAAI,qBAAqB,qBAAqB,iBAAiB,WAAW,oBAAoB,IAAI,CAChG,QAAO;;AAIX,QAAO;;;;;;;;;AAWT,SAAgB,mBAAmB,SAAwB,SAAmB,EAAE,EAAU;AACxF,KAAI,CAAC,QAAQ,iBAAiB,QAAQ,cAAc,WAAW,EAC7D,QAAO,QAAQ;CAIjB,MAAM,mBAAmB,OAAO,KAAI,MAAK,EAAE,aAAa,CAAC;AAGzD,MAAK,MAAM,QAAQ,QAAQ,cACzB,KAAI,KAAK,UAAU,KAAK,OAAO,SAAS;MACnB,KAAK,OAAO,KAAI,MAAK,EAAE,aAAa,CAAC,CAC5B,MAAK,UAAS,iBAAiB,SAAS,MAAM,CAAC,CAEzE,QAAO,KAAK;;AAMlB,MAAK,MAAM,QAAQ,QAAQ,cACzB,KAAI,KAAK,QACP,QAAO,KAAK;AAKhB,QAAO,QAAQ;;;;;;;;;AAUjB,SAAgB,wBACd,SACA,SAAmB,EAAE,EACG;CACxB,MAAM,SAAS,aAAa,QAAQ;AACpC,KAAI,CAAC,OACH,QAAO;CAGT,MAAM,SAAS,oBAAoB;AAEnC,MAAK,MAAM,CAAC,KAAK,kBAAkB,OAAO,QAAQ,OAAO,SAAS,EAAE;EAElE,MAAM,eAAe,eAAe,cAAc;AAClD,MAAI,cAAc,aAAa,KAAK,OAAO,QAAQ;GACjD,MAAM,eAAe,mBAAmB,eAAe,OAAO;AAC9D,UAAO;IACL,YAAY;IA
CZ,aAAa,cAAc;IAC3B,aAAa;IACb,YAAY;IACb;;AAIH,MAAI,cAAc,gBAAgB,MAAK,MAAK,EAAE,aAAa,KAAK,OAAO,OAAO,EAAE;GAC9E,MAAM,eAAe,mBAAmB,eAAe,OAAO;AAC9D,UAAO;IACL,YAAY;IACZ,aAAa,cAAc;IAC3B,aAAa;IACb,YAAY,cAAc,gBAAgB,MAAK,MAAK,EAAE,aAAa,KAAK,OAAO,OAAO;IACvF;;AAIH,MAAI,CAAC,gBAAgB,CAAC,cAAc;OACZ,IAAI,aAAa,CAAC,QAAQ,MAAM,GAAG,KACnC,OAAO,QAAQ;IACnC,MAAM,eAAe,mBAAmB,eAAe,OAAO;AAC9D,WAAO;KACL,YAAY;KACZ,aAAa,cAAc;KAC3B,aAAa;KACb,YAAY,KAAA;KACb;;;;AAKP,QAAO;;;;;AAMT,SAAgB,WAAW,KAAmC;AAE5D,QADe,oBAAoB,CACrB,SAAS,QAAQ;;;;;AAMjC,SAAgB,cAAuB;CACrC,MAAM,SAAS,oBAAoB;AACnC,QAAO,OAAO,KAAK,OAAO,SAAS,CAAC,SAAS;;;;;AAM/C,SAAgB,8BAA8C;AAO5D,QANsC,EACpC,UAAU,EAET,EACF;;;;;AAQH,SAAgB,2BAAiC;AAC/C,KAAI,WAAW,qBAAqB,EAAE;AACpC,UAAQ,IAAI,qCAAqC,uBAAuB;AACxE;;CAGF,MAAM,cAAc;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAmCpB,MAAM,MAAM;AACZ,KAAI,CAAC,WAAW,IAAI,CAClB,WAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAGrC,eAAc,sBAAsB,aAAa,QAAQ;AACzD,SAAQ,IAAI,sCAAsC,uBAAuB;;;;;;;;AAsB3E,SAAgB,oBAAoB,YAAgD;CAClF,MAAM,UAAU,WAAW,WAAW;AAEtC,KAAI,CAAC,WAAW,CAAC,QAAQ,YACvB,QAAO;AAGT,QAAO;EACL,cAAc,QAAQ,YAAY,gBAAgB,0BAA0B;EAC5E,cAAc,QAAQ,YAAY,gBAAgB,0BAA0B;EAC5E,WAAW;GACT,UAAU,QAAQ,YAAY,WAAW,YAAY,0BAA0B,UAAU;GACzF,UAAU,QAAQ,YAAY,WAAW,YAAY,0BAA0B,UAAU;GAC1F;EACD,SAAS,QAAQ,YAAY,WAAW,0BAA0B;EACnE;;;;;;;;AASH,SAAgB,uBAAuB,YAA4D;AAEjG,QADe,oBAAoB,WAAW,CAChC;;;;;;AAOhB,SAAgB,6BAA4E;CAC1F,MAAM,SAAS,oBAAoB;AACnC,QAAO,OAAO,QAAQ,OAAO,SAAS,CACnC,QAAQ,GAAG,mBAAmB,CAAC,CAAC,cAAc,cAAc,CAC5D,KAAK,CAAC,KAAK,oBAAoB;EAAE;EAAK,QAAQ;EAAe,EAAE;;;;;;;;;AAUpE,SAAgB,4BACd,YACA,gBACe;AAEf,QADe,oBAAoB,WAAW,CAC/B,QAA+C,mBAAmB;;;;aArftC;gBACe;AAG/C,wBAAuB,KAAK,iBAAiB,gBAAgB;AA6apE,6BAAwD;EAC5D,cAAc;EACd,cAAc;EACd,WAAW;GACT,UAAU;GACV,UAAU;GACX;EACD,SAAS,EAAE;EACZ"}
@@ -1,3 +1,3 @@
1
- import { a as findProjectByTeam, c as getProject, d as getSpecialistRetention, i as findProjectByPath, p as init_projects, r as extractTeamPrefix, u as getSpecialistPromptOverride, y as resolveProjectFromIssue } from "./projects-CvLepaxC.js";
1
+ import { a as findProjectByTeam, c as getProject, d as getSpecialistRetention, i as findProjectByPath, p as init_projects, r as extractTeamPrefix, u as getSpecialistPromptOverride, y as resolveProjectFromIssue } from "./projects-Bk-5QhFQ.js";
2
2
  init_projects();
3
3
  export { extractTeamPrefix, findProjectByPath, findProjectByTeam, getProject, getSpecialistPromptOverride, getSpecialistRetention, resolveProjectFromIssue };
@@ -27,7 +27,7 @@ function getProviderForModel(modelId) {
27
27
  "gpt-4o-mini"
28
28
  ].includes(modelId)) return PROVIDERS.openai;
29
29
  if (["gemini-3-pro-preview", "gemini-3-flash-preview"].includes(modelId)) return PROVIDERS.google;
30
- if (["glm-4.7", "glm-4.7-flash"].includes(modelId)) return PROVIDERS.zai;
30
+ if (["glm-4.7-flash"].includes(modelId)) return PROVIDERS.zai;
31
31
  if (["kimi-k2", "kimi-k2.5"].includes(modelId)) return PROVIDERS.kimi;
32
32
  if (["minimax-m2.7", "minimax-m2.7-highspeed"].includes(modelId)) return PROVIDERS.minimax;
33
33
  return PROVIDERS.anthropic;
@@ -146,9 +146,9 @@ var init_providers = __esmMin((() => {
146
146
  displayName: "Z.AI (GLM)",
147
147
  compatibility: "direct",
148
148
  baseUrl: "https://api.z.ai/api/anthropic",
149
- models: ["glm-4.7", "glm-4.7-flash"],
149
+ models: ["glm-4.7-flash"],
150
150
  tested: true,
151
- description: "Anthropic-compatible API, tested 2026-01-28"
151
+ description: "Anthropic-compatible API, GLM-4.7 Flash (31B, fast and affordable)"
152
152
  },
153
153
  openai: {
154
154
  name: "openai",
@@ -194,4 +194,4 @@ var init_providers = __esmMin((() => {
194
194
  //#endregion
195
195
  export { getProviderForModel as a, needsRouter as c, getProviderEnv as i, requiresRouter as l, clearCredentialFileAuth as n, getRouterProviders as o, getDirectProviders as r, init_providers as s, PROVIDERS as t, setupCredentialFileAuth as u };
196
196
 
197
- //# sourceMappingURL=providers-DcCPZ5K4.js.map
197
+ //# sourceMappingURL=providers-DSU1vfQF.js.map