panopticon-cli 0.6.5 → 0.6.7

This diff compares the contents of publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (311)
  1. package/README.md +2 -2
  2. package/dist/{agents-DfYify9s.js → agents-CfFDs52G.js} +14 -14
  3. package/dist/{agents-DfYify9s.js.map → agents-CfFDs52G.js.map} +1 -1
  4. package/dist/{agents-BKsVoIc9.js → agents-D_2oRFVf.js} +1 -1
  5. package/dist/{archive-planning-BJrZ3tmN.js → archive-planning-D97ziGec.js} +3 -3
  6. package/dist/{archive-planning-BJrZ3tmN.js.map → archive-planning-D97ziGec.js.map} +1 -1
  7. package/dist/{archive-planning-C3m3hfa5.js → archive-planning-DK90wn9Q.js} +1 -1
  8. package/dist/{browser-Cvdznzc0.js → browser-CX7jXfXX.js} +1 -1
  9. package/dist/{browser-Cvdznzc0.js.map → browser-CX7jXfXX.js.map} +1 -1
  10. package/dist/{clean-planning-DvhZAUv4.js → clean-planning-D_lz4aQq.js} +2 -2
  11. package/dist/{clean-planning-DvhZAUv4.js.map → clean-planning-D_lz4aQq.js.map} +1 -1
  12. package/dist/clean-planning-x1S-JdmO.js +2 -0
  13. package/dist/cli/index.js +291 -760
  14. package/dist/cli/index.js.map +1 -1
  15. package/dist/{close-issue-Dr7yZmrr.js → close-issue-CaFE0stN.js} +11 -7
  16. package/dist/close-issue-CaFE0stN.js.map +1 -0
  17. package/dist/close-issue-CjcfZI9s.js +2 -0
  18. package/dist/compact-beads-B0_qE1w3.js +2 -0
  19. package/dist/{compact-beads-BCOtIIRl.js → compact-beads-CjFkteSU.js} +2 -2
  20. package/dist/{compact-beads-BCOtIIRl.js.map → compact-beads-CjFkteSU.js.map} +1 -1
  21. package/dist/{config-CRzMQRgA.js → config-BQNKsi9G.js} +2 -2
  22. package/dist/{config-CRzMQRgA.js.map → config-BQNKsi9G.js.map} +1 -1
  23. package/dist/{config-BYgUzQ21.js → config-agyKgF5C.js} +1 -1
  24. package/dist/{config-yaml-BgOACZAB.js → config-yaml-DGbLSMCa.js} +1 -1
  25. package/dist/{config-yaml-BgOACZAB.js.map → config-yaml-DGbLSMCa.js.map} +1 -1
  26. package/dist/{config-yaml-fdyvyL0S.js → config-yaml-Dqt4FWQH.js} +1 -1
  27. package/dist/dashboard/{acceptance-criteria-e5iiHlRx.js → acceptance-criteria-Dk9hhiYj.js} +1 -1
  28. package/dist/dashboard/{acceptance-criteria-e5iiHlRx.js.map → acceptance-criteria-Dk9hhiYj.js.map} +1 -1
  29. package/dist/dashboard/{agent-enrichment-C67LJBgD.js → agent-enrichment-DdO7ZqjI.js} +11 -7
  30. package/dist/dashboard/agent-enrichment-DdO7ZqjI.js.map +1 -0
  31. package/dist/dashboard/{agent-enrichment-Cq0P1cNZ.js → agent-enrichment-dLeGE1fX.js} +1 -1
  32. package/dist/dashboard/{agents-YyO6t5Xa.js → agents-DCpQQ_W5.js} +14 -14
  33. package/dist/dashboard/{agents-YyO6t5Xa.js.map → agents-DCpQQ_W5.js.map} +1 -1
  34. package/dist/dashboard/{agents-BVBVCyat.js → agents-Dgh2TjSp.js} +1 -1
  35. package/dist/dashboard/{archive-planning-h-hAjk0P.js → archive-planning-BmW9UDTr.js} +3 -3
  36. package/dist/dashboard/{archive-planning-h-hAjk0P.js.map → archive-planning-BmW9UDTr.js.map} +1 -1
  37. package/dist/dashboard/{archive-planning-CScs1MOC.js → archive-planning-C3Ebf9yC.js} +1 -1
  38. package/dist/dashboard/{beads-qNB0yAHV.js → beads-Bv-AdX7G.js} +3 -3
  39. package/dist/dashboard/{beads-qNB0yAHV.js.map → beads-Bv-AdX7G.js.map} +1 -1
  40. package/dist/dashboard/{beads-D_FRedEJ.js → beads-By6-X07V.js} +1 -1
  41. package/dist/dashboard/clean-planning-D60L8rPY.js +2 -0
  42. package/dist/dashboard/{clean-planning-qafw99vY.js → clean-planning-VEJu5suh.js} +2 -2
  43. package/dist/dashboard/{clean-planning-qafw99vY.js.map → clean-planning-VEJu5suh.js.map} +1 -1
  44. package/dist/dashboard/close-issue-C2KeSKKJ.js +2 -0
  45. package/dist/dashboard/{close-issue-DfIggeZD.js → close-issue-DtKdsSTm.js} +11 -7
  46. package/dist/dashboard/close-issue-DtKdsSTm.js.map +1 -0
  47. package/dist/dashboard/compact-beads-C7BN5N11.js +2 -0
  48. package/dist/dashboard/{compact-beads-Dt0qTqsC.js → compact-beads-D8Vt3qyv.js} +2 -2
  49. package/dist/dashboard/{compact-beads-Dt0qTqsC.js.map → compact-beads-D8Vt3qyv.js.map} +1 -1
  50. package/dist/dashboard/{config-CUREjHP7.js → config-CDkGjnwy.js} +2 -2
  51. package/dist/dashboard/{config-CUREjHP7.js.map → config-CDkGjnwy.js.map} +1 -1
  52. package/dist/dashboard/{config-BeI3uy-8.js → config-CTXkBATQ.js} +1 -1
  53. package/dist/dashboard/{database-CozA13Wy.js → database-DhqASALP.js} +1 -1
  54. package/dist/dashboard/{database-C0y0hXBx.js → database-cxmQryoh.js} +2 -2
  55. package/dist/dashboard/{database-C0y0hXBx.js.map → database-cxmQryoh.js.map} +1 -1
  56. package/dist/dashboard/{dist-src-oG2iHzgI.js → dist-src-DTm11oQr.js} +1 -1
  57. package/dist/dashboard/{dist-src-oG2iHzgI.js.map → dist-src-DTm11oQr.js.map} +1 -1
  58. package/dist/dashboard/{event-store-D7kLBd07.js → event-store-VWWUmOfn.js} +1 -1
  59. package/dist/dashboard/{event-store-O9q0Gweh.js → event-store-vSmAA3Zp.js} +9 -4
  60. package/dist/dashboard/event-store-vSmAA3Zp.js.map +1 -0
  61. package/dist/dashboard/{factory-BnLdiQW-.js → factory-C8nhLGHB.js} +3 -3
  62. package/dist/dashboard/{factory-BnLdiQW-.js.map → factory-C8nhLGHB.js.map} +1 -1
  63. package/dist/dashboard/{feedback-writer-DyovUANg.js → feedback-writer-CudSe1WK.js} +2 -2
  64. package/dist/dashboard/{feedback-writer-DyovUANg.js.map → feedback-writer-CudSe1WK.js.map} +1 -1
  65. package/dist/dashboard/{feedback-writer-gSUv_W0h.js → feedback-writer-Wgv1cd1r.js} +1 -1
  66. package/dist/dashboard/{git-utils-BJRioREj.js → git-utils-C1m4SwAe.js} +1 -1
  67. package/dist/dashboard/{git-utils-BJRioREj.js.map → git-utils-C1m4SwAe.js.map} +1 -1
  68. package/dist/dashboard/{git-utils-BtCRddq3.js → git-utils-DQI8EYoj.js} +1 -1
  69. package/dist/dashboard/{github-app-XO-LBUGk.js → github-app-DClWjjHr.js} +1 -1
  70. package/dist/dashboard/{github-app-XO-LBUGk.js.map → github-app-DClWjjHr.js.map} +1 -1
  71. package/dist/dashboard/{health-events-db-584nYgJB.js → health-events-db-BMXQfInV.js} +1 -1
  72. package/dist/dashboard/{health-events-db-B3ChzN65.js → health-events-db-Do4NrOhC.js} +2 -2
  73. package/dist/dashboard/{health-events-db-B3ChzN65.js.map → health-events-db-Do4NrOhC.js.map} +1 -1
  74. package/dist/dashboard/{hooks-CKhs3N68.js → hooks-CB4T47NC.js} +1 -1
  75. package/dist/dashboard/{hooks-CErbP8Oq.js → hooks-CjqXOlNb.js} +2 -2
  76. package/dist/dashboard/{hooks-CErbP8Oq.js.map → hooks-CjqXOlNb.js.map} +1 -1
  77. package/dist/dashboard/hume-CA2pftu_.js +3 -0
  78. package/dist/dashboard/{hume-CX_U3Qha.js → hume-JsAlMOJC.js} +2 -2
  79. package/dist/dashboard/{hume-CX_U3Qha.js.map → hume-JsAlMOJC.js.map} +1 -1
  80. package/dist/dashboard/{inspect-agent-B57kGDUV.js → inspect-agent-7eour7EA.js} +3 -3
  81. package/dist/dashboard/{inspect-agent-B57kGDUV.js.map → inspect-agent-7eour7EA.js.map} +1 -1
  82. package/dist/dashboard/{io-yGovuG4U.js → io-CWlFW78i.js} +1 -1
  83. package/dist/dashboard/{io-AJg-mzFi.js → io-DKS6359z.js} +1 -1
  84. package/dist/dashboard/{io-AJg-mzFi.js.map → io-DKS6359z.js.map} +1 -1
  85. package/dist/dashboard/issue-id-vwYJdsf8.js +62 -0
  86. package/dist/dashboard/issue-id-vwYJdsf8.js.map +1 -0
  87. package/dist/dashboard/{issue-service-singleton-DQK42EqH.js → issue-service-singleton-Co__-6kL.js} +1 -1
  88. package/dist/dashboard/{issue-service-singleton-sb2HkB9f.js → issue-service-singleton-Wv4xBm3y.js} +7 -7
  89. package/dist/dashboard/{issue-service-singleton-sb2HkB9f.js.map → issue-service-singleton-Wv4xBm3y.js.map} +1 -1
  90. package/dist/dashboard/{label-cleanup-CZEsbtq9.js → label-cleanup-nVKTmIIW.js} +7 -4
  91. package/dist/dashboard/label-cleanup-nVKTmIIW.js.map +1 -0
  92. package/dist/dashboard/lifecycle-BcUmtkR4.js +7 -0
  93. package/dist/dashboard/{merge-agent-GLtMEsTu.js → merge-agent-CGN3TT0a.js} +1 -1
  94. package/dist/dashboard/{merge-agent-twroFuAh.js → merge-agent-yudQOPZc.js} +148 -46
  95. package/dist/dashboard/merge-agent-yudQOPZc.js.map +1 -0
  96. package/dist/dashboard/{paths-COdEvoXR.js → paths-BDyJ7BiV.js} +19 -2
  97. package/dist/dashboard/{paths-COdEvoXR.js.map → paths-BDyJ7BiV.js.map} +1 -1
  98. package/dist/dashboard/{pipeline-notifier-DM5AHG5Q.js → pipeline-notifier-CCSN-jar.js} +1 -1
  99. package/dist/dashboard/{pipeline-notifier-DM5AHG5Q.js.map → pipeline-notifier-CCSN-jar.js.map} +1 -1
  100. package/dist/dashboard/{plan-utils-BkCIhn3B.js → plan-utils-Bkcsqr_s.js} +3 -3
  101. package/dist/dashboard/{plan-utils-BkCIhn3B.js.map → plan-utils-Bkcsqr_s.js.map} +1 -1
  102. package/dist/dashboard/{prd-draft-D09Afalc.js → prd-draft-BD8oMkZ1.js} +2 -2
  103. package/dist/dashboard/{prd-draft-D09Afalc.js.map → prd-draft-BD8oMkZ1.js.map} +1 -1
  104. package/dist/dashboard/{projection-cache-DQ9zegkK.js → projection-cache-C0EL8s8h.js} +1 -1
  105. package/dist/dashboard/{projection-cache-DQ9zegkK.js.map → projection-cache-C0EL8s8h.js.map} +1 -1
  106. package/dist/dashboard/{projects-DyT3vSy-.js → projects-C5ozxjwP.js} +1 -1
  107. package/dist/dashboard/{projects-Cq3TWdPS.js → projects-CFVl4oHn.js} +25 -13
  108. package/dist/dashboard/projects-CFVl4oHn.js.map +1 -0
  109. package/dist/dashboard/{providers-Ck2sQd_F.js → providers-B5Y4H2Mg.js} +4 -4
  110. package/dist/dashboard/providers-B5Y4H2Mg.js.map +1 -0
  111. package/dist/dashboard/{providers-DVQnDekG.js → providers-csVZVPkE.js} +1 -1
  112. package/dist/dashboard/public/assets/{dist-CCJbQrSB.js → dist-CXaO6nOE.js} +1 -1
  113. package/dist/dashboard/public/assets/index-CzFZIb87.js +212 -0
  114. package/dist/dashboard/public/assets/index-OEEbThNN.css +1 -0
  115. package/dist/dashboard/public/index.html +2 -2
  116. package/dist/dashboard/rally-6McpKKRa.js +3 -0
  117. package/dist/dashboard/{rally-Cwuae-4C.js → rally-YjFRxIiC.js} +2 -2
  118. package/dist/dashboard/{rally-Cwuae-4C.js.map → rally-YjFRxIiC.js.map} +1 -1
  119. package/dist/dashboard/{rally-api-DSUxm7EO.js → rally-api-C0WqCSkT.js} +1 -1
  120. package/dist/dashboard/{rally-api-DSUxm7EO.js.map → rally-api-C0WqCSkT.js.map} +1 -1
  121. package/dist/dashboard/{rally-api-CEH5KZi4.js → rally-api-DNttdCW4.js} +1 -1
  122. package/dist/dashboard/{remote-BHTTMpJJ.js → remote-Cigqjj3f.js} +2 -2
  123. package/dist/dashboard/{remote-BXo_iIku.js → remote-ObpNZ7hF.js} +2 -2
  124. package/dist/dashboard/{remote-BXo_iIku.js.map → remote-ObpNZ7hF.js.map} +1 -1
  125. package/dist/dashboard/{remote-agents-CTKVhFFY.js → remote-agents-Bf3GuM7t.js} +1 -1
  126. package/dist/dashboard/{remote-agents-C0_0LLNd.js → remote-agents-DFyjT1Le.js} +1 -1
  127. package/dist/dashboard/{remote-agents-C0_0LLNd.js.map → remote-agents-DFyjT1Le.js.map} +1 -1
  128. package/dist/dashboard/{review-status-CK3eBGyb.js → review-status-BtXqWBhS.js} +1 -1
  129. package/dist/dashboard/{review-status-CV55Tl-n.js → review-status-Bymwzh2i.js} +44 -4
  130. package/dist/dashboard/{review-status-CV55Tl-n.js.map → review-status-Bymwzh2i.js.map} +1 -1
  131. package/dist/dashboard/server.js +559 -253
  132. package/dist/dashboard/server.js.map +1 -1
  133. package/dist/dashboard/{settings-CuHV-wcv.js → settings-BHlDG7TK.js} +2 -2
  134. package/dist/dashboard/settings-BHlDG7TK.js.map +1 -0
  135. package/dist/dashboard/settings-XWvDcj-D.js +2 -0
  136. package/dist/dashboard/{shadow-engineering-BUeZunaE.js → shadow-engineering-lIn1W_95.js} +1 -1
  137. package/dist/dashboard/{shadow-engineering-BUeZunaE.js.map → shadow-engineering-lIn1W_95.js.map} +1 -1
  138. package/dist/dashboard/{shadow-state-DHQ-kASN.js → shadow-state-BIexcxkv.js} +1 -1
  139. package/dist/dashboard/{shadow-state-DHQ-kASN.js.map → shadow-state-BIexcxkv.js.map} +1 -1
  140. package/dist/dashboard/{spawn-planning-session-8FFAqLdK.js → spawn-planning-session-33Jf-d5T.js} +6 -6
  141. package/dist/dashboard/{spawn-planning-session-8FFAqLdK.js.map → spawn-planning-session-33Jf-d5T.js.map} +1 -1
  142. package/dist/dashboard/{spawn-planning-session-U0Lqpjen.js → spawn-planning-session-D5hrVdWM.js} +1 -1
  143. package/dist/dashboard/{specialist-context-ColzlmGE.js → specialist-context-DGukHSn8.js} +6 -6
  144. package/dist/dashboard/{specialist-context-ColzlmGE.js.map → specialist-context-DGukHSn8.js.map} +1 -1
  145. package/dist/dashboard/{specialist-logs-BhmDpFIq.js → specialist-logs-CIw4qfTy.js} +1 -1
  146. package/dist/dashboard/{specialists-C6s3U6tX.js → specialists-B_zrayaP.js} +37 -36
  147. package/dist/dashboard/specialists-B_zrayaP.js.map +1 -0
  148. package/dist/dashboard/{specialists-Cny632-T.js → specialists-Cp-PgspS.js} +1 -1
  149. package/dist/dashboard/{test-agent-queue-tqI4VDsu.js → test-agent-queue-ypF_ecHo.js} +4 -4
  150. package/dist/dashboard/{test-agent-queue-tqI4VDsu.js.map → test-agent-queue-ypF_ecHo.js.map} +1 -1
  151. package/dist/dashboard/{tldr-daemon-BNFyS7W_.js → tldr-daemon-B_oLRD8z.js} +2 -2
  152. package/dist/dashboard/{tldr-daemon-BNFyS7W_.js.map → tldr-daemon-B_oLRD8z.js.map} +1 -1
  153. package/dist/dashboard/{tldr-daemon-A6JqC59u.js → tldr-daemon-Cfs0bXTi.js} +1 -1
  154. package/dist/dashboard/{tmux-DYGAVJfb.js → tmux-BzxdKItf.js} +1 -1
  155. package/dist/dashboard/{tmux-IlN1Slv-.js → tmux-LwG0tHhU.js} +2 -2
  156. package/dist/dashboard/{tmux-IlN1Slv-.js.map → tmux-LwG0tHhU.js.map} +1 -1
  157. package/dist/dashboard/{tracker-config-BzNLnmcE.js → tracker-config-BP59uH4V.js} +1 -1
  158. package/dist/dashboard/{tracker-config-CNM_5rEf.js → tracker-config-e7ph1QqT.js} +2 -2
  159. package/dist/dashboard/{tracker-config-CNM_5rEf.js.map → tracker-config-e7ph1QqT.js.map} +1 -1
  160. package/dist/dashboard/{tunnel-D2BkwU7k.js → tunnel-0RzzuXPf.js} +1 -1
  161. package/dist/dashboard/{tunnel-Dub2hiAA.js → tunnel-DldbBPWL.js} +2 -2
  162. package/dist/dashboard/{tunnel-Dub2hiAA.js.map → tunnel-DldbBPWL.js.map} +1 -1
  163. package/dist/dashboard/{types-CWA-o4UN.js → types-RKZjGE5N.js} +1 -1
  164. package/dist/dashboard/{types-CWA-o4UN.js.map → types-RKZjGE5N.js.map} +1 -1
  165. package/dist/dashboard/{vtt-parser-BAXygRf0.js → vtt-parser-99vFekRQ.js} +1 -1
  166. package/dist/dashboard/{vtt-parser-BAXygRf0.js.map → vtt-parser-99vFekRQ.js.map} +1 -1
  167. package/dist/dashboard/{work-agent-prompt-JYq_OugP.js → work-agent-prompt-fCg67nyo.js} +65 -10
  168. package/dist/dashboard/{work-agent-prompt-JYq_OugP.js.map → work-agent-prompt-fCg67nyo.js.map} +1 -1
  169. package/dist/dashboard/{work-type-router-Cxp8_ur2.js → work-type-router-CWVW2Wk_.js} +1 -1
  170. package/dist/dashboard/{work-type-router-Cxp8_ur2.js.map → work-type-router-CWVW2Wk_.js.map} +1 -1
  171. package/dist/dashboard/{work-type-router-Com2amST.js → work-type-router-Di5gCQwh.js} +1 -1
  172. package/dist/dashboard/{workflows-N1UTipYl.js → workflows-BSMipN07.js} +35 -17
  173. package/dist/dashboard/workflows-BSMipN07.js.map +1 -0
  174. package/dist/dashboard/workflows-DaYWQIS2.js +2 -0
  175. package/dist/dashboard/{workspace-config-cmp5_ipD.js → workspace-config-DVDR-Ukh.js} +1 -1
  176. package/dist/dashboard/workspace-config-DVDR-Ukh.js.map +1 -0
  177. package/dist/dashboard/{workspace-manager-CjpWPgzL.js → workspace-manager-BYfzs_t2.js} +1 -1
  178. package/dist/dashboard/{workspace-manager-D_y9ZmW_.js → workspace-manager-C7OfT62A.js} +44 -24
  179. package/dist/dashboard/workspace-manager-C7OfT62A.js.map +1 -0
  180. package/dist/{dns-BKzHm-2q.js → dns-D_aKQJjb.js} +1 -1
  181. package/dist/{dns-DZwOWvVO.js → dns-Yxq4NNS7.js} +1 -1
  182. package/dist/{dns-DZwOWvVO.js.map → dns-Yxq4NNS7.js.map} +1 -1
  183. package/dist/{factory-DFu3IT4r.js → factory-BRBGw6OB.js} +1 -1
  184. package/dist/{factory-DfzczxN1.js → factory-DzsOiZVc.js} +3 -3
  185. package/dist/{factory-DfzczxN1.js.map → factory-DzsOiZVc.js.map} +1 -1
  186. package/dist/{feedback-writer-CwdnOkPO.js → feedback-writer-ygXN5F9N.js} +2 -2
  187. package/dist/{feedback-writer-CwdnOkPO.js.map → feedback-writer-ygXN5F9N.js.map} +1 -1
  188. package/dist/{github-app-CHKwxOeQ.js → github-app-DykduJ0X.js} +1 -1
  189. package/dist/{github-app-CHKwxOeQ.js.map → github-app-DykduJ0X.js.map} +1 -1
  190. package/dist/hume-9nv1VmMV.js +3 -0
  191. package/dist/{hume-DnV-tDsh.js → hume-DoCbph2h.js} +2 -2
  192. package/dist/{hume-DnV-tDsh.js.map → hume-DoCbph2h.js.map} +1 -1
  193. package/dist/index.d.ts +17 -2
  194. package/dist/index.d.ts.map +1 -1
  195. package/dist/index.js +8 -7
  196. package/dist/issue-id-CAcekoIw.js +62 -0
  197. package/dist/issue-id-CAcekoIw.js.map +1 -0
  198. package/dist/{label-cleanup-31ElPqqv.js → label-cleanup-C8R9Rspn.js} +7 -4
  199. package/dist/label-cleanup-C8R9Rspn.js.map +1 -0
  200. package/dist/{manifest-DL0oDbpv.js → manifest-B4ghOD-V.js} +1 -1
  201. package/dist/{manifest-DL0oDbpv.js.map → manifest-B4ghOD-V.js.map} +1 -1
  202. package/dist/{merge-agent-VQH9z9t8.js → merge-agent-DlUiUanN.js} +86 -33
  203. package/dist/merge-agent-DlUiUanN.js.map +1 -0
  204. package/dist/{paths-lMaxrYtT.js → paths-CDJ_HsbN.js} +19 -2
  205. package/dist/{paths-lMaxrYtT.js.map → paths-CDJ_HsbN.js.map} +1 -1
  206. package/dist/{pipeline-notifier-OJ-d3Y60.js → pipeline-notifier-XgDdCdvT.js} +1 -1
  207. package/dist/{pipeline-notifier-OJ-d3Y60.js.map → pipeline-notifier-XgDdCdvT.js.map} +1 -1
  208. package/dist/{projects-CvLepaxC.js → projects-Bk-5QhFQ.js} +25 -13
  209. package/dist/projects-Bk-5QhFQ.js.map +1 -0
  210. package/dist/{projects-DMWmPeIU.js → projects-DhU7rAVN.js} +1 -1
  211. package/dist/{providers-DcCPZ5K4.js → providers-DSU1vfQF.js} +4 -4
  212. package/dist/providers-DSU1vfQF.js.map +1 -0
  213. package/dist/rally-DdPvGa-w.js +3 -0
  214. package/dist/{rally-uUUZXp1h.js → rally-Dy00NElU.js} +1 -1
  215. package/dist/{rally-uUUZXp1h.js.map → rally-Dy00NElU.js.map} +1 -1
  216. package/dist/{remote-CkLBqLJc.js → remote-CYiOJg0q.js} +2 -2
  217. package/dist/{remote-CkLBqLJc.js.map → remote-CYiOJg0q.js.map} +1 -1
  218. package/dist/{remote-agents-C5Bd2fgt.js → remote-agents-CZXrUF4f.js} +1 -1
  219. package/dist/{remote-agents-C5Bd2fgt.js.map → remote-agents-CZXrUF4f.js.map} +1 -1
  220. package/dist/{remote-agents-BTzD-wMQ.js → remote-agents-ycHHVsgf.js} +1 -1
  221. package/dist/{remote-workspace-Dxghqiti.js → remote-workspace-CA33UuVI.js} +4 -4
  222. package/dist/{remote-workspace-Dxghqiti.js.map → remote-workspace-CA33UuVI.js.map} +1 -1
  223. package/dist/{review-status-2TdtHNcs.js → review-status-D6H2WOw8.js} +1 -1
  224. package/dist/{review-status-Bm1bWNEa.js → review-status-DEDvCKMP.js} +44 -4
  225. package/dist/{review-status-Bm1bWNEa.js.map → review-status-DEDvCKMP.js.map} +1 -1
  226. package/dist/{tracker-C_62ukEq.js → settings-BcWPTrua.js} +7 -199
  227. package/dist/settings-BcWPTrua.js.map +1 -0
  228. package/dist/shadow-state-BZzxfEGw.js +2 -0
  229. package/dist/{shadow-state-CFFHf05M.js → shadow-state-CE3dQfll.js} +1 -1
  230. package/dist/{shadow-state-CFFHf05M.js.map → shadow-state-CE3dQfll.js.map} +1 -1
  231. package/dist/{specialist-context-BdNFsfMG.js → specialist-context-BAUWL1Fl.js} +6 -6
  232. package/dist/{specialist-context-BdNFsfMG.js.map → specialist-context-BAUWL1Fl.js.map} +1 -1
  233. package/dist/{specialist-logs-CLztE_bE.js → specialist-logs-DQKKQV9B.js} +1 -1
  234. package/dist/{specialists-aUoUVWsN.js → specialists-Bfb9ATzw.js} +1 -1
  235. package/dist/{specialists-DEKqgkxp.js → specialists-D7Kj5o6s.js} +35 -34
  236. package/dist/specialists-D7Kj5o6s.js.map +1 -0
  237. package/dist/sync-DMfgd389.js +693 -0
  238. package/dist/sync-DMfgd389.js.map +1 -0
  239. package/dist/sync-TL6y-8K6.js +2 -0
  240. package/dist/{tldr-daemon-BCEFPItr.js → tldr-daemon-CFx4LXAl.js} +2 -2
  241. package/dist/{tldr-daemon-BCEFPItr.js.map → tldr-daemon-CFx4LXAl.js.map} +1 -1
  242. package/dist/{tldr-daemon-xBAx4cBE.js → tldr-daemon-D_EooADG.js} +1 -1
  243. package/dist/{tmux-DN6H886Y.js → tmux-CBtui_Cl.js} +1 -1
  244. package/dist/{tmux-CKdNxxJx.js → tmux-D6Ah4I8z.js} +2 -2
  245. package/dist/{tmux-CKdNxxJx.js.map → tmux-D6Ah4I8z.js.map} +1 -1
  246. package/dist/tracker-BhYYvU3p.js +198 -0
  247. package/dist/tracker-BhYYvU3p.js.map +1 -0
  248. package/dist/{tracker-utils-CVU2W1sX.js → tracker-utils-ChQyut8w.js} +34 -12
  249. package/dist/tracker-utils-ChQyut8w.js.map +1 -0
  250. package/dist/{traefik-DHgBoWXX.js → traefik-C80EbDu_.js} +4 -4
  251. package/dist/{traefik-DHgBoWXX.js.map → traefik-C80EbDu_.js.map} +1 -1
  252. package/dist/{traefik-BR-edbZv.js → traefik-CgHl7Bge.js} +1 -1
  253. package/dist/{tunnel-BZO9Q5oe.js → tunnel-DXOJ1wMM.js} +1 -1
  254. package/dist/{tunnel-Bl1qNSyQ.js → tunnel-DzXEPwIc.js} +2 -2
  255. package/dist/{tunnel-Bl1qNSyQ.js.map → tunnel-DzXEPwIc.js.map} +1 -1
  256. package/dist/{types-DewGdaIP.js → types-BhJj1SP1.js} +1 -1
  257. package/dist/{types-DewGdaIP.js.map → types-BhJj1SP1.js.map} +1 -1
  258. package/dist/{work-type-router-CS2BB1vS.js → work-type-router-CHjciPyS.js} +3 -3
  259. package/dist/{work-type-router-CS2BB1vS.js.map → work-type-router-CHjciPyS.js.map} +1 -1
  260. package/dist/{workspace-config-CNXOpKuj.js → workspace-config-fUafvYMp.js} +1 -1
  261. package/dist/workspace-config-fUafvYMp.js.map +1 -0
  262. package/dist/workspace-manager-B9jS4Dsq.js +3 -0
  263. package/dist/{workspace-manager-CncdZkIy.js → workspace-manager-DuLhnzJV.js} +112 -27
  264. package/dist/workspace-manager-DuLhnzJV.js.map +1 -0
  265. package/package.json +2 -1
  266. package/scripts/post-merge-deploy.sh +25 -5
  267. package/scripts/record-cost-event.js +57 -7
  268. package/scripts/record-cost-event.js.map +1 -1
  269. package/skills/pan-help/SKILL.md +1 -1
  270. package/skills/pan-sync/SKILL.md +6 -6
  271. package/skills/workspace-add-repo/skill.md +46 -0
  272. package/templates/claude-md/sections/warnings.md +15 -2
  273. package/dist/clean-planning-sZXvy3Y5.js +0 -2
  274. package/dist/close-issue-Dml437qV.js +0 -2
  275. package/dist/close-issue-Dr7yZmrr.js.map +0 -1
  276. package/dist/compact-beads-iu218JcO.js +0 -2
  277. package/dist/dashboard/agent-enrichment-C67LJBgD.js.map +0 -1
  278. package/dist/dashboard/clean-planning-DCu3cOTu.js +0 -2
  279. package/dist/dashboard/close-issue-DfIggeZD.js.map +0 -1
  280. package/dist/dashboard/close-issue-DwdwYtar.js +0 -2
  281. package/dist/dashboard/compact-beads-DXY2fK2s.js +0 -2
  282. package/dist/dashboard/event-store-O9q0Gweh.js.map +0 -1
  283. package/dist/dashboard/hume-MZndNDVU.js +0 -3
  284. package/dist/dashboard/label-cleanup-CZEsbtq9.js.map +0 -1
  285. package/dist/dashboard/lifecycle-ZTYdrr2O.js +0 -7
  286. package/dist/dashboard/merge-agent-twroFuAh.js.map +0 -1
  287. package/dist/dashboard/projects-Cq3TWdPS.js.map +0 -1
  288. package/dist/dashboard/providers-Ck2sQd_F.js.map +0 -1
  289. package/dist/dashboard/public/assets/index-CpSmB2ts.css +0 -1
  290. package/dist/dashboard/public/assets/index-yarWhi0M.js +0 -214
  291. package/dist/dashboard/rally-CQ1OBJrJ.js +0 -3
  292. package/dist/dashboard/settings-CuHV-wcv.js.map +0 -1
  293. package/dist/dashboard/settings-DMeGBRsk.js +0 -2
  294. package/dist/dashboard/specialists-C6s3U6tX.js.map +0 -1
  295. package/dist/dashboard/workflows-B2ARUpOa.js +0 -2
  296. package/dist/dashboard/workflows-N1UTipYl.js.map +0 -1
  297. package/dist/dashboard/workspace-config-cmp5_ipD.js.map +0 -1
  298. package/dist/dashboard/workspace-manager-D_y9ZmW_.js.map +0 -1
  299. package/dist/hume-BjmwmJ9E.js +0 -3
  300. package/dist/label-cleanup-31ElPqqv.js.map +0 -1
  301. package/dist/merge-agent-VQH9z9t8.js.map +0 -1
  302. package/dist/projects-CvLepaxC.js.map +0 -1
  303. package/dist/providers-DcCPZ5K4.js.map +0 -1
  304. package/dist/rally-DR9x8--6.js +0 -3
  305. package/dist/shadow-state-p3jpGRPJ.js +0 -2
  306. package/dist/specialists-DEKqgkxp.js.map +0 -1
  307. package/dist/tracker-C_62ukEq.js.map +0 -1
  308. package/dist/tracker-utils-CVU2W1sX.js.map +0 -1
  309. package/dist/workspace-config-CNXOpKuj.js.map +0 -1
  310. package/dist/workspace-manager-CncdZkIy.js.map +0 -1
  311. package/dist/workspace-manager-Cx0r2Jnv.js +0 -3
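Most of the 311 entries above are content-hashed chunk renames: the bundler derives each chunk's filename suffix from its contents, so one real change to a shared module (paths, review-status, merge-agent, and friends) cascades into a rename of every chunk that imports it, each showing only a +1 -1 delta for the updated import specifier. A minimal sketch of that naming scheme, assuming a generic content hash; the actual bundler's algorithm, encoding, and hash length are not specified here:

import { createHash } from "node:crypto";

// Hypothetical illustration of content-hashed chunk naming. The hash
// function and 8-character length are assumptions, not the bundler's exact scheme.
function chunkFileName(name: string, contents: string): string {
  const hash = createHash("sha256")
    .update(contents)
    .digest("base64url") // url-safe alphabet, like the A-Za-z0-9_- suffixes above
    .slice(0, 8);
  return `${name}-${hash}.js`;
}

// Any edit to a chunk's contents (including a renamed import of another chunk)
// changes the hash, which is why the list is dominated by renames.
console.log(chunkFileName("review-status", 'export const x = 1;'));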
package/dist/{remote-workspace-Dxghqiti.js.map → remote-workspace-CA33UuVI.js.map}
@@ -1 +1 @@
- {"version":3,"file":"remote-workspace-Dxghqiti.js","names":[],"sources":["../src/lib/remote-workspace.ts"],"sourcesContent":["/**\n * Remote Workspace Creation\n *\n * Shared module for creating remote workspaces.\n * Used by both workspace.ts (explicit creation) and work/issue.ts (auto-creation).\n */\n\nimport chalk from 'chalk';\nimport { existsSync } from 'fs';\nimport { join } from 'path';\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport { loadConfig } from './config.js';\nimport { createFlyProviderFromConfig } from './remote/index.js';\nimport { saveWorkspaceMetadata } from './remote/workspace-metadata.js';\nimport type { RemoteWorkspaceMetadata } from './remote/interface.js';\nimport { extractTeamPrefix, findProjectByTeam, resolveProjectFromIssue, getIssuePrefix } from './projects.js';\n\nconst execAsync = promisify(exec);\n\nexport interface CreateRemoteWorkspaceOptions {\n dryRun?: boolean;\n spinner?: { text: string };\n}\n\n/**\n * Create a remote workspace on Fly.io\n */\nexport async function createRemoteWorkspace(\n issueId: string,\n options: CreateRemoteWorkspaceOptions = {}\n): Promise<RemoteWorkspaceMetadata> {\n const config = loadConfig();\n const remoteConfig = config.remote;\n\n if (!remoteConfig?.enabled) {\n throw new Error('Remote workspaces not enabled. Run `pan remote setup`');\n }\n\n const normalizedId = issueId.toLowerCase().replace(/[^a-z0-9-]/g, '-');\n const branchName = `feature/${normalizedId}`;\n const fly = createFlyProviderFromConfig(config.remote);\n\n // Determine project context\n const teamPrefix = extractTeamPrefix(issueId);\n const projectConfig = teamPrefix ? findProjectByTeam(teamPrefix) : null;\n const projectRoot = projectConfig?.path || process.cwd();\n\n // Determine project identifier for VM name\n let projectId = teamPrefix?.toLowerCase();\n if (!projectId && projectConfig && getIssuePrefix(projectConfig)) {\n projectId = getIssuePrefix(projectConfig)!.toLowerCase();\n }\n if (!projectId) {\n try {\n const { stdout } = await execAsync('git remote get-url origin', {\n cwd: projectRoot,\n encoding: 'utf-8',\n });\n const repoMatch = stdout.trim().match(/\\/([^\\/]+?)(\\.git)?$/);\n projectId = repoMatch ? repoMatch[1].toLowerCase().replace(/[^a-z0-9-]/g, '-') : 'proj';\n } catch {\n projectId = 'proj';\n }\n }\n\n // VM names must be valid hostnames (start with letter, alphanumeric + hyphens)\n const vmName = `${projectId}-${normalizedId}-ws`.toLowerCase().replace(/[^a-z0-9-]/g, '-');\n\n if (options.dryRun) {\n console.log(chalk.bold('Would create remote workspace:'));\n console.log(` VM: ${chalk.cyan(vmName)}`);\n console.log(` Project: ${chalk.dim(projectId)}`);\n console.log(` Branch: ${chalk.dim(branchName)}`);\n throw new Error('Dry run - not implemented in this module');\n }\n\n // Get git remote URL\n let repoUrl = '';\n try {\n const { stdout } = await execAsync('git remote get-url origin', {\n cwd: projectRoot,\n encoding: 'utf-8',\n });\n repoUrl = stdout.trim();\n } catch {\n throw new Error('Could not determine git remote URL. 
Make sure you are in a git repository with a remote origin.');\n }\n\n if (options.spinner) {\n options.spinner.text = 'Creating VM (this may take 1-2 minutes)...';\n }\n\n // Step 1: Create VM\n await fly.createVm(vmName);\n\n // Step 2: Add GitHub host key and clone repository on VM\n if (options.spinner) {\n options.spinner.text = 'Cloning repository on VM...';\n }\n await fly.ssh(vmName, 'mkdir -p ~/.ssh && ssh-keyscan -t ed25519,rsa github.com >> ~/.ssh/known_hosts 2>/dev/null');\n const cloneResult = await fly.ssh(vmName, `git clone ${repoUrl} ~/workspace`);\n if (cloneResult.exitCode !== 0) {\n await fly.deleteVm(vmName);\n throw new Error(`Failed to clone: ${cloneResult.stderr}`);\n }\n\n // Step 3: Create feature branch\n if (options.spinner) {\n options.spinner.text = 'Creating feature branch...';\n }\n const branchResult = await fly.ssh(vmName, `cd ~/workspace && git checkout -b ${branchName}`);\n if (branchResult.exitCode !== 0) {\n await fly.ssh(vmName, `cd ~/workspace && git checkout ${branchName} || git checkout -b ${branchName}`);\n }\n\n // Step 4: Configure environment for shared infra\n const dbName = `myn_${normalizedId.replace(/-/g, '_')}`;\n const envContent = `\n# Panopticon Remote Workspace\nWORKSPACE_ID=${normalizedId}\nISSUE_ID=${issueId.toUpperCase()}\n\nDATABASE_NAME=${dbName}\n`;\n\n await fly.ssh(vmName, `cat > ~/workspace/.env.remote << 'EOF'\n${envContent}\nEOF`);\n\n // Step 6: Install beads CLI globally on remote VM\n if (options.spinner) {\n options.spinner.text = 'Installing beads CLI...';\n }\n const bdInstalled = await fly.installBeads(vmName);\n if (bdInstalled) {\n await fly.initBeads(vmName, '~/workspace');\n }\n\n // Step 6.5: Copy essential skills to remote VM\n if (options.spinner) {\n options.spinner.text = 'Copying skills to remote VM...';\n }\n await fly.copySkillsToVm(vmName);\n\n // Step 7: Start containers if docker compose exists\n let containersStarted = false;\n let frontendUrl = '';\n let apiUrl = '';\n\n const composeCheck = await fly.ssh(vmName, 'ls ~/workspace/docker-compose.yml ~/workspace/.devcontainer/docker-compose.yml 2>/dev/null | head -1');\n\n if (composeCheck.stdout.trim()) {\n if (options.spinner) {\n options.spinner.text = 'Starting containers...';\n }\n const composeDir = composeCheck.stdout.includes('.devcontainer')\n ? 
'~/workspace/.devcontainer'\n : '~/workspace';\n\n const upResult = await fly.ssh(vmName, `cd ${composeDir} && docker compose up -d 2>&1`);\n containersStarted = upResult.exitCode === 0;\n\n if (containersStarted) {\n if (options.spinner) {\n options.spinner.text = 'Exposing ports...';\n }\n try {\n frontendUrl = await fly.exposePort(vmName, 4173);\n apiUrl = await fly.exposePort(vmName, 7000);\n } catch {\n // Port exposure failed - not critical\n }\n }\n }\n\n // Step 8: Save workspace metadata\n const metadata: RemoteWorkspaceMetadata = {\n id: normalizedId,\n issue: issueId.toUpperCase(),\n provider: 'fly',\n vmName,\n urls: {\n frontend: frontendUrl || undefined,\n api: apiUrl || undefined,\n },\n created: new Date(),\n location: 'remote',\n };\n\n saveWorkspaceMetadata(metadata);\n\n return metadata;\n}\n"],"mappings":";;;;;;;;;;;;;aAYyC;eAIqE;AAE9G,MAAM,YAAY,UAAU,KAAK;;;;AAUjC,eAAsB,sBACpB,SACA,UAAwC,EAAE,EACR;CAClC,MAAM,SAAS,YAAY;AAG3B,KAAI,CAFiB,OAAO,QAET,QACjB,OAAM,IAAI,MAAM,wDAAwD;CAG1E,MAAM,eAAe,QAAQ,aAAa,CAAC,QAAQ,eAAe,IAAI;CACtE,MAAM,aAAa,WAAW;CAC9B,MAAM,MAAM,4BAA4B,OAAO,OAAO;CAGtD,MAAM,aAAa,kBAAkB,QAAQ;CAC7C,MAAM,gBAAgB,aAAa,kBAAkB,WAAW,GAAG;CACnE,MAAM,cAAc,eAAe,QAAQ,QAAQ,KAAK;CAGxD,IAAI,YAAY,YAAY,aAAa;AACzC,KAAI,CAAC,aAAa,iBAAiB,eAAe,cAAc,CAC9D,aAAY,eAAe,cAAc,CAAE,aAAa;AAE1D,KAAI,CAAC,UACH,KAAI;EACF,MAAM,EAAE,WAAW,MAAM,UAAU,6BAA6B;GAC9D,KAAK;GACL,UAAU;GACX,CAAC;EACF,MAAM,YAAY,OAAO,MAAM,CAAC,MAAM,uBAAuB;AAC7D,cAAY,YAAY,UAAU,GAAG,aAAa,CAAC,QAAQ,eAAe,IAAI,GAAG;SAC3E;AACN,cAAY;;CAKhB,MAAM,SAAS,GAAG,UAAU,GAAG,aAAa,KAAK,aAAa,CAAC,QAAQ,eAAe,IAAI;AAE1F,KAAI,QAAQ,QAAQ;AAClB,UAAQ,IAAI,MAAM,KAAK,iCAAiC,CAAC;AACzD,UAAQ,IAAI,gBAAgB,MAAM,KAAK,OAAO,GAAG;AACjD,UAAQ,IAAI,gBAAgB,MAAM,IAAI,UAAU,GAAG;AACnD,UAAQ,IAAI,gBAAgB,MAAM,IAAI,WAAW,GAAG;AACpD,QAAM,IAAI,MAAM,2CAA2C;;CAI7D,IAAI,UAAU;AACd,KAAI;EACF,MAAM,EAAE,WAAW,MAAM,UAAU,6BAA6B;GAC9D,KAAK;GACL,UAAU;GACX,CAAC;AACF,YAAU,OAAO,MAAM;SACjB;AACN,QAAM,IAAI,MAAM,kGAAkG;;AAGpH,KAAI,QAAQ,QACV,SAAQ,QAAQ,OAAO;AAIzB,OAAM,IAAI,SAAS,OAAO;AAG1B,KAAI,QAAQ,QACV,SAAQ,QAAQ,OAAO;AAEzB,OAAM,IAAI,IAAI,QAAQ,6FAA6F;CACnH,MAAM,cAAc,MAAM,IAAI,IAAI,QAAQ,aAAa,QAAQ,cAAc;AAC7E,KAAI,YAAY,aAAa,GAAG;AAC9B,QAAM,IAAI,SAAS,OAAO;AAC1B,QAAM,IAAI,MAAM,oBAAoB,YAAY,SAAS;;AAI3D,KAAI,QAAQ,QACV,SAAQ,QAAQ,OAAO;AAGzB,MADqB,MAAM,IAAI,IAAI,QAAQ,qCAAqC,aAAa,EAC5E,aAAa,EAC5B,OAAM,IAAI,IAAI,QAAQ,kCAAkC,WAAW,sBAAsB,aAAa;CAIxG,MAAM,SAAS,OAAO,aAAa,QAAQ,MAAM,IAAI;CACrD,MAAM,aAAa;;eAEN,aAAa;WACjB,QAAQ,aAAa,CAAC;;gBAEjB,OAAO;;AAGrB,OAAM,IAAI,IAAI,QAAQ;EACtB,WAAW;KACR;AAGH,KAAI,QAAQ,QACV,SAAQ,QAAQ,OAAO;AAGzB,KADoB,MAAM,IAAI,aAAa,OAAO,CAEhD,OAAM,IAAI,UAAU,QAAQ,cAAc;AAI5C,KAAI,QAAQ,QACV,SAAQ,QAAQ,OAAO;AAEzB,OAAM,IAAI,eAAe,OAAO;CAGhC,IAAI,oBAAoB;CACxB,IAAI,cAAc;CAClB,IAAI,SAAS;CAEb,MAAM,eAAe,MAAM,IAAI,IAAI,QAAQ,uGAAuG;AAElJ,KAAI,aAAa,OAAO,MAAM,EAAE;AAC9B,MAAI,QAAQ,QACV,SAAQ,QAAQ,OAAO;EAEzB,MAAM,aAAa,aAAa,OAAO,SAAS,gBAAgB,GAC5D,8BACA;AAGJ,uBADiB,MAAM,IAAI,IAAI,QAAQ,MAAM,WAAW,+BAA+B,EAC1D,aAAa;AAE1C,MAAI,mBAAmB;AACrB,OAAI,QAAQ,QACV,SAAQ,QAAQ,OAAO;AAEzB,OAAI;AACF,kBAAc,MAAM,IAAI,WAAW,QAAQ,KAAK;AAChD,aAAS,MAAM,IAAI,WAAW,QAAQ,IAAK;WACrC;;;CAOZ,MAAM,WAAoC;EACxC,IAAI;EACJ,OAAO,QAAQ,aAAa;EAC5B,UAAU;EACV;EACA,MAAM;GACJ,UAAU,eAAe,KAAA;GACzB,KAAK,UAAU,KAAA;GAChB;EACD,yBAAS,IAAI,MAAM;EACnB,UAAU;EACX;AAED,uBAAsB,SAAS;AAE/B,QAAO"}
+ {"version":3,"file":"remote-workspace-CA33UuVI.js","names":[],"sources":["../src/lib/remote-workspace.ts"],"sourcesContent":["/**\n * Remote Workspace Creation\n *\n * Shared module for creating remote workspaces.\n * Used by both workspace.ts (explicit creation) and work/issue.ts (auto-creation).\n */\n\nimport chalk from 'chalk';\nimport { existsSync } from 'fs';\nimport { join } from 'path';\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport { loadConfig } from './config.js';\nimport { createFlyProviderFromConfig } from './remote/index.js';\nimport { saveWorkspaceMetadata } from './remote/workspace-metadata.js';\nimport type { RemoteWorkspaceMetadata } from './remote/interface.js';\nimport { extractTeamPrefix, findProjectByTeam, resolveProjectFromIssue, getIssuePrefix } from './projects.js';\n\nconst execAsync = promisify(exec);\n\nexport interface CreateRemoteWorkspaceOptions {\n dryRun?: boolean;\n spinner?: { text: string };\n}\n\n/**\n * Create a remote workspace on Fly.io\n */\nexport async function createRemoteWorkspace(\n issueId: string,\n options: CreateRemoteWorkspaceOptions = {}\n): Promise<RemoteWorkspaceMetadata> {\n const config = loadConfig();\n const remoteConfig = config.remote;\n\n if (!remoteConfig?.enabled) {\n throw new Error('Remote workspaces not enabled. Run `pan remote setup`');\n }\n\n const normalizedId = issueId.toLowerCase().replace(/[^a-z0-9-]/g, '-');\n const branchName = `feature/${normalizedId}`;\n const fly = createFlyProviderFromConfig(config.remote);\n\n // Determine project context\n const teamPrefix = extractTeamPrefix(issueId);\n const projectConfig = teamPrefix ? findProjectByTeam(teamPrefix) : null;\n const projectRoot = projectConfig?.path || process.cwd();\n\n // Determine project identifier for VM name\n let projectId = teamPrefix?.toLowerCase();\n if (!projectId && projectConfig && getIssuePrefix(projectConfig)) {\n projectId = getIssuePrefix(projectConfig)!.toLowerCase();\n }\n if (!projectId) {\n try {\n const { stdout } = await execAsync('git remote get-url origin', {\n cwd: projectRoot,\n encoding: 'utf-8',\n });\n const repoMatch = stdout.trim().match(/\\/([^\\/]+?)(\\.git)?$/);\n projectId = repoMatch ? repoMatch[1].toLowerCase().replace(/[^a-z0-9-]/g, '-') : 'proj';\n } catch {\n projectId = 'proj';\n }\n }\n\n // VM names must be valid hostnames (start with letter, alphanumeric + hyphens)\n const vmName = `${projectId}-${normalizedId}-ws`.toLowerCase().replace(/[^a-z0-9-]/g, '-');\n\n if (options.dryRun) {\n console.log(chalk.bold('Would create remote workspace:'));\n console.log(` VM: ${chalk.cyan(vmName)}`);\n console.log(` Project: ${chalk.dim(projectId)}`);\n console.log(` Branch: ${chalk.dim(branchName)}`);\n throw new Error('Dry run - not implemented in this module');\n }\n\n // Get git remote URL\n let repoUrl = '';\n try {\n const { stdout } = await execAsync('git remote get-url origin', {\n cwd: projectRoot,\n encoding: 'utf-8',\n });\n repoUrl = stdout.trim();\n } catch {\n throw new Error('Could not determine git remote URL. 
Make sure you are in a git repository with a remote origin.');\n }\n\n if (options.spinner) {\n options.spinner.text = 'Creating VM (this may take 1-2 minutes)...';\n }\n\n // Step 1: Create VM\n await fly.createVm(vmName);\n\n // Step 2: Add GitHub host key and clone repository on VM\n if (options.spinner) {\n options.spinner.text = 'Cloning repository on VM...';\n }\n await fly.ssh(vmName, 'mkdir -p ~/.ssh && ssh-keyscan -t ed25519,rsa github.com >> ~/.ssh/known_hosts 2>/dev/null');\n const cloneResult = await fly.ssh(vmName, `git clone ${repoUrl} ~/workspace`);\n if (cloneResult.exitCode !== 0) {\n await fly.deleteVm(vmName);\n throw new Error(`Failed to clone: ${cloneResult.stderr}`);\n }\n\n // Step 3: Create feature branch\n if (options.spinner) {\n options.spinner.text = 'Creating feature branch...';\n }\n const branchResult = await fly.ssh(vmName, `cd ~/workspace && git checkout -b ${branchName}`);\n if (branchResult.exitCode !== 0) {\n await fly.ssh(vmName, `cd ~/workspace && git checkout ${branchName} || git checkout -b ${branchName}`);\n }\n\n // Step 4: Configure environment for shared infra\n const dbName = `myn_${normalizedId.replace(/-/g, '_')}`;\n const envContent = `\n# Panopticon Remote Workspace\nWORKSPACE_ID=${normalizedId}\nISSUE_ID=${issueId.toUpperCase()}\n\nDATABASE_NAME=${dbName}\n`;\n\n await fly.ssh(vmName, `cat > ~/workspace/.env.remote << 'EOF'\n${envContent}\nEOF`);\n\n // Step 6: Install beads CLI globally on remote VM\n if (options.spinner) {\n options.spinner.text = 'Installing beads CLI...';\n }\n const bdInstalled = await fly.installBeads(vmName);\n if (bdInstalled) {\n await fly.initBeads(vmName, '~/workspace');\n }\n\n // Step 6.5: Copy essential skills to remote VM\n if (options.spinner) {\n options.spinner.text = 'Copying skills to remote VM...';\n }\n await fly.copySkillsToVm(vmName);\n\n // Step 7: Start containers if docker compose exists\n let containersStarted = false;\n let frontendUrl = '';\n let apiUrl = '';\n\n const composeCheck = await fly.ssh(vmName, 'ls ~/workspace/docker-compose.yml ~/workspace/.devcontainer/docker-compose.yml 2>/dev/null | head -1');\n\n if (composeCheck.stdout.trim()) {\n if (options.spinner) {\n options.spinner.text = 'Starting containers...';\n }\n const composeDir = composeCheck.stdout.includes('.devcontainer')\n ? 
'~/workspace/.devcontainer'\n : '~/workspace';\n\n const upResult = await fly.ssh(vmName, `cd ${composeDir} && docker compose up -d 2>&1`);\n containersStarted = upResult.exitCode === 0;\n\n if (containersStarted) {\n if (options.spinner) {\n options.spinner.text = 'Exposing ports...';\n }\n try {\n frontendUrl = await fly.exposePort(vmName, 4173);\n apiUrl = await fly.exposePort(vmName, 7000);\n } catch {\n // Port exposure failed - not critical\n }\n }\n }\n\n // Step 8: Save workspace metadata\n const metadata: RemoteWorkspaceMetadata = {\n id: normalizedId,\n issue: issueId.toUpperCase(),\n provider: 'fly',\n vmName,\n urls: {\n frontend: frontendUrl || undefined,\n api: apiUrl || undefined,\n },\n created: new Date(),\n location: 'remote',\n };\n\n saveWorkspaceMetadata(metadata);\n\n return metadata;\n}\n"],"mappings":";;;;;;;;;;;;;aAYyC;eAIqE;AAE9G,MAAM,YAAY,UAAU,KAAK;;;;AAUjC,eAAsB,sBACpB,SACA,UAAwC,EAAE,EACR;CAClC,MAAM,SAAS,YAAY;AAG3B,KAAI,CAFiB,OAAO,QAET,QACjB,OAAM,IAAI,MAAM,wDAAwD;CAG1E,MAAM,eAAe,QAAQ,aAAa,CAAC,QAAQ,eAAe,IAAI;CACtE,MAAM,aAAa,WAAW;CAC9B,MAAM,MAAM,4BAA4B,OAAO,OAAO;CAGtD,MAAM,aAAa,kBAAkB,QAAQ;CAC7C,MAAM,gBAAgB,aAAa,kBAAkB,WAAW,GAAG;CACnE,MAAM,cAAc,eAAe,QAAQ,QAAQ,KAAK;CAGxD,IAAI,YAAY,YAAY,aAAa;AACzC,KAAI,CAAC,aAAa,iBAAiB,eAAe,cAAc,CAC9D,aAAY,eAAe,cAAc,CAAE,aAAa;AAE1D,KAAI,CAAC,UACH,KAAI;EACF,MAAM,EAAE,WAAW,MAAM,UAAU,6BAA6B;GAC9D,KAAK;GACL,UAAU;GACX,CAAC;EACF,MAAM,YAAY,OAAO,MAAM,CAAC,MAAM,uBAAuB;AAC7D,cAAY,YAAY,UAAU,GAAG,aAAa,CAAC,QAAQ,eAAe,IAAI,GAAG;SAC3E;AACN,cAAY;;CAKhB,MAAM,SAAS,GAAG,UAAU,GAAG,aAAa,KAAK,aAAa,CAAC,QAAQ,eAAe,IAAI;AAE1F,KAAI,QAAQ,QAAQ;AAClB,UAAQ,IAAI,MAAM,KAAK,iCAAiC,CAAC;AACzD,UAAQ,IAAI,gBAAgB,MAAM,KAAK,OAAO,GAAG;AACjD,UAAQ,IAAI,gBAAgB,MAAM,IAAI,UAAU,GAAG;AACnD,UAAQ,IAAI,gBAAgB,MAAM,IAAI,WAAW,GAAG;AACpD,QAAM,IAAI,MAAM,2CAA2C;;CAI7D,IAAI,UAAU;AACd,KAAI;EACF,MAAM,EAAE,WAAW,MAAM,UAAU,6BAA6B;GAC9D,KAAK;GACL,UAAU;GACX,CAAC;AACF,YAAU,OAAO,MAAM;SACjB;AACN,QAAM,IAAI,MAAM,kGAAkG;;AAGpH,KAAI,QAAQ,QACV,SAAQ,QAAQ,OAAO;AAIzB,OAAM,IAAI,SAAS,OAAO;AAG1B,KAAI,QAAQ,QACV,SAAQ,QAAQ,OAAO;AAEzB,OAAM,IAAI,IAAI,QAAQ,6FAA6F;CACnH,MAAM,cAAc,MAAM,IAAI,IAAI,QAAQ,aAAa,QAAQ,cAAc;AAC7E,KAAI,YAAY,aAAa,GAAG;AAC9B,QAAM,IAAI,SAAS,OAAO;AAC1B,QAAM,IAAI,MAAM,oBAAoB,YAAY,SAAS;;AAI3D,KAAI,QAAQ,QACV,SAAQ,QAAQ,OAAO;AAGzB,MADqB,MAAM,IAAI,IAAI,QAAQ,qCAAqC,aAAa,EAC5E,aAAa,EAC5B,OAAM,IAAI,IAAI,QAAQ,kCAAkC,WAAW,sBAAsB,aAAa;CAIxG,MAAM,SAAS,OAAO,aAAa,QAAQ,MAAM,IAAI;CACrD,MAAM,aAAa;;eAEN,aAAa;WACjB,QAAQ,aAAa,CAAC;;gBAEjB,OAAO;;AAGrB,OAAM,IAAI,IAAI,QAAQ;EACtB,WAAW;KACR;AAGH,KAAI,QAAQ,QACV,SAAQ,QAAQ,OAAO;AAGzB,KADoB,MAAM,IAAI,aAAa,OAAO,CAEhD,OAAM,IAAI,UAAU,QAAQ,cAAc;AAI5C,KAAI,QAAQ,QACV,SAAQ,QAAQ,OAAO;AAEzB,OAAM,IAAI,eAAe,OAAO;CAGhC,IAAI,oBAAoB;CACxB,IAAI,cAAc;CAClB,IAAI,SAAS;CAEb,MAAM,eAAe,MAAM,IAAI,IAAI,QAAQ,uGAAuG;AAElJ,KAAI,aAAa,OAAO,MAAM,EAAE;AAC9B,MAAI,QAAQ,QACV,SAAQ,QAAQ,OAAO;EAEzB,MAAM,aAAa,aAAa,OAAO,SAAS,gBAAgB,GAC5D,8BACA;AAGJ,uBADiB,MAAM,IAAI,IAAI,QAAQ,MAAM,WAAW,+BAA+B,EAC1D,aAAa;AAE1C,MAAI,mBAAmB;AACrB,OAAI,QAAQ,QACV,SAAQ,QAAQ,OAAO;AAEzB,OAAI;AACF,kBAAc,MAAM,IAAI,WAAW,QAAQ,KAAK;AAChD,aAAS,MAAM,IAAI,WAAW,QAAQ,IAAK;WACrC;;;CAOZ,MAAM,WAAoC;EACxC,IAAI;EACJ,OAAO,QAAQ,aAAa;EAC5B,UAAU;EACV;EACA,MAAM;GACJ,UAAU,eAAe,KAAA;GACzB,KAAK,UAAU,KAAA;GAChB;EACD,yBAAS,IAAI,MAAM;EACnB,UAAU;EACX;AAED,uBAAsB,SAAS;AAE/B,QAAO"}
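Only the map's file field changes here, tracking the chunk rename; the embedded remote-workspace.ts source is identical on both sides. For reference, a standalone TypeScript sketch of the VM-naming logic that source contains, reduced to the git-remote fallback path (the function name and example inputs are hypothetical):

// Sketch of the VM-name derivation from src/lib/remote-workspace.ts as embedded
// in the sourcemap above; only the git-remote fallback for projectId is shown
// (the real code prefers a team prefix or configured issue prefix first).
function vmNameFor(issueId: string, remoteUrl: string): string {
  // Normalize the issue id: lowercase, anything outside [a-z0-9-] becomes '-'
  const normalizedId = issueId.toLowerCase().replace(/[^a-z0-9-]/g, '-');
  // Fallback project id: last path segment of the git remote, minus '.git'
  const repoMatch = remoteUrl.trim().match(/\/([^\/]+?)(\.git)?$/);
  const projectId = repoMatch ? repoMatch[1].toLowerCase().replace(/[^a-z0-9-]/g, '-') : 'proj';
  // VM names must be valid hostnames (alphanumeric + hyphens)
  return `${projectId}-${normalizedId}-ws`.toLowerCase().replace(/[^a-z0-9-]/g, '-');
}

// e.g. vmNameFor('PAN-451', 'git@github.com:acme/panopticon.git') → 'panopticon-pan-451-ws'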
package/dist/{review-status-2TdtHNcs.js → review-status-D6H2WOw8.js}
@@ -1,3 +1,3 @@
- import { i as loadReviewStatuses, n as getReviewStatus, o as setReviewStatus, r as init_review_status, t as clearReviewStatus } from "./review-status-Bm1bWNEa.js";
+ import { i as loadReviewStatuses, n as getReviewStatus, o as setReviewStatus, r as init_review_status, t as clearReviewStatus } from "./review-status-DEDvCKMP.js";
  init_review_status();
  export { clearReviewStatus, getReviewStatus, loadReviewStatuses, setReviewStatus };
package/dist/{review-status-Bm1bWNEa.js → review-status-DEDvCKMP.js}
@@ -1,6 +1,6 @@
  import { t as __esmMin } from "./chunk-ruWRV7i3.js";
- import { U as getPanopticonHome, W as init_paths } from "./paths-lMaxrYtT.js";
- import { n as notifyPipeline, t as init_pipeline_notifier } from "./pipeline-notifier-OJ-d3Y60.js";
+ import { G as init_paths, W as getPanopticonHome } from "./paths-CDJ_HsbN.js";
+ import { n as notifyPipeline, t as init_pipeline_notifier } from "./pipeline-notifier-XgDdCdvT.js";
  import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
  import { dirname, join } from "path";
  import { homedir } from "os";
@@ -550,7 +550,7 @@ function setReviewStatus(issueId, update, filePath = DEFAULT_STATUS_FILE) {
  };
  if (readyForMerge && !existing.readyForMerge && updated.prUrl) (async () => {
  try {
- const { isGitHubAppConfigured, reportCommitStatus } = await import("./github-app-CHKwxOeQ.js");
+ const { isGitHubAppConfigured, reportCommitStatus } = await import("./github-app-DykduJ0X.js");
  if (!isGitHubAppConfigured()) return;
  const prMatch = updated.prUrl.match(/github\.com\/([^/]+)\/([^/]+)\/pull/);
  if (!prMatch) return;
@@ -583,6 +583,46 @@ function setReviewStatus(issueId, update, filePath = DEFAULT_STATUS_FILE) {
  issueId,
  status: updated
  });
+ if (update.reviewStatus === "passed" && existing.reviewStatus !== "passed" && existing.testStatus === "pending") (async () => {
+ try {
+ const { submitToSpecialistQueue } = await import("./specialists-Bfb9ATzw.js");
+ const workAgentId = `agent-${issueId.toLowerCase()}`;
+ const workStateFile = join(homedir(), ".panopticon", "agents", workAgentId, "state.json");
+ let workspace;
+ let branch;
+ if (existsSync(workStateFile)) try {
+ const workState = JSON.parse(readFileSync(workStateFile, "utf-8"));
+ workspace = workState.workspace;
+ branch = workState.branch || `feature/${issueId.toLowerCase()}`;
+ } catch {}
+ submitToSpecialistQueue("test-agent", {
+ priority: "high",
+ source: "review-agent-auto",
+ issueId,
+ workspace,
+ branch
+ });
+ console.log(`[review-status] Queued test-agent for ${issueId} after review passed`);
+ } catch (err) {
+ console.warn(`[review-status] Failed to queue test-agent for ${issueId}: ${err.message}`);
+ }
+ })();
+ if ((update.reviewStatus === "blocked" || update.testStatus === "failed") && (update.reviewStatus !== existing.reviewStatus || update.testStatus !== existing.testStatus)) {
+ const agentSession = `agent-${issueId.toLowerCase()}`;
+ (async () => {
+ try {
+ const { sessionExists } = await import("./tmux-CBtui_Cl.js");
+ if (!sessionExists(agentSession)) return;
+ const statusType = update.reviewStatus === "blocked" ? "REVIEW BLOCKED" : "TESTS FAILED";
+ const msg = `SPECIALIST FEEDBACK: ${statusType} for ${issueId}.\n\n${update.reviewNotes || update.testNotes || "No details provided."}\n\nFix the issues, then run: pan work done ${issueId}`;
+ const { messageAgent } = await import("./agents-D_2oRFVf.js");
+ await messageAgent(agentSession, msg);
+ console.log(`[review-status] Auto-delivered ${statusType} feedback to ${agentSession}`);
+ } catch (err) {
+ console.warn(`[review-status] Failed to auto-deliver feedback to ${agentSession}: ${err.message}`);
+ }
+ })();
+ }
  return updated;
  }
  function getReviewStatus(issueId, filePath = DEFAULT_STATUS_FILE) {
@@ -611,4 +651,4 @@ var init_review_status = __esmMin((() => {
  //#endregion
  export { saveReviewStatuses as a, getDatabase as c, loadReviewStatuses as i, init_database as l, getReviewStatus as n, setReviewStatus as o, init_review_status as r, closeDatabase as s, clearReviewStatus as t };
 
- //# sourceMappingURL=review-status-Bm1bWNEa.js.map
+ //# sourceMappingURL=review-status-DEDvCKMP.js.map
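De-minified, the forty added lines above wire two side effects into setReviewStatus(). A readable TypeScript sketch reconstructed from the bundled diff; the un-hashed module specifiers are assumptions (the shipped code imports the hashed chunks specialists-Bfb9ATzw.js, tmux-CBtui_Cl.js, and agents-D_2oRFVf.js), and the try/catch logging around each step is elided:

// Sketch of the side effects added to setReviewStatus(); module paths are
// assumed source-level names, not the hashed dist chunks actually imported.
interface StatusFields { reviewStatus?: string; testStatus?: string; reviewNotes?: string; testNotes?: string; }

async function onStatusTransition(issueId: string, existing: StatusFields, update: StatusFields): Promise<void> {
  // 1. Review just passed while tests are still pending: queue the test
  //    specialist automatically instead of waiting for a manual kick-off.
  if (update.reviewStatus === "passed" && existing.reviewStatus !== "passed" && existing.testStatus === "pending") {
    const { submitToSpecialistQueue } = await import("./specialists.js");
    submitToSpecialistQueue("test-agent", { priority: "high", source: "review-agent-auto", issueId });
  }
  // 2. Review blocked or tests failed: push the specialist's notes straight
  //    into the work agent's tmux session so the agent can fix and re-submit.
  const changed = update.reviewStatus !== existing.reviewStatus || update.testStatus !== existing.testStatus;
  if ((update.reviewStatus === "blocked" || update.testStatus === "failed") && changed) {
    const session = `agent-${issueId.toLowerCase()}`;
    const { sessionExists } = await import("./tmux.js");
    if (!sessionExists(session)) return;
    const kind = update.reviewStatus === "blocked" ? "REVIEW BLOCKED" : "TESTS FAILED";
    const { messageAgent } = await import("./agents.js");
    await messageAgent(session, `SPECIALIST FEEDBACK: ${kind} for ${issueId}.\n\n${update.reviewNotes || update.testNotes || "No details provided."}\n\nFix the issues, then run: pan work done ${issueId}`);
  }
}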
package/dist/{review-status-Bm1bWNEa.js.map → review-status-DEDvCKMP.js.map}
@@ -1 +1 @@
- {"version":3,"file":"review-status-Bm1bWNEa.js","names":[],"sources":["../src/lib/database/schema.ts","../src/lib/database/index.ts","../src/lib/database/review-status-db.ts","../src/lib/review-status.ts"],"sourcesContent":["/**\n * Panopticon Database Schema\n *\n * Defines the unified schema for panopticon.db.\n * All persistent application state lives here.\n */\n\nimport type Database from 'better-sqlite3';\n\n// Schema version — increment when making breaking schema changes\nexport const SCHEMA_VERSION = 13;\n\n/**\n * Initialize the complete database schema.\n * Idempotent — uses CREATE TABLE IF NOT EXISTS throughout.\n */\nexport function initSchema(db: Database.Database): void {\n db.exec(`\n -- ===== Cost Events =====\n CREATE TABLE IF NOT EXISTS cost_events (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n ts TEXT NOT NULL,\n agent_id TEXT NOT NULL,\n issue_id TEXT NOT NULL,\n session_type TEXT NOT NULL DEFAULT 'unknown',\n provider TEXT NOT NULL DEFAULT 'anthropic',\n model TEXT NOT NULL,\n input INTEGER NOT NULL DEFAULT 0,\n output INTEGER NOT NULL DEFAULT 0,\n cache_read INTEGER NOT NULL DEFAULT 0,\n cache_write INTEGER NOT NULL DEFAULT 0,\n cost REAL NOT NULL DEFAULT 0,\n request_id TEXT,\n session_id TEXT, -- Claude Code session UUID (for reconciler offset tracking)\n -- TLDR metrics\n tldr_interceptions INTEGER,\n tldr_bypasses INTEGER,\n tldr_tokens_saved INTEGER,\n tldr_bypass_reasons TEXT, -- JSON string\n -- WAL source tracking\n source_file TEXT -- path of WAL file this came from (for imports)\n );\n\n CREATE UNIQUE INDEX IF NOT EXISTS idx_cost_request_id\n ON cost_events(request_id) WHERE request_id IS NOT NULL;\n\n CREATE INDEX IF NOT EXISTS idx_cost_issue_id\n ON cost_events(issue_id, ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_agent_id\n ON cost_events(agent_id, ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_ts\n ON cost_events(ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_session_id\n ON cost_events(session_id) WHERE session_id IS NOT NULL;\n\n -- ===== Review Status =====\n CREATE TABLE IF NOT EXISTS review_status (\n issue_id TEXT PRIMARY KEY,\n review_status TEXT NOT NULL DEFAULT 'pending',\n test_status TEXT NOT NULL DEFAULT 'pending',\n merge_status TEXT,\n verification_status TEXT,\n verification_notes TEXT,\n verification_cycle_count INTEGER DEFAULT 0,\n verification_max_cycles INTEGER,\n review_notes TEXT,\n test_notes TEXT,\n merge_notes TEXT,\n updated_at TEXT NOT NULL,\n ready_for_merge INTEGER NOT NULL DEFAULT 0,\n auto_requeue_count INTEGER DEFAULT 0,\n pr_url TEXT\n );\n\n CREATE INDEX IF NOT EXISTS idx_review_status_updated\n ON review_status(updated_at);\n\n -- ===== Status History =====\n CREATE TABLE IF NOT EXISTS status_history (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n issue_id TEXT NOT NULL,\n type TEXT NOT NULL, -- 'review', 'test', 'merge'\n status TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n notes TEXT,\n FOREIGN KEY (issue_id) REFERENCES review_status(issue_id) ON DELETE CASCADE\n );\n\n CREATE INDEX IF NOT EXISTS idx_status_history_issue\n ON status_history(issue_id, timestamp);\n\n -- UNIQUE constraint enables INSERT OR IGNORE deduplication in upsertReviewStatus\n CREATE UNIQUE INDEX IF NOT EXISTS idx_status_history_unique\n ON status_history(issue_id, type, status, timestamp);\n\n -- ===== Health Events =====\n CREATE TABLE IF NOT EXISTS health_events (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n agent_id TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n state TEXT NOT NULL,\n previous_state TEXT,\n source TEXT,\n metadata TEXT -- JSON string\n 
);\n\n CREATE INDEX IF NOT EXISTS idx_health_agent_timestamp\n ON health_events(agent_id, timestamp);\n\n CREATE INDEX IF NOT EXISTS idx_health_timestamp\n ON health_events(timestamp);\n\n -- ===== Processed Sessions (for reconciler offset tracking) =====\n CREATE TABLE IF NOT EXISTS processed_sessions (\n session_id TEXT PRIMARY KEY,\n agent_id TEXT,\n issue_id TEXT,\n transcript_path TEXT, -- full path to the .jsonl file\n byte_offset INTEGER NOT NULL DEFAULT 0, -- bytes consumed so far\n processed_at TEXT NOT NULL,\n event_count INTEGER NOT NULL DEFAULT 0\n );\n\n -- ===== API Cache =====\n CREATE TABLE IF NOT EXISTS api_cache (\n key TEXT PRIMARY KEY,\n value TEXT NOT NULL, -- JSON string\n expires_at TEXT,\n created_at TEXT NOT NULL\n );\n\n -- ===== Rate Limits =====\n CREATE TABLE IF NOT EXISTS rate_limits (\n service TEXT PRIMARY KEY,\n requests INTEGER NOT NULL DEFAULT 0,\n window_start TEXT NOT NULL,\n limit_per_window INTEGER NOT NULL DEFAULT 1000\n );\n\n -- ===== Domain Events (PAN-428: push-first architecture) =====\n CREATE TABLE IF NOT EXISTS events (\n sequence INTEGER PRIMARY KEY AUTOINCREMENT,\n type TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n payload TEXT NOT NULL -- JSON\n );\n\n CREATE INDEX IF NOT EXISTS idx_events_type\n ON events(type);\n\n CREATE INDEX IF NOT EXISTS idx_events_timestamp\n ON events(timestamp);\n\n -- ===== Projection Cache (PAN-437: instant dashboard startup) =====\n CREATE TABLE IF NOT EXISTS projection_cache (\n key TEXT PRIMARY KEY,\n data TEXT NOT NULL, -- JSON-serialized DashboardSnapshot\n sequence INTEGER NOT NULL, -- Last event sequence applied\n updated_at TEXT NOT NULL -- ISO timestamp\n );\n\n -- ===== Conversations (PAN-416: Mission Control conversation launcher) =====\n CREATE TABLE IF NOT EXISTS conversations (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n name TEXT NOT NULL UNIQUE,\n tmux_session TEXT NOT NULL,\n status TEXT NOT NULL DEFAULT 'active', -- 'active', 'ended'\n cwd TEXT NOT NULL,\n issue_id TEXT, -- optional cost attribution\n created_at TEXT NOT NULL,\n ended_at TEXT,\n last_attached_at TEXT,\n session_file TEXT, -- path to Claude Code JSONL session file (PAN-451)\n title TEXT, -- human-readable title, auto-set from first message\n title_source TEXT, -- 'auto', 'ai', or 'manual'\n title_seed TEXT, -- original auto-generated title for replacement check\n total_cost REAL DEFAULT 0, -- cached total cost in USD\n archived_at TEXT, -- ISO timestamp when archived, null = active\n model TEXT, -- model used to spawn conversation (e.g. 'minimax-m2.7-highspeed')\n effort TEXT -- effort level (e.g. 
'low', 'medium', 'high')\n );\n\n CREATE INDEX IF NOT EXISTS idx_conversations_status\n ON conversations(status);\n\n CREATE INDEX IF NOT EXISTS idx_conversations_created_at\n ON conversations(created_at);\n `);\n\n // Record schema version\n db.pragma(`user_version = ${SCHEMA_VERSION}`);\n}\n\n/**\n * Run schema migrations if the database version is older than SCHEMA_VERSION.\n * This function handles upgrading from older schema versions.\n */\nexport function runMigrations(db: Database.Database): void {\n const currentVersion = db.pragma('user_version', { simple: true }) as number;\n\n if (currentVersion === SCHEMA_VERSION) {\n return; // Already at latest version\n }\n\n if (currentVersion === 0) {\n // Fresh database — just initialize the full schema\n initSchema(db);\n return;\n }\n\n // v1 → v2: add UNIQUE index on status_history for INSERT OR IGNORE dedup\n if (currentVersion < 2) {\n // Remove duplicate rows before adding the unique index (keep lowest id per unique key)\n db.exec(`\n DELETE FROM status_history\n WHERE id NOT IN (\n SELECT MIN(id)\n FROM status_history\n GROUP BY issue_id, type, status, timestamp\n );\n CREATE UNIQUE INDEX IF NOT EXISTS idx_status_history_unique\n ON status_history(issue_id, type, status, timestamp);\n `);\n }\n\n // v2 → v3: add session_id to cost_events, extend processed_sessions for reconciler\n if (currentVersion < 3) {\n // Add session_id column to cost_events (nullable, no data loss)\n try {\n db.exec(`ALTER TABLE cost_events ADD COLUMN session_id TEXT`);\n } catch {\n // Column may already exist if schema was manually applied\n }\n\n // Add index on session_id\n db.exec(`\n CREATE INDEX IF NOT EXISTS idx_cost_session_id\n ON cost_events(session_id) WHERE session_id IS NOT NULL;\n `);\n\n // Extend processed_sessions with new columns for reconciler\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN agent_id TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN issue_id TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN transcript_path TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN byte_offset INTEGER NOT NULL DEFAULT 0`);\n } catch { /* already exists */ }\n }\n\n // v3 → v4: add events table for push-first architecture (PAN-428)\n if (currentVersion < 4) {\n db.exec(`\n CREATE TABLE IF NOT EXISTS events (\n sequence INTEGER PRIMARY KEY AUTOINCREMENT,\n type TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n payload TEXT NOT NULL -- JSON\n );\n\n CREATE INDEX IF NOT EXISTS idx_events_type\n ON events(type);\n\n CREATE INDEX IF NOT EXISTS idx_events_timestamp\n ON events(timestamp);\n `);\n }\n\n // v4 → v5: add projection_cache table (PAN-437: instant dashboard startup)\n if (currentVersion < 5) {\n db.exec(`\n CREATE TABLE IF NOT EXISTS projection_cache (\n key TEXT PRIMARY KEY,\n data TEXT NOT NULL,\n sequence INTEGER NOT NULL,\n updated_at TEXT NOT NULL\n );\n `);\n }\n\n // v5 → v6: add conversations table (PAN-416)\n if (currentVersion < 6) {\n db.exec(`\n CREATE TABLE IF NOT EXISTS conversations (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n name TEXT NOT NULL UNIQUE,\n tmux_session TEXT NOT NULL,\n status TEXT NOT NULL DEFAULT 'active',\n cwd TEXT NOT NULL,\n issue_id TEXT,\n created_at TEXT NOT NULL,\n ended_at TEXT,\n last_attached_at TEXT\n );\n\n CREATE INDEX IF NOT EXISTS idx_conversations_status\n ON conversations(status);\n\n CREATE INDEX IF NOT EXISTS 
idx_conversations_created_at\n ON conversations(created_at);\n `);\n }\n\n // v6 → v7: add session_file column to conversations (PAN-451)\n if (currentVersion < 7) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN session_file TEXT`);\n } catch { /* already exists */ }\n }\n\n // v7 → v8: add title column to conversations (auto-set from first message)\n if (currentVersion < 8) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN title TEXT`);\n } catch { /* already exists */ }\n }\n\n // v8 → v9: add title_source and title_seed columns to conversations\n // title_source tracks how the title was set: 'auto' (truncated first message),\n // 'ai' (Claude-generated), or 'manual' (user renamed). Used for T3Code-style\n // canReplaceThreadTitle logic — only auto-generated titles get AI replacement.\n // title_seed stores the original truncated message for replacement eligibility.\n if (currentVersion < 9) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN title_source TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN title_seed TEXT`);\n } catch { /* already exists */ }\n }\n\n // v9 → v10: add total_cost column to conversations (cached cost in USD)\n if (currentVersion < 10) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN total_cost REAL DEFAULT 0`);\n } catch { /* already exists */ }\n }\n\n // v10 → v11: expression index for UPPER(issue_id) on cost_events\n // The N+1 queries in getCostsByIssueFromDb use UPPER(issue_id) which defeats\n // the existing idx_cost_issue_id index. This expression index fixes that.\n if (currentVersion < 11) {\n try {\n db.exec(`CREATE INDEX IF NOT EXISTS idx_cost_issue_upper ON cost_events(UPPER(issue_id))`);\n } catch { /* already exists */ }\n }\n\n // v11 → v12: archived_at column + index for conversations (T3Code pattern)\n if (currentVersion < 12) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN archived_at TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`CREATE INDEX IF NOT EXISTS idx_conversations_archived ON conversations(archived_at)`);\n } catch { /* already exists */ }\n }\n\n // v12 → v13: add model + effort columns to conversations (preserve model on resume)\n if (currentVersion < 13) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN model TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN effort TEXT`);\n } catch { /* already exists */ }\n }\n\n // After all migrations, set the version\n db.pragma(`user_version = ${SCHEMA_VERSION}`);\n}\n","/**\n * Panopticon Unified Database\n *\n * Single panopticon.db at ~/.panopticon/panopticon.db.\n * Singleton pattern — one connection shared across the process.\n *\n * IMPORTANT: This module is safe to import in both server and CLI contexts.\n * Never use execSync here — this is synchronous SQLite, not a subprocess.\n *\n * Dual-runtime (PAN-428):\n * - Bun: uses bun:sqlite (better-sqlite3 is a native addon — ERR_DLOPEN_FAILED in Bun)\n * - Node: uses better-sqlite3\n * In both cases the external API is identical: pragma(), exec(), prepare(), close().\n */\n\nimport type Database from 'better-sqlite3';\nimport { createRequire } from 'module';\nimport { join } from 'path';\nimport { existsSync, mkdirSync } from 'fs';\nimport { getPanopticonHome } from '../paths.js';\nimport { runMigrations } from './schema.js';\n\ndeclare const Bun: unknown;\n\nfunction isBunRuntime(): boolean {\n return typeof Bun !== 'undefined';\n}\n\n// createRequire allows synchronous 
require() in ESM — works in both Bun and Node\nconst _require = createRequire(import.meta.url);\n\nlet _db: Database.Database | null = null;\n\n/**\n * Get the path to panopticon.db (dynamic, respects PANOPTICON_HOME override for tests)\n */\nexport function getDatabasePath(): string {\n return join(getPanopticonHome(), 'panopticon.db');\n}\n\n/**\n * Initialize and return the singleton database connection.\n * Safe to call multiple times — returns the existing connection after first call.\n */\nexport function getDatabase(): Database.Database {\n if (_db) {\n return _db;\n }\n\n const home = getPanopticonHome();\n if (!existsSync(home)) {\n mkdirSync(home, { recursive: true });\n }\n\n const dbPath = getDatabasePath();\n\n if (isBunRuntime()) {\n // better-sqlite3 is a native Node.js addon that fails in Bun with ERR_DLOPEN_FAILED.\n // Use bun:sqlite instead, with a pragma() shim for API compatibility.\n const { Database: BunDatabase } = _require('bun:sqlite') as { Database: new (path: string) => any };\n const bunDb = new BunDatabase(dbPath);\n\n // bun:sqlite has no pragma() method — shim it using exec() and query().get()\n bunDb.pragma = function (sql: string, options?: { simple?: boolean }): any {\n if (options?.simple) {\n // Read-only: return the scalar value directly (e.g. db.pragma('user_version', { simple: true }))\n const key = sql.trim();\n const row = bunDb.query(`PRAGMA ${key}`).get() as Record<string, unknown> | null;\n return row?.[key] ?? null;\n }\n // Set or no-return pragma (e.g. 'journal_mode = WAL', 'foreign_keys = ON')\n bunDb.exec(`PRAGMA ${sql}`);\n return undefined;\n };\n\n _db = bunDb as Database.Database;\n } else {\n // Node.js path: load better-sqlite3 lazily (avoids import-time native addon load)\n const BetterSqlite3 = _require('better-sqlite3');\n _db = new BetterSqlite3(dbPath) as Database.Database;\n }\n\n // Enable WAL mode for concurrent readers + single writer\n _db.pragma('journal_mode = WAL');\n // Enforce foreign keys\n _db.pragma('foreign_keys = ON');\n // Write-ahead log synchronization — NORMAL is safe and fast\n _db.pragma('synchronous = NORMAL');\n\n // Initialize or migrate schema\n runMigrations(_db);\n\n return _db;\n}\n\n/**\n * Close the database connection and release the singleton.\n * Primarily used in tests to get a fresh connection.\n */\nexport function closeDatabase(): void {\n if (_db) {\n _db.close();\n _db = null;\n }\n}\n\n/**\n * Force re-initialization of the database connection.\n * Used in tests after PANOPTICON_HOME changes.\n */\nexport function resetDatabase(): void {\n closeDatabase();\n}\n","/**\n * Review Status SQLite Storage\n *\n * Provides SQLite-backed CRUD for ReviewStatus, matching the interface in\n * src/lib/review-status.ts. 
Atomic single-transaction writes eliminate the\n * TOCTOU race in the JSON-backed implementation.\n */\n\nimport { getDatabase } from './index.js';\nimport type { ReviewStatus, StatusHistoryEntry } from '../review-status.js';\n\n// ============== Write operations ==============\n\n/**\n * Upsert a review status record atomically.\n * Replaces the JSON read-modify-write cycle with a single transaction.\n */\nexport function upsertReviewStatus(status: ReviewStatus): void {\n const db = getDatabase();\n\n const upsert = db.transaction((s: ReviewStatus) => {\n // Upsert main record\n db.prepare(`\n INSERT INTO review_status (\n issue_id, review_status, test_status, merge_status,\n verification_status, verification_notes,\n verification_cycle_count, verification_max_cycles,\n review_notes, test_notes, merge_notes,\n updated_at, ready_for_merge, auto_requeue_count, pr_url\n ) VALUES (\n ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?\n )\n ON CONFLICT(issue_id) DO UPDATE SET\n review_status = excluded.review_status,\n test_status = excluded.test_status,\n merge_status = excluded.merge_status,\n verification_status = excluded.verification_status,\n verification_notes = excluded.verification_notes,\n verification_cycle_count = excluded.verification_cycle_count,\n verification_max_cycles = excluded.verification_max_cycles,\n review_notes = excluded.review_notes,\n test_notes = excluded.test_notes,\n merge_notes = excluded.merge_notes,\n updated_at = excluded.updated_at,\n ready_for_merge = excluded.ready_for_merge,\n auto_requeue_count = excluded.auto_requeue_count,\n pr_url = excluded.pr_url\n `).run(\n s.issueId,\n s.reviewStatus,\n s.testStatus,\n s.mergeStatus ?? null,\n s.verificationStatus ?? null,\n s.verificationNotes ?? null,\n s.verificationCycleCount ?? null,\n s.verificationMaxCycles ?? null,\n s.reviewNotes ?? null,\n s.testNotes ?? null,\n s.mergeNotes ?? null,\n s.updatedAt,\n s.readyForMerge ? 1 : 0,\n s.autoRequeueCount ?? null,\n s.prUrl ?? null,\n );\n\n // Append new history entries (deduplicate by timestamp to avoid re-inserting)\n if (s.history && s.history.length > 0) {\n const insertHistory = db.prepare(`\n INSERT OR IGNORE INTO status_history (issue_id, type, status, timestamp, notes)\n VALUES (?, ?, ?, ?, ?)\n `);\n for (const entry of s.history) {\n insertHistory.run(s.issueId, entry.type, entry.status, entry.timestamp, entry.notes ?? 
null);\n }\n }\n });\n\n upsert(status);\n}\n\n/**\n * Delete a review status record and its history.\n */\nexport function deleteReviewStatus(issueId: string): void {\n const db = getDatabase();\n db.prepare('DELETE FROM review_status WHERE issue_id = ?').run(issueId);\n}\n\n// ============== Read operations ==============\n\n/**\n * Get a single review status by issue ID.\n */\nexport function getReviewStatusFromDb(issueId: string): ReviewStatus | null {\n const db = getDatabase();\n\n const row = db.prepare(`\n SELECT * FROM review_status WHERE issue_id = ?\n `).get(issueId) as DbReviewStatusRow | undefined;\n\n if (!row) return null;\n\n const history = getHistoryFromDb(issueId);\n return rowToReviewStatus(row, history);\n}\n\n/**\n * Get all review statuses.\n */\nexport function getAllReviewStatusesFromDb(): Record<string, ReviewStatus> {\n const db = getDatabase();\n\n const rows = db.prepare('SELECT * FROM review_status ORDER BY updated_at DESC').all() as DbReviewStatusRow[];\n const result: Record<string, ReviewStatus> = {};\n\n for (const row of rows) {\n const history = getHistoryFromDb(row.issue_id);\n result[row.issue_id] = rowToReviewStatus(row, history);\n }\n\n return result;\n}\n\n/**\n * Get history entries for an issue.\n */\nfunction getHistoryFromDb(issueId: string): StatusHistoryEntry[] {\n const db = getDatabase();\n const rows = db.prepare(`\n SELECT type, status, timestamp, notes\n FROM status_history\n WHERE issue_id = ?\n ORDER BY timestamp ASC\n `).all(issueId) as Array<{ type: string; status: string; timestamp: string; notes: string | null }>;\n\n return rows.map(r => ({\n type: r.type as 'review' | 'test' | 'merge',\n status: r.status,\n timestamp: r.timestamp,\n ...(r.notes ? { notes: r.notes } : {}),\n }));\n}\n\n// ============== Row mapping ==============\n\ninterface DbReviewStatusRow {\n issue_id: string;\n review_status: string;\n test_status: string;\n merge_status: string | null;\n verification_status: string | null;\n verification_notes: string | null;\n verification_cycle_count: number | null;\n verification_max_cycles: number | null;\n review_notes: string | null;\n test_notes: string | null;\n merge_notes: string | null;\n updated_at: string;\n ready_for_merge: number;\n auto_requeue_count: number | null;\n pr_url: string | null;\n}\n\nfunction rowToReviewStatus(row: DbReviewStatusRow, history: StatusHistoryEntry[]): ReviewStatus {\n return {\n issueId: row.issue_id,\n reviewStatus: row.review_status as ReviewStatus['reviewStatus'],\n testStatus: row.test_status as ReviewStatus['testStatus'],\n mergeStatus: row.merge_status as ReviewStatus['mergeStatus'] ?? undefined,\n verificationStatus: row.verification_status as ReviewStatus['verificationStatus'] ?? undefined,\n verificationNotes: row.verification_notes ?? undefined,\n verificationCycleCount: row.verification_cycle_count ?? undefined,\n verificationMaxCycles: row.verification_max_cycles ?? undefined,\n reviewNotes: row.review_notes ?? undefined,\n testNotes: row.test_notes ?? undefined,\n mergeNotes: row.merge_notes ?? undefined,\n updatedAt: row.updated_at,\n readyForMerge: row.ready_for_merge === 1,\n autoRequeueCount: row.auto_requeue_count ?? undefined,\n prUrl: row.pr_url ?? undefined,\n history: history.length > 0 ? 
history : undefined,\n };\n}\n","import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';\nimport { join, dirname } from 'path';\nimport { homedir } from 'os';\nimport { notifyPipeline } from './pipeline-notifier.js';\nimport {\n upsertReviewStatus as dbUpsert,\n deleteReviewStatus as dbDelete,\n getReviewStatusFromDb,\n getAllReviewStatusesFromDb,\n} from './database/review-status-db.js';\n\nexport interface StatusHistoryEntry {\n type: 'review' | 'test' | 'merge' | 'inspect' | 'uat';\n status: string;\n timestamp: string;\n notes?: string;\n}\n\nexport interface ReviewStatus {\n issueId: string;\n reviewStatus: 'pending' | 'reviewing' | 'passed' | 'failed' | 'blocked';\n testStatus: 'pending' | 'testing' | 'passed' | 'failed' | 'skipped' | 'dispatch_failed';\n mergeStatus?: 'pending' | 'merging' | 'merged' | 'failed';\n inspectStatus?: 'pending' | 'inspecting' | 'passed' | 'failed';\n inspectNotes?: string;\n uatStatus?: 'pending' | 'testing' | 'passed' | 'failed';\n uatNotes?: string;\n verificationStatus?: 'pending' | 'running' | 'passed' | 'failed' | 'skipped';\n verificationNotes?: string;\n verificationCycleCount?: number;\n verificationMaxCycles?: number;\n reviewNotes?: string;\n testNotes?: string;\n mergeNotes?: string;\n updatedAt: string;\n readyForMerge: boolean;\n autoRequeueCount?: number;\n prUrl?: string;\n history?: StatusHistoryEntry[];\n /** HEAD commit SHA at the time review passed — used to detect new commits after review */\n reviewedAtCommit?: string;\n}\n\nconst DEFAULT_STATUS_FILE = join(homedir(), '.panopticon', 'review-status.json');\n\nexport function loadReviewStatuses(filePath = DEFAULT_STATUS_FILE): Record<string, ReviewStatus> {\n // Prefer SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n return getAllReviewStatusesFromDb();\n } catch {\n // Fall through to JSON on DB error\n }\n }\n\n try {\n if (existsSync(filePath)) {\n return JSON.parse(readFileSync(filePath, 'utf-8'));\n }\n } catch (err) {\n console.error('Failed to load review statuses:', err);\n }\n return {};\n}\n\nexport function saveReviewStatuses(statuses: Record<string, ReviewStatus>, filePath = DEFAULT_STATUS_FILE): void {\n try {\n const dir = dirname(filePath);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n writeFileSync(filePath, JSON.stringify(statuses, null, 2));\n } catch (err) {\n console.error('Failed to save review statuses:', err);\n }\n}\n\nexport function setReviewStatus(\n issueId: string,\n update: Partial<ReviewStatus>,\n filePath = DEFAULT_STATUS_FILE,\n): ReviewStatus {\n const statuses = loadReviewStatuses(filePath);\n const existing = statuses[issueId] || {\n issueId,\n reviewStatus: 'pending' as const,\n testStatus: 'pending' as const,\n updatedAt: new Date().toISOString(),\n readyForMerge: false,\n };\n\n // Guard: reject reviewStatus regression from 'passed' to 'reviewing' unless the caller\n // is explicitly resetting the merge lifecycle (update includes mergeStatus).\n // This is belt-and-suspenders — endpoint-level guards should catch this first.\n if (update.reviewStatus === 'reviewing' && existing.reviewStatus === 'passed' && update.mergeStatus === undefined) {\n console.warn(`[review-status] Rejecting reviewStatus regression from 'passed' to 'reviewing' for ${issueId} (mergeStatus not being reset)`);\n return existing as ReviewStatus;\n }\n\n const merged = { ...existing, ...update };\n\n // Track status transitions in history (last 10 entries)\n const history = [...(existing.history || 
[])];\n const now = new Date().toISOString();\n if (update.reviewStatus && update.reviewStatus !== existing.reviewStatus) {\n history.push({ type: 'review', status: update.reviewStatus, timestamp: now, notes: update.reviewNotes });\n }\n if (update.testStatus && update.testStatus !== existing.testStatus) {\n history.push({ type: 'test', status: update.testStatus, timestamp: now, notes: update.testNotes });\n }\n if (update.uatStatus && update.uatStatus !== existing.uatStatus) {\n history.push({ type: 'uat', status: update.uatStatus, timestamp: now, notes: update.uatNotes });\n }\n if (update.mergeStatus && update.mergeStatus !== existing.mergeStatus) {\n history.push({ type: 'merge', status: update.mergeStatus, timestamp: now });\n }\n while (history.length > 10) history.shift();\n\n // readyForMerge is true when all required gates pass.\n // If uatStatus exists (UAT specialist has been involved), it must also be 'passed'.\n // verificationStatus must not be 'failed' — verification catches pre-existing test breakage\n // that scoped test runs (e2e/dashboard) may miss.\n const readyForMerge = update.readyForMerge !== undefined\n ? update.readyForMerge\n : (\n merged.reviewStatus === 'passed' &&\n merged.testStatus === 'passed' &&\n merged.verificationStatus !== 'failed' &&\n merged.mergeStatus !== 'merged' &&\n // If UAT has been initiated, it must pass too\n (merged.uatStatus === undefined || merged.uatStatus === 'passed')\n );\n\n const updated: ReviewStatus = {\n ...merged,\n issueId,\n updatedAt: now,\n readyForMerge,\n history,\n };\n\n // Report commit statuses to GitHub when readyForMerge transitions to true (PAN-536)\n if (readyForMerge && !existing.readyForMerge && updated.prUrl) {\n (async () => {\n try {\n const { isGitHubAppConfigured, reportCommitStatus } = await import('./github-app.js');\n if (!isGitHubAppConfigured()) return;\n const prMatch = updated.prUrl!.match(/github\\.com\\/([^/]+)\\/([^/]+)\\/pull/);\n if (!prMatch) return;\n const [, owner, repo] = prMatch;\n // Get HEAD SHA of the PR branch\n const { exec } = await import('child_process');\n const { promisify } = await import('util');\n const execAsync = promisify(exec);\n const { stdout } = await execAsync(\n `gh pr view ${updated.prUrl!.match(/\\/pull\\/(\\d+)/)?.[1]} --json headRefOid --jq .headRefOid`,\n { encoding: 'utf-8', timeout: 10000 }\n );\n const sha = stdout.trim();\n if (sha) {\n await reportCommitStatus(owner, repo, sha, 'success', 'panopticon/review', 'Review passed');\n await reportCommitStatus(owner, repo, sha, 'success', 'panopticon/test', 'Tests passed');\n console.log(`[review-status] Reported commit statuses for ${issueId} (${sha.slice(0, 8)})`);\n }\n } catch (err: any) {\n console.warn(`[review-status] Failed to report commit status: ${err.message}`);\n }\n })();\n }\n\n // SQLite first — it is the authoritative store (reads prefer SQLite)\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n dbUpsert(updated);\n } catch (err) {\n console.error('[review-status] SQLite write failed (continuing with JSON):', err);\n }\n }\n\n // JSON second — legacy fallback for tools that read review-status.json directly\n statuses[issueId] = updated;\n saveReviewStatuses(statuses, filePath);\n\n notifyPipeline({ type: 'status_changed', issueId, status: updated });\n\n return updated;\n}\n\nexport function getReviewStatus(issueId: string, filePath = DEFAULT_STATUS_FILE): ReviewStatus | null {\n // Prefer SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n const fromDb = 
getReviewStatusFromDb(issueId);\n if (fromDb) return fromDb;\n } catch {\n // Fall through to JSON on DB error\n }\n }\n const statuses = loadReviewStatuses(filePath);\n return statuses[issueId] || null;\n}\n\n/**\n * On server startup, clear any mergeStatus stuck at 'merging'.\n * Pending merge operations are in-memory only — they don't survive a restart.\n * Any 'merging' status after boot is definitionally stuck (PAN-490).\n */\nexport function clearStuckMergeStatuses(): void {\n const statuses = loadReviewStatuses();\n const stuck = Object.values(statuses).filter(s => s.mergeStatus === 'merging');\n if (stuck.length === 0) return;\n console.log(`[review-status] Clearing ${stuck.length} stuck 'merging' status(es) on startup`);\n for (const s of stuck) {\n setReviewStatus(s.issueId, { mergeStatus: 'pending' });\n }\n}\n\nexport function clearReviewStatus(issueId: string, filePath = DEFAULT_STATUS_FILE): void {\n const statuses = loadReviewStatuses(filePath);\n delete statuses[issueId];\n saveReviewStatuses(statuses, filePath);\n\n // Dual-delete from SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n dbDelete(issueId);\n } catch (err) {\n console.error('[review-status] SQLite delete failed (continuing with JSON):', err);\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;AAgBA,SAAgB,WAAW,IAA6B;AACtD,IAAG,KAAK;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;IA6KN;AAGF,IAAG,OAAO,oBAAmC;;;;;;AAO/C,SAAgB,cAAc,IAA6B;CACzD,MAAM,iBAAiB,GAAG,OAAO,gBAAgB,EAAE,QAAQ,MAAM,CAAC;AAElE,KAAI,mBAAA,GACF;AAGF,KAAI,mBAAmB,GAAG;AAExB,aAAW,GAAG;AACd;;AAIF,KAAI,iBAAiB,EAEnB,IAAG,KAAK;;;;;;;;;MASN;AAIJ,KAAI,iBAAiB,GAAG;AAEtB,MAAI;AACF,MAAG,KAAK,qDAAqD;UACvD;AAKR,KAAG,KAAK;;;MAGN;AAGF,MAAI;AACF,MAAG,KAAK,0DAA0D;UAC5D;AACR,MAAI;AACF,MAAG,KAAK,0DAA0D;UAC5D;AACR,MAAI;AACF,MAAG,KAAK,iEAAiE;UACnE;AACR,MAAI;AACF,MAAG,KAAK,mFAAmF;UACrF;;AAIV,KAAI,iBAAiB,EACnB,IAAG,KAAK;;;;;;;;;;;;;MAaN;AAIJ,KAAI,iBAAiB,EACnB,IAAG,KAAK;;;;;;;MAON;AAIJ,KAAI,iBAAiB,EACnB,IAAG,KAAK;;;;;;;;;;;;;;;;;;MAkBN;AAIJ,KAAI,iBAAiB,EACnB,KAAI;AACF,KAAG,KAAK,yDAAyD;SAC3D;AAIV,KAAI,iBAAiB,EACnB,KAAI;AACF,KAAG,KAAK,kDAAkD;SACpD;AAQV,KAAI,iBAAiB,GAAG;AACtB,MAAI;AACF,MAAG,KAAK,yDAAyD;UAC3D;AACR,MAAI;AACF,MAAG,KAAK,uDAAuD;UACzD;;AAIV,KAAI,iBAAiB,GACnB,KAAI;AACF,KAAG,KAAK,iEAAiE;SACnE;AAMV,KAAI,iBAAiB,GACnB,KAAI;AACF,KAAG,KAAK,kFAAkF;SACpF;AAIV,KAAI,iBAAiB,IAAI;AACvB,MAAI;AACF,MAAG,KAAK,wDAAwD;UAC1D;AACR,MAAI;AACF,MAAG,KAAK,sFAAsF;UACxF;;AAIV,KAAI,iBAAiB,IAAI;AACvB,MAAI;AACF,MAAG,KAAK,kDAAkD;UACpD;AACR,MAAI;AACF,MAAG,KAAK,mDAAmD;UACrD;;AAIV,IAAG,OAAO,oBAAmC;;;;;AChW/C,SAAS,eAAwB;AAC/B,QAAO,OAAO,QAAQ;;;;;AAWxB,SAAgB,kBAA0B;AACxC,QAAO,KAAK,mBAAmB,EAAE,gBAAgB;;;;;;AAOnD,SAAgB,cAAiC;AAC/C,KAAI,IACF,QAAO;CAGT,MAAM,OAAO,mBAAmB;AAChC,KAAI,CAAC,WAAW,KAAK,CACnB,WAAU,MAAM,EAAE,WAAW,MAAM,CAAC;CAGtC,MAAM,SAAS,iBAAiB;AAEhC,KAAI,cAAc,EAAE;EAGlB,MAAM,EAAE,UAAU,gBAAgB,SAAS,aAAa;EACxD,MAAM,QAAQ,IAAI,YAAY,OAAO;AAGrC,QAAM,SAAS,SAAU,KAAa,SAAqC;AACzE,OAAI,SAAS,QAAQ;IAEnB,MAAM,MAAM,IAAI,MAAM;AAEtB,WADY,MAAM,MAAM,UAAU,MAAM,CAAC,KAAK,GACjC,QAAQ;;AAGvB,SAAM,KAAK,UAAU,MAAM;;AAI7B,QAAM;OAIN,OAAM,KADgB,SAAS,iBAAiB,EACxB,OAAO;AAIjC,KAAI,OAAO,qBAAqB;AAEhC,KAAI,OAAO,oBAAoB;AAE/B,KAAI,OAAO,uBAAuB;AAGlC,eAAc,IAAI;AAElB,QAAO;;;;;;AAOT,SAAgB,gBAAsB;AACpC,KAAI,KAAK;AACP,MAAI,OAAO;AACX,QAAM;;;;;aAnFsC;cACJ;AAStC,YAAW,cAAc,OAAO,KAAK,IAAI;AAE3C,OAAgC;;;;;;;;ACdpC,SAAgB,mBAAmB,QAA4B;CAC7D,MAAM,KAAK,aAAa;AAET,IAAG,aAAa,MAAoB;AAEjD,KAAG,QAAQ;;;;;;;;;;;;
;;;;;;;;;;;;;MAyBT,CAAC,IACD,EAAE,SACF,EAAE,cACF,EAAE,YACF,EAAE,eAAe,MACjB,EAAE,sBAAsB,MACxB,EAAE,qBAAqB,MACvB,EAAE,0BAA0B,MAC5B,EAAE,yBAAyB,MAC3B,EAAE,eAAe,MACjB,EAAE,aAAa,MACf,EAAE,cAAc,MAChB,EAAE,WACF,EAAE,gBAAgB,IAAI,GACtB,EAAE,oBAAoB,MACtB,EAAE,SAAS,KACZ;AAGD,MAAI,EAAE,WAAW,EAAE,QAAQ,SAAS,GAAG;GACrC,MAAM,gBAAgB,GAAG,QAAQ;;;QAG/B;AACF,QAAK,MAAM,SAAS,EAAE,QACpB,eAAc,IAAI,EAAE,SAAS,MAAM,MAAM,MAAM,QAAQ,MAAM,WAAW,MAAM,SAAS,KAAK;;GAGhG,CAEK,OAAO;;;;;AAMhB,SAAgB,mBAAmB,SAAuB;AAC7C,cAAa,CACrB,QAAQ,+CAA+C,CAAC,IAAI,QAAQ;;;;;AAQzE,SAAgB,sBAAsB,SAAsC;CAG1E,MAAM,MAFK,aAAa,CAET,QAAQ;;IAErB,CAAC,IAAI,QAAQ;AAEf,KAAI,CAAC,IAAK,QAAO;AAGjB,QAAO,kBAAkB,KADT,iBAAiB,QAAQ,CACH;;;;;AAMxC,SAAgB,6BAA2D;CAGzE,MAAM,OAFK,aAAa,CAER,QAAQ,uDAAuD,CAAC,KAAK;CACrF,MAAM,SAAuC,EAAE;AAE/C,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,UAAU,iBAAiB,IAAI,SAAS;AAC9C,SAAO,IAAI,YAAY,kBAAkB,KAAK,QAAQ;;AAGxD,QAAO;;;;;AAMT,SAAS,iBAAiB,SAAuC;AAS/D,QARW,aAAa,CACR,QAAQ;;;;;IAKtB,CAAC,IAAI,QAAQ,CAEH,KAAI,OAAM;EACpB,MAAM,EAAE;EACR,QAAQ,EAAE;EACV,WAAW,EAAE;EACb,GAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE;EACtC,EAAE;;AAuBL,SAAS,kBAAkB,KAAwB,SAA6C;AAC9F,QAAO;EACL,SAAS,IAAI;EACb,cAAc,IAAI;EAClB,YAAY,IAAI;EAChB,aAAa,IAAI,gBAA+C,KAAA;EAChE,oBAAoB,IAAI,uBAA6D,KAAA;EACrF,mBAAmB,IAAI,sBAAsB,KAAA;EAC7C,wBAAwB,IAAI,4BAA4B,KAAA;EACxD,uBAAuB,IAAI,2BAA2B,KAAA;EACtD,aAAa,IAAI,gBAAgB,KAAA;EACjC,WAAW,IAAI,cAAc,KAAA;EAC7B,YAAY,IAAI,eAAe,KAAA;EAC/B,WAAW,IAAI;EACf,eAAe,IAAI,oBAAoB;EACvC,kBAAkB,IAAI,sBAAsB,KAAA;EAC5C,OAAO,IAAI,UAAU,KAAA;EACrB,SAAS,QAAQ,SAAS,IAAI,UAAU,KAAA;EACzC;;;gBA7KsC;;;;ACqCzC,SAAgB,mBAAmB,WAAW,qBAAmD;AAE/F,KAAI,aAAa,oBACf,KAAI;AACF,SAAO,4BAA4B;SAC7B;AAKV,KAAI;AACF,MAAI,WAAW,SAAS,CACtB,QAAO,KAAK,MAAM,aAAa,UAAU,QAAQ,CAAC;UAE7C,KAAK;AACZ,UAAQ,MAAM,mCAAmC,IAAI;;AAEvD,QAAO,EAAE;;AAGX,SAAgB,mBAAmB,UAAwC,WAAW,qBAA2B;AAC/G,KAAI;EACF,MAAM,MAAM,QAAQ,SAAS;AAC7B,MAAI,CAAC,WAAW,IAAI,CAClB,WAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAErC,gBAAc,UAAU,KAAK,UAAU,UAAU,MAAM,EAAE,CAAC;UACnD,KAAK;AACZ,UAAQ,MAAM,mCAAmC,IAAI;;;AAIzD,SAAgB,gBACd,SACA,QACA,WAAW,qBACG;CACd,MAAM,WAAW,mBAAmB,SAAS;CAC7C,MAAM,WAAW,SAAS,YAAY;EACpC;EACA,cAAc;EACd,YAAY;EACZ,4BAAW,IAAI,MAAM,EAAC,aAAa;EACnC,eAAe;EAChB;AAKD,KAAI,OAAO,iBAAiB,eAAe,SAAS,iBAAiB,YAAY,OAAO,gBAAgB,KAAA,GAAW;AACjH,UAAQ,KAAK,sFAAsF,QAAQ,gCAAgC;AAC3I,SAAO;;CAGT,MAAM,SAAS;EAAE,GAAG;EAAU,GAAG;EAAQ;CAGzC,MAAM,UAAU,CAAC,GAAI,SAAS,WAAW,EAAE,CAAE;CAC7C,MAAM,uBAAM,IAAI,MAAM,EAAC,aAAa;AACpC,KAAI,OAAO,gBAAgB,OAAO,iBAAiB,SAAS,aAC1D,SAAQ,KAAK;EAAE,MAAM;EAAU,QAAQ,OAAO;EAAc,WAAW;EAAK,OAAO,OAAO;EAAa,CAAC;AAE1G,KAAI,OAAO,cAAc,OAAO,eAAe,SAAS,WACtD,SAAQ,KAAK;EAAE,MAAM;EAAQ,QAAQ,OAAO;EAAY,WAAW;EAAK,OAAO,OAAO;EAAW,CAAC;AAEpG,KAAI,OAAO,aAAa,OAAO,cAAc,SAAS,UACpD,SAAQ,KAAK;EAAE,MAAM;EAAO,QAAQ,OAAO;EAAW,WAAW;EAAK,OAAO,OAAO;EAAU,CAAC;AAEjG,KAAI,OAAO,eAAe,OAAO,gBAAgB,SAAS,YACxD,SAAQ,KAAK;EAAE,MAAM;EAAS,QAAQ,OAAO;EAAa,WAAW;EAAK,CAAC;AAE7E,QAAO,QAAQ,SAAS,GAAI,SAAQ,OAAO;CAM3C,MAAM,gBAAgB,OAAO,kBAAkB,KAAA,IAC3C,OAAO,gBAEL,OAAO,iBAAiB,YACxB,OAAO,eAAe,YACtB,OAAO,uBAAuB,YAC9B,OAAO,gBAAgB,aAEtB,OAAO,cAAc,KAAA,KAAa,OAAO,cAAc;CAG9D,MAAM,UAAwB;EAC5B,GAAG;EACH;EACA,WAAW;EACX;EACA;EACD;AAGD,KAAI,iBAAiB,CAAC,SAAS,iBAAiB,QAAQ,MACtD,EAAC,YAAY;AACX,MAAI;GACF,MAAM,EAAE,uBAAuB,uBAAuB,MAAM,OAAO;AACnE,OAAI,CAAC,uBAAuB,CAAE;GAC9B,MAAM,UAAU,QAAQ,MAAO,MAAM,sCAAsC;AAC3E,OAAI,CAAC,QAAS;GACd,MAAM,GAAG,OAAO,QAAQ;GAExB,MAAM,EAAE,SAAS,MAAM,OAAO;GAC9B,MAAM,EAAE,cAAc,MAAM,OAAO;GAEnC,MAAM,EAAE,WAAW,MADD,UAAU,KAAK,CAE/B,cAAc,QAAQ,MAAO,MAAM,gBAAgB,GAAG,GAAG,sCACzD;IAAE,UAAU;IAAS,SAAS;IAAO,CACtC;GACD,MAAM,MAAM,OAAO,MAAM;AACzB,OAAI,KAAK;AACP,UAAM,mBAAmB,OAAO,MAAM,KAAK,WAAW,qBAAqB,gBAAgB;AAC3F,UAAM,mBAA
mB,OAAO,MAAM,KAAK,WAAW,mBAAmB,eAAe;AACxF,YAAQ,IAAI,gDAAgD,QAAQ,IAAI,IAAI,MAAM,GAAG,EAAE,CAAC,GAAG;;WAEtF,KAAU;AACjB,WAAQ,KAAK,mDAAmD,IAAI,UAAU;;KAE9E;AAIN,KAAI,aAAa,oBACf,KAAI;AACF,qBAAS,QAAQ;UACV,KAAK;AACZ,UAAQ,MAAM,+DAA+D,IAAI;;AAKrF,UAAS,WAAW;AACpB,oBAAmB,UAAU,SAAS;AAEtC,gBAAe;EAAE,MAAM;EAAkB;EAAS,QAAQ;EAAS,CAAC;AAEpE,QAAO;;AAGT,SAAgB,gBAAgB,SAAiB,WAAW,qBAA0C;AAEpG,KAAI,aAAa,oBACf,KAAI;EACF,MAAM,SAAS,sBAAsB,QAAQ;AAC7C,MAAI,OAAQ,QAAO;SACb;AAKV,QADiB,mBAAmB,SAAS,CAC7B,YAAY;;AAkB9B,SAAgB,kBAAkB,SAAiB,WAAW,qBAA2B;CACvF,MAAM,WAAW,mBAAmB,SAAS;AAC7C,QAAO,SAAS;AAChB,oBAAmB,UAAU,SAAS;AAGtC,KAAI,aAAa,oBACf,KAAI;AACF,qBAAS,QAAQ;UACV,KAAK;AACZ,UAAQ,MAAM,gEAAgE,IAAI;;;;;yBAhOhC;wBAMhB;AAkClC,uBAAsB,KAAK,SAAS,EAAE,eAAe,qBAAqB"}
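The bundled schema.ts above drives migrations off SQLite's `user_version` pragma: each step is gated on the recorded version, column additions are wrapped in try/catch so re-runs tolerate pre-existing columns, and the version is stamped once at the end. A minimal sketch of that pattern, assuming better-sqlite3 and an illustrative `notes` table that is not Panopticon's schema:

```ts
// Sketch of the user_version migration pattern followed by the bundled
// schema.ts. Table and column names here are made up for illustration.
import Database from 'better-sqlite3';

const TARGET_VERSION = 2;

function migrate(db: Database.Database): void {
  const version = db.pragma('user_version', { simple: true }) as number;
  if (version >= TARGET_VERSION) return;

  if (version < 1) {
    db.exec(`CREATE TABLE IF NOT EXISTS notes (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      body TEXT NOT NULL
    )`);
  }

  if (version < 2) {
    // Additive change: tolerate databases where the column already exists.
    try {
      db.exec(`ALTER TABLE notes ADD COLUMN created_at TEXT`);
    } catch { /* already exists */ }
  }

  // Stamp once, after every gated step has applied.
  db.pragma(`user_version = ${TARGET_VERSION}`);
}

migrate(new Database(':memory:'));
```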
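The v10 → v11 step adds an expression index because a `WHERE UPPER(issue_id) = ?` predicate cannot use the plain `idx_cost_issue_id` index. A small self-contained demo of the effect, using `EXPLAIN QUERY PLAN` on a toy `cost_events` table (made-up data; better-sqlite3 assumed):

```ts
import Database from 'better-sqlite3';

const db = new Database(':memory:');
db.exec(`
  CREATE TABLE cost_events (id INTEGER PRIMARY KEY, issue_id TEXT NOT NULL, cost REAL);
  CREATE INDEX idx_cost_issue_id ON cost_events(issue_id);
`);

const plan = (sql: string) => db.prepare(`EXPLAIN QUERY PLAN ${sql}`).all();

// UPPER() on the column defeats idx_cost_issue_id: SQLite reports a full SCAN.
console.log(plan(`SELECT SUM(cost) FROM cost_events WHERE UPPER(issue_id) = 'PAN-1'`));

// The expression index from the v10 -> v11 migration makes the lookup a SEARCH.
db.exec(`CREATE INDEX idx_cost_issue_upper ON cost_events(UPPER(issue_id))`);
console.log(plan(`SELECT SUM(cost) FROM cost_events WHERE UPPER(issue_id) = 'PAN-1'`));
```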
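review-status-db.ts leans on the v1 → v2 UNIQUE index to make history appends idempotent: inside the upsert transaction, `INSERT OR IGNORE` silently drops rows that already exist, which is what lets the whole `history` array be replayed on every upsert. A reduced sketch of that write pattern (simplified columns, not the full schema):

```ts
import Database from 'better-sqlite3';

const db = new Database(':memory:');
db.exec(`
  CREATE TABLE status_history (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    issue_id TEXT NOT NULL,
    type TEXT NOT NULL,
    status TEXT NOT NULL,
    timestamp TEXT NOT NULL
  );
  CREATE UNIQUE INDEX idx_status_history_unique
    ON status_history(issue_id, type, status, timestamp);
`);

const insert = db.prepare(`
  INSERT OR IGNORE INTO status_history (issue_id, type, status, timestamp)
  VALUES (?, ?, ?, ?)
`);

// Wrapping the loop in db.transaction() yields one atomic write, the same
// shape as upsertReviewStatus.
const appendHistory = db.transaction((rows: Array<[string, string, string, string]>) => {
  for (const [issueId, type, status, ts] of rows) insert.run(issueId, type, status, ts);
});

const history: Array<[string, string, string, string]> = [
  ['PAN-1', 'review', 'passed', '2024-01-01T00:00:00Z'],
];
appendHistory(history);
appendHistory(history); // replaying the same history is a no-op

console.log(db.prepare('SELECT COUNT(*) AS n FROM status_history').get()); // { n: 1 }
```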
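For reference, the `readyForMerge` gate computed inside `setReviewStatus` can be restated as a standalone predicate. The field names mirror the bundled `ReviewStatus` interface; the extraction itself is illustrative, not part of the package:

```ts
// Field names mirror the ReviewStatus interface above; this standalone
// predicate is an illustration of the gate logic, not package code.
type MergeGates = {
  reviewStatus: string;
  testStatus: string;
  mergeStatus?: string;
  verificationStatus?: string;
  uatStatus?: string;
};

function isReadyForMerge(m: MergeGates): boolean {
  return (
    m.reviewStatus === 'passed' &&
    m.testStatus === 'passed' &&
    m.verificationStatus !== 'failed' && // verification may be absent, but never failed
    m.mergeStatus !== 'merged' &&        // already-merged work never re-flags as ready
    (m.uatStatus === undefined || m.uatStatus === 'passed') // UAT, once started, must pass
  );
}

console.log(isReadyForMerge({ reviewStatus: 'passed', testStatus: 'passed' })); // true
console.log(isReadyForMerge({ reviewStatus: 'passed', testStatus: 'passed', uatStatus: 'testing' })); // false
```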
+ {"version":3,"file":"review-status-DEDvCKMP.js","names":[],"sources":["../src/lib/database/schema.ts","../src/lib/database/index.ts","../src/lib/database/review-status-db.ts","../src/lib/review-status.ts"],"sourcesContent":["/**\n * Panopticon Database Schema\n *\n * Defines the unified schema for panopticon.db.\n * All persistent application state lives here.\n */\n\nimport type Database from 'better-sqlite3';\n\n// Schema version — increment when making breaking schema changes\nexport const SCHEMA_VERSION = 13;\n\n/**\n * Initialize the complete database schema.\n * Idempotent — uses CREATE TABLE IF NOT EXISTS throughout.\n */\nexport function initSchema(db: Database.Database): void {\n db.exec(`\n -- ===== Cost Events =====\n CREATE TABLE IF NOT EXISTS cost_events (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n ts TEXT NOT NULL,\n agent_id TEXT NOT NULL,\n issue_id TEXT NOT NULL,\n session_type TEXT NOT NULL DEFAULT 'unknown',\n provider TEXT NOT NULL DEFAULT 'anthropic',\n model TEXT NOT NULL,\n input INTEGER NOT NULL DEFAULT 0,\n output INTEGER NOT NULL DEFAULT 0,\n cache_read INTEGER NOT NULL DEFAULT 0,\n cache_write INTEGER NOT NULL DEFAULT 0,\n cost REAL NOT NULL DEFAULT 0,\n request_id TEXT,\n session_id TEXT, -- Claude Code session UUID (for reconciler offset tracking)\n -- TLDR metrics\n tldr_interceptions INTEGER,\n tldr_bypasses INTEGER,\n tldr_tokens_saved INTEGER,\n tldr_bypass_reasons TEXT, -- JSON string\n -- WAL source tracking\n source_file TEXT -- path of WAL file this came from (for imports)\n );\n\n CREATE UNIQUE INDEX IF NOT EXISTS idx_cost_request_id\n ON cost_events(request_id) WHERE request_id IS NOT NULL;\n\n CREATE INDEX IF NOT EXISTS idx_cost_issue_id\n ON cost_events(issue_id, ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_agent_id\n ON cost_events(agent_id, ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_ts\n ON cost_events(ts);\n\n CREATE INDEX IF NOT EXISTS idx_cost_session_id\n ON cost_events(session_id) WHERE session_id IS NOT NULL;\n\n -- ===== Review Status =====\n CREATE TABLE IF NOT EXISTS review_status (\n issue_id TEXT PRIMARY KEY,\n review_status TEXT NOT NULL DEFAULT 'pending',\n test_status TEXT NOT NULL DEFAULT 'pending',\n merge_status TEXT,\n verification_status TEXT,\n verification_notes TEXT,\n verification_cycle_count INTEGER DEFAULT 0,\n verification_max_cycles INTEGER,\n review_notes TEXT,\n test_notes TEXT,\n merge_notes TEXT,\n updated_at TEXT NOT NULL,\n ready_for_merge INTEGER NOT NULL DEFAULT 0,\n auto_requeue_count INTEGER DEFAULT 0,\n pr_url TEXT\n );\n\n CREATE INDEX IF NOT EXISTS idx_review_status_updated\n ON review_status(updated_at);\n\n -- ===== Status History =====\n CREATE TABLE IF NOT EXISTS status_history (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n issue_id TEXT NOT NULL,\n type TEXT NOT NULL, -- 'review', 'test', 'merge'\n status TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n notes TEXT,\n FOREIGN KEY (issue_id) REFERENCES review_status(issue_id) ON DELETE CASCADE\n );\n\n CREATE INDEX IF NOT EXISTS idx_status_history_issue\n ON status_history(issue_id, timestamp);\n\n -- UNIQUE constraint enables INSERT OR IGNORE deduplication in upsertReviewStatus\n CREATE UNIQUE INDEX IF NOT EXISTS idx_status_history_unique\n ON status_history(issue_id, type, status, timestamp);\n\n -- ===== Health Events =====\n CREATE TABLE IF NOT EXISTS health_events (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n agent_id TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n state TEXT NOT NULL,\n previous_state TEXT,\n source TEXT,\n metadata TEXT -- JSON string\n 
);\n\n CREATE INDEX IF NOT EXISTS idx_health_agent_timestamp\n ON health_events(agent_id, timestamp);\n\n CREATE INDEX IF NOT EXISTS idx_health_timestamp\n ON health_events(timestamp);\n\n -- ===== Processed Sessions (for reconciler offset tracking) =====\n CREATE TABLE IF NOT EXISTS processed_sessions (\n session_id TEXT PRIMARY KEY,\n agent_id TEXT,\n issue_id TEXT,\n transcript_path TEXT, -- full path to the .jsonl file\n byte_offset INTEGER NOT NULL DEFAULT 0, -- bytes consumed so far\n processed_at TEXT NOT NULL,\n event_count INTEGER NOT NULL DEFAULT 0\n );\n\n -- ===== API Cache =====\n CREATE TABLE IF NOT EXISTS api_cache (\n key TEXT PRIMARY KEY,\n value TEXT NOT NULL, -- JSON string\n expires_at TEXT,\n created_at TEXT NOT NULL\n );\n\n -- ===== Rate Limits =====\n CREATE TABLE IF NOT EXISTS rate_limits (\n service TEXT PRIMARY KEY,\n requests INTEGER NOT NULL DEFAULT 0,\n window_start TEXT NOT NULL,\n limit_per_window INTEGER NOT NULL DEFAULT 1000\n );\n\n -- ===== Domain Events (PAN-428: push-first architecture) =====\n CREATE TABLE IF NOT EXISTS events (\n sequence INTEGER PRIMARY KEY AUTOINCREMENT,\n type TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n payload TEXT NOT NULL -- JSON\n );\n\n CREATE INDEX IF NOT EXISTS idx_events_type\n ON events(type);\n\n CREATE INDEX IF NOT EXISTS idx_events_timestamp\n ON events(timestamp);\n\n -- ===== Projection Cache (PAN-437: instant dashboard startup) =====\n CREATE TABLE IF NOT EXISTS projection_cache (\n key TEXT PRIMARY KEY,\n data TEXT NOT NULL, -- JSON-serialized DashboardSnapshot\n sequence INTEGER NOT NULL, -- Last event sequence applied\n updated_at TEXT NOT NULL -- ISO timestamp\n );\n\n -- ===== Conversations (PAN-416: Mission Control conversation launcher) =====\n CREATE TABLE IF NOT EXISTS conversations (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n name TEXT NOT NULL UNIQUE,\n tmux_session TEXT NOT NULL,\n status TEXT NOT NULL DEFAULT 'active', -- 'active', 'ended'\n cwd TEXT NOT NULL,\n issue_id TEXT, -- optional cost attribution\n created_at TEXT NOT NULL,\n ended_at TEXT,\n last_attached_at TEXT,\n session_file TEXT, -- path to Claude Code JSONL session file (PAN-451)\n title TEXT, -- human-readable title, auto-set from first message\n title_source TEXT, -- 'auto', 'ai', or 'manual'\n title_seed TEXT, -- original auto-generated title for replacement check\n total_cost REAL DEFAULT 0, -- cached total cost in USD\n archived_at TEXT, -- ISO timestamp when archived, null = active\n model TEXT, -- model used to spawn conversation (e.g. 'minimax-m2.7-highspeed')\n effort TEXT -- effort level (e.g. 
'low', 'medium', 'high')\n );\n\n CREATE INDEX IF NOT EXISTS idx_conversations_status\n ON conversations(status);\n\n CREATE INDEX IF NOT EXISTS idx_conversations_created_at\n ON conversations(created_at);\n `);\n\n // Record schema version\n db.pragma(`user_version = ${SCHEMA_VERSION}`);\n}\n\n/**\n * Run schema migrations if the database version is older than SCHEMA_VERSION.\n * This function handles upgrading from older schema versions.\n */\nexport function runMigrations(db: Database.Database): void {\n const currentVersion = db.pragma('user_version', { simple: true }) as number;\n\n if (currentVersion === SCHEMA_VERSION) {\n return; // Already at latest version\n }\n\n if (currentVersion === 0) {\n // Fresh database — just initialize the full schema\n initSchema(db);\n return;\n }\n\n // v1 → v2: add UNIQUE index on status_history for INSERT OR IGNORE dedup\n if (currentVersion < 2) {\n // Remove duplicate rows before adding the unique index (keep lowest id per unique key)\n db.exec(`\n DELETE FROM status_history\n WHERE id NOT IN (\n SELECT MIN(id)\n FROM status_history\n GROUP BY issue_id, type, status, timestamp\n );\n CREATE UNIQUE INDEX IF NOT EXISTS idx_status_history_unique\n ON status_history(issue_id, type, status, timestamp);\n `);\n }\n\n // v2 → v3: add session_id to cost_events, extend processed_sessions for reconciler\n if (currentVersion < 3) {\n // Add session_id column to cost_events (nullable, no data loss)\n try {\n db.exec(`ALTER TABLE cost_events ADD COLUMN session_id TEXT`);\n } catch {\n // Column may already exist if schema was manually applied\n }\n\n // Add index on session_id\n db.exec(`\n CREATE INDEX IF NOT EXISTS idx_cost_session_id\n ON cost_events(session_id) WHERE session_id IS NOT NULL;\n `);\n\n // Extend processed_sessions with new columns for reconciler\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN agent_id TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN issue_id TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN transcript_path TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE processed_sessions ADD COLUMN byte_offset INTEGER NOT NULL DEFAULT 0`);\n } catch { /* already exists */ }\n }\n\n // v3 → v4: add events table for push-first architecture (PAN-428)\n if (currentVersion < 4) {\n db.exec(`\n CREATE TABLE IF NOT EXISTS events (\n sequence INTEGER PRIMARY KEY AUTOINCREMENT,\n type TEXT NOT NULL,\n timestamp TEXT NOT NULL,\n payload TEXT NOT NULL -- JSON\n );\n\n CREATE INDEX IF NOT EXISTS idx_events_type\n ON events(type);\n\n CREATE INDEX IF NOT EXISTS idx_events_timestamp\n ON events(timestamp);\n `);\n }\n\n // v4 → v5: add projection_cache table (PAN-437: instant dashboard startup)\n if (currentVersion < 5) {\n db.exec(`\n CREATE TABLE IF NOT EXISTS projection_cache (\n key TEXT PRIMARY KEY,\n data TEXT NOT NULL,\n sequence INTEGER NOT NULL,\n updated_at TEXT NOT NULL\n );\n `);\n }\n\n // v5 → v6: add conversations table (PAN-416)\n if (currentVersion < 6) {\n db.exec(`\n CREATE TABLE IF NOT EXISTS conversations (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n name TEXT NOT NULL UNIQUE,\n tmux_session TEXT NOT NULL,\n status TEXT NOT NULL DEFAULT 'active',\n cwd TEXT NOT NULL,\n issue_id TEXT,\n created_at TEXT NOT NULL,\n ended_at TEXT,\n last_attached_at TEXT\n );\n\n CREATE INDEX IF NOT EXISTS idx_conversations_status\n ON conversations(status);\n\n CREATE INDEX IF NOT EXISTS 
idx_conversations_created_at\n ON conversations(created_at);\n `);\n }\n\n // v6 → v7: add session_file column to conversations (PAN-451)\n if (currentVersion < 7) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN session_file TEXT`);\n } catch { /* already exists */ }\n }\n\n // v7 → v8: add title column to conversations (auto-set from first message)\n if (currentVersion < 8) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN title TEXT`);\n } catch { /* already exists */ }\n }\n\n // v8 → v9: add title_source and title_seed columns to conversations\n // title_source tracks how the title was set: 'auto' (truncated first message),\n // 'ai' (Claude-generated), or 'manual' (user renamed). Used for T3Code-style\n // canReplaceThreadTitle logic — only auto-generated titles get AI replacement.\n // title_seed stores the original truncated message for replacement eligibility.\n if (currentVersion < 9) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN title_source TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN title_seed TEXT`);\n } catch { /* already exists */ }\n }\n\n // v9 → v10: add total_cost column to conversations (cached cost in USD)\n if (currentVersion < 10) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN total_cost REAL DEFAULT 0`);\n } catch { /* already exists */ }\n }\n\n // v10 → v11: expression index for UPPER(issue_id) on cost_events\n // The N+1 queries in getCostsByIssueFromDb use UPPER(issue_id) which defeats\n // the existing idx_cost_issue_id index. This expression index fixes that.\n if (currentVersion < 11) {\n try {\n db.exec(`CREATE INDEX IF NOT EXISTS idx_cost_issue_upper ON cost_events(UPPER(issue_id))`);\n } catch { /* already exists */ }\n }\n\n // v11 → v12: archived_at column + index for conversations (T3Code pattern)\n if (currentVersion < 12) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN archived_at TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`CREATE INDEX IF NOT EXISTS idx_conversations_archived ON conversations(archived_at)`);\n } catch { /* already exists */ }\n }\n\n // v12 → v13: add model + effort columns to conversations (preserve model on resume)\n if (currentVersion < 13) {\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN model TEXT`);\n } catch { /* already exists */ }\n try {\n db.exec(`ALTER TABLE conversations ADD COLUMN effort TEXT`);\n } catch { /* already exists */ }\n }\n\n // After all migrations, set the version\n db.pragma(`user_version = ${SCHEMA_VERSION}`);\n}\n","/**\n * Panopticon Unified Database\n *\n * Single panopticon.db at ~/.panopticon/panopticon.db.\n * Singleton pattern — one connection shared across the process.\n *\n * IMPORTANT: This module is safe to import in both server and CLI contexts.\n * Never use execSync here — this is synchronous SQLite, not a subprocess.\n *\n * Dual-runtime (PAN-428):\n * - Bun: uses bun:sqlite (better-sqlite3 is a native addon — ERR_DLOPEN_FAILED in Bun)\n * - Node: uses better-sqlite3\n * In both cases the external API is identical: pragma(), exec(), prepare(), close().\n */\n\nimport type Database from 'better-sqlite3';\nimport { createRequire } from 'module';\nimport { join } from 'path';\nimport { existsSync, mkdirSync } from 'fs';\nimport { getPanopticonHome } from '../paths.js';\nimport { runMigrations } from './schema.js';\n\ndeclare const Bun: unknown;\n\nfunction isBunRuntime(): boolean {\n return typeof Bun !== 'undefined';\n}\n\n// createRequire allows synchronous 
require() in ESM — works in both Bun and Node\nconst _require = createRequire(import.meta.url);\n\nlet _db: Database.Database | null = null;\n\n/**\n * Get the path to panopticon.db (dynamic, respects PANOPTICON_HOME override for tests)\n */\nexport function getDatabasePath(): string {\n return join(getPanopticonHome(), 'panopticon.db');\n}\n\n/**\n * Initialize and return the singleton database connection.\n * Safe to call multiple times — returns the existing connection after first call.\n */\nexport function getDatabase(): Database.Database {\n if (_db) {\n return _db;\n }\n\n const home = getPanopticonHome();\n if (!existsSync(home)) {\n mkdirSync(home, { recursive: true });\n }\n\n const dbPath = getDatabasePath();\n\n if (isBunRuntime()) {\n // better-sqlite3 is a native Node.js addon that fails in Bun with ERR_DLOPEN_FAILED.\n // Use bun:sqlite instead, with a pragma() shim for API compatibility.\n const { Database: BunDatabase } = _require('bun:sqlite') as { Database: new (path: string) => any };\n const bunDb = new BunDatabase(dbPath);\n\n // bun:sqlite has no pragma() method — shim it using exec() and query().get()\n bunDb.pragma = function (sql: string, options?: { simple?: boolean }): any {\n if (options?.simple) {\n // Read-only: return the scalar value directly (e.g. db.pragma('user_version', { simple: true }))\n const key = sql.trim();\n const row = bunDb.query(`PRAGMA ${key}`).get() as Record<string, unknown> | null;\n return row?.[key] ?? null;\n }\n // Set or no-return pragma (e.g. 'journal_mode = WAL', 'foreign_keys = ON')\n bunDb.exec(`PRAGMA ${sql}`);\n return undefined;\n };\n\n _db = bunDb as Database.Database;\n } else {\n // Node.js path: load better-sqlite3 lazily (avoids import-time native addon load)\n const BetterSqlite3 = _require('better-sqlite3');\n _db = new BetterSqlite3(dbPath) as Database.Database;\n }\n\n // Enable WAL mode for concurrent readers + single writer\n _db.pragma('journal_mode = WAL');\n // Enforce foreign keys\n _db.pragma('foreign_keys = ON');\n // Write-ahead log synchronization — NORMAL is safe and fast\n _db.pragma('synchronous = NORMAL');\n\n // Initialize or migrate schema\n runMigrations(_db);\n\n return _db;\n}\n\n/**\n * Close the database connection and release the singleton.\n * Primarily used in tests to get a fresh connection.\n */\nexport function closeDatabase(): void {\n if (_db) {\n _db.close();\n _db = null;\n }\n}\n\n/**\n * Force re-initialization of the database connection.\n * Used in tests after PANOPTICON_HOME changes.\n */\nexport function resetDatabase(): void {\n closeDatabase();\n}\n","/**\n * Review Status SQLite Storage\n *\n * Provides SQLite-backed CRUD for ReviewStatus, matching the interface in\n * src/lib/review-status.ts. 
Atomic single-transaction writes eliminate the\n * TOCTOU race in the JSON-backed implementation.\n */\n\nimport { getDatabase } from './index.js';\nimport type { ReviewStatus, StatusHistoryEntry } from '../review-status.js';\n\n// ============== Write operations ==============\n\n/**\n * Upsert a review status record atomically.\n * Replaces the JSON read-modify-write cycle with a single transaction.\n */\nexport function upsertReviewStatus(status: ReviewStatus): void {\n const db = getDatabase();\n\n const upsert = db.transaction((s: ReviewStatus) => {\n // Upsert main record\n db.prepare(`\n INSERT INTO review_status (\n issue_id, review_status, test_status, merge_status,\n verification_status, verification_notes,\n verification_cycle_count, verification_max_cycles,\n review_notes, test_notes, merge_notes,\n updated_at, ready_for_merge, auto_requeue_count, pr_url\n ) VALUES (\n ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?\n )\n ON CONFLICT(issue_id) DO UPDATE SET\n review_status = excluded.review_status,\n test_status = excluded.test_status,\n merge_status = excluded.merge_status,\n verification_status = excluded.verification_status,\n verification_notes = excluded.verification_notes,\n verification_cycle_count = excluded.verification_cycle_count,\n verification_max_cycles = excluded.verification_max_cycles,\n review_notes = excluded.review_notes,\n test_notes = excluded.test_notes,\n merge_notes = excluded.merge_notes,\n updated_at = excluded.updated_at,\n ready_for_merge = excluded.ready_for_merge,\n auto_requeue_count = excluded.auto_requeue_count,\n pr_url = excluded.pr_url\n `).run(\n s.issueId,\n s.reviewStatus,\n s.testStatus,\n s.mergeStatus ?? null,\n s.verificationStatus ?? null,\n s.verificationNotes ?? null,\n s.verificationCycleCount ?? null,\n s.verificationMaxCycles ?? null,\n s.reviewNotes ?? null,\n s.testNotes ?? null,\n s.mergeNotes ?? null,\n s.updatedAt,\n s.readyForMerge ? 1 : 0,\n s.autoRequeueCount ?? null,\n s.prUrl ?? null,\n );\n\n // Append new history entries (deduplicate by timestamp to avoid re-inserting)\n if (s.history && s.history.length > 0) {\n const insertHistory = db.prepare(`\n INSERT OR IGNORE INTO status_history (issue_id, type, status, timestamp, notes)\n VALUES (?, ?, ?, ?, ?)\n `);\n for (const entry of s.history) {\n insertHistory.run(s.issueId, entry.type, entry.status, entry.timestamp, entry.notes ?? 
null);\n }\n }\n });\n\n upsert(status);\n}\n\n/**\n * Delete a review status record and its history.\n */\nexport function deleteReviewStatus(issueId: string): void {\n const db = getDatabase();\n db.prepare('DELETE FROM review_status WHERE issue_id = ?').run(issueId);\n}\n\n// ============== Read operations ==============\n\n/**\n * Get a single review status by issue ID.\n */\nexport function getReviewStatusFromDb(issueId: string): ReviewStatus | null {\n const db = getDatabase();\n\n const row = db.prepare(`\n SELECT * FROM review_status WHERE issue_id = ?\n `).get(issueId) as DbReviewStatusRow | undefined;\n\n if (!row) return null;\n\n const history = getHistoryFromDb(issueId);\n return rowToReviewStatus(row, history);\n}\n\n/**\n * Get all review statuses.\n */\nexport function getAllReviewStatusesFromDb(): Record<string, ReviewStatus> {\n const db = getDatabase();\n\n const rows = db.prepare('SELECT * FROM review_status ORDER BY updated_at DESC').all() as DbReviewStatusRow[];\n const result: Record<string, ReviewStatus> = {};\n\n for (const row of rows) {\n const history = getHistoryFromDb(row.issue_id);\n result[row.issue_id] = rowToReviewStatus(row, history);\n }\n\n return result;\n}\n\n/**\n * Get history entries for an issue.\n */\nfunction getHistoryFromDb(issueId: string): StatusHistoryEntry[] {\n const db = getDatabase();\n const rows = db.prepare(`\n SELECT type, status, timestamp, notes\n FROM status_history\n WHERE issue_id = ?\n ORDER BY timestamp ASC\n `).all(issueId) as Array<{ type: string; status: string; timestamp: string; notes: string | null }>;\n\n return rows.map(r => ({\n type: r.type as 'review' | 'test' | 'merge',\n status: r.status,\n timestamp: r.timestamp,\n ...(r.notes ? { notes: r.notes } : {}),\n }));\n}\n\n// ============== Row mapping ==============\n\ninterface DbReviewStatusRow {\n issue_id: string;\n review_status: string;\n test_status: string;\n merge_status: string | null;\n verification_status: string | null;\n verification_notes: string | null;\n verification_cycle_count: number | null;\n verification_max_cycles: number | null;\n review_notes: string | null;\n test_notes: string | null;\n merge_notes: string | null;\n updated_at: string;\n ready_for_merge: number;\n auto_requeue_count: number | null;\n pr_url: string | null;\n}\n\nfunction rowToReviewStatus(row: DbReviewStatusRow, history: StatusHistoryEntry[]): ReviewStatus {\n return {\n issueId: row.issue_id,\n reviewStatus: row.review_status as ReviewStatus['reviewStatus'],\n testStatus: row.test_status as ReviewStatus['testStatus'],\n mergeStatus: row.merge_status as ReviewStatus['mergeStatus'] ?? undefined,\n verificationStatus: row.verification_status as ReviewStatus['verificationStatus'] ?? undefined,\n verificationNotes: row.verification_notes ?? undefined,\n verificationCycleCount: row.verification_cycle_count ?? undefined,\n verificationMaxCycles: row.verification_max_cycles ?? undefined,\n reviewNotes: row.review_notes ?? undefined,\n testNotes: row.test_notes ?? undefined,\n mergeNotes: row.merge_notes ?? undefined,\n updatedAt: row.updated_at,\n readyForMerge: row.ready_for_merge === 1,\n autoRequeueCount: row.auto_requeue_count ?? undefined,\n prUrl: row.pr_url ?? undefined,\n history: history.length > 0 ? 
history : undefined,\n };\n}\n","import { existsSync, readFileSync, writeFileSync, mkdirSync } from 'fs';\nimport { join, dirname } from 'path';\nimport { homedir } from 'os';\nimport { notifyPipeline } from './pipeline-notifier.js';\nimport {\n upsertReviewStatus as dbUpsert,\n deleteReviewStatus as dbDelete,\n getReviewStatusFromDb,\n getAllReviewStatusesFromDb,\n} from './database/review-status-db.js';\n\nexport interface StatusHistoryEntry {\n type: 'review' | 'test' | 'merge' | 'inspect' | 'uat';\n status: string;\n timestamp: string;\n notes?: string;\n}\n\nexport interface ReviewStatus {\n issueId: string;\n reviewStatus: 'pending' | 'reviewing' | 'passed' | 'failed' | 'blocked';\n testStatus: 'pending' | 'testing' | 'passed' | 'failed' | 'skipped' | 'dispatch_failed';\n mergeStatus?: 'pending' | 'merging' | 'merged' | 'failed';\n inspectStatus?: 'pending' | 'inspecting' | 'passed' | 'failed';\n inspectNotes?: string;\n uatStatus?: 'pending' | 'testing' | 'passed' | 'failed';\n uatNotes?: string;\n verificationStatus?: 'pending' | 'running' | 'passed' | 'failed' | 'skipped';\n verificationNotes?: string;\n verificationCycleCount?: number;\n verificationMaxCycles?: number;\n reviewNotes?: string;\n testNotes?: string;\n mergeNotes?: string;\n updatedAt: string;\n readyForMerge: boolean;\n autoRequeueCount?: number;\n prUrl?: string;\n history?: StatusHistoryEntry[];\n /** HEAD commit SHA at the time review passed — used to detect new commits after review */\n reviewedAtCommit?: string;\n}\n\nconst DEFAULT_STATUS_FILE = join(homedir(), '.panopticon', 'review-status.json');\n\nexport function loadReviewStatuses(filePath = DEFAULT_STATUS_FILE): Record<string, ReviewStatus> {\n // Prefer SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n return getAllReviewStatusesFromDb();\n } catch {\n // Fall through to JSON on DB error\n }\n }\n\n try {\n if (existsSync(filePath)) {\n return JSON.parse(readFileSync(filePath, 'utf-8'));\n }\n } catch (err) {\n console.error('Failed to load review statuses:', err);\n }\n return {};\n}\n\nexport function saveReviewStatuses(statuses: Record<string, ReviewStatus>, filePath = DEFAULT_STATUS_FILE): void {\n try {\n const dir = dirname(filePath);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n writeFileSync(filePath, JSON.stringify(statuses, null, 2));\n } catch (err) {\n console.error('Failed to save review statuses:', err);\n }\n}\n\nexport function setReviewStatus(\n issueId: string,\n update: Partial<ReviewStatus>,\n filePath = DEFAULT_STATUS_FILE,\n): ReviewStatus {\n const statuses = loadReviewStatuses(filePath);\n const existing = statuses[issueId] || {\n issueId,\n reviewStatus: 'pending' as const,\n testStatus: 'pending' as const,\n updatedAt: new Date().toISOString(),\n readyForMerge: false,\n };\n\n // Guard: reject reviewStatus regression from 'passed' to 'reviewing' unless the caller\n // is explicitly resetting the merge lifecycle (update includes mergeStatus).\n // This is belt-and-suspenders — endpoint-level guards should catch this first.\n if (update.reviewStatus === 'reviewing' && existing.reviewStatus === 'passed' && update.mergeStatus === undefined) {\n console.warn(`[review-status] Rejecting reviewStatus regression from 'passed' to 'reviewing' for ${issueId} (mergeStatus not being reset)`);\n return existing as ReviewStatus;\n }\n\n const merged = { ...existing, ...update };\n\n // Track status transitions in history (last 10 entries)\n const history = [...(existing.history || 
[])];\n const now = new Date().toISOString();\n if (update.reviewStatus && update.reviewStatus !== existing.reviewStatus) {\n history.push({ type: 'review', status: update.reviewStatus, timestamp: now, notes: update.reviewNotes });\n }\n if (update.testStatus && update.testStatus !== existing.testStatus) {\n history.push({ type: 'test', status: update.testStatus, timestamp: now, notes: update.testNotes });\n }\n if (update.uatStatus && update.uatStatus !== existing.uatStatus) {\n history.push({ type: 'uat', status: update.uatStatus, timestamp: now, notes: update.uatNotes });\n }\n if (update.mergeStatus && update.mergeStatus !== existing.mergeStatus) {\n history.push({ type: 'merge', status: update.mergeStatus, timestamp: now });\n }\n while (history.length > 10) history.shift();\n\n // readyForMerge is true when all required gates pass.\n // If uatStatus exists (UAT specialist has been involved), it must also be 'passed'.\n // verificationStatus must not be 'failed' — verification catches pre-existing test breakage\n // that scoped test runs (e2e/dashboard) may miss.\n const readyForMerge = update.readyForMerge !== undefined\n ? update.readyForMerge\n : (\n merged.reviewStatus === 'passed' &&\n merged.testStatus === 'passed' &&\n merged.verificationStatus !== 'failed' &&\n merged.mergeStatus !== 'merged' &&\n // If UAT has been initiated, it must pass too\n (merged.uatStatus === undefined || merged.uatStatus === 'passed')\n );\n\n const updated: ReviewStatus = {\n ...merged,\n issueId,\n updatedAt: now,\n readyForMerge,\n history,\n };\n\n // Report commit statuses to GitHub when readyForMerge transitions to true (PAN-536)\n if (readyForMerge && !existing.readyForMerge && updated.prUrl) {\n (async () => {\n try {\n const { isGitHubAppConfigured, reportCommitStatus } = await import('./github-app.js');\n if (!isGitHubAppConfigured()) return;\n const prMatch = updated.prUrl!.match(/github\\.com\\/([^/]+)\\/([^/]+)\\/pull/);\n if (!prMatch) return;\n const [, owner, repo] = prMatch;\n // Get HEAD SHA of the PR branch\n const { exec } = await import('child_process');\n const { promisify } = await import('util');\n const execAsync = promisify(exec);\n const { stdout } = await execAsync(\n `gh pr view ${updated.prUrl!.match(/\\/pull\\/(\\d+)/)?.[1]} --json headRefOid --jq .headRefOid`,\n { encoding: 'utf-8', timeout: 10000 }\n );\n const sha = stdout.trim();\n if (sha) {\n await reportCommitStatus(owner, repo, sha, 'success', 'panopticon/review', 'Review passed');\n await reportCommitStatus(owner, repo, sha, 'success', 'panopticon/test', 'Tests passed');\n console.log(`[review-status] Reported commit statuses for ${issueId} (${sha.slice(0, 8)})`);\n }\n } catch (err: any) {\n console.warn(`[review-status] Failed to report commit status: ${err.message}`);\n }\n })();\n }\n\n // SQLite first — it is the authoritative store (reads prefer SQLite)\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n dbUpsert(updated);\n } catch (err) {\n console.error('[review-status] SQLite write failed (continuing with JSON):', err);\n }\n }\n\n // JSON second — legacy fallback for tools that read review-status.json directly\n statuses[issueId] = updated;\n saveReviewStatuses(statuses, filePath);\n\n notifyPipeline({ type: 'status_changed', issueId, status: updated });\n\n // Queue test-agent when review transitions to 'passed'.\n // This fires regardless of how setReviewStatus() is called (API or direct import),\n // ensuring test-agent is queued even when review-agent bypasses the specialist\n // dispatch 
endpoint. Idempotent — if test-agent is already queued, pushToHook\n // deduplicates by issueId.\n if (\n update.reviewStatus === 'passed' &&\n existing.reviewStatus !== 'passed' &&\n existing.testStatus === 'pending'\n ) {\n (async () => {\n try {\n const { submitToSpecialistQueue } = await import('./cloister/specialists.js');\n const workAgentId = `agent-${issueId.toLowerCase()}`;\n const workStateFile = join(homedir(), '.panopticon', 'agents', workAgentId, 'state.json');\n let workspace: string | undefined;\n let branch: string | undefined;\n if (existsSync(workStateFile)) {\n try {\n const workState = JSON.parse(readFileSync(workStateFile, 'utf-8'));\n workspace = workState.workspace;\n branch = workState.branch || `feature/${issueId.toLowerCase()}`;\n } catch {}\n }\n submitToSpecialistQueue('test-agent', {\n priority: 'high',\n source: 'review-agent-auto',\n issueId,\n workspace,\n branch,\n });\n console.log(`[review-status] Queued test-agent for ${issueId} after review passed`);\n } catch (err: any) {\n console.warn(`[review-status] Failed to queue test-agent for ${issueId}: ${err.message}`);\n }\n })();\n }\n\n // Auto-deliver feedback to work agent when review blocks or tests fail.\n // This ensures feedback reaches the agent regardless of whether status was\n // set via the dashboard API or directly (e.g., bun -e import). See PAN-586.\n if (\n (update.reviewStatus === 'blocked' || update.testStatus === 'failed') &&\n (update.reviewStatus !== existing.reviewStatus || update.testStatus !== existing.testStatus)\n ) {\n const agentSession = `agent-${issueId.toLowerCase()}`;\n (async () => {\n try {\n const { sessionExists } = await import('./tmux.js');\n if (!sessionExists(agentSession)) return;\n\n const statusType = update.reviewStatus === 'blocked' ? 
'REVIEW BLOCKED' : 'TESTS FAILED';\n const notes = update.reviewNotes || update.testNotes || 'No details provided.';\n const msg = `SPECIALIST FEEDBACK: ${statusType} for ${issueId}.\\n\\n${notes}\\n\\nFix the issues, then run: pan work done ${issueId}`;\n\n const { messageAgent } = await import('./agents.js');\n await messageAgent(agentSession, msg);\n console.log(`[review-status] Auto-delivered ${statusType} feedback to ${agentSession}`);\n } catch (err: any) {\n console.warn(`[review-status] Failed to auto-deliver feedback to ${agentSession}: ${err.message}`);\n }\n })();\n }\n\n return updated;\n}\n\nexport function getReviewStatus(issueId: string, filePath = DEFAULT_STATUS_FILE): ReviewStatus | null {\n // Prefer SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n const fromDb = getReviewStatusFromDb(issueId);\n if (fromDb) return fromDb;\n } catch {\n // Fall through to JSON on DB error\n }\n }\n const statuses = loadReviewStatuses(filePath);\n return statuses[issueId] || null;\n}\n\n/**\n * On server startup, clear any mergeStatus stuck at 'merging'.\n * Pending merge operations are in-memory only — they don't survive a restart.\n * Any 'merging' status after boot is definitionally stuck (PAN-490).\n */\nexport function clearStuckMergeStatuses(): void {\n const statuses = loadReviewStatuses();\n const stuck = Object.values(statuses).filter(s => s.mergeStatus === 'merging');\n if (stuck.length === 0) return;\n console.log(`[review-status] Clearing ${stuck.length} stuck 'merging' status(es) on startup`);\n for (const s of stuck) {\n setReviewStatus(s.issueId, { mergeStatus: 'pending' });\n }\n}\n\nexport function clearReviewStatus(issueId: string, filePath = DEFAULT_STATUS_FILE): void {\n const statuses = loadReviewStatuses(filePath);\n delete statuses[issueId];\n saveReviewStatuses(statuses, filePath);\n\n // Dual-delete from SQLite when using the default path\n if (filePath === DEFAULT_STATUS_FILE) {\n try {\n dbDelete(issueId);\n } catch (err) {\n console.error('[review-status] SQLite delete failed (continuing with JSON):', err);\n }\n 
}\n}\n"],"mappings":";;;;;;;;;;;;AAgBA,SAAgB,WAAW,IAA6B;AACtD,IAAG,KAAK;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;IA6KN;AAGF,IAAG,OAAO,oBAAmC;;;;;;AAO/C,SAAgB,cAAc,IAA6B;CACzD,MAAM,iBAAiB,GAAG,OAAO,gBAAgB,EAAE,QAAQ,MAAM,CAAC;AAElE,KAAI,mBAAA,GACF;AAGF,KAAI,mBAAmB,GAAG;AAExB,aAAW,GAAG;AACd;;AAIF,KAAI,iBAAiB,EAEnB,IAAG,KAAK;;;;;;;;;MASN;AAIJ,KAAI,iBAAiB,GAAG;AAEtB,MAAI;AACF,MAAG,KAAK,qDAAqD;UACvD;AAKR,KAAG,KAAK;;;MAGN;AAGF,MAAI;AACF,MAAG,KAAK,0DAA0D;UAC5D;AACR,MAAI;AACF,MAAG,KAAK,0DAA0D;UAC5D;AACR,MAAI;AACF,MAAG,KAAK,iEAAiE;UACnE;AACR,MAAI;AACF,MAAG,KAAK,mFAAmF;UACrF;;AAIV,KAAI,iBAAiB,EACnB,IAAG,KAAK;;;;;;;;;;;;;MAaN;AAIJ,KAAI,iBAAiB,EACnB,IAAG,KAAK;;;;;;;MAON;AAIJ,KAAI,iBAAiB,EACnB,IAAG,KAAK;;;;;;;;;;;;;;;;;;MAkBN;AAIJ,KAAI,iBAAiB,EACnB,KAAI;AACF,KAAG,KAAK,yDAAyD;SAC3D;AAIV,KAAI,iBAAiB,EACnB,KAAI;AACF,KAAG,KAAK,kDAAkD;SACpD;AAQV,KAAI,iBAAiB,GAAG;AACtB,MAAI;AACF,MAAG,KAAK,yDAAyD;UAC3D;AACR,MAAI;AACF,MAAG,KAAK,uDAAuD;UACzD;;AAIV,KAAI,iBAAiB,GACnB,KAAI;AACF,KAAG,KAAK,iEAAiE;SACnE;AAMV,KAAI,iBAAiB,GACnB,KAAI;AACF,KAAG,KAAK,kFAAkF;SACpF;AAIV,KAAI,iBAAiB,IAAI;AACvB,MAAI;AACF,MAAG,KAAK,wDAAwD;UAC1D;AACR,MAAI;AACF,MAAG,KAAK,sFAAsF;UACxF;;AAIV,KAAI,iBAAiB,IAAI;AACvB,MAAI;AACF,MAAG,KAAK,kDAAkD;UACpD;AACR,MAAI;AACF,MAAG,KAAK,mDAAmD;UACrD;;AAIV,IAAG,OAAO,oBAAmC;;;;;AChW/C,SAAS,eAAwB;AAC/B,QAAO,OAAO,QAAQ;;;;;AAWxB,SAAgB,kBAA0B;AACxC,QAAO,KAAK,mBAAmB,EAAE,gBAAgB;;;;;;AAOnD,SAAgB,cAAiC;AAC/C,KAAI,IACF,QAAO;CAGT,MAAM,OAAO,mBAAmB;AAChC,KAAI,CAAC,WAAW,KAAK,CACnB,WAAU,MAAM,EAAE,WAAW,MAAM,CAAC;CAGtC,MAAM,SAAS,iBAAiB;AAEhC,KAAI,cAAc,EAAE;EAGlB,MAAM,EAAE,UAAU,gBAAgB,SAAS,aAAa;EACxD,MAAM,QAAQ,IAAI,YAAY,OAAO;AAGrC,QAAM,SAAS,SAAU,KAAa,SAAqC;AACzE,OAAI,SAAS,QAAQ;IAEnB,MAAM,MAAM,IAAI,MAAM;AAEtB,WADY,MAAM,MAAM,UAAU,MAAM,CAAC,KAAK,GACjC,QAAQ;;AAGvB,SAAM,KAAK,UAAU,MAAM;;AAI7B,QAAM;OAIN,OAAM,KADgB,SAAS,iBAAiB,EACxB,OAAO;AAIjC,KAAI,OAAO,qBAAqB;AAEhC,KAAI,OAAO,oBAAoB;AAE/B,KAAI,OAAO,uBAAuB;AAGlC,eAAc,IAAI;AAElB,QAAO;;;;;;AAOT,SAAgB,gBAAsB;AACpC,KAAI,KAAK;AACP,MAAI,OAAO;AACX,QAAM;;;;;aAnFsC;cACJ;AAStC,YAAW,cAAc,OAAO,KAAK,IAAI;AAE3C,OAAgC;;;;;;;;ACdpC,SAAgB,mBAAmB,QAA4B;CAC7D,MAAM,KAAK,aAAa;AAET,IAAG,aAAa,MAAoB;AAEjD,KAAG,QAAQ;;;;;;;;;;;;;;;;;;;;;;;;;MAyBT,CAAC,IACD,EAAE,SACF,EAAE,cACF,EAAE,YACF,EAAE,eAAe,MACjB,EAAE,sBAAsB,MACxB,EAAE,qBAAqB,MACvB,EAAE,0BAA0B,MAC5B,EAAE,yBAAyB,MAC3B,EAAE,eAAe,MACjB,EAAE,aAAa,MACf,EAAE,cAAc,MAChB,EAAE,WACF,EAAE,gBAAgB,IAAI,GACtB,EAAE,oBAAoB,MACtB,EAAE,SAAS,KACZ;AAGD,MAAI,EAAE,WAAW,EAAE,QAAQ,SAAS,GAAG;GACrC,MAAM,gBAAgB,GAAG,QAAQ;;;QAG/B;AACF,QAAK,MAAM,SAAS,EAAE,QACpB,eAAc,IAAI,EAAE,SAAS,MAAM,MAAM,MAAM,QAAQ,MAAM,WAAW,MAAM,SAAS,KAAK;;GAGhG,CAEK,OAAO;;;;;AAMhB,SAAgB,mBAAmB,SAAuB;AAC7C,cAAa,CACrB,QAAQ,+CAA+C,CAAC,IAAI,QAAQ;;;;;AAQzE,SAAgB,sBAAsB,SAAsC;CAG1E,MAAM,MAFK,aAAa,CAET,QAAQ;;IAErB,CAAC,IAAI,QAAQ;AAEf,KAAI,CAAC,IAAK,QAAO;AAGjB,QAAO,kBAAkB,KADT,iBAAiB,QAAQ,CACH;;;;;AAMxC,SAAgB,6BAA2D;CAGzE,MAAM,OAFK,aAAa,CAER,QAAQ,uDAAuD,CAAC,KAAK;CACrF,MAAM,SAAuC,EAAE;AAE/C,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,UAAU,iBAAiB,IAAI,SAAS;AAC9C,SAAO,IAAI,YAAY,kBAAkB,KAAK,QAAQ;;AAGxD,QAAO;;;;;AAMT,SAAS,iBAAiB,SAAuC;AAS/D,QARW,aAAa,CACR,QAAQ;;;;;IAKtB,CAAC,IAAI,QAAQ,CAEH,KAAI,OAAM;EACpB,MAAM,EAAE;EACR,QAAQ,EAAE;EACV,WAAW,EAAE;EACb,GAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE;EACtC,EAAE;;AAuBL,SAAS,kBAAkB,KAAwB,SAA6C;AAC9F,QAAO;EACL,SAAS,IAAI;EACb,cAAc,IAAI;EAClB,YAAY,IAAI;EAChB,aAAa,IAAI,gBAA+C,KAAA;EAChE,oBAAoB,IAAI,uBAA6D,KAAA;EACrF,mBAAmB,IAAI,sBAAsB,KAAA;EAC7C,wBAAwB,IAAI,4BAA4B,KAAA;EACxD,uB
AAuB,IAAI,2BAA2B,KAAA;EACtD,aAAa,IAAI,gBAAgB,KAAA;EACjC,WAAW,IAAI,cAAc,KAAA;EAC7B,YAAY,IAAI,eAAe,KAAA;EAC/B,WAAW,IAAI;EACf,eAAe,IAAI,oBAAoB;EACvC,kBAAkB,IAAI,sBAAsB,KAAA;EAC5C,OAAO,IAAI,UAAU,KAAA;EACrB,SAAS,QAAQ,SAAS,IAAI,UAAU,KAAA;EACzC;;;gBA7KsC;;;;ACqCzC,SAAgB,mBAAmB,WAAW,qBAAmD;AAE/F,KAAI,aAAa,oBACf,KAAI;AACF,SAAO,4BAA4B;SAC7B;AAKV,KAAI;AACF,MAAI,WAAW,SAAS,CACtB,QAAO,KAAK,MAAM,aAAa,UAAU,QAAQ,CAAC;UAE7C,KAAK;AACZ,UAAQ,MAAM,mCAAmC,IAAI;;AAEvD,QAAO,EAAE;;AAGX,SAAgB,mBAAmB,UAAwC,WAAW,qBAA2B;AAC/G,KAAI;EACF,MAAM,MAAM,QAAQ,SAAS;AAC7B,MAAI,CAAC,WAAW,IAAI,CAClB,WAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAErC,gBAAc,UAAU,KAAK,UAAU,UAAU,MAAM,EAAE,CAAC;UACnD,KAAK;AACZ,UAAQ,MAAM,mCAAmC,IAAI;;;AAIzD,SAAgB,gBACd,SACA,QACA,WAAW,qBACG;CACd,MAAM,WAAW,mBAAmB,SAAS;CAC7C,MAAM,WAAW,SAAS,YAAY;EACpC;EACA,cAAc;EACd,YAAY;EACZ,4BAAW,IAAI,MAAM,EAAC,aAAa;EACnC,eAAe;EAChB;AAKD,KAAI,OAAO,iBAAiB,eAAe,SAAS,iBAAiB,YAAY,OAAO,gBAAgB,KAAA,GAAW;AACjH,UAAQ,KAAK,sFAAsF,QAAQ,gCAAgC;AAC3I,SAAO;;CAGT,MAAM,SAAS;EAAE,GAAG;EAAU,GAAG;EAAQ;CAGzC,MAAM,UAAU,CAAC,GAAI,SAAS,WAAW,EAAE,CAAE;CAC7C,MAAM,uBAAM,IAAI,MAAM,EAAC,aAAa;AACpC,KAAI,OAAO,gBAAgB,OAAO,iBAAiB,SAAS,aAC1D,SAAQ,KAAK;EAAE,MAAM;EAAU,QAAQ,OAAO;EAAc,WAAW;EAAK,OAAO,OAAO;EAAa,CAAC;AAE1G,KAAI,OAAO,cAAc,OAAO,eAAe,SAAS,WACtD,SAAQ,KAAK;EAAE,MAAM;EAAQ,QAAQ,OAAO;EAAY,WAAW;EAAK,OAAO,OAAO;EAAW,CAAC;AAEpG,KAAI,OAAO,aAAa,OAAO,cAAc,SAAS,UACpD,SAAQ,KAAK;EAAE,MAAM;EAAO,QAAQ,OAAO;EAAW,WAAW;EAAK,OAAO,OAAO;EAAU,CAAC;AAEjG,KAAI,OAAO,eAAe,OAAO,gBAAgB,SAAS,YACxD,SAAQ,KAAK;EAAE,MAAM;EAAS,QAAQ,OAAO;EAAa,WAAW;EAAK,CAAC;AAE7E,QAAO,QAAQ,SAAS,GAAI,SAAQ,OAAO;CAM3C,MAAM,gBAAgB,OAAO,kBAAkB,KAAA,IAC3C,OAAO,gBAEL,OAAO,iBAAiB,YACxB,OAAO,eAAe,YACtB,OAAO,uBAAuB,YAC9B,OAAO,gBAAgB,aAEtB,OAAO,cAAc,KAAA,KAAa,OAAO,cAAc;CAG9D,MAAM,UAAwB;EAC5B,GAAG;EACH;EACA,WAAW;EACX;EACA;EACD;AAGD,KAAI,iBAAiB,CAAC,SAAS,iBAAiB,QAAQ,MACtD,EAAC,YAAY;AACX,MAAI;GACF,MAAM,EAAE,uBAAuB,uBAAuB,MAAM,OAAO;AACnE,OAAI,CAAC,uBAAuB,CAAE;GAC9B,MAAM,UAAU,QAAQ,MAAO,MAAM,sCAAsC;AAC3E,OAAI,CAAC,QAAS;GACd,MAAM,GAAG,OAAO,QAAQ;GAExB,MAAM,EAAE,SAAS,MAAM,OAAO;GAC9B,MAAM,EAAE,cAAc,MAAM,OAAO;GAEnC,MAAM,EAAE,WAAW,MADD,UAAU,KAAK,CAE/B,cAAc,QAAQ,MAAO,MAAM,gBAAgB,GAAG,GAAG,sCACzD;IAAE,UAAU;IAAS,SAAS;IAAO,CACtC;GACD,MAAM,MAAM,OAAO,MAAM;AACzB,OAAI,KAAK;AACP,UAAM,mBAAmB,OAAO,MAAM,KAAK,WAAW,qBAAqB,gBAAgB;AAC3F,UAAM,mBAAmB,OAAO,MAAM,KAAK,WAAW,mBAAmB,eAAe;AACxF,YAAQ,IAAI,gDAAgD,QAAQ,IAAI,IAAI,MAAM,GAAG,EAAE,CAAC,GAAG;;WAEtF,KAAU;AACjB,WAAQ,KAAK,mDAAmD,IAAI,UAAU;;KAE9E;AAIN,KAAI,aAAa,oBACf,KAAI;AACF,qBAAS,QAAQ;UACV,KAAK;AACZ,UAAQ,MAAM,+DAA+D,IAAI;;AAKrF,UAAS,WAAW;AACpB,oBAAmB,UAAU,SAAS;AAEtC,gBAAe;EAAE,MAAM;EAAkB;EAAS,QAAQ;EAAS,CAAC;AAOpE,KACE,OAAO,iBAAiB,YACxB,SAAS,iBAAiB,YAC1B,SAAS,eAAe,UAExB,EAAC,YAAY;AACX,MAAI;GACF,MAAM,EAAE,4BAA4B,MAAM,OAAO;GACjD,MAAM,cAAc,SAAS,QAAQ,aAAa;GAClD,MAAM,gBAAgB,KAAK,SAAS,EAAE,eAAe,UAAU,aAAa,aAAa;GACzF,IAAI;GACJ,IAAI;AACJ,OAAI,WAAW,cAAc,CAC3B,KAAI;IACF,MAAM,YAAY,KAAK,MAAM,aAAa,eAAe,QAAQ,CAAC;AAClE,gBAAY,UAAU;AACtB,aAAS,UAAU,UAAU,WAAW,QAAQ,aAAa;WACvD;AAEV,2BAAwB,cAAc;IACpC,UAAU;IACV,QAAQ;IACR;IACA;IACA;IACD,CAAC;AACF,WAAQ,IAAI,yCAAyC,QAAQ,sBAAsB;WAC5E,KAAU;AACjB,WAAQ,KAAK,kDAAkD,QAAQ,IAAI,IAAI,UAAU;;KAEzF;AAMN,MACG,OAAO,iBAAiB,aAAa,OAAO,eAAe,cAC3D,OAAO,iBAAiB,SAAS,gBAAgB,OAAO,eAAe,SAAS,aACjF;EACA,MAAM,eAAe,SAAS,QAAQ,aAAa;AACnD,GAAC,YAAY;AACX,OAAI;IACF,MAAM,EAAE,kBAAkB,MAAM,OAAO;AACvC,QAAI,CAAC,cAAc,aAAa,CAAE;IAElC,MAAM,aAAa,OAAO,iBAAiB,YAAY,mBAAmB;IAE1E,MAAM,MAAM,wBAAwB,WAAW,OAAO,QAAQ,OADhD,OAAO,eAAe,OAAO,aAAa,uBACmB,8CAA8C;IAEzH,MAAM,EAAE,iBAAiB,MAAM,OAAO;AACtC,UAAM,aAAa,cAAc,IAAI;AACrC,YAAQ,IAAI,kCAAkC,WAAW,eAAe,eAAe;YAChF,KAAU;AACjB,YAAQ,KAAK,sDAAsD
,aAAa,IAAI,IAAI,UAAU;;MAElG;;AAGN,QAAO;;AAGT,SAAgB,gBAAgB,SAAiB,WAAW,qBAA0C;AAEpG,KAAI,aAAa,oBACf,KAAI;EACF,MAAM,SAAS,sBAAsB,QAAQ;AAC7C,MAAI,OAAQ,QAAO;SACb;AAKV,QADiB,mBAAmB,SAAS,CAC7B,YAAY;;AAkB9B,SAAgB,kBAAkB,SAAiB,WAAW,qBAA2B;CACvF,MAAM,WAAW,mBAAmB,SAAS;AAC7C,QAAO,SAAS;AAChB,oBAAmB,UAAU,SAAS;AAGtC,KAAI,aAAa,oBACf,KAAI;AACF,qBAAS,QAAQ;UACV,KAAK;AACZ,UAAQ,MAAM,gEAAgE,IAAI;;;;;yBAhShC;wBAMhB;AAkClC,uBAAsB,KAAK,SAAS,EAAE,eAAe,qBAAqB"}
@@ -1,51 +1,9 @@
- import { A as SKILLS_DIR, F as SOURCE_SKILLS_DIR, G as isDevMode, M as SOURCE_DEV_SKILLS_DIR, N as SOURCE_RULES_DIR, P as SOURCE_SCRIPTS_DIR, W as init_paths, a as CACHE_AGENTS_DIR, i as BIN_DIR, j as SOURCE_AGENTS_DIR, k as SETTINGS_FILE, o as CACHE_MANIFEST, r as BACKUPS_DIR, s as CACHE_RULES_DIR } from "./paths-lMaxrYtT.js";
- import { a as init_config, i as getDevrootPath } from "./config-CRzMQRgA.js";
- import { a as init_manifest, c as writeManifest, i as hashFile, n as collectSourceFiles, o as readManifest, r as compareFileToManifest, s as setManifestEntry, t as buildManifestFromDirectory } from "./manifest-DL0oDbpv.js";
- import { o as init_interface } from "./rally-uUUZXp1h.js";
- import { c as init_gitlab, f as init_linear, o as init_factory, u as init_github } from "./factory-DfzczxN1.js";
- import { appendFileSync, chmodSync, copyFileSync, cpSync, existsSync, lstatSync, mkdirSync, readFileSync, readdirSync, readlinkSync, rmSync, unlinkSync, writeFileSync } from "fs";
+ import { A as SKILLS_DIR, F as SOURCE_SKILLS_DIR, G as init_paths, K as isDevMode, M as SOURCE_DEV_SKILLS_DIR, N as SOURCE_RULES_DIR, P as SOURCE_SCRIPTS_DIR, a as CACHE_AGENTS_DIR, i as BIN_DIR, j as SOURCE_AGENTS_DIR, k as SETTINGS_FILE, o as CACHE_MANIFEST, r as BACKUPS_DIR, s as CACHE_RULES_DIR } from "./paths-CDJ_HsbN.js";
+ import { a as init_config, i as getDevrootPath } from "./config-BQNKsi9G.js";
+ import { a as init_manifest, c as writeManifest, i as hashFile, n as collectSourceFiles, o as readManifest, r as compareFileToManifest, s as setManifestEntry, t as buildManifestFromDirectory } from "./manifest-B4ghOD-V.js";
+ import { chmodSync, copyFileSync, cpSync, existsSync, lstatSync, mkdirSync, readFileSync, readdirSync, readlinkSync, rmSync, unlinkSync, writeFileSync } from "fs";
  import { basename, dirname, join } from "path";
  import { homedir } from "os";
- //#region src/lib/shell.ts
- function detectShell() {
- const shell = process.env.SHELL || "";
- if (shell.includes("zsh")) return "zsh";
- if (shell.includes("bash")) return "bash";
- if (shell.includes("fish")) return "fish";
- return "unknown";
- }
- function getShellRcFile(shell) {
- const home = homedir();
- switch (shell) {
- case "zsh": return join(home, ".zshrc");
- case "bash":
- const bashrc = join(home, ".bashrc");
- if (existsSync(bashrc)) return bashrc;
- return join(home, ".bash_profile");
- case "fish": return join(home, ".config", "fish", "config.fish");
- default: return null;
- }
- }
- const ALIAS_LINE = "alias pan=\"panopticon\"";
- const ALIAS_MARKER = "# Panopticon CLI alias";
- function hasAlias(rcFile) {
- if (!existsSync(rcFile)) return false;
- const content = readFileSync(rcFile, "utf8");
- return content.includes(ALIAS_MARKER) || content.includes(ALIAS_LINE);
- }
- function addAlias(rcFile) {
- if (hasAlias(rcFile)) return;
- appendFileSync(rcFile, `
- ${ALIAS_MARKER}
- ${ALIAS_LINE}
- `, "utf8");
- }
- function getAliasInstructions(shell) {
- const rcFile = getShellRcFile(shell);
- if (!rcFile) return `Add this to your shell config:\n ${ALIAS_LINE}`;
- return `Alias added to ${rcFile}. Run:\n source ${rcFile}`;
- }
- //#endregion
  //#region src/lib/backup.ts
  init_paths();
  function createBackupTimestamp() {
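The hunk above removes the `src/lib/shell.ts` helpers (`detectShell`, `getShellRcFile`, `hasAlias`, `addAlias`, `getAliasInstructions`) that installed a `pan` alias into the user's shell rc file; `appendFileSync` drops out of the `fs` import accordingly. A simplified TypeScript sketch of the removed flow, reconstructed from the deleted lines (detection trimmed to two shells for brevity):

```ts
import { appendFileSync, existsSync, readFileSync } from "fs";
import { join } from "path";
import { homedir } from "os";

// Simplified reconstruction of the deleted installer flow: detect the
// shell, resolve its rc file, and append the alias once if absent.
const ALIAS_LINE = "alias pan=\"panopticon\"";
const ALIAS_MARKER = "# Panopticon CLI alias";

const shell = (process.env.SHELL ?? "").includes("zsh") ? "zsh" : "bash";
const rcFile = shell === "zsh" ? join(homedir(), ".zshrc") : join(homedir(), ".bashrc");

// The marker comment doubles as the idempotency check, so re-running
// the installer never appends the alias twice.
if (existsSync(rcFile) && !readFileSync(rcFile, "utf8").includes(ALIAS_MARKER)) {
  appendFileSync(rcFile, `\n${ALIAS_MARKER}\n${ALIAS_LINE}\n`, "utf8");
}
```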
@@ -612,7 +570,7 @@ function getAvailableModels(settings) {
  "gpt-4o-mini"
  ] : [],
  google: settings.api_keys.google ? ["gemini-3-pro-preview", "gemini-3-flash-preview"] : [],
- zai: settings.api_keys.zai ? ["glm-4.7", "glm-4.7-flash"] : [],
+ zai: settings.api_keys.zai ? ["glm-4.7-flash"] : [],
  kimi: settings.api_keys.kimi ? ["kimi-k2", "kimi-k2.5"] : []
  };
  }
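The one behavioral change in this hunk: with a Z.ai key configured, `getAvailableModels` now offers only `glm-4.7-flash`; `glm-4.7` is dropped. A minimal sketch of the key-gated shape, inferred from the diff (the `settings` literal here is illustrative, not the real settings schema):

```ts
// Inferred from the hunk above: each provider's model list is populated
// only when the matching api_keys entry is set in settings.
const settings = {
  api_keys: { google: undefined as string | undefined, zai: "example-key" as string | undefined },
};

const models = {
  google: settings.api_keys.google ? ["gemini-3-pro-preview", "gemini-3-flash-preview"] : [],
  zai: settings.api_keys.zai ? ["glm-4.7-flash"] : [], // "glm-4.7" no longer offered as of 0.6.7
};

console.log(models); // { google: [], zai: ["glm-4.7-flash"] }
```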
@@ -651,156 +609,6 @@ function getAgentCommand(modelId) {
  };
  }
  //#endregion
- //#region src/lib/tracker/linking.ts
- /**
- * Cross-Tracker Linking
- *
- * Manages links between issues in different trackers.
- * Links are stored in a local JSON file for persistence.
- */
- /**
- * Parse an issue reference to extract tracker and ID
- * Examples:
- * "#42" -> { tracker: "github", ref: "#42" }
- * "github#42" -> { tracker: "github", ref: "#42" }
- * "MIN-630" -> { tracker: "linear", ref: "MIN-630" }
- * "gitlab#15" -> { tracker: "gitlab", ref: "#15" }
- */
- function parseIssueRef(ref) {
- if (ref.startsWith("github#")) return {
- tracker: "github",
- ref: `#${ref.slice(7)}`
- };
- if (ref.startsWith("gitlab#")) return {
- tracker: "gitlab",
- ref: `#${ref.slice(7)}`
- };
- if (ref.startsWith("linear:")) return {
- tracker: "linear",
- ref: ref.slice(7)
- };
- if (/^#\d+$/.test(ref)) return {
- tracker: "github",
- ref
- };
- if (/^[A-Z]+-\d+$/i.test(ref)) return {
- tracker: "linear",
- ref: ref.toUpperCase()
- };
- return null;
- }
- /**
- * Format an issue ref with tracker prefix for display
- */
- function formatIssueRef(ref, tracker) {
- if (tracker === "github") return ref.startsWith("#") ? `github${ref}` : `github#${ref}`;
- if (tracker === "gitlab") return ref.startsWith("#") ? `gitlab${ref}` : `gitlab#${ref}`;
- return ref;
- }
- /**
- * Link Manager for cross-tracker issue linking
- */
- var LinkManager = class {
- storePath;
- store;
- constructor(storePath) {
- this.storePath = storePath ?? join(homedir(), ".panopticon", "links.json");
- this.store = this.load();
- }
- load() {
- if (existsSync(this.storePath)) try {
- const data = JSON.parse(readFileSync(this.storePath, "utf-8"));
- if (data.version === 1) return data;
- } catch {}
- return {
- version: 1,
- links: []
- };
- }
- save() {
- const dir = join(this.storePath, "..");
- if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
- writeFileSync(this.storePath, JSON.stringify(this.store, null, 2));
- }
- /**
- * Add a link between two issues
- */
- addLink(source, target, direction = "related") {
- const existing = this.store.links.find((l) => l.sourceIssueRef === source.ref && l.sourceTracker === source.tracker && l.targetIssueRef === target.ref && l.targetTracker === target.tracker);
- if (existing) {
- if (existing.direction !== direction) {
- existing.direction = direction;
- this.save();
- }
- return existing;
- }
- const link = {
- sourceIssueRef: source.ref,
- sourceTracker: source.tracker,
- targetIssueRef: target.ref,
- targetTracker: target.tracker,
- direction,
- createdAt: (/* @__PURE__ */ new Date()).toISOString()
- };
- this.store.links.push(link);
- this.save();
- return link;
- }
- /**
- * Remove a link between two issues
- */
- removeLink(source, target) {
- const index = this.store.links.findIndex((l) => l.sourceIssueRef === source.ref && l.sourceTracker === source.tracker && l.targetIssueRef === target.ref && l.targetTracker === target.tracker);
- if (index >= 0) {
- this.store.links.splice(index, 1);
- this.save();
- return true;
- }
- return false;
- }
- /**
- * Get all issues linked to a given issue
- */
- getLinkedIssues(ref, tracker) {
- return this.store.links.filter((l) => l.sourceIssueRef === ref && l.sourceTracker === tracker || l.targetIssueRef === ref && l.targetTracker === tracker);
- }
- /**
- * Get all links (for debugging/admin)
- */
- getAllLinks() {
- return [...this.store.links];
- }
- /**
- * Find linked issue in another tracker
- */
- findLinkedIssue(ref, sourceTracker, targetTracker) {
- const asSource = this.store.links.find((l) => l.sourceIssueRef === ref && l.sourceTracker === sourceTracker && l.targetTracker === targetTracker);
- if (asSource) return asSource.targetIssueRef;
- const asTarget = this.store.links.find((l) => l.targetIssueRef === ref && l.targetTracker === sourceTracker && l.sourceTracker === targetTracker);
- if (asTarget) return asTarget.sourceIssueRef;
- return null;
- }
- /**
- * Clear all links (for testing)
- */
- clear() {
- this.store.links = [];
- this.save();
- }
- };
- let _linkManager = null;
- function getLinkManager() {
- if (!_linkManager) _linkManager = new LinkManager();
- return _linkManager;
- }
- //#endregion
- //#region src/lib/tracker/index.ts
- init_interface();
- init_linear();
- init_github();
- init_gitlab();
- init_factory();
- //#endregion
- export { hasAlias as A, createBackupTimestamp as C, detectShell as D, addAlias as E, getAliasInstructions as O, createBackup as S, restoreBackup as T, planSync as _, getAgentCommand as a, syncStatusline as b, getDefaultSettings as c, saveSettings as d, validateSettings as f, planHooksSync as g, migrateStalePersonalContent as h, parseIssueRef as i, getShellRcFile as k, isAnthropicModel as l, isPanopticonSymlink as m, formatIssueRef as n, getAvailableModels as o, executeSync as p, getLinkManager as r, getClaudeModelFlag as s, LinkManager as t, loadSettings as u, refreshCache as v, listBackups as w, cleanOldBackups as x, syncHooks as y };
+ export { cleanOldBackups as _, isAnthropicModel as a, listBackups as b, validateSettings as c, migrateStalePersonalContent as d, planHooksSync as f, syncStatusline as g, syncHooks as h, getDefaultSettings as i, executeSync as l, refreshCache as m, getAvailableModels as n, loadSettings as o, planSync as p, getClaudeModelFlag as r, saveSettings as s, getAgentCommand as t, isPanopticonSymlink as u, createBackup as v, restoreBackup as x, createBackupTimestamp as y };

- //# sourceMappingURL=tracker-C_62ukEq.js.map
+ //# sourceMappingURL=settings-BcWPTrua.js.map
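This hunk deletes the whole cross-tracker linking module (`src/lib/tracker/linking.ts`: `parseIssueRef`, `formatIssueRef`, `LinkManager`, `getLinkManager`) along with the tracker init calls, which is why the chunk's export map shrinks and the chunk itself is renamed from `tracker-C_62ukEq.js` to `settings-BcWPTrua.js`. For reference, a typed TypeScript re-statement of the deleted parser and the reference formats its doc comment promised (the `Tracker` and `ParsedRef` types are added here for clarity and were not in the bundle):

```ts
type Tracker = "github" | "gitlab" | "linear";
interface ParsedRef { tracker: Tracker; ref: string; }

// Faithful re-statement of the removed parseIssueRef from the diff above.
function parseIssueRef(ref: string): ParsedRef | null {
  // Explicit "tracker#n" / "linear:KEY-n" prefixes win first.
  if (ref.startsWith("github#")) return { tracker: "github", ref: `#${ref.slice(7)}` };
  if (ref.startsWith("gitlab#")) return { tracker: "gitlab", ref: `#${ref.slice(7)}` };
  if (ref.startsWith("linear:")) return { tracker: "linear", ref: ref.slice(7) };
  if (/^#\d+$/.test(ref)) return { tracker: "github", ref }; // bare "#42" defaults to GitHub
  if (/^[A-Z]+-\d+$/i.test(ref)) return { tracker: "linear", ref: ref.toUpperCase() };
  return null;
}

console.log(parseIssueRef("#42"));       // { tracker: "github", ref: "#42" }
console.log(parseIssueRef("MIN-630"));   // { tracker: "linear", ref: "MIN-630" }
console.log(parseIssueRef("gitlab#15")); // { tracker: "gitlab", ref: "#15" }
```

Links themselves were persisted by the deleted `LinkManager` as version-1 JSON under `~/.panopticon/links.json`; none of that API is exported from 0.6.7.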