panopticon-cli 0.6.5 → 0.6.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (311) hide show
  1. package/README.md +2 -2
  2. package/dist/{agents-DfYify9s.js → agents-CfFDs52G.js} +14 -14
  3. package/dist/{agents-DfYify9s.js.map → agents-CfFDs52G.js.map} +1 -1
  4. package/dist/{agents-BKsVoIc9.js → agents-D_2oRFVf.js} +1 -1
  5. package/dist/{archive-planning-BJrZ3tmN.js → archive-planning-D97ziGec.js} +3 -3
  6. package/dist/{archive-planning-BJrZ3tmN.js.map → archive-planning-D97ziGec.js.map} +1 -1
  7. package/dist/{archive-planning-C3m3hfa5.js → archive-planning-DK90wn9Q.js} +1 -1
  8. package/dist/{browser-Cvdznzc0.js → browser-CX7jXfXX.js} +1 -1
  9. package/dist/{browser-Cvdznzc0.js.map → browser-CX7jXfXX.js.map} +1 -1
  10. package/dist/{clean-planning-DvhZAUv4.js → clean-planning-D_lz4aQq.js} +2 -2
  11. package/dist/{clean-planning-DvhZAUv4.js.map → clean-planning-D_lz4aQq.js.map} +1 -1
  12. package/dist/clean-planning-x1S-JdmO.js +2 -0
  13. package/dist/cli/index.js +291 -760
  14. package/dist/cli/index.js.map +1 -1
  15. package/dist/{close-issue-Dr7yZmrr.js → close-issue-CaFE0stN.js} +11 -7
  16. package/dist/close-issue-CaFE0stN.js.map +1 -0
  17. package/dist/close-issue-CjcfZI9s.js +2 -0
  18. package/dist/compact-beads-B0_qE1w3.js +2 -0
  19. package/dist/{compact-beads-BCOtIIRl.js → compact-beads-CjFkteSU.js} +2 -2
  20. package/dist/{compact-beads-BCOtIIRl.js.map → compact-beads-CjFkteSU.js.map} +1 -1
  21. package/dist/{config-CRzMQRgA.js → config-BQNKsi9G.js} +2 -2
  22. package/dist/{config-CRzMQRgA.js.map → config-BQNKsi9G.js.map} +1 -1
  23. package/dist/{config-BYgUzQ21.js → config-agyKgF5C.js} +1 -1
  24. package/dist/{config-yaml-BgOACZAB.js → config-yaml-DGbLSMCa.js} +1 -1
  25. package/dist/{config-yaml-BgOACZAB.js.map → config-yaml-DGbLSMCa.js.map} +1 -1
  26. package/dist/{config-yaml-fdyvyL0S.js → config-yaml-Dqt4FWQH.js} +1 -1
  27. package/dist/dashboard/{acceptance-criteria-e5iiHlRx.js → acceptance-criteria-Dk9hhiYj.js} +1 -1
  28. package/dist/dashboard/{acceptance-criteria-e5iiHlRx.js.map → acceptance-criteria-Dk9hhiYj.js.map} +1 -1
  29. package/dist/dashboard/{agent-enrichment-C67LJBgD.js → agent-enrichment-DdO7ZqjI.js} +11 -7
  30. package/dist/dashboard/agent-enrichment-DdO7ZqjI.js.map +1 -0
  31. package/dist/dashboard/{agent-enrichment-Cq0P1cNZ.js → agent-enrichment-dLeGE1fX.js} +1 -1
  32. package/dist/dashboard/{agents-YyO6t5Xa.js → agents-DCpQQ_W5.js} +14 -14
  33. package/dist/dashboard/{agents-YyO6t5Xa.js.map → agents-DCpQQ_W5.js.map} +1 -1
  34. package/dist/dashboard/{agents-BVBVCyat.js → agents-Dgh2TjSp.js} +1 -1
  35. package/dist/dashboard/{archive-planning-h-hAjk0P.js → archive-planning-BmW9UDTr.js} +3 -3
  36. package/dist/dashboard/{archive-planning-h-hAjk0P.js.map → archive-planning-BmW9UDTr.js.map} +1 -1
  37. package/dist/dashboard/{archive-planning-CScs1MOC.js → archive-planning-C3Ebf9yC.js} +1 -1
  38. package/dist/dashboard/{beads-qNB0yAHV.js → beads-Bv-AdX7G.js} +3 -3
  39. package/dist/dashboard/{beads-qNB0yAHV.js.map → beads-Bv-AdX7G.js.map} +1 -1
  40. package/dist/dashboard/{beads-D_FRedEJ.js → beads-By6-X07V.js} +1 -1
  41. package/dist/dashboard/clean-planning-D60L8rPY.js +2 -0
  42. package/dist/dashboard/{clean-planning-qafw99vY.js → clean-planning-VEJu5suh.js} +2 -2
  43. package/dist/dashboard/{clean-planning-qafw99vY.js.map → clean-planning-VEJu5suh.js.map} +1 -1
  44. package/dist/dashboard/close-issue-C2KeSKKJ.js +2 -0
  45. package/dist/dashboard/{close-issue-DfIggeZD.js → close-issue-DtKdsSTm.js} +11 -7
  46. package/dist/dashboard/close-issue-DtKdsSTm.js.map +1 -0
  47. package/dist/dashboard/compact-beads-C7BN5N11.js +2 -0
  48. package/dist/dashboard/{compact-beads-Dt0qTqsC.js → compact-beads-D8Vt3qyv.js} +2 -2
  49. package/dist/dashboard/{compact-beads-Dt0qTqsC.js.map → compact-beads-D8Vt3qyv.js.map} +1 -1
  50. package/dist/dashboard/{config-CUREjHP7.js → config-CDkGjnwy.js} +2 -2
  51. package/dist/dashboard/{config-CUREjHP7.js.map → config-CDkGjnwy.js.map} +1 -1
  52. package/dist/dashboard/{config-BeI3uy-8.js → config-CTXkBATQ.js} +1 -1
  53. package/dist/dashboard/{database-CozA13Wy.js → database-DhqASALP.js} +1 -1
  54. package/dist/dashboard/{database-C0y0hXBx.js → database-cxmQryoh.js} +2 -2
  55. package/dist/dashboard/{database-C0y0hXBx.js.map → database-cxmQryoh.js.map} +1 -1
  56. package/dist/dashboard/{dist-src-oG2iHzgI.js → dist-src-DTm11oQr.js} +1 -1
  57. package/dist/dashboard/{dist-src-oG2iHzgI.js.map → dist-src-DTm11oQr.js.map} +1 -1
  58. package/dist/dashboard/{event-store-D7kLBd07.js → event-store-VWWUmOfn.js} +1 -1
  59. package/dist/dashboard/{event-store-O9q0Gweh.js → event-store-vSmAA3Zp.js} +9 -4
  60. package/dist/dashboard/event-store-vSmAA3Zp.js.map +1 -0
  61. package/dist/dashboard/{factory-BnLdiQW-.js → factory-C8nhLGHB.js} +3 -3
  62. package/dist/dashboard/{factory-BnLdiQW-.js.map → factory-C8nhLGHB.js.map} +1 -1
  63. package/dist/dashboard/{feedback-writer-DyovUANg.js → feedback-writer-CudSe1WK.js} +2 -2
  64. package/dist/dashboard/{feedback-writer-DyovUANg.js.map → feedback-writer-CudSe1WK.js.map} +1 -1
  65. package/dist/dashboard/{feedback-writer-gSUv_W0h.js → feedback-writer-Wgv1cd1r.js} +1 -1
  66. package/dist/dashboard/{git-utils-BJRioREj.js → git-utils-C1m4SwAe.js} +1 -1
  67. package/dist/dashboard/{git-utils-BJRioREj.js.map → git-utils-C1m4SwAe.js.map} +1 -1
  68. package/dist/dashboard/{git-utils-BtCRddq3.js → git-utils-DQI8EYoj.js} +1 -1
  69. package/dist/dashboard/{github-app-XO-LBUGk.js → github-app-DClWjjHr.js} +1 -1
  70. package/dist/dashboard/{github-app-XO-LBUGk.js.map → github-app-DClWjjHr.js.map} +1 -1
  71. package/dist/dashboard/{health-events-db-584nYgJB.js → health-events-db-BMXQfInV.js} +1 -1
  72. package/dist/dashboard/{health-events-db-B3ChzN65.js → health-events-db-Do4NrOhC.js} +2 -2
  73. package/dist/dashboard/{health-events-db-B3ChzN65.js.map → health-events-db-Do4NrOhC.js.map} +1 -1
  74. package/dist/dashboard/{hooks-CKhs3N68.js → hooks-CB4T47NC.js} +1 -1
  75. package/dist/dashboard/{hooks-CErbP8Oq.js → hooks-CjqXOlNb.js} +2 -2
  76. package/dist/dashboard/{hooks-CErbP8Oq.js.map → hooks-CjqXOlNb.js.map} +1 -1
  77. package/dist/dashboard/hume-CA2pftu_.js +3 -0
  78. package/dist/dashboard/{hume-CX_U3Qha.js → hume-JsAlMOJC.js} +2 -2
  79. package/dist/dashboard/{hume-CX_U3Qha.js.map → hume-JsAlMOJC.js.map} +1 -1
  80. package/dist/dashboard/{inspect-agent-B57kGDUV.js → inspect-agent-7eour7EA.js} +3 -3
  81. package/dist/dashboard/{inspect-agent-B57kGDUV.js.map → inspect-agent-7eour7EA.js.map} +1 -1
  82. package/dist/dashboard/{io-yGovuG4U.js → io-CWlFW78i.js} +1 -1
  83. package/dist/dashboard/{io-AJg-mzFi.js → io-DKS6359z.js} +1 -1
  84. package/dist/dashboard/{io-AJg-mzFi.js.map → io-DKS6359z.js.map} +1 -1
  85. package/dist/dashboard/issue-id-vwYJdsf8.js +62 -0
  86. package/dist/dashboard/issue-id-vwYJdsf8.js.map +1 -0
  87. package/dist/dashboard/{issue-service-singleton-DQK42EqH.js → issue-service-singleton-Co__-6kL.js} +1 -1
  88. package/dist/dashboard/{issue-service-singleton-sb2HkB9f.js → issue-service-singleton-Wv4xBm3y.js} +7 -7
  89. package/dist/dashboard/{issue-service-singleton-sb2HkB9f.js.map → issue-service-singleton-Wv4xBm3y.js.map} +1 -1
  90. package/dist/dashboard/{label-cleanup-CZEsbtq9.js → label-cleanup-nVKTmIIW.js} +7 -4
  91. package/dist/dashboard/label-cleanup-nVKTmIIW.js.map +1 -0
  92. package/dist/dashboard/lifecycle-BcUmtkR4.js +7 -0
  93. package/dist/dashboard/{merge-agent-GLtMEsTu.js → merge-agent-CGN3TT0a.js} +1 -1
  94. package/dist/dashboard/{merge-agent-twroFuAh.js → merge-agent-yudQOPZc.js} +148 -46
  95. package/dist/dashboard/merge-agent-yudQOPZc.js.map +1 -0
  96. package/dist/dashboard/{paths-COdEvoXR.js → paths-BDyJ7BiV.js} +19 -2
  97. package/dist/dashboard/{paths-COdEvoXR.js.map → paths-BDyJ7BiV.js.map} +1 -1
  98. package/dist/dashboard/{pipeline-notifier-DM5AHG5Q.js → pipeline-notifier-CCSN-jar.js} +1 -1
  99. package/dist/dashboard/{pipeline-notifier-DM5AHG5Q.js.map → pipeline-notifier-CCSN-jar.js.map} +1 -1
  100. package/dist/dashboard/{plan-utils-BkCIhn3B.js → plan-utils-Bkcsqr_s.js} +3 -3
  101. package/dist/dashboard/{plan-utils-BkCIhn3B.js.map → plan-utils-Bkcsqr_s.js.map} +1 -1
  102. package/dist/dashboard/{prd-draft-D09Afalc.js → prd-draft-BD8oMkZ1.js} +2 -2
  103. package/dist/dashboard/{prd-draft-D09Afalc.js.map → prd-draft-BD8oMkZ1.js.map} +1 -1
  104. package/dist/dashboard/{projection-cache-DQ9zegkK.js → projection-cache-C0EL8s8h.js} +1 -1
  105. package/dist/dashboard/{projection-cache-DQ9zegkK.js.map → projection-cache-C0EL8s8h.js.map} +1 -1
  106. package/dist/dashboard/{projects-DyT3vSy-.js → projects-C5ozxjwP.js} +1 -1
  107. package/dist/dashboard/{projects-Cq3TWdPS.js → projects-CFVl4oHn.js} +25 -13
  108. package/dist/dashboard/projects-CFVl4oHn.js.map +1 -0
  109. package/dist/dashboard/{providers-Ck2sQd_F.js → providers-B5Y4H2Mg.js} +4 -4
  110. package/dist/dashboard/providers-B5Y4H2Mg.js.map +1 -0
  111. package/dist/dashboard/{providers-DVQnDekG.js → providers-csVZVPkE.js} +1 -1
  112. package/dist/dashboard/public/assets/{dist-CCJbQrSB.js → dist-BaQPC-c6.js} +1 -1
  113. package/dist/dashboard/public/assets/index-ByLmYGhW.js +212 -0
  114. package/dist/dashboard/public/assets/index-OEEbThNN.css +1 -0
  115. package/dist/dashboard/public/index.html +2 -2
  116. package/dist/dashboard/rally-6McpKKRa.js +3 -0
  117. package/dist/dashboard/{rally-Cwuae-4C.js → rally-YjFRxIiC.js} +2 -2
  118. package/dist/dashboard/{rally-Cwuae-4C.js.map → rally-YjFRxIiC.js.map} +1 -1
  119. package/dist/dashboard/{rally-api-DSUxm7EO.js → rally-api-C0WqCSkT.js} +1 -1
  120. package/dist/dashboard/{rally-api-DSUxm7EO.js.map → rally-api-C0WqCSkT.js.map} +1 -1
  121. package/dist/dashboard/{rally-api-CEH5KZi4.js → rally-api-DNttdCW4.js} +1 -1
  122. package/dist/dashboard/{remote-BHTTMpJJ.js → remote-Cigqjj3f.js} +2 -2
  123. package/dist/dashboard/{remote-BXo_iIku.js → remote-ObpNZ7hF.js} +2 -2
  124. package/dist/dashboard/{remote-BXo_iIku.js.map → remote-ObpNZ7hF.js.map} +1 -1
  125. package/dist/dashboard/{remote-agents-CTKVhFFY.js → remote-agents-Bf3GuM7t.js} +1 -1
  126. package/dist/dashboard/{remote-agents-C0_0LLNd.js → remote-agents-DFyjT1Le.js} +1 -1
  127. package/dist/dashboard/{remote-agents-C0_0LLNd.js.map → remote-agents-DFyjT1Le.js.map} +1 -1
  128. package/dist/dashboard/{review-status-CK3eBGyb.js → review-status-BtXqWBhS.js} +1 -1
  129. package/dist/dashboard/{review-status-CV55Tl-n.js → review-status-Bymwzh2i.js} +44 -4
  130. package/dist/dashboard/{review-status-CV55Tl-n.js.map → review-status-Bymwzh2i.js.map} +1 -1
  131. package/dist/dashboard/server.js +559 -253
  132. package/dist/dashboard/server.js.map +1 -1
  133. package/dist/dashboard/{settings-CuHV-wcv.js → settings-BHlDG7TK.js} +2 -2
  134. package/dist/dashboard/settings-BHlDG7TK.js.map +1 -0
  135. package/dist/dashboard/settings-XWvDcj-D.js +2 -0
  136. package/dist/dashboard/{shadow-engineering-BUeZunaE.js → shadow-engineering-lIn1W_95.js} +1 -1
  137. package/dist/dashboard/{shadow-engineering-BUeZunaE.js.map → shadow-engineering-lIn1W_95.js.map} +1 -1
  138. package/dist/dashboard/{shadow-state-DHQ-kASN.js → shadow-state-BIexcxkv.js} +1 -1
  139. package/dist/dashboard/{shadow-state-DHQ-kASN.js.map → shadow-state-BIexcxkv.js.map} +1 -1
  140. package/dist/dashboard/{spawn-planning-session-8FFAqLdK.js → spawn-planning-session-33Jf-d5T.js} +6 -6
  141. package/dist/dashboard/{spawn-planning-session-8FFAqLdK.js.map → spawn-planning-session-33Jf-d5T.js.map} +1 -1
  142. package/dist/dashboard/{spawn-planning-session-U0Lqpjen.js → spawn-planning-session-D5hrVdWM.js} +1 -1
  143. package/dist/dashboard/{specialist-context-ColzlmGE.js → specialist-context-DGukHSn8.js} +6 -6
  144. package/dist/dashboard/{specialist-context-ColzlmGE.js.map → specialist-context-DGukHSn8.js.map} +1 -1
  145. package/dist/dashboard/{specialist-logs-BhmDpFIq.js → specialist-logs-CIw4qfTy.js} +1 -1
  146. package/dist/dashboard/{specialists-C6s3U6tX.js → specialists-B_zrayaP.js} +37 -36
  147. package/dist/dashboard/specialists-B_zrayaP.js.map +1 -0
  148. package/dist/dashboard/{specialists-Cny632-T.js → specialists-Cp-PgspS.js} +1 -1
  149. package/dist/dashboard/{test-agent-queue-tqI4VDsu.js → test-agent-queue-ypF_ecHo.js} +4 -4
  150. package/dist/dashboard/{test-agent-queue-tqI4VDsu.js.map → test-agent-queue-ypF_ecHo.js.map} +1 -1
  151. package/dist/dashboard/{tldr-daemon-BNFyS7W_.js → tldr-daemon-B_oLRD8z.js} +2 -2
  152. package/dist/dashboard/{tldr-daemon-BNFyS7W_.js.map → tldr-daemon-B_oLRD8z.js.map} +1 -1
  153. package/dist/dashboard/{tldr-daemon-A6JqC59u.js → tldr-daemon-Cfs0bXTi.js} +1 -1
  154. package/dist/dashboard/{tmux-DYGAVJfb.js → tmux-BzxdKItf.js} +1 -1
  155. package/dist/dashboard/{tmux-IlN1Slv-.js → tmux-LwG0tHhU.js} +2 -2
  156. package/dist/dashboard/{tmux-IlN1Slv-.js.map → tmux-LwG0tHhU.js.map} +1 -1
  157. package/dist/dashboard/{tracker-config-BzNLnmcE.js → tracker-config-BP59uH4V.js} +1 -1
  158. package/dist/dashboard/{tracker-config-CNM_5rEf.js → tracker-config-e7ph1QqT.js} +2 -2
  159. package/dist/dashboard/{tracker-config-CNM_5rEf.js.map → tracker-config-e7ph1QqT.js.map} +1 -1
  160. package/dist/dashboard/{tunnel-D2BkwU7k.js → tunnel-0RzzuXPf.js} +1 -1
  161. package/dist/dashboard/{tunnel-Dub2hiAA.js → tunnel-DldbBPWL.js} +2 -2
  162. package/dist/dashboard/{tunnel-Dub2hiAA.js.map → tunnel-DldbBPWL.js.map} +1 -1
  163. package/dist/dashboard/{types-CWA-o4UN.js → types-RKZjGE5N.js} +1 -1
  164. package/dist/dashboard/{types-CWA-o4UN.js.map → types-RKZjGE5N.js.map} +1 -1
  165. package/dist/dashboard/{vtt-parser-BAXygRf0.js → vtt-parser-99vFekRQ.js} +1 -1
  166. package/dist/dashboard/{vtt-parser-BAXygRf0.js.map → vtt-parser-99vFekRQ.js.map} +1 -1
  167. package/dist/dashboard/{work-agent-prompt-JYq_OugP.js → work-agent-prompt-fCg67nyo.js} +65 -10
  168. package/dist/dashboard/{work-agent-prompt-JYq_OugP.js.map → work-agent-prompt-fCg67nyo.js.map} +1 -1
  169. package/dist/dashboard/{work-type-router-Cxp8_ur2.js → work-type-router-CWVW2Wk_.js} +1 -1
  170. package/dist/dashboard/{work-type-router-Cxp8_ur2.js.map → work-type-router-CWVW2Wk_.js.map} +1 -1
  171. package/dist/dashboard/{work-type-router-Com2amST.js → work-type-router-Di5gCQwh.js} +1 -1
  172. package/dist/dashboard/{workflows-N1UTipYl.js → workflows-BSMipN07.js} +35 -17
  173. package/dist/dashboard/workflows-BSMipN07.js.map +1 -0
  174. package/dist/dashboard/workflows-DaYWQIS2.js +2 -0
  175. package/dist/dashboard/{workspace-config-cmp5_ipD.js → workspace-config-DVDR-Ukh.js} +1 -1
  176. package/dist/dashboard/workspace-config-DVDR-Ukh.js.map +1 -0
  177. package/dist/dashboard/{workspace-manager-CjpWPgzL.js → workspace-manager-BYfzs_t2.js} +1 -1
  178. package/dist/dashboard/{workspace-manager-D_y9ZmW_.js → workspace-manager-C7OfT62A.js} +44 -24
  179. package/dist/dashboard/workspace-manager-C7OfT62A.js.map +1 -0
  180. package/dist/{dns-BKzHm-2q.js → dns-D_aKQJjb.js} +1 -1
  181. package/dist/{dns-DZwOWvVO.js → dns-Yxq4NNS7.js} +1 -1
  182. package/dist/{dns-DZwOWvVO.js.map → dns-Yxq4NNS7.js.map} +1 -1
  183. package/dist/{factory-DFu3IT4r.js → factory-BRBGw6OB.js} +1 -1
  184. package/dist/{factory-DfzczxN1.js → factory-DzsOiZVc.js} +3 -3
  185. package/dist/{factory-DfzczxN1.js.map → factory-DzsOiZVc.js.map} +1 -1
  186. package/dist/{feedback-writer-CwdnOkPO.js → feedback-writer-ygXN5F9N.js} +2 -2
  187. package/dist/{feedback-writer-CwdnOkPO.js.map → feedback-writer-ygXN5F9N.js.map} +1 -1
  188. package/dist/{github-app-CHKwxOeQ.js → github-app-DykduJ0X.js} +1 -1
  189. package/dist/{github-app-CHKwxOeQ.js.map → github-app-DykduJ0X.js.map} +1 -1
  190. package/dist/hume-9nv1VmMV.js +3 -0
  191. package/dist/{hume-DnV-tDsh.js → hume-DoCbph2h.js} +2 -2
  192. package/dist/{hume-DnV-tDsh.js.map → hume-DoCbph2h.js.map} +1 -1
  193. package/dist/index.d.ts +17 -2
  194. package/dist/index.d.ts.map +1 -1
  195. package/dist/index.js +8 -7
  196. package/dist/issue-id-CAcekoIw.js +62 -0
  197. package/dist/issue-id-CAcekoIw.js.map +1 -0
  198. package/dist/{label-cleanup-31ElPqqv.js → label-cleanup-C8R9Rspn.js} +7 -4
  199. package/dist/label-cleanup-C8R9Rspn.js.map +1 -0
  200. package/dist/{manifest-DL0oDbpv.js → manifest-B4ghOD-V.js} +1 -1
  201. package/dist/{manifest-DL0oDbpv.js.map → manifest-B4ghOD-V.js.map} +1 -1
  202. package/dist/{merge-agent-VQH9z9t8.js → merge-agent-DlUiUanN.js} +86 -33
  203. package/dist/merge-agent-DlUiUanN.js.map +1 -0
  204. package/dist/{paths-lMaxrYtT.js → paths-CDJ_HsbN.js} +19 -2
  205. package/dist/{paths-lMaxrYtT.js.map → paths-CDJ_HsbN.js.map} +1 -1
  206. package/dist/{pipeline-notifier-OJ-d3Y60.js → pipeline-notifier-XgDdCdvT.js} +1 -1
  207. package/dist/{pipeline-notifier-OJ-d3Y60.js.map → pipeline-notifier-XgDdCdvT.js.map} +1 -1
  208. package/dist/{projects-CvLepaxC.js → projects-Bk-5QhFQ.js} +25 -13
  209. package/dist/projects-Bk-5QhFQ.js.map +1 -0
  210. package/dist/{projects-DMWmPeIU.js → projects-DhU7rAVN.js} +1 -1
  211. package/dist/{providers-DcCPZ5K4.js → providers-DSU1vfQF.js} +4 -4
  212. package/dist/providers-DSU1vfQF.js.map +1 -0
  213. package/dist/rally-DdPvGa-w.js +3 -0
  214. package/dist/{rally-uUUZXp1h.js → rally-Dy00NElU.js} +1 -1
  215. package/dist/{rally-uUUZXp1h.js.map → rally-Dy00NElU.js.map} +1 -1
  216. package/dist/{remote-CkLBqLJc.js → remote-CYiOJg0q.js} +2 -2
  217. package/dist/{remote-CkLBqLJc.js.map → remote-CYiOJg0q.js.map} +1 -1
  218. package/dist/{remote-agents-C5Bd2fgt.js → remote-agents-CZXrUF4f.js} +1 -1
  219. package/dist/{remote-agents-C5Bd2fgt.js.map → remote-agents-CZXrUF4f.js.map} +1 -1
  220. package/dist/{remote-agents-BTzD-wMQ.js → remote-agents-ycHHVsgf.js} +1 -1
  221. package/dist/{remote-workspace-Dxghqiti.js → remote-workspace-CA33UuVI.js} +4 -4
  222. package/dist/{remote-workspace-Dxghqiti.js.map → remote-workspace-CA33UuVI.js.map} +1 -1
  223. package/dist/{review-status-2TdtHNcs.js → review-status-D6H2WOw8.js} +1 -1
  224. package/dist/{review-status-Bm1bWNEa.js → review-status-DEDvCKMP.js} +44 -4
  225. package/dist/{review-status-Bm1bWNEa.js.map → review-status-DEDvCKMP.js.map} +1 -1
  226. package/dist/{tracker-C_62ukEq.js → settings-BcWPTrua.js} +7 -199
  227. package/dist/settings-BcWPTrua.js.map +1 -0
  228. package/dist/shadow-state-BZzxfEGw.js +2 -0
  229. package/dist/{shadow-state-CFFHf05M.js → shadow-state-CE3dQfll.js} +1 -1
  230. package/dist/{shadow-state-CFFHf05M.js.map → shadow-state-CE3dQfll.js.map} +1 -1
  231. package/dist/{specialist-context-BdNFsfMG.js → specialist-context-BAUWL1Fl.js} +6 -6
  232. package/dist/{specialist-context-BdNFsfMG.js.map → specialist-context-BAUWL1Fl.js.map} +1 -1
  233. package/dist/{specialist-logs-CLztE_bE.js → specialist-logs-DQKKQV9B.js} +1 -1
  234. package/dist/{specialists-aUoUVWsN.js → specialists-Bfb9ATzw.js} +1 -1
  235. package/dist/{specialists-DEKqgkxp.js → specialists-D7Kj5o6s.js} +35 -34
  236. package/dist/specialists-D7Kj5o6s.js.map +1 -0
  237. package/dist/sync-DMfgd389.js +693 -0
  238. package/dist/sync-DMfgd389.js.map +1 -0
  239. package/dist/sync-TL6y-8K6.js +2 -0
  240. package/dist/{tldr-daemon-BCEFPItr.js → tldr-daemon-CFx4LXAl.js} +2 -2
  241. package/dist/{tldr-daemon-BCEFPItr.js.map → tldr-daemon-CFx4LXAl.js.map} +1 -1
  242. package/dist/{tldr-daemon-xBAx4cBE.js → tldr-daemon-D_EooADG.js} +1 -1
  243. package/dist/{tmux-DN6H886Y.js → tmux-CBtui_Cl.js} +1 -1
  244. package/dist/{tmux-CKdNxxJx.js → tmux-D6Ah4I8z.js} +2 -2
  245. package/dist/{tmux-CKdNxxJx.js.map → tmux-D6Ah4I8z.js.map} +1 -1
  246. package/dist/tracker-BhYYvU3p.js +198 -0
  247. package/dist/tracker-BhYYvU3p.js.map +1 -0
  248. package/dist/{tracker-utils-CVU2W1sX.js → tracker-utils-ChQyut8w.js} +34 -12
  249. package/dist/tracker-utils-ChQyut8w.js.map +1 -0
  250. package/dist/{traefik-DHgBoWXX.js → traefik-C80EbDu_.js} +4 -4
  251. package/dist/{traefik-DHgBoWXX.js.map → traefik-C80EbDu_.js.map} +1 -1
  252. package/dist/{traefik-BR-edbZv.js → traefik-CgHl7Bge.js} +1 -1
  253. package/dist/{tunnel-BZO9Q5oe.js → tunnel-DXOJ1wMM.js} +1 -1
  254. package/dist/{tunnel-Bl1qNSyQ.js → tunnel-DzXEPwIc.js} +2 -2
  255. package/dist/{tunnel-Bl1qNSyQ.js.map → tunnel-DzXEPwIc.js.map} +1 -1
  256. package/dist/{types-DewGdaIP.js → types-BhJj1SP1.js} +1 -1
  257. package/dist/{types-DewGdaIP.js.map → types-BhJj1SP1.js.map} +1 -1
  258. package/dist/{work-type-router-CS2BB1vS.js → work-type-router-CHjciPyS.js} +3 -3
  259. package/dist/{work-type-router-CS2BB1vS.js.map → work-type-router-CHjciPyS.js.map} +1 -1
  260. package/dist/{workspace-config-CNXOpKuj.js → workspace-config-fUafvYMp.js} +1 -1
  261. package/dist/workspace-config-fUafvYMp.js.map +1 -0
  262. package/dist/workspace-manager-B9jS4Dsq.js +3 -0
  263. package/dist/{workspace-manager-CncdZkIy.js → workspace-manager-DuLhnzJV.js} +112 -27
  264. package/dist/workspace-manager-DuLhnzJV.js.map +1 -0
  265. package/package.json +2 -1
  266. package/scripts/post-merge-deploy.sh +25 -5
  267. package/scripts/record-cost-event.js +57 -7
  268. package/scripts/record-cost-event.js.map +1 -1
  269. package/skills/pan-help/SKILL.md +1 -1
  270. package/skills/pan-sync/SKILL.md +6 -6
  271. package/skills/workspace-add-repo/skill.md +46 -0
  272. package/templates/claude-md/sections/warnings.md +15 -2
  273. package/dist/clean-planning-sZXvy3Y5.js +0 -2
  274. package/dist/close-issue-Dml437qV.js +0 -2
  275. package/dist/close-issue-Dr7yZmrr.js.map +0 -1
  276. package/dist/compact-beads-iu218JcO.js +0 -2
  277. package/dist/dashboard/agent-enrichment-C67LJBgD.js.map +0 -1
  278. package/dist/dashboard/clean-planning-DCu3cOTu.js +0 -2
  279. package/dist/dashboard/close-issue-DfIggeZD.js.map +0 -1
  280. package/dist/dashboard/close-issue-DwdwYtar.js +0 -2
  281. package/dist/dashboard/compact-beads-DXY2fK2s.js +0 -2
  282. package/dist/dashboard/event-store-O9q0Gweh.js.map +0 -1
  283. package/dist/dashboard/hume-MZndNDVU.js +0 -3
  284. package/dist/dashboard/label-cleanup-CZEsbtq9.js.map +0 -1
  285. package/dist/dashboard/lifecycle-ZTYdrr2O.js +0 -7
  286. package/dist/dashboard/merge-agent-twroFuAh.js.map +0 -1
  287. package/dist/dashboard/projects-Cq3TWdPS.js.map +0 -1
  288. package/dist/dashboard/providers-Ck2sQd_F.js.map +0 -1
  289. package/dist/dashboard/public/assets/index-CpSmB2ts.css +0 -1
  290. package/dist/dashboard/public/assets/index-yarWhi0M.js +0 -214
  291. package/dist/dashboard/rally-CQ1OBJrJ.js +0 -3
  292. package/dist/dashboard/settings-CuHV-wcv.js.map +0 -1
  293. package/dist/dashboard/settings-DMeGBRsk.js +0 -2
  294. package/dist/dashboard/specialists-C6s3U6tX.js.map +0 -1
  295. package/dist/dashboard/workflows-B2ARUpOa.js +0 -2
  296. package/dist/dashboard/workflows-N1UTipYl.js.map +0 -1
  297. package/dist/dashboard/workspace-config-cmp5_ipD.js.map +0 -1
  298. package/dist/dashboard/workspace-manager-D_y9ZmW_.js.map +0 -1
  299. package/dist/hume-BjmwmJ9E.js +0 -3
  300. package/dist/label-cleanup-31ElPqqv.js.map +0 -1
  301. package/dist/merge-agent-VQH9z9t8.js.map +0 -1
  302. package/dist/projects-CvLepaxC.js.map +0 -1
  303. package/dist/providers-DcCPZ5K4.js.map +0 -1
  304. package/dist/rally-DR9x8--6.js +0 -3
  305. package/dist/shadow-state-p3jpGRPJ.js +0 -2
  306. package/dist/specialists-DEKqgkxp.js.map +0 -1
  307. package/dist/tracker-C_62ukEq.js.map +0 -1
  308. package/dist/tracker-utils-CVU2W1sX.js.map +0 -1
  309. package/dist/workspace-config-CNXOpKuj.js.map +0 -1
  310. package/dist/workspace-manager-CncdZkIy.js.map +0 -1
  311. package/dist/workspace-manager-Cx0r2Jnv.js +0 -3
@@ -0,0 +1 @@
1
+ {"version":3,"file":"sync-DMfgd389.js","names":[],"sources":["../src/lib/config-migration.ts","../src/lib/multi-tool-sync.ts","../src/cli/commands/sync.ts"],"sourcesContent":["/**\n * Configuration Migration\n *\n * Migrates from legacy settings.json format to new config.yaml format.\n * Legacy presets are no longer supported - all selection is now smart/capability-based.\n */\n\nimport { readFileSync, writeFileSync, existsSync, renameSync, readdirSync, lstatSync, readlinkSync, unlinkSync } from 'fs';\nimport { join } from 'path';\nimport { homedir } from 'os';\nimport yaml from 'js-yaml';\nimport { loadSettings, type SettingsConfig } from './settings.js';\nimport { type YamlConfig } from './config-yaml.js';\nimport { type WorkTypeId } from './work-types.js';\nimport { type ModelId } from './settings.js';\n\n/** Path to legacy settings file */\nconst LEGACY_SETTINGS_PATH = join(homedir(), '.panopticon', 'settings.json');\n\n/** Path to new config file */\nconst NEW_CONFIG_PATH = join(homedir(), '.panopticon', 'config.yaml');\n\n/** Path to backup of legacy settings */\nconst BACKUP_SETTINGS_PATH = join(homedir(), '.panopticon', 'settings.json.backup');\n\n/**\n * Check if migration is needed\n * Returns true if settings.json exists and config.yaml doesn't\n */\nexport function needsMigration(): boolean {\n return existsSync(LEGACY_SETTINGS_PATH) && !existsSync(NEW_CONFIG_PATH);\n}\n\n/**\n * Check if legacy settings exist (even if already migrated)\n */\nexport function hasLegacySettings(): boolean {\n return existsSync(LEGACY_SETTINGS_PATH);\n}\n\n/**\n * Determine which providers are enabled based on API keys\n */\nfunction detectEnabledProviders(settings: SettingsConfig): {\n anthropic: boolean;\n openai: boolean;\n google: boolean;\n zai: boolean;\n kimi: boolean;\n} {\n return {\n anthropic: true, // Always enabled\n openai: !!settings.api_keys.openai,\n google: !!settings.api_keys.google,\n zai: !!settings.api_keys.zai,\n kimi: false, // Legacy settings 
don't have Kimi\n };\n}\n\n/**\n * Convert legacy settings.json to new config.yaml format\n */\nexport function convertToYamlConfig(settings: SettingsConfig): YamlConfig {\n const providers = detectEnabledProviders(settings);\n\n const config: YamlConfig = {\n models: {\n providers,\n overrides: {}, // No overrides from legacy\n gemini_thinking_level: 3,\n },\n api_keys: settings.api_keys,\n };\n\n return config;\n}\n\n/**\n * Migration options\n */\nexport interface MigrationOptions {\n /** Create backup of legacy settings (default: true) */\n backup?: boolean;\n /** Delete legacy settings after migration (default: false) */\n deleteLegacy?: boolean;\n /** Dry run - don't actually write files (default: false) */\n dryRun?: boolean;\n}\n\n/**\n * Migration result\n */\nexport interface MigrationResult {\n success: boolean;\n overridesCount: number;\n providersEnabled: string[];\n message: string;\n error?: string;\n}\n\nexport function migrateConfig(options: MigrationOptions = {}): MigrationResult {\n const { backup = true, deleteLegacy = false, dryRun = false } = options;\n\n try {\n // Check if migration is needed\n if (!needsMigration()) {\n if (existsSync(NEW_CONFIG_PATH)) {\n return {\n success: true,\n overridesCount: 0,\n providersEnabled: ['anthropic'],\n message: 'Config already migrated (config.yaml exists)',\n };\n }\n return {\n success: false,\n overridesCount: 0,\n providersEnabled: [],\n message: 'No legacy settings.json found to migrate',\n };\n }\n\n // Load legacy settings\n const settings = loadSettings();\n\n // Convert to YAML config\n const yamlConfig = convertToYamlConfig(settings);\n\n // Generate YAML content\n const yamlContent = yaml.dump(yamlConfig, {\n indent: 2,\n lineWidth: 120,\n noRefs: true,\n });\n\n // Dry run - just return what would happen\n if (dryRun) {\n const providersEnabled = Object.entries(yamlConfig.models?.providers || {})\n .filter(([_, enabled]) => enabled)\n .map(([name]) => name);\n\n return {\n success: true,\n 
overridesCount: Object.keys(yamlConfig.models?.overrides || {}).length,\n providersEnabled,\n message: `Would migrate to smart selection with ${providersEnabled.length} providers enabled`,\n };\n }\n\n // Write new config.yaml\n writeFileSync(NEW_CONFIG_PATH, yamlContent, 'utf-8');\n\n // Back up legacy settings if requested\n if (backup) {\n const legacyContent = readFileSync(LEGACY_SETTINGS_PATH, 'utf-8');\n writeFileSync(BACKUP_SETTINGS_PATH, legacyContent, 'utf-8');\n }\n\n // Delete legacy settings if requested\n if (deleteLegacy) {\n renameSync(LEGACY_SETTINGS_PATH, `${LEGACY_SETTINGS_PATH}.migrated`);\n }\n\n const providersEnabled = Object.entries(yamlConfig.models?.providers || {})\n .filter(([_, enabled]) => enabled)\n .map(([name]) => name);\n\n return {\n success: true,\n overridesCount: Object.keys(yamlConfig.models?.overrides || {}).length,\n providersEnabled,\n message: `Successfully migrated to smart selection with ${providersEnabled.length} providers`,\n };\n } catch (error: any) {\n return {\n success: false,\n overridesCount: 0,\n providersEnabled: [],\n message: 'Migration failed',\n error: error.message,\n };\n }\n}\n\n/**\n * Get migration status\n */\nexport function getMigrationStatus(): {\n needsMigration: boolean;\n hasLegacySettings: boolean;\n hasNewConfig: boolean;\n} {\n return {\n needsMigration: needsMigration(),\n hasLegacySettings: existsSync(LEGACY_SETTINGS_PATH),\n hasNewConfig: existsSync(NEW_CONFIG_PATH),\n };\n}\n\n/**\n * Clean up legacy runtime symlinks from removed runtimes.\n *\n * PAN-142: Panopticon consolidated to Claude Code as the sole runtime.\n * This removes any Panopticon-managed symlinks from legacy runtime directories\n * (codex, cursor, gemini, opencode).\n */\nexport interface LegacyCleanupResult {\n cleaned: string[];\n total: number;\n errors: string[];\n}\n\nexport function cleanupLegacyRuntimeSymlinks(): LegacyCleanupResult {\n const legacyDirs = [\n { name: 'codex', base: join(homedir(), '.codex') },\n { 
name: 'cursor', base: join(homedir(), '.cursor') },\n { name: 'gemini', base: join(homedir(), '.gemini') },\n { name: 'opencode', base: join(homedir(), '.opencode') },\n ];\n\n const cleaned: string[] = [];\n const errors: string[] = [];\n\n for (const { name, base } of legacyDirs) {\n for (const subdir of ['skills', 'commands', 'agents']) {\n const dir = join(base, subdir);\n if (!existsSync(dir)) continue;\n\n try {\n const entries = readdirSync(dir);\n for (const entry of entries) {\n const entryPath = join(dir, entry);\n try {\n const stats = lstatSync(entryPath);\n if (!stats.isSymbolicLink()) continue;\n\n const linkTarget = readlinkSync(entryPath);\n // Only remove symlinks pointing to Panopticon directories\n if (linkTarget.includes('.panopticon')) {\n unlinkSync(entryPath);\n cleaned.push(`${name}/${subdir}/${entry}`);\n }\n } catch (err: any) {\n errors.push(`${name}/${subdir}/${entry}: ${err.message}`);\n }\n }\n } catch (err: any) {\n // Directory may not be readable, that's fine\n errors.push(`${name}/${subdir}: ${err.message}`);\n }\n }\n }\n\n return { cleaned, total: cleaned.length, errors };\n}\n\n/**\n * Migrate legacy sync config by stripping the 'targets' field from config.toml.\n * This handles users who had `targets = [\"claude\", \"codex\"]` in their config.\n */\nexport function migrateSyncTargets(): { migrated: boolean; hadNonClaudeTargets: boolean } {\n const configPath = join(homedir(), '.panopticon', 'config.toml');\n\n if (!existsSync(configPath)) {\n return { migrated: false, hadNonClaudeTargets: false };\n }\n\n try {\n const content = readFileSync(configPath, 'utf-8');\n\n // Check if targets field exists\n const targetsMatch = content.match(/^targets\\s*=\\s*\\[([^\\]]*)\\]/m);\n if (!targetsMatch) {\n return { migrated: false, hadNonClaudeTargets: false };\n }\n\n // Check if non-claude targets were configured\n const targetsStr = targetsMatch[1];\n const hadNonClaudeTargets = /codex|cursor|gemini|opencode/i.test(targetsStr);\n\n 
// Remove the targets line\n const newContent = content.replace(/^targets\\s*=\\s*\\[[^\\]]*\\]\\s*\\n?/m, '');\n writeFileSync(configPath, newContent, 'utf-8');\n\n return { migrated: true, hadNonClaudeTargets };\n } catch {\n return { migrated: false, hadNonClaudeTargets: false };\n }\n}\n","/**\n * Multi-Tool Skill Sync\n *\n * Writes Panopticon skills to other AI tool formats so skills authored once\n * in .pan/skills/ are available across all configured tools.\n *\n * Configured via `tools.also_sync` in ~/.panopticon/config.yaml and .pan.yaml.\n * Per-project .pan.yaml values are merged additively with global config.\n *\n * Supported targets:\n * cursor → .cursor/rules/<skill-name>.mdc\n * codex → AGENTS.md (named blocks)\n * windsurf → .windsurf/rules/<skill-name>.md\n * cline → .clinerules/<skill-name>.md\n * copilot → .github/instructions/<skill-name>.instructions.md\n * aider → CONVENTIONS.md (named blocks)\n */\n\nimport { existsSync, mkdirSync, readFileSync, writeFileSync, readdirSync } from 'fs';\nimport { join } from 'path';\nimport { homedir } from 'os';\nimport yaml from 'js-yaml';\nimport { PANOPTICON_HOME } from './paths.js';\n\nexport type AlsoSyncTool = 'cursor' | 'codex' | 'windsurf' | 'cline' | 'copilot' | 'aider';\n\nexport interface MultiToolSyncResult {\n tool: AlsoSyncTool;\n written: string[];\n skipped: string[];\n errors: string[];\n}\n\n/** Strip YAML frontmatter from a skill markdown file */\nfunction stripFrontmatter(content: string): string {\n if (!content.startsWith('---')) return content;\n const end = content.indexOf('\\n---', 4);\n if (end === -1) return content;\n return content.slice(end + 4).trimStart();\n}\n\n/** Extract the skill name from frontmatter, or fall back to dir name */\nfunction extractSkillName(content: string, fallback: string): string {\n if (!content.startsWith('---')) return fallback;\n const end = content.indexOf('\\n---', 4);\n if (end === -1) return fallback;\n const frontmatter = content.slice(4, 
end);\n const match = frontmatter.match(/^name:\\s*(.+)$/m);\n return match ? match[1].trim() : fallback;\n}\n\n/** Read main SKILL.md content for a skill directory */\nfunction readSkillContent(skillDir: string): string | null {\n const skillMd = join(skillDir, 'SKILL.md');\n if (!existsSync(skillMd)) {\n // Fallback: any .md file in root\n const files = existsSync(skillDir) ? readdirSync(skillDir).filter(f => f.endsWith('.md')) : [];\n if (files.length === 0) return null;\n return readFileSync(join(skillDir, files[0]), 'utf-8');\n }\n return readFileSync(skillMd, 'utf-8');\n}\n\n/** Collect all skill directories from the given skills root */\nfunction collectSkillDirs(skillsDir: string): Array<{ name: string; dir: string }> {\n if (!existsSync(skillsDir)) return [];\n return readdirSync(skillsDir, { withFileTypes: true })\n .filter(e => e.isDirectory())\n .map(e => ({ name: e.name, dir: join(skillsDir, e.name) }));\n}\n\n/**\n * Update or insert a named block in a file.\n * Blocks are delimited by: <!-- panopticon:<skill-name> start --> ... <!-- panopticon:<skill-name> end -->\n */\nfunction upsertNamedBlock(filePath: string, blockName: string, content: string): void {\n const startTag = `<!-- panopticon:${blockName} start -->`;\n const endTag = `<!-- panopticon:${blockName} end -->`;\n const block = `${startTag}\\n${content}\\n${endTag}`;\n\n let existing = existsSync(filePath) ? 
readFileSync(filePath, 'utf-8') : '';\n\n const startIdx = existing.indexOf(startTag);\n const endIdx = existing.indexOf(endTag);\n\n if (startIdx !== -1 && endIdx !== -1 && endIdx > startIdx) {\n // Replace existing block\n existing = existing.slice(0, startIdx) + block + existing.slice(endIdx + endTag.length);\n } else {\n // Append new block\n if (existing.length > 0 && !existing.endsWith('\\n')) existing += '\\n';\n existing += '\\n' + block + '\\n';\n }\n\n writeFileSync(filePath, existing, 'utf-8');\n}\n\n/** Sync a single skill to the cursor target */\nfunction syncToCursor(projectPath: string, skillName: string, rawContent: string): void {\n const rulesDir = join(projectPath, '.cursor', 'rules');\n mkdirSync(rulesDir, { recursive: true });\n const body = stripFrontmatter(rawContent);\n // .mdc files: standard markdown, cursor accepts them as context rules\n writeFileSync(join(rulesDir, `${skillName}.mdc`), body, 'utf-8');\n}\n\n/** Sync a single skill to the windsurf target */\nfunction syncToWindsurf(projectPath: string, skillName: string, rawContent: string): void {\n const rulesDir = join(projectPath, '.windsurf', 'rules');\n mkdirSync(rulesDir, { recursive: true });\n writeFileSync(join(rulesDir, `${skillName}.md`), stripFrontmatter(rawContent), 'utf-8');\n}\n\n/** Sync a single skill to the cline target */\nfunction syncToCline(projectPath: string, skillName: string, rawContent: string): void {\n const rulesDir = join(projectPath, '.clinerules');\n mkdirSync(rulesDir, { recursive: true });\n writeFileSync(join(rulesDir, `${skillName}.md`), stripFrontmatter(rawContent), 'utf-8');\n}\n\n/** Sync a single skill to the copilot target */\nfunction syncToCopilot(projectPath: string, skillName: string, rawContent: string): void {\n const instructionsDir = join(projectPath, '.github', 'instructions');\n mkdirSync(instructionsDir, { recursive: true });\n writeFileSync(\n join(instructionsDir, `${skillName}.instructions.md`),\n stripFrontmatter(rawContent),\n 
'utf-8',\n );\n}\n\n/** Sync a single skill to AGENTS.md (codex) as a named block */\nfunction syncToCodex(projectPath: string, skillName: string, rawContent: string): void {\n const agentsMd = join(projectPath, 'AGENTS.md');\n upsertNamedBlock(agentsMd, skillName, `## ${skillName}\\n\\n${stripFrontmatter(rawContent)}`);\n}\n\n/** Sync a single skill to CONVENTIONS.md (aider) as a named block */\nfunction syncToAider(projectPath: string, skillName: string, rawContent: string): void {\n const conventionsMd = join(projectPath, 'CONVENTIONS.md');\n upsertNamedBlock(conventionsMd, skillName, `## ${skillName}\\n\\n${stripFrontmatter(rawContent)}`);\n}\n\nconst TOOL_WRITERS: Record<AlsoSyncTool, (projectPath: string, name: string, content: string) => void> = {\n cursor: syncToCursor,\n windsurf: syncToWindsurf,\n cline: syncToCline,\n copilot: syncToCopilot,\n codex: syncToCodex,\n aider: syncToAider,\n};\n\n/**\n * Resolve the merged list of tools to sync.\n * Global config is the base; per-project .pan.yaml adds more (never removes).\n */\nexport function resolveAlsoSyncTools(projectPath?: string): AlsoSyncTool[] {\n const tools = new Set<AlsoSyncTool>();\n\n // Read from global config\n const globalConfig = join(PANOPTICON_HOME, 'config.yaml');\n if (existsSync(globalConfig)) {\n try {\n const parsed = yaml.load(readFileSync(globalConfig, 'utf-8')) as any;\n const globalTools: string[] = parsed?.tools?.also_sync || [];\n for (const t of globalTools) {\n if (t in TOOL_WRITERS) tools.add(t as AlsoSyncTool);\n }\n } catch { /* ignore parse errors */ }\n }\n\n // Merge per-project .pan.yaml (additive)\n if (projectPath) {\n const panYaml = join(projectPath, '.pan.yaml');\n const legacyYaml = join(projectPath, '.panopticon.yaml');\n const configPath = existsSync(panYaml) ? panYaml : existsSync(legacyYaml) ? 
legacyYaml : null;\n if (configPath) {\n try {\n const parsed = yaml.load(readFileSync(configPath, 'utf-8')) as any;\n const projectTools: string[] = parsed?.tools?.also_sync || [];\n for (const t of projectTools) {\n if (t in TOOL_WRITERS) tools.add(t as AlsoSyncTool);\n }\n } catch { /* ignore parse errors */ }\n }\n }\n\n return Array.from(tools);\n}\n\n/**\n * Sync skills from a skills directory to all configured tools.\n *\n * @param skillsDir Directory containing skill subdirectories\n * @param projectPath Project root where tool targets live\n * @param tools Tools to sync to (from resolveAlsoSyncTools)\n */\nexport function syncSkillsToTools(\n skillsDir: string,\n projectPath: string,\n tools: AlsoSyncTool[],\n): MultiToolSyncResult[] {\n if (tools.length === 0 || !existsSync(skillsDir)) return [];\n\n const skills = collectSkillDirs(skillsDir);\n const results: MultiToolSyncResult[] = [];\n\n for (const tool of tools) {\n const writer = TOOL_WRITERS[tool];\n const result: MultiToolSyncResult = { tool, written: [], skipped: [], errors: [] };\n\n for (const { name, dir } of skills) {\n try {\n const rawContent = readSkillContent(dir);\n if (!rawContent) {\n result.skipped.push(name);\n continue;\n }\n const displayName = extractSkillName(rawContent, name);\n writer(projectPath, displayName, rawContent);\n result.written.push(name);\n } catch (err: any) {\n result.errors.push(`${name}: ${err.message}`);\n }\n }\n\n results.push(result);\n }\n\n return results;\n}\n\n/**\n * Run the full multi-tool sync for a project.\n * Sources: .pan/skills/ (project-local) and/or ~/.panopticon/skills/ (global).\n */\nexport function runMultiToolSync(projectPath: string): MultiToolSyncResult[] {\n const tools = resolveAlsoSyncTools(projectPath);\n if (tools.length === 0) return [];\n\n const allResults: MultiToolSyncResult[] = [];\n\n // 1. 
Global skills (from ~/.panopticon/skills/)\n const globalSkillsDir = join(PANOPTICON_HOME, 'skills');\n const globalResults = syncSkillsToTools(globalSkillsDir, projectPath, tools);\n allResults.push(...globalResults);\n\n // 2. Project-local skills (from .pan/skills/) — may overwrite global skill entries\n const projectSkillsDir = join(projectPath, '.pan', 'skills');\n if (existsSync(projectSkillsDir)) {\n const projectResults = syncSkillsToTools(projectSkillsDir, projectPath, tools);\n // Merge into existing results (project results override counts, don't duplicate tools)\n for (const pr of projectResults) {\n const existing = allResults.find(r => r.tool === pr.tool);\n if (existing) {\n existing.written.push(...pr.written);\n existing.errors.push(...pr.errors);\n } else {\n allResults.push(pr);\n }\n }\n }\n\n return allResults;\n}\n","import chalk from 'chalk';\nimport ora from 'ora';\nimport { execSync } from 'child_process';\nimport { existsSync, readdirSync, readFileSync, writeFileSync, statSync, symlinkSync, mkdirSync } from 'fs';\nimport { homedir } from 'os';\nimport { join, dirname } from 'path';\nimport { fileURLToPath } from 'url';\nimport { loadConfig } from '../../lib/config.js';\nimport { createBackup } from '../../lib/backup.js';\nimport { planSync, executeSync, refreshCache, migrateStalePersonalContent, planHooksSync, syncHooks, syncStatusline } from '../../lib/sync.js';\nimport { SYNC_TARGET, isDevMode } from '../../lib/paths.js';\nimport { getDevrootPath } from '../../lib/config.js';\nimport { listProjects } from '../../lib/projects.js';\nimport { cleanupLegacyRuntimeSymlinks, migrateSyncTargets } from '../../lib/config-migration.js';\nimport { migratePanopticonToPan } from '../../lib/workspace-manager.js';\nimport { runMultiToolSync, resolveAlsoSyncTools } from '../../lib/multi-tool-sync.js';\n\n// Get path to bundled git hooks\nconst __filename = fileURLToPath(import.meta.url);\nconst __dirname = dirname(__filename);\nconst 
BUNDLED_GIT_HOOKS_DIR = join(__dirname, '..', '..', 'scripts', 'git-hooks');\n\n// Helper to check if a command exists\nfunction checkCommand(cmd: string): boolean {\n try {\n execSync(`which ${cmd}`, { stdio: 'pipe' });\n return true;\n } catch {\n return false;\n }\n}\n\ninterface SyncOptions {\n dryRun?: boolean;\n force?: boolean;\n diff?: boolean;\n backupOnly?: boolean;\n}\n\nexport async function syncCommand(options: SyncOptions): Promise<void> {\n // Dry run mode\n if (options.dryRun) {\n console.log(chalk.bold('Sync Plan (dry run):\\n'));\n\n // Show dev mode status\n if (isDevMode()) {\n console.log(chalk.magenta('Developer mode detected - dev-skills will be synced\\n'));\n }\n\n // Show hooks plan\n const hooksPlan = planHooksSync();\n if (hooksPlan.length > 0) {\n console.log(chalk.cyan('hooks (bin scripts):'));\n for (const hook of hooksPlan) {\n const icon = hook.status === 'new' ? chalk.green('+') : chalk.blue('↻');\n const status = hook.status === 'new' ? '' : chalk.dim('[update]');\n console.log(` ${icon} ${hook.name} ${status}`);\n }\n console.log('');\n }\n\n const devrootPath = getDevrootPath();\n console.log(chalk.cyan(`devroot (${devrootPath || 'disabled'}):`));\n\n if (!devrootPath) {\n console.log(chalk.dim(' (devroot disabled — set sync.devroot in config)'));\n } else {\n const plan = planSync();\n const allItems = [...plan.skills, ...plan.agents, ...plan.rules, ...plan.commands];\n\n if (allItems.length === 0) {\n console.log(chalk.dim(' (nothing to sync)'));\n } else {\n for (const item of allItems) {\n const icon = item.status === 'conflict' ? chalk.yellow('!') :\n item.status === 'symlink' ? chalk.blue('↻') :\n chalk.green('+');\n const label = item.status === 'conflict' ? chalk.yellow('[modified]') :\n item.status === 'symlink' ? 
chalk.dim('[update]') :\n chalk.green('[new]');\n console.log(` ${icon} ${item.name} ${label}`);\n }\n }\n }\n\n // Show .pan/skills/ source files for each registered project\n const dryRunProjects = listProjects();\n for (const { config } of dryRunProjects) {\n if (!existsSync(config.path)) continue;\n const panSkillsDir = join(config.path, '.pan', 'skills');\n if (existsSync(panSkillsDir)) {\n const skills = readdirSync(panSkillsDir, { withFileTypes: true })\n .filter(e => e.isDirectory())\n .map(e => e.name);\n if (skills.length > 0) {\n console.log(chalk.cyan(`\\n.pan/skills/ (${config.name}):`));\n for (const skillName of skills) {\n console.log(` ${chalk.green('+')} ${skillName} ${chalk.green('[project-local]')}`);\n }\n }\n }\n\n // Show multi-tool sync targets\n const tools = resolveAlsoSyncTools(config.path);\n if (tools.length > 0) {\n console.log(chalk.cyan(`\\nmulti-tool sync (${config.name}): ${tools.join(', ')}`));\n const panSkillsDirExists = existsSync(join(config.path, '.pan', 'skills'));\n if (panSkillsDirExists) {\n const skills = readdirSync(join(config.path, '.pan', 'skills'), { withFileTypes: true })\n .filter(e => e.isDirectory())\n .map(e => e.name);\n for (const tool of tools) {\n for (const skillName of skills) {\n console.log(` ${chalk.green('+')} ${skillName} → ${tool}`);\n }\n }\n }\n }\n }\n\n console.log('');\n console.log(chalk.dim('Run without --dry-run to apply changes.'));\n return;\n }\n\n // Run one-time migration: strip legacy sync targets from config.toml\n const syncMigration = migrateSyncTargets();\n if (syncMigration.migrated) {\n if (syncMigration.hadNonClaudeTargets) {\n console.log(chalk.yellow('Config updated: removed non-Claude sync targets (Panopticon now syncs to Claude Code only).'));\n }\n }\n\n // Run one-time migration: remove Panopticon-managed symlinks from legacy runtime dirs\n const cleanupResult = cleanupLegacyRuntimeSymlinks();\n if (cleanupResult.cleaned.length > 0) {\n console.log(chalk.dim(`Removed 
${cleanupResult.total} legacy runtime symlink(s): ${cleanupResult.cleaned.join(', ')}`));\n }\n\n // One-time migration: remove Panopticon symlinks from ~/.claude/ (devroot replaces this)\n const migration = migrateStalePersonalContent();\n if (migration.removedSymlinks.length > 0) {\n console.log(chalk.cyan(`Migrated: removed ${migration.removedSymlinks.length} Panopticon symlink(s) from ~/.claude/`));\n if (migration.preservedUserContent.length > 0) {\n console.log(chalk.dim(` Preserved ${migration.preservedUserContent.length} user-created item(s)`));\n }\n }\n\n const config = loadConfig();\n\n // Create backup if enabled\n if (config.sync.backup_before_sync) {\n const spinner = ora('Creating backup...').start();\n\n const backupDirs = [\n SYNC_TARGET.skills,\n SYNC_TARGET.commands,\n SYNC_TARGET.agents,\n ];\n\n const backup = createBackup(backupDirs);\n\n if (backup.targets.length > 0) {\n spinner.succeed(`Backup created: ${backup.timestamp}`);\n } else {\n spinner.info('No existing content to backup');\n }\n\n if (options.backupOnly) {\n return;\n }\n }\n\n // Refresh cache from repo source\n const cacheSpinner = ora('Refreshing cache from repo...').start();\n const cacheResult = refreshCache();\n const cacheParts = [];\n if (cacheResult.skills.copied > 0) cacheParts.push(`${cacheResult.skills.copied} skills`);\n if (cacheResult.agents.copied > 0) cacheParts.push(`${cacheResult.agents.copied} agents`);\n if (cacheResult.rules.copied > 0) cacheParts.push(`${cacheResult.rules.copied} rules`);\n cacheSpinner.succeed(`Cache refreshed: ${cacheParts.length > 0 ? 
cacheParts.join(', ') : 'up to date'}`);\n\n // Execute sync to devroot\n const devrootPath = getDevrootPath();\n const spinner = ora(`Syncing to devroot (${devrootPath || 'disabled'})...`).start();\n\n if (!devrootPath) {\n spinner.info('Devroot disabled (set sync.devroot in config to enable)');\n } else {\n const result = executeSync({ force: options.force, diff: options.diff });\n const totalSynced = result.created.length + result.updated.length;\n\n // Show diffs if requested\n if (result.diffs.length > 0) {\n spinner.info(`Showing diffs for ${result.diffs.length} modified file(s):\\n`);\n for (const d of result.diffs) {\n console.log(chalk.cyan(`--- ${d.path} (installed)`));\n console.log(chalk.cyan(`+++ ${d.path} (current on disk)`));\n // Simple line-by-line diff\n const sourceLines = d.sourceContent.split('\\n');\n const targetLines = d.targetContent.split('\\n');\n const maxLines = Math.max(sourceLines.length, targetLines.length);\n for (let i = 0; i < maxLines; i++) {\n if (sourceLines[i] !== targetLines[i]) {\n if (targetLines[i] !== undefined) console.log(chalk.red(`- ${targetLines[i]}`));\n if (sourceLines[i] !== undefined) console.log(chalk.green(`+ ${sourceLines[i]}`));\n }\n }\n console.log('');\n }\n }\n\n if (result.conflicts.length > 0 && !options.force) {\n spinner.warn(`Synced ${totalSynced} items, ${result.conflicts.length} conflicts`);\n console.log('');\n console.log(chalk.yellow('Modified since Panopticon installed:'));\n for (const name of result.conflicts) {\n console.log(chalk.dim(` - ${name}`));\n }\n console.log('');\n console.log(chalk.dim('Use --force to overwrite, --diff to see changes.'));\n } else if (result.skipped.length > 0) {\n spinner.succeed(`Synced ${totalSynced} items to devroot (${result.skipped.length} user-owned skipped)`);\n } else {\n spinner.succeed(`Synced ${totalSynced} items to devroot`);\n }\n }\n\n // Sync hooks (bin scripts)\n const hooksSpinner = ora('Syncing hooks...').start();\n const hooksResult = 
syncHooks();\n\n if (hooksResult.errors.length > 0) {\n hooksSpinner.warn(`Synced ${hooksResult.synced.length} hooks, ${hooksResult.errors.length} errors`);\n for (const error of hooksResult.errors) {\n console.log(chalk.red(` ✗ ${error}`));\n }\n } else if (hooksResult.synced.length > 0) {\n hooksSpinner.succeed(`Synced ${hooksResult.synced.length} hooks to ~/.panopticon/bin/`);\n } else {\n hooksSpinner.info('No hooks to sync');\n }\n\n const projects = listProjects();\n\n // Ensure beads database exists for each registered project (first-time setup guard).\n // bd install puts the binary in PATH, but bd init must be run once per project to\n // create the Dolt database. Without it, workspace beads creation silently fails.\n if (projects.length > 0 && checkCommand('bd')) {\n for (const { key, config } of projects) {\n if (!existsSync(config.path)) continue;\n const mainBeadsDir = join(config.path, '.beads');\n if (!existsSync(mainBeadsDir)) continue; // Project hasn't used beads yet — skip\n // Test connectivity. If the database is missing, auto-init.\n try {\n execSync('bd list --json --limit 0 2>&1', { cwd: config.path, stdio: 'pipe', timeout: 8000 });\n } catch (e: any) {\n const msg = String(e?.stdout ?? e?.stderr ?? e?.message ?? 
'');\n if (msg.includes('database') && (msg.includes('not found') || msg.includes('not exist') || msg.includes('defaulting'))) {\n const beadsSpinner = ora(`Initializing beads database for ${config.name}...`).start();\n try {\n const prefix = (key || config.name).toLowerCase().replace(/[^a-z0-9-]/g, '-');\n execSync(`bd init --prefix ${prefix}`, { cwd: config.path, stdio: 'pipe', timeout: 20000 });\n try { execSync('git config beads.role contributor', { cwd: config.path, stdio: 'pipe' }); } catch { /* non-fatal */ }\n beadsSpinner.succeed(`Beads database initialized for ${config.name} (prefix: ${prefix})`);\n } catch {\n beadsSpinner.warn(`Could not auto-initialize beads for ${config.name} — run: cd ${config.path} && bd init`);\n }\n }\n }\n }\n }\n\n\n // Check jq availability (required by statusline, beads, specialists)\n if (!checkCommand('jq')) {\n console.log(chalk.yellow('\\n ⚠ jq not found — statusline and other features need it'));\n console.log(chalk.dim(' Install: apt install jq / brew install jq\\n'));\n }\n\n // Sync statusline to all runtimes\n const statuslineSpinner = ora('Syncing statusline...').start();\n const statuslineResult = syncStatusline();\n\n if (statuslineResult.errors.length > 0) {\n statuslineSpinner.warn(`Synced statusline to ${statuslineResult.synced.length} runtime(s), ${statuslineResult.errors.length} error(s)`);\n for (const error of statuslineResult.errors) {\n console.log(chalk.red(` ✗ ${error}`));\n }\n } else if (statuslineResult.synced.length > 0) {\n statuslineSpinner.succeed(`Synced statusline to ${statuslineResult.synced.join(', ')}`);\n } else {\n statuslineSpinner.info('No statusline script found (scripts/statusline.sh)');\n }\n\n // Check and install claude-code-router if missing\n const hasRouter = checkCommand('claude-code-router');\n if (!hasRouter) {\n const routerSpinner = ora('Installing claude-code-router...').start();\n try {\n execSync('npm install -g @musistudio/claude-code-router', {\n stdio: 'pipe',\n 
timeout: 120000\n });\n routerSpinner.succeed('claude-code-router installed');\n } catch (error) {\n routerSpinner.warn('Failed to install claude-code-router - run: npm install -g @musistudio/claude-code-router');\n }\n }\n\n // Check and install mkcert if missing\n if (!checkCommand('mkcert')) {\n const mkcertSpinner = ora('Installing mkcert...').start();\n try {\n const binDir = join(homedir(), '.local', 'bin');\n mkdirSync(binDir, { recursive: true });\n const mkcertPath = join(binDir, 'mkcert');\n const arch = process.arch === 'x64' ? 'amd64' : process.arch;\n execSync(`curl -sL \"https://github.com/FiloSottile/mkcert/releases/latest/download/mkcert-v1.4.4-linux-${arch}\" -o \"${mkcertPath}\" && chmod +x \"${mkcertPath}\"`, {\n stdio: 'pipe',\n timeout: 60000,\n });\n mkcertSpinner.succeed('mkcert installed');\n } catch {\n mkcertSpinner.warn('Failed to install mkcert - run: https://github.com/FiloSottile/mkcert/releases');\n }\n }\n\n // Check and install SageOx CLI if missing\n if (!checkCommand('ox')) {\n const oxSpinner = ora('Installing SageOx CLI (ox)...').start();\n try {\n const binDir = join(homedir(), '.local', 'bin');\n mkdirSync(binDir, { recursive: true });\n const oxPath = join(binDir, 'ox');\n const arch = process.arch === 'x64' ? 'amd64' : process.arch;\n const platform = process.platform === 'darwin' ? 
'darwin' : 'linux';\n execSync(`curl -sL \"https://github.com/eltmon/ox/releases/download/latest/ox-${platform}-${arch}\" -o \"${oxPath}\" && chmod +x \"${oxPath}\"`, {\n stdio: 'pipe',\n timeout: 60000,\n });\n oxSpinner.succeed('SageOx CLI installed');\n } catch {\n oxSpinner.warn('Failed to install SageOx CLI - see: https://github.com/eltmon/ox/releases');\n }\n }\n\n\n // Enforce Playwright MCP --isolated flag to prevent stale zoom/profile state\n const mcpPath = join(homedir(), '.claude', 'mcp.json');\n try {\n if (existsSync(mcpPath)) {\n const mcpConfig = JSON.parse(readFileSync(mcpPath, 'utf-8'));\n const pw = mcpConfig?.mcpServers?.playwright;\n if (pw && Array.isArray(pw.args) && !pw.args.includes('--isolated')) {\n pw.args.push('--isolated');\n writeFileSync(mcpPath, JSON.stringify(mcpConfig, null, 2) + '\\n');\n console.log(chalk.green('✓ Added --isolated to Playwright MCP (prevents stale zoom/profile state)'));\n }\n }\n } catch {\n // Non-fatal — skip if mcp.json can't be read/written\n }\n\n // Ensure beads database exists for each registered project (first-time setup guard).\n // bd install puts the binary in PATH, but bd init must be run once per project to\n // create the Dolt database. 
Without it, workspace beads creation silently fails.\n // Migrate .panopticon/ → .pan/ and run multi-tool sync in all registered projects\n\n for (const { config } of projects) {\n if (!existsSync(config.path)) continue;\n\n // Migrate .panopticon/ subdirs → .pan/\n const migResult = migratePanopticonToPan(config.path);\n if (migResult.migrated.length > 0) {\n console.log(chalk.cyan(`Migrated .panopticon/ → .pan/ in ${config.name}: ${migResult.migrated.join(', ')}`));\n }\n if (migResult.skipped.length > 0) {\n console.log(chalk.yellow(`Migration skipped (both exist) in ${config.name}: ${migResult.skipped.join(', ')}`));\n }\n for (const err of migResult.errors) {\n console.log(chalk.red(`Migration error in ${config.name}: ${err}`));\n }\n\n // Multi-tool skill sync (cursor, codex, windsurf, cline, copilot, aider)\n const toolSyncResults = runMultiToolSync(config.path);\n for (const r of toolSyncResults) {\n if (r.written.length > 0) {\n console.log(chalk.cyan(`Synced ${r.written.length} skill(s) to ${r.tool} in ${config.name}`));\n }\n for (const err of r.errors) {\n console.log(chalk.red(`Multi-tool sync error (${r.tool}) in ${config.name}: ${err}`));\n }\n }\n }\n\n // Sync git hooks to all registered projects (branch protection)\n if (projects.length > 0 && existsSync(BUNDLED_GIT_HOOKS_DIR)) {\n const gitHooksSpinner = ora('Installing git hooks in registered projects...').start();\n let totalInstalled = 0;\n let projectsUpdated = 0;\n\n for (const { config } of projects) {\n if (!existsSync(config.path)) continue;\n\n // Find all .git directories (handles polyrepos)\n const gitDirs: string[] = [];\n\n // Check root\n if (existsSync(join(config.path, '.git')) && statSync(join(config.path, '.git')).isDirectory()) {\n gitDirs.push(join(config.path, '.git'));\n } else {\n // Scan for polyrepo\n try {\n const entries = readdirSync(config.path);\n for (const entry of entries) {\n const entryPath = join(config.path, entry);\n const gitPath = join(entryPath, '.git');\n 
if (existsSync(gitPath) && statSync(gitPath).isDirectory()) {\n gitDirs.push(gitPath);\n }\n }\n } catch {\n // Skip unreadable directories\n }\n }\n\n // Install hooks in each git dir\n for (const gitDir of gitDirs) {\n const hooksTarget = join(gitDir, 'hooks');\n if (!existsSync(hooksTarget)) {\n mkdirSync(hooksTarget, { recursive: true });\n }\n\n try {\n const hooks = readdirSync(BUNDLED_GIT_HOOKS_DIR).filter(f =>\n statSync(join(BUNDLED_GIT_HOOKS_DIR, f)).isFile()\n );\n\n for (const hook of hooks) {\n const source = join(BUNDLED_GIT_HOOKS_DIR, hook);\n const target = join(hooksTarget, hook);\n\n // Skip if already a symlink to our hook\n if (existsSync(target)) {\n try {\n const { readlinkSync } = await import('fs');\n if (readlinkSync(target) === source) continue;\n } catch {\n // Not a symlink\n }\n // Backup existing\n const { renameSync } = await import('fs');\n try { renameSync(target, `${target}.backup`); } catch {}\n }\n\n try {\n symlinkSync(source, target);\n totalInstalled++;\n } catch {}\n }\n projectsUpdated++;\n } catch {}\n }\n }\n\n if (totalInstalled > 0) {\n gitHooksSpinner.succeed(`Installed git hooks in ${projectsUpdated} project(s)`);\n } else {\n gitHooksSpinner.info('Git hooks already up to date');\n }\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAiBA,MAAM,uBAAuB,KAAK,SAAS,EAAE,eAAe,gBAAgB;;AAG5E,MAAM,kBAAkB,KAAK,SAAS,EAAE,eAAe,cAAc;;AAGrE,MAAM,uBAAuB,KAAK,SAAS,EAAE,eAAe,uBAAuB;;;;;AAMnF,SAAgB,iBAA0B;AACxC,QAAO,WAAW,qBAAqB,IAAI,CAAC,WAAW,gBAAgB;;;;;AAMzE,SAAgB,oBAA6B;AAC3C,QAAO,WAAW,qBAAqB;;;;;AAMzC,SAAS,uBAAuB,UAM9B;AACA,QAAO;EACL,WAAW;EACX,QAAQ,CAAC,CAAC,SAAS,SAAS;EAC5B,QAAQ,CAAC,CAAC,SAAS,SAAS;EAC5B,KAAK,CAAC,CAAC,SAAS,SAAS;EACzB,MAAM;EACP;;;;;AAMH,SAAgB,oBAAoB,UAAsC;AAYxE,QAT2B;EACzB,QAAQ;GACN,WAJc,uBAAuB,SAAS;GAK9C,WAAW,EAAE;GACb,uBAAuB;GACxB;EACD,UAAU,SAAS;EACpB;;AA4BH,SAAgB,cAAc,UAA4B,EAAE,EAAmB;CAC7E,MAAM,EAAE,SAAS,MAAM,eAAe,OAAO,SAAS,UAAU;AAEhE,KAAI;AAEF,MAAI,CAAC,gBAAgB,EAAE;AACrB,OAAI,WAAW,gBAAgB,CAC7B,QAAO;IACL,SAAS;IACT,gBAAgB;IAChB,kBAAkB,CAAC,YAAY;IAC/B,SAAS;IACV;AAEH,UAAO;IACL,SAAS;IACT,gBAAgB;IAChB,kBAAkB,EAAE;IACpB,SAAS;IACV;;EAOH,MAAM,aAAa,oBAHF,cAAc,CAGiB;EAGhD,MAAM,cAAc,KAAK,KAAK,YAAY;GACxC,QAAQ;GACR,WAAW;GACX,QAAQ;GACT,CAAC;AAGF,MAAI,QAAQ;GACV,MAAM,mBAAmB,OAAO,QAAQ,WAAW,QAAQ,aAAa,EAAE,CAAC,CACxE,QAAQ,CAAC,GAAG,aAAa,QAAQ,CACjC,KAAK,CAAC,UAAU,KAAK;AAExB,UAAO;IACL,SAAS;IACT,gBAAgB,OAAO,KAAK,WAAW,QAAQ,aAAa,EAAE,CAAC,CAAC;IAChE;IACA,SAAS,yCAAyC,iBAAiB,OAAO;IAC3E;;AAIH,gBAAc,iBAAiB,aAAa,QAAQ;AAGpD,MAAI,OAEF,eAAc,sBADQ,aAAa,sBAAsB,QAAQ,EACd,QAAQ;AAI7D,MAAI,aACF,YAAW,sBAAsB,GAAG,qBAAqB,WAAW;EAGtE,MAAM,mBAAmB,OAAO,QAAQ,WAAW,QAAQ,aAAa,EAAE,CAAC,CACxE,QAAQ,CAAC,GAAG,aAAa,QAAQ,CACjC,KAAK,CAAC,UAAU,KAAK;AAExB,SAAO;GACL,SAAS;GACT,gBAAgB,OAAO,KAAK,WAAW,QAAQ,aAAa,EAAE,CAAC,CAAC;GAChE;GACA,SAAS,iDAAiD,iBAAiB,OAAO;GACnF;UACM,OAAY;AACnB,SAAO;GACL,SAAS;GACT,gBAAgB;GAChB,kBAAkB,EAAE;GACpB,SAAS;GACT,OAAO,MAAM;GACd;;;AAgCL,SAAgB,+BAAoD;CAClE,MAAM,aAAa;EACjB;GAAE,MAAM;GAAS,MAAM,KAAK,SAAS,EAAE,SAAS;GAAE;EAClD;GAAE,MAAM;GAAU,MAAM,KAAK,SAAS,EAAE,UAAU;GAAE;EACpD;GAAE,MAAM;GAAU,MAAM,KAAK,SAAS,EAAE,UAAU;GAAE;EACpD;GAAE,MAAM;GAAY,MAAM,KAAK,SAAS,EAAE,YAAY;GAAE;EACzD;CAED,MAAM,UAAoB,EAAE;CAC5B,MAAM,SAAmB,EAAE;AAE3B,MAAK,MAAM,EAAE,MAAM,UAAU,WAC3B,MAAK,MAAM,UAAU;EAAC;EAAU;EAAY;E
AAS,EAAE;EACrD,MAAM,MAAM,KAAK,MAAM,OAAO;AAC9B,MAAI,CAAC,WAAW,IAAI,CAAE;AAEtB,MAAI;GACF,MAAM,UAAU,YAAY,IAAI;AAChC,QAAK,MAAM,SAAS,SAAS;IAC3B,MAAM,YAAY,KAAK,KAAK,MAAM;AAClC,QAAI;AAEF,SAAI,CADU,UAAU,UAAU,CACvB,gBAAgB,CAAE;AAI7B,SAFmB,aAAa,UAAU,CAE3B,SAAS,cAAc,EAAE;AACtC,iBAAW,UAAU;AACrB,cAAQ,KAAK,GAAG,KAAK,GAAG,OAAO,GAAG,QAAQ;;aAErC,KAAU;AACjB,YAAO,KAAK,GAAG,KAAK,GAAG,OAAO,GAAG,MAAM,IAAI,IAAI,UAAU;;;WAGtD,KAAU;AAEjB,UAAO,KAAK,GAAG,KAAK,GAAG,OAAO,IAAI,IAAI,UAAU;;;AAKtD,QAAO;EAAE;EAAS,OAAO,QAAQ;EAAQ;EAAQ;;;;;;AAOnD,SAAgB,qBAA0E;CACxF,MAAM,aAAa,KAAK,SAAS,EAAE,eAAe,cAAc;AAEhE,KAAI,CAAC,WAAW,WAAW,CACzB,QAAO;EAAE,UAAU;EAAO,qBAAqB;EAAO;AAGxD,KAAI;EACF,MAAM,UAAU,aAAa,YAAY,QAAQ;EAGjD,MAAM,eAAe,QAAQ,MAAM,+BAA+B;AAClE,MAAI,CAAC,aACH,QAAO;GAAE,UAAU;GAAO,qBAAqB;GAAO;EAIxD,MAAM,aAAa,aAAa;EAChC,MAAM,sBAAsB,gCAAgC,KAAK,WAAW;AAI5E,gBAAc,YADK,QAAQ,QAAQ,oCAAoC,GAAG,EACpC,QAAQ;AAE9C,SAAO;GAAE,UAAU;GAAM;GAAqB;SACxC;AACN,SAAO;GAAE,UAAU;GAAO,qBAAqB;GAAO;;;;;;;;;;;;;;;;;;;;;;YCxQb;;AAY7C,SAAS,iBAAiB,SAAyB;AACjD,KAAI,CAAC,QAAQ,WAAW,MAAM,CAAE,QAAO;CACvC,MAAM,MAAM,QAAQ,QAAQ,SAAS,EAAE;AACvC,KAAI,QAAQ,GAAI,QAAO;AACvB,QAAO,QAAQ,MAAM,MAAM,EAAE,CAAC,WAAW;;;AAI3C,SAAS,iBAAiB,SAAiB,UAA0B;AACnE,KAAI,CAAC,QAAQ,WAAW,MAAM,CAAE,QAAO;CACvC,MAAM,MAAM,QAAQ,QAAQ,SAAS,EAAE;AACvC,KAAI,QAAQ,GAAI,QAAO;CAEvB,MAAM,QADc,QAAQ,MAAM,GAAG,IAAI,CACf,MAAM,kBAAkB;AAClD,QAAO,QAAQ,MAAM,GAAG,MAAM,GAAG;;;AAInC,SAAS,iBAAiB,UAAiC;CACzD,MAAM,UAAU,KAAK,UAAU,WAAW;AAC1C,KAAI,CAAC,WAAW,QAAQ,EAAE;EAExB,MAAM,QAAQ,WAAW,SAAS,GAAG,YAAY,SAAS,CAAC,QAAO,MAAK,EAAE,SAAS,MAAM,CAAC,GAAG,EAAE;AAC9F,MAAI,MAAM,WAAW,EAAG,QAAO;AAC/B,SAAO,aAAa,KAAK,UAAU,MAAM,GAAG,EAAE,QAAQ;;AAExD,QAAO,aAAa,SAAS,QAAQ;;;AAIvC,SAAS,iBAAiB,WAAyD;AACjF,KAAI,CAAC,WAAW,UAAU,CAAE,QAAO,EAAE;AACrC,QAAO,YAAY,WAAW,EAAE,eAAe,MAAM,CAAC,CACnD,QAAO,MAAK,EAAE,aAAa,CAAC,CAC5B,KAAI,OAAM;EAAE,MAAM,EAAE;EAAM,KAAK,KAAK,WAAW,EAAE,KAAK;EAAE,EAAE;;;;;;AAO/D,SAAS,iBAAiB,UAAkB,WAAmB,SAAuB;CACpF,MAAM,WAAW,mBAAmB,UAAU;CAC9C,MAAM,SAAS,mBAAmB,UAAU;CAC5C,MAAM,QAAQ,GAAG,SAAS,IAAI,QAAQ,IAAI;CAE1C,IAAI,WA
AW,WAAW,SAAS,GAAG,aAAa,UAAU,QAAQ,GAAG;CAExE,MAAM,WAAW,SAAS,QAAQ,SAAS;CAC3C,MAAM,SAAS,SAAS,QAAQ,OAAO;AAEvC,KAAI,aAAa,MAAM,WAAW,MAAM,SAAS,SAE/C,YAAW,SAAS,MAAM,GAAG,SAAS,GAAG,QAAQ,SAAS,MAAM,SAAS,OAAO,OAAO;MAClF;AAEL,MAAI,SAAS,SAAS,KAAK,CAAC,SAAS,SAAS,KAAK,CAAE,aAAY;AACjE,cAAY,OAAO,QAAQ;;AAG7B,eAAc,UAAU,UAAU,QAAQ;;;AAI5C,SAAS,aAAa,aAAqB,WAAmB,YAA0B;CACtF,MAAM,WAAW,KAAK,aAAa,WAAW,QAAQ;AACtD,WAAU,UAAU,EAAE,WAAW,MAAM,CAAC;CACxC,MAAM,OAAO,iBAAiB,WAAW;AAEzC,eAAc,KAAK,UAAU,GAAG,UAAU,MAAM,EAAE,MAAM,QAAQ;;;AAIlE,SAAS,eAAe,aAAqB,WAAmB,YAA0B;CACxF,MAAM,WAAW,KAAK,aAAa,aAAa,QAAQ;AACxD,WAAU,UAAU,EAAE,WAAW,MAAM,CAAC;AACxC,eAAc,KAAK,UAAU,GAAG,UAAU,KAAK,EAAE,iBAAiB,WAAW,EAAE,QAAQ;;;AAIzF,SAAS,YAAY,aAAqB,WAAmB,YAA0B;CACrF,MAAM,WAAW,KAAK,aAAa,cAAc;AACjD,WAAU,UAAU,EAAE,WAAW,MAAM,CAAC;AACxC,eAAc,KAAK,UAAU,GAAG,UAAU,KAAK,EAAE,iBAAiB,WAAW,EAAE,QAAQ;;;AAIzF,SAAS,cAAc,aAAqB,WAAmB,YAA0B;CACvF,MAAM,kBAAkB,KAAK,aAAa,WAAW,eAAe;AACpE,WAAU,iBAAiB,EAAE,WAAW,MAAM,CAAC;AAC/C,eACE,KAAK,iBAAiB,GAAG,UAAU,kBAAkB,EACrD,iBAAiB,WAAW,EAC5B,QACD;;;AAIH,SAAS,YAAY,aAAqB,WAAmB,YAA0B;AAErF,kBADiB,KAAK,aAAa,YAAY,EACpB,WAAW,MAAM,UAAU,MAAM,iBAAiB,WAAW,GAAG;;;AAI7F,SAAS,YAAY,aAAqB,WAAmB,YAA0B;AAErF,kBADsB,KAAK,aAAa,iBAAiB,EACzB,WAAW,MAAM,UAAU,MAAM,iBAAiB,WAAW,GAAG;;AAGlG,MAAM,eAAmG;CACvG,QAAQ;CACR,UAAU;CACV,OAAO;CACP,SAAS;CACT,OAAO;CACP,OAAO;CACR;;;;;AAMD,SAAgB,qBAAqB,aAAsC;CACzE,MAAM,wBAAQ,IAAI,KAAmB;CAGrC,MAAM,eAAe,KAAK,iBAAiB,cAAc;AACzD,KAAI,WAAW,aAAa,CAC1B,KAAI;EAEF,MAAM,cADS,KAAK,KAAK,aAAa,cAAc,QAAQ,CAAC,EACvB,OAAO,aAAa,EAAE;AAC5D,OAAK,MAAM,KAAK,YACd,KAAI,KAAK,aAAc,OAAM,IAAI,EAAkB;SAE/C;AAIV,KAAI,aAAa;EACf,MAAM,UAAU,KAAK,aAAa,YAAY;EAC9C,MAAM,aAAa,KAAK,aAAa,mBAAmB;EACxD,MAAM,aAAa,WAAW,QAAQ,GAAG,UAAU,WAAW,WAAW,GAAG,aAAa;AACzF,MAAI,WACF,KAAI;GAEF,MAAM,eADS,KAAK,KAAK,aAAa,YAAY,QAAQ,CAAC,EACpB,OAAO,aAAa,EAAE;AAC7D,QAAK,MAAM,KAAK,aACd,KAAI,KAAK,aAAc,OAAM,IAAI,EAAkB;UAE/C;;AAIZ,QAAO,MAAM,KAAK,MAAM;;;;;;;;;AAU1B,SAAgB,kBACd,WACA,aACA,OACuB;AACvB,KAAI,MAAM,WAAW,KAAK,CAAC,WAAW,UAAU,CAAE,QAAO,EAAE;CAE3D,MAAM,SAAS,iBAAiB,UAAU
;CAC1C,MAAM,UAAiC,EAAE;AAEzC,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,SAAS,aAAa;EAC5B,MAAM,SAA8B;GAAE;GAAM,SAAS,EAAE;GAAE,SAAS,EAAE;GAAE,QAAQ,EAAE;GAAE;AAElF,OAAK,MAAM,EAAE,MAAM,SAAS,OAC1B,KAAI;GACF,MAAM,aAAa,iBAAiB,IAAI;AACxC,OAAI,CAAC,YAAY;AACf,WAAO,QAAQ,KAAK,KAAK;AACzB;;AAGF,UAAO,aADa,iBAAiB,YAAY,KAAK,EACrB,WAAW;AAC5C,UAAO,QAAQ,KAAK,KAAK;WAClB,KAAU;AACjB,UAAO,OAAO,KAAK,GAAG,KAAK,IAAI,IAAI,UAAU;;AAIjD,UAAQ,KAAK,OAAO;;AAGtB,QAAO;;;;;;AAOT,SAAgB,iBAAiB,aAA4C;CAC3E,MAAM,QAAQ,qBAAqB,YAAY;AAC/C,KAAI,MAAM,WAAW,EAAG,QAAO,EAAE;CAEjC,MAAM,aAAoC,EAAE;CAI5C,MAAM,gBAAgB,kBADE,KAAK,iBAAiB,SAAS,EACE,aAAa,MAAM;AAC5E,YAAW,KAAK,GAAG,cAAc;CAGjC,MAAM,mBAAmB,KAAK,aAAa,QAAQ,SAAS;AAC5D,KAAI,WAAW,iBAAiB,EAAE;EAChC,MAAM,iBAAiB,kBAAkB,kBAAkB,aAAa,MAAM;AAE9E,OAAK,MAAM,MAAM,gBAAgB;GAC/B,MAAM,WAAW,WAAW,MAAK,MAAK,EAAE,SAAS,GAAG,KAAK;AACzD,OAAI,UAAU;AACZ,aAAS,QAAQ,KAAK,GAAG,GAAG,QAAQ;AACpC,aAAS,OAAO,KAAK,GAAG,GAAG,OAAO;SAElC,YAAW,KAAK,GAAG;;;AAKzB,QAAO;;;;aChQwC;YAGW;eAEP;wBAEmB;AAMxE,MAAM,wBAAwB,KADZ,QADC,cAAc,OAAO,KAAK,IAAI,CACZ,EACS,MAAM,MAAM,WAAW,YAAY;AAGjF,SAAS,aAAa,KAAsB;AAC1C,KAAI;AACF,WAAS,SAAS,OAAO,EAAE,OAAO,QAAQ,CAAC;AAC3C,SAAO;SACD;AACN,SAAO;;;AAWX,eAAsB,YAAY,SAAqC;AAErE,KAAI,QAAQ,QAAQ;AAClB,UAAQ,IAAI,MAAM,KAAK,yBAAyB,CAAC;AAGjD,MAAI,WAAW,CACb,SAAQ,IAAI,MAAM,QAAQ,wDAAwD,CAAC;EAIrF,MAAM,YAAY,eAAe;AACjC,MAAI,UAAU,SAAS,GAAG;AACxB,WAAQ,IAAI,MAAM,KAAK,uBAAuB,CAAC;AAC/C,QAAK,MAAM,QAAQ,WAAW;IAC5B,MAAM,OAAO,KAAK,WAAW,QAAQ,MAAM,MAAM,IAAI,GAAG,MAAM,KAAK,IAAI;IACvE,MAAM,SAAS,KAAK,WAAW,QAAQ,KAAK,MAAM,IAAI,WAAW;AACjE,YAAQ,IAAI,KAAK,KAAK,GAAG,KAAK,KAAK,GAAG,SAAS;;AAEjD,WAAQ,IAAI,GAAG;;EAGjB,MAAM,cAAc,gBAAgB;AACpC,UAAQ,IAAI,MAAM,KAAK,YAAY,eAAe,WAAW,IAAI,CAAC;AAElE,MAAI,CAAC,YACH,SAAQ,IAAI,MAAM,IAAI,oDAAoD,CAAC;OACtE;GACL,MAAM,OAAO,UAAU;GACvB,MAAM,WAAW;IAAC,GAAG,KAAK;IAAQ,GAAG,KAAK;IAAQ,GAAG,KAAK;IAAO,GAAG,KAAK;IAAS;AAElF,OAAI,SAAS,WAAW,EACtB,SAAQ,IAAI,MAAM,IAAI,sBAAsB,CAAC;OAE7C,MAAK,MAAM,QAAQ,UAAU;IAC3B,MAAM,OAAO,KAAK,WAAW,aAAa,MAAM,OAAO,IAAI,GAC9C,KAAK,WAAW,YAAY,MAAM,KAAK,IAAI,GAC3C,MAAM,MAAM,IAAI;IAC7B,M
AAM,QAAQ,KAAK,WAAW,aAAa,MAAM,OAAO,aAAa,GACvD,KAAK,WAAW,YAAY,MAAM,IAAI,WAAW,GACjD,MAAM,MAAM,QAAQ;AAClC,YAAQ,IAAI,KAAK,KAAK,GAAG,KAAK,KAAK,GAAG,QAAQ;;;EAMpD,MAAM,iBAAiB,cAAc;AACrC,OAAK,MAAM,EAAE,YAAY,gBAAgB;AACvC,OAAI,CAAC,WAAW,OAAO,KAAK,CAAE;GAC9B,MAAM,eAAe,KAAK,OAAO,MAAM,QAAQ,SAAS;AACxD,OAAI,WAAW,aAAa,EAAE;IAC5B,MAAM,SAAS,YAAY,cAAc,EAAE,eAAe,MAAM,CAAC,CAC9D,QAAO,MAAK,EAAE,aAAa,CAAC,CAC5B,KAAI,MAAK,EAAE,KAAK;AACnB,QAAI,OAAO,SAAS,GAAG;AACrB,aAAQ,IAAI,MAAM,KAAK,mBAAmB,OAAO,KAAK,IAAI,CAAC;AAC3D,UAAK,MAAM,aAAa,OACtB,SAAQ,IAAI,KAAK,MAAM,MAAM,IAAI,CAAC,GAAG,UAAU,GAAG,MAAM,MAAM,kBAAkB,GAAG;;;GAMzF,MAAM,QAAQ,qBAAqB,OAAO,KAAK;AAC/C,OAAI,MAAM,SAAS,GAAG;AACpB,YAAQ,IAAI,MAAM,KAAK,sBAAsB,OAAO,KAAK,KAAK,MAAM,KAAK,KAAK,GAAG,CAAC;AAElF,QAD2B,WAAW,KAAK,OAAO,MAAM,QAAQ,SAAS,CAAC,EAClD;KACtB,MAAM,SAAS,YAAY,KAAK,OAAO,MAAM,QAAQ,SAAS,EAAE,EAAE,eAAe,MAAM,CAAC,CACrF,QAAO,MAAK,EAAE,aAAa,CAAC,CAC5B,KAAI,MAAK,EAAE,KAAK;AACnB,UAAK,MAAM,QAAQ,MACjB,MAAK,MAAM,aAAa,OACtB,SAAQ,IAAI,KAAK,MAAM,MAAM,IAAI,CAAC,GAAG,UAAU,KAAK,OAAO;;;;AAOrE,UAAQ,IAAI,GAAG;AACf,UAAQ,IAAI,MAAM,IAAI,0CAA0C,CAAC;AACjE;;CAIF,MAAM,gBAAgB,oBAAoB;AAC1C,KAAI,cAAc;MACZ,cAAc,oBAChB,SAAQ,IAAI,MAAM,OAAO,8FAA8F,CAAC;;CAK5H,MAAM,gBAAgB,8BAA8B;AACpD,KAAI,cAAc,QAAQ,SAAS,EACjC,SAAQ,IAAI,MAAM,IAAI,WAAW,cAAc,MAAM,8BAA8B,cAAc,QAAQ,KAAK,KAAK,GAAG,CAAC;CAIzH,MAAM,YAAY,6BAA6B;AAC/C,KAAI,UAAU,gBAAgB,SAAS,GAAG;AACxC,UAAQ,IAAI,MAAM,KAAK,qBAAqB,UAAU,gBAAgB,OAAO,wCAAwC,CAAC;AACtH,MAAI,UAAU,qBAAqB,SAAS,EAC1C,SAAQ,IAAI,MAAM,IAAI,eAAe,UAAU,qBAAqB,OAAO,uBAAuB,CAAC;;AAOvG,KAHe,YAAY,CAGhB,KAAK,oBAAoB;EAClC,MAAM,UAAU,IAAI,qBAAqB,CAAC,OAAO;EAQjD,MAAM,SAAS,aANI;GACjB,YAAY;GACZ,YAAY;GACZ,YAAY;GACb,CAEsC;AAEvC,MAAI,OAAO,QAAQ,SAAS,EAC1B,SAAQ,QAAQ,mBAAmB,OAAO,YAAY;MAEtD,SAAQ,KAAK,gCAAgC;AAG/C,MAAI,QAAQ,WACV;;CAKJ,MAAM,eAAe,IAAI,gCAAgC,CAAC,OAAO;CACjE,MAAM,cAAc,cAAc;CAClC,MAAM,aAAa,EAAE;AACrB,KAAI,YAAY,OAAO,SAAS,EAAG,YAAW,KAAK,GAAG,YAAY,OAAO,OAAO,SAAS;AACzF,KAAI,YAAY,OAAO,SAAS,EAAG,YAAW,KAAK,GAAG,YAAY,OAAO,OAAO,SAAS;AACzF,KAAI,YAAY,MAAM,SAAS,EAAG,YAAW,KAAK,GAAG,YAA
Y,MAAM,OAAO,QAAQ;AACtF,cAAa,QAAQ,oBAAoB,WAAW,SAAS,IAAI,WAAW,KAAK,KAAK,GAAG,eAAe;CAGxG,MAAM,cAAc,gBAAgB;CACpC,MAAM,UAAU,IAAI,uBAAuB,eAAe,WAAW,MAAM,CAAC,OAAO;AAEnF,KAAI,CAAC,YACH,SAAQ,KAAK,0DAA0D;MAClE;EACL,MAAM,SAAS,YAAY;GAAE,OAAO,QAAQ;GAAO,MAAM,QAAQ;GAAM,CAAC;EACxE,MAAM,cAAc,OAAO,QAAQ,SAAS,OAAO,QAAQ;AAG3D,MAAI,OAAO,MAAM,SAAS,GAAG;AAC3B,WAAQ,KAAK,qBAAqB,OAAO,MAAM,OAAO,sBAAsB;AAC5E,QAAK,MAAM,KAAK,OAAO,OAAO;AAC5B,YAAQ,IAAI,MAAM,KAAK,OAAO,EAAE,KAAK,cAAc,CAAC;AACpD,YAAQ,IAAI,MAAM,KAAK,OAAO,EAAE,KAAK,oBAAoB,CAAC;IAE1D,MAAM,cAAc,EAAE,cAAc,MAAM,KAAK;IAC/C,MAAM,cAAc,EAAE,cAAc,MAAM,KAAK;IAC/C,MAAM,WAAW,KAAK,IAAI,YAAY,QAAQ,YAAY,OAAO;AACjE,SAAK,IAAI,IAAI,GAAG,IAAI,UAAU,IAC5B,KAAI,YAAY,OAAO,YAAY,IAAI;AACrC,SAAI,YAAY,OAAO,KAAA,EAAW,SAAQ,IAAI,MAAM,IAAI,KAAK,YAAY,KAAK,CAAC;AAC/E,SAAI,YAAY,OAAO,KAAA,EAAW,SAAQ,IAAI,MAAM,MAAM,KAAK,YAAY,KAAK,CAAC;;AAGrF,YAAQ,IAAI,GAAG;;;AAInB,MAAI,OAAO,UAAU,SAAS,KAAK,CAAC,QAAQ,OAAO;AACjD,WAAQ,KAAK,UAAU,YAAY,UAAU,OAAO,UAAU,OAAO,YAAY;AACjF,WAAQ,IAAI,GAAG;AACf,WAAQ,IAAI,MAAM,OAAO,uCAAuC,CAAC;AACjE,QAAK,MAAM,QAAQ,OAAO,UACxB,SAAQ,IAAI,MAAM,IAAI,OAAO,OAAO,CAAC;AAEvC,WAAQ,IAAI,GAAG;AACf,WAAQ,IAAI,MAAM,IAAI,mDAAmD,CAAC;aACjE,OAAO,QAAQ,SAAS,EACjC,SAAQ,QAAQ,UAAU,YAAY,qBAAqB,OAAO,QAAQ,OAAO,sBAAsB;MAEvG,SAAQ,QAAQ,UAAU,YAAY,mBAAmB;;CAK7D,MAAM,eAAe,IAAI,mBAAmB,CAAC,OAAO;CACpD,MAAM,cAAc,WAAW;AAE/B,KAAI,YAAY,OAAO,SAAS,GAAG;AACjC,eAAa,KAAK,UAAU,YAAY,OAAO,OAAO,UAAU,YAAY,OAAO,OAAO,SAAS;AACnG,OAAK,MAAM,SAAS,YAAY,OAC9B,SAAQ,IAAI,MAAM,IAAI,OAAO,QAAQ,CAAC;YAE/B,YAAY,OAAO,SAAS,EACrC,cAAa,QAAQ,UAAU,YAAY,OAAO,OAAO,8BAA8B;KAEvF,cAAa,KAAK,mBAAmB;CAGvC,MAAM,WAAW,cAAc;AAK/B,KAAI,SAAS,SAAS,KAAK,aAAa,KAAK,CAC3C,MAAK,MAAM,EAAE,KAAK,YAAY,UAAU;AACtC,MAAI,CAAC,WAAW,OAAO,KAAK,CAAE;AAE9B,MAAI,CAAC,WADgB,KAAK,OAAO,MAAM,SAAS,CACnB,CAAE;AAE/B,MAAI;AACF,YAAS,iCAAiC;IAAE,KAAK,OAAO;IAAM,OAAO;IAAQ,SAAS;IAAM,CAAC;WACtF,GAAQ;GACf,MAAM,MAAM,OAAO,GAAG,UAAU,GAAG,UAAU,GAAG,WAAW,GAAG;AAC9D,OAAI,IAAI,SAAS,WAAW,KAAK,IAAI,SAAS,YAAY,IAAI,IAAI,SAAS,YAAY,IAAI,IAAI,SAAS,aAAa,GAAG;IACtH,MAAM,eAAe,IAAI,mCAAmC,OA
AO,KAAK,KAAK,CAAC,OAAO;AACrF,QAAI;KACF,MAAM,UAAU,OAAO,OAAO,MAAM,aAAa,CAAC,QAAQ,eAAe,IAAI;AAC7E,cAAS,oBAAoB,UAAU;MAAE,KAAK,OAAO;MAAM,OAAO;MAAQ,SAAS;MAAO,CAAC;AAC3F,SAAI;AAAE,eAAS,qCAAqC;OAAE,KAAK,OAAO;OAAM,OAAO;OAAQ,CAAC;aAAU;AAClG,kBAAa,QAAQ,kCAAkC,OAAO,KAAK,YAAY,OAAO,GAAG;YACnF;AACN,kBAAa,KAAK,uCAAuC,OAAO,KAAK,aAAa,OAAO,KAAK,aAAa;;;;;AASrH,KAAI,CAAC,aAAa,KAAK,EAAE;AACvB,UAAQ,IAAI,MAAM,OAAO,6DAA6D,CAAC;AACvF,UAAQ,IAAI,MAAM,IAAI,kDAAkD,CAAC;;CAI3E,MAAM,oBAAoB,IAAI,wBAAwB,CAAC,OAAO;CAC9D,MAAM,mBAAmB,gBAAgB;AAEzC,KAAI,iBAAiB,OAAO,SAAS,GAAG;AACtC,oBAAkB,KAAK,wBAAwB,iBAAiB,OAAO,OAAO,eAAe,iBAAiB,OAAO,OAAO,WAAW;AACvI,OAAK,MAAM,SAAS,iBAAiB,OACnC,SAAQ,IAAI,MAAM,IAAI,OAAO,QAAQ,CAAC;YAE/B,iBAAiB,OAAO,SAAS,EAC1C,mBAAkB,QAAQ,wBAAwB,iBAAiB,OAAO,KAAK,KAAK,GAAG;KAEvF,mBAAkB,KAAK,qDAAqD;AAK9E,KAAI,CADc,aAAa,qBAAqB,EACpC;EACd,MAAM,gBAAgB,IAAI,mCAAmC,CAAC,OAAO;AACrE,MAAI;AACF,YAAS,iDAAiD;IACxD,OAAO;IACP,SAAS;IACV,CAAC;AACF,iBAAc,QAAQ,+BAA+B;WAC9C,OAAO;AACd,iBAAc,KAAK,4FAA4F;;;AAKnH,KAAI,CAAC,aAAa,SAAS,EAAE;EAC3B,MAAM,gBAAgB,IAAI,uBAAuB,CAAC,OAAO;AACzD,MAAI;GACF,MAAM,SAAS,KAAK,SAAS,EAAE,UAAU,MAAM;AAC/C,aAAU,QAAQ,EAAE,WAAW,MAAM,CAAC;GACtC,MAAM,aAAa,KAAK,QAAQ,SAAS;AAEzC,YAAS,gGADI,QAAQ,SAAS,QAAQ,UAAU,QAAQ,KACsD,QAAQ,WAAW,iBAAiB,WAAW,IAAI;IAC/J,OAAO;IACP,SAAS;IACV,CAAC;AACF,iBAAc,QAAQ,mBAAmB;UACnC;AACN,iBAAc,KAAK,iFAAiF;;;AAKxG,KAAI,CAAC,aAAa,KAAK,EAAE;EACvB,MAAM,YAAY,IAAI,gCAAgC,CAAC,OAAO;AAC9D,MAAI;GACF,MAAM,SAAS,KAAK,SAAS,EAAE,UAAU,MAAM;AAC/C,aAAU,QAAQ,EAAE,WAAW,MAAM,CAAC;GACtC,MAAM,SAAS,KAAK,QAAQ,KAAK;GACjC,MAAM,OAAO,QAAQ,SAAS,QAAQ,UAAU,QAAQ;AAExD,YAAS,sEADQ,QAAQ,aAAa,WAAW,WAAW,QAC4B,GAAG,KAAK,QAAQ,OAAO,iBAAiB,OAAO,IAAI;IACzI,OAAO;IACP,SAAS;IACV,CAAC;AACF,aAAU,QAAQ,uBAAuB;UACnC;AACN,aAAU,KAAK,4EAA4E;;;CAM/F,MAAM,UAAU,KAAK,SAAS,EAAE,WAAW,WAAW;AACtD,KAAI;AACF,MAAI,WAAW,QAAQ,EAAE;GACvB,MAAM,YAAY,KAAK,MAAM,aAAa,SAAS,QAAQ,CAAC;GAC5D,MAAM,KAAK,WAAW,YAAY;AAClC,OAAI,MAAM,MAAM,QAAQ,GAAG,KAAK,IAAI,CAAC,GAAG,KAAK,SAAS,aAAa,EAAE;AACnE,OAAG,KAAK,KAAK,aAAa;AAC1B,kBAAc,SAAS,KAAK,UAAU,WAAW,MAAM,EAAE,
GAAG,KAAK;AACjE,YAAQ,IAAI,MAAM,MAAM,2EAA2E,CAAC;;;SAGlG;AASR,MAAK,MAAM,EAAE,YAAY,UAAU;AACjC,MAAI,CAAC,WAAW,OAAO,KAAK,CAAE;EAG9B,MAAM,YAAY,uBAAuB,OAAO,KAAK;AACrD,MAAI,UAAU,SAAS,SAAS,EAC9B,SAAQ,IAAI,MAAM,KAAK,oCAAoC,OAAO,KAAK,IAAI,UAAU,SAAS,KAAK,KAAK,GAAG,CAAC;AAE9G,MAAI,UAAU,QAAQ,SAAS,EAC7B,SAAQ,IAAI,MAAM,OAAO,qCAAqC,OAAO,KAAK,IAAI,UAAU,QAAQ,KAAK,KAAK,GAAG,CAAC;AAEhH,OAAK,MAAM,OAAO,UAAU,OAC1B,SAAQ,IAAI,MAAM,IAAI,sBAAsB,OAAO,KAAK,IAAI,MAAM,CAAC;EAIrE,MAAM,kBAAkB,iBAAiB,OAAO,KAAK;AACrD,OAAK,MAAM,KAAK,iBAAiB;AAC/B,OAAI,EAAE,QAAQ,SAAS,EACrB,SAAQ,IAAI,MAAM,KAAK,UAAU,EAAE,QAAQ,OAAO,eAAe,EAAE,KAAK,MAAM,OAAO,OAAO,CAAC;AAE/F,QAAK,MAAM,OAAO,EAAE,OAClB,SAAQ,IAAI,MAAM,IAAI,0BAA0B,EAAE,KAAK,OAAO,OAAO,KAAK,IAAI,MAAM,CAAC;;;AAM3F,KAAI,SAAS,SAAS,KAAK,WAAW,sBAAsB,EAAE;EAC5D,MAAM,kBAAkB,IAAI,iDAAiD,CAAC,OAAO;EACrF,IAAI,iBAAiB;EACrB,IAAI,kBAAkB;AAEtB,OAAK,MAAM,EAAE,YAAY,UAAU;AACjC,OAAI,CAAC,WAAW,OAAO,KAAK,CAAE;GAG9B,MAAM,UAAoB,EAAE;AAG5B,OAAI,WAAW,KAAK,OAAO,MAAM,OAAO,CAAC,IAAI,SAAS,KAAK,OAAO,MAAM,OAAO,CAAC,CAAC,aAAa,CAC5F,SAAQ,KAAK,KAAK,OAAO,MAAM,OAAO,CAAC;OAGvC,KAAI;IACF,MAAM,UAAU,YAAY,OAAO,KAAK;AACxC,SAAK,MAAM,SAAS,SAAS;KAE3B,MAAM,UAAU,KADE,KAAK,OAAO,MAAM,MAAM,EACV,OAAO;AACvC,SAAI,WAAW,QAAQ,IAAI,SAAS,QAAQ,CAAC,aAAa,CACxD,SAAQ,KAAK,QAAQ;;WAGnB;AAMV,QAAK,MAAM,UAAU,SAAS;IAC5B,MAAM,cAAc,KAAK,QAAQ,QAAQ;AACzC,QAAI,CAAC,WAAW,YAAY,CAC1B,WAAU,aAAa,EAAE,WAAW,MAAM,CAAC;AAG7C,QAAI;KACF,MAAM,QAAQ,YAAY,sBAAsB,CAAC,QAAO,MACtD,SAAS,KAAK,uBAAuB,EAAE,CAAC,CAAC,QAAQ,CAClD;AAED,UAAK,MAAM,QAAQ,OAAO;MACxB,MAAM,SAAS,KAAK,uBAAuB,KAAK;MAChD,MAAM,SAAS,KAAK,aAAa,KAAK;AAGtC,UAAI,WAAW,OAAO,EAAE;AACtB,WAAI;QACF,MAAM,EAAE,iBAAiB,MAAM,OAAO;AACtC,YAAI,aAAa,OAAO,KAAK,OAAQ;eAC/B;OAIR,MAAM,EAAE,eAAe,MAAM,OAAO;AACpC,WAAI;AAAE,mBAAW,QAAQ,GAAG,OAAO,SAAS;eAAU;;AAGxD,UAAI;AACF,mBAAY,QAAQ,OAAO;AAC3B;cACM;;AAEV;YACM;;;AAIZ,MAAI,iBAAiB,EACnB,iBAAgB,QAAQ,0BAA0B,gBAAgB,aAAa;MAE/E,iBAAgB,KAAK,+BAA+B"}
@@ -0,0 +1,2 @@
1
+ import { t as syncCommand } from "./sync-DMfgd389.js";
2
+ export { syncCommand };
@@ -1,5 +1,5 @@
1
1
  import { t as __esmMin } from "./chunk-ruWRV7i3.js";
2
- import { W as init_paths, b as PANOPTICON_HOME } from "./paths-lMaxrYtT.js";
2
+ import { G as init_paths, b as PANOPTICON_HOME } from "./paths-CDJ_HsbN.js";
3
3
  import { existsSync, mkdirSync, readFileSync, unlinkSync, writeFileSync } from "fs";
4
4
  import { join } from "path";
5
5
  import { exec } from "child_process";
@@ -294,4 +294,4 @@ var init_tldr_daemon = __esmMin((() => {
294
294
  //#endregion
295
295
  export { init_tldr_daemon as i, getTldrDaemonService as n, getTldrMetrics as r, TldrDaemonService as t };
296
296
 
297
- //# sourceMappingURL=tldr-daemon-BCEFPItr.js.map
297
+ //# sourceMappingURL=tldr-daemon-CFx4LXAl.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"tldr-daemon-BCEFPItr.js","names":[],"sources":["../src/lib/tldr-daemon.ts"],"sourcesContent":["/**\n * TLDR Daemon Service\n *\n * Manages llm-tldr daemon lifecycle for project root and workspaces.\n * Provides code analysis and summarization for token-efficient agent work.\n */\n\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport { existsSync, writeFileSync, readFileSync, mkdirSync, unlinkSync } from 'fs';\nimport { join } from 'path';\nimport { createHash } from 'crypto';\nimport { PANOPTICON_HOME } from './paths.js';\n\n// ============================================================================\n// TLDR Session Metrics (PAN-236)\n// ============================================================================\n\n/**\n * Per-session TLDR metrics — delta since last captured cost event.\n *\n * Metrics are file-based, stored in <workspace>/.tldr/:\n * interceptions.log — written by tldr-read-enforcer on each TLDR serve\n * bypasses.log — written by tldr-read-enforcer on each deliberate bypass\n * metrics-checkpoint.json — tracks line offsets for delta (per-cost-event) reporting\n */\nexport interface TldrSessionMetrics {\n interceptions: number; // TLDR summaries served since last checkpoint\n bypasses: number; // TLDR bypasses since last checkpoint\n estimatedTokensSaved: number; // Rough token savings (fullTokens - ~1000 per interception)\n filesAnalyzed: string[]; // Unique files summarized in this window\n bypassReasons: Record<string, number>; // e.g. 
{ \"offset-limit\": 3, \"recently-edited\": 1 }\n}\n\n/** Checkpoint persisted to .tldr/metrics-checkpoint.json */\ninterface TldrMetricsCheckpoint {\n interceptionsLine: number;\n bypassesLine: number;\n capturedAt: string;\n}\n\n/**\n * Read TLDR session metrics for a workspace from log files.\n *\n * @param workspacePath - Workspace root (where .tldr/ lives)\n * @param sinceCheckpoint - Only return metrics since the last captured checkpoint\n */\nexport function getTldrMetrics(workspacePath: string, sinceCheckpoint = false): TldrSessionMetrics {\n const tldrDir = join(workspacePath, '.tldr');\n const interceptionsLog = join(tldrDir, 'interceptions.log');\n const bypassesLog = join(tldrDir, 'bypasses.log');\n const checkpointFile = join(tldrDir, 'metrics-checkpoint.json');\n\n let interceptionsStartLine = 0;\n let bypassesStartLine = 0;\n\n if (sinceCheckpoint && existsSync(checkpointFile)) {\n try {\n const checkpoint = JSON.parse(readFileSync(checkpointFile, 'utf-8')) as TldrMetricsCheckpoint;\n interceptionsStartLine = checkpoint.interceptionsLine || 0;\n bypassesStartLine = checkpoint.bypassesLine || 0;\n } catch { /* start from 0 on parse error */ }\n }\n\n // Parse interceptions log: each line is \"timestamp file_size rel_path\"\n const allInterceptionLines = existsSync(interceptionsLog)\n ? 
readFileSync(interceptionsLog, 'utf-8').split('\\n').filter(l => l.trim())\n : [];\n const newInterceptions = allInterceptionLines.slice(interceptionsStartLine);\n\n let estimatedTokensSaved = 0;\n const filesAnalyzed: string[] = [];\n\n for (const line of newInterceptions) {\n const parts = line.trim().split(' ');\n if (parts.length >= 3) {\n const fileSizeBytes = parseInt(parts[1], 10) || 0;\n const relPath = parts.slice(2).join(' ');\n // Rough estimate: ~1 token per 4 bytes for code; TLDR summary is ~1000 tokens\n const fullTokens = Math.round(fileSizeBytes / 4);\n estimatedTokensSaved += Math.max(0, fullTokens - 1000);\n if (relPath && !filesAnalyzed.includes(relPath)) {\n filesAnalyzed.push(relPath);\n }\n }\n }\n\n // Parse bypasses log: each line is \"timestamp reason [rel_path]\"\n const allBypassLines = existsSync(bypassesLog)\n ? readFileSync(bypassesLog, 'utf-8').split('\\n').filter(l => l.trim())\n : [];\n const newBypasses = allBypassLines.slice(bypassesStartLine);\n const bypassReasons: Record<string, number> = {};\n\n for (const line of newBypasses) {\n const parts = line.trim().split(' ');\n if (parts.length >= 2) {\n const reason = parts[1];\n bypassReasons[reason] = (bypassReasons[reason] || 0) + 1;\n }\n }\n\n return {\n interceptions: newInterceptions.length,\n bypasses: newBypasses.length,\n estimatedTokensSaved,\n filesAnalyzed,\n bypassReasons,\n };\n}\n\n/**\n * Capture TLDR metrics since the last checkpoint and advance the checkpoint.\n *\n * Call this once per cost event batch to get the delta metrics for that batch,\n * then update the checkpoint so the next call starts from here.\n *\n * @param workspacePath - Workspace root (where .tldr/ lives)\n * @returns Metrics delta since last capture, or null if no .tldr/ directory exists\n */\nexport function captureTldrMetrics(workspacePath: string): TldrSessionMetrics | null {\n const tldrDir = join(workspacePath, '.tldr');\n if (!existsSync(tldrDir)) {\n return null;\n }\n\n const metrics = 
getTldrMetrics(workspacePath, true);\n\n // Advance checkpoint to current line counts\n const interceptionsLog = join(tldrDir, 'interceptions.log');\n const bypassesLog = join(tldrDir, 'bypasses.log');\n const checkpointFile = join(tldrDir, 'metrics-checkpoint.json');\n\n const interceptionsTotal = existsSync(interceptionsLog)\n ? readFileSync(interceptionsLog, 'utf-8').split('\\n').filter(l => l.trim()).length\n : 0;\n const bypassesTotal = existsSync(bypassesLog)\n ? readFileSync(bypassesLog, 'utf-8').split('\\n').filter(l => l.trim()).length\n : 0;\n\n const checkpoint: TldrMetricsCheckpoint = {\n interceptionsLine: interceptionsTotal,\n bypassesLine: bypassesTotal,\n capturedAt: new Date().toISOString(),\n };\n\n try {\n writeFileSync(checkpointFile, JSON.stringify(checkpoint, null, 2), 'utf-8');\n } catch { /* non-fatal — metrics still returned */ }\n\n return metrics;\n}\n\nconst execAsync = promisify(exec);\n\n/** Directory for TLDR daemon state files */\nconst TLDR_STATE_DIR = join(PANOPTICON_HOME, 'tldr');\n\n/** Ensure TLDR state directory exists */\nfunction ensureTldrStateDir(): void {\n if (!existsSync(TLDR_STATE_DIR)) {\n mkdirSync(TLDR_STATE_DIR, { recursive: true });\n }\n}\n\n/**\n * TLDR daemon state\n */\ninterface TldrDaemonState {\n running: boolean;\n pid?: number;\n startedAt?: string;\n workspacePath: string;\n venvPath: string;\n}\n\n/**\n * TLDR daemon status\n */\nexport interface TldrDaemonStatus {\n running: boolean;\n pid?: number;\n startedAt?: Date;\n workspacePath: string;\n venvPath: string;\n healthy: boolean;\n}\n\n/**\n * Hash workspace path to create a stable identifier\n */\nfunction hashWorkspacePath(path: string): string {\n return createHash('sha256').update(path).digest('hex').substring(0, 16);\n}\n\n/**\n * Get state file path for a workspace\n */\nfunction getStateFilePath(workspacePath: string): string {\n ensureTldrStateDir();\n const hash = hashWorkspacePath(workspacePath);\n const stateDir = join(TLDR_STATE_DIR, 
hash);\n if (!existsSync(stateDir)) {\n mkdirSync(stateDir, { recursive: true });\n }\n return join(stateDir, 'daemon.json');\n}\n\n/**\n * Write daemon state to file\n */\nfunction writeStateFile(workspacePath: string, venvPath: string, running: boolean, pid?: number): void {\n try {\n const stateFile = getStateFilePath(workspacePath);\n if (running) {\n const state: TldrDaemonState = {\n running: true,\n pid: pid || process.pid,\n startedAt: new Date().toISOString(),\n workspacePath,\n venvPath,\n };\n writeFileSync(stateFile, JSON.stringify(state, null, 2));\n } else {\n if (existsSync(stateFile)) {\n unlinkSync(stateFile);\n }\n }\n } catch (error) {\n console.warn('Failed to write TLDR daemon state file:', error);\n }\n}\n\n/**\n * Read daemon state from file\n */\nfunction readStateFile(workspacePath: string): TldrDaemonState | null {\n try {\n const stateFile = getStateFilePath(workspacePath);\n if (!existsSync(stateFile)) {\n return null;\n }\n\n const data = JSON.parse(readFileSync(stateFile, 'utf-8')) as TldrDaemonState;\n\n // Verify the process is still running\n if (data.pid) {\n try {\n process.kill(data.pid, 0); // Signal 0 checks if process exists\n return data;\n } catch {\n // Process doesn't exist - clean up stale state file\n unlinkSync(stateFile);\n return null;\n }\n }\n\n return data;\n } catch {\n // State file doesn't exist or is corrupted\n return null;\n }\n}\n\n/**\n * TLDR Daemon Service\n *\n * Manages llm-tldr daemons for project root and workspaces.\n */\nexport class TldrDaemonService {\n private workspacePath: string;\n private venvPath: string;\n\n /**\n * Create a new TLDR daemon service for a workspace\n *\n * @param workspacePath - Path to the workspace (project root or workspace directory)\n * @param venvPath - Path to the Python venv containing llm-tldr\n */\n constructor(workspacePath: string, venvPath: string) {\n this.workspacePath = workspacePath;\n this.venvPath = venvPath;\n }\n\n /**\n * Start the TLDR daemon\n *\n * 
@param background - Run daemon in background (default: true)\n */\n async start(background = true): Promise<void> {\n // Check if daemon is already running\n const currentState = readStateFile(this.workspacePath);\n if (currentState?.running) {\n console.warn(`TLDR daemon already running for ${this.workspacePath} (PID: ${currentState.pid})`);\n return;\n }\n\n // Verify venv and tldr binary exist\n const tldrBin = join(this.venvPath, 'bin', 'tldr');\n if (!existsSync(tldrBin)) {\n throw new Error(`tldr binary not found at ${tldrBin}. Ensure llm-tldr is installed in the venv.`);\n }\n\n console.log(`Starting TLDR daemon for ${this.workspacePath}...`);\n\n try {\n // Start daemon with project path\n const cmd = background\n ? `cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon start --project \"${this.workspacePath}\" >/dev/null 2>&1 &`\n : `cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon start --project \"${this.workspacePath}\"`;\n\n const { stdout, stderr } = await execAsync(cmd);\n\n if (stderr && !stderr.includes('started')) {\n console.warn(`TLDR daemon start warning: ${stderr}`);\n }\n\n // Give daemon a moment to start and write its PID file\n await new Promise(r => setTimeout(r, 500));\n\n // Try to get PID from tldr's status command\n let pid: number | undefined;\n try {\n const statusResult = await execAsync(`cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon status`);\n const pidMatch = statusResult.stdout.match(/PID[:\\s]+(\\d+)/i);\n if (pidMatch) {\n pid = parseInt(pidMatch[1]);\n }\n } catch {\n // Status command failed - daemon might not expose PID\n }\n\n writeStateFile(this.workspacePath, this.venvPath, true, pid);\n console.log(`✓ TLDR daemon started for ${this.workspacePath}${pid ? ` (PID: ${pid})` : ''}`);\n } catch (error) {\n const errorMessage = error instanceof Error ? 
error.message : String(error);\n throw new Error(`Failed to start TLDR daemon: ${errorMessage}`);\n }\n }\n\n /**\n * Stop the TLDR daemon\n */\n async stop(): Promise<void> {\n const currentState = readStateFile(this.workspacePath);\n if (!currentState?.running) {\n console.warn(`TLDR daemon not running for ${this.workspacePath}`);\n return;\n }\n\n const tldrBin = join(this.venvPath, 'bin', 'tldr');\n if (!existsSync(tldrBin)) {\n console.warn(`tldr binary not found at ${tldrBin}, cleaning up state file`);\n writeStateFile(this.workspacePath, this.venvPath, false);\n return;\n }\n\n console.log(`Stopping TLDR daemon for ${this.workspacePath}...`);\n\n try {\n // Stop daemon\n await execAsync(`cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon stop`);\n\n writeStateFile(this.workspacePath, this.venvPath, false);\n console.log(`✓ TLDR daemon stopped for ${this.workspacePath}`);\n } catch (error) {\n // If stop fails, try to kill the process directly\n if (currentState.pid) {\n try {\n process.kill(currentState.pid, 'SIGTERM');\n console.log(`✓ Forcefully stopped TLDR daemon (PID: ${currentState.pid})`);\n } catch (killError) {\n console.warn(`Failed to kill TLDR daemon process: ${killError}`);\n }\n }\n\n // Clean up state file regardless\n writeStateFile(this.workspacePath, this.venvPath, false);\n }\n }\n\n /**\n * Get daemon status\n */\n async getStatus(): Promise<TldrDaemonStatus> {\n const state = readStateFile(this.workspacePath);\n\n if (!state?.running) {\n return {\n running: false,\n workspacePath: this.workspacePath,\n venvPath: this.venvPath,\n healthy: false,\n };\n }\n\n // Check health\n const healthy = await this.checkHealth();\n\n return {\n running: true,\n pid: state.pid,\n startedAt: state.startedAt ? 
new Date(state.startedAt) : undefined,\n workspacePath: this.workspacePath,\n venvPath: this.venvPath,\n healthy,\n };\n }\n\n /**\n * Check if daemon is healthy (can respond to status queries)\n */\n async checkHealth(): Promise<boolean> {\n const tldrBin = join(this.venvPath, 'bin', 'tldr');\n if (!existsSync(tldrBin)) {\n return false;\n }\n\n try {\n // Try to get daemon status\n await execAsync(`cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon status`, { timeout: 3000 });\n return true;\n } catch {\n return false;\n }\n }\n\n /**\n * Restart the daemon\n */\n async restart(): Promise<void> {\n console.log(`Restarting TLDR daemon for ${this.workspacePath}...`);\n await this.stop();\n await new Promise(r => setTimeout(r, 1000)); // Wait for cleanup\n await this.start();\n }\n\n /**\n * Warm the index (trigger initial analysis)\n *\n * @param background - Run in background (default: true)\n */\n async warm(background = true): Promise<void> {\n const tldrBin = join(this.venvPath, 'bin', 'tldr');\n if (!existsSync(tldrBin)) {\n throw new Error(`tldr binary not found at ${tldrBin}`);\n }\n\n console.log(`Warming TLDR index for ${this.workspacePath}...`);\n\n try {\n const cmd = background\n ? `cd \"${this.workspacePath}\" && \"${tldrBin}\" warm . >/dev/null 2>&1 &`\n : `cd \"${this.workspacePath}\" && \"${tldrBin}\" warm .`;\n\n await execAsync(cmd);\n console.log(`✓ TLDR index warming initiated for ${this.workspacePath}`);\n } catch (error) {\n const errorMessage = error instanceof Error ? error.message : String(error);\n throw new Error(`Failed to warm TLDR index: ${errorMessage}`);\n }\n }\n\n /**\n * Check if daemon is running\n */\n isRunning(): boolean {\n const state = readStateFile(this.workspacePath);\n return state?.running ?? 
false;\n }\n\n /**\n * Get workspace path\n */\n getWorkspacePath(): string {\n return this.workspacePath;\n }\n\n /**\n * Get venv path\n */\n getVenvPath(): string {\n return this.venvPath;\n }\n}\n\n/**\n * Global registry of TLDR daemon services by workspace path\n */\nconst daemonRegistry = new Map<string, TldrDaemonService>();\n\n/**\n * Get or create a TLDR daemon service for a workspace\n *\n * @param workspacePath - Path to the workspace\n * @param venvPath - Path to the Python venv\n */\nexport function getTldrDaemonService(workspacePath: string, venvPath: string): TldrDaemonService {\n const existing = daemonRegistry.get(workspacePath);\n if (existing) {\n return existing;\n }\n\n const service = new TldrDaemonService(workspacePath, venvPath);\n daemonRegistry.set(workspacePath, service);\n return service;\n}\n\n/**\n * Remove a daemon service from the registry\n *\n * @param workspacePath - Path to the workspace\n */\nexport function removeTldrDaemonService(workspacePath: string): void {\n daemonRegistry.delete(workspacePath);\n}\n\n/**\n * List all registered daemon services\n */\nexport function listTldrDaemonServices(): TldrDaemonService[] {\n return 
Array.from(daemonRegistry.values());\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AA+CA,SAAgB,eAAe,eAAuB,kBAAkB,OAA2B;CACjG,MAAM,UAAU,KAAK,eAAe,QAAQ;CAC5C,MAAM,mBAAmB,KAAK,SAAS,oBAAoB;CAC3D,MAAM,cAAc,KAAK,SAAS,eAAe;CACjD,MAAM,iBAAiB,KAAK,SAAS,0BAA0B;CAE/D,IAAI,yBAAyB;CAC7B,IAAI,oBAAoB;AAExB,KAAI,mBAAmB,WAAW,eAAe,CAC/C,KAAI;EACF,MAAM,aAAa,KAAK,MAAM,aAAa,gBAAgB,QAAQ,CAAC;AACpE,2BAAyB,WAAW,qBAAqB;AACzD,sBAAoB,WAAW,gBAAgB;SACzC;CAOV,MAAM,oBAHuB,WAAW,iBAAiB,GACrD,aAAa,kBAAkB,QAAQ,CAAC,MAAM,KAAK,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC,GACzE,EAAE,EACwC,MAAM,uBAAuB;CAE3E,IAAI,uBAAuB;CAC3B,MAAM,gBAA0B,EAAE;AAElC,MAAK,MAAM,QAAQ,kBAAkB;EACnC,MAAM,QAAQ,KAAK,MAAM,CAAC,MAAM,IAAI;AACpC,MAAI,MAAM,UAAU,GAAG;GACrB,MAAM,gBAAgB,SAAS,MAAM,IAAI,GAAG,IAAI;GAChD,MAAM,UAAU,MAAM,MAAM,EAAE,CAAC,KAAK,IAAI;GAExC,MAAM,aAAa,KAAK,MAAM,gBAAgB,EAAE;AAChD,2BAAwB,KAAK,IAAI,GAAG,aAAa,IAAK;AACtD,OAAI,WAAW,CAAC,cAAc,SAAS,QAAQ,CAC7C,eAAc,KAAK,QAAQ;;;CASjC,MAAM,eAHiB,WAAW,YAAY,GAC1C,aAAa,aAAa,QAAQ,CAAC,MAAM,KAAK,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC,GACpE,EAAE,EAC6B,MAAM,kBAAkB;CAC3D,MAAM,gBAAwC,EAAE;AAEhD,MAAK,MAAM,QAAQ,aAAa;EAC9B,MAAM,QAAQ,KAAK,MAAM,CAAC,MAAM,IAAI;AACpC,MAAI,MAAM,UAAU,GAAG;GACrB,MAAM,SAAS,MAAM;AACrB,iBAAc,WAAW,cAAc,WAAW,KAAK;;;AAI3D,QAAO;EACL,eAAe,iBAAiB;EAChC,UAAU,YAAY;EACtB;EACA;EACA;EACD;;;AAmDH,SAAS,qBAA2B;AAClC,KAAI,CAAC,WAAW,eAAe,CAC7B,WAAU,gBAAgB,EAAE,WAAW,MAAM,CAAC;;;;;AA8BlD,SAAS,kBAAkB,MAAsB;AAC/C,QAAO,WAAW,SAAS,CAAC,OAAO,KAAK,CAAC,OAAO,MAAM,CAAC,UAAU,GAAG,GAAG;;;;;AAMzE,SAAS,iBAAiB,eAA+B;AACvD,qBAAoB;CAEpB,MAAM,WAAW,KAAK,gBADT,kBAAkB,cAAc,CACF;AAC3C,KAAI,CAAC,WAAW,SAAS,CACvB,WAAU,UAAU,EAAE,WAAW,MAAM,CAAC;AAE1C,QAAO,KAAK,UAAU,cAAc;;;;;AAMtC,SAAS,eAAe,eAAuB,UAAkB,SAAkB,KAAoB;AACrG,KAAI;EACF,MAAM,YAAY,iBAAiB,cAAc;AACjD,MAAI,SAAS;GACX,MAAM,QAAyB;IAC7B,SAAS;IACT,KAAK,OAAO,QAAQ;IACpB,4BAAW,IAAI,MAAM,EAAC,aAAa;IACnC;IACA;IACD;AACD,iBAAc,WAAW,KAAK,UAAU,OAAO,MAAM,EAAE,CAAC;aAEpD,WAAW,UAAU,CACvB,YAAW,UAAU;UAGlB,OAAO;AACd,UAAQ,KAAK,2CAA2C,MAAM;;;;;;AAOlE,SAAS,cAAc,eAA+C;AACpE,KAAI;EACF,MAAM,YAAY,iBAAiB,cAAc;AAC
jD,MAAI,CAAC,WAAW,UAAU,CACxB,QAAO;EAGT,MAAM,OAAO,KAAK,MAAM,aAAa,WAAW,QAAQ,CAAC;AAGzD,MAAI,KAAK,IACP,KAAI;AACF,WAAQ,KAAK,KAAK,KAAK,EAAE;AACzB,UAAO;UACD;AAEN,cAAW,UAAU;AACrB,UAAO;;AAIX,SAAO;SACD;AAEN,SAAO;;;;;;;;;AA6OX,SAAgB,qBAAqB,eAAuB,UAAqC;CAC/F,MAAM,WAAW,eAAe,IAAI,cAAc;AAClD,KAAI,SACF,QAAO;CAGT,MAAM,UAAU,IAAI,kBAAkB,eAAe,SAAS;AAC9D,gBAAe,IAAI,eAAe,QAAQ;AAC1C,QAAO;;;;aA7eoC;AA6IvC,aAAY,UAAU,KAAK;AAG3B,kBAAiB,KAAK,iBAAiB,OAAO;AAiHvC,qBAAb,MAA+B;EAC7B;EACA;;;;;;;EAQA,YAAY,eAAuB,UAAkB;AACnD,QAAK,gBAAgB;AACrB,QAAK,WAAW;;;;;;;EAQlB,MAAM,MAAM,aAAa,MAAqB;GAE5C,MAAM,eAAe,cAAc,KAAK,cAAc;AACtD,OAAI,cAAc,SAAS;AACzB,YAAQ,KAAK,mCAAmC,KAAK,cAAc,SAAS,aAAa,IAAI,GAAG;AAChG;;GAIF,MAAM,UAAU,KAAK,KAAK,UAAU,OAAO,OAAO;AAClD,OAAI,CAAC,WAAW,QAAQ,CACtB,OAAM,IAAI,MAAM,4BAA4B,QAAQ,6CAA6C;AAGnG,WAAQ,IAAI,4BAA4B,KAAK,cAAc,KAAK;AAEhE,OAAI;IAMF,MAAM,EAAE,QAAQ,WAAW,MAAM,UAJrB,aACR,OAAO,KAAK,cAAc,QAAQ,QAAQ,4BAA4B,KAAK,cAAc,uBACzF,OAAO,KAAK,cAAc,QAAQ,QAAQ,4BAA4B,KAAK,cAAc,GAE9C;AAE/C,QAAI,UAAU,CAAC,OAAO,SAAS,UAAU,CACvC,SAAQ,KAAK,8BAA8B,SAAS;AAItD,UAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,IAAI,CAAC;IAG1C,IAAI;AACJ,QAAI;KAEF,MAAM,YADe,MAAM,UAAU,OAAO,KAAK,cAAc,QAAQ,QAAQ,iBAAiB,EAClE,OAAO,MAAM,kBAAkB;AAC7D,SAAI,SACF,OAAM,SAAS,SAAS,GAAG;YAEvB;AAIR,mBAAe,KAAK,eAAe,KAAK,UAAU,MAAM,IAAI;AAC5D,YAAQ,IAAI,6BAA6B,KAAK,gBAAgB,MAAM,UAAU,IAAI,KAAK,KAAK;YACrF,OAAO;IACd,MAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AAC3E,UAAM,IAAI,MAAM,gCAAgC,eAAe;;;;;;EAOnE,MAAM,OAAsB;GAC1B,MAAM,eAAe,cAAc,KAAK,cAAc;AACtD,OAAI,CAAC,cAAc,SAAS;AAC1B,YAAQ,KAAK,+BAA+B,KAAK,gBAAgB;AACjE;;GAGF,MAAM,UAAU,KAAK,KAAK,UAAU,OAAO,OAAO;AAClD,OAAI,CAAC,WAAW,QAAQ,EAAE;AACxB,YAAQ,KAAK,4BAA4B,QAAQ,0BAA0B;AAC3E,mBAAe,KAAK,eAAe,KAAK,UAAU,MAAM;AACxD;;AAGF,WAAQ,IAAI,4BAA4B,KAAK,cAAc,KAAK;AAEhE,OAAI;AAEF,UAAM,UAAU,OAAO,KAAK,cAAc,QAAQ,QAAQ,eAAe;AAEzE,mBAAe,KAAK,eAAe,KAAK,UAAU,MAAM;AACxD,YAAQ,IAAI,6BAA6B,KAAK,gBAAgB;YACvD,OAAO;AAEd,QAAI,aAAa,IACf,KAAI;AACF,aAAQ,KAAK,aAAa,KAAK,UAAU;AACzC,aAAQ,IAAI,0CAA0C,aAAa,IAAI,GAAG;aACnE,WAAW;AAClB,aAAQ,KAAK,uCAAuC,YAAY;;AAKpE,mBAAe,KA
AK,eAAe,KAAK,UAAU,MAAM;;;;;;EAO5D,MAAM,YAAuC;GAC3C,MAAM,QAAQ,cAAc,KAAK,cAAc;AAE/C,OAAI,CAAC,OAAO,QACV,QAAO;IACL,SAAS;IACT,eAAe,KAAK;IACpB,UAAU,KAAK;IACf,SAAS;IACV;GAIH,MAAM,UAAU,MAAM,KAAK,aAAa;AAExC,UAAO;IACL,SAAS;IACT,KAAK,MAAM;IACX,WAAW,MAAM,YAAY,IAAI,KAAK,MAAM,UAAU,GAAG,KAAA;IACzD,eAAe,KAAK;IACpB,UAAU,KAAK;IACf;IACD;;;;;EAMH,MAAM,cAAgC;GACpC,MAAM,UAAU,KAAK,KAAK,UAAU,OAAO,OAAO;AAClD,OAAI,CAAC,WAAW,QAAQ,CACtB,QAAO;AAGT,OAAI;AAEF,UAAM,UAAU,OAAO,KAAK,cAAc,QAAQ,QAAQ,kBAAkB,EAAE,SAAS,KAAM,CAAC;AAC9F,WAAO;WACD;AACN,WAAO;;;;;;EAOX,MAAM,UAAyB;AAC7B,WAAQ,IAAI,8BAA8B,KAAK,cAAc,KAAK;AAClE,SAAM,KAAK,MAAM;AACjB,SAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,IAAK,CAAC;AAC3C,SAAM,KAAK,OAAO;;;;;;;EAQpB,MAAM,KAAK,aAAa,MAAqB;GAC3C,MAAM,UAAU,KAAK,KAAK,UAAU,OAAO,OAAO;AAClD,OAAI,CAAC,WAAW,QAAQ,CACtB,OAAM,IAAI,MAAM,4BAA4B,UAAU;AAGxD,WAAQ,IAAI,0BAA0B,KAAK,cAAc,KAAK;AAE9D,OAAI;AAKF,UAAM,UAJM,aACR,OAAO,KAAK,cAAc,QAAQ,QAAQ,8BAC1C,OAAO,KAAK,cAAc,QAAQ,QAAQ,UAE1B;AACpB,YAAQ,IAAI,sCAAsC,KAAK,gBAAgB;YAChE,OAAO;IACd,MAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AAC3E,UAAM,IAAI,MAAM,8BAA8B,eAAe;;;;;;EAOjE,YAAqB;AAEnB,UADc,cAAc,KAAK,cAAc,EACjC,WAAW;;;;;EAM3B,mBAA2B;AACzB,UAAO,KAAK;;;;;EAMd,cAAsB;AACpB,UAAO,KAAK;;;AAOV,kCAAiB,IAAI,KAAgC"}
1
+ {"version":3,"file":"tldr-daemon-CFx4LXAl.js","names":[],"sources":["../src/lib/tldr-daemon.ts"],"sourcesContent":["/**\n * TLDR Daemon Service\n *\n * Manages llm-tldr daemon lifecycle for project root and workspaces.\n * Provides code analysis and summarization for token-efficient agent work.\n */\n\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport { existsSync, writeFileSync, readFileSync, mkdirSync, unlinkSync } from 'fs';\nimport { join } from 'path';\nimport { createHash } from 'crypto';\nimport { PANOPTICON_HOME } from './paths.js';\n\n// ============================================================================\n// TLDR Session Metrics (PAN-236)\n// ============================================================================\n\n/**\n * Per-session TLDR metrics — delta since last captured cost event.\n *\n * Metrics are file-based, stored in <workspace>/.tldr/:\n * interceptions.log — written by tldr-read-enforcer on each TLDR serve\n * bypasses.log — written by tldr-read-enforcer on each deliberate bypass\n * metrics-checkpoint.json — tracks line offsets for delta (per-cost-event) reporting\n */\nexport interface TldrSessionMetrics {\n interceptions: number; // TLDR summaries served since last checkpoint\n bypasses: number; // TLDR bypasses since last checkpoint\n estimatedTokensSaved: number; // Rough token savings (fullTokens - ~1000 per interception)\n filesAnalyzed: string[]; // Unique files summarized in this window\n bypassReasons: Record<string, number>; // e.g. 
{ \"offset-limit\": 3, \"recently-edited\": 1 }\n}\n\n/** Checkpoint persisted to .tldr/metrics-checkpoint.json */\ninterface TldrMetricsCheckpoint {\n interceptionsLine: number;\n bypassesLine: number;\n capturedAt: string;\n}\n\n/**\n * Read TLDR session metrics for a workspace from log files.\n *\n * @param workspacePath - Workspace root (where .tldr/ lives)\n * @param sinceCheckpoint - Only return metrics since the last captured checkpoint\n */\nexport function getTldrMetrics(workspacePath: string, sinceCheckpoint = false): TldrSessionMetrics {\n const tldrDir = join(workspacePath, '.tldr');\n const interceptionsLog = join(tldrDir, 'interceptions.log');\n const bypassesLog = join(tldrDir, 'bypasses.log');\n const checkpointFile = join(tldrDir, 'metrics-checkpoint.json');\n\n let interceptionsStartLine = 0;\n let bypassesStartLine = 0;\n\n if (sinceCheckpoint && existsSync(checkpointFile)) {\n try {\n const checkpoint = JSON.parse(readFileSync(checkpointFile, 'utf-8')) as TldrMetricsCheckpoint;\n interceptionsStartLine = checkpoint.interceptionsLine || 0;\n bypassesStartLine = checkpoint.bypassesLine || 0;\n } catch { /* start from 0 on parse error */ }\n }\n\n // Parse interceptions log: each line is \"timestamp file_size rel_path\"\n const allInterceptionLines = existsSync(interceptionsLog)\n ? 
readFileSync(interceptionsLog, 'utf-8').split('\\n').filter(l => l.trim())\n : [];\n const newInterceptions = allInterceptionLines.slice(interceptionsStartLine);\n\n let estimatedTokensSaved = 0;\n const filesAnalyzed: string[] = [];\n\n for (const line of newInterceptions) {\n const parts = line.trim().split(' ');\n if (parts.length >= 3) {\n const fileSizeBytes = parseInt(parts[1], 10) || 0;\n const relPath = parts.slice(2).join(' ');\n // Rough estimate: ~1 token per 4 bytes for code; TLDR summary is ~1000 tokens\n const fullTokens = Math.round(fileSizeBytes / 4);\n estimatedTokensSaved += Math.max(0, fullTokens - 1000);\n if (relPath && !filesAnalyzed.includes(relPath)) {\n filesAnalyzed.push(relPath);\n }\n }\n }\n\n // Parse bypasses log: each line is \"timestamp reason [rel_path]\"\n const allBypassLines = existsSync(bypassesLog)\n ? readFileSync(bypassesLog, 'utf-8').split('\\n').filter(l => l.trim())\n : [];\n const newBypasses = allBypassLines.slice(bypassesStartLine);\n const bypassReasons: Record<string, number> = {};\n\n for (const line of newBypasses) {\n const parts = line.trim().split(' ');\n if (parts.length >= 2) {\n const reason = parts[1];\n bypassReasons[reason] = (bypassReasons[reason] || 0) + 1;\n }\n }\n\n return {\n interceptions: newInterceptions.length,\n bypasses: newBypasses.length,\n estimatedTokensSaved,\n filesAnalyzed,\n bypassReasons,\n };\n}\n\n/**\n * Capture TLDR metrics since the last checkpoint and advance the checkpoint.\n *\n * Call this once per cost event batch to get the delta metrics for that batch,\n * then update the checkpoint so the next call starts from here.\n *\n * @param workspacePath - Workspace root (where .tldr/ lives)\n * @returns Metrics delta since last capture, or null if no .tldr/ directory exists\n */\nexport function captureTldrMetrics(workspacePath: string): TldrSessionMetrics | null {\n const tldrDir = join(workspacePath, '.tldr');\n if (!existsSync(tldrDir)) {\n return null;\n }\n\n const metrics = 
getTldrMetrics(workspacePath, true);\n\n // Advance checkpoint to current line counts\n const interceptionsLog = join(tldrDir, 'interceptions.log');\n const bypassesLog = join(tldrDir, 'bypasses.log');\n const checkpointFile = join(tldrDir, 'metrics-checkpoint.json');\n\n const interceptionsTotal = existsSync(interceptionsLog)\n ? readFileSync(interceptionsLog, 'utf-8').split('\\n').filter(l => l.trim()).length\n : 0;\n const bypassesTotal = existsSync(bypassesLog)\n ? readFileSync(bypassesLog, 'utf-8').split('\\n').filter(l => l.trim()).length\n : 0;\n\n const checkpoint: TldrMetricsCheckpoint = {\n interceptionsLine: interceptionsTotal,\n bypassesLine: bypassesTotal,\n capturedAt: new Date().toISOString(),\n };\n\n try {\n writeFileSync(checkpointFile, JSON.stringify(checkpoint, null, 2), 'utf-8');\n } catch { /* non-fatal — metrics still returned */ }\n\n return metrics;\n}\n\nconst execAsync = promisify(exec);\n\n/** Directory for TLDR daemon state files */\nconst TLDR_STATE_DIR = join(PANOPTICON_HOME, 'tldr');\n\n/** Ensure TLDR state directory exists */\nfunction ensureTldrStateDir(): void {\n if (!existsSync(TLDR_STATE_DIR)) {\n mkdirSync(TLDR_STATE_DIR, { recursive: true });\n }\n}\n\n/**\n * TLDR daemon state\n */\ninterface TldrDaemonState {\n running: boolean;\n pid?: number;\n startedAt?: string;\n workspacePath: string;\n venvPath: string;\n}\n\n/**\n * TLDR daemon status\n */\nexport interface TldrDaemonStatus {\n running: boolean;\n pid?: number;\n startedAt?: Date;\n workspacePath: string;\n venvPath: string;\n healthy: boolean;\n}\n\n/**\n * Hash workspace path to create a stable identifier\n */\nfunction hashWorkspacePath(path: string): string {\n return createHash('sha256').update(path).digest('hex').substring(0, 16);\n}\n\n/**\n * Get state file path for a workspace\n */\nfunction getStateFilePath(workspacePath: string): string {\n ensureTldrStateDir();\n const hash = hashWorkspacePath(workspacePath);\n const stateDir = join(TLDR_STATE_DIR, 
hash);\n if (!existsSync(stateDir)) {\n mkdirSync(stateDir, { recursive: true });\n }\n return join(stateDir, 'daemon.json');\n}\n\n/**\n * Write daemon state to file\n */\nfunction writeStateFile(workspacePath: string, venvPath: string, running: boolean, pid?: number): void {\n try {\n const stateFile = getStateFilePath(workspacePath);\n if (running) {\n const state: TldrDaemonState = {\n running: true,\n pid: pid || process.pid,\n startedAt: new Date().toISOString(),\n workspacePath,\n venvPath,\n };\n writeFileSync(stateFile, JSON.stringify(state, null, 2));\n } else {\n if (existsSync(stateFile)) {\n unlinkSync(stateFile);\n }\n }\n } catch (error) {\n console.warn('Failed to write TLDR daemon state file:', error);\n }\n}\n\n/**\n * Read daemon state from file\n */\nfunction readStateFile(workspacePath: string): TldrDaemonState | null {\n try {\n const stateFile = getStateFilePath(workspacePath);\n if (!existsSync(stateFile)) {\n return null;\n }\n\n const data = JSON.parse(readFileSync(stateFile, 'utf-8')) as TldrDaemonState;\n\n // Verify the process is still running\n if (data.pid) {\n try {\n process.kill(data.pid, 0); // Signal 0 checks if process exists\n return data;\n } catch {\n // Process doesn't exist - clean up stale state file\n unlinkSync(stateFile);\n return null;\n }\n }\n\n return data;\n } catch {\n // State file doesn't exist or is corrupted\n return null;\n }\n}\n\n/**\n * TLDR Daemon Service\n *\n * Manages llm-tldr daemons for project root and workspaces.\n */\nexport class TldrDaemonService {\n private workspacePath: string;\n private venvPath: string;\n\n /**\n * Create a new TLDR daemon service for a workspace\n *\n * @param workspacePath - Path to the workspace (project root or workspace directory)\n * @param venvPath - Path to the Python venv containing llm-tldr\n */\n constructor(workspacePath: string, venvPath: string) {\n this.workspacePath = workspacePath;\n this.venvPath = venvPath;\n }\n\n /**\n * Start the TLDR daemon\n *\n * 
@param background - Run daemon in background (default: true)\n */\n async start(background = true): Promise<void> {\n // Check if daemon is already running\n const currentState = readStateFile(this.workspacePath);\n if (currentState?.running) {\n console.warn(`TLDR daemon already running for ${this.workspacePath} (PID: ${currentState.pid})`);\n return;\n }\n\n // Verify venv and tldr binary exist\n const tldrBin = join(this.venvPath, 'bin', 'tldr');\n if (!existsSync(tldrBin)) {\n throw new Error(`tldr binary not found at ${tldrBin}. Ensure llm-tldr is installed in the venv.`);\n }\n\n console.log(`Starting TLDR daemon for ${this.workspacePath}...`);\n\n try {\n // Start daemon with project path\n const cmd = background\n ? `cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon start --project \"${this.workspacePath}\" >/dev/null 2>&1 &`\n : `cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon start --project \"${this.workspacePath}\"`;\n\n const { stdout, stderr } = await execAsync(cmd);\n\n if (stderr && !stderr.includes('started')) {\n console.warn(`TLDR daemon start warning: ${stderr}`);\n }\n\n // Give daemon a moment to start and write its PID file\n await new Promise(r => setTimeout(r, 500));\n\n // Try to get PID from tldr's status command\n let pid: number | undefined;\n try {\n const statusResult = await execAsync(`cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon status`);\n const pidMatch = statusResult.stdout.match(/PID[:\\s]+(\\d+)/i);\n if (pidMatch) {\n pid = parseInt(pidMatch[1]);\n }\n } catch {\n // Status command failed - daemon might not expose PID\n }\n\n writeStateFile(this.workspacePath, this.venvPath, true, pid);\n console.log(`✓ TLDR daemon started for ${this.workspacePath}${pid ? ` (PID: ${pid})` : ''}`);\n } catch (error) {\n const errorMessage = error instanceof Error ? 
error.message : String(error);\n throw new Error(`Failed to start TLDR daemon: ${errorMessage}`);\n }\n }\n\n /**\n * Stop the TLDR daemon\n */\n async stop(): Promise<void> {\n const currentState = readStateFile(this.workspacePath);\n if (!currentState?.running) {\n console.warn(`TLDR daemon not running for ${this.workspacePath}`);\n return;\n }\n\n const tldrBin = join(this.venvPath, 'bin', 'tldr');\n if (!existsSync(tldrBin)) {\n console.warn(`tldr binary not found at ${tldrBin}, cleaning up state file`);\n writeStateFile(this.workspacePath, this.venvPath, false);\n return;\n }\n\n console.log(`Stopping TLDR daemon for ${this.workspacePath}...`);\n\n try {\n // Stop daemon\n await execAsync(`cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon stop`);\n\n writeStateFile(this.workspacePath, this.venvPath, false);\n console.log(`✓ TLDR daemon stopped for ${this.workspacePath}`);\n } catch (error) {\n // If stop fails, try to kill the process directly\n if (currentState.pid) {\n try {\n process.kill(currentState.pid, 'SIGTERM');\n console.log(`✓ Forcefully stopped TLDR daemon (PID: ${currentState.pid})`);\n } catch (killError) {\n console.warn(`Failed to kill TLDR daemon process: ${killError}`);\n }\n }\n\n // Clean up state file regardless\n writeStateFile(this.workspacePath, this.venvPath, false);\n }\n }\n\n /**\n * Get daemon status\n */\n async getStatus(): Promise<TldrDaemonStatus> {\n const state = readStateFile(this.workspacePath);\n\n if (!state?.running) {\n return {\n running: false,\n workspacePath: this.workspacePath,\n venvPath: this.venvPath,\n healthy: false,\n };\n }\n\n // Check health\n const healthy = await this.checkHealth();\n\n return {\n running: true,\n pid: state.pid,\n startedAt: state.startedAt ? 
new Date(state.startedAt) : undefined,\n workspacePath: this.workspacePath,\n venvPath: this.venvPath,\n healthy,\n };\n }\n\n /**\n * Check if daemon is healthy (can respond to status queries)\n */\n async checkHealth(): Promise<boolean> {\n const tldrBin = join(this.venvPath, 'bin', 'tldr');\n if (!existsSync(tldrBin)) {\n return false;\n }\n\n try {\n // Try to get daemon status\n await execAsync(`cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon status`, { timeout: 3000 });\n return true;\n } catch {\n return false;\n }\n }\n\n /**\n * Restart the daemon\n */\n async restart(): Promise<void> {\n console.log(`Restarting TLDR daemon for ${this.workspacePath}...`);\n await this.stop();\n await new Promise(r => setTimeout(r, 1000)); // Wait for cleanup\n await this.start();\n }\n\n /**\n * Warm the index (trigger initial analysis)\n *\n * @param background - Run in background (default: true)\n */\n async warm(background = true): Promise<void> {\n const tldrBin = join(this.venvPath, 'bin', 'tldr');\n if (!existsSync(tldrBin)) {\n throw new Error(`tldr binary not found at ${tldrBin}`);\n }\n\n console.log(`Warming TLDR index for ${this.workspacePath}...`);\n\n try {\n const cmd = background\n ? `cd \"${this.workspacePath}\" && \"${tldrBin}\" warm . >/dev/null 2>&1 &`\n : `cd \"${this.workspacePath}\" && \"${tldrBin}\" warm .`;\n\n await execAsync(cmd);\n console.log(`✓ TLDR index warming initiated for ${this.workspacePath}`);\n } catch (error) {\n const errorMessage = error instanceof Error ? error.message : String(error);\n throw new Error(`Failed to warm TLDR index: ${errorMessage}`);\n }\n }\n\n /**\n * Check if daemon is running\n */\n isRunning(): boolean {\n const state = readStateFile(this.workspacePath);\n return state?.running ?? 
false;\n }\n\n /**\n * Get workspace path\n */\n getWorkspacePath(): string {\n return this.workspacePath;\n }\n\n /**\n * Get venv path\n */\n getVenvPath(): string {\n return this.venvPath;\n }\n}\n\n/**\n * Global registry of TLDR daemon services by workspace path\n */\nconst daemonRegistry = new Map<string, TldrDaemonService>();\n\n/**\n * Get or create a TLDR daemon service for a workspace\n *\n * @param workspacePath - Path to the workspace\n * @param venvPath - Path to the Python venv\n */\nexport function getTldrDaemonService(workspacePath: string, venvPath: string): TldrDaemonService {\n const existing = daemonRegistry.get(workspacePath);\n if (existing) {\n return existing;\n }\n\n const service = new TldrDaemonService(workspacePath, venvPath);\n daemonRegistry.set(workspacePath, service);\n return service;\n}\n\n/**\n * Remove a daemon service from the registry\n *\n * @param workspacePath - Path to the workspace\n */\nexport function removeTldrDaemonService(workspacePath: string): void {\n daemonRegistry.delete(workspacePath);\n}\n\n/**\n * List all registered daemon services\n */\nexport function listTldrDaemonServices(): TldrDaemonService[] {\n return 
Array.from(daemonRegistry.values());\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AA+CA,SAAgB,eAAe,eAAuB,kBAAkB,OAA2B;CACjG,MAAM,UAAU,KAAK,eAAe,QAAQ;CAC5C,MAAM,mBAAmB,KAAK,SAAS,oBAAoB;CAC3D,MAAM,cAAc,KAAK,SAAS,eAAe;CACjD,MAAM,iBAAiB,KAAK,SAAS,0BAA0B;CAE/D,IAAI,yBAAyB;CAC7B,IAAI,oBAAoB;AAExB,KAAI,mBAAmB,WAAW,eAAe,CAC/C,KAAI;EACF,MAAM,aAAa,KAAK,MAAM,aAAa,gBAAgB,QAAQ,CAAC;AACpE,2BAAyB,WAAW,qBAAqB;AACzD,sBAAoB,WAAW,gBAAgB;SACzC;CAOV,MAAM,oBAHuB,WAAW,iBAAiB,GACrD,aAAa,kBAAkB,QAAQ,CAAC,MAAM,KAAK,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC,GACzE,EAAE,EACwC,MAAM,uBAAuB;CAE3E,IAAI,uBAAuB;CAC3B,MAAM,gBAA0B,EAAE;AAElC,MAAK,MAAM,QAAQ,kBAAkB;EACnC,MAAM,QAAQ,KAAK,MAAM,CAAC,MAAM,IAAI;AACpC,MAAI,MAAM,UAAU,GAAG;GACrB,MAAM,gBAAgB,SAAS,MAAM,IAAI,GAAG,IAAI;GAChD,MAAM,UAAU,MAAM,MAAM,EAAE,CAAC,KAAK,IAAI;GAExC,MAAM,aAAa,KAAK,MAAM,gBAAgB,EAAE;AAChD,2BAAwB,KAAK,IAAI,GAAG,aAAa,IAAK;AACtD,OAAI,WAAW,CAAC,cAAc,SAAS,QAAQ,CAC7C,eAAc,KAAK,QAAQ;;;CASjC,MAAM,eAHiB,WAAW,YAAY,GAC1C,aAAa,aAAa,QAAQ,CAAC,MAAM,KAAK,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC,GACpE,EAAE,EAC6B,MAAM,kBAAkB;CAC3D,MAAM,gBAAwC,EAAE;AAEhD,MAAK,MAAM,QAAQ,aAAa;EAC9B,MAAM,QAAQ,KAAK,MAAM,CAAC,MAAM,IAAI;AACpC,MAAI,MAAM,UAAU,GAAG;GACrB,MAAM,SAAS,MAAM;AACrB,iBAAc,WAAW,cAAc,WAAW,KAAK;;;AAI3D,QAAO;EACL,eAAe,iBAAiB;EAChC,UAAU,YAAY;EACtB;EACA;EACA;EACD;;;AAmDH,SAAS,qBAA2B;AAClC,KAAI,CAAC,WAAW,eAAe,CAC7B,WAAU,gBAAgB,EAAE,WAAW,MAAM,CAAC;;;;;AA8BlD,SAAS,kBAAkB,MAAsB;AAC/C,QAAO,WAAW,SAAS,CAAC,OAAO,KAAK,CAAC,OAAO,MAAM,CAAC,UAAU,GAAG,GAAG;;;;;AAMzE,SAAS,iBAAiB,eAA+B;AACvD,qBAAoB;CAEpB,MAAM,WAAW,KAAK,gBADT,kBAAkB,cAAc,CACF;AAC3C,KAAI,CAAC,WAAW,SAAS,CACvB,WAAU,UAAU,EAAE,WAAW,MAAM,CAAC;AAE1C,QAAO,KAAK,UAAU,cAAc;;;;;AAMtC,SAAS,eAAe,eAAuB,UAAkB,SAAkB,KAAoB;AACrG,KAAI;EACF,MAAM,YAAY,iBAAiB,cAAc;AACjD,MAAI,SAAS;GACX,MAAM,QAAyB;IAC7B,SAAS;IACT,KAAK,OAAO,QAAQ;IACpB,4BAAW,IAAI,MAAM,EAAC,aAAa;IACnC;IACA;IACD;AACD,iBAAc,WAAW,KAAK,UAAU,OAAO,MAAM,EAAE,CAAC;aAEpD,WAAW,UAAU,CACvB,YAAW,UAAU;UAGlB,OAAO;AACd,UAAQ,KAAK,2CAA2C,MAAM;;;;;;AAOlE,SAAS,cAAc,eAA+C;AACpE,KAAI;EACF,MAAM,YAAY,iBAAiB,cAAc;AAC
jD,MAAI,CAAC,WAAW,UAAU,CACxB,QAAO;EAGT,MAAM,OAAO,KAAK,MAAM,aAAa,WAAW,QAAQ,CAAC;AAGzD,MAAI,KAAK,IACP,KAAI;AACF,WAAQ,KAAK,KAAK,KAAK,EAAE;AACzB,UAAO;UACD;AAEN,cAAW,UAAU;AACrB,UAAO;;AAIX,SAAO;SACD;AAEN,SAAO;;;;;;;;;AA6OX,SAAgB,qBAAqB,eAAuB,UAAqC;CAC/F,MAAM,WAAW,eAAe,IAAI,cAAc;AAClD,KAAI,SACF,QAAO;CAGT,MAAM,UAAU,IAAI,kBAAkB,eAAe,SAAS;AAC9D,gBAAe,IAAI,eAAe,QAAQ;AAC1C,QAAO;;;;aA7eoC;AA6IvC,aAAY,UAAU,KAAK;AAG3B,kBAAiB,KAAK,iBAAiB,OAAO;AAiHvC,qBAAb,MAA+B;EAC7B;EACA;;;;;;;EAQA,YAAY,eAAuB,UAAkB;AACnD,QAAK,gBAAgB;AACrB,QAAK,WAAW;;;;;;;EAQlB,MAAM,MAAM,aAAa,MAAqB;GAE5C,MAAM,eAAe,cAAc,KAAK,cAAc;AACtD,OAAI,cAAc,SAAS;AACzB,YAAQ,KAAK,mCAAmC,KAAK,cAAc,SAAS,aAAa,IAAI,GAAG;AAChG;;GAIF,MAAM,UAAU,KAAK,KAAK,UAAU,OAAO,OAAO;AAClD,OAAI,CAAC,WAAW,QAAQ,CACtB,OAAM,IAAI,MAAM,4BAA4B,QAAQ,6CAA6C;AAGnG,WAAQ,IAAI,4BAA4B,KAAK,cAAc,KAAK;AAEhE,OAAI;IAMF,MAAM,EAAE,QAAQ,WAAW,MAAM,UAJrB,aACR,OAAO,KAAK,cAAc,QAAQ,QAAQ,4BAA4B,KAAK,cAAc,uBACzF,OAAO,KAAK,cAAc,QAAQ,QAAQ,4BAA4B,KAAK,cAAc,GAE9C;AAE/C,QAAI,UAAU,CAAC,OAAO,SAAS,UAAU,CACvC,SAAQ,KAAK,8BAA8B,SAAS;AAItD,UAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,IAAI,CAAC;IAG1C,IAAI;AACJ,QAAI;KAEF,MAAM,YADe,MAAM,UAAU,OAAO,KAAK,cAAc,QAAQ,QAAQ,iBAAiB,EAClE,OAAO,MAAM,kBAAkB;AAC7D,SAAI,SACF,OAAM,SAAS,SAAS,GAAG;YAEvB;AAIR,mBAAe,KAAK,eAAe,KAAK,UAAU,MAAM,IAAI;AAC5D,YAAQ,IAAI,6BAA6B,KAAK,gBAAgB,MAAM,UAAU,IAAI,KAAK,KAAK;YACrF,OAAO;IACd,MAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AAC3E,UAAM,IAAI,MAAM,gCAAgC,eAAe;;;;;;EAOnE,MAAM,OAAsB;GAC1B,MAAM,eAAe,cAAc,KAAK,cAAc;AACtD,OAAI,CAAC,cAAc,SAAS;AAC1B,YAAQ,KAAK,+BAA+B,KAAK,gBAAgB;AACjE;;GAGF,MAAM,UAAU,KAAK,KAAK,UAAU,OAAO,OAAO;AAClD,OAAI,CAAC,WAAW,QAAQ,EAAE;AACxB,YAAQ,KAAK,4BAA4B,QAAQ,0BAA0B;AAC3E,mBAAe,KAAK,eAAe,KAAK,UAAU,MAAM;AACxD;;AAGF,WAAQ,IAAI,4BAA4B,KAAK,cAAc,KAAK;AAEhE,OAAI;AAEF,UAAM,UAAU,OAAO,KAAK,cAAc,QAAQ,QAAQ,eAAe;AAEzE,mBAAe,KAAK,eAAe,KAAK,UAAU,MAAM;AACxD,YAAQ,IAAI,6BAA6B,KAAK,gBAAgB;YACvD,OAAO;AAEd,QAAI,aAAa,IACf,KAAI;AACF,aAAQ,KAAK,aAAa,KAAK,UAAU;AACzC,aAAQ,IAAI,0CAA0C,aAAa,IAAI,GAAG;aACnE,WAAW;AAClB,aAAQ,KAAK,uCAAuC,YAAY;;AAKpE,mBAAe,KA
AK,eAAe,KAAK,UAAU,MAAM;;;;;;EAO5D,MAAM,YAAuC;GAC3C,MAAM,QAAQ,cAAc,KAAK,cAAc;AAE/C,OAAI,CAAC,OAAO,QACV,QAAO;IACL,SAAS;IACT,eAAe,KAAK;IACpB,UAAU,KAAK;IACf,SAAS;IACV;GAIH,MAAM,UAAU,MAAM,KAAK,aAAa;AAExC,UAAO;IACL,SAAS;IACT,KAAK,MAAM;IACX,WAAW,MAAM,YAAY,IAAI,KAAK,MAAM,UAAU,GAAG,KAAA;IACzD,eAAe,KAAK;IACpB,UAAU,KAAK;IACf;IACD;;;;;EAMH,MAAM,cAAgC;GACpC,MAAM,UAAU,KAAK,KAAK,UAAU,OAAO,OAAO;AAClD,OAAI,CAAC,WAAW,QAAQ,CACtB,QAAO;AAGT,OAAI;AAEF,UAAM,UAAU,OAAO,KAAK,cAAc,QAAQ,QAAQ,kBAAkB,EAAE,SAAS,KAAM,CAAC;AAC9F,WAAO;WACD;AACN,WAAO;;;;;;EAOX,MAAM,UAAyB;AAC7B,WAAQ,IAAI,8BAA8B,KAAK,cAAc,KAAK;AAClE,SAAM,KAAK,MAAM;AACjB,SAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,IAAK,CAAC;AAC3C,SAAM,KAAK,OAAO;;;;;;;EAQpB,MAAM,KAAK,aAAa,MAAqB;GAC3C,MAAM,UAAU,KAAK,KAAK,UAAU,OAAO,OAAO;AAClD,OAAI,CAAC,WAAW,QAAQ,CACtB,OAAM,IAAI,MAAM,4BAA4B,UAAU;AAGxD,WAAQ,IAAI,0BAA0B,KAAK,cAAc,KAAK;AAE9D,OAAI;AAKF,UAAM,UAJM,aACR,OAAO,KAAK,cAAc,QAAQ,QAAQ,8BAC1C,OAAO,KAAK,cAAc,QAAQ,QAAQ,UAE1B;AACpB,YAAQ,IAAI,sCAAsC,KAAK,gBAAgB;YAChE,OAAO;IACd,MAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AAC3E,UAAM,IAAI,MAAM,8BAA8B,eAAe;;;;;;EAOjE,YAAqB;AAEnB,UADc,cAAc,KAAK,cAAc,EACjC,WAAW;;;;;EAM3B,mBAA2B;AACzB,UAAO,KAAK;;;;;EAMd,cAAsB;AACpB,UAAO,KAAK;;;AAOV,kCAAiB,IAAI,KAAgC"}
@@ -1,3 +1,3 @@
1
- import { i as init_tldr_daemon, n as getTldrDaemonService } from "./tldr-daemon-BCEFPItr.js";
1
+ import { i as init_tldr_daemon, n as getTldrDaemonService } from "./tldr-daemon-CFx4LXAl.js";
2
2
  init_tldr_daemon();
3
3
  export { getTldrDaemonService };
@@ -1,3 +1,3 @@
1
- import { d as sessionExists, o as init_tmux, s as killSession } from "./tmux-CKdNxxJx.js";
1
+ import { d as sessionExists, o as init_tmux, s as killSession } from "./tmux-D6Ah4I8z.js";
2
2
  init_tmux();
3
3
  export { killSession, sessionExists };
@@ -1,5 +1,5 @@
1
1
  import { t as __esmMin } from "./chunk-ruWRV7i3.js";
2
- import { W as init_paths, b as PANOPTICON_HOME } from "./paths-lMaxrYtT.js";
2
+ import { G as init_paths, b as PANOPTICON_HOME } from "./paths-CDJ_HsbN.js";
3
3
  import { appendFileSync, chmodSync, existsSync, mkdirSync, unlinkSync, writeFileSync } from "fs";
4
4
  import { join } from "path";
5
5
  import { exec, execSync } from "child_process";
@@ -186,4 +186,4 @@ var init_tmux = __esmMin((() => {
186
186
  //#endregion
187
187
  export { getAgentSessions as a, listSessions as c, sessionExists as d, waitForClaudePrompt as f, createSession as i, sendKeys as l, capturePaneAsync as n, init_tmux as o, confirmDelivery as r, killSession as s, capturePane as t, sendKeysAsync as u };
188
188
 
189
- //# sourceMappingURL=tmux-CKdNxxJx.js.map
189
+ //# sourceMappingURL=tmux-D6Ah4I8z.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"tmux-CKdNxxJx.js","names":[],"sources":["../src/lib/tmux.ts"],"sourcesContent":["import { execSync, exec } from 'child_process';\nimport { promisify } from 'util';\nimport { writeFileSync, chmodSync, appendFileSync, mkdirSync, existsSync, unlinkSync } from 'fs';\nimport { join } from 'path';\nimport { PANOPTICON_HOME } from './paths.js';\n\n/**\n * Log file for tmux sendKeys operations\n * This helps debug mysterious messages appearing in agent prompts\n */\nconst SENDKEYS_LOG_FILE = join(PANOPTICON_HOME, 'logs', 'sendkeys.jsonl');\n\n/**\n * Ensure log directory exists\n */\nfunction ensureLogDir(): void {\n const logDir = join(PANOPTICON_HOME, 'logs');\n if (!existsSync(logDir)) {\n mkdirSync(logDir, { recursive: true });\n }\n}\n\n/**\n * Log a sendKeys operation for debugging\n */\nfunction logSendKeys(sessionName: string, keys: string, caller?: string): void {\n try {\n ensureLogDir();\n\n // Get call stack to identify caller if not provided\n const stack = new Error().stack || '';\n const stackLines = stack.split('\\n').slice(3, 6); // Skip Error, logSendKeys, sendKeys\n const callerInfo = caller || stackLines.map(l => l.trim()).join(' <- ');\n\n const entry = {\n timestamp: new Date().toISOString(),\n sessionName,\n keysLength: keys.length,\n keysPreview: keys.length > 200 ? keys.slice(0, 200) + '...' 
: keys,\n caller: callerInfo,\n pid: process.pid,\n };\n\n appendFileSync(SENDKEYS_LOG_FILE, JSON.stringify(entry) + '\\n', 'utf-8');\n } catch {\n // Silently fail - logging should never break functionality\n }\n}\n\nexport interface TmuxSession {\n name: string;\n created: Date;\n attached: boolean;\n windows: number;\n}\n\nexport function listSessions(): TmuxSession[] {\n try {\n const output = execSync('tmux list-sessions -F \"#{session_name}|#{session_created}|#{session_attached}|#{session_windows}\"', {\n encoding: 'utf8',\n });\n\n return output.trim().split('\\n').filter(Boolean).map(line => {\n const [name, created, attached, windows] = line.split('|');\n return {\n name,\n created: new Date(parseInt(created) * 1000),\n attached: attached === '1',\n windows: parseInt(windows),\n };\n });\n } catch {\n return []; // No sessions\n }\n}\n\nexport function sessionExists(name: string): boolean {\n try {\n execSync(`tmux has-session -t ${name} 2>/dev/null`);\n return true;\n } catch {\n return false;\n }\n}\n\n\nexport function createSession(\n name: string,\n cwd: string,\n initialCommand?: string,\n options?: { env?: Record<string, string> }\n): void {\n const escapedCwd = cwd.replace(/\"/g, '\\\\\"');\n\n // Build environment variable flags for tmux\n let envFlags = '';\n if (options?.env) {\n for (const [key, value] of Object.entries(options.env)) {\n envFlags += ` -e ${key}=\"${value.replace(/\"/g, '\\\\\"')}\"`;\n }\n }\n\n // For complex commands (with special chars), start session first then send command\n if (initialCommand && (initialCommand.includes('`') || initialCommand.includes('\\n') || initialCommand.length > 500)) {\n // Create session without command\n execSync(`tmux new-session -d -s ${name} -c \"${escapedCwd}\"${envFlags}`);\n\n // Small delay to let session initialize\n execSync('sleep 0.5');\n\n // Send the command in chunks if needed (tmux has buffer limits)\n // First, write to a temp file and source it\n const tmpFile = 
`/tmp/pan-cmd-${name}.sh`;\n writeFileSync(tmpFile, initialCommand);\n chmodSync(tmpFile, '755');\n\n // Execute the script\n execSync(`tmux send-keys -t ${name} \"bash ${tmpFile}\"`);\n execSync(`tmux send-keys -t ${name} C-m`);\n } else if (initialCommand) {\n // Simple command - use inline\n const cmd = `tmux new-session -d -s ${name} -c \"${escapedCwd}\"${envFlags} \"${initialCommand.replace(/\"/g, '\\\\\"')}\"`;\n execSync(cmd);\n } else {\n execSync(`tmux new-session -d -s ${name} -c \"${escapedCwd}\"${envFlags}`);\n }\n}\n\nexport function killSession(name: string): void {\n execSync(`tmux kill-session -t ${name}`);\n}\n\nconst execAsync = promisify(exec);\n\nexport async function sessionExistsAsync(name: string): Promise<boolean> {\n try {\n await execAsync(`tmux has-session -t ${name} 2>/dev/null`);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Send keys to a tmux session (async, non-blocking).\n * Uses load-buffer + paste-buffer for reliable delivery, with a delay before Enter.\n * MUST be used from the dashboard server and any async context.\n */\nexport async function sendKeysAsync(sessionName: string, keys: string, caller?: string): Promise<void> {\n logSendKeys(sessionName, keys, caller);\n\n // Use a unique named buffer per call to prevent race conditions.\n // The default (unnamed) paste buffer is global — concurrent load-buffer\n // calls from different specialist wakes clobber each other.\n const bufferName = `pan-${process.pid}-${Date.now()}`;\n const tmpFile = `/tmp/pan-sendkeys-${bufferName}.txt`;\n try {\n writeFileSync(tmpFile, keys);\n await execAsync(`tmux load-buffer -b ${bufferName} ${tmpFile}`);\n await execAsync(`tmux paste-buffer -b ${bufferName} -t ${sessionName} -d`);\n await new Promise(r => setTimeout(r, 300));\n await execAsync(`tmux send-keys -t ${sessionName} C-m`);\n } finally {\n try { unlinkSync(tmpFile); } catch {}\n try { await execAsync(`tmux delete-buffer -b ${bufferName} 2>/dev/null`); } catch {}\n 
}\n}\n\n/**\n * Send keys to a tmux session (sync, blocks event loop).\n * Only use from CLI commands — NEVER from the dashboard server.\n */\nexport function sendKeys(sessionName: string, keys: string, caller?: string): void {\n logSendKeys(sessionName, keys, caller);\n\n const tmpFile = `/tmp/pan-sendkeys-${process.pid}-${Date.now()}.txt`;\n try {\n writeFileSync(tmpFile, keys);\n execSync(`tmux load-buffer ${tmpFile}`);\n execSync(`tmux paste-buffer -t ${sessionName}`);\n execSync(`sleep 0.3`);\n execSync(`tmux send-keys -t ${sessionName} C-m`);\n } finally {\n try { unlinkSync(tmpFile); } catch {}\n }\n}\n\nexport function capturePane(sessionName: string, lines: number = 50): string {\n try {\n return execSync(`tmux capture-pane -t ${sessionName} -p -S -${lines}`, {\n encoding: 'utf8',\n });\n } catch {\n return '';\n }\n}\n\n/**\n * Capture tmux pane output (async, non-blocking).\n * MUST be used from the dashboard server and any async context.\n */\nexport async function capturePaneAsync(sessionName: string, lines: number = 50): Promise<string> {\n try {\n const { stdout } = await execAsync(`tmux capture-pane -t ${sessionName} -p -S -${lines}`, {\n encoding: 'utf-8',\n });\n return stdout;\n } catch {\n return '';\n }\n}\n\n/**\n * Wait for Claude Code to reach its interactive prompt (❯) in a tmux session.\n * Polls tmux output until the prompt appears or timeout is reached.\n *\n * @param sessionName - tmux session name\n * @param timeoutMs - maximum time to wait (default: 15s for fresh start, use 5s for already-running)\n * @returns true if prompt detected, false if timed out\n */\nexport async function waitForClaudePrompt(sessionName: string, timeoutMs: number = 15000): Promise<boolean> {\n const start = Date.now();\n const POLL = 500;\n while (Date.now() - start < timeoutMs) {\n const output = await capturePaneAsync(sessionName, 10);\n // Claude Code shows ❯ when ready for user input.\n // Check that the LAST non-empty line contains ❯ (not a stale prompt 
from earlier output).\n const lines = output.split('\\n').filter(l => l.trim());\n const lastLine = lines[lines.length - 1] || '';\n if (lastLine.includes('❯')) return true;\n await new Promise(r => setTimeout(r, POLL));\n }\n return false;\n}\n\n/**\n * Verify that a message sent to Claude was actually received and processing started.\n * Compares tmux output before and after to detect new activity (tool calls, responses).\n *\n * @param sessionName - tmux session name\n * @param outputBefore - tmux output snapshot taken BEFORE sending the message\n * @param timeoutMs - maximum time to wait for activity (default: 10s)\n * @returns true if new activity detected, false if timed out\n */\nexport async function confirmDelivery(\n sessionName: string,\n outputBefore: string,\n timeoutMs: number = 10000,\n): Promise<boolean> {\n const start = Date.now();\n const POLL = 1000;\n const beforeLineCount = outputBefore.split('\\n').filter(l => l.trim()).length;\n\n while (Date.now() - start < timeoutMs) {\n await new Promise(r => setTimeout(r, POLL));\n const after = await capturePaneAsync(sessionName, 50);\n const afterLines = after.split('\\n').filter(l => l.trim());\n const afterLineCount = afterLines.length;\n\n // Claude is processing if: new output lines appeared (tool calls: ●, results: ⎿, etc.)\n if (afterLineCount > beforeLineCount + 1) return true;\n\n // Or if we can see activity markers in the new output\n const newOutput = afterLines.slice(beforeLineCount).join('\\n');\n if (\n newOutput.includes('●') || newOutput.includes('⎿') || newOutput.includes('Read') ||\n newOutput.includes('✻') || newOutput.includes('·') || newOutput.includes('✶') ||\n newOutput.includes('✽') || newOutput.includes('✢') || newOutput.includes('Generating') ||\n newOutput.includes('thinking') || newOutput.includes('thought for')\n ) return true;\n }\n return false;\n}\n\nexport function getAgentSessions(): TmuxSession[] {\n return listSessions().filter(s => 
s.name.startsWith('agent-'));\n}\n"],"mappings":";;;;;;;;;;AAeA,SAAS,eAAqB;CAC5B,MAAM,SAAS,KAAK,iBAAiB,OAAO;AAC5C,KAAI,CAAC,WAAW,OAAO,CACrB,WAAU,QAAQ,EAAE,WAAW,MAAM,CAAC;;;;;AAO1C,SAAS,YAAY,aAAqB,MAAc,QAAuB;AAC7E,KAAI;AACF,gBAAc;EAId,MAAM,+BADQ,IAAI,OAAO,EAAC,SAAS,IACV,MAAM,KAAK,CAAC,MAAM,GAAG,EAAE;EAChD,MAAM,aAAa,UAAU,WAAW,KAAI,MAAK,EAAE,MAAM,CAAC,CAAC,KAAK,OAAO;EAEvE,MAAM,QAAQ;GACZ,4BAAW,IAAI,MAAM,EAAC,aAAa;GACnC;GACA,YAAY,KAAK;GACjB,aAAa,KAAK,SAAS,MAAM,KAAK,MAAM,GAAG,IAAI,GAAG,QAAQ;GAC9D,QAAQ;GACR,KAAK,QAAQ;GACd;AAED,iBAAe,mBAAmB,KAAK,UAAU,MAAM,GAAG,MAAM,QAAQ;SAClE;;AAYV,SAAgB,eAA8B;AAC5C,KAAI;AAKF,SAJe,SAAS,uGAAqG,EAC3H,UAAU,QACX,CAAC,CAEY,MAAM,CAAC,MAAM,KAAK,CAAC,OAAO,QAAQ,CAAC,KAAI,SAAQ;GAC3D,MAAM,CAAC,MAAM,SAAS,UAAU,WAAW,KAAK,MAAM,IAAI;AAC1D,UAAO;IACL;IACA,yBAAS,IAAI,KAAK,SAAS,QAAQ,GAAG,IAAK;IAC3C,UAAU,aAAa;IACvB,SAAS,SAAS,QAAQ;IAC3B;IACD;SACI;AACN,SAAO,EAAE;;;AAIb,SAAgB,cAAc,MAAuB;AACnD,KAAI;AACF,WAAS,uBAAuB,KAAK,cAAc;AACnD,SAAO;SACD;AACN,SAAO;;;AAKX,SAAgB,cACd,MACA,KACA,gBACA,SACM;CACN,MAAM,aAAa,IAAI,QAAQ,MAAM,OAAM;CAG3C,IAAI,WAAW;AACf,KAAI,SAAS,IACX,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,QAAQ,IAAI,CACpD,aAAY,OAAO,IAAI,IAAI,MAAM,QAAQ,MAAM,OAAM,CAAC;AAK1D,KAAI,mBAAmB,eAAe,SAAS,IAAI,IAAI,eAAe,SAAS,KAAK,IAAI,eAAe,SAAS,MAAM;AAEpH,WAAS,0BAA0B,KAAK,OAAO,WAAW,GAAG,WAAW;AAGxE,WAAS,YAAY;EAIrB,MAAM,UAAU,gBAAgB,KAAK;AACrC,gBAAc,SAAS,eAAe;AACtC,YAAU,SAAS,MAAM;AAGzB,WAAS,qBAAqB,KAAK,SAAS,QAAQ,GAAG;AACvD,WAAS,qBAAqB,KAAK,MAAM;YAChC,eAGT,UADY,0BAA0B,KAAK,OAAO,WAAW,GAAG,SAAS,IAAI,eAAe,QAAQ,MAAM,OAAM,CAAC,GACpG;KAEb,UAAS,0BAA0B,KAAK,OAAO,WAAW,GAAG,WAAW;;AAI5E,SAAgB,YAAY,MAAoB;AAC9C,UAAS,wBAAwB,OAAO;;;;;;;AAmB1C,eAAsB,cAAc,aAAqB,MAAc,QAAgC;AACrG,aAAY,aAAa,MAAM,OAAO;CAKtC,MAAM,aAAa,OAAO,QAAQ,IAAI,GAAG,KAAK,KAAK;CACnD,MAAM,UAAU,qBAAqB,WAAW;AAChD,KAAI;AACF,gBAAc,SAAS,KAAK;AAC5B,QAAM,UAAU,uBAAuB,WAAW,GAAG,UAAU;AAC/D,QAAM,UAAU,wBAAwB,WAAW,MAAM,YAAY,KAAK;AAC1E,QAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,IAAI,CAAC;AAC1C,QAAM,UAAU,qBAAqB,YAAY,MAAM;WAC/C;AACR,MAAI;AAAE,cAAW,QAAQ;UAAU;AACnC,MAAI;AAAE,S
AAM,UAAU,yBAAyB,WAAW,cAAc;UAAU;;;;;;;AAQtF,SAAgB,SAAS,aAAqB,MAAc,QAAuB;AACjF,aAAY,aAAa,MAAM,OAAO;CAEtC,MAAM,UAAU,qBAAqB,QAAQ,IAAI,GAAG,KAAK,KAAK,CAAC;AAC/D,KAAI;AACF,gBAAc,SAAS,KAAK;AAC5B,WAAS,oBAAoB,UAAU;AACvC,WAAS,wBAAwB,cAAc;AAC/C,WAAS,YAAY;AACrB,WAAS,qBAAqB,YAAY,MAAM;WACxC;AACR,MAAI;AAAE,cAAW,QAAQ;UAAU;;;AAIvC,SAAgB,YAAY,aAAqB,QAAgB,IAAY;AAC3E,KAAI;AACF,SAAO,SAAS,wBAAwB,YAAY,UAAU,SAAS,EACrE,UAAU,QACX,CAAC;SACI;AACN,SAAO;;;;;;;AAQX,eAAsB,iBAAiB,aAAqB,QAAgB,IAAqB;AAC/F,KAAI;EACF,MAAM,EAAE,WAAW,MAAM,UAAU,wBAAwB,YAAY,UAAU,SAAS,EACxF,UAAU,SACX,CAAC;AACF,SAAO;SACD;AACN,SAAO;;;;;;;;;;;AAYX,eAAsB,oBAAoB,aAAqB,YAAoB,MAAyB;CAC1G,MAAM,QAAQ,KAAK,KAAK;CACxB,MAAM,OAAO;AACb,QAAO,KAAK,KAAK,GAAG,QAAQ,WAAW;EAIrC,MAAM,SAHS,MAAM,iBAAiB,aAAa,GAAG,EAGjC,MAAM,KAAK,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC;AAEtD,OADiB,MAAM,MAAM,SAAS,MAAM,IAC/B,SAAS,IAAI,CAAE,QAAO;AACnC,QAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,KAAK,CAAC;;AAE7C,QAAO;;;;;;;;;;;AAYT,eAAsB,gBACpB,aACA,cACA,YAAoB,KACF;CAClB,MAAM,QAAQ,KAAK,KAAK;CACxB,MAAM,OAAO;CACb,MAAM,kBAAkB,aAAa,MAAM,KAAK,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC,CAAC;AAEvE,QAAO,KAAK,KAAK,GAAG,QAAQ,WAAW;AACrC,QAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,KAAK,CAAC;EAE3C,MAAM,cADQ,MAAM,iBAAiB,aAAa,GAAG,EAC5B,MAAM,KAAK,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC;AAI1D,MAHuB,WAAW,SAGb,kBAAkB,EAAG,QAAO;EAGjD,MAAM,YAAY,WAAW,MAAM,gBAAgB,CAAC,KAAK,KAAK;AAC9D,MACE,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,OAAO,IAChF,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,IAAI,IAC7E,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,aAAa,IACtF,UAAU,SAAS,WAAW,IAAI,UAAU,SAAS,cAAc,CACnE,QAAO;;AAEX,QAAO;;AAGT,SAAgB,mBAAkC;AAChD,QAAO,cAAc,CAAC,QAAO,MAAK,EAAE,KAAK,WAAW,SAAS,CAAC;;;;aA/QnB;AAMvC,qBAAoB,KAAK,iBAAiB,QAAQ,iBAAiB;AA0HnE,aAAY,UAAU,KAAK"}
1
+ {"version":3,"file":"tmux-D6Ah4I8z.js","names":[],"sources":["../src/lib/tmux.ts"],"sourcesContent":["import { execSync, exec } from 'child_process';\nimport { promisify } from 'util';\nimport { writeFileSync, chmodSync, appendFileSync, mkdirSync, existsSync, unlinkSync } from 'fs';\nimport { join } from 'path';\nimport { PANOPTICON_HOME } from './paths.js';\n\n/**\n * Log file for tmux sendKeys operations\n * This helps debug mysterious messages appearing in agent prompts\n */\nconst SENDKEYS_LOG_FILE = join(PANOPTICON_HOME, 'logs', 'sendkeys.jsonl');\n\n/**\n * Ensure log directory exists\n */\nfunction ensureLogDir(): void {\n const logDir = join(PANOPTICON_HOME, 'logs');\n if (!existsSync(logDir)) {\n mkdirSync(logDir, { recursive: true });\n }\n}\n\n/**\n * Log a sendKeys operation for debugging\n */\nfunction logSendKeys(sessionName: string, keys: string, caller?: string): void {\n try {\n ensureLogDir();\n\n // Get call stack to identify caller if not provided\n const stack = new Error().stack || '';\n const stackLines = stack.split('\\n').slice(3, 6); // Skip Error, logSendKeys, sendKeys\n const callerInfo = caller || stackLines.map(l => l.trim()).join(' <- ');\n\n const entry = {\n timestamp: new Date().toISOString(),\n sessionName,\n keysLength: keys.length,\n keysPreview: keys.length > 200 ? keys.slice(0, 200) + '...' 
: keys,\n caller: callerInfo,\n pid: process.pid,\n };\n\n appendFileSync(SENDKEYS_LOG_FILE, JSON.stringify(entry) + '\\n', 'utf-8');\n } catch {\n // Silently fail - logging should never break functionality\n }\n}\n\nexport interface TmuxSession {\n name: string;\n created: Date;\n attached: boolean;\n windows: number;\n}\n\nexport function listSessions(): TmuxSession[] {\n try {\n const output = execSync('tmux list-sessions -F \"#{session_name}|#{session_created}|#{session_attached}|#{session_windows}\"', {\n encoding: 'utf8',\n });\n\n return output.trim().split('\\n').filter(Boolean).map(line => {\n const [name, created, attached, windows] = line.split('|');\n return {\n name,\n created: new Date(parseInt(created) * 1000),\n attached: attached === '1',\n windows: parseInt(windows),\n };\n });\n } catch {\n return []; // No sessions\n }\n}\n\nexport function sessionExists(name: string): boolean {\n try {\n execSync(`tmux has-session -t ${name} 2>/dev/null`);\n return true;\n } catch {\n return false;\n }\n}\n\n\nexport function createSession(\n name: string,\n cwd: string,\n initialCommand?: string,\n options?: { env?: Record<string, string> }\n): void {\n const escapedCwd = cwd.replace(/\"/g, '\\\\\"');\n\n // Build environment variable flags for tmux\n let envFlags = '';\n if (options?.env) {\n for (const [key, value] of Object.entries(options.env)) {\n envFlags += ` -e ${key}=\"${value.replace(/\"/g, '\\\\\"')}\"`;\n }\n }\n\n // For complex commands (with special chars), start session first then send command\n if (initialCommand && (initialCommand.includes('`') || initialCommand.includes('\\n') || initialCommand.length > 500)) {\n // Create session without command\n execSync(`tmux new-session -d -s ${name} -c \"${escapedCwd}\"${envFlags}`);\n\n // Small delay to let session initialize\n execSync('sleep 0.5');\n\n // Send the command in chunks if needed (tmux has buffer limits)\n // First, write to a temp file and source it\n const tmpFile = 
`/tmp/pan-cmd-${name}.sh`;\n writeFileSync(tmpFile, initialCommand);\n chmodSync(tmpFile, '755');\n\n // Execute the script\n execSync(`tmux send-keys -t ${name} \"bash ${tmpFile}\"`);\n execSync(`tmux send-keys -t ${name} C-m`);\n } else if (initialCommand) {\n // Simple command - use inline\n const cmd = `tmux new-session -d -s ${name} -c \"${escapedCwd}\"${envFlags} \"${initialCommand.replace(/\"/g, '\\\\\"')}\"`;\n execSync(cmd);\n } else {\n execSync(`tmux new-session -d -s ${name} -c \"${escapedCwd}\"${envFlags}`);\n }\n}\n\nexport function killSession(name: string): void {\n execSync(`tmux kill-session -t ${name}`);\n}\n\nconst execAsync = promisify(exec);\n\nexport async function sessionExistsAsync(name: string): Promise<boolean> {\n try {\n await execAsync(`tmux has-session -t ${name} 2>/dev/null`);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Send keys to a tmux session (async, non-blocking).\n * Uses load-buffer + paste-buffer for reliable delivery, with a delay before Enter.\n * MUST be used from the dashboard server and any async context.\n */\nexport async function sendKeysAsync(sessionName: string, keys: string, caller?: string): Promise<void> {\n logSendKeys(sessionName, keys, caller);\n\n // Use a unique named buffer per call to prevent race conditions.\n // The default (unnamed) paste buffer is global — concurrent load-buffer\n // calls from different specialist wakes clobber each other.\n const bufferName = `pan-${process.pid}-${Date.now()}`;\n const tmpFile = `/tmp/pan-sendkeys-${bufferName}.txt`;\n try {\n writeFileSync(tmpFile, keys);\n await execAsync(`tmux load-buffer -b ${bufferName} ${tmpFile}`);\n await execAsync(`tmux paste-buffer -b ${bufferName} -t ${sessionName} -d`);\n await new Promise(r => setTimeout(r, 300));\n await execAsync(`tmux send-keys -t ${sessionName} C-m`);\n } finally {\n try { unlinkSync(tmpFile); } catch {}\n try { await execAsync(`tmux delete-buffer -b ${bufferName} 2>/dev/null`); } catch {}\n 
}\n}\n\n/**\n * Send keys to a tmux session (sync, blocks event loop).\n * Only use from CLI commands — NEVER from the dashboard server.\n */\nexport function sendKeys(sessionName: string, keys: string, caller?: string): void {\n logSendKeys(sessionName, keys, caller);\n\n const tmpFile = `/tmp/pan-sendkeys-${process.pid}-${Date.now()}.txt`;\n try {\n writeFileSync(tmpFile, keys);\n execSync(`tmux load-buffer ${tmpFile}`);\n execSync(`tmux paste-buffer -t ${sessionName}`);\n execSync(`sleep 0.3`);\n execSync(`tmux send-keys -t ${sessionName} C-m`);\n } finally {\n try { unlinkSync(tmpFile); } catch {}\n }\n}\n\nexport function capturePane(sessionName: string, lines: number = 50): string {\n try {\n return execSync(`tmux capture-pane -t ${sessionName} -p -S -${lines}`, {\n encoding: 'utf8',\n });\n } catch {\n return '';\n }\n}\n\n/**\n * Capture tmux pane output (async, non-blocking).\n * MUST be used from the dashboard server and any async context.\n */\nexport async function capturePaneAsync(sessionName: string, lines: number = 50): Promise<string> {\n try {\n const { stdout } = await execAsync(`tmux capture-pane -t ${sessionName} -p -S -${lines}`, {\n encoding: 'utf-8',\n });\n return stdout;\n } catch {\n return '';\n }\n}\n\n/**\n * Wait for Claude Code to reach its interactive prompt (❯) in a tmux session.\n * Polls tmux output until the prompt appears or timeout is reached.\n *\n * @param sessionName - tmux session name\n * @param timeoutMs - maximum time to wait (default: 15s for fresh start, use 5s for already-running)\n * @returns true if prompt detected, false if timed out\n */\nexport async function waitForClaudePrompt(sessionName: string, timeoutMs: number = 15000): Promise<boolean> {\n const start = Date.now();\n const POLL = 500;\n while (Date.now() - start < timeoutMs) {\n const output = await capturePaneAsync(sessionName, 10);\n // Claude Code shows ❯ when ready for user input.\n // Check that the LAST non-empty line contains ❯ (not a stale prompt 
from earlier output).\n const lines = output.split('\\n').filter(l => l.trim());\n const lastLine = lines[lines.length - 1] || '';\n if (lastLine.includes('❯')) return true;\n await new Promise(r => setTimeout(r, POLL));\n }\n return false;\n}\n\n/**\n * Verify that a message sent to Claude was actually received and processing started.\n * Compares tmux output before and after to detect new activity (tool calls, responses).\n *\n * @param sessionName - tmux session name\n * @param outputBefore - tmux output snapshot taken BEFORE sending the message\n * @param timeoutMs - maximum time to wait for activity (default: 10s)\n * @returns true if new activity detected, false if timed out\n */\nexport async function confirmDelivery(\n sessionName: string,\n outputBefore: string,\n timeoutMs: number = 10000,\n): Promise<boolean> {\n const start = Date.now();\n const POLL = 1000;\n const beforeLineCount = outputBefore.split('\\n').filter(l => l.trim()).length;\n\n while (Date.now() - start < timeoutMs) {\n await new Promise(r => setTimeout(r, POLL));\n const after = await capturePaneAsync(sessionName, 50);\n const afterLines = after.split('\\n').filter(l => l.trim());\n const afterLineCount = afterLines.length;\n\n // Claude is processing if: new output lines appeared (tool calls: ●, results: ⎿, etc.)\n if (afterLineCount > beforeLineCount + 1) return true;\n\n // Or if we can see activity markers in the new output\n const newOutput = afterLines.slice(beforeLineCount).join('\\n');\n if (\n newOutput.includes('●') || newOutput.includes('⎿') || newOutput.includes('Read') ||\n newOutput.includes('✻') || newOutput.includes('·') || newOutput.includes('✶') ||\n newOutput.includes('✽') || newOutput.includes('✢') || newOutput.includes('Generating') ||\n newOutput.includes('thinking') || newOutput.includes('thought for')\n ) return true;\n }\n return false;\n}\n\nexport function getAgentSessions(): TmuxSession[] {\n return listSessions().filter(s => 
s.name.startsWith('agent-'));\n}\n"],"mappings":";;;;;;;;;;AAeA,SAAS,eAAqB;CAC5B,MAAM,SAAS,KAAK,iBAAiB,OAAO;AAC5C,KAAI,CAAC,WAAW,OAAO,CACrB,WAAU,QAAQ,EAAE,WAAW,MAAM,CAAC;;;;;AAO1C,SAAS,YAAY,aAAqB,MAAc,QAAuB;AAC7E,KAAI;AACF,gBAAc;EAId,MAAM,+BADQ,IAAI,OAAO,EAAC,SAAS,IACV,MAAM,KAAK,CAAC,MAAM,GAAG,EAAE;EAChD,MAAM,aAAa,UAAU,WAAW,KAAI,MAAK,EAAE,MAAM,CAAC,CAAC,KAAK,OAAO;EAEvE,MAAM,QAAQ;GACZ,4BAAW,IAAI,MAAM,EAAC,aAAa;GACnC;GACA,YAAY,KAAK;GACjB,aAAa,KAAK,SAAS,MAAM,KAAK,MAAM,GAAG,IAAI,GAAG,QAAQ;GAC9D,QAAQ;GACR,KAAK,QAAQ;GACd;AAED,iBAAe,mBAAmB,KAAK,UAAU,MAAM,GAAG,MAAM,QAAQ;SAClE;;AAYV,SAAgB,eAA8B;AAC5C,KAAI;AAKF,SAJe,SAAS,uGAAqG,EAC3H,UAAU,QACX,CAAC,CAEY,MAAM,CAAC,MAAM,KAAK,CAAC,OAAO,QAAQ,CAAC,KAAI,SAAQ;GAC3D,MAAM,CAAC,MAAM,SAAS,UAAU,WAAW,KAAK,MAAM,IAAI;AAC1D,UAAO;IACL;IACA,yBAAS,IAAI,KAAK,SAAS,QAAQ,GAAG,IAAK;IAC3C,UAAU,aAAa;IACvB,SAAS,SAAS,QAAQ;IAC3B;IACD;SACI;AACN,SAAO,EAAE;;;AAIb,SAAgB,cAAc,MAAuB;AACnD,KAAI;AACF,WAAS,uBAAuB,KAAK,cAAc;AACnD,SAAO;SACD;AACN,SAAO;;;AAKX,SAAgB,cACd,MACA,KACA,gBACA,SACM;CACN,MAAM,aAAa,IAAI,QAAQ,MAAM,OAAM;CAG3C,IAAI,WAAW;AACf,KAAI,SAAS,IACX,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,QAAQ,IAAI,CACpD,aAAY,OAAO,IAAI,IAAI,MAAM,QAAQ,MAAM,OAAM,CAAC;AAK1D,KAAI,mBAAmB,eAAe,SAAS,IAAI,IAAI,eAAe,SAAS,KAAK,IAAI,eAAe,SAAS,MAAM;AAEpH,WAAS,0BAA0B,KAAK,OAAO,WAAW,GAAG,WAAW;AAGxE,WAAS,YAAY;EAIrB,MAAM,UAAU,gBAAgB,KAAK;AACrC,gBAAc,SAAS,eAAe;AACtC,YAAU,SAAS,MAAM;AAGzB,WAAS,qBAAqB,KAAK,SAAS,QAAQ,GAAG;AACvD,WAAS,qBAAqB,KAAK,MAAM;YAChC,eAGT,UADY,0BAA0B,KAAK,OAAO,WAAW,GAAG,SAAS,IAAI,eAAe,QAAQ,MAAM,OAAM,CAAC,GACpG;KAEb,UAAS,0BAA0B,KAAK,OAAO,WAAW,GAAG,WAAW;;AAI5E,SAAgB,YAAY,MAAoB;AAC9C,UAAS,wBAAwB,OAAO;;;;;;;AAmB1C,eAAsB,cAAc,aAAqB,MAAc,QAAgC;AACrG,aAAY,aAAa,MAAM,OAAO;CAKtC,MAAM,aAAa,OAAO,QAAQ,IAAI,GAAG,KAAK,KAAK;CACnD,MAAM,UAAU,qBAAqB,WAAW;AAChD,KAAI;AACF,gBAAc,SAAS,KAAK;AAC5B,QAAM,UAAU,uBAAuB,WAAW,GAAG,UAAU;AAC/D,QAAM,UAAU,wBAAwB,WAAW,MAAM,YAAY,KAAK;AAC1E,QAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,IAAI,CAAC;AAC1C,QAAM,UAAU,qBAAqB,YAAY,MAAM;WAC/C;AACR,MAAI;AAAE,cAAW,QAAQ;UAAU;AACnC,MAAI;AAAE,S
AAM,UAAU,yBAAyB,WAAW,cAAc;UAAU;;;;;;;AAQtF,SAAgB,SAAS,aAAqB,MAAc,QAAuB;AACjF,aAAY,aAAa,MAAM,OAAO;CAEtC,MAAM,UAAU,qBAAqB,QAAQ,IAAI,GAAG,KAAK,KAAK,CAAC;AAC/D,KAAI;AACF,gBAAc,SAAS,KAAK;AAC5B,WAAS,oBAAoB,UAAU;AACvC,WAAS,wBAAwB,cAAc;AAC/C,WAAS,YAAY;AACrB,WAAS,qBAAqB,YAAY,MAAM;WACxC;AACR,MAAI;AAAE,cAAW,QAAQ;UAAU;;;AAIvC,SAAgB,YAAY,aAAqB,QAAgB,IAAY;AAC3E,KAAI;AACF,SAAO,SAAS,wBAAwB,YAAY,UAAU,SAAS,EACrE,UAAU,QACX,CAAC;SACI;AACN,SAAO;;;;;;;AAQX,eAAsB,iBAAiB,aAAqB,QAAgB,IAAqB;AAC/F,KAAI;EACF,MAAM,EAAE,WAAW,MAAM,UAAU,wBAAwB,YAAY,UAAU,SAAS,EACxF,UAAU,SACX,CAAC;AACF,SAAO;SACD;AACN,SAAO;;;;;;;;;;;AAYX,eAAsB,oBAAoB,aAAqB,YAAoB,MAAyB;CAC1G,MAAM,QAAQ,KAAK,KAAK;CACxB,MAAM,OAAO;AACb,QAAO,KAAK,KAAK,GAAG,QAAQ,WAAW;EAIrC,MAAM,SAHS,MAAM,iBAAiB,aAAa,GAAG,EAGjC,MAAM,KAAK,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC;AAEtD,OADiB,MAAM,MAAM,SAAS,MAAM,IAC/B,SAAS,IAAI,CAAE,QAAO;AACnC,QAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,KAAK,CAAC;;AAE7C,QAAO;;;;;;;;;;;AAYT,eAAsB,gBACpB,aACA,cACA,YAAoB,KACF;CAClB,MAAM,QAAQ,KAAK,KAAK;CACxB,MAAM,OAAO;CACb,MAAM,kBAAkB,aAAa,MAAM,KAAK,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC,CAAC;AAEvE,QAAO,KAAK,KAAK,GAAG,QAAQ,WAAW;AACrC,QAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,KAAK,CAAC;EAE3C,MAAM,cADQ,MAAM,iBAAiB,aAAa,GAAG,EAC5B,MAAM,KAAK,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC;AAI1D,MAHuB,WAAW,SAGb,kBAAkB,EAAG,QAAO;EAGjD,MAAM,YAAY,WAAW,MAAM,gBAAgB,CAAC,KAAK,KAAK;AAC9D,MACE,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,OAAO,IAChF,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,IAAI,IAC7E,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,aAAa,IACtF,UAAU,SAAS,WAAW,IAAI,UAAU,SAAS,cAAc,CACnE,QAAO;;AAEX,QAAO;;AAGT,SAAgB,mBAAkC;AAChD,QAAO,cAAc,CAAC,QAAO,MAAK,EAAE,KAAK,WAAW,SAAS,CAAC;;;;aA/QnB;AAMvC,qBAAoB,KAAK,iBAAiB,QAAQ,iBAAiB;AA0HnE,aAAY,UAAU,KAAK"}
@@ -0,0 +1,198 @@
1
+ import { o as init_interface } from "./rally-Dy00NElU.js";
2
+ import { c as init_gitlab, f as init_linear, o as init_factory, u as init_github } from "./factory-DzsOiZVc.js";
3
+ import { appendFileSync, existsSync, mkdirSync, readFileSync, writeFileSync } from "fs";
4
+ import { join } from "path";
5
+ import { homedir } from "os";
6
+ //#region src/lib/shell.ts
7
/**
 * Detect the user's login shell from the SHELL environment variable.
 * @returns {"zsh"|"bash"|"fish"|"unknown"} recognized shell name
 */
function detectShell() {
  const shellPath = process.env.SHELL || "";
  // Check in a fixed order; first substring match wins.
  for (const known of ["zsh", "bash", "fish"]) {
    if (shellPath.includes(known)) return known;
  }
  return "unknown";
}
14
/**
 * Resolve the rc/config file path for a given shell.
 * For bash, prefers ~/.bashrc and falls back to ~/.bash_profile when
 * .bashrc does not exist on disk.
 * @param {"zsh"|"bash"|"fish"|"unknown"} shell
 * @returns {string|null} absolute rc file path, or null for unknown shells
 */
function getShellRcFile(shell) {
  const home = homedir();
  if (shell === "zsh") {
    return join(home, ".zshrc");
  }
  if (shell === "bash") {
    const bashrc = join(home, ".bashrc");
    return existsSync(bashrc) ? bashrc : join(home, ".bash_profile");
  }
  if (shell === "fish") {
    return join(home, ".config", "fish", "config.fish");
  }
  return null;
}
26
// Shell alias installed for users: lets them type `pan` instead of `panopticon`.
const ALIAS_LINE = "alias pan=\"panopticon\"";
// Marker comment written next to the alias so a prior install can be detected.
const ALIAS_MARKER = "# Panopticon CLI alias";
28
/**
 * Check whether the pan alias is already present in an rc file.
 * Matches either the marker comment or the alias line itself.
 * @param {string} rcFile - path to the shell rc file
 * @returns {boolean} true when the alias (or its marker) is found
 */
function hasAlias(rcFile) {
  if (!existsSync(rcFile)) return false;
  const contents = readFileSync(rcFile, "utf8");
  return [ALIAS_MARKER, ALIAS_LINE].some((needle) => contents.includes(needle));
}
33
/**
 * Append the pan alias (preceded by its marker comment) to an rc file.
 * No-op when the alias is already present.
 * @param {string} rcFile - path to the shell rc file
 */
function addAlias(rcFile) {
  if (hasAlias(rcFile)) return;
  // Leading/trailing "" entries produce the surrounding blank-line newlines.
  const aliasBlock = ["", ALIAS_MARKER, ALIAS_LINE, ""].join("\n");
  appendFileSync(rcFile, aliasBlock, "utf8");
}
40
/**
 * Build a human-readable instruction string for activating the alias.
 * @param {"zsh"|"bash"|"fish"|"unknown"} shell
 * @returns {string} what the user should do next
 */
function getAliasInstructions(shell) {
  const rcFile = getShellRcFile(shell);
  if (rcFile === null) {
    return `Add this to your shell config:\n ${ALIAS_LINE}`;
  }
  return `Alias added to ${rcFile}. Run:\n source ${rcFile}`;
}
45
+ //#endregion
46
+ //#region src/lib/tracker/linking.ts
47
+ /**
48
+ * Cross-Tracker Linking
49
+ *
50
+ * Manages links between issues in different trackers.
51
+ * Links are stored in a local JSON file for persistence.
52
+ */
53
+ /**
54
+ * Parse an issue reference to extract tracker and ID
55
+ * Examples:
56
+ * "#42" -> { tracker: "github", ref: "#42" }
57
+ * "github#42" -> { tracker: "github", ref: "#42" }
58
+ * "MIN-630" -> { tracker: "linear", ref: "MIN-630" }
59
+ * "gitlab#15" -> { tracker: "gitlab", ref: "#15" }
60
+ */
61
/**
 * Parse an issue reference into its tracker and normalized id.
 * Examples:
 *   "#42"       -> { tracker: "github", ref: "#42" }
 *   "github#42" -> { tracker: "github", ref: "#42" }
 *   "gitlab#15" -> { tracker: "gitlab", ref: "#15" }
 *   "MIN-630"   -> { tracker: "linear", ref: "MIN-630" }
 * @param {string} ref - raw reference string
 * @returns {{tracker: string, ref: string}|null} parsed ref, or null when unrecognized
 */
function parseIssueRef(ref) {
  // Explicit tracker prefixes (all 7 characters long).
  const prefixed = [
    ["github#", (rest) => ({ tracker: "github", ref: `#${rest}` })],
    ["gitlab#", (rest) => ({ tracker: "gitlab", ref: `#${rest}` })],
    ["linear:", (rest) => ({ tracker: "linear", ref: rest })],
  ];
  for (const [prefix, build] of prefixed) {
    if (ref.startsWith(prefix)) return build(ref.slice(prefix.length));
  }
  // Bare GitHub-style ref (#number).
  if (/^#\d+$/.test(ref)) return { tracker: "github", ref };
  // Linear-style ref (TEAM-123), normalized to upper case.
  if (/^[A-Z]+-\d+$/i.test(ref)) return { tracker: "linear", ref: ref.toUpperCase() };
  return null;
}
84
+ /**
85
+ * Format an issue ref with tracker prefix for display
86
+ */
87
/**
 * Prefix an issue ref with its tracker name for display (e.g. "github#42").
 * Refs for other trackers (e.g. linear) are returned unchanged.
 * @param {string} ref - issue reference, with or without leading "#"
 * @param {string} tracker - tracker identifier
 * @returns {string} display-formatted reference
 */
function formatIssueRef(ref, tracker) {
  if (tracker !== "github" && tracker !== "gitlab") return ref;
  const separator = ref.startsWith("#") ? "" : "#";
  return `${tracker}${separator}${ref}`;
}
92
+ /**
93
+ * Link Manager for cross-tracker issue linking
94
+ */
95
/**
 * Manages cross-tracker links between issues, persisted as a JSON file.
 * Each link is a record: { sourceIssueRef, sourceTracker, targetIssueRef,
 * targetTracker, direction, createdAt }.
 */
var LinkManager = class {
  storePath;
  store;
  /**
   * @param {string} [storePath] - JSON store location; defaults to
   *   ~/.panopticon/links.json
   */
  constructor(storePath) {
    this.storePath = storePath ?? join(homedir(), ".panopticon", "links.json");
    this.store = this.load();
  }
  /** Read the store from disk; falls back to an empty v1 store on any failure. */
  load() {
    if (existsSync(this.storePath)) {
      try {
        const parsed = JSON.parse(readFileSync(this.storePath, "utf-8"));
        if (parsed.version === 1) return parsed;
      } catch {
        // Corrupt or unreadable store — start fresh below.
      }
    }
    return { version: 1, links: [] };
  }
  /** Persist the store, creating its parent directory when missing. */
  save() {
    const parentDir = join(this.storePath, "..");
    if (!existsSync(parentDir)) mkdirSync(parentDir, { recursive: true });
    writeFileSync(this.storePath, JSON.stringify(this.store, null, 2));
  }
  /**
   * Add a link between two issues, or update an existing link's direction.
   * @param {{ref: string, tracker: string}} source
   * @param {{ref: string, tracker: string}} target
   * @param {string} [direction] - defaults to "related"
   * @returns the stored link record
   */
  addLink(source, target, direction = "related") {
    const isSamePair = (l) =>
      l.sourceIssueRef === source.ref &&
      l.sourceTracker === source.tracker &&
      l.targetIssueRef === target.ref &&
      l.targetTracker === target.tracker;
    const existing = this.store.links.find(isSamePair);
    if (existing) {
      // Only touch disk when the direction actually changes.
      if (existing.direction !== direction) {
        existing.direction = direction;
        this.save();
      }
      return existing;
    }
    const link = {
      sourceIssueRef: source.ref,
      sourceTracker: source.tracker,
      targetIssueRef: target.ref,
      targetTracker: target.tracker,
      direction,
      createdAt: new Date().toISOString()
    };
    this.store.links.push(link);
    this.save();
    return link;
  }
  /**
   * Remove a link between two issues.
   * @returns {boolean} true when a link was found and removed
   */
  removeLink(source, target) {
    const index = this.store.links.findIndex((l) =>
      l.sourceIssueRef === source.ref &&
      l.sourceTracker === source.tracker &&
      l.targetIssueRef === target.ref &&
      l.targetTracker === target.tracker);
    if (index < 0) return false;
    this.store.links.splice(index, 1);
    this.save();
    return true;
  }
  /**
   * Get all links touching a given issue, whether it is source or target.
   */
  getLinkedIssues(ref, tracker) {
    return this.store.links.filter((l) => {
      const matchesSource = l.sourceIssueRef === ref && l.sourceTracker === tracker;
      const matchesTarget = l.targetIssueRef === ref && l.targetTracker === tracker;
      return matchesSource || matchesTarget;
    });
  }
  /**
   * Get a copy of every stored link (for debugging/admin).
   */
  getAllLinks() {
    return this.store.links.slice();
  }
  /**
   * Find the ref of an issue in another tracker linked to the given issue,
   * searching both link directions.
   * @returns {string|null} the linked ref, or null when none exists
   */
  findLinkedIssue(ref, sourceTracker, targetTracker) {
    for (const l of this.store.links) {
      if (l.sourceIssueRef === ref && l.sourceTracker === sourceTracker && l.targetTracker === targetTracker) {
        return l.targetIssueRef;
      }
      if (l.targetIssueRef === ref && l.targetTracker === sourceTracker && l.sourceTracker === targetTracker) {
        return l.sourceIssueRef;
      }
    }
    return null;
  }
  /**
   * Remove every link and persist the empty store (for testing).
   */
  clear() {
    this.store.links = [];
    this.save();
  }
};
183
// Module-level singleton so every caller shares the same link store.
let _linkManager = null;
/**
 * Get the process-wide LinkManager, creating it on first use
 * (with the default store path).
 * @returns {LinkManager}
 */
function getLinkManager() {
  _linkManager ??= new LinkManager();
  return _linkManager;
}
188
+ //#endregion
189
//#region src/lib/tracker/index.ts
// Run each bundled tracker module's initializer at load time so their
// module-level side effects execute (bodies live in sibling chunks —
// presumably they register tracker implementations; confirm in factory chunk).
init_interface();
init_linear();
init_github();
init_gitlab();
init_factory();
//#endregion
// Named exports with single-letter aliases assigned by the bundler's mangler.
export { addAlias as a, getShellRcFile as c, parseIssueRef as i, hasAlias as l, formatIssueRef as n, detectShell as o, getLinkManager as r, getAliasInstructions as s, LinkManager as t };

//# sourceMappingURL=tracker-BhYYvU3p.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"tracker-BhYYvU3p.js","names":[],"sources":["../src/lib/shell.ts","../src/lib/tracker/linking.ts","../src/lib/tracker/index.ts"],"sourcesContent":["import { existsSync, readFileSync, appendFileSync } from 'fs';\nimport { homedir } from 'os';\nimport { join } from 'path';\n\nexport type Shell = 'bash' | 'zsh' | 'fish' | 'unknown';\n\nexport function detectShell(): Shell {\n const shell = process.env.SHELL || '';\n\n if (shell.includes('zsh')) return 'zsh';\n if (shell.includes('bash')) return 'bash';\n if (shell.includes('fish')) return 'fish';\n\n return 'unknown';\n}\n\nexport function getShellRcFile(shell: Shell): string | null {\n const home = homedir();\n\n switch (shell) {\n case 'zsh':\n return join(home, '.zshrc');\n case 'bash':\n // Prefer .bashrc, fall back to .bash_profile\n const bashrc = join(home, '.bashrc');\n if (existsSync(bashrc)) return bashrc;\n return join(home, '.bash_profile');\n case 'fish':\n return join(home, '.config', 'fish', 'config.fish');\n default:\n return null;\n }\n}\n\nconst ALIAS_LINE = 'alias pan=\"panopticon\"';\nconst ALIAS_MARKER = '# Panopticon CLI alias';\n\nexport function hasAlias(rcFile: string): boolean {\n if (!existsSync(rcFile)) return false;\n\n const content = readFileSync(rcFile, 'utf8');\n return content.includes(ALIAS_MARKER) || content.includes(ALIAS_LINE);\n}\n\nexport function addAlias(rcFile: string): void {\n if (hasAlias(rcFile)) return;\n\n const aliasBlock = `\n${ALIAS_MARKER}\n${ALIAS_LINE}\n`;\n\n appendFileSync(rcFile, aliasBlock, 'utf8');\n}\n\nexport function getAliasInstructions(shell: Shell): string {\n const rcFile = getShellRcFile(shell);\n\n if (!rcFile) {\n return `Add this to your shell config:\\n ${ALIAS_LINE}`;\n }\n\n return `Alias added to ${rcFile}. 
Run:\\n source ${rcFile}`;\n}\n","/**\n * Cross-Tracker Linking\n *\n * Manages links between issues in different trackers.\n * Links are stored in a local JSON file for persistence.\n */\n\nimport { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs';\nimport { join } from 'path';\nimport { homedir } from 'os';\nimport type { TrackerType } from './interface.js';\n\n// Link direction types\nexport type LinkDirection = 'blocks' | 'blocked_by' | 'related' | 'duplicate_of';\n\n// A single link between two issues\nexport interface TrackerLink {\n sourceIssueRef: string; // e.g., \"MIN-630\"\n sourceTracker: TrackerType;\n targetIssueRef: string; // e.g., \"#42\"\n targetTracker: TrackerType;\n direction: LinkDirection;\n createdAt: string; // ISO timestamp\n}\n\n// Storage format\ninterface LinkStore {\n version: 1;\n links: TrackerLink[];\n}\n\n/**\n * Parse an issue reference to extract tracker and ID\n * Examples:\n * \"#42\" -> { tracker: \"github\", ref: \"#42\" }\n * \"github#42\" -> { tracker: \"github\", ref: \"#42\" }\n * \"MIN-630\" -> { tracker: \"linear\", ref: \"MIN-630\" }\n * \"gitlab#15\" -> { tracker: \"gitlab\", ref: \"#15\" }\n */\nexport function parseIssueRef(ref: string): { tracker: TrackerType; ref: string } | null {\n // Explicit tracker prefix\n if (ref.startsWith('github#')) {\n return { tracker: 'github', ref: `#${ref.slice(7)}` };\n }\n if (ref.startsWith('gitlab#')) {\n return { tracker: 'gitlab', ref: `#${ref.slice(7)}` };\n }\n if (ref.startsWith('linear:')) {\n return { tracker: 'linear', ref: ref.slice(7) };\n }\n\n // GitHub-style refs (#number)\n if (/^#\\d+$/.test(ref)) {\n return { tracker: 'github', ref };\n }\n\n // Linear-style refs (XXX-123)\n if (/^[A-Z]+-\\d+$/i.test(ref)) {\n return { tracker: 'linear', ref: ref.toUpperCase() };\n }\n\n return null;\n}\n\n/**\n * Format an issue ref with tracker prefix for display\n */\nexport function formatIssueRef(ref: string, tracker: TrackerType): string {\n if (tracker === 
'github') {\n return ref.startsWith('#') ? `github${ref}` : `github#${ref}`;\n }\n if (tracker === 'gitlab') {\n return ref.startsWith('#') ? `gitlab${ref}` : `gitlab#${ref}`;\n }\n return ref; // Linear refs are already unique\n}\n\n/**\n * Link Manager for cross-tracker issue linking\n */\nexport class LinkManager {\n private storePath: string;\n private store: LinkStore;\n\n constructor(storePath?: string) {\n this.storePath = storePath ?? join(homedir(), '.panopticon', 'links.json');\n this.store = this.load();\n }\n\n private load(): LinkStore {\n if (existsSync(this.storePath)) {\n try {\n const data = JSON.parse(readFileSync(this.storePath, 'utf-8'));\n if (data.version === 1) {\n return data;\n }\n } catch {\n // Fall through to default\n }\n }\n return { version: 1, links: [] };\n }\n\n private save(): void {\n const dir = join(this.storePath, '..');\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n writeFileSync(this.storePath, JSON.stringify(this.store, null, 2));\n }\n\n /**\n * Add a link between two issues\n */\n addLink(\n source: { ref: string; tracker: TrackerType },\n target: { ref: string; tracker: TrackerType },\n direction: LinkDirection = 'related'\n ): TrackerLink {\n // Check if link already exists\n const existing = this.store.links.find(\n (l) =>\n l.sourceIssueRef === source.ref &&\n l.sourceTracker === source.tracker &&\n l.targetIssueRef === target.ref &&\n l.targetTracker === target.tracker\n );\n\n if (existing) {\n // Update direction if different\n if (existing.direction !== direction) {\n existing.direction = direction;\n this.save();\n }\n return existing;\n }\n\n const link: TrackerLink = {\n sourceIssueRef: source.ref,\n sourceTracker: source.tracker,\n targetIssueRef: target.ref,\n targetTracker: target.tracker,\n direction,\n createdAt: new Date().toISOString(),\n };\n\n this.store.links.push(link);\n this.save();\n return link;\n }\n\n /**\n * Remove a link between two issues\n */\n removeLink(\n source: 
{ ref: string; tracker: TrackerType },\n target: { ref: string; tracker: TrackerType }\n ): boolean {\n const index = this.store.links.findIndex(\n (l) =>\n l.sourceIssueRef === source.ref &&\n l.sourceTracker === source.tracker &&\n l.targetIssueRef === target.ref &&\n l.targetTracker === target.tracker\n );\n\n if (index >= 0) {\n this.store.links.splice(index, 1);\n this.save();\n return true;\n }\n return false;\n }\n\n /**\n * Get all issues linked to a given issue\n */\n getLinkedIssues(ref: string, tracker: TrackerType): TrackerLink[] {\n return this.store.links.filter(\n (l) =>\n (l.sourceIssueRef === ref && l.sourceTracker === tracker) ||\n (l.targetIssueRef === ref && l.targetTracker === tracker)\n );\n }\n\n /**\n * Get all links (for debugging/admin)\n */\n getAllLinks(): TrackerLink[] {\n return [...this.store.links];\n }\n\n /**\n * Find linked issue in another tracker\n */\n findLinkedIssue(\n ref: string,\n sourceTracker: TrackerType,\n targetTracker: TrackerType\n ): string | null {\n // Check as source\n const asSource = this.store.links.find(\n (l) =>\n l.sourceIssueRef === ref &&\n l.sourceTracker === sourceTracker &&\n l.targetTracker === targetTracker\n );\n if (asSource) return asSource.targetIssueRef;\n\n // Check as target\n const asTarget = this.store.links.find(\n (l) =>\n l.targetIssueRef === ref &&\n l.targetTracker === sourceTracker &&\n l.sourceTracker === targetTracker\n );\n if (asTarget) return asTarget.sourceIssueRef;\n\n return null;\n }\n\n /**\n * Clear all links (for testing)\n */\n clear(): void {\n this.store.links = [];\n this.save();\n }\n}\n\n// Singleton instance\nlet _linkManager: LinkManager | null = null;\n\nexport function getLinkManager(): LinkManager {\n if (!_linkManager) {\n _linkManager = new LinkManager();\n }\n return _linkManager;\n}\n","/**\n * Issue Tracker Module\n *\n * Provides a unified interface for different issue tracking systems.\n */\n\n// Core interface and types\nexport type {\n IssueTracker,\n 
Issue,\n IssueFilters,\n IssueState,\n IssueUpdate,\n NewIssue,\n Comment,\n TrackerType,\n} from './interface.js';\n\nexport {\n NotImplementedError,\n IssueNotFoundError,\n TrackerAuthError,\n} from './interface.js';\n\n// Tracker implementations\nexport { LinearTracker } from './linear.js';\nexport { GitHubTracker } from './github.js';\nexport { GitLabTracker } from './gitlab.js';\n\n// Factory functions\nexport type { TrackerConfig } from './factory.js';\nexport {\n createTracker,\n createTrackerFromConfig,\n getPrimaryTracker,\n getSecondaryTracker,\n getAllTrackers,\n} from './factory.js';\n\n// Cross-tracker linking\nexport type { TrackerLink, LinkDirection } from './linking.js';\nexport {\n LinkManager,\n getLinkManager,\n parseIssueRef,\n formatIssueRef,\n} from './linking.js';\n"],"mappings":";;;;;;AAMA,SAAgB,cAAqB;CACnC,MAAM,QAAQ,QAAQ,IAAI,SAAS;AAEnC,KAAI,MAAM,SAAS,MAAM,CAAE,QAAO;AAClC,KAAI,MAAM,SAAS,OAAO,CAAE,QAAO;AACnC,KAAI,MAAM,SAAS,OAAO,CAAE,QAAO;AAEnC,QAAO;;AAGT,SAAgB,eAAe,OAA6B;CAC1D,MAAM,OAAO,SAAS;AAEtB,SAAQ,OAAR;EACE,KAAK,MACH,QAAO,KAAK,MAAM,SAAS;EAC7B,KAAK;GAEH,MAAM,SAAS,KAAK,MAAM,UAAU;AACpC,OAAI,WAAW,OAAO,CAAE,QAAO;AAC/B,UAAO,KAAK,MAAM,gBAAgB;EACpC,KAAK,OACH,QAAO,KAAK,MAAM,WAAW,QAAQ,cAAc;EACrD,QACE,QAAO;;;AAIb,MAAM,aAAa;AACnB,MAAM,eAAe;AAErB,SAAgB,SAAS,QAAyB;AAChD,KAAI,CAAC,WAAW,OAAO,CAAE,QAAO;CAEhC,MAAM,UAAU,aAAa,QAAQ,OAAO;AAC5C,QAAO,QAAQ,SAAS,aAAa,IAAI,QAAQ,SAAS,WAAW;;AAGvE,SAAgB,SAAS,QAAsB;AAC7C,KAAI,SAAS,OAAO,CAAE;AAOtB,gBAAe,QALI;EACnB,aAAa;EACb,WAAW;GAGwB,OAAO;;AAG5C,SAAgB,qBAAqB,OAAsB;CACzD,MAAM,SAAS,eAAe,MAAM;AAEpC,KAAI,CAAC,OACH,QAAO,qCAAqC;AAG9C,QAAO,kBAAkB,OAAO,mBAAmB;;;;;;;;;;;;;;;;;;ACvBrD,SAAgB,cAAc,KAA2D;AAEvF,KAAI,IAAI,WAAW,UAAU,CAC3B,QAAO;EAAE,SAAS;EAAU,KAAK,IAAI,IAAI,MAAM,EAAE;EAAI;AAEvD,KAAI,IAAI,WAAW,UAAU,CAC3B,QAAO;EAAE,SAAS;EAAU,KAAK,IAAI,IAAI,MAAM,EAAE;EAAI;AAEvD,KAAI,IAAI,WAAW,UAAU,CAC3B,QAAO;EAAE,SAAS;EAAU,KAAK,IAAI,MAAM,EAAE;EAAE;AAIjD,KAAI,SAAS,KAAK,IAAI,CACpB,QAAO;EAAE,SAAS;EAAU;EAAK;AAInC,KAAI,gBAAgB,KAAK,IAAI,CAC3B,
QAAO;EAAE,SAAS;EAAU,KAAK,IAAI,aAAa;EAAE;AAGtD,QAAO;;;;;AAMT,SAAgB,eAAe,KAAa,SAA8B;AACxE,KAAI,YAAY,SACd,QAAO,IAAI,WAAW,IAAI,GAAG,SAAS,QAAQ,UAAU;AAE1D,KAAI,YAAY,SACd,QAAO,IAAI,WAAW,IAAI,GAAG,SAAS,QAAQ,UAAU;AAE1D,QAAO;;;;;AAMT,IAAa,cAAb,MAAyB;CACvB;CACA;CAEA,YAAY,WAAoB;AAC9B,OAAK,YAAY,aAAa,KAAK,SAAS,EAAE,eAAe,aAAa;AAC1E,OAAK,QAAQ,KAAK,MAAM;;CAG1B,OAA0B;AACxB,MAAI,WAAW,KAAK,UAAU,CAC5B,KAAI;GACF,MAAM,OAAO,KAAK,MAAM,aAAa,KAAK,WAAW,QAAQ,CAAC;AAC9D,OAAI,KAAK,YAAY,EACnB,QAAO;UAEH;AAIV,SAAO;GAAE,SAAS;GAAG,OAAO,EAAE;GAAE;;CAGlC,OAAqB;EACnB,MAAM,MAAM,KAAK,KAAK,WAAW,KAAK;AACtC,MAAI,CAAC,WAAW,IAAI,CAClB,WAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAErC,gBAAc,KAAK,WAAW,KAAK,UAAU,KAAK,OAAO,MAAM,EAAE,CAAC;;;;;CAMpE,QACE,QACA,QACA,YAA2B,WACd;EAEb,MAAM,WAAW,KAAK,MAAM,MAAM,MAC/B,MACC,EAAE,mBAAmB,OAAO,OAC5B,EAAE,kBAAkB,OAAO,WAC3B,EAAE,mBAAmB,OAAO,OAC5B,EAAE,kBAAkB,OAAO,QAC9B;AAED,MAAI,UAAU;AAEZ,OAAI,SAAS,cAAc,WAAW;AACpC,aAAS,YAAY;AACrB,SAAK,MAAM;;AAEb,UAAO;;EAGT,MAAM,OAAoB;GACxB,gBAAgB,OAAO;GACvB,eAAe,OAAO;GACtB,gBAAgB,OAAO;GACvB,eAAe,OAAO;GACtB;GACA,4BAAW,IAAI,MAAM,EAAC,aAAa;GACpC;AAED,OAAK,MAAM,MAAM,KAAK,KAAK;AAC3B,OAAK,MAAM;AACX,SAAO;;;;;CAMT,WACE,QACA,QACS;EACT,MAAM,QAAQ,KAAK,MAAM,MAAM,WAC5B,MACC,EAAE,mBAAmB,OAAO,OAC5B,EAAE,kBAAkB,OAAO,WAC3B,EAAE,mBAAmB,OAAO,OAC5B,EAAE,kBAAkB,OAAO,QAC9B;AAED,MAAI,SAAS,GAAG;AACd,QAAK,MAAM,MAAM,OAAO,OAAO,EAAE;AACjC,QAAK,MAAM;AACX,UAAO;;AAET,SAAO;;;;;CAMT,gBAAgB,KAAa,SAAqC;AAChE,SAAO,KAAK,MAAM,MAAM,QACrB,MACE,EAAE,mBAAmB,OAAO,EAAE,kBAAkB,WAChD,EAAE,mBAAmB,OAAO,EAAE,kBAAkB,QACpD;;;;;CAMH,cAA6B;AAC3B,SAAO,CAAC,GAAG,KAAK,MAAM,MAAM;;;;;CAM9B,gBACE,KACA,eACA,eACe;EAEf,MAAM,WAAW,KAAK,MAAM,MAAM,MAC/B,MACC,EAAE,mBAAmB,OACrB,EAAE,kBAAkB,iBACpB,EAAE,kBAAkB,cACvB;AACD,MAAI,SAAU,QAAO,SAAS;EAG9B,MAAM,WAAW,KAAK,MAAM,MAAM,MAC/B,MACC,EAAE,mBAAmB,OACrB,EAAE,kBAAkB,iBACpB,EAAE,kBAAkB,cACvB;AACD,MAAI,SAAU,QAAO,SAAS;AAE9B,SAAO;;;;;CAMT,QAAc;AACZ,OAAK,MAAM,QAAQ,EAAE;AACrB,OAAK,MAAM;;;AAKf,IAAI,eAAmC;AAEvC,SAAgB,iBAA8B;AAC5C,KAAI,CAAC,aACH,gBAAe,IAAI,aAAa;AAElC,QAAO;;;;gBCvNe;aAGoB;aACA;
aACA;cAUtB"}