panopticon-cli 0.6.4 → 0.6.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/dist/{agents-DfYify9s.js → agents-CfFDs52G.js} +14 -14
- package/dist/{agents-DfYify9s.js.map → agents-CfFDs52G.js.map} +1 -1
- package/dist/{agents-BKsVoIc9.js → agents-D_2oRFVf.js} +1 -1
- package/dist/{archive-planning-BJrZ3tmN.js → archive-planning-D97ziGec.js} +3 -3
- package/dist/{archive-planning-BJrZ3tmN.js.map → archive-planning-D97ziGec.js.map} +1 -1
- package/dist/{archive-planning-C3m3hfa5.js → archive-planning-DK90wn9Q.js} +1 -1
- package/dist/{browser-Cvdznzc0.js → browser-CX7jXfXX.js} +1 -1
- package/dist/{browser-Cvdznzc0.js.map → browser-CX7jXfXX.js.map} +1 -1
- package/dist/{clean-planning-DvhZAUv4.js → clean-planning-D_lz4aQq.js} +2 -2
- package/dist/{clean-planning-DvhZAUv4.js.map → clean-planning-D_lz4aQq.js.map} +1 -1
- package/dist/clean-planning-x1S-JdmO.js +2 -0
- package/dist/cli/index.js +291 -760
- package/dist/cli/index.js.map +1 -1
- package/dist/{close-issue-Dr7yZmrr.js → close-issue-CaFE0stN.js} +11 -7
- package/dist/close-issue-CaFE0stN.js.map +1 -0
- package/dist/close-issue-CjcfZI9s.js +2 -0
- package/dist/compact-beads-B0_qE1w3.js +2 -0
- package/dist/{compact-beads-BCOtIIRl.js → compact-beads-CjFkteSU.js} +2 -2
- package/dist/{compact-beads-BCOtIIRl.js.map → compact-beads-CjFkteSU.js.map} +1 -1
- package/dist/{config-CRzMQRgA.js → config-BQNKsi9G.js} +2 -2
- package/dist/{config-CRzMQRgA.js.map → config-BQNKsi9G.js.map} +1 -1
- package/dist/{config-BYgUzQ21.js → config-agyKgF5C.js} +1 -1
- package/dist/{config-yaml-BgOACZAB.js → config-yaml-DGbLSMCa.js} +1 -1
- package/dist/{config-yaml-BgOACZAB.js.map → config-yaml-DGbLSMCa.js.map} +1 -1
- package/dist/{config-yaml-fdyvyL0S.js → config-yaml-Dqt4FWQH.js} +1 -1
- package/dist/dashboard/{acceptance-criteria-e5iiHlRx.js → acceptance-criteria-Dk9hhiYj.js} +1 -1
- package/dist/dashboard/{acceptance-criteria-e5iiHlRx.js.map → acceptance-criteria-Dk9hhiYj.js.map} +1 -1
- package/dist/dashboard/{agent-enrichment-C67LJBgD.js → agent-enrichment-DdO7ZqjI.js} +11 -7
- package/dist/dashboard/agent-enrichment-DdO7ZqjI.js.map +1 -0
- package/dist/dashboard/{agent-enrichment-Cq0P1cNZ.js → agent-enrichment-dLeGE1fX.js} +1 -1
- package/dist/dashboard/{agents-YyO6t5Xa.js → agents-DCpQQ_W5.js} +14 -14
- package/dist/dashboard/{agents-YyO6t5Xa.js.map → agents-DCpQQ_W5.js.map} +1 -1
- package/dist/dashboard/{agents-BVBVCyat.js → agents-Dgh2TjSp.js} +1 -1
- package/dist/dashboard/{archive-planning-h-hAjk0P.js → archive-planning-BmW9UDTr.js} +3 -3
- package/dist/dashboard/{archive-planning-h-hAjk0P.js.map → archive-planning-BmW9UDTr.js.map} +1 -1
- package/dist/dashboard/{archive-planning-CScs1MOC.js → archive-planning-C3Ebf9yC.js} +1 -1
- package/dist/dashboard/{beads-qNB0yAHV.js → beads-Bv-AdX7G.js} +3 -3
- package/dist/dashboard/{beads-qNB0yAHV.js.map → beads-Bv-AdX7G.js.map} +1 -1
- package/dist/dashboard/{beads-D_FRedEJ.js → beads-By6-X07V.js} +1 -1
- package/dist/dashboard/clean-planning-D60L8rPY.js +2 -0
- package/dist/dashboard/{clean-planning-qafw99vY.js → clean-planning-VEJu5suh.js} +2 -2
- package/dist/dashboard/{clean-planning-qafw99vY.js.map → clean-planning-VEJu5suh.js.map} +1 -1
- package/dist/dashboard/close-issue-C2KeSKKJ.js +2 -0
- package/dist/dashboard/{close-issue-DfIggeZD.js → close-issue-DtKdsSTm.js} +11 -7
- package/dist/dashboard/close-issue-DtKdsSTm.js.map +1 -0
- package/dist/dashboard/compact-beads-C7BN5N11.js +2 -0
- package/dist/dashboard/{compact-beads-Dt0qTqsC.js → compact-beads-D8Vt3qyv.js} +2 -2
- package/dist/dashboard/{compact-beads-Dt0qTqsC.js.map → compact-beads-D8Vt3qyv.js.map} +1 -1
- package/dist/dashboard/{config-CUREjHP7.js → config-CDkGjnwy.js} +2 -2
- package/dist/dashboard/{config-CUREjHP7.js.map → config-CDkGjnwy.js.map} +1 -1
- package/dist/dashboard/{config-BeI3uy-8.js → config-CTXkBATQ.js} +1 -1
- package/dist/dashboard/{database-CozA13Wy.js → database-DhqASALP.js} +1 -1
- package/dist/dashboard/{database-C0y0hXBx.js → database-cxmQryoh.js} +2 -2
- package/dist/dashboard/{database-C0y0hXBx.js.map → database-cxmQryoh.js.map} +1 -1
- package/dist/dashboard/{dist-src-oG2iHzgI.js → dist-src-DTm11oQr.js} +1 -1
- package/dist/dashboard/{dist-src-oG2iHzgI.js.map → dist-src-DTm11oQr.js.map} +1 -1
- package/dist/dashboard/{event-store-D7kLBd07.js → event-store-VWWUmOfn.js} +1 -1
- package/dist/dashboard/{event-store-O9q0Gweh.js → event-store-vSmAA3Zp.js} +9 -4
- package/dist/dashboard/event-store-vSmAA3Zp.js.map +1 -0
- package/dist/dashboard/{factory-BnLdiQW-.js → factory-C8nhLGHB.js} +3 -3
- package/dist/dashboard/{factory-BnLdiQW-.js.map → factory-C8nhLGHB.js.map} +1 -1
- package/dist/dashboard/{feedback-writer-DyovUANg.js → feedback-writer-CudSe1WK.js} +2 -2
- package/dist/dashboard/{feedback-writer-DyovUANg.js.map → feedback-writer-CudSe1WK.js.map} +1 -1
- package/dist/dashboard/{feedback-writer-gSUv_W0h.js → feedback-writer-Wgv1cd1r.js} +1 -1
- package/dist/dashboard/{git-utils-BJRioREj.js → git-utils-C1m4SwAe.js} +1 -1
- package/dist/dashboard/{git-utils-BJRioREj.js.map → git-utils-C1m4SwAe.js.map} +1 -1
- package/dist/dashboard/{git-utils-BtCRddq3.js → git-utils-DQI8EYoj.js} +1 -1
- package/dist/dashboard/{github-app-XO-LBUGk.js → github-app-DClWjjHr.js} +1 -1
- package/dist/dashboard/{github-app-XO-LBUGk.js.map → github-app-DClWjjHr.js.map} +1 -1
- package/dist/dashboard/{health-events-db-584nYgJB.js → health-events-db-BMXQfInV.js} +1 -1
- package/dist/dashboard/{health-events-db-B3ChzN65.js → health-events-db-Do4NrOhC.js} +2 -2
- package/dist/dashboard/{health-events-db-B3ChzN65.js.map → health-events-db-Do4NrOhC.js.map} +1 -1
- package/dist/dashboard/{hooks-CKhs3N68.js → hooks-CB4T47NC.js} +1 -1
- package/dist/dashboard/{hooks-CErbP8Oq.js → hooks-CjqXOlNb.js} +2 -2
- package/dist/dashboard/{hooks-CErbP8Oq.js.map → hooks-CjqXOlNb.js.map} +1 -1
- package/dist/dashboard/hume-CA2pftu_.js +3 -0
- package/dist/dashboard/{hume-CX_U3Qha.js → hume-JsAlMOJC.js} +2 -2
- package/dist/dashboard/{hume-CX_U3Qha.js.map → hume-JsAlMOJC.js.map} +1 -1
- package/dist/dashboard/{inspect-agent-B57kGDUV.js → inspect-agent-7eour7EA.js} +3 -3
- package/dist/dashboard/{inspect-agent-B57kGDUV.js.map → inspect-agent-7eour7EA.js.map} +1 -1
- package/dist/dashboard/{io-yGovuG4U.js → io-CWlFW78i.js} +1 -1
- package/dist/dashboard/{io-AJg-mzFi.js → io-DKS6359z.js} +1 -1
- package/dist/dashboard/{io-AJg-mzFi.js.map → io-DKS6359z.js.map} +1 -1
- package/dist/dashboard/issue-id-vwYJdsf8.js +62 -0
- package/dist/dashboard/issue-id-vwYJdsf8.js.map +1 -0
- package/dist/dashboard/{issue-service-singleton-DQK42EqH.js → issue-service-singleton-Co__-6kL.js} +1 -1
- package/dist/dashboard/{issue-service-singleton-sb2HkB9f.js → issue-service-singleton-Wv4xBm3y.js} +7 -7
- package/dist/dashboard/{issue-service-singleton-sb2HkB9f.js.map → issue-service-singleton-Wv4xBm3y.js.map} +1 -1
- package/dist/dashboard/{label-cleanup-CZEsbtq9.js → label-cleanup-nVKTmIIW.js} +7 -4
- package/dist/dashboard/label-cleanup-nVKTmIIW.js.map +1 -0
- package/dist/dashboard/lifecycle-BcUmtkR4.js +7 -0
- package/dist/dashboard/{merge-agent-GLtMEsTu.js → merge-agent-CGN3TT0a.js} +1 -1
- package/dist/dashboard/{merge-agent-twroFuAh.js → merge-agent-yudQOPZc.js} +148 -46
- package/dist/dashboard/merge-agent-yudQOPZc.js.map +1 -0
- package/dist/dashboard/{paths-COdEvoXR.js → paths-BDyJ7BiV.js} +19 -2
- package/dist/dashboard/{paths-COdEvoXR.js.map → paths-BDyJ7BiV.js.map} +1 -1
- package/dist/dashboard/{pipeline-notifier-DM5AHG5Q.js → pipeline-notifier-CCSN-jar.js} +1 -1
- package/dist/dashboard/{pipeline-notifier-DM5AHG5Q.js.map → pipeline-notifier-CCSN-jar.js.map} +1 -1
- package/dist/dashboard/{plan-utils-BkCIhn3B.js → plan-utils-Bkcsqr_s.js} +3 -3
- package/dist/dashboard/{plan-utils-BkCIhn3B.js.map → plan-utils-Bkcsqr_s.js.map} +1 -1
- package/dist/dashboard/{prd-draft-D09Afalc.js → prd-draft-BD8oMkZ1.js} +2 -2
- package/dist/dashboard/{prd-draft-D09Afalc.js.map → prd-draft-BD8oMkZ1.js.map} +1 -1
- package/dist/dashboard/{projection-cache-DQ9zegkK.js → projection-cache-C0EL8s8h.js} +1 -1
- package/dist/dashboard/{projection-cache-DQ9zegkK.js.map → projection-cache-C0EL8s8h.js.map} +1 -1
- package/dist/dashboard/{projects-DyT3vSy-.js → projects-C5ozxjwP.js} +1 -1
- package/dist/dashboard/{projects-Cq3TWdPS.js → projects-CFVl4oHn.js} +25 -13
- package/dist/dashboard/projects-CFVl4oHn.js.map +1 -0
- package/dist/dashboard/{providers-Ck2sQd_F.js → providers-B5Y4H2Mg.js} +4 -4
- package/dist/dashboard/providers-B5Y4H2Mg.js.map +1 -0
- package/dist/dashboard/{providers-DVQnDekG.js → providers-csVZVPkE.js} +1 -1
- package/dist/dashboard/public/assets/{dist-CCJbQrSB.js → dist-BaQPC-c6.js} +1 -1
- package/dist/dashboard/public/assets/index-ByLmYGhW.js +212 -0
- package/dist/dashboard/public/assets/index-OEEbThNN.css +1 -0
- package/dist/dashboard/public/index.html +2 -2
- package/dist/dashboard/rally-6McpKKRa.js +3 -0
- package/dist/dashboard/{rally-Cwuae-4C.js → rally-YjFRxIiC.js} +2 -2
- package/dist/dashboard/{rally-Cwuae-4C.js.map → rally-YjFRxIiC.js.map} +1 -1
- package/dist/dashboard/{rally-api-DSUxm7EO.js → rally-api-C0WqCSkT.js} +1 -1
- package/dist/dashboard/{rally-api-DSUxm7EO.js.map → rally-api-C0WqCSkT.js.map} +1 -1
- package/dist/dashboard/{rally-api-CEH5KZi4.js → rally-api-DNttdCW4.js} +1 -1
- package/dist/dashboard/{remote-BHTTMpJJ.js → remote-Cigqjj3f.js} +2 -2
- package/dist/dashboard/{remote-BXo_iIku.js → remote-ObpNZ7hF.js} +2 -2
- package/dist/dashboard/{remote-BXo_iIku.js.map → remote-ObpNZ7hF.js.map} +1 -1
- package/dist/dashboard/{remote-agents-CTKVhFFY.js → remote-agents-Bf3GuM7t.js} +1 -1
- package/dist/dashboard/{remote-agents-C0_0LLNd.js → remote-agents-DFyjT1Le.js} +1 -1
- package/dist/dashboard/{remote-agents-C0_0LLNd.js.map → remote-agents-DFyjT1Le.js.map} +1 -1
- package/dist/dashboard/{review-status-CK3eBGyb.js → review-status-BtXqWBhS.js} +1 -1
- package/dist/dashboard/{review-status-CV55Tl-n.js → review-status-Bymwzh2i.js} +44 -4
- package/dist/dashboard/{review-status-CV55Tl-n.js.map → review-status-Bymwzh2i.js.map} +1 -1
- package/dist/dashboard/server.js +565 -265
- package/dist/dashboard/server.js.map +1 -1
- package/dist/dashboard/{settings-CuHV-wcv.js → settings-BHlDG7TK.js} +2 -2
- package/dist/dashboard/settings-BHlDG7TK.js.map +1 -0
- package/dist/dashboard/settings-XWvDcj-D.js +2 -0
- package/dist/dashboard/{shadow-engineering-BUeZunaE.js → shadow-engineering-lIn1W_95.js} +1 -1
- package/dist/dashboard/{shadow-engineering-BUeZunaE.js.map → shadow-engineering-lIn1W_95.js.map} +1 -1
- package/dist/dashboard/{shadow-state-DHQ-kASN.js → shadow-state-BIexcxkv.js} +1 -1
- package/dist/dashboard/{shadow-state-DHQ-kASN.js.map → shadow-state-BIexcxkv.js.map} +1 -1
- package/dist/dashboard/{spawn-planning-session-8FFAqLdK.js → spawn-planning-session-33Jf-d5T.js} +6 -6
- package/dist/dashboard/{spawn-planning-session-8FFAqLdK.js.map → spawn-planning-session-33Jf-d5T.js.map} +1 -1
- package/dist/dashboard/{spawn-planning-session-U0Lqpjen.js → spawn-planning-session-D5hrVdWM.js} +1 -1
- package/dist/dashboard/{specialist-context-ColzlmGE.js → specialist-context-DGukHSn8.js} +6 -6
- package/dist/dashboard/{specialist-context-ColzlmGE.js.map → specialist-context-DGukHSn8.js.map} +1 -1
- package/dist/dashboard/{specialist-logs-BhmDpFIq.js → specialist-logs-CIw4qfTy.js} +1 -1
- package/dist/dashboard/{specialists-C6s3U6tX.js → specialists-B_zrayaP.js} +37 -36
- package/dist/dashboard/specialists-B_zrayaP.js.map +1 -0
- package/dist/dashboard/{specialists-Cny632-T.js → specialists-Cp-PgspS.js} +1 -1
- package/dist/dashboard/{test-agent-queue-tqI4VDsu.js → test-agent-queue-ypF_ecHo.js} +4 -4
- package/dist/dashboard/{test-agent-queue-tqI4VDsu.js.map → test-agent-queue-ypF_ecHo.js.map} +1 -1
- package/dist/dashboard/{tldr-daemon-BNFyS7W_.js → tldr-daemon-B_oLRD8z.js} +2 -2
- package/dist/dashboard/{tldr-daemon-BNFyS7W_.js.map → tldr-daemon-B_oLRD8z.js.map} +1 -1
- package/dist/dashboard/{tldr-daemon-A6JqC59u.js → tldr-daemon-Cfs0bXTi.js} +1 -1
- package/dist/dashboard/{tmux-DYGAVJfb.js → tmux-BzxdKItf.js} +1 -1
- package/dist/dashboard/{tmux-IlN1Slv-.js → tmux-LwG0tHhU.js} +2 -2
- package/dist/dashboard/{tmux-IlN1Slv-.js.map → tmux-LwG0tHhU.js.map} +1 -1
- package/dist/dashboard/{tracker-config-BzNLnmcE.js → tracker-config-BP59uH4V.js} +1 -1
- package/dist/dashboard/{tracker-config-CNM_5rEf.js → tracker-config-e7ph1QqT.js} +2 -2
- package/dist/dashboard/{tracker-config-CNM_5rEf.js.map → tracker-config-e7ph1QqT.js.map} +1 -1
- package/dist/dashboard/{tunnel-D2BkwU7k.js → tunnel-0RzzuXPf.js} +1 -1
- package/dist/dashboard/{tunnel-Dub2hiAA.js → tunnel-DldbBPWL.js} +2 -2
- package/dist/dashboard/{tunnel-Dub2hiAA.js.map → tunnel-DldbBPWL.js.map} +1 -1
- package/dist/dashboard/{types-CWA-o4UN.js → types-RKZjGE5N.js} +1 -1
- package/dist/dashboard/{types-CWA-o4UN.js.map → types-RKZjGE5N.js.map} +1 -1
- package/dist/dashboard/{vtt-parser-BAXygRf0.js → vtt-parser-99vFekRQ.js} +1 -1
- package/dist/dashboard/{vtt-parser-BAXygRf0.js.map → vtt-parser-99vFekRQ.js.map} +1 -1
- package/dist/dashboard/{work-agent-prompt-JYq_OugP.js → work-agent-prompt-fCg67nyo.js} +65 -10
- package/dist/dashboard/{work-agent-prompt-JYq_OugP.js.map → work-agent-prompt-fCg67nyo.js.map} +1 -1
- package/dist/dashboard/{work-type-router-Cxp8_ur2.js → work-type-router-CWVW2Wk_.js} +1 -1
- package/dist/dashboard/{work-type-router-Cxp8_ur2.js.map → work-type-router-CWVW2Wk_.js.map} +1 -1
- package/dist/dashboard/{work-type-router-Com2amST.js → work-type-router-Di5gCQwh.js} +1 -1
- package/dist/dashboard/{workflows-N1UTipYl.js → workflows-BSMipN07.js} +35 -17
- package/dist/dashboard/workflows-BSMipN07.js.map +1 -0
- package/dist/dashboard/workflows-DaYWQIS2.js +2 -0
- package/dist/dashboard/{workspace-config-cmp5_ipD.js → workspace-config-DVDR-Ukh.js} +1 -1
- package/dist/dashboard/workspace-config-DVDR-Ukh.js.map +1 -0
- package/dist/dashboard/{workspace-manager-CjpWPgzL.js → workspace-manager-BYfzs_t2.js} +1 -1
- package/dist/dashboard/{workspace-manager-D_y9ZmW_.js → workspace-manager-C7OfT62A.js} +44 -24
- package/dist/dashboard/workspace-manager-C7OfT62A.js.map +1 -0
- package/dist/{dns-BKzHm-2q.js → dns-D_aKQJjb.js} +1 -1
- package/dist/{dns-DZwOWvVO.js → dns-Yxq4NNS7.js} +1 -1
- package/dist/{dns-DZwOWvVO.js.map → dns-Yxq4NNS7.js.map} +1 -1
- package/dist/{factory-DFu3IT4r.js → factory-BRBGw6OB.js} +1 -1
- package/dist/{factory-DfzczxN1.js → factory-DzsOiZVc.js} +3 -3
- package/dist/{factory-DfzczxN1.js.map → factory-DzsOiZVc.js.map} +1 -1
- package/dist/{feedback-writer-CwdnOkPO.js → feedback-writer-ygXN5F9N.js} +2 -2
- package/dist/{feedback-writer-CwdnOkPO.js.map → feedback-writer-ygXN5F9N.js.map} +1 -1
- package/dist/{github-app-CHKwxOeQ.js → github-app-DykduJ0X.js} +1 -1
- package/dist/{github-app-CHKwxOeQ.js.map → github-app-DykduJ0X.js.map} +1 -1
- package/dist/hume-9nv1VmMV.js +3 -0
- package/dist/{hume-DnV-tDsh.js → hume-DoCbph2h.js} +2 -2
- package/dist/{hume-DnV-tDsh.js.map → hume-DoCbph2h.js.map} +1 -1
- package/dist/index.d.ts +17 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +8 -7
- package/dist/issue-id-CAcekoIw.js +62 -0
- package/dist/issue-id-CAcekoIw.js.map +1 -0
- package/dist/{label-cleanup-31ElPqqv.js → label-cleanup-C8R9Rspn.js} +7 -4
- package/dist/label-cleanup-C8R9Rspn.js.map +1 -0
- package/dist/{manifest-DL0oDbpv.js → manifest-B4ghOD-V.js} +1 -1
- package/dist/{manifest-DL0oDbpv.js.map → manifest-B4ghOD-V.js.map} +1 -1
- package/dist/{merge-agent-VQH9z9t8.js → merge-agent-DlUiUanN.js} +86 -33
- package/dist/merge-agent-DlUiUanN.js.map +1 -0
- package/dist/{paths-lMaxrYtT.js → paths-CDJ_HsbN.js} +19 -2
- package/dist/{paths-lMaxrYtT.js.map → paths-CDJ_HsbN.js.map} +1 -1
- package/dist/{pipeline-notifier-OJ-d3Y60.js → pipeline-notifier-XgDdCdvT.js} +1 -1
- package/dist/{pipeline-notifier-OJ-d3Y60.js.map → pipeline-notifier-XgDdCdvT.js.map} +1 -1
- package/dist/{projects-CvLepaxC.js → projects-Bk-5QhFQ.js} +25 -13
- package/dist/projects-Bk-5QhFQ.js.map +1 -0
- package/dist/{projects-DMWmPeIU.js → projects-DhU7rAVN.js} +1 -1
- package/dist/{providers-DcCPZ5K4.js → providers-DSU1vfQF.js} +4 -4
- package/dist/providers-DSU1vfQF.js.map +1 -0
- package/dist/rally-DdPvGa-w.js +3 -0
- package/dist/{rally-uUUZXp1h.js → rally-Dy00NElU.js} +1 -1
- package/dist/{rally-uUUZXp1h.js.map → rally-Dy00NElU.js.map} +1 -1
- package/dist/{remote-CkLBqLJc.js → remote-CYiOJg0q.js} +2 -2
- package/dist/{remote-CkLBqLJc.js.map → remote-CYiOJg0q.js.map} +1 -1
- package/dist/{remote-agents-C5Bd2fgt.js → remote-agents-CZXrUF4f.js} +1 -1
- package/dist/{remote-agents-C5Bd2fgt.js.map → remote-agents-CZXrUF4f.js.map} +1 -1
- package/dist/{remote-agents-BTzD-wMQ.js → remote-agents-ycHHVsgf.js} +1 -1
- package/dist/{remote-workspace-Dxghqiti.js → remote-workspace-CA33UuVI.js} +4 -4
- package/dist/{remote-workspace-Dxghqiti.js.map → remote-workspace-CA33UuVI.js.map} +1 -1
- package/dist/{review-status-2TdtHNcs.js → review-status-D6H2WOw8.js} +1 -1
- package/dist/{review-status-Bm1bWNEa.js → review-status-DEDvCKMP.js} +44 -4
- package/dist/{review-status-Bm1bWNEa.js.map → review-status-DEDvCKMP.js.map} +1 -1
- package/dist/{tracker-C_62ukEq.js → settings-BcWPTrua.js} +7 -199
- package/dist/settings-BcWPTrua.js.map +1 -0
- package/dist/shadow-state-BZzxfEGw.js +2 -0
- package/dist/{shadow-state-CFFHf05M.js → shadow-state-CE3dQfll.js} +1 -1
- package/dist/{shadow-state-CFFHf05M.js.map → shadow-state-CE3dQfll.js.map} +1 -1
- package/dist/{specialist-context-BdNFsfMG.js → specialist-context-BAUWL1Fl.js} +6 -6
- package/dist/{specialist-context-BdNFsfMG.js.map → specialist-context-BAUWL1Fl.js.map} +1 -1
- package/dist/{specialist-logs-CLztE_bE.js → specialist-logs-DQKKQV9B.js} +1 -1
- package/dist/{specialists-aUoUVWsN.js → specialists-Bfb9ATzw.js} +1 -1
- package/dist/{specialists-DEKqgkxp.js → specialists-D7Kj5o6s.js} +35 -34
- package/dist/specialists-D7Kj5o6s.js.map +1 -0
- package/dist/sync-DMfgd389.js +693 -0
- package/dist/sync-DMfgd389.js.map +1 -0
- package/dist/sync-TL6y-8K6.js +2 -0
- package/dist/{tldr-daemon-BCEFPItr.js → tldr-daemon-CFx4LXAl.js} +2 -2
- package/dist/{tldr-daemon-BCEFPItr.js.map → tldr-daemon-CFx4LXAl.js.map} +1 -1
- package/dist/{tldr-daemon-xBAx4cBE.js → tldr-daemon-D_EooADG.js} +1 -1
- package/dist/{tmux-DN6H886Y.js → tmux-CBtui_Cl.js} +1 -1
- package/dist/{tmux-CKdNxxJx.js → tmux-D6Ah4I8z.js} +2 -2
- package/dist/{tmux-CKdNxxJx.js.map → tmux-D6Ah4I8z.js.map} +1 -1
- package/dist/tracker-BhYYvU3p.js +198 -0
- package/dist/tracker-BhYYvU3p.js.map +1 -0
- package/dist/{tracker-utils-CVU2W1sX.js → tracker-utils-ChQyut8w.js} +34 -12
- package/dist/tracker-utils-ChQyut8w.js.map +1 -0
- package/dist/{traefik-DHgBoWXX.js → traefik-C80EbDu_.js} +4 -4
- package/dist/{traefik-DHgBoWXX.js.map → traefik-C80EbDu_.js.map} +1 -1
- package/dist/{traefik-BR-edbZv.js → traefik-CgHl7Bge.js} +1 -1
- package/dist/{tunnel-BZO9Q5oe.js → tunnel-DXOJ1wMM.js} +1 -1
- package/dist/{tunnel-Bl1qNSyQ.js → tunnel-DzXEPwIc.js} +2 -2
- package/dist/{tunnel-Bl1qNSyQ.js.map → tunnel-DzXEPwIc.js.map} +1 -1
- package/dist/{types-DewGdaIP.js → types-BhJj1SP1.js} +1 -1
- package/dist/{types-DewGdaIP.js.map → types-BhJj1SP1.js.map} +1 -1
- package/dist/{work-type-router-CS2BB1vS.js → work-type-router-CHjciPyS.js} +3 -3
- package/dist/{work-type-router-CS2BB1vS.js.map → work-type-router-CHjciPyS.js.map} +1 -1
- package/dist/{workspace-config-CNXOpKuj.js → workspace-config-fUafvYMp.js} +1 -1
- package/dist/workspace-config-fUafvYMp.js.map +1 -0
- package/dist/workspace-manager-B9jS4Dsq.js +3 -0
- package/dist/{workspace-manager-CncdZkIy.js → workspace-manager-DuLhnzJV.js} +112 -27
- package/dist/workspace-manager-DuLhnzJV.js.map +1 -0
- package/package.json +2 -1
- package/scripts/post-merge-deploy.sh +25 -5
- package/scripts/record-cost-event.js +57 -7
- package/scripts/record-cost-event.js.map +1 -1
- package/skills/pan-help/SKILL.md +1 -1
- package/skills/pan-sync/SKILL.md +6 -6
- package/skills/workspace-add-repo/skill.md +46 -0
- package/templates/claude-md/sections/warnings.md +15 -2
- package/dist/clean-planning-sZXvy3Y5.js +0 -2
- package/dist/close-issue-Dml437qV.js +0 -2
- package/dist/close-issue-Dr7yZmrr.js.map +0 -1
- package/dist/compact-beads-iu218JcO.js +0 -2
- package/dist/dashboard/agent-enrichment-C67LJBgD.js.map +0 -1
- package/dist/dashboard/clean-planning-DCu3cOTu.js +0 -2
- package/dist/dashboard/close-issue-DfIggeZD.js.map +0 -1
- package/dist/dashboard/close-issue-DwdwYtar.js +0 -2
- package/dist/dashboard/compact-beads-DXY2fK2s.js +0 -2
- package/dist/dashboard/event-store-O9q0Gweh.js.map +0 -1
- package/dist/dashboard/hume-MZndNDVU.js +0 -3
- package/dist/dashboard/label-cleanup-CZEsbtq9.js.map +0 -1
- package/dist/dashboard/lifecycle-ZTYdrr2O.js +0 -7
- package/dist/dashboard/merge-agent-twroFuAh.js.map +0 -1
- package/dist/dashboard/projects-Cq3TWdPS.js.map +0 -1
- package/dist/dashboard/providers-Ck2sQd_F.js.map +0 -1
- package/dist/dashboard/public/assets/index-CpSmB2ts.css +0 -1
- package/dist/dashboard/public/assets/index-yarWhi0M.js +0 -214
- package/dist/dashboard/rally-CQ1OBJrJ.js +0 -3
- package/dist/dashboard/settings-CuHV-wcv.js.map +0 -1
- package/dist/dashboard/settings-DMeGBRsk.js +0 -2
- package/dist/dashboard/specialists-C6s3U6tX.js.map +0 -1
- package/dist/dashboard/workflows-B2ARUpOa.js +0 -2
- package/dist/dashboard/workflows-N1UTipYl.js.map +0 -1
- package/dist/dashboard/workspace-config-cmp5_ipD.js.map +0 -1
- package/dist/dashboard/workspace-manager-D_y9ZmW_.js.map +0 -1
- package/dist/hume-BjmwmJ9E.js +0 -3
- package/dist/label-cleanup-31ElPqqv.js.map +0 -1
- package/dist/merge-agent-VQH9z9t8.js.map +0 -1
- package/dist/projects-CvLepaxC.js.map +0 -1
- package/dist/providers-DcCPZ5K4.js.map +0 -1
- package/dist/rally-DR9x8--6.js +0 -3
- package/dist/shadow-state-p3jpGRPJ.js +0 -2
- package/dist/specialists-DEKqgkxp.js.map +0 -1
- package/dist/tracker-C_62ukEq.js.map +0 -1
- package/dist/tracker-utils-CVU2W1sX.js.map +0 -1
- package/dist/workspace-config-CNXOpKuj.js.map +0 -1
- package/dist/workspace-manager-CncdZkIy.js.map +0 -1
- package/dist/workspace-manager-Cx0r2Jnv.js +0 -3
package/dist/dashboard/{test-agent-queue-tqI4VDsu.js.map → test-agent-queue-ypF_ecHo.js.map}
RENAMED
@@ -1 +1 @@
- {"version":3,"file":"test-agent-queue-
+
{"version":3,"file":"test-agent-queue-ypF_ecHo.js","names":[],"sources":["../../src/lib/cloister/test-agent-queue.ts"],"sourcesContent":["/**\n * Auto-queue logic for triggering the test-agent after review passes.\n *\n * Uses per-project ephemeral specialists (no global test-agent pool).\n */\n\nimport { setReviewStatus } from '../review-status.js';\nimport { spawnEphemeralSpecialist, submitToSpecialistQueue } from './specialists.js';\nimport { resolveProjectFromIssue } from '../projects.js';\n\n/**\n * Spawn an ephemeral test specialist for the given issue, then notify\n * the work agent when delivery succeeds.\n *\n * @param issueId - Issue identifier (e.g. \"PAN-343\")\n * @param workspace - Absolute path to the workspace directory\n * @param branch - Feature branch name (e.g. \"feature/pan-343\")\n * @param notifyAgent - Callback that sends a message to the work agent\n */\nexport async function autoQueueTestAgentAndNotify(\n issueId: string,\n workspace: string,\n branch: string,\n notifyAgent: (agentId: string, msg: string) => Promise<void>,\n): Promise<void> {\n let testTaskDelivered = false;\n\n try {\n const resolved = resolveProjectFromIssue(issueId);\n if (!resolved) {\n console.error(`[test-queue] No project configured for ${issueId} — cannot spawn test specialist`);\n setReviewStatus(issueId, {\n testStatus: 'dispatch_failed',\n testNotes: `No project configured for ${issueId}. Add it to projects.yaml.`,\n });\n return;\n }\n\n const result = await spawnEphemeralSpecialist(resolved.projectKey, 'test-agent', {\n issueId,\n workspace,\n branch,\n });\n\n if (result.success) {\n setReviewStatus(issueId, { testStatus: 'testing' });\n testTaskDelivered = true;\n console.log(`[test-queue] Spawned test specialist for ${issueId} (${resolved.projectKey})`);\n } else if (result.error === 'specialist_busy') {\n // Specialist is busy with another task — add to queue for deacon to drain\n console.log(`[test-queue] Specialist busy for ${issueId} — queuing for deacon dispatch`);\n submitToSpecialistQueue('test-agent', {\n priority: 'high',\n source: 'test-queue',\n issueId,\n workspace,\n branch,\n });\n setReviewStatus(issueId, { testStatus: 'testing' });\n testTaskDelivered = true; // notify agent that tests are queued\n } else {\n // Non-busy failure — retry once after 2s\n console.log(`[test-queue] First spawn failed for ${issueId}: ${result.message}. Retrying in 2s...`);\n await new Promise((r) => setTimeout(r, 2000));\n\n const retry = await spawnEphemeralSpecialist(resolved.projectKey, 'test-agent', {\n issueId,\n workspace,\n branch,\n });\n\n if (retry.success) {\n setReviewStatus(issueId, { testStatus: 'testing' });\n testTaskDelivered = true;\n console.log(`[test-queue] Spawned test specialist for ${issueId} on retry`);\n } else if (retry.error === 'specialist_busy') {\n // Became busy between attempts — queue it\n console.log(`[test-queue] Specialist became busy for ${issueId} — queuing for deacon dispatch`);\n submitToSpecialistQueue('test-agent', {\n priority: 'high',\n source: 'test-queue',\n issueId,\n workspace,\n branch,\n });\n setReviewStatus(issueId, { testStatus: 'testing' });\n testTaskDelivered = true;\n } else {\n console.error(`[test-queue] Both spawn attempts failed for ${issueId}: ${retry.message}`);\n setReviewStatus(issueId, {\n testStatus: 'dispatch_failed',\n testNotes: `Test specialist spawn failed: ${retry.message}`,\n });\n }\n }\n } catch (err) {\n const msg = err instanceof Error ? 
err.message : String(err);\n console.error(`[test-queue] Failed to dispatch test specialist for ${issueId}:`, err);\n try {\n setReviewStatus(issueId, {\n testStatus: 'dispatch_failed',\n testNotes: `Dispatch failed: ${msg}`,\n });\n } catch (statusErr) {\n console.error(`[test-queue] Failed to set dispatch_failed status for ${issueId}:`, statusErr);\n }\n }\n\n // Only notify work agent when test task was successfully delivered\n if (testTaskDelivered) {\n try {\n await notifyAgent(\n `agent-${issueId.toLowerCase()}`,\n `REVIEW PASSED for ${issueId}. Tests have been queued automatically. Do NOT poll or check status — you will be notified when tests complete.`,\n );\n } catch (err) {\n console.log(\n `[test-queue] Could not notify work agent for ${issueId} (may not be running): ${(err as Error).message}`,\n );\n }\n }\n}\n"],"mappings":";;;;oBAMsD;kBAC+B;eAC5B;;;;;;;;;;AAWzD,eAAsB,4BACpB,SACA,WACA,QACA,aACe;CACf,IAAI,oBAAoB;AAExB,KAAI;EACF,MAAM,WAAW,wBAAwB,QAAQ;AACjD,MAAI,CAAC,UAAU;AACb,WAAQ,MAAM,0CAA0C,QAAQ,iCAAiC;AACjG,mBAAgB,SAAS;IACvB,YAAY;IACZ,WAAW,6BAA6B,QAAQ;IACjD,CAAC;AACF;;EAGF,MAAM,SAAS,MAAM,yBAAyB,SAAS,YAAY,cAAc;GAC/E;GACA;GACA;GACD,CAAC;AAEF,MAAI,OAAO,SAAS;AAClB,mBAAgB,SAAS,EAAE,YAAY,WAAW,CAAC;AACnD,uBAAoB;AACpB,WAAQ,IAAI,4CAA4C,QAAQ,IAAI,SAAS,WAAW,GAAG;aAClF,OAAO,UAAU,mBAAmB;AAE7C,WAAQ,IAAI,oCAAoC,QAAQ,gCAAgC;AACxF,2BAAwB,cAAc;IACpC,UAAU;IACV,QAAQ;IACR;IACA;IACA;IACD,CAAC;AACF,mBAAgB,SAAS,EAAE,YAAY,WAAW,CAAC;AACnD,uBAAoB;SACf;AAEL,WAAQ,IAAI,uCAAuC,QAAQ,IAAI,OAAO,QAAQ,qBAAqB;AACnG,SAAM,IAAI,SAAS,MAAM,WAAW,GAAG,IAAK,CAAC;GAE7C,MAAM,QAAQ,MAAM,yBAAyB,SAAS,YAAY,cAAc;IAC9E;IACA;IACA;IACD,CAAC;AAEF,OAAI,MAAM,SAAS;AACjB,oBAAgB,SAAS,EAAE,YAAY,WAAW,CAAC;AACnD,wBAAoB;AACpB,YAAQ,IAAI,4CAA4C,QAAQ,WAAW;cAClE,MAAM,UAAU,mBAAmB;AAE5C,YAAQ,IAAI,2CAA2C,QAAQ,gCAAgC;AAC/F,4BAAwB,cAAc;KACpC,UAAU;KACV,QAAQ;KACR;KACA;KACA;KACD,CAAC;AACF,oBAAgB,SAAS,EAAE,YAAY,WAAW,CAAC;AACnD,wBAAoB;UACf;AACL,YAAQ,MAAM,+CAA+C,QAAQ,IAAI,MAAM,UAAU;AACzF,oBAAgB,SAAS;KACvB,YAAY;KACZ,WAAW,iCAAiC,MAAM;KACnD,CAAC;;;UAGC,KAAK;EACZ,MAAM,MAAM,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI;AAC5D,UAAQ,MAAM,uDAAuD,QAAQ,IAAI,IAAI;AACrF,MAAI;AACF,mBAAgB,SAAS;IACvB,YAAY;IACZ,WAAW,oBAAoB;IAChC,CAAC;WACK,WAAW;AAClB,WAAQ,MAAM,yDAAyD,QAAQ,IAAI,UAAU;;;AAKjG,KAAI,kBACF,KAAI;AACF,QAAM,YACJ,SAAS,QAAQ,aAAa,IAC9B,qBAAqB,QAAQ,iHAC9B;UACM,KAAK;AACZ,UAAQ,IACN,gDAAgD,QAAQ,yBAA0B,IAAc,UACjG"}

package/dist/dashboard/{tldr-daemon-BNFyS7W_.js → tldr-daemon-B_oLRD8z.js}
RENAMED
@@ -1,5 +1,5 @@
  import { n as __esmMin } from "./chunk-DORXReHP.js";
- import {
+ import { _ as init_paths, s as PANOPTICON_HOME } from "./paths-BDyJ7BiV.js";
  import { existsSync, mkdirSync, readFileSync, unlinkSync, writeFileSync } from "fs";
  import { join } from "path";
  import { createHash } from "crypto";
@@ -246,4 +246,4 @@ var init_tldr_daemon = __esmMin((() => {
  //#endregion
  export { getTldrDaemonService as n, init_tldr_daemon as r, TldrDaemonService as t };

- //# sourceMappingURL=tldr-daemon-
+ //# sourceMappingURL=tldr-daemon-B_oLRD8z.js.map

package/dist/dashboard/{tldr-daemon-BNFyS7W_.js.map → tldr-daemon-B_oLRD8z.js.map}
RENAMED
@@ -1 +1 @@
-
{"version":3,"file":"tldr-daemon-BNFyS7W_.js","names":[],"sources":["../../src/lib/tldr-daemon.ts"],"sourcesContent":["/**\n * TLDR Daemon Service\n *\n * Manages llm-tldr daemon lifecycle for project root and workspaces.\n * Provides code analysis and summarization for token-efficient agent work.\n */\n\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport { existsSync, writeFileSync, readFileSync, mkdirSync, unlinkSync } from 'fs';\nimport { join } from 'path';\nimport { createHash } from 'crypto';\nimport { PANOPTICON_HOME } from './paths.js';\n\n// ============================================================================\n// TLDR Session Metrics (PAN-236)\n// ============================================================================\n\n/**\n * Per-session TLDR metrics — delta since last captured cost event.\n *\n * Metrics are file-based, stored in <workspace>/.tldr/:\n * interceptions.log — written by tldr-read-enforcer on each TLDR serve\n * bypasses.log — written by tldr-read-enforcer on each deliberate bypass\n * metrics-checkpoint.json — tracks line offsets for delta (per-cost-event) reporting\n */\nexport interface TldrSessionMetrics {\n interceptions: number; // TLDR summaries served since last checkpoint\n bypasses: number; // TLDR bypasses since last checkpoint\n estimatedTokensSaved: number; // Rough token savings (fullTokens - ~1000 per interception)\n filesAnalyzed: string[]; // Unique files summarized in this window\n bypassReasons: Record<string, number>; // e.g. { \"offset-limit\": 3, \"recently-edited\": 1 }\n}\n\n/** Checkpoint persisted to .tldr/metrics-checkpoint.json */\ninterface TldrMetricsCheckpoint {\n interceptionsLine: number;\n bypassesLine: number;\n capturedAt: string;\n}\n\n/**\n * Read TLDR session metrics for a workspace from log files.\n *\n * @param workspacePath - Workspace root (where .tldr/ lives)\n * @param sinceCheckpoint - Only return metrics since the last captured checkpoint\n */\nexport function getTldrMetrics(workspacePath: string, sinceCheckpoint = false): TldrSessionMetrics {\n const tldrDir = join(workspacePath, '.tldr');\n const interceptionsLog = join(tldrDir, 'interceptions.log');\n const bypassesLog = join(tldrDir, 'bypasses.log');\n const checkpointFile = join(tldrDir, 'metrics-checkpoint.json');\n\n let interceptionsStartLine = 0;\n let bypassesStartLine = 0;\n\n if (sinceCheckpoint && existsSync(checkpointFile)) {\n try {\n const checkpoint = JSON.parse(readFileSync(checkpointFile, 'utf-8')) as TldrMetricsCheckpoint;\n interceptionsStartLine = checkpoint.interceptionsLine || 0;\n bypassesStartLine = checkpoint.bypassesLine || 0;\n } catch { /* start from 0 on parse error */ }\n }\n\n // Parse interceptions log: each line is \"timestamp file_size rel_path\"\n const allInterceptionLines = existsSync(interceptionsLog)\n ? 
readFileSync(interceptionsLog, 'utf-8').split('\\n').filter(l => l.trim())\n : [];\n const newInterceptions = allInterceptionLines.slice(interceptionsStartLine);\n\n let estimatedTokensSaved = 0;\n const filesAnalyzed: string[] = [];\n\n for (const line of newInterceptions) {\n const parts = line.trim().split(' ');\n if (parts.length >= 3) {\n const fileSizeBytes = parseInt(parts[1], 10) || 0;\n const relPath = parts.slice(2).join(' ');\n // Rough estimate: ~1 token per 4 bytes for code; TLDR summary is ~1000 tokens\n const fullTokens = Math.round(fileSizeBytes / 4);\n estimatedTokensSaved += Math.max(0, fullTokens - 1000);\n if (relPath && !filesAnalyzed.includes(relPath)) {\n filesAnalyzed.push(relPath);\n }\n }\n }\n\n // Parse bypasses log: each line is \"timestamp reason [rel_path]\"\n const allBypassLines = existsSync(bypassesLog)\n ? readFileSync(bypassesLog, 'utf-8').split('\\n').filter(l => l.trim())\n : [];\n const newBypasses = allBypassLines.slice(bypassesStartLine);\n const bypassReasons: Record<string, number> = {};\n\n for (const line of newBypasses) {\n const parts = line.trim().split(' ');\n if (parts.length >= 2) {\n const reason = parts[1];\n bypassReasons[reason] = (bypassReasons[reason] || 0) + 1;\n }\n }\n\n return {\n interceptions: newInterceptions.length,\n bypasses: newBypasses.length,\n estimatedTokensSaved,\n filesAnalyzed,\n bypassReasons,\n };\n}\n\n/**\n * Capture TLDR metrics since the last checkpoint and advance the checkpoint.\n *\n * Call this once per cost event batch to get the delta metrics for that batch,\n * then update the checkpoint so the next call starts from here.\n *\n * @param workspacePath - Workspace root (where .tldr/ lives)\n * @returns Metrics delta since last capture, or null if no .tldr/ directory exists\n */\nexport function captureTldrMetrics(workspacePath: string): TldrSessionMetrics | null {\n const tldrDir = join(workspacePath, '.tldr');\n if (!existsSync(tldrDir)) {\n return null;\n }\n\n const metrics = getTldrMetrics(workspacePath, true);\n\n // Advance checkpoint to current line counts\n const interceptionsLog = join(tldrDir, 'interceptions.log');\n const bypassesLog = join(tldrDir, 'bypasses.log');\n const checkpointFile = join(tldrDir, 'metrics-checkpoint.json');\n\n const interceptionsTotal = existsSync(interceptionsLog)\n ? readFileSync(interceptionsLog, 'utf-8').split('\\n').filter(l => l.trim()).length\n : 0;\n const bypassesTotal = existsSync(bypassesLog)\n ? 
readFileSync(bypassesLog, 'utf-8').split('\\n').filter(l => l.trim()).length\n : 0;\n\n const checkpoint: TldrMetricsCheckpoint = {\n interceptionsLine: interceptionsTotal,\n bypassesLine: bypassesTotal,\n capturedAt: new Date().toISOString(),\n };\n\n try {\n writeFileSync(checkpointFile, JSON.stringify(checkpoint, null, 2), 'utf-8');\n } catch { /* non-fatal — metrics still returned */ }\n\n return metrics;\n}\n\nconst execAsync = promisify(exec);\n\n/** Directory for TLDR daemon state files */\nconst TLDR_STATE_DIR = join(PANOPTICON_HOME, 'tldr');\n\n/** Ensure TLDR state directory exists */\nfunction ensureTldrStateDir(): void {\n if (!existsSync(TLDR_STATE_DIR)) {\n mkdirSync(TLDR_STATE_DIR, { recursive: true });\n }\n}\n\n/**\n * TLDR daemon state\n */\ninterface TldrDaemonState {\n running: boolean;\n pid?: number;\n startedAt?: string;\n workspacePath: string;\n venvPath: string;\n}\n\n/**\n * TLDR daemon status\n */\nexport interface TldrDaemonStatus {\n running: boolean;\n pid?: number;\n startedAt?: Date;\n workspacePath: string;\n venvPath: string;\n healthy: boolean;\n}\n\n/**\n * Hash workspace path to create a stable identifier\n */\nfunction hashWorkspacePath(path: string): string {\n return createHash('sha256').update(path).digest('hex').substring(0, 16);\n}\n\n/**\n * Get state file path for a workspace\n */\nfunction getStateFilePath(workspacePath: string): string {\n ensureTldrStateDir();\n const hash = hashWorkspacePath(workspacePath);\n const stateDir = join(TLDR_STATE_DIR, hash);\n if (!existsSync(stateDir)) {\n mkdirSync(stateDir, { recursive: true });\n }\n return join(stateDir, 'daemon.json');\n}\n\n/**\n * Write daemon state to file\n */\nfunction writeStateFile(workspacePath: string, venvPath: string, running: boolean, pid?: number): void {\n try {\n const stateFile = getStateFilePath(workspacePath);\n if (running) {\n const state: TldrDaemonState = {\n running: true,\n pid: pid || process.pid,\n startedAt: new Date().toISOString(),\n workspacePath,\n venvPath,\n };\n writeFileSync(stateFile, JSON.stringify(state, null, 2));\n } else {\n if (existsSync(stateFile)) {\n unlinkSync(stateFile);\n }\n }\n } catch (error) {\n console.warn('Failed to write TLDR daemon state file:', error);\n }\n}\n\n/**\n * Read daemon state from file\n */\nfunction readStateFile(workspacePath: string): TldrDaemonState | null {\n try {\n const stateFile = getStateFilePath(workspacePath);\n if (!existsSync(stateFile)) {\n return null;\n }\n\n const data = JSON.parse(readFileSync(stateFile, 'utf-8')) as TldrDaemonState;\n\n // Verify the process is still running\n if (data.pid) {\n try {\n process.kill(data.pid, 0); // Signal 0 checks if process exists\n return data;\n } catch {\n // Process doesn't exist - clean up stale state file\n unlinkSync(stateFile);\n return null;\n }\n }\n\n return data;\n } catch {\n // State file doesn't exist or is corrupted\n return null;\n }\n}\n\n/**\n * TLDR Daemon Service\n *\n * Manages llm-tldr daemons for project root and workspaces.\n */\nexport class TldrDaemonService {\n private workspacePath: string;\n private venvPath: string;\n\n /**\n * Create a new TLDR daemon service for a workspace\n *\n * @param workspacePath - Path to the workspace (project root or workspace directory)\n * @param venvPath - Path to the Python venv containing llm-tldr\n */\n constructor(workspacePath: string, venvPath: string) {\n this.workspacePath = workspacePath;\n this.venvPath = venvPath;\n }\n\n /**\n * Start the TLDR daemon\n *\n * @param background - Run daemon in 
background (default: true)\n */\n async start(background = true): Promise<void> {\n // Check if daemon is already running\n const currentState = readStateFile(this.workspacePath);\n if (currentState?.running) {\n console.warn(`TLDR daemon already running for ${this.workspacePath} (PID: ${currentState.pid})`);\n return;\n }\n\n // Verify venv and tldr binary exist\n const tldrBin = join(this.venvPath, 'bin', 'tldr');\n if (!existsSync(tldrBin)) {\n throw new Error(`tldr binary not found at ${tldrBin}. Ensure llm-tldr is installed in the venv.`);\n }\n\n console.log(`Starting TLDR daemon for ${this.workspacePath}...`);\n\n try {\n // Start daemon with project path\n const cmd = background\n ? `cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon start --project \"${this.workspacePath}\" >/dev/null 2>&1 &`\n : `cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon start --project \"${this.workspacePath}\"`;\n\n const { stdout, stderr } = await execAsync(cmd);\n\n if (stderr && !stderr.includes('started')) {\n console.warn(`TLDR daemon start warning: ${stderr}`);\n }\n\n // Give daemon a moment to start and write its PID file\n await new Promise(r => setTimeout(r, 500));\n\n // Try to get PID from tldr's status command\n let pid: number | undefined;\n try {\n const statusResult = await execAsync(`cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon status`);\n const pidMatch = statusResult.stdout.match(/PID[:\\s]+(\\d+)/i);\n if (pidMatch) {\n pid = parseInt(pidMatch[1]);\n }\n } catch {\n // Status command failed - daemon might not expose PID\n }\n\n writeStateFile(this.workspacePath, this.venvPath, true, pid);\n console.log(`✓ TLDR daemon started for ${this.workspacePath}${pid ? ` (PID: ${pid})` : ''}`);\n } catch (error) {\n const errorMessage = error instanceof Error ? 
error.message : String(error);\n throw new Error(`Failed to start TLDR daemon: ${errorMessage}`);\n }\n }\n\n /**\n * Stop the TLDR daemon\n */\n async stop(): Promise<void> {\n const currentState = readStateFile(this.workspacePath);\n if (!currentState?.running) {\n console.warn(`TLDR daemon not running for ${this.workspacePath}`);\n return;\n }\n\n const tldrBin = join(this.venvPath, 'bin', 'tldr');\n if (!existsSync(tldrBin)) {\n console.warn(`tldr binary not found at ${tldrBin}, cleaning up state file`);\n writeStateFile(this.workspacePath, this.venvPath, false);\n return;\n }\n\n console.log(`Stopping TLDR daemon for ${this.workspacePath}...`);\n\n try {\n // Stop daemon\n await execAsync(`cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon stop`);\n\n writeStateFile(this.workspacePath, this.venvPath, false);\n console.log(`✓ TLDR daemon stopped for ${this.workspacePath}`);\n } catch (error) {\n // If stop fails, try to kill the process directly\n if (currentState.pid) {\n try {\n process.kill(currentState.pid, 'SIGTERM');\n console.log(`✓ Forcefully stopped TLDR daemon (PID: ${currentState.pid})`);\n } catch (killError) {\n console.warn(`Failed to kill TLDR daemon process: ${killError}`);\n }\n }\n\n // Clean up state file regardless\n writeStateFile(this.workspacePath, this.venvPath, false);\n }\n }\n\n /**\n * Get daemon status\n */\n async getStatus(): Promise<TldrDaemonStatus> {\n const state = readStateFile(this.workspacePath);\n\n if (!state?.running) {\n return {\n running: false,\n workspacePath: this.workspacePath,\n venvPath: this.venvPath,\n healthy: false,\n };\n }\n\n // Check health\n const healthy = await this.checkHealth();\n\n return {\n running: true,\n pid: state.pid,\n startedAt: state.startedAt ? new Date(state.startedAt) : undefined,\n workspacePath: this.workspacePath,\n venvPath: this.venvPath,\n healthy,\n };\n }\n\n /**\n * Check if daemon is healthy (can respond to status queries)\n */\n async checkHealth(): Promise<boolean> {\n const tldrBin = join(this.venvPath, 'bin', 'tldr');\n if (!existsSync(tldrBin)) {\n return false;\n }\n\n try {\n // Try to get daemon status\n await execAsync(`cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon status`, { timeout: 3000 });\n return true;\n } catch {\n return false;\n }\n }\n\n /**\n * Restart the daemon\n */\n async restart(): Promise<void> {\n console.log(`Restarting TLDR daemon for ${this.workspacePath}...`);\n await this.stop();\n await new Promise(r => setTimeout(r, 1000)); // Wait for cleanup\n await this.start();\n }\n\n /**\n * Warm the index (trigger initial analysis)\n *\n * @param background - Run in background (default: true)\n */\n async warm(background = true): Promise<void> {\n const tldrBin = join(this.venvPath, 'bin', 'tldr');\n if (!existsSync(tldrBin)) {\n throw new Error(`tldr binary not found at ${tldrBin}`);\n }\n\n console.log(`Warming TLDR index for ${this.workspacePath}...`);\n\n try {\n const cmd = background\n ? `cd \"${this.workspacePath}\" && \"${tldrBin}\" warm . >/dev/null 2>&1 &`\n : `cd \"${this.workspacePath}\" && \"${tldrBin}\" warm .`;\n\n await execAsync(cmd);\n console.log(`✓ TLDR index warming initiated for ${this.workspacePath}`);\n } catch (error) {\n const errorMessage = error instanceof Error ? error.message : String(error);\n throw new Error(`Failed to warm TLDR index: ${errorMessage}`);\n }\n }\n\n /**\n * Check if daemon is running\n */\n isRunning(): boolean {\n const state = readStateFile(this.workspacePath);\n return state?.running ?? 
false;\n }\n\n /**\n * Get workspace path\n */\n getWorkspacePath(): string {\n return this.workspacePath;\n }\n\n /**\n * Get venv path\n */\n getVenvPath(): string {\n return this.venvPath;\n }\n}\n\n/**\n * Global registry of TLDR daemon services by workspace path\n */\nconst daemonRegistry = new Map<string, TldrDaemonService>();\n\n/**\n * Get or create a TLDR daemon service for a workspace\n *\n * @param workspacePath - Path to the workspace\n * @param venvPath - Path to the Python venv\n */\nexport function getTldrDaemonService(workspacePath: string, venvPath: string): TldrDaemonService {\n const existing = daemonRegistry.get(workspacePath);\n if (existing) {\n return existing;\n }\n\n const service = new TldrDaemonService(workspacePath, venvPath);\n daemonRegistry.set(workspacePath, service);\n return service;\n}\n\n/**\n * Remove a daemon service from the registry\n *\n * @param workspacePath - Path to the workspace\n */\nexport function removeTldrDaemonService(workspacePath: string): void {\n daemonRegistry.delete(workspacePath);\n}\n\n/**\n * List all registered daemon services\n */\nexport function listTldrDaemonServices(): TldrDaemonService[] {\n return Array.from(daemonRegistry.values());\n}\n"],"mappings":";;;;;;;;;;;;;;;AA+JA,SAAS,qBAA2B;AAClC,KAAI,CAAC,WAAW,eAAe,CAC7B,WAAU,gBAAgB,EAAE,WAAW,MAAM,CAAC;;;;;AA8BlD,SAAS,kBAAkB,MAAsB;AAC/C,QAAO,WAAW,SAAS,CAAC,OAAO,KAAK,CAAC,OAAO,MAAM,CAAC,UAAU,GAAG,GAAG;;;;;AAMzE,SAAS,iBAAiB,eAA+B;AACvD,qBAAoB;CAEpB,MAAM,WAAW,KAAK,gBADT,kBAAkB,cAAc,CACF;AAC3C,KAAI,CAAC,WAAW,SAAS,CACvB,WAAU,UAAU,EAAE,WAAW,MAAM,CAAC;AAE1C,QAAO,KAAK,UAAU,cAAc;;;;;AAMtC,SAAS,eAAe,eAAuB,UAAkB,SAAkB,KAAoB;AACrG,KAAI;EACF,MAAM,YAAY,iBAAiB,cAAc;AACjD,MAAI,SAAS;GACX,MAAM,QAAyB;IAC7B,SAAS;IACT,KAAK,OAAO,QAAQ;IACpB,4BAAW,IAAI,MAAM,EAAC,aAAa;IACnC;IACA;IACD;AACD,iBAAc,WAAW,KAAK,UAAU,OAAO,MAAM,EAAE,CAAC;aAEpD,WAAW,UAAU,CACvB,YAAW,UAAU;UAGlB,OAAO;AACd,UAAQ,KAAK,2CAA2C,MAAM;;;;;;AAOlE,SAAS,cAAc,eAA+C;AACpE,KAAI;EACF,MAAM,YAAY,iBAAiB,cAAc;AACjD,MAAI,CAAC,WAAW,UAAU,CACxB,QAAO;EAGT,MAAM,OAAO,KAAK,MAAM,aAAa,WAAW,QAAQ,CAAC;AAGzD,MAAI,KAAK,IACP,KAAI;AACF,WAAQ,KAAK,KAAK,KAAK,EAAE;AACzB,UAAO;UACD;AAEN,cAAW,UAAU;AACrB,UAAO;;AAIX,SAAO;SACD;AAEN,SAAO;;;;;;;;;AA6OX,SAAgB,qBAAqB,eAAuB,UAAqC;CAC/F,MAAM,WAAW,eAAe,IAAI,cAAc;AAClD,KAAI,SACF,QAAO;CAGT,MAAM,UAAU,IAAI,kBAAkB,eAAe,SAAS;AAC9D,gBAAe,IAAI,eAAe,QAAQ;AAC1C,QAAO;;;;aA7eoC;AA6IvC,aAAY,UAAU,KAAK;AAG3B,kBAAiB,KAAK,iBAAiB,OAAO;AAiHvC,qBAAb,MAA+B;EAC7B;EACA;;;;;;;EAQA,YAAY,eAAuB,UAAkB;AACnD,QAAK,gBAAgB;AACrB,QAAK,WAAW;;;;;;;EAQlB,MAAM,MAAM,aAAa,MAAqB;GAE5C,MAAM,eAAe,cAAc,KAAK,cAAc;AACtD,OAAI,cAAc,SAAS;AACzB,YAAQ,KAAK,mCAAmC,KAAK,cAAc,SAAS,aAAa,IAAI,GAAG;AAChG;;GAIF,MAAM,UAAU,KAAK,KAAK,UAAU,OAAO,OAAO;AAClD,OAAI,CAAC,WAAW,QAAQ,CACtB,OAAM,IAAI,MAAM,4BAA4B,QAAQ,6CAA6C;AAGnG,WAAQ,IAAI,4BAA4B,KAAK,cAAc,KAAK;AAEhE,OAAI;IAMF,MAAM,EAAE,QAAQ,WAAW,MAAM,UAJrB,aACR,OAAO,KAAK,cAAc,QAAQ,QAAQ,4BAA4B,KAAK,cAAc,uBACzF,OAAO,KAAK,cAAc,QAAQ,QAAQ,4BAA4B,KAAK,cAAc,GAE9C;AAE/C,QAAI,UAAU,CAAC,OAAO,SAAS,UAAU,CACvC,SAAQ,KAAK,8BAA8B,SAAS;AAItD,UAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,IAAI,CAAC;IAG1C,IAAI;AACJ,QAAI;KAEF,MAAM,YADe,MAAM,UAAU,OAAO,KAAK,cAAc,QAAQ,QAAQ,iBAAiB,EAClE,OAAO,MAAM,kBAAkB;AAC7D,SAAI,SACF,OAAM,SAAS,SAAS,GAAG;YAEvB;AAIR,mBAAe,KAAK,eAAe,KAAK,UAAU,MAAM,IAAI;AAC5D,YAAQ,IAAI,6BAA6B,KAAK,gBAAgB,MAAM,UAAU,IAAI,KAAK,KAAK;YACrF,OAAO;IACd,MAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AAC3E,UAAM,IAAI,MAAM,gCAAgC,eAAe;;;;;;EAOnE,MAAM,OAAsB;GAC1B,MAAM,eAAe,cAAc,KAAK,cAAc;AACtD,OAAI,CAAC,cAAc,SAAS;AAC1B,YAAQ,KAAK,+BAA+B,KAAK,gBAAgB;AACjE;;GAGF,MAAM,UAAU,KAAK,KAAK,UAAU,OAAO,OAAO;A
AClD,OAAI,CAAC,WAAW,QAAQ,EAAE;AACxB,YAAQ,KAAK,4BAA4B,QAAQ,0BAA0B;AAC3E,mBAAe,KAAK,eAAe,KAAK,UAAU,MAAM;AACxD;;AAGF,WAAQ,IAAI,4BAA4B,KAAK,cAAc,KAAK;AAEhE,OAAI;AAEF,UAAM,UAAU,OAAO,KAAK,cAAc,QAAQ,QAAQ,eAAe;AAEzE,mBAAe,KAAK,eAAe,KAAK,UAAU,MAAM;AACxD,YAAQ,IAAI,6BAA6B,KAAK,gBAAgB;YACvD,OAAO;AAEd,QAAI,aAAa,IACf,KAAI;AACF,aAAQ,KAAK,aAAa,KAAK,UAAU;AACzC,aAAQ,IAAI,0CAA0C,aAAa,IAAI,GAAG;aACnE,WAAW;AAClB,aAAQ,KAAK,uCAAuC,YAAY;;AAKpE,mBAAe,KAAK,eAAe,KAAK,UAAU,MAAM;;;;;;EAO5D,MAAM,YAAuC;GAC3C,MAAM,QAAQ,cAAc,KAAK,cAAc;AAE/C,OAAI,CAAC,OAAO,QACV,QAAO;IACL,SAAS;IACT,eAAe,KAAK;IACpB,UAAU,KAAK;IACf,SAAS;IACV;GAIH,MAAM,UAAU,MAAM,KAAK,aAAa;AAExC,UAAO;IACL,SAAS;IACT,KAAK,MAAM;IACX,WAAW,MAAM,YAAY,IAAI,KAAK,MAAM,UAAU,GAAG,KAAA;IACzD,eAAe,KAAK;IACpB,UAAU,KAAK;IACf;IACD;;;;;EAMH,MAAM,cAAgC;GACpC,MAAM,UAAU,KAAK,KAAK,UAAU,OAAO,OAAO;AAClD,OAAI,CAAC,WAAW,QAAQ,CACtB,QAAO;AAGT,OAAI;AAEF,UAAM,UAAU,OAAO,KAAK,cAAc,QAAQ,QAAQ,kBAAkB,EAAE,SAAS,KAAM,CAAC;AAC9F,WAAO;WACD;AACN,WAAO;;;;;;EAOX,MAAM,UAAyB;AAC7B,WAAQ,IAAI,8BAA8B,KAAK,cAAc,KAAK;AAClE,SAAM,KAAK,MAAM;AACjB,SAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,IAAK,CAAC;AAC3C,SAAM,KAAK,OAAO;;;;;;;EAQpB,MAAM,KAAK,aAAa,MAAqB;GAC3C,MAAM,UAAU,KAAK,KAAK,UAAU,OAAO,OAAO;AAClD,OAAI,CAAC,WAAW,QAAQ,CACtB,OAAM,IAAI,MAAM,4BAA4B,UAAU;AAGxD,WAAQ,IAAI,0BAA0B,KAAK,cAAc,KAAK;AAE9D,OAAI;AAKF,UAAM,UAJM,aACR,OAAO,KAAK,cAAc,QAAQ,QAAQ,8BAC1C,OAAO,KAAK,cAAc,QAAQ,QAAQ,UAE1B;AACpB,YAAQ,IAAI,sCAAsC,KAAK,gBAAgB;YAChE,OAAO;IACd,MAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AAC3E,UAAM,IAAI,MAAM,8BAA8B,eAAe;;;;;;EAOjE,YAAqB;AAEnB,UADc,cAAc,KAAK,cAAc,EACjC,WAAW;;;;;EAM3B,mBAA2B;AACzB,UAAO,KAAK;;;;;EAMd,cAAsB;AACpB,UAAO,KAAK;;;AAOV,kCAAiB,IAAI,KAAgC"}
+
{"version":3,"file":"tldr-daemon-B_oLRD8z.js","names":[],"sources":["../../src/lib/tldr-daemon.ts"],"sourcesContent":["/**\n * TLDR Daemon Service\n *\n * Manages llm-tldr daemon lifecycle for project root and workspaces.\n * Provides code analysis and summarization for token-efficient agent work.\n */\n\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport { existsSync, writeFileSync, readFileSync, mkdirSync, unlinkSync } from 'fs';\nimport { join } from 'path';\nimport { createHash } from 'crypto';\nimport { PANOPTICON_HOME } from './paths.js';\n\n// ============================================================================\n// TLDR Session Metrics (PAN-236)\n// ============================================================================\n\n/**\n * Per-session TLDR metrics — delta since last captured cost event.\n *\n * Metrics are file-based, stored in <workspace>/.tldr/:\n * interceptions.log — written by tldr-read-enforcer on each TLDR serve\n * bypasses.log — written by tldr-read-enforcer on each deliberate bypass\n * metrics-checkpoint.json — tracks line offsets for delta (per-cost-event) reporting\n */\nexport interface TldrSessionMetrics {\n interceptions: number; // TLDR summaries served since last checkpoint\n bypasses: number; // TLDR bypasses since last checkpoint\n estimatedTokensSaved: number; // Rough token savings (fullTokens - ~1000 per interception)\n filesAnalyzed: string[]; // Unique files summarized in this window\n bypassReasons: Record<string, number>; // e.g. { \"offset-limit\": 3, \"recently-edited\": 1 }\n}\n\n/** Checkpoint persisted to .tldr/metrics-checkpoint.json */\ninterface TldrMetricsCheckpoint {\n interceptionsLine: number;\n bypassesLine: number;\n capturedAt: string;\n}\n\n/**\n * Read TLDR session metrics for a workspace from log files.\n *\n * @param workspacePath - Workspace root (where .tldr/ lives)\n * @param sinceCheckpoint - Only return metrics since the last captured checkpoint\n */\nexport function getTldrMetrics(workspacePath: string, sinceCheckpoint = false): TldrSessionMetrics {\n const tldrDir = join(workspacePath, '.tldr');\n const interceptionsLog = join(tldrDir, 'interceptions.log');\n const bypassesLog = join(tldrDir, 'bypasses.log');\n const checkpointFile = join(tldrDir, 'metrics-checkpoint.json');\n\n let interceptionsStartLine = 0;\n let bypassesStartLine = 0;\n\n if (sinceCheckpoint && existsSync(checkpointFile)) {\n try {\n const checkpoint = JSON.parse(readFileSync(checkpointFile, 'utf-8')) as TldrMetricsCheckpoint;\n interceptionsStartLine = checkpoint.interceptionsLine || 0;\n bypassesStartLine = checkpoint.bypassesLine || 0;\n } catch { /* start from 0 on parse error */ }\n }\n\n // Parse interceptions log: each line is \"timestamp file_size rel_path\"\n const allInterceptionLines = existsSync(interceptionsLog)\n ? 
readFileSync(interceptionsLog, 'utf-8').split('\\n').filter(l => l.trim())\n : [];\n const newInterceptions = allInterceptionLines.slice(interceptionsStartLine);\n\n let estimatedTokensSaved = 0;\n const filesAnalyzed: string[] = [];\n\n for (const line of newInterceptions) {\n const parts = line.trim().split(' ');\n if (parts.length >= 3) {\n const fileSizeBytes = parseInt(parts[1], 10) || 0;\n const relPath = parts.slice(2).join(' ');\n // Rough estimate: ~1 token per 4 bytes for code; TLDR summary is ~1000 tokens\n const fullTokens = Math.round(fileSizeBytes / 4);\n estimatedTokensSaved += Math.max(0, fullTokens - 1000);\n if (relPath && !filesAnalyzed.includes(relPath)) {\n filesAnalyzed.push(relPath);\n }\n }\n }\n\n // Parse bypasses log: each line is \"timestamp reason [rel_path]\"\n const allBypassLines = existsSync(bypassesLog)\n ? readFileSync(bypassesLog, 'utf-8').split('\\n').filter(l => l.trim())\n : [];\n const newBypasses = allBypassLines.slice(bypassesStartLine);\n const bypassReasons: Record<string, number> = {};\n\n for (const line of newBypasses) {\n const parts = line.trim().split(' ');\n if (parts.length >= 2) {\n const reason = parts[1];\n bypassReasons[reason] = (bypassReasons[reason] || 0) + 1;\n }\n }\n\n return {\n interceptions: newInterceptions.length,\n bypasses: newBypasses.length,\n estimatedTokensSaved,\n filesAnalyzed,\n bypassReasons,\n };\n}\n\n/**\n * Capture TLDR metrics since the last checkpoint and advance the checkpoint.\n *\n * Call this once per cost event batch to get the delta metrics for that batch,\n * then update the checkpoint so the next call starts from here.\n *\n * @param workspacePath - Workspace root (where .tldr/ lives)\n * @returns Metrics delta since last capture, or null if no .tldr/ directory exists\n */\nexport function captureTldrMetrics(workspacePath: string): TldrSessionMetrics | null {\n const tldrDir = join(workspacePath, '.tldr');\n if (!existsSync(tldrDir)) {\n return null;\n }\n\n const metrics = getTldrMetrics(workspacePath, true);\n\n // Advance checkpoint to current line counts\n const interceptionsLog = join(tldrDir, 'interceptions.log');\n const bypassesLog = join(tldrDir, 'bypasses.log');\n const checkpointFile = join(tldrDir, 'metrics-checkpoint.json');\n\n const interceptionsTotal = existsSync(interceptionsLog)\n ? readFileSync(interceptionsLog, 'utf-8').split('\\n').filter(l => l.trim()).length\n : 0;\n const bypassesTotal = existsSync(bypassesLog)\n ? 
readFileSync(bypassesLog, 'utf-8').split('\\n').filter(l => l.trim()).length\n : 0;\n\n const checkpoint: TldrMetricsCheckpoint = {\n interceptionsLine: interceptionsTotal,\n bypassesLine: bypassesTotal,\n capturedAt: new Date().toISOString(),\n };\n\n try {\n writeFileSync(checkpointFile, JSON.stringify(checkpoint, null, 2), 'utf-8');\n } catch { /* non-fatal — metrics still returned */ }\n\n return metrics;\n}\n\nconst execAsync = promisify(exec);\n\n/** Directory for TLDR daemon state files */\nconst TLDR_STATE_DIR = join(PANOPTICON_HOME, 'tldr');\n\n/** Ensure TLDR state directory exists */\nfunction ensureTldrStateDir(): void {\n if (!existsSync(TLDR_STATE_DIR)) {\n mkdirSync(TLDR_STATE_DIR, { recursive: true });\n }\n}\n\n/**\n * TLDR daemon state\n */\ninterface TldrDaemonState {\n running: boolean;\n pid?: number;\n startedAt?: string;\n workspacePath: string;\n venvPath: string;\n}\n\n/**\n * TLDR daemon status\n */\nexport interface TldrDaemonStatus {\n running: boolean;\n pid?: number;\n startedAt?: Date;\n workspacePath: string;\n venvPath: string;\n healthy: boolean;\n}\n\n/**\n * Hash workspace path to create a stable identifier\n */\nfunction hashWorkspacePath(path: string): string {\n return createHash('sha256').update(path).digest('hex').substring(0, 16);\n}\n\n/**\n * Get state file path for a workspace\n */\nfunction getStateFilePath(workspacePath: string): string {\n ensureTldrStateDir();\n const hash = hashWorkspacePath(workspacePath);\n const stateDir = join(TLDR_STATE_DIR, hash);\n if (!existsSync(stateDir)) {\n mkdirSync(stateDir, { recursive: true });\n }\n return join(stateDir, 'daemon.json');\n}\n\n/**\n * Write daemon state to file\n */\nfunction writeStateFile(workspacePath: string, venvPath: string, running: boolean, pid?: number): void {\n try {\n const stateFile = getStateFilePath(workspacePath);\n if (running) {\n const state: TldrDaemonState = {\n running: true,\n pid: pid || process.pid,\n startedAt: new Date().toISOString(),\n workspacePath,\n venvPath,\n };\n writeFileSync(stateFile, JSON.stringify(state, null, 2));\n } else {\n if (existsSync(stateFile)) {\n unlinkSync(stateFile);\n }\n }\n } catch (error) {\n console.warn('Failed to write TLDR daemon state file:', error);\n }\n}\n\n/**\n * Read daemon state from file\n */\nfunction readStateFile(workspacePath: string): TldrDaemonState | null {\n try {\n const stateFile = getStateFilePath(workspacePath);\n if (!existsSync(stateFile)) {\n return null;\n }\n\n const data = JSON.parse(readFileSync(stateFile, 'utf-8')) as TldrDaemonState;\n\n // Verify the process is still running\n if (data.pid) {\n try {\n process.kill(data.pid, 0); // Signal 0 checks if process exists\n return data;\n } catch {\n // Process doesn't exist - clean up stale state file\n unlinkSync(stateFile);\n return null;\n }\n }\n\n return data;\n } catch {\n // State file doesn't exist or is corrupted\n return null;\n }\n}\n\n/**\n * TLDR Daemon Service\n *\n * Manages llm-tldr daemons for project root and workspaces.\n */\nexport class TldrDaemonService {\n private workspacePath: string;\n private venvPath: string;\n\n /**\n * Create a new TLDR daemon service for a workspace\n *\n * @param workspacePath - Path to the workspace (project root or workspace directory)\n * @param venvPath - Path to the Python venv containing llm-tldr\n */\n constructor(workspacePath: string, venvPath: string) {\n this.workspacePath = workspacePath;\n this.venvPath = venvPath;\n }\n\n /**\n * Start the TLDR daemon\n *\n * @param background - Run daemon in 
background (default: true)\n */\n async start(background = true): Promise<void> {\n // Check if daemon is already running\n const currentState = readStateFile(this.workspacePath);\n if (currentState?.running) {\n console.warn(`TLDR daemon already running for ${this.workspacePath} (PID: ${currentState.pid})`);\n return;\n }\n\n // Verify venv and tldr binary exist\n const tldrBin = join(this.venvPath, 'bin', 'tldr');\n if (!existsSync(tldrBin)) {\n throw new Error(`tldr binary not found at ${tldrBin}. Ensure llm-tldr is installed in the venv.`);\n }\n\n console.log(`Starting TLDR daemon for ${this.workspacePath}...`);\n\n try {\n // Start daemon with project path\n const cmd = background\n ? `cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon start --project \"${this.workspacePath}\" >/dev/null 2>&1 &`\n : `cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon start --project \"${this.workspacePath}\"`;\n\n const { stdout, stderr } = await execAsync(cmd);\n\n if (stderr && !stderr.includes('started')) {\n console.warn(`TLDR daemon start warning: ${stderr}`);\n }\n\n // Give daemon a moment to start and write its PID file\n await new Promise(r => setTimeout(r, 500));\n\n // Try to get PID from tldr's status command\n let pid: number | undefined;\n try {\n const statusResult = await execAsync(`cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon status`);\n const pidMatch = statusResult.stdout.match(/PID[:\\s]+(\\d+)/i);\n if (pidMatch) {\n pid = parseInt(pidMatch[1]);\n }\n } catch {\n // Status command failed - daemon might not expose PID\n }\n\n writeStateFile(this.workspacePath, this.venvPath, true, pid);\n console.log(`✓ TLDR daemon started for ${this.workspacePath}${pid ? ` (PID: ${pid})` : ''}`);\n } catch (error) {\n const errorMessage = error instanceof Error ? 
error.message : String(error);\n throw new Error(`Failed to start TLDR daemon: ${errorMessage}`);\n }\n }\n\n /**\n * Stop the TLDR daemon\n */\n async stop(): Promise<void> {\n const currentState = readStateFile(this.workspacePath);\n if (!currentState?.running) {\n console.warn(`TLDR daemon not running for ${this.workspacePath}`);\n return;\n }\n\n const tldrBin = join(this.venvPath, 'bin', 'tldr');\n if (!existsSync(tldrBin)) {\n console.warn(`tldr binary not found at ${tldrBin}, cleaning up state file`);\n writeStateFile(this.workspacePath, this.venvPath, false);\n return;\n }\n\n console.log(`Stopping TLDR daemon for ${this.workspacePath}...`);\n\n try {\n // Stop daemon\n await execAsync(`cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon stop`);\n\n writeStateFile(this.workspacePath, this.venvPath, false);\n console.log(`✓ TLDR daemon stopped for ${this.workspacePath}`);\n } catch (error) {\n // If stop fails, try to kill the process directly\n if (currentState.pid) {\n try {\n process.kill(currentState.pid, 'SIGTERM');\n console.log(`✓ Forcefully stopped TLDR daemon (PID: ${currentState.pid})`);\n } catch (killError) {\n console.warn(`Failed to kill TLDR daemon process: ${killError}`);\n }\n }\n\n // Clean up state file regardless\n writeStateFile(this.workspacePath, this.venvPath, false);\n }\n }\n\n /**\n * Get daemon status\n */\n async getStatus(): Promise<TldrDaemonStatus> {\n const state = readStateFile(this.workspacePath);\n\n if (!state?.running) {\n return {\n running: false,\n workspacePath: this.workspacePath,\n venvPath: this.venvPath,\n healthy: false,\n };\n }\n\n // Check health\n const healthy = await this.checkHealth();\n\n return {\n running: true,\n pid: state.pid,\n startedAt: state.startedAt ? new Date(state.startedAt) : undefined,\n workspacePath: this.workspacePath,\n venvPath: this.venvPath,\n healthy,\n };\n }\n\n /**\n * Check if daemon is healthy (can respond to status queries)\n */\n async checkHealth(): Promise<boolean> {\n const tldrBin = join(this.venvPath, 'bin', 'tldr');\n if (!existsSync(tldrBin)) {\n return false;\n }\n\n try {\n // Try to get daemon status\n await execAsync(`cd \"${this.workspacePath}\" && \"${tldrBin}\" daemon status`, { timeout: 3000 });\n return true;\n } catch {\n return false;\n }\n }\n\n /**\n * Restart the daemon\n */\n async restart(): Promise<void> {\n console.log(`Restarting TLDR daemon for ${this.workspacePath}...`);\n await this.stop();\n await new Promise(r => setTimeout(r, 1000)); // Wait for cleanup\n await this.start();\n }\n\n /**\n * Warm the index (trigger initial analysis)\n *\n * @param background - Run in background (default: true)\n */\n async warm(background = true): Promise<void> {\n const tldrBin = join(this.venvPath, 'bin', 'tldr');\n if (!existsSync(tldrBin)) {\n throw new Error(`tldr binary not found at ${tldrBin}`);\n }\n\n console.log(`Warming TLDR index for ${this.workspacePath}...`);\n\n try {\n const cmd = background\n ? `cd \"${this.workspacePath}\" && \"${tldrBin}\" warm . >/dev/null 2>&1 &`\n : `cd \"${this.workspacePath}\" && \"${tldrBin}\" warm .`;\n\n await execAsync(cmd);\n console.log(`✓ TLDR index warming initiated for ${this.workspacePath}`);\n } catch (error) {\n const errorMessage = error instanceof Error ? error.message : String(error);\n throw new Error(`Failed to warm TLDR index: ${errorMessage}`);\n }\n }\n\n /**\n * Check if daemon is running\n */\n isRunning(): boolean {\n const state = readStateFile(this.workspacePath);\n return state?.running ?? 
false;\n }\n\n /**\n * Get workspace path\n */\n getWorkspacePath(): string {\n return this.workspacePath;\n }\n\n /**\n * Get venv path\n */\n getVenvPath(): string {\n return this.venvPath;\n }\n}\n\n/**\n * Global registry of TLDR daemon services by workspace path\n */\nconst daemonRegistry = new Map<string, TldrDaemonService>();\n\n/**\n * Get or create a TLDR daemon service for a workspace\n *\n * @param workspacePath - Path to the workspace\n * @param venvPath - Path to the Python venv\n */\nexport function getTldrDaemonService(workspacePath: string, venvPath: string): TldrDaemonService {\n const existing = daemonRegistry.get(workspacePath);\n if (existing) {\n return existing;\n }\n\n const service = new TldrDaemonService(workspacePath, venvPath);\n daemonRegistry.set(workspacePath, service);\n return service;\n}\n\n/**\n * Remove a daemon service from the registry\n *\n * @param workspacePath - Path to the workspace\n */\nexport function removeTldrDaemonService(workspacePath: string): void {\n daemonRegistry.delete(workspacePath);\n}\n\n/**\n * List all registered daemon services\n */\nexport function listTldrDaemonServices(): TldrDaemonService[] {\n return Array.from(daemonRegistry.values());\n}\n"],"mappings":";;;;;;;;;;;;;;;AA+JA,SAAS,qBAA2B;AAClC,KAAI,CAAC,WAAW,eAAe,CAC7B,WAAU,gBAAgB,EAAE,WAAW,MAAM,CAAC;;;;;AA8BlD,SAAS,kBAAkB,MAAsB;AAC/C,QAAO,WAAW,SAAS,CAAC,OAAO,KAAK,CAAC,OAAO,MAAM,CAAC,UAAU,GAAG,GAAG;;;;;AAMzE,SAAS,iBAAiB,eAA+B;AACvD,qBAAoB;CAEpB,MAAM,WAAW,KAAK,gBADT,kBAAkB,cAAc,CACF;AAC3C,KAAI,CAAC,WAAW,SAAS,CACvB,WAAU,UAAU,EAAE,WAAW,MAAM,CAAC;AAE1C,QAAO,KAAK,UAAU,cAAc;;;;;AAMtC,SAAS,eAAe,eAAuB,UAAkB,SAAkB,KAAoB;AACrG,KAAI;EACF,MAAM,YAAY,iBAAiB,cAAc;AACjD,MAAI,SAAS;GACX,MAAM,QAAyB;IAC7B,SAAS;IACT,KAAK,OAAO,QAAQ;IACpB,4BAAW,IAAI,MAAM,EAAC,aAAa;IACnC;IACA;IACD;AACD,iBAAc,WAAW,KAAK,UAAU,OAAO,MAAM,EAAE,CAAC;aAEpD,WAAW,UAAU,CACvB,YAAW,UAAU;UAGlB,OAAO;AACd,UAAQ,KAAK,2CAA2C,MAAM;;;;;;AAOlE,SAAS,cAAc,eAA+C;AACpE,KAAI;EACF,MAAM,YAAY,iBAAiB,cAAc;AACjD,MAAI,CAAC,WAAW,UAAU,CACxB,QAAO;EAGT,MAAM,OAAO,KAAK,MAAM,aAAa,WAAW,QAAQ,CAAC;AAGzD,MAAI,KAAK,IACP,KAAI;AACF,WAAQ,KAAK,KAAK,KAAK,EAAE;AACzB,UAAO;UACD;AAEN,cAAW,UAAU;AACrB,UAAO;;AAIX,SAAO;SACD;AAEN,SAAO;;;;;;;;;AA6OX,SAAgB,qBAAqB,eAAuB,UAAqC;CAC/F,MAAM,WAAW,eAAe,IAAI,cAAc;AAClD,KAAI,SACF,QAAO;CAGT,MAAM,UAAU,IAAI,kBAAkB,eAAe,SAAS;AAC9D,gBAAe,IAAI,eAAe,QAAQ;AAC1C,QAAO;;;;aA7eoC;AA6IvC,aAAY,UAAU,KAAK;AAG3B,kBAAiB,KAAK,iBAAiB,OAAO;AAiHvC,qBAAb,MAA+B;EAC7B;EACA;;;;;;;EAQA,YAAY,eAAuB,UAAkB;AACnD,QAAK,gBAAgB;AACrB,QAAK,WAAW;;;;;;;EAQlB,MAAM,MAAM,aAAa,MAAqB;GAE5C,MAAM,eAAe,cAAc,KAAK,cAAc;AACtD,OAAI,cAAc,SAAS;AACzB,YAAQ,KAAK,mCAAmC,KAAK,cAAc,SAAS,aAAa,IAAI,GAAG;AAChG;;GAIF,MAAM,UAAU,KAAK,KAAK,UAAU,OAAO,OAAO;AAClD,OAAI,CAAC,WAAW,QAAQ,CACtB,OAAM,IAAI,MAAM,4BAA4B,QAAQ,6CAA6C;AAGnG,WAAQ,IAAI,4BAA4B,KAAK,cAAc,KAAK;AAEhE,OAAI;IAMF,MAAM,EAAE,QAAQ,WAAW,MAAM,UAJrB,aACR,OAAO,KAAK,cAAc,QAAQ,QAAQ,4BAA4B,KAAK,cAAc,uBACzF,OAAO,KAAK,cAAc,QAAQ,QAAQ,4BAA4B,KAAK,cAAc,GAE9C;AAE/C,QAAI,UAAU,CAAC,OAAO,SAAS,UAAU,CACvC,SAAQ,KAAK,8BAA8B,SAAS;AAItD,UAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,IAAI,CAAC;IAG1C,IAAI;AACJ,QAAI;KAEF,MAAM,YADe,MAAM,UAAU,OAAO,KAAK,cAAc,QAAQ,QAAQ,iBAAiB,EAClE,OAAO,MAAM,kBAAkB;AAC7D,SAAI,SACF,OAAM,SAAS,SAAS,GAAG;YAEvB;AAIR,mBAAe,KAAK,eAAe,KAAK,UAAU,MAAM,IAAI;AAC5D,YAAQ,IAAI,6BAA6B,KAAK,gBAAgB,MAAM,UAAU,IAAI,KAAK,KAAK;YACrF,OAAO;IACd,MAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AAC3E,UAAM,IAAI,MAAM,gCAAgC,eAAe;;;;;;EAOnE,MAAM,OAAsB;GAC1B,MAAM,eAAe,cAAc,KAAK,cAAc;AACtD,OAAI,CAAC,cAAc,SAAS;AAC1B,YAAQ,KAAK,+BAA+B,KAAK,gBAAgB;AACjE;;GAGF,MAAM,UAAU,KAAK,KAAK,UAAU,OAAO,OAAO;A
AClD,OAAI,CAAC,WAAW,QAAQ,EAAE;AACxB,YAAQ,KAAK,4BAA4B,QAAQ,0BAA0B;AAC3E,mBAAe,KAAK,eAAe,KAAK,UAAU,MAAM;AACxD;;AAGF,WAAQ,IAAI,4BAA4B,KAAK,cAAc,KAAK;AAEhE,OAAI;AAEF,UAAM,UAAU,OAAO,KAAK,cAAc,QAAQ,QAAQ,eAAe;AAEzE,mBAAe,KAAK,eAAe,KAAK,UAAU,MAAM;AACxD,YAAQ,IAAI,6BAA6B,KAAK,gBAAgB;YACvD,OAAO;AAEd,QAAI,aAAa,IACf,KAAI;AACF,aAAQ,KAAK,aAAa,KAAK,UAAU;AACzC,aAAQ,IAAI,0CAA0C,aAAa,IAAI,GAAG;aACnE,WAAW;AAClB,aAAQ,KAAK,uCAAuC,YAAY;;AAKpE,mBAAe,KAAK,eAAe,KAAK,UAAU,MAAM;;;;;;EAO5D,MAAM,YAAuC;GAC3C,MAAM,QAAQ,cAAc,KAAK,cAAc;AAE/C,OAAI,CAAC,OAAO,QACV,QAAO;IACL,SAAS;IACT,eAAe,KAAK;IACpB,UAAU,KAAK;IACf,SAAS;IACV;GAIH,MAAM,UAAU,MAAM,KAAK,aAAa;AAExC,UAAO;IACL,SAAS;IACT,KAAK,MAAM;IACX,WAAW,MAAM,YAAY,IAAI,KAAK,MAAM,UAAU,GAAG,KAAA;IACzD,eAAe,KAAK;IACpB,UAAU,KAAK;IACf;IACD;;;;;EAMH,MAAM,cAAgC;GACpC,MAAM,UAAU,KAAK,KAAK,UAAU,OAAO,OAAO;AAClD,OAAI,CAAC,WAAW,QAAQ,CACtB,QAAO;AAGT,OAAI;AAEF,UAAM,UAAU,OAAO,KAAK,cAAc,QAAQ,QAAQ,kBAAkB,EAAE,SAAS,KAAM,CAAC;AAC9F,WAAO;WACD;AACN,WAAO;;;;;;EAOX,MAAM,UAAyB;AAC7B,WAAQ,IAAI,8BAA8B,KAAK,cAAc,KAAK;AAClE,SAAM,KAAK,MAAM;AACjB,SAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,IAAK,CAAC;AAC3C,SAAM,KAAK,OAAO;;;;;;;EAQpB,MAAM,KAAK,aAAa,MAAqB;GAC3C,MAAM,UAAU,KAAK,KAAK,UAAU,OAAO,OAAO;AAClD,OAAI,CAAC,WAAW,QAAQ,CACtB,OAAM,IAAI,MAAM,4BAA4B,UAAU;AAGxD,WAAQ,IAAI,0BAA0B,KAAK,cAAc,KAAK;AAE9D,OAAI;AAKF,UAAM,UAJM,aACR,OAAO,KAAK,cAAc,QAAQ,QAAQ,8BAC1C,OAAO,KAAK,cAAc,QAAQ,QAAQ,UAE1B;AACpB,YAAQ,IAAI,sCAAsC,KAAK,gBAAgB;YAChE,OAAO;IACd,MAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AAC3E,UAAM,IAAI,MAAM,8BAA8B,eAAe;;;;;;EAOjE,YAAqB;AAEnB,UADc,cAAc,KAAK,cAAc,EACjC,WAAW;;;;;EAM3B,mBAA2B;AACzB,UAAO,KAAK;;;;;EAMd,cAAsB;AACpB,UAAO,KAAK;;;AAOV,kCAAiB,IAAI,KAAgC"}
@@ -1,5 +1,5 @@
 import { n as __esmMin } from "./chunk-DORXReHP.js";
-import {
+import { _ as init_paths, s as PANOPTICON_HOME } from "./paths-BDyJ7BiV.js";
 import { appendFileSync, chmodSync, existsSync, mkdirSync, unlinkSync, writeFileSync } from "fs";
 import { join } from "path";
 import { exec, execSync } from "child_process";
@@ -175,4 +175,4 @@ var init_tmux = __esmMin((() => {
 //#endregion
 export { getAgentSessions as a, listSessions as c, sessionExistsAsync as d, waitForClaudePrompt as f, createSession as i, sendKeysAsync as l, capturePaneAsync as n, init_tmux as o, confirmDelivery as r, killSession as s, capturePane as t, sessionExists as u };
 
-//# sourceMappingURL=tmux-
+//# sourceMappingURL=tmux-LwG0tHhU.js.map
@@ -1 +1 @@
-
{"version":3,"file":"tmux-IlN1Slv-.js","names":[],"sources":["../../src/lib/tmux.ts"],"sourcesContent":["import { execSync, exec } from 'child_process';\nimport { promisify } from 'util';\nimport { writeFileSync, chmodSync, appendFileSync, mkdirSync, existsSync, unlinkSync } from 'fs';\nimport { join } from 'path';\nimport { PANOPTICON_HOME } from './paths.js';\n\n/**\n * Log file for tmux sendKeys operations\n * This helps debug mysterious messages appearing in agent prompts\n */\nconst SENDKEYS_LOG_FILE = join(PANOPTICON_HOME, 'logs', 'sendkeys.jsonl');\n\n/**\n * Ensure log directory exists\n */\nfunction ensureLogDir(): void {\n const logDir = join(PANOPTICON_HOME, 'logs');\n if (!existsSync(logDir)) {\n mkdirSync(logDir, { recursive: true });\n }\n}\n\n/**\n * Log a sendKeys operation for debugging\n */\nfunction logSendKeys(sessionName: string, keys: string, caller?: string): void {\n try {\n ensureLogDir();\n\n // Get call stack to identify caller if not provided\n const stack = new Error().stack || '';\n const stackLines = stack.split('\\n').slice(3, 6); // Skip Error, logSendKeys, sendKeys\n const callerInfo = caller || stackLines.map(l => l.trim()).join(' <- ');\n\n const entry = {\n timestamp: new Date().toISOString(),\n sessionName,\n keysLength: keys.length,\n keysPreview: keys.length > 200 ? keys.slice(0, 200) + '...' : keys,\n caller: callerInfo,\n pid: process.pid,\n };\n\n appendFileSync(SENDKEYS_LOG_FILE, JSON.stringify(entry) + '\\n', 'utf-8');\n } catch {\n // Silently fail - logging should never break functionality\n }\n}\n\nexport interface TmuxSession {\n name: string;\n created: Date;\n attached: boolean;\n windows: number;\n}\n\nexport function listSessions(): TmuxSession[] {\n try {\n const output = execSync('tmux list-sessions -F \"#{session_name}|#{session_created}|#{session_attached}|#{session_windows}\"', {\n encoding: 'utf8',\n });\n\n return output.trim().split('\\n').filter(Boolean).map(line => {\n const [name, created, attached, windows] = line.split('|');\n return {\n name,\n created: new Date(parseInt(created) * 1000),\n attached: attached === '1',\n windows: parseInt(windows),\n };\n });\n } catch {\n return []; // No sessions\n }\n}\n\nexport function sessionExists(name: string): boolean {\n try {\n execSync(`tmux has-session -t ${name} 2>/dev/null`);\n return true;\n } catch {\n return false;\n }\n}\n\n\nexport function createSession(\n name: string,\n cwd: string,\n initialCommand?: string,\n options?: { env?: Record<string, string> }\n): void {\n const escapedCwd = cwd.replace(/\"/g, '\\\\\"');\n\n // Build environment variable flags for tmux\n let envFlags = '';\n if (options?.env) {\n for (const [key, value] of Object.entries(options.env)) {\n envFlags += ` -e ${key}=\"${value.replace(/\"/g, '\\\\\"')}\"`;\n }\n }\n\n // For complex commands (with special chars), start session first then send command\n if (initialCommand && (initialCommand.includes('`') || initialCommand.includes('\\n') || initialCommand.length > 500)) {\n // Create session without command\n execSync(`tmux new-session -d -s ${name} -c \"${escapedCwd}\"${envFlags}`);\n\n // Small delay to let session initialize\n execSync('sleep 0.5');\n\n // Send the command in chunks if needed (tmux has buffer limits)\n // First, write to a temp file and source it\n const tmpFile = `/tmp/pan-cmd-${name}.sh`;\n writeFileSync(tmpFile, initialCommand);\n chmodSync(tmpFile, '755');\n\n // Execute the script\n execSync(`tmux send-keys -t ${name} \"bash ${tmpFile}\"`);\n execSync(`tmux send-keys -t 
${name} C-m`);\n } else if (initialCommand) {\n // Simple command - use inline\n const cmd = `tmux new-session -d -s ${name} -c \"${escapedCwd}\"${envFlags} \"${initialCommand.replace(/\"/g, '\\\\\"')}\"`;\n execSync(cmd);\n } else {\n execSync(`tmux new-session -d -s ${name} -c \"${escapedCwd}\"${envFlags}`);\n }\n}\n\nexport function killSession(name: string): void {\n execSync(`tmux kill-session -t ${name}`);\n}\n\nconst execAsync = promisify(exec);\n\nexport async function sessionExistsAsync(name: string): Promise<boolean> {\n try {\n await execAsync(`tmux has-session -t ${name} 2>/dev/null`);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Send keys to a tmux session (async, non-blocking).\n * Uses load-buffer + paste-buffer for reliable delivery, with a delay before Enter.\n * MUST be used from the dashboard server and any async context.\n */\nexport async function sendKeysAsync(sessionName: string, keys: string, caller?: string): Promise<void> {\n logSendKeys(sessionName, keys, caller);\n\n // Use a unique named buffer per call to prevent race conditions.\n // The default (unnamed) paste buffer is global — concurrent load-buffer\n // calls from different specialist wakes clobber each other.\n const bufferName = `pan-${process.pid}-${Date.now()}`;\n const tmpFile = `/tmp/pan-sendkeys-${bufferName}.txt`;\n try {\n writeFileSync(tmpFile, keys);\n await execAsync(`tmux load-buffer -b ${bufferName} ${tmpFile}`);\n await execAsync(`tmux paste-buffer -b ${bufferName} -t ${sessionName} -d`);\n await new Promise(r => setTimeout(r, 300));\n await execAsync(`tmux send-keys -t ${sessionName} C-m`);\n } finally {\n try { unlinkSync(tmpFile); } catch {}\n try { await execAsync(`tmux delete-buffer -b ${bufferName} 2>/dev/null`); } catch {}\n }\n}\n\n/**\n * Send keys to a tmux session (sync, blocks event loop).\n * Only use from CLI commands — NEVER from the dashboard server.\n */\nexport function sendKeys(sessionName: string, keys: string, caller?: string): void {\n logSendKeys(sessionName, keys, caller);\n\n const tmpFile = `/tmp/pan-sendkeys-${process.pid}-${Date.now()}.txt`;\n try {\n writeFileSync(tmpFile, keys);\n execSync(`tmux load-buffer ${tmpFile}`);\n execSync(`tmux paste-buffer -t ${sessionName}`);\n execSync(`sleep 0.3`);\n execSync(`tmux send-keys -t ${sessionName} C-m`);\n } finally {\n try { unlinkSync(tmpFile); } catch {}\n }\n}\n\nexport function capturePane(sessionName: string, lines: number = 50): string {\n try {\n return execSync(`tmux capture-pane -t ${sessionName} -p -S -${lines}`, {\n encoding: 'utf8',\n });\n } catch {\n return '';\n }\n}\n\n/**\n * Capture tmux pane output (async, non-blocking).\n * MUST be used from the dashboard server and any async context.\n */\nexport async function capturePaneAsync(sessionName: string, lines: number = 50): Promise<string> {\n try {\n const { stdout } = await execAsync(`tmux capture-pane -t ${sessionName} -p -S -${lines}`, {\n encoding: 'utf-8',\n });\n return stdout;\n } catch {\n return '';\n }\n}\n\n/**\n * Wait for Claude Code to reach its interactive prompt (❯) in a tmux session.\n * Polls tmux output until the prompt appears or timeout is reached.\n *\n * @param sessionName - tmux session name\n * @param timeoutMs - maximum time to wait (default: 15s for fresh start, use 5s for already-running)\n * @returns true if prompt detected, false if timed out\n */\nexport async function waitForClaudePrompt(sessionName: string, timeoutMs: number = 15000): Promise<boolean> {\n const start = Date.now();\n const POLL = 500;\n 
while (Date.now() - start < timeoutMs) {\n const output = await capturePaneAsync(sessionName, 10);\n // Claude Code shows ❯ when ready for user input.\n // Check that the LAST non-empty line contains ❯ (not a stale prompt from earlier output).\n const lines = output.split('\\n').filter(l => l.trim());\n const lastLine = lines[lines.length - 1] || '';\n if (lastLine.includes('❯')) return true;\n await new Promise(r => setTimeout(r, POLL));\n }\n return false;\n}\n\n/**\n * Verify that a message sent to Claude was actually received and processing started.\n * Compares tmux output before and after to detect new activity (tool calls, responses).\n *\n * @param sessionName - tmux session name\n * @param outputBefore - tmux output snapshot taken BEFORE sending the message\n * @param timeoutMs - maximum time to wait for activity (default: 10s)\n * @returns true if new activity detected, false if timed out\n */\nexport async function confirmDelivery(\n sessionName: string,\n outputBefore: string,\n timeoutMs: number = 10000,\n): Promise<boolean> {\n const start = Date.now();\n const POLL = 1000;\n const beforeLineCount = outputBefore.split('\\n').filter(l => l.trim()).length;\n\n while (Date.now() - start < timeoutMs) {\n await new Promise(r => setTimeout(r, POLL));\n const after = await capturePaneAsync(sessionName, 50);\n const afterLines = after.split('\\n').filter(l => l.trim());\n const afterLineCount = afterLines.length;\n\n // Claude is processing if: new output lines appeared (tool calls: ●, results: ⎿, etc.)\n if (afterLineCount > beforeLineCount + 1) return true;\n\n // Or if we can see activity markers in the new output\n const newOutput = afterLines.slice(beforeLineCount).join('\\n');\n if (\n newOutput.includes('●') || newOutput.includes('⎿') || newOutput.includes('Read') ||\n newOutput.includes('✻') || newOutput.includes('·') || newOutput.includes('✶') ||\n newOutput.includes('✽') || newOutput.includes('✢') || newOutput.includes('Generating') ||\n newOutput.includes('thinking') || newOutput.includes('thought for')\n ) return true;\n }\n return false;\n}\n\nexport function getAgentSessions(): TmuxSession[] {\n return listSessions().filter(s => 
s.name.startsWith('agent-'));\n}\n"],"mappings":";;;;;;;;;;AAeA,SAAS,eAAqB;CAC5B,MAAM,SAAS,KAAK,iBAAiB,OAAO;AAC5C,KAAI,CAAC,WAAW,OAAO,CACrB,WAAU,QAAQ,EAAE,WAAW,MAAM,CAAC;;;;;AAO1C,SAAS,YAAY,aAAqB,MAAc,QAAuB;AAC7E,KAAI;AACF,gBAAc;EAId,MAAM,+BADQ,IAAI,OAAO,EAAC,SAAS,IACV,MAAM,KAAK,CAAC,MAAM,GAAG,EAAE;EAChD,MAAM,aAAa,UAAU,WAAW,KAAI,MAAK,EAAE,MAAM,CAAC,CAAC,KAAK,OAAO;EAEvE,MAAM,QAAQ;GACZ,4BAAW,IAAI,MAAM,EAAC,aAAa;GACnC;GACA,YAAY,KAAK;GACjB,aAAa,KAAK,SAAS,MAAM,KAAK,MAAM,GAAG,IAAI,GAAG,QAAQ;GAC9D,QAAQ;GACR,KAAK,QAAQ;GACd;AAED,iBAAe,mBAAmB,KAAK,UAAU,MAAM,GAAG,MAAM,QAAQ;SAClE;;AAYV,SAAgB,eAA8B;AAC5C,KAAI;AAKF,SAJe,SAAS,uGAAqG,EAC3H,UAAU,QACX,CAAC,CAEY,MAAM,CAAC,MAAM,KAAK,CAAC,OAAO,QAAQ,CAAC,KAAI,SAAQ;GAC3D,MAAM,CAAC,MAAM,SAAS,UAAU,WAAW,KAAK,MAAM,IAAI;AAC1D,UAAO;IACL;IACA,yBAAS,IAAI,KAAK,SAAS,QAAQ,GAAG,IAAK;IAC3C,UAAU,aAAa;IACvB,SAAS,SAAS,QAAQ;IAC3B;IACD;SACI;AACN,SAAO,EAAE;;;AAIb,SAAgB,cAAc,MAAuB;AACnD,KAAI;AACF,WAAS,uBAAuB,KAAK,cAAc;AACnD,SAAO;SACD;AACN,SAAO;;;AAKX,SAAgB,cACd,MACA,KACA,gBACA,SACM;CACN,MAAM,aAAa,IAAI,QAAQ,MAAM,OAAM;CAG3C,IAAI,WAAW;AACf,KAAI,SAAS,IACX,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,QAAQ,IAAI,CACpD,aAAY,OAAO,IAAI,IAAI,MAAM,QAAQ,MAAM,OAAM,CAAC;AAK1D,KAAI,mBAAmB,eAAe,SAAS,IAAI,IAAI,eAAe,SAAS,KAAK,IAAI,eAAe,SAAS,MAAM;AAEpH,WAAS,0BAA0B,KAAK,OAAO,WAAW,GAAG,WAAW;AAGxE,WAAS,YAAY;EAIrB,MAAM,UAAU,gBAAgB,KAAK;AACrC,gBAAc,SAAS,eAAe;AACtC,YAAU,SAAS,MAAM;AAGzB,WAAS,qBAAqB,KAAK,SAAS,QAAQ,GAAG;AACvD,WAAS,qBAAqB,KAAK,MAAM;YAChC,eAGT,UADY,0BAA0B,KAAK,OAAO,WAAW,GAAG,SAAS,IAAI,eAAe,QAAQ,MAAM,OAAM,CAAC,GACpG;KAEb,UAAS,0BAA0B,KAAK,OAAO,WAAW,GAAG,WAAW;;AAI5E,SAAgB,YAAY,MAAoB;AAC9C,UAAS,wBAAwB,OAAO;;AAK1C,eAAsB,mBAAmB,MAAgC;AACvE,KAAI;AACF,QAAM,UAAU,uBAAuB,KAAK,cAAc;AAC1D,SAAO;SACD;AACN,SAAO;;;;;;;;AASX,eAAsB,cAAc,aAAqB,MAAc,QAAgC;AACrG,aAAY,aAAa,MAAM,OAAO;CAKtC,MAAM,aAAa,OAAO,QAAQ,IAAI,GAAG,KAAK,KAAK;CACnD,MAAM,UAAU,qBAAqB,WAAW;AAChD,KAAI;AACF,gBAAc,SAAS,KAAK;AAC5B,QAAM,UAAU,uBAAuB,WAAW,GAAG,UAAU;AAC/D,QAAM,UAAU,wBAAwB,WAAW,MAAM,YAAY,KAAK;AAC1E,QAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,IAAI,CAAC;AAC1C,QAAM,UAAU,qBAAqB,YAAY,MAAM;WAC/C;AACR,MAAI;AAAE,cAAW,QAAQ;UAAU;AACnC,MAAI;AAAE,SAAM,UAAU,yBAAyB,WAAW,cAAc;UAAU;;;AAuBtF,SAAgB,YAAY,aAAqB,QAAgB,IAAY;AAC3E,KAAI;AACF,SAAO,SAAS,wBAAwB,YAAY,UAAU,SAAS,EACrE,UAAU,QACX,CAAC;SACI;AACN,SAAO;;;;;;;AAQX,eAAsB,iBAAiB,aAAqB,QAAgB,IAAqB;AAC/F,KAAI;EACF,MAAM,EAAE,WAAW,MAAM,UAAU,wBAAwB,YAAY,UAAU,SAAS,EACxF,UAAU,SACX,CAAC;AACF,SAAO;SACD;AACN,SAAO;;;;;;;;;;;AAYX,eAAsB,oBAAoB,aAAqB,YAAoB,MAAyB;CAC1G,MAAM,QAAQ,KAAK,KAAK;CACxB,MAAM,OAAO;AACb,QAAO,KAAK,KAAK,GAAG,QAAQ,WAAW;EAIrC,MAAM,SAHS,MAAM,iBAAiB,aAAa,GAAG,EAGjC,MAAM,KAAK,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC;AAEtD,OADiB,MAAM,MAAM,SAAS,MAAM,IAC/B,SAAS,IAAI,CAAE,QAAO;AACnC,QAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,KAAK,CAAC;;AAE7C,QAAO;;;;;;;;;;;AAYT,eAAsB,gBACpB,aACA,cACA,YAAoB,KACF;CAClB,MAAM,QAAQ,KAAK,KAAK;CACxB,MAAM,OAAO;CACb,MAAM,kBAAkB,aAAa,MAAM,KAAK,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC,CAAC;AAEvE,QAAO,KAAK,KAAK,GAAG,QAAQ,WAAW;AACrC,QAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,KAAK,CAAC;EAE3C,MAAM,cADQ,MAAM,iBAAiB,aAAa,GAAG,EAC5B,MAAM,KAAK,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC;AAI1D,MAHuB,WAAW,SAGb,kBAAkB,EAAG,QAAO;EAGjD,MAAM,YAAY,WAAW,MAAM,gBAAgB,CAAC,KAAK,KAAK;AAC9D,MACE,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,OAAO,IAChF,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,IAAI,IAC7E,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,aAAa,IACtF,UAAU,SAAS,WAAW,IAAI,UAAU,SAAS,cAAc,CACnE,QAAO;;AAEX,QAAO;;AAGT,SAAgB,mBAAkC;AAChD,QAAO,cAAc,CAAC,QAAO,MAAK,EAAE,KAAK,WAAW,SAAS,CAAC;;;;aA/QnB;AAMvC,qBAAoB,KAAK,iBAAiB,QAAQ,iBAAiB;AA0HnE,aAAY,UAAU,KAAK"}
+
{"version":3,"file":"tmux-LwG0tHhU.js","names":[],"sources":["../../src/lib/tmux.ts"],"sourcesContent":["import { execSync, exec } from 'child_process';\nimport { promisify } from 'util';\nimport { writeFileSync, chmodSync, appendFileSync, mkdirSync, existsSync, unlinkSync } from 'fs';\nimport { join } from 'path';\nimport { PANOPTICON_HOME } from './paths.js';\n\n/**\n * Log file for tmux sendKeys operations\n * This helps debug mysterious messages appearing in agent prompts\n */\nconst SENDKEYS_LOG_FILE = join(PANOPTICON_HOME, 'logs', 'sendkeys.jsonl');\n\n/**\n * Ensure log directory exists\n */\nfunction ensureLogDir(): void {\n const logDir = join(PANOPTICON_HOME, 'logs');\n if (!existsSync(logDir)) {\n mkdirSync(logDir, { recursive: true });\n }\n}\n\n/**\n * Log a sendKeys operation for debugging\n */\nfunction logSendKeys(sessionName: string, keys: string, caller?: string): void {\n try {\n ensureLogDir();\n\n // Get call stack to identify caller if not provided\n const stack = new Error().stack || '';\n const stackLines = stack.split('\\n').slice(3, 6); // Skip Error, logSendKeys, sendKeys\n const callerInfo = caller || stackLines.map(l => l.trim()).join(' <- ');\n\n const entry = {\n timestamp: new Date().toISOString(),\n sessionName,\n keysLength: keys.length,\n keysPreview: keys.length > 200 ? keys.slice(0, 200) + '...' : keys,\n caller: callerInfo,\n pid: process.pid,\n };\n\n appendFileSync(SENDKEYS_LOG_FILE, JSON.stringify(entry) + '\\n', 'utf-8');\n } catch {\n // Silently fail - logging should never break functionality\n }\n}\n\nexport interface TmuxSession {\n name: string;\n created: Date;\n attached: boolean;\n windows: number;\n}\n\nexport function listSessions(): TmuxSession[] {\n try {\n const output = execSync('tmux list-sessions -F \"#{session_name}|#{session_created}|#{session_attached}|#{session_windows}\"', {\n encoding: 'utf8',\n });\n\n return output.trim().split('\\n').filter(Boolean).map(line => {\n const [name, created, attached, windows] = line.split('|');\n return {\n name,\n created: new Date(parseInt(created) * 1000),\n attached: attached === '1',\n windows: parseInt(windows),\n };\n });\n } catch {\n return []; // No sessions\n }\n}\n\nexport function sessionExists(name: string): boolean {\n try {\n execSync(`tmux has-session -t ${name} 2>/dev/null`);\n return true;\n } catch {\n return false;\n }\n}\n\n\nexport function createSession(\n name: string,\n cwd: string,\n initialCommand?: string,\n options?: { env?: Record<string, string> }\n): void {\n const escapedCwd = cwd.replace(/\"/g, '\\\\\"');\n\n // Build environment variable flags for tmux\n let envFlags = '';\n if (options?.env) {\n for (const [key, value] of Object.entries(options.env)) {\n envFlags += ` -e ${key}=\"${value.replace(/\"/g, '\\\\\"')}\"`;\n }\n }\n\n // For complex commands (with special chars), start session first then send command\n if (initialCommand && (initialCommand.includes('`') || initialCommand.includes('\\n') || initialCommand.length > 500)) {\n // Create session without command\n execSync(`tmux new-session -d -s ${name} -c \"${escapedCwd}\"${envFlags}`);\n\n // Small delay to let session initialize\n execSync('sleep 0.5');\n\n // Send the command in chunks if needed (tmux has buffer limits)\n // First, write to a temp file and source it\n const tmpFile = `/tmp/pan-cmd-${name}.sh`;\n writeFileSync(tmpFile, initialCommand);\n chmodSync(tmpFile, '755');\n\n // Execute the script\n execSync(`tmux send-keys -t ${name} \"bash ${tmpFile}\"`);\n execSync(`tmux send-keys -t 
${name} C-m`);\n } else if (initialCommand) {\n // Simple command - use inline\n const cmd = `tmux new-session -d -s ${name} -c \"${escapedCwd}\"${envFlags} \"${initialCommand.replace(/\"/g, '\\\\\"')}\"`;\n execSync(cmd);\n } else {\n execSync(`tmux new-session -d -s ${name} -c \"${escapedCwd}\"${envFlags}`);\n }\n}\n\nexport function killSession(name: string): void {\n execSync(`tmux kill-session -t ${name}`);\n}\n\nconst execAsync = promisify(exec);\n\nexport async function sessionExistsAsync(name: string): Promise<boolean> {\n try {\n await execAsync(`tmux has-session -t ${name} 2>/dev/null`);\n return true;\n } catch {\n return false;\n }\n}\n\n/**\n * Send keys to a tmux session (async, non-blocking).\n * Uses load-buffer + paste-buffer for reliable delivery, with a delay before Enter.\n * MUST be used from the dashboard server and any async context.\n */\nexport async function sendKeysAsync(sessionName: string, keys: string, caller?: string): Promise<void> {\n logSendKeys(sessionName, keys, caller);\n\n // Use a unique named buffer per call to prevent race conditions.\n // The default (unnamed) paste buffer is global — concurrent load-buffer\n // calls from different specialist wakes clobber each other.\n const bufferName = `pan-${process.pid}-${Date.now()}`;\n const tmpFile = `/tmp/pan-sendkeys-${bufferName}.txt`;\n try {\n writeFileSync(tmpFile, keys);\n await execAsync(`tmux load-buffer -b ${bufferName} ${tmpFile}`);\n await execAsync(`tmux paste-buffer -b ${bufferName} -t ${sessionName} -d`);\n await new Promise(r => setTimeout(r, 300));\n await execAsync(`tmux send-keys -t ${sessionName} C-m`);\n } finally {\n try { unlinkSync(tmpFile); } catch {}\n try { await execAsync(`tmux delete-buffer -b ${bufferName} 2>/dev/null`); } catch {}\n }\n}\n\n/**\n * Send keys to a tmux session (sync, blocks event loop).\n * Only use from CLI commands — NEVER from the dashboard server.\n */\nexport function sendKeys(sessionName: string, keys: string, caller?: string): void {\n logSendKeys(sessionName, keys, caller);\n\n const tmpFile = `/tmp/pan-sendkeys-${process.pid}-${Date.now()}.txt`;\n try {\n writeFileSync(tmpFile, keys);\n execSync(`tmux load-buffer ${tmpFile}`);\n execSync(`tmux paste-buffer -t ${sessionName}`);\n execSync(`sleep 0.3`);\n execSync(`tmux send-keys -t ${sessionName} C-m`);\n } finally {\n try { unlinkSync(tmpFile); } catch {}\n }\n}\n\nexport function capturePane(sessionName: string, lines: number = 50): string {\n try {\n return execSync(`tmux capture-pane -t ${sessionName} -p -S -${lines}`, {\n encoding: 'utf8',\n });\n } catch {\n return '';\n }\n}\n\n/**\n * Capture tmux pane output (async, non-blocking).\n * MUST be used from the dashboard server and any async context.\n */\nexport async function capturePaneAsync(sessionName: string, lines: number = 50): Promise<string> {\n try {\n const { stdout } = await execAsync(`tmux capture-pane -t ${sessionName} -p -S -${lines}`, {\n encoding: 'utf-8',\n });\n return stdout;\n } catch {\n return '';\n }\n}\n\n/**\n * Wait for Claude Code to reach its interactive prompt (❯) in a tmux session.\n * Polls tmux output until the prompt appears or timeout is reached.\n *\n * @param sessionName - tmux session name\n * @param timeoutMs - maximum time to wait (default: 15s for fresh start, use 5s for already-running)\n * @returns true if prompt detected, false if timed out\n */\nexport async function waitForClaudePrompt(sessionName: string, timeoutMs: number = 15000): Promise<boolean> {\n const start = Date.now();\n const POLL = 500;\n 
while (Date.now() - start < timeoutMs) {\n const output = await capturePaneAsync(sessionName, 10);\n // Claude Code shows ❯ when ready for user input.\n // Check that the LAST non-empty line contains ❯ (not a stale prompt from earlier output).\n const lines = output.split('\\n').filter(l => l.trim());\n const lastLine = lines[lines.length - 1] || '';\n if (lastLine.includes('❯')) return true;\n await new Promise(r => setTimeout(r, POLL));\n }\n return false;\n}\n\n/**\n * Verify that a message sent to Claude was actually received and processing started.\n * Compares tmux output before and after to detect new activity (tool calls, responses).\n *\n * @param sessionName - tmux session name\n * @param outputBefore - tmux output snapshot taken BEFORE sending the message\n * @param timeoutMs - maximum time to wait for activity (default: 10s)\n * @returns true if new activity detected, false if timed out\n */\nexport async function confirmDelivery(\n sessionName: string,\n outputBefore: string,\n timeoutMs: number = 10000,\n): Promise<boolean> {\n const start = Date.now();\n const POLL = 1000;\n const beforeLineCount = outputBefore.split('\\n').filter(l => l.trim()).length;\n\n while (Date.now() - start < timeoutMs) {\n await new Promise(r => setTimeout(r, POLL));\n const after = await capturePaneAsync(sessionName, 50);\n const afterLines = after.split('\\n').filter(l => l.trim());\n const afterLineCount = afterLines.length;\n\n // Claude is processing if: new output lines appeared (tool calls: ●, results: ⎿, etc.)\n if (afterLineCount > beforeLineCount + 1) return true;\n\n // Or if we can see activity markers in the new output\n const newOutput = afterLines.slice(beforeLineCount).join('\\n');\n if (\n newOutput.includes('●') || newOutput.includes('⎿') || newOutput.includes('Read') ||\n newOutput.includes('✻') || newOutput.includes('·') || newOutput.includes('✶') ||\n newOutput.includes('✽') || newOutput.includes('✢') || newOutput.includes('Generating') ||\n newOutput.includes('thinking') || newOutput.includes('thought for')\n ) return true;\n }\n return false;\n}\n\nexport function getAgentSessions(): TmuxSession[] {\n return listSessions().filter(s => 
s.name.startsWith('agent-'));\n}\n"],"mappings":";;;;;;;;;;AAeA,SAAS,eAAqB;CAC5B,MAAM,SAAS,KAAK,iBAAiB,OAAO;AAC5C,KAAI,CAAC,WAAW,OAAO,CACrB,WAAU,QAAQ,EAAE,WAAW,MAAM,CAAC;;;;;AAO1C,SAAS,YAAY,aAAqB,MAAc,QAAuB;AAC7E,KAAI;AACF,gBAAc;EAId,MAAM,+BADQ,IAAI,OAAO,EAAC,SAAS,IACV,MAAM,KAAK,CAAC,MAAM,GAAG,EAAE;EAChD,MAAM,aAAa,UAAU,WAAW,KAAI,MAAK,EAAE,MAAM,CAAC,CAAC,KAAK,OAAO;EAEvE,MAAM,QAAQ;GACZ,4BAAW,IAAI,MAAM,EAAC,aAAa;GACnC;GACA,YAAY,KAAK;GACjB,aAAa,KAAK,SAAS,MAAM,KAAK,MAAM,GAAG,IAAI,GAAG,QAAQ;GAC9D,QAAQ;GACR,KAAK,QAAQ;GACd;AAED,iBAAe,mBAAmB,KAAK,UAAU,MAAM,GAAG,MAAM,QAAQ;SAClE;;AAYV,SAAgB,eAA8B;AAC5C,KAAI;AAKF,SAJe,SAAS,uGAAqG,EAC3H,UAAU,QACX,CAAC,CAEY,MAAM,CAAC,MAAM,KAAK,CAAC,OAAO,QAAQ,CAAC,KAAI,SAAQ;GAC3D,MAAM,CAAC,MAAM,SAAS,UAAU,WAAW,KAAK,MAAM,IAAI;AAC1D,UAAO;IACL;IACA,yBAAS,IAAI,KAAK,SAAS,QAAQ,GAAG,IAAK;IAC3C,UAAU,aAAa;IACvB,SAAS,SAAS,QAAQ;IAC3B;IACD;SACI;AACN,SAAO,EAAE;;;AAIb,SAAgB,cAAc,MAAuB;AACnD,KAAI;AACF,WAAS,uBAAuB,KAAK,cAAc;AACnD,SAAO;SACD;AACN,SAAO;;;AAKX,SAAgB,cACd,MACA,KACA,gBACA,SACM;CACN,MAAM,aAAa,IAAI,QAAQ,MAAM,OAAM;CAG3C,IAAI,WAAW;AACf,KAAI,SAAS,IACX,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,QAAQ,IAAI,CACpD,aAAY,OAAO,IAAI,IAAI,MAAM,QAAQ,MAAM,OAAM,CAAC;AAK1D,KAAI,mBAAmB,eAAe,SAAS,IAAI,IAAI,eAAe,SAAS,KAAK,IAAI,eAAe,SAAS,MAAM;AAEpH,WAAS,0BAA0B,KAAK,OAAO,WAAW,GAAG,WAAW;AAGxE,WAAS,YAAY;EAIrB,MAAM,UAAU,gBAAgB,KAAK;AACrC,gBAAc,SAAS,eAAe;AACtC,YAAU,SAAS,MAAM;AAGzB,WAAS,qBAAqB,KAAK,SAAS,QAAQ,GAAG;AACvD,WAAS,qBAAqB,KAAK,MAAM;YAChC,eAGT,UADY,0BAA0B,KAAK,OAAO,WAAW,GAAG,SAAS,IAAI,eAAe,QAAQ,MAAM,OAAM,CAAC,GACpG;KAEb,UAAS,0BAA0B,KAAK,OAAO,WAAW,GAAG,WAAW;;AAI5E,SAAgB,YAAY,MAAoB;AAC9C,UAAS,wBAAwB,OAAO;;AAK1C,eAAsB,mBAAmB,MAAgC;AACvE,KAAI;AACF,QAAM,UAAU,uBAAuB,KAAK,cAAc;AAC1D,SAAO;SACD;AACN,SAAO;;;;;;;;AASX,eAAsB,cAAc,aAAqB,MAAc,QAAgC;AACrG,aAAY,aAAa,MAAM,OAAO;CAKtC,MAAM,aAAa,OAAO,QAAQ,IAAI,GAAG,KAAK,KAAK;CACnD,MAAM,UAAU,qBAAqB,WAAW;AAChD,KAAI;AACF,gBAAc,SAAS,KAAK;AAC5B,QAAM,UAAU,uBAAuB,WAAW,GAAG,UAAU;AAC/D,QAAM,UAAU,wBAAwB,WAAW,MAAM,YAAY,KAAK;AAC1E,QAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,IAAI,CAAC;AAC1C,QAAM,UAAU,qBAAqB,YAAY,MAAM;WAC/C;AACR,MAAI;AAAE,cAAW,QAAQ;UAAU;AACnC,MAAI;AAAE,SAAM,UAAU,yBAAyB,WAAW,cAAc;UAAU;;;AAuBtF,SAAgB,YAAY,aAAqB,QAAgB,IAAY;AAC3E,KAAI;AACF,SAAO,SAAS,wBAAwB,YAAY,UAAU,SAAS,EACrE,UAAU,QACX,CAAC;SACI;AACN,SAAO;;;;;;;AAQX,eAAsB,iBAAiB,aAAqB,QAAgB,IAAqB;AAC/F,KAAI;EACF,MAAM,EAAE,WAAW,MAAM,UAAU,wBAAwB,YAAY,UAAU,SAAS,EACxF,UAAU,SACX,CAAC;AACF,SAAO;SACD;AACN,SAAO;;;;;;;;;;;AAYX,eAAsB,oBAAoB,aAAqB,YAAoB,MAAyB;CAC1G,MAAM,QAAQ,KAAK,KAAK;CACxB,MAAM,OAAO;AACb,QAAO,KAAK,KAAK,GAAG,QAAQ,WAAW;EAIrC,MAAM,SAHS,MAAM,iBAAiB,aAAa,GAAG,EAGjC,MAAM,KAAK,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC;AAEtD,OADiB,MAAM,MAAM,SAAS,MAAM,IAC/B,SAAS,IAAI,CAAE,QAAO;AACnC,QAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,KAAK,CAAC;;AAE7C,QAAO;;;;;;;;;;;AAYT,eAAsB,gBACpB,aACA,cACA,YAAoB,KACF;CAClB,MAAM,QAAQ,KAAK,KAAK;CACxB,MAAM,OAAO;CACb,MAAM,kBAAkB,aAAa,MAAM,KAAK,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC,CAAC;AAEvE,QAAO,KAAK,KAAK,GAAG,QAAQ,WAAW;AACrC,QAAM,IAAI,SAAQ,MAAK,WAAW,GAAG,KAAK,CAAC;EAE3C,MAAM,cADQ,MAAM,iBAAiB,aAAa,GAAG,EAC5B,MAAM,KAAK,CAAC,QAAO,MAAK,EAAE,MAAM,CAAC;AAI1D,MAHuB,WAAW,SAGb,kBAAkB,EAAG,QAAO;EAGjD,MAAM,YAAY,WAAW,MAAM,gBAAgB,CAAC,KAAK,KAAK;AAC9D,MACE,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,OAAO,IAChF,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,IAAI,IAC7E,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,IAAI,IAAI,UAAU,SAAS,aAAa,IACtF,UAAU,SAAS,WAAW,IAAI,UAAU,SAAS,cAAc,CACnE,QAAO;;AAEX,QAAO;;AAGT,SAAgB,mBAAkC;AAChD,QAAO,cAAc,CAAC,QAAO,MAAK,EAAE,KAAK,WAAW,SAAS,CAAC;;;;aA/QnB;AAMvC,qBAAoB,KAAK,iBAAiB,QAAQ,iBAAiB;AA0HnE,aAAY,UAAU,KAAK"}
@@ -1,3 +1,3 @@
-import { a as validateRallyConfig, i as init_tracker_config, n as getLinearApiKey, r as getRallyConfig, t as getGitHubConfig } from "./tracker-config-
+import { a as validateRallyConfig, i as init_tracker_config, n as getLinearApiKey, r as getRallyConfig, t as getGitHubConfig } from "./tracker-config-e7ph1QqT.js";
 init_tracker_config();
 export { getGitHubConfig, getLinearApiKey, getRallyConfig, validateRallyConfig };
@@ -1,6 +1,6 @@
 import { n as __esmMin } from "./chunk-DORXReHP.js";
 import { i as loadConfig, r as init_config_yaml } from "./config-yaml-DSfYpzN6.js";
-import { g as loadProjectsConfig, p as init_projects, s as getIssuePrefix } from "./projects-
+import { g as loadProjectsConfig, p as init_projects, s as getIssuePrefix } from "./projects-CFVl4oHn.js";
 import { existsSync, readFileSync } from "fs";
 import { join } from "path";
 import { homedir } from "os";
@@ -131,4 +131,4 @@ var init_tracker_config = __esmMin((() => {
 //#endregion
 export { validateRallyConfig as a, init_tracker_config as i, getLinearApiKey as n, getRallyConfig as r, getGitHubConfig as t };
 
-//# sourceMappingURL=tracker-config-
+//# sourceMappingURL=tracker-config-e7ph1QqT.js.map
@@ -1 +1 @@
-
{"version":3,"file":"tracker-config-
+
{"version":3,"file":"tracker-config-e7ph1QqT.js","names":["loadYamlConfig"],"sources":["../../src/dashboard/server/services/tracker-config.ts"],"sourcesContent":["/**\n * Tracker Configuration Readers\n *\n * Extracted from server/index.ts for reuse by IssueDataService.\n * Priority: config.yaml (Settings page) > ~/.panopticon.env > environment variables\n */\n\nimport { readFileSync, existsSync } from 'fs';\nimport { join } from 'path';\nimport { homedir } from 'os';\nimport { loadConfig as loadYamlConfig } from '../../../lib/config-yaml.js';\nimport { loadProjectsConfig, getIssuePrefix } from '../../../lib/projects.js';\n\n// GitHub configuration\nexport interface GitHubConfig {\n token: string;\n repos: Array<{ owner: string; repo: string; prefix?: string }>;\n}\n\n// Rally configuration\nexport interface RallyConfig {\n apiKey: string;\n server?: string;\n workspace?: string;\n project?: string;\n}\n\n/**\n * Load Linear API key.\n * Priority: config.yaml > ~/.panopticon.env > env var\n */\nexport function getLinearApiKey(): string | null {\n // 1. Check config.yaml (Settings page)\n try {\n const yamlConfig = loadYamlConfig();\n if (yamlConfig.config.trackerKeys.linear) return yamlConfig.config.trackerKeys.linear;\n } catch { /* ignore */ }\n\n // 2. Check ~/.panopticon.env\n const envFile = join(homedir(), '.panopticon.env');\n if (existsSync(envFile)) {\n const content = readFileSync(envFile, 'utf-8');\n const match = content.match(/LINEAR_API_KEY=(.+)/);\n if (match) return match[1].trim();\n }\n\n // 3. Check environment variable\n return process.env.LINEAR_API_KEY || null;\n}\n\n/**\n * Load Rally configuration.\n * Priority: config.yaml > ~/.panopticon.env > env var\n */\nexport function getRallyConfig(): RallyConfig | null {\n let apiKey: string | undefined;\n let server: string | undefined;\n let workspace: string | undefined;\n let project: string | undefined;\n\n // 1. Check config.yaml (Settings page)\n try {\n const yamlConfig = loadYamlConfig();\n if (yamlConfig.config.trackerKeys.rally) apiKey = yamlConfig.config.trackerKeys.rally;\n } catch { /* ignore */ }\n\n // 2. Check ~/.panopticon.env (also get server/workspace/project from here)\n const envFile = join(homedir(), '.panopticon.env');\n if (existsSync(envFile)) {\n const content = readFileSync(envFile, 'utf-8');\n if (!apiKey) {\n const apiKeyMatch = content.match(/RALLY_API_KEY=(.+)/);\n if (apiKeyMatch) apiKey = apiKeyMatch[1].trim();\n }\n const serverMatch = content.match(/RALLY_SERVER=(.+)/);\n server = serverMatch?.[1].trim();\n const workspaceMatch = content.match(/RALLY_WORKSPACE=(.+)/);\n workspace = workspaceMatch?.[1].trim();\n const projectMatch = content.match(/RALLY_PROJECT=(.+)/);\n project = projectMatch?.[1].trim();\n }\n\n // 3. 
Check environment variable\n if (!apiKey) apiKey = process.env.RALLY_API_KEY;\n\n if (!apiKey) return null;\n return { apiKey, server, workspace, project };\n}\n\n/**\n * Validate Rally configuration and return warnings/errors.\n * Does not block functionality — only provides diagnostic info.\n */\nexport function validateRallyConfig(config: RallyConfig): {\n valid: boolean;\n warnings: string[];\n errors: string[];\n} {\n const warnings: string[] = [];\n const errors: string[] = [];\n\n if (!config.apiKey) {\n errors.push('RALLY_API_KEY is required');\n }\n\n if (!config.workspace) {\n warnings.push('RALLY_WORKSPACE not configured - queries may return unexpected results');\n }\n\n if (!config.project) {\n warnings.push('RALLY_PROJECT not configured - queries will search all projects');\n }\n\n return { valid: errors.length === 0, warnings, errors };\n}\n\n/**\n * Load GitHub configuration.\n * Priority: config.yaml > ~/.panopticon.env > env var\n */\nexport function getGitHubConfig(): GitHubConfig | null {\n let token: string | undefined;\n let repos: Array<{ owner: string; repo: string; prefix?: string }> = [];\n\n // 1. Check config.yaml (Settings page)\n try {\n const yamlConfig = loadYamlConfig();\n if (yamlConfig.config.trackerKeys.github) token = yamlConfig.config.trackerKeys.github;\n } catch { /* ignore */ }\n\n // 2. Check ~/.panopticon.env (also get repos from here)\n const envFile = join(homedir(), '.panopticon.env');\n if (existsSync(envFile)) {\n const content = readFileSync(envFile, 'utf-8');\n if (!token) {\n const tokenMatch = content.match(/GITHUB_TOKEN=(.+)/);\n if (tokenMatch) token = tokenMatch[1].trim();\n }\n\n const reposMatch = content.match(/GITHUB_REPOS=(.+)/);\n if (reposMatch) {\n repos = reposMatch[1].trim().split(',').map(r => {\n const [repoPath, prefix] = r.trim().split(':');\n const [owner, repo] = repoPath.split('/');\n return { owner, repo, prefix };\n }).filter(r => r.owner && r.repo);\n }\n }\n\n // 3. Check environment variable\n if (!token) token = process.env.GITHUB_TOKEN;\n\n // 4. Auto-derive repos from projects.yaml if none explicitly configured\n if (repos.length === 0) {\n try {\n const { projects } = loadProjectsConfig();\n for (const [, project] of Object.entries(projects)) {\n if (project.github_repo) {\n const [owner, repo] = project.github_repo.split('/');\n const prefix = getIssuePrefix(project) ? 
getIssuePrefix(project) : undefined;\n if (owner && repo) {\n repos.push({ owner, repo, prefix });\n }\n }\n }\n } catch { /* ignore — projects.yaml may not exist */ }\n }\n\n if (!token || repos.length === 0) return null;\n return { token, repos };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AA+BA,SAAgB,kBAAiC;AAE/C,KAAI;EACF,MAAM,aAAaA,YAAgB;AACnC,MAAI,WAAW,OAAO,YAAY,OAAQ,QAAO,WAAW,OAAO,YAAY;SACzE;CAGR,MAAM,UAAU,KAAK,SAAS,EAAE,kBAAkB;AAClD,KAAI,WAAW,QAAQ,EAAE;EAEvB,MAAM,QADU,aAAa,SAAS,QAAQ,CACxB,MAAM,sBAAsB;AAClD,MAAI,MAAO,QAAO,MAAM,GAAG,MAAM;;AAInC,QAAO,QAAQ,IAAI,kBAAkB;;;;;;AAOvC,SAAgB,iBAAqC;CACnD,IAAI;CACJ,IAAI;CACJ,IAAI;CACJ,IAAI;AAGJ,KAAI;EACF,MAAM,aAAaA,YAAgB;AACnC,MAAI,WAAW,OAAO,YAAY,MAAO,UAAS,WAAW,OAAO,YAAY;SAC1E;CAGR,MAAM,UAAU,KAAK,SAAS,EAAE,kBAAkB;AAClD,KAAI,WAAW,QAAQ,EAAE;EACvB,MAAM,UAAU,aAAa,SAAS,QAAQ;AAC9C,MAAI,CAAC,QAAQ;GACX,MAAM,cAAc,QAAQ,MAAM,qBAAqB;AACvD,OAAI,YAAa,UAAS,YAAY,GAAG,MAAM;;AAGjD,WADoB,QAAQ,MAAM,oBAAoB,GAC/B,GAAG,MAAM;AAEhC,cADuB,QAAQ,MAAM,uBAAuB,GAC/B,GAAG,MAAM;AAEtC,YADqB,QAAQ,MAAM,qBAAqB,GAC/B,GAAG,MAAM;;AAIpC,KAAI,CAAC,OAAQ,UAAS,QAAQ,IAAI;AAElC,KAAI,CAAC,OAAQ,QAAO;AACpB,QAAO;EAAE;EAAQ;EAAQ;EAAW;EAAS;;;;;;AAO/C,SAAgB,oBAAoB,QAIlC;CACA,MAAM,WAAqB,EAAE;CAC7B,MAAM,SAAmB,EAAE;AAE3B,KAAI,CAAC,OAAO,OACV,QAAO,KAAK,4BAA4B;AAG1C,KAAI,CAAC,OAAO,UACV,UAAS,KAAK,yEAAyE;AAGzF,KAAI,CAAC,OAAO,QACV,UAAS,KAAK,kEAAkE;AAGlF,QAAO;EAAE,OAAO,OAAO,WAAW;EAAG;EAAU;EAAQ;;;;;;AAOzD,SAAgB,kBAAuC;CACrD,IAAI;CACJ,IAAI,QAAiE,EAAE;AAGvE,KAAI;EACF,MAAM,aAAaA,YAAgB;AACnC,MAAI,WAAW,OAAO,YAAY,OAAQ,SAAQ,WAAW,OAAO,YAAY;SAC1E;CAGR,MAAM,UAAU,KAAK,SAAS,EAAE,kBAAkB;AAClD,KAAI,WAAW,QAAQ,EAAE;EACvB,MAAM,UAAU,aAAa,SAAS,QAAQ;AAC9C,MAAI,CAAC,OAAO;GACV,MAAM,aAAa,QAAQ,MAAM,oBAAoB;AACrD,OAAI,WAAY,SAAQ,WAAW,GAAG,MAAM;;EAG9C,MAAM,aAAa,QAAQ,MAAM,oBAAoB;AACrD,MAAI,WACF,SAAQ,WAAW,GAAG,MAAM,CAAC,MAAM,IAAI,CAAC,KAAI,MAAK;GAC/C,MAAM,CAAC,UAAU,UAAU,EAAE,MAAM,CAAC,MAAM,IAAI;GAC9C,MAAM,CAAC,OAAO,QAAQ,SAAS,MAAM,IAAI;AACzC,UAAO;IAAE;IAAO;IAAM;IAAQ;IAC9B,CAAC,QAAO,MAAK,EAAE,SAAS,EAAE,KAAK;;AAKrC,KAAI,CAAC,MAAO,SAAQ,QAAQ,IAAI;AAGhC,KAAI,MAAM,WAAW,EACnB,KAAI;EACF,MAAM,EAAE,aAAa,oBAAoB;AACzC,OAAK,MAAM,GAAG,YAAY,OAAO,QAAQ,SAAS,CAChD,KAAI,QAAQ,aAAa;GACvB,MAAM,CAAC,OAAO,QAAQ,QAAQ,YAAY,MAAM,IAAI;GACpD,MAAM,SAAS,eAAe,QAAQ,GAAG,eAAe,QAAQ,GAAG,KAAA;AACnE,OAAI,SAAS,KACX,OAAM,KAAK;IAAE;IAAO;IAAM;IAAQ,CAAC;;SAInC;AAGV,KAAI,CAAC,SAAS,MAAM,WAAW,EAAG,QAAO;AACzC,QAAO;EAAE;EAAO;EAAO;;;mBA/JkD;gBACG"}
@@ -1,5 +1,5 @@
 import { n as __esmMin } from "./chunk-DORXReHP.js";
-import { n as init_workspace_config, r as replacePlaceholders } from "./workspace-config-
+import { n as init_workspace_config, r as replacePlaceholders } from "./workspace-config-DVDR-Ukh.js";
 import { readFileSync } from "fs";
 import { resolve } from "path";
 import { homedir } from "os";
@@ -204,4 +204,4 @@ var init_tunnel = __esmMin((() => {
 //#endregion
 export { init_tunnel as n, removeTunnelIngress as r, addTunnelIngress as t };
 
-//# sourceMappingURL=tunnel-
+//# sourceMappingURL=tunnel-DldbBPWL.js.map
@@ -1 +1 @@
-
{"version":3,"file":"tunnel-Dub2hiAA.js","names":[],"sources":["../../src/lib/tunnel.ts"],"sourcesContent":["/**\n * Cloudflare Tunnel Management\n *\n * Manages tunnel ingress rules and DNS CNAME records for workspace lifecycle.\n * Called during workspace create (addTunnelIngress) and workspace remove/deep-wipe (removeTunnelIngress).\n */\n\nimport { readFileSync } from 'fs';\nimport { resolve } from 'path';\nimport { homedir } from 'os';\nimport { TunnelConfig, TunnelHostname, TemplatePlaceholders, replacePlaceholders } from './workspace-config.js';\n\nexport interface TunnelResult {\n success: boolean;\n steps: string[];\n}\n\ninterface CloudflareCredentials {\n apiToken: string;\n accountId: string;\n zoneId: string;\n}\n\ninterface CloudflareIngressRule {\n service: string;\n hostname?: string;\n originRequest?: Record<string, unknown>;\n}\n\ninterface CloudflareTunnelConfig {\n config: {\n ingress: CloudflareIngressRule[];\n };\n}\n\nconst CF_API = 'https://api.cloudflare.com/client/v4';\nconst FETCH_TIMEOUT = 10_000;\n\n/**\n * Read API token from Cloudflare cert.pem file.\n * The cert.pem contains a PEM-wrapped base64 JSON blob with { zoneID, accountID, apiToken }.\n */\nfunction readCloudflareCredentials(certPath: string): CloudflareCredentials | null {\n try {\n const resolvedPath = certPath.replace(/^~/, homedir());\n const pem = readFileSync(resolve(resolvedPath), 'utf-8');\n // Strip PEM headers/trailers and decode\n const b64 = pem\n .split('\\n')\n .filter(line => !line.startsWith('-----'))\n .join('');\n const json = JSON.parse(Buffer.from(b64, 'base64').toString('utf-8'));\n return {\n apiToken: json.apiToken,\n accountId: json.accountID,\n zoneId: json.zoneID,\n };\n } catch (err) {\n return null;\n }\n}\n\n/**\n * Make an authenticated Cloudflare API request.\n */\nasync function cfFetch(\n path: string,\n apiToken: string,\n method: 'GET' | 'POST' | 'PUT' | 'DELETE' = 'GET',\n body?: unknown,\n): Promise<{ ok: boolean; data: any; errors?: any[] }> {\n const controller = new AbortController();\n const timeout = setTimeout(() => controller.abort(), FETCH_TIMEOUT);\n\n try {\n const resp = await fetch(`${CF_API}${path}`, {\n method,\n headers: {\n Authorization: `Bearer ${apiToken}`,\n 'Content-Type': 'application/json',\n },\n body: body ? JSON.stringify(body) : undefined,\n signal: controller.signal,\n });\n const json = await resp.json() as any;\n return { ok: json.success !== false, data: json.result, errors: json.errors };\n } catch (err: any) {\n return { ok: false, data: null, errors: [{ message: err.message }] };\n } finally {\n clearTimeout(timeout);\n }\n}\n\n/**\n * Resolve hostnames from config, replacing template placeholders.\n */\nfunction resolveHostnames(\n hostnames: TunnelHostname[],\n placeholders: TemplatePlaceholders,\n): Array<{ hostname: string; httpHostHeader?: string; noTlsVerify: boolean }> {\n return hostnames.map(h => ({\n hostname: replacePlaceholders(h.pattern, placeholders),\n httpHostHeader: h.http_host_header ? 
replacePlaceholders(h.http_host_header, placeholders) : undefined,\n noTlsVerify: h.no_tls_verify !== false, // default true\n }));\n}\n\n/**\n * Add tunnel ingress rules and DNS CNAME records for a workspace.\n * Called during workspace creation.\n */\nexport async function addTunnelIngress(\n config: TunnelConfig,\n placeholders: TemplatePlaceholders,\n): Promise<TunnelResult> {\n const steps: string[] = [];\n let allOk = true;\n\n // Read credentials\n const creds = readCloudflareCredentials(config.credentials_file);\n if (!creds) {\n return { success: false, steps: ['[tunnel] Failed to read Cloudflare credentials from ' + config.credentials_file] };\n }\n steps.push('[tunnel] Read Cloudflare credentials');\n\n const resolved = resolveHostnames(config.hostnames, placeholders);\n\n // Get current tunnel configuration\n const tunnelPath = `/accounts/${creds.accountId}/cfd_tunnel/${config.tunnel_id}/configurations`;\n const current = await cfFetch(tunnelPath, creds.apiToken);\n if (!current.ok) {\n return { success: false, steps: [...steps, `[tunnel] Failed to get tunnel config: ${JSON.stringify(current.errors)}`] };\n }\n\n const tunnelConfig: CloudflareTunnelConfig = current.data;\n const ingress: CloudflareIngressRule[] = tunnelConfig.config?.ingress || [];\n steps.push(`[tunnel] Current tunnel config has ${ingress.length} ingress rules`);\n\n // Add new ingress rules (before the catch-all)\n let modified = false;\n for (const h of resolved) {\n // Skip if rule already exists\n if (ingress.some(r => r.hostname === h.hostname)) {\n steps.push(`[tunnel] Ingress rule for ${h.hostname} already exists, skipping`);\n continue;\n }\n\n const originRequest: Record<string, unknown> = {};\n if (h.noTlsVerify) originRequest.noTLSVerify = true;\n if (h.httpHostHeader) originRequest.httpHostHeader = h.httpHostHeader;\n\n const newRule: CloudflareIngressRule = {\n service: config.service_target,\n hostname: h.hostname,\n originRequest: Object.keys(originRequest).length > 0 ? 
originRequest : undefined,\n };\n\n // Insert before the last rule (catch-all has no hostname)\n const catchAllIdx = ingress.findIndex(r => !r.hostname);\n if (catchAllIdx >= 0) {\n ingress.splice(catchAllIdx, 0, newRule);\n } else {\n ingress.push(newRule);\n }\n modified = true;\n steps.push(`[tunnel] Added ingress rule for ${h.hostname}`);\n }\n\n // Push updated tunnel config\n if (modified) {\n const putResult = await cfFetch(tunnelPath, creds.apiToken, 'PUT', {\n config: { ingress },\n });\n if (!putResult.ok) {\n steps.push(`[tunnel] Failed to update tunnel config: ${JSON.stringify(putResult.errors)}`);\n allOk = false;\n } else {\n steps.push('[tunnel] Updated tunnel ingress configuration');\n }\n }\n\n // Create DNS CNAME records\n for (const h of resolved) {\n const dnsResult = await cfFetch(\n `/zones/${creds.zoneId}/dns_records`,\n creds.apiToken,\n 'POST',\n {\n type: 'CNAME',\n name: h.hostname,\n content: `${config.tunnel_id}.cfargotunnel.com`,\n proxied: true,\n },\n );\n if (!dnsResult.ok) {\n const errMsg = dnsResult.errors?.map((e: any) => e.message).join(', ') || 'unknown error';\n // Record already exists is not a failure\n if (errMsg.includes('already exists') || errMsg.includes('already been taken')) {\n steps.push(`[tunnel] DNS CNAME for ${h.hostname} already exists`);\n } else {\n steps.push(`[tunnel] Failed to create DNS CNAME for ${h.hostname}: ${errMsg}`);\n allOk = false;\n }\n } else {\n steps.push(`[tunnel] Created DNS CNAME: ${h.hostname} → ${config.tunnel_id}.cfargotunnel.com`);\n }\n }\n\n return { success: allOk, steps };\n}\n\n/**\n * Remove tunnel ingress rules and DNS CNAME records for a workspace.\n * Called during workspace removal and deep-wipe.\n */\nexport async function removeTunnelIngress(\n config: TunnelConfig,\n placeholders: TemplatePlaceholders,\n): Promise<TunnelResult> {\n const steps: string[] = [];\n let allOk = true;\n\n // Read credentials\n const creds = readCloudflareCredentials(config.credentials_file);\n if (!creds) {\n return { success: false, steps: ['[tunnel] Failed to read Cloudflare credentials from ' + config.credentials_file] };\n }\n steps.push('[tunnel] Read Cloudflare credentials');\n\n const resolved = resolveHostnames(config.hostnames, placeholders);\n const hostnameSet = new Set(resolved.map(h => h.hostname));\n\n // Get current tunnel configuration\n const tunnelPath = `/accounts/${creds.accountId}/cfd_tunnel/${config.tunnel_id}/configurations`;\n const current = await cfFetch(tunnelPath, creds.apiToken);\n if (!current.ok) {\n steps.push(`[tunnel] Failed to get tunnel config: ${JSON.stringify(current.errors)}`);\n // Continue to attempt DNS cleanup even if tunnel config read fails\n allOk = false;\n } else {\n const tunnelConfig: CloudflareTunnelConfig = current.data;\n const ingress: CloudflareIngressRule[] = tunnelConfig.config?.ingress || [];\n const originalCount = ingress.length;\n\n // Filter out matching ingress rules\n const filtered = ingress.filter(r => !r.hostname || !hostnameSet.has(r.hostname));\n\n if (filtered.length < originalCount) {\n const putResult = await cfFetch(tunnelPath, creds.apiToken, 'PUT', {\n config: { ingress: filtered },\n });\n if (!putResult.ok) {\n steps.push(`[tunnel] Failed to update tunnel config: ${JSON.stringify(putResult.errors)}`);\n allOk = false;\n } else {\n steps.push(`[tunnel] Removed ${originalCount - filtered.length} ingress rule(s)`);\n }\n } else {\n steps.push('[tunnel] No matching ingress rules found to remove');\n }\n }\n\n // Remove DNS CNAME records\n for 
(const h of resolved) {\n // Find the DNS record\n const listResult = await cfFetch(\n `/zones/${creds.zoneId}/dns_records?name=${encodeURIComponent(h.hostname)}&type=CNAME`,\n creds.apiToken,\n );\n if (!listResult.ok) {\n steps.push(`[tunnel] Failed to look up DNS record for ${h.hostname}: ${JSON.stringify(listResult.errors)}`);\n allOk = false;\n continue;\n }\n\n const records = Array.isArray(listResult.data) ? listResult.data : [];\n if (records.length === 0) {\n steps.push(`[tunnel] No DNS CNAME record found for ${h.hostname}`);\n continue;\n }\n\n for (const record of records) {\n const delResult = await cfFetch(\n `/zones/${creds.zoneId}/dns_records/${record.id}`,\n creds.apiToken,\n 'DELETE',\n );\n if (!delResult.ok) {\n steps.push(`[tunnel] Failed to delete DNS record ${record.id} for ${h.hostname}: ${JSON.stringify(delResult.errors)}`);\n allOk = false;\n } else {\n steps.push(`[tunnel] Deleted DNS CNAME for ${h.hostname}`);\n }\n }\n }\n\n return { success: allOk, steps };\n}\n"],"mappings":";;;;;;;;;;;;;;;;AA0CA,SAAS,0BAA0B,UAAgD;AACjF,KAAI;EAIF,MAAM,MAFM,aAAa,QADJ,SAAS,QAAQ,MAAM,SAAS,CAAC,CACR,EAAE,QAAQ,CAGrD,MAAM,KAAK,CACX,QAAO,SAAQ,CAAC,KAAK,WAAW,QAAQ,CAAC,CACzC,KAAK,GAAG;EACX,MAAM,OAAO,KAAK,MAAM,OAAO,KAAK,KAAK,SAAS,CAAC,SAAS,QAAQ,CAAC;AACrE,SAAO;GACL,UAAU,KAAK;GACf,WAAW,KAAK;GAChB,QAAQ,KAAK;GACd;UACM,KAAK;AACZ,SAAO;;;;;;AAOX,eAAe,QACb,MACA,UACA,SAA4C,OAC5C,MACqD;CACrD,MAAM,aAAa,IAAI,iBAAiB;CACxC,MAAM,UAAU,iBAAiB,WAAW,OAAO,EAAE,cAAc;AAEnE,KAAI;EAUF,MAAM,OAAO,OATA,MAAM,MAAM,GAAG,SAAS,QAAQ;GAC3C;GACA,SAAS;IACP,eAAe,UAAU;IACzB,gBAAgB;IACjB;GACD,MAAM,OAAO,KAAK,UAAU,KAAK,GAAG,KAAA;GACpC,QAAQ,WAAW;GACpB,CAAC,EACsB,MAAM;AAC9B,SAAO;GAAE,IAAI,KAAK,YAAY;GAAO,MAAM,KAAK;GAAQ,QAAQ,KAAK;GAAQ;UACtE,KAAU;AACjB,SAAO;GAAE,IAAI;GAAO,MAAM;GAAM,QAAQ,CAAC,EAAE,SAAS,IAAI,SAAS,CAAC;GAAE;WAC5D;AACR,eAAa,QAAQ;;;;;;AAOzB,SAAS,iBACP,WACA,cAC4E;AAC5E,QAAO,UAAU,KAAI,OAAM;EACzB,UAAU,oBAAoB,EAAE,SAAS,aAAa;EACtD,gBAAgB,EAAE,mBAAmB,oBAAoB,EAAE,kBAAkB,aAAa,GAAG,KAAA;EAC7F,aAAa,EAAE,kBAAkB;EAClC,EAAE;;;;;;AAOL,eAAsB,iBACpB,QACA,cACuB;CACvB,MAAM,QAAkB,EAAE;CAC1B,IAAI,QAAQ;CAGZ,MAAM,QAAQ,0BAA0B,OAAO,iBAAiB;AAChE,KAAI,CAAC,MACH,QAAO;EAAE,SAAS;EAAO,OAAO,CAAC,yDAAyD,OAAO,iBAAiB;EAAE;AAEtH,OAAM,KAAK,uCAAuC;CAElD,MAAM,WAAW,iBAAiB,OAAO,WAAW,aAAa;CAGjE,MAAM,aAAa,aAAa,MAAM,UAAU,cAAc,OAAO,UAAU;CAC/E,MAAM,UAAU,MAAM,QAAQ,YAAY,MAAM,SAAS;AACzD,KAAI,CAAC,QAAQ,GACX,QAAO;EAAE,SAAS;EAAO,OAAO,CAAC,GAAG,OAAO,yCAAyC,KAAK,UAAU,QAAQ,OAAO,GAAG;EAAE;CAIzH,MAAM,UADuC,QAAQ,KACC,QAAQ,WAAW,EAAE;AAC3E,OAAM,KAAK,sCAAsC,QAAQ,OAAO,gBAAgB;CAGhF,IAAI,WAAW;AACf,MAAK,MAAM,KAAK,UAAU;AAExB,MAAI,QAAQ,MAAK,MAAK,EAAE,aAAa,EAAE,SAAS,EAAE;AAChD,SAAM,KAAK,6BAA6B,EAAE,SAAS,2BAA2B;AAC9E;;EAGF,MAAM,gBAAyC,EAAE;AACjD,MAAI,EAAE,YAAa,eAAc,cAAc;AAC/C,MAAI,EAAE,eAAgB,eAAc,iBAAiB,EAAE;EAEvD,MAAM,UAAiC;GACrC,SAAS,OAAO;GAChB,UAAU,EAAE;GACZ,eAAe,OAAO,KAAK,cAAc,CAAC,SAAS,IAAI,gBAAgB,KAAA;GACxE;EAGD,MAAM,cAAc,QAAQ,WAAU,MAAK,CAAC,EAAE,SAAS;AACvD,MAAI,eAAe,EACjB,SAAQ,OAAO,aAAa,GAAG,QAAQ;MAEvC,SAAQ,KAAK,QAAQ;AAEvB,aAAW;AACX,QAAM,KAAK,mCAAmC,EAAE,WAAW;;AAI7D,KAAI,UAAU;EACZ,MAAM,YAAY,MAAM,QAAQ,YAAY,MAAM,UAAU,OAAO,EACjE,QAAQ,EAAE,SAAS,EACpB,CAAC;AACF,MAAI,CAAC,UAAU,IAAI;AACjB,SAAM,KAAK,4CAA4C,KAAK,UAAU,UAAU,OAAO,GAAG;AAC1F,WAAQ;QAER,OAAM,KAAK,gDAAgD;;AAK/D,MAAK,MAAM,KAAK,UAAU;EACxB,MAAM,YAAY,MAAM,QACtB,UAAU,MAAM,OAAO,eACvB,MAAM,UACN,QACA;GACE,MAAM;GACN,MAAM,EAAE;GACR,SAAS,GAAG,OAAO,UAAU;GAC7B,SAAS;GACV,CACF;AACD,MAAI,CAAC,UAAU,IAAI;GACjB,MAAM,SAAS,UAAU,QAAQ,KAAK,MAAW,EAAE,QAAQ,CAAC,KAAK,KAAK,IAAI;AAE1E,OAAI,OAAO,SAAS,iBAAiB,IAAI,OAAO,SAAS,qBAAqB,CAC5E,OAAM,KAAK,0BAA0B,EAAE,
SAAS,iBAAiB;QAC5D;AACL,UAAM,KAAK,2CAA2C,EAAE,SAAS,IAAI,SAAS;AAC9E,YAAQ;;QAGV,OAAM,KAAK,+BAA+B,EAAE,SAAS,KAAK,OAAO,UAAU,mBAAmB;;AAIlG,QAAO;EAAE,SAAS;EAAO;EAAO;;;;;;AAOlC,eAAsB,oBACpB,QACA,cACuB;CACvB,MAAM,QAAkB,EAAE;CAC1B,IAAI,QAAQ;CAGZ,MAAM,QAAQ,0BAA0B,OAAO,iBAAiB;AAChE,KAAI,CAAC,MACH,QAAO;EAAE,SAAS;EAAO,OAAO,CAAC,yDAAyD,OAAO,iBAAiB;EAAE;AAEtH,OAAM,KAAK,uCAAuC;CAElD,MAAM,WAAW,iBAAiB,OAAO,WAAW,aAAa;CACjE,MAAM,cAAc,IAAI,IAAI,SAAS,KAAI,MAAK,EAAE,SAAS,CAAC;CAG1D,MAAM,aAAa,aAAa,MAAM,UAAU,cAAc,OAAO,UAAU;CAC/E,MAAM,UAAU,MAAM,QAAQ,YAAY,MAAM,SAAS;AACzD,KAAI,CAAC,QAAQ,IAAI;AACf,QAAM,KAAK,yCAAyC,KAAK,UAAU,QAAQ,OAAO,GAAG;AAErF,UAAQ;QACH;EAEL,MAAM,UADuC,QAAQ,KACC,QAAQ,WAAW,EAAE;EAC3E,MAAM,gBAAgB,QAAQ;EAG9B,MAAM,WAAW,QAAQ,QAAO,MAAK,CAAC,EAAE,YAAY,CAAC,YAAY,IAAI,EAAE,SAAS,CAAC;AAEjF,MAAI,SAAS,SAAS,eAAe;GACnC,MAAM,YAAY,MAAM,QAAQ,YAAY,MAAM,UAAU,OAAO,EACjE,QAAQ,EAAE,SAAS,UAAU,EAC9B,CAAC;AACF,OAAI,CAAC,UAAU,IAAI;AACjB,UAAM,KAAK,4CAA4C,KAAK,UAAU,UAAU,OAAO,GAAG;AAC1F,YAAQ;SAER,OAAM,KAAK,oBAAoB,gBAAgB,SAAS,OAAO,kBAAkB;QAGnF,OAAM,KAAK,qDAAqD;;AAKpE,MAAK,MAAM,KAAK,UAAU;EAExB,MAAM,aAAa,MAAM,QACvB,UAAU,MAAM,OAAO,oBAAoB,mBAAmB,EAAE,SAAS,CAAC,cAC1E,MAAM,SACP;AACD,MAAI,CAAC,WAAW,IAAI;AAClB,SAAM,KAAK,6CAA6C,EAAE,SAAS,IAAI,KAAK,UAAU,WAAW,OAAO,GAAG;AAC3G,WAAQ;AACR;;EAGF,MAAM,UAAU,MAAM,QAAQ,WAAW,KAAK,GAAG,WAAW,OAAO,EAAE;AACrE,MAAI,QAAQ,WAAW,GAAG;AACxB,SAAM,KAAK,0CAA0C,EAAE,WAAW;AAClE;;AAGF,OAAK,MAAM,UAAU,SAAS;GAC5B,MAAM,YAAY,MAAM,QACtB,UAAU,MAAM,OAAO,eAAe,OAAO,MAC7C,MAAM,UACN,SACD;AACD,OAAI,CAAC,UAAU,IAAI;AACjB,UAAM,KAAK,wCAAwC,OAAO,GAAG,OAAO,EAAE,SAAS,IAAI,KAAK,UAAU,UAAU,OAAO,GAAG;AACtH,YAAQ;SAER,OAAM,KAAK,kCAAkC,EAAE,WAAW;;;AAKhE,QAAO;EAAE,SAAS;EAAO;EAAO;;;;wBA9R8E;AAyB1G,UAAS;AACT,iBAAgB"}
+
{"version":3,"file":"tunnel-DldbBPWL.js","names":[],"sources":["../../src/lib/tunnel.ts"],"sourcesContent":["/**\n * Cloudflare Tunnel Management\n *\n * Manages tunnel ingress rules and DNS CNAME records for workspace lifecycle.\n * Called during workspace create (addTunnelIngress) and workspace remove/deep-wipe (removeTunnelIngress).\n */\n\nimport { readFileSync } from 'fs';\nimport { resolve } from 'path';\nimport { homedir } from 'os';\nimport { TunnelConfig, TunnelHostname, TemplatePlaceholders, replacePlaceholders } from './workspace-config.js';\n\nexport interface TunnelResult {\n success: boolean;\n steps: string[];\n}\n\ninterface CloudflareCredentials {\n apiToken: string;\n accountId: string;\n zoneId: string;\n}\n\ninterface CloudflareIngressRule {\n service: string;\n hostname?: string;\n originRequest?: Record<string, unknown>;\n}\n\ninterface CloudflareTunnelConfig {\n config: {\n ingress: CloudflareIngressRule[];\n };\n}\n\nconst CF_API = 'https://api.cloudflare.com/client/v4';\nconst FETCH_TIMEOUT = 10_000;\n\n/**\n * Read API token from Cloudflare cert.pem file.\n * The cert.pem contains a PEM-wrapped base64 JSON blob with { zoneID, accountID, apiToken }.\n */\nfunction readCloudflareCredentials(certPath: string): CloudflareCredentials | null {\n try {\n const resolvedPath = certPath.replace(/^~/, homedir());\n const pem = readFileSync(resolve(resolvedPath), 'utf-8');\n // Strip PEM headers/trailers and decode\n const b64 = pem\n .split('\\n')\n .filter(line => !line.startsWith('-----'))\n .join('');\n const json = JSON.parse(Buffer.from(b64, 'base64').toString('utf-8'));\n return {\n apiToken: json.apiToken,\n accountId: json.accountID,\n zoneId: json.zoneID,\n };\n } catch (err) {\n return null;\n }\n}\n\n/**\n * Make an authenticated Cloudflare API request.\n */\nasync function cfFetch(\n path: string,\n apiToken: string,\n method: 'GET' | 'POST' | 'PUT' | 'DELETE' = 'GET',\n body?: unknown,\n): Promise<{ ok: boolean; data: any; errors?: any[] }> {\n const controller = new AbortController();\n const timeout = setTimeout(() => controller.abort(), FETCH_TIMEOUT);\n\n try {\n const resp = await fetch(`${CF_API}${path}`, {\n method,\n headers: {\n Authorization: `Bearer ${apiToken}`,\n 'Content-Type': 'application/json',\n },\n body: body ? JSON.stringify(body) : undefined,\n signal: controller.signal,\n });\n const json = await resp.json() as any;\n return { ok: json.success !== false, data: json.result, errors: json.errors };\n } catch (err: any) {\n return { ok: false, data: null, errors: [{ message: err.message }] };\n } finally {\n clearTimeout(timeout);\n }\n}\n\n/**\n * Resolve hostnames from config, replacing template placeholders.\n */\nfunction resolveHostnames(\n hostnames: TunnelHostname[],\n placeholders: TemplatePlaceholders,\n): Array<{ hostname: string; httpHostHeader?: string; noTlsVerify: boolean }> {\n return hostnames.map(h => ({\n hostname: replacePlaceholders(h.pattern, placeholders),\n httpHostHeader: h.http_host_header ? 
replacePlaceholders(h.http_host_header, placeholders) : undefined,\n noTlsVerify: h.no_tls_verify !== false, // default true\n }));\n}\n\n/**\n * Add tunnel ingress rules and DNS CNAME records for a workspace.\n * Called during workspace creation.\n */\nexport async function addTunnelIngress(\n config: TunnelConfig,\n placeholders: TemplatePlaceholders,\n): Promise<TunnelResult> {\n const steps: string[] = [];\n let allOk = true;\n\n // Read credentials\n const creds = readCloudflareCredentials(config.credentials_file);\n if (!creds) {\n return { success: false, steps: ['[tunnel] Failed to read Cloudflare credentials from ' + config.credentials_file] };\n }\n steps.push('[tunnel] Read Cloudflare credentials');\n\n const resolved = resolveHostnames(config.hostnames, placeholders);\n\n // Get current tunnel configuration\n const tunnelPath = `/accounts/${creds.accountId}/cfd_tunnel/${config.tunnel_id}/configurations`;\n const current = await cfFetch(tunnelPath, creds.apiToken);\n if (!current.ok) {\n return { success: false, steps: [...steps, `[tunnel] Failed to get tunnel config: ${JSON.stringify(current.errors)}`] };\n }\n\n const tunnelConfig: CloudflareTunnelConfig = current.data;\n const ingress: CloudflareIngressRule[] = tunnelConfig.config?.ingress || [];\n steps.push(`[tunnel] Current tunnel config has ${ingress.length} ingress rules`);\n\n // Add new ingress rules (before the catch-all)\n let modified = false;\n for (const h of resolved) {\n // Skip if rule already exists\n if (ingress.some(r => r.hostname === h.hostname)) {\n steps.push(`[tunnel] Ingress rule for ${h.hostname} already exists, skipping`);\n continue;\n }\n\n const originRequest: Record<string, unknown> = {};\n if (h.noTlsVerify) originRequest.noTLSVerify = true;\n if (h.httpHostHeader) originRequest.httpHostHeader = h.httpHostHeader;\n\n const newRule: CloudflareIngressRule = {\n service: config.service_target,\n hostname: h.hostname,\n originRequest: Object.keys(originRequest).length > 0 ? 
originRequest : undefined,\n };\n\n // Insert before the last rule (catch-all has no hostname)\n const catchAllIdx = ingress.findIndex(r => !r.hostname);\n if (catchAllIdx >= 0) {\n ingress.splice(catchAllIdx, 0, newRule);\n } else {\n ingress.push(newRule);\n }\n modified = true;\n steps.push(`[tunnel] Added ingress rule for ${h.hostname}`);\n }\n\n // Push updated tunnel config\n if (modified) {\n const putResult = await cfFetch(tunnelPath, creds.apiToken, 'PUT', {\n config: { ingress },\n });\n if (!putResult.ok) {\n steps.push(`[tunnel] Failed to update tunnel config: ${JSON.stringify(putResult.errors)}`);\n allOk = false;\n } else {\n steps.push('[tunnel] Updated tunnel ingress configuration');\n }\n }\n\n // Create DNS CNAME records\n for (const h of resolved) {\n const dnsResult = await cfFetch(\n `/zones/${creds.zoneId}/dns_records`,\n creds.apiToken,\n 'POST',\n {\n type: 'CNAME',\n name: h.hostname,\n content: `${config.tunnel_id}.cfargotunnel.com`,\n proxied: true,\n },\n );\n if (!dnsResult.ok) {\n const errMsg = dnsResult.errors?.map((e: any) => e.message).join(', ') || 'unknown error';\n // Record already exists is not a failure\n if (errMsg.includes('already exists') || errMsg.includes('already been taken')) {\n steps.push(`[tunnel] DNS CNAME for ${h.hostname} already exists`);\n } else {\n steps.push(`[tunnel] Failed to create DNS CNAME for ${h.hostname}: ${errMsg}`);\n allOk = false;\n }\n } else {\n steps.push(`[tunnel] Created DNS CNAME: ${h.hostname} → ${config.tunnel_id}.cfargotunnel.com`);\n }\n }\n\n return { success: allOk, steps };\n}\n\n/**\n * Remove tunnel ingress rules and DNS CNAME records for a workspace.\n * Called during workspace removal and deep-wipe.\n */\nexport async function removeTunnelIngress(\n config: TunnelConfig,\n placeholders: TemplatePlaceholders,\n): Promise<TunnelResult> {\n const steps: string[] = [];\n let allOk = true;\n\n // Read credentials\n const creds = readCloudflareCredentials(config.credentials_file);\n if (!creds) {\n return { success: false, steps: ['[tunnel] Failed to read Cloudflare credentials from ' + config.credentials_file] };\n }\n steps.push('[tunnel] Read Cloudflare credentials');\n\n const resolved = resolveHostnames(config.hostnames, placeholders);\n const hostnameSet = new Set(resolved.map(h => h.hostname));\n\n // Get current tunnel configuration\n const tunnelPath = `/accounts/${creds.accountId}/cfd_tunnel/${config.tunnel_id}/configurations`;\n const current = await cfFetch(tunnelPath, creds.apiToken);\n if (!current.ok) {\n steps.push(`[tunnel] Failed to get tunnel config: ${JSON.stringify(current.errors)}`);\n // Continue to attempt DNS cleanup even if tunnel config read fails\n allOk = false;\n } else {\n const tunnelConfig: CloudflareTunnelConfig = current.data;\n const ingress: CloudflareIngressRule[] = tunnelConfig.config?.ingress || [];\n const originalCount = ingress.length;\n\n // Filter out matching ingress rules\n const filtered = ingress.filter(r => !r.hostname || !hostnameSet.has(r.hostname));\n\n if (filtered.length < originalCount) {\n const putResult = await cfFetch(tunnelPath, creds.apiToken, 'PUT', {\n config: { ingress: filtered },\n });\n if (!putResult.ok) {\n steps.push(`[tunnel] Failed to update tunnel config: ${JSON.stringify(putResult.errors)}`);\n allOk = false;\n } else {\n steps.push(`[tunnel] Removed ${originalCount - filtered.length} ingress rule(s)`);\n }\n } else {\n steps.push('[tunnel] No matching ingress rules found to remove');\n }\n }\n\n // Remove DNS CNAME records\n for 
(const h of resolved) {\n // Find the DNS record\n const listResult = await cfFetch(\n `/zones/${creds.zoneId}/dns_records?name=${encodeURIComponent(h.hostname)}&type=CNAME`,\n creds.apiToken,\n );\n if (!listResult.ok) {\n steps.push(`[tunnel] Failed to look up DNS record for ${h.hostname}: ${JSON.stringify(listResult.errors)}`);\n allOk = false;\n continue;\n }\n\n const records = Array.isArray(listResult.data) ? listResult.data : [];\n if (records.length === 0) {\n steps.push(`[tunnel] No DNS CNAME record found for ${h.hostname}`);\n continue;\n }\n\n for (const record of records) {\n const delResult = await cfFetch(\n `/zones/${creds.zoneId}/dns_records/${record.id}`,\n creds.apiToken,\n 'DELETE',\n );\n if (!delResult.ok) {\n steps.push(`[tunnel] Failed to delete DNS record ${record.id} for ${h.hostname}: ${JSON.stringify(delResult.errors)}`);\n allOk = false;\n } else {\n steps.push(`[tunnel] Deleted DNS CNAME for ${h.hostname}`);\n }\n }\n }\n\n return { success: allOk, steps };\n}\n"],"mappings":";;;;;;;;;;;;;;;;AA0CA,SAAS,0BAA0B,UAAgD;AACjF,KAAI;EAIF,MAAM,MAFM,aAAa,QADJ,SAAS,QAAQ,MAAM,SAAS,CAAC,CACR,EAAE,QAAQ,CAGrD,MAAM,KAAK,CACX,QAAO,SAAQ,CAAC,KAAK,WAAW,QAAQ,CAAC,CACzC,KAAK,GAAG;EACX,MAAM,OAAO,KAAK,MAAM,OAAO,KAAK,KAAK,SAAS,CAAC,SAAS,QAAQ,CAAC;AACrE,SAAO;GACL,UAAU,KAAK;GACf,WAAW,KAAK;GAChB,QAAQ,KAAK;GACd;UACM,KAAK;AACZ,SAAO;;;;;;AAOX,eAAe,QACb,MACA,UACA,SAA4C,OAC5C,MACqD;CACrD,MAAM,aAAa,IAAI,iBAAiB;CACxC,MAAM,UAAU,iBAAiB,WAAW,OAAO,EAAE,cAAc;AAEnE,KAAI;EAUF,MAAM,OAAO,OATA,MAAM,MAAM,GAAG,SAAS,QAAQ;GAC3C;GACA,SAAS;IACP,eAAe,UAAU;IACzB,gBAAgB;IACjB;GACD,MAAM,OAAO,KAAK,UAAU,KAAK,GAAG,KAAA;GACpC,QAAQ,WAAW;GACpB,CAAC,EACsB,MAAM;AAC9B,SAAO;GAAE,IAAI,KAAK,YAAY;GAAO,MAAM,KAAK;GAAQ,QAAQ,KAAK;GAAQ;UACtE,KAAU;AACjB,SAAO;GAAE,IAAI;GAAO,MAAM;GAAM,QAAQ,CAAC,EAAE,SAAS,IAAI,SAAS,CAAC;GAAE;WAC5D;AACR,eAAa,QAAQ;;;;;;AAOzB,SAAS,iBACP,WACA,cAC4E;AAC5E,QAAO,UAAU,KAAI,OAAM;EACzB,UAAU,oBAAoB,EAAE,SAAS,aAAa;EACtD,gBAAgB,EAAE,mBAAmB,oBAAoB,EAAE,kBAAkB,aAAa,GAAG,KAAA;EAC7F,aAAa,EAAE,kBAAkB;EAClC,EAAE;;;;;;AAOL,eAAsB,iBACpB,QACA,cACuB;CACvB,MAAM,QAAkB,EAAE;CAC1B,IAAI,QAAQ;CAGZ,MAAM,QAAQ,0BAA0B,OAAO,iBAAiB;AAChE,KAAI,CAAC,MACH,QAAO;EAAE,SAAS;EAAO,OAAO,CAAC,yDAAyD,OAAO,iBAAiB;EAAE;AAEtH,OAAM,KAAK,uCAAuC;CAElD,MAAM,WAAW,iBAAiB,OAAO,WAAW,aAAa;CAGjE,MAAM,aAAa,aAAa,MAAM,UAAU,cAAc,OAAO,UAAU;CAC/E,MAAM,UAAU,MAAM,QAAQ,YAAY,MAAM,SAAS;AACzD,KAAI,CAAC,QAAQ,GACX,QAAO;EAAE,SAAS;EAAO,OAAO,CAAC,GAAG,OAAO,yCAAyC,KAAK,UAAU,QAAQ,OAAO,GAAG;EAAE;CAIzH,MAAM,UADuC,QAAQ,KACC,QAAQ,WAAW,EAAE;AAC3E,OAAM,KAAK,sCAAsC,QAAQ,OAAO,gBAAgB;CAGhF,IAAI,WAAW;AACf,MAAK,MAAM,KAAK,UAAU;AAExB,MAAI,QAAQ,MAAK,MAAK,EAAE,aAAa,EAAE,SAAS,EAAE;AAChD,SAAM,KAAK,6BAA6B,EAAE,SAAS,2BAA2B;AAC9E;;EAGF,MAAM,gBAAyC,EAAE;AACjD,MAAI,EAAE,YAAa,eAAc,cAAc;AAC/C,MAAI,EAAE,eAAgB,eAAc,iBAAiB,EAAE;EAEvD,MAAM,UAAiC;GACrC,SAAS,OAAO;GAChB,UAAU,EAAE;GACZ,eAAe,OAAO,KAAK,cAAc,CAAC,SAAS,IAAI,gBAAgB,KAAA;GACxE;EAGD,MAAM,cAAc,QAAQ,WAAU,MAAK,CAAC,EAAE,SAAS;AACvD,MAAI,eAAe,EACjB,SAAQ,OAAO,aAAa,GAAG,QAAQ;MAEvC,SAAQ,KAAK,QAAQ;AAEvB,aAAW;AACX,QAAM,KAAK,mCAAmC,EAAE,WAAW;;AAI7D,KAAI,UAAU;EACZ,MAAM,YAAY,MAAM,QAAQ,YAAY,MAAM,UAAU,OAAO,EACjE,QAAQ,EAAE,SAAS,EACpB,CAAC;AACF,MAAI,CAAC,UAAU,IAAI;AACjB,SAAM,KAAK,4CAA4C,KAAK,UAAU,UAAU,OAAO,GAAG;AAC1F,WAAQ;QAER,OAAM,KAAK,gDAAgD;;AAK/D,MAAK,MAAM,KAAK,UAAU;EACxB,MAAM,YAAY,MAAM,QACtB,UAAU,MAAM,OAAO,eACvB,MAAM,UACN,QACA;GACE,MAAM;GACN,MAAM,EAAE;GACR,SAAS,GAAG,OAAO,UAAU;GAC7B,SAAS;GACV,CACF;AACD,MAAI,CAAC,UAAU,IAAI;GACjB,MAAM,SAAS,UAAU,QAAQ,KAAK,MAAW,EAAE,QAAQ,CAAC,KAAK,KAAK,IAAI;AAE1E,OAAI,OAAO,SAAS,iBAAiB,IAAI,OAAO,SAAS,qBAAqB,CAC5E,OAAM,KAAK,0BAA0B,EAAE,
SAAS,iBAAiB;QAC5D;AACL,UAAM,KAAK,2CAA2C,EAAE,SAAS,IAAI,SAAS;AAC9E,YAAQ;;QAGV,OAAM,KAAK,+BAA+B,EAAE,SAAS,KAAK,OAAO,UAAU,mBAAmB;;AAIlG,QAAO;EAAE,SAAS;EAAO;EAAO;;;;;;AAOlC,eAAsB,oBACpB,QACA,cACuB;CACvB,MAAM,QAAkB,EAAE;CAC1B,IAAI,QAAQ;CAGZ,MAAM,QAAQ,0BAA0B,OAAO,iBAAiB;AAChE,KAAI,CAAC,MACH,QAAO;EAAE,SAAS;EAAO,OAAO,CAAC,yDAAyD,OAAO,iBAAiB;EAAE;AAEtH,OAAM,KAAK,uCAAuC;CAElD,MAAM,WAAW,iBAAiB,OAAO,WAAW,aAAa;CACjE,MAAM,cAAc,IAAI,IAAI,SAAS,KAAI,MAAK,EAAE,SAAS,CAAC;CAG1D,MAAM,aAAa,aAAa,MAAM,UAAU,cAAc,OAAO,UAAU;CAC/E,MAAM,UAAU,MAAM,QAAQ,YAAY,MAAM,SAAS;AACzD,KAAI,CAAC,QAAQ,IAAI;AACf,QAAM,KAAK,yCAAyC,KAAK,UAAU,QAAQ,OAAO,GAAG;AAErF,UAAQ;QACH;EAEL,MAAM,UADuC,QAAQ,KACC,QAAQ,WAAW,EAAE;EAC3E,MAAM,gBAAgB,QAAQ;EAG9B,MAAM,WAAW,QAAQ,QAAO,MAAK,CAAC,EAAE,YAAY,CAAC,YAAY,IAAI,EAAE,SAAS,CAAC;AAEjF,MAAI,SAAS,SAAS,eAAe;GACnC,MAAM,YAAY,MAAM,QAAQ,YAAY,MAAM,UAAU,OAAO,EACjE,QAAQ,EAAE,SAAS,UAAU,EAC9B,CAAC;AACF,OAAI,CAAC,UAAU,IAAI;AACjB,UAAM,KAAK,4CAA4C,KAAK,UAAU,UAAU,OAAO,GAAG;AAC1F,YAAQ;SAER,OAAM,KAAK,oBAAoB,gBAAgB,SAAS,OAAO,kBAAkB;QAGnF,OAAM,KAAK,qDAAqD;;AAKpE,MAAK,MAAM,KAAK,UAAU;EAExB,MAAM,aAAa,MAAM,QACvB,UAAU,MAAM,OAAO,oBAAoB,mBAAmB,EAAE,SAAS,CAAC,cAC1E,MAAM,SACP;AACD,MAAI,CAAC,WAAW,IAAI;AAClB,SAAM,KAAK,6CAA6C,EAAE,SAAS,IAAI,KAAK,UAAU,WAAW,OAAO,GAAG;AAC3G,WAAQ;AACR;;EAGF,MAAM,UAAU,MAAM,QAAQ,WAAW,KAAK,GAAG,WAAW,OAAO,EAAE;AACrE,MAAI,QAAQ,WAAW,GAAG;AACxB,SAAM,KAAK,0CAA0C,EAAE,WAAW;AAClE;;AAGF,OAAK,MAAM,UAAU,SAAS;GAC5B,MAAM,YAAY,MAAM,QACtB,UAAU,MAAM,OAAO,eAAe,OAAO,MAC7C,MAAM,UACN,SACD;AACD,OAAI,CAAC,UAAU,IAAI;AACjB,UAAM,KAAK,wCAAwC,OAAO,GAAG,OAAO,EAAE,SAAS,IAAI,KAAK,UAAU,UAAU,OAAO,GAAG;AACtH,YAAQ;SAER,OAAM,KAAK,kCAAkC,EAAE,WAAW;;;AAKhE,QAAO;EAAE,SAAS;EAAO;EAAO;;;;wBA9R8E;AAyB1G,UAAS;AACT,iBAAgB"}
@@ -1 +1 @@
-{"version":3,"file":"types-
+
{"version":3,"file":"types-RKZjGE5N.js","names":[],"sources":["../../src/lib/lifecycle/types.ts"],"sourcesContent":["/**\n * Shared types for lifecycle operations.\n *\n * Every atomic operation returns a StepResult. Workflows compose\n * multiple operations and return a WorkflowResult.\n */\n\nimport { existsSync, readFileSync } from 'fs';\nimport { join } from 'path';\nimport { homedir } from 'os';\n\nexport interface StepResult {\n step: string;\n success: boolean;\n skipped: boolean; // true if operation was a no-op (idempotent)\n error?: string;\n details?: string[]; // human-readable log of what was done\n}\n\nexport interface WorkflowResult {\n workflow: 'approve' | 'close' | 'close-out' | 'deep-wipe';\n issueId: string;\n success: boolean; // true only if ALL non-skipped steps succeeded\n steps: StepResult[];\n duration: number; // ms\n}\n\n/** Context shared across lifecycle operations */\nexport interface LifecycleContext {\n issueId: string;\n projectPath: string;\n /** Project name (for Docker compose project naming + placeholders) */\n projectName?: string;\n /** GitHub issue metadata (populated for PAN- issues) */\n github?: {\n owner: string;\n repo: string;\n number: number;\n };\n /** Rally configuration (populated for Rally-tracked issues) */\n rally?: {\n apiKey: string;\n server?: string;\n workspace?: string;\n project?: string;\n };\n}\n\n/** Options for teardown-workspace */\nexport interface TeardownOptions {\n /** Delete feature branches (local + remote). Default: false */\n deleteBranches?: boolean;\n /** Skip Docker container cleanup. Default: false */\n skipDocker?: boolean;\n /** Delete workspace directory (worktree + files). Default: true */\n deleteWorkspace?: boolean;\n /** Clear beads for this issue from project root. Default: false.\n * Only set to true for destructive wipe — normal completion should preserve beads. */\n clearBeads?: boolean;\n /** Project-specific workspace config for tunnel/Hume cleanup */\n workspaceConfig?: {\n tunnel?: any;\n hume?: any;\n dns?: { domain?: string };\n };\n /** Project name (for Docker compose project naming + placeholders) */\n projectName?: string;\n}\n\n/** Options for archive-planning */\nexport interface ArchiveOptions {\n /** Push git commits to remote after archiving. Default: true */\n pushToRemote?: boolean;\n}\n\n/** Options for the approve workflow */\nexport interface ApproveOptions {\n /** Skip the merge step (e.g. if already merged). Default: false */\n skipMerge?: boolean;\n /** Skip beads compaction. Default: false */\n skipBeadsCompaction?: boolean;\n}\n\n/** Options for the deep-wipe workflow */\n/** Progress event emitted during deep-wipe. */\nexport interface DeepWipeProgress {\n step: number;\n total: number;\n label: string;\n detail: string;\n status: 'active' | 'complete' | 'error';\n}\n\nexport interface DeepWipeOptions {\n /** Delete workspace directory. Default: true */\n deleteWorkspace?: boolean;\n /** Delete git branches (local + remote). Default: true */\n deleteBranches?: boolean;\n /** Reset issue to backlog/open state. Default: true */\n resetIssue?: boolean;\n /** Project-specific workspace config for tunnel/Hume cleanup */\n workspaceConfig?: {\n tunnel?: any;\n hume?: any;\n dns?: { domain?: string };\n };\n /** Project name (for Docker compose project naming + placeholders) */\n projectName?: string;\n /** Optional callback for streaming progress events to the client. 
*/\n onProgress?: (event: DeepWipeProgress) => void;\n}\n\n/** Helper to create a successful step result */\nexport function stepOk(step: string, details?: string[]): StepResult {\n return { step, success: true, skipped: false, details };\n}\n\n/** Helper to create a skipped step result */\nexport function stepSkipped(step: string, details?: string[]): StepResult {\n return { step, success: true, skipped: true, details };\n}\n\n/** Helper to create a failed step result */\nexport function stepFailed(step: string, error: string, details?: string[]): StepResult {\n return { step, success: false, skipped: false, error, details };\n}\n\n/**\n * Get LINEAR_API_KEY from environment or .panopticon.env.\n * Shared across lifecycle modules.\n */\nexport function getLinearApiKey(): string | null {\n if (process.env.LINEAR_API_KEY) return process.env.LINEAR_API_KEY;\n const envFile = join(homedir(), '.panopticon.env');\n if (existsSync(envFile)) {\n const content = readFileSync(envFile, 'utf-8');\n const match = content.match(/LINEAR_API_KEY=(.+)/);\n if (match) return match[1].trim();\n }\n return null;\n}\n"],"mappings":";;;;;;;;;;;AAiHA,SAAgB,OAAO,MAAc,SAAgC;AACnE,QAAO;EAAE;EAAM,SAAS;EAAM,SAAS;EAAO;EAAS;;;AAIzD,SAAgB,YAAY,MAAc,SAAgC;AACxE,QAAO;EAAE;EAAM,SAAS;EAAM,SAAS;EAAM;EAAS;;;AAIxD,SAAgB,WAAW,MAAc,OAAe,SAAgC;AACtF,QAAO;EAAE;EAAM,SAAS;EAAO,SAAS;EAAO;EAAO;EAAS;;;;;;AAOjE,SAAgB,kBAAiC;AAC/C,KAAI,QAAQ,IAAI,eAAgB,QAAO,QAAQ,IAAI;CACnD,MAAM,UAAU,KAAK,SAAS,EAAE,kBAAkB;AAClD,KAAI,WAAW,QAAQ,EAAE;EAEvB,MAAM,QADU,aAAa,SAAS,QAAQ,CACxB,MAAM,sBAAsB;AAClD,MAAI,MAAO,QAAO,MAAM,GAAG,MAAM;;AAEnC,QAAO"}
@@ -1 +1 @@
-{"version":3,"file":"vtt-parser-
+
{"version":3,"file":"vtt-parser-99vFekRQ.js","names":[],"sources":["../../src/dashboard/server/utils/vtt-parser.ts"],"sourcesContent":["/**\n * VTT to Markdown converter for transcript uploads.\n * Converts WebVTT (.vtt) files to readable Markdown format.\n *\n * Features:\n * - Extracts speaker names from <v Name> tags\n * - Converts timestamps to MM:SS format\n * - Strips HTML tags and positioning metadata\n * - Decodes common HTML entities\n * - Consolidates consecutive same-speaker cues within 3 seconds\n * - Removes NOTE blocks and cue IDs\n */\n\ninterface ParsedCue {\n startTime: number; // seconds\n speaker: string;\n text: string;\n}\n\n/**\n * Convert WebVTT content to Markdown format.\n * If content is not valid VTT (missing WEBVTT header), returns content as-is.\n */\nexport function vttToMarkdown(vttContent: string): string {\n // Normalize line endings\n const normalized = vttContent.replace(/\\r\\n/g, '\\n');\n\n // Validate WEBVTT header\n if (!normalized.trim().startsWith('WEBVTT')) {\n return vttContent; // Not a VTT file, return as-is\n }\n\n // Split into blocks by double newlines\n const blocks = normalized.split(/\\n\\n+/);\n\n const cues: ParsedCue[] = [];\n\n for (const block of blocks) {\n const lines = block.trim().split('\\n');\n if (lines.length === 0) continue;\n\n // Skip header block (first block with WEBVTT)\n if (lines[0].startsWith('WEBVTT')) continue;\n\n // Skip NOTE blocks\n if (lines[0].startsWith('NOTE')) continue;\n\n // Find the timestamp line\n let timestampLineIndex = -1;\n for (let i = 0; i < lines.length; i++) {\n if (lines[i].match(/(\\d{1,2}:)?\\d{2}:\\d{2}\\.\\d{3}\\s*-->\\s*/)) {\n timestampLineIndex = i;\n break;\n }\n }\n\n if (timestampLineIndex === -1) continue; // No timestamp found\n\n // Parse timestamp\n const timestampLine = lines[timestampLineIndex];\n const timestampMatch = timestampLine.match(/^((\\d{1,2}:)?\\d{2}:\\d{2}\\.\\d{3})\\s*-->\\s*/);\n if (!timestampMatch) continue;\n\n const startTimestamp = timestampMatch[1];\n const startSeconds = parseTimestamp(startTimestamp);\n\n // Extract text lines (everything after timestamp)\n const textLines = lines.slice(timestampLineIndex + 1);\n if (textLines.length === 0) continue; // Empty cue\n\n // Join text lines and process\n let text = textLines.join(' ').trim();\n if (!text) continue;\n\n // Extract speaker from <v Name> tag\n let speaker = '';\n const speakerMatch = text.match(/^<v\\s+([^>]+)>/i);\n if (speakerMatch) {\n speaker = speakerMatch[1].trim();\n text = text.replace(/^<v\\s+[^>]+>/i, '').trim();\n }\n\n // Strip all HTML tags\n text = text.replace(/<[^>]+>/g, '');\n\n // Decode HTML entities\n text = decodeHtmlEntities(text);\n\n if (!text) continue; // Empty after processing\n\n cues.push({\n startTime: startSeconds,\n speaker,\n text\n });\n }\n\n // Consolidate consecutive same-speaker cues within 3 seconds\n const consolidatedCues = consolidateSpeakerCues(cues);\n\n // Format as Markdown\n if (consolidatedCues.length === 0) {\n return '# Transcript\\n\\n(No cues found)\\n';\n }\n\n let markdown = '# Transcript\\n\\n';\n\n for (const cue of consolidatedCues) {\n const timestamp = formatTimestamp(cue.startTime);\n if (cue.speaker) {\n markdown += `**[${timestamp}]** **${cue.speaker}:** ${cue.text}\\n\\n`;\n } else {\n markdown += `**[${timestamp}]** ${cue.text}\\n\\n`;\n }\n }\n\n return markdown;\n}\n\n/**\n * Parse VTT timestamp to seconds.\n * Supports: HH:MM:SS.mmm or MM:SS.mmm\n */\nfunction parseTimestamp(timestamp: string): number {\n const parts = 
timestamp.split(':');\n\n if (parts.length === 3) {\n // HH:MM:SS.mmm\n const hours = parseInt(parts[0], 10);\n const minutes = parseInt(parts[1], 10);\n const seconds = parseFloat(parts[2]);\n return hours * 3600 + minutes * 60 + seconds;\n } else if (parts.length === 2) {\n // MM:SS.mmm\n const minutes = parseInt(parts[0], 10);\n const seconds = parseFloat(parts[1]);\n return minutes * 60 + seconds;\n }\n\n return 0;\n}\n\n/**\n * Format seconds to MM:SS.\n */\nfunction formatTimestamp(seconds: number): string {\n const totalSeconds = Math.floor(seconds);\n const mins = Math.floor(totalSeconds / 60);\n const secs = totalSeconds % 60;\n return `${mins.toString().padStart(2, '0')}:${secs.toString().padStart(2, '0')}`;\n}\n\n/**\n * Decode common HTML entities.\n */\nfunction decodeHtmlEntities(text: string): string {\n return text\n .replace(/&/g, '&')\n .replace(/</g, '<')\n .replace(/>/g, '>')\n .replace(/"/g, '\"')\n .replace(/'/g, \"'\")\n .replace(/ /g, ' ');\n}\n\n/**\n * Consolidate consecutive cues from the same speaker within 3 seconds.\n */\nfunction consolidateSpeakerCues(cues: ParsedCue[]): ParsedCue[] {\n if (cues.length === 0) return [];\n\n const consolidated: ParsedCue[] = [];\n let current = { ...cues[0] };\n\n for (let i = 1; i < cues.length; i++) {\n const cue = cues[i];\n const timeDiff = cue.startTime - current.startTime;\n\n // If same speaker and within 3 seconds, consolidate\n if (cue.speaker === current.speaker && timeDiff <= 3) {\n current.text = current.text + ' ' + cue.text;\n } else {\n consolidated.push(current);\n current = { ...cue };\n }\n }\n\n consolidated.push(current); // Don't forget the last one\n\n return consolidated;\n}\n"],"mappings":";;;;;AAuBA,SAAgB,cAAc,YAA4B;CAExD,MAAM,aAAa,WAAW,QAAQ,SAAS,KAAK;AAGpD,KAAI,CAAC,WAAW,MAAM,CAAC,WAAW,SAAS,CACzC,QAAO;CAIT,MAAM,SAAS,WAAW,MAAM,QAAQ;CAExC,MAAM,OAAoB,EAAE;AAE5B,MAAK,MAAM,SAAS,QAAQ;EAC1B,MAAM,QAAQ,MAAM,MAAM,CAAC,MAAM,KAAK;AACtC,MAAI,MAAM,WAAW,EAAG;AAGxB,MAAI,MAAM,GAAG,WAAW,SAAS,CAAE;AAGnC,MAAI,MAAM,GAAG,WAAW,OAAO,CAAE;EAGjC,IAAI,qBAAqB;AACzB,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,IAChC,KAAI,MAAM,GAAG,MAAM,yCAAyC,EAAE;AAC5D,wBAAqB;AACrB;;AAIJ,MAAI,uBAAuB,GAAI;EAI/B,MAAM,iBADgB,MAAM,oBACS,MAAM,4CAA4C;AACvF,MAAI,CAAC,eAAgB;EAErB,MAAM,iBAAiB,eAAe;EACtC,MAAM,eAAe,eAAe,eAAe;EAGnD,MAAM,YAAY,MAAM,MAAM,qBAAqB,EAAE;AACrD,MAAI,UAAU,WAAW,EAAG;EAG5B,IAAI,OAAO,UAAU,KAAK,IAAI,CAAC,MAAM;AACrC,MAAI,CAAC,KAAM;EAGX,IAAI,UAAU;EACd,MAAM,eAAe,KAAK,MAAM,kBAAkB;AAClD,MAAI,cAAc;AAChB,aAAU,aAAa,GAAG,MAAM;AAChC,UAAO,KAAK,QAAQ,iBAAiB,GAAG,CAAC,MAAM;;AAIjD,SAAO,KAAK,QAAQ,YAAY,GAAG;AAGnC,SAAO,mBAAmB,KAAK;AAE/B,MAAI,CAAC,KAAM;AAEX,OAAK,KAAK;GACR,WAAW;GACX;GACA;GACD,CAAC;;CAIJ,MAAM,mBAAmB,uBAAuB,KAAK;AAGrD,KAAI,iBAAiB,WAAW,EAC9B,QAAO;CAGT,IAAI,WAAW;AAEf,MAAK,MAAM,OAAO,kBAAkB;EAClC,MAAM,YAAY,gBAAgB,IAAI,UAAU;AAChD,MAAI,IAAI,QACN,aAAY,MAAM,UAAU,QAAQ,IAAI,QAAQ,MAAM,IAAI,KAAK;MAE/D,aAAY,MAAM,UAAU,MAAM,IAAI,KAAK;;AAI/C,QAAO;;;;;;AAOT,SAAS,eAAe,WAA2B;CACjD,MAAM,QAAQ,UAAU,MAAM,IAAI;AAElC,KAAI,MAAM,WAAW,GAAG;EAEtB,MAAM,QAAQ,SAAS,MAAM,IAAI,GAAG;EACpC,MAAM,UAAU,SAAS,MAAM,IAAI,GAAG;EACtC,MAAM,UAAU,WAAW,MAAM,GAAG;AACpC,SAAO,QAAQ,OAAO,UAAU,KAAK;YAC5B,MAAM,WAAW,GAAG;EAE7B,MAAM,UAAU,SAAS,MAAM,IAAI,GAAG;EACtC,MAAM,UAAU,WAAW,MAAM,GAAG;AACpC,SAAO,UAAU,KAAK;;AAGxB,QAAO;;;;;AAMT,SAAS,gBAAgB,SAAyB;CAChD,MAAM,eAAe,KAAK,MAAM,QAAQ;CACxC,MAAM,OAAO,KAAK,MAAM,eAAe,GAAG;CAC1C,MAAM,OAAO,eAAe;AAC5B,QAAO,GAAG,KAAK,UAAU,CAAC,SAAS,GAAG,IAAI,CAAC,GAAG,KAAK,UAAU,CAAC,SAAS,GAAG,IAAI;;;;;AAMhF,SAAS,mBAAmB,MAAsB;AAChD,QAAO,KACJ,QAAQ,UAAU,IAAI,CACtB,QAAQ,SAAS,IAAI,CACrB,QAAQ,SAA
S,IAAI,CACrB,QAAQ,WAAW,KAAI,CACvB,QAAQ,WAAW,IAAI,CACvB,QAAQ,WAAW,IAAI;;;;;AAM5B,SAAS,uBAAuB,MAAgC;AAC9D,KAAI,KAAK,WAAW,EAAG,QAAO,EAAE;CAEhC,MAAM,eAA4B,EAAE;CACpC,IAAI,UAAU,EAAE,GAAG,KAAK,IAAI;AAE5B,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;EACpC,MAAM,MAAM,KAAK;EACjB,MAAM,WAAW,IAAI,YAAY,QAAQ;AAGzC,MAAI,IAAI,YAAY,QAAQ,WAAW,YAAY,EACjD,SAAQ,OAAO,QAAQ,OAAO,MAAM,IAAI;OACnC;AACL,gBAAa,KAAK,QAAQ;AAC1B,aAAU,EAAE,GAAG,KAAK;;;AAIxB,cAAa,KAAK,QAAQ;AAE1B,QAAO"}
@@ -1,10 +1,10 @@
-import {
-import { a as findProjectByTeam, p as init_projects, r as extractTeamPrefix } from "./projects-
-import { a as init_config, o as loadConfig } from "./config-
-import { i as NotImplementedError, o as init_interface } from "./rally-
-import { n as createTrackerFromConfig, r as init_factory } from "./factory-
-import { i as readWorkspacePlan, n as init_io } from "./io-
-import { t as extractACFromDocument } from "./acceptance-criteria-
+import { _ as init_paths } from "./paths-BDyJ7BiV.js";
+import { a as findProjectByTeam, p as init_projects, r as extractTeamPrefix } from "./projects-CFVl4oHn.js";
+import { a as init_config, o as loadConfig } from "./config-CDkGjnwy.js";
+import { i as NotImplementedError, o as init_interface } from "./rally-YjFRxIiC.js";
+import { n as createTrackerFromConfig, r as init_factory } from "./factory-C8nhLGHB.js";
+import { i as readWorkspacePlan, n as init_io } from "./io-DKS6359z.js";
+import { t as extractACFromDocument } from "./acceptance-criteria-Dk9hhiYj.js";
 import { existsSync, readFileSync, readdirSync, statSync } from "fs";
 import { dirname, join } from "path";
 import { fileURLToPath } from "url";
@@ -331,7 +331,17 @@ function buildPolyrepoContext(issueId, workspacePath) {
 const teamPrefix = extractTeamPrefix(issueId);
 const projectConfig = teamPrefix ? findProjectByTeam(teamPrefix) : null;
 if (!projectConfig?.workspace?.type || projectConfig.workspace.type !== "polyrepo" || !projectConfig.workspace.repos) return "";
-const
+const wsConfig = projectConfig.workspace;
+const repos = wsConfig.repos;
+const isProgressive = wsConfig.progressive && wsConfig.always_include;
+let visibleRepos = repos;
+if (isProgressive) {
+const existingRepos = readdirSync(workspacePath).filter((f) => {
+const fullPath = join(workspacePath, f);
+return f !== ".planning" && f !== ".claude" && f !== ".pan" && f !== ".beads" && existsSync(fullPath);
+});
+visibleRepos = repos.filter((r) => existingRepos.includes(r.name));
+}
 const lines = [
 "## Project Structure (Polyrepo)",
 "",
@@ -341,16 +351,61 @@ function buildPolyrepoContext(issueId, workspacePath) {
 "| Directory | Purpose |",
 "|-----------|---------|"
 ];
-for (const repo of
+for (const repo of visibleRepos) {
+const notes = [];
+if (repo.readonly) notes.push("readonly");
+if (repo.link_type === "symlink") notes.push("symlink");
+const noteStr = notes.length > 0 ? ` (${notes.join(", ")})` : "";
+lines.push(`| \`${repo.name}/\` | ${repo.path}${noteStr} |`);
+}
 lines.push("");
 lines.push("**Git operations:**");
 lines.push("- Run `git status`, `git log`, etc. INSIDE the subdirectories (e.g., `cd fe && git status`)");
 lines.push(`- The workspace root (\`${workspacePath}\`) has no \`.git\` directory`);
 lines.push(`- Each subdirectory has its own branch: \`${repos[0]?.branch_prefix || "feature/"}${issueId.toLowerCase()}\``);
+const prTargets = /* @__PURE__ */ new Set();
+for (const repo of visibleRepos) {
+const prTarget = repo.pr_target || wsConfig.pr_target;
+if (prTarget) prTargets.add(prTarget);
+}
+if (prTargets.size > 0) {
+lines.push("");
+lines.push(`**PR target branch:** \`${[...prTargets].join("` or `")}\` (NOT main/master)`);
+}
+if (isProgressive) {
+lines.push("");
+lines.push("## Adding Repositories");
+lines.push("");
+lines.push("This is a **progressive** workspace. Only essential repos are included.");
+lines.push("Use the `/workspace-add-repo` skill to add more repos when needed:");
+lines.push("");
+lines.push("```bash");
+lines.push(`pan workspace add-repo ${issueId.toLowerCase()} <repo-name> [repo-name...]`);
+lines.push("# Or add all repos in a group:");
+lines.push(`pan workspace add-repo ${issueId.toLowerCase()} --group <group-name>`);
+lines.push("```");
+lines.push("");
+lines.push("Available repos not yet in workspace:");
+const existingRepoNames = visibleRepos.map((r) => r.name);
+const missingRepos = repos.filter((r) => !existingRepoNames.includes(r.name));
+for (const repo of missingRepos) {
+const notes = [];
+if (repo.readonly) notes.push("readonly");
+if (repo.link_type === "symlink") notes.push("symlink");
+const noteStr = notes.length > 0 ? ` (${notes.join(", ")})` : "";
+lines.push(`- \`${repo.name}\`${noteStr} — ${repo.path}`);
+}
+const readonlyRepos = visibleRepos.filter((r) => r.readonly || r.link_type === "symlink");
+if (readonlyRepos.length > 0) {
+lines.push("");
+lines.push("**Readonly repos** (do NOT commit changes):");
+for (const repo of readonlyRepos) lines.push(`- \`${repo.name}/\` — ${repo.path}`);
+}
+}
 lines.push("");
 return lines.join("\n");
 }
 //#endregion
 export { buildPolyrepoContext, buildWorkAgentPrompt, extractBeadsIdsFromState, extractStitchDesigns, getTrackerContext, readBeadsTasks, readPlanningContext };

-//# sourceMappingURL=work-agent-prompt-
+//# sourceMappingURL=work-agent-prompt-fCg67nyo.js.map