panopticon-cli 0.6.4 → 0.6.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/dist/{agents-DfYify9s.js → agents-CfFDs52G.js} +14 -14
- package/dist/{agents-DfYify9s.js.map → agents-CfFDs52G.js.map} +1 -1
- package/dist/{agents-BKsVoIc9.js → agents-D_2oRFVf.js} +1 -1
- package/dist/{archive-planning-BJrZ3tmN.js → archive-planning-D97ziGec.js} +3 -3
- package/dist/{archive-planning-BJrZ3tmN.js.map → archive-planning-D97ziGec.js.map} +1 -1
- package/dist/{archive-planning-C3m3hfa5.js → archive-planning-DK90wn9Q.js} +1 -1
- package/dist/{browser-Cvdznzc0.js → browser-CX7jXfXX.js} +1 -1
- package/dist/{browser-Cvdznzc0.js.map → browser-CX7jXfXX.js.map} +1 -1
- package/dist/{clean-planning-DvhZAUv4.js → clean-planning-D_lz4aQq.js} +2 -2
- package/dist/{clean-planning-DvhZAUv4.js.map → clean-planning-D_lz4aQq.js.map} +1 -1
- package/dist/clean-planning-x1S-JdmO.js +2 -0
- package/dist/cli/index.js +291 -760
- package/dist/cli/index.js.map +1 -1
- package/dist/{close-issue-Dr7yZmrr.js → close-issue-CaFE0stN.js} +11 -7
- package/dist/close-issue-CaFE0stN.js.map +1 -0
- package/dist/close-issue-CjcfZI9s.js +2 -0
- package/dist/compact-beads-B0_qE1w3.js +2 -0
- package/dist/{compact-beads-BCOtIIRl.js → compact-beads-CjFkteSU.js} +2 -2
- package/dist/{compact-beads-BCOtIIRl.js.map → compact-beads-CjFkteSU.js.map} +1 -1
- package/dist/{config-CRzMQRgA.js → config-BQNKsi9G.js} +2 -2
- package/dist/{config-CRzMQRgA.js.map → config-BQNKsi9G.js.map} +1 -1
- package/dist/{config-BYgUzQ21.js → config-agyKgF5C.js} +1 -1
- package/dist/{config-yaml-BgOACZAB.js → config-yaml-DGbLSMCa.js} +1 -1
- package/dist/{config-yaml-BgOACZAB.js.map → config-yaml-DGbLSMCa.js.map} +1 -1
- package/dist/{config-yaml-fdyvyL0S.js → config-yaml-Dqt4FWQH.js} +1 -1
- package/dist/dashboard/{acceptance-criteria-e5iiHlRx.js → acceptance-criteria-Dk9hhiYj.js} +1 -1
- package/dist/dashboard/{acceptance-criteria-e5iiHlRx.js.map → acceptance-criteria-Dk9hhiYj.js.map} +1 -1
- package/dist/dashboard/{agent-enrichment-C67LJBgD.js → agent-enrichment-DdO7ZqjI.js} +11 -7
- package/dist/dashboard/agent-enrichment-DdO7ZqjI.js.map +1 -0
- package/dist/dashboard/{agent-enrichment-Cq0P1cNZ.js → agent-enrichment-dLeGE1fX.js} +1 -1
- package/dist/dashboard/{agents-YyO6t5Xa.js → agents-DCpQQ_W5.js} +14 -14
- package/dist/dashboard/{agents-YyO6t5Xa.js.map → agents-DCpQQ_W5.js.map} +1 -1
- package/dist/dashboard/{agents-BVBVCyat.js → agents-Dgh2TjSp.js} +1 -1
- package/dist/dashboard/{archive-planning-h-hAjk0P.js → archive-planning-BmW9UDTr.js} +3 -3
- package/dist/dashboard/{archive-planning-h-hAjk0P.js.map → archive-planning-BmW9UDTr.js.map} +1 -1
- package/dist/dashboard/{archive-planning-CScs1MOC.js → archive-planning-C3Ebf9yC.js} +1 -1
- package/dist/dashboard/{beads-qNB0yAHV.js → beads-Bv-AdX7G.js} +3 -3
- package/dist/dashboard/{beads-qNB0yAHV.js.map → beads-Bv-AdX7G.js.map} +1 -1
- package/dist/dashboard/{beads-D_FRedEJ.js → beads-By6-X07V.js} +1 -1
- package/dist/dashboard/clean-planning-D60L8rPY.js +2 -0
- package/dist/dashboard/{clean-planning-qafw99vY.js → clean-planning-VEJu5suh.js} +2 -2
- package/dist/dashboard/{clean-planning-qafw99vY.js.map → clean-planning-VEJu5suh.js.map} +1 -1
- package/dist/dashboard/close-issue-C2KeSKKJ.js +2 -0
- package/dist/dashboard/{close-issue-DfIggeZD.js → close-issue-DtKdsSTm.js} +11 -7
- package/dist/dashboard/close-issue-DtKdsSTm.js.map +1 -0
- package/dist/dashboard/compact-beads-C7BN5N11.js +2 -0
- package/dist/dashboard/{compact-beads-Dt0qTqsC.js → compact-beads-D8Vt3qyv.js} +2 -2
- package/dist/dashboard/{compact-beads-Dt0qTqsC.js.map → compact-beads-D8Vt3qyv.js.map} +1 -1
- package/dist/dashboard/{config-CUREjHP7.js → config-CDkGjnwy.js} +2 -2
- package/dist/dashboard/{config-CUREjHP7.js.map → config-CDkGjnwy.js.map} +1 -1
- package/dist/dashboard/{config-BeI3uy-8.js → config-CTXkBATQ.js} +1 -1
- package/dist/dashboard/{database-CozA13Wy.js → database-DhqASALP.js} +1 -1
- package/dist/dashboard/{database-C0y0hXBx.js → database-cxmQryoh.js} +2 -2
- package/dist/dashboard/{database-C0y0hXBx.js.map → database-cxmQryoh.js.map} +1 -1
- package/dist/dashboard/{dist-src-oG2iHzgI.js → dist-src-DTm11oQr.js} +1 -1
- package/dist/dashboard/{dist-src-oG2iHzgI.js.map → dist-src-DTm11oQr.js.map} +1 -1
- package/dist/dashboard/{event-store-D7kLBd07.js → event-store-VWWUmOfn.js} +1 -1
- package/dist/dashboard/{event-store-O9q0Gweh.js → event-store-vSmAA3Zp.js} +9 -4
- package/dist/dashboard/event-store-vSmAA3Zp.js.map +1 -0
- package/dist/dashboard/{factory-BnLdiQW-.js → factory-C8nhLGHB.js} +3 -3
- package/dist/dashboard/{factory-BnLdiQW-.js.map → factory-C8nhLGHB.js.map} +1 -1
- package/dist/dashboard/{feedback-writer-DyovUANg.js → feedback-writer-CudSe1WK.js} +2 -2
- package/dist/dashboard/{feedback-writer-DyovUANg.js.map → feedback-writer-CudSe1WK.js.map} +1 -1
- package/dist/dashboard/{feedback-writer-gSUv_W0h.js → feedback-writer-Wgv1cd1r.js} +1 -1
- package/dist/dashboard/{git-utils-BJRioREj.js → git-utils-C1m4SwAe.js} +1 -1
- package/dist/dashboard/{git-utils-BJRioREj.js.map → git-utils-C1m4SwAe.js.map} +1 -1
- package/dist/dashboard/{git-utils-BtCRddq3.js → git-utils-DQI8EYoj.js} +1 -1
- package/dist/dashboard/{github-app-XO-LBUGk.js → github-app-DClWjjHr.js} +1 -1
- package/dist/dashboard/{github-app-XO-LBUGk.js.map → github-app-DClWjjHr.js.map} +1 -1
- package/dist/dashboard/{health-events-db-584nYgJB.js → health-events-db-BMXQfInV.js} +1 -1
- package/dist/dashboard/{health-events-db-B3ChzN65.js → health-events-db-Do4NrOhC.js} +2 -2
- package/dist/dashboard/{health-events-db-B3ChzN65.js.map → health-events-db-Do4NrOhC.js.map} +1 -1
- package/dist/dashboard/{hooks-CKhs3N68.js → hooks-CB4T47NC.js} +1 -1
- package/dist/dashboard/{hooks-CErbP8Oq.js → hooks-CjqXOlNb.js} +2 -2
- package/dist/dashboard/{hooks-CErbP8Oq.js.map → hooks-CjqXOlNb.js.map} +1 -1
- package/dist/dashboard/hume-CA2pftu_.js +3 -0
- package/dist/dashboard/{hume-CX_U3Qha.js → hume-JsAlMOJC.js} +2 -2
- package/dist/dashboard/{hume-CX_U3Qha.js.map → hume-JsAlMOJC.js.map} +1 -1
- package/dist/dashboard/{inspect-agent-B57kGDUV.js → inspect-agent-7eour7EA.js} +3 -3
- package/dist/dashboard/{inspect-agent-B57kGDUV.js.map → inspect-agent-7eour7EA.js.map} +1 -1
- package/dist/dashboard/{io-yGovuG4U.js → io-CWlFW78i.js} +1 -1
- package/dist/dashboard/{io-AJg-mzFi.js → io-DKS6359z.js} +1 -1
- package/dist/dashboard/{io-AJg-mzFi.js.map → io-DKS6359z.js.map} +1 -1
- package/dist/dashboard/issue-id-vwYJdsf8.js +62 -0
- package/dist/dashboard/issue-id-vwYJdsf8.js.map +1 -0
- package/dist/dashboard/{issue-service-singleton-DQK42EqH.js → issue-service-singleton-Co__-6kL.js} +1 -1
- package/dist/dashboard/{issue-service-singleton-sb2HkB9f.js → issue-service-singleton-Wv4xBm3y.js} +7 -7
- package/dist/dashboard/{issue-service-singleton-sb2HkB9f.js.map → issue-service-singleton-Wv4xBm3y.js.map} +1 -1
- package/dist/dashboard/{label-cleanup-CZEsbtq9.js → label-cleanup-nVKTmIIW.js} +7 -4
- package/dist/dashboard/label-cleanup-nVKTmIIW.js.map +1 -0
- package/dist/dashboard/lifecycle-BcUmtkR4.js +7 -0
- package/dist/dashboard/{merge-agent-GLtMEsTu.js → merge-agent-CGN3TT0a.js} +1 -1
- package/dist/dashboard/{merge-agent-twroFuAh.js → merge-agent-yudQOPZc.js} +148 -46
- package/dist/dashboard/merge-agent-yudQOPZc.js.map +1 -0
- package/dist/dashboard/{paths-COdEvoXR.js → paths-BDyJ7BiV.js} +19 -2
- package/dist/dashboard/{paths-COdEvoXR.js.map → paths-BDyJ7BiV.js.map} +1 -1
- package/dist/dashboard/{pipeline-notifier-DM5AHG5Q.js → pipeline-notifier-CCSN-jar.js} +1 -1
- package/dist/dashboard/{pipeline-notifier-DM5AHG5Q.js.map → pipeline-notifier-CCSN-jar.js.map} +1 -1
- package/dist/dashboard/{plan-utils-BkCIhn3B.js → plan-utils-Bkcsqr_s.js} +3 -3
- package/dist/dashboard/{plan-utils-BkCIhn3B.js.map → plan-utils-Bkcsqr_s.js.map} +1 -1
- package/dist/dashboard/{prd-draft-D09Afalc.js → prd-draft-BD8oMkZ1.js} +2 -2
- package/dist/dashboard/{prd-draft-D09Afalc.js.map → prd-draft-BD8oMkZ1.js.map} +1 -1
- package/dist/dashboard/{projection-cache-DQ9zegkK.js → projection-cache-C0EL8s8h.js} +1 -1
- package/dist/dashboard/{projection-cache-DQ9zegkK.js.map → projection-cache-C0EL8s8h.js.map} +1 -1
- package/dist/dashboard/{projects-DyT3vSy-.js → projects-C5ozxjwP.js} +1 -1
- package/dist/dashboard/{projects-Cq3TWdPS.js → projects-CFVl4oHn.js} +25 -13
- package/dist/dashboard/projects-CFVl4oHn.js.map +1 -0
- package/dist/dashboard/{providers-Ck2sQd_F.js → providers-B5Y4H2Mg.js} +4 -4
- package/dist/dashboard/providers-B5Y4H2Mg.js.map +1 -0
- package/dist/dashboard/{providers-DVQnDekG.js → providers-csVZVPkE.js} +1 -1
- package/dist/dashboard/public/assets/{dist-CCJbQrSB.js → dist-BaQPC-c6.js} +1 -1
- package/dist/dashboard/public/assets/index-ByLmYGhW.js +212 -0
- package/dist/dashboard/public/assets/index-OEEbThNN.css +1 -0
- package/dist/dashboard/public/index.html +2 -2
- package/dist/dashboard/rally-6McpKKRa.js +3 -0
- package/dist/dashboard/{rally-Cwuae-4C.js → rally-YjFRxIiC.js} +2 -2
- package/dist/dashboard/{rally-Cwuae-4C.js.map → rally-YjFRxIiC.js.map} +1 -1
- package/dist/dashboard/{rally-api-DSUxm7EO.js → rally-api-C0WqCSkT.js} +1 -1
- package/dist/dashboard/{rally-api-DSUxm7EO.js.map → rally-api-C0WqCSkT.js.map} +1 -1
- package/dist/dashboard/{rally-api-CEH5KZi4.js → rally-api-DNttdCW4.js} +1 -1
- package/dist/dashboard/{remote-BHTTMpJJ.js → remote-Cigqjj3f.js} +2 -2
- package/dist/dashboard/{remote-BXo_iIku.js → remote-ObpNZ7hF.js} +2 -2
- package/dist/dashboard/{remote-BXo_iIku.js.map → remote-ObpNZ7hF.js.map} +1 -1
- package/dist/dashboard/{remote-agents-CTKVhFFY.js → remote-agents-Bf3GuM7t.js} +1 -1
- package/dist/dashboard/{remote-agents-C0_0LLNd.js → remote-agents-DFyjT1Le.js} +1 -1
- package/dist/dashboard/{remote-agents-C0_0LLNd.js.map → remote-agents-DFyjT1Le.js.map} +1 -1
- package/dist/dashboard/{review-status-CK3eBGyb.js → review-status-BtXqWBhS.js} +1 -1
- package/dist/dashboard/{review-status-CV55Tl-n.js → review-status-Bymwzh2i.js} +44 -4
- package/dist/dashboard/{review-status-CV55Tl-n.js.map → review-status-Bymwzh2i.js.map} +1 -1
- package/dist/dashboard/server.js +565 -265
- package/dist/dashboard/server.js.map +1 -1
- package/dist/dashboard/{settings-CuHV-wcv.js → settings-BHlDG7TK.js} +2 -2
- package/dist/dashboard/settings-BHlDG7TK.js.map +1 -0
- package/dist/dashboard/settings-XWvDcj-D.js +2 -0
- package/dist/dashboard/{shadow-engineering-BUeZunaE.js → shadow-engineering-lIn1W_95.js} +1 -1
- package/dist/dashboard/{shadow-engineering-BUeZunaE.js.map → shadow-engineering-lIn1W_95.js.map} +1 -1
- package/dist/dashboard/{shadow-state-DHQ-kASN.js → shadow-state-BIexcxkv.js} +1 -1
- package/dist/dashboard/{shadow-state-DHQ-kASN.js.map → shadow-state-BIexcxkv.js.map} +1 -1
- package/dist/dashboard/{spawn-planning-session-8FFAqLdK.js → spawn-planning-session-33Jf-d5T.js} +6 -6
- package/dist/dashboard/{spawn-planning-session-8FFAqLdK.js.map → spawn-planning-session-33Jf-d5T.js.map} +1 -1
- package/dist/dashboard/{spawn-planning-session-U0Lqpjen.js → spawn-planning-session-D5hrVdWM.js} +1 -1
- package/dist/dashboard/{specialist-context-ColzlmGE.js → specialist-context-DGukHSn8.js} +6 -6
- package/dist/dashboard/{specialist-context-ColzlmGE.js.map → specialist-context-DGukHSn8.js.map} +1 -1
- package/dist/dashboard/{specialist-logs-BhmDpFIq.js → specialist-logs-CIw4qfTy.js} +1 -1
- package/dist/dashboard/{specialists-C6s3U6tX.js → specialists-B_zrayaP.js} +37 -36
- package/dist/dashboard/specialists-B_zrayaP.js.map +1 -0
- package/dist/dashboard/{specialists-Cny632-T.js → specialists-Cp-PgspS.js} +1 -1
- package/dist/dashboard/{test-agent-queue-tqI4VDsu.js → test-agent-queue-ypF_ecHo.js} +4 -4
- package/dist/dashboard/{test-agent-queue-tqI4VDsu.js.map → test-agent-queue-ypF_ecHo.js.map} +1 -1
- package/dist/dashboard/{tldr-daemon-BNFyS7W_.js → tldr-daemon-B_oLRD8z.js} +2 -2
- package/dist/dashboard/{tldr-daemon-BNFyS7W_.js.map → tldr-daemon-B_oLRD8z.js.map} +1 -1
- package/dist/dashboard/{tldr-daemon-A6JqC59u.js → tldr-daemon-Cfs0bXTi.js} +1 -1
- package/dist/dashboard/{tmux-DYGAVJfb.js → tmux-BzxdKItf.js} +1 -1
- package/dist/dashboard/{tmux-IlN1Slv-.js → tmux-LwG0tHhU.js} +2 -2
- package/dist/dashboard/{tmux-IlN1Slv-.js.map → tmux-LwG0tHhU.js.map} +1 -1
- package/dist/dashboard/{tracker-config-BzNLnmcE.js → tracker-config-BP59uH4V.js} +1 -1
- package/dist/dashboard/{tracker-config-CNM_5rEf.js → tracker-config-e7ph1QqT.js} +2 -2
- package/dist/dashboard/{tracker-config-CNM_5rEf.js.map → tracker-config-e7ph1QqT.js.map} +1 -1
- package/dist/dashboard/{tunnel-D2BkwU7k.js → tunnel-0RzzuXPf.js} +1 -1
- package/dist/dashboard/{tunnel-Dub2hiAA.js → tunnel-DldbBPWL.js} +2 -2
- package/dist/dashboard/{tunnel-Dub2hiAA.js.map → tunnel-DldbBPWL.js.map} +1 -1
- package/dist/dashboard/{types-CWA-o4UN.js → types-RKZjGE5N.js} +1 -1
- package/dist/dashboard/{types-CWA-o4UN.js.map → types-RKZjGE5N.js.map} +1 -1
- package/dist/dashboard/{vtt-parser-BAXygRf0.js → vtt-parser-99vFekRQ.js} +1 -1
- package/dist/dashboard/{vtt-parser-BAXygRf0.js.map → vtt-parser-99vFekRQ.js.map} +1 -1
- package/dist/dashboard/{work-agent-prompt-JYq_OugP.js → work-agent-prompt-fCg67nyo.js} +65 -10
- package/dist/dashboard/{work-agent-prompt-JYq_OugP.js.map → work-agent-prompt-fCg67nyo.js.map} +1 -1
- package/dist/dashboard/{work-type-router-Cxp8_ur2.js → work-type-router-CWVW2Wk_.js} +1 -1
- package/dist/dashboard/{work-type-router-Cxp8_ur2.js.map → work-type-router-CWVW2Wk_.js.map} +1 -1
- package/dist/dashboard/{work-type-router-Com2amST.js → work-type-router-Di5gCQwh.js} +1 -1
- package/dist/dashboard/{workflows-N1UTipYl.js → workflows-BSMipN07.js} +35 -17
- package/dist/dashboard/workflows-BSMipN07.js.map +1 -0
- package/dist/dashboard/workflows-DaYWQIS2.js +2 -0
- package/dist/dashboard/{workspace-config-cmp5_ipD.js → workspace-config-DVDR-Ukh.js} +1 -1
- package/dist/dashboard/workspace-config-DVDR-Ukh.js.map +1 -0
- package/dist/dashboard/{workspace-manager-CjpWPgzL.js → workspace-manager-BYfzs_t2.js} +1 -1
- package/dist/dashboard/{workspace-manager-D_y9ZmW_.js → workspace-manager-C7OfT62A.js} +44 -24
- package/dist/dashboard/workspace-manager-C7OfT62A.js.map +1 -0
- package/dist/{dns-BKzHm-2q.js → dns-D_aKQJjb.js} +1 -1
- package/dist/{dns-DZwOWvVO.js → dns-Yxq4NNS7.js} +1 -1
- package/dist/{dns-DZwOWvVO.js.map → dns-Yxq4NNS7.js.map} +1 -1
- package/dist/{factory-DFu3IT4r.js → factory-BRBGw6OB.js} +1 -1
- package/dist/{factory-DfzczxN1.js → factory-DzsOiZVc.js} +3 -3
- package/dist/{factory-DfzczxN1.js.map → factory-DzsOiZVc.js.map} +1 -1
- package/dist/{feedback-writer-CwdnOkPO.js → feedback-writer-ygXN5F9N.js} +2 -2
- package/dist/{feedback-writer-CwdnOkPO.js.map → feedback-writer-ygXN5F9N.js.map} +1 -1
- package/dist/{github-app-CHKwxOeQ.js → github-app-DykduJ0X.js} +1 -1
- package/dist/{github-app-CHKwxOeQ.js.map → github-app-DykduJ0X.js.map} +1 -1
- package/dist/hume-9nv1VmMV.js +3 -0
- package/dist/{hume-DnV-tDsh.js → hume-DoCbph2h.js} +2 -2
- package/dist/{hume-DnV-tDsh.js.map → hume-DoCbph2h.js.map} +1 -1
- package/dist/index.d.ts +17 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +8 -7
- package/dist/issue-id-CAcekoIw.js +62 -0
- package/dist/issue-id-CAcekoIw.js.map +1 -0
- package/dist/{label-cleanup-31ElPqqv.js → label-cleanup-C8R9Rspn.js} +7 -4
- package/dist/label-cleanup-C8R9Rspn.js.map +1 -0
- package/dist/{manifest-DL0oDbpv.js → manifest-B4ghOD-V.js} +1 -1
- package/dist/{manifest-DL0oDbpv.js.map → manifest-B4ghOD-V.js.map} +1 -1
- package/dist/{merge-agent-VQH9z9t8.js → merge-agent-DlUiUanN.js} +86 -33
- package/dist/merge-agent-DlUiUanN.js.map +1 -0
- package/dist/{paths-lMaxrYtT.js → paths-CDJ_HsbN.js} +19 -2
- package/dist/{paths-lMaxrYtT.js.map → paths-CDJ_HsbN.js.map} +1 -1
- package/dist/{pipeline-notifier-OJ-d3Y60.js → pipeline-notifier-XgDdCdvT.js} +1 -1
- package/dist/{pipeline-notifier-OJ-d3Y60.js.map → pipeline-notifier-XgDdCdvT.js.map} +1 -1
- package/dist/{projects-CvLepaxC.js → projects-Bk-5QhFQ.js} +25 -13
- package/dist/projects-Bk-5QhFQ.js.map +1 -0
- package/dist/{projects-DMWmPeIU.js → projects-DhU7rAVN.js} +1 -1
- package/dist/{providers-DcCPZ5K4.js → providers-DSU1vfQF.js} +4 -4
- package/dist/providers-DSU1vfQF.js.map +1 -0
- package/dist/rally-DdPvGa-w.js +3 -0
- package/dist/{rally-uUUZXp1h.js → rally-Dy00NElU.js} +1 -1
- package/dist/{rally-uUUZXp1h.js.map → rally-Dy00NElU.js.map} +1 -1
- package/dist/{remote-CkLBqLJc.js → remote-CYiOJg0q.js} +2 -2
- package/dist/{remote-CkLBqLJc.js.map → remote-CYiOJg0q.js.map} +1 -1
- package/dist/{remote-agents-C5Bd2fgt.js → remote-agents-CZXrUF4f.js} +1 -1
- package/dist/{remote-agents-C5Bd2fgt.js.map → remote-agents-CZXrUF4f.js.map} +1 -1
- package/dist/{remote-agents-BTzD-wMQ.js → remote-agents-ycHHVsgf.js} +1 -1
- package/dist/{remote-workspace-Dxghqiti.js → remote-workspace-CA33UuVI.js} +4 -4
- package/dist/{remote-workspace-Dxghqiti.js.map → remote-workspace-CA33UuVI.js.map} +1 -1
- package/dist/{review-status-2TdtHNcs.js → review-status-D6H2WOw8.js} +1 -1
- package/dist/{review-status-Bm1bWNEa.js → review-status-DEDvCKMP.js} +44 -4
- package/dist/{review-status-Bm1bWNEa.js.map → review-status-DEDvCKMP.js.map} +1 -1
- package/dist/{tracker-C_62ukEq.js → settings-BcWPTrua.js} +7 -199
- package/dist/settings-BcWPTrua.js.map +1 -0
- package/dist/shadow-state-BZzxfEGw.js +2 -0
- package/dist/{shadow-state-CFFHf05M.js → shadow-state-CE3dQfll.js} +1 -1
- package/dist/{shadow-state-CFFHf05M.js.map → shadow-state-CE3dQfll.js.map} +1 -1
- package/dist/{specialist-context-BdNFsfMG.js → specialist-context-BAUWL1Fl.js} +6 -6
- package/dist/{specialist-context-BdNFsfMG.js.map → specialist-context-BAUWL1Fl.js.map} +1 -1
- package/dist/{specialist-logs-CLztE_bE.js → specialist-logs-DQKKQV9B.js} +1 -1
- package/dist/{specialists-aUoUVWsN.js → specialists-Bfb9ATzw.js} +1 -1
- package/dist/{specialists-DEKqgkxp.js → specialists-D7Kj5o6s.js} +35 -34
- package/dist/specialists-D7Kj5o6s.js.map +1 -0
- package/dist/sync-DMfgd389.js +693 -0
- package/dist/sync-DMfgd389.js.map +1 -0
- package/dist/sync-TL6y-8K6.js +2 -0
- package/dist/{tldr-daemon-BCEFPItr.js → tldr-daemon-CFx4LXAl.js} +2 -2
- package/dist/{tldr-daemon-BCEFPItr.js.map → tldr-daemon-CFx4LXAl.js.map} +1 -1
- package/dist/{tldr-daemon-xBAx4cBE.js → tldr-daemon-D_EooADG.js} +1 -1
- package/dist/{tmux-DN6H886Y.js → tmux-CBtui_Cl.js} +1 -1
- package/dist/{tmux-CKdNxxJx.js → tmux-D6Ah4I8z.js} +2 -2
- package/dist/{tmux-CKdNxxJx.js.map → tmux-D6Ah4I8z.js.map} +1 -1
- package/dist/tracker-BhYYvU3p.js +198 -0
- package/dist/tracker-BhYYvU3p.js.map +1 -0
- package/dist/{tracker-utils-CVU2W1sX.js → tracker-utils-ChQyut8w.js} +34 -12
- package/dist/tracker-utils-ChQyut8w.js.map +1 -0
- package/dist/{traefik-DHgBoWXX.js → traefik-C80EbDu_.js} +4 -4
- package/dist/{traefik-DHgBoWXX.js.map → traefik-C80EbDu_.js.map} +1 -1
- package/dist/{traefik-BR-edbZv.js → traefik-CgHl7Bge.js} +1 -1
- package/dist/{tunnel-BZO9Q5oe.js → tunnel-DXOJ1wMM.js} +1 -1
- package/dist/{tunnel-Bl1qNSyQ.js → tunnel-DzXEPwIc.js} +2 -2
- package/dist/{tunnel-Bl1qNSyQ.js.map → tunnel-DzXEPwIc.js.map} +1 -1
- package/dist/{types-DewGdaIP.js → types-BhJj1SP1.js} +1 -1
- package/dist/{types-DewGdaIP.js.map → types-BhJj1SP1.js.map} +1 -1
- package/dist/{work-type-router-CS2BB1vS.js → work-type-router-CHjciPyS.js} +3 -3
- package/dist/{work-type-router-CS2BB1vS.js.map → work-type-router-CHjciPyS.js.map} +1 -1
- package/dist/{workspace-config-CNXOpKuj.js → workspace-config-fUafvYMp.js} +1 -1
- package/dist/workspace-config-fUafvYMp.js.map +1 -0
- package/dist/workspace-manager-B9jS4Dsq.js +3 -0
- package/dist/{workspace-manager-CncdZkIy.js → workspace-manager-DuLhnzJV.js} +112 -27
- package/dist/workspace-manager-DuLhnzJV.js.map +1 -0
- package/package.json +2 -1
- package/scripts/post-merge-deploy.sh +25 -5
- package/scripts/record-cost-event.js +57 -7
- package/scripts/record-cost-event.js.map +1 -1
- package/skills/pan-help/SKILL.md +1 -1
- package/skills/pan-sync/SKILL.md +6 -6
- package/skills/workspace-add-repo/skill.md +46 -0
- package/templates/claude-md/sections/warnings.md +15 -2
- package/dist/clean-planning-sZXvy3Y5.js +0 -2
- package/dist/close-issue-Dml437qV.js +0 -2
- package/dist/close-issue-Dr7yZmrr.js.map +0 -1
- package/dist/compact-beads-iu218JcO.js +0 -2
- package/dist/dashboard/agent-enrichment-C67LJBgD.js.map +0 -1
- package/dist/dashboard/clean-planning-DCu3cOTu.js +0 -2
- package/dist/dashboard/close-issue-DfIggeZD.js.map +0 -1
- package/dist/dashboard/close-issue-DwdwYtar.js +0 -2
- package/dist/dashboard/compact-beads-DXY2fK2s.js +0 -2
- package/dist/dashboard/event-store-O9q0Gweh.js.map +0 -1
- package/dist/dashboard/hume-MZndNDVU.js +0 -3
- package/dist/dashboard/label-cleanup-CZEsbtq9.js.map +0 -1
- package/dist/dashboard/lifecycle-ZTYdrr2O.js +0 -7
- package/dist/dashboard/merge-agent-twroFuAh.js.map +0 -1
- package/dist/dashboard/projects-Cq3TWdPS.js.map +0 -1
- package/dist/dashboard/providers-Ck2sQd_F.js.map +0 -1
- package/dist/dashboard/public/assets/index-CpSmB2ts.css +0 -1
- package/dist/dashboard/public/assets/index-yarWhi0M.js +0 -214
- package/dist/dashboard/rally-CQ1OBJrJ.js +0 -3
- package/dist/dashboard/settings-CuHV-wcv.js.map +0 -1
- package/dist/dashboard/settings-DMeGBRsk.js +0 -2
- package/dist/dashboard/specialists-C6s3U6tX.js.map +0 -1
- package/dist/dashboard/workflows-B2ARUpOa.js +0 -2
- package/dist/dashboard/workflows-N1UTipYl.js.map +0 -1
- package/dist/dashboard/workspace-config-cmp5_ipD.js.map +0 -1
- package/dist/dashboard/workspace-manager-D_y9ZmW_.js.map +0 -1
- package/dist/hume-BjmwmJ9E.js +0 -3
- package/dist/label-cleanup-31ElPqqv.js.map +0 -1
- package/dist/merge-agent-VQH9z9t8.js.map +0 -1
- package/dist/projects-CvLepaxC.js.map +0 -1
- package/dist/providers-DcCPZ5K4.js.map +0 -1
- package/dist/rally-DR9x8--6.js +0 -3
- package/dist/shadow-state-p3jpGRPJ.js +0 -2
- package/dist/specialists-DEKqgkxp.js.map +0 -1
- package/dist/tracker-C_62ukEq.js.map +0 -1
- package/dist/tracker-utils-CVU2W1sX.js.map +0 -1
- package/dist/workspace-config-CNXOpKuj.js.map +0 -1
- package/dist/workspace-manager-CncdZkIy.js.map +0 -1
- package/dist/workspace-manager-Cx0r2Jnv.js +0 -3
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
import { a as getAgentPendingQuestions, c as getPendingQuestions, i as getAgentJsonlPath, n as getActiveSessionPath, o as getAgentWorkspace, r as getAgentJsonlMtime, s as getClaudeProjectDir, t as computeAgentEnrichment } from "./agent-enrichment-
|
|
1
|
+
import { a as getAgentPendingQuestions, c as getPendingQuestions, i as getAgentJsonlPath, n as getActiveSessionPath, o as getAgentWorkspace, r as getAgentJsonlMtime, s as getClaudeProjectDir, t as computeAgentEnrichment } from "./agent-enrichment-DdO7ZqjI.js";
|
|
2
2
|
export { computeAgentEnrichment, getActiveSessionPath, getAgentJsonlMtime, getAgentJsonlPath, getAgentPendingQuestions, getAgentWorkspace, getClaudeProjectDir, getPendingQuestions };
|
|
@@ -1,13 +1,13 @@
|
|
|
1
1
|
import { n as __esmMin } from "./chunk-DORXReHP.js";
|
|
2
|
-
import {
|
|
2
|
+
import { _ as init_paths, s as PANOPTICON_HOME, t as AGENTS_DIR } from "./paths-BDyJ7BiV.js";
|
|
3
3
|
import { i as loadConfig, r as init_config_yaml } from "./config-yaml-DSfYpzN6.js";
|
|
4
|
-
import { i as findProjectByPath, p as init_projects, s as getIssuePrefix } from "./projects-
|
|
5
|
-
import { a as init_providers, i as getProviderForModel, n as clearCredentialFileAuth, o as setupCredentialFileAuth, r as getProviderEnv } from "./providers-
|
|
6
|
-
import { a as getAgentSessions, i as createSession, l as sendKeysAsync, o as init_tmux, s as killSession, t as capturePane, u as sessionExists } from "./tmux-
|
|
7
|
-
import { a as init_config$1, c as require_toml, o as loadConfig$1 } from "./config-
|
|
8
|
-
import { i as generateFixedPointPrompt, o as initHook, s as init_hooks, t as checkHook } from "./hooks-
|
|
9
|
-
import { a as getModelId, s as init_work_type_router } from "./work-type-router-
|
|
10
|
-
import { n as createTrackerFromConfig, r as init_factory, t as createTracker } from "./factory-
|
|
4
|
+
import { i as findProjectByPath, p as init_projects, s as getIssuePrefix } from "./projects-CFVl4oHn.js";
|
|
5
|
+
import { a as init_providers, i as getProviderForModel, n as clearCredentialFileAuth, o as setupCredentialFileAuth, r as getProviderEnv } from "./providers-B5Y4H2Mg.js";
|
|
6
|
+
import { a as getAgentSessions, i as createSession, l as sendKeysAsync, o as init_tmux, s as killSession, t as capturePane, u as sessionExists } from "./tmux-LwG0tHhU.js";
|
|
7
|
+
import { a as init_config$1, c as require_toml, o as loadConfig$1 } from "./config-CDkGjnwy.js";
|
|
8
|
+
import { i as generateFixedPointPrompt, o as initHook, s as init_hooks, t as checkHook } from "./hooks-CjqXOlNb.js";
|
|
9
|
+
import { a as getModelId, s as init_work_type_router } from "./work-type-router-CWVW2Wk_.js";
|
|
10
|
+
import { n as createTrackerFromConfig, r as init_factory, t as createTracker } from "./factory-C8nhLGHB.js";
|
|
11
11
|
import { appendFileSync, existsSync, mkdirSync, readFileSync, readdirSync, statSync, unlinkSync, writeFileSync } from "fs";
|
|
12
12
|
import { join, resolve } from "path";
|
|
13
13
|
import { homedir } from "os";
|
|
@@ -599,7 +599,7 @@ async function spawnAgent(options) {
|
|
|
599
599
|
try {
|
|
600
600
|
const venvPath = join(options.workspace, ".venv");
|
|
601
601
|
if (existsSync(venvPath)) {
|
|
602
|
-
const { getTldrDaemonService } = await import("./tldr-daemon-
|
|
602
|
+
const { getTldrDaemonService } = await import("./tldr-daemon-Cfs0bXTi.js");
|
|
603
603
|
const tldrService = getTldrDaemonService(options.workspace, venvPath);
|
|
604
604
|
if (!(await tldrService.getStatus()).running) {
|
|
605
605
|
await tldrService.start(true);
|
|
@@ -623,13 +623,13 @@ exec claude --dangerously-skip-permissions --model ${state.model} "\$prompt"
|
|
|
623
623
|
claudeCmd = `bash "${launcherScript}"`;
|
|
624
624
|
} else claudeCmd = `claude --dangerously-skip-permissions --model ${state.model}`;
|
|
625
625
|
try {
|
|
626
|
-
const { preTrustDirectory } = await import("./workspace-manager-
|
|
626
|
+
const { preTrustDirectory } = await import("./workspace-manager-BYfzs_t2.js");
|
|
627
627
|
preTrustDirectory(options.workspace);
|
|
628
628
|
} catch {}
|
|
629
629
|
try {
|
|
630
|
-
const { isGitHubAppConfigured, generateInstallationToken, configureWorkspaceForBot } = await import("./github-app-
|
|
630
|
+
const { isGitHubAppConfigured, generateInstallationToken, configureWorkspaceForBot } = await import("./github-app-DClWjjHr.js");
|
|
631
631
|
if (isGitHubAppConfigured()) {
|
|
632
|
-
const { findProjectByPath } = await import("./projects-
|
|
632
|
+
const { findProjectByPath } = await import("./projects-C5ozxjwP.js");
|
|
633
633
|
const ghRepo = findProjectByPath(resolve(options.workspace, "..", ".."))?.github_repo;
|
|
634
634
|
if (ghRepo) {
|
|
635
635
|
const [owner, repo] = ghRepo.split("/");
|
|
@@ -764,7 +764,7 @@ async function messageAgent(agentId, message) {
|
|
|
764
764
|
writeFileSync(join(mailDir, `${(/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-")}.md`), `# Message\n\n${message}\n`);
|
|
765
765
|
return;
|
|
766
766
|
}
|
|
767
|
-
const { loadRemoteAgentState, sendToRemoteAgent } = await import("./remote-agents-
|
|
767
|
+
const { loadRemoteAgentState, sendToRemoteAgent } = await import("./remote-agents-Bf3GuM7t.js");
|
|
768
768
|
const remoteState = loadRemoteAgentState(normalizedId);
|
|
769
769
|
if (remoteState && remoteState.vmName) {
|
|
770
770
|
console.log(`[agents] Sending message to remote agent ${normalizedId} on ${remoteState.vmName}`);
|
|
@@ -1053,4 +1053,4 @@ var init_agents = __esmMin((() => {
|
|
|
1053
1053
|
//#endregion
|
|
1054
1054
|
export { saveCloisterConfig as A, stopAgent as C, getHealthThresholdsMs as D, warnOnBareNumericIssueIds as E, init_config as O, spawnAgent as S, transitionIssueToInReview as T, recoverAgent as _, getAgentDir as a, saveAgentState as b, getAgentState as c, getProviderExportsForModel as d, getProviderTmuxFlags as f, messageAgent as g, listRunningAgents as h, getActivity as i, shouldAutoStart as j, loadCloisterConfig as k, getLatestSessionId as l, init_agents as m, autoRecoverAgents as n, getAgentRuntimeFile as o, getSessionId as p, detectCrashedAgents as r, getAgentRuntimeState as s, appendActivity as t, getProviderEnvForModel as u, resumeAgent as v, transitionIssueToInProgress as w, saveSessionId as x, saveAgentRuntimeState as y };
|
|
1055
1055
|
|
|
1056
|
-
//# sourceMappingURL=agents-
|
|
1056
|
+
//# sourceMappingURL=agents-DCpQQ_W5.js.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"agents-YyO6t5Xa.js","names":["loadYamlConfig","loadConfig"],"sources":["../../src/lib/cloister/config.ts","../../src/lib/cv.ts","../../src/lib/agents.ts"],"sourcesContent":["/**\n * Cloister Configuration\n *\n * Loads and manages Cloister configuration from ~/.panopticon/cloister.toml\n */\n\nimport { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs';\nimport { parse, stringify } from '@iarna/toml';\nimport { join } from 'path';\nimport { PANOPTICON_HOME } from '../paths.js';\n\nconst CLOISTER_CONFIG_FILE = join(PANOPTICON_HOME, 'cloister.toml');\n\n/**\n * Health threshold configuration (in minutes)\n */\nexport interface HealthThresholds {\n stale: number;\n warning: number;\n stuck: number;\n}\n\n/**\n * Automatic action configuration\n */\nexport interface AutoActions {\n poke_on_warning: boolean;\n poke_on_stuck: boolean; // Poke agents idle > stuck threshold (default: true)\n kill_on_stuck: boolean;\n restart_on_kill: boolean;\n /** Minimum ms between pokes for the same agent. Prevents spam on repeated health checks. 
Default: 30 min */\n poke_cooldown_ms: number;\n}\n\n/**\n * Monitoring configuration\n */\nexport interface MonitoringConfig {\n check_interval: number; // seconds between health checks\n heartbeat_sources: ('jsonl_mtime' | 'tmux_activity' | 'git_activity' | 'active_heartbeat')[];\n}\n\n/**\n * Startup configuration\n */\nexport interface StartupConfig {\n auto_start: boolean; // Start Cloister when dashboard starts\n}\n\n/**\n * Notification configuration (future feature)\n */\nexport interface NotificationConfig {\n slack_webhook?: string;\n email?: string;\n}\n\n/**\n * Specialist agent configuration\n */\nexport interface SpecialistConfig {\n enabled: boolean;\n auto_wake: boolean;\n}\n\n/**\n * Test agent specific configuration\n */\nexport interface TestAgentConfig extends SpecialistConfig {\n test_command?: string; // Optional test command override (e.g., \"npm test\", \"pytest\", etc.)\n}\n\n/**\n * All specialist agents configuration\n */\nexport interface SpecialistsConfig {\n merge_agent?: SpecialistConfig;\n review_agent?: SpecialistConfig;\n test_agent?: TestAgentConfig;\n}\n\n/**\n * Model selection configuration\n */\nexport interface ModelSelectionConfig {\n default_model: 'opus' | 'sonnet' | 'haiku';\n complexity_routing: {\n trivial: 'opus' | 'sonnet' | 'haiku';\n simple: 'opus' | 'sonnet' | 'haiku';\n medium: 'opus' | 'sonnet' | 'haiku';\n complex: 'opus' | 'sonnet' | 'haiku';\n expert: 'opus' | 'sonnet' | 'haiku';\n };\n specialist_models: {\n merge_agent: 'opus' | 'sonnet' | 'haiku';\n review_agent: 'opus' | 'sonnet' | 'haiku';\n test_agent: 'opus' | 'sonnet' | 'haiku';\n };\n}\n\n/**\n * Handoff trigger configuration\n */\nexport interface HandoffTriggersConfig {\n stuck_escalation?: {\n enabled: boolean;\n haiku_to_sonnet_minutes: number;\n sonnet_to_opus_minutes: number;\n };\n test_failure?: {\n enabled: boolean;\n from_model: 'opus' | 'sonnet' | 'haiku';\n to_model: 'opus' | 'sonnet' | 'haiku';\n trigger_on: 'any_failure' | 
'2_consecutive';\n };\n implementation_complete?: {\n enabled: boolean;\n to_specialist: string; // e.g., 'test-agent'\n };\n}\n\n/**\n * Handoff configuration\n */\nexport interface HandoffConfig {\n auto_triggers: HandoffTriggersConfig;\n}\n\n/**\n * Cost tracking configuration\n */\nexport interface CostTrackingConfig {\n display_enabled: boolean;\n log_to_jsonl: boolean;\n}\n\n/**\n * Auto-restart configuration\n */\nexport interface AutoRestartConfig {\n enabled: boolean;\n max_retries: number;\n backoff_seconds: number[]; // Array of backoff delays (e.g., [30, 60, 120])\n}\n\n/**\n * Cost limits configuration\n */\nexport interface CostLimitsConfig {\n per_agent_usd: number;\n per_issue_usd: number;\n daily_total_usd: number;\n alert_threshold: number; // Fraction (0.0-1.0) at which to start alerting\n}\n\n/**\n * Retention policy configuration\n */\nexport interface RetentionConfig {\n agent_state_days: number; // Days to keep agent state dirs (default: 30)\n health_staleness_hours: number; // Hours before hiding stale agents in health API (default: 24)\n}\n\n/**\n * Complete Cloister configuration\n */\nexport interface CloisterConfig {\n startup: StartupConfig;\n thresholds: HealthThresholds;\n auto_actions: AutoActions;\n monitoring: MonitoringConfig;\n notifications?: NotificationConfig;\n specialists?: SpecialistsConfig;\n model_selection?: ModelSelectionConfig;\n handoffs?: HandoffConfig;\n cost_tracking?: CostTrackingConfig;\n auto_restart?: AutoRestartConfig;\n cost_limits?: CostLimitsConfig;\n retention?: RetentionConfig;\n}\n\n/**\n * Default Cloister configuration\n */\nexport const DEFAULT_CLOISTER_CONFIG: CloisterConfig = {\n startup: {\n auto_start: true,\n },\n thresholds: {\n stale: 5,\n warning: 15,\n stuck: 30,\n },\n auto_actions: {\n poke_on_warning: true,\n poke_on_stuck: true, // Poke agents that have been idle > stuck threshold\n kill_on_stuck: false, // Manual by default for safety\n restart_on_kill: false,\n poke_cooldown_ms: 30 * 60 
* 1000, // 30 min between pokes for the same agent\n },\n monitoring: {\n check_interval: 60, // 1 minute\n heartbeat_sources: ['jsonl_mtime', 'tmux_activity', 'git_activity'],\n },\n notifications: {\n slack_webhook: undefined,\n email: undefined,\n },\n specialists: {\n merge_agent: {\n enabled: true,\n auto_wake: false, // Only wake on explicit \"Approve & Merge\" click\n },\n review_agent: {\n enabled: true,\n auto_wake: false, // Only wake on explicit request\n },\n test_agent: {\n enabled: false, // Not yet implemented\n auto_wake: false,\n },\n },\n model_selection: {\n default_model: 'sonnet',\n complexity_routing: {\n trivial: 'haiku',\n simple: 'haiku',\n medium: 'sonnet',\n complex: 'sonnet',\n expert: 'opus',\n },\n specialist_models: {\n merge_agent: 'sonnet',\n review_agent: 'sonnet',\n test_agent: 'haiku',\n },\n },\n handoffs: {\n auto_triggers: {\n stuck_escalation: {\n enabled: true,\n haiku_to_sonnet_minutes: 10,\n sonnet_to_opus_minutes: 20,\n },\n test_failure: {\n enabled: true,\n from_model: 'haiku',\n to_model: 'sonnet',\n trigger_on: 'any_failure',\n },\n implementation_complete: {\n enabled: true, // Auto-handoff to test-agent when implementation done\n to_specialist: 'test-agent',\n },\n },\n },\n cost_tracking: {\n display_enabled: true,\n log_to_jsonl: true,\n },\n auto_restart: {\n enabled: true,\n max_retries: 3,\n backoff_seconds: [30, 60, 120], // 30s, 1m, 2m\n },\n cost_limits: {\n per_agent_usd: 10.0,\n per_issue_usd: 25.0,\n daily_total_usd: 100.0,\n alert_threshold: 0.8, // Alert at 80%\n },\n retention: {\n agent_state_days: 30,\n health_staleness_hours: 24,\n },\n};\n\n/**\n * Deep merge utility that recursively merges objects.\n * - Recursively merges nested objects\n * - Arrays in overrides replace defaults (not concatenated)\n * - User values take precedence over defaults\n */\nfunction deepMerge<T extends object>(defaults: T, overrides: Partial<T>): T {\n const result = { ...defaults };\n\n for (const key of 
Object.keys(overrides) as (keyof T)[]) {\n const defaultVal = defaults[key];\n const overrideVal = overrides[key];\n\n // Skip undefined values in overrides\n if (overrideVal === undefined) continue;\n\n // Deep merge if both values are non-array objects\n if (\n typeof defaultVal === 'object' &&\n defaultVal !== null &&\n !Array.isArray(defaultVal) &&\n typeof overrideVal === 'object' &&\n overrideVal !== null &&\n !Array.isArray(overrideVal)\n ) {\n result[key] = deepMerge(defaultVal as any, overrideVal as any);\n } else {\n // Direct override for primitives and arrays\n result[key] = overrideVal as T[keyof T];\n }\n }\n\n return result;\n}\n\n/**\n * Load Cloister configuration\n *\n * Reads from ~/.panopticon/cloister.toml and merges with defaults.\n * Creates default config file if it doesn't exist.\n */\nexport function loadCloisterConfig(): CloisterConfig {\n // Ensure panopticon home exists\n if (!existsSync(PANOPTICON_HOME)) {\n mkdirSync(PANOPTICON_HOME, { recursive: true });\n }\n\n // If config file doesn't exist, create it with defaults\n if (!existsSync(CLOISTER_CONFIG_FILE)) {\n saveCloisterConfig(DEFAULT_CLOISTER_CONFIG);\n return DEFAULT_CLOISTER_CONFIG;\n }\n\n try {\n const content = readFileSync(CLOISTER_CONFIG_FILE, 'utf-8');\n const parsed = parse(content) as unknown as Partial<CloisterConfig>;\n\n // Deep merge with defaults\n return deepMerge(DEFAULT_CLOISTER_CONFIG, parsed);\n } catch (error) {\n console.error('Failed to load Cloister config:', error);\n console.error('Using default configuration');\n return DEFAULT_CLOISTER_CONFIG;\n }\n}\n\n/**\n * Save Cloister configuration\n *\n * Writes configuration to ~/.panopticon/cloister.toml\n */\nexport function saveCloisterConfig(config: CloisterConfig): void {\n // Ensure panopticon home exists\n if (!existsSync(PANOPTICON_HOME)) {\n mkdirSync(PANOPTICON_HOME, { recursive: true });\n }\n\n try {\n const content = stringify(config as any);\n writeFileSync(CLOISTER_CONFIG_FILE, content, 
'utf-8');\n } catch (error) {\n console.error('Failed to save Cloister config:', error);\n throw error;\n }\n}\n\n/**\n * Update Cloister configuration\n *\n * Merges partial config updates with existing config.\n */\nexport function updateCloisterConfig(updates: Partial<CloisterConfig>): CloisterConfig {\n const current = loadCloisterConfig();\n const updated = deepMerge(current, updates);\n saveCloisterConfig(updated);\n return updated;\n}\n\n/**\n * Get the path to the Cloister config file\n */\nexport function getCloisterConfigPath(): string {\n return CLOISTER_CONFIG_FILE;\n}\n\n/**\n * Check if Cloister should auto-start\n */\nexport function shouldAutoStart(): boolean {\n const config = loadCloisterConfig();\n return config.startup.auto_start;\n}\n\n/**\n * Get health thresholds in milliseconds\n */\nexport function getHealthThresholdsMs(): {\n stale: number;\n warning: number;\n stuck: number;\n} {\n const config = loadCloisterConfig();\n return {\n stale: config.thresholds.stale * 60 * 1000,\n warning: config.thresholds.warning * 60 * 1000,\n stuck: config.thresholds.stuck * 60 * 1000,\n };\n}\n","/**\n * Agent CV (Work History) System\n *\n * Tracks agent performance over time to enable capability-based routing.\n */\n\nimport { existsSync, mkdirSync, readFileSync, writeFileSync, readdirSync } from 'fs';\nimport { join } from 'path';\nimport { AGENTS_DIR } from './paths.js';\n\nexport interface WorkEntry {\n issueId: string;\n startedAt: string;\n completedAt?: string;\n outcome: 'success' | 'failed' | 'abandoned' | 'in_progress';\n duration?: number; // minutes\n skills?: string[];\n failureReason?: string;\n commits?: number;\n linesChanged?: number;\n}\n\nexport interface AgentCV {\n agentId: string;\n createdAt: string;\n lastActive: string;\n runtime: string;\n model: string;\n stats: {\n totalIssues: number;\n successCount: number;\n failureCount: number;\n abandonedCount: number;\n avgDuration: number; // minutes\n successRate: number; // 0-1\n 
};\n skillsUsed: string[];\n recentWork: WorkEntry[];\n}\n\nfunction getCVFile(agentId: string): string {\n return join(AGENTS_DIR, agentId, 'cv.json');\n}\n\n/**\n * Get or create an agent's CV\n */\nexport function getAgentCV(agentId: string): AgentCV {\n const cvFile = getCVFile(agentId);\n\n if (existsSync(cvFile)) {\n try {\n return JSON.parse(readFileSync(cvFile, 'utf-8'));\n } catch {}\n }\n\n // Create new CV\n const cv: AgentCV = {\n agentId,\n createdAt: new Date().toISOString(),\n lastActive: new Date().toISOString(),\n runtime: 'claude',\n model: 'sonnet',\n stats: {\n totalIssues: 0,\n successCount: 0,\n failureCount: 0,\n abandonedCount: 0,\n avgDuration: 0,\n successRate: 0,\n },\n skillsUsed: [],\n recentWork: [],\n };\n\n saveAgentCV(cv);\n return cv;\n}\n\n/**\n * Save an agent's CV\n */\nexport function saveAgentCV(cv: AgentCV): void {\n const dir = join(AGENTS_DIR, cv.agentId);\n mkdirSync(dir, { recursive: true });\n writeFileSync(getCVFile(cv.agentId), JSON.stringify(cv, null, 2));\n}\n\n/**\n * Start tracking work for an agent\n */\nexport function startWork(agentId: string, issueId: string, skills?: string[]): void {\n const cv = getAgentCV(agentId);\n\n const entry: WorkEntry = {\n issueId,\n startedAt: new Date().toISOString(),\n outcome: 'in_progress',\n skills,\n };\n\n cv.recentWork.unshift(entry);\n cv.stats.totalIssues++;\n cv.lastActive = new Date().toISOString();\n\n // Track skills\n if (skills) {\n for (const skill of skills) {\n if (!cv.skillsUsed.includes(skill)) {\n cv.skillsUsed.push(skill);\n }\n }\n }\n\n // Keep only last 50 entries\n if (cv.recentWork.length > 50) {\n cv.recentWork = cv.recentWork.slice(0, 50);\n }\n\n saveAgentCV(cv);\n}\n\n/**\n * Complete work for an agent\n */\nexport function completeWork(\n agentId: string,\n issueId: string,\n outcome: 'success' | 'failed' | 'abandoned',\n details?: { commits?: number; linesChanged?: number; failureReason?: string }\n): void {\n const cv = getAgentCV(agentId);\n\n 
// Find the work entry\n const entry = cv.recentWork.find(\n (w) => w.issueId === issueId && w.outcome === 'in_progress'\n );\n\n if (entry) {\n entry.outcome = outcome;\n entry.completedAt = new Date().toISOString();\n entry.duration = Math.round(\n (new Date().getTime() - new Date(entry.startedAt).getTime()) / (1000 * 60)\n );\n if (details?.commits) entry.commits = details.commits;\n if (details?.linesChanged) entry.linesChanged = details.linesChanged;\n if (details?.failureReason) entry.failureReason = details.failureReason;\n }\n\n // Update stats\n if (outcome === 'success') {\n cv.stats.successCount++;\n } else if (outcome === 'failed') {\n cv.stats.failureCount++;\n } else if (outcome === 'abandoned') {\n cv.stats.abandonedCount++;\n }\n\n // Calculate success rate\n const completed = cv.stats.successCount + cv.stats.failureCount + cv.stats.abandonedCount;\n cv.stats.successRate = completed > 0 ? cv.stats.successCount / completed : 0;\n\n // Calculate average duration (only from completed work)\n const completedEntries = cv.recentWork.filter(\n (w) => w.duration !== undefined && w.outcome !== 'in_progress'\n );\n if (completedEntries.length > 0) {\n const totalDuration = completedEntries.reduce((sum, w) => sum + (w.duration || 0), 0);\n cv.stats.avgDuration = Math.round(totalDuration / completedEntries.length);\n }\n\n cv.lastActive = new Date().toISOString();\n saveAgentCV(cv);\n}\n\n/**\n * Get agent rankings by success rate\n */\nexport function getAgentRankings(): Array<{\n agentId: string;\n successRate: number;\n totalIssues: number;\n avgDuration: number;\n}> {\n const rankings: Array<{\n agentId: string;\n successRate: number;\n totalIssues: number;\n avgDuration: number;\n }> = [];\n\n if (!existsSync(AGENTS_DIR)) return rankings;\n\n const dirs = readdirSync(AGENTS_DIR, { withFileTypes: true }).filter(\n (d) => d.isDirectory()\n );\n\n for (const dir of dirs) {\n const cv = getAgentCV(dir.name);\n if (cv.stats.totalIssues > 0) {\n 
rankings.push({\n agentId: dir.name,\n successRate: cv.stats.successRate,\n totalIssues: cv.stats.totalIssues,\n avgDuration: cv.stats.avgDuration,\n });\n }\n }\n\n // Sort by success rate, then by total issues\n rankings.sort((a, b) => {\n if (b.successRate !== a.successRate) {\n return b.successRate - a.successRate;\n }\n return b.totalIssues - a.totalIssues;\n });\n\n return rankings;\n}\n\n/**\n * Format CV for display\n */\nexport function formatCV(cv: AgentCV): string {\n const lines: string[] = [\n `# Agent CV: ${cv.agentId}`,\n '',\n `Runtime: ${cv.runtime} (${cv.model})`,\n `Created: ${cv.createdAt}`,\n `Last Active: ${cv.lastActive}`,\n '',\n '## Statistics',\n '',\n `- Total Issues: ${cv.stats.totalIssues}`,\n `- Success Rate: ${(cv.stats.successRate * 100).toFixed(1)}%`,\n `- Successes: ${cv.stats.successCount}`,\n `- Failures: ${cv.stats.failureCount}`,\n `- Abandoned: ${cv.stats.abandonedCount}`,\n `- Avg Duration: ${cv.stats.avgDuration} minutes`,\n '',\n ];\n\n if (cv.skillsUsed.length > 0) {\n lines.push('## Skills Used');\n lines.push('');\n lines.push(cv.skillsUsed.join(', '));\n lines.push('');\n }\n\n if (cv.recentWork.length > 0) {\n lines.push('## Recent Work');\n lines.push('');\n\n for (const work of cv.recentWork.slice(0, 10)) {\n const statusIcon = {\n success: '✓',\n failed: '✗',\n abandoned: '⊘',\n in_progress: '●',\n }[work.outcome];\n\n const duration = work.duration ? 
` (${work.duration}m)` : '';\n lines.push(`${statusIcon} ${work.issueId}${duration}`);\n\n if (work.failureReason) {\n lines.push(` Reason: ${work.failureReason}`);\n }\n }\n lines.push('');\n }\n\n return lines.join('\\n');\n}\n","import { existsSync, mkdirSync, writeFileSync, readFileSync, readdirSync, appendFileSync, unlinkSync, statSync } from 'fs';\nimport { join, resolve } from 'path';\nimport { homedir } from 'os';\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport { AGENTS_DIR } from './paths.js';\nimport { createSession, killSession, sendKeys, sendKeysAsync, sessionExists, getAgentSessions, capturePane } from './tmux.js';\nimport { initHook, checkHook, generateFixedPointPrompt } from './hooks.js';\nimport { startWork, completeWork, getAgentCV } from './cv.js';\nimport type { ComplexityLevel } from './cloister/complexity.js';\nimport { loadCloisterConfig } from './cloister/config.js';\nimport type { ModelId } from './settings.js';\nimport { getModelId, WorkTypeId } from './work-type-router.js';\nimport { getProviderForModel, getProviderEnv, setupCredentialFileAuth, clearCredentialFileAuth, requiresRouter } from './providers.js';\nimport { loadConfig as loadYamlConfig } from './config-yaml.js';\nimport { loadConfig } from './config.js';\nimport { createTrackerFromConfig, createTracker } from './tracker/factory.js';\nimport type { IssueState } from './tracker/interface.js';\nimport { findProjectByPath, getIssuePrefix } from './projects.js';\n\nconst execAsync = promisify(exec);\n\n/** Known agent ID prefixes — IDs with these prefixes are already normalized */\nconst AGENT_PREFIXES = ['agent-', 'planning-'];\n\n/** Normalize agent ID: preserve known prefixes, add 'agent-' for bare issue IDs */\nfunction normalizeAgentId(agentId: string): string {\n if (AGENT_PREFIXES.some(p => agentId.startsWith(p))) {\n return agentId;\n }\n return `agent-${agentId.toLowerCase()}`;\n}\n\n/**\n * Get provider-specific env vars (BASE_URL, 
AUTH_TOKEN) for a model.\n * Reads the current API key from settings so resumed/recovered agents\n * always use the latest key.\n */\nexport function getProviderEnvForModel(model: string): Record<string, string> {\n const provider = getProviderForModel(model);\n if (provider.name === 'anthropic') return {};\n\n // OpenRouter API key is stored in config.yaml under providers.openrouter.api_key\n if (provider.name === 'openrouter') {\n const { config } = loadYamlConfig();\n const apiKey = config.apiKeys.openrouter;\n if (apiKey) {\n return getProviderEnv(provider, apiKey);\n }\n throw new Error(`OpenRouter API key not configured. Add your key in Settings → OpenRouter before using model \"${model}\".`);\n }\n\n const { config } = loadYamlConfig();\n const apiKey = config.apiKeys[provider.name as keyof typeof config.apiKeys];\n if (apiKey) {\n return getProviderEnv(provider, apiKey);\n }\n throw new Error(`No API key configured for ${provider.displayName}. Configure it in Settings before using model \"${model}\".`);\n}\n\n/**\n * Get bash export lines for provider env vars (for use in launcher scripts).\n * Returns empty string for Anthropic models.\n */\nexport function getProviderExportsForModel(model: string): string {\n const envVars = getProviderEnvForModel(model);\n if (Object.keys(envVars).length === 0) return '';\n return Object.entries(envVars)\n .map(([k, v]) => `export ${k}=\"${v.replace(/\"/g, '\\\\\"')}\"`)\n .join('\\n') + '\\n';\n}\n\n/**\n * Get tmux -e flags for provider env vars (for use in tmux new-session).\n * Returns empty string for Anthropic models.\n */\nexport function getProviderTmuxFlags(model: string): string {\n const envVars = getProviderEnvForModel(model);\n let flags = '';\n for (const [key, value] of Object.entries(envVars)) {\n flags += ` -e ${key}=\"${value.replace(/\"/g, '\\\\\"')}\"`;\n }\n return flags;\n}\n\n// ============================================================================\n// Ready Signal Management (PAN-87)\n// 
============================================================================\n\n/**\n * Get path to agent's ready signal file (written by SessionStart hook)\n */\nfunction getReadySignalPath(agentId: string): string {\n return join(getAgentDir(agentId), 'ready.json');\n}\n\n/**\n * Clear ready signal before spawning (clean slate)\n */\nfunction clearReadySignal(agentId: string): void {\n const readyPath = getReadySignalPath(agentId);\n if (existsSync(readyPath)) {\n try {\n unlinkSync(readyPath);\n } catch {\n // Ignore errors - non-critical\n }\n }\n}\n\n/**\n * Wait for SessionStart hook to signal ready (async - non-blocking)\n * Returns true if ready signal received, false if timeout\n */\nasync function waitForReadySignal(agentId: string, timeoutSeconds = 30): Promise<boolean> {\n const readyPath = getReadySignalPath(agentId);\n\n for (let i = 0; i < timeoutSeconds; i++) {\n await new Promise(resolve => setTimeout(resolve, 1000)); // Non-blocking sleep\n\n if (existsSync(readyPath)) {\n try {\n const content = readFileSync(readyPath, 'utf-8');\n const signal = JSON.parse(content);\n if (signal.ready === true) {\n return true;\n }\n } catch {\n // File exists but invalid - keep waiting\n }\n }\n }\n\n return false;\n}\n\nexport interface AgentState {\n id: string;\n issueId: string;\n workspace: string;\n runtime: string;\n model: string;\n status: 'starting' | 'running' | 'stopped' | 'error';\n startedAt: string;\n lastActivity?: string;\n branch?: string; // Git branch name for this agent\n\n // Model routing & handoffs (Phase 4)\n complexity?: ComplexityLevel;\n handoffCount?: number;\n costSoFar?: number;\n sessionId?: string; // For resuming sessions after handoff\n\n // Work type system (PAN-118)\n phase?: 'exploration' | 'implementation' | 'testing' | 'documentation' | 'review-response';\n workType?: WorkTypeId; // Current work type ID\n\n // SageOx session tracking (PAN-278)\n sageoxSessionPath?: string; // Path to SageOx session folder for parent 
linking\n}\n\nexport function getAgentDir(agentId: string): string {\n return join(AGENTS_DIR, agentId);\n}\n\nexport function getAgentState(agentId: string): AgentState | null {\n const stateFile = join(getAgentDir(agentId), 'state.json');\n if (!existsSync(stateFile)) return null;\n\n const content = readFileSync(stateFile, 'utf8');\n return JSON.parse(content);\n}\n\nexport function saveAgentState(state: AgentState): void {\n const dir = getAgentDir(state.id);\n mkdirSync(dir, { recursive: true });\n\n writeFileSync(\n join(dir, 'state.json'),\n JSON.stringify(state, null, 2)\n );\n}\n\n// ============================================================================\n// Hook-based State Management (PAN-80)\n// ============================================================================\n\n/**\n * Agent runtime state (hook-based tracking)\n */\nexport type AgentResolution = 'working' | 'done' | 'needs_input' | 'stuck' | 'completed' | 'unclear';\n\nexport interface AgentRuntimeState {\n state: 'active' | 'idle' | 'suspended' | 'stopped' | 'uninitialized';\n lastActivity: string;\n currentTool?: string;\n sessionId?: string;\n claudeSessionId?: string; // Claude Code session ID (written by heartbeat hook)\n suspendedAt?: string;\n resumedAt?: string;\n currentIssue?: string; // Issue ID the agent is currently working on\n resolution?: AgentResolution; // Lifecycle completion signal (PAN-309)\n resolutionCount?: number; // How many times this resolution was set\n resolutionUpdatedAt?: string; // When resolution was last updated\n}\n\n/**\n * Activity log entry\n */\nexport interface ActivityEntry {\n ts: string;\n tool: string;\n action?: string;\n state?: 'active' | 'idle';\n}\n\n/**\n * Get the path to an agent's runtime state file (separate from config state)\n */\nexport function getAgentRuntimeFile(agentId: string): string {\n return join(getAgentDir(agentId), 'runtime.json');\n}\n\n/**\n * Get agent runtime state (from hooks)\n *\n * Reads from runtime.json 
(new) with fallback to state.json (legacy migration).\n * This separation prevents bash hooks from corrupting AgentState config.\n */\nexport function getAgentRuntimeState(agentId: string): AgentRuntimeState | null {\n const runtimeFile = getAgentRuntimeFile(agentId);\n const stateFile = join(getAgentDir(agentId), 'state.json');\n\n // Try runtime.json first (new location)\n if (existsSync(runtimeFile)) {\n try {\n const content = readFileSync(runtimeFile, 'utf8');\n return JSON.parse(content) as AgentRuntimeState;\n } catch {\n // Fall through to legacy\n }\n }\n\n // Fallback to state.json (legacy — runtime fields were mixed in)\n if (existsSync(stateFile)) {\n try {\n const content = readFileSync(stateFile, 'utf8');\n const parsed = JSON.parse(content);\n // Only use if it has runtime-specific fields\n if (parsed.state && parsed.lastActivity) {\n return parsed as AgentRuntimeState;\n }\n } catch {\n // Ignore\n }\n }\n\n // No state at all — uninitialized\n if (!existsSync(stateFile) && !existsSync(runtimeFile)) {\n return {\n state: 'uninitialized',\n lastActivity: new Date().toISOString(),\n };\n }\n\n return null;\n}\n\n/**\n * Save agent runtime state to runtime.json (separate from AgentState config)\n *\n * This writes ONLY to runtime.json, never touching state.json.\n * This separation is critical: bash hooks write runtime.json on every tool call,\n * while AgentState in state.json is only written at lifecycle events (spawn/stop/handoff).\n */\nexport function saveAgentRuntimeState(agentId: string, state: Partial<AgentRuntimeState>): void {\n const dir = getAgentDir(agentId);\n mkdirSync(dir, { recursive: true });\n\n const runtimeFile = getAgentRuntimeFile(agentId);\n\n // Merge with existing runtime state (read from runtime.json only, not state.json)\n let existing: AgentRuntimeState | null = null;\n if (existsSync(runtimeFile)) {\n try {\n existing = JSON.parse(readFileSync(runtimeFile, 'utf8'));\n } catch {\n // Ignore corrupt file\n }\n }\n\n const 
merged: AgentRuntimeState = {\n ...(existing || { state: 'uninitialized', lastActivity: new Date().toISOString() }),\n ...state,\n };\n\n writeFileSync(runtimeFile, JSON.stringify(merged, null, 2));\n}\n\n/**\n * Append to activity log with automatic pruning to 100 entries\n */\nexport function appendActivity(agentId: string, entry: ActivityEntry): void {\n const dir = getAgentDir(agentId);\n mkdirSync(dir, { recursive: true });\n\n const activityFile = join(dir, 'activity.jsonl');\n\n // Append entry\n appendFileSync(activityFile, JSON.stringify(entry) + '\\n');\n\n // Prune to last 100 entries\n if (existsSync(activityFile)) {\n try {\n const lines = readFileSync(activityFile, 'utf8').trim().split('\\n');\n if (lines.length > 100) {\n const trimmed = lines.slice(-100);\n writeFileSync(activityFile, trimmed.join('\\n') + '\\n');\n }\n } catch (error) {\n // Ignore pruning errors - activity log is non-critical\n }\n }\n}\n\n/**\n * Read activity log (last N entries)\n */\nexport function getActivity(agentId: string, limit = 100): ActivityEntry[] {\n const activityFile = join(getAgentDir(agentId), 'activity.jsonl');\n\n if (!existsSync(activityFile)) {\n return [];\n }\n\n try {\n const lines = readFileSync(activityFile, 'utf8').trim().split('\\n');\n const entries = lines\n .filter(line => line.trim())\n .map(line => JSON.parse(line) as ActivityEntry)\n .slice(-limit);\n\n return entries;\n } catch {\n return [];\n }\n}\n\n/**\n * Save Claude session ID for later resume\n */\nexport function saveSessionId(agentId: string, sessionId: string): void {\n const dir = getAgentDir(agentId);\n mkdirSync(dir, { recursive: true });\n\n writeFileSync(join(dir, 'session.id'), sessionId);\n}\n\n/**\n * Get saved Claude session ID\n */\nexport function getSessionId(agentId: string): string | null {\n const sessionFile = join(getAgentDir(agentId), 'session.id');\n\n if (!existsSync(sessionFile)) {\n return null;\n }\n\n try {\n return readFileSync(sessionFile, 'utf8').trim();\n } 
catch {\n return null;\n }\n}\n\n/**\n * Get the latest Claude session ID from any available source.\n * Checks session.id first (written by suspend), then sessions.json (written by heartbeat hook),\n * then runtime.json claudeSessionId field.\n */\nexport function getLatestSessionId(agentId: string): string | null {\n // 1. session.id (written by auto-suspend)\n const fromSessionFile = getSessionId(agentId);\n if (fromSessionFile) return fromSessionFile;\n\n // 2. sessions.json (written by heartbeat hook — last entry is most recent)\n const sessionsFile = join(getAgentDir(agentId), 'sessions.json');\n try {\n if (existsSync(sessionsFile)) {\n const sessions = JSON.parse(readFileSync(sessionsFile, 'utf8'));\n if (Array.isArray(sessions) && sessions.length > 0) {\n return sessions[sessions.length - 1];\n }\n }\n } catch { /* non-fatal */ }\n\n // 3. runtime.json claudeSessionId\n const runtimeState = getAgentRuntimeState(agentId);\n if (runtimeState?.claudeSessionId) {\n return runtimeState.claudeSessionId;\n }\n\n return null;\n}\n\nexport interface SpawnOptions {\n issueId: string;\n workspace: string;\n runtime?: string;\n model?: string;\n prompt?: string;\n difficulty?: ComplexityLevel;\n agentType?: 'review-agent' | 'test-agent' | 'merge-agent' | 'work-agent';\n\n // Work type system (PAN-118)\n phase?: 'exploration' | 'implementation' | 'testing' | 'documentation' | 'review-response';\n workType?: WorkTypeId; // Explicit work type ID (overrides phase-based detection)\n}\n\n/**\n * Determine which model to use for an agent based on configuration\n *\n * New Priority (PAN-118):\n * 1. Explicitly provided model (options.model)\n * 2. Explicit work type ID (options.workType)\n * 3. Work type from phase (options.phase → issue-agent:{phase})\n * 4. Specialist work type (options.agentType → specialist-{type})\n * 5. Complexity-based routing (LEGACY - deprecated)\n * 6. 
Default fallback (claude-sonnet-4-6)\n */\nfunction determineModel(options: SpawnOptions): string {\n console.log(`[DEBUG] determineModel called with:`, { model: options.model, workType: options.workType, phase: options.phase, agentType: options.agentType, difficulty: options.difficulty });\n\n // Explicit model always wins\n if (options.model) {\n console.log(`[DEBUG] Using explicit model: ${options.model}`);\n return options.model;\n }\n\n try {\n // Use work type router if work type or phase specified\n if (options.workType) {\n return getModelId(options.workType);\n }\n\n // Map phase to work type ID\n if (options.phase) {\n const workType: WorkTypeId = `issue-agent:${options.phase}` as WorkTypeId;\n return getModelId(workType);\n }\n\n // Map specialist agent type to work type ID\n if (options.agentType && options.agentType !== 'work-agent') {\n // Specialists: review-agent, test-agent, merge-agent\n const workType: WorkTypeId = `specialist-${options.agentType}` as WorkTypeId;\n return getModelId(workType);\n }\n\n // LEGACY: Complexity-based routing removed — settings.json no longer exists.\n // All model routing goes through work-type-router via config.yaml.\n\n // Fall back to default model from Cloister config or claude-sonnet-4-6\n try {\n const cloisterConfig = loadCloisterConfig();\n const defaultModel = cloisterConfig.model_selection?.default_model || 'sonnet';\n const modelMap: Record<string, string> = {\n 'opus': 'claude-opus-4-6',\n 'sonnet': 'claude-sonnet-4-6',\n 'haiku': 'claude-haiku-4-5',\n };\n return modelMap[defaultModel] || 'claude-sonnet-4-6';\n } catch {\n return 'claude-sonnet-4-6';\n }\n } catch (error) {\n // If work type router fails, fall back to default\n console.warn('Warning: Could not resolve model using work type router, using default');\n return options.model || 'claude-sonnet-4-6';\n }\n}\n\n/**\n * Shared tracker resolution logic for issue state transitions.\n *\n * Resolution order (by project tracker type):\n * 1. 
github_repo → GitHub Issues (takes priority over issue_prefix, since projects
 * like panopticon-cli use GitHub Issues with a prefix, not Linear)
 * 2. rally_project → Rally
 * 3. issue_prefix (no github_repo) → Linear (covers gitlab+linear and pure-linear projects)
 * 4. gitlab_repo only → warn and skip (GitLab doesn't support label-based state transitions)
 *
 * Precedence rationale: issue_prefix was renamed from linear_team but is now also set on
 * GitHub-hosted projects (e.g. issue_prefix: PAN for panopticon-cli GitHub Issues).
 * github_repo must be checked first so GitHub projects don't misroute to Linear.
 */
async function transitionIssueState(issueId: string, state: IssueState, workspacePath?: string): Promise<void> {
  // Guard: bare numeric IDs (no alphabetic prefix, e.g. "484") must never reach
  // any tracker API. Linear's searchIssues("484") would match MIN-484 in the wrong
  // team. Log a warning and skip — the workspace's project must use prefixed IDs.
  if (/^\d+$/.test(issueId)) {
    console.warn(
      `[agents] Skipping ${state} transition for bare numeric ID "${issueId}" — ` +
      `issue IDs must include a project prefix (e.g. PAN-${issueId}). ` +
      `This workspace was likely created before the pan- prefix convention.`
    );
    return;
  }

  // Resolve the project from workspacePath — its configured tracker is authoritative.
  // Every issue MUST belong to a registered project with a tracker configured.
  const projectConfig = workspacePath ? findProjectByPath(workspacePath) : null;
  if (!projectConfig) {
    throw new Error(`Cannot transition ${issueId}: no project config found for workspace ${workspacePath || '(none)'}. Register the project in projects.yaml.`);
  }

  // Project has a GitHub repo — use GitHub Issues tracker.
  // Checked BEFORE issue_prefix because github_repo projects (e.g. panopticon-cli)
  // set issue_prefix for their GitHub Issue prefix (PAN-), not for Linear.
  if (projectConfig.github_repo) {
    const [owner, repo] = projectConfig.github_repo.split('/');
    const tracker = createTracker({ type: 'github', owner, repo });
    await tracker.transitionIssue(issueId, state);
    console.log(`[agents] Transitioned ${issueId} to ${state} via GitHub (${projectConfig.github_repo})`);
    return;
  }

  // Project has a Rally project — use Rally tracker
  if (projectConfig.rally_project) {
    const config = loadConfig();
    const trackersConfig = config.trackers;
    if (!trackersConfig?.rally) {
      throw new Error(`Project ${projectConfig.name} uses Rally (project: ${projectConfig.rally_project}) but no Rally tracker is configured in config.yaml`);
    }
    const tracker = createTrackerFromConfig(trackersConfig, 'rally');
    await tracker.transitionIssue(issueId, state);
    console.log(`[agents] Transitioned ${issueId} to ${state} via Rally (project: ${projectConfig.rally_project})`);
    return;
  }

  // Project has a Linear team prefix (and no github_repo) — use Linear tracker.
  // This covers: pure-Linear projects and gitlab+Linear projects (e.g. mind-your-now).
  if (getIssuePrefix(projectConfig)) {
    const config = loadConfig();
    const trackersConfig = config.trackers;
    if (!trackersConfig?.linear) {
      throw new Error(`Project ${projectConfig.name} uses Linear (team: ${getIssuePrefix(projectConfig)}) but no Linear tracker is configured in config.yaml`);
    }
    const tracker = createTrackerFromConfig(trackersConfig, 'linear');
    await tracker.transitionIssue(issueId, state);
    console.log(`[agents] Transitioned ${issueId} to ${state} via Linear (team: ${getIssuePrefix(projectConfig)})`);
    return;
  }

  // GitLab-only project: state transitions are not supported — warn and return
  // without throwing so callers treating this as best-effort are not disrupted.
  if (projectConfig.gitlab_repo) {
    console.warn(`[agents] GitLab project detected (${projectConfig.gitlab_repo}) but GitLab does not support ${state} label transitions`);
    return;
  }

  throw new Error(`Project ${projectConfig.name} has no tracker configured (need issue_prefix, github_repo, or rally_project in projects.yaml)`);
}

/**
 * Transitions an issue to "in_progress" state in the configured issue tracker.
 * Thin wrapper over transitionIssueState; see its doc comment for tracker
 * routing rules (github_repo → GitHub, rally_project → Rally, issue_prefix → Linear).
 */
export async function transitionIssueToInProgress(issueId: string, workspacePath?: string): Promise<void> {
  return transitionIssueState(issueId, 'in_progress', workspacePath);
}

/**
 * Transitions an issue to "in_review" state in the configured issue tracker.
 * Fire-and-forget — logs warnings on failure but never blocks the pipeline.
 */
export async function transitionIssueToInReview(issueId: string, workspacePath?: string): Promise<void> {
  return transitionIssueState(issueId, 'in_review', workspacePath);
}

/**
 * Spawn a new agent for an issue: persists its AgentState, writes the initial
 * prompt to a file, configures provider auth and (optionally) GitHub App bot
 * push identity, then launches Claude Code in a detached tmux session.
 *
 * Throws if a tmux session for the derived agent id already exists.
 * Returns the saved AgentState with status 'running'.
 */
export async function spawnAgent(options: SpawnOptions): Promise<AgentState> {
  // Agent id is derived deterministically from the issue id (lowercased),
  // so one issue maps to exactly one agent session name.
  const agentId = `agent-${options.issueId.toLowerCase()}`;

  // Check if already running
  if (sessionExists(agentId)) {
    throw new Error(`Agent ${agentId} already running. Use 'pan work tell' to message it.`);
  }

  // Initialize hook for this agent (FPP support)
  initHook(agentId);

  // Determine model based on configuration
  const selectedModel = determineModel(options);
  console.log(`[DEBUG] Selected model: ${selectedModel}`);

  // Create state
  const state: AgentState = {
    id: agentId,
    issueId: options.issueId,
    workspace: options.workspace,
    runtime: options.runtime || 'claude',
    model: selectedModel,
    status: 'starting',
    startedAt: new Date().toISOString(),
    // Initialize Phase 4 fields (legacy)
    complexity: options.difficulty,
    handoffCount: 0,
    costSoFar: 0,
    // Work type system (PAN-118)
    phase: options.phase,
    workType: options.workType,
  };

  saveAgentState(state);

  // Build prompt with FPP work if available
  let prompt = options.prompt || '';

  // FPP: Check for pending work on hook
  // NOTE(review): `items` is destructured but unused here — only hasWork is consumed.
  const { hasWork, items } = checkHook(agentId);
  if (hasWork) {
    const fixedPointPrompt = generateFixedPointPrompt(agentId);
    if (fixedPointPrompt) {
      // Prepend FPP work so it takes priority over the caller-supplied prompt.
      prompt = fixedPointPrompt + '\n\n---\n\n' + prompt;
    }
  }

  // Write prompt to file for complex prompts (avoids shell escaping issues)
  const promptFile = join(getAgentDir(agentId), 'initial-prompt.md');
  if (prompt) {
    writeFileSync(promptFile, prompt);
  }

  // Auto-setup hooks if not configured
  checkAndSetupHooks();

  // Ensure TLDR daemon is running for the workspace (non-blocking, non-fatal)
  try {
    const venvPath = join(options.workspace, '.venv');
    if (existsSync(venvPath)) {
      const { getTldrDaemonService } = await import('./tldr-daemon.js');
      const tldrService = getTldrDaemonService(options.workspace, venvPath);
      const status = await tldrService.getStatus();
      if (!status.running) {
        await tldrService.start(true);
        console.log(`[${agentId}] Started TLDR daemon for workspace`);
      }
    }
  } catch {
    // Non-fatal — agents degrade to direct file reads if TLDR unavailable
  }

  // Write initial task cache for heartbeat hook
  writeTaskCache(agentId, options.issueId);

  // Clear ready signal before spawning (clean slate for PAN-87 fix)
  clearReadySignal(agentId);

  // Get provider-specific environment variables (BASE_URL, AUTH_TOKEN)
  const providerEnv = getProviderEnvForModel(selectedModel);

  // For credential-file providers (e.g. Kimi Code Plan), configure apiKeyHelper
  // so Claude Code can refresh short-lived tokens dynamically.
  // For all other providers, CLEAR any stale apiKeyHelper from previous runs
  // (e.g. switching from Kimi to Anthropic plan-based auth).
  const provider = getProviderForModel(selectedModel as ModelId);
  if (provider.authType === 'credential-file') {
    setupCredentialFileAuth(provider, options.workspace);
  } else {
    clearCredentialFileAuth(options.workspace);
  }

  // Create tmux session and start claude
  // For prompts with special shell characters, use a launcher script to safely pass the prompt
  // The script reads the file into a variable, which bash then safely expands
  let claudeCmd: string;
  if (prompt) {
    const launcherScript = join(getAgentDir(agentId), 'launcher.sh');
    const providerExports = getProviderExportsForModel(state.model);
    // \$prompt is escaped in the template literal so it is expanded by bash at
    // runtime (reading the prompt file), not interpolated by TypeScript here.
    const launcherContent = `#!/bin/bash
${providerExports}prompt=$(cat "${promptFile}")
exec claude --dangerously-skip-permissions --model ${state.model} "\$prompt"
`;
    writeFileSync(launcherScript, launcherContent, { mode: 0o755 });
    claudeCmd = `bash "${launcherScript}"`;
  } else {
    claudeCmd = `claude --dangerously-skip-permissions --model ${state.model}`;
  }

  // Pre-trust workspace directory in Claude Code to avoid the trust prompt
  try {
    const { preTrustDirectory } = await import('./workspace-manager.js') as { preTrustDirectory: (dir: string) => void };
    preTrustDirectory(options.workspace);
  } catch { /* non-fatal */ }

  // Configure workspace for GitHub App bot identity (PAN-536)
  // Agents push as panopticon-agent[bot] with short-lived installation tokens
  try {
    const { isGitHubAppConfigured, generateInstallationToken, configureWorkspaceForBot } = await import('./github-app.js');
    if (isGitHubAppConfigured()) {
      const { findProjectByPath } = await import('./projects.js');
      // Workspace layout is <project-root>/workspaces/<branch>, so the project
      // root is two levels up from the workspace directory.
      const project = findProjectByPath(resolve(options.workspace, '..', '..'));
      const ghRepo = project?.github_repo;
      if (ghRepo) {
        const [owner, repo] = ghRepo.split('/');
        const { token } = await generateInstallationToken();
        await configureWorkspaceForBot(options.workspace, owner, repo, token);
        console.log(`[${agentId}] Configured workspace for bot push (panopticon-agent[bot])`);
      }
    }
  } catch (err: any) {
    console.warn(`[${agentId}] GitHub App config failed (falling back to SSH): ${err.message}`);
  }

  // Build SageOx environment variables for session linking (only if project is SageOx-initialized)
  // Derive project root from workspace path: <project-root>/workspaces/<branch>
  const projectRoot = resolve(options.workspace, '..', '..');
  const sageoxEnabled = existsSync(join(projectRoot, '.sageox'));
  const sageoxEnv: Record<string, string> = {};

  if (sageoxEnabled) {
    sageoxEnv.OX_PROJECT_ROOT = projectRoot;

    // Add issue tracking for multi-agent pipelines
    if (options.issueId) {
      sageoxEnv.PAN_ISSUE_ID = options.issueId;
    }
    if (options.phase) {
      sageoxEnv.PAN_PHASE = options.phase;
    }

    // For non-planner agents, find the planner's session path for parent linking.
    // NOTE(review): plannerAgentId is derived the same way as agentId above, so
    // this reads this issue's own agent state — presumably the planner ran first
    // under the same id; confirm against the pipeline's phase sequencing.
    if (options.phase && (options.phase as string) !== 'planning') {
      const plannerAgentId = `agent-${options.issueId.toLowerCase()}`;
      const plannerState = getAgentState(plannerAgentId);
      if (plannerState?.sageoxSessionPath) {
        sageoxEnv.PAN_PARENT_SESSION = plannerState.sageoxSessionPath;
      }
    }
  }

  createSession(agentId, options.workspace, claudeCmd, {
    env: {
      PANOPTICON_AGENT_ID: agentId,
      PANOPTICON_ISSUE_ID: options.issueId,
      PANOPTICON_SESSION_TYPE: options.phase || 'implementation',
      CLAUDE_CODE_ENABLE_PROMPT_SUGGESTION: 'false', // Disable suggested prompts for autonomous agents (PAN-251)
      ...providerEnv, // Add provider-specific env vars (BASE_URL, AUTH_TOKEN, etc.)
      ...sageoxEnv // Add SageOx environment variables
    }
  });

  // Update status
  state.status = 'running';
  saveAgentState(state);

  // Track work in CV
  startWork(agentId, options.issueId);

  // Transition issue tracker to "in progress" (best-effort, don't block agent spawn)
  // Only for work agents, not planning/specialist agents
  if (!options.agentType || options.agentType === 'work-agent') {
    transitionIssueToInProgress(options.issueId, options.workspace).catch((err) => {
      console.warn(`[agents] Could not transition ${options.issueId} to in_progress: ${err.message}`);
    });
  }

  // For planner agents, capture SageOx session path after it becomes available
  if (sageoxEnabled && (options.phase as string) === 'planning') {
    captureSageoxSessionPath(agentId, projectRoot).catch((err) => {
      console.warn(`[agents] Could not capture SageOx session path: ${err.message}`);
    });
  }

  return state;
}

/**
 * List every agent that has a state directory under AGENTS_DIR, annotating
 * each with whether a live tmux session currently exists for it.
 * Returns an empty array when AGENTS_DIR does not exist.
 */
export function listRunningAgents(): (AgentState & { tmuxActive: boolean })[] {
  const tmuxSessions = getAgentSessions();
  const tmuxNames = new Set(tmuxSessions.map(s => s.name));

  const agents: (AgentState & { tmuxActive: boolean })[] = [];

  // Read all agent states
  if (!existsSync(AGENTS_DIR)) return agents;

  const dirs = readdirSync(AGENTS_DIR, { withFileTypes: true })
    .filter(d => d.isDirectory());

  for (const dir of dirs) {
    const state = getAgentState(dir.name);
    if (state) {
      agents.push({
        ...state,
        tmuxActive: tmuxNames.has(state.id),
      });
    }
  }

  return agents;
}

/**
 * Scan ~/.panopticon/agents/ for state files with bare numeric issueIds
 * (e.g. "484" instead of "PAN-484") and log warnings to stderr.
 *
 * These workspaces were created before the pan- prefix convention and may
 * cause cross-tracker pollution if their in_review transition is triggered.
 * Called once at server startup to surface legacy state files.
 */
export function warnOnBareNumericIssueIds(): void {
  if (!existsSync(AGENTS_DIR)) return;

  const dirs = readdirSync(AGENTS_DIR, { withFileTypes: true })
    .filter(d => d.isDirectory());

  const legacy: string[] = [];
  for (const dir of dirs) {
    const state = getAgentState(dir.name);
    // Same bare-numeric guard as transitionIssueState's issueId check.
    if (state?.issueId && /^\d+$/.test(state.issueId)) {
      legacy.push(`${dir.name} (issueId: "${state.issueId}")`);
    }
  }

  if (legacy.length > 0) {
    console.warn(
      `[agents] WARNING: ${legacy.length} agent state file(s) have bare numeric issueIds ` +
      `(created before the pan- prefix convention). These agents will not be able to ` +
      `transition tracker state. Consider removing or updating them:\n` +
      legacy.map(l => ` ~/.panopticon/agents/${l}`).join('\n')
    );
  }
}

/**
 * Stop an agent: snapshot its tmux pane to output.log, kill the session, and
 * mark both state.json (status) and runtime.json (state) as 'stopped' so
 * health-check pollers do not auto-restart it.
 */
export function stopAgent(agentId: string): void {
  const normalizedId = normalizeAgentId(agentId);

  if (sessionExists(normalizedId)) {
    // Capture tmux output before killing so logs remain viewable after stop
    try {
      const output = capturePane(normalizedId, 5000);
      if (output) {
        const agentDir = getAgentDir(normalizedId);
        mkdirSync(agentDir, { recursive: true });
        writeFileSync(join(agentDir, 'output.log'), output);
      }
    } catch {
      // Non-fatal — best effort log capture
    }

    killSession(normalizedId);
  }

  const state = getAgentState(normalizedId);
  if (state) {
    // Ensure id is set — runtime state files may lack it (PAN-150)
    if (!state.id) state.id = normalizedId;

    state.status = 'stopped';
    saveAgentState(state);
  }

  // Also mark runtime.json as stopped so Cloister/Deacon won't auto-restart.
  // state.json and runtime.json are separate files — both must agree the agent
  // was intentionally stopped to prevent race conditions with health check polls.
  saveAgentRuntimeState(normalizedId, { state: 'stopped' });
}

/**
 * Deliver a message to an agent, reviving it if necessary. Handles four cases
 * in order: suspended (auto-resume via resumeAgent, which delivers the
 * message), stopped with no tmux session (auto-restart with a generic resume
 * prompt, then send), remote agent (forward over the remote channel), and the
 * default live-local case (send keys into the tmux session). In every
 * delivered case the message is also persisted to the agent's mail/ queue.
 * Throws if the agent is neither revivable nor running.
 */
export async function messageAgent(agentId: string, message: string): Promise<void> {
  const normalizedId = normalizeAgentId(agentId);

  // Check if agent is suspended - auto-resume if so (PAN-80)
  const runtimeState = getAgentRuntimeState(normalizedId);
  if (runtimeState?.state === 'suspended') {
    console.log(`[agents] Auto-resuming suspended agent ${normalizedId} to deliver message`);
    const result = await resumeAgent(normalizedId, message);
    if (!result.success) {
      throw new Error(`Failed to auto-resume agent: ${result.error}`);
    }
    // Message already sent during resume
    return;
  }

  // Check if agent is stopped — auto-restart to deliver feedback (PAN-367)
  const agentState = getAgentState(normalizedId);
  if (agentState && agentState.status === 'stopped' && !sessionExists(normalizedId)) {
    console.log(`[agents] Auto-restarting stopped agent ${normalizedId} to deliver feedback`);

    // Re-derive provider auth from the saved model, mirroring spawnAgent.
    const providerEnv = agentState.model ? getProviderEnvForModel(agentState.model) : {};
    if (agentState.model) {
      const provider = getProviderForModel(agentState.model as ModelId);
      if (provider.authType === 'credential-file') {
        setupCredentialFileAuth(provider, agentState.workspace);
      } else {
        clearCredentialFileAuth(agentState.workspace);
      }
    }

    clearReadySignal(normalizedId);
    // NOTE(review): unlike spawnAgent's launcher-script path, this prompt is
    // inlined into the shell command; only issueId is interpolated, but confirm
    // issueIds can never contain shell metacharacters.
    const claudeCmd = `claude --dangerously-skip-permissions --model ${agentState.model || 'claude-sonnet-4-6'} "You are resuming work on ${agentState.issueId}. Check .planning/feedback/ for specialist feedback that arrived while you were stopped, then continue working."`;
    createSession(normalizedId, agentState.workspace, claudeCmd, {
      env: {
        PANOPTICON_AGENT_ID: normalizedId,
        PANOPTICON_ISSUE_ID: agentState.issueId || '',
        PANOPTICON_SESSION_TYPE: agentState.phase || 'implementation',
        CLAUDE_CODE_ENABLE_PROMPT_SUGGESTION: 'false',
        ...providerEnv
      }
    });

    agentState.status = 'running';
    agentState.lastActivity = new Date().toISOString();
    saveAgentState(agentState);

    // Wait for ready, then deliver the message
    const ready = await waitForReadySignal(normalizedId, 30);
    if (ready) {
      await sendKeysAsync(normalizedId, message);
      console.log(`[agents] Restarted ${normalizedId} and delivered feedback`);
    } else {
      console.warn(`[agents] Restarted ${normalizedId} but ready signal not detected — feedback in mail queue`);
    }

    // Save to mail queue regardless
    const mailDir = join(getAgentDir(normalizedId), 'mail');
    mkdirSync(mailDir, { recursive: true });
    // Timestamps are filesystem-safe: ':' and '.' are not valid in filenames everywhere.
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
    writeFileSync(
      join(mailDir, `${timestamp}.md`),
      `# Message\n\n${message}\n`
    );
    return;
  }

  // Check if this is a remote agent
  const { loadRemoteAgentState, sendToRemoteAgent } = await import('./remote/remote-agents.js');
  const remoteState = loadRemoteAgentState(normalizedId);
  if (remoteState && remoteState.vmName) {
    console.log(`[agents] Sending message to remote agent ${normalizedId} on ${remoteState.vmName}`);
    await sendToRemoteAgent(normalizedId, remoteState.vmName, message);

    // Also save to mail queue for persistence
    const mailDir = join(getAgentDir(normalizedId), 'mail');
    mkdirSync(mailDir, { recursive: true });
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
    writeFileSync(
      join(mailDir, `${timestamp}.md`),
      `# Message\n\n${message}\n`
    );
    return;
  }

  if (!sessionExists(normalizedId)) {
    throw new Error(`Agent ${normalizedId} not running`);
  }

  await sendKeysAsync(normalizedId, message);

  // Also save to mail queue
  const mailDir = join(getAgentDir(normalizedId), 'mail');
  mkdirSync(mailDir, { recursive: true });

  const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
  writeFileSync(
    join(mailDir, `${timestamp}.md`),
    `# Message\n\n${message}\n`
  );
}

/**
 * Resume a suspended agent (PAN-80)
 *
 * Reads saved session ID and creates new tmux session with --resume flag.
 * Optionally sends a message after resuming.
 *
 * Auto-resume triggers:
 * - Specialists: When queued work arrives
 * - Work agents: When message is sent via /work-tell
 */
export async function resumeAgent(agentId: string, message?: string): Promise<{ success: boolean; error?: string }> {
  const normalizedId = normalizeAgentId(agentId);

  // Check runtime state — allow both suspended (auto-suspend) and stopped/idle (manual stop, crash)
  const runtimeState = getAgentRuntimeState(normalizedId);
  const agentState = getAgentState(normalizedId);
  const allowedRuntimeStates = ['suspended', 'idle'];
  const allowedAgentStatuses = ['stopped', 'completed'];

  // Also allow resuming a "running" agent with no live tmux session — this happens after
  // a system crash where tmux was killed but state.json was never updated to 'stopped'.
  const isCrashed = agentState?.status === 'running' && !sessionExists(normalizedId);

  const canResume = (runtimeState && allowedRuntimeStates.includes(runtimeState.state))
    || (agentState && allowedAgentStatuses.includes(agentState.status))
    || isCrashed;

  if (!canResume) {
    return {
      success: false,
      error: `Cannot resume agent in state: runtime=${runtimeState?.state || 'unknown'}, status=${agentState?.status || 'unknown'}`
    };
  }

  // Get saved session ID from any available source
  const sessionId = getLatestSessionId(normalizedId);
  if (!sessionId) {
    return {
      success: false,
      error: 'No saved session ID found'
    };
  }

  // Verify agent state exists (already fetched above for status check)
  if (!agentState) {
    return {
      success: false,
      error: 'Agent state not found'
    };
  }

  // Kill any zombie tmux session (crashed agent left behind)
  if (sessionExists(normalizedId)) {
    try {
      killSession(normalizedId);
    } catch { /* non-fatal */ }
  }

  // Remove completed marker so the agent can work again
  const completedFile = join(getAgentDir(normalizedId), 'completed');
  if (existsSync(completedFile)) {
    try { unlinkSync(completedFile); } catch { /* non-fatal */ }
  }

  try {
    // Clear ready signal before resuming (clean slate for PAN-87 fix)
    clearReadySignal(normalizedId);

    // Get provider env for the agent's model (reads latest API key from settings)
    const providerEnv = agentState.model ? getProviderEnvForModel(agentState.model) : {};

    // For credential-file providers, ensure apiKeyHelper is configured.
    // For all other providers, clear stale apiKeyHelper from previous runs.
    if (agentState.model) {
      const provider = getProviderForModel(agentState.model as ModelId);
      if (provider.authType === 'credential-file') {
        setupCredentialFileAuth(provider, agentState.workspace);
      } else {
        clearCredentialFileAuth(agentState.workspace);
      }
    }

    // Create new tmux session with resume command
    const claudeCmd = `claude --resume "${sessionId}" --dangerously-skip-permissions`;
    createSession(normalizedId, agentState.workspace, claudeCmd, {
      env: {
        PANOPTICON_AGENT_ID: normalizedId,
        PANOPTICON_ISSUE_ID: agentState.issueId || '',
        PANOPTICON_SESSION_TYPE: agentState.phase || 'implementation',
        CLAUDE_CODE_ENABLE_PROMPT_SUGGESTION: 'false',
        ...providerEnv
      }
    });

    // If there's a message, wait for ready signal then send
    if (message) {
      // Wait for SessionStart hook to signal ready (PAN-87: reliable message delivery)
      const ready = await waitForReadySignal(normalizedId, 30);

      if (ready) {
        // Send message
        await sendKeysAsync(normalizedId, message);
      } else {
        console.error('Claude SessionStart hook did not fire during resume, message not sent');
      }
    }

    // Update runtime state
    saveAgentRuntimeState(normalizedId, {
      state: 'active',
      resumedAt: new Date().toISOString(),
    });

    // Update agent state
    if (agentState) {
      agentState.status = 'running';
      agentState.lastActivity = new Date().toISOString();
      saveAgentState(agentState);
    }

    return { success: true };
  } catch (error: unknown) {
    const msg = error instanceof Error ? error.message : String(error);
    return {
      success: false,
      error: `Failed to resume agent: ${msg}`
    };
  }
}

/**
 * Detect crashed agents (state shows running but tmux session is gone)
 */
export function detectCrashedAgents(): AgentState[] {
  const agents = listRunningAgents();
  return agents.filter(
    (agent) => agent.status === 'running' && !agent.tmuxActive
  );
}

/**
 * Recover a crashed agent by restarting it with context
 */
export function recoverAgent(agentId: string): AgentState | null {
  const normalizedId = normalizeAgentId(agentId);
  const state = getAgentState(normalizedId);

  if (!state) {
    return null;
  }

  // Runtime state files may lack required fields (PAN-150)
  if (!state.id) state.id = normalizedId;
  if (!state.workspace || !state.model) {
    console.error(`[agents] Cannot recover ${normalizedId}: state.json missing workspace or model`);
    return null;
  }

  // Check if already running
  if (sessionExists(normalizedId)) {
    return state;
  }

  // Update crash count in health file
  const healthFile = join(getAgentDir(normalizedId), 'health.json');
  let health = { consecutiveFailures: 0, killCount: 0, recoveryCount: 0 };
  if (existsSync(healthFile)) {
    try {
      // Merge over defaults so a partial health.json still yields all fields.
      health = { ...health, ...JSON.parse(readFileSync(healthFile, 'utf-8')) };
    } catch {}
  }
  health.recoveryCount = (health.recoveryCount || 0) + 1;
  writeFileSync(healthFile, JSON.stringify(health, null, 2));

  // Build recovery prompt
  const recoveryPrompt = generateRecoveryPrompt(state);

  // Get provider env for the agent's model (reads latest API key from settings)
  const providerEnv = state.model ? getProviderEnvForModel(state.model) : {};

  // For credential-file providers, ensure apiKeyHelper is configured.
  // For all other providers, clear stale apiKeyHelper from previous runs.
  if (state.model) {
    const provider = getProviderForModel(state.model as ModelId);
    if (provider.authType === 'credential-file') {
      setupCredentialFileAuth(provider, state.workspace);
    } else {
      clearCredentialFileAuth(state.workspace);
    }
  }

  // Restart the agent with recovery context (YOLO mode - skip permissions)
  // NOTE(review): prompt is escaped inline for the shell here, unlike
  // spawnAgent which routes prompts through a launcher script to avoid
  // escaping pitfalls — backticks/$ in the prompt would still be interpreted.
  const claudeCmd = `claude --dangerously-skip-permissions --model ${state.model} "${recoveryPrompt.replace(/"/g, '\\"').replace(/\n/g, '\\n')}"`;
  createSession(normalizedId, state.workspace, claudeCmd, {
    env: {
      PANOPTICON_AGENT_ID: normalizedId,
      PANOPTICON_ISSUE_ID: state.issueId || '',
      PANOPTICON_SESSION_TYPE: state.phase || 'implementation',
      CLAUDE_CODE_ENABLE_PROMPT_SUGGESTION: 'false',
      ...providerEnv
    }
  });

  // Update state
  state.status = 'running';
  state.lastActivity = new Date().toISOString();
  saveAgentState(state);

  return state;
}

/**
 * Generate a recovery prompt for a crashed agent
 */
function generateRecoveryPrompt(state: AgentState): string {
  const lines: string[] = [
    '# Agent Recovery',
    '',
    '⚠️ This agent session was recovered after a crash.',
    '',
    '## Previous Context',
    `- Issue: ${state.issueId}`,
    `- Workspace: ${state.workspace}`,
    `- Started: ${state.startedAt}`,
    '',
    '## Recovery Steps',
    '1. Check beads for context: `bd show ' + state.issueId + '`',
    '2. Review recent git commits: `git log --oneline -10`',
    '3. Check hook for pending work: `pan work hook check`',
    '4. Resume from last known state',
    '',
    '## FPP Reminder',
    '> "Any runnable action is a fixed point and must resolve before the system can rest."',
    '',
  ];

  // Add FPP work if available
  const { hasWork } = checkHook(state.id);
  if (hasWork) {
    const fixedPointPrompt = generateFixedPointPrompt(state.id);
    if (fixedPointPrompt) {
      lines.push('---');
      lines.push('');
      lines.push(fixedPointPrompt);
    }
  }

  return lines.join('\n');
}

/**
 * Auto-recover all crashed agents
 */
export function autoRecoverAgents(): { recovered: string[]; failed: string[] } {
  const crashed = detectCrashedAgents();
  const recovered: string[] = [];
  const failed: string[] = [];

  for (const agent of crashed) {
    try {
      const result = recoverAgent(agent.id);
      if (result) {
        recovered.push(agent.id);
      } else {
        failed.push(agent.id);
      }
    } catch (error) {
      // NOTE(review): the caught error is discarded — consider logging it.
      failed.push(agent.id);
    }
  }

  return { recovered, failed };
}

/**
 * Check if Panopticon hooks are configured, and auto-setup if not
 */
function checkAndSetupHooks(): void {
  const settingsPath = join(homedir(), '.claude', 'settings.json');
  const hookPath = join(homedir(), '.panopticon', 'bin', 'heartbeat-hook');

  // Check if settings.json exists and has heartbeat hook configured
  if (existsSync(settingsPath)) {
    try {
      const settingsContent = readFileSync(settingsPath, 'utf-8');
      const settings = JSON.parse(settingsContent);
      const postToolUse = settings?.hooks?.PostToolUse || [];

      // A hook counts as configured if any PostToolUse entry points at our
      // binary path or mentions panopticon/heartbeat-hook in its command.
      const hookConfigured = postToolUse.some((hookConfig: any) =>
        hookConfig.hooks?.some((hook: any) =>
          hook.command === hookPath ||
          hook.command?.includes('panopticon') ||
          hook.command?.includes('heartbeat-hook')
        )
      );

      if (hookConfigured) {
        return; // Already configured
      }
    } catch {
      // Ignore errors, will attempt setup
    }
  }

  // Hooks not configured - run setup silently
  try {
    console.log('Configuring Panopticon heartbeat hooks...');
    // Note: This runs during spawn
which is now async, so we can use execAsync\n // But this is called from a sync context in checkAndSetupHooks, so we use fire-and-forget\n exec('pan setup hooks', (error: Error | null) => {\n if (error) {\n console.warn('⚠ Failed to auto-configure hooks. Run `pan setup hooks` manually.');\n } else {\n console.log('✓ Heartbeat hooks configured');\n }\n });\n } catch (error) {\n console.warn('⚠ Failed to auto-configure hooks. Run `pan setup hooks` manually.');\n }\n}\n\n/**\n * Write task cache for heartbeat hook to use\n */\nfunction writeTaskCache(agentId: string, issueId: string): void {\n const cacheDir = join(getAgentDir(agentId));\n mkdirSync(cacheDir, { recursive: true });\n\n const cacheFile = join(cacheDir, 'current-task.json');\n writeFileSync(\n cacheFile,\n JSON.stringify({\n id: issueId,\n title: `Working on ${issueId}`,\n updated_at: new Date().toISOString()\n }, null, 2)\n );\n}\n\n/**\n * Capture SageOx session path for a planner agent.\n * This is used for parent-child session linking in multi-agent pipelines.\n * Subsequent agents (worker, reviewer, tester, merger) will use this path\n * as their PAN_PARENT_SESSION to link their sessions to the planner's session.\n */\nasync function captureSageoxSessionPath(agentId: string, projectRoot: string): Promise<void> {\n // Wait for SageOx session to be created by the hook (up to 10 seconds)\n const sessionsDir = join(projectRoot, '.sageox', 'sessions');\n let attempts = 0;\n const maxAttempts = 20;\n const delayMs = 500;\n\n while (attempts < maxAttempts) {\n // Check if sessions directory exists\n if (existsSync(sessionsDir)) {\n // Find the most recent session directory for this agent\n const sessions = readdirSync(sessionsDir, { withFileTypes: true })\n .filter(d => d.isDirectory())\n .map(d => ({\n name: d.name,\n path: join(sessionsDir, d.name),\n mtime: existsSync(join(sessionsDir, d.name, '.recording.json'))\n ? 
readFileSync(join(sessionsDir, d.name, '.recording.json'), 'utf-8')\n : null\n }))\n .filter(s => {\n // Check if this session belongs to our agent\n if (!s.mtime) return false;\n try {\n const state = JSON.parse(s.mtime);\n return state.agent_id === agentId || state.AgentID === agentId;\n } catch {\n return false;\n }\n })\n .sort((a, b) => {\n // Sort by modification time (newest first)\n const aTime = existsSync(join(a.path, '.recording.json'))\n ? (statSync(join(a.path, '.recording.json')).mtimeMs || 0)\n : 0;\n const bTime = existsSync(join(b.path, '.recording.json'))\n ? (statSync(join(b.path, '.recording.json')).mtimeMs || 0)\n : 0;\n return bTime - aTime;\n });\n\n if (sessions.length > 0) {\n // Update agent state with SageOx session path\n const state = getAgentState(agentId);\n if (state) {\n state.sageoxSessionPath = sessions[0].path;\n saveAgentState(state);\n console.log(`[agents] Captured SageOx session path for ${agentId}: ${sessions[0].path}`);\n return;\n }\n }\n }\n\n // Wait before retrying\n await new Promise(resolve => setTimeout(resolve, delayMs));\n attempts++;\n }\n\n throw new Error(`Could not find SageOx session for ${agentId} after ${maxAttempts * 
delayMs}ms`);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;AA2RA,SAAS,UAA4B,UAAa,WAA0B;CAC1E,MAAM,SAAS,EAAE,GAAG,UAAU;AAE9B,MAAK,MAAM,OAAO,OAAO,KAAK,UAAU,EAAiB;EACvD,MAAM,aAAa,SAAS;EAC5B,MAAM,cAAc,UAAU;AAG9B,MAAI,gBAAgB,KAAA,EAAW;AAG/B,MACE,OAAO,eAAe,YACtB,eAAe,QACf,CAAC,MAAM,QAAQ,WAAW,IAC1B,OAAO,gBAAgB,YACvB,gBAAgB,QAChB,CAAC,MAAM,QAAQ,YAAY,CAE3B,QAAO,OAAO,UAAU,YAAmB,YAAmB;MAG9D,QAAO,OAAO;;AAIlB,QAAO;;;;;;;;AAST,SAAgB,qBAAqC;AAEnD,KAAI,CAAC,WAAW,gBAAgB,CAC9B,WAAU,iBAAiB,EAAE,WAAW,MAAM,CAAC;AAIjD,KAAI,CAAC,WAAW,qBAAqB,EAAE;AACrC,qBAAmB,wBAAwB;AAC3C,SAAO;;AAGT,KAAI;AAKF,SAAO,UAAU,0BAAA,GAAA,YAAA,OAJD,aAAa,sBAAsB,QAAQ,CAC9B,CAGoB;UAC1C,OAAO;AACd,UAAQ,MAAM,mCAAmC,MAAM;AACvD,UAAQ,MAAM,8BAA8B;AAC5C,SAAO;;;;;;;;AASX,SAAgB,mBAAmB,QAA8B;AAE/D,KAAI,CAAC,WAAW,gBAAgB,CAC9B,WAAU,iBAAiB,EAAE,WAAW,MAAM,CAAC;AAGjD,KAAI;AAEF,gBAAc,uBAAA,GAAA,YAAA,WADY,OAAc,EACK,QAAQ;UAC9C,OAAO;AACd,UAAQ,MAAM,mCAAmC,MAAM;AACvD,QAAM;;;;;;AA0BV,SAAgB,kBAA2B;AAEzC,QADe,oBAAoB,CACrB,QAAQ;;;;;AAMxB,SAAgB,wBAId;CACA,MAAM,SAAS,oBAAoB;AACnC,QAAO;EACL,OAAO,OAAO,WAAW,QAAQ,KAAK;EACtC,SAAS,OAAO,WAAW,UAAU,KAAK;EAC1C,OAAO,OAAO,WAAW,QAAQ,KAAK;EACvC;;;;;aA1Y2C;AAExC,wBAAuB,KAAK,iBAAiB,gBAAgB;AA6KtD,2BAA0C;EACrD,SAAS,EACP,YAAY,MACb;EACD,YAAY;GACV,OAAO;GACP,SAAS;GACT,OAAO;GACR;EACD,cAAc;GACZ,iBAAiB;GACjB,eAAe;GACf,eAAe;GACf,iBAAiB;GACjB,kBAAkB,OAAU;GAC7B;EACD,YAAY;GACV,gBAAgB;GAChB,mBAAmB;IAAC;IAAe;IAAiB;IAAe;GACpE;EACD,eAAe;GACb,eAAe,KAAA;GACf,OAAO,KAAA;GACR;EACD,aAAa;GACX,aAAa;IACX,SAAS;IACT,WAAW;IACZ;GACD,cAAc;IACZ,SAAS;IACT,WAAW;IACZ;GACD,YAAY;IACV,SAAS;IACT,WAAW;IACZ;GACF;EACD,iBAAiB;GACf,eAAe;GACf,oBAAoB;IAClB,SAAS;IACT,QAAQ;IACR,QAAQ;IACR,SAAS;IACT,QAAQ;IACT;GACD,mBAAmB;IACjB,aAAa;IACb,cAAc;IACd,YAAY;IACb;GACF;EACD,UAAU,EACR,eAAe;GACb,kBAAkB;IAChB,SAAS;IACT,yBAAyB;IACzB,wBAAwB;IACzB;GACD,cAAc;IACZ,SAAS;IACT,YAAY;IACZ,UAAU;IACV,YAAY;IACb;GACD,yBAAyB;IACvB,SAAS;IACT,eAAe;IAChB;GACF,EACF;EACD,eAAe;GACb,iBAAiB;GACjB,cAAc;GACf;EACD,cAAc;GACZ,SAAS;GACT,aAAa;GACb,iBAAiB;IAAC;IAAI;IAAI;IAAI;GAC/B;EACD,aAAa;GACX,eAAe;
GACf,eAAe;GACf,iBAAiB;GACjB,iBAAiB;GAClB;EACD,WAAW;GACT,kBAAkB;GAClB,wBAAwB;GACzB;EACF;;;;;;;;;AC3OD,SAAS,UAAU,SAAyB;AAC1C,QAAO,KAAK,YAAY,SAAS,UAAU;;;;;AAM7C,SAAgB,WAAW,SAA0B;CACnD,MAAM,SAAS,UAAU,QAAQ;AAEjC,KAAI,WAAW,OAAO,CACpB,KAAI;AACF,SAAO,KAAK,MAAM,aAAa,QAAQ,QAAQ,CAAC;SAC1C;CAIV,MAAM,KAAc;EAClB;EACA,4BAAW,IAAI,MAAM,EAAC,aAAa;EACnC,6BAAY,IAAI,MAAM,EAAC,aAAa;EACpC,SAAS;EACT,OAAO;EACP,OAAO;GACL,aAAa;GACb,cAAc;GACd,cAAc;GACd,gBAAgB;GAChB,aAAa;GACb,aAAa;GACd;EACD,YAAY,EAAE;EACd,YAAY,EAAE;EACf;AAED,aAAY,GAAG;AACf,QAAO;;;;;AAMT,SAAgB,YAAY,IAAmB;AAE7C,WADY,KAAK,YAAY,GAAG,QAAQ,EACzB,EAAE,WAAW,MAAM,CAAC;AACnC,eAAc,UAAU,GAAG,QAAQ,EAAE,KAAK,UAAU,IAAI,MAAM,EAAE,CAAC;;;;;AAMnE,SAAgB,UAAU,SAAiB,SAAiB,QAAyB;CACnF,MAAM,KAAK,WAAW,QAAQ;CAE9B,MAAM,QAAmB;EACvB;EACA,4BAAW,IAAI,MAAM,EAAC,aAAa;EACnC,SAAS;EACT;EACD;AAED,IAAG,WAAW,QAAQ,MAAM;AAC5B,IAAG,MAAM;AACT,IAAG,8BAAa,IAAI,MAAM,EAAC,aAAa;AAGxC,KAAI;OACG,MAAM,SAAS,OAClB,KAAI,CAAC,GAAG,WAAW,SAAS,MAAM,CAChC,IAAG,WAAW,KAAK,MAAM;;AAM/B,KAAI,GAAG,WAAW,SAAS,GACzB,IAAG,aAAa,GAAG,WAAW,MAAM,GAAG,GAAG;AAG5C,aAAY,GAAG;;;aA/GuB;;;;;ACkBxC,SAAS,iBAAiB,SAAyB;AACjD,KAAI,eAAe,MAAK,MAAK,QAAQ,WAAW,EAAE,CAAC,CACjD,QAAO;AAET,QAAO,SAAS,QAAQ,aAAa;;;;;;;AAQvC,SAAgB,uBAAuB,OAAuC;CAC5E,MAAM,WAAW,oBAAoB,MAAM;AAC3C,KAAI,SAAS,SAAS,YAAa,QAAO,EAAE;AAG5C,KAAI,SAAS,SAAS,cAAc;EAClC,MAAM,EAAE,WAAWA,YAAgB;EACnC,MAAM,SAAS,OAAO,QAAQ;AAC9B,MAAI,OACF,QAAO,eAAe,UAAU,OAAO;AAEzC,QAAM,IAAI,MAAM,gGAAgG,MAAM,IAAI;;CAG5H,MAAM,EAAE,WAAWA,YAAgB;CACnC,MAAM,SAAS,OAAO,QAAQ,SAAS;AACvC,KAAI,OACF,QAAO,eAAe,UAAU,OAAO;AAEzC,OAAM,IAAI,MAAM,6BAA6B,SAAS,YAAY,iDAAiD,MAAM,IAAI;;;;;;AAO/H,SAAgB,2BAA2B,OAAuB;CAChE,MAAM,UAAU,uBAAuB,MAAM;AAC7C,KAAI,OAAO,KAAK,QAAQ,CAAC,WAAW,EAAG,QAAO;AAC9C,QAAO,OAAO,QAAQ,QAAQ,CAC3B,KAAK,CAAC,GAAG,OAAO,UAAU,EAAE,IAAI,EAAE,QAAQ,MAAM,OAAM,CAAC,GAAG,CAC1D,KAAK,KAAK,GAAG;;;;;;AAOlB,SAAgB,qBAAqB,OAAuB;CAC1D,MAAM,UAAU,uBAAuB,MAAM;CAC7C,IAAI,QAAQ;AACZ,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,QAAQ,CAChD,UAAS,OAAO,IAAI,IAAI,MAAM,QAAQ,MAAM,OAAM,CAAC;AAErD,QAAO;;;;;AAUT,SAAS,mBAAmB,S
AAyB;AACnD,QAAO,KAAK,YAAY,QAAQ,EAAE,aAAa;;;;;AAMjD,SAAS,iBAAiB,SAAuB;CAC/C,MAAM,YAAY,mBAAmB,QAAQ;AAC7C,KAAI,WAAW,UAAU,CACvB,KAAI;AACF,aAAW,UAAU;SACf;;;;;;AAUZ,eAAe,mBAAmB,SAAiB,iBAAiB,IAAsB;CACxF,MAAM,YAAY,mBAAmB,QAAQ;AAE7C,MAAK,IAAI,IAAI,GAAG,IAAI,gBAAgB,KAAK;AACvC,QAAM,IAAI,SAAQ,YAAW,WAAW,SAAS,IAAK,CAAC;AAEvD,MAAI,WAAW,UAAU,CACvB,KAAI;GACF,MAAM,UAAU,aAAa,WAAW,QAAQ;AAEhD,OADe,KAAK,MAAM,QAAQ,CACvB,UAAU,KACnB,QAAO;UAEH;;AAMZ,QAAO;;AA4BT,SAAgB,YAAY,SAAyB;AACnD,QAAO,KAAK,YAAY,QAAQ;;AAGlC,SAAgB,cAAc,SAAoC;CAChE,MAAM,YAAY,KAAK,YAAY,QAAQ,EAAE,aAAa;AAC1D,KAAI,CAAC,WAAW,UAAU,CAAE,QAAO;CAEnC,MAAM,UAAU,aAAa,WAAW,OAAO;AAC/C,QAAO,KAAK,MAAM,QAAQ;;AAG5B,SAAgB,eAAe,OAAyB;CACtD,MAAM,MAAM,YAAY,MAAM,GAAG;AACjC,WAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAEnC,eACE,KAAK,KAAK,aAAa,EACvB,KAAK,UAAU,OAAO,MAAM,EAAE,CAC/B;;;;;AAuCH,SAAgB,oBAAoB,SAAyB;AAC3D,QAAO,KAAK,YAAY,QAAQ,EAAE,eAAe;;;;;;;;AASnD,SAAgB,qBAAqB,SAA2C;CAC9E,MAAM,cAAc,oBAAoB,QAAQ;CAChD,MAAM,YAAY,KAAK,YAAY,QAAQ,EAAE,aAAa;AAG1D,KAAI,WAAW,YAAY,CACzB,KAAI;EACF,MAAM,UAAU,aAAa,aAAa,OAAO;AACjD,SAAO,KAAK,MAAM,QAAQ;SACpB;AAMV,KAAI,WAAW,UAAU,CACvB,KAAI;EACF,MAAM,UAAU,aAAa,WAAW,OAAO;EAC/C,MAAM,SAAS,KAAK,MAAM,QAAQ;AAElC,MAAI,OAAO,SAAS,OAAO,aACzB,QAAO;SAEH;AAMV,KAAI,CAAC,WAAW,UAAU,IAAI,CAAC,WAAW,YAAY,CACpD,QAAO;EACL,OAAO;EACP,+BAAc,IAAI,MAAM,EAAC,aAAa;EACvC;AAGH,QAAO;;;;;;;;;AAUT,SAAgB,sBAAsB,SAAiB,OAAyC;AAE9F,WADY,YAAY,QAAQ,EACjB,EAAE,WAAW,MAAM,CAAC;CAEnC,MAAM,cAAc,oBAAoB,QAAQ;CAGhD,IAAI,WAAqC;AACzC,KAAI,WAAW,YAAY,CACzB,KAAI;AACF,aAAW,KAAK,MAAM,aAAa,aAAa,OAAO,CAAC;SAClD;CAKV,MAAM,SAA4B;EAChC,GAAI,YAAY;GAAE,OAAO;GAAiB,+BAAc,IAAI,MAAM,EAAC,aAAa;GAAE;EAClF,GAAG;EACJ;AAED,eAAc,aAAa,KAAK,UAAU,QAAQ,MAAM,EAAE,CAAC;;;;;AAM7D,SAAgB,eAAe,SAAiB,OAA4B;CAC1E,MAAM,MAAM,YAAY,QAAQ;AAChC,WAAU,KAAK,EAAE,WAAW,MAAM,CAAC;CAEnC,MAAM,eAAe,KAAK,KAAK,iBAAiB;AAGhD,gBAAe,cAAc,KAAK,UAAU,MAAM,GAAG,KAAK;AAG1D,KAAI,WAAW,aAAa,CAC1B,KAAI;EACF,MAAM,QAAQ,aAAa,cAAc,OAAO,CAAC,MAAM,CAAC,MAAM,KAAK;AACnE,MAAI,MAAM,SAAS,IAEjB,eAAc,cADE,MAAM,MAAM,KAAK,CACG,KAAK,KAAK,GAAG,KAAK;UAEjD,OAAO;;;;;A
ASpB,SAAgB,YAAY,SAAiB,QAAQ,KAAsB;CACzE,MAAM,eAAe,KAAK,YAAY,QAAQ,EAAE,iBAAiB;AAEjE,KAAI,CAAC,WAAW,aAAa,CAC3B,QAAO,EAAE;AAGX,KAAI;AAOF,SANc,aAAa,cAAc,OAAO,CAAC,MAAM,CAAC,MAAM,KAAK,CAEhE,QAAO,SAAQ,KAAK,MAAM,CAAC,CAC3B,KAAI,SAAQ,KAAK,MAAM,KAAK,CAAkB,CAC9C,MAAM,CAAC,MAAM;SAGV;AACN,SAAO,EAAE;;;;;;AAOb,SAAgB,cAAc,SAAiB,WAAyB;CACtE,MAAM,MAAM,YAAY,QAAQ;AAChC,WAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAEnC,eAAc,KAAK,KAAK,aAAa,EAAE,UAAU;;;;;AAMnD,SAAgB,aAAa,SAAgC;CAC3D,MAAM,cAAc,KAAK,YAAY,QAAQ,EAAE,aAAa;AAE5D,KAAI,CAAC,WAAW,YAAY,CAC1B,QAAO;AAGT,KAAI;AACF,SAAO,aAAa,aAAa,OAAO,CAAC,MAAM;SACzC;AACN,SAAO;;;;;;;;AASX,SAAgB,mBAAmB,SAAgC;CAEjE,MAAM,kBAAkB,aAAa,QAAQ;AAC7C,KAAI,gBAAiB,QAAO;CAG5B,MAAM,eAAe,KAAK,YAAY,QAAQ,EAAE,gBAAgB;AAChE,KAAI;AACF,MAAI,WAAW,aAAa,EAAE;GAC5B,MAAM,WAAW,KAAK,MAAM,aAAa,cAAc,OAAO,CAAC;AAC/D,OAAI,MAAM,QAAQ,SAAS,IAAI,SAAS,SAAS,EAC/C,QAAO,SAAS,SAAS,SAAS;;SAGhC;CAGR,MAAM,eAAe,qBAAqB,QAAQ;AAClD,KAAI,cAAc,gBAChB,QAAO,aAAa;AAGtB,QAAO;;;;;;;;;;;;;AA4BT,SAAS,eAAe,SAA+B;AACrD,SAAQ,IAAI,uCAAuC;EAAE,OAAO,QAAQ;EAAO,UAAU,QAAQ;EAAU,OAAO,QAAQ;EAAO,WAAW,QAAQ;EAAW,YAAY,QAAQ;EAAY,CAAC;AAG5L,KAAI,QAAQ,OAAO;AACjB,UAAQ,IAAI,iCAAiC,QAAQ,QAAQ;AAC7D,SAAO,QAAQ;;AAGjB,KAAI;AAEF,MAAI,QAAQ,SACV,QAAO,WAAW,QAAQ,SAAS;AAIrC,MAAI,QAAQ,MAEV,QAAO,WADsB,eAAe,QAAQ,QACzB;AAI7B,MAAI,QAAQ,aAAa,QAAQ,cAAc,aAG7C,QAAO,WADsB,cAAc,QAAQ,YACxB;AAO7B,MAAI;GAEF,MAAM,eADiB,oBAAoB,CACP,iBAAiB,iBAAiB;AAMtE,UALyC;IACvC,QAAQ;IACR,UAAU;IACV,SAAS;IACV,CACe,iBAAiB;UAC3B;AACN,UAAO;;UAEF,OAAO;AAEd,UAAQ,KAAK,yEAAyE;AACtF,SAAO,QAAQ,SAAS;;;;;;;;;;;;;;;;;AAkB5B,eAAe,qBAAqB,SAAiB,OAAmB,eAAuC;AAI7G,KAAI,QAAQ,KAAK,QAAQ,EAAE;AACzB,UAAQ,KACN,qBAAqB,MAAM,mCAAmC,QAAQ,wDACjB,QAAQ,yEAE9D;AACD;;CAKF,MAAM,gBAAgB,gBAAgB,kBAAkB,cAAc,GAAG;AACzE,KAAI,CAAC,cACH,OAAM,IAAI,MAAM,qBAAqB,QAAQ,0CAA0C,iBAAiB,SAAS,0CAA0C;AAM7J,KAAI,cAAc,aAAa;EAC7B,MAAM,CAAC,OAAO,QAAQ,cAAc,YAAY,MAAM,IAAI;AAE1D,QADgB,cAAc;GAAE,MAAM;GAAU;GAAO;GAAM,CAAC,CAChD,gBAAgB,SAAS,MAAM;AAC7C,UAAQ,IAAI,yBAAyB,QAAQ,MAAM,MAAM,eAAe,cAAc,YAAY,GAAG;AACrG;;AAIF,KAAI,cAAc,eAAe;EAE/B,MAAM,iBADSC,c
AAY,CACG;AAC9B,MAAI,CAAC,gBAAgB,MACnB,OAAM,IAAI,MAAM,WAAW,cAAc,KAAK,wBAAwB,cAAc,cAAc,qDAAqD;AAGzJ,QADgB,wBAAwB,gBAAgB,QAAQ,CAClD,gBAAgB,SAAS,MAAM;AAC7C,UAAQ,IAAI,yBAAyB,QAAQ,MAAM,MAAM,uBAAuB,cAAc,cAAc,GAAG;AAC/G;;AAKF,KAAI,eAAe,cAAc,EAAE;EAEjC,MAAM,iBADSA,cAAY,CACG;AAC9B,MAAI,CAAC,gBAAgB,OACnB,OAAM,IAAI,MAAM,WAAW,cAAc,KAAK,sBAAsB,eAAe,cAAc,CAAC,sDAAsD;AAG1J,QADgB,wBAAwB,gBAAgB,SAAS,CACnD,gBAAgB,SAAS,MAAM;AAC7C,UAAQ,IAAI,yBAAyB,QAAQ,MAAM,MAAM,qBAAqB,eAAe,cAAc,CAAC,GAAG;AAC/G;;AAGF,KAAI,cAAc,aAAa;AAC7B,UAAQ,KAAK,qCAAqC,cAAc,YAAY,gCAAgC,MAAM,oBAAoB;AACtI;;AAGF,OAAM,IAAI,MAAM,WAAW,cAAc,KAAK,gGAAgG;;AAGhJ,eAAsB,4BAA4B,SAAiB,eAAuC;AACxG,QAAO,qBAAqB,SAAS,eAAe,cAAc;;;;;;AAOpE,eAAsB,0BAA0B,SAAiB,eAAuC;AACtG,QAAO,qBAAqB,SAAS,aAAa,cAAc;;AAGlE,eAAsB,WAAW,SAA4C;CAC3E,MAAM,UAAU,SAAS,QAAQ,QAAQ,aAAa;AAGtD,KAAI,cAAc,QAAQ,CACxB,OAAM,IAAI,MAAM,SAAS,QAAQ,sDAAsD;AAIzF,UAAS,QAAQ;CAGjB,MAAM,gBAAgB,eAAe,QAAQ;AAC7C,SAAQ,IAAI,2BAA2B,gBAAgB;CAGvD,MAAM,QAAoB;EACxB,IAAI;EACJ,SAAS,QAAQ;EACjB,WAAW,QAAQ;EACnB,SAAS,QAAQ,WAAW;EAC5B,OAAO;EACP,QAAQ;EACR,4BAAW,IAAI,MAAM,EAAC,aAAa;EAEnC,YAAY,QAAQ;EACpB,cAAc;EACd,WAAW;EAEX,OAAO,QAAQ;EACf,UAAU,QAAQ;EACnB;AAED,gBAAe,MAAM;CAGrB,IAAI,SAAS,QAAQ,UAAU;CAG/B,MAAM,EAAE,SAAS,UAAU,UAAU,QAAQ;AAC7C,KAAI,SAAS;EACX,MAAM,mBAAmB,yBAAyB,QAAQ;AAC1D,MAAI,iBACF,UAAS,mBAAmB,gBAAgB;;CAKhD,MAAM,aAAa,KAAK,YAAY,QAAQ,EAAE,oBAAoB;AAClE,KAAI,OACF,eAAc,YAAY,OAAO;AAInC,qBAAoB;AAGpB,KAAI;EACF,MAAM,WAAW,KAAK,QAAQ,WAAW,QAAQ;AACjD,MAAI,WAAW,SAAS,EAAE;GACxB,MAAM,EAAE,yBAAyB,MAAM,OAAO;GAC9C,MAAM,cAAc,qBAAqB,QAAQ,WAAW,SAAS;AAErE,OAAI,EADW,MAAM,YAAY,WAAW,EAChC,SAAS;AACnB,UAAM,YAAY,MAAM,KAAK;AAC7B,YAAQ,IAAI,IAAI,QAAQ,qCAAqC;;;SAG3D;AAKR,gBAAe,SAAS,QAAQ,QAAQ;AAGxC,kBAAiB,QAAQ;CAGzB,MAAM,cAAc,uBAAuB,cAAc;CAMzD,MAAM,WAAW,oBAAoB,cAAyB;AAC9D,KAAI,SAAS,aAAa,kBACxB,yBAAwB,UAAU,QAAQ,UAAU;KAEpD,yBAAwB,QAAQ,UAAU;CAM5C,IAAI;AACJ,KAAI,QAAQ;EACV,MAAM,iBAAiB,KAAK,YAAY,QAAQ,EAAE,cAAc;AAMhE,gBAAc,gBAJU;EADA,2BAA2B,MAAM,MAAM,CAEjD,gBAAgB,WAAW;qDACQ,MAAM,MAAM;GAEd,EAAE,MAAM,KAAO,CAAC;AAC/D,cAAY,SAAS,eAAe;OAEpC,
aAAY,iDAAiD,MAAM;AAIrE,KAAI;EACF,MAAM,EAAE,sBAAsB,MAAM,OAAO;AAC3C,oBAAkB,QAAQ,UAAU;SAC9B;AAIR,KAAI;EACF,MAAM,EAAE,uBAAuB,2BAA2B,6BAA6B,MAAM,OAAO;AACpG,MAAI,uBAAuB,EAAE;GAC3B,MAAM,EAAE,sBAAsB,MAAM,OAAO;GAE3C,MAAM,SADU,kBAAkB,QAAQ,QAAQ,WAAW,MAAM,KAAK,CAAC,EACjD;AACxB,OAAI,QAAQ;IACV,MAAM,CAAC,OAAO,QAAQ,OAAO,MAAM,IAAI;IACvC,MAAM,EAAE,UAAU,MAAM,2BAA2B;AACnD,UAAM,yBAAyB,QAAQ,WAAW,OAAO,MAAM,MAAM;AACrE,YAAQ,IAAI,IAAI,QAAQ,6DAA6D;;;UAGlF,KAAU;AACjB,UAAQ,KAAK,IAAI,QAAQ,oDAAoD,IAAI,UAAU;;CAK7F,MAAM,cAAc,QAAQ,QAAQ,WAAW,MAAM,KAAK;CAC1D,MAAM,gBAAgB,WAAW,KAAK,aAAa,UAAU,CAAC;CAC9D,MAAM,YAAoC,EAAE;AAE5C,KAAI,eAAe;AACjB,YAAU,kBAAkB;AAG5B,MAAI,QAAQ,QACV,WAAU,eAAe,QAAQ;AAEnC,MAAI,QAAQ,MACV,WAAU,YAAY,QAAQ;AAIhC,MAAI,QAAQ,SAAU,QAAQ,UAAqB,YAAY;GAE7D,MAAM,eAAe,cADE,SAAS,QAAQ,QAAQ,aAAa,GACX;AAClD,OAAI,cAAc,kBAChB,WAAU,qBAAqB,aAAa;;;AAKlD,eAAc,SAAS,QAAQ,WAAW,WAAW,EACnD,KAAK;EACH,qBAAqB;EACrB,qBAAqB,QAAQ;EAC7B,yBAAyB,QAAQ,SAAS;EAC1C,sCAAsC;EACtC,GAAG;EACH,GAAG;EACJ,EACF,CAAC;AAGF,OAAM,SAAS;AACf,gBAAe,MAAM;AAGrB,WAAU,SAAS,QAAQ,QAAQ;AAInC,KAAI,CAAC,QAAQ,aAAa,QAAQ,cAAc,aAC9C,6BAA4B,QAAQ,SAAS,QAAQ,UAAU,CAAC,OAAO,QAAQ;AAC7E,UAAQ,KAAK,iCAAiC,QAAQ,QAAQ,mBAAmB,IAAI,UAAU;GAC/F;AAIJ,KAAI,iBAAkB,QAAQ,UAAqB,WACjD,0BAAyB,SAAS,YAAY,CAAC,OAAO,QAAQ;AAC5D,UAAQ,KAAK,mDAAmD,IAAI,UAAU;GAC9E;AAGJ,QAAO;;AAGT,SAAgB,oBAA8D;CAC5E,MAAM,eAAe,kBAAkB;CACvC,MAAM,YAAY,IAAI,IAAI,aAAa,KAAI,MAAK,EAAE,KAAK,CAAC;CAExD,MAAM,SAAmD,EAAE;AAG3D,KAAI,CAAC,WAAW,WAAW,CAAE,QAAO;CAEpC,MAAM,OAAO,YAAY,YAAY,EAAE,eAAe,MAAM,CAAC,CAC1D,QAAO,MAAK,EAAE,aAAa,CAAC;AAE/B,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,QAAQ,cAAc,IAAI,KAAK;AACrC,MAAI,MACF,QAAO,KAAK;GACV,GAAG;GACH,YAAY,UAAU,IAAI,MAAM,GAAG;GACpC,CAAC;;AAIN,QAAO;;;;;;;;;;AAWT,SAAgB,4BAAkC;AAChD,KAAI,CAAC,WAAW,WAAW,CAAE;CAE7B,MAAM,OAAO,YAAY,YAAY,EAAE,eAAe,MAAM,CAAC,CAC1D,QAAO,MAAK,EAAE,aAAa,CAAC;CAE/B,MAAM,SAAmB,EAAE;AAC3B,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,QAAQ,cAAc,IAAI,KAAK;AACrC,MAAI,OAAO,WAAW,QAAQ,KAAK,MAAM,QAAQ,CAC/C,QAAO,KAAK,GAAG,IAAI,KAAK,cAAc,MAAM,QAAQ,IAAI;;AAI5D,KAAI,OAAO,SAAS,EAClB,SAAQ,KACN,qBA
AqB,OAAO,OAAO,iMAGnC,OAAO,KAAI,MAAK,0BAA0B,IAAI,CAAC,KAAK,KAAK,CAC1D;;AAIL,SAAgB,UAAU,SAAuB;CAC/C,MAAM,eAAe,iBAAiB,QAAQ;AAE9C,KAAI,cAAc,aAAa,EAAE;AAE/B,MAAI;GACF,MAAM,SAAS,YAAY,cAAc,IAAK;AAC9C,OAAI,QAAQ;IACV,MAAM,WAAW,YAAY,aAAa;AAC1C,cAAU,UAAU,EAAE,WAAW,MAAM,CAAC;AACxC,kBAAc,KAAK,UAAU,aAAa,EAAE,OAAO;;UAE/C;AAIR,cAAY,aAAa;;CAG3B,MAAM,QAAQ,cAAc,aAAa;AACzC,KAAI,OAAO;AAET,MAAI,CAAC,MAAM,GAAI,OAAM,KAAK;AAE1B,QAAM,SAAS;AACf,iBAAe,MAAM;;AAMvB,uBAAsB,cAAc,EAAE,OAAO,WAAW,CAAC;;AAG3D,eAAsB,aAAa,SAAiB,SAAgC;CAClF,MAAM,eAAe,iBAAiB,QAAQ;AAI9C,KADqB,qBAAqB,aAAa,EACrC,UAAU,aAAa;AACvC,UAAQ,IAAI,0CAA0C,aAAa,qBAAqB;EACxF,MAAM,SAAS,MAAM,YAAY,cAAc,QAAQ;AACvD,MAAI,CAAC,OAAO,QACV,OAAM,IAAI,MAAM,gCAAgC,OAAO,QAAQ;AAGjE;;CAIF,MAAM,aAAa,cAAc,aAAa;AAC9C,KAAI,cAAc,WAAW,WAAW,aAAa,CAAC,cAAc,aAAa,EAAE;AACjF,UAAQ,IAAI,0CAA0C,aAAa,sBAAsB;EAEzF,MAAM,cAAc,WAAW,QAAQ,uBAAuB,WAAW,MAAM,GAAG,EAAE;AACpF,MAAI,WAAW,OAAO;GACpB,MAAM,WAAW,oBAAoB,WAAW,MAAiB;AACjE,OAAI,SAAS,aAAa,kBACxB,yBAAwB,UAAU,WAAW,UAAU;OAEvD,yBAAwB,WAAW,UAAU;;AAIjD,mBAAiB,aAAa;EAC9B,MAAM,YAAY,iDAAiD,WAAW,SAAS,oBAAoB,6BAA6B,WAAW,QAAQ;AAC3J,gBAAc,cAAc,WAAW,WAAW,WAAW,EAC3D,KAAK;GACH,qBAAqB;GACrB,qBAAqB,WAAW,WAAW;GAC3C,yBAAyB,WAAW,SAAS;GAC7C,sCAAsC;GACtC,GAAG;GACJ,EACF,CAAC;AAEF,aAAW,SAAS;AACpB,aAAW,gCAAe,IAAI,MAAM,EAAC,aAAa;AAClD,iBAAe,WAAW;AAI1B,MADc,MAAM,mBAAmB,cAAc,GAAG,EAC7C;AACT,SAAM,cAAc,cAAc,QAAQ;AAC1C,WAAQ,IAAI,sBAAsB,aAAa,yBAAyB;QAExE,SAAQ,KAAK,sBAAsB,aAAa,yDAAyD;EAI3G,MAAM,UAAU,KAAK,YAAY,aAAa,EAAE,OAAO;AACvD,YAAU,SAAS,EAAE,WAAW,MAAM,CAAC;AAEvC,gBACE,KAAK,SAAS,oBAFE,IAAI,MAAM,EAAC,aAAa,CAAC,QAAQ,SAAS,IAAI,CAEnC,KAAK,EAChC,gBAAgB,QAAQ,IACzB;AACD;;CAIF,MAAM,EAAE,sBAAsB,sBAAsB,MAAM,OAAO;CACjE,MAAM,cAAc,qBAAqB,aAAa;AACtD,KAAI,eAAe,YAAY,QAAQ;AACrC,UAAQ,IAAI,4CAA4C,aAAa,MAAM,YAAY,SAAS;AAChG,QAAM,kBAAkB,cAAc,YAAY,QAAQ,QAAQ;EAGlE,MAAM,UAAU,KAAK,YAAY,aAAa,EAAE,OAAO;AACvD,YAAU,SAAS,EAAE,WAAW,MAAM,CAAC;AAEvC,gBACE,KAAK,SAAS,oBAFE,IAAI,MAAM,EAAC,aAAa,CAAC,QAAQ,SAAS,IAAI,CAEnC,KAAK,EAChC,gBAAgB,QAAQ,IACzB;AACD;;AAGF,KAAI,CAAC,cAAc,aAAa,CAC9B,OAAM,I
AAI,MAAM,SAAS,aAAa,cAAc;AAGtD,OAAM,cAAc,cAAc,QAAQ;CAG1C,MAAM,UAAU,KAAK,YAAY,aAAa,EAAE,OAAO;AACvD,WAAU,SAAS,EAAE,WAAW,MAAM,CAAC;AAGvC,eACE,KAAK,SAAS,oBAFE,IAAI,MAAM,EAAC,aAAa,CAAC,QAAQ,SAAS,IAAI,CAEnC,KAAK,EAChC,gBAAgB,QAAQ,IACzB;;;;;;;;;;;;AAaH,eAAsB,YAAY,SAAiB,SAAiE;CAClH,MAAM,eAAe,iBAAiB,QAAQ;CAG9C,MAAM,eAAe,qBAAqB,aAAa;CACvD,MAAM,aAAa,cAAc,aAAa;CAC9C,MAAM,uBAAuB,CAAC,aAAa,OAAO;CAClD,MAAM,uBAAuB,CAAC,WAAW,YAAY;CAIrD,MAAM,YAAY,YAAY,WAAW,aAAa,CAAC,cAAc,aAAa;AAMlF,KAAI,EAJe,gBAAgB,qBAAqB,SAAS,aAAa,MAAM,IAC9E,cAAc,qBAAqB,SAAS,WAAW,OAAO,IAC/D,WAGH,QAAO;EACL,SAAS;EACT,OAAO,yCAAyC,cAAc,SAAS,UAAU,WAAW,YAAY,UAAU;EACnH;CAIH,MAAM,YAAY,mBAAmB,aAAa;AAClD,KAAI,CAAC,UACH,QAAO;EACL,SAAS;EACT,OAAO;EACR;AAIH,KAAI,CAAC,WACH,QAAO;EACL,SAAS;EACT,OAAO;EACR;AAIH,KAAI,cAAc,aAAa,CAC7B,KAAI;AACF,cAAY,aAAa;SACnB;CAIV,MAAM,gBAAgB,KAAK,YAAY,aAAa,EAAE,YAAY;AAClE,KAAI,WAAW,cAAc,CAC3B,KAAI;AAAE,aAAW,cAAc;SAAU;AAG3C,KAAI;AAEF,mBAAiB,aAAa;EAG9B,MAAM,cAAc,WAAW,QAAQ,uBAAuB,WAAW,MAAM,GAAG,EAAE;AAIpF,MAAI,WAAW,OAAO;GACpB,MAAM,WAAW,oBAAoB,WAAW,MAAiB;AACjE,OAAI,SAAS,aAAa,kBACxB,yBAAwB,UAAU,WAAW,UAAU;OAEvD,yBAAwB,WAAW,UAAU;;EAKjD,MAAM,YAAY,oBAAoB,UAAU;AAChD,gBAAc,cAAc,WAAW,WAAW,WAAW,EAC3D,KAAK;GACH,qBAAqB;GACrB,qBAAqB,WAAW,WAAW;GAC3C,yBAAyB,WAAW,SAAS;GAC7C,sCAAsC;GACtC,GAAG;GACJ,EACF,CAAC;AAGF,MAAI,QAIF,KAFc,MAAM,mBAAmB,cAAc,GAAG,CAItD,OAAM,cAAc,cAAc,QAAQ;MAE1C,SAAQ,MAAM,wEAAwE;AAK1F,wBAAsB,cAAc;GAClC,OAAO;GACP,4BAAW,IAAI,MAAM,EAAC,aAAa;GACpC,CAAC;AAGF,MAAI,YAAY;AACd,cAAW,SAAS;AACpB,cAAW,gCAAe,IAAI,MAAM,EAAC,aAAa;AAClD,kBAAe,WAAW;;AAG5B,SAAO,EAAE,SAAS,MAAM;UACjB,OAAgB;AAEvB,SAAO;GACL,SAAS;GACT,OAAO,2BAHG,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;GAIjE;;;;;;AAOL,SAAgB,sBAAoC;AAElD,QADe,mBAAmB,CACpB,QACX,UAAU,MAAM,WAAW,aAAa,CAAC,MAAM,WACjD;;;;;AAMH,SAAgB,aAAa,SAAoC;CAC/D,MAAM,eAAe,iBAAiB,QAAQ;CAC9C,MAAM,QAAQ,cAAc,aAAa;AAEzC,KAAI,CAAC,MACH,QAAO;AAIT,KAAI,CAAC,MAAM,GAAI,OAAM,KAAK;AAC1B,KAAI,CAAC,MAAM,aAAa,CAAC,MAAM,OAAO;AACpC,UAAQ,MAAM,2BAA2B,aAAa,yCAAyC;AAC/F,SAAO;;AAIT,KAAI,cAAc,aAAa,CAC7B,QAAO;CAIT,MAAM,aAAa,KAAK,Y
AAY,aAAa,EAAE,cAAc;CACjE,IAAI,SAAS;EAAE,qBAAqB;EAAG,WAAW;EAAG,eAAe;EAAG;AACvE,KAAI,WAAW,WAAW,CACxB,KAAI;AACF,WAAS;GAAE,GAAG;GAAQ,GAAG,KAAK,MAAM,aAAa,YAAY,QAAQ,CAAC;GAAE;SAClE;AAEV,QAAO,iBAAiB,OAAO,iBAAiB,KAAK;AACrD,eAAc,YAAY,KAAK,UAAU,QAAQ,MAAM,EAAE,CAAC;CAG1D,MAAM,iBAAiB,uBAAuB,MAAM;CAGpD,MAAM,cAAc,MAAM,QAAQ,uBAAuB,MAAM,MAAM,GAAG,EAAE;AAI1E,KAAI,MAAM,OAAO;EACf,MAAM,WAAW,oBAAoB,MAAM,MAAiB;AAC5D,MAAI,SAAS,aAAa,kBACxB,yBAAwB,UAAU,MAAM,UAAU;MAElD,yBAAwB,MAAM,UAAU;;CAK5C,MAAM,YAAY,iDAAiD,MAAM,MAAM,IAAI,eAAe,QAAQ,MAAM,OAAM,CAAC,QAAQ,OAAO,MAAM,CAAC;AAC7I,eAAc,cAAc,MAAM,WAAW,WAAW,EACtD,KAAK;EACH,qBAAqB;EACrB,qBAAqB,MAAM,WAAW;EACtC,yBAAyB,MAAM,SAAS;EACxC,sCAAsC;EACtC,GAAG;EACJ,EACF,CAAC;AAGF,OAAM,SAAS;AACf,OAAM,gCAAe,IAAI,MAAM,EAAC,aAAa;AAC7C,gBAAe,MAAM;AAErB,QAAO;;;;;AAMT,SAAS,uBAAuB,OAA2B;CACzD,MAAM,QAAkB;EACtB;EACA;EACA;EACA;EACA;EACA,YAAY,MAAM;EAClB,gBAAgB,MAAM;EACtB,cAAc,MAAM;EACpB;EACA;EACA,0CAA0C,MAAM,UAAU;EAC1D;EACA;EACA;EACA;EACA;EACA;EACA;EACD;CAGD,MAAM,EAAE,YAAY,UAAU,MAAM,GAAG;AACvC,KAAI,SAAS;EACX,MAAM,mBAAmB,yBAAyB,MAAM,GAAG;AAC3D,MAAI,kBAAkB;AACpB,SAAM,KAAK,MAAM;AACjB,SAAM,KAAK,GAAG;AACd,SAAM,KAAK,iBAAiB;;;AAIhC,QAAO,MAAM,KAAK,KAAK;;;;;AAMzB,SAAgB,oBAA+D;CAC7E,MAAM,UAAU,qBAAqB;CACrC,MAAM,YAAsB,EAAE;CAC9B,MAAM,SAAmB,EAAE;AAE3B,MAAK,MAAM,SAAS,QAClB,KAAI;AAEF,MADe,aAAa,MAAM,GAAG,CAEnC,WAAU,KAAK,MAAM,GAAG;MAExB,QAAO,KAAK,MAAM,GAAG;UAEhB,OAAO;AACd,SAAO,KAAK,MAAM,GAAG;;AAIzB,QAAO;EAAE;EAAW;EAAQ;;;;;AAM9B,SAAS,qBAA2B;CAClC,MAAM,eAAe,KAAK,SAAS,EAAE,WAAW,gBAAgB;CAChE,MAAM,WAAW,KAAK,SAAS,EAAE,eAAe,OAAO,iBAAiB;AAGxE,KAAI,WAAW,aAAa,CAC1B,KAAI;EACF,MAAM,kBAAkB,aAAa,cAAc,QAAQ;AAY3D,OAXiB,KAAK,MAAM,gBAAgB,EACd,OAAO,eAAe,EAAE,EAEnB,MAAM,eACvC,WAAW,OAAO,MAAM,SACtB,KAAK,YAAY,YACjB,KAAK,SAAS,SAAS,aAAa,IACpC,KAAK,SAAS,SAAS,iBAAiB,CACzC,CACF,CAGC;SAEI;AAMV,KAAI;AACF,UAAQ,IAAI,4CAA4C;AAGxD,OAAK,oBAAoB,UAAwB;AAC/C,OAAI,MACF,SAAQ,KAAK,oEAAoE;OAEjF,SAAQ,IAAI,+BAA+B;IAE7C;UACK,OAAO;AACd,UAAQ,KAAK,oEAAoE;;;;;;AAOrF,SAAS,eAAe,SAAiB,SAAuB;CAC9D,MAAM,WAAW,KAAK,YAAY,QAAQ,CAAC;AAC3C,WAAU,UAAU
,EAAE,WAAW,MAAM,CAAC;AAGxC,eADkB,KAAK,UAAU,oBAAoB,EAGnD,KAAK,UAAU;EACb,IAAI;EACJ,OAAO,cAAc;EACrB,6BAAY,IAAI,MAAM,EAAC,aAAa;EACrC,EAAE,MAAM,EAAE,CACZ;;;;;;;;AASH,eAAe,yBAAyB,SAAiB,aAAoC;CAE3F,MAAM,cAAc,KAAK,aAAa,WAAW,WAAW;CAC5D,IAAI,WAAW;CACf,MAAM,cAAc;CACpB,MAAM,UAAU;AAEhB,QAAO,WAAW,aAAa;AAE7B,MAAI,WAAW,YAAY,EAAE;GAE3B,MAAM,WAAW,YAAY,aAAa,EAAE,eAAe,MAAM,CAAC,CAC/D,QAAO,MAAK,EAAE,aAAa,CAAC,CAC5B,KAAI,OAAM;IACT,MAAM,EAAE;IACR,MAAM,KAAK,aAAa,EAAE,KAAK;IAC/B,OAAO,WAAW,KAAK,aAAa,EAAE,MAAM,kBAAkB,CAAC,GAC3D,aAAa,KAAK,aAAa,EAAE,MAAM,kBAAkB,EAAE,QAAQ,GACnE;IACL,EAAE,CACF,QAAO,MAAK;AAEX,QAAI,CAAC,EAAE,MAAO,QAAO;AACrB,QAAI;KACF,MAAM,QAAQ,KAAK,MAAM,EAAE,MAAM;AACjC,YAAO,MAAM,aAAa,WAAW,MAAM,YAAY;YACjD;AACN,YAAO;;KAET,CACD,MAAM,GAAG,MAAM;IAEd,MAAM,QAAQ,WAAW,KAAK,EAAE,MAAM,kBAAkB,CAAC,GACpD,SAAS,KAAK,EAAE,MAAM,kBAAkB,CAAC,CAAC,WAAW,IACtD;AAIJ,YAHc,WAAW,KAAK,EAAE,MAAM,kBAAkB,CAAC,GACpD,SAAS,KAAK,EAAE,MAAM,kBAAkB,CAAC,CAAC,WAAW,IACtD,KACW;KACf;AAEJ,OAAI,SAAS,SAAS,GAAG;IAEvB,MAAM,QAAQ,cAAc,QAAQ;AACpC,QAAI,OAAO;AACT,WAAM,oBAAoB,SAAS,GAAG;AACtC,oBAAe,MAAM;AACrB,aAAQ,IAAI,6CAA6C,QAAQ,IAAI,SAAS,GAAG,OAAO;AACxF;;;;AAMN,QAAM,IAAI,SAAQ,YAAW,WAAW,SAAS,QAAQ,CAAC;AAC1D;;AAGF,OAAM,IAAI,MAAM,qCAAqC,QAAQ,SAAS,cAAc,QAAQ,IAAI;;;;aAh1C1D;YACsF;aACnD;UACb;cAEJ;wBAEK;iBACwE;mBACvE;gBACvB;eACqC;gBAEZ;AAEhD,WAAU,KAAK;AAG3B,kBAAiB,CAAC,UAAU,YAAY"}
|
|
1
|
+
{"version":3,"file":"agents-DCpQQ_W5.js","names":["loadYamlConfig","loadConfig"],"sources":["../../src/lib/cloister/config.ts","../../src/lib/cv.ts","../../src/lib/agents.ts"],"sourcesContent":["/**\n * Cloister Configuration\n *\n * Loads and manages Cloister configuration from ~/.panopticon/cloister.toml\n */\n\nimport { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs';\nimport { parse, stringify } from '@iarna/toml';\nimport { join } from 'path';\nimport { PANOPTICON_HOME } from '../paths.js';\n\nconst CLOISTER_CONFIG_FILE = join(PANOPTICON_HOME, 'cloister.toml');\n\n/**\n * Health threshold configuration (in minutes)\n */\nexport interface HealthThresholds {\n stale: number;\n warning: number;\n stuck: number;\n}\n\n/**\n * Automatic action configuration\n */\nexport interface AutoActions {\n poke_on_warning: boolean;\n poke_on_stuck: boolean; // Poke agents idle > stuck threshold (default: true)\n kill_on_stuck: boolean;\n restart_on_kill: boolean;\n /** Minimum ms between pokes for the same agent. Prevents spam on repeated health checks. 
Default: 30 min */\n poke_cooldown_ms: number;\n}\n\n/**\n * Monitoring configuration\n */\nexport interface MonitoringConfig {\n check_interval: number; // seconds between health checks\n heartbeat_sources: ('jsonl_mtime' | 'tmux_activity' | 'git_activity' | 'active_heartbeat')[];\n}\n\n/**\n * Startup configuration\n */\nexport interface StartupConfig {\n auto_start: boolean; // Start Cloister when dashboard starts\n}\n\n/**\n * Notification configuration (future feature)\n */\nexport interface NotificationConfig {\n slack_webhook?: string;\n email?: string;\n}\n\n/**\n * Specialist agent configuration\n */\nexport interface SpecialistConfig {\n enabled: boolean;\n auto_wake: boolean;\n}\n\n/**\n * Test agent specific configuration\n */\nexport interface TestAgentConfig extends SpecialistConfig {\n test_command?: string; // Optional test command override (e.g., \"npm test\", \"pytest\", etc.)\n}\n\n/**\n * All specialist agents configuration\n */\nexport interface SpecialistsConfig {\n merge_agent?: SpecialistConfig;\n review_agent?: SpecialistConfig;\n test_agent?: TestAgentConfig;\n}\n\n/**\n * Model selection configuration\n */\nexport interface ModelSelectionConfig {\n default_model: 'opus' | 'sonnet' | 'haiku';\n complexity_routing: {\n trivial: 'opus' | 'sonnet' | 'haiku';\n simple: 'opus' | 'sonnet' | 'haiku';\n medium: 'opus' | 'sonnet' | 'haiku';\n complex: 'opus' | 'sonnet' | 'haiku';\n expert: 'opus' | 'sonnet' | 'haiku';\n };\n specialist_models: {\n merge_agent: 'opus' | 'sonnet' | 'haiku';\n review_agent: 'opus' | 'sonnet' | 'haiku';\n test_agent: 'opus' | 'sonnet' | 'haiku';\n };\n}\n\n/**\n * Handoff trigger configuration\n */\nexport interface HandoffTriggersConfig {\n stuck_escalation?: {\n enabled: boolean;\n haiku_to_sonnet_minutes: number;\n sonnet_to_opus_minutes: number;\n };\n test_failure?: {\n enabled: boolean;\n from_model: 'opus' | 'sonnet' | 'haiku';\n to_model: 'opus' | 'sonnet' | 'haiku';\n trigger_on: 'any_failure' | 
'2_consecutive';\n };\n implementation_complete?: {\n enabled: boolean;\n to_specialist: string; // e.g., 'test-agent'\n };\n}\n\n/**\n * Handoff configuration\n */\nexport interface HandoffConfig {\n auto_triggers: HandoffTriggersConfig;\n}\n\n/**\n * Cost tracking configuration\n */\nexport interface CostTrackingConfig {\n display_enabled: boolean;\n log_to_jsonl: boolean;\n}\n\n/**\n * Auto-restart configuration\n */\nexport interface AutoRestartConfig {\n enabled: boolean;\n max_retries: number;\n backoff_seconds: number[]; // Array of backoff delays (e.g., [30, 60, 120])\n}\n\n/**\n * Cost limits configuration\n */\nexport interface CostLimitsConfig {\n per_agent_usd: number;\n per_issue_usd: number;\n daily_total_usd: number;\n alert_threshold: number; // Fraction (0.0-1.0) at which to start alerting\n}\n\n/**\n * Retention policy configuration\n */\nexport interface RetentionConfig {\n agent_state_days: number; // Days to keep agent state dirs (default: 30)\n health_staleness_hours: number; // Hours before hiding stale agents in health API (default: 24)\n}\n\n/**\n * Complete Cloister configuration\n */\nexport interface CloisterConfig {\n startup: StartupConfig;\n thresholds: HealthThresholds;\n auto_actions: AutoActions;\n monitoring: MonitoringConfig;\n notifications?: NotificationConfig;\n specialists?: SpecialistsConfig;\n model_selection?: ModelSelectionConfig;\n handoffs?: HandoffConfig;\n cost_tracking?: CostTrackingConfig;\n auto_restart?: AutoRestartConfig;\n cost_limits?: CostLimitsConfig;\n retention?: RetentionConfig;\n}\n\n/**\n * Default Cloister configuration\n */\nexport const DEFAULT_CLOISTER_CONFIG: CloisterConfig = {\n startup: {\n auto_start: true,\n },\n thresholds: {\n stale: 5,\n warning: 15,\n stuck: 30,\n },\n auto_actions: {\n poke_on_warning: true,\n poke_on_stuck: true, // Poke agents that have been idle > stuck threshold\n kill_on_stuck: false, // Manual by default for safety\n restart_on_kill: false,\n poke_cooldown_ms: 30 * 60 
* 1000, // 30 min between pokes for the same agent\n },\n monitoring: {\n check_interval: 60, // 1 minute\n heartbeat_sources: ['jsonl_mtime', 'tmux_activity', 'git_activity'],\n },\n notifications: {\n slack_webhook: undefined,\n email: undefined,\n },\n specialists: {\n merge_agent: {\n enabled: true,\n auto_wake: false, // Only wake on explicit \"Approve & Merge\" click\n },\n review_agent: {\n enabled: true,\n auto_wake: false, // Only wake on explicit request\n },\n test_agent: {\n enabled: false, // Not yet implemented\n auto_wake: false,\n },\n },\n model_selection: {\n default_model: 'sonnet',\n complexity_routing: {\n trivial: 'haiku',\n simple: 'haiku',\n medium: 'sonnet',\n complex: 'sonnet',\n expert: 'opus',\n },\n specialist_models: {\n merge_agent: 'sonnet',\n review_agent: 'sonnet',\n test_agent: 'haiku',\n },\n },\n handoffs: {\n auto_triggers: {\n stuck_escalation: {\n enabled: true,\n haiku_to_sonnet_minutes: 10,\n sonnet_to_opus_minutes: 20,\n },\n test_failure: {\n enabled: true,\n from_model: 'haiku',\n to_model: 'sonnet',\n trigger_on: 'any_failure',\n },\n implementation_complete: {\n enabled: true, // Auto-handoff to test-agent when implementation done\n to_specialist: 'test-agent',\n },\n },\n },\n cost_tracking: {\n display_enabled: true,\n log_to_jsonl: true,\n },\n auto_restart: {\n enabled: true,\n max_retries: 3,\n backoff_seconds: [30, 60, 120], // 30s, 1m, 2m\n },\n cost_limits: {\n per_agent_usd: 10.0,\n per_issue_usd: 25.0,\n daily_total_usd: 100.0,\n alert_threshold: 0.8, // Alert at 80%\n },\n retention: {\n agent_state_days: 30,\n health_staleness_hours: 24,\n },\n};\n\n/**\n * Deep merge utility that recursively merges objects.\n * - Recursively merges nested objects\n * - Arrays in overrides replace defaults (not concatenated)\n * - User values take precedence over defaults\n */\nfunction deepMerge<T extends object>(defaults: T, overrides: Partial<T>): T {\n const result = { ...defaults };\n\n for (const key of 
Object.keys(overrides) as (keyof T)[]) {\n const defaultVal = defaults[key];\n const overrideVal = overrides[key];\n\n // Skip undefined values in overrides\n if (overrideVal === undefined) continue;\n\n // Deep merge if both values are non-array objects\n if (\n typeof defaultVal === 'object' &&\n defaultVal !== null &&\n !Array.isArray(defaultVal) &&\n typeof overrideVal === 'object' &&\n overrideVal !== null &&\n !Array.isArray(overrideVal)\n ) {\n result[key] = deepMerge(defaultVal as any, overrideVal as any);\n } else {\n // Direct override for primitives and arrays\n result[key] = overrideVal as T[keyof T];\n }\n }\n\n return result;\n}\n\n/**\n * Load Cloister configuration\n *\n * Reads from ~/.panopticon/cloister.toml and merges with defaults.\n * Creates default config file if it doesn't exist.\n */\nexport function loadCloisterConfig(): CloisterConfig {\n // Ensure panopticon home exists\n if (!existsSync(PANOPTICON_HOME)) {\n mkdirSync(PANOPTICON_HOME, { recursive: true });\n }\n\n // If config file doesn't exist, create it with defaults\n if (!existsSync(CLOISTER_CONFIG_FILE)) {\n saveCloisterConfig(DEFAULT_CLOISTER_CONFIG);\n return DEFAULT_CLOISTER_CONFIG;\n }\n\n try {\n const content = readFileSync(CLOISTER_CONFIG_FILE, 'utf-8');\n const parsed = parse(content) as unknown as Partial<CloisterConfig>;\n\n // Deep merge with defaults\n return deepMerge(DEFAULT_CLOISTER_CONFIG, parsed);\n } catch (error) {\n console.error('Failed to load Cloister config:', error);\n console.error('Using default configuration');\n return DEFAULT_CLOISTER_CONFIG;\n }\n}\n\n/**\n * Save Cloister configuration\n *\n * Writes configuration to ~/.panopticon/cloister.toml\n */\nexport function saveCloisterConfig(config: CloisterConfig): void {\n // Ensure panopticon home exists\n if (!existsSync(PANOPTICON_HOME)) {\n mkdirSync(PANOPTICON_HOME, { recursive: true });\n }\n\n try {\n const content = stringify(config as any);\n writeFileSync(CLOISTER_CONFIG_FILE, content, 
'utf-8');\n } catch (error) {\n console.error('Failed to save Cloister config:', error);\n throw error;\n }\n}\n\n/**\n * Update Cloister configuration\n *\n * Merges partial config updates with existing config.\n */\nexport function updateCloisterConfig(updates: Partial<CloisterConfig>): CloisterConfig {\n const current = loadCloisterConfig();\n const updated = deepMerge(current, updates);\n saveCloisterConfig(updated);\n return updated;\n}\n\n/**\n * Get the path to the Cloister config file\n */\nexport function getCloisterConfigPath(): string {\n return CLOISTER_CONFIG_FILE;\n}\n\n/**\n * Check if Cloister should auto-start\n */\nexport function shouldAutoStart(): boolean {\n const config = loadCloisterConfig();\n return config.startup.auto_start;\n}\n\n/**\n * Get health thresholds in milliseconds\n */\nexport function getHealthThresholdsMs(): {\n stale: number;\n warning: number;\n stuck: number;\n} {\n const config = loadCloisterConfig();\n return {\n stale: config.thresholds.stale * 60 * 1000,\n warning: config.thresholds.warning * 60 * 1000,\n stuck: config.thresholds.stuck * 60 * 1000,\n };\n}\n","/**\n * Agent CV (Work History) System\n *\n * Tracks agent performance over time to enable capability-based routing.\n */\n\nimport { existsSync, mkdirSync, readFileSync, writeFileSync, readdirSync } from 'fs';\nimport { join } from 'path';\nimport { AGENTS_DIR } from './paths.js';\n\nexport interface WorkEntry {\n issueId: string;\n startedAt: string;\n completedAt?: string;\n outcome: 'success' | 'failed' | 'abandoned' | 'in_progress';\n duration?: number; // minutes\n skills?: string[];\n failureReason?: string;\n commits?: number;\n linesChanged?: number;\n}\n\nexport interface AgentCV {\n agentId: string;\n createdAt: string;\n lastActive: string;\n runtime: string;\n model: string;\n stats: {\n totalIssues: number;\n successCount: number;\n failureCount: number;\n abandonedCount: number;\n avgDuration: number; // minutes\n successRate: number; // 0-1\n 
};\n skillsUsed: string[];\n recentWork: WorkEntry[];\n}\n\nfunction getCVFile(agentId: string): string {\n return join(AGENTS_DIR, agentId, 'cv.json');\n}\n\n/**\n * Get or create an agent's CV\n */\nexport function getAgentCV(agentId: string): AgentCV {\n const cvFile = getCVFile(agentId);\n\n if (existsSync(cvFile)) {\n try {\n return JSON.parse(readFileSync(cvFile, 'utf-8'));\n } catch {}\n }\n\n // Create new CV\n const cv: AgentCV = {\n agentId,\n createdAt: new Date().toISOString(),\n lastActive: new Date().toISOString(),\n runtime: 'claude',\n model: 'sonnet',\n stats: {\n totalIssues: 0,\n successCount: 0,\n failureCount: 0,\n abandonedCount: 0,\n avgDuration: 0,\n successRate: 0,\n },\n skillsUsed: [],\n recentWork: [],\n };\n\n saveAgentCV(cv);\n return cv;\n}\n\n/**\n * Save an agent's CV\n */\nexport function saveAgentCV(cv: AgentCV): void {\n const dir = join(AGENTS_DIR, cv.agentId);\n mkdirSync(dir, { recursive: true });\n writeFileSync(getCVFile(cv.agentId), JSON.stringify(cv, null, 2));\n}\n\n/**\n * Start tracking work for an agent\n */\nexport function startWork(agentId: string, issueId: string, skills?: string[]): void {\n const cv = getAgentCV(agentId);\n\n const entry: WorkEntry = {\n issueId,\n startedAt: new Date().toISOString(),\n outcome: 'in_progress',\n skills,\n };\n\n cv.recentWork.unshift(entry);\n cv.stats.totalIssues++;\n cv.lastActive = new Date().toISOString();\n\n // Track skills\n if (skills) {\n for (const skill of skills) {\n if (!cv.skillsUsed.includes(skill)) {\n cv.skillsUsed.push(skill);\n }\n }\n }\n\n // Keep only last 50 entries\n if (cv.recentWork.length > 50) {\n cv.recentWork = cv.recentWork.slice(0, 50);\n }\n\n saveAgentCV(cv);\n}\n\n/**\n * Complete work for an agent\n */\nexport function completeWork(\n agentId: string,\n issueId: string,\n outcome: 'success' | 'failed' | 'abandoned',\n details?: { commits?: number; linesChanged?: number; failureReason?: string }\n): void {\n const cv = getAgentCV(agentId);\n\n 
// Find the work entry\n const entry = cv.recentWork.find(\n (w) => w.issueId === issueId && w.outcome === 'in_progress'\n );\n\n if (entry) {\n entry.outcome = outcome;\n entry.completedAt = new Date().toISOString();\n entry.duration = Math.round(\n (new Date().getTime() - new Date(entry.startedAt).getTime()) / (1000 * 60)\n );\n if (details?.commits) entry.commits = details.commits;\n if (details?.linesChanged) entry.linesChanged = details.linesChanged;\n if (details?.failureReason) entry.failureReason = details.failureReason;\n }\n\n // Update stats\n if (outcome === 'success') {\n cv.stats.successCount++;\n } else if (outcome === 'failed') {\n cv.stats.failureCount++;\n } else if (outcome === 'abandoned') {\n cv.stats.abandonedCount++;\n }\n\n // Calculate success rate\n const completed = cv.stats.successCount + cv.stats.failureCount + cv.stats.abandonedCount;\n cv.stats.successRate = completed > 0 ? cv.stats.successCount / completed : 0;\n\n // Calculate average duration (only from completed work)\n const completedEntries = cv.recentWork.filter(\n (w) => w.duration !== undefined && w.outcome !== 'in_progress'\n );\n if (completedEntries.length > 0) {\n const totalDuration = completedEntries.reduce((sum, w) => sum + (w.duration || 0), 0);\n cv.stats.avgDuration = Math.round(totalDuration / completedEntries.length);\n }\n\n cv.lastActive = new Date().toISOString();\n saveAgentCV(cv);\n}\n\n/**\n * Get agent rankings by success rate\n */\nexport function getAgentRankings(): Array<{\n agentId: string;\n successRate: number;\n totalIssues: number;\n avgDuration: number;\n}> {\n const rankings: Array<{\n agentId: string;\n successRate: number;\n totalIssues: number;\n avgDuration: number;\n }> = [];\n\n if (!existsSync(AGENTS_DIR)) return rankings;\n\n const dirs = readdirSync(AGENTS_DIR, { withFileTypes: true }).filter(\n (d) => d.isDirectory()\n );\n\n for (const dir of dirs) {\n const cv = getAgentCV(dir.name);\n if (cv.stats.totalIssues > 0) {\n 
rankings.push({\n agentId: dir.name,\n successRate: cv.stats.successRate,\n totalIssues: cv.stats.totalIssues,\n avgDuration: cv.stats.avgDuration,\n });\n }\n }\n\n // Sort by success rate, then by total issues\n rankings.sort((a, b) => {\n if (b.successRate !== a.successRate) {\n return b.successRate - a.successRate;\n }\n return b.totalIssues - a.totalIssues;\n });\n\n return rankings;\n}\n\n/**\n * Format CV for display\n */\nexport function formatCV(cv: AgentCV): string {\n const lines: string[] = [\n `# Agent CV: ${cv.agentId}`,\n '',\n `Runtime: ${cv.runtime} (${cv.model})`,\n `Created: ${cv.createdAt}`,\n `Last Active: ${cv.lastActive}`,\n '',\n '## Statistics',\n '',\n `- Total Issues: ${cv.stats.totalIssues}`,\n `- Success Rate: ${(cv.stats.successRate * 100).toFixed(1)}%`,\n `- Successes: ${cv.stats.successCount}`,\n `- Failures: ${cv.stats.failureCount}`,\n `- Abandoned: ${cv.stats.abandonedCount}`,\n `- Avg Duration: ${cv.stats.avgDuration} minutes`,\n '',\n ];\n\n if (cv.skillsUsed.length > 0) {\n lines.push('## Skills Used');\n lines.push('');\n lines.push(cv.skillsUsed.join(', '));\n lines.push('');\n }\n\n if (cv.recentWork.length > 0) {\n lines.push('## Recent Work');\n lines.push('');\n\n for (const work of cv.recentWork.slice(0, 10)) {\n const statusIcon = {\n success: '✓',\n failed: '✗',\n abandoned: '⊘',\n in_progress: '●',\n }[work.outcome];\n\n const duration = work.duration ? 
` (${work.duration}m)` : '';\n lines.push(`${statusIcon} ${work.issueId}${duration}`);\n\n if (work.failureReason) {\n lines.push(` Reason: ${work.failureReason}`);\n }\n }\n lines.push('');\n }\n\n return lines.join('\\n');\n}\n","import { existsSync, mkdirSync, writeFileSync, readFileSync, readdirSync, appendFileSync, unlinkSync, statSync } from 'fs';\nimport { join, resolve } from 'path';\nimport { homedir } from 'os';\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport { AGENTS_DIR } from './paths.js';\nimport { createSession, killSession, sendKeys, sendKeysAsync, sessionExists, getAgentSessions, capturePane } from './tmux.js';\nimport { initHook, checkHook, generateFixedPointPrompt } from './hooks.js';\nimport { startWork, completeWork, getAgentCV } from './cv.js';\nimport type { ComplexityLevel } from './cloister/complexity.js';\nimport { loadCloisterConfig } from './cloister/config.js';\nimport type { ModelId } from './settings.js';\nimport { getModelId, WorkTypeId } from './work-type-router.js';\nimport { getProviderForModel, getProviderEnv, setupCredentialFileAuth, clearCredentialFileAuth, requiresRouter } from './providers.js';\nimport { loadConfig as loadYamlConfig } from './config-yaml.js';\nimport { loadConfig } from './config.js';\nimport { createTrackerFromConfig, createTracker } from './tracker/factory.js';\nimport type { IssueState } from './tracker/interface.js';\nimport { findProjectByPath, getIssuePrefix } from './projects.js';\n\nconst execAsync = promisify(exec);\n\n/** Known agent ID prefixes — IDs with these prefixes are already normalized */\nconst AGENT_PREFIXES = ['agent-', 'planning-'];\n\n/** Normalize agent ID: preserve known prefixes, add 'agent-' for bare issue IDs */\nfunction normalizeAgentId(agentId: string): string {\n if (AGENT_PREFIXES.some(p => agentId.startsWith(p))) {\n return agentId;\n }\n return `agent-${agentId.toLowerCase()}`;\n}\n\n/**\n * Get provider-specific env vars (BASE_URL, 
AUTH_TOKEN) for a model.\n * Reads the current API key from settings so resumed/recovered agents\n * always use the latest key.\n */\nexport function getProviderEnvForModel(model: string): Record<string, string> {\n const provider = getProviderForModel(model);\n if (provider.name === 'anthropic') return {};\n\n // OpenRouter API key is stored in config.yaml under providers.openrouter.api_key\n if (provider.name === 'openrouter') {\n const { config } = loadYamlConfig();\n const apiKey = config.apiKeys.openrouter;\n if (apiKey) {\n return getProviderEnv(provider, apiKey);\n }\n throw new Error(`OpenRouter API key not configured. Add your key in Settings → OpenRouter before using model \"${model}\".`);\n }\n\n const { config } = loadYamlConfig();\n const apiKey = config.apiKeys[provider.name as keyof typeof config.apiKeys];\n if (apiKey) {\n return getProviderEnv(provider, apiKey);\n }\n throw new Error(`No API key configured for ${provider.displayName}. Configure it in Settings before using model \"${model}\".`);\n}\n\n/**\n * Get bash export lines for provider env vars (for use in launcher scripts).\n * Returns empty string for Anthropic models.\n */\nexport function getProviderExportsForModel(model: string): string {\n const envVars = getProviderEnvForModel(model);\n if (Object.keys(envVars).length === 0) return '';\n return Object.entries(envVars)\n .map(([k, v]) => `export ${k}=\"${v.replace(/\"/g, '\\\\\"')}\"`)\n .join('\\n') + '\\n';\n}\n\n/**\n * Get tmux -e flags for provider env vars (for use in tmux new-session).\n * Returns empty string for Anthropic models.\n */\nexport function getProviderTmuxFlags(model: string): string {\n const envVars = getProviderEnvForModel(model);\n let flags = '';\n for (const [key, value] of Object.entries(envVars)) {\n flags += ` -e ${key}=\"${value.replace(/\"/g, '\\\\\"')}\"`;\n }\n return flags;\n}\n\n// ============================================================================\n// Ready Signal Management (PAN-87)\n// 
============================================================================\n\n/**\n * Get path to agent's ready signal file (written by SessionStart hook)\n */\nfunction getReadySignalPath(agentId: string): string {\n return join(getAgentDir(agentId), 'ready.json');\n}\n\n/**\n * Clear ready signal before spawning (clean slate)\n */\nfunction clearReadySignal(agentId: string): void {\n const readyPath = getReadySignalPath(agentId);\n if (existsSync(readyPath)) {\n try {\n unlinkSync(readyPath);\n } catch {\n // Ignore errors - non-critical\n }\n }\n}\n\n/**\n * Wait for SessionStart hook to signal ready (async - non-blocking)\n * Returns true if ready signal received, false if timeout\n */\nasync function waitForReadySignal(agentId: string, timeoutSeconds = 30): Promise<boolean> {\n const readyPath = getReadySignalPath(agentId);\n\n for (let i = 0; i < timeoutSeconds; i++) {\n await new Promise(resolve => setTimeout(resolve, 1000)); // Non-blocking sleep\n\n if (existsSync(readyPath)) {\n try {\n const content = readFileSync(readyPath, 'utf-8');\n const signal = JSON.parse(content);\n if (signal.ready === true) {\n return true;\n }\n } catch {\n // File exists but invalid - keep waiting\n }\n }\n }\n\n return false;\n}\n\nexport interface AgentState {\n id: string;\n issueId: string;\n workspace: string;\n runtime: string;\n model: string;\n status: 'starting' | 'running' | 'stopped' | 'error';\n startedAt: string;\n lastActivity?: string;\n branch?: string; // Git branch name for this agent\n\n // Model routing & handoffs (Phase 4)\n complexity?: ComplexityLevel;\n handoffCount?: number;\n costSoFar?: number;\n sessionId?: string; // For resuming sessions after handoff\n\n // Work type system (PAN-118)\n phase?: 'exploration' | 'implementation' | 'testing' | 'documentation' | 'review-response';\n workType?: WorkTypeId; // Current work type ID\n\n // SageOx session tracking (PAN-278)\n sageoxSessionPath?: string; // Path to SageOx session folder for parent 
linking\n}\n\nexport function getAgentDir(agentId: string): string {\n return join(AGENTS_DIR, agentId);\n}\n\nexport function getAgentState(agentId: string): AgentState | null {\n const stateFile = join(getAgentDir(agentId), 'state.json');\n if (!existsSync(stateFile)) return null;\n\n const content = readFileSync(stateFile, 'utf8');\n return JSON.parse(content);\n}\n\nexport function saveAgentState(state: AgentState): void {\n const dir = getAgentDir(state.id);\n mkdirSync(dir, { recursive: true });\n\n writeFileSync(\n join(dir, 'state.json'),\n JSON.stringify(state, null, 2)\n );\n}\n\n// ============================================================================\n// Hook-based State Management (PAN-80)\n// ============================================================================\n\n/**\n * Agent runtime state (hook-based tracking)\n */\nexport type AgentResolution = 'working' | 'done' | 'needs_input' | 'stuck' | 'completed' | 'unclear';\n\nexport interface AgentRuntimeState {\n state: 'active' | 'idle' | 'suspended' | 'stopped' | 'uninitialized';\n lastActivity: string;\n currentTool?: string;\n sessionId?: string;\n claudeSessionId?: string; // Claude Code session ID (written by heartbeat hook)\n suspendedAt?: string;\n resumedAt?: string;\n currentIssue?: string; // Issue ID the agent is currently working on\n resolution?: AgentResolution; // Lifecycle completion signal (PAN-309)\n resolutionCount?: number; // How many times this resolution was set\n resolutionUpdatedAt?: string; // When resolution was last updated\n}\n\n/**\n * Activity log entry\n */\nexport interface ActivityEntry {\n ts: string;\n tool: string;\n action?: string;\n state?: 'active' | 'idle';\n}\n\n/**\n * Get the path to an agent's runtime state file (separate from config state)\n */\nexport function getAgentRuntimeFile(agentId: string): string {\n return join(getAgentDir(agentId), 'runtime.json');\n}\n\n/**\n * Get agent runtime state (from hooks)\n *\n * Reads from runtime.json 
(new) with fallback to state.json (legacy migration).\n * This separation prevents bash hooks from corrupting AgentState config.\n */\nexport function getAgentRuntimeState(agentId: string): AgentRuntimeState | null {\n const runtimeFile = getAgentRuntimeFile(agentId);\n const stateFile = join(getAgentDir(agentId), 'state.json');\n\n // Try runtime.json first (new location)\n if (existsSync(runtimeFile)) {\n try {\n const content = readFileSync(runtimeFile, 'utf8');\n return JSON.parse(content) as AgentRuntimeState;\n } catch {\n // Fall through to legacy\n }\n }\n\n // Fallback to state.json (legacy — runtime fields were mixed in)\n if (existsSync(stateFile)) {\n try {\n const content = readFileSync(stateFile, 'utf8');\n const parsed = JSON.parse(content);\n // Only use if it has runtime-specific fields\n if (parsed.state && parsed.lastActivity) {\n return parsed as AgentRuntimeState;\n }\n } catch {\n // Ignore\n }\n }\n\n // No state at all — uninitialized\n if (!existsSync(stateFile) && !existsSync(runtimeFile)) {\n return {\n state: 'uninitialized',\n lastActivity: new Date().toISOString(),\n };\n }\n\n return null;\n}\n\n/**\n * Save agent runtime state to runtime.json (separate from AgentState config)\n *\n * This writes ONLY to runtime.json, never touching state.json.\n * This separation is critical: bash hooks write runtime.json on every tool call,\n * while AgentState in state.json is only written at lifecycle events (spawn/stop/handoff).\n */\nexport function saveAgentRuntimeState(agentId: string, state: Partial<AgentRuntimeState>): void {\n const dir = getAgentDir(agentId);\n mkdirSync(dir, { recursive: true });\n\n const runtimeFile = getAgentRuntimeFile(agentId);\n\n // Merge with existing runtime state (read from runtime.json only, not state.json)\n let existing: AgentRuntimeState | null = null;\n if (existsSync(runtimeFile)) {\n try {\n existing = JSON.parse(readFileSync(runtimeFile, 'utf8'));\n } catch {\n // Ignore corrupt file\n }\n }\n\n const 
merged: AgentRuntimeState = {\n ...(existing || { state: 'uninitialized', lastActivity: new Date().toISOString() }),\n ...state,\n };\n\n writeFileSync(runtimeFile, JSON.stringify(merged, null, 2));\n}\n\n/**\n * Append to activity log with automatic pruning to 100 entries\n */\nexport function appendActivity(agentId: string, entry: ActivityEntry): void {\n const dir = getAgentDir(agentId);\n mkdirSync(dir, { recursive: true });\n\n const activityFile = join(dir, 'activity.jsonl');\n\n // Append entry\n appendFileSync(activityFile, JSON.stringify(entry) + '\\n');\n\n // Prune to last 100 entries\n if (existsSync(activityFile)) {\n try {\n const lines = readFileSync(activityFile, 'utf8').trim().split('\\n');\n if (lines.length > 100) {\n const trimmed = lines.slice(-100);\n writeFileSync(activityFile, trimmed.join('\\n') + '\\n');\n }\n } catch (error) {\n // Ignore pruning errors - activity log is non-critical\n }\n }\n}\n\n/**\n * Read activity log (last N entries)\n */\nexport function getActivity(agentId: string, limit = 100): ActivityEntry[] {\n const activityFile = join(getAgentDir(agentId), 'activity.jsonl');\n\n if (!existsSync(activityFile)) {\n return [];\n }\n\n try {\n const lines = readFileSync(activityFile, 'utf8').trim().split('\\n');\n const entries = lines\n .filter(line => line.trim())\n .map(line => JSON.parse(line) as ActivityEntry)\n .slice(-limit);\n\n return entries;\n } catch {\n return [];\n }\n}\n\n/**\n * Save Claude session ID for later resume\n */\nexport function saveSessionId(agentId: string, sessionId: string): void {\n const dir = getAgentDir(agentId);\n mkdirSync(dir, { recursive: true });\n\n writeFileSync(join(dir, 'session.id'), sessionId);\n}\n\n/**\n * Get saved Claude session ID\n */\nexport function getSessionId(agentId: string): string | null {\n const sessionFile = join(getAgentDir(agentId), 'session.id');\n\n if (!existsSync(sessionFile)) {\n return null;\n }\n\n try {\n return readFileSync(sessionFile, 'utf8').trim();\n } 
catch {\n return null;\n }\n}\n\n/**\n * Get the latest Claude session ID from any available source.\n * Checks session.id first (written by suspend), then sessions.json (written by heartbeat hook),\n * then runtime.json claudeSessionId field.\n */\nexport function getLatestSessionId(agentId: string): string | null {\n // 1. session.id (written by auto-suspend)\n const fromSessionFile = getSessionId(agentId);\n if (fromSessionFile) return fromSessionFile;\n\n // 2. sessions.json (written by heartbeat hook — last entry is most recent)\n const sessionsFile = join(getAgentDir(agentId), 'sessions.json');\n try {\n if (existsSync(sessionsFile)) {\n const sessions = JSON.parse(readFileSync(sessionsFile, 'utf8'));\n if (Array.isArray(sessions) && sessions.length > 0) {\n return sessions[sessions.length - 1];\n }\n }\n } catch { /* non-fatal */ }\n\n // 3. runtime.json claudeSessionId\n const runtimeState = getAgentRuntimeState(agentId);\n if (runtimeState?.claudeSessionId) {\n return runtimeState.claudeSessionId;\n }\n\n return null;\n}\n\nexport interface SpawnOptions {\n issueId: string;\n workspace: string;\n runtime?: string;\n model?: string;\n prompt?: string;\n difficulty?: ComplexityLevel;\n agentType?: 'review-agent' | 'test-agent' | 'merge-agent' | 'work-agent';\n\n // Work type system (PAN-118)\n phase?: 'exploration' | 'implementation' | 'testing' | 'documentation' | 'review-response';\n workType?: WorkTypeId; // Explicit work type ID (overrides phase-based detection)\n}\n\n/**\n * Determine which model to use for an agent based on configuration\n *\n * New Priority (PAN-118):\n * 1. Explicitly provided model (options.model)\n * 2. Explicit work type ID (options.workType)\n * 3. Work type from phase (options.phase → issue-agent:{phase})\n * 4. Specialist work type (options.agentType → specialist-{type})\n * 5. Complexity-based routing (LEGACY - deprecated)\n * 6. 
Default fallback (claude-sonnet-4-6)\n */\nfunction determineModel(options: SpawnOptions): string {\n console.log(`[DEBUG] determineModel called with:`, { model: options.model, workType: options.workType, phase: options.phase, agentType: options.agentType, difficulty: options.difficulty });\n\n // Explicit model always wins\n if (options.model) {\n console.log(`[DEBUG] Using explicit model: ${options.model}`);\n return options.model;\n }\n\n try {\n // Use work type router if work type or phase specified\n if (options.workType) {\n return getModelId(options.workType);\n }\n\n // Map phase to work type ID\n if (options.phase) {\n const workType: WorkTypeId = `issue-agent:${options.phase}` as WorkTypeId;\n return getModelId(workType);\n }\n\n // Map specialist agent type to work type ID\n if (options.agentType && options.agentType !== 'work-agent') {\n // Specialists: review-agent, test-agent, merge-agent\n const workType: WorkTypeId = `specialist-${options.agentType}` as WorkTypeId;\n return getModelId(workType);\n }\n\n // LEGACY: Complexity-based routing removed — settings.json no longer exists.\n // All model routing goes through work-type-router via config.yaml.\n\n // Fall back to default model from Cloister config or claude-sonnet-4-6\n try {\n const cloisterConfig = loadCloisterConfig();\n const defaultModel = cloisterConfig.model_selection?.default_model || 'sonnet';\n const modelMap: Record<string, string> = {\n 'opus': 'claude-opus-4-6',\n 'sonnet': 'claude-sonnet-4-6',\n 'haiku': 'claude-haiku-4-5',\n };\n return modelMap[defaultModel] || 'claude-sonnet-4-6';\n } catch {\n return 'claude-sonnet-4-6';\n }\n } catch (error) {\n // If work type router fails, fall back to default\n console.warn('Warning: Could not resolve model using work type router, using default');\n return options.model || 'claude-sonnet-4-6';\n }\n}\n\n/**\n * Shared tracker resolution logic for issue state transitions.\n *\n * Resolution order (by project tracker type):\n * 1. 
github_repo → GitHub Issues (takes priority over issue_prefix, since projects\n * like panopticon-cli use GitHub Issues with a prefix, not Linear)\n * 2. rally_project → Rally\n * 3. issue_prefix (no github_repo) → Linear (covers gitlab+linear and pure-linear projects)\n * 4. gitlab_repo only → warn and skip (GitLab doesn't support label-based state transitions)\n *\n * Precedence rationale: issue_prefix was renamed from linear_team but is now also set on\n * GitHub-hosted projects (e.g. issue_prefix: PAN for panopticon-cli GitHub Issues).\n * github_repo must be checked first so GitHub projects don't misroute to Linear.\n */\nasync function transitionIssueState(issueId: string, state: IssueState, workspacePath?: string): Promise<void> {\n // Guard: bare numeric IDs (no alphabetic prefix, e.g. \"484\") must never reach\n // any tracker API. Linear's searchIssues(\"484\") would match MIN-484 in the wrong\n // team. Log a warning and skip — the workspace's project must use prefixed IDs.\n if (/^\\d+$/.test(issueId)) {\n console.warn(\n `[agents] Skipping ${state} transition for bare numeric ID \"${issueId}\" — ` +\n `issue IDs must include a project prefix (e.g. PAN-${issueId}). ` +\n `This workspace was likely created before the pan- prefix convention.`\n );\n return;\n }\n\n // Resolve the project from workspacePath — its configured tracker is authoritative.\n // Every issue MUST belong to a registered project with a tracker configured.\n const projectConfig = workspacePath ? findProjectByPath(workspacePath) : null;\n if (!projectConfig) {\n throw new Error(`Cannot transition ${issueId}: no project config found for workspace ${workspacePath || '(none)'}. Register the project in projects.yaml.`);\n }\n\n // Project has a GitHub repo — use GitHub Issues tracker.\n // Checked BEFORE issue_prefix because github_repo projects (e.g. 
panopticon-cli)\n // set issue_prefix for their GitHub Issue prefix (PAN-), not for Linear.\n if (projectConfig.github_repo) {\n const [owner, repo] = projectConfig.github_repo.split('/');\n const tracker = createTracker({ type: 'github', owner, repo });\n await tracker.transitionIssue(issueId, state);\n console.log(`[agents] Transitioned ${issueId} to ${state} via GitHub (${projectConfig.github_repo})`);\n return;\n }\n\n // Project has a Rally project — use Rally tracker\n if (projectConfig.rally_project) {\n const config = loadConfig();\n const trackersConfig = config.trackers;\n if (!trackersConfig?.rally) {\n throw new Error(`Project ${projectConfig.name} uses Rally (project: ${projectConfig.rally_project}) but no Rally tracker is configured in config.yaml`);\n }\n const tracker = createTrackerFromConfig(trackersConfig, 'rally');\n await tracker.transitionIssue(issueId, state);\n console.log(`[agents] Transitioned ${issueId} to ${state} via Rally (project: ${projectConfig.rally_project})`);\n return;\n }\n\n // Project has a Linear team prefix (and no github_repo) — use Linear tracker.\n // This covers: pure-Linear projects and gitlab+Linear projects (e.g. 
mind-your-now).\n if (getIssuePrefix(projectConfig)) {\n const config = loadConfig();\n const trackersConfig = config.trackers;\n if (!trackersConfig?.linear) {\n throw new Error(`Project ${projectConfig.name} uses Linear (team: ${getIssuePrefix(projectConfig)}) but no Linear tracker is configured in config.yaml`);\n }\n const tracker = createTrackerFromConfig(trackersConfig, 'linear');\n await tracker.transitionIssue(issueId, state);\n console.log(`[agents] Transitioned ${issueId} to ${state} via Linear (team: ${getIssuePrefix(projectConfig)})`);\n return;\n }\n\n if (projectConfig.gitlab_repo) {\n console.warn(`[agents] GitLab project detected (${projectConfig.gitlab_repo}) but GitLab does not support ${state} label transitions`);\n return;\n }\n\n throw new Error(`Project ${projectConfig.name} has no tracker configured (need issue_prefix, github_repo, or rally_project in projects.yaml)`);\n}\n\nexport async function transitionIssueToInProgress(issueId: string, workspacePath?: string): Promise<void> {\n return transitionIssueState(issueId, 'in_progress', workspacePath);\n}\n\n/**\n * Transitions an issue to \"in_review\" state in the configured issue tracker.\n * Fire-and-forget — logs warnings on failure but never blocks the pipeline.\n */\nexport async function transitionIssueToInReview(issueId: string, workspacePath?: string): Promise<void> {\n return transitionIssueState(issueId, 'in_review', workspacePath);\n}\n\nexport async function spawnAgent(options: SpawnOptions): Promise<AgentState> {\n const agentId = `agent-${options.issueId.toLowerCase()}`;\n\n // Check if already running\n if (sessionExists(agentId)) {\n throw new Error(`Agent ${agentId} already running. 
Use 'pan work tell' to message it.`);\n }\n\n // Initialize hook for this agent (FPP support)\n initHook(agentId);\n\n // Determine model based on configuration\n const selectedModel = determineModel(options);\n console.log(`[DEBUG] Selected model: ${selectedModel}`);\n\n // Create state\n const state: AgentState = {\n id: agentId,\n issueId: options.issueId,\n workspace: options.workspace,\n runtime: options.runtime || 'claude',\n model: selectedModel,\n status: 'starting',\n startedAt: new Date().toISOString(),\n // Initialize Phase 4 fields (legacy)\n complexity: options.difficulty,\n handoffCount: 0,\n costSoFar: 0,\n // Work type system (PAN-118)\n phase: options.phase,\n workType: options.workType,\n };\n\n saveAgentState(state);\n\n // Build prompt with FPP work if available\n let prompt = options.prompt || '';\n\n // FPP: Check for pending work on hook\n const { hasWork, items } = checkHook(agentId);\n if (hasWork) {\n const fixedPointPrompt = generateFixedPointPrompt(agentId);\n if (fixedPointPrompt) {\n prompt = fixedPointPrompt + '\\n\\n---\\n\\n' + prompt;\n }\n }\n\n // Write prompt to file for complex prompts (avoids shell escaping issues)\n const promptFile = join(getAgentDir(agentId), 'initial-prompt.md');\n if (prompt) {\n writeFileSync(promptFile, prompt);\n }\n\n // Auto-setup hooks if not configured\n checkAndSetupHooks();\n\n // Ensure TLDR daemon is running for the workspace (non-blocking, non-fatal)\n try {\n const venvPath = join(options.workspace, '.venv');\n if (existsSync(venvPath)) {\n const { getTldrDaemonService } = await import('./tldr-daemon.js');\n const tldrService = getTldrDaemonService(options.workspace, venvPath);\n const status = await tldrService.getStatus();\n if (!status.running) {\n await tldrService.start(true);\n console.log(`[${agentId}] Started TLDR daemon for workspace`);\n }\n }\n } catch {\n // Non-fatal — agents degrade to direct file reads if TLDR unavailable\n }\n\n // Write initial task cache for heartbeat hook\n 
writeTaskCache(agentId, options.issueId);\n\n // Clear ready signal before spawning (clean slate for PAN-87 fix)\n clearReadySignal(agentId);\n\n // Get provider-specific environment variables (BASE_URL, AUTH_TOKEN)\n const providerEnv = getProviderEnvForModel(selectedModel);\n\n // For credential-file providers (e.g. Kimi Code Plan), configure apiKeyHelper\n // so Claude Code can refresh short-lived tokens dynamically.\n // For all other providers, CLEAR any stale apiKeyHelper from previous runs\n // (e.g. switching from Kimi to Anthropic plan-based auth).\n const provider = getProviderForModel(selectedModel as ModelId);\n if (provider.authType === 'credential-file') {\n setupCredentialFileAuth(provider, options.workspace);\n } else {\n clearCredentialFileAuth(options.workspace);\n }\n\n // Create tmux session and start claude\n // For prompts with special shell characters, use a launcher script to safely pass the prompt\n // The script reads the file into a variable, which bash then safely expands\n let claudeCmd: string;\n if (prompt) {\n const launcherScript = join(getAgentDir(agentId), 'launcher.sh');\n const providerExports = getProviderExportsForModel(state.model);\n const launcherContent = `#!/bin/bash\n${providerExports}prompt=$(cat \"${promptFile}\")\nexec claude --dangerously-skip-permissions --model ${state.model} \"\\$prompt\"\n`;\n writeFileSync(launcherScript, launcherContent, { mode: 0o755 });\n claudeCmd = `bash \"${launcherScript}\"`;\n } else {\n claudeCmd = `claude --dangerously-skip-permissions --model ${state.model}`;\n }\n\n // Pre-trust workspace directory in Claude Code to avoid the trust prompt\n try {\n const { preTrustDirectory } = await import('./workspace-manager.js') as { preTrustDirectory: (dir: string) => void };\n preTrustDirectory(options.workspace);\n } catch { /* non-fatal */ }\n\n // Configure workspace for GitHub App bot identity (PAN-536)\n // Agents push as panopticon-agent[bot] with short-lived installation tokens\n try {\n 
const { isGitHubAppConfigured, generateInstallationToken, configureWorkspaceForBot } = await import('./github-app.js');\n if (isGitHubAppConfigured()) {\n const { findProjectByPath } = await import('./projects.js');\n const project = findProjectByPath(resolve(options.workspace, '..', '..'));\n const ghRepo = project?.github_repo;\n if (ghRepo) {\n const [owner, repo] = ghRepo.split('/');\n const { token } = await generateInstallationToken();\n await configureWorkspaceForBot(options.workspace, owner, repo, token);\n console.log(`[${agentId}] Configured workspace for bot push (panopticon-agent[bot])`);\n }\n }\n } catch (err: any) {\n console.warn(`[${agentId}] GitHub App config failed (falling back to SSH): ${err.message}`);\n }\n\n // Build SageOx environment variables for session linking (only if project is SageOx-initialized)\n // Derive project root from workspace path: <project-root>/workspaces/<branch>\n const projectRoot = resolve(options.workspace, '..', '..');\n const sageoxEnabled = existsSync(join(projectRoot, '.sageox'));\n const sageoxEnv: Record<string, string> = {};\n\n if (sageoxEnabled) {\n sageoxEnv.OX_PROJECT_ROOT = projectRoot;\n\n // Add issue tracking for multi-agent pipelines\n if (options.issueId) {\n sageoxEnv.PAN_ISSUE_ID = options.issueId;\n }\n if (options.phase) {\n sageoxEnv.PAN_PHASE = options.phase;\n }\n\n // For non-planner agents, find the planner's session path for parent linking\n if (options.phase && (options.phase as string) !== 'planning') {\n const plannerAgentId = `agent-${options.issueId.toLowerCase()}`;\n const plannerState = getAgentState(plannerAgentId);\n if (plannerState?.sageoxSessionPath) {\n sageoxEnv.PAN_PARENT_SESSION = plannerState.sageoxSessionPath;\n }\n }\n }\n\n createSession(agentId, options.workspace, claudeCmd, {\n env: {\n PANOPTICON_AGENT_ID: agentId,\n PANOPTICON_ISSUE_ID: options.issueId,\n PANOPTICON_SESSION_TYPE: options.phase || 'implementation',\n CLAUDE_CODE_ENABLE_PROMPT_SUGGESTION: 'false', // 
Disable suggested prompts for autonomous agents (PAN-251)\n ...providerEnv, // Add provider-specific env vars (BASE_URL, AUTH_TOKEN, etc.)\n ...sageoxEnv // Add SageOx environment variables\n }\n });\n\n // Update status\n state.status = 'running';\n saveAgentState(state);\n\n // Track work in CV\n startWork(agentId, options.issueId);\n\n // Transition issue tracker to \"in progress\" (best-effort, don't block agent spawn)\n // Only for work agents, not planning/specialist agents\n if (!options.agentType || options.agentType === 'work-agent') {\n transitionIssueToInProgress(options.issueId, options.workspace).catch((err) => {\n console.warn(`[agents] Could not transition ${options.issueId} to in_progress: ${err.message}`);\n });\n }\n\n // For planner agents, capture SageOx session path after it becomes available\n if (sageoxEnabled && (options.phase as string) === 'planning') {\n captureSageoxSessionPath(agentId, projectRoot).catch((err) => {\n console.warn(`[agents] Could not capture SageOx session path: ${err.message}`);\n });\n }\n\n return state;\n}\n\nexport function listRunningAgents(): (AgentState & { tmuxActive: boolean })[] {\n const tmuxSessions = getAgentSessions();\n const tmuxNames = new Set(tmuxSessions.map(s => s.name));\n\n const agents: (AgentState & { tmuxActive: boolean })[] = [];\n\n // Read all agent states\n if (!existsSync(AGENTS_DIR)) return agents;\n\n const dirs = readdirSync(AGENTS_DIR, { withFileTypes: true })\n .filter(d => d.isDirectory());\n\n for (const dir of dirs) {\n const state = getAgentState(dir.name);\n if (state) {\n agents.push({\n ...state,\n tmuxActive: tmuxNames.has(state.id),\n });\n }\n }\n\n return agents;\n}\n\n/**\n * Scan ~/.panopticon/agents/ for state files with bare numeric issueIds\n * (e.g. 
\"484\" instead of \"PAN-484\") and log warnings to stderr.\n *\n * These workspaces were created before the pan- prefix convention and may\n * cause cross-tracker pollution if their in_review transition is triggered.\n * Called once at server startup to surface legacy state files.\n */\nexport function warnOnBareNumericIssueIds(): void {\n if (!existsSync(AGENTS_DIR)) return;\n\n const dirs = readdirSync(AGENTS_DIR, { withFileTypes: true })\n .filter(d => d.isDirectory());\n\n const legacy: string[] = [];\n for (const dir of dirs) {\n const state = getAgentState(dir.name);\n if (state?.issueId && /^\\d+$/.test(state.issueId)) {\n legacy.push(`${dir.name} (issueId: \"${state.issueId}\")`);\n }\n }\n\n if (legacy.length > 0) {\n console.warn(\n `[agents] WARNING: ${legacy.length} agent state file(s) have bare numeric issueIds ` +\n `(created before the pan- prefix convention). These agents will not be able to ` +\n `transition tracker state. Consider removing or updating them:\\n` +\n legacy.map(l => ` ~/.panopticon/agents/${l}`).join('\\n')\n );\n }\n}\n\nexport function stopAgent(agentId: string): void {\n const normalizedId = normalizeAgentId(agentId);\n\n if (sessionExists(normalizedId)) {\n // Capture tmux output before killing so logs remain viewable after stop\n try {\n const output = capturePane(normalizedId, 5000);\n if (output) {\n const agentDir = getAgentDir(normalizedId);\n mkdirSync(agentDir, { recursive: true });\n writeFileSync(join(agentDir, 'output.log'), output);\n }\n } catch {\n // Non-fatal — best effort log capture\n }\n\n killSession(normalizedId);\n }\n\n const state = getAgentState(normalizedId);\n if (state) {\n // Ensure id is set — runtime state files may lack it (PAN-150)\n if (!state.id) state.id = normalizedId;\n\n state.status = 'stopped';\n saveAgentState(state);\n }\n\n // Also mark runtime.json as stopped so Cloister/Deacon won't auto-restart.\n // state.json and runtime.json are separate files — both must agree the agent\n // was 
intentionally stopped to prevent race conditions with health check polls.\n saveAgentRuntimeState(normalizedId, { state: 'stopped' });\n}\n\nexport async function messageAgent(agentId: string, message: string): Promise<void> {\n const normalizedId = normalizeAgentId(agentId);\n\n // Check if agent is suspended - auto-resume if so (PAN-80)\n const runtimeState = getAgentRuntimeState(normalizedId);\n if (runtimeState?.state === 'suspended') {\n console.log(`[agents] Auto-resuming suspended agent ${normalizedId} to deliver message`);\n const result = await resumeAgent(normalizedId, message);\n if (!result.success) {\n throw new Error(`Failed to auto-resume agent: ${result.error}`);\n }\n // Message already sent during resume\n return;\n }\n\n // Check if agent is stopped — auto-restart to deliver feedback (PAN-367)\n const agentState = getAgentState(normalizedId);\n if (agentState && agentState.status === 'stopped' && !sessionExists(normalizedId)) {\n console.log(`[agents] Auto-restarting stopped agent ${normalizedId} to deliver feedback`);\n\n const providerEnv = agentState.model ? getProviderEnvForModel(agentState.model) : {};\n if (agentState.model) {\n const provider = getProviderForModel(agentState.model as ModelId);\n if (provider.authType === 'credential-file') {\n setupCredentialFileAuth(provider, agentState.workspace);\n } else {\n clearCredentialFileAuth(agentState.workspace);\n }\n }\n\n clearReadySignal(normalizedId);\n const claudeCmd = `claude --dangerously-skip-permissions --model ${agentState.model || 'claude-sonnet-4-6'} \"You are resuming work on ${agentState.issueId}. 
Check .planning/feedback/ for specialist feedback that arrived while you were stopped, then continue working.\"`;\n createSession(normalizedId, agentState.workspace, claudeCmd, {\n env: {\n PANOPTICON_AGENT_ID: normalizedId,\n PANOPTICON_ISSUE_ID: agentState.issueId || '',\n PANOPTICON_SESSION_TYPE: agentState.phase || 'implementation',\n CLAUDE_CODE_ENABLE_PROMPT_SUGGESTION: 'false',\n ...providerEnv\n }\n });\n\n agentState.status = 'running';\n agentState.lastActivity = new Date().toISOString();\n saveAgentState(agentState);\n\n // Wait for ready, then deliver the message\n const ready = await waitForReadySignal(normalizedId, 30);\n if (ready) {\n await sendKeysAsync(normalizedId, message);\n console.log(`[agents] Restarted ${normalizedId} and delivered feedback`);\n } else {\n console.warn(`[agents] Restarted ${normalizedId} but ready signal not detected — feedback in mail queue`);\n }\n\n // Save to mail queue regardless\n const mailDir = join(getAgentDir(normalizedId), 'mail');\n mkdirSync(mailDir, { recursive: true });\n const timestamp = new Date().toISOString().replace(/[:.]/g, '-');\n writeFileSync(\n join(mailDir, `${timestamp}.md`),\n `# Message\\n\\n${message}\\n`\n );\n return;\n }\n\n // Check if this is a remote agent\n const { loadRemoteAgentState, sendToRemoteAgent } = await import('./remote/remote-agents.js');\n const remoteState = loadRemoteAgentState(normalizedId);\n if (remoteState && remoteState.vmName) {\n console.log(`[agents] Sending message to remote agent ${normalizedId} on ${remoteState.vmName}`);\n await sendToRemoteAgent(normalizedId, remoteState.vmName, message);\n\n // Also save to mail queue for persistence\n const mailDir = join(getAgentDir(normalizedId), 'mail');\n mkdirSync(mailDir, { recursive: true });\n const timestamp = new Date().toISOString().replace(/[:.]/g, '-');\n writeFileSync(\n join(mailDir, `${timestamp}.md`),\n `# Message\\n\\n${message}\\n`\n );\n return;\n }\n\n if (!sessionExists(normalizedId)) {\n throw new 
Error(`Agent ${normalizedId} not running`);\n }\n\n await sendKeysAsync(normalizedId, message);\n\n // Also save to mail queue\n const mailDir = join(getAgentDir(normalizedId), 'mail');\n mkdirSync(mailDir, { recursive: true });\n\n const timestamp = new Date().toISOString().replace(/[:.]/g, '-');\n writeFileSync(\n join(mailDir, `${timestamp}.md`),\n `# Message\\n\\n${message}\\n`\n );\n}\n\n/**\n * Resume a suspended agent (PAN-80)\n *\n * Reads saved session ID and creates new tmux session with --resume flag.\n * Optionally sends a message after resuming.\n *\n * Auto-resume triggers:\n * - Specialists: When queued work arrives\n * - Work agents: When message is sent via /work-tell\n */\nexport async function resumeAgent(agentId: string, message?: string): Promise<{ success: boolean; error?: string }> {\n const normalizedId = normalizeAgentId(agentId);\n\n // Check runtime state — allow both suspended (auto-suspend) and stopped/idle (manual stop, crash)\n const runtimeState = getAgentRuntimeState(normalizedId);\n const agentState = getAgentState(normalizedId);\n const allowedRuntimeStates = ['suspended', 'idle'];\n const allowedAgentStatuses = ['stopped', 'completed'];\n\n // Also allow resuming a \"running\" agent with no live tmux session — this happens after\n // a system crash where tmux was killed but state.json was never updated to 'stopped'.\n const isCrashed = agentState?.status === 'running' && !sessionExists(normalizedId);\n\n const canResume = (runtimeState && allowedRuntimeStates.includes(runtimeState.state))\n || (agentState && allowedAgentStatuses.includes(agentState.status))\n || isCrashed;\n\n if (!canResume) {\n return {\n success: false,\n error: `Cannot resume agent in state: runtime=${runtimeState?.state || 'unknown'}, status=${agentState?.status || 'unknown'}`\n };\n }\n\n // Get saved session ID from any available source\n const sessionId = getLatestSessionId(normalizedId);\n if (!sessionId) {\n return {\n success: false,\n error: 'No saved 
session ID found'\n };\n }\n\n // Verify agent state exists (already fetched above for status check)\n if (!agentState) {\n return {\n success: false,\n error: 'Agent state not found'\n };\n }\n\n // Kill any zombie tmux session (crashed agent left behind)\n if (sessionExists(normalizedId)) {\n try {\n killSession(normalizedId);\n } catch { /* non-fatal */ }\n }\n\n // Remove completed marker so the agent can work again\n const completedFile = join(getAgentDir(normalizedId), 'completed');\n if (existsSync(completedFile)) {\n try { unlinkSync(completedFile); } catch { /* non-fatal */ }\n }\n\n try {\n // Clear ready signal before resuming (clean slate for PAN-87 fix)\n clearReadySignal(normalizedId);\n\n // Get provider env for the agent's model (reads latest API key from settings)\n const providerEnv = agentState.model ? getProviderEnvForModel(agentState.model) : {};\n\n // For credential-file providers, ensure apiKeyHelper is configured.\n // For all other providers, clear stale apiKeyHelper from previous runs.\n if (agentState.model) {\n const provider = getProviderForModel(agentState.model as ModelId);\n if (provider.authType === 'credential-file') {\n setupCredentialFileAuth(provider, agentState.workspace);\n } else {\n clearCredentialFileAuth(agentState.workspace);\n }\n }\n\n // Create new tmux session with resume command\n const claudeCmd = `claude --resume \"${sessionId}\" --dangerously-skip-permissions`;\n createSession(normalizedId, agentState.workspace, claudeCmd, {\n env: {\n PANOPTICON_AGENT_ID: normalizedId,\n PANOPTICON_ISSUE_ID: agentState.issueId || '',\n PANOPTICON_SESSION_TYPE: agentState.phase || 'implementation',\n CLAUDE_CODE_ENABLE_PROMPT_SUGGESTION: 'false',\n ...providerEnv\n }\n });\n\n // If there's a message, wait for ready signal then send\n if (message) {\n // Wait for SessionStart hook to signal ready (PAN-87: reliable message delivery)\n const ready = await waitForReadySignal(normalizedId, 30);\n\n if (ready) {\n // Send message\n 
await sendKeysAsync(normalizedId, message);\n } else {\n console.error('Claude SessionStart hook did not fire during resume, message not sent');\n }\n }\n\n // Update runtime state\n saveAgentRuntimeState(normalizedId, {\n state: 'active',\n resumedAt: new Date().toISOString(),\n });\n\n // Update agent state\n if (agentState) {\n agentState.status = 'running';\n agentState.lastActivity = new Date().toISOString();\n saveAgentState(agentState);\n }\n\n return { success: true };\n } catch (error: unknown) {\n const msg = error instanceof Error ? error.message : String(error);\n return {\n success: false,\n error: `Failed to resume agent: ${msg}`\n };\n }\n}\n\n/**\n * Detect crashed agents (state shows running but tmux session is gone)\n */\nexport function detectCrashedAgents(): AgentState[] {\n const agents = listRunningAgents();\n return agents.filter(\n (agent) => agent.status === 'running' && !agent.tmuxActive\n );\n}\n\n/**\n * Recover a crashed agent by restarting it with context\n */\nexport function recoverAgent(agentId: string): AgentState | null {\n const normalizedId = normalizeAgentId(agentId);\n const state = getAgentState(normalizedId);\n\n if (!state) {\n return null;\n }\n\n // Runtime state files may lack required fields (PAN-150)\n if (!state.id) state.id = normalizedId;\n if (!state.workspace || !state.model) {\n console.error(`[agents] Cannot recover ${normalizedId}: state.json missing workspace or model`);\n return null;\n }\n\n // Check if already running\n if (sessionExists(normalizedId)) {\n return state;\n }\n\n // Update crash count in health file\n const healthFile = join(getAgentDir(normalizedId), 'health.json');\n let health = { consecutiveFailures: 0, killCount: 0, recoveryCount: 0 };\n if (existsSync(healthFile)) {\n try {\n health = { ...health, ...JSON.parse(readFileSync(healthFile, 'utf-8')) };\n } catch {}\n }\n health.recoveryCount = (health.recoveryCount || 0) + 1;\n writeFileSync(healthFile, JSON.stringify(health, null, 2));\n\n 
// Build recovery prompt\n const recoveryPrompt = generateRecoveryPrompt(state);\n\n // Get provider env for the agent's model (reads latest API key from settings)\n const providerEnv = state.model ? getProviderEnvForModel(state.model) : {};\n\n // For credential-file providers, ensure apiKeyHelper is configured.\n // For all other providers, clear stale apiKeyHelper from previous runs.\n if (state.model) {\n const provider = getProviderForModel(state.model as ModelId);\n if (provider.authType === 'credential-file') {\n setupCredentialFileAuth(provider, state.workspace);\n } else {\n clearCredentialFileAuth(state.workspace);\n }\n }\n\n // Restart the agent with recovery context (YOLO mode - skip permissions)\n const claudeCmd = `claude --dangerously-skip-permissions --model ${state.model} \"${recoveryPrompt.replace(/\"/g, '\\\\\"').replace(/\\n/g, '\\\\n')}\"`;\n createSession(normalizedId, state.workspace, claudeCmd, {\n env: {\n PANOPTICON_AGENT_ID: normalizedId,\n PANOPTICON_ISSUE_ID: state.issueId || '',\n PANOPTICON_SESSION_TYPE: state.phase || 'implementation',\n CLAUDE_CODE_ENABLE_PROMPT_SUGGESTION: 'false',\n ...providerEnv\n }\n });\n\n // Update state\n state.status = 'running';\n state.lastActivity = new Date().toISOString();\n saveAgentState(state);\n\n return state;\n}\n\n/**\n * Generate a recovery prompt for a crashed agent\n */\nfunction generateRecoveryPrompt(state: AgentState): string {\n const lines: string[] = [\n '# Agent Recovery',\n '',\n '⚠️ This agent session was recovered after a crash.',\n '',\n '## Previous Context',\n `- Issue: ${state.issueId}`,\n `- Workspace: ${state.workspace}`,\n `- Started: ${state.startedAt}`,\n '',\n '## Recovery Steps',\n '1. Check beads for context: `bd show ' + state.issueId + '`',\n '2. Review recent git commits: `git log --oneline -10`',\n '3. Check hook for pending work: `pan work hook check`',\n '4. 
Resume from last known state',\n '',\n '## FPP Reminder',\n '> \"Any runnable action is a fixed point and must resolve before the system can rest.\"',\n '',\n ];\n\n // Add FPP work if available\n const { hasWork } = checkHook(state.id);\n if (hasWork) {\n const fixedPointPrompt = generateFixedPointPrompt(state.id);\n if (fixedPointPrompt) {\n lines.push('---');\n lines.push('');\n lines.push(fixedPointPrompt);\n }\n }\n\n return lines.join('\\n');\n}\n\n/**\n * Auto-recover all crashed agents\n */\nexport function autoRecoverAgents(): { recovered: string[]; failed: string[] } {\n const crashed = detectCrashedAgents();\n const recovered: string[] = [];\n const failed: string[] = [];\n\n for (const agent of crashed) {\n try {\n const result = recoverAgent(agent.id);\n if (result) {\n recovered.push(agent.id);\n } else {\n failed.push(agent.id);\n }\n } catch (error) {\n failed.push(agent.id);\n }\n }\n\n return { recovered, failed };\n}\n\n/**\n * Check if Panopticon hooks are configured, and auto-setup if not\n */\nfunction checkAndSetupHooks(): void {\n const settingsPath = join(homedir(), '.claude', 'settings.json');\n const hookPath = join(homedir(), '.panopticon', 'bin', 'heartbeat-hook');\n\n // Check if settings.json exists and has heartbeat hook configured\n if (existsSync(settingsPath)) {\n try {\n const settingsContent = readFileSync(settingsPath, 'utf-8');\n const settings = JSON.parse(settingsContent);\n const postToolUse = settings?.hooks?.PostToolUse || [];\n\n const hookConfigured = postToolUse.some((hookConfig: any) =>\n hookConfig.hooks?.some((hook: any) =>\n hook.command === hookPath ||\n hook.command?.includes('panopticon') ||\n hook.command?.includes('heartbeat-hook')\n )\n );\n\n if (hookConfigured) {\n return; // Already configured\n }\n } catch {\n // Ignore errors, will attempt setup\n }\n }\n\n // Hooks not configured - run setup silently\n try {\n console.log('Configuring Panopticon heartbeat hooks...');\n // Note: This runs during spawn 
which is now async, so we can use execAsync\n // But this is called from a sync context in checkAndSetupHooks, so we use fire-and-forget\n exec('pan setup hooks', (error: Error | null) => {\n if (error) {\n console.warn('⚠ Failed to auto-configure hooks. Run `pan setup hooks` manually.');\n } else {\n console.log('✓ Heartbeat hooks configured');\n }\n });\n } catch (error) {\n console.warn('⚠ Failed to auto-configure hooks. Run `pan setup hooks` manually.');\n }\n}\n\n/**\n * Write task cache for heartbeat hook to use\n */\nfunction writeTaskCache(agentId: string, issueId: string): void {\n const cacheDir = join(getAgentDir(agentId));\n mkdirSync(cacheDir, { recursive: true });\n\n const cacheFile = join(cacheDir, 'current-task.json');\n writeFileSync(\n cacheFile,\n JSON.stringify({\n id: issueId,\n title: `Working on ${issueId}`,\n updated_at: new Date().toISOString()\n }, null, 2)\n );\n}\n\n/**\n * Capture SageOx session path for a planner agent.\n * This is used for parent-child session linking in multi-agent pipelines.\n * Subsequent agents (worker, reviewer, tester, merger) will use this path\n * as their PAN_PARENT_SESSION to link their sessions to the planner's session.\n */\nasync function captureSageoxSessionPath(agentId: string, projectRoot: string): Promise<void> {\n // Wait for SageOx session to be created by the hook (up to 10 seconds)\n const sessionsDir = join(projectRoot, '.sageox', 'sessions');\n let attempts = 0;\n const maxAttempts = 20;\n const delayMs = 500;\n\n while (attempts < maxAttempts) {\n // Check if sessions directory exists\n if (existsSync(sessionsDir)) {\n // Find the most recent session directory for this agent\n const sessions = readdirSync(sessionsDir, { withFileTypes: true })\n .filter(d => d.isDirectory())\n .map(d => ({\n name: d.name,\n path: join(sessionsDir, d.name),\n mtime: existsSync(join(sessionsDir, d.name, '.recording.json'))\n ? 
readFileSync(join(sessionsDir, d.name, '.recording.json'), 'utf-8')\n : null\n }))\n .filter(s => {\n // Check if this session belongs to our agent\n if (!s.mtime) return false;\n try {\n const state = JSON.parse(s.mtime);\n return state.agent_id === agentId || state.AgentID === agentId;\n } catch {\n return false;\n }\n })\n .sort((a, b) => {\n // Sort by modification time (newest first)\n const aTime = existsSync(join(a.path, '.recording.json'))\n ? (statSync(join(a.path, '.recording.json')).mtimeMs || 0)\n : 0;\n const bTime = existsSync(join(b.path, '.recording.json'))\n ? (statSync(join(b.path, '.recording.json')).mtimeMs || 0)\n : 0;\n return bTime - aTime;\n });\n\n if (sessions.length > 0) {\n // Update agent state with SageOx session path\n const state = getAgentState(agentId);\n if (state) {\n state.sageoxSessionPath = sessions[0].path;\n saveAgentState(state);\n console.log(`[agents] Captured SageOx session path for ${agentId}: ${sessions[0].path}`);\n return;\n }\n }\n }\n\n // Wait before retrying\n await new Promise(resolve => setTimeout(resolve, delayMs));\n attempts++;\n }\n\n throw new Error(`Could not find SageOx session for ${agentId} after ${maxAttempts * 
delayMs}ms`);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;AA2RA,SAAS,UAA4B,UAAa,WAA0B;CAC1E,MAAM,SAAS,EAAE,GAAG,UAAU;AAE9B,MAAK,MAAM,OAAO,OAAO,KAAK,UAAU,EAAiB;EACvD,MAAM,aAAa,SAAS;EAC5B,MAAM,cAAc,UAAU;AAG9B,MAAI,gBAAgB,KAAA,EAAW;AAG/B,MACE,OAAO,eAAe,YACtB,eAAe,QACf,CAAC,MAAM,QAAQ,WAAW,IAC1B,OAAO,gBAAgB,YACvB,gBAAgB,QAChB,CAAC,MAAM,QAAQ,YAAY,CAE3B,QAAO,OAAO,UAAU,YAAmB,YAAmB;MAG9D,QAAO,OAAO;;AAIlB,QAAO;;;;;;;;AAST,SAAgB,qBAAqC;AAEnD,KAAI,CAAC,WAAW,gBAAgB,CAC9B,WAAU,iBAAiB,EAAE,WAAW,MAAM,CAAC;AAIjD,KAAI,CAAC,WAAW,qBAAqB,EAAE;AACrC,qBAAmB,wBAAwB;AAC3C,SAAO;;AAGT,KAAI;AAKF,SAAO,UAAU,0BAAA,GAAA,YAAA,OAJD,aAAa,sBAAsB,QAAQ,CAC9B,CAGoB;UAC1C,OAAO;AACd,UAAQ,MAAM,mCAAmC,MAAM;AACvD,UAAQ,MAAM,8BAA8B;AAC5C,SAAO;;;;;;;;AASX,SAAgB,mBAAmB,QAA8B;AAE/D,KAAI,CAAC,WAAW,gBAAgB,CAC9B,WAAU,iBAAiB,EAAE,WAAW,MAAM,CAAC;AAGjD,KAAI;AAEF,gBAAc,uBAAA,GAAA,YAAA,WADY,OAAc,EACK,QAAQ;UAC9C,OAAO;AACd,UAAQ,MAAM,mCAAmC,MAAM;AACvD,QAAM;;;;;;AA0BV,SAAgB,kBAA2B;AAEzC,QADe,oBAAoB,CACrB,QAAQ;;;;;AAMxB,SAAgB,wBAId;CACA,MAAM,SAAS,oBAAoB;AACnC,QAAO;EACL,OAAO,OAAO,WAAW,QAAQ,KAAK;EACtC,SAAS,OAAO,WAAW,UAAU,KAAK;EAC1C,OAAO,OAAO,WAAW,QAAQ,KAAK;EACvC;;;;;aA1Y2C;AAExC,wBAAuB,KAAK,iBAAiB,gBAAgB;AA6KtD,2BAA0C;EACrD,SAAS,EACP,YAAY,MACb;EACD,YAAY;GACV,OAAO;GACP,SAAS;GACT,OAAO;GACR;EACD,cAAc;GACZ,iBAAiB;GACjB,eAAe;GACf,eAAe;GACf,iBAAiB;GACjB,kBAAkB,OAAU;GAC7B;EACD,YAAY;GACV,gBAAgB;GAChB,mBAAmB;IAAC;IAAe;IAAiB;IAAe;GACpE;EACD,eAAe;GACb,eAAe,KAAA;GACf,OAAO,KAAA;GACR;EACD,aAAa;GACX,aAAa;IACX,SAAS;IACT,WAAW;IACZ;GACD,cAAc;IACZ,SAAS;IACT,WAAW;IACZ;GACD,YAAY;IACV,SAAS;IACT,WAAW;IACZ;GACF;EACD,iBAAiB;GACf,eAAe;GACf,oBAAoB;IAClB,SAAS;IACT,QAAQ;IACR,QAAQ;IACR,SAAS;IACT,QAAQ;IACT;GACD,mBAAmB;IACjB,aAAa;IACb,cAAc;IACd,YAAY;IACb;GACF;EACD,UAAU,EACR,eAAe;GACb,kBAAkB;IAChB,SAAS;IACT,yBAAyB;IACzB,wBAAwB;IACzB;GACD,cAAc;IACZ,SAAS;IACT,YAAY;IACZ,UAAU;IACV,YAAY;IACb;GACD,yBAAyB;IACvB,SAAS;IACT,eAAe;IAChB;GACF,EACF;EACD,eAAe;GACb,iBAAiB;GACjB,cAAc;GACf;EACD,cAAc;GACZ,SAAS;GACT,aAAa;GACb,iBAAiB;IAAC;IAAI;IAAI;IAAI;GAC/B;EACD,aAAa;GACX,eAAe;
GACf,eAAe;GACf,iBAAiB;GACjB,iBAAiB;GAClB;EACD,WAAW;GACT,kBAAkB;GAClB,wBAAwB;GACzB;EACF;;;;;;;;;AC3OD,SAAS,UAAU,SAAyB;AAC1C,QAAO,KAAK,YAAY,SAAS,UAAU;;;;;AAM7C,SAAgB,WAAW,SAA0B;CACnD,MAAM,SAAS,UAAU,QAAQ;AAEjC,KAAI,WAAW,OAAO,CACpB,KAAI;AACF,SAAO,KAAK,MAAM,aAAa,QAAQ,QAAQ,CAAC;SAC1C;CAIV,MAAM,KAAc;EAClB;EACA,4BAAW,IAAI,MAAM,EAAC,aAAa;EACnC,6BAAY,IAAI,MAAM,EAAC,aAAa;EACpC,SAAS;EACT,OAAO;EACP,OAAO;GACL,aAAa;GACb,cAAc;GACd,cAAc;GACd,gBAAgB;GAChB,aAAa;GACb,aAAa;GACd;EACD,YAAY,EAAE;EACd,YAAY,EAAE;EACf;AAED,aAAY,GAAG;AACf,QAAO;;;;;AAMT,SAAgB,YAAY,IAAmB;AAE7C,WADY,KAAK,YAAY,GAAG,QAAQ,EACzB,EAAE,WAAW,MAAM,CAAC;AACnC,eAAc,UAAU,GAAG,QAAQ,EAAE,KAAK,UAAU,IAAI,MAAM,EAAE,CAAC;;;;;AAMnE,SAAgB,UAAU,SAAiB,SAAiB,QAAyB;CACnF,MAAM,KAAK,WAAW,QAAQ;CAE9B,MAAM,QAAmB;EACvB;EACA,4BAAW,IAAI,MAAM,EAAC,aAAa;EACnC,SAAS;EACT;EACD;AAED,IAAG,WAAW,QAAQ,MAAM;AAC5B,IAAG,MAAM;AACT,IAAG,8BAAa,IAAI,MAAM,EAAC,aAAa;AAGxC,KAAI;OACG,MAAM,SAAS,OAClB,KAAI,CAAC,GAAG,WAAW,SAAS,MAAM,CAChC,IAAG,WAAW,KAAK,MAAM;;AAM/B,KAAI,GAAG,WAAW,SAAS,GACzB,IAAG,aAAa,GAAG,WAAW,MAAM,GAAG,GAAG;AAG5C,aAAY,GAAG;;;aA/GuB;;;;;ACkBxC,SAAS,iBAAiB,SAAyB;AACjD,KAAI,eAAe,MAAK,MAAK,QAAQ,WAAW,EAAE,CAAC,CACjD,QAAO;AAET,QAAO,SAAS,QAAQ,aAAa;;;;;;;AAQvC,SAAgB,uBAAuB,OAAuC;CAC5E,MAAM,WAAW,oBAAoB,MAAM;AAC3C,KAAI,SAAS,SAAS,YAAa,QAAO,EAAE;AAG5C,KAAI,SAAS,SAAS,cAAc;EAClC,MAAM,EAAE,WAAWA,YAAgB;EACnC,MAAM,SAAS,OAAO,QAAQ;AAC9B,MAAI,OACF,QAAO,eAAe,UAAU,OAAO;AAEzC,QAAM,IAAI,MAAM,gGAAgG,MAAM,IAAI;;CAG5H,MAAM,EAAE,WAAWA,YAAgB;CACnC,MAAM,SAAS,OAAO,QAAQ,SAAS;AACvC,KAAI,OACF,QAAO,eAAe,UAAU,OAAO;AAEzC,OAAM,IAAI,MAAM,6BAA6B,SAAS,YAAY,iDAAiD,MAAM,IAAI;;;;;;AAO/H,SAAgB,2BAA2B,OAAuB;CAChE,MAAM,UAAU,uBAAuB,MAAM;AAC7C,KAAI,OAAO,KAAK,QAAQ,CAAC,WAAW,EAAG,QAAO;AAC9C,QAAO,OAAO,QAAQ,QAAQ,CAC3B,KAAK,CAAC,GAAG,OAAO,UAAU,EAAE,IAAI,EAAE,QAAQ,MAAM,OAAM,CAAC,GAAG,CAC1D,KAAK,KAAK,GAAG;;;;;;AAOlB,SAAgB,qBAAqB,OAAuB;CAC1D,MAAM,UAAU,uBAAuB,MAAM;CAC7C,IAAI,QAAQ;AACZ,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,QAAQ,CAChD,UAAS,OAAO,IAAI,IAAI,MAAM,QAAQ,MAAM,OAAM,CAAC;AAErD,QAAO;;;;;AAUT,SAAS,mBAAmB,S
AAyB;AACnD,QAAO,KAAK,YAAY,QAAQ,EAAE,aAAa;;;;;AAMjD,SAAS,iBAAiB,SAAuB;CAC/C,MAAM,YAAY,mBAAmB,QAAQ;AAC7C,KAAI,WAAW,UAAU,CACvB,KAAI;AACF,aAAW,UAAU;SACf;;;;;;AAUZ,eAAe,mBAAmB,SAAiB,iBAAiB,IAAsB;CACxF,MAAM,YAAY,mBAAmB,QAAQ;AAE7C,MAAK,IAAI,IAAI,GAAG,IAAI,gBAAgB,KAAK;AACvC,QAAM,IAAI,SAAQ,YAAW,WAAW,SAAS,IAAK,CAAC;AAEvD,MAAI,WAAW,UAAU,CACvB,KAAI;GACF,MAAM,UAAU,aAAa,WAAW,QAAQ;AAEhD,OADe,KAAK,MAAM,QAAQ,CACvB,UAAU,KACnB,QAAO;UAEH;;AAMZ,QAAO;;AA4BT,SAAgB,YAAY,SAAyB;AACnD,QAAO,KAAK,YAAY,QAAQ;;AAGlC,SAAgB,cAAc,SAAoC;CAChE,MAAM,YAAY,KAAK,YAAY,QAAQ,EAAE,aAAa;AAC1D,KAAI,CAAC,WAAW,UAAU,CAAE,QAAO;CAEnC,MAAM,UAAU,aAAa,WAAW,OAAO;AAC/C,QAAO,KAAK,MAAM,QAAQ;;AAG5B,SAAgB,eAAe,OAAyB;CACtD,MAAM,MAAM,YAAY,MAAM,GAAG;AACjC,WAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAEnC,eACE,KAAK,KAAK,aAAa,EACvB,KAAK,UAAU,OAAO,MAAM,EAAE,CAC/B;;;;;AAuCH,SAAgB,oBAAoB,SAAyB;AAC3D,QAAO,KAAK,YAAY,QAAQ,EAAE,eAAe;;;;;;;;AASnD,SAAgB,qBAAqB,SAA2C;CAC9E,MAAM,cAAc,oBAAoB,QAAQ;CAChD,MAAM,YAAY,KAAK,YAAY,QAAQ,EAAE,aAAa;AAG1D,KAAI,WAAW,YAAY,CACzB,KAAI;EACF,MAAM,UAAU,aAAa,aAAa,OAAO;AACjD,SAAO,KAAK,MAAM,QAAQ;SACpB;AAMV,KAAI,WAAW,UAAU,CACvB,KAAI;EACF,MAAM,UAAU,aAAa,WAAW,OAAO;EAC/C,MAAM,SAAS,KAAK,MAAM,QAAQ;AAElC,MAAI,OAAO,SAAS,OAAO,aACzB,QAAO;SAEH;AAMV,KAAI,CAAC,WAAW,UAAU,IAAI,CAAC,WAAW,YAAY,CACpD,QAAO;EACL,OAAO;EACP,+BAAc,IAAI,MAAM,EAAC,aAAa;EACvC;AAGH,QAAO;;;;;;;;;AAUT,SAAgB,sBAAsB,SAAiB,OAAyC;AAE9F,WADY,YAAY,QAAQ,EACjB,EAAE,WAAW,MAAM,CAAC;CAEnC,MAAM,cAAc,oBAAoB,QAAQ;CAGhD,IAAI,WAAqC;AACzC,KAAI,WAAW,YAAY,CACzB,KAAI;AACF,aAAW,KAAK,MAAM,aAAa,aAAa,OAAO,CAAC;SAClD;CAKV,MAAM,SAA4B;EAChC,GAAI,YAAY;GAAE,OAAO;GAAiB,+BAAc,IAAI,MAAM,EAAC,aAAa;GAAE;EAClF,GAAG;EACJ;AAED,eAAc,aAAa,KAAK,UAAU,QAAQ,MAAM,EAAE,CAAC;;;;;AAM7D,SAAgB,eAAe,SAAiB,OAA4B;CAC1E,MAAM,MAAM,YAAY,QAAQ;AAChC,WAAU,KAAK,EAAE,WAAW,MAAM,CAAC;CAEnC,MAAM,eAAe,KAAK,KAAK,iBAAiB;AAGhD,gBAAe,cAAc,KAAK,UAAU,MAAM,GAAG,KAAK;AAG1D,KAAI,WAAW,aAAa,CAC1B,KAAI;EACF,MAAM,QAAQ,aAAa,cAAc,OAAO,CAAC,MAAM,CAAC,MAAM,KAAK;AACnE,MAAI,MAAM,SAAS,IAEjB,eAAc,cADE,MAAM,MAAM,KAAK,CACG,KAAK,KAAK,GAAG,KAAK;UAEjD,OAAO;;;;;A
ASpB,SAAgB,YAAY,SAAiB,QAAQ,KAAsB;CACzE,MAAM,eAAe,KAAK,YAAY,QAAQ,EAAE,iBAAiB;AAEjE,KAAI,CAAC,WAAW,aAAa,CAC3B,QAAO,EAAE;AAGX,KAAI;AAOF,SANc,aAAa,cAAc,OAAO,CAAC,MAAM,CAAC,MAAM,KAAK,CAEhE,QAAO,SAAQ,KAAK,MAAM,CAAC,CAC3B,KAAI,SAAQ,KAAK,MAAM,KAAK,CAAkB,CAC9C,MAAM,CAAC,MAAM;SAGV;AACN,SAAO,EAAE;;;;;;AAOb,SAAgB,cAAc,SAAiB,WAAyB;CACtE,MAAM,MAAM,YAAY,QAAQ;AAChC,WAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAEnC,eAAc,KAAK,KAAK,aAAa,EAAE,UAAU;;;;;AAMnD,SAAgB,aAAa,SAAgC;CAC3D,MAAM,cAAc,KAAK,YAAY,QAAQ,EAAE,aAAa;AAE5D,KAAI,CAAC,WAAW,YAAY,CAC1B,QAAO;AAGT,KAAI;AACF,SAAO,aAAa,aAAa,OAAO,CAAC,MAAM;SACzC;AACN,SAAO;;;;;;;;AASX,SAAgB,mBAAmB,SAAgC;CAEjE,MAAM,kBAAkB,aAAa,QAAQ;AAC7C,KAAI,gBAAiB,QAAO;CAG5B,MAAM,eAAe,KAAK,YAAY,QAAQ,EAAE,gBAAgB;AAChE,KAAI;AACF,MAAI,WAAW,aAAa,EAAE;GAC5B,MAAM,WAAW,KAAK,MAAM,aAAa,cAAc,OAAO,CAAC;AAC/D,OAAI,MAAM,QAAQ,SAAS,IAAI,SAAS,SAAS,EAC/C,QAAO,SAAS,SAAS,SAAS;;SAGhC;CAGR,MAAM,eAAe,qBAAqB,QAAQ;AAClD,KAAI,cAAc,gBAChB,QAAO,aAAa;AAGtB,QAAO;;;;;;;;;;;;;AA4BT,SAAS,eAAe,SAA+B;AACrD,SAAQ,IAAI,uCAAuC;EAAE,OAAO,QAAQ;EAAO,UAAU,QAAQ;EAAU,OAAO,QAAQ;EAAO,WAAW,QAAQ;EAAW,YAAY,QAAQ;EAAY,CAAC;AAG5L,KAAI,QAAQ,OAAO;AACjB,UAAQ,IAAI,iCAAiC,QAAQ,QAAQ;AAC7D,SAAO,QAAQ;;AAGjB,KAAI;AAEF,MAAI,QAAQ,SACV,QAAO,WAAW,QAAQ,SAAS;AAIrC,MAAI,QAAQ,MAEV,QAAO,WADsB,eAAe,QAAQ,QACzB;AAI7B,MAAI,QAAQ,aAAa,QAAQ,cAAc,aAG7C,QAAO,WADsB,cAAc,QAAQ,YACxB;AAO7B,MAAI;GAEF,MAAM,eADiB,oBAAoB,CACP,iBAAiB,iBAAiB;AAMtE,UALyC;IACvC,QAAQ;IACR,UAAU;IACV,SAAS;IACV,CACe,iBAAiB;UAC3B;AACN,UAAO;;UAEF,OAAO;AAEd,UAAQ,KAAK,yEAAyE;AACtF,SAAO,QAAQ,SAAS;;;;;;;;;;;;;;;;;AAkB5B,eAAe,qBAAqB,SAAiB,OAAmB,eAAuC;AAI7G,KAAI,QAAQ,KAAK,QAAQ,EAAE;AACzB,UAAQ,KACN,qBAAqB,MAAM,mCAAmC,QAAQ,wDACjB,QAAQ,yEAE9D;AACD;;CAKF,MAAM,gBAAgB,gBAAgB,kBAAkB,cAAc,GAAG;AACzE,KAAI,CAAC,cACH,OAAM,IAAI,MAAM,qBAAqB,QAAQ,0CAA0C,iBAAiB,SAAS,0CAA0C;AAM7J,KAAI,cAAc,aAAa;EAC7B,MAAM,CAAC,OAAO,QAAQ,cAAc,YAAY,MAAM,IAAI;AAE1D,QADgB,cAAc;GAAE,MAAM;GAAU;GAAO;GAAM,CAAC,CAChD,gBAAgB,SAAS,MAAM;AAC7C,UAAQ,IAAI,yBAAyB,QAAQ,MAAM,MAAM,eAAe,cAAc,YAAY,GAAG;AACrG;;AAIF,KAAI,cAAc,eAAe;EAE/B,MAAM,iBADSC,c
AAY,CACG;AAC9B,MAAI,CAAC,gBAAgB,MACnB,OAAM,IAAI,MAAM,WAAW,cAAc,KAAK,wBAAwB,cAAc,cAAc,qDAAqD;AAGzJ,QADgB,wBAAwB,gBAAgB,QAAQ,CAClD,gBAAgB,SAAS,MAAM;AAC7C,UAAQ,IAAI,yBAAyB,QAAQ,MAAM,MAAM,uBAAuB,cAAc,cAAc,GAAG;AAC/G;;AAKF,KAAI,eAAe,cAAc,EAAE;EAEjC,MAAM,iBADSA,cAAY,CACG;AAC9B,MAAI,CAAC,gBAAgB,OACnB,OAAM,IAAI,MAAM,WAAW,cAAc,KAAK,sBAAsB,eAAe,cAAc,CAAC,sDAAsD;AAG1J,QADgB,wBAAwB,gBAAgB,SAAS,CACnD,gBAAgB,SAAS,MAAM;AAC7C,UAAQ,IAAI,yBAAyB,QAAQ,MAAM,MAAM,qBAAqB,eAAe,cAAc,CAAC,GAAG;AAC/G;;AAGF,KAAI,cAAc,aAAa;AAC7B,UAAQ,KAAK,qCAAqC,cAAc,YAAY,gCAAgC,MAAM,oBAAoB;AACtI;;AAGF,OAAM,IAAI,MAAM,WAAW,cAAc,KAAK,gGAAgG;;AAGhJ,eAAsB,4BAA4B,SAAiB,eAAuC;AACxG,QAAO,qBAAqB,SAAS,eAAe,cAAc;;;;;;AAOpE,eAAsB,0BAA0B,SAAiB,eAAuC;AACtG,QAAO,qBAAqB,SAAS,aAAa,cAAc;;AAGlE,eAAsB,WAAW,SAA4C;CAC3E,MAAM,UAAU,SAAS,QAAQ,QAAQ,aAAa;AAGtD,KAAI,cAAc,QAAQ,CACxB,OAAM,IAAI,MAAM,SAAS,QAAQ,sDAAsD;AAIzF,UAAS,QAAQ;CAGjB,MAAM,gBAAgB,eAAe,QAAQ;AAC7C,SAAQ,IAAI,2BAA2B,gBAAgB;CAGvD,MAAM,QAAoB;EACxB,IAAI;EACJ,SAAS,QAAQ;EACjB,WAAW,QAAQ;EACnB,SAAS,QAAQ,WAAW;EAC5B,OAAO;EACP,QAAQ;EACR,4BAAW,IAAI,MAAM,EAAC,aAAa;EAEnC,YAAY,QAAQ;EACpB,cAAc;EACd,WAAW;EAEX,OAAO,QAAQ;EACf,UAAU,QAAQ;EACnB;AAED,gBAAe,MAAM;CAGrB,IAAI,SAAS,QAAQ,UAAU;CAG/B,MAAM,EAAE,SAAS,UAAU,UAAU,QAAQ;AAC7C,KAAI,SAAS;EACX,MAAM,mBAAmB,yBAAyB,QAAQ;AAC1D,MAAI,iBACF,UAAS,mBAAmB,gBAAgB;;CAKhD,MAAM,aAAa,KAAK,YAAY,QAAQ,EAAE,oBAAoB;AAClE,KAAI,OACF,eAAc,YAAY,OAAO;AAInC,qBAAoB;AAGpB,KAAI;EACF,MAAM,WAAW,KAAK,QAAQ,WAAW,QAAQ;AACjD,MAAI,WAAW,SAAS,EAAE;GACxB,MAAM,EAAE,yBAAyB,MAAM,OAAO;GAC9C,MAAM,cAAc,qBAAqB,QAAQ,WAAW,SAAS;AAErE,OAAI,EADW,MAAM,YAAY,WAAW,EAChC,SAAS;AACnB,UAAM,YAAY,MAAM,KAAK;AAC7B,YAAQ,IAAI,IAAI,QAAQ,qCAAqC;;;SAG3D;AAKR,gBAAe,SAAS,QAAQ,QAAQ;AAGxC,kBAAiB,QAAQ;CAGzB,MAAM,cAAc,uBAAuB,cAAc;CAMzD,MAAM,WAAW,oBAAoB,cAAyB;AAC9D,KAAI,SAAS,aAAa,kBACxB,yBAAwB,UAAU,QAAQ,UAAU;KAEpD,yBAAwB,QAAQ,UAAU;CAM5C,IAAI;AACJ,KAAI,QAAQ;EACV,MAAM,iBAAiB,KAAK,YAAY,QAAQ,EAAE,cAAc;AAMhE,gBAAc,gBAJU;EADA,2BAA2B,MAAM,MAAM,CAEjD,gBAAgB,WAAW;qDACQ,MAAM,MAAM;GAEd,EAAE,MAAM,KAAO,CAAC;AAC/D,cAAY,SAAS,eAAe;OAEpC,
aAAY,iDAAiD,MAAM;AAIrE,KAAI;EACF,MAAM,EAAE,sBAAsB,MAAM,OAAO;AAC3C,oBAAkB,QAAQ,UAAU;SAC9B;AAIR,KAAI;EACF,MAAM,EAAE,uBAAuB,2BAA2B,6BAA6B,MAAM,OAAO;AACpG,MAAI,uBAAuB,EAAE;GAC3B,MAAM,EAAE,sBAAsB,MAAM,OAAO;GAE3C,MAAM,SADU,kBAAkB,QAAQ,QAAQ,WAAW,MAAM,KAAK,CAAC,EACjD;AACxB,OAAI,QAAQ;IACV,MAAM,CAAC,OAAO,QAAQ,OAAO,MAAM,IAAI;IACvC,MAAM,EAAE,UAAU,MAAM,2BAA2B;AACnD,UAAM,yBAAyB,QAAQ,WAAW,OAAO,MAAM,MAAM;AACrE,YAAQ,IAAI,IAAI,QAAQ,6DAA6D;;;UAGlF,KAAU;AACjB,UAAQ,KAAK,IAAI,QAAQ,oDAAoD,IAAI,UAAU;;CAK7F,MAAM,cAAc,QAAQ,QAAQ,WAAW,MAAM,KAAK;CAC1D,MAAM,gBAAgB,WAAW,KAAK,aAAa,UAAU,CAAC;CAC9D,MAAM,YAAoC,EAAE;AAE5C,KAAI,eAAe;AACjB,YAAU,kBAAkB;AAG5B,MAAI,QAAQ,QACV,WAAU,eAAe,QAAQ;AAEnC,MAAI,QAAQ,MACV,WAAU,YAAY,QAAQ;AAIhC,MAAI,QAAQ,SAAU,QAAQ,UAAqB,YAAY;GAE7D,MAAM,eAAe,cADE,SAAS,QAAQ,QAAQ,aAAa,GACX;AAClD,OAAI,cAAc,kBAChB,WAAU,qBAAqB,aAAa;;;AAKlD,eAAc,SAAS,QAAQ,WAAW,WAAW,EACnD,KAAK;EACH,qBAAqB;EACrB,qBAAqB,QAAQ;EAC7B,yBAAyB,QAAQ,SAAS;EAC1C,sCAAsC;EACtC,GAAG;EACH,GAAG;EACJ,EACF,CAAC;AAGF,OAAM,SAAS;AACf,gBAAe,MAAM;AAGrB,WAAU,SAAS,QAAQ,QAAQ;AAInC,KAAI,CAAC,QAAQ,aAAa,QAAQ,cAAc,aAC9C,6BAA4B,QAAQ,SAAS,QAAQ,UAAU,CAAC,OAAO,QAAQ;AAC7E,UAAQ,KAAK,iCAAiC,QAAQ,QAAQ,mBAAmB,IAAI,UAAU;GAC/F;AAIJ,KAAI,iBAAkB,QAAQ,UAAqB,WACjD,0BAAyB,SAAS,YAAY,CAAC,OAAO,QAAQ;AAC5D,UAAQ,KAAK,mDAAmD,IAAI,UAAU;GAC9E;AAGJ,QAAO;;AAGT,SAAgB,oBAA8D;CAC5E,MAAM,eAAe,kBAAkB;CACvC,MAAM,YAAY,IAAI,IAAI,aAAa,KAAI,MAAK,EAAE,KAAK,CAAC;CAExD,MAAM,SAAmD,EAAE;AAG3D,KAAI,CAAC,WAAW,WAAW,CAAE,QAAO;CAEpC,MAAM,OAAO,YAAY,YAAY,EAAE,eAAe,MAAM,CAAC,CAC1D,QAAO,MAAK,EAAE,aAAa,CAAC;AAE/B,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,QAAQ,cAAc,IAAI,KAAK;AACrC,MAAI,MACF,QAAO,KAAK;GACV,GAAG;GACH,YAAY,UAAU,IAAI,MAAM,GAAG;GACpC,CAAC;;AAIN,QAAO;;;;;;;;;;AAWT,SAAgB,4BAAkC;AAChD,KAAI,CAAC,WAAW,WAAW,CAAE;CAE7B,MAAM,OAAO,YAAY,YAAY,EAAE,eAAe,MAAM,CAAC,CAC1D,QAAO,MAAK,EAAE,aAAa,CAAC;CAE/B,MAAM,SAAmB,EAAE;AAC3B,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,QAAQ,cAAc,IAAI,KAAK;AACrC,MAAI,OAAO,WAAW,QAAQ,KAAK,MAAM,QAAQ,CAC/C,QAAO,KAAK,GAAG,IAAI,KAAK,cAAc,MAAM,QAAQ,IAAI;;AAI5D,KAAI,OAAO,SAAS,EAClB,SAAQ,KACN,qBA
AqB,OAAO,OAAO,iMAGnC,OAAO,KAAI,MAAK,0BAA0B,IAAI,CAAC,KAAK,KAAK,CAC1D;;AAIL,SAAgB,UAAU,SAAuB;CAC/C,MAAM,eAAe,iBAAiB,QAAQ;AAE9C,KAAI,cAAc,aAAa,EAAE;AAE/B,MAAI;GACF,MAAM,SAAS,YAAY,cAAc,IAAK;AAC9C,OAAI,QAAQ;IACV,MAAM,WAAW,YAAY,aAAa;AAC1C,cAAU,UAAU,EAAE,WAAW,MAAM,CAAC;AACxC,kBAAc,KAAK,UAAU,aAAa,EAAE,OAAO;;UAE/C;AAIR,cAAY,aAAa;;CAG3B,MAAM,QAAQ,cAAc,aAAa;AACzC,KAAI,OAAO;AAET,MAAI,CAAC,MAAM,GAAI,OAAM,KAAK;AAE1B,QAAM,SAAS;AACf,iBAAe,MAAM;;AAMvB,uBAAsB,cAAc,EAAE,OAAO,WAAW,CAAC;;AAG3D,eAAsB,aAAa,SAAiB,SAAgC;CAClF,MAAM,eAAe,iBAAiB,QAAQ;AAI9C,KADqB,qBAAqB,aAAa,EACrC,UAAU,aAAa;AACvC,UAAQ,IAAI,0CAA0C,aAAa,qBAAqB;EACxF,MAAM,SAAS,MAAM,YAAY,cAAc,QAAQ;AACvD,MAAI,CAAC,OAAO,QACV,OAAM,IAAI,MAAM,gCAAgC,OAAO,QAAQ;AAGjE;;CAIF,MAAM,aAAa,cAAc,aAAa;AAC9C,KAAI,cAAc,WAAW,WAAW,aAAa,CAAC,cAAc,aAAa,EAAE;AACjF,UAAQ,IAAI,0CAA0C,aAAa,sBAAsB;EAEzF,MAAM,cAAc,WAAW,QAAQ,uBAAuB,WAAW,MAAM,GAAG,EAAE;AACpF,MAAI,WAAW,OAAO;GACpB,MAAM,WAAW,oBAAoB,WAAW,MAAiB;AACjE,OAAI,SAAS,aAAa,kBACxB,yBAAwB,UAAU,WAAW,UAAU;OAEvD,yBAAwB,WAAW,UAAU;;AAIjD,mBAAiB,aAAa;EAC9B,MAAM,YAAY,iDAAiD,WAAW,SAAS,oBAAoB,6BAA6B,WAAW,QAAQ;AAC3J,gBAAc,cAAc,WAAW,WAAW,WAAW,EAC3D,KAAK;GACH,qBAAqB;GACrB,qBAAqB,WAAW,WAAW;GAC3C,yBAAyB,WAAW,SAAS;GAC7C,sCAAsC;GACtC,GAAG;GACJ,EACF,CAAC;AAEF,aAAW,SAAS;AACpB,aAAW,gCAAe,IAAI,MAAM,EAAC,aAAa;AAClD,iBAAe,WAAW;AAI1B,MADc,MAAM,mBAAmB,cAAc,GAAG,EAC7C;AACT,SAAM,cAAc,cAAc,QAAQ;AAC1C,WAAQ,IAAI,sBAAsB,aAAa,yBAAyB;QAExE,SAAQ,KAAK,sBAAsB,aAAa,yDAAyD;EAI3G,MAAM,UAAU,KAAK,YAAY,aAAa,EAAE,OAAO;AACvD,YAAU,SAAS,EAAE,WAAW,MAAM,CAAC;AAEvC,gBACE,KAAK,SAAS,oBAFE,IAAI,MAAM,EAAC,aAAa,CAAC,QAAQ,SAAS,IAAI,CAEnC,KAAK,EAChC,gBAAgB,QAAQ,IACzB;AACD;;CAIF,MAAM,EAAE,sBAAsB,sBAAsB,MAAM,OAAO;CACjE,MAAM,cAAc,qBAAqB,aAAa;AACtD,KAAI,eAAe,YAAY,QAAQ;AACrC,UAAQ,IAAI,4CAA4C,aAAa,MAAM,YAAY,SAAS;AAChG,QAAM,kBAAkB,cAAc,YAAY,QAAQ,QAAQ;EAGlE,MAAM,UAAU,KAAK,YAAY,aAAa,EAAE,OAAO;AACvD,YAAU,SAAS,EAAE,WAAW,MAAM,CAAC;AAEvC,gBACE,KAAK,SAAS,oBAFE,IAAI,MAAM,EAAC,aAAa,CAAC,QAAQ,SAAS,IAAI,CAEnC,KAAK,EAChC,gBAAgB,QAAQ,IACzB;AACD;;AAGF,KAAI,CAAC,cAAc,aAAa,CAC9B,OAAM,I
AAI,MAAM,SAAS,aAAa,cAAc;AAGtD,OAAM,cAAc,cAAc,QAAQ;CAG1C,MAAM,UAAU,KAAK,YAAY,aAAa,EAAE,OAAO;AACvD,WAAU,SAAS,EAAE,WAAW,MAAM,CAAC;AAGvC,eACE,KAAK,SAAS,oBAFE,IAAI,MAAM,EAAC,aAAa,CAAC,QAAQ,SAAS,IAAI,CAEnC,KAAK,EAChC,gBAAgB,QAAQ,IACzB;;;;;;;;;;;;AAaH,eAAsB,YAAY,SAAiB,SAAiE;CAClH,MAAM,eAAe,iBAAiB,QAAQ;CAG9C,MAAM,eAAe,qBAAqB,aAAa;CACvD,MAAM,aAAa,cAAc,aAAa;CAC9C,MAAM,uBAAuB,CAAC,aAAa,OAAO;CAClD,MAAM,uBAAuB,CAAC,WAAW,YAAY;CAIrD,MAAM,YAAY,YAAY,WAAW,aAAa,CAAC,cAAc,aAAa;AAMlF,KAAI,EAJe,gBAAgB,qBAAqB,SAAS,aAAa,MAAM,IAC9E,cAAc,qBAAqB,SAAS,WAAW,OAAO,IAC/D,WAGH,QAAO;EACL,SAAS;EACT,OAAO,yCAAyC,cAAc,SAAS,UAAU,WAAW,YAAY,UAAU;EACnH;CAIH,MAAM,YAAY,mBAAmB,aAAa;AAClD,KAAI,CAAC,UACH,QAAO;EACL,SAAS;EACT,OAAO;EACR;AAIH,KAAI,CAAC,WACH,QAAO;EACL,SAAS;EACT,OAAO;EACR;AAIH,KAAI,cAAc,aAAa,CAC7B,KAAI;AACF,cAAY,aAAa;SACnB;CAIV,MAAM,gBAAgB,KAAK,YAAY,aAAa,EAAE,YAAY;AAClE,KAAI,WAAW,cAAc,CAC3B,KAAI;AAAE,aAAW,cAAc;SAAU;AAG3C,KAAI;AAEF,mBAAiB,aAAa;EAG9B,MAAM,cAAc,WAAW,QAAQ,uBAAuB,WAAW,MAAM,GAAG,EAAE;AAIpF,MAAI,WAAW,OAAO;GACpB,MAAM,WAAW,oBAAoB,WAAW,MAAiB;AACjE,OAAI,SAAS,aAAa,kBACxB,yBAAwB,UAAU,WAAW,UAAU;OAEvD,yBAAwB,WAAW,UAAU;;EAKjD,MAAM,YAAY,oBAAoB,UAAU;AAChD,gBAAc,cAAc,WAAW,WAAW,WAAW,EAC3D,KAAK;GACH,qBAAqB;GACrB,qBAAqB,WAAW,WAAW;GAC3C,yBAAyB,WAAW,SAAS;GAC7C,sCAAsC;GACtC,GAAG;GACJ,EACF,CAAC;AAGF,MAAI,QAIF,KAFc,MAAM,mBAAmB,cAAc,GAAG,CAItD,OAAM,cAAc,cAAc,QAAQ;MAE1C,SAAQ,MAAM,wEAAwE;AAK1F,wBAAsB,cAAc;GAClC,OAAO;GACP,4BAAW,IAAI,MAAM,EAAC,aAAa;GACpC,CAAC;AAGF,MAAI,YAAY;AACd,cAAW,SAAS;AACpB,cAAW,gCAAe,IAAI,MAAM,EAAC,aAAa;AAClD,kBAAe,WAAW;;AAG5B,SAAO,EAAE,SAAS,MAAM;UACjB,OAAgB;AAEvB,SAAO;GACL,SAAS;GACT,OAAO,2BAHG,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;GAIjE;;;;;;AAOL,SAAgB,sBAAoC;AAElD,QADe,mBAAmB,CACpB,QACX,UAAU,MAAM,WAAW,aAAa,CAAC,MAAM,WACjD;;;;;AAMH,SAAgB,aAAa,SAAoC;CAC/D,MAAM,eAAe,iBAAiB,QAAQ;CAC9C,MAAM,QAAQ,cAAc,aAAa;AAEzC,KAAI,CAAC,MACH,QAAO;AAIT,KAAI,CAAC,MAAM,GAAI,OAAM,KAAK;AAC1B,KAAI,CAAC,MAAM,aAAa,CAAC,MAAM,OAAO;AACpC,UAAQ,MAAM,2BAA2B,aAAa,yCAAyC;AAC/F,SAAO;;AAIT,KAAI,cAAc,aAAa,CAC7B,QAAO;CAIT,MAAM,aAAa,KAAK,Y
AAY,aAAa,EAAE,cAAc;CACjE,IAAI,SAAS;EAAE,qBAAqB;EAAG,WAAW;EAAG,eAAe;EAAG;AACvE,KAAI,WAAW,WAAW,CACxB,KAAI;AACF,WAAS;GAAE,GAAG;GAAQ,GAAG,KAAK,MAAM,aAAa,YAAY,QAAQ,CAAC;GAAE;SAClE;AAEV,QAAO,iBAAiB,OAAO,iBAAiB,KAAK;AACrD,eAAc,YAAY,KAAK,UAAU,QAAQ,MAAM,EAAE,CAAC;CAG1D,MAAM,iBAAiB,uBAAuB,MAAM;CAGpD,MAAM,cAAc,MAAM,QAAQ,uBAAuB,MAAM,MAAM,GAAG,EAAE;AAI1E,KAAI,MAAM,OAAO;EACf,MAAM,WAAW,oBAAoB,MAAM,MAAiB;AAC5D,MAAI,SAAS,aAAa,kBACxB,yBAAwB,UAAU,MAAM,UAAU;MAElD,yBAAwB,MAAM,UAAU;;CAK5C,MAAM,YAAY,iDAAiD,MAAM,MAAM,IAAI,eAAe,QAAQ,MAAM,OAAM,CAAC,QAAQ,OAAO,MAAM,CAAC;AAC7I,eAAc,cAAc,MAAM,WAAW,WAAW,EACtD,KAAK;EACH,qBAAqB;EACrB,qBAAqB,MAAM,WAAW;EACtC,yBAAyB,MAAM,SAAS;EACxC,sCAAsC;EACtC,GAAG;EACJ,EACF,CAAC;AAGF,OAAM,SAAS;AACf,OAAM,gCAAe,IAAI,MAAM,EAAC,aAAa;AAC7C,gBAAe,MAAM;AAErB,QAAO;;;;;AAMT,SAAS,uBAAuB,OAA2B;CACzD,MAAM,QAAkB;EACtB;EACA;EACA;EACA;EACA;EACA,YAAY,MAAM;EAClB,gBAAgB,MAAM;EACtB,cAAc,MAAM;EACpB;EACA;EACA,0CAA0C,MAAM,UAAU;EAC1D;EACA;EACA;EACA;EACA;EACA;EACA;EACD;CAGD,MAAM,EAAE,YAAY,UAAU,MAAM,GAAG;AACvC,KAAI,SAAS;EACX,MAAM,mBAAmB,yBAAyB,MAAM,GAAG;AAC3D,MAAI,kBAAkB;AACpB,SAAM,KAAK,MAAM;AACjB,SAAM,KAAK,GAAG;AACd,SAAM,KAAK,iBAAiB;;;AAIhC,QAAO,MAAM,KAAK,KAAK;;;;;AAMzB,SAAgB,oBAA+D;CAC7E,MAAM,UAAU,qBAAqB;CACrC,MAAM,YAAsB,EAAE;CAC9B,MAAM,SAAmB,EAAE;AAE3B,MAAK,MAAM,SAAS,QAClB,KAAI;AAEF,MADe,aAAa,MAAM,GAAG,CAEnC,WAAU,KAAK,MAAM,GAAG;MAExB,QAAO,KAAK,MAAM,GAAG;UAEhB,OAAO;AACd,SAAO,KAAK,MAAM,GAAG;;AAIzB,QAAO;EAAE;EAAW;EAAQ;;;;;AAM9B,SAAS,qBAA2B;CAClC,MAAM,eAAe,KAAK,SAAS,EAAE,WAAW,gBAAgB;CAChE,MAAM,WAAW,KAAK,SAAS,EAAE,eAAe,OAAO,iBAAiB;AAGxE,KAAI,WAAW,aAAa,CAC1B,KAAI;EACF,MAAM,kBAAkB,aAAa,cAAc,QAAQ;AAY3D,OAXiB,KAAK,MAAM,gBAAgB,EACd,OAAO,eAAe,EAAE,EAEnB,MAAM,eACvC,WAAW,OAAO,MAAM,SACtB,KAAK,YAAY,YACjB,KAAK,SAAS,SAAS,aAAa,IACpC,KAAK,SAAS,SAAS,iBAAiB,CACzC,CACF,CAGC;SAEI;AAMV,KAAI;AACF,UAAQ,IAAI,4CAA4C;AAGxD,OAAK,oBAAoB,UAAwB;AAC/C,OAAI,MACF,SAAQ,KAAK,oEAAoE;OAEjF,SAAQ,IAAI,+BAA+B;IAE7C;UACK,OAAO;AACd,UAAQ,KAAK,oEAAoE;;;;;;AAOrF,SAAS,eAAe,SAAiB,SAAuB;CAC9D,MAAM,WAAW,KAAK,YAAY,QAAQ,CAAC;AAC3C,WAAU,UAAU
,EAAE,WAAW,MAAM,CAAC;AAGxC,eADkB,KAAK,UAAU,oBAAoB,EAGnD,KAAK,UAAU;EACb,IAAI;EACJ,OAAO,cAAc;EACrB,6BAAY,IAAI,MAAM,EAAC,aAAa;EACrC,EAAE,MAAM,EAAE,CACZ;;;;;;;;AASH,eAAe,yBAAyB,SAAiB,aAAoC;CAE3F,MAAM,cAAc,KAAK,aAAa,WAAW,WAAW;CAC5D,IAAI,WAAW;CACf,MAAM,cAAc;CACpB,MAAM,UAAU;AAEhB,QAAO,WAAW,aAAa;AAE7B,MAAI,WAAW,YAAY,EAAE;GAE3B,MAAM,WAAW,YAAY,aAAa,EAAE,eAAe,MAAM,CAAC,CAC/D,QAAO,MAAK,EAAE,aAAa,CAAC,CAC5B,KAAI,OAAM;IACT,MAAM,EAAE;IACR,MAAM,KAAK,aAAa,EAAE,KAAK;IAC/B,OAAO,WAAW,KAAK,aAAa,EAAE,MAAM,kBAAkB,CAAC,GAC3D,aAAa,KAAK,aAAa,EAAE,MAAM,kBAAkB,EAAE,QAAQ,GACnE;IACL,EAAE,CACF,QAAO,MAAK;AAEX,QAAI,CAAC,EAAE,MAAO,QAAO;AACrB,QAAI;KACF,MAAM,QAAQ,KAAK,MAAM,EAAE,MAAM;AACjC,YAAO,MAAM,aAAa,WAAW,MAAM,YAAY;YACjD;AACN,YAAO;;KAET,CACD,MAAM,GAAG,MAAM;IAEd,MAAM,QAAQ,WAAW,KAAK,EAAE,MAAM,kBAAkB,CAAC,GACpD,SAAS,KAAK,EAAE,MAAM,kBAAkB,CAAC,CAAC,WAAW,IACtD;AAIJ,YAHc,WAAW,KAAK,EAAE,MAAM,kBAAkB,CAAC,GACpD,SAAS,KAAK,EAAE,MAAM,kBAAkB,CAAC,CAAC,WAAW,IACtD,KACW;KACf;AAEJ,OAAI,SAAS,SAAS,GAAG;IAEvB,MAAM,QAAQ,cAAc,QAAQ;AACpC,QAAI,OAAO;AACT,WAAM,oBAAoB,SAAS,GAAG;AACtC,oBAAe,MAAM;AACrB,aAAQ,IAAI,6CAA6C,QAAQ,IAAI,SAAS,GAAG,OAAO;AACxF;;;;AAMN,QAAM,IAAI,SAAQ,YAAW,WAAW,SAAS,QAAQ,CAAC;AAC1D;;AAGF,OAAM,IAAI,MAAM,qCAAqC,QAAQ,SAAS,cAAc,QAAQ,IAAI;;;;aAh1C1D;YACsF;aACnD;UACb;cAEJ;wBAEK;iBACwE;mBACvE;gBACvB;eACqC;gBAEZ;AAEhD,WAAU,KAAK;AAG3B,kBAAiB,CAAC,UAAU,YAAY"}
|
|
@@ -1,3 +1,3 @@
|
|
|
1
|
-
import { C as stopAgent, E as warnOnBareNumericIssueIds, S as spawnAgent, T as transitionIssueToInReview, _ as recoverAgent, a as getAgentDir, b as saveAgentState, c as getAgentState, d as getProviderExportsForModel, f as getProviderTmuxFlags, g as messageAgent, h as listRunningAgents, i as getActivity, l as getLatestSessionId, m as init_agents, n as autoRecoverAgents, o as getAgentRuntimeFile, p as getSessionId, r as detectCrashedAgents, s as getAgentRuntimeState, t as appendActivity, u as getProviderEnvForModel, v as resumeAgent, w as transitionIssueToInProgress, x as saveSessionId, y as saveAgentRuntimeState } from "./agents-
|
|
1
|
+
import { C as stopAgent, E as warnOnBareNumericIssueIds, S as spawnAgent, T as transitionIssueToInReview, _ as recoverAgent, a as getAgentDir, b as saveAgentState, c as getAgentState, d as getProviderExportsForModel, f as getProviderTmuxFlags, g as messageAgent, h as listRunningAgents, i as getActivity, l as getLatestSessionId, m as init_agents, n as autoRecoverAgents, o as getAgentRuntimeFile, p as getSessionId, r as detectCrashedAgents, s as getAgentRuntimeState, t as appendActivity, u as getProviderEnvForModel, v as resumeAgent, w as transitionIssueToInProgress, x as saveSessionId, y as saveAgentRuntimeState } from "./agents-DCpQQ_W5.js";
|
|
2
2
|
init_agents();
|
|
3
3
|
export { appendActivity, autoRecoverAgents, detectCrashedAgents, getActivity, getAgentDir, getAgentRuntimeFile, getAgentRuntimeState, getAgentState, getLatestSessionId, getProviderEnvForModel, getProviderExportsForModel, getProviderTmuxFlags, getSessionId, listRunningAgents, messageAgent, recoverAgent, resumeAgent, saveAgentRuntimeState, saveAgentState, saveSessionId, spawnAgent, stopAgent, transitionIssueToInProgress, transitionIssueToInReview, warnOnBareNumericIssueIds };
|
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import {
|
|
2
|
-
import { i as stepSkipped, n as stepFailed, r as stepOk } from "./types-
|
|
1
|
+
import { _ as init_paths, d as PROJECT_PRDS_COMPLETED_SUBDIR, l as PROJECT_DOCS_SUBDIR, n as ARCHIVES_DIR, p as PROJECT_PRDS_SUBDIR, u as PROJECT_PRDS_ACTIVE_SUBDIR } from "./paths-BDyJ7BiV.js";
|
|
2
|
+
import { i as stepSkipped, n as stepFailed, r as stepOk } from "./types-RKZjGE5N.js";
|
|
3
3
|
import { cpSync, existsSync, mkdirSync, rmSync } from "fs";
|
|
4
4
|
import { dirname, join } from "path";
|
|
5
5
|
import { exec } from "child_process";
|
|
@@ -150,4 +150,4 @@ async function archivePlanning(ctx, opts = {}) {
|
|
|
150
150
|
//#endregion
|
|
151
151
|
export { movePrd as i, archiveWorkspaceArtifacts as n, findWorkspacePath as r, archivePlanning as t };
|
|
152
152
|
|
|
153
|
-
//# sourceMappingURL=archive-planning-
|
|
153
|
+
//# sourceMappingURL=archive-planning-BmW9UDTr.js.map
|
package/dist/dashboard/{archive-planning-h-hAjk0P.js.map → archive-planning-BmW9UDTr.js.map}
RENAMED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"archive-planning-h-hAjk0P.js","names":[],"sources":["../../src/lib/lifecycle/archive-planning.ts"],"sourcesContent":["/**\n * archive-planning — PRD active→completed + .planning/ preservation.\n *\n * Consolidates PRD moving from close-out.ts, merge-agent.ts, and the\n * approve endpoint into a single idempotent operation.\n *\n * Steps:\n * 1. Move PRD from docs/prds/active/ → docs/prds/completed/ (git mv, fallback to copy)\n * 2. Archive workspace .planning/ artifacts to ~/.panopticon/archives/<issue>/\n * 3. Rotate previous archives to prevent overwrite\n */\n\nimport { existsSync, mkdirSync, cpSync, rmSync } from 'fs';\nimport { join, dirname } from 'path';\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport {\n ARCHIVES_DIR,\n PROJECT_DOCS_SUBDIR,\n PROJECT_PRDS_SUBDIR,\n PROJECT_PRDS_ACTIVE_SUBDIR,\n PROJECT_PRDS_COMPLETED_SUBDIR,\n} from '../paths.js';\nimport type { LifecycleContext, StepResult, ArchiveOptions } from './types.js';\nimport { stepOk, stepSkipped, stepFailed } from './types.js';\n\nconst execAsync = promisify(exec);\n\n/**\n * Find the workspace path for an issue.\n * Checks multiple conventional locations, including legacy numeric-suffix naming.\n */\nexport function findWorkspacePath(projectPath: string, issueLower: string): string | null {\n // e.g. \"pan-488\" → \"488\" for legacy workspaces named feature-488\n const numericSuffix = issueLower.replace(/^[a-z]+-/, '');\n const candidates = [\n join(projectPath, 'workspaces', `feature-${issueLower}`),\n join(projectPath, 'workspaces', `feature-${numericSuffix}`),\n join(projectPath, 'workspaces', issueLower),\n join(projectPath, '.worktrees', issueLower),\n join(dirname(projectPath), `feature-${issueLower}`),\n ];\n for (const p of candidates) {\n if (existsSync(p)) return p;\n }\n return null;\n}\n\n/**\n * Move PRD from active/ to completed/ directory.\n * Uses git mv with fallback to plain copy. 
Idempotent — skips if already completed.\n */\nexport async function movePrd(\n ctx: LifecycleContext,\n opts: ArchiveOptions = {},\n): Promise<StepResult> {\n const { pushToRemote = true } = opts;\n const issueLower = ctx.issueId.toLowerCase();\n const step = 'archive-planning:move-prd';\n\n // Per-issue subdirectory paths (new format: docs/prds/active/<issue-id>/STATE.md)\n const activeIssueDir = join(\n ctx.projectPath, PROJECT_DOCS_SUBDIR, PROJECT_PRDS_SUBDIR,\n PROJECT_PRDS_ACTIVE_SUBDIR, issueLower,\n );\n const completedIssueDir = join(\n ctx.projectPath, PROJECT_DOCS_SUBDIR, PROJECT_PRDS_SUBDIR,\n PROJECT_PRDS_COMPLETED_SUBDIR, issueLower,\n );\n\n // Legacy flat paths (old format: docs/prds/active/<issue-id>-plan.md)\n const legacyActivePrdPath = join(\n ctx.projectPath, PROJECT_DOCS_SUBDIR, PROJECT_PRDS_SUBDIR,\n PROJECT_PRDS_ACTIVE_SUBDIR, `${issueLower}-plan.md`,\n );\n const legacyCompletedPrdPath = join(\n ctx.projectPath, PROJECT_DOCS_SUBDIR, PROJECT_PRDS_SUBDIR,\n PROJECT_PRDS_COMPLETED_SUBDIR, `${issueLower}-plan.md`,\n );\n\n // Already in completed (either format) — idempotent skip\n if (existsSync(completedIssueDir) || existsSync(legacyCompletedPrdPath)) {\n return stepSkipped(step, ['PRD already in completed/']);\n }\n\n // Determine source: prefer new subdirectory format, fall back to legacy flat file\n const useNewFormat = existsSync(activeIssueDir);\n const useLegacyFormat = !useNewFormat && existsSync(legacyActivePrdPath);\n\n if (!useNewFormat && !useLegacyFormat) {\n return stepSkipped(step, ['No PRD found in active/ (may not have had one)']);\n }\n\n if (useNewFormat) {\n // Move entire issue subdirectory\n const completedParent = join(\n ctx.projectPath, PROJECT_DOCS_SUBDIR, PROJECT_PRDS_SUBDIR, PROJECT_PRDS_COMPLETED_SUBDIR,\n );\n if (!existsSync(completedParent)) {\n mkdirSync(completedParent, { recursive: true });\n }\n try {\n await execAsync(`git mv \"${activeIssueDir}\" \"${completedIssueDir}\"`, { cwd: ctx.projectPath });\n 
await execAsync(`git commit -m \"Move ${ctx.issueId} PRD to completed\"`, { cwd: ctx.projectPath });\n if (pushToRemote) {\n await execAsync('git push', { cwd: ctx.projectPath });\n }\n return stepOk(step, [`Moved PRD subdirectory from active/ to completed/ via git mv`]);\n } catch {\n // git mv failed — fall back to copy\n try {\n cpSync(activeIssueDir, completedIssueDir, { recursive: true });\n return stepOk(step, ['Copied PRD subdirectory to completed/ (git mv failed, plain copy succeeded)']);\n } catch (err) {\n return stepFailed(step, `Failed to preserve PRD: ${(err as Error).message}`);\n }\n }\n } else {\n // Legacy flat file: move single file\n const completedDir = dirname(legacyCompletedPrdPath);\n if (!existsSync(completedDir)) {\n mkdirSync(completedDir, { recursive: true });\n }\n try {\n await execAsync(`git mv \"${legacyActivePrdPath}\" \"${legacyCompletedPrdPath}\"`, { cwd: ctx.projectPath });\n await execAsync(`git commit -m \"Move ${ctx.issueId} PRD to completed\"`, { cwd: ctx.projectPath });\n if (pushToRemote) {\n await execAsync('git push', { cwd: ctx.projectPath });\n }\n return stepOk(step, [`Moved PRD from active/ to completed/ via git mv`]);\n } catch {\n try {\n cpSync(legacyActivePrdPath, legacyCompletedPrdPath);\n if (!existsSync(legacyCompletedPrdPath)) {\n return stepFailed(step, 'PRD copy appeared to succeed but file not found at destination');\n }\n return stepOk(step, ['Copied PRD to completed/ (git mv failed, plain copy succeeded)']);\n } catch (err) {\n return stepFailed(step, `Failed to preserve PRD: ${(err as Error).message}`);\n }\n }\n }\n\n}\n\n/**\n * Archive workspace .planning/ artifacts to ~/.panopticon/archives/<issue>/.\n * Rotates previous archives to prevent overwrite.\n * Returns a hard failure if archiving fails — callers must NOT proceed with\n * workspace deletion after an archive failure.\n */\nexport async function archiveWorkspaceArtifacts(\n ctx: LifecycleContext,\n): Promise<StepResult> {\n const issueLower = 
ctx.issueId.toLowerCase();\n const step = 'archive-planning:archive-artifacts';\n\n const workspacePath = findWorkspacePath(ctx.projectPath, issueLower);\n if (!workspacePath || !existsSync(workspacePath)) {\n return stepSkipped(step, ['No workspace found to archive']);\n }\n\n try {\n let archiveDir = join(ARCHIVES_DIR, issueLower);\n\n // Rotate previous archive if it exists\n if (existsSync(archiveDir)) {\n let version = 1;\n while (existsSync(`${archiveDir}.${version}`)) {\n version++;\n }\n const rotatedDir = `${archiveDir}.${version}`;\n cpSync(archiveDir, rotatedDir, { recursive: true });\n rmSync(archiveDir, { recursive: true, force: true });\n }\n\n mkdirSync(archiveDir, { recursive: true });\n const details: string[] = [];\n\n // Archive .planning/feedback/\n const feedbackDir = join(workspacePath, '.planning', 'feedback');\n if (existsSync(feedbackDir)) {\n cpSync(feedbackDir, join(archiveDir, 'feedback'), { recursive: true });\n details.push('Archived feedback/');\n }\n\n // Archive STATE.md\n const stateMd = join(workspacePath, '.planning', 'STATE.md');\n if (existsSync(stateMd)) {\n cpSync(stateMd, join(archiveDir, 'STATE.md'));\n details.push('Archived STATE.md');\n }\n\n // Archive beads/\n const beadsDir = join(workspacePath, '.planning', 'beads');\n if (existsSync(beadsDir)) {\n cpSync(beadsDir, join(archiveDir, 'beads'), { recursive: true });\n details.push('Archived beads/');\n }\n\n // Archive PRD.md (workspace copy — the docs/prds/ copy is canonical,\n // but this preserves the workspace-specific version with agent annotations)\n const prdMd = join(workspacePath, '.planning', 'PRD.md');\n if (existsSync(prdMd)) {\n cpSync(prdMd, join(archiveDir, 'PRD.md'));\n details.push('Archived workspace PRD.md');\n }\n\n details.push(`Archived to ${archiveDir}`);\n return stepOk(step, details);\n } catch (err) {\n return stepFailed(step, `Failed to archive: ${(err as Error).message}`);\n }\n}\n\n/**\n * Full archive-planning operation: move PRD + archive 
workspace artifacts.\n * Archive failure is a hard fail — do not proceed with workspace deletion.\n */\nexport async function archivePlanning(\n ctx: LifecycleContext,\n opts: ArchiveOptions = {},\n): Promise<StepResult[]> {\n const results: StepResult[] = [];\n\n // Step 1: Move PRD\n const prdResult = await movePrd(ctx, opts);\n results.push(prdResult);\n\n // Step 2: Archive workspace artifacts\n const archiveResult = await archiveWorkspaceArtifacts(ctx);\n results.push(archiveResult);\n\n return results;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;YAsBqB;AAIrB,MAAM,YAAY,UAAU,KAAK;;;;;AAMjC,SAAgB,kBAAkB,aAAqB,YAAmC;CAExF,MAAM,gBAAgB,WAAW,QAAQ,YAAY,GAAG;CACxD,MAAM,aAAa;EACjB,KAAK,aAAa,cAAc,WAAW,aAAa;EACxD,KAAK,aAAa,cAAc,WAAW,gBAAgB;EAC3D,KAAK,aAAa,cAAc,WAAW;EAC3C,KAAK,aAAa,cAAc,WAAW;EAC3C,KAAK,QAAQ,YAAY,EAAE,WAAW,aAAa;EACpD;AACD,MAAK,MAAM,KAAK,WACd,KAAI,WAAW,EAAE,CAAE,QAAO;AAE5B,QAAO;;;;;;AAOT,eAAsB,QACpB,KACA,OAAuB,EAAE,EACJ;CACrB,MAAM,EAAE,eAAe,SAAS;CAChC,MAAM,aAAa,IAAI,QAAQ,aAAa;CAC5C,MAAM,OAAO;CAGb,MAAM,iBAAiB,KACrB,IAAI,aAAa,qBAAqB,qBACtC,4BAA4B,WAC7B;CACD,MAAM,oBAAoB,KACxB,IAAI,aAAa,qBAAqB,qBACtC,+BAA+B,WAChC;CAGD,MAAM,sBAAsB,KAC1B,IAAI,aAAa,qBAAqB,qBACtC,4BAA4B,GAAG,WAAW,UAC3C;CACD,MAAM,yBAAyB,KAC7B,IAAI,aAAa,qBAAqB,qBACtC,+BAA+B,GAAG,WAAW,UAC9C;AAGD,KAAI,WAAW,kBAAkB,IAAI,WAAW,uBAAuB,CACrE,QAAO,YAAY,MAAM,CAAC,4BAA4B,CAAC;CAIzD,MAAM,eAAe,WAAW,eAAe;CAC/C,MAAM,kBAAkB,CAAC,gBAAgB,WAAW,oBAAoB;AAExE,KAAI,CAAC,gBAAgB,CAAC,gBACpB,QAAO,YAAY,MAAM,CAAC,iDAAiD,CAAC;AAG9E,KAAI,cAAc;EAEhB,MAAM,kBAAkB,KACtB,IAAI,aAAa,qBAAqB,qBAAqB,8BAC5D;AACD,MAAI,CAAC,WAAW,gBAAgB,CAC9B,WAAU,iBAAiB,EAAE,WAAW,MAAM,CAAC;AAEjD,MAAI;AACF,SAAM,UAAU,WAAW,eAAe,KAAK,kBAAkB,IAAI,EAAE,KAAK,IAAI,aAAa,CAAC;AAC9F,SAAM,UAAU,uBAAuB,IAAI,QAAQ,qBAAqB,EAAE,KAAK,IAAI,aAAa,CAAC;AACjG,OAAI,aACF,OAAM,UAAU,YAAY,EAAE,KAAK,IAAI,aAAa,CAAC;AAEvD,UAAO,OAAO,MAAM,CAAC,+DAA+D,CAAC;UAC/E;AAEN,OAAI;AACF,WAAO,gBAAgB,mBAAmB,EAAE,WAAW,MAAM,CAAC;AAC9D,WAAO,OAAO,MAAM,CAAC,8EAA8E,CAAC;YAC7F,KAAK;AACZ,WAAO,WAAW,MAAM,2BAA4B,IAAc,UAAU;;
;QAG3E;EAEL,MAAM,eAAe,QAAQ,uBAAuB;AACpD,MAAI,CAAC,WAAW,aAAa,CAC3B,WAAU,cAAc,EAAE,WAAW,MAAM,CAAC;AAE9C,MAAI;AACF,SAAM,UAAU,WAAW,oBAAoB,KAAK,uBAAuB,IAAI,EAAE,KAAK,IAAI,aAAa,CAAC;AACxG,SAAM,UAAU,uBAAuB,IAAI,QAAQ,qBAAqB,EAAE,KAAK,IAAI,aAAa,CAAC;AACjG,OAAI,aACF,OAAM,UAAU,YAAY,EAAE,KAAK,IAAI,aAAa,CAAC;AAEvD,UAAO,OAAO,MAAM,CAAC,kDAAkD,CAAC;UAClE;AACN,OAAI;AACF,WAAO,qBAAqB,uBAAuB;AACnD,QAAI,CAAC,WAAW,uBAAuB,CACrC,QAAO,WAAW,MAAM,iEAAiE;AAE3F,WAAO,OAAO,MAAM,CAAC,iEAAiE,CAAC;YAChF,KAAK;AACZ,WAAO,WAAW,MAAM,2BAA4B,IAAc,UAAU;;;;;;;;;;;AAapF,eAAsB,0BACpB,KACqB;CACrB,MAAM,aAAa,IAAI,QAAQ,aAAa;CAC5C,MAAM,OAAO;CAEb,MAAM,gBAAgB,kBAAkB,IAAI,aAAa,WAAW;AACpE,KAAI,CAAC,iBAAiB,CAAC,WAAW,cAAc,CAC9C,QAAO,YAAY,MAAM,CAAC,gCAAgC,CAAC;AAG7D,KAAI;EACF,IAAI,aAAa,KAAK,cAAc,WAAW;AAG/C,MAAI,WAAW,WAAW,EAAE;GAC1B,IAAI,UAAU;AACd,UAAO,WAAW,GAAG,WAAW,GAAG,UAAU,CAC3C;AAGF,UAAO,YADY,GAAG,WAAW,GAAG,WACL,EAAE,WAAW,MAAM,CAAC;AACnD,UAAO,YAAY;IAAE,WAAW;IAAM,OAAO;IAAM,CAAC;;AAGtD,YAAU,YAAY,EAAE,WAAW,MAAM,CAAC;EAC1C,MAAM,UAAoB,EAAE;EAG5B,MAAM,cAAc,KAAK,eAAe,aAAa,WAAW;AAChE,MAAI,WAAW,YAAY,EAAE;AAC3B,UAAO,aAAa,KAAK,YAAY,WAAW,EAAE,EAAE,WAAW,MAAM,CAAC;AACtE,WAAQ,KAAK,qBAAqB;;EAIpC,MAAM,UAAU,KAAK,eAAe,aAAa,WAAW;AAC5D,MAAI,WAAW,QAAQ,EAAE;AACvB,UAAO,SAAS,KAAK,YAAY,WAAW,CAAC;AAC7C,WAAQ,KAAK,oBAAoB;;EAInC,MAAM,WAAW,KAAK,eAAe,aAAa,QAAQ;AAC1D,MAAI,WAAW,SAAS,EAAE;AACxB,UAAO,UAAU,KAAK,YAAY,QAAQ,EAAE,EAAE,WAAW,MAAM,CAAC;AAChE,WAAQ,KAAK,kBAAkB;;EAKjC,MAAM,QAAQ,KAAK,eAAe,aAAa,SAAS;AACxD,MAAI,WAAW,MAAM,EAAE;AACrB,UAAO,OAAO,KAAK,YAAY,SAAS,CAAC;AACzC,WAAQ,KAAK,4BAA4B;;AAG3C,UAAQ,KAAK,eAAe,aAAa;AACzC,SAAO,OAAO,MAAM,QAAQ;UACrB,KAAK;AACZ,SAAO,WAAW,MAAM,sBAAuB,IAAc,UAAU;;;;;;;AAQ3E,eAAsB,gBACpB,KACA,OAAuB,EAAE,EACF;CACvB,MAAM,UAAwB,EAAE;CAGhC,MAAM,YAAY,MAAM,QAAQ,KAAK,KAAK;AAC1C,SAAQ,KAAK,UAAU;CAGvB,MAAM,gBAAgB,MAAM,0BAA0B,IAAI;AAC1D,SAAQ,KAAK,cAAc;AAE3B,QAAO"}
|
|
1
|
+
{"version":3,"file":"archive-planning-BmW9UDTr.js","names":[],"sources":["../../src/lib/lifecycle/archive-planning.ts"],"sourcesContent":["/**\n * archive-planning — PRD active→completed + .planning/ preservation.\n *\n * Consolidates PRD moving from close-out.ts, merge-agent.ts, and the\n * approve endpoint into a single idempotent operation.\n *\n * Steps:\n * 1. Move PRD from docs/prds/active/ → docs/prds/completed/ (git mv, fallback to copy)\n * 2. Archive workspace .planning/ artifacts to ~/.panopticon/archives/<issue>/\n * 3. Rotate previous archives to prevent overwrite\n */\n\nimport { existsSync, mkdirSync, cpSync, rmSync } from 'fs';\nimport { join, dirname } from 'path';\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport {\n ARCHIVES_DIR,\n PROJECT_DOCS_SUBDIR,\n PROJECT_PRDS_SUBDIR,\n PROJECT_PRDS_ACTIVE_SUBDIR,\n PROJECT_PRDS_COMPLETED_SUBDIR,\n} from '../paths.js';\nimport type { LifecycleContext, StepResult, ArchiveOptions } from './types.js';\nimport { stepOk, stepSkipped, stepFailed } from './types.js';\n\nconst execAsync = promisify(exec);\n\n/**\n * Find the workspace path for an issue.\n * Checks multiple conventional locations, including legacy numeric-suffix naming.\n */\nexport function findWorkspacePath(projectPath: string, issueLower: string): string | null {\n // e.g. \"pan-488\" → \"488\" for legacy workspaces named feature-488\n const numericSuffix = issueLower.replace(/^[a-z]+-/, '');\n const candidates = [\n join(projectPath, 'workspaces', `feature-${issueLower}`),\n join(projectPath, 'workspaces', `feature-${numericSuffix}`),\n join(projectPath, 'workspaces', issueLower),\n join(projectPath, '.worktrees', issueLower),\n join(dirname(projectPath), `feature-${issueLower}`),\n ];\n for (const p of candidates) {\n if (existsSync(p)) return p;\n }\n return null;\n}\n\n/**\n * Move PRD from active/ to completed/ directory.\n * Uses git mv with fallback to plain copy. 
Idempotent — skips if already completed.\n */\nexport async function movePrd(\n ctx: LifecycleContext,\n opts: ArchiveOptions = {},\n): Promise<StepResult> {\n const { pushToRemote = true } = opts;\n const issueLower = ctx.issueId.toLowerCase();\n const step = 'archive-planning:move-prd';\n\n // Per-issue subdirectory paths (new format: docs/prds/active/<issue-id>/STATE.md)\n const activeIssueDir = join(\n ctx.projectPath, PROJECT_DOCS_SUBDIR, PROJECT_PRDS_SUBDIR,\n PROJECT_PRDS_ACTIVE_SUBDIR, issueLower,\n );\n const completedIssueDir = join(\n ctx.projectPath, PROJECT_DOCS_SUBDIR, PROJECT_PRDS_SUBDIR,\n PROJECT_PRDS_COMPLETED_SUBDIR, issueLower,\n );\n\n // Legacy flat paths (old format: docs/prds/active/<issue-id>-plan.md)\n const legacyActivePrdPath = join(\n ctx.projectPath, PROJECT_DOCS_SUBDIR, PROJECT_PRDS_SUBDIR,\n PROJECT_PRDS_ACTIVE_SUBDIR, `${issueLower}-plan.md`,\n );\n const legacyCompletedPrdPath = join(\n ctx.projectPath, PROJECT_DOCS_SUBDIR, PROJECT_PRDS_SUBDIR,\n PROJECT_PRDS_COMPLETED_SUBDIR, `${issueLower}-plan.md`,\n );\n\n // Already in completed (either format) — idempotent skip\n if (existsSync(completedIssueDir) || existsSync(legacyCompletedPrdPath)) {\n return stepSkipped(step, ['PRD already in completed/']);\n }\n\n // Determine source: prefer new subdirectory format, fall back to legacy flat file\n const useNewFormat = existsSync(activeIssueDir);\n const useLegacyFormat = !useNewFormat && existsSync(legacyActivePrdPath);\n\n if (!useNewFormat && !useLegacyFormat) {\n return stepSkipped(step, ['No PRD found in active/ (may not have had one)']);\n }\n\n if (useNewFormat) {\n // Move entire issue subdirectory\n const completedParent = join(\n ctx.projectPath, PROJECT_DOCS_SUBDIR, PROJECT_PRDS_SUBDIR, PROJECT_PRDS_COMPLETED_SUBDIR,\n );\n if (!existsSync(completedParent)) {\n mkdirSync(completedParent, { recursive: true });\n }\n try {\n await execAsync(`git mv \"${activeIssueDir}\" \"${completedIssueDir}\"`, { cwd: ctx.projectPath });\n 
await execAsync(`git commit -m \"Move ${ctx.issueId} PRD to completed\"`, { cwd: ctx.projectPath });\n if (pushToRemote) {\n await execAsync('git push', { cwd: ctx.projectPath });\n }\n return stepOk(step, [`Moved PRD subdirectory from active/ to completed/ via git mv`]);\n } catch {\n // git mv failed — fall back to copy\n try {\n cpSync(activeIssueDir, completedIssueDir, { recursive: true });\n return stepOk(step, ['Copied PRD subdirectory to completed/ (git mv failed, plain copy succeeded)']);\n } catch (err) {\n return stepFailed(step, `Failed to preserve PRD: ${(err as Error).message}`);\n }\n }\n } else {\n // Legacy flat file: move single file\n const completedDir = dirname(legacyCompletedPrdPath);\n if (!existsSync(completedDir)) {\n mkdirSync(completedDir, { recursive: true });\n }\n try {\n await execAsync(`git mv \"${legacyActivePrdPath}\" \"${legacyCompletedPrdPath}\"`, { cwd: ctx.projectPath });\n await execAsync(`git commit -m \"Move ${ctx.issueId} PRD to completed\"`, { cwd: ctx.projectPath });\n if (pushToRemote) {\n await execAsync('git push', { cwd: ctx.projectPath });\n }\n return stepOk(step, [`Moved PRD from active/ to completed/ via git mv`]);\n } catch {\n try {\n cpSync(legacyActivePrdPath, legacyCompletedPrdPath);\n if (!existsSync(legacyCompletedPrdPath)) {\n return stepFailed(step, 'PRD copy appeared to succeed but file not found at destination');\n }\n return stepOk(step, ['Copied PRD to completed/ (git mv failed, plain copy succeeded)']);\n } catch (err) {\n return stepFailed(step, `Failed to preserve PRD: ${(err as Error).message}`);\n }\n }\n }\n\n}\n\n/**\n * Archive workspace .planning/ artifacts to ~/.panopticon/archives/<issue>/.\n * Rotates previous archives to prevent overwrite.\n * Returns a hard failure if archiving fails — callers must NOT proceed with\n * workspace deletion after an archive failure.\n */\nexport async function archiveWorkspaceArtifacts(\n ctx: LifecycleContext,\n): Promise<StepResult> {\n const issueLower = 
ctx.issueId.toLowerCase();\n const step = 'archive-planning:archive-artifacts';\n\n const workspacePath = findWorkspacePath(ctx.projectPath, issueLower);\n if (!workspacePath || !existsSync(workspacePath)) {\n return stepSkipped(step, ['No workspace found to archive']);\n }\n\n try {\n let archiveDir = join(ARCHIVES_DIR, issueLower);\n\n // Rotate previous archive if it exists\n if (existsSync(archiveDir)) {\n let version = 1;\n while (existsSync(`${archiveDir}.${version}`)) {\n version++;\n }\n const rotatedDir = `${archiveDir}.${version}`;\n cpSync(archiveDir, rotatedDir, { recursive: true });\n rmSync(archiveDir, { recursive: true, force: true });\n }\n\n mkdirSync(archiveDir, { recursive: true });\n const details: string[] = [];\n\n // Archive .planning/feedback/\n const feedbackDir = join(workspacePath, '.planning', 'feedback');\n if (existsSync(feedbackDir)) {\n cpSync(feedbackDir, join(archiveDir, 'feedback'), { recursive: true });\n details.push('Archived feedback/');\n }\n\n // Archive STATE.md\n const stateMd = join(workspacePath, '.planning', 'STATE.md');\n if (existsSync(stateMd)) {\n cpSync(stateMd, join(archiveDir, 'STATE.md'));\n details.push('Archived STATE.md');\n }\n\n // Archive beads/\n const beadsDir = join(workspacePath, '.planning', 'beads');\n if (existsSync(beadsDir)) {\n cpSync(beadsDir, join(archiveDir, 'beads'), { recursive: true });\n details.push('Archived beads/');\n }\n\n // Archive PRD.md (workspace copy — the docs/prds/ copy is canonical,\n // but this preserves the workspace-specific version with agent annotations)\n const prdMd = join(workspacePath, '.planning', 'PRD.md');\n if (existsSync(prdMd)) {\n cpSync(prdMd, join(archiveDir, 'PRD.md'));\n details.push('Archived workspace PRD.md');\n }\n\n details.push(`Archived to ${archiveDir}`);\n return stepOk(step, details);\n } catch (err) {\n return stepFailed(step, `Failed to archive: ${(err as Error).message}`);\n }\n}\n\n/**\n * Full archive-planning operation: move PRD + archive 
workspace artifacts.\n * Archive failure is a hard fail — do not proceed with workspace deletion.\n */\nexport async function archivePlanning(\n ctx: LifecycleContext,\n opts: ArchiveOptions = {},\n): Promise<StepResult[]> {\n const results: StepResult[] = [];\n\n // Step 1: Move PRD\n const prdResult = await movePrd(ctx, opts);\n results.push(prdResult);\n\n // Step 2: Archive workspace artifacts\n const archiveResult = await archiveWorkspaceArtifacts(ctx);\n results.push(archiveResult);\n\n return results;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;YAsBqB;AAIrB,MAAM,YAAY,UAAU,KAAK;;;;;AAMjC,SAAgB,kBAAkB,aAAqB,YAAmC;CAExF,MAAM,gBAAgB,WAAW,QAAQ,YAAY,GAAG;CACxD,MAAM,aAAa;EACjB,KAAK,aAAa,cAAc,WAAW,aAAa;EACxD,KAAK,aAAa,cAAc,WAAW,gBAAgB;EAC3D,KAAK,aAAa,cAAc,WAAW;EAC3C,KAAK,aAAa,cAAc,WAAW;EAC3C,KAAK,QAAQ,YAAY,EAAE,WAAW,aAAa;EACpD;AACD,MAAK,MAAM,KAAK,WACd,KAAI,WAAW,EAAE,CAAE,QAAO;AAE5B,QAAO;;;;;;AAOT,eAAsB,QACpB,KACA,OAAuB,EAAE,EACJ;CACrB,MAAM,EAAE,eAAe,SAAS;CAChC,MAAM,aAAa,IAAI,QAAQ,aAAa;CAC5C,MAAM,OAAO;CAGb,MAAM,iBAAiB,KACrB,IAAI,aAAa,qBAAqB,qBACtC,4BAA4B,WAC7B;CACD,MAAM,oBAAoB,KACxB,IAAI,aAAa,qBAAqB,qBACtC,+BAA+B,WAChC;CAGD,MAAM,sBAAsB,KAC1B,IAAI,aAAa,qBAAqB,qBACtC,4BAA4B,GAAG,WAAW,UAC3C;CACD,MAAM,yBAAyB,KAC7B,IAAI,aAAa,qBAAqB,qBACtC,+BAA+B,GAAG,WAAW,UAC9C;AAGD,KAAI,WAAW,kBAAkB,IAAI,WAAW,uBAAuB,CACrE,QAAO,YAAY,MAAM,CAAC,4BAA4B,CAAC;CAIzD,MAAM,eAAe,WAAW,eAAe;CAC/C,MAAM,kBAAkB,CAAC,gBAAgB,WAAW,oBAAoB;AAExE,KAAI,CAAC,gBAAgB,CAAC,gBACpB,QAAO,YAAY,MAAM,CAAC,iDAAiD,CAAC;AAG9E,KAAI,cAAc;EAEhB,MAAM,kBAAkB,KACtB,IAAI,aAAa,qBAAqB,qBAAqB,8BAC5D;AACD,MAAI,CAAC,WAAW,gBAAgB,CAC9B,WAAU,iBAAiB,EAAE,WAAW,MAAM,CAAC;AAEjD,MAAI;AACF,SAAM,UAAU,WAAW,eAAe,KAAK,kBAAkB,IAAI,EAAE,KAAK,IAAI,aAAa,CAAC;AAC9F,SAAM,UAAU,uBAAuB,IAAI,QAAQ,qBAAqB,EAAE,KAAK,IAAI,aAAa,CAAC;AACjG,OAAI,aACF,OAAM,UAAU,YAAY,EAAE,KAAK,IAAI,aAAa,CAAC;AAEvD,UAAO,OAAO,MAAM,CAAC,+DAA+D,CAAC;UAC/E;AAEN,OAAI;AACF,WAAO,gBAAgB,mBAAmB,EAAE,WAAW,MAAM,CAAC;AAC9D,WAAO,OAAO,MAAM,CAAC,8EAA8E,CAAC;YAC7F,KAAK;AACZ,WAAO,WAAW,MAAM,2BAA4B,IAAc,UAAU;;
;QAG3E;EAEL,MAAM,eAAe,QAAQ,uBAAuB;AACpD,MAAI,CAAC,WAAW,aAAa,CAC3B,WAAU,cAAc,EAAE,WAAW,MAAM,CAAC;AAE9C,MAAI;AACF,SAAM,UAAU,WAAW,oBAAoB,KAAK,uBAAuB,IAAI,EAAE,KAAK,IAAI,aAAa,CAAC;AACxG,SAAM,UAAU,uBAAuB,IAAI,QAAQ,qBAAqB,EAAE,KAAK,IAAI,aAAa,CAAC;AACjG,OAAI,aACF,OAAM,UAAU,YAAY,EAAE,KAAK,IAAI,aAAa,CAAC;AAEvD,UAAO,OAAO,MAAM,CAAC,kDAAkD,CAAC;UAClE;AACN,OAAI;AACF,WAAO,qBAAqB,uBAAuB;AACnD,QAAI,CAAC,WAAW,uBAAuB,CACrC,QAAO,WAAW,MAAM,iEAAiE;AAE3F,WAAO,OAAO,MAAM,CAAC,iEAAiE,CAAC;YAChF,KAAK;AACZ,WAAO,WAAW,MAAM,2BAA4B,IAAc,UAAU;;;;;;;;;;;AAapF,eAAsB,0BACpB,KACqB;CACrB,MAAM,aAAa,IAAI,QAAQ,aAAa;CAC5C,MAAM,OAAO;CAEb,MAAM,gBAAgB,kBAAkB,IAAI,aAAa,WAAW;AACpE,KAAI,CAAC,iBAAiB,CAAC,WAAW,cAAc,CAC9C,QAAO,YAAY,MAAM,CAAC,gCAAgC,CAAC;AAG7D,KAAI;EACF,IAAI,aAAa,KAAK,cAAc,WAAW;AAG/C,MAAI,WAAW,WAAW,EAAE;GAC1B,IAAI,UAAU;AACd,UAAO,WAAW,GAAG,WAAW,GAAG,UAAU,CAC3C;AAGF,UAAO,YADY,GAAG,WAAW,GAAG,WACL,EAAE,WAAW,MAAM,CAAC;AACnD,UAAO,YAAY;IAAE,WAAW;IAAM,OAAO;IAAM,CAAC;;AAGtD,YAAU,YAAY,EAAE,WAAW,MAAM,CAAC;EAC1C,MAAM,UAAoB,EAAE;EAG5B,MAAM,cAAc,KAAK,eAAe,aAAa,WAAW;AAChE,MAAI,WAAW,YAAY,EAAE;AAC3B,UAAO,aAAa,KAAK,YAAY,WAAW,EAAE,EAAE,WAAW,MAAM,CAAC;AACtE,WAAQ,KAAK,qBAAqB;;EAIpC,MAAM,UAAU,KAAK,eAAe,aAAa,WAAW;AAC5D,MAAI,WAAW,QAAQ,EAAE;AACvB,UAAO,SAAS,KAAK,YAAY,WAAW,CAAC;AAC7C,WAAQ,KAAK,oBAAoB;;EAInC,MAAM,WAAW,KAAK,eAAe,aAAa,QAAQ;AAC1D,MAAI,WAAW,SAAS,EAAE;AACxB,UAAO,UAAU,KAAK,YAAY,QAAQ,EAAE,EAAE,WAAW,MAAM,CAAC;AAChE,WAAQ,KAAK,kBAAkB;;EAKjC,MAAM,QAAQ,KAAK,eAAe,aAAa,SAAS;AACxD,MAAI,WAAW,MAAM,EAAE;AACrB,UAAO,OAAO,KAAK,YAAY,SAAS,CAAC;AACzC,WAAQ,KAAK,4BAA4B;;AAG3C,UAAQ,KAAK,eAAe,aAAa;AACzC,SAAO,OAAO,MAAM,QAAQ;UACrB,KAAK;AACZ,SAAO,WAAW,MAAM,sBAAuB,IAAc,UAAU;;;;;;;AAQ3E,eAAsB,gBACpB,KACA,OAAuB,EAAE,EACF;CACvB,MAAM,UAAwB,EAAE;CAGhC,MAAM,YAAY,MAAM,QAAQ,KAAK,KAAK;AAC1C,SAAQ,KAAK,UAAU;CAGvB,MAAM,gBAAgB,MAAM,0BAA0B,IAAI;AAC1D,SAAQ,KAAK,cAAc;AAE3B,QAAO"}
|