panopticon-cli 0.6.4 → 0.6.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/dist/{agents-DfYify9s.js → agents-CfFDs52G.js} +14 -14
- package/dist/{agents-DfYify9s.js.map → agents-CfFDs52G.js.map} +1 -1
- package/dist/{agents-BKsVoIc9.js → agents-D_2oRFVf.js} +1 -1
- package/dist/{archive-planning-BJrZ3tmN.js → archive-planning-D97ziGec.js} +3 -3
- package/dist/{archive-planning-BJrZ3tmN.js.map → archive-planning-D97ziGec.js.map} +1 -1
- package/dist/{archive-planning-C3m3hfa5.js → archive-planning-DK90wn9Q.js} +1 -1
- package/dist/{browser-Cvdznzc0.js → browser-CX7jXfXX.js} +1 -1
- package/dist/{browser-Cvdznzc0.js.map → browser-CX7jXfXX.js.map} +1 -1
- package/dist/{clean-planning-DvhZAUv4.js → clean-planning-D_lz4aQq.js} +2 -2
- package/dist/{clean-planning-DvhZAUv4.js.map → clean-planning-D_lz4aQq.js.map} +1 -1
- package/dist/clean-planning-x1S-JdmO.js +2 -0
- package/dist/cli/index.js +291 -760
- package/dist/cli/index.js.map +1 -1
- package/dist/{close-issue-Dr7yZmrr.js → close-issue-CaFE0stN.js} +11 -7
- package/dist/close-issue-CaFE0stN.js.map +1 -0
- package/dist/close-issue-CjcfZI9s.js +2 -0
- package/dist/compact-beads-B0_qE1w3.js +2 -0
- package/dist/{compact-beads-BCOtIIRl.js → compact-beads-CjFkteSU.js} +2 -2
- package/dist/{compact-beads-BCOtIIRl.js.map → compact-beads-CjFkteSU.js.map} +1 -1
- package/dist/{config-CRzMQRgA.js → config-BQNKsi9G.js} +2 -2
- package/dist/{config-CRzMQRgA.js.map → config-BQNKsi9G.js.map} +1 -1
- package/dist/{config-BYgUzQ21.js → config-agyKgF5C.js} +1 -1
- package/dist/{config-yaml-BgOACZAB.js → config-yaml-DGbLSMCa.js} +1 -1
- package/dist/{config-yaml-BgOACZAB.js.map → config-yaml-DGbLSMCa.js.map} +1 -1
- package/dist/{config-yaml-fdyvyL0S.js → config-yaml-Dqt4FWQH.js} +1 -1
- package/dist/dashboard/{acceptance-criteria-e5iiHlRx.js → acceptance-criteria-Dk9hhiYj.js} +1 -1
- package/dist/dashboard/{acceptance-criteria-e5iiHlRx.js.map → acceptance-criteria-Dk9hhiYj.js.map} +1 -1
- package/dist/dashboard/{agent-enrichment-C67LJBgD.js → agent-enrichment-DdO7ZqjI.js} +11 -7
- package/dist/dashboard/agent-enrichment-DdO7ZqjI.js.map +1 -0
- package/dist/dashboard/{agent-enrichment-Cq0P1cNZ.js → agent-enrichment-dLeGE1fX.js} +1 -1
- package/dist/dashboard/{agents-YyO6t5Xa.js → agents-DCpQQ_W5.js} +14 -14
- package/dist/dashboard/{agents-YyO6t5Xa.js.map → agents-DCpQQ_W5.js.map} +1 -1
- package/dist/dashboard/{agents-BVBVCyat.js → agents-Dgh2TjSp.js} +1 -1
- package/dist/dashboard/{archive-planning-h-hAjk0P.js → archive-planning-BmW9UDTr.js} +3 -3
- package/dist/dashboard/{archive-planning-h-hAjk0P.js.map → archive-planning-BmW9UDTr.js.map} +1 -1
- package/dist/dashboard/{archive-planning-CScs1MOC.js → archive-planning-C3Ebf9yC.js} +1 -1
- package/dist/dashboard/{beads-qNB0yAHV.js → beads-Bv-AdX7G.js} +3 -3
- package/dist/dashboard/{beads-qNB0yAHV.js.map → beads-Bv-AdX7G.js.map} +1 -1
- package/dist/dashboard/{beads-D_FRedEJ.js → beads-By6-X07V.js} +1 -1
- package/dist/dashboard/clean-planning-D60L8rPY.js +2 -0
- package/dist/dashboard/{clean-planning-qafw99vY.js → clean-planning-VEJu5suh.js} +2 -2
- package/dist/dashboard/{clean-planning-qafw99vY.js.map → clean-planning-VEJu5suh.js.map} +1 -1
- package/dist/dashboard/close-issue-C2KeSKKJ.js +2 -0
- package/dist/dashboard/{close-issue-DfIggeZD.js → close-issue-DtKdsSTm.js} +11 -7
- package/dist/dashboard/close-issue-DtKdsSTm.js.map +1 -0
- package/dist/dashboard/compact-beads-C7BN5N11.js +2 -0
- package/dist/dashboard/{compact-beads-Dt0qTqsC.js → compact-beads-D8Vt3qyv.js} +2 -2
- package/dist/dashboard/{compact-beads-Dt0qTqsC.js.map → compact-beads-D8Vt3qyv.js.map} +1 -1
- package/dist/dashboard/{config-CUREjHP7.js → config-CDkGjnwy.js} +2 -2
- package/dist/dashboard/{config-CUREjHP7.js.map → config-CDkGjnwy.js.map} +1 -1
- package/dist/dashboard/{config-BeI3uy-8.js → config-CTXkBATQ.js} +1 -1
- package/dist/dashboard/{database-CozA13Wy.js → database-DhqASALP.js} +1 -1
- package/dist/dashboard/{database-C0y0hXBx.js → database-cxmQryoh.js} +2 -2
- package/dist/dashboard/{database-C0y0hXBx.js.map → database-cxmQryoh.js.map} +1 -1
- package/dist/dashboard/{dist-src-oG2iHzgI.js → dist-src-DTm11oQr.js} +1 -1
- package/dist/dashboard/{dist-src-oG2iHzgI.js.map → dist-src-DTm11oQr.js.map} +1 -1
- package/dist/dashboard/{event-store-D7kLBd07.js → event-store-VWWUmOfn.js} +1 -1
- package/dist/dashboard/{event-store-O9q0Gweh.js → event-store-vSmAA3Zp.js} +9 -4
- package/dist/dashboard/event-store-vSmAA3Zp.js.map +1 -0
- package/dist/dashboard/{factory-BnLdiQW-.js → factory-C8nhLGHB.js} +3 -3
- package/dist/dashboard/{factory-BnLdiQW-.js.map → factory-C8nhLGHB.js.map} +1 -1
- package/dist/dashboard/{feedback-writer-DyovUANg.js → feedback-writer-CudSe1WK.js} +2 -2
- package/dist/dashboard/{feedback-writer-DyovUANg.js.map → feedback-writer-CudSe1WK.js.map} +1 -1
- package/dist/dashboard/{feedback-writer-gSUv_W0h.js → feedback-writer-Wgv1cd1r.js} +1 -1
- package/dist/dashboard/{git-utils-BJRioREj.js → git-utils-C1m4SwAe.js} +1 -1
- package/dist/dashboard/{git-utils-BJRioREj.js.map → git-utils-C1m4SwAe.js.map} +1 -1
- package/dist/dashboard/{git-utils-BtCRddq3.js → git-utils-DQI8EYoj.js} +1 -1
- package/dist/dashboard/{github-app-XO-LBUGk.js → github-app-DClWjjHr.js} +1 -1
- package/dist/dashboard/{github-app-XO-LBUGk.js.map → github-app-DClWjjHr.js.map} +1 -1
- package/dist/dashboard/{health-events-db-584nYgJB.js → health-events-db-BMXQfInV.js} +1 -1
- package/dist/dashboard/{health-events-db-B3ChzN65.js → health-events-db-Do4NrOhC.js} +2 -2
- package/dist/dashboard/{health-events-db-B3ChzN65.js.map → health-events-db-Do4NrOhC.js.map} +1 -1
- package/dist/dashboard/{hooks-CKhs3N68.js → hooks-CB4T47NC.js} +1 -1
- package/dist/dashboard/{hooks-CErbP8Oq.js → hooks-CjqXOlNb.js} +2 -2
- package/dist/dashboard/{hooks-CErbP8Oq.js.map → hooks-CjqXOlNb.js.map} +1 -1
- package/dist/dashboard/hume-CA2pftu_.js +3 -0
- package/dist/dashboard/{hume-CX_U3Qha.js → hume-JsAlMOJC.js} +2 -2
- package/dist/dashboard/{hume-CX_U3Qha.js.map → hume-JsAlMOJC.js.map} +1 -1
- package/dist/dashboard/{inspect-agent-B57kGDUV.js → inspect-agent-7eour7EA.js} +3 -3
- package/dist/dashboard/{inspect-agent-B57kGDUV.js.map → inspect-agent-7eour7EA.js.map} +1 -1
- package/dist/dashboard/{io-yGovuG4U.js → io-CWlFW78i.js} +1 -1
- package/dist/dashboard/{io-AJg-mzFi.js → io-DKS6359z.js} +1 -1
- package/dist/dashboard/{io-AJg-mzFi.js.map → io-DKS6359z.js.map} +1 -1
- package/dist/dashboard/issue-id-vwYJdsf8.js +62 -0
- package/dist/dashboard/issue-id-vwYJdsf8.js.map +1 -0
- package/dist/dashboard/{issue-service-singleton-DQK42EqH.js → issue-service-singleton-Co__-6kL.js} +1 -1
- package/dist/dashboard/{issue-service-singleton-sb2HkB9f.js → issue-service-singleton-Wv4xBm3y.js} +7 -7
- package/dist/dashboard/{issue-service-singleton-sb2HkB9f.js.map → issue-service-singleton-Wv4xBm3y.js.map} +1 -1
- package/dist/dashboard/{label-cleanup-CZEsbtq9.js → label-cleanup-nVKTmIIW.js} +7 -4
- package/dist/dashboard/label-cleanup-nVKTmIIW.js.map +1 -0
- package/dist/dashboard/lifecycle-BcUmtkR4.js +7 -0
- package/dist/dashboard/{merge-agent-GLtMEsTu.js → merge-agent-CGN3TT0a.js} +1 -1
- package/dist/dashboard/{merge-agent-twroFuAh.js → merge-agent-yudQOPZc.js} +148 -46
- package/dist/dashboard/merge-agent-yudQOPZc.js.map +1 -0
- package/dist/dashboard/{paths-COdEvoXR.js → paths-BDyJ7BiV.js} +19 -2
- package/dist/dashboard/{paths-COdEvoXR.js.map → paths-BDyJ7BiV.js.map} +1 -1
- package/dist/dashboard/{pipeline-notifier-DM5AHG5Q.js → pipeline-notifier-CCSN-jar.js} +1 -1
- package/dist/dashboard/{pipeline-notifier-DM5AHG5Q.js.map → pipeline-notifier-CCSN-jar.js.map} +1 -1
- package/dist/dashboard/{plan-utils-BkCIhn3B.js → plan-utils-Bkcsqr_s.js} +3 -3
- package/dist/dashboard/{plan-utils-BkCIhn3B.js.map → plan-utils-Bkcsqr_s.js.map} +1 -1
- package/dist/dashboard/{prd-draft-D09Afalc.js → prd-draft-BD8oMkZ1.js} +2 -2
- package/dist/dashboard/{prd-draft-D09Afalc.js.map → prd-draft-BD8oMkZ1.js.map} +1 -1
- package/dist/dashboard/{projection-cache-DQ9zegkK.js → projection-cache-C0EL8s8h.js} +1 -1
- package/dist/dashboard/{projection-cache-DQ9zegkK.js.map → projection-cache-C0EL8s8h.js.map} +1 -1
- package/dist/dashboard/{projects-DyT3vSy-.js → projects-C5ozxjwP.js} +1 -1
- package/dist/dashboard/{projects-Cq3TWdPS.js → projects-CFVl4oHn.js} +25 -13
- package/dist/dashboard/projects-CFVl4oHn.js.map +1 -0
- package/dist/dashboard/{providers-Ck2sQd_F.js → providers-B5Y4H2Mg.js} +4 -4
- package/dist/dashboard/providers-B5Y4H2Mg.js.map +1 -0
- package/dist/dashboard/{providers-DVQnDekG.js → providers-csVZVPkE.js} +1 -1
- package/dist/dashboard/public/assets/{dist-CCJbQrSB.js → dist-BaQPC-c6.js} +1 -1
- package/dist/dashboard/public/assets/index-ByLmYGhW.js +212 -0
- package/dist/dashboard/public/assets/index-OEEbThNN.css +1 -0
- package/dist/dashboard/public/index.html +2 -2
- package/dist/dashboard/rally-6McpKKRa.js +3 -0
- package/dist/dashboard/{rally-Cwuae-4C.js → rally-YjFRxIiC.js} +2 -2
- package/dist/dashboard/{rally-Cwuae-4C.js.map → rally-YjFRxIiC.js.map} +1 -1
- package/dist/dashboard/{rally-api-DSUxm7EO.js → rally-api-C0WqCSkT.js} +1 -1
- package/dist/dashboard/{rally-api-DSUxm7EO.js.map → rally-api-C0WqCSkT.js.map} +1 -1
- package/dist/dashboard/{rally-api-CEH5KZi4.js → rally-api-DNttdCW4.js} +1 -1
- package/dist/dashboard/{remote-BHTTMpJJ.js → remote-Cigqjj3f.js} +2 -2
- package/dist/dashboard/{remote-BXo_iIku.js → remote-ObpNZ7hF.js} +2 -2
- package/dist/dashboard/{remote-BXo_iIku.js.map → remote-ObpNZ7hF.js.map} +1 -1
- package/dist/dashboard/{remote-agents-CTKVhFFY.js → remote-agents-Bf3GuM7t.js} +1 -1
- package/dist/dashboard/{remote-agents-C0_0LLNd.js → remote-agents-DFyjT1Le.js} +1 -1
- package/dist/dashboard/{remote-agents-C0_0LLNd.js.map → remote-agents-DFyjT1Le.js.map} +1 -1
- package/dist/dashboard/{review-status-CK3eBGyb.js → review-status-BtXqWBhS.js} +1 -1
- package/dist/dashboard/{review-status-CV55Tl-n.js → review-status-Bymwzh2i.js} +44 -4
- package/dist/dashboard/{review-status-CV55Tl-n.js.map → review-status-Bymwzh2i.js.map} +1 -1
- package/dist/dashboard/server.js +565 -265
- package/dist/dashboard/server.js.map +1 -1
- package/dist/dashboard/{settings-CuHV-wcv.js → settings-BHlDG7TK.js} +2 -2
- package/dist/dashboard/settings-BHlDG7TK.js.map +1 -0
- package/dist/dashboard/settings-XWvDcj-D.js +2 -0
- package/dist/dashboard/{shadow-engineering-BUeZunaE.js → shadow-engineering-lIn1W_95.js} +1 -1
- package/dist/dashboard/{shadow-engineering-BUeZunaE.js.map → shadow-engineering-lIn1W_95.js.map} +1 -1
- package/dist/dashboard/{shadow-state-DHQ-kASN.js → shadow-state-BIexcxkv.js} +1 -1
- package/dist/dashboard/{shadow-state-DHQ-kASN.js.map → shadow-state-BIexcxkv.js.map} +1 -1
- package/dist/dashboard/{spawn-planning-session-8FFAqLdK.js → spawn-planning-session-33Jf-d5T.js} +6 -6
- package/dist/dashboard/{spawn-planning-session-8FFAqLdK.js.map → spawn-planning-session-33Jf-d5T.js.map} +1 -1
- package/dist/dashboard/{spawn-planning-session-U0Lqpjen.js → spawn-planning-session-D5hrVdWM.js} +1 -1
- package/dist/dashboard/{specialist-context-ColzlmGE.js → specialist-context-DGukHSn8.js} +6 -6
- package/dist/dashboard/{specialist-context-ColzlmGE.js.map → specialist-context-DGukHSn8.js.map} +1 -1
- package/dist/dashboard/{specialist-logs-BhmDpFIq.js → specialist-logs-CIw4qfTy.js} +1 -1
- package/dist/dashboard/{specialists-C6s3U6tX.js → specialists-B_zrayaP.js} +37 -36
- package/dist/dashboard/specialists-B_zrayaP.js.map +1 -0
- package/dist/dashboard/{specialists-Cny632-T.js → specialists-Cp-PgspS.js} +1 -1
- package/dist/dashboard/{test-agent-queue-tqI4VDsu.js → test-agent-queue-ypF_ecHo.js} +4 -4
- package/dist/dashboard/{test-agent-queue-tqI4VDsu.js.map → test-agent-queue-ypF_ecHo.js.map} +1 -1
- package/dist/dashboard/{tldr-daemon-BNFyS7W_.js → tldr-daemon-B_oLRD8z.js} +2 -2
- package/dist/dashboard/{tldr-daemon-BNFyS7W_.js.map → tldr-daemon-B_oLRD8z.js.map} +1 -1
- package/dist/dashboard/{tldr-daemon-A6JqC59u.js → tldr-daemon-Cfs0bXTi.js} +1 -1
- package/dist/dashboard/{tmux-DYGAVJfb.js → tmux-BzxdKItf.js} +1 -1
- package/dist/dashboard/{tmux-IlN1Slv-.js → tmux-LwG0tHhU.js} +2 -2
- package/dist/dashboard/{tmux-IlN1Slv-.js.map → tmux-LwG0tHhU.js.map} +1 -1
- package/dist/dashboard/{tracker-config-BzNLnmcE.js → tracker-config-BP59uH4V.js} +1 -1
- package/dist/dashboard/{tracker-config-CNM_5rEf.js → tracker-config-e7ph1QqT.js} +2 -2
- package/dist/dashboard/{tracker-config-CNM_5rEf.js.map → tracker-config-e7ph1QqT.js.map} +1 -1
- package/dist/dashboard/{tunnel-D2BkwU7k.js → tunnel-0RzzuXPf.js} +1 -1
- package/dist/dashboard/{tunnel-Dub2hiAA.js → tunnel-DldbBPWL.js} +2 -2
- package/dist/dashboard/{tunnel-Dub2hiAA.js.map → tunnel-DldbBPWL.js.map} +1 -1
- package/dist/dashboard/{types-CWA-o4UN.js → types-RKZjGE5N.js} +1 -1
- package/dist/dashboard/{types-CWA-o4UN.js.map → types-RKZjGE5N.js.map} +1 -1
- package/dist/dashboard/{vtt-parser-BAXygRf0.js → vtt-parser-99vFekRQ.js} +1 -1
- package/dist/dashboard/{vtt-parser-BAXygRf0.js.map → vtt-parser-99vFekRQ.js.map} +1 -1
- package/dist/dashboard/{work-agent-prompt-JYq_OugP.js → work-agent-prompt-fCg67nyo.js} +65 -10
- package/dist/dashboard/{work-agent-prompt-JYq_OugP.js.map → work-agent-prompt-fCg67nyo.js.map} +1 -1
- package/dist/dashboard/{work-type-router-Cxp8_ur2.js → work-type-router-CWVW2Wk_.js} +1 -1
- package/dist/dashboard/{work-type-router-Cxp8_ur2.js.map → work-type-router-CWVW2Wk_.js.map} +1 -1
- package/dist/dashboard/{work-type-router-Com2amST.js → work-type-router-Di5gCQwh.js} +1 -1
- package/dist/dashboard/{workflows-N1UTipYl.js → workflows-BSMipN07.js} +35 -17
- package/dist/dashboard/workflows-BSMipN07.js.map +1 -0
- package/dist/dashboard/workflows-DaYWQIS2.js +2 -0
- package/dist/dashboard/{workspace-config-cmp5_ipD.js → workspace-config-DVDR-Ukh.js} +1 -1
- package/dist/dashboard/workspace-config-DVDR-Ukh.js.map +1 -0
- package/dist/dashboard/{workspace-manager-CjpWPgzL.js → workspace-manager-BYfzs_t2.js} +1 -1
- package/dist/dashboard/{workspace-manager-D_y9ZmW_.js → workspace-manager-C7OfT62A.js} +44 -24
- package/dist/dashboard/workspace-manager-C7OfT62A.js.map +1 -0
- package/dist/{dns-BKzHm-2q.js → dns-D_aKQJjb.js} +1 -1
- package/dist/{dns-DZwOWvVO.js → dns-Yxq4NNS7.js} +1 -1
- package/dist/{dns-DZwOWvVO.js.map → dns-Yxq4NNS7.js.map} +1 -1
- package/dist/{factory-DFu3IT4r.js → factory-BRBGw6OB.js} +1 -1
- package/dist/{factory-DfzczxN1.js → factory-DzsOiZVc.js} +3 -3
- package/dist/{factory-DfzczxN1.js.map → factory-DzsOiZVc.js.map} +1 -1
- package/dist/{feedback-writer-CwdnOkPO.js → feedback-writer-ygXN5F9N.js} +2 -2
- package/dist/{feedback-writer-CwdnOkPO.js.map → feedback-writer-ygXN5F9N.js.map} +1 -1
- package/dist/{github-app-CHKwxOeQ.js → github-app-DykduJ0X.js} +1 -1
- package/dist/{github-app-CHKwxOeQ.js.map → github-app-DykduJ0X.js.map} +1 -1
- package/dist/hume-9nv1VmMV.js +3 -0
- package/dist/{hume-DnV-tDsh.js → hume-DoCbph2h.js} +2 -2
- package/dist/{hume-DnV-tDsh.js.map → hume-DoCbph2h.js.map} +1 -1
- package/dist/index.d.ts +17 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +8 -7
- package/dist/issue-id-CAcekoIw.js +62 -0
- package/dist/issue-id-CAcekoIw.js.map +1 -0
- package/dist/{label-cleanup-31ElPqqv.js → label-cleanup-C8R9Rspn.js} +7 -4
- package/dist/label-cleanup-C8R9Rspn.js.map +1 -0
- package/dist/{manifest-DL0oDbpv.js → manifest-B4ghOD-V.js} +1 -1
- package/dist/{manifest-DL0oDbpv.js.map → manifest-B4ghOD-V.js.map} +1 -1
- package/dist/{merge-agent-VQH9z9t8.js → merge-agent-DlUiUanN.js} +86 -33
- package/dist/merge-agent-DlUiUanN.js.map +1 -0
- package/dist/{paths-lMaxrYtT.js → paths-CDJ_HsbN.js} +19 -2
- package/dist/{paths-lMaxrYtT.js.map → paths-CDJ_HsbN.js.map} +1 -1
- package/dist/{pipeline-notifier-OJ-d3Y60.js → pipeline-notifier-XgDdCdvT.js} +1 -1
- package/dist/{pipeline-notifier-OJ-d3Y60.js.map → pipeline-notifier-XgDdCdvT.js.map} +1 -1
- package/dist/{projects-CvLepaxC.js → projects-Bk-5QhFQ.js} +25 -13
- package/dist/projects-Bk-5QhFQ.js.map +1 -0
- package/dist/{projects-DMWmPeIU.js → projects-DhU7rAVN.js} +1 -1
- package/dist/{providers-DcCPZ5K4.js → providers-DSU1vfQF.js} +4 -4
- package/dist/providers-DSU1vfQF.js.map +1 -0
- package/dist/rally-DdPvGa-w.js +3 -0
- package/dist/{rally-uUUZXp1h.js → rally-Dy00NElU.js} +1 -1
- package/dist/{rally-uUUZXp1h.js.map → rally-Dy00NElU.js.map} +1 -1
- package/dist/{remote-CkLBqLJc.js → remote-CYiOJg0q.js} +2 -2
- package/dist/{remote-CkLBqLJc.js.map → remote-CYiOJg0q.js.map} +1 -1
- package/dist/{remote-agents-C5Bd2fgt.js → remote-agents-CZXrUF4f.js} +1 -1
- package/dist/{remote-agents-C5Bd2fgt.js.map → remote-agents-CZXrUF4f.js.map} +1 -1
- package/dist/{remote-agents-BTzD-wMQ.js → remote-agents-ycHHVsgf.js} +1 -1
- package/dist/{remote-workspace-Dxghqiti.js → remote-workspace-CA33UuVI.js} +4 -4
- package/dist/{remote-workspace-Dxghqiti.js.map → remote-workspace-CA33UuVI.js.map} +1 -1
- package/dist/{review-status-2TdtHNcs.js → review-status-D6H2WOw8.js} +1 -1
- package/dist/{review-status-Bm1bWNEa.js → review-status-DEDvCKMP.js} +44 -4
- package/dist/{review-status-Bm1bWNEa.js.map → review-status-DEDvCKMP.js.map} +1 -1
- package/dist/{tracker-C_62ukEq.js → settings-BcWPTrua.js} +7 -199
- package/dist/settings-BcWPTrua.js.map +1 -0
- package/dist/shadow-state-BZzxfEGw.js +2 -0
- package/dist/{shadow-state-CFFHf05M.js → shadow-state-CE3dQfll.js} +1 -1
- package/dist/{shadow-state-CFFHf05M.js.map → shadow-state-CE3dQfll.js.map} +1 -1
- package/dist/{specialist-context-BdNFsfMG.js → specialist-context-BAUWL1Fl.js} +6 -6
- package/dist/{specialist-context-BdNFsfMG.js.map → specialist-context-BAUWL1Fl.js.map} +1 -1
- package/dist/{specialist-logs-CLztE_bE.js → specialist-logs-DQKKQV9B.js} +1 -1
- package/dist/{specialists-aUoUVWsN.js → specialists-Bfb9ATzw.js} +1 -1
- package/dist/{specialists-DEKqgkxp.js → specialists-D7Kj5o6s.js} +35 -34
- package/dist/specialists-D7Kj5o6s.js.map +1 -0
- package/dist/sync-DMfgd389.js +693 -0
- package/dist/sync-DMfgd389.js.map +1 -0
- package/dist/sync-TL6y-8K6.js +2 -0
- package/dist/{tldr-daemon-BCEFPItr.js → tldr-daemon-CFx4LXAl.js} +2 -2
- package/dist/{tldr-daemon-BCEFPItr.js.map → tldr-daemon-CFx4LXAl.js.map} +1 -1
- package/dist/{tldr-daemon-xBAx4cBE.js → tldr-daemon-D_EooADG.js} +1 -1
- package/dist/{tmux-DN6H886Y.js → tmux-CBtui_Cl.js} +1 -1
- package/dist/{tmux-CKdNxxJx.js → tmux-D6Ah4I8z.js} +2 -2
- package/dist/{tmux-CKdNxxJx.js.map → tmux-D6Ah4I8z.js.map} +1 -1
- package/dist/tracker-BhYYvU3p.js +198 -0
- package/dist/tracker-BhYYvU3p.js.map +1 -0
- package/dist/{tracker-utils-CVU2W1sX.js → tracker-utils-ChQyut8w.js} +34 -12
- package/dist/tracker-utils-ChQyut8w.js.map +1 -0
- package/dist/{traefik-DHgBoWXX.js → traefik-C80EbDu_.js} +4 -4
- package/dist/{traefik-DHgBoWXX.js.map → traefik-C80EbDu_.js.map} +1 -1
- package/dist/{traefik-BR-edbZv.js → traefik-CgHl7Bge.js} +1 -1
- package/dist/{tunnel-BZO9Q5oe.js → tunnel-DXOJ1wMM.js} +1 -1
- package/dist/{tunnel-Bl1qNSyQ.js → tunnel-DzXEPwIc.js} +2 -2
- package/dist/{tunnel-Bl1qNSyQ.js.map → tunnel-DzXEPwIc.js.map} +1 -1
- package/dist/{types-DewGdaIP.js → types-BhJj1SP1.js} +1 -1
- package/dist/{types-DewGdaIP.js.map → types-BhJj1SP1.js.map} +1 -1
- package/dist/{work-type-router-CS2BB1vS.js → work-type-router-CHjciPyS.js} +3 -3
- package/dist/{work-type-router-CS2BB1vS.js.map → work-type-router-CHjciPyS.js.map} +1 -1
- package/dist/{workspace-config-CNXOpKuj.js → workspace-config-fUafvYMp.js} +1 -1
- package/dist/workspace-config-fUafvYMp.js.map +1 -0
- package/dist/workspace-manager-B9jS4Dsq.js +3 -0
- package/dist/{workspace-manager-CncdZkIy.js → workspace-manager-DuLhnzJV.js} +112 -27
- package/dist/workspace-manager-DuLhnzJV.js.map +1 -0
- package/package.json +2 -1
- package/scripts/post-merge-deploy.sh +25 -5
- package/scripts/record-cost-event.js +57 -7
- package/scripts/record-cost-event.js.map +1 -1
- package/skills/pan-help/SKILL.md +1 -1
- package/skills/pan-sync/SKILL.md +6 -6
- package/skills/workspace-add-repo/skill.md +46 -0
- package/templates/claude-md/sections/warnings.md +15 -2
- package/dist/clean-planning-sZXvy3Y5.js +0 -2
- package/dist/close-issue-Dml437qV.js +0 -2
- package/dist/close-issue-Dr7yZmrr.js.map +0 -1
- package/dist/compact-beads-iu218JcO.js +0 -2
- package/dist/dashboard/agent-enrichment-C67LJBgD.js.map +0 -1
- package/dist/dashboard/clean-planning-DCu3cOTu.js +0 -2
- package/dist/dashboard/close-issue-DfIggeZD.js.map +0 -1
- package/dist/dashboard/close-issue-DwdwYtar.js +0 -2
- package/dist/dashboard/compact-beads-DXY2fK2s.js +0 -2
- package/dist/dashboard/event-store-O9q0Gweh.js.map +0 -1
- package/dist/dashboard/hume-MZndNDVU.js +0 -3
- package/dist/dashboard/label-cleanup-CZEsbtq9.js.map +0 -1
- package/dist/dashboard/lifecycle-ZTYdrr2O.js +0 -7
- package/dist/dashboard/merge-agent-twroFuAh.js.map +0 -1
- package/dist/dashboard/projects-Cq3TWdPS.js.map +0 -1
- package/dist/dashboard/providers-Ck2sQd_F.js.map +0 -1
- package/dist/dashboard/public/assets/index-CpSmB2ts.css +0 -1
- package/dist/dashboard/public/assets/index-yarWhi0M.js +0 -214
- package/dist/dashboard/rally-CQ1OBJrJ.js +0 -3
- package/dist/dashboard/settings-CuHV-wcv.js.map +0 -1
- package/dist/dashboard/settings-DMeGBRsk.js +0 -2
- package/dist/dashboard/specialists-C6s3U6tX.js.map +0 -1
- package/dist/dashboard/workflows-B2ARUpOa.js +0 -2
- package/dist/dashboard/workflows-N1UTipYl.js.map +0 -1
- package/dist/dashboard/workspace-config-cmp5_ipD.js.map +0 -1
- package/dist/dashboard/workspace-manager-D_y9ZmW_.js.map +0 -1
- package/dist/hume-BjmwmJ9E.js +0 -3
- package/dist/label-cleanup-31ElPqqv.js.map +0 -1
- package/dist/merge-agent-VQH9z9t8.js.map +0 -1
- package/dist/projects-CvLepaxC.js.map +0 -1
- package/dist/providers-DcCPZ5K4.js.map +0 -1
- package/dist/rally-DR9x8--6.js +0 -3
- package/dist/shadow-state-p3jpGRPJ.js +0 -2
- package/dist/specialists-DEKqgkxp.js.map +0 -1
- package/dist/tracker-C_62ukEq.js.map +0 -1
- package/dist/tracker-utils-CVU2W1sX.js.map +0 -1
- package/dist/workspace-config-CNXOpKuj.js.map +0 -1
- package/dist/workspace-manager-CncdZkIy.js.map +0 -1
- package/dist/workspace-manager-Cx0r2Jnv.js +0 -3
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"tracker-C_62ukEq.js","names":[],"sources":["../src/lib/shell.ts","../src/lib/backup.ts","../src/lib/sync.ts","../src/lib/settings.ts","../src/lib/tracker/linking.ts","../src/lib/tracker/index.ts"],"sourcesContent":["import { existsSync, readFileSync, appendFileSync } from 'fs';\nimport { homedir } from 'os';\nimport { join } from 'path';\n\nexport type Shell = 'bash' | 'zsh' | 'fish' | 'unknown';\n\nexport function detectShell(): Shell {\n const shell = process.env.SHELL || '';\n\n if (shell.includes('zsh')) return 'zsh';\n if (shell.includes('bash')) return 'bash';\n if (shell.includes('fish')) return 'fish';\n\n return 'unknown';\n}\n\nexport function getShellRcFile(shell: Shell): string | null {\n const home = homedir();\n\n switch (shell) {\n case 'zsh':\n return join(home, '.zshrc');\n case 'bash':\n // Prefer .bashrc, fall back to .bash_profile\n const bashrc = join(home, '.bashrc');\n if (existsSync(bashrc)) return bashrc;\n return join(home, '.bash_profile');\n case 'fish':\n return join(home, '.config', 'fish', 'config.fish');\n default:\n return null;\n }\n}\n\nconst ALIAS_LINE = 'alias pan=\"panopticon\"';\nconst ALIAS_MARKER = '# Panopticon CLI alias';\n\nexport function hasAlias(rcFile: string): boolean {\n if (!existsSync(rcFile)) return false;\n\n const content = readFileSync(rcFile, 'utf8');\n return content.includes(ALIAS_MARKER) || content.includes(ALIAS_LINE);\n}\n\nexport function addAlias(rcFile: string): void {\n if (hasAlias(rcFile)) return;\n\n const aliasBlock = `\n${ALIAS_MARKER}\n${ALIAS_LINE}\n`;\n\n appendFileSync(rcFile, aliasBlock, 'utf8');\n}\n\nexport function getAliasInstructions(shell: Shell): string {\n const rcFile = getShellRcFile(shell);\n\n if (!rcFile) {\n return `Add this to your shell config:\\n ${ALIAS_LINE}`;\n }\n\n return `Alias added to ${rcFile}. 
Run:\\n source ${rcFile}`;\n}\n","import { existsSync, mkdirSync, readdirSync, cpSync, rmSync, lstatSync } from 'fs';\nimport { join, basename } from 'path';\nimport { BACKUPS_DIR } from './paths.js';\n\nexport interface BackupInfo {\n timestamp: string;\n path: string;\n targets: string[];\n}\n\nexport function createBackupTimestamp(): string {\n return new Date().toISOString().replace(/[:.]/g, '-');\n}\n\nexport function createBackup(sourceDirs: string[]): BackupInfo {\n const timestamp = createBackupTimestamp();\n const backupPath = join(BACKUPS_DIR, timestamp);\n\n mkdirSync(backupPath, { recursive: true });\n\n const targets: string[] = [];\n\n for (const sourceDir of sourceDirs) {\n if (!existsSync(sourceDir)) continue;\n\n const targetName = basename(sourceDir);\n const targetPath = join(backupPath, targetName);\n\n // Use filter to skip symlinks — sync targets (e.g. ~/.claude/skills/)\n // contain symlinks back into ~/.panopticon/skills/ which causes cpSync\n // to fail with \"cannot copy to a subdirectory of self\".\n cpSync(sourceDir, targetPath, {\n recursive: true,\n filter: (src) => !lstatSync(src).isSymbolicLink(),\n });\n targets.push(targetName);\n }\n\n return {\n timestamp,\n path: backupPath,\n targets,\n };\n}\n\nexport function listBackups(): BackupInfo[] {\n if (!existsSync(BACKUPS_DIR)) return [];\n\n const entries = readdirSync(BACKUPS_DIR, { withFileTypes: true });\n\n return entries\n .filter((e) => e.isDirectory())\n .map((e) => {\n const backupPath = join(BACKUPS_DIR, e.name);\n const contents = readdirSync(backupPath);\n\n return {\n timestamp: e.name,\n path: backupPath,\n targets: contents,\n };\n })\n .sort((a, b) => b.timestamp.localeCompare(a.timestamp));\n}\n\nexport function restoreBackup(timestamp: string, targetDirs: Record<string, string>): void {\n const backupPath = join(BACKUPS_DIR, timestamp);\n\n if (!existsSync(backupPath)) {\n throw new Error(`Backup not found: ${timestamp}`);\n }\n\n const contents = 
readdirSync(backupPath, { withFileTypes: true });\n\n for (const entry of contents) {\n if (!entry.isDirectory()) continue;\n\n const sourcePath = join(backupPath, entry.name);\n const targetPath = targetDirs[entry.name];\n\n if (!targetPath) continue;\n\n // Remove existing and restore from backup\n if (existsSync(targetPath)) {\n rmSync(targetPath, { recursive: true });\n }\n\n cpSync(sourcePath, targetPath, { recursive: true });\n }\n}\n\nexport function cleanOldBackups(keepCount: number = 10): number {\n const backups = listBackups();\n\n if (backups.length <= keepCount) return 0;\n\n const toRemove = backups.slice(keepCount);\n let removed = 0;\n\n for (const backup of toRemove) {\n rmSync(backup.path, { recursive: true });\n removed++;\n }\n\n return removed;\n}\n","import { existsSync, mkdirSync, readdirSync, symlinkSync, unlinkSync, lstatSync, readlinkSync, rmSync, copyFileSync, chmodSync, readFileSync, writeFileSync } from 'fs';\nimport { join, basename, dirname, relative } from 'path';\nimport { homedir } from 'os';\nimport {\n SKILLS_DIR, COMMANDS_DIR, AGENTS_DIR, BIN_DIR,\n SOURCE_SCRIPTS_DIR, SOURCE_DEV_SKILLS_DIR, SOURCE_SKILLS_DIR, SOURCE_AGENTS_DIR, SOURCE_RULES_DIR,\n CACHE_AGENTS_DIR, CACHE_RULES_DIR, CACHE_MANIFEST,\n SYNC_TARGET, isDevMode,\n} from './paths.js';\nimport {\n buildManifestFromDirectory, writeManifest, readManifest, hashFile,\n setManifestEntry, collectSourceFiles,\n type Manifest, type FileStatus,\n compareFileToManifest,\n} from './manifest.js';\nimport { getDevrootPath } from './config.js';\n\nexport interface SyncItem {\n name: string;\n sourcePath: string;\n targetPath: string;\n status: 'new' | 'exists' | 'conflict' | 'symlink';\n}\n\nexport interface SyncPlan {\n skills: SyncItem[];\n commands: SyncItem[];\n agents: SyncItem[];\n rules: SyncItem[];\n devSkills: SyncItem[]; // Developer-only skills (only synced in dev mode)\n}\n\n/**\n * Remove a file, symlink, or directory safely\n */\nfunction removeTarget(targetPath: 
string): void {\n const stats = lstatSync(targetPath);\n if (stats.isDirectory() && !stats.isSymbolicLink()) {\n // It's a real directory, remove recursively\n rmSync(targetPath, { recursive: true, force: true });\n } else {\n // It's a file or symlink\n unlinkSync(targetPath);\n }\n}\n\n/**\n * Check if a path is a Panopticon-managed symlink\n */\nexport function isPanopticonSymlink(targetPath: string): boolean {\n if (!existsSync(targetPath)) return false;\n\n try {\n const stats = lstatSync(targetPath);\n if (!stats.isSymbolicLink()) return false;\n\n const linkTarget = readlinkSync(targetPath);\n // It's ours if it points to our skills/commands dir\n return linkTarget.includes('.panopticon');\n } catch {\n return false;\n }\n}\n\nexport interface MigrationResult {\n removedSymlinks: string[];\n preservedUserContent: string[];\n errors: string[];\n}\n\n/**\n * One-time migration: remove Panopticon-managed symlinks from ~/.claude/.\n *\n * Detects symlinks in ~/.claude/skills/ and ~/.claude/agents/ that point to\n * .panopticon directories. Removes only those symlinks, preserving any\n * user-created content (real files/directories).\n *\n * This is safe to run multiple times — it's a no-op if nothing remains to clean up.\n *\n * Removes two kinds of stale Panopticon content from ~/.claude/:\n * 1. Symlinks pointing to .panopticon or panopticon-cli (legacy sync method)\n * 2. 
Plain directories that also exist in the devroot (stale copies from before\n * the devroot migration — these cause duplicate skill listings)\n */\nexport function migrateStalePersonalContent(): MigrationResult {\n const claudeDir = join(homedir(), '.claude');\n const result: MigrationResult = {\n removedSymlinks: [],\n preservedUserContent: [],\n errors: [],\n };\n\n // Build a set of skill/agent/command names that exist in the devroot\n // so we can identify stale copies in ~/.claude/\n const devrootNames = new Set<string>();\n const devroot = getDevrootPath();\n if (devroot) {\n for (const subdir of ['skills', 'commands', 'agents']) {\n const devrootDir = join(devroot, '.claude', subdir);\n if (existsSync(devrootDir)) {\n try {\n for (const entry of readdirSync(devrootDir)) {\n devrootNames.add(`${subdir}/${entry}`);\n }\n } catch {\n // Ignore read errors on devroot\n }\n }\n }\n }\n\n for (const subdir of ['skills', 'commands', 'agents']) {\n const dir = join(claudeDir, subdir);\n if (!existsSync(dir)) continue;\n\n try {\n const entries = readdirSync(dir);\n for (const entry of entries) {\n const entryPath = join(dir, entry);\n try {\n const stats = lstatSync(entryPath);\n if (stats.isSymbolicLink()) {\n const linkTarget = readlinkSync(entryPath);\n if (linkTarget.includes('.panopticon') || linkTarget.includes('panopticon-cli')) {\n unlinkSync(entryPath);\n result.removedSymlinks.push(`${subdir}/${entry}`);\n } else {\n // Symlink to somewhere else — leave it\n result.preservedUserContent.push(`${subdir}/${entry}`);\n }\n } else if (stats.isDirectory() && devrootNames.has(`${subdir}/${entry}`)) {\n // Plain directory that also exists in devroot — stale Panopticon copy.\n // The devroot copy is the canonical one; this personal copy causes\n // duplicate listings and violates principle #4 (never touch ~/.claude/).\n rmSync(entryPath, { recursive: true, force: true });\n result.removedSymlinks.push(`${subdir}/${entry} (stale copy)`);\n } else {\n // Real 
file/directory with no devroot counterpart — user content, never touch\n result.preservedUserContent.push(`${subdir}/${entry}`);\n }\n } catch (err: any) {\n result.errors.push(`${subdir}/${entry}: ${err.message}`);\n }\n }\n } catch (err: any) {\n result.errors.push(`${subdir}: ${err.message}`);\n }\n }\n\n return result;\n}\n\nexport interface RefreshCacheResult {\n skills: { copied: number; total: number };\n agents: { copied: number; total: number };\n rules: { copied: number; total: number };\n}\n\n/**\n * Recursively copy a directory, overwriting existing files.\n */\nfunction copyDirectoryRecursive(source: string, dest: string): number {\n if (!existsSync(source)) return 0;\n\n mkdirSync(dest, { recursive: true });\n let count = 0;\n\n const entries = readdirSync(source, { withFileTypes: true });\n for (const entry of entries) {\n const srcPath = join(source, entry.name);\n const dstPath = join(dest, entry.name);\n if (entry.isDirectory()) {\n count += copyDirectoryRecursive(srcPath, dstPath);\n } else if (entry.isFile()) {\n copyFileSync(srcPath, dstPath);\n count++;\n }\n }\n return count;\n}\n\n/**\n * Refresh the ~/.panopticon/ cache from the repo source.\n *\n * Always copies (overwrites) skills, agents, and rules from the package's\n * source directories to the cache. 
Generates ~/.panopticon/.manifest.json\n * tracking all cached files.\n *\n * This replaces the old \"skip if exists\" behavior in `pan install`.\n */\nexport function refreshCache(): RefreshCacheResult {\n const result: RefreshCacheResult = {\n skills: { copied: 0, total: 0 },\n agents: { copied: 0, total: 0 },\n rules: { copied: 0, total: 0 },\n };\n\n // Copy skills from repo to cache (always overwrite)\n if (existsSync(SOURCE_SKILLS_DIR)) {\n const skillDirs = readdirSync(SOURCE_SKILLS_DIR, { withFileTypes: true })\n .filter((d) => d.isDirectory());\n\n result.skills.total = skillDirs.length;\n for (const skillDir of skillDirs) {\n const src = join(SOURCE_SKILLS_DIR, skillDir.name);\n const dst = join(SKILLS_DIR, skillDir.name);\n copyDirectoryRecursive(src, dst);\n result.skills.copied++;\n }\n }\n\n // Copy dev-skills to cache too (in dev mode only)\n if (isDevMode() && existsSync(SOURCE_DEV_SKILLS_DIR)) {\n const devSkillDirs = readdirSync(SOURCE_DEV_SKILLS_DIR, { withFileTypes: true })\n .filter((d) => d.isDirectory());\n\n for (const skillDir of devSkillDirs) {\n const src = join(SOURCE_DEV_SKILLS_DIR, skillDir.name);\n const dst = join(SKILLS_DIR, skillDir.name);\n copyDirectoryRecursive(src, dst);\n result.skills.copied++;\n result.skills.total++;\n }\n }\n\n // Copy agent definitions from repo to cache\n if (existsSync(SOURCE_AGENTS_DIR)) {\n mkdirSync(CACHE_AGENTS_DIR, { recursive: true });\n const agents = readdirSync(SOURCE_AGENTS_DIR, { withFileTypes: true })\n .filter((entry) => entry.isFile() && entry.name.endsWith('.md'));\n\n result.agents.total = agents.length;\n for (const agent of agents) {\n copyFileSync(join(SOURCE_AGENTS_DIR, agent.name), join(CACHE_AGENTS_DIR, agent.name));\n result.agents.copied++;\n }\n }\n\n // Copy rules from repo to cache (directory may not exist yet)\n if (existsSync(SOURCE_RULES_DIR)) {\n const ruleFiles = readdirSync(SOURCE_RULES_DIR, { withFileTypes: true })\n .filter((entry) => entry.isFile());\n\n 
result.rules.total = ruleFiles.length;\n for (const rule of ruleFiles) {\n mkdirSync(CACHE_RULES_DIR, { recursive: true });\n copyFileSync(join(SOURCE_RULES_DIR, rule.name), join(CACHE_RULES_DIR, rule.name));\n result.rules.copied++;\n }\n }\n\n // Generate cache manifest\n const manifest = buildManifestFromDirectory(\n join(SKILLS_DIR, '..'), // ~/.panopticon/\n ['skills', 'agent-definitions', 'rules'],\n 'panopticon',\n );\n writeManifest(CACHE_MANIFEST, manifest);\n\n return result;\n}\n\n/**\n * Devroot sync item — represents a single file to distribute.\n */\nexport interface DevrootSyncItem {\n /** Relative path from .claude/ (e.g., \"skills/beads/SKILL.md\") */\n relativePath: string;\n /** Absolute path to source file in cache */\n sourcePath: string;\n /** Absolute path to target file at devroot */\n targetPath: string;\n /** What action to take */\n status: FileStatus;\n}\n\n/**\n * Plan what would be synced to devroot (dry run).\n * Reads from cache, targets <devroot>/.claude/, uses manifest comparison.\n */\nexport function planSync(): SyncPlan {\n const plan: SyncPlan = {\n skills: [],\n commands: [],\n agents: [],\n rules: [],\n devSkills: [],\n };\n\n const devrootPath = getDevrootPath();\n if (!devrootPath) return plan;\n\n const targetBase = join(devrootPath, '.claude');\n const manifestPath = join(targetBase, '.panopticon-manifest.json');\n const manifest = readManifest(manifestPath);\n\n // Plan skills\n const skillFiles = collectSourceFiles(SKILLS_DIR, 'skills/');\n for (const file of skillFiles) {\n const targetFile = join(targetBase, file.relativePath);\n const status = compareFileToManifest(targetFile, file.relativePath, manifest);\n const skillName = file.relativePath.split('/')[1] || file.relativePath;\n\n let syncStatus: SyncItem['status'] = 'new';\n if (status.action === 'update') syncStatus = 'symlink'; // reusing 'symlink' for \"managed, safe to update\"\n else if (status.action === 'modified') syncStatus = 'conflict';\n else if 
(status.action === 'user-owned') syncStatus = 'conflict';\n\n plan.skills.push({\n name: file.relativePath,\n sourcePath: file.absolutePath,\n targetPath: targetFile,\n status: syncStatus,\n });\n }\n\n // Plan agents\n const agentFiles = collectSourceFiles(CACHE_AGENTS_DIR, 'agents/');\n for (const file of agentFiles) {\n const targetFile = join(targetBase, file.relativePath);\n const status = compareFileToManifest(targetFile, file.relativePath, manifest);\n\n let syncStatus: SyncItem['status'] = 'new';\n if (status.action === 'update') syncStatus = 'symlink';\n else if (status.action === 'modified') syncStatus = 'conflict';\n else if (status.action === 'user-owned') syncStatus = 'conflict';\n\n plan.agents.push({\n name: file.relativePath,\n sourcePath: file.absolutePath,\n targetPath: targetFile,\n status: syncStatus,\n });\n }\n\n // Plan rules\n const ruleFiles = collectSourceFiles(CACHE_RULES_DIR, 'rules/');\n for (const file of ruleFiles) {\n const targetFile = join(targetBase, file.relativePath);\n const status = compareFileToManifest(targetFile, file.relativePath, manifest);\n\n let syncStatus: SyncItem['status'] = 'new';\n if (status.action === 'update') syncStatus = 'symlink';\n else if (status.action === 'modified') syncStatus = 'conflict';\n else if (status.action === 'user-owned') syncStatus = 'conflict';\n\n plan.rules.push({\n name: file.relativePath,\n sourcePath: file.absolutePath,\n targetPath: targetFile,\n status: syncStatus,\n });\n }\n\n return plan;\n}\n\nexport interface SyncOptions {\n force?: boolean;\n diff?: boolean;\n dryRun?: boolean;\n}\n\nexport interface SyncResult {\n created: string[];\n updated: string[];\n skipped: string[];\n conflicts: string[];\n diffs: Array<{ path: string; sourceContent: string; targetContent: string }>;\n}\n\n/**\n * Execute sync to devroot: copy from cache to <devroot>/.claude/.\n * Uses manifest-based conflict resolution. 
NEVER touches ~/.claude/.\n */\nexport function executeSync(options: SyncOptions = {}): SyncResult {\n const result: SyncResult = {\n created: [],\n updated: [],\n skipped: [],\n conflicts: [],\n diffs: [],\n };\n\n const devrootPath = getDevrootPath();\n if (!devrootPath) {\n return result;\n }\n\n const targetBase = join(devrootPath, '.claude');\n const manifestPath = join(targetBase, '.panopticon-manifest.json');\n const manifest = readManifest(manifestPath);\n\n // Collect all source files from cache\n const allFiles = [\n ...collectSourceFiles(SKILLS_DIR, 'skills/'),\n ...collectSourceFiles(CACHE_AGENTS_DIR, 'agents/'),\n ...collectSourceFiles(CACHE_RULES_DIR, 'rules/'),\n ];\n\n for (const file of allFiles) {\n const targetFile = join(targetBase, file.relativePath);\n const status = compareFileToManifest(targetFile, file.relativePath, manifest);\n\n switch (status.action) {\n case 'new': {\n // File doesn't exist at target — copy it\n mkdirSync(dirname(targetFile), { recursive: true });\n copyFileSync(file.absolutePath, targetFile);\n const hash = hashFile(targetFile);\n setManifestEntry(manifest, file.relativePath, hash, 'panopticon');\n result.created.push(file.relativePath);\n break;\n }\n\n case 'update': {\n // File exists, hash matches manifest — safe to overwrite (user didn't modify)\n mkdirSync(dirname(targetFile), { recursive: true });\n copyFileSync(file.absolutePath, targetFile);\n const hash = hashFile(targetFile);\n setManifestEntry(manifest, file.relativePath, hash, 'panopticon');\n result.updated.push(file.relativePath);\n break;\n }\n\n case 'modified': {\n // File was modified since we placed it\n if (options.diff) {\n result.diffs.push({\n path: file.relativePath,\n sourceContent: readFileSync(file.absolutePath, 'utf-8'),\n targetContent: readFileSync(targetFile, 'utf-8'),\n });\n }\n\n if (options.force) {\n mkdirSync(dirname(targetFile), { recursive: true });\n copyFileSync(file.absolutePath, targetFile);\n const hash = 
hashFile(targetFile);\n setManifestEntry(manifest, file.relativePath, hash, 'panopticon');\n result.updated.push(file.relativePath);\n } else {\n result.conflicts.push(file.relativePath);\n }\n break;\n }\n\n case 'user-owned': {\n // User placed this file, never touch it\n result.skipped.push(file.relativePath);\n break;\n }\n }\n }\n\n // Write updated manifest\n writeManifest(manifestPath, manifest);\n\n return result;\n}\n\n/**\n * Hook item for sync planning\n */\nexport interface HookItem {\n name: string;\n sourcePath: string;\n targetPath: string;\n status: 'new' | 'updated' | 'current';\n}\n\n/**\n * Plan hooks sync (checks what would be updated)\n */\nexport function planHooksSync(): HookItem[] {\n const hooks: HookItem[] = [];\n\n if (!existsSync(SOURCE_SCRIPTS_DIR)) {\n return hooks;\n }\n\n // Sync hook scripts (no extension) and bundled JS scripts (.js)\n // Skip source files (.ts), shell helpers (.sh), and other non-hook files (.mjs)\n const scripts = readdirSync(SOURCE_SCRIPTS_DIR, { withFileTypes: true })\n .filter((entry) => entry.isFile() && !entry.name.startsWith('.')\n && (!entry.name.includes('.') || entry.name.endsWith('.js')));\n\n for (const script of scripts) {\n const sourcePath = join(SOURCE_SCRIPTS_DIR, script.name);\n const targetPath = join(BIN_DIR, script.name);\n\n let status: HookItem['status'] = 'new';\n\n if (existsSync(targetPath)) {\n // Could compare file contents/timestamps here for 'current' vs 'updated'\n // For now, always update to ensure latest version\n status = 'updated';\n }\n\n hooks.push({ name: script.name, sourcePath, targetPath, status });\n }\n\n return hooks;\n}\n\n/**\n * Sync hooks (copy scripts to ~/.panopticon/bin/)\n */\nexport function syncHooks(): { synced: string[]; errors: string[] } {\n const result = { synced: [] as string[], errors: [] as string[] };\n\n // Ensure bin directory exists\n mkdirSync(BIN_DIR, { recursive: true });\n\n const hooks = planHooksSync();\n\n for (const hook of hooks) {\n try 
{\n copyFileSync(hook.sourcePath, hook.targetPath);\n chmodSync(hook.targetPath, 0o755); // Make executable\n result.synced.push(hook.name);\n } catch (error) {\n result.errors.push(`${hook.name}: ${error}`);\n }\n }\n\n return result;\n}\n\n/**\n * Runtime-specific statusline configurations\n * Maps runtime to: config dir, statusline filename, settings file\n */\nconst STATUSLINE_TARGETS: Record<string, { configDir: string; scriptName: string; settingsFile: string }> = {\n claude: {\n configDir: join(homedir(), '.claude'),\n scriptName: 'statusline-command.sh',\n settingsFile: join(homedir(), '.claude', 'settings.json'),\n },\n // Other runtimes can be added as they support statusline\n};\n\n/**\n * Sync statusline script to all supported runtimes\n * Copies the canonical statusline.sh from panopticon scripts to each runtime's config dir\n * and ensures the runtime's settings.json references it.\n */\nexport function syncStatusline(): { synced: string[]; errors: string[] } {\n const result = { synced: [] as string[], errors: [] as string[] };\n\n const sourceScript = join(SOURCE_SCRIPTS_DIR, 'statusline.sh');\n if (!existsSync(sourceScript)) {\n return result;\n }\n\n for (const [runtime, target] of Object.entries(STATUSLINE_TARGETS)) {\n try {\n // Ensure config dir exists\n mkdirSync(target.configDir, { recursive: true });\n\n // Copy statusline script\n const targetScript = join(target.configDir, target.scriptName);\n copyFileSync(sourceScript, targetScript);\n chmodSync(targetScript, 0o755);\n\n // Update settings.json to reference the statusline\n updateSettingsStatusline(target.settingsFile, targetScript);\n\n result.synced.push(runtime);\n } catch (error) {\n result.errors.push(`${runtime}: ${error}`);\n }\n }\n\n return result;\n}\n\n/**\n * Update a settings.json file to include the statusLine configuration\n * Preserves all existing settings (hooks, etc.)\n */\nfunction updateSettingsStatusline(settingsFile: string, scriptPath: string): void {\n let 
settings: Record<string, any> = {};\n\n if (existsSync(settingsFile)) {\n try {\n settings = JSON.parse(readFileSync(settingsFile, 'utf-8'));\n } catch {\n // If settings file is corrupt, start fresh but preserve the file\n settings = {};\n }\n }\n\n // Only update if statusLine is missing or points to a different script\n const currentCommand = settings.statusLine?.command;\n if (currentCommand === scriptPath && settings.statusLine?.type === 'command') {\n return; // Already configured correctly\n }\n\n settings.statusLine = {\n type: 'command',\n command: scriptPath,\n padding: 0,\n };\n\n mkdirSync(dirname(settingsFile), { recursive: true });\n writeFileSync(settingsFile, JSON.stringify(settings, null, 2) + '\\n', 'utf-8');\n}\n","import { readFileSync, writeFileSync, existsSync } from 'fs';\nimport { SETTINGS_FILE } from './paths.js';\n\n// Model identifiers\nexport type AnthropicModel = 'claude-opus-4-6' | 'claude-sonnet-4-6' | 'claude-sonnet-4-5' | 'claude-haiku-4-5';\nexport type OpenAIModel = 'gpt-5.2-codex' | 'o3-deep-research' | 'gpt-4o' | 'gpt-4o-mini';\nexport type GoogleModel = 'gemini-3-pro-preview' | 'gemini-3-flash-preview' | 'gemini-2.5-pro' | 'gemini-2.5-flash';\nexport type ZAIModel = 'glm-4.7' | 'glm-4.7-flash';\nexport type KimiModel = 'kimi-k2' | 'kimi-k2.5';\nexport type MiniMaxModel = 'minimax-m2.7' | 'minimax-m2.7-highspeed';\nexport type ModelId = AnthropicModel | OpenAIModel | GoogleModel | ZAIModel | KimiModel | MiniMaxModel;\n\n// Task complexity levels\nexport type ComplexityLevel = 'trivial' | 'simple' | 'medium' | 'complex' | 'expert';\n\n// Specialist agent types\nexport interface SpecialistModels {\n review_agent: ModelId;\n test_agent: ModelId;\n merge_agent: ModelId;\n}\n\n// Complexity-based model mapping\nexport type ComplexityModels = {\n [K in ComplexityLevel]: ModelId;\n};\n\n// All model configuration\nexport interface ModelsConfig {\n specialists: SpecialistModels;\n status_review: ModelId;\n complexity: 
ComplexityModels;\n}\n\n// API keys for external providers\nexport interface ApiKeysConfig {\n openai?: string;\n google?: string;\n zai?: string;\n kimi?: string;\n}\n\n// Complete settings structure\nexport interface SettingsConfig {\n models: ModelsConfig;\n api_keys: ApiKeysConfig;\n}\n\n// Default settings - match optimal defaults from settings-api.ts\nconst DEFAULT_SETTINGS: SettingsConfig = {\n models: {\n specialists: {\n review_agent: 'claude-opus-4-6',\n test_agent: 'claude-sonnet-4-6',\n merge_agent: 'claude-sonnet-4-6',\n },\n status_review: 'claude-opus-4-6',\n complexity: {\n trivial: 'claude-haiku-4-5',\n simple: 'claude-haiku-4-5',\n medium: 'kimi-k2.5',\n complex: 'kimi-k2.5',\n expert: 'claude-opus-4-6',\n },\n },\n api_keys: {},\n};\n\n/**\n * Deep merge utility that recursively merges objects.\n * - Recursively merges nested objects\n * - User values take precedence over defaults\n */\nfunction deepMerge<T extends object>(defaults: T, overrides: Partial<T>): T {\n const result = { ...defaults };\n\n for (const key of Object.keys(overrides) as (keyof T)[]) {\n const defaultVal = defaults[key];\n const overrideVal = overrides[key];\n\n // Skip undefined values in overrides\n if (overrideVal === undefined) continue;\n\n // Deep merge if both values are non-array objects\n if (\n typeof defaultVal === 'object' &&\n defaultVal !== null &&\n !Array.isArray(defaultVal) &&\n typeof overrideVal === 'object' &&\n overrideVal !== null &&\n !Array.isArray(overrideVal)\n ) {\n result[key] = deepMerge(defaultVal, overrideVal as any);\n } else {\n // For primitives or null - override wins\n result[key] = overrideVal as T[keyof T];\n }\n }\n\n return result;\n}\n\n/**\n * Load settings from ~/.panopticon/settings.json\n * Returns default settings if file doesn't exist or is invalid\n * Also loads API keys from environment variables as fallback\n */\nexport function loadSettings(): SettingsConfig {\n let settings: SettingsConfig;\n\n if 
(!existsSync(SETTINGS_FILE)) {\n settings = getDefaultSettings();\n } else {\n try {\n const content = readFileSync(SETTINGS_FILE, 'utf8');\n const parsed = JSON.parse(content) as Partial<SettingsConfig>;\n settings = deepMerge(DEFAULT_SETTINGS, parsed);\n } catch (error) {\n console.error('Warning: Failed to parse settings.json, using defaults');\n settings = getDefaultSettings();\n }\n }\n\n // Load API keys from environment variables as fallback\n // This allows using ~/.panopticon.env for API keys\n const envApiKeys: ApiKeysConfig = {};\n if (process.env.OPENAI_API_KEY) envApiKeys.openai = process.env.OPENAI_API_KEY;\n if (process.env.GOOGLE_API_KEY) envApiKeys.google = process.env.GOOGLE_API_KEY;\n if (process.env.ZAI_API_KEY) envApiKeys.zai = process.env.ZAI_API_KEY;\n if (process.env.KIMI_API_KEY) envApiKeys.kimi = process.env.KIMI_API_KEY;\n\n // Merge env vars as fallback (settings.json takes precedence)\n settings.api_keys = {\n ...envApiKeys,\n ...settings.api_keys,\n };\n\n return settings;\n}\n\n/**\n * Save settings to ~/.panopticon/settings.json\n * Writes with pretty formatting (2-space indent)\n */\nexport function saveSettings(settings: SettingsConfig): void {\n const content = JSON.stringify(settings, null, 2);\n writeFileSync(SETTINGS_FILE, content, 'utf8');\n}\n\n/**\n * Validate settings structure and model IDs\n * Returns error message if invalid, null if valid\n */\nexport function validateSettings(settings: SettingsConfig): string | null {\n // Validate models structure\n if (!settings.models) {\n return 'Missing models configuration';\n }\n\n // Validate specialists\n if (!settings.models.specialists) {\n return 'Missing specialists configuration';\n }\n const specialists = settings.models.specialists;\n if (!specialists.review_agent || !specialists.test_agent || !specialists.merge_agent) {\n return 'Missing specialist agent model configuration';\n }\n\n // Validate complexity levels\n if (!settings.models.complexity) {\n return 'Missing 
complexity configuration';\n }\n const complexity = settings.models.complexity;\n const requiredLevels: ComplexityLevel[] = ['trivial', 'simple', 'medium', 'complex', 'expert'];\n for (const level of requiredLevels) {\n if (!complexity[level]) {\n return `Missing complexity level: ${level}`;\n }\n }\n\n // Validate api_keys structure (optional keys)\n if (!settings.api_keys) {\n return 'Missing api_keys configuration';\n }\n\n return null;\n}\n\n/**\n * Get a deep copy of the default settings\n */\nexport function getDefaultSettings(): SettingsConfig {\n return JSON.parse(JSON.stringify(DEFAULT_SETTINGS));\n}\n\n/**\n * Get available models for a provider based on configured API keys\n * Returns empty array if provider API key is not configured\n */\nexport function getAvailableModels(settings: SettingsConfig): {\n anthropic: AnthropicModel[];\n openai: OpenAIModel[];\n google: GoogleModel[];\n zai: ZAIModel[];\n kimi: KimiModel[];\n} {\n const anthropicModels: AnthropicModel[] = [\n 'claude-opus-4-6',\n 'claude-sonnet-4-6',\n 'claude-haiku-4-5',\n ];\n\n const openaiModels: OpenAIModel[] = settings.api_keys.openai\n ? ['gpt-5.2-codex', 'o3-deep-research', 'gpt-4o', 'gpt-4o-mini']\n : [];\n\n const googleModels: GoogleModel[] = settings.api_keys.google\n ? ['gemini-3-pro-preview', 'gemini-3-flash-preview']\n : [];\n\n const zaiModels: ZAIModel[] = settings.api_keys.zai\n ? ['glm-4.7', 'glm-4.7-flash']\n : [];\n\n const kimiModels: KimiModel[] = settings.api_keys.kimi\n ? 
['kimi-k2', 'kimi-k2.5']\n : [];\n\n return {\n anthropic: anthropicModels,\n openai: openaiModels,\n google: googleModels,\n zai: zaiModels,\n kimi: kimiModels,\n };\n}\n\n/**\n * Check if a model ID is an Anthropic model\n * Anthropic models can be run directly with `claude` CLI\n */\nexport function isAnthropicModel(modelId: ModelId | string): boolean {\n return modelId.startsWith('claude-');\n}\n\n/**\n * Get the Claude CLI model flag for an Anthropic model\n * Maps our model IDs to Claude's expected format\n */\nexport function getClaudeModelFlag(modelId: ModelId | string): string {\n const modelMap: Record<string, string> = {\n 'claude-opus-4-6': 'opus',\n 'claude-sonnet-4-6': 'sonnet',\n 'claude-sonnet-4-5': 'sonnet',\n 'claude-haiku-4-5': 'haiku',\n };\n return modelMap[modelId] || 'sonnet';\n}\n\n/**\n * Get the command to run an agent with a specific model\n * Always uses 'claude' CLI — non-Anthropic models work via ANTHROPIC_BASE_URL env var\n * pointing to their Anthropic-compatible endpoint.\n */\nexport function getAgentCommand(modelId: ModelId | string): { command: string; args: string[] } {\n if (isAnthropicModel(modelId)) {\n return {\n command: 'claude',\n args: ['--model', getClaudeModelFlag(modelId)],\n };\n }\n // Non-Anthropic direct providers: use claude CLI with the model name as-is.\n // The caller must set ANTHROPIC_BASE_URL and ANTHROPIC_AUTH_TOKEN env vars.\n return {\n command: 'claude',\n args: ['--model', modelId],\n };\n}\n","/**\n * Cross-Tracker Linking\n *\n * Manages links between issues in different trackers.\n * Links are stored in a local JSON file for persistence.\n */\n\nimport { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs';\nimport { join } from 'path';\nimport { homedir } from 'os';\nimport type { TrackerType } from './interface.js';\n\n// Link direction types\nexport type LinkDirection = 'blocks' | 'blocked_by' | 'related' | 'duplicate_of';\n\n// A single link between two issues\nexport interface 
TrackerLink {\n sourceIssueRef: string; // e.g., \"MIN-630\"\n sourceTracker: TrackerType;\n targetIssueRef: string; // e.g., \"#42\"\n targetTracker: TrackerType;\n direction: LinkDirection;\n createdAt: string; // ISO timestamp\n}\n\n// Storage format\ninterface LinkStore {\n version: 1;\n links: TrackerLink[];\n}\n\n/**\n * Parse an issue reference to extract tracker and ID\n * Examples:\n * \"#42\" -> { tracker: \"github\", ref: \"#42\" }\n * \"github#42\" -> { tracker: \"github\", ref: \"#42\" }\n * \"MIN-630\" -> { tracker: \"linear\", ref: \"MIN-630\" }\n * \"gitlab#15\" -> { tracker: \"gitlab\", ref: \"#15\" }\n */\nexport function parseIssueRef(ref: string): { tracker: TrackerType; ref: string } | null {\n // Explicit tracker prefix\n if (ref.startsWith('github#')) {\n return { tracker: 'github', ref: `#${ref.slice(7)}` };\n }\n if (ref.startsWith('gitlab#')) {\n return { tracker: 'gitlab', ref: `#${ref.slice(7)}` };\n }\n if (ref.startsWith('linear:')) {\n return { tracker: 'linear', ref: ref.slice(7) };\n }\n\n // GitHub-style refs (#number)\n if (/^#\\d+$/.test(ref)) {\n return { tracker: 'github', ref };\n }\n\n // Linear-style refs (XXX-123)\n if (/^[A-Z]+-\\d+$/i.test(ref)) {\n return { tracker: 'linear', ref: ref.toUpperCase() };\n }\n\n return null;\n}\n\n/**\n * Format an issue ref with tracker prefix for display\n */\nexport function formatIssueRef(ref: string, tracker: TrackerType): string {\n if (tracker === 'github') {\n return ref.startsWith('#') ? `github${ref}` : `github#${ref}`;\n }\n if (tracker === 'gitlab') {\n return ref.startsWith('#') ? `gitlab${ref}` : `gitlab#${ref}`;\n }\n return ref; // Linear refs are already unique\n}\n\n/**\n * Link Manager for cross-tracker issue linking\n */\nexport class LinkManager {\n private storePath: string;\n private store: LinkStore;\n\n constructor(storePath?: string) {\n this.storePath = storePath ?? 
join(homedir(), '.panopticon', 'links.json');\n this.store = this.load();\n }\n\n private load(): LinkStore {\n if (existsSync(this.storePath)) {\n try {\n const data = JSON.parse(readFileSync(this.storePath, 'utf-8'));\n if (data.version === 1) {\n return data;\n }\n } catch {\n // Fall through to default\n }\n }\n return { version: 1, links: [] };\n }\n\n private save(): void {\n const dir = join(this.storePath, '..');\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n writeFileSync(this.storePath, JSON.stringify(this.store, null, 2));\n }\n\n /**\n * Add a link between two issues\n */\n addLink(\n source: { ref: string; tracker: TrackerType },\n target: { ref: string; tracker: TrackerType },\n direction: LinkDirection = 'related'\n ): TrackerLink {\n // Check if link already exists\n const existing = this.store.links.find(\n (l) =>\n l.sourceIssueRef === source.ref &&\n l.sourceTracker === source.tracker &&\n l.targetIssueRef === target.ref &&\n l.targetTracker === target.tracker\n );\n\n if (existing) {\n // Update direction if different\n if (existing.direction !== direction) {\n existing.direction = direction;\n this.save();\n }\n return existing;\n }\n\n const link: TrackerLink = {\n sourceIssueRef: source.ref,\n sourceTracker: source.tracker,\n targetIssueRef: target.ref,\n targetTracker: target.tracker,\n direction,\n createdAt: new Date().toISOString(),\n };\n\n this.store.links.push(link);\n this.save();\n return link;\n }\n\n /**\n * Remove a link between two issues\n */\n removeLink(\n source: { ref: string; tracker: TrackerType },\n target: { ref: string; tracker: TrackerType }\n ): boolean {\n const index = this.store.links.findIndex(\n (l) =>\n l.sourceIssueRef === source.ref &&\n l.sourceTracker === source.tracker &&\n l.targetIssueRef === target.ref &&\n l.targetTracker === target.tracker\n );\n\n if (index >= 0) {\n this.store.links.splice(index, 1);\n this.save();\n return true;\n }\n return false;\n }\n\n /**\n * Get all 
issues linked to a given issue\n */\n getLinkedIssues(ref: string, tracker: TrackerType): TrackerLink[] {\n return this.store.links.filter(\n (l) =>\n (l.sourceIssueRef === ref && l.sourceTracker === tracker) ||\n (l.targetIssueRef === ref && l.targetTracker === tracker)\n );\n }\n\n /**\n * Get all links (for debugging/admin)\n */\n getAllLinks(): TrackerLink[] {\n return [...this.store.links];\n }\n\n /**\n * Find linked issue in another tracker\n */\n findLinkedIssue(\n ref: string,\n sourceTracker: TrackerType,\n targetTracker: TrackerType\n ): string | null {\n // Check as source\n const asSource = this.store.links.find(\n (l) =>\n l.sourceIssueRef === ref &&\n l.sourceTracker === sourceTracker &&\n l.targetTracker === targetTracker\n );\n if (asSource) return asSource.targetIssueRef;\n\n // Check as target\n const asTarget = this.store.links.find(\n (l) =>\n l.targetIssueRef === ref &&\n l.targetTracker === sourceTracker &&\n l.sourceTracker === targetTracker\n );\n if (asTarget) return asTarget.sourceIssueRef;\n\n return null;\n }\n\n /**\n * Clear all links (for testing)\n */\n clear(): void {\n this.store.links = [];\n this.save();\n }\n}\n\n// Singleton instance\nlet _linkManager: LinkManager | null = null;\n\nexport function getLinkManager(): LinkManager {\n if (!_linkManager) {\n _linkManager = new LinkManager();\n }\n return _linkManager;\n}\n","/**\n * Issue Tracker Module\n *\n * Provides a unified interface for different issue tracking systems.\n */\n\n// Core interface and types\nexport type {\n IssueTracker,\n Issue,\n IssueFilters,\n IssueState,\n IssueUpdate,\n NewIssue,\n Comment,\n TrackerType,\n} from './interface.js';\n\nexport {\n NotImplementedError,\n IssueNotFoundError,\n TrackerAuthError,\n} from './interface.js';\n\n// Tracker implementations\nexport { LinearTracker } from './linear.js';\nexport { GitHubTracker } from './github.js';\nexport { GitLabTracker } from './gitlab.js';\n\n// Factory functions\nexport type { TrackerConfig } 
from './factory.js';\nexport {\n createTracker,\n createTrackerFromConfig,\n getPrimaryTracker,\n getSecondaryTracker,\n getAllTrackers,\n} from './factory.js';\n\n// Cross-tracker linking\nexport type { TrackerLink, LinkDirection } from './linking.js';\nexport {\n LinkManager,\n getLinkManager,\n parseIssueRef,\n formatIssueRef,\n} from './linking.js';\n"],"mappings":";;;;;;;;;AAMA,SAAgB,cAAqB;CACnC,MAAM,QAAQ,QAAQ,IAAI,SAAS;AAEnC,KAAI,MAAM,SAAS,MAAM,CAAE,QAAO;AAClC,KAAI,MAAM,SAAS,OAAO,CAAE,QAAO;AACnC,KAAI,MAAM,SAAS,OAAO,CAAE,QAAO;AAEnC,QAAO;;AAGT,SAAgB,eAAe,OAA6B;CAC1D,MAAM,OAAO,SAAS;AAEtB,SAAQ,OAAR;EACE,KAAK,MACH,QAAO,KAAK,MAAM,SAAS;EAC7B,KAAK;GAEH,MAAM,SAAS,KAAK,MAAM,UAAU;AACpC,OAAI,WAAW,OAAO,CAAE,QAAO;AAC/B,UAAO,KAAK,MAAM,gBAAgB;EACpC,KAAK,OACH,QAAO,KAAK,MAAM,WAAW,QAAQ,cAAc;EACrD,QACE,QAAO;;;AAIb,MAAM,aAAa;AACnB,MAAM,eAAe;AAErB,SAAgB,SAAS,QAAyB;AAChD,KAAI,CAAC,WAAW,OAAO,CAAE,QAAO;CAEhC,MAAM,UAAU,aAAa,QAAQ,OAAO;AAC5C,QAAO,QAAQ,SAAS,aAAa,IAAI,QAAQ,SAAS,WAAW;;AAGvE,SAAgB,SAAS,QAAsB;AAC7C,KAAI,SAAS,OAAO,CAAE;AAOtB,gBAAe,QALI;EACnB,aAAa;EACb,WAAW;GAGwB,OAAO;;AAG5C,SAAgB,qBAAqB,OAAsB;CACzD,MAAM,SAAS,eAAe,MAAM;AAEpC,KAAI,CAAC,OACH,QAAO,qCAAqC;AAG9C,QAAO,kBAAkB,OAAO,mBAAmB;;;;YC5DZ;AAQzC,SAAgB,wBAAgC;AAC9C,yBAAO,IAAI,MAAM,EAAC,aAAa,CAAC,QAAQ,SAAS,IAAI;;AAGvD,SAAgB,aAAa,YAAkC;CAC7D,MAAM,YAAY,uBAAuB;CACzC,MAAM,aAAa,KAAK,aAAa,UAAU;AAE/C,WAAU,YAAY,EAAE,WAAW,MAAM,CAAC;CAE1C,MAAM,UAAoB,EAAE;AAE5B,MAAK,MAAM,aAAa,YAAY;AAClC,MAAI,CAAC,WAAW,UAAU,CAAE;EAE5B,MAAM,aAAa,SAAS,UAAU;AAMtC,SAAO,WALY,KAAK,YAAY,WAAW,EAKjB;GAC5B,WAAW;GACX,SAAS,QAAQ,CAAC,UAAU,IAAI,CAAC,gBAAgB;GAClD,CAAC;AACF,UAAQ,KAAK,WAAW;;AAG1B,QAAO;EACL;EACA,MAAM;EACN;EACD;;AAGH,SAAgB,cAA4B;AAC1C,KAAI,CAAC,WAAW,YAAY,CAAE,QAAO,EAAE;AAIvC,QAFgB,YAAY,aAAa,EAAE,eAAe,MAAM,CAAC,CAG9D,QAAQ,MAAM,EAAE,aAAa,CAAC,CAC9B,KAAK,MAAM;EACV,MAAM,aAAa,KAAK,aAAa,EAAE,KAAK;EAC5C,MAAM,WAAW,YAAY,WAAW;AAExC,SAAO;GACL,WAAW,EAAE;GACb,MAAM;GACN,SAAS;GACV;GACD,CACD,MAAM,GAAG,MAAM,EAAE,UAAU,cAAc,EAAE,UAAU,CAAC;;AAG3D,SAAgB,cAAc,WAAmB,YAA0C;CACzF,MAAM
,aAAa,KAAK,aAAa,UAAU;AAE/C,KAAI,CAAC,WAAW,WAAW,CACzB,OAAM,IAAI,MAAM,qBAAqB,YAAY;CAGnD,MAAM,WAAW,YAAY,YAAY,EAAE,eAAe,MAAM,CAAC;AAEjE,MAAK,MAAM,SAAS,UAAU;AAC5B,MAAI,CAAC,MAAM,aAAa,CAAE;EAE1B,MAAM,aAAa,KAAK,YAAY,MAAM,KAAK;EAC/C,MAAM,aAAa,WAAW,MAAM;AAEpC,MAAI,CAAC,WAAY;AAGjB,MAAI,WAAW,WAAW,CACxB,QAAO,YAAY,EAAE,WAAW,MAAM,CAAC;AAGzC,SAAO,YAAY,YAAY,EAAE,WAAW,MAAM,CAAC;;;AAIvD,SAAgB,gBAAgB,YAAoB,IAAY;CAC9D,MAAM,UAAU,aAAa;AAE7B,KAAI,QAAQ,UAAU,UAAW,QAAO;CAExC,MAAM,WAAW,QAAQ,MAAM,UAAU;CACzC,IAAI,UAAU;AAEd,MAAK,MAAM,UAAU,UAAU;AAC7B,SAAO,OAAO,MAAM,EAAE,WAAW,MAAM,CAAC;AACxC;;AAGF,QAAO;;;;YChGW;eAMG;aACsB;;;;AAkC7C,SAAgB,oBAAoB,YAA6B;AAC/D,KAAI,CAAC,WAAW,WAAW,CAAE,QAAO;AAEpC,KAAI;AAEF,MAAI,CADU,UAAU,WAAW,CACxB,gBAAgB,CAAE,QAAO;AAIpC,SAFmB,aAAa,WAAW,CAEzB,SAAS,cAAc;SACnC;AACN,SAAO;;;;;;;;;;;;;;;;;AAwBX,SAAgB,8BAA+C;CAC7D,MAAM,YAAY,KAAK,SAAS,EAAE,UAAU;CAC5C,MAAM,SAA0B;EAC9B,iBAAiB,EAAE;EACnB,sBAAsB,EAAE;EACxB,QAAQ,EAAE;EACX;CAID,MAAM,+BAAe,IAAI,KAAa;CACtC,MAAM,UAAU,gBAAgB;AAChC,KAAI,QACF,MAAK,MAAM,UAAU;EAAC;EAAU;EAAY;EAAS,EAAE;EACrD,MAAM,aAAa,KAAK,SAAS,WAAW,OAAO;AACnD,MAAI,WAAW,WAAW,CACxB,KAAI;AACF,QAAK,MAAM,SAAS,YAAY,WAAW,CACzC,cAAa,IAAI,GAAG,OAAO,GAAG,QAAQ;UAElC;;AAOd,MAAK,MAAM,UAAU;EAAC;EAAU;EAAY;EAAS,EAAE;EACrD,MAAM,MAAM,KAAK,WAAW,OAAO;AACnC,MAAI,CAAC,WAAW,IAAI,CAAE;AAEtB,MAAI;GACF,MAAM,UAAU,YAAY,IAAI;AAChC,QAAK,MAAM,SAAS,SAAS;IAC3B,MAAM,YAAY,KAAK,KAAK,MAAM;AAClC,QAAI;KACF,MAAM,QAAQ,UAAU,UAAU;AAClC,SAAI,MAAM,gBAAgB,EAAE;MAC1B,MAAM,aAAa,aAAa,UAAU;AAC1C,UAAI,WAAW,SAAS,cAAc,IAAI,WAAW,SAAS,iBAAiB,EAAE;AAC/E,kBAAW,UAAU;AACrB,cAAO,gBAAgB,KAAK,GAAG,OAAO,GAAG,QAAQ;YAGjD,QAAO,qBAAqB,KAAK,GAAG,OAAO,GAAG,QAAQ;gBAE/C,MAAM,aAAa,IAAI,aAAa,IAAI,GAAG,OAAO,GAAG,QAAQ,EAAE;AAIxE,aAAO,WAAW;OAAE,WAAW;OAAM,OAAO;OAAM,CAAC;AACnD,aAAO,gBAAgB,KAAK,GAAG,OAAO,GAAG,MAAM,eAAe;WAG9D,QAAO,qBAAqB,KAAK,GAAG,OAAO,GAAG,QAAQ;aAEjD,KAAU;AACjB,YAAO,OAAO,KAAK,GAAG,OAAO,GAAG,MAAM,IAAI,IAAI,UAAU;;;WAGrD,KAAU;AACjB,UAAO,OAAO,KAAK,GAAG,OAAO,IAAI,IAAI,UAAU;;;AAInD,QAAO;;;;;AAYT,SAAS,uBAAuB,QAAgB,MAAsB;AACpE,KAAI,CAAC,WAAW,
OAAO,CAAE,QAAO;AAEhC,WAAU,MAAM,EAAE,WAAW,MAAM,CAAC;CACpC,IAAI,QAAQ;CAEZ,MAAM,UAAU,YAAY,QAAQ,EAAE,eAAe,MAAM,CAAC;AAC5D,MAAK,MAAM,SAAS,SAAS;EAC3B,MAAM,UAAU,KAAK,QAAQ,MAAM,KAAK;EACxC,MAAM,UAAU,KAAK,MAAM,MAAM,KAAK;AACtC,MAAI,MAAM,aAAa,CACrB,UAAS,uBAAuB,SAAS,QAAQ;WACxC,MAAM,QAAQ,EAAE;AACzB,gBAAa,SAAS,QAAQ;AAC9B;;;AAGJ,QAAO;;;;;;;;;;;AAYT,SAAgB,eAAmC;CACjD,MAAM,SAA6B;EACjC,QAAQ;GAAE,QAAQ;GAAG,OAAO;GAAG;EAC/B,QAAQ;GAAE,QAAQ;GAAG,OAAO;GAAG;EAC/B,OAAO;GAAE,QAAQ;GAAG,OAAO;GAAG;EAC/B;AAGD,KAAI,WAAW,kBAAkB,EAAE;EACjC,MAAM,YAAY,YAAY,mBAAmB,EAAE,eAAe,MAAM,CAAC,CACtE,QAAQ,MAAM,EAAE,aAAa,CAAC;AAEjC,SAAO,OAAO,QAAQ,UAAU;AAChC,OAAK,MAAM,YAAY,WAAW;AAGhC,0BAFY,KAAK,mBAAmB,SAAS,KAAK,EACtC,KAAK,YAAY,SAAS,KAAK,CACX;AAChC,UAAO,OAAO;;;AAKlB,KAAI,WAAW,IAAI,WAAW,sBAAsB,EAAE;EACpD,MAAM,eAAe,YAAY,uBAAuB,EAAE,eAAe,MAAM,CAAC,CAC7E,QAAQ,MAAM,EAAE,aAAa,CAAC;AAEjC,OAAK,MAAM,YAAY,cAAc;AAGnC,0BAFY,KAAK,uBAAuB,SAAS,KAAK,EAC1C,KAAK,YAAY,SAAS,KAAK,CACX;AAChC,UAAO,OAAO;AACd,UAAO,OAAO;;;AAKlB,KAAI,WAAW,kBAAkB,EAAE;AACjC,YAAU,kBAAkB,EAAE,WAAW,MAAM,CAAC;EAChD,MAAM,SAAS,YAAY,mBAAmB,EAAE,eAAe,MAAM,CAAC,CACnE,QAAQ,UAAU,MAAM,QAAQ,IAAI,MAAM,KAAK,SAAS,MAAM,CAAC;AAElE,SAAO,OAAO,QAAQ,OAAO;AAC7B,OAAK,MAAM,SAAS,QAAQ;AAC1B,gBAAa,KAAK,mBAAmB,MAAM,KAAK,EAAE,KAAK,kBAAkB,MAAM,KAAK,CAAC;AACrF,UAAO,OAAO;;;AAKlB,KAAI,WAAW,iBAAiB,EAAE;EAChC,MAAM,YAAY,YAAY,kBAAkB,EAAE,eAAe,MAAM,CAAC,CACrE,QAAQ,UAAU,MAAM,QAAQ,CAAC;AAEpC,SAAO,MAAM,QAAQ,UAAU;AAC/B,OAAK,MAAM,QAAQ,WAAW;AAC5B,aAAU,iBAAiB,EAAE,WAAW,MAAM,CAAC;AAC/C,gBAAa,KAAK,kBAAkB,KAAK,KAAK,EAAE,KAAK,iBAAiB,KAAK,KAAK,CAAC;AACjF,UAAO,MAAM;;;AAUjB,eAAc,gBALG,2BACf,KAAK,YAAY,KAAK,EACtB;EAAC;EAAU;EAAqB;EAAQ,EACxC,aACD,CACsC;AAEvC,QAAO;;;;;;AAqBT,SAAgB,WAAqB;CACnC,MAAM,OAAiB;EACrB,QAAQ,EAAE;EACV,UAAU,EAAE;EACZ,QAAQ,EAAE;EACV,OAAO,EAAE;EACT,WAAW,EAAE;EACd;CAED,MAAM,cAAc,gBAAgB;AACpC,KAAI,CAAC,YAAa,QAAO;CAEzB,MAAM,aAAa,KAAK,aAAa,UAAU;CAE/C,MAAM,WAAW,aADI,KAAK,YAAY,4BAA4B,CACvB;CAG3C,MAAM,aAAa,mBAAmB,YAAY,UAAU;AAC5D,MAAK,MAAM,QAAQ,YAAY;EAC7B,MAAM,aAAa,KAAK,YAAY,KAAK,aAAa;EACtD,MAAM,SAAS,sBAAs
B,YAAY,KAAK,cAAc,SAAS;AAC3D,OAAK,aAAa,MAAM,IAAI,CAAC,MAAM,KAAK;EAE1D,IAAI,aAAiC;AACrC,MAAI,OAAO,WAAW,SAAU,cAAa;WACpC,OAAO,WAAW,WAAY,cAAa;WAC3C,OAAO,WAAW,aAAc,cAAa;AAEtD,OAAK,OAAO,KAAK;GACf,MAAM,KAAK;GACX,YAAY,KAAK;GACjB,YAAY;GACZ,QAAQ;GACT,CAAC;;CAIJ,MAAM,aAAa,mBAAmB,kBAAkB,UAAU;AAClE,MAAK,MAAM,QAAQ,YAAY;EAC7B,MAAM,aAAa,KAAK,YAAY,KAAK,aAAa;EACtD,MAAM,SAAS,sBAAsB,YAAY,KAAK,cAAc,SAAS;EAE7E,IAAI,aAAiC;AACrC,MAAI,OAAO,WAAW,SAAU,cAAa;WACpC,OAAO,WAAW,WAAY,cAAa;WAC3C,OAAO,WAAW,aAAc,cAAa;AAEtD,OAAK,OAAO,KAAK;GACf,MAAM,KAAK;GACX,YAAY,KAAK;GACjB,YAAY;GACZ,QAAQ;GACT,CAAC;;CAIJ,MAAM,YAAY,mBAAmB,iBAAiB,SAAS;AAC/D,MAAK,MAAM,QAAQ,WAAW;EAC5B,MAAM,aAAa,KAAK,YAAY,KAAK,aAAa;EACtD,MAAM,SAAS,sBAAsB,YAAY,KAAK,cAAc,SAAS;EAE7E,IAAI,aAAiC;AACrC,MAAI,OAAO,WAAW,SAAU,cAAa;WACpC,OAAO,WAAW,WAAY,cAAa;WAC3C,OAAO,WAAW,aAAc,cAAa;AAEtD,OAAK,MAAM,KAAK;GACd,MAAM,KAAK;GACX,YAAY,KAAK;GACjB,YAAY;GACZ,QAAQ;GACT,CAAC;;AAGJ,QAAO;;;;;;AAqBT,SAAgB,YAAY,UAAuB,EAAE,EAAc;CACjE,MAAM,SAAqB;EACzB,SAAS,EAAE;EACX,SAAS,EAAE;EACX,SAAS,EAAE;EACX,WAAW,EAAE;EACb,OAAO,EAAE;EACV;CAED,MAAM,cAAc,gBAAgB;AACpC,KAAI,CAAC,YACH,QAAO;CAGT,MAAM,aAAa,KAAK,aAAa,UAAU;CAC/C,MAAM,eAAe,KAAK,YAAY,4BAA4B;CAClE,MAAM,WAAW,aAAa,aAAa;CAG3C,MAAM,WAAW;EACf,GAAG,mBAAmB,YAAY,UAAU;EAC5C,GAAG,mBAAmB,kBAAkB,UAAU;EAClD,GAAG,mBAAmB,iBAAiB,SAAS;EACjD;AAED,MAAK,MAAM,QAAQ,UAAU;EAC3B,MAAM,aAAa,KAAK,YAAY,KAAK,aAAa;AAGtD,UAFe,sBAAsB,YAAY,KAAK,cAAc,SAAS,CAE9D,QAAf;GACE,KAAK,OAAO;AAEV,cAAU,QAAQ,WAAW,EAAE,EAAE,WAAW,MAAM,CAAC;AACnD,iBAAa,KAAK,cAAc,WAAW;IAC3C,MAAM,OAAO,SAAS,WAAW;AACjC,qBAAiB,UAAU,KAAK,cAAc,MAAM,aAAa;AACjE,WAAO,QAAQ,KAAK,KAAK,aAAa;AACtC;;GAGF,KAAK,UAAU;AAEb,cAAU,QAAQ,WAAW,EAAE,EAAE,WAAW,MAAM,CAAC;AACnD,iBAAa,KAAK,cAAc,WAAW;IAC3C,MAAM,OAAO,SAAS,WAAW;AACjC,qBAAiB,UAAU,KAAK,cAAc,MAAM,aAAa;AACjE,WAAO,QAAQ,KAAK,KAAK,aAAa;AACtC;;GAGF,KAAK;AAEH,QAAI,QAAQ,KACV,QAAO,MAAM,KAAK;KAChB,MAAM,KAAK;KACX,eAAe,aAAa,KAAK,cAAc,QAAQ;KACvD,eAAe,aAAa,YAAY,QAAQ;KACjD,CAAC;AAGJ,QAAI,QAAQ,OAAO;AACjB,eAAU,QAAQ,WAAW,EAAE,EAAE,WAAW,MAAM,CAAC;AACnD,kBAAa,KAAK,cAAc,WAAW;KAC3C,MAA
M,OAAO,SAAS,WAAW;AACjC,sBAAiB,UAAU,KAAK,cAAc,MAAM,aAAa;AACjE,YAAO,QAAQ,KAAK,KAAK,aAAa;UAEtC,QAAO,UAAU,KAAK,KAAK,aAAa;AAE1C;GAGF,KAAK;AAEH,WAAO,QAAQ,KAAK,KAAK,aAAa;AACtC;;;AAMN,eAAc,cAAc,SAAS;AAErC,QAAO;;;;;AAgBT,SAAgB,gBAA4B;CAC1C,MAAM,QAAoB,EAAE;AAE5B,KAAI,CAAC,WAAW,mBAAmB,CACjC,QAAO;CAKT,MAAM,UAAU,YAAY,oBAAoB,EAAE,eAAe,MAAM,CAAC,CACrE,QAAQ,UAAU,MAAM,QAAQ,IAAI,CAAC,MAAM,KAAK,WAAW,IAAI,KAC1D,CAAC,MAAM,KAAK,SAAS,IAAI,IAAI,MAAM,KAAK,SAAS,MAAM,EAAE;AAEjE,MAAK,MAAM,UAAU,SAAS;EAC5B,MAAM,aAAa,KAAK,oBAAoB,OAAO,KAAK;EACxD,MAAM,aAAa,KAAK,SAAS,OAAO,KAAK;EAE7C,IAAI,SAA6B;AAEjC,MAAI,WAAW,WAAW,CAGxB,UAAS;AAGX,QAAM,KAAK;GAAE,MAAM,OAAO;GAAM;GAAY;GAAY;GAAQ,CAAC;;AAGnE,QAAO;;;;;AAMT,SAAgB,YAAoD;CAClE,MAAM,SAAS;EAAE,QAAQ,EAAE;EAAc,QAAQ,EAAE;EAAc;AAGjE,WAAU,SAAS,EAAE,WAAW,MAAM,CAAC;CAEvC,MAAM,QAAQ,eAAe;AAE7B,MAAK,MAAM,QAAQ,MACjB,KAAI;AACF,eAAa,KAAK,YAAY,KAAK,WAAW;AAC9C,YAAU,KAAK,YAAY,IAAM;AACjC,SAAO,OAAO,KAAK,KAAK,KAAK;UACtB,OAAO;AACd,SAAO,OAAO,KAAK,GAAG,KAAK,KAAK,IAAI,QAAQ;;AAIhD,QAAO;;;;;;AAOT,MAAM,qBAAsG,EAC1G,QAAQ;CACN,WAAW,KAAK,SAAS,EAAE,UAAU;CACrC,YAAY;CACZ,cAAc,KAAK,SAAS,EAAE,WAAW,gBAAgB;CAC1D,EAEF;;;;;;AAOD,SAAgB,iBAAyD;CACvE,MAAM,SAAS;EAAE,QAAQ,EAAE;EAAc,QAAQ,EAAE;EAAc;CAEjE,MAAM,eAAe,KAAK,oBAAoB,gBAAgB;AAC9D,KAAI,CAAC,WAAW,aAAa,CAC3B,QAAO;AAGT,MAAK,MAAM,CAAC,SAAS,WAAW,OAAO,QAAQ,mBAAmB,CAChE,KAAI;AAEF,YAAU,OAAO,WAAW,EAAE,WAAW,MAAM,CAAC;EAGhD,MAAM,eAAe,KAAK,OAAO,WAAW,OAAO,WAAW;AAC9D,eAAa,cAAc,aAAa;AACxC,YAAU,cAAc,IAAM;AAG9B,2BAAyB,OAAO,cAAc,aAAa;AAE3D,SAAO,OAAO,KAAK,QAAQ;UACpB,OAAO;AACd,SAAO,OAAO,KAAK,GAAG,QAAQ,IAAI,QAAQ;;AAI9C,QAAO;;;;;;AAOT,SAAS,yBAAyB,cAAsB,YAA0B;CAChF,IAAI,WAAgC,EAAE;AAEtC,KAAI,WAAW,aAAa,CAC1B,KAAI;AACF,aAAW,KAAK,MAAM,aAAa,cAAc,QAAQ,CAAC;SACpD;AAEN,aAAW,EAAE;;AAMjB,KADuB,SAAS,YAAY,YACrB,cAAc,SAAS,YAAY,SAAS,UACjE;AAGF,UAAS,aAAa;EACpB,MAAM;EACN,SAAS;EACT,SAAS;EACV;AAED,WAAU,QAAQ,aAAa,EAAE,EAAE,WAAW,MAAM,CAAC;AACrD,eAAc,cAAc,KAAK,UAAU,UAAU,MAAM,EAAE,GAAG,MAAM,QAAQ;;;;YC7lBrC;AAgD3C,MAAM,mBAAmC;CACvC,QAAQ;EACN,aAAa;GACX,cAAc;GACd,YAAY;GACZ,aAAa;GACd;EACD,eAAe;
EACf,YAAY;GACV,SAAS;GACT,QAAQ;GACR,QAAQ;GACR,SAAS;GACT,QAAQ;GACT;EACF;CACD,UAAU,EAAE;CACb;;;;;;AAOD,SAAS,UAA4B,UAAa,WAA0B;CAC1E,MAAM,SAAS,EAAE,GAAG,UAAU;AAE9B,MAAK,MAAM,OAAO,OAAO,KAAK,UAAU,EAAiB;EACvD,MAAM,aAAa,SAAS;EAC5B,MAAM,cAAc,UAAU;AAG9B,MAAI,gBAAgB,KAAA,EAAW;AAG/B,MACE,OAAO,eAAe,YACtB,eAAe,QACf,CAAC,MAAM,QAAQ,WAAW,IAC1B,OAAO,gBAAgB,YACvB,gBAAgB,QAChB,CAAC,MAAM,QAAQ,YAAY,CAE3B,QAAO,OAAO,UAAU,YAAY,YAAmB;MAGvD,QAAO,OAAO;;AAIlB,QAAO;;;;;;;AAQT,SAAgB,eAA+B;CAC7C,IAAI;AAEJ,KAAI,CAAC,WAAW,cAAc,CAC5B,YAAW,oBAAoB;KAE/B,KAAI;EACF,MAAM,UAAU,aAAa,eAAe,OAAO;AAEnD,aAAW,UAAU,kBADN,KAAK,MAAM,QAAQ,CACY;UACvC,OAAO;AACd,UAAQ,MAAM,yDAAyD;AACvE,aAAW,oBAAoB;;CAMnC,MAAM,aAA4B,EAAE;AACpC,KAAI,QAAQ,IAAI,eAAgB,YAAW,SAAS,QAAQ,IAAI;AAChE,KAAI,QAAQ,IAAI,eAAgB,YAAW,SAAS,QAAQ,IAAI;AAChE,KAAI,QAAQ,IAAI,YAAa,YAAW,MAAM,QAAQ,IAAI;AAC1D,KAAI,QAAQ,IAAI,aAAc,YAAW,OAAO,QAAQ,IAAI;AAG5D,UAAS,WAAW;EAClB,GAAG;EACH,GAAG,SAAS;EACb;AAED,QAAO;;;;;;AAOT,SAAgB,aAAa,UAAgC;AAE3D,eAAc,eADE,KAAK,UAAU,UAAU,MAAM,EAAE,EACX,OAAO;;;;;;AAO/C,SAAgB,iBAAiB,UAAyC;AAExE,KAAI,CAAC,SAAS,OACZ,QAAO;AAIT,KAAI,CAAC,SAAS,OAAO,YACnB,QAAO;CAET,MAAM,cAAc,SAAS,OAAO;AACpC,KAAI,CAAC,YAAY,gBAAgB,CAAC,YAAY,cAAc,CAAC,YAAY,YACvE,QAAO;AAIT,KAAI,CAAC,SAAS,OAAO,WACnB,QAAO;CAET,MAAM,aAAa,SAAS,OAAO;AAEnC,MAAK,MAAM,SAD+B;EAAC;EAAW;EAAU;EAAU;EAAW;EAAS,CAE5F,KAAI,CAAC,WAAW,OACd,QAAO,6BAA6B;AAKxC,KAAI,CAAC,SAAS,SACZ,QAAO;AAGT,QAAO;;;;;AAMT,SAAgB,qBAAqC;AACnD,QAAO,KAAK,MAAM,KAAK,UAAU,iBAAiB,CAAC;;;;;;AAOrD,SAAgB,mBAAmB,UAMjC;AAuBA,QAAO;EACL,WAvBwC;GACxC;GACA;GACA;GACD;EAoBC,QAlBkC,SAAS,SAAS,SAClD;GAAC;GAAiB;GAAoB;GAAU;GAAc,GAC9D,EAAE;EAiBJ,QAfkC,SAAS,SAAS,SAClD,CAAC,wBAAwB,yBAAyB,GAClD,EAAE;EAcJ,KAZ4B,SAAS,SAAS,MAC5C,CAAC,WAAW,gBAAgB,GAC5B,EAAE;EAWJ,MAT8B,SAAS,SAAS,OAC9C,CAAC,WAAW,YAAY,GACxB,EAAE;EAQL;;;;;;AAOH,SAAgB,iBAAiB,SAAoC;AACnE,QAAO,QAAQ,WAAW,UAAU;;;;;;AAOtC,SAAgB,mBAAmB,SAAmC;AAOpE,QANyC;EACvC,mBAAmB;EACnB,qBAAqB;EACrB,qBAAqB;EACrB,oBAAoB;EACrB,CACe,YAAY;;;;;;;AAQ9B,SAAgB,gBAAgB,SAAgE;AAC9F,KAAI,iBAAiB,QAAQ,CAC3B,QAAO;EACL,SAAS;EACT,MAAM,C
AAC,WAAW,mBAAmB,QAAQ,CAAC;EAC/C;AAIH,QAAO;EACL,SAAS;EACT,MAAM,CAAC,WAAW,QAAQ;EAC3B;;;;;;;;;;;;;;;;;;AC7OH,SAAgB,cAAc,KAA2D;AAEvF,KAAI,IAAI,WAAW,UAAU,CAC3B,QAAO;EAAE,SAAS;EAAU,KAAK,IAAI,IAAI,MAAM,EAAE;EAAI;AAEvD,KAAI,IAAI,WAAW,UAAU,CAC3B,QAAO;EAAE,SAAS;EAAU,KAAK,IAAI,IAAI,MAAM,EAAE;EAAI;AAEvD,KAAI,IAAI,WAAW,UAAU,CAC3B,QAAO;EAAE,SAAS;EAAU,KAAK,IAAI,MAAM,EAAE;EAAE;AAIjD,KAAI,SAAS,KAAK,IAAI,CACpB,QAAO;EAAE,SAAS;EAAU;EAAK;AAInC,KAAI,gBAAgB,KAAK,IAAI,CAC3B,QAAO;EAAE,SAAS;EAAU,KAAK,IAAI,aAAa;EAAE;AAGtD,QAAO;;;;;AAMT,SAAgB,eAAe,KAAa,SAA8B;AACxE,KAAI,YAAY,SACd,QAAO,IAAI,WAAW,IAAI,GAAG,SAAS,QAAQ,UAAU;AAE1D,KAAI,YAAY,SACd,QAAO,IAAI,WAAW,IAAI,GAAG,SAAS,QAAQ,UAAU;AAE1D,QAAO;;;;;AAMT,IAAa,cAAb,MAAyB;CACvB;CACA;CAEA,YAAY,WAAoB;AAC9B,OAAK,YAAY,aAAa,KAAK,SAAS,EAAE,eAAe,aAAa;AAC1E,OAAK,QAAQ,KAAK,MAAM;;CAG1B,OAA0B;AACxB,MAAI,WAAW,KAAK,UAAU,CAC5B,KAAI;GACF,MAAM,OAAO,KAAK,MAAM,aAAa,KAAK,WAAW,QAAQ,CAAC;AAC9D,OAAI,KAAK,YAAY,EACnB,QAAO;UAEH;AAIV,SAAO;GAAE,SAAS;GAAG,OAAO,EAAE;GAAE;;CAGlC,OAAqB;EACnB,MAAM,MAAM,KAAK,KAAK,WAAW,KAAK;AACtC,MAAI,CAAC,WAAW,IAAI,CAClB,WAAU,KAAK,EAAE,WAAW,MAAM,CAAC;AAErC,gBAAc,KAAK,WAAW,KAAK,UAAU,KAAK,OAAO,MAAM,EAAE,CAAC;;;;;CAMpE,QACE,QACA,QACA,YAA2B,WACd;EAEb,MAAM,WAAW,KAAK,MAAM,MAAM,MAC/B,MACC,EAAE,mBAAmB,OAAO,OAC5B,EAAE,kBAAkB,OAAO,WAC3B,EAAE,mBAAmB,OAAO,OAC5B,EAAE,kBAAkB,OAAO,QAC9B;AAED,MAAI,UAAU;AAEZ,OAAI,SAAS,cAAc,WAAW;AACpC,aAAS,YAAY;AACrB,SAAK,MAAM;;AAEb,UAAO;;EAGT,MAAM,OAAoB;GACxB,gBAAgB,OAAO;GACvB,eAAe,OAAO;GACtB,gBAAgB,OAAO;GACvB,eAAe,OAAO;GACtB;GACA,4BAAW,IAAI,MAAM,EAAC,aAAa;GACpC;AAED,OAAK,MAAM,MAAM,KAAK,KAAK;AAC3B,OAAK,MAAM;AACX,SAAO;;;;;CAMT,WACE,QACA,QACS;EACT,MAAM,QAAQ,KAAK,MAAM,MAAM,WAC5B,MACC,EAAE,mBAAmB,OAAO,OAC5B,EAAE,kBAAkB,OAAO,WAC3B,EAAE,mBAAmB,OAAO,OAC5B,EAAE,kBAAkB,OAAO,QAC9B;AAED,MAAI,SAAS,GAAG;AACd,QAAK,MAAM,MAAM,OAAO,OAAO,EAAE;AACjC,QAAK,MAAM;AACX,UAAO;;AAET,SAAO;;;;;CAMT,gBAAgB,KAAa,SAAqC;AAChE,SAAO,KAAK,MAAM,MAAM,QACrB,MACE,EAAE,mBAAmB,OAAO,EAAE,kBAAkB,WAChD,EAAE,mBAAmB,OAAO,EAAE,kBAAkB,QACpD;;;;;CAMH,cAA6B;AAC3B,SAAO,CAAC,GAAG,KAAK,M
AAM,MAAM;;;;;CAM9B,gBACE,KACA,eACA,eACe;EAEf,MAAM,WAAW,KAAK,MAAM,MAAM,MAC/B,MACC,EAAE,mBAAmB,OACrB,EAAE,kBAAkB,iBACpB,EAAE,kBAAkB,cACvB;AACD,MAAI,SAAU,QAAO,SAAS;EAG9B,MAAM,WAAW,KAAK,MAAM,MAAM,MAC/B,MACC,EAAE,mBAAmB,OACrB,EAAE,kBAAkB,iBACpB,EAAE,kBAAkB,cACvB;AACD,MAAI,SAAU,QAAO,SAAS;AAE9B,SAAO;;;;;CAMT,QAAc;AACZ,OAAK,MAAM,QAAQ,EAAE;AACrB,OAAK,MAAM;;;AAKf,IAAI,eAAmC;AAEvC,SAAgB,iBAA8B;AAC5C,KAAI,CAAC,aACH,gBAAe,IAAI,aAAa;AAElC,QAAO;;;;gBCvNe;aAGoB;aACA;aACA;cAUtB"}
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"tracker-utils-CVU2W1sX.js","names":[],"sources":["../src/lib/tracker-utils.ts"],"sourcesContent":["/**\n * Shared tracker utilities for resolving issue IDs to their tracker type\n * (GitHub or Linear) based on GITHUB_REPOS configuration.\n *\n * Eliminates hardcoded prefix checks like `issueId.startsWith('PAN-')`.\n */\n\nimport { readFileSync, existsSync } from 'fs';\nimport { join } from 'path';\nimport { homedir } from 'os';\nimport { loadProjectsConfig, getIssuePrefix } from './projects.js';\n\nexport interface GitHubRepoConfig {\n owner: string;\n repo: string;\n prefix: string;\n}\n\nexport interface GitHubIssueResolution {\n isGitHub: true;\n owner: string;\n repo: string;\n prefix: string;\n number: number;\n}\n\nexport interface NonGitHubResolution {\n isGitHub: false;\n}\n\nexport type IssueResolution = GitHubIssueResolution | NonGitHubResolution;\n\n/**\n * Parse GitHub repos from GITHUB_REPOS env var and projects.yaml.\n * Priority: GITHUB_REPOS env var first, then auto-derive from projects.yaml.\n * Format for env var: \"owner/repo:PREFIX,owner2/repo2:PREFIX2\"\n */\nexport function parseGitHubRepos(): GitHubRepoConfig[] {\n const repos: GitHubRepoConfig[] = [];\n\n // 1. Check GITHUB_REPOS env var\n const envFile = join(homedir(), '.panopticon.env');\n if (existsSync(envFile)) {\n const content = readFileSync(envFile, 'utf-8');\n const reposMatch = content.match(/GITHUB_REPOS=(.+)/);\n if (reposMatch) {\n repos.push(...reposMatch[1].trim().split(',').map(r => {\n const [repoPath, prefix] = r.trim().split(':');\n const [owner, repo] = (repoPath || '').split('/');\n return { owner: owner || '', repo: repo || '', prefix: (prefix || '').toUpperCase() };\n }).filter(r => r.owner && r.repo && r.prefix));\n }\n }\n\n // 2. 
Auto-derive from projects.yaml (if no explicit GITHUB_REPOS)\n if (repos.length === 0) {\n try {\n const { projects } = loadProjectsConfig();\n for (const [key, project] of Object.entries(projects)) {\n if (project.github_repo) {\n const [owner, repo] = project.github_repo.split('/');\n // Derive prefix: linear_team if set, otherwise uppercase project key\n const prefix = getIssuePrefix(project) || key.toUpperCase().replace(/-/g, '');\n if (owner && repo && prefix) {\n repos.push({ owner, repo, prefix: prefix.toUpperCase() });\n }\n }\n }\n } catch { /* ignore */ }\n }\n\n return repos;\n}\n\n/**\n * Extract the prefix from an issue ID (e.g., \"CLI\" from \"CLI-1\", \"PAN\" from \"PAN-42\").\n */\nexport function extractIssuePrefix(issueId: string): string {\n return issueId.split('-')[0].toUpperCase();\n}\n\n/**\n * Resolve an issue ID to its GitHub repo config, or determine it's not a GitHub issue.\n *\n * Checks the issue prefix against all prefixes configured in GITHUB_REPOS.\n * Returns the matching repo config with parsed issue number, or { isGitHub: false }.\n */\nexport function resolveGitHubIssue(issueId: string): IssueResolution {\n const prefix = extractIssuePrefix(issueId);\n const repos = parseGitHubRepos();\n\n for (const repoConfig of repos) {\n if (repoConfig.prefix === prefix) {\n const number = parseInt(issueId.split('-')[1], 10);\n if (!isNaN(number)) {\n return { isGitHub: true, ...repoConfig, number };\n }\n }\n }\n\n return { isGitHub: false };\n}\n\n/**\n * Check if an issue ID belongs to a GitHub-tracked project.\n */\nexport function isGitHubIssue(issueId: string): boolean {\n return resolveGitHubIssue(issueId).isGitHub;\n}\n\nexport type TrackerTypeResolution = 'github' | 'rally' | 'linear';\n\n/**\n * Resolve the tracker type for an issue ID by checking projects.yaml configuration.\n *\n * Resolution order:\n * 1. GitHub — prefix matches a configured github_repo project\n * 2. 
Rally — prefix matches a project with rally_project but no linear_team / github_repo\n * 3. Linear — fallback (matches linear_team or unknown prefix)\n */\nexport function resolveTrackerType(issueId: string): TrackerTypeResolution {\n // Check GitHub first (existing logic)\n if (resolveGitHubIssue(issueId).isGitHub) {\n return 'github';\n }\n\n // Check if the issue prefix matches a Rally-only project\n const prefix = extractIssuePrefix(issueId);\n try {\n const { projects } = loadProjectsConfig();\n for (const [key, project] of Object.entries(projects)) {\n const projectPrefix = getIssuePrefix(project) || key.toUpperCase().replace(/-/g, '');\n if (projectPrefix?.toUpperCase() === prefix) {\n // Prefix matches — determine tracker by what's configured\n if (project.github_repo) return 'github';\n if (project.rally_project) return 'rally';\n return 'linear';\n }\n }\n } catch { /* ignore config errors */ }\n\n // Default to Linear for unknown prefixes\n return 'linear';\n}\n"],"mappings":";;;;;;;;;;;eAUmE;;;;;;AA2BnE,SAAgB,mBAAuC;CACrD,MAAM,QAA4B,EAAE;CAGpC,MAAM,UAAU,KAAK,SAAS,EAAE,kBAAkB;AAClD,KAAI,WAAW,QAAQ,EAAE;EAEvB,MAAM,aADU,aAAa,SAAS,QAAQ,CACnB,MAAM,oBAAoB;AACrD,MAAI,WACF,OAAM,KAAK,GAAG,WAAW,GAAG,MAAM,CAAC,MAAM,IAAI,CAAC,KAAI,MAAK;GACrD,MAAM,CAAC,UAAU,UAAU,EAAE,MAAM,CAAC,MAAM,IAAI;GAC9C,MAAM,CAAC,OAAO,SAAS,YAAY,IAAI,MAAM,IAAI;AACjD,UAAO;IAAE,OAAO,SAAS;IAAI,MAAM,QAAQ;IAAI,SAAS,UAAU,IAAI,aAAa;IAAE;IACrF,CAAC,QAAO,MAAK,EAAE,SAAS,EAAE,QAAQ,EAAE,OAAO,CAAC;;AAKlD,KAAI,MAAM,WAAW,EACnB,KAAI;EACF,MAAM,EAAE,aAAa,oBAAoB;AACzC,OAAK,MAAM,CAAC,KAAK,YAAY,OAAO,QAAQ,SAAS,CACnD,KAAI,QAAQ,aAAa;GACvB,MAAM,CAAC,OAAO,QAAQ,QAAQ,YAAY,MAAM,IAAI;GAEpD,MAAM,SAAS,eAAe,QAAQ,IAAI,IAAI,aAAa,CAAC,QAAQ,MAAM,GAAG;AAC7E,OAAI,SAAS,QAAQ,OACnB,OAAM,KAAK;IAAE;IAAO;IAAM,QAAQ,OAAO,aAAa;IAAE,CAAC;;SAIzD;AAGV,QAAO;;;;;AAMT,SAAgB,mBAAmB,SAAyB;AAC1D,QAAO,QAAQ,MAAM,IAAI,CAAC,GAAG,aAAa;;;;;;;;AAS5C,SAAgB,mBAAmB,SAAkC;CACnE,MAAM,SAAS,mBAAmB,QAAQ;CAC1C,MAAM,QAAQ,kBAAkB;AAEhC,MAAK,MAAM,cAAc,MACvB,KAAI,WAAW,WAAW
,QAAQ;EAChC,MAAM,SAAS,SAAS,QAAQ,MAAM,IAAI,CAAC,IAAI,GAAG;AAClD,MAAI,CAAC,MAAM,OAAO,CAChB,QAAO;GAAE,UAAU;GAAM,GAAG;GAAY;GAAQ;;AAKtD,QAAO,EAAE,UAAU,OAAO;;;;;AAM5B,SAAgB,cAAc,SAA0B;AACtD,QAAO,mBAAmB,QAAQ,CAAC;;;;;;;;;;AAarC,SAAgB,mBAAmB,SAAwC;AAEzE,KAAI,mBAAmB,QAAQ,CAAC,SAC9B,QAAO;CAIT,MAAM,SAAS,mBAAmB,QAAQ;AAC1C,KAAI;EACF,MAAM,EAAE,aAAa,oBAAoB;AACzC,OAAK,MAAM,CAAC,KAAK,YAAY,OAAO,QAAQ,SAAS,CAEnD,MADsB,eAAe,QAAQ,IAAI,IAAI,aAAa,CAAC,QAAQ,MAAM,GAAG,GACjE,aAAa,KAAK,QAAQ;AAE3C,OAAI,QAAQ,YAAa,QAAO;AAChC,OAAI,QAAQ,cAAe,QAAO;AAClC,UAAO;;SAGL;AAGR,QAAO"}
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"workspace-config-CNXOpKuj.js","names":[],"sources":["../src/lib/workspace-config.ts"],"sourcesContent":["/**\n * Workspace Configuration Types\n *\n * Defines the schema for project workspace configuration in projects.yaml\n */\n\nexport interface RepoConfig {\n /** Name of the repo in the workspace (e.g., 'fe', 'api') */\n name: string;\n /** Path to source repo relative to project root */\n path: string;\n /** Branch prefix for feature branches (default: 'feature/') */\n branch_prefix?: string;\n /** Default branch to create feature branches from (default: 'main') */\n default_branch?: string;\n}\n\nexport interface DnsConfig {\n /** Base domain (e.g., 'myn.test') */\n domain: string;\n /**\n * DNS entry patterns. Supports placeholders:\n * - {{FEATURE_FOLDER}}: e.g., 'feature-min-123'\n * - {{FEATURE_NAME}}: e.g., 'min-123'\n * - {{DOMAIN}}: the domain value\n */\n entries: string[];\n /** How to sync DNS: 'wsl2hosts' | 'hosts_file' | 'dnsmasq' */\n sync_method?: 'wsl2hosts' | 'hosts_file' | 'dnsmasq';\n}\n\nexport interface PortConfig {\n /** Port range [start, end] */\n range: [number, number];\n}\n\nexport interface DockerConfig {\n /** Path to Traefik compose file (relative to project root) */\n traefik?: string;\n /** Path to devcontainer template directory */\n compose_template?: string;\n}\n\nexport interface AgentTemplateConfig {\n /** Path to agent template directory */\n template_dir: string;\n /** Files to process with placeholder replacement */\n templates?: Array<{\n source: string;\n target: string;\n }>;\n /** Directories to copy from project template into workspace */\n copy_dirs?: string[];\n /** @deprecated Use copy_dirs instead */\n symlinks?: string[];\n}\n\nexport interface EnvConfig {\n /** Environment variable template with placeholders */\n template?: string;\n /** Additional env vars from secrets */\n secrets_file?: string;\n}\n\nexport interface ServiceConfig {\n /** Service name (e.g., 'api', 'frontend') */\n name: 
string;\n /** Path relative to workspace (e.g., 'api', 'fe') */\n path: string;\n /** Command to start the service natively (e.g., './run-dev.sh', 'pnpm start') */\n start_command: string;\n /** Command to start inside Docker container (if different) */\n docker_command?: string;\n /** Health check URL pattern (supports placeholders) */\n health_url?: string;\n /** Port the service runs on */\n port?: number;\n}\n\nexport interface TestConfig {\n /** Test type: 'maven' | 'vitest' | 'playwright' | 'jest' | 'pytest' | 'cargo' */\n type: string;\n /** Path to test directory (relative to workspace) */\n path: string;\n /** Command to run tests */\n command: string;\n /** Run inside container for feature workspaces */\n container?: boolean;\n /** Container name pattern (uses {{FEATURE_FOLDER}}) */\n container_name?: string;\n /** Additional environment variables */\n env?: Record<string, string>;\n}\n\nexport interface QualityGateConfig {\n /** Command to run (e.g., 'pnpm lint', 'pnpm typecheck') */\n command: string;\n /** Path relative to workspace (e.g., 'frontend' for polyrepo) */\n path?: string;\n /** If true, merge is blocked on failure (default: true) */\n required?: boolean;\n /** Additional environment variables */\n env?: Record<string, string>;\n /** When to run: before push (default) or after push */\n phase?: 'pre_push' | 'post_push';\n /** Gate type: shell command (default) or HTTP health check */\n type?: 'command' | 'http_health';\n /** URL for http_health type */\n url?: string;\n /** Seconds to wait for deployment before checking (http_health only) */\n wait?: number;\n /** Expected HTTP status code (http_health only, default: 200) */\n expect_status?: number;\n /** Run command inside a Docker container (uses docker exec) */\n container?: boolean;\n /** Container name pattern (supports {{FEATURE_FOLDER}} etc.) 
*/\n container_name?: string;\n}\n\nexport interface DatabaseConfig {\n /** Path to seed file for database initialization */\n seed_file?: string;\n /** Command to run after loading seed (e.g., sanitization script) */\n seed_command?: string;\n /** Command to create snapshots from external source (e.g., kubectl exec pg_dump) */\n snapshot_command?: string;\n /** External database connection for direct access */\n external_db?: {\n host: string;\n port?: number;\n database: string;\n user?: string;\n /** Environment variable name containing password */\n password_env?: string;\n };\n /** Container name pattern (supports {{PROJECT}} placeholder) */\n container_name?: string;\n /** Migration tool configuration */\n migrations?: {\n type: 'flyway' | 'liquibase' | 'prisma' | 'typeorm' | 'custom';\n path?: string;\n command?: string;\n };\n}\n\nexport interface TunnelHostname {\n /** Hostname pattern (supports {{FEATURE_FOLDER}} etc.) e.g., \"api-{{FEATURE_FOLDER}}.mindyournow.com\" */\n pattern: string;\n /** HTTP Host header for Traefik routing e.g., \"api-{{FEATURE_FOLDER}}.myn.localhost\" */\n http_host_header?: string;\n /** Skip TLS verification for local dev (default: true) */\n no_tls_verify?: boolean;\n}\n\nexport interface TunnelConfig {\n /** Tunnel provider (currently only Cloudflare) */\n provider: 'cloudflare';\n /** Cloudflare tunnel ID */\n tunnel_id: string;\n /** Cloudflare account ID */\n account_id: string;\n /** Cloudflare zone ID */\n zone_id: string;\n /** Path to credentials file (cert.pem) containing API token */\n credentials_file: string;\n /** Service target for ingress rules (e.g., \"https://localhost\") */\n service_target: string;\n /** Hostnames to create ingress rules + DNS records for */\n hostnames: TunnelHostname[];\n}\n\nexport interface HumeConfig {\n /** Env var name containing the Hume API key (default: HUME_API_KEY) */\n api_key_env?: string;\n /** Config ID of the production/template config to clone from */\n template_config_id: 
string;\n /** Config name pattern for workspaces (supports placeholders) */\n name_pattern: string;\n /** BYOLLM callback URL pattern (supports placeholders) */\n byollm_url_pattern: string;\n}\n\nexport interface WorkspaceConfig {\n /** Workspace type: 'polyrepo' (multiple git repos) or 'monorepo' (single repo, default) */\n type?: 'polyrepo' | 'monorepo';\n /** Where to create workspaces (relative to project path) */\n workspaces_dir?: string;\n /** Default branch for all repos (default: 'main'). Can be overridden per-repo. */\n default_branch?: string;\n /** Git repositories to include (for polyrepo) */\n repos?: RepoConfig[];\n /** DNS configuration */\n dns?: DnsConfig;\n /** Port assignments for services */\n ports?: Record<string, PortConfig>;\n /** Docker configuration */\n docker?: DockerConfig;\n /** Database seeding configuration */\n database?: DatabaseConfig;\n /** Agent configuration templates */\n agent?: AgentTemplateConfig;\n /** Environment variables */\n env?: EnvConfig;\n /** Service definitions for startup commands */\n services?: ServiceConfig[];\n /** Cloudflare tunnel configuration for external access */\n tunnel?: TunnelConfig;\n /** Hume EVI config management for workspace lifecycle */\n hume?: HumeConfig;\n /** PRD directory path (relative to project path, default: 'docs/prds') */\n prdDir?: string;\n}\n\nexport interface TestsConfig {\n [name: string]: TestConfig;\n}\n\nexport interface ProjectConfig {\n name: string;\n path: string;\n /** Issue prefix for identifier construction (e.g., \"PAN\" → PAN-123) */\n issue_prefix?: string;\n github_repo?: string;\n gitlab_repo?: string;\n\n /** Workspace configuration */\n workspace?: WorkspaceConfig;\n\n /** Test configuration */\n tests?: TestsConfig;\n\n /** Issue routing rules */\n issue_routing?: Array<{\n labels?: string[];\n path: string;\n default?: boolean;\n }>;\n\n /** Legacy: custom workspace command (deprecated, use workspace config) */\n workspace_command?: string;\n 
workspace_remove_command?: string;\n\n /** Package manager for dependency installation in workspaces (bun, npm, pnpm) */\n package_manager?: 'bun' | 'npm' | 'pnpm';\n /** Local workspace packages that need building before quality gates */\n workspace_packages?: Array<{ path: string; build_command: string }>;\n}\n\nexport interface ProjectsConfig {\n projects: Record<string, ProjectConfig>;\n}\n\n/**\n * Template placeholders that can be used in configuration\n */\nexport interface TemplatePlaceholders {\n FEATURE_NAME: string; // e.g., 'min-123'\n FEATURE_FOLDER: string; // e.g., 'feature-min-123'\n BRANCH_NAME: string; // e.g., 'feature/min-123'\n COMPOSE_PROJECT: string; // e.g., 'myn-feature-min-123'\n DOMAIN: string; // e.g., 'myn.test'\n PROJECT_NAME: string; // e.g., 'myn'\n PROJECT_PATH: string; // e.g., '/home/user/Projects/myn'\n PROJECTS_DIR: string; // e.g., '/home/user/Projects' (parent of PROJECT_PATH)\n WORKSPACE_PATH: string; // e.g., '/home/user/Projects/myn/workspaces/feature-min-123'\n HOME?: string; // e.g., '/home/user' (for docker-compose path sanitization)\n}\n\n/**\n * Replace template placeholders in a string\n */\nexport function replacePlaceholders(template: string, placeholders: TemplatePlaceholders): string {\n let result = template;\n for (const [key, value] of Object.entries(placeholders)) {\n result = result.replace(new RegExp(`\\\\{\\\\{${key}\\\\}\\\\}`, 'g'), value);\n }\n return result;\n}\n\n/**\n * Get default workspace config for a monorepo project\n */\nexport function getDefaultWorkspaceConfig(): WorkspaceConfig {\n return {\n type: 'monorepo',\n workspaces_dir: 'workspaces',\n };\n}\n\n/**\n * Service templates for common project types\n * These provide sensible defaults that can be overridden\n */\nexport const SERVICE_TEMPLATES: Record<string, Partial<ServiceConfig>> = {\n // Frontend frameworks\n 'react': {\n start_command: 'npm start',\n docker_command: 'npm start',\n port: 3000,\n },\n 'react-vite': {\n start_command: 
'npm run dev',\n docker_command: 'npm run dev',\n port: 5173,\n },\n 'react-pnpm': {\n start_command: 'pnpm start',\n docker_command: 'pnpm start',\n port: 3000,\n },\n 'nextjs': {\n start_command: 'npm run dev',\n docker_command: 'npm run dev',\n port: 3000,\n },\n 'vue': {\n start_command: 'npm run dev',\n docker_command: 'npm run dev',\n port: 5173,\n },\n 'angular': {\n start_command: 'ng serve',\n docker_command: 'ng serve',\n port: 4200,\n },\n\n // Backend frameworks\n 'spring-boot-maven': {\n start_command: './mvnw spring-boot:run',\n docker_command: './mvnw spring-boot:run',\n port: 8080,\n },\n 'spring-boot-gradle': {\n start_command: './gradlew bootRun',\n docker_command: './gradlew bootRun',\n port: 8080,\n },\n 'express': {\n start_command: 'npm start',\n docker_command: 'npm start',\n port: 3000,\n },\n 'fastapi': {\n start_command: 'uvicorn main:app --reload',\n docker_command: 'uvicorn main:app --host 0.0.0.0 --reload',\n port: 8000,\n },\n 'django': {\n start_command: 'python manage.py runserver',\n docker_command: 'python manage.py runserver 0.0.0.0:8000',\n port: 8000,\n },\n 'rails': {\n start_command: 'rails server',\n docker_command: 'rails server -b 0.0.0.0',\n port: 3000,\n },\n 'go': {\n start_command: 'go run .',\n docker_command: 'go run .',\n port: 8080,\n },\n 'rust-cargo': {\n start_command: 'cargo run',\n docker_command: 'cargo run',\n port: 8080,\n },\n};\n\n/**\n * Get service config from template with overrides\n */\nexport function getServiceFromTemplate(\n templateName: string,\n overrides: Partial<ServiceConfig>\n): ServiceConfig {\n const template = SERVICE_TEMPLATES[templateName] || {};\n return {\n name: overrides.name || templateName,\n path: overrides.path || '.',\n start_command: overrides.start_command || template.start_command || 'npm start',\n docker_command: overrides.docker_command || template.docker_command,\n health_url: overrides.health_url,\n port: overrides.port || template.port,\n 
};\n}\n"],"mappings":";;;;;AA+QA,SAAgB,oBAAoB,UAAkB,cAA4C;CAChG,IAAI,SAAS;AACb,MAAK,MAAM,CAAC,KAAK,UAAU,OAAO,QAAQ,aAAa,CACrD,UAAS,OAAO,QAAQ,IAAI,OAAO,SAAS,IAAI,SAAS,IAAI,EAAE,MAAM;AAEvE,QAAO;;;;;AAMT,SAAgB,4BAA6C;AAC3D,QAAO;EACL,MAAM;EACN,gBAAgB;EACjB"}
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"workspace-manager-CncdZkIy.js","names":[],"sources":["../src/lib/skills-merge.ts","../src/lib/workspace-manager.ts"],"sourcesContent":["import {\n existsSync,\n readdirSync,\n mkdirSync,\n readFileSync,\n writeFileSync,\n copyFileSync,\n statSync,\n} from 'fs';\nimport { join, relative, dirname } from 'path';\nimport { SKILLS_DIR, CACHE_AGENTS_DIR, CACHE_RULES_DIR } from './paths.js';\nimport {\n readManifest,\n writeManifest,\n collectSourceFiles,\n hashFile,\n setManifestEntry,\n compareFileToManifest,\n type Manifest,\n} from './manifest.js';\n\nexport interface MergeResult {\n added: string[];\n updated: string[];\n skipped: string[];\n overlayed: string[];\n}\n\n/**\n * Copy all files from a source directory into a target directory,\n * preserving subdirectory structure. Returns the list of relative paths copied.\n */\nfunction copyTree(sourceDir: string, targetDir: string): string[] {\n const copied: string[] = [];\n if (!existsSync(sourceDir)) return copied;\n\n function walk(dir: string): void {\n const entries = readdirSync(dir, { withFileTypes: true });\n for (const entry of entries) {\n const fullPath = join(dir, entry.name);\n if (entry.isDirectory()) {\n walk(fullPath);\n } else if (entry.isFile()) {\n const rel = relative(sourceDir, fullPath);\n const targetPath = join(targetDir, rel);\n mkdirSync(dirname(targetPath), { recursive: true });\n copyFileSync(fullPath, targetPath);\n copied.push(rel);\n }\n }\n }\n\n walk(sourceDir);\n return copied;\n}\n\n/**\n * Merge Panopticon skills, agents, and rules into a workspace using file copies.\n *\n * Flow:\n * 1. Copy from cache (skills, agent-definitions, rules) → workspace/.claude/\n * 2. 
Write manifest tracking what was placed\n *\n * Project template overlay is handled separately by workspace-manager.ts\n * (processTemplates + createSymlinks → now also copy-based).\n */\nexport function mergeSkillsIntoWorkspace(workspacePath: string): MergeResult {\n const claudeDir = join(workspacePath, '.claude');\n const manifestPath = join(claudeDir, '.panopticon-manifest.json');\n const manifest = readManifest(manifestPath);\n\n const result: MergeResult = {\n added: [],\n updated: [],\n skipped: [],\n overlayed: [],\n };\n\n // Ensure base directories exist\n mkdirSync(join(claudeDir, 'skills'), { recursive: true });\n mkdirSync(join(claudeDir, 'agents'), { recursive: true });\n\n // Sources to copy: category → source cache directory\n const sources: Array<{ category: string; sourceDir: string; targetSubdir: string }> = [\n { category: 'skills', sourceDir: SKILLS_DIR, targetSubdir: 'skills' },\n { category: 'agents', sourceDir: CACHE_AGENTS_DIR, targetSubdir: 'agents' },\n { category: 'rules', sourceDir: CACHE_RULES_DIR, targetSubdir: 'rules' },\n ];\n\n for (const { category, sourceDir, targetSubdir } of sources) {\n if (!existsSync(sourceDir)) continue;\n\n const prefix = targetSubdir ? 
`${targetSubdir}/` : '';\n const files = collectSourceFiles(sourceDir, '');\n\n for (const file of files) {\n const relativePath = `${prefix}${file.relativePath}`;\n const targetPath = join(claudeDir, relativePath);\n const sourceHash = hashFile(file.absolutePath);\n\n // Check status against manifest\n const status = compareFileToManifest(targetPath, relativePath, manifest);\n\n switch (status.action) {\n case 'new':\n // File doesn't exist at target — copy it\n mkdirSync(dirname(targetPath), { recursive: true });\n copyFileSync(file.absolutePath, targetPath);\n setManifestEntry(manifest, relativePath, sourceHash, 'panopticon');\n result.added.push(relativePath);\n break;\n\n case 'update':\n // File exists and matches manifest — safe to overwrite with latest\n copyFileSync(file.absolutePath, targetPath);\n setManifestEntry(manifest, relativePath, sourceHash, 'panopticon');\n result.updated.push(relativePath);\n break;\n\n case 'modified':\n // User modified the file — skip to preserve their changes\n result.skipped.push(`${relativePath} (modified by user)`);\n break;\n\n case 'user-owned':\n // File exists but wasn't placed by us — never touch\n result.skipped.push(`${relativePath} (user-owned)`);\n break;\n }\n }\n }\n\n // Write updated manifest\n writeManifest(manifestPath, manifest);\n\n return result;\n}\n\n/**\n * Apply project template overlay on top of Panopticon base files in a workspace.\n *\n * This copies files from the project's agent template directory into\n * workspace/.claude/, overwriting Panopticon files where the project\n * provides its own version. 
Updates the manifest with source=\"project-template\".\n *\n * @param workspacePath - Path to the workspace\n * @param templateDir - Absolute path to the project's agent template directory\n * @param templates - Optional list of specific template files to process (source → target mappings)\n */\nexport function applyProjectTemplateOverlay(\n workspacePath: string,\n templateDir: string,\n templates?: Array<{ source: string; target: string }>,\n): string[] {\n const claudeDir = join(workspacePath, '.claude');\n const manifestPath = join(claudeDir, '.panopticon-manifest.json');\n const manifest = readManifest(manifestPath);\n const overlayed: string[] = [];\n\n if (!existsSync(templateDir)) return overlayed;\n\n if (templates && templates.length > 0) {\n // Process specific template mappings\n for (const { source, target } of templates) {\n const sourcePath = join(templateDir, source);\n if (!existsSync(sourcePath)) continue;\n\n const targetPath = join(workspacePath, target);\n mkdirSync(dirname(targetPath), { recursive: true });\n\n // Read template content and check if it's a template file\n if (source.endsWith('.template')) {\n // Template files are handled by workspace-manager's processTemplates\n // We just track them in the manifest after they're processed\n continue;\n }\n\n copyFileSync(sourcePath, targetPath);\n\n // Track in manifest if it's under .claude/\n if (target.startsWith('.claude/')) {\n const relativePath = target.slice('.claude/'.length);\n const hash = hashFile(targetPath);\n setManifestEntry(manifest, relativePath, hash, 'project-template');\n overlayed.push(relativePath);\n }\n }\n } else {\n // Copy all .claude/ subdirectories from template dir\n const claudeInTemplate = join(templateDir, '.claude');\n if (existsSync(claudeInTemplate)) {\n const copied = copyTree(claudeInTemplate, claudeDir);\n for (const rel of copied) {\n const targetPath = join(claudeDir, rel);\n const hash = hashFile(targetPath);\n setManifestEntry(manifest, rel, hash, 
'project-template');\n overlayed.push(rel);\n }\n }\n }\n\n // Write updated manifest\n writeManifest(manifestPath, manifest);\n\n return overlayed;\n}\n\n// ─── Legacy exports (kept for migration, to be removed in future) ───\n\n/**\n * @deprecated No longer needed — skills are copies, not symlinks. Kept for migration.\n */\nexport function cleanupGitignore(gitignorePath: string): {\n cleaned: boolean;\n duplicatesRemoved: number;\n entriesAfter: number;\n} {\n if (!existsSync(gitignorePath)) {\n return { cleaned: false, duplicatesRemoved: 0, entriesAfter: 0 };\n }\n\n const PANOPTICON_HEADER = '# Panopticon-managed symlinks (not committed)';\n let content: string;\n try {\n content = readFileSync(gitignorePath, 'utf-8');\n } catch {\n return { cleaned: false, duplicatesRemoved: 0, entriesAfter: 0 };\n }\n\n // If no Panopticon section, nothing to clean\n if (!content.includes(PANOPTICON_HEADER)) {\n return { cleaned: false, duplicatesRemoved: 0, entriesAfter: 0 };\n }\n\n // Remove the entire Panopticon section (skills are copies now, not symlinks)\n const lines = content.split('\\n');\n const newLines: string[] = [];\n let inPanopticonSection = false;\n\n for (const line of lines) {\n const trimmed = line.trim();\n if (trimmed === PANOPTICON_HEADER) {\n inPanopticonSection = true;\n continue;\n }\n if (inPanopticonSection) {\n if (trimmed.startsWith('#') && trimmed !== '') {\n inPanopticonSection = false;\n newLines.push(line);\n } else if (trimmed === '') {\n // Skip blank lines in Panopticon section\n continue;\n }\n // Skip entries in Panopticon section\n continue;\n }\n newLines.push(line);\n }\n\n // Write cleaned file\n try {\n writeFileSync(gitignorePath, newLines.join('\\n'), 'utf-8');\n return { cleaned: true, duplicatesRemoved: 0, entriesAfter: 0 };\n } catch {\n return { cleaned: false, duplicatesRemoved: 0, entriesAfter: 0 };\n }\n}\n\n/**\n * @deprecated No longer needed — skills are copies, not symlinks. 
Kept for migration.\n */\nexport function cleanupWorkspaceGitignore(workspacePath: string): {\n cleaned: boolean;\n duplicatesRemoved: number;\n entriesAfter: number;\n} {\n const gitignorePath = join(workspacePath, '.claude', 'skills', '.gitignore');\n return cleanupGitignore(gitignorePath);\n}\n\n/**\n * Merge project-local skills from .pan/skills/ into a workspace's .claude/skills/.\n *\n * Precedence (highest wins):\n * 1. .claude/skills/<name>/ already in workspace (user-owned or project template) → skip\n * 2. .pan/skills/<name>/ in project repo → copy into workspace .claude/skills/\n * 3. Global cache (handled by mergeSkillsIntoWorkspace) → baseline\n *\n * This should be called AFTER mergeSkillsIntoWorkspace so that project-local skills\n * can override global cache skills (but never overwrite user-owned content).\n */\nexport function mergePanSkillsIntoWorkspace(projectPath: string, workspacePath: string): MergeResult {\n const result: MergeResult = { added: [], updated: [], skipped: [], overlayed: [] };\n const panSkillsDir = join(projectPath, '.pan', 'skills');\n if (!existsSync(panSkillsDir)) return result;\n\n const claudeSkillsDir = join(workspacePath, '.claude', 'skills');\n const manifestPath = join(workspacePath, '.claude', '.panopticon-manifest.json');\n const manifest = readManifest(manifestPath);\n\n const skillDirs = readdirSync(panSkillsDir, { withFileTypes: true })\n .filter(e => e.isDirectory())\n .map(e => e.name);\n\n for (const skillName of skillDirs) {\n const sourceSkillDir = join(panSkillsDir, skillName);\n const targetSkillDir = join(claudeSkillsDir, skillName);\n\n // Rule #1: if target already exists (user-owned or project-template), never overwrite\n if (existsSync(targetSkillDir)) {\n result.skipped.push(`skills/${skillName} (already exists in .claude/skills/)`);\n continue;\n }\n\n // Rule #2: copy from .pan/skills/<name>/ to workspace .claude/skills/<name>/\n const files = collectSourceFiles(sourceSkillDir, '');\n 
mkdirSync(targetSkillDir, { recursive: true });\n let anyAdded = false;\n for (const file of files) {\n const targetPath = join(targetSkillDir, file.relativePath);\n mkdirSync(dirname(targetPath), { recursive: true });\n copyFileSync(file.absolutePath, targetPath);\n const hash = hashFile(targetPath);\n setManifestEntry(manifest, `skills/${skillName}/${file.relativePath}`, hash, 'pan-skills');\n result.added.push(`skills/${skillName}/${file.relativePath}`);\n anyAdded = true;\n }\n if (anyAdded) {\n result.overlayed.push(skillName);\n }\n }\n\n writeManifest(manifestPath, manifest);\n return result;\n}\n","/**\n * Workspace Manager\n *\n * Handles workspace creation and removal for both monorepo and polyrepo projects.\n */\n\nimport { existsSync, mkdirSync, writeFileSync, readFileSync, readdirSync, copyFileSync, symlinkSync, chmodSync, realpathSync, rmSync, rmdirSync, statSync, renameSync } from 'fs';\nimport { join, dirname, basename, extname, resolve } from 'path';\nimport { homedir } from 'os';\nimport { exec } from 'child_process';\nimport { promisify } from 'util';\nimport {\n ProjectConfig,\n WorkspaceConfig,\n TemplatePlaceholders,\n replacePlaceholders,\n getDefaultWorkspaceConfig,\n} from './workspace-config.js';\nimport { addDnsEntry, removeDnsEntry, syncDnsToWindows } from './dns.js';\nimport { addTunnelIngress, removeTunnelIngress } from './tunnel.js';\nimport { createHumeConfig, deleteHumeConfig } from './hume.js';\nimport { mergeSkillsIntoWorkspace, mergePanSkillsIntoWorkspace } from './skills-merge.js';\n\nconst execAsync = promisify(exec);\n\nexport interface PanMigrationResult {\n /** Subdirectories migrated from .panopticon/ to .pan/ */\n migrated: string[];\n /** Subdirectories skipped because .pan/<subdir> already exists */\n skipped: string[];\n /** Errors encountered during migration */\n errors: string[];\n}\n\n/**\n * Migrate existing .panopticon/<subdir> directories to .pan/<subdir> within a project.\n *\n * Safety rules:\n * - If old path 
exists and new path does NOT exist → move old to new.\n * - If both old and new exist → log warning and skip (never overwrite silently).\n * - If neither exists → nothing to do.\n * - Only migrates the specific runtime subdirs (events, convoy, prompts).\n * .pan/skills/ is not migrated here since it may not have existed before.\n */\nexport function migratePanopticonToPan(projectPath: string): PanMigrationResult {\n const result: PanMigrationResult = { migrated: [], skipped: [], errors: [] };\n\n // Map legacy .panopticon/<subdir> paths to new .pan/<subdir> paths,\n // including convoy unification (triage + health → convoy)\n const legacyMappings: Array<{ old: string; new: string }> = [\n { old: '.panopticon/events', new: '.pan/events' },\n { old: '.panopticon/triage', new: '.pan/convoy' },\n { old: '.panopticon/health', new: '.pan/convoy' },\n { old: '.panopticon/convoy-output', new: '.pan/convoy' },\n { old: '.panopticon/prompts', new: '.pan/prompts' },\n ];\n\n for (const { old: oldRelPath, new: newRelPath } of legacyMappings) {\n const oldPath = join(projectPath, oldRelPath);\n const newPath = join(projectPath, newRelPath);\n\n if (!existsSync(oldPath)) continue;\n\n if (existsSync(newPath)) {\n const msg = `Migration skipped: both ${oldRelPath} and ${newRelPath} exist in ${projectPath} — remove one manually`;\n console.warn(`[panopticon] ${msg}`);\n result.skipped.push(oldRelPath);\n continue;\n }\n\n try {\n // Ensure parent directory exists\n const parentDir = dirname(newPath);\n if (!existsSync(parentDir)) {\n mkdirSync(parentDir, { recursive: true });\n }\n renameSync(oldPath, newPath);\n result.migrated.push(`${oldRelPath} → ${newRelPath}`);\n } catch (err: any) {\n result.errors.push(`${oldRelPath}: ${err.message}`);\n }\n }\n\n // Clean up empty .panopticon/ dir if nothing remains\n const panopticonDir = join(projectPath, '.panopticon');\n if (existsSync(panopticonDir)) {\n try {\n const remaining = readdirSync(panopticonDir);\n if (remaining.length === 
0) {\n rmdirSync(panopticonDir);\n result.migrated.push('.panopticon/ (empty dir removed)');\n }\n } catch {\n // Non-fatal — dir may have been removed already\n }\n }\n\n return result;\n}\n\n/**\n * Ensure .pan/events/, .pan/convoy/, and .pan/prompts/ are excluded from git tracking\n * in the given project root's .gitignore. .pan/skills/ is intentionally NOT excluded\n * since project-specific skills should be committed.\n */\nexport function ensurePanGitignore(projectPath: string): void {\n const gitignorePath = join(projectPath, '.gitignore');\n const requiredEntries = ['.pan/events/', '.pan/convoy/', '.pan/prompts/'];\n\n let content = existsSync(gitignorePath) ? readFileSync(gitignorePath, 'utf-8') : '';\n const lines = content.split('\\n');\n\n const missing = requiredEntries.filter(entry => !lines.some(l => l.trim() === entry));\n if (missing.length === 0) return;\n\n // Append missing entries with a section header if we're adding for the first time\n if (!content.endsWith('\\n') && content.length > 0) {\n content += '\\n';\n }\n if (!lines.some(l => l.includes('.pan/'))) {\n content += '\\n# Panopticon runtime artifacts (ephemeral, not tracked)\\n';\n }\n content += missing.join('\\n') + '\\n';\n\n writeFileSync(gitignorePath, content, 'utf-8');\n}\n\n/** Progress event emitted during workspace creation. */\nexport interface WorkspaceProgress {\n label: string;\n detail: string;\n status: 'active' | 'complete' | 'error';\n}\n\nexport interface WorkspaceCreateOptions {\n projectConfig: ProjectConfig;\n featureName: string;\n startDocker?: boolean;\n dryRun?: boolean;\n /** Optional callback for streaming progress events during creation. 
*/\n onProgress?: (event: WorkspaceProgress) => void;\n}\n\nexport interface WorkspaceCreateResult {\n success: boolean;\n workspacePath: string;\n errors: string[];\n steps: string[];\n}\n\n/**\n * Create placeholders for template substitution\n */\nfunction createPlaceholders(\n projectConfig: ProjectConfig,\n featureName: string,\n workspacePath: string\n): TemplatePlaceholders {\n const featureFolder = `feature-${featureName}`;\n const domain = projectConfig.workspace?.dns?.domain || 'localhost';\n\n return {\n FEATURE_NAME: featureName,\n FEATURE_FOLDER: featureFolder,\n BRANCH_NAME: `feature/${featureName}`,\n COMPOSE_PROJECT: `${basename(projectConfig.path)}-${featureFolder}`,\n DOMAIN: domain,\n PROJECT_NAME: basename(projectConfig.path),\n PROJECT_PATH: projectConfig.path,\n PROJECTS_DIR: dirname(projectConfig.path),\n WORKSPACE_PATH: workspacePath,\n HOME: homedir(),\n };\n}\n\n/**\n * Sanitize docker-compose files to use platform-agnostic paths\n * Replaces hardcoded /home/username paths with ${HOME}\n */\nfunction sanitizeComposeFile(filePath: string): void {\n if (!existsSync(filePath)) return;\n\n let content = readFileSync(filePath, 'utf-8');\n const originalContent = content;\n\n // Pattern to match hardcoded home paths like /home/username or /Users/username\n // Replace with ${HOME} which docker-compose expands\n const homePatterns = [\n /\\/home\\/[a-zA-Z0-9_-]+\\//g, // Linux: /home/username/\n /\\/Users\\/[a-zA-Z0-9_-]+\\//g, // macOS: /Users/username/\n ];\n\n for (const pattern of homePatterns) {\n content = content.replace(pattern, '${HOME}/');\n }\n\n if (content !== originalContent) {\n writeFileSync(filePath, content, 'utf-8');\n }\n}\n\n/**\n * Validate feature name (alphanumeric and hyphens only)\n */\nfunction validateFeatureName(name: string): boolean {\n return /^[a-zA-Z0-9-]+$/.test(name);\n}\n\n/**\n * Create a git worktree\n * @param repoPath Path to the source git repository\n * @param targetPath Where to create the worktree\n * 
@param branchName Name of the feature branch to create/checkout
 * @param defaultBranch Base branch to create new branches from (default: 'main')
 * @returns Never throws: failures are reported as { success: false, message }.
 */
async function createWorktree(
  repoPath: string,
  targetPath: string,
  branchName: string,
  defaultBranch: string = 'main'
): Promise<{ success: boolean; message: string }> {
  try {
    // Fetch latest from origin
    await execAsync('git fetch origin', { cwd: repoPath });

    // Prune stale worktree entries (e.g., from deleted workspaces)
    await execAsync('git worktree prune', { cwd: repoPath });

    // Check if branch exists locally or remotely (exact match, not substring)
    const { stdout: localBranches } = await execAsync('git branch --list', { cwd: repoPath });
    const { stdout: remoteBranches } = await execAsync('git branch -r --list', { cwd: repoPath });

    // Strip the leading '*'/'+' markers git prints for checked-out branches before comparing
    const localList = localBranches.split('\n').map(b => b.replace(/^[*+\s]+/, '').trim()).filter(Boolean);
    const remoteList = remoteBranches.split('\n').map(b => b.trim()).filter(Boolean);
    const branchExists =
      localList.includes(branchName) ||
      remoteList.includes(`origin/${branchName}`);

    if (branchExists) {
      await execAsync(`git worktree add "${targetPath}" "${branchName}"`, { cwd: repoPath });
    } else {
      // Create new branch from the configured default branch
      await execAsync(`git worktree add "${targetPath}" -b "${branchName}" "${defaultBranch}"`, { cwd: repoPath });
    }

    // Clear unstaged deletions from the new worktree (e.g. .planning/ files that exist on the
    // feature branch but not on main appear as deleted in a fresh worktree). Without this,
    // `git rebase origin/main` fails immediately with "unstaged changes" (PAN-495).
    // Best-effort: failures are deliberately swallowed.
    await execAsync('git restore .', { cwd: targetPath }).catch(() => {});

    // Configure beads role so agents don't get "beads.role not configured" warnings
    await execAsync('git config beads.role contributor', { cwd: targetPath }).catch(() => {});

    return { success: true, message: `Created worktree at ${targetPath}` };
  } catch (error) {
    return { success: false, message: `Failed to create worktree: ${error}` };
  }
}

/**
 * Remove a git worktree and force-delete its feature branch.
 * Both git operations are best-effort (errors swallowed); the function
 * only reports failure if something outside them throws.
 */
async function removeWorktree(
  repoPath: string,
  targetPath: string,
  branchName: string
): Promise<{ success: boolean; message: string }> {
  try {
    // Remove worktree
    await execAsync(`git worktree remove "${targetPath}" --force`, { cwd: repoPath }).catch(() => {});

    // Optionally delete the branch
    await execAsync(`git branch -D "${branchName}"`, { cwd: repoPath }).catch(() => {});

    return { success: true, message: `Removed worktree at ${targetPath}` };
  } catch (error) {
    return { success: false, message: `Failed to remove worktree: ${error}` };
  }
}

// DNS functions (addWsl2HostEntry, removeWsl2HostEntry, syncDnsToWindows)
// are now in src/lib/dns.ts and imported above

/**
 * Assign a port from a range. Assignments are persisted in a flat
 * "folder:port" line-per-entry file; an existing assignment for the
 * feature folder is returned as-is.
 *
 * NOTE(review): the read-check-append sequence is not atomic — concurrent
 * callers could race on the same port file. Confirm callers are serialized.
 *
 * @returns The (new or previously recorded) port number
 * @throws Error when every port in the range is already taken
 */
function assignPort(
  portFile: string,
  featureFolder: string,
  range: [number, number]
): number {
  // Ensure port file exists
  if (!existsSync(portFile)) {
    mkdirSync(dirname(portFile), { recursive: true });
    writeFileSync(portFile, '');
  }

  const content = readFileSync(portFile, 'utf-8');
  const lines = content.split('\n').filter(Boolean);

  // Check if already assigned
  for (const line of lines) {
    const [folder, port] = line.split(':');
    if (folder === featureFolder) {
      return parseInt(port, 10);
    }
  }

  // Find next available port
  const usedPorts = new Set(lines.map(l => parseInt(l.split(':')[1], 10)));
  for (let port = range[0]; 
port <= range[1]; port++) {
    if (!usedPorts.has(port)) {
      // Append the new record, ensuring the existing content stays newline-terminated
      writeFileSync(portFile, content + (content.endsWith('\n') ? '' : '\n') + `${featureFolder}:${port}\n`);
      return port;
    }
  }

  throw new Error(`No available ports in range ${range[0]}-${range[1]}`);
}

/**
 * Release a port assignment by dropping the feature folder's "folder:port"
 * line from the port file.
 *
 * Note: the join() below drops the trailing newline; assignPort compensates
 * by re-adding one before it appends.
 *
 * @returns true on success or if the file does not exist; false on I/O error
 */
function releasePort(portFile: string, featureFolder: string): boolean {
  try {
    if (!existsSync(portFile)) return true;

    let content = readFileSync(portFile, 'utf-8');
    const lines = content.split('\n').filter(line => !line.startsWith(`${featureFolder}:`));
    writeFileSync(portFile, lines.join('\n'));
    return true;
  } catch {
    return false;
  }
}

/**
 * Process template files with placeholder replacement.
 *
 * Two modes: an explicit source->target list, or (when no list is given)
 * every *.template file in templateDir, written next to targetDir with the
 * .template suffix stripped.
 *
 * NOTE(review): the execute-permission chmod is only applied in the
 * "all .template files" branch, not in the explicit-list branch — confirm
 * that is intentional.
 *
 * @returns Human-readable log lines describing each processed template
 */
function processTemplates(
  templateDir: string,
  targetDir: string,
  placeholders: TemplatePlaceholders,
  templates?: Array<{ source: string; target: string }>
): string[] {
  const steps: string[] = [];

  if (!existsSync(templateDir)) {
    return steps;
  }

  // If specific templates are defined, process those
  if (templates && templates.length > 0) {
    for (const { source, target } of templates) {
      const sourcePath = join(templateDir, source);
      const targetPath = join(targetDir, target);

      if (existsSync(sourcePath)) {
        const content = readFileSync(sourcePath, 'utf-8');
        const processed = replacePlaceholders(content, placeholders);
        mkdirSync(dirname(targetPath), { recursive: true });
        writeFileSync(targetPath, processed);
        steps.push(`Processed template: ${source} -> ${target}`);
      }
    }
  } else {
    // Process all .template files
    const files = readdirSync(templateDir);
    for (const file of files) {
      if (file.endsWith('.template')) {
        const sourcePath = join(templateDir, file);
        const targetPath = join(targetDir, file.replace('.template', ''));

        const content = readFileSync(sourcePath, 'utf-8');
        const processed = replacePlaceholders(content, placeholders);
        writeFileSync(targetPath, processed);
        // Shell scripts need execute permission
        const targetName = file.replace('.template', '');
        if (targetName === 'dev' || targetName.endsWith('.sh')) {
          chmodSync(targetPath, 0o755);
        }
        steps.push(`Processed template: ${file}`);
      }
    }
  }

  return steps;
}

/**
 * @deprecated Use copyProjectTemplateDirs instead. Kept for non-.claude paths.
 */
function createSymlinks(
  sourceDir: string,
  targetDir: string,
  symlinks: string[]
): string[] {
  const steps: string[] = [];

  for (const symlink of symlinks) {
    const sourcePath = join(sourceDir, symlink);
    const targetPath = join(targetDir, symlink);

    if (existsSync(sourcePath)) {
      mkdirSync(dirname(targetPath), { recursive: true });
      try {
        symlinkSync(sourcePath, targetPath);
        steps.push(`Created symlink: ${symlink}`);
      } catch {
        // Symlink might already exist
      }
    }
  }

  return steps;
}

/**
 * Copy project template directories into workspace (replaces symlinks).
 * Recursively copies all files from each source directory.
 */
// Extensions treated as text: these get placeholder substitution on copy;
// everything else is copied byte-for-byte.
const TEXT_EXTENSIONS = new Set([
  '.md', '.sh', '.yml', '.yaml', '.json', '.ts', '.js', '.env', '.txt', '.toml', '.template',
]);

function copyProjectTemplateDirs(
  sourceDir: string,
  targetDir: string,
  dirs: string[],
  placeholders?: TemplatePlaceholders
): string[] {
  const steps: string[] = [];

  for (const dir of dirs) {
    const sourcePath = join(sourceDir, dir);
    const targetPath = join(targetDir, dir);

    if (!existsSync(sourcePath)) continue;

    // Recursively copy all files, applying placeholder substitution to text files
    function copyDir(src: string, dest: string): number {
      let count = 0;
      mkdirSync(dest, { recursive: true });
      const entries = readdirSync(src, { withFileTypes: true });
      for (const entry of entries) {
        const srcEntry = join(src, entry.name);
        const destEntry = join(dest, entry.name);
        if (entry.isDirectory()) {
          count += copyDir(srcEntry, destEntry);
        } else if (entry.isFile()) {
          const ext = 
extname(entry.name).toLowerCase();
          if (placeholders && TEXT_EXTENSIONS.has(ext)) {
            const content = readFileSync(srcEntry, 'utf-8');
            writeFileSync(destEntry, replacePlaceholders(content, placeholders));
          } else {
            copyFileSync(srcEntry, destEntry);
          }
          // count tracks files only; directories themselves are not counted
          count++;
        }
      }
      return count;
    }

    const count = copyDir(sourcePath, targetPath);
    steps.push(`Copied ${count} files from project template: ${dir}`);
  }

  return steps;
}

/**
 * Create a workspace: a git worktree (or one per repo for polyrepos) plus
 * supporting setup — dependencies, TLDR tooling, DNS, ports, skills,
 * templates, tunnel/Hume config, and optionally Docker containers.
 *
 * Validation failures and an already-existing workspace return early with
 * success=false; most later setup steps are best-effort and recorded in
 * result.steps rather than failing the whole operation.
 */
export async function createWorkspace(options: WorkspaceCreateOptions): Promise<WorkspaceCreateResult> {
  const { projectConfig, featureName, startDocker, dryRun, onProgress } = options;
  // Thin wrapper so call sites stay one-liners; no-op when onProgress is absent
  const progress = (label: string, detail: string, status: 'active' | 'complete' | 'error' = 'active') => {
    onProgress?.({ label, detail, status });
  };
  const result: WorkspaceCreateResult = {
    success: true,
    workspacePath: '',
    errors: [],
    steps: [],
  };

  // Validate feature name
  if (!validateFeatureName(featureName)) {
    result.success = false;
    result.errors.push('Invalid feature name. Use alphanumeric and hyphens only.');
    return result;
  }

  // Reject 'main' as feature name
  if (featureName === 'main') {
    result.success = false;
    result.errors.push('Cannot create workspace for "main". Use base repos directly.');
    return result;
  }

  const workspaceConfig = projectConfig.workspace || getDefaultWorkspaceConfig();
  const workspacesDir = join(projectConfig.path, workspaceConfig.workspaces_dir || 'workspaces');
  const featureFolder = `feature-${featureName}`;
  const workspacePath = join(workspacesDir, featureFolder);
  result.workspacePath = workspacePath;

  // Check if workspace already exists
  if (existsSync(workspacePath)) {
    result.success = false;
    result.errors.push(`Workspace already exists at ${workspacePath}`);
    return result;
  }

  if (dryRun) {
    result.steps.push('[DRY RUN] Would create workspace at: ' + workspacePath);
    return result;
  }

  // Create placeholders
  const placeholders = createPlaceholders(projectConfig, featureName, workspacePath);

  // Create workspace directory
  progress('Creating git worktree', `feature/${featureName}`);
  mkdirSync(workspacePath, { recursive: true });
  result.steps.push('Created workspace directory');

  // Handle polyrepo vs monorepo
  if (workspaceConfig.type === 'polyrepo' && workspaceConfig.repos) {
    // Create worktrees for each repo
    for (const repo of workspaceConfig.repos) {
      // Resolve symlinks to get the actual git repository path
      // (e.g., myn/frontend -> ../frontend needs to resolve to actual path)
      const rawRepoPath = join(projectConfig.path, repo.path);
      const repoPath = existsSync(rawRepoPath) ? 
realpathSync(rawRepoPath) : rawRepoPath;
      const targetPath = join(workspacePath, repo.name);
      const branchPrefix = repo.branch_prefix || 'feature/';
      const branchName = `${branchPrefix}${featureName}`;
      // Per-repo default_branch overrides workspace-level, falls back to 'main'
      const defaultBranch = repo.default_branch || workspaceConfig.default_branch || 'main';

      const worktreeResult = await createWorktree(repoPath, targetPath, branchName, defaultBranch);
      if (worktreeResult.success) {
        result.steps.push(`Created worktree for ${repo.name}: ${branchName} (from ${defaultBranch})`);
      } else {
        result.errors.push(`${repo.name}: ${worktreeResult.message}`);
        result.success = false; // Fail the entire workspace creation if any worktree fails
      }
    }
  } else {
    // Monorepo: create single worktree
    const branchName = `feature/${featureName}`;
    const defaultBranch = workspaceConfig.default_branch || 'main';
    const worktreeResult = await createWorktree(projectConfig.path, workspacePath, branchName, defaultBranch);
    if (worktreeResult.success) {
      result.steps.push(`Created worktree: ${branchName} (from ${defaultBranch})`);
    } else {
      result.errors.push(worktreeResult.message);
      result.success = false; // Fail the entire workspace creation if worktree fails
    }
  }

  progress('Creating git worktree', 'Worktree ready', 'complete');

  // Remove stale .planning/ directory inherited from main branch.
  // This contains STATE.md and other planning artifacts from a PREVIOUS issue.
  // If left in place, the new agent reads it and works on the wrong issue.
  // SAFETY: resolve() to absolute path and verify it's under a known workspace prefix
  // to prevent path traversal from ever reaching rmSync.
  const resolvedWorkspace = resolve(workspacePath);
  const resolvedPlanning = resolve(resolvedWorkspace, '.planning');
  const isUnderWorkspacesDir = resolvedWorkspace.match(/\/workspaces\/feature-[a-z0-9-]+$/);
  if (
    isUnderWorkspacesDir &&
    resolvedPlanning === join(resolvedWorkspace, '.planning') &&
    existsSync(join(resolvedWorkspace, '.git')) &&
    existsSync(resolvedPlanning)
  ) {
    rmSync(resolvedPlanning, { recursive: true, force: true });
    result.steps.push('Removed stale .planning/ directory from previous issue');
  }

  // Ensure .pan/events/, .pan/convoy/, .pan/prompts/ are in the project's .gitignore
  try {
    ensurePanGitignore(projectConfig.path);
    result.steps.push('Verified .pan/ runtime paths are in .gitignore');
  } catch (gitignoreErr: any) {
    // Non-fatal — log but don't block workspace creation
    result.steps.push(`Warning: could not update .gitignore: ${gitignoreErr.message}`);
  }

  // Sanitize any docker-compose files in the workspace to use platform-agnostic paths
  // This handles files inherited from worktrees that may have hardcoded home paths
  const devcontainerDir = join(workspacePath, '.devcontainer');
  if (existsSync(devcontainerDir)) {
    const composeFiles = readdirSync(devcontainerDir)
      .filter(f => f.includes('compose') && (f.endsWith('.yml') || f.endsWith('.yaml')));
    for (const composeFile of composeFiles) {
      sanitizeComposeFile(join(devcontainerDir, composeFile));
    }
    if (composeFiles.length > 0) {
      result.steps.push(`Sanitized ${composeFiles.length} compose file(s) for platform compatibility`);
    }
  }

  // Install dependencies using the project's package manager
  progress('Installing dependencies', projectConfig.package_manager || 'detecting...');
  // Fallback detection: a bun.lock in the worktree implies bun, otherwise npm
  const pkgManager = projectConfig.package_manager || (existsSync(join(workspacePath, 'bun.lock')) ? 'bun' : 'npm');
  const installCmd = pkgManager === 'bun' ? 'bun install' : `${pkgManager} install`;
  try {
    await execAsync(installCmd, { cwd: workspacePath, encoding: 'utf-8', timeout: 60000 });
    result.steps.push(`Installed dependencies (${pkgManager})`);
    progress('Installing dependencies', `${pkgManager} — done`, 'complete');
  } catch (installErr: any) {
    // Install failures are non-fatal: recorded as a warning step, not an error
    result.steps.push(`Dependency install warning: ${installErr.message?.slice(0, 100)}`);
    progress('Installing dependencies', 'Warning (non-fatal)', 'complete');
  }

  // Build workspace packages (e.g., @panopticon/contracts) so types resolve correctly
  const workspacePackages = projectConfig.workspace_packages;
  if (workspacePackages && workspacePackages.length > 0) {
    progress('Building workspace packages', workspacePackages.map(p => p.path).join(', '));
    for (const pkg of workspacePackages) {
      try {
        await execAsync(pkg.build_command, { cwd: join(workspacePath, pkg.path), encoding: 'utf-8', timeout: 30000 });
        result.steps.push(`Built workspace package: ${pkg.path}`);
      } catch (buildErr: any) {
        result.steps.push(`Build warning (${pkg.path}): ${buildErr.message?.slice(0, 100)}`);
      }
    }
    progress('Building workspace packages', 'Packages built', 'complete');
  }

  // Setup TLDR code analysis for workspace (after worktree creation to ensure directory is ready)
  try {
    // Check if python3 is available
    await execAsync('python3 --version');
    const venvPath = join(workspacePath, '.venv');
    const tldrBin = join(venvPath, 'bin', 'tldr');

    // Check if main branch already has a working venv with llm-tldr
    const mainVenvTldr = join(projectConfig.path, '.venv', 'bin', 'tldr');
    const mainVenvExists = existsSync(mainVenvTldr);

    if (mainVenvExists) {
      // Copy the entire venv from main — faster than pip install (seconds vs 30s+)
      await execAsync(`cp -a "${join(projectConfig.path, '.venv')}" "${venvPath}"`);
      result.steps.push('Copied Python venv from main branch');
    } else {
      // Create fresh venv and install llm-tldr
      await execAsync(`python3 -m venv "${venvPath}"`, { cwd: workspacePath });
      const pipPath = join(venvPath, 'bin', 'pip');
      await execAsync(`"${pipPath}" install llm-tldr`, { cwd: workspacePath, timeout: 120000 });
      result.steps.push('Created Python venv and installed llm-tldr');

      // Apply .tsx/.jsx support patch (upstream llm-tldr only checks .ts)
      const patchScript = join(projectConfig.path, 'scripts', 'patches', 'llm-tldr-tsx-support.py');
      if (existsSync(patchScript)) {
        await execAsync(`python3 "${patchScript}" "${venvPath}"`);
        result.steps.push('Applied llm-tldr .tsx/.jsx patch');
      }
    }

    // Verify tldr binary exists after setup
    if (!existsSync(tldrBin)) {
      result.steps.push('TLDR setup incomplete: tldr binary not found after venv creation');
    } else {
      // Copy .tldr index from main branch if it exists
      const mainTldrDir = join(projectConfig.path, '.tldr');
      const workspaceTldrDir = join(workspacePath, '.tldr');

      if (existsSync(mainTldrDir)) {
        await execAsync(`cp -r "${mainTldrDir}" "${workspaceTldrDir}"`);
        result.steps.push('Copied TLDR index from main branch');
      }

      // Start TLDR daemon for this workspace
      const { getTldrDaemonService } = await import('./tldr-daemon.js');
      const tldrService = getTldrDaemonService(workspacePath, venvPath);
      await tldrService.start(true);
      result.steps.push('Started TLDR daemon');

      // Warm the index in the background — ensures workspaces always have a working index
      // even when the main branch cache was empty (nothing to copy)
      try {
        await tldrService.warm(true); // background=true: non-blocking
        result.steps.push('TLDR index warm initiated (background)');
      } catch {
        // Non-fatal — daemon may not support warm yet
      }
    }
  } catch (error: any) {
    // TLDR setup is optional — don't fail workspace creation, but log clearly.
    // Distinguishes "python3 missing" from other failures by sniffing the message.
    if (error.message?.includes('python3')) {
      result.steps.push('Skipped TLDR setup (python3 not available)');
    } else {
      console.warn(`⚠ TLDR setup failed: ${error.message}`);
      result.steps.push(`TLDR setup failed: ${error.message}`);
    }
  }

  // Configure DNS
  if (workspaceConfig.dns) {
    const dnsMethod = workspaceConfig.dns.sync_method || 'wsl2hosts';
    for (const entryPattern of workspaceConfig.dns.entries) {
      const hostname = replacePlaceholders(entryPattern, placeholders);

      if (addDnsEntry(dnsMethod, hostname)) {
        result.steps.push(`Added DNS entry: ${hostname} (${dnsMethod})`);
      }
    }

    // Sync to Windows if using wsl2hosts method
    if (dnsMethod === 'wsl2hosts') {
      const synced = await syncDnsToWindows();
      if (synced) {
        result.steps.push('Synced DNS to Windows hosts file');
      }
    }
  }

  // Assign ports
  if (workspaceConfig.ports) {
    for (const [portName, portConfig] of Object.entries(workspaceConfig.ports)) {
      const portFile = join(projectConfig.path, `.${portName}-ports`);
      try {
        const port = assignPort(portFile, featureFolder, portConfig.range);
        result.steps.push(`Assigned ${portName} port: ${port}`);
        // Add to placeholders for use in templates
        (placeholders as any)[`${portName.toUpperCase()}_PORT`] = String(port);
      } catch (error) {
        result.errors.push(`Failed to assign ${portName} port: ${error}`);
      }
    }
  }

  // Install base Panopticon skills/agents/rules from cache
  progress('Installing skills & templates', 'Panopticon skills, agents, rules');
  const mergeResult = mergeSkillsIntoWorkspace(workspacePath);
  const mergeTotal = mergeResult.added.length + mergeResult.updated.length;
  if (mergeTotal > 0) {
    result.steps.push(`Installed ${mergeTotal} Panopticon files (${mergeResult.added.length} new, ${mergeResult.updated.length} updated)`);
  }

  // Overlay project-local skills from .pan/skills/ (higher precedence than global cache)
  const panMergeResult = mergePanSkillsIntoWorkspace(projectConfig.path, workspacePath);
  if (panMergeResult.added.length > 0) {
    result.steps.push(`Installed ${panMergeResult.added.length} project-local skill file(s) from 
.pan/skills/ (${panMergeResult.overlayed.join(', ')})`);
  }

  // Process agent templates (project template overlay — wins over Panopticon base)
  if (workspaceConfig.agent?.template_dir) {
    const templateDir = join(projectConfig.path, workspaceConfig.agent.template_dir);

    // Process template files
    const templateSteps = processTemplates(
      templateDir,
      workspacePath,
      placeholders,
      workspaceConfig.agent.templates
    );
    result.steps.push(...templateSteps);

    // Copy .claude/ directories from project template (copy_dirs replaces legacy symlinks)
    const dirsToSync = workspaceConfig.agent.copy_dirs || workspaceConfig.agent.symlinks;
    if (dirsToSync) {
      const copySteps = copyProjectTemplateDirs(templateDir, workspacePath, dirsToSync, placeholders);
      result.steps.push(...copySteps);
    }
  }

  // Generate .env file
  if (workspaceConfig.env?.template) {
    const envContent = replacePlaceholders(workspaceConfig.env.template, placeholders);
    writeFileSync(join(workspacePath, '.env'), envContent);
    result.steps.push('Created .env file');
  }

  // Process Docker compose templates
  if (workspaceConfig.docker?.compose_template) {
    const templateDir = join(projectConfig.path, workspaceConfig.docker.compose_template);
    const devcontainerDir = join(workspacePath, '.devcontainer');
    mkdirSync(devcontainerDir, { recursive: true });

    const templateSteps = processTemplates(templateDir, devcontainerDir, placeholders);
    result.steps.push(...templateSteps);

    // Copy non-template files (like Dockerfile)
    if (existsSync(templateDir)) {
      const files = readdirSync(templateDir);
      for (const file of files) {
        if (!file.endsWith('.template')) {
          const sourcePath = join(templateDir, file);
          const targetPath = join(devcontainerDir, file);
          copyFileSync(sourcePath, targetPath);
        }
      }
    }

    // Sanitize docker-compose files to use platform-agnostic paths
    // This fixes hardcoded /home/username or /Users/username paths
    const composeFiles = readdirSync(devcontainerDir)
      .filter(f => f.includes('compose') && (f.endsWith('.yml') || f.endsWith('.yaml')));
    for (const composeFile of composeFiles) {
      sanitizeComposeFile(join(devcontainerDir, composeFile));
    }
    if (composeFiles.length > 0) {
      result.steps.push(`Sanitized ${composeFiles.length} compose file(s) for platform compatibility`);
    }

    // Create ./dev symlink at workspace root pointing to .devcontainer/dev
    // Symlink keeps changes in sync - editing ./dev updates .devcontainer/dev
    // (relative target so the link survives the workspace being moved)
    const devScriptInContainer = join(devcontainerDir, 'dev');
    const devScriptAtRoot = join(workspacePath, 'dev');
    if (existsSync(devScriptInContainer) && !existsSync(devScriptAtRoot)) {
      try {
        symlinkSync('.devcontainer/dev', devScriptAtRoot);
        chmodSync(devScriptInContainer, 0o755); // Make executable
        result.steps.push('Created ./dev symlink');
      } catch (error) {
        result.errors.push(`Failed to create ./dev symlink: ${error}`);
      }
    }
  }

  // Note: Beads initialization is handled by the calling command (workspace.ts)
  // With beads v0.47.1+, worktrees use shared database with labels for isolation
  // The workspace.ts command creates a bead with workspace:issue-id label

  // Set up Cloudflare tunnel for external access (before Docker so containers can use tunnel URLs)
  if (workspaceConfig.tunnel) {
    const tunnelResult = await addTunnelIngress(workspaceConfig.tunnel, placeholders);
    result.steps.push(...tunnelResult.steps);
    if (!tunnelResult.success) {
      result.errors.push('Tunnel setup had failures (see steps for details)');
    }
  }

  // Create Hume EVI config and write env file for Docker (before Docker so containers pick up the config ID)
  if (workspaceConfig.hume) {
    const humeResult = await createHumeConfig(workspaceConfig.hume, placeholders);
    result.steps.push(...humeResult.steps);
    if (humeResult.configId) {
      writeFileSync(
        join(workspacePath, '.hume-config'),
        `HUME_CONFIG_ID=${humeResult.configId}\nVITE_HUME_CONFIG_ID=${humeResult.configId}\n`,
      );
      result.steps.push('Wrote .hume-config with Hume EVI config ID');
    }
    if (!humeResult.success) {
      result.errors.push('Hume EVI config setup had failures (see steps for details)');
    }
  }

  progress('Installing skills & templates', 'Skills and templates ready', 'complete');

  // Start Docker containers if requested
  if (startDocker) {
    progress('Starting Docker containers', 'Building and starting services');
    // Check for Traefik
    if (workspaceConfig.docker?.traefik) {
      // Always use the installed Traefik location (~/.panopticon/traefik/), not the
      // template source in projects.yaml. The template is copied to ~/.panopticon/traefik/
      // during `pan install`, and the installed copy has the correct volume mounts
      // (dynamic configs, certs) relative to ~/.panopticon/traefik/.
      const traefikPath = join(homedir(), '.panopticon', 'traefik', 'docker-compose.yml');
      if (existsSync(traefikPath)) {
        try {
          await execAsync(`docker compose -f "${traefikPath}" up -d`, { cwd: join(homedir(), '.panopticon', 'traefik') });
          result.steps.push('Started Traefik');
        } catch (error: any) {
          const msg = error?.message || String(error);
          if (msg.includes('port is already allocated') || msg.includes('address already in use')) {
            // Traefik (or another reverse proxy) is already running — not an error
            result.steps.push('Traefik already running (port in use)');
          } else {
            result.errors.push(`Failed to start Traefik: ${error}`);
          }
        }
      }
    }

    // Start workspace containers — first existing compose file wins
    const composeLocations = [
      join(workspacePath, 'docker-compose.yml'),
      join(workspacePath, 'docker-compose.yaml'),
      join(workspacePath, '.devcontainer', 'docker-compose.yml'),
      join(workspacePath, '.devcontainer', 'docker-compose.devcontainer.yml'),
    ];

    for (const composePath of composeLocations) {
      if (existsSync(composePath)) {
        try {
          // Don't pass -p: the compose file's `name:` field is the authority.
          // Passing -p with a different value creates a second Docker project
          // on container restart, splitting services onto separate networks.
          await execAsync(`docker compose -f "${composePath}" up -d --build`, { cwd: dirname(composePath), timeout: 300000 });
          result.steps.push(`Started containers from ${basename(composePath)}`);
        } catch (error) {
          result.errors.push(`Failed to start containers: ${error}`);
        }
        break;
      }
    }
  }

  if (startDocker) {
    progress('Starting Docker containers', 'Containers running', 'complete');
  }

  // Pre-trust workspace directory in Claude Code so agents don't get the trust prompt
  try {
    preTrustDirectory(workspacePath);
    result.steps.push('Pre-trusted workspace in Claude Code');
  } catch {
    // Non-fatal — agent can still work, user will just see trust prompt
  }

  result.success = result.errors.length === 0;
  return result;
}

/**
 * Pre-register a directory as trusted in Claude Code's ~/.claude.json.
 * This prevents the "Quick safety check: Is this a project you created or one you trust?" prompt
 * when agents are spawned in dynamically-created workspace directories.
 *
 * No-op when ~/.claude.json does not exist. Only writes the file when a
 * change is actually needed.
 */
export function preTrustDirectory(dirPath: string): void {
  const claudeJsonPath = join(homedir(), '.claude.json');
  if (!existsSync(claudeJsonPath)) return;

  const data = JSON.parse(readFileSync(claudeJsonPath, 'utf8'));
  if (!data.projects) data.projects = {};

  // Only add if not already present
  if (data.projects[dirPath]) {
    if (!data.projects[dirPath].hasTrustDialogAccepted) {
      data.projects[dirPath].hasTrustDialogAccepted = true;
      writeFileSync(claudeJsonPath, JSON.stringify(data, null, 2), 'utf8');
    }
    return;
  }

  data.projects[dirPath] = {
    allowedTools: [],
    mcpContextUris: [],
    mcpServers: {},
    enabledMcpjsonServers: [],
    disabledMcpjsonServers: [],
    hasTrustDialogAccepted: true,
    projectOnboardingSeenCount: 0,
    hasClaudeMdExternalIncludesApproved: false,
    
hasClaudeMdExternalIncludesWarningShown: false,
  };

  writeFileSync(claudeJsonPath, JSON.stringify(data, null, 2), 'utf8');
}

/** Options accepted by removeWorkspace(). */
export interface WorkspaceRemoveOptions {
  projectConfig: ProjectConfig;
  featureName: string;
  dryRun?: boolean;
}

/** Outcome of removeWorkspace(). */
export interface WorkspaceRemoveResult {
  success: boolean;
  errors: string[];
  steps: string[];
}

/**
 * Result of Docker container cleanup for a workspace.
 */
export interface DockerCleanupResult {
  /** Whether compose files were found (containers may or may not have been running) */
  containersFound: boolean;
  /** Human-readable log of cleanup steps taken */
  steps: string[];
}

/**
 * Stop Docker containers and clean up Docker-created files for a workspace.
 *
 * Extracted as a standalone function so it can be used by:
 * - removeWorkspace() during normal workspace removal
 * - deep-wipe endpoint for complete issue cleanup
 * - workspace-migrate for pre-migration cleanup
 *
 * Failures are logged but never thrown — callers should not fail if Docker is unavailable.
 *
 * @param workspacePath Absolute path to the workspace directory
 * @param projectName   Used in the fallback compose project name convention
 * @param featureName   Feature name (without the "feature-" prefix)
 */
export async function stopWorkspaceDocker(
  workspacePath: string,
  projectName: string,
  featureName: string,
): Promise<DockerCleanupResult> {
  const result: DockerCleanupResult = {
    containersFound: false,
    steps: [],
  };

  // Find all compose files in devcontainer directory (some projects use multiple)
  const devcontainerDir = join(workspacePath, '.devcontainer');
  const composeFiles: string[] = [];

  if (existsSync(devcontainerDir)) {
    const possibleFiles = [
      'docker-compose.devcontainer.yml',
      'docker-compose.yml',
      'compose.yml',
      'compose.infra.yml',
      'compose.override.yml',
    ];
    for (const file of possibleFiles) {
      const fullPath = join(devcontainerDir, file);
      if (existsSync(fullPath)) {
        composeFiles.push(fullPath);
      }
    }
  }

  // Fallback: check for compose file in workspace root
  if (composeFiles.length === 0) {
    const rootCompose = join(workspacePath, 'docker-compose.yml');
    if (existsSync(rootCompose)) {
      composeFiles.push(rootCompose);
    }
  }

  if (composeFiles.length > 0) {
    result.containersFound = true;
    try {
      const fileFlags = composeFiles.map(f => `-f "${f}"`).join(' ');
      const cwd = existsSync(devcontainerDir) ? devcontainerDir : workspacePath;

      // Derive compose project name from the dev script (same logic as dashboard)
      // or fall back to "{projectName}-feature-{featureName}" convention.
      let composeProjectName = `${projectName}-feature-${featureName}`;
      const devScriptPaths = [
        join(workspacePath, '.devcontainer', 'dev'),
        join(workspacePath, 'dev'),
      ];
      for (const devPath of devScriptPaths) {
        try {
          if (existsSync(devPath)) {
            const content = readFileSync(devPath, 'utf-8');
            // Case 1: COMPOSE_PROJECT_NAME="<prefix>${FEATURE_FOLDER}" — substitute ours
            const match = content.match(/COMPOSE_PROJECT_NAME="([^$"]*)\$\{FEATURE_FOLDER\}"/);
            if (match) {
              composeProjectName = `${match[1]}feature-${featureName}`;
              break;
            }
            // Case 2: a fully literal COMPOSE_PROJECT_NAME="..." — use it verbatim
            const literalMatch = content.match(/COMPOSE_PROJECT_NAME="([^"]+)"/);
            if (literalMatch) {
              composeProjectName = literalMatch[1];
              break;
            }
          }
        } catch {
          // Fall through to default
        }
      }

      await execAsync(`docker compose ${fileFlags} -p "${composeProjectName}" down -v --remove-orphans`, {
        cwd,
        timeout: 60000,
      });
      result.steps.push(`Stopped Docker containers (${composeFiles.length} compose files)`);
    } catch (error: any) {
      // Log but don't fail — containers might not be running
      result.steps.push(`Docker cleanup attempted (${error.message?.split('\n')[0] || 'containers may not be running'})`);
    }
  }

  // Clean up Docker-created files (root-owned in containers)
  // Uses a throwaway alpine container because the host user cannot delete root-owned files
  try {
    await execAsync(
      `docker run --rm -v "${workspacePath}:/workspace" alpine sh -c "find /workspace -user root -delete 2>&1 | tail -100 || true"`,
      { timeout: 30000, maxBuffer: 10 * 1024 * 1024 }
    );
    result.steps.push('Cleaned up Docker-created files');
  } catch {
    // Alpine container might not be available
  }

  
return result;
}

/**
 * Remove a workspace: stops the TLDR daemon and Docker containers, removes
 * git worktrees (polyrepo or monorepo), tears down DNS/tunnel/Hume config,
 * releases ports, and deletes the workspace directory.
 */
export async function removeWorkspace(options: WorkspaceRemoveOptions): Promise<WorkspaceRemoveResult> {
  const { projectConfig, featureName, dryRun } = options;
  const result: WorkspaceRemoveResult = {
    success: true,
    errors: [],
    steps: [],
  };

  const workspaceConfig = projectConfig.workspace || getDefaultWorkspaceConfig();
  const workspacesDir = join(projectConfig.path, workspaceConfig.workspaces_dir || 'workspaces');
  const featureFolder = `feature-${featureName}`;
  const workspacePath = join(workspacesDir, featureFolder);

  if (!existsSync(workspacePath)) {
    result.success = false;
    result.errors.push(`Workspace not found at ${workspacePath}`);
    return result;
  }

  if (dryRun) {
    result.steps.push('[DRY RUN] Would remove workspace at: ' + workspacePath);
    return result;
  }

  // Stop TLDR daemon for workspace (if it exists)
  const venvPath = join(workspacePath, '.venv');
  if (existsSync(venvPath)) {
    try {
      const { getTldrDaemonService } = await import('./tldr-daemon.js');
      const tldrService = getTldrDaemonService(workspacePath, venvPath);
      await tldrService.stop();
      result.steps.push('Stopped TLDR daemon');
    } catch (error: any) {
      // Non-fatal - daemon may not be running
      console.warn(`⚠ Failed to stop TLDR daemon: ${error?.message}`);
    }
  }

  // Stop Docker containers and clean up Docker-created files
  const dockerResult = await stopWorkspaceDocker(workspacePath, projectConfig.name || 'workspace', featureName);
  result.steps.push(...dockerResult.steps);

  // Remove worktrees
  if (workspaceConfig.type === 'polyrepo' && workspaceConfig.repos) {
    for (const repo of workspaceConfig.repos) {
      const repoPath = join(projectConfig.path, repo.path);
      const targetPath = join(workspacePath, repo.name);
      const branchPrefix = repo.branch_prefix || 'feature/';
      const branchName = `${branchPrefix}${featureName}`;

      const worktreeResult = await removeWorktree(repoPath, targetPath, branchName);
      if (worktreeResult.success) {
        result.steps.push(`Removed worktree for ${repo.name}`);
      } else {
        result.errors.push(worktreeResult.message);
      }
    }
  } else {
    // Monorepo: remove single worktree
    const branchName = `feature/${featureName}`;
    const worktreeResult = await removeWorktree(projectConfig.path, workspacePath, branchName);
    if (worktreeResult.success) {
      result.steps.push('Removed worktree');
    } else {
      result.errors.push(worktreeResult.message);
    }
  }

  // Remove DNS entries
  if (workspaceConfig.dns) {
    const placeholders = createPlaceholders(projectConfig, featureName, workspacePath);

    const dnsMethod = workspaceConfig.dns.sync_method || 'wsl2hosts';
    for (const entryPattern of workspaceConfig.dns.entries) {
      const hostname = replacePlaceholders(entryPattern, placeholders);
      if (removeDnsEntry(dnsMethod, hostname)) {
        result.steps.push(`Removed DNS entry: ${hostname}`);
      }
    }
  }

  // Remove Cloudflare tunnel entries
  if (workspaceConfig.tunnel) {
    const placeholders = createPlaceholders(projectConfig, featureName, workspacePath);
    const tunnelResult = await removeTunnelIngress(workspaceConfig.tunnel, placeholders);
    result.steps.push(...tunnelResult.steps);
  }

  // Remove Hume EVI config
  if (workspaceConfig.hume) {
    const placeholders = createPlaceholders(projectConfig, featureName, workspacePath);
    const humeResult = await deleteHumeConfig(workspaceConfig.hume, placeholders);
    result.steps.push(...humeResult.steps);
  }

  // Release ports
  if (workspaceConfig.ports) {
    for (const [portName] of Object.entries(workspaceConfig.ports)) {
      const portFile = join(projectConfig.path, `.${portName}-ports`);
      if (releasePort(portFile, featureFolder)) {
        result.steps.push(`Released ${portName} port`);
      }
    }
  }

  // Remove workspace directory
  // (shelled-out rm -rf rather than rmSync: handles lingering root-owned files' parents)
  try {
    await execAsync(`rm -rf "${workspacePath}"`, { maxBuffer: 10 * 1024 * 1024 });
    result.steps.push('Removed workspace directory');
    
} catch (error) {\n result.errors.push(`Failed to remove workspace directory: ${error}`);\n }\n\n result.success = result.errors.length === 0;\n return result;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AAgCA,SAAS,SAAS,WAAmB,WAA6B;CAChE,MAAM,SAAmB,EAAE;AAC3B,KAAI,CAAC,WAAW,UAAU,CAAE,QAAO;CAEnC,SAAS,KAAK,KAAmB;EAC/B,MAAM,UAAU,YAAY,KAAK,EAAE,eAAe,MAAM,CAAC;AACzD,OAAK,MAAM,SAAS,SAAS;GAC3B,MAAM,WAAW,KAAK,KAAK,MAAM,KAAK;AACtC,OAAI,MAAM,aAAa,CACrB,MAAK,SAAS;YACL,MAAM,QAAQ,EAAE;IACzB,MAAM,MAAM,SAAS,WAAW,SAAS;IACzC,MAAM,aAAa,KAAK,WAAW,IAAI;AACvC,cAAU,QAAQ,WAAW,EAAE,EAAE,WAAW,MAAM,CAAC;AACnD,iBAAa,UAAU,WAAW;AAClC,WAAO,KAAK,IAAI;;;;AAKtB,MAAK,UAAU;AACf,QAAO;;;;;;;;;;;;AAaT,SAAgB,yBAAyB,eAAoC;CAC3E,MAAM,YAAY,KAAK,eAAe,UAAU;CAChD,MAAM,eAAe,KAAK,WAAW,4BAA4B;CACjE,MAAM,WAAW,aAAa,aAAa;CAE3C,MAAM,SAAsB;EAC1B,OAAO,EAAE;EACT,SAAS,EAAE;EACX,SAAS,EAAE;EACX,WAAW,EAAE;EACd;AAGD,WAAU,KAAK,WAAW,SAAS,EAAE,EAAE,WAAW,MAAM,CAAC;AACzD,WAAU,KAAK,WAAW,SAAS,EAAE,EAAE,WAAW,MAAM,CAAC;CAGzD,MAAM,UAAgF;EACpF;GAAE,UAAU;GAAU,WAAW;GAAY,cAAc;GAAU;EACrE;GAAE,UAAU;GAAU,WAAW;GAAkB,cAAc;GAAU;EAC3E;GAAE,UAAU;GAAS,WAAW;GAAiB,cAAc;GAAS;EACzE;AAED,MAAK,MAAM,EAAE,UAAU,WAAW,kBAAkB,SAAS;AAC3D,MAAI,CAAC,WAAW,UAAU,CAAE;EAE5B,MAAM,SAAS,eAAe,GAAG,aAAa,KAAK;EACnD,MAAM,QAAQ,mBAAmB,WAAW,GAAG;AAE/C,OAAK,MAAM,QAAQ,OAAO;GACxB,MAAM,eAAe,GAAG,SAAS,KAAK;GACtC,MAAM,aAAa,KAAK,WAAW,aAAa;GAChD,MAAM,aAAa,SAAS,KAAK,aAAa;AAK9C,WAFe,sBAAsB,YAAY,cAAc,SAAS,CAEzD,QAAf;IACE,KAAK;AAEH,eAAU,QAAQ,WAAW,EAAE,EAAE,WAAW,MAAM,CAAC;AACnD,kBAAa,KAAK,cAAc,WAAW;AAC3C,sBAAiB,UAAU,cAAc,YAAY,aAAa;AAClE,YAAO,MAAM,KAAK,aAAa;AAC/B;IAEF,KAAK;AAEH,kBAAa,KAAK,cAAc,WAAW;AAC3C,sBAAiB,UAAU,cAAc,YAAY,aAAa;AAClE,YAAO,QAAQ,KAAK,aAAa;AACjC;IAEF,KAAK;AAEH,YAAO,QAAQ,KAAK,GAAG,aAAa,qBAAqB;AACzD;IAEF,KAAK;AAEH,YAAO,QAAQ,KAAK,GAAG,aAAa,eAAe;AACnD;;;;AAMR,eAAc,cAAc,SAAS;AAErC,QAAO;;;;;;;;;;;;;AAcT,SAAgB,4BACd,eACA,aACA,WACU;CACV,MAAM,YAAY,KAAK,eAAe,UAAU;CAChD,MAAM,eAAe,KAAK,WAAW,4BAA4B;CACjE,MAAM,WAAW,aAAa,aAAa;CAC3C,MAAM,YAAsB,EAAE;AAE9B,KAAI,CAAC,WAAW,YAAY,CAAE,QAAO;AAErC,KAAI,a
AAa,UAAU,SAAS,EAElC,MAAK,MAAM,EAAE,QAAQ,YAAY,WAAW;EAC1C,MAAM,aAAa,KAAK,aAAa,OAAO;AAC5C,MAAI,CAAC,WAAW,WAAW,CAAE;EAE7B,MAAM,aAAa,KAAK,eAAe,OAAO;AAC9C,YAAU,QAAQ,WAAW,EAAE,EAAE,WAAW,MAAM,CAAC;AAGnD,MAAI,OAAO,SAAS,YAAY,CAG9B;AAGF,eAAa,YAAY,WAAW;AAGpC,MAAI,OAAO,WAAW,WAAW,EAAE;GACjC,MAAM,eAAe,OAAO,MAAM,EAAkB;AAEpD,oBAAiB,UAAU,cADd,SAAS,WAAW,EACc,mBAAmB;AAClE,aAAU,KAAK,aAAa;;;MAG3B;EAEL,MAAM,mBAAmB,KAAK,aAAa,UAAU;AACrD,MAAI,WAAW,iBAAiB,EAAE;GAChC,MAAM,SAAS,SAAS,kBAAkB,UAAU;AACpD,QAAK,MAAM,OAAO,QAAQ;AAGxB,qBAAiB,UAAU,KADd,SADM,KAAK,WAAW,IAAI,CACN,EACK,mBAAmB;AACzD,cAAU,KAAK,IAAI;;;;AAMzB,eAAc,cAAc,SAAS;AAErC,QAAO;;;;;;;;;;;;;AAuFT,SAAgB,4BAA4B,aAAqB,eAAoC;CACnG,MAAM,SAAsB;EAAE,OAAO,EAAE;EAAE,SAAS,EAAE;EAAE,SAAS,EAAE;EAAE,WAAW,EAAE;EAAE;CAClF,MAAM,eAAe,KAAK,aAAa,QAAQ,SAAS;AACxD,KAAI,CAAC,WAAW,aAAa,CAAE,QAAO;CAEtC,MAAM,kBAAkB,KAAK,eAAe,WAAW,SAAS;CAChE,MAAM,eAAe,KAAK,eAAe,WAAW,4BAA4B;CAChF,MAAM,WAAW,aAAa,aAAa;CAE3C,MAAM,YAAY,YAAY,cAAc,EAAE,eAAe,MAAM,CAAC,CACjE,QAAO,MAAK,EAAE,aAAa,CAAC,CAC5B,KAAI,MAAK,EAAE,KAAK;AAEnB,MAAK,MAAM,aAAa,WAAW;EACjC,MAAM,iBAAiB,KAAK,cAAc,UAAU;EACpD,MAAM,iBAAiB,KAAK,iBAAiB,UAAU;AAGvD,MAAI,WAAW,eAAe,EAAE;AAC9B,UAAO,QAAQ,KAAK,UAAU,UAAU,sCAAsC;AAC9E;;EAIF,MAAM,QAAQ,mBAAmB,gBAAgB,GAAG;AACpD,YAAU,gBAAgB,EAAE,WAAW,MAAM,CAAC;EAC9C,IAAI,WAAW;AACf,OAAK,MAAM,QAAQ,OAAO;GACxB,MAAM,aAAa,KAAK,gBAAgB,KAAK,aAAa;AAC1D,aAAU,QAAQ,WAAW,EAAE,EAAE,WAAW,MAAM,CAAC;AACnD,gBAAa,KAAK,cAAc,WAAW;GAC3C,MAAM,OAAO,SAAS,WAAW;AACjC,oBAAiB,UAAU,UAAU,UAAU,GAAG,KAAK,gBAAgB,MAAM,aAAa;AAC1F,UAAO,MAAM,KAAK,UAAU,UAAU,GAAG,KAAK,eAAe;AAC7D,cAAW;;AAEb,MAAI,SACF,QAAO,UAAU,KAAK,UAAU;;AAIpC,eAAc,cAAc,SAAS;AACrC,QAAO;;;aAnUkE;gBASpD;;;;;;;;;;;;;;;;;;;ACyBvB,SAAgB,uBAAuB,aAAyC;CAC9E,MAAM,SAA6B;EAAE,UAAU,EAAE;EAAE,SAAS,EAAE;EAAE,QAAQ,EAAE;EAAE;AAY5E,MAAK,MAAM,EAAE,KAAK,YAAY,KAAK,gBARyB;EAC1D;GAAE,KAAK;GAAsB,KAAK;GAAe;EACjD;GAAE,KAAK;GAAsB,KAAK;GAAe;EACjD;GAAE,KAAK;GAAsB,KAAK;GAAe;EACjD;GAAE,KAAK;GAA6B,KAAK;GAAe;EACxD;GAAE,KAAK;GAAuB,KAAK;GAAgB;EACpD,EAEkE;EACjE,MAAM,UAAU,KAAK,aAAa,WAAW;EAC7C,MAAM,UAAU
,KAAK,aAAa,WAAW;AAE7C,MAAI,CAAC,WAAW,QAAQ,CAAE;AAE1B,MAAI,WAAW,QAAQ,EAAE;GACvB,MAAM,MAAM,2BAA2B,WAAW,OAAO,WAAW,YAAY,YAAY;AAC5F,WAAQ,KAAK,gBAAgB,MAAM;AACnC,UAAO,QAAQ,KAAK,WAAW;AAC/B;;AAGF,MAAI;GAEF,MAAM,YAAY,QAAQ,QAAQ;AAClC,OAAI,CAAC,WAAW,UAAU,CACxB,WAAU,WAAW,EAAE,WAAW,MAAM,CAAC;AAE3C,cAAW,SAAS,QAAQ;AAC5B,UAAO,SAAS,KAAK,GAAG,WAAW,KAAK,aAAa;WAC9C,KAAU;AACjB,UAAO,OAAO,KAAK,GAAG,WAAW,IAAI,IAAI,UAAU;;;CAKvD,MAAM,gBAAgB,KAAK,aAAa,cAAc;AACtD,KAAI,WAAW,cAAc,CAC3B,KAAI;AAEF,MADkB,YAAY,cAAc,CAC9B,WAAW,GAAG;AAC1B,aAAU,cAAc;AACxB,UAAO,SAAS,KAAK,mCAAmC;;SAEpD;AAKV,QAAO;;;;;;;AAQT,SAAgB,mBAAmB,aAA2B;CAC5D,MAAM,gBAAgB,KAAK,aAAa,aAAa;CACrD,MAAM,kBAAkB;EAAC;EAAgB;EAAgB;EAAgB;CAEzE,IAAI,UAAU,WAAW,cAAc,GAAG,aAAa,eAAe,QAAQ,GAAG;CACjF,MAAM,QAAQ,QAAQ,MAAM,KAAK;CAEjC,MAAM,UAAU,gBAAgB,QAAO,UAAS,CAAC,MAAM,MAAK,MAAK,EAAE,MAAM,KAAK,MAAM,CAAC;AACrF,KAAI,QAAQ,WAAW,EAAG;AAG1B,KAAI,CAAC,QAAQ,SAAS,KAAK,IAAI,QAAQ,SAAS,EAC9C,YAAW;AAEb,KAAI,CAAC,MAAM,MAAK,MAAK,EAAE,SAAS,QAAQ,CAAC,CACvC,YAAW;AAEb,YAAW,QAAQ,KAAK,KAAK,GAAG;AAEhC,eAAc,eAAe,SAAS,QAAQ;;;;;AA6BhD,SAAS,mBACP,eACA,aACA,eACsB;CACtB,MAAM,gBAAgB,WAAW;CACjC,MAAM,SAAS,cAAc,WAAW,KAAK,UAAU;AAEvD,QAAO;EACL,cAAc;EACd,gBAAgB;EAChB,aAAa,WAAW;EACxB,iBAAiB,GAAG,SAAS,cAAc,KAAK,CAAC,GAAG;EACpD,QAAQ;EACR,cAAc,SAAS,cAAc,KAAK;EAC1C,cAAc,cAAc;EAC5B,cAAc,QAAQ,cAAc,KAAK;EACzC,gBAAgB;EAChB,MAAM,SAAS;EAChB;;;;;;AAOH,SAAS,oBAAoB,UAAwB;AACnD,KAAI,CAAC,WAAW,SAAS,CAAE;CAE3B,IAAI,UAAU,aAAa,UAAU,QAAQ;CAC7C,MAAM,kBAAkB;AASxB,MAAK,MAAM,WALU,CACnB,6BACA,6BACD,CAGC,WAAU,QAAQ,QAAQ,SAAS,WAAW;AAGhD,KAAI,YAAY,gBACd,eAAc,UAAU,SAAS,QAAQ;;;;;AAO7C,SAAS,oBAAoB,MAAuB;AAClD,QAAO,kBAAkB,KAAK,KAAK;;;;;;;;;AAUrC,eAAe,eACb,UACA,YACA,YACA,gBAAwB,QACwB;AAChD,KAAI;AAEF,QAAM,UAAU,oBAAoB,EAAE,KAAK,UAAU,CAAC;AAGtD,QAAM,UAAU,sBAAsB,EAAE,KAAK,UAAU,CAAC;EAGxD,MAAM,EAAE,QAAQ,kBAAkB,MAAM,UAAU,qBAAqB,EAAE,KAAK,UAAU,CAAC;EACzF,MAAM,EAAE,QAAQ,mBAAmB,MAAM,UAAU,wBAAwB,EAAE,KAAK,UAAU,CAAC;EAE7F,MAAM,YAAY,cAAc,MAAM,KAAK,CAAC,KAAI,MAAK,EAAE,QAAQ,YAAY,GAAG,CAAC,MAAM,CAAC,CAAC,OAAO,QAAQ;EACtG,MAAM,aAAa,eAAe,M
AAM,KAAK,CAAC,KAAI,MAAK,EAAE,MAAM,CAAC,CAAC,OAAO,QAAQ;AAKhF,MAHE,UAAU,SAAS,WAAW,IAC9B,WAAW,SAAS,UAAU,aAAa,CAG3C,OAAM,UAAU,qBAAqB,WAAW,KAAK,WAAW,IAAI,EAAE,KAAK,UAAU,CAAC;MAGtF,OAAM,UAAU,qBAAqB,WAAW,QAAQ,WAAW,KAAK,cAAc,IAAI,EAAE,KAAK,UAAU,CAAC;AAM9G,QAAM,UAAU,iBAAiB,EAAE,KAAK,YAAY,CAAC,CAAC,YAAY,GAAG;AAGrE,QAAM,UAAU,qCAAqC,EAAE,KAAK,YAAY,CAAC,CAAC,YAAY,GAAG;AAEzF,SAAO;GAAE,SAAS;GAAM,SAAS,uBAAuB;GAAc;UAC/D,OAAO;AACd,SAAO;GAAE,SAAS;GAAO,SAAS,8BAA8B;GAAS;;;;;;AAO7E,eAAe,eACb,UACA,YACA,YACgD;AAChD,KAAI;AAEF,QAAM,UAAU,wBAAwB,WAAW,YAAY,EAAE,KAAK,UAAU,CAAC,CAAC,YAAY,GAAG;AAGjG,QAAM,UAAU,kBAAkB,WAAW,IAAI,EAAE,KAAK,UAAU,CAAC,CAAC,YAAY,GAAG;AAEnF,SAAO;GAAE,SAAS;GAAM,SAAS,uBAAuB;GAAc;UAC/D,OAAO;AACd,SAAO;GAAE,SAAS;GAAO,SAAS,8BAA8B;GAAS;;;;;;AAU7E,SAAS,WACP,UACA,eACA,OACQ;AAER,KAAI,CAAC,WAAW,SAAS,EAAE;AACzB,YAAU,QAAQ,SAAS,EAAE,EAAE,WAAW,MAAM,CAAC;AACjD,gBAAc,UAAU,GAAG;;CAG7B,MAAM,UAAU,aAAa,UAAU,QAAQ;CAC/C,MAAM,QAAQ,QAAQ,MAAM,KAAK,CAAC,OAAO,QAAQ;AAGjD,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,CAAC,QAAQ,QAAQ,KAAK,MAAM,IAAI;AACtC,MAAI,WAAW,cACb,QAAO,SAAS,MAAM,GAAG;;CAK7B,MAAM,YAAY,IAAI,IAAI,MAAM,KAAI,MAAK,SAAS,EAAE,MAAM,IAAI,CAAC,IAAI,GAAG,CAAC,CAAC;AACxE,MAAK,IAAI,OAAO,MAAM,IAAI,QAAQ,MAAM,IAAI,OAC1C,KAAI,CAAC,UAAU,IAAI,KAAK,EAAE;AACxB,gBAAc,UAAU,WAAW,QAAQ,SAAS,KAAK,GAAG,KAAK,QAAQ,GAAG,cAAc,GAAG,KAAK,IAAI;AACtG,SAAO;;AAIX,OAAM,IAAI,MAAM,+BAA+B,MAAM,GAAG,GAAG,MAAM,KAAK;;;;;AAMxE,SAAS,YAAY,UAAkB,eAAgC;AACrE,KAAI;AACF,MAAI,CAAC,WAAW,SAAS,CAAE,QAAO;AAIlC,gBAAc,UAFA,aAAa,UAAU,QAAQ,CACvB,MAAM,KAAK,CAAC,QAAO,SAAQ,CAAC,KAAK,WAAW,GAAG,cAAc,GAAG,CAAC,CACzD,KAAK,KAAK,CAAC;AACzC,SAAO;SACD;AACN,SAAO;;;;;;AAOX,SAAS,iBACP,aACA,WACA,cACA,WACU;CACV,MAAM,QAAkB,EAAE;AAE1B,KAAI,CAAC,WAAW,YAAY,CAC1B,QAAO;AAIT,KAAI,aAAa,UAAU,SAAS,EAClC,MAAK,MAAM,EAAE,QAAQ,YAAY,WAAW;EAC1C,MAAM,aAAa,KAAK,aAAa,OAAO;EAC5C,MAAM,aAAa,KAAK,WAAW,OAAO;AAE1C,MAAI,WAAW,WAAW,EAAE;GAE1B,MAAM,YAAY,oBADF,aAAa,YAAY,QAAQ,EACF,aAAa;AAC5D,aAAU,QAAQ,WAAW,EAAE,EAAE,WAAW,MAAM,CAAC;AACnD,iBAAc,YAAY,UAAU;AACpC,SAAM,KAAK,uBAAuB,OAAO,MAAM,SAAS;;;MAGvD;EAEL,
MAAM,QAAQ,YAAY,YAAY;AACtC,OAAK,MAAM,QAAQ,MACjB,KAAI,KAAK,SAAS,YAAY,EAAE;GAC9B,MAAM,aAAa,KAAK,aAAa,KAAK;GAC1C,MAAM,aAAa,KAAK,WAAW,KAAK,QAAQ,aAAa,GAAG,CAAC;AAIjE,iBAAc,YADI,oBADF,aAAa,YAAY,QAAQ,EACF,aAAa,CACxB;GAEpC,MAAM,aAAa,KAAK,QAAQ,aAAa,GAAG;AAChD,OAAI,eAAe,SAAS,WAAW,SAAS,MAAM,CACpD,WAAU,YAAY,IAAM;AAE9B,SAAM,KAAK,uBAAuB,OAAO;;;AAK/C,QAAO;;AAuCT,SAAS,wBACP,WACA,WACA,MACA,cACU;CACV,MAAM,QAAkB,EAAE;AAE1B,MAAK,MAAM,OAAO,MAAM;EACtB,MAAM,aAAa,KAAK,WAAW,IAAI;EACvC,MAAM,aAAa,KAAK,WAAW,IAAI;AAEvC,MAAI,CAAC,WAAW,WAAW,CAAE;EAG7B,SAAS,QAAQ,KAAa,MAAsB;GAClD,IAAI,QAAQ;AACZ,aAAU,MAAM,EAAE,WAAW,MAAM,CAAC;GACpC,MAAM,UAAU,YAAY,KAAK,EAAE,eAAe,MAAM,CAAC;AACzD,QAAK,MAAM,SAAS,SAAS;IAC3B,MAAM,WAAW,KAAK,KAAK,MAAM,KAAK;IACtC,MAAM,YAAY,KAAK,MAAM,MAAM,KAAK;AACxC,QAAI,MAAM,aAAa,CACrB,UAAS,QAAQ,UAAU,UAAU;aAC5B,MAAM,QAAQ,EAAE;KACzB,MAAM,MAAM,QAAQ,MAAM,KAAK,CAAC,aAAa;AAC7C,SAAI,gBAAgB,gBAAgB,IAAI,IAAI,CAE1C,eAAc,WAAW,oBADT,aAAa,UAAU,QAAQ,EACO,aAAa,CAAC;SAEpE,cAAa,UAAU,UAAU;AAEnC;;;AAGJ,UAAO;;EAGT,MAAM,QAAQ,QAAQ,YAAY,WAAW;AAC7C,QAAM,KAAK,UAAU,MAAM,gCAAgC,MAAM;;AAGnE,QAAO;;;;;AAMT,eAAsB,gBAAgB,SAAiE;CACrG,MAAM,EAAE,eAAe,aAAa,aAAa,QAAQ,eAAe;CACxE,MAAM,YAAY,OAAe,QAAgB,SAA0C,aAAa;AACtG,eAAa;GAAE;GAAO;GAAQ;GAAQ,CAAC;;CAEzC,MAAM,SAAgC;EACpC,SAAS;EACT,eAAe;EACf,QAAQ,EAAE;EACV,OAAO,EAAE;EACV;AAGD,KAAI,CAAC,oBAAoB,YAAY,EAAE;AACrC,SAAO,UAAU;AACjB,SAAO,OAAO,KAAK,2DAA2D;AAC9E,SAAO;;AAIT,KAAI,gBAAgB,QAAQ;AAC1B,SAAO,UAAU;AACjB,SAAO,OAAO,KAAK,iEAA+D;AAClF,SAAO;;CAGT,MAAM,kBAAkB,cAAc,aAAa,2BAA2B;CAC9E,MAAM,gBAAgB,KAAK,cAAc,MAAM,gBAAgB,kBAAkB,aAAa;CAC9F,MAAM,gBAAgB,WAAW;CACjC,MAAM,gBAAgB,KAAK,eAAe,cAAc;AACxD,QAAO,gBAAgB;AAGvB,KAAI,WAAW,cAAc,EAAE;AAC7B,SAAO,UAAU;AACjB,SAAO,OAAO,KAAK,+BAA+B,gBAAgB;AAClE,SAAO;;AAGT,KAAI,QAAQ;AACV,SAAO,MAAM,KAAK,0CAA0C,cAAc;AAC1E,SAAO;;CAIT,MAAM,eAAe,mBAAmB,eAAe,aAAa,cAAc;AAGlF,UAAS,yBAAyB,WAAW,cAAc;AAC3D,WAAU,eAAe,EAAE,WAAW,MAAM,CAAC;AAC7C,QAAO,MAAM,KAAK,8BAA8B;AAGhD,KAAI,gBAAgB,SAAS,cAAc,gBAAgB,MAEzD,MAAK,MAAM,QAAQ,gBAAgB,OAAO;EAGxC,MAAM,cAAc,KAAK,cAAc,MAAM,KAAK,KAAK;EACvD,MAAM,WA
AW,WAAW,YAAY,GAAG,aAAa,YAAY,GAAG;EACvE,MAAM,aAAa,KAAK,eAAe,KAAK,KAAK;EAEjD,MAAM,aAAa,GADE,KAAK,iBAAiB,aACN;EAErC,MAAM,gBAAgB,KAAK,kBAAkB,gBAAgB,kBAAkB;EAE/E,MAAM,iBAAiB,MAAM,eAAe,UAAU,YAAY,YAAY,cAAc;AAC5F,MAAI,eAAe,QACjB,QAAO,MAAM,KAAK,wBAAwB,KAAK,KAAK,IAAI,WAAW,SAAS,cAAc,GAAG;OACxF;AACL,UAAO,OAAO,KAAK,GAAG,KAAK,KAAK,IAAI,eAAe,UAAU;AAC7D,UAAO,UAAU;;;MAGhB;EAEL,MAAM,aAAa,WAAW;EAC9B,MAAM,gBAAgB,gBAAgB,kBAAkB;EACxD,MAAM,iBAAiB,MAAM,eAAe,cAAc,MAAM,eAAe,YAAY,cAAc;AACzG,MAAI,eAAe,QACjB,QAAO,MAAM,KAAK,qBAAqB,WAAW,SAAS,cAAc,GAAG;OACvE;AACL,UAAO,OAAO,KAAK,eAAe,QAAQ;AAC1C,UAAO,UAAU;;;AAIrB,UAAS,yBAAyB,kBAAkB,WAAW;CAO/D,MAAM,oBAAoB,QAAQ,cAAc;CAChD,MAAM,mBAAmB,QAAQ,mBAAmB,YAAY;AAEhE,KAD6B,kBAAkB,MAAM,oCAAoC,IAGvF,qBAAqB,KAAK,mBAAmB,YAAY,IACzD,WAAW,KAAK,mBAAmB,OAAO,CAAC,IAC3C,WAAW,iBAAiB,EAC5B;AACA,SAAO,kBAAkB;GAAE,WAAW;GAAM,OAAO;GAAM,CAAC;AAC1D,SAAO,MAAM,KAAK,yDAAyD;;AAI7E,KAAI;AACF,qBAAmB,cAAc,KAAK;AACtC,SAAO,MAAM,KAAK,iDAAiD;UAC5D,cAAmB;AAE1B,SAAO,MAAM,KAAK,yCAAyC,aAAa,UAAU;;CAKpF,MAAM,kBAAkB,KAAK,eAAe,gBAAgB;AAC5D,KAAI,WAAW,gBAAgB,EAAE;EAC/B,MAAM,eAAe,YAAY,gBAAgB,CAC9C,QAAO,MAAK,EAAE,SAAS,UAAU,KAAK,EAAE,SAAS,OAAO,IAAI,EAAE,SAAS,QAAQ,EAAE;AACpF,OAAK,MAAM,eAAe,aACxB,qBAAoB,KAAK,iBAAiB,YAAY,CAAC;AAEzD,MAAI,aAAa,SAAS,EACxB,QAAO,MAAM,KAAK,aAAa,aAAa,OAAO,6CAA6C;;AAKpG,UAAS,2BAA2B,cAAc,mBAAmB,eAAe;CACpF,MAAM,aAAa,cAAc,oBAAoB,WAAW,KAAK,eAAe,WAAW,CAAC,GAAG,QAAQ;CAC3G,MAAM,aAAa,eAAe,QAAQ,gBAAgB,GAAG,WAAW;AACxE,KAAI;AACF,QAAM,UAAU,YAAY;GAAE,KAAK;GAAe,UAAU;GAAS,SAAS;GAAO,CAAC;AACtF,SAAO,MAAM,KAAK,2BAA2B,WAAW,GAAG;AAC3D,WAAS,2BAA2B,GAAG,WAAW,UAAU,WAAW;UAChE,YAAiB;AACxB,SAAO,MAAM,KAAK,+BAA+B,WAAW,SAAS,MAAM,GAAG,IAAI,GAAG;AACrF,WAAS,2BAA2B,uBAAuB,WAAW;;CAIxE,MAAM,oBAAoB,cAAc;AACxC,KAAI,qBAAqB,kBAAkB,SAAS,GAAG;AACrD,WAAS,+BAA+B,kBAAkB,KAAI,MAAK,EAAE,KAAK,CAAC,KAAK,KAAK,CAAC;AACtF,OAAK,MAAM,OAAO,kBAChB,KAAI;AACF,SAAM,UAAU,IAAI,eAAe;IAAE,KAAK,KAAK,eAAe,IAAI,KAAK;IAAE,UAAU;IAAS,SAAS;IAAO,CAAC;AAC7G,UAAO,MAAM,KAAK,4BAA4B,IAAI,OAAO;WAClD,UAAe;AACtB,UAAO,MAAM,KAAK,kBAAkB,IAAI,KAAK,KAAK,SAAS,SAAS,
MAAM,GAAG,IAAI,GAAG;;AAGxF,WAAS,+BAA+B,kBAAkB,WAAW;;AAIvE,KAAI;AAEF,QAAM,UAAU,oBAAoB;EACpC,MAAM,WAAW,KAAK,eAAe,QAAQ;EAC7C,MAAM,UAAU,KAAK,UAAU,OAAO,OAAO;AAM7C,MAFuB,WADF,KAAK,cAAc,MAAM,SAAS,OAAO,OAAO,CACtB,EAE3B;AAElB,SAAM,UAAU,UAAU,KAAK,cAAc,MAAM,QAAQ,CAAC,KAAK,SAAS,GAAG;AAC7E,UAAO,MAAM,KAAK,sCAAsC;SACnD;AAEL,SAAM,UAAU,oBAAoB,SAAS,IAAI,EAAE,KAAK,eAAe,CAAC;AAExE,SAAM,UAAU,IADA,KAAK,UAAU,OAAO,MAAM,CAChB,qBAAqB;IAAE,KAAK;IAAe,SAAS;IAAQ,CAAC;AACzF,UAAO,MAAM,KAAK,6CAA6C;GAG/D,MAAM,cAAc,KAAK,cAAc,MAAM,WAAW,WAAW,0BAA0B;AAC7F,OAAI,WAAW,YAAY,EAAE;AAC3B,UAAM,UAAU,YAAY,YAAY,KAAK,SAAS,GAAG;AACzD,WAAO,MAAM,KAAK,mCAAmC;;;AAKzD,MAAI,CAAC,WAAW,QAAQ,CACtB,QAAO,MAAM,KAAK,mEAAmE;OAChF;GAEL,MAAM,cAAc,KAAK,cAAc,MAAM,QAAQ;GACrD,MAAM,mBAAmB,KAAK,eAAe,QAAQ;AAErD,OAAI,WAAW,YAAY,EAAE;AAC3B,UAAM,UAAU,UAAU,YAAY,KAAK,iBAAiB,GAAG;AAC/D,WAAO,MAAM,KAAK,qCAAqC;;GAIzD,MAAM,EAAE,yBAAyB,MAAM,OAAO;GAC9C,MAAM,cAAc,qBAAqB,eAAe,SAAS;AACjE,SAAM,YAAY,MAAM,KAAK;AAC7B,UAAO,MAAM,KAAK,sBAAsB;AAIxC,OAAI;AACF,UAAM,YAAY,KAAK,KAAK;AAC5B,WAAO,MAAM,KAAK,yCAAyC;WACrD;;UAIH,OAAY;AAEnB,MAAI,MAAM,SAAS,SAAS,UAAU,CACpC,QAAO,MAAM,KAAK,6CAA6C;OAC1D;AACL,WAAQ,KAAK,wBAAwB,MAAM,UAAU;AACrD,UAAO,MAAM,KAAK,sBAAsB,MAAM,UAAU;;;AAK5D,KAAI,gBAAgB,KAAK;EACvB,MAAM,YAAY,gBAAgB,IAAI,eAAe;AACrD,OAAK,MAAM,gBAAgB,gBAAgB,IAAI,SAAS;GACtD,MAAM,WAAW,oBAAoB,cAAc,aAAa;AAEhE,OAAI,YAAY,WAAW,SAAS,CAClC,QAAO,MAAM,KAAK,oBAAoB,SAAS,IAAI,UAAU,GAAG;;AAKpE,MAAI,cAAc;OACD,MAAM,kBAAkB,CAErC,QAAO,MAAM,KAAK,mCAAmC;;;AAM3D,KAAI,gBAAgB,MAClB,MAAK,MAAM,CAAC,UAAU,eAAe,OAAO,QAAQ,gBAAgB,MAAM,EAAE;EAC1E,MAAM,WAAW,KAAK,cAAc,MAAM,IAAI,SAAS,QAAQ;AAC/D,MAAI;GACF,MAAM,OAAO,WAAW,UAAU,eAAe,WAAW,MAAM;AAClE,UAAO,MAAM,KAAK,YAAY,SAAS,SAAS,OAAO;AAEtD,gBAAqB,GAAG,SAAS,aAAa,CAAC,UAAU,OAAO,KAAK;WAC/D,OAAO;AACd,UAAO,OAAO,KAAK,oBAAoB,SAAS,SAAS,QAAQ;;;AAMvE,UAAS,iCAAiC,mCAAmC;CAC7E,MAAM,cAAc,yBAAyB,cAAc;CAC3D,MAAM,aAAa,YAAY,MAAM,SAAS,YAAY,QAAQ;AAClE,KAAI,aAAa,EACf,QAAO,MAAM,KAAK,aAAa,WAAW,qBAAqB,YAAY,MAAM,OAAO,QAAQ,YAAY,QAAQ,OAAO,WAAW;CAIxI,MAAM,iBAAiB,4BAA4B,cAAc,MAAM,cAAc;AACrF,KAAI
,eAAe,MAAM,SAAS,EAChC,QAAO,MAAM,KAAK,aAAa,eAAe,MAAM,OAAO,kDAAkD,eAAe,UAAU,KAAK,KAAK,CAAC,GAAG;AAItJ,KAAI,gBAAgB,OAAO,cAAc;EACvC,MAAM,cAAc,KAAK,cAAc,MAAM,gBAAgB,MAAM,aAAa;EAGhF,MAAM,gBAAgB,iBACpB,aACA,eACA,cACA,gBAAgB,MAAM,UACvB;AACD,SAAO,MAAM,KAAK,GAAG,cAAc;EAGnC,MAAM,aAAa,gBAAgB,MAAM,aAAa,gBAAgB,MAAM;AAC5E,MAAI,YAAY;GACd,MAAM,YAAY,wBAAwB,aAAa,eAAe,YAAY,aAAa;AAC/F,UAAO,MAAM,KAAK,GAAG,UAAU;;;AAKnC,KAAI,gBAAgB,KAAK,UAAU;EACjC,MAAM,aAAa,oBAAoB,gBAAgB,IAAI,UAAU,aAAa;AAClF,gBAAc,KAAK,eAAe,OAAO,EAAE,WAAW;AACtD,SAAO,MAAM,KAAK,oBAAoB;;AAIxC,KAAI,gBAAgB,QAAQ,kBAAkB;EAC5C,MAAM,cAAc,KAAK,cAAc,MAAM,gBAAgB,OAAO,iBAAiB;EACrF,MAAM,kBAAkB,KAAK,eAAe,gBAAgB;AAC5D,YAAU,iBAAiB,EAAE,WAAW,MAAM,CAAC;EAE/C,MAAM,gBAAgB,iBAAiB,aAAa,iBAAiB,aAAa;AAClF,SAAO,MAAM,KAAK,GAAG,cAAc;AAGnC,MAAI,WAAW,YAAY,EAAE;GAC3B,MAAM,QAAQ,YAAY,YAAY;AACtC,QAAK,MAAM,QAAQ,MACjB,KAAI,CAAC,KAAK,SAAS,YAAY,CAG7B,cAFmB,KAAK,aAAa,KAAK,EACvB,KAAK,iBAAiB,KAAK,CACV;;EAO1C,MAAM,eAAe,YAAY,gBAAgB,CAC9C,QAAO,MAAK,EAAE,SAAS,UAAU,KAAK,EAAE,SAAS,OAAO,IAAI,EAAE,SAAS,QAAQ,EAAE;AACpF,OAAK,MAAM,eAAe,aACxB,qBAAoB,KAAK,iBAAiB,YAAY,CAAC;AAEzD,MAAI,aAAa,SAAS,EACxB,QAAO,MAAM,KAAK,aAAa,aAAa,OAAO,6CAA6C;EAKlG,MAAM,uBAAuB,KAAK,iBAAiB,MAAM;EACzD,MAAM,kBAAkB,KAAK,eAAe,MAAM;AAClD,MAAI,WAAW,qBAAqB,IAAI,CAAC,WAAW,gBAAgB,CAClE,KAAI;AACF,eAAY,qBAAqB,gBAAgB;AACjD,aAAU,sBAAsB,IAAM;AACtC,UAAO,MAAM,KAAK,wBAAwB;WACnC,OAAO;AACd,UAAO,OAAO,KAAK,mCAAmC,QAAQ;;;AAUpE,KAAI,gBAAgB,QAAQ;EAC1B,MAAM,eAAe,MAAM,iBAAiB,gBAAgB,QAAQ,aAAa;AACjF,SAAO,MAAM,KAAK,GAAG,aAAa,MAAM;AACxC,MAAI,CAAC,aAAa,QAChB,QAAO,OAAO,KAAK,oDAAoD;;AAK3E,KAAI,gBAAgB,MAAM;EACxB,MAAM,aAAa,MAAM,iBAAiB,gBAAgB,MAAM,aAAa;AAC7E,SAAO,MAAM,KAAK,GAAG,WAAW,MAAM;AACtC,MAAI,WAAW,UAAU;AACvB,iBACE,KAAK,eAAe,eAAe,EACnC,kBAAkB,WAAW,SAAS,wBAAwB,WAAW,SAAS,IACnF;AACD,UAAO,MAAM,KAAK,6CAA6C;;AAEjE,MAAI,CAAC,WAAW,QACd,QAAO,OAAO,KAAK,6DAA6D;;AAIpF,UAAS,iCAAiC,8BAA8B,WAAW;AAGnF,KAAI,aAAa;AACf,WAAS,8BAA8B,iCAAiC;AAExE,MAAI,gBAAgB,QAAQ,SAAS;GAKnC,MAAM,cAAc,KAAK,SAAS,EAAE,eAAe,WAAW,qBAAqB;AACnF,OAAI,WAAW,YAAY,CACzB,KAAI;AACF,U
AAM,UAAU,sBAAsB,YAAY,UAAU,EAAE,KAAK,KAAK,SAAS,EAAE,eAAe,UAAU,EAAE,CAAC;AAC/G,WAAO,MAAM,KAAK,kBAAkB;YAC7B,OAAY;IACnB,MAAM,MAAM,OAAO,WAAW,OAAO,MAAM;AAC3C,QAAI,IAAI,SAAS,4BAA4B,IAAI,IAAI,SAAS,yBAAyB,CAErF,QAAO,MAAM,KAAK,wCAAwC;QAE1D,QAAO,OAAO,KAAK,4BAA4B,QAAQ;;;EAO/D,MAAM,mBAAmB;GACvB,KAAK,eAAe,qBAAqB;GACzC,KAAK,eAAe,sBAAsB;GAC1C,KAAK,eAAe,iBAAiB,qBAAqB;GAC1D,KAAK,eAAe,iBAAiB,kCAAkC;GACxE;AAED,OAAK,MAAM,eAAe,iBACxB,KAAI,WAAW,YAAY,EAAE;AAC3B,OAAI;AAIF,UAAM,UAAU,sBAAsB,YAAY,kBAAkB;KAAE,KAAK,QAAQ,YAAY;KAAE,SAAS;KAAQ,CAAC;AACnH,WAAO,MAAM,KAAK,2BAA2B,SAAS,YAAY,GAAG;YAC9D,OAAO;AACd,WAAO,OAAO,KAAK,+BAA+B,QAAQ;;AAE5D;;;AAKN,KAAI,YACF,UAAS,8BAA8B,sBAAsB,WAAW;AAI1E,KAAI;AACF,oBAAkB,cAAc;AAChC,SAAO,MAAM,KAAK,uCAAuC;SACnD;AAIR,QAAO,UAAU,OAAO,OAAO,WAAW;AAC1C,QAAO;;;;;;;AAQT,SAAgB,kBAAkB,SAAuB;CACvD,MAAM,iBAAiB,KAAK,SAAS,EAAE,eAAe;AACtD,KAAI,CAAC,WAAW,eAAe,CAAE;CAEjC,MAAM,OAAO,KAAK,MAAM,aAAa,gBAAgB,OAAO,CAAC;AAC7D,KAAI,CAAC,KAAK,SAAU,MAAK,WAAW,EAAE;AAGtC,KAAI,KAAK,SAAS,UAAU;AAC1B,MAAI,CAAC,KAAK,SAAS,SAAS,wBAAwB;AAClD,QAAK,SAAS,SAAS,yBAAyB;AAChD,iBAAc,gBAAgB,KAAK,UAAU,MAAM,MAAM,EAAE,EAAE,OAAO;;AAEtE;;AAGF,MAAK,SAAS,WAAW;EACvB,cAAc,EAAE;EAChB,gBAAgB,EAAE;EAClB,YAAY,EAAE;EACd,uBAAuB,EAAE;EACzB,wBAAwB,EAAE;EAC1B,wBAAwB;EACxB,4BAA4B;EAC5B,qCAAqC;EACrC,yCAAyC;EAC1C;AAED,eAAc,gBAAgB,KAAK,UAAU,MAAM,MAAM,EAAE,EAAE,OAAO;;;;;;;;;;;;AAmCtE,eAAsB,oBACpB,eACA,aACA,aAC8B;CAC9B,MAAM,SAA8B;EAClC,iBAAiB;EACjB,OAAO,EAAE;EACV;CAGD,MAAM,kBAAkB,KAAK,eAAe,gBAAgB;CAC5D,MAAM,eAAyB,EAAE;AAEjC,KAAI,WAAW,gBAAgB,CAQ7B,MAAK,MAAM,QAPW;EACpB;EACA;EACA;EACA;EACA;EACD,EACiC;EAChC,MAAM,WAAW,KAAK,iBAAiB,KAAK;AAC5C,MAAI,WAAW,SAAS,CACtB,cAAa,KAAK,SAAS;;AAMjC,KAAI,aAAa,WAAW,GAAG;EAC7B,MAAM,cAAc,KAAK,eAAe,qBAAqB;AAC7D,MAAI,WAAW,YAAY,CACzB,cAAa,KAAK,YAAY;;AAIlC,KAAI,aAAa,SAAS,GAAG;AAC3B,SAAO,kBAAkB;AACzB,MAAI;GACF,MAAM,YAAY,aAAa,KAAI,MAAK,OAAO,EAAE,GAAG,CAAC,KAAK,IAAI;GAC9D,MAAM,MAAM,WAAW,gBAAgB,GAAG,kBAAkB;GAI5D,IAAI,qBAAqB,GAAG,YAAY,WAAW;GACnD,MAAM,iBAAiB,CACrB,KAAK,eAAe,iBAAiB,MAAM,EAC3C,KAAK,eAAe,MAAM,CAC3B;AACD,QAAK,M
AAM,WAAW,eACpB,KAAI;AACF,QAAI,WAAW,QAAQ,EAAE;KACvB,MAAM,UAAU,aAAa,SAAS,QAAQ;KAC9C,MAAM,QAAQ,QAAQ,MAAM,sDAAsD;AAClF,SAAI,OAAO;AACT,2BAAqB,GAAG,MAAM,GAAG,UAAU;AAC3C;;KAEF,MAAM,eAAe,QAAQ,MAAM,iCAAiC;AACpE,SAAI,cAAc;AAChB,2BAAqB,aAAa;AAClC;;;WAGE;AAKV,SAAM,UAAU,kBAAkB,UAAU,OAAO,mBAAmB,6BAA6B;IACjG;IACA,SAAS;IACV,CAAC;AACF,UAAO,MAAM,KAAK,8BAA8B,aAAa,OAAO,iBAAiB;WAC9E,OAAY;AAEnB,UAAO,MAAM,KAAK,6BAA6B,MAAM,SAAS,MAAM,KAAK,CAAC,MAAM,gCAAgC,GAAG;;;AAKvH,KAAI;AACF,QAAM,UACJ,uBAAuB,cAAc,0FACrC;GAAE,SAAS;GAAO,WAAW,KAAK,OAAO;GAAM,CAChD;AACD,SAAO,MAAM,KAAK,kCAAkC;SAC9C;AAIR,QAAO;;;;;AAMT,eAAsB,gBAAgB,SAAiE;CACrG,MAAM,EAAE,eAAe,aAAa,WAAW;CAC/C,MAAM,SAAgC;EACpC,SAAS;EACT,QAAQ,EAAE;EACV,OAAO,EAAE;EACV;CAED,MAAM,kBAAkB,cAAc,aAAa,2BAA2B;CAC9E,MAAM,gBAAgB,KAAK,cAAc,MAAM,gBAAgB,kBAAkB,aAAa;CAC9F,MAAM,gBAAgB,WAAW;CACjC,MAAM,gBAAgB,KAAK,eAAe,cAAc;AAExD,KAAI,CAAC,WAAW,cAAc,EAAE;AAC9B,SAAO,UAAU;AACjB,SAAO,OAAO,KAAK,0BAA0B,gBAAgB;AAC7D,SAAO;;AAGT,KAAI,QAAQ;AACV,SAAO,MAAM,KAAK,0CAA0C,cAAc;AAC1E,SAAO;;CAIT,MAAM,WAAW,KAAK,eAAe,QAAQ;AAC7C,KAAI,WAAW,SAAS,CACtB,KAAI;EACF,MAAM,EAAE,yBAAyB,MAAM,OAAO;AAE9C,QADoB,qBAAqB,eAAe,SAAS,CAC/C,MAAM;AACxB,SAAO,MAAM,KAAK,sBAAsB;UACjC,OAAY;AAEnB,UAAQ,KAAK,iCAAiC,OAAO,UAAU;;CAKnE,MAAM,eAAe,MAAM,oBAAoB,eAAe,cAAc,QAAQ,aAAa,YAAY;AAC7G,QAAO,MAAM,KAAK,GAAG,aAAa,MAAM;AAGxC,KAAI,gBAAgB,SAAS,cAAc,gBAAgB,MACzD,MAAK,MAAM,QAAQ,gBAAgB,OAAO;EAMxC,MAAM,iBAAiB,MAAM,eALZ,KAAK,cAAc,MAAM,KAAK,KAAK,EACjC,KAAK,eAAe,KAAK,KAAK,EAE9B,GADE,KAAK,iBAAiB,aACN,cAEwC;AAC7E,MAAI,eAAe,QACjB,QAAO,MAAM,KAAK,wBAAwB,KAAK,OAAO;MAEtD,QAAO,OAAO,KAAK,eAAe,QAAQ;;MAGzC;EAEL,MAAM,aAAa,WAAW;EAC9B,MAAM,iBAAiB,MAAM,eAAe,cAAc,MAAM,eAAe,WAAW;AAC1F,MAAI,eAAe,QACjB,QAAO,MAAM,KAAK,mBAAmB;MAErC,QAAO,OAAO,KAAK,eAAe,QAAQ;;AAK9C,KAAI,gBAAgB,KAAK;EACvB,MAAM,eAAe,mBAAmB,eAAe,aAAa,cAAc;EAElF,MAAM,YAAY,gBAAgB,IAAI,eAAe;AACrD,OAAK,MAAM,gBAAgB,gBAAgB,IAAI,SAAS;GACtD,MAAM,WAAW,oBAAoB,cAAc,aAAa;AAChE,OAAI,eAAe,WAAW,SAAS,CACrC,QAAO,MAAM,KAAK,sBAAsB,WAAW;;;AAMzD,KAAI,gBAAgB,QAAQ;EAC1B,MAAM,eAAe,mBAAmB,eAAe,aAAa,cAAc;EAClF,MAAM
,eAAe,MAAM,oBAAoB,gBAAgB,QAAQ,aAAa;AACpF,SAAO,MAAM,KAAK,GAAG,aAAa,MAAM;;AAI1C,KAAI,gBAAgB,MAAM;EACxB,MAAM,eAAe,mBAAmB,eAAe,aAAa,cAAc;EAClF,MAAM,aAAa,MAAM,iBAAiB,gBAAgB,MAAM,aAAa;AAC7E,SAAO,MAAM,KAAK,GAAG,WAAW,MAAM;;AAIxC,KAAI,gBAAgB;OACb,MAAM,CAAC,aAAa,OAAO,QAAQ,gBAAgB,MAAM,CAE5D,KAAI,YADa,KAAK,cAAc,MAAM,IAAI,SAAS,QAAQ,EACrC,cAAc,CACtC,QAAO,MAAM,KAAK,YAAY,SAAS,OAAO;;AAMpD,KAAI;AACF,QAAM,UAAU,WAAW,cAAc,IAAI,EAAE,WAAW,KAAK,OAAO,MAAM,CAAC;AAC7E,SAAO,MAAM,KAAK,8BAA8B;UACzC,OAAO;AACd,SAAO,OAAO,KAAK,yCAAyC,QAAQ;;AAGtE,QAAO,UAAU,OAAO,OAAO,WAAW;AAC1C,QAAO;;;;wBAhqCsB;WAC0C;cACL;YACL;oBAC2B;AAEpF,aAAY,UAAU,KAAK;AA8Y3B,mBAAkB,IAAI,IAAI;EAC9B;EAAO;EAAO;EAAQ;EAAS;EAAS;EAAO;EAAO;EAAQ;EAAQ;EAAS;EAChF,CAAC"}
|