@iloom/cli 0.6.1 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/README.md +35 -18
- package/dist/{BranchNamingService-B5PVRR7F.js → BranchNamingService-FLPUUFOB.js} +2 -2
- package/dist/ClaudeContextManager-KE5TBZVZ.js +14 -0
- package/dist/ClaudeService-CRSETT3A.js +13 -0
- package/dist/{GitHubService-S2OGUTDR.js → GitHubService-O7U4UQ7N.js} +3 -3
- package/dist/{LoomLauncher-5LFM4LXB.js → LoomLauncher-NL65LSKP.js} +6 -6
- package/dist/{MetadataManager-DFI73J3G.js → MetadataManager-XJ2YB762.js} +2 -2
- package/dist/PRManager-2ABCWXHW.js +16 -0
- package/dist/{ProjectCapabilityDetector-S5FLNCFI.js → ProjectCapabilityDetector-UZYW32SY.js} +3 -3
- package/dist/{PromptTemplateManager-C3DK6XZL.js → PromptTemplateManager-7L3HJQQU.js} +2 -2
- package/dist/README.md +35 -18
- package/dist/{SettingsManager-35F5RUJH.js → SettingsManager-YU4VYPTW.js} +2 -2
- package/dist/agents/iloom-issue-analyze-and-plan.md +42 -17
- package/dist/agents/iloom-issue-analyzer.md +14 -14
- package/dist/agents/iloom-issue-complexity-evaluator.md +38 -15
- package/dist/agents/iloom-issue-enhancer.md +15 -15
- package/dist/agents/iloom-issue-implementer.md +44 -15
- package/dist/agents/iloom-issue-planner.md +121 -17
- package/dist/agents/iloom-issue-reviewer.md +15 -15
- package/dist/{build-FJVYP7EV.js → build-O2EJHDEW.js} +9 -9
- package/dist/{chunk-ZPSTA5PR.js → chunk-3CDWFEGL.js} +2 -2
- package/dist/{chunk-VU3QMIP2.js → chunk-453NC377.js} +91 -15
- package/dist/chunk-453NC377.js.map +1 -0
- package/dist/{chunk-UQIXZ3BA.js → chunk-5V74K5ZA.js} +2 -2
- package/dist/{chunk-7WANFUIK.js → chunk-6TL3BYH6.js} +2 -2
- package/dist/{chunk-5TXLVEXT.js → chunk-C3AKFAIR.js} +2 -2
- package/dist/{chunk-K7SEEHKO.js → chunk-CNSTXBJ3.js} +7 -419
- package/dist/chunk-CNSTXBJ3.js.map +1 -0
- package/dist/{chunk-VDA5JMB4.js → chunk-EPPPDVHD.js} +21 -8
- package/dist/chunk-EPPPDVHD.js.map +1 -0
- package/dist/{chunk-LVBRMTE6.js → chunk-FEAJR6PN.js} +6 -6
- package/dist/{chunk-6YSFTPKW.js → chunk-FM4KBPVA.js} +18 -13
- package/dist/chunk-FM4KBPVA.js.map +1 -0
- package/dist/{chunk-AEIMYF4P.js → chunk-FP7G7DG3.js} +6 -2
- package/dist/chunk-FP7G7DG3.js.map +1 -0
- package/dist/{chunk-LT3SGBR7.js → chunk-GCPAZSGV.js} +36 -2
- package/dist/{chunk-LT3SGBR7.js.map → chunk-GCPAZSGV.js.map} +1 -1
- package/dist/chunk-GJMEKEI5.js +517 -0
- package/dist/chunk-GJMEKEI5.js.map +1 -0
- package/dist/{chunk-64O2UIWO.js → chunk-GV5X6XUE.js} +4 -4
- package/dist/{chunk-7Q66W4OH.js → chunk-HBJITKSZ.js} +37 -1
- package/dist/chunk-HBJITKSZ.js.map +1 -0
- package/dist/{chunk-7HIRPCKU.js → chunk-HVQNVRAF.js} +2 -2
- package/dist/{chunk-BXCPJJYM.js → chunk-ITN64ENQ.js} +1 -1
- package/dist/chunk-ITN64ENQ.js.map +1 -0
- package/dist/{chunk-6U6VI4SZ.js → chunk-KVS4XGBQ.js} +4 -4
- package/dist/{chunk-AXX3QIKK.js → chunk-LLWX3PCW.js} +2 -2
- package/dist/{chunk-2A7WQKBE.js → chunk-LQBLDI47.js} +96 -6
- package/dist/chunk-LQBLDI47.js.map +1 -0
- package/dist/{chunk-SN3Z6EZO.js → chunk-N7FVXZNI.js} +2 -2
- package/dist/chunk-NTIZLX42.js +822 -0
- package/dist/chunk-NTIZLX42.js.map +1 -0
- package/dist/{chunk-I75JMBNB.js → chunk-S7YMZQUD.js} +31 -43
- package/dist/chunk-S7YMZQUD.js.map +1 -0
- package/dist/chunk-TIYJEEVO.js +79 -0
- package/dist/chunk-TIYJEEVO.js.map +1 -0
- package/dist/{chunk-EK3XCAAS.js → chunk-UDRZY65Y.js} +2 -2
- package/dist/{chunk-3PT7RKL5.js → chunk-USJSNHGG.js} +2 -2
- package/dist/{chunk-CFUWQHCJ.js → chunk-VWGKGNJP.js} +114 -35
- package/dist/chunk-VWGKGNJP.js.map +1 -0
- package/dist/{chunk-F6WVM437.js → chunk-WFQ5CLTR.js} +6 -3
- package/dist/chunk-WFQ5CLTR.js.map +1 -0
- package/dist/{chunk-TRQ76ISK.js → chunk-Z6BO53V7.js} +9 -9
- package/dist/{chunk-GEXP5IOF.js → chunk-ZA575VLF.js} +21 -8
- package/dist/chunk-ZA575VLF.js.map +1 -0
- package/dist/{claude-H33OQMXO.js → claude-6H36IBHO.js} +4 -2
- package/dist/{cleanup-BRUAINKE.js → cleanup-ZPOMRSNN.js} +20 -16
- package/dist/cleanup-ZPOMRSNN.js.map +1 -0
- package/dist/cli.js +341 -954
- package/dist/cli.js.map +1 -1
- package/dist/commit-6S2RIA2K.js +237 -0
- package/dist/commit-6S2RIA2K.js.map +1 -0
- package/dist/{compile-ULNO5F7Q.js → compile-LRMAADUT.js} +9 -9
- package/dist/{contribute-Q6GX6AXK.js → contribute-GXKOIA42.js} +5 -5
- package/dist/{dev-server-4RCDJ5MU.js → dev-server-GREJUEKW.js} +22 -74
- package/dist/dev-server-GREJUEKW.js.map +1 -0
- package/dist/{feedback-O4Q55SVS.js → feedback-G7G5QCY4.js} +10 -10
- package/dist/{git-FVMGBHC2.js → git-ENLT2VNI.js} +6 -4
- package/dist/hooks/iloom-hook.js +30 -2
- package/dist/{ignite-VHV65WEZ.js → ignite-YUAOJ5PP.js} +20 -20
- package/dist/ignite-YUAOJ5PP.js.map +1 -0
- package/dist/index.d.ts +71 -27
- package/dist/index.js +196 -266
- package/dist/index.js.map +1 -1
- package/dist/init-XQQMFDM6.js +21 -0
- package/dist/{lint-5JMCWE4Y.js → lint-OFVN7FT6.js} +9 -9
- package/dist/mcp/issue-management-server.js +359 -13
- package/dist/mcp/issue-management-server.js.map +1 -1
- package/dist/mcp/recap-server.js +13 -4
- package/dist/mcp/recap-server.js.map +1 -1
- package/dist/{open-WHVUYGPY.js → open-MCWQAPSZ.js} +25 -76
- package/dist/open-MCWQAPSZ.js.map +1 -0
- package/dist/{projects-SA76I4TZ.js → projects-PQOTWUII.js} +11 -4
- package/dist/projects-PQOTWUII.js.map +1 -0
- package/dist/prompts/init-prompt.txt +62 -51
- package/dist/prompts/issue-prompt.txt +132 -63
- package/dist/prompts/pr-prompt.txt +3 -3
- package/dist/prompts/regular-prompt.txt +16 -18
- package/dist/prompts/session-summary-prompt.txt +13 -13
- package/dist/{rebase-Y4AS6LQW.js → rebase-RKQED567.js} +53 -8
- package/dist/rebase-RKQED567.js.map +1 -0
- package/dist/{recap-VOOUXOGP.js → recap-ZKGHZCX6.js} +6 -6
- package/dist/{run-NCRK5NPR.js → run-CCG24PBC.js} +25 -76
- package/dist/run-CCG24PBC.js.map +1 -0
- package/dist/schema/settings.schema.json +14 -3
- package/dist/{shell-SBLXVOVJ.js → shell-2NNSIU34.js} +6 -6
- package/dist/{summary-CVFAMDOJ.js → summary-G6L3VAKK.js} +11 -10
- package/dist/{summary-CVFAMDOJ.js.map → summary-G6L3VAKK.js.map} +1 -1
- package/dist/{test-3KIVXI6J.js → test-QZDOEUIO.js} +9 -9
- package/dist/{test-git-ZB6AGGRW.js → test-git-E2BLXR6M.js} +4 -4
- package/dist/{test-prefix-FBGXKMPA.js → test-prefix-A7JGGYAA.js} +4 -4
- package/dist/{test-webserver-YVQD42W6.js → test-webserver-NRMGT2HB.js} +29 -8
- package/dist/test-webserver-NRMGT2HB.js.map +1 -0
- package/package.json +3 -1
- package/dist/ClaudeContextManager-6J2EB4QU.js +0 -14
- package/dist/ClaudeService-O2PB22GX.js +0 -13
- package/dist/PRManager-GB3FOJ2W.js +0 -14
- package/dist/chunk-2A7WQKBE.js.map +0 -1
- package/dist/chunk-6YSFTPKW.js.map +0 -1
- package/dist/chunk-7Q66W4OH.js.map +0 -1
- package/dist/chunk-AEIMYF4P.js.map +0 -1
- package/dist/chunk-BXCPJJYM.js.map +0 -1
- package/dist/chunk-CFUWQHCJ.js.map +0 -1
- package/dist/chunk-F6WVM437.js.map +0 -1
- package/dist/chunk-GEXP5IOF.js.map +0 -1
- package/dist/chunk-I75JMBNB.js.map +0 -1
- package/dist/chunk-K7SEEHKO.js.map +0 -1
- package/dist/chunk-VDA5JMB4.js.map +0 -1
- package/dist/chunk-VU3QMIP2.js.map +0 -1
- package/dist/chunk-W6WVRHJ6.js +0 -251
- package/dist/chunk-W6WVRHJ6.js.map +0 -1
- package/dist/cleanup-BRUAINKE.js.map +0 -1
- package/dist/dev-server-4RCDJ5MU.js.map +0 -1
- package/dist/ignite-VHV65WEZ.js.map +0 -1
- package/dist/init-UTYRHNJJ.js +0 -21
- package/dist/open-WHVUYGPY.js.map +0 -1
- package/dist/projects-SA76I4TZ.js.map +0 -1
- package/dist/rebase-Y4AS6LQW.js.map +0 -1
- package/dist/run-NCRK5NPR.js.map +0 -1
- package/dist/test-webserver-YVQD42W6.js.map +0 -1
- /package/dist/{BranchNamingService-B5PVRR7F.js.map → BranchNamingService-FLPUUFOB.js.map} +0 -0
- /package/dist/{ClaudeContextManager-6J2EB4QU.js.map → ClaudeContextManager-KE5TBZVZ.js.map} +0 -0
- /package/dist/{ClaudeService-O2PB22GX.js.map → ClaudeService-CRSETT3A.js.map} +0 -0
- /package/dist/{GitHubService-S2OGUTDR.js.map → GitHubService-O7U4UQ7N.js.map} +0 -0
- /package/dist/{LoomLauncher-5LFM4LXB.js.map → LoomLauncher-NL65LSKP.js.map} +0 -0
- /package/dist/{MetadataManager-DFI73J3G.js.map → MetadataManager-XJ2YB762.js.map} +0 -0
- /package/dist/{PRManager-GB3FOJ2W.js.map → PRManager-2ABCWXHW.js.map} +0 -0
- /package/dist/{ProjectCapabilityDetector-S5FLNCFI.js.map → ProjectCapabilityDetector-UZYW32SY.js.map} +0 -0
- /package/dist/{PromptTemplateManager-C3DK6XZL.js.map → PromptTemplateManager-7L3HJQQU.js.map} +0 -0
- /package/dist/{SettingsManager-35F5RUJH.js.map → SettingsManager-YU4VYPTW.js.map} +0 -0
- /package/dist/{build-FJVYP7EV.js.map → build-O2EJHDEW.js.map} +0 -0
- /package/dist/{chunk-ZPSTA5PR.js.map → chunk-3CDWFEGL.js.map} +0 -0
- /package/dist/{chunk-UQIXZ3BA.js.map → chunk-5V74K5ZA.js.map} +0 -0
- /package/dist/{chunk-7WANFUIK.js.map → chunk-6TL3BYH6.js.map} +0 -0
- /package/dist/{chunk-5TXLVEXT.js.map → chunk-C3AKFAIR.js.map} +0 -0
- /package/dist/{chunk-LVBRMTE6.js.map → chunk-FEAJR6PN.js.map} +0 -0
- /package/dist/{chunk-64O2UIWO.js.map → chunk-GV5X6XUE.js.map} +0 -0
- /package/dist/{chunk-7HIRPCKU.js.map → chunk-HVQNVRAF.js.map} +0 -0
- /package/dist/{chunk-6U6VI4SZ.js.map → chunk-KVS4XGBQ.js.map} +0 -0
- /package/dist/{chunk-AXX3QIKK.js.map → chunk-LLWX3PCW.js.map} +0 -0
- /package/dist/{chunk-SN3Z6EZO.js.map → chunk-N7FVXZNI.js.map} +0 -0
- /package/dist/{chunk-EK3XCAAS.js.map → chunk-UDRZY65Y.js.map} +0 -0
- /package/dist/{chunk-3PT7RKL5.js.map → chunk-USJSNHGG.js.map} +0 -0
- /package/dist/{chunk-TRQ76ISK.js.map → chunk-Z6BO53V7.js.map} +0 -0
- /package/dist/{claude-H33OQMXO.js.map → claude-6H36IBHO.js.map} +0 -0
- /package/dist/{compile-ULNO5F7Q.js.map → compile-LRMAADUT.js.map} +0 -0
- /package/dist/{contribute-Q6GX6AXK.js.map → contribute-GXKOIA42.js.map} +0 -0
- /package/dist/{feedback-O4Q55SVS.js.map → feedback-G7G5QCY4.js.map} +0 -0
- /package/dist/{git-FVMGBHC2.js.map → git-ENLT2VNI.js.map} +0 -0
- /package/dist/{init-UTYRHNJJ.js.map → init-XQQMFDM6.js.map} +0 -0
- /package/dist/{lint-5JMCWE4Y.js.map → lint-OFVN7FT6.js.map} +0 -0
- /package/dist/{recap-VOOUXOGP.js.map → recap-ZKGHZCX6.js.map} +0 -0
- /package/dist/{shell-SBLXVOVJ.js.map → shell-2NNSIU34.js.map} +0 -0
- /package/dist/{test-3KIVXI6J.js.map → test-QZDOEUIO.js.map} +0 -0
- /package/dist/{test-git-ZB6AGGRW.js.map → test-git-E2BLXR6M.js.map} +0 -0
- /package/dist/{test-prefix-FBGXKMPA.js.map → test-prefix-A7JGGYAA.js.map} +0 -0
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/lib/SessionSummaryService.ts","../src/utils/claude-transcript.ts","../src/mcp/GitHubIssueManagementProvider.ts","../src/utils/linear-markup-converter.ts","../src/mcp/LinearIssueManagementProvider.ts","../src/mcp/IssueManagementProviderFactory.ts"],"sourcesContent":["/**\n * SessionSummaryService: Generates and posts Claude session summaries\n *\n * This service orchestrates:\n * 1. Reading session metadata to get session ID\n * 2. Loading and processing the session-summary prompt template\n * 3. Invoking Claude headless to generate the summary\n * 4. Posting the summary as a comment to the issue/PR\n */\n\nimport path from 'path'\nimport os from 'os'\nimport fs from 'fs-extra'\nimport { logger } from '../utils/logger.js'\nimport { launchClaude, generateDeterministicSessionId } from '../utils/claude.js'\nimport { readSessionContext } from '../utils/claude-transcript.js'\nimport { PromptTemplateManager } from './PromptTemplateManager.js'\nimport { MetadataManager } from './MetadataManager.js'\nimport { SettingsManager, type IloomSettings } from './SettingsManager.js'\nimport { IssueManagementProviderFactory } from '../mcp/IssueManagementProviderFactory.js'\nimport type { IssueProvider } from '../mcp/types.js'\nimport { hasMultipleRemotes } from '../utils/remote.js'\nimport type { RecapFile, RecapOutput } from '../mcp/recap-types.js'\nimport { formatRecapMarkdown } from '../utils/recap-formatter.js'\n\nconst RECAPS_DIR = path.join(os.homedir(), '.config', 'iloom-ai', 'recaps')\n\n/**\n * Slugify path to recap filename (matches MetadataManager/RecapCommand algorithm)\n *\n * Algorithm:\n * 1. Trim trailing slashes\n * 2. Replace all path separators (/ or \\) with ___ (triple underscore)\n * 3. Replace any other non-alphanumeric characters (except _ and -) with -\n * 4. Append .json\n */\nfunction slugifyPath(loomPath: string): string {\n\tlet slug = loomPath.replace(/[/\\\\]+$/, '')\n\tslug = slug.replace(/[/\\\\]/g, '___')\n\tslug = slug.replace(/[^a-zA-Z0-9_-]/g, '-')\n\treturn `${slug}.json`\n}\n\n/**\n * Read recap file for a worktree path with graceful degradation\n * Returns formatted recap string or null if not found/error\n */\nasync function readRecapFile(worktreePath: string): Promise<string | null> {\n\ttry {\n\t\tconst filePath = path.join(RECAPS_DIR, slugifyPath(worktreePath))\n\t\tif (await fs.pathExists(filePath)) {\n\t\t\tconst content = await fs.readFile(filePath, 'utf8')\n\t\t\tconst recap = JSON.parse(content) as RecapFile\n\n\t\t\t// Check if recap has any meaningful content\n\t\t\tconst hasGoal = recap.goal !== null && recap.goal !== undefined\n\t\t\tconst hasComplexity = recap.complexity !== null && recap.complexity !== undefined\n\t\t\tconst hasEntries = Array.isArray(recap.entries) && recap.entries.length > 0\n\t\t\tconst hasArtifacts = Array.isArray(recap.artifacts) && recap.artifacts.length > 0\n\t\t\tconst hasContent = hasGoal || hasComplexity || hasEntries || hasArtifacts\n\n\t\t\tif (hasContent) {\n\t\t\t\t// Convert RecapFile (optional fields) to RecapOutput (required fields)\n\t\t\t\t// Same pattern as RecapCommand.ts:61-66\n\t\t\t\tconst recapOutput: RecapOutput = {\n\t\t\t\t\tfilePath,\n\t\t\t\t\tgoal: recap.goal ?? null,\n\t\t\t\t\tcomplexity: recap.complexity ?? null,\n\t\t\t\t\tentries: recap.entries ?? [],\n\t\t\t\t\tartifacts: recap.artifacts ?? 
[],\n\t\t\t\t}\n\t\t\t\treturn formatRecapMarkdown(recapOutput)\n\t\t\t}\n\t\t}\n\t\treturn null\n\t} catch {\n\t\t// Graceful degradation - return null on any error\n\t\treturn null\n\t}\n}\n\n/**\n * Input for generating and posting a session summary\n */\nexport interface SessionSummaryInput {\n\tworktreePath: string\n\tissueNumber: string | number\n\tbranchName: string\n\tloomType: 'issue' | 'pr' | 'branch'\n\t/** Optional PR number - when provided, summary is posted to the PR instead of the issue */\n\tprNumber?: number\n}\n\n/**\n * Result from generating a session summary\n */\nexport interface SessionSummaryResult {\n\tsummary: string\n\tsessionId: string\n}\n\n/**\n * Service that generates and posts Claude session summaries to issues\n */\nexport class SessionSummaryService {\n\tprivate templateManager: PromptTemplateManager\n\tprivate metadataManager: MetadataManager\n\tprivate settingsManager: SettingsManager\n\n\tconstructor(\n\t\ttemplateManager?: PromptTemplateManager,\n\t\tmetadataManager?: MetadataManager,\n\t\tsettingsManager?: SettingsManager\n\t) {\n\t\tthis.templateManager = templateManager ?? new PromptTemplateManager()\n\t\tthis.metadataManager = metadataManager ?? new MetadataManager()\n\t\tthis.settingsManager = settingsManager ?? new SettingsManager()\n\t}\n\n\t/**\n\t * Generate and post a session summary to the issue\n\t *\n\t * Non-blocking: Catches all errors and logs warnings instead of throwing\n\t * This ensures the finish workflow continues even if summary generation fails\n\t */\n\tasync generateAndPostSummary(input: SessionSummaryInput): Promise<void> {\n\t\ttry {\n\t\t\t// 1. Skip for branch type (no issue to comment on)\n\t\t\tif (input.loomType === 'branch') {\n\t\t\t\tlogger.debug('Skipping session summary: branch type has no associated issue')\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\t// 2. Read metadata to get sessionId, or generate deterministically\n\t\t\tconst metadata = await this.metadataManager.readMetadata(input.worktreePath)\n\t\t\tconst sessionId = metadata?.sessionId ?? generateDeterministicSessionId(input.worktreePath)\n\n\t\t\t// 3. Load settings to check generateSummary config\n\t\t\tconst settings = await this.settingsManager.loadSettings(input.worktreePath)\n\t\t\tif (!this.shouldGenerateSummary(input.loomType, settings)) {\n\t\t\t\tlogger.debug(`Skipping session summary: generateSummary is disabled for ${input.loomType} workflow`)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tlogger.info('Generating session summary...')\n\n\t\t\t// 4. Try to read compact summaries from session transcript for additional context\n\t\t\tlogger.debug(`Looking for session transcript with sessionId: ${sessionId}`)\n\t\t\tconst compactSummaries = await readSessionContext(input.worktreePath, sessionId)\n\t\t\tif (compactSummaries) {\n\t\t\t\tlogger.debug(`Found compact summaries (${compactSummaries.length} chars)`)\n\t\t\t} else {\n\t\t\t\tlogger.debug('No compact summaries found in session transcript')\n\t\t\t}\n\n\t\t\t// 5. Try to read recap data for high-signal context\n\t\t\tconst recapData = await readRecapFile(input.worktreePath)\n\t\t\tif (recapData) {\n\t\t\t\tlogger.debug(`Found recap data (${recapData.length} chars)`)\n\t\t\t} else {\n\t\t\t\tlogger.debug('No recap data found')\n\t\t\t}\n\n\t\t\t// 6. 
Load and process the session-summary template\n\t\t\tconst prompt = await this.templateManager.getPrompt('session-summary', {\n\t\t\t\tISSUE_NUMBER: String(input.issueNumber),\n\t\t\t\tBRANCH_NAME: input.branchName,\n\t\t\t\tLOOM_TYPE: input.loomType,\n\t\t\t\tCOMPACT_SUMMARIES: compactSummaries ?? '',\n\t\t\t\tRECAP_DATA: recapData ?? '',\n\t\t\t})\n\n\t\t\tlogger.debug('Session summary prompt:\\n' + prompt)\n\n\t\t\t// 7. Invoke Claude headless to generate summary\n\t\t\t// Use --resume with session ID so Claude knows which conversation to summarize\n\t\t\tconst summaryModel = this.settingsManager.getSummaryModel(settings)\n\t\t\tconst summaryResult = await launchClaude(prompt, {\n\t\t\t\theadless: true,\n\t\t\t\tmodel: summaryModel,\n\t\t\t\tsessionId: sessionId, // Resume this session so Claude has conversation context\n\t\t\t})\n\n\t\t\tif (!summaryResult || typeof summaryResult !== 'string' || summaryResult.trim() === '') {\n\t\t\t\tlogger.warn('Session summary generation returned empty result')\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tconst summary = summaryResult.trim()\n\n\t\t\t// 8. Skip posting if summary is too short (likely failed generation)\n\t\t\tif (summary.length < 100) {\n\t\t\t\tlogger.warn('Session summary too short, skipping post')\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\t// 9. Post summary to issue or PR (PR takes priority when prNumber is provided)\n\t\t\tawait this.postSummaryToIssue(input.issueNumber, summary, settings, input.worktreePath, input.prNumber)\n\n\t\t\tconst targetDescription = input.prNumber ? `PR #${input.prNumber}` : 'issue'\n\t\t\tlogger.success(`Session summary posted to ${targetDescription}`)\n\t\t} catch (error) {\n\t\t\t// Non-blocking: Log warning but don't throw\n\t\t\tconst errorMessage = error instanceof Error ? error.message : String(error)\n\t\t\tlogger.warn(`Failed to generate session summary: ${errorMessage}`)\n\t\t\tlogger.debug('Session summary generation error details:', { error })\n\t\t}\n\t}\n\n\t/**\n\t * Generate a session summary without posting it\n\t *\n\t * This method is useful for previewing the summary or for use by CLI commands\n\t * that want to display the summary before optionally posting it.\n\t *\n\t * @param worktreePath - Path to the worktree\n\t * @param branchName - Name of the branch\n\t * @param loomType - Type of loom ('issue' | 'pr' | 'branch')\n\t * @param issueNumber - Issue or PR number (optional, for template variables)\n\t * @returns The generated summary and session ID\n\t * @throws Error if Claude invocation fails\n\t */\n\tasync generateSummary(\n\t\tworktreePath: string,\n\t\tbranchName: string,\n\t\tloomType: 'issue' | 'pr' | 'branch',\n\t\tissueNumber?: string | number\n\t): Promise<SessionSummaryResult> {\n\t\t// 1. Read metadata or generate deterministic session ID\n\t\tconst metadata = await this.metadataManager.readMetadata(worktreePath)\n\t\tconst sessionId = metadata?.sessionId ?? generateDeterministicSessionId(worktreePath)\n\n\t\t// 2. Load settings for model configuration\n\t\tconst settings = await this.settingsManager.loadSettings(worktreePath)\n\n\t\tlogger.info('Generating session summary...')\n\n\t\t// 3. 
Try to read compact summaries from session transcript for additional context\n\t\tlogger.debug(`Looking for session transcript with sessionId: ${sessionId}`)\n\t\tconst compactSummaries = await readSessionContext(worktreePath, sessionId)\n\t\tif (compactSummaries) {\n\t\t\tlogger.debug(`Found compact summaries (${compactSummaries.length} chars)`)\n\t\t} else {\n\t\t\tlogger.debug('No compact summaries found in session transcript')\n\t\t}\n\n\t\t// 4. Try to read recap data for high-signal context\n\t\tconst recapData = await readRecapFile(worktreePath)\n\t\tif (recapData) {\n\t\t\tlogger.debug(`Found recap data (${recapData.length} chars)`)\n\t\t} else {\n\t\t\tlogger.debug('No recap data found')\n\t\t}\n\n\t\t// 5. Load and process the session-summary template\n\t\tconst prompt = await this.templateManager.getPrompt('session-summary', {\n\t\t\tISSUE_NUMBER: issueNumber !== undefined ? String(issueNumber) : '',\n\t\t\tBRANCH_NAME: branchName,\n\t\t\tLOOM_TYPE: loomType,\n\t\t\tCOMPACT_SUMMARIES: compactSummaries ?? '',\n\t\t\tRECAP_DATA: recapData ?? '',\n\t\t})\n\n\t\tlogger.debug('Session summary prompt:\\n' + prompt)\n\n\t\t// 6. Invoke Claude headless to generate summary\n\t\tconst summaryModel = this.settingsManager.getSummaryModel(settings)\n\t\tconst summaryResult = await launchClaude(prompt, {\n\t\t\theadless: true,\n\t\t\tmodel: summaryModel,\n\t\t\tsessionId: sessionId,\n\t\t})\n\n\t\tif (!summaryResult || typeof summaryResult !== 'string' || summaryResult.trim() === '') {\n\t\t\tthrow new Error('Session summary generation returned empty result')\n\t\t}\n\n\t\tconst summary = summaryResult.trim()\n\n\t\t// 7. Check if summary is too short (likely failed generation)\n\t\tif (summary.length < 100) {\n\t\t\tthrow new Error('Session summary too short - generation may have failed')\n\t\t}\n\n\t\treturn {\n\t\t\tsummary,\n\t\t\tsessionId: sessionId,\n\t\t}\n\t}\n\n\t/**\n\t * Post a summary to an issue (used by both generateAndPostSummary and CLI commands)\n\t *\n\t * @param issueNumber - Issue or PR number to post to\n\t * @param summary - The summary text to post\n\t * @param worktreePath - Path to worktree for loading settings (optional)\n\t */\n\tasync postSummary(\n\t\tissueNumber: string | number,\n\t\tsummary: string,\n\t\tworktreePath?: string\n\t): Promise<void> {\n\t\tconst settings = await this.settingsManager.loadSettings(worktreePath)\n\t\tawait this.postSummaryToIssue(issueNumber, summary, settings, worktreePath ?? process.cwd())\n\t\tlogger.success('Session summary posted to issue')\n\t}\n\n\t/**\n\t * Determine if summary should be generated based on loom type and settings\n\t *\n\t * @param loomType - The type of loom being finished\n\t * @param settings - The loaded iloom settings\n\t * @returns true if summary should be generated\n\t */\n\tshouldGenerateSummary(\n\t\tloomType: 'issue' | 'pr' | 'branch',\n\t\tsettings: IloomSettings\n\t): boolean {\n\t\t// Branch type never generates summaries (no issue to comment on)\n\t\tif (loomType === 'branch') {\n\t\t\treturn false\n\t\t}\n\n\t\t// Get workflow-specific config\n\t\tconst workflowConfig =\n\t\t\tloomType === 'issue'\n\t\t\t\t? settings.workflows?.issue\n\t\t\t\t: settings.workflows?.pr\n\n\t\t// Default to true if not explicitly set (for issue and pr types)\n\t\treturn workflowConfig?.generateSummary ?? 
true\n\t}\n\n\t/**\n\t * Apply attribution footer to summary based on settings\n\t *\n\t * @param summary - The summary text\n\t * @param worktreePath - Path to worktree for loading settings and detecting remotes\n\t * @returns Summary with attribution footer if applicable\n\t */\n\tasync applyAttribution(summary: string, worktreePath: string): Promise<string> {\n\t\tconst settings = await this.settingsManager.loadSettings(worktreePath)\n\t\treturn this.applyAttributionWithSettings(summary, settings, worktreePath)\n\t}\n\n\t/**\n\t * Apply attribution footer to summary based on provided settings\n\t *\n\t * @param summary - The summary text\n\t * @param settings - The loaded iloom settings\n\t * @param worktreePath - Path to worktree for detecting remotes\n\t * @returns Summary with attribution footer if applicable\n\t */\n\tasync applyAttributionWithSettings(\n\t\tsummary: string,\n\t\tsettings: IloomSettings,\n\t\tworktreePath: string\n\t): Promise<string> {\n\t\tconst attributionSetting = settings.attribution ?? 'upstreamOnly'\n\t\tlogger.debug(`Attribution setting from config: ${settings.attribution}`)\n\t\tlogger.debug(`Attribution setting (with default): ${attributionSetting}`)\n\n\t\tlet shouldShowAttribution = false\n\t\tif (attributionSetting === 'on') {\n\t\t\tshouldShowAttribution = true\n\t\t\tlogger.debug('Attribution: always on')\n\t\t} else if (attributionSetting === 'upstreamOnly') {\n\t\t\t// Only show attribution when contributing to external repos (multiple remotes)\n\t\t\tshouldShowAttribution = await hasMultipleRemotes(worktreePath)\n\t\t\tlogger.debug(`Attribution: upstreamOnly, hasMultipleRemotes=${shouldShowAttribution}`)\n\t\t} else {\n\t\t\tlogger.debug('Attribution: off')\n\t\t}\n\t\t// 'off' keeps shouldShowAttribution = false\n\n\t\tlogger.debug(`Should show attribution: ${shouldShowAttribution}`)\n\t\tif (shouldShowAttribution) {\n\t\t\tlogger.debug('Attribution footer appended to summary')\n\t\t\treturn `${summary}\\n\\n---\\n*Generated with 🤖❤️ by [iloom.ai](https://iloom.ai)*`\n\t\t}\n\n\t\treturn summary\n\t}\n\n\t/**\n\t * Post the summary as a comment to the issue or PR\n\t *\n\t * @param issueNumber - The issue number (used when prNumber is not provided)\n\t * @param summary - The summary text to post\n\t * @param settings - The loaded iloom settings\n\t * @param worktreePath - Path to worktree for attribution detection\n\t * @param prNumber - Optional PR number - when provided, posts to the PR instead\n\t */\n\tprivate async postSummaryToIssue(\n\t\tissueNumber: string | number,\n\t\tsummary: string,\n\t\tsettings: IloomSettings,\n\t\tworktreePath: string,\n\t\tprNumber?: number\n\t): Promise<void> {\n\t\t// Get the issue management provider from settings\n\t\tconst providerType = (settings.issueManagement?.provider ?? 'github') as IssueProvider\n\t\tconst provider = IssueManagementProviderFactory.create(providerType)\n\n\t\t// Apply attribution if configured\n\t\tconst finalSummary = await this.applyAttributionWithSettings(summary, settings, worktreePath)\n\n\t\t// When prNumber is provided, post to the PR instead of the issue\n\t\tconst targetNumber = prNumber ?? issueNumber\n\t\tconst targetType = prNumber !== undefined ? 
'pr' : 'issue'\n\n\t\t// Create the comment\n\t\tawait provider.createComment({\n\t\t\tnumber: String(targetNumber),\n\t\t\tbody: finalSummary,\n\t\t\ttype: targetType,\n\t\t})\n\t}\n}\n","/**\n * Claude Transcript Utilities\n *\n * Provides functions to read and parse Claude Code session transcript files\n * stored in ~/.claude/projects/. These transcripts contain the full conversation\n * history including compact summaries from when conversations were compacted.\n */\n\nimport { readFile } from 'fs/promises'\nimport { homedir } from 'os'\nimport { join } from 'path'\nimport { logger } from './logger.js'\n\n/**\n * Entry in a Claude Code JSONL transcript file\n */\nexport interface TranscriptEntry {\n\ttype: 'user' | 'assistant' | 'system' | 'file-history-snapshot' | 'queue-operation'\n\tsessionId?: string\n\tmessage?: { role: string; content: string | Array<{ type: string; text?: string }> }\n\tisCompactSummary?: boolean\n\tisVisibleInTranscriptOnly?: boolean\n\tsubtype?: string // 'compact_boundary' for compaction markers\n\tcontent?: string\n\ttimestamp?: string\n\tuuid?: string\n\tparentUuid?: string\n}\n\n/**\n * Get the Claude projects directory path encoding for a worktree path\n * Encoding: /Users/adam/Projects/foo_bar -> -Users-adam-Projects-foo-bar\n *\n * Claude Code encodes paths by replacing both '/' and '_' with '-'\n *\n * @param worktreePath - Absolute path to the worktree\n * @returns Encoded directory name for Claude projects\n */\nexport function getClaudeProjectPath(worktreePath: string): string {\n\t// Replace all '/' and '_' with '-' (matching Claude Code's encoding)\n\treturn worktreePath.replace(/[/_]/g, '-')\n}\n\n/**\n * Get the full path to the Claude projects directory\n * @returns Path to ~/.claude/projects/\n */\nexport function getClaudeProjectsDir(): string {\n\treturn join(homedir(), '.claude', 'projects')\n}\n\n/**\n * Find the session transcript file for a given worktree and session ID\n *\n * @param worktreePath - Absolute path to the worktree\n * @param sessionId - Session ID to find transcript for\n * @returns Full path to the transcript file, or null if not found\n */\nexport function findSessionTranscript(worktreePath: string, sessionId: string): string | null {\n\tconst projectsDir = getClaudeProjectsDir()\n\tconst projectDirName = getClaudeProjectPath(worktreePath)\n\tconst transcriptPath = join(projectsDir, projectDirName, `${sessionId}.jsonl`)\n\treturn transcriptPath\n}\n\n/**\n * Extract the content from a compact summary message\n * Handles both string content and array content formats\n */\nfunction extractMessageContent(message: TranscriptEntry['message']): string | null {\n\tif (!message) return null\n\n\tif (typeof message.content === 'string') {\n\t\treturn message.content\n\t}\n\n\tif (Array.isArray(message.content)) {\n\t\t// Concatenate all text elements\n\t\treturn message.content\n\t\t\t.filter((item) => item.type === 'text' && item.text)\n\t\t\t.map((item) => item.text)\n\t\t\t.join('\\n')\n\t}\n\n\treturn null\n}\n\n/**\n * Extract compact summaries from a session transcript file\n *\n * Returns empty array if file doesn't exist or no summaries found.\n * Each compact summary contains structured history of pre-compaction conversation.\n *\n * @param transcriptPath - Full path to the transcript JSONL file\n * @param maxSummaries - Maximum number of summaries to return (default 3)\n * @returns Array of compact summary content strings, newest first\n */\nexport async function extractCompactSummaries(\n\ttranscriptPath: 
string,\n\tmaxSummaries = 3\n): Promise<string[]> {\n\ttry {\n\t\tconst content = await readFile(transcriptPath, 'utf-8')\n\t\tconst lines = content.split('\\n').filter((line) => line.trim())\n\n\t\tconst summaries: string[] = []\n\n\t\tfor (const line of lines) {\n\t\t\ttry {\n\t\t\t\tconst entry = JSON.parse(line) as TranscriptEntry\n\n\t\t\t\t// Look for compact summary entries\n\t\t\t\tif (entry.isCompactSummary === true && entry.message) {\n\t\t\t\t\tconst summaryContent = extractMessageContent(entry.message)\n\t\t\t\t\tif (summaryContent) {\n\t\t\t\t\t\tsummaries.push(summaryContent)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} catch {\n\t\t\t\t// Skip malformed JSON lines\n\t\t\t\tlogger.debug('Skipping malformed JSONL line in transcript')\n\t\t\t}\n\t\t}\n\n\t\t// Return most recent summaries (they appear in order in the file)\n\t\t// Limit to maxSummaries\n\t\treturn summaries.slice(-maxSummaries)\n\t} catch (error) {\n\t\t// File not found or permission error - return empty array (graceful degradation)\n\t\tif (error instanceof Error && 'code' in error && error.code === 'ENOENT') {\n\t\t\tlogger.debug('Transcript file not found:', transcriptPath)\n\t\t} else {\n\t\t\tlogger.debug('Error reading transcript file:', error)\n\t\t}\n\t\treturn []\n\t}\n}\n\n/**\n * Read session transcript and extract compact summaries for summary generation\n *\n * This is the main entry point for SessionSummaryService to get pre-compaction\n * conversation context. It gracefully handles all error cases.\n *\n * @param worktreePath - Absolute path to the worktree\n * @param sessionId - Session ID to find transcript for\n * @param maxSummaries - Maximum number of summaries to return (default 3)\n * @returns Formatted string of compact summaries, or null if none found\n */\nexport async function readSessionContext(\n\tworktreePath: string,\n\tsessionId: string,\n\tmaxSummaries = 3\n): Promise<string | null> {\n\tconst transcriptPath = findSessionTranscript(worktreePath, sessionId)\n\tif (!transcriptPath) {\n\t\treturn null\n\t}\n\n\tlogger.debug(`Checking transcript at: ${transcriptPath}`)\n\n\tconst summaries = await extractCompactSummaries(transcriptPath, maxSummaries)\n\n\tif (summaries.length === 0) {\n\t\treturn null\n\t}\n\n\t// Format summaries with separators\n\t// Newest summaries are at the end, so we reverse to show newest first\n\tconst formattedSummaries = summaries\n\t\t.reverse()\n\t\t.map((summary, index) => {\n\t\t\tconst header =\n\t\t\t\tsummaries.length > 1\n\t\t\t\t\t? 
`### Compact Summary ${index + 1} of ${summaries.length}\\n\\n`\n\t\t\t\t\t: ''\n\t\t\treturn `${header}${summary}`\n\t\t})\n\t\t.join('\\n\\n---\\n\\n')\n\n\treturn formattedSummaries\n}\n","/**\n * GitHub implementation of Issue Management Provider\n * Uses GitHub CLI for all operations\n * Normalizes GitHub-specific fields (login) to provider-agnostic core fields (id, displayName)\n */\n\nimport type {\n\tIssueManagementProvider,\n\tGetIssueInput,\n\tGetCommentInput,\n\tCreateCommentInput,\n\tUpdateCommentInput,\n\tIssueResult,\n\tCommentDetailResult,\n\tCommentResult,\n\tFlexibleAuthor,\n} from './types.js'\nimport {\n\texecuteGhCommand,\n\tcreateIssueComment,\n\tupdateIssueComment,\n\tcreatePRComment,\n} from '../utils/github.js'\n\n/**\n * GitHub-specific author structure from API\n */\ninterface GitHubAuthor {\n\tlogin: string\n\tid?: number\n\tavatarUrl?: string\n\turl?: string\n}\n\n/**\n * Normalize GitHub author to FlexibleAuthor format\n */\nfunction normalizeAuthor(author: GitHubAuthor | null | undefined): FlexibleAuthor | null {\n\tif (!author) return null\n\n\treturn {\n\t\tid: author.id ? String(author.id) : author.login,\n\t\tdisplayName: author.login, // GitHub uses login as primary identifier\n\t\tlogin: author.login, // Preserve original GitHub field\n\t\t...(author.avatarUrl && { avatarUrl: author.avatarUrl }),\n\t\t...(author.url && { url: author.url }),\n\t}\n}\n\n/**\n * Extract numeric comment ID from GitHub comment URL\n * URL format: https://github.com/owner/repo/issues/123#issuecomment-3615239386\n */\nexport function extractNumericIdFromUrl(url: string): string {\n\tconst match = url.match(/#issuecomment-(\\d+)$/)\n\tif (!match?.[1]) {\n\t\tthrow new Error(`Cannot extract comment ID from URL: ${url}`)\n\t}\n\treturn match[1]\n}\n\n/**\n * GitHub-specific implementation of IssueManagementProvider\n */\nexport class GitHubIssueManagementProvider implements IssueManagementProvider {\n\treadonly providerName = 'github'\n\n\t/**\n\t * Fetch issue details using gh CLI\n\t * Normalizes GitHub-specific fields to provider-agnostic format\n\t */\n\tasync getIssue(input: GetIssueInput): Promise<IssueResult> {\n\t\tconst { number, includeComments = true } = input\n\n\t\t// Convert string ID to number for GitHub CLI\n\t\tconst issueNumber = parseInt(number, 10)\n\t\tif (isNaN(issueNumber)) {\n\t\t\tthrow new Error(`Invalid GitHub issue number: ${number}. GitHub issue IDs must be numeric.`)\n\t\t}\n\n\t\t// Build fields list based on whether we need comments\n\t\tconst fields = includeComments\n\t\t\t? 
'body,title,comments,labels,assignees,milestone,author,state,number,url'\n\t\t\t: 'body,title,labels,assignees,milestone,author,state,number,url'\n\n\t\t// Use gh issue view to fetch issue details\n\t\tinterface GitHubIssueResponse {\n\t\t\tnumber: number\n\t\t\ttitle: string\n\t\t\tbody: string\n\t\t\tstate: string\n\t\t\turl: string\n\t\t\tauthor?: GitHubAuthor\n\t\t\tlabels?: Array<{ name: string; color?: string; description?: string }>\n\t\t\tassignees?: Array<GitHubAuthor>\n\t\t\tmilestone?: { title: string; number?: number; state?: string }\n\t\t\tcomments?: Array<{\n\t\t\t\tid: number\n\t\t\t\tauthor: GitHubAuthor\n\t\t\t\tbody: string\n\t\t\t\tcreatedAt: string\n\t\t\t\tupdatedAt?: string\n\t\t\t\turl: string\n\t\t\t}>\n\t\t}\n\n\t\tconst raw = await executeGhCommand<GitHubIssueResponse>([\n\t\t\t'issue',\n\t\t\t'view',\n\t\t\tString(issueNumber),\n\t\t\t'--json',\n\t\t\tfields,\n\t\t])\n\n\t\t// Normalize to IssueResult with core fields + passthrough\n\t\tconst result: IssueResult = {\n\t\t\t// Core fields\n\t\t\tid: String(raw.number),\n\t\t\ttitle: raw.title,\n\t\t\tbody: raw.body,\n\t\t\tstate: raw.state,\n\t\t\turl: raw.url,\n\t\t\tprovider: 'github',\n\n\t\t\t// Normalized author\n\t\t\tauthor: normalizeAuthor(raw.author),\n\n\t\t\t// Optional flexible fields\n\t\t\t...(raw.assignees && {\n\t\t\t\tassignees: raw.assignees.map(a => normalizeAuthor(a)).filter((a): a is FlexibleAuthor => a !== null),\n\t\t\t}),\n\t\t\t...(raw.labels && {\n\t\t\t\tlabels: raw.labels,\n\t\t\t}),\n\n\t\t\t// GitHub-specific passthrough fields\n\t\t\t...(raw.milestone && {\n\t\t\t\tmilestone: raw.milestone,\n\t\t\t}),\n\t\t}\n\n\t\t// Handle comments with normalized authors\n\t\t// Use extractNumericIdFromUrl to get REST API-compatible numeric IDs from comment URLs\n\t\t// (GitHub CLI returns GraphQL node IDs in the id field, but REST API expects numeric IDs)\n\t\tif (raw.comments !== undefined) {\n\t\t\tresult.comments = raw.comments.map(comment => ({\n\t\t\t\tid: extractNumericIdFromUrl(comment.url),\n\t\t\t\tbody: comment.body,\n\t\t\t\tcreatedAt: comment.createdAt,\n\t\t\t\tauthor: normalizeAuthor(comment.author),\n\t\t\t\t...(comment.updatedAt && { updatedAt: comment.updatedAt }),\n\t\t\t}))\n\t\t}\n\n\t\treturn result\n\t}\n\n\t/**\n\t * Fetch a specific comment by ID using gh API\n\t * Normalizes author to FlexibleAuthor format\n\t */\n\tasync getComment(input: GetCommentInput): Promise<CommentDetailResult> {\n\t\tconst { commentId } = input\n\t\t// Note: GitHub doesn't need the issue number parameter - comment IDs are globally unique\n\t\t// But we accept it for interface compatibility with other providers\n\n\t\t// Convert string ID to number for GitHub API\n\t\tconst numericCommentId = parseInt(commentId, 10)\n\t\tif (isNaN(numericCommentId)) {\n\t\t\tthrow new Error(`Invalid GitHub comment ID: ${commentId}. 
GitHub comment IDs must be numeric.`)\n\t\t}\n\n\t\t// GitHub API response structure\n\t\tinterface GitHubCommentResponse {\n\t\t\tid: number\n\t\t\tbody: string\n\t\t\tuser: GitHubAuthor\n\t\t\tcreated_at: string\n\t\t\tupdated_at?: string\n\t\t\thtml_url?: string\n\t\t\treactions?: Record<string, unknown>\n\t\t}\n\n\t\t// Use gh api to fetch specific comment\n\t\tconst raw = await executeGhCommand<GitHubCommentResponse>([\n\t\t\t'api',\n\t\t\t`repos/:owner/:repo/issues/comments/${numericCommentId}`,\n\t\t\t'--jq',\n\t\t\t'{id: .id, body: .body, user: .user, created_at: .created_at, updated_at: .updated_at, html_url: .html_url, reactions: .reactions}',\n\t\t])\n\n\t\t// Normalize to CommentDetailResult\n\t\treturn {\n\t\t\tid: String(raw.id),\n\t\t\tbody: raw.body,\n\t\t\tauthor: normalizeAuthor(raw.user),\n\t\t\tcreated_at: raw.created_at,\n\t\t\t...(raw.updated_at && { updated_at: raw.updated_at }),\n\t\t\t// Passthrough GitHub-specific fields\n\t\t\t...(raw.html_url && { html_url: raw.html_url }),\n\t\t\t...(raw.reactions && { reactions: raw.reactions }),\n\t\t}\n\t}\n\n\t/**\n\t * Create a new comment on an issue or PR\n\t */\n\tasync createComment(input: CreateCommentInput): Promise<CommentResult> {\n\t\tconst { number, body, type } = input\n\n\t\t// Convert string ID to number for GitHub utilities\n\t\tconst numericId = parseInt(number, 10)\n\t\tif (isNaN(numericId)) {\n\t\t\tthrow new Error(`Invalid GitHub ${type} number: ${number}. GitHub IDs must be numeric.`)\n\t\t}\n\n\t\t// Delegate to existing GitHub utilities\n\t\tconst result =\n\t\t\ttype === 'issue'\n\t\t\t\t? await createIssueComment(numericId, body)\n\t\t\t\t: await createPRComment(numericId, body)\n\n\t\t// Convert numeric ID to string for the interface\n\t\treturn {\n\t\t\t...result,\n\t\t\tid: String(result.id),\n\t\t}\n\t}\n\n\t/**\n\t * Update an existing comment\n\t */\n\tasync updateComment(input: UpdateCommentInput): Promise<CommentResult> {\n\t\tconst { commentId, body } = input\n\t\t// Note: GitHub doesn't need the issue number parameter - comment IDs are globally unique\n\t\t// But we accept it for interface compatibility with other providers\n\n\t\t// Convert string ID to number for GitHub utility\n\t\tconst numericCommentId = parseInt(commentId, 10)\n\t\tif (isNaN(numericCommentId)) {\n\t\t\tthrow new Error(`Invalid GitHub comment ID: ${commentId}. 
GitHub comment IDs must be numeric.`)\n\t\t}\n\n\t\t// Delegate to existing GitHub utility\n\t\tconst result = await updateIssueComment(numericCommentId, body)\n\n\t\t// Convert numeric ID to string for the interface\n\t\treturn {\n\t\t\t...result,\n\t\t\tid: String(result.id),\n\t\t}\n\t}\n}\n","import { appendFileSync } from 'node:fs'\nimport { join, dirname, basename, extname } from 'node:path'\n\n/**\n * Utility class for converting HTML details/summary format to Linear's collapsible format\n *\n * Converts:\n * <details>\n * <summary>Header</summary>\n * CONTENT\n * </details>\n *\n * Into Linear format:\n * +++ Header\n *\n * CONTENT\n *\n * +++\n */\nexport class LinearMarkupConverter {\n\t/**\n\t * Convert HTML details/summary blocks to Linear's collapsible format\n\t * Handles nested details blocks recursively\n\t *\n\t * @param text - Text containing HTML details/summary blocks\n\t * @returns Text with details/summary converted to Linear format\n\t */\n\tstatic convertDetailsToLinear(text: string): string {\n\t\tif (!text) {\n\t\t\treturn text\n\t\t}\n\n\t\t// Process from innermost to outermost to handle nesting correctly\n\t\t// Keep converting until no more details blocks are found\n\t\tlet previousText = ''\n\t\tlet currentText = text\n\n\t\twhile (previousText !== currentText) {\n\t\t\tpreviousText = currentText\n\t\t\tcurrentText = this.convertSinglePass(currentText)\n\t\t}\n\n\t\treturn currentText\n\t}\n\n\t/**\n\t * Perform a single pass of details block conversion\n\t * Converts the innermost details blocks first\n\t */\n\tprivate static convertSinglePass(text: string): string {\n\t\t// Match <details> blocks with optional attributes on the details tag\n\t\t// Supports multiline content between tags\n\t\tconst detailsRegex = /<details[^>]*>\\s*<summary[^>]*>(.*?)<\\/summary>\\s*(.*?)\\s*<\\/details>/gis\n\n\t\treturn text.replace(detailsRegex, (_match, summary, content) => {\n\t\t\t// Clean up the summary - trim whitespace and decode HTML entities\n\t\t\tconst cleanSummary = this.cleanText(summary)\n\n\t\t\t// Clean up the content - preserve internal structure but normalize outer whitespace\n\t\t\t// Note: Don't recursively convert here - the while loop handles that\n\t\t\tconst cleanContent = this.cleanContent(content)\n\n\t\t\t// Build Linear collapsible format\n\t\t\t// Always include blank lines around content for readability\n\t\t\tif (cleanContent) {\n\t\t\t\treturn `+++ ${cleanSummary}\\n\\n${cleanContent}\\n\\n+++`\n\t\t\t} else {\n\t\t\t\t// Empty content - use minimal format\n\t\t\t\treturn `+++ ${cleanSummary}\\n\\n+++`\n\t\t\t}\n\t\t})\n\t}\n\n\t/**\n\t * Clean text by trimming whitespace and decoding common HTML entities\n\t */\n\tprivate static cleanText(text: string): string {\n\t\treturn text\n\t\t\t.trim()\n\t\t\t.replace(/</g, '<')\n\t\t\t.replace(/>/g, '>')\n\t\t\t.replace(/&/g, '&')\n\t\t\t.replace(/"/g, '\"')\n\t\t\t.replace(/'/g, \"'\")\n\t}\n\n\t/**\n\t * Clean content while preserving internal structure\n\t * - Removes leading/trailing whitespace\n\t * - Normalizes internal blank lines (max 2 consecutive newlines)\n\t * - Preserves code blocks and other formatting\n\t */\n\tprivate static cleanContent(content: string): string {\n\t\tif (!content) {\n\t\t\treturn ''\n\t\t}\n\n\t\t// Trim outer whitespace\n\t\tlet cleaned = content.trim()\n\n\t\t// Normalize excessive blank lines (3+ newlines -> 2 newlines)\n\t\tcleaned = cleaned.replace(/\\n{3,}/g, '\\n\\n')\n\n\t\treturn cleaned\n\t}\n\n\t/**\n\t * Check if text contains HTML details/summary 
blocks\n\t * Useful for conditional conversion\n\t */\n\tstatic hasDetailsBlocks(text: string): boolean {\n\t\tif (!text) {\n\t\t\treturn false\n\t\t}\n\n\t\tconst detailsRegex = /<details[^>]*>.*?<summary[^>]*>.*?<\\/summary>.*?<\\/details>/is\n\t\treturn detailsRegex.test(text)\n\t}\n\n\t/**\n\t * Remove wrapper tags from code sample details blocks\n\t * Identifies details blocks where summary contains \"X lines\" pattern\n\t * and removes the details/summary tags while preserving the content\n\t *\n\t * @param text - Text containing potential code sample details blocks\n\t * @returns Text with code sample wrappers removed\n\t */\n\tstatic removeCodeSampleWrappers(text: string): string {\n\t\tif (!text) {\n\t\t\treturn text\n\t\t}\n\n\t\t// Match details blocks where summary contains \"X lines\" (e.g., \"45 lines\", \"120 lines\")\n\t\t// Pattern: <details><summary>...N lines...</summary>CONTENT</details>\n\t\t// Use [^<]* to match summary content without allowing nested tags to interfere\n\t\t// Then use [\\s\\S]*? for the content to allow any characters including newlines\n\t\tconst codeSampleRegex = /<details[^>]*>\\s*<summary[^>]*>([^<]*\\d+\\s+lines[^<]*)<\\/summary>\\s*([\\s\\S]*?)<\\/details>/gi\n\n\t\treturn text.replace(codeSampleRegex, (_match, _summary, content) => {\n\t\t\t// Return just the content, without any wrapper tags\n\t\t\t// Preserve the content exactly as-is\n\t\t\treturn content.trim()\n\t\t})\n\t}\n\n\t/**\n\t * Convert text for Linear - applies all necessary conversions\n\t * Currently only converts details/summary blocks, but can be extended\n\t * for other HTML to Linear markdown conversions\n\t */\n\tstatic convertToLinear(text: string): string {\n\t\tif (!text) {\n\t\t\treturn text\n\t\t}\n\n\t\t// Log input if logging is enabled\n\t\tthis.logConversion('INPUT', text)\n\n\t\t// Apply all conversions\n\t\tlet converted = text\n\n\t\t// First, remove code sample wrappers (details blocks with \"X lines\" pattern)\n\t\t// This prevents them from being converted to Linear's +++ format\n\t\tconverted = this.removeCodeSampleWrappers(converted)\n\n\t\t// Then convert remaining details/summary blocks to Linear format\n\t\tconverted = this.convertDetailsToLinear(converted)\n\n\t\t// Log output if logging is enabled\n\t\tthis.logConversion('OUTPUT', converted)\n\n\t\treturn converted\n\t}\n\n\t/**\n\t * Log conversion input/output if LINEAR_MARKDOWN_LOG_FILE is set\n\t */\n\tprivate static logConversion(label: string, content: string): void {\n\t\tconst logFilePath = process.env.LINEAR_MARKDOWN_LOG_FILE\n\t\tif (!logFilePath) {\n\t\t\treturn\n\t\t}\n\n\t\ttry {\n\t\t\tconst timestampedPath = this.getTimestampedLogPath(logFilePath)\n\t\t\tconst timestamp = new Date().toISOString()\n\t\t\tconst separator = '================================'\n\n\t\t\tconst logEntry = `${separator}\\n[${timestamp}] CONVERSION ${label}\\n${separator}\\n${label}:\\n${content}\\n\\n`\n\n\t\t\tappendFileSync(timestampedPath, logEntry, 'utf-8')\n\t\t} catch {\n\t\t\t// Silently fail - don't crash if logging fails\n\t\t\t// This is a debug feature and shouldn't break the conversion\n\t\t}\n\t}\n\n\t/**\n\t * Generate timestamped log file path\n\t * Example: debug.log -> debug-20231202-161234.log\n\t */\n\tprivate static getTimestampedLogPath(logFilePath: string): string {\n\t\tconst dir = dirname(logFilePath)\n\t\tconst ext = extname(logFilePath)\n\t\tconst base = basename(logFilePath, ext)\n\n\t\t// Generate timestamp: YYYYMMDD-HHMMSS\n\t\tconst now = new Date()\n\t\tconst timestamp = 
[\n\t\t\tnow.getFullYear(),\n\t\t\tString(now.getMonth() + 1).padStart(2, '0'),\n\t\t\tString(now.getDate()).padStart(2, '0'),\n\t\t].join('') + '-' + [\n\t\t\tString(now.getHours()).padStart(2, '0'),\n\t\t\tString(now.getMinutes()).padStart(2, '0'),\n\t\t\tString(now.getSeconds()).padStart(2, '0'),\n\t\t].join('')\n\n\t\treturn join(dir, `${base}-${timestamp}${ext}`)\n\t}\n}\n","/**\n * Linear implementation of Issue Management Provider\n * Uses @linear/sdk for all operations\n */\n\nimport type {\n\tIssueManagementProvider,\n\tGetIssueInput,\n\tGetCommentInput,\n\tCreateCommentInput,\n\tUpdateCommentInput,\n\tIssueResult,\n\tCommentDetailResult,\n\tCommentResult,\n} from './types.js'\nimport {\n\tfetchLinearIssue,\n\tcreateLinearComment,\n\tgetLinearComment,\n\tupdateLinearComment,\n\tfetchLinearIssueComments,\n} from '../utils/linear.js'\nimport { LinearMarkupConverter } from '../utils/linear-markup-converter.js'\n\n/**\n * Linear-specific implementation of IssueManagementProvider\n */\nexport class LinearIssueManagementProvider implements IssueManagementProvider {\n\treadonly providerName = 'linear'\n\n\t/**\n\t * Fetch issue details using Linear SDK\n\t */\n\tasync getIssue(input: GetIssueInput): Promise<IssueResult> {\n\t\tconst { number, includeComments = true } = input\n\n\t\t// Fetch issue - Linear uses alphanumeric identifiers like \"ENG-123\"\n\t\tconst raw = await fetchLinearIssue(number)\n\n\t\t// Map Linear state name to open/closed\n\t\tconst state = raw.state && (raw.state.toLowerCase().includes('done') || raw.state.toLowerCase().includes('completed') || raw.state.toLowerCase().includes('canceled'))\n\t\t\t? 'closed'\n\t\t\t: 'open'\n\n\t\t// Build result\n\t\tconst result: IssueResult = {\n\t\t\tid: raw.identifier,\n\t\t\ttitle: raw.title,\n\t\t\tbody: raw.description ?? 
'',\n\t\t\tstate,\n\t\t\turl: raw.url,\n\t\t\tprovider: 'linear',\n\t\t\tauthor: null, // Linear SDK doesn't return author in basic fetch\n\n\t\t\t// Linear-specific fields\n\t\t\tlinearState: raw.state,\n\t\t\tcreatedAt: raw.createdAt,\n\t\t\tupdatedAt: raw.updatedAt,\n\t\t}\n\n\t\t// Fetch comments if requested\n\t\tif (includeComments) {\n\t\t\ttry {\n\t\t\t\tconst comments = await this.fetchIssueComments(number)\n\t\t\t\tif (comments) {\n\t\t\t\t\tresult.comments = comments\n\t\t\t\t}\n\t\t\t} catch {\n\t\t\t\t// If comments fail, continue without them\n\t\t\t}\n\t\t}\n\n\t\treturn result\n\t}\n\n\t/**\n\t * Fetch comments for an issue\n\t */\n\tprivate async fetchIssueComments(identifier: string): Promise<IssueResult['comments']> {\n\t\ttry {\n\t\t\tconst comments = await fetchLinearIssueComments(identifier)\n\n\t\t\treturn comments.map(comment => ({\n\t\t\t\tid: comment.id,\n\t\t\t\tbody: comment.body,\n\t\t\t\tcreatedAt: comment.createdAt,\n\t\t\t\tauthor: null, // Linear SDK doesn't return comment author info in basic fetch\n\t\t\t\t...(comment.updatedAt && { updatedAt: comment.updatedAt }),\n\t\t\t}))\n\t\t} catch {\n\t\t\treturn []\n\t\t}\n\t}\n\n\t/**\n\t * Fetch a specific comment by ID\n\t */\n\tasync getComment(input: GetCommentInput): Promise<CommentDetailResult> {\n\t\tconst { commentId } = input\n\n\t\tconst raw = await getLinearComment(commentId)\n\n\t\treturn {\n\t\t\tid: raw.id,\n\t\t\tbody: raw.body,\n\t\t\tauthor: null, // Linear SDK doesn't return comment author info in basic fetch\n\t\t\tcreated_at: raw.createdAt,\n\t\t}\n\t}\n\n\t/**\n\t * Create a new comment on an issue\n\t */\n\tasync createComment(input: CreateCommentInput): Promise<CommentResult> {\n\t\tconst { number, body } = input\n\t\t// Note: Linear doesn't distinguish between issue and PR comments\n\t\t// (Linear doesn't have PRs - that's GitHub-specific)\n\n\t\t// Convert HTML details/summary blocks to Linear's collapsible format\n\t\tconst convertedBody = LinearMarkupConverter.convertToLinear(body)\n\n\t\tconst result = await createLinearComment(number, convertedBody)\n\n\t\treturn {\n\t\t\tid: result.id,\n\t\t\turl: result.url,\n\t\t\tcreated_at: result.createdAt,\n\t\t}\n\t}\n\n\t/**\n\t * Update an existing comment\n\t */\n\tasync updateComment(input: UpdateCommentInput): Promise<CommentResult> {\n\t\tconst { commentId, body } = input\n\n\t\t// Convert HTML details/summary blocks to Linear's collapsible format\n\t\tconst convertedBody = LinearMarkupConverter.convertToLinear(body)\n\n\t\tconst result = await updateLinearComment(commentId, convertedBody)\n\n\t\treturn {\n\t\t\tid: result.id,\n\t\t\turl: result.url,\n\t\t\tupdated_at: result.updatedAt,\n\t\t}\n\t}\n}\n","/**\n * Factory for creating issue management providers\n */\n\nimport type { IssueManagementProvider, IssueProvider } from './types.js'\nimport { GitHubIssueManagementProvider } from './GitHubIssueManagementProvider.js'\nimport { LinearIssueManagementProvider } from './LinearIssueManagementProvider.js'\n\n/**\n * Factory class for creating issue management providers\n */\nexport class IssueManagementProviderFactory {\n\t/**\n\t * Create an issue management provider based on the provider type\n\t */\n\tstatic create(provider: IssueProvider): IssueManagementProvider {\n\t\tswitch (provider) {\n\t\t\tcase 'github':\n\t\t\t\treturn new GitHubIssueManagementProvider()\n\t\t\tcase 'linear':\n\t\t\t\treturn new LinearIssueManagementProvider()\n\t\t\tdefault:\n\t\t\t\tthrow new Error(`Unsupported issue management provider: 
${provider}`)\n\t\t}\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAUA,OAAO,UAAU;AACjB,OAAO,QAAQ;AACf,OAAO,QAAQ;;;ACJf,SAAS,gBAAgB;AACzB,SAAS,eAAe;AACxB,SAAS,YAAY;AA4Bd,SAAS,qBAAqB,cAA8B;AAElE,SAAO,aAAa,QAAQ,SAAS,GAAG;AACzC;AAMO,SAAS,uBAA+B;AAC9C,SAAO,KAAK,QAAQ,GAAG,WAAW,UAAU;AAC7C;AASO,SAAS,sBAAsB,cAAsB,WAAkC;AAC7F,QAAM,cAAc,qBAAqB;AACzC,QAAM,iBAAiB,qBAAqB,YAAY;AACxD,QAAM,iBAAiB,KAAK,aAAa,gBAAgB,GAAG,SAAS,QAAQ;AAC7E,SAAO;AACR;AAMA,SAAS,sBAAsB,SAAoD;AAClF,MAAI,CAAC,QAAS,QAAO;AAErB,MAAI,OAAO,QAAQ,YAAY,UAAU;AACxC,WAAO,QAAQ;AAAA,EAChB;AAEA,MAAI,MAAM,QAAQ,QAAQ,OAAO,GAAG;AAEnC,WAAO,QAAQ,QACb,OAAO,CAAC,SAAS,KAAK,SAAS,UAAU,KAAK,IAAI,EAClD,IAAI,CAAC,SAAS,KAAK,IAAI,EACvB,KAAK,IAAI;AAAA,EACZ;AAEA,SAAO;AACR;AAYA,eAAsB,wBACrB,gBACA,eAAe,GACK;AACpB,MAAI;AACH,UAAM,UAAU,MAAM,SAAS,gBAAgB,OAAO;AACtD,UAAM,QAAQ,QAAQ,MAAM,IAAI,EAAE,OAAO,CAAC,SAAS,KAAK,KAAK,CAAC;AAE9D,UAAM,YAAsB,CAAC;AAE7B,eAAW,QAAQ,OAAO;AACzB,UAAI;AACH,cAAM,QAAQ,KAAK,MAAM,IAAI;AAG7B,YAAI,MAAM,qBAAqB,QAAQ,MAAM,SAAS;AACrD,gBAAM,iBAAiB,sBAAsB,MAAM,OAAO;AAC1D,cAAI,gBAAgB;AACnB,sBAAU,KAAK,cAAc;AAAA,UAC9B;AAAA,QACD;AAAA,MACD,QAAQ;AAEP,eAAO,MAAM,6CAA6C;AAAA,MAC3D;AAAA,IACD;AAIA,WAAO,UAAU,MAAM,CAAC,YAAY;AAAA,EACrC,SAAS,OAAO;AAEf,QAAI,iBAAiB,SAAS,UAAU,SAAS,MAAM,SAAS,UAAU;AACzE,aAAO,MAAM,8BAA8B,cAAc;AAAA,IAC1D,OAAO;AACN,aAAO,MAAM,kCAAkC,KAAK;AAAA,IACrD;AACA,WAAO,CAAC;AAAA,EACT;AACD;AAaA,eAAsB,mBACrB,cACA,WACA,eAAe,GACU;AACzB,QAAM,iBAAiB,sBAAsB,cAAc,SAAS;AACpE,MAAI,CAAC,gBAAgB;AACpB,WAAO;AAAA,EACR;AAEA,SAAO,MAAM,2BAA2B,cAAc,EAAE;AAExD,QAAM,YAAY,MAAM,wBAAwB,gBAAgB,YAAY;AAE5E,MAAI,UAAU,WAAW,GAAG;AAC3B,WAAO;AAAA,EACR;AAIA,QAAM,qBAAqB,UACzB,QAAQ,EACR,IAAI,CAAC,SAAS,UAAU;AACxB,UAAM,SACL,UAAU,SAAS,IAChB,uBAAuB,QAAQ,CAAC,OAAO,UAAU,MAAM;AAAA;AAAA,IACvD;AACJ,WAAO,GAAG,MAAM,GAAG,OAAO;AAAA,EAC3B,CAAC,EACA,KAAK,aAAa;AAEpB,SAAO;AACR;;;AChJA,SAAS,gBAAgB,QAAgE;AACxF,MAAI,CAAC,OAAQ,QAAO;AAEpB,SAAO;AAAA,IACN,IAAI,OAAO,KAAK,OAAO,OAAO,EAAE,IAAI,OAAO;AAAA,IAC3C,aAAa,OAAO;AAAA;AAAA,IACpB,OAAO,OAAO;AAAA;AAAA,IACd,GAAI,OAAO,aAAa,EAAE,WAAW,OAAO,UAAU;AAAA,IACtD,GAAI,OAAO,OAAO,EAAE,KAAK,OAAO,IAAI;AAAA,EACrC;AACD;AAMO,SAAS,wBAAwB,KAAqB;AAC5D,QAAM,QAAQ,IAAI,MAAM,sBAAsB;AAC9C,MAAI,EAAC,+BAAQ,KAAI;AAChB,UAAM,IAAI,MAAM,uCAAuC,GAAG,EAAE;AAAA,EAC7D;AACA,SAAO,MAAM,CAAC;AACf;AAKO,IAAM,gCAAN,MAAuE;AAAA,EAAvE;AACN,SAAS,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMxB,MAAM,SAAS,OAA4C;AAC1D,UAAM,EAAE,QAAQ,kBAAkB,KAAK,IAAI;AAG3C,UAAM,cAAc,SAAS,QAAQ,EAAE;AACvC,QAAI,MAAM,WAAW,GAAG;AACvB,YAAM,IAAI,MAAM,gCAAgC,MAAM,qCAAqC;AAAA,IAC5F;AAGA,UAAM,SAAS,kBACZ,2EACA;AAuBH,UAAM,MAAM,MAAM,iBAAsC;AAAA,MACvD;AAAA,MACA;AAAA,MACA,OAAO,WAAW;AAAA,MAClB;AAAA,MACA;AAAA,IACD,CAAC;AAGD,UAAM,SAAsB;AAAA;AAAA,MAE3B,IAAI,OAAO,IAAI,MAAM;AAAA,MACrB,OAAO,IAAI;AAAA,MACX,MAAM,IAAI;AAAA,MACV,OAAO,IAAI;AAAA,MACX,KAAK,IAAI;AAAA,MACT,UAAU;AAAA;AAAA,MAGV,QAAQ,gBAAgB,IAAI,MAAM;AAAA;AAAA,MAGlC,GAAI,IAAI,aAAa;AAAA,QACpB,WAAW,IAAI,UAAU,IAAI,OAAK,gBAAgB,CAAC,CAAC,EAAE,OAAO,CAAC,MAA2B,MAAM,IAAI;AAAA,MACpG;AAAA,MACA,GAAI,IAAI,UAAU;AAAA,QACjB,QAAQ,IAAI;AAAA,MACb;AAAA;AAAA,MAGA,GAAI,IAAI,aAAa;AAAA,QACpB,WAAW,IAAI;AAAA,MAChB;AAAA,IACD;AAKA,QAAI,IAAI,aAAa,QAAW;AAC/B,aAAO,WAAW,IAAI,SAAS,IAAI,cAAY;AAAA,QAC9C,IAAI,wBAAwB,QAAQ,GAAG;AAAA,QACvC,MAAM,QAAQ;AAAA,QACd,WAAW,QAAQ;AAAA,QACnB,QAAQ,gBAAgB,QAAQ,MAAM;AAAA,QACtC,GAAI,QAAQ,aAAa,EAAE,WAAW,QAAQ,UAAU;AAAA,MACzD,EAAE;AAAA,IACH;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,WAAW,OAAsD;AACtE,UAAM,EAAE,UAAU,IAAI;AAKtB,UAAM,mBAAmB,SAAS,WAAW,EAAE;AAC/C,QAAI,MAAM,gBAAgB,GAAG;AAC5B,YAAM,IAAI,MAAM,8BAA8B,SAAS,uCAAuC;AAAA,IAC/F;AAcA,UAAM,MAAM,MAAM,iBAAwC;AAAA,MACzD;AAAA,MACA,sCAAsC,gBAAgB
;AAAA,MACtD;AAAA,MACA;AAAA,IACD,CAAC;AAGD,WAAO;AAAA,MACN,IAAI,OAAO,IAAI,EAAE;AAAA,MACjB,MAAM,IAAI;AAAA,MACV,QAAQ,gBAAgB,IAAI,IAAI;AAAA,MAChC,YAAY,IAAI;AAAA,MAChB,GAAI,IAAI,cAAc,EAAE,YAAY,IAAI,WAAW;AAAA;AAAA,MAEnD,GAAI,IAAI,YAAY,EAAE,UAAU,IAAI,SAAS;AAAA,MAC7C,GAAI,IAAI,aAAa,EAAE,WAAW,IAAI,UAAU;AAAA,IACjD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,QAAQ,MAAM,KAAK,IAAI;AAG/B,UAAM,YAAY,SAAS,QAAQ,EAAE;AACrC,QAAI,MAAM,SAAS,GAAG;AACrB,YAAM,IAAI,MAAM,kBAAkB,IAAI,YAAY,MAAM,+BAA+B;AAAA,IACxF;AAGA,UAAM,SACL,SAAS,UACN,MAAM,mBAAmB,WAAW,IAAI,IACxC,MAAM,gBAAgB,WAAW,IAAI;AAGzC,WAAO;AAAA,MACN,GAAG;AAAA,MACH,IAAI,OAAO,OAAO,EAAE;AAAA,IACrB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,WAAW,KAAK,IAAI;AAK5B,UAAM,mBAAmB,SAAS,WAAW,EAAE;AAC/C,QAAI,MAAM,gBAAgB,GAAG;AAC5B,YAAM,IAAI,MAAM,8BAA8B,SAAS,uCAAuC;AAAA,IAC/F;AAGA,UAAM,SAAS,MAAM,mBAAmB,kBAAkB,IAAI;AAG9D,WAAO;AAAA,MACN,GAAG;AAAA,MACH,IAAI,OAAO,OAAO,EAAE;AAAA,IACrB;AAAA,EACD;AACD;;;AC5PA,SAAS,sBAAsB;AAC/B,SAAS,QAAAA,OAAM,SAAS,UAAU,eAAe;AAkB1C,IAAM,wBAAN,MAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQlC,OAAO,uBAAuB,MAAsB;AACnD,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAIA,QAAI,eAAe;AACnB,QAAI,cAAc;AAElB,WAAO,iBAAiB,aAAa;AACpC,qBAAe;AACf,oBAAc,KAAK,kBAAkB,WAAW;AAAA,IACjD;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAe,kBAAkB,MAAsB;AAGtD,UAAM,eAAe;AAErB,WAAO,KAAK,QAAQ,cAAc,CAAC,QAAQ,SAAS,YAAY;AAE/D,YAAM,eAAe,KAAK,UAAU,OAAO;AAI3C,YAAM,eAAe,KAAK,aAAa,OAAO;AAI9C,UAAI,cAAc;AACjB,eAAO,OAAO,YAAY;AAAA;AAAA,EAAO,YAAY;AAAA;AAAA;AAAA,MAC9C,OAAO;AAEN,eAAO,OAAO,YAAY;AAAA;AAAA;AAAA,MAC3B;AAAA,IACD,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,UAAU,MAAsB;AAC9C,WAAO,KACL,KAAK,EACL,QAAQ,SAAS,GAAG,EACpB,QAAQ,SAAS,GAAG,EACpB,QAAQ,UAAU,GAAG,EACrB,QAAQ,WAAW,GAAG,EACtB,QAAQ,UAAU,GAAG;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAe,aAAa,SAAyB;AACpD,QAAI,CAAC,SAAS;AACb,aAAO;AAAA,IACR;AAGA,QAAI,UAAU,QAAQ,KAAK;AAG3B,cAAU,QAAQ,QAAQ,WAAW,MAAM;AAE3C,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,iBAAiB,MAAuB;AAC9C,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAEA,UAAM,eAAe;AACrB,WAAO,aAAa,KAAK,IAAI;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,OAAO,yBAAyB,MAAsB;AACrD,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAMA,UAAM,kBAAkB;AAExB,WAAO,KAAK,QAAQ,iBAAiB,CAAC,QAAQ,UAAU,YAAY;AAGnE,aAAO,QAAQ,KAAK;AAAA,IACrB,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,OAAO,gBAAgB,MAAsB;AAC5C,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAGA,SAAK,cAAc,SAAS,IAAI;AAGhC,QAAI,YAAY;AAIhB,gBAAY,KAAK,yBAAyB,SAAS;AAGnD,gBAAY,KAAK,uBAAuB,SAAS;AAGjD,SAAK,cAAc,UAAU,SAAS;AAEtC,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,cAAc,OAAe,SAAuB;AAClE,UAAM,cAAc,QAAQ,IAAI;AAChC,QAAI,CAAC,aAAa;AACjB;AAAA,IACD;AAEA,QAAI;AACH,YAAM,kBAAkB,KAAK,sBAAsB,WAAW;AAC9D,YAAM,aAAY,oBAAI,KAAK,GAAE,YAAY;AACzC,YAAM,YAAY;AAElB,YAAM,WAAW,GAAG,SAAS;AAAA,GAAM,SAAS,gBAAgB,KAAK;AAAA,EAAK,SAAS;AAAA,EAAK,KAAK;AAAA,EAAM,OAAO;AAAA;AAAA;AAEtG,qBAAe,iBAAiB,UAAU,OAAO;AAAA,IAClD,QAAQ;AAAA,IAGR;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAe,sBAAsB,aAA6B;AACjE,UAAM,MAAM,QAAQ,WAAW;AAC/B,UAAM,MAAM,QAAQ,WAAW;AAC/B,UAAM,OAAO,SAAS,aAAa,GAAG;AAGtC,UAAM,MAAM,oBAAI,KAAK;AACrB,UAAM,YAAY;AAAA,MACjB,IAAI,YAAY;AAAA,MAChB,OAAO,IAAI,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,MAC1C,OAAO,IAAI,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,IACtC,EAAE,KAAK,EAAE,IAAI,MAAM;AAAA,MAClB,OAAO,IAAI,SAAS,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,MACtC,OAAO,IAAI,WAAW,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,MACxC,OAAO,IAAI,WAAW,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,IACzC,EAAE,KAAK,EAAE;AAET,WAAOA,MAAK,KAAK,GAAG,IAAI,IAAI,SAAS,GAAG,GAAG,EAAE;AAAA,EAC9C;AACD;;;ACjMO,IAAM,gCAAN,MAAuE;AAAA,EAAvE
;AACN,SAAS,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA,EAKxB,MAAM,SAAS,OAA4C;AAC1D,UAAM,EAAE,QAAQ,kBAAkB,KAAK,IAAI;AAG3C,UAAM,MAAM,MAAM,iBAAiB,MAAM;AAGzC,UAAM,QAAQ,IAAI,UAAU,IAAI,MAAM,YAAY,EAAE,SAAS,MAAM,KAAK,IAAI,MAAM,YAAY,EAAE,SAAS,WAAW,KAAK,IAAI,MAAM,YAAY,EAAE,SAAS,UAAU,KACjK,WACA;AAGH,UAAM,SAAsB;AAAA,MAC3B,IAAI,IAAI;AAAA,MACR,OAAO,IAAI;AAAA,MACX,MAAM,IAAI,eAAe;AAAA,MACzB;AAAA,MACA,KAAK,IAAI;AAAA,MACT,UAAU;AAAA,MACV,QAAQ;AAAA;AAAA;AAAA,MAGR,aAAa,IAAI;AAAA,MACjB,WAAW,IAAI;AAAA,MACf,WAAW,IAAI;AAAA,IAChB;AAGA,QAAI,iBAAiB;AACpB,UAAI;AACH,cAAM,WAAW,MAAM,KAAK,mBAAmB,MAAM;AACrD,YAAI,UAAU;AACb,iBAAO,WAAW;AAAA,QACnB;AAAA,MACD,QAAQ;AAAA,MAER;AAAA,IACD;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,mBAAmB,YAAsD;AACtF,QAAI;AACH,YAAM,WAAW,MAAM,yBAAyB,UAAU;AAE1D,aAAO,SAAS,IAAI,cAAY;AAAA,QAC/B,IAAI,QAAQ;AAAA,QACZ,MAAM,QAAQ;AAAA,QACd,WAAW,QAAQ;AAAA,QACnB,QAAQ;AAAA;AAAA,QACR,GAAI,QAAQ,aAAa,EAAE,WAAW,QAAQ,UAAU;AAAA,MACzD,EAAE;AAAA,IACH,QAAQ;AACP,aAAO,CAAC;AAAA,IACT;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,OAAsD;AACtE,UAAM,EAAE,UAAU,IAAI;AAEtB,UAAM,MAAM,MAAM,iBAAiB,SAAS;AAE5C,WAAO;AAAA,MACN,IAAI,IAAI;AAAA,MACR,MAAM,IAAI;AAAA,MACV,QAAQ;AAAA;AAAA,MACR,YAAY,IAAI;AAAA,IACjB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,QAAQ,KAAK,IAAI;AAKzB,UAAM,gBAAgB,sBAAsB,gBAAgB,IAAI;AAEhE,UAAM,SAAS,MAAM,oBAAoB,QAAQ,aAAa;AAE9D,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,MACZ,YAAY,OAAO;AAAA,IACpB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,WAAW,KAAK,IAAI;AAG5B,UAAM,gBAAgB,sBAAsB,gBAAgB,IAAI;AAEhE,UAAM,SAAS,MAAM,oBAAoB,WAAW,aAAa;AAEjE,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,MACZ,YAAY,OAAO;AAAA,IACpB;AAAA,EACD;AACD;;;ACxIO,IAAM,iCAAN,MAAqC;AAAA;AAAA;AAAA;AAAA,EAI3C,OAAO,OAAO,UAAkD;AAC/D,YAAQ,UAAU;AAAA,MACjB,KAAK;AACJ,eAAO,IAAI,8BAA8B;AAAA,MAC1C,KAAK;AACJ,eAAO,IAAI,8BAA8B;AAAA,MAC1C;AACC,cAAM,IAAI,MAAM,0CAA0C,QAAQ,EAAE;AAAA,IACtE;AAAA,EACD;AACD;;;ALAA,IAAM,aAAa,KAAK,KAAK,GAAG,QAAQ,GAAG,WAAW,YAAY,QAAQ;AAW1E,SAAS,YAAY,UAA0B;AAC9C,MAAI,OAAO,SAAS,QAAQ,WAAW,EAAE;AACzC,SAAO,KAAK,QAAQ,UAAU,KAAK;AACnC,SAAO,KAAK,QAAQ,mBAAmB,GAAG;AAC1C,SAAO,GAAG,IAAI;AACf;AAMA,eAAe,cAAc,cAA8C;AAC1E,MAAI;AACH,UAAM,WAAW,KAAK,KAAK,YAAY,YAAY,YAAY,CAAC;AAChE,QAAI,MAAM,GAAG,WAAW,QAAQ,GAAG;AAClC,YAAM,UAAU,MAAM,GAAG,SAAS,UAAU,MAAM;AAClD,YAAM,QAAQ,KAAK,MAAM,OAAO;AAGhC,YAAM,UAAU,MAAM,SAAS,QAAQ,MAAM,SAAS;AACtD,YAAM,gBAAgB,MAAM,eAAe,QAAQ,MAAM,eAAe;AACxE,YAAM,aAAa,MAAM,QAAQ,MAAM,OAAO,KAAK,MAAM,QAAQ,SAAS;AAC1E,YAAM,eAAe,MAAM,QAAQ,MAAM,SAAS,KAAK,MAAM,UAAU,SAAS;AAChF,YAAM,aAAa,WAAW,iBAAiB,cAAc;AAE7D,UAAI,YAAY;AAGf,cAAM,cAA2B;AAAA,UAChC;AAAA,UACA,MAAM,MAAM,QAAQ;AAAA,UACpB,YAAY,MAAM,cAAc;AAAA,UAChC,SAAS,MAAM,WAAW,CAAC;AAAA,UAC3B,WAAW,MAAM,aAAa,CAAC;AAAA,QAChC;AACA,eAAO,oBAAoB,WAAW;AAAA,MACvC;AAAA,IACD;AACA,WAAO;AAAA,EACR,QAAQ;AAEP,WAAO;AAAA,EACR;AACD;AAyBO,IAAM,wBAAN,MAA4B;AAAA,EAKlC,YACC,iBACA,iBACA,iBACC;AACD,SAAK,kBAAkB,mBAAmB,IAAI,sBAAsB;AACpE,SAAK,kBAAkB,mBAAmB,IAAI,gBAAgB;AAC9D,SAAK,kBAAkB,mBAAmB,IAAI,gBAAgB;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,uBAAuB,OAA2C;AACvE,QAAI;AAEH,UAAI,MAAM,aAAa,UAAU;AAChC,eAAO,MAAM,+DAA+D;AAC5E;AAAA,MACD;AAGA,YAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,MAAM,YAAY;AAC3E,YAAM,aAAY,qCAAU,cAAa,+BAA+B,MAAM,YAAY;AAG1F,YAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,MAAM,YAAY;AAC3E,UAAI,CAAC,KAAK,sBAAsB,MAAM,UAAU,QAAQ,GAAG;AAC1D,eAAO,MAAM,6DAA6D,MAAM,QAAQ,WAAW;AACnG;AAAA,MACD;AAEA,aAAO,KAAK,+BAA+B;AAG3C,aAAO,MAAM,kDAAkD,SAAS,EAAE;AAC1E,YAAM,mBAAmB,MAAM,mBAAmB,MAAM,cAAc,SAAS;AAC/E,UAAI,kBAAkB;AACrB,eAAO,MAAM,4BAA4B,iBAAiB,MAAM,SAAS;AAAA,MAC1E,OAAO;AACN,eAAO,MAAM,kDAAkD;AAAA,MAChE;AAGA,YAAM,YAAY,MAAM,cAAc,
MAAM,YAAY;AACxD,UAAI,WAAW;AACd,eAAO,MAAM,qBAAqB,UAAU,MAAM,SAAS;AAAA,MAC5D,OAAO;AACN,eAAO,MAAM,qBAAqB;AAAA,MACnC;AAGA,YAAM,SAAS,MAAM,KAAK,gBAAgB,UAAU,mBAAmB;AAAA,QACtE,cAAc,OAAO,MAAM,WAAW;AAAA,QACtC,aAAa,MAAM;AAAA,QACnB,WAAW,MAAM;AAAA,QACjB,mBAAmB,oBAAoB;AAAA,QACvC,YAAY,aAAa;AAAA,MAC1B,CAAC;AAED,aAAO,MAAM,8BAA8B,MAAM;AAIjD,YAAM,eAAe,KAAK,gBAAgB,gBAAgB,QAAQ;AAClE,YAAM,gBAAgB,MAAM,aAAa,QAAQ;AAAA,QAChD,UAAU;AAAA,QACV,OAAO;AAAA,QACP;AAAA;AAAA,MACD,CAAC;AAED,UAAI,CAAC,iBAAiB,OAAO,kBAAkB,YAAY,cAAc,KAAK,MAAM,IAAI;AACvF,eAAO,KAAK,kDAAkD;AAC9D;AAAA,MACD;AAEA,YAAM,UAAU,cAAc,KAAK;AAGnC,UAAI,QAAQ,SAAS,KAAK;AACzB,eAAO,KAAK,0CAA0C;AACtD;AAAA,MACD;AAGA,YAAM,KAAK,mBAAmB,MAAM,aAAa,SAAS,UAAU,MAAM,cAAc,MAAM,QAAQ;AAEtG,YAAM,oBAAoB,MAAM,WAAW,OAAO,MAAM,QAAQ,KAAK;AACrE,aAAO,QAAQ,6BAA6B,iBAAiB,EAAE;AAAA,IAChE,SAAS,OAAO;AAEf,YAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC1E,aAAO,KAAK,uCAAuC,YAAY,EAAE;AACjE,aAAO,MAAM,6CAA6C,EAAE,MAAM,CAAC;AAAA,IACpE;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,gBACL,cACA,YACA,UACA,aACgC;AAEhC,UAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,YAAY;AACrE,UAAM,aAAY,qCAAU,cAAa,+BAA+B,YAAY;AAGpF,UAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,YAAY;AAErE,WAAO,KAAK,+BAA+B;AAG3C,WAAO,MAAM,kDAAkD,SAAS,EAAE;AAC1E,UAAM,mBAAmB,MAAM,mBAAmB,cAAc,SAAS;AACzE,QAAI,kBAAkB;AACrB,aAAO,MAAM,4BAA4B,iBAAiB,MAAM,SAAS;AAAA,IAC1E,OAAO;AACN,aAAO,MAAM,kDAAkD;AAAA,IAChE;AAGA,UAAM,YAAY,MAAM,cAAc,YAAY;AAClD,QAAI,WAAW;AACd,aAAO,MAAM,qBAAqB,UAAU,MAAM,SAAS;AAAA,IAC5D,OAAO;AACN,aAAO,MAAM,qBAAqB;AAAA,IACnC;AAGA,UAAM,SAAS,MAAM,KAAK,gBAAgB,UAAU,mBAAmB;AAAA,MACtE,cAAc,gBAAgB,SAAY,OAAO,WAAW,IAAI;AAAA,MAChE,aAAa;AAAA,MACb,WAAW;AAAA,MACX,mBAAmB,oBAAoB;AAAA,MACvC,YAAY,aAAa;AAAA,IAC1B,CAAC;AAED,WAAO,MAAM,8BAA8B,MAAM;AAGjD,UAAM,eAAe,KAAK,gBAAgB,gBAAgB,QAAQ;AAClE,UAAM,gBAAgB,MAAM,aAAa,QAAQ;AAAA,MAChD,UAAU;AAAA,MACV,OAAO;AAAA,MACP;AAAA,IACD,CAAC;AAED,QAAI,CAAC,iBAAiB,OAAO,kBAAkB,YAAY,cAAc,KAAK,MAAM,IAAI;AACvF,YAAM,IAAI,MAAM,kDAAkD;AAAA,IACnE;AAEA,UAAM,UAAU,cAAc,KAAK;AAGnC,QAAI,QAAQ,SAAS,KAAK;AACzB,YAAM,IAAI,MAAM,wDAAwD;AAAA,IACzE;AAEA,WAAO;AAAA,MACN;AAAA,MACA;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,YACL,aACA,SACA,cACgB;AAChB,UAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,YAAY;AACrE,UAAM,KAAK,mBAAmB,aAAa,SAAS,UAAU,gBAAgB,QAAQ,IAAI,CAAC;AAC3F,WAAO,QAAQ,iCAAiC;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,sBACC,UACA,UACU;AA7TZ;AA+TE,QAAI,aAAa,UAAU;AAC1B,aAAO;AAAA,IACR;AAGA,UAAM,iBACL,aAAa,WACV,cAAS,cAAT,mBAAoB,SACpB,cAAS,cAAT,mBAAoB;AAGxB,YAAO,iDAAgB,oBAAmB;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,iBAAiB,SAAiB,cAAuC;AAC9E,UAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,YAAY;AACrE,WAAO,KAAK,6BAA6B,SAAS,UAAU,YAAY;AAAA,EACzE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,6BACL,SACA,UACA,cACkB;AAClB,UAAM,qBAAqB,SAAS,eAAe;AACnD,WAAO,MAAM,oCAAoC,SAAS,WAAW,EAAE;AACvE,WAAO,MAAM,uCAAuC,kBAAkB,EAAE;AAExE,QAAI,wBAAwB;AAC5B,QAAI,uBAAuB,MAAM;AAChC,8BAAwB;AACxB,aAAO,MAAM,wBAAwB;AAAA,IACtC,WAAW,uBAAuB,gBAAgB;AAEjD,8BAAwB,MAAM,mBAAmB,YAAY;AAC7D,aAAO,MAAM,iDAAiD,qBAAqB,EAAE;AAAA,IACtF,OAAO;AACN,aAAO,MAAM,kBAAkB;AAAA,IAChC;AAGA,WAAO,MAAM,4BAA4B,qBAAqB,EAAE;AAChE,QAAI,uBAAuB;AAC1B,aAAO,MAAM,wCAAwC;AACrD,aAAO,GAAG,OAAO;AAAA;AAAA;AAAA;AAAA,IAClB;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,mBACb,aACA,SACA,UACA,cACA,UACgB;AA/YlB;AAiZE,UAAM,iBAAgB,cAAS,oBAAT,mBAA0B,aAAY;AAC5D,UAAM,WAAW,+BAA+B,OAAO,YAAY;AAGnE,UAAM,eAAe,MAAM,KAAK,6BAA6B,SAAS,UAAU,YAAY;AAG5F,UAAM,eAAe,YAAY;AACjC,UAAM,aAAa,aAAa,SAAY,OAAO;AAGnD,UAAM,SAAS,cAAc;AAAA,MAC5B,QAAQ,OAAO,YAAY;AAAA,MAC3B,MAAM;AAAA,MACN,MAA
M;AAAA,IACP,CAAC;AAAA,EACF;AACD;","names":["join"]}
@@ -1 +0,0 @@
{"version":3,"sources":["../src/lib/PRManager.ts"],"sourcesContent":["import { executeGhCommand } from '../utils/github.js'\nimport { launchClaude, detectClaudeCli } from '../utils/claude.js'\nimport { getEffectivePRTargetRemote, getConfiguredRepoFromSettings, parseGitRemotes } from '../utils/remote.js'\nimport { openBrowser } from '../utils/browser.js'\nimport { getLogger } from '../utils/logger-context.js'\nimport type { IloomSettings } from './SettingsManager.js'\n\ninterface ExistingPR {\n\tnumber: number\n\turl: string\n}\n\ninterface PRCreationResult {\n\turl: string\n\tnumber: number\n\twasExisting: boolean\n}\n\nexport class PRManager {\n\tconstructor(private settings: IloomSettings) {\n\t\t// Uses getLogger() for all logging operations\n\t}\n\n\t/**\n\t * Check if a PR already exists for the given branch\n\t * @param branchName - Branch to check\n\t * @param cwd - Working directory\n\t * @returns Existing PR info or null if none found\n\t */\n\tasync checkForExistingPR(branchName: string, cwd?: string): Promise<ExistingPR | null> {\n\t\ttry {\n\t\t\tconst prList = await executeGhCommand<Array<{ number: number; url: string }>>(\n\t\t\t\t['pr', 'list', '--head', branchName, '--state', 'open', '--json', 'number,url'],\n\t\t\t\tcwd ? { cwd } : undefined\n\t\t\t)\n\n\t\t\tif (prList.length > 0) {\n\t\t\t\treturn prList[0] ?? null // Return first match\n\t\t\t}\n\n\t\t\treturn null\n\t\t} catch (error) {\n\t\tgetLogger().debug('Error checking for existing PR', { error })\n\t\t\treturn null\n\t\t}\n\t}\n\n\t/**\n\t * Generate PR body using Claude if available, otherwise use simple template\n\t * @param issueNumber - Issue number to include in body\n\t * @param worktreePath - Path to worktree for context\n\t * @returns PR body markdown\n\t */\n\tasync generatePRBody(issueNumber: string | number | undefined, worktreePath: string): Promise<string> {\n\t\t// Try Claude first for rich body generation\n\t\tconst hasClaudeCli = await detectClaudeCli()\n\n\t\tif (hasClaudeCli) {\n\t\t\ttry {\n\t\t\t\tconst prompt = this.buildPRBodyPrompt(issueNumber)\n\n\t\t\t\tconst body = await launchClaude(prompt, {\n\t\t\t\t\theadless: true,\n\t\t\t\t\taddDir: worktreePath,\n\t\t\t\t\ttimeout: 30000,\n\t\t\t\t})\n\n\t\t\t\tif (body && typeof body === 'string' && body.trim()) {\n\t\t\t\t\tconst sanitized = this.sanitizeClaudeOutput(body)\n\t\t\t\t\tif (sanitized) {\n\t\t\t\t\t\treturn sanitized\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} catch (error) {\n\t\t\tgetLogger().debug('Claude PR body generation failed, using template', { error })\n\t\t\t}\n\t\t}\n\n\t\t// Fallback to simple template\n\t\tlet body = 'This PR contains changes from the iloom workflow.\\n\\n'\n\n\t\tif (issueNumber) {\n\t\t\tbody += `Fixes #${issueNumber}`\n\t\t}\n\n\t\treturn body\n\t}\n\n\t/**\n\t * Build structured XML prompt for PR body generation\n\t * Uses XML format for clear task definition and output expectations\n\t */\n\tprivate buildPRBodyPrompt(issueNumber?: string | number): string {\n\t\tconst issueContext = issueNumber\n\t\t\t? `\\n<IssueContext>\nThis PR is associated with GitHub issue #${issueNumber}.\nInclude \"Fixes #${issueNumber}\" at the end of the body on its own line.\n</IssueContext>`\n\t\t\t: ''\n\n\t\treturn `<Task>\nYou are a software engineer writing a pull request body for this repository.\nExamine the changes in the git repository and generate a concise, professional PR description.\n</Task>\n\n<Requirements>\n<Format>Write 2-3 sentences summarizing what was changed and why.${issueNumber ? 
`\\n\\nEnd with \"Fixes #${issueNumber}\" on its own line.` : ''}</Format>\n<Tone>Professional and concise</Tone>\n<Focus>Summarize the changes and their purpose</Focus>\n<NoMeta>CRITICAL: Do NOT include ANY explanatory text, analysis, or meta-commentary. Output ONLY the raw PR body text.</NoMeta>\n<Examples>\nGood: \"Add user authentication with JWT tokens to secure the API endpoints. This includes login and registration endpoints with proper password hashing.\n\nFixes #42\"\nGood: \"Fix navigation bug in sidebar menu that caused incorrect highlighting on nested routes.\"\nBad: \"Here's the PR body:\\n\\n---\\n\\nAdd user authentication...\"\nBad: \"Based on the changes, I'll write: Fix navigation bug...\"\n</Examples>\n${issueContext}\n</Requirements>\n\n<Output>\nIMPORTANT: Your entire response will be used directly as the GitHub pull request body.\nDo not include any explanatory text, headers, or separators before or after the body.\nStart your response immediately with the PR body text.\n</Output>`\n\t}\n\n\t/**\n\t * Sanitize Claude output to remove meta-commentary and clean formatting\n\t * Handles cases where Claude includes explanatory text despite instructions\n\t */\n\tprivate sanitizeClaudeOutput(rawOutput: string): string {\n\t\tlet cleaned = rawOutput.trim()\n\n\t\t// Remove common meta-commentary patterns (case-insensitive)\n\t\tconst metaPatterns = [\n\t\t\t/^.*?based on.*?changes.*?:/i,\n\t\t\t/^.*?looking at.*?files.*?:/i,\n\t\t\t/^.*?examining.*?:/i,\n\t\t\t/^.*?analyzing.*?:/i,\n\t\t\t/^.*?i'll.*?generate.*?:/i,\n\t\t\t/^.*?let me.*?:/i,\n\t\t\t/^.*?here.*?is.*?(?:the\\s+)?(?:pr|pull request).*?body.*?:/i,\n\t\t\t/^.*?here's.*?(?:the\\s+)?(?:pr|pull request).*?body.*?:/i,\n\t\t]\n\n\t\tfor (const pattern of metaPatterns) {\n\t\t\tcleaned = cleaned.replace(pattern, '').trim()\n\t\t}\n\n\t\t// Remove leading separator lines (---, ===, etc.)\n\t\tcleaned = cleaned.replace(/^[-=]{3,}\\s*/m, '').trim()\n\n\t\t// Extract content after separators only if it looks like meta-commentary\n\t\tif (cleaned.includes(':')) {\n\t\t\tconst colonIndex = cleaned.indexOf(':')\n\t\t\tconst beforeColon = cleaned.substring(0, colonIndex).trim().toLowerCase()\n\n\t\t\t// Only split if the text before colon looks like meta-commentary\n\t\t\tconst metaIndicators = [\n\t\t\t\t'here is the pr body',\n\t\t\t\t'here is the pull request body',\n\t\t\t\t'pr body',\n\t\t\t\t'pull request body',\n\t\t\t\t'here is',\n\t\t\t\t\"here's\",\n\t\t\t\t'the body should be',\n\t\t\t\t'i suggest',\n\t\t\t\t'my suggestion'\n\t\t\t]\n\n\t\t\tconst isMetaCommentary = metaIndicators.some(indicator => beforeColon.includes(indicator))\n\n\t\t\tif (isMetaCommentary) {\n\t\t\t\tconst afterColon = cleaned.substring(colonIndex + 1).trim()\n\t\t\t\t// Remove leading separator after colon\n\t\t\t\tconst afterSeparator = afterColon.replace(/^[-=]{3,}\\s*/m, '').trim()\n\t\t\t\tif (afterSeparator && afterSeparator.length > 10) {\n\t\t\t\t\tcleaned = afterSeparator\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t// Remove quotes if the entire message is wrapped in them\n\t\tif ((cleaned.startsWith('\"') && cleaned.endsWith('\"')) ||\n\t\t\t(cleaned.startsWith(\"'\") && cleaned.endsWith(\"'\"))) {\n\t\t\tcleaned = cleaned.slice(1, -1).trim()\n\t\t}\n\n\t\treturn cleaned\n\t}\n\n\t/**\n\t * Create a GitHub PR for the branch\n\t * @param branchName - Branch to create PR from (used as --head)\n\t * @param title - PR title\n\t * @param body - PR body\n\t * @param baseBranch - Base branch to target (usually main/master)\n\t * @param cwd - Working 
directory\n\t * @returns PR URL\n\t */\n\tasync createPR(\n\t\tbranchName: string,\n\t\ttitle: string,\n\t\tbody: string,\n\t\tbaseBranch: string,\n\t\tcwd?: string\n\t): Promise<string> {\n\t\ttry {\n\t\t\t// Get the target remote for the PR\n\t\t\tconst targetRemote = await getEffectivePRTargetRemote(this.settings, cwd)\n\n\t\t\t// Determine the correct --head value\n\t\t\t// For fork workflows (target != origin), we need \"username:branch\" format\n\t\t\t// See: https://github.com/cli/cli/issues/2691\n\t\t\tlet headValue = branchName\n\n\t\t\tif (targetRemote !== 'origin') {\n\t\t\t\t// Fork workflow: need to specify the head as \"owner:branch\"\n\t\t\t\t// Get the owner of the origin remote (where we pushed the branch)\n\t\t\t\tconst remotes = await parseGitRemotes(cwd)\n\t\t\t\tconst originRemote = remotes.find(r => r.name === 'origin')\n\n\t\t\t\tif (originRemote) {\n\t\t\t\t\theadValue = `${originRemote.owner}:${branchName}`\n\t\t\t\tgetLogger().debug(`Fork workflow detected, using head: ${headValue}`)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Build gh pr create command\n\t\t\t// Note: gh pr create returns a plain URL string, not JSON\n\t\t\tconst args = ['pr', 'create', '--head', headValue, '--title', title, '--body', body, '--base', baseBranch]\n\n\t\t\t// If target remote is not 'origin', we need to specify the repo\n\t\t\tif (targetRemote !== 'origin') {\n\t\t\t\tconst repo = await getConfiguredRepoFromSettings(this.settings, cwd)\n\t\t\t\targs.push('--repo', repo)\n\t\t\t}\n\n\t\t\t// gh pr create returns the PR URL as plain text (not JSON)\n\t\t\tconst result = await executeGhCommand<string>(args, cwd ? { cwd } : undefined)\n\n\t\t\t// Result is a string URL like \"https://github.com/owner/repo/pull/123\"\n\t\t\tconst url = typeof result === 'string' ? result.trim() : String(result).trim()\n\n\t\t\tif (!url.includes('github.com') || !url.includes('/pull/')) {\n\t\t\t\tthrow new Error(`Unexpected response from gh pr create: ${url}`)\n\t\t\t}\n\n\t\t\treturn url\n\t\t} catch (error) {\n\t\t\tconst errorMessage = error instanceof Error ? 
error.message : String(error)\n\n\t\t\t// Provide helpful error message for common GraphQL errors\n\t\t\tif (errorMessage.includes(\"Head sha can't be blank\") || errorMessage.includes(\"No commits between\")) {\n\t\t\t\tthrow new Error(\n\t\t\t\t\t`Failed to create pull request: ${errorMessage}\\n\\n` +\n\t\t\t\t\t`This error typically occurs when:\\n` +\n\t\t\t\t\t` - The branch was not fully pushed to the remote\\n` +\n\t\t\t\t\t` - There's a race condition between push and PR creation\\n` +\n\t\t\t\t\t` - The branch has no commits ahead of the base branch\\n\\n` +\n\t\t\t\t\t`Try running: git push -u origin ${branchName}\\n` +\n\t\t\t\t\t`Then retry: il finish`\n\t\t\t\t)\n\t\t\t}\n\n\t\t\tthrow new Error(`Failed to create pull request: ${errorMessage}`)\n\t\t}\n\t}\n\n\t/**\n\t * Open PR URL in browser\n\t * @param url - PR URL to open\n\t */\n\tasync openPRInBrowser(url: string): Promise<void> {\n\t\ttry {\n\t\t\tawait openBrowser(url)\n\t\tgetLogger().debug('Opened PR in browser', { url })\n\t\t} catch (error) {\n\t\t\t// Don't fail the whole operation if browser opening fails\n\t\tgetLogger().warn('Failed to open PR in browser', { error })\n\t\t}\n\t}\n\n\t/**\n\t * Complete PR workflow: check for existing, create if needed, optionally open in browser\n\t * @param branchName - Branch to create PR from\n\t * @param title - PR title\n\t * @param issueNumber - Optional issue number for body generation\n\t * @param baseBranch - Base branch to target\n\t * @param worktreePath - Path to worktree\n\t * @param openInBrowser - Whether to open PR in browser\n\t * @returns PR creation result\n\t */\n\tasync createOrOpenPR(\n\t\tbranchName: string,\n\t\ttitle: string,\n\t\tissueNumber: string | number | undefined,\n\t\tbaseBranch: string,\n\t\tworktreePath: string,\n\t\topenInBrowser: boolean\n\t): Promise<PRCreationResult> {\n\t\t// Check for existing PR\n\t\tconst existingPR = await this.checkForExistingPR(branchName, worktreePath)\n\n\t\tif (existingPR) {\n\t\tgetLogger().info(`Pull request already exists: ${existingPR.url}`)\n\n\t\t\tif (openInBrowser) {\n\t\t\t\tawait this.openPRInBrowser(existingPR.url)\n\t\t\t}\n\n\t\t\treturn {\n\t\t\t\turl: existingPR.url,\n\t\t\t\tnumber: existingPR.number,\n\t\t\t\twasExisting: true,\n\t\t\t}\n\t\t}\n\n\t\t// Generate PR body\n\t\tconst body = await this.generatePRBody(issueNumber, worktreePath)\n\n\t\t// Create new PR\n\tgetLogger().info('Creating pull request...')\n\t\tconst url = await this.createPR(branchName, title, body, baseBranch, worktreePath)\n\n\t\t// Extract PR number from URL\n\t\tconst prNumber = this.extractPRNumberFromUrl(url)\n\n\t\tif (openInBrowser) {\n\t\t\tawait this.openPRInBrowser(url)\n\t\t}\n\n\t\treturn {\n\t\t\turl,\n\t\t\tnumber: prNumber,\n\t\t\twasExisting: false,\n\t\t}\n\t}\n\n\t/**\n\t * Extract PR number from GitHub PR URL\n\t * @param url - PR URL (e.g., https://github.com/owner/repo/pull/123)\n\t * @returns PR number\n\t */\n\tprivate extractPRNumberFromUrl(url: string): number {\n\t\tconst match = url.match(/\\/pull\\/(\\d+)/)\n\t\tif (match?.[1]) {\n\t\t\treturn parseInt(match[1], 10)\n\t\t}\n\t\tthrow new Error(`Could not extract PR number from URL: ${url}`)\n\t}\n\n\t/**\n\t * Create a draft PR for the branch\n\t * Used by github-draft-pr mode during il start\n\t * @param branchName - Branch to create PR from (used as --head)\n\t * @param title - PR title\n\t * @param body - PR body\n\t * @param cwd - Working directory\n\t * @returns PR URL and number\n\t */\n\tasync createDraftPR(\n\t\tbranchName: 
string,\n\t\ttitle: string,\n\t\tbody: string,\n\t\tcwd?: string\n\t): Promise<{ url: string; number: number }> {\n\t\ttry {\n\t\t\t// Get the target remote for the PR\n\t\t\tconst targetRemote = await getEffectivePRTargetRemote(this.settings, cwd)\n\n\t\t\t// Determine the correct --head value\n\t\t\t// For fork workflows (target != origin), we need \"username:branch\" format\n\t\t\tlet headValue = branchName\n\n\t\t\tif (targetRemote !== 'origin') {\n\t\t\t\t// Fork workflow: need to specify the head as \"owner:branch\"\n\t\t\t\tconst remotes = await parseGitRemotes(cwd)\n\t\t\t\tconst originRemote = remotes.find(r => r.name === 'origin')\n\n\t\t\t\tif (originRemote) {\n\t\t\t\t\theadValue = `${originRemote.owner}:${branchName}`\n\t\t\t\t\tgetLogger().debug(`Fork workflow detected, using head: ${headValue}`)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Build gh pr create command with --draft flag\n\t\t\t// Omit --base to let GitHub use the repository's default branch (main, master, etc.)\n\t\t\tconst args = ['pr', 'create', '--head', headValue, '--title', title, '--body', body, '--draft']\n\n\t\t\t// If target remote is not 'origin', we need to specify the repo\n\t\t\tif (targetRemote !== 'origin') {\n\t\t\t\tconst repo = await getConfiguredRepoFromSettings(this.settings, cwd)\n\t\t\t\targs.push('--repo', repo)\n\t\t\t}\n\n\t\t\t// gh pr create returns the PR URL as plain text (not JSON)\n\t\t\tconst result = await executeGhCommand<string>(args, cwd ? { cwd } : undefined)\n\n\t\t\t// Result is a string URL like \"https://github.com/owner/repo/pull/123\"\n\t\t\tconst url = typeof result === 'string' ? result.trim() : String(result).trim()\n\n\t\t\tif (!url.includes('github.com') || !url.includes('/pull/')) {\n\t\t\t\tthrow new Error(`Unexpected response from gh pr create --draft: ${url}`)\n\t\t\t}\n\n\t\t\tconst number = this.extractPRNumberFromUrl(url)\n\n\t\t\treturn { url, number }\n\t\t} catch (error) {\n\t\t\tconst errorMessage = error instanceof Error ? error.message : String(error)\n\n\t\t\t// Provide helpful error message for common errors\n\t\t\tif (errorMessage.includes(\"Head sha can't be blank\") || errorMessage.includes(\"No commits between\")) {\n\t\t\t\tthrow new Error(\n\t\t\t\t\t`Failed to create draft pull request: ${errorMessage}\\n\\n` +\n\t\t\t\t\t`This error typically occurs when:\\n` +\n\t\t\t\t\t` - The branch was not fully pushed to the remote\\n` +\n\t\t\t\t\t` - The branch has no commits ahead of the base branch\\n\\n` +\n\t\t\t\t\t`Try running: git push -u origin ${branchName}\\n` +\n\t\t\t\t\t`Then retry: il start`\n\t\t\t\t)\n\t\t\t}\n\n\t\t\tthrow new Error(`Failed to create draft pull request: ${errorMessage}`)\n\t\t}\n\t}\n\n\t/**\n\t * Mark a draft PR as ready for review\n\t * Used by github-draft-pr mode during il finish\n\t * @param prNumber - PR number to mark ready\n\t * @param cwd - Working directory\n\t */\n\tasync markPRReady(prNumber: number, cwd?: string): Promise<void> {\n\t\tconst args = ['pr', 'ready', String(prNumber)]\n\t\tawait executeGhCommand(args, cwd ? 
{ cwd } : undefined)\n\t\tgetLogger().info(`Marked PR #${prNumber} as ready for review`)\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAkBO,IAAM,YAAN,MAAgB;AAAA,EACtB,YAAoB,UAAyB;AAAzB;AAAA,EAEpB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,mBAAmB,YAAoB,KAA0C;AACtF,QAAI;AACH,YAAM,SAAS,MAAM;AAAA,QACpB,CAAC,MAAM,QAAQ,UAAU,YAAY,WAAW,QAAQ,UAAU,YAAY;AAAA,QAC9E,MAAM,EAAE,IAAI,IAAI;AAAA,MACjB;AAEA,UAAI,OAAO,SAAS,GAAG;AACtB,eAAO,OAAO,CAAC,KAAK;AAAA,MACrB;AAEA,aAAO;AAAA,IACR,SAAS,OAAO;AAChB,gBAAU,EAAE,MAAM,kCAAkC,EAAE,MAAM,CAAC;AAC5D,aAAO;AAAA,IACR;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,eAAe,aAA0C,cAAuC;AAErG,UAAM,eAAe,MAAM,gBAAgB;AAE3C,QAAI,cAAc;AACjB,UAAI;AACH,cAAM,SAAS,KAAK,kBAAkB,WAAW;AAEjD,cAAMA,QAAO,MAAM,aAAa,QAAQ;AAAA,UACvC,UAAU;AAAA,UACV,QAAQ;AAAA,UACR,SAAS;AAAA,QACV,CAAC;AAED,YAAIA,SAAQ,OAAOA,UAAS,YAAYA,MAAK,KAAK,GAAG;AACpD,gBAAM,YAAY,KAAK,qBAAqBA,KAAI;AAChD,cAAI,WAAW;AACd,mBAAO;AAAA,UACR;AAAA,QACD;AAAA,MACD,SAAS,OAAO;AAChB,kBAAU,EAAE,MAAM,oDAAoD,EAAE,MAAM,CAAC;AAAA,MAC/E;AAAA,IACD;AAGA,QAAI,OAAO;AAEX,QAAI,aAAa;AAChB,cAAQ,UAAU,WAAW;AAAA,IAC9B;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,kBAAkB,aAAuC;AAChE,UAAM,eAAe,cAClB;AAAA;AAAA,2CACsC,WAAW;AAAA,kBACpC,WAAW;AAAA,mBAExB;AAEH,WAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mEAM0D,cAAc;AAAA;AAAA,mBAAwB,WAAW,uBAAuB,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAY3I,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQb;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,qBAAqB,WAA2B;AACvD,QAAI,UAAU,UAAU,KAAK;AAG7B,UAAM,eAAe;AAAA,MACpB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACD;AAEA,eAAW,WAAW,cAAc;AACnC,gBAAU,QAAQ,QAAQ,SAAS,EAAE,EAAE,KAAK;AAAA,IAC7C;AAGA,cAAU,QAAQ,QAAQ,iBAAiB,EAAE,EAAE,KAAK;AAGpD,QAAI,QAAQ,SAAS,GAAG,GAAG;AAC1B,YAAM,aAAa,QAAQ,QAAQ,GAAG;AACtC,YAAM,cAAc,QAAQ,UAAU,GAAG,UAAU,EAAE,KAAK,EAAE,YAAY;AAGxE,YAAM,iBAAiB;AAAA,QACtB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACD;AAEA,YAAM,mBAAmB,eAAe,KAAK,eAAa,YAAY,SAAS,SAAS,CAAC;AAEzF,UAAI,kBAAkB;AACrB,cAAM,aAAa,QAAQ,UAAU,aAAa,CAAC,EAAE,KAAK;AAE1D,cAAM,iBAAiB,WAAW,QAAQ,iBAAiB,EAAE,EAAE,KAAK;AACpE,YAAI,kBAAkB,eAAe,SAAS,IAAI;AACjD,oBAAU;AAAA,QACX;AAAA,MACD;AAAA,IACD;AAGA,QAAK,QAAQ,WAAW,GAAG,KAAK,QAAQ,SAAS,GAAG,KAClD,QAAQ,WAAW,GAAG,KAAK,QAAQ,SAAS,GAAG,GAAI;AACpD,gBAAU,QAAQ,MAAM,GAAG,EAAE,EAAE,KAAK;AAAA,IACrC;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,SACL,YACA,OACA,MACA,YACA,KACkB;AAClB,QAAI;AAEH,YAAM,eAAe,MAAM,2BAA2B,KAAK,UAAU,GAAG;AAKxE,UAAI,YAAY;AAEhB,UAAI,iBAAiB,UAAU;AAG9B,cAAM,UAAU,MAAM,gBAAgB,GAAG;AACzC,cAAM,eAAe,QAAQ,KAAK,OAAK,EAAE,SAAS,QAAQ;AAE1D,YAAI,cAAc;AACjB,sBAAY,GAAG,aAAa,KAAK,IAAI,UAAU;AAChD,oBAAU,EAAE,MAAM,uCAAuC,SAAS,EAAE;AAAA,QACpE;AAAA,MACD;AAIA,YAAM,OAAO,CAAC,MAAM,UAAU,UAAU,WAAW,WAAW,OAAO,UAAU,MAAM,UAAU,UAAU;AAGzG,UAAI,iBAAiB,UAAU;AAC9B,cAAM,OAAO,MAAM,8BAA8B,KAAK,UAAU,GAAG;AACnE,aAAK,KAAK,UAAU,IAAI;AAAA,MACzB;AAGA,YAAM,SAAS,MAAM,iBAAyB,MAAM,MAAM,EAAE,IAAI,IAAI,MAAS;AAG7E,YAAM,MAAM,OAAO,WAAW,WAAW,OAAO,KAAK,IAAI,OAAO,MAAM,EAAE,KAAK;AAE7E,UAAI,CAAC,IAAI,SAAS,YAAY,KAAK,CAAC,IAAI,SAAS,QAAQ,GAAG;AAC3D,cAAM,IAAI,MAAM,0CAA0C,GAAG,EAAE;AAAA,MAChE;AAEA,aAAO;AAAA,IACR,SAAS,OAAO;AACf,YAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAG1E,UAAI,aAAa,SAAS,yBAAyB,KAAK,aAAa,SAAS,oBAAoB,GAAG;AACpG,cAAM,IAAI;AAAA,UACT,kCAAkC,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAKX,UAAU;AAAA;AAAA,QAE9C;AAAA,MACD;AAEA,YAAM,IAAI,MAAM,kCAAkC,YAAY,EAAE;AAAA,IACjE;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,gBAAgB,KAA4B;AACjD,QAAI;AACH,YAAM,YAAY,GAAG;AACtB,gBAAU,EAAE,MAAM,wBAAwB,
EAAE,IAAI,CAAC;AAAA,IACjD,SAAS,OAAO;AAEhB,gBAAU,EAAE,KAAK,gCAAgC,EAAE,MAAM,CAAC;AAAA,IAC1D;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYA,MAAM,eACL,YACA,OACA,aACA,YACA,cACA,eAC4B;AAE5B,UAAM,aAAa,MAAM,KAAK,mBAAmB,YAAY,YAAY;AAEzE,QAAI,YAAY;AAChB,gBAAU,EAAE,KAAK,gCAAgC,WAAW,GAAG,EAAE;AAEhE,UAAI,eAAe;AAClB,cAAM,KAAK,gBAAgB,WAAW,GAAG;AAAA,MAC1C;AAEA,aAAO;AAAA,QACN,KAAK,WAAW;AAAA,QAChB,QAAQ,WAAW;AAAA,QACnB,aAAa;AAAA,MACd;AAAA,IACD;AAGA,UAAM,OAAO,MAAM,KAAK,eAAe,aAAa,YAAY;AAGjE,cAAU,EAAE,KAAK,0BAA0B;AAC1C,UAAM,MAAM,MAAM,KAAK,SAAS,YAAY,OAAO,MAAM,YAAY,YAAY;AAGjF,UAAM,WAAW,KAAK,uBAAuB,GAAG;AAEhD,QAAI,eAAe;AAClB,YAAM,KAAK,gBAAgB,GAAG;AAAA,IAC/B;AAEA,WAAO;AAAA,MACN;AAAA,MACA,QAAQ;AAAA,MACR,aAAa;AAAA,IACd;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOQ,uBAAuB,KAAqB;AACnD,UAAM,QAAQ,IAAI,MAAM,eAAe;AACvC,QAAI,+BAAQ,IAAI;AACf,aAAO,SAAS,MAAM,CAAC,GAAG,EAAE;AAAA,IAC7B;AACA,UAAM,IAAI,MAAM,yCAAyC,GAAG,EAAE;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,cACL,YACA,OACA,MACA,KAC2C;AAC3C,QAAI;AAEH,YAAM,eAAe,MAAM,2BAA2B,KAAK,UAAU,GAAG;AAIxE,UAAI,YAAY;AAEhB,UAAI,iBAAiB,UAAU;AAE9B,cAAM,UAAU,MAAM,gBAAgB,GAAG;AACzC,cAAM,eAAe,QAAQ,KAAK,OAAK,EAAE,SAAS,QAAQ;AAE1D,YAAI,cAAc;AACjB,sBAAY,GAAG,aAAa,KAAK,IAAI,UAAU;AAC/C,oBAAU,EAAE,MAAM,uCAAuC,SAAS,EAAE;AAAA,QACrE;AAAA,MACD;AAIA,YAAM,OAAO,CAAC,MAAM,UAAU,UAAU,WAAW,WAAW,OAAO,UAAU,MAAM,SAAS;AAG9F,UAAI,iBAAiB,UAAU;AAC9B,cAAM,OAAO,MAAM,8BAA8B,KAAK,UAAU,GAAG;AACnE,aAAK,KAAK,UAAU,IAAI;AAAA,MACzB;AAGA,YAAM,SAAS,MAAM,iBAAyB,MAAM,MAAM,EAAE,IAAI,IAAI,MAAS;AAG7E,YAAM,MAAM,OAAO,WAAW,WAAW,OAAO,KAAK,IAAI,OAAO,MAAM,EAAE,KAAK;AAE7E,UAAI,CAAC,IAAI,SAAS,YAAY,KAAK,CAAC,IAAI,SAAS,QAAQ,GAAG;AAC3D,cAAM,IAAI,MAAM,kDAAkD,GAAG,EAAE;AAAA,MACxE;AAEA,YAAM,SAAS,KAAK,uBAAuB,GAAG;AAE9C,aAAO,EAAE,KAAK,OAAO;AAAA,IACtB,SAAS,OAAO;AACf,YAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAG1E,UAAI,aAAa,SAAS,yBAAyB,KAAK,aAAa,SAAS,oBAAoB,GAAG;AACpG,cAAM,IAAI;AAAA,UACT,wCAAwC,YAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAIjB,UAAU;AAAA;AAAA,QAE9C;AAAA,MACD;AAEA,YAAM,IAAI,MAAM,wCAAwC,YAAY,EAAE;AAAA,IACvE;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,YAAY,UAAkB,KAA6B;AAChE,UAAM,OAAO,CAAC,MAAM,SAAS,OAAO,QAAQ,CAAC;AAC7C,UAAM,iBAAiB,MAAM,MAAM,EAAE,IAAI,IAAI,MAAS;AACtD,cAAU,EAAE,KAAK,cAAc,QAAQ,sBAAsB;AAAA,EAC9D;AACD;","names":["body"]}
@@ -1 +0,0 @@
{"version":3,"sources":["../src/utils/port.ts","../src/lib/process/ProcessManager.ts"],"sourcesContent":["import { createHash } from 'crypto'\n\n/**\n * Generate deterministic port offset from branch name using SHA256 hash\n * Range: 1-999 (matches existing random range for branches)\n *\n * @param branchName - Branch name to generate port offset from\n * @returns Port offset in range [1, 999]\n * @throws Error if branchName is empty\n */\nexport function generatePortOffsetFromBranchName(branchName: string): number {\n\t// Validate input\n\tif (!branchName || branchName.trim().length === 0) {\n\t\tthrow new Error('Branch name cannot be empty')\n\t}\n\n\t// Generate SHA256 hash of branch name (same pattern as color.ts)\n\tconst hash = createHash('sha256').update(branchName).digest('hex')\n\n\t// Take first 8 hex characters and convert to port offset (1-999)\n\tconst hashPrefix = hash.slice(0, 8)\n\tconst hashAsInt = parseInt(hashPrefix, 16)\n\tconst portOffset = (hashAsInt % 999) + 1 // +1 ensures range is 1-999, not 0-998\n\n\treturn portOffset\n}\n\n/**\n * Calculate deterministic port for branch-based workspace\n *\n * @param branchName - Branch name\n * @param basePort - Base port (default: 3000)\n * @returns Port number\n * @throws Error if calculated port exceeds 65535 or branchName is empty\n */\nexport function calculatePortForBranch(branchName: string, basePort: number = 3000): number {\n\tconst offset = generatePortOffsetFromBranchName(branchName)\n\tconst port = basePort + offset\n\n\t// Validate port range (same as EnvironmentManager.calculatePort)\n\tif (port > 65535) {\n\t\tthrow new Error(\n\t\t\t`Calculated port ${port} exceeds maximum (65535). Use a lower base port (current: ${basePort}).`\n\t\t)\n\t}\n\n\treturn port\n}\n","import { execa } from 'execa'\nimport { setTimeout } from 'timers/promises'\nimport type { ProcessInfo, Platform } from '../../types/process.js'\nimport { calculatePortForBranch } from '../../utils/port.js'\n\n/**\n * Manages process detection and termination across platforms\n * Ports dev server termination logic from bash/merge-and-clean.sh lines 1092-1148\n */\nexport class ProcessManager {\n\tprivate readonly platform: Platform\n\n\tconstructor() {\n\t\tthis.platform = this.detectPlatform()\n\t}\n\n\t/**\n\t * Detect current platform\n\t */\n\tprivate detectPlatform(): Platform {\n\t\tswitch (process.platform) {\n\t\t\tcase 'darwin':\n\t\t\t\treturn 'darwin'\n\t\t\tcase 'linux':\n\t\t\t\treturn 'linux'\n\t\t\tcase 'win32':\n\t\t\t\treturn 'win32'\n\t\t\tdefault:\n\t\t\t\treturn 'unsupported'\n\t\t}\n\t}\n\n\t/**\n\t * Detect if a dev server is running on the specified port\n\t * Ports logic from merge-and-clean.sh lines 1107-1123\n\t */\n\tasync detectDevServer(port: number): Promise<ProcessInfo | null> {\n\t\tif (this.platform === 'unsupported') {\n\t\t\tthrow new Error('Process detection not supported on this platform')\n\t\t}\n\n\t\t// Use platform-specific detection\n\t\tif (this.platform === 'win32') {\n\t\t\treturn await this.detectOnPortWindows(port)\n\t\t} else {\n\t\t\treturn await this.detectOnPortUnix(port)\n\t\t}\n\t}\n\n\t/**\n\t * Unix/macOS implementation using lsof\n\t * Ports bash lines 1107-1123\n\t */\n\tprivate async detectOnPortUnix(port: number): Promise<ProcessInfo | null> {\n\t\ttry {\n\t\t\t// Run lsof to find process listening on port (LISTEN only)\n\t\t\tconst result = await execa('lsof', ['-i', `:${port}`, '-P'], {\n\t\t\t\treject: false,\n\t\t\t})\n\n\t\t\t// Filter for LISTEN state only\n\t\t\tconst lines = 
result.stdout.split('\\n').filter(line => line.includes('LISTEN'))\n\n\t\t\tif (lines.length === 0) {\n\t\t\t\treturn null\n\t\t\t}\n\n\t\t\t// Parse first LISTEN line\n\t\t\tconst firstLine = lines[0]\n\t\t\tif (!firstLine) return null\n\n\t\t\tconst parts = firstLine.split(/\\s+/)\n\t\t\tif (parts.length < 2) return null\n\n\t\t\tconst processName = parts[0] ?? ''\n\t\t\tconst pid = parseInt(parts[1] ?? '', 10)\n\n\t\t\tif (isNaN(pid)) {\n\t\t\t\treturn null\n\t\t\t}\n\n\t\t\t// Get full command line using ps\n\t\t\tconst psResult = await execa('ps', ['-p', pid.toString(), '-o', 'command='], {\n\t\t\t\treject: false,\n\t\t\t})\n\t\t\tconst fullCommand = psResult.stdout.trim()\n\n\t\t\t// Validate if this is a dev server\n\t\t\tconst isDevServer = this.isDevServerProcess(processName, fullCommand)\n\n\t\t\treturn {\n\t\t\t\tpid,\n\t\t\t\tname: processName,\n\t\t\t\tcommand: fullCommand,\n\t\t\t\tport,\n\t\t\t\tisDevServer,\n\t\t\t}\n\t\t} catch {\n\t\t\t// If lsof fails, assume no process on port\n\t\t\treturn null\n\t\t}\n\t}\n\n\t/**\n\t * Windows implementation using netstat and tasklist\n\t */\n\tprivate async detectOnPortWindows(port: number): Promise<ProcessInfo | null> {\n\t\ttry {\n\t\t\t// Use netstat to find PID listening on port\n\t\t\tconst result = await execa('netstat', ['-ano'], { reject: false })\n\t\t\tconst lines = result.stdout.split('\\n')\n\n\t\t\t// Find line with our port and LISTENING state\n\t\t\tconst portLine = lines.find(\n\t\t\t\tline => line.includes(`:${port}`) && line.includes('LISTENING')\n\t\t\t)\n\n\t\t\tif (!portLine) {\n\t\t\t\treturn null\n\t\t\t}\n\n\t\t\t// Extract PID (last column)\n\t\t\tconst parts = portLine.trim().split(/\\s+/)\n\t\t\tconst lastPart = parts[parts.length - 1]\n\t\t\tif (!lastPart) return null\n\n\t\t\tconst pid = parseInt(lastPart, 10)\n\n\t\t\tif (isNaN(pid)) {\n\t\t\t\treturn null\n\t\t\t}\n\n\t\t\t// Get process info using tasklist\n\t\t\tconst taskResult = await execa(\n\t\t\t\t'tasklist',\n\t\t\t\t['/FI', `PID eq ${pid}`, '/FO', 'CSV'],\n\t\t\t\t{\n\t\t\t\t\treject: false,\n\t\t\t\t}\n\t\t\t)\n\n\t\t\t// Parse CSV output\n\t\t\tconst lines2 = taskResult.stdout.split('\\n')\n\t\t\tif (lines2.length < 2) {\n\t\t\t\treturn null\n\t\t\t}\n\n\t\t\tconst secondLine = lines2[1]\n\t\t\tif (!secondLine) return null\n\n\t\t\tconst parts2 = secondLine.split(',')\n\t\t\tconst processName = (parts2[0] ?? 
'').replace(/\"/g, '')\n\n\t\t\t// TODO: Get full command line on Windows (more complex)\n\t\t\tconst fullCommand = processName\n\n\t\t\tconst isDevServer = this.isDevServerProcess(processName, fullCommand)\n\n\t\t\treturn {\n\t\t\t\tpid,\n\t\t\t\tname: processName,\n\t\t\t\tcommand: fullCommand,\n\t\t\t\tport,\n\t\t\t\tisDevServer,\n\t\t\t}\n\t\t} catch {\n\t\t\treturn null\n\t\t}\n\t}\n\n\t/**\n\t * Validate if process is a dev server\n\t * Ports logic from merge-and-clean.sh lines 1121-1123\n\t */\n\tprivate isDevServerProcess(processName: string, command: string): boolean {\n\t\t// Check process name patterns\n\t\tconst devServerNames = /^(node|npm|pnpm|yarn|next|next-server|vite|webpack|dev-server)$/i\n\t\tif (devServerNames.test(processName)) {\n\t\t\t// Additional validation via command line\n\t\t\tconst devServerCommands =\n\t\t\t\t/(next dev|next-server|npm.*dev|pnpm.*dev|yarn.*dev|vite|webpack.*serve|turbo.*dev|dev.*server)/i\n\t\t\treturn devServerCommands.test(command)\n\t\t}\n\n\t\t// Check command line alone\n\t\tconst devServerCommands =\n\t\t\t/(next dev|next-server|npm.*dev|pnpm.*dev|yarn.*dev|vite|webpack.*serve|turbo.*dev|dev.*server)/i\n\t\treturn devServerCommands.test(command)\n\t}\n\n\t/**\n\t * Terminate a process by PID\n\t * Ports logic from merge-and-clean.sh lines 1126-1139\n\t */\n\tasync terminateProcess(pid: number): Promise<boolean> {\n\t\ttry {\n\t\t\tif (this.platform === 'win32') {\n\t\t\t\t// Windows: use taskkill\n\t\t\t\tawait execa('taskkill', ['/PID', pid.toString(), '/F'], { reject: true })\n\t\t\t} else {\n\t\t\t\t// Unix/macOS: use kill -9\n\t\t\t\tprocess.kill(pid, 'SIGKILL')\n\t\t\t}\n\n\t\t\t// Wait briefly for process to die\n\t\t\tawait setTimeout(1000)\n\n\t\t\treturn true\n\t\t} catch (error) {\n\t\t\tthrow new Error(\n\t\t\t\t`Failed to terminate process ${pid}: ${error instanceof Error ? 
error.message : 'Unknown error'}`\n\t\t\t)\n\t\t}\n\t}\n\n\t/**\n\t * Verify that a port is free\n\t * Ports verification logic from merge-and-clean.sh lines 1135-1139\n\t */\n\tasync verifyPortFree(port: number): Promise<boolean> {\n\t\tconst processInfo = await this.detectDevServer(port)\n\t\treturn processInfo === null\n\t}\n\n\t/**\n\t * Calculate dev server port from issue/PR number\n\t * Ports logic from merge-and-clean.sh lines 1093-1098\n\t * For alphanumeric identifiers, uses hash-based port calculation\n\t */\n\tcalculatePort(number: string | number): number {\n\t\t// For numeric identifiers, use simple arithmetic (original behavior)\n\t\tif (typeof number === 'number') {\n\t\t\treturn 3000 + number\n\t\t}\n\n\t\t// For string identifiers, try numeric conversion first\n\t\tconst numericValue = Number(number)\n\t\tif (!isNaN(numericValue) && isFinite(numericValue)) {\n\t\t\treturn 3000 + numericValue\n\t\t}\n\n\t\t// For non-numeric strings (alphanumeric identifiers), use hash-based calculation\n\t\treturn calculatePortForBranch(`issue-${number}`, 3000)\n\t}\n}\n"],"mappings":";;;AAAA,SAAS,kBAAkB;AAUpB,SAAS,iCAAiC,YAA4B;AAE5E,MAAI,CAAC,cAAc,WAAW,KAAK,EAAE,WAAW,GAAG;AAClD,UAAM,IAAI,MAAM,6BAA6B;AAAA,EAC9C;AAGA,QAAM,OAAO,WAAW,QAAQ,EAAE,OAAO,UAAU,EAAE,OAAO,KAAK;AAGjE,QAAM,aAAa,KAAK,MAAM,GAAG,CAAC;AAClC,QAAM,YAAY,SAAS,YAAY,EAAE;AACzC,QAAM,aAAc,YAAY,MAAO;AAEvC,SAAO;AACR;AAUO,SAAS,uBAAuB,YAAoB,WAAmB,KAAc;AAC3F,QAAM,SAAS,iCAAiC,UAAU;AAC1D,QAAM,OAAO,WAAW;AAGxB,MAAI,OAAO,OAAO;AACjB,UAAM,IAAI;AAAA,MACT,mBAAmB,IAAI,6DAA6D,QAAQ;AAAA,IAC7F;AAAA,EACD;AAEA,SAAO;AACR;;;AC/CA,SAAS,aAAa;AACtB,SAAS,kBAAkB;AAQpB,IAAM,iBAAN,MAAqB;AAAA,EAG3B,cAAc;AACb,SAAK,WAAW,KAAK,eAAe;AAAA,EACrC;AAAA;AAAA;AAAA;AAAA,EAKQ,iBAA2B;AAClC,YAAQ,QAAQ,UAAU;AAAA,MACzB,KAAK;AACJ,eAAO;AAAA,MACR,KAAK;AACJ,eAAO;AAAA,MACR,KAAK;AACJ,eAAO;AAAA,MACR;AACC,eAAO;AAAA,IACT;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,gBAAgB,MAA2C;AAChE,QAAI,KAAK,aAAa,eAAe;AACpC,YAAM,IAAI,MAAM,kDAAkD;AAAA,IACnE;AAGA,QAAI,KAAK,aAAa,SAAS;AAC9B,aAAO,MAAM,KAAK,oBAAoB,IAAI;AAAA,IAC3C,OAAO;AACN,aAAO,MAAM,KAAK,iBAAiB,IAAI;AAAA,IACxC;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,iBAAiB,MAA2C;AACzE,QAAI;AAEH,YAAM,SAAS,MAAM,MAAM,QAAQ,CAAC,MAAM,IAAI,IAAI,IAAI,IAAI,GAAG;AAAA,QAC5D,QAAQ;AAAA,MACT,CAAC;AAGD,YAAM,QAAQ,OAAO,OAAO,MAAM,IAAI,EAAE,OAAO,UAAQ,KAAK,SAAS,QAAQ,CAAC;AAE9E,UAAI,MAAM,WAAW,GAAG;AACvB,eAAO;AAAA,MACR;AAGA,YAAM,YAAY,MAAM,CAAC;AACzB,UAAI,CAAC,UAAW,QAAO;AAEvB,YAAM,QAAQ,UAAU,MAAM,KAAK;AACnC,UAAI,MAAM,SAAS,EAAG,QAAO;AAE7B,YAAM,cAAc,MAAM,CAAC,KAAK;AAChC,YAAM,MAAM,SAAS,MAAM,CAAC,KAAK,IAAI,EAAE;AAEvC,UAAI,MAAM,GAAG,GAAG;AACf,eAAO;AAAA,MACR;AAGA,YAAM,WAAW,MAAM,MAAM,MAAM,CAAC,MAAM,IAAI,SAAS,GAAG,MAAM,UAAU,GAAG;AAAA,QAC5E,QAAQ;AAAA,MACT,CAAC;AACD,YAAM,cAAc,SAAS,OAAO,KAAK;AAGzC,YAAM,cAAc,KAAK,mBAAmB,aAAa,WAAW;AAEpE,aAAO;AAAA,QACN;AAAA,QACA,MAAM;AAAA,QACN,SAAS;AAAA,QACT;AAAA,QACA;AAAA,MACD;AAAA,IACD,QAAQ;AAEP,aAAO;AAAA,IACR;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,oBAAoB,MAA2C;AAC5E,QAAI;AAEH,YAAM,SAAS,MAAM,MAAM,WAAW,CAAC,MAAM,GAAG,EAAE,QAAQ,MAAM,CAAC;AACjE,YAAM,QAAQ,OAAO,OAAO,MAAM,IAAI;AAGtC,YAAM,WAAW,MAAM;AAAA,QACtB,UAAQ,KAAK,SAAS,IAAI,IAAI,EAAE,KAAK,KAAK,SAAS,WAAW;AAAA,MAC/D;AAEA,UAAI,CAAC,UAAU;AACd,eAAO;AAAA,MACR;AAGA,YAAM,QAAQ,SAAS,KAAK,EAAE,MAAM,KAAK;AACzC,YAAM,WAAW,MAAM,MAAM,SAAS,CAAC;AACvC,UAAI,CAAC,SAAU,QAAO;AAEtB,YAAM,MAAM,SAAS,UAAU,EAAE;AAEjC,UAAI,MAAM,GAAG,GAAG;AACf,eAAO;AAAA,MACR;AAGA,YAAM,aAAa,MAAM;AAAA,QACxB;AAAA,QACA,CAAC,OAAO,UAAU,GAAG,IAAI,OAAO,KAAK;AAAA,QACrC;AAAA,UACC,QAAQ;AAAA,QACT;AAAA,MACD;AAGA,YAAM,SAAS,WAAW,OAAO,MAAM,IAAI;AAC3C,UAAI,OAAO,SAAS,G
AAG;AACtB,eAAO;AAAA,MACR;AAEA,YAAM,aAAa,OAAO,CAAC;AAC3B,UAAI,CAAC,WAAY,QAAO;AAExB,YAAM,SAAS,WAAW,MAAM,GAAG;AACnC,YAAM,eAAe,OAAO,CAAC,KAAK,IAAI,QAAQ,MAAM,EAAE;AAGtD,YAAM,cAAc;AAEpB,YAAM,cAAc,KAAK,mBAAmB,aAAa,WAAW;AAEpE,aAAO;AAAA,QACN;AAAA,QACA,MAAM;AAAA,QACN,SAAS;AAAA,QACT;AAAA,QACA;AAAA,MACD;AAAA,IACD,QAAQ;AACP,aAAO;AAAA,IACR;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,mBAAmB,aAAqB,SAA0B;AAEzE,UAAM,iBAAiB;AACvB,QAAI,eAAe,KAAK,WAAW,GAAG;AAErC,YAAMA,qBACL;AACD,aAAOA,mBAAkB,KAAK,OAAO;AAAA,IACtC;AAGA,UAAM,oBACL;AACD,WAAO,kBAAkB,KAAK,OAAO;AAAA,EACtC;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,iBAAiB,KAA+B;AACrD,QAAI;AACH,UAAI,KAAK,aAAa,SAAS;AAE9B,cAAM,MAAM,YAAY,CAAC,QAAQ,IAAI,SAAS,GAAG,IAAI,GAAG,EAAE,QAAQ,KAAK,CAAC;AAAA,MACzE,OAAO;AAEN,gBAAQ,KAAK,KAAK,SAAS;AAAA,MAC5B;AAGA,YAAM,WAAW,GAAI;AAErB,aAAO;AAAA,IACR,SAAS,OAAO;AACf,YAAM,IAAI;AAAA,QACT,+BAA+B,GAAG,KAAK,iBAAiB,QAAQ,MAAM,UAAU,eAAe;AAAA,MAChG;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,eAAe,MAAgC;AACpD,UAAM,cAAc,MAAM,KAAK,gBAAgB,IAAI;AACnD,WAAO,gBAAgB;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,cAAc,QAAiC;AAE9C,QAAI,OAAO,WAAW,UAAU;AAC/B,aAAO,MAAO;AAAA,IACf;AAGA,UAAM,eAAe,OAAO,MAAM;AAClC,QAAI,CAAC,MAAM,YAAY,KAAK,SAAS,YAAY,GAAG;AACnD,aAAO,MAAO;AAAA,IACf;AAGA,WAAO,uBAAuB,SAAS,MAAM,IAAI,GAAI;AAAA,EACtD;AACD;","names":["devServerCommands"]}
package/dist/chunk-W6WVRHJ6.js
DELETED
@@ -1,251 +0,0 @@
#!/usr/bin/env node
import {
  logger
} from "./chunk-VT4PDUYT.js";

// src/lib/PromptTemplateManager.ts
import { readFile } from "fs/promises";
import { accessSync } from "fs";
import path from "path";
import { fileURLToPath } from "url";
var PromptTemplateManager = class {
  constructor(templateDir) {
    if (templateDir) {
      this.templateDir = templateDir;
    } else {
      const currentFileUrl = import.meta.url;
      const currentFilePath = fileURLToPath(currentFileUrl);
      const distDir = path.dirname(currentFilePath);
      let templateDir2 = path.join(distDir, "prompts");
      let currentDir = distDir;
      while (currentDir !== path.dirname(currentDir)) {
        const candidatePath = path.join(currentDir, "prompts");
        try {
          accessSync(candidatePath);
          templateDir2 = candidatePath;
          break;
        } catch {
          currentDir = path.dirname(currentDir);
        }
      }
      this.templateDir = templateDir2;
      logger.debug("PromptTemplateManager initialized", {
        currentFilePath,
        distDir,
        templateDir: this.templateDir
      });
    }
  }
  /**
   * Load a template file by name
   */
  async loadTemplate(templateName) {
    const templatePath = path.join(this.templateDir, `${templateName}-prompt.txt`);
    logger.debug("Loading template", {
      templateName,
      templateDir: this.templateDir,
      templatePath
    });
    try {
      return await readFile(templatePath, "utf-8");
    } catch (error) {
      logger.error("Failed to load template", { templateName, templatePath, error });
      throw new Error(`Template not found: ${templatePath}`);
    }
  }
  /**
   * Substitute variables in a template string
   */
  substituteVariables(template, variables) {
    let result = template;
    result = this.processConditionalSections(result, variables);
    if (variables.ISSUE_NUMBER !== void 0) {
      result = result.replace(/ISSUE_NUMBER/g, String(variables.ISSUE_NUMBER));
    }
    if (variables.PR_NUMBER !== void 0) {
      result = result.replace(/PR_NUMBER/g, String(variables.PR_NUMBER));
    }
    if (variables.ISSUE_TITLE !== void 0) {
      result = result.replace(/ISSUE_TITLE/g, variables.ISSUE_TITLE);
    }
    if (variables.PR_TITLE !== void 0) {
      result = result.replace(/PR_TITLE/g, variables.PR_TITLE);
    }
    if (variables.WORKSPACE_PATH !== void 0) {
      result = result.replace(/WORKSPACE_PATH/g, variables.WORKSPACE_PATH);
    }
    if (variables.PORT !== void 0) {
      result = result.replace(/PORT/g, String(variables.PORT));
    }
    if (variables.SETTINGS_SCHEMA !== void 0) {
      result = result.replace(/SETTINGS_SCHEMA/g, variables.SETTINGS_SCHEMA);
    }
    if (variables.SETTINGS_GLOBAL_JSON !== void 0) {
      result = result.replace(/SETTINGS_GLOBAL_JSON/g, variables.SETTINGS_GLOBAL_JSON);
    }
    if (variables.SETTINGS_JSON !== void 0) {
      result = result.replace(/SETTINGS_JSON/g, variables.SETTINGS_JSON);
    }
    if (variables.SETTINGS_LOCAL_JSON !== void 0) {
      result = result.replace(/SETTINGS_LOCAL_JSON/g, variables.SETTINGS_LOCAL_JSON);
    }
    if (variables.SHELL_TYPE !== void 0) {
      result = result.replace(/SHELL_TYPE/g, variables.SHELL_TYPE);
    }
    if (variables.SHELL_CONFIG_PATH !== void 0) {
      result = result.replace(/SHELL_CONFIG_PATH/g, variables.SHELL_CONFIG_PATH);
    }
    if (variables.SHELL_CONFIG_CONTENT !== void 0) {
      result = result.replace(/SHELL_CONFIG_CONTENT/g, variables.SHELL_CONFIG_CONTENT);
    }
    if (variables.REMOTES_INFO !== void 0) {
      result = result.replace(/REMOTES_INFO/g, variables.REMOTES_INFO);
    }
    if (variables.MULTIPLE_REMOTES !== void 0) {
      result = result.replace(/MULTIPLE_REMOTES/g, variables.MULTIPLE_REMOTES);
    }
    if (variables.SINGLE_REMOTE !== void 0) {
      result = result.replace(/SINGLE_REMOTE/g, variables.SINGLE_REMOTE);
    }
    if (variables.SINGLE_REMOTE_NAME !== void 0) {
      result = result.replace(/SINGLE_REMOTE_NAME/g, variables.SINGLE_REMOTE_NAME);
    }
    if (variables.SINGLE_REMOTE_URL !== void 0) {
      result = result.replace(/SINGLE_REMOTE_URL/g, variables.SINGLE_REMOTE_URL);
    }
    if (variables.NO_REMOTES !== void 0) {
      result = result.replace(/NO_REMOTES/g, variables.NO_REMOTES);
    }
    if (variables.README_CONTENT !== void 0) {
      result = result.replace(/README_CONTENT/g, variables.README_CONTENT);
    }
    if (variables.SETTINGS_SCHEMA_CONTENT !== void 0) {
      result = result.replace(/SETTINGS_SCHEMA_CONTENT/g, variables.SETTINGS_SCHEMA_CONTENT);
    }
    if (variables.VSCODE_SETTINGS_GITIGNORED !== void 0) {
      result = result.replace(/VSCODE_SETTINGS_GITIGNORED/g, variables.VSCODE_SETTINGS_GITIGNORED);
    }
    if (variables.SESSION_CONTEXT !== void 0) {
      result = result.replace(/SESSION_CONTEXT/g, variables.SESSION_CONTEXT);
    }
    if (variables.BRANCH_NAME !== void 0) {
      result = result.replace(/BRANCH_NAME/g, variables.BRANCH_NAME);
    }
    if (variables.LOOM_TYPE !== void 0) {
      result = result.replace(/LOOM_TYPE/g, variables.LOOM_TYPE);
    }
    if (variables.COMPACT_SUMMARIES !== void 0) {
      result = result.replace(/COMPACT_SUMMARIES/g, variables.COMPACT_SUMMARIES);
    }
    if (variables.DRAFT_PR_NUMBER !== void 0) {
      result = result.replace(/DRAFT_PR_NUMBER/g, String(variables.DRAFT_PR_NUMBER));
    }
    return result;
  }
  /**
   * Process conditional sections in template
   * Format: {{#IF ONE_SHOT_MODE}}content{{/IF ONE_SHOT_MODE}}
   *
   * Note: /s flag allows . to match newlines
   */
  processConditionalSections(template, variables) {
    let result = template;
    const oneShotRegex = /\{\{#IF ONE_SHOT_MODE\}\}(.*?)\{\{\/IF ONE_SHOT_MODE\}\}/gs;
    if (variables.ONE_SHOT_MODE === true) {
      result = result.replace(oneShotRegex, "$1");
    } else {
      result = result.replace(oneShotRegex, "");
    }
    const settingsJsonRegex = /\{\{#IF SETTINGS_JSON\}\}(.*?)\{\{\/IF SETTINGS_JSON\}\}/gs;
    if (variables.SETTINGS_JSON !== void 0 && variables.SETTINGS_JSON !== "") {
      result = result.replace(settingsJsonRegex, "$1");
    } else {
      result = result.replace(settingsJsonRegex, "");
    }
    const settingsGlobalJsonRegex = /\{\{#IF SETTINGS_GLOBAL_JSON\}\}(.*?)\{\{\/IF SETTINGS_GLOBAL_JSON\}\}/gs;
    if (variables.SETTINGS_GLOBAL_JSON !== void 0 && variables.SETTINGS_GLOBAL_JSON !== "") {
      result = result.replace(settingsGlobalJsonRegex, "$1");
    } else {
      result = result.replace(settingsGlobalJsonRegex, "");
    }
    const settingsLocalJsonRegex = /\{\{#IF SETTINGS_LOCAL_JSON\}\}(.*?)\{\{\/IF SETTINGS_LOCAL_JSON\}\}/gs;
    if (variables.SETTINGS_LOCAL_JSON !== void 0 && variables.SETTINGS_LOCAL_JSON !== "") {
      result = result.replace(settingsLocalJsonRegex, "$1");
    } else {
      result = result.replace(settingsLocalJsonRegex, "");
    }
    const multipleRemotesRegex = /\{\{#IF MULTIPLE_REMOTES\}\}(.*?)\{\{\/IF MULTIPLE_REMOTES\}\}/gs;
    if (variables.MULTIPLE_REMOTES !== void 0 && variables.MULTIPLE_REMOTES !== "") {
      result = result.replace(multipleRemotesRegex, "$1");
    } else {
      result = result.replace(multipleRemotesRegex, "");
    }
    const singleRemoteRegex = /\{\{#IF SINGLE_REMOTE\}\}(.*?)\{\{\/IF SINGLE_REMOTE\}\}/gs;
    if (variables.SINGLE_REMOTE !== void 0 && variables.SINGLE_REMOTE !== "") {
      result = result.replace(singleRemoteRegex, "$1");
    } else {
      result = result.replace(singleRemoteRegex, "");
    }
    const noRemotesRegex = /\{\{#IF NO_REMOTES\}\}(.*?)\{\{\/IF NO_REMOTES\}\}/gs;
    if (variables.NO_REMOTES !== void 0 && variables.NO_REMOTES !== "") {
      result = result.replace(noRemotesRegex, "$1");
    } else {
      result = result.replace(noRemotesRegex, "");
    }
    const firstTimeUserRegex = /\{\{#IF FIRST_TIME_USER\}\}(.*?)\{\{\/IF FIRST_TIME_USER\}\}/gs;
    if (variables.FIRST_TIME_USER === true) {
      result = result.replace(firstTimeUserRegex, "$1");
    } else {
      result = result.replace(firstTimeUserRegex, "");
    }
    const interactiveModeRegex = /\{\{#IF INTERACTIVE_MODE\}\}(.*?)\{\{\/IF INTERACTIVE_MODE\}\}/gs;
    if (variables.INTERACTIVE_MODE === true) {
      result = result.replace(interactiveModeRegex, "$1");
    } else {
      result = result.replace(interactiveModeRegex, "");
    }
    const compactSummariesRegex = /\{\{#IF COMPACT_SUMMARIES\}\}(.*?)\{\{\/IF COMPACT_SUMMARIES\}\}/gs;
    if (variables.COMPACT_SUMMARIES !== void 0 && variables.COMPACT_SUMMARIES !== "") {
      result = result.replace(compactSummariesRegex, "$1");
    } else {
      result = result.replace(compactSummariesRegex, "");
    }
    const draftPrModeRegex = /\{\{#IF DRAFT_PR_MODE\}\}(.*?)\{\{\/IF DRAFT_PR_MODE\}\}/gs;
    if (variables.DRAFT_PR_MODE === true) {
      result = result.replace(draftPrModeRegex, "$1");
    } else {
      result = result.replace(draftPrModeRegex, "");
    }
    const standardIssueModeRegex = /\{\{#IF STANDARD_ISSUE_MODE\}\}(.*?)\{\{\/IF STANDARD_ISSUE_MODE\}\}/gs;
    if (variables.STANDARD_ISSUE_MODE === true) {
      result = result.replace(standardIssueModeRegex, "$1");
    } else {
      result = result.replace(standardIssueModeRegex, "");
    }
    const hasPackageJsonRegex = /\{\{#IF HAS_PACKAGE_JSON\}\}(.*?)\{\{\/IF HAS_PACKAGE_JSON\}\}/gs;
    if (variables.HAS_PACKAGE_JSON === true) {
      result = result.replace(hasPackageJsonRegex, "$1");
    } else {
      result = result.replace(hasPackageJsonRegex, "");
    }
    const noPackageJsonRegex = /\{\{#IF NO_PACKAGE_JSON\}\}(.*?)\{\{\/IF NO_PACKAGE_JSON\}\}/gs;
    if (variables.NO_PACKAGE_JSON === true) {
      result = result.replace(noPackageJsonRegex, "$1");
    } else {
      result = result.replace(noPackageJsonRegex, "");
    }
    return result;
  }
  /**
   * Get a fully processed prompt for a workflow type
   */
  async getPrompt(type, variables) {
    const template = await this.loadTemplate(type);
    return this.substituteVariables(template, variables);
  }
};

export {
  PromptTemplateManager
};
//# sourceMappingURL=chunk-W6WVRHJ6.js.map
@@ -1 +0,0 @@
{"version":3,"sources":["../src/lib/PromptTemplateManager.ts"],"sourcesContent":["import { readFile } from 'fs/promises'\nimport { accessSync } from 'fs'\nimport path from 'path'\nimport { fileURLToPath } from 'url'\nimport { logger } from '../utils/logger.js'\n\nexport interface TemplateVariables {\n\tISSUE_NUMBER?: string | number\n\tPR_NUMBER?: number\n\tISSUE_TITLE?: string\n\tPR_TITLE?: string\n\tWORKSPACE_PATH?: string\n\tPORT?: number\n\tONE_SHOT_MODE?: boolean\n\tINTERACTIVE_MODE?: boolean\n\tSETTINGS_SCHEMA?: string\n\tSETTINGS_GLOBAL_JSON?: string\n\tSETTINGS_JSON?: string\n\tSETTINGS_LOCAL_JSON?: string\n\tSHELL_TYPE?: string\n\tSHELL_CONFIG_PATH?: string\n\tSHELL_CONFIG_CONTENT?: string\n\tREMOTES_INFO?: string\n\tMULTIPLE_REMOTES?: string\n\tSINGLE_REMOTE?: string\n\tSINGLE_REMOTE_NAME?: string\n\tSINGLE_REMOTE_URL?: string\n\tNO_REMOTES?: string\n\tREADME_CONTENT?: string\n\tSETTINGS_SCHEMA_CONTENT?: string\n\tFIRST_TIME_USER?: boolean\n\tVSCODE_SETTINGS_GITIGNORED?: string\n\t// Session summary template variables\n\tSESSION_CONTEXT?: string // Session ID for Claude to reference its conversation\n\tBRANCH_NAME?: string // Branch being finished\n\tLOOM_TYPE?: string // 'issue' or 'pr'\n\tCOMPACT_SUMMARIES?: string // Extracted compact summaries from session transcript\n\tRECAP_DATA?: string // Formatted recap data (goal, complexity, entries, artifacts)\n\t// Draft PR mode variables - mutually exclusive with standard issue mode\n\tDRAFT_PR_NUMBER?: number // PR number for draft PR workflow\n\tDRAFT_PR_MODE?: boolean // True when using github-draft-pr merge mode\n\tSTANDARD_ISSUE_MODE?: boolean // True when using standard issue commenting (not draft PR)\n\t// Multi-language support variables - mutually exclusive\n\tHAS_PACKAGE_JSON?: boolean // True when project has package.json\n\tNO_PACKAGE_JSON?: boolean // True when project does not have package.json (non-Node.js projects)\n}\n\nexport class PromptTemplateManager {\n\tprivate templateDir: string\n\n\tconstructor(templateDir?: string) {\n\t\tif (templateDir) {\n\t\t\tthis.templateDir = templateDir\n\t\t} else {\n\t\t\t// Find templates relative to the package installation\n\t\t\t// When running from dist/, templates are copied to dist/prompts/\n\t\t\tconst currentFileUrl = import.meta.url\n\t\t\tconst currentFilePath = fileURLToPath(currentFileUrl)\n\t\t\tconst distDir = path.dirname(currentFilePath) // dist directory (may be chunked file location)\n\n\t\t\t// Walk up to find the dist directory (in case of chunked files)\n\t\t\tlet templateDir = path.join(distDir, 'prompts')\n\t\t\tlet currentDir = distDir\n\n\t\t\t// Try to find the prompts directory by walking up\n\t\t\twhile (currentDir !== path.dirname(currentDir)) {\n\t\t\t\tconst candidatePath = path.join(currentDir, 'prompts')\n\t\t\t\ttry {\n\t\t\t\t\t// Check if this directory exists (sync check for constructor)\n\t\t\t\t\taccessSync(candidatePath)\n\t\t\t\t\ttemplateDir = candidatePath\n\t\t\t\t\tbreak\n\t\t\t\t} catch {\n\t\t\t\t\tcurrentDir = path.dirname(currentDir)\n\t\t\t\t}\n\t\t\t}\n\n\t\t\tthis.templateDir = templateDir\n\t\t\tlogger.debug('PromptTemplateManager initialized', {\n\t\t\t\tcurrentFilePath,\n\t\t\t\tdistDir,\n\t\t\t\ttemplateDir: this.templateDir\n\t\t\t})\n\t\t}\n\t}\n\n\t/**\n\t * Load a template file by name\n\t */\n\tasync loadTemplate(templateName: 'issue' | 'pr' | 'regular' | 'init' | 'session-summary'): Promise<string> {\n\t\tconst templatePath = path.join(this.templateDir, `${templateName}-prompt.txt`)\n\n\t\tlogger.debug('Loading 
template', {\n\t\t\ttemplateName,\n\t\t\ttemplateDir: this.templateDir,\n\t\t\ttemplatePath\n\t\t})\n\n\t\ttry {\n\t\t\treturn await readFile(templatePath, 'utf-8')\n\t\t} catch (error) {\n\t\t\tlogger.error('Failed to load template', { templateName, templatePath, error })\n\t\t\tthrow new Error(`Template not found: ${templatePath}`)\n\t\t}\n\t}\n\n\t/**\n\t * Substitute variables in a template string\n\t */\n\tsubstituteVariables(template: string, variables: TemplateVariables): string {\n\t\tlet result = template\n\n\t\t// Process conditional sections first\n\t\tresult = this.processConditionalSections(result, variables)\n\n\t\t// Replace each variable if it exists\n\t\tif (variables.ISSUE_NUMBER !== undefined) {\n\t\t\tresult = result.replace(/ISSUE_NUMBER/g, String(variables.ISSUE_NUMBER))\n\t\t}\n\n\t\tif (variables.PR_NUMBER !== undefined) {\n\t\t\tresult = result.replace(/PR_NUMBER/g, String(variables.PR_NUMBER))\n\t\t}\n\n\t\tif (variables.ISSUE_TITLE !== undefined) {\n\t\t\tresult = result.replace(/ISSUE_TITLE/g, variables.ISSUE_TITLE)\n\t\t}\n\n\t\tif (variables.PR_TITLE !== undefined) {\n\t\t\tresult = result.replace(/PR_TITLE/g, variables.PR_TITLE)\n\t\t}\n\n\t\tif (variables.WORKSPACE_PATH !== undefined) {\n\t\t\tresult = result.replace(/WORKSPACE_PATH/g, variables.WORKSPACE_PATH)\n\t\t}\n\n\t\tif (variables.PORT !== undefined) {\n\t\t\tresult = result.replace(/PORT/g, String(variables.PORT))\n\t\t}\n\n\t\tif (variables.SETTINGS_SCHEMA !== undefined) {\n\t\t\tresult = result.replace(/SETTINGS_SCHEMA/g, variables.SETTINGS_SCHEMA)\n\t\t}\n\n\t\tif (variables.SETTINGS_GLOBAL_JSON !== undefined) {\n\t\t\tresult = result.replace(/SETTINGS_GLOBAL_JSON/g, variables.SETTINGS_GLOBAL_JSON)\n\t\t}\n\n\t\tif (variables.SETTINGS_JSON !== undefined) {\n\t\t\tresult = result.replace(/SETTINGS_JSON/g, variables.SETTINGS_JSON)\n\t\t}\n\n\t\tif (variables.SETTINGS_LOCAL_JSON !== undefined) {\n\t\t\tresult = result.replace(/SETTINGS_LOCAL_JSON/g, variables.SETTINGS_LOCAL_JSON)\n\t\t}\n\n\t\tif (variables.SHELL_TYPE !== undefined) {\n\t\t\tresult = result.replace(/SHELL_TYPE/g, variables.SHELL_TYPE)\n\t\t}\n\n\t\tif (variables.SHELL_CONFIG_PATH !== undefined) {\n\t\t\tresult = result.replace(/SHELL_CONFIG_PATH/g, variables.SHELL_CONFIG_PATH)\n\t\t}\n\n\t\tif (variables.SHELL_CONFIG_CONTENT !== undefined) {\n\t\t\tresult = result.replace(/SHELL_CONFIG_CONTENT/g, variables.SHELL_CONFIG_CONTENT)\n\t\t}\n\n\t\tif (variables.REMOTES_INFO !== undefined) {\n\t\t\tresult = result.replace(/REMOTES_INFO/g, variables.REMOTES_INFO)\n\t\t}\n\n\t\tif (variables.MULTIPLE_REMOTES !== undefined) {\n\t\t\tresult = result.replace(/MULTIPLE_REMOTES/g, variables.MULTIPLE_REMOTES)\n\t\t}\n\n\t\tif (variables.SINGLE_REMOTE !== undefined) {\n\t\t\tresult = result.replace(/SINGLE_REMOTE/g, variables.SINGLE_REMOTE)\n\t\t}\n\n\t\tif (variables.SINGLE_REMOTE_NAME !== undefined) {\n\t\t\tresult = result.replace(/SINGLE_REMOTE_NAME/g, variables.SINGLE_REMOTE_NAME)\n\t\t}\n\n\t\tif (variables.SINGLE_REMOTE_URL !== undefined) {\n\t\t\tresult = result.replace(/SINGLE_REMOTE_URL/g, variables.SINGLE_REMOTE_URL)\n\t\t}\n\n\t\tif (variables.NO_REMOTES !== undefined) {\n\t\t\tresult = result.replace(/NO_REMOTES/g, variables.NO_REMOTES)\n\t\t}\n\n\t\tif (variables.README_CONTENT !== undefined) {\n\t\t\tresult = result.replace(/README_CONTENT/g, variables.README_CONTENT)\n\t\t}\n\n\t\tif (variables.SETTINGS_SCHEMA_CONTENT !== undefined) {\n\t\t\tresult = result.replace(/SETTINGS_SCHEMA_CONTENT/g, 
variables.SETTINGS_SCHEMA_CONTENT)\n\t\t}\n\n\t\tif (variables.VSCODE_SETTINGS_GITIGNORED !== undefined) {\n\t\t\tresult = result.replace(/VSCODE_SETTINGS_GITIGNORED/g, variables.VSCODE_SETTINGS_GITIGNORED)\n\t\t}\n\n\t\t// Session summary template variables\n\t\tif (variables.SESSION_CONTEXT !== undefined) {\n\t\t\tresult = result.replace(/SESSION_CONTEXT/g, variables.SESSION_CONTEXT)\n\t\t}\n\n\t\tif (variables.BRANCH_NAME !== undefined) {\n\t\t\tresult = result.replace(/BRANCH_NAME/g, variables.BRANCH_NAME)\n\t\t}\n\n\t\tif (variables.LOOM_TYPE !== undefined) {\n\t\t\tresult = result.replace(/LOOM_TYPE/g, variables.LOOM_TYPE)\n\t\t}\n\n\t\tif (variables.COMPACT_SUMMARIES !== undefined) {\n\t\t\tresult = result.replace(/COMPACT_SUMMARIES/g, variables.COMPACT_SUMMARIES)\n\t\t}\n\n\t\t// Draft PR mode variable substitution\n\t\tif (variables.DRAFT_PR_NUMBER !== undefined) {\n\t\t\tresult = result.replace(/DRAFT_PR_NUMBER/g, String(variables.DRAFT_PR_NUMBER))\n\t\t}\n\n\t\treturn result\n\t}\n\n\t/**\n\t * Process conditional sections in template\n\t * Format: {{#IF ONE_SHOT_MODE}}content{{/IF ONE_SHOT_MODE}}\n\t *\n\t * Note: /s flag allows . to match newlines\n\t */\n\tprivate processConditionalSections(template: string, variables: TemplateVariables): string {\n\t\tlet result = template\n\n\t\t// Process ONE_SHOT_MODE conditionals\n\t\tconst oneShotRegex = /\\{\\{#IF ONE_SHOT_MODE\\}\\}(.*?)\\{\\{\\/IF ONE_SHOT_MODE\\}\\}/gs\n\n\t\tif (variables.ONE_SHOT_MODE === true) {\n\t\t\t// Include the content, remove the conditional markers\n\t\t\tresult = result.replace(oneShotRegex, '$1')\n\t\t} else {\n\t\t\t// Remove the entire conditional block\n\t\t\tresult = result.replace(oneShotRegex, '')\n\t\t}\n\n\t\t// Process SETTINGS_JSON conditionals\n\t\tconst settingsJsonRegex = /\\{\\{#IF SETTINGS_JSON\\}\\}(.*?)\\{\\{\\/IF SETTINGS_JSON\\}\\}/gs\n\n\t\tif (variables.SETTINGS_JSON !== undefined && variables.SETTINGS_JSON !== '') {\n\t\t\t// Include the content, remove the conditional markers\n\t\t\tresult = result.replace(settingsJsonRegex, '$1')\n\t\t} else {\n\t\t\t// Remove the entire conditional block\n\t\t\tresult = result.replace(settingsJsonRegex, '')\n\t\t}\n\n\t\t// Process SETTINGS_GLOBAL_JSON conditionals\n\t\tconst settingsGlobalJsonRegex = /\\{\\{#IF SETTINGS_GLOBAL_JSON\\}\\}(.*?)\\{\\{\\/IF SETTINGS_GLOBAL_JSON\\}\\}/gs\n\n\t\tif (variables.SETTINGS_GLOBAL_JSON !== undefined && variables.SETTINGS_GLOBAL_JSON !== '') {\n\t\t\t// Include the content, remove the conditional markers\n\t\t\tresult = result.replace(settingsGlobalJsonRegex, '$1')\n\t\t} else {\n\t\t\t// Remove the entire conditional block\n\t\t\tresult = result.replace(settingsGlobalJsonRegex, '')\n\t\t}\n\n\t\t// Process SETTINGS_LOCAL_JSON conditionals\n\t\tconst settingsLocalJsonRegex = /\\{\\{#IF SETTINGS_LOCAL_JSON\\}\\}(.*?)\\{\\{\\/IF SETTINGS_LOCAL_JSON\\}\\}/gs\n\n\t\tif (variables.SETTINGS_LOCAL_JSON !== undefined && variables.SETTINGS_LOCAL_JSON !== '') {\n\t\t\t// Include the content, remove the conditional markers\n\t\t\tresult = result.replace(settingsLocalJsonRegex, '$1')\n\t\t} else {\n\t\t\t// Remove the entire conditional block\n\t\t\tresult = result.replace(settingsLocalJsonRegex, '')\n\t\t}\n\n\t\t// Process MULTIPLE_REMOTES conditionals\n\t\tconst multipleRemotesRegex = /\\{\\{#IF MULTIPLE_REMOTES\\}\\}(.*?)\\{\\{\\/IF MULTIPLE_REMOTES\\}\\}/gs\n\n\t\tif (variables.MULTIPLE_REMOTES !== undefined && variables.MULTIPLE_REMOTES !== '') {\n\t\t\t// Include the content, remove the conditional 
markers\n\t\t\tresult = result.replace(multipleRemotesRegex, '$1')\n\t\t} else {\n\t\t\t// Remove the entire conditional block\n\t\t\tresult = result.replace(multipleRemotesRegex, '')\n\t\t}\n\n\t\t// Process SINGLE_REMOTE conditionals\n\t\tconst singleRemoteRegex = /\\{\\{#IF SINGLE_REMOTE\\}\\}(.*?)\\{\\{\\/IF SINGLE_REMOTE\\}\\}/gs\n\n\t\tif (variables.SINGLE_REMOTE !== undefined && variables.SINGLE_REMOTE !== '') {\n\t\t\t// Include the content, remove the conditional markers\n\t\t\tresult = result.replace(singleRemoteRegex, '$1')\n\t\t} else {\n\t\t\t// Remove the entire conditional block\n\t\t\tresult = result.replace(singleRemoteRegex, '')\n\t\t}\n\n\t\t// Process NO_REMOTES conditionals\n\t\tconst noRemotesRegex = /\\{\\{#IF NO_REMOTES\\}\\}(.*?)\\{\\{\\/IF NO_REMOTES\\}\\}/gs\n\n\t\tif (variables.NO_REMOTES !== undefined && variables.NO_REMOTES !== '') {\n\t\t\t// Include the content, remove the conditional markers\n\t\t\tresult = result.replace(noRemotesRegex, '$1')\n\t\t} else {\n\t\t\t// Remove the entire conditional block\n\t\t\tresult = result.replace(noRemotesRegex, '')\n\t\t}\n\n\t\t// Process FIRST_TIME_USER conditionals\n\t\tconst firstTimeUserRegex = /\\{\\{#IF FIRST_TIME_USER\\}\\}(.*?)\\{\\{\\/IF FIRST_TIME_USER\\}\\}/gs\n\n\t\tif (variables.FIRST_TIME_USER === true) {\n\t\t\t// Include the content, remove the conditional markers\n\t\t\tresult = result.replace(firstTimeUserRegex, '$1')\n\t\t} else {\n\t\t\t// Remove the entire conditional block\n\t\t\tresult = result.replace(firstTimeUserRegex, '')\n\t\t}\n\n\t\t// Process INTERACTIVE_MODE conditionals\n\t\tconst interactiveModeRegex = /\\{\\{#IF INTERACTIVE_MODE\\}\\}(.*?)\\{\\{\\/IF INTERACTIVE_MODE\\}\\}/gs\n\n\t\tif (variables.INTERACTIVE_MODE === true) {\n\t\t\t// Include the content, remove the conditional markers\n\t\t\tresult = result.replace(interactiveModeRegex, '$1')\n\t\t} else {\n\t\t\t// Remove the entire conditional block\n\t\t\tresult = result.replace(interactiveModeRegex, '')\n\t\t}\n\n\t\t// Process COMPACT_SUMMARIES conditionals\n\t\tconst compactSummariesRegex = /\\{\\{#IF COMPACT_SUMMARIES\\}\\}(.*?)\\{\\{\\/IF COMPACT_SUMMARIES\\}\\}/gs\n\n\t\tif (variables.COMPACT_SUMMARIES !== undefined && variables.COMPACT_SUMMARIES !== '') {\n\t\t\t// Include the content, remove the conditional markers\n\t\t\tresult = result.replace(compactSummariesRegex, '$1')\n\t\t} else {\n\t\t\t// Remove the entire conditional block\n\t\t\tresult = result.replace(compactSummariesRegex, '')\n\t\t}\n\n\t\t// Process DRAFT_PR_MODE conditionals\n\t\tconst draftPrModeRegex = /\\{\\{#IF DRAFT_PR_MODE\\}\\}(.*?)\\{\\{\\/IF DRAFT_PR_MODE\\}\\}/gs\n\n\t\tif (variables.DRAFT_PR_MODE === true) {\n\t\t\t// Include the content, remove the conditional markers\n\t\t\tresult = result.replace(draftPrModeRegex, '$1')\n\t\t} else {\n\t\t\t// Remove the entire conditional block\n\t\t\tresult = result.replace(draftPrModeRegex, '')\n\t\t}\n\n\t\t// Process STANDARD_ISSUE_MODE conditionals\n\t\tconst standardIssueModeRegex = /\\{\\{#IF STANDARD_ISSUE_MODE\\}\\}(.*?)\\{\\{\\/IF STANDARD_ISSUE_MODE\\}\\}/gs\n\n\t\tif (variables.STANDARD_ISSUE_MODE === true) {\n\t\t\t// Include the content, remove the conditional markers\n\t\t\tresult = result.replace(standardIssueModeRegex, '$1')\n\t\t} else {\n\t\t\t// Remove the entire conditional block\n\t\t\tresult = result.replace(standardIssueModeRegex, '')\n\t\t}\n\n\t\t// Process HAS_PACKAGE_JSON conditionals\n\t\tconst hasPackageJsonRegex = /\\{\\{#IF HAS_PACKAGE_JSON\\}\\}(.*?)\\{\\{\\/IF 
HAS_PACKAGE_JSON\\}\\}/gs\n\n\t\tif (variables.HAS_PACKAGE_JSON === true) {\n\t\t\t// Include the content, remove the conditional markers\n\t\t\tresult = result.replace(hasPackageJsonRegex, '$1')\n\t\t} else {\n\t\t\t// Remove the entire conditional block\n\t\t\tresult = result.replace(hasPackageJsonRegex, '')\n\t\t}\n\n\t\t// Process NO_PACKAGE_JSON conditionals\n\t\tconst noPackageJsonRegex = /\\{\\{#IF NO_PACKAGE_JSON\\}\\}(.*?)\\{\\{\\/IF NO_PACKAGE_JSON\\}\\}/gs\n\n\t\tif (variables.NO_PACKAGE_JSON === true) {\n\t\t\t// Include the content, remove the conditional markers\n\t\t\tresult = result.replace(noPackageJsonRegex, '$1')\n\t\t} else {\n\t\t\t// Remove the entire conditional block\n\t\t\tresult = result.replace(noPackageJsonRegex, '')\n\t\t}\n\n\t\treturn result\n\t}\n\n\t/**\n\t * Get a fully processed prompt for a workflow type\n\t */\n\tasync getPrompt(\n\t\ttype: 'issue' | 'pr' | 'regular' | 'init' | 'session-summary',\n\t\tvariables: TemplateVariables\n\t): Promise<string> {\n\t\tconst template = await this.loadTemplate(type)\n\t\treturn this.substituteVariables(template, variables)\n\t}\n}\n"],"mappings":";;;;;;AAAA,SAAS,gBAAgB;AACzB,SAAS,kBAAkB;AAC3B,OAAO,UAAU;AACjB,SAAS,qBAAqB;AA4CvB,IAAM,wBAAN,MAA4B;AAAA,EAGlC,YAAY,aAAsB;AACjC,QAAI,aAAa;AAChB,WAAK,cAAc;AAAA,IACpB,OAAO;AAGN,YAAM,iBAAiB,YAAY;AACnC,YAAM,kBAAkB,cAAc,cAAc;AACpD,YAAM,UAAU,KAAK,QAAQ,eAAe;AAG5C,UAAIA,eAAc,KAAK,KAAK,SAAS,SAAS;AAC9C,UAAI,aAAa;AAGjB,aAAO,eAAe,KAAK,QAAQ,UAAU,GAAG;AAC/C,cAAM,gBAAgB,KAAK,KAAK,YAAY,SAAS;AACrD,YAAI;AAEH,qBAAW,aAAa;AACxB,UAAAA,eAAc;AACd;AAAA,QACD,QAAQ;AACP,uBAAa,KAAK,QAAQ,UAAU;AAAA,QACrC;AAAA,MACD;AAEA,WAAK,cAAcA;AACnB,aAAO,MAAM,qCAAqC;AAAA,QACjD;AAAA,QACA;AAAA,QACA,aAAa,KAAK;AAAA,MACnB,CAAC;AAAA,IACF;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAAa,cAAwF;AAC1G,UAAM,eAAe,KAAK,KAAK,KAAK,aAAa,GAAG,YAAY,aAAa;AAE7E,WAAO,MAAM,oBAAoB;AAAA,MAChC;AAAA,MACA,aAAa,KAAK;AAAA,MAClB;AAAA,IACD,CAAC;AAED,QAAI;AACH,aAAO,MAAM,SAAS,cAAc,OAAO;AAAA,IAC5C,SAAS,OAAO;AACf,aAAO,MAAM,2BAA2B,EAAE,cAAc,cAAc,MAAM,CAAC;AAC7E,YAAM,IAAI,MAAM,uBAAuB,YAAY,EAAE;AAAA,IACtD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,oBAAoB,UAAkB,WAAsC;AAC3E,QAAI,SAAS;AAGb,aAAS,KAAK,2BAA2B,QAAQ,SAAS;AAG1D,QAAI,UAAU,iBAAiB,QAAW;AACzC,eAAS,OAAO,QAAQ,iBAAiB,OAAO,UAAU,YAAY,CAAC;AAAA,IACxE;AAEA,QAAI,UAAU,cAAc,QAAW;AACtC,eAAS,OAAO,QAAQ,cAAc,OAAO,UAAU,SAAS,CAAC;AAAA,IAClE;AAEA,QAAI,UAAU,gBAAgB,QAAW;AACxC,eAAS,OAAO,QAAQ,gBAAgB,UAAU,WAAW;AAAA,IAC9D;AAEA,QAAI,UAAU,aAAa,QAAW;AACrC,eAAS,OAAO,QAAQ,aAAa,UAAU,QAAQ;AAAA,IACxD;AAEA,QAAI,UAAU,mBAAmB,QAAW;AAC3C,eAAS,OAAO,QAAQ,mBAAmB,UAAU,cAAc;AAAA,IACpE;AAEA,QAAI,UAAU,SAAS,QAAW;AACjC,eAAS,OAAO,QAAQ,SAAS,OAAO,UAAU,IAAI,CAAC;AAAA,IACxD;AAEA,QAAI,UAAU,oBAAoB,QAAW;AAC5C,eAAS,OAAO,QAAQ,oBAAoB,UAAU,eAAe;AAAA,IACtE;AAEA,QAAI,UAAU,yBAAyB,QAAW;AACjD,eAAS,OAAO,QAAQ,yBAAyB,UAAU,oBAAoB;AAAA,IAChF;AAEA,QAAI,UAAU,kBAAkB,QAAW;AAC1C,eAAS,OAAO,QAAQ,kBAAkB,UAAU,aAAa;AAAA,IAClE;AAEA,QAAI,UAAU,wBAAwB,QAAW;AAChD,eAAS,OAAO,QAAQ,wBAAwB,UAAU,mBAAmB;AAAA,IAC9E;AAEA,QAAI,UAAU,eAAe,QAAW;AACvC,eAAS,OAAO,QAAQ,eAAe,UAAU,UAAU;AAAA,IAC5D;AAEA,QAAI,UAAU,sBAAsB,QAAW;AAC9C,eAAS,OAAO,QAAQ,sBAAsB,UAAU,iBAAiB;AAAA,IAC1E;AAEA,QAAI,UAAU,yBAAyB,QAAW;AACjD,eAAS,OAAO,QAAQ,yBAAyB,UAAU,oBAAoB;AAAA,IAChF;AAEA,QAAI,UAAU,iBAAiB,QAAW;AACzC,eAAS,OAAO,QAAQ,iBAAiB,UAAU,YAAY;AAAA,IAChE;AAEA,QAAI,UAAU,qBAAqB,QAAW;AAC7C,eAAS,OAAO,QAAQ,qBAAqB,UAAU,gBAAgB;AAAA,IACxE;AAEA,QAAI,UAAU,kBAAkB,QAAW;AAC1C,eAAS,OAAO,QAAQ,kBAAkB,UAAU,aAAa;AAAA,IAClE;AAEA,QAAI,UAAU,uBAAuB,QAAW;AAC/C,eAAS,OAAO,QAAQ,uBAAuB,UAAU,kBAAkB;AAAA,IAC5E;AAEA,QAAI,UAAU,sBAAsB,QAAW;AAC9C,eAAS,OAAO,QAAQ,sBAAsB,UAAU,iB
AAiB;AAAA,IAC1E;AAEA,QAAI,UAAU,eAAe,QAAW;AACvC,eAAS,OAAO,QAAQ,eAAe,UAAU,UAAU;AAAA,IAC5D;AAEA,QAAI,UAAU,mBAAmB,QAAW;AAC3C,eAAS,OAAO,QAAQ,mBAAmB,UAAU,cAAc;AAAA,IACpE;AAEA,QAAI,UAAU,4BAA4B,QAAW;AACpD,eAAS,OAAO,QAAQ,4BAA4B,UAAU,uBAAuB;AAAA,IACtF;AAEA,QAAI,UAAU,+BAA+B,QAAW;AACvD,eAAS,OAAO,QAAQ,+BAA+B,UAAU,0BAA0B;AAAA,IAC5F;AAGA,QAAI,UAAU,oBAAoB,QAAW;AAC5C,eAAS,OAAO,QAAQ,oBAAoB,UAAU,eAAe;AAAA,IACtE;AAEA,QAAI,UAAU,gBAAgB,QAAW;AACxC,eAAS,OAAO,QAAQ,gBAAgB,UAAU,WAAW;AAAA,IAC9D;AAEA,QAAI,UAAU,cAAc,QAAW;AACtC,eAAS,OAAO,QAAQ,cAAc,UAAU,SAAS;AAAA,IAC1D;AAEA,QAAI,UAAU,sBAAsB,QAAW;AAC9C,eAAS,OAAO,QAAQ,sBAAsB,UAAU,iBAAiB;AAAA,IAC1E;AAGA,QAAI,UAAU,oBAAoB,QAAW;AAC5C,eAAS,OAAO,QAAQ,oBAAoB,OAAO,UAAU,eAAe,CAAC;AAAA,IAC9E;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,2BAA2B,UAAkB,WAAsC;AAC1F,QAAI,SAAS;AAGb,UAAM,eAAe;AAErB,QAAI,UAAU,kBAAkB,MAAM;AAErC,eAAS,OAAO,QAAQ,cAAc,IAAI;AAAA,IAC3C,OAAO;AAEN,eAAS,OAAO,QAAQ,cAAc,EAAE;AAAA,IACzC;AAGA,UAAM,oBAAoB;AAE1B,QAAI,UAAU,kBAAkB,UAAa,UAAU,kBAAkB,IAAI;AAE5E,eAAS,OAAO,QAAQ,mBAAmB,IAAI;AAAA,IAChD,OAAO;AAEN,eAAS,OAAO,QAAQ,mBAAmB,EAAE;AAAA,IAC9C;AAGA,UAAM,0BAA0B;AAEhC,QAAI,UAAU,yBAAyB,UAAa,UAAU,yBAAyB,IAAI;AAE1F,eAAS,OAAO,QAAQ,yBAAyB,IAAI;AAAA,IACtD,OAAO;AAEN,eAAS,OAAO,QAAQ,yBAAyB,EAAE;AAAA,IACpD;AAGA,UAAM,yBAAyB;AAE/B,QAAI,UAAU,wBAAwB,UAAa,UAAU,wBAAwB,IAAI;AAExF,eAAS,OAAO,QAAQ,wBAAwB,IAAI;AAAA,IACrD,OAAO;AAEN,eAAS,OAAO,QAAQ,wBAAwB,EAAE;AAAA,IACnD;AAGA,UAAM,uBAAuB;AAE7B,QAAI,UAAU,qBAAqB,UAAa,UAAU,qBAAqB,IAAI;AAElF,eAAS,OAAO,QAAQ,sBAAsB,IAAI;AAAA,IACnD,OAAO;AAEN,eAAS,OAAO,QAAQ,sBAAsB,EAAE;AAAA,IACjD;AAGA,UAAM,oBAAoB;AAE1B,QAAI,UAAU,kBAAkB,UAAa,UAAU,kBAAkB,IAAI;AAE5E,eAAS,OAAO,QAAQ,mBAAmB,IAAI;AAAA,IAChD,OAAO;AAEN,eAAS,OAAO,QAAQ,mBAAmB,EAAE;AAAA,IAC9C;AAGA,UAAM,iBAAiB;AAEvB,QAAI,UAAU,eAAe,UAAa,UAAU,eAAe,IAAI;AAEtE,eAAS,OAAO,QAAQ,gBAAgB,IAAI;AAAA,IAC7C,OAAO;AAEN,eAAS,OAAO,QAAQ,gBAAgB,EAAE;AAAA,IAC3C;AAGA,UAAM,qBAAqB;AAE3B,QAAI,UAAU,oBAAoB,MAAM;AAEvC,eAAS,OAAO,QAAQ,oBAAoB,IAAI;AAAA,IACjD,OAAO;AAEN,eAAS,OAAO,QAAQ,oBAAoB,EAAE;AAAA,IAC/C;AAGA,UAAM,uBAAuB;AAE7B,QAAI,UAAU,qBAAqB,MAAM;AAExC,eAAS,OAAO,QAAQ,sBAAsB,IAAI;AAAA,IACnD,OAAO;AAEN,eAAS,OAAO,QAAQ,sBAAsB,EAAE;AAAA,IACjD;AAGA,UAAM,wBAAwB;AAE9B,QAAI,UAAU,sBAAsB,UAAa,UAAU,sBAAsB,IAAI;AAEpF,eAAS,OAAO,QAAQ,uBAAuB,IAAI;AAAA,IACpD,OAAO;AAEN,eAAS,OAAO,QAAQ,uBAAuB,EAAE;AAAA,IAClD;AAGA,UAAM,mBAAmB;AAEzB,QAAI,UAAU,kBAAkB,MAAM;AAErC,eAAS,OAAO,QAAQ,kBAAkB,IAAI;AAAA,IAC/C,OAAO;AAEN,eAAS,OAAO,QAAQ,kBAAkB,EAAE;AAAA,IAC7C;AAGA,UAAM,yBAAyB;AAE/B,QAAI,UAAU,wBAAwB,MAAM;AAE3C,eAAS,OAAO,QAAQ,wBAAwB,IAAI;AAAA,IACrD,OAAO;AAEN,eAAS,OAAO,QAAQ,wBAAwB,EAAE;AAAA,IACnD;AAGA,UAAM,sBAAsB;AAE5B,QAAI,UAAU,qBAAqB,MAAM;AAExC,eAAS,OAAO,QAAQ,qBAAqB,IAAI;AAAA,IAClD,OAAO;AAEN,eAAS,OAAO,QAAQ,qBAAqB,EAAE;AAAA,IAChD;AAGA,UAAM,qBAAqB;AAE3B,QAAI,UAAU,oBAAoB,MAAM;AAEvC,eAAS,OAAO,QAAQ,oBAAoB,IAAI;AAAA,IACjD,OAAO;AAEN,eAAS,OAAO,QAAQ,oBAAoB,EAAE;AAAA,IAC/C;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,UACL,MACA,WACkB;AAClB,UAAM,WAAW,MAAM,KAAK,aAAa,IAAI;AAC7C,WAAO,KAAK,oBAAoB,UAAU,SAAS;AAAA,EACpD;AACD;","names":["templateDir"]}
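The removed map above embeds the full `PromptTemplateManager.ts` source, including its `{{#IF VAR}}content{{/IF VAR}}` conditional handling. The sketch below is an illustrative reduction of that pattern, not the shipped code; `applyConditional` and the sample template are hypothetical names used only to show how a boolean flag keeps or strips a block before plain token replacement.

```ts
// Illustrative sketch only: mirrors the conditional handling embedded above.
// `applyConditional` is a hypothetical helper, not part of @iloom/cli.
function applyConditional(template: string, name: string, enabled: boolean): string {
	// The /s flag lets . span newlines, so multi-line blocks are handled too.
	const block = new RegExp(`\\{\\{#IF ${name}\\}\\}(.*?)\\{\\{\\/IF ${name}\\}\\}`, 'gs')
	return template.replace(block, enabled ? '$1' : '')
}

// Example: ONE_SHOT_MODE toggles an extra instruction, then plain tokens are substituted.
const sample =
	'Fix issue ISSUE_NUMBER.{{#IF ONE_SHOT_MODE}} Do not pause for questions.{{/IF ONE_SHOT_MODE}}'
const prompt = applyConditional(sample, 'ONE_SHOT_MODE', true).replace(/ISSUE_NUMBER/g, '42')
// -> "Fix issue 42. Do not pause for questions."
```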
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/commands/cleanup.ts"],"sourcesContent":["import { getLogger } from '../utils/logger-context.js'\nimport { GitWorktreeManager } from '../lib/GitWorktreeManager.js'\nimport { ResourceCleanup } from '../lib/ResourceCleanup.js'\nimport { ProcessManager } from '../lib/process/ProcessManager.js'\nimport { DatabaseManager } from '../lib/DatabaseManager.js'\nimport { EnvironmentManager } from '../lib/EnvironmentManager.js'\nimport { CLIIsolationManager } from '../lib/CLIIsolationManager.js'\nimport { SettingsManager } from '../lib/SettingsManager.js'\nimport { promptConfirmation } from '../utils/prompt.js'\nimport { IdentifierParser } from '../utils/IdentifierParser.js'\nimport { loadEnvIntoProcess } from '../utils/env.js'\nimport { createNeonProviderFromSettings } from '../utils/neon-helpers.js'\nimport { LoomManager } from '../lib/LoomManager.js'\nimport type { CleanupOptions } from '../types/index.js'\nimport type { CleanupResult } from '../types/cleanup.js'\nimport type { ParsedInput } from './start.js'\n\n/**\n * Input structure for CleanupCommand.execute()\n */\nexport interface CleanupCommandInput {\n identifier?: string\n options: CleanupOptions\n}\n\n/**\n * Parsed and validated cleanup command input\n * Mode determines which cleanup operation to perform\n */\nexport interface ParsedCleanupInput {\n mode: 'list' | 'single' | 'issue' | 'all'\n identifier?: string\n issueNumber?: string | number\n branchName?: string\n originalInput?: string\n options: CleanupOptions\n}\n\n/**\n * Manages cleanup command execution with option parsing and validation\n * Follows the command pattern established by StartCommand\n *\n * This implementation handles ONLY parsing, validation, and mode determination.\n * Actual cleanup operations are deferred to subsequent sub-issues.\n */\nexport class CleanupCommand {\n private readonly gitWorktreeManager: GitWorktreeManager\n private resourceCleanup?: ResourceCleanup\n private loomManager?: import('../lib/LoomManager.js').LoomManager\n private readonly identifierParser: IdentifierParser\n\n constructor(\n gitWorktreeManager?: GitWorktreeManager,\n resourceCleanup?: ResourceCleanup\n ) {\n // Load environment variables first\n const envResult = loadEnvIntoProcess()\n if (envResult.error) {\n getLogger().debug(`Environment loading warning: ${envResult.error.message}`)\n }\n if (envResult.parsed) {\n getLogger().debug(`Loaded ${Object.keys(envResult.parsed).length} environment variables`)\n }\n\n this.gitWorktreeManager = gitWorktreeManager ?? new GitWorktreeManager()\n\n // Initialize ResourceCleanup with DatabaseManager and CLIIsolationManager\n // ResourceCleanup will be initialized lazily with proper configuration\n if (resourceCleanup) {\n this.resourceCleanup = resourceCleanup\n }\n\n // Initialize IdentifierParser for pattern-based detection\n this.identifierParser = new IdentifierParser(this.gitWorktreeManager)\n }\n\n /**\n * Lazy initialization of ResourceCleanup and LoomManager with properly configured DatabaseManager\n */\n private async ensureResourceCleanup(): Promise<void> {\n if (this.resourceCleanup && this.loomManager) {\n return\n }\n\n const settingsManager = new SettingsManager()\n const settings = await settingsManager.loadSettings()\n const databaseUrlEnvVarName = settings.capabilities?.database?.databaseUrlEnvVarName ?? 
'DATABASE_URL'\n\n const environmentManager = new EnvironmentManager()\n const neonProvider = createNeonProviderFromSettings(settings)\n const databaseManager = new DatabaseManager(neonProvider, environmentManager, databaseUrlEnvVarName)\n const cliIsolationManager = new CLIIsolationManager()\n\n this.resourceCleanup ??= new ResourceCleanup(\n this.gitWorktreeManager,\n new ProcessManager(),\n databaseManager,\n cliIsolationManager\n )\n\n // Initialize LoomManager if not provided (for child loom detection)\n if (!this.loomManager) {\n const { GitHubService } = await import('../lib/GitHubService.js')\n const { ClaudeContextManager } = await import('../lib/ClaudeContextManager.js')\n const { ProjectCapabilityDetector } = await import('../lib/ProjectCapabilityDetector.js')\n const { DefaultBranchNamingService } = await import('../lib/BranchNamingService.js')\n\n this.loomManager = new LoomManager(\n this.gitWorktreeManager,\n new GitHubService(),\n new DefaultBranchNamingService({ useClaude: true }),\n environmentManager,\n new ClaudeContextManager(),\n new ProjectCapabilityDetector(),\n cliIsolationManager,\n settingsManager,\n databaseManager\n )\n }\n }\n\n /**\n * Check for child looms and exit gracefully if any exist\n * Always checks the TARGET loom (the one being cleaned up), not the current directory's loom\n *\n * @param parsed - The parsed input identifying the loom being cleaned up\n */\n private async checkForChildLooms(parsed: ParsedCleanupInput): Promise<void> {\n await this.ensureResourceCleanup()\n if (!this.loomManager) {\n throw new Error('Failed to initialize LoomManager')\n }\n\n // Determine which branch is being cleaned up based on parsed input\n let targetBranch: string | undefined\n\n if (parsed.branchName) {\n targetBranch = parsed.branchName\n } else if (parsed.mode === 'issue' && parsed.issueNumber !== undefined) {\n // For issues, try to find the worktree by issue number to get the branch name\n const worktree = await this.gitWorktreeManager.findWorktreeForIssue(parsed.issueNumber)\n targetBranch = worktree?.branch\n }\n\n // If we can't determine the target branch, skip the check\n if (!targetBranch) {\n getLogger().debug(`Cannot determine target branch for child loom check`)\n return\n }\n\n // Check if the TARGET loom has any child looms\n const hasChildLooms = await this.loomManager.checkAndWarnChildLooms(targetBranch)\n if (hasChildLooms) {\n throw new Error('Cannot cleanup loom while child looms exist. 
Please \\'finish\\' or \\'cleanup\\' child looms first.')\n }\n }\n\n /**\n * Main entry point for the cleanup command\n * Parses input, validates options, and determines operation mode\n */\n public async execute(input: CleanupCommandInput): Promise<CleanupResult | void> {\n // Step 1: Parse input and determine mode\n const parsed = this.parseInput(input)\n\n // Step 2: Validate option combinations\n this.validateInput(parsed)\n\n // Note: JSON mode auto-skips routine confirmations (programmatic use can't interact)\n // Safety checks still require --force to bypass (ResourceCleanup.validateWorktreeSafety throws errors)\n\n // Step 2.5: Check for child looms AFTER parsing input\n // This ensures we only block when cleaning the CURRENT loom (parent), not a child\n await this.checkForChildLooms(parsed)\n\n // Step 3: Execute based on mode\n getLogger().info(`Cleanup mode: ${parsed.mode}`)\n\n if (parsed.mode === 'single') {\n return await this.executeSingleCleanup(parsed)\n } else if (parsed.mode === 'list') {\n getLogger().info('Would list all worktrees') // TODO: Implement in Sub-issue #2\n getLogger().success('Command parsing and validation successful')\n return {\n identifier: 'list',\n success: true,\n dryRun: parsed.options.dryRun ?? false,\n operations: [],\n errors: [],\n rollbackRequired: false,\n }\n } else if (parsed.mode === 'all') {\n getLogger().info('Would remove all worktrees') // TODO: Implement in Sub-issue #5\n getLogger().success('Command parsing and validation successful')\n return {\n identifier: 'all',\n success: true,\n dryRun: parsed.options.dryRun ?? false,\n operations: [],\n errors: [],\n rollbackRequired: false,\n }\n } else if (parsed.mode === 'issue') {\n return await this.executeIssueCleanup(parsed)\n }\n }\n\n /**\n * Parse input to determine cleanup mode and extract relevant data\n * Implements auto-detection: numeric input = issue number, non-numeric = branch name\n *\n * @private\n */\n private parseInput(input: CleanupCommandInput): ParsedCleanupInput {\n const { identifier, options } = input\n\n // Trim identifier if present\n const trimmedIdentifier = identifier?.trim() ?? undefined\n\n // Mode: List (takes priority - it's informational only)\n if (options.list) {\n const result: ParsedCleanupInput = {\n mode: 'list',\n options\n }\n if (trimmedIdentifier) {\n result.identifier = trimmedIdentifier\n }\n return result\n }\n\n // Mode: All (remove everything)\n if (options.all) {\n const result: ParsedCleanupInput = {\n mode: 'all',\n options\n }\n if (trimmedIdentifier) {\n result.identifier = trimmedIdentifier\n }\n if (options.issue !== undefined) {\n result.issueNumber = options.issue\n }\n return result\n }\n\n // Mode: Explicit issue number via --issue flag\n if (options.issue !== undefined) {\n // Need to determine if identifier is branch or numeric to set branchName\n if (trimmedIdentifier) {\n const numericPattern = /^[0-9]+$/\n if (!numericPattern.test(trimmedIdentifier)) {\n // Identifier is a branch name with explicit --issue flag\n return {\n mode: 'issue',\n issueNumber: options.issue,\n branchName: trimmedIdentifier,\n identifier: trimmedIdentifier,\n originalInput: trimmedIdentifier,\n options\n }\n }\n }\n const result: ParsedCleanupInput = {\n mode: 'issue',\n issueNumber: options.issue,\n options\n }\n if (trimmedIdentifier) {\n result.identifier = trimmedIdentifier\n }\n return result\n }\n\n // Mode: Auto-detect from identifier\n if (!trimmedIdentifier) {\n throw new Error('Missing required argument: identifier. 
Use --all to remove all worktrees or --list to list them.')\n }\n\n // Auto-detection: Check if identifier is purely numeric\n // Pattern from bash script line 364: ^[0-9]+$\n const numericPattern = /^[0-9]+$/\n if (numericPattern.test(trimmedIdentifier)) {\n // Numeric input = issue number\n return {\n mode: 'issue',\n issueNumber: parseInt(trimmedIdentifier, 10),\n identifier: trimmedIdentifier,\n originalInput: trimmedIdentifier,\n options\n }\n } else {\n // Non-numeric = branch name\n return {\n mode: 'single',\n branchName: trimmedIdentifier,\n identifier: trimmedIdentifier,\n originalInput: trimmedIdentifier,\n options\n }\n }\n }\n\n /**\n * Validate parsed input for option conflicts\n * Throws descriptive errors for invalid option combinations\n *\n * @private\n */\n private validateInput(parsed: ParsedCleanupInput): void {\n const { mode, options, branchName } = parsed\n\n // Conflict: --list is informational only, incompatible with destructive operations\n if (mode === 'list') {\n if (options.all) {\n throw new Error('Cannot use --list with --all (list is informational only)')\n }\n if (options.issue !== undefined) {\n throw new Error('Cannot use --list with --issue (list is informational only)')\n }\n if (parsed.identifier) {\n throw new Error('Cannot use --list with a specific identifier (list shows all worktrees)')\n }\n }\n\n // Conflict: --all removes everything, can't combine with specific identifier or --issue\n if (mode === 'all') {\n if (parsed.identifier) {\n throw new Error('Cannot use --all with a specific identifier. Use one or the other.')\n }\n if (parsed.issueNumber !== undefined) {\n throw new Error('Cannot use --all with a specific identifier. Use one or the other.')\n }\n }\n\n // Conflict: explicit --issue flag with branch name identifier\n // (This prevents confusion when user provides both)\n if (options.issue !== undefined && branchName) {\n throw new Error('Cannot use --issue flag with branch name identifier. Use numeric identifier or --issue flag alone.')\n }\n\n // Note: --force and --dry-run are compatible with all modes (no conflicts)\n }\n\n /**\n * Execute cleanup for single worktree\n * Implements two-stage confirmation: worktree removal, then branch deletion\n * Uses IdentifierParser for pattern-based detection without GitHub API calls\n */\n private async executeSingleCleanup(parsed: ParsedCleanupInput): Promise<CleanupResult> {\n const identifier = parsed.branchName ?? parsed.identifier ?? 
''\n if (!identifier) {\n throw new Error('No identifier found for cleanup')\n }\n const { force, dryRun } = parsed.options\n\n // Step 1: Parse identifier using pattern-based detection\n let parsedInput: ParsedInput = await this.identifierParser.parseForPatternDetection(identifier)\n\n // If type is 'branch', try to extract issue number for CLI symlink cleanup\n if (parsedInput.type === 'branch' && parsedInput.branchName) {\n const { extractIssueNumber } = await import('../utils/git.js')\n const extractedNumber = extractIssueNumber(parsedInput.branchName)\n if (extractedNumber !== null) {\n parsedInput = {\n ...parsedInput,\n number: extractedNumber // Add number for CLI symlink cleanup\n }\n }\n }\n\n // Step 2: Display worktree details\n getLogger().info(`Preparing to cleanup worktree: ${identifier}`)\n\n // Step 3: Routine confirmation - worktree removal\n // Skip if --force (user explicitly bypasses) or --json (programmatic use can't interact)\n // Note: Safety checks (uncommitted changes, unmerged work) still require --force to bypass\n if (!force && !parsed.options.json) {\n const confirmWorktree = await promptConfirmation('Remove this worktree?', true)\n if (!confirmWorktree) {\n getLogger().info('Cleanup cancelled')\n return {\n identifier,\n success: false,\n dryRun: dryRun ?? false,\n operations: [],\n errors: [],\n rollbackRequired: false,\n }\n }\n }\n\n // Step 4: Execute worktree cleanup (includes safety validation)\n // Issue #275 fix: Run 5-point safety check BEFORE any deletion\n // This prevents the scenario where worktree is deleted but branch deletion fails\n await this.ensureResourceCleanup()\n if (!this.resourceCleanup) {\n throw new Error('Failed to initialize ResourceCleanup')\n }\n const cleanupResult = await this.resourceCleanup.cleanupWorktree(parsedInput, {\n dryRun: dryRun ?? false,\n force: force ?? false,\n deleteBranch: true, // Always include branch deletion (safety checks run first)\n keepDatabase: false,\n checkMergeSafety: true // Run 5-point safety check BEFORE any deletion\n })\n\n // Add dryRun flag to result\n cleanupResult.dryRun = dryRun ?? false\n\n // Step 5: Report cleanup results\n this.reportCleanupResults(cleanupResult)\n\n // Final success message\n if (cleanupResult.success) {\n getLogger().success('Cleanup completed successfully')\n } else {\n getLogger().warn('Cleanup completed with errors - see details above')\n }\n\n return cleanupResult\n }\n\n /**\n * Report cleanup operation results to user\n */\n private reportCleanupResults(result: CleanupResult): void {\n getLogger().info('Cleanup operations:')\n\n result.operations.forEach(op => {\n const status = op.success ? '✓' : '✗'\n const message = op.error ? 
`${op.message}: ${op.error}` : op.message\n\n if (op.success) {\n getLogger().info(` ${status} ${message}`)\n } else {\n getLogger().error(` ${status} ${message}`)\n }\n })\n\n if (result.errors.length > 0) {\n getLogger().warn(`${result.errors.length} error(s) occurred during cleanup`)\n }\n }\n\n /**\n * Execute cleanup for all worktrees associated with an issue or PR number\n * Searches for worktrees by their path patterns (e.g., issue-25, pr-25, 25-feature, _pr_25)\n * Implements bash cleanup-worktree.sh remove_worktrees_by_issue() (lines 157-242)\n */\n private async executeIssueCleanup(parsed: ParsedCleanupInput): Promise<CleanupResult> {\n const issueNumber = parsed.issueNumber\n if (issueNumber === undefined) {\n throw new Error('No issue/PR number provided for cleanup')\n }\n\n const { force, dryRun } = parsed.options\n\n getLogger().info(`Finding worktrees related to GitHub issue/PR #${issueNumber}...`)\n\n // Step 1: Get all worktrees and filter by path pattern\n const worktrees = await this.gitWorktreeManager.listWorktrees()\n const matchingWorktrees = worktrees.filter(wt => {\n const path = wt.path.toLowerCase()\n // Lowercase for case-insensitive matching (Linear IDs are uppercase like MARK-1)\n const idStr = String(issueNumber).toLowerCase()\n\n // Check if path contains the identifier with proper word boundaries\n // Matches: issue-25, pr-25, 25-feature, _pr_25, issue-mark-1, etc.\n // Uses word boundary or common separators (-, _, /) for alphanumeric IDs\n const pattern = new RegExp(`(?:^|[/_-])${idStr}(?:[/_-]|$)`)\n return pattern.test(path)\n })\n\n if (matchingWorktrees.length === 0) {\n getLogger().warn(`No worktrees found for GitHub issue/PR #${issueNumber}`)\n getLogger().info(`Searched for worktree paths containing: ${issueNumber}, _pr_${issueNumber}, issue-${issueNumber}, etc.`)\n return {\n identifier: String(issueNumber),\n success: true,\n dryRun: dryRun ?? false,\n operations: [],\n errors: [],\n rollbackRequired: false,\n }\n }\n\n // Step 2: Build targets list from matching worktrees\n const targets: Array<{ branchName: string; hasWorktree: boolean; worktreePath?: string }> =\n matchingWorktrees.map(wt => ({\n branchName: wt.branch,\n hasWorktree: true,\n worktreePath: wt.path\n }))\n\n // Step 3: Display preview\n getLogger().info(`Found ${targets.length} worktree(s) related to issue/PR #${issueNumber}:`)\n for (const target of targets) {\n getLogger().info(` Branch: ${target.branchName} (${target.worktreePath})`)\n }\n\n // Step 4: Routine batch confirmation\n // Skip if --force (user explicitly bypasses) or --json (programmatic use can't interact)\n // Note: Safety checks per-worktree (uncommitted changes, unmerged work) still require --force to bypass\n if (!force && !parsed.options.json) {\n const confirmCleanup = await promptConfirmation(\n `Remove ${targets.length} worktree(s)?`,\n true\n )\n if (!confirmCleanup) {\n getLogger().info('Cleanup cancelled')\n return {\n identifier: String(issueNumber),\n success: false,\n dryRun: dryRun ?? 
false,\n operations: [],\n errors: [],\n rollbackRequired: false,\n }\n }\n }\n\n // Step 5: Process each target sequentially\n let worktreesRemoved = 0\n let branchesDeleted = 0\n const databaseBranchesDeletedList: string[] = []\n let failed = 0\n\n for (const target of targets) {\n getLogger().info(`Processing worktree: ${target.branchName}`)\n\n // Cleanup worktree using ResourceCleanup with ParsedInput\n // Now includes branch deletion with 5-point safety check BEFORE any deletion\n try {\n // Use the known issue number directly instead of parsing from branch name\n // This ensures CLI symlinks (created with issue number) are properly cleaned up\n const parsedInput: ParsedInput = {\n type: 'issue',\n number: issueNumber, // Use the known issue number, not parsed from branch\n branchName: target.branchName,\n originalInput: String(issueNumber)\n }\n\n await this.ensureResourceCleanup()\n if (!this.resourceCleanup) {\n throw new Error('Failed to initialize ResourceCleanup')\n }\n // Issue #275 fix: Run safety checks BEFORE deleting worktree\n // This prevents the scenario where worktree is deleted but branch deletion fails\n const result = await this.resourceCleanup.cleanupWorktree(parsedInput, {\n dryRun: dryRun ?? false,\n force: force ?? false,\n deleteBranch: true, // Include branch deletion (with safety checks)\n keepDatabase: false,\n checkMergeSafety: true // Run 5-point safety check BEFORE any deletion\n })\n\n if (result.success) {\n worktreesRemoved++\n getLogger().success(` Worktree removed: ${target.branchName}`)\n\n // Check if branch was deleted\n const branchOperation = result.operations.find(op => op.type === 'branch')\n if (branchOperation?.success) {\n branchesDeleted++\n getLogger().success(` Branch deleted: ${target.branchName}`)\n }\n\n // Check if database branch was actually deleted (use explicit deleted field)\n const dbOperation = result.operations.find(op => op.type === 'database')\n if (dbOperation?.deleted) {\n // Get branch name from result or use the target branch name\n const deletedBranchName = target.branchName\n databaseBranchesDeletedList.push(deletedBranchName)\n }\n } else {\n failed++\n getLogger().error(` Failed to remove worktree: ${target.branchName}`)\n }\n } catch (error) {\n failed++\n const errMsg = error instanceof Error ? error.message : 'Unknown error'\n getLogger().error(` Failed to cleanup: ${errMsg}`)\n continue // Continue with next worktree even if this one failed\n }\n }\n\n // Step 7: Report statistics\n getLogger().success(`Completed cleanup for issue/PR #${issueNumber}:`)\n getLogger().info(` Worktrees removed: ${worktreesRemoved}`)\n getLogger().info(` Branches deleted: ${branchesDeleted}`)\n if (databaseBranchesDeletedList.length > 0) {\n // Display branch names in the format requested\n getLogger().info(` Database branches deleted: ${databaseBranchesDeletedList.join(', ')}`)\n }\n if (failed > 0) {\n getLogger().warn(` Failed operations: ${failed}`)\n }\n\n // Return aggregated result\n return {\n identifier: String(issueNumber),\n success: failed === 0,\n dryRun: dryRun ?? false,\n operations: [\n { type: 'worktree' as const, success: true, message: `Removed ${worktreesRemoved} worktree(s)` },\n { type: 'branch' as const, success: true, message: `Deleted ${branchesDeleted} branch(es)` },\n ...(databaseBranchesDeletedList.length > 0 ? 
[{ type: 'database' as const, success: true, message: `Deleted ${databaseBranchesDeletedList.length} database branch(es)`, deleted: true }] : []),\n ],\n errors: [],\n rollbackRequired: false,\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6CO,IAAM,iBAAN,MAAqB;AAAA,EAM1B,YACE,oBACA,iBACA;AAEA,UAAM,YAAY,mBAAmB;AACrC,QAAI,UAAU,OAAO;AACnB,gBAAU,EAAE,MAAM,gCAAgC,UAAU,MAAM,OAAO,EAAE;AAAA,IAC7E;AACA,QAAI,UAAU,QAAQ;AACpB,gBAAU,EAAE,MAAM,UAAU,OAAO,KAAK,UAAU,MAAM,EAAE,MAAM,wBAAwB;AAAA,IAC1F;AAEA,SAAK,qBAAqB,sBAAsB,IAAI,mBAAmB;AAIvE,QAAI,iBAAiB;AACnB,WAAK,kBAAkB;AAAA,IACzB;AAGA,SAAK,mBAAmB,IAAI,iBAAiB,KAAK,kBAAkB;AAAA,EACtE;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,wBAAuC;AA/EvD;AAgFI,QAAI,KAAK,mBAAmB,KAAK,aAAa;AAC5C;AAAA,IACF;AAEA,UAAM,kBAAkB,IAAI,gBAAgB;AAC5C,UAAM,WAAW,MAAM,gBAAgB,aAAa;AACpD,UAAM,0BAAwB,oBAAS,iBAAT,mBAAuB,aAAvB,mBAAiC,0BAAyB;AAExF,UAAM,qBAAqB,IAAI,mBAAmB;AAClD,UAAM,eAAe,+BAA+B,QAAQ;AAC5D,UAAM,kBAAkB,IAAI,gBAAgB,cAAc,oBAAoB,qBAAqB;AACnG,UAAM,sBAAsB,IAAI,oBAAoB;AAEpD,SAAK,oBAAoB,IAAI;AAAA,MAC3B,KAAK;AAAA,MACL,IAAI,eAAe;AAAA,MACnB;AAAA,MACA;AAAA,IACF;AAGA,QAAI,CAAC,KAAK,aAAa;AACrB,YAAM,EAAE,cAAc,IAAI,MAAM,OAAO,6BAAyB;AAChE,YAAM,EAAE,qBAAqB,IAAI,MAAM,OAAO,oCAAgC;AAC9E,YAAM,EAAE,0BAA0B,IAAI,MAAM,OAAO,yCAAqC;AACxF,YAAM,EAAE,2BAA2B,IAAI,MAAM,OAAO,mCAA+B;AAEnF,WAAK,cAAc,IAAI;AAAA,QACrB,KAAK;AAAA,QACL,IAAI,cAAc;AAAA,QAClB,IAAI,2BAA2B,EAAE,WAAW,KAAK,CAAC;AAAA,QAClD;AAAA,QACA,IAAI,qBAAqB;AAAA,QACzB,IAAI,0BAA0B;AAAA,QAC9B;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,mBAAmB,QAA2C;AAC1E,UAAM,KAAK,sBAAsB;AACjC,QAAI,CAAC,KAAK,aAAa;AACrB,YAAM,IAAI,MAAM,kCAAkC;AAAA,IACpD;AAGA,QAAI;AAEJ,QAAI,OAAO,YAAY;AACrB,qBAAe,OAAO;AAAA,IACxB,WAAW,OAAO,SAAS,WAAW,OAAO,gBAAgB,QAAW;AAEtE,YAAM,WAAW,MAAM,KAAK,mBAAmB,qBAAqB,OAAO,WAAW;AACtF,qBAAe,qCAAU;AAAA,IAC3B;AAGA,QAAI,CAAC,cAAc;AACjB,gBAAU,EAAE,MAAM,qDAAqD;AACvE;AAAA,IACF;AAGA,UAAM,gBAAgB,MAAM,KAAK,YAAY,uBAAuB,YAAY;AAChF,QAAI,eAAe;AACjB,YAAM,IAAI,MAAM,8FAAkG;AAAA,IACpH;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAa,QAAQ,OAA2D;AAE9E,UAAM,SAAS,KAAK,WAAW,KAAK;AAGpC,SAAK,cAAc,MAAM;AAOzB,UAAM,KAAK,mBAAmB,MAAM;AAGpC,cAAU,EAAE,KAAK,iBAAiB,OAAO,IAAI,EAAE;AAE/C,QAAI,OAAO,SAAS,UAAU;AAC5B,aAAO,MAAM,KAAK,qBAAqB,MAAM;AAAA,IAC/C,WAAW,OAAO,SAAS,QAAQ;AACjC,gBAAU,EAAE,KAAK,0BAA0B;AAC3C,gBAAU,EAAE,QAAQ,2CAA2C;AAC/D,aAAO;AAAA,QACL,YAAY;AAAA,QACZ,SAAS;AAAA,QACT,QAAQ,OAAO,QAAQ,UAAU;AAAA,QACjC,YAAY,CAAC;AAAA,QACb,QAAQ,CAAC;AAAA,QACT,kBAAkB;AAAA,MACpB;AAAA,IACF,WAAW,OAAO,SAAS,OAAO;AAChC,gBAAU,EAAE,KAAK,4BAA4B;AAC7C,gBAAU,EAAE,QAAQ,2CAA2C;AAC/D,aAAO;AAAA,QACL,YAAY;AAAA,QACZ,SAAS;AAAA,QACT,QAAQ,OAAO,QAAQ,UAAU;AAAA,QACjC,YAAY,CAAC;AAAA,QACb,QAAQ,CAAC;AAAA,QACT,kBAAkB;AAAA,MACpB;AAAA,IACF,WAAW,OAAO,SAAS,SAAS;AAClC,aAAO,MAAM,KAAK,oBAAoB,MAAM;AAAA,IAC9C;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,WAAW,OAAgD;AACjE,UAAM,EAAE,YAAY,QAAQ,IAAI;AAGhC,UAAM,qBAAoB,yCAAY,WAAU;AAGhD,QAAI,QAAQ,MAAM;AAChB,YAAM,SAA6B;AAAA,QACjC,MAAM;AAAA,QACN;AAAA,MACF;AACA,UAAI,mBAAmB;AACrB,eAAO,aAAa;AAAA,MACtB;AACA,aAAO;AAAA,IACT;AAGA,QAAI,QAAQ,KAAK;AACf,YAAM,SAA6B;AAAA,QACjC,MAAM;AAAA,QACN;AAAA,MACF;AACA,UAAI,mBAAmB;AACrB,eAAO,aAAa;AAAA,MACtB;AACA,UAAI,QAAQ,UAAU,QAAW;AAC/B,eAAO,cAAc,QAAQ;AAAA,MAC/B;AACA,aAAO;AAAA,IACT;AAGA,QAAI,QAAQ,UAAU,QAAW;AAE/B,UAAI,mBAAmB;AACrB,cAAMA,kBAAiB;AACvB,YAAI,CAACA,gBAAe,KAAK,iBAAiB,GAAG;AAE3C,iBAAO;AAAA,YACL,MAAM;AAAA,YACN,aAAa,QAAQ;AAAA,YACrB,YAAY;AAAA,YACZ,YAAY;AAAA,YACZ,eAAe;AAAA,YACf;AAAA,UACF;AAAA,QACF;AAAA,MACF;AACA,YAAM,SAA6B;AAAA,QACjC,MAAM;AAAA,QACN,aAAa,QAAQ;AAAA,QACrB;AAAA,MACF;AACA,UAAI,mBAAmB;AACrB,eAAO,aAAa;AAAA,M
ACtB;AACA,aAAO;AAAA,IACT;AAGA,QAAI,CAAC,mBAAmB;AACtB,YAAM,IAAI,MAAM,kGAAkG;AAAA,IACpH;AAIA,UAAM,iBAAiB;AACvB,QAAI,eAAe,KAAK,iBAAiB,GAAG;AAE1C,aAAO;AAAA,QACL,MAAM;AAAA,QACN,aAAa,SAAS,mBAAmB,EAAE;AAAA,QAC3C,YAAY;AAAA,QACZ,eAAe;AAAA,QACf;AAAA,MACF;AAAA,IACF,OAAO;AAEL,aAAO;AAAA,QACL,MAAM;AAAA,QACN,YAAY;AAAA,QACZ,YAAY;AAAA,QACZ,eAAe;AAAA,QACf;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQQ,cAAc,QAAkC;AACtD,UAAM,EAAE,MAAM,SAAS,WAAW,IAAI;AAGtC,QAAI,SAAS,QAAQ;AACnB,UAAI,QAAQ,KAAK;AACf,cAAM,IAAI,MAAM,2DAA2D;AAAA,MAC7E;AACA,UAAI,QAAQ,UAAU,QAAW;AAC/B,cAAM,IAAI,MAAM,6DAA6D;AAAA,MAC/E;AACA,UAAI,OAAO,YAAY;AACrB,cAAM,IAAI,MAAM,yEAAyE;AAAA,MAC3F;AAAA,IACF;AAGA,QAAI,SAAS,OAAO;AAClB,UAAI,OAAO,YAAY;AACrB,cAAM,IAAI,MAAM,oEAAoE;AAAA,MACtF;AACA,UAAI,OAAO,gBAAgB,QAAW;AACpC,cAAM,IAAI,MAAM,oEAAoE;AAAA,MACtF;AAAA,IACF;AAIA,QAAI,QAAQ,UAAU,UAAa,YAAY;AAC7C,YAAM,IAAI,MAAM,oGAAoG;AAAA,IACtH;AAAA,EAGF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,qBAAqB,QAAoD;AACrF,UAAM,aAAa,OAAO,cAAc,OAAO,cAAc;AAC7D,QAAI,CAAC,YAAY;AACf,YAAM,IAAI,MAAM,iCAAiC;AAAA,IACnD;AACA,UAAM,EAAE,OAAO,OAAO,IAAI,OAAO;AAGjC,QAAI,cAA2B,MAAM,KAAK,iBAAiB,yBAAyB,UAAU;AAG9F,QAAI,YAAY,SAAS,YAAY,YAAY,YAAY;AAC3D,YAAM,EAAE,mBAAmB,IAAI,MAAM,OAAO,mBAAiB;AAC7D,YAAM,kBAAkB,mBAAmB,YAAY,UAAU;AACjE,UAAI,oBAAoB,MAAM;AAC5B,sBAAc;AAAA,UACZ,GAAG;AAAA,UACH,QAAQ;AAAA;AAAA,QACV;AAAA,MACF;AAAA,IACF;AAGA,cAAU,EAAE,KAAK,kCAAkC,UAAU,EAAE;AAK/D,QAAI,CAAC,SAAS,CAAC,OAAO,QAAQ,MAAM;AAClC,YAAM,kBAAkB,MAAM,mBAAmB,yBAAyB,IAAI;AAC9E,UAAI,CAAC,iBAAiB;AACpB,kBAAU,EAAE,KAAK,mBAAmB;AACpC,eAAO;AAAA,UACL;AAAA,UACA,SAAS;AAAA,UACT,QAAQ,UAAU;AAAA,UAClB,YAAY,CAAC;AAAA,UACb,QAAQ,CAAC;AAAA,UACT,kBAAkB;AAAA,QACpB;AAAA,MACF;AAAA,IACF;AAKA,UAAM,KAAK,sBAAsB;AACjC,QAAI,CAAC,KAAK,iBAAiB;AACzB,YAAM,IAAI,MAAM,sCAAsC;AAAA,IACxD;AACA,UAAM,gBAAgB,MAAM,KAAK,gBAAgB,gBAAgB,aAAa;AAAA,MAC5E,QAAQ,UAAU;AAAA,MAClB,OAAO,SAAS;AAAA,MAChB,cAAc;AAAA;AAAA,MACd,cAAc;AAAA,MACd,kBAAkB;AAAA;AAAA,IACpB,CAAC;AAGD,kBAAc,SAAS,UAAU;AAGjC,SAAK,qBAAqB,aAAa;AAGvC,QAAI,cAAc,SAAS;AACzB,gBAAU,EAAE,QAAQ,gCAAgC;AAAA,IACtD,OAAO;AACL,gBAAU,EAAE,KAAK,mDAAmD;AAAA,IACtE;AAEA,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA,EAKQ,qBAAqB,QAA6B;AACxD,cAAU,EAAE,KAAK,qBAAqB;AAEtC,WAAO,WAAW,QAAQ,QAAM;AAC9B,YAAM,SAAS,GAAG,UAAU,WAAM;AAClC,YAAM,UAAU,GAAG,QAAQ,GAAG,GAAG,OAAO,KAAK,GAAG,KAAK,KAAK,GAAG;AAE7D,UAAI,GAAG,SAAS;AACd,kBAAU,EAAE,KAAK,KAAK,MAAM,IAAI,OAAO,EAAE;AAAA,MAC3C,OAAO;AACL,kBAAU,EAAE,MAAM,KAAK,MAAM,IAAI,OAAO,EAAE;AAAA,MAC5C;AAAA,IACF,CAAC;AAED,QAAI,OAAO,OAAO,SAAS,GAAG;AAC5B,gBAAU,EAAE,KAAK,GAAG,OAAO,OAAO,MAAM,mCAAmC;AAAA,IAC7E;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAc,oBAAoB,QAAoD;AACpF,UAAM,cAAc,OAAO;AAC3B,QAAI,gBAAgB,QAAW;AAC7B,YAAM,IAAI,MAAM,yCAAyC;AAAA,IAC3D;AAEA,UAAM,EAAE,OAAO,OAAO,IAAI,OAAO;AAEjC,cAAU,EAAE,KAAK,iDAAiD,WAAW,KAAK;AAGlF,UAAM,YAAY,MAAM,KAAK,mBAAmB,cAAc;AAC9D,UAAM,oBAAoB,UAAU,OAAO,QAAM;AAC/C,YAAM,OAAO,GAAG,KAAK,YAAY;AAEjC,YAAM,QAAQ,OAAO,WAAW,EAAE,YAAY;AAK9C,YAAM,UAAU,IAAI,OAAO,cAAc,KAAK,aAAa;AAC3D,aAAO,QAAQ,KAAK,IAAI;AAAA,IAC1B,CAAC;AAED,QAAI,kBAAkB,WAAW,GAAG;AAClC,gBAAU,EAAE,KAAK,2CAA2C,WAAW,EAAE;AACzE,gBAAU,EAAE,KAAK,2CAA2C,WAAW,SAAS,WAAW,WAAW,WAAW,QAAQ;AACzH,aAAO;AAAA,QACL,YAAY,OAAO,WAAW;AAAA,QAC9B,SAAS;AAAA,QACT,QAAQ,UAAU;AAAA,QAClB,YAAY,CAAC;AAAA,QACb,QAAQ,CAAC;AAAA,QACT,kBAAkB;AAAA,MACpB;AAAA,IACF;AAGA,UAAM,UACJ,kBAAkB,IAAI,SAAO;AAAA,MAC3B,YAAY,GAAG;AAAA,MACf,aAAa;AAAA,MACb,cAAc,GAAG;AAAA,IACnB,EAAE;AAGJ,cAAU,EAAE,KAAK,SAAS,QAAQ,MAAM,qCAAqC,WAAW,GAAG;AAC3F,eAAW,UAAU,SAAS;AAC5B,gBAAU,EAAE,KAAK,aAAa,OAAO,UAAU,KAAK,OAAO,YAAY,GAAG;AAAA,IAC5E;AAKA,QAAI,CAAC,SAAS,CAAC,OAAO,QAAQ,MAAM;AAClC,YAAM,iBAAiB,MAAM;AAAA,QAC3B
,UAAU,QAAQ,MAAM;AAAA,QACxB;AAAA,MACF;AACA,UAAI,CAAC,gBAAgB;AACnB,kBAAU,EAAE,KAAK,mBAAmB;AACpC,eAAO;AAAA,UACL,YAAY,OAAO,WAAW;AAAA,UAC9B,SAAS;AAAA,UACT,QAAQ,UAAU;AAAA,UAClB,YAAY,CAAC;AAAA,UACb,QAAQ,CAAC;AAAA,UACT,kBAAkB;AAAA,QACpB;AAAA,MACF;AAAA,IACF;AAGA,QAAI,mBAAmB;AACvB,QAAI,kBAAkB;AACtB,UAAM,8BAAwC,CAAC;AAC/C,QAAI,SAAS;AAEb,eAAW,UAAU,SAAS;AAC5B,gBAAU,EAAE,KAAK,wBAAwB,OAAO,UAAU,EAAE;AAI5D,UAAI;AAGF,cAAM,cAA2B;AAAA,UAC/B,MAAM;AAAA,UACN,QAAQ;AAAA;AAAA,UACR,YAAY,OAAO;AAAA,UACnB,eAAe,OAAO,WAAW;AAAA,QACnC;AAEA,cAAM,KAAK,sBAAsB;AACjC,YAAI,CAAC,KAAK,iBAAiB;AACzB,gBAAM,IAAI,MAAM,sCAAsC;AAAA,QACxD;AAGA,cAAM,SAAS,MAAM,KAAK,gBAAgB,gBAAgB,aAAa;AAAA,UACrE,QAAQ,UAAU;AAAA,UAClB,OAAO,SAAS;AAAA,UAChB,cAAc;AAAA;AAAA,UACd,cAAc;AAAA,UACd,kBAAkB;AAAA;AAAA,QACpB,CAAC;AAED,YAAI,OAAO,SAAS;AAClB;AACA,oBAAU,EAAE,QAAQ,uBAAuB,OAAO,UAAU,EAAE;AAG9D,gBAAM,kBAAkB,OAAO,WAAW,KAAK,QAAM,GAAG,SAAS,QAAQ;AACzE,cAAI,mDAAiB,SAAS;AAC5B;AACA,sBAAU,EAAE,QAAQ,qBAAqB,OAAO,UAAU,EAAE;AAAA,UAC9D;AAGA,gBAAM,cAAc,OAAO,WAAW,KAAK,QAAM,GAAG,SAAS,UAAU;AACvE,cAAI,2CAAa,SAAS;AAExB,kBAAM,oBAAoB,OAAO;AACjC,wCAA4B,KAAK,iBAAiB;AAAA,UACpD;AAAA,QACF,OAAO;AACL;AACA,oBAAU,EAAE,MAAM,gCAAgC,OAAO,UAAU,EAAE;AAAA,QACvE;AAAA,MACF,SAAS,OAAO;AACd;AACA,cAAM,SAAS,iBAAiB,QAAQ,MAAM,UAAU;AACxD,kBAAU,EAAE,MAAM,wBAAwB,MAAM,EAAE;AAClD;AAAA,MACF;AAAA,IACF;AAGA,cAAU,EAAE,QAAQ,mCAAmC,WAAW,GAAG;AACrE,cAAU,EAAE,KAAK,yBAAyB,gBAAgB,EAAE;AAC5D,cAAU,EAAE,KAAK,wBAAwB,eAAe,EAAE;AAC1D,QAAI,4BAA4B,SAAS,GAAG;AAE1C,gBAAU,EAAE,KAAK,iCAAiC,4BAA4B,KAAK,IAAI,CAAC,EAAE;AAAA,IAC5F;AACA,QAAI,SAAS,GAAG;AACd,gBAAU,EAAE,KAAK,yBAAyB,MAAM,EAAE;AAAA,IACpD;AAGA,WAAO;AAAA,MACL,YAAY,OAAO,WAAW;AAAA,MAC9B,SAAS,WAAW;AAAA,MACpB,QAAQ,UAAU;AAAA,MAClB,YAAY;AAAA,QACV,EAAE,MAAM,YAAqB,SAAS,MAAM,SAAS,WAAW,gBAAgB,eAAe;AAAA,QAC/F,EAAE,MAAM,UAAmB,SAAS,MAAM,SAAS,WAAW,eAAe,cAAc;AAAA,QAC3F,GAAI,4BAA4B,SAAS,IAAI,CAAC,EAAE,MAAM,YAAqB,SAAS,MAAM,SAAS,WAAW,4BAA4B,MAAM,wBAAwB,SAAS,KAAK,CAAC,IAAI,CAAC;AAAA,MAC9L;AAAA,MACA,QAAQ,CAAC;AAAA,MACT,kBAAkB;AAAA,IACpB;AAAA,EACF;AACF;","names":["numericPattern"]}
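The removed map above carries the `cleanup.ts` source, whose `parseInput` auto-detects whether the cleanup argument is an issue/PR number or a branch name. Below is a minimal sketch of that detection rule only, under the assumption that a purely numeric identifier means an issue; `detectMode` is an illustrative name, not an exported API of the package.

```ts
// Illustrative sketch of the auto-detection rule shown in the embedded source:
// purely numeric input -> issue/PR cleanup, anything else -> single-branch cleanup.
// `detectMode` is a hypothetical helper, not part of @iloom/cli.
type Detected =
	| { mode: 'issue'; issueNumber: number }
	| { mode: 'single'; branchName: string }

function detectMode(identifier: string): Detected {
	const trimmed = identifier.trim()
	if (/^[0-9]+$/.test(trimmed)) {
		return { mode: 'issue', issueNumber: parseInt(trimmed, 10) }
	}
	return { mode: 'single', branchName: trimmed }
}

// detectMode('275')           -> { mode: 'issue', issueNumber: 275 }
// detectMode('fix/login-bug') -> { mode: 'single', branchName: 'fix/login-bug' }
```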