@iloom/cli 0.6.1 → 0.7.0
This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/LICENSE +1 -1
- package/README.md +35 -18
- package/dist/{BranchNamingService-B5PVRR7F.js → BranchNamingService-FLPUUFOB.js} +2 -2
- package/dist/ClaudeContextManager-KE5TBZVZ.js +14 -0
- package/dist/ClaudeService-CRSETT3A.js +13 -0
- package/dist/{GitHubService-S2OGUTDR.js → GitHubService-O7U4UQ7N.js} +3 -3
- package/dist/{LoomLauncher-5LFM4LXB.js → LoomLauncher-NL65LSKP.js} +6 -6
- package/dist/{MetadataManager-DFI73J3G.js → MetadataManager-XJ2YB762.js} +2 -2
- package/dist/PRManager-2ABCWXHW.js +16 -0
- package/dist/{ProjectCapabilityDetector-S5FLNCFI.js → ProjectCapabilityDetector-UZYW32SY.js} +3 -3
- package/dist/{PromptTemplateManager-C3DK6XZL.js → PromptTemplateManager-7L3HJQQU.js} +2 -2
- package/dist/README.md +35 -18
- package/dist/{SettingsManager-35F5RUJH.js → SettingsManager-YU4VYPTW.js} +2 -2
- package/dist/agents/iloom-issue-analyze-and-plan.md +42 -17
- package/dist/agents/iloom-issue-analyzer.md +14 -14
- package/dist/agents/iloom-issue-complexity-evaluator.md +38 -15
- package/dist/agents/iloom-issue-enhancer.md +15 -15
- package/dist/agents/iloom-issue-implementer.md +44 -15
- package/dist/agents/iloom-issue-planner.md +121 -17
- package/dist/agents/iloom-issue-reviewer.md +15 -15
- package/dist/{build-FJVYP7EV.js → build-O2EJHDEW.js} +9 -9
- package/dist/{chunk-ZPSTA5PR.js → chunk-3CDWFEGL.js} +2 -2
- package/dist/{chunk-VU3QMIP2.js → chunk-453NC377.js} +91 -15
- package/dist/chunk-453NC377.js.map +1 -0
- package/dist/{chunk-UQIXZ3BA.js → chunk-5V74K5ZA.js} +2 -2
- package/dist/{chunk-7WANFUIK.js → chunk-6TL3BYH6.js} +2 -2
- package/dist/{chunk-5TXLVEXT.js → chunk-C3AKFAIR.js} +2 -2
- package/dist/{chunk-K7SEEHKO.js → chunk-CNSTXBJ3.js} +7 -419
- package/dist/chunk-CNSTXBJ3.js.map +1 -0
- package/dist/{chunk-VDA5JMB4.js → chunk-EPPPDVHD.js} +21 -8
- package/dist/chunk-EPPPDVHD.js.map +1 -0
- package/dist/{chunk-LVBRMTE6.js → chunk-FEAJR6PN.js} +6 -6
- package/dist/{chunk-6YSFTPKW.js → chunk-FM4KBPVA.js} +18 -13
- package/dist/chunk-FM4KBPVA.js.map +1 -0
- package/dist/{chunk-AEIMYF4P.js → chunk-FP7G7DG3.js} +6 -2
- package/dist/chunk-FP7G7DG3.js.map +1 -0
- package/dist/{chunk-LT3SGBR7.js → chunk-GCPAZSGV.js} +36 -2
- package/dist/{chunk-LT3SGBR7.js.map → chunk-GCPAZSGV.js.map} +1 -1
- package/dist/chunk-GJMEKEI5.js +517 -0
- package/dist/chunk-GJMEKEI5.js.map +1 -0
- package/dist/{chunk-64O2UIWO.js → chunk-GV5X6XUE.js} +4 -4
- package/dist/{chunk-7Q66W4OH.js → chunk-HBJITKSZ.js} +37 -1
- package/dist/chunk-HBJITKSZ.js.map +1 -0
- package/dist/{chunk-7HIRPCKU.js → chunk-HVQNVRAF.js} +2 -2
- package/dist/{chunk-BXCPJJYM.js → chunk-ITN64ENQ.js} +1 -1
- package/dist/chunk-ITN64ENQ.js.map +1 -0
- package/dist/{chunk-6U6VI4SZ.js → chunk-KVS4XGBQ.js} +4 -4
- package/dist/{chunk-AXX3QIKK.js → chunk-LLWX3PCW.js} +2 -2
- package/dist/{chunk-2A7WQKBE.js → chunk-LQBLDI47.js} +96 -6
- package/dist/chunk-LQBLDI47.js.map +1 -0
- package/dist/{chunk-SN3Z6EZO.js → chunk-N7FVXZNI.js} +2 -2
- package/dist/chunk-NTIZLX42.js +822 -0
- package/dist/chunk-NTIZLX42.js.map +1 -0
- package/dist/{chunk-I75JMBNB.js → chunk-S7YMZQUD.js} +31 -43
- package/dist/chunk-S7YMZQUD.js.map +1 -0
- package/dist/chunk-TIYJEEVO.js +79 -0
- package/dist/chunk-TIYJEEVO.js.map +1 -0
- package/dist/{chunk-EK3XCAAS.js → chunk-UDRZY65Y.js} +2 -2
- package/dist/{chunk-3PT7RKL5.js → chunk-USJSNHGG.js} +2 -2
- package/dist/{chunk-CFUWQHCJ.js → chunk-VWGKGNJP.js} +114 -35
- package/dist/chunk-VWGKGNJP.js.map +1 -0
- package/dist/{chunk-F6WVM437.js → chunk-WFQ5CLTR.js} +6 -3
- package/dist/chunk-WFQ5CLTR.js.map +1 -0
- package/dist/{chunk-TRQ76ISK.js → chunk-Z6BO53V7.js} +9 -9
- package/dist/{chunk-GEXP5IOF.js → chunk-ZA575VLF.js} +21 -8
- package/dist/chunk-ZA575VLF.js.map +1 -0
- package/dist/{claude-H33OQMXO.js → claude-6H36IBHO.js} +4 -2
- package/dist/{cleanup-BRUAINKE.js → cleanup-ZPOMRSNN.js} +20 -16
- package/dist/cleanup-ZPOMRSNN.js.map +1 -0
- package/dist/cli.js +341 -954
- package/dist/cli.js.map +1 -1
- package/dist/commit-6S2RIA2K.js +237 -0
- package/dist/commit-6S2RIA2K.js.map +1 -0
- package/dist/{compile-ULNO5F7Q.js → compile-LRMAADUT.js} +9 -9
- package/dist/{contribute-Q6GX6AXK.js → contribute-GXKOIA42.js} +5 -5
- package/dist/{dev-server-4RCDJ5MU.js → dev-server-GREJUEKW.js} +22 -74
- package/dist/dev-server-GREJUEKW.js.map +1 -0
- package/dist/{feedback-O4Q55SVS.js → feedback-G7G5QCY4.js} +10 -10
- package/dist/{git-FVMGBHC2.js → git-ENLT2VNI.js} +6 -4
- package/dist/hooks/iloom-hook.js +30 -2
- package/dist/{ignite-VHV65WEZ.js → ignite-YUAOJ5PP.js} +20 -20
- package/dist/ignite-YUAOJ5PP.js.map +1 -0
- package/dist/index.d.ts +71 -27
- package/dist/index.js +196 -266
- package/dist/index.js.map +1 -1
- package/dist/init-XQQMFDM6.js +21 -0
- package/dist/{lint-5JMCWE4Y.js → lint-OFVN7FT6.js} +9 -9
- package/dist/mcp/issue-management-server.js +359 -13
- package/dist/mcp/issue-management-server.js.map +1 -1
- package/dist/mcp/recap-server.js +13 -4
- package/dist/mcp/recap-server.js.map +1 -1
- package/dist/{open-WHVUYGPY.js → open-MCWQAPSZ.js} +25 -76
- package/dist/open-MCWQAPSZ.js.map +1 -0
- package/dist/{projects-SA76I4TZ.js → projects-PQOTWUII.js} +11 -4
- package/dist/projects-PQOTWUII.js.map +1 -0
- package/dist/prompts/init-prompt.txt +62 -51
- package/dist/prompts/issue-prompt.txt +132 -63
- package/dist/prompts/pr-prompt.txt +3 -3
- package/dist/prompts/regular-prompt.txt +16 -18
- package/dist/prompts/session-summary-prompt.txt +13 -13
- package/dist/{rebase-Y4AS6LQW.js → rebase-RKQED567.js} +53 -8
- package/dist/rebase-RKQED567.js.map +1 -0
- package/dist/{recap-VOOUXOGP.js → recap-ZKGHZCX6.js} +6 -6
- package/dist/{run-NCRK5NPR.js → run-CCG24PBC.js} +25 -76
- package/dist/run-CCG24PBC.js.map +1 -0
- package/dist/schema/settings.schema.json +14 -3
- package/dist/{shell-SBLXVOVJ.js → shell-2NNSIU34.js} +6 -6
- package/dist/{summary-CVFAMDOJ.js → summary-G6L3VAKK.js} +11 -10
- package/dist/{summary-CVFAMDOJ.js.map → summary-G6L3VAKK.js.map} +1 -1
- package/dist/{test-3KIVXI6J.js → test-QZDOEUIO.js} +9 -9
- package/dist/{test-git-ZB6AGGRW.js → test-git-E2BLXR6M.js} +4 -4
- package/dist/{test-prefix-FBGXKMPA.js → test-prefix-A7JGGYAA.js} +4 -4
- package/dist/{test-webserver-YVQD42W6.js → test-webserver-NRMGT2HB.js} +29 -8
- package/dist/test-webserver-NRMGT2HB.js.map +1 -0
- package/package.json +3 -1
- package/dist/ClaudeContextManager-6J2EB4QU.js +0 -14
- package/dist/ClaudeService-O2PB22GX.js +0 -13
- package/dist/PRManager-GB3FOJ2W.js +0 -14
- package/dist/chunk-2A7WQKBE.js.map +0 -1
- package/dist/chunk-6YSFTPKW.js.map +0 -1
- package/dist/chunk-7Q66W4OH.js.map +0 -1
- package/dist/chunk-AEIMYF4P.js.map +0 -1
- package/dist/chunk-BXCPJJYM.js.map +0 -1
- package/dist/chunk-CFUWQHCJ.js.map +0 -1
- package/dist/chunk-F6WVM437.js.map +0 -1
- package/dist/chunk-GEXP5IOF.js.map +0 -1
- package/dist/chunk-I75JMBNB.js.map +0 -1
- package/dist/chunk-K7SEEHKO.js.map +0 -1
- package/dist/chunk-VDA5JMB4.js.map +0 -1
- package/dist/chunk-VU3QMIP2.js.map +0 -1
- package/dist/chunk-W6WVRHJ6.js +0 -251
- package/dist/chunk-W6WVRHJ6.js.map +0 -1
- package/dist/cleanup-BRUAINKE.js.map +0 -1
- package/dist/dev-server-4RCDJ5MU.js.map +0 -1
- package/dist/ignite-VHV65WEZ.js.map +0 -1
- package/dist/init-UTYRHNJJ.js +0 -21
- package/dist/open-WHVUYGPY.js.map +0 -1
- package/dist/projects-SA76I4TZ.js.map +0 -1
- package/dist/rebase-Y4AS6LQW.js.map +0 -1
- package/dist/run-NCRK5NPR.js.map +0 -1
- package/dist/test-webserver-YVQD42W6.js.map +0 -1
- /package/dist/{BranchNamingService-B5PVRR7F.js.map → BranchNamingService-FLPUUFOB.js.map} +0 -0
- /package/dist/{ClaudeContextManager-6J2EB4QU.js.map → ClaudeContextManager-KE5TBZVZ.js.map} +0 -0
- /package/dist/{ClaudeService-O2PB22GX.js.map → ClaudeService-CRSETT3A.js.map} +0 -0
- /package/dist/{GitHubService-S2OGUTDR.js.map → GitHubService-O7U4UQ7N.js.map} +0 -0
- /package/dist/{LoomLauncher-5LFM4LXB.js.map → LoomLauncher-NL65LSKP.js.map} +0 -0
- /package/dist/{MetadataManager-DFI73J3G.js.map → MetadataManager-XJ2YB762.js.map} +0 -0
- /package/dist/{PRManager-GB3FOJ2W.js.map → PRManager-2ABCWXHW.js.map} +0 -0
- /package/dist/{ProjectCapabilityDetector-S5FLNCFI.js.map → ProjectCapabilityDetector-UZYW32SY.js.map} +0 -0
- /package/dist/{PromptTemplateManager-C3DK6XZL.js.map → PromptTemplateManager-7L3HJQQU.js.map} +0 -0
- /package/dist/{SettingsManager-35F5RUJH.js.map → SettingsManager-YU4VYPTW.js.map} +0 -0
- /package/dist/{build-FJVYP7EV.js.map → build-O2EJHDEW.js.map} +0 -0
- /package/dist/{chunk-ZPSTA5PR.js.map → chunk-3CDWFEGL.js.map} +0 -0
- /package/dist/{chunk-UQIXZ3BA.js.map → chunk-5V74K5ZA.js.map} +0 -0
- /package/dist/{chunk-7WANFUIK.js.map → chunk-6TL3BYH6.js.map} +0 -0
- /package/dist/{chunk-5TXLVEXT.js.map → chunk-C3AKFAIR.js.map} +0 -0
- /package/dist/{chunk-LVBRMTE6.js.map → chunk-FEAJR6PN.js.map} +0 -0
- /package/dist/{chunk-64O2UIWO.js.map → chunk-GV5X6XUE.js.map} +0 -0
- /package/dist/{chunk-7HIRPCKU.js.map → chunk-HVQNVRAF.js.map} +0 -0
- /package/dist/{chunk-6U6VI4SZ.js.map → chunk-KVS4XGBQ.js.map} +0 -0
- /package/dist/{chunk-AXX3QIKK.js.map → chunk-LLWX3PCW.js.map} +0 -0
- /package/dist/{chunk-SN3Z6EZO.js.map → chunk-N7FVXZNI.js.map} +0 -0
- /package/dist/{chunk-EK3XCAAS.js.map → chunk-UDRZY65Y.js.map} +0 -0
- /package/dist/{chunk-3PT7RKL5.js.map → chunk-USJSNHGG.js.map} +0 -0
- /package/dist/{chunk-TRQ76ISK.js.map → chunk-Z6BO53V7.js.map} +0 -0
- /package/dist/{claude-H33OQMXO.js.map → claude-6H36IBHO.js.map} +0 -0
- /package/dist/{compile-ULNO5F7Q.js.map → compile-LRMAADUT.js.map} +0 -0
- /package/dist/{contribute-Q6GX6AXK.js.map → contribute-GXKOIA42.js.map} +0 -0
- /package/dist/{feedback-O4Q55SVS.js.map → feedback-G7G5QCY4.js.map} +0 -0
- /package/dist/{git-FVMGBHC2.js.map → git-ENLT2VNI.js.map} +0 -0
- /package/dist/{init-UTYRHNJJ.js.map → init-XQQMFDM6.js.map} +0 -0
- /package/dist/{lint-5JMCWE4Y.js.map → lint-OFVN7FT6.js.map} +0 -0
- /package/dist/{recap-VOOUXOGP.js.map → recap-ZKGHZCX6.js.map} +0 -0
- /package/dist/{shell-SBLXVOVJ.js.map → shell-2NNSIU34.js.map} +0 -0
- /package/dist/{test-3KIVXI6J.js.map → test-QZDOEUIO.js.map} +0 -0
- /package/dist/{test-git-ZB6AGGRW.js.map → test-git-E2BLXR6M.js.map} +0 -0
- /package/dist/{test-prefix-FBGXKMPA.js.map → test-prefix-A7JGGYAA.js.map} +0 -0
@@ -0,0 +1 @@
+
{"version":3,"sources":["../src/mcp/GitHubIssueManagementProvider.ts","../src/utils/linear-markup-converter.ts","../src/mcp/LinearIssueManagementProvider.ts","../src/mcp/IssueManagementProviderFactory.ts"],"sourcesContent":["/**\n * GitHub implementation of Issue Management Provider\n * Uses GitHub CLI for all operations\n * Normalizes GitHub-specific fields (login) to provider-agnostic core fields (id, displayName)\n */\n\nimport type {\n\tIssueManagementProvider,\n\tGetIssueInput,\n\tGetCommentInput,\n\tCreateCommentInput,\n\tUpdateCommentInput,\n\tCreateIssueInput,\n\tCreateChildIssueInput,\n\tCreateIssueResult,\n\tIssueResult,\n\tCommentDetailResult,\n\tCommentResult,\n\tFlexibleAuthor,\n} from './types.js'\nimport {\n\texecuteGhCommand,\n\tcreateIssueComment,\n\tupdateIssueComment,\n\tcreatePRComment,\n\tcreateIssue,\n\tgetIssueNodeId,\n\taddSubIssue,\n} from '../utils/github.js'\n\n/**\n * GitHub-specific author structure from API\n */\ninterface GitHubAuthor {\n\tlogin: string\n\tid?: number\n\tavatarUrl?: string\n\turl?: string\n}\n\n/**\n * Normalize GitHub author to FlexibleAuthor format\n */\nfunction normalizeAuthor(author: GitHubAuthor | null | undefined): FlexibleAuthor | null {\n\tif (!author) return null\n\n\treturn {\n\t\tid: author.id ? String(author.id) : author.login,\n\t\tdisplayName: author.login, // GitHub uses login as primary identifier\n\t\tlogin: author.login, // Preserve original GitHub field\n\t\t...(author.avatarUrl && { avatarUrl: author.avatarUrl }),\n\t\t...(author.url && { url: author.url }),\n\t}\n}\n\n/**\n * Extract numeric comment ID from GitHub comment URL\n * URL format: https://github.com/owner/repo/issues/123#issuecomment-3615239386\n */\nexport function extractNumericIdFromUrl(url: string): string {\n\tconst match = url.match(/#issuecomment-(\\d+)$/)\n\tif (!match?.[1]) {\n\t\tthrow new Error(`Cannot extract comment ID from URL: ${url}`)\n\t}\n\treturn match[1]\n}\n\n/**\n * GitHub-specific implementation of IssueManagementProvider\n */\nexport class GitHubIssueManagementProvider implements IssueManagementProvider {\n\treadonly providerName = 'github'\n\treadonly issuePrefix = '#'\n\n\t/**\n\t * Fetch issue details using gh CLI\n\t * Normalizes GitHub-specific fields to provider-agnostic format\n\t */\n\tasync getIssue(input: GetIssueInput): Promise<IssueResult> {\n\t\tconst { number, includeComments = true, repo } = input\n\n\t\t// Convert string ID to number for GitHub CLI\n\t\tconst issueNumber = parseInt(number, 10)\n\t\tif (isNaN(issueNumber)) {\n\t\t\tthrow new Error(`Invalid GitHub issue number: ${number}. GitHub issue IDs must be numeric.`)\n\t\t}\n\n\t\t// Build fields list based on whether we need comments\n\t\tconst fields = includeComments\n\t\t\t? 
'body,title,comments,labels,assignees,milestone,author,state,number,url'\n\t\t\t: 'body,title,labels,assignees,milestone,author,state,number,url'\n\n\t\t// Use gh issue view to fetch issue details\n\t\tinterface GitHubIssueResponse {\n\t\t\tnumber: number\n\t\t\ttitle: string\n\t\t\tbody: string\n\t\t\tstate: string\n\t\t\turl: string\n\t\t\tauthor?: GitHubAuthor\n\t\t\tlabels?: Array<{ name: string; color?: string; description?: string }>\n\t\t\tassignees?: Array<GitHubAuthor>\n\t\t\tmilestone?: { title: string; number?: number; state?: string }\n\t\t\tcomments?: Array<{\n\t\t\t\tid: number\n\t\t\t\tauthor: GitHubAuthor\n\t\t\t\tbody: string\n\t\t\t\tcreatedAt: string\n\t\t\t\tupdatedAt?: string\n\t\t\t\turl: string\n\t\t\t}>\n\t\t}\n\n\t\tconst args = [\n\t\t\t'issue',\n\t\t\t'view',\n\t\t\tString(issueNumber),\n\t\t\t'--json',\n\t\t\tfields,\n\t\t]\n\n\t\t// Add --repo flag if repo is provided (gh CLI handles both owner/repo and URL formats)\n\t\tif (repo) {\n\t\t\targs.push('--repo', repo)\n\t\t}\n\n\t\tconst raw = await executeGhCommand<GitHubIssueResponse>(args)\n\n\t\t// Normalize to IssueResult with core fields + passthrough\n\t\tconst result: IssueResult = {\n\t\t\t// Core fields\n\t\t\tid: String(raw.number),\n\t\t\ttitle: raw.title,\n\t\t\tbody: raw.body,\n\t\t\tstate: raw.state,\n\t\t\turl: raw.url,\n\t\t\tprovider: 'github',\n\n\t\t\t// Normalized author\n\t\t\tauthor: normalizeAuthor(raw.author),\n\n\t\t\t// Optional flexible fields\n\t\t\t...(raw.assignees && {\n\t\t\t\tassignees: raw.assignees.map(a => normalizeAuthor(a)).filter((a): a is FlexibleAuthor => a !== null),\n\t\t\t}),\n\t\t\t...(raw.labels && {\n\t\t\t\tlabels: raw.labels,\n\t\t\t}),\n\n\t\t\t// GitHub-specific passthrough fields\n\t\t\t...(raw.milestone && {\n\t\t\t\tmilestone: raw.milestone,\n\t\t\t}),\n\t\t}\n\n\t\t// Handle comments with normalized authors\n\t\t// Use extractNumericIdFromUrl to get REST API-compatible numeric IDs from comment URLs\n\t\t// (GitHub CLI returns GraphQL node IDs in the id field, but REST API expects numeric IDs)\n\t\tif (raw.comments !== undefined) {\n\t\t\tresult.comments = raw.comments.map(comment => ({\n\t\t\t\tid: extractNumericIdFromUrl(comment.url),\n\t\t\t\tbody: comment.body,\n\t\t\t\tcreatedAt: comment.createdAt,\n\t\t\t\tauthor: normalizeAuthor(comment.author),\n\t\t\t\t...(comment.updatedAt && { updatedAt: comment.updatedAt }),\n\t\t\t}))\n\t\t}\n\n\t\treturn result\n\t}\n\n\t/**\n\t * Fetch a specific comment by ID using gh API\n\t * Normalizes author to FlexibleAuthor format\n\t */\n\tasync getComment(input: GetCommentInput): Promise<CommentDetailResult> {\n\t\tconst { commentId, repo } = input\n\t\t// Note: GitHub doesn't need the issue number parameter - comment IDs are globally unique\n\t\t// But we accept it for interface compatibility with other providers\n\n\t\t// Convert string ID to number for GitHub API\n\t\tconst numericCommentId = parseInt(commentId, 10)\n\t\tif (isNaN(numericCommentId)) {\n\t\t\tthrow new Error(`Invalid GitHub comment ID: ${commentId}. GitHub comment IDs must be numeric.`)\n\t\t}\n\n\t\t// GitHub API response structure\n\t\tinterface GitHubCommentResponse {\n\t\t\tid: number\n\t\t\tbody: string\n\t\t\tuser: GitHubAuthor\n\t\t\tcreated_at: string\n\t\t\tupdated_at?: string\n\t\t\thtml_url?: string\n\t\t\treactions?: Record<string, unknown>\n\t\t}\n\n\t\t// Use explicit repo path if provided, otherwise use :owner/:repo placeholder\n\t\tconst apiPath = repo\n\t\t\t? 
`repos/${repo}/issues/comments/${numericCommentId}`\n\t\t\t: `repos/:owner/:repo/issues/comments/${numericCommentId}`\n\n\t\t// Use gh api to fetch specific comment\n\t\tconst raw = await executeGhCommand<GitHubCommentResponse>([\n\t\t\t'api',\n\t\t\tapiPath,\n\t\t\t'--jq',\n\t\t\t'{id: .id, body: .body, user: .user, created_at: .created_at, updated_at: .updated_at, html_url: .html_url, reactions: .reactions}',\n\t\t])\n\n\t\t// Normalize to CommentDetailResult\n\t\treturn {\n\t\t\tid: String(raw.id),\n\t\t\tbody: raw.body,\n\t\t\tauthor: normalizeAuthor(raw.user),\n\t\t\tcreated_at: raw.created_at,\n\t\t\t...(raw.updated_at && { updated_at: raw.updated_at }),\n\t\t\t// Passthrough GitHub-specific fields\n\t\t\t...(raw.html_url && { html_url: raw.html_url }),\n\t\t\t...(raw.reactions && { reactions: raw.reactions }),\n\t\t}\n\t}\n\n\t/**\n\t * Create a new comment on an issue or PR\n\t */\n\tasync createComment(input: CreateCommentInput): Promise<CommentResult> {\n\t\tconst { number, body, type } = input\n\n\t\t// Convert string ID to number for GitHub utilities\n\t\tconst numericId = parseInt(number, 10)\n\t\tif (isNaN(numericId)) {\n\t\t\tthrow new Error(`Invalid GitHub ${type} number: ${number}. GitHub IDs must be numeric.`)\n\t\t}\n\n\t\t// Delegate to existing GitHub utilities\n\t\tconst result =\n\t\t\ttype === 'issue'\n\t\t\t\t? await createIssueComment(numericId, body)\n\t\t\t\t: await createPRComment(numericId, body)\n\n\t\t// Convert numeric ID to string for the interface\n\t\treturn {\n\t\t\t...result,\n\t\t\tid: String(result.id),\n\t\t}\n\t}\n\n\t/**\n\t * Update an existing comment\n\t */\n\tasync updateComment(input: UpdateCommentInput): Promise<CommentResult> {\n\t\tconst { commentId, body } = input\n\t\t// Note: GitHub doesn't need the issue number parameter - comment IDs are globally unique\n\t\t// But we accept it for interface compatibility with other providers\n\n\t\t// Convert string ID to number for GitHub utility\n\t\tconst numericCommentId = parseInt(commentId, 10)\n\t\tif (isNaN(numericCommentId)) {\n\t\t\tthrow new Error(`Invalid GitHub comment ID: ${commentId}. GitHub comment IDs must be numeric.`)\n\t\t}\n\n\t\t// Delegate to existing GitHub utility\n\t\tconst result = await updateIssueComment(numericCommentId, body)\n\n\t\t// Convert numeric ID to string for the interface\n\t\treturn {\n\t\t\t...result,\n\t\t\tid: String(result.id),\n\t\t}\n\t}\n\n\t/**\n\t * Create a new issue\n\t */\n\tasync createIssue(input: CreateIssueInput): Promise<CreateIssueResult> {\n\t\tconst { title, body, labels, repo } = input\n\t\t// teamKey is ignored for GitHub\n\n\t\tconst result = await createIssue(title, body, { labels, repo })\n\n\t\t// Ensure number is numeric\n\t\tconst issueNumber = typeof result.number === 'number'\n\t\t\t? result.number\n\t\t\t: parseInt(String(result.number), 10)\n\n\t\treturn {\n\t\t\tid: String(issueNumber),\n\t\t\turl: result.url,\n\t\t\tnumber: issueNumber,\n\t\t}\n\t}\n\n\t/**\n\t * Create a child issue linked to a parent issue\n\t * GitHub requires two-step process: create issue, then link via GraphQL\n\t */\n\tasync createChildIssue(input: CreateChildIssueInput): Promise<CreateIssueResult> {\n\t\tconst { parentId, title, body, labels, repo } = input\n\t\t// teamKey is ignored for GitHub\n\n\t\t// Convert parent identifier to number\n\t\tconst parentNumber = parseInt(parentId, 10)\n\t\tif (isNaN(parentNumber)) {\n\t\t\tthrow new Error(`Invalid GitHub parent issue number: ${parentId}. 
GitHub issue IDs must be numeric.`)\n\t\t}\n\n\t\t// Step 1: Get parent issue's GraphQL node ID\n\t\tconst parentNodeId = await getIssueNodeId(parentNumber, repo)\n\n\t\t// Step 2: Create the child issue\n\t\tconst childResult = await createIssue(title, body, { labels, repo })\n\t\tconst childNumber = typeof childResult.number === 'number'\n\t\t\t? childResult.number\n\t\t\t: parseInt(String(childResult.number), 10)\n\n\t\t// Step 3: Get child issue's GraphQL node ID\n\t\tconst childNodeId = await getIssueNodeId(childNumber, repo)\n\n\t\t// Step 4: Link child to parent via GraphQL mutation\n\t\tawait addSubIssue(parentNodeId, childNodeId)\n\n\t\treturn {\n\t\t\tid: String(childNumber),\n\t\t\turl: childResult.url,\n\t\t\tnumber: childNumber,\n\t\t}\n\t}\n}\n","import { appendFileSync } from 'node:fs'\nimport { join, dirname, basename, extname } from 'node:path'\n\n/**\n * Utility class for converting HTML details/summary format to Linear's collapsible format\n *\n * Converts:\n * <details>\n * <summary>Header</summary>\n * CONTENT\n * </details>\n *\n * Into Linear format:\n * +++ Header\n *\n * CONTENT\n *\n * +++\n */\nexport class LinearMarkupConverter {\n\t/**\n\t * Convert HTML details/summary blocks to Linear's collapsible format\n\t * Handles nested details blocks recursively\n\t *\n\t * @param text - Text containing HTML details/summary blocks\n\t * @returns Text with details/summary converted to Linear format\n\t */\n\tstatic convertDetailsToLinear(text: string): string {\n\t\tif (!text) {\n\t\t\treturn text\n\t\t}\n\n\t\t// Process from innermost to outermost to handle nesting correctly\n\t\t// Keep converting until no more details blocks are found\n\t\tlet previousText = ''\n\t\tlet currentText = text\n\n\t\twhile (previousText !== currentText) {\n\t\t\tpreviousText = currentText\n\t\t\tcurrentText = this.convertSinglePass(currentText)\n\t\t}\n\n\t\treturn currentText\n\t}\n\n\t/**\n\t * Perform a single pass of details block conversion\n\t * Converts the innermost details blocks first\n\t */\n\tprivate static convertSinglePass(text: string): string {\n\t\t// Match <details> blocks with optional attributes on the details tag\n\t\t// Supports multiline content between tags\n\t\tconst detailsRegex = /<details[^>]*>\\s*<summary[^>]*>(.*?)<\\/summary>\\s*(.*?)\\s*<\\/details>/gis\n\n\t\treturn text.replace(detailsRegex, (_match, summary, content) => {\n\t\t\t// Clean up the summary - trim whitespace and decode HTML entities\n\t\t\tconst cleanSummary = this.cleanText(summary)\n\n\t\t\t// Clean up the content - preserve internal structure but normalize outer whitespace\n\t\t\t// Note: Don't recursively convert here - the while loop handles that\n\t\t\tconst cleanContent = this.cleanContent(content)\n\n\t\t\t// Build Linear collapsible format\n\t\t\t// Always include blank lines around content for readability\n\t\t\tif (cleanContent) {\n\t\t\t\treturn `+++ ${cleanSummary}\\n\\n${cleanContent}\\n\\n+++`\n\t\t\t} else {\n\t\t\t\t// Empty content - use minimal format\n\t\t\t\treturn `+++ ${cleanSummary}\\n\\n+++`\n\t\t\t}\n\t\t})\n\t}\n\n\t/**\n\t * Clean text by trimming whitespace and decoding common HTML entities\n\t */\n\tprivate static cleanText(text: string): string {\n\t\treturn text\n\t\t\t.trim()\n\t\t\t.replace(/</g, '<')\n\t\t\t.replace(/>/g, '>')\n\t\t\t.replace(/&/g, '&')\n\t\t\t.replace(/"/g, '\"')\n\t\t\t.replace(/'/g, \"'\")\n\t}\n\n\t/**\n\t * Clean content while preserving internal structure\n\t * - Removes leading/trailing whitespace\n\t * - Normalizes internal blank 
lines (max 2 consecutive newlines)\n\t * - Preserves code blocks and other formatting\n\t */\n\tprivate static cleanContent(content: string): string {\n\t\tif (!content) {\n\t\t\treturn ''\n\t\t}\n\n\t\t// Trim outer whitespace\n\t\tlet cleaned = content.trim()\n\n\t\t// Normalize excessive blank lines (3+ newlines -> 2 newlines)\n\t\tcleaned = cleaned.replace(/\\n{3,}/g, '\\n\\n')\n\n\t\treturn cleaned\n\t}\n\n\t/**\n\t * Check if text contains HTML details/summary blocks\n\t * Useful for conditional conversion\n\t */\n\tstatic hasDetailsBlocks(text: string): boolean {\n\t\tif (!text) {\n\t\t\treturn false\n\t\t}\n\n\t\tconst detailsRegex = /<details[^>]*>.*?<summary[^>]*>.*?<\\/summary>.*?<\\/details>/is\n\t\treturn detailsRegex.test(text)\n\t}\n\n\t/**\n\t * Remove wrapper tags from code sample details blocks\n\t * Identifies details blocks where summary contains \"X lines\" pattern\n\t * and removes the details/summary tags while preserving the content\n\t *\n\t * @param text - Text containing potential code sample details blocks\n\t * @returns Text with code sample wrappers removed\n\t */\n\tstatic removeCodeSampleWrappers(text: string): string {\n\t\tif (!text) {\n\t\t\treturn text\n\t\t}\n\n\t\t// Match details blocks where summary contains \"X lines\" (e.g., \"45 lines\", \"120 lines\")\n\t\t// Pattern: <details><summary>...N lines...</summary>CONTENT</details>\n\t\t// Use [^<]* to match summary content without allowing nested tags to interfere\n\t\t// Then use [\\s\\S]*? for the content to allow any characters including newlines\n\t\tconst codeSampleRegex = /<details[^>]*>\\s*<summary[^>]*>([^<]*\\d+\\s+lines[^<]*)<\\/summary>\\s*([\\s\\S]*?)<\\/details>/gi\n\n\t\treturn text.replace(codeSampleRegex, (_match, _summary, content) => {\n\t\t\t// Return just the content, without any wrapper tags\n\t\t\t// Preserve the content exactly as-is\n\t\t\treturn content.trim()\n\t\t})\n\t}\n\n\t/**\n\t * Convert text for Linear - applies all necessary conversions\n\t * Currently only converts details/summary blocks, but can be extended\n\t * for other HTML to Linear markdown conversions\n\t */\n\tstatic convertToLinear(text: string): string {\n\t\tif (!text) {\n\t\t\treturn text\n\t\t}\n\n\t\t// Log input if logging is enabled\n\t\tthis.logConversion('INPUT', text)\n\n\t\t// Apply all conversions\n\t\tlet converted = text\n\n\t\t// First, remove code sample wrappers (details blocks with \"X lines\" pattern)\n\t\t// This prevents them from being converted to Linear's +++ format\n\t\tconverted = this.removeCodeSampleWrappers(converted)\n\n\t\t// Then convert remaining details/summary blocks to Linear format\n\t\tconverted = this.convertDetailsToLinear(converted)\n\n\t\t// Log output if logging is enabled\n\t\tthis.logConversion('OUTPUT', converted)\n\n\t\treturn converted\n\t}\n\n\t/**\n\t * Log conversion input/output if LINEAR_MARKDOWN_LOG_FILE is set\n\t */\n\tprivate static logConversion(label: string, content: string): void {\n\t\tconst logFilePath = process.env.LINEAR_MARKDOWN_LOG_FILE\n\t\tif (!logFilePath) {\n\t\t\treturn\n\t\t}\n\n\t\ttry {\n\t\t\tconst timestampedPath = this.getTimestampedLogPath(logFilePath)\n\t\t\tconst timestamp = new Date().toISOString()\n\t\t\tconst separator = '================================'\n\n\t\t\tconst logEntry = `${separator}\\n[${timestamp}] CONVERSION ${label}\\n${separator}\\n${label}:\\n${content}\\n\\n`\n\n\t\t\tappendFileSync(timestampedPath, logEntry, 'utf-8')\n\t\t} catch {\n\t\t\t// Silently fail - don't crash if logging fails\n\t\t\t// This is a 
debug feature and shouldn't break the conversion\n\t\t}\n\t}\n\n\t/**\n\t * Generate timestamped log file path\n\t * Example: debug.log -> debug-20231202-161234.log\n\t */\n\tprivate static getTimestampedLogPath(logFilePath: string): string {\n\t\tconst dir = dirname(logFilePath)\n\t\tconst ext = extname(logFilePath)\n\t\tconst base = basename(logFilePath, ext)\n\n\t\t// Generate timestamp: YYYYMMDD-HHMMSS\n\t\tconst now = new Date()\n\t\tconst timestamp = [\n\t\t\tnow.getFullYear(),\n\t\t\tString(now.getMonth() + 1).padStart(2, '0'),\n\t\t\tString(now.getDate()).padStart(2, '0'),\n\t\t].join('') + '-' + [\n\t\t\tString(now.getHours()).padStart(2, '0'),\n\t\t\tString(now.getMinutes()).padStart(2, '0'),\n\t\t\tString(now.getSeconds()).padStart(2, '0'),\n\t\t].join('')\n\n\t\treturn join(dir, `${base}-${timestamp}${ext}`)\n\t}\n}\n","/**\n * Linear implementation of Issue Management Provider\n * Uses @linear/sdk for all operations\n */\n\nimport type {\n\tIssueManagementProvider,\n\tGetIssueInput,\n\tGetCommentInput,\n\tCreateCommentInput,\n\tUpdateCommentInput,\n\tCreateIssueInput,\n\tCreateChildIssueInput,\n\tCreateIssueResult,\n\tIssueResult,\n\tCommentDetailResult,\n\tCommentResult,\n} from './types.js'\nimport {\n\tfetchLinearIssue,\n\tcreateLinearComment,\n\tgetLinearComment,\n\tupdateLinearComment,\n\tfetchLinearIssueComments,\n\tcreateLinearIssue,\n\tcreateLinearChildIssue,\n} from '../utils/linear.js'\nimport { LinearMarkupConverter } from '../utils/linear-markup-converter.js'\n\n/**\n * Linear-specific implementation of IssueManagementProvider\n */\nexport class LinearIssueManagementProvider implements IssueManagementProvider {\n\treadonly providerName = 'linear'\n\treadonly issuePrefix = ''\n\n\t/**\n\t * Cached team key extracted from issue identifiers (e.g., \"ENG-123\" -> \"ENG\")\n\t * Used as fallback when teamKey is not explicitly provided to createIssue()\n\t */\n\tprivate cachedTeamKey: string | undefined = undefined\n\n\t/**\n\t * Fetch issue details using Linear SDK\n\t */\n\tasync getIssue(input: GetIssueInput): Promise<IssueResult> {\n\t\tconst { number, includeComments = true } = input\n\n\t\t// Extract and cache team key from identifier (e.g., \"ENG-123\" -> \"ENG\")\n\t\t// This enables createIssue() to use the team key as a fallback\n\t\tconst match = number.match(/^([A-Z]{2,})-\\d+$/i)\n\t\tif (match?.[1]) {\n\t\t\tthis.cachedTeamKey = match[1].toUpperCase()\n\t\t}\n\n\t\t// Fetch issue - Linear uses alphanumeric identifiers like \"ENG-123\"\n\t\tconst raw = await fetchLinearIssue(number)\n\n\t\t// Map Linear state name to open/closed\n\t\tconst state = raw.state && (raw.state.toLowerCase().includes('done') || raw.state.toLowerCase().includes('completed') || raw.state.toLowerCase().includes('canceled'))\n\t\t\t? 'closed'\n\t\t\t: 'open'\n\n\t\t// Build result\n\t\tconst result: IssueResult = {\n\t\t\tid: raw.identifier,\n\t\t\ttitle: raw.title,\n\t\t\tbody: raw.description ?? 
'',\n\t\t\tstate,\n\t\t\turl: raw.url,\n\t\t\tprovider: 'linear',\n\t\t\tauthor: null, // Linear SDK doesn't return author in basic fetch\n\n\t\t\t// Linear-specific fields\n\t\t\tlinearState: raw.state,\n\t\t\tcreatedAt: raw.createdAt,\n\t\t\tupdatedAt: raw.updatedAt,\n\t\t}\n\n\t\t// Fetch comments if requested\n\t\tif (includeComments) {\n\t\t\ttry {\n\t\t\t\tconst comments = await this.fetchIssueComments(number)\n\t\t\t\tif (comments) {\n\t\t\t\t\tresult.comments = comments\n\t\t\t\t}\n\t\t\t} catch {\n\t\t\t\t// If comments fail, continue without them\n\t\t\t}\n\t\t}\n\n\t\treturn result\n\t}\n\n\t/**\n\t * Fetch comments for an issue\n\t */\n\tprivate async fetchIssueComments(identifier: string): Promise<IssueResult['comments']> {\n\t\ttry {\n\t\t\tconst comments = await fetchLinearIssueComments(identifier)\n\n\t\t\treturn comments.map(comment => ({\n\t\t\t\tid: comment.id,\n\t\t\t\tbody: comment.body,\n\t\t\t\tcreatedAt: comment.createdAt,\n\t\t\t\tauthor: null, // Linear SDK doesn't return comment author info in basic fetch\n\t\t\t\t...(comment.updatedAt && { updatedAt: comment.updatedAt }),\n\t\t\t}))\n\t\t} catch {\n\t\t\treturn []\n\t\t}\n\t}\n\n\t/**\n\t * Fetch a specific comment by ID\n\t */\n\tasync getComment(input: GetCommentInput): Promise<CommentDetailResult> {\n\t\tconst { commentId } = input\n\n\t\tconst raw = await getLinearComment(commentId)\n\n\t\treturn {\n\t\t\tid: raw.id,\n\t\t\tbody: raw.body,\n\t\t\tauthor: null, // Linear SDK doesn't return comment author info in basic fetch\n\t\t\tcreated_at: raw.createdAt,\n\t\t}\n\t}\n\n\t/**\n\t * Create a new comment on an issue\n\t */\n\tasync createComment(input: CreateCommentInput): Promise<CommentResult> {\n\t\tconst { number, body } = input\n\t\t// Note: Linear doesn't distinguish between issue and PR comments\n\t\t// (Linear doesn't have PRs - that's GitHub-specific)\n\n\t\t// Convert HTML details/summary blocks to Linear's collapsible format\n\t\tconst convertedBody = LinearMarkupConverter.convertToLinear(body)\n\n\t\tconst result = await createLinearComment(number, convertedBody)\n\n\t\treturn {\n\t\t\tid: result.id,\n\t\t\turl: result.url,\n\t\t\tcreated_at: result.createdAt,\n\t\t}\n\t}\n\n\t/**\n\t * Update an existing comment\n\t */\n\tasync updateComment(input: UpdateCommentInput): Promise<CommentResult> {\n\t\tconst { commentId, body } = input\n\n\t\t// Convert HTML details/summary blocks to Linear's collapsible format\n\t\tconst convertedBody = LinearMarkupConverter.convertToLinear(body)\n\n\t\tconst result = await updateLinearComment(commentId, convertedBody)\n\n\t\treturn {\n\t\t\tid: result.id,\n\t\t\turl: result.url,\n\t\t\tupdated_at: result.updatedAt,\n\t\t}\n\t}\n\n\t/**\n\t * Create a new issue\n\t */\n\tasync createIssue(input: CreateIssueInput): Promise<CreateIssueResult> {\n\t\tconst { title, body, labels, teamKey } = input\n\n\t\t// Fallback chain: explicit param > settings (via env) > cached key from getIssue()\n\t\tconst effectiveTeamKey = teamKey ?? process.env.LINEAR_TEAM_KEY ?? this.cachedTeamKey\n\n\t\tif (!effectiveTeamKey) {\n\t\t\tthrow new Error('teamKey is required for Linear issue creation. 
Configure issueManagement.linear.teamId in settings, or call getIssue first to extract the team from an issue identifier.')\n\t\t}\n\n\t\tconst result = await createLinearIssue(title, body, effectiveTeamKey, labels)\n\n\t\treturn {\n\t\t\tid: result.identifier,\n\t\t\turl: result.url,\n\t\t}\n\t}\n\n\t/**\n\t * Create a child issue linked to a parent issue\n\t * Linear supports atomic creation with parentId field\n\t */\n\tasync createChildIssue(input: CreateChildIssueInput): Promise<CreateIssueResult> {\n\t\tconst { parentId, title, body, labels, teamKey } = input\n\n\t\t// Fetch parent issue to get UUID (parentId in input is identifier like \"ENG-123\")\n\t\tconst parentIssue = await fetchLinearIssue(parentId)\n\n\t\t// Extract team key from parent identifier if not provided\n\t\tconst match = parentId.match(/^([A-Z]{2,})-\\d+$/i)\n\t\tconst effectiveTeamKey = teamKey ?? match?.[1]?.toUpperCase() ?? process.env.LINEAR_TEAM_KEY ?? this.cachedTeamKey\n\n\t\tif (!effectiveTeamKey) {\n\t\t\tthrow new Error('teamKey is required for Linear child issue creation. Provide teamKey parameter or use a parent identifier with team prefix.')\n\t\t}\n\n\t\t// Create child issue with parent's UUID\n\t\tconst result = await createLinearChildIssue(\n\t\t\ttitle,\n\t\t\tbody,\n\t\t\teffectiveTeamKey,\n\t\t\tparentIssue.id, // UUID, not identifier\n\t\t\tlabels\n\t\t)\n\n\t\treturn {\n\t\t\tid: result.identifier,\n\t\t\turl: result.url,\n\t\t}\n\t}\n}\n","/**\n * Factory for creating issue management providers\n */\n\nimport type { IssueManagementProvider, IssueProvider } from './types.js'\nimport { GitHubIssueManagementProvider } from './GitHubIssueManagementProvider.js'\nimport { LinearIssueManagementProvider } from './LinearIssueManagementProvider.js'\n\n/**\n * Factory class for creating issue management providers\n */\nexport class IssueManagementProviderFactory {\n\t/**\n\t * Create an issue management provider based on the provider type\n\t */\n\tstatic create(provider: IssueProvider): IssueManagementProvider {\n\t\tswitch (provider) {\n\t\t\tcase 'github':\n\t\t\t\treturn new GitHubIssueManagementProvider()\n\t\t\tcase 'linear':\n\t\t\t\treturn new LinearIssueManagementProvider()\n\t\t\tdefault:\n\t\t\t\tthrow new Error(`Unsupported issue management provider: 
${provider}`)\n\t\t}\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AA2CA,SAAS,gBAAgB,QAAgE;AACxF,MAAI,CAAC,OAAQ,QAAO;AAEpB,SAAO;AAAA,IACN,IAAI,OAAO,KAAK,OAAO,OAAO,EAAE,IAAI,OAAO;AAAA,IAC3C,aAAa,OAAO;AAAA;AAAA,IACpB,OAAO,OAAO;AAAA;AAAA,IACd,GAAI,OAAO,aAAa,EAAE,WAAW,OAAO,UAAU;AAAA,IACtD,GAAI,OAAO,OAAO,EAAE,KAAK,OAAO,IAAI;AAAA,EACrC;AACD;AAMO,SAAS,wBAAwB,KAAqB;AAC5D,QAAM,QAAQ,IAAI,MAAM,sBAAsB;AAC9C,MAAI,EAAC,+BAAQ,KAAI;AAChB,UAAM,IAAI,MAAM,uCAAuC,GAAG,EAAE;AAAA,EAC7D;AACA,SAAO,MAAM,CAAC;AACf;AAKO,IAAM,gCAAN,MAAuE;AAAA,EAAvE;AACN,SAAS,eAAe;AACxB,SAAS,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMvB,MAAM,SAAS,OAA4C;AAC1D,UAAM,EAAE,QAAQ,kBAAkB,MAAM,KAAK,IAAI;AAGjD,UAAM,cAAc,SAAS,QAAQ,EAAE;AACvC,QAAI,MAAM,WAAW,GAAG;AACvB,YAAM,IAAI,MAAM,gCAAgC,MAAM,qCAAqC;AAAA,IAC5F;AAGA,UAAM,SAAS,kBACZ,2EACA;AAuBH,UAAM,OAAO;AAAA,MACZ;AAAA,MACA;AAAA,MACA,OAAO,WAAW;AAAA,MAClB;AAAA,MACA;AAAA,IACD;AAGA,QAAI,MAAM;AACT,WAAK,KAAK,UAAU,IAAI;AAAA,IACzB;AAEA,UAAM,MAAM,MAAM,iBAAsC,IAAI;AAG5D,UAAM,SAAsB;AAAA;AAAA,MAE3B,IAAI,OAAO,IAAI,MAAM;AAAA,MACrB,OAAO,IAAI;AAAA,MACX,MAAM,IAAI;AAAA,MACV,OAAO,IAAI;AAAA,MACX,KAAK,IAAI;AAAA,MACT,UAAU;AAAA;AAAA,MAGV,QAAQ,gBAAgB,IAAI,MAAM;AAAA;AAAA,MAGlC,GAAI,IAAI,aAAa;AAAA,QACpB,WAAW,IAAI,UAAU,IAAI,OAAK,gBAAgB,CAAC,CAAC,EAAE,OAAO,CAAC,MAA2B,MAAM,IAAI;AAAA,MACpG;AAAA,MACA,GAAI,IAAI,UAAU;AAAA,QACjB,QAAQ,IAAI;AAAA,MACb;AAAA;AAAA,MAGA,GAAI,IAAI,aAAa;AAAA,QACpB,WAAW,IAAI;AAAA,MAChB;AAAA,IACD;AAKA,QAAI,IAAI,aAAa,QAAW;AAC/B,aAAO,WAAW,IAAI,SAAS,IAAI,cAAY;AAAA,QAC9C,IAAI,wBAAwB,QAAQ,GAAG;AAAA,QACvC,MAAM,QAAQ;AAAA,QACd,WAAW,QAAQ;AAAA,QACnB,QAAQ,gBAAgB,QAAQ,MAAM;AAAA,QACtC,GAAI,QAAQ,aAAa,EAAE,WAAW,QAAQ,UAAU;AAAA,MACzD,EAAE;AAAA,IACH;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,WAAW,OAAsD;AACtE,UAAM,EAAE,WAAW,KAAK,IAAI;AAK5B,UAAM,mBAAmB,SAAS,WAAW,EAAE;AAC/C,QAAI,MAAM,gBAAgB,GAAG;AAC5B,YAAM,IAAI,MAAM,8BAA8B,SAAS,uCAAuC;AAAA,IAC/F;AAcA,UAAM,UAAU,OACb,SAAS,IAAI,oBAAoB,gBAAgB,KACjD,sCAAsC,gBAAgB;AAGzD,UAAM,MAAM,MAAM,iBAAwC;AAAA,MACzD;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACD,CAAC;AAGD,WAAO;AAAA,MACN,IAAI,OAAO,IAAI,EAAE;AAAA,MACjB,MAAM,IAAI;AAAA,MACV,QAAQ,gBAAgB,IAAI,IAAI;AAAA,MAChC,YAAY,IAAI;AAAA,MAChB,GAAI,IAAI,cAAc,EAAE,YAAY,IAAI,WAAW;AAAA;AAAA,MAEnD,GAAI,IAAI,YAAY,EAAE,UAAU,IAAI,SAAS;AAAA,MAC7C,GAAI,IAAI,aAAa,EAAE,WAAW,IAAI,UAAU;AAAA,IACjD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,QAAQ,MAAM,KAAK,IAAI;AAG/B,UAAM,YAAY,SAAS,QAAQ,EAAE;AACrC,QAAI,MAAM,SAAS,GAAG;AACrB,YAAM,IAAI,MAAM,kBAAkB,IAAI,YAAY,MAAM,+BAA+B;AAAA,IACxF;AAGA,UAAM,SACL,SAAS,UACN,MAAM,mBAAmB,WAAW,IAAI,IACxC,MAAM,gBAAgB,WAAW,IAAI;AAGzC,WAAO;AAAA,MACN,GAAG;AAAA,MACH,IAAI,OAAO,OAAO,EAAE;AAAA,IACrB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,WAAW,KAAK,IAAI;AAK5B,UAAM,mBAAmB,SAAS,WAAW,EAAE;AAC/C,QAAI,MAAM,gBAAgB,GAAG;AAC5B,YAAM,IAAI,MAAM,8BAA8B,SAAS,uCAAuC;AAAA,IAC/F;AAGA,UAAM,SAAS,MAAM,mBAAmB,kBAAkB,IAAI;AAG9D,WAAO;AAAA,MACN,GAAG;AAAA,MACH,IAAI,OAAO,OAAO,EAAE;AAAA,IACrB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,YAAY,OAAqD;AACtE,UAAM,EAAE,OAAO,MAAM,QAAQ,KAAK,IAAI;AAGtC,UAAM,SAAS,MAAM,YAAY,OAAO,MAAM,EAAE,QAAQ,KAAK,CAAC;AAG9D,UAAM,cAAc,OAAO,OAAO,WAAW,WAC1C,OAAO,SACP,SAAS,OAAO,OAAO,MAAM,GAAG,EAAE;AAErC,WAAO;AAAA,MACN,IAAI,OAAO,WAAW;AAAA,MACtB,KAAK,OAAO;AAAA,MACZ,QAAQ;AAAA,IACT;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,iBAAiB,OAA0D;AAChF,UAAM,EAAE,UAAU,OAAO,MAAM,QAAQ,KAAK,IAAI;AAIhD,UAAM,eAAe,SAAS,UAAU,EAAE;AAC1C,QAAI,MAAM,YAAY,GAAG;AACxB,YAAM,IAAI,MAAM,uCAAuC,QAAQ,qCAAqC;AAAA,IACrG;AAGA,UAAM,eAAe,MAAM,eAAe,cAAc,IAAI;AAG5D,UAAM,cAAc,MAAM,YAAY,OAAO,MAAM,EAAE,QAAQ,KAAK,CAAC;AACnE,UAAM,cAAc,OAAO,YAAY,WAAW,WAC/C
,YAAY,SACZ,SAAS,OAAO,YAAY,MAAM,GAAG,EAAE;AAG1C,UAAM,cAAc,MAAM,eAAe,aAAa,IAAI;AAG1D,UAAM,YAAY,cAAc,WAAW;AAE3C,WAAO;AAAA,MACN,IAAI,OAAO,WAAW;AAAA,MACtB,KAAK,YAAY;AAAA,MACjB,QAAQ;AAAA,IACT;AAAA,EACD;AACD;;;ACxUA,SAAS,sBAAsB;AAC/B,SAAS,MAAM,SAAS,UAAU,eAAe;AAkB1C,IAAM,wBAAN,MAA4B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQlC,OAAO,uBAAuB,MAAsB;AACnD,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAIA,QAAI,eAAe;AACnB,QAAI,cAAc;AAElB,WAAO,iBAAiB,aAAa;AACpC,qBAAe;AACf,oBAAc,KAAK,kBAAkB,WAAW;AAAA,IACjD;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAe,kBAAkB,MAAsB;AAGtD,UAAM,eAAe;AAErB,WAAO,KAAK,QAAQ,cAAc,CAAC,QAAQ,SAAS,YAAY;AAE/D,YAAM,eAAe,KAAK,UAAU,OAAO;AAI3C,YAAM,eAAe,KAAK,aAAa,OAAO;AAI9C,UAAI,cAAc;AACjB,eAAO,OAAO,YAAY;AAAA;AAAA,EAAO,YAAY;AAAA;AAAA;AAAA,MAC9C,OAAO;AAEN,eAAO,OAAO,YAAY;AAAA;AAAA;AAAA,MAC3B;AAAA,IACD,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,UAAU,MAAsB;AAC9C,WAAO,KACL,KAAK,EACL,QAAQ,SAAS,GAAG,EACpB,QAAQ,SAAS,GAAG,EACpB,QAAQ,UAAU,GAAG,EACrB,QAAQ,WAAW,GAAG,EACtB,QAAQ,UAAU,GAAG;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,OAAe,aAAa,SAAyB;AACpD,QAAI,CAAC,SAAS;AACb,aAAO;AAAA,IACR;AAGA,QAAI,UAAU,QAAQ,KAAK;AAG3B,cAAU,QAAQ,QAAQ,WAAW,MAAM;AAE3C,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAO,iBAAiB,MAAuB;AAC9C,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAEA,UAAM,eAAe;AACrB,WAAO,aAAa,KAAK,IAAI;AAAA,EAC9B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,OAAO,yBAAyB,MAAsB;AACrD,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAMA,UAAM,kBAAkB;AAExB,WAAO,KAAK,QAAQ,iBAAiB,CAAC,QAAQ,UAAU,YAAY;AAGnE,aAAO,QAAQ,KAAK;AAAA,IACrB,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,OAAO,gBAAgB,MAAsB;AAC5C,QAAI,CAAC,MAAM;AACV,aAAO;AAAA,IACR;AAGA,SAAK,cAAc,SAAS,IAAI;AAGhC,QAAI,YAAY;AAIhB,gBAAY,KAAK,yBAAyB,SAAS;AAGnD,gBAAY,KAAK,uBAAuB,SAAS;AAGjD,SAAK,cAAc,UAAU,SAAS;AAEtC,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA,EAKA,OAAe,cAAc,OAAe,SAAuB;AAClE,UAAM,cAAc,QAAQ,IAAI;AAChC,QAAI,CAAC,aAAa;AACjB;AAAA,IACD;AAEA,QAAI;AACH,YAAM,kBAAkB,KAAK,sBAAsB,WAAW;AAC9D,YAAM,aAAY,oBAAI,KAAK,GAAE,YAAY;AACzC,YAAM,YAAY;AAElB,YAAM,WAAW,GAAG,SAAS;AAAA,GAAM,SAAS,gBAAgB,KAAK;AAAA,EAAK,SAAS;AAAA,EAAK,KAAK;AAAA,EAAM,OAAO;AAAA;AAAA;AAEtG,qBAAe,iBAAiB,UAAU,OAAO;AAAA,IAClD,QAAQ;AAAA,IAGR;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,OAAe,sBAAsB,aAA6B;AACjE,UAAM,MAAM,QAAQ,WAAW;AAC/B,UAAM,MAAM,QAAQ,WAAW;AAC/B,UAAM,OAAO,SAAS,aAAa,GAAG;AAGtC,UAAM,MAAM,oBAAI,KAAK;AACrB,UAAM,YAAY;AAAA,MACjB,IAAI,YAAY;AAAA,MAChB,OAAO,IAAI,SAAS,IAAI,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,MAC1C,OAAO,IAAI,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,IACtC,EAAE,KAAK,EAAE,IAAI,MAAM;AAAA,MAClB,OAAO,IAAI,SAAS,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,MACtC,OAAO,IAAI,WAAW,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,MACxC,OAAO,IAAI,WAAW,CAAC,EAAE,SAAS,GAAG,GAAG;AAAA,IACzC,EAAE,KAAK,EAAE;AAET,WAAO,KAAK,KAAK,GAAG,IAAI,IAAI,SAAS,GAAG,GAAG,EAAE;AAAA,EAC9C;AACD;;;AC5LO,IAAM,gCAAN,MAAuE;AAAA,EAAvE;AACN,SAAS,eAAe;AACxB,SAAS,cAAc;AAMvB;AAAA;AAAA;AAAA;AAAA,SAAQ,gBAAoC;AAAA;AAAA;AAAA;AAAA;AAAA,EAK5C,MAAM,SAAS,OAA4C;AAC1D,UAAM,EAAE,QAAQ,kBAAkB,KAAK,IAAI;AAI3C,UAAM,QAAQ,OAAO,MAAM,oBAAoB;AAC/C,QAAI,+BAAQ,IAAI;AACf,WAAK,gBAAgB,MAAM,CAAC,EAAE,YAAY;AAAA,IAC3C;AAGA,UAAM,MAAM,MAAM,iBAAiB,MAAM;AAGzC,UAAM,QAAQ,IAAI,UAAU,IAAI,MAAM,YAAY,EAAE,SAAS,MAAM,KAAK,IAAI,MAAM,YAAY,EAAE,SAAS,WAAW,KAAK,IAAI,MAAM,YAAY,EAAE,SAAS,UAAU,KACjK,WACA;AAGH,UAAM,SAAsB;AAAA,MAC3B,IAAI,IAAI;AAAA,MACR,OAAO,IAAI;AAAA,MACX,MAAM,IAAI,eAAe;AAAA,MACzB;AAAA,MACA,KAAK,IAAI;AAAA,MACT,UAAU;AAAA,MACV,QAAQ;AAAA;AAAA;AAAA,MAGR,aAAa,IAAI;AAAA,MACjB,WAAW,IAAI;AAAA,MACf,WAAW,IAAI;AAAA,IAChB;AAGA,QAAI,iBAAiB;AACpB,UAAI;AACH,cAAM,WAAW,MAAM,KAAK,mBAAmB,MAAM;AACrD,YAAI,UAAU;AACb,iBAAO,WAAW;AAAA,QACnB;AAAA,MACD,QAA
Q;AAAA,MAER;AAAA,IACD;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA,EAKA,MAAc,mBAAmB,YAAsD;AACtF,QAAI;AACH,YAAM,WAAW,MAAM,yBAAyB,UAAU;AAE1D,aAAO,SAAS,IAAI,cAAY;AAAA,QAC/B,IAAI,QAAQ;AAAA,QACZ,MAAM,QAAQ;AAAA,QACd,WAAW,QAAQ;AAAA,QACnB,QAAQ;AAAA;AAAA,QACR,GAAI,QAAQ,aAAa,EAAE,WAAW,QAAQ,UAAU;AAAA,MACzD,EAAE;AAAA,IACH,QAAQ;AACP,aAAO,CAAC;AAAA,IACT;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,WAAW,OAAsD;AACtE,UAAM,EAAE,UAAU,IAAI;AAEtB,UAAM,MAAM,MAAM,iBAAiB,SAAS;AAE5C,WAAO;AAAA,MACN,IAAI,IAAI;AAAA,MACR,MAAM,IAAI;AAAA,MACV,QAAQ;AAAA;AAAA,MACR,YAAY,IAAI;AAAA,IACjB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,QAAQ,KAAK,IAAI;AAKzB,UAAM,gBAAgB,sBAAsB,gBAAgB,IAAI;AAEhE,UAAM,SAAS,MAAM,oBAAoB,QAAQ,aAAa;AAE9D,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,MACZ,YAAY,OAAO;AAAA,IACpB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAc,OAAmD;AACtE,UAAM,EAAE,WAAW,KAAK,IAAI;AAG5B,UAAM,gBAAgB,sBAAsB,gBAAgB,IAAI;AAEhE,UAAM,SAAS,MAAM,oBAAoB,WAAW,aAAa;AAEjE,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,MACZ,YAAY,OAAO;AAAA,IACpB;AAAA,EACD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,YAAY,OAAqD;AACtE,UAAM,EAAE,OAAO,MAAM,QAAQ,QAAQ,IAAI;AAGzC,UAAM,mBAAmB,WAAW,QAAQ,IAAI,mBAAmB,KAAK;AAExE,QAAI,CAAC,kBAAkB;AACtB,YAAM,IAAI,MAAM,0KAA0K;AAAA,IAC3L;AAEA,UAAM,SAAS,MAAM,kBAAkB,OAAO,MAAM,kBAAkB,MAAM;AAE5E,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,IACb;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,iBAAiB,OAA0D;AAhMlF;AAiME,UAAM,EAAE,UAAU,OAAO,MAAM,QAAQ,QAAQ,IAAI;AAGnD,UAAM,cAAc,MAAM,iBAAiB,QAAQ;AAGnD,UAAM,QAAQ,SAAS,MAAM,oBAAoB;AACjD,UAAM,mBAAmB,aAAW,oCAAQ,OAAR,mBAAY,kBAAiB,QAAQ,IAAI,mBAAmB,KAAK;AAErG,QAAI,CAAC,kBAAkB;AACtB,YAAM,IAAI,MAAM,6HAA6H;AAAA,IAC9I;AAGA,UAAM,SAAS,MAAM;AAAA,MACpB;AAAA,MACA;AAAA,MACA;AAAA,MACA,YAAY;AAAA;AAAA,MACZ;AAAA,IACD;AAEA,WAAO;AAAA,MACN,IAAI,OAAO;AAAA,MACX,KAAK,OAAO;AAAA,IACb;AAAA,EACD;AACD;;;ACjNO,IAAM,iCAAN,MAAqC;AAAA;AAAA;AAAA;AAAA,EAI3C,OAAO,OAAO,UAAkD;AAC/D,YAAQ,UAAU;AAAA,MACjB,KAAK;AACJ,eAAO,IAAI,8BAA8B;AAAA,MAC1C,KAAK;AACJ,eAAO,IAAI,8BAA8B;AAAA,MAC1C;AACC,cAAM,IAAI,MAAM,0CAA0C,QAAQ,EAAE;AAAA,IACtE;AAAA,EACD;AACD;","names":[]}
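The source map above embeds the sources of the new MCP issue-management layer (GitHubIssueManagementProvider, LinearMarkupConverter, LinearIssueManagementProvider, IssueManagementProviderFactory). As a rough sketch of how that layer fits together, based only on those embedded sources; the import path and the fileFollowUp wrapper are illustrative, not a documented entry point of the published CLI:

// Sketch only, assuming the provider interface shown in the embedded sources above.
import { IssueManagementProviderFactory } from './mcp/IssueManagementProviderFactory.js'

async function fileFollowUp(parentNumber: string): Promise<void> {
  // 'github' expects numeric issue IDs; 'linear' expects identifiers like "ENG-123"
  const provider = IssueManagementProviderFactory.create('linear')

  // Fetch the parent issue (for Linear this also caches the team key, e.g. "ENG")
  const parent = await provider.getIssue({ number: parentNumber, includeComments: false })

  // New in 0.7.0: create a linked child issue. Linear links it atomically via parentId;
  // GitHub creates the issue first and then links it with a sub-issue GraphQL mutation.
  const child = await provider.createChildIssue({
    parentId: parent.id,
    title: `Follow-up: ${parent.title}`,
    body: 'Split out from the parent issue.',
  })

  console.log(`Created ${child.id}: ${child.url}`)
}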
@@ -1,14 +1,14 @@
 #!/usr/bin/env node
 import {
   ProcessManager
-} from "./chunk-
+} from "./chunk-453NC377.js";
 import {
   detectPackageManager,
   runScript
-} from "./chunk-
+} from "./chunk-LLWX3PCW.js";
 import {
   readPackageJson
-} from "./chunk-
+} from "./chunk-ITN64ENQ.js";
 import {
   logger
 } from "./chunk-VT4PDUYT.js";
@@ -212,4 +212,4 @@ var DevServerManager = class {
 export {
   DevServerManager
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-GV5X6XUE.js.map
@@ -139,6 +139,41 @@ async function createLinearIssue(title, body, teamKey, _labels) {
     handleLinearError(error, "createLinearIssue");
   }
 }
+async function createLinearChildIssue(title, body, teamKey, parentId, _labels) {
+  try {
+    logger.debug(`Creating Linear child issue in team ${teamKey}: ${title}`);
+    const client = createLinearClient();
+    const teams = await client.teams();
+    const team = teams.nodes.find((t) => t.key === teamKey);
+    if (!team) {
+      throw new LinearServiceError("NOT_FOUND", `Linear team ${teamKey} not found`);
+    }
+    const issueInput = {
+      teamId: team.id,
+      title,
+      parentId
+      // UUID of parent issue
+    };
+    if (body) {
+      issueInput.description = body;
+    }
+    const payload = await client.createIssue(issueInput);
+    const issue = await payload.issue;
+    if (!issue) {
+      throw new LinearServiceError("CLI_ERROR", "Failed to create Linear child issue");
+    }
+    const url = issue.url ?? buildLinearIssueUrl(issue.identifier, title);
+    return {
+      identifier: issue.identifier,
+      url
+    };
+  } catch (error) {
+    if (error instanceof LinearServiceError) {
+      throw error;
+    }
+    handleLinearError(error, "createLinearChildIssue");
+  }
+}
 async function createLinearComment(identifier, body) {
   try {
     logger.debug(`Creating comment on Linear issue ${identifier}`);
@@ -272,10 +307,11 @@ export {
   LinearServiceError,
   fetchLinearIssue,
   createLinearIssue,
+  createLinearChildIssue,
   createLinearComment,
   updateLinearIssueState,
   getLinearComment,
   updateLinearComment,
   fetchLinearIssueComments
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-HBJITKSZ.js.map
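The added createLinearChildIssue helper resolves the team by key and passes the parent issue's UUID (not its "ENG-123" identifier) as parentId, so Linear links the child atomically. A minimal calling sketch, assuming LINEAR_API_TOKEN is set; the import path is illustrative, since this code ships as an internal bundled chunk rather than a public entry point:

// Sketch only; chunk-HBJITKSZ.js is an internal bundle, not a documented API surface.
import { fetchLinearIssue, createLinearChildIssue } from './chunk-HBJITKSZ.js'

async function splitOut(parentIdentifier: string, title: string, body: string) {
  // Resolve "ENG-123" to the parent issue's UUID (createLinearChildIssue wants the UUID)
  const parent = await fetchLinearIssue(parentIdentifier)

  // Derive the team key from the identifier prefix, e.g. "ENG" from "ENG-123"
  const teamKey = parentIdentifier.split('-')[0]

  const child = await createLinearChildIssue(
    title,      // child issue title
    body,       // child issue description (markdown)
    teamKey,    // Linear team key
    parent.id,  // parent UUID from fetchLinearIssue, not the identifier
  )

  return child  // { identifier, url }
}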
@@ -0,0 +1 @@
+
{"version":3,"sources":["../src/types/linear.ts","../src/utils/linear.ts"],"sourcesContent":["/**\n * Linear API response types (from @linear/sdk)\n */\n\n/**\n * Linear issue response from SDK\n */\nexport interface LinearIssue {\n /** Linear internal UUID */\n id: string\n /** Issue identifier in TEAM-NUMBER format (e.g., ENG-123) */\n identifier: string\n /** Issue title */\n title: string\n /** Issue description (markdown) */\n description?: string\n /** Current workflow state name */\n state?: string\n /** Linear web URL */\n url: string\n /** Creation timestamp (ISO string) */\n createdAt: string\n /** Last update timestamp (ISO string) */\n updatedAt: string\n}\n\n/**\n * Linear comment response from SDK\n */\nexport interface LinearComment {\n /** Comment UUID */\n id: string\n /** Comment body (markdown) */\n body: string\n /** Creation timestamp (ISO string) */\n createdAt: string\n /** Last update timestamp (ISO string) */\n updatedAt: string\n /** Comment URL */\n url: string\n}\n\n/**\n * Linear error codes\n */\nexport type LinearErrorCode =\n | 'NOT_FOUND'\n | 'UNAUTHORIZED'\n | 'INVALID_STATE'\n | 'RATE_LIMITED'\n | 'CLI_NOT_FOUND'\n | 'CLI_ERROR'\n\n/**\n * Linear error details\n */\nexport interface LinearError {\n code: LinearErrorCode\n message: string\n details?: unknown\n}\n\n/**\n * Custom error class for Linear operations\n */\nexport class LinearServiceError extends Error {\n constructor(\n public code: LinearErrorCode,\n message: string,\n public details?: unknown,\n ) {\n super(message)\n this.name = 'LinearServiceError'\n // Maintain proper stack trace for where error was thrown (V8 only)\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, LinearServiceError)\n }\n }\n}\n","/**\n * Linear SDK utilities\n * Wrapper functions for the @linear/sdk\n */\n\nimport { LinearClient } from '@linear/sdk'\nimport type { LinearIssue, LinearComment } from '../types/linear.js'\nimport { LinearServiceError } from '../types/linear.js'\nimport { logger } from './logger.js'\n\n/**\n * Slugify a title for use in Linear URLs\n * Converts to lowercase, replaces non-alphanumeric with hyphens, truncates to reasonable length\n * @param title - Issue title\n * @param maxLength - Maximum slug length (default: 50)\n * @returns Slugified title\n */\nexport function slugifyTitle(title: string, maxLength: number = 50): string {\n // Convert to lowercase, replace non-alphanumeric chars with hyphens\n const slug = title\n .toLowerCase()\n .replace(/[^a-z0-9]+/g, '-')\n .replace(/^-+|-+$/g, '') // trim leading/trailing hyphens\n\n // If already short enough, return as-is\n if (slug.length <= maxLength) {\n return slug\n }\n\n // Split by hyphens and rebuild until we hit the limit\n const parts = slug.split('-')\n let result = ''\n for (const part of parts) {\n const candidate = result ? `${result}-${part}` : part\n if (candidate.length > maxLength) {\n break\n }\n result = candidate\n }\n\n return result || slug.slice(0, maxLength) // fallback if first part is too long\n}\n\n/**\n * Build a Linear issue URL with optional title slug\n * @param identifier - Issue identifier (e.g., \"ENG-123\")\n * @param title - Optional issue title for slug\n * @returns Linear URL\n */\nexport function buildLinearIssueUrl(identifier: string, title?: string): string {\n const base = `https://linear.app/issue/${identifier}`\n if (title) {\n const slug = slugifyTitle(title)\n return slug ? 
`${base}/${slug}` : base\n }\n return base\n}\n\n/**\n * Get Linear API token from environment\n * @returns API token\n * @throws LinearServiceError if token not found\n */\nfunction getLinearApiToken(): string {\n const token = process.env.LINEAR_API_TOKEN\n if (!token) {\n throw new LinearServiceError(\n 'UNAUTHORIZED',\n 'LINEAR_API_TOKEN not set. Configure in settings.local.json or set environment variable.',\n )\n }\n return token\n}\n\n/**\n * Create a Linear SDK client instance\n * @returns Configured LinearClient\n */\nfunction createLinearClient(): LinearClient {\n return new LinearClient({ apiKey: getLinearApiToken() })\n}\n\n/**\n * Handle SDK errors and convert to LinearServiceError\n * @param error - Error from SDK\n * @param context - Context string for debugging\n * @throws LinearServiceError\n */\nfunction handleLinearError(error: unknown, context: string): never {\n logger.debug(`${context}: Handling error`, { error })\n\n // SDK errors typically have a message property\n const errorMessage = error instanceof Error ? error.message : String(error)\n\n // Map common error patterns\n if (errorMessage.includes('not found') || errorMessage.includes('Not found')) {\n throw new LinearServiceError('NOT_FOUND', 'Linear issue or resource not found', { error })\n }\n\n if (\n errorMessage.includes('unauthorized') ||\n errorMessage.includes('Unauthorized') ||\n errorMessage.includes('Invalid API key')\n ) {\n throw new LinearServiceError(\n 'UNAUTHORIZED',\n 'Linear authentication failed. Check LINEAR_API_TOKEN.',\n { error },\n )\n }\n\n if (errorMessage.includes('rate limit')) {\n throw new LinearServiceError('RATE_LIMITED', 'Linear API rate limit exceeded', { error })\n }\n\n // Generic SDK error\n throw new LinearServiceError('CLI_ERROR', `Linear SDK error: ${errorMessage}`, { error })\n}\n\n/**\n * Fetch a Linear issue by identifier\n * @param identifier - Linear issue identifier (e.g., \"ENG-123\")\n * @returns Linear issue details\n * @throws LinearServiceError if issue not found or SDK error\n */\nexport async function fetchLinearIssue(identifier: string): Promise<LinearIssue> {\n try {\n logger.debug(`Fetching Linear issue: ${identifier}`)\n const client = createLinearClient()\n const issue = await client.issue(identifier)\n\n if (!issue) {\n throw new LinearServiceError('NOT_FOUND', `Linear issue ${identifier} not found`)\n }\n\n // Convert SDK issue to our LinearIssue type\n const result: LinearIssue = {\n id: issue.id,\n identifier: issue.identifier,\n title: issue.title,\n url: issue.url,\n createdAt: issue.createdAt.toISOString(),\n updatedAt: issue.updatedAt.toISOString(),\n }\n\n // Add optional fields if present\n if (issue.description) {\n result.description = issue.description\n }\n\n if (issue.state) {\n const state = await issue.state\n if (state?.name) {\n result.state = state.name\n }\n }\n\n return result\n } catch (error) {\n if (error instanceof LinearServiceError) {\n throw error\n }\n handleLinearError(error, 'fetchLinearIssue')\n }\n}\n\n/**\n * Create a new Linear issue\n * @param title - Issue title\n * @param body - Issue description (markdown)\n * @param teamKey - Team key (e.g., \"ENG\", \"PLAT\")\n * @param labels - Optional label names to apply\n * @returns Created issue identifier and URL\n * @throws LinearServiceError on creation failure\n */\nexport async function createLinearIssue(\n title: string,\n body: string,\n teamKey: string,\n _labels?: string[],\n): Promise<{ identifier: string; url: string }> {\n try {\n logger.debug(`Creating Linear 
issue in team ${teamKey}: ${title}`)\n const client = createLinearClient()\n\n // Get team by key\n const teams = await client.teams()\n const team = teams.nodes.find((t) => t.key === teamKey)\n\n if (!team) {\n throw new LinearServiceError('NOT_FOUND', `Linear team ${teamKey} not found`)\n }\n\n // Create issue\n const issueInput: { teamId: string; title: string; description?: string } = {\n teamId: team.id,\n title,\n }\n\n if (body) {\n issueInput.description = body\n }\n\n const payload = await client.createIssue(issueInput)\n\n const issue = await payload.issue\n\n if (!issue) {\n throw new LinearServiceError('CLI_ERROR', 'Failed to create Linear issue')\n }\n\n // Construct URL\n const url = issue.url ?? buildLinearIssueUrl(issue.identifier, title)\n\n return {\n identifier: issue.identifier,\n url,\n }\n } catch (error) {\n if (error instanceof LinearServiceError) {\n throw error\n }\n handleLinearError(error, 'createLinearIssue')\n }\n}\n\n/**\n * Create a child issue linked to a parent issue\n * Linear supports atomic creation with parentId field\n * @param title - Issue title\n * @param body - Issue description (markdown)\n * @param teamKey - Team key (e.g., \"ENG\")\n * @param parentId - Parent issue UUID (from issue.id, not identifier)\n * @param labels - Optional label names to apply\n * @returns Created issue identifier and URL\n * @throws LinearServiceError on creation failure\n */\nexport async function createLinearChildIssue(\n title: string,\n body: string,\n teamKey: string,\n parentId: string,\n _labels?: string[],\n): Promise<{ identifier: string; url: string }> {\n try {\n logger.debug(`Creating Linear child issue in team ${teamKey}: ${title}`)\n const client = createLinearClient()\n\n // Get team by key\n const teams = await client.teams()\n const team = teams.nodes.find((t) => t.key === teamKey)\n\n if (!team) {\n throw new LinearServiceError('NOT_FOUND', `Linear team ${teamKey} not found`)\n }\n\n // Create issue with parentId for atomic parent-child relationship\n const issueInput: { teamId: string; title: string; description?: string; parentId: string } = {\n teamId: team.id,\n title,\n parentId, // UUID of parent issue\n }\n\n if (body) {\n issueInput.description = body\n }\n\n const payload = await client.createIssue(issueInput)\n\n const issue = await payload.issue\n\n if (!issue) {\n throw new LinearServiceError('CLI_ERROR', 'Failed to create Linear child issue')\n }\n\n // Construct URL\n const url = issue.url ?? 
buildLinearIssueUrl(issue.identifier, title)\n\n return {\n identifier: issue.identifier,\n url,\n }\n } catch (error) {\n if (error instanceof LinearServiceError) {\n throw error\n }\n handleLinearError(error, 'createLinearChildIssue')\n }\n}\n\n/**\n * Create a comment on a Linear issue\n * @param identifier - Linear issue identifier (e.g., \"ENG-123\")\n * @param body - Comment body (markdown)\n * @returns Created comment details\n * @throws LinearServiceError on creation failure\n */\nexport async function createLinearComment(\n identifier: string,\n body: string,\n): Promise<LinearComment> {\n try {\n logger.debug(`Creating comment on Linear issue ${identifier}`)\n const client = createLinearClient()\n\n // Get issue by identifier to get its ID\n const issue = await client.issue(identifier)\n if (!issue) {\n throw new LinearServiceError('NOT_FOUND', `Linear issue ${identifier} not found`)\n }\n\n // Create comment using issue ID\n const payload = await client.createComment({\n issueId: issue.id,\n body,\n })\n\n const comment = await payload.comment\n\n if (!comment) {\n throw new LinearServiceError('CLI_ERROR', 'Failed to create Linear comment')\n }\n\n return {\n id: comment.id,\n body: comment.body,\n createdAt: comment.createdAt.toISOString(),\n updatedAt: comment.updatedAt.toISOString(),\n url: comment.url,\n }\n } catch (error) {\n if (error instanceof LinearServiceError) {\n throw error\n }\n handleLinearError(error, 'createLinearComment')\n }\n}\n\n/**\n * Update a Linear issue's workflow state\n * @param identifier - Linear issue identifier (e.g., \"ENG-123\")\n * @param stateName - Target state name (e.g., \"In Progress\", \"Done\")\n * @throws LinearServiceError on update failure\n */\nexport async function updateLinearIssueState(\n identifier: string,\n stateName: string,\n): Promise<void> {\n try {\n logger.debug(`Updating Linear issue ${identifier} state to: ${stateName}`)\n const client = createLinearClient()\n\n // Get issue by identifier\n const issue = await client.issue(identifier)\n if (!issue) {\n throw new LinearServiceError('NOT_FOUND', `Linear issue ${identifier} not found`)\n }\n\n // Get team to find state\n const team = await issue.team\n if (!team) {\n throw new LinearServiceError('CLI_ERROR', 'Issue has no team')\n }\n\n // Find state by name\n const states = await team.states()\n const state = states.nodes.find((s) => s.name === stateName)\n\n if (!state) {\n throw new LinearServiceError(\n 'NOT_FOUND',\n `State \"${stateName}\" not found in team ${team.key}`,\n )\n }\n\n // Update issue state\n await client.updateIssue(issue.id, {\n stateId: state.id,\n })\n } catch (error) {\n if (error instanceof LinearServiceError) {\n throw error\n }\n handleLinearError(error, 'updateLinearIssueState')\n }\n}\n\n/**\n * Get a specific comment by ID\n * @param commentId - Linear comment UUID\n * @returns Comment details\n * @throws LinearServiceError if comment not found\n */\nexport async function getLinearComment(commentId: string): Promise<LinearComment> {\n try {\n logger.debug(`Fetching Linear comment: ${commentId}`)\n const client = createLinearClient()\n const comment = await client.comment({ id: commentId })\n\n if (!comment) {\n throw new LinearServiceError('NOT_FOUND', `Linear comment ${commentId} not found`)\n }\n\n return {\n id: comment.id,\n body: comment.body,\n createdAt: comment.createdAt.toISOString(),\n updatedAt: comment.updatedAt.toISOString(),\n url: comment.url,\n }\n } catch (error) {\n if (error instanceof LinearServiceError) {\n throw error\n 
}\n handleLinearError(error, 'getLinearComment')\n }\n}\n\n/**\n * Update an existing comment\n * @param commentId - Linear comment UUID\n * @param body - New comment body (markdown)\n * @returns Updated comment details\n * @throws LinearServiceError on update failure\n */\nexport async function updateLinearComment(\n commentId: string,\n body: string,\n): Promise<LinearComment> {\n try {\n logger.debug(`Updating Linear comment: ${commentId}`)\n const client = createLinearClient()\n\n const payload = await client.updateComment(commentId, { body })\n const comment = await payload.comment\n\n if (!comment) {\n throw new LinearServiceError('CLI_ERROR', 'Failed to update Linear comment')\n }\n\n return {\n id: comment.id,\n body: comment.body,\n createdAt: comment.createdAt.toISOString(),\n updatedAt: comment.updatedAt.toISOString(),\n url: comment.url,\n }\n } catch (error) {\n if (error instanceof LinearServiceError) {\n throw error\n }\n handleLinearError(error, 'updateLinearComment')\n }\n}\n\n/**\n * Fetch all comments for a Linear issue\n * @param identifier - Linear issue identifier (e.g., \"ENG-123\")\n * @returns Array of comments\n * @throws LinearServiceError on fetch failure\n */\nexport async function fetchLinearIssueComments(identifier: string): Promise<LinearComment[]> {\n try {\n logger.debug(`Fetching comments for Linear issue: ${identifier}`)\n const client = createLinearClient()\n\n // Get issue by identifier\n const issue = await client.issue(identifier)\n if (!issue) {\n throw new LinearServiceError('NOT_FOUND', `Linear issue ${identifier} not found`)\n }\n\n // Fetch comments\n const comments = await issue.comments({ first: 100 })\n\n return comments.nodes.map((comment) => ({\n id: comment.id,\n body: comment.body,\n createdAt: comment.createdAt.toISOString(),\n updatedAt: comment.updatedAt.toISOString(),\n url: comment.url,\n }))\n } catch (error) {\n if (error instanceof LinearServiceError) {\n throw error\n }\n handleLinearError(error, 'fetchLinearIssueComments')\n 
}\n}\n"],"mappings":";;;;;;AAiEO,IAAM,qBAAN,MAAM,4BAA2B,MAAM;AAAA,EAC5C,YACS,MACP,SACO,SACP;AACA,UAAM,OAAO;AAJN;AAEA;AAGP,SAAK,OAAO;AAEZ,QAAI,MAAM,mBAAmB;AAC3B,YAAM,kBAAkB,MAAM,mBAAkB;AAAA,IAClD;AAAA,EACF;AACF;;;ACzEA,SAAS,oBAAoB;AAYtB,SAAS,aAAa,OAAe,YAAoB,IAAY;AAE1E,QAAM,OAAO,MACV,YAAY,EACZ,QAAQ,eAAe,GAAG,EAC1B,QAAQ,YAAY,EAAE;AAGzB,MAAI,KAAK,UAAU,WAAW;AAC5B,WAAO;AAAA,EACT;AAGA,QAAM,QAAQ,KAAK,MAAM,GAAG;AAC5B,MAAI,SAAS;AACb,aAAW,QAAQ,OAAO;AACxB,UAAM,YAAY,SAAS,GAAG,MAAM,IAAI,IAAI,KAAK;AACjD,QAAI,UAAU,SAAS,WAAW;AAChC;AAAA,IACF;AACA,aAAS;AAAA,EACX;AAEA,SAAO,UAAU,KAAK,MAAM,GAAG,SAAS;AAC1C;AAQO,SAAS,oBAAoB,YAAoB,OAAwB;AAC9E,QAAM,OAAO,4BAA4B,UAAU;AACnD,MAAI,OAAO;AACT,UAAM,OAAO,aAAa,KAAK;AAC/B,WAAO,OAAO,GAAG,IAAI,IAAI,IAAI,KAAK;AAAA,EACpC;AACA,SAAO;AACT;AAOA,SAAS,oBAA4B;AACnC,QAAM,QAAQ,QAAQ,IAAI;AAC1B,MAAI,CAAC,OAAO;AACV,UAAM,IAAI;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAMA,SAAS,qBAAmC;AAC1C,SAAO,IAAI,aAAa,EAAE,QAAQ,kBAAkB,EAAE,CAAC;AACzD;AAQA,SAAS,kBAAkB,OAAgB,SAAwB;AACjE,SAAO,MAAM,GAAG,OAAO,oBAAoB,EAAE,MAAM,CAAC;AAGpD,QAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAG1E,MAAI,aAAa,SAAS,WAAW,KAAK,aAAa,SAAS,WAAW,GAAG;AAC5E,UAAM,IAAI,mBAAmB,aAAa,sCAAsC,EAAE,MAAM,CAAC;AAAA,EAC3F;AAEA,MACE,aAAa,SAAS,cAAc,KACpC,aAAa,SAAS,cAAc,KACpC,aAAa,SAAS,iBAAiB,GACvC;AACA,UAAM,IAAI;AAAA,MACR;AAAA,MACA;AAAA,MACA,EAAE,MAAM;AAAA,IACV;AAAA,EACF;AAEA,MAAI,aAAa,SAAS,YAAY,GAAG;AACvC,UAAM,IAAI,mBAAmB,gBAAgB,kCAAkC,EAAE,MAAM,CAAC;AAAA,EAC1F;AAGA,QAAM,IAAI,mBAAmB,aAAa,qBAAqB,YAAY,IAAI,EAAE,MAAM,CAAC;AAC1F;AAQA,eAAsB,iBAAiB,YAA0C;AAC/E,MAAI;AACF,WAAO,MAAM,0BAA0B,UAAU,EAAE;AACnD,UAAM,SAAS,mBAAmB;AAClC,UAAM,QAAQ,MAAM,OAAO,MAAM,UAAU;AAE3C,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,mBAAmB,aAAa,gBAAgB,UAAU,YAAY;AAAA,IAClF;AAGA,UAAM,SAAsB;AAAA,MAC1B,IAAI,MAAM;AAAA,MACV,YAAY,MAAM;AAAA,MAClB,OAAO,MAAM;AAAA,MACb,KAAK,MAAM;AAAA,MACX,WAAW,MAAM,UAAU,YAAY;AAAA,MACvC,WAAW,MAAM,UAAU,YAAY;AAAA,IACzC;AAGA,QAAI,MAAM,aAAa;AACrB,aAAO,cAAc,MAAM;AAAA,IAC7B;AAEA,QAAI,MAAM,OAAO;AACf,YAAM,QAAQ,MAAM,MAAM;AAC1B,UAAI,+BAAO,MAAM;AACf,eAAO,QAAQ,MAAM;AAAA,MACvB;AAAA,IACF;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,QAAI,iBAAiB,oBAAoB;AACvC,YAAM;AAAA,IACR;AACA,sBAAkB,OAAO,kBAAkB;AAAA,EAC7C;AACF;AAWA,eAAsB,kBACpB,OACA,MACA,SACA,SAC8C;AAC9C,MAAI;AACF,WAAO,MAAM,iCAAiC,OAAO,KAAK,KAAK,EAAE;AACjE,UAAM,SAAS,mBAAmB;AAGlC,UAAM,QAAQ,MAAM,OAAO,MAAM;AACjC,UAAM,OAAO,MAAM,MAAM,KAAK,CAAC,MAAM,EAAE,QAAQ,OAAO;AAEtD,QAAI,CAAC,MAAM;AACT,YAAM,IAAI,mBAAmB,aAAa,eAAe,OAAO,YAAY;AAAA,IAC9E;AAGA,UAAM,aAAsE;AAAA,MAC1E,QAAQ,KAAK;AAAA,MACb;AAAA,IACF;AAEA,QAAI,MAAM;AACR,iBAAW,cAAc;AAAA,IAC3B;AAEA,UAAM,UAAU,MAAM,OAAO,YAAY,UAAU;AAEnD,UAAM,QAAQ,MAAM,QAAQ;AAE5B,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,mBAAmB,aAAa,+BAA+B;AAAA,IAC3E;AAGA,UAAM,MAAM,MAAM,OAAO,oBAAoB,MAAM,YAAY,KAAK;AAEpE,WAAO;AAAA,MACL,YAAY,MAAM;AAAA,MAClB;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,QAAI,iBAAiB,oBAAoB;AACvC,YAAM;AAAA,IACR;AACA,sBAAkB,OAAO,mBAAmB;AAAA,EAC9C;AACF;AAaA,eAAsB,uBACpB,OACA,MACA,SACA,UACA,SAC8C;AAC9C,MAAI;AACF,WAAO,MAAM,uCAAuC,OAAO,KAAK,KAAK,EAAE;AACvE,UAAM,SAAS,mBAAmB;AAGlC,UAAM,QAAQ,MAAM,OAAO,MAAM;AACjC,UAAM,OAAO,MAAM,MAAM,KAAK,CAAC,MAAM,EAAE,QAAQ,OAAO;AAEtD,QAAI,CAAC,MAAM;AACT,YAAM,IAAI,mBAAmB,aAAa,eAAe,OAAO,YAAY;AAAA,IAC9E;AAGA,UAAM,aAAwF;AAAA,MAC5F,QAAQ,KAAK;AAAA,MACb;AAAA,MACA;AAAA;AAAA,IACF;AAEA,QAAI,MAAM;AACR,iBAAW,cAAc;AAAA,IAC3B;AAEA,UAAM,UAAU,MAAM,OAAO,YAAY,UAAU;AAEnD,UAAM,QAAQ,MAAM,QAAQ;AAE5B,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,mBAAmB,aAAa,qCAAqC;AAAA,IACjF;AAGA,UAAM,MAAM,MAAM,OAAO,oBAAoB,MAAM,YAAY,KAAK;AAEpE,WAAO;AAAA,MACL,YAAY,MAAM;AAAA,MAClB;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,QAAI,iBAAiB,oBAAoB;AACvC,YAAM;AAAA,IACR;AACA,sBAAk
B,OAAO,wBAAwB;AAAA,EACnD;AACF;AASA,eAAsB,oBACpB,YACA,MACwB;AACxB,MAAI;AACF,WAAO,MAAM,oCAAoC,UAAU,EAAE;AAC7D,UAAM,SAAS,mBAAmB;AAGlC,UAAM,QAAQ,MAAM,OAAO,MAAM,UAAU;AAC3C,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,mBAAmB,aAAa,gBAAgB,UAAU,YAAY;AAAA,IAClF;AAGA,UAAM,UAAU,MAAM,OAAO,cAAc;AAAA,MACzC,SAAS,MAAM;AAAA,MACf;AAAA,IACF,CAAC;AAED,UAAM,UAAU,MAAM,QAAQ;AAE9B,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI,mBAAmB,aAAa,iCAAiC;AAAA,IAC7E;AAEA,WAAO;AAAA,MACL,IAAI,QAAQ;AAAA,MACZ,MAAM,QAAQ;AAAA,MACd,WAAW,QAAQ,UAAU,YAAY;AAAA,MACzC,WAAW,QAAQ,UAAU,YAAY;AAAA,MACzC,KAAK,QAAQ;AAAA,IACf;AAAA,EACF,SAAS,OAAO;AACd,QAAI,iBAAiB,oBAAoB;AACvC,YAAM;AAAA,IACR;AACA,sBAAkB,OAAO,qBAAqB;AAAA,EAChD;AACF;AAQA,eAAsB,uBACpB,YACA,WACe;AACf,MAAI;AACF,WAAO,MAAM,yBAAyB,UAAU,cAAc,SAAS,EAAE;AACzE,UAAM,SAAS,mBAAmB;AAGlC,UAAM,QAAQ,MAAM,OAAO,MAAM,UAAU;AAC3C,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,mBAAmB,aAAa,gBAAgB,UAAU,YAAY;AAAA,IAClF;AAGA,UAAM,OAAO,MAAM,MAAM;AACzB,QAAI,CAAC,MAAM;AACT,YAAM,IAAI,mBAAmB,aAAa,mBAAmB;AAAA,IAC/D;AAGA,UAAM,SAAS,MAAM,KAAK,OAAO;AACjC,UAAM,QAAQ,OAAO,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,SAAS;AAE3D,QAAI,CAAC,OAAO;AACV,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU,SAAS,uBAAuB,KAAK,GAAG;AAAA,MACpD;AAAA,IACF;AAGA,UAAM,OAAO,YAAY,MAAM,IAAI;AAAA,MACjC,SAAS,MAAM;AAAA,IACjB,CAAC;AAAA,EACH,SAAS,OAAO;AACd,QAAI,iBAAiB,oBAAoB;AACvC,YAAM;AAAA,IACR;AACA,sBAAkB,OAAO,wBAAwB;AAAA,EACnD;AACF;AAQA,eAAsB,iBAAiB,WAA2C;AAChF,MAAI;AACF,WAAO,MAAM,4BAA4B,SAAS,EAAE;AACpD,UAAM,SAAS,mBAAmB;AAClC,UAAM,UAAU,MAAM,OAAO,QAAQ,EAAE,IAAI,UAAU,CAAC;AAEtD,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI,mBAAmB,aAAa,kBAAkB,SAAS,YAAY;AAAA,IACnF;AAEA,WAAO;AAAA,MACL,IAAI,QAAQ;AAAA,MACZ,MAAM,QAAQ;AAAA,MACd,WAAW,QAAQ,UAAU,YAAY;AAAA,MACzC,WAAW,QAAQ,UAAU,YAAY;AAAA,MACzC,KAAK,QAAQ;AAAA,IACf;AAAA,EACF,SAAS,OAAO;AACd,QAAI,iBAAiB,oBAAoB;AACvC,YAAM;AAAA,IACR;AACA,sBAAkB,OAAO,kBAAkB;AAAA,EAC7C;AACF;AASA,eAAsB,oBACpB,WACA,MACwB;AACxB,MAAI;AACF,WAAO,MAAM,4BAA4B,SAAS,EAAE;AACpD,UAAM,SAAS,mBAAmB;AAElC,UAAM,UAAU,MAAM,OAAO,cAAc,WAAW,EAAE,KAAK,CAAC;AAC9D,UAAM,UAAU,MAAM,QAAQ;AAE9B,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI,mBAAmB,aAAa,iCAAiC;AAAA,IAC7E;AAEA,WAAO;AAAA,MACL,IAAI,QAAQ;AAAA,MACZ,MAAM,QAAQ;AAAA,MACd,WAAW,QAAQ,UAAU,YAAY;AAAA,MACzC,WAAW,QAAQ,UAAU,YAAY;AAAA,MACzC,KAAK,QAAQ;AAAA,IACf;AAAA,EACF,SAAS,OAAO;AACd,QAAI,iBAAiB,oBAAoB;AACvC,YAAM;AAAA,IACR;AACA,sBAAkB,OAAO,qBAAqB;AAAA,EAChD;AACF;AAQA,eAAsB,yBAAyB,YAA8C;AAC3F,MAAI;AACF,WAAO,MAAM,uCAAuC,UAAU,EAAE;AAChE,UAAM,SAAS,mBAAmB;AAGlC,UAAM,QAAQ,MAAM,OAAO,MAAM,UAAU;AAC3C,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,mBAAmB,aAAa,gBAAgB,UAAU,YAAY;AAAA,IAClF;AAGA,UAAM,WAAW,MAAM,MAAM,SAAS,EAAE,OAAO,IAAI,CAAC;AAEpD,WAAO,SAAS,MAAM,IAAI,CAAC,aAAa;AAAA,MACtC,IAAI,QAAQ;AAAA,MACZ,MAAM,QAAQ;AAAA,MACd,WAAW,QAAQ,UAAU,YAAY;AAAA,MACzC,WAAW,QAAQ,UAAU,YAAY;AAAA,MACzC,KAAK,QAAQ;AAAA,IACf,EAAE;AAAA,EACJ,SAAS,OAAO;AACd,QAAI,iBAAiB,oBAAoB;AACvC,YAAM;AAAA,IACR;AACA,sBAAkB,OAAO,0BAA0B;AAAA,EACrD;AACF;","names":[]}
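The source map above embeds the new Linear integration helpers (fetchLinearIssue, createLinearIssue, createLinearChildIssue, createLinearComment, updateLinearIssueState). As a rough usage sketch — the import path below is an assumption for illustration, not a documented public API, and LINEAR_API_TOKEN must be set in the environment as the embedded getLinearApiToken requires:

```ts
// Illustrative sketch only: the import path and calling pattern are assumptions
// drawn from the sources embedded in the map above, not a documented public API.
import { fetchLinearIssue, createLinearComment } from './LinearService.js'

async function annotateIssue(identifier: string): Promise<void> {
  // Requires LINEAR_API_TOKEN in the environment (see getLinearApiToken in the source above)
  const issue = await fetchLinearIssue(identifier)
  console.log(`${issue.identifier}: ${issue.title} [${issue.state ?? 'no state'}]`)

  // Failures surface as LinearServiceError with codes such as
  // NOT_FOUND, UNAUTHORIZED, RATE_LIMITED, or CLI_ERROR (see handleLinearError above)
  await createLinearComment(issue.identifier, 'Picked up by iloom for implementation.')
}
```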
@@ -7,7 +7,7 @@ import {
 } from "./chunk-ZX3GTM7O.js";
 import {
   launchClaude
-} from "./chunk-
+} from "./chunk-FP7G7DG3.js";
 import {
   getLogger
 } from "./chunk-6MLEBAYZ.js";
@@ -151,4 +151,4 @@ export {
   IssueEnhancementService,
   capitalizeFirstLetter
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-HVQNVRAF.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../src/utils/package-json.ts"],"sourcesContent":["import fs from 'fs-extra'\nimport path from 'path'\nimport { getLogger } from './logger-context.js'\nimport type { ProjectCapability } from '../types/loom.js'\n\n/**\n * Path to the iloom package configuration file (relative to project root)\n * This file allows non-Node.js projects to define scripts for iloom workflows\n */\nexport const ILOOM_PACKAGE_PATH = '.iloom/package.iloom.json'\n\nexport interface PackageJson {\n name: string\n version?: string\n bin?: string | Record<string, string>\n dependencies?: Record<string, string>\n devDependencies?: Record<string, string>\n scripts?: Record<string, string>\n capabilities?: ProjectCapability[]\n [key: string]: unknown\n}\n\n/**\n * Source of a script - determines how it should be executed\n * - 'package-manager': Execute via package manager (pnpm/npm/yarn)\n * - 'iloom-config': Execute directly as shell command\n */\nexport type ScriptSource = 'package-manager' | 'iloom-config'\n\n/**\n * Configuration for a single script including its source\n * The source determines whether to use package manager or direct shell execution\n */\nexport interface PackageScriptConfig {\n /** The script command to execute */\n command: string\n /** Source of the script - determines execution method */\n source: ScriptSource\n}\n\n/**\n * Read and parse package.json from a directory\n * @param dir Directory containing package.json\n * @returns Parsed package.json object\n * @throws Error if package.json doesn't exist or contains invalid JSON\n */\nexport async function readPackageJson(dir: string): Promise<PackageJson> {\n const pkgPath = path.join(dir, 'package.json')\n\n try {\n const pkgJson = await fs.readJson(pkgPath)\n return pkgJson as PackageJson\n } catch (error) {\n if ((error as { code?: string }).code === 'ENOENT') {\n throw new Error(`package.json not found in ${dir}`)\n }\n const message = error instanceof Error ? error.message : 'Unknown error'\n throw new Error(`Invalid package.json in ${dir}: ${message}`)\n }\n}\n\n/**\n * Read scripts from .iloom/package.iloom.json if it exists\n * This file takes precedence over package.json and contains raw shell commands\n * @param dir Directory containing .iloom/package.iloom.json\n * @returns PackageJson-like object with scripts, or null if file doesn't exist\n */\nexport async function readIloomPackageScripts(dir: string): Promise<PackageJson | null> {\n const iloomPkgPath = path.join(dir, ILOOM_PACKAGE_PATH)\n\n try {\n const exists = await fs.pathExists(iloomPkgPath)\n if (!exists) {\n return null\n }\n\n const content = await fs.readJson(iloomPkgPath)\n return content as PackageJson\n } catch (error) {\n const message = error instanceof Error ? 
error.message : 'Unknown error'\n getLogger().warn(`Failed to read ${ILOOM_PACKAGE_PATH}: ${message}`)\n return null\n }\n}\n\n/**\n * Read package configuration for a project, merging .iloom/package.iloom.json scripts over package.json\n * This allows non-Node.js projects to define scripts for iloom workflows while preserving\n * all other package.json fields (name, version, bin, dependencies, etc.)\n *\n * @param dir Directory to read package configuration from\n * @returns PackageJson object with merged scripts (iloom scripts take precedence)\n * @throws Error if neither file exists or contains valid JSON\n */\nexport async function getPackageConfig(dir: string): Promise<PackageJson> {\n // Check for .iloom/package.iloom.json first\n const iloomPackage = await readIloomPackageScripts(dir)\n\n if (iloomPackage) {\n // Try to read package.json as base\n try {\n const basePackage = await readPackageJson(dir)\n getLogger().debug('Merging scripts from .iloom/package.iloom.json over package.json')\n // Merge: base package.json with iloom scripts taking precedence\n return {\n ...basePackage,\n scripts: {\n ...basePackage.scripts,\n ...iloomPackage.scripts,\n },\n }\n } catch {\n // No package.json - use iloom package as-is (non-Node project)\n getLogger().debug('Using scripts from .iloom/package.iloom.json (no package.json)')\n return iloomPackage\n }\n }\n\n // Fall back to package.json only\n return readPackageJson(dir)\n}\n\n/**\n * Parse bin field into normalized Record format\n * @param bin The bin field from package.json (string or object)\n * @param packageName Package name to use for string bin variant\n * @returns Normalized bin entries as Record<string, string>\n */\nexport function parseBinField(\n bin: string | Record<string, string> | undefined,\n packageName: string\n): Record<string, string> {\n if (!bin) {\n return {}\n }\n\n if (typeof bin === 'string') {\n return { [packageName]: bin }\n }\n\n return bin\n}\n\n/**\n * Check if package.json indicates a web application\n * @param pkgJson Parsed package.json object\n * @returns true if package has web framework dependencies\n */\nexport function hasWebDependencies(pkgJson: PackageJson): boolean {\n const webIndicators = [\n 'next',\n 'vite',\n 'express',\n 'react-scripts',\n 'nuxt',\n 'svelte-kit',\n 'astro',\n 'remix',\n 'fastify',\n 'koa',\n 'hapi',\n '@angular/core',\n 'gatsby',\n '@11ty/eleventy',\n 'ember-cli'\n ]\n\n const allDeps = {\n ...pkgJson.dependencies,\n ...pkgJson.devDependencies\n }\n\n return webIndicators.some(indicator => indicator in allDeps)\n}\n\n/**\n * Check if package.json has a specific script\n * @param pkgJson Parsed package.json object\n * @param scriptName Script name to check for\n * @returns true if script exists\n */\nexport function hasScript(pkgJson: PackageJson, scriptName: string): boolean {\n return !!pkgJson.scripts?.[scriptName]\n}\n\n/**\n * Get all scripts with their source metadata\n * Scripts from .iloom/package.iloom.json are marked as 'iloom-config' and should be executed directly\n * Scripts from package.json are marked as 'package-manager' and should use pnpm/npm/yarn\n *\n * @param dir Directory to read package configuration from\n * @returns Map of script names to their configurations including source\n */\nexport async function getPackageScripts(dir: string): Promise<Record<string, PackageScriptConfig>> {\n const scripts: Record<string, PackageScriptConfig> = {}\n\n // First, check if package.json exists and read scripts (these are package-manager sourced)\n const 
packageJsonPath = path.join(dir, 'package.json')\n if (await fs.pathExists(packageJsonPath)) {\n const pkgJson = await readPackageJson(dir)\n if (pkgJson.scripts) {\n for (const [name, command] of Object.entries(pkgJson.scripts)) {\n scripts[name] = { command, source: 'package-manager' }\n }\n }\n }\n\n // Then, read iloom package scripts (these override and are iloom-config sourced)\n const iloomPackage = await readIloomPackageScripts(dir)\n if (iloomPackage?.scripts) {\n for (const [name, command] of Object.entries(iloomPackage.scripts)) {\n scripts[name] = { command, source: 'iloom-config' }\n }\n }\n\n return scripts\n}\n\n/**\n * Valid capability values that can be explicitly declared\n */\nconst VALID_CAPABILITIES: readonly ProjectCapability[] = ['cli', 'web'] as const\n\n/**\n * Extract explicit capabilities from package configuration\n * Used for non-Node.js projects that declare capabilities in package.iloom.json\n * @param pkgJson Parsed package configuration object\n * @returns Array of valid ProjectCapability values, or empty array if none declared\n */\nexport function getExplicitCapabilities(pkgJson: PackageJson): ProjectCapability[] {\n // Return empty if no capabilities field or not an array\n if (!pkgJson.capabilities || !Array.isArray(pkgJson.capabilities)) {\n return []\n }\n\n // Filter to only valid ProjectCapability values\n return pkgJson.capabilities.filter(\n (cap): cap is ProjectCapability => VALID_CAPABILITIES.includes(cap as ProjectCapability)\n )\n}\n"],"mappings":";;;;;;AAAA,OAAO,QAAQ;AACf,OAAO,UAAU;AAQV,IAAM,qBAAqB;AAqClC,eAAsB,gBAAgB,KAAmC;AACvE,QAAM,UAAU,KAAK,KAAK,KAAK,cAAc;AAE7C,MAAI;AACF,UAAM,UAAU,MAAM,GAAG,SAAS,OAAO;AACzC,WAAO;AAAA,EACT,SAAS,OAAO;AACd,QAAK,MAA4B,SAAS,UAAU;AAClD,YAAM,IAAI,MAAM,6BAA6B,GAAG,EAAE;AAAA,IACpD;AACA,UAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU;AACzD,UAAM,IAAI,MAAM,2BAA2B,GAAG,KAAK,OAAO,EAAE;AAAA,EAC9D;AACF;AAQA,eAAsB,wBAAwB,KAA0C;AACtF,QAAM,eAAe,KAAK,KAAK,KAAK,kBAAkB;AAEtD,MAAI;AACF,UAAM,SAAS,MAAM,GAAG,WAAW,YAAY;AAC/C,QAAI,CAAC,QAAQ;AACX,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,MAAM,GAAG,SAAS,YAAY;AAC9C,WAAO;AAAA,EACT,SAAS,OAAO;AACd,UAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU;AACzD,cAAU,EAAE,KAAK,kBAAkB,kBAAkB,KAAK,OAAO,EAAE;AACnE,WAAO;AAAA,EACT;AACF;AAWA,eAAsB,iBAAiB,KAAmC;AAExE,QAAM,eAAe,MAAM,wBAAwB,GAAG;AAEtD,MAAI,cAAc;AAEhB,QAAI;AACF,YAAM,cAAc,MAAM,gBAAgB,GAAG;AAC7C,gBAAU,EAAE,MAAM,kEAAkE;AAEpF,aAAO;AAAA,QACL,GAAG;AAAA,QACH,SAAS;AAAA,UACP,GAAG,YAAY;AAAA,UACf,GAAG,aAAa;AAAA,QAClB;AAAA,MACF;AAAA,IACF,QAAQ;AAEN,gBAAU,EAAE,MAAM,gEAAgE;AAClF,aAAO;AAAA,IACT;AAAA,EACF;AAGA,SAAO,gBAAgB,GAAG;AAC5B;AAQO,SAAS,cACd,KACA,aACwB;AACxB,MAAI,CAAC,KAAK;AACR,WAAO,CAAC;AAAA,EACV;AAEA,MAAI,OAAO,QAAQ,UAAU;AAC3B,WAAO,EAAE,CAAC,WAAW,GAAG,IAAI;AAAA,EAC9B;AAEA,SAAO;AACT;AAOO,SAAS,mBAAmB,SAA+B;AAChE,QAAM,gBAAgB;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,QAAM,UAAU;AAAA,IACd,GAAG,QAAQ;AAAA,IACX,GAAG,QAAQ;AAAA,EACb;AAEA,SAAO,cAAc,KAAK,eAAa,aAAa,OAAO;AAC7D;AAQO,SAAS,UAAU,SAAsB,YAA6B;AArL7E;AAsLE,SAAO,CAAC,GAAC,aAAQ,YAAR,mBAAkB;AAC7B;AAUA,eAAsB,kBAAkB,KAA2D;AACjG,QAAM,UAA+C,CAAC;AAGtD,QAAM,kBAAkB,KAAK,KAAK,KAAK,cAAc;AACrD,MAAI,MAAM,GAAG,WAAW,eAAe,GAAG;AACxC,UAAM,UAAU,MAAM,gBAAgB,GAAG;AACzC,QAAI,QAAQ,SAAS;AACnB,iBAAW,CAAC,MAAM,OAAO,KAAK,OAAO,QAAQ,QAAQ,OAAO,GAAG;AAC7D,gBAAQ,IAAI,IAAI,EAAE,SAAS,QAAQ,kBAAkB;AAAA,MACvD;AAAA,IACF;AAAA,EACF;AAGA,QAAM,eAAe,MAAM,wBAAwB,GAAG;AACtD,MAAI,6CAAc,SAAS;AACzB,eAAW,CAAC,MAAM,OAAO,KAAK,OAAO,QAAQ,aAAa,OAAO,GAAG;AAClE,cAAQ,IAAI,IAAI,EAAE,SAAS
,QAAQ,eAAe;AAAA,IACpD;AAAA,EACF;AAEA,SAAO;AACT;AAKA,IAAM,qBAAmD,CAAC,OAAO,KAAK;AAQ/D,SAAS,wBAAwB,SAA2C;AAEjF,MAAI,CAAC,QAAQ,gBAAgB,CAAC,MAAM,QAAQ,QAAQ,YAAY,GAAG;AACjE,WAAO,CAAC;AAAA,EACV;AAGA,SAAO,QAAQ,aAAa;AAAA,IAC1B,CAAC,QAAkC,mBAAmB,SAAS,GAAwB;AAAA,EACzF;AACF;","names":[]}
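This added map carries the full source of src/utils/package-json.ts, which lets non-Node.js projects feed scripts to iloom workflows via .iloom/package.iloom.json. A minimal sketch of that merge behaviour, using the helpers defined in the embedded source (the relative import path is assumed):

```ts
// Sketch based on the sourcesContent above; the import path is an assumption.
import { getPackageConfig, getPackageScripts, hasScript } from './utils/package-json.js'

async function describeProject(dir: string): Promise<void> {
  // Scripts from .iloom/package.iloom.json are merged over package.json scripts
  const pkg = await getPackageConfig(dir)
  console.log(`build script present: ${hasScript(pkg, 'build')}`)

  // Each script is tagged with its source: 'package-manager' (run via pnpm/npm/yarn)
  // or 'iloom-config' (run directly as a shell command)
  const scripts = await getPackageScripts(dir)
  for (const [name, { command, source }] of Object.entries(scripts)) {
    console.log(`${name} [${source}]: ${command}`)
  }
}
```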
@@ -1,15 +1,15 @@
 #!/usr/bin/env node
 import {
   PromptTemplateManager
-} from "./chunk-
+} from "./chunk-TIYJEEVO.js";
 import {
   SettingsManager
-} from "./chunk-
+} from "./chunk-WFQ5CLTR.js";
 import {
   detectClaudeCli,
   launchClaude,
   launchClaudeInNewTerminalWindow
-} from "./chunk-
+} from "./chunk-FP7G7DG3.js";
 import {
   logger
 } from "./chunk-VT4PDUYT.js";
@@ -122,4 +122,4 @@ var ClaudeService = class {
 export {
   ClaudeService
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-KVS4XGBQ.js.map
@@ -1,7 +1,7 @@
 #!/usr/bin/env node
 import {
   getPackageScripts
-} from "./chunk-
+} from "./chunk-ITN64ENQ.js";
 import {
   getLogger
 } from "./chunk-6MLEBAYZ.js";
@@ -161,4 +161,4 @@ export {
   installDependencies,
   runScript
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-LLWX3PCW.js.map
@@ -1,20 +1,31 @@
 #!/usr/bin/env node
+import {
+  ProjectCapabilityDetector
+} from "./chunk-3CDWFEGL.js";
+import {
+  detectPackageManager,
+  runScript
+} from "./chunk-LLWX3PCW.js";
+import {
+  getPackageConfig,
+  hasScript
+} from "./chunk-ITN64ENQ.js";
 import {
   executeGitCommand,
   findMainWorktreePathWithSettings,
   findWorktreeForBranch,
   getMergeTargetBranch
-} from "./chunk-
+} from "./chunk-ZA575VLF.js";
 import {
   SettingsManager
-} from "./chunk-
+} from "./chunk-WFQ5CLTR.js";
 import {
   MetadataManager
-} from "./chunk-
+} from "./chunk-VWGKGNJP.js";
 import {
   detectClaudeCli,
   launchClaude
-} from "./chunk-
+} from "./chunk-FP7G7DG3.js";
 import {
   getLogger
 } from "./chunk-6MLEBAYZ.js";
@@ -374,7 +385,86 @@ To recover:
   }
 };
 
+// src/lib/BuildRunner.ts
+var BuildRunner = class {
+  constructor(capabilityDetector) {
+    this.capabilityDetector = capabilityDetector ?? new ProjectCapabilityDetector();
+  }
+  /**
+   * Run build verification in the specified directory
+   * @param buildPath - Path where build should run (typically main worktree path)
+   * @param options - Build options
+   */
+  async runBuild(buildPath, options = {}) {
+    const startTime = Date.now();
+    try {
+      const pkgJson = await getPackageConfig(buildPath);
+      const hasBuildScript = hasScript(pkgJson, "build");
+      if (!hasBuildScript) {
+        getLogger().debug("Skipping build - no build script found");
+        return {
+          success: true,
+          skipped: true,
+          reason: "No build script found in package configuration",
+          duration: Date.now() - startTime
+        };
+      }
+    } catch (error) {
+      if (error instanceof Error && error.message.includes("package.json not found")) {
+        getLogger().debug("Skipping build - no package configuration found");
+        return {
+          success: true,
+          skipped: true,
+          reason: "No package configuration found in project",
+          duration: Date.now() - startTime
+        };
+      }
+      throw error;
+    }
+    const capabilities = await this.capabilityDetector.detectCapabilities(buildPath);
+    const isCLIProject = capabilities.capabilities.includes("cli");
+    if (!isCLIProject) {
+      getLogger().debug("Skipping build - not a CLI project (no bin field)");
+      return {
+        success: true,
+        skipped: true,
+        reason: "Project is not a CLI project (no bin field in package.json)",
+        duration: Date.now() - startTime
+      };
+    }
+    const packageManager = await detectPackageManager(buildPath);
+    if (options.dryRun) {
+      const command = packageManager === "npm" ? "npm run build" : `${packageManager} build`;
+      getLogger().info(`[DRY RUN] Would run: ${command}`);
+      return {
+        success: true,
+        skipped: false,
+        duration: Date.now() - startTime
+      };
+    }
+    getLogger().info("Running build...");
+    try {
+      await runScript("build", buildPath, [], { quiet: true });
+      getLogger().success("Build completed successfully");
+      return {
+        success: true,
+        skipped: false,
+        duration: Date.now() - startTime
+      };
+    } catch {
+      const runCommand = packageManager === "npm" ? "npm run build" : `${packageManager} build`;
+      throw new Error(
+        `Error: Build failed.
+Fix build errors before proceeding.
+
+Run '${runCommand}' to see detailed errors.`
+      );
+    }
+  }
+};
+
 export {
-  MergeManager
+  MergeManager,
+  BuildRunner
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-LQBLDI47.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../src/lib/MergeManager.ts","../src/lib/BuildRunner.ts"],"sourcesContent":["import { executeGitCommand, findMainWorktreePathWithSettings, findWorktreeForBranch, getMergeTargetBranch } from '../utils/git.js'\nimport { getLogger } from '../utils/logger-context.js'\nimport { detectClaudeCli, launchClaude } from '../utils/claude.js'\nimport { SettingsManager } from './SettingsManager.js'\nimport { MetadataManager } from './MetadataManager.js'\nimport type { MergeOptions } from '../types/index.js'\n\n/**\n * MergeManager handles Git rebase and fast-forward merge operations\n * Implements fail-fast behavior for conflicts (Phase 1 - no Claude assistance)\n *\n * Ports bash/merge-and-clean.sh lines 781-1090\n */\nexport class MergeManager {\n\tprivate settingsManager: SettingsManager\n\tprivate metadataManager: MetadataManager\n\n\tconstructor(settingsManager?: SettingsManager, metadataManager?: MetadataManager) {\n\t\tthis.settingsManager = settingsManager ?? new SettingsManager()\n\t\tthis.metadataManager = metadataManager ?? new MetadataManager()\n\t}\n\n\t/**\n\t * Get the merge target branch for a loom\n\t * Priority: parent loom metadata > configured main branch > 'main'\n\t * @param worktreePath - Optional path to load settings/metadata from (defaults to process.cwd())\n\t * @private\n\t */\n\tprivate async getMainBranch(worktreePath?: string): Promise<string> {\n\t\t// Delegate to shared utility function\n\t\treturn getMergeTargetBranch(worktreePath ?? process.cwd(), {\n\t\t\tsettingsManager: this.settingsManager,\n\t\t\tmetadataManager: this.metadataManager,\n\t\t})\n\t}\n\n\t/**\n\t * Rebase current branch on main with fail-fast on conflicts\n\t * Ports bash/merge-and-clean.sh lines 781-913\n\t *\n\t * @param worktreePath - Path to the worktree\n\t * @param options - Merge options (dryRun, force)\n\t * @throws Error if main branch doesn't exist, uncommitted changes exist, or conflicts occur\n\t */\n\tasync rebaseOnMain(worktreePath: string, options: MergeOptions = {}): Promise<void> {\n\t\tconst { dryRun = false, force = false } = options\n\t\tconst mainBranch = await this.getMainBranch(worktreePath)\n\n\t\tgetLogger().info(`Starting rebase on ${mainBranch} branch...`)\n\n\t\t// Step 1: Check if main branch exists\n\t\ttry {\n\t\t\tawait executeGitCommand(['show-ref', '--verify', '--quiet', `refs/heads/${mainBranch}`], {\n\t\t\t\tcwd: worktreePath,\n\t\t\t})\n\t\t} catch {\n\t\t\tthrow new Error(\n\t\t\t\t`Main branch \"${mainBranch}\" does not exist. 
Cannot rebase.\\n` +\n\t\t\t\t\t`Ensure the repository has a \"${mainBranch}\" branch or create it first.`\n\t\t\t)\n\t\t}\n\n\t\t// Step 2: Check for uncommitted changes and create WIP commit if needed\n\t\tconst statusOutput = await executeGitCommand(['status', '--porcelain'], {\n\t\t\tcwd: worktreePath,\n\t\t})\n\n\t\tlet wipCommitHash: string | null = null\n\t\tif (statusOutput.trim()) {\n\t\t\tgetLogger().info('Uncommitted changes detected, creating temporary WIP commit...')\n\t\t\twipCommitHash = await this.createWipCommit(worktreePath)\n\t\t\tgetLogger().debug(`Created WIP commit: ${wipCommitHash}`)\n\t\t}\n\n\t\t// Step 3: Check if rebase is needed by comparing merge-base with main HEAD\n\t\tconst mergeBase = await executeGitCommand(['merge-base', mainBranch, 'HEAD'], {\n\t\t\tcwd: worktreePath,\n\t\t})\n\n\t\tconst mainHead = await executeGitCommand(['rev-parse', mainBranch], {\n\t\t\tcwd: worktreePath,\n\t\t})\n\n\t\tconst mergeBaseTrimmed = mergeBase.trim()\n\t\tconst mainHeadTrimmed = mainHead.trim()\n\n\t\t// If merge-base matches main HEAD, branch is already up to date\n\t\tif (mergeBaseTrimmed === mainHeadTrimmed) {\n\t\t\tgetLogger().success(`Branch is already up to date with ${mainBranch}. No rebase needed.`)\n\t\t\t// Restore WIP commit if created (soft reset to remove temporary commit)\n\t\t\tif (wipCommitHash) {\n\t\t\t\tawait this.restoreWipCommit(worktreePath, wipCommitHash)\n\t\t\t}\n\t\t\treturn\n\t\t}\n\n\t\t// Step 4: Show commits to be rebased (for informational purposes)\n\t\tconst commitsOutput = await executeGitCommand(['log', '--oneline', `${mainBranch}..HEAD`], {\n\t\t\tcwd: worktreePath,\n\t\t})\n\n\t\tconst commits = commitsOutput.trim()\n\t\tconst commitLines = commits ? commits.split('\\n') : []\n\n\t\tif (commits) {\n\t\t\t// Show commits that will be rebased\n\t\t\tgetLogger().info(`Found ${commitLines.length} commit(s) to rebase:`)\n\t\t\tcommitLines.forEach((commit) => getLogger().info(` ${commit}`))\n\t\t} else {\n\t\t\t// Main has moved forward but branch has no new commits\n\t\t\tgetLogger().info(`${mainBranch} branch has moved forward. Rebasing to update branch...`)\n\t\t}\n\n\t\t// Step 5: User confirmation (unless force mode or dry-run)\n\t\tif (!force && !dryRun) {\n\t\t\t// TODO: Implement interactive prompt for confirmation\n\t\t\t// For now, proceeding automatically (use --force to skip this message)\n\t\t\tgetLogger().info('Proceeding with rebase... 
(use --force to skip confirmations)')\n\t\t}\n\n\t\t// Step 6: Execute rebase (unless dry-run)\n\t\tif (dryRun) {\n\t\t\tgetLogger().info(`[DRY RUN] Would execute: git rebase ${mainBranch}`)\n\t\t\tif (commitLines.length > 0) {\n\t\t\t\tgetLogger().info(`[DRY RUN] This would rebase ${commitLines.length} commit(s)`)\n\t\t\t}\n\t\t\treturn\n\t\t}\n\n\t\t// Execute rebase\n\t\t// Use -c core.hooksPath=/dev/null to disable hooks during rebase\n\t\t// This prevents pre-commit hooks from running when commits are re-applied\n\t\ttry {\n\t\t\tawait executeGitCommand(['-c', 'core.hooksPath=/dev/null', 'rebase', mainBranch], { cwd: worktreePath })\n\t\t\tgetLogger().success('Rebase completed successfully!')\n\n\t\t\t// Restore WIP commit if created\n\t\t\tif (wipCommitHash) {\n\t\t\t\tawait this.restoreWipCommit(worktreePath, wipCommitHash)\n\t\t\t}\n\t\t} catch (error) {\n\t\t\t// Detect conflicts\n\t\t\tconst conflictedFiles = await this.detectConflictedFiles(worktreePath)\n\n\t\t\tif (conflictedFiles.length > 0) {\n\t\t\t\t// Try Claude-assisted resolution first\n\t\t\t\tgetLogger().info('Merge conflicts detected, attempting Claude-assisted resolution...')\n\n\t\t\t\tconst resolved = await this.attemptClaudeConflictResolution(\n\t\t\t\t\tworktreePath,\n\t\t\t\t\tconflictedFiles\n\t\t\t\t)\n\n\t\t\t\tif (resolved) {\n\t\t\t\t\tgetLogger().success('Conflicts resolved with Claude assistance, rebase completed')\n\n\t\t\t\t\t// Restore WIP commit if created\n\t\t\t\t\tif (wipCommitHash) {\n\t\t\t\t\t\tawait this.restoreWipCommit(worktreePath, wipCommitHash)\n\t\t\t\t\t}\n\t\t\t\t\treturn // Continue with successful rebase\n\t\t\t\t}\n\n\t\t\t\t// Claude couldn't resolve or not available - fail fast\n\t\t\t\tconst conflictError = this.formatConflictError(conflictedFiles)\n\t\t\t\tthrow new Error(conflictError)\n\t\t\t}\n\n\t\t\t// If not a conflict, re-throw the original error\n\t\t\tthrow new Error(\n\t\t\t\t`Rebase failed: ${error instanceof Error ? error.message : String(error)}\\n` +\n\t\t\t\t\t'Run: git status for more details\\n' +\n\t\t\t\t\t'Or: git rebase --abort to cancel the rebase'\n\t\t\t)\n\t\t}\n\t}\n\n\t/**\n\t * Validate that fast-forward merge is possible\n\t * Ports bash/merge-and-clean.sh lines 957-968\n\t *\n\t * @param branchName - Name of the branch to merge\n\t * @param mainWorktreePath - Path where main branch is checked out\n\t * @throws Error if fast-forward is not possible\n\t */\n\tasync validateFastForwardPossible(mainBranch: string, branchName: string, mainWorktreePath: string): Promise<void> {\n\n\t\t// Step 1: Get merge-base between main and branch\n\t\tconst mergeBase = await executeGitCommand(['merge-base', mainBranch, branchName], {\n\t\t\tcwd: mainWorktreePath,\n\t\t})\n\n\t\t// Step 2: Get current HEAD of main\n\t\tconst mainHead = await executeGitCommand(['rev-parse', mainBranch], {\n\t\t\tcwd: mainWorktreePath,\n\t\t})\n\n\t\t// Step 3: Compare - they must match for fast-forward\n\t\tconst mergeBaseTrimmed = mergeBase.trim()\n\t\tconst mainHeadTrimmed = mainHead.trim()\n\n\t\tif (mergeBaseTrimmed !== mainHeadTrimmed) {\n\t\t\tthrow new Error(\n\t\t\t\t'Cannot perform fast-forward merge.\\n' +\n\t\t\t\t\t`The ${mainBranch} branch has moved forward since this branch was created.\\n` +\n\t\t\t\t\t`Merge base: ${mergeBaseTrimmed}\\n` +\n\t\t\t\t\t`Main HEAD: ${mainHeadTrimmed}\\n\\n` +\n\t\t\t\t\t'To fix this:\\n' +\n\t\t\t\t\t` 1. Rebase the branch on ${mainBranch}: git rebase ${mainBranch}\\n` +\n\t\t\t\t\t` 2. 
Or use: il finish to automatically rebase and merge\\n`\n\t\t\t)\n\t\t}\n\t}\n\n\t/**\n\t * Perform fast-forward only merge\n\t * Ports bash/merge-and-clean.sh lines 938-994\n\t *\n\t * @param branchName - Name of the branch to merge\n\t * @param worktreePath - Path to the worktree\n\t * @param options - Merge options (dryRun, force)\n\t * @throws Error if checkout, validation, or merge fails\n\t */\n\tasync performFastForwardMerge(\n\t\tbranchName: string,\n\t\tworktreePath: string,\n\t\toptions: MergeOptions = {}\n\t): Promise<void> {\n\t\tconst { dryRun = false, force = false } = options\n\n\t\tgetLogger().info('Starting fast-forward merge...')\n\n\t\t// Step 1: Get the merge target branch FIRST\n\t\t// For child looms, this will be the parent branch from metadata\n\t\t// For regular looms, this falls back to settings.mainBranch or 'main'\n\t\tconst mainBranch = await this.getMainBranch(worktreePath)\n\n\t\t// Step 2: Find where the merge target branch is checked out\n\t\t// CRITICAL: We must find the worktree for the MERGE TARGET, not settings.mainBranch\n\t\t// This fixes the child loom bug where we'd find the 'main' worktree instead of the parent branch worktree\n\t\tlet mainWorktreePath: string\n\t\tif (options.repoRoot) {\n\t\t\tmainWorktreePath = options.repoRoot\n\t\t} else {\n\t\t\ttry {\n\t\t\t\t// First try to find worktree with the exact merge target branch checked out\n\t\t\t\tmainWorktreePath = await findWorktreeForBranch(mainBranch, worktreePath)\n\t\t\t} catch {\n\t\t\t\t// Fallback: if no worktree has the branch checked out, use settings-based lookup\n\t\t\t\t// This handles edge cases like bare repos or detached HEAD states\n\t\t\t\tgetLogger().debug(`No worktree found for branch '${mainBranch}', falling back to settings-based lookup`)\n\t\t\t\tmainWorktreePath = await findMainWorktreePathWithSettings(worktreePath, this.settingsManager)\n\t\t\t}\n\t\t}\n\n\t\t// Step 3: No need to checkout - the merge target branch is already checked out in mainWorktreePath\n\t\tgetLogger().debug(`Using ${mainBranch} branch location: ${mainWorktreePath}`)\n\n\t\t// Step 4: Verify we're on the correct branch\n\t\tconst currentBranch = await executeGitCommand(['branch', '--show-current'], {\n\t\t\tcwd: mainWorktreePath,\n\t\t})\n\n\t\tif (currentBranch.trim() !== mainBranch) {\n\t\t\tthrow new Error(\n\t\t\t\t`Expected ${mainBranch} branch but found: ${currentBranch.trim()}\\n` +\n\t\t\t\t\t`At location: ${mainWorktreePath}\\n` +\n\t\t\t\t\t'This indicates the main worktree detection failed.'\n\t\t\t)\n\t\t}\n\n\t\t// Step 5: Validate fast-forward is possible\n\t\tawait this.validateFastForwardPossible(mainBranch, branchName, mainWorktreePath)\n\n\t\t// Step 6: Show commits to be merged\n\t\tconst commitsOutput = await executeGitCommand(['log', '--oneline', `${mainBranch}..${branchName}`], {\n\t\t\tcwd: mainWorktreePath,\n\t\t})\n\n\t\tconst commits = commitsOutput.trim()\n\n\t\t// If no commits, branch has no changes ahead of main\n\t\tif (!commits) {\n\t\t\tgetLogger().success(`Branch has no commits ahead of ${mainBranch}. 
No merge needed.`)\n\t\t\treturn\n\t\t}\n\n\t\t// Show commits that will be merged\n\t\tconst commitLines = commits.split('\\n')\n\t\tgetLogger().info(`Found ${commitLines.length} commit(s) to merge:`)\n\t\tcommitLines.forEach((commit) => getLogger().info(` ${commit}`))\n\n\t\t// Step 7: User confirmation (unless force mode or dry-run)\n\t\tif (!force && !dryRun) {\n\t\t\t// TODO: Implement interactive prompt for confirmation\n\t\t\t// For now, proceeding automatically (use --force to skip this message)\n\t\t\tgetLogger().info('Proceeding with fast-forward merge... (use --force to skip confirmations)')\n\t\t}\n\n\t\t// Step 8: Execute merge (unless dry-run)\n\t\tif (dryRun) {\n\t\t\tgetLogger().info(`[DRY RUN] Would execute: git merge --ff-only ${branchName}`)\n\t\t\tgetLogger().info(`[DRY RUN] This would merge ${commitLines.length} commit(s)`)\n\t\t\treturn\n\t\t}\n\n\t\t// Execute fast-forward merge\n\t\ttry {\n\t\t\tgetLogger().debug(`Executing fast-forward merge of ${branchName} into ${mainBranch} using cwd: ${mainWorktreePath}...`)\n\t\t\tawait executeGitCommand(['merge', '--ff-only', branchName], { cwd: mainWorktreePath })\n\t\t\tgetLogger().success(`Fast-forward merge completed! Merged ${commitLines.length} commit(s).`)\n\t\t} catch (error) {\n\t\t\tthrow new Error(\n\t\t\t\t`Fast-forward merge failed: ${error instanceof Error ? error.message : String(error)}\\n\\n` +\n\t\t\t\t\t'To recover:\\n' +\n\t\t\t\t\t' 1. Check merge status: git status\\n' +\n\t\t\t\t\t' 2. Abort merge if needed: git merge --abort\\n' +\n\t\t\t\t\t' 3. Verify branch is rebased: git rebase main\\n' +\n\t\t\t\t\t' 4. Try merge again: il finish'\n\t\t\t)\n\t\t}\n\t}\n\n\t/**\n\t * Helper: Detect conflicted files after failed rebase\n\t * @private\n\t */\n\tprivate async detectConflictedFiles(worktreePath: string): Promise<string[]> {\n\t\ttry {\n\t\t\tconst output = await executeGitCommand(['diff', '--name-only', '--diff-filter=U'], {\n\t\t\t\tcwd: worktreePath,\n\t\t\t})\n\n\t\t\treturn output\n\t\t\t\t.trim()\n\t\t\t\t.split('\\n')\n\t\t\t\t.filter((file) => file.length > 0)\n\t\t} catch {\n\t\t\t// If command fails, return empty array (might not be a conflict)\n\t\t\treturn []\n\t\t}\n\t}\n\n\t/**\n\t * Create a temporary WIP commit to preserve uncommitted changes during rebase\n\t * Stages all changes (tracked, untracked) using git add -A\n\t * Uses --no-verify to skip pre-commit hooks since this is a temporary internal commit\n\t * @param worktreePath - Path to the worktree\n\t * @returns The commit hash of the WIP commit\n\t * @private\n\t */\n\tprivate async createWipCommit(worktreePath: string): Promise<string> {\n\t\t// Stage all changes including untracked files\n\t\tawait executeGitCommand(['add', '-A'], { cwd: worktreePath })\n\n\t\t// Create WIP commit with distinctive message\n\t\t// Use --no-verify to skip pre-commit hooks - this is a temporary internal commit\n\t\tawait executeGitCommand(['commit', '--no-verify', '-m', 'WIP: Auto-stash for rebase'], { cwd: worktreePath })\n\n\t\t// Get and return the commit hash\n\t\tconst hash = await executeGitCommand(['rev-parse', 'HEAD'], { cwd: worktreePath })\n\t\treturn hash.trim()\n\t}\n\n\t/**\n\t * Restore uncommitted changes from WIP commit via soft reset\n\t * Logs warning but does not fail if soft reset fails (changes are safe in commit history)\n\t * @param worktreePath - Path to the worktree\n\t * @param wipCommitHash - Original WIP commit hash for verification logging\n\t * @private\n\t */\n\tprivate async restoreWipCommit(worktreePath: string, 
wipCommitHash: string): Promise<void> {\n\t\tgetLogger().info('Restoring uncommitted changes from WIP commit...')\n\n\t\ttry {\n\t\t\t// Soft reset to parent - changes become staged\n\t\t\tawait executeGitCommand(['reset', '--soft', 'HEAD~1'], { cwd: worktreePath })\n\n\t\t\t// Unstage files to restore to original working directory state\n\t\t\tawait executeGitCommand(['reset', 'HEAD'], { cwd: worktreePath })\n\n\t\t\tgetLogger().success('Restored uncommitted changes from WIP commit')\n\t\t} catch (error) {\n\t\t\t// Log warning but consider rebase successful - work is not lost\n\t\t\tgetLogger().warn(\n\t\t\t\t`Failed to restore WIP commit (${wipCommitHash}). ` +\n\t\t\t\t\t`Your changes are safe in the commit history. ` +\n\t\t\t\t\t`Manual recovery: git reset --soft HEAD~1`,\n\t\t\t\t{ error: error instanceof Error ? error.message : String(error) }\n\t\t\t)\n\t\t}\n\t}\n\n\t/**\n\t * Helper: Format conflict error message with manual resolution steps\n\t * @private\n\t */\n\tprivate formatConflictError(conflictedFiles: string[]): string {\n\t\tconst fileList = conflictedFiles.map((file) => ` • ${file}`).join('\\n')\n\n\t\treturn (\n\t\t\t'Rebase failed - merge conflicts detected in:\\n' +\n\t\t\tfileList +\n\t\t\t'\\n\\n' +\n\t\t\t'To resolve manually:\\n' +\n\t\t\t' 1. Fix conflicts in the files above\\n' +\n\t\t\t' 2. Stage resolved files: git add <files>\\n' +\n\t\t\t' 3. Continue rebase: git rebase --continue\\n' +\n\t\t\t' 4. Or abort rebase: git rebase --abort\\n' +\n\t\t\t' 5. Then re-run: il finish <issue-number>'\n\t\t)\n\t}\n\n\t/**\n\t * Attempt to resolve conflicts using Claude\n\t * Ports bash/merge-and-clean.sh lines 839-894\n\t *\n\t * @param worktreePath - Path to the worktree\n\t * @param conflictedFiles - List of files with conflicts\n\t * @returns true if conflicts resolved, false otherwise\n\t * @private\n\t */\n\tprivate async attemptClaudeConflictResolution(\n\t\tworktreePath: string,\n\t\tconflictedFiles: string[]\n\t): Promise<boolean> {\n\t\t// Check if Claude CLI is available\n\t\tconst isClaudeAvailable = await detectClaudeCli()\n\t\tif (!isClaudeAvailable) {\n\t\t\tgetLogger().debug('Claude CLI not available, skipping conflict resolution')\n\t\t\treturn false\n\t\t}\n\n\t\tgetLogger().info(`Launching Claude to resolve conflicts in ${conflictedFiles.length} file(s)...`)\n\n\t\t// Hard-coded prompt matching bash script line 844\n\t\t// No templates, no complexity - just the essential instruction\n\t\tconst systemPrompt =\n\t\t\t`Please help resolve the git rebase conflicts in this repository. ` +\n\t\t\t`Analyze the conflicted files, understand the changes from both branches, ` +\n\t\t\t`fix the conflicts, then run 'git add .' to stage the resolved files, ` +\n\t\t\t`and finally run 'git rebase --continue' to continue the rebase process. 
` +\n\t\t\t`Once the issue is resolved, tell the user they can use /exit to continue with the process.`\n\n\t\tconst prompt =\n\t\t\t`Help me with this rebase please.`\n\n\t\t// Git commands to auto-approve during rebase conflict resolution\n\t\t// These are the essential commands Claude needs to analyze and resolve conflicts\n\t\t// Note: git reset and git checkout are intentionally excluded as they can be destructive\n\t\tconst rebaseAllowedTools = [\n\t\t\t'Bash(git status:*)',\n\t\t\t'Bash(git diff:*)',\n\t\t\t'Bash(git log:*)',\n\t\t\t'Bash(git add:*)',\n\t\t\t'Bash(git rebase:*)',\n\t\t]\n\n\t\ttry {\n\t\t\t// Launch Claude interactively in current terminal\n\t\t\t// User will interact directly with Claude to resolve conflicts\n\t\t\tawait launchClaude(prompt, {\n\t\t\t\tappendSystemPrompt: systemPrompt,\n\t\t\t\taddDir: worktreePath,\n\t\t\t\theadless: false, // Interactive - runs in current terminal with stdio: inherit\n\t\t\t\tallowedTools: rebaseAllowedTools,\n\t\t\t})\n\n\t\t\t// After Claude interaction completes, check if conflicts resolved\n\t\t\tconst remainingConflicts = await this.detectConflictedFiles(worktreePath)\n\n\t\t\tif (remainingConflicts.length > 0) {\n\t\t\t\tgetLogger().warn(\n\t\t\t\t\t`Conflicts still exist in ${remainingConflicts.length} file(s) after Claude assistance`\n\t\t\t\t)\n\t\t\t\treturn false\n\t\t\t}\n\n\t\t\t// Check if rebase completed or still in progress\n\t\t\tconst rebaseInProgress = await this.isRebaseInProgress(worktreePath)\n\n\t\t\tif (rebaseInProgress) {\n\t\t\t\tgetLogger().warn('Rebase still in progress after Claude assistance')\n\t\t\t\treturn false\n\t\t\t}\n\n\t\t\treturn true\n\t\t} catch (error) {\n\t\t\tgetLogger().warn('Claude conflict resolution failed', {\n\t\t\t\terror: error instanceof Error ? 
error.message : String(error),\n\t\t\t})\n\t\t\treturn false\n\t\t}\n\t}\n\n\t/**\n\t * Check if a git rebase is currently in progress\n\t * Checks for .git/rebase-merge or .git/rebase-apply directories\n\t * Ports bash script logic from lines 853-856\n\t *\n\t * @param worktreePath - Path to the worktree\n\t * @returns true if rebase in progress, false otherwise\n\t * @private\n\t */\n\tprivate async isRebaseInProgress(worktreePath: string): Promise<boolean> {\n\t\tconst fs = await import('node:fs/promises')\n\t\tconst path = await import('node:path')\n\n\t\tconst rebaseMergePath = path.join(worktreePath, '.git', 'rebase-merge')\n\t\tconst rebaseApplyPath = path.join(worktreePath, '.git', 'rebase-apply')\n\n\t\t// Check for rebase-merge directory\n\t\ttry {\n\t\t\tawait fs.access(rebaseMergePath)\n\t\t\treturn true\n\t\t} catch {\n\t\t\t// Directory doesn't exist, continue checking\n\t\t}\n\n\t\t// Check for rebase-apply directory\n\t\ttry {\n\t\t\tawait fs.access(rebaseApplyPath)\n\t\t\treturn true\n\t\t} catch {\n\t\t\t// Directory doesn't exist\n\t\t}\n\n\t\treturn false\n\t}\n}\n","import { getLogger } from '../utils/logger-context.js'\nimport { detectPackageManager, runScript } from '../utils/package-manager.js'\nimport { getPackageConfig, hasScript } from '../utils/package-json.js'\nimport { ProjectCapabilityDetector } from './ProjectCapabilityDetector.js'\n\nexport interface BuildOptions {\n\tdryRun?: boolean\n}\n\nexport interface BuildResult {\n\tsuccess: boolean\n\tskipped: boolean\n\treason?: string\n\tduration: number\n}\n\n/**\n * BuildRunner handles post-merge build verification for CLI projects\n * Only runs build when project has CLI capabilities (bin field in package.json)\n */\nexport class BuildRunner {\n\tprivate capabilityDetector: ProjectCapabilityDetector\n\n\tconstructor(capabilityDetector?: ProjectCapabilityDetector) {\n\t\tthis.capabilityDetector = capabilityDetector ?? 
new ProjectCapabilityDetector()\n\t}\n\n\t/**\n\t * Run build verification in the specified directory\n\t * @param buildPath - Path where build should run (typically main worktree path)\n\t * @param options - Build options\n\t */\n\tasync runBuild(buildPath: string, options: BuildOptions = {}): Promise<BuildResult> {\n\t\tconst startTime = Date.now()\n\n\t\ttry {\n\t\t\t// Step 1: Check if build script exists (checks .iloom/package.iloom.json first, then package.json)\n\t\t\tconst pkgJson = await getPackageConfig(buildPath)\n\t\t\tconst hasBuildScript = hasScript(pkgJson, 'build')\n\n\t\t\tif (!hasBuildScript) {\n\t\t\tgetLogger().debug('Skipping build - no build script found')\n\t\t\t\treturn {\n\t\t\t\t\tsuccess: true,\n\t\t\t\t\tskipped: true,\n\t\t\t\t\treason: 'No build script found in package configuration',\n\t\t\t\t\tduration: Date.now() - startTime,\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (error) {\n\t\t\t// Handle missing package.json - skip build for non-Node.js projects without package.iloom.json\n\t\t\tif (error instanceof Error && error.message.includes('package.json not found')) {\n\t\t\tgetLogger().debug('Skipping build - no package configuration found')\n\t\t\t\treturn {\n\t\t\t\t\tsuccess: true,\n\t\t\t\t\tskipped: true,\n\t\t\t\t\treason: 'No package configuration found in project',\n\t\t\t\t\tduration: Date.now() - startTime,\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Re-throw other errors\n\t\t\tthrow error\n\t\t}\n\n\t\t// Step 2: Check if project has CLI capability (bin field)\n\t\tconst capabilities = await this.capabilityDetector.detectCapabilities(buildPath)\n\t\tconst isCLIProject = capabilities.capabilities.includes('cli')\n\n\t\tif (!isCLIProject) {\n\t\tgetLogger().debug('Skipping build - not a CLI project (no bin field)')\n\t\t\treturn {\n\t\t\t\tsuccess: true,\n\t\t\t\tskipped: true,\n\t\t\t\treason: 'Project is not a CLI project (no bin field in package.json)',\n\t\t\t\tduration: Date.now() - startTime,\n\t\t\t}\n\t\t}\n\n\t\t// Step 3: Detect package manager\n\t\tconst packageManager = await detectPackageManager(buildPath)\n\n\t\t// Step 4: Handle dry-run mode\n\t\tif (options.dryRun) {\n\t\t\tconst command =\n\t\t\t\tpackageManager === 'npm' ? 'npm run build' : `${packageManager} build`\n\t\tgetLogger().info(`[DRY RUN] Would run: ${command}`)\n\t\t\treturn {\n\t\t\t\tsuccess: true,\n\t\t\t\tskipped: false,\n\t\t\t\tduration: Date.now() - startTime,\n\t\t\t}\n\t\t}\n\n\t\t// Step 5: Execute build\n\tgetLogger().info('Running build...')\n\n\t\ttry {\n\t\t\tawait runScript('build', buildPath, [], { quiet: true })\n\t\tgetLogger().success('Build completed successfully')\n\n\t\t\treturn {\n\t\t\t\tsuccess: true,\n\t\t\t\tskipped: false,\n\t\t\t\tduration: Date.now() - startTime,\n\t\t\t}\n\t\t} catch {\n\t\t\t// Step 6: Throw detailed error on failure\n\t\t\tconst runCommand =\n\t\t\t\tpackageManager === 'npm' ? 
'npm run build' : `${packageManager} build`\n\n\t\t\tthrow new Error(\n\t\t\t\t`Error: Build failed.\\n` +\n\t\t\t\t\t`Fix build errors before proceeding.\\n\\n` +\n\t\t\t\t\t`Run '${runCommand}' to see detailed errors.`\n\t\t\t)\n\t\t}\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAaO,IAAM,eAAN,MAAmB;AAAA,EAIzB,YAAY,iBAAmC,iBAAmC;AACjF,SAAK,kBAAkB,mBAAmB,IAAI,gBAAgB;AAC9D,SAAK,kBAAkB,mBAAmB,IAAI,gBAAgB;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,cAAc,cAAwC;AAEnE,WAAO,qBAAqB,gBAAgB,QAAQ,IAAI,GAAG;AAAA,MAC1D,iBAAiB,KAAK;AAAA,MACtB,iBAAiB,KAAK;AAAA,IACvB,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,aAAa,cAAsB,UAAwB,CAAC,GAAkB;AACnF,UAAM,EAAE,SAAS,OAAO,QAAQ,MAAM,IAAI;AAC1C,UAAM,aAAa,MAAM,KAAK,cAAc,YAAY;AAExD,cAAU,EAAE,KAAK,sBAAsB,UAAU,YAAY;AAG7D,QAAI;AACH,YAAM,kBAAkB,CAAC,YAAY,YAAY,WAAW,cAAc,UAAU,EAAE,GAAG;AAAA,QACxF,KAAK;AAAA,MACN,CAAC;AAAA,IACF,QAAQ;AACP,YAAM,IAAI;AAAA,QACT,gBAAgB,UAAU;AAAA,+BACO,UAAU;AAAA,MAC5C;AAAA,IACD;AAGA,UAAM,eAAe,MAAM,kBAAkB,CAAC,UAAU,aAAa,GAAG;AAAA,MACvE,KAAK;AAAA,IACN,CAAC;AAED,QAAI,gBAA+B;AACnC,QAAI,aAAa,KAAK,GAAG;AACxB,gBAAU,EAAE,KAAK,gEAAgE;AACjF,sBAAgB,MAAM,KAAK,gBAAgB,YAAY;AACvD,gBAAU,EAAE,MAAM,uBAAuB,aAAa,EAAE;AAAA,IACzD;AAGA,UAAM,YAAY,MAAM,kBAAkB,CAAC,cAAc,YAAY,MAAM,GAAG;AAAA,MAC7E,KAAK;AAAA,IACN,CAAC;AAED,UAAM,WAAW,MAAM,kBAAkB,CAAC,aAAa,UAAU,GAAG;AAAA,MACnE,KAAK;AAAA,IACN,CAAC;AAED,UAAM,mBAAmB,UAAU,KAAK;AACxC,UAAM,kBAAkB,SAAS,KAAK;AAGtC,QAAI,qBAAqB,iBAAiB;AACzC,gBAAU,EAAE,QAAQ,qCAAqC,UAAU,qBAAqB;AAExF,UAAI,eAAe;AAClB,cAAM,KAAK,iBAAiB,cAAc,aAAa;AAAA,MACxD;AACA;AAAA,IACD;AAGA,UAAM,gBAAgB,MAAM,kBAAkB,CAAC,OAAO,aAAa,GAAG,UAAU,QAAQ,GAAG;AAAA,MAC1F,KAAK;AAAA,IACN,CAAC;AAED,UAAM,UAAU,cAAc,KAAK;AACnC,UAAM,cAAc,UAAU,QAAQ,MAAM,IAAI,IAAI,CAAC;AAErD,QAAI,SAAS;AAEZ,gBAAU,EAAE,KAAK,SAAS,YAAY,MAAM,uBAAuB;AACnE,kBAAY,QAAQ,CAAC,WAAW,UAAU,EAAE,KAAK,KAAK,MAAM,EAAE,CAAC;AAAA,IAChE,OAAO;AAEN,gBAAU,EAAE,KAAK,GAAG,UAAU,yDAAyD;AAAA,IACxF;AAGA,QAAI,CAAC,SAAS,CAAC,QAAQ;AAGtB,gBAAU,EAAE,KAAK,+DAA+D;AAAA,IACjF;AAGA,QAAI,QAAQ;AACX,gBAAU,EAAE,KAAK,uCAAuC,UAAU,EAAE;AACpE,UAAI,YAAY,SAAS,GAAG;AAC3B,kBAAU,EAAE,KAAK,+BAA+B,YAAY,MAAM,YAAY;AAAA,MAC/E;AACA;AAAA,IACD;AAKA,QAAI;AACH,YAAM,kBAAkB,CAAC,MAAM,4BAA4B,UAAU,UAAU,GAAG,EAAE,KAAK,aAAa,CAAC;AACvG,gBAAU,EAAE,QAAQ,gCAAgC;AAGpD,UAAI,eAAe;AAClB,cAAM,KAAK,iBAAiB,cAAc,aAAa;AAAA,MACxD;AAAA,IACD,SAAS,OAAO;AAEf,YAAM,kBAAkB,MAAM,KAAK,sBAAsB,YAAY;AAErE,UAAI,gBAAgB,SAAS,GAAG;AAE/B,kBAAU,EAAE,KAAK,oEAAoE;AAErF,cAAM,WAAW,MAAM,KAAK;AAAA,UAC3B;AAAA,UACA;AAAA,QACD;AAEA,YAAI,UAAU;AACb,oBAAU,EAAE,QAAQ,6DAA6D;AAGjF,cAAI,eAAe;AAClB,kBAAM,KAAK,iBAAiB,cAAc,aAAa;AAAA,UACxD;AACA;AAAA,QACD;AAGA,cAAM,gBAAgB,KAAK,oBAAoB,eAAe;AAC9D,cAAM,IAAI,MAAM,aAAa;AAAA,MAC9B;AAGA,YAAM,IAAI;AAAA,QACT,kBAAkB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA;AAAA;AAAA,MAGzE;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,4BAA4B,YAAoB,YAAoB,kBAAyC;AAGlH,UAAM,YAAY,MAAM,kBAAkB,CAAC,cAAc,YAAY,UAAU,GAAG;AAAA,MACjF,KAAK;AAAA,IACN,CAAC;AAGD,UAAM,WAAW,MAAM,kBAAkB,CAAC,aAAa,UAAU,GAAG;AAAA,MACnE,KAAK;AAAA,IACN,CAAC;AAGD,UAAM,mBAAmB,UAAU,KAAK;AACxC,UAAM,kBAAkB,SAAS,KAAK;AAEtC,QAAI,qBAAqB,iBAAiB;AACzC,YAAM,IAAI;AAAA,QACT;AAAA,MACQ,UAAU;AAAA,cACF,gBAAgB;AAAA,cAChB,eAAe;AAAA;AAAA;AAAA,4BAED,UAAU,gBAAgB,UAAU;AAAA;AAAA;AAAA,MAEnE;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,wBACL,YACA,cACA,UAAwB,CAAC,GACT;AAChB,UAAM,EAAE,SAAS,OAAO,QAAQ,MAAM,IAAI;AAE1C,cAAU,EAAE,KAAK,gCAAgC;AAKjD,UAAM,aAAa,MAAM,KAAK,cAAc,YAAY;AAKxD,QAAI;AACJ,QAAI,QAAQ,UAAU;AACrB,yBAAmB,QAAQ;AAAA,IAC5B,OAAO;AACN,UAAI;AAEH,2BAAmB,MAAM,sB
AAsB,YAAY,YAAY;AAAA,MACxE,QAAQ;AAGP,kBAAU,EAAE,MAAM,iCAAiC,UAAU,0CAA0C;AACvG,2BAAmB,MAAM,iCAAiC,cAAc,KAAK,eAAe;AAAA,MAC7F;AAAA,IACD;AAGA,cAAU,EAAE,MAAM,SAAS,UAAU,qBAAqB,gBAAgB,EAAE;AAG5E,UAAM,gBAAgB,MAAM,kBAAkB,CAAC,UAAU,gBAAgB,GAAG;AAAA,MAC3E,KAAK;AAAA,IACN,CAAC;AAED,QAAI,cAAc,KAAK,MAAM,YAAY;AACxC,YAAM,IAAI;AAAA,QACT,YAAY,UAAU,sBAAsB,cAAc,KAAK,CAAC;AAAA,eAC/C,gBAAgB;AAAA;AAAA,MAElC;AAAA,IACD;AAGA,UAAM,KAAK,4BAA4B,YAAY,YAAY,gBAAgB;AAG/E,UAAM,gBAAgB,MAAM,kBAAkB,CAAC,OAAO,aAAa,GAAG,UAAU,KAAK,UAAU,EAAE,GAAG;AAAA,MACnG,KAAK;AAAA,IACN,CAAC;AAED,UAAM,UAAU,cAAc,KAAK;AAGnC,QAAI,CAAC,SAAS;AACb,gBAAU,EAAE,QAAQ,kCAAkC,UAAU,oBAAoB;AACpF;AAAA,IACD;AAGA,UAAM,cAAc,QAAQ,MAAM,IAAI;AACtC,cAAU,EAAE,KAAK,SAAS,YAAY,MAAM,sBAAsB;AAClE,gBAAY,QAAQ,CAAC,WAAW,UAAU,EAAE,KAAK,KAAK,MAAM,EAAE,CAAC;AAG/D,QAAI,CAAC,SAAS,CAAC,QAAQ;AAGtB,gBAAU,EAAE,KAAK,2EAA2E;AAAA,IAC7F;AAGA,QAAI,QAAQ;AACX,gBAAU,EAAE,KAAK,gDAAgD,UAAU,EAAE;AAC7E,gBAAU,EAAE,KAAK,8BAA8B,YAAY,MAAM,YAAY;AAC7E;AAAA,IACD;AAGA,QAAI;AACH,gBAAU,EAAE,MAAM,mCAAmC,UAAU,SAAS,UAAU,eAAe,gBAAgB,KAAK;AACtH,YAAM,kBAAkB,CAAC,SAAS,aAAa,UAAU,GAAG,EAAE,KAAK,iBAAiB,CAAC;AACrF,gBAAU,EAAE,QAAQ,wCAAwC,YAAY,MAAM,aAAa;AAAA,IAC5F,SAAS,OAAO;AACf,YAAM,IAAI;AAAA,QACT,8BAA8B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAMrF;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,sBAAsB,cAAyC;AAC5E,QAAI;AACH,YAAM,SAAS,MAAM,kBAAkB,CAAC,QAAQ,eAAe,iBAAiB,GAAG;AAAA,QAClF,KAAK;AAAA,MACN,CAAC;AAED,aAAO,OACL,KAAK,EACL,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,KAAK,SAAS,CAAC;AAAA,IACnC,QAAQ;AAEP,aAAO,CAAC;AAAA,IACT;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAc,gBAAgB,cAAuC;AAEpE,UAAM,kBAAkB,CAAC,OAAO,IAAI,GAAG,EAAE,KAAK,aAAa,CAAC;AAI5D,UAAM,kBAAkB,CAAC,UAAU,eAAe,MAAM,4BAA4B,GAAG,EAAE,KAAK,aAAa,CAAC;AAG5G,UAAM,OAAO,MAAM,kBAAkB,CAAC,aAAa,MAAM,GAAG,EAAE,KAAK,aAAa,CAAC;AACjF,WAAO,KAAK,KAAK;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAc,iBAAiB,cAAsB,eAAsC;AAC1F,cAAU,EAAE,KAAK,kDAAkD;AAEnE,QAAI;AAEH,YAAM,kBAAkB,CAAC,SAAS,UAAU,QAAQ,GAAG,EAAE,KAAK,aAAa,CAAC;AAG5E,YAAM,kBAAkB,CAAC,SAAS,MAAM,GAAG,EAAE,KAAK,aAAa,CAAC;AAEhE,gBAAU,EAAE,QAAQ,8CAA8C;AAAA,IACnE,SAAS,OAAO;AAEf,gBAAU,EAAE;AAAA,QACX,iCAAiC,aAAa;AAAA,QAG9C,EAAE,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,EAAE;AAAA,MACjE;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,oBAAoB,iBAAmC;AAC9D,UAAM,WAAW,gBAAgB,IAAI,CAAC,SAAS,YAAO,IAAI,EAAE,EAAE,KAAK,IAAI;AAEvE,WACC,mDACA,WACA;AAAA,EAQF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,gCACb,cACA,iBACmB;AAEnB,UAAM,oBAAoB,MAAM,gBAAgB;AAChD,QAAI,CAAC,mBAAmB;AACvB,gBAAU,EAAE,MAAM,wDAAwD;AAC1E,aAAO;AAAA,IACR;AAEA,cAAU,EAAE,KAAK,4CAA4C,gBAAgB,MAAM,aAAa;AAIhG,UAAM,eACL;AAMD,UAAM,SACL;AAKD,UAAM,qBAAqB;AAAA,MAC1B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACD;AAEA,QAAI;AAGH,YAAM,aAAa,QAAQ;AAAA,QAC1B,oBAAoB;AAAA,QACpB,QAAQ;AAAA,QACR,UAAU;AAAA;AAAA,QACV,cAAc;AAAA,MACf,CAAC;AAGD,YAAM,qBAAqB,MAAM,KAAK,sBAAsB,YAAY;AAExE,UAAI,mBAAmB,SAAS,GAAG;AAClC,kBAAU,EAAE;AAAA,UACX,4BAA4B,mBAAmB,MAAM;AAAA,QACtD;AACA,eAAO;AAAA,MACR;AAGA,YAAM,mBAAmB,MAAM,KAAK,mBAAmB,YAAY;AAEnE,UAAI,kBAAkB;AACrB,kBAAU,EAAE,KAAK,kDAAkD;AACnE,eAAO;AAAA,MACR;AAEA,aAAO;AAAA,IACR,SAAS,OAAO;AACf,gBAAU,EAAE,KAAK,qCAAqC;AAAA,QACrD,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC7D,CAAC;AACD,aAAO;AAAA,IACR;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,mBAAmB,cAAwC;AACxE,UAAM,KAAK,MAAM,OAAO,aAAkB;AAC1C,UAAM,OAAO,MAAM,OAAO,MAAW;AAErC,UAAM,kBAAkB,KAAK,KAAK,cAAc,QAAQ,cAAc;AACtE,UAAM,kBAAkB,KAAK,KAAK,cAAc,QAAQ,cAAc;AAGtE,QAAI;AACH,YAAM,GAAG,OAAO,eAAe;AAC/B,aAAO;AAAA,IACR,QAAQ;AAAA,IAER;AAGA,QAA
I;AACH,YAAM,GAAG,OAAO,eAAe;AAC/B,aAAO;AAAA,IACR,QAAQ;AAAA,IAER;AAEA,WAAO;AAAA,EACR;AACD;;;AC7fO,IAAM,cAAN,MAAkB;AAAA,EAGxB,YAAY,oBAAgD;AAC3D,SAAK,qBAAqB,sBAAsB,IAAI,0BAA0B;AAAA,EAC/E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,SAAS,WAAmB,UAAwB,CAAC,GAAyB;AACnF,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AAEH,YAAM,UAAU,MAAM,iBAAiB,SAAS;AAChD,YAAM,iBAAiB,UAAU,SAAS,OAAO;AAEjD,UAAI,CAAC,gBAAgB;AACrB,kBAAU,EAAE,MAAM,wCAAwC;AACzD,eAAO;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,UACT,QAAQ;AAAA,UACR,UAAU,KAAK,IAAI,IAAI;AAAA,QACxB;AAAA,MACD;AAAA,IACD,SAAS,OAAO;AAEf,UAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,wBAAwB,GAAG;AAChF,kBAAU,EAAE,MAAM,iDAAiD;AAClE,eAAO;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,UACT,QAAQ;AAAA,UACR,UAAU,KAAK,IAAI,IAAI;AAAA,QACxB;AAAA,MACD;AAEA,YAAM;AAAA,IACP;AAGA,UAAM,eAAe,MAAM,KAAK,mBAAmB,mBAAmB,SAAS;AAC/E,UAAM,eAAe,aAAa,aAAa,SAAS,KAAK;AAE7D,QAAI,CAAC,cAAc;AACnB,gBAAU,EAAE,MAAM,mDAAmD;AACpE,aAAO;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,QACT,QAAQ;AAAA,QACR,UAAU,KAAK,IAAI,IAAI;AAAA,MACxB;AAAA,IACD;AAGA,UAAM,iBAAiB,MAAM,qBAAqB,SAAS;AAG3D,QAAI,QAAQ,QAAQ;AACnB,YAAM,UACL,mBAAmB,QAAQ,kBAAkB,GAAG,cAAc;AAChE,gBAAU,EAAE,KAAK,wBAAwB,OAAO,EAAE;AACjD,aAAO;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,QACT,UAAU,KAAK,IAAI,IAAI;AAAA,MACxB;AAAA,IACD;AAGD,cAAU,EAAE,KAAK,kBAAkB;AAElC,QAAI;AACH,YAAM,UAAU,SAAS,WAAW,CAAC,GAAG,EAAE,OAAO,KAAK,CAAC;AACxD,gBAAU,EAAE,QAAQ,8BAA8B;AAEjD,aAAO;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,QACT,UAAU,KAAK,IAAI,IAAI;AAAA,MACxB;AAAA,IACD,QAAQ;AAEP,YAAM,aACL,mBAAmB,QAAQ,kBAAkB,GAAG,cAAc;AAE/D,YAAM,IAAI;AAAA,QACT;AAAA;AAAA;AAAA,OAES,UAAU;AAAA,MACpB;AAAA,IACD;AAAA,EACD;AACD;","names":[]}
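The chunk-LQBLDI47.js.map entry above embeds src/lib/MergeManager.ts together with the new src/lib/BuildRunner.ts that backs the post-merge build verification added to the chunk earlier in this diff. A hedged sketch of how BuildRunner might be driven, with the import path assumed for illustration:

```ts
// Sketch only: the import path is an assumption; BuildRunner's shape is taken from the
// sourcesContent above (runBuild returns { success, skipped, reason?, duration }).
import { BuildRunner } from './lib/BuildRunner.js'

async function verifyBuild(mainWorktreePath: string): Promise<void> {
  const runner = new BuildRunner()
  // Skips when no build script exists or the project has no 'cli' capability;
  // throws a "Build failed" error with recovery guidance when the build script fails.
  const result = await runner.runBuild(mainWorktreePath, { dryRun: false })
  if (result.skipped) {
    console.log(`Build skipped: ${result.reason}`)
  } else {
    console.log(`Build finished in ${result.duration}ms`)
  }
}
```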
@@ -1,7 +1,7 @@
 #!/usr/bin/env node
 import {
   PromptTemplateManager
-} from "./chunk-
+} from "./chunk-TIYJEEVO.js";
 import {
   logger
 } from "./chunk-VT4PDUYT.js";
@@ -263,4 +263,4 @@ var AgentManager = class {
 export {
   AgentManager
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-N7FVXZNI.js.map