@iloom/cli 0.6.0 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (179)
  1. package/LICENSE +1 -1
  2. package/README.md +58 -16
  3. package/dist/{BranchNamingService-B5PVRR7F.js → BranchNamingService-FLPUUFOB.js} +2 -2
  4. package/dist/ClaudeContextManager-KE5TBZVZ.js +14 -0
  5. package/dist/ClaudeService-CRSETT3A.js +13 -0
  6. package/dist/{GitHubService-S2OGUTDR.js → GitHubService-O7U4UQ7N.js} +3 -3
  7. package/dist/{LoomLauncher-5LFM4LXB.js → LoomLauncher-NL65LSKP.js} +6 -6
  8. package/dist/{MetadataManager-DFI73J3G.js → MetadataManager-XJ2YB762.js} +2 -2
  9. package/dist/PRManager-2ABCWXHW.js +16 -0
  10. package/dist/{ProjectCapabilityDetector-S5FLNCFI.js → ProjectCapabilityDetector-UZYW32SY.js} +3 -3
  11. package/dist/{PromptTemplateManager-C3DK6XZL.js → PromptTemplateManager-7L3HJQQU.js} +2 -2
  12. package/dist/README.md +58 -16
  13. package/dist/{SettingsManager-35F5RUJH.js → SettingsManager-YU4VYPTW.js} +2 -2
  14. package/dist/agents/iloom-issue-analyze-and-plan.md +42 -17
  15. package/dist/agents/iloom-issue-analyzer.md +14 -14
  16. package/dist/agents/iloom-issue-complexity-evaluator.md +38 -15
  17. package/dist/agents/iloom-issue-enhancer.md +15 -15
  18. package/dist/agents/iloom-issue-implementer.md +44 -15
  19. package/dist/agents/iloom-issue-planner.md +121 -17
  20. package/dist/agents/iloom-issue-reviewer.md +15 -15
  21. package/dist/{build-FJVYP7EV.js → build-O2EJHDEW.js} +9 -9
  22. package/dist/{chunk-ZPSTA5PR.js → chunk-3CDWFEGL.js} +2 -2
  23. package/dist/{chunk-VU3QMIP2.js → chunk-453NC377.js} +91 -15
  24. package/dist/chunk-453NC377.js.map +1 -0
  25. package/dist/{chunk-UQIXZ3BA.js → chunk-5V74K5ZA.js} +2 -2
  26. package/dist/{chunk-7WANFUIK.js → chunk-6TL3BYH6.js} +2 -2
  27. package/dist/{chunk-5TXLVEXT.js → chunk-C3AKFAIR.js} +2 -2
  28. package/dist/{chunk-K7SEEHKO.js → chunk-CNSTXBJ3.js} +7 -419
  29. package/dist/chunk-CNSTXBJ3.js.map +1 -0
  30. package/dist/{chunk-5IWU3HXE.js → chunk-EPPPDVHD.js} +23 -11
  31. package/dist/chunk-EPPPDVHD.js.map +1 -0
  32. package/dist/{chunk-UB4TFAXJ.js → chunk-FEAJR6PN.js} +9 -55
  33. package/dist/chunk-FEAJR6PN.js.map +1 -0
  34. package/dist/{chunk-6YSFTPKW.js → chunk-FM4KBPVA.js} +18 -13
  35. package/dist/chunk-FM4KBPVA.js.map +1 -0
  36. package/dist/{chunk-AEIMYF4P.js → chunk-FP7G7DG3.js} +6 -2
  37. package/dist/chunk-FP7G7DG3.js.map +1 -0
  38. package/dist/{chunk-LT3SGBR7.js → chunk-GCPAZSGV.js} +36 -2
  39. package/dist/{chunk-LT3SGBR7.js.map → chunk-GCPAZSGV.js.map} +1 -1
  40. package/dist/chunk-GJMEKEI5.js +517 -0
  41. package/dist/chunk-GJMEKEI5.js.map +1 -0
  42. package/dist/{chunk-64O2UIWO.js → chunk-GV5X6XUE.js} +4 -4
  43. package/dist/{chunk-7Q66W4OH.js → chunk-HBJITKSZ.js} +37 -1
  44. package/dist/chunk-HBJITKSZ.js.map +1 -0
  45. package/dist/{chunk-7HIRPCKU.js → chunk-HVQNVRAF.js} +2 -2
  46. package/dist/{chunk-BXCPJJYM.js → chunk-ITN64ENQ.js} +1 -1
  47. package/dist/chunk-ITN64ENQ.js.map +1 -0
  48. package/dist/{chunk-6U6VI4SZ.js → chunk-KVS4XGBQ.js} +4 -4
  49. package/dist/{chunk-AXX3QIKK.js → chunk-LLWX3PCW.js} +2 -2
  50. package/dist/{chunk-WIJWIKAN.js → chunk-LQBLDI47.js} +105 -7
  51. package/dist/chunk-LQBLDI47.js.map +1 -0
  52. package/dist/{chunk-SN3Z6EZO.js → chunk-N7FVXZNI.js} +2 -2
  53. package/dist/chunk-NTIZLX42.js +822 -0
  54. package/dist/chunk-NTIZLX42.js.map +1 -0
  55. package/dist/{chunk-PMVWQBWS.js → chunk-S7YMZQUD.js} +31 -45
  56. package/dist/chunk-S7YMZQUD.js.map +1 -0
  57. package/dist/chunk-TIYJEEVO.js +79 -0
  58. package/dist/chunk-TIYJEEVO.js.map +1 -0
  59. package/dist/{chunk-EK3XCAAS.js → chunk-UDRZY65Y.js} +2 -2
  60. package/dist/{chunk-3PT7RKL5.js → chunk-USJSNHGG.js} +2 -2
  61. package/dist/{chunk-CFUWQHCJ.js → chunk-VWGKGNJP.js} +114 -35
  62. package/dist/chunk-VWGKGNJP.js.map +1 -0
  63. package/dist/{chunk-F6WVM437.js → chunk-WFQ5CLTR.js} +6 -3
  64. package/dist/chunk-WFQ5CLTR.js.map +1 -0
  65. package/dist/{chunk-TRQ76ISK.js → chunk-Z6BO53V7.js} +9 -9
  66. package/dist/{chunk-GEXP5IOF.js → chunk-ZA575VLF.js} +21 -8
  67. package/dist/chunk-ZA575VLF.js.map +1 -0
  68. package/dist/{claude-H33OQMXO.js → claude-6H36IBHO.js} +4 -2
  69. package/dist/{cleanup-OU2HFOOG.js → cleanup-ZPOMRSNN.js} +20 -16
  70. package/dist/cleanup-ZPOMRSNN.js.map +1 -0
  71. package/dist/cli.js +511 -959
  72. package/dist/cli.js.map +1 -1
  73. package/dist/commit-6S2RIA2K.js +237 -0
  74. package/dist/commit-6S2RIA2K.js.map +1 -0
  75. package/dist/{compile-ULNO5F7Q.js → compile-LRMAADUT.js} +9 -9
  76. package/dist/{contribute-T7ENST5N.js → contribute-GXKOIA42.js} +99 -31
  77. package/dist/contribute-GXKOIA42.js.map +1 -0
  78. package/dist/{dev-server-4RCDJ5MU.js → dev-server-GREJUEKW.js} +22 -74
  79. package/dist/dev-server-GREJUEKW.js.map +1 -0
  80. package/dist/{feedback-O4Q55SVS.js → feedback-G7G5QCY4.js} +10 -10
  81. package/dist/{git-FVMGBHC2.js → git-ENLT2VNI.js} +6 -4
  82. package/dist/hooks/iloom-hook.js +30 -2
  83. package/dist/{ignite-VHV65WEZ.js → ignite-YUAOJ5PP.js} +20 -20
  84. package/dist/ignite-YUAOJ5PP.js.map +1 -0
  85. package/dist/index.d.ts +71 -27
  86. package/dist/index.js +196 -266
  87. package/dist/index.js.map +1 -1
  88. package/dist/init-XQQMFDM6.js +21 -0
  89. package/dist/{lint-5JMCWE4Y.js → lint-OFVN7FT6.js} +9 -9
  90. package/dist/mcp/issue-management-server.js +359 -13
  91. package/dist/mcp/issue-management-server.js.map +1 -1
  92. package/dist/mcp/recap-server.js +13 -4
  93. package/dist/mcp/recap-server.js.map +1 -1
  94. package/dist/{open-WHVUYGPY.js → open-MCWQAPSZ.js} +25 -76
  95. package/dist/open-MCWQAPSZ.js.map +1 -0
  96. package/dist/{projects-SA76I4TZ.js → projects-PQOTWUII.js} +11 -4
  97. package/dist/projects-PQOTWUII.js.map +1 -0
  98. package/dist/prompts/init-prompt.txt +63 -59
  99. package/dist/prompts/issue-prompt.txt +132 -63
  100. package/dist/prompts/pr-prompt.txt +3 -3
  101. package/dist/prompts/regular-prompt.txt +16 -18
  102. package/dist/prompts/session-summary-prompt.txt +13 -13
  103. package/dist/{rebase-5EY3Q6XP.js → rebase-RKQED567.js} +53 -8
  104. package/dist/rebase-RKQED567.js.map +1 -0
  105. package/dist/{recap-VOOUXOGP.js → recap-ZKGHZCX6.js} +6 -6
  106. package/dist/{run-NCRK5NPR.js → run-CCG24PBC.js} +25 -76
  107. package/dist/run-CCG24PBC.js.map +1 -0
  108. package/dist/schema/settings.schema.json +14 -3
  109. package/dist/{shell-SBLXVOVJ.js → shell-2NNSIU34.js} +6 -6
  110. package/dist/{summary-CVFAMDOJ.js → summary-G6L3VAKK.js} +11 -10
  111. package/dist/{summary-CVFAMDOJ.js.map → summary-G6L3VAKK.js.map} +1 -1
  112. package/dist/{test-3KIVXI6J.js → test-QZDOEUIO.js} +9 -9
  113. package/dist/{test-git-ZB6AGGRW.js → test-git-E2BLXR6M.js} +4 -4
  114. package/dist/{test-prefix-FBGXKMPA.js → test-prefix-A7JGGYAA.js} +4 -4
  115. package/dist/{test-webserver-YVQD42W6.js → test-webserver-NRMGT2HB.js} +29 -8
  116. package/dist/test-webserver-NRMGT2HB.js.map +1 -0
  117. package/package.json +3 -1
  118. package/dist/ClaudeContextManager-6J2EB4QU.js +0 -14
  119. package/dist/ClaudeService-O2PB22GX.js +0 -13
  120. package/dist/PRManager-OCSB2HPT.js +0 -14
  121. package/dist/chunk-5IWU3HXE.js.map +0 -1
  122. package/dist/chunk-6YSFTPKW.js.map +0 -1
  123. package/dist/chunk-7Q66W4OH.js.map +0 -1
  124. package/dist/chunk-AEIMYF4P.js.map +0 -1
  125. package/dist/chunk-BXCPJJYM.js.map +0 -1
  126. package/dist/chunk-CFUWQHCJ.js.map +0 -1
  127. package/dist/chunk-F6WVM437.js.map +0 -1
  128. package/dist/chunk-GEXP5IOF.js.map +0 -1
  129. package/dist/chunk-K7SEEHKO.js.map +0 -1
  130. package/dist/chunk-PMVWQBWS.js.map +0 -1
  131. package/dist/chunk-UB4TFAXJ.js.map +0 -1
  132. package/dist/chunk-VU3QMIP2.js.map +0 -1
  133. package/dist/chunk-W6WVRHJ6.js +0 -251
  134. package/dist/chunk-W6WVRHJ6.js.map +0 -1
  135. package/dist/chunk-WIJWIKAN.js.map +0 -1
  136. package/dist/cleanup-OU2HFOOG.js.map +0 -1
  137. package/dist/contribute-T7ENST5N.js.map +0 -1
  138. package/dist/dev-server-4RCDJ5MU.js.map +0 -1
  139. package/dist/ignite-VHV65WEZ.js.map +0 -1
  140. package/dist/init-HB34Q5FH.js +0 -21
  141. package/dist/open-WHVUYGPY.js.map +0 -1
  142. package/dist/projects-SA76I4TZ.js.map +0 -1
  143. package/dist/rebase-5EY3Q6XP.js.map +0 -1
  144. package/dist/run-NCRK5NPR.js.map +0 -1
  145. package/dist/test-webserver-YVQD42W6.js.map +0 -1
  146. /package/dist/{BranchNamingService-B5PVRR7F.js.map → BranchNamingService-FLPUUFOB.js.map} +0 -0
  147. /package/dist/{ClaudeContextManager-6J2EB4QU.js.map → ClaudeContextManager-KE5TBZVZ.js.map} +0 -0
  148. /package/dist/{ClaudeService-O2PB22GX.js.map → ClaudeService-CRSETT3A.js.map} +0 -0
  149. /package/dist/{GitHubService-S2OGUTDR.js.map → GitHubService-O7U4UQ7N.js.map} +0 -0
  150. /package/dist/{LoomLauncher-5LFM4LXB.js.map → LoomLauncher-NL65LSKP.js.map} +0 -0
  151. /package/dist/{MetadataManager-DFI73J3G.js.map → MetadataManager-XJ2YB762.js.map} +0 -0
  152. /package/dist/{PRManager-OCSB2HPT.js.map → PRManager-2ABCWXHW.js.map} +0 -0
  153. /package/dist/{ProjectCapabilityDetector-S5FLNCFI.js.map → ProjectCapabilityDetector-UZYW32SY.js.map} +0 -0
  154. /package/dist/{PromptTemplateManager-C3DK6XZL.js.map → PromptTemplateManager-7L3HJQQU.js.map} +0 -0
  155. /package/dist/{SettingsManager-35F5RUJH.js.map → SettingsManager-YU4VYPTW.js.map} +0 -0
  156. /package/dist/{build-FJVYP7EV.js.map → build-O2EJHDEW.js.map} +0 -0
  157. /package/dist/{chunk-ZPSTA5PR.js.map → chunk-3CDWFEGL.js.map} +0 -0
  158. /package/dist/{chunk-UQIXZ3BA.js.map → chunk-5V74K5ZA.js.map} +0 -0
  159. /package/dist/{chunk-7WANFUIK.js.map → chunk-6TL3BYH6.js.map} +0 -0
  160. /package/dist/{chunk-5TXLVEXT.js.map → chunk-C3AKFAIR.js.map} +0 -0
  161. /package/dist/{chunk-64O2UIWO.js.map → chunk-GV5X6XUE.js.map} +0 -0
  162. /package/dist/{chunk-7HIRPCKU.js.map → chunk-HVQNVRAF.js.map} +0 -0
  163. /package/dist/{chunk-6U6VI4SZ.js.map → chunk-KVS4XGBQ.js.map} +0 -0
  164. /package/dist/{chunk-AXX3QIKK.js.map → chunk-LLWX3PCW.js.map} +0 -0
  165. /package/dist/{chunk-SN3Z6EZO.js.map → chunk-N7FVXZNI.js.map} +0 -0
  166. /package/dist/{chunk-EK3XCAAS.js.map → chunk-UDRZY65Y.js.map} +0 -0
  167. /package/dist/{chunk-3PT7RKL5.js.map → chunk-USJSNHGG.js.map} +0 -0
  168. /package/dist/{chunk-TRQ76ISK.js.map → chunk-Z6BO53V7.js.map} +0 -0
  169. /package/dist/{claude-H33OQMXO.js.map → claude-6H36IBHO.js.map} +0 -0
  170. /package/dist/{compile-ULNO5F7Q.js.map → compile-LRMAADUT.js.map} +0 -0
  171. /package/dist/{feedback-O4Q55SVS.js.map → feedback-G7G5QCY4.js.map} +0 -0
  172. /package/dist/{git-FVMGBHC2.js.map → git-ENLT2VNI.js.map} +0 -0
  173. /package/dist/{init-HB34Q5FH.js.map → init-XQQMFDM6.js.map} +0 -0
  174. /package/dist/{lint-5JMCWE4Y.js.map → lint-OFVN7FT6.js.map} +0 -0
  175. /package/dist/{recap-VOOUXOGP.js.map → recap-ZKGHZCX6.js.map} +0 -0
  176. /package/dist/{shell-SBLXVOVJ.js.map → shell-2NNSIU34.js.map} +0 -0
  177. /package/dist/{test-3KIVXI6J.js.map → test-QZDOEUIO.js.map} +0 -0
  178. /package/dist/{test-git-ZB6AGGRW.js.map → test-git-E2BLXR6M.js.map} +0 -0
  179. /package/dist/{test-prefix-FBGXKMPA.js.map → test-prefix-A7JGGYAA.js.map} +0 -0
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/types/linear.ts","../src/utils/linear.ts"],"sourcesContent":["/**\n * Linear API response types (from @linear/sdk)\n */\n\n/**\n * Linear issue response from SDK\n */\nexport interface LinearIssue {\n /** Linear internal UUID */\n id: string\n /** Issue identifier in TEAM-NUMBER format (e.g., ENG-123) */\n identifier: string\n /** Issue title */\n title: string\n /** Issue description (markdown) */\n description?: string\n /** Current workflow state name */\n state?: string\n /** Linear web URL */\n url: string\n /** Creation timestamp (ISO string) */\n createdAt: string\n /** Last update timestamp (ISO string) */\n updatedAt: string\n}\n\n/**\n * Linear comment response from SDK\n */\nexport interface LinearComment {\n /** Comment UUID */\n id: string\n /** Comment body (markdown) */\n body: string\n /** Creation timestamp (ISO string) */\n createdAt: string\n /** Last update timestamp (ISO string) */\n updatedAt: string\n /** Comment URL */\n url: string\n}\n\n/**\n * Linear error codes\n */\nexport type LinearErrorCode =\n | 'NOT_FOUND'\n | 'UNAUTHORIZED'\n | 'INVALID_STATE'\n | 'RATE_LIMITED'\n | 'CLI_NOT_FOUND'\n | 'CLI_ERROR'\n\n/**\n * Linear error details\n */\nexport interface LinearError {\n code: LinearErrorCode\n message: string\n details?: unknown\n}\n\n/**\n * Custom error class for Linear operations\n */\nexport class LinearServiceError extends Error {\n constructor(\n public code: LinearErrorCode,\n message: string,\n public details?: unknown,\n ) {\n super(message)\n this.name = 'LinearServiceError'\n // Maintain proper stack trace for where error was thrown (V8 only)\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, LinearServiceError)\n }\n }\n}\n","/**\n * Linear SDK utilities\n * Wrapper functions for the @linear/sdk\n */\n\nimport { LinearClient } from '@linear/sdk'\nimport type { LinearIssue, LinearComment } from '../types/linear.js'\nimport { LinearServiceError } from '../types/linear.js'\nimport { logger } from './logger.js'\n\n/**\n * Slugify a title for use in Linear URLs\n * Converts to lowercase, replaces non-alphanumeric with hyphens, truncates to reasonable length\n * @param title - Issue title\n * @param maxLength - Maximum slug length (default: 50)\n * @returns Slugified title\n */\nexport function slugifyTitle(title: string, maxLength: number = 50): string {\n // Convert to lowercase, replace non-alphanumeric chars with hyphens\n const slug = title\n .toLowerCase()\n .replace(/[^a-z0-9]+/g, '-')\n .replace(/^-+|-+$/g, '') // trim leading/trailing hyphens\n\n // If already short enough, return as-is\n if (slug.length <= maxLength) {\n return slug\n }\n\n // Split by hyphens and rebuild until we hit the limit\n const parts = slug.split('-')\n let result = ''\n for (const part of parts) {\n const candidate = result ? `${result}-${part}` : part\n if (candidate.length > maxLength) {\n break\n }\n result = candidate\n }\n\n return result || slug.slice(0, maxLength) // fallback if first part is too long\n}\n\n/**\n * Build a Linear issue URL with optional title slug\n * @param identifier - Issue identifier (e.g., \"ENG-123\")\n * @param title - Optional issue title for slug\n * @returns Linear URL\n */\nexport function buildLinearIssueUrl(identifier: string, title?: string): string {\n const base = `https://linear.app/issue/${identifier}`\n if (title) {\n const slug = slugifyTitle(title)\n return slug ? 
`${base}/${slug}` : base\n }\n return base\n}\n\n/**\n * Get Linear API token from environment\n * @returns API token\n * @throws LinearServiceError if token not found\n */\nfunction getLinearApiToken(): string {\n const token = process.env.LINEAR_API_TOKEN\n if (!token) {\n throw new LinearServiceError(\n 'UNAUTHORIZED',\n 'LINEAR_API_TOKEN not set. Configure in settings.local.json or set environment variable.',\n )\n }\n return token\n}\n\n/**\n * Create a Linear SDK client instance\n * @returns Configured LinearClient\n */\nfunction createLinearClient(): LinearClient {\n return new LinearClient({ apiKey: getLinearApiToken() })\n}\n\n/**\n * Handle SDK errors and convert to LinearServiceError\n * @param error - Error from SDK\n * @param context - Context string for debugging\n * @throws LinearServiceError\n */\nfunction handleLinearError(error: unknown, context: string): never {\n logger.debug(`${context}: Handling error`, { error })\n\n // SDK errors typically have a message property\n const errorMessage = error instanceof Error ? error.message : String(error)\n\n // Map common error patterns\n if (errorMessage.includes('not found') || errorMessage.includes('Not found')) {\n throw new LinearServiceError('NOT_FOUND', 'Linear issue or resource not found', { error })\n }\n\n if (\n errorMessage.includes('unauthorized') ||\n errorMessage.includes('Unauthorized') ||\n errorMessage.includes('Invalid API key')\n ) {\n throw new LinearServiceError(\n 'UNAUTHORIZED',\n 'Linear authentication failed. Check LINEAR_API_TOKEN.',\n { error },\n )\n }\n\n if (errorMessage.includes('rate limit')) {\n throw new LinearServiceError('RATE_LIMITED', 'Linear API rate limit exceeded', { error })\n }\n\n // Generic SDK error\n throw new LinearServiceError('CLI_ERROR', `Linear SDK error: ${errorMessage}`, { error })\n}\n\n/**\n * Fetch a Linear issue by identifier\n * @param identifier - Linear issue identifier (e.g., \"ENG-123\")\n * @returns Linear issue details\n * @throws LinearServiceError if issue not found or SDK error\n */\nexport async function fetchLinearIssue(identifier: string): Promise<LinearIssue> {\n try {\n logger.debug(`Fetching Linear issue: ${identifier}`)\n const client = createLinearClient()\n const issue = await client.issue(identifier)\n\n if (!issue) {\n throw new LinearServiceError('NOT_FOUND', `Linear issue ${identifier} not found`)\n }\n\n // Convert SDK issue to our LinearIssue type\n const result: LinearIssue = {\n id: issue.id,\n identifier: issue.identifier,\n title: issue.title,\n url: issue.url,\n createdAt: issue.createdAt.toISOString(),\n updatedAt: issue.updatedAt.toISOString(),\n }\n\n // Add optional fields if present\n if (issue.description) {\n result.description = issue.description\n }\n\n if (issue.state) {\n const state = await issue.state\n if (state?.name) {\n result.state = state.name\n }\n }\n\n return result\n } catch (error) {\n if (error instanceof LinearServiceError) {\n throw error\n }\n handleLinearError(error, 'fetchLinearIssue')\n }\n}\n\n/**\n * Create a new Linear issue\n * @param title - Issue title\n * @param body - Issue description (markdown)\n * @param teamKey - Team key (e.g., \"ENG\", \"PLAT\")\n * @param labels - Optional label names to apply\n * @returns Created issue identifier and URL\n * @throws LinearServiceError on creation failure\n */\nexport async function createLinearIssue(\n title: string,\n body: string,\n teamKey: string,\n _labels?: string[],\n): Promise<{ identifier: string; url: string }> {\n try {\n logger.debug(`Creating Linear 
issue in team ${teamKey}: ${title}`)\n const client = createLinearClient()\n\n // Get team by key\n const teams = await client.teams()\n const team = teams.nodes.find((t) => t.key === teamKey)\n\n if (!team) {\n throw new LinearServiceError('NOT_FOUND', `Linear team ${teamKey} not found`)\n }\n\n // Create issue\n const issueInput: { teamId: string; title: string; description?: string } = {\n teamId: team.id,\n title,\n }\n\n if (body) {\n issueInput.description = body\n }\n\n const payload = await client.createIssue(issueInput)\n\n const issue = await payload.issue\n\n if (!issue) {\n throw new LinearServiceError('CLI_ERROR', 'Failed to create Linear issue')\n }\n\n // Construct URL\n const url = issue.url ?? buildLinearIssueUrl(issue.identifier, title)\n\n return {\n identifier: issue.identifier,\n url,\n }\n } catch (error) {\n if (error instanceof LinearServiceError) {\n throw error\n }\n handleLinearError(error, 'createLinearIssue')\n }\n}\n\n/**\n * Create a child issue linked to a parent issue\n * Linear supports atomic creation with parentId field\n * @param title - Issue title\n * @param body - Issue description (markdown)\n * @param teamKey - Team key (e.g., \"ENG\")\n * @param parentId - Parent issue UUID (from issue.id, not identifier)\n * @param labels - Optional label names to apply\n * @returns Created issue identifier and URL\n * @throws LinearServiceError on creation failure\n */\nexport async function createLinearChildIssue(\n title: string,\n body: string,\n teamKey: string,\n parentId: string,\n _labels?: string[],\n): Promise<{ identifier: string; url: string }> {\n try {\n logger.debug(`Creating Linear child issue in team ${teamKey}: ${title}`)\n const client = createLinearClient()\n\n // Get team by key\n const teams = await client.teams()\n const team = teams.nodes.find((t) => t.key === teamKey)\n\n if (!team) {\n throw new LinearServiceError('NOT_FOUND', `Linear team ${teamKey} not found`)\n }\n\n // Create issue with parentId for atomic parent-child relationship\n const issueInput: { teamId: string; title: string; description?: string; parentId: string } = {\n teamId: team.id,\n title,\n parentId, // UUID of parent issue\n }\n\n if (body) {\n issueInput.description = body\n }\n\n const payload = await client.createIssue(issueInput)\n\n const issue = await payload.issue\n\n if (!issue) {\n throw new LinearServiceError('CLI_ERROR', 'Failed to create Linear child issue')\n }\n\n // Construct URL\n const url = issue.url ?? 
buildLinearIssueUrl(issue.identifier, title)\n\n return {\n identifier: issue.identifier,\n url,\n }\n } catch (error) {\n if (error instanceof LinearServiceError) {\n throw error\n }\n handleLinearError(error, 'createLinearChildIssue')\n }\n}\n\n/**\n * Create a comment on a Linear issue\n * @param identifier - Linear issue identifier (e.g., \"ENG-123\")\n * @param body - Comment body (markdown)\n * @returns Created comment details\n * @throws LinearServiceError on creation failure\n */\nexport async function createLinearComment(\n identifier: string,\n body: string,\n): Promise<LinearComment> {\n try {\n logger.debug(`Creating comment on Linear issue ${identifier}`)\n const client = createLinearClient()\n\n // Get issue by identifier to get its ID\n const issue = await client.issue(identifier)\n if (!issue) {\n throw new LinearServiceError('NOT_FOUND', `Linear issue ${identifier} not found`)\n }\n\n // Create comment using issue ID\n const payload = await client.createComment({\n issueId: issue.id,\n body,\n })\n\n const comment = await payload.comment\n\n if (!comment) {\n throw new LinearServiceError('CLI_ERROR', 'Failed to create Linear comment')\n }\n\n return {\n id: comment.id,\n body: comment.body,\n createdAt: comment.createdAt.toISOString(),\n updatedAt: comment.updatedAt.toISOString(),\n url: comment.url,\n }\n } catch (error) {\n if (error instanceof LinearServiceError) {\n throw error\n }\n handleLinearError(error, 'createLinearComment')\n }\n}\n\n/**\n * Update a Linear issue's workflow state\n * @param identifier - Linear issue identifier (e.g., \"ENG-123\")\n * @param stateName - Target state name (e.g., \"In Progress\", \"Done\")\n * @throws LinearServiceError on update failure\n */\nexport async function updateLinearIssueState(\n identifier: string,\n stateName: string,\n): Promise<void> {\n try {\n logger.debug(`Updating Linear issue ${identifier} state to: ${stateName}`)\n const client = createLinearClient()\n\n // Get issue by identifier\n const issue = await client.issue(identifier)\n if (!issue) {\n throw new LinearServiceError('NOT_FOUND', `Linear issue ${identifier} not found`)\n }\n\n // Get team to find state\n const team = await issue.team\n if (!team) {\n throw new LinearServiceError('CLI_ERROR', 'Issue has no team')\n }\n\n // Find state by name\n const states = await team.states()\n const state = states.nodes.find((s) => s.name === stateName)\n\n if (!state) {\n throw new LinearServiceError(\n 'NOT_FOUND',\n `State \"${stateName}\" not found in team ${team.key}`,\n )\n }\n\n // Update issue state\n await client.updateIssue(issue.id, {\n stateId: state.id,\n })\n } catch (error) {\n if (error instanceof LinearServiceError) {\n throw error\n }\n handleLinearError(error, 'updateLinearIssueState')\n }\n}\n\n/**\n * Get a specific comment by ID\n * @param commentId - Linear comment UUID\n * @returns Comment details\n * @throws LinearServiceError if comment not found\n */\nexport async function getLinearComment(commentId: string): Promise<LinearComment> {\n try {\n logger.debug(`Fetching Linear comment: ${commentId}`)\n const client = createLinearClient()\n const comment = await client.comment({ id: commentId })\n\n if (!comment) {\n throw new LinearServiceError('NOT_FOUND', `Linear comment ${commentId} not found`)\n }\n\n return {\n id: comment.id,\n body: comment.body,\n createdAt: comment.createdAt.toISOString(),\n updatedAt: comment.updatedAt.toISOString(),\n url: comment.url,\n }\n } catch (error) {\n if (error instanceof LinearServiceError) {\n throw error\n 
}\n handleLinearError(error, 'getLinearComment')\n }\n}\n\n/**\n * Update an existing comment\n * @param commentId - Linear comment UUID\n * @param body - New comment body (markdown)\n * @returns Updated comment details\n * @throws LinearServiceError on update failure\n */\nexport async function updateLinearComment(\n commentId: string,\n body: string,\n): Promise<LinearComment> {\n try {\n logger.debug(`Updating Linear comment: ${commentId}`)\n const client = createLinearClient()\n\n const payload = await client.updateComment(commentId, { body })\n const comment = await payload.comment\n\n if (!comment) {\n throw new LinearServiceError('CLI_ERROR', 'Failed to update Linear comment')\n }\n\n return {\n id: comment.id,\n body: comment.body,\n createdAt: comment.createdAt.toISOString(),\n updatedAt: comment.updatedAt.toISOString(),\n url: comment.url,\n }\n } catch (error) {\n if (error instanceof LinearServiceError) {\n throw error\n }\n handleLinearError(error, 'updateLinearComment')\n }\n}\n\n/**\n * Fetch all comments for a Linear issue\n * @param identifier - Linear issue identifier (e.g., \"ENG-123\")\n * @returns Array of comments\n * @throws LinearServiceError on fetch failure\n */\nexport async function fetchLinearIssueComments(identifier: string): Promise<LinearComment[]> {\n try {\n logger.debug(`Fetching comments for Linear issue: ${identifier}`)\n const client = createLinearClient()\n\n // Get issue by identifier\n const issue = await client.issue(identifier)\n if (!issue) {\n throw new LinearServiceError('NOT_FOUND', `Linear issue ${identifier} not found`)\n }\n\n // Fetch comments\n const comments = await issue.comments({ first: 100 })\n\n return comments.nodes.map((comment) => ({\n id: comment.id,\n body: comment.body,\n createdAt: comment.createdAt.toISOString(),\n updatedAt: comment.updatedAt.toISOString(),\n url: comment.url,\n }))\n } catch (error) {\n if (error instanceof LinearServiceError) {\n throw error\n }\n handleLinearError(error, 'fetchLinearIssueComments')\n 
}\n}\n"],"mappings":";;;;;;AAiEO,IAAM,qBAAN,MAAM,4BAA2B,MAAM;AAAA,EAC5C,YACS,MACP,SACO,SACP;AACA,UAAM,OAAO;AAJN;AAEA;AAGP,SAAK,OAAO;AAEZ,QAAI,MAAM,mBAAmB;AAC3B,YAAM,kBAAkB,MAAM,mBAAkB;AAAA,IAClD;AAAA,EACF;AACF;;;ACzEA,SAAS,oBAAoB;AAYtB,SAAS,aAAa,OAAe,YAAoB,IAAY;AAE1E,QAAM,OAAO,MACV,YAAY,EACZ,QAAQ,eAAe,GAAG,EAC1B,QAAQ,YAAY,EAAE;AAGzB,MAAI,KAAK,UAAU,WAAW;AAC5B,WAAO;AAAA,EACT;AAGA,QAAM,QAAQ,KAAK,MAAM,GAAG;AAC5B,MAAI,SAAS;AACb,aAAW,QAAQ,OAAO;AACxB,UAAM,YAAY,SAAS,GAAG,MAAM,IAAI,IAAI,KAAK;AACjD,QAAI,UAAU,SAAS,WAAW;AAChC;AAAA,IACF;AACA,aAAS;AAAA,EACX;AAEA,SAAO,UAAU,KAAK,MAAM,GAAG,SAAS;AAC1C;AAQO,SAAS,oBAAoB,YAAoB,OAAwB;AAC9E,QAAM,OAAO,4BAA4B,UAAU;AACnD,MAAI,OAAO;AACT,UAAM,OAAO,aAAa,KAAK;AAC/B,WAAO,OAAO,GAAG,IAAI,IAAI,IAAI,KAAK;AAAA,EACpC;AACA,SAAO;AACT;AAOA,SAAS,oBAA4B;AACnC,QAAM,QAAQ,QAAQ,IAAI;AAC1B,MAAI,CAAC,OAAO;AACV,UAAM,IAAI;AAAA,MACR;AAAA,MACA;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAMA,SAAS,qBAAmC;AAC1C,SAAO,IAAI,aAAa,EAAE,QAAQ,kBAAkB,EAAE,CAAC;AACzD;AAQA,SAAS,kBAAkB,OAAgB,SAAwB;AACjE,SAAO,MAAM,GAAG,OAAO,oBAAoB,EAAE,MAAM,CAAC;AAGpD,QAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAG1E,MAAI,aAAa,SAAS,WAAW,KAAK,aAAa,SAAS,WAAW,GAAG;AAC5E,UAAM,IAAI,mBAAmB,aAAa,sCAAsC,EAAE,MAAM,CAAC;AAAA,EAC3F;AAEA,MACE,aAAa,SAAS,cAAc,KACpC,aAAa,SAAS,cAAc,KACpC,aAAa,SAAS,iBAAiB,GACvC;AACA,UAAM,IAAI;AAAA,MACR;AAAA,MACA;AAAA,MACA,EAAE,MAAM;AAAA,IACV;AAAA,EACF;AAEA,MAAI,aAAa,SAAS,YAAY,GAAG;AACvC,UAAM,IAAI,mBAAmB,gBAAgB,kCAAkC,EAAE,MAAM,CAAC;AAAA,EAC1F;AAGA,QAAM,IAAI,mBAAmB,aAAa,qBAAqB,YAAY,IAAI,EAAE,MAAM,CAAC;AAC1F;AAQA,eAAsB,iBAAiB,YAA0C;AAC/E,MAAI;AACF,WAAO,MAAM,0BAA0B,UAAU,EAAE;AACnD,UAAM,SAAS,mBAAmB;AAClC,UAAM,QAAQ,MAAM,OAAO,MAAM,UAAU;AAE3C,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,mBAAmB,aAAa,gBAAgB,UAAU,YAAY;AAAA,IAClF;AAGA,UAAM,SAAsB;AAAA,MAC1B,IAAI,MAAM;AAAA,MACV,YAAY,MAAM;AAAA,MAClB,OAAO,MAAM;AAAA,MACb,KAAK,MAAM;AAAA,MACX,WAAW,MAAM,UAAU,YAAY;AAAA,MACvC,WAAW,MAAM,UAAU,YAAY;AAAA,IACzC;AAGA,QAAI,MAAM,aAAa;AACrB,aAAO,cAAc,MAAM;AAAA,IAC7B;AAEA,QAAI,MAAM,OAAO;AACf,YAAM,QAAQ,MAAM,MAAM;AAC1B,UAAI,+BAAO,MAAM;AACf,eAAO,QAAQ,MAAM;AAAA,MACvB;AAAA,IACF;AAEA,WAAO;AAAA,EACT,SAAS,OAAO;AACd,QAAI,iBAAiB,oBAAoB;AACvC,YAAM;AAAA,IACR;AACA,sBAAkB,OAAO,kBAAkB;AAAA,EAC7C;AACF;AAWA,eAAsB,kBACpB,OACA,MACA,SACA,SAC8C;AAC9C,MAAI;AACF,WAAO,MAAM,iCAAiC,OAAO,KAAK,KAAK,EAAE;AACjE,UAAM,SAAS,mBAAmB;AAGlC,UAAM,QAAQ,MAAM,OAAO,MAAM;AACjC,UAAM,OAAO,MAAM,MAAM,KAAK,CAAC,MAAM,EAAE,QAAQ,OAAO;AAEtD,QAAI,CAAC,MAAM;AACT,YAAM,IAAI,mBAAmB,aAAa,eAAe,OAAO,YAAY;AAAA,IAC9E;AAGA,UAAM,aAAsE;AAAA,MAC1E,QAAQ,KAAK;AAAA,MACb;AAAA,IACF;AAEA,QAAI,MAAM;AACR,iBAAW,cAAc;AAAA,IAC3B;AAEA,UAAM,UAAU,MAAM,OAAO,YAAY,UAAU;AAEnD,UAAM,QAAQ,MAAM,QAAQ;AAE5B,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,mBAAmB,aAAa,+BAA+B;AAAA,IAC3E;AAGA,UAAM,MAAM,MAAM,OAAO,oBAAoB,MAAM,YAAY,KAAK;AAEpE,WAAO;AAAA,MACL,YAAY,MAAM;AAAA,MAClB;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,QAAI,iBAAiB,oBAAoB;AACvC,YAAM;AAAA,IACR;AACA,sBAAkB,OAAO,mBAAmB;AAAA,EAC9C;AACF;AAaA,eAAsB,uBACpB,OACA,MACA,SACA,UACA,SAC8C;AAC9C,MAAI;AACF,WAAO,MAAM,uCAAuC,OAAO,KAAK,KAAK,EAAE;AACvE,UAAM,SAAS,mBAAmB;AAGlC,UAAM,QAAQ,MAAM,OAAO,MAAM;AACjC,UAAM,OAAO,MAAM,MAAM,KAAK,CAAC,MAAM,EAAE,QAAQ,OAAO;AAEtD,QAAI,CAAC,MAAM;AACT,YAAM,IAAI,mBAAmB,aAAa,eAAe,OAAO,YAAY;AAAA,IAC9E;AAGA,UAAM,aAAwF;AAAA,MAC5F,QAAQ,KAAK;AAAA,MACb;AAAA,MACA;AAAA;AAAA,IACF;AAEA,QAAI,MAAM;AACR,iBAAW,cAAc;AAAA,IAC3B;AAEA,UAAM,UAAU,MAAM,OAAO,YAAY,UAAU;AAEnD,UAAM,QAAQ,MAAM,QAAQ;AAE5B,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,mBAAmB,aAAa,qCAAqC;AAAA,IACjF;AAGA,UAAM,MAAM,MAAM,OAAO,oBAAoB,MAAM,YAAY,KAAK;AAEpE,WAAO;AAAA,MACL,YAAY,MAAM;AAAA,MAClB;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,QAAI,iBAAiB,oBAAoB;AACvC,YAAM;AAAA,IACR;AACA,sBAAk
B,OAAO,wBAAwB;AAAA,EACnD;AACF;AASA,eAAsB,oBACpB,YACA,MACwB;AACxB,MAAI;AACF,WAAO,MAAM,oCAAoC,UAAU,EAAE;AAC7D,UAAM,SAAS,mBAAmB;AAGlC,UAAM,QAAQ,MAAM,OAAO,MAAM,UAAU;AAC3C,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,mBAAmB,aAAa,gBAAgB,UAAU,YAAY;AAAA,IAClF;AAGA,UAAM,UAAU,MAAM,OAAO,cAAc;AAAA,MACzC,SAAS,MAAM;AAAA,MACf;AAAA,IACF,CAAC;AAED,UAAM,UAAU,MAAM,QAAQ;AAE9B,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI,mBAAmB,aAAa,iCAAiC;AAAA,IAC7E;AAEA,WAAO;AAAA,MACL,IAAI,QAAQ;AAAA,MACZ,MAAM,QAAQ;AAAA,MACd,WAAW,QAAQ,UAAU,YAAY;AAAA,MACzC,WAAW,QAAQ,UAAU,YAAY;AAAA,MACzC,KAAK,QAAQ;AAAA,IACf;AAAA,EACF,SAAS,OAAO;AACd,QAAI,iBAAiB,oBAAoB;AACvC,YAAM;AAAA,IACR;AACA,sBAAkB,OAAO,qBAAqB;AAAA,EAChD;AACF;AAQA,eAAsB,uBACpB,YACA,WACe;AACf,MAAI;AACF,WAAO,MAAM,yBAAyB,UAAU,cAAc,SAAS,EAAE;AACzE,UAAM,SAAS,mBAAmB;AAGlC,UAAM,QAAQ,MAAM,OAAO,MAAM,UAAU;AAC3C,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,mBAAmB,aAAa,gBAAgB,UAAU,YAAY;AAAA,IAClF;AAGA,UAAM,OAAO,MAAM,MAAM;AACzB,QAAI,CAAC,MAAM;AACT,YAAM,IAAI,mBAAmB,aAAa,mBAAmB;AAAA,IAC/D;AAGA,UAAM,SAAS,MAAM,KAAK,OAAO;AACjC,UAAM,QAAQ,OAAO,MAAM,KAAK,CAAC,MAAM,EAAE,SAAS,SAAS;AAE3D,QAAI,CAAC,OAAO;AACV,YAAM,IAAI;AAAA,QACR;AAAA,QACA,UAAU,SAAS,uBAAuB,KAAK,GAAG;AAAA,MACpD;AAAA,IACF;AAGA,UAAM,OAAO,YAAY,MAAM,IAAI;AAAA,MACjC,SAAS,MAAM;AAAA,IACjB,CAAC;AAAA,EACH,SAAS,OAAO;AACd,QAAI,iBAAiB,oBAAoB;AACvC,YAAM;AAAA,IACR;AACA,sBAAkB,OAAO,wBAAwB;AAAA,EACnD;AACF;AAQA,eAAsB,iBAAiB,WAA2C;AAChF,MAAI;AACF,WAAO,MAAM,4BAA4B,SAAS,EAAE;AACpD,UAAM,SAAS,mBAAmB;AAClC,UAAM,UAAU,MAAM,OAAO,QAAQ,EAAE,IAAI,UAAU,CAAC;AAEtD,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI,mBAAmB,aAAa,kBAAkB,SAAS,YAAY;AAAA,IACnF;AAEA,WAAO;AAAA,MACL,IAAI,QAAQ;AAAA,MACZ,MAAM,QAAQ;AAAA,MACd,WAAW,QAAQ,UAAU,YAAY;AAAA,MACzC,WAAW,QAAQ,UAAU,YAAY;AAAA,MACzC,KAAK,QAAQ;AAAA,IACf;AAAA,EACF,SAAS,OAAO;AACd,QAAI,iBAAiB,oBAAoB;AACvC,YAAM;AAAA,IACR;AACA,sBAAkB,OAAO,kBAAkB;AAAA,EAC7C;AACF;AASA,eAAsB,oBACpB,WACA,MACwB;AACxB,MAAI;AACF,WAAO,MAAM,4BAA4B,SAAS,EAAE;AACpD,UAAM,SAAS,mBAAmB;AAElC,UAAM,UAAU,MAAM,OAAO,cAAc,WAAW,EAAE,KAAK,CAAC;AAC9D,UAAM,UAAU,MAAM,QAAQ;AAE9B,QAAI,CAAC,SAAS;AACZ,YAAM,IAAI,mBAAmB,aAAa,iCAAiC;AAAA,IAC7E;AAEA,WAAO;AAAA,MACL,IAAI,QAAQ;AAAA,MACZ,MAAM,QAAQ;AAAA,MACd,WAAW,QAAQ,UAAU,YAAY;AAAA,MACzC,WAAW,QAAQ,UAAU,YAAY;AAAA,MACzC,KAAK,QAAQ;AAAA,IACf;AAAA,EACF,SAAS,OAAO;AACd,QAAI,iBAAiB,oBAAoB;AACvC,YAAM;AAAA,IACR;AACA,sBAAkB,OAAO,qBAAqB;AAAA,EAChD;AACF;AAQA,eAAsB,yBAAyB,YAA8C;AAC3F,MAAI;AACF,WAAO,MAAM,uCAAuC,UAAU,EAAE;AAChE,UAAM,SAAS,mBAAmB;AAGlC,UAAM,QAAQ,MAAM,OAAO,MAAM,UAAU;AAC3C,QAAI,CAAC,OAAO;AACV,YAAM,IAAI,mBAAmB,aAAa,gBAAgB,UAAU,YAAY;AAAA,IAClF;AAGA,UAAM,WAAW,MAAM,MAAM,SAAS,EAAE,OAAO,IAAI,CAAC;AAEpD,WAAO,SAAS,MAAM,IAAI,CAAC,aAAa;AAAA,MACtC,IAAI,QAAQ;AAAA,MACZ,MAAM,QAAQ;AAAA,MACd,WAAW,QAAQ,UAAU,YAAY;AAAA,MACzC,WAAW,QAAQ,UAAU,YAAY;AAAA,MACzC,KAAK,QAAQ;AAAA,IACf,EAAE;AAAA,EACJ,SAAS,OAAO;AACd,QAAI,iBAAiB,oBAAoB;AACvC,YAAM;AAAA,IACR;AACA,sBAAkB,OAAO,0BAA0B;AAAA,EACrD;AACF;","names":[]}
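The sources embedded in this new map (src/types/linear.ts and src/utils/linear.ts) introduce Linear SDK helpers: slugifyTitle, buildLinearIssueUrl, fetchLinearIssue, issue/comment creation, state updates, and a LinearServiceError type. A minimal usage sketch of the URL helpers and issue fetch, assuming LINEAR_API_TOKEN is set and that the identifier ENG-123 and the import path are hypothetical:

    import { slugifyTitle, buildLinearIssueUrl, fetchLinearIssue } from './utils/linear.js'

    // slugifyTitle lowercases, hyphenates, and truncates at word boundaries (default max 50 chars)
    slugifyTitle('Fix: Rebase conflicts break CLI builds')
    // -> 'fix-rebase-conflicts-break-cli-builds'

    // buildLinearIssueUrl appends the slug only when a title is provided
    buildLinearIssueUrl('ENG-123', 'Fix rebase conflicts')
    // -> 'https://linear.app/issue/ENG-123/fix-rebase-conflicts'

    // fetchLinearIssue reads LINEAR_API_TOKEN and throws LinearServiceError
    // ('UNAUTHORIZED', 'NOT_FOUND', 'RATE_LIMITED', ...) on failure
    const issue = await fetchLinearIssue('ENG-123')
    console.log(issue.identifier, issue.state ?? 'unknown')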
@@ -7,7 +7,7 @@ import {
  } from "./chunk-ZX3GTM7O.js";
  import {
  launchClaude
- } from "./chunk-AEIMYF4P.js";
+ } from "./chunk-FP7G7DG3.js";
  import {
  getLogger
  } from "./chunk-6MLEBAYZ.js";
@@ -151,4 +151,4 @@ export {
  IssueEnhancementService,
  capitalizeFirstLetter
  };
- //# sourceMappingURL=chunk-7HIRPCKU.js.map
+ //# sourceMappingURL=chunk-HVQNVRAF.js.map
@@ -130,4 +130,4 @@ export {
  getPackageScripts,
  getExplicitCapabilities
  };
- //# sourceMappingURL=chunk-BXCPJJYM.js.map
+ //# sourceMappingURL=chunk-ITN64ENQ.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/utils/package-json.ts"],"sourcesContent":["import fs from 'fs-extra'\nimport path from 'path'\nimport { getLogger } from './logger-context.js'\nimport type { ProjectCapability } from '../types/loom.js'\n\n/**\n * Path to the iloom package configuration file (relative to project root)\n * This file allows non-Node.js projects to define scripts for iloom workflows\n */\nexport const ILOOM_PACKAGE_PATH = '.iloom/package.iloom.json'\n\nexport interface PackageJson {\n name: string\n version?: string\n bin?: string | Record<string, string>\n dependencies?: Record<string, string>\n devDependencies?: Record<string, string>\n scripts?: Record<string, string>\n capabilities?: ProjectCapability[]\n [key: string]: unknown\n}\n\n/**\n * Source of a script - determines how it should be executed\n * - 'package-manager': Execute via package manager (pnpm/npm/yarn)\n * - 'iloom-config': Execute directly as shell command\n */\nexport type ScriptSource = 'package-manager' | 'iloom-config'\n\n/**\n * Configuration for a single script including its source\n * The source determines whether to use package manager or direct shell execution\n */\nexport interface PackageScriptConfig {\n /** The script command to execute */\n command: string\n /** Source of the script - determines execution method */\n source: ScriptSource\n}\n\n/**\n * Read and parse package.json from a directory\n * @param dir Directory containing package.json\n * @returns Parsed package.json object\n * @throws Error if package.json doesn't exist or contains invalid JSON\n */\nexport async function readPackageJson(dir: string): Promise<PackageJson> {\n const pkgPath = path.join(dir, 'package.json')\n\n try {\n const pkgJson = await fs.readJson(pkgPath)\n return pkgJson as PackageJson\n } catch (error) {\n if ((error as { code?: string }).code === 'ENOENT') {\n throw new Error(`package.json not found in ${dir}`)\n }\n const message = error instanceof Error ? error.message : 'Unknown error'\n throw new Error(`Invalid package.json in ${dir}: ${message}`)\n }\n}\n\n/**\n * Read scripts from .iloom/package.iloom.json if it exists\n * This file takes precedence over package.json and contains raw shell commands\n * @param dir Directory containing .iloom/package.iloom.json\n * @returns PackageJson-like object with scripts, or null if file doesn't exist\n */\nexport async function readIloomPackageScripts(dir: string): Promise<PackageJson | null> {\n const iloomPkgPath = path.join(dir, ILOOM_PACKAGE_PATH)\n\n try {\n const exists = await fs.pathExists(iloomPkgPath)\n if (!exists) {\n return null\n }\n\n const content = await fs.readJson(iloomPkgPath)\n return content as PackageJson\n } catch (error) {\n const message = error instanceof Error ? 
error.message : 'Unknown error'\n getLogger().warn(`Failed to read ${ILOOM_PACKAGE_PATH}: ${message}`)\n return null\n }\n}\n\n/**\n * Read package configuration for a project, merging .iloom/package.iloom.json scripts over package.json\n * This allows non-Node.js projects to define scripts for iloom workflows while preserving\n * all other package.json fields (name, version, bin, dependencies, etc.)\n *\n * @param dir Directory to read package configuration from\n * @returns PackageJson object with merged scripts (iloom scripts take precedence)\n * @throws Error if neither file exists or contains valid JSON\n */\nexport async function getPackageConfig(dir: string): Promise<PackageJson> {\n // Check for .iloom/package.iloom.json first\n const iloomPackage = await readIloomPackageScripts(dir)\n\n if (iloomPackage) {\n // Try to read package.json as base\n try {\n const basePackage = await readPackageJson(dir)\n getLogger().debug('Merging scripts from .iloom/package.iloom.json over package.json')\n // Merge: base package.json with iloom scripts taking precedence\n return {\n ...basePackage,\n scripts: {\n ...basePackage.scripts,\n ...iloomPackage.scripts,\n },\n }\n } catch {\n // No package.json - use iloom package as-is (non-Node project)\n getLogger().debug('Using scripts from .iloom/package.iloom.json (no package.json)')\n return iloomPackage\n }\n }\n\n // Fall back to package.json only\n return readPackageJson(dir)\n}\n\n/**\n * Parse bin field into normalized Record format\n * @param bin The bin field from package.json (string or object)\n * @param packageName Package name to use for string bin variant\n * @returns Normalized bin entries as Record<string, string>\n */\nexport function parseBinField(\n bin: string | Record<string, string> | undefined,\n packageName: string\n): Record<string, string> {\n if (!bin) {\n return {}\n }\n\n if (typeof bin === 'string') {\n return { [packageName]: bin }\n }\n\n return bin\n}\n\n/**\n * Check if package.json indicates a web application\n * @param pkgJson Parsed package.json object\n * @returns true if package has web framework dependencies\n */\nexport function hasWebDependencies(pkgJson: PackageJson): boolean {\n const webIndicators = [\n 'next',\n 'vite',\n 'express',\n 'react-scripts',\n 'nuxt',\n 'svelte-kit',\n 'astro',\n 'remix',\n 'fastify',\n 'koa',\n 'hapi',\n '@angular/core',\n 'gatsby',\n '@11ty/eleventy',\n 'ember-cli'\n ]\n\n const allDeps = {\n ...pkgJson.dependencies,\n ...pkgJson.devDependencies\n }\n\n return webIndicators.some(indicator => indicator in allDeps)\n}\n\n/**\n * Check if package.json has a specific script\n * @param pkgJson Parsed package.json object\n * @param scriptName Script name to check for\n * @returns true if script exists\n */\nexport function hasScript(pkgJson: PackageJson, scriptName: string): boolean {\n return !!pkgJson.scripts?.[scriptName]\n}\n\n/**\n * Get all scripts with their source metadata\n * Scripts from .iloom/package.iloom.json are marked as 'iloom-config' and should be executed directly\n * Scripts from package.json are marked as 'package-manager' and should use pnpm/npm/yarn\n *\n * @param dir Directory to read package configuration from\n * @returns Map of script names to their configurations including source\n */\nexport async function getPackageScripts(dir: string): Promise<Record<string, PackageScriptConfig>> {\n const scripts: Record<string, PackageScriptConfig> = {}\n\n // First, check if package.json exists and read scripts (these are package-manager sourced)\n const 
packageJsonPath = path.join(dir, 'package.json')\n if (await fs.pathExists(packageJsonPath)) {\n const pkgJson = await readPackageJson(dir)\n if (pkgJson.scripts) {\n for (const [name, command] of Object.entries(pkgJson.scripts)) {\n scripts[name] = { command, source: 'package-manager' }\n }\n }\n }\n\n // Then, read iloom package scripts (these override and are iloom-config sourced)\n const iloomPackage = await readIloomPackageScripts(dir)\n if (iloomPackage?.scripts) {\n for (const [name, command] of Object.entries(iloomPackage.scripts)) {\n scripts[name] = { command, source: 'iloom-config' }\n }\n }\n\n return scripts\n}\n\n/**\n * Valid capability values that can be explicitly declared\n */\nconst VALID_CAPABILITIES: readonly ProjectCapability[] = ['cli', 'web'] as const\n\n/**\n * Extract explicit capabilities from package configuration\n * Used for non-Node.js projects that declare capabilities in package.iloom.json\n * @param pkgJson Parsed package configuration object\n * @returns Array of valid ProjectCapability values, or empty array if none declared\n */\nexport function getExplicitCapabilities(pkgJson: PackageJson): ProjectCapability[] {\n // Return empty if no capabilities field or not an array\n if (!pkgJson.capabilities || !Array.isArray(pkgJson.capabilities)) {\n return []\n }\n\n // Filter to only valid ProjectCapability values\n return pkgJson.capabilities.filter(\n (cap): cap is ProjectCapability => VALID_CAPABILITIES.includes(cap as ProjectCapability)\n )\n}\n"],"mappings":";;;;;;AAAA,OAAO,QAAQ;AACf,OAAO,UAAU;AAQV,IAAM,qBAAqB;AAqClC,eAAsB,gBAAgB,KAAmC;AACvE,QAAM,UAAU,KAAK,KAAK,KAAK,cAAc;AAE7C,MAAI;AACF,UAAM,UAAU,MAAM,GAAG,SAAS,OAAO;AACzC,WAAO;AAAA,EACT,SAAS,OAAO;AACd,QAAK,MAA4B,SAAS,UAAU;AAClD,YAAM,IAAI,MAAM,6BAA6B,GAAG,EAAE;AAAA,IACpD;AACA,UAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU;AACzD,UAAM,IAAI,MAAM,2BAA2B,GAAG,KAAK,OAAO,EAAE;AAAA,EAC9D;AACF;AAQA,eAAsB,wBAAwB,KAA0C;AACtF,QAAM,eAAe,KAAK,KAAK,KAAK,kBAAkB;AAEtD,MAAI;AACF,UAAM,SAAS,MAAM,GAAG,WAAW,YAAY;AAC/C,QAAI,CAAC,QAAQ;AACX,aAAO;AAAA,IACT;AAEA,UAAM,UAAU,MAAM,GAAG,SAAS,YAAY;AAC9C,WAAO;AAAA,EACT,SAAS,OAAO;AACd,UAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU;AACzD,cAAU,EAAE,KAAK,kBAAkB,kBAAkB,KAAK,OAAO,EAAE;AACnE,WAAO;AAAA,EACT;AACF;AAWA,eAAsB,iBAAiB,KAAmC;AAExE,QAAM,eAAe,MAAM,wBAAwB,GAAG;AAEtD,MAAI,cAAc;AAEhB,QAAI;AACF,YAAM,cAAc,MAAM,gBAAgB,GAAG;AAC7C,gBAAU,EAAE,MAAM,kEAAkE;AAEpF,aAAO;AAAA,QACL,GAAG;AAAA,QACH,SAAS;AAAA,UACP,GAAG,YAAY;AAAA,UACf,GAAG,aAAa;AAAA,QAClB;AAAA,MACF;AAAA,IACF,QAAQ;AAEN,gBAAU,EAAE,MAAM,gEAAgE;AAClF,aAAO;AAAA,IACT;AAAA,EACF;AAGA,SAAO,gBAAgB,GAAG;AAC5B;AAQO,SAAS,cACd,KACA,aACwB;AACxB,MAAI,CAAC,KAAK;AACR,WAAO,CAAC;AAAA,EACV;AAEA,MAAI,OAAO,QAAQ,UAAU;AAC3B,WAAO,EAAE,CAAC,WAAW,GAAG,IAAI;AAAA,EAC9B;AAEA,SAAO;AACT;AAOO,SAAS,mBAAmB,SAA+B;AAChE,QAAM,gBAAgB;AAAA,IACpB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,QAAM,UAAU;AAAA,IACd,GAAG,QAAQ;AAAA,IACX,GAAG,QAAQ;AAAA,EACb;AAEA,SAAO,cAAc,KAAK,eAAa,aAAa,OAAO;AAC7D;AAQO,SAAS,UAAU,SAAsB,YAA6B;AArL7E;AAsLE,SAAO,CAAC,GAAC,aAAQ,YAAR,mBAAkB;AAC7B;AAUA,eAAsB,kBAAkB,KAA2D;AACjG,QAAM,UAA+C,CAAC;AAGtD,QAAM,kBAAkB,KAAK,KAAK,KAAK,cAAc;AACrD,MAAI,MAAM,GAAG,WAAW,eAAe,GAAG;AACxC,UAAM,UAAU,MAAM,gBAAgB,GAAG;AACzC,QAAI,QAAQ,SAAS;AACnB,iBAAW,CAAC,MAAM,OAAO,KAAK,OAAO,QAAQ,QAAQ,OAAO,GAAG;AAC7D,gBAAQ,IAAI,IAAI,EAAE,SAAS,QAAQ,kBAAkB;AAAA,MACvD;AAAA,IACF;AAAA,EACF;AAGA,QAAM,eAAe,MAAM,wBAAwB,GAAG;AACtD,MAAI,6CAAc,SAAS;AACzB,eAAW,CAAC,MAAM,OAAO,KAAK,OAAO,QAAQ,aAAa,OAAO,GAAG;AAClE,cAAQ,IAAI,IAAI,EAAE,SAAS
,QAAQ,eAAe;AAAA,IACpD;AAAA,EACF;AAEA,SAAO;AACT;AAKA,IAAM,qBAAmD,CAAC,OAAO,KAAK;AAQ/D,SAAS,wBAAwB,SAA2C;AAEjF,MAAI,CAAC,QAAQ,gBAAgB,CAAC,MAAM,QAAQ,QAAQ,YAAY,GAAG;AACjE,WAAO,CAAC;AAAA,EACV;AAGA,SAAO,QAAQ,aAAa;AAAA,IAC1B,CAAC,QAAkC,mBAAmB,SAAS,GAAwB;AAAA,EACzF;AACF;","names":[]}
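The embedded src/utils/package-json.ts source adds ILOOM_PACKAGE_PATH ('.iloom/package.iloom.json'), letting non-Node.js projects declare scripts that are merged over package.json. A small sketch of the merge behaviour, assuming the hypothetical project layout shown in the comments and a hypothetical import path:

    import { getPackageConfig, getPackageScripts } from './utils/package-json.js'

    // Hypothetical layout:
    //   package.json              -> { "scripts": { "build": "tsc" } }
    //   .iloom/package.iloom.json -> { "scripts": { "build": "make build" } }
    const dir = '/path/to/project' // hypothetical

    // getPackageConfig merges scripts, with .iloom/package.iloom.json taking precedence
    const pkg = await getPackageConfig(dir)
    pkg.scripts?.build // -> 'make build'

    // getPackageScripts also records the source: 'package-manager' scripts run via pnpm/npm/yarn,
    // 'iloom-config' scripts run directly as shell commands
    const scripts = await getPackageScripts(dir)
    scripts.build // -> { command: 'make build', source: 'iloom-config' }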
@@ -1,15 +1,15 @@
  #!/usr/bin/env node
  import {
  PromptTemplateManager
- } from "./chunk-W6WVRHJ6.js";
+ } from "./chunk-TIYJEEVO.js";
  import {
  SettingsManager
- } from "./chunk-F6WVM437.js";
+ } from "./chunk-WFQ5CLTR.js";
  import {
  detectClaudeCli,
  launchClaude,
  launchClaudeInNewTerminalWindow
- } from "./chunk-AEIMYF4P.js";
+ } from "./chunk-FP7G7DG3.js";
  import {
  logger
  } from "./chunk-VT4PDUYT.js";
@@ -122,4 +122,4 @@ var ClaudeService = class {
  export {
  ClaudeService
  };
- //# sourceMappingURL=chunk-6U6VI4SZ.js.map
+ //# sourceMappingURL=chunk-KVS4XGBQ.js.map
@@ -1,7 +1,7 @@
  #!/usr/bin/env node
  import {
  getPackageScripts
- } from "./chunk-BXCPJJYM.js";
+ } from "./chunk-ITN64ENQ.js";
  import {
  getLogger
  } from "./chunk-6MLEBAYZ.js";
@@ -161,4 +161,4 @@ export {
  installDependencies,
  runScript
  };
- //# sourceMappingURL=chunk-AXX3QIKK.js.map
+ //# sourceMappingURL=chunk-LLWX3PCW.js.map
@@ -1,20 +1,31 @@
  #!/usr/bin/env node
+ import {
+ ProjectCapabilityDetector
+ } from "./chunk-3CDWFEGL.js";
+ import {
+ detectPackageManager,
+ runScript
+ } from "./chunk-LLWX3PCW.js";
+ import {
+ getPackageConfig,
+ hasScript
+ } from "./chunk-ITN64ENQ.js";
  import {
  executeGitCommand,
  findMainWorktreePathWithSettings,
  findWorktreeForBranch,
  getMergeTargetBranch
- } from "./chunk-GEXP5IOF.js";
+ } from "./chunk-ZA575VLF.js";
  import {
  SettingsManager
- } from "./chunk-F6WVM437.js";
+ } from "./chunk-WFQ5CLTR.js";
  import {
  MetadataManager
- } from "./chunk-CFUWQHCJ.js";
+ } from "./chunk-VWGKGNJP.js";
  import {
  detectClaudeCli,
  launchClaude
- } from "./chunk-AEIMYF4P.js";
+ } from "./chunk-FP7G7DG3.js";
  import {
  getLogger
  } from "./chunk-6MLEBAYZ.js";
@@ -311,12 +322,20 @@ To recover:
  getLogger().info(`Launching Claude to resolve conflicts in ${conflictedFiles.length} file(s)...`);
  const systemPrompt = `Please help resolve the git rebase conflicts in this repository. Analyze the conflicted files, understand the changes from both branches, fix the conflicts, then run 'git add .' to stage the resolved files, and finally run 'git rebase --continue' to continue the rebase process. Once the issue is resolved, tell the user they can use /exit to continue with the process.`;
  const prompt = `Help me with this rebase please.`;
+ const rebaseAllowedTools = [
+ "Bash(git status:*)",
+ "Bash(git diff:*)",
+ "Bash(git log:*)",
+ "Bash(git add:*)",
+ "Bash(git rebase:*)"
+ ];
  try {
  await launchClaude(prompt, {
  appendSystemPrompt: systemPrompt,
  addDir: worktreePath,
- headless: false
+ headless: false,
  // Interactive - runs in current terminal with stdio: inherit
+ allowedTools: rebaseAllowedTools
  });
  const remainingConflicts = await this.detectConflictedFiles(worktreePath);
  if (remainingConflicts.length > 0) {
@@ -366,7 +385,86 @@ To recover:
  }
  };
 
+ // src/lib/BuildRunner.ts
+ var BuildRunner = class {
+ constructor(capabilityDetector) {
+ this.capabilityDetector = capabilityDetector ?? new ProjectCapabilityDetector();
+ }
+ /**
+ * Run build verification in the specified directory
+ * @param buildPath - Path where build should run (typically main worktree path)
+ * @param options - Build options
+ */
+ async runBuild(buildPath, options = {}) {
+ const startTime = Date.now();
+ try {
+ const pkgJson = await getPackageConfig(buildPath);
+ const hasBuildScript = hasScript(pkgJson, "build");
+ if (!hasBuildScript) {
+ getLogger().debug("Skipping build - no build script found");
+ return {
+ success: true,
+ skipped: true,
+ reason: "No build script found in package configuration",
+ duration: Date.now() - startTime
+ };
+ }
+ } catch (error) {
+ if (error instanceof Error && error.message.includes("package.json not found")) {
+ getLogger().debug("Skipping build - no package configuration found");
+ return {
+ success: true,
+ skipped: true,
+ reason: "No package configuration found in project",
+ duration: Date.now() - startTime
+ };
+ }
+ throw error;
+ }
+ const capabilities = await this.capabilityDetector.detectCapabilities(buildPath);
+ const isCLIProject = capabilities.capabilities.includes("cli");
+ if (!isCLIProject) {
+ getLogger().debug("Skipping build - not a CLI project (no bin field)");
+ return {
+ success: true,
+ skipped: true,
+ reason: "Project is not a CLI project (no bin field in package.json)",
+ duration: Date.now() - startTime
+ };
+ }
+ const packageManager = await detectPackageManager(buildPath);
+ if (options.dryRun) {
+ const command = packageManager === "npm" ? "npm run build" : `${packageManager} build`;
+ getLogger().info(`[DRY RUN] Would run: ${command}`);
+ return {
+ success: true,
+ skipped: false,
+ duration: Date.now() - startTime
+ };
+ }
+ getLogger().info("Running build...");
+ try {
+ await runScript("build", buildPath, [], { quiet: true });
+ getLogger().success("Build completed successfully");
+ return {
+ success: true,
+ skipped: false,
+ duration: Date.now() - startTime
+ };
+ } catch {
+ const runCommand = packageManager === "npm" ? "npm run build" : `${packageManager} build`;
+ throw new Error(
+ `Error: Build failed.
+ Fix build errors before proceeding.
+
+ Run '${runCommand}' to see detailed errors.`
+ );
+ }
+ }
+ };
+
  export {
- MergeManager
+ MergeManager,
+ BuildRunner
  };
- //# sourceMappingURL=chunk-WIJWIKAN.js.map
+ //# sourceMappingURL=chunk-LQBLDI47.js.map
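The BuildRunner added in this chunk skips build verification when there is no build script or when the project is not a CLI project (no bin field), and otherwise runs the build script quietly, throwing on failure. A rough caller sketch, assuming the hypothetical worktree path below and that BuildRunner is imported from this chunk:

    const runner = new BuildRunner()

    // dryRun only logs the command that would run (e.g. 'pnpm build' or 'npm run build')
    await runner.runBuild('/path/to/worktree', { dryRun: true })

    // a real run resolves to { success, skipped, reason?, duration } and throws if the build fails
    const result = await runner.runBuild('/path/to/worktree')
    if (result.skipped) {
      console.log(`Build skipped: ${result.reason}`)
    }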
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/lib/MergeManager.ts","../src/lib/BuildRunner.ts"],"sourcesContent":["import { executeGitCommand, findMainWorktreePathWithSettings, findWorktreeForBranch, getMergeTargetBranch } from '../utils/git.js'\nimport { getLogger } from '../utils/logger-context.js'\nimport { detectClaudeCli, launchClaude } from '../utils/claude.js'\nimport { SettingsManager } from './SettingsManager.js'\nimport { MetadataManager } from './MetadataManager.js'\nimport type { MergeOptions } from '../types/index.js'\n\n/**\n * MergeManager handles Git rebase and fast-forward merge operations\n * Implements fail-fast behavior for conflicts (Phase 1 - no Claude assistance)\n *\n * Ports bash/merge-and-clean.sh lines 781-1090\n */\nexport class MergeManager {\n\tprivate settingsManager: SettingsManager\n\tprivate metadataManager: MetadataManager\n\n\tconstructor(settingsManager?: SettingsManager, metadataManager?: MetadataManager) {\n\t\tthis.settingsManager = settingsManager ?? new SettingsManager()\n\t\tthis.metadataManager = metadataManager ?? new MetadataManager()\n\t}\n\n\t/**\n\t * Get the merge target branch for a loom\n\t * Priority: parent loom metadata > configured main branch > 'main'\n\t * @param worktreePath - Optional path to load settings/metadata from (defaults to process.cwd())\n\t * @private\n\t */\n\tprivate async getMainBranch(worktreePath?: string): Promise<string> {\n\t\t// Delegate to shared utility function\n\t\treturn getMergeTargetBranch(worktreePath ?? process.cwd(), {\n\t\t\tsettingsManager: this.settingsManager,\n\t\t\tmetadataManager: this.metadataManager,\n\t\t})\n\t}\n\n\t/**\n\t * Rebase current branch on main with fail-fast on conflicts\n\t * Ports bash/merge-and-clean.sh lines 781-913\n\t *\n\t * @param worktreePath - Path to the worktree\n\t * @param options - Merge options (dryRun, force)\n\t * @throws Error if main branch doesn't exist, uncommitted changes exist, or conflicts occur\n\t */\n\tasync rebaseOnMain(worktreePath: string, options: MergeOptions = {}): Promise<void> {\n\t\tconst { dryRun = false, force = false } = options\n\t\tconst mainBranch = await this.getMainBranch(worktreePath)\n\n\t\tgetLogger().info(`Starting rebase on ${mainBranch} branch...`)\n\n\t\t// Step 1: Check if main branch exists\n\t\ttry {\n\t\t\tawait executeGitCommand(['show-ref', '--verify', '--quiet', `refs/heads/${mainBranch}`], {\n\t\t\t\tcwd: worktreePath,\n\t\t\t})\n\t\t} catch {\n\t\t\tthrow new Error(\n\t\t\t\t`Main branch \"${mainBranch}\" does not exist. 
Cannot rebase.\\n` +\n\t\t\t\t\t`Ensure the repository has a \"${mainBranch}\" branch or create it first.`\n\t\t\t)\n\t\t}\n\n\t\t// Step 2: Check for uncommitted changes and create WIP commit if needed\n\t\tconst statusOutput = await executeGitCommand(['status', '--porcelain'], {\n\t\t\tcwd: worktreePath,\n\t\t})\n\n\t\tlet wipCommitHash: string | null = null\n\t\tif (statusOutput.trim()) {\n\t\t\tgetLogger().info('Uncommitted changes detected, creating temporary WIP commit...')\n\t\t\twipCommitHash = await this.createWipCommit(worktreePath)\n\t\t\tgetLogger().debug(`Created WIP commit: ${wipCommitHash}`)\n\t\t}\n\n\t\t// Step 3: Check if rebase is needed by comparing merge-base with main HEAD\n\t\tconst mergeBase = await executeGitCommand(['merge-base', mainBranch, 'HEAD'], {\n\t\t\tcwd: worktreePath,\n\t\t})\n\n\t\tconst mainHead = await executeGitCommand(['rev-parse', mainBranch], {\n\t\t\tcwd: worktreePath,\n\t\t})\n\n\t\tconst mergeBaseTrimmed = mergeBase.trim()\n\t\tconst mainHeadTrimmed = mainHead.trim()\n\n\t\t// If merge-base matches main HEAD, branch is already up to date\n\t\tif (mergeBaseTrimmed === mainHeadTrimmed) {\n\t\t\tgetLogger().success(`Branch is already up to date with ${mainBranch}. No rebase needed.`)\n\t\t\t// Restore WIP commit if created (soft reset to remove temporary commit)\n\t\t\tif (wipCommitHash) {\n\t\t\t\tawait this.restoreWipCommit(worktreePath, wipCommitHash)\n\t\t\t}\n\t\t\treturn\n\t\t}\n\n\t\t// Step 4: Show commits to be rebased (for informational purposes)\n\t\tconst commitsOutput = await executeGitCommand(['log', '--oneline', `${mainBranch}..HEAD`], {\n\t\t\tcwd: worktreePath,\n\t\t})\n\n\t\tconst commits = commitsOutput.trim()\n\t\tconst commitLines = commits ? commits.split('\\n') : []\n\n\t\tif (commits) {\n\t\t\t// Show commits that will be rebased\n\t\t\tgetLogger().info(`Found ${commitLines.length} commit(s) to rebase:`)\n\t\t\tcommitLines.forEach((commit) => getLogger().info(` ${commit}`))\n\t\t} else {\n\t\t\t// Main has moved forward but branch has no new commits\n\t\t\tgetLogger().info(`${mainBranch} branch has moved forward. Rebasing to update branch...`)\n\t\t}\n\n\t\t// Step 5: User confirmation (unless force mode or dry-run)\n\t\tif (!force && !dryRun) {\n\t\t\t// TODO: Implement interactive prompt for confirmation\n\t\t\t// For now, proceeding automatically (use --force to skip this message)\n\t\t\tgetLogger().info('Proceeding with rebase... 
(use --force to skip confirmations)')\n\t\t}\n\n\t\t// Step 6: Execute rebase (unless dry-run)\n\t\tif (dryRun) {\n\t\t\tgetLogger().info(`[DRY RUN] Would execute: git rebase ${mainBranch}`)\n\t\t\tif (commitLines.length > 0) {\n\t\t\t\tgetLogger().info(`[DRY RUN] This would rebase ${commitLines.length} commit(s)`)\n\t\t\t}\n\t\t\treturn\n\t\t}\n\n\t\t// Execute rebase\n\t\t// Use -c core.hooksPath=/dev/null to disable hooks during rebase\n\t\t// This prevents pre-commit hooks from running when commits are re-applied\n\t\ttry {\n\t\t\tawait executeGitCommand(['-c', 'core.hooksPath=/dev/null', 'rebase', mainBranch], { cwd: worktreePath })\n\t\t\tgetLogger().success('Rebase completed successfully!')\n\n\t\t\t// Restore WIP commit if created\n\t\t\tif (wipCommitHash) {\n\t\t\t\tawait this.restoreWipCommit(worktreePath, wipCommitHash)\n\t\t\t}\n\t\t} catch (error) {\n\t\t\t// Detect conflicts\n\t\t\tconst conflictedFiles = await this.detectConflictedFiles(worktreePath)\n\n\t\t\tif (conflictedFiles.length > 0) {\n\t\t\t\t// Try Claude-assisted resolution first\n\t\t\t\tgetLogger().info('Merge conflicts detected, attempting Claude-assisted resolution...')\n\n\t\t\t\tconst resolved = await this.attemptClaudeConflictResolution(\n\t\t\t\t\tworktreePath,\n\t\t\t\t\tconflictedFiles\n\t\t\t\t)\n\n\t\t\t\tif (resolved) {\n\t\t\t\t\tgetLogger().success('Conflicts resolved with Claude assistance, rebase completed')\n\n\t\t\t\t\t// Restore WIP commit if created\n\t\t\t\t\tif (wipCommitHash) {\n\t\t\t\t\t\tawait this.restoreWipCommit(worktreePath, wipCommitHash)\n\t\t\t\t\t}\n\t\t\t\t\treturn // Continue with successful rebase\n\t\t\t\t}\n\n\t\t\t\t// Claude couldn't resolve or not available - fail fast\n\t\t\t\tconst conflictError = this.formatConflictError(conflictedFiles)\n\t\t\t\tthrow new Error(conflictError)\n\t\t\t}\n\n\t\t\t// If not a conflict, re-throw the original error\n\t\t\tthrow new Error(\n\t\t\t\t`Rebase failed: ${error instanceof Error ? error.message : String(error)}\\n` +\n\t\t\t\t\t'Run: git status for more details\\n' +\n\t\t\t\t\t'Or: git rebase --abort to cancel the rebase'\n\t\t\t)\n\t\t}\n\t}\n\n\t/**\n\t * Validate that fast-forward merge is possible\n\t * Ports bash/merge-and-clean.sh lines 957-968\n\t *\n\t * @param branchName - Name of the branch to merge\n\t * @param mainWorktreePath - Path where main branch is checked out\n\t * @throws Error if fast-forward is not possible\n\t */\n\tasync validateFastForwardPossible(mainBranch: string, branchName: string, mainWorktreePath: string): Promise<void> {\n\n\t\t// Step 1: Get merge-base between main and branch\n\t\tconst mergeBase = await executeGitCommand(['merge-base', mainBranch, branchName], {\n\t\t\tcwd: mainWorktreePath,\n\t\t})\n\n\t\t// Step 2: Get current HEAD of main\n\t\tconst mainHead = await executeGitCommand(['rev-parse', mainBranch], {\n\t\t\tcwd: mainWorktreePath,\n\t\t})\n\n\t\t// Step 3: Compare - they must match for fast-forward\n\t\tconst mergeBaseTrimmed = mergeBase.trim()\n\t\tconst mainHeadTrimmed = mainHead.trim()\n\n\t\tif (mergeBaseTrimmed !== mainHeadTrimmed) {\n\t\t\tthrow new Error(\n\t\t\t\t'Cannot perform fast-forward merge.\\n' +\n\t\t\t\t\t`The ${mainBranch} branch has moved forward since this branch was created.\\n` +\n\t\t\t\t\t`Merge base: ${mergeBaseTrimmed}\\n` +\n\t\t\t\t\t`Main HEAD: ${mainHeadTrimmed}\\n\\n` +\n\t\t\t\t\t'To fix this:\\n' +\n\t\t\t\t\t` 1. Rebase the branch on ${mainBranch}: git rebase ${mainBranch}\\n` +\n\t\t\t\t\t` 2. 
Or use: il finish to automatically rebase and merge\\n`\n\t\t\t)\n\t\t}\n\t}\n\n\t/**\n\t * Perform fast-forward only merge\n\t * Ports bash/merge-and-clean.sh lines 938-994\n\t *\n\t * @param branchName - Name of the branch to merge\n\t * @param worktreePath - Path to the worktree\n\t * @param options - Merge options (dryRun, force)\n\t * @throws Error if checkout, validation, or merge fails\n\t */\n\tasync performFastForwardMerge(\n\t\tbranchName: string,\n\t\tworktreePath: string,\n\t\toptions: MergeOptions = {}\n\t): Promise<void> {\n\t\tconst { dryRun = false, force = false } = options\n\n\t\tgetLogger().info('Starting fast-forward merge...')\n\n\t\t// Step 1: Get the merge target branch FIRST\n\t\t// For child looms, this will be the parent branch from metadata\n\t\t// For regular looms, this falls back to settings.mainBranch or 'main'\n\t\tconst mainBranch = await this.getMainBranch(worktreePath)\n\n\t\t// Step 2: Find where the merge target branch is checked out\n\t\t// CRITICAL: We must find the worktree for the MERGE TARGET, not settings.mainBranch\n\t\t// This fixes the child loom bug where we'd find the 'main' worktree instead of the parent branch worktree\n\t\tlet mainWorktreePath: string\n\t\tif (options.repoRoot) {\n\t\t\tmainWorktreePath = options.repoRoot\n\t\t} else {\n\t\t\ttry {\n\t\t\t\t// First try to find worktree with the exact merge target branch checked out\n\t\t\t\tmainWorktreePath = await findWorktreeForBranch(mainBranch, worktreePath)\n\t\t\t} catch {\n\t\t\t\t// Fallback: if no worktree has the branch checked out, use settings-based lookup\n\t\t\t\t// This handles edge cases like bare repos or detached HEAD states\n\t\t\t\tgetLogger().debug(`No worktree found for branch '${mainBranch}', falling back to settings-based lookup`)\n\t\t\t\tmainWorktreePath = await findMainWorktreePathWithSettings(worktreePath, this.settingsManager)\n\t\t\t}\n\t\t}\n\n\t\t// Step 3: No need to checkout - the merge target branch is already checked out in mainWorktreePath\n\t\tgetLogger().debug(`Using ${mainBranch} branch location: ${mainWorktreePath}`)\n\n\t\t// Step 4: Verify we're on the correct branch\n\t\tconst currentBranch = await executeGitCommand(['branch', '--show-current'], {\n\t\t\tcwd: mainWorktreePath,\n\t\t})\n\n\t\tif (currentBranch.trim() !== mainBranch) {\n\t\t\tthrow new Error(\n\t\t\t\t`Expected ${mainBranch} branch but found: ${currentBranch.trim()}\\n` +\n\t\t\t\t\t`At location: ${mainWorktreePath}\\n` +\n\t\t\t\t\t'This indicates the main worktree detection failed.'\n\t\t\t)\n\t\t}\n\n\t\t// Step 5: Validate fast-forward is possible\n\t\tawait this.validateFastForwardPossible(mainBranch, branchName, mainWorktreePath)\n\n\t\t// Step 6: Show commits to be merged\n\t\tconst commitsOutput = await executeGitCommand(['log', '--oneline', `${mainBranch}..${branchName}`], {\n\t\t\tcwd: mainWorktreePath,\n\t\t})\n\n\t\tconst commits = commitsOutput.trim()\n\n\t\t// If no commits, branch has no changes ahead of main\n\t\tif (!commits) {\n\t\t\tgetLogger().success(`Branch has no commits ahead of ${mainBranch}. 
No merge needed.`)\n\t\t\treturn\n\t\t}\n\n\t\t// Show commits that will be merged\n\t\tconst commitLines = commits.split('\\n')\n\t\tgetLogger().info(`Found ${commitLines.length} commit(s) to merge:`)\n\t\tcommitLines.forEach((commit) => getLogger().info(` ${commit}`))\n\n\t\t// Step 7: User confirmation (unless force mode or dry-run)\n\t\tif (!force && !dryRun) {\n\t\t\t// TODO: Implement interactive prompt for confirmation\n\t\t\t// For now, proceeding automatically (use --force to skip this message)\n\t\t\tgetLogger().info('Proceeding with fast-forward merge... (use --force to skip confirmations)')\n\t\t}\n\n\t\t// Step 8: Execute merge (unless dry-run)\n\t\tif (dryRun) {\n\t\t\tgetLogger().info(`[DRY RUN] Would execute: git merge --ff-only ${branchName}`)\n\t\t\tgetLogger().info(`[DRY RUN] This would merge ${commitLines.length} commit(s)`)\n\t\t\treturn\n\t\t}\n\n\t\t// Execute fast-forward merge\n\t\ttry {\n\t\t\tgetLogger().debug(`Executing fast-forward merge of ${branchName} into ${mainBranch} using cwd: ${mainWorktreePath}...`)\n\t\t\tawait executeGitCommand(['merge', '--ff-only', branchName], { cwd: mainWorktreePath })\n\t\t\tgetLogger().success(`Fast-forward merge completed! Merged ${commitLines.length} commit(s).`)\n\t\t} catch (error) {\n\t\t\tthrow new Error(\n\t\t\t\t`Fast-forward merge failed: ${error instanceof Error ? error.message : String(error)}\\n\\n` +\n\t\t\t\t\t'To recover:\\n' +\n\t\t\t\t\t' 1. Check merge status: git status\\n' +\n\t\t\t\t\t' 2. Abort merge if needed: git merge --abort\\n' +\n\t\t\t\t\t' 3. Verify branch is rebased: git rebase main\\n' +\n\t\t\t\t\t' 4. Try merge again: il finish'\n\t\t\t)\n\t\t}\n\t}\n\n\t/**\n\t * Helper: Detect conflicted files after failed rebase\n\t * @private\n\t */\n\tprivate async detectConflictedFiles(worktreePath: string): Promise<string[]> {\n\t\ttry {\n\t\t\tconst output = await executeGitCommand(['diff', '--name-only', '--diff-filter=U'], {\n\t\t\t\tcwd: worktreePath,\n\t\t\t})\n\n\t\t\treturn output\n\t\t\t\t.trim()\n\t\t\t\t.split('\\n')\n\t\t\t\t.filter((file) => file.length > 0)\n\t\t} catch {\n\t\t\t// If command fails, return empty array (might not be a conflict)\n\t\t\treturn []\n\t\t}\n\t}\n\n\t/**\n\t * Create a temporary WIP commit to preserve uncommitted changes during rebase\n\t * Stages all changes (tracked, untracked) using git add -A\n\t * Uses --no-verify to skip pre-commit hooks since this is a temporary internal commit\n\t * @param worktreePath - Path to the worktree\n\t * @returns The commit hash of the WIP commit\n\t * @private\n\t */\n\tprivate async createWipCommit(worktreePath: string): Promise<string> {\n\t\t// Stage all changes including untracked files\n\t\tawait executeGitCommand(['add', '-A'], { cwd: worktreePath })\n\n\t\t// Create WIP commit with distinctive message\n\t\t// Use --no-verify to skip pre-commit hooks - this is a temporary internal commit\n\t\tawait executeGitCommand(['commit', '--no-verify', '-m', 'WIP: Auto-stash for rebase'], { cwd: worktreePath })\n\n\t\t// Get and return the commit hash\n\t\tconst hash = await executeGitCommand(['rev-parse', 'HEAD'], { cwd: worktreePath })\n\t\treturn hash.trim()\n\t}\n\n\t/**\n\t * Restore uncommitted changes from WIP commit via soft reset\n\t * Logs warning but does not fail if soft reset fails (changes are safe in commit history)\n\t * @param worktreePath - Path to the worktree\n\t * @param wipCommitHash - Original WIP commit hash for verification logging\n\t * @private\n\t */\n\tprivate async restoreWipCommit(worktreePath: string, 
wipCommitHash: string): Promise<void> {\n\t\tgetLogger().info('Restoring uncommitted changes from WIP commit...')\n\n\t\ttry {\n\t\t\t// Soft reset to parent - changes become staged\n\t\t\tawait executeGitCommand(['reset', '--soft', 'HEAD~1'], { cwd: worktreePath })\n\n\t\t\t// Unstage files to restore to original working directory state\n\t\t\tawait executeGitCommand(['reset', 'HEAD'], { cwd: worktreePath })\n\n\t\t\tgetLogger().success('Restored uncommitted changes from WIP commit')\n\t\t} catch (error) {\n\t\t\t// Log warning but consider rebase successful - work is not lost\n\t\t\tgetLogger().warn(\n\t\t\t\t`Failed to restore WIP commit (${wipCommitHash}). ` +\n\t\t\t\t\t`Your changes are safe in the commit history. ` +\n\t\t\t\t\t`Manual recovery: git reset --soft HEAD~1`,\n\t\t\t\t{ error: error instanceof Error ? error.message : String(error) }\n\t\t\t)\n\t\t}\n\t}\n\n\t/**\n\t * Helper: Format conflict error message with manual resolution steps\n\t * @private\n\t */\n\tprivate formatConflictError(conflictedFiles: string[]): string {\n\t\tconst fileList = conflictedFiles.map((file) => ` • ${file}`).join('\\n')\n\n\t\treturn (\n\t\t\t'Rebase failed - merge conflicts detected in:\\n' +\n\t\t\tfileList +\n\t\t\t'\\n\\n' +\n\t\t\t'To resolve manually:\\n' +\n\t\t\t' 1. Fix conflicts in the files above\\n' +\n\t\t\t' 2. Stage resolved files: git add <files>\\n' +\n\t\t\t' 3. Continue rebase: git rebase --continue\\n' +\n\t\t\t' 4. Or abort rebase: git rebase --abort\\n' +\n\t\t\t' 5. Then re-run: il finish <issue-number>'\n\t\t)\n\t}\n\n\t/**\n\t * Attempt to resolve conflicts using Claude\n\t * Ports bash/merge-and-clean.sh lines 839-894\n\t *\n\t * @param worktreePath - Path to the worktree\n\t * @param conflictedFiles - List of files with conflicts\n\t * @returns true if conflicts resolved, false otherwise\n\t * @private\n\t */\n\tprivate async attemptClaudeConflictResolution(\n\t\tworktreePath: string,\n\t\tconflictedFiles: string[]\n\t): Promise<boolean> {\n\t\t// Check if Claude CLI is available\n\t\tconst isClaudeAvailable = await detectClaudeCli()\n\t\tif (!isClaudeAvailable) {\n\t\t\tgetLogger().debug('Claude CLI not available, skipping conflict resolution')\n\t\t\treturn false\n\t\t}\n\n\t\tgetLogger().info(`Launching Claude to resolve conflicts in ${conflictedFiles.length} file(s)...`)\n\n\t\t// Hard-coded prompt matching bash script line 844\n\t\t// No templates, no complexity - just the essential instruction\n\t\tconst systemPrompt =\n\t\t\t`Please help resolve the git rebase conflicts in this repository. ` +\n\t\t\t`Analyze the conflicted files, understand the changes from both branches, ` +\n\t\t\t`fix the conflicts, then run 'git add .' to stage the resolved files, ` +\n\t\t\t`and finally run 'git rebase --continue' to continue the rebase process. 
` +\n\t\t\t`Once the issue is resolved, tell the user they can use /exit to continue with the process.`\n\n\t\tconst prompt =\n\t\t\t`Help me with this rebase please.`\n\n\t\t// Git commands to auto-approve during rebase conflict resolution\n\t\t// These are the essential commands Claude needs to analyze and resolve conflicts\n\t\t// Note: git reset and git checkout are intentionally excluded as they can be destructive\n\t\tconst rebaseAllowedTools = [\n\t\t\t'Bash(git status:*)',\n\t\t\t'Bash(git diff:*)',\n\t\t\t'Bash(git log:*)',\n\t\t\t'Bash(git add:*)',\n\t\t\t'Bash(git rebase:*)',\n\t\t]\n\n\t\ttry {\n\t\t\t// Launch Claude interactively in current terminal\n\t\t\t// User will interact directly with Claude to resolve conflicts\n\t\t\tawait launchClaude(prompt, {\n\t\t\t\tappendSystemPrompt: systemPrompt,\n\t\t\t\taddDir: worktreePath,\n\t\t\t\theadless: false, // Interactive - runs in current terminal with stdio: inherit\n\t\t\t\tallowedTools: rebaseAllowedTools,\n\t\t\t})\n\n\t\t\t// After Claude interaction completes, check if conflicts resolved\n\t\t\tconst remainingConflicts = await this.detectConflictedFiles(worktreePath)\n\n\t\t\tif (remainingConflicts.length > 0) {\n\t\t\t\tgetLogger().warn(\n\t\t\t\t\t`Conflicts still exist in ${remainingConflicts.length} file(s) after Claude assistance`\n\t\t\t\t)\n\t\t\t\treturn false\n\t\t\t}\n\n\t\t\t// Check if rebase completed or still in progress\n\t\t\tconst rebaseInProgress = await this.isRebaseInProgress(worktreePath)\n\n\t\t\tif (rebaseInProgress) {\n\t\t\t\tgetLogger().warn('Rebase still in progress after Claude assistance')\n\t\t\t\treturn false\n\t\t\t}\n\n\t\t\treturn true\n\t\t} catch (error) {\n\t\t\tgetLogger().warn('Claude conflict resolution failed', {\n\t\t\t\terror: error instanceof Error ? 
error.message : String(error),\n\t\t\t})\n\t\t\treturn false\n\t\t}\n\t}\n\n\t/**\n\t * Check if a git rebase is currently in progress\n\t * Checks for .git/rebase-merge or .git/rebase-apply directories\n\t * Ports bash script logic from lines 853-856\n\t *\n\t * @param worktreePath - Path to the worktree\n\t * @returns true if rebase in progress, false otherwise\n\t * @private\n\t */\n\tprivate async isRebaseInProgress(worktreePath: string): Promise<boolean> {\n\t\tconst fs = await import('node:fs/promises')\n\t\tconst path = await import('node:path')\n\n\t\tconst rebaseMergePath = path.join(worktreePath, '.git', 'rebase-merge')\n\t\tconst rebaseApplyPath = path.join(worktreePath, '.git', 'rebase-apply')\n\n\t\t// Check for rebase-merge directory\n\t\ttry {\n\t\t\tawait fs.access(rebaseMergePath)\n\t\t\treturn true\n\t\t} catch {\n\t\t\t// Directory doesn't exist, continue checking\n\t\t}\n\n\t\t// Check for rebase-apply directory\n\t\ttry {\n\t\t\tawait fs.access(rebaseApplyPath)\n\t\t\treturn true\n\t\t} catch {\n\t\t\t// Directory doesn't exist\n\t\t}\n\n\t\treturn false\n\t}\n}\n","import { getLogger } from '../utils/logger-context.js'\nimport { detectPackageManager, runScript } from '../utils/package-manager.js'\nimport { getPackageConfig, hasScript } from '../utils/package-json.js'\nimport { ProjectCapabilityDetector } from './ProjectCapabilityDetector.js'\n\nexport interface BuildOptions {\n\tdryRun?: boolean\n}\n\nexport interface BuildResult {\n\tsuccess: boolean\n\tskipped: boolean\n\treason?: string\n\tduration: number\n}\n\n/**\n * BuildRunner handles post-merge build verification for CLI projects\n * Only runs build when project has CLI capabilities (bin field in package.json)\n */\nexport class BuildRunner {\n\tprivate capabilityDetector: ProjectCapabilityDetector\n\n\tconstructor(capabilityDetector?: ProjectCapabilityDetector) {\n\t\tthis.capabilityDetector = capabilityDetector ?? 
new ProjectCapabilityDetector()\n\t}\n\n\t/**\n\t * Run build verification in the specified directory\n\t * @param buildPath - Path where build should run (typically main worktree path)\n\t * @param options - Build options\n\t */\n\tasync runBuild(buildPath: string, options: BuildOptions = {}): Promise<BuildResult> {\n\t\tconst startTime = Date.now()\n\n\t\ttry {\n\t\t\t// Step 1: Check if build script exists (checks .iloom/package.iloom.json first, then package.json)\n\t\t\tconst pkgJson = await getPackageConfig(buildPath)\n\t\t\tconst hasBuildScript = hasScript(pkgJson, 'build')\n\n\t\t\tif (!hasBuildScript) {\n\t\t\tgetLogger().debug('Skipping build - no build script found')\n\t\t\t\treturn {\n\t\t\t\t\tsuccess: true,\n\t\t\t\t\tskipped: true,\n\t\t\t\t\treason: 'No build script found in package configuration',\n\t\t\t\t\tduration: Date.now() - startTime,\n\t\t\t\t}\n\t\t\t}\n\t\t} catch (error) {\n\t\t\t// Handle missing package.json - skip build for non-Node.js projects without package.iloom.json\n\t\t\tif (error instanceof Error && error.message.includes('package.json not found')) {\n\t\t\tgetLogger().debug('Skipping build - no package configuration found')\n\t\t\t\treturn {\n\t\t\t\t\tsuccess: true,\n\t\t\t\t\tskipped: true,\n\t\t\t\t\treason: 'No package configuration found in project',\n\t\t\t\t\tduration: Date.now() - startTime,\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Re-throw other errors\n\t\t\tthrow error\n\t\t}\n\n\t\t// Step 2: Check if project has CLI capability (bin field)\n\t\tconst capabilities = await this.capabilityDetector.detectCapabilities(buildPath)\n\t\tconst isCLIProject = capabilities.capabilities.includes('cli')\n\n\t\tif (!isCLIProject) {\n\t\tgetLogger().debug('Skipping build - not a CLI project (no bin field)')\n\t\t\treturn {\n\t\t\t\tsuccess: true,\n\t\t\t\tskipped: true,\n\t\t\t\treason: 'Project is not a CLI project (no bin field in package.json)',\n\t\t\t\tduration: Date.now() - startTime,\n\t\t\t}\n\t\t}\n\n\t\t// Step 3: Detect package manager\n\t\tconst packageManager = await detectPackageManager(buildPath)\n\n\t\t// Step 4: Handle dry-run mode\n\t\tif (options.dryRun) {\n\t\t\tconst command =\n\t\t\t\tpackageManager === 'npm' ? 'npm run build' : `${packageManager} build`\n\t\tgetLogger().info(`[DRY RUN] Would run: ${command}`)\n\t\t\treturn {\n\t\t\t\tsuccess: true,\n\t\t\t\tskipped: false,\n\t\t\t\tduration: Date.now() - startTime,\n\t\t\t}\n\t\t}\n\n\t\t// Step 5: Execute build\n\tgetLogger().info('Running build...')\n\n\t\ttry {\n\t\t\tawait runScript('build', buildPath, [], { quiet: true })\n\t\tgetLogger().success('Build completed successfully')\n\n\t\t\treturn {\n\t\t\t\tsuccess: true,\n\t\t\t\tskipped: false,\n\t\t\t\tduration: Date.now() - startTime,\n\t\t\t}\n\t\t} catch {\n\t\t\t// Step 6: Throw detailed error on failure\n\t\t\tconst runCommand =\n\t\t\t\tpackageManager === 'npm' ? 
'npm run build' : `${packageManager} build`\n\n\t\t\tthrow new Error(\n\t\t\t\t`Error: Build failed.\\n` +\n\t\t\t\t\t`Fix build errors before proceeding.\\n\\n` +\n\t\t\t\t\t`Run '${runCommand}' to see detailed errors.`\n\t\t\t)\n\t\t}\n\t}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAaO,IAAM,eAAN,MAAmB;AAAA,EAIzB,YAAY,iBAAmC,iBAAmC;AACjF,SAAK,kBAAkB,mBAAmB,IAAI,gBAAgB;AAC9D,SAAK,kBAAkB,mBAAmB,IAAI,gBAAgB;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,cAAc,cAAwC;AAEnE,WAAO,qBAAqB,gBAAgB,QAAQ,IAAI,GAAG;AAAA,MAC1D,iBAAiB,KAAK;AAAA,MACtB,iBAAiB,KAAK;AAAA,IACvB,CAAC;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,aAAa,cAAsB,UAAwB,CAAC,GAAkB;AACnF,UAAM,EAAE,SAAS,OAAO,QAAQ,MAAM,IAAI;AAC1C,UAAM,aAAa,MAAM,KAAK,cAAc,YAAY;AAExD,cAAU,EAAE,KAAK,sBAAsB,UAAU,YAAY;AAG7D,QAAI;AACH,YAAM,kBAAkB,CAAC,YAAY,YAAY,WAAW,cAAc,UAAU,EAAE,GAAG;AAAA,QACxF,KAAK;AAAA,MACN,CAAC;AAAA,IACF,QAAQ;AACP,YAAM,IAAI;AAAA,QACT,gBAAgB,UAAU;AAAA,+BACO,UAAU;AAAA,MAC5C;AAAA,IACD;AAGA,UAAM,eAAe,MAAM,kBAAkB,CAAC,UAAU,aAAa,GAAG;AAAA,MACvE,KAAK;AAAA,IACN,CAAC;AAED,QAAI,gBAA+B;AACnC,QAAI,aAAa,KAAK,GAAG;AACxB,gBAAU,EAAE,KAAK,gEAAgE;AACjF,sBAAgB,MAAM,KAAK,gBAAgB,YAAY;AACvD,gBAAU,EAAE,MAAM,uBAAuB,aAAa,EAAE;AAAA,IACzD;AAGA,UAAM,YAAY,MAAM,kBAAkB,CAAC,cAAc,YAAY,MAAM,GAAG;AAAA,MAC7E,KAAK;AAAA,IACN,CAAC;AAED,UAAM,WAAW,MAAM,kBAAkB,CAAC,aAAa,UAAU,GAAG;AAAA,MACnE,KAAK;AAAA,IACN,CAAC;AAED,UAAM,mBAAmB,UAAU,KAAK;AACxC,UAAM,kBAAkB,SAAS,KAAK;AAGtC,QAAI,qBAAqB,iBAAiB;AACzC,gBAAU,EAAE,QAAQ,qCAAqC,UAAU,qBAAqB;AAExF,UAAI,eAAe;AAClB,cAAM,KAAK,iBAAiB,cAAc,aAAa;AAAA,MACxD;AACA;AAAA,IACD;AAGA,UAAM,gBAAgB,MAAM,kBAAkB,CAAC,OAAO,aAAa,GAAG,UAAU,QAAQ,GAAG;AAAA,MAC1F,KAAK;AAAA,IACN,CAAC;AAED,UAAM,UAAU,cAAc,KAAK;AACnC,UAAM,cAAc,UAAU,QAAQ,MAAM,IAAI,IAAI,CAAC;AAErD,QAAI,SAAS;AAEZ,gBAAU,EAAE,KAAK,SAAS,YAAY,MAAM,uBAAuB;AACnE,kBAAY,QAAQ,CAAC,WAAW,UAAU,EAAE,KAAK,KAAK,MAAM,EAAE,CAAC;AAAA,IAChE,OAAO;AAEN,gBAAU,EAAE,KAAK,GAAG,UAAU,yDAAyD;AAAA,IACxF;AAGA,QAAI,CAAC,SAAS,CAAC,QAAQ;AAGtB,gBAAU,EAAE,KAAK,+DAA+D;AAAA,IACjF;AAGA,QAAI,QAAQ;AACX,gBAAU,EAAE,KAAK,uCAAuC,UAAU,EAAE;AACpE,UAAI,YAAY,SAAS,GAAG;AAC3B,kBAAU,EAAE,KAAK,+BAA+B,YAAY,MAAM,YAAY;AAAA,MAC/E;AACA;AAAA,IACD;AAKA,QAAI;AACH,YAAM,kBAAkB,CAAC,MAAM,4BAA4B,UAAU,UAAU,GAAG,EAAE,KAAK,aAAa,CAAC;AACvG,gBAAU,EAAE,QAAQ,gCAAgC;AAGpD,UAAI,eAAe;AAClB,cAAM,KAAK,iBAAiB,cAAc,aAAa;AAAA,MACxD;AAAA,IACD,SAAS,OAAO;AAEf,YAAM,kBAAkB,MAAM,KAAK,sBAAsB,YAAY;AAErE,UAAI,gBAAgB,SAAS,GAAG;AAE/B,kBAAU,EAAE,KAAK,oEAAoE;AAErF,cAAM,WAAW,MAAM,KAAK;AAAA,UAC3B;AAAA,UACA;AAAA,QACD;AAEA,YAAI,UAAU;AACb,oBAAU,EAAE,QAAQ,6DAA6D;AAGjF,cAAI,eAAe;AAClB,kBAAM,KAAK,iBAAiB,cAAc,aAAa;AAAA,UACxD;AACA;AAAA,QACD;AAGA,cAAM,gBAAgB,KAAK,oBAAoB,eAAe;AAC9D,cAAM,IAAI,MAAM,aAAa;AAAA,MAC9B;AAGA,YAAM,IAAI;AAAA,QACT,kBAAkB,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA;AAAA;AAAA,MAGzE;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,4BAA4B,YAAoB,YAAoB,kBAAyC;AAGlH,UAAM,YAAY,MAAM,kBAAkB,CAAC,cAAc,YAAY,UAAU,GAAG;AAAA,MACjF,KAAK;AAAA,IACN,CAAC;AAGD,UAAM,WAAW,MAAM,kBAAkB,CAAC,aAAa,UAAU,GAAG;AAAA,MACnE,KAAK;AAAA,IACN,CAAC;AAGD,UAAM,mBAAmB,UAAU,KAAK;AACxC,UAAM,kBAAkB,SAAS,KAAK;AAEtC,QAAI,qBAAqB,iBAAiB;AACzC,YAAM,IAAI;AAAA,QACT;AAAA,MACQ,UAAU;AAAA,cACF,gBAAgB;AAAA,cAChB,eAAe;AAAA;AAAA;AAAA,4BAED,UAAU,gBAAgB,UAAU;AAAA;AAAA;AAAA,MAEnE;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAM,wBACL,YACA,cACA,UAAwB,CAAC,GACT;AAChB,UAAM,EAAE,SAAS,OAAO,QAAQ,MAAM,IAAI;AAE1C,cAAU,EAAE,KAAK,gCAAgC;AAKjD,UAAM,aAAa,MAAM,KAAK,cAAc,YAAY;AAKxD,QAAI;AACJ,QAAI,QAAQ,UAAU;AACrB,yBAAmB,QAAQ;AAAA,IAC5B,OAAO;AACN,UAAI;AAEH,2BAAmB,MAAM,sB
AAsB,YAAY,YAAY;AAAA,MACxE,QAAQ;AAGP,kBAAU,EAAE,MAAM,iCAAiC,UAAU,0CAA0C;AACvG,2BAAmB,MAAM,iCAAiC,cAAc,KAAK,eAAe;AAAA,MAC7F;AAAA,IACD;AAGA,cAAU,EAAE,MAAM,SAAS,UAAU,qBAAqB,gBAAgB,EAAE;AAG5E,UAAM,gBAAgB,MAAM,kBAAkB,CAAC,UAAU,gBAAgB,GAAG;AAAA,MAC3E,KAAK;AAAA,IACN,CAAC;AAED,QAAI,cAAc,KAAK,MAAM,YAAY;AACxC,YAAM,IAAI;AAAA,QACT,YAAY,UAAU,sBAAsB,cAAc,KAAK,CAAC;AAAA,eAC/C,gBAAgB;AAAA;AAAA,MAElC;AAAA,IACD;AAGA,UAAM,KAAK,4BAA4B,YAAY,YAAY,gBAAgB;AAG/E,UAAM,gBAAgB,MAAM,kBAAkB,CAAC,OAAO,aAAa,GAAG,UAAU,KAAK,UAAU,EAAE,GAAG;AAAA,MACnG,KAAK;AAAA,IACN,CAAC;AAED,UAAM,UAAU,cAAc,KAAK;AAGnC,QAAI,CAAC,SAAS;AACb,gBAAU,EAAE,QAAQ,kCAAkC,UAAU,oBAAoB;AACpF;AAAA,IACD;AAGA,UAAM,cAAc,QAAQ,MAAM,IAAI;AACtC,cAAU,EAAE,KAAK,SAAS,YAAY,MAAM,sBAAsB;AAClE,gBAAY,QAAQ,CAAC,WAAW,UAAU,EAAE,KAAK,KAAK,MAAM,EAAE,CAAC;AAG/D,QAAI,CAAC,SAAS,CAAC,QAAQ;AAGtB,gBAAU,EAAE,KAAK,2EAA2E;AAAA,IAC7F;AAGA,QAAI,QAAQ;AACX,gBAAU,EAAE,KAAK,gDAAgD,UAAU,EAAE;AAC7E,gBAAU,EAAE,KAAK,8BAA8B,YAAY,MAAM,YAAY;AAC7E;AAAA,IACD;AAGA,QAAI;AACH,gBAAU,EAAE,MAAM,mCAAmC,UAAU,SAAS,UAAU,eAAe,gBAAgB,KAAK;AACtH,YAAM,kBAAkB,CAAC,SAAS,aAAa,UAAU,GAAG,EAAE,KAAK,iBAAiB,CAAC;AACrF,gBAAU,EAAE,QAAQ,wCAAwC,YAAY,MAAM,aAAa;AAAA,IAC5F,SAAS,OAAO;AACf,YAAM,IAAI;AAAA,QACT,8BAA8B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAMrF;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAc,sBAAsB,cAAyC;AAC5E,QAAI;AACH,YAAM,SAAS,MAAM,kBAAkB,CAAC,QAAQ,eAAe,iBAAiB,GAAG;AAAA,QAClF,KAAK;AAAA,MACN,CAAC;AAED,aAAO,OACL,KAAK,EACL,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,KAAK,SAAS,CAAC;AAAA,IACnC,QAAQ;AAEP,aAAO,CAAC;AAAA,IACT;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAc,gBAAgB,cAAuC;AAEpE,UAAM,kBAAkB,CAAC,OAAO,IAAI,GAAG,EAAE,KAAK,aAAa,CAAC;AAI5D,UAAM,kBAAkB,CAAC,UAAU,eAAe,MAAM,4BAA4B,GAAG,EAAE,KAAK,aAAa,CAAC;AAG5G,UAAM,OAAO,MAAM,kBAAkB,CAAC,aAAa,MAAM,GAAG,EAAE,KAAK,aAAa,CAAC;AACjF,WAAO,KAAK,KAAK;AAAA,EAClB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAc,iBAAiB,cAAsB,eAAsC;AAC1F,cAAU,EAAE,KAAK,kDAAkD;AAEnE,QAAI;AAEH,YAAM,kBAAkB,CAAC,SAAS,UAAU,QAAQ,GAAG,EAAE,KAAK,aAAa,CAAC;AAG5E,YAAM,kBAAkB,CAAC,SAAS,MAAM,GAAG,EAAE,KAAK,aAAa,CAAC;AAEhE,gBAAU,EAAE,QAAQ,8CAA8C;AAAA,IACnE,SAAS,OAAO;AAEf,gBAAU,EAAE;AAAA,QACX,iCAAiC,aAAa;AAAA,QAG9C,EAAE,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK,EAAE;AAAA,MACjE;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAMQ,oBAAoB,iBAAmC;AAC9D,UAAM,WAAW,gBAAgB,IAAI,CAAC,SAAS,YAAO,IAAI,EAAE,EAAE,KAAK,IAAI;AAEvE,WACC,mDACA,WACA;AAAA,EAQF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,gCACb,cACA,iBACmB;AAEnB,UAAM,oBAAoB,MAAM,gBAAgB;AAChD,QAAI,CAAC,mBAAmB;AACvB,gBAAU,EAAE,MAAM,wDAAwD;AAC1E,aAAO;AAAA,IACR;AAEA,cAAU,EAAE,KAAK,4CAA4C,gBAAgB,MAAM,aAAa;AAIhG,UAAM,eACL;AAMD,UAAM,SACL;AAKD,UAAM,qBAAqB;AAAA,MAC1B;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACD;AAEA,QAAI;AAGH,YAAM,aAAa,QAAQ;AAAA,QAC1B,oBAAoB;AAAA,QACpB,QAAQ;AAAA,QACR,UAAU;AAAA;AAAA,QACV,cAAc;AAAA,MACf,CAAC;AAGD,YAAM,qBAAqB,MAAM,KAAK,sBAAsB,YAAY;AAExE,UAAI,mBAAmB,SAAS,GAAG;AAClC,kBAAU,EAAE;AAAA,UACX,4BAA4B,mBAAmB,MAAM;AAAA,QACtD;AACA,eAAO;AAAA,MACR;AAGA,YAAM,mBAAmB,MAAM,KAAK,mBAAmB,YAAY;AAEnE,UAAI,kBAAkB;AACrB,kBAAU,EAAE,KAAK,kDAAkD;AACnE,eAAO;AAAA,MACR;AAEA,aAAO;AAAA,IACR,SAAS,OAAO;AACf,gBAAU,EAAE,KAAK,qCAAqC;AAAA,QACrD,OAAO,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAAA,MAC7D,CAAC;AACD,aAAO;AAAA,IACR;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,mBAAmB,cAAwC;AACxE,UAAM,KAAK,MAAM,OAAO,aAAkB;AAC1C,UAAM,OAAO,MAAM,OAAO,MAAW;AAErC,UAAM,kBAAkB,KAAK,KAAK,cAAc,QAAQ,cAAc;AACtE,UAAM,kBAAkB,KAAK,KAAK,cAAc,QAAQ,cAAc;AAGtE,QAAI;AACH,YAAM,GAAG,OAAO,eAAe;AAC/B,aAAO;AAAA,IACR,QAAQ;AAAA,IAER;AAGA,QAA
I;AACH,YAAM,GAAG,OAAO,eAAe;AAC/B,aAAO;AAAA,IACR,QAAQ;AAAA,IAER;AAEA,WAAO;AAAA,EACR;AACD;;;AC7fO,IAAM,cAAN,MAAkB;AAAA,EAGxB,YAAY,oBAAgD;AAC3D,SAAK,qBAAqB,sBAAsB,IAAI,0BAA0B;AAAA,EAC/E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,MAAM,SAAS,WAAmB,UAAwB,CAAC,GAAyB;AACnF,UAAM,YAAY,KAAK,IAAI;AAE3B,QAAI;AAEH,YAAM,UAAU,MAAM,iBAAiB,SAAS;AAChD,YAAM,iBAAiB,UAAU,SAAS,OAAO;AAEjD,UAAI,CAAC,gBAAgB;AACrB,kBAAU,EAAE,MAAM,wCAAwC;AACzD,eAAO;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,UACT,QAAQ;AAAA,UACR,UAAU,KAAK,IAAI,IAAI;AAAA,QACxB;AAAA,MACD;AAAA,IACD,SAAS,OAAO;AAEf,UAAI,iBAAiB,SAAS,MAAM,QAAQ,SAAS,wBAAwB,GAAG;AAChF,kBAAU,EAAE,MAAM,iDAAiD;AAClE,eAAO;AAAA,UACN,SAAS;AAAA,UACT,SAAS;AAAA,UACT,QAAQ;AAAA,UACR,UAAU,KAAK,IAAI,IAAI;AAAA,QACxB;AAAA,MACD;AAEA,YAAM;AAAA,IACP;AAGA,UAAM,eAAe,MAAM,KAAK,mBAAmB,mBAAmB,SAAS;AAC/E,UAAM,eAAe,aAAa,aAAa,SAAS,KAAK;AAE7D,QAAI,CAAC,cAAc;AACnB,gBAAU,EAAE,MAAM,mDAAmD;AACpE,aAAO;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,QACT,QAAQ;AAAA,QACR,UAAU,KAAK,IAAI,IAAI;AAAA,MACxB;AAAA,IACD;AAGA,UAAM,iBAAiB,MAAM,qBAAqB,SAAS;AAG3D,QAAI,QAAQ,QAAQ;AACnB,YAAM,UACL,mBAAmB,QAAQ,kBAAkB,GAAG,cAAc;AAChE,gBAAU,EAAE,KAAK,wBAAwB,OAAO,EAAE;AACjD,aAAO;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,QACT,UAAU,KAAK,IAAI,IAAI;AAAA,MACxB;AAAA,IACD;AAGD,cAAU,EAAE,KAAK,kBAAkB;AAElC,QAAI;AACH,YAAM,UAAU,SAAS,WAAW,CAAC,GAAG,EAAE,OAAO,KAAK,CAAC;AACxD,gBAAU,EAAE,QAAQ,8BAA8B;AAEjD,aAAO;AAAA,QACN,SAAS;AAAA,QACT,SAAS;AAAA,QACT,UAAU,KAAK,IAAI,IAAI;AAAA,MACxB;AAAA,IACD,QAAQ;AAEP,YAAM,aACL,mBAAmB,QAAQ,kBAAkB,GAAG,cAAc;AAE/D,YAAM,IAAI;AAAA,QACT;AAAA;AAAA;AAAA,OAES,UAAU;AAAA,MACpB;AAAA,IACD;AAAA,EACD;AACD;","names":[]}
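The source map above embeds the TypeScript for the worktree rebase/merge flow: uncommitted work is parked in a temporary "WIP: Auto-stash for rebase" commit, the rebase runs with hooks disabled, conflicts may be handed to Claude, and in-progress rebases are detected via `.git/rebase-merge` / `.git/rebase-apply`. The sketch below approximates that flow in isolation; it is not the package's own code, and the `runGit` helper is a hypothetical stand-in for its internal `executeGitCommand` utility (the `'main'` default branch name is also assumed).

```ts
// Standalone sketch of the WIP-auto-stash rebase flow (assumptions noted above).
import { execFile } from 'node:child_process'
import { promisify } from 'node:util'
import { access } from 'node:fs/promises'
import { join } from 'node:path'

const execFileAsync = promisify(execFile)

// Hypothetical helper standing in for the package's executeGitCommand
async function runGit(args: string[], cwd: string): Promise<string> {
	const { stdout } = await execFileAsync('git', args, { cwd })
	return stdout
}

// True while .git/rebase-merge or .git/rebase-apply exists (a rebase is still in progress)
async function rebaseInProgress(cwd: string): Promise<boolean> {
	for (const dir of ['rebase-merge', 'rebase-apply']) {
		try {
			await access(join(cwd, '.git', dir))
			return true
		} catch {
			// directory absent - keep checking
		}
	}
	return false
}

// Rebase onto mainBranch, preserving uncommitted changes in a temporary WIP commit
async function rebaseWithWipCommit(cwd: string, mainBranch = 'main'): Promise<void> {
	const dirty = (await runGit(['status', '--porcelain'], cwd)).trim().length > 0

	if (dirty) {
		// Stage everything (including untracked files) and commit without hooks;
		// the commit is internal and temporary
		await runGit(['add', '-A'], cwd)
		await runGit(['commit', '--no-verify', '-m', 'WIP: Auto-stash for rebase'], cwd)
	}

	try {
		// Disable hooks while commits are re-applied during the rebase
		await runGit(['-c', 'core.hooksPath=/dev/null', 'rebase', mainBranch], cwd)
	} finally {
		// Only unwind the WIP commit once no rebase is pending (i.e. no unresolved conflicts):
		// soft reset keeps the changes staged, the second reset unstages them
		if (dirty && !(await rebaseInProgress(cwd))) {
			await runGit(['reset', '--soft', 'HEAD~1'], cwd)
			await runGit(['reset', 'HEAD'], cwd)
		}
	}
}
```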
@@ -1,7 +1,7 @@
  #!/usr/bin/env node
  import {
  PromptTemplateManager
- } from "./chunk-W6WVRHJ6.js";
+ } from "./chunk-TIYJEEVO.js";
  import {
  logger
  } from "./chunk-VT4PDUYT.js";
@@ -263,4 +263,4 @@ var AgentManager = class {
  export {
  AgentManager
  };
- //# sourceMappingURL=chunk-SN3Z6EZO.js.map
+ //# sourceMappingURL=chunk-N7FVXZNI.js.map