@iloom/cli 0.6.1 → 0.7.1
This diff shows the contents of publicly released package versions as they appear in the supported public registries, and is provided for informational purposes only.
- package/LICENSE +1 -1
- package/README.md +35 -18
- package/dist/{BranchNamingService-B5PVRR7F.js → BranchNamingService-FLPUUFOB.js} +2 -2
- package/dist/ClaudeContextManager-KE5TBZVZ.js +14 -0
- package/dist/ClaudeService-CRSETT3A.js +13 -0
- package/dist/{GitHubService-S2OGUTDR.js → GitHubService-O7U4UQ7N.js} +3 -3
- package/dist/{LoomLauncher-5LFM4LXB.js → LoomLauncher-NL65LSKP.js} +6 -6
- package/dist/{MetadataManager-DFI73J3G.js → MetadataManager-XJ2YB762.js} +2 -2
- package/dist/PRManager-2ABCWXHW.js +16 -0
- package/dist/{ProjectCapabilityDetector-S5FLNCFI.js → ProjectCapabilityDetector-IA56AUE6.js} +3 -3
- package/dist/{PromptTemplateManager-C3DK6XZL.js → PromptTemplateManager-7L3HJQQU.js} +2 -2
- package/dist/README.md +35 -18
- package/dist/{SettingsManager-35F5RUJH.js → SettingsManager-YU4VYPTW.js} +2 -2
- package/dist/agents/iloom-framework-detector.md +78 -9
- package/dist/agents/iloom-issue-analyze-and-plan.md +42 -17
- package/dist/agents/iloom-issue-analyzer.md +14 -14
- package/dist/agents/iloom-issue-complexity-evaluator.md +38 -15
- package/dist/agents/iloom-issue-enhancer.md +15 -15
- package/dist/agents/iloom-issue-implementer.md +44 -15
- package/dist/agents/iloom-issue-planner.md +121 -17
- package/dist/agents/iloom-issue-reviewer.md +15 -15
- package/dist/{build-FJVYP7EV.js → build-HQ5HGA3T.js} +9 -9
- package/dist/{chunk-VU3QMIP2.js → chunk-453NC377.js} +91 -15
- package/dist/chunk-453NC377.js.map +1 -0
- package/dist/{chunk-UQIXZ3BA.js → chunk-5V74K5ZA.js} +2 -2
- package/dist/{chunk-7WANFUIK.js → chunk-6TL3BYH6.js} +2 -2
- package/dist/{chunk-ZPSTA5PR.js → chunk-7GLZVDPQ.js} +2 -2
- package/dist/{chunk-64O2UIWO.js → chunk-AFRICMSW.js} +4 -4
- package/dist/{chunk-5TXLVEXT.js → chunk-C3AKFAIR.js} +2 -2
- package/dist/{chunk-K7SEEHKO.js → chunk-CNSTXBJ3.js} +7 -419
- package/dist/chunk-CNSTXBJ3.js.map +1 -0
- package/dist/{chunk-2A7WQKBE.js → chunk-DAOS6EC3.js} +96 -6
- package/dist/chunk-DAOS6EC3.js.map +1 -0
- package/dist/{chunk-TRQ76ISK.js → chunk-ELJKYFSH.js} +9 -9
- package/dist/{chunk-VDA5JMB4.js → chunk-EPPPDVHD.js} +21 -8
- package/dist/chunk-EPPPDVHD.js.map +1 -0
- package/dist/{chunk-LVBRMTE6.js → chunk-FEAJR6PN.js} +6 -6
- package/dist/{chunk-6YSFTPKW.js → chunk-FM4KBPVA.js} +18 -13
- package/dist/chunk-FM4KBPVA.js.map +1 -0
- package/dist/{chunk-AEIMYF4P.js → chunk-FP7G7DG3.js} +6 -2
- package/dist/chunk-FP7G7DG3.js.map +1 -0
- package/dist/{chunk-LT3SGBR7.js → chunk-GCPAZSGV.js} +36 -2
- package/dist/{chunk-LT3SGBR7.js.map → chunk-GCPAZSGV.js.map} +1 -1
- package/dist/chunk-GJMEKEI5.js +517 -0
- package/dist/chunk-GJMEKEI5.js.map +1 -0
- package/dist/{chunk-7Q66W4OH.js → chunk-HBJITKSZ.js} +37 -1
- package/dist/chunk-HBJITKSZ.js.map +1 -0
- package/dist/{chunk-7HIRPCKU.js → chunk-HVQNVRAF.js} +2 -2
- package/dist/{chunk-6U6VI4SZ.js → chunk-KVS4XGBQ.js} +4 -4
- package/dist/{chunk-SN3Z6EZO.js → chunk-N7FVXZNI.js} +2 -2
- package/dist/{chunk-I75JMBNB.js → chunk-QQFBMCAH.js} +54 -43
- package/dist/chunk-QQFBMCAH.js.map +1 -0
- package/dist/{chunk-AXX3QIKK.js → chunk-RD7I2Q2F.js} +2 -2
- package/dist/chunk-TIYJEEVO.js +79 -0
- package/dist/chunk-TIYJEEVO.js.map +1 -0
- package/dist/{chunk-EK3XCAAS.js → chunk-UDRZY65Y.js} +2 -2
- package/dist/{chunk-3PT7RKL5.js → chunk-USJSNHGG.js} +2 -2
- package/dist/{chunk-CFUWQHCJ.js → chunk-VWGKGNJP.js} +114 -35
- package/dist/chunk-VWGKGNJP.js.map +1 -0
- package/dist/{chunk-F6WVM437.js → chunk-WFQ5CLTR.js} +6 -3
- package/dist/chunk-WFQ5CLTR.js.map +1 -0
- package/dist/{chunk-BXCPJJYM.js → chunk-XPKN3QWY.js} +24 -6
- package/dist/chunk-XPKN3QWY.js.map +1 -0
- package/dist/chunk-YQNSZKKT.js +822 -0
- package/dist/chunk-YQNSZKKT.js.map +1 -0
- package/dist/{chunk-GEXP5IOF.js → chunk-ZA575VLF.js} +21 -8
- package/dist/chunk-ZA575VLF.js.map +1 -0
- package/dist/{claude-H33OQMXO.js → claude-6H36IBHO.js} +4 -2
- package/dist/{cleanup-BRUAINKE.js → cleanup-77U5ATYI.js} +20 -16
- package/dist/cleanup-77U5ATYI.js.map +1 -0
- package/dist/cli.js +361 -954
- package/dist/cli.js.map +1 -1
- package/dist/commit-ONRXU67O.js +237 -0
- package/dist/commit-ONRXU67O.js.map +1 -0
- package/dist/{compile-ULNO5F7Q.js → compile-CT7IR7O2.js} +9 -9
- package/dist/{contribute-Q6GX6AXK.js → contribute-GXKOIA42.js} +5 -5
- package/dist/{dev-server-4RCDJ5MU.js → dev-server-UKAPBGUR.js} +22 -74
- package/dist/dev-server-UKAPBGUR.js.map +1 -0
- package/dist/{feedback-O4Q55SVS.js → feedback-K3A4QUSG.js} +10 -10
- package/dist/{git-FVMGBHC2.js → git-ENLT2VNI.js} +6 -4
- package/dist/hooks/iloom-hook.js +30 -2
- package/dist/{ignite-VHV65WEZ.js → ignite-YUAOJ5PP.js} +20 -20
- package/dist/ignite-YUAOJ5PP.js.map +1 -0
- package/dist/index.d.ts +71 -27
- package/dist/index.js +196 -266
- package/dist/index.js.map +1 -1
- package/dist/init-XQQMFDM6.js +21 -0
- package/dist/{lint-5JMCWE4Y.js → lint-HAVU4U34.js} +9 -9
- package/dist/mcp/issue-management-server.js +359 -13
- package/dist/mcp/issue-management-server.js.map +1 -1
- package/dist/mcp/recap-server.js +13 -4
- package/dist/mcp/recap-server.js.map +1 -1
- package/dist/{open-WHVUYGPY.js → open-QI63XQ4F.js} +25 -76
- package/dist/open-QI63XQ4F.js.map +1 -0
- package/dist/{projects-SA76I4TZ.js → projects-TWY4RT2Z.js} +11 -4
- package/dist/projects-TWY4RT2Z.js.map +1 -0
- package/dist/prompts/init-prompt.txt +119 -51
- package/dist/prompts/issue-prompt.txt +132 -63
- package/dist/prompts/pr-prompt.txt +3 -3
- package/dist/prompts/regular-prompt.txt +16 -18
- package/dist/prompts/session-summary-prompt.txt +13 -13
- package/dist/{rebase-Y4AS6LQW.js → rebase-QYCRF7JG.js} +53 -8
- package/dist/rebase-QYCRF7JG.js.map +1 -0
- package/dist/{recap-VOOUXOGP.js → recap-ZKGHZCX6.js} +6 -6
- package/dist/{run-NCRK5NPR.js → run-YDVYORT2.js} +25 -76
- package/dist/run-YDVYORT2.js.map +1 -0
- package/dist/schema/settings.schema.json +14 -3
- package/dist/{shell-SBLXVOVJ.js → shell-2NNSIU34.js} +6 -6
- package/dist/{summary-CVFAMDOJ.js → summary-G6L3VAKK.js} +11 -10
- package/dist/{summary-CVFAMDOJ.js.map → summary-G6L3VAKK.js.map} +1 -1
- package/dist/{test-3KIVXI6J.js → test-75WAA6DU.js} +9 -9
- package/dist/{test-git-ZB6AGGRW.js → test-git-E2BLXR6M.js} +4 -4
- package/dist/{test-prefix-FBGXKMPA.js → test-prefix-A7JGGYAA.js} +4 -4
- package/dist/{test-webserver-YVQD42W6.js → test-webserver-NRMGT2HB.js} +29 -8
- package/dist/test-webserver-NRMGT2HB.js.map +1 -0
- package/package.json +3 -1
- package/dist/ClaudeContextManager-6J2EB4QU.js +0 -14
- package/dist/ClaudeService-O2PB22GX.js +0 -13
- package/dist/PRManager-GB3FOJ2W.js +0 -14
- package/dist/chunk-2A7WQKBE.js.map +0 -1
- package/dist/chunk-6YSFTPKW.js.map +0 -1
- package/dist/chunk-7Q66W4OH.js.map +0 -1
- package/dist/chunk-AEIMYF4P.js.map +0 -1
- package/dist/chunk-BXCPJJYM.js.map +0 -1
- package/dist/chunk-CFUWQHCJ.js.map +0 -1
- package/dist/chunk-F6WVM437.js.map +0 -1
- package/dist/chunk-GEXP5IOF.js.map +0 -1
- package/dist/chunk-I75JMBNB.js.map +0 -1
- package/dist/chunk-K7SEEHKO.js.map +0 -1
- package/dist/chunk-VDA5JMB4.js.map +0 -1
- package/dist/chunk-VU3QMIP2.js.map +0 -1
- package/dist/chunk-W6WVRHJ6.js +0 -251
- package/dist/chunk-W6WVRHJ6.js.map +0 -1
- package/dist/cleanup-BRUAINKE.js.map +0 -1
- package/dist/dev-server-4RCDJ5MU.js.map +0 -1
- package/dist/ignite-VHV65WEZ.js.map +0 -1
- package/dist/init-UTYRHNJJ.js +0 -21
- package/dist/open-WHVUYGPY.js.map +0 -1
- package/dist/projects-SA76I4TZ.js.map +0 -1
- package/dist/rebase-Y4AS6LQW.js.map +0 -1
- package/dist/run-NCRK5NPR.js.map +0 -1
- package/dist/test-webserver-YVQD42W6.js.map +0 -1
- /package/dist/{BranchNamingService-B5PVRR7F.js.map → BranchNamingService-FLPUUFOB.js.map} +0 -0
- /package/dist/{ClaudeContextManager-6J2EB4QU.js.map → ClaudeContextManager-KE5TBZVZ.js.map} +0 -0
- /package/dist/{ClaudeService-O2PB22GX.js.map → ClaudeService-CRSETT3A.js.map} +0 -0
- /package/dist/{GitHubService-S2OGUTDR.js.map → GitHubService-O7U4UQ7N.js.map} +0 -0
- /package/dist/{LoomLauncher-5LFM4LXB.js.map → LoomLauncher-NL65LSKP.js.map} +0 -0
- /package/dist/{MetadataManager-DFI73J3G.js.map → MetadataManager-XJ2YB762.js.map} +0 -0
- /package/dist/{PRManager-GB3FOJ2W.js.map → PRManager-2ABCWXHW.js.map} +0 -0
- /package/dist/{ProjectCapabilityDetector-S5FLNCFI.js.map → ProjectCapabilityDetector-IA56AUE6.js.map} +0 -0
- /package/dist/{PromptTemplateManager-C3DK6XZL.js.map → PromptTemplateManager-7L3HJQQU.js.map} +0 -0
- /package/dist/{SettingsManager-35F5RUJH.js.map → SettingsManager-YU4VYPTW.js.map} +0 -0
- /package/dist/{build-FJVYP7EV.js.map → build-HQ5HGA3T.js.map} +0 -0
- /package/dist/{chunk-UQIXZ3BA.js.map → chunk-5V74K5ZA.js.map} +0 -0
- /package/dist/{chunk-7WANFUIK.js.map → chunk-6TL3BYH6.js.map} +0 -0
- /package/dist/{chunk-ZPSTA5PR.js.map → chunk-7GLZVDPQ.js.map} +0 -0
- /package/dist/{chunk-64O2UIWO.js.map → chunk-AFRICMSW.js.map} +0 -0
- /package/dist/{chunk-5TXLVEXT.js.map → chunk-C3AKFAIR.js.map} +0 -0
- /package/dist/{chunk-TRQ76ISK.js.map → chunk-ELJKYFSH.js.map} +0 -0
- /package/dist/{chunk-LVBRMTE6.js.map → chunk-FEAJR6PN.js.map} +0 -0
- /package/dist/{chunk-7HIRPCKU.js.map → chunk-HVQNVRAF.js.map} +0 -0
- /package/dist/{chunk-6U6VI4SZ.js.map → chunk-KVS4XGBQ.js.map} +0 -0
- /package/dist/{chunk-SN3Z6EZO.js.map → chunk-N7FVXZNI.js.map} +0 -0
- /package/dist/{chunk-AXX3QIKK.js.map → chunk-RD7I2Q2F.js.map} +0 -0
- /package/dist/{chunk-EK3XCAAS.js.map → chunk-UDRZY65Y.js.map} +0 -0
- /package/dist/{chunk-3PT7RKL5.js.map → chunk-USJSNHGG.js.map} +0 -0
- /package/dist/{claude-H33OQMXO.js.map → claude-6H36IBHO.js.map} +0 -0
- /package/dist/{compile-ULNO5F7Q.js.map → compile-CT7IR7O2.js.map} +0 -0
- /package/dist/{contribute-Q6GX6AXK.js.map → contribute-GXKOIA42.js.map} +0 -0
- /package/dist/{feedback-O4Q55SVS.js.map → feedback-K3A4QUSG.js.map} +0 -0
- /package/dist/{git-FVMGBHC2.js.map → git-ENLT2VNI.js.map} +0 -0
- /package/dist/{init-UTYRHNJJ.js.map → init-XQQMFDM6.js.map} +0 -0
- /package/dist/{lint-5JMCWE4Y.js.map → lint-HAVU4U34.js.map} +0 -0
- /package/dist/{recap-VOOUXOGP.js.map → recap-ZKGHZCX6.js.map} +0 -0
- /package/dist/{shell-SBLXVOVJ.js.map → shell-2NNSIU34.js.map} +0 -0
- /package/dist/{test-3KIVXI6J.js.map → test-75WAA6DU.js.map} +0 -0
- /package/dist/{test-git-ZB6AGGRW.js.map → test-git-E2BLXR6M.js.map} +0 -0
- /package/dist/{test-prefix-FBGXKMPA.js.map → test-prefix-A7JGGYAA.js.map} +0 -0
package/dist/{chunk-K7SEEHKO.js → chunk-CNSTXBJ3.js}
@@ -3,34 +3,24 @@ import {
   formatRecapMarkdown
 } from "./chunk-NXMDEL3F.js";
 import {
-  createLinearComment,
-  fetchLinearIssue,
-  fetchLinearIssueComments,
-  getLinearComment,
-  updateLinearComment
-} from "./chunk-7Q66W4OH.js";
+  IssueManagementProviderFactory
+} from "./chunk-GJMEKEI5.js";
 import {
   hasMultipleRemotes
 } from "./chunk-FXDYIV3K.js";
 import {
   PromptTemplateManager
-} from "./chunk-
+} from "./chunk-TIYJEEVO.js";
 import {
   SettingsManager
-} from "./chunk-
+} from "./chunk-WFQ5CLTR.js";
 import {
   MetadataManager
-} from "./chunk-
-import {
-  createIssueComment,
-  createPRComment,
-  executeGhCommand,
-  updateIssueComment
-} from "./chunk-LT3SGBR7.js";
+} from "./chunk-VWGKGNJP.js";
 import {
   generateDeterministicSessionId,
   launchClaude
-} from "./chunk-
+} from "./chunk-FP7G7DG3.js";
 import {
   logger
 } from "./chunk-VT4PDUYT.js";
@@ -113,408 +103,6 @@ async function readSessionContext(worktreePath, sessionId, maxSummaries = 3) {
   return formattedSummaries;
 }
 
-// src/mcp/GitHubIssueManagementProvider.ts
-function normalizeAuthor(author) {
-  if (!author) return null;
-  return {
-    id: author.id ? String(author.id) : author.login,
-    displayName: author.login,
-    // GitHub uses login as primary identifier
-    login: author.login,
-    // Preserve original GitHub field
-    ...author.avatarUrl && { avatarUrl: author.avatarUrl },
-    ...author.url && { url: author.url }
-  };
-}
-function extractNumericIdFromUrl(url) {
-  const match = url.match(/#issuecomment-(\d+)$/);
-  if (!(match == null ? void 0 : match[1])) {
-    throw new Error(`Cannot extract comment ID from URL: ${url}`);
-  }
-  return match[1];
-}
-var GitHubIssueManagementProvider = class {
-  constructor() {
-    this.providerName = "github";
-  }
-  /**
-   * Fetch issue details using gh CLI
-   * Normalizes GitHub-specific fields to provider-agnostic format
-   */
-  async getIssue(input) {
-    const { number, includeComments = true } = input;
-    const issueNumber = parseInt(number, 10);
-    if (isNaN(issueNumber)) {
-      throw new Error(`Invalid GitHub issue number: ${number}. GitHub issue IDs must be numeric.`);
-    }
-    const fields = includeComments ? "body,title,comments,labels,assignees,milestone,author,state,number,url" : "body,title,labels,assignees,milestone,author,state,number,url";
-    const raw = await executeGhCommand([
-      "issue",
-      "view",
-      String(issueNumber),
-      "--json",
-      fields
-    ]);
-    const result = {
-      // Core fields
-      id: String(raw.number),
-      title: raw.title,
-      body: raw.body,
-      state: raw.state,
-      url: raw.url,
-      provider: "github",
-      // Normalized author
-      author: normalizeAuthor(raw.author),
-      // Optional flexible fields
-      ...raw.assignees && {
-        assignees: raw.assignees.map((a) => normalizeAuthor(a)).filter((a) => a !== null)
-      },
-      ...raw.labels && {
-        labels: raw.labels
-      },
-      // GitHub-specific passthrough fields
-      ...raw.milestone && {
-        milestone: raw.milestone
-      }
-    };
-    if (raw.comments !== void 0) {
-      result.comments = raw.comments.map((comment) => ({
-        id: extractNumericIdFromUrl(comment.url),
-        body: comment.body,
-        createdAt: comment.createdAt,
-        author: normalizeAuthor(comment.author),
-        ...comment.updatedAt && { updatedAt: comment.updatedAt }
-      }));
-    }
-    return result;
-  }
-  /**
-   * Fetch a specific comment by ID using gh API
-   * Normalizes author to FlexibleAuthor format
-   */
-  async getComment(input) {
-    const { commentId } = input;
-    const numericCommentId = parseInt(commentId, 10);
-    if (isNaN(numericCommentId)) {
-      throw new Error(`Invalid GitHub comment ID: ${commentId}. GitHub comment IDs must be numeric.`);
-    }
-    const raw = await executeGhCommand([
-      "api",
-      `repos/:owner/:repo/issues/comments/${numericCommentId}`,
-      "--jq",
-      "{id: .id, body: .body, user: .user, created_at: .created_at, updated_at: .updated_at, html_url: .html_url, reactions: .reactions}"
-    ]);
-    return {
-      id: String(raw.id),
-      body: raw.body,
-      author: normalizeAuthor(raw.user),
-      created_at: raw.created_at,
-      ...raw.updated_at && { updated_at: raw.updated_at },
-      // Passthrough GitHub-specific fields
-      ...raw.html_url && { html_url: raw.html_url },
-      ...raw.reactions && { reactions: raw.reactions }
-    };
-  }
-  /**
-   * Create a new comment on an issue or PR
-   */
-  async createComment(input) {
-    const { number, body, type } = input;
-    const numericId = parseInt(number, 10);
-    if (isNaN(numericId)) {
-      throw new Error(`Invalid GitHub ${type} number: ${number}. GitHub IDs must be numeric.`);
-    }
-    const result = type === "issue" ? await createIssueComment(numericId, body) : await createPRComment(numericId, body);
-    return {
-      ...result,
-      id: String(result.id)
-    };
-  }
-  /**
-   * Update an existing comment
-   */
-  async updateComment(input) {
-    const { commentId, body } = input;
-    const numericCommentId = parseInt(commentId, 10);
-    if (isNaN(numericCommentId)) {
-      throw new Error(`Invalid GitHub comment ID: ${commentId}. GitHub comment IDs must be numeric.`);
-    }
-    const result = await updateIssueComment(numericCommentId, body);
-    return {
-      ...result,
-      id: String(result.id)
-    };
-  }
-};
-
-// src/utils/linear-markup-converter.ts
-import { appendFileSync } from "fs";
-import { join as join2, dirname, basename, extname } from "path";
-var LinearMarkupConverter = class {
-  /**
-   * Convert HTML details/summary blocks to Linear's collapsible format
-   * Handles nested details blocks recursively
-   *
-   * @param text - Text containing HTML details/summary blocks
-   * @returns Text with details/summary converted to Linear format
-   */
-  static convertDetailsToLinear(text) {
-    if (!text) {
-      return text;
-    }
-    let previousText = "";
-    let currentText = text;
-    while (previousText !== currentText) {
-      previousText = currentText;
-      currentText = this.convertSinglePass(currentText);
-    }
-    return currentText;
-  }
-  /**
-   * Perform a single pass of details block conversion
-   * Converts the innermost details blocks first
-   */
-  static convertSinglePass(text) {
-    const detailsRegex = /<details[^>]*>\s*<summary[^>]*>(.*?)<\/summary>\s*(.*?)\s*<\/details>/gis;
-    return text.replace(detailsRegex, (_match, summary, content) => {
-      const cleanSummary = this.cleanText(summary);
-      const cleanContent = this.cleanContent(content);
-      if (cleanContent) {
-        return `+++ ${cleanSummary}
-
-${cleanContent}
-
-+++`;
-      } else {
-        return `+++ ${cleanSummary}
-
-+++`;
-      }
-    });
-  }
-  /**
-   * Clean text by trimming whitespace and decoding common HTML entities
-   */
-  static cleanText(text) {
-    return text.trim().replace(/&lt;/g, "<").replace(/&gt;/g, ">").replace(/&amp;/g, "&").replace(/&quot;/g, '"').replace(/&#39;/g, "'");
-  }
-  /**
-   * Clean content while preserving internal structure
-   * - Removes leading/trailing whitespace
-   * - Normalizes internal blank lines (max 2 consecutive newlines)
-   * - Preserves code blocks and other formatting
-   */
-  static cleanContent(content) {
-    if (!content) {
-      return "";
-    }
-    let cleaned = content.trim();
-    cleaned = cleaned.replace(/\n{3,}/g, "\n\n");
-    return cleaned;
-  }
-  /**
-   * Check if text contains HTML details/summary blocks
-   * Useful for conditional conversion
-   */
-  static hasDetailsBlocks(text) {
-    if (!text) {
-      return false;
-    }
-    const detailsRegex = /<details[^>]*>.*?<summary[^>]*>.*?<\/summary>.*?<\/details>/is;
-    return detailsRegex.test(text);
-  }
-  /**
-   * Remove wrapper tags from code sample details blocks
-   * Identifies details blocks where summary contains "X lines" pattern
-   * and removes the details/summary tags while preserving the content
-   *
-   * @param text - Text containing potential code sample details blocks
-   * @returns Text with code sample wrappers removed
-   */
-  static removeCodeSampleWrappers(text) {
-    if (!text) {
-      return text;
-    }
-    const codeSampleRegex = /<details[^>]*>\s*<summary[^>]*>([^<]*\d+\s+lines[^<]*)<\/summary>\s*([\s\S]*?)<\/details>/gi;
-    return text.replace(codeSampleRegex, (_match, _summary, content) => {
-      return content.trim();
-    });
-  }
-  /**
-   * Convert text for Linear - applies all necessary conversions
-   * Currently only converts details/summary blocks, but can be extended
-   * for other HTML to Linear markdown conversions
-   */
-  static convertToLinear(text) {
-    if (!text) {
-      return text;
-    }
-    this.logConversion("INPUT", text);
-    let converted = text;
-    converted = this.removeCodeSampleWrappers(converted);
-    converted = this.convertDetailsToLinear(converted);
-    this.logConversion("OUTPUT", converted);
-    return converted;
-  }
-  /**
-   * Log conversion input/output if LINEAR_MARKDOWN_LOG_FILE is set
-   */
-  static logConversion(label, content) {
-    const logFilePath = process.env.LINEAR_MARKDOWN_LOG_FILE;
-    if (!logFilePath) {
-      return;
-    }
-    try {
-      const timestampedPath = this.getTimestampedLogPath(logFilePath);
-      const timestamp = (/* @__PURE__ */ new Date()).toISOString();
-      const separator = "================================";
-      const logEntry = `${separator}
-[${timestamp}] CONVERSION ${label}
-${separator}
-${label}:
-${content}
-
-`;
-      appendFileSync(timestampedPath, logEntry, "utf-8");
-    } catch {
-    }
-  }
-  /**
-   * Generate timestamped log file path
-   * Example: debug.log -> debug-20231202-161234.log
-   */
-  static getTimestampedLogPath(logFilePath) {
-    const dir = dirname(logFilePath);
-    const ext = extname(logFilePath);
-    const base = basename(logFilePath, ext);
-    const now = /* @__PURE__ */ new Date();
-    const timestamp = [
-      now.getFullYear(),
-      String(now.getMonth() + 1).padStart(2, "0"),
-      String(now.getDate()).padStart(2, "0")
-    ].join("") + "-" + [
-      String(now.getHours()).padStart(2, "0"),
-      String(now.getMinutes()).padStart(2, "0"),
-      String(now.getSeconds()).padStart(2, "0")
-    ].join("");
-    return join2(dir, `${base}-${timestamp}${ext}`);
-  }
-};
-
-// src/mcp/LinearIssueManagementProvider.ts
-var LinearIssueManagementProvider = class {
-  constructor() {
-    this.providerName = "linear";
-  }
-  /**
-   * Fetch issue details using Linear SDK
-   */
-  async getIssue(input) {
-    const { number, includeComments = true } = input;
-    const raw = await fetchLinearIssue(number);
-    const state = raw.state && (raw.state.toLowerCase().includes("done") || raw.state.toLowerCase().includes("completed") || raw.state.toLowerCase().includes("canceled")) ? "closed" : "open";
-    const result = {
-      id: raw.identifier,
-      title: raw.title,
-      body: raw.description ?? "",
-      state,
-      url: raw.url,
-      provider: "linear",
-      author: null,
-      // Linear SDK doesn't return author in basic fetch
-      // Linear-specific fields
-      linearState: raw.state,
-      createdAt: raw.createdAt,
-      updatedAt: raw.updatedAt
-    };
-    if (includeComments) {
-      try {
-        const comments = await this.fetchIssueComments(number);
-        if (comments) {
-          result.comments = comments;
-        }
-      } catch {
-      }
-    }
-    return result;
-  }
-  /**
-   * Fetch comments for an issue
-   */
-  async fetchIssueComments(identifier) {
-    try {
-      const comments = await fetchLinearIssueComments(identifier);
-      return comments.map((comment) => ({
-        id: comment.id,
-        body: comment.body,
-        createdAt: comment.createdAt,
-        author: null,
-        // Linear SDK doesn't return comment author info in basic fetch
-        ...comment.updatedAt && { updatedAt: comment.updatedAt }
-      }));
-    } catch {
-      return [];
-    }
-  }
-  /**
-   * Fetch a specific comment by ID
-   */
-  async getComment(input) {
-    const { commentId } = input;
-    const raw = await getLinearComment(commentId);
-    return {
-      id: raw.id,
-      body: raw.body,
-      author: null,
-      // Linear SDK doesn't return comment author info in basic fetch
-      created_at: raw.createdAt
-    };
-  }
-  /**
-   * Create a new comment on an issue
-   */
-  async createComment(input) {
-    const { number, body } = input;
-    const convertedBody = LinearMarkupConverter.convertToLinear(body);
-    const result = await createLinearComment(number, convertedBody);
-    return {
-      id: result.id,
-      url: result.url,
-      created_at: result.createdAt
-    };
-  }
-  /**
-   * Update an existing comment
-   */
-  async updateComment(input) {
-    const { commentId, body } = input;
-    const convertedBody = LinearMarkupConverter.convertToLinear(body);
-    const result = await updateLinearComment(commentId, convertedBody);
-    return {
-      id: result.id,
-      url: result.url,
-      updated_at: result.updatedAt
-    };
-  }
-};
-
-// src/mcp/IssueManagementProviderFactory.ts
-var IssueManagementProviderFactory = class {
-  /**
-   * Create an issue management provider based on the provider type
-   */
-  static create(provider) {
-    switch (provider) {
-      case "github":
-        return new GitHubIssueManagementProvider();
-      case "linear":
-        return new LinearIssueManagementProvider();
-      default:
-        throw new Error(`Unsupported issue management provider: ${provider}`);
-    }
-  }
-};
-
 // src/lib/SessionSummaryService.ts
 var RECAPS_DIR = path.join(os.homedir(), ".config", "iloom-ai", "recaps");
 function slugifyPath(loomPath) {
@@ -776,4 +364,4 @@ var SessionSummaryService = class {
 export {
   SessionSummaryService
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-CNSTXBJ3.js.map
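Taken together, the chunk-K7SEEHKO.js → chunk-CNSTXBJ3.js hunks above show the main refactor in this release: the GitHub and Linear issue-management providers (along with LinearMarkupConverter and IssueManagementProviderFactory) are no longer inlined into the SessionSummaryService chunk. The chunk now imports IssueManagementProviderFactory from the new chunk-GJMEKEI5.js (+517 lines in the file list), which appears to be where those classes were extracted to. Based on the bundled SessionSummaryService source embedded in the source map below, the comment-posting path now looks roughly like this sketch; the settings object and the literal values are illustrative placeholders, not part of the diff:

```ts
// Minimal sketch of the provider-factory call path, reconstructed from the
// bundled source shown in this diff. Only the import and the two calls
// (create / createComment) are taken from the package; the rest is illustrative.
import { IssueManagementProviderFactory } from "./chunk-GJMEKEI5.js";

async function postSummaryComment(settings: any, targetNumber: string | number, summary: string) {
  // "github" | "linear", read from iloom settings (defaults to GitHub)
  const providerType = settings.issueManagement?.provider ?? "github";
  const provider = IssueManagementProviderFactory.create(providerType);

  // One provider-agnostic call replaces the previous direct
  // createIssueComment / createPRComment / createLinearComment branches.
  await provider.createComment({
    number: String(targetNumber),
    body: summary,
    type: "issue",
  });
}
```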
package/dist/chunk-CNSTXBJ3.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../src/lib/SessionSummaryService.ts","../src/utils/claude-transcript.ts"],"sourcesContent":["/**\n * SessionSummaryService: Generates and posts Claude session summaries\n *\n * This service orchestrates:\n * 1. Reading session metadata to get session ID\n * 2. Loading and processing the session-summary prompt template\n * 3. Invoking Claude headless to generate the summary\n * 4. Posting the summary as a comment to the issue/PR\n */\n\nimport path from 'path'\nimport os from 'os'\nimport fs from 'fs-extra'\nimport { logger } from '../utils/logger.js'\nimport { launchClaude, generateDeterministicSessionId } from '../utils/claude.js'\nimport { readSessionContext } from '../utils/claude-transcript.js'\nimport { PromptTemplateManager } from './PromptTemplateManager.js'\nimport { MetadataManager } from './MetadataManager.js'\nimport { SettingsManager, type IloomSettings } from './SettingsManager.js'\nimport { IssueManagementProviderFactory } from '../mcp/IssueManagementProviderFactory.js'\nimport type { IssueProvider } from '../mcp/types.js'\nimport { hasMultipleRemotes } from '../utils/remote.js'\nimport type { RecapFile, RecapOutput } from '../mcp/recap-types.js'\nimport { formatRecapMarkdown } from '../utils/recap-formatter.js'\n\nconst RECAPS_DIR = path.join(os.homedir(), '.config', 'iloom-ai', 'recaps')\n\n/**\n * Slugify path to recap filename (matches MetadataManager/RecapCommand algorithm)\n *\n * Algorithm:\n * 1. Trim trailing slashes\n * 2. Replace all path separators (/ or \\) with ___ (triple underscore)\n * 3. Replace any other non-alphanumeric characters (except _ and -) with -\n * 4. Append .json\n */\nfunction slugifyPath(loomPath: string): string {\n\tlet slug = loomPath.replace(/[/\\\\]+$/, '')\n\tslug = slug.replace(/[/\\\\]/g, '___')\n\tslug = slug.replace(/[^a-zA-Z0-9_-]/g, '-')\n\treturn `${slug}.json`\n}\n\n/**\n * Read recap file for a worktree path with graceful degradation\n * Returns formatted recap string or null if not found/error\n */\nasync function readRecapFile(worktreePath: string): Promise<string | null> {\n\ttry {\n\t\tconst filePath = path.join(RECAPS_DIR, slugifyPath(worktreePath))\n\t\tif (await fs.pathExists(filePath)) {\n\t\t\tconst content = await fs.readFile(filePath, 'utf8')\n\t\t\tconst recap = JSON.parse(content) as RecapFile\n\n\t\t\t// Check if recap has any meaningful content\n\t\t\tconst hasGoal = recap.goal !== null && recap.goal !== undefined\n\t\t\tconst hasComplexity = recap.complexity !== null && recap.complexity !== undefined\n\t\t\tconst hasEntries = Array.isArray(recap.entries) && recap.entries.length > 0\n\t\t\tconst hasArtifacts = Array.isArray(recap.artifacts) && recap.artifacts.length > 0\n\t\t\tconst hasContent = hasGoal || hasComplexity || hasEntries || hasArtifacts\n\n\t\t\tif (hasContent) {\n\t\t\t\t// Convert RecapFile (optional fields) to RecapOutput (required fields)\n\t\t\t\t// Same pattern as RecapCommand.ts:61-66\n\t\t\t\tconst recapOutput: RecapOutput = {\n\t\t\t\t\tfilePath,\n\t\t\t\t\tgoal: recap.goal ?? null,\n\t\t\t\t\tcomplexity: recap.complexity ?? null,\n\t\t\t\t\tentries: recap.entries ?? [],\n\t\t\t\t\tartifacts: recap.artifacts ?? 
[],\n\t\t\t\t}\n\t\t\t\treturn formatRecapMarkdown(recapOutput)\n\t\t\t}\n\t\t}\n\t\treturn null\n\t} catch {\n\t\t// Graceful degradation - return null on any error\n\t\treturn null\n\t}\n}\n\n/**\n * Input for generating and posting a session summary\n */\nexport interface SessionSummaryInput {\n\tworktreePath: string\n\tissueNumber: string | number\n\tbranchName: string\n\tloomType: 'issue' | 'pr' | 'branch'\n\t/** Optional PR number - when provided, summary is posted to the PR instead of the issue */\n\tprNumber?: number\n}\n\n/**\n * Result from generating a session summary\n */\nexport interface SessionSummaryResult {\n\tsummary: string\n\tsessionId: string\n}\n\n/**\n * Service that generates and posts Claude session summaries to issues\n */\nexport class SessionSummaryService {\n\tprivate templateManager: PromptTemplateManager\n\tprivate metadataManager: MetadataManager\n\tprivate settingsManager: SettingsManager\n\n\tconstructor(\n\t\ttemplateManager?: PromptTemplateManager,\n\t\tmetadataManager?: MetadataManager,\n\t\tsettingsManager?: SettingsManager\n\t) {\n\t\tthis.templateManager = templateManager ?? new PromptTemplateManager()\n\t\tthis.metadataManager = metadataManager ?? new MetadataManager()\n\t\tthis.settingsManager = settingsManager ?? new SettingsManager()\n\t}\n\n\t/**\n\t * Generate and post a session summary to the issue\n\t *\n\t * Non-blocking: Catches all errors and logs warnings instead of throwing\n\t * This ensures the finish workflow continues even if summary generation fails\n\t */\n\tasync generateAndPostSummary(input: SessionSummaryInput): Promise<void> {\n\t\ttry {\n\t\t\t// 1. Skip for branch type (no issue to comment on)\n\t\t\tif (input.loomType === 'branch') {\n\t\t\t\tlogger.debug('Skipping session summary: branch type has no associated issue')\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\t// 2. Read metadata to get sessionId, or generate deterministically\n\t\t\tconst metadata = await this.metadataManager.readMetadata(input.worktreePath)\n\t\t\tconst sessionId = metadata?.sessionId ?? generateDeterministicSessionId(input.worktreePath)\n\n\t\t\t// 3. Load settings to check generateSummary config\n\t\t\tconst settings = await this.settingsManager.loadSettings(input.worktreePath)\n\t\t\tif (!this.shouldGenerateSummary(input.loomType, settings)) {\n\t\t\t\tlogger.debug(`Skipping session summary: generateSummary is disabled for ${input.loomType} workflow`)\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tlogger.info('Generating session summary...')\n\n\t\t\t// 4. Try to read compact summaries from session transcript for additional context\n\t\t\tlogger.debug(`Looking for session transcript with sessionId: ${sessionId}`)\n\t\t\tconst compactSummaries = await readSessionContext(input.worktreePath, sessionId)\n\t\t\tif (compactSummaries) {\n\t\t\t\tlogger.debug(`Found compact summaries (${compactSummaries.length} chars)`)\n\t\t\t} else {\n\t\t\t\tlogger.debug('No compact summaries found in session transcript')\n\t\t\t}\n\n\t\t\t// 5. Try to read recap data for high-signal context\n\t\t\tconst recapData = await readRecapFile(input.worktreePath)\n\t\t\tif (recapData) {\n\t\t\t\tlogger.debug(`Found recap data (${recapData.length} chars)`)\n\t\t\t} else {\n\t\t\t\tlogger.debug('No recap data found')\n\t\t\t}\n\n\t\t\t// 6. 
Load and process the session-summary template\n\t\t\tconst prompt = await this.templateManager.getPrompt('session-summary', {\n\t\t\t\tISSUE_NUMBER: String(input.issueNumber),\n\t\t\t\tBRANCH_NAME: input.branchName,\n\t\t\t\tLOOM_TYPE: input.loomType,\n\t\t\t\tCOMPACT_SUMMARIES: compactSummaries ?? '',\n\t\t\t\tRECAP_DATA: recapData ?? '',\n\t\t\t})\n\n\t\t\tlogger.debug('Session summary prompt:\\n' + prompt)\n\n\t\t\t// 7. Invoke Claude headless to generate summary\n\t\t\t// Use --resume with session ID so Claude knows which conversation to summarize\n\t\t\tconst summaryModel = this.settingsManager.getSummaryModel(settings)\n\t\t\tconst summaryResult = await launchClaude(prompt, {\n\t\t\t\theadless: true,\n\t\t\t\tmodel: summaryModel,\n\t\t\t\tsessionId: sessionId, // Resume this session so Claude has conversation context\n\t\t\t})\n\n\t\t\tif (!summaryResult || typeof summaryResult !== 'string' || summaryResult.trim() === '') {\n\t\t\t\tlogger.warn('Session summary generation returned empty result')\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\tconst summary = summaryResult.trim()\n\n\t\t\t// 8. Skip posting if summary is too short (likely failed generation)\n\t\t\tif (summary.length < 100) {\n\t\t\t\tlogger.warn('Session summary too short, skipping post')\n\t\t\t\treturn\n\t\t\t}\n\n\t\t\t// 9. Post summary to issue or PR (PR takes priority when prNumber is provided)\n\t\t\tawait this.postSummaryToIssue(input.issueNumber, summary, settings, input.worktreePath, input.prNumber)\n\n\t\t\tconst targetDescription = input.prNumber ? `PR #${input.prNumber}` : 'issue'\n\t\t\tlogger.success(`Session summary posted to ${targetDescription}`)\n\t\t} catch (error) {\n\t\t\t// Non-blocking: Log warning but don't throw\n\t\t\tconst errorMessage = error instanceof Error ? error.message : String(error)\n\t\t\tlogger.warn(`Failed to generate session summary: ${errorMessage}`)\n\t\t\tlogger.debug('Session summary generation error details:', { error })\n\t\t}\n\t}\n\n\t/**\n\t * Generate a session summary without posting it\n\t *\n\t * This method is useful for previewing the summary or for use by CLI commands\n\t * that want to display the summary before optionally posting it.\n\t *\n\t * @param worktreePath - Path to the worktree\n\t * @param branchName - Name of the branch\n\t * @param loomType - Type of loom ('issue' | 'pr' | 'branch')\n\t * @param issueNumber - Issue or PR number (optional, for template variables)\n\t * @returns The generated summary and session ID\n\t * @throws Error if Claude invocation fails\n\t */\n\tasync generateSummary(\n\t\tworktreePath: string,\n\t\tbranchName: string,\n\t\tloomType: 'issue' | 'pr' | 'branch',\n\t\tissueNumber?: string | number\n\t): Promise<SessionSummaryResult> {\n\t\t// 1. Read metadata or generate deterministic session ID\n\t\tconst metadata = await this.metadataManager.readMetadata(worktreePath)\n\t\tconst sessionId = metadata?.sessionId ?? generateDeterministicSessionId(worktreePath)\n\n\t\t// 2. Load settings for model configuration\n\t\tconst settings = await this.settingsManager.loadSettings(worktreePath)\n\n\t\tlogger.info('Generating session summary...')\n\n\t\t// 3. 
Try to read compact summaries from session transcript for additional context\n\t\tlogger.debug(`Looking for session transcript with sessionId: ${sessionId}`)\n\t\tconst compactSummaries = await readSessionContext(worktreePath, sessionId)\n\t\tif (compactSummaries) {\n\t\t\tlogger.debug(`Found compact summaries (${compactSummaries.length} chars)`)\n\t\t} else {\n\t\t\tlogger.debug('No compact summaries found in session transcript')\n\t\t}\n\n\t\t// 4. Try to read recap data for high-signal context\n\t\tconst recapData = await readRecapFile(worktreePath)\n\t\tif (recapData) {\n\t\t\tlogger.debug(`Found recap data (${recapData.length} chars)`)\n\t\t} else {\n\t\t\tlogger.debug('No recap data found')\n\t\t}\n\n\t\t// 5. Load and process the session-summary template\n\t\tconst prompt = await this.templateManager.getPrompt('session-summary', {\n\t\t\tISSUE_NUMBER: issueNumber !== undefined ? String(issueNumber) : '',\n\t\t\tBRANCH_NAME: branchName,\n\t\t\tLOOM_TYPE: loomType,\n\t\t\tCOMPACT_SUMMARIES: compactSummaries ?? '',\n\t\t\tRECAP_DATA: recapData ?? '',\n\t\t})\n\n\t\tlogger.debug('Session summary prompt:\\n' + prompt)\n\n\t\t// 6. Invoke Claude headless to generate summary\n\t\tconst summaryModel = this.settingsManager.getSummaryModel(settings)\n\t\tconst summaryResult = await launchClaude(prompt, {\n\t\t\theadless: true,\n\t\t\tmodel: summaryModel,\n\t\t\tsessionId: sessionId,\n\t\t})\n\n\t\tif (!summaryResult || typeof summaryResult !== 'string' || summaryResult.trim() === '') {\n\t\t\tthrow new Error('Session summary generation returned empty result')\n\t\t}\n\n\t\tconst summary = summaryResult.trim()\n\n\t\t// 7. Check if summary is too short (likely failed generation)\n\t\tif (summary.length < 100) {\n\t\t\tthrow new Error('Session summary too short - generation may have failed')\n\t\t}\n\n\t\treturn {\n\t\t\tsummary,\n\t\t\tsessionId: sessionId,\n\t\t}\n\t}\n\n\t/**\n\t * Post a summary to an issue (used by both generateAndPostSummary and CLI commands)\n\t *\n\t * @param issueNumber - Issue or PR number to post to\n\t * @param summary - The summary text to post\n\t * @param worktreePath - Path to worktree for loading settings (optional)\n\t */\n\tasync postSummary(\n\t\tissueNumber: string | number,\n\t\tsummary: string,\n\t\tworktreePath?: string\n\t): Promise<void> {\n\t\tconst settings = await this.settingsManager.loadSettings(worktreePath)\n\t\tawait this.postSummaryToIssue(issueNumber, summary, settings, worktreePath ?? process.cwd())\n\t\tlogger.success('Session summary posted to issue')\n\t}\n\n\t/**\n\t * Determine if summary should be generated based on loom type and settings\n\t *\n\t * @param loomType - The type of loom being finished\n\t * @param settings - The loaded iloom settings\n\t * @returns true if summary should be generated\n\t */\n\tshouldGenerateSummary(\n\t\tloomType: 'issue' | 'pr' | 'branch',\n\t\tsettings: IloomSettings\n\t): boolean {\n\t\t// Branch type never generates summaries (no issue to comment on)\n\t\tif (loomType === 'branch') {\n\t\t\treturn false\n\t\t}\n\n\t\t// Get workflow-specific config\n\t\tconst workflowConfig =\n\t\t\tloomType === 'issue'\n\t\t\t\t? settings.workflows?.issue\n\t\t\t\t: settings.workflows?.pr\n\n\t\t// Default to true if not explicitly set (for issue and pr types)\n\t\treturn workflowConfig?.generateSummary ?? 
true\n\t}\n\n\t/**\n\t * Apply attribution footer to summary based on settings\n\t *\n\t * @param summary - The summary text\n\t * @param worktreePath - Path to worktree for loading settings and detecting remotes\n\t * @returns Summary with attribution footer if applicable\n\t */\n\tasync applyAttribution(summary: string, worktreePath: string): Promise<string> {\n\t\tconst settings = await this.settingsManager.loadSettings(worktreePath)\n\t\treturn this.applyAttributionWithSettings(summary, settings, worktreePath)\n\t}\n\n\t/**\n\t * Apply attribution footer to summary based on provided settings\n\t *\n\t * @param summary - The summary text\n\t * @param settings - The loaded iloom settings\n\t * @param worktreePath - Path to worktree for detecting remotes\n\t * @returns Summary with attribution footer if applicable\n\t */\n\tasync applyAttributionWithSettings(\n\t\tsummary: string,\n\t\tsettings: IloomSettings,\n\t\tworktreePath: string\n\t): Promise<string> {\n\t\tconst attributionSetting = settings.attribution ?? 'upstreamOnly'\n\t\tlogger.debug(`Attribution setting from config: ${settings.attribution}`)\n\t\tlogger.debug(`Attribution setting (with default): ${attributionSetting}`)\n\n\t\tlet shouldShowAttribution = false\n\t\tif (attributionSetting === 'on') {\n\t\t\tshouldShowAttribution = true\n\t\t\tlogger.debug('Attribution: always on')\n\t\t} else if (attributionSetting === 'upstreamOnly') {\n\t\t\t// Only show attribution when contributing to external repos (multiple remotes)\n\t\t\tshouldShowAttribution = await hasMultipleRemotes(worktreePath)\n\t\t\tlogger.debug(`Attribution: upstreamOnly, hasMultipleRemotes=${shouldShowAttribution}`)\n\t\t} else {\n\t\t\tlogger.debug('Attribution: off')\n\t\t}\n\t\t// 'off' keeps shouldShowAttribution = false\n\n\t\tlogger.debug(`Should show attribution: ${shouldShowAttribution}`)\n\t\tif (shouldShowAttribution) {\n\t\t\tlogger.debug('Attribution footer appended to summary')\n\t\t\treturn `${summary}\\n\\n---\\n*Generated with 🤖❤️ by [iloom.ai](https://iloom.ai)*`\n\t\t}\n\n\t\treturn summary\n\t}\n\n\t/**\n\t * Post the summary as a comment to the issue or PR\n\t *\n\t * @param issueNumber - The issue number (used when prNumber is not provided)\n\t * @param summary - The summary text to post\n\t * @param settings - The loaded iloom settings\n\t * @param worktreePath - Path to worktree for attribution detection\n\t * @param prNumber - Optional PR number - when provided, posts to the PR instead\n\t */\n\tprivate async postSummaryToIssue(\n\t\tissueNumber: string | number,\n\t\tsummary: string,\n\t\tsettings: IloomSettings,\n\t\tworktreePath: string,\n\t\tprNumber?: number\n\t): Promise<void> {\n\t\t// Get the issue management provider from settings\n\t\tconst providerType = (settings.issueManagement?.provider ?? 'github') as IssueProvider\n\t\tconst provider = IssueManagementProviderFactory.create(providerType)\n\n\t\t// Apply attribution if configured\n\t\tconst finalSummary = await this.applyAttributionWithSettings(summary, settings, worktreePath)\n\n\t\t// When prNumber is provided, post to the PR instead of the issue\n\t\tconst targetNumber = prNumber ?? issueNumber\n\t\tconst targetType = prNumber !== undefined ? 
'pr' : 'issue'\n\n\t\t// Create the comment\n\t\tawait provider.createComment({\n\t\t\tnumber: String(targetNumber),\n\t\t\tbody: finalSummary,\n\t\t\ttype: targetType,\n\t\t})\n\t}\n}\n","/**\n * Claude Transcript Utilities\n *\n * Provides functions to read and parse Claude Code session transcript files\n * stored in ~/.claude/projects/. These transcripts contain the full conversation\n * history including compact summaries from when conversations were compacted.\n */\n\nimport { readFile } from 'fs/promises'\nimport { homedir } from 'os'\nimport { join } from 'path'\nimport { logger } from './logger.js'\n\n/**\n * Entry in a Claude Code JSONL transcript file\n */\nexport interface TranscriptEntry {\n\ttype: 'user' | 'assistant' | 'system' | 'file-history-snapshot' | 'queue-operation'\n\tsessionId?: string\n\tmessage?: { role: string; content: string | Array<{ type: string; text?: string }> }\n\tisCompactSummary?: boolean\n\tisVisibleInTranscriptOnly?: boolean\n\tsubtype?: string // 'compact_boundary' for compaction markers\n\tcontent?: string\n\ttimestamp?: string\n\tuuid?: string\n\tparentUuid?: string\n}\n\n/**\n * Get the Claude projects directory path encoding for a worktree path\n * Encoding: /Users/adam/Projects/foo_bar -> -Users-adam-Projects-foo-bar\n *\n * Claude Code encodes paths by replacing both '/' and '_' with '-'\n *\n * @param worktreePath - Absolute path to the worktree\n * @returns Encoded directory name for Claude projects\n */\nexport function getClaudeProjectPath(worktreePath: string): string {\n\t// Replace all '/' and '_' with '-' (matching Claude Code's encoding)\n\treturn worktreePath.replace(/[/_]/g, '-')\n}\n\n/**\n * Get the full path to the Claude projects directory\n * @returns Path to ~/.claude/projects/\n */\nexport function getClaudeProjectsDir(): string {\n\treturn join(homedir(), '.claude', 'projects')\n}\n\n/**\n * Find the session transcript file for a given worktree and session ID\n *\n * @param worktreePath - Absolute path to the worktree\n * @param sessionId - Session ID to find transcript for\n * @returns Full path to the transcript file, or null if not found\n */\nexport function findSessionTranscript(worktreePath: string, sessionId: string): string | null {\n\tconst projectsDir = getClaudeProjectsDir()\n\tconst projectDirName = getClaudeProjectPath(worktreePath)\n\tconst transcriptPath = join(projectsDir, projectDirName, `${sessionId}.jsonl`)\n\treturn transcriptPath\n}\n\n/**\n * Extract the content from a compact summary message\n * Handles both string content and array content formats\n */\nfunction extractMessageContent(message: TranscriptEntry['message']): string | null {\n\tif (!message) return null\n\n\tif (typeof message.content === 'string') {\n\t\treturn message.content\n\t}\n\n\tif (Array.isArray(message.content)) {\n\t\t// Concatenate all text elements\n\t\treturn message.content\n\t\t\t.filter((item) => item.type === 'text' && item.text)\n\t\t\t.map((item) => item.text)\n\t\t\t.join('\\n')\n\t}\n\n\treturn null\n}\n\n/**\n * Extract compact summaries from a session transcript file\n *\n * Returns empty array if file doesn't exist or no summaries found.\n * Each compact summary contains structured history of pre-compaction conversation.\n *\n * @param transcriptPath - Full path to the transcript JSONL file\n * @param maxSummaries - Maximum number of summaries to return (default 3)\n * @returns Array of compact summary content strings, newest first\n */\nexport async function extractCompactSummaries(\n\ttranscriptPath: 
string,\n\tmaxSummaries = 3\n): Promise<string[]> {\n\ttry {\n\t\tconst content = await readFile(transcriptPath, 'utf-8')\n\t\tconst lines = content.split('\\n').filter((line) => line.trim())\n\n\t\tconst summaries: string[] = []\n\n\t\tfor (const line of lines) {\n\t\t\ttry {\n\t\t\t\tconst entry = JSON.parse(line) as TranscriptEntry\n\n\t\t\t\t// Look for compact summary entries\n\t\t\t\tif (entry.isCompactSummary === true && entry.message) {\n\t\t\t\t\tconst summaryContent = extractMessageContent(entry.message)\n\t\t\t\t\tif (summaryContent) {\n\t\t\t\t\t\tsummaries.push(summaryContent)\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t} catch {\n\t\t\t\t// Skip malformed JSON lines\n\t\t\t\tlogger.debug('Skipping malformed JSONL line in transcript')\n\t\t\t}\n\t\t}\n\n\t\t// Return most recent summaries (they appear in order in the file)\n\t\t// Limit to maxSummaries\n\t\treturn summaries.slice(-maxSummaries)\n\t} catch (error) {\n\t\t// File not found or permission error - return empty array (graceful degradation)\n\t\tif (error instanceof Error && 'code' in error && error.code === 'ENOENT') {\n\t\t\tlogger.debug('Transcript file not found:', transcriptPath)\n\t\t} else {\n\t\t\tlogger.debug('Error reading transcript file:', error)\n\t\t}\n\t\treturn []\n\t}\n}\n\n/**\n * Read session transcript and extract compact summaries for summary generation\n *\n * This is the main entry point for SessionSummaryService to get pre-compaction\n * conversation context. It gracefully handles all error cases.\n *\n * @param worktreePath - Absolute path to the worktree\n * @param sessionId - Session ID to find transcript for\n * @param maxSummaries - Maximum number of summaries to return (default 3)\n * @returns Formatted string of compact summaries, or null if none found\n */\nexport async function readSessionContext(\n\tworktreePath: string,\n\tsessionId: string,\n\tmaxSummaries = 3\n): Promise<string | null> {\n\tconst transcriptPath = findSessionTranscript(worktreePath, sessionId)\n\tif (!transcriptPath) {\n\t\treturn null\n\t}\n\n\tlogger.debug(`Checking transcript at: ${transcriptPath}`)\n\n\tconst summaries = await extractCompactSummaries(transcriptPath, maxSummaries)\n\n\tif (summaries.length === 0) {\n\t\treturn null\n\t}\n\n\t// Format summaries with separators\n\t// Newest summaries are at the end, so we reverse to show newest first\n\tconst formattedSummaries = summaries\n\t\t.reverse()\n\t\t.map((summary, index) => {\n\t\t\tconst header =\n\t\t\t\tsummaries.length > 1\n\t\t\t\t\t? 
`### Compact Summary ${index + 1} of ${summaries.length}\\n\\n`\n\t\t\t\t\t: ''\n\t\t\treturn `${header}${summary}`\n\t\t})\n\t\t.join('\\n\\n---\\n\\n')\n\n\treturn formattedSummaries\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAUA,OAAO,UAAU;AACjB,OAAO,QAAQ;AACf,OAAO,QAAQ;;;ACJf,SAAS,gBAAgB;AACzB,SAAS,eAAe;AACxB,SAAS,YAAY;AA4Bd,SAAS,qBAAqB,cAA8B;AAElE,SAAO,aAAa,QAAQ,SAAS,GAAG;AACzC;AAMO,SAAS,uBAA+B;AAC9C,SAAO,KAAK,QAAQ,GAAG,WAAW,UAAU;AAC7C;AASO,SAAS,sBAAsB,cAAsB,WAAkC;AAC7F,QAAM,cAAc,qBAAqB;AACzC,QAAM,iBAAiB,qBAAqB,YAAY;AACxD,QAAM,iBAAiB,KAAK,aAAa,gBAAgB,GAAG,SAAS,QAAQ;AAC7E,SAAO;AACR;AAMA,SAAS,sBAAsB,SAAoD;AAClF,MAAI,CAAC,QAAS,QAAO;AAErB,MAAI,OAAO,QAAQ,YAAY,UAAU;AACxC,WAAO,QAAQ;AAAA,EAChB;AAEA,MAAI,MAAM,QAAQ,QAAQ,OAAO,GAAG;AAEnC,WAAO,QAAQ,QACb,OAAO,CAAC,SAAS,KAAK,SAAS,UAAU,KAAK,IAAI,EAClD,IAAI,CAAC,SAAS,KAAK,IAAI,EACvB,KAAK,IAAI;AAAA,EACZ;AAEA,SAAO;AACR;AAYA,eAAsB,wBACrB,gBACA,eAAe,GACK;AACpB,MAAI;AACH,UAAM,UAAU,MAAM,SAAS,gBAAgB,OAAO;AACtD,UAAM,QAAQ,QAAQ,MAAM,IAAI,EAAE,OAAO,CAAC,SAAS,KAAK,KAAK,CAAC;AAE9D,UAAM,YAAsB,CAAC;AAE7B,eAAW,QAAQ,OAAO;AACzB,UAAI;AACH,cAAM,QAAQ,KAAK,MAAM,IAAI;AAG7B,YAAI,MAAM,qBAAqB,QAAQ,MAAM,SAAS;AACrD,gBAAM,iBAAiB,sBAAsB,MAAM,OAAO;AAC1D,cAAI,gBAAgB;AACnB,sBAAU,KAAK,cAAc;AAAA,UAC9B;AAAA,QACD;AAAA,MACD,QAAQ;AAEP,eAAO,MAAM,6CAA6C;AAAA,MAC3D;AAAA,IACD;AAIA,WAAO,UAAU,MAAM,CAAC,YAAY;AAAA,EACrC,SAAS,OAAO;AAEf,QAAI,iBAAiB,SAAS,UAAU,SAAS,MAAM,SAAS,UAAU;AACzE,aAAO,MAAM,8BAA8B,cAAc;AAAA,IAC1D,OAAO;AACN,aAAO,MAAM,kCAAkC,KAAK;AAAA,IACrD;AACA,WAAO,CAAC;AAAA,EACT;AACD;AAaA,eAAsB,mBACrB,cACA,WACA,eAAe,GACU;AACzB,QAAM,iBAAiB,sBAAsB,cAAc,SAAS;AACpE,MAAI,CAAC,gBAAgB;AACpB,WAAO;AAAA,EACR;AAEA,SAAO,MAAM,2BAA2B,cAAc,EAAE;AAExD,QAAM,YAAY,MAAM,wBAAwB,gBAAgB,YAAY;AAE5E,MAAI,UAAU,WAAW,GAAG;AAC3B,WAAO;AAAA,EACR;AAIA,QAAM,qBAAqB,UACzB,QAAQ,EACR,IAAI,CAAC,SAAS,UAAU;AACxB,UAAM,SACL,UAAU,SAAS,IAChB,uBAAuB,QAAQ,CAAC,OAAO,UAAU,MAAM;AAAA;AAAA,IACvD;AACJ,WAAO,GAAG,MAAM,GAAG,OAAO;AAAA,EAC3B,CAAC,EACA,KAAK,aAAa;AAEpB,SAAO;AACR;;;AD5JA,IAAM,aAAa,KAAK,KAAK,GAAG,QAAQ,GAAG,WAAW,YAAY,QAAQ;AAW1E,SAAS,YAAY,UAA0B;AAC9C,MAAI,OAAO,SAAS,QAAQ,WAAW,EAAE;AACzC,SAAO,KAAK,QAAQ,UAAU,KAAK;AACnC,SAAO,KAAK,QAAQ,mBAAmB,GAAG;AAC1C,SAAO,GAAG,IAAI;AACf;AAMA,eAAe,cAAc,cAA8C;AAC1E,MAAI;AACH,UAAM,WAAW,KAAK,KAAK,YAAY,YAAY,YAAY,CAAC;AAChE,QAAI,MAAM,GAAG,WAAW,QAAQ,GAAG;AAClC,YAAM,UAAU,MAAM,GAAG,SAAS,UAAU,MAAM;AAClD,YAAM,QAAQ,KAAK,MAAM,OAAO;AAGhC,YAAM,UAAU,MAAM,SAAS,QAAQ,MAAM,SAAS;AACtD,YAAM,gBAAgB,MAAM,eAAe,QAAQ,MAAM,eAAe;AACxE,YAAM,aAAa,MAAM,QAAQ,MAAM,OAAO,KAAK,MAAM,QAAQ,SAAS;AAC1E,YAAM,eAAe,MAAM,QAAQ,MAAM,SAAS,KAAK,MAAM,UAAU,SAAS;AAChF,YAAM,aAAa,WAAW,iBAAiB,cAAc;AAE7D,UAAI,YAAY;AAGf,cAAM,cAA2B;AAAA,UAChC;AAAA,UACA,MAAM,MAAM,QAAQ;AAAA,UACpB,YAAY,MAAM,cAAc;AAAA,UAChC,SAAS,MAAM,WAAW,CAAC;AAAA,UAC3B,WAAW,MAAM,aAAa,CAAC;AAAA,QAChC;AACA,eAAO,oBAAoB,WAAW;AAAA,MACvC;AAAA,IACD;AACA,WAAO;AAAA,EACR,QAAQ;AAEP,WAAO;AAAA,EACR;AACD;AAyBO,IAAM,wBAAN,MAA4B;AAAA,EAKlC,YACC,iBACA,iBACA,iBACC;AACD,SAAK,kBAAkB,mBAAmB,IAAI,sBAAsB;AACpE,SAAK,kBAAkB,mBAAmB,IAAI,gBAAgB;AAC9D,SAAK,kBAAkB,mBAAmB,IAAI,gBAAgB;AAAA,EAC/D;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,uBAAuB,OAA2C;AACvE,QAAI;AAEH,UAAI,MAAM,aAAa,UAAU;AAChC,eAAO,MAAM,+DAA+D;AAC5E;AAAA,MACD;AAGA,YAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,MAAM,YAAY;AAC3E,YAAM,aAAY,qCAAU,cAAa,+BAA+B,MAAM,YAAY;AAG1F,YAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,MAAM,YAAY;AAC3E,UAAI,CAAC,KAAK,sBAAsB,MAAM,UAAU,QAAQ,GAAG;AAC1D,eAAO,MAAM,6DAA6D,MAAM,QAAQ,WAAW;AACnG;AAAA,MACD;AAEA,aAAO,KAAK,+BAA+B;AAG3C,aAAO,MAAM,kDAAkD,SAAS,EAAE;AAC1E,YAAM,mBAAmB,MAAM,mBAAmB,MAAM,cAAc,SAAS;AAC/E,UAAI,kBAAkB;AACrB,eAAO,MAAM,4BAA4B,iBAAiB,MAAM,SAAS;AAAA,MAC1E,O
AAO;AACN,eAAO,MAAM,kDAAkD;AAAA,MAChE;AAGA,YAAM,YAAY,MAAM,cAAc,MAAM,YAAY;AACxD,UAAI,WAAW;AACd,eAAO,MAAM,qBAAqB,UAAU,MAAM,SAAS;AAAA,MAC5D,OAAO;AACN,eAAO,MAAM,qBAAqB;AAAA,MACnC;AAGA,YAAM,SAAS,MAAM,KAAK,gBAAgB,UAAU,mBAAmB;AAAA,QACtE,cAAc,OAAO,MAAM,WAAW;AAAA,QACtC,aAAa,MAAM;AAAA,QACnB,WAAW,MAAM;AAAA,QACjB,mBAAmB,oBAAoB;AAAA,QACvC,YAAY,aAAa;AAAA,MAC1B,CAAC;AAED,aAAO,MAAM,8BAA8B,MAAM;AAIjD,YAAM,eAAe,KAAK,gBAAgB,gBAAgB,QAAQ;AAClE,YAAM,gBAAgB,MAAM,aAAa,QAAQ;AAAA,QAChD,UAAU;AAAA,QACV,OAAO;AAAA,QACP;AAAA;AAAA,MACD,CAAC;AAED,UAAI,CAAC,iBAAiB,OAAO,kBAAkB,YAAY,cAAc,KAAK,MAAM,IAAI;AACvF,eAAO,KAAK,kDAAkD;AAC9D;AAAA,MACD;AAEA,YAAM,UAAU,cAAc,KAAK;AAGnC,UAAI,QAAQ,SAAS,KAAK;AACzB,eAAO,KAAK,0CAA0C;AACtD;AAAA,MACD;AAGA,YAAM,KAAK,mBAAmB,MAAM,aAAa,SAAS,UAAU,MAAM,cAAc,MAAM,QAAQ;AAEtG,YAAM,oBAAoB,MAAM,WAAW,OAAO,MAAM,QAAQ,KAAK;AACrE,aAAO,QAAQ,6BAA6B,iBAAiB,EAAE;AAAA,IAChE,SAAS,OAAO;AAEf,YAAM,eAAe,iBAAiB,QAAQ,MAAM,UAAU,OAAO,KAAK;AAC1E,aAAO,KAAK,uCAAuC,YAAY,EAAE;AACjE,aAAO,MAAM,6CAA6C,EAAE,MAAM,CAAC;AAAA,IACpE;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAeA,MAAM,gBACL,cACA,YACA,UACA,aACgC;AAEhC,UAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,YAAY;AACrE,UAAM,aAAY,qCAAU,cAAa,+BAA+B,YAAY;AAGpF,UAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,YAAY;AAErE,WAAO,KAAK,+BAA+B;AAG3C,WAAO,MAAM,kDAAkD,SAAS,EAAE;AAC1E,UAAM,mBAAmB,MAAM,mBAAmB,cAAc,SAAS;AACzE,QAAI,kBAAkB;AACrB,aAAO,MAAM,4BAA4B,iBAAiB,MAAM,SAAS;AAAA,IAC1E,OAAO;AACN,aAAO,MAAM,kDAAkD;AAAA,IAChE;AAGA,UAAM,YAAY,MAAM,cAAc,YAAY;AAClD,QAAI,WAAW;AACd,aAAO,MAAM,qBAAqB,UAAU,MAAM,SAAS;AAAA,IAC5D,OAAO;AACN,aAAO,MAAM,qBAAqB;AAAA,IACnC;AAGA,UAAM,SAAS,MAAM,KAAK,gBAAgB,UAAU,mBAAmB;AAAA,MACtE,cAAc,gBAAgB,SAAY,OAAO,WAAW,IAAI;AAAA,MAChE,aAAa;AAAA,MACb,WAAW;AAAA,MACX,mBAAmB,oBAAoB;AAAA,MACvC,YAAY,aAAa;AAAA,IAC1B,CAAC;AAED,WAAO,MAAM,8BAA8B,MAAM;AAGjD,UAAM,eAAe,KAAK,gBAAgB,gBAAgB,QAAQ;AAClE,UAAM,gBAAgB,MAAM,aAAa,QAAQ;AAAA,MAChD,UAAU;AAAA,MACV,OAAO;AAAA,MACP;AAAA,IACD,CAAC;AAED,QAAI,CAAC,iBAAiB,OAAO,kBAAkB,YAAY,cAAc,KAAK,MAAM,IAAI;AACvF,YAAM,IAAI,MAAM,kDAAkD;AAAA,IACnE;AAEA,UAAM,UAAU,cAAc,KAAK;AAGnC,QAAI,QAAQ,SAAS,KAAK;AACzB,YAAM,IAAI,MAAM,wDAAwD;AAAA,IACzE;AAEA,WAAO;AAAA,MACN;AAAA,MACA;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,YACL,aACA,SACA,cACgB;AAChB,UAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,YAAY;AACrE,UAAM,KAAK,mBAAmB,aAAa,SAAS,UAAU,gBAAgB,QAAQ,IAAI,CAAC;AAC3F,WAAO,QAAQ,iCAAiC;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,sBACC,UACA,UACU;AA7TZ;AA+TE,QAAI,aAAa,UAAU;AAC1B,aAAO;AAAA,IACR;AAGA,UAAM,iBACL,aAAa,WACV,cAAS,cAAT,mBAAoB,SACpB,cAAS,cAAT,mBAAoB;AAGxB,YAAO,iDAAgB,oBAAmB;AAAA,EAC3C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASA,MAAM,iBAAiB,SAAiB,cAAuC;AAC9E,UAAM,WAAW,MAAM,KAAK,gBAAgB,aAAa,YAAY;AACrE,WAAO,KAAK,6BAA6B,SAAS,UAAU,YAAY;AAAA,EACzE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUA,MAAM,6BACL,SACA,UACA,cACkB;AAClB,UAAM,qBAAqB,SAAS,eAAe;AACnD,WAAO,MAAM,oCAAoC,SAAS,WAAW,EAAE;AACvE,WAAO,MAAM,uCAAuC,kBAAkB,EAAE;AAExE,QAAI,wBAAwB;AAC5B,QAAI,uBAAuB,MAAM;AAChC,8BAAwB;AACxB,aAAO,MAAM,wBAAwB;AAAA,IACtC,WAAW,uBAAuB,gBAAgB;AAEjD,8BAAwB,MAAM,mBAAmB,YAAY;AAC7D,aAAO,MAAM,iDAAiD,qBAAqB,EAAE;AAAA,IACtF,OAAO;AACN,aAAO,MAAM,kBAAkB;AAAA,IAChC;AAGA,WAAO,MAAM,4BAA4B,qBAAqB,EAAE;AAChE,QAAI,uBAAuB;AAC1B,aAAO,MAAM,wCAAwC;AACrD,aAAO,GAAG,OAAO;AAAA;AAAA;AAAA;AAAA,IAClB;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWA,MAAc,mBACb,aACA,SACA,UACA,cACA,UACgB;AA/YlB;AAiZE,UAAM,iBAAgB,cAAS,oBAAT,mBAA0B,aAAY;AAC5D,UAAM,WAAW,+BAA+B,OAAO,YAAY;AAGnE,UAAM,eAAe,MAAM,KAAK,6BAA6B,SAAS,UAAU,YAAY;AAG5F,UAAM,eAAe,YAAY;AACjC,UAAM,aAAa,aAAa,SAAY,OAAO;AAGnD,UAAM,SAA
S,cAAc;AAAA,MAC5B,QAAQ,OAAO,YAAY;AAAA,MAC3B,MAAM;AAAA,MACN,MAAM;AAAA,IACP,CAAC;AAAA,EACF;AACD;","names":[]}
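One detail worth noting from the code removed from chunk-CNSTXBJ3.js above: LinearMarkupConverter rewrites GitHub-style `<details>/<summary>` blocks into Linear's `+++` collapsible syntax before comments are posted to Linear. The snippet below is a simplified, self-contained re-implementation of that conversion for illustration only; the real class (shown in the removal hunk, and presumably now bundled in chunk-GJMEKEI5.js) also handles nesting, HTML-entity decoding, and "N lines" code-sample wrappers:

```ts
// Simplified sketch of the details → "+++" conversion; regex taken from the
// removed convertSinglePass implementation, surrounding code is illustrative.
const detailsRegex = /<details[^>]*>\s*<summary[^>]*>(.*?)<\/summary>\s*(.*?)\s*<\/details>/gis;

function toLinearCollapsible(text: string): string {
  return text.replace(detailsRegex, (_match: string, summary: string, content: string) =>
    content.trim()
      ? `+++ ${summary.trim()}\n\n${content.trim()}\n\n+++`
      : `+++ ${summary.trim()}\n\n+++`
  );
}

console.log(toLinearCollapsible("<details><summary>Stack trace</summary>\n\nError: boom\n</details>"));
// +++ Stack trace
//
// Error: boom
//
// +++
```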
package/dist/{chunk-2A7WQKBE.js → chunk-DAOS6EC3.js}
@@ -1,20 +1,31 @@
 #!/usr/bin/env node
+import {
+  ProjectCapabilityDetector
+} from "./chunk-7GLZVDPQ.js";
+import {
+  detectPackageManager,
+  runScript
+} from "./chunk-RD7I2Q2F.js";
+import {
+  getPackageConfig,
+  hasScript
+} from "./chunk-XPKN3QWY.js";
 import {
   executeGitCommand,
   findMainWorktreePathWithSettings,
   findWorktreeForBranch,
   getMergeTargetBranch
-} from "./chunk-
+} from "./chunk-ZA575VLF.js";
 import {
   SettingsManager
-} from "./chunk-
+} from "./chunk-WFQ5CLTR.js";
 import {
   MetadataManager
-} from "./chunk-
+} from "./chunk-VWGKGNJP.js";
 import {
   detectClaudeCli,
   launchClaude
-} from "./chunk-
+} from "./chunk-FP7G7DG3.js";
 import {
   getLogger
 } from "./chunk-6MLEBAYZ.js";
@@ -374,7 +385,86 @@ To recover:
   }
 };
 
+// src/lib/BuildRunner.ts
+var BuildRunner = class {
+  constructor(capabilityDetector) {
+    this.capabilityDetector = capabilityDetector ?? new ProjectCapabilityDetector();
+  }
+  /**
+   * Run build verification in the specified directory
+   * @param buildPath - Path where build should run (typically main worktree path)
+   * @param options - Build options
+   */
+  async runBuild(buildPath, options = {}) {
+    const startTime = Date.now();
+    try {
+      const pkgJson = await getPackageConfig(buildPath);
+      const hasBuildScript = hasScript(pkgJson, "build");
+      if (!hasBuildScript) {
+        getLogger().debug("Skipping build - no build script found");
+        return {
+          success: true,
+          skipped: true,
+          reason: "No build script found in package configuration",
+          duration: Date.now() - startTime
+        };
+      }
+    } catch (error) {
+      if (error instanceof Error && error.message.includes("package.json not found")) {
+        getLogger().debug("Skipping build - no package configuration found");
+        return {
+          success: true,
+          skipped: true,
+          reason: "No package configuration found in project",
+          duration: Date.now() - startTime
+        };
+      }
+      throw error;
+    }
+    const capabilities = await this.capabilityDetector.detectCapabilities(buildPath);
+    const isCLIProject = capabilities.capabilities.includes("cli");
+    if (!isCLIProject) {
+      getLogger().debug("Skipping build - not a CLI project (no bin field)");
+      return {
+        success: true,
+        skipped: true,
+        reason: "Project is not a CLI project (no bin field in package.json)",
+        duration: Date.now() - startTime
+      };
+    }
+    const packageManager = await detectPackageManager(buildPath);
+    if (options.dryRun) {
+      const command = packageManager === "npm" ? "npm run build" : `${packageManager} build`;
+      getLogger().info(`[DRY RUN] Would run: ${command}`);
+      return {
+        success: true,
+        skipped: false,
+        duration: Date.now() - startTime
+      };
+    }
+    getLogger().info("Running build...");
+    try {
+      await runScript("build", buildPath, [], { quiet: true });
+      getLogger().success("Build completed successfully");
+      return {
+        success: true,
+        skipped: false,
+        duration: Date.now() - startTime
+      };
+    } catch {
+      const runCommand = packageManager === "npm" ? "npm run build" : `${packageManager} build`;
+      throw new Error(
+        `Error: Build failed.
+Fix build errors before proceeding.
+
+Run '${runCommand}' to see detailed errors.`
+      );
+    }
+  }
+};
+
 export {
-  MergeManager
+  MergeManager,
+  BuildRunner
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-DAOS6EC3.js.map