@iloom/cli 0.3.4 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +13 -3
- package/dist/{BranchNamingService-A77VI6AI.js → BranchNamingService-TOM2KAUT.js} +4 -3
- package/dist/ClaudeContextManager-VEGJTS5E.js +16 -0
- package/dist/ClaudeService-ICSHJMQ5.js +15 -0
- package/dist/GitHubService-RPM27GWD.js +12 -0
- package/dist/{LoomLauncher-ZV3ZZIBA.js → LoomLauncher-SJBZFZXE.js} +25 -22
- package/dist/LoomLauncher-SJBZFZXE.js.map +1 -0
- package/dist/PromptTemplateManager-2TDZAUC6.js +9 -0
- package/dist/README.md +13 -3
- package/dist/{SettingsManager-I2LRCW2A.js → SettingsManager-FJFU6JJD.js} +7 -3
- package/dist/SettingsMigrationManager-EH3J2TCN.js +10 -0
- package/dist/{chunk-UJL4HI2R.js → chunk-3NFBZRPR.js} +2 -2
- package/dist/chunk-6UIGZD2N.js +20 -0
- package/dist/chunk-6UIGZD2N.js.map +1 -0
- package/dist/{chunk-RIEO2WML.js → chunk-74VMN2KC.js} +26 -2
- package/dist/chunk-74VMN2KC.js.map +1 -0
- package/dist/{chunk-OYF4VIFI.js → chunk-75B2HZZ5.js} +147 -22
- package/dist/chunk-75B2HZZ5.js.map +1 -0
- package/dist/{chunk-PGPI5LR4.js → chunk-ADDNFQJ4.js} +7 -21
- package/dist/chunk-ADDNFQJ4.js.map +1 -0
- package/dist/{chunk-AKUJXDNW.js → chunk-F4J6KEL6.js} +3 -3
- package/dist/{chunk-DLHA5VQ3.js → chunk-HD5SUKI2.js} +36 -179
- package/dist/chunk-HD5SUKI2.js.map +1 -0
- package/dist/chunk-HHDSIE72.js +667 -0
- package/dist/chunk-HHDSIE72.js.map +1 -0
- package/dist/{chunk-OXAM2WVC.js → chunk-HVGQP44L.js} +21 -1
- package/dist/chunk-HVGQP44L.js.map +1 -0
- package/dist/{chunk-RW54ZMBM.js → chunk-JJUPY5MM.js} +2 -2
- package/dist/{chunk-UAN4A3YU.js → chunk-KM3W7YQX.js} +11 -11
- package/dist/{chunk-3RUPPQRG.js → chunk-KO2FOMHL.js} +43 -2
- package/dist/{chunk-3RUPPQRG.js.map → chunk-KO2FOMHL.js.map} +1 -1
- package/dist/{chunk-2MAIX45J.js → chunk-LTNDJMTH.js} +104 -43
- package/dist/chunk-LTNDJMTH.js.map +1 -0
- package/dist/{chunk-2CXREBLZ.js → chunk-M5XUCTTJ.js} +8 -6
- package/dist/chunk-M5XUCTTJ.js.map +1 -0
- package/dist/{chunk-4XIDC3NF.js → chunk-MD6HA5IK.js} +2 -2
- package/dist/chunk-MLS5FAV7.js +189 -0
- package/dist/chunk-MLS5FAV7.js.map +1 -0
- package/dist/{chunk-2IJEMXOB.js → chunk-NFVFVYAP.js} +419 -427
- package/dist/chunk-NFVFVYAP.js.map +1 -0
- package/dist/{chunk-OC4H6HJD.js → chunk-O7WHXLCB.js} +2 -2
- package/dist/{chunk-M7JJCX53.js → chunk-OEGECBFS.js} +20 -20
- package/dist/chunk-OEGECBFS.js.map +1 -0
- package/dist/{chunk-MKWYLDFK.js → chunk-OF7BNW4D.js} +43 -3
- package/dist/chunk-OF7BNW4D.js.map +1 -0
- package/dist/{chunk-SUOXY5WJ.js → chunk-P2WZIDF3.js} +5 -5
- package/dist/chunk-P2WZIDF3.js.map +1 -0
- package/dist/{chunk-PA6Q6AWM.js → chunk-PSFVTBM7.js} +2 -2
- package/dist/chunk-QHA67Q7A.js +281 -0
- package/dist/chunk-QHA67Q7A.js.map +1 -0
- package/dist/{chunk-ZM3CFL5L.js → chunk-QRBOPFAA.js} +3 -3
- package/dist/{chunk-IFB4Z76W.js → chunk-S44CHE3G.js} +13 -12
- package/dist/chunk-S44CHE3G.js.map +1 -0
- package/dist/{chunk-CE26YH2U.js → chunk-SJ2GZ6RF.js} +48 -50
- package/dist/chunk-SJ2GZ6RF.js.map +1 -0
- package/dist/{chunk-SSCQCCJ7.js → chunk-THF25ICZ.js} +2 -2
- package/dist/{chunk-5Q3NDNNV.js → chunk-TR5MC2U6.js} +153 -6
- package/dist/chunk-TR5MC2U6.js.map +1 -0
- package/dist/{chunk-5VK4NRSF.js → chunk-UNXRACJ7.js} +35 -36
- package/dist/chunk-UNXRACJ7.js.map +1 -0
- package/dist/{chunk-GEHQXLEI.js → chunk-UYVWLISQ.js} +18 -35
- package/dist/chunk-UYVWLISQ.js.map +1 -0
- package/dist/{chunk-OSCLCMDG.js → chunk-UYWAESOT.js} +3 -3
- package/dist/{chunk-ZT3YZB4K.js → chunk-VBFDVGAE.js} +12 -12
- package/dist/chunk-VBFDVGAE.js.map +1 -0
- package/dist/{chunk-CDZERT7Z.js → chunk-VWNS6DH5.js} +48 -4
- package/dist/chunk-VWNS6DH5.js.map +1 -0
- package/dist/{chunk-CFFQ2Z7A.js → chunk-WUQQNE63.js} +2 -2
- package/dist/{claude-W52VKI6L.js → claude-X7EBJRB2.js} +8 -5
- package/dist/{cleanup-H4VXU3C3.js → cleanup-7QVPYBJJ.js} +133 -122
- package/dist/cleanup-7QVPYBJJ.js.map +1 -0
- package/dist/cli.js +901 -425
- package/dist/cli.js.map +1 -1
- package/dist/{color-F7RU6B6Z.js → color-ZPIIUADB.js} +3 -3
- package/dist/{contribute-Y7IQV5QY.js → contribute-RZYCYUDX.js} +8 -6
- package/dist/{contribute-Y7IQV5QY.js.map → contribute-RZYCYUDX.js.map} +1 -1
- package/dist/dev-server-LOY7YWCP.js +298 -0
- package/dist/dev-server-LOY7YWCP.js.map +1 -0
- package/dist/{feedback-XTUCKJNT.js → feedback-562KPG5U.js} +13 -12
- package/dist/{feedback-XTUCKJNT.js.map → feedback-562KPG5U.js.map} +1 -1
- package/dist/{git-IYA53VIC.js → git-OXJACVAU.js} +16 -4
- package/dist/hooks/iloom-hook.js +258 -0
- package/dist/{ignite-T74RYXCA.js → ignite-VSIPGKKG.js} +245 -39
- package/dist/ignite-VSIPGKKG.js.map +1 -0
- package/dist/index.d.ts +459 -124
- package/dist/index.js +740 -210
- package/dist/index.js.map +1 -1
- package/dist/init-SCR2LQ4A.js +21 -0
- package/dist/{installation-detector-VARGFFRZ.js → installation-detector-6R6YOFVZ.js} +3 -3
- package/dist/mcp/issue-management-server.js +2 -1
- package/dist/mcp/issue-management-server.js.map +1 -1
- package/dist/neon-helpers-L5CXQ5CT.js +11 -0
- package/dist/{open-UMXANW5S.js → open-CX7HUE26.js} +12 -10
- package/dist/{open-UMXANW5S.js.map → open-CX7HUE26.js.map} +1 -1
- package/dist/projects-6DTNDVLH.js +73 -0
- package/dist/projects-6DTNDVLH.js.map +1 -0
- package/dist/{prompt-QALMYTVC.js → prompt-A7GGRHSY.js} +3 -3
- package/dist/prompts/init-prompt.txt +49 -0
- package/dist/prompts/issue-prompt.txt +110 -8
- package/dist/prompts/regular-prompt.txt +90 -0
- package/dist/prompts/session-summary-prompt.txt +82 -0
- package/dist/{rebase-VJ2VKR6R.js → rebase-55URTXZC.js} +11 -9
- package/dist/{rebase-VJ2VKR6R.js.map → rebase-55URTXZC.js.map} +1 -1
- package/dist/{remote-VUNCQZ6J.js → remote-73TZ2ADI.js} +3 -3
- package/dist/{run-MJYY4PUT.js → run-DP2U2CA2.js} +12 -10
- package/dist/{run-MJYY4PUT.js.map → run-DP2U2CA2.js.map} +1 -1
- package/dist/schema/settings.schema.json +49 -0
- package/dist/summary-J3CJSM7L.js +244 -0
- package/dist/summary-J3CJSM7L.js.map +1 -0
- package/dist/{test-git-IT5EWQ5C.js → test-git-QLAIBJLX.js} +6 -4
- package/dist/{test-git-IT5EWQ5C.js.map → test-git-QLAIBJLX.js.map} +1 -1
- package/dist/{test-prefix-NPWDPUUH.js → test-prefix-6YM2ZOON.js} +6 -4
- package/dist/{test-prefix-NPWDPUUH.js.map → test-prefix-6YM2ZOON.js.map} +1 -1
- package/dist/{test-tabs-PRMRSHKI.js → test-tabs-JGO3VOXJ.js} +4 -4
- package/dist/{test-webserver-DAHONWCS.js → test-webserver-VPNLAFZ3.js} +2 -2
- package/dist/{update-4TDDUR5K.js → update-LETF5ASC.js} +4 -4
- package/dist/{update-notifier-QEX3CJHA.js → update-notifier-H55ZK7NU.js} +3 -3
- package/package.json +6 -6
- package/dist/ClaudeContextManager-BN7RE5ZQ.js +0 -15
- package/dist/ClaudeService-DLYLJUPA.js +0 -14
- package/dist/GitHubService-FZHHBOFG.js +0 -11
- package/dist/LoomLauncher-ZV3ZZIBA.js.map +0 -1
- package/dist/PromptTemplateManager-6HH3PVXV.js +0 -9
- package/dist/SettingsMigrationManager-TJ7UWZG5.js +0 -10
- package/dist/chunk-2CXREBLZ.js.map +0 -1
- package/dist/chunk-2IJEMXOB.js.map +0 -1
- package/dist/chunk-2MAIX45J.js.map +0 -1
- package/dist/chunk-5Q3NDNNV.js.map +0 -1
- package/dist/chunk-5VK4NRSF.js.map +0 -1
- package/dist/chunk-CDZERT7Z.js.map +0 -1
- package/dist/chunk-CE26YH2U.js.map +0 -1
- package/dist/chunk-DLHA5VQ3.js.map +0 -1
- package/dist/chunk-GEHQXLEI.js.map +0 -1
- package/dist/chunk-IFB4Z76W.js.map +0 -1
- package/dist/chunk-M7JJCX53.js.map +0 -1
- package/dist/chunk-MKWYLDFK.js.map +0 -1
- package/dist/chunk-OXAM2WVC.js.map +0 -1
- package/dist/chunk-OYF4VIFI.js.map +0 -1
- package/dist/chunk-PGPI5LR4.js.map +0 -1
- package/dist/chunk-RIEO2WML.js.map +0 -1
- package/dist/chunk-SUOXY5WJ.js.map +0 -1
- package/dist/chunk-ZT3YZB4K.js.map +0 -1
- package/dist/cleanup-H4VXU3C3.js.map +0 -1
- package/dist/ignite-T74RYXCA.js.map +0 -1
- package/dist/init-4FHTAM3F.js +0 -19
- package/dist/logger-MKYH4UDV.js +0 -12
- package/dist/neon-helpers-77PBPGJ5.js +0 -10
- package/dist/update-notifier-QEX3CJHA.js.map +0 -1
- /package/dist/{BranchNamingService-A77VI6AI.js.map → BranchNamingService-TOM2KAUT.js.map} +0 -0
- /package/dist/{ClaudeContextManager-BN7RE5ZQ.js.map → ClaudeContextManager-VEGJTS5E.js.map} +0 -0
- /package/dist/{ClaudeService-DLYLJUPA.js.map → ClaudeService-ICSHJMQ5.js.map} +0 -0
- /package/dist/{GitHubService-FZHHBOFG.js.map → GitHubService-RPM27GWD.js.map} +0 -0
- /package/dist/{PromptTemplateManager-6HH3PVXV.js.map → PromptTemplateManager-2TDZAUC6.js.map} +0 -0
- /package/dist/{SettingsManager-I2LRCW2A.js.map → SettingsManager-FJFU6JJD.js.map} +0 -0
- /package/dist/{SettingsMigrationManager-TJ7UWZG5.js.map → SettingsMigrationManager-EH3J2TCN.js.map} +0 -0
- /package/dist/{chunk-UJL4HI2R.js.map → chunk-3NFBZRPR.js.map} +0 -0
- /package/dist/{chunk-AKUJXDNW.js.map → chunk-F4J6KEL6.js.map} +0 -0
- /package/dist/{chunk-RW54ZMBM.js.map → chunk-JJUPY5MM.js.map} +0 -0
- /package/dist/{chunk-UAN4A3YU.js.map → chunk-KM3W7YQX.js.map} +0 -0
- /package/dist/{chunk-4XIDC3NF.js.map → chunk-MD6HA5IK.js.map} +0 -0
- /package/dist/{chunk-OC4H6HJD.js.map → chunk-O7WHXLCB.js.map} +0 -0
- /package/dist/{chunk-PA6Q6AWM.js.map → chunk-PSFVTBM7.js.map} +0 -0
- /package/dist/{chunk-ZM3CFL5L.js.map → chunk-QRBOPFAA.js.map} +0 -0
- /package/dist/{chunk-SSCQCCJ7.js.map → chunk-THF25ICZ.js.map} +0 -0
- /package/dist/{chunk-OSCLCMDG.js.map → chunk-UYWAESOT.js.map} +0 -0
- /package/dist/{chunk-CFFQ2Z7A.js.map → chunk-WUQQNE63.js.map} +0 -0
- /package/dist/{claude-W52VKI6L.js.map → claude-X7EBJRB2.js.map} +0 -0
- /package/dist/{color-F7RU6B6Z.js.map → color-ZPIIUADB.js.map} +0 -0
- /package/dist/{git-IYA53VIC.js.map → git-OXJACVAU.js.map} +0 -0
- /package/dist/{init-4FHTAM3F.js.map → init-SCR2LQ4A.js.map} +0 -0
- /package/dist/{installation-detector-VARGFFRZ.js.map → installation-detector-6R6YOFVZ.js.map} +0 -0
- /package/dist/{logger-MKYH4UDV.js.map → neon-helpers-L5CXQ5CT.js.map} +0 -0
- /package/dist/{neon-helpers-77PBPGJ5.js.map → prompt-A7GGRHSY.js.map} +0 -0
- /package/dist/{prompt-QALMYTVC.js.map → remote-73TZ2ADI.js.map} +0 -0
- /package/dist/{test-tabs-PRMRSHKI.js.map → test-tabs-JGO3VOXJ.js.map} +0 -0
- /package/dist/{test-webserver-DAHONWCS.js.map → test-webserver-VPNLAFZ3.js.map} +0 -0
- /package/dist/{update-4TDDUR5K.js.map → update-LETF5ASC.js.map} +0 -0
- /package/dist/{remote-VUNCQZ6J.js.map → update-notifier-H55ZK7NU.js.map} +0 -0
package/dist/index.js
CHANGED
|
@@ -9,12 +9,6 @@ var __export = (target, all) => {
|
|
|
9
9
|
};
|
|
10
10
|
|
|
11
11
|
// src/utils/logger.ts
|
|
12
|
-
var logger_exports = {};
|
|
13
|
-
__export(logger_exports, {
|
|
14
|
-
createLogger: () => createLogger,
|
|
15
|
-
default: () => logger_default,
|
|
16
|
-
logger: () => logger
|
|
17
|
-
});
|
|
18
12
|
import chalk, { Chalk } from "chalk";
|
|
19
13
|
function formatMessage(message, ...args) {
|
|
20
14
|
const formattedArgs = args.map(
|
|
@@ -52,7 +46,8 @@ function createLogger(options = {}) {
|
|
|
52
46
|
},
|
|
53
47
|
isDebugEnabled: () => {
|
|
54
48
|
return false;
|
|
55
|
-
}
|
|
49
|
+
},
|
|
50
|
+
stdout: process.stdout
|
|
56
51
|
};
|
|
57
52
|
}
|
|
58
53
|
return {
|
|
@@ -93,7 +88,57 @@ function createLogger(options = {}) {
|
|
|
93
88
|
},
|
|
94
89
|
isDebugEnabled: () => {
|
|
95
90
|
return globalDebugEnabled;
|
|
96
|
-
}
|
|
91
|
+
},
|
|
92
|
+
stdout: process.stdout
|
|
93
|
+
};
|
|
94
|
+
}
|
|
95
|
+
function createStderrLogger(options = {}) {
|
|
96
|
+
const { prefix = "", timestamp = false, forceColor, debug = globalDebugEnabled } = options;
|
|
97
|
+
let localDebugEnabled = debug;
|
|
98
|
+
const customChalk = forceColor !== void 0 ? new Chalk({ level: forceColor ? 3 : 0 }) : stderrChalk;
|
|
99
|
+
const prefixStr = prefix ? `[${prefix}] ` : "";
|
|
100
|
+
const getTimestamp = () => timestamp ? `[${(/* @__PURE__ */ new Date()).toISOString()}] ` : "";
|
|
101
|
+
return {
|
|
102
|
+
info: (message, ...args) => {
|
|
103
|
+
const formatted = formatMessage(message, ...args);
|
|
104
|
+
const fullMessage = `${getTimestamp()}${prefixStr}${formatted}`;
|
|
105
|
+
const output = formatWithEmoji(fullMessage, "\u{1F5C2}\uFE0F ", customChalk.blue);
|
|
106
|
+
console.error(output);
|
|
107
|
+
},
|
|
108
|
+
success: (message, ...args) => {
|
|
109
|
+
const formatted = formatMessage(message, ...args);
|
|
110
|
+
const fullMessage = `${getTimestamp()}${prefixStr}${formatted}`;
|
|
111
|
+
const output = formatWithEmoji(fullMessage, "\u2705", customChalk.green);
|
|
112
|
+
console.error(output);
|
|
113
|
+
},
|
|
114
|
+
warn: (message, ...args) => {
|
|
115
|
+
const formatted = formatMessage(message, ...args);
|
|
116
|
+
const fullMessage = `${getTimestamp()}${prefixStr}${formatted}`;
|
|
117
|
+
const output = formatWithEmoji(fullMessage, "\u26A0\uFE0F ", customChalk.yellow);
|
|
118
|
+
console.error(output);
|
|
119
|
+
},
|
|
120
|
+
error: (message, ...args) => {
|
|
121
|
+
const formatted = formatMessage(message, ...args);
|
|
122
|
+
const fullMessage = `${getTimestamp()}${prefixStr}${formatted}`;
|
|
123
|
+
const output = formatWithEmoji(fullMessage, "\u274C", customChalk.red);
|
|
124
|
+
console.error(output);
|
|
125
|
+
},
|
|
126
|
+
debug: (message, ...args) => {
|
|
127
|
+
if (localDebugEnabled) {
|
|
128
|
+
const formatted = formatMessage(message, ...args);
|
|
129
|
+
const fullMessage = `${getTimestamp()}${prefixStr}${formatted}`;
|
|
130
|
+
const output = formatWithEmoji(fullMessage, "\u{1F50D}", customChalk.gray);
|
|
131
|
+
console.error(output);
|
|
132
|
+
}
|
|
133
|
+
},
|
|
134
|
+
setDebug: (enabled) => {
|
|
135
|
+
localDebugEnabled = enabled;
|
|
136
|
+
},
|
|
137
|
+
isDebugEnabled: () => {
|
|
138
|
+
return globalDebugEnabled;
|
|
139
|
+
},
|
|
140
|
+
stdout: process.stderr
|
|
141
|
+
// Use stderr for progress output in JSON mode
|
|
97
142
|
};
|
|
98
143
|
}
|
|
99
144
|
var stdoutChalk, stderrChalk, globalDebugEnabled, logger, logger_default;
|
|
@@ -136,7 +181,8 @@ var init_logger = __esm({
|
|
|
136
181
|
},
|
|
137
182
|
isDebugEnabled: () => {
|
|
138
183
|
return globalDebugEnabled;
|
|
139
|
-
}
|
|
184
|
+
},
|
|
185
|
+
stdout: process.stdout
|
|
140
186
|
};
|
|
141
187
|
logger_default = logger;
|
|
142
188
|
}
|
|
@@ -153,6 +199,8 @@ __export(SettingsManager_exports, {
|
|
|
153
199
|
IloomSettingsSchemaNoDefaults: () => IloomSettingsSchemaNoDefaults,
|
|
154
200
|
NeonSettingsSchema: () => NeonSettingsSchema,
|
|
155
201
|
SettingsManager: () => SettingsManager,
|
|
202
|
+
SpinAgentSettingsSchema: () => SpinAgentSettingsSchema,
|
|
203
|
+
SummarySettingsSchema: () => SummarySettingsSchema,
|
|
156
204
|
WorkflowPermissionSchema: () => WorkflowPermissionSchema,
|
|
157
205
|
WorkflowPermissionSchemaNoDefaults: () => WorkflowPermissionSchemaNoDefaults,
|
|
158
206
|
WorkflowsSettingsSchema: () => WorkflowsSettingsSchema,
|
|
@@ -163,7 +211,7 @@ import path from "path";
|
|
|
163
211
|
import os from "os";
|
|
164
212
|
import { z } from "zod";
|
|
165
213
|
import deepmerge from "deepmerge";
|
|
166
|
-
var AgentSettingsSchema, WorkflowPermissionSchema, WorkflowPermissionSchemaNoDefaults, WorkflowsSettingsSchema, WorkflowsSettingsSchemaNoDefaults, CapabilitiesSettingsSchema, CapabilitiesSettingsSchemaNoDefaults, NeonSettingsSchema, DatabaseProvidersSettingsSchema, IloomSettingsSchema, IloomSettingsSchemaNoDefaults, SettingsManager;
|
|
214
|
+
var AgentSettingsSchema, SpinAgentSettingsSchema, SummarySettingsSchema, WorkflowPermissionSchema, WorkflowPermissionSchemaNoDefaults, WorkflowsSettingsSchema, WorkflowsSettingsSchemaNoDefaults, CapabilitiesSettingsSchema, CapabilitiesSettingsSchemaNoDefaults, NeonSettingsSchema, DatabaseProvidersSettingsSchema, IloomSettingsSchema, IloomSettingsSchemaNoDefaults, SettingsManager;
|
|
167
215
|
var init_SettingsManager = __esm({
|
|
168
216
|
"src/lib/SettingsManager.ts"() {
|
|
169
217
|
"use strict";
|
|
@@ -172,13 +220,20 @@ var init_SettingsManager = __esm({
|
|
|
172
220
|
model: z.enum(["sonnet", "opus", "haiku"]).optional().describe("Claude model shorthand: sonnet, opus, or haiku")
|
|
173
221
|
// Future: could add other per-agent overrides
|
|
174
222
|
});
|
|
223
|
+
SpinAgentSettingsSchema = z.object({
|
|
224
|
+
model: z.enum(["sonnet", "opus", "haiku"]).default("opus").describe("Claude model shorthand for spin orchestrator")
|
|
225
|
+
});
|
|
226
|
+
SummarySettingsSchema = z.object({
|
|
227
|
+
model: z.enum(["sonnet", "opus", "haiku"]).default("sonnet").describe("Claude model shorthand for session summary generation")
|
|
228
|
+
});
|
|
175
229
|
WorkflowPermissionSchema = z.object({
|
|
176
230
|
permissionMode: z.enum(["plan", "acceptEdits", "bypassPermissions", "default"]).optional().describe("Permission mode for Claude CLI in this workflow type"),
|
|
177
231
|
noVerify: z.boolean().optional().describe("Skip pre-commit hooks (--no-verify) when committing during finish workflow"),
|
|
178
232
|
startIde: z.boolean().default(true).describe("Launch IDE (code) when starting this workflow type"),
|
|
179
233
|
startDevServer: z.boolean().default(true).describe("Launch development server when starting this workflow type"),
|
|
180
234
|
startAiAgent: z.boolean().default(true).describe("Launch Claude Code agent when starting this workflow type"),
|
|
181
|
-
startTerminal: z.boolean().default(false).describe("Launch terminal window without dev server when starting this workflow type")
|
|
235
|
+
startTerminal: z.boolean().default(false).describe("Launch terminal window without dev server when starting this workflow type"),
|
|
236
|
+
generateSummary: z.boolean().default(true).describe("Generate and post Claude session summary when finishing this workflow type")
|
|
182
237
|
});
|
|
183
238
|
WorkflowPermissionSchemaNoDefaults = z.object({
|
|
184
239
|
permissionMode: z.enum(["plan", "acceptEdits", "bypassPermissions", "default"]).optional().describe("Permission mode for Claude CLI in this workflow type"),
|
|
@@ -186,7 +241,8 @@ var init_SettingsManager = __esm({
|
|
|
186
241
|
startIde: z.boolean().optional().describe("Launch IDE (code) when starting this workflow type"),
|
|
187
242
|
startDevServer: z.boolean().optional().describe("Launch development server when starting this workflow type"),
|
|
188
243
|
startAiAgent: z.boolean().optional().describe("Launch Claude Code agent when starting this workflow type"),
|
|
189
|
-
startTerminal: z.boolean().optional().describe("Launch terminal window without dev server when starting this workflow type")
|
|
244
|
+
startTerminal: z.boolean().optional().describe("Launch terminal window without dev server when starting this workflow type"),
|
|
245
|
+
generateSummary: z.boolean().optional().describe("Generate and post Claude session summary when finishing this workflow type")
|
|
190
246
|
});
|
|
191
247
|
WorkflowsSettingsSchema = z.object({
|
|
192
248
|
issue: WorkflowPermissionSchema.optional(),
|
|
@@ -254,6 +310,12 @@ var init_SettingsManager = __esm({
|
|
|
254
310
|
agents: z.record(z.string(), AgentSettingsSchema).optional().nullable().describe(
|
|
255
311
|
"Per-agent configuration overrides. Available agents: iloom-issue-analyzer (analyzes issues), iloom-issue-planner (creates implementation plans), iloom-issue-analyze-and-plan (combined analysis and planning), iloom-issue-complexity-evaluator (evaluates complexity), iloom-issue-enhancer (enhances issue descriptions), iloom-issue-implementer (implements code changes), iloom-issue-reviewer (reviews code changes against requirements)"
|
|
256
312
|
),
|
|
313
|
+
spin: SpinAgentSettingsSchema.optional().describe(
|
|
314
|
+
"Spin orchestrator configuration. Model defaults to opus when not configured."
|
|
315
|
+
),
|
|
316
|
+
summary: SummarySettingsSchema.optional().describe(
|
|
317
|
+
"Session summary generation configuration. Model defaults to sonnet when not configured."
|
|
318
|
+
),
|
|
257
319
|
capabilities: CapabilitiesSettingsSchema.describe("Project capability configurations"),
|
|
258
320
|
databaseProviders: DatabaseProvidersSettingsSchema.describe("Database provider configurations"),
|
|
259
321
|
issueManagement: z.object({
|
|
@@ -316,6 +378,12 @@ var init_SettingsManager = __esm({
|
|
|
316
378
|
agents: z.record(z.string(), AgentSettingsSchema).optional().nullable().describe(
|
|
317
379
|
"Per-agent configuration overrides. Available agents: iloom-issue-analyzer (analyzes issues), iloom-issue-planner (creates implementation plans), iloom-issue-analyze-and-plan (combined analysis and planning), iloom-issue-complexity-evaluator (evaluates complexity), iloom-issue-enhancer (enhances issue descriptions), iloom-issue-implementer (implements code changes), iloom-issue-reviewer (reviews code changes against requirements)"
|
|
318
380
|
),
|
|
381
|
+
spin: z.object({
|
|
382
|
+
model: z.enum(["sonnet", "opus", "haiku"]).optional()
|
|
383
|
+
}).optional().describe("Spin orchestrator configuration"),
|
|
384
|
+
summary: z.object({
|
|
385
|
+
model: z.enum(["sonnet", "opus", "haiku"]).optional()
|
|
386
|
+
}).optional().describe("Session summary generation configuration"),
|
|
319
387
|
capabilities: CapabilitiesSettingsSchemaNoDefaults.describe("Project capability configurations"),
|
|
320
388
|
databaseProviders: DatabaseProvidersSettingsSchema.describe("Database provider configurations"),
|
|
321
389
|
issueManagement: z.object({
|
|
@@ -445,8 +513,8 @@ Note: CLI overrides were applied. Check your --set arguments.`);
|
|
|
445
513
|
*/
|
|
446
514
|
formatAllZodErrors(error, settingsPath) {
|
|
447
515
|
const errorMessages = error.issues.map((issue) => {
|
|
448
|
-
const
|
|
449
|
-
return ` - ${
|
|
516
|
+
const path7 = issue.path.length > 0 ? issue.path.join(".") : "root";
|
|
517
|
+
return ` - ${path7}: ${issue.message}`;
|
|
450
518
|
});
|
|
451
519
|
return new Error(
|
|
452
520
|
`Settings validation failed at ${settingsPath}:
|
|
@@ -548,6 +616,28 @@ ${errorMessages.join("\n")}`
|
|
|
548
616
|
}
|
|
549
617
|
return protectedBranches;
|
|
550
618
|
}
|
|
619
|
+
/**
|
|
620
|
+
* Get the spin orchestrator model with default applied
|
|
621
|
+
* Default is defined in SpinAgentSettingsSchema
|
|
622
|
+
*
|
|
623
|
+
* @param settings - Pre-loaded settings object
|
|
624
|
+
* @returns Model shorthand ('opus', 'sonnet', or 'haiku')
|
|
625
|
+
*/
|
|
626
|
+
getSpinModel(settings) {
|
|
627
|
+
var _a;
|
|
628
|
+
return ((_a = settings == null ? void 0 : settings.spin) == null ? void 0 : _a.model) ?? SpinAgentSettingsSchema.parse({}).model;
|
|
629
|
+
}
|
|
630
|
+
/**
|
|
631
|
+
* Get the session summary model with default applied
|
|
632
|
+
* Default is defined in SummarySettingsSchema
|
|
633
|
+
*
|
|
634
|
+
* @param settings - Pre-loaded settings object
|
|
635
|
+
* @returns Model shorthand ('opus', 'sonnet', or 'haiku')
|
|
636
|
+
*/
|
|
637
|
+
getSummaryModel(settings) {
|
|
638
|
+
var _a;
|
|
639
|
+
return ((_a = settings == null ? void 0 : settings.summary) == null ? void 0 : _a.model) ?? SummarySettingsSchema.parse({}).model;
|
|
640
|
+
}
|
|
551
641
|
};
|
|
552
642
|
}
|
|
553
643
|
});
|
|
@@ -728,14 +818,206 @@ var WorkspaceManager = class {
|
|
|
728
818
|
};
|
|
729
819
|
|
|
730
820
|
// src/lib/GitWorktreeManager.ts
|
|
821
|
+
import path4 from "path";
|
|
822
|
+
import fs2 from "fs-extra";
|
|
823
|
+
|
|
824
|
+
// src/utils/git.ts
|
|
825
|
+
init_SettingsManager();
|
|
731
826
|
import path3 from "path";
|
|
827
|
+
import { execa } from "execa";
|
|
828
|
+
|
|
829
|
+
// src/lib/MetadataManager.ts
|
|
830
|
+
import path2 from "path";
|
|
831
|
+
import os2 from "os";
|
|
732
832
|
import fs from "fs-extra";
|
|
733
833
|
|
|
834
|
+
// src/utils/logger-context.ts
|
|
835
|
+
init_logger();
|
|
836
|
+
import { AsyncLocalStorage } from "async_hooks";
|
|
837
|
+
var loggerStorage = new AsyncLocalStorage();
|
|
838
|
+
function getLogger() {
|
|
839
|
+
return loggerStorage.getStore() ?? logger;
|
|
840
|
+
}
|
|
841
|
+
|
|
842
|
+
// src/lib/MetadataManager.ts
|
|
843
|
+
var MetadataManager = class {
|
|
844
|
+
constructor() {
|
|
845
|
+
this.loomsDir = path2.join(os2.homedir(), ".config", "iloom-ai", "looms");
|
|
846
|
+
}
|
|
847
|
+
/**
|
|
848
|
+
* Convert worktree path to filename slug per spec section 2.2
|
|
849
|
+
*
|
|
850
|
+
* Algorithm:
|
|
851
|
+
* 1. Trim trailing slashes
|
|
852
|
+
* 2. Replace all path separators (/ or \) with __ (double underscore)
|
|
853
|
+
* 3. Replace any other non-alphanumeric characters (except _ and -) with -
|
|
854
|
+
* 4. Append .json
|
|
855
|
+
*
|
|
856
|
+
* Example:
|
|
857
|
+
* - Worktree: /Users/jane/dev/repo
|
|
858
|
+
* - Filename: _Users__jane__dev__repo.json
|
|
859
|
+
*/
|
|
860
|
+
slugifyPath(worktreePath) {
|
|
861
|
+
let slug = worktreePath.replace(/[/\\]+$/, "");
|
|
862
|
+
slug = slug.replace(/[/\\]/g, "___");
|
|
863
|
+
slug = slug.replace(/[^a-zA-Z0-9_-]/g, "-");
|
|
864
|
+
return `${slug}.json`;
|
|
865
|
+
}
|
|
866
|
+
/**
|
|
867
|
+
* Get the full path to the metadata file for a worktree
|
|
868
|
+
*/
|
|
869
|
+
getFilePath(worktreePath) {
|
|
870
|
+
const filename = this.slugifyPath(worktreePath);
|
|
871
|
+
return path2.join(this.loomsDir, filename);
|
|
872
|
+
}
|
|
873
|
+
/**
|
|
874
|
+
* Write metadata for a worktree (spec section 3.1)
|
|
875
|
+
*
|
|
876
|
+
* @param worktreePath - Absolute path to the worktree (used for file naming)
|
|
877
|
+
* @param input - Metadata to write (description plus additional fields)
|
|
878
|
+
*/
|
|
879
|
+
async writeMetadata(worktreePath, input) {
|
|
880
|
+
try {
|
|
881
|
+
await fs.ensureDir(this.loomsDir, { mode: 493 });
|
|
882
|
+
const content = {
|
|
883
|
+
description: input.description,
|
|
884
|
+
created_at: (/* @__PURE__ */ new Date()).toISOString(),
|
|
885
|
+
version: 1,
|
|
886
|
+
branchName: input.branchName,
|
|
887
|
+
worktreePath: input.worktreePath,
|
|
888
|
+
issueType: input.issueType,
|
|
889
|
+
issue_numbers: input.issue_numbers,
|
|
890
|
+
pr_numbers: input.pr_numbers,
|
|
891
|
+
issueTracker: input.issueTracker,
|
|
892
|
+
colorHex: input.colorHex,
|
|
893
|
+
sessionId: input.sessionId,
|
|
894
|
+
...input.parentLoom && { parentLoom: input.parentLoom }
|
|
895
|
+
};
|
|
896
|
+
const filePath = this.getFilePath(worktreePath);
|
|
897
|
+
await fs.writeFile(filePath, JSON.stringify(content, null, 2), { mode: 420 });
|
|
898
|
+
getLogger().debug(`Metadata written for worktree: ${worktreePath}`);
|
|
899
|
+
} catch (error) {
|
|
900
|
+
getLogger().warn(
|
|
901
|
+
`Failed to write metadata for worktree: ${error instanceof Error ? error.message : String(error)}`
|
|
902
|
+
);
|
|
903
|
+
}
|
|
904
|
+
}
|
|
905
|
+
/**
|
|
906
|
+
* Read metadata for a worktree (spec section 3.2)
|
|
907
|
+
*
|
|
908
|
+
* @param worktreePath - Absolute path to the worktree
|
|
909
|
+
* @returns The metadata object with all fields, or null if not found/invalid
|
|
910
|
+
*/
|
|
911
|
+
async readMetadata(worktreePath) {
|
|
912
|
+
try {
|
|
913
|
+
const filePath = this.getFilePath(worktreePath);
|
|
914
|
+
if (!await fs.pathExists(filePath)) {
|
|
915
|
+
return null;
|
|
916
|
+
}
|
|
917
|
+
const content = await fs.readFile(filePath, "utf8");
|
|
918
|
+
const data = JSON.parse(content);
|
|
919
|
+
if (!data.description) {
|
|
920
|
+
return null;
|
|
921
|
+
}
|
|
922
|
+
return {
|
|
923
|
+
description: data.description,
|
|
924
|
+
created_at: data.created_at ?? null,
|
|
925
|
+
branchName: data.branchName ?? null,
|
|
926
|
+
worktreePath: data.worktreePath ?? null,
|
|
927
|
+
issueType: data.issueType ?? null,
|
|
928
|
+
issue_numbers: data.issue_numbers ?? [],
|
|
929
|
+
pr_numbers: data.pr_numbers ?? [],
|
|
930
|
+
issueTracker: data.issueTracker ?? null,
|
|
931
|
+
colorHex: data.colorHex ?? null,
|
|
932
|
+
sessionId: data.sessionId ?? null,
|
|
933
|
+
parentLoom: data.parentLoom ?? null
|
|
934
|
+
};
|
|
935
|
+
} catch (error) {
|
|
936
|
+
getLogger().debug(
|
|
937
|
+
`Could not read metadata for worktree ${worktreePath}: ${error instanceof Error ? error.message : String(error)}`
|
|
938
|
+
);
|
|
939
|
+
return null;
|
|
940
|
+
}
|
|
941
|
+
}
|
|
942
|
+
/**
|
|
943
|
+
* List all stored loom metadata files
|
|
944
|
+
*
|
|
945
|
+
* Returns an array of LoomMetadata objects for all valid metadata files
|
|
946
|
+
* in the looms directory. Invalid or unreadable files are skipped.
|
|
947
|
+
*
|
|
948
|
+
* @returns Array of LoomMetadata objects from all stored files
|
|
949
|
+
*/
|
|
950
|
+
async listAllMetadata() {
|
|
951
|
+
const results = [];
|
|
952
|
+
try {
|
|
953
|
+
if (!await fs.pathExists(this.loomsDir)) {
|
|
954
|
+
return results;
|
|
955
|
+
}
|
|
956
|
+
const files = await fs.readdir(this.loomsDir);
|
|
957
|
+
for (const file of files) {
|
|
958
|
+
if (!file.endsWith(".json")) {
|
|
959
|
+
continue;
|
|
960
|
+
}
|
|
961
|
+
try {
|
|
962
|
+
const filePath = path2.join(this.loomsDir, file);
|
|
963
|
+
const content = await fs.readFile(filePath, "utf8");
|
|
964
|
+
const data = JSON.parse(content);
|
|
965
|
+
if (!data.description) {
|
|
966
|
+
continue;
|
|
967
|
+
}
|
|
968
|
+
results.push({
|
|
969
|
+
description: data.description,
|
|
970
|
+
created_at: data.created_at ?? null,
|
|
971
|
+
branchName: data.branchName ?? null,
|
|
972
|
+
worktreePath: data.worktreePath ?? null,
|
|
973
|
+
issueType: data.issueType ?? null,
|
|
974
|
+
issue_numbers: data.issue_numbers ?? [],
|
|
975
|
+
pr_numbers: data.pr_numbers ?? [],
|
|
976
|
+
issueTracker: data.issueTracker ?? null,
|
|
977
|
+
colorHex: data.colorHex ?? null,
|
|
978
|
+
sessionId: data.sessionId ?? null,
|
|
979
|
+
parentLoom: data.parentLoom ?? null
|
|
980
|
+
});
|
|
981
|
+
} catch (error) {
|
|
982
|
+
getLogger().debug(
|
|
983
|
+
`Skipping metadata file ${file}: ${error instanceof Error ? error.message : String(error)}`
|
|
984
|
+
);
|
|
985
|
+
}
|
|
986
|
+
}
|
|
987
|
+
} catch (error) {
|
|
988
|
+
getLogger().debug(
|
|
989
|
+
`Could not list metadata files: ${error instanceof Error ? error.message : String(error)}`
|
|
990
|
+
);
|
|
991
|
+
}
|
|
992
|
+
return results;
|
|
993
|
+
}
|
|
994
|
+
/**
|
|
995
|
+
* Delete metadata for a worktree (spec section 3.3)
|
|
996
|
+
*
|
|
997
|
+
* Idempotent: silently succeeds if file doesn't exist
|
|
998
|
+
* Non-fatal: logs warning on permission errors but doesn't throw
|
|
999
|
+
*
|
|
1000
|
+
* @param worktreePath - Absolute path to the worktree
|
|
1001
|
+
*/
|
|
1002
|
+
async deleteMetadata(worktreePath) {
|
|
1003
|
+
try {
|
|
1004
|
+
const filePath = this.getFilePath(worktreePath);
|
|
1005
|
+
if (!await fs.pathExists(filePath)) {
|
|
1006
|
+
getLogger().debug(`No metadata file to delete for worktree: ${worktreePath}`);
|
|
1007
|
+
return;
|
|
1008
|
+
}
|
|
1009
|
+
await fs.unlink(filePath);
|
|
1010
|
+
getLogger().debug(`Metadata deleted for worktree: ${worktreePath}`);
|
|
1011
|
+
} catch (error) {
|
|
1012
|
+
getLogger().warn(
|
|
1013
|
+
`Failed to delete metadata for worktree: ${error instanceof Error ? error.message : String(error)}`
|
|
1014
|
+
);
|
|
1015
|
+
}
|
|
1016
|
+
}
|
|
1017
|
+
};
|
|
1018
|
+
|
|
734
1019
|
// src/utils/git.ts
|
|
735
|
-
init_SettingsManager();
|
|
736
1020
|
init_logger();
|
|
737
|
-
import path2 from "path";
|
|
738
|
-
import { execa } from "execa";
|
|
739
1021
|
async function executeGitCommand(args, options) {
|
|
740
1022
|
try {
|
|
741
1023
|
const result = await execa("git", args, {
|
|
@@ -743,7 +1025,9 @@ async function executeGitCommand(args, options) {
|
|
|
743
1025
|
timeout: (options == null ? void 0 : options.timeout) ?? 3e4,
|
|
744
1026
|
encoding: "utf8",
|
|
745
1027
|
stdio: (options == null ? void 0 : options.stdio) ?? "pipe",
|
|
746
|
-
verbose: logger.isDebugEnabled()
|
|
1028
|
+
verbose: logger.isDebugEnabled(),
|
|
1029
|
+
// Spread env conditionally - only include if defined
|
|
1030
|
+
...(options == null ? void 0 : options.env) && { env: options.env }
|
|
747
1031
|
});
|
|
748
1032
|
return result.stdout;
|
|
749
1033
|
} catch (error) {
|
|
@@ -879,7 +1163,7 @@ function extractIssueNumber(branchName) {
|
|
|
879
1163
|
}
|
|
880
1164
|
return null;
|
|
881
1165
|
}
|
|
882
|
-
function isWorktreePath(
|
|
1166
|
+
function isWorktreePath(path7) {
|
|
883
1167
|
const worktreePatterns = [
|
|
884
1168
|
/\/worktrees?\//i,
|
|
885
1169
|
// Contains /worktree/ or /worktrees/
|
|
@@ -894,17 +1178,17 @@ function isWorktreePath(path6) {
|
|
|
894
1178
|
/\.worktree$/i
|
|
895
1179
|
// ends with .worktree
|
|
896
1180
|
];
|
|
897
|
-
return worktreePatterns.some((pattern) => pattern.test(
|
|
1181
|
+
return worktreePatterns.some((pattern) => pattern.test(path7));
|
|
898
1182
|
}
|
|
899
1183
|
function generateWorktreePath(branchName, rootDir = process.cwd(), options) {
|
|
900
1184
|
let sanitized = branchName.replace(/\//g, "-");
|
|
901
1185
|
if ((options == null ? void 0 : options.isPR) && (options == null ? void 0 : options.prNumber)) {
|
|
902
1186
|
sanitized = `${sanitized}_pr_${options.prNumber}`;
|
|
903
1187
|
}
|
|
904
|
-
const parentDir =
|
|
1188
|
+
const parentDir = path3.dirname(rootDir);
|
|
905
1189
|
let prefix;
|
|
906
1190
|
if ((options == null ? void 0 : options.prefix) === void 0) {
|
|
907
|
-
const mainFolderName =
|
|
1191
|
+
const mainFolderName = path3.basename(rootDir);
|
|
908
1192
|
prefix = mainFolderName ? `${mainFolderName}-looms/` : "looms/";
|
|
909
1193
|
} else if (options.prefix === "") {
|
|
910
1194
|
prefix = "";
|
|
@@ -924,43 +1208,43 @@ function generateWorktreePath(branchName, rootDir = process.cwd(), options) {
|
|
|
924
1208
|
}
|
|
925
1209
|
}
|
|
926
1210
|
if (prefix === "") {
|
|
927
|
-
return
|
|
1211
|
+
return path3.join(parentDir, sanitized);
|
|
928
1212
|
} else if (prefix.endsWith("/")) {
|
|
929
|
-
return
|
|
1213
|
+
return path3.join(parentDir, prefix, sanitized);
|
|
930
1214
|
} else if (prefix.includes("/")) {
|
|
931
1215
|
const lastSlashIndex = prefix.lastIndexOf("/");
|
|
932
1216
|
const dirPath = prefix.substring(0, lastSlashIndex);
|
|
933
1217
|
const prefixWithSeparator = prefix.substring(lastSlashIndex + 1);
|
|
934
|
-
return
|
|
1218
|
+
return path3.join(parentDir, dirPath, `${prefixWithSeparator}${sanitized}`);
|
|
935
1219
|
} else {
|
|
936
|
-
return
|
|
1220
|
+
return path3.join(parentDir, `${prefix}${sanitized}`);
|
|
937
1221
|
}
|
|
938
1222
|
}
|
|
939
|
-
async function isValidGitRepo(
|
|
1223
|
+
async function isValidGitRepo(path7) {
|
|
940
1224
|
try {
|
|
941
|
-
await executeGitCommand(["rev-parse", "--git-dir"], { cwd:
|
|
1225
|
+
await executeGitCommand(["rev-parse", "--git-dir"], { cwd: path7 });
|
|
942
1226
|
return true;
|
|
943
1227
|
} catch {
|
|
944
1228
|
return false;
|
|
945
1229
|
}
|
|
946
1230
|
}
|
|
947
|
-
async function getCurrentBranch(
|
|
1231
|
+
async function getCurrentBranch(path7 = process.cwd()) {
|
|
948
1232
|
try {
|
|
949
|
-
const result = await executeGitCommand(["branch", "--show-current"], { cwd:
|
|
1233
|
+
const result = await executeGitCommand(["branch", "--show-current"], { cwd: path7 });
|
|
950
1234
|
return result.trim();
|
|
951
1235
|
} catch {
|
|
952
1236
|
return null;
|
|
953
1237
|
}
|
|
954
1238
|
}
|
|
955
|
-
async function branchExists(branchName,
|
|
1239
|
+
async function branchExists(branchName, path7 = process.cwd(), includeRemote = true) {
|
|
956
1240
|
try {
|
|
957
|
-
const localResult = await executeGitCommand(["branch", "--list", branchName], { cwd:
|
|
1241
|
+
const localResult = await executeGitCommand(["branch", "--list", branchName], { cwd: path7 });
|
|
958
1242
|
if (localResult.trim()) {
|
|
959
1243
|
return true;
|
|
960
1244
|
}
|
|
961
1245
|
if (includeRemote) {
|
|
962
1246
|
const remoteResult = await executeGitCommand(["branch", "-r", "--list", `*/${branchName}`], {
|
|
963
|
-
cwd:
|
|
1247
|
+
cwd: path7
|
|
964
1248
|
});
|
|
965
1249
|
if (remoteResult.trim()) {
|
|
966
1250
|
return true;
|
|
@@ -971,31 +1255,31 @@ async function branchExists(branchName, path6 = process.cwd(), includeRemote = t
|
|
|
971
1255
|
return false;
|
|
972
1256
|
}
|
|
973
1257
|
}
|
|
974
|
-
async function getWorktreeRoot(
|
|
1258
|
+
async function getWorktreeRoot(path7 = process.cwd()) {
|
|
975
1259
|
try {
|
|
976
|
-
const result = await executeGitCommand(["rev-parse", "--show-toplevel"], { cwd:
|
|
1260
|
+
const result = await executeGitCommand(["rev-parse", "--show-toplevel"], { cwd: path7 });
|
|
977
1261
|
return result.trim();
|
|
978
1262
|
} catch {
|
|
979
1263
|
return null;
|
|
980
1264
|
}
|
|
981
1265
|
}
|
|
982
|
-
async function getRepoRoot(
|
|
1266
|
+
async function getRepoRoot(path7 = process.cwd()) {
|
|
983
1267
|
try {
|
|
984
1268
|
const gitCommonDir = await executeGitCommand(
|
|
985
1269
|
["rev-parse", "--path-format=absolute", "--git-common-dir"],
|
|
986
|
-
{ cwd:
|
|
1270
|
+
{ cwd: path7 }
|
|
987
1271
|
);
|
|
988
1272
|
const trimmedPath = gitCommonDir.trim();
|
|
989
1273
|
const repoRoot = trimmedPath.replace(/\/\.git\/worktrees\/[^/]+$/, "").replace(/\/\.git$/, "");
|
|
990
1274
|
return repoRoot;
|
|
991
1275
|
} catch (error) {
|
|
992
|
-
logger.warn(`Failed to determine repo root from git-common-dir: ${
|
|
1276
|
+
logger.warn(`Failed to determine repo root from git-common-dir: ${path7}`, error instanceof Error ? error.message : String(error));
|
|
993
1277
|
return null;
|
|
994
1278
|
}
|
|
995
1279
|
}
|
|
996
|
-
async function findMainWorktreePath(
|
|
1280
|
+
async function findMainWorktreePath(path7 = process.cwd(), options) {
|
|
997
1281
|
try {
|
|
998
|
-
const output = await executeGitCommand(["worktree", "list", "--porcelain"], { cwd:
|
|
1282
|
+
const output = await executeGitCommand(["worktree", "list", "--porcelain"], { cwd: path7 });
|
|
999
1283
|
const worktrees = parseWorktreeList(output, options == null ? void 0 : options.mainBranch);
|
|
1000
1284
|
if (worktrees.length === 0) {
|
|
1001
1285
|
throw new Error("No worktrees found in repository");
|
|
@@ -1025,30 +1309,51 @@ async function findMainWorktreePath(path6 = process.cwd(), options) {
|
|
|
1025
1309
|
throw new Error(`Failed to find main worktree: ${error instanceof Error ? error.message : String(error)}`);
|
|
1026
1310
|
}
|
|
1027
1311
|
}
|
|
1028
|
-
async function findMainWorktreePathWithSettings(
|
|
1312
|
+
async function findMainWorktreePathWithSettings(path7, settingsManager) {
|
|
1029
1313
|
settingsManager ??= new SettingsManager();
|
|
1030
|
-
const settings = await settingsManager.loadSettings(
|
|
1314
|
+
const settings = await settingsManager.loadSettings(path7);
|
|
1031
1315
|
const findOptions = settings.mainBranch ? { mainBranch: settings.mainBranch } : void 0;
|
|
1032
|
-
return findMainWorktreePath(
|
|
1316
|
+
return findMainWorktreePath(path7, findOptions);
|
|
1033
1317
|
}
|
|
1034
|
-
async function
|
|
1318
|
+
async function findWorktreeForBranch(branchName, path7 = process.cwd()) {
|
|
1035
1319
|
try {
|
|
1036
|
-
const
|
|
1320
|
+
const output = await executeGitCommand(["worktree", "list", "--porcelain"], { cwd: path7 });
|
|
1321
|
+
const worktrees = parseWorktreeList(output, branchName);
|
|
1322
|
+
if (worktrees.length === 0) {
|
|
1323
|
+
throw new Error("No worktrees found in repository");
|
|
1324
|
+
}
|
|
1325
|
+
const targetWorktree = worktrees.find((wt) => wt.branch === branchName);
|
|
1326
|
+
if (!(targetWorktree == null ? void 0 : targetWorktree.path)) {
|
|
1327
|
+
throw new Error(
|
|
1328
|
+
`No worktree found with branch '${branchName}' checked out. Available worktrees: ${worktrees.map((wt) => `${wt.path} (${wt.branch})`).join(", ")}`
|
|
1329
|
+
);
|
|
1330
|
+
}
|
|
1331
|
+
return targetWorktree.path;
|
|
1332
|
+
} catch (error) {
|
|
1333
|
+
if (error instanceof Error && (error.message.includes("No worktree found with branch") || error.message.includes("No worktrees found"))) {
|
|
1334
|
+
throw error;
|
|
1335
|
+
}
|
|
1336
|
+
throw new Error(`Failed to find worktree for branch '${branchName}': ${error instanceof Error ? error.message : String(error)}`);
|
|
1337
|
+
}
|
|
1338
|
+
}
|
|
1339
|
+
async function hasUncommittedChanges(path7 = process.cwd()) {
|
|
1340
|
+
try {
|
|
1341
|
+
const result = await executeGitCommand(["status", "--porcelain"], { cwd: path7 });
|
|
1037
1342
|
return result.trim().length > 0;
|
|
1038
1343
|
} catch {
|
|
1039
1344
|
return false;
|
|
1040
1345
|
}
|
|
1041
1346
|
}
|
|
1042
|
-
async function getDefaultBranch(
|
|
1347
|
+
async function getDefaultBranch(path7 = process.cwd()) {
|
|
1043
1348
|
try {
|
|
1044
1349
|
const remoteResult = await executeGitCommand(["symbolic-ref", "refs/remotes/origin/HEAD"], {
|
|
1045
|
-
cwd:
|
|
1350
|
+
cwd: path7
|
|
1046
1351
|
});
|
|
1047
1352
|
const match = remoteResult.match(/refs\/remotes\/origin\/(.+)/);
|
|
1048
1353
|
if (match) return match[1] ?? "main";
|
|
1049
1354
|
const commonDefaults = ["main", "master", "develop"];
|
|
1050
1355
|
for (const branch of commonDefaults) {
|
|
1051
|
-
if (await branchExists(branch,
|
|
1356
|
+
if (await branchExists(branch, path7)) {
|
|
1052
1357
|
return branch;
|
|
1053
1358
|
}
|
|
1054
1359
|
}
|
|
@@ -1057,13 +1362,13 @@ async function getDefaultBranch(path6 = process.cwd()) {
|
|
|
1057
1362
|
return "main";
|
|
1058
1363
|
}
|
|
1059
1364
|
}
|
|
1060
|
-
async function findAllBranchesForIssue(issueNumber,
|
|
1365
|
+
async function findAllBranchesForIssue(issueNumber, path7 = process.cwd(), settingsManager) {
|
|
1061
1366
|
if (!settingsManager) {
|
|
1062
1367
|
const { SettingsManager: SM } = await Promise.resolve().then(() => (init_SettingsManager(), SettingsManager_exports));
|
|
1063
1368
|
settingsManager = new SM();
|
|
1064
1369
|
}
|
|
1065
|
-
const protectedBranches = await settingsManager.getProtectedBranches(
|
|
1066
|
-
const output = await executeGitCommand(["branch", "-a"], { cwd:
|
|
1370
|
+
const protectedBranches = await settingsManager.getProtectedBranches(path7);
|
|
1371
|
+
const output = await executeGitCommand(["branch", "-a"], { cwd: path7 });
|
|
1067
1372
|
const branches = [];
|
|
1068
1373
|
const lines = output.split("\n").filter(Boolean);
|
|
1069
1374
|
for (const line of lines) {
|
|
@@ -1120,18 +1425,18 @@ async function findAllBranchesForIssue(issueNumber, path6 = process.cwd(), setti
|
|
|
1120
1425
|
}
|
|
1121
1426
|
return branches;
|
|
1122
1427
|
}
|
|
1123
|
-
async function isEmptyRepository(
|
|
1428
|
+
async function isEmptyRepository(path7 = process.cwd()) {
|
|
1124
1429
|
try {
|
|
1125
|
-
await executeGitCommand(["rev-parse", "--verify", "HEAD"], { cwd:
|
|
1430
|
+
await executeGitCommand(["rev-parse", "--verify", "HEAD"], { cwd: path7 });
|
|
1126
1431
|
return false;
|
|
1127
1432
|
} catch {
|
|
1128
1433
|
return true;
|
|
1129
1434
|
}
|
|
1130
1435
|
}
|
|
1131
|
-
async function ensureRepositoryHasCommits(
|
|
1132
|
-
const isEmpty = await isEmptyRepository(
|
|
1436
|
+
async function ensureRepositoryHasCommits(path7 = process.cwd()) {
|
|
1437
|
+
const isEmpty = await isEmptyRepository(path7);
|
|
1133
1438
|
if (isEmpty) {
|
|
1134
|
-
await executeGitCommand(["commit", "--no-verify", "--allow-empty", "-m", "Initial commit"], { cwd:
|
|
1439
|
+
await executeGitCommand(["commit", "--no-verify", "--allow-empty", "-m", "Initial commit"], { cwd: path7 });
|
|
1135
1440
|
}
|
|
1136
1441
|
}
|
|
1137
1442
|
async function pushBranchToRemote(branchName, worktreePath, options) {
|
|
@@ -1199,6 +1504,122 @@ async function isFileGitignored(filePath, cwd = process.cwd()) {
|
|
|
1199
1504
|
return false;
|
|
1200
1505
|
}
|
|
1201
1506
|
}
|
|
1507
|
+
async function isBranchMergedIntoMain(branchName, mainBranch = "main", cwd = process.cwd()) {
|
|
1508
|
+
try {
|
|
1509
|
+
await executeGitCommand(["merge-base", "--is-ancestor", branchName, mainBranch], { cwd });
|
|
1510
|
+
return true;
|
|
1511
|
+
} catch {
|
|
1512
|
+
return false;
|
|
1513
|
+
}
|
|
1514
|
+
}
|
|
1515
|
+
async function isRemoteBranchUpToDate(branchName, cwd) {
|
|
1516
|
+
try {
|
|
1517
|
+
const remoteResult = await executeGitCommand(["ls-remote", "--heads", "origin", branchName], { cwd });
|
|
1518
|
+
if (remoteResult.trim().length === 0) {
|
|
1519
|
+
return false;
|
|
1520
|
+
}
|
|
1521
|
+
const remoteCommit = remoteResult.trim().split(" ")[0];
|
|
1522
|
+
const localCommit = await executeGitCommand(["rev-parse", branchName], { cwd });
|
|
1523
|
+
return localCommit.trim() === remoteCommit;
|
|
1524
|
+
} catch {
|
|
1525
|
+
return false;
|
|
1526
|
+
}
|
|
1527
|
+
}
|
|
1528
|
+
async function checkRemoteBranchStatus(branchName, cwd) {
|
|
1529
|
+
try {
|
|
1530
|
+
try {
|
|
1531
|
+
await executeGitCommand(["fetch", "origin", branchName], { cwd, timeout: 3e4 });
|
|
1532
|
+
} catch (fetchError) {
|
|
1533
|
+
const fetchErrorMessage = fetchError instanceof Error ? fetchError.message : String(fetchError);
|
|
1534
|
+
if (fetchErrorMessage.includes("Could not resolve host") || fetchErrorMessage.includes("unable to access") || fetchErrorMessage.includes("network") || fetchErrorMessage.includes("Connection refused") || fetchErrorMessage.includes("Connection timed out")) {
|
|
1535
|
+
return {
|
|
1536
|
+
exists: false,
|
|
1537
|
+
remoteAhead: false,
|
|
1538
|
+
localAhead: false,
|
|
1539
|
+
networkError: true,
|
|
1540
|
+
errorMessage: fetchErrorMessage
|
|
1541
|
+
};
|
|
1542
|
+
}
|
|
1543
|
+
}
|
|
1544
|
+
const remoteResult = await executeGitCommand(["ls-remote", "--heads", "origin", branchName], { cwd });
|
|
1545
|
+
if (remoteResult.trim().length === 0) {
|
|
1546
|
+
return {
|
|
1547
|
+
exists: false,
|
|
1548
|
+
remoteAhead: false,
|
|
1549
|
+
localAhead: false,
|
|
1550
|
+
networkError: false
|
|
1551
|
+
};
|
|
1552
|
+
}
|
|
1553
|
+
const remoteCommit = remoteResult.trim().split(" ")[0];
|
|
1554
|
+
if (!remoteCommit) {
|
|
1555
|
+
return {
|
|
1556
|
+
exists: false,
|
|
1557
|
+
remoteAhead: false,
|
|
1558
|
+
localAhead: false,
|
|
1559
|
+
networkError: false
|
|
1560
|
+
};
|
|
1561
|
+
}
|
|
1562
|
+
const localCommit = await executeGitCommand(["rev-parse", branchName], { cwd });
|
|
1563
|
+
const localCommitTrimmed = localCommit.trim();
|
|
1564
|
+
if (remoteCommit === localCommitTrimmed) {
|
|
1565
|
+
return {
|
|
1566
|
+
exists: true,
|
|
1567
|
+
remoteAhead: false,
|
|
1568
|
+
localAhead: false,
|
|
1569
|
+
networkError: false
|
|
1570
|
+
};
|
|
1571
|
+
}
|
|
1572
|
+
try {
|
|
1573
|
+
await executeGitCommand(["merge-base", "--is-ancestor", localCommitTrimmed, remoteCommit], { cwd });
|
|
1574
|
+
return {
|
|
1575
|
+
exists: true,
|
|
1576
|
+
remoteAhead: true,
|
|
1577
|
+
localAhead: false,
|
|
1578
|
+
networkError: false
|
|
1579
|
+
};
|
|
1580
|
+
} catch {
|
|
1581
|
+
return {
|
|
1582
|
+
exists: true,
|
|
1583
|
+
remoteAhead: false,
|
|
1584
|
+
localAhead: true,
|
|
1585
|
+
networkError: false
|
|
1586
|
+
};
|
|
1587
|
+
}
|
|
1588
|
+
} catch (error) {
|
|
1589
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
1590
|
+
if (errorMessage.includes("Could not resolve host") || errorMessage.includes("unable to access") || errorMessage.includes("network") || errorMessage.includes("Connection refused") || errorMessage.includes("Connection timed out")) {
|
|
1591
|
+
return {
|
|
1592
|
+
exists: false,
|
|
1593
|
+
remoteAhead: false,
|
|
1594
|
+
localAhead: false,
|
|
1595
|
+
networkError: true,
|
|
1596
|
+
errorMessage
|
|
1597
|
+
};
|
|
1598
|
+
}
|
|
1599
|
+
return {
|
|
1600
|
+
exists: false,
|
|
1601
|
+
remoteAhead: false,
|
|
1602
|
+
localAhead: false,
|
|
1603
|
+
networkError: false
|
|
1604
|
+
};
|
|
1605
|
+
}
|
|
1606
|
+
}
|
|
1607
|
+
async function getMergeTargetBranch(worktreePath = process.cwd(), options) {
|
|
1608
|
+
var _a;
|
|
1609
|
+
const settingsManager = (options == null ? void 0 : options.settingsManager) ?? new SettingsManager();
|
|
1610
|
+
const metadataManager = (options == null ? void 0 : options.metadataManager) ?? new MetadataManager();
|
|
1611
|
+
logger.debug(`Checking for parent loom metadata at: ${worktreePath}`);
|
|
1612
|
+
const metadata = await metadataManager.readMetadata(worktreePath);
|
|
1613
|
+
if ((_a = metadata == null ? void 0 : metadata.parentLoom) == null ? void 0 : _a.branchName) {
|
|
1614
|
+
logger.debug(`Using parent branch as merge target: ${metadata.parentLoom.branchName}`);
|
|
1615
|
+
return metadata.parentLoom.branchName;
|
|
1616
|
+
}
|
|
1617
|
+
logger.debug("No parent loom metadata found, falling back to settings");
|
|
1618
|
+
const settings = await settingsManager.loadSettings(worktreePath);
|
|
1619
|
+
const mainBranch = settings.mainBranch ?? "main";
|
|
1620
|
+
logger.debug(`Using configured main branch as merge target: ${mainBranch}`);
|
|
1621
|
+
return mainBranch;
|
|
1622
|
+
}
|
|
1202
1623
|
|
|
1203
1624
|
// src/lib/GitWorktreeManager.ts
|
|
1204
1625
|
var GitWorktreeManager = class {
|
|
@@ -1266,12 +1687,12 @@ var GitWorktreeManager = class {
|
|
|
1266
1687
|
if (!options.branch) {
|
|
1267
1688
|
throw new Error("Branch name is required");
|
|
1268
1689
|
}
|
|
1269
|
-
const absolutePath =
|
|
1270
|
-
if (await
|
|
1690
|
+
const absolutePath = path4.resolve(options.path);
|
|
1691
|
+
if (await fs2.pathExists(absolutePath)) {
|
|
1271
1692
|
if (!options.force) {
|
|
1272
1693
|
throw new Error(`Path already exists: ${absolutePath}`);
|
|
1273
1694
|
}
|
|
1274
|
-
await
|
|
1695
|
+
await fs2.remove(absolutePath);
|
|
1275
1696
|
}
|
|
1276
1697
|
const args = ["worktree", "add"];
|
|
1277
1698
|
if (options.createBranch) {
|
|
@@ -1298,11 +1719,10 @@ var GitWorktreeManager = class {
|
|
|
1298
1719
|
const worktrees = await this.listWorktrees({ porcelain: true });
|
|
1299
1720
|
const worktree = worktrees.find((wt) => wt.path === worktreePath);
|
|
1300
1721
|
if (!worktree) {
|
|
1301
|
-
|
|
1302
|
-
|
|
1303
|
-
logger4.debug(`Found ${worktrees.length} worktrees:`);
|
|
1722
|
+
getLogger().debug(`Looking for worktree path: ${worktreePath}`);
|
|
1723
|
+
getLogger().debug(`Found ${worktrees.length} worktrees:`);
|
|
1304
1724
|
worktrees.forEach((wt, i) => {
|
|
1305
|
-
|
|
1725
|
+
getLogger().debug(` ${i}: path="${wt.path}", branch="${wt.branch}"`);
|
|
1306
1726
|
});
|
|
1307
1727
|
throw new Error(`Worktree not found: ${worktreePath}`);
|
|
1308
1728
|
}
|
|
@@ -1322,8 +1742,8 @@ var GitWorktreeManager = class {
|
|
|
1322
1742
|
if (options.force) args.push("--force");
|
|
1323
1743
|
args.push(worktreePath);
|
|
1324
1744
|
await executeGitCommand(args, { cwd: this._workingDirectory });
|
|
1325
|
-
if (options.removeDirectory && await
|
|
1326
|
-
await
|
|
1745
|
+
if (options.removeDirectory && await fs2.pathExists(worktreePath)) {
|
|
1746
|
+
await fs2.remove(worktreePath);
|
|
1327
1747
|
}
|
|
1328
1748
|
if (options.removeBranch && !worktree.bare) {
|
|
1329
1749
|
try {
|
|
@@ -1346,7 +1766,7 @@ var GitWorktreeManager = class {
|
|
|
1346
1766
|
let isValidRepo = false;
|
|
1347
1767
|
let hasValidBranch = false;
|
|
1348
1768
|
try {
|
|
1349
|
-
existsOnDisk = await
|
|
1769
|
+
existsOnDisk = await fs2.pathExists(worktreePath);
|
|
1350
1770
|
if (!existsOnDisk) {
|
|
1351
1771
|
issues.push("Worktree directory does not exist on disk");
|
|
1352
1772
|
}
|
|
@@ -1764,42 +2184,35 @@ async function createIssue(title, body, options) {
|
|
|
1764
2184
|
};
|
|
1765
2185
|
}
|
|
1766
2186
|
|
|
1767
|
-
// src/lib/GitHubService.ts
|
|
1768
|
-
init_logger();
|
|
1769
|
-
|
|
1770
2187
|
// src/utils/prompt.ts
|
|
1771
2188
|
init_logger();
|
|
1772
2189
|
import * as readline from "readline";
|
|
1773
2190
|
async function promptConfirmation(message, defaultValue = false) {
|
|
1774
|
-
const rl = readline.createInterface({
|
|
1775
|
-
input: process.stdin,
|
|
1776
|
-
output: process.stdout
|
|
1777
|
-
});
|
|
1778
2191
|
const suffix = defaultValue ? "[Y/n]" : "[y/N]";
|
|
1779
2192
|
const fullMessage = `${message} ${suffix}: `;
|
|
1780
|
-
|
|
1781
|
-
rl.
|
|
1782
|
-
|
|
1783
|
-
|
|
1784
|
-
|
|
1785
|
-
|
|
1786
|
-
|
|
1787
|
-
|
|
1788
|
-
|
|
1789
|
-
resolve(true);
|
|
1790
|
-
return;
|
|
1791
|
-
}
|
|
1792
|
-
if (normalized === "n" || normalized === "no") {
|
|
1793
|
-
resolve(false);
|
|
1794
|
-
return;
|
|
1795
|
-
}
|
|
1796
|
-
logger.warn("Invalid input, using default value", {
|
|
1797
|
-
input: answer,
|
|
1798
|
-
defaultValue
|
|
2193
|
+
while (true) {
|
|
2194
|
+
const rl = readline.createInterface({
|
|
2195
|
+
input: process.stdin,
|
|
2196
|
+
output: process.stdout
|
|
2197
|
+
});
|
|
2198
|
+
const answer = await new Promise((resolve) => {
|
|
2199
|
+
rl.question(fullMessage, (ans) => {
|
|
2200
|
+
rl.close();
|
|
2201
|
+
resolve(ans);
|
|
1799
2202
|
});
|
|
1800
|
-
resolve(defaultValue);
|
|
1801
2203
|
});
|
|
1802
|
-
|
|
2204
|
+
const normalized = answer.trim().toLowerCase();
|
|
2205
|
+
if (normalized === "") {
|
|
2206
|
+
return defaultValue;
|
|
2207
|
+
}
|
|
2208
|
+
if (normalized === "y" || normalized === "yes") {
|
|
2209
|
+
return true;
|
|
+}
+if (normalized === "n" || normalized === "no") {
+return false;
+}
+logger.warn("Invalid input. Please enter y/yes or n/no.");
+}
 }
 
 // src/lib/GitHubService.ts
@@ -1829,12 +2242,12 @@ var GitHubService = class {
 return { type: "unknown", identifier: null, rawInput: input };
 }
 const number = parseInt(numberMatch[1], 10);
-
+getLogger().debug("Checking if input is a PR", { number });
 const pr = await this.isValidPR(number, repo);
 if (pr) {
 return { type: "pr", identifier: number.toString(), rawInput: input };
 }
-
+getLogger().debug("Checking if input is an issue", { number });
 const issue = await this.isValidIssue(number, repo);
 if (issue) {
 return { type: "issue", identifier: number.toString(), rawInput: input };
@@ -1938,17 +2351,17 @@ var GitHubService = class {
 return createIssue(title, body, { repo: repository, labels });
 }
 async getIssueUrl(issueNumber, repo) {
-
+getLogger().debug("Fetching issue URL", { issueNumber, repo });
 const issue = await fetchGhIssue(issueNumber, repo);
 return issue.url;
 }
 // GitHub Projects integration
 async moveIssueToInProgress(issueNumber) {
-
+getLogger().info("Moving issue to In Progress in GitHub Projects", {
 issueNumber
 });
 if (!await hasProjectScope()) {
-
+getLogger().warn("Missing project scope in GitHub CLI auth");
 throw new GitHubError(
 "MISSING_SCOPE" /* MISSING_SCOPE */,
 "GitHub CLI lacks project scope. Run: gh auth refresh -s project"
@@ -1959,18 +2372,18 @@ var GitHubService = class {
 const repoInfo = await executeGhCommand(["repo", "view", "--json", "owner,name"]);
 owner = repoInfo.owner.login;
 } catch (error) {
-
+getLogger().warn("Could not determine repository info", { error });
 return;
 }
 let projects;
 try {
 projects = await fetchProjectList(owner);
 } catch (error) {
-
+getLogger().warn("Could not fetch projects", { owner, error });
 return;
 }
 if (!projects.length) {
-
+getLogger().warn("No projects found", { owner });
 return;
 }
 for (const project of projects) {
@@ -1983,14 +2396,14 @@ var GitHubService = class {
 try {
 items = await fetchProjectItems(project.number, owner);
 } catch (error) {
-
+getLogger().debug("Could not fetch project items", { project: project.number, error });
 return;
 }
 const item = items.find(
 (i) => i.content.type === "Issue" && i.content.number === issueNumber
 );
 if (!item) {
-
+getLogger().debug("Issue not found in project", {
 issueNumber,
 projectNumber: project.number
 });
@@ -2000,19 +2413,19 @@ var GitHubService = class {
 try {
 fieldsData = await fetchProjectFields(project.number, owner);
 } catch (error) {
-
+getLogger().debug("Could not fetch project fields", { project: project.number, error });
 return;
 }
 const statusField = fieldsData.fields.find((f) => f.name === "Status");
 if (!statusField) {
-
+getLogger().debug("No Status field found in project", { projectNumber: project.number });
 return;
 }
 const inProgressOption = (_a = statusField.options) == null ? void 0 : _a.find(
 (o) => o.name === "In Progress" || o.name === "In progress"
 );
 if (!inProgressOption) {
-
+getLogger().debug("No In Progress option found in Status field", { projectNumber: project.number });
 return;
 }
 try {
@@ -2022,12 +2435,12 @@ var GitHubService = class {
 statusField.id,
 inProgressOption.id
 );
-
+getLogger().info("Updated issue status in project", {
 issueNumber,
 projectNumber: project.number
 });
 } catch (error) {
-
+getLogger().debug("Could not update project item", { item: item.id, error });
 }
 }
 // Utility methods
@@ -2388,7 +2801,6 @@ ${issue.body}`;
 };
 
 // src/lib/IssueTrackerFactory.ts
-init_logger();
 var IssueTrackerFactory = class {
 /**
 * Create an IssueTracker instance based on settings configuration
@@ -2401,11 +2813,11 @@ var IssueTrackerFactory = class {
 static create(settings) {
 var _a, _b;
 const provider = ((_a = settings.issueManagement) == null ? void 0 : _a.provider) ?? "github";
-
-
+getLogger().debug(`IssueTrackerFactory: Creating tracker for provider "${provider}"`);
+getLogger().debug(`IssueTrackerFactory: issueManagement settings:`, JSON.stringify(settings.issueManagement, null, 2));
 switch (provider) {
 case "github":
-
+getLogger().debug("IssueTrackerFactory: Creating GitHubService");
 return new GitHubService();
 case "linear": {
 const linearSettings = (_b = settings.issueManagement) == null ? void 0 : _b.linear;
@@ -2416,7 +2828,7 @@ var IssueTrackerFactory = class {
 if (linearSettings == null ? void 0 : linearSettings.branchFormat) {
 linearConfig.branchFormat = linearSettings.branchFormat;
 }
-
+getLogger().debug(`IssueTrackerFactory: Creating LinearService with config:`, JSON.stringify(linearConfig, null, 2));
 return new LinearService(linearConfig);
 }
 default:
@@ -2437,12 +2849,11 @@ var IssueTrackerFactory = class {
 };
 
 // src/lib/EnvironmentManager.ts
-
-import fs2 from "fs-extra";
+import fs3 from "fs-extra";
 
 // src/utils/env.ts
 init_logger();
-import
+import path5 from "path";
 import dotenvFlow from "dotenv-flow";
 function parseEnvFile(content) {
 const envMap = /* @__PURE__ */ new Map();
@@ -2509,7 +2920,7 @@ async function buildEnvSourceCommands(workspacePath, fileExists) {
 const files = getDotenvFlowFiles();
 const commands = [];
 for (const file of files) {
-const fullPath =
+const fullPath = path5.join(workspacePath, file);
 const exists = await fileExists(fullPath);
 if (exists) {
 commands.push(`source ${file}`);
@@ -2520,7 +2931,7 @@ async function buildEnvSourceCommands(workspacePath, fileExists) {
 async function findEnvFileContainingVariable(workspacePath, variableName, fileExists, getEnvVariable) {
 const files = getDotenvFlowFiles().reverse();
 for (const file of files) {
-const fullPath =
+const fullPath = path5.join(workspacePath, file);
 if (!await fileExists(fullPath)) {
 continue;
 }
@@ -2560,7 +2971,6 @@ function calculatePortForBranch(branchName, basePort = 3e3) {
 }
 
 // src/lib/EnvironmentManager.ts
-var logger2 = createLogger({ prefix: "\u{1F4DD}" });
 var EnvironmentManager = class {
 constructor() {
 this.backupSuffix = ".backup";
@@ -2575,15 +2985,15 @@ var EnvironmentManager = class {
 if (!validation.valid) {
 throw new Error(validation.error ?? "Invalid variable name");
 }
-const fileExists = await
+const fileExists = await fs3.pathExists(filePath);
 if (!fileExists) {
-
+getLogger().info(`Creating ${filePath} with ${key}...`);
 const content = formatEnvLine(key, value);
-await
-
+await fs3.writeFile(filePath, content, "utf8");
+getLogger().success(`${filePath} created with ${key}`);
 return;
 }
-const existingContent = await
+const existingContent = await fs3.readFile(filePath, "utf8");
 const envMap = parseEnvFile(existingContent);
 let backupPath;
 if (backup) {
@@ -2612,15 +3022,15 @@ var EnvironmentManager = class {
 newLines.push(line);
 }
 if (!variableUpdated) {
-
+getLogger().info(`Adding ${key} to ${filePath}...`);
 newLines.push(formatEnvLine(key, value));
-
+getLogger().success(`${key} added successfully`);
 } else {
-
-
+getLogger().info(`Updating ${key} in ${filePath}...`);
+getLogger().success(`${key} updated successfully`);
 }
 const newContent = newLines.join("\n");
-await
+await fs3.writeFile(filePath, newContent, "utf8");
 return backupPath;
 }
 /**
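The setEnvVariable hunk above shows only the changed fs3 and logger calls; the surrounding upsert flow (create the file if missing, otherwise rewrite the matching KEY= line and append when absent) can be sketched as follows. This is illustrative TypeScript only, not the shipped code; formatEnvLine here is a simplified stand-in for the bundle's helper.

// Sketch of the env-file upsert flow, assuming simple KEY=value lines.
import fs from "fs-extra";

function formatEnvLine(key: string, value: string): string {
  return `${key}=${value}`;
}

async function upsertEnvVariable(filePath: string, key: string, value: string): Promise<void> {
  if (!(await fs.pathExists(filePath))) {
    // File does not exist yet: create it with the single variable.
    await fs.writeFile(filePath, formatEnvLine(key, value), "utf8");
    return;
  }
  const lines = (await fs.readFile(filePath, "utf8")).split("\n");
  let updated = false;
  const newLines = lines.map((line) => {
    if (line.startsWith(`${key}=`)) {
      updated = true;
      return formatEnvLine(key, value); // rewrite the existing assignment in place
    }
    return line;
  });
  if (!updated) newLines.push(formatEnvLine(key, value)); // append when the key was absent
  await fs.writeFile(filePath, newLines.join("\n"), "utf8");
}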
@@ -2628,10 +3038,10 @@ var EnvironmentManager = class {
 */
 async readEnvFile(filePath) {
 try {
-const content = await
+const content = await fs3.readFile(filePath, "utf8");
 return parseEnvFile(content);
 } catch (error) {
-
+getLogger().debug(
 `Could not read env file ${filePath}: ${error instanceof Error ? error.message : String(error)}`
 );
 return /* @__PURE__ */ new Map();
@@ -2651,13 +3061,13 @@ var EnvironmentManager = class {
 * @private
 */
 async copyIfExists(source, destination) {
-const sourceExists = await
+const sourceExists = await fs3.pathExists(source);
 if (!sourceExists) {
-
+getLogger().debug(`Source file ${source} does not exist, skipping copy`);
 return;
 }
-await
-
+await fs3.copy(source, destination, { overwrite: false });
+getLogger().success(`Copied ${source} to ${destination}`);
 }
 /**
 * Calculate unique port for workspace
@@ -2717,7 +3127,7 @@ var EnvironmentManager = class {
 */
 async validateEnvFile(filePath) {
 try {
-const content = await
+const content = await fs3.readFile(filePath, "utf8");
 const envMap = parseEnvFile(content);
 const errors = [];
 for (const [key, value] of envMap.entries()) {
@@ -2745,25 +3155,23 @@ var EnvironmentManager = class {
 async createBackup(filePath) {
 const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
 const backupPath = `${filePath}${this.backupSuffix}-${timestamp}`;
-await
-
+await fs3.copy(filePath, backupPath);
+getLogger().debug(`Created backup at ${backupPath}`);
 return backupPath;
 }
 };
 
 // src/lib/DatabaseManager.ts
-
-import fs3 from "fs-extra";
-var logger3 = createLogger({ prefix: "\u{1F5C2}\uFE0F" });
+import fs4 from "fs-extra";
 var DatabaseManager = class {
 constructor(provider, environment, databaseUrlEnvVarName = "DATABASE_URL") {
 this.provider = provider;
 this.environment = environment;
 this.databaseUrlEnvVarName = databaseUrlEnvVarName;
 if (databaseUrlEnvVarName !== "DATABASE_URL") {
-
+getLogger().debug(`DatabaseManager configured with custom variable: ${databaseUrlEnvVarName}`);
 } else {
-
+getLogger().debug("DatabaseManager using default variable: DATABASE_URL");
 }
 }
 /**
@@ -2780,12 +3188,12 @@ var DatabaseManager = class {
 */
 async shouldUseDatabaseBranching(workspacePath) {
 if (!this.provider.isConfigured()) {
-
+getLogger().debug("Skipping database branching: Database provider not configured");
 return false;
 }
 const hasDatabaseUrl = await this.hasDatabaseUrlInEnv(workspacePath);
 if (!hasDatabaseUrl) {
-
+getLogger().debug(
 "Skipping database branching: configured database URL variable not found in any env file"
 );
 return false;
@@ -2806,28 +3214,28 @@ var DatabaseManager = class {
 return null;
 }
 if (!await this.provider.isCliAvailable()) {
-
-
+getLogger().warn("Skipping database branch creation: Neon CLI not available");
+getLogger().warn("Install with: npm install -g neonctl");
 return null;
 }
 try {
 const isAuth = await this.provider.isAuthenticated(cwd);
 if (!isAuth) {
-
-
+getLogger().warn("Skipping database branch creation: Not authenticated with Neon CLI");
+getLogger().warn("Run: neon auth");
 return null;
 }
 } catch (error) {
 const errorMessage = error instanceof Error ? error.message : String(error);
-
+getLogger().error(`Database authentication check failed: ${errorMessage}`);
 throw error;
 }
 try {
 const connectionString = await this.provider.createBranch(branchName, fromBranch, cwd);
-
+getLogger().success(`Database branch ready: ${this.provider.sanitizeBranchName(branchName)}`);
 return connectionString;
 } catch (error) {
-
+getLogger().error(
 `Failed to create database branch: ${error instanceof Error ? error.message : String(error)}`
 );
 throw error;
@@ -2853,7 +3261,7 @@ var DatabaseManager = class {
 };
 }
 if (!this.provider.isConfigured()) {
-
+getLogger().debug("Skipping database branch deletion: Database provider not configured");
 return {
 success: true,
 deleted: false,
@@ -2862,7 +3270,7 @@ var DatabaseManager = class {
 };
 }
 if (!await this.provider.isCliAvailable()) {
-
+getLogger().info("Skipping database branch deletion: CLI tool not available");
 return {
 success: false,
 deleted: false,
@@ -2874,7 +3282,7 @@ var DatabaseManager = class {
 try {
 const isAuth = await this.provider.isAuthenticated(cwd);
 if (!isAuth) {
-
+getLogger().warn("Skipping database branch deletion: Not authenticated with DB Provider");
 return {
 success: false,
 deleted: false,
@@ -2885,7 +3293,7 @@ var DatabaseManager = class {
 }
 } catch (error) {
 const errorMessage = error instanceof Error ? error.message : String(error);
-
+getLogger().error(`Database authentication check failed: ${errorMessage}`);
 return {
 success: false,
 deleted: false,
@@ -2898,7 +3306,7 @@ var DatabaseManager = class {
 const result = await this.provider.deleteBranch(branchName, isPreview, cwd);
 return result;
 } catch (error) {
-
+getLogger().warn(
 `Unexpected error in database deletion: ${error instanceof Error ? error.message : String(error)}`
 );
 return {
@@ -2919,13 +3327,13 @@ var DatabaseManager = class {
 */
 async getBranchNameFromConnectionString(connectionString, cwd) {
 if (!this.provider.isConfigured()) {
-
+getLogger().debug("Provider not configured, skipping reverse lookup");
 return null;
 }
 if ("getBranchNameFromConnectionString" in this.provider && typeof this.provider.getBranchNameFromConnectionString === "function") {
 return this.provider.getBranchNameFromConnectionString(connectionString, cwd);
 }
-
+getLogger().debug("Provider does not support reverse lookup");
 return null;
 }
 /**
@@ -2936,26 +3344,26 @@ var DatabaseManager = class {
 async hasDatabaseUrlInEnv(workspacePath) {
 try {
 if (this.databaseUrlEnvVarName !== "DATABASE_URL") {
-
+getLogger().debug(`Looking for custom database URL variable: ${this.databaseUrlEnvVarName}`);
 } else {
-
+getLogger().debug("Looking for default database URL variable: DATABASE_URL");
 }
 const hasConfiguredVar = await hasVariableInAnyEnvFile(
 workspacePath,
 this.databaseUrlEnvVarName,
-async (p) =>
+async (p) => fs4.pathExists(p),
 async (p, v) => this.environment.getEnvVariable(p, v)
 );
 if (hasConfiguredVar) {
 if (this.databaseUrlEnvVarName !== "DATABASE_URL") {
-
+getLogger().debug(`\u2705 Found custom database URL variable: ${this.databaseUrlEnvVarName}`);
 } else {
-
+getLogger().debug(`\u2705 Found default database URL variable: DATABASE_URL`);
 }
 return true;
 }
 if (this.databaseUrlEnvVarName !== "DATABASE_URL") {
-
+getLogger().debug(`\u274C Custom database URL variable '${this.databaseUrlEnvVarName}' not found in any env file`);
 throw new Error(
 `Configured database URL environment variable '${this.databaseUrlEnvVarName}' not found in any dotenv-flow file. Please add it to an .env file or update your iloom configuration.`
 );
@@ -2963,13 +3371,13 @@ var DatabaseManager = class {
 const hasDefaultVar = await hasVariableInAnyEnvFile(
 workspacePath,
 "DATABASE_URL",
-async (p) =>
+async (p) => fs4.pathExists(p),
 async (p, v) => this.environment.getEnvVariable(p, v)
 );
 if (hasDefaultVar) {
-
+getLogger().debug("\u2705 Found fallback DATABASE_URL variable");
 } else {
-
+getLogger().debug("\u274C No DATABASE_URL variable found in any env file");
 }
 return hasDefaultVar;
 } catch (error) {
@@ -2986,6 +3394,7 @@ init_logger();
 import { execa as execa4 } from "execa";
 import { existsSync as existsSync2 } from "fs";
 import { join } from "path";
+import { createHash as createHash3 } from "crypto";
 
 // src/utils/terminal.ts
 import { execa as execa3 } from "execa";
@@ -3062,8 +3471,8 @@ async function buildAppleScript(options) {
 script += `end tell`;
 return script;
 }
-function escapePathForAppleScript(
-return
+function escapePathForAppleScript(path7) {
+return path7.replace(/'/g, "'\\''");
 }
 function escapeForAppleScript(command) {
 return command.replace(/\\/g, "\\\\").replace(/"/g, '\\"');
@@ -3152,7 +3561,8 @@ function parseJsonStreamOutput(output) {
 }
 }
 async function launchClaude(prompt, options = {}) {
-const { model, permissionMode, addDir, headless = false, appendSystemPrompt, mcpConfig, allowedTools, disallowedTools, agents } = options;
+const { model, permissionMode, addDir, headless = false, appendSystemPrompt, mcpConfig, allowedTools, disallowedTools, agents, sessionId } = options;
+const log = getLogger();
 const args = [];
 if (headless) {
 args.push("-p");
@@ -3186,6 +3596,9 @@ async function launchClaude(prompt, options = {}) {
 if (agents) {
 args.push("--agents", JSON.stringify(agents));
 }
+if (sessionId) {
+args.push("--session-id", sessionId);
+}
 try {
 if (headless) {
 const isDebugMode = logger.isDebugEnabled();
@@ -3210,13 +3623,13 @@ async function launchClaude(prompt, options = {}) {
 const text = chunk.toString();
 outputBuffer += text;
 if (isDebugMode) {
-
+log.stdout.write(text);
 } else {
 if (isFirstProgress) {
-
+log.stdout.write("\u{1F916} .");
 isFirstProgress = false;
 } else {
-
+log.stdout.write(".");
 }
 }
 });
@@ -3225,36 +3638,137 @@ async function launchClaude(prompt, options = {}) {
 if (isStreaming) {
 const rawOutput = outputBuffer.trim();
 if (!isDebugMode) {
-
+log.stdout.write("\n");
 }
 return isJsonStreamFormat ? parseJsonStreamOutput(rawOutput) : rawOutput;
 } else {
 if (isDebugMode) {
-
+log.stdout.write(result.stdout);
 if (result.stdout && !result.stdout.endsWith("\n")) {
-
+log.stdout.write("\n");
 }
 } else {
-
-
+log.stdout.write("\u{1F916} .");
+log.stdout.write("\n");
 }
 const rawOutput = result.stdout.trim();
 return isJsonStreamFormat ? parseJsonStreamOutput(rawOutput) : rawOutput;
 }
 } else {
-
-
-
-
-
-
-
-
-
+try {
+await execa4("claude", [...args, "--", prompt], {
+...addDir && { cwd: addDir },
+stdio: ["inherit", "inherit", "pipe"],
+// Capture stderr to detect session conflicts
+timeout: 0,
+// Disable timeout
+verbose: logger.isDebugEnabled()
+});
+return;
+} catch (interactiveError) {
+const interactiveExecaError = interactiveError;
+const interactiveErrorMessage = interactiveExecaError.stderr ?? interactiveExecaError.message ?? "";
+const sessionMatch = interactiveErrorMessage.match(/Session ID ([0-9a-f-]+) is already in use/i);
+const conflictSessionId = sessionMatch == null ? void 0 : sessionMatch[1];
+if (sessionMatch && sessionId && conflictSessionId) {
+log.debug(`Session ID ${conflictSessionId} already in use, retrying with --resume`);
+const resumeArgs = args.filter((arg, idx) => {
+if (arg === "--session-id") return false;
+if (idx > 0 && args[idx - 1] === "--session-id") return false;
+return true;
+});
+resumeArgs.push("--resume", conflictSessionId);
+await execa4("claude", resumeArgs, {
+...addDir && { cwd: addDir },
+stdio: "inherit",
+timeout: 0,
+verbose: logger.isDebugEnabled()
+});
+return;
+}
+throw interactiveError;
+}
 }
 } catch (error) {
 const execaError = error;
 const errorMessage = execaError.stderr ?? execaError.message ?? "Unknown Claude CLI error";
+const sessionInUseMatch = errorMessage.match(/Session ID ([0-9a-f-]+) is already in use/i);
+const extractedSessionId = sessionInUseMatch == null ? void 0 : sessionInUseMatch[1];
+if (sessionInUseMatch && sessionId && extractedSessionId) {
+log.debug(`Session ID ${extractedSessionId} already in use, retrying with --resume`);
+const resumeArgs = args.filter((arg, idx) => {
+if (arg === "--session-id") return false;
+if (idx > 0 && args[idx - 1] === "--session-id") return false;
+return true;
+});
+resumeArgs.push("--resume", extractedSessionId);
+try {
+if (headless) {
+const isDebugMode = logger.isDebugEnabled();
+const execaOptions = {
+input: prompt,
+timeout: 0,
+...addDir && { cwd: addDir },
+verbose: isDebugMode,
+...isDebugMode && { stdio: ["pipe", "pipe", "pipe"] }
+};
+const subprocess = execa4("claude", resumeArgs, execaOptions);
+const isJsonStreamFormat = resumeArgs.includes("--output-format") && resumeArgs.includes("stream-json");
+let outputBuffer = "";
+let isStreaming = false;
+let isFirstProgress = true;
+if (subprocess.stdout && typeof subprocess.stdout.on === "function") {
+isStreaming = true;
+subprocess.stdout.on("data", (chunk) => {
+const text = chunk.toString();
+outputBuffer += text;
+if (isDebugMode) {
+log.stdout.write(text);
+} else {
+if (isFirstProgress) {
+log.stdout.write("\u{1F916} .");
+isFirstProgress = false;
+} else {
+log.stdout.write(".");
+}
+}
+});
+}
+const result = await subprocess;
+if (isStreaming) {
+const rawOutput = outputBuffer.trim();
+if (!isDebugMode) {
+log.stdout.write("\n");
+}
+return isJsonStreamFormat ? parseJsonStreamOutput(rawOutput) : rawOutput;
+} else {
+if (isDebugMode) {
+log.stdout.write(result.stdout);
+if (result.stdout && !result.stdout.endsWith("\n")) {
+log.stdout.write("\n");
+}
+} else {
+log.stdout.write("\u{1F916} .");
+log.stdout.write("\n");
+}
+const rawOutput = result.stdout.trim();
+return isJsonStreamFormat ? parseJsonStreamOutput(rawOutput) : rawOutput;
+}
+} else {
+await execa4("claude", resumeArgs, {
+...addDir && { cwd: addDir },
+stdio: "inherit",
+timeout: 0,
+verbose: logger.isDebugEnabled()
+});
+return;
+}
+} catch (retryError) {
+const retryExecaError = retryError;
+const retryErrorMessage = retryExecaError.stderr ?? retryExecaError.message ?? "Unknown Claude CLI error";
+throw new Error(`Claude CLI error: ${retryErrorMessage}`);
+}
+}
 throw new Error(`Claude CLI error: ${errorMessage}`);
 }
 }
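The launchClaude changes above add a --session-id argument and, when the Claude CLI reports that the session ID is already in use, strip that flag pair and retry with --resume on the reported ID. A condensed sketch of that retry decision follows; this is illustrative TypeScript only, not the shipped code, and everything outside the execa calls and the error-message match is simplified.

// Sketch: retry a conflicting --session-id launch as a --resume of the existing session.
import { execa } from "execa";

async function runClaudeWithSession(args: string[], sessionId?: string): Promise<void> {
  try {
    await execa("claude", args, { stdio: "inherit", timeout: 0 });
  } catch (error) {
    const err = error as { stderr?: string; message?: string };
    const stderr = err.stderr ?? err.message ?? "";
    const match = stderr.match(/Session ID ([0-9a-f-]+) is already in use/i);
    if (!match || !sessionId) throw error; // not a session conflict: propagate
    // Drop the "--session-id <id>" pair and resume the session that already owns the ID.
    const resumeArgs = args.filter((arg, idx) => arg !== "--session-id" && args[idx - 1] !== "--session-id");
    resumeArgs.push("--resume", match[1]);
    await execa("claude", resumeArgs, { stdio: "inherit", timeout: 0 });
  }
}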
@@ -3300,7 +3814,7 @@ async function launchClaudeInNewTerminalWindow(_prompt, options) {
 init_logger();
 import { readFile as readFile2 } from "fs/promises";
 import { accessSync } from "fs";
-import
+import path6 from "path";
 import { fileURLToPath } from "url";
 var PromptTemplateManager = class {
 constructor(templateDir) {
@@ -3309,17 +3823,17 @@ var PromptTemplateManager = class {
 } else {
 const currentFileUrl = import.meta.url;
 const currentFilePath = fileURLToPath(currentFileUrl);
-const distDir =
-let templateDir2 =
+const distDir = path6.dirname(currentFilePath);
+let templateDir2 = path6.join(distDir, "prompts");
 let currentDir = distDir;
-while (currentDir !==
-const candidatePath =
+while (currentDir !== path6.dirname(currentDir)) {
+const candidatePath = path6.join(currentDir, "prompts");
 try {
 accessSync(candidatePath);
 templateDir2 = candidatePath;
 break;
 } catch {
-currentDir =
+currentDir = path6.dirname(currentDir);
 }
 }
 this.templateDir = templateDir2;
@@ -3334,7 +3848,7 @@ var PromptTemplateManager = class {
 * Load a template file by name
 */
 async loadTemplate(templateName) {
-const templatePath =
+const templatePath = path6.join(this.templateDir, `${templateName}-prompt.txt`);
 logger.debug("Loading template", {
 templateName,
 templateDir: this.templateDir,
@@ -3419,6 +3933,18 @@ var PromptTemplateManager = class {
 if (variables.VSCODE_SETTINGS_GITIGNORED !== void 0) {
 result = result.replace(/VSCODE_SETTINGS_GITIGNORED/g, variables.VSCODE_SETTINGS_GITIGNORED);
 }
+if (variables.SESSION_CONTEXT !== void 0) {
+result = result.replace(/SESSION_CONTEXT/g, variables.SESSION_CONTEXT);
+}
+if (variables.BRANCH_NAME !== void 0) {
+result = result.replace(/BRANCH_NAME/g, variables.BRANCH_NAME);
+}
+if (variables.LOOM_TYPE !== void 0) {
+result = result.replace(/LOOM_TYPE/g, variables.LOOM_TYPE);
+}
+if (variables.COMPACT_SUMMARIES !== void 0) {
+result = result.replace(/COMPACT_SUMMARIES/g, variables.COMPACT_SUMMARIES);
+}
 return result;
 }
 /**
@@ -3477,6 +4003,18 @@ var PromptTemplateManager = class {
 } else {
 result = result.replace(firstTimeUserRegex, "");
 }
+const interactiveModeRegex = /\{\{#IF INTERACTIVE_MODE\}\}(.*?)\{\{\/IF INTERACTIVE_MODE\}\}/gs;
+if (variables.INTERACTIVE_MODE === true) {
+result = result.replace(interactiveModeRegex, "$1");
+} else {
+result = result.replace(interactiveModeRegex, "");
+}
+const compactSummariesRegex = /\{\{#IF COMPACT_SUMMARIES\}\}(.*?)\{\{\/IF COMPACT_SUMMARIES\}\}/gs;
+if (variables.COMPACT_SUMMARIES !== void 0 && variables.COMPACT_SUMMARIES !== "") {
+result = result.replace(compactSummariesRegex, "$1");
+} else {
+result = result.replace(compactSummariesRegex, "");
+}
 return result;
 }
 /**
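The PromptTemplateManager hunks above add plain-text substitution for SESSION_CONTEXT, BRANCH_NAME, LOOM_TYPE and COMPACT_SUMMARIES, plus {{#IF VAR}}...{{/IF VAR}} blocks that are kept or stripped depending on the variable. A minimal sketch of that conditional-block handling follows; illustrative TypeScript only, not the shipped code, with a generic helper in place of the per-variable regexes above.

// Sketch: keep or drop a {{#IF VAR}}...{{/IF VAR}} block based on a flag.
function applyConditionalBlock(template: string, name: string, enabled: boolean): string {
  const blockRegex = new RegExp(`\\{\\{#IF ${name}\\}\\}(.*?)\\{\\{\\/IF ${name}\\}\\}`, "gs");
  // "$1" keeps only the inner content; an empty replacement removes the whole block.
  return template.replace(blockRegex, enabled ? "$1" : "");
}

// Example: strip the INTERACTIVE_MODE block for a headless run.
const prompt = applyConditionalBlock(
  "Run it.{{#IF INTERACTIVE_MODE}} Ask before each step.{{/IF INTERACTIVE_MODE}}",
  "INTERACTIVE_MODE",
  false
);
// prompt === "Run it."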
@@ -3502,15 +4040,6 @@ var ClaudeService = class {
 async isAvailable() {
 return detectClaudeCli();
 }
-/**
-* Get the appropriate model for a workflow type
-*/
-getModelForWorkflow(type) {
-if (type === "issue") {
-return "claude-sonnet-4-20250514";
-}
-return void 0;
-}
 /**
 * Get the appropriate permission mode for a workflow type
 */
@@ -3554,7 +4083,6 @@ var ClaudeService = class {
 variables.PORT = port;
 }
 const prompt = await this.templateManager.getPrompt(type, variables);
-const model = this.getModelForWorkflow(type);
 const permissionMode = this.getPermissionModeForWorkflow(type);
 if (permissionMode === "bypassPermissions") {
 logger.warn(
@@ -3565,9 +4093,6 @@ var ClaudeService = class {
 addDir: workspacePath,
 headless
 };
-if (model !== void 0) {
-claudeOptions.model = model;
-}
 if (permissionMode !== void 0 && permissionMode !== "default") {
 claudeOptions.permissionMode = permissionMode;
 }
@@ -3585,7 +4110,6 @@ var ClaudeService = class {
 }
 logger.debug("Launching Claude for workflow", {
 type,
-model,
 permissionMode,
 headless,
 workspacePath
@@ -3987,7 +4511,9 @@ export {
 UserAbortedCommitError,
 WorkspaceManager,
 branchExists,
+checkRemoteBranchStatus,
 createLogger,
+createStderrLogger,
 ensureRepositoryHasCommits,
 executeGitCommand,
 extractIssueNumber,
@@ -3995,16 +4521,20 @@ export {
 findAllBranchesForIssue,
 findMainWorktreePath,
 findMainWorktreePathWithSettings,
+findWorktreeForBranch,
 generateWorktreePath,
 getCurrentBranch,
 getDefaultBranch,
+getMergeTargetBranch,
 getRepoRoot,
 getWorktreeRoot,
 hasUncommittedChanges,
+isBranchMergedIntoMain,
 isEmptyRepository,
 isFileGitignored,
 isFileTrackedByGit,
 isPRBranch,
+isRemoteBranchUpToDate,
 isValidGitRepo,
 isWorktreePath,
 logger,