@iloom/cli 0.2.0 → 0.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +234 -667
- package/dist/BranchNamingService-OMWKUYMM.js +13 -0
- package/dist/ClaudeContextManager-3VXA6UPR.js +13 -0
- package/dist/ClaudeService-6CPK43N4.js +12 -0
- package/dist/GitHubService-EBOETDIW.js +11 -0
- package/dist/{LoomLauncher-CTSWJL35.js → LoomLauncher-JF7JZMTZ.js} +63 -32
- package/dist/LoomLauncher-JF7JZMTZ.js.map +1 -0
- package/dist/ProjectCapabilityDetector-34LU7JJ4.js +9 -0
- package/dist/{PromptTemplateManager-WII75TKH.js → PromptTemplateManager-A52RUAMS.js} +2 -2
- package/dist/README.md +234 -667
- package/dist/{SettingsManager-XOYCLH3D.js → SettingsManager-ZCWJ56WP.js} +12 -4
- package/dist/SettingsMigrationManager-AGIIIPDQ.js +10 -0
- package/dist/agents/iloom-issue-analyze-and-plan.md +125 -35
- package/dist/agents/iloom-issue-analyzer.md +284 -32
- package/dist/agents/iloom-issue-complexity-evaluator.md +40 -21
- package/dist/agents/iloom-issue-enhancer.md +69 -48
- package/dist/agents/iloom-issue-implementer.md +36 -25
- package/dist/agents/iloom-issue-planner.md +35 -24
- package/dist/agents/iloom-issue-reviewer.md +62 -9
- package/dist/{chunk-SWCRXDZC.js → chunk-3RUPPQRG.js} +1 -18
- package/dist/chunk-3RUPPQRG.js.map +1 -0
- package/dist/{chunk-HBVFXN7R.js → chunk-4BGK7T6X.js} +26 -3
- package/dist/chunk-4BGK7T6X.js.map +1 -0
- package/dist/{chunk-6LEQW46Y.js → chunk-4E4LD3QR.js} +72 -2
- package/dist/{chunk-6LEQW46Y.js.map → chunk-4E4LD3QR.js.map} +1 -1
- package/dist/{chunk-CWR2SANQ.js → chunk-EBISESAP.js} +1 -1
- package/dist/{chunk-TS6DL67T.js → chunk-G2IEYOLQ.js} +11 -38
- package/dist/chunk-G2IEYOLQ.js.map +1 -0
- package/dist/chunk-HBYZH6GD.js +1989 -0
- package/dist/chunk-HBYZH6GD.js.map +1 -0
- package/dist/chunk-INW24J2W.js +55 -0
- package/dist/chunk-INW24J2W.js.map +1 -0
- package/dist/{chunk-ZMNQBJUI.js → chunk-IP7SMKIF.js} +61 -22
- package/dist/chunk-IP7SMKIF.js.map +1 -0
- package/dist/{chunk-4IV6W4U5.js → chunk-IXKLYTWO.js} +12 -12
- package/dist/chunk-IXKLYTWO.js.map +1 -0
- package/dist/{chunk-JNKJ7NJV.js → chunk-JKXJ7BGL.js} +6 -2
- package/dist/{chunk-JNKJ7NJV.js.map → chunk-JKXJ7BGL.js.map} +1 -1
- package/dist/{chunk-LAPY6NAE.js → chunk-JQFO7QQN.js} +68 -12
- package/dist/{chunk-LAPY6NAE.js.map → chunk-JQFO7QQN.js.map} +1 -1
- package/dist/{SettingsMigrationManager-MTQIMI54.js → chunk-KLBYVHPK.js} +3 -2
- package/dist/{chunk-USVVV3FP.js → chunk-MKWYLDFK.js} +5 -5
- package/dist/chunk-O5OH5MRX.js +396 -0
- package/dist/chunk-O5OH5MRX.js.map +1 -0
- package/dist/{chunk-DJUGYNQE.js → chunk-PA6Q6AWM.js} +16 -3
- package/dist/chunk-PA6Q6AWM.js.map +1 -0
- package/dist/chunk-RO26VS3W.js +444 -0
- package/dist/chunk-RO26VS3W.js.map +1 -0
- package/dist/{chunk-VETG35MF.js → chunk-TSKY3JI7.js} +3 -3
- package/dist/{chunk-VETG35MF.js.map → chunk-TSKY3JI7.js.map} +1 -1
- package/dist/{chunk-LHP6ROUM.js → chunk-U5QDY7ZD.js} +4 -16
- package/dist/chunk-U5QDY7ZD.js.map +1 -0
- package/dist/{chunk-SPYPLHMK.js → chunk-VU3QMIP2.js} +34 -2
- package/dist/chunk-VU3QMIP2.js.map +1 -0
- package/dist/{chunk-PVAVNJKS.js → chunk-WEN5C5DM.js} +10 -1
- package/dist/chunk-WEN5C5DM.js.map +1 -0
- package/dist/{chunk-2PLUQT6J.js → chunk-XPKDPZ5D.js} +2 -2
- package/dist/{chunk-RF2YI2XJ.js → chunk-ZBQVSHVT.js} +5 -5
- package/dist/chunk-ZBQVSHVT.js.map +1 -0
- package/dist/{chunk-GZP4UGGM.js → chunk-ZM3CFL5L.js} +2 -2
- package/dist/{chunk-BLCTGFZN.js → chunk-ZT3YZB4K.js} +3 -4
- package/dist/chunk-ZT3YZB4K.js.map +1 -0
- package/dist/{chunk-MFU53H6J.js → chunk-ZWFBBPJI.js} +6 -6
- package/dist/{chunk-MFU53H6J.js.map → chunk-ZWFBBPJI.js.map} +1 -1
- package/dist/{claude-ZIWDG4XG.js → claude-LUZ35IMK.js} +2 -2
- package/dist/{cleanup-FEIVZSIV.js → cleanup-3MONU4PU.js} +88 -27
- package/dist/cleanup-3MONU4PU.js.map +1 -0
- package/dist/cli.js +2511 -62
- package/dist/cli.js.map +1 -1
- package/dist/{contribute-EMZKCAC6.js → contribute-UWJAGIG7.js} +6 -6
- package/dist/{feedback-LFNMQBAZ.js → feedback-W3BXTGIM.js} +15 -14
- package/dist/{feedback-LFNMQBAZ.js.map → feedback-W3BXTGIM.js.map} +1 -1
- package/dist/{git-WC6HZLOT.js → git-34Z6QVDS.js} +4 -2
- package/dist/{ignite-MQWVJEAB.js → ignite-KVJEFXNO.js} +32 -27
- package/dist/ignite-KVJEFXNO.js.map +1 -0
- package/dist/index.d.ts +359 -45
- package/dist/index.js +1267 -503
- package/dist/index.js.map +1 -1
- package/dist/{init-GJDYN2IK.js → init-L55Q73H4.js} +104 -40
- package/dist/init-L55Q73H4.js.map +1 -0
- package/dist/mcp/issue-management-server.js +934 -0
- package/dist/mcp/issue-management-server.js.map +1 -0
- package/dist/{neon-helpers-ZVIRPKCI.js → neon-helpers-WPUACUVC.js} +3 -3
- package/dist/neon-helpers-WPUACUVC.js.map +1 -0
- package/dist/{open-NXSN7XOC.js → open-LNRZL3UU.js} +39 -36
- package/dist/open-LNRZL3UU.js.map +1 -0
- package/dist/{prompt-ANTQWHUF.js → prompt-7INJ7YRU.js} +4 -2
- package/dist/prompt-7INJ7YRU.js.map +1 -0
- package/dist/prompts/init-prompt.txt +541 -98
- package/dist/prompts/issue-prompt.txt +27 -27
- package/dist/{rebase-DUNFOJVS.js → rebase-C4WNCVGM.js} +6 -6
- package/dist/{remote-ZCXJVVNW.js → remote-VUNCQZ6J.js} +3 -2
- package/dist/remote-VUNCQZ6J.js.map +1 -0
- package/dist/{run-O7ZK7CKA.js → run-IOGNIOYN.js} +39 -36
- package/dist/run-IOGNIOYN.js.map +1 -0
- package/dist/schema/settings.schema.json +59 -3
- package/dist/{test-git-T76HOTIA.js → test-git-J7I5MFYH.js} +3 -3
- package/dist/{test-prefix-6HJUVQMH.js → test-prefix-ZCONBCBX.js} +3 -3
- package/dist/{test-webserver-M2I3EV4J.js → test-webserver-DAHONWCS.js} +4 -4
- package/dist/test-webserver-DAHONWCS.js.map +1 -0
- package/package.json +3 -2
- package/dist/ClaudeContextManager-LVCYRM6Q.js +0 -13
- package/dist/ClaudeService-WVTWB3DK.js +0 -12
- package/dist/GitHubService-7E2S5NNZ.js +0 -11
- package/dist/LoomLauncher-CTSWJL35.js.map +0 -1
- package/dist/add-issue-OBI325W7.js +0 -69
- package/dist/add-issue-OBI325W7.js.map +0 -1
- package/dist/chunk-4IV6W4U5.js.map +0 -1
- package/dist/chunk-BLCTGFZN.js.map +0 -1
- package/dist/chunk-CVLAZRNB.js +0 -54
- package/dist/chunk-CVLAZRNB.js.map +0 -1
- package/dist/chunk-DJUGYNQE.js.map +0 -1
- package/dist/chunk-H4E4THUZ.js +0 -55
- package/dist/chunk-H4E4THUZ.js.map +0 -1
- package/dist/chunk-H5LDRGVK.js +0 -642
- package/dist/chunk-H5LDRGVK.js.map +0 -1
- package/dist/chunk-HBVFXN7R.js.map +0 -1
- package/dist/chunk-LHP6ROUM.js.map +0 -1
- package/dist/chunk-PVAVNJKS.js.map +0 -1
- package/dist/chunk-RF2YI2XJ.js.map +0 -1
- package/dist/chunk-SPYPLHMK.js.map +0 -1
- package/dist/chunk-SWCRXDZC.js.map +0 -1
- package/dist/chunk-SYOSCMIT.js +0 -545
- package/dist/chunk-SYOSCMIT.js.map +0 -1
- package/dist/chunk-T3KEIB4D.js +0 -243
- package/dist/chunk-T3KEIB4D.js.map +0 -1
- package/dist/chunk-TS6DL67T.js.map +0 -1
- package/dist/chunk-ZMNQBJUI.js.map +0 -1
- package/dist/cleanup-FEIVZSIV.js.map +0 -1
- package/dist/enhance-MNA4ZGXW.js +0 -176
- package/dist/enhance-MNA4ZGXW.js.map +0 -1
- package/dist/finish-TX5CJICB.js +0 -1749
- package/dist/finish-TX5CJICB.js.map +0 -1
- package/dist/ignite-MQWVJEAB.js.map +0 -1
- package/dist/init-GJDYN2IK.js.map +0 -1
- package/dist/mcp/chunk-6SDFJ42P.js +0 -62
- package/dist/mcp/chunk-6SDFJ42P.js.map +0 -1
- package/dist/mcp/claude-NDFOCQQQ.js +0 -249
- package/dist/mcp/claude-NDFOCQQQ.js.map +0 -1
- package/dist/mcp/color-QS5BFCNN.js +0 -168
- package/dist/mcp/color-QS5BFCNN.js.map +0 -1
- package/dist/mcp/github-comment-server.js +0 -168
- package/dist/mcp/github-comment-server.js.map +0 -1
- package/dist/mcp/terminal-OMNRFWB3.js +0 -227
- package/dist/mcp/terminal-OMNRFWB3.js.map +0 -1
- package/dist/open-NXSN7XOC.js.map +0 -1
- package/dist/run-O7ZK7CKA.js.map +0 -1
- package/dist/start-73I5W7WW.js +0 -983
- package/dist/start-73I5W7WW.js.map +0 -1
- package/dist/test-webserver-M2I3EV4J.js.map +0 -1
- /package/dist/{ClaudeContextManager-LVCYRM6Q.js.map → BranchNamingService-OMWKUYMM.js.map} +0 -0
- /package/dist/{ClaudeService-WVTWB3DK.js.map → ClaudeContextManager-3VXA6UPR.js.map} +0 -0
- /package/dist/{GitHubService-7E2S5NNZ.js.map → ClaudeService-6CPK43N4.js.map} +0 -0
- /package/dist/{PromptTemplateManager-WII75TKH.js.map → GitHubService-EBOETDIW.js.map} +0 -0
- /package/dist/{SettingsManager-XOYCLH3D.js.map → ProjectCapabilityDetector-34LU7JJ4.js.map} +0 -0
- /package/dist/{claude-ZIWDG4XG.js.map → PromptTemplateManager-A52RUAMS.js.map} +0 -0
- /package/dist/{git-WC6HZLOT.js.map → SettingsManager-ZCWJ56WP.js.map} +0 -0
- /package/dist/{neon-helpers-ZVIRPKCI.js.map → SettingsMigrationManager-AGIIIPDQ.js.map} +0 -0
- /package/dist/{chunk-CWR2SANQ.js.map → chunk-EBISESAP.js.map} +0 -0
- /package/dist/{SettingsMigrationManager-MTQIMI54.js.map → chunk-KLBYVHPK.js.map} +0 -0
- /package/dist/{chunk-USVVV3FP.js.map → chunk-MKWYLDFK.js.map} +0 -0
- /package/dist/{chunk-2PLUQT6J.js.map → chunk-XPKDPZ5D.js.map} +0 -0
- /package/dist/{chunk-GZP4UGGM.js.map → chunk-ZM3CFL5L.js.map} +0 -0
- /package/dist/{prompt-ANTQWHUF.js.map → claude-LUZ35IMK.js.map} +0 -0
- /package/dist/{contribute-EMZKCAC6.js.map → contribute-UWJAGIG7.js.map} +0 -0
- /package/dist/{remote-ZCXJVVNW.js.map → git-34Z6QVDS.js.map} +0 -0
- /package/dist/{rebase-DUNFOJVS.js.map → rebase-C4WNCVGM.js.map} +0 -0
- /package/dist/{test-git-T76HOTIA.js.map → test-git-J7I5MFYH.js.map} +0 -0
- /package/dist/{test-prefix-6HJUVQMH.js.map → test-prefix-ZCONBCBX.js.map} +0 -0
@@ -0,0 +1,1989 @@
#!/usr/bin/env node
import {
  formatEnvLine,
  loadEnvIntoProcess,
  parseEnvFile,
  validateEnvVariable
} from "./chunk-IP7SMKIF.js";
import {
  calculatePortForBranch
} from "./chunk-VU3QMIP2.js";
import {
  installDependencies,
  runScript
} from "./chunk-ZT3YZB4K.js";
import {
  hasScript,
  readPackageJson
} from "./chunk-2ZPFJQ3B.js";
import {
  SettingsManager
} from "./chunk-O5OH5MRX.js";
import {
  branchExists,
  ensureRepositoryHasCommits,
  executeGitCommand,
  extractIssueNumber,
  findMainWorktreePathWithSettings,
  hasUncommittedChanges
} from "./chunk-4BGK7T6X.js";
import {
  calculateForegroundColor,
  generateColorFromBranchName,
  hexToRgb,
  lightenColor,
  rgbToHex
} from "./chunk-ZZZWQGTS.js";
import {
  createLogger,
  logger
} from "./chunk-GEHQXLEI.js";

// src/lib/LoomManager.ts
import path2 from "path";
import fs2 from "fs-extra";

// src/lib/VSCodeIntegration.ts
import fs from "fs-extra";
import path from "path";
import { parse, modify, applyEdits } from "jsonc-parser";
var VSCodeIntegration = class {
  /**
   * Set VSCode title bar color for a workspace
   *
   * @param workspacePath - Path to workspace directory
   * @param hexColor - Hex color string (e.g., "#dcebf8")
   */
  async setTitleBarColor(workspacePath, hexColor) {
    const vscodeDir = path.join(workspacePath, ".vscode");
    const settingsPath = path.join(vscodeDir, "settings.json");
    try {
      await fs.ensureDir(vscodeDir);
      const settings = await this.readSettings(settingsPath);
      const updatedSettings = this.mergeColorSettings(settings, hexColor);
      await this.writeSettings(settingsPath, updatedSettings);
      logger.debug(`Set VSCode title bar color to ${hexColor} for ${workspacePath}`);
    } catch (error) {
      throw new Error(
        `Failed to set VSCode title bar color: ${error instanceof Error ? error.message : "Unknown error"}`
      );
    }
  }
  /**
   * Read VSCode settings from file
   * Supports JSONC (JSON with Comments)
   *
   * @param settingsPath - Path to settings.json file
   * @returns Parsed settings object
   */
  async readSettings(settingsPath) {
    try {
      if (!await fs.pathExists(settingsPath)) {
        return {};
      }
      const content = await fs.readFile(settingsPath, "utf8");
      const errors = [];
      const settings = parse(content, errors, { allowTrailingComma: true });
      if (errors.length > 0) {
        const firstError = errors[0];
        throw new Error(`Invalid JSON: ${firstError ? firstError.error : "Unknown parse error"}`);
      }
      return settings ?? {};
    } catch (error) {
      throw new Error(
        `Failed to parse settings.json: ${error instanceof Error ? error.message : "Unknown error"}`
      );
    }
  }
  /**
   * Write VSCode settings to file atomically
   * Preserves comments if present (using JSONC parser)
   *
   * @param settingsPath - Path to settings.json file
   * @param settings - Settings object to write
   */
  async writeSettings(settingsPath, settings) {
    try {
      let content;
      if (await fs.pathExists(settingsPath)) {
        const existingContent = await fs.readFile(settingsPath, "utf8");
        if (existingContent.includes("//") || existingContent.includes("/*")) {
          content = await this.modifyWithCommentsPreserved(existingContent, settings);
        } else {
          content = JSON.stringify(settings, null, 2) + "\n";
        }
      } else {
        content = JSON.stringify(settings, null, 2) + "\n";
      }
      const tempPath = `${settingsPath}.tmp`;
      await fs.writeFile(tempPath, content, "utf8");
      await fs.rename(tempPath, settingsPath);
    } catch (error) {
      throw new Error(
        `Failed to write settings.json: ${error instanceof Error ? error.message : "Unknown error"}`
      );
    }
  }
  /**
   * Modify JSONC content while preserving comments
   *
   * @param existingContent - Original JSONC content
   * @param newSettings - New settings to apply
   * @returns Modified JSONC content with comments preserved
   */
  async modifyWithCommentsPreserved(existingContent, newSettings) {
    let modifiedContent = existingContent;
    for (const [key, value] of Object.entries(newSettings)) {
      const edits = modify(modifiedContent, [key], value, {});
      modifiedContent = applyEdits(modifiedContent, edits);
    }
    return modifiedContent;
  }
  /**
   * Merge color settings into existing settings object
   *
   * @param existing - Existing settings object
   * @param hexColor - Hex color to apply (subtle palette color)
   * @returns Updated settings object with color merged
   */
  mergeColorSettings(existing, hexColor) {
    const updated = { ...existing };
    updated["workbench.colorCustomizations"] ??= {};
    const colors = updated["workbench.colorCustomizations"];
    const baseRgb = hexToRgb(hexColor);
    const foreground = calculateForegroundColor(baseRgb);
    const foregroundTransparent = foreground.replace("#", "#") + "99";
    const lighterRgb = lightenColor(baseRgb, 0.05);
    const lighterHex = rgbToHex(lighterRgb.r, lighterRgb.g, lighterRgb.b);
    colors["titleBar.activeBackground"] = hexColor;
    colors["titleBar.inactiveBackground"] = hexColor + "99";
    colors["titleBar.activeForeground"] = foreground;
    colors["titleBar.inactiveForeground"] = foregroundTransparent;
    colors["statusBar.background"] = hexColor;
    colors["statusBar.foreground"] = foreground;
    colors["statusBarItem.hoverBackground"] = lighterHex;
    colors["statusBarItem.remoteBackground"] = hexColor;
    colors["statusBarItem.remoteForeground"] = foreground;
    colors["sash.hoverBorder"] = hexColor;
    colors["commandCenter.border"] = foregroundTransparent;
    return updated;
  }
};

// src/lib/LoomManager.ts
var LoomManager = class {
  constructor(gitWorktree, issueTracker, branchNaming, environment, _claude, capabilityDetector, cliIsolation, settings, database) {
    this.gitWorktree = gitWorktree;
    this.issueTracker = issueTracker;
    this.branchNaming = branchNaming;
    this.environment = environment;
    this.capabilityDetector = capabilityDetector;
    this.cliIsolation = cliIsolation;
    this.settings = settings;
    this.database = database;
  }
  /**
   * Get database branch name for a loom by reading its .env file
   * Returns null if database is not configured or branch cannot be determined
   *
   * @param loomPath - Path to the loom worktree
   */
  async getDatabaseBranchForLoom(loomPath) {
    var _a, _b;
    if (!this.database) {
      return null;
    }
    try {
      const envFilePath = path2.join(loomPath, ".env");
      const settings = await this.settings.loadSettings();
      const databaseUrlVarName = ((_b = (_a = settings.capabilities) == null ? void 0 : _a.database) == null ? void 0 : _b.databaseUrlEnvVarName) ?? "DATABASE_URL";
      const connectionString = await this.environment.getEnvVariable(envFilePath, databaseUrlVarName);
      if (!connectionString) {
        return null;
      }
      return await this.database.getBranchNameFromConnectionString(connectionString, loomPath);
    } catch (error) {
      logger.debug(`Could not get database branch for loom at ${loomPath}: ${error instanceof Error ? error.message : "Unknown error"}`);
      return null;
    }
  }
  /**
   * Create a new loom (isolated workspace)
   * Orchestrates worktree creation, environment setup, and Claude context generation
   * NEW: Checks for existing worktrees and reuses them if found
   */
  async createIloom(input) {
    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m;
    logger.info("Fetching issue data...");
    const issueData = await this.fetchIssueData(input);
    if (input.type === "issue" || input.type === "pr" || input.type === "branch") {
      logger.info("Checking for existing worktree...");
      const existing = await this.findExistingIloom(input, issueData);
      if (existing) {
        logger.success(`Found existing worktree, reusing: ${existing.path}`);
        return await this.reuseIloom(existing, input, issueData);
      }
      logger.info("No existing worktree found, creating new one...");
    }
    logger.info("Preparing branch name...");
    const branchName = await this.prepareBranchName(input, issueData);
    logger.info("Creating git worktree...");
    const worktreePath = await this.createWorktreeOnly(input, branchName);
    this.loadMainEnvFile();
    const { capabilities, binEntries } = await this.capabilityDetector.detectCapabilities(worktreePath);
    await this.copyEnvironmentFiles(worktreePath);
    await this.copyIloomSettings(worktreePath, (_a = input.parentLoom) == null ? void 0 : _a.branchName);
    const settingsData = await this.settings.loadSettings();
    const basePort = ((_c = (_b = settingsData.capabilities) == null ? void 0 : _b.web) == null ? void 0 : _c.basePort) ?? 3e3;
    let port = basePort;
    if (capabilities.includes("web")) {
      port = await this.setupPortForWeb(worktreePath, input, basePort);
    }
    try {
      await installDependencies(worktreePath, true, true);
    } catch (error) {
      logger.warn(`Failed to install dependencies: ${error instanceof Error ? error.message : "Unknown error"}`, error);
    }
    let databaseBranch = void 0;
    if (this.database && !((_d = input.options) == null ? void 0 : _d.skipDatabase)) {
      try {
        const connectionString = await this.database.createBranchIfConfigured(
          branchName,
          path2.join(worktreePath, ".env"),
          void 0,
          // cwd
          (_e = input.parentLoom) == null ? void 0 : _e.databaseBranch
          // fromBranch - use parent's database branch for child looms
        );
        if (connectionString) {
          await this.environment.setEnvVar(
            path2.join(worktreePath, ".env"),
            this.database.getConfiguredVariableName(),
            connectionString
          );
          logger.success("Database branch configured");
          databaseBranch = branchName;
        }
      } catch (error) {
        logger.error(
          `Failed to setup database branch: ${error instanceof Error ? error.message : "Unknown error"}`
        );
        throw error;
      }
    }
    let cliSymlinks = void 0;
    if (capabilities.includes("cli")) {
      try {
        cliSymlinks = await this.cliIsolation.setupCLIIsolation(
          worktreePath,
          input.identifier,
          binEntries
        );
      } catch (error) {
        logger.warn(
          `Failed to setup CLI isolation: ${error instanceof Error ? error.message : "Unknown error"}`,
          error
        );
      }
    }
    if (!((_f = input.options) == null ? void 0 : _f.skipColorSync)) {
      try {
        await this.applyColorSynchronization(worktreePath, branchName);
      } catch (error) {
        logger.warn(
          `Failed to apply color synchronization: ${error instanceof Error ? error.message : "Unknown error"}`,
          error
        );
      }
    }
    if (input.type === "issue") {
      try {
        logger.info("Moving issue to In Progress...");
        if (this.issueTracker.moveIssueToInProgress) {
          await this.issueTracker.moveIssueToInProgress(input.identifier);
        }
      } catch (error) {
        logger.warn(
          `Failed to move issue to In Progress: ${error instanceof Error ? error.message : "Unknown error"}`,
          error
        );
      }
    }
    const enableClaude = ((_g = input.options) == null ? void 0 : _g.enableClaude) !== false;
    const enableCode = ((_h = input.options) == null ? void 0 : _h.enableCode) !== false;
    const enableDevServer = ((_i = input.options) == null ? void 0 : _i.enableDevServer) !== false;
    const enableTerminal = ((_j = input.options) == null ? void 0 : _j.enableTerminal) ?? false;
    const oneShot = ((_k = input.options) == null ? void 0 : _k.oneShot) ?? "default";
    const setArguments = (_l = input.options) == null ? void 0 : _l.setArguments;
    const executablePath = (_m = input.options) == null ? void 0 : _m.executablePath;
    if (enableClaude || enableCode || enableDevServer || enableTerminal) {
      const { LoomLauncher } = await import("./LoomLauncher-JF7JZMTZ.js");
      const { ClaudeContextManager } = await import("./ClaudeContextManager-3VXA6UPR.js");
      const claudeContext = new ClaudeContextManager(void 0, void 0, this.settings);
      const launcher = new LoomLauncher(claudeContext, this.settings);
      await launcher.launchLoom({
        enableClaude,
        enableCode,
        enableDevServer,
        enableTerminal,
        worktreePath,
        branchName,
        port,
        capabilities,
        workflowType: input.type === "branch" ? "regular" : input.type,
        identifier: input.identifier,
        ...(issueData == null ? void 0 : issueData.title) && { title: issueData.title },
        oneShot,
        ...setArguments && { setArguments },
        ...executablePath && { executablePath },
        sourceEnvOnStart: settingsData.sourceEnvOnStart ?? false
      });
    }
    const loom = {
      id: this.generateLoomId(input),
      path: worktreePath,
      branch: branchName,
      type: input.type,
      identifier: input.identifier,
      port,
      createdAt: /* @__PURE__ */ new Date(),
      lastAccessed: /* @__PURE__ */ new Date(),
      ...databaseBranch !== void 0 && { databaseBranch },
      ...capabilities.length > 0 && { capabilities },
      ...Object.keys(binEntries).length > 0 && { binEntries },
      ...cliSymlinks && cliSymlinks.length > 0 && { cliSymlinks },
      ...issueData !== null && {
        issueData: {
          title: issueData.title,
          body: issueData.body,
          url: issueData.url,
          state: issueData.state
        }
      }
    };
    logger.success(`Created loom: ${loom.id} at ${loom.path}`);
    return loom;
  }
  /**
   * Finish a loom (merge work and cleanup)
   * Not yet implemented - see Issue #7
   */
  async finishIloom(_identifier) {
    throw new Error("Not implemented - see Issue #7");
  }
  /**
   * List all active looms
   */
  async listLooms() {
    const worktrees = await this.gitWorktree.listWorktrees();
    return await this.mapWorktreesToLooms(worktrees);
  }
  /**
   * Find a specific loom by identifier
   * Case-insensitive matching for Linear IDs (MARK-1 vs mark-1)
   */
  async findIloom(identifier) {
    const looms = await this.listLooms();
    const lowerIdentifier = identifier.toLowerCase();
    return looms.find(
      (h) => h.id.toLowerCase() === lowerIdentifier || h.identifier.toString().toLowerCase() === lowerIdentifier || h.branch.toLowerCase() === lowerIdentifier
    ) ?? null;
  }
  /**
   * Find child looms for a given parent loom
   * Child looms are worktrees created with the parent loom as their base
   *
   * @param parentBranchName - The parent loom's branch name
   * @returns Array of child loom worktrees
   */
  async findChildLooms(parentBranchName) {
    try {
      const worktrees = await this.gitWorktree.listWorktrees();
      if (!worktrees) {
        return [];
      }
      const sanitizedBranchName = parentBranchName.replace(/\//g, "-").replace(/[^a-zA-Z0-9-_]/g, "-");
      const pattern = `${sanitizedBranchName}-looms/`;
      return worktrees.filter((wt) => wt.path.includes(pattern));
    } catch (error) {
      logger.debug(`Failed to find child looms: ${error instanceof Error ? error.message : "Unknown error"}`);
      return [];
    }
  }
  /**
   * Check for child looms and warn user if any exist
   * This is useful before finishing or cleaning up a parent loom
   *
   * @param branchName - Optional branch name to check. If not provided, uses current branch.
   * @returns true if child looms were found, false otherwise
   */
  async checkAndWarnChildLooms(branchName) {
    let targetBranch = branchName;
    if (!targetBranch) {
      const { getCurrentBranch } = await import("./git-34Z6QVDS.js");
      targetBranch = await getCurrentBranch();
    }
    if (!targetBranch) {
      return false;
    }
    const childLooms = await this.findChildLooms(targetBranch);
    if (childLooms.length > 0) {
      logger.warn(`Found ${childLooms.length} child loom(s) that should be finished first:`);
      for (const child of childLooms) {
        logger.warn(` - ${child.path}`);
      }
      logger.warn("");
      logger.warn("To finish child looms:");
      for (const child of childLooms) {
        const prMatch = child.branch.match(/_pr_(\d+)/);
        const issueId = extractIssueNumber(child.branch);
        const childIdentifier = prMatch ? prMatch[1] : issueId ?? child.branch;
        logger.warn(` il finish ${childIdentifier}`);
      }
      logger.warn("");
      return true;
    }
    return false;
  }
  /**
   * Fetch issue/PR data based on input type
   */
  async fetchIssueData(input) {
    if (input.type === "issue") {
      return await this.issueTracker.fetchIssue(input.identifier);
    } else if (input.type === "pr") {
      if (!this.issueTracker.supportsPullRequests || !this.issueTracker.fetchPR) {
        throw new Error("Issue tracker does not support pull requests");
      }
      return await this.issueTracker.fetchPR(input.identifier);
    }
    return null;
  }
  /**
   * Prepare branch name based on input type and issue/PR data
   */
  async prepareBranchName(input, issueData) {
    if (input.type === "branch") {
      return input.identifier;
    }
    if (input.type === "pr" && issueData && "branch" in issueData) {
      return issueData.branch;
    }
    if (input.type === "issue" && issueData) {
      const branchName = await this.branchNaming.generateBranchName({
        issueNumber: input.identifier,
        title: issueData.title
      });
      return branchName;
    }
    if (input.type === "pr") {
      return `pr-${input.identifier}`;
    }
    throw new Error(`Unable to determine branch name for input type: ${input.type}`);
  }
  /**
   * Create worktree for the loom (without dependency installation)
   */
  async createWorktreeOnly(input, branchName) {
    var _a;
    logger.info("Ensuring repository has initial commit...");
    await ensureRepositoryHasCommits(this.gitWorktree.workingDirectory);
    const settingsData = await this.settings.loadSettings();
    let worktreePrefix = settingsData.worktreePrefix;
    if (input.parentLoom) {
      const sanitizedBranchName = input.parentLoom.branchName.replace(/\//g, "-").replace(/[^a-zA-Z0-9-_]/g, "-");
      worktreePrefix = `${sanitizedBranchName}-looms/`;
      logger.info(`Creating child loom with prefix: ${worktreePrefix}`);
    }
    const pathOptions = input.type === "pr" ? { isPR: true, prNumber: input.identifier } : {};
    if (worktreePrefix !== void 0) {
      pathOptions.prefix = worktreePrefix;
    }
    const worktreePath = this.gitWorktree.generateWorktreePath(
      branchName,
      void 0,
      pathOptions
    );
    if (input.type === "pr") {
      logger.info("Fetching all remote branches...");
      try {
        await executeGitCommand(["fetch", "origin"], { cwd: this.gitWorktree.workingDirectory });
        logger.success("Successfully fetched from remote");
      } catch (error) {
        throw new Error(
          `Failed to fetch from remote: ${error instanceof Error ? error.message : "Unknown error"}. Make sure you have access to the repository.`
        );
      }
    }
    const branchExistedLocally = await branchExists(branchName);
    if (input.type !== "pr" && branchExistedLocally) {
      throw new Error(
        `Cannot create worktree: branch '${branchName}' already exists. Use 'git branch -D ${branchName}' to delete it first if needed.`
      );
    }
    const baseBranch = ((_a = input.parentLoom) == null ? void 0 : _a.branchName) ?? input.baseBranch;
    await this.gitWorktree.createWorktree({
      path: worktreePath,
      branch: branchName,
      createBranch: input.type !== "pr",
      // PRs use existing branches
      ...baseBranch && { baseBranch }
    });
    if (input.type === "pr" && !branchExistedLocally) {
      logger.info("Resetting new PR branch to match remote exactly...");
      try {
        await executeGitCommand(["reset", "--hard", `origin/${branchName}`], { cwd: worktreePath });
        await executeGitCommand(["branch", "--set-upstream-to", `origin/${branchName}`], { cwd: worktreePath });
        logger.success("Successfully reset to match remote");
      } catch (error) {
        logger.warn(`Failed to reset to match remote: ${error instanceof Error ? error.message : "Unknown error"}`);
      }
    }
    return worktreePath;
  }
  /**
   * Copy user application environment files (.env) from main repo to worktree
   * Always called regardless of project capabilities
   */
  async copyEnvironmentFiles(worktreePath) {
    const envFilePath = path2.join(worktreePath, ".env");
    try {
      const mainEnvPath = path2.join(process.cwd(), ".env");
      if (await fs2.pathExists(envFilePath)) {
        logger.warn(".env file already exists in worktree, skipping copy");
      } else {
        await this.environment.copyIfExists(mainEnvPath, envFilePath);
      }
    } catch (error) {
      logger.warn(`Warning: Failed to copy main .env file: ${error instanceof Error ? error.message : "Unknown error"}`);
    }
  }
  /**
   * Copy iloom configuration (settings.local.json) from main repo to worktree
   * Always called regardless of project capabilities
   * @param worktreePath Path to the worktree
   * @param parentBranchName Optional parent branch name for child looms (sets mainBranch)
   */
  async copyIloomSettings(worktreePath, parentBranchName) {
    const mainSettingsLocalPath = path2.join(process.cwd(), ".iloom", "settings.local.json");
    try {
      const worktreeIloomDir = path2.join(worktreePath, ".iloom");
      await fs2.ensureDir(worktreeIloomDir);
      const worktreeSettingsLocalPath = path2.join(worktreeIloomDir, "settings.local.json");
      if (await fs2.pathExists(worktreeSettingsLocalPath)) {
        logger.warn("settings.local.json already exists in worktree, skipping copy");
      } else {
        await this.environment.copyIfExists(mainSettingsLocalPath, worktreeSettingsLocalPath);
      }
      if (parentBranchName) {
        let existingSettings = {};
        try {
          const content = await fs2.readFile(worktreeSettingsLocalPath, "utf8");
          existingSettings = JSON.parse(content);
        } catch {
        }
        const updatedSettings = {
          ...existingSettings,
          mainBranch: parentBranchName
        };
        await fs2.writeFile(worktreeSettingsLocalPath, JSON.stringify(updatedSettings, null, 2));
        logger.info(`Set mainBranch to ${parentBranchName} for child loom`);
      }
    } catch (error) {
      logger.warn(`Warning: Failed to copy settings.local.json: ${error instanceof Error ? error.message : "Unknown error"}`);
    }
  }
  /**
   * Setup PORT environment variable for web projects
   * Only called when project has web capabilities
   */
  async setupPortForWeb(worktreePath, input, basePort) {
    const envFilePath = path2.join(worktreePath, ".env");
    const options = { basePort };
    if (input.type === "issue") {
      options.issueNumber = input.identifier;
    } else if (input.type === "pr") {
      options.prNumber = input.identifier;
    } else if (input.type === "branch") {
      options.branchName = input.identifier;
    }
    const port = this.environment.calculatePort(options);
    await this.environment.setEnvVar(envFilePath, "PORT", String(port));
    return port;
  }
  /**
   * Load environment variables from main .env file into process.env
   * Uses dotenv-flow to handle various .env file patterns
   */
  loadMainEnvFile() {
    const result = loadEnvIntoProcess({ path: process.cwd() });
    if (result.error) {
      logger.warn(`Warning: Could not load .env files: ${result.error.message}`);
    } else {
      logger.info("Loaded environment variables using dotenv-flow");
      if (result.parsed && Object.keys(result.parsed).length > 0) {
        logger.debug(`Loaded ${Object.keys(result.parsed).length} environment variables`);
      }
    }
  }
  /**
   * Generate a unique loom ID
   */
  generateLoomId(input) {
    const prefix = input.type;
    return `${prefix}-${input.identifier}`;
  }
  /**
   * Calculate port for the loom
   * Base port: configurable via settings.capabilities.web.basePort (default 3000) + issue/PR number (or deterministic hash for branches)
   */
  async calculatePort(input) {
    var _a, _b;
    const settingsData = await this.settings.loadSettings();
    const basePort = ((_b = (_a = settingsData.capabilities) == null ? void 0 : _a.web) == null ? void 0 : _b.basePort) ?? 3e3;
    if (input.type === "issue" && typeof input.identifier === "number") {
      return this.environment.calculatePort({ basePort, issueNumber: input.identifier });
    }
    if (input.type === "pr" && typeof input.identifier === "number") {
      return this.environment.calculatePort({ basePort, prNumber: input.identifier });
    }
    if (input.type === "branch" && typeof input.identifier === "string") {
      return this.environment.calculatePort({ basePort, branchName: input.identifier });
    }
    throw new Error(`Unknown input type: ${input.type}`);
  }
  /**
   * Apply color synchronization to both VSCode and terminal
   * Colors are cosmetic - errors are logged but don't block workflow
   */
  async applyColorSynchronization(worktreePath, branchName) {
    const colorData = generateColorFromBranchName(branchName);
    const vscode = new VSCodeIntegration();
    await vscode.setTitleBarColor(worktreePath, colorData.hex);
    logger.info(`Applied VSCode title bar color: ${colorData.hex} for branch: ${branchName}`);
  }
  /**
   * Map worktrees to loom objects
   * This is a simplified conversion - in production we'd store loom metadata
   */
  async mapWorktreesToLooms(worktrees) {
    return await Promise.all(worktrees.map(async (wt) => {
      let type = "branch";
      let identifier = wt.branch;
      if (wt.branch.startsWith("issue-")) {
        type = "issue";
        identifier = parseInt(wt.branch.replace("issue-", ""), 10);
      } else if (wt.branch.startsWith("pr-")) {
        type = "pr";
        identifier = parseInt(wt.branch.replace("pr-", ""), 10);
      }
      return {
        id: `${type}-${identifier}`,
        path: wt.path,
        branch: wt.branch,
        type,
        identifier,
        port: await this.calculatePort({ type, identifier, originalInput: "" }),
        createdAt: /* @__PURE__ */ new Date(),
        lastAccessed: /* @__PURE__ */ new Date()
      };
    }));
  }
  /**
   * NEW: Find existing loom for the given input
   * Checks for worktrees matching the issue/PR identifier
   */
  async findExistingIloom(input, issueData) {
    if (input.type === "issue") {
      return await this.gitWorktree.findWorktreeForIssue(input.identifier);
    } else if (input.type === "pr" && issueData && "branch" in issueData) {
      return await this.gitWorktree.findWorktreeForPR(
        input.identifier,
        issueData.branch
      );
    } else if (input.type === "branch") {
      return await this.gitWorktree.findWorktreeForBranch(input.identifier);
    }
    return null;
  }
  /**
   * NEW: Reuse an existing loom
   * Includes environment setup and database branching for existing worktrees
   * Ports: handle_existing_worktree() from bash script lines 168-215
   */
  async reuseIloom(worktree, input, issueData) {
    var _a, _b, _c, _d, _e, _f, _g, _h, _i;
    const worktreePath = worktree.path;
    const branchName = worktree.branch;
    this.loadMainEnvFile();
    const { capabilities, binEntries } = await this.capabilityDetector.detectCapabilities(worktreePath);
    await this.copyEnvironmentFiles(worktreePath);
    await this.copyIloomSettings(worktreePath);
    const settingsData = await this.settings.loadSettings();
    const basePort = ((_b = (_a = settingsData.capabilities) == null ? void 0 : _a.web) == null ? void 0 : _b.basePort) ?? 3e3;
    let port = basePort;
    if (capabilities.includes("web")) {
      port = await this.setupPortForWeb(worktreePath, input, basePort);
    }
    logger.info("Database branch assumed to be already configured for existing worktree");
    const databaseBranch = void 0;
    if (input.type === "issue") {
      try {
        logger.info("Moving issue to In Progress...");
        if (this.issueTracker.moveIssueToInProgress) {
          await this.issueTracker.moveIssueToInProgress(input.identifier);
        }
      } catch (error) {
        logger.warn(
          `Failed to move issue to In Progress: ${error instanceof Error ? error.message : "Unknown error"}`,
          error
        );
      }
    }
    const enableClaude = ((_c = input.options) == null ? void 0 : _c.enableClaude) !== false;
    const enableCode = ((_d = input.options) == null ? void 0 : _d.enableCode) !== false;
    const enableDevServer = ((_e = input.options) == null ? void 0 : _e.enableDevServer) !== false;
    const enableTerminal = ((_f = input.options) == null ? void 0 : _f.enableTerminal) ?? false;
    const oneShot = ((_g = input.options) == null ? void 0 : _g.oneShot) ?? "default";
    const setArguments = (_h = input.options) == null ? void 0 : _h.setArguments;
    const executablePath = (_i = input.options) == null ? void 0 : _i.executablePath;
    if (enableClaude || enableCode || enableDevServer || enableTerminal) {
      logger.info("Launching workspace components...");
      const { LoomLauncher } = await import("./LoomLauncher-JF7JZMTZ.js");
      const { ClaudeContextManager } = await import("./ClaudeContextManager-3VXA6UPR.js");
      const claudeContext = new ClaudeContextManager(void 0, void 0, this.settings);
      const launcher = new LoomLauncher(claudeContext, this.settings);
      await launcher.launchLoom({
        enableClaude,
        enableCode,
        enableDevServer,
        enableTerminal,
        worktreePath,
        branchName,
        port,
        capabilities,
        workflowType: input.type === "branch" ? "regular" : input.type,
        identifier: input.identifier,
        ...(issueData == null ? void 0 : issueData.title) && { title: issueData.title },
        oneShot,
        ...setArguments && { setArguments },
        ...executablePath && { executablePath },
        sourceEnvOnStart: settingsData.sourceEnvOnStart ?? false
      });
    }
    const loom = {
      id: this.generateLoomId(input),
      path: worktreePath,
      branch: branchName,
      type: input.type,
      identifier: input.identifier,
      port,
      createdAt: /* @__PURE__ */ new Date(),
      // We don't have actual creation date, use now
      lastAccessed: /* @__PURE__ */ new Date(),
      ...databaseBranch !== void 0 && { databaseBranch },
      ...capabilities.length > 0 && { capabilities },
      ...Object.keys(binEntries).length > 0 && { binEntries },
      ...issueData !== null && {
        issueData: {
          title: issueData.title,
          body: issueData.body,
          url: issueData.url,
          state: issueData.state
        }
      }
    };
    logger.success(`Reused existing loom: ${loom.id} at ${loom.path}`);
    return loom;
  }
};

// src/lib/EnvironmentManager.ts
import fs3 from "fs-extra";
var logger2 = createLogger({ prefix: "\u{1F4DD}" });
var EnvironmentManager = class {
  constructor() {
    this.backupSuffix = ".backup";
  }
  /**
   * Set or update an environment variable in a .env file
   * Ports functionality from bash/utils/env-utils.sh:setEnvVar()
   * @returns The backup path if a backup was created
   */
  async setEnvVar(filePath, key, value, backup = false) {
    const validation = validateEnvVariable(key, value);
    if (!validation.valid) {
      throw new Error(validation.error ?? "Invalid variable name");
    }
    const fileExists = await fs3.pathExists(filePath);
    if (!fileExists) {
      logger2.info(`Creating ${filePath} with ${key}...`);
      const content = formatEnvLine(key, value);
      await fs3.writeFile(filePath, content, "utf8");
      logger2.success(`${filePath} created with ${key}`);
      return;
    }
    const existingContent = await fs3.readFile(filePath, "utf8");
    const envMap = parseEnvFile(existingContent);
    let backupPath;
    if (backup) {
      backupPath = await this.createBackup(filePath);
    }
    envMap.set(key, value);
    const lines = existingContent.split("\n");
    const newLines = [];
    let variableUpdated = false;
    for (const line of lines) {
      const trimmedLine = line.trim();
      if (!trimmedLine || trimmedLine.startsWith("#")) {
        newLines.push(line);
        continue;
      }
      const cleanLine = trimmedLine.startsWith("export ") ? trimmedLine.substring(7) : trimmedLine;
      const equalsIndex = cleanLine.indexOf("=");
      if (equalsIndex !== -1) {
        const lineKey = cleanLine.substring(0, equalsIndex).trim();
        if (lineKey === key) {
          newLines.push(formatEnvLine(key, value));
          variableUpdated = true;
          continue;
        }
      }
      newLines.push(line);
    }
    if (!variableUpdated) {
      logger2.info(`Adding ${key} to ${filePath}...`);
      newLines.push(formatEnvLine(key, value));
      logger2.success(`${key} added successfully`);
    } else {
      logger2.info(`Updating ${key} in ${filePath}...`);
      logger2.success(`${key} updated successfully`);
    }
    const newContent = newLines.join("\n");
    await fs3.writeFile(filePath, newContent, "utf8");
    return backupPath;
  }
  /**
   * Read and parse a .env file
   */
  async readEnvFile(filePath) {
    try {
      const content = await fs3.readFile(filePath, "utf8");
      return parseEnvFile(content);
    } catch (error) {
      logger2.debug(
        `Could not read env file ${filePath}: ${error instanceof Error ? error.message : String(error)}`
      );
      return /* @__PURE__ */ new Map();
    }
  }
  /**
   * Get a specific environment variable from a .env file
   * Returns null if file doesn't exist or variable is not found
   */
  async getEnvVariable(filePath, variableName) {
    const envVars = await this.readEnvFile(filePath);
    return envVars.get(variableName) ?? null;
  }
  /**
   * Generic file copy helper that only copies if source exists
   * Does not throw if source file doesn't exist - just logs and returns
   * @private
   */
  async copyIfExists(source, destination) {
    const sourceExists = await fs3.pathExists(source);
    if (!sourceExists) {
      logger2.debug(`Source file ${source} does not exist, skipping copy`);
      return;
    }
    await fs3.copy(source, destination, { overwrite: false });
    logger2.success(`Copied ${source} to ${destination}`);
  }
  /**
   * Calculate unique port for workspace
   * Implements:
   * - Issue/PR: 3000 + issue/PR number
   * - Branch: 3000 + deterministic hash offset (1-999)
   */
  calculatePort(options) {
    const basePort = options.basePort ?? 3e3;
    if (options.issueNumber !== void 0) {
      const numericIssue = typeof options.issueNumber === "number" ? options.issueNumber : parseInt(String(options.issueNumber), 10);
      if (!isNaN(numericIssue) && String(numericIssue) === String(options.issueNumber)) {
        const port = basePort + numericIssue;
        if (port > 65535) {
          throw new Error(
            `Calculated port ${port} exceeds maximum (65535). Use a lower base port or issue number.`
          );
        }
        return port;
      }
      return calculatePortForBranch(String(options.issueNumber), basePort);
    }
    if (options.prNumber !== void 0) {
      const port = basePort + options.prNumber;
      if (port > 65535) {
        throw new Error(
          `Calculated port ${port} exceeds maximum (65535). Use a lower base port or PR number.`
        );
      }
      return port;
    }
    if (options.branchName !== void 0) {
      return calculatePortForBranch(options.branchName, basePort);
    }
    return basePort;
  }
  /**
   * Set port environment variable for workspace
   */
  async setPortForWorkspace(envFilePath, issueNumber, prNumber, branchName) {
    const options = {};
    if (issueNumber !== void 0) {
      options.issueNumber = issueNumber;
    }
    if (prNumber !== void 0) {
      options.prNumber = prNumber;
    }
    if (branchName !== void 0) {
      options.branchName = branchName;
    }
    const port = this.calculatePort(options);
    await this.setEnvVar(envFilePath, "PORT", String(port));
    return port;
  }
  /**
   * Validate environment configuration
   */
  async validateEnvFile(filePath) {
    try {
      const content = await fs3.readFile(filePath, "utf8");
      const envMap = parseEnvFile(content);
      const errors = [];
      for (const [key, value] of envMap.entries()) {
        const validation = validateEnvVariable(key, value);
        if (!validation.valid) {
          errors.push(`${key}: ${validation.error}`);
        }
      }
      return {
        valid: errors.length === 0,
        errors
      };
    } catch (error) {
      return {
        valid: false,
        errors: [
          `Failed to read or parse file: ${error instanceof Error ? error.message : String(error)}`
        ]
      };
    }
  }
  /**
   * Create backup of existing file
   */
  async createBackup(filePath) {
    const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[:.]/g, "-");
    const backupPath = `${filePath}${this.backupSuffix}-${timestamp}`;
    await fs3.copy(filePath, backupPath);
    logger2.debug(`Created backup at ${backupPath}`);
    return backupPath;
  }
};

// src/lib/CLIIsolationManager.ts
import fs4 from "fs-extra";
import path3 from "path";
import os from "os";
var CLIIsolationManager = class {
  constructor() {
    this.iloomBinDir = path3.join(os.homedir(), ".iloom", "bin");
  }
  /**
   * Setup CLI isolation for a worktree
   * - Build the project
   * - Create versioned symlinks
   * - Check PATH configuration
   * @param worktreePath Path to the worktree
   * @param identifier Issue/PR number or branch identifier
   * @param binEntries Bin entries from package.json
   * @returns Array of created symlink names
   */
  async setupCLIIsolation(worktreePath, identifier, binEntries) {
    await this.buildProject(worktreePath);
    await this.verifyBinTargets(worktreePath, binEntries);
    await fs4.ensureDir(this.iloomBinDir);
    const symlinkNames = await this.createVersionedSymlinks(
      worktreePath,
      identifier,
      binEntries
    );
    await this.ensureIloomBinInPath();
    return symlinkNames;
  }
  /**
   * Build the project using package.json build script
   * @param worktreePath Path to the worktree
   */
  async buildProject(worktreePath) {
    const pkgJson = await readPackageJson(worktreePath);
    if (!hasScript(pkgJson, "build")) {
      logger.warn("No build script found in package.json - skipping build");
      return;
    }
    logger.info("Building CLI tool...");
    await runScript("build", worktreePath, [], { quiet: true });
    logger.success("Build completed");
  }
  /**
   * Verify bin targets exist and are executable
   * @param worktreePath Path to the worktree
   * @param binEntries Bin entries from package.json
   */
  async verifyBinTargets(worktreePath, binEntries) {
    for (const binPath of Object.values(binEntries)) {
      const targetPath = path3.resolve(worktreePath, binPath);
      const exists = await fs4.pathExists(targetPath);
      if (!exists) {
        throw new Error(`Bin target does not exist: ${targetPath}`);
      }
      try {
        await fs4.access(targetPath, fs4.constants.X_OK);
      } catch {
      }
    }
  }
  /**
   * Create versioned symlinks in ~/.iloom/bin
   * @param worktreePath Path to the worktree
   * @param identifier Issue/PR number or branch identifier
   * @param binEntries Bin entries from package.json
   * @returns Array of created symlink names
   */
  async createVersionedSymlinks(worktreePath, identifier, binEntries) {
    const symlinkNames = [];
    for (const [binName, binPath] of Object.entries(binEntries)) {
      const versionedName = `${binName}-${identifier}`;
      const targetPath = path3.resolve(worktreePath, binPath);
      const symlinkPath = path3.join(this.iloomBinDir, versionedName);
      await fs4.symlink(targetPath, symlinkPath);
      logger.success(`CLI available: ${versionedName}`);
      symlinkNames.push(versionedName);
    }
    return symlinkNames;
  }
  /**
   * Check if ~/.iloom/bin is in PATH and provide setup instructions
   */
  async ensureIloomBinInPath() {
    const currentPath = process.env.PATH ?? "";
    if (currentPath.includes(".iloom/bin")) {
      return;
    }
    const shell = this.detectShell();
    const rcFile = this.getShellRcFile(shell);
    logger.warn("\n\u26A0\uFE0F One-time PATH setup required:");
    logger.warn(` Add to ${rcFile}:`);
    logger.warn(` export PATH="$HOME/.iloom/bin:$PATH"`);
    logger.warn(` Then run: source ${rcFile}
`);
  }
  /**
   * Detect current shell
   * @returns Shell name (zsh, bash, fish, etc.)
   */
  detectShell() {
    const shell = process.env.SHELL ?? "";
    return shell.split("/").pop() ?? "bash";
  }
  /**
   * Get RC file path for shell
   * @param shell Shell name
   * @returns RC file path
   */
  getShellRcFile(shell) {
    const rcFiles = {
      zsh: "~/.zshrc",
      bash: "~/.bashrc",
      fish: "~/.config/fish/config.fish"
    };
    return rcFiles[shell] ?? "~/.bashrc";
  }
  /**
   * Cleanup versioned CLI executables for a specific identifier
   * Removes all symlinks matching the pattern: {binName}-{identifier}
   *
   * @param identifier - Issue/PR number or branch identifier
   * @returns Array of removed symlink names
   */
  async cleanupVersionedExecutables(identifier) {
    const removed = [];
    try {
      const files = await fs4.readdir(this.iloomBinDir);
      for (const file of files) {
        if (this.matchesIdentifier(file, identifier)) {
          const symlinkPath = path3.join(this.iloomBinDir, file);
          try {
            await fs4.unlink(symlinkPath);
            removed.push(file);
          } catch (error) {
            const isEnoent = error && typeof error === "object" && "code" in error && error.code === "ENOENT";
            if (isEnoent) {
              removed.push(file);
              continue;
            }
            logger.warn(
              `Failed to remove symlink ${file}: ${error instanceof Error ? error.message : "Unknown error"}
|
|
1137
|
+
);
|
|
1138
|
+
}
|
|
1139
|
+
}
|
|
1140
|
+
}
|
|
1141
|
+
} catch (error) {
|
|
1142
|
+
const isEnoent = error && typeof error === "object" && "code" in error && error.code === "ENOENT";
|
|
1143
|
+
if (isEnoent) {
|
|
1144
|
+
logger.warn("No CLI executables directory found - nothing to cleanup");
|
|
1145
|
+
return [];
|
|
1146
|
+
}
|
|
1147
|
+
throw error;
|
|
1148
|
+
}
|
|
1149
|
+
if (removed.length > 0) {
|
|
1150
|
+
logger.success(`Removed CLI executables: ${removed.join(", ")}`);
|
|
1151
|
+
}
|
|
1152
|
+
return removed;
|
|
1153
|
+
}
|
|
1154
|
+
/**
|
|
1155
|
+
* Find orphaned symlinks in ~/.iloom/bin
|
|
1156
|
+
* Returns symlinks that point to non-existent targets
|
|
1157
|
+
*
|
|
1158
|
+
* @returns Array of orphaned symlink information
|
|
1159
|
+
*/
|
|
1160
|
+
async findOrphanedSymlinks() {
|
|
1161
|
+
const orphaned = [];
|
|
1162
|
+
try {
|
|
1163
|
+
const files = await fs4.readdir(this.iloomBinDir);
|
|
1164
|
+
for (const file of files) {
|
|
1165
|
+
const symlinkPath = path3.join(this.iloomBinDir, file);
|
|
1166
|
+
try {
|
|
1167
|
+
const stats = await fs4.lstat(symlinkPath);
|
|
1168
|
+
if (stats.isSymbolicLink()) {
|
|
1169
|
+
const target = await fs4.readlink(symlinkPath);
|
|
1170
|
+
try {
|
|
1171
|
+
await fs4.access(target);
|
|
1172
|
+
} catch {
|
|
1173
|
+
orphaned.push({
|
|
1174
|
+
name: file,
|
|
1175
|
+
path: symlinkPath,
|
|
1176
|
+
brokenTarget: target
|
|
1177
|
+
});
|
|
1178
|
+
}
|
|
1179
|
+
}
|
|
1180
|
+
} catch (error) {
|
|
1181
|
+
logger.warn(
|
|
1182
|
+
`Failed to check symlink ${file}: ${error instanceof Error ? error.message : "Unknown error"}`
|
|
1183
|
+
);
|
|
1184
|
+
}
|
|
1185
|
+
}
|
|
1186
|
+
} catch (error) {
|
|
1187
|
+
const isEnoent = error && typeof error === "object" && "code" in error && error.code === "ENOENT";
|
|
1188
|
+
if (isEnoent) {
|
|
1189
|
+
return [];
|
|
1190
|
+
}
|
|
1191
|
+
throw error;
|
|
1192
|
+
}
|
|
1193
|
+
return orphaned;
|
|
1194
|
+
}
|
|
1195
|
+
/**
|
|
1196
|
+
* Cleanup all orphaned symlinks
|
|
1197
|
+
* Removes symlinks that point to non-existent targets
|
|
1198
|
+
*
|
|
1199
|
+
* @returns Number of symlinks removed
|
|
1200
|
+
*/
|
|
1201
|
+
async cleanupOrphanedSymlinks() {
|
|
1202
|
+
const orphaned = await this.findOrphanedSymlinks();
|
|
1203
|
+
let removedCount = 0;
|
|
1204
|
+
for (const symlink of orphaned) {
|
|
1205
|
+
try {
|
|
1206
|
+
await fs4.unlink(symlink.path);
|
|
1207
|
+
removedCount++;
|
|
1208
|
+
logger.success(`Removed orphaned symlink: ${symlink.name}`);
|
|
1209
|
+
} catch (error) {
|
|
1210
|
+
logger.warn(
|
|
1211
|
+
`Failed to remove orphaned symlink ${symlink.name}: ${error instanceof Error ? error.message : "Unknown error"}`
|
|
1212
|
+
);
|
|
1213
|
+
}
|
|
1214
|
+
}
|
|
1215
|
+
return removedCount;
|
|
1216
|
+
}
|
|
1217
|
+
/**
|
|
1218
|
+
* Check if a filename matches the versioned pattern for an identifier
|
|
1219
|
+
* Pattern: {binName}-{identifier}
|
|
1220
|
+
*
|
|
1221
|
+
* @param fileName - Name of the file to check
|
|
1222
|
+
* @param identifier - Issue/PR number or branch identifier
|
|
1223
|
+
* @returns True if the filename matches the pattern
|
|
1224
|
+
*/
|
|
1225
|
+
matchesIdentifier(fileName, identifier) {
|
|
1226
|
+
const suffix = `-${identifier}`;
|
|
1227
|
+
return fileName.endsWith(suffix);
|
|
1228
|
+
}
|
|
1229
|
+
};
|
|
1230
|
+
|
|
1231
|
+
// src/lib/DatabaseManager.ts
|
|
1232
|
+
var logger3 = createLogger({ prefix: "\u{1F5C2}\uFE0F" });
|
|
1233
|
+
var DatabaseManager = class {
|
|
1234
|
+
constructor(provider, environment, databaseUrlEnvVarName = "DATABASE_URL") {
|
|
1235
|
+
this.provider = provider;
|
|
1236
|
+
this.environment = environment;
|
|
1237
|
+
this.databaseUrlEnvVarName = databaseUrlEnvVarName;
|
|
1238
|
+
if (databaseUrlEnvVarName !== "DATABASE_URL") {
|
|
1239
|
+
logger3.debug(`\u{1F527} DatabaseManager configured with custom variable: ${databaseUrlEnvVarName}`);
|
|
1240
|
+
} else {
|
|
1241
|
+
logger3.debug("\u{1F527} DatabaseManager using default variable: DATABASE_URL");
|
|
1242
|
+
}
|
|
1243
|
+
}
|
|
1244
|
+
/**
|
|
1245
|
+
* Get the configured database URL environment variable name
|
|
1246
|
+
*/
|
|
1247
|
+
getConfiguredVariableName() {
|
|
1248
|
+
return this.databaseUrlEnvVarName;
|
|
1249
|
+
}
|
|
1250
|
+
/**
|
|
1251
|
+
* Check if database branching should be used
|
|
1252
|
+
* Requires BOTH conditions:
|
|
1253
|
+
* 1. Database provider is properly configured (checked via provider.isConfigured())
|
|
1254
|
+
* 2. .env file contains the configured database URL variable
|
|
1255
|
+
*/
|
|
1256
|
+
async shouldUseDatabaseBranching(envFilePath) {
|
|
1257
|
+
if (!this.provider.isConfigured()) {
|
|
1258
|
+
logger3.debug("Skipping database branching: Database provider not configured");
|
|
1259
|
+
return false;
|
|
1260
|
+
}
|
|
1261
|
+
const hasDatabaseUrl = await this.hasDatabaseUrlInEnv(envFilePath);
|
|
1262
|
+
if (!hasDatabaseUrl) {
|
|
1263
|
+
logger3.debug(
|
|
1264
|
+
"Skipping database branching: configured database URL variable not found in .env file"
|
|
1265
|
+
);
|
|
1266
|
+
return false;
|
|
1267
|
+
}
|
|
1268
|
+
return true;
|
|
1269
|
+
}
|
|
1270
|
+
/**
|
|
1271
|
+
* Create database branch only if configured
|
|
1272
|
+
* Returns connection string if branch was created, null if skipped
|
|
1273
|
+
*
|
|
1274
|
+
* @param branchName - Name of the branch to create
|
|
1275
|
+
* @param envFilePath - Path to .env file for configuration checks
|
|
1276
|
+
* @param cwd - Optional working directory to run commands from
|
|
1277
|
+
* @param fromBranch - Optional parent branch to create from (for child looms)
|
|
1278
|
+
*/
|
|
1279
|
+
async createBranchIfConfigured(branchName, envFilePath, cwd, fromBranch) {
|
|
1280
|
+
if (!await this.shouldUseDatabaseBranching(envFilePath)) {
|
|
1281
|
+
return null;
|
|
1282
|
+
}
|
|
1283
|
+
if (!await this.provider.isCliAvailable()) {
|
|
1284
|
+
logger3.warn("Skipping database branch creation: Neon CLI not available");
|
|
1285
|
+
logger3.warn("Install with: npm install -g neonctl");
|
|
1286
|
+
return null;
|
|
1287
|
+
}
|
|
1288
|
+
try {
|
|
1289
|
+
const isAuth = await this.provider.isAuthenticated(cwd);
|
|
1290
|
+
if (!isAuth) {
|
|
1291
|
+
logger3.warn("Skipping database branch creation: Not authenticated with Neon CLI");
|
|
1292
|
+
logger3.warn("Run: neon auth");
|
|
1293
|
+
return null;
|
|
1294
|
+
}
|
|
1295
|
+
} catch (error) {
|
|
1296
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
1297
|
+
logger3.error(`Database authentication check failed: ${errorMessage}`);
|
|
1298
|
+
throw error;
|
|
1299
|
+
}
|
|
1300
|
+
try {
|
|
1301
|
+
const connectionString = await this.provider.createBranch(branchName, fromBranch, cwd);
|
|
1302
|
+
logger3.success(`Database branch ready: ${this.provider.sanitizeBranchName(branchName)}`);
|
|
1303
|
+
return connectionString;
|
|
1304
|
+
} catch (error) {
|
|
1305
|
+
logger3.error(
|
|
1306
|
+
`Failed to create database branch: ${error instanceof Error ? error.message : String(error)}`
|
|
1307
|
+
);
|
|
1308
|
+
throw error;
|
|
1309
|
+
}
|
|
1310
|
+
}
|
|
1311
|
+
/**
|
|
1312
|
+
* Delete database branch only if configured
|
|
1313
|
+
* Returns result object indicating what happened
|
|
1314
|
+
*
|
|
1315
|
+
* @param branchName - Name of the branch to delete
|
|
1316
|
+
* @param shouldCleanup - Boolean indicating if database cleanup should be performed (pre-fetched config)
|
|
1317
|
+
* @param isPreview - Whether this is a preview database branch
|
|
1318
|
+
* @param cwd - Optional working directory to run commands from (prevents issues with deleted directories)
|
|
1319
|
+
*/
|
|
1320
|
+
async deleteBranchIfConfigured(branchName, shouldCleanup, isPreview = false, cwd) {
|
|
1321
|
+
if (shouldCleanup === false) {
|
|
1322
|
+
return {
|
|
1323
|
+
success: true,
|
|
1324
|
+
deleted: false,
|
|
1325
|
+
notFound: true,
|
|
1326
|
+
// Treat "not configured" as "nothing to delete"
|
|
1327
|
+
branchName
|
|
1328
|
+
};
|
|
1329
|
+
}
|
|
1330
|
+
if (!this.provider.isConfigured()) {
|
|
1331
|
+
logger3.debug("Skipping database branch deletion: Database provider not configured");
|
|
1332
|
+
return {
|
|
1333
|
+
success: true,
|
|
1334
|
+
deleted: false,
|
|
1335
|
+
notFound: true,
|
|
1336
|
+
branchName
|
|
1337
|
+
};
|
|
1338
|
+
}
|
|
1339
|
+
if (!await this.provider.isCliAvailable()) {
|
|
1340
|
+
logger3.info("Skipping database branch deletion: CLI tool not available");
|
|
1341
|
+
return {
|
|
1342
|
+
success: false,
|
|
1343
|
+
deleted: false,
|
|
1344
|
+
notFound: true,
|
|
1345
|
+
error: "CLI tool not available",
|
|
1346
|
+
branchName
|
|
1347
|
+
};
|
|
1348
|
+
}
|
|
1349
|
+
try {
|
|
1350
|
+
const isAuth = await this.provider.isAuthenticated(cwd);
|
|
1351
|
+
if (!isAuth) {
|
|
1352
|
+
logger3.warn("Skipping database branch deletion: Not authenticated with DB Provider");
|
|
1353
|
+
return {
|
|
1354
|
+
success: false,
|
|
1355
|
+
deleted: false,
|
|
1356
|
+
notFound: false,
|
|
1357
|
+
error: "Not authenticated with DB Provider",
|
|
1358
|
+
branchName
|
|
1359
|
+
};
|
|
1360
|
+
}
|
|
1361
|
+
} catch (error) {
|
|
1362
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
1363
|
+
logger3.error(`Database authentication check failed: ${errorMessage}`);
|
|
1364
|
+
return {
|
|
1365
|
+
success: false,
|
|
1366
|
+
deleted: false,
|
|
1367
|
+
notFound: false,
|
|
1368
|
+
error: `Authentication check failed: ${errorMessage}`,
|
|
1369
|
+
branchName
|
|
1370
|
+
};
|
|
1371
|
+
}
|
|
1372
|
+
try {
|
|
1373
|
+
const result = await this.provider.deleteBranch(branchName, isPreview, cwd);
|
|
1374
|
+
return result;
|
|
1375
|
+
} catch (error) {
|
|
1376
|
+
logger3.warn(
|
|
1377
|
+
`Unexpected error in database deletion: ${error instanceof Error ? error.message : String(error)}`
|
|
1378
|
+
);
|
|
1379
|
+
return {
|
|
1380
|
+
success: false,
|
|
1381
|
+
deleted: false,
|
|
1382
|
+
notFound: false,
|
|
1383
|
+
error: error instanceof Error ? error.message : String(error),
|
|
1384
|
+
branchName
|
|
1385
|
+
};
|
|
1386
|
+
}
|
|
1387
|
+
}
|
|
1388
|
+
/**
|
|
1389
|
+
* Get database branch name from connection string (reverse lookup)
|
|
1390
|
+
* Returns branch name if provider supports reverse lookup, null otherwise
|
|
1391
|
+
*
|
|
1392
|
+
* @param connectionString - Database connection string
|
|
1393
|
+
* @param cwd - Optional working directory to run commands from
|
|
1394
|
+
*/
|
|
1395
|
+
async getBranchNameFromConnectionString(connectionString, cwd) {
|
|
1396
|
+
if (!this.provider.isConfigured()) {
|
|
1397
|
+
logger3.debug("Provider not configured, skipping reverse lookup");
|
|
1398
|
+
return null;
|
|
1399
|
+
}
|
|
1400
|
+
if ("getBranchNameFromConnectionString" in this.provider && typeof this.provider.getBranchNameFromConnectionString === "function") {
|
|
1401
|
+
return this.provider.getBranchNameFromConnectionString(connectionString, cwd);
|
|
1402
|
+
}
|
|
1403
|
+
logger3.debug("Provider does not support reverse lookup");
|
|
1404
|
+
return null;
|
|
1405
|
+
}
|
|
1406
|
+
/**
|
|
1407
|
+
* Check if .env has the configured database URL variable
|
|
1408
|
+
* CRITICAL: If user explicitly configured a custom variable name (not default),
|
|
1409
|
+
* throw an error if it's missing from .env
|
|
1410
|
+
*/
|
|
1411
|
+
async hasDatabaseUrlInEnv(envFilePath) {
|
|
1412
|
+
try {
|
|
1413
|
+
const envMap = await this.environment.readEnvFile(envFilePath);
|
|
1414
|
+
if (this.databaseUrlEnvVarName !== "DATABASE_URL") {
|
|
1415
|
+
logger3.debug(`Looking for custom database URL variable: ${this.databaseUrlEnvVarName}`);
|
|
1416
|
+
} else {
|
|
1417
|
+
logger3.debug("Looking for default database URL variable: DATABASE_URL");
|
|
1418
|
+
}
|
|
1419
|
+
if (envMap.has(this.databaseUrlEnvVarName)) {
|
|
1420
|
+
if (this.databaseUrlEnvVarName !== "DATABASE_URL") {
|
|
1421
|
+
logger3.debug(`\u2705 Found custom database URL variable: ${this.databaseUrlEnvVarName}`);
|
|
1422
|
+
} else {
|
|
1423
|
+
logger3.debug(`\u2705 Found default database URL variable: DATABASE_URL`);
|
|
1424
|
+
}
|
|
1425
|
+
return true;
|
|
1426
|
+
}
|
|
1427
|
+
if (this.databaseUrlEnvVarName !== "DATABASE_URL") {
|
|
1428
|
+
logger3.debug(`\u274C Custom database URL variable '${this.databaseUrlEnvVarName}' not found in .env file`);
|
|
1429
|
+
throw new Error(
|
|
1430
|
+
`Configured database URL environment variable '${this.databaseUrlEnvVarName}' not found in .env file. Please add it to your .env file or update your iloom configuration.`
|
|
1431
|
+
);
|
|
1432
|
+
}
|
|
1433
|
+
const hasDefaultVar = envMap.has("DATABASE_URL");
|
|
1434
|
+
if (hasDefaultVar) {
|
|
1435
|
+
logger3.debug("\u2705 Found fallback DATABASE_URL variable");
|
|
1436
|
+
} else {
|
|
1437
|
+
logger3.debug("\u274C No DATABASE_URL variable found in .env file");
|
|
1438
|
+
}
|
|
1439
|
+
return hasDefaultVar;
|
|
1440
|
+
} catch (error) {
|
|
1441
|
+
if (error instanceof Error && error.message.includes("not found in .env")) {
|
|
1442
|
+
throw error;
|
|
1443
|
+
}
|
|
1444
|
+
return false;
|
|
1445
|
+
}
|
|
1446
|
+
}
|
|
1447
|
+
};
|
|
1448
|
+
|
|
1449
|
+
// src/lib/ResourceCleanup.ts
|
|
1450
|
+
import path4 from "path";
|
|
1451
|
+
var ResourceCleanup = class {
|
|
1452
|
+
constructor(gitWorktree, processManager, database, cliIsolation, settingsManager) {
|
|
1453
|
+
this.gitWorktree = gitWorktree;
|
|
1454
|
+
this.processManager = processManager;
|
|
1455
|
+
this.database = database;
|
|
1456
|
+
this.cliIsolation = cliIsolation;
|
|
1457
|
+
this.settingsManager = settingsManager ?? new SettingsManager();
|
|
1458
|
+
}
|
|
1459
|
+
/**
|
|
1460
|
+
* Cleanup a worktree and associated resources
|
|
1461
|
+
* Main orchestration method
|
|
1462
|
+
*
|
|
1463
|
+
* @param parsed - ParsedInput from IdentifierParser with type information
|
|
1464
|
+
* @param options - Cleanup options
|
|
1465
|
+
*/
|
|
1466
|
+
async cleanupWorktree(parsed, options = {}) {
|
|
1467
|
+
var _a;
|
|
1468
|
+
const operations = [];
|
|
1469
|
+
const errors = [];
|
|
1470
|
+
const displayIdentifier = parsed.branchName ?? ((_a = parsed.number) == null ? void 0 : _a.toString()) ?? parsed.originalInput;
|
|
1471
|
+
logger.info(`Starting cleanup for: ${displayIdentifier}`);
|
|
1472
|
+
const number = parsed.number;
|
|
1473
|
+
if (number !== void 0) {
|
|
1474
|
+
const port = this.processManager.calculatePort(number);
|
|
1475
|
+
if (options.dryRun) {
|
|
1476
|
+
operations.push({
|
|
1477
|
+
type: "dev-server",
|
|
1478
|
+
success: true,
|
|
1479
|
+
message: `[DRY RUN] Would check for dev server on port ${port}`
|
|
1480
|
+
});
|
|
1481
|
+
} else {
|
|
1482
|
+
try {
|
|
1483
|
+
const terminated = await this.terminateDevServer(port);
|
|
1484
|
+
operations.push({
|
|
1485
|
+
type: "dev-server",
|
|
1486
|
+
success: true,
|
|
1487
|
+
message: terminated ? `Dev server on port ${port} terminated` : `No dev server running on port ${port}`
|
|
1488
|
+
});
|
|
1489
|
+
} catch (error) {
|
|
1490
|
+
const err = error instanceof Error ? error : new Error("Unknown error");
|
|
1491
|
+
errors.push(err);
|
|
1492
|
+
operations.push({
|
|
1493
|
+
type: "dev-server",
|
|
1494
|
+
success: false,
|
|
1495
|
+
message: `Failed to terminate dev server`,
|
|
1496
|
+
error: err.message
|
|
1497
|
+
});
|
|
1498
|
+
}
|
|
1499
|
+
}
|
|
1500
|
+
}
|
|
1501
|
+
let worktree = null;
|
|
1502
|
+
try {
|
|
1503
|
+
if (parsed.type === "pr" && parsed.number !== void 0) {
|
|
1504
|
+
const prNumber = typeof parsed.number === "number" ? parsed.number : Number(parsed.number);
|
|
1505
|
+
if (isNaN(prNumber) || !isFinite(prNumber)) {
|
|
1506
|
+
throw new Error(`Invalid PR number: ${parsed.number}. PR numbers must be numeric.`);
|
|
1507
|
+
}
|
|
1508
|
+
worktree = await this.gitWorktree.findWorktreeForPR(prNumber, "");
|
|
1509
|
+
} else if (parsed.type === "issue" && parsed.number !== void 0) {
|
|
1510
|
+
worktree = await this.gitWorktree.findWorktreeForIssue(parsed.number);
|
|
1511
|
+
} else if (parsed.type === "branch" && parsed.branchName) {
|
|
1512
|
+
worktree = await this.gitWorktree.findWorktreeForBranch(parsed.branchName);
|
|
1513
|
+
}
|
|
1514
|
+
if (!worktree) {
|
|
1515
|
+
throw new Error(`No worktree found for identifier: ${displayIdentifier}`);
|
|
1516
|
+
}
|
|
1517
|
+
logger.debug(`Found worktree: path="${worktree.path}", branch="${worktree.branch}"`);
|
|
1518
|
+
} catch (error) {
|
|
1519
|
+
const err = error instanceof Error ? error : new Error("Unknown error");
|
|
1520
|
+
errors.push(err);
|
|
1521
|
+
return {
|
|
1522
|
+
identifier: displayIdentifier,
|
|
1523
|
+
success: false,
|
|
1524
|
+
operations,
|
|
1525
|
+
errors,
|
|
1526
|
+
rollbackRequired: false
|
|
1527
|
+
};
|
|
1528
|
+
}
|
|
1529
|
+
if (!options.force) {
|
|
1530
|
+
const safety = await this.validateWorktreeSafety(worktree, parsed.originalInput);
|
|
1531
|
+
if (!safety.isSafe) {
|
|
1532
|
+
const blockerMessage = safety.blockers.join("\n\n");
|
|
1533
|
+
throw new Error(`Cannot cleanup:
|
|
1534
|
+
|
|
1535
|
+
${blockerMessage}`);
|
|
1536
|
+
}
|
|
1537
|
+
if (safety.warnings.length > 0) {
|
|
1538
|
+
safety.warnings.forEach((warning) => {
|
|
1539
|
+
logger.warn(warning);
|
|
1540
|
+
});
|
|
1541
|
+
}
|
|
1542
|
+
}
|
|
1543
|
+
let databaseConfig = null;
|
|
1544
|
+
if (!options.keepDatabase && worktree) {
|
|
1545
|
+
const envFilePath = path4.join(worktree.path, ".env");
|
|
1546
|
+
try {
|
|
1547
|
+
const shouldCleanup = this.database ? await this.database.shouldUseDatabaseBranching(envFilePath) : false;
|
|
1548
|
+
databaseConfig = { shouldCleanup, envFilePath };
|
|
1549
|
+
} catch (error) {
|
|
1550
|
+
logger.warn(
|
|
1551
|
+
`Failed to read database config from ${envFilePath}, skipping database cleanup: ${error instanceof Error ? error.message : String(error)}`
|
|
1552
|
+
);
|
|
1553
|
+
databaseConfig = { shouldCleanup: false, envFilePath };
|
|
1554
|
+
}
|
|
1555
|
+
}
|
|
1556
|
+
let mainWorktreePath = null;
|
|
1557
|
+
if (!options.dryRun) {
|
|
1558
|
+
try {
|
|
1559
|
+
mainWorktreePath = await findMainWorktreePathWithSettings(worktree.path, this.settingsManager);
|
|
1560
|
+
} catch (error) {
|
|
1561
|
+
logger.warn(
|
|
1562
|
+
`Failed to find main worktree path: ${error instanceof Error ? error.message : String(error)}`
|
|
1563
|
+
);
|
|
1564
|
+
}
|
|
1565
|
+
}
|
|
1566
|
+
if (options.dryRun) {
|
|
1567
|
+
operations.push({
|
|
1568
|
+
type: "worktree",
|
|
1569
|
+
success: true,
|
|
1570
|
+
message: `[DRY RUN] Would remove worktree: ${worktree.path}`
|
|
1571
|
+
});
|
|
1572
|
+
} else {
|
|
1573
|
+
try {
|
|
1574
|
+
const worktreeOptions = {
|
|
1575
|
+
removeDirectory: true,
|
|
1576
|
+
removeBranch: false
|
|
1577
|
+
// Handle branch separately
|
|
1578
|
+
};
|
|
1579
|
+
if (options.force !== void 0) {
|
|
1580
|
+
worktreeOptions.force = options.force;
|
|
1581
|
+
}
|
|
1582
|
+
await this.gitWorktree.removeWorktree(worktree.path, worktreeOptions);
|
|
1583
|
+
operations.push({
|
|
1584
|
+
type: "worktree",
|
|
1585
|
+
success: true,
|
|
1586
|
+
message: `Worktree removed: ${worktree.path}`
|
|
1587
|
+
});
|
|
1588
|
+
} catch (error) {
|
|
1589
|
+
const err = error instanceof Error ? error : new Error("Unknown error");
|
|
1590
|
+
errors.push(err);
|
|
1591
|
+
operations.push({
|
|
1592
|
+
type: "worktree",
|
|
1593
|
+
success: false,
|
|
1594
|
+
message: `Failed to remove worktree`,
|
|
1595
|
+
error: err.message
|
|
1596
|
+
});
|
|
1597
|
+
}
|
|
1598
|
+
}
|
|
1599
|
+
if (options.deleteBranch && worktree) {
|
|
1600
|
+
if (options.dryRun) {
|
|
1601
|
+
operations.push({
|
|
1602
|
+
type: "branch",
|
|
1603
|
+
success: true,
|
|
1604
|
+
message: `[DRY RUN] Would delete branch: ${worktree.branch}`
|
|
1605
|
+
});
|
|
1606
|
+
} else {
|
|
1607
|
+
try {
|
|
1608
|
+
const branchOptions = { dryRun: false };
|
|
1609
|
+
if (options.force !== void 0) {
|
|
1610
|
+
branchOptions.force = options.force;
|
|
1611
|
+
}
|
|
1612
|
+
await this.deleteBranch(worktree.branch, branchOptions, mainWorktreePath ?? void 0);
|
|
1613
|
+
operations.push({
|
|
1614
|
+
type: "branch",
|
|
1615
|
+
success: true,
|
|
1616
|
+
message: `Branch deleted: ${worktree.branch}`
|
|
1617
|
+
});
|
|
1618
|
+
} catch (error) {
|
|
1619
|
+
const err = error instanceof Error ? error : new Error("Unknown error");
|
|
1620
|
+
errors.push(err);
|
|
1621
|
+
operations.push({
|
|
1622
|
+
type: "branch",
|
|
1623
|
+
success: false,
|
|
1624
|
+
message: `Failed to delete branch`,
|
|
1625
|
+
error: err.message
|
|
1626
|
+
});
|
|
1627
|
+
}
|
|
1628
|
+
}
|
|
1629
|
+
}
|
|
1630
|
+
const cliIdentifier = parsed.number ?? parsed.branchName;
|
|
1631
|
+
if (this.cliIsolation && cliIdentifier !== void 0) {
|
|
1632
|
+
if (options.dryRun) {
|
|
1633
|
+
operations.push({
|
|
1634
|
+
type: "cli-symlinks",
|
|
1635
|
+
success: true,
|
|
1636
|
+
message: `[DRY RUN] Would cleanup CLI symlinks for: ${cliIdentifier}`
|
|
1637
|
+
});
|
|
1638
|
+
} else {
|
|
1639
|
+
try {
|
|
1640
|
+
const removed = await this.cliIsolation.cleanupVersionedExecutables(cliIdentifier);
|
|
1641
|
+
operations.push({
|
|
1642
|
+
type: "cli-symlinks",
|
|
1643
|
+
success: true,
|
|
1644
|
+
message: removed.length > 0 ? `CLI symlinks removed: ${removed.length}` : "No CLI symlinks to cleanup"
|
|
1645
|
+
});
|
|
1646
|
+
} catch (error) {
|
|
1647
|
+
const err = error instanceof Error ? error : new Error("Unknown error");
|
|
1648
|
+
errors.push(err);
|
|
1649
|
+
logger.warn(
|
|
1650
|
+
`CLI symlink cleanup failed: ${err.message}`
|
|
1651
|
+
);
|
|
1652
|
+
operations.push({
|
|
1653
|
+
type: "cli-symlinks",
|
|
1654
|
+
success: false,
|
|
1655
|
+
message: "CLI symlink cleanup failed (non-fatal)"
|
|
1656
|
+
});
|
|
1657
|
+
}
|
|
1658
|
+
}
|
|
1659
|
+
}
|
|
1660
|
+
if (databaseConfig && worktree) {
|
|
1661
|
+
if (options.dryRun) {
|
|
1662
|
+
operations.push({
|
|
1663
|
+
type: "database",
|
|
1664
|
+
success: true,
|
|
1665
|
+
message: `[DRY RUN] Would cleanup database branch for: ${worktree.branch}`
|
|
1666
|
+
});
|
|
1667
|
+
} else {
|
|
1668
|
+
try {
|
|
1669
|
+
if (databaseConfig.shouldCleanup && this.database) {
|
|
1670
|
+
try {
|
|
1671
|
+
const deletionResult = await this.database.deleteBranchIfConfigured(
|
|
1672
|
+
worktree.branch,
|
|
1673
|
+
databaseConfig.shouldCleanup,
|
|
1674
|
+
false,
|
|
1675
|
+
// isPreview
|
|
1676
|
+
mainWorktreePath ?? void 0
|
|
1677
|
+
);
|
|
1678
|
+
if (deletionResult.deleted) {
|
|
1679
|
+
logger.info(`Database branch deleted: ${worktree.branch}`);
|
|
1680
|
+
operations.push({
|
|
1681
|
+
type: "database",
|
|
1682
|
+
success: true,
|
|
1683
|
+
message: `Database branch deleted`,
|
|
1684
|
+
deleted: true
|
|
1685
|
+
});
|
|
1686
|
+
} else if (deletionResult.notFound) {
|
|
1687
|
+
logger.debug(`No database branch found for: ${worktree.branch}`);
|
|
1688
|
+
operations.push({
|
|
1689
|
+
type: "database",
|
|
1690
|
+
success: true,
|
|
1691
|
+
message: `No database branch found (skipped)`,
|
|
1692
|
+
deleted: false
|
|
1693
|
+
});
|
|
1694
|
+
} else if (deletionResult.userDeclined) {
|
|
1695
|
+
logger.info("Preview database deletion declined by user");
|
|
1696
|
+
operations.push({
|
|
1697
|
+
type: "database",
|
|
1698
|
+
success: true,
|
|
1699
|
+
message: `Database cleanup skipped (user declined)`,
|
|
1700
|
+
deleted: false
|
|
1701
|
+
});
|
|
1702
|
+
} else if (!deletionResult.success) {
|
|
1703
|
+
const errorMsg = deletionResult.error ?? "Unknown error";
|
|
1704
|
+
errors.push(new Error(errorMsg));
|
|
1705
|
+
logger.warn(`Database cleanup failed: ${errorMsg}`);
|
|
1706
|
+
operations.push({
|
|
1707
|
+
type: "database",
|
|
1708
|
+
success: false,
|
|
1709
|
+
// Non-fatal, but report error
|
|
1710
|
+
message: `Database cleanup failed`,
|
|
1711
|
+
error: errorMsg,
|
|
1712
|
+
deleted: false
|
|
1713
|
+
});
|
|
1714
|
+
} else {
|
|
1715
|
+
errors.push(new Error("Database cleanup in an unknown state"));
|
|
1716
|
+
logger.warn("Database deletion returned unexpected result state");
|
|
1717
|
+
operations.push({
|
|
1718
|
+
type: "database",
|
|
1719
|
+
success: false,
|
|
1720
|
+
message: `Database cleanup in an unknown state`,
|
|
1721
|
+
deleted: false
|
|
1722
|
+
});
|
|
1723
|
+
}
|
|
1724
|
+
} catch (error) {
|
|
1725
|
+
errors.push(error instanceof Error ? error : new Error(String(error)));
|
|
1726
|
+
logger.warn(
|
|
1727
|
+
`Unexpected database cleanup exception: ${error instanceof Error ? error.message : String(error)}`
|
|
1728
|
+
);
|
|
1729
|
+
operations.push({
|
|
1730
|
+
type: "database",
|
|
1731
|
+
success: false,
|
|
1732
|
+
message: `Database cleanup failed`,
|
|
1733
|
+
error: error instanceof Error ? error.message : String(error),
|
|
1734
|
+
deleted: false
|
|
1735
|
+
});
|
|
1736
|
+
}
|
|
1737
|
+
} else {
|
|
1738
|
+
operations.push({
|
|
1739
|
+
type: "database",
|
|
1740
|
+
success: true,
|
|
1741
|
+
message: `Database cleanup skipped (not available)`,
|
|
1742
|
+
deleted: false
|
|
1743
|
+
});
|
|
1744
|
+
}
|
|
1745
|
+
} catch (error) {
|
|
1746
|
+
const err = error instanceof Error ? error : new Error("Unknown error");
|
|
1747
|
+
errors.push(err);
|
|
1748
|
+
operations.push({
|
|
1749
|
+
type: "database",
|
|
1750
|
+
success: false,
|
|
1751
|
+
message: `Database cleanup failed`,
|
|
1752
|
+
error: err.message,
|
|
1753
|
+
deleted: false
|
|
1754
|
+
});
|
|
1755
|
+
}
|
|
1756
|
+
}
|
|
1757
|
+
}
|
|
1758
|
+
const success = errors.length === 0;
|
|
1759
|
+
return {
|
|
1760
|
+
identifier: displayIdentifier,
|
|
1761
|
+
branchName: worktree == null ? void 0 : worktree.branch,
|
|
1762
|
+
success,
|
|
1763
|
+
operations,
|
|
1764
|
+
errors,
|
|
1765
|
+
rollbackRequired: false
|
|
1766
|
+
// Cleanup operations are generally not reversible
|
|
1767
|
+
};
|
|
1768
|
+
}
|
|
1769
|
+
/**
|
|
1770
|
+
* Terminate dev server on specified port
|
|
1771
|
+
*/
|
|
1772
|
+
async terminateDevServer(port) {
|
|
1773
|
+
logger.debug(`Checking for dev server on port ${port}`);
|
|
1774
|
+
const processInfo = await this.processManager.detectDevServer(port);
|
|
1775
|
+
if (!processInfo) {
|
|
1776
|
+
logger.debug(`No process found on port ${port}`);
|
|
1777
|
+
return false;
|
|
1778
|
+
}
|
|
1779
|
+
if (!processInfo.isDevServer) {
|
|
1780
|
+
logger.warn(
|
|
1781
|
+
`Process on port ${port} (${processInfo.name}) doesn't appear to be a dev server, skipping`
|
|
1782
|
+
);
|
|
1783
|
+
return false;
|
|
1784
|
+
}
|
|
1785
|
+
logger.info(`Terminating dev server: ${processInfo.name} (PID: ${processInfo.pid})`);
|
|
1786
|
+
await this.processManager.terminateProcess(processInfo.pid);
|
|
1787
|
+
const isFree = await this.processManager.verifyPortFree(port);
|
|
1788
|
+
if (!isFree) {
|
|
1789
|
+
throw new Error(`Dev server may still be running on port ${port}`);
|
|
1790
|
+
}
|
|
1791
|
+
return true;
|
|
1792
|
+
}
|
|
1793
|
+
/**
|
|
1794
|
+
* Delete a Git branch with safety checks
|
|
1795
|
+
*
|
|
1796
|
+
* @param branchName - Name of the branch to delete
|
|
1797
|
+
* @param options - Delete options (force, dryRun)
|
|
1798
|
+
* @param cwd - Working directory to execute git command from (defaults to finding main worktree)
|
|
1799
|
+
*/
|
|
1800
|
+
async deleteBranch(branchName, options = {}, cwd) {
|
|
1801
|
+
const protectedBranches = await this.settingsManager.getProtectedBranches(cwd);
|
|
1802
|
+
if (protectedBranches.includes(branchName)) {
|
|
1803
|
+
throw new Error(`Cannot delete protected branch: ${branchName}`);
|
|
1804
|
+
}
|
|
1805
|
+
if (options.dryRun) {
|
|
1806
|
+
logger.info(`[DRY RUN] Would delete branch: ${branchName}`);
|
|
1807
|
+
return true;
|
|
1808
|
+
}
|
|
1809
|
+
try {
|
|
1810
|
+
let workingDir = cwd ?? await findMainWorktreePathWithSettings(void 0, this.settingsManager);
|
|
1811
|
+
const deleteFlag = options.force ? "-D" : "-d";
|
|
1812
|
+
await executeGitCommand(["branch", deleteFlag, branchName], {
|
|
1813
|
+
cwd: workingDir
|
|
1814
|
+
});
|
|
1815
|
+
logger.info(`Branch deleted: ${branchName}`);
|
|
1816
|
+
return true;
|
|
1817
|
+
} catch (error) {
|
|
1818
|
+
if (options.force) {
|
|
1819
|
+
throw error;
|
|
1820
|
+
}
|
|
1821
|
+
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
1822
|
+
if (errorMessage.includes("not fully merged")) {
|
|
1823
|
+
throw new Error(
|
|
1824
|
+
`Cannot delete unmerged branch '${branchName}'. Use --force to delete anyway.`
|
|
1825
|
+
);
|
|
1826
|
+
}
|
|
1827
|
+
throw error;
|
|
1828
|
+
}
|
|
1829
|
+
}
|
|
1830
|
+
/**
|
|
1831
|
+
* Cleanup database branch
|
|
1832
|
+
* Gracefully handles missing DatabaseManager
|
|
1833
|
+
*
|
|
1834
|
+
* @deprecated This method is deprecated and should not be used for post-deletion cleanup.
|
|
1835
|
+
* Use the pre-fetch mechanism in cleanupWorktree() instead.
|
|
1836
|
+
* This method will fail if called after worktree deletion because
|
|
1837
|
+
* it attempts to read the .env file which has been deleted.
|
|
1838
|
+
*
|
|
1839
|
+
* @param branchName - Name of the branch to delete
|
|
1840
|
+
* @param worktreePath - Path to worktree (must still exist with .env file)
|
|
1841
|
+
*/
|
|
1842
|
+
async cleanupDatabase(branchName, worktreePath) {
|
|
1843
|
+
if (!this.database) {
|
|
1844
|
+
logger.debug("Database manager not available, skipping database cleanup");
|
|
1845
|
+
return false;
|
|
1846
|
+
}
|
|
1847
|
+
try {
|
|
1848
|
+
const envFilePath = path4.join(worktreePath, ".env");
|
|
1849
|
+
const shouldCleanup = await this.database.shouldUseDatabaseBranching(envFilePath);
|
|
1850
|
+
let cwd;
|
|
1851
|
+
try {
|
|
1852
|
+
cwd = await findMainWorktreePathWithSettings(worktreePath, this.settingsManager);
|
|
1853
|
+
} catch (error) {
|
|
1854
|
+
logger.debug(
|
|
1855
|
+
`Could not find main worktree path, using current directory: ${error instanceof Error ? error.message : String(error)}`
|
|
1856
|
+
);
|
|
1857
|
+
}
|
|
1858
|
+
const result = await this.database.deleteBranchIfConfigured(
|
|
1859
|
+
branchName,
|
|
1860
|
+
shouldCleanup,
|
|
1861
|
+
false,
|
|
1862
|
+
// isPreview
|
|
1863
|
+
cwd
|
|
1864
|
+
);
|
|
1865
|
+
if (result.deleted) {
|
|
1866
|
+
logger.info(`Database branch deleted: ${branchName}`);
|
|
1867
|
+
return true;
|
|
1868
|
+
} else if (result.notFound) {
|
|
1869
|
+
logger.debug(`No database branch found for: ${branchName}`);
|
|
1870
|
+
return false;
|
|
1871
|
+
} else if (result.userDeclined) {
|
|
1872
|
+
logger.info("Preview database deletion declined by user");
|
|
1873
|
+
return false;
|
|
1874
|
+
} else if (!result.success) {
|
|
1875
|
+
logger.warn(`Database cleanup failed: ${result.error ?? "Unknown error"}`);
|
|
1876
|
+
return false;
|
|
1877
|
+
} else {
|
|
1878
|
+
logger.debug("Database deletion returned unexpected result");
|
|
1879
|
+
return false;
|
|
1880
|
+
}
|
|
1881
|
+
} catch (error) {
|
|
1882
|
+
logger.warn(
|
|
1883
|
+
`Unexpected database cleanup error: ${error instanceof Error ? error.message : String(error)}`
|
|
1884
|
+
);
|
|
1885
|
+
return false;
|
|
1886
|
+
}
|
|
1887
|
+
}
|
|
1888
|
+
/**
|
|
1889
|
+
* Cleanup multiple worktrees
|
|
1890
|
+
*/
|
|
1891
|
+
async cleanupMultipleWorktrees(identifiers, options = {}) {
|
|
1892
|
+
const results = [];
|
|
1893
|
+
for (const identifier of identifiers) {
|
|
1894
|
+
const parsed = this.parseIdentifier(identifier);
|
|
1895
|
+
const result = await this.cleanupWorktree(parsed, options);
|
|
1896
|
+
results.push(result);
|
|
1897
|
+
}
|
|
1898
|
+
return results;
|
|
1899
|
+
}
|
|
1900
|
+
/**
|
|
1901
|
+
* Validate worktree safety given a worktree object
|
|
1902
|
+
* Private method used internally when worktree is already known
|
|
1903
|
+
*/
|
|
1904
|
+
async validateWorktreeSafety(worktree, identifier) {
|
|
1905
|
+
const warnings = [];
|
|
1906
|
+
const blockers = [];
|
|
1907
|
+
const isMain = await this.gitWorktree.isMainWorktree(worktree, this.settingsManager);
|
|
1908
|
+
if (isMain) {
|
|
1909
|
+
blockers.push(`Cannot cleanup main worktree: "${worktree.branch}" @ "${worktree.path}"`);
|
|
1910
|
+
}
|
|
1911
|
+
const hasChanges = await hasUncommittedChanges(worktree.path);
|
|
1912
|
+
if (hasChanges) {
|
|
1913
|
+
const blockerMessage = `Worktree has uncommitted changes.
|
|
1914
|
+
|
|
1915
|
+
Please resolve before cleanup - you have some options:
|
|
1916
|
+
\u2022 Commit changes: cd ${worktree.path} && git commit -am "message"
|
|
1917
|
+
\u2022 Stash changes: cd ${worktree.path} && git stash
|
|
1918
|
+
\u2022 Force cleanup: il cleanup ${identifier} --force (WARNING: will discard changes)`;
|
|
1919
|
+
blockers.push(blockerMessage);
|
|
1920
|
+
}
|
|
1921
|
+
return {
|
|
1922
|
+
isSafe: blockers.length === 0,
|
|
1923
|
+
warnings,
|
|
1924
|
+
blockers
|
|
1925
|
+
};
|
|
1926
|
+
}
|
|
1927
|
+
/**
|
|
1928
|
+
* Validate cleanup safety
|
|
1929
|
+
*/
|
|
1930
|
+
async validateCleanupSafety(identifier) {
|
|
1931
|
+
const warnings = [];
|
|
1932
|
+
const blockers = [];
|
|
1933
|
+
const worktrees = await this.gitWorktree.findWorktreesByIdentifier(identifier);
|
|
1934
|
+
if (worktrees.length === 0) {
|
|
1935
|
+
blockers.push(`No worktree found for: ${identifier}`);
|
|
1936
|
+
return { isSafe: false, warnings, blockers };
|
|
1937
|
+
}
|
|
1938
|
+
const worktree = worktrees[0];
|
|
1939
|
+
if (!worktree) {
|
|
1940
|
+
blockers.push(`No worktree found for: ${identifier}`);
|
|
1941
|
+
return { isSafe: false, warnings, blockers };
|
|
1942
|
+
}
|
|
1943
|
+
return await this.validateWorktreeSafety(worktree, identifier);
|
|
1944
|
+
}
|
|
1945
|
+
/**
|
|
1946
|
+
* Parse identifier to determine type and extract number
|
|
1947
|
+
* Helper method for port calculation
|
|
1948
|
+
*/
|
|
1949
|
+
parseIdentifier(identifier) {
|
|
1950
|
+
const issueId = extractIssueNumber(identifier);
|
|
1951
|
+
if (issueId !== null) {
|
|
1952
|
+
return {
|
|
1953
|
+
type: "issue",
|
|
1954
|
+
number: issueId,
|
|
1955
|
+
originalInput: identifier
|
|
1956
|
+
};
|
|
1957
|
+
}
|
|
1958
|
+
const prMatch = identifier.match(/(?:pr|PR)[/-](\d+)/);
|
|
1959
|
+
if (prMatch == null ? void 0 : prMatch[1]) {
|
|
1960
|
+
return {
|
|
1961
|
+
type: "pr",
|
|
1962
|
+
number: parseInt(prMatch[1], 10),
|
|
1963
|
+
originalInput: identifier
|
|
1964
|
+
};
|
|
1965
|
+
}
|
|
1966
|
+
const numericMatch = identifier.match(/^#?(\d+)$/);
|
|
1967
|
+
if (numericMatch == null ? void 0 : numericMatch[1]) {
|
|
1968
|
+
return {
|
|
1969
|
+
type: "issue",
|
|
1970
|
+
number: parseInt(numericMatch[1], 10),
|
|
1971
|
+
originalInput: identifier
|
|
1972
|
+
};
|
|
1973
|
+
}
|
|
1974
|
+
return {
|
|
1975
|
+
type: "branch",
|
|
1976
|
+
branchName: identifier,
|
|
1977
|
+
originalInput: identifier
|
|
1978
|
+
};
|
|
1979
|
+
}
|
|
1980
|
+
};
|
|
1981
|
+
|
|
1982
|
+
export {
|
|
1983
|
+
LoomManager,
|
|
1984
|
+
EnvironmentManager,
|
|
1985
|
+
CLIIsolationManager,
|
|
1986
|
+
DatabaseManager,
|
|
1987
|
+
ResourceCleanup
|
|
1988
|
+
};
|
|
1989
|
+
//# sourceMappingURL=chunk-HBYZH6GD.js.map
|