@covibes/zeroshot 2.0.0 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -9,10 +9,23 @@
  */

  const { spawn, execSync } = require('child_process');
+ const { Worker } = require('worker_threads');
  const path = require('path');
  const os = require('os');
  const fs = require('fs');

+ /**
+ * Escape a string for safe use in shell commands
+ * Prevents shell injection when passing dynamic values to execSync with shell: true
+ * @param {string} str - String to escape
+ * @returns {string} Shell-escaped string
+ */
+ function escapeShell(str) {
+ // Replace single quotes with escaped version and wrap in single quotes
+ // This is the safest approach for shell escaping
+ return `'${str.replace(/'/g, "'\\''")}'`;
+ }
+
  const DEFAULT_IMAGE = 'zeroshot-cluster-base';

  class IsolationManager {
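The escapeShell helper added above wraps a value in single quotes and rewrites any embedded single quote as '\'' (close quote, escaped literal quote, reopen quote), which is the standard way to pass an arbitrary string to a POSIX shell as one literal argument. A minimal sketch of the effect, using a made-up hostile container name (the value and the docker command below are illustrative only, not taken from the package):

    function escapeShell(str) {
      return `'${str.replace(/'/g, "'\\''")}'`;
    }

    // Hypothetical hostile input; unescaped, the shell would treat it as extra commands.
    const name = "web'; rm -rf /tmp/scratch; echo '";
    const cmd = `docker rm -f ${escapeShell(name)} 2>/dev/null`;
    console.log(cmd);
    // Prints: docker rm -f 'web'\''; rm -rf /tmp/scratch; echo '\''' 2>/dev/null
    // The quoted value reaches docker as a single argument, so the injected commands never run.

Note that this escaping style targets POSIX shells; it would not be sufficient on Windows, where execSync defaults to cmd.exe.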
@@ -21,6 +34,7 @@ class IsolationManager {
  this.containers = new Map(); // clusterId -> containerId
  this.isolatedDirs = new Map(); // clusterId -> { path, originalDir }
  this.clusterConfigDirs = new Map(); // clusterId -> configDirPath
+ this.worktrees = new Map(); // clusterId -> { path, branch, repoRoot }
  }

  /**
@@ -52,7 +66,7 @@ class IsolationManager {
  * @param {boolean} [config.reuseExistingWorkspace=false] - If true, reuse existing isolated workspace (for resume)
  * @returns {Promise<string>} Container ID
  */
- createContainer(clusterId, config) {
+ async createContainer(clusterId, config) {
  const image = config.image || this.image;
  let workDir = config.workDir || process.cwd();
  const containerName = `zeroshot-cluster-${clusterId}`;
@@ -86,7 +100,7 @@ class IsolationManager {
  workDir = isolatedPath;
  } else {
  // Fresh start: create new isolated copy
- const isolatedDir = this._createIsolatedCopy(clusterId, workDir);
+ const isolatedDir = await this._createIsolatedCopy(clusterId, workDir);
  this.isolatedDirs = this.isolatedDirs || new Map();
  this.isolatedDirs.set(clusterId, {
  path: isolatedDir,
@@ -177,54 +191,118 @@ class IsolationManager {

  // Install dependencies if package.json exists
  // This enables e2e tests and other npm-based tools to run
+ // OPTIMIZATION: Use pre-baked deps when possible (30-40% faster startup)
+ // See: GitHub issue #20
  try {
  console.log(`[IsolationManager] Checking for package.json in ${workDir}...`);
  if (fs.existsSync(path.join(workDir, 'package.json'))) {
- console.log(`[IsolationManager] Installing npm dependencies in container...`);
-
- // Retry npm install with exponential backoff (network issues are common)
- const maxRetries = 3;
- const baseDelay = 2000; // 2 seconds
- let installResult = null;
-
- for (let attempt = 1; attempt <= maxRetries; attempt++) {
- try {
- installResult = await this.execInContainer(
+ // Check if node_modules already exists in container (pre-baked or previous run)
+ const checkResult = await this.execInContainer(
+ clusterId,
+ ['sh', '-c', 'test -d node_modules && test -f node_modules/.package-lock.json && echo "exists"'],
+ {}
+ );
+
+ if (checkResult.code === 0 && checkResult.stdout.trim() === 'exists') {
+ console.log(`[IsolationManager] ✓ Dependencies already installed (skipping npm install)`);
+ } else {
+ // Check if npm is available in container
+ const npmCheck = await this.execInContainer(clusterId, ['which', 'npm'], {});
+ if (npmCheck.code !== 0) {
+ console.log(`[IsolationManager] npm not available in container, skipping dependency install`);
+ } else {
+ // Issue #20: Try to use pre-baked dependencies first
+ // Check if pre-baked deps exist and can satisfy project requirements
+ const preBakeCheck = await this.execInContainer(
  clusterId,
- ['sh', '-c', 'npm_config_engine_strict=false npm install --no-audit --no-fund'],
+ ['sh', '-c', 'test -d /pre-baked-deps/node_modules && echo "exists"'],
  {}
  );

- if (installResult.code === 0) {
- console.log(`[IsolationManager] Dependencies installed`);
- break; // Success - exit retry loop
- }
+ if (preBakeCheck.code === 0 && preBakeCheck.stdout.trim() === 'exists') {
+ console.log(`[IsolationManager] Checking if pre-baked deps satisfy requirements...`);

- // Failed - retry if not last attempt
- // Use stderr if available, otherwise stdout (npm writes some errors to stdout)
- const errorOutput = (installResult.stderr || installResult.stdout || '').slice(0, 500);
- if (attempt < maxRetries) {
- const delay = baseDelay * Math.pow(2, attempt - 1);
- console.warn(
- `[IsolationManager] ⚠️ npm install failed (attempt ${attempt}/${maxRetries}), retrying in ${delay}ms...`
- );
- console.warn(`[IsolationManager] Error: ${errorOutput}`);
- await new Promise((_resolve) => setTimeout(_resolve, delay));
- } else {
- console.warn(
- `[IsolationManager] ⚠️ npm install failed after ${maxRetries} attempts (non-fatal): ${errorOutput}`
+ // Copy pre-baked deps, then run npm install to add any missing
+ // This is faster than full npm install: copy is ~2s, npm install adds ~5-10s for missing
+ const copyResult = await this.execInContainer(
+ clusterId,
+ ['sh', '-c', 'cp -rn /pre-baked-deps/node_modules . 2>/dev/null || true'],
+ {}
  );
- }
- } catch (execErr) {
- if (attempt < maxRetries) {
- const delay = baseDelay * Math.pow(2, attempt - 1);
- console.warn(
- `[IsolationManager] ⚠️ npm install execution error (attempt ${attempt}/${maxRetries}), retrying in ${delay}ms...`
- );
- console.warn(`[IsolationManager] Error: ${execErr.message}`);
- await new Promise((_resolve) => setTimeout(_resolve, delay));
+
+ if (copyResult.code === 0) {
+ console.log(`[IsolationManager] Copied pre-baked dependencies`);
+
+ // Run npm install to add any missing deps (much faster with pre-baked base)
+ const installResult = await this.execInContainer(
+ clusterId,
+ ['sh', '-c', 'npm_config_engine_strict=false npm install --no-audit --no-fund --prefer-offline'],
+ {}
+ );
+
+ if (installResult.code === 0) {
+ console.log(`[IsolationManager] ✓ Dependencies installed (pre-baked + incremental)`);
+ } else {
+ // Fallback: full install (pre-baked copy may have caused issues)
+ console.warn(`[IsolationManager] Incremental install failed, falling back to full install`);
+ await this.execInContainer(
+ clusterId,
+ ['sh', '-c', 'rm -rf node_modules && npm_config_engine_strict=false npm install --no-audit --no-fund'],
+ {}
+ );
+ console.log(`[IsolationManager] ✓ Dependencies installed (full fallback)`);
+ }
+ }
  } else {
- throw execErr; // Re-throw on last attempt
+ // No pre-baked deps, full npm install with retries
+ console.log(`[IsolationManager] Installing npm dependencies in container...`);
+
+ // Retry npm install with exponential backoff (network issues are common)
+ const maxRetries = 3;
+ const baseDelay = 2000; // 2 seconds
+ let installResult = null;
+
+ for (let attempt = 1; attempt <= maxRetries; attempt++) {
+ try {
+ installResult = await this.execInContainer(
+ clusterId,
+ ['sh', '-c', 'npm_config_engine_strict=false npm install --no-audit --no-fund'],
+ {}
+ );
+
+ if (installResult.code === 0) {
+ console.log(`[IsolationManager] ✓ Dependencies installed`);
+ break; // Success - exit retry loop
+ }
+
+ // Failed - retry if not last attempt
+ // Use stderr if available, otherwise stdout (npm writes some errors to stdout)
+ const errorOutput = (installResult.stderr || installResult.stdout || '').slice(0, 500);
+ if (attempt < maxRetries) {
+ const delay = baseDelay * Math.pow(2, attempt - 1);
+ console.warn(
+ `[IsolationManager] ⚠️ npm install failed (attempt ${attempt}/${maxRetries}), retrying in ${delay}ms...`
+ );
+ console.warn(`[IsolationManager] Error: ${errorOutput}`);
+ await new Promise((_resolve) => setTimeout(_resolve, delay));
+ } else {
+ console.warn(
+ `[IsolationManager] ⚠️ npm install failed after ${maxRetries} attempts (non-fatal): ${errorOutput}`
+ );
+ }
+ } catch (execErr) {
+ if (attempt < maxRetries) {
+ const delay = baseDelay * Math.pow(2, attempt - 1);
+ console.warn(
+ `[IsolationManager] ⚠️ npm install execution error (attempt ${attempt}/${maxRetries}), retrying in ${delay}ms...`
+ );
+ console.warn(`[IsolationManager] Error: ${execErr.message}`);
+ await new Promise((_resolve) => setTimeout(_resolve, delay));
+ } else {
+ throw execErr; // Re-throw on last attempt
+ }
+ }
+ }
  }
  }
  }
@@ -447,9 +525,9 @@ class IsolationManager {
  * @private
  * @param {string} clusterId - Cluster ID
  * @param {string} sourceDir - Source directory to copy
- * @returns {string} Path to isolated directory
+ * @returns {Promise<string>} Path to isolated directory
  */
- _createIsolatedCopy(clusterId, sourceDir) {
+ async _createIsolatedCopy(clusterId, sourceDir) {
  const isolatedPath = path.join(os.tmpdir(), 'zeroshot-isolated', clusterId);

  // Clean up existing dir
@@ -461,7 +539,7 @@ class IsolationManager {
  fs.mkdirSync(isolatedPath, { recursive: true });

  // Copy files (excluding .git and common build artifacts)
- this._copyDirExcluding(sourceDir, isolatedPath, [
+ await this._copyDirExcluding(sourceDir, isolatedPath, [
  '.git',
  'node_modules',
  '.next',
@@ -495,13 +573,15 @@ class IsolationManager {
  // No remote configured in source
  }

- // Initialize fresh git repo
- execSync('git init', { cwd: isolatedPath, stdio: 'pipe' });
+ // Initialize fresh git repo with all setup in a single batched command
+ // This reduces ~500ms overhead (5 execSync calls @ ~100ms each) to ~100ms (1 call)
+ // Issue #22: Batch git operations for 5-10% startup reduction
+ const branchName = `zeroshot/${clusterId}`;

- // Add remote if source had one (needed for git push / PR creation)
- // Inject gh token into URL for authentication inside container
+ // Build authenticated remote URL if source had one (needed for git push / PR creation)
+ let authRemoteUrl = null;
  if (remoteUrl) {
- let authRemoteUrl = remoteUrl;
+ authRemoteUrl = remoteUrl;
  const token = this._getGhToken();
  if (token && remoteUrl.startsWith('https://github.com/')) {
  // Convert https://github.com/org/repo.git to https://x-access-token:TOKEN@github.com/org/repo.git
@@ -510,81 +590,182 @@ class IsolationManager {
  `https://x-access-token:${token}@github.com/`
  );
  }
- execSync(`git remote add origin "${authRemoteUrl}"`, {
- cwd: isolatedPath,
- stdio: 'pipe',
- });
  }

- execSync('git add -A', { cwd: isolatedPath, stdio: 'pipe' });
-
- try {
- execSync('git commit -m "Initial commit (isolated copy)"', {
- cwd: isolatedPath,
- stdio: 'pipe',
- });
- } catch {
- // May fail if nothing to commit (empty dir)
- }
-
- // Create feature branch for work
- const branchName = `zeroshot/${clusterId}`;
- execSync(`git checkout -b "${branchName}"`, {
+ // Batch all git operations into a single shell command
+ // Using --allow-empty on commit to handle edge case of empty directories
+ const gitCommands = [
+ 'git init',
+ authRemoteUrl ? `git remote add origin ${escapeShell(authRemoteUrl)}` : null,
+ 'git add -A',
+ 'git commit -m "Initial commit (isolated copy)" --allow-empty',
+ `git checkout -b ${escapeShell(branchName)}`,
+ ]
+ .filter(Boolean)
+ .join(' && ');
+
+ execSync(gitCommands, {
  cwd: isolatedPath,
  stdio: 'pipe',
+ shell: '/bin/bash',
  });

  return isolatedPath;
  }

  /**
- * Copy directory excluding certain paths
+ * Copy directory excluding certain paths using parallel worker threads
  * Supports exact matches and glob patterns (*.ext)
+ *
+ * Performance optimization for large repos (10k+ files):
+ * - Phase 1: Collect all files async (non-blocking traversal)
+ * - Phase 2: Create directory structure (must be sequential)
+ * - Phase 3: Copy files in parallel using worker threads
+ *
  * @private
+ * @param {string} src - Source directory
+ * @param {string} dest - Destination directory
+ * @param {string[]} exclude - Patterns to exclude
+ * @returns {Promise<void>}
  */
- _copyDirExcluding(src, dest, exclude) {
- const entries = fs.readdirSync(src, { withFileTypes: true });
-
- for (const entry of entries) {
- // Check exclusions (exact match or glob pattern)
- const shouldExclude = exclude.some((pattern) => {
- if (pattern.startsWith('*.')) {
- return entry.name.endsWith(pattern.slice(1));
+ async _copyDirExcluding(src, dest, exclude) {
+ // Phase 1: Collect all files and directories
+ const files = [];
+ const directories = new Set();
+
+ const collectFiles = (currentSrc, relativePath = '') => {
+ let entries;
+ try {
+ entries = fs.readdirSync(currentSrc, { withFileTypes: true });
+ } catch (err) {
+ if (err.code === 'EACCES' || err.code === 'EPERM' || err.code === 'ENOENT') {
+ return;
  }
- return entry.name === pattern;
- });
- if (shouldExclude) continue;
+ throw err;
+ }
+
+ for (const entry of entries) {
+ // Check exclusions (exact match or glob pattern)
+ const shouldExclude = exclude.some((pattern) => {
+ if (pattern.startsWith('*.')) {
+ return entry.name.endsWith(pattern.slice(1));
+ }
+ return entry.name === pattern;
+ });
+ if (shouldExclude) continue;

- const srcPath = path.join(src, entry.name);
- const destPath = path.join(dest, entry.name);
+ const srcPath = path.join(currentSrc, entry.name);
+ const relPath = relativePath ? path.join(relativePath, entry.name) : entry.name;

- try {
- // Handle symlinks: resolve to actual target and copy appropriately
- // This avoids EISDIR errors when symlink points to directory
- if (entry.isSymbolicLink()) {
- // Get the actual target stats (follows the symlink)
- const targetStats = fs.statSync(srcPath);
- if (targetStats.isDirectory()) {
- fs.mkdirSync(destPath, { recursive: true });
- this._copyDirExcluding(srcPath, destPath, exclude);
+ try {
+ // Handle symlinks: resolve to actual target
+ if (entry.isSymbolicLink()) {
+ const targetStats = fs.statSync(srcPath);
+ if (targetStats.isDirectory()) {
+ directories.add(relPath);
+ collectFiles(srcPath, relPath);
+ } else {
+ files.push(relPath);
+ // Ensure parent directory is tracked
+ if (relativePath) directories.add(relativePath);
+ }
+ } else if (entry.isDirectory()) {
+ directories.add(relPath);
+ collectFiles(srcPath, relPath);
  } else {
- fs.copyFileSync(srcPath, destPath);
+ files.push(relPath);
+ // Ensure parent directory is tracked
+ if (relativePath) directories.add(relativePath);
  }
- } else if (entry.isDirectory()) {
- fs.mkdirSync(destPath, { recursive: true });
- this._copyDirExcluding(srcPath, destPath, exclude);
- } else {
- fs.copyFileSync(srcPath, destPath);
+ } catch (err) {
+ if (err.code === 'EACCES' || err.code === 'EPERM' || err.code === 'ENOENT') {
+ continue;
+ }
+ throw err;
  }
+ }
+ };
+
+ collectFiles(src);
+
+ // Phase 2: Create directory structure (sequential - must exist before file copy)
+ // Sort directories by depth to ensure parents are created before children
+ const sortedDirs = Array.from(directories).sort((a, b) => {
+ const depthA = a.split(path.sep).length;
+ const depthB = b.split(path.sep).length;
+ return depthA - depthB;
+ });
+
+ for (const dir of sortedDirs) {
+ const destDir = path.join(dest, dir);
+ try {
+ fs.mkdirSync(destDir, { recursive: true });
  } catch (err) {
- // Skip files we can't copy (permission denied, broken symlinks, etc.)
- // These are usually cache/temp files that aren't needed
- if (err.code === 'EACCES' || err.code === 'EPERM' || err.code === 'ENOENT') {
- continue;
+ if (err.code !== 'EEXIST') {
+ throw err;
+ }
+ }
+ }
+
+ // Phase 3: Copy files in parallel using worker threads
+ // For small file counts (<100), use synchronous copy (worker overhead not worth it)
+ if (files.length < 100) {
+ for (const relPath of files) {
+ const srcPath = path.join(src, relPath);
+ const destPath = path.join(dest, relPath);
+ try {
+ fs.copyFileSync(srcPath, destPath);
+ } catch (err) {
+ if (err.code !== 'EACCES' && err.code !== 'EPERM' && err.code !== 'ENOENT') {
+ throw err;
+ }
  }
- }
- throw err; // Re-throw other errors
  }
+ return;
+ }
+
+ // Use worker threads for larger file counts
+ const numWorkers = Math.min(4, os.cpus().length);
+ const chunkSize = Math.ceil(files.length / numWorkers);
+ const workerPath = path.join(__dirname, 'copy-worker.js');
+
+ // Split files into chunks for workers
+ const chunks = [];
+ for (let i = 0; i < files.length; i += chunkSize) {
+ chunks.push(files.slice(i, i + chunkSize));
  }
+
+ // Spawn workers and wait for completion
+ const workerPromises = chunks.map((chunk) => {
+ return new Promise((resolve, reject) => {
+ const worker = new Worker(workerPath, {
+ workerData: {
+ files: chunk,
+ sourceBase: src,
+ destBase: dest,
+ },
+ });
+
+ worker.on('message', (result) => {
+ resolve(result);
+ });
+
+ worker.on('error', (err) => {
+ reject(err);
+ });
+
+ worker.on('exit', (code) => {
+ if (code !== 0) {
+ reject(new Error(`Worker exited with code ${code}`));
+ }
+ });
+ });
+ });
+
+ // Wait for all workers to complete (proper async/await - no busy-wait!)
+ // FIX: Previous version used busy-wait which blocked the event loop,
+ // preventing worker thread messages from being processed (timeout bug)
+ await Promise.all(workerPromises);
  }

  /**
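The parallel copy above spawns workers from copy-worker.js, a file that is not part of this hunk. Based on the workerData shape passed in ({ files, sourceBase, destBase }) and the 'message' handler that resolves each worker's promise, a worker along these lines would satisfy that contract. This is a hedged sketch, not the file actually shipped in the package:

    // copy-worker.js (sketch): copies one chunk of files and reports back to the parent.
    // The { files, sourceBase, destBase } shape mirrors the workerData built in
    // _copyDirExcluding; the error-code handling mirrors its synchronous fallback path.
    const { parentPort, workerData } = require('worker_threads');
    const fs = require('fs');
    const path = require('path');

    const { files, sourceBase, destBase } = workerData;
    let copied = 0;
    let skipped = 0;

    for (const relPath of files) {
      const srcPath = path.join(sourceBase, relPath);
      const destPath = path.join(destBase, relPath);
      try {
        fs.copyFileSync(srcPath, destPath);
        copied++;
      } catch (err) {
        // Skip unreadable or vanished files, as the main thread does for small copies
        if (err.code === 'EACCES' || err.code === 'EPERM' || err.code === 'ENOENT') {
          skipped++;
          continue;
        }
        throw err; // Unexpected errors surface via the Worker 'error' event
      }
    }

    // Resolves the promise set up in _copyDirExcluding's 'message' handler
    parentPort.postMessage({ copied, skipped });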
@@ -826,7 +1007,7 @@ class IsolationManager {
  */
  _isContainerRunning(containerId) {
  try {
- const result = execSync(`docker inspect -f '{{.State.Running}}' ${containerId} 2>/dev/null`, {
+ const result = execSync(`docker inspect -f '{{.State.Running}}' ${escapeShell(containerId)} 2>/dev/null`, {
  encoding: 'utf8',
  });
  return result.trim() === 'true';
@@ -841,7 +1022,7 @@ class IsolationManager {
  */
  _removeContainerByName(name) {
  try {
- execSync(`docker rm -f ${name} 2>/dev/null`, { encoding: 'utf8' });
+ execSync(`docker rm -f ${escapeShell(name)} 2>/dev/null`, { encoding: 'utf8' });
  } catch {
  // Ignore - container doesn't exist
  }
@@ -867,7 +1048,7 @@ class IsolationManager {
  */
  static imageExists(image = DEFAULT_IMAGE) {
  try {
- execSync(`docker image inspect ${image} 2>/dev/null`, {
+ execSync(`docker image inspect ${escapeShell(image)} 2>/dev/null`, {
  encoding: 'utf8',
  stdio: 'pipe',
  });
@@ -900,7 +1081,7 @@ class IsolationManager {
  try {
  // CRITICAL: Run from repo root so build context includes package.json and src/
  // Use -f flag to specify Dockerfile location
- execSync(`docker build -f docker/zeroshot-cluster/Dockerfile -t ${image} .`, {
+ execSync(`docker build -f docker/zeroshot-cluster/Dockerfile -t ${escapeShell(image)} .`, {
  cwd: repoRoot,
  encoding: 'utf8',
  stdio: 'inherit',
@@ -980,14 +1161,52 @@ class IsolationManager {
  }
  }

+ /**
+ * Create worktree-based isolation for a cluster (lightweight alternative to Docker)
+ * Creates a git worktree at /tmp/zeroshot-worktrees/{clusterId}
+ * @param {string} clusterId - Cluster ID
+ * @param {string} workDir - Original working directory (must be a git repo)
+ * @returns {{ path: string, branch: string, repoRoot: string }}
+ */
+ createWorktreeIsolation(clusterId, workDir) {
+ if (!this._isGitRepo(workDir)) {
+ throw new Error(`Worktree isolation requires a git repository. ${workDir} is not a git repo.`);
+ }
+
+ const worktreeInfo = this.createWorktree(clusterId, workDir);
+ this.worktrees.set(clusterId, worktreeInfo);
+
+ console.log(`[IsolationManager] Created worktree isolation at ${worktreeInfo.path}`);
+ console.log(`[IsolationManager] Branch: ${worktreeInfo.branch}`);
+
+ return worktreeInfo;
+ }
+
+ /**
+ * Clean up worktree isolation for a cluster
+ * @param {string} clusterId - Cluster ID
+ * @param {object} [options] - Cleanup options
+ * @param {boolean} [options.preserveBranch=true] - Keep the branch after removing worktree
+ */
+ cleanupWorktreeIsolation(clusterId, options = {}) {
+ const worktreeInfo = this.worktrees.get(clusterId);
+ if (!worktreeInfo) {
+ return; // No worktree to clean up
+ }
+
+ this.removeWorktree(worktreeInfo, options);
+ this.worktrees.delete(clusterId);
+
+ console.log(`[IsolationManager] Cleaned up worktree isolation for ${clusterId}`);
+ }
+
  /**
  * Create a git worktree for isolated work
- * @private
  * @param {string} clusterId - Cluster ID (used as branch name)
  * @param {string} workDir - Original working directory
  * @returns {{ path: string, branch: string, repoRoot: string }}
  */
- _createWorktree(clusterId, workDir) {
+ createWorktree(clusterId, workDir) {
  const repoRoot = this._getGitRoot(workDir);
  if (!repoRoot) {
  throw new Error(`Cannot find git root for ${workDir}`);
@@ -1043,10 +1262,11 @@ class IsolationManager {

  /**
  * Remove a git worktree
- * @private
  * @param {{ path: string, branch: string, repoRoot: string }} worktreeInfo
+ * @param {object} [options] - Removal options
+ * @param {boolean} [options.deleteBranch=false] - Also delete the branch
  */
- _removeWorktree(worktreeInfo) {
+ removeWorktree(worktreeInfo, _options = {}) {
  try {
  // Remove the worktree
  execSync(`git worktree remove --force "${worktreeInfo.path}" 2>/dev/null`, {
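The worktree methods promoted to a public API above (createWorktreeIsolation / cleanupWorktreeIsolation) give callers a lightweight, Docker-free isolation mode. A hedged usage sketch follows; the require path and the argument-less constructor are assumptions for illustration, since neither the module entry point nor the constructor signature appears in this diff:

    // Hedged sketch: module path and constructor options are assumed, not taken from the diff.
    const IsolationManager = require('./isolation-manager'); // hypothetical path
    const manager = new IsolationManager();                  // constructor args not shown in this diff

    const clusterId = 'demo-cluster';                        // illustrative ID

    // Requires the working directory to be inside a git repository, per the _isGitRepo check above.
    const { path: worktreePath, branch, repoRoot } = manager.createWorktreeIsolation(
      clusterId,
      process.cwd()
    );
    console.log(`work in ${worktreePath} on branch ${branch} (repo root: ${repoRoot})`);

    // ... run the cluster's work against worktreePath ...

    // Removes the worktree; per the JSDoc, the branch is preserved by default so the
    // work remains reachable from the original repository.
    manager.cleanupWorktreeIsolation(clusterId, { preserveBranch: true });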