@orgloop/agentctl 1.1.0 → 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,198 @@
1
+ import crypto from "node:crypto";
2
+ import path from "node:path";
3
+ import { runHook } from "./hooks.js";
4
+ import { createWorktree } from "./worktree.js";
5
// --- Group ID generation ---
/**
 * Generate a short, random group identifier of the form "g-xxxxxx",
 * where the suffix is six lowercase hex characters (3 random bytes).
 */
export function generateGroupId() {
    return `g-${crypto.randomBytes(3).toString("hex")}`;
}
11
// --- Slot disambiguation ---
/**
 * Build a short, path-friendly suffix for a slot, used in worktree and
 * branch naming. When the slot's adapter appears only once across all
 * slots, the shortened adapter name alone suffices; otherwise the
 * model's short name (or "default" when no model is set) is appended
 * to keep suffixes unique.
 */
export function slotSuffix(slot, allSlots) {
    const adapterShort = shortenAdapter(slot.adapter);
    const duplicated = allSlots.filter((s) => s.adapter === slot.adapter).length > 1;
    if (!duplicated) {
        return adapterShort;
    }
    const modelShort = slot.model ? shortenModel(slot.model) : "default";
    return `${adapterShort}-${modelShort}`;
}
27
/**
 * Map known adapter names to short, path-friendly aliases
 * (claude-code → cc, pi-rust → pi-rs); unknown names pass through.
 */
function shortenAdapter(adapter) {
    switch (adapter) {
        case "claude-code":
            return "cc";
        case "pi-rust":
            return "pi-rs";
        default:
            return adapter;
    }
}
35
/**
 * Derive a short model name from a full model identifier.
 * Known Anthropic families collapse to fixed names ("opus", "sonnet",
 * "haiku"); GPT models keep their major version plus any trailing
 * variant (e.g. "gpt-5.2-codex" → "gpt5-codex"); anything else falls
 * back to the sanitized last path/colon/dash segment.
 */
function shortenModel(model) {
    // Family checks in the same precedence order: opus, sonnet, haiku.
    for (const family of ["opus", "sonnet", "haiku"]) {
        if (new RegExp(family, "i").test(model)) {
            return family;
        }
    }
    const gpt = /gpt[- ]?(\d+)/i.exec(model);
    if (gpt) {
        // Strip the "gpt-<version>" prefix, then any leading separators,
        // leaving just the variant (if any).
        const variant = model.replace(/gpt[- ]?\d+\.?\d*/i, "").replace(/^[- .]+/, "");
        return variant ? `gpt${gpt[1]}-${variant}` : `gpt${gpt[1]}`;
    }
    // Fallback: sanitize the last segment of the identifier.
    const segments = model.split(/[/:-]/);
    return sanitizePath(segments[segments.length - 1] || model);
}
57
/**
 * Strip everything but ASCII letters, digits and hyphens, lowercase the
 * result so it is safe inside file paths and branch names, and return
 * "default" when nothing survives the stripping.
 */
function sanitizePath(s) {
    const cleaned = s.replace(/[^a-zA-Z0-9-]/g, "").toLowerCase();
    return cleaned.length > 0 ? cleaned : "default";
}
61
// --- Worktree + branch naming ---
/**
 * Build the on-disk worktree path for a launch slot:
 * "<absolute repo path>-<groupId>-<suffix>" (a sibling of the repo).
 */
export function worktreePath(repo, groupId, suffix) {
    return [path.resolve(repo), groupId, suffix].join("-");
}
67
/** Build the branch name for a launch slot: "try/<groupId>/<suffix>". */
export function branchName(groupId, suffix) {
    return ["try", groupId, suffix].join("/");
}
71
// --- Orchestrator ---
/**
 * Orchestrate a parallel multi-adapter launch.
 *
 * 1. Generate a group ID (reported via opts.onGroupCreated, if given).
 * 2. For each slot: create a worktree, run the onCreate hook, then launch
 *    the slot's adapter — worktree creation is sequential, adapter
 *    launches run in parallel.
 * 3. Return the group ID plus one result per slot; failures are reported
 *    via an `error` field on the result instead of a thrown exception.
 */
export async function orchestrateLaunch(opts) {
    const { slots, prompt, spec, cwd, hooks, adapters } = opts;
    const groupId = generateGroupId();
    opts.onGroupCreated?.(groupId);
    const repo = path.resolve(cwd);
    // Phase 1: create all worktrees sequentially — concurrent
    // `git worktree add` calls on the same repo can contend on git's lock.
    const worktrees = [];
    for (const slot of slots) {
        const suffix = slotSuffix(slot, slots);
        const branch = branchName(groupId, suffix);
        const worktree = await createWorktree({
            repo,
            branch,
        });
        // NOTE: createWorktree names the path from repo + branch slug
        // (branch "try/g-a1b2c3/cc" → "<repo>-try-g-a1b2c3-cc"), so we use
        // the path it returns rather than worktreePath().
        worktrees.push({ slot, suffix, branch, worktree });
        // Fire the on_worktree_create (onCreate) hook, if configured.
        if (hooks?.onCreate) {
            await runHook(hooks, "onCreate", {
                sessionId: "", // no session exists yet at worktree-creation time
                cwd: worktree.path,
                adapter: slot.adapter,
                branch,
                group: groupId,
                model: slot.model,
            });
        }
    }
    // Phase 2: launch all adapters in parallel. Each callback resolves to a
    // result object (never rejects), so Promise.all cannot fail fast here.
    const launchPromises = worktrees.map(async ({ slot, branch, worktree }) => {
        const adapter = adapters[slot.adapter];
        if (!adapter) {
            return {
                slot,
                sessionId: "",
                cwd: worktree.path,
                branch,
                error: `Unknown adapter: ${slot.adapter}`,
            };
        }
        try {
            const launchOpts = {
                adapter: slot.adapter,
                prompt,
                spec,
                cwd: worktree.path,
                model: slot.model,
                worktree: { repo: worktree.repo, branch },
                hooks,
            };
            const session = await adapter.launch(launchOpts);
            // Tag the session with the group so it can be found later.
            session.group = groupId;
            const result = {
                slot,
                sessionId: session.id,
                pid: session.pid,
                cwd: worktree.path,
                branch,
            };
            opts.onSessionLaunched?.(result);
            return result;
        }
        catch (err) {
            // Per-slot failure: report it in the result instead of aborting
            // the whole group.
            return {
                slot,
                sessionId: "",
                cwd: worktree.path,
                branch,
                error: err.message,
            };
        }
    });
    const results = await Promise.all(launchPromises);
    return { groupId, results };
}
159
// --- CLI flag parsing ---
/**
 * Parse adapter launch slots from raw argv.
 *
 * Each --adapter (-A) flag opens a new slot; an optional --model (-M)
 * flag attaches a model to the most recent slot:
 *
 *   --adapter claude-code --model opus --adapter codex
 *
 * Unknown arguments are ignored. Returns AdapterSlot[], one per
 * --adapter occurrence. Throws when a flag is missing its value or when
 * --model appears before any --adapter.
 */
export function parseAdapterSlots(rawArgs) {
    const slots = [];
    let current = null;
    // Read a flag's value argument, rejecting absent values and
    // anything that looks like another flag.
    const takeValue = (flag, value) => {
        if (!value || value.startsWith("-")) {
            throw new Error(`${flag} requires a value`);
        }
        return value;
    };
    for (let i = 0; i < rawArgs.length; i++) {
        switch (rawArgs[i]) {
            case "--adapter":
            case "-A": {
                if (current !== null) {
                    slots.push(current);
                }
                current = { adapter: takeValue("--adapter", rawArgs[++i]) };
                break;
            }
            case "--model":
            case "-M": {
                if (current === null) {
                    throw new Error(`--model must follow an --adapter flag`);
                }
                current.model = takeValue("--model", rawArgs[++i]);
                break;
            }
        }
    }
    if (current !== null) {
        slots.push(current);
    }
    return slots;
}
@@ -0,0 +1,40 @@
1
import type { AdapterSlot } from "./launch-orchestrator.js";
/** A single entry in the matrix array */
export interface MatrixEntry {
    /** Adapter name, e.g. "claude-code" or "codex" (required). */
    adapter: string;
    /** A single model, or a list that expands into one slot per model. */
    model?: string | string[];
}
/** Top-level matrix file schema */
export interface MatrixFile {
    /** Prompt text passed to every launched slot (required). */
    prompt: string;
    /** Working directory / repo to launch from. */
    cwd?: string;
    /** Optional spec forwarded to adapters. */
    spec?: string;
    /** Hook script paths keyed by lifecycle event (snake_case in YAML). */
    hooks?: {
        on_create?: string;
        on_complete?: string;
    };
    /** Launch entries; must be non-empty. */
    matrix: MatrixEntry[];
}
/**
 * Parse a YAML matrix file and validate its basic shape
 * (string `prompt`, non-empty `matrix` array, `adapter` on every entry).
 * Cross-product expansion into AdapterSlot[] is done separately by
 * expandMatrix().
 */
export declare function parseMatrixFile(filePath: string): Promise<MatrixFile>;
/**
 * Expand a MatrixFile into AdapterSlot[].
 * When an entry's `model` is an array, it expands into one slot per
 * model value (cross-product expansion).
 *
 * Example:
 *   matrix:
 *     - adapter: claude-code
 *       model: [opus, sonnet]
 *     - adapter: codex
 *
 * Expands to 3 slots:
 *   [{ adapter: "claude-code", model: "opus" },
 *    { adapter: "claude-code", model: "sonnet" },
 *    { adapter: "codex" }]
 */
export declare function expandMatrix(matrix: MatrixFile): AdapterSlot[];
@@ -0,0 +1,69 @@
1
+ import fs from "node:fs/promises";
2
+ import YAML from "yaml";
3
// --- Parsing ---
/**
 * Parse a YAML matrix file into a MatrixFile.
 *
 * Validates that the file is a YAML mapping with a string `prompt`, a
 * non-empty `matrix` array, and that each entry has a string `adapter`
 * and — when present — a `model` that is a string or an array of
 * strings. Cross-product expansion into slots is handled separately by
 * expandMatrix().
 *
 * @param {string} filePath - Path to the YAML file.
 * @returns {Promise<MatrixFile>} The parsed, validated matrix file.
 * @throws {Error} When the file is unreadable or fails validation.
 */
export async function parseMatrixFile(filePath) {
    const raw = await fs.readFile(filePath, "utf-8");
    const parsed = YAML.parse(raw);
    if (!parsed || typeof parsed !== "object") {
        throw new Error(`Invalid matrix file: ${filePath}`);
    }
    if (!parsed.prompt || typeof parsed.prompt !== "string") {
        throw new Error("Matrix file must have a 'prompt' field (string)");
    }
    if (!Array.isArray(parsed.matrix) || parsed.matrix.length === 0) {
        throw new Error("Matrix file must have a non-empty 'matrix' array");
    }
    // Validate entries
    for (const entry of parsed.matrix) {
        if (!entry.adapter || typeof entry.adapter !== "string") {
            throw new Error("Each matrix entry must have an 'adapter' field (string)");
        }
        // `model` is optional, but when present it must be a string or a
        // list of strings — reject schema mistakes here rather than letting
        // them surface later as broken worktree/branch names.
        if (entry.model !== undefined && entry.model !== null) {
            const models = Array.isArray(entry.model) ? entry.model : [entry.model];
            if (models.some((m) => typeof m !== "string")) {
                throw new Error("Matrix entry 'model' must be a string or an array of strings");
            }
        }
    }
    return parsed;
}
41
/**
 * Expand a MatrixFile into AdapterSlot[], producing one slot per
 * (adapter, model) pair. Entries without a model yield a single slot
 * with no model set.
 */
export function expandMatrix(matrix) {
    return matrix.matrix.flatMap((entry) => {
        const models = normalizeToArray(entry.model);
        return models.length === 0
            ? [{ adapter: entry.adapter }]
            : models.map((model) => ({ adapter: entry.adapter, model }));
    });
}
62
/**
 * Coerce a string | string[] | null | undefined value into an array:
 * nullish → [], array → unchanged, anything else → one-element array.
 */
function normalizeToArray(value) {
    if (value == null) {
        return [];
    }
    return Array.isArray(value) ? value : [value];
}
@@ -0,0 +1,16 @@
1
/**
 * Save the current process environment to disk (as JSON).
 * Called at daemon start time, while we still have the user's shell env.
 * Write failures are logged as warnings rather than thrown.
 */
export declare function saveEnvironment(configDir: string): Promise<void>;
/**
 * Load the saved environment from disk.
 * Returns undefined if the env file doesn't exist or is corrupt.
 */
export declare function loadSavedEnvironment(configDir: string): Promise<Record<string, string> | undefined>;
/**
 * Build an augmented environment for spawning subprocesses.
 * Starts from the saved daemon env (falling back to process.env), appends
 * well-known bin directories to PATH, then applies extraEnv overrides
 * last, so binaries stay findable even when the daemon is detached from
 * the shell.
 */
export declare function buildSpawnEnv(savedEnv?: Record<string, string>, extraEnv?: Record<string, string>): Record<string, string>;
@@ -0,0 +1,85 @@
1
+ import * as fs from "node:fs/promises";
2
+ import * as os from "node:os";
3
+ import * as path from "node:path";
4
+ const ENV_FILE = "daemon-env.json";
5
/**
 * Well-known bin directories that should be reachable on PATH when
 * spawning subprocesses: system dirs, Homebrew, and per-user install
 * locations for npm, mise and cargo.
 */
function getCommonBinDirs() {
    const home = os.homedir();
    const userDirs = {
        local: path.join(home, ".local", "bin"),
        npm: path.join(home, ".npm-global", "bin"),
        mise: path.join(home, ".local", "share", "mise", "shims"),
        cargo: path.join(home, ".cargo", "bin"),
    };
    // Order matters: this is the order in which dirs are appended to PATH.
    return [
        userDirs.local,
        "/usr/local/bin",
        "/usr/bin",
        "/bin",
        "/usr/sbin",
        "/sbin",
        "/opt/homebrew/bin",
        userDirs.npm,
        userDirs.mise,
        userDirs.cargo,
    ];
}
24
/**
 * Persist the current process environment to <configDir>/daemon-env.json.
 * Called at daemon start, while the user's shell environment is intact.
 * Writes to a temp file then renames so readers never see a partial
 * file; failures are logged as warnings rather than thrown.
 */
export async function saveEnvironment(configDir) {
    const envPath = path.join(configDir, ENV_FILE);
    const tmpPath = `${envPath}.tmp`;
    try {
        await fs.writeFile(tmpPath, JSON.stringify(process.env));
        await fs.rename(tmpPath, envPath);
    }
    catch (err) {
        console.error(`Warning: could not save environment: ${err.message}`);
    }
}
39
/**
 * Read the environment previously written by saveEnvironment().
 * Returns the parsed object, or undefined when the file is missing,
 * unreadable, or does not contain a JSON object.
 */
export async function loadSavedEnvironment(configDir) {
    try {
        const raw = await fs.readFile(path.join(configDir, ENV_FILE), "utf-8");
        const parsed = JSON.parse(raw);
        return typeof parsed === "object" && parsed !== null ? parsed : undefined;
    }
    catch {
        // Missing or corrupt file — treat as "nothing saved".
        return undefined;
    }
}
57
/**
 * Build an augmented environment for spawning subprocesses.
 *
 * Starts from `savedEnv` (or process.env when none was saved), appends
 * any well-known bin directories that are not already on PATH, then
 * applies `extraEnv` overrides last — ensuring binaries stay findable
 * even when the daemon is detached from the user's shell.
 *
 * @param {Record<string, string>} [savedEnv] - Env captured at daemon start.
 * @param {Record<string, string>} [extraEnv] - Overrides applied last.
 * @returns {Record<string, string>} The merged environment.
 */
export function buildSpawnEnv(savedEnv, extraEnv) {
    const base = {};
    const source = savedEnv || process.env;
    // Copy the source env, skipping undefined values (process.env allows them).
    for (const [k, v] of Object.entries(source)) {
        if (v !== undefined)
            base[k] = v;
    }
    // Augment PATH with common bin directories not already present.
    // Use path.delimiter (":" on POSIX, ";" on Windows) rather than a
    // hard-coded ":" so the merge works cross-platform.
    const sep = path.delimiter;
    const existingDirs = (base.PATH || "").split(sep).filter(Boolean);
    const existingSet = new Set(existingDirs);
    const newDirs = getCommonBinDirs().filter((d) => !existingSet.has(d));
    if (newDirs.length > 0) {
        base.PATH = [...existingDirs, ...newDirs].join(sep);
    }
    // Apply extra env overrides last so callers can force specific values.
    if (extraEnv) {
        for (const [k, v] of Object.entries(extraEnv)) {
            base[k] = v;
        }
    }
    return base;
}
@@ -0,0 +1,20 @@
1
/**
 * Read the first N lines of a file by reading only `maxBytes` from the start.
 * Avoids allocating the entire file into memory for large JSONL files.
 * Empty lines are dropped; when the read did not reach EOF, the trailing
 * (possibly partial) line is discarded.
 *
 * @param filePath - Path to the file
 * @param maxLines - Maximum number of lines to return
 * @param maxBytes - Maximum bytes to read from the start (default 8192)
 * @returns Array of complete lines (up to maxLines)
 */
export declare function readHead(filePath: string, maxLines: number, maxBytes?: number): Promise<string[]>;
/**
 * Read the last N lines of a file by reading only `maxBytes` from the end.
 * Avoids allocating the entire file into memory for large JSONL files.
 * Empty lines are dropped; when the window does not start at offset 0,
 * the first line of the window is discarded since it may be partial.
 *
 * @param filePath - Path to the file
 * @param maxLines - Maximum number of lines to return
 * @param maxBytes - Maximum bytes to read from the end (default 65536)
 * @returns Array of complete lines (up to maxLines, in order)
 */
export declare function readTail(filePath: string, maxLines: number, maxBytes?: number): Promise<string[]>;
@@ -0,0 +1,66 @@
1
+ import * as fs from "node:fs/promises";
2
/**
 * Read the first N lines of a file without loading it all into memory:
 * only `maxBytes` are read from the start. When the read stops before
 * EOF, the final (possibly partial) chunk is discarded. Empty lines are
 * dropped.
 *
 * @param filePath - Path to the file
 * @param maxLines - Maximum number of lines to return
 * @param maxBytes - Maximum bytes to read from the start (default 8192)
 * @returns Array of complete lines (up to maxLines)
 */
export async function readHead(filePath, maxLines, maxBytes = 8192) {
    const fh = await fs.open(filePath, "r");
    try {
        const { size } = await fh.stat();
        const wanted = Math.min(maxBytes, size);
        if (wanted === 0) {
            return [];
        }
        const buf = Buffer.alloc(wanted);
        const { bytesRead } = await fh.read(buf, 0, wanted, 0);
        if (bytesRead === 0) {
            return [];
        }
        const lines = buf.subarray(0, bytesRead).toString("utf-8").split("\n");
        // Reading less than the whole file means the last chunk may be an
        // incomplete line — drop it.
        if (bytesRead < size) {
            lines.pop();
        }
        return lines.filter((line) => line.length > 0).slice(0, maxLines);
    }
    finally {
        await fh.close();
    }
}
34
/**
 * Read the last N lines of a file without loading it all into memory:
 * only `maxBytes` are read from the end. When the window does not start
 * at the beginning of the file, the first chunk may be a partial line
 * and is discarded. Empty lines are dropped; order is preserved.
 *
 * @param filePath - Path to the file
 * @param maxLines - Maximum number of lines to return
 * @param maxBytes - Maximum bytes to read from the end (default 65536)
 * @returns Array of complete lines (up to maxLines, in order)
 */
export async function readTail(filePath, maxLines, maxBytes = 65536) {
    const fh = await fs.open(filePath, "r");
    try {
        const { size } = await fh.stat();
        if (size === 0) {
            return [];
        }
        const wanted = Math.min(maxBytes, size);
        const offset = Math.max(0, size - wanted);
        const buf = Buffer.alloc(wanted);
        const { bytesRead } = await fh.read(buf, 0, wanted, offset);
        if (bytesRead === 0) {
            return [];
        }
        const lines = buf.subarray(0, bytesRead).toString("utf-8").split("\n");
        // A mid-file window may begin in the middle of a line — drop it.
        if (offset > 0) {
            lines.shift();
        }
        return lines.filter((line) => line.length > 0).slice(-maxLines);
    }
    finally {
        await fh.close();
    }
}
@@ -0,0 +1,14 @@
1
/**
 * Resolve the absolute path to a binary, checking known locations first,
 * then falling back to a PATH lookup (`which`). Successful resolutions
 * are cached per binary name; the bare-name fallback is NOT cached, so
 * resolution is retried on the next call.
 *
 * @param name - Binary name (e.g., "claude", "codex", "pi")
 * @param knownLocations - Additional absolute paths to check first
 * @returns Resolved absolute path (symlinks resolved), or the bare name
 *          as a last resort (letting PATH resolve it at spawn time)
 */
export declare function resolveBinaryPath(name: string, knownLocations?: string[]): Promise<string>;
/**
 * Clear the resolved path cache. Call this when binaries may have been
 * updated (e.g., on daemon restart).
 */
export declare function clearBinaryCache(): void;
@@ -0,0 +1,66 @@
1
+ import { execFile } from "node:child_process";
2
+ import * as fs from "node:fs/promises";
3
+ import * as os from "node:os";
4
+ import * as path from "node:path";
5
+ import { promisify } from "node:util";
6
+ const execFileAsync = promisify(execFile);
7
+ /** Cache of resolved binary paths: name → absolute path */
8
+ const resolvedCache = new Map();
9
/**
 * Resolve the absolute path to a binary, checking known locations first,
 * then falling back to a PATH lookup. Successful resolutions are cached
 * per binary name; the bare-name fallback is not cached, so a later call
 * can retry after the binary is installed.
 *
 * @param name - Binary name (e.g., "claude", "codex", "pi")
 * @param knownLocations - Additional absolute paths to check first
 * @returns Resolved absolute path, or bare name as last resort
 */
export async function resolveBinaryPath(name, knownLocations = []) {
    const cached = resolvedCache.get(name);
    if (cached)
        return cached;
    const home = os.homedir();
    // Default well-known locations for common toolchains
    const defaultLocations = [
        path.join(home, ".local", "bin", name),
        `/usr/local/bin/${name}`,
        `/opt/homebrew/bin/${name}`, // Homebrew Apple Silicon
        path.join(home, ".npm-global", "bin", name),
        path.join(home, ".local", "share", "mise", "shims", name),
        path.join(home, ".cargo", "bin", name),
    ];
    const candidates = [...knownLocations, ...defaultLocations];
    for (const c of candidates) {
        try {
            await fs.access(c, fs.constants.X_OK);
            // Resolve symlinks to get the actual binary path
            const resolved = await fs.realpath(c);
            await fs.access(resolved, fs.constants.X_OK);
            resolvedCache.set(name, resolved);
            return resolved;
        }
        catch {
            // Try next candidate
        }
    }
    // Fall back to a PATH lookup. `which` does not exist on Windows, so use
    // `where` there; `where` may print multiple matches — take the first line.
    const lookup = process.platform === "win32" ? "where" : "which";
    try {
        const { stdout } = await execFileAsync(lookup, [name]);
        const first = stdout.split("\n")[0].trim();
        if (first) {
            resolvedCache.set(name, first);
            return first;
        }
    }
    catch {
        // Lookup failed — fall through to bare name
    }
    // Last resort: bare name (let PATH resolve it at spawn time)
    return name;
}
60
/**
 * Clear the resolved path cache. Call this when binaries may have been
 * updated (e.g., on daemon restart), so subsequent resolveBinaryPath()
 * calls re-probe the filesystem instead of returning stale paths.
 */
export function clearBinaryCache() {
    resolvedCache.clear();
}
@@ -22,3 +22,25 @@ export declare function createWorktree(opts: WorktreeCreateOpts): Promise<Worktr
22
22
  * Remove a git worktree.
23
23
  */
24
24
  export declare function removeWorktree(repo: string, worktreePath: string): Promise<void>;
25
/** Info about an existing worktree from `git worktree list` */
export interface WorktreeListEntry {
    /** Absolute path of the worktree. */
    path: string;
    /** Branch name, without the refs/heads/ prefix. NOTE(review): the parser only sets this when a "branch" porcelain line is present — may be undefined for detached worktrees despite this type; confirm. */
    branch: string;
    /** Value of the "HEAD" porcelain line for this worktree. */
    head: string;
    /** Whether this is the bare/main worktree */
    bare: boolean;
}
/**
 * List all git worktrees for a repo.
 * Parses `git worktree list --porcelain` output.
 */
export declare function listWorktrees(repo: string): Promise<WorktreeListEntry[]>;
/**
 * Remove a worktree (with `--force`) and optionally delete its branch.
 * Branch-deletion failures are ignored; `deletedBranch` is set only when
 * the delete succeeded.
 */
export declare function cleanWorktree(repo: string, worktreePath: string, opts?: {
    deleteBranch?: boolean;
}): Promise<{
    removedPath: string;
    deletedBranch?: string;
}>;
package/dist/worktree.js CHANGED
@@ -63,3 +63,71 @@ export async function removeWorktree(repo, worktreePath) {
63
63
  cwd: repoResolved,
64
64
  });
65
65
  }
66
/**
 * List all git worktrees for a repo by parsing the porcelain output of
 * `git worktree list --porcelain`, where each worktree is a block of
 * "key value" lines and a "worktree <path>" line starts a new entry.
 */
export async function listWorktrees(repo) {
    const repoResolved = path.resolve(repo);
    const { stdout } = await execFileAsync("git", ["worktree", "list", "--porcelain"], { cwd: repoResolved });
    const entries = [];
    let current = {};
    const flush = () => {
        if (current.path)
            entries.push(current);
    };
    for (const line of stdout.split("\n")) {
        if (line.startsWith("worktree ")) {
            // A new entry begins; emit the previous one if it was populated.
            flush();
            current = { path: line.replace("worktree ", ""), bare: false };
        }
        else if (line.startsWith("HEAD ")) {
            current.head = line.replace("HEAD ", "");
        }
        else if (line.startsWith("branch ")) {
            current.branch = line.replace("branch refs/heads/", "");
        }
        else if (line === "bare") {
            current.bare = true;
        }
    }
    flush();
    return entries;
}
98
/**
 * Remove a worktree (with --force) and optionally delete its branch.
 *
 * @param repo - Path to the main repository.
 * @param worktreePath - Path of the worktree to remove.
 * @param opts - `{ deleteBranch }`: when true, also delete the worktree's
 *   branch after removal. Deletion failures are silently ignored.
 * @returns `{ removedPath, deletedBranch? }` — deletedBranch is set only
 *   when a branch was actually deleted.
 */
export async function cleanWorktree(repo, worktreePath, opts) {
    const repoResolved = path.resolve(repo);
    // Look up the branch name BEFORE removing — afterwards the worktree's
    // HEAD is gone.
    let branch;
    if (opts?.deleteBranch) {
        try {
            const { stdout } = await execFileAsync("git", ["rev-parse", "--abbrev-ref", "HEAD"], { cwd: worktreePath });
            branch = stdout.trim();
            // A detached worktree reports the literal string "HEAD" — there
            // is no branch to delete in that case.
            if (branch === "HEAD") {
                branch = undefined;
            }
        }
        catch {
            // Worktree might be broken — still try to remove it below
        }
    }
    await execFileAsync("git", ["worktree", "remove", "--force", worktreePath], {
        cwd: repoResolved,
    });
    const result = {
        removedPath: worktreePath,
    };
    // Delete the branch if requested and one was found
    if (branch && opts?.deleteBranch) {
        try {
            await execFileAsync("git", ["branch", "-D", branch], {
                cwd: repoResolved,
            });
            result.deletedBranch = branch;
        }
        catch {
            // Branch might already be gone
        }
    }
    return result;
}