compound-agent 1.2.10 → 1.2.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +24 -1
- package/README.md +11 -0
- package/dist/cli.js +478 -70
- package/dist/cli.js.map +1 -1
- package/dist/index.d.ts +4 -167
- package/dist/index.js +12 -8
- package/dist/index.js.map +1 -1
- package/package.json +1 -1
package/dist/cli.js
CHANGED
|
@@ -1,9 +1,9 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
import { Command } from 'commander';
|
|
3
3
|
import { getLlama, resolveModelFile } from 'node-llama-cpp';
|
|
4
|
-
import { mkdirSync, writeFileSync, statSync,
|
|
4
|
+
import { mkdirSync, writeFileSync, statSync, unlinkSync, existsSync, readFileSync, copyFileSync, chmodSync, readdirSync } from 'fs';
|
|
5
5
|
import { homedir } from 'os';
|
|
6
|
-
import { join, dirname, resolve, relative } from 'path';
|
|
6
|
+
import path, { join, dirname, resolve, relative } from 'path';
|
|
7
7
|
import * as fs from 'fs/promises';
|
|
8
8
|
import { readFile, mkdir, appendFile, writeFile, chmod, rm, rename } from 'fs/promises';
|
|
9
9
|
import { createHash } from 'crypto';
|
|
@@ -31,6 +31,27 @@ function parseLimit(value, name) {
|
|
|
31
31
|
function getRepoRoot() {
|
|
32
32
|
return process.env["COMPOUND_AGENT_ROOT"] ?? process.cwd();
|
|
33
33
|
}
|
|
34
|
+
var EPIC_ID_PATTERN = /^[a-zA-Z0-9_-]+$/;
|
|
35
|
+
function validateEpicId(epicId) {
|
|
36
|
+
if (!EPIC_ID_PATTERN.test(epicId)) {
|
|
37
|
+
throw new Error(`Invalid epic ID: "${epicId}" (must be alphanumeric with hyphens/underscores)`);
|
|
38
|
+
}
|
|
39
|
+
}
|
|
40
|
+
function parseBdShowDeps(raw) {
|
|
41
|
+
const data = JSON.parse(raw);
|
|
42
|
+
const issue = Array.isArray(data) ? data[0] : data;
|
|
43
|
+
if (!issue) return [];
|
|
44
|
+
const depsArray = issue.depends_on ?? issue.dependencies ?? [];
|
|
45
|
+
return depsArray.map((dep) => ({
|
|
46
|
+
id: dep.id ?? "",
|
|
47
|
+
title: dep.title ?? "",
|
|
48
|
+
status: dep.status ?? "open"
|
|
49
|
+
}));
|
|
50
|
+
}
|
|
51
|
+
function shortId(fullId) {
|
|
52
|
+
const parts = fullId.split("-");
|
|
53
|
+
return parts[parts.length - 1] ?? fullId;
|
|
54
|
+
}
|
|
34
55
|
var MODEL_URI = "hf:ggml-org/embeddinggemma-300M-qat-q4_0-GGUF/embeddinggemma-300M-qat-Q4_0.gguf";
|
|
35
56
|
var MODEL_FILENAME = "hf_ggml-org_embeddinggemma-300M-qat-Q4_0.gguf";
|
|
36
57
|
var DEFAULT_MODEL_DIR = join(homedir(), ".node-llama-cpp", "models");
|
|
@@ -805,6 +826,7 @@ async function compact(repoRoot) {
|
|
|
805
826
|
}
|
|
806
827
|
|
|
807
828
|
// src/memory/search/vector.ts
|
|
829
|
+
var cctEmbeddingCache = /* @__PURE__ */ new Map();
|
|
808
830
|
function cosineSimilarity(a, b) {
|
|
809
831
|
if (a.length !== b.length) {
|
|
810
832
|
throw new Error("Vectors must have same length");
|
|
@@ -867,7 +889,13 @@ async function searchVector(repoRoot, query, options) {
|
|
|
867
889
|
for (const pattern of cctPatterns) {
|
|
868
890
|
try {
|
|
869
891
|
const text = `${pattern.name} ${pattern.description}`;
|
|
870
|
-
const
|
|
892
|
+
const hash = contentHash(pattern.name, pattern.description);
|
|
893
|
+
const cacheKey = `${pattern.id}:${hash}`;
|
|
894
|
+
let vec = cctEmbeddingCache.get(cacheKey);
|
|
895
|
+
if (!vec) {
|
|
896
|
+
vec = await embedText(text);
|
|
897
|
+
cctEmbeddingCache.set(cacheKey, vec);
|
|
898
|
+
}
|
|
871
899
|
const score = cosineSimilarity(queryVector, vec);
|
|
872
900
|
scored.push({ lesson: cctToMemoryItem(pattern), score });
|
|
873
901
|
} catch {
|
|
@@ -1070,11 +1098,17 @@ function registerCompoundCommands(program2) {
|
|
|
1070
1098
|
process.exitCode = 1;
|
|
1071
1099
|
return;
|
|
1072
1100
|
}
|
|
1101
|
+
openDb(repoRoot);
|
|
1073
1102
|
const embeddings = [];
|
|
1074
1103
|
try {
|
|
1075
1104
|
for (const item of items) {
|
|
1076
1105
|
const text = `${item.trigger} ${item.insight}`;
|
|
1077
|
-
const
|
|
1106
|
+
const hash = contentHash(item.trigger, item.insight);
|
|
1107
|
+
let vec = getCachedEmbedding(repoRoot, item.id, hash);
|
|
1108
|
+
if (!vec) {
|
|
1109
|
+
vec = await embedText(text);
|
|
1110
|
+
setCachedEmbedding(repoRoot, item.id, vec, hash);
|
|
1111
|
+
}
|
|
1078
1112
|
embeddings.push(Array.isArray(vec) ? vec : Array.from(vec));
|
|
1079
1113
|
}
|
|
1080
1114
|
} catch (err) {
|
|
@@ -1082,6 +1116,8 @@ function registerCompoundCommands(program2) {
|
|
|
1082
1116
|
console.error("Run: npx ca download-model");
|
|
1083
1117
|
process.exitCode = 1;
|
|
1084
1118
|
return;
|
|
1119
|
+
} finally {
|
|
1120
|
+
closeDb();
|
|
1085
1121
|
}
|
|
1086
1122
|
const { clusters } = clusterBySimilarity(items, embeddings);
|
|
1087
1123
|
const multiClusters = clusters.filter((c) => c.length >= 2);
|
|
@@ -2132,7 +2168,7 @@ function formatError(command, code, message, remediation) {
|
|
|
2132
2168
|
}
|
|
2133
2169
|
var STATE_DIR = ".claude";
|
|
2134
2170
|
var STATE_FILE = ".ca-phase-state.json";
|
|
2135
|
-
var
|
|
2171
|
+
var EPIC_ID_PATTERN2 = /^[a-zA-Z0-9_-]+$/;
|
|
2136
2172
|
var PHASES = ["brainstorm", "plan", "work", "review", "compound"];
|
|
2137
2173
|
var GATES = ["post-plan", "gate-3", "gate-4", "final"];
|
|
2138
2174
|
var PHASE_INDEX = {
|
|
@@ -2187,9 +2223,9 @@ function initPhaseState(repoRoot, epicId) {
|
|
|
2187
2223
|
}
|
|
2188
2224
|
function getPhaseState(repoRoot) {
|
|
2189
2225
|
try {
|
|
2190
|
-
const
|
|
2191
|
-
if (!existsSync(
|
|
2192
|
-
const raw = readFileSync(
|
|
2226
|
+
const path2 = getStatePath(repoRoot);
|
|
2227
|
+
if (!existsSync(path2)) return null;
|
|
2228
|
+
const raw = readFileSync(path2, "utf-8");
|
|
2193
2229
|
const parsed = JSON.parse(raw);
|
|
2194
2230
|
return validatePhaseState(parsed) ? parsed : null;
|
|
2195
2231
|
} catch {
|
|
@@ -2215,8 +2251,8 @@ function startPhase(repoRoot, phase) {
|
|
|
2215
2251
|
}
|
|
2216
2252
|
function cleanPhaseState(repoRoot) {
|
|
2217
2253
|
try {
|
|
2218
|
-
const
|
|
2219
|
-
if (existsSync(
|
|
2254
|
+
const path2 = getStatePath(repoRoot);
|
|
2255
|
+
if (existsSync(path2)) unlinkSync(path2);
|
|
2220
2256
|
} catch {
|
|
2221
2257
|
}
|
|
2222
2258
|
}
|
|
@@ -2246,9 +2282,10 @@ function printStatusHuman(state) {
|
|
|
2246
2282
|
}
|
|
2247
2283
|
function registerPhaseSubcommands(phaseCheck, getDryRun, repoRoot) {
|
|
2248
2284
|
phaseCheck.command("init <epic-id>").description("Initialize phase state for an epic").action((epicId) => {
|
|
2249
|
-
if (!
|
|
2285
|
+
if (!EPIC_ID_PATTERN2.test(epicId)) {
|
|
2250
2286
|
console.error(`Invalid epic ID: "${epicId}"`);
|
|
2251
|
-
process.
|
|
2287
|
+
process.exitCode = 1;
|
|
2288
|
+
return;
|
|
2252
2289
|
}
|
|
2253
2290
|
if (getDryRun()) {
|
|
2254
2291
|
console.log(`[dry-run] Would initialize phase state for epic ${epicId} in ${repoRoot()}`);
|
|
@@ -2260,7 +2297,8 @@ function registerPhaseSubcommands(phaseCheck, getDryRun, repoRoot) {
|
|
|
2260
2297
|
phaseCheck.command("start <phase>").description("Start or resume a phase").action((phase) => {
|
|
2261
2298
|
if (!isPhaseName(phase)) {
|
|
2262
2299
|
console.error(`Invalid phase: "${phase}". Valid phases: ${PHASES.join(", ")}`);
|
|
2263
|
-
process.
|
|
2300
|
+
process.exitCode = 1;
|
|
2301
|
+
return;
|
|
2264
2302
|
}
|
|
2265
2303
|
if (getDryRun()) {
|
|
2266
2304
|
console.log(`[dry-run] Would start phase ${phase}`);
|
|
@@ -2269,14 +2307,16 @@ function registerPhaseSubcommands(phaseCheck, getDryRun, repoRoot) {
|
|
|
2269
2307
|
const state = startPhase(repoRoot(), phase);
|
|
2270
2308
|
if (state === null) {
|
|
2271
2309
|
console.error("No active phase state. Run: ca phase-check init <epic-id>");
|
|
2272
|
-
process.
|
|
2310
|
+
process.exitCode = 1;
|
|
2311
|
+
return;
|
|
2273
2312
|
}
|
|
2274
2313
|
console.log(`Phase updated: ${state.current_phase} (${state.phase_index}/5).`);
|
|
2275
2314
|
});
|
|
2276
2315
|
phaseCheck.command("gate <gate-name>").description("Record a phase gate as passed").action((gateName) => {
|
|
2277
2316
|
if (!isGateName(gateName)) {
|
|
2278
2317
|
console.error(`Invalid gate: "${gateName}". Valid gates: ${GATES.join(", ")}`);
|
|
2279
|
-
process.
|
|
2318
|
+
process.exitCode = 1;
|
|
2319
|
+
return;
|
|
2280
2320
|
}
|
|
2281
2321
|
if (getDryRun()) {
|
|
2282
2322
|
console.log(`[dry-run] Would record gate ${gateName}`);
|
|
@@ -2285,7 +2325,8 @@ function registerPhaseSubcommands(phaseCheck, getDryRun, repoRoot) {
|
|
|
2285
2325
|
const state = recordGatePassed(repoRoot(), gateName);
|
|
2286
2326
|
if (state === null) {
|
|
2287
2327
|
console.error("No active phase state. Run: ca phase-check init <epic-id>");
|
|
2288
|
-
process.
|
|
2328
|
+
process.exitCode = 1;
|
|
2329
|
+
return;
|
|
2289
2330
|
}
|
|
2290
2331
|
if (gateName === "final") {
|
|
2291
2332
|
console.log("Final gate recorded. Phase state cleaned.");
|
|
@@ -2345,8 +2386,8 @@ function processPhaseGuard(repoRoot, toolName, _toolInput) {
|
|
|
2345
2386
|
|
|
2346
2387
|
// src/setup/hooks-read-tracker.ts
|
|
2347
2388
|
var SKILL_PATH_PATTERN = /(?:^|\/)\.claude\/skills\/compound\/([^/]+)\/SKILL\.md$/;
|
|
2348
|
-
function normalizePath(
|
|
2349
|
-
return
|
|
2389
|
+
function normalizePath(path2) {
|
|
2390
|
+
return path2.replaceAll("\\", "/");
|
|
2350
2391
|
}
|
|
2351
2392
|
function toCanonicalSkillPath(filePath) {
|
|
2352
2393
|
const normalized = normalizePath(filePath);
|
|
@@ -2744,7 +2785,7 @@ function registerHooksCommand(program2) {
|
|
|
2744
2785
|
)
|
|
2745
2786
|
);
|
|
2746
2787
|
}
|
|
2747
|
-
process.
|
|
2788
|
+
process.exitCode = 1;
|
|
2748
2789
|
}
|
|
2749
2790
|
});
|
|
2750
2791
|
}
|
|
@@ -2939,13 +2980,9 @@ Spawned as a **subagent**. Follow the **memory-analyst** role skill for full ins
|
|
|
2939
2980
|
`
|
|
2940
2981
|
};
|
|
2941
2982
|
|
|
2942
|
-
// src/setup/templates/agents-workflow.ts
|
|
2943
|
-
var WORKFLOW_AGENT_TEMPLATES = {};
|
|
2944
|
-
|
|
2945
2983
|
// src/setup/templates/agents.ts
|
|
2946
2984
|
var AGENT_TEMPLATES = {
|
|
2947
2985
|
...REVIEW_AGENT_TEMPLATES,
|
|
2948
|
-
...WORKFLOW_AGENT_TEMPLATES,
|
|
2949
2986
|
...PHASE11_AGENT_TEMPLATES,
|
|
2950
2987
|
...EXTERNAL_AGENT_TEMPLATES
|
|
2951
2988
|
};
|
|
@@ -3608,6 +3645,17 @@ $ARGUMENTS
|
|
|
3608
3645
|
# LFG
|
|
3609
3646
|
|
|
3610
3647
|
**MANDATORY FIRST STEP -- NON-NEGOTIABLE**: Use the Read tool to open and read \`.claude/skills/compound/lfg/SKILL.md\` NOW. Do NOT proceed until you have read the complete skill file. It contains the full orchestration workflow you must follow.
|
|
3648
|
+
`,
|
|
3649
|
+
"set-worktree.md": `---
|
|
3650
|
+
name: compound:set-worktree
|
|
3651
|
+
description: Set up a git worktree for isolated epic execution
|
|
3652
|
+
argument-hint: "<epic-id>"
|
|
3653
|
+
---
|
|
3654
|
+
$ARGUMENTS
|
|
3655
|
+
|
|
3656
|
+
# Set Worktree
|
|
3657
|
+
|
|
3658
|
+
**MANDATORY FIRST STEP -- NON-NEGOTIABLE**: Use the Read tool to open and read \`.claude/skills/compound/set-worktree/SKILL.md\` NOW. Do NOT proceed until you have read the complete skill file. It contains the full workflow you must follow.
|
|
3611
3659
|
`,
|
|
3612
3660
|
// =========================================================================
|
|
3613
3661
|
// Utility commands (CLI wrappers)
|
|
@@ -3785,6 +3833,7 @@ Create a concrete implementation plan by decomposing work into small, testable t
|
|
|
3785
3833
|
8. Map dependencies between tasks
|
|
3786
3834
|
9. Create beads issues: \`bd create --title="..." --type=task\`
|
|
3787
3835
|
10. Create review and compound blocking tasks (\`bd create\` + \`bd dep add\`) that depend on work tasks \u2014 these survive compaction and surface via \`bd ready\` after work completes
|
|
3836
|
+
11. Run \`npx ca worktree wire-deps <epic-id>\` to connect merge dependencies (graceful no-op if no worktree is active)
|
|
3788
3837
|
|
|
3789
3838
|
## Memory Integration
|
|
3790
3839
|
- Run \`npx ca search\` for patterns related to the feature area
|
|
@@ -3817,6 +3866,7 @@ Create a concrete implementation plan by decomposing work into small, testable t
|
|
|
3817
3866
|
After creating all tasks, verify review and compound tasks exist:
|
|
3818
3867
|
- Run \`bd list --status=open\` and check for a "Review:" task and a "Compound:" task
|
|
3819
3868
|
- If either is missing, CREATE THEM NOW. The plan is NOT complete without these gates.
|
|
3869
|
+
- If a Merge: task exists in the dependency graph, verify it has Review and Compound as blockers (run \`bd show <merge-id>\` to confirm)
|
|
3820
3870
|
`,
|
|
3821
3871
|
work: `---
|
|
3822
3872
|
name: Work
|
|
@@ -4015,6 +4065,48 @@ Before closing the epic:
|
|
|
4015
4065
|
- Run \`pnpm test\` and \`pnpm lint\` -- must pass
|
|
4016
4066
|
If verify-gates fails, the missing phase was SKIPPED. Go back and complete it.
|
|
4017
4067
|
CRITICAL: 3/5 phases is NOT success. All 5 phases are required.
|
|
4068
|
+
`,
|
|
4069
|
+
"set-worktree": `---
|
|
4070
|
+
name: Set Worktree
|
|
4071
|
+
description: Configure an isolated git worktree for parallel epic execution
|
|
4072
|
+
---
|
|
4073
|
+
|
|
4074
|
+
# Set Worktree Skill
|
|
4075
|
+
|
|
4076
|
+
## Overview
|
|
4077
|
+
Set up a git worktree to isolate epic work from the main branch. This creates a separate working directory, installs dependencies, and creates a Merge beads task that orchestrates the merge lifecycle.
|
|
4078
|
+
|
|
4079
|
+
## Methodology
|
|
4080
|
+
1. Validate the epic exists: run \`bd show <epic-id>\` to confirm the epic is open
|
|
4081
|
+
2. Search memory with \`npx ca search "worktree"\` for past worktree lessons
|
|
4082
|
+
3. Run the worktree creation command: \`npx ca worktree create <epic-id>\`
|
|
4083
|
+
4. Verify output: confirm worktree path, branch name, and Merge task ID are reported
|
|
4084
|
+
5. Note the Merge task ID -- it will surface via \`bd ready\` after all work tasks complete
|
|
4085
|
+
6. Confirm the worktree is ready: check that \`.claude/\` directory exists in the worktree
|
|
4086
|
+
7. Inform the user: the worktree is set up, they can now run \`/compound:lfg\` to start work
|
|
4087
|
+
|
|
4088
|
+
## What Happens Under the Hood
|
|
4089
|
+
- A git worktree is created at \`../<repo>-wt-<epic-id>\` on branch \`epic/<epic-id>\`
|
|
4090
|
+
- Dependencies are installed via \`pnpm install --frozen-lockfile\`
|
|
4091
|
+
- Lessons JSONL is copied (not symlinked) to the worktree
|
|
4092
|
+
- A Merge beads task is created with the epic as its dependent
|
|
4093
|
+
- When all work completes, the Merge task surfaces via \`bd ready\`
|
|
4094
|
+
|
|
4095
|
+
## Memory Integration
|
|
4096
|
+
- Run \`npx ca search "worktree"\` before creating to check for known issues
|
|
4097
|
+
- Run \`npx ca learn\` if you discover worktree-specific knowledge
|
|
4098
|
+
|
|
4099
|
+
## Common Pitfalls
|
|
4100
|
+
- Creating a worktree for an epic that already has one (the command checks for this)
|
|
4101
|
+
- Forgetting to run \`/compound:lfg\` after setup (the worktree alone does nothing)
|
|
4102
|
+
- Not noting the Merge task ID (needed for later reference)
|
|
4103
|
+
- Running from inside an existing worktree (must run from main repo)
|
|
4104
|
+
|
|
4105
|
+
## Quality Criteria
|
|
4106
|
+
- Worktree was created successfully (path exists)
|
|
4107
|
+
- \`pnpm install\` completed without errors
|
|
4108
|
+
- Merge beads task exists and is linked to the epic
|
|
4109
|
+
- User was informed of next steps (\`/compound:lfg\`)
|
|
4018
4110
|
`,
|
|
4019
4111
|
lfg: `---
|
|
4020
4112
|
name: LFG
|
|
@@ -4697,7 +4789,8 @@ function registerClaudeSubcommand(setupCommand) {
|
|
|
4697
4789
|
} else {
|
|
4698
4790
|
console.error(formatError("setup", "PARSE_ERROR", "Failed to parse settings file", "Check if JSON is valid"));
|
|
4699
4791
|
}
|
|
4700
|
-
process.
|
|
4792
|
+
process.exitCode = 1;
|
|
4793
|
+
return;
|
|
4701
4794
|
}
|
|
4702
4795
|
const alreadyInstalled = hasAllCompoundAgentHooks(settings);
|
|
4703
4796
|
if (options.status) {
|
|
@@ -4934,7 +5027,8 @@ async function showAction(id, options) {
|
|
|
4934
5027
|
const msg = wasDeleted ? `Lesson ${id} not found (deleted)` : `Lesson ${id} not found`;
|
|
4935
5028
|
console.error(formatError("show", "NOT_FOUND", msg, 'Use "ca list" to see available lessons'));
|
|
4936
5029
|
}
|
|
4937
|
-
process.
|
|
5030
|
+
process.exitCode = 1;
|
|
5031
|
+
return;
|
|
4938
5032
|
}
|
|
4939
5033
|
if (options.json) {
|
|
4940
5034
|
console.log(JSON.stringify(item, null, SHOW_JSON_INDENT));
|
|
@@ -4966,7 +5060,8 @@ async function updateAction(id, options) {
|
|
|
4966
5060
|
} else {
|
|
4967
5061
|
console.error(formatError("update", "NO_FIELDS", "No fields to update", "Specify at least one: --insight, --tags, --severity, ..."));
|
|
4968
5062
|
}
|
|
4969
|
-
process.
|
|
5063
|
+
process.exitCode = 1;
|
|
5064
|
+
return;
|
|
4970
5065
|
}
|
|
4971
5066
|
const { items } = await readMemoryItems(repoRoot);
|
|
4972
5067
|
const item = items.find((i) => i.id === id);
|
|
@@ -4978,7 +5073,8 @@ async function updateAction(id, options) {
|
|
|
4978
5073
|
const msg = wasDeleted ? `Lesson ${id} is deleted` : `Lesson ${id} not found`;
|
|
4979
5074
|
console.error(formatError("update", "NOT_FOUND", msg, 'Use "ca list" to see available lessons'));
|
|
4980
5075
|
}
|
|
4981
|
-
process.
|
|
5076
|
+
process.exitCode = 1;
|
|
5077
|
+
return;
|
|
4982
5078
|
}
|
|
4983
5079
|
if (options.severity !== void 0) {
|
|
4984
5080
|
const result = SeveritySchema.safeParse(options.severity);
|
|
@@ -4988,7 +5084,8 @@ async function updateAction(id, options) {
|
|
|
4988
5084
|
} else {
|
|
4989
5085
|
console.error(formatError("update", "INVALID_SEVERITY", `Invalid severity: "${options.severity}"`, "Use --severity high|medium|low"));
|
|
4990
5086
|
}
|
|
4991
|
-
process.
|
|
5087
|
+
process.exitCode = 1;
|
|
5088
|
+
return;
|
|
4992
5089
|
}
|
|
4993
5090
|
}
|
|
4994
5091
|
const updatedItem = buildUpdatedItem(item, options);
|
|
@@ -4999,7 +5096,8 @@ async function updateAction(id, options) {
|
|
|
4999
5096
|
} else {
|
|
5000
5097
|
console.error(formatError("update", "VALIDATION_FAILED", `Schema validation failed: ${validationResult.error.message}`, "Check field values and try again"));
|
|
5001
5098
|
}
|
|
5002
|
-
process.
|
|
5099
|
+
process.exitCode = 1;
|
|
5100
|
+
return;
|
|
5003
5101
|
}
|
|
5004
5102
|
await appendMemoryItem(repoRoot, updatedItem);
|
|
5005
5103
|
await syncIfNeeded(repoRoot);
|
|
@@ -5043,7 +5141,8 @@ async function deleteAction(ids, options) {
|
|
|
5043
5141
|
out.warn(`${warning.id}: ${warning.message}`);
|
|
5044
5142
|
}
|
|
5045
5143
|
if (deleted.length === 0 && warnings.length > 0) {
|
|
5046
|
-
process.
|
|
5144
|
+
process.exitCode = 1;
|
|
5145
|
+
return;
|
|
5047
5146
|
}
|
|
5048
5147
|
}
|
|
5049
5148
|
}
|
|
@@ -5122,7 +5221,8 @@ function registerInvalidationCommands(program2) {
|
|
|
5122
5221
|
const lesson = items.find((l) => l.id === id);
|
|
5123
5222
|
if (!lesson) {
|
|
5124
5223
|
console.error(formatError("wrong", "NOT_FOUND", `Lesson not found: ${id}`, 'Use "ca list" to see available lessons'));
|
|
5125
|
-
process.
|
|
5224
|
+
process.exitCode = 1;
|
|
5225
|
+
return;
|
|
5126
5226
|
}
|
|
5127
5227
|
if (lesson.invalidatedAt) {
|
|
5128
5228
|
out.warn(`Lesson ${id} is already marked as invalid.`);
|
|
@@ -5145,7 +5245,8 @@ function registerInvalidationCommands(program2) {
|
|
|
5145
5245
|
const lesson = items.find((l) => l.id === id);
|
|
5146
5246
|
if (!lesson) {
|
|
5147
5247
|
console.error(formatError("validate", "NOT_FOUND", `Lesson not found: ${id}`, 'Use "ca list" to see available lessons'));
|
|
5148
|
-
process.
|
|
5248
|
+
process.exitCode = 1;
|
|
5249
|
+
return;
|
|
5149
5250
|
}
|
|
5150
5251
|
if (!lesson.invalidatedAt) {
|
|
5151
5252
|
out.info(`Lesson ${id} is not invalidated.`);
|
|
@@ -5168,7 +5269,8 @@ async function exportAction(options) {
|
|
|
5168
5269
|
const sinceDate = new Date(options.since);
|
|
5169
5270
|
if (Number.isNaN(sinceDate.getTime())) {
|
|
5170
5271
|
console.error(formatError("export", "INVALID_DATE", `Invalid date format: ${options.since}`, "Use ISO8601 format (e.g., 2024-01-15)"));
|
|
5171
|
-
process.
|
|
5272
|
+
process.exitCode = 1;
|
|
5273
|
+
return;
|
|
5172
5274
|
}
|
|
5173
5275
|
filtered = filtered.filter((item) => new Date(item.created) >= sinceDate);
|
|
5174
5276
|
}
|
|
@@ -5190,7 +5292,8 @@ async function importAction(file) {
|
|
|
5190
5292
|
} else {
|
|
5191
5293
|
console.error(formatError("import", "READ_ERROR", `Error reading file: ${err.message}`, "Check file permissions"));
|
|
5192
5294
|
}
|
|
5193
|
-
process.
|
|
5295
|
+
process.exitCode = 1;
|
|
5296
|
+
return;
|
|
5194
5297
|
}
|
|
5195
5298
|
const { items: existingItems } = await readMemoryItems(repoRoot);
|
|
5196
5299
|
const existingIds = new Set(existingItems.map((item) => item.id));
|
|
@@ -5486,7 +5589,8 @@ function registerAuditCommands(program2) {
|
|
|
5486
5589
|
} catch (err) {
|
|
5487
5590
|
const msg = err instanceof Error ? err.message : "Audit failed";
|
|
5488
5591
|
console.error(formatError("audit", "AUDIT_ERROR", msg, "Check repo configuration"));
|
|
5489
|
-
process.
|
|
5592
|
+
process.exitCode = 1;
|
|
5593
|
+
return;
|
|
5490
5594
|
}
|
|
5491
5595
|
if (opts.json) {
|
|
5492
5596
|
console.log(JSON.stringify(report, null, JSON_INDENT));
|
|
@@ -5513,7 +5617,7 @@ function registerAuditCommands(program2) {
|
|
|
5513
5617
|
}
|
|
5514
5618
|
}
|
|
5515
5619
|
if (report.summary.errors > 0) {
|
|
5516
|
-
process.
|
|
5620
|
+
process.exitCode = 1;
|
|
5517
5621
|
}
|
|
5518
5622
|
});
|
|
5519
5623
|
}
|
|
@@ -5523,10 +5627,10 @@ function configPath(repoRoot) {
|
|
|
5523
5627
|
return join(repoRoot, ".claude", CONFIG_FILENAME);
|
|
5524
5628
|
}
|
|
5525
5629
|
async function readConfig(repoRoot) {
|
|
5526
|
-
const
|
|
5527
|
-
if (!existsSync(
|
|
5630
|
+
const path2 = configPath(repoRoot);
|
|
5631
|
+
if (!existsSync(path2)) return {};
|
|
5528
5632
|
try {
|
|
5529
|
-
const parsed = JSON.parse(await readFile(
|
|
5633
|
+
const parsed = JSON.parse(await readFile(path2, "utf-8"));
|
|
5530
5634
|
if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) return {};
|
|
5531
5635
|
return parsed;
|
|
5532
5636
|
} catch {
|
|
@@ -5628,7 +5732,8 @@ function registerRulesCommands(program2) {
|
|
|
5628
5732
|
} catch (err) {
|
|
5629
5733
|
const msg = err instanceof Error ? err.message : "Failed to load rules config";
|
|
5630
5734
|
console.error(formatError("rules", "CONFIG_ERROR", msg, "Check .claude/rules.json syntax"));
|
|
5631
|
-
process.
|
|
5735
|
+
process.exitCode = 1;
|
|
5736
|
+
return;
|
|
5632
5737
|
}
|
|
5633
5738
|
if (config.rules.length === 0) {
|
|
5634
5739
|
if (!quiet) {
|
|
@@ -5660,7 +5765,7 @@ function registerRulesCommands(program2) {
|
|
|
5660
5765
|
console.log("");
|
|
5661
5766
|
console.log(`Rules: ${total} checked, ${errors} error(s), ${warnings} warning(s), ${passed} passed`);
|
|
5662
5767
|
if (errors > 0) {
|
|
5663
|
-
process.
|
|
5768
|
+
process.exitCode = 1;
|
|
5664
5769
|
}
|
|
5665
5770
|
});
|
|
5666
5771
|
}
|
|
@@ -5761,16 +5866,8 @@ function registerTestSummaryCommand(program2) {
|
|
|
5761
5866
|
process.exit(exitCode);
|
|
5762
5867
|
});
|
|
5763
5868
|
}
|
|
5764
|
-
var EPIC_ID_PATTERN2 = /^[a-zA-Z0-9_-]+$/;
|
|
5765
5869
|
function parseDepsJson(raw) {
|
|
5766
|
-
|
|
5767
|
-
const issue = Array.isArray(data) ? data[0] : data;
|
|
5768
|
-
if (!issue) return [];
|
|
5769
|
-
const depsArray = issue.depends_on ?? issue.dependencies ?? [];
|
|
5770
|
-
return depsArray.map((dep) => ({
|
|
5771
|
-
closed: dep.status === "closed",
|
|
5772
|
-
title: dep.title ?? ""
|
|
5773
|
-
}));
|
|
5870
|
+
return parseBdShowDeps(raw).map((d) => ({ closed: d.status === "closed", title: d.title }));
|
|
5774
5871
|
}
|
|
5775
5872
|
function parseDepsText(output) {
|
|
5776
5873
|
const deps = [];
|
|
@@ -5805,9 +5902,7 @@ function checkGate(deps, prefix, gateName) {
|
|
|
5805
5902
|
return { name: gateName, status: "pass" };
|
|
5806
5903
|
}
|
|
5807
5904
|
async function runVerifyGates(epicId, options = {}) {
|
|
5808
|
-
|
|
5809
|
-
throw new Error(`Invalid epic ID: "${epicId}" (must be alphanumeric with hyphens/underscores)`);
|
|
5810
|
-
}
|
|
5905
|
+
validateEpicId(epicId);
|
|
5811
5906
|
const repoRoot = options.repoRoot ?? getRepoRoot();
|
|
5812
5907
|
const raw = execFileSync("bd", ["show", epicId, "--json"], { encoding: "utf-8" });
|
|
5813
5908
|
let deps;
|
|
@@ -5821,6 +5916,10 @@ async function runVerifyGates(epicId, options = {}) {
|
|
|
5821
5916
|
checkGate(deps, "Review:", "Review task"),
|
|
5822
5917
|
checkGate(deps, "Compound:", "Compound task")
|
|
5823
5918
|
];
|
|
5919
|
+
const mergeTask = deps.find((d) => d.title.startsWith("Merge:"));
|
|
5920
|
+
if (mergeTask) {
|
|
5921
|
+
checks.push(checkGate(deps, "Merge:", "Merge task"));
|
|
5922
|
+
}
|
|
5824
5923
|
const allPassed = checks.every((check) => check.status === "pass");
|
|
5825
5924
|
if (allPassed) {
|
|
5826
5925
|
const state = getPhaseState(repoRoot);
|
|
@@ -5863,6 +5962,291 @@ function registerVerifyGatesCommand(program2) {
|
|
|
5863
5962
|
}
|
|
5864
5963
|
});
|
|
5865
5964
|
}
|
|
5965
|
+
function parseWorktreeList(raw) {
|
|
5966
|
+
const entries = [];
|
|
5967
|
+
let currentPath = "";
|
|
5968
|
+
for (const line of raw.split("\n")) {
|
|
5969
|
+
if (line.startsWith("worktree ")) {
|
|
5970
|
+
currentPath = line.slice("worktree ".length);
|
|
5971
|
+
} else if (line.startsWith("branch ")) {
|
|
5972
|
+
const branch = line.slice("branch refs/heads/".length);
|
|
5973
|
+
entries.push({ path: currentPath, branch });
|
|
5974
|
+
}
|
|
5975
|
+
}
|
|
5976
|
+
return entries;
|
|
5977
|
+
}
|
|
5978
|
+
function runWorktreeCreate(epicId) {
|
|
5979
|
+
validateEpicId(epicId);
|
|
5980
|
+
const repoRoot = getRepoRoot();
|
|
5981
|
+
const basename = path.basename(repoRoot);
|
|
5982
|
+
const worktreePath = path.resolve(repoRoot, "..", `${basename}-wt-${epicId}`);
|
|
5983
|
+
const branch = `epic/${epicId}`;
|
|
5984
|
+
const listRaw = execFileSync("git", ["worktree", "list", "--porcelain"], { encoding: "utf-8" });
|
|
5985
|
+
const existing = parseWorktreeList(listRaw);
|
|
5986
|
+
if (existing.some((e) => e.path === worktreePath || e.branch === branch)) {
|
|
5987
|
+
return { worktreePath, branch, mergeTaskId: "", alreadyExists: true };
|
|
5988
|
+
}
|
|
5989
|
+
execFileSync("git", ["worktree", "add", worktreePath, "-b", branch], { encoding: "utf-8" });
|
|
5990
|
+
execFileSync("pnpm", ["install", "--frozen-lockfile"], { cwd: worktreePath, encoding: "utf-8" });
|
|
5991
|
+
const srcJsonl = path.join(repoRoot, ".claude", "lessons", "index.jsonl");
|
|
5992
|
+
const dstDir = path.join(worktreePath, ".claude", "lessons");
|
|
5993
|
+
const dstJsonl = path.join(dstDir, "index.jsonl");
|
|
5994
|
+
if (existsSync(srcJsonl)) {
|
|
5995
|
+
mkdirSync(dstDir, { recursive: true });
|
|
5996
|
+
copyFileSync(srcJsonl, dstJsonl);
|
|
5997
|
+
}
|
|
5998
|
+
execFileSync("pnpm", ["exec", "ca", "setup", "--skip-model"], { cwd: worktreePath, encoding: "utf-8" });
|
|
5999
|
+
const mergeTitle = `Merge: merge ${branch} to main`;
|
|
6000
|
+
const mergeDesc = `INSTRUCTIONS: This task merges the worktree branch back to main. Worktree path: ${worktreePath}. Run \`pnpm exec ca worktree merge ${epicId}\` when all other blocking tasks are resolved.`;
|
|
6001
|
+
const bdOutput = execFileSync("bd", [
|
|
6002
|
+
"create",
|
|
6003
|
+
`--title=${mergeTitle}`,
|
|
6004
|
+
"--type=task",
|
|
6005
|
+
"--priority=1",
|
|
6006
|
+
`--description=${mergeDesc}`
|
|
6007
|
+
], { encoding: "utf-8" });
|
|
6008
|
+
const idMatch = bdOutput.match(/(\S+)$/);
|
|
6009
|
+
const mergeFullId = idMatch?.[1] ?? "";
|
|
6010
|
+
if (!mergeFullId) {
|
|
6011
|
+
throw new Error("bd create returned no task ID");
|
|
6012
|
+
}
|
|
6013
|
+
const mergeTaskId = shortId(mergeFullId);
|
|
6014
|
+
execFileSync("bd", ["dep", "add", epicId, mergeTaskId], { encoding: "utf-8" });
|
|
6015
|
+
return { worktreePath, branch, mergeTaskId, alreadyExists: false };
|
|
6016
|
+
}
|
|
6017
|
+
function runWorktreeWireDeps(epicId) {
|
|
6018
|
+
validateEpicId(epicId);
|
|
6019
|
+
const raw = execFileSync("bd", ["show", epicId, "--json"], { encoding: "utf-8" });
|
|
6020
|
+
const deps = parseBdShowDeps(raw);
|
|
6021
|
+
const mergeDep = deps.find((d) => d.title.startsWith("Merge:"));
|
|
6022
|
+
if (!mergeDep) {
|
|
6023
|
+
return { noMergeTask: true, wired: [], warnings: [] };
|
|
6024
|
+
}
|
|
6025
|
+
const mergeId = shortId(mergeDep.id);
|
|
6026
|
+
const wired = [];
|
|
6027
|
+
const warnings = [];
|
|
6028
|
+
const reviewDep = deps.find((d) => d.title.startsWith("Review:"));
|
|
6029
|
+
const compoundDep = deps.find((d) => d.title.startsWith("Compound:"));
|
|
6030
|
+
if (reviewDep) {
|
|
6031
|
+
const reviewId = shortId(reviewDep.id);
|
|
6032
|
+
execFileSync("bd", ["dep", "add", mergeId, reviewId], { encoding: "utf-8" });
|
|
6033
|
+
wired.push(reviewId);
|
|
6034
|
+
} else {
|
|
6035
|
+
warnings.push("No Review task found \u2014 it may not exist yet");
|
|
6036
|
+
}
|
|
6037
|
+
if (compoundDep) {
|
|
6038
|
+
const compoundId = shortId(compoundDep.id);
|
|
6039
|
+
execFileSync("bd", ["dep", "add", mergeId, compoundId], { encoding: "utf-8" });
|
|
6040
|
+
wired.push(compoundId);
|
|
6041
|
+
} else {
|
|
6042
|
+
warnings.push("No Compound task found \u2014 it may not exist yet");
|
|
6043
|
+
}
|
|
6044
|
+
return { noMergeTask: false, wired, warnings };
|
|
6045
|
+
}
|
|
6046
|
+
function runWorktreeMerge(epicId) {
|
|
6047
|
+
validateEpicId(epicId);
|
|
6048
|
+
const branch = `epic/${epicId}`;
|
|
6049
|
+
const gitCommonDir = execFileSync("git", ["rev-parse", "--git-common-dir"], { encoding: "utf-8" }).trim();
|
|
6050
|
+
const mainRepo = path.resolve(gitCommonDir, "..");
|
|
6051
|
+
const currentBranch = execFileSync(
|
|
6052
|
+
"git",
|
|
6053
|
+
["-C", mainRepo, "rev-parse", "--abbrev-ref", "HEAD"],
|
|
6054
|
+
{ encoding: "utf-8" }
|
|
6055
|
+
).trim();
|
|
6056
|
+
if (currentBranch !== "main") {
|
|
6057
|
+
throw new Error(`Main repo is on branch "${currentBranch}", expected "main". Checkout main before merging.`);
|
|
6058
|
+
}
|
|
6059
|
+
const listRaw = execFileSync("git", ["worktree", "list", "--porcelain"], { encoding: "utf-8" });
|
|
6060
|
+
const entries = parseWorktreeList(listRaw);
|
|
6061
|
+
const wtEntry = entries.find((e) => e.branch === branch);
|
|
6062
|
+
if (!wtEntry) {
|
|
6063
|
+
throw new Error(`Worktree not found for branch "${branch}". Run \`ca worktree list\` to see active worktrees.`);
|
|
6064
|
+
}
|
|
6065
|
+
const worktreePath = wtEntry.path;
|
|
6066
|
+
try {
|
|
6067
|
+
execFileSync("git", ["merge", "main"], { cwd: worktreePath, encoding: "utf-8" });
|
|
6068
|
+
} catch (err) {
|
|
6069
|
+
throw new Error(
|
|
6070
|
+
`Merge conflict in worktree at ${worktreePath}. Resolve conflicts there and run \`ca worktree merge ${epicId}\` again. Detail: ${err instanceof Error ? err.message : String(err)}`
|
|
6071
|
+
);
|
|
6072
|
+
}
|
|
6073
|
+
try {
|
|
6074
|
+
execFileSync("pnpm", ["test"], { cwd: worktreePath, encoding: "utf-8" });
|
|
6075
|
+
} catch (err) {
|
|
6076
|
+
throw new Error(
|
|
6077
|
+
`Tests failed in worktree at ${worktreePath}. Fix failures before merging. Detail: ${err instanceof Error ? err.message : String(err)}`
|
|
6078
|
+
);
|
|
6079
|
+
}
|
|
6080
|
+
execFileSync("git", ["-C", mainRepo, "merge", branch, "--no-edit"], { encoding: "utf-8" });
|
|
6081
|
+
const mainJsonlPath = path.join(mainRepo, ".claude", "lessons", "index.jsonl");
|
|
6082
|
+
const wtJsonlPath = path.join(worktreePath, ".claude", "lessons", "index.jsonl");
|
|
6083
|
+
let newLessons = 0;
|
|
6084
|
+
if (existsSync(wtJsonlPath)) {
|
|
6085
|
+
const mainContent = existsSync(mainJsonlPath) ? readFileSync(mainJsonlPath, "utf-8") : "";
|
|
6086
|
+
const mainLineSet = new Set(mainContent.split("\n").filter(Boolean));
|
|
6087
|
+
const wtLines = readFileSync(wtJsonlPath, "utf-8").split("\n").filter(Boolean);
|
|
6088
|
+
const newLines = wtLines.filter((line) => !mainLineSet.has(line));
|
|
6089
|
+
if (newLines.length > 0) {
|
|
6090
|
+
const base = mainContent.trimEnd();
|
|
6091
|
+
const appended = base ? `${base}
|
|
6092
|
+
${newLines.join("\n")}
|
|
6093
|
+
` : `${newLines.join("\n")}
|
|
6094
|
+
`;
|
|
6095
|
+
writeFileSync(mainJsonlPath, appended, "utf-8");
|
|
6096
|
+
newLessons = newLines.length;
|
|
6097
|
+
}
|
|
6098
|
+
}
|
|
6099
|
+
execFileSync("git", ["worktree", "remove", worktreePath], { encoding: "utf-8" });
|
|
6100
|
+
execFileSync("git", ["branch", "-d", branch], { encoding: "utf-8" });
|
|
6101
|
+
return { mainRepo, newLessons };
|
|
6102
|
+
}
|
|
6103
|
+
/**
 * Enumerate active epic worktrees and their tracker status.
 *
 * Scans `git worktree list --porcelain` output for paths following the
 * `-wt-<epicId>` naming convention, then queries `bd show <epicId> --json`
 * for each match. The status falls back to "unknown" when `bd` fails or
 * the payload carries no status field.
 *
 * @returns {Array<{epicId: string, path: string, branch: string, status: string}>}
 */
function runWorktreeList() {
  const porcelain = execFileSync("git", ["worktree", "list", "--porcelain"], { encoding: "utf-8" });
  const results = [];
  for (const worktree of parseWorktreeList(porcelain)) {
    // Only paths ending in "-wt-<epicId>" are epic worktrees; everything
    // else (including the main checkout) is ignored.
    const match = /-wt-(.+)$/.exec(worktree.path);
    const epicId = match?.[1];
    if (!epicId) continue;
    let status = "unknown";
    try {
      // Best-effort status lookup; `bd` may be missing or the epic unknown.
      const raw = execFileSync("bd", ["show", epicId, "--json"], { encoding: "utf-8" });
      const parsed = JSON.parse(raw);
      const issue = Array.isArray(parsed) ? parsed[0] : parsed;
      status = issue?.status ?? "unknown";
    } catch {
      // Leave status as "unknown" when the tracker query fails.
    }
    results.push({ epicId, path: worktree.path, branch: worktree.branch, status });
  }
  return results;
}
|
|
6124
|
+
/**
 * Remove the git worktree and branch associated with an epic, then make a
 * best-effort attempt to close the epic's "Merge:" task in the `bd` tracker.
 *
 * @param {string} epicId - Epic identifier (validated against the safe pattern).
 * @param {{force?: boolean}} [options] - `force` skips the dirty-tree check
 *   and uses `git worktree remove --force` plus `git branch -D`.
 * @returns {{removed: true, mergeTaskClosed: boolean}}
 * @throws {Error} If the epic id is invalid, no matching worktree exists, or
 *   the worktree has uncommitted changes and `force` is not set.
 */
function runWorktreeCleanup(epicId, options = {}) {
  validateEpicId(epicId);
  const branch = `epic/${epicId}`;
  const porcelain = execFileSync("git", ["worktree", "list", "--porcelain"], { encoding: "utf-8" });
  const worktrees = parseWorktreeList(porcelain);
  // Match either by branch name or by the "-wt-<epicId>" path suffix.
  const target = worktrees.find(
    (entry) => entry.branch === branch || entry.path.endsWith(`-wt-${epicId}`)
  );
  if (!target) {
    throw new Error(`Worktree not found for epic "${epicId}"`);
  }
  if (!options.force) {
    // Refuse to destroy local work unless explicitly forced.
    const dirty = execFileSync("git", ["status", "--porcelain"], {
      cwd: target.path,
      encoding: "utf-8"
    });
    if (dirty.trim()) {
      throw new Error(`Worktree has uncommitted changes. Use --force to override.`);
    }
  }
  const removeArgs = ["worktree", "remove", target.path];
  if (options.force) {
    removeArgs.push("--force");
  }
  execFileSync("git", removeArgs, { encoding: "utf-8" });
  execFileSync("git", ["branch", options.force ? "-D" : "-d", branch], { encoding: "utf-8" });
  let mergeTaskClosed = false;
  try {
    // Best-effort: close the epic's "Merge:" dependency task if the tracker
    // knows about it; failures here must not undo the git cleanup above.
    const raw = execFileSync("bd", ["show", epicId, "--json"], { encoding: "utf-8" });
    const mergeDep = parseBdShowDeps(raw).find((dep) => dep.title.startsWith("Merge:"));
    if (mergeDep) {
      execFileSync("bd", ["close", shortId(mergeDep.id)], { encoding: "utf-8" });
      mergeTaskClosed = true;
    }
  } catch {
    // Tracker unavailable or epic unknown — ignore.
  }
  return { removed: true, mergeTaskClosed };
}
|
|
6160
|
+
/**
 * Print a CLI error to stderr and mark the process as failed.
 *
 * Uses `process.exitCode` (not `process.exit()`) so pending async work and
 * stream flushes can finish before the process terminates.
 *
 * @param {unknown} err - Error or arbitrary thrown value.
 */
function handleError(err) {
  const message = err instanceof Error ? err.message : String(err);
  console.error(`Error: ${message}`);
  process.exitCode = 1;
}
|
|
6164
|
+
/**
 * Register the `worktree create <epic-id>` sub-command.
 *
 * @param {import('commander').Command} wt - The `worktree` command group.
 */
function addCreateCommand(wt) {
  wt.command("create <epic-id>")
    .description("Create a new worktree for an epic")
    .action((epicId) => {
      try {
        const created = runWorktreeCreate(epicId);
        // Creating is idempotent: report the existing path and stop.
        if (created.alreadyExists) {
          console.log(`Worktree already exists at ${created.worktreePath}`);
          return;
        }
        console.log(`Worktree created:`);
        console.log(` Path: ${created.worktreePath}`);
        console.log(` Branch: ${created.branch}`);
        console.log(` Merge task: ${created.mergeTaskId}`);
      } catch (error) {
        handleError(error);
      }
    });
}
|
|
6181
|
+
/**
 * Register the `worktree wire-deps <epic-id>` sub-command, which wires
 * Review/Compound tasks as dependencies of the epic's merge task.
 *
 * @param {import('commander').Command} wt - The `worktree` command group.
 */
function addWireDepsCommand(wt) {
  wt.command("wire-deps <epic-id>")
    .description("Wire Review/Compound tasks as merge dependencies")
    .action((epicId) => {
      try {
        const outcome = runWorktreeWireDeps(epicId);
        // No merge task means there is no worktree to wire against.
        if (outcome.noMergeTask) {
          console.log("No worktree detected, working on main branch");
          return;
        }
        if (outcome.wired.length > 0) {
          console.log(`Wired dependencies: ${outcome.wired.join(", ")}`);
        }
        for (const warning of outcome.warnings) {
          console.log(`Warning: ${warning}`);
        }
      } catch (error) {
        handleError(error);
      }
    });
}
|
|
6200
|
+
/**
 * Register the `worktree merge <epic-id>` sub-command.
 *
 * @param {import('commander').Command} wt - The `worktree` command group.
 */
function addMergeCommand(wt) {
  wt.command("merge <epic-id>")
    .description("Merge worktree branch back to main")
    .action((epicId) => {
      try {
        const merged = runWorktreeMerge(epicId);
        console.log(`Merged epic/${epicId} to main`);
        console.log(` New lessons: ${merged.newLessons}`);
      } catch (error) {
        handleError(error);
      }
    });
}
|
|
6211
|
+
/**
 * Register the `worktree list` sub-command, printing a fixed-width table of
 * active epic worktrees.
 *
 * @param {import('commander').Command} wt - The `worktree` command group.
 */
function addListCommand(wt) {
  wt.command("list")
    .description("List active worktrees")
    .action(() => {
      try {
        const worktrees = runWorktreeList();
        if (worktrees.length === 0) {
          console.log("No active worktrees.");
          return;
        }
        console.log("Epic ID | Path | Branch | Status");
        console.log("------------|-------------------------------|-----------------|-------");
        for (const entry of worktrees) {
          console.log(`${entry.epicId.padEnd(12)}| ${entry.path.padEnd(30)}| ${entry.branch.padEnd(16)}| ${entry.status}`);
        }
      } catch (error) {
        handleError(error);
      }
    });
}
|
|
6229
|
+
/**
 * Register the `worktree cleanup <epic-id>` sub-command.
 *
 * @param {import('commander').Command} wt - The `worktree` command group.
 */
function addCleanupCommand(wt) {
  wt.command("cleanup <epic-id>")
    .description("Remove a worktree and clean up associated resources")
    .option("--force", "Force removal even with uncommitted changes")
    .action((epicId, opts) => {
      try {
        const cleanup = runWorktreeCleanup(epicId, { force: opts.force });
        console.log(`Worktree removed for epic/${epicId}`);
        if (cleanup.mergeTaskClosed) {
          console.log("Merge task closed.");
        }
      } catch (error) {
        handleError(error);
      }
    });
}
|
|
6242
|
+
/**
 * Attach the `worktree` command group and all of its sub-commands to the
 * given commander program.
 *
 * @param {import('commander').Command} program2 - Root CLI program.
 */
function registerWorktreeCommands(program2) {
  const worktree = program2
    .command("worktree")
    .description("Manage git worktrees for parallel epic execution");
  // Registration order determines the order in `--help` output.
  const registrars = [
    addCreateCommand,
    addWireDepsCommand,
    addMergeCommand,
    addListCommand,
    addCleanupCommand
  ];
  for (const register of registrars) {
    register(worktree);
  }
}
|
|
5866
6250
|
|
|
5867
6251
|
// src/commands/capture.ts
|
|
5868
6252
|
function createLessonFromFlags(trigger, insight, confirmed) {
|
|
@@ -5919,19 +6303,22 @@ async function handleLearn(cmd, insight, options) {
|
|
|
5919
6303
|
const typeResult = MemoryItemTypeSchema.safeParse(options.type);
|
|
5920
6304
|
if (!typeResult.success) {
|
|
5921
6305
|
console.error(formatError("learn", "INVALID_TYPE", `Invalid type: "${options.type}"`, "Use --type lesson|solution|pattern|preference"));
|
|
5922
|
-
process.
|
|
6306
|
+
process.exitCode = 1;
|
|
6307
|
+
return;
|
|
5923
6308
|
}
|
|
5924
6309
|
const itemType = typeResult.data;
|
|
5925
6310
|
if (itemType === "pattern" && (!options.patternBad || !options.patternGood)) {
|
|
5926
6311
|
console.error(formatError("learn", "MISSING_PATTERN", "type=pattern requires --pattern-bad and --pattern-good", 'Use: learn "insight" --type pattern --pattern-bad "old" --pattern-good "new"'));
|
|
5927
|
-
process.
|
|
6312
|
+
process.exitCode = 1;
|
|
6313
|
+
return;
|
|
5928
6314
|
}
|
|
5929
6315
|
let severity;
|
|
5930
6316
|
if (options.severity !== void 0) {
|
|
5931
6317
|
const result = SeveritySchema.safeParse(options.severity);
|
|
5932
6318
|
if (!result.success) {
|
|
5933
6319
|
console.error(formatError("learn", "INVALID_SEVERITY", `Invalid severity: "${options.severity}"`, "Use --severity high|medium|low"));
|
|
5934
|
-
process.
|
|
6320
|
+
process.exitCode = 1;
|
|
6321
|
+
return;
|
|
5935
6322
|
}
|
|
5936
6323
|
severity = result.data;
|
|
5937
6324
|
}
|
|
@@ -5986,7 +6373,8 @@ async function handleDetect(options) {
|
|
|
5986
6373
|
} else {
|
|
5987
6374
|
console.error(formatError("detect", "MISSING_FLAG", "--save requires --yes", "Use: detect --input <file> --save --yes"));
|
|
5988
6375
|
}
|
|
5989
|
-
process.
|
|
6376
|
+
process.exitCode = 1;
|
|
6377
|
+
return;
|
|
5990
6378
|
}
|
|
5991
6379
|
let input;
|
|
5992
6380
|
try {
|
|
@@ -5998,7 +6386,8 @@ async function handleDetect(options) {
|
|
|
5998
6386
|
} else {
|
|
5999
6387
|
console.error(formatError("detect", "INVALID_INPUT", message, "Check the file is valid JSON matching the expected schema"));
|
|
6000
6388
|
}
|
|
6001
|
-
process.
|
|
6389
|
+
process.exitCode = 1;
|
|
6390
|
+
return;
|
|
6002
6391
|
}
|
|
6003
6392
|
const result = await detectAndPropose(repoRoot, input);
|
|
6004
6393
|
if (!result) {
|
|
@@ -6051,7 +6440,8 @@ async function handleCapture(cmd, options) {
|
|
|
6051
6440
|
} else {
|
|
6052
6441
|
console.error(formatError("capture", "INVALID_INPUT", message, "Check the file is valid JSON matching the expected schema"));
|
|
6053
6442
|
}
|
|
6054
|
-
process.
|
|
6443
|
+
process.exitCode = 1;
|
|
6444
|
+
return;
|
|
6055
6445
|
}
|
|
6056
6446
|
const result = await detectAndPropose(repoRoot, input);
|
|
6057
6447
|
if (!result) {
|
|
@@ -6072,7 +6462,8 @@ async function handleCapture(cmd, options) {
|
|
|
6072
6462
|
} else {
|
|
6073
6463
|
console.error(formatError("capture", "MISSING_OPTIONS", msg, "Provide --trigger and --insight, or --input"));
|
|
6074
6464
|
}
|
|
6075
|
-
process.
|
|
6465
|
+
process.exitCode = 1;
|
|
6466
|
+
return;
|
|
6076
6467
|
}
|
|
6077
6468
|
if (!options.yes && !process.stdin.isTTY) {
|
|
6078
6469
|
if (options.json) {
|
|
@@ -6080,7 +6471,8 @@ async function handleCapture(cmd, options) {
|
|
|
6080
6471
|
} else {
|
|
6081
6472
|
console.error(formatError("capture", "NON_INTERACTIVE", "--yes required in non-interactive mode", 'Use: capture --trigger "..." --insight "..." --yes'));
|
|
6082
6473
|
}
|
|
6083
|
-
process.
|
|
6474
|
+
process.exitCode = 1;
|
|
6475
|
+
return;
|
|
6084
6476
|
}
|
|
6085
6477
|
if (options.json) {
|
|
6086
6478
|
if (options.yes) await appendLesson(repoRoot, lesson);
|
|
@@ -6367,13 +6759,13 @@ function registerLoopCommands(program2) {
|
|
|
6367
6759
|
await handleLoop(this, options);
|
|
6368
6760
|
});
|
|
6369
6761
|
}
|
|
6370
|
-
function
|
|
6762
|
+
/**
 * Parse a CLI `--limit`-style option, reporting a formatted error instead of
 * throwing.
 *
 * @param {string|undefined} rawLimit - Raw option value from commander.
 * @param {string} optionName - Option name used in the error hint.
 * @param {string} commandName - Command name used as the error scope.
 * @returns {number|null} The parsed limit, or null after printing the error.
 */
function parseLimitOrNull(rawLimit, optionName, commandName) {
  let parsed;
  try {
    parsed = parseLimit(rawLimit, optionName);
  } catch (err) {
    const message = err instanceof Error ? err.message : `Invalid ${optionName}`;
    console.error(formatError(commandName, "INVALID_LIMIT", message, `Use --${optionName} with a positive integer`));
    return null;
  }
  return parsed;
}
|
|
6379
6771
|
async function readPlanFromStdin() {
|
|
@@ -6434,7 +6826,11 @@ function outputSessionLessonsHuman(lessons, quiet) {
|
|
|
6434
6826
|
}
|
|
6435
6827
|
async function searchAction(cmd, query, options) {
|
|
6436
6828
|
const repoRoot = getRepoRoot();
|
|
6437
|
-
const limit =
|
|
6829
|
+
const limit = parseLimitOrNull(options.limit, "limit", "search");
|
|
6830
|
+
if (limit === null) {
|
|
6831
|
+
process.exitCode = 1;
|
|
6832
|
+
return;
|
|
6833
|
+
}
|
|
6438
6834
|
const { verbose, quiet } = getGlobalOpts(cmd);
|
|
6439
6835
|
await syncIfNeeded(repoRoot);
|
|
6440
6836
|
let results;
|
|
@@ -6443,7 +6839,8 @@ async function searchAction(cmd, query, options) {
|
|
|
6443
6839
|
} catch (err) {
|
|
6444
6840
|
const message = err instanceof Error ? err.message : "Search failed";
|
|
6445
6841
|
console.error(formatError("search", "SEARCH_FAILED", message, "Check your query syntax"));
|
|
6446
|
-
process.
|
|
6842
|
+
process.exitCode = 1;
|
|
6843
|
+
return;
|
|
6447
6844
|
}
|
|
6448
6845
|
if (results.length > 0) {
|
|
6449
6846
|
incrementRetrievalCount(repoRoot, results.map((lesson) => lesson.id));
|
|
@@ -6471,7 +6868,11 @@ async function searchAction(cmd, query, options) {
|
|
|
6471
6868
|
}
|
|
6472
6869
|
async function listAction(cmd, options) {
|
|
6473
6870
|
const repoRoot = getRepoRoot();
|
|
6474
|
-
const limit =
|
|
6871
|
+
const limit = parseLimitOrNull(options.limit, "limit", "list");
|
|
6872
|
+
if (limit === null) {
|
|
6873
|
+
process.exitCode = 1;
|
|
6874
|
+
return;
|
|
6875
|
+
}
|
|
6475
6876
|
const { verbose, quiet } = getGlobalOpts(cmd);
|
|
6476
6877
|
const { items, skippedCount } = await readMemoryItems(repoRoot);
|
|
6477
6878
|
const filteredItems = options.invalidated ? items.filter((i) => i.invalidatedAt) : items;
|
|
@@ -6546,12 +6947,17 @@ async function loadSessionAction(cmd, options) {
|
|
|
6546
6947
|
}
|
|
6547
6948
|
async function checkPlanAction(cmd, options) {
|
|
6548
6949
|
const repoRoot = getRepoRoot();
|
|
6549
|
-
const limit =
|
|
6950
|
+
const limit = parseLimitOrNull(options.limit, "limit", "check-plan");
|
|
6951
|
+
if (limit === null) {
|
|
6952
|
+
process.exitCode = 1;
|
|
6953
|
+
return;
|
|
6954
|
+
}
|
|
6550
6955
|
const { quiet } = getGlobalOpts(cmd);
|
|
6551
6956
|
const planText = options.plan ?? await readPlanFromStdin();
|
|
6552
6957
|
if (!planText) {
|
|
6553
6958
|
console.error(formatError("check-plan", "NO_PLAN", "No plan provided", "Use --plan <text> or pipe text to stdin"));
|
|
6554
|
-
process.
|
|
6959
|
+
process.exitCode = 1;
|
|
6960
|
+
return;
|
|
6555
6961
|
}
|
|
6556
6962
|
const usability = await isModelUsable();
|
|
6557
6963
|
if (!usability.usable) {
|
|
@@ -6565,7 +6971,8 @@ async function checkPlanAction(cmd, options) {
|
|
|
6565
6971
|
} else {
|
|
6566
6972
|
console.error(formatError("check-plan", "MODEL_UNAVAILABLE", usability.reason, usability.action));
|
|
6567
6973
|
}
|
|
6568
|
-
process.
|
|
6974
|
+
process.exitCode = 1;
|
|
6975
|
+
return;
|
|
6569
6976
|
}
|
|
6570
6977
|
try {
|
|
6571
6978
|
const result = await retrieveForPlan(repoRoot, planText, limit);
|
|
@@ -6589,7 +6996,7 @@ async function checkPlanAction(cmd, options) {
|
|
|
6589
6996
|
} else {
|
|
6590
6997
|
console.error(formatError("check-plan", "PLAN_CHECK_FAILED", message, "Check model installation and try again"));
|
|
6591
6998
|
}
|
|
6592
|
-
process.
|
|
6999
|
+
process.exitCode = 1;
|
|
6593
7000
|
}
|
|
6594
7001
|
}
|
|
6595
7002
|
function registerRetrievalCommands(program2) {
|
|
@@ -6628,6 +7035,7 @@ function registerManagementCommands(program2) {
|
|
|
6628
7035
|
registerRulesCommands(program2);
|
|
6629
7036
|
registerTestSummaryCommand(program2);
|
|
6630
7037
|
registerVerifyGatesCommand(program2);
|
|
7038
|
+
registerWorktreeCommands(program2);
|
|
6631
7039
|
}
|
|
6632
7040
|
|
|
6633
7041
|
// src/cli.ts
|