@codebyplan/cli 2.0.1 → 2.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/cli.js +259 -135
  2. package/package.json +1 -1
package/dist/cli.js CHANGED
@@ -37,7 +37,7 @@ var VERSION, PACKAGE_NAME;
37
37
  var init_version = __esm({
38
38
  "src/lib/version.ts"() {
39
39
  "use strict";
40
- VERSION = "2.0.1";
40
+ VERSION = "2.0.2";
41
41
  PACKAGE_NAME = "@codebyplan/cli";
42
42
  }
43
43
  });
@@ -49,7 +49,52 @@ __export(setup_exports, {
49
49
  });
50
50
  import { createInterface } from "node:readline/promises";
51
51
  import { stdin, stdout } from "node:process";
52
- import { execFile } from "node:child_process";
52
+ import { readFile, writeFile } from "node:fs/promises";
53
+ import { homedir } from "node:os";
54
+ import { join } from "node:path";
55
+ function getConfigPath(scope) {
56
+ return scope === "user" ? join(homedir(), ".claude.json") : join(process.cwd(), ".mcp.json");
57
+ }
58
+ async function readConfig(path) {
59
+ try {
60
+ const raw = await readFile(path, "utf-8");
61
+ const parsed = JSON.parse(raw);
62
+ if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
63
+ return parsed;
64
+ }
65
+ return {};
66
+ } catch {
67
+ return {};
68
+ }
69
+ }
70
+ function buildMcpEntry(apiKey) {
71
+ return {
72
+ command: "npx",
73
+ args: ["-y", PACKAGE_NAME],
74
+ env: { CODEBYPLAN_API_KEY: apiKey }
75
+ };
76
+ }
77
+ async function writeMcpConfig(scope, apiKey) {
78
+ const configPath = getConfigPath(scope);
79
+ const config2 = await readConfig(configPath);
80
+ if (typeof config2.mcpServers !== "object" || config2.mcpServers === null || Array.isArray(config2.mcpServers)) {
81
+ config2.mcpServers = {};
82
+ }
83
+ config2.mcpServers.codebyplan = buildMcpEntry(apiKey);
84
+ await writeFile(configPath, JSON.stringify(config2, null, 2) + "\n", "utf-8");
85
+ return configPath;
86
+ }
87
+ async function verifyMcpConfig(scope, apiKey) {
88
+ try {
89
+ const config2 = await readConfig(getConfigPath(scope));
90
+ const servers = config2.mcpServers;
91
+ if (!servers) return false;
92
+ const entry = servers.codebyplan;
93
+ return entry?.env?.CODEBYPLAN_API_KEY === apiKey;
94
+ } catch {
95
+ return false;
96
+ }
97
+ }
53
98
  async function runSetup() {
54
99
  const rl = createInterface({ input: stdin, output: stdout });
55
100
  console.log("\n CodeByPlan MCP Server Setup\n");
@@ -76,53 +121,38 @@ async function runSetup() {
76
121
  console.log(` Warning: API returned status ${res.status}, but continuing.
77
122
  `);
78
123
  } else {
79
- console.log(" API key is valid!\n");
80
- }
81
- const addCmd = `claude mcp add --scope user codebyplan -e CODEBYPLAN_API_KEY=${apiKey} -- npx -y ${PACKAGE_NAME}`;
82
- console.log(" Run this command to add the MCP server to Claude Code:\n");
83
- console.log(` ${addCmd}
84
- `);
85
- const autoAdd = (await rl.question(" Run it now? (Y/n): ")).trim().toLowerCase();
86
- if (autoAdd === "" || autoAdd === "y" || autoAdd === "yes") {
87
- console.log("\n Adding MCP server to Claude Code...\n");
88
- await new Promise((resolve2) => {
89
- execFile("claude", ["mcp", "remove", "--scope", "user", "codebyplan"], () => resolve2());
90
- });
91
- await new Promise((resolve2) => {
92
- execFile(
93
- "claude",
94
- [
95
- "mcp",
96
- "add",
97
- "--scope",
98
- "user",
99
- "codebyplan",
100
- "-e",
101
- `CODEBYPLAN_API_KEY=${apiKey}`,
102
- "--",
103
- "npx",
104
- "-y",
105
- PACKAGE_NAME
106
- ],
107
- (err, _stdout, stderr) => {
108
- if (err) {
109
- console.log(` Could not run 'claude mcp add' automatically.`);
110
- console.log(` Error: ${stderr || err.message}`);
111
- console.log(`
112
- Run it manually:
113
- ${addCmd}
124
+ try {
125
+ const body = await res.json();
126
+ if (Array.isArray(body.data) && body.data.length === 0) {
127
+ console.log(" API key is valid but no repositories found.");
128
+ console.log(" Create one at https://codebyplan.com after setup.\n");
129
+ } else {
130
+ console.log(" API key is valid!\n");
131
+ }
132
+ } catch {
133
+ console.log(" API key is valid!\n");
134
+ }
135
+ }
136
+ console.log(" Where should the MCP server be configured?\n");
137
+ console.log(" 1. Global \u2014 available in all projects (~/.claude.json)");
138
+ console.log(" 2. Project \u2014 only this project (.mcp.json)\n");
139
+ const scopeInput = (await rl.question(" Select (1/2, default: 1): ")).trim();
140
+ const scope = scopeInput === "2" ? "project" : "user";
141
+ console.log("\n Configuring MCP server...");
142
+ const configPath = await writeMcpConfig(scope, apiKey);
143
+ const verified = await verifyMcpConfig(scope, apiKey);
144
+ if (verified) {
145
+ console.log(` Done! Config written to ${configPath}
114
146
  `);
115
- resolve2();
116
- } else {
117
- console.log(" Done! CodeByPlan MCP server is now configured.\n");
118
- console.log(" Start a new Claude Code session to begin using it.\n");
119
- resolve2();
120
- }
121
- }
122
- );
123
- });
147
+ if (scope === "project") {
148
+ console.log(" Note: .mcp.json contains your API key \u2014 add it to .gitignore.\n");
149
+ }
150
+ console.log(" Start a new Claude Code session to begin using it.\n");
124
151
  } else {
125
- console.log("\n Run the command above when you're ready.\n");
152
+ console.log(" Warning: Could not verify the saved configuration.\n");
153
+ console.log(" You can configure manually by adding to your Claude config:\n");
154
+ console.log(` claude mcp add codebyplan -e CODEBYPLAN_API_KEY=${apiKey} -- npx -y ${PACKAGE_NAME}
155
+ `);
126
156
  }
127
157
  } finally {
128
158
  rl.close();
@@ -136,8 +166,8 @@ var init_setup = __esm({
136
166
  });
137
167
 
138
168
  // src/cli/config.ts
139
- import { readFile } from "node:fs/promises";
140
- import { join } from "node:path";
169
+ import { readFile as readFile2 } from "node:fs/promises";
170
+ import { join as join2 } from "node:path";
141
171
  function parseFlags(startIndex) {
142
172
  const flags = {};
143
173
  const args = process.argv.slice(startIndex);
@@ -159,8 +189,8 @@ async function resolveConfig(flags) {
159
189
  let worktreeId = flags["worktree-id"] ?? process.env.CODEBYPLAN_WORKTREE_ID;
160
190
  if (!repoId || !worktreeId) {
161
191
  try {
162
- const configPath = join(projectPath, ".codebyplan.json");
163
- const raw = await readFile(configPath, "utf-8");
192
+ const configPath = join2(projectPath, ".codebyplan.json");
193
+ const raw = await readFile2(configPath, "utf-8");
164
194
  const config2 = JSON.parse(raw);
165
195
  if (!repoId) repoId = config2.repo_id;
166
196
  if (!worktreeId) worktreeId = config2.worktree_id;
@@ -372,8 +402,8 @@ var init_settings_merge = __esm({
372
402
  });
373
403
 
374
404
  // src/lib/hook-registry.ts
375
- import { readdir, readFile as readFile2 } from "node:fs/promises";
376
- import { join as join2 } from "node:path";
405
+ import { readdir, readFile as readFile3 } from "node:fs/promises";
406
+ import { join as join3 } from "node:path";
377
407
  function parseHookMeta(content) {
378
408
  const match = content.match(/^#\s*@hook:\s*(\S+)(?:\s+(.+))?$/m);
379
409
  if (!match) return null;
@@ -392,7 +422,7 @@ async function discoverHooks(hooksDir) {
392
422
  return discovered;
393
423
  }
394
424
  for (const filename of filenames) {
395
- const content = await readFile2(join2(hooksDir, filename), "utf-8");
425
+ const content = await readFile3(join3(hooksDir, filename), "utf-8");
396
426
  const meta = parseHookMeta(content);
397
427
  if (meta) {
398
428
  discovered.set(filename.replace(/\.sh$/, ""), meta);
@@ -478,38 +508,38 @@ var init_variables = __esm({
478
508
  });
479
509
 
480
510
  // src/lib/sync-engine.ts
481
- import { readdir as readdir2, readFile as readFile3, writeFile, unlink, mkdir, rmdir, chmod, stat } from "node:fs/promises";
482
- import { join as join3, dirname } from "node:path";
511
+ import { readdir as readdir2, readFile as readFile4, writeFile as writeFile2, unlink, mkdir, rmdir, chmod, stat } from "node:fs/promises";
512
+ import { join as join4, dirname } from "node:path";
483
513
  function getTypeDir(claudeDir, dir) {
484
- if (dir === "commands") return join3(claudeDir, dir, "cbp");
485
- return join3(claudeDir, dir);
514
+ if (dir === "commands") return join4(claudeDir, dir, "cbp");
515
+ return join4(claudeDir, dir);
486
516
  }
487
517
  function getFilePath(claudeDir, typeName, file) {
488
518
  const cfg = typeConfig[typeName];
489
519
  const typeDir = getTypeDir(claudeDir, cfg.dir);
490
520
  if (cfg.subfolder) {
491
- return join3(typeDir, file.name, `${cfg.subfolder}${cfg.ext}`);
521
+ return join4(typeDir, file.name, `${cfg.subfolder}${cfg.ext}`);
492
522
  }
493
523
  if (typeName === "command" && file.category) {
494
- return join3(typeDir, file.category, `${file.name}${cfg.ext}`);
524
+ return join4(typeDir, file.category, `${file.name}${cfg.ext}`);
495
525
  }
496
526
  if (typeName === "template") {
497
- return join3(typeDir, file.name);
527
+ return join4(typeDir, file.name);
498
528
  }
499
- return join3(typeDir, `${file.name}${cfg.ext}`);
529
+ return join4(typeDir, `${file.name}${cfg.ext}`);
500
530
  }
501
531
  async function readDirRecursive(dir, base = dir) {
502
532
  const result = /* @__PURE__ */ new Map();
503
533
  try {
504
534
  const entries = await readdir2(dir, { withFileTypes: true });
505
535
  for (const entry of entries) {
506
- const fullPath = join3(dir, entry.name);
536
+ const fullPath = join4(dir, entry.name);
507
537
  if (entry.isDirectory()) {
508
538
  const sub = await readDirRecursive(fullPath, base);
509
539
  for (const [k, v] of sub) result.set(k, v);
510
540
  } else {
511
541
  const relPath = fullPath.slice(base.length + 1);
512
- const fileContent = await readFile3(fullPath, "utf-8");
542
+ const fileContent = await readFile4(fullPath, "utf-8");
513
543
  result.set(relPath, fileContent);
514
544
  }
515
545
  }
@@ -519,7 +549,7 @@ async function readDirRecursive(dir, base = dir) {
519
549
  }
520
550
  async function isGitWorktree(projectPath) {
521
551
  try {
522
- const gitPath = join3(projectPath, ".git");
552
+ const gitPath = join4(projectPath, ".git");
523
553
  const info = await stat(gitPath);
524
554
  return info.isFile();
525
555
  } catch {
@@ -546,7 +576,7 @@ async function executeSyncToLocal(options) {
546
576
  const syncData = syncRes.data;
547
577
  const repoData = repoRes.data;
548
578
  syncData.claude_md = [];
549
- const claudeDir = join3(projectPath, ".claude");
579
+ const claudeDir = join4(projectPath, ".claude");
550
580
  const worktree = await isGitWorktree(projectPath);
551
581
  const byType = {};
552
582
  const totals = { created: 0, updated: 0, deleted: 0, unchanged: 0 };
@@ -571,19 +601,19 @@ async function executeSyncToLocal(options) {
571
601
  remotePathMap.set(relPath, { content: substituted, name: remote.name });
572
602
  }
573
603
  for (const [relPath, { content, name }] of remotePathMap) {
574
- const fullPath = join3(targetDir, relPath);
604
+ const fullPath = join4(targetDir, relPath);
575
605
  const localContent = localFiles.get(relPath);
576
606
  if (localContent === void 0) {
577
607
  if (!dryRun) {
578
608
  await mkdir(dirname(fullPath), { recursive: true });
579
- await writeFile(fullPath, content, "utf-8");
609
+ await writeFile2(fullPath, content, "utf-8");
580
610
  if (typeName === "hook") await chmod(fullPath, 493);
581
611
  }
582
612
  result.created.push(name);
583
613
  totals.created++;
584
614
  } else if (localContent !== content) {
585
615
  if (!dryRun) {
586
- await writeFile(fullPath, content, "utf-8");
616
+ await writeFile2(fullPath, content, "utf-8");
587
617
  if (typeName === "hook") await chmod(fullPath, 493);
588
618
  }
589
619
  result.updated.push(name);
@@ -595,7 +625,7 @@ async function executeSyncToLocal(options) {
595
625
  }
596
626
  for (const [relPath] of localFiles) {
597
627
  if (!remotePathMap.has(relPath)) {
598
- const fullPath = join3(targetDir, relPath);
628
+ const fullPath = join4(targetDir, relPath);
599
629
  if (!dryRun) {
600
630
  await unlink(fullPath);
601
631
  await removeEmptyParents(fullPath, targetDir);
@@ -607,9 +637,59 @@ async function executeSyncToLocal(options) {
607
637
  }
608
638
  byType[`${typeName}s`] = result;
609
639
  }
640
+ {
641
+ const typeName = "docs_stack";
642
+ const syncKey = "docs_stack";
643
+ const targetDir = join4(projectPath, "docs", "stack");
644
+ const remoteFiles = syncData[syncKey] ?? [];
645
+ const result = { created: [], updated: [], deleted: [], unchanged: [] };
646
+ if (remoteFiles.length > 0 && !dryRun) {
647
+ await mkdir(targetDir, { recursive: true });
648
+ }
649
+ const localFiles = await readDirRecursive(targetDir);
650
+ const remotePathMap = /* @__PURE__ */ new Map();
651
+ for (const remote of remoteFiles) {
652
+ const relPath = remote.category ? join4(remote.category, remote.name) : remote.name;
653
+ const substituted = substituteVariables(remote.content, repoData);
654
+ remotePathMap.set(relPath, { content: substituted, name: `${remote.category ?? ""}/${remote.name}` });
655
+ }
656
+ for (const [relPath, { content, name }] of remotePathMap) {
657
+ const fullPath = join4(targetDir, relPath);
658
+ const localContent = localFiles.get(relPath);
659
+ if (localContent === void 0) {
660
+ if (!dryRun) {
661
+ await mkdir(dirname(fullPath), { recursive: true });
662
+ await writeFile2(fullPath, content, "utf-8");
663
+ }
664
+ result.created.push(name);
665
+ totals.created++;
666
+ } else if (localContent !== content) {
667
+ if (!dryRun) {
668
+ await writeFile2(fullPath, content, "utf-8");
669
+ }
670
+ result.updated.push(name);
671
+ totals.updated++;
672
+ } else {
673
+ result.unchanged.push(name);
674
+ totals.unchanged++;
675
+ }
676
+ }
677
+ for (const [relPath] of localFiles) {
678
+ if (!remotePathMap.has(relPath)) {
679
+ const fullPath = join4(targetDir, relPath);
680
+ if (!dryRun) {
681
+ await unlink(fullPath);
682
+ await removeEmptyParents(fullPath, targetDir);
683
+ }
684
+ result.deleted.push(relPath);
685
+ totals.deleted++;
686
+ }
687
+ }
688
+ byType[typeName] = result;
689
+ }
610
690
  const specialTypes = {
611
- claude_md: () => join3(projectPath, "CLAUDE.md"),
612
- settings: () => join3(projectPath, ".claude", "settings.local.json")
691
+ claude_md: () => join4(projectPath, "CLAUDE.md"),
692
+ settings: () => join4(projectPath, ".claude", "settings.local.json")
613
693
  };
614
694
  for (const [typeName, getPath] of Object.entries(specialTypes)) {
615
695
  const remoteFiles = syncData[typeName] ?? [];
@@ -619,12 +699,12 @@ async function executeSyncToLocal(options) {
619
699
  const remoteContent = substituteVariables(remote.content, repoData);
620
700
  let localContent;
621
701
  try {
622
- localContent = await readFile3(targetPath, "utf-8");
702
+ localContent = await readFile4(targetPath, "utf-8");
623
703
  } catch {
624
704
  }
625
705
  if (typeName === "settings") {
626
706
  const templateSettings = JSON.parse(remoteContent);
627
- const hooksDir = join3(projectPath, ".claude", "hooks");
707
+ const hooksDir = join4(projectPath, ".claude", "hooks");
628
708
  const discovered = await discoverHooks(hooksDir);
629
709
  if (localContent === void 0) {
630
710
  if (discovered.size > 0) {
@@ -635,7 +715,7 @@ async function executeSyncToLocal(options) {
635
715
  }
636
716
  if (!dryRun) {
637
717
  await mkdir(dirname(targetPath), { recursive: true });
638
- await writeFile(targetPath, JSON.stringify(templateSettings, null, 2) + "\n", "utf-8");
718
+ await writeFile2(targetPath, JSON.stringify(templateSettings, null, 2) + "\n", "utf-8");
639
719
  }
640
720
  result.created.push(remote.name);
641
721
  totals.created++;
@@ -651,7 +731,7 @@ async function executeSyncToLocal(options) {
651
731
  const mergedContent = JSON.stringify(merged, null, 2) + "\n";
652
732
  if (localContent !== mergedContent) {
653
733
  if (!dryRun) {
654
- await writeFile(targetPath, mergedContent, "utf-8");
734
+ await writeFile2(targetPath, mergedContent, "utf-8");
655
735
  }
656
736
  result.updated.push(remote.name);
657
737
  totals.updated++;
@@ -664,13 +744,13 @@ async function executeSyncToLocal(options) {
664
744
  if (localContent === void 0) {
665
745
  if (!dryRun) {
666
746
  await mkdir(dirname(targetPath), { recursive: true });
667
- await writeFile(targetPath, remoteContent, "utf-8");
747
+ await writeFile2(targetPath, remoteContent, "utf-8");
668
748
  }
669
749
  result.created.push(remote.name);
670
750
  totals.created++;
671
751
  } else if (localContent !== remoteContent) {
672
752
  if (!dryRun) {
673
- await writeFile(targetPath, remoteContent, "utf-8");
753
+ await writeFile2(targetPath, remoteContent, "utf-8");
674
754
  }
675
755
  result.updated.push(remote.name);
676
756
  totals.updated++;
@@ -734,8 +814,8 @@ var init_confirm = __esm({
734
814
  });
735
815
 
736
816
  // src/lib/tech-detect.ts
737
- import { readFile as readFile4, access } from "node:fs/promises";
738
- import { join as join4 } from "node:path";
817
+ import { readFile as readFile5, access } from "node:fs/promises";
818
+ import { join as join5 } from "node:path";
739
819
  async function fileExists(filePath) {
740
820
  try {
741
821
  await access(filePath);
@@ -747,7 +827,7 @@ async function fileExists(filePath) {
747
827
  async function detectTechStack(projectPath) {
748
828
  const seen = /* @__PURE__ */ new Map();
749
829
  try {
750
- const raw = await readFile4(join4(projectPath, "package.json"), "utf-8");
830
+ const raw = await readFile5(join5(projectPath, "package.json"), "utf-8");
751
831
  const pkg = JSON.parse(raw);
752
832
  const allDeps = {
753
833
  ...pkg.dependencies,
@@ -766,7 +846,7 @@ async function detectTechStack(projectPath) {
766
846
  }
767
847
  for (const { file, rule } of CONFIG_FILE_MAP) {
768
848
  const key = rule.name.toLowerCase();
769
- if (!seen.has(key) && await fileExists(join4(projectPath, file))) {
849
+ if (!seen.has(key) && await fileExists(join5(projectPath, file))) {
770
850
  seen.set(key, { name: rule.name, category: rule.category });
771
851
  }
772
852
  }
@@ -1022,19 +1102,19 @@ var init_pull = __esm({
1022
1102
  });
1023
1103
 
1024
1104
  // src/cli/fileMapper.ts
1025
- import { readdir as readdir3, readFile as readFile5 } from "node:fs/promises";
1026
- import { join as join5, extname } from "node:path";
1105
+ import { readdir as readdir3, readFile as readFile6 } from "node:fs/promises";
1106
+ import { join as join6, extname } from "node:path";
1027
1107
  function compositeKey(type, name, category) {
1028
1108
  return category ? `${type}:${category}/${name}` : `${type}:${name}`;
1029
1109
  }
1030
1110
  async function scanLocalFiles(claudeDir) {
1031
1111
  const result = /* @__PURE__ */ new Map();
1032
- await scanCommands(join5(claudeDir, "commands", "cbp"), result);
1033
- await scanSubfolderType(join5(claudeDir, "agents"), "agent", "AGENT.md", result);
1034
- await scanSubfolderType(join5(claudeDir, "skills"), "skill", "SKILL.md", result);
1035
- await scanFlatType(join5(claudeDir, "rules"), "rule", ".md", result);
1036
- await scanFlatType(join5(claudeDir, "hooks"), "hook", ".sh", result);
1037
- await scanTemplates(join5(claudeDir, "templates"), result);
1112
+ await scanCommands(join6(claudeDir, "commands", "cbp"), result);
1113
+ await scanSubfolderType(join6(claudeDir, "agents"), "agent", "AGENT.md", result);
1114
+ await scanSubfolderType(join6(claudeDir, "skills"), "skill", "SKILL.md", result);
1115
+ await scanFlatType(join6(claudeDir, "rules"), "rule", ".md", result);
1116
+ await scanFlatType(join6(claudeDir, "hooks"), "hook", ".sh", result);
1117
+ await scanTemplates(join6(claudeDir, "templates"), result);
1038
1118
  return result;
1039
1119
  }
1040
1120
  async function scanCommands(dir, result) {
@@ -1049,10 +1129,10 @@ async function scanCommandsRecursive(baseDir, currentDir, result) {
1049
1129
  }
1050
1130
  for (const entry of entries) {
1051
1131
  if (entry.isDirectory()) {
1052
- await scanCommandsRecursive(baseDir, join5(currentDir, entry.name), result);
1132
+ await scanCommandsRecursive(baseDir, join6(currentDir, entry.name), result);
1053
1133
  } else if (entry.isFile() && entry.name.endsWith(".md")) {
1054
1134
  const name = entry.name.slice(0, -3);
1055
- const content = await readFile5(join5(currentDir, entry.name), "utf-8");
1135
+ const content = await readFile6(join6(currentDir, entry.name), "utf-8");
1056
1136
  const relDir = currentDir.slice(baseDir.length + 1);
1057
1137
  const category = relDir || null;
1058
1138
  const key = compositeKey("command", name, category);
@@ -1069,9 +1149,9 @@ async function scanSubfolderType(dir, type, fileName, result) {
1069
1149
  }
1070
1150
  for (const entry of entries) {
1071
1151
  if (entry.isDirectory()) {
1072
- const filePath = join5(dir, entry.name, fileName);
1152
+ const filePath = join6(dir, entry.name, fileName);
1073
1153
  try {
1074
- const content = await readFile5(filePath, "utf-8");
1154
+ const content = await readFile6(filePath, "utf-8");
1075
1155
  const key = compositeKey(type, entry.name, null);
1076
1156
  result.set(key, { type, name: entry.name, category: null, content });
1077
1157
  } catch {
@@ -1089,7 +1169,7 @@ async function scanFlatType(dir, type, ext, result) {
1089
1169
  for (const entry of entries) {
1090
1170
  if (entry.isFile() && entry.name.endsWith(ext)) {
1091
1171
  const name = entry.name.slice(0, -ext.length);
1092
- const content = await readFile5(join5(dir, entry.name), "utf-8");
1172
+ const content = await readFile6(join6(dir, entry.name), "utf-8");
1093
1173
  const key = compositeKey(type, name, null);
1094
1174
  result.set(key, { type, name, category: null, content });
1095
1175
  }
@@ -1104,7 +1184,7 @@ async function scanTemplates(dir, result) {
1104
1184
  }
1105
1185
  for (const entry of entries) {
1106
1186
  if (entry.isFile() && extname(entry.name)) {
1107
- const content = await readFile5(join5(dir, entry.name), "utf-8");
1187
+ const content = await readFile6(join6(dir, entry.name), "utf-8");
1108
1188
  const key = compositeKey("template", entry.name, null);
1109
1189
  result.set(key, { type: "template", name: entry.name, category: null, content });
1110
1190
  }
@@ -1190,7 +1270,7 @@ __export(push_exports, {
1190
1270
  runPush: () => runPush
1191
1271
  });
1192
1272
  import { stat as stat2 } from "node:fs/promises";
1193
- import { join as join6 } from "node:path";
1273
+ import { join as join7 } from "node:path";
1194
1274
  async function runPush() {
1195
1275
  const flags = parseFlags(3);
1196
1276
  const dryRun = hasFlag("dry-run", 3);
@@ -1205,7 +1285,7 @@ async function runPush() {
1205
1285
  if (dryRun) console.log(` Mode: dry-run (no changes will be made)`);
1206
1286
  if (force) console.log(` Mode: force (no conflict prompts)`);
1207
1287
  console.log();
1208
- const claudeDir = join6(projectPath, ".claude");
1288
+ const claudeDir = join7(projectPath, ".claude");
1209
1289
  try {
1210
1290
  await stat2(claudeDir);
1211
1291
  } catch {
@@ -1377,8 +1457,8 @@ __export(init_exports, {
1377
1457
  });
1378
1458
  import { createInterface as createInterface4 } from "node:readline/promises";
1379
1459
  import { stdin as stdin4, stdout as stdout4 } from "node:process";
1380
- import { writeFile as writeFile2, mkdir as mkdir2, chmod as chmod2 } from "node:fs/promises";
1381
- import { join as join7, dirname as dirname2 } from "node:path";
1460
+ import { writeFile as writeFile3, mkdir as mkdir2, chmod as chmod2 } from "node:fs/promises";
1461
+ import { join as join8, dirname as dirname2 } from "node:path";
1382
1462
  async function runInit() {
1383
1463
  const flags = parseFlags(3);
1384
1464
  const projectPath = flags["path"] ?? process.cwd();
@@ -1417,27 +1497,27 @@ async function runInit() {
1417
1497
  if (match) worktreeId = match.id;
1418
1498
  } catch {
1419
1499
  }
1420
- const configPath = join7(projectPath, ".codebyplan.json");
1500
+ const configPath = join8(projectPath, ".codebyplan.json");
1421
1501
  const configData = { repo_id: repoId };
1422
1502
  if (worktreeId) configData.worktree_id = worktreeId;
1423
1503
  const configContent = JSON.stringify(configData, null, 2) + "\n";
1424
- await writeFile2(configPath, configContent, "utf-8");
1504
+ await writeFile3(configPath, configContent, "utf-8");
1425
1505
  console.log(` Created ${configPath}`);
1426
1506
  const seedAnswer = (await rl.question("\n Seed with CodeByPlan defaults? (Y/n): ")).trim().toLowerCase();
1427
1507
  if (seedAnswer === "" || seedAnswer === "y" || seedAnswer === "yes") {
1428
1508
  let getFilePath3 = function(typeName, file) {
1429
1509
  const cfg = typeConfig2[typeName];
1430
- const typeDir = typeName === "command" ? join7(claudeDir, cfg.dir, "cbp") : join7(claudeDir, cfg.dir);
1510
+ const typeDir = typeName === "command" ? join8(claudeDir, cfg.dir, "cbp") : join8(claudeDir, cfg.dir);
1431
1511
  if (cfg.subfolder) {
1432
- return join7(typeDir, file.name, `${cfg.subfolder}${cfg.ext}`);
1512
+ return join8(typeDir, file.name, `${cfg.subfolder}${cfg.ext}`);
1433
1513
  }
1434
1514
  if (typeName === "command" && file.category) {
1435
- return join7(typeDir, file.category, `${file.name}${cfg.ext}`);
1515
+ return join8(typeDir, file.category, `${file.name}${cfg.ext}`);
1436
1516
  }
1437
1517
  if (typeName === "template") {
1438
- return join7(typeDir, file.name);
1518
+ return join8(typeDir, file.name);
1439
1519
  }
1440
- return join7(typeDir, `${file.name}${cfg.ext}`);
1520
+ return join8(typeDir, `${file.name}${cfg.ext}`);
1441
1521
  };
1442
1522
  var getFilePath2 = getFilePath3;
1443
1523
  console.log("\n Fetching default files...");
@@ -1452,7 +1532,7 @@ async function runInit() {
1452
1532
  printNextSteps(projectPath);
1453
1533
  return;
1454
1534
  }
1455
- const claudeDir = join7(projectPath, ".claude");
1535
+ const claudeDir = join8(projectPath, ".claude");
1456
1536
  let written = 0;
1457
1537
  const typeConfig2 = {
1458
1538
  command: { dir: "commands", ext: ".md" },
@@ -1475,7 +1555,7 @@ async function runInit() {
1475
1555
  for (const file of files) {
1476
1556
  const filePath = getFilePath3(typeName, file);
1477
1557
  await mkdir2(dirname2(filePath), { recursive: true });
1478
- await writeFile2(filePath, file.content, "utf-8");
1558
+ await writeFile3(filePath, file.content, "utf-8");
1479
1559
  if (typeName === "hook") await chmod2(filePath, 493);
1480
1560
  written++;
1481
1561
  }
@@ -1484,16 +1564,16 @@ async function runInit() {
1484
1564
  ...defaultsData.claude_md ?? []
1485
1565
  ];
1486
1566
  for (const file of specialFiles) {
1487
- const targetPath = join7(projectPath, "CLAUDE.md");
1567
+ const targetPath = join8(projectPath, "CLAUDE.md");
1488
1568
  await mkdir2(dirname2(targetPath), { recursive: true });
1489
- await writeFile2(targetPath, file.content, "utf-8");
1569
+ await writeFile3(targetPath, file.content, "utf-8");
1490
1570
  written++;
1491
1571
  }
1492
1572
  const settingsFiles = defaultsData.settings ?? [];
1493
1573
  for (const file of settingsFiles) {
1494
- const targetPath = join7(claudeDir, "settings.json");
1574
+ const targetPath = join8(claudeDir, "settings.json");
1495
1575
  await mkdir2(dirname2(targetPath), { recursive: true });
1496
- await writeFile2(targetPath, file.content, "utf-8");
1576
+ await writeFile3(targetPath, file.content, "utf-8");
1497
1577
  written++;
1498
1578
  }
1499
1579
  console.log(` Wrote ${written} files to .claude/
@@ -23439,11 +23519,14 @@ function registerWriteTools(server) {
23439
23519
  goal: external_exports.string().optional().describe("Checkpoint goal description"),
23440
23520
  deadline: external_exports.string().optional().describe("Deadline date (ISO format)"),
23441
23521
  status: external_exports.string().optional().describe("Initial status (default: pending). Use 'draft' for checkpoints not ready for development."),
23442
- launch_id: external_exports.string().uuid().optional().describe("Optional launch UUID to connect this checkpoint to")
23522
+ launch_id: external_exports.string().uuid().optional().describe("Optional launch UUID to connect this checkpoint to"),
23523
+ context: external_exports.any().optional().describe("Context JSONB (decisions, discoveries, dependencies, constraints, qa_answers)"),
23524
+ research: external_exports.any().optional().describe("Research JSONB (topics with findings and sources)"),
23525
+ qa: external_exports.any().optional().describe("QA JSONB (checklist items with type, check, status)")
23443
23526
  }
23444
- }, async ({ repo_id, title, number: number3, goal, deadline, status, launch_id }) => {
23527
+ }, async ({ repo_id, title, number: number3, goal, deadline, status, launch_id, context, research, qa }) => {
23445
23528
  try {
23446
- const res = await apiPost("/checkpoints", {
23529
+ const body = {
23447
23530
  repo_id,
23448
23531
  title,
23449
23532
  number: number3,
@@ -23451,7 +23534,11 @@ function registerWriteTools(server) {
23451
23534
  deadline: deadline ?? null,
23452
23535
  status: status ?? "pending",
23453
23536
  launch_id: launch_id ?? null
23454
- });
23537
+ };
23538
+ if (context !== void 0) body.context = context;
23539
+ if (research !== void 0) body.research = research;
23540
+ if (qa !== void 0) body.qa = qa;
23541
+ const res = await apiPost("/checkpoints", body);
23455
23542
  return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
23456
23543
  } catch (err) {
23457
23544
  return { content: [{ type: "text", text: `Error: ${err instanceof Error ? err.message : String(err)}` }], isError: true };
@@ -23468,9 +23555,12 @@ function registerWriteTools(server) {
23468
23555
  completed_at: external_exports.string().optional().describe("Completion timestamp (ISO format)"),
23469
23556
  launch_id: external_exports.string().uuid().nullable().optional().describe("Launch UUID to connect (or null to disconnect)"),
23470
23557
  worktree_id: external_exports.string().uuid().nullable().optional().describe("Worktree UUID to assign (or null to unassign)"),
23471
- assigned_to: external_exports.string().nullable().optional().describe("Who/what claimed this checkpoint")
23558
+ assigned_to: external_exports.string().nullable().optional().describe("Who/what claimed this checkpoint"),
23559
+ context: external_exports.any().optional().describe("Context JSONB (decisions, discoveries, dependencies, constraints, qa_answers)"),
23560
+ research: external_exports.any().optional().describe("Research JSONB (topics with findings and sources)"),
23561
+ qa: external_exports.any().optional().describe("QA JSONB (checklist items with type, check, status)")
23472
23562
  }
23473
- }, async ({ checkpoint_id, title, goal, status, deadline, completed_at, launch_id, worktree_id, assigned_to }) => {
23563
+ }, async ({ checkpoint_id, title, goal, status, deadline, completed_at, launch_id, worktree_id, assigned_to, context, research, qa }) => {
23474
23564
  const update = {};
23475
23565
  if (title !== void 0) update.title = title;
23476
23566
  if (goal !== void 0) update.goal = goal;
@@ -23480,6 +23570,9 @@ function registerWriteTools(server) {
23480
23570
  if (launch_id !== void 0) update.launch_id = launch_id;
23481
23571
  if (worktree_id !== void 0) update.worktree_id = worktree_id;
23482
23572
  if (assigned_to !== void 0) update.assigned_to = assigned_to;
23573
+ if (context !== void 0) update.context = context;
23574
+ if (research !== void 0) update.research = research;
23575
+ if (qa !== void 0) update.qa = qa;
23483
23576
  if (Object.keys(update).length === 0) {
23484
23577
  return { content: [{ type: "text", text: "Error: No fields to update" }], isError: true };
23485
23578
  }
@@ -23515,17 +23608,24 @@ function registerWriteTools(server) {
23515
23608
  title: external_exports.string().describe("Task title"),
23516
23609
  number: external_exports.number().int().describe("Task number (e.g. 1 for TASK-1)"),
23517
23610
  requirements: external_exports.string().optional().describe("Task requirements text"),
23518
- status: external_exports.string().optional().describe("Initial status (default: pending)")
23611
+ status: external_exports.string().optional().describe("Initial status (default: pending)"),
23612
+ context: external_exports.any().optional().describe("Context JSONB (decisions, discoveries, dependencies, constraints)"),
23613
+ qa: external_exports.any().optional().describe("QA JSONB (checklist items with type, check, status)"),
23614
+ research: external_exports.any().optional().describe("Research JSONB (topics with findings and sources)")
23519
23615
  }
23520
- }, async ({ checkpoint_id, title, number: number3, requirements, status }) => {
23616
+ }, async ({ checkpoint_id, title, number: number3, requirements, status, context, qa, research }) => {
23521
23617
  try {
23522
- const res = await apiPost("/tasks", {
23618
+ const body = {
23523
23619
  checkpoint_id,
23524
23620
  title,
23525
23621
  number: number3,
23526
23622
  requirements: requirements ?? null,
23527
23623
  status: status ?? "pending"
23528
- });
23624
+ };
23625
+ if (context !== void 0) body.context = context;
23626
+ if (qa !== void 0) body.qa = qa;
23627
+ if (research !== void 0) body.research = research;
23628
+ const res = await apiPost("/tasks", body);
23529
23629
  return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
23530
23630
  } catch (err) {
23531
23631
  return { content: [{ type: "text", text: `Error: ${err instanceof Error ? err.message : String(err)}` }], isError: true };
@@ -23541,17 +23641,25 @@ function registerWriteTools(server) {
23541
23641
  files_changed: external_exports.array(external_exports.object({
23542
23642
  path: external_exports.string().describe("File path relative to repo root"),
23543
23643
  action: external_exports.string().describe("File action (new, modified, deleted)"),
23544
- status: external_exports.string().describe("Approval status (approved, not_approved)")
23644
+ status: external_exports.string().describe("Approval status (approved, not_approved)"),
23645
+ claude_approved: external_exports.boolean().optional().describe("Whether Claude's automated checks passed for this file"),
23646
+ user_approved: external_exports.boolean().optional().describe("Whether the user has approved this file (via git add or web UI)")
23545
23647
  })).optional().describe("Files changed across all rounds"),
23546
- claim_worktree_id: external_exports.string().uuid().optional().describe("Worktree UUID to auto-claim the parent checkpoint when setting status to in_progress")
23648
+ claim_worktree_id: external_exports.string().uuid().optional().describe("Worktree UUID to auto-claim the parent checkpoint when setting status to in_progress"),
23649
+ context: external_exports.any().optional().describe("Context JSONB (decisions, discoveries, dependencies, constraints)"),
23650
+ qa: external_exports.any().optional().describe("QA JSONB (checklist items with type, check, status)"),
23651
+ research: external_exports.any().optional().describe("Research JSONB (topics with findings and sources)")
23547
23652
  }
23548
- }, async ({ task_id, title, requirements, status, files_changed, claim_worktree_id }) => {
23653
+ }, async ({ task_id, title, requirements, status, files_changed, claim_worktree_id, context, qa, research }) => {
23549
23654
  const update = {};
23550
23655
  if (title !== void 0) update.title = title;
23551
23656
  if (requirements !== void 0) update.requirements = requirements;
23552
23657
  if (status !== void 0) update.status = status;
23553
23658
  if (files_changed !== void 0) update.files_changed = files_changed;
23554
23659
  if (claim_worktree_id !== void 0) update.claim_worktree_id = claim_worktree_id;
23660
+ if (context !== void 0) update.context = context;
23661
+ if (qa !== void 0) update.qa = qa;
23662
+ if (research !== void 0) update.research = research;
23555
23663
  if (Object.keys(update).length === 0) {
23556
23664
  return { content: [{ type: "text", text: "Error: No fields to update" }], isError: true };
23557
23665
  }
@@ -23585,17 +23693,22 @@ function registerWriteTools(server) {
23585
23693
  number: external_exports.number().int().describe("Round number"),
23586
23694
  requirements: external_exports.string().optional().describe("Round requirements text"),
23587
23695
  status: external_exports.string().optional().describe("Initial status (default: pending)"),
23588
- started_at: external_exports.string().optional().describe("Start timestamp (ISO format)")
23696
+ started_at: external_exports.string().optional().describe("Start timestamp (ISO format)"),
23697
+ context: external_exports.any().optional().describe("Context JSONB"),
23698
+ qa: external_exports.any().optional().describe("QA JSONB (checklist items with type, check, status)")
23589
23699
  }
23590
- }, async ({ task_id, number: number3, requirements, status, started_at }) => {
23700
+ }, async ({ task_id, number: number3, requirements, status, started_at, context, qa }) => {
23591
23701
  try {
23592
- const res = await apiPost("/rounds", {
23702
+ const body = {
23593
23703
  task_id,
23594
23704
  number: number3,
23595
23705
  requirements: requirements ?? null,
23596
23706
  status: status ?? "pending",
23597
23707
  started_at: started_at ?? null
23598
- });
23708
+ };
23709
+ if (context !== void 0) body.context = context;
23710
+ if (qa !== void 0) body.qa = qa;
23711
+ const res = await apiPost("/rounds", body);
23599
23712
  return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
23600
23713
  } catch (err) {
23601
23714
  return { content: [{ type: "text", text: `Error: ${err instanceof Error ? err.message : String(err)}` }], isError: true };
@@ -23613,10 +23726,14 @@ function registerWriteTools(server) {
23613
23726
  files_changed: external_exports.array(external_exports.object({
23614
23727
  path: external_exports.string().describe("File path relative to repo root"),
23615
23728
  action: external_exports.string().describe("File action (new, modified, deleted)"),
23616
- status: external_exports.string().describe("Approval status (approved, not_approved)")
23617
- })).optional().describe("Files changed in this round with approval status")
23618
- }
23619
- }, async ({ round_id, requirements, status, started_at, completed_at, duration_minutes, files_changed }) => {
23729
+ status: external_exports.string().describe("Approval status (approved, not_approved)"),
23730
+ claude_approved: external_exports.boolean().optional().describe("Whether Claude's automated checks passed for this file"),
23731
+ user_approved: external_exports.boolean().optional().describe("Whether the user has approved this file (via git add or web UI)")
23732
+ })).optional().describe("Files changed in this round with approval status"),
23733
+ context: external_exports.any().optional().describe("Context JSONB"),
23734
+ qa: external_exports.any().optional().describe("QA JSONB (checklist items with type, check, status)")
23735
+ }
23736
+ }, async ({ round_id, requirements, status, started_at, completed_at, duration_minutes, files_changed, context, qa }) => {
23620
23737
  const update = {};
23621
23738
  if (requirements !== void 0) update.requirements = requirements;
23622
23739
  if (status !== void 0) update.status = status;
@@ -23624,6 +23741,8 @@ function registerWriteTools(server) {
23624
23741
  if (completed_at !== void 0) update.completed_at = completed_at;
23625
23742
  if (duration_minutes !== void 0) update.duration_minutes = duration_minutes;
23626
23743
  if (files_changed !== void 0) update.files_changed = files_changed;
23744
+ if (context !== void 0) update.context = context;
23745
+ if (qa !== void 0) update.qa = qa;
23627
23746
  if (Object.keys(update).length === 0) {
23628
23747
  return { content: [{ type: "text", text: "Error: No fields to update" }], isError: true };
23629
23748
  }
@@ -23783,7 +23902,7 @@ function registerWriteTools(server) {
23783
23902
  }
23784
23903
  });
23785
23904
  server.registerTool("sync_claude_files", {
23786
- description: "Sync .claude infrastructure from CodeByPlan DB to local project. Uses aggregated defaults (latest version across all repos) for shared files (commands, agents, skills, rules, hooks, templates). Repo-specific files (CLAUDE.md, settings) are not overwritten.",
23905
+ description: "Sync .claude infrastructure from CodeByPlan DB to local project. Uses aggregated defaults (latest version across all repos) for shared files (commands, agents, skills, rules, hooks, templates, stack docs). Repo-specific files (CLAUDE.md, settings) are not overwritten.",
23787
23906
  inputSchema: {
23788
23907
  repo_id: external_exports.string().uuid().describe("Repository ID to sync files for"),
23789
23908
  project_path: external_exports.string().describe("Absolute path to the project root directory")
@@ -24048,6 +24167,11 @@ if (!process.env.CODEBYPLAN_API_KEY) {
24048
24167
  } catch {
24049
24168
  }
24050
24169
  }
24170
+ if (process.env.CODEBYPLAN_API_KEY?.startsWith("CODEBYPLAN_API_KEY=")) {
24171
+ process.env.CODEBYPLAN_API_KEY = process.env.CODEBYPLAN_API_KEY.slice(
24172
+ "CODEBYPLAN_API_KEY=".length
24173
+ );
24174
+ }
24051
24175
  var arg = process.argv[2];
24052
24176
  if (arg === "--version" || arg === "-v") {
24053
24177
  console.log(VERSION);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@codebyplan/cli",
3
- "version": "2.0.1",
3
+ "version": "2.0.2",
4
4
  "description": "MCP server for CodeByPlan — AI-powered development planning and tracking",
5
5
  "type": "module",
6
6
  "bin": {