@codebyplan/cli 2.0.0 → 2.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/cli.js +450 -135
  2. package/package.json +1 -1
package/dist/cli.js CHANGED
@@ -37,7 +37,7 @@ var VERSION, PACKAGE_NAME;
37
37
  var init_version = __esm({
38
38
  "src/lib/version.ts"() {
39
39
  "use strict";
40
- VERSION = "2.0.0";
40
+ VERSION = "2.0.2";
41
41
  PACKAGE_NAME = "@codebyplan/cli";
42
42
  }
43
43
  });
@@ -49,7 +49,52 @@ __export(setup_exports, {
49
49
  });
50
50
  import { createInterface } from "node:readline/promises";
51
51
  import { stdin, stdout } from "node:process";
52
- import { execFile } from "node:child_process";
52
+ import { readFile, writeFile } from "node:fs/promises";
53
+ import { homedir } from "node:os";
54
+ import { join } from "node:path";
55
+ function getConfigPath(scope) {
56
+ return scope === "user" ? join(homedir(), ".claude.json") : join(process.cwd(), ".mcp.json");
57
+ }
58
+ async function readConfig(path) {
59
+ try {
60
+ const raw = await readFile(path, "utf-8");
61
+ const parsed = JSON.parse(raw);
62
+ if (typeof parsed === "object" && parsed !== null && !Array.isArray(parsed)) {
63
+ return parsed;
64
+ }
65
+ return {};
66
+ } catch {
67
+ return {};
68
+ }
69
+ }
70
+ function buildMcpEntry(apiKey) {
71
+ return {
72
+ command: "npx",
73
+ args: ["-y", PACKAGE_NAME],
74
+ env: { CODEBYPLAN_API_KEY: apiKey }
75
+ };
76
+ }
77
+ async function writeMcpConfig(scope, apiKey) {
78
+ const configPath = getConfigPath(scope);
79
+ const config2 = await readConfig(configPath);
80
+ if (typeof config2.mcpServers !== "object" || config2.mcpServers === null || Array.isArray(config2.mcpServers)) {
81
+ config2.mcpServers = {};
82
+ }
83
+ config2.mcpServers.codebyplan = buildMcpEntry(apiKey);
84
+ await writeFile(configPath, JSON.stringify(config2, null, 2) + "\n", "utf-8");
85
+ return configPath;
86
+ }
87
+ async function verifyMcpConfig(scope, apiKey) {
88
+ try {
89
+ const config2 = await readConfig(getConfigPath(scope));
90
+ const servers = config2.mcpServers;
91
+ if (!servers) return false;
92
+ const entry = servers.codebyplan;
93
+ return entry?.env?.CODEBYPLAN_API_KEY === apiKey;
94
+ } catch {
95
+ return false;
96
+ }
97
+ }
53
98
  async function runSetup() {
54
99
  const rl = createInterface({ input: stdin, output: stdout });
55
100
  console.log("\n CodeByPlan MCP Server Setup\n");
@@ -76,53 +121,38 @@ async function runSetup() {
76
121
  console.log(` Warning: API returned status ${res.status}, but continuing.
77
122
  `);
78
123
  } else {
79
- console.log(" API key is valid!\n");
80
- }
81
- const addCmd = `claude mcp add --scope user codebyplan -e CODEBYPLAN_API_KEY=${apiKey} -- npx -y ${PACKAGE_NAME}`;
82
- console.log(" Run this command to add the MCP server to Claude Code:\n");
83
- console.log(` ${addCmd}
84
- `);
85
- const autoAdd = (await rl.question(" Run it now? (Y/n): ")).trim().toLowerCase();
86
- if (autoAdd === "" || autoAdd === "y" || autoAdd === "yes") {
87
- console.log("\n Adding MCP server to Claude Code...\n");
88
- await new Promise((resolve2) => {
89
- execFile("claude", ["mcp", "remove", "--scope", "user", "codebyplan"], () => resolve2());
90
- });
91
- await new Promise((resolve2) => {
92
- execFile(
93
- "claude",
94
- [
95
- "mcp",
96
- "add",
97
- "--scope",
98
- "user",
99
- "codebyplan",
100
- "-e",
101
- `CODEBYPLAN_API_KEY=${apiKey}`,
102
- "--",
103
- "npx",
104
- "-y",
105
- PACKAGE_NAME
106
- ],
107
- (err, _stdout, stderr) => {
108
- if (err) {
109
- console.log(` Could not run 'claude mcp add' automatically.`);
110
- console.log(` Error: ${stderr || err.message}`);
111
- console.log(`
112
- Run it manually:
113
- ${addCmd}
124
+ try {
125
+ const body = await res.json();
126
+ if (Array.isArray(body.data) && body.data.length === 0) {
127
+ console.log(" API key is valid but no repositories found.");
128
+ console.log(" Create one at https://codebyplan.com after setup.\n");
129
+ } else {
130
+ console.log(" API key is valid!\n");
131
+ }
132
+ } catch {
133
+ console.log(" API key is valid!\n");
134
+ }
135
+ }
136
+ console.log(" Where should the MCP server be configured?\n");
137
+ console.log(" 1. Global \u2014 available in all projects (~/.claude.json)");
138
+ console.log(" 2. Project \u2014 only this project (.mcp.json)\n");
139
+ const scopeInput = (await rl.question(" Select (1/2, default: 1): ")).trim();
140
+ const scope = scopeInput === "2" ? "project" : "user";
141
+ console.log("\n Configuring MCP server...");
142
+ const configPath = await writeMcpConfig(scope, apiKey);
143
+ const verified = await verifyMcpConfig(scope, apiKey);
144
+ if (verified) {
145
+ console.log(` Done! Config written to ${configPath}
114
146
  `);
115
- resolve2();
116
- } else {
117
- console.log(" Done! CodeByPlan MCP server is now configured.\n");
118
- console.log(" Start a new Claude Code session to begin using it.\n");
119
- resolve2();
120
- }
121
- }
122
- );
123
- });
147
+ if (scope === "project") {
148
+ console.log(" Note: .mcp.json contains your API key \u2014 add it to .gitignore.\n");
149
+ }
150
+ console.log(" Start a new Claude Code session to begin using it.\n");
124
151
  } else {
125
- console.log("\n Run the command above when you're ready.\n");
152
+ console.log(" Warning: Could not verify the saved configuration.\n");
153
+ console.log(" You can configure manually by adding to your Claude config:\n");
154
+ console.log(` claude mcp add codebyplan -e CODEBYPLAN_API_KEY=${apiKey} -- npx -y ${PACKAGE_NAME}
155
+ `);
126
156
  }
127
157
  } finally {
128
158
  rl.close();
@@ -136,8 +166,8 @@ var init_setup = __esm({
136
166
  });
137
167
 
138
168
  // src/cli/config.ts
139
- import { readFile } from "node:fs/promises";
140
- import { join } from "node:path";
169
+ import { readFile as readFile2 } from "node:fs/promises";
170
+ import { join as join2 } from "node:path";
141
171
  function parseFlags(startIndex) {
142
172
  const flags = {};
143
173
  const args = process.argv.slice(startIndex);
@@ -159,8 +189,8 @@ async function resolveConfig(flags) {
159
189
  let worktreeId = flags["worktree-id"] ?? process.env.CODEBYPLAN_WORKTREE_ID;
160
190
  if (!repoId || !worktreeId) {
161
191
  try {
162
- const configPath = join(projectPath, ".codebyplan.json");
163
- const raw = await readFile(configPath, "utf-8");
192
+ const configPath = join2(projectPath, ".codebyplan.json");
193
+ const raw = await readFile2(configPath, "utf-8");
164
194
  const config2 = JSON.parse(raw);
165
195
  if (!repoId) repoId = config2.repo_id;
166
196
  if (!worktreeId) worktreeId = config2.worktree_id;
@@ -262,8 +292,14 @@ async function request(method, path, options) {
262
292
  let code;
263
293
  try {
264
294
  const body = await res.json();
265
- if (body.error) message = body.error;
266
- if (body.code) code = body.code;
295
+ if (body.error && typeof body.error === "object") {
296
+ const err = body.error;
297
+ if (err.message) message = err.message;
298
+ if (err.code) code = err.code;
299
+ } else if (typeof body.error === "string") {
300
+ message = body.error;
301
+ }
302
+ if (body.code && typeof body.code === "string") code = body.code;
267
303
  } catch {
268
304
  }
269
305
  const apiError = new ApiError(message, res.status, code);
@@ -366,8 +402,8 @@ var init_settings_merge = __esm({
366
402
  });
367
403
 
368
404
  // src/lib/hook-registry.ts
369
- import { readdir, readFile as readFile2 } from "node:fs/promises";
370
- import { join as join2 } from "node:path";
405
+ import { readdir, readFile as readFile3 } from "node:fs/promises";
406
+ import { join as join3 } from "node:path";
371
407
  function parseHookMeta(content) {
372
408
  const match = content.match(/^#\s*@hook:\s*(\S+)(?:\s+(.+))?$/m);
373
409
  if (!match) return null;
@@ -386,7 +422,7 @@ async function discoverHooks(hooksDir) {
386
422
  return discovered;
387
423
  }
388
424
  for (const filename of filenames) {
389
- const content = await readFile2(join2(hooksDir, filename), "utf-8");
425
+ const content = await readFile3(join3(hooksDir, filename), "utf-8");
390
426
  const meta = parseHookMeta(content);
391
427
  if (meta) {
392
428
  discovered.set(filename.replace(/\.sh$/, ""), meta);
@@ -472,38 +508,38 @@ var init_variables = __esm({
472
508
  });
473
509
 
474
510
  // src/lib/sync-engine.ts
475
- import { readdir as readdir2, readFile as readFile3, writeFile, unlink, mkdir, rmdir, chmod, stat } from "node:fs/promises";
476
- import { join as join3, dirname } from "node:path";
511
+ import { readdir as readdir2, readFile as readFile4, writeFile as writeFile2, unlink, mkdir, rmdir, chmod, stat } from "node:fs/promises";
512
+ import { join as join4, dirname } from "node:path";
477
513
  function getTypeDir(claudeDir, dir) {
478
- if (dir === "commands") return join3(claudeDir, dir, "cbp");
479
- return join3(claudeDir, dir);
514
+ if (dir === "commands") return join4(claudeDir, dir, "cbp");
515
+ return join4(claudeDir, dir);
480
516
  }
481
517
  function getFilePath(claudeDir, typeName, file) {
482
518
  const cfg = typeConfig[typeName];
483
519
  const typeDir = getTypeDir(claudeDir, cfg.dir);
484
520
  if (cfg.subfolder) {
485
- return join3(typeDir, file.name, `${cfg.subfolder}${cfg.ext}`);
521
+ return join4(typeDir, file.name, `${cfg.subfolder}${cfg.ext}`);
486
522
  }
487
523
  if (typeName === "command" && file.category) {
488
- return join3(typeDir, file.category, `${file.name}${cfg.ext}`);
524
+ return join4(typeDir, file.category, `${file.name}${cfg.ext}`);
489
525
  }
490
526
  if (typeName === "template") {
491
- return join3(typeDir, file.name);
527
+ return join4(typeDir, file.name);
492
528
  }
493
- return join3(typeDir, `${file.name}${cfg.ext}`);
529
+ return join4(typeDir, `${file.name}${cfg.ext}`);
494
530
  }
495
531
  async function readDirRecursive(dir, base = dir) {
496
532
  const result = /* @__PURE__ */ new Map();
497
533
  try {
498
534
  const entries = await readdir2(dir, { withFileTypes: true });
499
535
  for (const entry of entries) {
500
- const fullPath = join3(dir, entry.name);
536
+ const fullPath = join4(dir, entry.name);
501
537
  if (entry.isDirectory()) {
502
538
  const sub = await readDirRecursive(fullPath, base);
503
539
  for (const [k, v] of sub) result.set(k, v);
504
540
  } else {
505
541
  const relPath = fullPath.slice(base.length + 1);
506
- const fileContent = await readFile3(fullPath, "utf-8");
542
+ const fileContent = await readFile4(fullPath, "utf-8");
507
543
  result.set(relPath, fileContent);
508
544
  }
509
545
  }
@@ -513,7 +549,7 @@ async function readDirRecursive(dir, base = dir) {
513
549
  }
514
550
  async function isGitWorktree(projectPath) {
515
551
  try {
516
- const gitPath = join3(projectPath, ".git");
552
+ const gitPath = join4(projectPath, ".git");
517
553
  const info = await stat(gitPath);
518
554
  return info.isFile();
519
555
  } catch {
@@ -540,7 +576,7 @@ async function executeSyncToLocal(options) {
540
576
  const syncData = syncRes.data;
541
577
  const repoData = repoRes.data;
542
578
  syncData.claude_md = [];
543
- const claudeDir = join3(projectPath, ".claude");
579
+ const claudeDir = join4(projectPath, ".claude");
544
580
  const worktree = await isGitWorktree(projectPath);
545
581
  const byType = {};
546
582
  const totals = { created: 0, updated: 0, deleted: 0, unchanged: 0 };
@@ -565,19 +601,19 @@ async function executeSyncToLocal(options) {
565
601
  remotePathMap.set(relPath, { content: substituted, name: remote.name });
566
602
  }
567
603
  for (const [relPath, { content, name }] of remotePathMap) {
568
- const fullPath = join3(targetDir, relPath);
604
+ const fullPath = join4(targetDir, relPath);
569
605
  const localContent = localFiles.get(relPath);
570
606
  if (localContent === void 0) {
571
607
  if (!dryRun) {
572
608
  await mkdir(dirname(fullPath), { recursive: true });
573
- await writeFile(fullPath, content, "utf-8");
609
+ await writeFile2(fullPath, content, "utf-8");
574
610
  if (typeName === "hook") await chmod(fullPath, 493);
575
611
  }
576
612
  result.created.push(name);
577
613
  totals.created++;
578
614
  } else if (localContent !== content) {
579
615
  if (!dryRun) {
580
- await writeFile(fullPath, content, "utf-8");
616
+ await writeFile2(fullPath, content, "utf-8");
581
617
  if (typeName === "hook") await chmod(fullPath, 493);
582
618
  }
583
619
  result.updated.push(name);
@@ -589,7 +625,7 @@ async function executeSyncToLocal(options) {
589
625
  }
590
626
  for (const [relPath] of localFiles) {
591
627
  if (!remotePathMap.has(relPath)) {
592
- const fullPath = join3(targetDir, relPath);
628
+ const fullPath = join4(targetDir, relPath);
593
629
  if (!dryRun) {
594
630
  await unlink(fullPath);
595
631
  await removeEmptyParents(fullPath, targetDir);
@@ -601,9 +637,59 @@ async function executeSyncToLocal(options) {
601
637
  }
602
638
  byType[`${typeName}s`] = result;
603
639
  }
640
+ {
641
+ const typeName = "docs_stack";
642
+ const syncKey = "docs_stack";
643
+ const targetDir = join4(projectPath, "docs", "stack");
644
+ const remoteFiles = syncData[syncKey] ?? [];
645
+ const result = { created: [], updated: [], deleted: [], unchanged: [] };
646
+ if (remoteFiles.length > 0 && !dryRun) {
647
+ await mkdir(targetDir, { recursive: true });
648
+ }
649
+ const localFiles = await readDirRecursive(targetDir);
650
+ const remotePathMap = /* @__PURE__ */ new Map();
651
+ for (const remote of remoteFiles) {
652
+ const relPath = remote.category ? join4(remote.category, remote.name) : remote.name;
653
+ const substituted = substituteVariables(remote.content, repoData);
654
+ remotePathMap.set(relPath, { content: substituted, name: `${remote.category ?? ""}/${remote.name}` });
655
+ }
656
+ for (const [relPath, { content, name }] of remotePathMap) {
657
+ const fullPath = join4(targetDir, relPath);
658
+ const localContent = localFiles.get(relPath);
659
+ if (localContent === void 0) {
660
+ if (!dryRun) {
661
+ await mkdir(dirname(fullPath), { recursive: true });
662
+ await writeFile2(fullPath, content, "utf-8");
663
+ }
664
+ result.created.push(name);
665
+ totals.created++;
666
+ } else if (localContent !== content) {
667
+ if (!dryRun) {
668
+ await writeFile2(fullPath, content, "utf-8");
669
+ }
670
+ result.updated.push(name);
671
+ totals.updated++;
672
+ } else {
673
+ result.unchanged.push(name);
674
+ totals.unchanged++;
675
+ }
676
+ }
677
+ for (const [relPath] of localFiles) {
678
+ if (!remotePathMap.has(relPath)) {
679
+ const fullPath = join4(targetDir, relPath);
680
+ if (!dryRun) {
681
+ await unlink(fullPath);
682
+ await removeEmptyParents(fullPath, targetDir);
683
+ }
684
+ result.deleted.push(relPath);
685
+ totals.deleted++;
686
+ }
687
+ }
688
+ byType[typeName] = result;
689
+ }
604
690
  const specialTypes = {
605
- claude_md: () => join3(projectPath, "CLAUDE.md"),
606
- settings: () => join3(projectPath, ".claude", "settings.local.json")
691
+ claude_md: () => join4(projectPath, "CLAUDE.md"),
692
+ settings: () => join4(projectPath, ".claude", "settings.local.json")
607
693
  };
608
694
  for (const [typeName, getPath] of Object.entries(specialTypes)) {
609
695
  const remoteFiles = syncData[typeName] ?? [];
@@ -613,12 +699,12 @@ async function executeSyncToLocal(options) {
613
699
  const remoteContent = substituteVariables(remote.content, repoData);
614
700
  let localContent;
615
701
  try {
616
- localContent = await readFile3(targetPath, "utf-8");
702
+ localContent = await readFile4(targetPath, "utf-8");
617
703
  } catch {
618
704
  }
619
705
  if (typeName === "settings") {
620
706
  const templateSettings = JSON.parse(remoteContent);
621
- const hooksDir = join3(projectPath, ".claude", "hooks");
707
+ const hooksDir = join4(projectPath, ".claude", "hooks");
622
708
  const discovered = await discoverHooks(hooksDir);
623
709
  if (localContent === void 0) {
624
710
  if (discovered.size > 0) {
@@ -629,7 +715,7 @@ async function executeSyncToLocal(options) {
629
715
  }
630
716
  if (!dryRun) {
631
717
  await mkdir(dirname(targetPath), { recursive: true });
632
- await writeFile(targetPath, JSON.stringify(templateSettings, null, 2) + "\n", "utf-8");
718
+ await writeFile2(targetPath, JSON.stringify(templateSettings, null, 2) + "\n", "utf-8");
633
719
  }
634
720
  result.created.push(remote.name);
635
721
  totals.created++;
@@ -645,7 +731,7 @@ async function executeSyncToLocal(options) {
645
731
  const mergedContent = JSON.stringify(merged, null, 2) + "\n";
646
732
  if (localContent !== mergedContent) {
647
733
  if (!dryRun) {
648
- await writeFile(targetPath, mergedContent, "utf-8");
734
+ await writeFile2(targetPath, mergedContent, "utf-8");
649
735
  }
650
736
  result.updated.push(remote.name);
651
737
  totals.updated++;
@@ -658,13 +744,13 @@ async function executeSyncToLocal(options) {
658
744
  if (localContent === void 0) {
659
745
  if (!dryRun) {
660
746
  await mkdir(dirname(targetPath), { recursive: true });
661
- await writeFile(targetPath, remoteContent, "utf-8");
747
+ await writeFile2(targetPath, remoteContent, "utf-8");
662
748
  }
663
749
  result.created.push(remote.name);
664
750
  totals.created++;
665
751
  } else if (localContent !== remoteContent) {
666
752
  if (!dryRun) {
667
- await writeFile(targetPath, remoteContent, "utf-8");
753
+ await writeFile2(targetPath, remoteContent, "utf-8");
668
754
  }
669
755
  result.updated.push(remote.name);
670
756
  totals.updated++;
@@ -727,6 +813,145 @@ var init_confirm = __esm({
727
813
  }
728
814
  });
729
815
 
816
+ // src/lib/tech-detect.ts
817
+ import { readFile as readFile5, access } from "node:fs/promises";
818
+ import { join as join5 } from "node:path";
819
+ async function fileExists(filePath) {
820
+ try {
821
+ await access(filePath);
822
+ return true;
823
+ } catch {
824
+ return false;
825
+ }
826
+ }
827
+ async function detectTechStack(projectPath) {
828
+ const seen = /* @__PURE__ */ new Map();
829
+ try {
830
+ const raw = await readFile5(join5(projectPath, "package.json"), "utf-8");
831
+ const pkg = JSON.parse(raw);
832
+ const allDeps = {
833
+ ...pkg.dependencies,
834
+ ...pkg.devDependencies
835
+ };
836
+ for (const depName of Object.keys(allDeps)) {
837
+ const rule = PACKAGE_MAP[depName];
838
+ if (rule) {
839
+ const key = rule.name.toLowerCase();
840
+ if (!seen.has(key)) {
841
+ seen.set(key, { name: rule.name, category: rule.category });
842
+ }
843
+ }
844
+ }
845
+ } catch {
846
+ }
847
+ for (const { file, rule } of CONFIG_FILE_MAP) {
848
+ const key = rule.name.toLowerCase();
849
+ if (!seen.has(key) && await fileExists(join5(projectPath, file))) {
850
+ seen.set(key, { name: rule.name, category: rule.category });
851
+ }
852
+ }
853
+ return Array.from(seen.values()).sort((a, b) => {
854
+ const catCmp = a.category.localeCompare(b.category);
855
+ if (catCmp !== 0) return catCmp;
856
+ return a.name.localeCompare(b.name);
857
+ });
858
+ }
859
+ function mergeTechStack(remote, detected) {
860
+ const seen = /* @__PURE__ */ new Map();
861
+ for (const entry of remote) {
862
+ seen.set(entry.name.toLowerCase(), entry);
863
+ }
864
+ const added = [];
865
+ for (const entry of detected) {
866
+ const key = entry.name.toLowerCase();
867
+ if (!seen.has(key)) {
868
+ seen.set(key, entry);
869
+ added.push(entry);
870
+ }
871
+ }
872
+ const merged = Array.from(seen.values()).sort((a, b) => {
873
+ const catCmp = a.category.localeCompare(b.category);
874
+ if (catCmp !== 0) return catCmp;
875
+ return a.name.localeCompare(b.name);
876
+ });
877
+ return { merged, added };
878
+ }
879
+ function parseTechStack(raw) {
880
+ if (!Array.isArray(raw)) return [];
881
+ return raw.filter(
882
+ (item) => typeof item === "object" && item !== null && typeof item.name === "string" && typeof item.category === "string"
883
+ );
884
+ }
885
+ var PACKAGE_MAP, CONFIG_FILE_MAP;
886
+ var init_tech_detect = __esm({
887
+ "src/lib/tech-detect.ts"() {
888
+ "use strict";
889
+ PACKAGE_MAP = {
890
+ // Frameworks
891
+ next: { name: "Next.js", category: "framework" },
892
+ nuxt: { name: "Nuxt", category: "framework" },
893
+ gatsby: { name: "Gatsby", category: "framework" },
894
+ express: { name: "Express", category: "framework" },
895
+ fastify: { name: "Fastify", category: "framework" },
896
+ hono: { name: "Hono", category: "framework" },
897
+ "@remix-run/node": { name: "Remix", category: "framework" },
898
+ svelte: { name: "Svelte", category: "framework" },
899
+ astro: { name: "Astro", category: "framework" },
900
+ "@angular/core": { name: "Angular", category: "framework" },
901
+ // Libraries (UI)
902
+ react: { name: "React", category: "framework" },
903
+ vue: { name: "Vue", category: "framework" },
904
+ "solid-js": { name: "Solid", category: "framework" },
905
+ preact: { name: "Preact", category: "framework" },
906
+ // Languages (detected via devDeps)
907
+ typescript: { name: "TypeScript", category: "language" },
908
+ // Styling
909
+ tailwindcss: { name: "Tailwind CSS", category: "styling" },
910
+ sass: { name: "SCSS", category: "styling" },
911
+ "styled-components": { name: "styled-components", category: "styling" },
912
+ "@emotion/react": { name: "Emotion", category: "styling" },
913
+ // Database
914
+ prisma: { name: "Prisma", category: "database" },
915
+ "@prisma/client": { name: "Prisma", category: "database" },
916
+ "drizzle-orm": { name: "Drizzle", category: "database" },
917
+ "@supabase/supabase-js": { name: "Supabase", category: "database" },
918
+ mongoose: { name: "MongoDB", category: "database" },
919
+ typeorm: { name: "TypeORM", category: "database" },
920
+ knex: { name: "Knex", category: "database" },
921
+ // Testing
922
+ jest: { name: "Jest", category: "testing" },
923
+ vitest: { name: "Vitest", category: "testing" },
924
+ mocha: { name: "Mocha", category: "testing" },
925
+ playwright: { name: "Playwright", category: "testing" },
926
+ "@playwright/test": { name: "Playwright", category: "testing" },
927
+ cypress: { name: "Cypress", category: "testing" },
928
+ // Build tools
929
+ turbo: { name: "Turborepo", category: "build" },
930
+ vite: { name: "Vite", category: "build" },
931
+ webpack: { name: "Webpack", category: "build" },
932
+ esbuild: { name: "esbuild", category: "build" },
933
+ rollup: { name: "Rollup", category: "build" },
934
+ // Tools
935
+ eslint: { name: "ESLint", category: "tool" },
936
+ prettier: { name: "Prettier", category: "tool" },
937
+ "@biomejs/biome": { name: "Biome", category: "tool" }
938
+ };
939
+ CONFIG_FILE_MAP = [
940
+ { file: "tsconfig.json", rule: { name: "TypeScript", category: "language" } },
941
+ { file: "next.config.js", rule: { name: "Next.js", category: "framework" } },
942
+ { file: "next.config.mjs", rule: { name: "Next.js", category: "framework" } },
943
+ { file: "next.config.ts", rule: { name: "Next.js", category: "framework" } },
944
+ { file: "tailwind.config.js", rule: { name: "Tailwind CSS", category: "styling" } },
945
+ { file: "tailwind.config.ts", rule: { name: "Tailwind CSS", category: "styling" } },
946
+ { file: "turbo.json", rule: { name: "Turborepo", category: "build" } },
947
+ { file: "docker-compose.yml", rule: { name: "Docker", category: "deployment" } },
948
+ { file: "docker-compose.yaml", rule: { name: "Docker", category: "deployment" } },
949
+ { file: "Dockerfile", rule: { name: "Docker", category: "deployment" } },
950
+ { file: "vercel.json", rule: { name: "Vercel", category: "deployment" } }
951
+ ];
952
+ }
953
+ });
954
+
730
955
  // src/cli/pull.ts
731
956
  var pull_exports = {};
732
957
  __export(pull_exports, {
@@ -801,6 +1026,7 @@ async function runPull() {
801
1026
  if (dryRun) console.log(` Mode: dry-run (no changes will be made)`);
802
1027
  if (force) console.log(` Mode: force (no confirmation prompt)`);
803
1028
  console.log();
1029
+ const repoRes = await apiGet(`/repos/${repoId}`);
804
1030
  const result = await executePull({ repoId, projectPath, dryRun, force });
805
1031
  console.log();
806
1032
  if (result.created + result.updated + result.deleted === 0) {
@@ -811,8 +1037,29 @@ async function runPull() {
811
1037
  if (dryRun) {
812
1038
  console.log(" (dry-run \u2014 no changes were made)");
813
1039
  }
1040
+ printTechStack(repoRes.data.tech_stack);
814
1041
  await printSyncStatus(repoId);
815
1042
  }
1043
+ function printTechStack(techStackRaw) {
1044
+ try {
1045
+ const entries = parseTechStack(techStackRaw);
1046
+ if (entries.length === 0) return;
1047
+ const grouped = /* @__PURE__ */ new Map();
1048
+ for (const entry of entries) {
1049
+ const list = grouped.get(entry.category) ?? [];
1050
+ list.push(entry);
1051
+ grouped.set(entry.category, list);
1052
+ }
1053
+ console.log();
1054
+ console.log(" Tech stack:");
1055
+ for (const [category, items] of grouped) {
1056
+ const label = category.charAt(0).toUpperCase() + category.slice(1);
1057
+ const names = items.map((e) => e.name).join(", ");
1058
+ console.log(` ${label}: ${names}`);
1059
+ }
1060
+ } catch {
1061
+ }
1062
+ }
816
1063
  async function printSyncStatus(currentRepoId) {
817
1064
  try {
818
1065
  const statusRes = await apiGet("/sync/status");
@@ -842,6 +1089,7 @@ var init_pull = __esm({
842
1089
  init_api();
843
1090
  init_sync_engine();
844
1091
  init_confirm();
1092
+ init_tech_detect();
845
1093
  displayTypeMap = {
846
1094
  commands: "command",
847
1095
  agents: "agent",
@@ -854,19 +1102,19 @@ var init_pull = __esm({
854
1102
  });
855
1103
 
856
1104
  // src/cli/fileMapper.ts
857
- import { readdir as readdir3, readFile as readFile4 } from "node:fs/promises";
858
- import { join as join4, extname } from "node:path";
1105
+ import { readdir as readdir3, readFile as readFile6 } from "node:fs/promises";
1106
+ import { join as join6, extname } from "node:path";
859
1107
  function compositeKey(type, name, category) {
860
1108
  return category ? `${type}:${category}/${name}` : `${type}:${name}`;
861
1109
  }
862
1110
  async function scanLocalFiles(claudeDir) {
863
1111
  const result = /* @__PURE__ */ new Map();
864
- await scanCommands(join4(claudeDir, "commands", "cbp"), result);
865
- await scanSubfolderType(join4(claudeDir, "agents"), "agent", "AGENT.md", result);
866
- await scanSubfolderType(join4(claudeDir, "skills"), "skill", "SKILL.md", result);
867
- await scanFlatType(join4(claudeDir, "rules"), "rule", ".md", result);
868
- await scanFlatType(join4(claudeDir, "hooks"), "hook", ".sh", result);
869
- await scanTemplates(join4(claudeDir, "templates"), result);
1112
+ await scanCommands(join6(claudeDir, "commands", "cbp"), result);
1113
+ await scanSubfolderType(join6(claudeDir, "agents"), "agent", "AGENT.md", result);
1114
+ await scanSubfolderType(join6(claudeDir, "skills"), "skill", "SKILL.md", result);
1115
+ await scanFlatType(join6(claudeDir, "rules"), "rule", ".md", result);
1116
+ await scanFlatType(join6(claudeDir, "hooks"), "hook", ".sh", result);
1117
+ await scanTemplates(join6(claudeDir, "templates"), result);
870
1118
  return result;
871
1119
  }
872
1120
  async function scanCommands(dir, result) {
@@ -881,10 +1129,10 @@ async function scanCommandsRecursive(baseDir, currentDir, result) {
881
1129
  }
882
1130
  for (const entry of entries) {
883
1131
  if (entry.isDirectory()) {
884
- await scanCommandsRecursive(baseDir, join4(currentDir, entry.name), result);
1132
+ await scanCommandsRecursive(baseDir, join6(currentDir, entry.name), result);
885
1133
  } else if (entry.isFile() && entry.name.endsWith(".md")) {
886
1134
  const name = entry.name.slice(0, -3);
887
- const content = await readFile4(join4(currentDir, entry.name), "utf-8");
1135
+ const content = await readFile6(join6(currentDir, entry.name), "utf-8");
888
1136
  const relDir = currentDir.slice(baseDir.length + 1);
889
1137
  const category = relDir || null;
890
1138
  const key = compositeKey("command", name, category);
@@ -901,9 +1149,9 @@ async function scanSubfolderType(dir, type, fileName, result) {
901
1149
  }
902
1150
  for (const entry of entries) {
903
1151
  if (entry.isDirectory()) {
904
- const filePath = join4(dir, entry.name, fileName);
1152
+ const filePath = join6(dir, entry.name, fileName);
905
1153
  try {
906
- const content = await readFile4(filePath, "utf-8");
1154
+ const content = await readFile6(filePath, "utf-8");
907
1155
  const key = compositeKey(type, entry.name, null);
908
1156
  result.set(key, { type, name: entry.name, category: null, content });
909
1157
  } catch {
@@ -921,7 +1169,7 @@ async function scanFlatType(dir, type, ext, result) {
921
1169
  for (const entry of entries) {
922
1170
  if (entry.isFile() && entry.name.endsWith(ext)) {
923
1171
  const name = entry.name.slice(0, -ext.length);
924
- const content = await readFile4(join4(dir, entry.name), "utf-8");
1172
+ const content = await readFile6(join6(dir, entry.name), "utf-8");
925
1173
  const key = compositeKey(type, name, null);
926
1174
  result.set(key, { type, name, category: null, content });
927
1175
  }
@@ -936,7 +1184,7 @@ async function scanTemplates(dir, result) {
936
1184
  }
937
1185
  for (const entry of entries) {
938
1186
  if (entry.isFile() && extname(entry.name)) {
939
- const content = await readFile4(join4(dir, entry.name), "utf-8");
1187
+ const content = await readFile6(join6(dir, entry.name), "utf-8");
940
1188
  const key = compositeKey("template", entry.name, null);
941
1189
  result.set(key, { type: "template", name: entry.name, category: null, content });
942
1190
  }
@@ -1022,7 +1270,7 @@ __export(push_exports, {
1022
1270
  runPush: () => runPush
1023
1271
  });
1024
1272
  import { stat as stat2 } from "node:fs/promises";
1025
- import { join as join5 } from "node:path";
1273
+ import { join as join7 } from "node:path";
1026
1274
  async function runPush() {
1027
1275
  const flags = parseFlags(3);
1028
1276
  const dryRun = hasFlag("dry-run", 3);
@@ -1037,7 +1285,7 @@ async function runPush() {
1037
1285
  if (dryRun) console.log(` Mode: dry-run (no changes will be made)`);
1038
1286
  if (force) console.log(` Mode: force (no conflict prompts)`);
1039
1287
  console.log();
1040
- const claudeDir = join5(projectPath, ".claude");
1288
+ const claudeDir = join7(projectPath, ".claude");
1041
1289
  try {
1042
1290
  await stat2(claudeDir);
1043
1291
  } catch {
@@ -1108,7 +1356,11 @@ async function runPush() {
1108
1356
  }
1109
1357
  }
1110
1358
  if (toUpsert.length === 0 && toDelete.length === 0) {
1111
- await apiPut(`/repos/${repoId}`, { claude_sync_at: (/* @__PURE__ */ new Date()).toISOString() });
1359
+ if (!dryRun) {
1360
+ const repoUpdate2 = { claude_sync_at: (/* @__PURE__ */ new Date()).toISOString() };
1361
+ await pushTechStack(projectPath, repoRes.data.tech_stack, repoUpdate2);
1362
+ await apiPut(`/repos/${repoId}`, repoUpdate2);
1363
+ }
1112
1364
  console.log(" Everything is in sync. Nothing to push.\n");
1113
1365
  return;
1114
1366
  }
@@ -1134,10 +1386,28 @@ async function runPush() {
1134
1386
  })),
1135
1387
  delete_keys: toDelete
1136
1388
  });
1137
- await apiPut(`/repos/${repoId}`, { claude_sync_at: (/* @__PURE__ */ new Date()).toISOString() });
1389
+ const repoUpdate = { claude_sync_at: (/* @__PURE__ */ new Date()).toISOString() };
1390
+ await pushTechStack(projectPath, repoRes.data.tech_stack, repoUpdate);
1391
+ await apiPut(`/repos/${repoId}`, repoUpdate);
1138
1392
  console.log(` Done: ${result.data.upserted} upserted, ${result.data.deleted} deleted
1139
1393
  `);
1140
1394
  }
1395
+ async function pushTechStack(projectPath, remoteTechStack, repoUpdate) {
1396
+ try {
1397
+ const detected = await detectTechStack(projectPath);
1398
+ if (detected.length === 0) return;
1399
+ const remote = parseTechStack(remoteTechStack);
1400
+ const { merged, added } = mergeTechStack(remote, detected);
1401
+ console.log(` Tech stack: ${detected.length} detected${added.length > 0 ? ` (${added.length} new)` : ""}`);
1402
+ for (const entry of added) {
1403
+ console.log(` + ${entry.name} (${entry.category})`);
1404
+ }
1405
+ if (added.length > 0) {
1406
+ repoUpdate.tech_stack = merged;
1407
+ }
1408
+ } catch {
1409
+ }
1410
+ }
1141
1411
  function flattenSyncData(data) {
1142
1412
  const result = /* @__PURE__ */ new Map();
1143
1413
  const typeMap = {
@@ -1176,6 +1446,7 @@ var init_push = __esm({
1176
1446
  init_confirm();
1177
1447
  init_api();
1178
1448
  init_variables();
1449
+ init_tech_detect();
1179
1450
  }
1180
1451
  });
1181
1452
 
@@ -1186,8 +1457,8 @@ __export(init_exports, {
1186
1457
  });
1187
1458
  import { createInterface as createInterface4 } from "node:readline/promises";
1188
1459
  import { stdin as stdin4, stdout as stdout4 } from "node:process";
1189
- import { writeFile as writeFile2, mkdir as mkdir2, chmod as chmod2 } from "node:fs/promises";
1190
- import { join as join6, dirname as dirname2 } from "node:path";
1460
+ import { writeFile as writeFile3, mkdir as mkdir2, chmod as chmod2 } from "node:fs/promises";
1461
+ import { join as join8, dirname as dirname2 } from "node:path";
1191
1462
  async function runInit() {
1192
1463
  const flags = parseFlags(3);
1193
1464
  const projectPath = flags["path"] ?? process.cwd();
@@ -1226,27 +1497,27 @@ async function runInit() {
1226
1497
  if (match) worktreeId = match.id;
1227
1498
  } catch {
1228
1499
  }
1229
- const configPath = join6(projectPath, ".codebyplan.json");
1500
+ const configPath = join8(projectPath, ".codebyplan.json");
1230
1501
  const configData = { repo_id: repoId };
1231
1502
  if (worktreeId) configData.worktree_id = worktreeId;
1232
1503
  const configContent = JSON.stringify(configData, null, 2) + "\n";
1233
- await writeFile2(configPath, configContent, "utf-8");
1504
+ await writeFile3(configPath, configContent, "utf-8");
1234
1505
  console.log(` Created ${configPath}`);
1235
1506
  const seedAnswer = (await rl.question("\n Seed with CodeByPlan defaults? (Y/n): ")).trim().toLowerCase();
1236
1507
  if (seedAnswer === "" || seedAnswer === "y" || seedAnswer === "yes") {
1237
1508
  let getFilePath3 = function(typeName, file) {
1238
1509
  const cfg = typeConfig2[typeName];
1239
- const typeDir = typeName === "command" ? join6(claudeDir, cfg.dir, "cbp") : join6(claudeDir, cfg.dir);
1510
+ const typeDir = typeName === "command" ? join8(claudeDir, cfg.dir, "cbp") : join8(claudeDir, cfg.dir);
1240
1511
  if (cfg.subfolder) {
1241
- return join6(typeDir, file.name, `${cfg.subfolder}${cfg.ext}`);
1512
+ return join8(typeDir, file.name, `${cfg.subfolder}${cfg.ext}`);
1242
1513
  }
1243
1514
  if (typeName === "command" && file.category) {
1244
- return join6(typeDir, file.category, `${file.name}${cfg.ext}`);
1515
+ return join8(typeDir, file.category, `${file.name}${cfg.ext}`);
1245
1516
  }
1246
1517
  if (typeName === "template") {
1247
- return join6(typeDir, file.name);
1518
+ return join8(typeDir, file.name);
1248
1519
  }
1249
- return join6(typeDir, `${file.name}${cfg.ext}`);
1520
+ return join8(typeDir, `${file.name}${cfg.ext}`);
1250
1521
  };
1251
1522
  var getFilePath2 = getFilePath3;
1252
1523
  console.log("\n Fetching default files...");
@@ -1261,7 +1532,7 @@ async function runInit() {
1261
1532
  printNextSteps(projectPath);
1262
1533
  return;
1263
1534
  }
1264
- const claudeDir = join6(projectPath, ".claude");
1535
+ const claudeDir = join8(projectPath, ".claude");
1265
1536
  let written = 0;
1266
1537
  const typeConfig2 = {
1267
1538
  command: { dir: "commands", ext: ".md" },
@@ -1284,7 +1555,7 @@ async function runInit() {
1284
1555
  for (const file of files) {
1285
1556
  const filePath = getFilePath3(typeName, file);
1286
1557
  await mkdir2(dirname2(filePath), { recursive: true });
1287
- await writeFile2(filePath, file.content, "utf-8");
1558
+ await writeFile3(filePath, file.content, "utf-8");
1288
1559
  if (typeName === "hook") await chmod2(filePath, 493);
1289
1560
  written++;
1290
1561
  }
@@ -1293,16 +1564,16 @@ async function runInit() {
1293
1564
  ...defaultsData.claude_md ?? []
1294
1565
  ];
1295
1566
  for (const file of specialFiles) {
1296
- const targetPath = join6(projectPath, "CLAUDE.md");
1567
+ const targetPath = join8(projectPath, "CLAUDE.md");
1297
1568
  await mkdir2(dirname2(targetPath), { recursive: true });
1298
- await writeFile2(targetPath, file.content, "utf-8");
1569
+ await writeFile3(targetPath, file.content, "utf-8");
1299
1570
  written++;
1300
1571
  }
1301
1572
  const settingsFiles = defaultsData.settings ?? [];
1302
1573
  for (const file of settingsFiles) {
1303
- const targetPath = join6(claudeDir, "settings.json");
1574
+ const targetPath = join8(claudeDir, "settings.json");
1304
1575
  await mkdir2(dirname2(targetPath), { recursive: true });
1305
- await writeFile2(targetPath, file.content, "utf-8");
1576
+ await writeFile3(targetPath, file.content, "utf-8");
1306
1577
  written++;
1307
1578
  }
1308
1579
  console.log(` Wrote ${written} files to .claude/
@@ -23248,11 +23519,14 @@ function registerWriteTools(server) {
23248
23519
  goal: external_exports.string().optional().describe("Checkpoint goal description"),
23249
23520
  deadline: external_exports.string().optional().describe("Deadline date (ISO format)"),
23250
23521
  status: external_exports.string().optional().describe("Initial status (default: pending). Use 'draft' for checkpoints not ready for development."),
23251
- launch_id: external_exports.string().uuid().optional().describe("Optional launch UUID to connect this checkpoint to")
23522
+ launch_id: external_exports.string().uuid().optional().describe("Optional launch UUID to connect this checkpoint to"),
23523
+ context: external_exports.any().optional().describe("Context JSONB (decisions, discoveries, dependencies, constraints, qa_answers)"),
23524
+ research: external_exports.any().optional().describe("Research JSONB (topics with findings and sources)"),
23525
+ qa: external_exports.any().optional().describe("QA JSONB (checklist items with type, check, status)")
23252
23526
  }
23253
- }, async ({ repo_id, title, number: number3, goal, deadline, status, launch_id }) => {
23527
+ }, async ({ repo_id, title, number: number3, goal, deadline, status, launch_id, context, research, qa }) => {
23254
23528
  try {
23255
- const res = await apiPost("/checkpoints", {
23529
+ const body = {
23256
23530
  repo_id,
23257
23531
  title,
23258
23532
  number: number3,
@@ -23260,7 +23534,11 @@ function registerWriteTools(server) {
23260
23534
  deadline: deadline ?? null,
23261
23535
  status: status ?? "pending",
23262
23536
  launch_id: launch_id ?? null
23263
- });
23537
+ };
23538
+ if (context !== void 0) body.context = context;
23539
+ if (research !== void 0) body.research = research;
23540
+ if (qa !== void 0) body.qa = qa;
23541
+ const res = await apiPost("/checkpoints", body);
23264
23542
  return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
23265
23543
  } catch (err) {
23266
23544
  return { content: [{ type: "text", text: `Error: ${err instanceof Error ? err.message : String(err)}` }], isError: true };
@@ -23277,9 +23555,12 @@ function registerWriteTools(server) {
23277
23555
  completed_at: external_exports.string().optional().describe("Completion timestamp (ISO format)"),
23278
23556
  launch_id: external_exports.string().uuid().nullable().optional().describe("Launch UUID to connect (or null to disconnect)"),
23279
23557
  worktree_id: external_exports.string().uuid().nullable().optional().describe("Worktree UUID to assign (or null to unassign)"),
23280
- assigned_to: external_exports.string().nullable().optional().describe("Who/what claimed this checkpoint")
23558
+ assigned_to: external_exports.string().nullable().optional().describe("Who/what claimed this checkpoint"),
23559
+ context: external_exports.any().optional().describe("Context JSONB (decisions, discoveries, dependencies, constraints, qa_answers)"),
23560
+ research: external_exports.any().optional().describe("Research JSONB (topics with findings and sources)"),
23561
+ qa: external_exports.any().optional().describe("QA JSONB (checklist items with type, check, status)")
23281
23562
  }
23282
- }, async ({ checkpoint_id, title, goal, status, deadline, completed_at, launch_id, worktree_id, assigned_to }) => {
23563
+ }, async ({ checkpoint_id, title, goal, status, deadline, completed_at, launch_id, worktree_id, assigned_to, context, research, qa }) => {
23283
23564
  const update = {};
23284
23565
  if (title !== void 0) update.title = title;
23285
23566
  if (goal !== void 0) update.goal = goal;
@@ -23289,6 +23570,9 @@ function registerWriteTools(server) {
23289
23570
  if (launch_id !== void 0) update.launch_id = launch_id;
23290
23571
  if (worktree_id !== void 0) update.worktree_id = worktree_id;
23291
23572
  if (assigned_to !== void 0) update.assigned_to = assigned_to;
23573
+ if (context !== void 0) update.context = context;
23574
+ if (research !== void 0) update.research = research;
23575
+ if (qa !== void 0) update.qa = qa;
23292
23576
  if (Object.keys(update).length === 0) {
23293
23577
  return { content: [{ type: "text", text: "Error: No fields to update" }], isError: true };
23294
23578
  }
@@ -23324,17 +23608,24 @@ function registerWriteTools(server) {
23324
23608
  title: external_exports.string().describe("Task title"),
23325
23609
  number: external_exports.number().int().describe("Task number (e.g. 1 for TASK-1)"),
23326
23610
  requirements: external_exports.string().optional().describe("Task requirements text"),
23327
- status: external_exports.string().optional().describe("Initial status (default: pending)")
23611
+ status: external_exports.string().optional().describe("Initial status (default: pending)"),
23612
+ context: external_exports.any().optional().describe("Context JSONB (decisions, discoveries, dependencies, constraints)"),
23613
+ qa: external_exports.any().optional().describe("QA JSONB (checklist items with type, check, status)"),
23614
+ research: external_exports.any().optional().describe("Research JSONB (topics with findings and sources)")
23328
23615
  }
23329
- }, async ({ checkpoint_id, title, number: number3, requirements, status }) => {
23616
+ }, async ({ checkpoint_id, title, number: number3, requirements, status, context, qa, research }) => {
23330
23617
  try {
23331
- const res = await apiPost("/tasks", {
23618
+ const body = {
23332
23619
  checkpoint_id,
23333
23620
  title,
23334
23621
  number: number3,
23335
23622
  requirements: requirements ?? null,
23336
23623
  status: status ?? "pending"
23337
- });
23624
+ };
23625
+ if (context !== void 0) body.context = context;
23626
+ if (qa !== void 0) body.qa = qa;
23627
+ if (research !== void 0) body.research = research;
23628
+ const res = await apiPost("/tasks", body);
23338
23629
  return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
23339
23630
  } catch (err) {
23340
23631
  return { content: [{ type: "text", text: `Error: ${err instanceof Error ? err.message : String(err)}` }], isError: true };
@@ -23350,17 +23641,25 @@ function registerWriteTools(server) {
23350
23641
  files_changed: external_exports.array(external_exports.object({
23351
23642
  path: external_exports.string().describe("File path relative to repo root"),
23352
23643
  action: external_exports.string().describe("File action (new, modified, deleted)"),
23353
- status: external_exports.string().describe("Approval status (approved, not_approved)")
23644
+ status: external_exports.string().describe("Approval status (approved, not_approved)"),
23645
+ claude_approved: external_exports.boolean().optional().describe("Whether Claude's automated checks passed for this file"),
23646
+ user_approved: external_exports.boolean().optional().describe("Whether the user has approved this file (via git add or web UI)")
23354
23647
  })).optional().describe("Files changed across all rounds"),
23355
- claim_worktree_id: external_exports.string().uuid().optional().describe("Worktree UUID to auto-claim the parent checkpoint when setting status to in_progress")
23648
+ claim_worktree_id: external_exports.string().uuid().optional().describe("Worktree UUID to auto-claim the parent checkpoint when setting status to in_progress"),
23649
+ context: external_exports.any().optional().describe("Context JSONB (decisions, discoveries, dependencies, constraints)"),
23650
+ qa: external_exports.any().optional().describe("QA JSONB (checklist items with type, check, status)"),
23651
+ research: external_exports.any().optional().describe("Research JSONB (topics with findings and sources)")
23356
23652
  }
23357
- }, async ({ task_id, title, requirements, status, files_changed, claim_worktree_id }) => {
23653
+ }, async ({ task_id, title, requirements, status, files_changed, claim_worktree_id, context, qa, research }) => {
23358
23654
  const update = {};
23359
23655
  if (title !== void 0) update.title = title;
23360
23656
  if (requirements !== void 0) update.requirements = requirements;
23361
23657
  if (status !== void 0) update.status = status;
23362
23658
  if (files_changed !== void 0) update.files_changed = files_changed;
23363
23659
  if (claim_worktree_id !== void 0) update.claim_worktree_id = claim_worktree_id;
23660
+ if (context !== void 0) update.context = context;
23661
+ if (qa !== void 0) update.qa = qa;
23662
+ if (research !== void 0) update.research = research;
23364
23663
  if (Object.keys(update).length === 0) {
23365
23664
  return { content: [{ type: "text", text: "Error: No fields to update" }], isError: true };
23366
23665
  }
@@ -23394,17 +23693,22 @@ function registerWriteTools(server) {
23394
23693
  number: external_exports.number().int().describe("Round number"),
23395
23694
  requirements: external_exports.string().optional().describe("Round requirements text"),
23396
23695
  status: external_exports.string().optional().describe("Initial status (default: pending)"),
23397
- started_at: external_exports.string().optional().describe("Start timestamp (ISO format)")
23696
+ started_at: external_exports.string().optional().describe("Start timestamp (ISO format)"),
23697
+ context: external_exports.any().optional().describe("Context JSONB"),
23698
+ qa: external_exports.any().optional().describe("QA JSONB (checklist items with type, check, status)")
23398
23699
  }
23399
- }, async ({ task_id, number: number3, requirements, status, started_at }) => {
23700
+ }, async ({ task_id, number: number3, requirements, status, started_at, context, qa }) => {
23400
23701
  try {
23401
- const res = await apiPost("/rounds", {
23702
+ const body = {
23402
23703
  task_id,
23403
23704
  number: number3,
23404
23705
  requirements: requirements ?? null,
23405
23706
  status: status ?? "pending",
23406
23707
  started_at: started_at ?? null
23407
- });
23708
+ };
23709
+ if (context !== void 0) body.context = context;
23710
+ if (qa !== void 0) body.qa = qa;
23711
+ const res = await apiPost("/rounds", body);
23408
23712
  return { content: [{ type: "text", text: JSON.stringify(res.data, null, 2) }] };
23409
23713
  } catch (err) {
23410
23714
  return { content: [{ type: "text", text: `Error: ${err instanceof Error ? err.message : String(err)}` }], isError: true };
@@ -23422,10 +23726,14 @@ function registerWriteTools(server) {
23422
23726
  files_changed: external_exports.array(external_exports.object({
23423
23727
  path: external_exports.string().describe("File path relative to repo root"),
23424
23728
  action: external_exports.string().describe("File action (new, modified, deleted)"),
23425
- status: external_exports.string().describe("Approval status (approved, not_approved)")
23426
- })).optional().describe("Files changed in this round with approval status")
23427
- }
23428
- }, async ({ round_id, requirements, status, started_at, completed_at, duration_minutes, files_changed }) => {
23729
+ status: external_exports.string().describe("Approval status (approved, not_approved)"),
23730
+ claude_approved: external_exports.boolean().optional().describe("Whether Claude's automated checks passed for this file"),
23731
+ user_approved: external_exports.boolean().optional().describe("Whether the user has approved this file (via git add or web UI)")
23732
+ })).optional().describe("Files changed in this round with approval status"),
23733
+ context: external_exports.any().optional().describe("Context JSONB"),
23734
+ qa: external_exports.any().optional().describe("QA JSONB (checklist items with type, check, status)")
23735
+ }
23736
+ }, async ({ round_id, requirements, status, started_at, completed_at, duration_minutes, files_changed, context, qa }) => {
23429
23737
  const update = {};
23430
23738
  if (requirements !== void 0) update.requirements = requirements;
23431
23739
  if (status !== void 0) update.status = status;
@@ -23433,6 +23741,8 @@ function registerWriteTools(server) {
23433
23741
  if (completed_at !== void 0) update.completed_at = completed_at;
23434
23742
  if (duration_minutes !== void 0) update.duration_minutes = duration_minutes;
23435
23743
  if (files_changed !== void 0) update.files_changed = files_changed;
23744
+ if (context !== void 0) update.context = context;
23745
+ if (qa !== void 0) update.qa = qa;
23436
23746
  if (Object.keys(update).length === 0) {
23437
23747
  return { content: [{ type: "text", text: "Error: No fields to update" }], isError: true };
23438
23748
  }
@@ -23592,7 +23902,7 @@ function registerWriteTools(server) {
23592
23902
  }
23593
23903
  });
23594
23904
  server.registerTool("sync_claude_files", {
23595
- description: "Sync .claude infrastructure from CodeByPlan DB to local project. Uses aggregated defaults (latest version across all repos) for shared files (commands, agents, skills, rules, hooks, templates). Repo-specific files (CLAUDE.md, settings) are not overwritten.",
23905
+ description: "Sync .claude infrastructure from CodeByPlan DB to local project. Uses aggregated defaults (latest version across all repos) for shared files (commands, agents, skills, rules, hooks, templates, stack docs). Repo-specific files (CLAUDE.md, settings) are not overwritten.",
23596
23906
  inputSchema: {
23597
23907
  repo_id: external_exports.string().uuid().describe("Repository ID to sync files for"),
23598
23908
  project_path: external_exports.string().describe("Absolute path to the project root directory")
@@ -23857,6 +24167,11 @@ if (!process.env.CODEBYPLAN_API_KEY) {
23857
24167
  } catch {
23858
24168
  }
23859
24169
  }
24170
+ if (process.env.CODEBYPLAN_API_KEY?.startsWith("CODEBYPLAN_API_KEY=")) {
24171
+ process.env.CODEBYPLAN_API_KEY = process.env.CODEBYPLAN_API_KEY.slice(
24172
+ "CODEBYPLAN_API_KEY=".length
24173
+ );
24174
+ }
23860
24175
  var arg = process.argv[2];
23861
24176
  if (arg === "--version" || arg === "-v") {
23862
24177
  console.log(VERSION);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@codebyplan/cli",
3
- "version": "2.0.0",
3
+ "version": "2.0.2",
4
4
  "description": "MCP server for CodeByPlan — AI-powered development planning and tracking",
5
5
  "type": "module",
6
6
  "bin": {