@bensandee/tooling 0.14.1 → 0.16.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin.mjs CHANGED
@@ -2,17 +2,15 @@
2
2
  import { t as isExecSyncError } from "./exec-CC49vrkM.mjs";
3
3
  import { defineCommand, runMain } from "citty";
4
4
  import * as p from "@clack/prompts";
5
- import { execSync } from "node:child_process";
6
5
  import path from "node:path";
7
6
  import { existsSync, mkdirSync, readFileSync, readdirSync, rmSync, writeFileSync } from "node:fs";
8
7
  import JSON5 from "json5";
9
8
  import { parse } from "jsonc-parser";
10
9
  import { z } from "zod";
11
10
  import { isMap, isSeq, parseDocument } from "yaml";
11
+ import { execSync } from "node:child_process";
12
12
  import { FatalError, TransientError, UnexpectedError } from "@bensandee/common";
13
13
  //#region src/types.ts
14
- /** Default CI platform when not explicitly chosen. */
15
- const DEFAULT_CI = "forgejo";
16
14
  const LEGACY_TOOLS = [
17
15
  "eslint",
18
16
  "prettier",
@@ -141,10 +139,6 @@ function readPackageJson(targetDir) {
141
139
  return;
142
140
  }
143
141
  }
144
- /** Detect whether the project is a monorepo. */
145
- function detectMonorepo(targetDir) {
146
- return existsSync(path.join(targetDir, "pnpm-workspace.yaml"));
147
- }
148
142
  /** Detect project type from package.json signals. */
149
143
  function detectProjectType(targetDir) {
150
144
  const pkg = readPackageJson(targetDir);
@@ -185,6 +179,32 @@ function hasWebUIDeps(targetDir) {
185
179
  if (!pkg) return false;
186
180
  return packageHasWebUIDeps(pkg);
187
181
  }
182
+ /** Detect CI platform from existing workflow directories. */
183
+ function detectCiPlatform(targetDir) {
184
+ if (existsSync(path.join(targetDir, ".forgejo", "workflows"))) return "forgejo";
185
+ if (existsSync(path.join(targetDir, ".github", "workflows"))) return "github";
186
+ return "none";
187
+ }
188
+ /**
189
+ * Compute convention-based defaults for a project directory.
190
+ * These are the values the tool would use when .tooling.json says nothing.
191
+ */
192
+ function computeDefaults(targetDir) {
193
+ const detected = detectProject(targetDir);
194
+ const isMonorepo = detected.hasPnpmWorkspace;
195
+ const hasPrettier = detected.legacyConfigs.some((l) => l.tool === "prettier");
196
+ return {
197
+ structure: isMonorepo ? "monorepo" : "single",
198
+ useEslintPlugin: true,
199
+ formatter: hasPrettier ? "prettier" : "oxfmt",
200
+ setupVitest: !isMonorepo && !detected.hasVitestConfig,
201
+ ci: detectCiPlatform(targetDir),
202
+ setupRenovate: true,
203
+ releaseStrategy: isMonorepo ? "changesets" : "simple",
204
+ projectType: isMonorepo ? "default" : detectProjectType(targetDir),
205
+ detectPackageTypes: true
206
+ };
207
+ }
188
208
  /** List packages in a monorepo's packages/ directory. */
189
209
  function getMonorepoPackages(targetDir) {
190
210
  const packagesDir = path.join(targetDir, "packages");
@@ -210,175 +230,99 @@ function isCancelled(value) {
210
230
  return p.isCancel(value);
211
231
  }
212
232
  async function runInitPrompts(targetDir, saved) {
213
- p.intro("@bensandee/tooling repo:init");
233
+ p.intro("@bensandee/tooling repo:sync");
214
234
  const existingPkg = readPackageJson(targetDir);
215
235
  const detected = detectProject(targetDir);
236
+ const defaults = computeDefaults(targetDir);
216
237
  const isExisting = detected.hasPackageJson;
238
+ const isFirstInit = !saved;
217
239
  const name = existingPkg?.name ?? path.basename(targetDir);
218
- const detectedMonorepo = detectMonorepo(targetDir);
219
- const structure = await p.select({
220
- message: "Project structure",
221
- initialValue: saved?.structure ?? (detectedMonorepo ? "monorepo" : "single"),
222
- options: [{
223
- value: "single",
224
- label: "Single repo"
225
- }, {
226
- value: "monorepo",
227
- label: "Monorepo (pnpm workspaces)"
228
- }]
229
- });
230
- if (isCancelled(structure)) {
231
- p.cancel("Cancelled.");
232
- process.exit(0);
233
- }
234
- const useEslintPlugin = await p.confirm({
235
- message: "Include @bensandee/eslint-plugin?",
236
- initialValue: saved?.useEslintPlugin ?? true
237
- });
238
- if (isCancelled(useEslintPlugin)) {
239
- p.cancel("Cancelled.");
240
- process.exit(0);
241
- }
242
- const hasExistingPrettier = detected.legacyConfigs.some((l) => l.tool === "prettier");
243
- const formatter = await p.select({
244
- message: "Formatter",
245
- initialValue: saved?.formatter ?? (hasExistingPrettier ? "prettier" : "oxfmt"),
246
- options: [{
247
- value: "oxfmt",
248
- label: "oxfmt",
249
- hint: "fast, Rust-based"
250
- }, {
251
- value: "prettier",
252
- label: "Prettier"
253
- }]
254
- });
255
- if (isCancelled(formatter)) {
256
- p.cancel("Cancelled.");
257
- process.exit(0);
258
- }
259
- const setupVitest = await p.confirm({
260
- message: "Set up vitest with a starter test?",
261
- initialValue: saved?.setupVitest ?? !isExisting
262
- });
263
- if (isCancelled(setupVitest)) {
264
- p.cancel("Cancelled.");
265
- process.exit(0);
266
- }
267
- const ci = await p.select({
268
- message: "CI workflow",
269
- initialValue: saved?.ci,
270
- options: [
271
- {
272
- value: "forgejo",
273
- label: "Forgejo Actions"
274
- },
275
- {
276
- value: "github",
277
- label: "GitHub Actions"
278
- },
279
- {
280
- value: "none",
281
- label: "None"
282
- }
283
- ]
284
- });
285
- if (isCancelled(ci)) {
286
- p.cancel("Cancelled.");
287
- process.exit(0);
288
- }
289
- let setupRenovate = true;
290
- if (ci === "github") {
291
- const renovateAnswer = await p.confirm({
292
- message: "Set up Renovate for automated dependency updates?",
293
- initialValue: saved?.setupRenovate ?? true
240
+ const structure = saved?.structure ?? defaults.structure;
241
+ const useEslintPlugin = saved?.useEslintPlugin ?? defaults.useEslintPlugin;
242
+ let formatter = saved?.formatter ?? defaults.formatter;
243
+ const setupVitest = saved?.setupVitest ?? defaults.setupVitest;
244
+ let ci = saved?.ci ?? defaults.ci;
245
+ const setupRenovate = saved?.setupRenovate ?? defaults.setupRenovate;
246
+ let releaseStrategy = saved?.releaseStrategy ?? defaults.releaseStrategy;
247
+ const projectType = saved?.projectType ?? defaults.projectType;
248
+ const detectPackageTypes = saved?.detectPackageTypes ?? defaults.detectPackageTypes;
249
+ if (detected.legacyConfigs.some((l) => l.tool === "prettier") && isFirstInit) {
250
+ const formatterAnswer = await p.select({
251
+ message: "Existing Prettier config found. Keep Prettier or migrate to oxfmt?",
252
+ initialValue: "prettier",
253
+ options: [{
254
+ value: "prettier",
255
+ label: "Keep Prettier"
256
+ }, {
257
+ value: "oxfmt",
258
+ label: "Migrate to oxfmt",
259
+ hint: "fast, Rust-based"
260
+ }]
294
261
  });
295
- if (isCancelled(renovateAnswer)) {
262
+ if (isCancelled(formatterAnswer)) {
296
263
  p.cancel("Cancelled.");
297
264
  process.exit(0);
298
265
  }
299
- setupRenovate = renovateAnswer;
266
+ formatter = formatterAnswer;
300
267
  }
301
- const releaseStrategy = await p.select({
302
- message: "Release management",
303
- initialValue: saved?.releaseStrategy ?? "none",
304
- options: [
305
- {
306
- value: "none",
307
- label: "None"
308
- },
309
- {
310
- value: "release-it",
311
- label: "release-it",
312
- hint: "interactive, conventional commits"
313
- },
314
- {
315
- value: "changesets",
316
- label: "Changesets",
317
- hint: "PR-based versioning"
318
- },
319
- {
320
- value: "simple",
321
- label: "Simple",
322
- hint: "uses commit-and-tag-version internally"
323
- }
324
- ]
325
- });
326
- if (isCancelled(releaseStrategy)) {
327
- p.cancel("Cancelled.");
328
- process.exit(0);
329
- }
330
- let projectType = "default";
331
- let detectPackageTypes = false;
332
- if (structure === "monorepo") {
333
- const packages = getMonorepoPackages(targetDir);
334
- if (packages.length > 0) {
335
- const detections = packages.map((pkg) => {
336
- const type = detectProjectType(pkg.dir);
337
- return ` ${pkg.name} → ${type}`;
338
- });
339
- p.note(detections.join("\n"), "Detected package types");
340
- const applyDetected = await p.confirm({
341
- message: "Apply detected tsconfig bases to packages?",
342
- initialValue: saved?.detectPackageTypes ?? true
343
- });
344
- if (isCancelled(applyDetected)) {
345
- p.cancel("Cancelled.");
346
- process.exit(0);
347
- }
348
- detectPackageTypes = applyDetected;
268
+ const detectedCi = detectCiPlatform(targetDir);
269
+ if (isFirstInit && detectedCi === "none") {
270
+ const ciAnswer = await p.select({
271
+ message: "CI workflow",
272
+ initialValue: "forgejo",
273
+ options: [
274
+ {
275
+ value: "forgejo",
276
+ label: "Forgejo Actions"
277
+ },
278
+ {
279
+ value: "github",
280
+ label: "GitHub Actions"
281
+ },
282
+ {
283
+ value: "none",
284
+ label: "None"
285
+ }
286
+ ]
287
+ });
288
+ if (isCancelled(ciAnswer)) {
289
+ p.cancel("Cancelled.");
290
+ process.exit(0);
349
291
  }
350
- } else {
351
- const projectTypeAnswer = await p.select({
352
- message: "Project type",
353
- initialValue: saved?.projectType ?? "default",
292
+ ci = ciAnswer;
293
+ }
294
+ const hasExistingRelease = detected.hasReleaseItConfig || detected.hasSimpleReleaseConfig || detected.hasChangesetsConfig;
295
+ if (isFirstInit && !hasExistingRelease) {
296
+ const releaseAnswer = await p.select({
297
+ message: "Release management",
298
+ initialValue: defaults.releaseStrategy,
354
299
  options: [
355
300
  {
356
- value: "default",
357
- label: "Default",
358
- hint: "strictest base, no runtime assumptions"
301
+ value: "none",
302
+ label: "None"
359
303
  },
360
304
  {
361
- value: "node",
362
- label: "Node.js",
363
- hint: "adds types: [\"node\"]"
305
+ value: "release-it",
306
+ label: "release-it",
307
+ hint: "interactive, conventional commits"
364
308
  },
365
309
  {
366
- value: "react",
367
- label: "React",
368
- hint: "browser app with JSX + DOM types"
310
+ value: "changesets",
311
+ label: "Changesets",
312
+ hint: "PR-based versioning"
369
313
  },
370
314
  {
371
- value: "library",
372
- label: "Library",
373
- hint: "publishable package (ES2022 target)"
315
+ value: "simple",
316
+ label: "Simple",
317
+ hint: "uses commit-and-tag-version internally"
374
318
  }
375
319
  ]
376
320
  });
377
- if (isCancelled(projectTypeAnswer)) {
321
+ if (isCancelled(releaseAnswer)) {
378
322
  p.cancel("Cancelled.");
379
323
  process.exit(0);
380
324
  }
381
- projectType = projectTypeAnswer;
325
+ releaseStrategy = releaseAnswer;
382
326
  }
383
327
  p.outro("Configuration complete!");
384
328
  return {
@@ -400,18 +344,13 @@ async function runInitPrompts(targetDir, saved) {
400
344
  function buildDefaultConfig(targetDir, flags) {
401
345
  const existingPkg = readPackageJson(targetDir);
402
346
  const detected = detectProject(targetDir);
347
+ const defaults = computeDefaults(targetDir);
403
348
  return {
404
349
  name: existingPkg?.name ?? path.basename(targetDir),
405
350
  isNew: !detected.hasPackageJson,
406
- structure: detected.hasPnpmWorkspace ? "monorepo" : "single",
407
- useEslintPlugin: flags.eslintPlugin ?? true,
408
- formatter: detected.legacyConfigs.some((l) => l.tool === "prettier") ? "prettier" : "oxfmt",
409
- setupVitest: !detected.hasVitestConfig,
410
- ci: flags.noCi ? "none" : DEFAULT_CI,
411
- setupRenovate: true,
412
- releaseStrategy: detected.hasReleaseItConfig ? "release-it" : detected.hasSimpleReleaseConfig ? "simple" : detected.hasChangesetsConfig ? "changesets" : "none",
413
- projectType: "default",
414
- detectPackageTypes: true,
351
+ ...defaults,
352
+ ...flags.eslintPlugin !== void 0 && { useEslintPlugin: flags.eslintPlugin },
353
+ ...flags.noCi && { ci: "none" },
415
354
  targetDir
416
355
  };
417
356
  }
@@ -510,6 +449,109 @@ function createDryRunContext(config) {
510
449
  };
511
450
  }
512
451
  //#endregion
452
+ //#region src/utils/tooling-config.ts
453
+ const CONFIG_FILE = ".tooling.json";
454
+ const ToolingConfigSchema = z.object({
455
+ structure: z.enum(["single", "monorepo"]).optional(),
456
+ useEslintPlugin: z.boolean().optional(),
457
+ formatter: z.enum(["oxfmt", "prettier"]).optional(),
458
+ setupVitest: z.boolean().optional(),
459
+ ci: z.enum([
460
+ "github",
461
+ "forgejo",
462
+ "none"
463
+ ]).optional(),
464
+ setupRenovate: z.boolean().optional(),
465
+ releaseStrategy: z.enum([
466
+ "release-it",
467
+ "simple",
468
+ "changesets",
469
+ "none"
470
+ ]).optional(),
471
+ projectType: z.enum([
472
+ "default",
473
+ "node",
474
+ "react",
475
+ "library"
476
+ ]).optional(),
477
+ detectPackageTypes: z.boolean().optional(),
478
+ setupDocker: z.boolean().optional(),
479
+ docker: z.record(z.string(), z.object({
480
+ dockerfile: z.string(),
481
+ context: z.string().default(".")
482
+ })).optional()
483
+ });
484
+ /** Load saved tooling config from the target directory. Returns undefined if missing or invalid. */
485
+ function loadToolingConfig(targetDir) {
486
+ const fullPath = path.join(targetDir, CONFIG_FILE);
487
+ if (!existsSync(fullPath)) return void 0;
488
+ try {
489
+ const raw = readFileSync(fullPath, "utf-8");
490
+ const result = ToolingConfigSchema.safeParse(JSON.parse(raw));
491
+ return result.success ? result.data : void 0;
492
+ } catch {
493
+ return;
494
+ }
495
+ }
496
+ /** Config fields that can be overridden in .tooling.json. */
497
+ const OVERRIDE_KEYS = [
498
+ "structure",
499
+ "useEslintPlugin",
500
+ "formatter",
501
+ "setupVitest",
502
+ "ci",
503
+ "setupRenovate",
504
+ "releaseStrategy",
505
+ "projectType",
506
+ "detectPackageTypes"
507
+ ];
508
+ /** Keys that have no effect for monorepos (generators ignore them). */
509
+ const MONOREPO_IGNORED_KEYS = new Set(["setupVitest", "projectType"]);
510
+ /**
511
+ * Save only the fields that differ from detected defaults to .tooling.json.
512
+ * A fully conventional project produces `{}` (or a minimal set of overrides).
513
+ * Keys that have no effect for the current structure are omitted.
514
+ */
515
+ function saveToolingConfig(ctx, config) {
516
+ const defaults = computeDefaults(config.targetDir);
517
+ const isMonorepo = config.structure === "monorepo";
518
+ const overrides = {};
519
+ for (const key of OVERRIDE_KEYS) {
520
+ if (isMonorepo && MONOREPO_IGNORED_KEYS.has(key)) continue;
521
+ if (config[key] !== defaults[key]) overrides[key] = config[key];
522
+ }
523
+ const content = JSON.stringify(overrides, null, 2) + "\n";
524
+ const existing = ctx.exists(CONFIG_FILE) ? ctx.read(CONFIG_FILE) : void 0;
525
+ if (existing !== void 0 && contentEqual(CONFIG_FILE, existing, content)) return {
526
+ filePath: CONFIG_FILE,
527
+ action: "skipped",
528
+ description: "Already up to date"
529
+ };
530
+ ctx.write(CONFIG_FILE, content);
531
+ return {
532
+ filePath: CONFIG_FILE,
533
+ action: existing ? "updated" : "created",
534
+ description: "Saved tooling configuration"
535
+ };
536
+ }
537
+ /** Merge saved config over detected defaults. Saved values win when present. */
538
+ function mergeWithSavedConfig(detected, saved) {
539
+ return {
540
+ name: detected.name,
541
+ isNew: detected.isNew,
542
+ targetDir: detected.targetDir,
543
+ structure: saved.structure ?? detected.structure,
544
+ useEslintPlugin: saved.useEslintPlugin ?? detected.useEslintPlugin,
545
+ formatter: saved.formatter ?? detected.formatter,
546
+ setupVitest: saved.setupVitest ?? detected.setupVitest,
547
+ ci: saved.ci ?? detected.ci,
548
+ setupRenovate: saved.setupRenovate ?? detected.setupRenovate,
549
+ releaseStrategy: saved.releaseStrategy ?? detected.releaseStrategy,
550
+ projectType: saved.projectType ?? detected.projectType,
551
+ detectPackageTypes: saved.detectPackageTypes ?? detected.detectPackageTypes
552
+ };
553
+ }
554
+ //#endregion
513
555
  //#region src/generators/package-json.ts
514
556
  const STANDARD_SCRIPTS_SINGLE = {
515
557
  build: "tsdown",
@@ -520,8 +562,8 @@ const STANDARD_SCRIPTS_SINGLE = {
520
562
  knip: "knip",
521
563
  check: "pnpm exec tooling checks:run",
522
564
  "ci:check": "pnpm check",
523
- "tooling:check": "pnpm exec tooling repo:check",
524
- "tooling:update": "pnpm exec tooling repo:update"
565
+ "tooling:check": "pnpm exec tooling repo:sync --check",
566
+ "tooling:sync": "pnpm exec tooling repo:sync"
525
567
  };
526
568
  const STANDARD_SCRIPTS_MONOREPO = {
527
569
  build: "pnpm -r build",
@@ -531,16 +573,18 @@ const STANDARD_SCRIPTS_MONOREPO = {
531
573
  knip: "knip",
532
574
  check: "pnpm exec tooling checks:run",
533
575
  "ci:check": "pnpm check",
534
- "tooling:check": "pnpm exec tooling repo:check",
535
- "tooling:update": "pnpm exec tooling repo:update"
576
+ "tooling:check": "pnpm exec tooling repo:sync --check",
577
+ "tooling:sync": "pnpm exec tooling repo:sync"
536
578
  };
537
579
  /** Scripts that tooling owns — map from script name to keyword that must appear in the value. */
538
580
  const MANAGED_SCRIPTS = {
539
581
  check: "checks:run",
540
582
  "ci:check": "pnpm check",
541
- "tooling:check": "repo:check",
542
- "tooling:update": "repo:update"
583
+ "tooling:check": "repo:sync --check",
584
+ "tooling:sync": "repo:sync"
543
585
  };
586
+ /** Deprecated scripts to remove during migration. */
587
+ const DEPRECATED_SCRIPTS = ["tooling:init", "tooling:update"];
544
588
  /** DevDeps that belong in every project (single repo) or per-package (monorepo). */
545
589
  const PER_PACKAGE_DEV_DEPS = {
546
590
  "@types/node": "25.3.2",
@@ -596,7 +640,7 @@ function getAddedDevDepNames(config) {
596
640
  const deps = { ...ROOT_DEV_DEPS };
597
641
  if (config.structure !== "monorepo") Object.assign(deps, PER_PACKAGE_DEV_DEPS);
598
642
  deps["@bensandee/config"] = "0.8.1";
599
- deps["@bensandee/tooling"] = "0.14.1";
643
+ deps["@bensandee/tooling"] = "0.16.0";
600
644
  if (config.formatter === "oxfmt") deps["oxfmt"] = "0.35.0";
601
645
  if (config.formatter === "prettier") deps["prettier"] = "3.8.1";
602
646
  addReleaseDeps(deps, config);
@@ -617,7 +661,7 @@ async function generatePackageJson(ctx) {
617
661
  const devDeps = { ...ROOT_DEV_DEPS };
618
662
  if (!isMonorepo) Object.assign(devDeps, PER_PACKAGE_DEV_DEPS);
619
663
  devDeps["@bensandee/config"] = isWorkspacePackage(ctx, "@bensandee/config") ? "workspace:*" : "0.8.1";
620
- devDeps["@bensandee/tooling"] = isWorkspacePackage(ctx, "@bensandee/tooling") ? "workspace:*" : "0.14.1";
664
+ devDeps["@bensandee/tooling"] = isWorkspacePackage(ctx, "@bensandee/tooling") ? "workspace:*" : "0.16.0";
621
665
  if (ctx.config.useEslintPlugin) devDeps["@bensandee/eslint-plugin"] = isWorkspacePackage(ctx, "@bensandee/eslint-plugin") ? "workspace:*" : "0.9.2";
622
666
  if (ctx.config.formatter === "oxfmt") devDeps["oxfmt"] = "0.35.0";
623
667
  if (ctx.config.formatter === "prettier") devDeps["prettier"] = "3.8.1";
@@ -642,6 +686,10 @@ async function generatePackageJson(ctx) {
642
686
  existingScripts[key] = value;
643
687
  changes.push(`updated script: ${key}`);
644
688
  }
689
+ for (const key of DEPRECATED_SCRIPTS) if (key in existingScripts) {
690
+ delete existingScripts[key];
691
+ changes.push(`removed deprecated script: ${key}`);
692
+ }
645
693
  pkg.scripts = existingScripts;
646
694
  const existingDevDeps = pkg.devDependencies ?? {};
647
695
  for (const [key, value] of Object.entries(devDeps)) if (!(key in existingDevDeps)) {
@@ -686,244 +734,75 @@ async function generatePackageJson(ctx) {
686
734
  };
687
735
  }
688
736
  //#endregion
689
- //#region src/generators/migrate-prompt.ts
690
- /**
691
- * Generate a context-aware AI migration prompt based on what the CLI did.
692
- * This prompt can be pasted into Claude Code (or similar) to finish the migration.
693
- */
694
- function generateMigratePrompt(results, config, detected) {
695
- const sections = [];
696
- sections.push("# Migration Prompt");
697
- sections.push("");
698
- sections.push("The following prompt was generated by `@bensandee/tooling repo:init`. Paste it into Claude Code or another AI assistant to finish migrating this repository.");
699
- sections.push("");
700
- sections.push("> **Tip:** Before starting, run `/init` in Claude Code to generate a `CLAUDE.md` that gives the AI a complete picture of your repository's structure, conventions, and build commands.");
701
- sections.push("");
702
- sections.push("## What was changed");
703
- sections.push("");
704
- const created = results.filter((r) => r.action === "created");
705
- const updated = results.filter((r) => r.action === "updated");
706
- const skipped = results.filter((r) => r.action === "skipped");
707
- const archived = results.filter((r) => r.action === "archived");
708
- if (created.length > 0) {
709
- sections.push("**Created:**");
710
- for (const r of created) sections.push(`- \`${r.filePath}\` — ${r.description}`);
711
- sections.push("");
712
- }
713
- if (updated.length > 0) {
714
- sections.push("**Updated:**");
715
- for (const r of updated) sections.push(`- \`${r.filePath}\` — ${r.description}`);
716
- sections.push("");
737
+ //#region src/generators/tsconfig.ts
738
+ async function generateTsconfig(ctx) {
739
+ const filePath = "tsconfig.json";
740
+ const existing = ctx.read(filePath);
741
+ if (ctx.config.structure === "monorepo") return [generateMonorepoRootTsconfig(ctx), ...ctx.config.detectPackageTypes ? generateMonorepoPackageTsconfigs(ctx) : []];
742
+ const extendsValue = `@bensandee/config/tsconfig/${ctx.config.projectType}`;
743
+ if (!existing) {
744
+ const config = {
745
+ extends: extendsValue,
746
+ ...ctx.exists("src") ? { include: ["src"] } : {}
747
+ };
748
+ ctx.write(filePath, JSON.stringify(config, null, 2) + "\n");
749
+ return [{
750
+ filePath,
751
+ action: "created",
752
+ description: `Generated tsconfig.json with ${extendsValue}`
753
+ }];
717
754
  }
718
- if (archived.length > 0) {
719
- sections.push("**Archived:**");
720
- for (const r of archived) sections.push(`- \`${r.filePath}\` — ${r.description}`);
721
- sections.push("");
755
+ if (existing.includes("// @bensandee/tooling:ignore")) return [{
756
+ filePath,
757
+ action: "skipped",
758
+ description: "Ignored via tooling:ignore comment"
759
+ }];
760
+ const parsed = parseTsconfig(existing);
761
+ if (isSolutionStyle(parsed)) {
762
+ const results = [{
763
+ filePath,
764
+ action: "skipped",
765
+ description: "Solution-style tsconfig — traversing references"
766
+ }];
767
+ for (const ref of parsed.references ?? []) {
768
+ const refPath = resolveReferencePath(ref.path);
769
+ results.push(mergeSingleTsconfig(ctx, refPath, extendsValue));
770
+ }
771
+ return results;
722
772
  }
723
- if (skipped.length > 0) {
724
- sections.push("**Skipped (review these):**");
725
- for (const r of skipped) sections.push(`- \`${r.filePath}\` — ${r.description}`);
726
- sections.push("");
773
+ return [mergeSingleTsconfig(ctx, filePath, extendsValue)];
774
+ }
775
+ function isSolutionStyle(parsed) {
776
+ return Array.isArray(parsed.references) && parsed.references.length > 0 && Array.isArray(parsed.files) && parsed.files.length === 0;
777
+ }
778
+ function resolveReferencePath(refPath) {
779
+ const resolved = refPath.endsWith(".json") ? refPath : path.join(refPath, "tsconfig.json");
780
+ return path.normalize(resolved);
781
+ }
782
+ function mergeSingleTsconfig(ctx, filePath, extendsValue) {
783
+ const existing = ctx.read(filePath);
784
+ if (!existing) return {
785
+ filePath,
786
+ action: "skipped",
787
+ description: "File not found"
788
+ };
789
+ if (existing.includes("// @bensandee/tooling:ignore")) return {
790
+ filePath,
791
+ action: "skipped",
792
+ description: "Ignored via tooling:ignore comment"
793
+ };
794
+ const parsed = parseTsconfig(existing);
795
+ const changes = [];
796
+ if (!parsed.extends) {
797
+ parsed.extends = extendsValue;
798
+ changes.push(`added extends: ${extendsValue}`);
727
799
  }
728
- sections.push("## Migration tasks");
729
- sections.push("");
730
- const legacyToRemove = detected.legacyConfigs.filter((legacy) => !(legacy.tool === "prettier" && config.formatter === "prettier"));
731
- if (legacyToRemove.length > 0) {
732
- sections.push("### Remove legacy tooling");
733
- sections.push("");
734
- for (const legacy of legacyToRemove) {
735
- const replacement = {
736
- eslint: "oxlint",
737
- prettier: "oxfmt",
738
- jest: "vitest",
739
- webpack: "tsdown",
740
- rollup: "tsdown"
741
- }[legacy.tool];
742
- sections.push(`- Remove ${legacy.tool} config files (${legacy.files.map((f) => `\`${f}\``).join(", ")}). This project now uses **${replacement}**.`);
743
- sections.push(` - Uninstall ${legacy.tool}-related packages from devDependencies`);
744
- if (legacy.tool === "eslint") sections.push(" - Migrate any custom ESLint rules that don't have oxlint equivalents");
745
- if (legacy.tool === "jest") sections.push(" - Migrate any jest-specific test utilities (jest.mock, jest.fn) to vitest equivalents (vi.mock, vi.fn)");
746
- }
747
- sections.push("");
748
- }
749
- if (archived.length > 0) {
750
- sections.push("### Review archived files");
751
- sections.push("");
752
- sections.push("The following files were modified or replaced. The originals have been saved to `.tooling-archived/`:");
753
- sections.push("");
754
- for (const r of archived) sections.push(`- \`${r.filePath}\` → \`.tooling-archived/${r.filePath}\``);
755
- sections.push("");
756
- sections.push("For each archived file, **diff the old version against the new one** and look for features, categories, or modules that were enabled in the original but are missing from the replacement. Focus on broad capability gaps rather than individual rule strictness (in general, being stricter is fine). Examples of what to look for:");
757
- sections.push("");
758
- sections.push("- **Lint configs**: enabled plugin categories (e.g. `jsx-a11y`, `import`, `react`, `nextjs`), custom `plugins` or `overrides`, file-scoped rule blocks");
759
- sections.push("- **TypeScript configs**: compiler features like `jsx`, `paths`, `baseUrl`, or `references` that affect build behavior");
760
- sections.push("- **Other configs**: feature flags, custom presets, or integrations that go beyond the default template");
761
- sections.push("");
762
- sections.push("If the old config had capabilities the new one lacks, port them into the new file. Then:");
763
- sections.push("");
764
- sections.push("1. If the project previously used `husky` and `lint-staged`, remove them from `devDependencies`");
765
- sections.push("2. Delete the `.tooling-archived/` directory when migration is complete");
766
- sections.push("");
767
- }
768
- const oxlintWasSkipped = results.find((r) => r.filePath === "oxlint.config.ts")?.action === "skipped";
769
- if (detected.hasLegacyOxlintJson) {
770
- sections.push("### Migrate .oxlintrc.json to oxlint.config.ts");
771
- sections.push("");
772
- sections.push("A new `oxlint.config.ts` has been generated using `defineConfig` from the `oxlint` package. The existing `.oxlintrc.json` needs to be migrated:");
773
- sections.push("");
774
- sections.push("1. Read `.oxlintrc.json` and compare its `rules` against the rules provided by `@bensandee/config/oxlint/recommended` (check `node_modules/@bensandee/config`). Most standard rules are already included in the recommended config.");
775
- sections.push("2. If there are any custom rules, overrides, settings, or `jsPlugins` not covered by the recommended config, add them to `oxlint.config.ts` alongside the `extends`.");
776
- sections.push("3. Delete `.oxlintrc.json`.");
777
- sections.push("4. Run `pnpm lint` to verify the new config works correctly.");
778
- sections.push("");
779
- } else if (oxlintWasSkipped && detected.hasOxlintConfig) {
780
- sections.push("### Verify oxlint.config.ts includes recommended rules");
781
- sections.push("");
782
- sections.push("The existing `oxlint.config.ts` was kept as-is. Verify that it extends the recommended config from `@bensandee/config/oxlint`:");
783
- sections.push("");
784
- sections.push("1. Open `oxlint.config.ts` and check that it imports and extends `@bensandee/config/oxlint/recommended`.");
785
- sections.push("2. The expected pattern is:");
786
- sections.push(" ```ts");
787
- sections.push(" import recommended from \"@bensandee/config/oxlint/recommended\";");
788
- sections.push(" import { defineConfig } from \"oxlint\";");
789
- sections.push("");
790
- sections.push(" export default defineConfig({ extends: [recommended] });");
791
- sections.push(" ```");
792
- sections.push("3. If it uses a different pattern, update it to extend the recommended config while preserving any project-specific customizations.");
793
- sections.push("4. Run `pnpm lint` to verify the config works correctly.");
794
- sections.push("");
795
- }
796
- if (config.structure === "monorepo" && !detected.hasPnpmWorkspace) {
797
- sections.push("### Migrate to monorepo structure");
798
- sections.push("");
799
- sections.push("This project was converted from a single repo to a monorepo. Complete the migration:");
800
- sections.push("");
801
- sections.push("1. Move existing source into `packages/<name>/` (using the existing package name)");
802
- sections.push("2. Split the root `package.json` into a root workspace manifest + package-level `package.json`");
803
- sections.push("3. Move the existing `tsconfig.json` into the package and update the root tsconfig with project references");
804
- sections.push("4. Create a package-level `tsdown.config.ts` in the new package");
805
- sections.push("5. Update any import paths or build scripts affected by the move");
806
- sections.push("");
807
- }
808
- const skippedConfigs = skipped.filter((r) => r.filePath !== "ci" && r.description !== "Not a monorepo");
809
- if (skippedConfigs.length > 0) {
810
- sections.push("### Review skipped files");
811
- sections.push("");
812
- sections.push("The following files were left unchanged. Review them for compatibility:");
813
- sections.push("");
814
- for (const r of skippedConfigs) sections.push(`- \`${r.filePath}\` — ${r.description}`);
815
- sections.push("");
816
- }
817
- if (results.some((r) => r.filePath === "test/example.test.ts" && r.action === "created")) {
818
- sections.push("### Generate tests");
819
- sections.push("");
820
- sections.push("A starter test was created at `test/example.test.ts`. Now:");
821
- sections.push("");
822
- sections.push("1. Review the existing source code in `src/`");
823
- sections.push("2. Create additional test files following the starter test's patterns (import style, describe/it structure)");
824
- sections.push("3. Focus on edge cases and core business logic");
825
- sections.push("4. Aim for meaningful coverage of exported functions and key code paths");
826
- sections.push("");
827
- }
828
- sections.push("## Ground rules");
829
- sections.push("");
830
- sections.push("It is OK to add new packages (e.g. `zod`, `@bensandee/common`) if they are needed to resolve errors.");
831
- sections.push("");
832
- sections.push("When resolving errors from the checklist below, prefer fixing the root cause over suppressing the issue. For example:");
833
- sections.push("");
834
- sections.push("- **Lint errors**: fix the code rather than adding disable comments or rule exceptions");
835
- sections.push("- **Test failures**: update the test or fix the underlying bug rather than skipping or deleting the test");
836
- sections.push("- **Knip findings**: remove genuinely unused code/exports/dependencies rather than adding ignores to `knip.config.ts`");
837
- sections.push("- **Type errors**: add proper types rather than using `any` or `@ts-expect-error`");
838
- sections.push("");
839
- sections.push("Only suppress an issue if there is a clear, documented reason why the fix is not feasible (e.g. a third-party type mismatch). Leave a comment explaining why.");
840
- sections.push("");
841
- sections.push("## Verification checklist");
842
- sections.push("");
843
- sections.push("Run each of these commands and fix any errors before moving on:");
844
- sections.push("");
845
- sections.push("1. `pnpm install`");
846
- const updateCmd = `pnpm update --latest ${getAddedDevDepNames(config).join(" ")}`;
847
- sections.push(`2. \`${updateCmd}\` — bump added dependencies to their latest versions`);
848
- sections.push("3. `pnpm typecheck` — fix any type errors");
849
- sections.push("4. `pnpm build` — fix any build errors");
850
- sections.push("5. `pnpm test` — fix any test failures");
851
- sections.push("6. `pnpm lint` — fix the code to satisfy lint rules");
852
- sections.push("7. `pnpm knip` — remove unused exports, dependencies, and dead code");
853
- sections.push("8. `pnpm format` — fix any formatting issues");
854
- sections.push("");
855
- return sections.join("\n");
856
- }
857
- //#endregion
858
- //#region src/generators/tsconfig.ts
859
- async function generateTsconfig(ctx) {
860
- const filePath = "tsconfig.json";
861
- const existing = ctx.read(filePath);
862
- if (ctx.config.structure === "monorepo") return [generateMonorepoRootTsconfig(ctx), ...ctx.config.detectPackageTypes ? generateMonorepoPackageTsconfigs(ctx) : []];
863
- const extendsValue = `@bensandee/config/tsconfig/${ctx.config.projectType}`;
864
- if (!existing) {
865
- const config = {
866
- extends: extendsValue,
867
- ...ctx.exists("src") ? { include: ["src"] } : {}
868
- };
869
- ctx.write(filePath, JSON.stringify(config, null, 2) + "\n");
870
- return [{
871
- filePath,
872
- action: "created",
873
- description: `Generated tsconfig.json with ${extendsValue}`
874
- }];
875
- }
876
- if (existing.includes("// @bensandee/tooling:ignore")) return [{
877
- filePath,
878
- action: "skipped",
879
- description: "Ignored via tooling:ignore comment"
880
- }];
881
- const parsed = parseTsconfig(existing);
882
- if (isSolutionStyle(parsed)) {
883
- const results = [{
884
- filePath,
885
- action: "skipped",
886
- description: "Solution-style tsconfig — traversing references"
887
- }];
888
- for (const ref of parsed.references ?? []) {
889
- const refPath = resolveReferencePath(ref.path);
890
- results.push(mergeSingleTsconfig(ctx, refPath, extendsValue));
891
- }
892
- return results;
893
- }
894
- return [mergeSingleTsconfig(ctx, filePath, extendsValue)];
895
- }
896
- function isSolutionStyle(parsed) {
897
- return Array.isArray(parsed.references) && parsed.references.length > 0 && Array.isArray(parsed.files) && parsed.files.length === 0;
898
- }
899
- function resolveReferencePath(refPath) {
900
- const resolved = refPath.endsWith(".json") ? refPath : path.join(refPath, "tsconfig.json");
901
- return path.normalize(resolved);
902
- }
903
- function mergeSingleTsconfig(ctx, filePath, extendsValue) {
904
- const existing = ctx.read(filePath);
905
- if (!existing) return {
906
- filePath,
907
- action: "skipped",
908
- description: "File not found"
909
- };
910
- if (existing.includes("// @bensandee/tooling:ignore")) return {
911
- filePath,
912
- action: "skipped",
913
- description: "Ignored via tooling:ignore comment"
914
- };
915
- const parsed = parseTsconfig(existing);
916
- const changes = [];
917
- if (!parsed.extends) {
918
- parsed.extends = extendsValue;
919
- changes.push(`added extends: ${extendsValue}`);
920
- }
921
- if (!parsed.include) {
922
- const tsconfigDir = path.dirname(filePath);
923
- const srcDir = tsconfigDir === "." ? "src" : path.join(tsconfigDir, "src");
924
- if (ctx.exists(srcDir)) {
925
- parsed.include = ["src"];
926
- changes.push("added include: [\"src\"]");
800
+ if (!parsed.include) {
801
+ const tsconfigDir = path.dirname(filePath);
802
+ const srcDir = tsconfigDir === "." ? "src" : path.join(tsconfigDir, "src");
803
+ if (ctx.exists(srcDir)) {
804
+ parsed.include = ["src"];
805
+ changes.push("added include: [\"src\"]");
927
806
  }
928
807
  }
929
808
  if (changes.length === 0) return {
@@ -1208,7 +1087,7 @@ async function generateTsdown(ctx) {
1208
1087
  }
1209
1088
  //#endregion
1210
1089
  //#region src/generators/gitignore.ts
1211
- /** Entries that every project should have — repo:check flags these as missing. */
1090
+ /** Entries that every project should have — repo:sync --check flags these as missing. */
1212
1091
  const REQUIRED_ENTRIES = [
1213
1092
  "node_modules/",
1214
1093
  ".pnpm-store/",
@@ -1218,7 +1097,7 @@ const REQUIRED_ENTRIES = [
1218
1097
  ".env.*",
1219
1098
  "!.env.example"
1220
1099
  ];
1221
- /** Tooling-specific entries added during init/update but not required for repo:check. */
1100
+ /** Tooling-specific entries added during init/update but not required for repo:sync --check. */
1222
1101
  const OPTIONAL_ENTRIES = [".tooling-migrate.md", ".tooling-archived/"];
1223
1102
  const ALL_ENTRIES = [...REQUIRED_ENTRIES, ...OPTIONAL_ENTRIES];
1224
1103
  /** Normalize a gitignore entry for comparison: strip leading `/` and trailing `/`. */
@@ -1366,9 +1245,42 @@ function mergeWorkflowSteps(existing, jobName, requiredSteps) {
1366
1245
  };
1367
1246
  }
1368
1247
  }
1248
+ /**
1249
+ * Add a job to an existing workflow YAML if it doesn't already exist.
1250
+ * Returns unchanged content if the job already exists, the file has an opt-out comment,
1251
+ * or the document can't be parsed.
1252
+ */
1253
+ function addWorkflowJob(existing, jobName, jobConfig) {
1254
+ if (isToolingIgnored(existing)) return {
1255
+ content: existing,
1256
+ changed: false
1257
+ };
1258
+ try {
1259
+ const doc = parseDocument(existing);
1260
+ const jobs = doc.getIn(["jobs"]);
1261
+ if (!isMap(jobs)) return {
1262
+ content: existing,
1263
+ changed: false
1264
+ };
1265
+ if (jobs.has(jobName)) return {
1266
+ content: existing,
1267
+ changed: false
1268
+ };
1269
+ jobs.set(jobName, doc.createNode(jobConfig));
1270
+ return {
1271
+ content: doc.toString(),
1272
+ changed: true
1273
+ };
1274
+ } catch {
1275
+ return {
1276
+ content: existing,
1277
+ changed: false
1278
+ };
1279
+ }
1280
+ }
1369
1281
  //#endregion
1370
1282
  //#region src/generators/ci.ts
1371
- function hasEnginesNode$1(ctx) {
1283
+ function hasEnginesNode$2(ctx) {
1372
1284
  const raw = ctx.read("package.json");
1373
1285
  if (!raw) return false;
1374
1286
  return typeof parsePackageJson(raw)?.engines?.["node"] === "string";
@@ -1380,7 +1292,6 @@ ${emailNotifications}on:
1380
1292
  push:
1381
1293
  branches: [main]
1382
1294
  pull_request:
1383
- branches: [main]
1384
1295
 
1385
1296
  jobs:
1386
1297
  check:
@@ -1437,7 +1348,7 @@ async function generateCi(ctx) {
1437
1348
  description: "CI workflow not requested"
1438
1349
  };
1439
1350
  const isGitHub = ctx.config.ci === "github";
1440
- const nodeVersionYaml = hasEnginesNode$1(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
1351
+ const nodeVersionYaml = hasEnginesNode$2(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
1441
1352
  const filePath = isGitHub ? ".github/workflows/check.yml" : ".forgejo/workflows/check.yml";
1442
1353
  const content = ciWorkflow(nodeVersionYaml, !isGitHub);
1443
1354
  if (ctx.exists(filePath)) {
@@ -1913,7 +1824,11 @@ async function generateChangesets(ctx) {
1913
1824
  }
1914
1825
  //#endregion
1915
1826
  //#region src/generators/release-ci.ts
1916
- function hasEnginesNode(ctx) {
1827
+ /** Build a GitHub Actions expression like `${{ expr }}` without triggering no-template-curly-in-string. */
1828
+ function actionsExpr$1(expr) {
1829
+ return `\${{ ${expr} }}`;
1830
+ }
1831
+ function hasEnginesNode$1(ctx) {
1917
1832
  return typeof ctx.packageJson?.["engines"]?.["node"] === "string";
1918
1833
  }
1919
1834
  function commonSteps(nodeVersionYaml) {
@@ -1984,60 +1899,85 @@ jobs:
1984
1899
  ${commonSteps(nodeVersionYaml)}${gitConfigStep}${releaseStep}
1985
1900
  `;
1986
1901
  }
1987
- function changesetsWorkflow(ci, nodeVersionYaml) {
1988
- if (ci === "github") return `${workflowSchemaComment(ci)}name: Release
1989
- on:
1990
- push:
1991
- branches:
1992
- - main
1993
-
1994
- permissions:
1995
- contents: write
1996
- pull-requests: write
1997
-
1998
- jobs:
1999
- release:
2000
- runs-on: ubuntu-latest
2001
- steps:
2002
- ${commonSteps(nodeVersionYaml)}
2003
- - uses: changesets/action@v1
2004
- with:
2005
- publish: pnpm changeset publish
2006
- version: pnpm changeset version
2007
- env:
2008
- GITHUB_TOKEN: \${{ github.token }}
2009
- NPM_TOKEN: \${{ secrets.NPM_TOKEN }}
2010
- `;
2011
- return `${workflowSchemaComment(ci)}name: Release
2012
- on:
2013
- push:
2014
- branches:
2015
- - main
2016
-
2017
- jobs:
2018
- release:
2019
- runs-on: ubuntu-latest
2020
- steps:
2021
- ${commonSteps(nodeVersionYaml)}
2022
- - name: Configure git
2023
- run: |
2024
- git config user.name "forgejo-actions[bot]"
2025
- git config user.email "forgejo-actions[bot]@noreply.localhost"
2026
- - name: Release
2027
- env:
2028
- FORGEJO_SERVER_URL: \${{ github.server_url }}
2029
- FORGEJO_REPOSITORY: \${{ github.repository }}
2030
- FORGEJO_TOKEN: \${{ secrets.FORGEJO_TOKEN }}
2031
- NODE_AUTH_TOKEN: \${{ secrets.NPM_TOKEN }}
2032
- run: pnpm exec tooling release:changesets
2033
- `;
2034
- }
2035
- function requiredReleaseSteps(strategy, nodeVersionYaml) {
2036
- const isNodeVersionFile = nodeVersionYaml.startsWith("node-version-file");
2037
- const steps = [
2038
- {
2039
- match: { uses: "actions/checkout" },
2040
- step: {
1902
+ function changesetsReleaseJobConfig(ci, nodeVersionYaml) {
1903
+ const isGitHub = ci === "github";
1904
+ const nodeWith = {
1905
+ ...nodeVersionYaml.startsWith("node-version-file") ? { "node-version-file": "package.json" } : { "node-version": "24" },
1906
+ cache: "pnpm",
1907
+ "registry-url": "https://registry.npmjs.org"
1908
+ };
1909
+ if (isGitHub) return {
1910
+ needs: "check",
1911
+ if: "github.ref == 'refs/heads/main'",
1912
+ "runs-on": "ubuntu-latest",
1913
+ permissions: {
1914
+ contents: "write",
1915
+ "pull-requests": "write"
1916
+ },
1917
+ steps: [
1918
+ {
1919
+ uses: "actions/checkout@v4",
1920
+ with: { "fetch-depth": 0 }
1921
+ },
1922
+ { uses: "pnpm/action-setup@v4" },
1923
+ {
1924
+ uses: "actions/setup-node@v4",
1925
+ with: nodeWith
1926
+ },
1927
+ { run: "pnpm install --frozen-lockfile" },
1928
+ { run: "pnpm build" },
1929
+ {
1930
+ uses: "changesets/action@v1",
1931
+ with: {
1932
+ publish: "pnpm changeset publish",
1933
+ version: "pnpm changeset version"
1934
+ },
1935
+ env: {
1936
+ GITHUB_TOKEN: actionsExpr$1("github.token"),
1937
+ NPM_TOKEN: actionsExpr$1("secrets.NPM_TOKEN")
1938
+ }
1939
+ }
1940
+ ]
1941
+ };
1942
+ return {
1943
+ needs: "check",
1944
+ if: "github.ref == 'refs/heads/main'",
1945
+ "runs-on": "ubuntu-latest",
1946
+ steps: [
1947
+ {
1948
+ uses: "actions/checkout@v4",
1949
+ with: { "fetch-depth": 0 }
1950
+ },
1951
+ { uses: "pnpm/action-setup@v4" },
1952
+ {
1953
+ uses: "actions/setup-node@v4",
1954
+ with: nodeWith
1955
+ },
1956
+ { run: "pnpm install --frozen-lockfile" },
1957
+ { run: "pnpm build" },
1958
+ {
1959
+ name: "Configure git",
1960
+ run: "git config user.name \"forgejo-actions[bot]\"\ngit config user.email \"forgejo-actions[bot]@noreply.localhost\"\n"
1961
+ },
1962
+ {
1963
+ name: "Release",
1964
+ env: {
1965
+ FORGEJO_SERVER_URL: actionsExpr$1("github.server_url"),
1966
+ FORGEJO_REPOSITORY: actionsExpr$1("github.repository"),
1967
+ FORGEJO_TOKEN: actionsExpr$1("secrets.FORGEJO_TOKEN"),
1968
+ NODE_AUTH_TOKEN: actionsExpr$1("secrets.NPM_TOKEN")
1969
+ },
1970
+ run: "pnpm exec tooling release:changesets"
1971
+ }
1972
+ ]
1973
+ };
1974
+ }
1975
+ function requiredReleaseSteps(strategy, nodeVersionYaml) {
1976
+ const isNodeVersionFile = nodeVersionYaml.startsWith("node-version-file");
1977
+ const steps = [
1978
+ {
1979
+ match: { uses: "actions/checkout" },
1980
+ step: {
2041
1981
  uses: "actions/checkout@v4",
2042
1982
  with: { "fetch-depth": 0 }
2043
1983
  }
@@ -2092,10 +2032,42 @@ function buildWorkflow(strategy, ci, nodeVersionYaml) {
2092
2032
  switch (strategy) {
2093
2033
  case "release-it": return releaseItWorkflow(ci, nodeVersionYaml);
2094
2034
  case "simple": return commitAndTagVersionWorkflow(ci, nodeVersionYaml);
2095
- case "changesets": return changesetsWorkflow(ci, nodeVersionYaml);
2096
2035
  default: return null;
2097
2036
  }
2098
2037
  }
2038
+ function generateChangesetsReleaseCi(ctx) {
2039
+ const checkPath = ctx.config.ci === "github" ? ".github/workflows/check.yml" : ".forgejo/workflows/check.yml";
2040
+ const nodeVersionYaml = hasEnginesNode$1(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
2041
+ const existing = ctx.read(checkPath);
2042
+ if (!existing) return {
2043
+ filePath: checkPath,
2044
+ action: "skipped",
2045
+ description: "CI workflow not found — run check generator first"
2046
+ };
2047
+ const addResult = addWorkflowJob(existing, "release", changesetsReleaseJobConfig(ctx.config.ci, nodeVersionYaml));
2048
+ if (addResult.changed) {
2049
+ const withComment = ensureSchemaComment(addResult.content, ctx.config.ci);
2050
+ ctx.write(checkPath, withComment);
2051
+ return {
2052
+ filePath: checkPath,
2053
+ action: "updated",
2054
+ description: "Added release job to CI workflow"
2055
+ };
2056
+ }
2057
+ const merged = mergeWorkflowSteps(existing, "release", requiredReleaseSteps("changesets", nodeVersionYaml));
2058
+ if (!merged.changed) return {
2059
+ filePath: checkPath,
2060
+ action: "skipped",
2061
+ description: "Release job in CI workflow already up to date"
2062
+ };
2063
+ const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
2064
+ ctx.write(checkPath, withComment);
2065
+ return {
2066
+ filePath: checkPath,
2067
+ action: "updated",
2068
+ description: "Added missing steps to release job in CI workflow"
2069
+ };
2070
+ }
2099
2071
  async function generateReleaseCi(ctx) {
2100
2072
  const filePath = "release-ci";
2101
2073
  if (ctx.config.releaseStrategy === "none" || ctx.config.ci === "none") return {
@@ -2103,9 +2075,10 @@ async function generateReleaseCi(ctx) {
2103
2075
  action: "skipped",
2104
2076
  description: "Release CI workflow not applicable"
2105
2077
  };
2078
+ if (ctx.config.releaseStrategy === "changesets") return generateChangesetsReleaseCi(ctx);
2106
2079
  const isGitHub = ctx.config.ci === "github";
2107
2080
  const workflowPath = isGitHub ? ".github/workflows/release.yml" : ".forgejo/workflows/release.yml";
2108
- const nodeVersionYaml = hasEnginesNode(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
2081
+ const nodeVersionYaml = hasEnginesNode$1(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
2109
2082
  const content = buildWorkflow(ctx.config.releaseStrategy, ctx.config.ci, nodeVersionYaml);
2110
2083
  if (!content) return {
2111
2084
  filePath,
@@ -2115,16 +2088,42 @@ async function generateReleaseCi(ctx) {
2115
2088
  if (ctx.exists(workflowPath)) {
2116
2089
  const existing = ctx.read(workflowPath);
2117
2090
  if (existing) {
2091
+ if (existing === content || ensureSchemaComment(existing, ctx.config.ci) === content) return {
2092
+ filePath: workflowPath,
2093
+ action: "skipped",
2094
+ description: "Release workflow already up to date"
2095
+ };
2118
2096
  const merged = mergeWorkflowSteps(existing, "release", requiredReleaseSteps(ctx.config.releaseStrategy, nodeVersionYaml));
2119
2097
  const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
2120
- if (merged.changed || withComment !== merged.content) {
2121
- ctx.write(workflowPath, withComment);
2098
+ if (withComment === content) {
2099
+ ctx.write(workflowPath, content);
2122
2100
  return {
2123
2101
  filePath: workflowPath,
2124
2102
  action: "updated",
2125
2103
  description: "Added missing steps to release workflow"
2126
2104
  };
2127
2105
  }
2106
+ if (await ctx.confirmOverwrite(workflowPath) === "skip") {
2107
+ if (merged.changed || withComment !== merged.content) {
2108
+ ctx.write(workflowPath, withComment);
2109
+ return {
2110
+ filePath: workflowPath,
2111
+ action: "updated",
2112
+ description: "Added missing steps to release workflow"
2113
+ };
2114
+ }
2115
+ return {
2116
+ filePath: workflowPath,
2117
+ action: "skipped",
2118
+ description: "Existing release workflow preserved"
2119
+ };
2120
+ }
2121
+ ctx.write(workflowPath, content);
2122
+ return {
2123
+ filePath: workflowPath,
2124
+ action: "updated",
2125
+ description: "Replaced release workflow with updated template"
2126
+ };
2128
2127
  }
2129
2128
  return {
2130
2129
  filePath: workflowPath,
@@ -2395,6 +2394,148 @@ async function generateVscodeSettings(ctx) {
2395
2394
  return results;
2396
2395
  }
2397
2396
  //#endregion
2397
+ //#region src/generators/deploy-ci.ts
2398
+ /** Build a GitHub Actions expression like `${{ expr }}` without triggering no-template-curly-in-string. */
2399
+ function actionsExpr(expr) {
2400
+ return `\${{ ${expr} }}`;
2401
+ }
2402
+ function hasEnginesNode(ctx) {
2403
+ return typeof ctx.packageJson?.["engines"]?.["node"] === "string";
2404
+ }
2405
+ function deployWorkflow(ci, nodeVersionYaml) {
2406
+ return `${workflowSchemaComment(ci)}name: Deploy
2407
+ on:
2408
+ push:
2409
+ tags:
2410
+ - "v[0-9]+.[0-9]+.[0-9]+"
2411
+
2412
+ jobs:
2413
+ deploy:
2414
+ runs-on: ubuntu-latest
2415
+ steps:
2416
+ - uses: actions/checkout@v4
2417
+ - uses: pnpm/action-setup@v4
2418
+ - uses: actions/setup-node@v4
2419
+ with:
2420
+ ${nodeVersionYaml}
2421
+ - run: pnpm install --frozen-lockfile
2422
+ - name: Publish Docker images
2423
+ env:
2424
+ DOCKER_REGISTRY_HOST: ${actionsExpr("vars.DOCKER_REGISTRY_HOST")}
2425
+ DOCKER_REGISTRY_NAMESPACE: ${actionsExpr("vars.DOCKER_REGISTRY_NAMESPACE")}
2426
+ DOCKER_USERNAME: ${actionsExpr("secrets.DOCKER_USERNAME")}
2427
+ DOCKER_PASSWORD: ${actionsExpr("secrets.DOCKER_PASSWORD")}
2428
+ run: pnpm exec tooling docker:publish
2429
+ `;
2430
+ }
2431
+ function requiredDeploySteps() {
2432
+ return [
2433
+ {
2434
+ match: { uses: "actions/checkout" },
2435
+ step: { uses: "actions/checkout@v4" }
2436
+ },
2437
+ {
2438
+ match: { uses: "pnpm/action-setup" },
2439
+ step: { uses: "pnpm/action-setup@v4" }
2440
+ },
2441
+ {
2442
+ match: { uses: "actions/setup-node" },
2443
+ step: { uses: "actions/setup-node@v4" }
2444
+ },
2445
+ {
2446
+ match: { run: "pnpm install" },
2447
+ step: { run: "pnpm install --frozen-lockfile" }
2448
+ },
2449
+ {
2450
+ match: { run: "docker:publish" },
2451
+ step: { run: "pnpm exec tooling docker:publish" }
2452
+ }
2453
+ ];
2454
+ }
2455
+ /** Convention paths to check for Dockerfiles. */
2456
+ const CONVENTION_DOCKERFILE_PATHS$1 = ["Dockerfile", "docker/Dockerfile"];
2457
+ const DockerMapSchema = z.object({ docker: z.record(z.string(), z.unknown()).optional() });
2458
+ /** Check whether any Docker packages exist by convention or .tooling.json config. */
2459
+ function hasDockerPackages(ctx) {
2460
+ const configRaw = ctx.read(".tooling.json");
2461
+ if (configRaw) {
2462
+ const result = DockerMapSchema.safeParse(JSON.parse(configRaw));
2463
+ if (result.success && result.data.docker && Object.keys(result.data.docker).length > 0) return true;
2464
+ }
2465
+ if (ctx.config.structure === "monorepo") {
2466
+ const packages = getMonorepoPackages(ctx.targetDir);
2467
+ for (const pkg of packages) {
2468
+ const dirName = pkg.name.split("/").pop() ?? pkg.name;
2469
+ for (const rel of CONVENTION_DOCKERFILE_PATHS$1) if (ctx.exists(`packages/${dirName}/${rel}`)) return true;
2470
+ }
2471
+ } else for (const rel of CONVENTION_DOCKERFILE_PATHS$1) if (ctx.exists(rel)) return true;
2472
+ return false;
2473
+ }
2474
+ async function generateDeployCi(ctx) {
2475
+ const filePath = "deploy-ci";
2476
+ if (!hasDockerPackages(ctx) || ctx.config.ci === "none") return {
2477
+ filePath,
2478
+ action: "skipped",
2479
+ description: "Deploy CI workflow not applicable"
2480
+ };
2481
+ const isGitHub = ctx.config.ci === "github";
2482
+ const workflowPath = isGitHub ? ".github/workflows/deploy.yml" : ".forgejo/workflows/deploy.yml";
2483
+ const nodeVersionYaml = hasEnginesNode(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
2484
+ const content = deployWorkflow(ctx.config.ci, nodeVersionYaml);
2485
+ if (ctx.exists(workflowPath)) {
2486
+ const existing = ctx.read(workflowPath);
2487
+ if (existing) {
2488
+ if (existing === content || ensureSchemaComment(existing, ctx.config.ci) === content) return {
2489
+ filePath: workflowPath,
2490
+ action: "skipped",
2491
+ description: "Deploy workflow already up to date"
2492
+ };
2493
+ const merged = mergeWorkflowSteps(existing, "deploy", requiredDeploySteps());
2494
+ const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
2495
+ if (withComment === content) {
2496
+ ctx.write(workflowPath, content);
2497
+ return {
2498
+ filePath: workflowPath,
2499
+ action: "updated",
2500
+ description: "Added missing steps to deploy workflow"
2501
+ };
2502
+ }
2503
+ if (await ctx.confirmOverwrite(workflowPath) === "skip") {
2504
+ if (merged.changed || withComment !== merged.content) {
2505
+ ctx.write(workflowPath, withComment);
2506
+ return {
2507
+ filePath: workflowPath,
2508
+ action: "updated",
2509
+ description: "Added missing steps to deploy workflow"
2510
+ };
2511
+ }
2512
+ return {
2513
+ filePath: workflowPath,
2514
+ action: "skipped",
2515
+ description: "Existing deploy workflow preserved"
2516
+ };
2517
+ }
2518
+ ctx.write(workflowPath, content);
2519
+ return {
2520
+ filePath: workflowPath,
2521
+ action: "updated",
2522
+ description: "Replaced deploy workflow with updated template"
2523
+ };
2524
+ }
2525
+ return {
2526
+ filePath: workflowPath,
2527
+ action: "skipped",
2528
+ description: "Deploy workflow already up to date"
2529
+ };
2530
+ }
2531
+ ctx.write(workflowPath, content);
2532
+ return {
2533
+ filePath: workflowPath,
2534
+ action: "created",
2535
+ description: `Generated ${isGitHub ? "GitHub" : "Forgejo"} Actions deploy workflow`
2536
+ };
2537
+ }
2538
+ //#endregion
2398
2539
  //#region src/generators/pipeline.ts
2399
2540
  /** Run all generators sequentially and return their results. */
2400
2541
  async function runGenerators(ctx) {
@@ -2414,137 +2555,183 @@ async function runGenerators(ctx) {
2414
2555
  results.push(await generateReleaseIt(ctx));
2415
2556
  results.push(await generateChangesets(ctx));
2416
2557
  results.push(await generateReleaseCi(ctx));
2558
+ results.push(await generateDeployCi(ctx));
2417
2559
  results.push(...await generateVitest(ctx));
2418
2560
  results.push(...await generateVscodeSettings(ctx));
2561
+ results.push(saveToolingConfig(ctx, ctx.config));
2419
2562
  return results;
2420
2563
  }
2421
2564
  //#endregion
2422
- //#region src/utils/tooling-config.ts
2423
- const CONFIG_FILE = ".tooling.json";
2424
- const ToolingConfigSchema = z.object({
2425
- structure: z.enum(["single", "monorepo"]).optional(),
2426
- useEslintPlugin: z.boolean().optional(),
2427
- formatter: z.enum(["oxfmt", "prettier"]).optional(),
2428
- setupVitest: z.boolean().optional(),
2429
- ci: z.enum([
2430
- "github",
2431
- "forgejo",
2432
- "none"
2433
- ]).optional(),
2434
- setupRenovate: z.boolean().optional(),
2435
- releaseStrategy: z.enum([
2436
- "release-it",
2437
- "simple",
2438
- "changesets",
2439
- "none"
2440
- ]).optional(),
2441
- projectType: z.enum([
2442
- "default",
2443
- "node",
2444
- "react",
2445
- "library"
2446
- ]).optional(),
2447
- detectPackageTypes: z.boolean().optional()
2448
- });
2449
- /** Load saved tooling config from the target directory. Returns undefined if missing or invalid. */
2450
- function loadToolingConfig(targetDir) {
2451
- const fullPath = path.join(targetDir, CONFIG_FILE);
2452
- if (!existsSync(fullPath)) return void 0;
2453
- try {
2454
- const raw = readFileSync(fullPath, "utf-8");
2455
- const result = ToolingConfigSchema.safeParse(JSON.parse(raw));
2456
- return result.success ? result.data : void 0;
2457
- } catch {
2458
- return;
2565
+ //#region src/generators/migrate-prompt.ts
2566
+ /**
2567
+ * Generate a context-aware AI migration prompt based on what the CLI did.
2568
+ * This prompt can be pasted into Claude Code (or similar) to finish the migration.
2569
+ */
2570
+ function generateMigratePrompt(results, config, detected) {
2571
+ const sections = [];
2572
+ sections.push("# Migration Prompt");
2573
+ sections.push("");
2574
+ sections.push("The following prompt was generated by `@bensandee/tooling repo:sync`. Paste it into Claude Code or another AI assistant to finish migrating this repository.");
2575
+ sections.push("");
2576
+ sections.push("> **Tip:** Before starting, run `/init` in Claude Code to generate a `CLAUDE.md` that gives the AI a complete picture of your repository's structure, conventions, and build commands.");
2577
+ sections.push("");
2578
+ sections.push("## What was changed");
2579
+ sections.push("");
2580
+ const created = results.filter((r) => r.action === "created");
2581
+ const updated = results.filter((r) => r.action === "updated");
2582
+ const skipped = results.filter((r) => r.action === "skipped");
2583
+ const archived = results.filter((r) => r.action === "archived");
2584
+ if (created.length > 0) {
2585
+ sections.push("**Created:**");
2586
+ for (const r of created) sections.push(`- \`${r.filePath}\` — ${r.description}`);
2587
+ sections.push("");
2459
2588
  }
2460
- }
2461
- /** Save the user's config choices to .tooling.json via the generator context. */
2462
- function saveToolingConfig(ctx, config) {
2463
- const saved = {
2464
- structure: config.structure,
2465
- useEslintPlugin: config.useEslintPlugin,
2466
- formatter: config.formatter,
2467
- setupVitest: config.setupVitest,
2468
- ci: config.ci,
2469
- setupRenovate: config.setupRenovate,
2470
- releaseStrategy: config.releaseStrategy,
2471
- projectType: config.projectType,
2472
- detectPackageTypes: config.detectPackageTypes
2473
- };
2474
- const content = JSON.stringify(saved, null, 2) + "\n";
2475
- const existing = ctx.exists(CONFIG_FILE) ? ctx.read(CONFIG_FILE) : void 0;
2476
- if (existing !== void 0 && contentEqual(CONFIG_FILE, existing, content)) return {
2477
- filePath: CONFIG_FILE,
2478
- action: "skipped",
2479
- description: "Already up to date"
2480
- };
2481
- ctx.write(CONFIG_FILE, content);
2482
- return {
2483
- filePath: CONFIG_FILE,
2484
- action: existing ? "updated" : "created",
2485
- description: "Saved tooling configuration"
2486
- };
2487
- }
2488
- /** Merge saved config over detected defaults. Saved values win when present. */
2489
- function mergeWithSavedConfig(detected, saved) {
2490
- return {
2491
- name: detected.name,
2492
- isNew: detected.isNew,
2493
- targetDir: detected.targetDir,
2494
- structure: saved.structure ?? detected.structure,
2495
- useEslintPlugin: saved.useEslintPlugin ?? detected.useEslintPlugin,
2496
- formatter: saved.formatter ?? detected.formatter,
2497
- setupVitest: saved.setupVitest ?? detected.setupVitest,
2498
- ci: saved.ci ?? detected.ci,
2499
- setupRenovate: saved.setupRenovate ?? detected.setupRenovate,
2500
- releaseStrategy: saved.releaseStrategy ?? detected.releaseStrategy,
2501
- projectType: saved.projectType ?? detected.projectType,
2502
- detectPackageTypes: saved.detectPackageTypes ?? detected.detectPackageTypes
2503
- };
2589
+ if (updated.length > 0) {
2590
+ sections.push("**Updated:**");
2591
+ for (const r of updated) sections.push(`- \`${r.filePath}\` — ${r.description}`);
2592
+ sections.push("");
2593
+ }
2594
+ if (archived.length > 0) {
2595
+ sections.push("**Archived:**");
2596
+ for (const r of archived) sections.push(`- \`${r.filePath}\` — ${r.description}`);
2597
+ sections.push("");
2598
+ }
2599
+ if (skipped.length > 0) {
2600
+ sections.push("**Skipped (review these):**");
2601
+ for (const r of skipped) sections.push(`- \`${r.filePath}\` — ${r.description}`);
2602
+ sections.push("");
2603
+ }
2604
+ sections.push("## Migration tasks");
2605
+ sections.push("");
2606
+ const legacyToRemove = detected.legacyConfigs.filter((legacy) => !(legacy.tool === "prettier" && config.formatter === "prettier"));
2607
+ if (legacyToRemove.length > 0) {
2608
+ sections.push("### Remove legacy tooling");
2609
+ sections.push("");
2610
+ for (const legacy of legacyToRemove) {
2611
+ const replacement = {
2612
+ eslint: "oxlint",
2613
+ prettier: "oxfmt",
2614
+ jest: "vitest",
2615
+ webpack: "tsdown",
2616
+ rollup: "tsdown"
2617
+ }[legacy.tool];
2618
+ sections.push(`- Remove ${legacy.tool} config files (${legacy.files.map((f) => `\`${f}\``).join(", ")}). This project now uses **${replacement}**.`);
2619
+ sections.push(` - Uninstall ${legacy.tool}-related packages from devDependencies`);
2620
+ if (legacy.tool === "eslint") sections.push(" - Migrate any custom ESLint rules that don't have oxlint equivalents");
2621
+ if (legacy.tool === "jest") sections.push(" - Migrate any jest-specific test utilities (jest.mock, jest.fn) to vitest equivalents (vi.mock, vi.fn)");
2622
+ }
2623
+ sections.push("");
2624
+ }
2625
+ if (archived.length > 0) {
2626
+ sections.push("### Review archived files");
2627
+ sections.push("");
2628
+ sections.push("The following files were modified or replaced. The originals have been saved to `.tooling-archived/`:");
2629
+ sections.push("");
2630
+ for (const r of archived) sections.push(`- \`${r.filePath}\` → \`.tooling-archived/${r.filePath}\``);
2631
+ sections.push("");
2632
+ sections.push("For each archived file, **diff the old version against the new one** and look for features, categories, or modules that were enabled in the original but are missing from the replacement. Focus on broad capability gaps rather than individual rule strictness (in general, being stricter is fine). Examples of what to look for:");
2633
+ sections.push("");
2634
+ sections.push("- **Lint configs**: enabled plugin categories (e.g. `jsx-a11y`, `import`, `react`, `nextjs`), custom `plugins` or `overrides`, file-scoped rule blocks");
2635
+ sections.push("- **TypeScript configs**: compiler features like `jsx`, `paths`, `baseUrl`, or `references` that affect build behavior");
2636
+ sections.push("- **Other configs**: feature flags, custom presets, or integrations that go beyond the default template");
2637
+ sections.push("");
2638
+ sections.push("If the old config had capabilities the new one lacks, port them into the new file. Then:");
2639
+ sections.push("");
2640
+ sections.push("1. If the project previously used `husky` and `lint-staged`, remove them from `devDependencies`");
2641
+ sections.push("2. Delete the `.tooling-archived/` directory when migration is complete");
2642
+ sections.push("");
2643
+ }
2644
+ const oxlintWasSkipped = results.find((r) => r.filePath === "oxlint.config.ts")?.action === "skipped";
2645
+ if (detected.hasLegacyOxlintJson) {
2646
+ sections.push("### Migrate .oxlintrc.json to oxlint.config.ts");
2647
+ sections.push("");
2648
+ sections.push("A new `oxlint.config.ts` has been generated using `defineConfig` from the `oxlint` package. The existing `.oxlintrc.json` needs to be migrated:");
2649
+ sections.push("");
2650
+ sections.push("1. Read `.oxlintrc.json` and compare its `rules` against the rules provided by `@bensandee/config/oxlint/recommended` (check `node_modules/@bensandee/config`). Most standard rules are already included in the recommended config.");
2651
+ sections.push("2. If there are any custom rules, overrides, settings, or `jsPlugins` not covered by the recommended config, add them to `oxlint.config.ts` alongside the `extends`.");
2652
+ sections.push("3. Delete `.oxlintrc.json`.");
2653
+ sections.push("4. Run `pnpm lint` to verify the new config works correctly.");
2654
+ sections.push("");
2655
+ } else if (oxlintWasSkipped && detected.hasOxlintConfig) {
2656
+ sections.push("### Verify oxlint.config.ts includes recommended rules");
2657
+ sections.push("");
2658
+ sections.push("The existing `oxlint.config.ts` was kept as-is. Verify that it extends the recommended config from `@bensandee/config/oxlint`:");
2659
+ sections.push("");
2660
+ sections.push("1. Open `oxlint.config.ts` and check that it imports and extends `@bensandee/config/oxlint/recommended`.");
2661
+ sections.push("2. The expected pattern is:");
2662
+ sections.push(" ```ts");
2663
+ sections.push(" import recommended from \"@bensandee/config/oxlint/recommended\";");
2664
+ sections.push(" import { defineConfig } from \"oxlint\";");
2665
+ sections.push("");
2666
+ sections.push(" export default defineConfig({ extends: [recommended] });");
2667
+ sections.push(" ```");
2668
+ sections.push("3. If it uses a different pattern, update it to extend the recommended config while preserving any project-specific customizations.");
2669
+ sections.push("4. Run `pnpm lint` to verify the config works correctly.");
2670
+ sections.push("");
2671
+ }
2672
+ if (config.structure === "monorepo" && !detected.hasPnpmWorkspace) {
2673
+ sections.push("### Migrate to monorepo structure");
2674
+ sections.push("");
2675
+ sections.push("This project was converted from a single repo to a monorepo. Complete the migration:");
2676
+ sections.push("");
2677
+ sections.push("1. Move existing source into `packages/<name>/` (using the existing package name)");
2678
+ sections.push("2. Split the root `package.json` into a root workspace manifest + package-level `package.json`");
2679
+ sections.push("3. Move the existing `tsconfig.json` into the package and update the root tsconfig with project references");
2680
+ sections.push("4. Create a package-level `tsdown.config.ts` in the new package");
2681
+ sections.push("5. Update any import paths or build scripts affected by the move");
2682
+ sections.push("");
2683
+ }
2684
+ const skippedConfigs = skipped.filter((r) => r.filePath !== "ci" && r.description !== "Not a monorepo");
2685
+ if (skippedConfigs.length > 0) {
2686
+ sections.push("### Review skipped files");
2687
+ sections.push("");
2688
+ sections.push("The following files were left unchanged. Review them for compatibility:");
2689
+ sections.push("");
2690
+ for (const r of skippedConfigs) sections.push(`- \`${r.filePath}\` — ${r.description}`);
2691
+ sections.push("");
2692
+ }
2693
+ if (results.some((r) => r.filePath === "test/example.test.ts" && r.action === "created")) {
2694
+ sections.push("### Generate tests");
2695
+ sections.push("");
2696
+ sections.push("A starter test was created at `test/example.test.ts`. Now:");
2697
+ sections.push("");
2698
+ sections.push("1. Review the existing source code in `src/`");
2699
+ sections.push("2. Create additional test files following the starter test's patterns (import style, describe/it structure)");
2700
+ sections.push("3. Focus on edge cases and core business logic");
2701
+ sections.push("4. Aim for meaningful coverage of exported functions and key code paths");
2702
+ sections.push("");
2703
+ }
2704
+ sections.push("## Ground rules");
2705
+ sections.push("");
2706
+ sections.push("It is OK to add new packages (e.g. `zod`, `@bensandee/common`) if they are needed to resolve errors.");
2707
+ sections.push("");
2708
+ sections.push("When resolving errors from the checklist below, prefer fixing the root cause over suppressing the issue. For example:");
2709
+ sections.push("");
2710
+ sections.push("- **Lint errors**: fix the code rather than adding disable comments or rule exceptions");
2711
+ sections.push("- **Test failures**: update the test or fix the underlying bug rather than skipping or deleting the test");
2712
+ sections.push("- **Knip findings**: remove genuinely unused code/exports/dependencies rather than adding ignores to `knip.config.ts`");
2713
+ sections.push("- **Type errors**: add proper types rather than using `any` or `@ts-expect-error`");
2714
+ sections.push("");
2715
+ sections.push("Only suppress an issue if there is a clear, documented reason why the fix is not feasible (e.g. a third-party type mismatch). Leave a comment explaining why.");
2716
+ sections.push("");
2717
+ sections.push("## Verification checklist");
2718
+ sections.push("");
2719
+ sections.push("Run each of these commands and fix any errors before moving on:");
2720
+ sections.push("");
2721
+ sections.push("1. `pnpm install`");
2722
+ const updateCmd = `pnpm update --latest ${getAddedDevDepNames(config).join(" ")}`;
2723
+ sections.push(`2. \`${updateCmd}\` — bump added dependencies to their latest versions`);
2724
+ sections.push("3. `pnpm typecheck` — fix any type errors");
2725
+ sections.push("4. `pnpm build` — fix any build errors");
2726
+ sections.push("5. `pnpm test` — fix any test failures");
2727
+ sections.push("6. `pnpm lint` — fix the code to satisfy lint rules");
2728
+ sections.push("7. `pnpm knip` — remove unused exports, dependencies, and dead code");
2729
+ sections.push("8. `pnpm format` — fix any formatting issues");
2730
+ sections.push("");
2731
+ return sections.join("\n");
2504
2732
  }
2505
2733
  //#endregion
2506
2734
  //#region src/commands/repo-init.ts
2507
- const initCommand = defineCommand({
2508
- meta: {
2509
- name: "repo:init",
2510
- description: "Interactive setup wizard"
2511
- },
2512
- args: {
2513
- dir: {
2514
- type: "positional",
2515
- description: "Target directory (default: current directory)",
2516
- required: false
2517
- },
2518
- yes: {
2519
- type: "boolean",
2520
- alias: "y",
2521
- description: "Accept all defaults (non-interactive)"
2522
- },
2523
- "eslint-plugin": {
2524
- type: "boolean",
2525
- description: "Include @bensandee/eslint-plugin (default: true)"
2526
- },
2527
- "no-ci": {
2528
- type: "boolean",
2529
- description: "Skip CI workflow generation"
2530
- },
2531
- "no-prompt": {
2532
- type: "boolean",
2533
- description: "Skip migration prompt generation"
2534
- }
2535
- },
2536
- async run({ args }) {
2537
- const targetDir = path.resolve(args.dir ?? ".");
2538
- const saved = loadToolingConfig(targetDir);
2539
- await runInit(args.yes ? (() => {
2540
- const detected = buildDefaultConfig(targetDir, {
2541
- eslintPlugin: args["eslint-plugin"] === true ? true : void 0,
2542
- noCi: args["no-ci"] === true ? true : void 0
2543
- });
2544
- return saved ? mergeWithSavedConfig(detected, saved) : detected;
2545
- })() : await runInitPrompts(targetDir, saved), args["no-prompt"] === true ? { noPrompt: true } : {});
2546
- }
2547
- });
2548
2735
  async function runInit(config, options = {}) {
2549
2736
  const detected = detectProject(config.targetDir);
2550
2737
  const s = p.spinner();
@@ -2566,7 +2753,6 @@ async function runInit(config, options = {}) {
2566
2753
  }));
2567
2754
  s.start("Generating configuration files...");
2568
2755
  const results = await runGenerators(ctx);
2569
- results.push(saveToolingConfig(ctx, config));
2570
2756
  const alreadyArchived = new Set(results.filter((r) => r.action === "archived").map((r) => r.filePath));
2571
2757
  for (const rel of archivedFiles) if (!alreadyArchived.has(rel)) results.push({
2572
2758
  filePath: rel,
@@ -2575,7 +2761,6 @@ async function runInit(config, options = {}) {
2575
2761
  });
2576
2762
  const created = results.filter((r) => r.action === "created");
2577
2763
  const updated = results.filter((r) => r.action === "updated");
2578
- const skipped = results.filter((r) => r.action === "skipped");
2579
2764
  const archived = results.filter((r) => r.action === "archived");
2580
2765
  if (!(created.length > 0 || updated.length > 0 || archived.length > 0) && options.noPrompt) {
2581
2766
  s.stop("Repository is up to date.");
@@ -2592,7 +2777,6 @@ async function runInit(config, options = {}) {
2592
2777
  if (created.length > 0) summaryLines.push(`Created: ${created.map((r) => r.filePath).join(", ")}`);
2593
2778
  if (updated.length > 0) summaryLines.push(`Updated: ${updated.map((r) => r.filePath).join(", ")}`);
2594
2779
  if (archived.length > 0) summaryLines.push(`Archived: ${archived.map((r) => r.filePath).join(", ")}`);
2595
- if (skipped.length > 0) summaryLines.push(`Skipped: ${skipped.map((r) => r.filePath).join(", ")}`);
2596
2780
  p.note(summaryLines.join("\n"), "Summary");
2597
2781
  if (!options.noPrompt) {
2598
2782
  const prompt = generateMigratePrompt(results, config, detected);
@@ -2625,57 +2809,68 @@ async function runInit(config, options = {}) {
2625
2809
  return results;
2626
2810
  }
2627
2811
  //#endregion
2628
- //#region src/commands/repo-update.ts
2629
- const updateCommand = defineCommand({
2812
+ //#region src/commands/repo-sync.ts
2813
+ const syncCommand = defineCommand({
2630
2814
  meta: {
2631
- name: "repo:update",
2632
- description: "Update managed config and add missing files"
2815
+ name: "repo:sync",
2816
+ description: "Detect, generate, and sync project tooling (idempotent)"
2633
2817
  },
2634
- args: { dir: {
2635
- type: "positional",
2636
- description: "Target directory (default: current directory)",
2637
- required: false
2638
- } },
2639
- async run({ args }) {
2640
- await runUpdate(path.resolve(args.dir ?? "."));
2641
- }
2642
- });
2643
- async function runUpdate(targetDir) {
2644
- const saved = loadToolingConfig(targetDir);
2645
- if (!saved) {
2646
- p.log.error("No .tooling.json found. Run `tooling repo:init` first to initialize the project.");
2647
- process.exitCode = 1;
2648
- return [];
2649
- }
2650
- return runInit(mergeWithSavedConfig(buildDefaultConfig(targetDir, {}), saved), {
2651
- noPrompt: true,
2652
- confirmOverwrite: async () => "overwrite"
2653
- });
2654
- }
2655
- //#endregion
2656
- //#region src/commands/repo-check.ts
2657
- const checkCommand = defineCommand({
2658
- meta: {
2659
- name: "repo:check",
2660
- description: "Check repo for tooling drift (dry-run, CI-friendly)"
2818
+ args: {
2819
+ dir: {
2820
+ type: "positional",
2821
+ description: "Target directory (default: current directory)",
2822
+ required: false
2823
+ },
2824
+ check: {
2825
+ type: "boolean",
2826
+ description: "Dry-run mode: report drift without writing files"
2827
+ },
2828
+ yes: {
2829
+ type: "boolean",
2830
+ alias: "y",
2831
+ description: "Accept all defaults (non-interactive)"
2832
+ },
2833
+ "eslint-plugin": {
2834
+ type: "boolean",
2835
+ description: "Include @bensandee/eslint-plugin (default: true)"
2836
+ },
2837
+ "no-ci": {
2838
+ type: "boolean",
2839
+ description: "Skip CI workflow generation"
2840
+ },
2841
+ "no-prompt": {
2842
+ type: "boolean",
2843
+ description: "Skip migration prompt generation"
2844
+ }
2661
2845
  },
2662
- args: { dir: {
2663
- type: "positional",
2664
- description: "Target directory (default: current directory)",
2665
- required: false
2666
- } },
2667
2846
  async run({ args }) {
2668
- const exitCode = await runCheck(path.resolve(args.dir ?? "."));
2669
- process.exitCode = exitCode;
2847
+ const targetDir = path.resolve(args.dir ?? ".");
2848
+ if (args.check) {
2849
+ const exitCode = await runCheck(targetDir);
2850
+ process.exitCode = exitCode;
2851
+ return;
2852
+ }
2853
+ const saved = loadToolingConfig(targetDir);
2854
+ const isFirstRun = !saved;
2855
+ let config;
2856
+ if (args.yes || !isFirstRun) {
2857
+ const detected = buildDefaultConfig(targetDir, {
2858
+ eslintPlugin: args["eslint-plugin"] === true ? true : void 0,
2859
+ noCi: args["no-ci"] === true ? true : void 0
2860
+ });
2861
+ config = saved ? mergeWithSavedConfig(detected, saved) : detected;
2862
+ } else config = await runInitPrompts(targetDir, saved);
2863
+ await runInit(config, {
2864
+ noPrompt: args["no-prompt"] === true || !isFirstRun,
2865
+ ...!isFirstRun && { confirmOverwrite: async () => "overwrite" }
2866
+ });
2670
2867
  }
2671
2868
  });
2869
+ /** Run sync in check mode: dry-run drift detection. */
2672
2870
  async function runCheck(targetDir) {
2673
2871
  const saved = loadToolingConfig(targetDir);
2674
- if (!saved) {
2675
- p.log.error("No .tooling.json found. Run `tooling repo:init` first to initialize the project.");
2676
- return 1;
2677
- }
2678
- const { ctx, pendingWrites } = createDryRunContext(mergeWithSavedConfig(buildDefaultConfig(targetDir, {}), saved));
2872
+ const detected = buildDefaultConfig(targetDir, {});
2873
+ const { ctx, pendingWrites } = createDryRunContext(saved ? mergeWithSavedConfig(detected, saved) : detected);
2679
2874
  const actionable = (await runGenerators(ctx)).filter((r) => {
2680
2875
  if (r.action !== "created" && r.action !== "updated") return false;
2681
2876
  const newContent = pendingWrites.get(r.filePath);
@@ -2690,7 +2885,7 @@ async function runCheck(targetDir) {
2690
2885
  p.log.success("Repository is up to date.");
2691
2886
  return 0;
2692
2887
  }
2693
- p.log.warn(`${actionable.length} file(s) would be changed by repo:update`);
2888
+ p.log.warn(`${actionable.length} file(s) would be changed by repo:sync`);
2694
2889
  for (const r of actionable) {
2695
2890
  p.log.info(` ${r.action}: ${r.filePath} — ${r.description}`);
2696
2891
  const newContent = pendingWrites.get(r.filePath);
@@ -2702,13 +2897,12 @@ async function runCheck(targetDir) {
2702
2897
  p.log.info(` + ${lineCount} new lines`);
2703
2898
  } else {
2704
2899
  const diff = lineDiff(existing, newContent);
2705
- if (diff.length > 0) for (const line of diff) p.log.info(` ${line}`);
2900
+ for (const line of diff) p.log.info(` ${line}`);
2706
2901
  }
2707
2902
  }
2708
2903
  return 1;
2709
2904
  }
2710
2905
  const normalize = (line) => line.trimEnd();
2711
- /** Produce a compact line-level diff summary, ignoring whitespace-only differences. */
2712
2906
  function lineDiff(oldText, newText) {
2713
2907
  const oldLines = oldText.split("\n").map(normalize);
2714
2908
  const newLines = newText.split("\n").map(normalize);
@@ -2785,6 +2979,14 @@ function createRealExecutor() {
2785
2979
  } catch (_error) {}
2786
2980
  return packages;
2787
2981
  },
2982
+ listPackageDirs(cwd) {
2983
+ const packagesDir = path.join(cwd, "packages");
2984
+ try {
2985
+ return readdirSync(packagesDir, { withFileTypes: true }).filter((entry) => entry.isDirectory()).map((entry) => entry.name);
2986
+ } catch {
2987
+ return [];
2988
+ }
2989
+ },
2788
2990
  readFile(filePath) {
2789
2991
  try {
2790
2992
  return readFileSync(filePath, "utf-8");
@@ -2931,7 +3133,7 @@ async function createRelease(executor, conn, tag) {
2931
3133
  //#endregion
2932
3134
  //#region src/release/log.ts
2933
3135
  /** Log a debug message when verbose mode is enabled. */
2934
- function debug(config, message) {
3136
+ function debug$1(config, message) {
2935
3137
  if (config.verbose) p.log.info(`[debug] ${message}`);
2936
3138
  }
2937
3139
  /** Log the result of an exec call when verbose mode is enabled. */
@@ -3014,7 +3216,7 @@ function buildPrContent(executor, cwd, packagesBefore) {
3014
3216
  async function runVersionMode(executor, config) {
3015
3217
  p.log.info("Changesets detected — versioning packages");
3016
3218
  const packagesBefore = executor.listWorkspacePackages(config.cwd);
3017
- debug(config, `Packages before versioning: ${packagesBefore.map((pkg) => `${pkg.name}@${pkg.version}`).join(", ") || "(none)"}`);
3219
+ debug$1(config, `Packages before versioning: ${packagesBefore.map((pkg) => `${pkg.name}@${pkg.version}`).join(", ") || "(none)"}`);
3018
3220
  const changesetConfigPath = path.join(config.cwd, ".changeset", "config.json");
3019
3221
  const originalConfig = executor.readFile(changesetConfigPath);
3020
3222
  if (originalConfig) {
@@ -3025,7 +3227,7 @@ async function runVersionMode(executor, config) {
3025
3227
  commit: false
3026
3228
  };
3027
3229
  executor.writeFile(changesetConfigPath, JSON.stringify(patched, null, 2) + "\n");
3028
- debug(config, "Temporarily disabled changeset commit:true");
3230
+ debug$1(config, "Temporarily disabled changeset commit:true");
3029
3231
  }
3030
3232
  }
3031
3233
  const versionResult = executor.exec("pnpm changeset version", { cwd: config.cwd });
@@ -3034,11 +3236,11 @@ async function runVersionMode(executor, config) {
3034
3236
  if (versionResult.exitCode !== 0) throw new FatalError(`pnpm changeset version failed (exit code ${String(versionResult.exitCode)}):\n${versionResult.stderr}`);
3035
3237
  debugExec(config, "pnpm install --no-frozen-lockfile", executor.exec("pnpm install --no-frozen-lockfile", { cwd: config.cwd }));
3036
3238
  const { title, body } = buildPrContent(executor, config.cwd, packagesBefore);
3037
- debug(config, `PR title: ${title}`);
3239
+ debug$1(config, `PR title: ${title}`);
3038
3240
  executor.exec("git add -A", { cwd: config.cwd });
3039
3241
  const remainingChangesets = executor.listChangesetFiles(config.cwd);
3040
3242
  if (remainingChangesets.length > 0) p.log.warn(`Changeset files still present after versioning: ${remainingChangesets.join(", ")}`);
3041
- debug(config, `Changeset files after versioning: ${remainingChangesets.length > 0 ? remainingChangesets.join(", ") : "(none — all consumed)"}`);
3243
+ debug$1(config, `Changeset files after versioning: ${remainingChangesets.length > 0 ? remainingChangesets.join(", ") : "(none — all consumed)"}`);
3042
3244
  const commitResult = executor.exec("git commit -m \"chore: version packages\"", { cwd: config.cwd });
3043
3245
  debugExec(config, "git commit", commitResult);
3044
3246
  if (commitResult.exitCode !== 0) {
@@ -3062,7 +3264,7 @@ async function runVersionMode(executor, config) {
3062
3264
  token: config.token
3063
3265
  };
3064
3266
  const existingPr = await findOpenPr(executor, conn, BRANCH);
3065
- debug(config, `Existing open PR for ${BRANCH}: ${existingPr === null ? "(none)" : `#${String(existingPr)}`}`);
3267
+ debug$1(config, `Existing open PR for ${BRANCH}: ${existingPr === null ? "(none)" : `#${String(existingPr)}`}`);
3066
3268
  if (existingPr === null) {
3067
3269
  await createPr(executor, conn, {
3068
3270
  title,
@@ -3110,14 +3312,14 @@ async function runPublishMode(executor, config) {
3110
3312
  debugExec(config, "pnpm changeset publish", publishResult);
3111
3313
  if (publishResult.exitCode !== 0) throw new FatalError(`pnpm changeset publish failed (exit code ${String(publishResult.exitCode)}):\n${publishResult.stderr}`);
3112
3314
  const stdoutTags = parseNewTags(publishResult.stdout + "\n" + publishResult.stderr);
3113
- debug(config, `Tags from publish stdout: ${stdoutTags.length > 0 ? stdoutTags.join(", ") : "(none)"}`);
3315
+ debug$1(config, `Tags from publish stdout: ${stdoutTags.length > 0 ? stdoutTags.join(", ") : "(none)"}`);
3114
3316
  const expectedTags = computeExpectedTags(executor.listWorkspacePackages(config.cwd));
3115
- debug(config, `Expected tags from workspace packages: ${expectedTags.length > 0 ? expectedTags.join(", ") : "(none)"}`);
3317
+ debug$1(config, `Expected tags from workspace packages: ${expectedTags.length > 0 ? expectedTags.join(", ") : "(none)"}`);
3116
3318
  const remoteTags = parseRemoteTags(executor.exec("git ls-remote --tags origin", { cwd: config.cwd }).stdout);
3117
- debug(config, `Remote tags: ${remoteTags.length > 0 ? remoteTags.join(", ") : "(none)"}`);
3319
+ debug$1(config, `Remote tags: ${remoteTags.length > 0 ? remoteTags.join(", ") : "(none)"}`);
3118
3320
  const remoteSet = new Set(remoteTags);
3119
3321
  const tagsToPush = reconcileTags(expectedTags, remoteTags, stdoutTags);
3120
- debug(config, `Reconciled tags to push: ${tagsToPush.length > 0 ? tagsToPush.join(", ") : "(none)"}`);
3322
+ debug$1(config, `Reconciled tags to push: ${tagsToPush.length > 0 ? tagsToPush.join(", ") : "(none)"}`);
3121
3323
  if (config.dryRun) {
3122
3324
  if (tagsToPush.length === 0) {
3123
3325
  p.log.info("No packages were published");
@@ -3286,12 +3488,12 @@ function buildReleaseConfig(flags) {
3286
3488
  /** Core release logic — testable with a mock executor. */
3287
3489
  async function runRelease(config, executor) {
3288
3490
  const changesetFiles = executor.listChangesetFiles(config.cwd);
3289
- debug(config, `Changeset files found: ${changesetFiles.length > 0 ? changesetFiles.join(", ") : "(none)"}`);
3491
+ debug$1(config, `Changeset files found: ${changesetFiles.length > 0 ? changesetFiles.join(", ") : "(none)"}`);
3290
3492
  if (changesetFiles.length > 0) {
3291
- debug(config, "Entering version mode");
3493
+ debug$1(config, "Entering version mode");
3292
3494
  return runVersionMode(executor, config);
3293
3495
  }
3294
- debug(config, "Entering publish mode");
3496
+ debug$1(config, "Entering publish mode");
3295
3497
  return runPublishMode(executor, config);
3296
3498
  }
3297
3499
  //#endregion
@@ -3435,7 +3637,7 @@ async function runSimpleRelease(executor, config) {
3435
3637
  debugExec(config, "commit-and-tag-version", versionResult);
3436
3638
  if (versionResult.exitCode !== 0) throw new FatalError(`commit-and-tag-version failed (exit code ${String(versionResult.exitCode)}):\n${versionResult.stderr || versionResult.stdout}`);
3437
3639
  const version = readVersion(executor, config.cwd);
3438
- debug(config, `New version: ${version}`);
3640
+ debug$1(config, `New version: ${version}`);
3439
3641
  const tagResult = executor.exec("git describe --tags --abbrev=0", { cwd: config.cwd });
3440
3642
  debugExec(config, "git describe", tagResult);
3441
3643
  const tag = tagResult.stdout.trim();
@@ -3457,7 +3659,7 @@ async function runSimpleRelease(executor, config) {
3457
3659
  let pushed = false;
3458
3660
  if (!config.noPush) {
3459
3661
  const branch = executor.exec("git rev-parse --abbrev-ref HEAD", { cwd: config.cwd }).stdout.trim() || "main";
3460
- debug(config, `Pushing to origin/${branch}`);
3662
+ debug$1(config, `Pushing to origin/${branch}`);
3461
3663
  const pushResult = executor.exec(`git push --follow-tags origin ${branch}`, { cwd: config.cwd });
3462
3664
  debugExec(config, "git push", pushResult);
3463
3665
  if (pushResult.exitCode !== 0) throw new FatalError(`git push failed (exit code ${String(pushResult.exitCode)}):\n${pushResult.stderr || pushResult.stdout}`);
@@ -3487,7 +3689,7 @@ async function createPlatformRelease(executor, config, tag) {
3487
3689
  if (!config.platform) return false;
3488
3690
  if (config.platform.type === "forgejo") {
3489
3691
  if (await findRelease(executor, config.platform.conn, tag)) {
3490
- debug(config, `Release for ${tag} already exists, skipping`);
3692
+ debug$1(config, `Release for ${tag} already exists, skipping`);
3491
3693
  return false;
3492
3694
  }
3493
3695
  await createRelease(executor, config.platform.conn, tag);
@@ -3686,26 +3888,379 @@ const runChecksCommand = defineCommand({
3686
3888
  }
3687
3889
  });
3688
3890
  //#endregion
3891
+ //#region src/release/docker.ts
3892
/**
 * Map of package dir → docker build config from .tooling.json.
 * `context` defaults to "." (the repo root), matching convention discovery below.
 */
const ToolingDockerMapSchema = z.record(z.string(), z.object({
	dockerfile: z.string(),
	context: z.string().default(".")
}));
/** Only the `docker` slice of .tooling.json is validated here; other keys are ignored. */
const ToolingConfigDockerSchema = z.object({ docker: ToolingDockerMapSchema.optional() });
/** Minimal package.json shape: both fields optional so partial manifests still parse. */
const PackageInfoSchema = z.object({
	name: z.string().optional(),
	version: z.string().optional()
});
3901
/**
 * Load the `docker` map from `<cwd>/.tooling.json`.
 *
 * Any failure mode — file missing, unparsable JSON, or a shape that does not
 * satisfy the schema — collapses to an empty record rather than throwing,
 * so callers can treat "no config" and "bad config" the same way.
 */
function loadDockerMap(executor, cwd) {
	const raw = executor.readFile(path.join(cwd, ".tooling.json"));
	if (!raw) return {};
	try {
		const parsed = ToolingConfigDockerSchema.safeParse(JSON.parse(raw));
		if (!parsed.success) return {};
		return parsed.data.docker ?? {};
	} catch (_error) {
		// JSON.parse threw — malformed config file.
		return {};
	}
}
3914
/**
 * Read `name` and `version` from a package.json at the given path.
 *
 * Never throws: a missing file, invalid JSON, or a schema mismatch all yield
 * `{ name: undefined, version: undefined }` so callers can fall back to defaults.
 */
function readPackageInfo(executor, packageJsonPath) {
	// Fresh object per call so callers never share state.
	const blank = () => ({
		name: void 0,
		version: void 0
	});
	const raw = executor.readFile(packageJsonPath);
	if (!raw) return blank();
	try {
		const parsed = PackageInfoSchema.safeParse(JSON.parse(raw));
		if (!parsed.success) return blank();
		const { name, version } = parsed.data;
		return { name, version };
	} catch (_error) {
		// Unparsable JSON — treat the manifest as absent.
		return blank();
	}
}
3938
/** Convention paths to check for Dockerfiles in a package directory. */
// Order matters: the first existing path wins, so a top-level Dockerfile
// takes precedence over docker/Dockerfile (see findConventionDockerfile).
const CONVENTION_DOCKERFILE_PATHS = ["Dockerfile", "docker/Dockerfile"];
3940
/**
 * Look for a Dockerfile at the convention locations of a monorepo package.
 * Probes `packages/{dir}/Dockerfile` then `packages/{dir}/docker/Dockerfile`,
 * returning a docker config (context is always the repo root) for the first
 * path that exists, or `undefined` when neither does.
 */
function findConventionDockerfile(executor, cwd, dir) {
	// `find` stops at the first readable candidate, mirroring lazy probing.
	const hit = CONVENTION_DOCKERFILE_PATHS
		.map((rel) => `packages/${dir}/${rel}`)
		.find((candidate) => executor.readFile(path.join(cwd, candidate)) !== null);
	if (hit === undefined) return undefined;
	return {
		dockerfile: hit,
		context: "."
	};
}
3953
/**
 * Look for a Dockerfile at the convention locations of a single-package repo.
 * Probes `Dockerfile` then `docker/Dockerfile` at the project root and returns
 * a docker config for the first that exists, or `undefined` when neither does.
 */
function findRootDockerfile(executor, cwd) {
	const hit = CONVENTION_DOCKERFILE_PATHS.find(
		(rel) => executor.readFile(path.join(cwd, rel)) !== null
	);
	if (hit === undefined) return undefined;
	return {
		dockerfile: hit,
		context: "."
	};
}
3963
/**
* Discover Docker packages by convention and merge with .tooling.json overrides.
*
* Convention: any package with a Dockerfile or docker/Dockerfile is a Docker package.
* For monorepos, scans packages/{name}/. For single-package repos, scans the root.
* The docker map in .tooling.json overrides convention-discovered config and can add
* packages at non-standard locations.
*
* Image names are derived from {root-name}-{package-name} using each package's package.json name.
* Versions are read from each package's own package.json.
*/
function detectDockerPackages(executor, cwd, repoName) {
	const overrides = loadDockerMap(executor, cwd);
	const packageDirs = executor.listPackageDirs(cwd);
	const packages = [];
	// Dirs already handled in the first pass, so the override pass below
	// does not emit duplicates.
	const seen = /* @__PURE__ */ new Set();
	if (packageDirs.length > 0) {
		// Monorepo: first pass over packages/ — an explicit override beats
		// the convention-discovered Dockerfile for the same dir.
		for (const dir of packageDirs) {
			const convention = findConventionDockerfile(executor, cwd, dir);
			const docker = overrides[dir] ?? convention;
			if (docker) {
				const { name, version } = readPackageInfo(executor, path.join(cwd, "packages", dir, "package.json"));
				packages.push({
					dir,
					// Falls back to the directory name when package.json has no name.
					imageName: `${repoName}-${name ?? dir}`,
					version,
					docker
				});
				seen.add(dir);
			}
		}
		// Second pass: overrides that name dirs not found under packages/
		// (non-standard locations) are still included.
		for (const [dir, docker] of Object.entries(overrides)) if (!seen.has(dir)) {
			const { name, version } = readPackageInfo(executor, path.join(cwd, "packages", dir, "package.json"));
			packages.push({
				dir,
				imageName: `${repoName}-${name ?? dir}`,
				version,
				docker
			});
		}
	} else {
		// Single-package repo: the root is the only candidate, keyed as "." in overrides.
		const convention = findRootDockerfile(executor, cwd);
		const docker = overrides["."] ?? convention;
		if (docker) {
			const { name, version } = readPackageInfo(executor, path.join(cwd, "package.json"));
			// Root repo uses the package name directly (no {repo}-{pkg} prefix).
			packages.push({
				dir: ".",
				imageName: name ?? repoName,
				version,
				docker
			});
		}
	}
	return packages;
}
4018
/**
* Read docker config for a single package, checking convention paths first,
* then .tooling.json overrides. Used by the per-package image:build script.
*
* @throws FatalError when no Dockerfile exists at convention paths and no
*         .tooling.json override names this package.
*/
function readSinglePackageDocker(executor, cwd, packageDir, repoName) {
	// Normalize "packages/foo", "./packages/foo", or "foo" to the bare dir name.
	// NOTE(review): if packageDir is ".", this resolves to the basename of cwd
	// and then probes packages/<cwd-name>/ — presumably this entry point is
	// monorepo-only; confirm root repos never reach here.
	const dir = path.basename(path.resolve(cwd, packageDir));
	const convention = findConventionDockerfile(executor, cwd, dir);
	// An explicit .tooling.json override beats the convention-discovered path.
	const docker = loadDockerMap(executor, cwd)[dir] ?? convention;
	if (!docker) throw new FatalError(`No Dockerfile found for package "${dir}" (checked convention paths and .tooling.json)`);
	const { name, version } = readPackageInfo(executor, path.join(cwd, "packages", dir, "package.json"));
	return {
		dir,
		// Same {repo}-{pkg} naming scheme as detectDockerPackages.
		imageName: `${repoName}-${name ?? dir}`,
		version,
		docker
	};
}
4035
/**
 * Parse a semver string into numeric { major, minor, patch }.
 * Accepts an optional leading "v" and ignores any prerelease/build suffix.
 *
 * @throws FatalError when the string does not start with X.Y.Z digits.
 */
function parseSemver(version) {
	const stripped = version.startsWith("v") ? version.slice(1) : version;
	const parts = /^(\d+)\.(\d+)\.(\d+)/.exec(stripped);
	if (parts === null) throw new FatalError(`Invalid semver version: ${version}`);
	const [, major, minor, patch] = parts;
	return {
		major: Number(major),
		minor: Number(minor),
		patch: Number(patch)
	};
}
4046
/**
 * Expand a package version into the tag variants pushed to the registry:
 * `latest`, `vX.Y.Z`, `vX.Y`, and `vX` (in that order).
 */
function generateTags(version) {
	const v = parseSemver(version);
	const full = `v${v.major}.${v.minor}.${v.patch}`;
	const minorLine = `v${v.major}.${v.minor}`;
	const majorLine = `v${v.major}`;
	return [
		"latest",
		full,
		minorLine,
		majorLine
	];
}
4056
/** Compose the full image reference `namespace/imageName:tag`. */
function imageRef(namespace, imageName, tag) {
	const repo = `${namespace}/${imageName}`;
	return `${repo}:${tag}`;
}
4060
/** Print an informational line to stdout. */
function log(text) {
	console.log(text);
}
4063
/** Print a `[debug]`-prefixed line to stdout, but only when verbose mode is on. */
function debug(verbose, text) {
	if (!verbose) return;
	console.log(`[debug] ${text}`);
}
4066
/**
 * Read the repo name from the root package.json.
 *
 * @throws FatalError when package.json is missing or has no `name` field.
 */
function readRepoName(executor, cwd) {
	const raw = executor.readFile(path.join(cwd, "package.json"));
	if (!raw) throw new FatalError("No package.json found in project root");
	const name = parsePackageJson(raw)?.name;
	if (!name) throw new FatalError("Root package.json must have a name field");
	return name;
}
4074
/**
 * Build one docker image from its package config.
 * Dockerfile and context paths are resolved relative to `cwd`; the image is
 * tagged `<imageName>:latest` and any `extraArgs` are passed through to
 * `docker build` verbatim.
 *
 * @throws FatalError when `docker build` exits non-zero.
 */
function buildImage(executor, pkg, cwd, verbose, extraArgs) {
	const dockerfile = path.resolve(cwd, pkg.docker.dockerfile);
	const context = path.resolve(cwd, pkg.docker.context);
	const parts = [
		"docker build",
		`-f ${dockerfile}`,
		`-t ${pkg.imageName}:latest`
	];
	parts.push(...extraArgs, context);
	const command = parts.join(" ");
	debug(verbose, `Running: ${command}`);
	const result = executor.exec(command);
	debug(verbose, `Build stdout: ${result.stdout}`);
	if (result.exitCode !== 0) throw new FatalError(`docker build failed for ${pkg.dir} (exit ${result.exitCode}): ${result.stderr}`);
}
4090
/**
* Detect packages with docker config in .tooling.json and build each one.
* Runs `docker build -f <dockerfile> -t <image-name>:latest <context>` for each package.
* Dockerfile and context paths are resolved relative to the project root.
*
* When `packageDir` is set, builds only that single package (for use as an image:build script).
*
* @returns {{ packages: Array }} the docker packages that were built (empty when none found).
*/
function runDockerBuild(executor, config) {
	const repoName = readRepoName(executor, config.cwd);
	if (config.packageDir) {
		// Single-package mode: readSinglePackageDocker throws if no Dockerfile exists.
		const pkg = readSinglePackageDocker(executor, config.cwd, config.packageDir, repoName);
		log(`Building image for ${pkg.dir} (${pkg.imageName}:latest)...`);
		buildImage(executor, pkg, config.cwd, config.verbose, config.extraArgs);
		log(`Built ${pkg.imageName}:latest`);
		return { packages: [pkg] };
	}
	// All-packages mode: convention discovery merged with .tooling.json overrides.
	const packages = detectDockerPackages(executor, config.cwd, repoName);
	if (packages.length === 0) {
		log("No packages with docker config found");
		return { packages: [] };
	}
	log(`Found ${packages.length} Docker package(s): ${packages.map((p) => p.dir).join(", ")}`);
	for (const pkg of packages) {
		log(`Building image for ${pkg.dir} (${pkg.imageName}:latest)...`);
		// buildImage throws FatalError on the first failed build, aborting the loop.
		buildImage(executor, pkg, config.cwd, config.verbose, config.extraArgs);
	}
	log(`Built ${packages.length} image(s)`);
	return { packages };
}
4119
/**
 * Run the full Docker publish pipeline:
 * 1. Build all images via runDockerBuild
 * 2. Login to registry
 * 3. Tag each image with semver variants from its own package.json version
 * 4. Push all tags
 * 5. Logout from registry
 *
 * @param executor command executor whose `exec(cmd)` returns `{ exitCode, stdout, stderr }`
 * @param config   `{ cwd, registryHost, registryNamespace, username, password, dryRun, verbose }`
 * @returns `{ packages, tags }` — the built packages and every image ref that was tagged
 * @throws FatalError when a package lacks a version, or when login/tag/push fails
 */
function runDockerPublish(executor, config) {
	const { packages } = runDockerBuild(executor, {
		cwd: config.cwd,
		packageDir: void 0,
		verbose: config.verbose,
		extraArgs: []
	});
	if (packages.length === 0) return {
		packages: [],
		tags: []
	};
	// Every package must carry a version: it drives the semver tag variants below.
	for (const pkg of packages) if (!pkg.version) throw new FatalError(`Package ${pkg.dir} has docker config but no version in package.json`);
	if (!config.dryRun) {
		log(`Logging in to ${config.registryHost}...`);
		// Escape \ " $ ` so the password survives the double-quoted shell context
		// intact. Previously an unescaped password containing any of these
		// characters would break the command or be interpreted by the shell.
		// NOTE(review): the secret still appears in the executed command line;
		// consider an exec API with stdin support to keep it out of process listings.
		const shellSafePassword = config.password.replace(/[\\"$`]/g, (ch) => `\\${ch}`);
		const loginResult = executor.exec(`echo "${shellSafePassword}" | docker login ${config.registryHost} -u ${config.username} --password-stdin`);
		if (loginResult.exitCode !== 0) throw new FatalError(`Docker login failed: ${loginResult.stderr}`);
	} else log("[dry-run] Skipping docker login");
	const allTags = [];
	try {
		for (const pkg of packages) {
			const tags = generateTags(pkg.version ?? "");
			log(`${pkg.dir} v${pkg.version} → tags: ${tags.join(", ")}`);
			for (const tag of tags) {
				const ref = imageRef(config.registryNamespace, pkg.imageName, tag);
				allTags.push(ref);
				log(`Tagging ${pkg.imageName} → ${ref}`);
				// Bare <name> resolves to <name>:latest — the tag the build produced.
				const tagResult = executor.exec(`docker tag ${pkg.imageName} ${ref}`);
				if (tagResult.exitCode !== 0) throw new FatalError(`docker tag failed: ${tagResult.stderr}`);
				if (!config.dryRun) {
					log(`Pushing ${ref}...`);
					const pushResult = executor.exec(`docker push ${ref}`);
					if (pushResult.exitCode !== 0) throw new FatalError(`docker push failed: ${pushResult.stderr}`);
				} else log(`[dry-run] Skipping push for ${ref}`);
			}
		}
	} finally {
		// Always attempt logout after a real login, even if tagging/pushing failed.
		if (!config.dryRun) {
			log(`Logging out from ${config.registryHost}...`);
			executor.exec(`docker logout ${config.registryHost}`);
		}
	}
	log(`Published ${allTags.length} image tag(s)`);
	return {
		packages,
		tags: allTags
	};
}
4174
+ //#endregion
4175
+ //#region src/commands/publish-docker.ts
4176
/**
 * Read an environment variable that must be present and non-empty.
 *
 * @param name env var name to look up
 * @returns the variable's value
 * @throws FatalError when the variable is unset or empty
 */
function requireEnv(name) {
	const raw = process.env[name];
	if (raw === undefined || raw === "") {
		throw new FatalError(`Missing required environment variable: ${name}`);
	}
	return raw;
}
4181
/** `docker:publish` subcommand: builds, tags, and pushes all Docker images. */
const publishDockerCommand = defineCommand({
	meta: {
		name: "docker:publish",
		description: "Build, tag, and push Docker images for packages with an image:build script"
	},
	args: {
		"dry-run": {
			type: "boolean",
			description: "Build and tag images but skip login, push, and logout"
		},
		verbose: {
			type: "boolean",
			description: "Enable detailed debug logging"
		}
	},
	async run({ args }) {
		// Registry coordinates and credentials come exclusively from the
		// environment; each read throws FatalError when the variable is missing.
		const registryHost = requireEnv("DOCKER_REGISTRY_HOST");
		const registryNamespace = requireEnv("DOCKER_REGISTRY_NAMESPACE");
		const username = requireEnv("DOCKER_USERNAME");
		const password = requireEnv("DOCKER_PASSWORD");
		runDockerPublish(createRealExecutor(), {
			cwd: process.cwd(),
			registryHost,
			registryNamespace,
			username,
			password,
			dryRun: args["dry-run"] === true,
			verbose: args.verbose === true
		});
	}
});
4209
+ //#endregion
4210
+ //#region src/commands/docker-build.ts
4211
/** `docker:build` subcommand: builds images for docker-enabled packages. */
const dockerBuildCommand = defineCommand({
	meta: {
		name: "docker:build",
		description: "Build Docker images for packages with docker config in .tooling.json"
	},
	args: {
		package: {
			type: "string",
			description: "Build a single package by directory path (e.g. packages/server). Useful as an image:build script."
		},
		verbose: {
			type: "boolean",
			description: "Enable detailed debug logging"
		},
		_: {
			type: "positional",
			required: false,
			description: "Extra arguments passed to docker build (after --)"
		}
	},
	async run({ args }) {
		// Positionals may arrive as a single value or an array; normalize to an
		// array of non-empty strings before forwarding them to docker build.
		const positionals = args._ ?? [];
		const asArray = Array.isArray(positionals) ? positionals : [positionals];
		runDockerBuild(createRealExecutor(), {
			cwd: process.cwd(),
			packageDir: args.package,
			verbose: args.verbose === true,
			extraArgs: asArray.map(String).filter((a) => a.length > 0)
		});
	}
});
4243
+ //#endregion
3689
4244
  //#region src/bin.ts
3690
4245
  const main = defineCommand({
3691
4246
  meta: {
3692
4247
  name: "tooling",
3693
- version: "0.14.1",
4248
+ version: "0.16.0",
3694
4249
  description: "Bootstrap and maintain standardized TypeScript project tooling"
3695
4250
  },
3696
4251
  subCommands: {
3697
- "repo:init": initCommand,
3698
- "repo:update": updateCommand,
3699
- "repo:check": checkCommand,
4252
+ "repo:sync": syncCommand,
3700
4253
  "checks:run": runChecksCommand,
3701
4254
  "release:changesets": releaseForgejoCommand,
3702
4255
  "release:trigger": releaseTriggerCommand,
3703
4256
  "forgejo:create-release": createForgejoReleaseCommand,
3704
4257
  "changesets:merge": releaseMergeCommand,
3705
- "release:simple": releaseSimpleCommand
4258
+ "release:simple": releaseSimpleCommand,
4259
+ "docker:publish": publishDockerCommand,
4260
+ "docker:build": dockerBuildCommand
3706
4261
  }
3707
4262
  });
3708
- console.log(`@bensandee/tooling v0.14.1`);
4263
+ console.log(`@bensandee/tooling v0.16.0`);
3709
4264
  runMain(main);
3710
4265
  //#endregion
3711
4266
  export {};