@bensandee/tooling 0.23.0 → 0.24.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2):
  1. package/dist/bin.mjs +1537 -1513
  2. package/package.json +1 -1
package/dist/bin.mjs CHANGED
@@ -981,7 +981,7 @@ function getAddedDevDepNames(config) {
981
981
  const deps = { ...ROOT_DEV_DEPS };
982
982
  if (config.structure !== "monorepo") Object.assign(deps, PER_PACKAGE_DEV_DEPS);
983
983
  deps["@bensandee/config"] = "0.8.2";
984
- deps["@bensandee/tooling"] = "0.23.0";
984
+ deps["@bensandee/tooling"] = "0.24.0";
985
985
  if (config.formatter === "oxfmt") deps["oxfmt"] = "0.35.0";
986
986
  if (config.formatter === "prettier") deps["prettier"] = "3.8.1";
987
987
  addReleaseDeps(deps, config);
@@ -1006,7 +1006,7 @@ async function generatePackageJson(ctx) {
1006
1006
  const devDeps = { ...ROOT_DEV_DEPS };
1007
1007
  if (!isMonorepo) Object.assign(devDeps, PER_PACKAGE_DEV_DEPS);
1008
1008
  devDeps["@bensandee/config"] = isWorkspacePackage(ctx, "@bensandee/config") ? "workspace:*" : "0.8.2";
1009
- devDeps["@bensandee/tooling"] = isWorkspacePackage(ctx, "@bensandee/tooling") ? "workspace:*" : "0.23.0";
1009
+ devDeps["@bensandee/tooling"] = isWorkspacePackage(ctx, "@bensandee/tooling") ? "workspace:*" : "0.24.0";
1010
1010
  if (ctx.config.useEslintPlugin) devDeps["@bensandee/eslint-plugin"] = isWorkspacePackage(ctx, "@bensandee/eslint-plugin") ? "workspace:*" : "0.9.2";
1011
1011
  if (ctx.config.formatter === "oxfmt") devDeps["oxfmt"] = "0.35.0";
1012
1012
  if (ctx.config.formatter === "prettier") devDeps["prettier"] = "3.8.1";
@@ -2599,1638 +2599,1658 @@ async function runGenerators(ctx) {
2599
2599
  return results;
2600
2600
  }
2601
2601
  //#endregion
2602
- //#region src/generators/migrate-prompt.ts
2603
- /**
2604
- * Generate a context-aware AI migration prompt based on what the CLI did.
2605
- * This prompt can be pasted into Claude Code (or similar) to finish the migration.
2606
- */
2607
- function generateMigratePrompt(results, config, detected) {
2608
- const sections = [];
2609
- sections.push("# Migration Prompt");
2610
- sections.push("");
2611
- sections.push("The following prompt was generated by `@bensandee/tooling repo:sync`. Paste it into Claude Code or another AI assistant to finish migrating this repository.");
2612
- sections.push("");
2613
- sections.push("> **Tip:** Before starting, run `/init` in Claude Code to generate a `CLAUDE.md` that gives the AI a complete picture of your repository's structure, conventions, and build commands.");
2614
- sections.push("");
2615
- sections.push("## What was changed");
2616
- sections.push("");
2617
- const created = results.filter((r) => r.action === "created");
2618
- const updated = results.filter((r) => r.action === "updated");
2619
- const skipped = results.filter((r) => r.action === "skipped");
2620
- const archived = results.filter((r) => r.action === "archived");
2621
- if (created.length > 0) {
2622
- sections.push("**Created:**");
2623
- for (const r of created) sections.push(`- \`${r.filePath}\` — ${r.description}`);
2624
- sections.push("");
2625
- }
2626
- if (updated.length > 0) {
2627
- sections.push("**Updated:**");
2628
- for (const r of updated) sections.push(`- \`${r.filePath}\` — ${r.description}`);
2629
- sections.push("");
2630
- }
2631
- if (archived.length > 0) {
2632
- sections.push("**Archived:**");
2633
- for (const r of archived) sections.push(`- \`${r.filePath}\` — ${r.description}`);
2634
- sections.push("");
2635
- }
2636
- if (skipped.length > 0) {
2637
- sections.push("**Skipped (review these):**");
2638
- for (const r of skipped) sections.push(`- \`${r.filePath}\` — ${r.description}`);
2639
- sections.push("");
2640
- }
2641
- sections.push("## Migration tasks");
2642
- sections.push("");
2643
- const legacyToRemove = detected.legacyConfigs.filter((legacy) => !(legacy.tool === "prettier" && config.formatter === "prettier"));
2644
- if (legacyToRemove.length > 0) {
2645
- sections.push("### Remove legacy tooling");
2646
- sections.push("");
2647
- for (const legacy of legacyToRemove) {
2648
- const replacement = {
2649
- eslint: "oxlint",
2650
- prettier: "oxfmt",
2651
- jest: "vitest",
2652
- webpack: "tsdown",
2653
- rollup: "tsdown"
2654
- }[legacy.tool];
2655
- sections.push(`- Remove ${legacy.tool} config files (${legacy.files.map((f) => `\`${f}\``).join(", ")}). This project now uses **${replacement}**.`);
2656
- sections.push(` - Uninstall ${legacy.tool}-related packages from devDependencies`);
2657
- if (legacy.tool === "eslint") sections.push(" - Migrate any custom ESLint rules that don't have oxlint equivalents");
2658
- if (legacy.tool === "jest") sections.push(" - Migrate any jest-specific test utilities (jest.mock, jest.fn) to vitest equivalents (vi.mock, vi.fn)");
2659
- }
2660
- sections.push("");
2661
- }
2662
- if (archived.length > 0) {
2663
- sections.push("### Review archived files");
2664
- sections.push("");
2665
- sections.push("The following files were modified or replaced. The originals have been saved to `.tooling-archived/`:");
2666
- sections.push("");
2667
- for (const r of archived) sections.push(`- \`${r.filePath}\` → \`.tooling-archived/${r.filePath}\``);
2668
- sections.push("");
2669
- sections.push("For each archived file, **diff the old version against the new one** and look for features, categories, or modules that were enabled in the original but are missing from the replacement. Focus on broad capability gaps rather than individual rule strictness (in general, being stricter is fine). Examples of what to look for:");
2670
- sections.push("");
2671
- sections.push("- **Lint configs**: enabled plugin categories (e.g. `jsx-a11y`, `import`, `react`, `nextjs`), custom `plugins` or `overrides`, file-scoped rule blocks");
2672
- sections.push("- **TypeScript configs**: compiler features like `jsx`, `paths`, `baseUrl`, or `references` that affect build behavior");
2673
- sections.push("- **Other configs**: feature flags, custom presets, or integrations that go beyond the default template");
2674
- sections.push("");
2675
- sections.push("If the old config had capabilities the new one lacks, port them into the new file. Then:");
2676
- sections.push("");
2677
- sections.push("1. If the project previously used `husky` and `lint-staged`, remove them from `devDependencies`");
2678
- sections.push("2. Delete the `.tooling-archived/` directory when migration is complete");
2679
- sections.push("");
2680
- }
2681
- const oxlintWasSkipped = results.find((r) => r.filePath === "oxlint.config.ts")?.action === "skipped";
2682
- if (detected.hasLegacyOxlintJson) {
2683
- sections.push("### Migrate .oxlintrc.json to oxlint.config.ts");
2684
- sections.push("");
2685
- sections.push("A new `oxlint.config.ts` has been generated using `defineConfig` from the `oxlint` package. The existing `.oxlintrc.json` needs to be migrated:");
2686
- sections.push("");
2687
- sections.push("1. Read `.oxlintrc.json` and compare its `rules` against the rules provided by `@bensandee/config/oxlint/recommended` (check `node_modules/@bensandee/config`). Most standard rules are already included in the recommended config.");
2688
- sections.push("2. If there are any custom rules, overrides, settings, or `jsPlugins` not covered by the recommended config, add them to `oxlint.config.ts` alongside the `extends`.");
2689
- sections.push("3. Delete `.oxlintrc.json`.");
2690
- sections.push("4. Run `pnpm lint` to verify the new config works correctly.");
2691
- sections.push("");
2692
- } else if (oxlintWasSkipped && detected.hasOxlintConfig) {
2693
- sections.push("### Verify oxlint.config.ts includes recommended rules");
2694
- sections.push("");
2695
- sections.push("The existing `oxlint.config.ts` was kept as-is. Verify that it extends the recommended config from `@bensandee/config/oxlint`:");
2696
- sections.push("");
2697
- sections.push("1. Open `oxlint.config.ts` and check that it imports and extends `@bensandee/config/oxlint/recommended`.");
2698
- sections.push("2. The expected pattern is:");
2699
- sections.push(" ```ts");
2700
- sections.push(" import recommended from \"@bensandee/config/oxlint/recommended\";");
2701
- sections.push(" import { defineConfig } from \"oxlint\";");
2702
- sections.push("");
2703
- sections.push(" export default defineConfig({ extends: [recommended] });");
2704
- sections.push(" ```");
2705
- sections.push("3. If it uses a different pattern, update it to extend the recommended config while preserving any project-specific customizations.");
2706
- sections.push("4. Run `pnpm lint` to verify the config works correctly.");
2707
- sections.push("");
2708
- }
2709
- if (config.structure === "monorepo" && !detected.hasPnpmWorkspace) {
2710
- sections.push("### Migrate to monorepo structure");
2711
- sections.push("");
2712
- sections.push("This project was converted from a single repo to a monorepo. Complete the migration:");
2713
- sections.push("");
2714
- sections.push("1. Move existing source into `packages/<name>/` (using the existing package name)");
2715
- sections.push("2. Split the root `package.json` into a root workspace manifest + package-level `package.json`");
2716
- sections.push("3. Move the existing `tsconfig.json` into the package and update the root tsconfig with project references");
2717
- sections.push("4. Create a package-level `tsdown.config.ts` in the new package");
2718
- sections.push("5. Update any import paths or build scripts affected by the move");
2719
- sections.push("");
2720
- }
2721
- const skippedConfigs = skipped.filter((r) => r.filePath !== "ci" && r.description !== "Not a monorepo");
2722
- if (skippedConfigs.length > 0) {
2723
- sections.push("### Review skipped files");
2724
- sections.push("");
2725
- sections.push("The following files were left unchanged. Review them for compatibility:");
2726
- sections.push("");
2727
- for (const r of skippedConfigs) sections.push(`- \`${r.filePath}\` — ${r.description}`);
2728
- sections.push("");
2729
- }
2730
- if (results.some((r) => r.filePath === "test/example.test.ts" && r.action === "created")) {
2731
- sections.push("### Generate tests");
2732
- sections.push("");
2733
- sections.push("A starter test was created at `test/example.test.ts`. Now:");
2734
- sections.push("");
2735
- sections.push("1. Review the existing source code in `src/`");
2736
- sections.push("2. Create additional test files following the starter test's patterns (import style, describe/it structure)");
2737
- sections.push("3. Focus on edge cases and core business logic");
2738
- sections.push("4. Aim for meaningful coverage of exported functions and key code paths");
2739
- sections.push("");
2740
- }
2741
- sections.push("## Ground rules");
2742
- sections.push("");
2743
- sections.push("It is OK to add new packages (e.g. `zod`, `@bensandee/common`) if they are needed to resolve errors.");
2744
- sections.push("");
2745
- sections.push("When resolving errors from the checklist below, prefer fixing the root cause over suppressing the issue. For example:");
2746
- sections.push("");
2747
- sections.push("- **Lint errors**: fix the code rather than adding disable comments or rule exceptions");
2748
- sections.push("- **Test failures**: update the test or fix the underlying bug rather than skipping or deleting the test");
2749
- sections.push("- **Knip findings**: remove genuinely unused code/exports/dependencies rather than adding ignores to `knip.config.ts`");
2750
- sections.push("- **Type errors**: add proper types rather than using `any` or `@ts-expect-error`");
2751
- sections.push("");
2752
- sections.push("Only suppress an issue if there is a clear, documented reason why the fix is not feasible (e.g. a third-party type mismatch). Leave a comment explaining why.");
2753
- sections.push("");
2754
- sections.push("## Verification checklist");
2755
- sections.push("");
2756
- sections.push("Run each of these commands and fix any errors before moving on:");
2757
- sections.push("");
2758
- sections.push("1. `pnpm install`");
2759
- const updateCmd = `pnpm update --latest ${getAddedDevDepNames(config).join(" ")}`;
2760
- sections.push(`2. \`${updateCmd}\` — bump added dependencies to their latest versions`);
2761
- sections.push("3. `pnpm typecheck` — fix any type errors");
2762
- sections.push("4. `pnpm build` — fix any build errors");
2763
- sections.push("5. `pnpm test` — fix any test failures");
2764
- sections.push("6. `pnpm lint` — fix the code to satisfy lint rules");
2765
- sections.push("7. `pnpm knip` — remove unused exports, dependencies, and dead code");
2766
- sections.push("8. `pnpm format` — fix any formatting issues");
2767
- sections.push("");
2768
- return sections.join("\n");
2769
- }
2770
- //#endregion
2771
- //#region src/commands/repo-init.ts
2772
- /** Log what was detected so the user understands generator decisions. */
2773
- function logDetectionSummary(ctx) {
2774
- const dockerNames = getDockerPackageNames(ctx);
2775
- if (dockerNames.length > 0) p.log.info(`Detected Docker packages: ${dockerNames.join(", ")}`);
2776
- if (ctx.config.releaseStrategy !== "none") {
2777
- const publishable = getPublishablePackages(ctx.targetDir, ctx.config.structure, ctx.packageJson);
2778
- if (publishable.length > 0) p.log.info(`Will publish npm packages: ${publishable.map((pkg) => pkg.name).join(", ")}`);
2779
- else p.log.info("No publishable npm packages — npm registry setup will be skipped");
2780
- }
2781
- }
2782
- async function runInit(config, options = {}) {
2783
- const detected = detectProject(config.targetDir);
2784
- const s = p.spinner();
2785
- const { ctx, archivedFiles } = createContext(config, options.confirmOverwrite ?? (async (relativePath) => {
2786
- s.stop("Paused");
2787
- const result = await p.select({
2788
- message: `${relativePath} already exists. What do you want to do?`,
2789
- options: [{
2790
- value: "overwrite",
2791
- label: "Overwrite"
2792
- }, {
2793
- value: "skip",
2794
- label: "Skip"
2795
- }]
2796
- });
2797
- s.start("Generating configuration files...");
2798
- if (p.isCancel(result)) return "skip";
2799
- return result;
2800
- }));
2801
- logDetectionSummary(ctx);
2802
- s.start("Generating configuration files...");
2803
- const results = await runGenerators(ctx);
2804
- const alreadyArchived = new Set(results.filter((r) => r.action === "archived").map((r) => r.filePath));
2805
- for (const rel of archivedFiles) if (!alreadyArchived.has(rel)) results.push({
2806
- filePath: rel,
2807
- action: "archived",
2808
- description: `Original saved to .tooling-archived/${rel}`
2809
- });
2810
- const created = results.filter((r) => r.action === "created");
2811
- const updated = results.filter((r) => r.action === "updated");
2812
- if (!(created.length > 0 || updated.length > 0 || archivedFiles.length > 0) && options.noPrompt) {
2813
- s.stop("Repository is up to date.");
2814
- return results;
2815
- }
2816
- s.stop("Done!");
2817
- if (results.some((r) => r.action === "archived" && r.filePath.startsWith(".husky/"))) try {
2818
- execSync("git config --unset core.hooksPath", {
2819
- cwd: config.targetDir,
2820
- stdio: "ignore"
2821
- });
2822
- } catch (_error) {}
2823
- const summaryLines = [];
2824
- if (created.length > 0) summaryLines.push(`Created: ${created.map((r) => r.filePath).join(", ")}`);
2825
- if (updated.length > 0) summaryLines.push(`Updated: ${updated.map((r) => r.filePath).join(", ")}`);
2826
- p.note(summaryLines.join("\n"), "Summary");
2827
- if (!options.noPrompt) {
2828
- const prompt = generateMigratePrompt(results, config, detected);
2829
- const promptPath = ".tooling-migrate.md";
2830
- ctx.write(promptPath, prompt);
2831
- p.log.info(`Migration prompt written to ${promptPath}`);
2832
- p.log.info("In Claude Code, run: \"Execute the steps in .tooling-migrate.md\"");
2833
- }
2834
- const bensandeeDeps = getAddedDevDepNames(config).filter((name) => name.startsWith("@bensandee/"));
2835
- const hasLockfile = ctx.exists("pnpm-lock.yaml");
2836
- if (bensandeeDeps.length > 0 && hasLockfile) {
2837
- s.start("Updating @bensandee/* packages...");
2838
- try {
2839
- execSync(`pnpm update --latest ${bensandeeDeps.join(" ")}`, {
2840
- cwd: config.targetDir,
2841
- stdio: "ignore"
2842
- });
2843
- s.stop("Updated @bensandee/* packages");
2844
- } catch (_error) {
2845
- s.stop("Could not update @bensandee/* packages — run pnpm install first");
2846
- }
2847
- }
2848
- p.note([
2849
- "1. Run: pnpm install",
2850
- "2. Run: pnpm typecheck",
2851
- "3. Run: pnpm build",
2852
- "4. Run: pnpm test",
2853
- ...options.noPrompt ? [] : ["5. In Claude Code, run: \"Execute the steps in .tooling-migrate.md\""]
2854
- ].join("\n"), "Next steps");
2855
- return results;
2856
- }
2857
- //#endregion
2858
- //#region src/commands/repo-sync.ts
2859
- const syncCommand = defineCommand({
2860
- meta: {
2861
- name: "repo:sync",
2862
- description: "Detect, generate, and sync project tooling (idempotent)"
2863
- },
2864
- args: {
2865
- dir: {
2866
- type: "positional",
2867
- description: "Target directory (default: current directory)",
2868
- required: false
2869
- },
2870
- check: {
2871
- type: "boolean",
2872
- description: "Dry-run mode: report drift without writing files"
2873
- },
2874
- yes: {
2875
- type: "boolean",
2876
- alias: "y",
2877
- description: "Accept all defaults (non-interactive)"
2878
- },
2879
- "eslint-plugin": {
2880
- type: "boolean",
2881
- description: "Include @bensandee/eslint-plugin (default: true)"
2882
- },
2883
- "no-ci": {
2884
- type: "boolean",
2885
- description: "Skip CI workflow generation"
2886
- },
2887
- "no-prompt": {
2888
- type: "boolean",
2889
- description: "Skip migration prompt generation"
2890
- }
2891
- },
2892
- async run({ args }) {
2893
- const targetDir = path.resolve(args.dir ?? ".");
2894
- if (args.check) {
2895
- const exitCode = await runCheck(targetDir);
2896
- process.exitCode = exitCode;
2897
- return;
2898
- }
2899
- const saved = loadToolingConfig(targetDir);
2900
- const isFirstRun = !saved;
2901
- let config;
2902
- if (args.yes || !isFirstRun) {
2903
- const detected = buildDefaultConfig(targetDir, {
2904
- eslintPlugin: args["eslint-plugin"] === true ? true : void 0,
2905
- noCi: args["no-ci"] === true ? true : void 0
2906
- });
2907
- config = saved ? mergeWithSavedConfig(detected, saved) : detected;
2908
- } else config = await runInitPrompts(targetDir, saved);
2909
- await runInit(config, {
2910
- noPrompt: args["no-prompt"] === true || !isFirstRun,
2911
- ...!isFirstRun && { confirmOverwrite: async () => "overwrite" }
2912
- });
2913
- }
2914
- });
2915
- /** Run sync in check mode: dry-run drift detection. */
2916
- async function runCheck(targetDir) {
2917
- const saved = loadToolingConfig(targetDir);
2918
- const detected = buildDefaultConfig(targetDir, {});
2919
- const { ctx, pendingWrites } = createDryRunContext(saved ? mergeWithSavedConfig(detected, saved) : detected);
2920
- logDetectionSummary(ctx);
2921
- const actionable = (await runGenerators(ctx)).filter((r) => {
2922
- if (r.action !== "created" && r.action !== "updated") return false;
2923
- const newContent = pendingWrites.get(r.filePath);
2924
- if (newContent && r.action === "updated") {
2925
- const existingPath = path.join(targetDir, r.filePath);
2926
- const existing = existsSync(existingPath) ? readFileSync(existingPath, "utf-8") : void 0;
2927
- if (existing && contentEqual(r.filePath, existing, newContent)) return false;
2928
- }
2929
- return true;
2930
- });
2931
- if (actionable.length === 0) {
2932
- p.log.success("Repository is up to date.");
2933
- return 0;
2934
- }
2935
- p.log.warn(`${actionable.length} file(s) would be changed by repo:sync`);
2936
- for (const r of actionable) {
2937
- p.log.info(` ${r.action}: ${r.filePath} — ${r.description}`);
2938
- const newContent = pendingWrites.get(r.filePath);
2939
- if (!newContent) continue;
2940
- const existingPath = path.join(targetDir, r.filePath);
2941
- const existing = existsSync(existingPath) ? readFileSync(existingPath, "utf-8") : void 0;
2942
- if (!existing) {
2943
- const lineCount = newContent.split("\n").length - 1;
2944
- p.log.info(` + ${lineCount} new lines`);
2945
- } else {
2946
- const diff = lineDiff(existing, newContent);
2947
- for (const line of diff) p.log.info(` ${line}`);
2948
- }
2949
- }
2950
- return 1;
2951
- }
2952
- const normalize = (line) => line.trimEnd();
2953
- function lineDiff(oldText, newText) {
2954
- const oldLines = oldText.split("\n").map(normalize);
2955
- const newLines = newText.split("\n").map(normalize);
2956
- const oldSet = new Set(oldLines);
2957
- const newSet = new Set(newLines);
2958
- const removed = oldLines.filter((l) => l.trim() !== "" && !newSet.has(l));
2959
- const added = newLines.filter((l) => l.trim() !== "" && !oldSet.has(l));
2960
- const lines = [];
2961
- for (const l of removed) lines.push(`- ${l.trim()}`);
2962
- for (const l of added) lines.push(`+ ${l.trim()}`);
2963
- return lines;
2964
- }
2965
- //#endregion
2966
- //#region src/release/executor.ts
2967
- /** Create an executor that runs real commands, fetches, and reads the filesystem. */
2968
- function createRealExecutor() {
2969
- return {
2970
- exec(command, options) {
2971
- try {
2972
- return {
2973
- stdout: execSync(command, {
2974
- cwd: options?.cwd,
2975
- env: {
2976
- ...process.env,
2977
- ...options?.env
2978
- },
2979
- encoding: "utf-8",
2980
- stdio: [
2981
- "pipe",
2982
- "pipe",
2983
- "pipe"
2984
- ]
2985
- }),
2986
- stderr: "",
2987
- exitCode: 0
2988
- };
2989
- } catch (err) {
2990
- if (isExecSyncError(err)) return {
2991
- stdout: err.stdout,
2992
- stderr: err.stderr,
2993
- exitCode: err.status
2994
- };
2995
- return {
2996
- stdout: "",
2997
- stderr: "",
2998
- exitCode: 1
2999
- };
3000
- }
3001
- },
3002
- fetch: globalThis.fetch,
3003
- listChangesetFiles(cwd) {
3004
- const dir = path.join(cwd, ".changeset");
3005
- try {
3006
- return readdirSync(dir).filter((f) => f.endsWith(".md") && f !== "README.md");
3007
- } catch {
3008
- return [];
3009
- }
3010
- },
3011
- listWorkspacePackages(cwd) {
3012
- const packagesDir = path.join(cwd, "packages");
3013
- const packages = [];
3014
- try {
3015
- for (const entry of readdirSync(packagesDir)) {
3016
- const pkgPath = path.join(packagesDir, entry, "package.json");
3017
- try {
3018
- const pkg = parsePackageJson(readFileSync(pkgPath, "utf-8"));
3019
- if (pkg?.name && pkg.version && !pkg.private) packages.push({
3020
- name: pkg.name,
3021
- version: pkg.version,
3022
- dir: entry
3023
- });
3024
- } catch (_error) {}
3025
- }
3026
- } catch (_error) {}
3027
- return packages;
3028
- },
3029
- listPackageDirs(cwd) {
3030
- const packagesDir = path.join(cwd, "packages");
3031
- try {
3032
- return readdirSync(packagesDir, { withFileTypes: true }).filter((entry) => entry.isDirectory()).map((entry) => entry.name);
3033
- } catch {
3034
- return [];
3035
- }
3036
- },
3037
- readFile(filePath) {
3038
- try {
3039
- return readFileSync(filePath, "utf-8");
3040
- } catch {
3041
- return null;
3042
- }
3043
- },
3044
- writeFile(filePath, content) {
3045
- mkdirSync(path.dirname(filePath), { recursive: true });
3046
- writeFileSync(filePath, content);
3047
- }
3048
- };
3049
- }
3050
- /** Parse "New tag:" lines from changeset publish output. */
3051
- function parseNewTags(output) {
3052
- const tags = [];
3053
- for (const line of output.split("\n")) {
3054
- const match = /New tag:\s+(\S+)/.exec(line);
3055
- if (match?.[1]) tags.push(match[1]);
2602
+ //#region src/release/docker.ts
2603
+ const ToolingDockerMapSchema = z.record(z.string(), z.object({
2604
+ dockerfile: z.string(),
2605
+ context: z.string().default(".")
2606
+ }));
2607
+ const ToolingConfigDockerSchema = z.object({ docker: ToolingDockerMapSchema.optional() });
2608
+ const PackageInfoSchema = z.object({
2609
+ name: z.string().optional(),
2610
+ version: z.string().optional()
2611
+ });
2612
+ /** Read the docker map from .tooling.json. Returns empty record if missing or invalid. */
2613
+ function loadDockerMap(executor, cwd) {
2614
+ const configPath = path.join(cwd, ".tooling.json");
2615
+ const raw = executor.readFile(configPath);
2616
+ if (!raw) return {};
2617
+ try {
2618
+ const result = ToolingConfigDockerSchema.safeParse(JSON.parse(raw));
2619
+ if (!result.success || !result.data.docker) return {};
2620
+ return result.data.docker;
2621
+ } catch (_error) {
2622
+ return {};
3056
2623
  }
3057
- return tags;
3058
- }
3059
- /** Map workspace packages to their expected tag strings (name@version). */
3060
- function computeExpectedTags(packages) {
3061
- return packages.map((p) => `${p.name}@${p.version}`);
3062
2624
  }
3063
- /** Parse `git ls-remote --tags` output into tag names, filtering out `^{}` dereference entries. */
3064
- function parseRemoteTags(output) {
3065
- const tags = [];
3066
- for (const line of output.split("\n")) {
3067
- const match = /refs\/tags\/(.+)/.exec(line);
3068
- if (match?.[1] && !match[1].endsWith("^{}")) tags.push(match[1]);
2625
+ /** Read name and version from a package's package.json. */
2626
+ function readPackageInfo(executor, packageJsonPath) {
2627
+ const raw = executor.readFile(packageJsonPath);
2628
+ if (!raw) return {
2629
+ name: void 0,
2630
+ version: void 0
2631
+ };
2632
+ try {
2633
+ const result = PackageInfoSchema.safeParse(JSON.parse(raw));
2634
+ if (!result.success) return {
2635
+ name: void 0,
2636
+ version: void 0
2637
+ };
2638
+ return {
2639
+ name: result.data.name,
2640
+ version: result.data.version
2641
+ };
2642
+ } catch (_error) {
2643
+ return {
2644
+ name: void 0,
2645
+ version: void 0
2646
+ };
3069
2647
  }
3070
- return tags;
3071
2648
  }
2649
+ /** Convention paths to check for Dockerfiles in a package directory. */
2650
+ const CONVENTION_DOCKERFILE_PATHS = ["Dockerfile", "docker/Dockerfile"];
3072
2651
  /**
3073
- * Reconcile expected tags with what already exists on the remote.
3074
- * Returns `(expected - remote) ∪ stdoutTags`, deduplicated.
2652
+ * Find a Dockerfile at convention paths for a monorepo package.
2653
+ * Checks packages/{dir}/Dockerfile and packages/{dir}/docker/Dockerfile.
3075
2654
  */
3076
- function reconcileTags(expectedTags, remoteTags, stdoutTags) {
3077
- const remoteSet = new Set(remoteTags);
3078
- const result = /* @__PURE__ */ new Set();
3079
- for (const tag of expectedTags) if (!remoteSet.has(tag)) result.add(tag);
3080
- for (const tag of stdoutTags) result.add(tag);
3081
- return [...result];
2655
+ function findConventionDockerfile(executor, cwd, dir) {
2656
+ for (const rel of CONVENTION_DOCKERFILE_PATHS) {
2657
+ const dockerfilePath = `packages/${dir}/${rel}`;
2658
+ if (executor.readFile(path.join(cwd, dockerfilePath)) !== null) return {
2659
+ dockerfile: dockerfilePath,
2660
+ context: "."
2661
+ };
2662
+ }
3082
2663
  }
3083
- //#endregion
3084
- //#region src/release/forgejo.ts
3085
- const PullRequestSchema = z.array(z.object({
3086
- number: z.number(),
3087
- head: z.object({ ref: z.string() })
3088
- }));
3089
2664
  /**
3090
- * Find an open PR with the given head branch. Returns the PR number or null.
3091
- *
3092
- * Fetches all open PRs and filters client-side by head.ref rather than relying
3093
- * on Forgejo's query parameter filtering, which behaves inconsistently.
2665
+ * Find a Dockerfile at convention paths for a single-package repo.
2666
+ * Checks Dockerfile and docker/Dockerfile at the project root.
3094
2667
  */
3095
- async function findOpenPr(executor, conn, head) {
3096
- const url = `${conn.serverUrl}/api/v1/repos/${conn.repository}/pulls?state=open`;
3097
- const res = await executor.fetch(url, { headers: { Authorization: `token ${conn.token}` } });
3098
- if (!res.ok) throw new TransientError(`Failed to list PRs: ${res.status} ${res.statusText}`);
3099
- const parsed = PullRequestSchema.safeParse(await res.json());
3100
- if (!parsed.success) throw new UnexpectedError(`Unexpected PR list response: ${parsed.error.message}`);
3101
- return parsed.data.find((pr) => pr.head.ref === head)?.number ?? null;
3102
- }
3103
- /** Create a new pull request. */
3104
- async function createPr(executor, conn, options) {
3105
- const url = `${conn.serverUrl}/api/v1/repos/${conn.repository}/pulls`;
3106
- const payload = {
3107
- title: options.title,
3108
- head: options.head,
3109
- base: options.base
2668
+ function findRootDockerfile(executor, cwd) {
2669
+ for (const rel of CONVENTION_DOCKERFILE_PATHS) if (executor.readFile(path.join(cwd, rel)) !== null) return {
2670
+ dockerfile: rel,
2671
+ context: "."
3110
2672
  };
3111
- if (options.body) payload["body"] = options.body;
3112
- const res = await executor.fetch(url, {
3113
- method: "POST",
3114
- headers: {
3115
- Authorization: `token ${conn.token}`,
3116
- "Content-Type": "application/json"
3117
- },
3118
- body: JSON.stringify(payload)
3119
- });
3120
- if (!res.ok) throw new TransientError(`Failed to create PR: ${res.status} ${res.statusText}`);
3121
- }
3122
- /** Update an existing pull request's title and body. */
3123
- async function updatePr(executor, conn, prNumber, options) {
3124
- const url = `${conn.serverUrl}/api/v1/repos/${conn.repository}/pulls/${String(prNumber)}`;
3125
- const res = await executor.fetch(url, {
3126
- method: "PATCH",
3127
- headers: {
3128
- Authorization: `token ${conn.token}`,
3129
- "Content-Type": "application/json"
3130
- },
3131
- body: JSON.stringify({
3132
- title: options.title,
3133
- body: options.body
3134
- })
3135
- });
3136
- if (!res.ok) throw new TransientError(`Failed to update PR #${String(prNumber)}: ${res.status} ${res.statusText}`);
3137
- }
3138
- /** Merge a pull request by number. */
3139
- async function mergePr(executor, conn, prNumber, options) {
3140
- const url = `${conn.serverUrl}/api/v1/repos/${conn.repository}/pulls/${String(prNumber)}/merge`;
3141
- const res = await executor.fetch(url, {
3142
- method: "POST",
3143
- headers: {
3144
- Authorization: `token ${conn.token}`,
3145
- "Content-Type": "application/json"
3146
- },
3147
- body: JSON.stringify({
3148
- Do: options?.method ?? "merge",
3149
- delete_branch_after_merge: options?.deleteBranch ?? true
3150
- })
3151
- });
3152
- if (!res.ok) throw new TransientError(`Failed to merge PR #${String(prNumber)}: ${res.status} ${res.statusText}`);
3153
- }
3154
- /** Check whether a Forgejo release already exists for a given tag. */
3155
- async function findRelease(executor, conn, tag) {
3156
- const encodedTag = encodeURIComponent(tag);
3157
- const url = `${conn.serverUrl}/api/v1/repos/${conn.repository}/releases/tags/${encodedTag}`;
3158
- const res = await executor.fetch(url, { headers: { Authorization: `token ${conn.token}` } });
3159
- if (res.status === 200) return true;
3160
- if (res.status === 404) return false;
3161
- throw new TransientError(`Failed to check release for ${tag}: ${res.status} ${res.statusText}`);
3162
- }
3163
- /** Create a Forgejo release for a given tag. */
3164
- async function createRelease(executor, conn, tag) {
3165
- const url = `${conn.serverUrl}/api/v1/repos/${conn.repository}/releases`;
3166
- const res = await executor.fetch(url, {
3167
- method: "POST",
3168
- headers: {
3169
- Authorization: `token ${conn.token}`,
3170
- "Content-Type": "application/json"
3171
- },
3172
- body: JSON.stringify({
3173
- tag_name: tag,
3174
- name: tag,
3175
- body: `Published ${tag}`
3176
- })
3177
- });
3178
- if (!res.ok) throw new TransientError(`Failed to create release for ${tag}: ${res.status} ${res.statusText}`);
3179
- }
3180
- //#endregion
3181
- //#region src/release/log.ts
3182
- /** Log a debug message when verbose mode is enabled. */
3183
- function debug$1(config, message) {
3184
- if (config.verbose) p.log.info(`[debug] ${message}`);
3185
- }
3186
- /** Log the result of an exec call when verbose mode is enabled. */
3187
- function debugExec(config, label, result) {
3188
- if (!config.verbose) return;
3189
- const lines = [`[debug] ${label} (exit code ${String(result.exitCode)})`];
3190
- if (result.stdout.trim()) lines.push(` stdout: ${result.stdout.trim()}`);
3191
- if (result.stderr.trim()) lines.push(` stderr: ${result.stderr.trim()}`);
3192
- p.log.info(lines.join("\n"));
3193
2673
  }
3194
- //#endregion
3195
- //#region src/release/version.ts
3196
- const BRANCH = "changeset-release/main";
3197
- /** Extract the latest changelog entry (content between first and second `## ` heading). */
3198
- function extractLatestEntry(changelog) {
3199
- const lines = changelog.split("\n");
3200
- let start = -1;
3201
- let end = lines.length;
3202
- for (let i = 0; i < lines.length; i++) if (lines[i]?.startsWith("## ")) if (start === -1) start = i;
3203
- else {
3204
- end = i;
3205
- break;
2674
+ /**
2675
+ * Discover Docker packages by convention and merge with .tooling.json overrides.
2676
+ *
2677
+ * Convention: any package with a Dockerfile or docker/Dockerfile is a Docker package.
2678
+ * For monorepos, scans packages/{name}/. For single-package repos, scans the root.
2679
+ * The docker map in .tooling.json overrides convention-discovered config and can add
2680
+ * packages at non-standard locations.
2681
+ *
2682
+ * Image names are derived from {root-name}-{package-name} using each package's package.json name.
2683
+ * Versions are read from each package's own package.json.
2684
+ */
2685
+ function detectDockerPackages(executor, cwd, repoName) {
2686
+ const overrides = loadDockerMap(executor, cwd);
2687
+ const packageDirs = executor.listPackageDirs(cwd);
2688
+ const packages = [];
2689
+ const seen = /* @__PURE__ */ new Set();
2690
+ if (packageDirs.length > 0) {
2691
+ for (const dir of packageDirs) {
2692
+ const convention = findConventionDockerfile(executor, cwd, dir);
2693
+ const docker = overrides[dir] ?? convention;
2694
+ if (docker) {
2695
+ const { name, version } = readPackageInfo(executor, path.join(cwd, "packages", dir, "package.json"));
2696
+ packages.push({
2697
+ dir,
2698
+ imageName: `${repoName}-${name ?? dir}`,
2699
+ version,
2700
+ docker
2701
+ });
2702
+ seen.add(dir);
2703
+ }
2704
+ }
2705
+ for (const [dir, docker] of Object.entries(overrides)) if (!seen.has(dir)) {
2706
+ const { name, version } = readPackageInfo(executor, path.join(cwd, "packages", dir, "package.json"));
2707
+ packages.push({
2708
+ dir,
2709
+ imageName: `${repoName}-${name ?? dir}`,
2710
+ version,
2711
+ docker
2712
+ });
2713
+ }
2714
+ } else {
2715
+ const convention = findRootDockerfile(executor, cwd);
2716
+ const docker = overrides["."] ?? convention;
2717
+ if (docker) {
2718
+ const { name, version } = readPackageInfo(executor, path.join(cwd, "package.json"));
2719
+ packages.push({
2720
+ dir: ".",
2721
+ imageName: name ?? repoName,
2722
+ version,
2723
+ docker
2724
+ });
2725
+ }
3206
2726
  }
3207
- if (start === -1) return null;
3208
- return lines.slice(start, end).join("\n").trim();
2727
+ return packages;
3209
2728
  }
3210
- /** Read the root package.json name and version. */
3211
- function readRootPackage(executor, cwd) {
3212
- const content = executor.readFile(path.join(cwd, "package.json"));
3213
- if (!content) return null;
3214
- const pkg = parsePackageJson(content);
3215
- if (!pkg?.name || !pkg.version) return null;
3216
- if (pkg.private) return null;
2729
+ /**
2730
+ * Read docker config for a single package, checking convention paths first,
2731
+ * then .tooling.json overrides. Used by the per-package image:build script.
2732
+ */
2733
+ function readSinglePackageDocker(executor, cwd, packageDir, repoName) {
2734
+ const dir = path.basename(path.resolve(cwd, packageDir));
2735
+ const convention = findConventionDockerfile(executor, cwd, dir);
2736
+ const docker = loadDockerMap(executor, cwd)[dir] ?? convention;
2737
+ if (!docker) throw new FatalError(`No Dockerfile found for package "${dir}" (checked convention paths and .tooling.json)`);
2738
+ const { name, version } = readPackageInfo(executor, path.join(cwd, "packages", dir, "package.json"));
3217
2739
  return {
3218
- name: pkg.name,
3219
- version: pkg.version
2740
+ dir,
2741
+ imageName: `${repoName}-${name ?? dir}`,
2742
+ version,
2743
+ docker
3220
2744
  };
3221
2745
  }
3222
- /** Determine which packages changed and collect their changelog entries. */
3223
- function buildPrContent(executor, cwd, packagesBefore) {
3224
- const packagesAfter = executor.listWorkspacePackages(cwd);
3225
- if (!(packagesBefore.length > 0 || packagesAfter.length > 0)) {
3226
- const rootPkg = readRootPackage(executor, cwd);
3227
- if (rootPkg) {
3228
- const changelog = executor.readFile(path.join(cwd, "CHANGELOG.md"));
3229
- const entry = changelog ? extractLatestEntry(changelog) : null;
3230
- return {
3231
- title: `chore: release ${rootPkg.name}@${rootPkg.version}`,
3232
- body: entry ?? ""
3233
- };
3234
- }
3235
- return {
3236
- title: "chore: version packages",
3237
- body: ""
3238
- };
3239
- }
3240
- const beforeMap = new Map(packagesBefore.map((pkg) => [pkg.name, pkg.version]));
3241
- const changed = packagesAfter.filter((pkg) => beforeMap.get(pkg.name) !== pkg.version);
3242
- if (changed.length === 0) return {
3243
- title: "chore: version packages",
3244
- body: ""
3245
- };
3246
- const title = `chore: release ${changed.map((pkg) => `${pkg.name}@${pkg.version}`).join(", ")}`;
3247
- const entries = [];
3248
- for (const pkg of changed) {
3249
- const changelogPath = path.join(cwd, "packages", pkg.dir, "CHANGELOG.md");
3250
- const changelog = executor.readFile(changelogPath);
3251
- const entry = changelog ? extractLatestEntry(changelog) : null;
3252
- if (entry) {
3253
- const labeled = entry.replace(/^## .+/, `## ${pkg.name}@${pkg.version}`);
3254
- entries.push(labeled);
3255
- }
3256
- }
2746
+ /** Parse semver version string into major, minor, patch components. */
2747
+ function parseSemver(version) {
2748
+ const clean = version.replace(/^v/, "");
2749
+ const match = /^(\d+)\.(\d+)\.(\d+)/.exec(clean);
2750
+ if (!match?.[1] || !match[2] || !match[3]) throw new FatalError(`Invalid semver version: ${version}`);
3257
2751
  return {
3258
- title,
3259
- body: entries.join("\n\n")
2752
+ major: Number(match[1]),
2753
+ minor: Number(match[2]),
2754
+ patch: Number(match[3])
3260
2755
  };
3261
2756
  }
3262
- /** Mode 1: version packages and create/update a PR. */
3263
- async function runVersionMode(executor, config) {
3264
- p.log.info("Changesets detected versioning packages");
3265
- const packagesBefore = executor.listWorkspacePackages(config.cwd);
3266
- debug$1(config, `Packages before versioning: ${packagesBefore.map((pkg) => `${pkg.name}@${pkg.version}`).join(", ") || "(none)"}`);
3267
- const changesetConfigPath = path.join(config.cwd, ".changeset", "config.json");
3268
- const originalConfig = executor.readFile(changesetConfigPath);
3269
- if (originalConfig) {
3270
- const parsed = parseChangesetConfig(originalConfig);
3271
- if (parsed?.commit) {
3272
- const patched = {
3273
- ...parsed,
3274
- commit: false
3275
- };
3276
- executor.writeFile(changesetConfigPath, JSON.stringify(patched, null, 2) + "\n");
3277
- debug$1(config, "Temporarily disabled changeset commit:true");
3278
- }
2757
+ /** Generate semver tag variants: latest, vX.Y.Z, vX.Y, vX */
2758
+ function generateTags(version) {
2759
+ const { major, minor, patch } = parseSemver(version);
2760
+ return [
2761
+ "latest",
2762
+ `v${major}.${minor}.${patch}`,
2763
+ `v${major}.${minor}`,
2764
+ `v${major}`
2765
+ ];
2766
+ }
2767
+ /** Build the full image reference: namespace/imageName:tag */
2768
+ function imageRef(namespace, imageName, tag) {
2769
+ return `${namespace}/${imageName}:${tag}`;
2770
+ }
2771
+ function log$1(message) {
2772
+ console.log(message);
2773
+ }
2774
+ function debug$1(verbose, message) {
2775
+ if (verbose) console.log(`[debug] ${message}`);
2776
+ }
2777
+ /** Read the repo name from root package.json. */
2778
+ function readRepoName(executor, cwd) {
2779
+ const rootPkgRaw = executor.readFile(path.join(cwd, "package.json"));
2780
+ if (!rootPkgRaw) throw new FatalError("No package.json found in project root");
2781
+ const repoName = parsePackageJson(rootPkgRaw)?.name;
2782
+ if (!repoName) throw new FatalError("Root package.json must have a name field");
2783
+ return repoName;
2784
+ }
2785
+ /** Build a single docker image from its config. Paths are resolved relative to cwd. */
2786
+ function buildImage(executor, pkg, cwd, verbose, extraArgs) {
2787
+ const dockerfilePath = path.resolve(cwd, pkg.docker.dockerfile);
2788
+ const contextPath = path.resolve(cwd, pkg.docker.context);
2789
+ const command = [
2790
+ "docker build",
2791
+ `-f ${dockerfilePath}`,
2792
+ `-t ${pkg.imageName}:latest`,
2793
+ ...extraArgs,
2794
+ contextPath
2795
+ ].join(" ");
2796
+ debug$1(verbose, `Running: ${command}`);
2797
+ const buildResult = executor.exec(command);
2798
+ debug$1(verbose, `Build stdout: ${buildResult.stdout}`);
2799
+ if (buildResult.exitCode !== 0) throw new FatalError(`docker build failed for ${pkg.dir} (exit ${buildResult.exitCode}): ${buildResult.stderr}`);
2800
+ }
2801
+ /**
2802
+ * Detect packages with docker config in .tooling.json and build each one.
2803
+ * Runs `docker build -f <dockerfile> -t <image-name>:latest <context>` for each package.
2804
+ * Dockerfile and context paths are resolved relative to the project root.
2805
+ *
2806
+ * When `packageDir` is set, builds only that single package (for use as an image:build script).
2807
+ */
2808
+ function runDockerBuild(executor, config) {
2809
+ const repoName = readRepoName(executor, config.cwd);
2810
+ if (config.packageDir) {
2811
+ const pkg = readSinglePackageDocker(executor, config.cwd, config.packageDir, repoName);
2812
+ log$1(`Building image for ${pkg.dir} (${pkg.imageName}:latest)...`);
2813
+ buildImage(executor, pkg, config.cwd, config.verbose, config.extraArgs);
2814
+ log$1(`Built ${pkg.imageName}:latest`);
2815
+ return { packages: [pkg] };
3279
2816
  }
3280
- const versionResult = executor.exec("pnpm changeset version", { cwd: config.cwd });
3281
- debugExec(config, "pnpm changeset version", versionResult);
3282
- if (originalConfig) executor.writeFile(changesetConfigPath, originalConfig);
3283
- if (versionResult.exitCode !== 0) throw new FatalError(`pnpm changeset version failed (exit code ${String(versionResult.exitCode)}):\n${versionResult.stderr}`);
3284
- debugExec(config, "pnpm install --no-frozen-lockfile", executor.exec("pnpm install --no-frozen-lockfile", { cwd: config.cwd }));
3285
- const { title, body } = buildPrContent(executor, config.cwd, packagesBefore);
3286
- debug$1(config, `PR title: ${title}`);
3287
- executor.exec("git add -A", { cwd: config.cwd });
3288
- const remainingChangesets = executor.listChangesetFiles(config.cwd);
3289
- if (remainingChangesets.length > 0) p.log.warn(`Changeset files still present after versioning: ${remainingChangesets.join(", ")}`);
3290
- debug$1(config, `Changeset files after versioning: ${remainingChangesets.length > 0 ? remainingChangesets.join(", ") : "(none — all consumed)"}`);
3291
- const commitResult = executor.exec("git commit -m \"chore: version packages\"", { cwd: config.cwd });
3292
- debugExec(config, "git commit", commitResult);
3293
- if (commitResult.exitCode !== 0) {
3294
- p.log.info("Nothing to commit after versioning");
3295
- return {
3296
- mode: "version",
3297
- pr: "none"
3298
- };
2817
+ const packages = detectDockerPackages(executor, config.cwd, repoName);
2818
+ if (packages.length === 0) {
2819
+ log$1("No packages with docker config found");
2820
+ return { packages: [] };
3299
2821
  }
3300
- if (config.dryRun) {
3301
- p.log.info("[dry-run] Would push and create/update PR");
3302
- return {
3303
- mode: "version",
3304
- pr: "none"
3305
- };
2822
+ log$1(`Found ${packages.length} Docker package(s): ${packages.map((p) => p.dir).join(", ")}`);
2823
+ for (const pkg of packages) {
2824
+ log$1(`Building image for ${pkg.dir} (${pkg.imageName}:latest)...`);
2825
+ buildImage(executor, pkg, config.cwd, config.verbose, config.extraArgs);
3306
2826
  }
3307
- debugExec(config, "git push", executor.exec(`git push origin "HEAD:refs/heads/${BRANCH}" --force`, { cwd: config.cwd }));
3308
- const conn = {
3309
- serverUrl: config.serverUrl,
3310
- repository: config.repository,
3311
- token: config.token
2827
+ log$1(`Built ${packages.length} image(s)`);
2828
+ return { packages };
2829
+ }
2830
+ /**
2831
+ * Run the full Docker publish pipeline:
2832
+ * 1. Build all images via runDockerBuild
2833
+ * 2. Login to registry
2834
+ * 3. Tag each image with semver variants from its own package.json version
2835
+ * 4. Push all tags
2836
+ * 5. Logout from registry
2837
+ */
2838
+ function runDockerPublish(executor, config) {
2839
+ const { packages } = runDockerBuild(executor, {
2840
+ cwd: config.cwd,
2841
+ packageDir: void 0,
2842
+ verbose: config.verbose,
2843
+ extraArgs: []
2844
+ });
2845
+ if (packages.length === 0) return {
2846
+ packages: [],
2847
+ tags: []
3312
2848
  };
3313
- const existingPr = await findOpenPr(executor, conn, BRANCH);
3314
- debug$1(config, `Existing open PR for ${BRANCH}: ${existingPr === null ? "(none)" : `#${String(existingPr)}`}`);
3315
- if (existingPr === null) {
3316
- await createPr(executor, conn, {
3317
- title,
3318
- head: BRANCH,
3319
- base: "main",
3320
- body
3321
- });
3322
- p.log.info("Created version PR");
3323
- return {
3324
- mode: "version",
3325
- pr: "created"
3326
- };
2849
+ for (const pkg of packages) if (!pkg.version) throw new FatalError(`Package ${pkg.dir} has docker config but no version in package.json`);
2850
+ if (!config.dryRun) {
2851
+ log$1(`Logging in to ${config.registryHost}...`);
2852
+ const loginResult = executor.exec(`echo "${config.password}" | docker login ${config.registryHost} -u ${config.username} --password-stdin`);
2853
+ if (loginResult.exitCode !== 0) throw new FatalError(`Docker login failed: ${loginResult.stderr}`);
2854
+ } else log$1("[dry-run] Skipping docker login");
2855
+ const allTags = [];
2856
+ try {
2857
+ for (const pkg of packages) {
2858
+ const tags = generateTags(pkg.version ?? "");
2859
+ log$1(`${pkg.dir} v${pkg.version} → tags: ${tags.join(", ")}`);
2860
+ for (const tag of tags) {
2861
+ const ref = imageRef(config.registryNamespace, pkg.imageName, tag);
2862
+ allTags.push(ref);
2863
+ log$1(`Tagging ${pkg.imageName} → ${ref}`);
2864
+ const tagResult = executor.exec(`docker tag ${pkg.imageName} ${ref}`);
2865
+ if (tagResult.exitCode !== 0) throw new FatalError(`docker tag failed: ${tagResult.stderr}`);
2866
+ if (!config.dryRun) {
2867
+ log$1(`Pushing ${ref}...`);
2868
+ const pushResult = executor.exec(`docker push ${ref}`);
2869
+ if (pushResult.exitCode !== 0) throw new FatalError(`docker push failed: ${pushResult.stderr}`);
2870
+ } else log$1(`[dry-run] Skipping push for ${ref}`);
2871
+ }
2872
+ }
2873
+ } finally {
2874
+ if (!config.dryRun) {
2875
+ log$1(`Logging out from ${config.registryHost}...`);
2876
+ executor.exec(`docker logout ${config.registryHost}`);
2877
+ }
3327
2878
  }
3328
- await updatePr(executor, conn, existingPr, {
3329
- title,
3330
- body
3331
- });
3332
- p.log.info(`Updated version PR #${String(existingPr)}`);
2879
+ log$1(`Published ${allTags.length} image tag(s)`);
3333
2880
  return {
3334
- mode: "version",
3335
- pr: "updated"
2881
+ packages,
2882
+ tags: allTags
3336
2883
  };
3337
2884
  }
3338
2885
  //#endregion
3339
- //#region src/release/publish.ts
3340
- const RETRY_ATTEMPTS = 3;
3341
- const RETRY_BASE_DELAY_MS = 1e3;
3342
- async function retryAsync(fn) {
3343
- let lastError;
3344
- for (let attempt = 0; attempt <= RETRY_ATTEMPTS; attempt++) try {
3345
- return await fn();
3346
- } catch (error) {
3347
- lastError = error;
3348
- if (attempt < RETRY_ATTEMPTS) {
3349
- const delay = RETRY_BASE_DELAY_MS * 2 ** attempt;
3350
- await new Promise((resolve) => setTimeout(resolve, delay));
3351
- }
2886
+ //#region src/generators/migrate-prompt.ts
2887
+ /**
2888
+ * Generate a context-aware AI migration prompt based on what the CLI did.
2889
+ * This prompt can be pasted into Claude Code (or similar) to finish the migration.
2890
+ */
2891
+ function generateMigratePrompt(results, config, detected) {
2892
+ const sections = [];
2893
+ sections.push("# Migration Prompt");
2894
+ sections.push("");
2895
+ sections.push("The following prompt was generated by `@bensandee/tooling repo:sync`. Paste it into Claude Code or another AI assistant to finish migrating this repository.");
2896
+ sections.push("");
2897
+ sections.push("> **Tip:** Before starting, run `/init` in Claude Code to generate a `CLAUDE.md` that gives the AI a complete picture of your repository's structure, conventions, and build commands.");
2898
+ sections.push("");
2899
+ sections.push("## What was changed");
2900
+ sections.push("");
2901
+ const created = results.filter((r) => r.action === "created");
2902
+ const updated = results.filter((r) => r.action === "updated");
2903
+ const skipped = results.filter((r) => r.action === "skipped");
2904
+ const archived = results.filter((r) => r.action === "archived");
2905
+ if (created.length > 0) {
2906
+ sections.push("**Created:**");
2907
+ for (const r of created) sections.push(`- \`${r.filePath}\` — ${r.description}`);
2908
+ sections.push("");
3352
2909
  }
3353
- throw lastError;
3354
- }
3355
- /** Mode 2: publish to npm, push tags, and create Forgejo releases. */
3356
- async function runPublishMode(executor, config) {
3357
- p.log.info("No changesets — publishing packages");
3358
- const publishResult = executor.exec("pnpm changeset publish", { cwd: config.cwd });
3359
- debugExec(config, "pnpm changeset publish", publishResult);
3360
- if (publishResult.exitCode !== 0) throw new FatalError(`pnpm changeset publish failed (exit code ${String(publishResult.exitCode)}):\n${publishResult.stderr}`);
3361
- const stdoutTags = parseNewTags(publishResult.stdout + "\n" + publishResult.stderr);
3362
- debug$1(config, `Tags from publish stdout: ${stdoutTags.length > 0 ? stdoutTags.join(", ") : "(none)"}`);
3363
- const expectedTags = computeExpectedTags(executor.listWorkspacePackages(config.cwd));
3364
- debug$1(config, `Expected tags from workspace packages: ${expectedTags.length > 0 ? expectedTags.join(", ") : "(none)"}`);
3365
- const remoteTags = parseRemoteTags(executor.exec("git ls-remote --tags origin", { cwd: config.cwd }).stdout);
3366
- debug$1(config, `Remote tags: ${remoteTags.length > 0 ? remoteTags.join(", ") : "(none)"}`);
3367
- const remoteSet = new Set(remoteTags);
3368
- const tagsToPush = reconcileTags(expectedTags, remoteTags, stdoutTags);
3369
- debug$1(config, `Reconciled tags to push: ${tagsToPush.length > 0 ? tagsToPush.join(", ") : "(none)"}`);
3370
- if (config.dryRun) {
3371
- if (tagsToPush.length === 0) {
3372
- p.log.info("No packages were published");
3373
- return { mode: "none" };
3374
- }
3375
- p.log.info(`Tags to process: ${tagsToPush.join(", ")}`);
3376
- p.log.info("[dry-run] Would push tags and create releases");
3377
- return {
3378
- mode: "publish",
3379
- tags: tagsToPush
3380
- };
2910
+ if (updated.length > 0) {
2911
+ sections.push("**Updated:**");
2912
+ for (const r of updated) sections.push(`- \`${r.filePath}\` ${r.description}`);
2913
+ sections.push("");
3381
2914
  }
3382
- const conn = {
3383
- serverUrl: config.serverUrl,
3384
- repository: config.repository,
3385
- token: config.token
3386
- };
3387
- const remoteExpectedTags = expectedTags.filter((t) => remoteSet.has(t) && !tagsToPush.includes(t));
3388
- const tagsWithMissingReleases = [];
3389
- for (const tag of remoteExpectedTags) if (!await findRelease(executor, conn, tag)) tagsWithMissingReleases.push(tag);
3390
- const allTags = [...tagsToPush, ...tagsWithMissingReleases];
3391
- if (allTags.length === 0) {
3392
- p.log.info("No packages were published");
3393
- return { mode: "none" };
2915
+ if (archived.length > 0) {
2916
+ sections.push("**Archived:**");
2917
+ for (const r of archived) sections.push(`- \`${r.filePath}\` — ${r.description}`);
2918
+ sections.push("");
3394
2919
  }
3395
- p.log.info(`Tags to process: ${allTags.join(", ")}`);
3396
- const errors = [];
3397
- for (const tag of allTags) try {
3398
- if (!remoteSet.has(tag)) {
3399
- if (executor.exec(`git tag -l ${JSON.stringify(tag)}`, { cwd: config.cwd }).stdout.trim() === "") executor.exec(`git tag ${JSON.stringify(tag)}`, { cwd: config.cwd });
3400
- executor.exec(`git push origin refs/tags/${tag}`, { cwd: config.cwd });
3401
- }
3402
- if (await findRelease(executor, conn, tag)) p.log.warn(`Release for ${tag} already exists skipping`);
3403
- else {
3404
- await retryAsync(async () => {
3405
- try {
3406
- await createRelease(executor, conn, tag);
3407
- } catch (error) {
3408
- if (await findRelease(executor, conn, tag)) return;
3409
- throw error;
3410
- }
3411
- });
3412
- p.log.info(`Created release for ${tag}`);
2920
+ if (skipped.length > 0) {
2921
+ sections.push("**Skipped (review these):**");
2922
+ for (const r of skipped) sections.push(`- \`${r.filePath}\` — ${r.description}`);
2923
+ sections.push("");
2924
+ }
2925
+ sections.push("## Migration tasks");
2926
+ sections.push("");
2927
+ const legacyToRemove = detected.legacyConfigs.filter((legacy) => !(legacy.tool === "prettier" && config.formatter === "prettier"));
2928
+ if (legacyToRemove.length > 0) {
2929
+ sections.push("### Remove legacy tooling");
2930
+ sections.push("");
2931
+ for (const legacy of legacyToRemove) {
2932
+ const replacement = {
2933
+ eslint: "oxlint",
2934
+ prettier: "oxfmt",
2935
+ jest: "vitest",
2936
+ webpack: "tsdown",
2937
+ rollup: "tsdown"
2938
+ }[legacy.tool];
2939
+ sections.push(`- Remove ${legacy.tool} config files (${legacy.files.map((f) => `\`${f}\``).join(", ")}). This project now uses **${replacement}**.`);
2940
+ sections.push(` - Uninstall ${legacy.tool}-related packages from devDependencies`);
2941
+ if (legacy.tool === "eslint") sections.push(" - Migrate any custom ESLint rules that don't have oxlint equivalents");
2942
+ if (legacy.tool === "jest") sections.push(" - Migrate any jest-specific test utilities (jest.mock, jest.fn) to vitest equivalents (vi.mock, vi.fn)");
3413
2943
  }
3414
- } catch (error) {
3415
- errors.push({
3416
- tag,
3417
- error
3418
- });
3419
- p.log.warn(`Failed to process ${tag}: ${error instanceof Error ? error.message : String(error)}`);
2944
+ sections.push("");
3420
2945
  }
3421
- if (errors.length > 0) throw new TransientError(`Failed to create releases for: ${errors.map((e) => e.tag).join(", ")}`);
3422
- return {
3423
- mode: "publish",
3424
- tags: allTags
3425
- };
2946
+ if (archived.length > 0) {
2947
+ sections.push("### Review archived files");
2948
+ sections.push("");
2949
+ sections.push("The following files were modified or replaced. The originals have been saved to `.tooling-archived/`:");
2950
+ sections.push("");
2951
+ for (const r of archived) sections.push(`- \`${r.filePath}\` → \`.tooling-archived/${r.filePath}\``);
2952
+ sections.push("");
2953
+ sections.push("For each archived file, **diff the old version against the new one** and look for features, categories, or modules that were enabled in the original but are missing from the replacement. Focus on broad capability gaps rather than individual rule strictness (in general, being stricter is fine). Examples of what to look for:");
2954
+ sections.push("");
2955
+ sections.push("- **Lint configs**: enabled plugin categories (e.g. `jsx-a11y`, `import`, `react`, `nextjs`), custom `plugins` or `overrides`, file-scoped rule blocks");
2956
+ sections.push("- **TypeScript configs**: compiler features like `jsx`, `paths`, `baseUrl`, or `references` that affect build behavior");
2957
+ sections.push("- **Other configs**: feature flags, custom presets, or integrations that go beyond the default template");
2958
+ sections.push("");
2959
+ sections.push("If the old config had capabilities the new one lacks, port them into the new file. Then:");
2960
+ sections.push("");
2961
+ sections.push("1. If the project previously used `husky` and `lint-staged`, remove them from `devDependencies`");
2962
+ sections.push("2. Delete the `.tooling-archived/` directory when migration is complete");
2963
+ sections.push("");
2964
+ }
2965
+ const oxlintWasSkipped = results.find((r) => r.filePath === "oxlint.config.ts")?.action === "skipped";
2966
+ if (detected.hasLegacyOxlintJson) {
2967
+ sections.push("### Migrate .oxlintrc.json to oxlint.config.ts");
2968
+ sections.push("");
2969
+ sections.push("A new `oxlint.config.ts` has been generated using `defineConfig` from the `oxlint` package. The existing `.oxlintrc.json` needs to be migrated:");
2970
+ sections.push("");
2971
+ sections.push("1. Read `.oxlintrc.json` and compare its `rules` against the rules provided by `@bensandee/config/oxlint/recommended` (check `node_modules/@bensandee/config`). Most standard rules are already included in the recommended config.");
2972
+ sections.push("2. If there are any custom rules, overrides, settings, or `jsPlugins` not covered by the recommended config, add them to `oxlint.config.ts` alongside the `extends`.");
2973
+ sections.push("3. Delete `.oxlintrc.json`.");
2974
+ sections.push("4. Run `pnpm lint` to verify the new config works correctly.");
2975
+ sections.push("");
2976
+ } else if (oxlintWasSkipped && detected.hasOxlintConfig) {
2977
+ sections.push("### Verify oxlint.config.ts includes recommended rules");
2978
+ sections.push("");
2979
+ sections.push("The existing `oxlint.config.ts` was kept as-is. Verify that it extends the recommended config from `@bensandee/config/oxlint`:");
2980
+ sections.push("");
2981
+ sections.push("1. Open `oxlint.config.ts` and check that it imports and extends `@bensandee/config/oxlint/recommended`.");
2982
+ sections.push("2. The expected pattern is:");
2983
+ sections.push(" ```ts");
2984
+ sections.push(" import recommended from \"@bensandee/config/oxlint/recommended\";");
2985
+ sections.push(" import { defineConfig } from \"oxlint\";");
2986
+ sections.push("");
2987
+ sections.push(" export default defineConfig({ extends: [recommended] });");
2988
+ sections.push(" ```");
2989
+ sections.push("3. If it uses a different pattern, update it to extend the recommended config while preserving any project-specific customizations.");
2990
+ sections.push("4. Run `pnpm lint` to verify the config works correctly.");
2991
+ sections.push("");
2992
+ }
2993
+ if (config.structure === "monorepo" && !detected.hasPnpmWorkspace) {
2994
+ sections.push("### Migrate to monorepo structure");
2995
+ sections.push("");
2996
+ sections.push("This project was converted from a single repo to a monorepo. Complete the migration:");
2997
+ sections.push("");
2998
+ sections.push("1. Move existing source into `packages/<name>/` (using the existing package name)");
2999
+ sections.push("2. Split the root `package.json` into a root workspace manifest + package-level `package.json`");
3000
+ sections.push("3. Move the existing `tsconfig.json` into the package and update the root tsconfig with project references");
3001
+ sections.push("4. Create a package-level `tsdown.config.ts` in the new package");
3002
+ sections.push("5. Update any import paths or build scripts affected by the move");
3003
+ sections.push("");
3004
+ }
3005
+ const skippedConfigs = skipped.filter((r) => r.filePath !== "ci" && r.description !== "Not a monorepo");
3006
+ if (skippedConfigs.length > 0) {
3007
+ sections.push("### Review skipped files");
3008
+ sections.push("");
3009
+ sections.push("The following files were left unchanged. Review them for compatibility:");
3010
+ sections.push("");
3011
+ for (const r of skippedConfigs) sections.push(`- \`${r.filePath}\` — ${r.description}`);
3012
+ sections.push("");
3013
+ }
3014
+ if (results.some((r) => r.filePath === "test/example.test.ts" && r.action === "created")) {
3015
+ sections.push("### Generate tests");
3016
+ sections.push("");
3017
+ sections.push("A starter test was created at `test/example.test.ts`. Now:");
3018
+ sections.push("");
3019
+ sections.push("1. Review the existing source code in `src/`");
3020
+ sections.push("2. Create additional test files following the starter test's patterns (import style, describe/it structure)");
3021
+ sections.push("3. Focus on edge cases and core business logic");
3022
+ sections.push("4. Aim for meaningful coverage of exported functions and key code paths");
3023
+ sections.push("");
3024
+ }
3025
+ sections.push("## Ground rules");
3026
+ sections.push("");
3027
+ sections.push("It is OK to add new packages (e.g. `zod`, `@bensandee/common`) if they are needed to resolve errors.");
3028
+ sections.push("");
3029
+ sections.push("When resolving errors from the checklist below, prefer fixing the root cause over suppressing the issue. For example:");
3030
+ sections.push("");
3031
+ sections.push("- **Lint errors**: fix the code rather than adding disable comments or rule exceptions");
3032
+ sections.push("- **Test failures**: update the test or fix the underlying bug rather than skipping or deleting the test");
3033
+ sections.push("- **Knip findings**: remove genuinely unused code/exports/dependencies rather than adding ignores to `knip.config.ts`");
3034
+ sections.push("- **Type errors**: add proper types rather than using `any` or `@ts-expect-error`");
3035
+ sections.push("");
3036
+ sections.push("Only suppress an issue if there is a clear, documented reason why the fix is not feasible (e.g. a third-party type mismatch). Leave a comment explaining why.");
3037
+ sections.push("");
3038
+ sections.push("## Verification checklist");
3039
+ sections.push("");
3040
+ sections.push("Run each of these commands and fix any errors before moving on:");
3041
+ sections.push("");
3042
+ sections.push("1. `pnpm install`");
3043
+ const updateCmd = `pnpm update --latest ${getAddedDevDepNames(config).join(" ")}`;
3044
+ sections.push(`2. \`${updateCmd}\` — bump added dependencies to their latest versions`);
3045
+ sections.push("3. `pnpm typecheck` — fix any type errors");
3046
+ sections.push("4. `pnpm build` — fix any build errors");
3047
+ sections.push("5. `pnpm test` — fix any test failures");
3048
+ sections.push("6. `pnpm lint` — fix the code to satisfy lint rules");
3049
+ sections.push("7. `pnpm knip` — remove unused exports, dependencies, and dead code");
3050
+ sections.push("8. `pnpm format` — fix any formatting issues");
3051
+ sections.push("");
3052
+ return sections.join("\n");
3426
3053
  }
3427
3054
  //#endregion
3428
- //#region src/release/connection.ts
3429
- const RepositorySchema = z.union([z.string(), z.object({ url: z.string() })]);
3430
- /**
3431
- * Resolve the hosting platform and connection details.
3432
- *
3433
- * Priority:
3434
- * 1. Environment variables (FORGEJO_SERVER_URL, FORGEJO_REPOSITORY, FORGEJO_TOKEN)
3435
- * 2. `repository` field in package.json (server URL and owner/repo parsed from the URL)
3436
- *
3437
- * For Forgejo, FORGEJO_TOKEN is always required (either from env or explicitly).
3438
- * If the repository URL hostname is `github.com`, returns `{ type: "github" }`.
3439
- */
3440
- function resolveConnection(cwd) {
3441
- const serverUrl = process.env["FORGEJO_SERVER_URL"];
3442
- const repository = process.env["FORGEJO_REPOSITORY"];
3443
- const token = process.env["FORGEJO_TOKEN"];
3444
- if (serverUrl && repository && token) return {
3445
- type: "forgejo",
3446
- conn: {
3447
- serverUrl,
3448
- repository,
3449
- token
3450
- }
3451
- };
3452
- const parsed = parseRepositoryUrl(cwd);
3453
- if (parsed === null) {
3454
- if (serverUrl) {
3455
- if (!repository) throw new FatalError("FORGEJO_REPOSITORY environment variable is required");
3456
- if (!token) throw new FatalError("FORGEJO_TOKEN environment variable is required");
3457
- }
3458
- return { type: "github" };
3459
- }
3460
- if (parsed.hostname === "github.com") return { type: "github" };
3461
- const resolvedToken = token;
3462
- if (!resolvedToken) throw new FatalError("FORGEJO_TOKEN environment variable is required (server URL and repository were resolved from package.json)");
3055
+ //#region src/commands/repo-init.ts
3056
+ /** Adapt a GeneratorContext to the DockerFileReader interface used by detectDockerPackages. */
3057
+ function contextAsDockerReader(ctx) {
3463
3058
  return {
3464
- type: "forgejo",
3465
- conn: {
3466
- serverUrl: serverUrl ?? `${parsed.protocol}//${parsed.hostname}`,
3467
- repository: repository ?? parsed.repository,
3468
- token: resolvedToken
3059
+ listPackageDirs(cwd) {
3060
+ const packagesDir = path.join(cwd, "packages");
3061
+ try {
3062
+ return readdirSync(packagesDir, { withFileTypes: true }).filter((e) => e.isDirectory()).map((e) => e.name);
3063
+ } catch (_error) {
3064
+ return [];
3065
+ }
3066
+ },
3067
+ readFile(filePath) {
3068
+ const rel = path.relative(ctx.targetDir, filePath);
3069
+ return ctx.read(rel) ?? null;
3469
3070
  }
3470
3071
  };
3471
3072
  }
3472
- function parseRepositoryUrl(cwd) {
3473
- const pkgPath = path.join(cwd, "package.json");
3474
- let raw;
3475
- try {
3476
- raw = readFileSync(pkgPath, "utf-8");
3477
- } catch {
3478
- return null;
3479
- }
3480
- const pkg = z.object({ repository: RepositorySchema.optional() }).safeParse(JSON.parse(raw));
3481
- if (!pkg.success) return null;
3482
- const repo = pkg.data.repository;
3483
- if (!repo) return null;
3484
- return parseGitUrl(typeof repo === "string" ? repo : repo.url);
3073
+ /** Log what was detected so the user understands generator decisions. */
3074
+ function logDetectionSummary(ctx) {
3075
+ const dockerPackages = detectDockerPackages(contextAsDockerReader(ctx), ctx.targetDir, ctx.config.name);
3076
+ if (dockerPackages.length > 0) p.log.info(`Docker images: ${dockerPackages.map((pkg) => pkg.imageName).join(", ")}`);
3077
+ const publishable = getPublishablePackages(ctx.targetDir, ctx.config.structure, ctx.packageJson);
3078
+ if (publishable.length > 0) p.log.info(`npm packages: ${publishable.map((pkg) => pkg.name).join(", ")}`);
3485
3079
  }
3486
- function parseGitUrl(urlStr) {
3080
+ async function runInit(config, options = {}) {
3081
+ const detected = detectProject(config.targetDir);
3082
+ const s = p.spinner();
3083
+ const { ctx, archivedFiles } = createContext(config, options.confirmOverwrite ?? (async (relativePath) => {
3084
+ s.stop("Paused");
3085
+ const result = await p.select({
3086
+ message: `${relativePath} already exists. What do you want to do?`,
3087
+ options: [{
3088
+ value: "overwrite",
3089
+ label: "Overwrite"
3090
+ }, {
3091
+ value: "skip",
3092
+ label: "Skip"
3093
+ }]
3094
+ });
3095
+ s.start("Generating configuration files...");
3096
+ if (p.isCancel(result)) return "skip";
3097
+ return result;
3098
+ }));
3099
+ logDetectionSummary(ctx);
3100
+ s.start("Generating configuration files...");
3101
+ let results;
3487
3102
  try {
3488
- const url = new URL(urlStr);
3489
- const pathname = url.pathname.replace(/\.git$/, "").replace(/^\//, "");
3490
- if (!pathname.includes("/")) return null;
3491
- return {
3492
- protocol: url.protocol,
3493
- hostname: url.hostname,
3494
- repository: pathname
3495
- };
3496
- } catch {
3497
- return null;
3103
+ results = await runGenerators(ctx);
3104
+ } catch (error) {
3105
+ s.stop("Generation failed!");
3106
+ throw error;
3107
+ }
3108
+ const alreadyArchived = new Set(results.filter((r) => r.action === "archived").map((r) => r.filePath));
3109
+ for (const rel of archivedFiles) if (!alreadyArchived.has(rel)) results.push({
3110
+ filePath: rel,
3111
+ action: "archived",
3112
+ description: `Original saved to .tooling-archived/${rel}`
3113
+ });
3114
+ const created = results.filter((r) => r.action === "created");
3115
+ const updated = results.filter((r) => r.action === "updated");
3116
+ if (!(created.length > 0 || updated.length > 0 || archivedFiles.length > 0) && options.noPrompt) {
3117
+ s.stop("Repository is up to date.");
3118
+ return results;
3119
+ }
3120
+ s.stop("Done!");
3121
+ if (results.some((r) => r.action === "archived" && r.filePath.startsWith(".husky/"))) try {
3122
+ execSync("git config --unset core.hooksPath", {
3123
+ cwd: config.targetDir,
3124
+ stdio: "ignore",
3125
+ timeout: 5e3
3126
+ });
3127
+ } catch (_error) {}
3128
+ const summaryLines = [];
3129
+ if (created.length > 0) summaryLines.push(`Created: ${created.map((r) => r.filePath).join(", ")}`);
3130
+ if (updated.length > 0) summaryLines.push(`Updated: ${updated.map((r) => r.filePath).join(", ")}`);
3131
+ p.note(summaryLines.join("\n"), "Summary");
3132
+ if (!options.noPrompt) {
3133
+ const prompt = generateMigratePrompt(results, config, detected);
3134
+ const promptPath = ".tooling-migrate.md";
3135
+ ctx.write(promptPath, prompt);
3136
+ p.log.info(`Migration prompt written to ${promptPath}`);
3137
+ p.log.info("In Claude Code, run: \"Execute the steps in .tooling-migrate.md\"");
3138
+ }
3139
+ const bensandeeDeps = getAddedDevDepNames(config).filter((name) => name.startsWith("@bensandee/"));
3140
+ const hasLockfile = ctx.exists("pnpm-lock.yaml");
3141
+ if (bensandeeDeps.length > 0 && hasLockfile) {
3142
+ s.start("Updating @bensandee/* packages...");
3143
+ try {
3144
+ execSync(`pnpm update --latest ${bensandeeDeps.join(" ")}`, {
3145
+ cwd: config.targetDir,
3146
+ stdio: "ignore",
3147
+ timeout: 6e4
3148
+ });
3149
+ s.stop("Updated @bensandee/* packages");
3150
+ } catch (_error) {
3151
+ s.stop("Could not update @bensandee/* packages — run pnpm install first");
3152
+ }
3498
3153
  }
3154
+ p.note([
3155
+ "1. Run: pnpm install",
3156
+ "2. Run: pnpm check",
3157
+ ...options.noPrompt ? [] : ["3. In Claude Code, run: \"Execute the steps in .tooling-migrate.md\""]
3158
+ ].join("\n"), "Next steps");
3159
+ return results;
3499
3160
  }
3500
3161
  //#endregion
3501
- //#region src/commands/release-changesets.ts
3502
- const releaseForgejoCommand = defineCommand({
3162
+ //#region src/commands/repo-sync.ts
3163
+ const syncCommand = defineCommand({
3503
3164
  meta: {
3504
- name: "release:changesets",
3505
- description: "Changesets version/publish for Forgejo CI"
3165
+ name: "repo:sync",
3166
+ description: "Detect, generate, and sync project tooling (idempotent)"
3506
3167
  },
3507
3168
  args: {
3508
- "dry-run": {
3169
+ dir: {
3170
+ type: "positional",
3171
+ description: "Target directory (default: current directory)",
3172
+ required: false
3173
+ },
3174
+ check: {
3509
3175
  type: "boolean",
3510
- description: "Skip push, API calls, and publishing side effects"
3176
+ description: "Dry-run mode: report drift without writing files"
3511
3177
  },
3512
- verbose: {
3178
+ yes: {
3513
3179
  type: "boolean",
3514
- description: "Enable detailed debug logging (also enabled by RELEASE_DEBUG env var)"
3515
- }
3516
- },
3517
- async run({ args }) {
3518
- if ((await runRelease(buildReleaseConfig({
3519
- dryRun: args["dry-run"] === true,
3520
- verbose: args.verbose === true || process.env["RELEASE_DEBUG"] === "true"
3521
- }), createRealExecutor())).mode === "none") process.exitCode = 0;
3522
- }
3523
- });
3524
- /** Build release config from environment / package.json and CLI flags. */
3525
- function buildReleaseConfig(flags) {
3526
- const resolved = resolveConnection(process.cwd());
3527
- if (resolved.type !== "forgejo") throw new FatalError("release:changesets requires a Forgejo repository");
3528
- return {
3529
- ...resolved.conn,
3530
- cwd: process.cwd(),
3531
- dryRun: flags.dryRun ?? false,
3532
- verbose: flags.verbose ?? false
3533
- };
3534
- }
3535
- /** Resolve the current branch from CI env vars or git. */
3536
- function getCurrentBranch(executor, cwd) {
3537
- const ref = process.env["GITHUB_REF"];
3538
- if (ref?.startsWith("refs/heads/")) return ref.slice(11);
3539
- return executor.exec("git rev-parse --abbrev-ref HEAD", { cwd }).stdout.trim();
3540
- }
3541
- /** Core release logic — testable with a mock executor. */
3542
- async function runRelease(config, executor) {
3543
- const branch = getCurrentBranch(executor, config.cwd);
3544
- if (branch !== "main") {
3545
- debug$1(config, `Skipping release on non-main branch: ${branch}`);
3546
- return { mode: "none" };
3547
- }
3548
- executor.exec("git config user.name \"forgejo-actions[bot]\"", { cwd: config.cwd });
3549
- executor.exec("git config user.email \"forgejo-actions[bot]@noreply.localhost\"", { cwd: config.cwd });
3550
- const changesetFiles = executor.listChangesetFiles(config.cwd);
3551
- debug$1(config, `Changeset files found: ${changesetFiles.length > 0 ? changesetFiles.join(", ") : "(none)"}`);
3552
- if (changesetFiles.length > 0) {
3553
- debug$1(config, "Entering version mode");
3554
- return runVersionMode(executor, config);
3555
- }
3556
- debug$1(config, "Entering publish mode");
3557
- return runPublishMode(executor, config);
3558
- }
3559
- //#endregion
3560
- //#region src/commands/release-trigger.ts
3561
- const releaseTriggerCommand = defineCommand({
3562
- meta: {
3563
- name: "release:trigger",
3564
- description: "Trigger the release CI workflow"
3565
- },
3566
- args: { ref: {
3567
- type: "string",
3568
- description: "Git ref to trigger on (default: main)",
3569
- required: false
3570
- } },
3571
- async run({ args }) {
3572
- const ref = args.ref ?? "main";
3573
- const resolved = resolveConnection(process.cwd());
3574
- if (resolved.type === "forgejo") await triggerForgejo(resolved.conn, ref);
3575
- else triggerGitHub(ref);
3576
- }
3577
- });
3578
- async function triggerForgejo(conn, ref) {
3579
- const url = `${conn.serverUrl}/api/v1/repos/${conn.repository}/actions/workflows/release.yml/dispatches`;
3580
- const res = await fetch(url, {
3581
- method: "POST",
3582
- headers: {
3583
- Authorization: `token ${conn.token}`,
3584
- "Content-Type": "application/json"
3180
+ alias: "y",
3181
+ description: "Accept all defaults (non-interactive)"
3585
3182
  },
3586
- body: JSON.stringify({ ref })
3587
- });
3588
- if (!res.ok) throw new FatalError(`Failed to trigger Forgejo workflow: ${res.status} ${res.statusText}`);
3589
- p.log.info(`Triggered release workflow on Forgejo (ref: ${ref})`);
3590
- }
3591
- function triggerGitHub(ref) {
3592
- createRealExecutor().exec(`gh workflow run release.yml --ref ${ref}`, { cwd: process.cwd() });
3593
- p.log.info(`Triggered release workflow on GitHub (ref: ${ref})`);
3594
- }
3595
- //#endregion
3596
- //#region src/commands/forgejo-create-release.ts
3597
- const createForgejoReleaseCommand = defineCommand({
3598
- meta: {
3599
- name: "forgejo:create-release",
3600
- description: "Create a Forgejo release for a given tag"
3183
+ "eslint-plugin": {
3184
+ type: "boolean",
3185
+ description: "Include @bensandee/eslint-plugin (default: true)"
3186
+ },
3187
+ "no-ci": {
3188
+ type: "boolean",
3189
+ description: "Skip CI workflow generation"
3190
+ },
3191
+ "no-prompt": {
3192
+ type: "boolean",
3193
+ description: "Skip migration prompt generation"
3194
+ }
3601
3195
  },
3602
- args: { tag: {
3603
- type: "string",
3604
- description: "Git tag to create a release for",
3605
- required: true
3606
- } },
3607
3196
  async run({ args }) {
3608
- const resolved = resolveConnection(process.cwd());
3609
- if (resolved.type !== "forgejo") throw new FatalError("forgejo:create-release requires a Forgejo repository");
3610
- const executor = createRealExecutor();
3611
- const conn = resolved.conn;
3612
- if (await findRelease(executor, conn, args.tag)) {
3613
- p.log.info(`Release for ${args.tag} already exists — skipping`);
3197
+ const targetDir = path.resolve(args.dir ?? ".");
3198
+ if (args.check) {
3199
+ const exitCode = await runCheck(targetDir);
3200
+ process.exitCode = exitCode;
3614
3201
  return;
3615
3202
  }
3616
- await createRelease(executor, conn, args.tag);
3617
- p.log.info(`Created Forgejo release for ${args.tag}`);
3203
+ const saved = loadToolingConfig(targetDir);
3204
+ const isFirstRun = !saved;
3205
+ let config;
3206
+ if (args.yes || !isFirstRun) {
3207
+ const detected = buildDefaultConfig(targetDir, {
3208
+ eslintPlugin: args["eslint-plugin"] === true ? true : void 0,
3209
+ noCi: args["no-ci"] === true ? true : void 0
3210
+ });
3211
+ config = saved ? mergeWithSavedConfig(detected, saved) : detected;
3212
+ } else config = await runInitPrompts(targetDir, saved);
3213
+ await runInit(config, {
3214
+ noPrompt: args["no-prompt"] === true || !isFirstRun,
3215
+ ...!isFirstRun && { confirmOverwrite: async () => "overwrite" }
3216
+ });
3618
3217
  }
3619
3218
  });
3620
- //#endregion
3621
- //#region src/commands/changesets-merge.ts
3622
- const HEAD_BRANCH = "changeset-release/main";
3623
- const releaseMergeCommand = defineCommand({
3624
- meta: {
3625
- name: "changesets:merge",
3626
- description: "Merge the open changesets version PR"
3627
- },
3628
- args: { "dry-run": {
3629
- type: "boolean",
3630
- description: "Show what would be merged without actually merging"
3631
- } },
3632
- async run({ args }) {
3633
- const dryRun = args["dry-run"] === true;
3634
- const resolved = resolveConnection(process.cwd());
3635
- if (resolved.type === "forgejo") await mergeForgejo(resolved.conn, dryRun);
3636
- else mergeGitHub(dryRun);
3219
+ /** Run sync in check mode: dry-run drift detection. */
3220
+ async function runCheck(targetDir) {
3221
+ const saved = loadToolingConfig(targetDir);
3222
+ const detected = buildDefaultConfig(targetDir, {});
3223
+ const { ctx, pendingWrites } = createDryRunContext(saved ? mergeWithSavedConfig(detected, saved) : detected);
3224
+ logDetectionSummary(ctx);
3225
+ const actionable = (await runGenerators(ctx)).filter((r) => {
3226
+ if (r.action !== "created" && r.action !== "updated") return false;
3227
+ const newContent = pendingWrites.get(r.filePath);
3228
+ if (newContent && r.action === "updated") {
3229
+ const existingPath = path.join(targetDir, r.filePath);
3230
+ const existing = existsSync(existingPath) ? readFileSync(existingPath, "utf-8") : void 0;
3231
+ if (existing && contentEqual(r.filePath, existing, newContent)) return false;
3232
+ }
3233
+ return true;
3234
+ });
3235
+ if (actionable.length === 0) {
3236
+ p.log.success("Repository is up to date.");
3237
+ return 0;
3637
3238
  }
3638
- });
3639
- async function mergeForgejo(conn, dryRun) {
3640
- const executor = createRealExecutor();
3641
- const prNumber = await findOpenPr(executor, conn, HEAD_BRANCH);
3642
- if (prNumber === null) throw new FatalError(`No open PR found for branch ${HEAD_BRANCH}`);
3643
- if (dryRun) {
3644
- p.log.info(`[dry-run] Would merge PR #${String(prNumber)} and delete branch ${HEAD_BRANCH}`);
3645
- return;
3239
+ p.log.warn(`${actionable.length} file(s) would be changed by repo:sync`);
3240
+ for (const r of actionable) {
3241
+ p.log.info(` ${r.action}: ${r.filePath} ${r.description}`);
3242
+ const newContent = pendingWrites.get(r.filePath);
3243
+ if (!newContent) continue;
3244
+ const existingPath = path.join(targetDir, r.filePath);
3245
+ const existing = existsSync(existingPath) ? readFileSync(existingPath, "utf-8") : void 0;
3246
+ if (!existing) {
3247
+ const lineCount = newContent.split("\n").length - 1;
3248
+ p.log.info(` + ${lineCount} new lines`);
3249
+ } else {
3250
+ const diff = lineDiff(existing, newContent);
3251
+ for (const line of diff) p.log.info(` ${line}`);
3252
+ }
3646
3253
  }
3647
- await mergePr(executor, conn, prNumber, {
3648
- method: "merge",
3649
- deleteBranch: true
3650
- });
3651
- p.log.info(`Merged PR #${String(prNumber)} and deleted branch ${HEAD_BRANCH}`);
3254
+ return 1;
3652
3255
  }
3653
- function mergeGitHub(dryRun) {
3654
- const executor = createRealExecutor();
3655
- if (dryRun) {
3656
- const prNum = executor.exec(`gh pr view ${HEAD_BRANCH} --json number --jq .number`, { cwd: process.cwd() }).stdout.trim();
3657
- if (!prNum) throw new FatalError(`No open PR found for branch ${HEAD_BRANCH}`);
3658
- p.log.info(`[dry-run] Would merge PR #${prNum} and delete branch ${HEAD_BRANCH}`);
3659
- return;
3660
- }
3661
- executor.exec(`gh pr merge ${HEAD_BRANCH} --merge --delete-branch`, { cwd: process.cwd() });
3662
- p.log.info(`Merged changesets PR and deleted branch ${HEAD_BRANCH}`);
3256
+ const normalize = (line) => line.trimEnd();
3257
+ function lineDiff(oldText, newText) {
3258
+ const oldLines = oldText.split("\n").map(normalize);
3259
+ const newLines = newText.split("\n").map(normalize);
3260
+ const oldSet = new Set(oldLines);
3261
+ const newSet = new Set(newLines);
3262
+ const removed = oldLines.filter((l) => l.trim() !== "" && !newSet.has(l));
3263
+ const added = newLines.filter((l) => l.trim() !== "" && !oldSet.has(l));
3264
+ const lines = [];
3265
+ for (const l of removed) lines.push(`- ${l.trim()}`);
3266
+ for (const l of added) lines.push(`+ ${l.trim()}`);
3267
+ return lines;
3663
3268
  }
3664
3269
  //#endregion
3665
- //#region src/release/simple.ts
3666
- /**
3667
- * Compute sliding version tags from a semver version string.
3668
- * For "1.2.3" returns ["v1", "v1.2"]. Strips prerelease suffixes.
3669
- */
3670
- function computeSlidingTags(version) {
3671
- const parts = (version.split("-")[0] ?? version).split(".");
3672
- if (parts.length < 2 || !parts[0] || !parts[1]) throw new FatalError(`Invalid version format "${version}". Expected semver (X.Y.Z)`);
3673
- return [`v${parts[0]}`, `v${parts[0]}.${parts[1]}`];
3674
- }
3675
- /** Build the commit-and-tag-version command with appropriate flags. */
3676
- function buildCommand(config) {
3677
- const args = ["pnpm exec commit-and-tag-version"];
3678
- if (config.dryRun) args.push("--dry-run");
3679
- if (config.firstRelease) args.push("--first-release");
3680
- if (config.releaseAs) args.push(`--release-as ${config.releaseAs}`);
3681
- if (config.prerelease) args.push(`--prerelease ${config.prerelease}`);
3682
- return args.join(" ");
3683
- }
3684
- /** Read the current version from package.json. */
3685
- function readVersion(executor, cwd) {
3686
- const raw = executor.readFile(path.join(cwd, "package.json"));
3687
- if (!raw) throw new FatalError("Could not read package.json");
3688
- const pkg = parsePackageJson(raw);
3689
- if (!pkg?.version) throw new FatalError("No version field found in package.json");
3690
- return pkg.version;
3691
- }
3692
- /** Run the full commit-and-tag-version release flow. */
3693
- async function runSimpleRelease(executor, config) {
3694
- const command = buildCommand(config);
3695
- p.log.info(`Running: ${command}`);
3696
- const versionResult = executor.exec(command, { cwd: config.cwd });
3697
- debugExec(config, "commit-and-tag-version", versionResult);
3698
- if (versionResult.exitCode !== 0) throw new FatalError(`commit-and-tag-version failed (exit code ${String(versionResult.exitCode)}):\n${versionResult.stderr || versionResult.stdout}`);
3699
- const version = readVersion(executor, config.cwd);
3700
- debug$1(config, `New version: ${version}`);
3701
- const tagResult = executor.exec("git describe --tags --abbrev=0", { cwd: config.cwd });
3702
- debugExec(config, "git describe", tagResult);
3703
- const tag = tagResult.stdout.trim();
3704
- if (!tag) throw new FatalError("Could not determine the new tag from git describe");
3705
- p.log.info(`Version ${version} tagged as ${tag}`);
3706
- if (config.dryRun) {
3707
- const slidingTags = config.noSlidingTags ? [] : computeSlidingTags(version);
3708
- p.log.info(`[dry-run] Would push to origin with --follow-tags`);
3709
- if (slidingTags.length > 0) p.log.info(`[dry-run] Would create sliding tags: ${slidingTags.join(", ")}`);
3710
- if (!config.noRelease && config.platform) p.log.info(`[dry-run] Would create ${config.platform.type} release for ${tag}`);
3711
- return {
3712
- version,
3713
- tag,
3714
- slidingTags,
3715
- pushed: false,
3716
- releaseCreated: false
3717
- };
3718
- }
3719
- let pushed = false;
3720
- if (!config.noPush) {
3721
- const branch = executor.exec("git rev-parse --abbrev-ref HEAD", { cwd: config.cwd }).stdout.trim() || "main";
3722
- debug$1(config, `Pushing to origin/${branch}`);
3723
- const pushResult = executor.exec(`git push --follow-tags origin ${branch}`, { cwd: config.cwd });
3724
- debugExec(config, "git push", pushResult);
3725
- if (pushResult.exitCode !== 0) throw new FatalError(`git push failed (exit code ${String(pushResult.exitCode)}):\n${pushResult.stderr || pushResult.stdout}`);
3726
- pushed = true;
3727
- p.log.info("Pushed to origin");
3728
- }
3729
- let slidingTags = [];
3730
- if (!config.noSlidingTags && pushed) {
3731
- slidingTags = computeSlidingTags(version);
3732
- for (const slidingTag of slidingTags) executor.exec(`git tag -f ${slidingTag}`, { cwd: config.cwd });
3733
- const forcePushResult = executor.exec(`git push origin ${slidingTags.join(" ")} --force`, { cwd: config.cwd });
3734
- debugExec(config, "force-push sliding tags", forcePushResult);
3735
- if (forcePushResult.exitCode !== 0) p.log.warn(`Warning: Failed to push sliding tags: ${forcePushResult.stderr || forcePushResult.stdout}`);
3736
- else p.log.info(`Created sliding tags: ${slidingTags.join(", ")}`);
3737
- }
3738
- let releaseCreated = false;
3739
- if (!config.noRelease && config.platform) releaseCreated = await createPlatformRelease(executor, config, tag);
3270
+ //#region src/release/executor.ts
3271
+ /** Create an executor that runs real commands, fetches, and reads the filesystem. */
3272
+ function createRealExecutor() {
3740
3273
  return {
3741
- version,
3742
- tag,
3743
- slidingTags,
3744
- pushed,
3745
- releaseCreated
3274
+ exec(command, options) {
3275
+ try {
3276
+ return {
3277
+ stdout: execSync(command, {
3278
+ cwd: options?.cwd,
3279
+ env: {
3280
+ ...process.env,
3281
+ ...options?.env
3282
+ },
3283
+ encoding: "utf-8",
3284
+ stdio: [
3285
+ "pipe",
3286
+ "pipe",
3287
+ "pipe"
3288
+ ]
3289
+ }),
3290
+ stderr: "",
3291
+ exitCode: 0
3292
+ };
3293
+ } catch (err) {
3294
+ if (isExecSyncError(err)) return {
3295
+ stdout: err.stdout,
3296
+ stderr: err.stderr,
3297
+ exitCode: err.status
3298
+ };
3299
+ return {
3300
+ stdout: "",
3301
+ stderr: "",
3302
+ exitCode: 1
3303
+ };
3304
+ }
3305
+ },
3306
+ fetch: globalThis.fetch,
3307
+ listChangesetFiles(cwd) {
3308
+ const dir = path.join(cwd, ".changeset");
3309
+ try {
3310
+ return readdirSync(dir).filter((f) => f.endsWith(".md") && f !== "README.md");
3311
+ } catch {
3312
+ return [];
3313
+ }
3314
+ },
3315
+ listWorkspacePackages(cwd) {
3316
+ const packagesDir = path.join(cwd, "packages");
3317
+ const packages = [];
3318
+ try {
3319
+ for (const entry of readdirSync(packagesDir)) {
3320
+ const pkgPath = path.join(packagesDir, entry, "package.json");
3321
+ try {
3322
+ const pkg = parsePackageJson(readFileSync(pkgPath, "utf-8"));
3323
+ if (pkg?.name && pkg.version && !pkg.private) packages.push({
3324
+ name: pkg.name,
3325
+ version: pkg.version,
3326
+ dir: entry
3327
+ });
3328
+ } catch (_error) {}
3329
+ }
3330
+ } catch (_error) {}
3331
+ return packages;
3332
+ },
3333
+ listPackageDirs(cwd) {
3334
+ const packagesDir = path.join(cwd, "packages");
3335
+ try {
3336
+ return readdirSync(packagesDir, { withFileTypes: true }).filter((entry) => entry.isDirectory()).map((entry) => entry.name);
3337
+ } catch {
3338
+ return [];
3339
+ }
3340
+ },
3341
+ readFile(filePath) {
3342
+ try {
3343
+ return readFileSync(filePath, "utf-8");
3344
+ } catch {
3345
+ return null;
3346
+ }
3347
+ },
3348
+ writeFile(filePath, content) {
3349
+ mkdirSync(path.dirname(filePath), { recursive: true });
3350
+ writeFileSync(filePath, content);
3351
+ }
3746
3352
  };
3747
3353
  }
3748
- async function createPlatformRelease(executor, config, tag) {
3749
- if (!config.platform) return false;
3750
- if (config.platform.type === "forgejo") {
3751
- if (await findRelease(executor, config.platform.conn, tag)) {
3752
- debug$1(config, `Release for ${tag} already exists, skipping`);
3753
- return false;
3754
- }
3755
- await createRelease(executor, config.platform.conn, tag);
3756
- p.log.info(`Created Forgejo release for ${tag}`);
3757
- return true;
3354
+ /** Parse "New tag:" lines from changeset publish output. */
3355
+ function parseNewTags(output) {
3356
+ const tags = [];
3357
+ for (const line of output.split("\n")) {
3358
+ const match = /New tag:\s+(\S+)/.exec(line);
3359
+ if (match?.[1]) tags.push(match[1]);
3758
3360
  }
3759
- const ghResult = executor.exec(`gh release create ${tag} --generate-notes`, { cwd: config.cwd });
3760
- debugExec(config, "gh release create", ghResult);
3761
- if (ghResult.exitCode !== 0) {
3762
- p.log.warn(`Warning: Failed to create GitHub release: ${ghResult.stderr || ghResult.stdout}`);
3763
- return false;
3361
+ return tags;
3362
+ }
3363
+ /** Map workspace packages to their expected tag strings (name@version). */
3364
+ function computeExpectedTags(packages) {
3365
+ return packages.map((p) => `${p.name}@${p.version}`);
3366
+ }
3367
+ /** Parse `git ls-remote --tags` output into tag names, filtering out `^{}` dereference entries. */
3368
+ function parseRemoteTags(output) {
3369
+ const tags = [];
3370
+ for (const line of output.split("\n")) {
3371
+ const match = /refs\/tags\/(.+)/.exec(line);
3372
+ if (match?.[1] && !match[1].endsWith("^{}")) tags.push(match[1]);
3764
3373
  }
3765
- p.log.info(`Created GitHub release for ${tag}`);
3766
- return true;
3374
+ return tags;
3375
+ }
3376
+ /**
3377
+ * Reconcile expected tags with what already exists on the remote.
3378
+ * Returns `(expected - remote) ∪ stdoutTags`, deduplicated.
3379
+ */
3380
+ function reconcileTags(expectedTags, remoteTags, stdoutTags) {
3381
+ const remoteSet = new Set(remoteTags);
3382
+ const result = /* @__PURE__ */ new Set();
3383
+ for (const tag of expectedTags) if (!remoteSet.has(tag)) result.add(tag);
3384
+ for (const tag of stdoutTags) result.add(tag);
3385
+ return [...result];
3767
3386
  }
3768
3387
  //#endregion
3769
- //#region src/commands/release-simple.ts
3770
- const releaseSimpleCommand = defineCommand({
3771
- meta: {
3772
- name: "release:simple",
3773
- description: "Run commit-and-tag-version, push, create sliding tags, and create a platform release"
3774
- },
3775
- args: {
3776
- "dry-run": {
3777
- type: "boolean",
3778
- description: "Pass --dry-run to commit-and-tag-version and skip all remote operations"
3779
- },
3780
- verbose: {
3781
- type: "boolean",
3782
- description: "Enable detailed debug logging (also enabled by RELEASE_DEBUG env var)"
3783
- },
3784
- "no-push": {
3785
- type: "boolean",
3786
- description: "Run commit-and-tag-version but skip push and remote operations"
3787
- },
3788
- "no-sliding-tags": {
3789
- type: "boolean",
3790
- description: "Skip creating sliding major/minor version tags (vX, vX.Y)"
3388
+ //#region src/release/forgejo.ts
3389
+ const PullRequestSchema = z.array(z.object({
3390
+ number: z.number(),
3391
+ head: z.object({ ref: z.string() })
3392
+ }));
3393
+ /**
3394
+ * Find an open PR with the given head branch. Returns the PR number or null.
3395
+ *
3396
+ * Fetches all open PRs and filters client-side by head.ref rather than relying
3397
+ * on Forgejo's query parameter filtering, which behaves inconsistently.
3398
+ */
3399
+ async function findOpenPr(executor, conn, head) {
3400
+ const url = `${conn.serverUrl}/api/v1/repos/${conn.repository}/pulls?state=open`;
3401
+ const res = await executor.fetch(url, { headers: { Authorization: `token ${conn.token}` } });
3402
+ if (!res.ok) throw new TransientError(`Failed to list PRs: ${res.status} ${res.statusText}`);
3403
+ const parsed = PullRequestSchema.safeParse(await res.json());
3404
+ if (!parsed.success) throw new UnexpectedError(`Unexpected PR list response: ${parsed.error.message}`);
3405
+ return parsed.data.find((pr) => pr.head.ref === head)?.number ?? null;
3406
+ }
3407
+ /** Create a new pull request. */
3408
+ async function createPr(executor, conn, options) {
3409
+ const url = `${conn.serverUrl}/api/v1/repos/${conn.repository}/pulls`;
3410
+ const payload = {
3411
+ title: options.title,
3412
+ head: options.head,
3413
+ base: options.base
3414
+ };
3415
+ if (options.body) payload["body"] = options.body;
3416
+ const res = await executor.fetch(url, {
3417
+ method: "POST",
3418
+ headers: {
3419
+ Authorization: `token ${conn.token}`,
3420
+ "Content-Type": "application/json"
3791
3421
  },
3792
- "no-release": {
3793
- type: "boolean",
3794
- description: "Skip Forgejo/GitHub release creation"
3422
+ body: JSON.stringify(payload)
3423
+ });
3424
+ if (!res.ok) throw new TransientError(`Failed to create PR: ${res.status} ${res.statusText}`);
3425
+ }
3426
+ /** Update an existing pull request's title and body. */
3427
+ async function updatePr(executor, conn, prNumber, options) {
3428
+ const url = `${conn.serverUrl}/api/v1/repos/${conn.repository}/pulls/${String(prNumber)}`;
3429
+ const res = await executor.fetch(url, {
3430
+ method: "PATCH",
3431
+ headers: {
3432
+ Authorization: `token ${conn.token}`,
3433
+ "Content-Type": "application/json"
3795
3434
  },
3796
- "first-release": {
3797
- type: "boolean",
3798
- description: "Pass --first-release to commit-and-tag-version (skip version bump)"
3435
+ body: JSON.stringify({
3436
+ title: options.title,
3437
+ body: options.body
3438
+ })
3439
+ });
3440
+ if (!res.ok) throw new TransientError(`Failed to update PR #${String(prNumber)}: ${res.status} ${res.statusText}`);
3441
+ }
3442
+ /** Merge a pull request by number. */
3443
+ async function mergePr(executor, conn, prNumber, options) {
3444
+ const url = `${conn.serverUrl}/api/v1/repos/${conn.repository}/pulls/${String(prNumber)}/merge`;
3445
+ const res = await executor.fetch(url, {
3446
+ method: "POST",
3447
+ headers: {
3448
+ Authorization: `token ${conn.token}`,
3449
+ "Content-Type": "application/json"
3799
3450
  },
3800
- "release-as": {
3801
- type: "string",
3802
- description: "Force a specific version (passed to commit-and-tag-version --release-as)"
3451
+ body: JSON.stringify({
3452
+ Do: options?.method ?? "merge",
3453
+ delete_branch_after_merge: options?.deleteBranch ?? true
3454
+ })
3455
+ });
3456
+ if (!res.ok) throw new TransientError(`Failed to merge PR #${String(prNumber)}: ${res.status} ${res.statusText}`);
3457
+ }
3458
+ /** Check whether a Forgejo release already exists for a given tag. */
3459
+ async function findRelease(executor, conn, tag) {
3460
+ const encodedTag = encodeURIComponent(tag);
3461
+ const url = `${conn.serverUrl}/api/v1/repos/${conn.repository}/releases/tags/${encodedTag}`;
3462
+ const res = await executor.fetch(url, { headers: { Authorization: `token ${conn.token}` } });
3463
+ if (res.status === 200) return true;
3464
+ if (res.status === 404) return false;
3465
+ throw new TransientError(`Failed to check release for ${tag}: ${res.status} ${res.statusText}`);
3466
+ }
3467
+ /** Create a Forgejo release for a given tag. */
3468
+ async function createRelease(executor, conn, tag) {
3469
+ const url = `${conn.serverUrl}/api/v1/repos/${conn.repository}/releases`;
3470
+ const res = await executor.fetch(url, {
3471
+ method: "POST",
3472
+ headers: {
3473
+ Authorization: `token ${conn.token}`,
3474
+ "Content-Type": "application/json"
3803
3475
  },
3804
- prerelease: {
3805
- type: "string",
3806
- description: "Create a prerelease with the given tag (e.g., beta, alpha)"
3807
- }
3808
- },
3809
- async run({ args }) {
3810
- const cwd = process.cwd();
3811
- const verbose = args.verbose === true || process.env["RELEASE_DEBUG"] === "true";
3812
- const noRelease = args["no-release"] === true;
3813
- let platform;
3814
- if (!noRelease) {
3815
- const resolved = resolveConnection(cwd);
3816
- if (resolved.type === "forgejo") platform = {
3817
- type: "forgejo",
3818
- conn: resolved.conn
3819
- };
3820
- else platform = { type: "github" };
3821
- }
3822
- const config = {
3823
- cwd,
3824
- dryRun: args["dry-run"] === true,
3825
- verbose,
3826
- noPush: args["no-push"] === true,
3827
- noSlidingTags: args["no-sliding-tags"] === true,
3828
- noRelease,
3829
- firstRelease: args["first-release"] === true,
3830
- releaseAs: args["release-as"],
3831
- prerelease: args.prerelease,
3832
- platform
3833
- };
3834
- await runSimpleRelease(createRealExecutor(), config);
3835
- }
3836
- });
3476
+ body: JSON.stringify({
3477
+ tag_name: tag,
3478
+ name: tag,
3479
+ body: `Published ${tag}`
3480
+ })
3481
+ });
3482
+ if (!res.ok) throw new TransientError(`Failed to create release for ${tag}: ${res.status} ${res.statusText}`);
3483
+ }
3837
3484
  //#endregion
3838
- //#region src/commands/repo-run-checks.ts
3839
- const CHECKS = [
3840
- { name: "build" },
3841
- { name: "typecheck" },
3842
- { name: "lint" },
3843
- { name: "test" },
3844
- {
3845
- name: "format",
3846
- args: "--check"
3847
- },
3848
- { name: "knip" },
3849
- { name: "tooling:check" },
3850
- { name: "docker:check" }
3851
- ];
3852
- function defaultGetScripts(targetDir) {
3853
- try {
3854
- const pkg = parsePackageJson(readFileSync(path.join(targetDir, "package.json"), "utf-8"));
3855
- return new Set(Object.keys(pkg?.scripts ?? {}));
3856
- } catch {
3857
- return /* @__PURE__ */ new Set();
3858
- }
3485
+ //#region src/release/log.ts
3486
+ /** Log a debug message when verbose mode is enabled. */
3487
+ function debug(config, message) {
3488
+ if (config.verbose) p.log.info(`[debug] ${message}`);
3859
3489
  }
3860
- function defaultExecCommand(cmd, cwd) {
3861
- try {
3862
- execSync(cmd, {
3863
- cwd,
3864
- stdio: "inherit"
3865
- });
3866
- return 0;
3867
- } catch (err) {
3868
- if (isExecSyncError(err)) return err.status;
3869
- return 1;
3490
+ /** Log the result of an exec call when verbose mode is enabled. */
3491
+ function debugExec(config, label, result) {
3492
+ if (!config.verbose) return;
3493
+ const lines = [`[debug] ${label} (exit code ${String(result.exitCode)})`];
3494
+ if (result.stdout.trim()) lines.push(` stdout: ${result.stdout.trim()}`);
3495
+ if (result.stderr.trim()) lines.push(` stderr: ${result.stderr.trim()}`);
3496
+ p.log.info(lines.join("\n"));
3497
+ }
3498
+ //#endregion
3499
+ //#region src/release/version.ts
3500
+ const BRANCH = "changeset-release/main";
3501
+ /** Extract the latest changelog entry (content between first and second `## ` heading). */
3502
+ function extractLatestEntry(changelog) {
3503
+ const lines = changelog.split("\n");
3504
+ let start = -1;
3505
+ let end = lines.length;
3506
+ for (let i = 0; i < lines.length; i++) if (lines[i]?.startsWith("## ")) if (start === -1) start = i;
3507
+ else {
3508
+ end = i;
3509
+ break;
3870
3510
  }
3511
+ if (start === -1) return null;
3512
+ return lines.slice(start, end).join("\n").trim();
3871
3513
  }
3872
- const ciLog = (msg) => console.log(msg);
3873
- function runRunChecks(targetDir, options = {}) {
3874
- const exec = options.execCommand ?? defaultExecCommand;
3875
- const getScripts = options.getScripts ?? defaultGetScripts;
3876
- const skip = options.skip ?? /* @__PURE__ */ new Set();
3877
- const add = options.add ?? [];
3878
- const isCI = Boolean(process.env["CI"]);
3879
- const failFast = options.failFast ?? !isCI;
3880
- const definedScripts = getScripts(targetDir);
3881
- const addedNames = new Set(add);
3882
- const allChecks = [...CHECKS, ...add.map((name) => ({ name }))];
3883
- const failures = [];
3884
- const notDefined = [];
3885
- for (const check of allChecks) {
3886
- if (skip.has(check.name)) continue;
3887
- if (!definedScripts.has(check.name)) {
3888
- if (addedNames.has(check.name)) {
3889
- p.log.error(`${check.name} not defined in package.json`);
3890
- failures.push(check.name);
3891
- } else notDefined.push(check.name);
3892
- continue;
3893
- }
3894
- const cmd = check.args ? `pnpm run ${check.name} ${check.args}` : `pnpm run ${check.name}`;
3895
- if (isCI) ciLog(`::group::${check.name}`);
3896
- const exitCode = exec(cmd, targetDir);
3897
- if (isCI) ciLog("::endgroup::");
3898
- if (exitCode === 0) p.log.success(check.name);
3899
- else {
3900
- if (isCI) ciLog(`::error::${check.name} failed`);
3901
- p.log.error(`${check.name} failed`);
3902
- failures.push(check.name);
3903
- if (failFast) return 1;
3514
+ /** Read the root package.json name and version. */
3515
+ function readRootPackage(executor, cwd) {
3516
+ const content = executor.readFile(path.join(cwd, "package.json"));
3517
+ if (!content) return null;
3518
+ const pkg = parsePackageJson(content);
3519
+ if (!pkg?.name || !pkg.version) return null;
3520
+ if (pkg.private) return null;
3521
+ return {
3522
+ name: pkg.name,
3523
+ version: pkg.version
3524
+ };
3525
+ }
3526
+ /** Determine which packages changed and collect their changelog entries. */
3527
+ function buildPrContent(executor, cwd, packagesBefore) {
3528
+ const packagesAfter = executor.listWorkspacePackages(cwd);
3529
+ if (!(packagesBefore.length > 0 || packagesAfter.length > 0)) {
3530
+ const rootPkg = readRootPackage(executor, cwd);
3531
+ if (rootPkg) {
3532
+ const changelog = executor.readFile(path.join(cwd, "CHANGELOG.md"));
3533
+ const entry = changelog ? extractLatestEntry(changelog) : null;
3534
+ return {
3535
+ title: `chore: release ${rootPkg.name}@${rootPkg.version}`,
3536
+ body: entry ?? ""
3537
+ };
3904
3538
  }
3539
+ return {
3540
+ title: "chore: version packages",
3541
+ body: ""
3542
+ };
3905
3543
  }
3906
- if (notDefined.length > 0) p.log.info(`Skipped (not defined): ${notDefined.join(", ")}`);
3907
- if (failures.length > 0) {
3908
- p.log.error(`Failed checks: ${failures.join(", ")}`);
3909
- return 1;
3544
+ const beforeMap = new Map(packagesBefore.map((pkg) => [pkg.name, pkg.version]));
3545
+ const changed = packagesAfter.filter((pkg) => beforeMap.get(pkg.name) !== pkg.version);
3546
+ if (changed.length === 0) return {
3547
+ title: "chore: version packages",
3548
+ body: ""
3549
+ };
3550
+ const title = `chore: release ${changed.map((pkg) => `${pkg.name}@${pkg.version}`).join(", ")}`;
3551
+ const entries = [];
3552
+ for (const pkg of changed) {
3553
+ const changelogPath = path.join(cwd, "packages", pkg.dir, "CHANGELOG.md");
3554
+ const changelog = executor.readFile(changelogPath);
3555
+ const entry = changelog ? extractLatestEntry(changelog) : null;
3556
+ if (entry) {
3557
+ const labeled = entry.replace(/^## .+/, `## ${pkg.name}@${pkg.version}`);
3558
+ entries.push(labeled);
3559
+ }
3910
3560
  }
3911
- p.log.success("All checks passed");
3912
- return 0;
3561
+ return {
3562
+ title,
3563
+ body: entries.join("\n\n")
3564
+ };
3913
3565
  }
3914
- const runChecksCommand = defineCommand({
3915
- meta: {
3916
- name: "checks:run",
3917
- description: "Run all standard checks (build, typecheck, lint, test, format, knip, tooling:check, docker:check)"
3918
- },
3919
- args: {
3920
- dir: {
3921
- type: "positional",
3922
- description: "Target directory (default: current directory)",
3923
- required: false
3924
- },
3925
- skip: {
3926
- type: "string",
3927
- description: "Comma-separated list of checks to skip (build, typecheck, lint, test, format, knip, tooling:check, docker:check)",
3928
- required: false
3929
- },
3930
- add: {
3931
- type: "string",
3932
- description: "Comma-separated list of additional check names to run (uses pnpm run <name>)",
3933
- required: false
3934
- },
3935
- "fail-fast": {
3936
- type: "boolean",
3937
- description: "Stop on first failure (default: true in dev, false in CI)",
3938
- required: false
3566
+ /** Mode 1: version packages and create/update a PR. */
3567
+ async function runVersionMode(executor, config) {
3568
+ p.log.info("Changesets detected — versioning packages");
3569
+ const packagesBefore = executor.listWorkspacePackages(config.cwd);
3570
+ debug(config, `Packages before versioning: ${packagesBefore.map((pkg) => `${pkg.name}@${pkg.version}`).join(", ") || "(none)"}`);
3571
+ const changesetConfigPath = path.join(config.cwd, ".changeset", "config.json");
3572
+ const originalConfig = executor.readFile(changesetConfigPath);
3573
+ if (originalConfig) {
3574
+ const parsed = parseChangesetConfig(originalConfig);
3575
+ if (parsed?.commit) {
3576
+ const patched = {
3577
+ ...parsed,
3578
+ commit: false
3579
+ };
3580
+ executor.writeFile(changesetConfigPath, JSON.stringify(patched, null, 2) + "\n");
3581
+ debug(config, "Temporarily disabled changeset commit:true");
3939
3582
  }
3940
- },
3941
- run({ args }) {
3942
- const exitCode = runRunChecks(path.resolve(args.dir ?? "."), {
3943
- skip: args.skip ? new Set(args.skip.split(",").map((s) => s.trim())) : void 0,
3944
- add: args.add ? args.add.split(",").map((s) => s.trim()) : void 0,
3945
- failFast: args["fail-fast"] === true ? true : args["fail-fast"] === false ? false : void 0
3946
- });
3947
- process.exitCode = exitCode;
3948
- }
3949
- });
3950
- //#endregion
3951
- //#region src/release/docker.ts
3952
- const ToolingDockerMapSchema = z.record(z.string(), z.object({
3953
- dockerfile: z.string(),
3954
- context: z.string().default(".")
3955
- }));
3956
- const ToolingConfigDockerSchema = z.object({ docker: ToolingDockerMapSchema.optional() });
3957
- const PackageInfoSchema = z.object({
3958
- name: z.string().optional(),
3959
- version: z.string().optional()
3960
- });
3961
- /** Read the docker map from .tooling.json. Returns empty record if missing or invalid. */
3962
- function loadDockerMap(executor, cwd) {
3963
- const configPath = path.join(cwd, ".tooling.json");
3964
- const raw = executor.readFile(configPath);
3965
- if (!raw) return {};
3966
- try {
3967
- const result = ToolingConfigDockerSchema.safeParse(JSON.parse(raw));
3968
- if (!result.success || !result.data.docker) return {};
3969
- return result.data.docker;
3970
- } catch (_error) {
3971
- return {};
3972
3583
  }
3973
- }
3974
- /** Read name and version from a package's package.json. */
3975
- function readPackageInfo(executor, packageJsonPath) {
3976
- const raw = executor.readFile(packageJsonPath);
3977
- if (!raw) return {
3978
- name: void 0,
3979
- version: void 0
3980
- };
3981
- try {
3982
- const result = PackageInfoSchema.safeParse(JSON.parse(raw));
3983
- if (!result.success) return {
3984
- name: void 0,
3985
- version: void 0
3584
+ const versionResult = executor.exec("pnpm changeset version", { cwd: config.cwd });
3585
+ debugExec(config, "pnpm changeset version", versionResult);
3586
+ if (originalConfig) executor.writeFile(changesetConfigPath, originalConfig);
3587
+ if (versionResult.exitCode !== 0) throw new FatalError(`pnpm changeset version failed (exit code ${String(versionResult.exitCode)}):\n${versionResult.stderr}`);
3588
+ debugExec(config, "pnpm install --no-frozen-lockfile", executor.exec("pnpm install --no-frozen-lockfile", { cwd: config.cwd }));
3589
+ const { title, body } = buildPrContent(executor, config.cwd, packagesBefore);
3590
+ debug(config, `PR title: ${title}`);
3591
+ executor.exec("git add -A", { cwd: config.cwd });
3592
+ const remainingChangesets = executor.listChangesetFiles(config.cwd);
3593
+ if (remainingChangesets.length > 0) p.log.warn(`Changeset files still present after versioning: ${remainingChangesets.join(", ")}`);
3594
+ debug(config, `Changeset files after versioning: ${remainingChangesets.length > 0 ? remainingChangesets.join(", ") : "(none — all consumed)"}`);
3595
+ const commitResult = executor.exec("git commit -m \"chore: version packages\"", { cwd: config.cwd });
3596
+ debugExec(config, "git commit", commitResult);
3597
+ if (commitResult.exitCode !== 0) {
3598
+ p.log.info("Nothing to commit after versioning");
3599
+ return {
3600
+ mode: "version",
3601
+ pr: "none"
3986
3602
  };
3603
+ }
3604
+ if (config.dryRun) {
3605
+ p.log.info("[dry-run] Would push and create/update PR");
3987
3606
  return {
3988
- name: result.data.name,
3989
- version: result.data.version
3607
+ mode: "version",
3608
+ pr: "none"
3990
3609
  };
3991
- } catch (_error) {
3610
+ }
3611
+ debugExec(config, "git push", executor.exec(`git push origin "HEAD:refs/heads/${BRANCH}" --force`, { cwd: config.cwd }));
3612
+ const conn = {
3613
+ serverUrl: config.serverUrl,
3614
+ repository: config.repository,
3615
+ token: config.token
3616
+ };
3617
+ const existingPr = await findOpenPr(executor, conn, BRANCH);
3618
+ debug(config, `Existing open PR for ${BRANCH}: ${existingPr === null ? "(none)" : `#${String(existingPr)}`}`);
3619
+ if (existingPr === null) {
3620
+ await createPr(executor, conn, {
3621
+ title,
3622
+ head: BRANCH,
3623
+ base: "main",
3624
+ body
3625
+ });
3626
+ p.log.info("Created version PR");
3992
3627
  return {
3993
- name: void 0,
3994
- version: void 0
3628
+ mode: "version",
3629
+ pr: "created"
3995
3630
  };
3996
3631
  }
3632
+ await updatePr(executor, conn, existingPr, {
3633
+ title,
3634
+ body
3635
+ });
3636
+ p.log.info(`Updated version PR #${String(existingPr)}`);
3637
+ return {
3638
+ mode: "version",
3639
+ pr: "updated"
3640
+ };
3641
+ }
3642
+ //#endregion
3643
+ //#region src/release/publish.ts
3644
+ const RETRY_ATTEMPTS = 3;
3645
+ const RETRY_BASE_DELAY_MS = 1e3;
3646
+ async function retryAsync(fn) {
3647
+ let lastError;
3648
+ for (let attempt = 0; attempt <= RETRY_ATTEMPTS; attempt++) try {
3649
+ return await fn();
3650
+ } catch (error) {
3651
+ lastError = error;
3652
+ if (attempt < RETRY_ATTEMPTS) {
3653
+ const delay = RETRY_BASE_DELAY_MS * 2 ** attempt;
3654
+ await new Promise((resolve) => setTimeout(resolve, delay));
3655
+ }
3656
+ }
3657
+ throw lastError;
3997
3658
  }
3998
- /** Convention paths to check for Dockerfiles in a package directory. */
3999
- const CONVENTION_DOCKERFILE_PATHS = ["Dockerfile", "docker/Dockerfile"];
4000
- /**
4001
- * Find a Dockerfile at convention paths for a monorepo package.
4002
- * Checks packages/{dir}/Dockerfile and packages/{dir}/docker/Dockerfile.
4003
- */
4004
- function findConventionDockerfile(executor, cwd, dir) {
4005
- for (const rel of CONVENTION_DOCKERFILE_PATHS) {
4006
- const dockerfilePath = `packages/${dir}/${rel}`;
4007
- if (executor.readFile(path.join(cwd, dockerfilePath)) !== null) return {
4008
- dockerfile: dockerfilePath,
4009
- context: "."
3659
+ /** Mode 2: publish to npm, push tags, and create Forgejo releases. */
3660
+ async function runPublishMode(executor, config) {
3661
+ p.log.info("No changesets — publishing packages");
3662
+ const publishResult = executor.exec("pnpm changeset publish", { cwd: config.cwd });
3663
+ debugExec(config, "pnpm changeset publish", publishResult);
3664
+ if (publishResult.exitCode !== 0) throw new FatalError(`pnpm changeset publish failed (exit code ${String(publishResult.exitCode)}):\n${publishResult.stderr}`);
3665
+ const stdoutTags = parseNewTags(publishResult.stdout + "\n" + publishResult.stderr);
3666
+ debug(config, `Tags from publish stdout: ${stdoutTags.length > 0 ? stdoutTags.join(", ") : "(none)"}`);
3667
+ const expectedTags = computeExpectedTags(executor.listWorkspacePackages(config.cwd));
3668
+ debug(config, `Expected tags from workspace packages: ${expectedTags.length > 0 ? expectedTags.join(", ") : "(none)"}`);
3669
+ const remoteTags = parseRemoteTags(executor.exec("git ls-remote --tags origin", { cwd: config.cwd }).stdout);
3670
+ debug(config, `Remote tags: ${remoteTags.length > 0 ? remoteTags.join(", ") : "(none)"}`);
3671
+ const remoteSet = new Set(remoteTags);
3672
+ const tagsToPush = reconcileTags(expectedTags, remoteTags, stdoutTags);
3673
+ debug(config, `Reconciled tags to push: ${tagsToPush.length > 0 ? tagsToPush.join(", ") : "(none)"}`);
3674
+ if (config.dryRun) {
3675
+ if (tagsToPush.length === 0) {
3676
+ p.log.info("No packages were published");
3677
+ return { mode: "none" };
3678
+ }
3679
+ p.log.info(`Tags to process: ${tagsToPush.join(", ")}`);
3680
+ p.log.info("[dry-run] Would push tags and create releases");
3681
+ return {
3682
+ mode: "publish",
3683
+ tags: tagsToPush
4010
3684
  };
4011
3685
  }
4012
- }
4013
- /**
4014
- * Find a Dockerfile at convention paths for a single-package repo.
4015
- * Checks Dockerfile and docker/Dockerfile at the project root.
4016
- */
4017
- function findRootDockerfile(executor, cwd) {
4018
- for (const rel of CONVENTION_DOCKERFILE_PATHS) if (executor.readFile(path.join(cwd, rel)) !== null) return {
4019
- dockerfile: rel,
4020
- context: "."
3686
+ const conn = {
3687
+ serverUrl: config.serverUrl,
3688
+ repository: config.repository,
3689
+ token: config.token
3690
+ };
3691
+ const remoteExpectedTags = expectedTags.filter((t) => remoteSet.has(t) && !tagsToPush.includes(t));
3692
+ const tagsWithMissingReleases = [];
3693
+ for (const tag of remoteExpectedTags) if (!await findRelease(executor, conn, tag)) tagsWithMissingReleases.push(tag);
3694
+ const allTags = [...tagsToPush, ...tagsWithMissingReleases];
3695
+ if (allTags.length === 0) {
3696
+ p.log.info("No packages were published");
3697
+ return { mode: "none" };
3698
+ }
3699
+ p.log.info(`Tags to process: ${allTags.join(", ")}`);
3700
+ const errors = [];
3701
+ for (const tag of allTags) try {
3702
+ if (!remoteSet.has(tag)) {
3703
+ if (executor.exec(`git tag -l ${JSON.stringify(tag)}`, { cwd: config.cwd }).stdout.trim() === "") executor.exec(`git tag ${JSON.stringify(tag)}`, { cwd: config.cwd });
3704
+ executor.exec(`git push origin refs/tags/${tag}`, { cwd: config.cwd });
3705
+ }
3706
+ if (await findRelease(executor, conn, tag)) p.log.warn(`Release for ${tag} already exists — skipping`);
3707
+ else {
3708
+ await retryAsync(async () => {
3709
+ try {
3710
+ await createRelease(executor, conn, tag);
3711
+ } catch (error) {
3712
+ if (await findRelease(executor, conn, tag)) return;
3713
+ throw error;
3714
+ }
3715
+ });
3716
+ p.log.info(`Created release for ${tag}`);
3717
+ }
3718
+ } catch (error) {
3719
+ errors.push({
3720
+ tag,
3721
+ error
3722
+ });
3723
+ p.log.warn(`Failed to process ${tag}: ${error instanceof Error ? error.message : String(error)}`);
3724
+ }
3725
+ if (errors.length > 0) throw new TransientError(`Failed to create releases for: ${errors.map((e) => e.tag).join(", ")}`);
3726
+ return {
3727
+ mode: "publish",
3728
+ tags: allTags
4021
3729
  };
4022
3730
  }
3731
+ //#endregion
3732
+ //#region src/release/connection.ts
3733
+ const RepositorySchema = z.union([z.string(), z.object({ url: z.string() })]);
4023
3734
  /**
4024
- * Discover Docker packages by convention and merge with .tooling.json overrides.
3735
+ * Resolve the hosting platform and connection details.
4025
3736
  *
4026
- * Convention: any package with a Dockerfile or docker/Dockerfile is a Docker package.
4027
- * For monorepos, scans packages/{name}/. For single-package repos, scans the root.
4028
- * The docker map in .tooling.json overrides convention-discovered config and can add
4029
- * packages at non-standard locations.
3737
+ * Priority:
3738
+ * 1. Environment variables (FORGEJO_SERVER_URL, FORGEJO_REPOSITORY, FORGEJO_TOKEN)
3739
+ * 2. `repository` field in package.json (server URL and owner/repo parsed from the URL)
4030
3740
  *
4031
- * Image names are derived from {root-name}-{package-name} using each package's package.json name.
4032
- * Versions are read from each package's own package.json.
3741
+ * For Forgejo, FORGEJO_TOKEN is always required (either from env or explicitly).
3742
+ * If the repository URL hostname is `github.com`, returns `{ type: "github" }`.
4033
3743
  */
4034
- function detectDockerPackages(executor, cwd, repoName) {
4035
- const overrides = loadDockerMap(executor, cwd);
4036
- const packageDirs = executor.listPackageDirs(cwd);
4037
- const packages = [];
4038
- const seen = /* @__PURE__ */ new Set();
4039
- if (packageDirs.length > 0) {
4040
- for (const dir of packageDirs) {
4041
- const convention = findConventionDockerfile(executor, cwd, dir);
4042
- const docker = overrides[dir] ?? convention;
4043
- if (docker) {
4044
- const { name, version } = readPackageInfo(executor, path.join(cwd, "packages", dir, "package.json"));
4045
- packages.push({
4046
- dir,
4047
- imageName: `${repoName}-${name ?? dir}`,
4048
- version,
4049
- docker
4050
- });
4051
- seen.add(dir);
4052
- }
4053
- }
4054
- for (const [dir, docker] of Object.entries(overrides)) if (!seen.has(dir)) {
4055
- const { name, version } = readPackageInfo(executor, path.join(cwd, "packages", dir, "package.json"));
4056
- packages.push({
4057
- dir,
4058
- imageName: `${repoName}-${name ?? dir}`,
4059
- version,
4060
- docker
4061
- });
3744
+ function resolveConnection(cwd) {
3745
+ const serverUrl = process.env["FORGEJO_SERVER_URL"];
3746
+ const repository = process.env["FORGEJO_REPOSITORY"];
3747
+ const token = process.env["FORGEJO_TOKEN"];
3748
+ if (serverUrl && repository && token) return {
3749
+ type: "forgejo",
3750
+ conn: {
3751
+ serverUrl,
3752
+ repository,
3753
+ token
4062
3754
  }
4063
- } else {
4064
- const convention = findRootDockerfile(executor, cwd);
4065
- const docker = overrides["."] ?? convention;
4066
- if (docker) {
4067
- const { name, version } = readPackageInfo(executor, path.join(cwd, "package.json"));
4068
- packages.push({
4069
- dir: ".",
4070
- imageName: name ?? repoName,
4071
- version,
4072
- docker
4073
- });
3755
+ };
3756
+ const parsed = parseRepositoryUrl(cwd);
3757
+ if (parsed === null) {
3758
+ if (serverUrl) {
3759
+ if (!repository) throw new FatalError("FORGEJO_REPOSITORY environment variable is required");
3760
+ if (!token) throw new FatalError("FORGEJO_TOKEN environment variable is required");
4074
3761
  }
3762
+ return { type: "github" };
4075
3763
  }
4076
- return packages;
4077
- }
4078
- /**
4079
- * Read docker config for a single package, checking convention paths first,
4080
- * then .tooling.json overrides. Used by the per-package image:build script.
4081
- */
4082
- function readSinglePackageDocker(executor, cwd, packageDir, repoName) {
4083
- const dir = path.basename(path.resolve(cwd, packageDir));
4084
- const convention = findConventionDockerfile(executor, cwd, dir);
4085
- const docker = loadDockerMap(executor, cwd)[dir] ?? convention;
4086
- if (!docker) throw new FatalError(`No Dockerfile found for package "${dir}" (checked convention paths and .tooling.json)`);
4087
- const { name, version } = readPackageInfo(executor, path.join(cwd, "packages", dir, "package.json"));
3764
+ if (parsed.hostname === "github.com") return { type: "github" };
3765
+ const resolvedToken = token;
3766
+ if (!resolvedToken) throw new FatalError("FORGEJO_TOKEN environment variable is required (server URL and repository were resolved from package.json)");
4088
3767
  return {
4089
- dir,
4090
- imageName: `${repoName}-${name ?? dir}`,
4091
- version,
4092
- docker
3768
+ type: "forgejo",
3769
+ conn: {
3770
+ serverUrl: serverUrl ?? `${parsed.protocol}//${parsed.hostname}`,
3771
+ repository: repository ?? parsed.repository,
3772
+ token: resolvedToken
3773
+ }
4093
3774
  };
4094
3775
  }
4095
- /** Parse semver version string into major, minor, patch components. */
4096
- function parseSemver(version) {
4097
- const clean = version.replace(/^v/, "");
4098
- const match = /^(\d+)\.(\d+)\.(\d+)/.exec(clean);
4099
- if (!match?.[1] || !match[2] || !match[3]) throw new FatalError(`Invalid semver version: ${version}`);
3776
+ function parseRepositoryUrl(cwd) {
3777
+ const pkgPath = path.join(cwd, "package.json");
3778
+ let raw;
3779
+ try {
3780
+ raw = readFileSync(pkgPath, "utf-8");
3781
+ } catch {
3782
+ return null;
3783
+ }
3784
+ const pkg = z.object({ repository: RepositorySchema.optional() }).safeParse(JSON.parse(raw));
3785
+ if (!pkg.success) return null;
3786
+ const repo = pkg.data.repository;
3787
+ if (!repo) return null;
3788
+ return parseGitUrl(typeof repo === "string" ? repo : repo.url);
3789
+ }
3790
+ function parseGitUrl(urlStr) {
3791
+ try {
3792
+ const url = new URL(urlStr);
3793
+ const pathname = url.pathname.replace(/\.git$/, "").replace(/^\//, "");
3794
+ if (!pathname.includes("/")) return null;
3795
+ return {
3796
+ protocol: url.protocol,
3797
+ hostname: url.hostname,
3798
+ repository: pathname
3799
+ };
3800
+ } catch {
3801
+ return null;
3802
+ }
3803
+ }
3804
+ //#endregion
3805
+ //#region src/commands/release-changesets.ts
3806
+ const releaseForgejoCommand = defineCommand({
3807
+ meta: {
3808
+ name: "release:changesets",
3809
+ description: "Changesets version/publish for Forgejo CI"
3810
+ },
3811
+ args: {
3812
+ "dry-run": {
3813
+ type: "boolean",
3814
+ description: "Skip push, API calls, and publishing side effects"
3815
+ },
3816
+ verbose: {
3817
+ type: "boolean",
3818
+ description: "Enable detailed debug logging (also enabled by RELEASE_DEBUG env var)"
3819
+ }
3820
+ },
3821
+ async run({ args }) {
3822
+ if ((await runRelease(buildReleaseConfig({
3823
+ dryRun: args["dry-run"] === true,
3824
+ verbose: args.verbose === true || process.env["RELEASE_DEBUG"] === "true"
3825
+ }), createRealExecutor())).mode === "none") process.exitCode = 0;
3826
+ }
3827
+ });
3828
+ /** Build release config from environment / package.json and CLI flags. */
3829
+ function buildReleaseConfig(flags) {
3830
+ const resolved = resolveConnection(process.cwd());
3831
+ if (resolved.type !== "forgejo") throw new FatalError("release:changesets requires a Forgejo repository");
4100
3832
  return {
4101
- major: Number(match[1]),
4102
- minor: Number(match[2]),
4103
- patch: Number(match[3])
3833
+ ...resolved.conn,
3834
+ cwd: process.cwd(),
3835
+ dryRun: flags.dryRun ?? false,
3836
+ verbose: flags.verbose ?? false
4104
3837
  };
4105
3838
  }
4106
- /** Generate semver tag variants: latest, vX.Y.Z, vX.Y, vX */
4107
- function generateTags(version) {
4108
- const { major, minor, patch } = parseSemver(version);
4109
- return [
4110
- "latest",
4111
- `v${major}.${minor}.${patch}`,
4112
- `v${major}.${minor}`,
4113
- `v${major}`
4114
- ];
3839
+ /** Resolve the current branch from CI env vars or git. */
3840
+ function getCurrentBranch(executor, cwd) {
3841
+ const ref = process.env["GITHUB_REF"];
3842
+ if (ref?.startsWith("refs/heads/")) return ref.slice(11);
3843
+ return executor.exec("git rev-parse --abbrev-ref HEAD", { cwd }).stdout.trim();
4115
3844
  }
4116
- /** Build the full image reference: namespace/imageName:tag */
4117
- function imageRef(namespace, imageName, tag) {
4118
- return `${namespace}/${imageName}:${tag}`;
3845
+ /** Core release logic testable with a mock executor. */
3846
+ async function runRelease(config, executor) {
3847
+ const branch = getCurrentBranch(executor, config.cwd);
3848
+ if (branch !== "main") {
3849
+ debug(config, `Skipping release on non-main branch: ${branch}`);
3850
+ return { mode: "none" };
3851
+ }
3852
+ executor.exec("git config user.name \"forgejo-actions[bot]\"", { cwd: config.cwd });
3853
+ executor.exec("git config user.email \"forgejo-actions[bot]@noreply.localhost\"", { cwd: config.cwd });
3854
+ const changesetFiles = executor.listChangesetFiles(config.cwd);
3855
+ debug(config, `Changeset files found: ${changesetFiles.length > 0 ? changesetFiles.join(", ") : "(none)"}`);
3856
+ if (changesetFiles.length > 0) {
3857
+ debug(config, "Entering version mode");
3858
+ return runVersionMode(executor, config);
3859
+ }
3860
+ debug(config, "Entering publish mode");
3861
+ return runPublishMode(executor, config);
4119
3862
  }
4120
- function log$1(message) {
4121
- console.log(message);
3863
+ //#endregion
3864
+ //#region src/commands/release-trigger.ts
3865
+ const releaseTriggerCommand = defineCommand({
3866
+ meta: {
3867
+ name: "release:trigger",
3868
+ description: "Trigger the release CI workflow"
3869
+ },
3870
+ args: { ref: {
3871
+ type: "string",
3872
+ description: "Git ref to trigger on (default: main)",
3873
+ required: false
3874
+ } },
3875
+ async run({ args }) {
3876
+ const ref = args.ref ?? "main";
3877
+ const resolved = resolveConnection(process.cwd());
3878
+ if (resolved.type === "forgejo") await triggerForgejo(resolved.conn, ref);
3879
+ else triggerGitHub(ref);
3880
+ }
3881
+ });
3882
+ async function triggerForgejo(conn, ref) {
3883
+ const url = `${conn.serverUrl}/api/v1/repos/${conn.repository}/actions/workflows/release.yml/dispatches`;
3884
+ const res = await fetch(url, {
3885
+ method: "POST",
3886
+ headers: {
3887
+ Authorization: `token ${conn.token}`,
3888
+ "Content-Type": "application/json"
3889
+ },
3890
+ body: JSON.stringify({ ref })
3891
+ });
3892
+ if (!res.ok) throw new FatalError(`Failed to trigger Forgejo workflow: ${res.status} ${res.statusText}`);
3893
+ p.log.info(`Triggered release workflow on Forgejo (ref: ${ref})`);
4122
3894
  }
4123
- function debug(verbose, message) {
4124
- if (verbose) console.log(`[debug] ${message}`);
3895
+ function triggerGitHub(ref) {
3896
+ createRealExecutor().exec(`gh workflow run release.yml --ref ${ref}`, { cwd: process.cwd() });
3897
+ p.log.info(`Triggered release workflow on GitHub (ref: ${ref})`);
4125
3898
  }
4126
- /** Read the repo name from root package.json. */
4127
- function readRepoName(executor, cwd) {
4128
- const rootPkgRaw = executor.readFile(path.join(cwd, "package.json"));
4129
- if (!rootPkgRaw) throw new FatalError("No package.json found in project root");
4130
- const repoName = parsePackageJson(rootPkgRaw)?.name;
4131
- if (!repoName) throw new FatalError("Root package.json must have a name field");
4132
- return repoName;
3899
+ //#endregion
3900
+ //#region src/commands/forgejo-create-release.ts
3901
+ const createForgejoReleaseCommand = defineCommand({
3902
+ meta: {
3903
+ name: "forgejo:create-release",
3904
+ description: "Create a Forgejo release for a given tag"
3905
+ },
3906
+ args: { tag: {
3907
+ type: "string",
3908
+ description: "Git tag to create a release for",
3909
+ required: true
3910
+ } },
3911
+ async run({ args }) {
3912
+ const resolved = resolveConnection(process.cwd());
3913
+ if (resolved.type !== "forgejo") throw new FatalError("forgejo:create-release requires a Forgejo repository");
3914
+ const executor = createRealExecutor();
3915
+ const conn = resolved.conn;
3916
+ if (await findRelease(executor, conn, args.tag)) {
3917
+ p.log.info(`Release for ${args.tag} already exists — skipping`);
3918
+ return;
3919
+ }
3920
+ await createRelease(executor, conn, args.tag);
3921
+ p.log.info(`Created Forgejo release for ${args.tag}`);
3922
+ }
3923
+ });
3924
+ //#endregion
3925
+ //#region src/commands/changesets-merge.ts
3926
+ const HEAD_BRANCH = "changeset-release/main";
3927
+ const releaseMergeCommand = defineCommand({
3928
+ meta: {
3929
+ name: "changesets:merge",
3930
+ description: "Merge the open changesets version PR"
3931
+ },
3932
+ args: { "dry-run": {
3933
+ type: "boolean",
3934
+ description: "Show what would be merged without actually merging"
3935
+ } },
3936
+ async run({ args }) {
3937
+ const dryRun = args["dry-run"] === true;
3938
+ const resolved = resolveConnection(process.cwd());
3939
+ if (resolved.type === "forgejo") await mergeForgejo(resolved.conn, dryRun);
3940
+ else mergeGitHub(dryRun);
3941
+ }
3942
+ });
3943
+ async function mergeForgejo(conn, dryRun) {
3944
+ const executor = createRealExecutor();
3945
+ const prNumber = await findOpenPr(executor, conn, HEAD_BRANCH);
3946
+ if (prNumber === null) throw new FatalError(`No open PR found for branch ${HEAD_BRANCH}`);
3947
+ if (dryRun) {
3948
+ p.log.info(`[dry-run] Would merge PR #${String(prNumber)} and delete branch ${HEAD_BRANCH}`);
3949
+ return;
3950
+ }
3951
+ await mergePr(executor, conn, prNumber, {
3952
+ method: "merge",
3953
+ deleteBranch: true
3954
+ });
3955
+ p.log.info(`Merged PR #${String(prNumber)} and deleted branch ${HEAD_BRANCH}`);
4133
3956
  }
4134
- /** Build a single docker image from its config. Paths are resolved relative to cwd. */
4135
- function buildImage(executor, pkg, cwd, verbose, extraArgs) {
4136
- const dockerfilePath = path.resolve(cwd, pkg.docker.dockerfile);
4137
- const contextPath = path.resolve(cwd, pkg.docker.context);
4138
- const command = [
4139
- "docker build",
4140
- `-f ${dockerfilePath}`,
4141
- `-t ${pkg.imageName}:latest`,
4142
- ...extraArgs,
4143
- contextPath
4144
- ].join(" ");
4145
- debug(verbose, `Running: ${command}`);
4146
- const buildResult = executor.exec(command);
4147
- debug(verbose, `Build stdout: ${buildResult.stdout}`);
4148
- if (buildResult.exitCode !== 0) throw new FatalError(`docker build failed for ${pkg.dir} (exit ${buildResult.exitCode}): ${buildResult.stderr}`);
3957
+ function mergeGitHub(dryRun) {
3958
+ const executor = createRealExecutor();
3959
+ if (dryRun) {
3960
+ const prNum = executor.exec(`gh pr view ${HEAD_BRANCH} --json number --jq .number`, { cwd: process.cwd() }).stdout.trim();
3961
+ if (!prNum) throw new FatalError(`No open PR found for branch ${HEAD_BRANCH}`);
3962
+ p.log.info(`[dry-run] Would merge PR #${prNum} and delete branch ${HEAD_BRANCH}`);
3963
+ return;
3964
+ }
3965
+ executor.exec(`gh pr merge ${HEAD_BRANCH} --merge --delete-branch`, { cwd: process.cwd() });
3966
+ p.log.info(`Merged changesets PR and deleted branch ${HEAD_BRANCH}`);
4149
3967
  }
3968
+ //#endregion
3969
+ //#region src/release/simple.ts
4150
3970
  /**
4151
- * Detect packages with docker config in .tooling.json and build each one.
4152
- * Runs `docker build -f <dockerfile> -t <image-name>:latest <context>` for each package.
4153
- * Dockerfile and context paths are resolved relative to the project root.
4154
- *
4155
- * When `packageDir` is set, builds only that single package (for use as an image:build script).
3971
+ * Compute sliding version tags from a semver version string.
3972
+ * For "1.2.3" returns ["v1", "v1.2"]. Strips prerelease suffixes.
4156
3973
  */
4157
- function runDockerBuild(executor, config) {
4158
- const repoName = readRepoName(executor, config.cwd);
4159
- if (config.packageDir) {
4160
- const pkg = readSinglePackageDocker(executor, config.cwd, config.packageDir, repoName);
4161
- log$1(`Building image for ${pkg.dir} (${pkg.imageName}:latest)...`);
4162
- buildImage(executor, pkg, config.cwd, config.verbose, config.extraArgs);
4163
- log$1(`Built ${pkg.imageName}:latest`);
4164
- return { packages: [pkg] };
3974
+ function computeSlidingTags(version) {
3975
+ const parts = (version.split("-")[0] ?? version).split(".");
3976
+ if (parts.length < 2 || !parts[0] || !parts[1]) throw new FatalError(`Invalid version format "${version}". Expected semver (X.Y.Z)`);
3977
+ return [`v${parts[0]}`, `v${parts[0]}.${parts[1]}`];
3978
+ }
3979
+ /** Build the commit-and-tag-version command with appropriate flags. */
3980
+ function buildCommand(config) {
3981
+ const args = ["pnpm exec commit-and-tag-version"];
3982
+ if (config.dryRun) args.push("--dry-run");
3983
+ if (config.firstRelease) args.push("--first-release");
3984
+ if (config.releaseAs) args.push(`--release-as ${config.releaseAs}`);
3985
+ if (config.prerelease) args.push(`--prerelease ${config.prerelease}`);
3986
+ return args.join(" ");
3987
+ }
3988
+ /** Read the current version from package.json. */
3989
+ function readVersion(executor, cwd) {
3990
+ const raw = executor.readFile(path.join(cwd, "package.json"));
3991
+ if (!raw) throw new FatalError("Could not read package.json");
3992
+ const pkg = parsePackageJson(raw);
3993
+ if (!pkg?.version) throw new FatalError("No version field found in package.json");
3994
+ return pkg.version;
3995
+ }
3996
+ /** Run the full commit-and-tag-version release flow. */
3997
+ async function runSimpleRelease(executor, config) {
3998
+ const command = buildCommand(config);
3999
+ p.log.info(`Running: ${command}`);
4000
+ const versionResult = executor.exec(command, { cwd: config.cwd });
4001
+ debugExec(config, "commit-and-tag-version", versionResult);
4002
+ if (versionResult.exitCode !== 0) throw new FatalError(`commit-and-tag-version failed (exit code ${String(versionResult.exitCode)}):\n${versionResult.stderr || versionResult.stdout}`);
4003
+ const version = readVersion(executor, config.cwd);
4004
+ debug(config, `New version: ${version}`);
4005
+ const tagResult = executor.exec("git describe --tags --abbrev=0", { cwd: config.cwd });
4006
+ debugExec(config, "git describe", tagResult);
4007
+ const tag = tagResult.stdout.trim();
4008
+ if (!tag) throw new FatalError("Could not determine the new tag from git describe");
4009
+ p.log.info(`Version ${version} tagged as ${tag}`);
4010
+ if (config.dryRun) {
4011
+ const slidingTags = config.noSlidingTags ? [] : computeSlidingTags(version);
4012
+ p.log.info(`[dry-run] Would push to origin with --follow-tags`);
4013
+ if (slidingTags.length > 0) p.log.info(`[dry-run] Would create sliding tags: ${slidingTags.join(", ")}`);
4014
+ if (!config.noRelease && config.platform) p.log.info(`[dry-run] Would create ${config.platform.type} release for ${tag}`);
4015
+ return {
4016
+ version,
4017
+ tag,
4018
+ slidingTags,
4019
+ pushed: false,
4020
+ releaseCreated: false
4021
+ };
4022
+ }
4023
+ let pushed = false;
4024
+ if (!config.noPush) {
4025
+ const branch = executor.exec("git rev-parse --abbrev-ref HEAD", { cwd: config.cwd }).stdout.trim() || "main";
4026
+ debug(config, `Pushing to origin/${branch}`);
4027
+ const pushResult = executor.exec(`git push --follow-tags origin ${branch}`, { cwd: config.cwd });
4028
+ debugExec(config, "git push", pushResult);
4029
+ if (pushResult.exitCode !== 0) throw new FatalError(`git push failed (exit code ${String(pushResult.exitCode)}):\n${pushResult.stderr || pushResult.stdout}`);
4030
+ pushed = true;
4031
+ p.log.info("Pushed to origin");
4032
+ }
4033
+ let slidingTags = [];
4034
+ if (!config.noSlidingTags && pushed) {
4035
+ slidingTags = computeSlidingTags(version);
4036
+ for (const slidingTag of slidingTags) executor.exec(`git tag -f ${slidingTag}`, { cwd: config.cwd });
4037
+ const forcePushResult = executor.exec(`git push origin ${slidingTags.join(" ")} --force`, { cwd: config.cwd });
4038
+ debugExec(config, "force-push sliding tags", forcePushResult);
4039
+ if (forcePushResult.exitCode !== 0) p.log.warn(`Warning: Failed to push sliding tags: ${forcePushResult.stderr || forcePushResult.stdout}`);
4040
+ else p.log.info(`Created sliding tags: ${slidingTags.join(", ")}`);
4041
+ }
4042
+ let releaseCreated = false;
4043
+ if (!config.noRelease && config.platform) releaseCreated = await createPlatformRelease(executor, config, tag);
4044
+ return {
4045
+ version,
4046
+ tag,
4047
+ slidingTags,
4048
+ pushed,
4049
+ releaseCreated
4050
+ };
4051
+ }
4052
+ async function createPlatformRelease(executor, config, tag) {
4053
+ if (!config.platform) return false;
4054
+ if (config.platform.type === "forgejo") {
4055
+ if (await findRelease(executor, config.platform.conn, tag)) {
4056
+ debug(config, `Release for ${tag} already exists, skipping`);
4057
+ return false;
4058
+ }
4059
+ await createRelease(executor, config.platform.conn, tag);
4060
+ p.log.info(`Created Forgejo release for ${tag}`);
4061
+ return true;
4165
4062
  }
4166
- const packages = detectDockerPackages(executor, config.cwd, repoName);
4167
- if (packages.length === 0) {
4168
- log$1("No packages with docker config found");
4169
- return { packages: [] };
4063
+ const ghResult = executor.exec(`gh release create ${tag} --generate-notes`, { cwd: config.cwd });
4064
+ debugExec(config, "gh release create", ghResult);
4065
+ if (ghResult.exitCode !== 0) {
4066
+ p.log.warn(`Warning: Failed to create GitHub release: ${ghResult.stderr || ghResult.stdout}`);
4067
+ return false;
4170
4068
  }
4171
- log$1(`Found ${packages.length} Docker package(s): ${packages.map((p) => p.dir).join(", ")}`);
4172
- for (const pkg of packages) {
4173
- log$1(`Building image for ${pkg.dir} (${pkg.imageName}:latest)...`);
4174
- buildImage(executor, pkg, config.cwd, config.verbose, config.extraArgs);
4069
+ p.log.info(`Created GitHub release for ${tag}`);
4070
+ return true;
4071
+ }
4072
+ //#endregion
4073
+ //#region src/commands/release-simple.ts
4074
+ const releaseSimpleCommand = defineCommand({
4075
+ meta: {
4076
+ name: "release:simple",
4077
+ description: "Run commit-and-tag-version, push, create sliding tags, and create a platform release"
4078
+ },
4079
+ args: {
4080
+ "dry-run": {
4081
+ type: "boolean",
4082
+ description: "Pass --dry-run to commit-and-tag-version and skip all remote operations"
4083
+ },
4084
+ verbose: {
4085
+ type: "boolean",
4086
+ description: "Enable detailed debug logging (also enabled by RELEASE_DEBUG env var)"
4087
+ },
4088
+ "no-push": {
4089
+ type: "boolean",
4090
+ description: "Run commit-and-tag-version but skip push and remote operations"
4091
+ },
4092
+ "no-sliding-tags": {
4093
+ type: "boolean",
4094
+ description: "Skip creating sliding major/minor version tags (vX, vX.Y)"
4095
+ },
4096
+ "no-release": {
4097
+ type: "boolean",
4098
+ description: "Skip Forgejo/GitHub release creation"
4099
+ },
4100
+ "first-release": {
4101
+ type: "boolean",
4102
+ description: "Pass --first-release to commit-and-tag-version (skip version bump)"
4103
+ },
4104
+ "release-as": {
4105
+ type: "string",
4106
+ description: "Force a specific version (passed to commit-and-tag-version --release-as)"
4107
+ },
4108
+ prerelease: {
4109
+ type: "string",
4110
+ description: "Create a prerelease with the given tag (e.g., beta, alpha)"
4111
+ }
4112
+ },
4113
+ async run({ args }) {
4114
+ const cwd = process.cwd();
4115
+ const verbose = args.verbose === true || process.env["RELEASE_DEBUG"] === "true";
4116
+ const noRelease = args["no-release"] === true;
4117
+ let platform;
4118
+ if (!noRelease) {
4119
+ const resolved = resolveConnection(cwd);
4120
+ if (resolved.type === "forgejo") platform = {
4121
+ type: "forgejo",
4122
+ conn: resolved.conn
4123
+ };
4124
+ else platform = { type: "github" };
4125
+ }
4126
+ const config = {
4127
+ cwd,
4128
+ dryRun: args["dry-run"] === true,
4129
+ verbose,
4130
+ noPush: args["no-push"] === true,
4131
+ noSlidingTags: args["no-sliding-tags"] === true,
4132
+ noRelease,
4133
+ firstRelease: args["first-release"] === true,
4134
+ releaseAs: args["release-as"],
4135
+ prerelease: args.prerelease,
4136
+ platform
4137
+ };
4138
+ await runSimpleRelease(createRealExecutor(), config);
4139
+ }
4140
+ });
4141
+ //#endregion
4142
+ //#region src/commands/repo-run-checks.ts
4143
+ const CHECKS = [
4144
+ { name: "build" },
4145
+ { name: "typecheck" },
4146
+ { name: "lint" },
4147
+ { name: "test" },
4148
+ {
4149
+ name: "format",
4150
+ args: "--check"
4151
+ },
4152
+ { name: "knip" },
4153
+ { name: "tooling:check" },
4154
+ { name: "docker:check" }
4155
+ ];
4156
+ function defaultGetScripts(targetDir) {
4157
+ try {
4158
+ const pkg = parsePackageJson(readFileSync(path.join(targetDir, "package.json"), "utf-8"));
4159
+ return new Set(Object.keys(pkg?.scripts ?? {}));
4160
+ } catch {
4161
+ return /* @__PURE__ */ new Set();
4175
4162
  }
4176
- log$1(`Built ${packages.length} image(s)`);
4177
- return { packages };
4178
4163
  }
4179
- /**
4180
- * Run the full Docker publish pipeline:
4181
- * 1. Build all images via runDockerBuild
4182
- * 2. Login to registry
4183
- * 3. Tag each image with semver variants from its own package.json version
4184
- * 4. Push all tags
4185
- * 5. Logout from registry
4186
- */
4187
- function runDockerPublish(executor, config) {
4188
- const { packages } = runDockerBuild(executor, {
4189
- cwd: config.cwd,
4190
- packageDir: void 0,
4191
- verbose: config.verbose,
4192
- extraArgs: []
4193
- });
4194
- if (packages.length === 0) return {
4195
- packages: [],
4196
- tags: []
4197
- };
4198
- for (const pkg of packages) if (!pkg.version) throw new FatalError(`Package ${pkg.dir} has docker config but no version in package.json`);
4199
- if (!config.dryRun) {
4200
- log$1(`Logging in to ${config.registryHost}...`);
4201
- const loginResult = executor.exec(`echo "${config.password}" | docker login ${config.registryHost} -u ${config.username} --password-stdin`);
4202
- if (loginResult.exitCode !== 0) throw new FatalError(`Docker login failed: ${loginResult.stderr}`);
4203
- } else log$1("[dry-run] Skipping docker login");
4204
- const allTags = [];
4164
+ function defaultExecCommand(cmd, cwd) {
4205
4165
  try {
4206
- for (const pkg of packages) {
4207
- const tags = generateTags(pkg.version ?? "");
4208
- log$1(`${pkg.dir} v${pkg.version} → tags: ${tags.join(", ")}`);
4209
- for (const tag of tags) {
4210
- const ref = imageRef(config.registryNamespace, pkg.imageName, tag);
4211
- allTags.push(ref);
4212
- log$1(`Tagging ${pkg.imageName} → ${ref}`);
4213
- const tagResult = executor.exec(`docker tag ${pkg.imageName} ${ref}`);
4214
- if (tagResult.exitCode !== 0) throw new FatalError(`docker tag failed: ${tagResult.stderr}`);
4215
- if (!config.dryRun) {
4216
- log$1(`Pushing ${ref}...`);
4217
- const pushResult = executor.exec(`docker push ${ref}`);
4218
- if (pushResult.exitCode !== 0) throw new FatalError(`docker push failed: ${pushResult.stderr}`);
4219
- } else log$1(`[dry-run] Skipping push for ${ref}`);
4220
- }
4166
+ execSync(cmd, {
4167
+ cwd,
4168
+ stdio: "inherit"
4169
+ });
4170
+ return 0;
4171
+ } catch (err) {
4172
+ if (isExecSyncError(err)) return err.status;
4173
+ return 1;
4174
+ }
4175
+ }
4176
+ const ciLog = (msg) => console.log(msg);
4177
+ function runRunChecks(targetDir, options = {}) {
4178
+ const exec = options.execCommand ?? defaultExecCommand;
4179
+ const getScripts = options.getScripts ?? defaultGetScripts;
4180
+ const skip = options.skip ?? /* @__PURE__ */ new Set();
4181
+ const add = options.add ?? [];
4182
+ const isCI = Boolean(process.env["CI"]);
4183
+ const failFast = options.failFast ?? !isCI;
4184
+ const definedScripts = getScripts(targetDir);
4185
+ const addedNames = new Set(add);
4186
+ const allChecks = [...CHECKS, ...add.map((name) => ({ name }))];
4187
+ const failures = [];
4188
+ const notDefined = [];
4189
+ for (const check of allChecks) {
4190
+ if (skip.has(check.name)) continue;
4191
+ if (!definedScripts.has(check.name)) {
4192
+ if (addedNames.has(check.name)) {
4193
+ p.log.error(`${check.name} not defined in package.json`);
4194
+ failures.push(check.name);
4195
+ } else notDefined.push(check.name);
4196
+ continue;
4221
4197
  }
4222
- } finally {
4223
- if (!config.dryRun) {
4224
- log$1(`Logging out from ${config.registryHost}...`);
4225
- executor.exec(`docker logout ${config.registryHost}`);
4198
+ const cmd = check.args ? `pnpm run ${check.name} ${check.args}` : `pnpm run ${check.name}`;
4199
+ if (isCI) ciLog(`::group::${check.name}`);
4200
+ const exitCode = exec(cmd, targetDir);
4201
+ if (isCI) ciLog("::endgroup::");
4202
+ if (exitCode === 0) p.log.success(check.name);
4203
+ else {
4204
+ if (isCI) ciLog(`::error::${check.name} failed`);
4205
+ p.log.error(`${check.name} failed`);
4206
+ failures.push(check.name);
4207
+ if (failFast) return 1;
4226
4208
  }
4227
4209
  }
4228
- log$1(`Published ${allTags.length} image tag(s)`);
4229
- return {
4230
- packages,
4231
- tags: allTags
4232
- };
4210
+ if (notDefined.length > 0) p.log.info(`Skipped (not defined): ${notDefined.join(", ")}`);
4211
+ if (failures.length > 0) {
4212
+ p.log.error(`Failed checks: ${failures.join(", ")}`);
4213
+ return 1;
4214
+ }
4215
+ p.log.success("All checks passed");
4216
+ return 0;
4233
4217
  }
4218
+ const runChecksCommand = defineCommand({
4219
+ meta: {
4220
+ name: "checks:run",
4221
+ description: "Run all standard checks (build, typecheck, lint, test, format, knip, tooling:check, docker:check)"
4222
+ },
4223
+ args: {
4224
+ dir: {
4225
+ type: "positional",
4226
+ description: "Target directory (default: current directory)",
4227
+ required: false
4228
+ },
4229
+ skip: {
4230
+ type: "string",
4231
+ description: "Comma-separated list of checks to skip (build, typecheck, lint, test, format, knip, tooling:check, docker:check)",
4232
+ required: false
4233
+ },
4234
+ add: {
4235
+ type: "string",
4236
+ description: "Comma-separated list of additional check names to run (uses pnpm run <name>)",
4237
+ required: false
4238
+ },
4239
+ "fail-fast": {
4240
+ type: "boolean",
4241
+ description: "Stop on first failure (default: true in dev, false in CI)",
4242
+ required: false
4243
+ }
4244
+ },
4245
+ run({ args }) {
4246
+ const exitCode = runRunChecks(path.resolve(args.dir ?? "."), {
4247
+ skip: args.skip ? new Set(args.skip.split(",").map((s) => s.trim())) : void 0,
4248
+ add: args.add ? args.add.split(",").map((s) => s.trim()) : void 0,
4249
+ failFast: args["fail-fast"] === true ? true : args["fail-fast"] === false ? false : void 0
4250
+ });
4251
+ process.exitCode = exitCode;
4252
+ }
4253
+ });
4234
4254
  //#endregion
4235
4255
  //#region src/commands/publish-docker.ts
4236
4256
  function requireEnv(name) {
@@ -4587,7 +4607,7 @@ const dockerCheckCommand = defineCommand({
4587
4607
  const main = defineCommand({
4588
4608
  meta: {
4589
4609
  name: "tooling",
4590
- version: "0.23.0",
4610
+ version: "0.24.0",
4591
4611
  description: "Bootstrap and maintain standardized TypeScript project tooling"
4592
4612
  },
4593
4613
  subCommands: {
@@ -4603,7 +4623,11 @@ const main = defineCommand({
4603
4623
  "docker:check": dockerCheckCommand
4604
4624
  }
4605
4625
  });
4606
- console.log(`@bensandee/tooling v0.23.0`);
4607
- runMain(main);
4626
+ console.log(`@bensandee/tooling v0.24.0`);
4627
+ async function run() {
4628
+ await runMain(main);
4629
+ process.exit(process.exitCode ?? 0);
4630
+ }
4631
+ run();
4608
4632
  //#endregion
4609
4633
  export {};