@bensandee/tooling 0.19.0 → 0.22.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2):
  1. package/dist/bin.mjs +512 -478
  2. package/package.json +1 -1
package/dist/bin.mjs CHANGED
@@ -7,7 +7,7 @@ import { existsSync, mkdirSync, readFileSync, readdirSync, rmSync, unlinkSync, w
7
7
  import JSON5 from "json5";
8
8
  import { parse } from "jsonc-parser";
9
9
  import { z } from "zod";
10
- import { isMap, isSeq, parse as parse$1, parseDocument, stringify } from "yaml";
10
+ import { isMap, isScalar, isSeq, parse as parse$1, parseDocument, stringify } from "yaml";
11
11
  import { execSync } from "node:child_process";
12
12
  import { FatalError, TransientError, UnexpectedError } from "@bensandee/common";
13
13
  import { tmpdir } from "node:os";
@@ -206,6 +206,32 @@ function computeDefaults(targetDir) {
206
206
  detectPackageTypes: true
207
207
  };
208
208
  }
209
+ /**
210
+ * List packages that would be published to npm (non-private, have a name).
211
+ * For monorepos, scans packages/ subdirectories. For single repos, checks root package.json.
212
+ * An optional pre-parsed root package.json can be passed to avoid re-reading from disk.
213
+ */
214
+ function getPublishablePackages(targetDir, structure, rootPackageJson) {
215
+ if (structure === "monorepo") {
216
+ const packages = getMonorepoPackages(targetDir);
217
+ const results = [];
218
+ for (const pkg of packages) {
219
+ const pkgJson = readPackageJson(pkg.dir);
220
+ if (!pkgJson || pkgJson.private || !pkgJson.name) continue;
221
+ results.push({
222
+ name: pkgJson.name,
223
+ dir: pkg.dir
224
+ });
225
+ }
226
+ return results;
227
+ }
228
+ const pkg = rootPackageJson ?? readPackageJson(targetDir);
229
+ if (!pkg || pkg.private || !pkg.name) return [];
230
+ return [{
231
+ name: pkg.name,
232
+ dir: targetDir
233
+ }];
234
+ }
209
235
  /** List packages in a monorepo's packages/ directory. */
210
236
  function getMonorepoPackages(targetDir) {
211
237
  const packagesDir = path.join(targetDir, "packages");
@@ -569,6 +595,306 @@ function mergeWithSavedConfig(detected, saved) {
569
595
  };
570
596
  }
571
597
  //#endregion
598
+ //#region src/utils/yaml-merge.ts
599
+ const IGNORE_PATTERN = "@bensandee/tooling:ignore";
600
+ const FORGEJO_SCHEMA_COMMENT = "# yaml-language-server: $schema=../../.vscode/forgejo-workflow.schema.json\n";
601
+ /** Returns a yaml-language-server schema comment for Forgejo workflows, empty string otherwise. */
602
+ function workflowSchemaComment(ci) {
603
+ return ci === "forgejo" ? FORGEJO_SCHEMA_COMMENT : "";
604
+ }
605
+ /** Prepend the Forgejo schema comment if it's not already present. No-op for GitHub. */
606
+ function ensureSchemaComment(content, ci) {
607
+ if (ci !== "forgejo") return content;
608
+ if (content.includes("yaml-language-server")) return content;
609
+ return FORGEJO_SCHEMA_COMMENT + content;
610
+ }
611
+ /** Check if a YAML file has an opt-out comment in the first 10 lines. */
612
+ function isToolingIgnored(content) {
613
+ return content.split("\n", 10).some((line) => line.includes(IGNORE_PATTERN));
614
+ }
615
+ /**
616
+ * Ensure required commands exist under `pre-commit.commands` in a lefthook config.
617
+ * Only adds missing commands — never modifies existing ones.
618
+ * Returns unchanged content if the file has an opt-out comment or can't be parsed.
619
+ */
620
+ function mergeLefthookCommands(existing, requiredCommands) {
621
+ if (isToolingIgnored(existing)) return {
622
+ content: existing,
623
+ changed: false
624
+ };
625
+ try {
626
+ const doc = parseDocument(existing);
627
+ let changed = false;
628
+ if (!doc.hasIn(["pre-commit", "commands"])) {
629
+ doc.setIn(["pre-commit", "commands"], requiredCommands);
630
+ return {
631
+ content: doc.toString(),
632
+ changed: true
633
+ };
634
+ }
635
+ const commands = doc.getIn(["pre-commit", "commands"]);
636
+ if (!isMap(commands)) return {
637
+ content: existing,
638
+ changed: false
639
+ };
640
+ for (const [name, config] of Object.entries(requiredCommands)) if (!commands.has(name)) {
641
+ commands.set(name, config);
642
+ changed = true;
643
+ }
644
+ return {
645
+ content: changed ? doc.toString() : existing,
646
+ changed
647
+ };
648
+ } catch {
649
+ return {
650
+ content: existing,
651
+ changed: false
652
+ };
653
+ }
654
+ }
655
+ /**
656
+ * Ensure required steps exist in a workflow job's steps array.
657
+ * Only adds missing steps at the end — never modifies existing ones.
658
+ * Returns unchanged content if the file has an opt-out comment or can't be parsed.
659
+ */
660
+ function mergeWorkflowSteps(existing, jobName, requiredSteps) {
661
+ if (isToolingIgnored(existing)) return {
662
+ content: existing,
663
+ changed: false
664
+ };
665
+ try {
666
+ const doc = parseDocument(existing);
667
+ const steps = doc.getIn([
668
+ "jobs",
669
+ jobName,
670
+ "steps"
671
+ ]);
672
+ if (!isSeq(steps)) return {
673
+ content: existing,
674
+ changed: false
675
+ };
676
+ let changed = false;
677
+ for (const { match, step } of requiredSteps) if (!steps.items.some((item) => {
678
+ if (!isMap(item)) return false;
679
+ if (match.run) {
680
+ const run = item.get("run");
681
+ return typeof run === "string" && run.includes(match.run);
682
+ }
683
+ if (match.uses) {
684
+ const uses = item.get("uses");
685
+ return typeof uses === "string" && uses.startsWith(match.uses);
686
+ }
687
+ return false;
688
+ })) {
689
+ steps.add(doc.createNode(step));
690
+ changed = true;
691
+ }
692
+ return {
693
+ content: changed ? doc.toString() : existing,
694
+ changed
695
+ };
696
+ } catch {
697
+ return {
698
+ content: existing,
699
+ changed: false
700
+ };
701
+ }
702
+ }
703
+ /**
704
+ * Add a job to an existing workflow YAML if it doesn't already exist.
705
+ * Returns unchanged content if the job already exists, the file has an opt-out comment,
706
+ * or the document can't be parsed.
707
+ */
708
+ /**
709
+ * Ensure a `concurrency` block exists at the workflow top level.
710
+ * Adds it if missing — never modifies an existing one.
711
+ * Returns unchanged content if the file has an opt-out comment or can't be parsed.
712
+ */
713
+ function ensureWorkflowConcurrency(existing, concurrency) {
714
+ if (isToolingIgnored(existing)) return {
715
+ content: existing,
716
+ changed: false
717
+ };
718
+ try {
719
+ const doc = parseDocument(existing);
720
+ if (doc.has("concurrency")) return {
721
+ content: existing,
722
+ changed: false
723
+ };
724
+ doc.set("concurrency", concurrency);
725
+ const contents = doc.contents;
726
+ if (isMap(contents)) {
727
+ const items = contents.items;
728
+ const nameIdx = items.findIndex((p) => isScalar(p.key) && p.key.value === "name");
729
+ const concPair = items.pop();
730
+ if (concPair) items.splice(nameIdx + 1, 0, concPair);
731
+ }
732
+ return {
733
+ content: doc.toString(),
734
+ changed: true
735
+ };
736
+ } catch {
737
+ return {
738
+ content: existing,
739
+ changed: false
740
+ };
741
+ }
742
+ }
743
+ //#endregion
744
+ //#region src/generators/deploy-ci.ts
745
+ /** Build a GitHub Actions expression like `${{ expr }}` without triggering no-template-curly-in-string. */
746
+ function actionsExpr$2(expr) {
747
+ return `\${{ ${expr} }}`;
748
+ }
749
+ function hasEnginesNode$2(ctx) {
750
+ return typeof ctx.packageJson?.["engines"]?.["node"] === "string";
751
+ }
752
+ function deployWorkflow(ci, nodeVersionYaml) {
753
+ return `${workflowSchemaComment(ci)}name: Deploy
754
+ on:
755
+ push:
756
+ tags:
757
+ - "v[0-9]+.[0-9]+.[0-9]+"
758
+
759
+ jobs:
760
+ deploy:
761
+ runs-on: ubuntu-latest
762
+ steps:
763
+ - uses: actions/checkout@v4
764
+ - uses: pnpm/action-setup@v4
765
+ - uses: actions/setup-node@v4
766
+ with:
767
+ ${nodeVersionYaml}
768
+ - run: pnpm install --frozen-lockfile
769
+ - name: Publish Docker images
770
+ env:
771
+ DOCKER_REGISTRY_HOST: ${actionsExpr$2("vars.DOCKER_REGISTRY_HOST")}
772
+ DOCKER_REGISTRY_NAMESPACE: ${actionsExpr$2("vars.DOCKER_REGISTRY_NAMESPACE")}
773
+ DOCKER_USERNAME: ${actionsExpr$2("secrets.DOCKER_USERNAME")}
774
+ DOCKER_PASSWORD: ${actionsExpr$2("secrets.DOCKER_PASSWORD")}
775
+ run: pnpm exec tooling docker:publish
776
+ `;
777
+ }
778
+ function requiredDeploySteps() {
779
+ return [
780
+ {
781
+ match: { uses: "actions/checkout" },
782
+ step: { uses: "actions/checkout@v4" }
783
+ },
784
+ {
785
+ match: { uses: "pnpm/action-setup" },
786
+ step: { uses: "pnpm/action-setup@v4" }
787
+ },
788
+ {
789
+ match: { uses: "actions/setup-node" },
790
+ step: { uses: "actions/setup-node@v4" }
791
+ },
792
+ {
793
+ match: { run: "pnpm install" },
794
+ step: { run: "pnpm install --frozen-lockfile" }
795
+ },
796
+ {
797
+ match: { run: "docker:publish" },
798
+ step: { run: "pnpm exec tooling docker:publish" }
799
+ }
800
+ ];
801
+ }
802
+ /** Convention paths to check for Dockerfiles. */
803
+ const CONVENTION_DOCKERFILE_PATHS$1 = ["Dockerfile", "docker/Dockerfile"];
804
+ const DockerMapSchema = z.object({ docker: z.record(z.string(), z.unknown()).optional() });
805
+ /** Get names of packages that have Docker builds (by convention or .tooling.json config). */
806
+ function getDockerPackageNames(ctx) {
807
+ const names = [];
808
+ const configRaw = ctx.read(".tooling.json");
809
+ if (configRaw) {
810
+ const result = DockerMapSchema.safeParse(JSON.parse(configRaw));
811
+ if (result.success && result.data.docker) names.push(...Object.keys(result.data.docker));
812
+ }
813
+ if (ctx.config.structure === "monorepo") {
814
+ const packages = getMonorepoPackages(ctx.targetDir);
815
+ for (const pkg of packages) {
816
+ const dirName = pkg.name.split("/").pop() ?? pkg.name;
817
+ if (names.includes(dirName)) continue;
818
+ for (const rel of CONVENTION_DOCKERFILE_PATHS$1) if (ctx.exists(`packages/${dirName}/${rel}`)) {
819
+ names.push(dirName);
820
+ break;
821
+ }
822
+ }
823
+ } else for (const rel of CONVENTION_DOCKERFILE_PATHS$1) if (ctx.exists(rel)) {
824
+ if (!names.includes(ctx.config.name)) names.push(ctx.config.name);
825
+ break;
826
+ }
827
+ return names;
828
+ }
829
+ /** Check whether any Docker packages exist by convention or .tooling.json config. */
830
+ function hasDockerPackages(ctx) {
831
+ return getDockerPackageNames(ctx).length > 0;
832
+ }
833
+ async function generateDeployCi(ctx) {
834
+ const filePath = "deploy-ci";
835
+ if (!hasDockerPackages(ctx) || ctx.config.ci === "none") return {
836
+ filePath,
837
+ action: "skipped",
838
+ description: "Deploy CI workflow not applicable"
839
+ };
840
+ const isGitHub = ctx.config.ci === "github";
841
+ const workflowPath = isGitHub ? ".github/workflows/publish.yml" : ".forgejo/workflows/publish.yml";
842
+ const nodeVersionYaml = hasEnginesNode$2(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
843
+ const content = deployWorkflow(ctx.config.ci, nodeVersionYaml);
844
+ if (ctx.exists(workflowPath)) {
845
+ const existing = ctx.read(workflowPath);
846
+ if (existing) {
847
+ if (existing === content || ensureSchemaComment(existing, ctx.config.ci) === content) return {
848
+ filePath: workflowPath,
849
+ action: "skipped",
850
+ description: "Deploy workflow already up to date"
851
+ };
852
+ const merged = mergeWorkflowSteps(existing, "deploy", requiredDeploySteps());
853
+ const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
854
+ if (withComment === content) {
855
+ ctx.write(workflowPath, content);
856
+ return {
857
+ filePath: workflowPath,
858
+ action: "updated",
859
+ description: "Added missing steps to deploy workflow"
860
+ };
861
+ }
862
+ if (await ctx.confirmOverwrite(workflowPath) === "skip") {
863
+ if (merged.changed || withComment !== merged.content) {
864
+ ctx.write(workflowPath, withComment);
865
+ return {
866
+ filePath: workflowPath,
867
+ action: "updated",
868
+ description: "Added missing steps to deploy workflow"
869
+ };
870
+ }
871
+ return {
872
+ filePath: workflowPath,
873
+ action: "skipped",
874
+ description: "Existing deploy workflow preserved"
875
+ };
876
+ }
877
+ ctx.write(workflowPath, content);
878
+ return {
879
+ filePath: workflowPath,
880
+ action: "updated",
881
+ description: "Replaced deploy workflow with updated template"
882
+ };
883
+ }
884
+ return {
885
+ filePath: workflowPath,
886
+ action: "skipped",
887
+ description: "Deploy workflow already up to date"
888
+ };
889
+ }
890
+ ctx.write(workflowPath, content);
891
+ return {
892
+ filePath: workflowPath,
893
+ action: "created",
894
+ description: `Generated ${isGitHub ? "GitHub" : "Forgejo"} Actions deploy workflow`
895
+ };
896
+ }
897
+ //#endregion
572
898
  //#region src/generators/package-json.ts
573
899
  const STANDARD_SCRIPTS_SINGLE = {
574
900
  build: "tsdown",
@@ -598,7 +924,9 @@ const MANAGED_SCRIPTS = {
598
924
  check: "checks:run",
599
925
  "ci:check": "pnpm check",
600
926
  "tooling:check": "repo:sync --check",
601
- "tooling:sync": "repo:sync"
927
+ "tooling:sync": "repo:sync",
928
+ "docker:build": "docker:build",
929
+ "docker:check": "docker:check"
602
930
  };
603
931
  /** Deprecated scripts to remove during migration. */
604
932
  const DEPRECATED_SCRIPTS = ["tooling:init", "tooling:update"];
@@ -644,9 +972,7 @@ function addReleaseDeps(deps, config) {
644
972
  deps["release-it"] = "18.1.2";
645
973
  if (config.structure === "monorepo") deps["@release-it/bumper"] = "7.0.2";
646
974
  break;
647
- case "simple":
648
- deps["commit-and-tag-version"] = "12.5.0";
649
- break;
975
+ case "simple": break;
650
976
  case "changesets":
651
977
  deps["@changesets/cli"] = "2.29.4";
652
978
  break;
@@ -657,7 +983,7 @@ function getAddedDevDepNames(config) {
657
983
  const deps = { ...ROOT_DEV_DEPS };
658
984
  if (config.structure !== "monorepo") Object.assign(deps, PER_PACKAGE_DEV_DEPS);
659
985
  deps["@bensandee/config"] = "0.8.2";
660
- deps["@bensandee/tooling"] = "0.19.0";
986
+ deps["@bensandee/tooling"] = "0.22.0";
661
987
  if (config.formatter === "oxfmt") deps["oxfmt"] = "0.35.0";
662
988
  if (config.formatter === "prettier") deps["prettier"] = "3.8.1";
663
989
  addReleaseDeps(deps, config);
@@ -675,10 +1001,14 @@ async function generatePackageJson(ctx) {
675
1001
  };
676
1002
  if (ctx.config.releaseStrategy === "changesets") allScripts["changeset"] = "changeset";
677
1003
  if (ctx.config.releaseStrategy !== "none" && ctx.config.releaseStrategy !== "changesets") allScripts["trigger-release"] = "pnpm exec tooling release:trigger";
1004
+ if (hasDockerPackages(ctx)) {
1005
+ allScripts["docker:build"] = "pnpm exec tooling docker:build";
1006
+ allScripts["docker:check"] = "pnpm exec tooling docker:check";
1007
+ }
678
1008
  const devDeps = { ...ROOT_DEV_DEPS };
679
1009
  if (!isMonorepo) Object.assign(devDeps, PER_PACKAGE_DEV_DEPS);
680
1010
  devDeps["@bensandee/config"] = isWorkspacePackage(ctx, "@bensandee/config") ? "workspace:*" : "0.8.2";
681
- devDeps["@bensandee/tooling"] = isWorkspacePackage(ctx, "@bensandee/tooling") ? "workspace:*" : "0.19.0";
1011
+ devDeps["@bensandee/tooling"] = isWorkspacePackage(ctx, "@bensandee/tooling") ? "workspace:*" : "0.22.0";
682
1012
  if (ctx.config.useEslintPlugin) devDeps["@bensandee/eslint-plugin"] = isWorkspacePackage(ctx, "@bensandee/eslint-plugin") ? "workspace:*" : "0.9.2";
683
1013
  if (ctx.config.formatter === "oxfmt") devDeps["oxfmt"] = "0.35.0";
684
1014
  if (ctx.config.formatter === "prettier") devDeps["prettier"] = "3.8.1";
@@ -1111,229 +1441,62 @@ const REQUIRED_ENTRIES = [
1111
1441
  "dist/",
1112
1442
  "*.tsbuildinfo",
1113
1443
  ".env",
1114
- ".env.*",
1115
- "!.env.example"
1116
- ];
1117
- /** Tooling-specific entries added during init/update but not required for repo:sync --check. */
1118
- const OPTIONAL_ENTRIES = [".tooling-migrate.md", ".tooling-archived/"];
1119
- const ALL_ENTRIES = [...REQUIRED_ENTRIES, ...OPTIONAL_ENTRIES];
1120
- /** Normalize a gitignore entry for comparison: strip leading `/` and trailing `/`. */
1121
- function normalizeEntry(entry) {
1122
- let s = entry.trim();
1123
- if (s.startsWith("/")) s = s.slice(1);
1124
- if (s.endsWith("/")) s = s.slice(0, -1);
1125
- return s;
1126
- }
1127
- async function generateGitignore(ctx) {
1128
- const filePath = ".gitignore";
1129
- const existing = ctx.read(filePath);
1130
- if (existing) {
1131
- const existingNormalized = new Set(existing.split("\n").map(normalizeEntry).filter((line) => line.length > 0));
1132
- const missing = ALL_ENTRIES.filter((entry) => !existingNormalized.has(normalizeEntry(entry)));
1133
- if (missing.length === 0) return {
1134
- filePath,
1135
- action: "skipped",
1136
- description: "Already has all standard entries"
1137
- };
1138
- const missingRequired = REQUIRED_ENTRIES.filter((entry) => !existingNormalized.has(normalizeEntry(entry)));
1139
- const updated = existing.trimEnd() + "\n\n# Added by @bensandee/tooling\n" + missing.join("\n") + "\n";
1140
- ctx.write(filePath, updated);
1141
- if (missingRequired.length === 0) return {
1142
- filePath,
1143
- action: "skipped",
1144
- description: "Only optional entries missing"
1145
- };
1146
- return {
1147
- filePath,
1148
- action: "updated",
1149
- description: `Appended ${String(missing.length)} missing entries`
1150
- };
1151
- }
1152
- ctx.write(filePath, ALL_ENTRIES.join("\n") + "\n");
1153
- return {
1154
- filePath,
1155
- action: "created",
1156
- description: "Generated .gitignore"
1157
- };
1158
- }
1159
- //#endregion
1160
- //#region src/utils/yaml-merge.ts
1161
- const IGNORE_PATTERN = "@bensandee/tooling:ignore";
1162
- const FORGEJO_SCHEMA_COMMENT = "# yaml-language-server: $schema=../../.vscode/forgejo-workflow.schema.json\n";
1163
- /** Returns a yaml-language-server schema comment for Forgejo workflows, empty string otherwise. */
1164
- function workflowSchemaComment(ci) {
1165
- return ci === "forgejo" ? FORGEJO_SCHEMA_COMMENT : "";
1166
- }
1167
- /** Prepend the Forgejo schema comment if it's not already present. No-op for GitHub. */
1168
- function ensureSchemaComment(content, ci) {
1169
- if (ci !== "forgejo") return content;
1170
- if (content.includes("yaml-language-server")) return content;
1171
- return FORGEJO_SCHEMA_COMMENT + content;
1172
- }
1173
- /** Check if a YAML file has an opt-out comment in the first 10 lines. */
1174
- function isToolingIgnored(content) {
1175
- return content.split("\n", 10).some((line) => line.includes(IGNORE_PATTERN));
1176
- }
1177
- /**
1178
- * Ensure required commands exist under `pre-commit.commands` in a lefthook config.
1179
- * Only adds missing commands — never modifies existing ones.
1180
- * Returns unchanged content if the file has an opt-out comment or can't be parsed.
1181
- */
1182
- function mergeLefthookCommands(existing, requiredCommands) {
1183
- if (isToolingIgnored(existing)) return {
1184
- content: existing,
1185
- changed: false
1186
- };
1187
- try {
1188
- const doc = parseDocument(existing);
1189
- let changed = false;
1190
- if (!doc.hasIn(["pre-commit", "commands"])) {
1191
- doc.setIn(["pre-commit", "commands"], requiredCommands);
1192
- return {
1193
- content: doc.toString(),
1194
- changed: true
1195
- };
1196
- }
1197
- const commands = doc.getIn(["pre-commit", "commands"]);
1198
- if (!isMap(commands)) return {
1199
- content: existing,
1200
- changed: false
1201
- };
1202
- for (const [name, config] of Object.entries(requiredCommands)) if (!commands.has(name)) {
1203
- commands.set(name, config);
1204
- changed = true;
1205
- }
1206
- return {
1207
- content: changed ? doc.toString() : existing,
1208
- changed
1209
- };
1210
- } catch {
1211
- return {
1212
- content: existing,
1213
- changed: false
1214
- };
1215
- }
1216
- }
1217
- /**
1218
- * Ensure required steps exist in a workflow job's steps array.
1219
- * Only adds missing steps at the end — never modifies existing ones.
1220
- * Returns unchanged content if the file has an opt-out comment or can't be parsed.
1221
- */
1222
- function mergeWorkflowSteps(existing, jobName, requiredSteps) {
1223
- if (isToolingIgnored(existing)) return {
1224
- content: existing,
1225
- changed: false
1226
- };
1227
- try {
1228
- const doc = parseDocument(existing);
1229
- const steps = doc.getIn([
1230
- "jobs",
1231
- jobName,
1232
- "steps"
1233
- ]);
1234
- if (!isSeq(steps)) return {
1235
- content: existing,
1236
- changed: false
1237
- };
1238
- let changed = false;
1239
- for (const { match, step } of requiredSteps) if (!steps.items.some((item) => {
1240
- if (!isMap(item)) return false;
1241
- if (match.run) {
1242
- const run = item.get("run");
1243
- return typeof run === "string" && run.includes(match.run);
1244
- }
1245
- if (match.uses) {
1246
- const uses = item.get("uses");
1247
- return typeof uses === "string" && uses.startsWith(match.uses);
1248
- }
1249
- return false;
1250
- })) {
1251
- steps.add(doc.createNode(step));
1252
- changed = true;
1253
- }
1254
- return {
1255
- content: changed ? doc.toString() : existing,
1256
- changed
1257
- };
1258
- } catch {
1259
- return {
1260
- content: existing,
1261
- changed: false
1262
- };
1263
- }
1264
- }
1265
- /**
1266
- * Add a job to an existing workflow YAML if it doesn't already exist.
1267
- * Returns unchanged content if the job already exists, the file has an opt-out comment,
1268
- * or the document can't be parsed.
1269
- */
1270
- /**
1271
- * Ensure a `concurrency` block exists at the workflow top level.
1272
- * Adds it if missing — never modifies an existing one.
1273
- * Returns unchanged content if the file has an opt-out comment or can't be parsed.
1274
- */
1275
- function ensureWorkflowConcurrency(existing, concurrency) {
1276
- if (isToolingIgnored(existing)) return {
1277
- content: existing,
1278
- changed: false
1279
- };
1280
- try {
1281
- const doc = parseDocument(existing);
1282
- if (doc.has("concurrency")) return {
1283
- content: existing,
1284
- changed: false
1285
- };
1286
- doc.set("concurrency", doc.createNode(concurrency));
1287
- return {
1288
- content: doc.toString(),
1289
- changed: true
1290
- };
1291
- } catch {
1292
- return {
1293
- content: existing,
1294
- changed: false
1295
- };
1296
- }
1444
+ ".env.*",
1445
+ "!.env.example"
1446
+ ];
1447
+ /** Tooling-specific entries added during init/update but not required for repo:sync --check. */
1448
+ const OPTIONAL_ENTRIES = [".tooling-migrate.md", ".tooling-archived/"];
1449
+ const ALL_ENTRIES = [...REQUIRED_ENTRIES, ...OPTIONAL_ENTRIES];
1450
+ /** Normalize a gitignore entry for comparison: strip leading `/` and trailing `/`. */
1451
+ function normalizeEntry(entry) {
1452
+ let s = entry.trim();
1453
+ if (s.startsWith("/")) s = s.slice(1);
1454
+ if (s.endsWith("/")) s = s.slice(0, -1);
1455
+ return s;
1297
1456
  }
1298
- function addWorkflowJob(existing, jobName, jobConfig) {
1299
- if (isToolingIgnored(existing)) return {
1300
- content: existing,
1301
- changed: false
1302
- };
1303
- try {
1304
- const doc = parseDocument(existing);
1305
- const jobs = doc.getIn(["jobs"]);
1306
- if (!isMap(jobs)) return {
1307
- content: existing,
1308
- changed: false
1309
- };
1310
- if (jobs.has(jobName)) return {
1311
- content: existing,
1312
- changed: false
1457
+ async function generateGitignore(ctx) {
1458
+ const filePath = ".gitignore";
1459
+ const existing = ctx.read(filePath);
1460
+ if (existing) {
1461
+ const existingNormalized = new Set(existing.split("\n").map(normalizeEntry).filter((line) => line.length > 0));
1462
+ const missing = ALL_ENTRIES.filter((entry) => !existingNormalized.has(normalizeEntry(entry)));
1463
+ if (missing.length === 0) return {
1464
+ filePath,
1465
+ action: "skipped",
1466
+ description: "Already has all standard entries"
1313
1467
  };
1314
- jobs.set(jobName, doc.createNode(jobConfig));
1315
- return {
1316
- content: doc.toString(),
1317
- changed: true
1468
+ const missingRequired = REQUIRED_ENTRIES.filter((entry) => !existingNormalized.has(normalizeEntry(entry)));
1469
+ const updated = existing.trimEnd() + "\n\n# Added by @bensandee/tooling\n" + missing.join("\n") + "\n";
1470
+ ctx.write(filePath, updated);
1471
+ if (missingRequired.length === 0) return {
1472
+ filePath,
1473
+ action: "skipped",
1474
+ description: "Only optional entries missing"
1318
1475
  };
1319
- } catch {
1320
1476
  return {
1321
- content: existing,
1322
- changed: false
1477
+ filePath,
1478
+ action: "updated",
1479
+ description: `Appended ${String(missing.length)} missing entries`
1323
1480
  };
1324
1481
  }
1482
+ ctx.write(filePath, ALL_ENTRIES.join("\n") + "\n");
1483
+ return {
1484
+ filePath,
1485
+ action: "created",
1486
+ description: "Generated .gitignore"
1487
+ };
1325
1488
  }
1326
1489
  //#endregion
1327
1490
  //#region src/generators/ci.ts
1328
1491
  /** Build a GitHub Actions expression like `${{ expr }}` without triggering no-template-curly-in-string. */
1329
- function actionsExpr$2(expr) {
1492
+ function actionsExpr$1(expr) {
1330
1493
  return `\${{ ${expr} }}`;
1331
1494
  }
1332
1495
  const CI_CONCURRENCY = {
1333
- group: `ci-${actionsExpr$2("github.ref")}`,
1334
- "cancel-in-progress": actionsExpr$2("github.ref != 'refs/heads/main'")
1496
+ group: `ci-${actionsExpr$1("github.ref")}`,
1497
+ "cancel-in-progress": true
1335
1498
  };
1336
- function hasEnginesNode$2(ctx) {
1499
+ function hasEnginesNode$1(ctx) {
1337
1500
  const raw = ctx.read("package.json");
1338
1501
  if (!raw) return false;
1339
1502
  return typeof parsePackageJson(raw)?.engines?.["node"] === "string";
@@ -1347,8 +1510,8 @@ ${emailNotifications}on:
1347
1510
  pull_request:
1348
1511
 
1349
1512
  concurrency:
1350
- group: ci-${actionsExpr$2("github.ref")}
1351
- cancel-in-progress: ${actionsExpr$2("github.ref != 'refs/heads/main'")}
1513
+ group: ci-${actionsExpr$1("github.ref")}
1514
+ cancel-in-progress: true
1352
1515
 
1353
1516
  jobs:
1354
1517
  check:
@@ -1405,8 +1568,8 @@ async function generateCi(ctx) {
1405
1568
  description: "CI workflow not requested"
1406
1569
  };
1407
1570
  const isGitHub = ctx.config.ci === "github";
1408
- const nodeVersionYaml = hasEnginesNode$2(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
1409
- const filePath = isGitHub ? ".github/workflows/check.yml" : ".forgejo/workflows/check.yml";
1571
+ const nodeVersionYaml = hasEnginesNode$1(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
1572
+ const filePath = isGitHub ? ".github/workflows/ci.yml" : ".forgejo/workflows/ci.yml";
1410
1573
  const content = ciWorkflow(nodeVersionYaml, !isGitHub);
1411
1574
  if (ctx.exists(filePath)) {
1412
1575
  const existing = ctx.read(filePath);
@@ -1883,13 +2046,13 @@ async function generateChangesets(ctx) {
1883
2046
  //#endregion
1884
2047
  //#region src/generators/release-ci.ts
1885
2048
  /** Build a GitHub Actions expression like `${{ expr }}` without triggering no-template-curly-in-string. */
1886
- function actionsExpr$1(expr) {
2049
+ function actionsExpr(expr) {
1887
2050
  return `\${{ ${expr} }}`;
1888
2051
  }
1889
- function hasEnginesNode$1(ctx) {
2052
+ function hasEnginesNode(ctx) {
1890
2053
  return typeof ctx.packageJson?.["engines"]?.["node"] === "string";
1891
2054
  }
1892
- function commonSteps(nodeVersionYaml) {
2055
+ function commonSteps(nodeVersionYaml, publishesNpm) {
1893
2056
  return ` - uses: actions/checkout@v4
1894
2057
  with:
1895
2058
  fetch-depth: 0
@@ -1897,18 +2060,18 @@ function commonSteps(nodeVersionYaml) {
1897
2060
  - uses: actions/setup-node@v4
1898
2061
  with:
1899
2062
  ${nodeVersionYaml}
1900
- cache: pnpm
1901
- registry-url: "https://registry.npmjs.org"
2063
+ cache: pnpm${publishesNpm ? `\n registry-url: "https://registry.npmjs.org"` : ""}
1902
2064
  - run: pnpm install --frozen-lockfile
1903
2065
  - run: pnpm build`;
1904
2066
  }
1905
- function releaseItWorkflow(ci, nodeVersionYaml) {
2067
+ function releaseItWorkflow(ci, nodeVersionYaml, publishesNpm) {
1906
2068
  const isGitHub = ci === "github";
1907
2069
  const permissions = isGitHub ? `
1908
2070
  permissions:
1909
2071
  contents: write
1910
2072
  ` : "";
1911
2073
  const tokenEnv = isGitHub ? `GITHUB_TOKEN: \${{ github.token }}` : `FORGEJO_TOKEN: \${{ secrets.FORGEJO_TOKEN }}`;
2074
+ const npmEnv = publishesNpm ? `\n NODE_AUTH_TOKEN: \${{ secrets.NPM_TOKEN }}` : "";
1912
2075
  return `${workflowSchemaComment(ci)}name: Release
1913
2076
  on:
1914
2077
  workflow_dispatch:
@@ -1917,14 +2080,13 @@ jobs:
1917
2080
  release:
1918
2081
  runs-on: ubuntu-latest
1919
2082
  steps:
1920
- ${commonSteps(nodeVersionYaml)}
2083
+ ${commonSteps(nodeVersionYaml, publishesNpm)}
1921
2084
  - run: pnpm release-it --ci
1922
2085
  env:
1923
- ${tokenEnv}
1924
- NODE_AUTH_TOKEN: \${{ secrets.NPM_TOKEN }}
2086
+ ${tokenEnv}${npmEnv}
1925
2087
  `;
1926
2088
  }
1927
- function commitAndTagVersionWorkflow(ci, nodeVersionYaml) {
2089
+ function commitAndTagVersionWorkflow(ci, nodeVersionYaml, publishesNpm) {
1928
2090
  const isGitHub = ci === "github";
1929
2091
  const permissions = isGitHub ? `
1930
2092
  permissions:
@@ -1954,83 +2116,42 @@ jobs:
1954
2116
  release:
1955
2117
  runs-on: ubuntu-latest
1956
2118
  steps:
1957
- ${commonSteps(nodeVersionYaml)}${gitConfigStep}${releaseStep}
2119
+ ${commonSteps(nodeVersionYaml, publishesNpm)}${gitConfigStep}${releaseStep}
1958
2120
  `;
1959
2121
  }
1960
- function changesetsReleaseJobConfig(ci, nodeVersionYaml) {
1961
- const isGitHub = ci === "github";
1962
- const nodeWith = {
1963
- ...nodeVersionYaml.startsWith("node-version-file") ? { "node-version-file": "package.json" } : { "node-version": "24" },
1964
- cache: "pnpm",
1965
- "registry-url": "https://registry.npmjs.org"
1966
- };
1967
- if (isGitHub) return {
1968
- needs: "check",
1969
- if: "github.ref == 'refs/heads/main'",
1970
- "runs-on": "ubuntu-latest",
1971
- permissions: {
1972
- contents: "write",
1973
- "pull-requests": "write"
1974
- },
1975
- steps: [
1976
- {
1977
- uses: "actions/checkout@v4",
1978
- with: { "fetch-depth": 0 }
2122
+ /** Build the required release step for the check job (changesets). */
2123
+ function changesetsReleaseStep(ci, publishesNpm) {
2124
+ if (ci === "github") return {
2125
+ match: { uses: "changesets/action" },
2126
+ step: {
2127
+ uses: "changesets/action@v1",
2128
+ if: "github.ref == 'refs/heads/main'",
2129
+ with: {
2130
+ publish: "pnpm changeset publish",
2131
+ version: "pnpm changeset version"
1979
2132
  },
1980
- { uses: "pnpm/action-setup@v4" },
1981
- {
1982
- uses: "actions/setup-node@v4",
1983
- with: nodeWith
1984
- },
1985
- { run: "pnpm install --frozen-lockfile" },
1986
- { run: "pnpm build" },
1987
- {
1988
- uses: "changesets/action@v1",
1989
- with: {
1990
- publish: "pnpm changeset publish",
1991
- version: "pnpm changeset version"
1992
- },
1993
- env: {
1994
- GITHUB_TOKEN: actionsExpr$1("github.token"),
1995
- NPM_TOKEN: actionsExpr$1("secrets.NPM_TOKEN")
1996
- }
2133
+ env: {
2134
+ GITHUB_TOKEN: actionsExpr("github.token"),
2135
+ ...publishesNpm && { NPM_TOKEN: actionsExpr("secrets.NPM_TOKEN") }
1997
2136
  }
1998
- ]
2137
+ }
1999
2138
  };
2000
2139
  return {
2001
- needs: "check",
2002
- if: "github.ref == 'refs/heads/main'",
2003
- "runs-on": "ubuntu-latest",
2004
- steps: [
2005
- {
2006
- uses: "actions/checkout@v4",
2007
- with: { "fetch-depth": 0 }
2140
+ match: { run: "release:changesets" },
2141
+ step: {
2142
+ name: "Release",
2143
+ if: "github.ref == 'refs/heads/main'",
2144
+ env: {
2145
+ FORGEJO_SERVER_URL: actionsExpr("github.server_url"),
2146
+ FORGEJO_REPOSITORY: actionsExpr("github.repository"),
2147
+ FORGEJO_TOKEN: actionsExpr("secrets.FORGEJO_TOKEN"),
2148
+ ...publishesNpm && { NODE_AUTH_TOKEN: actionsExpr("secrets.NPM_TOKEN") }
2008
2149
  },
2009
- { uses: "pnpm/action-setup@v4" },
2010
- {
2011
- uses: "actions/setup-node@v4",
2012
- with: nodeWith
2013
- },
2014
- { run: "pnpm install --frozen-lockfile" },
2015
- { run: "pnpm build" },
2016
- {
2017
- name: "Configure git",
2018
- run: "git config user.name \"forgejo-actions[bot]\"\ngit config user.email \"forgejo-actions[bot]@noreply.localhost\"\n"
2019
- },
2020
- {
2021
- name: "Release",
2022
- env: {
2023
- FORGEJO_SERVER_URL: actionsExpr$1("github.server_url"),
2024
- FORGEJO_REPOSITORY: actionsExpr$1("github.repository"),
2025
- FORGEJO_TOKEN: actionsExpr$1("secrets.FORGEJO_TOKEN"),
2026
- NODE_AUTH_TOKEN: actionsExpr$1("secrets.NPM_TOKEN")
2027
- },
2028
- run: "pnpm exec tooling release:changesets"
2029
- }
2030
- ]
2150
+ run: "pnpm exec tooling release:changesets"
2151
+ }
2031
2152
  };
2032
2153
  }
2033
- function requiredReleaseSteps(strategy, nodeVersionYaml) {
2154
+ function requiredReleaseSteps(strategy, nodeVersionYaml, publishesNpm) {
2034
2155
  const isNodeVersionFile = nodeVersionYaml.startsWith("node-version-file");
2035
2156
  const steps = [
2036
2157
  {
@@ -2051,7 +2172,7 @@ function requiredReleaseSteps(strategy, nodeVersionYaml) {
2051
2172
  with: {
2052
2173
  ...isNodeVersionFile ? { "node-version-file": "package.json" } : { "node-version": "24" },
2053
2174
  cache: "pnpm",
2054
- "registry-url": "https://registry.npmjs.org"
2175
+ ...publishesNpm && { "registry-url": "https://registry.npmjs.org" }
2055
2176
  }
2056
2177
  }
2057
2178
  },
@@ -2086,44 +2207,33 @@ function requiredReleaseSteps(strategy, nodeVersionYaml) {
2086
2207
  }
2087
2208
  return steps;
2088
2209
  }
2089
- function buildWorkflow(strategy, ci, nodeVersionYaml) {
2210
+ function buildWorkflow(strategy, ci, nodeVersionYaml, publishesNpm) {
2090
2211
  switch (strategy) {
2091
- case "release-it": return releaseItWorkflow(ci, nodeVersionYaml);
2092
- case "simple": return commitAndTagVersionWorkflow(ci, nodeVersionYaml);
2212
+ case "release-it": return releaseItWorkflow(ci, nodeVersionYaml, publishesNpm);
2213
+ case "simple": return commitAndTagVersionWorkflow(ci, nodeVersionYaml, publishesNpm);
2093
2214
  default: return null;
2094
2215
  }
2095
2216
  }
2096
- function generateChangesetsReleaseCi(ctx) {
2097
- const checkPath = ctx.config.ci === "github" ? ".github/workflows/check.yml" : ".forgejo/workflows/check.yml";
2098
- const nodeVersionYaml = hasEnginesNode$1(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
2099
- const existing = ctx.read(checkPath);
2217
+ function generateChangesetsReleaseCi(ctx, publishesNpm) {
2218
+ const ciPath = ctx.config.ci === "github" ? ".github/workflows/ci.yml" : ".forgejo/workflows/ci.yml";
2219
+ const existing = ctx.read(ciPath);
2100
2220
  if (!existing) return {
2101
- filePath: checkPath,
2221
+ filePath: ciPath,
2102
2222
  action: "skipped",
2103
2223
  description: "CI workflow not found — run check generator first"
2104
2224
  };
2105
- const addResult = addWorkflowJob(existing, "release", changesetsReleaseJobConfig(ctx.config.ci, nodeVersionYaml));
2106
- if (addResult.changed) {
2107
- const withComment = ensureSchemaComment(addResult.content, ctx.config.ci);
2108
- ctx.write(checkPath, withComment);
2109
- return {
2110
- filePath: checkPath,
2111
- action: "updated",
2112
- description: "Added release job to CI workflow"
2113
- };
2114
- }
2115
- const merged = mergeWorkflowSteps(existing, "release", requiredReleaseSteps("changesets", nodeVersionYaml));
2225
+ const merged = mergeWorkflowSteps(existing, "check", [changesetsReleaseStep(ctx.config.ci, publishesNpm)]);
2116
2226
  if (!merged.changed) return {
2117
- filePath: checkPath,
2227
+ filePath: ciPath,
2118
2228
  action: "skipped",
2119
- description: "Release job in CI workflow already up to date"
2229
+ description: "Release step in CI workflow already up to date"
2120
2230
  };
2121
2231
  const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
2122
- ctx.write(checkPath, withComment);
2232
+ ctx.write(ciPath, withComment);
2123
2233
  return {
2124
- filePath: checkPath,
2234
+ filePath: ciPath,
2125
2235
  action: "updated",
2126
- description: "Added missing steps to release job in CI workflow"
2236
+ description: "Added release step to CI workflow"
2127
2237
  };
2128
2238
  }
2129
2239
  async function generateReleaseCi(ctx) {
@@ -2133,11 +2243,12 @@ async function generateReleaseCi(ctx) {
2133
2243
  action: "skipped",
2134
2244
  description: "Release CI workflow not applicable"
2135
2245
  };
2136
- if (ctx.config.releaseStrategy === "changesets") return generateChangesetsReleaseCi(ctx);
2246
+ const publishesNpm = getPublishablePackages(ctx.targetDir, ctx.config.structure, ctx.packageJson).length > 0;
2247
+ if (ctx.config.releaseStrategy === "changesets") return generateChangesetsReleaseCi(ctx, publishesNpm);
2137
2248
  const isGitHub = ctx.config.ci === "github";
2138
2249
  const workflowPath = isGitHub ? ".github/workflows/release.yml" : ".forgejo/workflows/release.yml";
2139
- const nodeVersionYaml = hasEnginesNode$1(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
2140
- const content = buildWorkflow(ctx.config.releaseStrategy, ctx.config.ci, nodeVersionYaml);
2250
+ const nodeVersionYaml = hasEnginesNode(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
2251
+ const content = buildWorkflow(ctx.config.releaseStrategy, ctx.config.ci, nodeVersionYaml, publishesNpm);
2141
2252
  if (!content) return {
2142
2253
  filePath,
2143
2254
  action: "skipped",
@@ -2151,7 +2262,7 @@ async function generateReleaseCi(ctx) {
2151
2262
  action: "skipped",
2152
2263
  description: "Release workflow already up to date"
2153
2264
  };
2154
- const merged = mergeWorkflowSteps(existing, "release", requiredReleaseSteps(ctx.config.releaseStrategy, nodeVersionYaml));
2265
+ const merged = mergeWorkflowSteps(existing, "release", requiredReleaseSteps(ctx.config.releaseStrategy, nodeVersionYaml, publishesNpm));
2155
2266
  const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
2156
2267
  if (withComment === content) {
2157
2268
  ctx.write(workflowPath, content);
@@ -2452,148 +2563,6 @@ async function generateVscodeSettings(ctx) {
2452
2563
  return results;
2453
2564
  }
2454
2565
  //#endregion
2455
- //#region src/generators/deploy-ci.ts
2456
- /** Build a GitHub Actions expression like `${{ expr }}` without triggering no-template-curly-in-string. */
2457
- function actionsExpr(expr) {
2458
- return `\${{ ${expr} }}`;
2459
- }
2460
- function hasEnginesNode(ctx) {
2461
- return typeof ctx.packageJson?.["engines"]?.["node"] === "string";
2462
- }
2463
- function deployWorkflow(ci, nodeVersionYaml) {
2464
- return `${workflowSchemaComment(ci)}name: Deploy
2465
- on:
2466
- push:
2467
- tags:
2468
- - "v[0-9]+.[0-9]+.[0-9]+"
2469
-
2470
- jobs:
2471
- deploy:
2472
- runs-on: ubuntu-latest
2473
- steps:
2474
- - uses: actions/checkout@v4
2475
- - uses: pnpm/action-setup@v4
2476
- - uses: actions/setup-node@v4
2477
- with:
2478
- ${nodeVersionYaml}
2479
- - run: pnpm install --frozen-lockfile
2480
- - name: Publish Docker images
2481
- env:
2482
- DOCKER_REGISTRY_HOST: ${actionsExpr("vars.DOCKER_REGISTRY_HOST")}
2483
- DOCKER_REGISTRY_NAMESPACE: ${actionsExpr("vars.DOCKER_REGISTRY_NAMESPACE")}
2484
- DOCKER_USERNAME: ${actionsExpr("secrets.DOCKER_USERNAME")}
2485
- DOCKER_PASSWORD: ${actionsExpr("secrets.DOCKER_PASSWORD")}
2486
- run: pnpm exec tooling docker:publish
2487
- `;
2488
- }
2489
- function requiredDeploySteps() {
2490
- return [
2491
- {
2492
- match: { uses: "actions/checkout" },
2493
- step: { uses: "actions/checkout@v4" }
2494
- },
2495
- {
2496
- match: { uses: "pnpm/action-setup" },
2497
- step: { uses: "pnpm/action-setup@v4" }
2498
- },
2499
- {
2500
- match: { uses: "actions/setup-node" },
2501
- step: { uses: "actions/setup-node@v4" }
2502
- },
2503
- {
2504
- match: { run: "pnpm install" },
2505
- step: { run: "pnpm install --frozen-lockfile" }
2506
- },
2507
- {
2508
- match: { run: "docker:publish" },
2509
- step: { run: "pnpm exec tooling docker:publish" }
2510
- }
2511
- ];
2512
- }
2513
- /** Convention paths to check for Dockerfiles. */
2514
- const CONVENTION_DOCKERFILE_PATHS$1 = ["Dockerfile", "docker/Dockerfile"];
2515
- const DockerMapSchema = z.object({ docker: z.record(z.string(), z.unknown()).optional() });
2516
- /** Check whether any Docker packages exist by convention or .tooling.json config. */
2517
- function hasDockerPackages(ctx) {
2518
- const configRaw = ctx.read(".tooling.json");
2519
- if (configRaw) {
2520
- const result = DockerMapSchema.safeParse(JSON.parse(configRaw));
2521
- if (result.success && result.data.docker && Object.keys(result.data.docker).length > 0) return true;
2522
- }
2523
- if (ctx.config.structure === "monorepo") {
2524
- const packages = getMonorepoPackages(ctx.targetDir);
2525
- for (const pkg of packages) {
2526
- const dirName = pkg.name.split("/").pop() ?? pkg.name;
2527
- for (const rel of CONVENTION_DOCKERFILE_PATHS$1) if (ctx.exists(`packages/${dirName}/${rel}`)) return true;
2528
- }
2529
- } else for (const rel of CONVENTION_DOCKERFILE_PATHS$1) if (ctx.exists(rel)) return true;
2530
- return false;
2531
- }
2532
- async function generateDeployCi(ctx) {
2533
- const filePath = "deploy-ci";
2534
- if (!hasDockerPackages(ctx) || ctx.config.ci === "none") return {
2535
- filePath,
2536
- action: "skipped",
2537
- description: "Deploy CI workflow not applicable"
2538
- };
2539
- const isGitHub = ctx.config.ci === "github";
2540
- const workflowPath = isGitHub ? ".github/workflows/publish.yml" : ".forgejo/workflows/publish.yml";
2541
- const nodeVersionYaml = hasEnginesNode(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
2542
- const content = deployWorkflow(ctx.config.ci, nodeVersionYaml);
2543
- if (ctx.exists(workflowPath)) {
2544
- const existing = ctx.read(workflowPath);
2545
- if (existing) {
2546
- if (existing === content || ensureSchemaComment(existing, ctx.config.ci) === content) return {
2547
- filePath: workflowPath,
2548
- action: "skipped",
2549
- description: "Deploy workflow already up to date"
2550
- };
2551
- const merged = mergeWorkflowSteps(existing, "deploy", requiredDeploySteps());
2552
- const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
2553
- if (withComment === content) {
2554
- ctx.write(workflowPath, content);
2555
- return {
2556
- filePath: workflowPath,
2557
- action: "updated",
2558
- description: "Added missing steps to deploy workflow"
2559
- };
2560
- }
2561
- if (await ctx.confirmOverwrite(workflowPath) === "skip") {
2562
- if (merged.changed || withComment !== merged.content) {
2563
- ctx.write(workflowPath, withComment);
2564
- return {
2565
- filePath: workflowPath,
2566
- action: "updated",
2567
- description: "Added missing steps to deploy workflow"
2568
- };
2569
- }
2570
- return {
2571
- filePath: workflowPath,
2572
- action: "skipped",
2573
- description: "Existing deploy workflow preserved"
2574
- };
2575
- }
2576
- ctx.write(workflowPath, content);
2577
- return {
2578
- filePath: workflowPath,
2579
- action: "updated",
2580
- description: "Replaced deploy workflow with updated template"
2581
- };
2582
- }
2583
- return {
2584
- filePath: workflowPath,
2585
- action: "skipped",
2586
- description: "Deploy workflow already up to date"
2587
- };
2588
- }
2589
- ctx.write(workflowPath, content);
2590
- return {
2591
- filePath: workflowPath,
2592
- action: "created",
2593
- description: `Generated ${isGitHub ? "GitHub" : "Forgejo"} Actions deploy workflow`
2594
- };
2595
- }
2596
- //#endregion
2597
2566
  //#region src/generators/pipeline.ts
2598
2567
  /** Run all generators sequentially and return their results. */
2599
2568
  async function runGenerators(ctx) {
@@ -2790,6 +2759,16 @@ function generateMigratePrompt(results, config, detected) {
2790
2759
  }
2791
2760
  //#endregion
2792
2761
  //#region src/commands/repo-init.ts
2762
+ /** Log what was detected so the user understands generator decisions. */
2763
+ function logDetectionSummary(ctx) {
2764
+ const dockerNames = getDockerPackageNames(ctx);
2765
+ if (dockerNames.length > 0) p.log.info(`Detected Docker packages: ${dockerNames.join(", ")}`);
2766
+ if (ctx.config.releaseStrategy !== "none") {
2767
+ const publishable = getPublishablePackages(ctx.targetDir, ctx.config.structure, ctx.packageJson);
2768
+ if (publishable.length > 0) p.log.info(`Will publish npm packages: ${publishable.map((pkg) => pkg.name).join(", ")}`);
2769
+ else p.log.info("No publishable npm packages — npm registry setup will be skipped");
2770
+ }
2771
+ }
2793
2772
  async function runInit(config, options = {}) {
2794
2773
  const detected = detectProject(config.targetDir);
2795
2774
  const s = p.spinner();
@@ -2809,6 +2788,7 @@ async function runInit(config, options = {}) {
2809
2788
  if (p.isCancel(result)) return "skip";
2810
2789
  return result;
2811
2790
  }));
2791
+ logDetectionSummary(ctx);
2812
2792
  s.start("Generating configuration files...");
2813
2793
  const results = await runGenerators(ctx);
2814
2794
  const alreadyArchived = new Set(results.filter((r) => r.action === "archived").map((r) => r.filePath));
@@ -2819,8 +2799,7 @@ async function runInit(config, options = {}) {
2819
2799
  });
2820
2800
  const created = results.filter((r) => r.action === "created");
2821
2801
  const updated = results.filter((r) => r.action === "updated");
2822
- const archived = results.filter((r) => r.action === "archived");
2823
- if (!(created.length > 0 || updated.length > 0 || archived.length > 0) && options.noPrompt) {
2802
+ if (!(created.length > 0 || updated.length > 0 || archivedFiles.length > 0) && options.noPrompt) {
2824
2803
  s.stop("Repository is up to date.");
2825
2804
  return results;
2826
2805
  }
@@ -2834,7 +2813,6 @@ async function runInit(config, options = {}) {
2834
2813
  const summaryLines = [];
2835
2814
  if (created.length > 0) summaryLines.push(`Created: ${created.map((r) => r.filePath).join(", ")}`);
2836
2815
  if (updated.length > 0) summaryLines.push(`Updated: ${updated.map((r) => r.filePath).join(", ")}`);
2837
- if (archived.length > 0) summaryLines.push(`Archived: ${archived.map((r) => r.filePath).join(", ")}`);
2838
2816
  p.note(summaryLines.join("\n"), "Summary");
2839
2817
  if (!options.noPrompt) {
2840
2818
  const prompt = generateMigratePrompt(results, config, detected);
@@ -2929,6 +2907,7 @@ async function runCheck(targetDir) {
2929
2907
  const saved = loadToolingConfig(targetDir);
2930
2908
  const detected = buildDefaultConfig(targetDir, {});
2931
2909
  const { ctx, pendingWrites } = createDryRunContext(saved ? mergeWithSavedConfig(detected, saved) : detected);
2910
+ logDetectionSummary(ctx);
2932
2911
  const actionable = (await runGenerators(ctx)).filter((r) => {
2933
2912
  if (r.action !== "created" && r.action !== "updated") return false;
2934
2913
  const newContent = pendingWrites.get(r.filePath);
@@ -3543,8 +3522,21 @@ function buildReleaseConfig(flags) {
3543
3522
  verbose: flags.verbose ?? false
3544
3523
  };
3545
3524
  }
3525
+ /** Resolve the current branch from CI env vars or git. */
3526
+ function getCurrentBranch(executor, cwd) {
3527
+ const ref = process.env["GITHUB_REF"];
3528
+ if (ref?.startsWith("refs/heads/")) return ref.slice(11);
3529
+ return executor.exec("git rev-parse --abbrev-ref HEAD", { cwd }).stdout.trim();
3530
+ }
3546
3531
  /** Core release logic — testable with a mock executor. */
3547
3532
  async function runRelease(config, executor) {
3533
+ const branch = getCurrentBranch(executor, config.cwd);
3534
+ if (branch !== "main") {
3535
+ debug$1(config, `Skipping release on non-main branch: ${branch}`);
3536
+ return { mode: "none" };
3537
+ }
3538
+ executor.exec("git config user.name \"forgejo-actions[bot]\"", { cwd: config.cwd });
3539
+ executor.exec("git config user.email \"forgejo-actions[bot]@noreply.localhost\"", { cwd: config.cwd });
3548
3540
  const changesetFiles = executor.listChangesetFiles(config.cwd);
3549
3541
  debug$1(config, `Changeset files found: ${changesetFiles.length > 0 ? changesetFiles.join(", ") : "(none)"}`);
3550
3542
  if (changesetFiles.length > 0) {
@@ -3845,7 +3837,7 @@ const CHECKS = [
3845
3837
  },
3846
3838
  { name: "knip" },
3847
3839
  { name: "tooling:check" },
3848
- { name: "image:check" }
3840
+ { name: "docker:check" }
3849
3841
  ];
3850
3842
  function defaultGetScripts(targetDir) {
3851
3843
  try {
@@ -3912,7 +3904,7 @@ function runRunChecks(targetDir, options = {}) {
3912
3904
  const runChecksCommand = defineCommand({
3913
3905
  meta: {
3914
3906
  name: "checks:run",
3915
- description: "Run all standard checks (build, typecheck, lint, test, format, knip, tooling:check, image:check)"
3907
+ description: "Run all standard checks (build, typecheck, lint, test, format, knip, tooling:check, docker:check)"
3916
3908
  },
3917
3909
  args: {
3918
3910
  dir: {
@@ -3922,7 +3914,7 @@ const runChecksCommand = defineCommand({
3922
3914
  },
3923
3915
  skip: {
3924
3916
  type: "string",
3925
- description: "Comma-separated list of checks to skip (build, typecheck, lint, test, format, knip, tooling:check, image:check)",
3917
+ description: "Comma-separated list of checks to skip (build, typecheck, lint, test, format, knip, tooling:check, docker:check)",
3926
3918
  required: false
3927
3919
  },
3928
3920
  add: {
@@ -4313,6 +4305,7 @@ const ComposePortSchema = z.union([z.string(), z.object({
4313
4305
  target: z.union([z.string(), z.number()]).optional()
4314
4306
  }).loose()]);
4315
4307
  const ComposeServiceSchema = z.object({
4308
+ image: z.string().optional(),
4316
4309
  ports: z.array(ComposePortSchema).optional(),
4317
4310
  healthcheck: z.unknown().optional()
4318
4311
  }).loose();
@@ -4387,6 +4380,7 @@ function parseComposeServices(cwd, composeFiles) {
4387
4380
  }
4388
4381
  serviceMap.set(name, {
4389
4382
  name,
4383
+ image: existing?.image ?? service.image,
4390
4384
  hostPort,
4391
4385
  hasHealthcheck: existing?.hasHealthcheck ?? service.healthcheck !== void 0
4392
4386
  });
@@ -4394,6 +4388,15 @@ function parseComposeServices(cwd, composeFiles) {
4394
4388
  }
4395
4389
  return [...serviceMap.values()];
4396
4390
  }
4391
+ /** Extract deduplicated bare image names (without tags) from parsed services. */
4392
+ function extractComposeImageNames(services) {
4393
+ const names = /* @__PURE__ */ new Set();
4394
+ for (const service of services) if (service.image) {
4395
+ const bare = service.image.split(":")[0];
4396
+ if (bare) names.add(bare);
4397
+ }
4398
+ return [...names];
4399
+ }
4397
4400
  /** Generate health checks from parsed services: services with exposed ports get HTTP checks, unless they define a compose-level healthcheck. */
4398
4401
  function deriveHealthChecks(services) {
4399
4402
  return services.filter((s) => s.hostPort !== void 0 && !s.hasHealthcheck).map((s) => ({
@@ -4454,6 +4457,26 @@ function computeCheckDefaults(cwd) {
4454
4457
  healthChecks: healthChecks.length > 0 ? healthChecks : void 0
4455
4458
  };
4456
4459
  }
4460
+ /** Create a DockerFileReader backed by the real filesystem. */
4461
+ function createFileReader() {
4462
+ return {
4463
+ listPackageDirs(cwd) {
4464
+ const packagesDir = path.join(cwd, "packages");
4465
+ try {
4466
+ return readdirSync(packagesDir, { withFileTypes: true }).filter((entry) => entry.isDirectory()).map((entry) => entry.name);
4467
+ } catch {
4468
+ return [];
4469
+ }
4470
+ },
4471
+ readFile(filePath) {
4472
+ try {
4473
+ return readFileSync(filePath, "utf-8");
4474
+ } catch {
4475
+ return null;
4476
+ }
4477
+ }
4478
+ };
4479
+ }
4457
4480
  //#endregion
4458
4481
  //#region src/commands/docker-check.ts
4459
4482
  /** Convert declarative health checks to functional ones. */
@@ -4509,7 +4532,18 @@ const dockerCheckCommand = defineCommand({
4509
4532
  }
4510
4533
  if (!defaults.services || defaults.services.length === 0) throw new FatalError("No services found in compose files.");
4511
4534
  const composeCwd = defaults.composeCwd ?? cwd;
4512
- const tempOverlayPath = writeTempOverlay(generateCheckOverlay(parseComposeServices(composeCwd, defaults.composeFiles)));
4535
+ const services = parseComposeServices(composeCwd, defaults.composeFiles);
4536
+ const fileReader = createFileReader();
4537
+ const rootPkgRaw = fileReader.readFile(path.join(cwd, "package.json"));
4538
+ if (rootPkgRaw) {
4539
+ const rootPkg = parsePackageJson(rootPkgRaw);
4540
+ if (rootPkg?.name) {
4541
+ const dockerPackages = detectDockerPackages(fileReader, cwd, rootPkg.name);
4542
+ const composeImages = extractComposeImageNames(services);
4543
+ for (const pkg of dockerPackages) if (!composeImages.some((img) => img === pkg.imageName || img.endsWith(`/${pkg.imageName}`))) warn(`Docker package "${pkg.dir}" (image: ${pkg.imageName}) is not referenced in any compose service.`);
4544
+ }
4545
+ }
4546
+ const tempOverlayPath = writeTempOverlay(generateCheckOverlay(services));
4513
4547
  const composeFiles = [
4514
4548
  ...defaults.composeFiles,
4515
4549
  tempOverlayPath,
@@ -4543,7 +4577,7 @@ const dockerCheckCommand = defineCommand({
4543
4577
  const main = defineCommand({
4544
4578
  meta: {
4545
4579
  name: "tooling",
4546
- version: "0.19.0",
4580
+ version: "0.22.0",
4547
4581
  description: "Bootstrap and maintain standardized TypeScript project tooling"
4548
4582
  },
4549
4583
  subCommands: {
@@ -4559,7 +4593,7 @@ const main = defineCommand({
4559
4593
  "docker:check": dockerCheckCommand
4560
4594
  }
4561
4595
  });
4562
- console.log(`@bensandee/tooling v0.19.0`);
4596
+ console.log(`@bensandee/tooling v0.22.0`);
4563
4597
  runMain(main);
4564
4598
  //#endregion
4565
4599
  export {};
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@bensandee/tooling",
3
- "version": "0.19.0",
3
+ "version": "0.22.0",
4
4
  "description": "CLI tool to bootstrap and maintain standardized TypeScript project tooling",
5
5
  "bin": {
6
6
  "tooling": "./dist/bin.mjs"