@bensandee/tooling 0.14.1 → 0.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,16 @@
1
1
  # @bensandee/tooling
2
2
 
3
+ ## 0.15.0
4
+
5
+ ### Minor Changes
6
+
7
+ - 2ef37e2: Add `docker:build` and `docker:publish` CLI commands. Packages declare a `docker` field in their `package.json` with `dockerfile` and `context`, and the tooling handles `docker build` with the correct image name (`{repo}-{package}`). `docker:build --package .` enables a per-package `image:build` script for local testing. `docker:publish` builds all images, then tags/pushes them with semver variants (latest, vX.Y.Z, vX.Y, vX) from each package's own version. Also adds a deploy workflow generator (`setupDocker` config option) that emits a CI workflow triggered on version tags.
8
+ - c09d233: Combine CI and release workflows for changesets strategy into a single check.yml with release job gated on check success
9
+
10
+ ### Patch Changes
11
+
12
+ - 6cae944: Prompt to overwrite outdated release workflows during repo:update instead of only merging missing steps
13
+
3
14
  ## 0.14.1
4
15
 
5
16
  ### Patch Changes
package/README.md CHANGED
@@ -32,6 +32,82 @@ pnpm dlx @bensandee/tooling repo:init
32
32
  | `tooling forgejo:create-release` | Create a Forgejo release from a tag. |
33
33
  | `tooling changesets:merge` | Merge a changesets version PR. |
34
34
 
35
+ ### Docker
36
+
37
+ | Command | Description |
38
+ | ------------------------ | ----------------------------------------------------------------------- |
39
+ | `tooling docker:build` | Build Docker images for packages with docker config in `.tooling.json`. |
40
+ | `tooling docker:publish` | Build, tag, and push Docker images to a registry. |
41
+
42
+ Both commands read Docker build config from the `docker` map in `.tooling.json`, keyed by package directory name. All paths are relative to the project root (where `.tooling.json` lives):
43
+
44
+ ```json
45
+ {
46
+ "setupDocker": true,
47
+ "docker": {
48
+ "server": {
49
+ "dockerfile": "packages/server/docker/Dockerfile",
50
+ "context": "."
51
+ },
52
+ "client": {
53
+ "dockerfile": "packages/client/Dockerfile"
54
+ }
55
+ }
56
+ }
57
+ ```
58
+
59
+ The `context` field defaults to `"."` (project root) when omitted. Versions for tagging are read from each package's own `package.json`.
60
+
61
+ The tooling derives image names as `{root-package-name}-{package-name}` (e.g. `my-app-server`) and runs `docker build -f <dockerfile> -t <image-name>:latest <context>` for each package.
62
+
63
+ #### `docker:build`
64
+
65
+ Builds all detected packages, or a single package with `--package`:
66
+
67
+ ```bash
68
+ # Build all packages with docker config
69
+ tooling docker:build
70
+
71
+ # Build a single package (useful as an image:build script)
72
+ tooling docker:build --package packages/server
73
+
74
+ # Pass extra args to docker build
75
+ tooling docker:build -- --no-cache --build-arg FOO=bar
76
+ ```
77
+
78
+ To give individual packages a standalone `image:build` script for local testing:
79
+
80
+ ```json
81
+ {
82
+ "scripts": {
83
+ "image:build": "pnpm exec tooling docker:build --package ."
84
+ }
85
+ }
86
+ ```
87
+
88
+ **Flags:** `--package <dir>` (build a single package), `--verbose`
89
+
90
+ #### `docker:publish`
91
+
92
+ Runs `docker:build` for all packages, then logs in to the registry, tags each image with semver variants from its own `version` field, pushes all tags, and logs out.
93
+
94
+ Tags generated per package: `latest`, `vX.Y.Z`, `vX.Y`, `vX`
95
+
96
+ Each package is tagged independently using its own version, so packages in a monorepo can have different release cadences. Packages without a `version` field are rejected at publish time.
97
+
98
+ **Flags:** `--dry-run` (build and tag only, skip login/push/logout), `--verbose`
99
+
100
+ **Required environment variables:**
101
+
102
+ | Variable | Description |
103
+ | --------------------------- | --------------------------------------------------------------------- |
104
+ | `DOCKER_REGISTRY_HOST` | Registry hostname (e.g. `code.orangebikelabs.com`) |
105
+ | `DOCKER_REGISTRY_NAMESPACE` | Full namespace for tagging (e.g. `code.orangebikelabs.com/bensandee`) |
106
+ | `DOCKER_USERNAME` | Registry username |
107
+ | `DOCKER_PASSWORD` | Registry password |
108
+
109
+ When `setupDocker` is enabled in `.tooling.json`, `repo:init` generates a deploy workflow (`.forgejo/workflows/deploy.yml` or `.github/workflows/deploy.yml`) triggered on version tags (`v*.*.*`) that runs `pnpm exec tooling docker:publish`.
110
+
35
111
  ## Config file
36
112
 
37
113
  `repo:init` persists choices to `.tooling.json` at the project root. `repo:check` and `repo:update` read this file to reproduce the same config without re-prompting.
package/dist/bin.mjs CHANGED
@@ -393,6 +393,7 @@ async function runInitPrompts(targetDir, saved) {
393
393
  releaseStrategy,
394
394
  projectType,
395
395
  detectPackageTypes,
396
+ setupDocker: saved?.setupDocker ?? false,
396
397
  targetDir
397
398
  };
398
399
  }
@@ -412,6 +413,7 @@ function buildDefaultConfig(targetDir, flags) {
412
413
  releaseStrategy: detected.hasReleaseItConfig ? "release-it" : detected.hasSimpleReleaseConfig ? "simple" : detected.hasChangesetsConfig ? "changesets" : "none",
413
414
  projectType: "default",
414
415
  detectPackageTypes: true,
416
+ setupDocker: false,
415
417
  targetDir
416
418
  };
417
419
  }
@@ -596,7 +598,7 @@ function getAddedDevDepNames(config) {
596
598
  const deps = { ...ROOT_DEV_DEPS };
597
599
  if (config.structure !== "monorepo") Object.assign(deps, PER_PACKAGE_DEV_DEPS);
598
600
  deps["@bensandee/config"] = "0.8.1";
599
- deps["@bensandee/tooling"] = "0.14.1";
601
+ deps["@bensandee/tooling"] = "0.15.0";
600
602
  if (config.formatter === "oxfmt") deps["oxfmt"] = "0.35.0";
601
603
  if (config.formatter === "prettier") deps["prettier"] = "3.8.1";
602
604
  addReleaseDeps(deps, config);
@@ -617,7 +619,7 @@ async function generatePackageJson(ctx) {
617
619
  const devDeps = { ...ROOT_DEV_DEPS };
618
620
  if (!isMonorepo) Object.assign(devDeps, PER_PACKAGE_DEV_DEPS);
619
621
  devDeps["@bensandee/config"] = isWorkspacePackage(ctx, "@bensandee/config") ? "workspace:*" : "0.8.1";
620
- devDeps["@bensandee/tooling"] = isWorkspacePackage(ctx, "@bensandee/tooling") ? "workspace:*" : "0.14.1";
622
+ devDeps["@bensandee/tooling"] = isWorkspacePackage(ctx, "@bensandee/tooling") ? "workspace:*" : "0.15.0";
621
623
  if (ctx.config.useEslintPlugin) devDeps["@bensandee/eslint-plugin"] = isWorkspacePackage(ctx, "@bensandee/eslint-plugin") ? "workspace:*" : "0.9.2";
622
624
  if (ctx.config.formatter === "oxfmt") devDeps["oxfmt"] = "0.35.0";
623
625
  if (ctx.config.formatter === "prettier") devDeps["prettier"] = "3.8.1";
@@ -1366,9 +1368,42 @@ function mergeWorkflowSteps(existing, jobName, requiredSteps) {
1366
1368
  };
1367
1369
  }
1368
1370
  }
1371
+ /**
1372
+ * Add a job to an existing workflow YAML if it doesn't already exist.
1373
+ * Returns unchanged content if the job already exists, the file has an opt-out comment,
1374
+ * or the document can't be parsed.
1375
+ */
1376
+ function addWorkflowJob(existing, jobName, jobConfig) {
1377
+ if (isToolingIgnored(existing)) return {
1378
+ content: existing,
1379
+ changed: false
1380
+ };
1381
+ try {
1382
+ const doc = parseDocument(existing);
1383
+ const jobs = doc.getIn(["jobs"]);
1384
+ if (!isMap(jobs)) return {
1385
+ content: existing,
1386
+ changed: false
1387
+ };
1388
+ if (jobs.has(jobName)) return {
1389
+ content: existing,
1390
+ changed: false
1391
+ };
1392
+ jobs.set(jobName, doc.createNode(jobConfig));
1393
+ return {
1394
+ content: doc.toString(),
1395
+ changed: true
1396
+ };
1397
+ } catch {
1398
+ return {
1399
+ content: existing,
1400
+ changed: false
1401
+ };
1402
+ }
1403
+ }
1369
1404
  //#endregion
1370
1405
  //#region src/generators/ci.ts
1371
- function hasEnginesNode$1(ctx) {
1406
+ function hasEnginesNode$2(ctx) {
1372
1407
  const raw = ctx.read("package.json");
1373
1408
  if (!raw) return false;
1374
1409
  return typeof parsePackageJson(raw)?.engines?.["node"] === "string";
@@ -1380,7 +1415,6 @@ ${emailNotifications}on:
1380
1415
  push:
1381
1416
  branches: [main]
1382
1417
  pull_request:
1383
- branches: [main]
1384
1418
 
1385
1419
  jobs:
1386
1420
  check:
@@ -1437,7 +1471,7 @@ async function generateCi(ctx) {
1437
1471
  description: "CI workflow not requested"
1438
1472
  };
1439
1473
  const isGitHub = ctx.config.ci === "github";
1440
- const nodeVersionYaml = hasEnginesNode$1(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
1474
+ const nodeVersionYaml = hasEnginesNode$2(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
1441
1475
  const filePath = isGitHub ? ".github/workflows/check.yml" : ".forgejo/workflows/check.yml";
1442
1476
  const content = ciWorkflow(nodeVersionYaml, !isGitHub);
1443
1477
  if (ctx.exists(filePath)) {
@@ -1913,7 +1947,11 @@ async function generateChangesets(ctx) {
1913
1947
  }
1914
1948
  //#endregion
1915
1949
  //#region src/generators/release-ci.ts
1916
- function hasEnginesNode(ctx) {
1950
+ /** Build a GitHub Actions expression like `${{ expr }}` without triggering no-template-curly-in-string. */
1951
+ function actionsExpr$1(expr) {
1952
+ return `\${{ ${expr} }}`;
1953
+ }
1954
+ function hasEnginesNode$1(ctx) {
1917
1955
  return typeof ctx.packageJson?.["engines"]?.["node"] === "string";
1918
1956
  }
1919
1957
  function commonSteps(nodeVersionYaml) {
@@ -1984,53 +2022,78 @@ jobs:
1984
2022
  ${commonSteps(nodeVersionYaml)}${gitConfigStep}${releaseStep}
1985
2023
  `;
1986
2024
  }
1987
- function changesetsWorkflow(ci, nodeVersionYaml) {
1988
- if (ci === "github") return `${workflowSchemaComment(ci)}name: Release
1989
- on:
1990
- push:
1991
- branches:
1992
- - main
1993
-
1994
- permissions:
1995
- contents: write
1996
- pull-requests: write
1997
-
1998
- jobs:
1999
- release:
2000
- runs-on: ubuntu-latest
2001
- steps:
2002
- ${commonSteps(nodeVersionYaml)}
2003
- - uses: changesets/action@v1
2004
- with:
2005
- publish: pnpm changeset publish
2006
- version: pnpm changeset version
2007
- env:
2008
- GITHUB_TOKEN: \${{ github.token }}
2009
- NPM_TOKEN: \${{ secrets.NPM_TOKEN }}
2010
- `;
2011
- return `${workflowSchemaComment(ci)}name: Release
2012
- on:
2013
- push:
2014
- branches:
2015
- - main
2016
-
2017
- jobs:
2018
- release:
2019
- runs-on: ubuntu-latest
2020
- steps:
2021
- ${commonSteps(nodeVersionYaml)}
2022
- - name: Configure git
2023
- run: |
2024
- git config user.name "forgejo-actions[bot]"
2025
- git config user.email "forgejo-actions[bot]@noreply.localhost"
2026
- - name: Release
2027
- env:
2028
- FORGEJO_SERVER_URL: \${{ github.server_url }}
2029
- FORGEJO_REPOSITORY: \${{ github.repository }}
2030
- FORGEJO_TOKEN: \${{ secrets.FORGEJO_TOKEN }}
2031
- NODE_AUTH_TOKEN: \${{ secrets.NPM_TOKEN }}
2032
- run: pnpm exec tooling release:changesets
2033
- `;
2025
+ function changesetsReleaseJobConfig(ci, nodeVersionYaml) {
2026
+ const isGitHub = ci === "github";
2027
+ const nodeWith = {
2028
+ ...nodeVersionYaml.startsWith("node-version-file") ? { "node-version-file": "package.json" } : { "node-version": "24" },
2029
+ cache: "pnpm",
2030
+ "registry-url": "https://registry.npmjs.org"
2031
+ };
2032
+ if (isGitHub) return {
2033
+ needs: "check",
2034
+ if: "github.ref == 'refs/heads/main'",
2035
+ "runs-on": "ubuntu-latest",
2036
+ permissions: {
2037
+ contents: "write",
2038
+ "pull-requests": "write"
2039
+ },
2040
+ steps: [
2041
+ {
2042
+ uses: "actions/checkout@v4",
2043
+ with: { "fetch-depth": 0 }
2044
+ },
2045
+ { uses: "pnpm/action-setup@v4" },
2046
+ {
2047
+ uses: "actions/setup-node@v4",
2048
+ with: nodeWith
2049
+ },
2050
+ { run: "pnpm install --frozen-lockfile" },
2051
+ { run: "pnpm build" },
2052
+ {
2053
+ uses: "changesets/action@v1",
2054
+ with: {
2055
+ publish: "pnpm changeset publish",
2056
+ version: "pnpm changeset version"
2057
+ },
2058
+ env: {
2059
+ GITHUB_TOKEN: actionsExpr$1("github.token"),
2060
+ NPM_TOKEN: actionsExpr$1("secrets.NPM_TOKEN")
2061
+ }
2062
+ }
2063
+ ]
2064
+ };
2065
+ return {
2066
+ needs: "check",
2067
+ if: "github.ref == 'refs/heads/main'",
2068
+ "runs-on": "ubuntu-latest",
2069
+ steps: [
2070
+ {
2071
+ uses: "actions/checkout@v4",
2072
+ with: { "fetch-depth": 0 }
2073
+ },
2074
+ { uses: "pnpm/action-setup@v4" },
2075
+ {
2076
+ uses: "actions/setup-node@v4",
2077
+ with: nodeWith
2078
+ },
2079
+ { run: "pnpm install --frozen-lockfile" },
2080
+ { run: "pnpm build" },
2081
+ {
2082
+ name: "Configure git",
2083
+ run: "git config user.name \"forgejo-actions[bot]\"\ngit config user.email \"forgejo-actions[bot]@noreply.localhost\"\n"
2084
+ },
2085
+ {
2086
+ name: "Release",
2087
+ env: {
2088
+ FORGEJO_SERVER_URL: actionsExpr$1("github.server_url"),
2089
+ FORGEJO_REPOSITORY: actionsExpr$1("github.repository"),
2090
+ FORGEJO_TOKEN: actionsExpr$1("secrets.FORGEJO_TOKEN"),
2091
+ NODE_AUTH_TOKEN: actionsExpr$1("secrets.NPM_TOKEN")
2092
+ },
2093
+ run: "pnpm exec tooling release:changesets"
2094
+ }
2095
+ ]
2096
+ };
2034
2097
  }
2035
2098
  function requiredReleaseSteps(strategy, nodeVersionYaml) {
2036
2099
  const isNodeVersionFile = nodeVersionYaml.startsWith("node-version-file");
@@ -2092,10 +2155,42 @@ function buildWorkflow(strategy, ci, nodeVersionYaml) {
2092
2155
  switch (strategy) {
2093
2156
  case "release-it": return releaseItWorkflow(ci, nodeVersionYaml);
2094
2157
  case "simple": return commitAndTagVersionWorkflow(ci, nodeVersionYaml);
2095
- case "changesets": return changesetsWorkflow(ci, nodeVersionYaml);
2096
2158
  default: return null;
2097
2159
  }
2098
2160
  }
2161
+ function generateChangesetsReleaseCi(ctx) {
2162
+ const checkPath = ctx.config.ci === "github" ? ".github/workflows/check.yml" : ".forgejo/workflows/check.yml";
2163
+ const nodeVersionYaml = hasEnginesNode$1(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
2164
+ const existing = ctx.read(checkPath);
2165
+ if (!existing) return {
2166
+ filePath: checkPath,
2167
+ action: "skipped",
2168
+ description: "CI workflow not found — run check generator first"
2169
+ };
2170
+ const addResult = addWorkflowJob(existing, "release", changesetsReleaseJobConfig(ctx.config.ci, nodeVersionYaml));
2171
+ if (addResult.changed) {
2172
+ const withComment = ensureSchemaComment(addResult.content, ctx.config.ci);
2173
+ ctx.write(checkPath, withComment);
2174
+ return {
2175
+ filePath: checkPath,
2176
+ action: "updated",
2177
+ description: "Added release job to CI workflow"
2178
+ };
2179
+ }
2180
+ const merged = mergeWorkflowSteps(existing, "release", requiredReleaseSteps("changesets", nodeVersionYaml));
2181
+ if (!merged.changed) return {
2182
+ filePath: checkPath,
2183
+ action: "skipped",
2184
+ description: "Release job in CI workflow already up to date"
2185
+ };
2186
+ const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
2187
+ ctx.write(checkPath, withComment);
2188
+ return {
2189
+ filePath: checkPath,
2190
+ action: "updated",
2191
+ description: "Added missing steps to release job in CI workflow"
2192
+ };
2193
+ }
2099
2194
  async function generateReleaseCi(ctx) {
2100
2195
  const filePath = "release-ci";
2101
2196
  if (ctx.config.releaseStrategy === "none" || ctx.config.ci === "none") return {
@@ -2103,9 +2198,10 @@ async function generateReleaseCi(ctx) {
2103
2198
  action: "skipped",
2104
2199
  description: "Release CI workflow not applicable"
2105
2200
  };
2201
+ if (ctx.config.releaseStrategy === "changesets") return generateChangesetsReleaseCi(ctx);
2106
2202
  const isGitHub = ctx.config.ci === "github";
2107
2203
  const workflowPath = isGitHub ? ".github/workflows/release.yml" : ".forgejo/workflows/release.yml";
2108
- const nodeVersionYaml = hasEnginesNode(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
2204
+ const nodeVersionYaml = hasEnginesNode$1(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
2109
2205
  const content = buildWorkflow(ctx.config.releaseStrategy, ctx.config.ci, nodeVersionYaml);
2110
2206
  if (!content) return {
2111
2207
  filePath,
@@ -2115,16 +2211,42 @@ async function generateReleaseCi(ctx) {
2115
2211
  if (ctx.exists(workflowPath)) {
2116
2212
  const existing = ctx.read(workflowPath);
2117
2213
  if (existing) {
2214
+ if (existing === content || ensureSchemaComment(existing, ctx.config.ci) === content) return {
2215
+ filePath: workflowPath,
2216
+ action: "skipped",
2217
+ description: "Release workflow already up to date"
2218
+ };
2118
2219
  const merged = mergeWorkflowSteps(existing, "release", requiredReleaseSteps(ctx.config.releaseStrategy, nodeVersionYaml));
2119
2220
  const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
2120
- if (merged.changed || withComment !== merged.content) {
2121
- ctx.write(workflowPath, withComment);
2221
+ if (withComment === content) {
2222
+ ctx.write(workflowPath, content);
2122
2223
  return {
2123
2224
  filePath: workflowPath,
2124
2225
  action: "updated",
2125
2226
  description: "Added missing steps to release workflow"
2126
2227
  };
2127
2228
  }
2229
+ if (await ctx.confirmOverwrite(workflowPath) === "skip") {
2230
+ if (merged.changed || withComment !== merged.content) {
2231
+ ctx.write(workflowPath, withComment);
2232
+ return {
2233
+ filePath: workflowPath,
2234
+ action: "updated",
2235
+ description: "Added missing steps to release workflow"
2236
+ };
2237
+ }
2238
+ return {
2239
+ filePath: workflowPath,
2240
+ action: "skipped",
2241
+ description: "Existing release workflow preserved"
2242
+ };
2243
+ }
2244
+ ctx.write(workflowPath, content);
2245
+ return {
2246
+ filePath: workflowPath,
2247
+ action: "updated",
2248
+ description: "Replaced release workflow with updated template"
2249
+ };
2128
2250
  }
2129
2251
  return {
2130
2252
  filePath: workflowPath,
@@ -2395,6 +2517,129 @@ async function generateVscodeSettings(ctx) {
2395
2517
  return results;
2396
2518
  }
2397
2519
  //#endregion
2520
+ //#region src/generators/deploy-ci.ts
2521
+ /** Build a GitHub Actions expression like `${{ expr }}` without triggering no-template-curly-in-string. */
2522
+ function actionsExpr(expr) {
2523
+ return `\${{ ${expr} }}`;
2524
+ }
2525
+ function hasEnginesNode(ctx) {
2526
+ return typeof ctx.packageJson?.["engines"]?.["node"] === "string";
2527
+ }
2528
+ function deployWorkflow(ci, nodeVersionYaml) {
2529
+ return `${workflowSchemaComment(ci)}name: Deploy
2530
+ on:
2531
+ push:
2532
+ tags:
2533
+ - "v[0-9]+.[0-9]+.[0-9]+"
2534
+
2535
+ jobs:
2536
+ deploy:
2537
+ runs-on: ubuntu-latest
2538
+ steps:
2539
+ - uses: actions/checkout@v4
2540
+ - uses: pnpm/action-setup@v4
2541
+ - uses: actions/setup-node@v4
2542
+ with:
2543
+ ${nodeVersionYaml}
2544
+ - run: pnpm install --frozen-lockfile
2545
+ - name: Publish Docker images
2546
+ env:
2547
+ DOCKER_REGISTRY_HOST: ${actionsExpr("vars.DOCKER_REGISTRY_HOST")}
2548
+ DOCKER_REGISTRY_NAMESPACE: ${actionsExpr("vars.DOCKER_REGISTRY_NAMESPACE")}
2549
+ DOCKER_USERNAME: ${actionsExpr("secrets.DOCKER_USERNAME")}
2550
+ DOCKER_PASSWORD: ${actionsExpr("secrets.DOCKER_PASSWORD")}
2551
+ run: pnpm exec tooling docker:publish
2552
+ `;
2553
+ }
2554
+ function requiredDeploySteps() {
2555
+ return [
2556
+ {
2557
+ match: { uses: "actions/checkout" },
2558
+ step: { uses: "actions/checkout@v4" }
2559
+ },
2560
+ {
2561
+ match: { uses: "pnpm/action-setup" },
2562
+ step: { uses: "pnpm/action-setup@v4" }
2563
+ },
2564
+ {
2565
+ match: { uses: "actions/setup-node" },
2566
+ step: { uses: "actions/setup-node@v4" }
2567
+ },
2568
+ {
2569
+ match: { run: "pnpm install" },
2570
+ step: { run: "pnpm install --frozen-lockfile" }
2571
+ },
2572
+ {
2573
+ match: { run: "docker:publish" },
2574
+ step: { run: "pnpm exec tooling docker:publish" }
2575
+ }
2576
+ ];
2577
+ }
2578
+ async function generateDeployCi(ctx) {
2579
+ const filePath = "deploy-ci";
2580
+ if (!ctx.config.setupDocker || ctx.config.ci === "none") return {
2581
+ filePath,
2582
+ action: "skipped",
2583
+ description: "Deploy CI workflow not applicable"
2584
+ };
2585
+ const isGitHub = ctx.config.ci === "github";
2586
+ const workflowPath = isGitHub ? ".github/workflows/deploy.yml" : ".forgejo/workflows/deploy.yml";
2587
+ const nodeVersionYaml = hasEnginesNode(ctx) ? "node-version-file: package.json" : "node-version: \"24\"";
2588
+ const content = deployWorkflow(ctx.config.ci, nodeVersionYaml);
2589
+ if (ctx.exists(workflowPath)) {
2590
+ const existing = ctx.read(workflowPath);
2591
+ if (existing) {
2592
+ if (existing === content || ensureSchemaComment(existing, ctx.config.ci) === content) return {
2593
+ filePath: workflowPath,
2594
+ action: "skipped",
2595
+ description: "Deploy workflow already up to date"
2596
+ };
2597
+ const merged = mergeWorkflowSteps(existing, "deploy", requiredDeploySteps());
2598
+ const withComment = ensureSchemaComment(merged.content, ctx.config.ci);
2599
+ if (withComment === content) {
2600
+ ctx.write(workflowPath, content);
2601
+ return {
2602
+ filePath: workflowPath,
2603
+ action: "updated",
2604
+ description: "Added missing steps to deploy workflow"
2605
+ };
2606
+ }
2607
+ if (await ctx.confirmOverwrite(workflowPath) === "skip") {
2608
+ if (merged.changed || withComment !== merged.content) {
2609
+ ctx.write(workflowPath, withComment);
2610
+ return {
2611
+ filePath: workflowPath,
2612
+ action: "updated",
2613
+ description: "Added missing steps to deploy workflow"
2614
+ };
2615
+ }
2616
+ return {
2617
+ filePath: workflowPath,
2618
+ action: "skipped",
2619
+ description: "Existing deploy workflow preserved"
2620
+ };
2621
+ }
2622
+ ctx.write(workflowPath, content);
2623
+ return {
2624
+ filePath: workflowPath,
2625
+ action: "updated",
2626
+ description: "Replaced deploy workflow with updated template"
2627
+ };
2628
+ }
2629
+ return {
2630
+ filePath: workflowPath,
2631
+ action: "skipped",
2632
+ description: "Deploy workflow already up to date"
2633
+ };
2634
+ }
2635
+ ctx.write(workflowPath, content);
2636
+ return {
2637
+ filePath: workflowPath,
2638
+ action: "created",
2639
+ description: `Generated ${isGitHub ? "GitHub" : "Forgejo"} Actions deploy workflow`
2640
+ };
2641
+ }
2642
+ //#endregion
2398
2643
  //#region src/generators/pipeline.ts
2399
2644
  /** Run all generators sequentially and return their results. */
2400
2645
  async function runGenerators(ctx) {
@@ -2414,6 +2659,7 @@ async function runGenerators(ctx) {
2414
2659
  results.push(await generateReleaseIt(ctx));
2415
2660
  results.push(await generateChangesets(ctx));
2416
2661
  results.push(await generateReleaseCi(ctx));
2662
+ results.push(await generateDeployCi(ctx));
2417
2663
  results.push(...await generateVitest(ctx));
2418
2664
  results.push(...await generateVscodeSettings(ctx));
2419
2665
  return results;
@@ -2444,7 +2690,12 @@ const ToolingConfigSchema = z.object({
2444
2690
  "react",
2445
2691
  "library"
2446
2692
  ]).optional(),
2447
- detectPackageTypes: z.boolean().optional()
2693
+ detectPackageTypes: z.boolean().optional(),
2694
+ setupDocker: z.boolean().optional(),
2695
+ docker: z.record(z.string(), z.object({
2696
+ dockerfile: z.string(),
2697
+ context: z.string().default(".")
2698
+ })).optional()
2448
2699
  });
2449
2700
  /** Load saved tooling config from the target directory. Returns undefined if missing or invalid. */
2450
2701
  function loadToolingConfig(targetDir) {
@@ -2469,7 +2720,8 @@ function saveToolingConfig(ctx, config) {
2469
2720
  setupRenovate: config.setupRenovate,
2470
2721
  releaseStrategy: config.releaseStrategy,
2471
2722
  projectType: config.projectType,
2472
- detectPackageTypes: config.detectPackageTypes
2723
+ detectPackageTypes: config.detectPackageTypes,
2724
+ setupDocker: config.setupDocker
2473
2725
  };
2474
2726
  const content = JSON.stringify(saved, null, 2) + "\n";
2475
2727
  const existing = ctx.exists(CONFIG_FILE) ? ctx.read(CONFIG_FILE) : void 0;
@@ -2499,7 +2751,8 @@ function mergeWithSavedConfig(detected, saved) {
2499
2751
  setupRenovate: saved.setupRenovate ?? detected.setupRenovate,
2500
2752
  releaseStrategy: saved.releaseStrategy ?? detected.releaseStrategy,
2501
2753
  projectType: saved.projectType ?? detected.projectType,
2502
- detectPackageTypes: saved.detectPackageTypes ?? detected.detectPackageTypes
2754
+ detectPackageTypes: saved.detectPackageTypes ?? detected.detectPackageTypes,
2755
+ setupDocker: saved.setupDocker ?? detected.setupDocker
2503
2756
  };
2504
2757
  }
2505
2758
  //#endregion
@@ -2931,7 +3184,7 @@ async function createRelease(executor, conn, tag) {
2931
3184
  //#endregion
2932
3185
  //#region src/release/log.ts
2933
3186
  /** Log a debug message when verbose mode is enabled. */
2934
- function debug(config, message) {
3187
+ function debug$1(config, message) {
2935
3188
  if (config.verbose) p.log.info(`[debug] ${message}`);
2936
3189
  }
2937
3190
  /** Log the result of an exec call when verbose mode is enabled. */
@@ -3014,7 +3267,7 @@ function buildPrContent(executor, cwd, packagesBefore) {
3014
3267
  async function runVersionMode(executor, config) {
3015
3268
  p.log.info("Changesets detected — versioning packages");
3016
3269
  const packagesBefore = executor.listWorkspacePackages(config.cwd);
3017
- debug(config, `Packages before versioning: ${packagesBefore.map((pkg) => `${pkg.name}@${pkg.version}`).join(", ") || "(none)"}`);
3270
+ debug$1(config, `Packages before versioning: ${packagesBefore.map((pkg) => `${pkg.name}@${pkg.version}`).join(", ") || "(none)"}`);
3018
3271
  const changesetConfigPath = path.join(config.cwd, ".changeset", "config.json");
3019
3272
  const originalConfig = executor.readFile(changesetConfigPath);
3020
3273
  if (originalConfig) {
@@ -3025,7 +3278,7 @@ async function runVersionMode(executor, config) {
3025
3278
  commit: false
3026
3279
  };
3027
3280
  executor.writeFile(changesetConfigPath, JSON.stringify(patched, null, 2) + "\n");
3028
- debug(config, "Temporarily disabled changeset commit:true");
3281
+ debug$1(config, "Temporarily disabled changeset commit:true");
3029
3282
  }
3030
3283
  }
3031
3284
  const versionResult = executor.exec("pnpm changeset version", { cwd: config.cwd });
@@ -3034,11 +3287,11 @@ async function runVersionMode(executor, config) {
3034
3287
  if (versionResult.exitCode !== 0) throw new FatalError(`pnpm changeset version failed (exit code ${String(versionResult.exitCode)}):\n${versionResult.stderr}`);
3035
3288
  debugExec(config, "pnpm install --no-frozen-lockfile", executor.exec("pnpm install --no-frozen-lockfile", { cwd: config.cwd }));
3036
3289
  const { title, body } = buildPrContent(executor, config.cwd, packagesBefore);
3037
- debug(config, `PR title: ${title}`);
3290
+ debug$1(config, `PR title: ${title}`);
3038
3291
  executor.exec("git add -A", { cwd: config.cwd });
3039
3292
  const remainingChangesets = executor.listChangesetFiles(config.cwd);
3040
3293
  if (remainingChangesets.length > 0) p.log.warn(`Changeset files still present after versioning: ${remainingChangesets.join(", ")}`);
3041
- debug(config, `Changeset files after versioning: ${remainingChangesets.length > 0 ? remainingChangesets.join(", ") : "(none — all consumed)"}`);
3294
+ debug$1(config, `Changeset files after versioning: ${remainingChangesets.length > 0 ? remainingChangesets.join(", ") : "(none — all consumed)"}`);
3042
3295
  const commitResult = executor.exec("git commit -m \"chore: version packages\"", { cwd: config.cwd });
3043
3296
  debugExec(config, "git commit", commitResult);
3044
3297
  if (commitResult.exitCode !== 0) {
@@ -3062,7 +3315,7 @@ async function runVersionMode(executor, config) {
3062
3315
  token: config.token
3063
3316
  };
3064
3317
  const existingPr = await findOpenPr(executor, conn, BRANCH);
3065
- debug(config, `Existing open PR for ${BRANCH}: ${existingPr === null ? "(none)" : `#${String(existingPr)}`}`);
3318
+ debug$1(config, `Existing open PR for ${BRANCH}: ${existingPr === null ? "(none)" : `#${String(existingPr)}`}`);
3066
3319
  if (existingPr === null) {
3067
3320
  await createPr(executor, conn, {
3068
3321
  title,
@@ -3110,14 +3363,14 @@ async function runPublishMode(executor, config) {
3110
3363
  debugExec(config, "pnpm changeset publish", publishResult);
3111
3364
  if (publishResult.exitCode !== 0) throw new FatalError(`pnpm changeset publish failed (exit code ${String(publishResult.exitCode)}):\n${publishResult.stderr}`);
3112
3365
  const stdoutTags = parseNewTags(publishResult.stdout + "\n" + publishResult.stderr);
3113
- debug(config, `Tags from publish stdout: ${stdoutTags.length > 0 ? stdoutTags.join(", ") : "(none)"}`);
3366
+ debug$1(config, `Tags from publish stdout: ${stdoutTags.length > 0 ? stdoutTags.join(", ") : "(none)"}`);
3114
3367
  const expectedTags = computeExpectedTags(executor.listWorkspacePackages(config.cwd));
3115
- debug(config, `Expected tags from workspace packages: ${expectedTags.length > 0 ? expectedTags.join(", ") : "(none)"}`);
3368
+ debug$1(config, `Expected tags from workspace packages: ${expectedTags.length > 0 ? expectedTags.join(", ") : "(none)"}`);
3116
3369
  const remoteTags = parseRemoteTags(executor.exec("git ls-remote --tags origin", { cwd: config.cwd }).stdout);
3117
- debug(config, `Remote tags: ${remoteTags.length > 0 ? remoteTags.join(", ") : "(none)"}`);
3370
+ debug$1(config, `Remote tags: ${remoteTags.length > 0 ? remoteTags.join(", ") : "(none)"}`);
3118
3371
  const remoteSet = new Set(remoteTags);
3119
3372
  const tagsToPush = reconcileTags(expectedTags, remoteTags, stdoutTags);
3120
- debug(config, `Reconciled tags to push: ${tagsToPush.length > 0 ? tagsToPush.join(", ") : "(none)"}`);
3373
+ debug$1(config, `Reconciled tags to push: ${tagsToPush.length > 0 ? tagsToPush.join(", ") : "(none)"}`);
3121
3374
  if (config.dryRun) {
3122
3375
  if (tagsToPush.length === 0) {
3123
3376
  p.log.info("No packages were published");
@@ -3286,12 +3539,12 @@ function buildReleaseConfig(flags) {
3286
3539
  /** Core release logic — testable with a mock executor. */
3287
3540
  async function runRelease(config, executor) {
3288
3541
  const changesetFiles = executor.listChangesetFiles(config.cwd);
3289
- debug(config, `Changeset files found: ${changesetFiles.length > 0 ? changesetFiles.join(", ") : "(none)"}`);
3542
+ debug$1(config, `Changeset files found: ${changesetFiles.length > 0 ? changesetFiles.join(", ") : "(none)"}`);
3290
3543
  if (changesetFiles.length > 0) {
3291
- debug(config, "Entering version mode");
3544
+ debug$1(config, "Entering version mode");
3292
3545
  return runVersionMode(executor, config);
3293
3546
  }
3294
- debug(config, "Entering publish mode");
3547
+ debug$1(config, "Entering publish mode");
3295
3548
  return runPublishMode(executor, config);
3296
3549
  }
3297
3550
  //#endregion
@@ -3435,7 +3688,7 @@ async function runSimpleRelease(executor, config) {
3435
3688
  debugExec(config, "commit-and-tag-version", versionResult);
3436
3689
  if (versionResult.exitCode !== 0) throw new FatalError(`commit-and-tag-version failed (exit code ${String(versionResult.exitCode)}):\n${versionResult.stderr || versionResult.stdout}`);
3437
3690
  const version = readVersion(executor, config.cwd);
3438
- debug(config, `New version: ${version}`);
3691
+ debug$1(config, `New version: ${version}`);
3439
3692
  const tagResult = executor.exec("git describe --tags --abbrev=0", { cwd: config.cwd });
3440
3693
  debugExec(config, "git describe", tagResult);
3441
3694
  const tag = tagResult.stdout.trim();
@@ -3457,7 +3710,7 @@ async function runSimpleRelease(executor, config) {
3457
3710
  let pushed = false;
3458
3711
  if (!config.noPush) {
3459
3712
  const branch = executor.exec("git rev-parse --abbrev-ref HEAD", { cwd: config.cwd }).stdout.trim() || "main";
3460
- debug(config, `Pushing to origin/${branch}`);
3713
+ debug$1(config, `Pushing to origin/${branch}`);
3461
3714
  const pushResult = executor.exec(`git push --follow-tags origin ${branch}`, { cwd: config.cwd });
3462
3715
  debugExec(config, "git push", pushResult);
3463
3716
  if (pushResult.exitCode !== 0) throw new FatalError(`git push failed (exit code ${String(pushResult.exitCode)}):\n${pushResult.stderr || pushResult.stdout}`);
@@ -3487,7 +3740,7 @@ async function createPlatformRelease(executor, config, tag) {
3487
3740
  if (!config.platform) return false;
3488
3741
  if (config.platform.type === "forgejo") {
3489
3742
  if (await findRelease(executor, config.platform.conn, tag)) {
3490
- debug(config, `Release for ${tag} already exists, skipping`);
3743
+ debug$1(config, `Release for ${tag} already exists, skipping`);
3491
3744
  return false;
3492
3745
  }
3493
3746
  await createRelease(executor, config.platform.conn, tag);
@@ -3686,11 +3939,303 @@ const runChecksCommand = defineCommand({
3686
3939
  }
3687
3940
  });
3688
3941
  //#endregion
3942
//#region src/release/docker.ts
// Shape of the `docker` field in .tooling.json: a map keyed by package
// directory name. `context` defaults to "." (project root) when omitted.
const ToolingDockerMapSchema = z.record(z.string(), z.object({
	dockerfile: z.string(),
	context: z.string().default(".")
}));
// Wrapper used when parsing .tooling.json — only the `docker` key is inspected here.
const ToolingConfigDockerSchema = z.object({ docker: ToolingDockerMapSchema.optional() });
// Minimal view of a package.json: both fields are optional so a partial
// manifest still parses and callers handle the undefined cases.
const PackageInfoSchema = z.object({
	name: z.string().optional(),
	version: z.string().optional()
});
3952
/** Load the `docker` section of .tooling.json; an empty map means the file is missing, invalid, or has no docker field. */
function loadDockerMap(executor, cwd) {
	const raw = executor.readFile(path.join(cwd, ".tooling.json"));
	if (!raw) return {};
	try {
		const parsed = ToolingConfigDockerSchema.safeParse(JSON.parse(raw));
		return parsed.success ? (parsed.data.docker ?? {}) : {};
	} catch (_error) {
		// Malformed JSON is treated the same as a missing file.
		return {};
	}
}
3965
/** Extract `name` and `version` from a package.json file; both come back undefined on read or parse failure. */
function readPackageInfo(executor, packageJsonPath) {
	const empty = {
		name: void 0,
		version: void 0
	};
	const raw = executor.readFile(packageJsonPath);
	if (!raw) return empty;
	try {
		const parsed = PackageInfoSchema.safeParse(JSON.parse(raw));
		if (!parsed.success) return empty;
		return {
			name: parsed.data.name,
			version: parsed.data.version
		};
	} catch (_error) {
		// Unparseable JSON degrades to "no info", matching the missing-file case.
		return empty;
	}
}
3989
/**
 * Read docker config from .tooling.json and resolve packages.
 * Each entry in the docker map is keyed by package directory name.
 * Image names are derived from {root-name}-{package-name} using each package's package.json name.
 * Versions are read from each package's own package.json.
 * NOTE(review): assumes all packages live under `packages/<dir>` — confirm against repo layout conventions.
 */
function detectDockerPackages(executor, cwd, repoName) {
	const entries = Object.entries(loadDockerMap(executor, cwd));
	return entries.map(([dir, docker]) => {
		const info = readPackageInfo(executor, path.join(cwd, "packages", dir, "package.json"));
		return {
			dir,
			// Fall back to the directory name when the manifest has no name.
			imageName: `${repoName}-${info.name ?? dir}`,
			version: info.version,
			docker
		};
	});
}
4009
/**
 * Read docker config for a single package from .tooling.json.
 * Used by the per-package image:build script. The package is identified by
 * the basename of `packageDir`, so "packages/server" and "./server" both map to "server".
 * @throws FatalError when .tooling.json has no entry for the package.
 */
function readSinglePackageDocker(executor, cwd, packageDir, repoName) {
	const dir = path.basename(path.resolve(cwd, packageDir));
	const dockerMap = loadDockerMap(executor, cwd);
	const docker = dockerMap[dir];
	if (!docker) throw new FatalError(`No docker config found for package "${dir}" in .tooling.json`);
	const info = readPackageInfo(executor, path.join(cwd, "packages", dir, "package.json"));
	return {
		dir,
		imageName: `${repoName}-${info.name ?? dir}`,
		version: info.version,
		docker
	};
}
4025
/**
 * Split a semver string (optionally "v"-prefixed) into numeric parts.
 * Trailing content after X.Y.Z (e.g. a "-beta.1" prerelease suffix) is ignored.
 * @throws FatalError when the string does not start with X.Y.Z.
 */
function parseSemver(version) {
	const match = /^(\d+)\.(\d+)\.(\d+)/.exec(version.replace(/^v/, ""));
	if (!match?.[1] || !match[2] || !match[3]) throw new FatalError(`Invalid semver version: ${version}`);
	const [, major, minor, patch] = match;
	return {
		major: Number(major),
		minor: Number(minor),
		patch: Number(patch)
	};
}
4036
/** Expand a version into the docker tags published for it: latest, vX.Y.Z, vX.Y, vX */
function generateTags(version) {
	const v = parseSemver(version);
	return [
		"latest",
		`v${v.major}.${v.minor}.${v.patch}`,
		`v${v.major}.${v.minor}`,
		`v${v.major}`
	];
}
4046
/** Compose the fully-qualified image reference: namespace/imageName:tag */
function imageRef(namespace, imageName, tag) {
	return [namespace, "/", imageName, ":", tag].join("");
}
4050
/** Plain console logger, kept as a named seam so output can be intercepted in tests. */
function log(message) {
	console.log(message);
}
/** Verbose-gated logger; output carries a "[debug]" prefix so it is easy to grep. */
function debug(verbose, message) {
	if (!verbose) return;
	console.log(`[debug] ${message}`);
}
4056
/**
 * Resolve the repo name from the root package.json "name" field.
 * @throws FatalError when the file is missing or has no name.
 */
function readRepoName(executor, cwd) {
	const raw = executor.readFile(path.join(cwd, "package.json"));
	if (!raw) throw new FatalError("No package.json found in project root");
	const name = parsePackageJson(raw)?.name;
	if (!name) throw new FatalError("Root package.json must have a name field");
	return name;
}
4064
/**
 * Build a single docker image from its config. Dockerfile and context paths
 * from .tooling.json are resolved relative to the project root (cwd).
 * @throws FatalError when `docker build` exits non-zero.
 */
function buildImage(executor, pkg, cwd, verbose, extraArgs) {
	const dockerfilePath = path.resolve(cwd, pkg.docker.dockerfile);
	const contextPath = path.resolve(cwd, pkg.docker.context);
	// Quote the filesystem paths: a project root containing spaces would
	// otherwise split into multiple shell arguments and break the command.
	// Image names cannot contain spaces, so the -t argument needs no quoting.
	const command = [
		"docker build",
		`-f "${dockerfilePath}"`,
		`-t ${pkg.imageName}:latest`,
		...extraArgs,
		`"${contextPath}"`
	].join(" ");
	debug(verbose, `Running: ${command}`);
	const buildResult = executor.exec(command);
	debug(verbose, `Build stdout: ${buildResult.stdout}`);
	if (buildResult.exitCode !== 0) throw new FatalError(`docker build failed for ${pkg.dir} (exit ${buildResult.exitCode}): ${buildResult.stderr}`);
}
4080
/**
 * Detect packages with docker config in .tooling.json and build each one.
 * Runs `docker build -f <dockerfile> -t <image-name>:latest <context>` for each package.
 * Dockerfile and context paths are resolved relative to the project root.
 *
 * When `packageDir` is set, builds only that single package (for use as an image:build script).
 */
function runDockerBuild(executor, config) {
	const repoName = readRepoName(executor, config.cwd);
	// Shared per-package build step so single- and multi-package paths stay in sync.
	const buildOne = (pkg) => {
		log(`Building image for ${pkg.dir} (${pkg.imageName}:latest)...`);
		buildImage(executor, pkg, config.cwd, config.verbose, config.extraArgs);
	};
	if (config.packageDir) {
		const pkg = readSinglePackageDocker(executor, config.cwd, config.packageDir, repoName);
		buildOne(pkg);
		log(`Built ${pkg.imageName}:latest`);
		return { packages: [pkg] };
	}
	const packages = detectDockerPackages(executor, config.cwd, repoName);
	if (packages.length === 0) {
		log("No packages with docker config found");
		return { packages: [] };
	}
	log(`Found ${packages.length} Docker package(s): ${packages.map((p) => p.dir).join(", ")}`);
	for (const pkg of packages) buildOne(pkg);
	log(`Built ${packages.length} image(s)`);
	return { packages };
}
4109
/**
 * Run the full Docker publish pipeline:
 * 1. Build all images via runDockerBuild
 * 2. Login to registry
 * 3. Tag each image with semver variants from its own package.json version
 * 4. Push all tags
 * 5. Logout from registry
 *
 * In dry-run mode, steps 2, the push in 4, and 5 are skipped; images are
 * still built and tagged locally.
 * Returns the built packages plus every fully-qualified tag pushed (or that
 * would have been pushed in dry-run).
 */
function runDockerPublish(executor, config) {
	// Always build everything; the single-package path is for local use only.
	const { packages } = runDockerBuild(executor, {
		cwd: config.cwd,
		packageDir: void 0,
		verbose: config.verbose,
		extraArgs: []
	});
	if (packages.length === 0) return {
		packages: [],
		tags: []
	};
	// Fail fast before logging in: every package must carry its own version,
	// since tags are derived per-package (not from the root manifest).
	for (const pkg of packages) if (!pkg.version) throw new FatalError(`Package ${pkg.dir} has docker config but no version in package.json`);
	if (!config.dryRun) {
		log(`Logging in to ${config.registryHost}...`);
		// NOTE(review): embedding the password via `echo "..."` puts the secret
		// in the shell command string (visible in process listings, and a `"` or
		// `$` in the password would break/inject the command) — confirm whether
		// the executor can pass stdin directly instead.
		const loginResult = executor.exec(`echo "${config.password}" | docker login ${config.registryHost} -u ${config.username} --password-stdin`);
		if (loginResult.exitCode !== 0) throw new FatalError(`Docker login failed: ${loginResult.stderr}`);
	} else log("[dry-run] Skipping docker login");
	const allTags = [];
	try {
		for (const pkg of packages) {
			// version is guaranteed above; ?? "" only satisfies the type checker.
			const tags = generateTags(pkg.version ?? "");
			log(`${pkg.dir} v${pkg.version} → tags: ${tags.join(", ")}`);
			for (const tag of tags) {
				const ref = imageRef(config.registryNamespace, pkg.imageName, tag);
				allTags.push(ref);
				log(`Tagging ${pkg.imageName} → ${ref}`);
				// Source has no explicit tag: docker resolves a bare image name to
				// :latest, which is the tag runDockerBuild applied.
				const tagResult = executor.exec(`docker tag ${pkg.imageName} ${ref}`);
				if (tagResult.exitCode !== 0) throw new FatalError(`docker tag failed: ${tagResult.stderr}`);
				if (!config.dryRun) {
					log(`Pushing ${ref}...`);
					const pushResult = executor.exec(`docker push ${ref}`);
					if (pushResult.exitCode !== 0) throw new FatalError(`docker push failed: ${pushResult.stderr}`);
				} else log(`[dry-run] Skipping push for ${ref}`);
			}
		}
	} finally {
		// Best-effort logout even when a tag/push failed; exit code is ignored.
		if (!config.dryRun) {
			log(`Logging out from ${config.registryHost}...`);
			executor.exec(`docker logout ${config.registryHost}`);
		}
	}
	log(`Published ${allTags.length} image tag(s)`);
	return {
		packages,
		tags: allTags
	};
}
//#endregion
4165
//#region src/commands/publish-docker.ts
/** Read an environment variable, failing loudly when it is unset or empty. */
function requireEnv(name) {
	const value = process.env[name];
	if (value) return value;
	throw new FatalError(`Missing required environment variable: ${name}`);
}
4171
/**
 * `tooling docker:publish` — build, tag, and push Docker images.
 * Registry coordinates and credentials come from environment variables:
 * DOCKER_REGISTRY_HOST, DOCKER_REGISTRY_NAMESPACE, DOCKER_USERNAME, DOCKER_PASSWORD.
 */
const publishDockerCommand = defineCommand({
	meta: {
		name: "docker:publish",
		// Fixed help text: packages are discovered via their docker config in
		// .tooling.json (matching docker:build and the README), not via an
		// image:build script.
		description: "Build, tag, and push Docker images for packages with docker config in .tooling.json"
	},
	args: {
		"dry-run": {
			type: "boolean",
			description: "Build and tag images but skip login, push, and logout"
		},
		verbose: {
			type: "boolean",
			description: "Enable detailed debug logging"
		}
	},
	async run({ args }) {
		const config = {
			cwd: process.cwd(),
			registryHost: requireEnv("DOCKER_REGISTRY_HOST"),
			registryNamespace: requireEnv("DOCKER_REGISTRY_NAMESPACE"),
			username: requireEnv("DOCKER_USERNAME"),
			password: requireEnv("DOCKER_PASSWORD"),
			dryRun: args["dry-run"] === true,
			verbose: args.verbose === true
		};
		runDockerPublish(createRealExecutor(), config);
	}
});
//#endregion
4200
//#region src/commands/docker-build.ts
/** `tooling docker:build` — build images for packages declared in .tooling.json. */
const dockerBuildCommand = defineCommand({
	meta: {
		name: "docker:build",
		description: "Build Docker images for packages with docker config in .tooling.json"
	},
	args: {
		package: {
			type: "string",
			description: "Build a single package by directory path (e.g. packages/server). Useful as an image:build script."
		},
		verbose: {
			type: "boolean",
			description: "Enable detailed debug logging"
		},
		_: {
			type: "positional",
			required: false,
			description: "Extra arguments passed to docker build (after --)"
		}
	},
	async run({ args }) {
		// Positionals may arrive as a single value or an array; normalize to
		// strings and drop empties before forwarding to docker build.
		const rawExtra = args._ ?? [];
		const normalized = (Array.isArray(rawExtra) ? rawExtra : [rawExtra]).map(String);
		runDockerBuild(createRealExecutor(), {
			cwd: process.cwd(),
			packageDir: args.package,
			verbose: args.verbose === true,
			extraArgs: normalized.filter((arg) => arg.length > 0)
		});
	}
});
//#endregion
3689
4234
  //#region src/bin.ts
3690
4235
  const main = defineCommand({
3691
4236
  meta: {
3692
4237
  name: "tooling",
3693
- version: "0.14.1",
4238
+ version: "0.15.0",
3694
4239
  description: "Bootstrap and maintain standardized TypeScript project tooling"
3695
4240
  },
3696
4241
  subCommands: {
@@ -3702,10 +4247,12 @@ const main = defineCommand({
3702
4247
  "release:trigger": releaseTriggerCommand,
3703
4248
  "forgejo:create-release": createForgejoReleaseCommand,
3704
4249
  "changesets:merge": releaseMergeCommand,
3705
- "release:simple": releaseSimpleCommand
4250
+ "release:simple": releaseSimpleCommand,
4251
+ "docker:publish": publishDockerCommand,
4252
+ "docker:build": dockerBuildCommand
3706
4253
  }
3707
4254
  });
3708
- console.log(`@bensandee/tooling v0.14.1`);
4255
+ console.log(`@bensandee/tooling v0.15.0`);
3709
4256
  runMain(main);
3710
4257
  //#endregion
3711
4258
  export {};
package/dist/index.d.mts CHANGED
@@ -45,6 +45,8 @@ interface ProjectConfig {
45
45
  projectType: "default" | "node" | "react" | "library";
46
46
  /** Auto-detect and configure tsconfig bases for monorepo packages */
47
47
  detectPackageTypes: boolean;
48
+ /** Set up Docker image build/tag/push via docker:publish */
49
+ setupDocker: boolean;
48
50
  /** Target directory (default: cwd) */
49
51
  targetDir: string;
50
52
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@bensandee/tooling",
3
- "version": "0.14.1",
3
+ "version": "0.15.0",
4
4
  "description": "CLI tool to bootstrap and maintain standardized TypeScript project tooling",
5
5
  "bin": {
6
6
  "tooling": "./dist/bin.mjs"