@bensandee/tooling 0.12.0 → 0.14.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +215 -0
- package/README.md +121 -0
- package/dist/bin.mjs +270 -104
- package/dist/docker-verify/index.mjs +218 -0
- package/dist/exec-CC49vrkM.mjs +7 -0
- package/package.json +11 -5
package/CHANGELOG.md
ADDED
|
@@ -0,0 +1,215 @@
|
|
|
1
|
+
# @bensandee/tooling
|
|
2
|
+
|
|
3
|
+
## 0.14.0
|
|
4
|
+
|
|
5
|
+
### Minor Changes
|
|
6
|
+
|
|
7
|
+
- e95d449: Add `--fail-fast` / `--no-fail-fast` flag to `checks:run` to control whether execution stops on the first failure. Defaults to fail-fast in dev and continue-on-error in CI.
|
|
8
|
+
- 715a4ea: Add `@bensandee/tooling/docker-verify` subpath export: a TypeScript framework for Docker image verification with compose lifecycle management, HTTP health polling, container health monitoring, and signal-safe cleanup. Consumers import building blocks and compose them with custom validators instead of writing boilerplate.
|
|
9
|
+
- 27c3480: Add `release:simple` command and rename CLI subcommands
|
|
10
|
+
|
|
11
|
+
**Breaking changes:**
|
|
12
|
+
|
|
13
|
+
- `release:create-forgejo-release` renamed to `forgejo:create-release`
|
|
14
|
+
- `release:merge` renamed to `changesets:merge`
|
|
15
|
+
- `releaseStrategy: "commit-and-tag-version"` renamed to `"simple"` in `.tooling.json` config
|
|
16
|
+
- Generated CI workflow for commit-and-tag-version now uses `pnpm exec tooling release:simple` instead of inline shell commands
|
|
17
|
+
|
|
18
|
+
**New feature:**
|
|
19
|
+
|
|
20
|
+
`release:simple` — a CLI command that handles the full release lifecycle for projects using commit-and-tag-version:
|
|
21
|
+
|
|
22
|
+
- Runs `commit-and-tag-version` to bump version, update CHANGELOG, and create a git tag
|
|
23
|
+
- Pushes to origin with `--follow-tags`
|
|
24
|
+
- Creates sliding version tags (vX, vX.Y) for flexible deployment pinning
|
|
25
|
+
- Creates Forgejo or GitHub releases automatically
|
|
26
|
+
|
|
27
|
+
### Patch Changes
|
|
28
|
+
|
|
29
|
+
- 715a4ea: Add README files to all published packages for npm registry documentation
|
|
30
|
+
- 27c3480: Pre-populate `repo:init` prompts from saved `.tooling.json` config
|
|
31
|
+
|
|
32
|
+
When re-running `repo:init` on a project with an existing `.tooling.json`, each prompt now defaults to the previously saved choice instead of the detection-based default. Press Enter to keep existing settings or change only what you need.
|
|
33
|
+
|
|
34
|
+
- d448ec6: Update node tsconfig base to use `nodenext` module resolution with `allowImportingTsExtensions`, enabling `.ts` extensions in imports for projects running TypeScript natively on Node 24+. Migrate all tooling-cli imports to use `.ts` extensions and switch `#src` subpath mapping to `#src/*.ts`. Use extensionless imports for library packages.
|
|
35
|
+
- c49593f: Add `commit-and-tag-version` and `@changesets/cli` as optional dependencies
|
|
36
|
+
|
|
37
|
+
These tools are only needed when using their respective release strategies, so they're optional rather than required. Target projects already install them as devDependencies via the package-json generator.
|
|
38
|
+
|
|
39
|
+
- Updated dependencies [715a4ea]
|
|
40
|
+
- Updated dependencies [d448ec6]
|
|
41
|
+
- @bensandee/common@0.1.1
|
|
42
|
+
|
|
43
|
+
## 0.13.0
|
|
44
|
+
|
|
45
|
+
### Minor Changes
|
|
46
|
+
|
|
47
|
+
- bbe3634: Add `checks:run` command (renamed from `repo:run-checks`). Add `ci:check`, `tooling:check`, and `tooling:update` as generated package.json scripts. CI workflows now run `pnpm ci:check`. Managed scripts are updated on `repo:update`/`repo:check` if they don't reference the expected command.
|
|
48
|
+
|
|
49
|
+
### Patch Changes
|
|
50
|
+
|
|
51
|
+
- f20b25d: `checks:run` now reads package.json to detect which scripts are defined. Undefined scripts show "(not defined)" instead of silently passing. Commands use `pnpm run` instead of `pnpm run --if-present`.
|
|
52
|
+
|
|
53
|
+
## 0.12.0
|
|
54
|
+
|
|
55
|
+
### Minor Changes
|
|
56
|
+
|
|
57
|
+
- 5de6090: Add `repo:run-checks` command that runs all standard checks (build, typecheck, lint, test, format, knip, tooling:check, image:check) without short-circuiting, reporting a summary of failures at the end. Supports `--skip` to skip specific checks and `--add` to append custom checks. Generated CI workflows now use `pnpm check`, and the package.json generator produces `check` and `tooling:check` scripts pointing to this command. Managed scripts (`check`, `tooling:check`) are updated on `repo:update`/`repo:check` if they don't already reference the expected command.
|
|
58
|
+
|
|
59
|
+
## 0.11.0
|
|
60
|
+
|
|
61
|
+
### Minor Changes
|
|
62
|
+
|
|
63
|
+
- 493ae65: Add `repo:run-checks` command that runs all standard checks (build, typecheck, lint, test, format, knip, repo:check) without short-circuiting, reporting a summary of failures at the end. Generated CI workflows and package.json `check` scripts now use this command. Skip `trigger-release` script for changesets release strategy.
|
|
64
|
+
|
|
65
|
+
### Patch Changes
|
|
66
|
+
|
|
67
|
+
- ae18571: Add .pnpm-store to gitignore file
|
|
68
|
+
- 916c1ee: Ensure `yaml-language-server` schema comment is added to existing Forgejo workflow files during update/merge
|
|
69
|
+
|
|
70
|
+
## 0.10.1
|
|
71
|
+
|
|
72
|
+
### Patch Changes
|
|
73
|
+
|
|
74
|
+
- f131a3d: Add `pnpm why` to the allowed Bash commands in generated Claude settings
|
|
75
|
+
- 1cb2ce8: Add yaml-language-server schema comments to generated Forgejo workflow files and update schema glob to match both .yml and .yaml extensions
|
|
76
|
+
|
|
77
|
+
## 0.10.0
|
|
78
|
+
|
|
79
|
+
### Minor Changes
|
|
80
|
+
|
|
81
|
+
- 34a0e1e: feat: merge missing config into existing lefthook and CI workflow files instead of skipping
|
|
82
|
+
|
|
83
|
+
Generators for `lefthook.yml`, CI check workflows, and release workflows now merge required
|
|
84
|
+
entries into existing files rather than silently skipping them. This means `repo:update` can
|
|
85
|
+
add new steps (e.g. a newly required CI check) to repos that were initialized before the step
|
|
86
|
+
existed.
|
|
87
|
+
|
|
88
|
+
Add `# @bensandee/tooling:ignore` in the first 10 lines of any YAML file to opt out of
|
|
89
|
+
automatic merging.
|
|
90
|
+
|
|
91
|
+
### Patch Changes
|
|
92
|
+
|
|
93
|
+
- 330cc2c: fix: use semantic JSON comparison in repo:check and repo:update to ignore formatting-only differences
|
|
94
|
+
|
|
95
|
+
## 0.9.0
|
|
96
|
+
|
|
97
|
+
### Minor Changes
|
|
98
|
+
|
|
99
|
+
- 88f2a93: Require `.tooling.json` for `repo:update` and `repo:check` commands. Previously these commands would warn and continue with detected defaults when `.tooling.json` was missing, which could cause unexpected overwrites without proper archiving. Now they exit with an error directing the user to run `tooling repo:init` first.
|
|
100
|
+
|
|
101
|
+
Write Forgejo workflow schema mapping to `.code-workspace` file when present, falling back to `.vscode/settings.json`. The `yaml.schemas` setting in `.vscode/settings.json` doesn't apply in VS Code multi-root workspaces.
|
|
102
|
+
|
|
103
|
+
Improve post-init guidance: suggest a Claude Code prompt ("Execute the steps in .tooling-migrate.md") instead of "paste contents".
|
|
104
|
+
|
|
105
|
+
## 0.8.1
|
|
106
|
+
|
|
107
|
+
### Patch Changes
|
|
108
|
+
|
|
109
|
+
- efcfdcc: Fix findOpenPr to filter PRs client-side by head.ref instead of relying on Forgejo's inconsistent head query parameter, which could match the wrong PR
|
|
110
|
+
- 88aac23: Add forgejo workflow schema additions
|
|
111
|
+
- e4c41d6: Fix wrong agent name in settings.json for claude
|
|
112
|
+
- 43509b8: Pin @bensandee/\* package versions in generated package.json instead of using "latest". Versions are read from sibling package.json files at build time via tsdown's define feature, so they auto-update with each release.
|
|
113
|
+
- 5e65e50: enhance ciWorkflow to support Forgejo email notifications
|
|
114
|
+
- 60a5502: refactor generateClaudeSettings to handle monorepo structure and update tests for plugin integration
|
|
115
|
+
|
|
116
|
+
## 0.8.0
|
|
117
|
+
|
|
118
|
+
### Minor Changes
|
|
119
|
+
|
|
120
|
+
- 375f7fd: Add claude skills to settings.json
|
|
121
|
+
|
|
122
|
+
### Patch Changes
|
|
123
|
+
|
|
124
|
+
- 375098b: Add more safety restrictions to settings.json
|
|
125
|
+
- b330adf: Fix bad update to tsconfig when not needed
|
|
126
|
+
|
|
127
|
+
## 0.7.3
|
|
128
|
+
|
|
129
|
+
### Patch Changes
|
|
130
|
+
|
|
131
|
+
- 3257e04: Fix no-unsafe-json-parse rule and fix new lint errors
|
|
132
|
+
- ca61fa7: Don't overwrite existing oxfmt config
|
|
133
|
+
- 1bdf858: More intelligent addition of src folder to tsconfig
|
|
134
|
+
- 8de49b9: Add line about adding packages when necessary to resolve errors
|
|
135
|
+
|
|
136
|
+
## 0.7.2
|
|
137
|
+
|
|
138
|
+
### Patch Changes
|
|
139
|
+
|
|
140
|
+
- e48bc27: Fix bug where tsconfigs in packages would be force-updated even if solutions-style
|
|
141
|
+
|
|
142
|
+
## 0.7.1
|
|
143
|
+
|
|
144
|
+
### Patch Changes
|
|
145
|
+
|
|
146
|
+
- 6ef4ea9: Fix tsconfig build/update issues
|
|
147
|
+
- 3608a1a: Run pnpm update after repo:update
|
|
148
|
+
|
|
149
|
+
## 0.7.0
|
|
150
|
+
|
|
151
|
+
### Minor Changes
|
|
152
|
+
|
|
153
|
+
- 912013d: Add repo:check command
|
|
154
|
+
- 2545262: Add common package + error subclasses
|
|
155
|
+
|
|
156
|
+
### Patch Changes
|
|
157
|
+
|
|
158
|
+
- Updated dependencies [2545262]
|
|
159
|
+
- @bensandee/common@0.1.0
|
|
160
|
+
|
|
161
|
+
## 0.6.2
|
|
162
|
+
|
|
163
|
+
### Patch Changes
|
|
164
|
+
|
|
165
|
+
- caa1270: Fix hang migrating repo:init
|
|
166
|
+
|
|
167
|
+
## 0.6.1
|
|
168
|
+
|
|
169
|
+
### Patch Changes
|
|
170
|
+
|
|
171
|
+
- 2182ab3: fix bug where renovate.json5 wasn't cleaned up to use our preset
|
|
172
|
+
- d811a96: Lefthook doesn't need an install step in package.json prepare
|
|
173
|
+
|
|
174
|
+
## 0.6.0
|
|
175
|
+
|
|
176
|
+
### Minor Changes
|
|
177
|
+
|
|
178
|
+
- 94cd161: Updated default oxlint config to include more default rules.
|
|
179
|
+
|
|
180
|
+
## 0.5.1
|
|
181
|
+
|
|
182
|
+
### Patch Changes
|
|
183
|
+
|
|
184
|
+
- e0bc32e: Improve migration for tsconfig and husky/lint-staged
|
|
185
|
+
- 02c1a1b: Include version when running tooling cli
|
|
186
|
+
|
|
187
|
+
## 0.5.0
|
|
188
|
+
|
|
189
|
+
### Minor Changes
|
|
190
|
+
|
|
191
|
+
- 58fc8a3: Add lefthook support in place of husky, lint-staged
|
|
192
|
+
|
|
193
|
+
## 0.4.0
|
|
194
|
+
|
|
195
|
+
### Minor Changes
|
|
196
|
+
|
|
197
|
+
- e02953a: Bug fixing, move renovate config to standard location
|
|
198
|
+
- 451908d: Restructure package names and exports.
|
|
199
|
+
|
|
200
|
+
## 0.3.0
|
|
201
|
+
|
|
202
|
+
### Minor Changes
|
|
203
|
+
|
|
204
|
+
- 5e9719f: Many bug fixes
|
|
205
|
+
|
|
206
|
+
## 0.2.0
|
|
207
|
+
|
|
208
|
+
### Minor Changes
|
|
209
|
+
|
|
210
|
+
- c376981: Initial release
|
|
211
|
+
|
|
212
|
+
### Patch Changes
|
|
213
|
+
|
|
214
|
+
- 3fc9fe3: Support multiple release architectures (release-it, commit-and-tag-version and changesets)
|
|
215
|
+
- 4004530: Add release-forgejo command to perform final steps of release creation in forgejo.
|
package/README.md
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
# @bensandee/tooling
|
|
2
|
+
|
|
3
|
+
CLI to bootstrap and maintain standardized TypeScript project tooling.
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
pnpm add -D @bensandee/tooling
|
|
9
|
+
|
|
10
|
+
# Or run directly
|
|
11
|
+
pnpm dlx @bensandee/tooling repo:init
|
|
12
|
+
```
|
|
13
|
+
|
|
14
|
+
## CLI commands
|
|
15
|
+
|
|
16
|
+
### Project management
|
|
17
|
+
|
|
18
|
+
| Command | Description |
|
|
19
|
+
| --------------------------- | ----------------------------------------------------------------------------------------------------------------------- |
|
|
20
|
+
| `tooling repo:init [dir]` | Interactive setup wizard. Flags: `--yes` (accept defaults), `--no-ci`, `--no-prompt`. Saves choices to `.tooling.json`. |
|
|
21
|
+
| `tooling repo:update [dir]` | Add missing config files (never overwrites existing files). |
|
|
22
|
+
| `tooling repo:check [dir]` | Dry-run drift detection. Exits 1 if files would change. CI-friendly. |
|
|
23
|
+
| `tooling checks:run` | Run project checks (build, typecheck, lint, knip, test). Flag: `--fail-fast`. |
|
|
24
|
+
|
|
25
|
+
### Release management
|
|
26
|
+
|
|
27
|
+
| Command | Description |
|
|
28
|
+
| -------------------------------- | ------------------------------------------------------------------------------------------------------------------------------- |
|
|
29
|
+
| `tooling release:changesets` | Changesets version/publish for Forgejo CI. Flag: `--dry-run`. Env: `FORGEJO_SERVER_URL`, `FORGEJO_REPOSITORY`, `FORGEJO_TOKEN`. |
|
|
30
|
+
| `tooling release:simple` | Streamlined release using commit-and-tag-version. |
|
|
31
|
+
| `tooling release:trigger` | Trigger a release workflow. |
|
|
32
|
+
| `tooling forgejo:create-release` | Create a Forgejo release from a tag. |
|
|
33
|
+
| `tooling changesets:merge` | Merge a changesets version PR. |
|
|
34
|
+
|
|
35
|
+
## Config file
|
|
36
|
+
|
|
37
|
+
`repo:init` persists choices to `.tooling.json` at the project root. `repo:check` and `repo:update` read this file to reproduce the same config without re-prompting.
|
|
38
|
+
|
|
39
|
+
## Library API
|
|
40
|
+
|
|
41
|
+
The `"."` export provides type-only exports for programmatic use:
|
|
42
|
+
|
|
43
|
+
```ts
|
|
44
|
+
import type {
|
|
45
|
+
ProjectConfig,
|
|
46
|
+
GeneratorResult,
|
|
47
|
+
GeneratorContext,
|
|
48
|
+
Generator,
|
|
49
|
+
DetectedProjectState,
|
|
50
|
+
LegacyConfig,
|
|
51
|
+
} from "@bensandee/tooling";
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
| Type | Description |
|
|
55
|
+
| ---------------------- | ----------------------------------------------------------------------------------------------- |
|
|
56
|
+
| `ProjectConfig` | User config shape (persisted in `.tooling.json`) |
|
|
57
|
+
| `GeneratorContext` | Context passed to generator functions (`exists`, `read`, `write`, `remove`, `confirmOverwrite`) |
|
|
58
|
+
| `GeneratorResult` | Result from a generator (created/updated/skipped files) |
|
|
59
|
+
| `Generator` | Generator function signature: `(ctx: GeneratorContext) => Promise<GeneratorResult>` |
|
|
60
|
+
| `DetectedProjectState` | Detected existing project state (package manager, CI, etc.) |
|
|
61
|
+
| `LegacyConfig` | Legacy config detection for migration |
|
|
62
|
+
|
|
63
|
+
## Docker verification
|
|
64
|
+
|
|
65
|
+
The `@bensandee/tooling/docker-verify` export provides utilities for verifying Docker Compose stacks via health checks.
|
|
66
|
+
|
|
67
|
+
### Quick start
|
|
68
|
+
|
|
69
|
+
```ts
|
|
70
|
+
import { createRealExecutor, runVerification } from "@bensandee/tooling/docker-verify";
|
|
71
|
+
import type { VerifyConfig } from "@bensandee/tooling/docker-verify";
|
|
72
|
+
|
|
73
|
+
const config: VerifyConfig = {
|
|
74
|
+
compose: {
|
|
75
|
+
cwd: "./deploy",
|
|
76
|
+
composeFiles: ["docker-compose.yaml"],
|
|
77
|
+
services: ["api", "db"],
|
|
78
|
+
},
|
|
79
|
+
buildCommand: "pnpm image:build",
|
|
80
|
+
healthChecks: [
|
|
81
|
+
{
|
|
82
|
+
name: "API",
|
|
83
|
+
url: "http://localhost:3000/health",
|
|
84
|
+
validate: async (res) => res.ok,
|
|
85
|
+
},
|
|
86
|
+
],
|
|
87
|
+
timeoutMs: 120_000,
|
|
88
|
+
pollIntervalMs: 5_000,
|
|
89
|
+
};
|
|
90
|
+
|
|
91
|
+
const result = await runVerification(createRealExecutor(), config);
|
|
92
|
+
if (!result.success) {
|
|
93
|
+
console.error(result.reason, result.message);
|
|
94
|
+
}
|
|
95
|
+
```
|
|
96
|
+
|
|
97
|
+
### Exports
|
|
98
|
+
|
|
99
|
+
| Export | Description |
|
|
100
|
+
| -------------------------------------- | ----------------------------------------------------------------- |
|
|
101
|
+
| `runVerification(executor, config)` | Full lifecycle: build, compose up, health check polling, teardown |
|
|
102
|
+
| `createRealExecutor()` | Production executor (real shell, fetch, timers) |
|
|
103
|
+
| `composeUp(executor, config)` | Start compose services |
|
|
104
|
+
| `composeDown(executor, config)` | Stop and remove compose services |
|
|
105
|
+
| `composeLogs(executor, config)` | Stream compose logs |
|
|
106
|
+
| `composePs(executor, config)` | List running containers |
|
|
107
|
+
| `checkHttpHealth(executor, check)` | Run a single HTTP health check |
|
|
108
|
+
| `getContainerHealth(executor, config)` | Check container-level health status |
|
|
109
|
+
|
|
110
|
+
### Types
|
|
111
|
+
|
|
112
|
+
| Type | Description |
|
|
113
|
+
| ---------------------- | ------------------------------------------------------------------------------------------ |
|
|
114
|
+
| `VerifyConfig` | Full verification config (compose settings, build command, health checks, timeouts) |
|
|
115
|
+
| `ComposeConfig` | Docker Compose settings (cwd, compose files, env file, services) |
|
|
116
|
+
| `HttpHealthCheck` | Health check definition (name, URL, validate function) |
|
|
117
|
+
| `VerifyResult` | Result: `{ success: true, elapsedMs }` or `{ success: false, reason, message, elapsedMs }` |
|
|
118
|
+
| `DockerVerifyExecutor` | Side-effect abstraction (exec, fetch, timers) for testability |
|
|
119
|
+
| `ContainerInfo` | Container status info from `composePs` |
|
|
120
|
+
|
|
121
|
+
## [Changelog](./CHANGELOG.md)
|
package/dist/bin.mjs
CHANGED
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
+
import { t as isExecSyncError } from "./exec-CC49vrkM.mjs";
|
|
2
3
|
import { defineCommand, runMain } from "citty";
|
|
3
4
|
import * as p from "@clack/prompts";
|
|
4
5
|
import { execSync } from "node:child_process";
|
|
@@ -106,7 +107,7 @@ function detectProject(targetDir) {
|
|
|
106
107
|
hasKnipConfig: exists("knip.json") || exists("knip.jsonc") || exists("knip.ts") || exists("knip.mts") || exists("knip.config.ts") || exists("knip.config.mts"),
|
|
107
108
|
hasRenovateConfig: exists("renovate.json") || exists("renovate.json5") || exists(".renovaterc") || exists(".renovaterc.json") || exists(".github/renovate.json") || exists(".github/renovate.json5"),
|
|
108
109
|
hasReleaseItConfig: exists(".release-it.json") || exists(".release-it.yaml") || exists(".release-it.toml"),
|
|
109
|
-
|
|
110
|
+
hasSimpleReleaseConfig: exists(".versionrc") || exists(".versionrc.json") || exists(".versionrc.js"),
|
|
110
111
|
hasChangesetsConfig: exists(".changeset/config.json"),
|
|
111
112
|
legacyConfigs: detectLegacyConfigs(targetDir)
|
|
112
113
|
};
|
|
@@ -208,7 +209,7 @@ function getMonorepoPackages(targetDir) {
|
|
|
208
209
|
function isCancelled(value) {
|
|
209
210
|
return p.isCancel(value);
|
|
210
211
|
}
|
|
211
|
-
async function runInitPrompts(targetDir) {
|
|
212
|
+
async function runInitPrompts(targetDir, saved) {
|
|
212
213
|
p.intro("@bensandee/tooling repo:init");
|
|
213
214
|
const existingPkg = readPackageJson(targetDir);
|
|
214
215
|
const detected = detectProject(targetDir);
|
|
@@ -217,7 +218,7 @@ async function runInitPrompts(targetDir) {
|
|
|
217
218
|
const detectedMonorepo = detectMonorepo(targetDir);
|
|
218
219
|
const structure = await p.select({
|
|
219
220
|
message: "Project structure",
|
|
220
|
-
initialValue: detectedMonorepo ? "monorepo" : "single",
|
|
221
|
+
initialValue: saved?.structure ?? (detectedMonorepo ? "monorepo" : "single"),
|
|
221
222
|
options: [{
|
|
222
223
|
value: "single",
|
|
223
224
|
label: "Single repo"
|
|
@@ -232,7 +233,7 @@ async function runInitPrompts(targetDir) {
|
|
|
232
233
|
}
|
|
233
234
|
const useEslintPlugin = await p.confirm({
|
|
234
235
|
message: "Include @bensandee/eslint-plugin?",
|
|
235
|
-
initialValue: true
|
|
236
|
+
initialValue: saved?.useEslintPlugin ?? true
|
|
236
237
|
});
|
|
237
238
|
if (isCancelled(useEslintPlugin)) {
|
|
238
239
|
p.cancel("Cancelled.");
|
|
@@ -241,7 +242,7 @@ async function runInitPrompts(targetDir) {
|
|
|
241
242
|
const hasExistingPrettier = detected.legacyConfigs.some((l) => l.tool === "prettier");
|
|
242
243
|
const formatter = await p.select({
|
|
243
244
|
message: "Formatter",
|
|
244
|
-
initialValue: hasExistingPrettier ? "prettier" : "oxfmt",
|
|
245
|
+
initialValue: saved?.formatter ?? (hasExistingPrettier ? "prettier" : "oxfmt"),
|
|
245
246
|
options: [{
|
|
246
247
|
value: "oxfmt",
|
|
247
248
|
label: "oxfmt",
|
|
@@ -257,7 +258,7 @@ async function runInitPrompts(targetDir) {
|
|
|
257
258
|
}
|
|
258
259
|
const setupVitest = await p.confirm({
|
|
259
260
|
message: "Set up vitest with a starter test?",
|
|
260
|
-
initialValue: !isExisting
|
|
261
|
+
initialValue: saved?.setupVitest ?? !isExisting
|
|
261
262
|
});
|
|
262
263
|
if (isCancelled(setupVitest)) {
|
|
263
264
|
p.cancel("Cancelled.");
|
|
@@ -265,6 +266,7 @@ async function runInitPrompts(targetDir) {
|
|
|
265
266
|
}
|
|
266
267
|
const ci = await p.select({
|
|
267
268
|
message: "CI workflow",
|
|
269
|
+
initialValue: saved?.ci,
|
|
268
270
|
options: [
|
|
269
271
|
{
|
|
270
272
|
value: "forgejo",
|
|
@@ -288,7 +290,7 @@ async function runInitPrompts(targetDir) {
|
|
|
288
290
|
if (ci === "github") {
|
|
289
291
|
const renovateAnswer = await p.confirm({
|
|
290
292
|
message: "Set up Renovate for automated dependency updates?",
|
|
291
|
-
initialValue: true
|
|
293
|
+
initialValue: saved?.setupRenovate ?? true
|
|
292
294
|
});
|
|
293
295
|
if (isCancelled(renovateAnswer)) {
|
|
294
296
|
p.cancel("Cancelled.");
|
|
@@ -298,7 +300,7 @@ async function runInitPrompts(targetDir) {
|
|
|
298
300
|
}
|
|
299
301
|
const releaseStrategy = await p.select({
|
|
300
302
|
message: "Release management",
|
|
301
|
-
initialValue: "none",
|
|
303
|
+
initialValue: saved?.releaseStrategy ?? "none",
|
|
302
304
|
options: [
|
|
303
305
|
{
|
|
304
306
|
value: "none",
|
|
@@ -315,9 +317,9 @@ async function runInitPrompts(targetDir) {
|
|
|
315
317
|
hint: "PR-based versioning"
|
|
316
318
|
},
|
|
317
319
|
{
|
|
318
|
-
value: "
|
|
319
|
-
label: "
|
|
320
|
-
hint: "
|
|
320
|
+
value: "simple",
|
|
321
|
+
label: "Simple",
|
|
322
|
+
hint: "uses commit-and-tag-version internally"
|
|
321
323
|
}
|
|
322
324
|
]
|
|
323
325
|
});
|
|
@@ -337,7 +339,7 @@ async function runInitPrompts(targetDir) {
|
|
|
337
339
|
p.note(detections.join("\n"), "Detected package types");
|
|
338
340
|
const applyDetected = await p.confirm({
|
|
339
341
|
message: "Apply detected tsconfig bases to packages?",
|
|
340
|
-
initialValue: true
|
|
342
|
+
initialValue: saved?.detectPackageTypes ?? true
|
|
341
343
|
});
|
|
342
344
|
if (isCancelled(applyDetected)) {
|
|
343
345
|
p.cancel("Cancelled.");
|
|
@@ -348,7 +350,7 @@ async function runInitPrompts(targetDir) {
|
|
|
348
350
|
} else {
|
|
349
351
|
const projectTypeAnswer = await p.select({
|
|
350
352
|
message: "Project type",
|
|
351
|
-
initialValue: "default",
|
|
353
|
+
initialValue: saved?.projectType ?? "default",
|
|
352
354
|
options: [
|
|
353
355
|
{
|
|
354
356
|
value: "default",
|
|
@@ -407,7 +409,7 @@ function buildDefaultConfig(targetDir, flags) {
|
|
|
407
409
|
setupVitest: !detected.hasVitestConfig,
|
|
408
410
|
ci: flags.noCi ? "none" : DEFAULT_CI,
|
|
409
411
|
setupRenovate: true,
|
|
410
|
-
releaseStrategy: detected.hasReleaseItConfig ? "release-it" : detected.
|
|
412
|
+
releaseStrategy: detected.hasReleaseItConfig ? "release-it" : detected.hasSimpleReleaseConfig ? "simple" : detected.hasChangesetsConfig ? "changesets" : "none",
|
|
411
413
|
projectType: "default",
|
|
412
414
|
detectPackageTypes: true,
|
|
413
415
|
targetDir
|
|
@@ -516,8 +518,10 @@ const STANDARD_SCRIPTS_SINGLE = {
|
|
|
516
518
|
test: "vitest run",
|
|
517
519
|
lint: "oxlint",
|
|
518
520
|
knip: "knip",
|
|
519
|
-
check: "pnpm exec tooling
|
|
520
|
-
"
|
|
521
|
+
check: "pnpm exec tooling checks:run",
|
|
522
|
+
"ci:check": "pnpm check",
|
|
523
|
+
"tooling:check": "pnpm exec tooling repo:check",
|
|
524
|
+
"tooling:update": "pnpm exec tooling repo:update"
|
|
521
525
|
};
|
|
522
526
|
const STANDARD_SCRIPTS_MONOREPO = {
|
|
523
527
|
build: "pnpm -r build",
|
|
@@ -525,13 +529,17 @@ const STANDARD_SCRIPTS_MONOREPO = {
|
|
|
525
529
|
typecheck: "pnpm -r --parallel run typecheck",
|
|
526
530
|
lint: "oxlint",
|
|
527
531
|
knip: "knip",
|
|
528
|
-
check: "pnpm exec tooling
|
|
529
|
-
"
|
|
532
|
+
check: "pnpm exec tooling checks:run",
|
|
533
|
+
"ci:check": "pnpm check",
|
|
534
|
+
"tooling:check": "pnpm exec tooling repo:check",
|
|
535
|
+
"tooling:update": "pnpm exec tooling repo:update"
|
|
530
536
|
};
|
|
531
537
|
/** Scripts that tooling owns — map from script name to keyword that must appear in the value. */
|
|
532
538
|
const MANAGED_SCRIPTS = {
|
|
533
|
-
check: "
|
|
534
|
-
"
|
|
539
|
+
check: "checks:run",
|
|
540
|
+
"ci:check": "pnpm check",
|
|
541
|
+
"tooling:check": "repo:check",
|
|
542
|
+
"tooling:update": "repo:update"
|
|
535
543
|
};
|
|
536
544
|
/** DevDeps that belong in every project (single repo) or per-package (monorepo). */
|
|
537
545
|
const PER_PACKAGE_DEV_DEPS = {
|
|
@@ -575,7 +583,7 @@ function addReleaseDeps(deps, config) {
|
|
|
575
583
|
deps["release-it"] = "18.1.2";
|
|
576
584
|
if (config.structure === "monorepo") deps["@release-it/bumper"] = "7.0.2";
|
|
577
585
|
break;
|
|
578
|
-
case "
|
|
586
|
+
case "simple":
|
|
579
587
|
deps["commit-and-tag-version"] = "12.5.0";
|
|
580
588
|
break;
|
|
581
589
|
case "changesets":
|
|
@@ -587,8 +595,8 @@ function addReleaseDeps(deps, config) {
|
|
|
587
595
|
function getAddedDevDepNames(config) {
|
|
588
596
|
const deps = { ...ROOT_DEV_DEPS };
|
|
589
597
|
if (config.structure !== "monorepo") Object.assign(deps, PER_PACKAGE_DEV_DEPS);
|
|
590
|
-
deps["@bensandee/config"] = "0.
|
|
591
|
-
deps["@bensandee/tooling"] = "0.
|
|
598
|
+
deps["@bensandee/config"] = "0.8.0";
|
|
599
|
+
deps["@bensandee/tooling"] = "0.14.0";
|
|
592
600
|
if (config.formatter === "oxfmt") deps["oxfmt"] = "0.35.0";
|
|
593
601
|
if (config.formatter === "prettier") deps["prettier"] = "3.8.1";
|
|
594
602
|
addReleaseDeps(deps, config);
|
|
@@ -608,9 +616,9 @@ async function generatePackageJson(ctx) {
|
|
|
608
616
|
if (ctx.config.releaseStrategy !== "none" && ctx.config.releaseStrategy !== "changesets") allScripts["trigger-release"] = "pnpm exec tooling release:trigger";
|
|
609
617
|
const devDeps = { ...ROOT_DEV_DEPS };
|
|
610
618
|
if (!isMonorepo) Object.assign(devDeps, PER_PACKAGE_DEV_DEPS);
|
|
611
|
-
devDeps["@bensandee/config"] = isWorkspacePackage(ctx, "@bensandee/config") ? "workspace:*" : "0.
|
|
612
|
-
devDeps["@bensandee/tooling"] = isWorkspacePackage(ctx, "@bensandee/tooling") ? "workspace:*" : "0.
|
|
613
|
-
if (ctx.config.useEslintPlugin) devDeps["@bensandee/eslint-plugin"] = isWorkspacePackage(ctx, "@bensandee/eslint-plugin") ? "workspace:*" : "0.9.
|
|
619
|
+
devDeps["@bensandee/config"] = isWorkspacePackage(ctx, "@bensandee/config") ? "workspace:*" : "0.8.0";
|
|
620
|
+
devDeps["@bensandee/tooling"] = isWorkspacePackage(ctx, "@bensandee/tooling") ? "workspace:*" : "0.14.0";
|
|
621
|
+
if (ctx.config.useEslintPlugin) devDeps["@bensandee/eslint-plugin"] = isWorkspacePackage(ctx, "@bensandee/eslint-plugin") ? "workspace:*" : "0.9.1";
|
|
614
622
|
if (ctx.config.formatter === "oxfmt") devDeps["oxfmt"] = "0.35.0";
|
|
615
623
|
if (ctx.config.formatter === "prettier") devDeps["prettier"] = "3.8.1";
|
|
616
624
|
addReleaseDeps(devDeps, ctx.config);
|
|
@@ -1386,7 +1394,7 @@ jobs:
|
|
|
1386
1394
|
cache: pnpm
|
|
1387
1395
|
- run: pnpm install --frozen-lockfile
|
|
1388
1396
|
- name: Run all checks
|
|
1389
|
-
run: pnpm check
|
|
1397
|
+
run: pnpm ci:check
|
|
1390
1398
|
`;
|
|
1391
1399
|
}
|
|
1392
1400
|
function requiredCheckSteps(nodeVersionYaml) {
|
|
@@ -1417,7 +1425,7 @@ function requiredCheckSteps(nodeVersionYaml) {
|
|
|
1417
1425
|
match: { run: "check" },
|
|
1418
1426
|
step: {
|
|
1419
1427
|
name: "Run all checks",
|
|
1420
|
-
run: "pnpm check"
|
|
1428
|
+
run: "pnpm ci:check"
|
|
1421
1429
|
}
|
|
1422
1430
|
}
|
|
1423
1431
|
];
|
|
@@ -1958,25 +1966,13 @@ permissions:
|
|
|
1958
1966
|
- name: Release
|
|
1959
1967
|
env:
|
|
1960
1968
|
GITHUB_TOKEN: \${{ github.token }}
|
|
1961
|
-
|
|
1962
|
-
run: |
|
|
1963
|
-
pnpm exec commit-and-tag-version
|
|
1964
|
-
git push --follow-tags
|
|
1965
|
-
TAG=$(git describe --tags --abbrev=0)
|
|
1966
|
-
pnpm publish --no-git-checks
|
|
1967
|
-
gh release create "$TAG" --generate-notes` : `
|
|
1969
|
+
run: pnpm exec tooling release:simple` : `
|
|
1968
1970
|
- name: Release
|
|
1969
1971
|
env:
|
|
1970
1972
|
FORGEJO_SERVER_URL: \${{ github.server_url }}
|
|
1971
1973
|
FORGEJO_REPOSITORY: \${{ github.repository }}
|
|
1972
1974
|
FORGEJO_TOKEN: \${{ secrets.FORGEJO_TOKEN }}
|
|
1973
|
-
|
|
1974
|
-
run: |
|
|
1975
|
-
pnpm exec commit-and-tag-version
|
|
1976
|
-
git push --follow-tags
|
|
1977
|
-
TAG=$(git describe --tags --abbrev=0)
|
|
1978
|
-
pnpm publish --no-git-checks
|
|
1979
|
-
pnpm exec tooling release:create-forgejo-release --tag "$TAG"`;
|
|
1975
|
+
run: pnpm exec tooling release:simple`;
|
|
1980
1976
|
return `${workflowSchemaComment(ci)}name: Release
|
|
1981
1977
|
on:
|
|
1982
1978
|
workflow_dispatch:
|
|
@@ -2077,10 +2073,10 @@ function requiredReleaseSteps(strategy, nodeVersionYaml) {
|
|
|
2077
2073
|
step: { run: "pnpm release-it --ci" }
|
|
2078
2074
|
});
|
|
2079
2075
|
break;
|
|
2080
|
-
case "
|
|
2076
|
+
case "simple":
|
|
2081
2077
|
steps.push({
|
|
2082
|
-
match: { run: "
|
|
2083
|
-
step: { run: "pnpm exec
|
|
2078
|
+
match: { run: "release:simple" },
|
|
2079
|
+
step: { run: "pnpm exec tooling release:simple" }
|
|
2084
2080
|
});
|
|
2085
2081
|
break;
|
|
2086
2082
|
case "changesets":
|
|
@@ -2095,7 +2091,7 @@ function requiredReleaseSteps(strategy, nodeVersionYaml) {
|
|
|
2095
2091
|
function buildWorkflow(strategy, ci, nodeVersionYaml) {
|
|
2096
2092
|
switch (strategy) {
|
|
2097
2093
|
case "release-it": return releaseItWorkflow(ci, nodeVersionYaml);
|
|
2098
|
-
case "
|
|
2094
|
+
case "simple": return commitAndTagVersionWorkflow(ci, nodeVersionYaml);
|
|
2099
2095
|
case "changesets": return changesetsWorkflow(ci, nodeVersionYaml);
|
|
2100
2096
|
default: return null;
|
|
2101
2097
|
}
|
|
@@ -2438,7 +2434,7 @@ const ToolingConfigSchema = z.object({
|
|
|
2438
2434
|
setupRenovate: z.boolean().optional(),
|
|
2439
2435
|
releaseStrategy: z.enum([
|
|
2440
2436
|
"release-it",
|
|
2441
|
-
"
|
|
2437
|
+
"simple",
|
|
2442
2438
|
"changesets",
|
|
2443
2439
|
"none"
|
|
2444
2440
|
]).optional(),
|
|
@@ -2539,14 +2535,14 @@ const initCommand = defineCommand({
|
|
|
2539
2535
|
},
|
|
2540
2536
|
async run({ args }) {
|
|
2541
2537
|
const targetDir = path.resolve(args.dir ?? ".");
|
|
2538
|
+
const saved = loadToolingConfig(targetDir);
|
|
2542
2539
|
await runInit(args.yes ? (() => {
|
|
2543
|
-
const saved = loadToolingConfig(targetDir);
|
|
2544
2540
|
const detected = buildDefaultConfig(targetDir, {
|
|
2545
2541
|
eslintPlugin: args["eslint-plugin"] === true ? true : void 0,
|
|
2546
2542
|
noCi: args["no-ci"] === true ? true : void 0
|
|
2547
2543
|
});
|
|
2548
2544
|
return saved ? mergeWithSavedConfig(detected, saved) : detected;
|
|
2549
|
-
})() : await runInitPrompts(targetDir), args["no-prompt"] === true ? { noPrompt: true } : {});
|
|
2545
|
+
})() : await runInitPrompts(targetDir, saved), args["no-prompt"] === true ? { noPrompt: true } : {});
|
|
2550
2546
|
}
|
|
2551
2547
|
});
|
|
2552
2548
|
async function runInit(config, options = {}) {
|
|
@@ -2726,12 +2722,6 @@ function lineDiff(oldText, newText) {
|
|
|
2726
2722
|
return lines;
|
|
2727
2723
|
}
|
|
2728
2724
|
//#endregion
|
|
2729
|
-
//#region src/utils/exec.ts
|
|
2730
|
-
/** Type guard for `execSync` errors that carry stdout/stderr/status. */
|
|
2731
|
-
function isExecSyncError(err) {
|
|
2732
|
-
return err instanceof Error && "stdout" in err && typeof err.stdout === "string" && "stderr" in err && typeof err.stderr === "string" && "status" in err && typeof err.status === "number";
|
|
2733
|
-
}
|
|
2734
|
-
//#endregion
|
|
2735
2725
|
//#region src/release/executor.ts
|
|
2736
2726
|
/** Create an executor that runs real commands, fetches, and reads the filesystem. */
|
|
2737
2727
|
function createRealExecutor() {
|
|
@@ -3341,10 +3331,10 @@ function triggerGitHub(ref) {
|
|
|
3341
3331
|
p.log.info(`Triggered release workflow on GitHub (ref: ${ref})`);
|
|
3342
3332
|
}
|
|
3343
3333
|
//#endregion
|
|
3344
|
-
//#region src/commands/
|
|
3334
|
+
//#region src/commands/forgejo-create-release.ts
|
|
3345
3335
|
const createForgejoReleaseCommand = defineCommand({
|
|
3346
3336
|
meta: {
|
|
3347
|
-
name: "
|
|
3337
|
+
name: "forgejo:create-release",
|
|
3348
3338
|
description: "Create a Forgejo release for a given tag"
|
|
3349
3339
|
},
|
|
3350
3340
|
args: { tag: {
|
|
@@ -3354,7 +3344,7 @@ const createForgejoReleaseCommand = defineCommand({
|
|
|
3354
3344
|
} },
|
|
3355
3345
|
async run({ args }) {
|
|
3356
3346
|
const resolved = resolveConnection(process.cwd());
|
|
3357
|
-
if (resolved.type !== "forgejo") throw new FatalError("
|
|
3347
|
+
if (resolved.type !== "forgejo") throw new FatalError("forgejo:create-release requires a Forgejo repository");
|
|
3358
3348
|
const executor = createRealExecutor();
|
|
3359
3349
|
const conn = resolved.conn;
|
|
3360
3350
|
if (await findRelease(executor, conn, args.tag)) {
|
|
@@ -3366,11 +3356,11 @@ const createForgejoReleaseCommand = defineCommand({
|
|
|
3366
3356
|
}
|
|
3367
3357
|
});
|
|
3368
3358
|
//#endregion
|
|
3369
|
-
//#region src/commands/
|
|
3359
|
+
//#region src/commands/changesets-merge.ts
|
|
3370
3360
|
const HEAD_BRANCH = "changeset-release/main";
|
|
3371
3361
|
const releaseMergeCommand = defineCommand({
|
|
3372
3362
|
meta: {
|
|
3373
|
-
name: "
|
|
3363
|
+
name: "changesets:merge",
|
|
3374
3364
|
description: "Merge the open changesets version PR"
|
|
3375
3365
|
},
|
|
3376
3366
|
args: { "dry-run": {
|
|
@@ -3410,41 +3400,201 @@ function mergeGitHub(dryRun) {
|
|
|
3410
3400
|
p.log.info(`Merged changesets PR and deleted branch ${HEAD_BRANCH}`);
|
|
3411
3401
|
}
|
|
3412
3402
|
//#endregion
|
|
3413
|
-
//#region src/
|
|
3414
|
-
|
|
3415
|
-
|
|
3416
|
-
|
|
3417
|
-
|
|
3418
|
-
|
|
3419
|
-
|
|
3420
|
-
|
|
3421
|
-
|
|
3422
|
-
|
|
3423
|
-
|
|
3424
|
-
|
|
3425
|
-
|
|
3403
|
+
//#region src/release/simple.ts
|
|
3404
|
+
/**
|
|
3405
|
+
* Compute sliding version tags from a semver version string.
|
|
3406
|
+
* For "1.2.3" returns ["v1", "v1.2"]. Strips prerelease suffixes.
|
|
3407
|
+
*/
|
|
3408
|
+
function computeSlidingTags(version) {
|
|
3409
|
+
const parts = (version.split("-")[0] ?? version).split(".");
|
|
3410
|
+
if (parts.length < 2 || !parts[0] || !parts[1]) throw new FatalError(`Invalid version format "${version}". Expected semver (X.Y.Z)`);
|
|
3411
|
+
return [`v${parts[0]}`, `v${parts[0]}.${parts[1]}`];
|
|
3412
|
+
}
|
|
3413
|
+
/** Build the commit-and-tag-version command with appropriate flags. */
|
|
3414
|
+
function buildCommand(config) {
|
|
3415
|
+
const args = ["pnpm exec commit-and-tag-version"];
|
|
3416
|
+
if (config.dryRun) args.push("--dry-run");
|
|
3417
|
+
if (config.firstRelease) args.push("--first-release");
|
|
3418
|
+
if (config.releaseAs) args.push(`--release-as ${config.releaseAs}`);
|
|
3419
|
+
if (config.prerelease) args.push(`--prerelease ${config.prerelease}`);
|
|
3420
|
+
return args.join(" ");
|
|
3421
|
+
}
|
|
3422
|
+
/** Read the current version from package.json. */
|
|
3423
|
+
function readVersion(executor, cwd) {
|
|
3424
|
+
const raw = executor.readFile(path.join(cwd, "package.json"));
|
|
3425
|
+
if (!raw) throw new FatalError("Could not read package.json");
|
|
3426
|
+
const pkg = parsePackageJson(raw);
|
|
3427
|
+
if (!pkg?.version) throw new FatalError("No version field found in package.json");
|
|
3428
|
+
return pkg.version;
|
|
3429
|
+
}
|
|
3430
|
+
/** Run the full commit-and-tag-version release flow. */
|
|
3431
|
+
async function runSimpleRelease(executor, config) {
|
|
3432
|
+
const command = buildCommand(config);
|
|
3433
|
+
p.log.info(`Running: ${command}`);
|
|
3434
|
+
const versionResult = executor.exec(command, { cwd: config.cwd });
|
|
3435
|
+
debugExec(config, "commit-and-tag-version", versionResult);
|
|
3436
|
+
if (versionResult.exitCode !== 0) throw new FatalError(`commit-and-tag-version failed (exit code ${String(versionResult.exitCode)}):\n${versionResult.stderr || versionResult.stdout}`);
|
|
3437
|
+
const version = readVersion(executor, config.cwd);
|
|
3438
|
+
debug(config, `New version: ${version}`);
|
|
3439
|
+
const tagResult = executor.exec("git describe --tags --abbrev=0", { cwd: config.cwd });
|
|
3440
|
+
debugExec(config, "git describe", tagResult);
|
|
3441
|
+
const tag = tagResult.stdout.trim();
|
|
3442
|
+
if (!tag) throw new FatalError("Could not determine the new tag from git describe");
|
|
3443
|
+
p.log.info(`Version ${version} tagged as ${tag}`);
|
|
3444
|
+
if (config.dryRun) {
|
|
3445
|
+
const slidingTags = config.noSlidingTags ? [] : computeSlidingTags(version);
|
|
3446
|
+
p.log.info(`[dry-run] Would push to origin with --follow-tags`);
|
|
3447
|
+
if (slidingTags.length > 0) p.log.info(`[dry-run] Would create sliding tags: ${slidingTags.join(", ")}`);
|
|
3448
|
+
if (!config.noRelease && config.platform) p.log.info(`[dry-run] Would create ${config.platform.type} release for ${tag}`);
|
|
3449
|
+
return {
|
|
3450
|
+
version,
|
|
3451
|
+
tag,
|
|
3452
|
+
slidingTags,
|
|
3453
|
+
pushed: false,
|
|
3454
|
+
releaseCreated: false
|
|
3455
|
+
};
|
|
3456
|
+
}
|
|
3457
|
+
let pushed = false;
|
|
3458
|
+
if (!config.noPush) {
|
|
3459
|
+
const branch = executor.exec("git rev-parse --abbrev-ref HEAD", { cwd: config.cwd }).stdout.trim() || "main";
|
|
3460
|
+
debug(config, `Pushing to origin/${branch}`);
|
|
3461
|
+
const pushResult = executor.exec(`git push --follow-tags origin ${branch}`, { cwd: config.cwd });
|
|
3462
|
+
debugExec(config, "git push", pushResult);
|
|
3463
|
+
if (pushResult.exitCode !== 0) throw new FatalError(`git push failed (exit code ${String(pushResult.exitCode)}):\n${pushResult.stderr || pushResult.stdout}`);
|
|
3464
|
+
pushed = true;
|
|
3465
|
+
p.log.info("Pushed to origin");
|
|
3466
|
+
}
|
|
3467
|
+
let slidingTags = [];
|
|
3468
|
+
if (!config.noSlidingTags && pushed) {
|
|
3469
|
+
slidingTags = computeSlidingTags(version);
|
|
3470
|
+
for (const slidingTag of slidingTags) executor.exec(`git tag -f ${slidingTag}`, { cwd: config.cwd });
|
|
3471
|
+
const forcePushResult = executor.exec(`git push origin ${slidingTags.join(" ")} --force`, { cwd: config.cwd });
|
|
3472
|
+
debugExec(config, "force-push sliding tags", forcePushResult);
|
|
3473
|
+
if (forcePushResult.exitCode !== 0) p.log.warn(`Warning: Failed to push sliding tags: ${forcePushResult.stderr || forcePushResult.stdout}`);
|
|
3474
|
+
else p.log.info(`Created sliding tags: ${slidingTags.join(", ")}`);
|
|
3475
|
+
}
|
|
3476
|
+
let releaseCreated = false;
|
|
3477
|
+
if (!config.noRelease && config.platform) releaseCreated = await createPlatformRelease(executor, config, tag);
|
|
3478
|
+
return {
|
|
3479
|
+
version,
|
|
3480
|
+
tag,
|
|
3481
|
+
slidingTags,
|
|
3482
|
+
pushed,
|
|
3483
|
+
releaseCreated
|
|
3484
|
+
};
|
|
3485
|
+
}
|
|
3486
|
+
async function createPlatformRelease(executor, config, tag) {
|
|
3487
|
+
if (!config.platform) return false;
|
|
3488
|
+
if (config.platform.type === "forgejo") {
|
|
3489
|
+
if (await findRelease(executor, config.platform.conn, tag)) {
|
|
3490
|
+
debug(config, `Release for ${tag} already exists, skipping`);
|
|
3491
|
+
return false;
|
|
3492
|
+
}
|
|
3493
|
+
await createRelease(executor, config.platform.conn, tag);
|
|
3494
|
+
p.log.info(`Created Forgejo release for ${tag}`);
|
|
3495
|
+
return true;
|
|
3496
|
+
}
|
|
3497
|
+
const ghResult = executor.exec(`gh release create ${tag} --generate-notes`, { cwd: config.cwd });
|
|
3498
|
+
debugExec(config, "gh release create", ghResult);
|
|
3499
|
+
if (ghResult.exitCode !== 0) {
|
|
3500
|
+
p.log.warn(`Warning: Failed to create GitHub release: ${ghResult.stderr || ghResult.stdout}`);
|
|
3501
|
+
return false;
|
|
3502
|
+
}
|
|
3503
|
+
p.log.info(`Created GitHub release for ${tag}`);
|
|
3504
|
+
return true;
|
|
3505
|
+
}
|
|
3506
|
+
//#endregion
|
|
3507
|
+
//#region src/commands/release-simple.ts
|
|
3508
|
+
const releaseSimpleCommand = defineCommand({
|
|
3509
|
+
meta: {
|
|
3510
|
+
name: "release:simple",
|
|
3511
|
+
description: "Run commit-and-tag-version, push, create sliding tags, and create a platform release"
|
|
3426
3512
|
},
|
|
3427
|
-
{
|
|
3428
|
-
|
|
3429
|
-
|
|
3513
|
+
args: {
|
|
3514
|
+
"dry-run": {
|
|
3515
|
+
type: "boolean",
|
|
3516
|
+
description: "Pass --dry-run to commit-and-tag-version and skip all remote operations"
|
|
3517
|
+
},
|
|
3518
|
+
verbose: {
|
|
3519
|
+
type: "boolean",
|
|
3520
|
+
description: "Enable detailed debug logging (also enabled by RELEASE_DEBUG env var)"
|
|
3521
|
+
},
|
|
3522
|
+
"no-push": {
|
|
3523
|
+
type: "boolean",
|
|
3524
|
+
description: "Run commit-and-tag-version but skip push and remote operations"
|
|
3525
|
+
},
|
|
3526
|
+
"no-sliding-tags": {
|
|
3527
|
+
type: "boolean",
|
|
3528
|
+
description: "Skip creating sliding major/minor version tags (vX, vX.Y)"
|
|
3529
|
+
},
|
|
3530
|
+
"no-release": {
|
|
3531
|
+
type: "boolean",
|
|
3532
|
+
description: "Skip Forgejo/GitHub release creation"
|
|
3533
|
+
},
|
|
3534
|
+
"first-release": {
|
|
3535
|
+
type: "boolean",
|
|
3536
|
+
description: "Pass --first-release to commit-and-tag-version (skip version bump)"
|
|
3537
|
+
},
|
|
3538
|
+
"release-as": {
|
|
3539
|
+
type: "string",
|
|
3540
|
+
description: "Force a specific version (passed to commit-and-tag-version --release-as)"
|
|
3541
|
+
},
|
|
3542
|
+
prerelease: {
|
|
3543
|
+
type: "string",
|
|
3544
|
+
description: "Create a prerelease with the given tag (e.g., beta, alpha)"
|
|
3545
|
+
}
|
|
3430
3546
|
},
|
|
3547
|
+
async run({ args }) {
|
|
3548
|
+
const cwd = process.cwd();
|
|
3549
|
+
const verbose = args.verbose === true || process.env["RELEASE_DEBUG"] === "true";
|
|
3550
|
+
const noRelease = args["no-release"] === true;
|
|
3551
|
+
let platform;
|
|
3552
|
+
if (!noRelease) {
|
|
3553
|
+
const resolved = resolveConnection(cwd);
|
|
3554
|
+
if (resolved.type === "forgejo") platform = {
|
|
3555
|
+
type: "forgejo",
|
|
3556
|
+
conn: resolved.conn
|
|
3557
|
+
};
|
|
3558
|
+
else platform = { type: "github" };
|
|
3559
|
+
}
|
|
3560
|
+
const config = {
|
|
3561
|
+
cwd,
|
|
3562
|
+
dryRun: args["dry-run"] === true,
|
|
3563
|
+
verbose,
|
|
3564
|
+
noPush: args["no-push"] === true,
|
|
3565
|
+
noSlidingTags: args["no-sliding-tags"] === true,
|
|
3566
|
+
noRelease,
|
|
3567
|
+
firstRelease: args["first-release"] === true,
|
|
3568
|
+
releaseAs: args["release-as"],
|
|
3569
|
+
prerelease: args.prerelease,
|
|
3570
|
+
platform
|
|
3571
|
+
};
|
|
3572
|
+
await runSimpleRelease(createRealExecutor(), config);
|
|
3573
|
+
}
|
|
3574
|
+
});
|
|
3575
|
+
//#endregion
|
|
3576
|
+
//#region src/commands/repo-run-checks.ts
|
|
3577
|
+
const CHECKS = [
|
|
3578
|
+
{ name: "build" },
|
|
3579
|
+
{ name: "typecheck" },
|
|
3580
|
+
{ name: "lint" },
|
|
3581
|
+
{ name: "test" },
|
|
3431
3582
|
{
|
|
3432
3583
|
name: "format",
|
|
3433
|
-
|
|
3584
|
+
args: "--check"
|
|
3434
3585
|
},
|
|
3435
|
-
{
|
|
3436
|
-
|
|
3437
|
-
|
|
3438
|
-
},
|
|
3439
|
-
{
|
|
3440
|
-
name: "tooling:check",
|
|
3441
|
-
cmd: "pnpm run --if-present tooling:check"
|
|
3442
|
-
},
|
|
3443
|
-
{
|
|
3444
|
-
name: "image:check",
|
|
3445
|
-
cmd: "pnpm run --if-present image:check"
|
|
3446
|
-
}
|
|
3586
|
+
{ name: "knip" },
|
|
3587
|
+
{ name: "tooling:check" },
|
|
3588
|
+
{ name: "image:check" }
|
|
3447
3589
|
];
|
|
3590
|
+
function defaultGetScripts(targetDir) {
|
|
3591
|
+
try {
|
|
3592
|
+
const pkg = parsePackageJson(readFileSync(path.join(targetDir, "package.json"), "utf-8"));
|
|
3593
|
+
return new Set(Object.keys(pkg?.scripts ?? {}));
|
|
3594
|
+
} catch {
|
|
3595
|
+
return /* @__PURE__ */ new Set();
|
|
3596
|
+
}
|
|
3597
|
+
}
|
|
3448
3598
|
function defaultExecCommand(cmd, cwd) {
|
|
3449
3599
|
try {
|
|
3450
3600
|
execSync(cmd, {
|
|
@@ -3460,29 +3610,38 @@ function defaultExecCommand(cmd, cwd) {
|
|
|
3460
3610
|
const ciLog = (msg) => console.log(msg);
|
|
3461
3611
|
function runRunChecks(targetDir, options = {}) {
|
|
3462
3612
|
const exec = options.execCommand ?? defaultExecCommand;
|
|
3613
|
+
const getScripts = options.getScripts ?? defaultGetScripts;
|
|
3463
3614
|
const skip = options.skip ?? /* @__PURE__ */ new Set();
|
|
3464
3615
|
const add = options.add ?? [];
|
|
3465
3616
|
const isCI = Boolean(process.env["CI"]);
|
|
3466
|
-
const
|
|
3467
|
-
|
|
3468
|
-
|
|
3469
|
-
}))];
|
|
3617
|
+
const failFast = options.failFast ?? !isCI;
|
|
3618
|
+
const definedScripts = getScripts(targetDir);
|
|
3619
|
+
const addedNames = new Set(add);
|
|
3620
|
+
const allChecks = [...CHECKS, ...add.map((name) => ({ name }))];
|
|
3470
3621
|
const failures = [];
|
|
3622
|
+
const notDefined = [];
|
|
3471
3623
|
for (const check of allChecks) {
|
|
3472
|
-
if (skip.has(check.name))
|
|
3473
|
-
|
|
3624
|
+
if (skip.has(check.name)) continue;
|
|
3625
|
+
if (!definedScripts.has(check.name)) {
|
|
3626
|
+
if (addedNames.has(check.name)) {
|
|
3627
|
+
p.log.error(`${check.name} not defined in package.json`);
|
|
3628
|
+
failures.push(check.name);
|
|
3629
|
+
} else notDefined.push(check.name);
|
|
3474
3630
|
continue;
|
|
3475
3631
|
}
|
|
3632
|
+
const cmd = check.args ? `pnpm run ${check.name} ${check.args}` : `pnpm run ${check.name}`;
|
|
3476
3633
|
if (isCI) ciLog(`::group::${check.name}`);
|
|
3477
|
-
const exitCode = exec(
|
|
3634
|
+
const exitCode = exec(cmd, targetDir);
|
|
3478
3635
|
if (isCI) ciLog("::endgroup::");
|
|
3479
3636
|
if (exitCode === 0) p.log.success(check.name);
|
|
3480
3637
|
else {
|
|
3481
3638
|
if (isCI) ciLog(`::error::${check.name} failed`);
|
|
3482
3639
|
p.log.error(`${check.name} failed`);
|
|
3483
3640
|
failures.push(check.name);
|
|
3641
|
+
if (failFast) return 1;
|
|
3484
3642
|
}
|
|
3485
3643
|
}
|
|
3644
|
+
if (notDefined.length > 0) p.log.info(`Skipped (not defined): ${notDefined.join(", ")}`);
|
|
3486
3645
|
if (failures.length > 0) {
|
|
3487
3646
|
p.log.error(`Failed checks: ${failures.join(", ")}`);
|
|
3488
3647
|
return 1;
|
|
@@ -3492,7 +3651,7 @@ function runRunChecks(targetDir, options = {}) {
|
|
|
3492
3651
|
}
|
|
3493
3652
|
const runChecksCommand = defineCommand({
|
|
3494
3653
|
meta: {
|
|
3495
|
-
name: "
|
|
3654
|
+
name: "checks:run",
|
|
3496
3655
|
description: "Run all standard checks (build, typecheck, lint, test, format, knip, tooling:check, image:check)"
|
|
3497
3656
|
},
|
|
3498
3657
|
args: {
|
|
@@ -3508,14 +3667,20 @@ const runChecksCommand = defineCommand({
|
|
|
3508
3667
|
},
|
|
3509
3668
|
add: {
|
|
3510
3669
|
type: "string",
|
|
3511
|
-
description: "Comma-separated list of additional check names to run (uses pnpm run
|
|
3670
|
+
description: "Comma-separated list of additional check names to run (uses pnpm run <name>)",
|
|
3671
|
+
required: false
|
|
3672
|
+
},
|
|
3673
|
+
"fail-fast": {
|
|
3674
|
+
type: "boolean",
|
|
3675
|
+
description: "Stop on first failure (default: true in dev, false in CI)",
|
|
3512
3676
|
required: false
|
|
3513
3677
|
}
|
|
3514
3678
|
},
|
|
3515
3679
|
run({ args }) {
|
|
3516
3680
|
const exitCode = runRunChecks(path.resolve(args.dir ?? "."), {
|
|
3517
3681
|
skip: args.skip ? new Set(args.skip.split(",").map((s) => s.trim())) : void 0,
|
|
3518
|
-
add: args.add ? args.add.split(",").map((s) => s.trim()) : void 0
|
|
3682
|
+
add: args.add ? args.add.split(",").map((s) => s.trim()) : void 0,
|
|
3683
|
+
failFast: args["fail-fast"] === true ? true : args["fail-fast"] === false ? false : void 0
|
|
3519
3684
|
});
|
|
3520
3685
|
process.exitCode = exitCode;
|
|
3521
3686
|
}
|
|
@@ -3525,21 +3690,22 @@ const runChecksCommand = defineCommand({
|
|
|
3525
3690
|
const main = defineCommand({
|
|
3526
3691
|
meta: {
|
|
3527
3692
|
name: "tooling",
|
|
3528
|
-
version: "0.
|
|
3693
|
+
version: "0.14.0",
|
|
3529
3694
|
description: "Bootstrap and maintain standardized TypeScript project tooling"
|
|
3530
3695
|
},
|
|
3531
3696
|
subCommands: {
|
|
3532
3697
|
"repo:init": initCommand,
|
|
3533
3698
|
"repo:update": updateCommand,
|
|
3534
3699
|
"repo:check": checkCommand,
|
|
3535
|
-
"
|
|
3700
|
+
"checks:run": runChecksCommand,
|
|
3536
3701
|
"release:changesets": releaseForgejoCommand,
|
|
3537
3702
|
"release:trigger": releaseTriggerCommand,
|
|
3538
|
-
"
|
|
3539
|
-
"
|
|
3703
|
+
"forgejo:create-release": createForgejoReleaseCommand,
|
|
3704
|
+
"changesets:merge": releaseMergeCommand,
|
|
3705
|
+
"release:simple": releaseSimpleCommand
|
|
3540
3706
|
}
|
|
3541
3707
|
});
|
|
3542
|
-
console.log(`@bensandee/tooling v0.
|
|
3708
|
+
console.log(`@bensandee/tooling v0.14.0`);
|
|
3543
3709
|
runMain(main);
|
|
3544
3710
|
//#endregion
|
|
3545
3711
|
export {};
|
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
import { t as isExecSyncError } from "../exec-CC49vrkM.mjs";
|
|
2
|
+
import { execSync } from "node:child_process";
|
|
3
|
+
import { z } from "zod";
|
|
4
|
+
//#region src/docker-verify/executor.ts
|
|
5
|
+
/** Create an executor that runs real commands, fetches, and manages process signals. */
|
|
6
|
+
function createRealExecutor() {
|
|
7
|
+
return {
|
|
8
|
+
exec(command, options) {
|
|
9
|
+
try {
|
|
10
|
+
return {
|
|
11
|
+
stdout: execSync(command, {
|
|
12
|
+
cwd: options?.cwd,
|
|
13
|
+
env: options?.env ? {
|
|
14
|
+
...process.env,
|
|
15
|
+
...options.env
|
|
16
|
+
} : void 0,
|
|
17
|
+
encoding: "utf-8",
|
|
18
|
+
stdio: [
|
|
19
|
+
"pipe",
|
|
20
|
+
"pipe",
|
|
21
|
+
"pipe"
|
|
22
|
+
]
|
|
23
|
+
}),
|
|
24
|
+
stderr: "",
|
|
25
|
+
exitCode: 0
|
|
26
|
+
};
|
|
27
|
+
} catch (err) {
|
|
28
|
+
if (isExecSyncError(err)) return {
|
|
29
|
+
stdout: err.stdout,
|
|
30
|
+
stderr: err.stderr,
|
|
31
|
+
exitCode: err.status
|
|
32
|
+
};
|
|
33
|
+
return {
|
|
34
|
+
stdout: "",
|
|
35
|
+
stderr: "",
|
|
36
|
+
exitCode: 1
|
|
37
|
+
};
|
|
38
|
+
}
|
|
39
|
+
},
|
|
40
|
+
execInherit(command, options) {
|
|
41
|
+
execSync(command, {
|
|
42
|
+
cwd: options?.cwd,
|
|
43
|
+
env: options?.env ? {
|
|
44
|
+
...process.env,
|
|
45
|
+
...options.env
|
|
46
|
+
} : void 0,
|
|
47
|
+
stdio: "inherit"
|
|
48
|
+
});
|
|
49
|
+
},
|
|
50
|
+
fetch: globalThis.fetch,
|
|
51
|
+
now: () => Date.now(),
|
|
52
|
+
sleep: (ms) => new Promise((resolve) => setTimeout(resolve, ms)),
|
|
53
|
+
onSignal(signal, handler) {
|
|
54
|
+
process.on(signal, handler);
|
|
55
|
+
return () => {
|
|
56
|
+
process.removeListener(signal, handler);
|
|
57
|
+
};
|
|
58
|
+
},
|
|
59
|
+
log: (msg) => console.log(msg),
|
|
60
|
+
logError: (msg) => console.error(msg)
|
|
61
|
+
};
|
|
62
|
+
}
|
|
63
|
+
//#endregion
|
|
64
|
+
//#region src/docker-verify/compose.ts
|
|
65
|
+
/** Zod schema for a single container entry from `docker compose ps --format json`. */
|
|
66
|
+
const ContainerInfoSchema = z.object({
|
|
67
|
+
Service: z.string(),
|
|
68
|
+
Health: z.string()
|
|
69
|
+
});
|
|
70
|
+
/** Build the `docker compose` base command string from config. */
|
|
71
|
+
function composeCommand(config) {
|
|
72
|
+
return `docker compose ${config.composeFiles.map((f) => `-f ${f}`).join(" ")}${config.envFile ? ` --env-file ${config.envFile}` : ""}`;
|
|
73
|
+
}
|
|
74
|
+
/** Run the build command if configured. */
|
|
75
|
+
function buildImages(executor, config) {
|
|
76
|
+
if (!config.buildCommand) return;
|
|
77
|
+
executor.execInherit(config.buildCommand, { cwd: config.buildCwd ?? config.compose.cwd });
|
|
78
|
+
}
|
|
79
|
+
/** Start the compose stack in detached mode. */
|
|
80
|
+
function composeUp(executor, config) {
|
|
81
|
+
executor.execInherit(`${composeCommand(config)} up -d`, { cwd: config.cwd });
|
|
82
|
+
}
|
|
83
|
+
/** Tear down the compose stack, removing volumes and orphans. Swallows errors. */
|
|
84
|
+
function composeDown(executor, config) {
|
|
85
|
+
try {
|
|
86
|
+
executor.execInherit(`${composeCommand(config)} down -v --remove-orphans`, { cwd: config.cwd });
|
|
87
|
+
} catch (_error) {}
|
|
88
|
+
}
|
|
89
|
+
/** Show logs for a specific service (or all services if not specified). Swallows errors. */
|
|
90
|
+
function composeLogs(executor, config, service) {
|
|
91
|
+
try {
|
|
92
|
+
const suffix = service ? ` ${service}` : "";
|
|
93
|
+
executor.execInherit(`${composeCommand(config)} logs${suffix}`, { cwd: config.cwd });
|
|
94
|
+
} catch (_error) {}
|
|
95
|
+
}
|
|
96
|
+
/**
|
|
97
|
+
* Query container status via `docker compose ps --format json`.
|
|
98
|
+
* Handles both JSON array and newline-delimited JSON (varies by docker compose version).
|
|
99
|
+
*/
|
|
100
|
+
function composePs(executor, config) {
|
|
101
|
+
const output = executor.exec(`${composeCommand(config)} ps --format json`, { cwd: config.cwd }).stdout.trim();
|
|
102
|
+
if (!output) return [];
|
|
103
|
+
const ArraySchema = z.array(ContainerInfoSchema);
|
|
104
|
+
try {
|
|
105
|
+
const direct = ArraySchema.safeParse(JSON.parse(output));
|
|
106
|
+
if (direct.success) return direct.data;
|
|
107
|
+
const single = ContainerInfoSchema.safeParse(JSON.parse(output));
|
|
108
|
+
if (single.success) return [single.data];
|
|
109
|
+
} catch (_error) {}
|
|
110
|
+
try {
|
|
111
|
+
const joined = `[${output.split("\n").join(",")}]`;
|
|
112
|
+
const delimited = ArraySchema.safeParse(JSON.parse(joined));
|
|
113
|
+
return delimited.success ? delimited.data : [];
|
|
114
|
+
} catch (_error) {
|
|
115
|
+
return [];
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
//#endregion
|
|
119
|
+
//#region src/docker-verify/health.ts
|
|
120
|
+
/** Look up the health status of a specific service from container info. */
|
|
121
|
+
function getContainerHealth(containers, serviceName) {
|
|
122
|
+
return containers.find((c) => c.Service === serviceName)?.Health ?? "unknown";
|
|
123
|
+
}
|
|
124
|
+
/** Run a single HTTP health check, returning true if the validator passes. */
|
|
125
|
+
async function checkHttpHealth(executor, check) {
|
|
126
|
+
try {
|
|
127
|
+
const response = await executor.fetch(check.url);
|
|
128
|
+
return await check.validate(response);
|
|
129
|
+
} catch (_error) {
|
|
130
|
+
return false;
|
|
131
|
+
}
|
|
132
|
+
}
|
|
133
|
+
//#endregion
|
|
134
|
+
//#region src/docker-verify/verify.ts
|
|
135
|
+
const DEFAULT_TIMEOUT_MS = 12e4;
|
|
136
|
+
const DEFAULT_POLL_INTERVAL_MS = 5e3;
|
|
137
|
+
/** Run the full Docker image verification lifecycle. */
|
|
138
|
+
async function runVerification(executor, config) {
|
|
139
|
+
const timeoutMs = config.timeoutMs ?? DEFAULT_TIMEOUT_MS;
|
|
140
|
+
const pollIntervalMs = config.pollIntervalMs ?? DEFAULT_POLL_INTERVAL_MS;
|
|
141
|
+
const { compose } = config;
|
|
142
|
+
const cleanup = () => composeDown(executor, compose);
|
|
143
|
+
const disposeInt = executor.onSignal("SIGINT", () => {
|
|
144
|
+
cleanup();
|
|
145
|
+
process.exit(1);
|
|
146
|
+
});
|
|
147
|
+
const disposeTerm = executor.onSignal("SIGTERM", () => {
|
|
148
|
+
cleanup();
|
|
149
|
+
process.exit(1);
|
|
150
|
+
});
|
|
151
|
+
try {
|
|
152
|
+
if (config.buildCommand) {
|
|
153
|
+
executor.log("Building images...");
|
|
154
|
+
buildImages(executor, config);
|
|
155
|
+
}
|
|
156
|
+
executor.log("Starting compose stack...");
|
|
157
|
+
composeUp(executor, compose);
|
|
158
|
+
executor.log(`Waiting for stack to be healthy (max ${timeoutMs / 1e3}s)...`);
|
|
159
|
+
const startTime = executor.now();
|
|
160
|
+
const healthStatus = new Map(config.healthChecks.map((c) => [c.name, false]));
|
|
161
|
+
while (executor.now() - startTime < timeoutMs) {
|
|
162
|
+
const containers = composePs(executor, compose);
|
|
163
|
+
for (const service of compose.services) if (getContainerHealth(containers, service) === "unhealthy") {
|
|
164
|
+
executor.logError(`Container ${service} is unhealthy`);
|
|
165
|
+
composeLogs(executor, compose, service);
|
|
166
|
+
cleanup();
|
|
167
|
+
return {
|
|
168
|
+
success: false,
|
|
169
|
+
reason: "unhealthy-container",
|
|
170
|
+
message: service,
|
|
171
|
+
elapsedMs: executor.now() - startTime
|
|
172
|
+
};
|
|
173
|
+
}
|
|
174
|
+
for (const check of config.healthChecks) if (!healthStatus.get(check.name)) {
|
|
175
|
+
if (await checkHttpHealth(executor, check)) {
|
|
176
|
+
healthStatus.set(check.name, true);
|
|
177
|
+
executor.log(`${check.name} is healthy!`);
|
|
178
|
+
}
|
|
179
|
+
}
|
|
180
|
+
if ([...healthStatus.values()].every(Boolean)) {
|
|
181
|
+
executor.log("Verification successful! All systems operational.");
|
|
182
|
+
cleanup();
|
|
183
|
+
return {
|
|
184
|
+
success: true,
|
|
185
|
+
elapsedMs: executor.now() - startTime
|
|
186
|
+
};
|
|
187
|
+
}
|
|
188
|
+
const elapsed = Math.floor((executor.now() - startTime) / 1e3);
|
|
189
|
+
if (elapsed > 0 && elapsed % 5 === 0) {
|
|
190
|
+
const statuses = [...healthStatus.entries()].map(([name, ok]) => `${name}=${ok ? "OK" : "Pending"}`).join(", ");
|
|
191
|
+
executor.log(`Waiting... (${elapsed}s elapsed). ${statuses}`);
|
|
192
|
+
}
|
|
193
|
+
await executor.sleep(pollIntervalMs);
|
|
194
|
+
}
|
|
195
|
+
executor.logError("Timeout waiting for stack to become healthy");
|
|
196
|
+
for (const service of compose.services) composeLogs(executor, compose, service);
|
|
197
|
+
cleanup();
|
|
198
|
+
return {
|
|
199
|
+
success: false,
|
|
200
|
+
reason: "timeout",
|
|
201
|
+
message: "Exceeded timeout",
|
|
202
|
+
elapsedMs: executor.now() - startTime
|
|
203
|
+
};
|
|
204
|
+
} catch (error) {
|
|
205
|
+
cleanup();
|
|
206
|
+
return {
|
|
207
|
+
success: false,
|
|
208
|
+
reason: "error",
|
|
209
|
+
message: error instanceof Error ? error.message : String(error),
|
|
210
|
+
elapsedMs: 0
|
|
211
|
+
};
|
|
212
|
+
} finally {
|
|
213
|
+
disposeInt();
|
|
214
|
+
disposeTerm();
|
|
215
|
+
}
|
|
216
|
+
}
|
|
217
|
+
//#endregion
|
|
218
|
+
export { checkHttpHealth, composeCommand, composeDown, composeLogs, composePs, composeUp, createRealExecutor, getContainerHealth, runVerification };
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
//#region src/utils/exec.ts
|
|
2
|
+
/** Type guard for `execSync` errors that carry stdout/stderr/status. */
|
|
3
|
+
function isExecSyncError(err) {
|
|
4
|
+
return err instanceof Error && "stdout" in err && typeof err.stdout === "string" && "stderr" in err && typeof err.stderr === "string" && "status" in err && typeof err.status === "number";
|
|
5
|
+
}
|
|
6
|
+
//#endregion
|
|
7
|
+
export { isExecSyncError as t };
|
package/package.json
CHANGED
|
@@ -1,20 +1,22 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@bensandee/tooling",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.14.0",
|
|
4
4
|
"description": "CLI tool to bootstrap and maintain standardized TypeScript project tooling",
|
|
5
5
|
"bin": {
|
|
6
6
|
"tooling": "./dist/bin.mjs"
|
|
7
7
|
},
|
|
8
8
|
"files": [
|
|
9
|
-
"dist"
|
|
9
|
+
"dist",
|
|
10
|
+
"CHANGELOG.md"
|
|
10
11
|
],
|
|
11
12
|
"type": "module",
|
|
12
13
|
"imports": {
|
|
13
|
-
"#src
|
|
14
|
+
"#src/*.ts": "./src/*.ts"
|
|
14
15
|
},
|
|
15
16
|
"exports": {
|
|
16
17
|
".": "./dist/index.mjs",
|
|
17
18
|
"./bin": "./dist/bin.mjs",
|
|
19
|
+
"./docker-verify": "./dist/docker-verify/index.mjs",
|
|
18
20
|
"./package.json": "./package.json"
|
|
19
21
|
},
|
|
20
22
|
"publishConfig": {
|
|
@@ -27,14 +29,18 @@
|
|
|
27
29
|
"jsonc-parser": "^3.3.1",
|
|
28
30
|
"yaml": "^2.8.2",
|
|
29
31
|
"zod": "^4.3.6",
|
|
30
|
-
"@bensandee/common": "0.1.
|
|
32
|
+
"@bensandee/common": "0.1.1"
|
|
31
33
|
},
|
|
32
34
|
"devDependencies": {
|
|
33
35
|
"@types/node": "24.12.0",
|
|
34
36
|
"tsdown": "0.21.0",
|
|
35
37
|
"typescript": "5.9.3",
|
|
36
38
|
"vitest": "4.0.18",
|
|
37
|
-
"@bensandee/config": "0.
|
|
39
|
+
"@bensandee/config": "0.8.0"
|
|
40
|
+
},
|
|
41
|
+
"optionalDependencies": {
|
|
42
|
+
"@changesets/cli": "^2.29.4",
|
|
43
|
+
"commit-and-tag-version": "^12.5.0"
|
|
38
44
|
},
|
|
39
45
|
"scripts": {
|
|
40
46
|
"build": "tsdown",
|