bulk-release 2.19.1 → 2.21.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/CHANGELOG.md +25 -0
  2. package/README.md +72 -77
  3. package/package.json +3 -3
  4. package/src/main/js/config.js +12 -3
  5. package/src/main/js/index.js +0 -6
  6. package/src/main/js/processor/api/gh.js +111 -0
  7. package/src/main/js/{api → processor/api}/git.js +17 -26
  8. package/src/main/js/{api → processor/api}/npm.js +70 -28
  9. package/src/main/js/processor/deps.js +1 -1
  10. package/src/main/js/processor/exec.js +4 -4
  11. package/src/main/js/processor/generators/meta.js +80 -0
  12. package/src/main/js/processor/generators/notes.js +37 -0
  13. package/src/main/js/processor/{meta.js → generators/tag.js} +3 -109
  14. package/src/main/js/processor/log.js +86 -0
  15. package/src/main/js/processor/publishers/changelog.js +26 -0
  16. package/src/main/js/processor/publishers/cmd.js +6 -0
  17. package/src/main/js/processor/publishers/gh-pages.js +32 -0
  18. package/src/main/js/processor/publishers/gh-release.js +41 -0
  19. package/src/main/js/processor/publishers/meta.js +58 -0
  20. package/src/main/js/processor/publishers/npm.js +15 -0
  21. package/src/main/js/processor/release.js +71 -66
  22. package/src/main/js/{steps → processor/steps}/analyze.js +18 -24
  23. package/src/main/js/processor/steps/build.js +20 -0
  24. package/src/main/js/processor/steps/clean.js +7 -0
  25. package/src/main/js/processor/steps/contextify.js +49 -0
  26. package/src/main/js/processor/steps/publish.js +39 -0
  27. package/src/main/js/processor/steps/teardown.js +58 -0
  28. package/src/main/js/processor/steps/test.js +10 -0
  29. package/src/main/js/util.js +32 -77
  30. package/src/main/js/api/changelog.js +0 -42
  31. package/src/main/js/api/gh.js +0 -131
  32. package/src/main/js/log.js +0 -63
  33. package/src/main/js/steps/build.js +0 -23
  34. package/src/main/js/steps/clean.js +0 -7
  35. package/src/main/js/steps/contextify.js +0 -154
  36. package/src/main/js/steps/publish.js +0 -47
  37. package/src/main/js/steps/test.js +0 -16
package/CHANGELOG.md CHANGED
@@ -1,3 +1,28 @@
1
+ ## [2.21.0](https://github.com/semrel-extra/zx-bulk-release/compare/v2.20.0...v2.21.0) (2026-04-10)
2
+
3
+ ### Fixes & improvements
4
+ * docs: update badges ([c9fc1af](https://github.com/semrel-extra/zx-bulk-release/commit/c9fc1af8cfb3d177dc328d1934cb7e663c36f0ca))
5
+ * fix: fix memoize store ([7fddefa](https://github.com/semrel-extra/zx-bulk-release/commit/7fddefa590ded43907d059d3b58e235adff6fa17))
6
+ * refactor: enhance logger ([89d9cff](https://github.com/semrel-extra/zx-bulk-release/commit/89d9cff7727e2190e6490d51f759e7039a25cdb2))
7
+ * docs: mention testing along the change graph flow ([44fd065](https://github.com/semrel-extra/zx-bulk-release/commit/44fd06568d552ae19e753c91f6536978a9c1d28e))
8
+ * docs: add cmd tpl usage example ([d228de0](https://github.com/semrel-extra/zx-bulk-release/commit/d228de09e456799c9e113b36b971187ae7c3c1be))
9
+ * refactor: rearrange utils ([8beeab6](https://github.com/semrel-extra/zx-bulk-release/commit/8beeab6906e4500429cc165f05b39fc5d57e5a09))
10
+ * refactor: optimize npm ver assert (oidc) ([e3b5939](https://github.com/semrel-extra/zx-bulk-release/commit/e3b5939cfb26ab2c0d84bc4a09ebf5971eb2a33a))
11
+ * refactor: bind zx pkg mdc with logger ([fd763a4](https://github.com/semrel-extra/zx-bulk-release/commit/fd763a42a2229ccbc902936fbbfa4a38652685e5))
12
+ * refactor: align context injection flow ([6edb11b](https://github.com/semrel-extra/zx-bulk-release/commit/6edb11bfa669ae9519a134383f5299d77bc4f1a6))
13
+ * refactor: rearrange processor layers ([ea7ef42](https://github.com/semrel-extra/zx-bulk-release/commit/ea7ef42e5632691dfee8705a4f4ef73e0c166979))
14
+ * refactor: align internal publishers contract ([c16b6d8](https://github.com/semrel-extra/zx-bulk-release/commit/c16b6d8d11ade2d5d159936a457c43c0a2989141))
15
+ * refactor: ghFetch helper ([fc3517e](https://github.com/semrel-extra/zx-bulk-release/commit/fc3517e24e12cc28a63d1088d93cecb1114ce5b5))
16
+ * refactor: introduce `attempt` helper ([57f0d06](https://github.com/semrel-extra/zx-bulk-release/commit/57f0d06a093136e57600f2362ab46e16165c9b59))
17
+
18
+ ### Features
19
+ * feat: add secrets masker ([fffea8b](https://github.com/semrel-extra/zx-bulk-release/commit/fffea8bfbebd6155e40f318a9303551ec4ae6e52))
20
+
21
+ ## [2.20.0](https://github.com/semrel-extra/zx-bulk-release/compare/v2.19.1...v2.20.0) (2026-04-05)
22
+
23
+ ### Features
24
+ * feat: support custom ghe urls (#89) ([e33d53c](https://github.com/semrel-extra/zx-bulk-release/commit/e33d53cfe4ac15d8fd66785622d9df428a15d8e6))
25
+
1
26
  ## [2.19.1](https://github.com/semrel-extra/zx-bulk-release/compare/v2.19.0...v2.19.1) (2026-04-05)
2
27
 
3
28
  ### Fixes & improvements
package/README.md CHANGED
@@ -1,9 +1,9 @@
1
1
  # zx-bulk-release
2
2
  > [zx](https://github.com/google/zx)-based alternative for [multi-semantic-release](https://github.com/dhoulb/multi-semantic-release)
3
3
 
4
- [![CI](https://github.com/semrel-extra/zx-bulk-release/workflows/CI/badge.svg)](https://github.com/semrel-extra/zx-bulk-release/actions)
5
- [![Maintainability](https://api.codeclimate.com/v1/badges/bb94e929b1b6430781b5/maintainability)](https://codeclimate.com/github/semrel-extra/zx-bulk-release/maintainability)
6
- [![Test Coverage](https://api.codeclimate.com/v1/badges/bb94e929b1b6430781b5/test_coverage)](https://codeclimate.com/github/semrel-extra/zx-bulk-release/test_coverage)
4
+ [![CI](https://github.com/semrel-extra/zx-bulk-release/workflows/CI/badge.svg?branch=master)](https://github.com/semrel-extra/zx-bulk-release/actions)
5
+ [![Maintainability](https://qlty.sh/badges/semrel-extra/zx-bulk-release/maintainability.svg)](https://qlty.sh/gh/semrel-extra/zx-bulk-release)
6
+ [![Test Coverage](https://qlty.sh/badges/semrel-extra/zx-bulk-release/test_coverage.svg)](https://qlty.sh/gh/semrel-extra/zx-bulk-release)
7
7
  [![npm (tag)](https://img.shields.io/npm/v/zx-bulk-release)](https://www.npmjs.com/package/zx-bulk-release)
8
8
 
9
9
  ## Key features
@@ -74,21 +74,55 @@ await run({
74
74
  Any [cosmiconfig](https://github.com/davidtheclark/cosmiconfig) compliant format: `.releaserc`, `.release.json`, `.release.yaml`, etc in the package root or in the repo root dir.
75
75
  ```json
76
76
  {
77
- "cmd": "yarn && yarn build && yarn test",
77
+ "buildCmd": "yarn && yarn build",
78
+ "testCmd": "yarn test",
78
79
  "npmFetch": true,
79
80
  "changelog": "changelog",
80
- "ghPages": "gh-pages"
81
+ "ghPages": "gh-pages",
82
+ "diffTagUrl": "${repoPublicUrl}/compare/${prevTag}...${newTag}",
83
+ "diffCommitUrl": "${repoPublicUrl}/commit/${hash}"
81
84
  }
82
85
  ```
83
86
 
87
+ #### Command templating
88
+ `buildCmd`, `testCmd` and `publishCmd` support `${{ variable }}` interpolation. The template context includes all `pkg` fields and the release context (`flags`, `git`, `env`, etc.):
89
+ ```json
90
+ {
91
+ "buildCmd": "yarn build --pkg=${{name}} --ver=${{version}}",
92
+ "testCmd": "yarn test --scope=${{name}}",
93
+ "publishCmd": "echo releasing ${{name}}@${{version}}"
94
+ }
95
+ ```
96
+ Available variables include: `name`, `version`, `absPath`, `relPath`, and anything from `pkg.ctx` (e.g. `git.sha`, `git.root`, `flags.*`).
97
+
98
+ #### Changelog diff URLs
99
+ By default, changelog entries link to GitHub compare/commit pages. Override `diffTagUrl` and `diffCommitUrl` to customize for other platforms (e.g. Gerrit):
100
+ ```json
101
+ {
102
+ "diffTagUrl": "https://gerrit.foo.com/plugins/gitiles/${repoName}/+/refs/tags/${newTag}",
103
+ "diffCommitUrl": "https://gerrit.foo.com/plugins/gitiles/${repoName}/+/${hash}%5E%21"
104
+ }
105
+ ```
106
+ Available variables: `repoName`, `repoPublicUrl`, `prevTag`, `newTag`, `name`, `version`, `hash`, `short`.
107
+
108
+ #### GitHub Enterprise
109
+ Set `ghUrl` to point to your GHE instance. API URL (`ghApiUrl`) is derived automatically.
110
+ ```json
111
+ {
112
+ "ghUrl": "https://ghe.corp.com"
113
+ }
114
+ ```
115
+ Or via env: `GH_URL=https://ghe.corp.com` / `GITHUB_URL=https://ghe.corp.com`.
116
+
84
117
  ### env vars
85
118
  ```js
86
119
  export const parseEnv = (env = process.env) => {
87
- const {GH_USER, GH_USERNAME, GITHUB_USER, GITHUB_USERNAME, GH_TOKEN, GITHUB_TOKEN, NPM_TOKEN, NPM_REGISTRY, NPMRC, NPM_USERCONFIG, NPM_CONFIG_USERCONFIG, NPM_PROVENANCE, NPM_OIDC, ACTIONS_ID_TOKEN_REQUEST_URL, GIT_COMMITTER_NAME, GIT_COMMITTER_EMAIL} = env
120
+ const {GH_USER, GH_USERNAME, GITHUB_USER, GITHUB_USERNAME, GH_TOKEN, GITHUB_TOKEN, GH_URL, GITHUB_URL, NPM_TOKEN, NPM_REGISTRY, NPMRC, NPM_USERCONFIG, NPM_CONFIG_USERCONFIG, NPM_PROVENANCE, NPM_OIDC, ACTIONS_ID_TOKEN_REQUEST_URL, GIT_COMMITTER_NAME, GIT_COMMITTER_EMAIL} = env
88
121
 
89
122
  return {
90
- ghUser: GH_USER || GH_USERNAME || GITHUB_USER || GITHUB_USERNAME,
123
+ ghUser: GH_USER || GH_USERNAME || GITHUB_USER || GITHUB_USERNAME || 'x-access-token',
91
124
  ghToken: GH_TOKEN || GITHUB_TOKEN,
125
+ ghUrl: GH_URL || GITHUB_URL || 'https://github.com',
92
126
  npmToken: NPM_TOKEN,
93
127
  // npmConfig suppresses npmToken
94
128
  npmConfig: NPMRC || NPM_USERCONFIG || NPM_CONFIG_USERCONFIG,
@@ -113,62 +147,39 @@ OIDC mode is also auto-detected when `NPM_TOKEN` is not set and `ACTIONS_ID_TOKE
113
147
 
114
148
  When OIDC is active, `NPM_TOKEN` and `NPMRC` are ignored for publishing and `--provenance` is enabled automatically.
115
149
 
150
+ ### Selective testing along the change graph
151
+ In a monorepo, `--dry-run` combined with `--no-build` lets you run tests only for packages affected by the current changes — following the dependency graph, without publishing anything. This gives you a precise CI check scoped to what actually changed:
152
+ ```shell
153
+ npx zx-bulk-release --dry-run --no-build
154
+ ```
155
+ See [antongolub/misc](https://github.com/antongolub/misc) for a real-world example of this pattern.
156
+
116
157
  ## Demo
117
158
  * [demo-zx-bulk-release](https://github.com/semrel-extra/demo-zx-bulk-release)
118
159
  * [qiwi/pijma](https://github.com/qiwi/pijma)
160
+ * [antongolub/misc](https://github.com/antongolub/misc)
119
161
 
120
162
  ## Implementation notes
121
163
  ### Flow
122
- ```js
123
- try {
124
- const {packages, queue, root} = await topo({cwd, flags})
125
- console.log('queue:', queue)
126
-
127
- for (let name of queue) {
128
- const pkg = packages[name]
129
-
130
- await analyze(pkg, packages, root)
131
-
132
- if (pkg.changes.length === 0) continue
133
-
134
- await build(pkg, packages)
135
-
136
- if (flags.dryRun) continue
137
-
138
- await publish(pkg)
139
- }
140
- } catch (e) {
141
- console.error(e)
142
- throw e
143
- }
144
164
  ```
165
+ topo ─► contextify ─► analyze ──► build ──► test ──► publish ─► clean
166
+ (topo runs once per repo; every subsequent step runs per package)
167
+ ```
168
+ [`@semrel-extra/topo`](https://github.com/semrel-extra/topo) resolves the release queue respecting dependency graphs. The graph allows parallel execution where the dependency tree permits; `memoizeBy` prevents duplicate work when a package is reached by multiple paths.
145
169
 
146
- ### `topo`
147
- [Toposort](https://github.com/semrel-extra/topo) is used to resolve the pkg release queue.
148
- By default, it omits the packages marked as `private`. You can override this by setting the `--include-private` flag.
149
-
150
- ### `analyze`
151
- Determines pkg changes, release type, next version etc.
152
-
153
- ```js
154
- export const analyze = async (pkg, packages, root) => {
155
- pkg.config = await getPkgConfig(pkg.absPath, root.absPath)
156
- pkg.latest = await getLatest(pkg)
157
-
158
- const semanticChanges = await getSemanticChanges(pkg.absPath, pkg.latest.tag?.ref)
159
- const depsChanges = await updateDeps(pkg, packages)
160
- const changes = [...semanticChanges, ...depsChanges]
170
+ By default, packages marked as `private` are omitted. Override with `--include-private`.
161
171
 
162
- pkg.changes = changes
163
- pkg.version = resolvePkgVersion(changes, pkg.latest.tag?.version || pkg.manifest.version)
164
- pkg.manifest.version = pkg.version
165
-
166
- console.log(`[${pkg.name}] semantic changes`, changes)
167
- }
168
- ```
172
+ ### Steps
173
+ Each step has a uniform signature `(pkg, ctx)`:
174
+ - **`contextify`** — resolves per-package config, latest release metadata, and git context.
175
+ - **`analyze`** — determines semantic changes, release type, and next version.
176
+ - **`build`** — runs `buildCmd` (with dep traversal and optional npm artifact fetch).
177
+ - **`test`** — runs `testCmd`.
178
+ - **`publish`** — orchestrates the publisher registry: prepare (serial) → run (parallel) → rollback on failure.
179
+ - **`clean`** — restores `package.json` files and unsets git user config.
169
180
 
170
181
  Set `config.releaseRules` to override the default rules preset:
171
- ```ts
182
+ ```js
172
183
  [
173
184
  {group: 'Features', releaseType: 'minor', prefixes: ['feat']},
174
185
  {group: 'Fixes & improvements', releaseType: 'patch', prefixes: ['fix', 'perf', 'refactor', 'docs', 'patch']},
@@ -176,32 +187,16 @@ Set `config.releaseRules` to override the default rules preset:
176
187
  ]
177
188
  ```
178
189
 
179
- ### `build`
180
- Applies `config.cmd` to build pkg assets: bundles, docs, etc.
181
- ```js
182
- export const build = async (pkg, packages) => {
183
- // ...
184
- if (!pkg.fetched && config.cmd) {
185
- console.log(`[${pkg.name}] run cmd '${config.cmd}'`)
186
- await $.o({cwd: pkg.absPath, quote: v => v})`${config.cmd}`
187
- }
188
- // ...
189
- }
190
- ```
190
+ ### Publishers
191
+ Publish targets are a registry of `{name, when, prepare?, run, undo?, snapshot?}` objects:
192
+ - **meta** — pushes release metadata to the `meta` branch (or as a GH release asset).
193
+ - **npm** — publishes to the npm registry.
194
+ - **gh-release** — creates a GitHub release with optional file assets.
195
+ - **gh-pages** — pushes docs to a `gh-pages` branch.
196
+ - **changelog** — pushes a changelog entry to a `changelog` branch.
197
+ - **cmd** — runs a custom `publishCmd`.
191
198
 
192
- ### `publish`
193
- Publish the pkg to git, npm, gh-pages, gh-release, etc.
194
- ```js
195
- export const publish = async (pkg) => {
196
- await fs.writeJson(pkg.manifestPath, pkg.manifest, {spaces: 2})
197
- await pushTag(pkg)
198
- await pushMeta(pkg)
199
- await pushChangelog(pkg)
200
- await npmPublish(pkg)
201
- await ghRelease(pkg)
202
- await ghPages(pkg)
203
- }
204
- ```
199
+ Teardown walks the registry in reverse, calling `undo()` on each publisher for rollback/recovery.
205
200
 
206
201
  ### Tags
207
202
  [Lerna](https://github.com/lerna/lerna) tags (like `@pkg/name@v1.0.0-beta.0`) are suitable for monorepos, but they don’t follow [semver spec](https://semver.org/). Therefore, we propose another contract:
package/package.json CHANGED
@@ -1,13 +1,13 @@
1
1
  {
2
2
  "name": "bulk-release",
3
3
  "alias": "bulk-release",
4
- "version": "2.19.1",
4
+ "version": "2.21.0",
5
5
  "description": "zx-based alternative for multi-semantic-release",
6
6
  "type": "module",
7
7
  "exports": {
8
8
  ".": "./src/main/js/index.js",
9
9
  "./test-utils": "./src/test/js/test-utils.js",
10
- "./meta": "./src/main/js/meta.js"
10
+ "./meta": "./src/main/js/processor/generators/meta.js"
11
11
  },
12
12
  "bin": "./src/main/js/cli.js",
13
13
  "files": [
@@ -35,7 +35,7 @@
35
35
  "c8": "^11.0.0",
36
36
  "esbuild": "^0.28.0",
37
37
  "uvu": "^0.5.6",
38
- "verdaccio": "6.3.2"
38
+ "verdaccio": "6.4.0"
39
39
  },
40
40
  "publishConfig": {
41
41
  "access": "public"
@@ -58,10 +58,18 @@ export const normalizeMetaConfig = (meta) =>
58
58
  ? { type: meta } // 'commit' | 'asset' | 'tag'
59
59
  : { type: null }
60
60
 
61
- export const parseEnv = ({GH_USER, GH_USERNAME, GH_META, GITHUB_USER, GITHUB_USERNAME, GH_TOKEN, GITHUB_TOKEN, NPM_TOKEN, NPM_REGISTRY, NPMRC, NPM_USERCONFIG, NPM_CONFIG_USERCONFIG, NPM_PROVENANCE, NPM_OIDC, ACTIONS_ID_TOKEN_REQUEST_URL, GIT_COMMITTER_NAME, GIT_COMMITTER_EMAIL} = process.env) =>
62
- ({
61
+ export const GH_URL = 'https://github.com'
62
+
63
+ const resolveGhApiUrl = (ghUrl) =>
64
+ ghUrl === GH_URL ? 'https://api.github.com' : `${ghUrl.replace(/\/$/, '')}/api/v3`
65
+
66
+ export const parseEnv = ({GH_USER, GH_USERNAME, GH_META, GH_URL: _GH_URL, GITHUB_URL, GITHUB_USER, GITHUB_USERNAME, GH_TOKEN, GITHUB_TOKEN, NPM_TOKEN, NPM_REGISTRY, NPMRC, NPM_USERCONFIG, NPM_CONFIG_USERCONFIG, NPM_PROVENANCE, NPM_OIDC, ACTIONS_ID_TOKEN_REQUEST_URL, GIT_COMMITTER_NAME, GIT_COMMITTER_EMAIL} = process.env) => {
67
+ const ghUrl = _GH_URL || GITHUB_URL || GH_URL
68
+ return {
63
69
  ghUser: GH_USER || GH_USERNAME || GITHUB_USER || GITHUB_USERNAME || ((GH_TOKEN || GITHUB_TOKEN) ? 'x-access-token' : undefined),
64
70
  ghToken: GH_TOKEN || GITHUB_TOKEN,
71
+ ghUrl,
72
+ ghApiUrl: resolveGhApiUrl(ghUrl),
65
73
  ghMeta: GH_META,
66
74
  npmConfig: NPMRC || NPM_USERCONFIG || NPM_CONFIG_USERCONFIG,
67
75
  npmToken: NPM_TOKEN,
@@ -70,6 +78,7 @@ export const parseEnv = ({GH_USER, GH_USERNAME, GH_META, GITHUB_USER, GITHUB_USE
70
78
  npmRegistry: NPM_REGISTRY || 'https://registry.npmjs.org',
71
79
  gitCommitterName: GIT_COMMITTER_NAME || 'Semrel Extra Bot',
72
80
  gitCommitterEmail: GIT_COMMITTER_EMAIL || 'semrel-extra-bot@hotmail.com',
73
- })
81
+ }
82
+ }
74
83
 
75
84
  export const normalizeFlags = (flags = {}) => Object.entries(flags).reduce((acc, [k, v]) => ({...acc, [camelize(k)]: v}), {})
@@ -1,7 +1 @@
1
- import process from 'node:process'
2
- import { $ } from 'zx-extra'
3
-
4
- $.quiet = !process.env.DEBUG
5
- $.verbose = !!process.env.DEBUG
6
-
7
1
  export {run} from './processor/release.js'
@@ -0,0 +1,111 @@
1
+ // Low-level GitHub API primitives. No domain knowledge, no imports from processor/ or steps/.
2
+
3
+ import {$, path, tempy, glob, fs, fetch} from 'zx-extra'
4
+ import {asArray, attempt2} from '../../util.js'
5
+
6
+ export const getCommonPath = files => {
7
+ const f0 = files[0]
8
+ const common = files.length === 1
9
+ ? f0.lastIndexOf('/') + 1
10
+ : [...f0].findIndex((c, i) => files.some(f => f.charAt(i) !== c))
11
+ const p = f0.slice(0, common)
12
+ return p.endsWith('/') ? p : p.slice(0, p.lastIndexOf('/') + 1)
13
+ }
14
+
15
+ export const GH_API_VERSION = '2022-11-28'
16
+ export const GH_ACCEPT = 'application/vnd.github.v3+json'
17
+
18
+ export const ghFetch = (url, {ghToken, method = 'GET', headers, body} = {}) => fetch(url, {
19
+ method,
20
+ headers: {
21
+ Accept: GH_ACCEPT,
22
+ 'X-GitHub-Api-Version': GH_API_VERSION,
23
+ ...(ghToken ? {Authorization: `token ${ghToken}`} : {}),
24
+ ...headers,
25
+ },
26
+ body,
27
+ })
28
+
29
+ // https://docs.github.com/en/rest/releases/releases?apiVersion=2022-11-28#create-a-release
30
+ export const ghCreateRelease = async ({ghApiUrl, ghToken, repoName, tag, body}) => {
31
+ const res = await (await ghFetch(`${ghApiUrl}/repos/${repoName}/releases`, {
32
+ ghToken,
33
+ method: 'POST',
34
+ body: JSON.stringify({name: tag, tag_name: tag, body}),
35
+ })).json()
36
+
37
+ if (!res.upload_url) {
38
+ throw new Error(`gh release failed: ${JSON.stringify(res)}`)
39
+ }
40
+ return res
41
+ }
42
+
43
+ // https://docs.github.com/en/rest/releases/releases?apiVersion=2022-11-28#delete-a-release
44
+ export const ghDeleteReleaseByTag = async ({ghApiUrl, ghToken, repoName, tag}) => {
45
+ const res = await attempt2(() => ghFetch(`${ghApiUrl}/repos/${repoName}/releases/tags/${tag}`, {ghToken}))
46
+ if (!res.ok) return false
47
+ const {id} = await res.json()
48
+ await attempt2(() => ghFetch(`${ghApiUrl}/repos/${repoName}/releases/${id}`, {ghToken, method: 'DELETE'}))
49
+ return true
50
+ }
51
+
52
+ // https://docs.github.com/en/rest/releases/assets?apiVersion=2022-11-28#upload-a-release-asset
53
+ export const ghPrepareAssets = async (assets, _cwd) => {
54
+ const temp = tempy.temporaryDirectory()
55
+
56
+ await Promise.all(assets.map(async ({name, contents, source = 'target/**/*', zip, cwd = _cwd, strip = true}) => {
57
+ const target = path.join(temp, name)
58
+
59
+ if (contents) {
60
+ await fs.outputFile(target, contents, 'utf8')
61
+ return
62
+ }
63
+
64
+ const patterns = asArray(source)
65
+ if (patterns.some(s => s.includes('*'))) {
66
+ zip = true
67
+ }
68
+ const files = await glob(patterns, {cwd, absolute: false, onlyFiles: true})
69
+
70
+ if (files.length === 0) {
71
+ throw new Error(`gh asset not found: ${name} ${source}`)
72
+ }
73
+
74
+ if (!zip && files.length === 1) {
75
+ await fs.copy(path.join(cwd, files[0]), target)
76
+ return
77
+ }
78
+ const prefix = getCommonPath(files)
79
+
80
+ return $.raw`tar -C ${path.join(cwd, prefix)} -cv${zip ? 'z' : ''}f ${target} ${files.map(f => f.slice(prefix.length)).join(' ')}`
81
+ }))
82
+
83
+ return temp
84
+ }
85
+
86
+ export const ghUploadAssets = async ({ghToken, ghAssets, uploadUrl, cwd}) => {
87
+ const temp = await ghPrepareAssets(ghAssets, cwd)
88
+
89
+ return Promise.all(ghAssets.map(async ({name}) => {
90
+ const url = `${uploadUrl}?name=${name}`
91
+ const res = await ghFetch(url, {
92
+ ghToken,
93
+ method: 'POST',
94
+ headers: {'Content-Type': 'application/octet-stream'},
95
+ body: await fs.readFile(path.join(temp, name)),
96
+ })
97
+ if (!res.ok) {
98
+ throw new Error(`gh asset upload failed for '${name}': ${res.status}`)
99
+ }
100
+ return res
101
+ }))
102
+ }
103
+
104
+ export const ghGetAsset = async ({repoName, tag, name, ghUrl}) => {
105
+ const url = `${ghUrl || 'https://github.com'}/${repoName}/releases/download/${tag.ref || tag}/${name}`
106
+ const res = await attempt2(() => fetch(url))
107
+ if (!res.ok) {
108
+ throw new Error(`gh asset fetch failed for '${name}': ${res.status} ${url}`)
109
+ }
110
+ return res.text()
111
+ }
@@ -1,6 +1,6 @@
1
1
  import {$, fs, path, tempy, copy} from 'zx-extra'
2
2
  import {log} from '../log.js'
3
- import {memoizeBy} from '../util.js'
3
+ import {attempt2, attempt3, memoizeBy} from '../../util.js'
4
4
 
5
5
  export const fetchRepo = memoizeBy(async ({cwd: _cwd, branch, origin: _origin, basicAuth}) => {
6
6
  const origin = _origin || (await getRepo(_cwd, {basicAuth})).repoAuthedUrl
@@ -9,7 +9,7 @@ export const fetchRepo = memoizeBy(async ({cwd: _cwd, branch, origin: _origin, b
9
9
  try {
10
10
  await _$`git clone --single-branch --branch ${branch} --depth 1 ${origin} .`
11
11
  } catch (e) {
12
- log({level: 'warn'})(`ref '${branch}' does not exist in ${origin}`)
12
+ log.warn(`ref '${branch}' does not exist in ${origin}`)
13
13
  await _$`git init . &&
14
14
  git remote add origin ${origin}`
15
15
  }
@@ -18,8 +18,6 @@ export const fetchRepo = memoizeBy(async ({cwd: _cwd, branch, origin: _origin, b
18
18
  }, async ({cwd, branch}) => `${await getRoot(cwd)}:${branch}`)
19
19
 
20
20
  export const pushCommit = async ({cwd, from, to, branch, origin, msg, ignoreFiles, files = [], basicAuth, gitCommitterEmail, gitCommitterName}) => {
21
- let retries = 3
22
-
23
21
  const _cwd = await fetchRepo({cwd, branch, origin, basicAuth})
24
22
  const _$ = $({cwd: _cwd})
25
23
 
@@ -34,31 +32,24 @@ export const pushCommit = async ({cwd, from, to, branch, origin, msg, ignoreFile
34
32
  await _$`git add . &&
35
33
  git commit -m ${msg}`
36
34
  } catch {
37
- log({level: 'warn'})(`no changes to commit to ${branch}`)
35
+ log.warn(`no changes to commit to ${branch}`)
38
36
  return
39
37
  }
40
38
 
41
- while (retries > 0) {
42
- try {
43
- return await _$`git push origin HEAD:refs/heads/${branch}`
44
- } catch (e) {
45
- retries -= 1
46
- log({level: 'error'})('git push failed', 'branch', branch, 'retries left', retries, e)
47
-
48
- if (retries === 0) {
49
- throw e
50
- }
51
-
52
- await _$`git fetch origin ${branch} &&
53
- git rebase origin/${branch}`
39
+ return attempt3(
40
+ () => _$`git push origin HEAD:refs/heads/${branch}`,
41
+ (e) => {
42
+ log.warn('git push failed, rebasing', 'branch', branch, e)
43
+ return attempt2(() => _$`git fetch origin ${branch} &&
44
+ git rebase origin/${branch}`)
54
45
  }
55
- }
46
+ )
56
47
  }
57
48
 
58
- export const getSha = async (cwd) => (await $({cwd})`git rev-parse HEAD`).toString().trim()
59
-
60
49
  export const getRoot = memoizeBy(async (cwd) => (await $({cwd})`git rev-parse --show-toplevel`).toString().trim())
61
50
 
51
+ export const getSha = memoizeBy(async (cwd) => (await $({cwd})`git rev-parse HEAD`).toString().trim(), getRoot)
52
+
62
53
  export const parseOrigin = (originUrl) => {
63
54
  const [, , repoHost, repoName] = originUrl.replace(':', '/').replace(/\.git/, '').match(/.+(@|\/\/)([^/]+)\/(.+)$/) || []
64
55
 
@@ -102,10 +93,10 @@ export const getCommits = async (cwd, from, to = 'HEAD') => {
102
93
  })
103
94
  }
104
95
 
105
- export const getTags = async (cwd, ref) =>
106
- (await $({cwd})`git tag -l ${ref || '*'}`)
107
- .toString()
108
- .split('\n')
96
+ export const getTags = memoizeBy(
97
+ async (cwd, ref = '*') => (await $({cwd})`git tag -l ${ref}`).toString().split('\n'),
98
+ async (cwd, ref = '*') => `${await getRoot(cwd)}:${ref}`,
99
+ )
109
100
 
110
101
  export const pushTag = async ({cwd, tag, gitCommitterName, gitCommitterEmail}) => {
111
102
  await setUserConfig(cwd, gitCommitterName, gitCommitterEmail)
@@ -119,7 +110,7 @@ export const fetchTags = async (cwd) =>
119
110
  $({cwd})`git fetch --tags`
120
111
 
121
112
  export const deleteRemoteTag = async ({cwd, tag}) => {
122
- log()(`rolling back remote tag '${tag}'`)
113
+ log.info(`rolling back remote tag '${tag}'`)
123
114
  await $({cwd, nothrow: true})`git push origin :refs/tags/${tag}`
124
115
  await $({cwd, nothrow: true})`git tag -d ${tag}`
125
116
  }
@@ -1,6 +1,15 @@
1
+ import zlib from 'node:zlib'
2
+ import _fs from 'node:fs/promises'
3
+ import _path from 'node:path'
4
+ import tar from 'tar-stream'
5
+ import {Readable} from 'node:stream'
1
6
  import {log} from '../log.js'
2
- import {$, fs, INI, fetch, tempy} from 'zx-extra'
3
- import {pipify, unzip} from '../util.js'
7
+ import {$, semver, fs, INI, fetch, tempy} from 'zx-extra'
8
+ import {attempt2, memoizeBy} from '../../util.js'
9
+
10
+ const FETCH_TIMEOUT_MS = 15_000
11
+ const NPM_OIDC_VER = '11.5.0'
12
+ const NPM_VER = (await $`npm --version`).toString().trim()
4
13
 
5
14
  // https://stackoverflow.com/questions/19978452/how-to-extract-single-file-from-tar-gz-archive-using-node-js
6
15
 
@@ -14,17 +23,16 @@ export const fetchPkg = async (pkg) => {
14
23
  const tarballUrl = getTarballUrl(npmRegistry, pkg.name, pkg.version)
15
24
  const bearerToken = getBearerToken(npmRegistry, npmToken, npmConfig)
16
25
  const headers = bearerToken ? {Authorization: bearerToken} : {}
17
- log({pkg})(`fetching '${id}' from ${npmRegistry}`)
26
+ log.info(`fetching '${id}' from ${npmRegistry}`)
18
27
 
19
- // https://stackoverflow.com/questions/46946380/fetch-api-request-timeout
20
- const controller = new AbortController()
21
- const timeoutId = setTimeout(() => controller.abort(), 15_000)
22
- const tarball = await fetch(tarballUrl, {
28
+ const ac = new AbortController()
29
+ const timer = setTimeout(() => ac.abort(), FETCH_TIMEOUT_MS)
30
+ const tarball = await attempt2(() => fetch(tarballUrl, {
23
31
  method: 'GET',
24
32
  headers,
25
- signal: controller.signal
26
- })
27
- clearTimeout(timeoutId)
33
+ signal: ac.signal,
34
+ }))
35
+ clearTimeout(timer)
28
36
 
29
37
  if (!tarball.ok) {
30
38
  throw new Error(`registry responded with ${tarball.status} for ${tarballUrl}`)
@@ -32,10 +40,10 @@ export const fetchPkg = async (pkg) => {
32
40
 
33
41
  await unzip(pipify(tarball.body), {cwd, strip: 1, omit: ['package.json']})
34
42
 
35
- log({pkg})(`fetch duration '${id}': ${Date.now() - now}`)
43
+ log.info(`fetch duration '${id}': ${Date.now() - now}`)
36
44
  pkg.fetched = true
37
45
  } catch (e) {
38
- log({pkg, level: 'warn'})(`fetching '${id}' failed`, e)
46
+ log.warn(`fetching '${id}' failed`, e)
39
47
  }
40
48
  }
41
49
 
@@ -46,8 +54,8 @@ export const fetchManifest = async (pkg, {nothrow} = {}) => {
46
54
  const reqOpts = bearerToken ? {headers: {authorization: bearerToken}} : {}
47
55
 
48
56
  try {
49
- const res = await fetch(url, reqOpts)
50
- if (!res.ok) throw res
57
+ const res = await attempt2(() => fetch(url, reqOpts))
58
+ if (!res.ok) throw new Error(`npm registry responded with ${res.status} for ${url}`)
51
59
 
52
60
  return res.json() // NOTE .json() is async too
53
61
  } catch (e) {
@@ -58,13 +66,13 @@ export const fetchManifest = async (pkg, {nothrow} = {}) => {
58
66
 
59
67
  export const npmPersist = async (pkg) => {
60
68
  const {name, version, manifest, manifestAbsPath} = pkg
61
- log({pkg})(`updating ${manifestAbsPath} inners: ${name} ${version}`)
69
+ log.info(`updating ${manifestAbsPath} inners: ${name} ${version}`)
62
70
  await fs.writeJson(manifestAbsPath, manifest, {spaces: 2})
63
71
  }
64
72
 
65
73
  export const npmRestore = async (pkg) => {
66
74
  const {manifestRaw, manifestAbsPath} = pkg
67
- log({pkg})(`rolling back ${manifestAbsPath} inners to manifestRaw`)
75
+ log.info(`rolling back ${manifestAbsPath} inners to manifestRaw`)
68
76
  await fs.writeFile(manifestAbsPath, manifestRaw, {encoding: 'utf8'})
69
77
  }
70
78
 
@@ -73,7 +81,7 @@ export const npmPublish = async (pkg) => {
73
81
 
74
82
  if (manifest.private || npmPublish === false) return
75
83
 
76
- log({pkg})(`publishing npm package ${name} ${version} to ${npmRegistry}`)
84
+ log.info(`publishing npm package ${name} ${version} to ${npmRegistry}`)
77
85
 
78
86
  const npmTag = pkg.preversion ? 'snapshot' : 'latest'
79
87
  const npmFlags = [
@@ -86,12 +94,10 @@ export const npmPublish = async (pkg) => {
86
94
  // OIDC trusted publishing: no auth token must be present for npm to use OIDC flow.
87
95
  // https://docs.npmjs.com/trusted-publishers/
88
96
  if (npmOidc) {
89
- const npmVersion = (await $`npm --version`).toString().trim()
90
- const [major, minor] = npmVersion.split('.').map(Number)
91
- if (major < 11 || (major === 11 && minor < 5)) {
92
- throw new Error(`npm OIDC trusted publishing requires npm >= 11.5.0, got ${npmVersion}`)
97
+ if (!semver.gte(NPM_VER, NPM_OIDC_VER)) {
98
+ throw new Error(`npm OIDC trusted publishing requires npm >= ${NPM_OIDC_VER}, got ${NPM_VER}`)
93
99
  }
94
- log({pkg})('npm publish: OIDC trusted publishing enabled')
100
+ log.info('npm publish: OIDC trusted publishing enabled')
95
101
  npmFlags.push('--provenance')
96
102
  } else {
97
103
  const npmrc = await getNpmrc({npmConfig, npmToken, npmRegistry})
@@ -102,16 +108,14 @@ export const npmPublish = async (pkg) => {
102
108
  await $({cwd})`npm publish ${npmFlags.filter(Boolean)}`
103
109
  }
104
110
 
105
- export const getNpmrc = async ({npmConfig, npmToken, npmRegistry}) => {
106
- if (npmConfig) {
107
- return npmConfig
108
- }
111
+ export const getNpmrc = memoizeBy(async ({npmConfig, npmToken, npmRegistry}) => {
112
+ if (npmConfig) return npmConfig
109
113
 
110
- const npmrc = tempy.temporaryFile({name: '.npmrc'})
114
+ const npmrc = tempy.temporaryFile({name: '.npmrc'})
111
115
  await fs.writeFile(npmrc, `${npmRegistry.replace(/^https?:\/\//, '//')}/:_authToken=${npmToken}`, {encoding: 'utf8'})
112
116
 
113
117
  return npmrc
114
- }
118
+ }, ({npmConfig, npmToken, npmRegistry}) => `${npmConfig}:${npmToken}:${npmRegistry}`)
115
119
 
116
120
  // $`npm view ${name}@${version} dist.tarball`
117
121
  export const getTarballUrl = (registry, name, version) => `${registry}/${name}/-/${name.replace(/^.+(%2f|\/)/,'')}-${version}.tgz`
@@ -128,3 +132,41 @@ export const getBearerToken = (npmRegistry, npmToken, npmConfig) => {
128
132
  // NOTE registry-auth-token does not work with localhost:4873
129
133
  export const getAuthToken = (registry, npmrc) =>
130
134
  (Object.entries(npmrc).find(([reg]) => reg.startsWith(registry.replace(/^https?/, ''))) || [])[1]
135
+
136
+ const pipify = (stream) => stream.pipe ? stream : Readable.from(stream)
137
+
138
+ const safePath = v => _path.resolve('/', v).slice(1)
139
+
140
+ const unzip = (stream, {pick, omit, cwd = process.cwd(), strip = 0} = {}) => new Promise((resolve, reject) => {
141
+ const extract = tar.extract()
142
+ const results = []
143
+
144
+ extract.on('entry', ({name, type}, stream, cb) => {
145
+ const _name = safePath(strip ? name.split('/').slice(strip).join('/') : name)
146
+ const fp = _path.join(cwd, _name)
147
+
148
+ let data = ''
149
+ stream.on('data', (chunk) => {
150
+ if (type !== 'file' || omit?.includes(_name) || (pick && !pick.includes(_name))) return
151
+ data += chunk
152
+ })
153
+
154
+ stream.on('end', () => {
155
+ if (data) {
156
+ results.push(
157
+ _fs.mkdir(_path.dirname(fp), {recursive: true})
158
+ .then(() => _fs.writeFile(fp, data, 'utf8'))
159
+ )
160
+ }
161
+ cb()
162
+ })
163
+
164
+ stream.resume()
165
+ })
166
+
167
+ extract.on('finish', () => resolve(Promise.all(results)))
168
+
169
+ stream
170
+ .pipe(zlib.createGunzip())
171
+ .pipe(extract)
172
+ })