bulk-release 2.12.2 → 2.13.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,3 +1,17 @@
1
+ ## [2.13.0](https://github.com/semrel-extra/zx-bulk-release/compare/v2.12.2...v2.13.0) (2023-12-05)
2
+
3
+ ### Fixes & improvements
4
+ * fix: avoid polynomial regex on flags parsing ([c080fb8](https://github.com/semrel-extra/zx-bulk-release/commit/c080fb8246d71ad5e366ae6931b06db3ff86233b))
5
+ * fix: sanitize paths on extract ([3f67b9f](https://github.com/semrel-extra/zx-bulk-release/commit/3f67b9f89b44589930c5dcce96237c0c5c3294f1))
6
+ * refactor: decompose processor.js ([a498a7f](https://github.com/semrel-extra/zx-bulk-release/commit/a498a7f5900396e42b6b2a6478028e128ab6ec1f))
7
+ * fix: throw error if declared gh asset is empty ([d2dc6f6](https://github.com/semrel-extra/zx-bulk-release/commit/d2dc6f6b457a3178671b0d6349d42234b8052c7e))
8
+ * fix: handle empty files collection on gh assets push ([c9dbb5b](https://github.com/semrel-extra/zx-bulk-release/commit/c9dbb5bf3c5b4ff4ce11603e541feae784574f10))
9
+ * perf: replace external `curl`, `wget` and `tar` with node `fetch` and `tar-stream` ([496c73b](https://github.com/semrel-extra/zx-bulk-release/commit/496c73b68172c42e5930d185f1008d487928e7fc))
10
+
11
+ ### Features
12
+ * feat: configurable meta push ([0042af3](https://github.com/semrel-extra/zx-bulk-release/commit/0042af34d31ff6d3783c3953ece167a9dc78fb45))
13
+ * feat: let release meta be published as gh assets ([3d333e1](https://github.com/semrel-extra/zx-bulk-release/commit/3d333e1fb6d61adb5bd05ac3a90e19a3a5402843))
14
+
1
15
  ## [2.12.2](https://github.com/semrel-extra/zx-bulk-release/compare/v2.12.1...v2.12.2) (2023-11-20)
2
16
 
3
17
  ### Fixes & improvements
package/README.md CHANGED
@@ -25,7 +25,7 @@
25
25
  * macOS / linux
26
26
  * Node.js >= 16.0.0
27
27
  * npm >=7 / yarn >= 3
28
- * wget
28
+ * ~~wget~~
29
29
  * tar
30
30
  * git
31
31
 
@@ -210,8 +210,11 @@ Anyway, it's still possible to override the default config by `tagFormat` option
210
210
 
211
211
 
212
212
  ### Meta
213
+ Each release gathers its own meta. It is _recommended_ to store the data somehow to ensure flow reliability:
214
+ * Set `meta: {type: 'asset'}` to persist as gh asset.
215
+ * If set to `meta: {type: null}`, the required data will be fetched from the npm artifact.
216
+ * Otherwise, it will be pushed as a regular git commit to the `meta` branch (default behaviour).
213
217
 
214
- Each release pushes its result to the `meta` branch.
215
218
  `2022-6-26-semrel-extra-zxbr-test-c-1-3-1-f0.json`
216
219
  ```json
217
220
  {
@@ -333,6 +336,7 @@ zx-bulk-release
333
336
  * [googleapis/release-please](https://github.com/googleapis/release-please)
334
337
  * [generic-semantic-version-processing](https://about.gitlab.com/blog/2021/09/28/generic-semantic-version-processing/)
335
338
  * [jchip/fynpo](https://github.com/jchip/fynpo)
339
+ * [lerna-lite/lerna-lite](https://github.com/lerna-lite/lerna-lite)
336
340
 
337
341
  ## License
338
342
  [MIT](./LICENSE)
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "bulk-release",
3
3
  "alias": "bulk-release",
4
- "version": "2.12.2",
4
+ "version": "2.13.0",
5
5
  "description": "zx-based alternative for multi-semantic-release",
6
6
  "type": "module",
7
7
  "exports": {
@@ -20,18 +20,22 @@
20
20
  "scripts": {
21
21
  "test": "NPM_REGISTRY='http://localhost:4873' NPM_TOKEN='mRv6eIuiaggXGb9ZDFCtBA==' c8 uvu ./src/test -i fixtures -i utils && c8 report -r lcov",
22
22
  "test:it": "NPM_REGISTRY='http://localhost:4873' NPM_TOKEN='mRv6eIuiaggXGb9ZDFCtBA==' node ./src/test/js/integration.test.js",
23
- "docs": "mkdir -p docs && cp ./README.md ./docs/README.md"
23
+ "docs": "mkdir -p docs && cp ./README.md ./docs/README.md",
24
+ "publish:beta": "npm publish --tag beta --no-git-tag-version",
25
+ "build": "esbuild src/main/js/index.js --platform=node --outdir=target --bundle --format=esm --external:typescript"
24
26
  },
25
27
  "dependencies": {
26
28
  "@semrel-extra/topo": "^1.14.0",
27
- "cosmiconfig": "^8.3.6",
29
+ "cosmiconfig": "^9.0.0",
28
30
  "queuefy": "^1.2.1",
31
+ "tar-stream": "^3.1.6",
29
32
  "zx-extra": "^2.6.4"
30
33
  },
31
34
  "devDependencies": {
32
35
  "c8": "^8.0.1",
36
+ "esbuild": "^0.19.8",
33
37
  "uvu": "^0.5.6",
34
- "verdaccio": "^5.27.0"
38
+ "verdaccio": "^5.27.1"
35
39
  },
36
40
  "publishConfig": {
37
41
  "access": "public"
@@ -1,9 +1,9 @@
1
1
  import {$} from 'zx-extra'
2
2
  import {queuefy} from 'queuefy'
3
3
  import {fetchRepo, getRepo, pushCommit} from './git.js'
4
- import {log} from './log.js'
5
- import {formatTag} from './meta.js'
6
- import {msgJoin} from './util.js'
4
+ import {log} from '../log.js'
5
+ import {formatTag} from '../processor/meta.js'
6
+ import {msgJoin} from '../util.js'
7
7
 
8
8
  export const pushChangelog = queuefy(async (pkg) => {
9
9
  const {absPath: cwd, config: {changelog: opts, gitCommitterEmail, gitCommitterName, ghBasicAuth: basicAuth}} = pkg
@@ -1,10 +1,10 @@
1
1
  import {queuefy} from 'queuefy'
2
2
  import {$, path, tempy, glob, fs} from 'zx-extra'
3
- import {log} from './log.js'
3
+ import {log} from '../log.js'
4
4
  import {getRepo, pushCommit} from './git.js'
5
- import {formatTag} from './meta.js'
5
+ import {formatTag} from '../processor/meta.js'
6
6
  import {formatReleaseNotes} from './changelog.js'
7
- import {asArray, getCommonPath, msgJoin} from './util.js'
7
+ import {asArray, getCommonPath, msgJoin} from '../util.js'
8
8
 
9
9
  // https://docs.github.com/en/rest/releases/releases?apiVersion=2022-11-28#create-a-release
10
10
  export const ghRelease = async (pkg) => {
@@ -23,10 +23,17 @@ export const ghRelease = async (pkg) => {
23
23
  body: releaseNotes
24
24
  })
25
25
 
26
- const {stdout} = await $.o({cwd})`curl -H 'Authorization: token ${ghToken}' -H 'Accept: application/vnd.github.v3+json' https://api.github.com/repos/${repoName}/releases -d ${releaseData}`
27
- const res = JSON.parse(stdout.toString().trim())
28
-
29
- if (ghAssets) {
26
+ const res = await (await fetch(`https://api.github.com/repos/${repoName}/releases`, {
27
+ method: 'POST',
28
+ headers: {
29
+ Accept: 'application/vnd.github.v3+json',
30
+ Authorization: `token ${ghToken}`,
31
+ 'X-GitHub-Api-Version': '2022-11-28'
32
+ },
33
+ body: releaseData
34
+ })).json()
35
+
36
+ if (ghAssets?.length) {
30
37
  // Lol. GH API literally returns pseudourl `...releases/110103594/assets{?name,label}` as shown in the docs
31
38
  const uploadUrl = res.upload_url.slice(0, res.upload_url.indexOf('{'))
32
39
  await ghUploadAssets({ghToken, ghAssets, uploadUrl, cwd})
@@ -62,13 +69,24 @@ export const ghPages = queuefy(async (pkg) => {
62
69
  export const ghPrepareAssets = async (assets, _cwd) => {
63
70
  const temp = tempy.temporaryDirectory()
64
71
 
65
- await Promise.all(assets.map(async ({name, source = 'target/**/*', zip, cwd = _cwd, strip = true}) => {
66
- const patterns = asArray(source)
72
+ await Promise.all(assets.map(async ({name, contents, source = 'target/**/*', zip, cwd = _cwd, strip = true}) => {
67
73
  const target = path.join(temp, name)
74
+
75
+ if (contents) {
76
+ await fs.outputFile(target, contents, 'utf8')
77
+ return
78
+ }
79
+
80
+ const patterns = asArray(source)
68
81
  if (patterns.some(s => s.includes('*'))) {
69
82
  zip = true
70
83
  }
71
84
  const files = await glob(patterns, {cwd, absolute: false, onlyFiles: true})
85
+
86
+ if (files.length === 0) {
87
+ throw new Error(`gh asset not found: ${name} ${source}`)
88
+ }
89
+
72
90
  if (!zip && files.length === 1) {
73
91
  await fs.copy(path.join(cwd, files[0]), target)
74
92
  return
@@ -86,7 +104,24 @@ export const ghUploadAssets = async ({ghToken, ghAssets, uploadUrl, cwd}) => {
86
104
 
87
105
  return Promise.all(ghAssets.map(async ({name}) => {
88
106
  const url = `${uploadUrl}?name=${name}`
89
- return $.o({cwd: temp})`curl -H 'Authorization: token ${ghToken}' -H 'Accept: application/vnd.github.v3+json' -H 'Content-Type: application/octet-stream' ${url} --data-binary '@${name}'`
107
+ // return $.o({cwd: temp})`curl -H 'Authorization: token ${ghToken}' -H 'Accept: application/vnd.github.v3+json' -H 'Content-Type: application/octet-stream' ${url} --data-binary '@${name}'`
108
+ return fetch(url, {
109
+ method: 'POST',
110
+ headers: {
111
+ 'Content-Type': 'application/octet-stream',
112
+ Accept: 'application/vnd.github.v3+json',
113
+ Authorization: `token ${ghToken}`,
114
+ 'X-GitHub-Api-Version': '2022-11-28'
115
+ },
116
+ body: await fs.readFile(path.join(temp, name))
117
+ })
90
118
  }))
91
119
  }
92
120
 
121
+ export const ghGetAsset = async ({repoName, tag, name}) => {
122
+ return (await fetch(`https://github.com/${repoName}/releases/download/${tag}/${name}`, {
123
+ headers: {
124
+ // Accept: 'application/vnd.github.v3+json'
125
+ }
126
+ })).text()
127
+ }
@@ -1,6 +1,6 @@
1
1
  import {$, ctx, fs, path, tempy, copy} from 'zx-extra'
2
- import {log} from './log.js'
3
- import {memoizeBy} from './util.js'
2
+ import {log} from '../log.js'
3
+ import {memoizeBy} from '../util.js'
4
4
 
5
5
  export const fetchRepo = memoizeBy(async ({cwd: _cwd, branch, origin: _origin, basicAuth}) => ctx(async ($) => {
6
6
  const origin = _origin || (await getRepo(_cwd, {basicAuth})).repoAuthedUrl
@@ -59,9 +59,15 @@ export const getSha = async (cwd) => (await $.o({cwd})`git rev-parse HEAD`).toSt
59
59
 
60
60
  export const getRoot = memoizeBy(async (cwd) => (await $.o({cwd})`git rev-parse --show-toplevel`).toString().trim())
61
61
 
62
+ export const parseOrigin = (originUrl) => {
63
+ const [, , repoHost, repoName] = originUrl.replace(':', '/').replace(/\.git/, '').match(/.+(@|\/\/)([^/]+)\/(.+)$/) || []
64
+
65
+ return {repoHost, repoName}
66
+ }
67
+
62
68
  export const getRepo = memoizeBy(async (cwd, {basicAuth} = {}) => {
63
69
  const originUrl = await getOrigin(cwd)
64
- const [, , repoHost, repoName] = originUrl.replace(':', '/').replace(/\.git/, '').match(/.+(@|\/\/)([^/]+)\/(.+)$/) || []
70
+ const {repoHost, repoName} = parseOrigin(originUrl)
65
71
  const repoPublicUrl = `https://${repoHost}/${repoName}`
66
72
  const repoAuthedUrl = basicAuth && repoHost && repoName
67
73
  ? `https://${basicAuth}@${repoHost}/${repoName}.git`
@@ -1,5 +1,8 @@
1
- import {log} from './log.js'
1
+ import {log} from '../log.js'
2
2
  import {$, fs, INI, fetch, tempy} from 'zx-extra'
3
+ import {unzip} from '../util.js'
4
+
5
+ // https://stackoverflow.com/questions/19978452/how-to-extract-single-file-from-tar-gz-archive-using-node-js
3
6
 
4
7
  export const fetchPkg = async (pkg) => {
5
8
  const id = `${pkg.name}@${pkg.version}`
@@ -10,9 +13,20 @@ export const fetchPkg = async (pkg) => {
10
13
  const {npmRegistry, npmToken, npmConfig} = pkg.config
11
14
  const tarballUrl = getTarballUrl(npmRegistry, pkg.name, pkg.version)
12
15
  const bearerToken = getBearerToken(npmRegistry, npmToken, npmConfig)
13
- const authorization = bearerToken ? `--header='Authorization: ${bearerToken}'` : ''
16
+ const headers = bearerToken ? {Authorization: bearerToken} : {}
14
17
  log({pkg})(`fetching '${id}' from ${npmRegistry}`)
15
- await $.raw`wget --timeout=15 --connect-timeout=5 ${authorization} -qO- ${tarballUrl} | tar -xvz --strip-components=1 --exclude='package.json' -C ${cwd}`
18
+
19
+ // https://stackoverflow.com/questions/46946380/fetch-api-request-timeout
20
+ const controller = new AbortController()
21
+ const timeoutId = setTimeout(() => controller.abort(), 15_000)
22
+ const tarball = await fetch(tarballUrl, {
23
+ method: 'GET',
24
+ headers,
25
+ signal: controller.signal
26
+ })
27
+ clearTimeout(timeoutId)
28
+
29
+ await unzip(tarball.body, {cwd, strip: 1, omit: ['package.json']})
16
30
 
17
31
  log({pkg})(`fetch duration '${id}': ${Date.now() - now}`)
18
32
  pkg.fetched = true
@@ -19,26 +19,40 @@ export const defaultConfig = {
19
19
  changelog: 'changelog',
20
20
  npmFetch: true,
21
21
  ghRelease: true,
22
+ meta: true,
22
23
  // npmPublish: true,
23
24
  // ghPages: 'gh-pages'
24
25
  }
25
26
 
26
27
  export const getPkgConfig = async (...cwds) =>
27
28
  normalizePkgConfig((await Promise.all(cwds.map(
28
- cwd => cosmiconfig(CONFIG_NAME, { searchPlaces: CONFIG_FILES }).search(cwd).then(r => r?.config)
29
+ cwd => cosmiconfig(CONFIG_NAME, {
30
+ searchPlaces: CONFIG_FILES,
31
+ searchStrategy: 'global', // https://github.com/cosmiconfig/cosmiconfig/releases/tag/v9.0.0
32
+ })
33
+ .search(cwd)
34
+ .then(r => r?.config)
29
35
  ))).find(Boolean) || defaultConfig)
30
36
 
31
37
  export const normalizePkgConfig = (config, env) => ({
32
38
  ...parseEnv(env),
33
39
  ...config,
34
40
  releaseRules: config.releaseRules || config.semanticRules,
35
- npmFetch: config.npmFetch || config.fetch || config.fetchPkg,
36
- buildCmd: config.buildCmd || config.cmd,
41
+ npmFetch: config.npmFetch || config.fetch || config.fetchPkg,
42
+ buildCmd: config.buildCmd || config.cmd,
37
43
  get ghBasicAuth() {
38
44
  return this.ghUser && this.ghToken ? `${this.ghUser}:${this.ghToken}` : false
39
- }
45
+ },
46
+ meta: normalizeMetaConfig(config.meta)
40
47
  })
41
48
 
49
+ export const normalizeMetaConfig = (meta) =>
50
+ meta === true
51
+ ? normalizeMetaConfig('commit')
52
+ : typeof meta === 'string'
53
+ ? { type: meta } // 'commit' | 'asset' | 'tag'
54
+ : { type: 'none' }
55
+
42
56
  export const parseEnv = ({GH_USER, GH_USERNAME, GITHUB_USER, GITHUB_USERNAME, GH_TOKEN, GITHUB_TOKEN, NPM_TOKEN, NPM_REGISTRY, NPMRC, NPM_USERCONFIG, NPM_CONFIG_USERCONFIG, NPM_PROVENANCE, GIT_COMMITTER_NAME, GIT_COMMITTER_EMAIL} = process.env) =>
43
57
  ({
44
58
  ghUser: GH_USER || GH_USERNAME || GITHUB_USER || GITHUB_USERNAME,
@@ -1 +1 @@
1
- export {run} from './processor.js'
1
+ export {run} from './processor/release.js'
@@ -53,7 +53,7 @@ export const subsWorkspace = (decl, actual) => {
53
53
 
54
54
  export const topo = async ({flags = {}, cwd} = {}) => {
55
55
  const ignore = typeof flags.ignore === 'string'
56
- ? flags.ignore.split(/\s*,\s*/)
56
+ ? flags.ignore.split(',').map(c => c.trim())
57
57
  : Array.isArray(flags.ignore)
58
58
  ? flags.ignore
59
59
  : []
@@ -0,0 +1,16 @@
1
+ import {tpl} from '../util.js'
2
+ import {log} from '../log.js'
3
+ import {$} from 'zx-extra'
4
+
5
+ export const exec = async (pkg, name) => {
6
+ const cmd = tpl(pkg.config[name], {...pkg, ...pkg.context})
7
+ const now = Date.now()
8
+
9
+ if (cmd) {
10
+ log({pkg})(`run ${name} '${cmd}'`)
11
+ const result = await $.o({cwd: pkg.absPath, quote: v => v, preferLocal: true})`${cmd}`
12
+
13
+ log({pkg})(`duration ${name}: ${Date.now() - now}`)
14
+ return result
15
+ }
16
+ }
@@ -3,12 +3,13 @@
3
3
  import {Buffer} from 'node:buffer'
4
4
  import {queuefy} from 'queuefy'
5
5
  import {semver, $, fs, path} from 'zx-extra'
6
- import {log} from './log.js'
7
- import {fetchRepo, pushCommit, getTags as getGitTags, pushTag} from './git.js'
8
- import {fetchManifest} from './npm.js'
6
+ import {log} from '../log.js'
7
+ import {fetchRepo, pushCommit, getTags as getGitTags, pushTag, getRepo} from '../api/git.js'
8
+ import {fetchManifest} from '../api/npm.js'
9
+ import {ghGetAsset} from '../api/gh.js'
9
10
 
10
11
  export const pushReleaseTag = async (pkg) => {
11
- const {name, version, tag = formatTag({name, version}),config: {gitCommitterEmail, gitCommitterName}} = pkg
12
+ const {name, version, tag = formatTag({name, version}), config: {gitCommitterEmail, gitCommitterName}} = pkg
12
13
  const cwd = pkg.context.git.root
13
14
 
14
15
  pkg.context.git.tag = tag
@@ -17,33 +18,55 @@ export const pushReleaseTag = async (pkg) => {
17
18
  await pushTag({cwd, tag, gitCommitterEmail, gitCommitterName})
18
19
  }
19
20
 
20
- export const pushMeta = queuefy(async (pkg) => {
21
- log({pkg})('push artifact to branch \'meta\'')
22
-
23
- const {name, version, tag = formatTag({name, version}), absPath: cwd, config: {gitCommitterEmail, gitCommitterName, ghBasicAuth: basicAuth}} = pkg
24
- const to = '.'
25
- const branch = 'meta'
26
- const msg = `chore: release meta ${name} ${version}`
21
+ export const prepareMeta = async (pkg) => {
22
+ const {absPath: cwd} = pkg
27
23
  const hash = (await $.o({cwd})`git rev-parse HEAD`).toString().trim()
28
- const meta = {
24
+ pkg.meta = {
29
25
  META_VERSION: '1',
30
- name: pkg.name,
31
26
  hash,
27
+ name: pkg.name,
32
28
  version: pkg.version,
33
29
  dependencies: pkg.dependencies,
34
30
  devDependencies: pkg.devDependencies,
35
31
  peerDependencies: pkg.peerDependencies,
36
32
  optionalDependencies: pkg.optionalDependencies,
37
33
  }
34
+ }
35
+
36
+ export const pushMeta = queuefy(async (pkg) => {
37
+ const {type} = pkg.config.meta
38
+
39
+ if (type === null) {
40
+ return
41
+ }
42
+
43
+ if (!pkg.meta) {
44
+ await prepareMeta(pkg)
45
+ }
46
+
47
+ if (type === 'asset' || type === 'assets') {
48
+ pkg.config.ghAssets = [...pkg.config.ghAssets || [], {
49
+ name: 'meta.json',
50
+ contents: JSON.stringify(pkg.meta, null, 2)
51
+ }]
52
+ return
53
+ }
54
+
55
+ log({pkg})('push artifact to branch \'meta\'')
56
+
57
+ const {name, version, meta, tag = formatTag({name, version}), absPath: cwd, config: {gitCommitterEmail, gitCommitterName, ghBasicAuth: basicAuth}} = pkg
58
+ const to = '.'
59
+ const branch = 'meta'
60
+ const msg = `chore: release meta ${name} ${version}`
38
61
  const files = [{relpath: `${getArtifactPath(tag)}.json`, contents: meta}]
39
62
 
40
63
  await pushCommit({cwd, to, branch, msg, files, gitCommitterEmail, gitCommitterName, basicAuth})
41
64
  })
42
65
 
43
66
  export const getLatest = async (pkg) => {
44
- const {absPath: cwd, name, config: {ghBasicAuth: basicAuth}} = pkg
67
+ const {absPath: cwd, name } = pkg
45
68
  const tag = await getLatestTag(cwd, name)
46
- const meta = await getLatestMeta(cwd, tag?.ref, basicAuth) || await fetchManifest(pkg, {nothrow: true})
69
+ const meta = await getLatestMeta(pkg, tag)
47
70
 
48
71
  return {
49
72
  tag,
@@ -192,9 +215,16 @@ export const parseDateTag = (date) => new Date(date.replaceAll('.', '-')+'Z')
192
215
 
193
216
  export const getArtifactPath = (tag) => tag.toLowerCase().replace(/[^a-z0-9-]/g, '-')
194
217
 
195
- export const getLatestMeta = async (cwd, tag, basicAuth) => {
218
+ export const getLatestMeta = async (pkg, tag) => {
196
219
  if (!tag) return
197
220
 
221
+ const {absPath: cwd, config: {ghBasicAuth: basicAuth}} = pkg
222
+ const {repoName} = await getRepo(cwd, {basicAuth})
223
+
224
+ try {
225
+ return JSON.parse(await ghGetAsset({repoName, tag, name: 'meta.json'}))
226
+ } catch {}
227
+
198
228
  try {
199
229
  const _cwd = await fetchRepo({cwd, branch: 'meta', basicAuth})
200
230
  return await Promise.any([
@@ -202,4 +232,6 @@ export const getLatestMeta = async (cwd, tag, basicAuth) => {
202
232
  fs.readJson(path.resolve(_cwd, getArtifactPath(tag), 'meta.json'))
203
233
  ])
204
234
  } catch {}
235
+
236
+ return fetchManifest(pkg, {nothrow: true})
205
237
  }
@@ -0,0 +1,95 @@
1
+ import os from 'node:os'
2
+ import {createRequire} from 'node:module'
3
+ import {$, within} from 'zx-extra'
4
+ import {queuefy} from 'queuefy'
5
+ import {topo, traverseQueue} from './deps.js'
6
+ import {createReport} from '../log.js'
7
+ import {exec} from './exec.js'
8
+ import {contextify} from '../steps/contextify.js'
9
+ import {analyze} from '../steps/analyze.js'
10
+ import {build} from '../steps/build.js'
11
+ import {publish} from '../steps/publish.js'
12
+ import {clean} from '../steps/clean.js'
13
+
14
+ export const run = async ({cwd = process.cwd(), env, flags = {}} = {}) => within(async () => {
15
+ const {version: zbrVersion} = createRequire(import.meta.url)('../../../../package.json')
16
+ if (flags.v || flags.version) {
17
+ console.log(zbrVersion)
18
+ return
19
+ }
20
+
21
+ const context = await createContext({flags, env, cwd})
22
+ const {report, packages, queue, prev, graphs} = context
23
+ const _exec = queuefy(exec, flags.concurrency || os.cpus().length)
24
+
25
+ report
26
+ .log()(`zx-bulk-release@${zbrVersion}`)
27
+ .log()('queue:', queue)
28
+ .log()('graphs', graphs)
29
+
30
+ try {
31
+ await traverseQueue({queue, prev, async cb(name) {
32
+ report.setStatus('analyzing', name)
33
+ const pkg = packages[name]
34
+ await contextify(pkg, context)
35
+ await analyze(pkg)
36
+ report
37
+ .set('config', pkg.config, name)
38
+ .set('version', pkg.version, name)
39
+ .set('prevVersion', pkg.latest.tag?.version || pkg.manifest.version, name)
40
+ .set('releaseType', pkg.releaseType, name)
41
+ .set('tag', pkg.tag, name)
42
+ }})
43
+
44
+ report.setStatus('pending')
45
+
46
+ await traverseQueue({queue, prev, async cb(name) {
47
+ const pkg = packages[name]
48
+
49
+ if (!pkg.releaseType) {
50
+ report.setStatus('skipped', name)
51
+ return
52
+ }
53
+ if (flags.build !== false) {
54
+ report.setStatus('building', name)
55
+ await build(pkg, _exec)
56
+ }
57
+ if (!flags.dryRun && flags.publish !== false) {
58
+ report.setStatus('publishing', name)
59
+ await publish(pkg, _exec)
60
+ }
61
+
62
+ report.setStatus('success', name)
63
+ }})
64
+ } catch (e) {
65
+ report
66
+ .log({level: 'error'})(e, e.stack)
67
+ .set('error', e)
68
+ .setStatus('failure')
69
+ throw e
70
+ } finally {
71
+ await clean(cwd, packages)
72
+ }
73
+ report
74
+ .setStatus('success')
75
+ .log()('Great success!')
76
+ })
77
+
78
+ export const createContext = async ({flags, env, cwd}) => {
79
+ const { packages, queue, root, prev, graphs } = await topo({cwd, flags})
80
+ const report = createReport({packages, queue, flags})
81
+
82
+ $.report = report
83
+ $.env = {...process.env, ...env}
84
+ $.verbose = !!(flags.debug || $.env.DEBUG ) || $.verbose
85
+
86
+ return {
87
+ report,
88
+ packages,
89
+ root,
90
+ queue,
91
+ prev,
92
+ graphs,
93
+ flags
94
+ }
95
+ }
@@ -1,8 +1,8 @@
1
1
  import {semver} from 'zx-extra'
2
- import {updateDeps} from './deps.js'
3
- import {formatTag} from './meta.js';
4
- import {log} from './log.js'
5
- import {getCommits} from './git.js'
2
+ import {log} from '../log.js'
3
+ import {getCommits} from '../api/git.js'
4
+ import {updateDeps} from '../processor/deps.js'
5
+ import {formatTag} from '../processor/meta.js';
6
6
 
7
7
  export const analyze = async (pkg) => {
8
8
  const semanticChanges = await getSemanticChanges(pkg.absPath, pkg.latest.tag?.ref, undefined, pkg.config.releaseRules)
@@ -0,0 +1,23 @@
1
+ import {memoizeBy} from '../util.js'
2
+ import {$, within} from 'zx-extra'
3
+ import {fetchPkg} from '../api/npm.js'
4
+ import {traverseDeps} from '../processor/deps.js'
5
+ import {exec} from '../processor/exec.js'
6
+
7
+ export const build = memoizeBy(async (pkg, run = exec, flags = {}, self = build) => within(async () => {
8
+ $.scope = pkg.name
9
+
10
+ await Promise.all([
11
+ traverseDeps({pkg, packages: pkg.context.packages, cb: async({pkg}) => self(pkg, run, flags, self)}),
12
+ pkg.manifest.private !== true && pkg.changes.length === 0 && pkg.config.npmFetch && flags.npmFetch !== false
13
+ ? fetchPkg(pkg)
14
+ : Promise.resolve()
15
+ ])
16
+
17
+ if (!pkg.fetched) {
18
+ await run(pkg, 'buildCmd')
19
+ await run(pkg, 'testCmd')
20
+ }
21
+
22
+ pkg.built = true
23
+ }))
@@ -0,0 +1,7 @@
1
+ import {unsetUserConfig} from '../api/git.js'
2
+ import {npmRestore} from '../api/npm.js'
3
+
4
+ export const clean = async (cwd, packages) => {
5
+ await unsetUserConfig(cwd)
6
+ await Promise.all(Object.values(packages).map(npmRestore))
7
+ }
@@ -0,0 +1,19 @@
1
+ import {getPkgConfig} from '../config.js'
2
+ import {getLatest} from '../processor/meta.js'
3
+ import {getRoot, getSha} from '../api/git.js'
4
+ import {$} from 'zx-extra'
5
+
6
+ // Inspired by https://docs.github.com/en/actions/learn-github-actions/contexts
7
+ export const contextify = async (pkg, {packages, root, flags}) => {
8
+ pkg.config = await getPkgConfig(pkg.absPath, root.absPath)
9
+ pkg.latest = await getLatest(pkg)
10
+ pkg.context = {
11
+ git: {
12
+ sha: await getSha(pkg.absPath),
13
+ root: await getRoot(pkg.absPath)
14
+ },
15
+ env: $.env,
16
+ flags,
17
+ packages
18
+ }
19
+ }
@@ -0,0 +1,40 @@
1
+ import {memoizeBy} from '../util.js'
2
+ import {exec} from '../processor/exec.js'
3
+ import {$, within} from 'zx-extra'
4
+ import {npmPersist, npmPublish} from '../api/npm.js'
5
+ import {prepareMeta, pushMeta, pushReleaseTag} from '../processor/meta.js'
6
+ import {pushChangelog} from '../api/changelog.js'
7
+ import {ghPages, ghRelease} from '../api/gh.js'
8
+
9
+ export const publish = memoizeBy(async (pkg, run = exec) => within(async () => {
10
+ $.scope = pkg.name
11
+
12
+ // Debug
13
+ // https://www.npmjs.com/package/@packasso/preset-ts-tsc-uvu/v/0.0.0?activeTab=code
14
+ // https://github.com/qiwi/packasso/actions/runs/4514909191/jobs/7951564982#step:7:817
15
+ // https://github.com/qiwi/packasso/blob/meta/2023-3-24-packasso-preset-ts-tsc-uvu-0-21-0-f0.json
16
+ if (pkg.version !== pkg.manifest.version) {
17
+ throw new Error('package.json version not synced')
18
+ }
19
+
20
+ await npmPersist(pkg)
21
+ await prepareMeta(pkg)
22
+
23
+ if (pkg.context.flags.snapshot) {
24
+ await Promise.all([
25
+ npmPublish(pkg),
26
+ run(pkg, 'publishCmd')
27
+ ])
28
+ } else {
29
+ await pushReleaseTag(pkg)
30
+ await Promise.all([
31
+ pushMeta(pkg),
32
+ pushChangelog(pkg),
33
+ npmPublish(pkg),
34
+ ghRelease(pkg),
35
+ ghPages(pkg),
36
+ run(pkg, 'publishCmd')
37
+ ])
38
+ }
39
+ pkg.published = true
40
+ }))
@@ -1,3 +1,8 @@
1
+ import zlib from 'node:zlib'
2
+ import fs from 'node:fs/promises'
3
+ import path from 'node:path'
4
+ import tar from 'tar-stream'
5
+
1
6
  export const tpl = (str, context) =>
2
7
  str?.replace(/\$\{\{\s*([.a-z0-9]+)\s*}}/gi, (matched, key) => get(context, key) ?? '')
3
8
 
@@ -60,3 +65,52 @@ export const getCommonPath = files => {
60
65
 
61
66
  return p.slice(0, p.lastIndexOf('/') + 1)
62
67
  }
68
+
69
+ export const safePath = v => path.resolve('/', v).slice(1)
70
+
71
+ // https://stackoverflow.com/questions/19978452/how-to-extract-single-file-from-tar-gz-archive-using-node-js
72
+ export const unzip = (stream, {pick, omit, cwd = process.cwd(), strip = 0} = {}) => new Promise((resolve, reject) => {
73
+ const extract = tar.extract()
74
+ const results = []
75
+
76
+ extract.on('entry', ({name, type}, stream, cb)=> {
77
+ const _name = safePath(strip ? name.split('/').slice(strip).join('/') : name)
78
+ const fp = path.join(cwd, _name)
79
+
80
+ let data = ''
81
+ stream.on('data', (chunk) => {
82
+ if (type !== 'file') {
83
+ return
84
+ }
85
+ if (omit?.includes(_name)) {
86
+ return
87
+ }
88
+ if (pick && !pick.includes(_name)) {
89
+ return
90
+ }
91
+
92
+ data +=chunk
93
+ })
94
+
95
+ stream.on('end', () => {
96
+ if (data) {
97
+ results.push(
98
+ fs.mkdir(path.dirname(fp), {recursive: true})
99
+ .then(() => fs.writeFile(fp, data, 'utf8'))
100
+ )
101
+ }
102
+ cb()
103
+ })
104
+
105
+ stream.resume()
106
+ })
107
+
108
+ extract.on('finish', ()=> {
109
+ resolve(Promise.all(results))
110
+ })
111
+
112
+ // fs.createReadStream('archive.tar.gz')
113
+ stream
114
+ .pipe(zlib.createGunzip())
115
+ .pipe(extract)
116
+ })
@@ -1,180 +0,0 @@
1
- import os from 'node:os'
2
- import {createRequire} from 'node:module'
3
- import {$, within} from 'zx-extra'
4
- import {queuefy} from 'queuefy'
5
- import {analyze} from './analyze.js'
6
- import {pushChangelog} from './changelog.js'
7
- import {getPkgConfig} from './config.js'
8
- import {topo, traverseDeps, traverseQueue} from './deps.js'
9
- import {ghPages, ghRelease} from './gh.js'
10
- import {getRoot, getSha, unsetUserConfig} from './git.js'
11
- import {log, createReport} from './log.js'
12
- import {getLatest, pushMeta, pushReleaseTag} from './meta.js'
13
- import {fetchPkg, npmPersist, npmPublish, npmRestore} from './npm.js'
14
- import {memoizeBy, tpl} from './util.js'
15
-
16
- export const run = async ({cwd = process.cwd(), env, flags = {}} = {}) => within(async () => {
17
- const {version: zbrVersion} = createRequire(import.meta.url)('../../../package.json')
18
- if (flags.v || flags.version) {
19
- console.log(zbrVersion)
20
- return
21
- }
22
-
23
- const context = await createContext({flags, env, cwd})
24
- const {report, packages, queue, prev, graphs} = context
25
- const _runCmd = queuefy(runCmd, flags.concurrency || os.cpus().length)
26
-
27
- report
28
- .log()(`zx-bulk-release@${zbrVersion}`)
29
- .log()('queue:', queue)
30
- .log()('graphs', graphs)
31
-
32
- try {
33
- await traverseQueue({queue, prev, async cb(name) {
34
- report.setStatus('analyzing', name)
35
- const pkg = packages[name]
36
- await contextify(pkg, context)
37
- await analyze(pkg)
38
- report
39
- .set('config', pkg.config, name)
40
- .set('version', pkg.version, name)
41
- .set('prevVersion', pkg.latest.tag?.version || pkg.manifest.version, name)
42
- .set('releaseType', pkg.releaseType, name)
43
- .set('tag', pkg.tag, name)
44
- }})
45
-
46
- report.setStatus('pending')
47
-
48
- await traverseQueue({queue, prev, async cb(name) {
49
- const pkg = packages[name]
50
-
51
- if (!pkg.releaseType) {
52
- report.setStatus('skipped', name)
53
- return
54
- }
55
- if (flags.build !== false) {
56
- report.setStatus('building', name)
57
- await build(pkg, _runCmd)
58
- }
59
- if (!flags.dryRun && flags.publish !== false) {
60
- report.setStatus('publishing', name)
61
- await publish(pkg, _runCmd)
62
- }
63
-
64
- report.setStatus('success', name)
65
- }})
66
- } catch (e) {
67
- report
68
- .log({level: 'error'})(e, e.stack)
69
- .set('error', e)
70
- .setStatus('failure')
71
- throw e
72
- } finally {
73
- await clean(cwd, packages)
74
- }
75
- report
76
- .setStatus('success')
77
- .log()('Great success!')
78
- })
79
-
80
- export const runCmd = async (pkg, name) => {
81
- const cmd = tpl(pkg.config[name], {...pkg, ...pkg.context})
82
- const now = Date.now()
83
-
84
- if (cmd) {
85
- log({pkg})(`run ${name} '${cmd}'`)
86
- const result = await $.o({cwd: pkg.absPath, quote: v => v, preferLocal: true})`${cmd}`
87
-
88
- log({pkg})(`duration ${name}: ${Date.now() - now}`)
89
- return result
90
- }
91
- }
92
-
93
- const createContext = async ({flags, env, cwd}) => {
94
- const { packages, queue, root, prev, graphs } = await topo({cwd, flags})
95
- const report = createReport({packages, queue, flags})
96
-
97
- $.report = report
98
- $.env = {...process.env, ...env}
99
- $.verbose = !!(flags.debug || $.env.DEBUG ) || $.verbose
100
-
101
- return {
102
- report,
103
- packages,
104
- root,
105
- queue,
106
- prev,
107
- graphs,
108
- flags
109
- }
110
- }
111
-
112
- // Inspired by https://docs.github.com/en/actions/learn-github-actions/contexts
113
- const contextify = async (pkg, {packages, root, flags}) => {
114
- pkg.config = await getPkgConfig(pkg.absPath, root.absPath)
115
- pkg.latest = await getLatest(pkg)
116
- pkg.context = {
117
- git: {
118
- sha: await getSha(pkg.absPath),
119
- root: await getRoot(pkg.absPath)
120
- },
121
- env: $.env,
122
- flags,
123
- packages
124
- }
125
- }
126
-
127
- const build = memoizeBy(async (pkg, run = runCmd, flags = {}, self = build) => within(async () => {
128
- $.scope = pkg.name
129
-
130
- await Promise.all([
131
- traverseDeps({pkg, packages: pkg.context.packages, cb: async({pkg}) => self(pkg, run, flags, self)}),
132
- pkg.manifest.private !== true && pkg.changes.length === 0 && pkg.config.npmFetch && flags.npmFetch !== false
133
- ? fetchPkg(pkg)
134
- : Promise.resolve()
135
- ])
136
-
137
- if (!pkg.fetched) {
138
- await run(pkg, 'buildCmd')
139
- await run(pkg, 'testCmd')
140
- }
141
-
142
- pkg.built = true
143
- }))
144
-
145
- const publish = memoizeBy(async (pkg, run = runCmd) => within(async () => {
146
- $.scope = pkg.name
147
-
148
- // Debug
149
- // https://www.npmjs.com/package/@packasso/preset-ts-tsc-uvu/v/0.0.0?activeTab=code
150
- // https://github.com/qiwi/packasso/actions/runs/4514909191/jobs/7951564982#step:7:817
151
- // https://github.com/qiwi/packasso/blob/meta/2023-3-24-packasso-preset-ts-tsc-uvu-0-21-0-f0.json
152
- if (pkg.version !== pkg.manifest.version) {
153
- throw new Error('package.json version not synced')
154
- }
155
-
156
- await npmPersist(pkg)
157
-
158
- if (pkg.context.flags.snapshot) {
159
- await Promise.all([
160
- npmPublish(pkg),
161
- run(pkg, 'publishCmd')
162
- ])
163
- } else {
164
- await pushReleaseTag(pkg)
165
- await Promise.all([
166
- pushMeta(pkg),
167
- pushChangelog(pkg),
168
- npmPublish(pkg),
169
- ghRelease(pkg),
170
- ghPages(pkg),
171
- run(pkg, 'publishCmd')
172
- ])
173
- }
174
- pkg.published = true
175
- }))
176
-
177
- const clean = async (cwd, packages) => {
178
- await unsetUserConfig(cwd)
179
- await Promise.all(Object.values(packages).map(npmRestore))
180
- }