bulk-release 2.19.1 → 2.21.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/CHANGELOG.md +25 -0
  2. package/README.md +72 -77
  3. package/package.json +3 -3
  4. package/src/main/js/config.js +12 -3
  5. package/src/main/js/index.js +0 -6
  6. package/src/main/js/processor/api/gh.js +111 -0
  7. package/src/main/js/{api → processor/api}/git.js +17 -26
  8. package/src/main/js/{api → processor/api}/npm.js +70 -28
  9. package/src/main/js/processor/deps.js +1 -1
  10. package/src/main/js/processor/exec.js +4 -4
  11. package/src/main/js/processor/generators/meta.js +80 -0
  12. package/src/main/js/processor/generators/notes.js +37 -0
  13. package/src/main/js/processor/{meta.js → generators/tag.js} +3 -109
  14. package/src/main/js/processor/log.js +86 -0
  15. package/src/main/js/processor/publishers/changelog.js +26 -0
  16. package/src/main/js/processor/publishers/cmd.js +6 -0
  17. package/src/main/js/processor/publishers/gh-pages.js +32 -0
  18. package/src/main/js/processor/publishers/gh-release.js +41 -0
  19. package/src/main/js/processor/publishers/meta.js +58 -0
  20. package/src/main/js/processor/publishers/npm.js +15 -0
  21. package/src/main/js/processor/release.js +71 -66
  22. package/src/main/js/{steps → processor/steps}/analyze.js +18 -24
  23. package/src/main/js/processor/steps/build.js +20 -0
  24. package/src/main/js/processor/steps/clean.js +7 -0
  25. package/src/main/js/processor/steps/contextify.js +49 -0
  26. package/src/main/js/processor/steps/publish.js +39 -0
  27. package/src/main/js/processor/steps/teardown.js +58 -0
  28. package/src/main/js/processor/steps/test.js +10 -0
  29. package/src/main/js/util.js +32 -77
  30. package/src/main/js/api/changelog.js +0 -42
  31. package/src/main/js/api/gh.js +0 -131
  32. package/src/main/js/log.js +0 -63
  33. package/src/main/js/steps/build.js +0 -23
  34. package/src/main/js/steps/clean.js +0 -7
  35. package/src/main/js/steps/contextify.js +0 -154
  36. package/src/main/js/steps/publish.js +0 -47
  37. package/src/main/js/steps/test.js +0 -16
@@ -3,117 +3,122 @@ import {createRequire} from 'node:module'
3
3
  import {$, within} from 'zx-extra'
4
4
  import {queuefy} from 'queuefy'
5
5
  import {topo, traverseQueue} from './deps.js'
6
- import {createReport} from '../log.js'
6
+ import {createReport, log} from './log.js'
7
7
  import {exec} from './exec.js'
8
- import {contextify, recover} from '../steps/contextify.js'
9
- import {fetchTags} from '../api/git.js'
10
- import {analyze} from '../steps/analyze.js'
11
- import {build} from '../steps/build.js'
12
- import {publish} from '../steps/publish.js'
13
- import {clean} from '../steps/clean.js'
14
- import {test} from '../steps/test.js'
8
+
9
+ import {contextify} from './steps/contextify.js'
10
+ import {recover} from './steps/teardown.js'
11
+ import {fetchTags} from './api/git.js'
12
+ import {analyze} from './steps/analyze.js'
13
+ import {build} from './steps/build.js'
14
+ import {publish} from './steps/publish.js'
15
+ import {clean} from './steps/clean.js'
16
+ import {test} from './steps/test.js'
17
+
18
+ import meta from './publishers/meta.js'
19
+ import npm from './publishers/npm.js'
20
+ import ghRelease from './publishers/gh-release.js'
21
+ import ghPages from './publishers/gh-pages.js'
22
+ import changelog from './publishers/changelog.js'
23
+ import cmd from './publishers/cmd.js'
24
+
25
+ // Publisher registry. Order = publish order; teardown walks it in reverse.
26
+ const publishers = [meta, npm, ghRelease, ghPages, changelog, cmd]
15
27
 
16
28
  export const run = async ({cwd = process.cwd(), env, flags = {}} = {}) => within(async () => {
29
+ $.memo = new Map()
30
+
17
31
  const {version: zbrVersion} = createRequire(import.meta.url)('../../../../package.json')
18
32
  if (flags.v || flags.version) {
19
33
  console.log(zbrVersion)
20
34
  return
21
35
  }
22
36
 
23
- const context = await createContext({flags, env, cwd})
24
- const {report, packages, queue, prev, graphs} = context
25
- const _exec = queuefy(exec, flags.concurrency || os.cpus().length)
37
+ const ctx = await createContext({flags, env, cwd})
38
+ const {report, packages, queue, prev} = ctx
39
+
40
+ // Per-package scope: $.scope, packages[name] lookup, contextify on first touch.
41
+ const forEachPkg = (cb) => traverseQueue({queue, prev, cb: (name) => within(async () => {
42
+ $.scope = name
43
+ const pkg = packages[name]
44
+ await contextify(pkg, ctx)
45
+ return cb(pkg)
46
+ })})
26
47
 
27
48
  report
28
- .log()(`zx-bulk-release@${zbrVersion}`)
29
- .log()('queue:', queue)
30
- .log()('graphs', graphs)
49
+ .log(`zx-bulk-release@${zbrVersion}`)
50
+ .log('queue:', queue)
51
+ .log('graphs', ctx.graphs)
31
52
 
32
53
  // --recover: standalone mode — clean orphan tags and exit.
33
- // Run the full pipeline again after this to rebuild and publish affected packages.
34
54
  if (flags.recover) {
35
55
  await fetchTags(cwd)
36
56
  let recovered = 0
37
- for (const name of queue) {
38
- const pkg = packages[name]
39
- await contextify(pkg, context)
40
- if (await recover(pkg)) recovered++
41
- }
42
- report.log()(`recover: cleaned ${recovered} orphan tag(s)`)
57
+ await forEachPkg(async (pkg) => { if (await recover(pkg)) recovered++ })
58
+ report.log(`recover: cleaned ${recovered} orphan tag(s)`)
43
59
  return
44
60
  }
45
61
 
46
62
  try {
47
- await traverseQueue({queue, prev, async cb(name) {
48
- report.setStatus('analyzing', name)
49
- const pkg = packages[name]
50
- await contextify(pkg, context)
63
+ await forEachPkg(async (pkg) => {
64
+ report.setStatus('analyzing', pkg.name)
51
65
  await analyze(pkg)
52
- report
53
- .set('config', pkg.config, name)
54
- .set('version', pkg.version, name)
55
- .set('prevVersion', pkg.latest.tag?.version || pkg.manifest.version, name)
56
- .set('releaseType', pkg.releaseType, name)
57
- .set('tag', pkg.tag, name)
58
- }})
66
+ report.set({
67
+ config: pkg.config,
68
+ version: pkg.version,
69
+ prevVersion: pkg.latest.tag?.version || pkg.manifest.version,
70
+ releaseType: pkg.releaseType,
71
+ tag: pkg.tag,
72
+ }, pkg.name)
73
+ })
59
74
 
60
75
  report.setStatus('pending')
61
76
 
62
- await traverseQueue({queue, prev, async cb(name) {
63
- const pkg = packages[name]
64
-
77
+ await forEachPkg(async (pkg) => {
65
78
  if (!pkg.releaseType) {
66
- report.setStatus('skipped', name)
79
+ report.setStatus('skipped', pkg.name)
67
80
  pkg.skipped = true
68
81
  return
69
82
  }
70
83
  if (flags.build !== false) {
71
- report.setStatus('building', name)
72
- await build(pkg, _exec)
84
+ report.setStatus('building', pkg.name)
85
+ await build(pkg)
73
86
  }
74
87
  if (flags.test !== false) {
75
- report.setStatus('testing', name)
76
- await test(pkg, _exec)
88
+ report.setStatus('testing', pkg.name)
89
+ await test(pkg)
77
90
  }
78
91
  if (!flags.dryRun && flags.publish !== false) {
79
- report.setStatus('publishing', name)
80
- await publish(pkg, _exec)
92
+ report.setStatus('publishing', pkg.name)
93
+ await publish(pkg)
81
94
  }
82
-
83
- report.setStatus('success', name)
84
- }})
95
+ report.setStatus('success', pkg.name)
96
+ })
85
97
  } catch (e) {
86
- report
87
- .log({level: 'error'})(e, e.stack)
88
- .set('error', e)
89
- .setStatus('failure')
98
+ report.error(e, e.stack).set('error', e).setStatus('failure')
90
99
  throw e
91
100
  } finally {
92
- await clean(cwd, packages)
101
+ await clean(ctx)
93
102
  }
94
- report
95
- .setStatus('success')
96
- .log()('Great success!')
103
+ report.setStatus('success').log('Great success!')
97
104
  })
98
105
 
99
106
  export const createContext = async ({flags, env: _env, cwd}) => {
100
- const { packages, queue, root, prev, graphs } = await topo({cwd, flags})
107
+ const {packages, queue, root, prev, graphs} = await topo({cwd, flags})
101
108
  const report = createReport({packages, queue, flags})
102
109
  const env = {...process.env, ..._env}
103
110
 
104
- $.report = report
105
- $.env = env
106
- $.verbose = !!(flags.debug || $.env.DEBUG ) || $.verbose
107
- $.quiet = !$.verbose
111
+ // Register known secrets so the logger redacts them from all output.
112
+ log.secret(env.GH_TOKEN, env.GITHUB_TOKEN, env.NPM_TOKEN)
113
+
114
+ $.report = report
115
+ $.env = env
116
+ $.verbose = !!(flags.debug || env.DEBUG) || $.verbose
117
+ $.quiet = !$.verbose
108
118
 
109
119
  return {
110
- report,
111
- packages,
112
- root,
113
- queue,
114
- prev,
115
- graphs,
116
- flags,
117
- env
120
+ cwd, env, flags, root, packages, queue, prev, graphs, report,
121
+ publishers,
122
+ run: queuefy(exec, flags.concurrency || os.cpus().length),
118
123
  }
119
124
  }
@@ -1,15 +1,15 @@
1
1
  import {semver} from 'zx-extra'
2
2
  import {log} from '../log.js'
3
3
  import {getCommits} from '../api/git.js'
4
- import {updateDeps} from '../processor/deps.js'
5
- import {formatTag} from '../processor/meta.js';
4
+ import {updateDeps} from '../deps.js'
5
+ import {formatTag} from '../generators/tag.js'
6
6
 
7
- export const analyze = async (pkg) => {
7
+ export const analyze = async (pkg, ctx = pkg.ctx) => {
8
8
  const semanticChanges = await getSemanticChanges(pkg.absPath, pkg.latest.tag?.ref, undefined, pkg.config.releaseRules)
9
9
  const depsChanges = await updateDeps(pkg)
10
10
  const changes = [...semanticChanges, ...depsChanges]
11
11
  const releaseType = getNextReleaseType(changes)
12
- const pre = pkg.context.flags.snapshot ? `-snap.${pkg.context.git.sha.slice(0, 7)}` : undefined
12
+ const pre = ctx.flags.snapshot ? `-snap.${ctx.git.sha.slice(0, 7)}` : undefined
13
13
  const latestVersion = pkg.latest.tag?.version || pkg.latest.meta?.version
14
14
 
15
15
  pkg.changes = changes
@@ -24,7 +24,7 @@ export const analyze = async (pkg) => {
24
24
  pkg.manifest.version = pkg.version
25
25
  pkg.tag = releaseType ? formatTag({name: pkg.name, version: pkg.version, format: pkg.config.tagFormat}) : null
26
26
 
27
- log({pkg})(
27
+ log.info(
28
28
  'semantic changes', changes,
29
29
  'releaseType', releaseType,
30
30
  'prevVersion', latestVersion,
@@ -46,27 +46,21 @@ export const getSemanticChanges = async (cwd, from, to, rules = semanticRules) =
46
46
  return analyzeCommits(commits, rules)
47
47
  }
48
48
 
49
- export const analyzeCommits = (commits, rules) =>
50
- commits.reduce((acc, {subj, body, short, hash}) => {
51
- rules.forEach(({group, releaseType, prefixes, keywords}) => {
52
- const prefixMatcher = prefixes && new RegExp(`^(${prefixes.join('|')})(\\([a-zA-Z0-9\\-_,]+\\))?:\\s.+$`)
53
- const keywordsMatcher = keywords && new RegExp(`(${keywords.join('|')}):\\s(.+)`)
54
- const change = subj.match(prefixMatcher)?.[0] || body.match(keywordsMatcher)?.[2]
49
+ export const analyzeCommits = (commits, rules) => {
50
+ const prepared = rules.map(({group, releaseType, prefixes, keywords}) => [
51
+ group,
52
+ releaseType,
53
+ prefixes && new RegExp(`^(${prefixes.join('|')})(\\([a-zA-Z0-9\\-_,]+\\))?:\\s.+$`),
54
+ keywords && new RegExp(`(${keywords.join('|')}):\\s(.+)`),
55
+ ])
55
56
 
56
- if (change) {
57
- acc.push({
58
- group,
59
- releaseType,
60
- change,
61
- subj,
62
- body,
63
- short,
64
- hash
65
- })
66
- }
57
+ return commits.flatMap((commit) =>
58
+ prepared.flatMap(([group, releaseType, prefixRe, keywordRe]) => {
59
+ const change = prefixRe?.exec(commit.subj)?.[0] || keywordRe?.exec(commit.body)?.[2]
60
+ return change ? [{group, releaseType, change, ...commit}] : []
67
61
  })
68
- return acc
69
- }, [])
62
+ )
63
+ }
70
64
 
71
65
  export const getNextReleaseType = (changes) => changes.length
72
66
  ? releaseSeverityOrder.find(type => changes.find(({releaseType}) => type === releaseType))
@@ -0,0 +1,20 @@
1
+ import {memoizeBy} from '../../util.js'
2
+ import {fetchPkg} from '../api/npm.js'
3
+ import {traverseDeps} from '../deps.js'
4
+ import {exec} from '../exec.js'
5
+
6
+ export const build = memoizeBy(async (pkg, ctx = pkg.ctx) => {
7
+ const {run = exec, flags = {}, packages} = ctx
8
+ await Promise.all([
9
+ traverseDeps({pkg, packages, cb: ({pkg}) => build(pkg, ctx)}),
10
+ pkg.manifest.private !== true && pkg.changes.length === 0 && pkg.config.npmFetch && flags.npmFetch !== false
11
+ ? fetchPkg(pkg)
12
+ : Promise.resolve()
13
+ ])
14
+
15
+ if (!pkg.fetched) {
16
+ await run(pkg, 'buildCmd')
17
+ }
18
+
19
+ pkg.built = true
20
+ })
@@ -0,0 +1,7 @@
1
+ import {unsetUserConfig} from '../api/git.js'
2
+ import {npmRestore} from '../api/npm.js'
3
+
4
+ export const clean = async ({cwd, packages}) => {
5
+ await unsetUserConfig(cwd)
6
+ await Promise.all(Object.values(packages).filter(p => !p.skipped).map(npmRestore))
7
+ }
@@ -0,0 +1,49 @@
1
+ import {getPkgConfig} from '../../config.js'
2
+ import {getLatest} from '../generators/meta.js'
3
+ import {getRoot, getSha} from '../api/git.js'
4
+
5
+ /**
6
+ * Global release context — one per `run()` invocation.
7
+ * Built by `createContext()` in release.js and extended with runtime bits (run, publishers).
8
+ *
9
+ * @typedef {object} ReleaseContext
10
+ * @property {object} flags CLI flags (build, test, publish, snapshot, dryRun, recover, ...).
11
+ * @property {Record<string,string>} env Resolved process env (process.env merged with per-run overrides).
12
+ * @property {string} cwd Repo working directory.
13
+ * @property {object} root Root package descriptor (from topo()).
14
+ * @property {Record<string,object>} packages All discovered packages keyed by name.
15
+ * @property {string[]} queue Topologically sorted release order.
16
+ * @property {Record<string,string[]>} prev Per-package predecessor map.
17
+ * @property {object} graphs Dependency graphs.
18
+ * @property {object} report Run-wide status/report sink.
19
+ * @property {(pkg, name) => Promise} run Concurrency-limited shell exec (queuefy'd).
20
+ * @property {object[]} publishers Ordered publisher registry.
21
+ */
22
+
23
+ /**
24
+ * Per-package release context — refines {@link ReleaseContext} for a single package scope.
25
+ * Inherits all shared fields from the global ctx via prototype chain; only stores
26
+ * package-local state (git sha/root/tag) as own properties.
27
+ *
28
+ * @typedef {ReleaseContext & {git: {sha: string, root: string, tag?: string}}} PkgContext
29
+ */
30
+
31
+ /**
32
+ * Attach per-package state to `pkg` and build its refined context.
33
+ * Inspired by https://docs.github.com/en/actions/learn-github-actions/contexts
34
+ *
35
+ * @param {object} pkg Package descriptor (mutated: config, latest, context are set).
36
+ * @param {ReleaseContext} ctx
37
+ */
38
+ export const contextify = async (pkg, ctx) => {
39
+ if (pkg.ctx) return
40
+ pkg.config = await getPkgConfig([pkg.absPath, ctx.root.absPath], ctx.env)
41
+ pkg.latest = await getLatest(pkg)
42
+ pkg.ctx = {
43
+ ...ctx,
44
+ git: {
45
+ sha: await getSha(pkg.absPath),
46
+ root: await getRoot(pkg.absPath),
47
+ },
48
+ }
49
+ }
@@ -0,0 +1,39 @@
1
+ import {memoizeBy} from '../../util.js'
2
+ import {exec} from '../exec.js'
3
+ import {log} from '../log.js'
4
+ import {npmPersist} from '../api/npm.js'
5
+ import {pushTag} from '../api/git.js'
6
+ import {formatTag} from '../generators/tag.js'
7
+ import {isNpmPublished} from '../publishers/npm.js'
8
+ import {rollbackRelease} from './teardown.js'
9
+
10
+ const pushReleaseTag = async (pkg, ctx) => {
11
+ const {name, version, tag = formatTag({name, version}), config: {gitCommitterEmail, gitCommitterName}} = pkg
12
+ ctx.git.tag = tag
13
+ log.info(`push release tag ${tag}`)
14
+ await pushTag({cwd: ctx.git.root, tag, gitCommitterEmail, gitCommitterName})
15
+ }
16
+
17
+ export const publish = memoizeBy(async (pkg, ctx = pkg.ctx) => {
18
+ if (pkg.version !== pkg.manifest.version)
19
+ throw new Error('package.json version not synced')
20
+
21
+ const {run = exec, publishers = [], flags} = ctx
22
+ const snapshot = !!flags.snapshot
23
+ const active = publishers.filter(p => (!snapshot || p.snapshot) && p.when(pkg))
24
+
25
+ await npmPersist(pkg)
26
+
27
+ // Prepare phase: serial pkg mutations (e.g. meta injects into ghAssets) — must finish before any run().
28
+ for (const p of active) await p.prepare?.(pkg)
29
+
30
+ if (!snapshot) await pushReleaseTag(pkg, ctx)
31
+ try {
32
+ await Promise.all(active.map(p => p.run(pkg, run)))
33
+ } catch (e) {
34
+ // Roll back full release for npm-published packages; git-tag-only packages keep their tag — it IS the release.
35
+ if (!snapshot && isNpmPublished(pkg)) await rollbackRelease(pkg, ctx)
36
+ throw e
37
+ }
38
+ pkg.published = true
39
+ })
@@ -0,0 +1,58 @@
1
+ // Release teardown: undo a published (or half-published) release.
2
+ //
3
+ // Two entry points share the same core:
4
+ // - rollbackRelease: called inline from publish.js on mid-publish failure (tag known from pkg.ctx).
5
+ // - recover: standalone --recover mode — detect orphan tags (tagged but missing on npm) and tear them down.
6
+ //
7
+ // Teardown walks the publishers registry in reverse and calls undo() on each that applies.
8
+
9
+ import {log} from '../log.js'
10
+ import {deleteRemoteTag} from '../api/git.js'
11
+ import {fetchManifest} from '../api/npm.js'
12
+ import {isNpmPublished} from '../publishers/npm.js'
13
+
14
+ // Tear down a release: undo every applicable publisher, then delete the git tag.
15
+ // Failures in individual undo steps are warned, not thrown — teardown is best-effort.
16
+ const teardownRelease = async (pkg, ctx, {tag, version, reason}) => {
17
+ if (!pkg.config.ghBasicAuth) throw new Error(`${reason} requires git credentials (GH_TOKEN)`)
18
+
19
+ for (const p of [...ctx.publishers].reverse()) {
20
+ if (!p.undo || !p.when(pkg)) continue
21
+ try {
22
+ const result = await p.undo(pkg, {tag, version, reason})
23
+ if (result !== false) log.info(`${reason}: ${p.name} undone for '${tag}'`)
24
+ } catch (e) {
25
+ log.warn(`${reason}: ${p.name} undo failed`, e)
26
+ }
27
+ }
28
+
29
+ await deleteRemoteTag({cwd: ctx.git.root, tag})
30
+ }
31
+
32
+ // Rollback a release that failed mid-publish (called inline from publish.js).
33
+ // Uses the current release tag; skips the npm existence check — we already know it failed.
34
+ export const rollbackRelease = async (pkg, ctx = pkg.ctx) => {
35
+ const tag = ctx.git.tag
36
+ if (!tag) return
37
+ log.info(`rollback: cleaning up failed release for tag '${tag}'`)
38
+ await teardownRelease(pkg, ctx, {tag, version: pkg.version, reason: 'rollback'})
39
+ }
40
+
41
+ // Standalone recovery: if a tag exists but the package is missing from npm, treat it as an orphan and tear down.
42
+ export const recover = async (pkg, ctx = pkg.ctx) => {
43
+ if (!isNpmPublished(pkg)) return false
44
+
45
+ const {tag} = pkg.latest
46
+ if (!tag) return false
47
+
48
+ const manifest = await fetchManifest({
49
+ name: pkg.name,
50
+ version: tag.version,
51
+ config: pkg.config,
52
+ }, {nothrow: true})
53
+ if (manifest) return false
54
+
55
+ log.info(`recover: tag '${tag.ref}' exists but ${pkg.name}@${tag.version} not found on npm, rolling back failed release`)
56
+ await teardownRelease(pkg, ctx, {tag: tag.ref, version: tag.version, reason: 'recover'})
57
+ return true
58
+ }
@@ -0,0 +1,10 @@
1
+ import {memoizeBy} from '../../util.js'
2
+ import {exec} from '../exec.js'
3
+
4
+ export const test = memoizeBy(async (pkg, ctx = pkg.ctx) => {
5
+ const {run = exec} = ctx
6
+ if (!pkg.fetched) {
7
+ await run(pkg, 'testCmd')
8
+ }
9
+ pkg.tested = true
10
+ })
@@ -1,8 +1,4 @@
1
- import zlib from 'node:zlib'
2
- import fs from 'node:fs/promises'
3
- import path from 'node:path'
4
- import tar from 'tar-stream'
5
- import {Readable} from 'node:stream'
1
+ import {$} from 'zx-extra'
6
2
 
7
3
  export const tpl = (str, context) =>
8
4
  str?.replace(/\$\{\{\s*([.a-z0-9]+)\s*}}/gi, (matched, key) => get(context, key) ?? '')
@@ -36,84 +32,43 @@ export const set = (obj, path, value) => {
36
32
 
37
33
  export const msgJoin = (rest, context, def) => tpl(rest.filter(Boolean).join(' ') || def, context)
38
34
 
39
- export const keyByValue = (obj, value) => Object.keys(obj).find((key) => obj[key] === value)
40
-
41
- export const memoizeBy = (fn, getKey = v => v, memo = new Map()) => async (...args) => {
42
- const key = await getKey(...args)
43
- if (memo.has(key)) {
44
- return memo.get(key)
35
+ // Normalize "string | {k1, k2, ...}" config shape to a positional tuple.
36
+ // Used for shorthand configs like ghPages: 'branch from to msg' or {branch, from, to, msg}.
37
+ export const asTuple = (opts, keys) => typeof opts === 'string'
38
+ ? opts.split(' ')
39
+ : keys.map(k => opts[k])
40
+
41
+ export const attempt = async (times, action, fix) => {
42
+ for (let i = times; i > 0; i--) {
43
+ try { return await action() }
44
+ catch (e) {
45
+ if (i === 1 || !fix) throw e
46
+ await fix(e)
47
+ }
45
48
  }
46
-
47
- const value = fn(...args)
48
- memo.set(key, value)
49
- return value
50
49
  }
51
50
 
52
- export const camelize = s => s.replace(/-./g, x => x[1].toUpperCase())
51
+ export const attempt2 = (action, fix) => attempt(2, action, fix)
52
+ export const attempt3 = (action, fix) => attempt(3, action, fix)
53
53
 
54
- export const asArray = v => Array.isArray(v) ? v : [v]
54
+ export const memoStore = new Map()
55
55
 
56
- export const getCommonPath = files => {
57
- const f0 = files[0]
58
- const common = files.length === 1
59
- ? f0.lastIndexOf('/') + 1
60
- : [...(f0)].findIndex((c, i) => files.some(f => f.charAt(i) !== c))
56
+ export const memoizeBy = (fn, getKey = v => v) => {
57
+ const memoized = async (...args) => {
58
+ const store = $.memo || memoStore
59
+ if (!store.has(memoized)) store.set(memoized, new Map())
60
+ const memo = store.get(memoized)
61
+ const key = await getKey(...args)
62
+ if (memo.has(key)) return memo.get(key)
61
63
 
62
- const p = f0.slice(0, common)
63
- if (p.endsWith('/')) {
64
- return p
64
+ const value = Promise.resolve().then(() => fn(...args))
65
+ memo.set(key, value)
66
+ value.catch(() => memo.delete(key))
67
+ return value
65
68
  }
66
-
67
- return p.slice(0, p.lastIndexOf('/') + 1)
69
+ return memoized
68
70
  }
69
71
 
70
- export const safePath = v => path.resolve('/', v).slice(1)
71
-
72
- // https://stackoverflow.com/questions/19978452/how-to-extract-single-file-from-tar-gz-archive-using-node-js
73
- export const unzip = (stream, {pick, omit, cwd = process.cwd(), strip = 0} = {}) => new Promise((resolve, reject) => {
74
- const extract = tar.extract()
75
- const results = []
76
-
77
- extract.on('entry', ({name, type}, stream, cb)=> {
78
- const _name = safePath(strip ? name.split('/').slice(strip).join('/') : name)
79
- const fp = path.join(cwd, _name)
80
-
81
- let data = ''
82
- stream.on('data', (chunk) => {
83
- if (type !== 'file') {
84
- return
85
- }
86
- if (omit?.includes(_name)) {
87
- return
88
- }
89
- if (pick && !pick.includes(_name)) {
90
- return
91
- }
92
-
93
- data +=chunk
94
- })
95
-
96
- stream.on('end', () => {
97
- if (data) {
98
- results.push(
99
- fs.mkdir(path.dirname(fp), {recursive: true})
100
- .then(() => fs.writeFile(fp, data, 'utf8'))
101
- )
102
- }
103
- cb()
104
- })
105
-
106
- stream.resume()
107
- })
108
-
109
- extract.on('finish', ()=> {
110
- resolve(Promise.all(results))
111
- })
112
-
113
- // fs.createReadStream('archive.tar.gz')
114
- stream
115
- .pipe(zlib.createGunzip())
116
- .pipe(extract)
117
- })
118
-
119
- export const pipify = (stream) => stream.pipe ? stream : Readable.from(stream)
72
+ export const camelize = s => s.replace(/-./g, x => x[1].toUpperCase())
73
+
74
+ export const asArray = v => Array.isArray(v) ? v : [v]
@@ -1,42 +0,0 @@
1
- import {$} from 'zx-extra'
2
- import {queuefy} from 'queuefy'
3
- import {fetchRepo, getRepo, pushCommit} from './git.js'
4
- import {log} from '../log.js'
5
- import {formatTag} from '../processor/meta.js'
6
- import {msgJoin} from '../util.js'
7
-
8
- export const pushChangelog = queuefy(async (pkg) => {
9
- const {absPath: cwd, config: {changelog: opts, gitCommitterEmail, gitCommitterName, ghBasicAuth: basicAuth}} = pkg
10
- if (!opts) return
11
-
12
- log({pkg})('push changelog')
13
- const [branch = 'changelog', file = `${pkg.name.replace(/[^a-z0-9-]/ig, '')}-changelog.md`, ..._msg] = typeof opts === 'string'
14
- ? opts.split(' ')
15
- : [opts.branch, opts.file, opts.msg]
16
- const _cwd = await fetchRepo({cwd, branch, basicAuth})
17
- const msg = msgJoin(_msg, pkg, 'chore: update changelog ${{name}}')
18
- const releaseNotes = await formatReleaseNotes(pkg)
19
-
20
- await $({cwd: _cwd})`echo ${releaseNotes}"\n$(cat ./${file})" > ./${file}`
21
- await pushCommit({cwd, branch, msg, gitCommitterEmail, gitCommitterName, basicAuth})
22
- })
23
-
24
- export const formatReleaseNotes = async (pkg) => {
25
- const {name, version, tag = formatTag({name, version}), absPath: cwd, config: {ghBasicAuth: basicAuth}} = pkg
26
- const {repoPublicUrl} = await getRepo(cwd, {basicAuth})
27
- const releaseDiffRef = `## [${name}@${version}](${repoPublicUrl}/compare/${pkg.latest.tag?.ref}...${tag}) (${new Date().toISOString().slice(0, 10)})`
28
- const releaseDetails = Object.values(pkg.changes
29
- .reduce((acc, {group, subj, short, hash}) => {
30
- const {commits} = acc[group] || (acc[group] = {commits: [], group})
31
- const commitRef = `* ${subj}${short ? ` [${short}](${repoPublicUrl}/commit/${hash})` : ''}`
32
-
33
- commits.push(commitRef)
34
-
35
- return acc
36
- }, {}))
37
- .map(({group, commits}) => `
38
- ### ${group}
39
- ${commits.join('\n')}`).join('\n')
40
-
41
- return releaseDiffRef + '\n' + releaseDetails + '\n'
42
- }