bulk-release 3.0.5 → 3.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/CHANGELOG.md +15 -0
  2. package/README.md +146 -24
  3. package/package.json +2 -2
  4. package/src/main/js/config.js +3 -2
  5. package/src/main/js/post/api/gh.js +11 -0
  6. package/src/main/js/post/api/git.js +19 -0
  7. package/src/main/js/post/api/npm.js +5 -5
  8. package/src/main/js/post/courier/channels/changelog.js +12 -3
  9. package/src/main/js/post/courier/channels/gh-pages.js +8 -1
  10. package/src/main/js/post/courier/channels/gh-release.js +11 -1
  11. package/src/main/js/post/courier/channels/git-tag.js +24 -0
  12. package/src/main/js/post/courier/channels/meta.js +9 -3
  13. package/src/main/js/post/courier/channels/npm.js +27 -14
  14. package/src/main/js/post/courier/index.js +137 -24
  15. package/src/main/js/post/courier/semaphore.js +31 -0
  16. package/src/main/js/post/courier/seniority.js +19 -0
  17. package/src/main/js/post/depot/context.js +46 -0
  18. package/src/main/js/post/depot/reconcile.js +47 -0
  19. package/src/main/js/post/depot/steps/contextify.js +2 -1
  20. package/src/main/js/post/depot/steps/pack.js +6 -10
  21. package/src/main/js/post/depot/steps/publish.js +1 -11
  22. package/src/main/js/post/modes/deliver.js +23 -0
  23. package/src/main/js/post/modes/pack.js +69 -0
  24. package/src/main/js/post/modes/receive.js +72 -0
  25. package/src/main/js/post/modes/verify.js +41 -0
  26. package/src/main/js/post/{courier/parcel.js → parcel/build.js} +15 -2
  27. package/src/main/js/post/parcel/directive.js +81 -0
  28. package/src/main/js/post/parcel/index.js +4 -0
  29. package/src/main/js/post/parcel/verify.js +46 -0
  30. package/src/main/js/post/release.js +44 -86
  31. package/src/main/js/post/tar.js +1 -1
  32. package/src/main/js/util.js +16 -0
  33. package/src/test/js/utils/mock.js +1 -1
@@ -9,6 +9,18 @@ const gitFields = (a, pkg) => ({
9
9
  })
10
10
 
11
11
  const entry = {
12
+ 'git-tag': (pkg, ctx) => ({
13
+ channel: 'git-tag',
14
+ manifest: {
15
+ channel: 'git-tag',
16
+ name: pkg.name, version: pkg.version, tag: pkg.tag,
17
+ cwd: ctx.git.root,
18
+ gitCommitterName: '${{GIT_COMMITTER_NAME}}',
19
+ gitCommitterEmail: '${{GIT_COMMITTER_EMAIL}}',
20
+ },
21
+ files: [],
22
+ }),
23
+
12
24
  npm: (pkg, ctx, a) => ({
13
25
  channel: 'npm',
14
26
  manifest: {
@@ -24,6 +36,7 @@ const entry = {
24
36
  channel: 'gh-release',
25
37
  manifest: {
26
38
  channel: 'gh-release',
39
+ name: pkg.name, version: pkg.version,
27
40
  tag: pkg.tag, repoHost: a.repoHost, repoName: a.repoName, releaseNotes: a.releaseNotes,
28
41
  token: '${{GH_TOKEN}}', apiUrl: pkg.config.ghApiUrl,
29
42
  assets: pkg.config.ghAssets ? [...pkg.config.ghAssets] : undefined,
@@ -35,7 +48,7 @@ const entry = {
35
48
  const [branch = 'gh-pages', , to = '.', ..._msg] = asTuple(pkg.config.ghPages, ['branch', 'from', 'to', 'msg'])
36
49
  return {
37
50
  channel: 'gh-pages',
38
- manifest: {channel: 'gh-pages', branch, to, msg: msgJoin(_msg, pkg, 'docs: update docs ${{name}} ${{version}}'), ...gitFields(a, pkg)},
51
+ manifest: {channel: 'gh-pages', name: pkg.name, version: pkg.version, branch, to, msg: msgJoin(_msg, pkg, 'docs: update docs ${{name}} ${{version}}'), ...gitFields(a, pkg)},
39
52
  files: a.docsDir ? [{name: 'docs', source: a.docsDir}] : [],
40
53
  }
41
54
  },
@@ -44,7 +57,7 @@ const entry = {
44
57
  const [branch = 'changelog', file = `${pkg.name.replace(/[^a-z0-9-]/ig, '')}-changelog.md`, ..._msg] = asTuple(pkg.config.changelog, ['branch', 'file', 'msg'])
45
58
  return {
46
59
  channel: 'changelog',
47
- manifest: {channel: 'changelog', releaseNotes: a.releaseNotes, branch, file, msg: msgJoin(_msg, pkg, 'chore: update changelog ${{name}}'), ...gitFields(a, pkg)},
60
+ manifest: {channel: 'changelog', name: pkg.name, version: pkg.version, releaseNotes: a.releaseNotes, branch, file, msg: msgJoin(_msg, pkg, 'chore: update changelog ${{name}}'), ...gitFields(a, pkg)},
48
61
  files: [],
49
62
  }
50
63
  },
@@ -0,0 +1,81 @@
1
+ import {fs, path, glob} from 'zx-extra'
2
+ import {packTar, unpackTar} from '../tar.js'
3
+ import {log} from '../log.js'
4
+
5
// Channel names handled through the git side of delivery (see splitSteps);
// everything else (e.g. npm) is treated as an external registry step.
const GIT_CHANNELS = new Set(['git-tag', 'gh-release', 'changelog', 'gh-pages', 'meta'])

// parcel.{sha7}.{channel}.{...}.tar — channel is the 3rd dot-segment
export const parcelChannel = (name) => {
  const segments = name.replace(/\.tar$/, '').split('.')
  return segments[2]
}

// Partition channel names into ordered delivery steps: git-bound channels
// first, then the rest. Relative order inside each group is preserved and
// empty groups are dropped.
const splitSteps = (channelNames) => {
  const git = []
  const ext = []
  for (const name of channelNames) (GIT_CHANNELS.has(name) ? git : ext).push(name)
  return [git, ext].filter((step) => step.length > 0)
}
15
+
16
/**
 * Builds the `directive` parcel — a manifest tar describing the whole batch:
 * which packages are queued, the delivery steps for each, and which parcel
 * tars belong to this commit.
 *
 * @param {object} ctx - release context (not read here; kept for call-site symmetry)
 * @param {object[]} packedPkgs - packed packages; each carries ctx.git, tars, activeTransport
 * @param {string} outputDir - directory to write the directive tar into
 * @returns {Promise<string>} path of the written directive tar
 * @throws {Error} when `packedPkgs` is empty — there is no git sha/timestamp to anchor the directive
 */
export const buildDirective = async (ctx, packedPkgs, outputDir) => {
  // Explicit guard: the original `packedPkgs[0].ctx.git` would throw an opaque TypeError.
  if (!packedPkgs?.length) throw new Error('buildDirective: packedPkgs is empty')

  const {sha, timestamp} = packedPkgs[0].ctx.git
  const packages = {}
  const parcels = []

  for (const pkg of packedPkgs) {
    const pkgParcels = (pkg.tars || []).map(t => path.basename(t))
    packages[pkg.name] = {
      version: pkg.version,
      tag: pkg.tag,
      deliver: splitSteps(pkg.activeTransport || []),
      parcels: pkgParcels,
    }
    parcels.push(...pkgParcels)
  }

  const sha7 = sha.slice(0, 7)
  const manifest = {
    channel: 'directive',
    sha, timestamp: Number(timestamp),
    queue: packedPkgs.map(p => p.name),
    packages, parcels,
  }

  // Name embeds sha7 + timestamp so directives for different commits/runs coexist.
  const tarPath = path.join(outputDir, `parcel.${sha7}.directive.${timestamp}.tar`)
  await packTar(tarPath, manifest, [])
  return tarPath
}
44
+
45
// Reads a directive manifest back out of its tar.
// Returns null when the file has been replaced by a textual sentinel
// ('released' / 'conflict') or when the unpacked manifest is not a directive.
export const parseDirective = async (tarPath) => {
  const raw = await fs.readFile(tarPath, 'utf8').catch(() => null)
  if (raw === 'released' || raw === 'conflict') return null

  const {manifest} = await unpackTar(tarPath, `${tarPath}.d`)
  return manifest.channel === 'directive' ? manifest : null
}
53
+
54
// Finds every directive parcel in `dir` and returns the parsed manifests,
// each augmented with its tarPath, sorted oldest-first by embedded timestamp.
export const scanDirectives = async (dir) => {
  const tars = await glob(path.join(dir, 'parcel.*.directive.*.tar'))

  // Each parse is independent (unpack dirs are derived per-tar), so run them
  // concurrently instead of awaiting one-by-one.
  const parsed = await Promise.all(tars.map(async (tarPath) => {
    const d = await parseDirective(tarPath)
    return d && {...d, tarPath}
  }))

  return parsed
    .filter(Boolean)
    .sort((a, b) => a.timestamp - b.timestamp)
}
65
+
66
// Marks stale parcels of the same commit as orphans: any tar sharing the
// directive's sha7 prefix that is neither listed in the directive nor a
// directive itself gets its content replaced with the 'orphan' sentinel.
export const invalidateOrphans = async (dir, directive) => {
  const sha7 = directive.sha.slice(0, 7)
  const known = new Set(directive.parcels || [])
  const candidates = await glob(path.join(dir, `parcel.${sha7}.*.tar`))
  let invalidated = 0

  for (const tarPath of candidates) {
    const basename = path.basename(tarPath)
    const isDirective = parcelChannel(basename) === 'directive'
    if (isDirective || known.has(basename)) continue

    await fs.writeFile(tarPath, 'orphan')
    invalidated++
  }

  if (invalidated) log.info(`invalidated ${invalidated} orphan parcel(s) for ${sha7}`)
}
@@ -0,0 +1,4 @@
1
// Public surface of the parcel module.
export {buildParcels} from './build.js'
export {verifyParcels} from './verify.js'
export {buildDirective, parseDirective, scanDirectives, invalidateOrphans, parcelChannel} from './directive.js'

// Default directory parcel tars are written to / read from.
export const PARCELS_DIR = 'parcels'
@@ -0,0 +1,46 @@
1
+ import {path} from 'zx-extra'
2
+ import {parcelChannel} from './directive.js'
3
+
4
// Validates parcel tars against the expected release context.
//
// context: {sha7, packages} — packages presumably maps pkg name → {tag, channels, ...}
// (TODO confirm shape against the --verify context producer).
// Returns {verified, errors}: tar paths that passed, plus human-readable
// reasons for the ones that did not. Problems are reported, never thrown.
export const verifyParcels = (tars, context) => {
  const {sha7, packages: expected} = context
  const errors = []
  const verified = []

  for (const tarPath of tars) {
    const name = path.basename(tarPath)

    if (!name.startsWith(`parcel.${sha7}.`)) {
      errors.push(`sha mismatch: ${name}`)
      continue
    }

    const channel = parcelChannel(name)
    if (!channel) {
      errors.push(`malformed name: ${name}`)
      continue
    }

    // The directive describes the batch itself; it always passes.
    if (channel === 'directive') {
      verified.push(tarPath)
      continue
    }

    // Match the parcel to its package by tag. Several tags may match when one
    // is a dot-prefix of another (e.g. `v1.2` vs `v1.2.3` both match
    // `.v1.2.3.`), so prefer the longest — most specific — tag instead of
    // whichever entry happens to come first.
    const matches = Object.entries(expected).filter(([, pkg]) =>
      pkg.tag && name.includes(`.${pkg.tag}.`)
    )
    if (matches.length === 0) {
      errors.push(`unexpected parcel (no matching package): ${name}`)
      continue
    }
    const [pkgName, pkg] = matches.reduce((best, cur) =>
      cur[1].tag.length > best[1].tag.length ? cur : best
    )

    // NOTE(review): tolerate a missing `channels` list — report the parcel as
    // unexpected instead of crashing the whole verification pass.
    if (!(pkg.channels ?? []).includes(channel)) {
      errors.push(`unexpected channel '${channel}' for ${pkgName}: ${name}`)
      continue
    }

    verified.push(tarPath)
  }

  return {verified, errors}
}
@@ -1,24 +1,52 @@
1
1
  import os from 'node:os'
2
2
  import {createRequire} from 'node:module'
3
- import {$, within, glob, path} from 'zx-extra'
3
+ import {$, within} from 'zx-extra'
4
4
  import {queuefy} from 'queuefy'
5
5
 
6
6
  import {createReport, log} from './log.js'
7
- import {topo, traverseQueue} from './depot/deps.js'
7
+ import {topo} from './depot/deps.js'
8
8
  import {exec} from './depot/exec.js'
9
- import {contextify} from './depot/steps/contextify.js'
10
- import {analyze} from './depot/steps/analyze.js'
11
- import {build} from './depot/steps/build.js'
12
- import {pack} from './depot/steps/pack.js'
13
- import {publish} from './depot/steps/publish.js'
14
- import {clean} from './depot/steps/clean.js'
15
- import {test} from './depot/steps/test.js'
16
- import {deliver, defaultOrder as channels} from './courier/index.js'
9
+ import {defaultOrder as channels} from './courier/index.js'
10
+ import {runReceive} from './modes/receive.js'
11
+ import {runVerify} from './modes/verify.js'
12
+ import {runDeliver} from './modes/deliver.js'
13
+ import {runPack} from './modes/pack.js'
17
14
 
18
- const PARCELS_DIR = 'parcels'
19
15
  const ZBR_VERSION = createRequire(import.meta.url)('../../../../package.json').version
20
16
 
17
+ const HELP = `
18
+ zx-bulk-release v${ZBR_VERSION}
19
+
20
+ Usage: npx zx-bulk-release [options]
21
+
22
+ Modes:
23
+ (no flags) All-in-one: analyze, build, test, pack, deliver
24
+ --receive Analyze & preflight. Writes .zbr-context.json. Run BEFORE deps install.
25
+ --pack [dir] Build, test, pack tars to dir. [default: parcels]
26
+ --verify [dir] Validate parcels against context, copy to parcels/. [default: parcels]
27
+ --deliver [dir] Deliver parcels through channels. [default: parcels]
28
+
29
+ Options:
30
+ --context <path> Path to .zbr-context.json (with --verify). [default: .zbr-context.json]
31
+ --dry-run, --no-publish Disable any publish / remote-mutating logic.
32
+ --no-build Skip buildCmd.
33
+ --no-test Skip testCmd.
34
+ --snapshot Publish snapshot versions to npm only.
35
+ --ignore <a,b> Packages to ignore.
36
+ --include-private Include private packages.
37
+ --concurrency <n> Build/publish thread limit. [default: os.cpus().length]
38
+ --only-workspace-deps Recognize only workspace: deps as graph edges.
39
+ --no-npm-fetch Disable npm artifact fetching.
40
+ --report <path> Persist release state to file.
41
+ --debug Enable verbose mode.
42
+ -v, --version Print version.
43
+ -h, --help Show this help.
44
+ `.trim()
45
+
21
46
  export const run = async ({cwd = process.cwd(), env: _env, flags = {}} = {}) => within(async () => {
47
+ if (flags.h || flags.help)
48
+ return console.log(HELP)
49
+
22
50
  if (flags.v || flags.version)
23
51
  return console.log(ZBR_VERSION)
24
52
 
@@ -26,83 +54,13 @@ export const run = async ({cwd = process.cwd(), env: _env, flags = {}} = {}) =>
26
54
  log.secret(env.GH_TOKEN, env.GITHUB_TOKEN, env.NPM_TOKEN)
27
55
  log.info(`zx-bulk-release@${ZBR_VERSION}`)
28
56
 
29
- return flags.deliver
30
- ? runDeliver({env, flags})
31
- : runPipeline({cwd, env, flags})
32
- })
33
-
34
- // --deliver [dir]
35
- const runDeliver = async ({env, flags}) => {
36
- const dir = typeof flags.deliver === 'string' ? flags.deliver : PARCELS_DIR
37
- const report = createReport({flags})
38
-
39
- $.memo = new Map()
40
- $.report = report
41
-
42
- report.setStatus('inspecting')
43
-
44
- const tars = await glob(path.join(dir, 'parcel.*.tar'))
45
- if (!tars.length) return report.setStatus('success').log(`no parcels in ${dir}`)
46
-
47
- report.setStatus('delivering').log(`parcels: ${tars.length}`)
48
- const result = await deliver(tars, env, {dryRun: flags.dryRun})
49
- report.set('delivery', result).setStatus('success')
50
-
51
- for (const {channel, name, version} of result.entries)
52
- log.info(`${channel} ${name}@${version}`)
53
- log.info(`done: ${result.delivered} delivered, ${result.skipped} skipped`)
54
- }
57
+ if (flags.verify) return runVerify({cwd, flags})
58
+ if (flags.deliver) return runDeliver({env, flags})
55
59
 
56
- // full pipeline (legacy / --pack)
57
- const runPipeline = async ({cwd, env, flags}) => {
58
60
  const ctx = await createContext({flags, env, cwd})
59
- const {report, packages, queue, prev} = ctx
60
-
61
- const forEachPkg = (cb) => traverseQueue({queue, prev, cb: (name) => within(async () => {
62
- $.scope = name
63
- await contextify(packages[name], ctx)
64
- return cb(packages[name])
65
- })})
66
-
67
- report
68
- .log('queue:', queue)
69
- .log('graphs', ctx.graphs)
70
-
71
- try {
72
- await forEachPkg(async (pkg) => {
73
- report.setStatus('analyzing', pkg.name)
74
- await analyze(pkg)
75
- report.set({
76
- config: pkg.config,
77
- version: pkg.version,
78
- prevVersion: pkg.latest.tag?.version || pkg.manifest.version,
79
- releaseType: pkg.releaseType,
80
- tag: pkg.tag,
81
- }, pkg.name)
82
- })
83
-
84
- report.setStatus('pending')
85
-
86
- await forEachPkg(async (pkg) => {
87
- if (!pkg.releaseType) { pkg.skipped = true; return report.setStatus('skipped', pkg.name) }
88
- if (flags.build !== false) { report.setStatus('building', pkg.name); await build(pkg) }
89
- if (flags.test !== false) { report.setStatus('testing', pkg.name); await test(pkg) }
90
- if (flags.dryRun || flags.publish === false) return report.setStatus('success', pkg.name)
91
-
92
- report.setStatus('packing', pkg.name); await pack(pkg)
93
- if (flags.pack) return report.setStatus('packed', pkg.name)
94
-
95
- report.setStatus('publishing', pkg.name); await publish(pkg)
96
- report.setStatus('success', pkg.name)
97
- })
98
- } catch (e) {
99
- report.error(e, e.stack).set('error', e).setStatus('failure')
100
- throw e
101
- } finally {
102
- await clean(ctx)
103
- }
104
- report.setStatus('success').log('Great success!')
105
- }
61
+ if (flags.receive) return runReceive({cwd, env, flags}, ctx)
62
+ return runPack({cwd, env, flags}, ctx)
63
+ })
106
64
 
107
65
  export const createContext = async ({flags, env, cwd}) => {
108
66
  const {packages, queue, root, prev, graphs} = await topo({cwd, flags})
@@ -26,7 +26,7 @@ export const packTar = async (tarPath, manifest, files = []) => {
26
26
// Streams a file through SHA-1 and returns the first 6 hex characters —
// a short content fingerprint for naming purposes, not a cryptographic guarantee.
export const hashFile = async (filePath) => {
  const sha1 = crypto.createHash('sha1')
  await pipeline(createReadStream(filePath), sha1)
  const digest = sha1.digest('hex')
  return digest.slice(0, 6)
}
31
31
 
32
32
  const addDir = async (pack, prefix, dirPath) => {
@@ -72,3 +72,19 @@ export const memoizeBy = (fn, getKey = v => v) => {
72
72
export const camelize = (str) => {
  // kebab-case → camelCase: the character following each '-' is uppercased in place.
  return str.replace(/-./g, (pair) => pair.charAt(1).toUpperCase())
}
73
73
 
74
74
export const asArray = (value) => {
  // Normalize to array: arrays pass through by reference, scalars get wrapped.
  return Array.isArray(value) ? value : [value]
}
75
+
76
/**
 * Runs `fn` over `tasks` with at most `concurrency` calls in flight at once.
 * Fail-fast: the first rejection rejects the pool (in-flight tasks are not
 * cancelled). Results are discarded — intended for side-effecting work.
 *
 * @param {Array} tasks - work items, consumed in order
 * @param {number} concurrency - max simultaneous fn() calls (clamped to >= 1)
 * @param {(task: any) => Promise<any>} fn - worker; synchronous throws are treated as rejections
 * @returns {Promise<void>}
 */
export const pool = async (tasks, concurrency, fn) => {
  // Guard: 0, negative or NaN concurrency would leave the promise pending forever.
  const limit = concurrency >= 1 ? concurrency : 1
  const active = new Set()
  let i = 0

  await new Promise((resolve, reject) => {
    const next = () => {
      if (i >= tasks.length && active.size === 0) return resolve()
      while (active.size < limit && i < tasks.length) {
        const task = tasks[i++]
        // Promise.resolve().then(...) routes a synchronous throw in fn into
        // `reject` instead of becoming an unhandled rejection when `next`
        // runs inside a fulfillment handler.
        const p = Promise.resolve()
          .then(() => fn(task))
          .then(() => { active.delete(p); next() }, reject)
        active.add(p)
      }
    }
    next()
  })
}
@@ -112,7 +112,7 @@ export const makeCtx = (overrides = {}) => ({
112
112
  report: overrides.report ?? makeReport(),
113
113
  channels: overrides.channels ?? [],
114
114
  run: overrides.run ?? (async () => {}),
115
- git: {sha: 'abc1234567890', root: tmpDir, ...overrides.git},
115
+ git: {sha: 'abc1234567890', root: tmpDir, timestamp: '1700000000', ...overrides.git},
116
116
  })
117
117
 
118
118
  export const makeReport = () => {