bulk-release 3.0.4 → 3.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +14 -0
- package/README.md +146 -24
- package/package.json +1 -1
- package/src/main/js/config.js +3 -2
- package/src/main/js/post/api/gh.js +11 -0
- package/src/main/js/post/api/git.js +19 -0
- package/src/main/js/post/courier/channels/changelog.js +8 -1
- package/src/main/js/post/courier/channels/gh-pages.js +8 -1
- package/src/main/js/post/courier/channels/gh-release.js +11 -1
- package/src/main/js/post/courier/channels/git-tag.js +25 -0
- package/src/main/js/post/courier/channels/meta.js +8 -1
- package/src/main/js/post/courier/channels/npm.js +27 -14
- package/src/main/js/post/courier/directive.js +81 -0
- package/src/main/js/post/courier/index.js +164 -16
- package/src/main/js/post/courier/parcel.js +15 -2
- package/src/main/js/post/courier/semaphore.js +31 -0
- package/src/main/js/post/courier/seniority.js +19 -0
- package/src/main/js/post/depot/context.js +45 -0
- package/src/main/js/post/depot/reconcile.js +50 -0
- package/src/main/js/post/depot/steps/contextify.js +2 -1
- package/src/main/js/post/depot/steps/pack.js +3 -1
- package/src/main/js/post/depot/steps/publish.js +1 -11
- package/src/main/js/post/modes/deliver.js +24 -0
- package/src/main/js/post/modes/pack.js +71 -0
- package/src/main/js/post/modes/receive.js +71 -0
- package/src/main/js/post/modes/verify.js +83 -0
- package/src/main/js/post/release.js +23 -112
- package/src/main/js/post/tar.js +1 -1
- package/src/test/js/utils/mock.js +1 -1
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
import {glob, path, fs} from 'zx-extra'
|
|
2
|
+
import {log} from '../log.js'
|
|
3
|
+
import {parcelChannel} from '../courier/directive.js'
|
|
4
|
+
|
|
5
|
+
const PARCELS_DIR = 'parcels'
|
|
6
|
+
|
|
7
|
+
/**
 * `--verify` mode: check the parcel tarballs in the input dir against the
 * release context produced by the pack step, then copy the verified parcels
 * into the canonical parcels dir.
 *
 * @param {object} opts
 * @param {string} opts.cwd    base dir all paths are resolved against
 * @param {object} opts.flags  CLI flags (`verify` may carry a custom input dir,
 *                             `context` a custom context file path)
 * @throws when the context is missing/invalid or any parcel fails a check
 */
export const runVerify = async ({cwd, flags}) => {
  // Resolve every path against `cwd` so the mode does not silently depend on
  // process.cwd(): the original resolved inputDir (and the in-place
  // comparison) against process.cwd() while contextPath/outputDir used `cwd`.
  const inputDir = path.resolve(cwd, typeof flags.verify === 'string' ? flags.verify : PARCELS_DIR)
  const contextPath = path.resolve(cwd, typeof flags.context === 'string' ? flags.context : '.zbr-context.json')
  const outputDir = path.resolve(cwd, PARCELS_DIR)

  log.info(`verifying parcels in ${inputDir} against ${contextPath}`)

  // Best-effort read: any read/parse failure is treated as "no context".
  let context
  try { context = await fs.readJson(contextPath) } catch { context = null }
  if (!context || context.status !== 'proceed') {
    throw new Error(`no valid context at ${contextPath}`)
  }

  const tars = await glob(path.join(inputDir, 'parcel.*.tar'))
  if (!tars.length) {
    log.info('no parcels to verify')
    return
  }

  const {sha7, packages: expected} = context
  const errors = []
  const verified = []

  for (const tarPath of tars) {
    const error = checkParcel(path.basename(tarPath), sha7, expected)
    if (error) errors.push(error)
    else verified.push(tarPath)
  }

  if (errors.length) {
    for (const e of errors) log.error(`verify: ${e}`)
    throw new Error(`parcel verification failed: ${errors.length} error(s)`)
  }

  // copy verified parcels to output (no-op when verifying in place)
  if (inputDir !== outputDir) {
    await fs.ensureDir(outputDir)
    for (const tarPath of verified) {
      await fs.copy(tarPath, path.join(outputDir, path.basename(tarPath)))
    }
    log.info(`${verified.length} parcel(s) verified and copied to ${outputDir}`)
  } else {
    log.info(`${verified.length} parcel(s) verified in place`)
  }
}

// Validate a single parcel file name against the release context.
// Returns an error string, or null when the parcel is acceptable.
const checkParcel = (name, sha7, expected) => {
  // sha7 prefix must match the commit the context was built for
  if (!name.startsWith(`parcel.${sha7}.`)) {
    return `sha mismatch: ${name}`
  }

  const channel = parcelChannel(name)
  if (!channel) {
    return `malformed name: ${name}`
  }

  // directive parcels are not tied to any particular package
  if (channel === 'directive') {
    return null
  }

  // match to an expected package by tag
  const belongsTo = Object.entries(expected).find(([, pkg]) =>
    pkg.tag && name.includes(`.${pkg.tag}.`)
  )
  if (!belongsTo) {
    return `unexpected parcel (no matching package): ${name}`
  }

  const [pkgName, pkg] = belongsTo
  if (!pkg.channels.includes(channel)) {
    return `unexpected channel '${channel}' for ${pkgName}: ${name}`
  }

  return null
}
|
|
@@ -1,134 +1,45 @@
|
|
|
1
1
|
import os from 'node:os'
|
|
2
2
|
import {createRequire} from 'node:module'
|
|
3
|
-
import {$, within
|
|
3
|
+
import {$, within} from 'zx-extra'
|
|
4
4
|
import {queuefy} from 'queuefy'
|
|
5
|
-
|
|
5
|
+
|
|
6
6
|
import {createReport, log} from './log.js'
|
|
7
|
+
import {topo} from './depot/deps.js'
|
|
7
8
|
import {exec} from './depot/exec.js'
|
|
9
|
+
import {defaultOrder as channels} from './courier/index.js'
|
|
10
|
+
import {runReceive} from './modes/receive.js'
|
|
11
|
+
import {runVerify} from './modes/verify.js'
|
|
12
|
+
import {runDeliver} from './modes/deliver.js'
|
|
13
|
+
import {runPack} from './modes/pack.js'
|
|
8
14
|
|
|
9
|
-
|
|
10
|
-
import {analyze} from './depot/steps/analyze.js'
|
|
11
|
-
import {build} from './depot/steps/build.js'
|
|
12
|
-
import {pack} from './depot/steps/pack.js'
|
|
13
|
-
import {publish} from './depot/steps/publish.js'
|
|
14
|
-
import {clean} from './depot/steps/clean.js'
|
|
15
|
-
import {test} from './depot/steps/test.js'
|
|
16
|
-
|
|
17
|
-
import {deliver, defaultOrder as channels} from './courier/index.js'
|
|
15
|
+
const ZBR_VERSION = createRequire(import.meta.url)('../../../../package.json').version
|
|
18
16
|
|
|
19
|
-
const
|
|
17
|
+
// CLI entry point. Prints the version for -v/--version, otherwise wires up the
// environment and dispatches to one of the release modes by flag:
// --verify, --deliver (context-free), --receive, or the default pack mode.
export const run = async ({cwd = process.cwd(), env: _env, flags = {}} = {}) => within(async () => {
  if (flags.v || flags.version) {
    return console.log(ZBR_VERSION)
  }

  const env = {...process.env, ..._env}
  log.secret(env.GH_TOKEN, env.GITHUB_TOKEN, env.NPM_TOKEN)
  log.info(`zx-bulk-release@${ZBR_VERSION}`)

  // context-free modes
  if (flags.verify) return runVerify({cwd, flags})
  if (flags.deliver) return runDeliver({env, flags})

  // context-bound modes
  const ctx = await createContext({flags, env, cwd})
  const opts = {cwd, env, flags}
  return flags.receive
    ? runReceive(opts, ctx)
    : runPack(opts, ctx)
})
|
|
116
32
|
|
|
117
|
-
export const createContext = async ({flags, env
|
|
33
|
+
// Builds the shared release context: repo topology, progress report, and a
// concurrency-limited executor. Also wires the global zx ($) state used
// throughout the run: report, env, verbosity flags, and a memo cache.
export const createContext = async ({flags, env, cwd}) => {
  const topology = await topo({cwd, flags})
  const {packages, queue, root, prev, graphs} = topology
  const report = createReport({packages, queue, flags})

  $.report = report
  $.env = env
  // debug flag / DEBUG env forces verbosity; quiet mirrors the inverse
  $.verbose = !!(flags.debug || env.DEBUG) || $.verbose
  $.quiet = !$.verbose
  $.memo = new Map()

  const concurrency = flags.concurrency || os.cpus().length
  return {
    cwd,
    env,
    flags,
    root,
    packages,
    queue,
    prev,
    graphs,
    report,
    channels,
    run: queuefy(exec, concurrency),
  }
}
|
package/src/main/js/post/tar.js
CHANGED
|
@@ -26,7 +26,7 @@ export const packTar = async (tarPath, manifest, files = []) => {
|
|
|
26
26
|
// Computes a short content fingerprint for a file: the first 6 hex chars of
// its SHA-1 digest (used to stamp parcel/tar names, not for security).
export const hashFile = async (filePath) => {
  const sha1 = crypto.createHash('sha1')
  await pipeline(createReadStream(filePath), sha1)
  const digest = sha1.digest('hex')
  return digest.slice(0, 6)
}
|
|
31
31
|
|
|
32
32
|
const addDir = async (pack, prefix, dirPath) => {
|
|
@@ -112,7 +112,7 @@ export const makeCtx = (overrides = {}) => ({
|
|
|
112
112
|
report: overrides.report ?? makeReport(),
|
|
113
113
|
channels: overrides.channels ?? [],
|
|
114
114
|
run: overrides.run ?? (async () => {}),
|
|
115
|
-
git: {sha: 'abc1234567890', root: tmpDir, ...overrides.git},
|
|
115
|
+
git: {sha: 'abc1234567890', root: tmpDir, timestamp: '1700000000', ...overrides.git},
|
|
116
116
|
})
|
|
117
117
|
|
|
118
118
|
export const makeReport = () => {
|