bulk-release 2.21.0 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +22 -0
- package/README.md +140 -28
- package/package.json +12 -5
- package/src/main/js/index.js +1 -1
- package/src/main/js/{processor → post}/api/gh.js +0 -27
- package/src/main/js/{processor → post}/api/git.js +2 -10
- package/src/main/js/{processor → post}/api/npm.js +4 -2
- package/src/main/js/post/courier/channels/changelog.js +29 -0
- package/src/main/js/{processor/publishers → post/courier/channels}/cmd.js +1 -0
- package/src/main/js/post/courier/channels/gh-pages.js +30 -0
- package/src/main/js/post/courier/channels/gh-release.js +35 -0
- package/src/main/js/post/courier/channels/meta.js +34 -0
- package/src/main/js/post/courier/channels/npm.js +26 -0
- package/src/main/js/post/courier/index.js +113 -0
- package/src/main/js/post/courier/parcel.js +77 -0
- package/src/main/js/{processor → post/depot}/exec.js +2 -2
- package/src/main/js/{processor → post/depot}/generators/meta.js +3 -3
- package/src/main/js/{processor → post/depot}/generators/notes.js +1 -1
- package/src/main/js/{processor → post/depot}/steps/analyze.js +2 -2
- package/src/main/js/{processor → post/depot}/steps/build.js +2 -2
- package/src/main/js/{processor → post/depot}/steps/clean.js +2 -2
- package/src/main/js/{processor → post/depot}/steps/contextify.js +3 -3
- package/src/main/js/post/depot/steps/pack.js +59 -0
- package/src/main/js/post/depot/steps/publish.js +29 -0
- package/src/main/js/{processor → post/depot}/steps/test.js +1 -1
- package/src/main/js/{processor → post}/release.js +44 -37
- package/src/main/js/post/tar.js +73 -0
- package/src/test/js/utils/gh-server.js +33 -0
- package/src/test/js/utils/mock.js +132 -0
- package/src/test/js/{test-utils.js → utils/repo.js} +3 -3
- package/src/main/js/processor/publishers/changelog.js +0 -26
- package/src/main/js/processor/publishers/gh-pages.js +0 -32
- package/src/main/js/processor/publishers/gh-release.js +0 -41
- package/src/main/js/processor/publishers/meta.js +0 -58
- package/src/main/js/processor/publishers/npm.js +0 -15
- package/src/main/js/processor/steps/publish.js +0 -39
- package/src/main/js/processor/steps/teardown.js +0 -58
- /package/src/main/js/{processor → post/depot}/deps.js +0 -0
- /package/src/main/js/{processor → post/depot}/generators/tag.js +0 -0
- /package/src/main/js/{processor → post}/log.js +0 -0
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
import {$, tempy, within, path, semver, fs} from 'zx-extra'
|
|
2
|
+
import {unpackTar} from '../tar.js'
|
|
3
|
+
import {log} from '../log.js'
|
|
4
|
+
import meta from './channels/meta.js'
|
|
5
|
+
import npm from './channels/npm.js'
|
|
6
|
+
import ghRelease from './channels/gh-release.js'
|
|
7
|
+
import ghPages from './channels/gh-pages.js'
|
|
8
|
+
import changelog from './channels/changelog.js'
|
|
9
|
+
import cmd from './channels/cmd.js'
|
|
10
|
+
|
|
11
|
+
export {buildParcels} from './parcel.js'
|
|
12
|
+
|
|
13
|
+
export const channels = {meta, npm, 'gh-release': ghRelease, 'gh-pages': ghPages, changelog, cmd}
|
|
14
|
+
export const defaultOrder = ['meta', 'npm', 'gh-release', 'gh-pages', 'changelog', 'cmd']
|
|
15
|
+
|
|
16
|
+
// Run the optional `prepare` hook of each named channel, one after another.
export const prepare = async (names, pkg) => {
  for (const name of names) {
    await channels[name]?.prepare?.(pkg)
  }
}
|
|
19
|
+
|
|
20
|
+
// Invoke the named channel's `run` with the given args; resolves to undefined
// for unknown channels (optional chaining short-circuits).
export const runChannel = async (name, ...args) => {
  const channel = channels[name]
  return channel?.run(...args)
}
|
|
21
|
+
|
|
22
|
+
// Resolve a parcel manifest against the environment: substitute `${{VAR}}`
// placeholders in string values (missing vars become ''), then derive
// git-over-https auth fields from the resolved repo coordinates.
export const resolveManifest = (manifest, env = process.env) => {
  const interpolate = (value) =>
    typeof value === 'string'
      ? value.replace(/\$\{\{(\w+)\}\}/g, (_, key) => env[key] || '')
      : value

  const resolved = Object.fromEntries(
    Object.entries(manifest).map(([key, value]) => [key, interpolate(value)])
  )

  const {repoHost, repoName, originUrl} = resolved
  const token = env.GH_TOKEN || env.GITHUB_TOKEN || ''
  const user = env.GH_USER || env.GH_USERNAME || env.GITHUB_USER || env.GITHUB_USERNAME || 'x-access-token'

  // Prefer an explicitly authed URL when host, repo and token are all known;
  // otherwise fall back to the recorded origin URL, if any.
  if (repoHost && repoName && token) {
    resolved.repoAuthedUrl = `https://${user}:${token}@${repoHost}/${repoName}.git`
    resolved.ghBasicAuth = `${user}:${token}`
  } else if (originUrl) {
    resolved.repoAuthedUrl = originUrl
  }

  return resolved
}
|
|
40
|
+
|
|
41
|
+
// Inspect one parcel tar: skip already-processed markers, unpack the archive
// and match its manifest to a known channel.
// Returns null (already handled), {warn, tarPath?} (unusable) or a ready
// parcel {ch, resolved, destDir, tarPath}.
const openParcel = async (tarPath, env) => {
  // Delivered/skipped parcels are overwritten with a short text marker.
  const marker = await fs.readFile(tarPath, 'utf8').catch(() => null)
  if (marker === 'released' || marker === 'skip') return null

  const destDir = tempy.temporaryDirectory()
  const {manifest} = await unpackTar(tarPath, destDir)
  const resolved = resolveManifest(manifest, env)
  const channel = channels[resolved.channel]

  if (!channel) {
    return {warn: `unknown channel '${resolved.channel || '<none>'}'`}
  }

  const missing = (channel.requires || []).filter(field => !resolved[field])
  if (missing.length !== 0) {
    return {warn: `missing credentials — ${missing.join(', ')}`, tarPath}
  }

  return {ch: channel, resolved, destDir, tarPath}
}
|
|
57
|
+
|
|
58
|
+
// Run `fn` over `tasks` with at most `concurrency` invocations in flight.
// Rejects on the first failure; resolves once every task has completed.
const pool = async (tasks, concurrency, fn) => {
  const inFlight = new Set()
  let cursor = 0
  await new Promise((resolve, reject) => {
    const pump = () => {
      // All tasks dispatched and none pending — done.
      if (cursor >= tasks.length && inFlight.size === 0) return resolve()
      while (inFlight.size < concurrency && cursor < tasks.length) {
        const task = tasks[cursor]
        cursor += 1
        const running = fn(task).then(() => {
          inFlight.delete(running)
          pump()
        }, reject)
        inFlight.add(running)
      }
    }
    pump()
  })
}
|
|
73
|
+
|
|
74
|
+
// Parcels grouped by package, sorted by semver asc (latest last).
|
|
75
|
+
// Groups run in parallel (concurrency-limited), entries within a group — sequential.
|
|
76
|
+
export const deliver = async (tars, env = process.env, {concurrency = 4} = {}) => {
  const groups = new Map()

  // Open every tar: drop processed markers, warn (and mark 'skip') on
  // unusable parcels, group the deliverable ones by package name.
  for (const tarPath of tars) {
    const fname = path.basename(tarPath)
    try {
      const parcel = await openParcel(tarPath, env)
      if (!parcel) continue
      if (parcel.warn) {
        log.warn(`skipping ${fname}: ${parcel.warn}`)
        if (parcel.tarPath) await fs.writeFile(tarPath, 'skip')
        continue
      }
      const key = parcel.resolved.name || parcel.resolved.channel
      const group = groups.get(key) || []
      group.push(parcel)
      groups.set(key, group)
    } catch (e) {
      log.warn(`skipping ${fname}: ${e.message}`)
    }
  }

  // Oldest version first, so the latest release of a package lands last.
  for (const group of groups.values()) {
    group.sort((a, b) => semver.compare(a.resolved.version || '0.0.0', b.resolved.version || '0.0.0'))
  }

  let delivered = 0
  await pool([...groups.values()], concurrency, async (group) => {
    for (const {ch, resolved, destDir, tarPath} of group) {
      await within(async () => {
        $.scope = resolved.name || resolved.channel
        await ch.run(resolved, destDir)
      })
      // Mark the tar as consumed so re-runs skip it.
      await fs.writeFile(tarPath, 'released')
      delivered += 1
    }
  })

  return delivered
}
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
import {asTuple, msgJoin} from '../../util.js'
|
|
2
|
+
|
|
3
|
+
// Common git-transport fields shared by the git-backed channel manifests
// (gh-pages, changelog, meta).
const gitFields = ({repoHost, repoName, originUrl}, {config}) => ({
  repoHost,
  repoName,
  originUrl,
  gitCommitterEmail: config.gitCommitterEmail,
  gitCommitterName: config.gitCommitterName,
})
|
|
10
|
+
|
|
11
|
+
// Parcel entry builders, one per channel. Each takes (pkg, ctx, artifacts)
// and returns {channel, manifest, files}:
//  - manifest: the data the courier channel needs at delivery time. '${{VAR}}'
//    placeholders are kept literal here and substituted from the environment
//    later by resolveManifest (courier/index.js).
//  - files: payloads to embed into the parcel tar (npm tarball, docs, assets).
// NOTE(review): manifest key order is significant downstream — packTar
// serializes it with JSON.stringify and the parcel filename hash covers
// those bytes — so keys should not be reordered casually.
const entry = {
  npm: (pkg, ctx, a) => ({
    channel: 'npm',
    manifest: {
      channel: 'npm',
      name: pkg.name, version: pkg.version, preversion: pkg.preversion,
      registry: '${{NPM_REGISTRY}}', token: '${{NPM_TOKEN}}',
      config: '${{NPM_CONFIG}}', provenance: '${{NPM_PROVENANCE}}', oidc: '${{NPM_OIDC}}',
    },
    // The tarball produced by `npm pack` (see pack step), when present.
    files: a.npmTarball ? [{name: 'package.tgz', source: a.npmTarball}] : [],
  }),

  'gh-release': (pkg, ctx, a) => ({
    channel: 'gh-release',
    manifest: {
      channel: 'gh-release',
      tag: pkg.tag, repoName: a.repoName, releaseNotes: a.releaseNotes,
      token: '${{GH_TOKEN}}', apiUrl: '${{GH_API_URL}}',
      // Copy so later mutation of pkg.config.ghAssets cannot leak into the manifest.
      assets: pkg.config.ghAssets ? [...pkg.config.ghAssets] : undefined,
    },
    files: a.assetsDir ? [{name: 'assets', source: a.assetsDir}] : [],
  }),

  'gh-pages': (pkg, ctx, a) => {
    // pkg.config.ghPages is a positional tuple: [branch, from, to, ...msg];
    // `from` is only used at pack time, hence the hole in the destructuring.
    const [branch = 'gh-pages', , to = '.', ..._msg] = asTuple(pkg.config.ghPages, ['branch', 'from', 'to', 'msg'])
    return {
      channel: 'gh-pages',
      manifest: {channel: 'gh-pages', branch, to, msg: msgJoin(_msg, pkg, 'docs: update docs ${{name}} ${{version}}'), ...gitFields(a, pkg)},
      files: a.docsDir ? [{name: 'docs', source: a.docsDir}] : [],
    }
  },

  changelog: (pkg, ctx, a) => {
    // Default changelog filename: package name stripped to [a-z0-9-].
    const [branch = 'changelog', file = `${pkg.name.replace(/[^a-z0-9-]/ig, '')}-changelog.md`, ..._msg] = asTuple(pkg.config.changelog, ['branch', 'file', 'msg'])
    return {
      channel: 'changelog',
      manifest: {channel: 'changelog', releaseNotes: a.releaseNotes, branch, file, msg: msgJoin(_msg, pkg, 'chore: update changelog ${{name}}'), ...gitFields(a, pkg)},
      files: [],
    }
  },

  meta: (pkg, ctx, a) => ({
    channel: 'meta',
    manifest: {
      channel: 'meta',
      name: pkg.name, version: pkg.version, tag: pkg.tag,
      type: pkg.config.meta?.type ?? null, data: pkg.meta,
      ...gitFields(a, pkg),
    },
    files: [],
  }),
}
|
|
63
|
+
|
|
64
|
+
// Fallback builder for channels without a dedicated entry:
// bare identity manifest (name/version/tag) and no embedded files.
const defaultEntry = (channel) => {
  return (pkg, ctx, a) => {
    const manifest = {channel, name: pkg.name, version: pkg.version, tag: pkg.tag}
    return {channel, manifest, files: []}
  }
}
|
|
69
|
+
|
|
70
|
+
// Build one parcel descriptor ({channel, manifest, files}) per requested
// channel, falling back to defaultEntry for channels without a builder.
export const buildParcels = (pkg, ctx, {
  channels: channelNames = [],
  npmTarball, releaseNotes, docsDir, assetsDir,
  repoName, repoHost, originUrl,
} = {}) => {
  const artifacts = {npmTarball, releaseNotes, docsDir, assetsDir, repoName, repoHost, originUrl}
  return channelNames.map((name) => {
    const build = entry[name] || defaultEntry(name)
    return build(pkg, ctx, artifacts)
  })
}
|
|
@@ -1,9 +1,9 @@
|
|
|
1
1
|
// Meta generator: builds pkg.meta payload and resolves latest-release meta from git tags / gh assets / meta branch.
|
|
2
2
|
|
|
3
3
|
import {semver, $, fs, path} from 'zx-extra'
|
|
4
|
-
import {fetchRepo, getTags as getGitTags, getRepo} from '
|
|
5
|
-
import {fetchManifest} from '
|
|
6
|
-
import {ghGetAsset} from '
|
|
4
|
+
import {fetchRepo, getTags as getGitTags, getRepo} from '../../api/git.js'
|
|
5
|
+
import {fetchManifest} from '../../api/npm.js'
|
|
6
|
+
import {ghGetAsset} from '../../api/gh.js'
|
|
7
7
|
import {parseTag} from './tag.js'
|
|
8
8
|
|
|
9
9
|
export const isAssetMode = (type) => type === 'asset' || type === 'assets'
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
// Release notes formatting. Pure except for a single getRepo() call to resolve repoPublicUrl.
|
|
2
2
|
|
|
3
|
-
import {getRepo} from '
|
|
3
|
+
import {getRepo} from '../../api/git.js'
|
|
4
4
|
import {formatTag} from './tag.js'
|
|
5
5
|
|
|
6
6
|
export const DIFF_TAG_URL = '${repoPublicUrl}/compare/${prevTag}...${newTag}'
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import {semver} from 'zx-extra'
|
|
2
|
-
import {log} from '
|
|
3
|
-
import {getCommits} from '
|
|
2
|
+
import {log} from '../../log.js'
|
|
3
|
+
import {getCommits} from '../../api/git.js'
|
|
4
4
|
import {updateDeps} from '../deps.js'
|
|
5
5
|
import {formatTag} from '../generators/tag.js'
|
|
6
6
|
|
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
import {unsetUserConfig} from '
|
|
2
|
-
import {npmRestore} from '
|
|
1
|
+
import {unsetUserConfig} from '../../api/git.js'
|
|
2
|
+
import {npmRestore} from '../../api/npm.js'
|
|
3
3
|
|
|
4
4
|
export const clean = async ({cwd, packages}) => {
|
|
5
5
|
await unsetUserConfig(cwd)
|
|
@@ -1,13 +1,13 @@
|
|
|
1
|
-
import {getPkgConfig} from '
|
|
1
|
+
import {getPkgConfig} from '../../../config.js'
|
|
2
2
|
import {getLatest} from '../generators/meta.js'
|
|
3
|
-
import {getRoot, getSha} from '
|
|
3
|
+
import {getRoot, getSha} from '../../api/git.js'
|
|
4
4
|
|
|
5
5
|
/**
|
|
6
6
|
* Global release context — one per `run()` invocation.
|
|
7
7
|
* Built by `createContext()` in release.js and extended with runtime bits (run, publishers).
|
|
8
8
|
*
|
|
9
9
|
* @typedef {object} ReleaseContext
|
|
10
|
-
* @property {object} flags CLI flags (build, test, publish, snapshot, dryRun,
|
|
10
|
+
* @property {object} flags CLI flags (build, test, publish, snapshot, dryRun, pack, deliver, ...).
|
|
11
11
|
* @property {Record<string,string>} env Resolved process env (process.env merged with per-run overrides).
|
|
12
12
|
* @property {string} cwd Repo working directory.
|
|
13
13
|
* @property {object} root Root package descriptor (from topo()).
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
import {$, tempy, fs, path} from 'zx-extra'
|
|
2
|
+
import {memoizeBy, asTuple} from '../../../util.js'
|
|
3
|
+
import {channels, prepare, buildParcels} from '../../courier/index.js'
|
|
4
|
+
import {npmPersist} from '../../api/npm.js'
|
|
5
|
+
import {getRepo} from '../../api/git.js'
|
|
6
|
+
import {formatReleaseNotes} from '../generators/notes.js'
|
|
7
|
+
import {ghPrepareAssets} from '../../api/gh.js'
|
|
8
|
+
import {packTar, hashFile} from '../../tar.js'
|
|
9
|
+
|
|
10
|
+
// Keep only known transport channels that apply to this package
// (and, in snapshot mode, only snapshot-capable ones).
const filterActive = (names, pkg, {snapshot = false} = {}) =>
  names.filter((name) => {
    const channel = channels[name]
    if (!channel || channel.transport === false) return false
    if (snapshot && !channel.snapshot) return false
    return channel.when(pkg)
  })
|
|
15
|
+
|
|
16
|
+
// Pack step: stage this package's release artifacts and wrap them into
// per-channel parcel tars named `parcel.<tag>.<channel>.<hash>.tar`.
// Memoized per package (memoizeBy). Side effects: runs `npm pack`, copies
// docs/assets into stageDir, writes tars, and sets pkg.tars / pkg.activeTransport.
export const pack = memoizeBy(async (pkg, ctx = pkg.ctx) => {
  const {channels: channelNames = [], flags} = ctx
  const snapshot = !!flags.snapshot
  // Only transport-enabled channels that apply to this pkg
  // (snapshot mode further restricts to snapshot-capable channels).
  const active = filterActive(channelNames, pkg, {snapshot})

  await prepare(active, pkg)
  await npmPersist(pkg)

  // --pack [dir]: stage into a persistent dir; otherwise use a throwaway temp dir.
  const outputDir = flags.pack ? (typeof flags.pack === 'string' ? flags.pack : 'parcels') : null
  const stageDir = outputDir || tempy.temporaryDirectory()
  if (outputDir) await fs.ensureDir(outputDir)
  const {repoName, repoHost, originUrl} = await getRepo(pkg.absPath, {basicAuth: pkg.config.ghBasicAuth})
  const artifacts = {repoName, repoHost, originUrl}

  // Stage only the artifacts the active channels will actually consume.
  if (active.includes('npm')) {
    // `npm pack` prints the generated tarball filename on stdout.
    const out = await $({cwd: pkg.absPath})`npm pack --pack-destination ${stageDir}`
    artifacts.npmTarball = path.join(stageDir, out.toString().trim())
  }
  if (active.includes('gh-release') || active.includes('changelog'))
    artifacts.releaseNotes = await formatReleaseNotes(pkg)
  if (active.includes('gh-pages')) {
    // ghPages config tuple: [branch, from, ...]; only `from` matters here.
    const [, from = 'docs'] = asTuple(pkg.config.ghPages, ['branch', 'from'])
    artifacts.docsDir = path.join(stageDir, 'docs')
    await fs.copy(path.join(pkg.absPath, from), artifacts.docsDir)
  }
  if (active.includes('gh-release') && pkg.config.ghAssets?.length)
    artifacts.assetsDir = await ghPrepareAssets(pkg.config.ghAssets, pkg.absPath)

  const parcels = buildParcels(pkg, ctx, {channels: active, ...artifacts})

  const tars = []
  for (const {channel, manifest, files} of parcels) {
    // Two-pass: pack to temp, hash, rename to final name.
    // (The content hash cannot be known until the tar bytes exist.)
    const tmpPath = path.join(stageDir, `_tmp.${channel}.tar`)
    await packTar(tmpPath, manifest, files)
    const hash = await hashFile(tmpPath)
    const finalPath = path.join(stageDir, `parcel.${pkg.tag}.${channel}.${hash}.tar`)
    await fs.rename(tmpPath, finalPath)
    tars.push(finalPath)
  }

  // Published for the downstream publish/deliver step.
  pkg.tars = tars
  pkg.activeTransport = active
})
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import {memoizeBy} from '../../../util.js'
|
|
2
|
+
import {exec} from '../exec.js'
|
|
3
|
+
import {log} from '../../log.js'
|
|
4
|
+
import {deliver, channels, runChannel} from '../../courier/index.js'
|
|
5
|
+
import {pushTag} from '../../api/git.js'
|
|
6
|
+
|
|
7
|
+
// Publish step: push the release tag (non-snapshot only), deliver the
// parcel tars produced by the pack step, then run the local `cmd` channel.
// Memoized per package. Throws if pkg.version drifted from package.json.
export const publish = memoizeBy(async (pkg, ctx = pkg.ctx) => {
  if (pkg.version !== pkg.manifest.version)
    throw new Error('package.json version not synced')

  const {run = exec, channels: channelNames = [], flags} = ctx
  const snapshot = !!flags.snapshot
  const {tars = []} = pkg

  // Snapshot releases are not tagged.
  if (!snapshot) {
    const {tag, config: {gitCommitterEmail, gitCommitterName}} = pkg
    ctx.git.tag = tag
    log.info(`push release tag ${tag}`)
    await pushTag({cwd: ctx.git.root, tag, gitCommitterEmail, gitCommitterName})
  }

  await deliver(tars, ctx.env)

  // `cmd` runs locally (not via parcels/deliver), so it is dispatched here,
  // gated by the same when()/snapshot rules as transport channels.
  const cmd = channels.cmd
  if (channelNames.includes('cmd') && cmd?.when(pkg) && (!snapshot || cmd.snapshot))
    await runChannel('cmd', pkg, run)

  pkg.published = true
})
|
|
@@ -1,29 +1,22 @@
|
|
|
1
1
|
import os from 'node:os'
|
|
2
2
|
import {createRequire} from 'node:module'
|
|
3
|
-
import {$, within} from 'zx-extra'
|
|
3
|
+
import {$, within, fs, glob, path} from 'zx-extra'
|
|
4
4
|
import {queuefy} from 'queuefy'
|
|
5
|
-
import {topo, traverseQueue} from './deps.js'
|
|
5
|
+
import {topo, traverseQueue} from './depot/deps.js'
|
|
6
6
|
import {createReport, log} from './log.js'
|
|
7
|
-
import {exec} from './exec.js'
|
|
8
|
-
|
|
9
|
-
import {contextify} from './steps/contextify.js'
|
|
10
|
-
import {
|
|
11
|
-
import {
|
|
12
|
-
import {
|
|
13
|
-
import {
|
|
14
|
-
import {
|
|
15
|
-
import {
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
import ghRelease from './publishers/gh-release.js'
|
|
21
|
-
import ghPages from './publishers/gh-pages.js'
|
|
22
|
-
import changelog from './publishers/changelog.js'
|
|
23
|
-
import cmd from './publishers/cmd.js'
|
|
24
|
-
|
|
25
|
-
// Publisher registry. Order = publish order; teardown walks it in reverse.
|
|
26
|
-
const publishers = [meta, npm, ghRelease, ghPages, changelog, cmd]
|
|
7
|
+
import {exec} from './depot/exec.js'
|
|
8
|
+
|
|
9
|
+
import {contextify} from './depot/steps/contextify.js'
|
|
10
|
+
import {analyze} from './depot/steps/analyze.js'
|
|
11
|
+
import {build} from './depot/steps/build.js'
|
|
12
|
+
import {pack} from './depot/steps/pack.js'
|
|
13
|
+
import {publish} from './depot/steps/publish.js'
|
|
14
|
+
import {clean} from './depot/steps/clean.js'
|
|
15
|
+
import {test} from './depot/steps/test.js'
|
|
16
|
+
|
|
17
|
+
import {deliver, defaultOrder as channels} from './courier/index.js'
|
|
18
|
+
|
|
19
|
+
const PARCELS_DIR = 'parcels'
|
|
27
20
|
|
|
28
21
|
export const run = async ({cwd = process.cwd(), env, flags = {}} = {}) => within(async () => {
|
|
29
22
|
$.memo = new Map()
|
|
@@ -34,10 +27,22 @@ export const run = async ({cwd = process.cwd(), env, flags = {}} = {}) => within
|
|
|
34
27
|
return
|
|
35
28
|
}
|
|
36
29
|
|
|
30
|
+
// --deliver [dir]: standalone delivery from pre-packed tars.
|
|
31
|
+
if (flags.deliver) {
|
|
32
|
+
const dir = typeof flags.deliver === 'string' ? flags.deliver : PARCELS_DIR
|
|
33
|
+
const tars = await glob(path.join(dir, 'parcel.*.tar'))
|
|
34
|
+
if (!tars.length) throw new Error(`no tars found in ${dir}`)
|
|
35
|
+
const _env = {...process.env, ...env}
|
|
36
|
+
log.secret(_env.GH_TOKEN, _env.GITHUB_TOKEN, _env.NPM_TOKEN)
|
|
37
|
+
log.info(`deliver: ${tars.length} tar(s) from ${dir}`)
|
|
38
|
+
const delivered = await deliver(tars, _env)
|
|
39
|
+
log.info(`deliver: done, ${delivered} delivered`)
|
|
40
|
+
return
|
|
41
|
+
}
|
|
42
|
+
|
|
37
43
|
const ctx = await createContext({flags, env, cwd})
|
|
38
44
|
const {report, packages, queue, prev} = ctx
|
|
39
45
|
|
|
40
|
-
// Per-package scope: $.scope, packages[name] lookup, contextify on first touch.
|
|
41
46
|
const forEachPkg = (cb) => traverseQueue({queue, prev, cb: (name) => within(async () => {
|
|
42
47
|
$.scope = name
|
|
43
48
|
const pkg = packages[name]
|
|
@@ -50,15 +55,6 @@ export const run = async ({cwd = process.cwd(), env, flags = {}} = {}) => within
|
|
|
50
55
|
.log('queue:', queue)
|
|
51
56
|
.log('graphs', ctx.graphs)
|
|
52
57
|
|
|
53
|
-
// --recover: standalone mode — clean orphan tags and exit.
|
|
54
|
-
if (flags.recover) {
|
|
55
|
-
await fetchTags(cwd)
|
|
56
|
-
let recovered = 0
|
|
57
|
-
await forEachPkg(async (pkg) => { if (await recover(pkg)) recovered++ })
|
|
58
|
-
report.log(`recover: cleaned ${recovered} orphan tag(s)`)
|
|
59
|
-
return
|
|
60
|
-
}
|
|
61
|
-
|
|
62
58
|
try {
|
|
63
59
|
await forEachPkg(async (pkg) => {
|
|
64
60
|
report.setStatus('analyzing', pkg.name)
|
|
@@ -88,10 +84,22 @@ export const run = async ({cwd = process.cwd(), env, flags = {}} = {}) => within
|
|
|
88
84
|
report.setStatus('testing', pkg.name)
|
|
89
85
|
await test(pkg)
|
|
90
86
|
}
|
|
91
|
-
if (
|
|
92
|
-
report.setStatus('
|
|
93
|
-
|
|
87
|
+
if (flags.dryRun || flags.publish === false) {
|
|
88
|
+
report.setStatus('success', pkg.name)
|
|
89
|
+
return
|
|
94
90
|
}
|
|
91
|
+
|
|
92
|
+
report.setStatus('packing', pkg.name)
|
|
93
|
+
await pack(pkg)
|
|
94
|
+
|
|
95
|
+
// --pack <dir>: pack only, skip delivery.
|
|
96
|
+
if (flags.pack) {
|
|
97
|
+
report.setStatus('packed', pkg.name)
|
|
98
|
+
return
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
report.setStatus('publishing', pkg.name)
|
|
102
|
+
await publish(pkg)
|
|
95
103
|
report.setStatus('success', pkg.name)
|
|
96
104
|
})
|
|
97
105
|
} catch (e) {
|
|
@@ -108,7 +116,6 @@ export const createContext = async ({flags, env: _env, cwd}) => {
|
|
|
108
116
|
const report = createReport({packages, queue, flags})
|
|
109
117
|
const env = {...process.env, ..._env}
|
|
110
118
|
|
|
111
|
-
// Register known secrets so the logger redacts them from all output.
|
|
112
119
|
log.secret(env.GH_TOKEN, env.GITHUB_TOKEN, env.NPM_TOKEN)
|
|
113
120
|
|
|
114
121
|
$.report = report
|
|
@@ -118,7 +125,7 @@ export const createContext = async ({flags, env: _env, cwd}) => {
|
|
|
118
125
|
|
|
119
126
|
return {
|
|
120
127
|
cwd, env, flags, root, packages, queue, prev, graphs, report,
|
|
121
|
-
|
|
128
|
+
channels,
|
|
122
129
|
run: queuefy(exec, flags.concurrency || os.cpus().length),
|
|
123
130
|
}
|
|
124
131
|
}
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import tar from 'tar-stream'
|
|
2
|
+
import crypto from 'node:crypto'
|
|
3
|
+
import {fs, path} from 'zx-extra'
|
|
4
|
+
import {pipeline} from 'node:stream/promises'
|
|
5
|
+
import {createWriteStream, createReadStream} from 'node:fs'
|
|
6
|
+
|
|
7
|
+
// Write a parcel tar to `tarPath`: manifest.json first, then each listed
// file (or directory, recursively) as a payload entry. Returns `tarPath`.
export const packTar = async (tarPath, manifest, files = []) => {
  const archive = tar.pack()
  const manifestJson = JSON.stringify(manifest, null, 2)
  archive.entry({name: 'manifest.json', size: Buffer.byteLength(manifestJson)}, manifestJson)

  for (const {name, source} of files) {
    const stat = await fs.stat(source)
    if (stat.isDirectory()) {
      await addDir(archive, name, source)
    } else {
      const data = await fs.readFile(source)
      archive.entry({name, size: data.length}, data)
    }
  }

  archive.finalize()
  await pipeline(archive, createWriteStream(tarPath))
  return tarPath
}
|
|
25
|
+
|
|
26
|
+
// Short content fingerprint: first 8 hex chars of the file's SHA-1 digest.
export const hashFile = async (filePath) => {
  const digest = crypto.createHash('sha1')
  await pipeline(createReadStream(filePath), digest)
  const hex = digest.digest('hex')
  return hex.slice(0, 8)
}
|
|
31
|
+
|
|
32
|
+
// Recursively append a directory's files to the tar pack, each entry named
// relative to `prefix`.
const addDir = async (pack, prefix, dirPath) => {
  const entries = await fs.readdir(dirPath, {withFileTypes: true})
  for (const dirent of entries) {
    const absPath = path.join(dirPath, dirent.name)
    const tarName = path.join(prefix, dirent.name)
    if (dirent.isDirectory()) {
      await addDir(pack, tarName, absPath)
      continue
    }
    const data = await fs.readFile(absPath)
    pack.entry({name: tarName, size: data.length}, data)
  }
}
|
|
43
|
+
|
|
44
|
+
// Extract a parcel tar: `manifest.json` is parsed and returned, every other
// entry is written verbatim under `destDir`.
// Returns {manifest, dir}; `manifest` is null when the tar has no manifest.json.
// Throws on read/parse/write failures.
// NOTE(review): header.name is trusted here; parcels are self-produced, but a
// hostile tar with '..' entries could escape destDir — confirm threat model.
export const unpackTar = async (tarPath, destDir) => {
  await fs.ensureDir(destDir)
  const extract = tar.extract()
  let manifest = null

  const done = new Promise((resolve, reject) => {
    extract.on('entry', (header, stream, cb) => {
      const chunks = []
      stream.on('data', d => chunks.push(d))
      stream.on('end', async () => {
        // Guard the async work: without this, a malformed manifest (JSON.parse
        // throw) or a failed write became an unhandled rejection and the
        // extract stalled instead of rejecting the unpackTar call.
        try {
          const buf = Buffer.concat(chunks)
          if (header.name === 'manifest.json') {
            manifest = JSON.parse(buf.toString('utf8'))
          } else {
            const dest = path.join(destDir, header.name)
            await fs.ensureDir(path.dirname(dest))
            await fs.writeFile(dest, buf)
          }
          cb()
        } catch (err) {
          reject(err)
          extract.destroy(err)
        }
      })
      stream.resume()
    })
    extract.on('finish', resolve)
    extract.on('error', reject)
  })

  // Await both together so neither rejection is left unobserved when the
  // other settles first.
  await Promise.all([pipeline(createReadStream(tarPath), extract), done])
  return {manifest, dir: destDir}
}
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import http from 'node:http'
|
|
2
|
+
|
|
3
|
+
// Minimal in-process HTTP stub for GitHub API calls in tests.
// Routes are matched against `"METHOD /url"` by substring first, then by
// treating the pattern as a regex; unmatched requests get 404 + '{}'.
// Returns {url, requests, routes, reset, setRoutes, close}.
export const createGhServer = async () => {
  const requests = []
  // Single mutable routes object, cleared in place by reset() — the original
  // reassigned a fresh object, leaving the exposed `.routes` property stale.
  const routes = {}

  // Find the first route whose pattern matches the request key.
  const matchRoute = (key) => {
    for (const [pattern, handler] of Object.entries(routes)) {
      if (key.includes(pattern)) return handler
      try {
        if (new RegExp(pattern).test(key)) return handler
      } catch {
        // Pattern is not a valid regex; substring check above was the only option.
      }
    }
    return null
  }

  const server = http.createServer((req, res) => {
    let body = ''
    req.on('data', c => body += c)
    req.on('end', () => {
      requests.push({method: req.method, url: req.url, headers: req.headers, body})
      const handler = matchRoute(`${req.method} ${req.url}`)
      if (handler) return handler(req, res, body)
      res.writeHead(404)
      res.end('{}')
    })
  })

  // Port 0 → OS-assigned ephemeral port.
  await new Promise(r => server.listen(0, '127.0.0.1', r))

  return {
    url: `http://127.0.0.1:${server.address().port}`,
    requests,
    routes,
    reset() {
      requests.length = 0
      for (const k of Object.keys(routes)) delete routes[k]
      return routes
    },
    setRoutes(r) { Object.assign(routes, r) },
    close() { server.close() },
  }
}
|