bulk-release 3.0.5 → 3.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +15 -0
- package/README.md +146 -24
- package/package.json +2 -2
- package/src/main/js/config.js +3 -2
- package/src/main/js/post/api/gh.js +11 -0
- package/src/main/js/post/api/git.js +19 -0
- package/src/main/js/post/api/npm.js +5 -5
- package/src/main/js/post/courier/channels/changelog.js +12 -3
- package/src/main/js/post/courier/channels/gh-pages.js +8 -1
- package/src/main/js/post/courier/channels/gh-release.js +11 -1
- package/src/main/js/post/courier/channels/git-tag.js +24 -0
- package/src/main/js/post/courier/channels/meta.js +9 -3
- package/src/main/js/post/courier/channels/npm.js +27 -14
- package/src/main/js/post/courier/index.js +137 -24
- package/src/main/js/post/courier/semaphore.js +31 -0
- package/src/main/js/post/courier/seniority.js +19 -0
- package/src/main/js/post/depot/context.js +46 -0
- package/src/main/js/post/depot/reconcile.js +47 -0
- package/src/main/js/post/depot/steps/contextify.js +2 -1
- package/src/main/js/post/depot/steps/pack.js +6 -10
- package/src/main/js/post/depot/steps/publish.js +1 -11
- package/src/main/js/post/modes/deliver.js +23 -0
- package/src/main/js/post/modes/pack.js +69 -0
- package/src/main/js/post/modes/receive.js +72 -0
- package/src/main/js/post/modes/verify.js +41 -0
- package/src/main/js/post/{courier/parcel.js → parcel/build.js} +15 -2
- package/src/main/js/post/parcel/directive.js +81 -0
- package/src/main/js/post/parcel/index.js +4 -0
- package/src/main/js/post/parcel/verify.js +46 -0
- package/src/main/js/post/release.js +44 -86
- package/src/main/js/post/tar.js +1 -1
- package/src/main/js/util.js +16 -0
- package/src/test/js/utils/mock.js +1 -1

--- package/src/main/js/post/courier/index.js
+++ package/src/main/js/post/courier/index.js
@@ -1,6 +1,10 @@
 import {$, tempy, within, path, semver, fs} from 'zx-extra'
 import {unpackTar} from '../tar.js'
 import {log} from '../log.js'
+import {pool} from '../../util.js'
+import {scanDirectives, invalidateOrphans} from '../parcel/directive.js'
+import {tryLock, unlock, signalRebuild} from './semaphore.js'
+import gitTag from './channels/git-tag.js'
 import meta from './channels/meta.js'
 import npm from './channels/npm.js'
 import ghRelease from './channels/gh-release.js'
@@ -8,10 +12,15 @@ import ghPages from './channels/gh-pages.js'
 import changelog from './channels/changelog.js'
 import cmd from './channels/cmd.js'
 
-export {buildParcels} from './parcel.js'
 
-export const channels = {meta, npm, 'gh-release': ghRelease, 'gh-pages': ghPages, changelog, cmd}
-export const defaultOrder = ['meta', 'npm', 'gh-release', 'gh-pages', 'changelog', 'cmd']
+export const channels = {'git-tag': gitTag, meta, npm, 'gh-release': ghRelease, 'gh-pages': ghPages, changelog, cmd}
+export const defaultOrder = ['git-tag', 'meta', 'npm', 'gh-release', 'gh-pages', 'changelog', 'cmd']
+
+export const getActiveChannels = (pkg, channelNames, snapshot) =>
+  channelNames.filter(n => {
+    const ch = channels[n]
+    return ch && ch.transport !== false && (!snapshot || ch.snapshot) && ch.when(pkg)
+  })
 
 export const prepare = async (names, pkg) => {
   for (const n of names) await channels[n]?.prepare?.(pkg)
@@ -38,9 +47,11 @@ export const resolveManifest = (manifest, env = process.env) => {
   return resolved
 }
 
+const MARKERS = new Set(['released', 'skip', 'conflict', 'orphan'])
+
 const openParcel = async (tarPath, env) => {
   const content = await fs.readFile(tarPath, 'utf8').catch(() => null)
-  if (content
+  if (MARKERS.has(content)) return null
 
   const destDir = tempy.temporaryDirectory()
   const {manifest} = await unpackTar(tarPath, destDir)
@@ -56,24 +67,6 @@ const openParcel = async (tarPath, env) => {
   return {ch, resolved, destDir, tarPath}
 }
 
-const pool = async (tasks, concurrency, fn) => {
-  const active = new Set()
-  let i = 0
-  await new Promise((resolve, reject) => {
-    const next = () => {
-      if (i >= tasks.length && active.size === 0) return resolve()
-      while (active.size < concurrency && i < tasks.length) {
-        const t = tasks[i++]
-        const p = fn(t).then(() => { active.delete(p); next() }, reject)
-        active.add(p)
-      }
-    }
-    next()
-  })
-}
-
-// Parcels grouped by package, sorted by semver asc (latest last).
-// Groups run in parallel (concurrency-limited), entries within a group — sequential.
 export const inspect = async (tars, env = process.env) => {
   const parcels = []
   const skipped = []
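Note: the pool concurrency helper removed here is not dropped. The new import block at the top of this file pulls it from '../../util.js', and package/src/main/js/util.js grows by 16 lines in this release, which is consistent with the helper simply being relocated. Its call shape, visible in the removed lines above, is a plain concurrency-limited runner; a minimal usage sketch (task list and worker are made up):

    // run at most 4 workers at a time; resolves when all tasks are done, rejects on the first error
    await pool(['a', 'b', 'c'], 4, async (task) => console.log(task))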
@@ -93,7 +86,6 @@ export const inspect = async (tars, env = process.env) => {
     }
   }
 
-  // group by package, sort by semver asc within each group
   const groups = new Map()
   for (const p of parcels) {
     const key = p.resolved.name || p.resolved.channel
@@ -106,7 +98,9 @@ export const inspect = async (tars, env = process.env) => {
   return {groups, skipped, total: tars.length, pending: parcels.length}
 }
 
-
+// --- Legacy deliver (no directive) ---
+
+const deliverLegacy = async (tars, env, {concurrency, dryRun}) => {
   const {groups, skipped, total, pending} = await inspect(tars, env)
 
   for (const {file, reason, tarPath} of skipped) {
@@ -129,6 +123,7 @@ export const deliver = async (tars, env = process.env, {concurrency = 4, dryRun
     await within(async () => {
       $.scope = resolved.name || resolved.channel
       await ch.run(resolved, destDir)
+      log.info(`${resolved.channel} ${resolved.version}`)
     })
     await fs.writeFile(tarPath, 'released')
     entries.push(toEntry({resolved}))
@@ -137,3 +132,121 @@ export const deliver = async (tars, env = process.env, {concurrency = 4, dryRun
 
   return {total, pending, delivered: entries.length, skipped: skipped.length, entries}
 }
+
+// --- Directive-aware deliver ---
+
+const deliverParcel = async (tarPath, channelName, pkgName, version, env, {dryRun}) => {
+  const p = await openParcel(tarPath, env)
+  if (!p) return 'already'
+  if (p.warn) {
+    log.warn(`skipping ${p.warn}`)
+    return 'skip'
+  }
+
+  if (dryRun) return 'dryrun'
+
+  const {ch, resolved, destDir} = p
+  const res = await within(async () => {
+    $.scope = pkgName
+    const r = await ch.run(resolved, destDir)
+    log.info(`${channelName} ${version}`)
+    return r
+  })
+
+  if (res === 'conflict') return 'conflict'
+
+  await fs.writeFile(tarPath, 'released')
+  return res === 'duplicate' ? 'duplicate' : 'ok'
+}
+
+const deliverDirective = async (directive, tarMap, env, {dryRun}) => {
+  const entries = []
+  const conflicts = []
+  const skipped = []
+
+  for (const pkgName of directive.queue) {
+    const pkg = directive.packages[pkgName]
+    if (!pkg) continue
+
+    let pkgConflict = false
+
+    for (const step of pkg.deliver) {
+      if (pkgConflict) break
+
+      const results = await Promise.all(step.map(async (channelName) => {
+        const parcelName = (pkg.parcels || []).find(p => p.includes(`.${channelName}.`))
+        const tarPath = parcelName && tarMap.get(parcelName)
+        if (!tarPath) return 'missing'
+
+        return deliverParcel(tarPath, channelName, pkgName, pkg.version, env, {dryRun})
+      }))
+
+      for (let i = 0; i < step.length; i++) {
+        const r = results[i]
+        if (r === 'skip') skipped.push({channelName: step[i], pkg: pkgName})
+        else if (r !== 'missing' && r !== 'already') entries.push({channel: step[i], name: pkgName, version: pkg.version})
+      }
+
+      if (results.includes('conflict')) {
+        pkgConflict = true
+        conflicts.push(pkgName)
+        for (const p of pkg.parcels || []) {
+          const tp = tarMap.get(p)
+          if (!tp) continue
+          const c = await fs.readFile(tp, 'utf8').catch(() => null)
+          if (c !== 'released') await fs.writeFile(tp, 'conflict')
+        }
+      }
+    }
+  }
+
+  return {entries, conflicts, skipped}
+}
+
+// --- Main entry point ---
+
+export const deliver = async (tars, env = process.env, {concurrency = 4, dryRun = false, cwd} = {}) => {
+  const dir = tars.length ? path.dirname(tars[0]) : null
+  const directives = dir ? await scanDirectives(dir) : []
+
+  if (!directives.length) return deliverLegacy(tars, env, {concurrency, dryRun})
+
+  const tarMap = new Map(tars.map(t => [path.basename(t), t]))
+  const allEntries = []
+  const allConflicts = []
+  const allSkipped = []
+
+  for (const directive of directives) {
+    const gitRoot = cwd || dir
+    if (!await tryLock(gitRoot, directive)) {
+      log.info(`directive ${directive.sha.slice(0, 7)} locked, skipping`)
+      continue
+    }
+
+    try {
+      await invalidateOrphans(dir, directive)
+      const {entries, conflicts, skipped} = await deliverDirective(directive, tarMap, env, {dryRun})
+      allEntries.push(...entries)
+      allConflicts.push(...conflicts)
+      allSkipped.push(...skipped)
+
+      if (conflicts.length) {
+        await fs.writeFile(directive.tarPath, 'conflict')
+        await signalRebuild(gitRoot, directive.sha)
+      } else if (!skipped.length) {
+        await fs.writeFile(directive.tarPath, 'released')
+      }
+    } finally {
+      await unlock(gitRoot, directive)
+    }
+  }
+
+  return {
+    total: tars.length,
+    pending: allEntries.length,
+    delivered: allEntries.length,
+    skipped: allSkipped.length,
+    entries: allEntries,
+    conflicts: allConflicts,
+  }
+}
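For orientation, the directive consumed by deliverDirective() and the new deliver() entry point above is only read here, not defined; it is produced by buildDirective()/scanDirectives() in parcel/directive.js, added elsewhere in this release. A hypothetical instance, shaped after the property accesses in this hunk, with made-up values:

    const directive = {
      sha: 'd6602ec5194c87b0fc87103ca4d67251c76f233a',
      timestamp: 1712345678,                            // from getCommitTimestamp; exact format not shown in this diff
      tarPath: '/repo/parcels/directive.d6602ec.tar',   // marker file, rewritten to 'released' or 'conflict'
      queue: ['my-pkg'],                                // package delivery order
      packages: {
        'my-pkg': {
          version: '3.1.1',
          deliver: [['git-tag'], ['npm']],              // steps run in order, channels within a step in parallel
          parcels: [
            'parcel.d6602ec.git-tag.my-pkg@3.1.1.aa11bb22.tar',
            'parcel.d6602ec.npm.my-pkg@3.1.1.1f9a3c0e.tar',
          ],
        },
      },
    }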

--- /dev/null
+++ package/src/main/js/post/courier/semaphore.js
@@ -0,0 +1,31 @@
+import {pushAnnotatedTag, deleteRemoteTag} from '../api/git.js'
+
+const tagName = ({sha}) =>
+  `zbr-deliver.${sha.slice(0, 7)}`
+
+export const tryLock = async (cwd, directive) => {
+  const tag = tagName(directive)
+  const body = JSON.stringify({
+    ts: directive.timestamp,
+    sha: directive.sha,
+    packages: directive.queue,
+  })
+  try {
+    await pushAnnotatedTag(cwd, tag, body)
+    return true
+  } catch {
+    return false
+  }
+}
+
+export const unlock = async (cwd, directive) => {
+  await deleteRemoteTag(cwd, tagName(directive))
+}
+
+export const signalRebuild = async (cwd, sha) => {
+  const tag = `zbr-rebuild.${sha.slice(0, 7)}`
+  try { await pushAnnotatedTag(cwd, tag, 'rebuild') } catch { /* already signaled */ }
+}
+
+export const consumeRebuildSignal = async (cwd, sha) =>
+  deleteRemoteTag(cwd, `zbr-rebuild.${sha.slice(0, 7)}`)
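The lock is a remote git tag used as a mutex: whichever runner pushes the zbr-deliver.<sha7> annotated tag first proceeds, any other runner fails tryLock and skips the directive. pushAnnotatedTag and deleteRemoteTag live in api/git.js (extended in this release but not shown in full here); assuming they are thin wrappers, the underlying round trip looks roughly like this zx-style sketch with illustrative values:

    import {$} from 'zx-extra'

    const cwd = process.cwd()
    const tag = 'zbr-deliver.d6602ec'
    const body = '{"ts":0,"sha":"d6602ec...","packages":["my-pkg"]}'
    await $({cwd})`git tag -a ${tag} -m ${body}`              // create the annotated tag locally
    await $({cwd})`git push origin ${tag}`                    // rejected if another runner already pushed this tag
    // ...deliver parcels...
    await $({cwd})`git push origin ${':refs/tags/' + tag}`    // unlock: delete the remote tag

signalRebuild plays the same trick with a zbr-rebuild.<sha7> tag, and consumeRebuildSignal claims it by deleting it, so exactly one receiver acts on a rebuild request.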

--- /dev/null
+++ package/src/main/js/post/courier/seniority.js
@@ -0,0 +1,19 @@
+import {$, semver} from 'zx-extra'
+import {parseTag} from '../depot/generators/tag.js'
+
+export const hasHigherVersion = async (cwd, name, version) => {
+  const output = (await $({cwd, nothrow: true})`git ls-remote --tags origin`).toString()
+  if (!output) return false
+
+  for (const line of output.split('\n')) {
+    const ref = line.split('\t')[1]?.replace('refs/tags/', '')
+    if (!ref) continue
+    const parsed = parseTag(ref)
+    if (!parsed || parsed.name !== name) continue
+    try {
+      if (semver.gt(parsed.version, version)) return true
+    } catch { /* unparseable version, skip */ }
+  }
+
+  return false
+}
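hasHigherVersion works on raw "git ls-remote --tags origin" output, one sha-tab-ref pair per line, and only considers refs that parseTag (from depot/generators/tag.js, not part of this diff) recognizes for the given package. Parsing one such line, with a made-up tag name (the real format depends on the configured tagFormat):

    const line = 'd6602ec5194c87b0fc87103ca4d67251c76f233a\trefs/tags/my-pkg@3.2.0'
    const ref = line.split('\t')[1]?.replace('refs/tags/', '')   // 'my-pkg@3.2.0'
    // if parseTag(ref) yields {name: 'my-pkg', version: '3.2.0'}, then
    // hasHigherVersion(cwd, 'my-pkg', '3.1.1') resolves to true, since semver.gt('3.2.0', '3.1.1')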

--- /dev/null
+++ package/src/main/js/post/depot/context.js
@@ -0,0 +1,46 @@
+import {fs, path} from 'zx-extra'
+
+export const CONTEXT_FILE = '.zbr-context.json'
+
+export const writeContext = async (cwd, context) => {
+  const filePath = path.resolve(cwd, CONTEXT_FILE)
+  await fs.writeJson(filePath, context, {spaces: 2})
+  return filePath
+}
+
+export const readContext = async (filePath) => {
+  let data
+  try { data = await fs.readJson(filePath) } catch {
+    throw new Error(`context not found: ${filePath}`)
+  }
+
+  if (!data || typeof data !== 'object') throw new Error(`context is not an object: ${filePath}`)
+  if (!data.status) throw new Error(`context missing status: ${filePath}`)
+  if (data.status === 'proceed') {
+    if (!data.sha || !data.sha7) throw new Error(`context missing sha: ${filePath}`)
+    if (!data.packages || typeof data.packages !== 'object')
+      throw new Error(`context missing packages: ${filePath}`)
+  }
+
+  return data
+}
+
+export const buildContext = (packages, queue, sha, {getChannels} = {}) => {
+  const pkgs = {}
+  for (const name of queue) {
+    const pkg = packages[name]
+    if (!pkg.releaseType || pkg.skipped) continue
+    pkgs[name] = {
+      version: pkg.version,
+      tag: pkg.tag,
+      channels: getChannels ? getChannels(pkg) : [],
+    }
+  }
+
+  return {
+    status: 'proceed',
+    sha,
+    sha7: sha.slice(0, 7),
+    packages: pkgs,
+  }
+}
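For reference, the proceed-shaped context that buildContext() returns and writeContext() serializes into .zbr-context.json looks like the object below; the package name, version, tag and channel list are illustrative, and the tag string depends on the configured tagFormat:

    const context = {
      status: 'proceed',
      sha: 'd6602ec5194c87b0fc87103ca4d67251c76f233a',
      sha7: 'd6602ec',
      packages: {
        'my-pkg': {version: '3.1.1', tag: 'my-pkg@3.1.1', channels: ['git-tag', 'npm']},
      },
    }

readContext() rejects anything without a status, and for 'proceed' additionally requires sha, sha7 and a packages object, so a half-written context file fails fast in runVerify.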

--- /dev/null
+++ package/src/main/js/post/depot/reconcile.js
@@ -0,0 +1,47 @@
+import {$} from 'zx-extra'
+import {log} from '../log.js'
+import {getRemoteTagSha, clearTagsCache} from '../api/git.js'
+import {formatTag} from './generators/tag.js'
+import {resolvePkgVersion} from './steps/analyze.js'
+
+export const preflight = async (pkg, ctx) => {
+  if (!pkg.tag) return 'ok'
+
+  const cwd = ctx.git.root
+  const remoteSha = await getRemoteTagSha(cwd, pkg.tag)
+  if (!remoteSha) return 'ok'
+
+  // tag exists on remote
+  if (remoteSha === ctx.git.sha) {
+    log.info(`preflight: ${pkg.tag} already exists for our commit, skipping`)
+    return 'skip'
+  }
+
+  // need history for merge-base
+  await $({cwd, nothrow: true})`git fetch --deepen=100`
+
+  const isOursOlder = await $({cwd, nothrow: true})`git merge-base --is-ancestor ${ctx.git.sha} ${remoteSha}`
+  if (isOursOlder.exitCode === 0) {
+    log.info(`preflight: ${pkg.tag} — we are older, skipping`)
+    return 'skip'
+  }
+
+  const isRemoteOlder = await $({cwd, nothrow: true})`git merge-base --is-ancestor ${remoteSha} ${ctx.git.sha}`
+  if (isRemoteOlder.exitCode === 0) {
+    log.info(`preflight: ${pkg.tag} — we are newer, re-resolving version`)
+    await $({cwd})`git fetch origin --tags --force`
+    clearTagsCache()
+
+    const pre = ctx.flags.snapshot ? `-snap.${ctx.git.sha.slice(0, 7)}` : undefined
+    // re-resolve: the fetched tags will give us a new latest version
+    const latestVersion = pkg.latest.tag?.version || pkg.manifest.version
+    pkg.version = resolvePkgVersion(pkg.releaseType, latestVersion, pkg.manifest.version, pre)
+    pkg.manifest.version = pkg.version
+    pkg.tag = formatTag({name: pkg.name, version: pkg.version, format: pkg.config.tagFormat})
+    return 'ok'
+  }
+
+  // diverged — anomaly
+  log.warn(`preflight: ${pkg.tag} — diverged commits, skipping`)
+  return 'skip'
+}
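The two git merge-base --is-ancestor probes lean on a git fact: the command exits 0 exactly when the first commit is an ancestor of the second. That gives preflight its three outcomes above: our commit is behind the remote tag (someone already released a newer build, skip), our commit is ahead (refetch tags, re-resolve to a fresh version and retag, proceed), or the histories have diverged (warn and skip).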

--- package/src/main/js/post/depot/steps/contextify.js
+++ package/src/main/js/post/depot/steps/contextify.js
@@ -1,6 +1,6 @@
 import {getPkgConfig} from '../../../config.js'
 import {getLatest} from '../generators/meta.js'
-import {getRoot, getSha} from '../../api/git.js'
+import {getRoot, getSha, getCommitTimestamp} from '../../api/git.js'
 
 /**
  * Global release context — one per `run()` invocation.
@@ -44,6 +44,7 @@ export const contextify = async (pkg, ctx) => {
     git: {
       sha: await getSha(pkg.absPath),
       root: await getRoot(pkg.absPath),
+      timestamp: await getCommitTimestamp(pkg.absPath),
    },
  }
}

--- package/src/main/js/post/depot/steps/pack.js
+++ package/src/main/js/post/depot/steps/pack.js
@@ -1,27 +1,22 @@
 import {$, tempy, fs, path} from 'zx-extra'
 import {memoizeBy, asTuple} from '../../../util.js'
-import {
+import {prepare, getActiveChannels} from '../../courier/index.js'
+import {buildParcels, PARCELS_DIR} from '../../parcel/index.js'
 import {npmPersist} from '../../api/npm.js'
 import {getRepo} from '../../api/git.js'
 import {formatReleaseNotes} from '../generators/notes.js'
 import {ghPrepareAssets} from '../../api/gh.js'
 import {packTar, hashFile} from '../../tar.js'
 
-const filterActive = (names, pkg, {snapshot = false} = {}) =>
-  names.filter(n => {
-    const ch = channels[n]
-    return ch && ch.transport !== false && (!snapshot || ch.snapshot) && ch.when(pkg)
-  })
-
 export const pack = memoizeBy(async (pkg, ctx = pkg.ctx) => {
   const {channels: channelNames = [], flags} = ctx
   const snapshot = !!flags.snapshot
-  const active =
+  const active = getActiveChannels(pkg, channelNames, snapshot)
 
   await prepare(active, pkg)
   await npmPersist(pkg)
 
-  const outputDir = flags.pack ? path.resolve(ctx.git.root, typeof flags.pack === 'string' ? flags.pack :
+  const outputDir = flags.pack ? path.resolve(ctx.git.root, typeof flags.pack === 'string' ? flags.pack : PARCELS_DIR) : null
   const stageDir = outputDir || tempy.temporaryDirectory()
   if (outputDir) await fs.ensureDir(outputDir)
   const {repoName, repoHost, originUrl} = await getRepo(pkg.absPath, {basicAuth: pkg.config.ghBasicAuth})
@@ -49,7 +44,8 @@ export const pack = memoizeBy(async (pkg, ctx = pkg.ctx) => {
     const tmpPath = path.join(stageDir, `_tmp.${channel}.tar`)
     await packTar(tmpPath, manifest, files)
     const hash = await hashFile(tmpPath)
-    const
+    const sha7 = ctx.git.sha.slice(0, 7)
+    const finalPath = path.join(stageDir, `parcel.${sha7}.${channel}.${pkg.tag}.${hash}.tar`)
     await fs.rename(tmpPath, finalPath)
     tars.push(finalPath)
   }
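The staged file name now embeds the commit: parcel.<sha7>.<channel>.<tag>.<hash>.tar. A hypothetical example for the npm channel:

    parcel.d6602ec.npm.my-pkg@3.1.1.1f9a3c0e.tar

The name is load-bearing: deliverDirective() in courier/index.js matches a package's parcels to channels by the '.<channel>.' infix, and runDeliver()/runVerify() discover parcels with the 'parcel.*.tar' glob.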

--- package/src/main/js/post/depot/steps/publish.js
+++ package/src/main/js/post/depot/steps/publish.js
@@ -1,28 +1,18 @@
 import {memoizeBy} from '../../../util.js'
 import {exec} from '../exec.js'
-import {log} from '../../log.js'
 import {deliver, channels, runChannel} from '../../courier/index.js'
-import {pushTag} from '../../api/git.js'
 
 export const publish = memoizeBy(async (pkg, ctx = pkg.ctx) => {
   if (pkg.version !== pkg.manifest.version)
     throw new Error('package.json version not synced')
 
   const {run = exec, channels: channelNames = [], flags} = ctx
-  const snapshot = !!flags.snapshot
   const {tars = []} = pkg
 
-  if (!snapshot) {
-    const {tag, config: {gitCommitterEmail, gitCommitterName}} = pkg
-    ctx.git.tag = tag
-    log.info(`push release tag ${tag}`)
-    await pushTag({cwd: ctx.git.root, tag, gitCommitterEmail, gitCommitterName})
-  }
-
   await deliver(tars, ctx.env)
 
   const cmd = channels.cmd
-  if (channelNames.includes('cmd') && cmd?.when(pkg) && (!snapshot || cmd.snapshot))
+  if (channelNames.includes('cmd') && cmd?.when(pkg) && (!flags.snapshot || cmd.snapshot))
     await runChannel('cmd', pkg, run)
 
   pkg.published = true

--- /dev/null
+++ package/src/main/js/post/modes/deliver.js
@@ -0,0 +1,23 @@
+import {$, glob, path} from 'zx-extra'
+import {createReport, log} from '../log.js'
+import {deliver} from '../courier/index.js'
+import {PARCELS_DIR} from '../parcel/index.js'
+
+export const runDeliver = async ({env, flags}) => {
+  const dir = typeof flags.deliver === 'string' ? flags.deliver : PARCELS_DIR
+  const report = createReport({flags})
+
+  $.memo = new Map()
+  $.report = report
+
+  report.setStatus('inspecting')
+
+  const tars = await glob(path.join(dir, 'parcel.*.tar'))
+  if (!tars.length) return report.setStatus('success').log(`no parcels in ${dir}`)
+
+  report.setStatus('delivering').log(`parcels: ${tars.length}`)
+  const result = await deliver(tars, env, {dryRun: flags.dryRun})
+  report.set('delivery', result).setStatus('success')
+
+  log.info(`done: ${result.delivered} delivered, ${result.skipped} skipped`)
+}
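runDeliver only needs an env and the flags it destructures above; how the CLI maps argv onto those flags is outside this diff. A minimal programmatic sketch (import path relative to src/main/js, flag values illustrative), delivering whatever was staged into the default PARCELS_DIR:

    import {runDeliver} from './post/modes/deliver.js'

    await runDeliver({env: process.env, flags: {deliver: true, dryRun: true}})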

--- /dev/null
+++ package/src/main/js/post/modes/pack.js
@@ -0,0 +1,69 @@
+import {$, within, path} from 'zx-extra'
+
+import {log} from '../log.js'
+import {traverseQueue} from '../depot/deps.js'
+import {contextify} from '../depot/steps/contextify.js'
+import {analyze} from '../depot/steps/analyze.js'
+import {build} from '../depot/steps/build.js'
+import {pack} from '../depot/steps/pack.js'
+import {publish} from '../depot/steps/publish.js'
+import {clean} from '../depot/steps/clean.js'
+import {test} from '../depot/steps/test.js'
+import {preflight} from '../depot/reconcile.js'
+import {buildDirective, PARCELS_DIR} from '../parcel/index.js'
+
+export const runPack = async ({cwd, env, flags}, ctx) => {
+  const {report, packages, queue, prev} = ctx
+
+  const forEachPkg = (cb) => traverseQueue({queue, prev, cb: (name) => within(async () => {
+    $.scope = name
+    await contextify(packages[name], ctx)
+    return cb(packages[name])
+  })})
+
+  report
+    .log('queue:', queue)
+    .log('graphs', ctx.graphs)
+
+  try {
+    await forEachPkg(async (pkg) => {
+      report.setStatus('analyzing', pkg.name)
+      await analyze(pkg)
+      report.set({
+        config: pkg.config,
+        version: pkg.version,
+        prevVersion: pkg.latest.tag?.version || pkg.manifest.version,
+        releaseType: pkg.releaseType,
+        tag: pkg.tag,
+      }, pkg.name)
+    })
+
+    report.setStatus('pending')
+
+    const packed = []
+    await forEachPkg(async (pkg) => {
+      if (!pkg.releaseType) { pkg.skipped = true; return report.setStatus('skipped', pkg.name) }
+      if (await preflight(pkg, pkg.ctx) === 'skip') { pkg.skipped = true; return report.setStatus('skipped', pkg.name) }
+      if (flags.build !== false) { report.setStatus('building', pkg.name); await build(pkg) }
+      if (flags.test !== false) { report.setStatus('testing', pkg.name); await test(pkg) }
+      if (flags.dryRun || flags.publish === false) return report.setStatus('success', pkg.name)
+
+      report.setStatus('packing', pkg.name); await pack(pkg)
+      if (flags.pack) { packed.push(pkg); return report.setStatus('packed', pkg.name) }
+
+      report.setStatus('publishing', pkg.name); await publish(pkg)
+      report.setStatus('success', pkg.name)
+    })
+
+    if (flags.pack && packed.length) {
+      const outputDir = path.resolve(ctx.cwd, typeof flags.pack === 'string' ? flags.pack : PARCELS_DIR)
+      await buildDirective(ctx, packed, outputDir)
+    }
+  } catch (e) {
+    report.error(e, e.stack).set('error', e).setStatus('failure')
+    throw e
+  } finally {
+    await clean(ctx)
+  }
+  report.setStatus('success').log('Great success!')
+}

--- /dev/null
+++ package/src/main/js/post/modes/receive.js
@@ -0,0 +1,72 @@
+import {$, within} from 'zx-extra'
+
+import {log} from '../log.js'
+import {traverseQueue} from '../depot/deps.js'
+import {contextify} from '../depot/steps/contextify.js'
+import {analyze} from '../depot/steps/analyze.js'
+import {clean} from '../depot/steps/clean.js'
+import {preflight} from '../depot/reconcile.js'
+import {consumeRebuildSignal} from '../courier/semaphore.js'
+import {getActiveChannels} from '../courier/index.js'
+import {writeContext, buildContext} from '../depot/context.js'
+import {getSha} from '../api/git.js'
+import {setOutput, isRebuildTrigger} from '../api/gh.js'
+
+export const runReceive = async ({cwd, env, flags}, ctx) => {
+  const {report, packages, queue, prev} = ctx
+
+  const sha = await getSha(cwd)
+  const sha7 = sha.slice(0, 7)
+
+  if (isRebuildTrigger(env) && !flags.dryRun) {
+    const result = await consumeRebuildSignal(cwd, sha)
+    if (result?.exitCode !== 0 && result?.stderr?.includes('remote ref does not exist')) {
+      log.info(`rebuild signal already consumed by another process`)
+      await writeContext(cwd, {status: 'skip', reason: 'rebuild claimed by another process'})
+      setOutput('status', 'skip')
+      return report.setStatus('success')
+    }
+    log.info(`consumed rebuild signal for ${sha7}`)
+  }
+
+  const forEachPkg = (cb) => traverseQueue({queue, prev, cb: (name) => within(async () => {
+    $.scope = name
+    await contextify(packages[name], ctx)
+    return cb(packages[name])
+  })})
+
+  try {
+    await forEachPkg(async (pkg) => {
+      report.setStatus('analyzing', pkg.name)
+      await analyze(pkg)
+    })
+
+    await forEachPkg(async (pkg) => {
+      if (!pkg.releaseType) { pkg.skipped = true; return }
+      if (await preflight(pkg, pkg.ctx) === 'skip') { pkg.skipped = true; return }
+    })
+
+    const snapshot = !!flags.snapshot
+    const context = buildContext(packages, queue, sha, {
+      getChannels: (pkg) => getActiveChannels(pkg, ctx.channels, snapshot),
+    })
+    await writeContext(cwd, context)
+
+    const count = Object.keys(context.packages).length
+    if (count === 0) {
+      log.info('nothing to release')
+      await writeContext(cwd, {status: 'skip', reason: 'nothing to release'})
+      setOutput('status', 'skip')
+    } else {
+      log.info(`${count} package(s) to release`)
+      setOutput('status', 'proceed')
+    }
+  } catch (e) {
+    report.error(e, e.stack).setStatus('failure')
+    throw e
+  } finally {
+    await clean(ctx)
+  }
+
+  report.setStatus('success')
+}

--- /dev/null
+++ package/src/main/js/post/modes/verify.js
@@ -0,0 +1,41 @@
+import {glob, path, fs} from 'zx-extra'
+import {log} from '../log.js'
+import {PARCELS_DIR, verifyParcels} from '../parcel/index.js'
+import {CONTEXT_FILE, readContext} from '../depot/context.js'
+
+export const runVerify = async ({cwd, flags}) => {
+  const inputDir = typeof flags.verify === 'string' ? flags.verify : PARCELS_DIR
+  const contextPath = typeof flags.context === 'string' ? flags.context : path.resolve(cwd, CONTEXT_FILE)
+  const outputDir = path.resolve(cwd, PARCELS_DIR)
+
+  log.info(`verifying parcels in ${inputDir} against ${contextPath}`)
+
+  const context = await readContext(contextPath)
+  if (context.status !== 'proceed') {
+    log.info(`context status is '${context.status}', nothing to verify`)
+    return
+  }
+
+  const tars = await glob(path.join(inputDir, 'parcel.*.tar'))
+  if (!tars.length) {
+    log.info('no parcels to verify')
+    return
+  }
+
+  const {verified, errors} = verifyParcels(tars, context)
+
+  if (errors.length) {
+    for (const e of errors) log.error(`verify: ${e}`)
+    throw new Error(`parcel verification failed: ${errors.length} error(s)`)
+  }
+
+  if (path.resolve(inputDir) !== outputDir) {
+    await fs.ensureDir(outputDir)
+    for (const tarPath of verified) {
+      await fs.copy(tarPath, path.join(outputDir, path.basename(tarPath)))
+    }
+    log.info(`${verified.length} parcel(s) verified and copied to ${outputDir}`)
+  } else {
+    log.info(`${verified.length} parcel(s) verified in place`)
+  }
+}
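Taken together, the new modes split a release into a relay: runReceive analyzes the monorepo, runs preflight against remote tags and writes .zbr-context.json plus a proceed/skip status output (via setOutput from api/gh.js); runPack builds, tests and stages parcels, and with the pack flag writes a directive through buildDirective instead of publishing directly; runVerify checks the staged parcels against the context before they leave the build environment; and runDeliver picks the parcels up, takes the per-directive git-tag lock, pushes them out through the channels and signals a rebuild when a version conflict is detected.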