bulk-release 3.0.5 → 3.1.0
This diff shows the changes between publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only.
- package/CHANGELOG.md +9 -0
- package/README.md +146 -24
- package/package.json +1 -1
- package/src/main/js/config.js +3 -2
- package/src/main/js/post/api/gh.js +11 -0
- package/src/main/js/post/api/git.js +19 -0
- package/src/main/js/post/courier/channels/changelog.js +8 -1
- package/src/main/js/post/courier/channels/gh-pages.js +8 -1
- package/src/main/js/post/courier/channels/gh-release.js +11 -1
- package/src/main/js/post/courier/channels/git-tag.js +25 -0
- package/src/main/js/post/courier/channels/meta.js +8 -1
- package/src/main/js/post/courier/channels/npm.js +27 -14
- package/src/main/js/post/courier/directive.js +81 -0
- package/src/main/js/post/courier/index.js +130 -7
- package/src/main/js/post/courier/parcel.js +15 -2
- package/src/main/js/post/courier/semaphore.js +31 -0
- package/src/main/js/post/courier/seniority.js +19 -0
- package/src/main/js/post/depot/context.js +45 -0
- package/src/main/js/post/depot/reconcile.js +50 -0
- package/src/main/js/post/depot/steps/contextify.js +2 -1
- package/src/main/js/post/depot/steps/pack.js +3 -1
- package/src/main/js/post/depot/steps/publish.js +1 -11
- package/src/main/js/post/modes/deliver.js +24 -0
- package/src/main/js/post/modes/pack.js +71 -0
- package/src/main/js/post/modes/receive.js +71 -0
- package/src/main/js/post/modes/verify.js +83 -0
- package/src/main/js/post/release.js +12 -86
- package/src/main/js/post/tar.js +1 -1
- package/src/test/js/utils/mock.js +1 -1
|
@@ -1,6 +1,9 @@
|
|
|
1
1
|
import {$, tempy, within, path, semver, fs} from 'zx-extra'
|
|
2
2
|
import {unpackTar} from '../tar.js'
|
|
3
3
|
import {log} from '../log.js'
|
|
4
|
+
import {scanDirectives, invalidateOrphans} from './directive.js'
|
|
5
|
+
import {tryLock, unlock, signalRebuild} from './semaphore.js'
|
|
6
|
+
import gitTag from './channels/git-tag.js'
|
|
4
7
|
import meta from './channels/meta.js'
|
|
5
8
|
import npm from './channels/npm.js'
|
|
6
9
|
import ghRelease from './channels/gh-release.js'
|
|
@@ -10,8 +13,8 @@ import cmd from './channels/cmd.js'
|
|
|
10
13
|
|
|
11
14
|
export {buildParcels} from './parcel.js'
|
|
12
15
|
|
|
13
|
-
export const channels = {meta, npm, 'gh-release': ghRelease, 'gh-pages': ghPages, changelog, cmd}
|
|
14
|
-
export const defaultOrder = ['meta', 'npm', 'gh-release', 'gh-pages', 'changelog', 'cmd']
|
|
16
|
+
export const channels = {'git-tag': gitTag, meta, npm, 'gh-release': ghRelease, 'gh-pages': ghPages, changelog, cmd}
|
|
17
|
+
export const defaultOrder = ['git-tag', 'meta', 'npm', 'gh-release', 'gh-pages', 'changelog', 'cmd']
|
|
15
18
|
|
|
16
19
|
export const prepare = async (names, pkg) => {
|
|
17
20
|
for (const n of names) await channels[n]?.prepare?.(pkg)
|
|
@@ -38,9 +41,11 @@ export const resolveManifest = (manifest, env = process.env) => {
|
|
|
38
41
|
return resolved
|
|
39
42
|
}
|
|
40
43
|
|
|
44
|
+
const MARKERS = new Set(['released', 'skip', 'conflict', 'orphan'])
|
|
45
|
+
|
|
41
46
|
const openParcel = async (tarPath, env) => {
|
|
42
47
|
const content = await fs.readFile(tarPath, 'utf8').catch(() => null)
|
|
43
|
-
if (content
|
|
48
|
+
if (MARKERS.has(content)) return null
|
|
44
49
|
|
|
45
50
|
const destDir = tempy.temporaryDirectory()
|
|
46
51
|
const {manifest} = await unpackTar(tarPath, destDir)
|
|
@@ -72,8 +77,6 @@ const pool = async (tasks, concurrency, fn) => {
|
|
|
72
77
|
})
|
|
73
78
|
}
|
|
74
79
|
|
|
75
|
-
// Parcels grouped by package, sorted by semver asc (latest last).
|
|
76
|
-
// Groups run in parallel (concurrency-limited), entries within a group — sequential.
|
|
77
80
|
export const inspect = async (tars, env = process.env) => {
|
|
78
81
|
const parcels = []
|
|
79
82
|
const skipped = []
|
|
@@ -93,7 +96,6 @@ export const inspect = async (tars, env = process.env) => {
|
|
|
93
96
|
}
|
|
94
97
|
}
|
|
95
98
|
|
|
96
|
-
// group by package, sort by semver asc within each group
|
|
97
99
|
const groups = new Map()
|
|
98
100
|
for (const p of parcels) {
|
|
99
101
|
const key = p.resolved.name || p.resolved.channel
|
|
@@ -106,7 +108,9 @@ export const inspect = async (tars, env = process.env) => {
|
|
|
106
108
|
return {groups, skipped, total: tars.length, pending: parcels.length}
|
|
107
109
|
}
|
|
108
110
|
|
|
109
|
-
|
|
111
|
+
// --- Legacy deliver (no directive) ---
|
|
112
|
+
|
|
113
|
+
const deliverLegacy = async (tars, env, {concurrency, dryRun}) => {
|
|
110
114
|
const {groups, skipped, total, pending} = await inspect(tars, env)
|
|
111
115
|
|
|
112
116
|
for (const {file, reason, tarPath} of skipped) {
|
|
@@ -129,6 +133,7 @@ export const deliver = async (tars, env = process.env, {concurrency = 4, dryRun
|
|
|
129
133
|
await within(async () => {
|
|
130
134
|
$.scope = resolved.name || resolved.channel
|
|
131
135
|
await ch.run(resolved, destDir)
|
|
136
|
+
log.info(`${resolved.channel} ${resolved.version}`)
|
|
132
137
|
})
|
|
133
138
|
await fs.writeFile(tarPath, 'released')
|
|
134
139
|
entries.push(toEntry({resolved}))
|
|
@@ -137,3 +142,121 @@ export const deliver = async (tars, env = process.env, {concurrency = 4, dryRun
|
|
|
137
142
|
|
|
138
143
|
return {total, pending, delivered: entries.length, skipped: skipped.length, entries}
|
|
139
144
|
}
|
|
145
|
+
|
|
146
|
+
// --- Directive-aware deliver ---
|
|
147
|
+
|
|
148
|
+
// Delivers a single parcel tarball through its channel.
// Returns one of: 'already' | 'skip' | 'dryrun' | 'conflict' | 'duplicate' | 'ok'.
const deliverParcel = async (tarPath, channelName, pkgName, version, env, {dryRun}) => {
  const parcel = await openParcel(tarPath, env)

  // A null parcel means the tar was replaced by a marker (or is unreadable) — nothing to do.
  if (!parcel) return 'already'

  if (parcel.warn) {
    log.warn(`skipping ${parcel.warn}`)
    return 'skip'
  }
  if (dryRun) return 'dryrun'

  const {ch, resolved, destDir} = parcel
  const outcome = await within(async () => {
    $.scope = pkgName
    const result = await ch.run(resolved, destDir)
    log.info(`${channelName} ${version}`)
    return result
  })

  // On conflict the tar is left untouched so a later pass can retry/mark it.
  if (outcome === 'conflict') return 'conflict'

  // Replace the tar with a marker so re-runs treat it as already delivered.
  await fs.writeFile(tarPath, 'released')
  if (outcome === 'duplicate') return 'duplicate'
  return 'ok'
}
|
|
171
|
+
|
|
172
|
+
// Delivers all parcels referenced by a directive, package by package in queue
// order. Channels within one `deliver` step run in parallel; a conflict in any
// step aborts the remaining steps for that package and marks its undelivered
// parcels as 'conflict'.
// Returns {entries, conflicts, skipped}.
const deliverDirective = async (directive, tarMap, env, {dryRun}) => {
  const entries = []
  const conflicts = []
  const skipped = []

  for (const pkgName of directive.queue) {
    const pkg = directive.packages[pkgName]
    if (!pkg) continue

    let pkgConflict = false

    for (const step of pkg.deliver) {
      if (pkgConflict) break

      const results = await Promise.all(step.map(async (channelName) => {
        // Parcel names embed the channel as `.<channel>.` — see pack's naming scheme.
        const parcelName = (pkg.parcels || []).find(p => p.includes(`.${channelName}.`))
        const tarPath = parcelName && tarMap.get(parcelName)
        if (!tarPath) return 'missing'

        return deliverParcel(tarPath, channelName, pkgName, pkg.version, env, {dryRun})
      }))

      for (let i = 0; i < step.length; i++) {
        const r = results[i]
        if (r === 'skip') skipped.push({channelName: step[i], pkg: pkgName})
        // FIX: 'conflict' previously slipped through this filter and was counted
        // as a delivered entry, even though deliverParcel returns before writing
        // the 'released' marker on conflict. Exclude it here.
        else if (r !== 'missing' && r !== 'already' && r !== 'conflict') entries.push({channel: step[i], name: pkgName, version: pkg.version})
      }

      if (results.includes('conflict')) {
        pkgConflict = true
        conflicts.push(pkgName)
        // Invalidate the package's remaining parcels so nothing half-delivers later,
        // but never downgrade ones that already carry the 'released' marker.
        for (const p of pkg.parcels || []) {
          const tp = tarMap.get(p)
          if (!tp) continue
          const c = await fs.readFile(tp, 'utf8').catch(() => null)
          if (c !== 'released') await fs.writeFile(tp, 'conflict')
        }
      }
    }
  }

  return {entries, conflicts, skipped}
}
|
|
215
|
+
|
|
216
|
+
// --- Main entry point ---
|
|
217
|
+
|
|
218
|
+
// Main delivery entry point. If the parcels dir contains no directives, falls
// back to the legacy (directive-less) flow; otherwise processes each directive
// under a remote lock: orphan invalidation → channel delivery → marker write.
export const deliver = async (tars, env = process.env, {concurrency = 4, dryRun = false, cwd} = {}) => {
  // All tars are expected to live in one directory; directives are scanned there.
  const dir = tars.length ? path.dirname(tars[0]) : null
  const directives = dir ? await scanDirectives(dir) : []

  if (!directives.length) return deliverLegacy(tars, env, {concurrency, dryRun})

  // basename → full path, so directive parcel names can be resolved to files.
  const tarMap = new Map(tars.map(t => [path.basename(t), t]))
  const allEntries = []
  const allConflicts = []
  const allSkipped = []

  for (const directive of directives) {
    const gitRoot = cwd || dir
    // Remote tag acts as a distributed mutex — if another runner holds it, skip.
    if (!await tryLock(gitRoot, directive)) {
      log.info(`directive ${directive.sha.slice(0, 7)} locked, skipping`)
      continue
    }

    try {
      await invalidateOrphans(dir, directive)
      const {entries, conflicts, skipped} = await deliverDirective(directive, tarMap, env, {dryRun})
      allEntries.push(...entries)
      allConflicts.push(...conflicts)
      allSkipped.push(...skipped)

      if (conflicts.length) {
        // Mark the directive itself as conflicted and ask CI for a rebuild.
        await fs.writeFile(directive.tarPath, 'conflict')
        await signalRebuild(gitRoot, directive.sha)
      } else if (!skipped.length) {
        // Fully delivered — collapse the directive tar into a marker.
        await fs.writeFile(directive.tarPath, 'released')
      }
      // NOTE(review): when only skips occurred the directive tar is left as-is — confirm intended.
    } finally {
      // Always release the lock, even when delivery threw.
      await unlock(gitRoot, directive)
    }
  }

  return {
    total: tars.length,
    // NOTE(review): `pending` mirrors `delivered` here (both allEntries.length),
    // unlike the legacy path where pending counts parcels before delivery — confirm intended.
    pending: allEntries.length,
    delivered: allEntries.length,
    skipped: allSkipped.length,
    entries: allEntries,
    conflicts: allConflicts,
  }
}
|
|
@@ -9,6 +9,18 @@ const gitFields = (a, pkg) => ({
|
|
|
9
9
|
})
|
|
10
10
|
|
|
11
11
|
const entry = {
|
|
12
|
+
'git-tag': (pkg, ctx) => ({
|
|
13
|
+
channel: 'git-tag',
|
|
14
|
+
manifest: {
|
|
15
|
+
channel: 'git-tag',
|
|
16
|
+
name: pkg.name, version: pkg.version, tag: pkg.tag,
|
|
17
|
+
cwd: ctx.git.root,
|
|
18
|
+
gitCommitterName: '${{GIT_COMMITTER_NAME}}',
|
|
19
|
+
gitCommitterEmail: '${{GIT_COMMITTER_EMAIL}}',
|
|
20
|
+
},
|
|
21
|
+
files: [],
|
|
22
|
+
}),
|
|
23
|
+
|
|
12
24
|
npm: (pkg, ctx, a) => ({
|
|
13
25
|
channel: 'npm',
|
|
14
26
|
manifest: {
|
|
@@ -24,6 +36,7 @@ const entry = {
|
|
|
24
36
|
channel: 'gh-release',
|
|
25
37
|
manifest: {
|
|
26
38
|
channel: 'gh-release',
|
|
39
|
+
name: pkg.name, version: pkg.version,
|
|
27
40
|
tag: pkg.tag, repoHost: a.repoHost, repoName: a.repoName, releaseNotes: a.releaseNotes,
|
|
28
41
|
token: '${{GH_TOKEN}}', apiUrl: pkg.config.ghApiUrl,
|
|
29
42
|
assets: pkg.config.ghAssets ? [...pkg.config.ghAssets] : undefined,
|
|
@@ -35,7 +48,7 @@ const entry = {
|
|
|
35
48
|
const [branch = 'gh-pages', , to = '.', ..._msg] = asTuple(pkg.config.ghPages, ['branch', 'from', 'to', 'msg'])
|
|
36
49
|
return {
|
|
37
50
|
channel: 'gh-pages',
|
|
38
|
-
manifest: {channel: 'gh-pages', branch, to, msg: msgJoin(_msg, pkg, 'docs: update docs ${{name}} ${{version}}'), ...gitFields(a, pkg)},
|
|
51
|
+
manifest: {channel: 'gh-pages', name: pkg.name, version: pkg.version, branch, to, msg: msgJoin(_msg, pkg, 'docs: update docs ${{name}} ${{version}}'), ...gitFields(a, pkg)},
|
|
39
52
|
files: a.docsDir ? [{name: 'docs', source: a.docsDir}] : [],
|
|
40
53
|
}
|
|
41
54
|
},
|
|
@@ -44,7 +57,7 @@ const entry = {
|
|
|
44
57
|
const [branch = 'changelog', file = `${pkg.name.replace(/[^a-z0-9-]/ig, '')}-changelog.md`, ..._msg] = asTuple(pkg.config.changelog, ['branch', 'file', 'msg'])
|
|
45
58
|
return {
|
|
46
59
|
channel: 'changelog',
|
|
47
|
-
manifest: {channel: 'changelog', releaseNotes: a.releaseNotes, branch, file, msg: msgJoin(_msg, pkg, 'chore: update changelog ${{name}}'), ...gitFields(a, pkg)},
|
|
60
|
+
manifest: {channel: 'changelog', name: pkg.name, version: pkg.version, releaseNotes: a.releaseNotes, branch, file, msg: msgJoin(_msg, pkg, 'chore: update changelog ${{name}}'), ...gitFields(a, pkg)},
|
|
48
61
|
files: [],
|
|
49
62
|
}
|
|
50
63
|
},
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import {pushAnnotatedTag, deleteRemoteTag} from '../api/git.js'

// Remote-tag based locking: a pushed annotated tag serves as a distributed
// mutex for a delivery directive, plus a one-shot "rebuild" signal tag.

// Lock tag name derived from the directive's commit sha (short form).
const lockTag = ({sha}) => `zbr-deliver.${sha.slice(0, 7)}`

// Attempts to acquire the delivery lock by pushing an annotated tag whose
// message records what is being delivered. Returns false when the push is
// rejected (lock already held elsewhere).
export const tryLock = async (cwd, directive) => {
  const tag = lockTag(directive)
  const message = JSON.stringify({
    ts: directive.timestamp,
    sha: directive.sha,
    packages: directive.queue,
  })
  try {
    await pushAnnotatedTag(cwd, tag, message)
    return true
  } catch {
    return false
  }
}

// Releases the delivery lock by deleting the remote tag.
export const unlock = async (cwd, directive) => {
  await deleteRemoteTag(cwd, lockTag(directive))
}

// Publishes a rebuild signal for the given sha; a failed push means the
// signal already exists, which is fine.
export const signalRebuild = async (cwd, sha) => {
  try { await pushAnnotatedTag(cwd, `zbr-rebuild.${sha.slice(0, 7)}`, 'rebuild') } catch { /* already signaled */ }
}

// Claims (deletes) the rebuild signal tag; the caller inspects the result to
// detect whether another process consumed it first.
export const consumeRebuildSignal = async (cwd, sha) =>
  deleteRemoteTag(cwd, `zbr-rebuild.${sha.slice(0, 7)}`)
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
import {$, semver} from 'zx-extra'
import {parseTag} from '../depot/generators/tag.js'

// Checks the remote for any release tag of `name` whose version is greater
// than `version`. Returns false on empty/failed `ls-remote` output or when
// no higher-versioned tag is found.
export const hasHigherVersion = async (cwd, name, version) => {
  const output = (await $({cwd, nothrow: true})`git ls-remote --tags origin`).toString()
  if (!output) return false

  // ls-remote lines are `<sha>\t<ref>`; strip the refs/tags/ prefix.
  const refs = output
    .split('\n')
    .map(line => line.split('\t')[1]?.replace('refs/tags/', ''))
    .filter(Boolean)

  for (const ref of refs) {
    const parsed = parseTag(ref)
    if (!parsed || parsed.name !== name) continue
    try {
      if (semver.gt(parsed.version, version)) return true
    } catch { /* unparseable version, skip */ }
  }

  return false
}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import {fs, path} from 'zx-extra'
import {channels as channelRegistry} from '../courier/index.js'

const CONTEXT_FILE = '.zbr-context.json'

// Writes the release context JSON next to `cwd` and returns the file path.
export const writeContext = async (cwd, context) => {
  const filePath = path.resolve(cwd, CONTEXT_FILE)
  await fs.writeJson(filePath, context, {spaces: 2})
  return filePath
}

// Reads a previously written context; returns null when the file is
// missing or unreadable (deliberate best-effort).
export const readContext = async (cwd) => {
  const filePath = path.resolve(cwd, CONTEXT_FILE)
  try {
    return await fs.readJson(filePath)
  } catch {
    return null
  }
}

// Channels that would actually run for this pkg: registered, transport-enabled,
// snapshot-capable when snapshotting, and passing the channel's `when` predicate.
const getActiveChannels = (pkg, channelNames, snapshot) =>
  channelNames.filter(n => {
    const ch = channelRegistry[n]
    return ch && ch.transport !== false && (!snapshot || ch.snapshot) && ch.when(pkg)
  })

// Builds the serializable release context from analyzed packages: one entry
// per releasable queue member with its version, tag and active channels.
export const buildContext = (packages, queue, sha, {channelNames = [], snapshot = false} = {}) => {
  const pkgs = {}
  for (const name of queue) {
    const pkg = packages[name]
    // FIX: guard missing map entries (previously `pkg.releaseType` would throw
    // on an undefined pkg; the courier's deliverDirective guards the same case
    // with `if (!pkg) continue`), then skip non-releasable/skipped packages.
    if (!pkg || !pkg.releaseType || pkg.skipped) continue
    pkgs[name] = {
      version: pkg.version,
      tag: pkg.tag,
      channels: getActiveChannels(pkg, channelNames, snapshot),
    }
  }

  return {
    status: 'proceed',
    sha,
    sha7: sha.slice(0, 7),
    packages: pkgs,
  }
}
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import {$} from 'zx-extra'
import {log} from '../log.js'
import {getRemoteTagSha, clearTagsCache} from '../api/git.js'
import {formatTag} from './generators/tag.js'
import {resolvePkgVersion} from './steps/analyze.js'

// Heuristic match on git push failure text indicating the tag already exists remotely.
export const isTagConflict = (e) =>
  /already exists|updates were rejected|failed to push/i.test(e?.message || e?.stderr || '')

// Reconciles the locally resolved release tag against the remote before
// publishing. Returns 'ok' to proceed (possibly after re-resolving pkg.version/
// pkg.tag in place) or 'skip' when this commit should not release.
export const preflight = async (pkg, ctx) => {
  if (!pkg.tag) return 'ok'

  const cwd = ctx.git.root
  const remoteSha = await getRemoteTagSha(cwd, pkg.tag)
  if (!remoteSha) return 'ok'

  // tag exists on remote
  if (remoteSha === ctx.git.sha) {
    log.info(`preflight: ${pkg.tag} already exists for our commit, skipping`)
    return 'skip'
  }

  // need history for merge-base
  await $({cwd, nothrow: true})`git fetch --deepen=100`

  // Our commit is an ancestor of the tagged one → a newer commit already released this tag.
  const isOursOlder = await $({cwd, nothrow: true})`git merge-base --is-ancestor ${ctx.git.sha} ${remoteSha}`
  if (isOursOlder.exitCode === 0) {
    log.info(`preflight: ${pkg.tag} — we are older, skipping`)
    return 'skip'
  }

  // The tagged commit is our ancestor → we are ahead; refresh tags and bump past it.
  const isRemoteOlder = await $({cwd, nothrow: true})`git merge-base --is-ancestor ${remoteSha} ${ctx.git.sha}`
  if (isRemoteOlder.exitCode === 0) {
    log.info(`preflight: ${pkg.tag} — we are newer, re-resolving version`)
    await $({cwd})`git fetch origin --tags --force`
    clearTagsCache()

    const pre = ctx.flags.snapshot ? `-snap.${ctx.git.sha.slice(0, 7)}` : undefined
    // re-resolve: the fetched tags will give us a new latest version
    // NOTE(review): pkg.latest is presumably refreshed by clearTagsCache/lazy getters — confirm.
    const latestVersion = pkg.latest.tag?.version || pkg.manifest.version
    // Mutates pkg in place: version, manifest.version and tag are rewritten together.
    pkg.version = resolvePkgVersion(pkg.releaseType, latestVersion, pkg.manifest.version, pre)
    pkg.manifest.version = pkg.version
    pkg.tag = formatTag({name: pkg.name, version: pkg.version, format: pkg.config.tagFormat})
    return 'ok'
  }

  // diverged — anomaly
  log.warn(`preflight: ${pkg.tag} — diverged commits, skipping`)
  return 'skip'
}
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import {getPkgConfig} from '../../../config.js'
|
|
2
2
|
import {getLatest} from '../generators/meta.js'
|
|
3
|
-
import {getRoot, getSha} from '../../api/git.js'
|
|
3
|
+
import {getRoot, getSha, getCommitTimestamp} from '../../api/git.js'
|
|
4
4
|
|
|
5
5
|
/**
|
|
6
6
|
* Global release context — one per `run()` invocation.
|
|
@@ -44,6 +44,7 @@ export const contextify = async (pkg, ctx) => {
|
|
|
44
44
|
git: {
|
|
45
45
|
sha: await getSha(pkg.absPath),
|
|
46
46
|
root: await getRoot(pkg.absPath),
|
|
47
|
+
timestamp: await getCommitTimestamp(pkg.absPath),
|
|
47
48
|
},
|
|
48
49
|
}
|
|
49
50
|
}
|
|
@@ -7,6 +7,7 @@ import {formatReleaseNotes} from '../generators/notes.js'
|
|
|
7
7
|
import {ghPrepareAssets} from '../../api/gh.js'
|
|
8
8
|
import {packTar, hashFile} from '../../tar.js'
|
|
9
9
|
|
|
10
|
+
|
|
10
11
|
const filterActive = (names, pkg, {snapshot = false} = {}) =>
|
|
11
12
|
names.filter(n => {
|
|
12
13
|
const ch = channels[n]
|
|
@@ -49,7 +50,8 @@ export const pack = memoizeBy(async (pkg, ctx = pkg.ctx) => {
|
|
|
49
50
|
const tmpPath = path.join(stageDir, `_tmp.${channel}.tar`)
|
|
50
51
|
await packTar(tmpPath, manifest, files)
|
|
51
52
|
const hash = await hashFile(tmpPath)
|
|
52
|
-
const
|
|
53
|
+
const sha7 = ctx.git.sha.slice(0, 7)
|
|
54
|
+
const finalPath = path.join(stageDir, `parcel.${sha7}.${channel}.${pkg.tag}.${hash}.tar`)
|
|
53
55
|
await fs.rename(tmpPath, finalPath)
|
|
54
56
|
tars.push(finalPath)
|
|
55
57
|
}
|
|
@@ -1,28 +1,18 @@
|
|
|
1
1
|
import {memoizeBy} from '../../../util.js'
|
|
2
2
|
import {exec} from '../exec.js'
|
|
3
|
-
import {log} from '../../log.js'
|
|
4
3
|
import {deliver, channels, runChannel} from '../../courier/index.js'
|
|
5
|
-
import {pushTag} from '../../api/git.js'
|
|
6
4
|
|
|
7
5
|
export const publish = memoizeBy(async (pkg, ctx = pkg.ctx) => {
|
|
8
6
|
if (pkg.version !== pkg.manifest.version)
|
|
9
7
|
throw new Error('package.json version not synced')
|
|
10
8
|
|
|
11
9
|
const {run = exec, channels: channelNames = [], flags} = ctx
|
|
12
|
-
const snapshot = !!flags.snapshot
|
|
13
10
|
const {tars = []} = pkg
|
|
14
11
|
|
|
15
|
-
if (!snapshot) {
|
|
16
|
-
const {tag, config: {gitCommitterEmail, gitCommitterName}} = pkg
|
|
17
|
-
ctx.git.tag = tag
|
|
18
|
-
log.info(`push release tag ${tag}`)
|
|
19
|
-
await pushTag({cwd: ctx.git.root, tag, gitCommitterEmail, gitCommitterName})
|
|
20
|
-
}
|
|
21
|
-
|
|
22
12
|
await deliver(tars, ctx.env)
|
|
23
13
|
|
|
24
14
|
const cmd = channels.cmd
|
|
25
|
-
if (channelNames.includes('cmd') && cmd?.when(pkg) && (!snapshot || cmd.snapshot))
|
|
15
|
+
if (channelNames.includes('cmd') && cmd?.when(pkg) && (!flags.snapshot || cmd.snapshot))
|
|
26
16
|
await runChannel('cmd', pkg, run)
|
|
27
17
|
|
|
28
18
|
pkg.published = true
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import {$, glob, path} from 'zx-extra'
import {createReport, log} from '../log.js'
import {deliver} from '../courier/index.js'

const PARCELS_DIR = 'parcels'

// Deliver mode: collects parcel tarballs from the parcels directory and
// hands them to the courier, reporting progress along the way.
export const runDeliver = async ({env, flags}) => {
  // `--deliver <dir>` overrides the default parcels directory.
  const dir = typeof flags.deliver === 'string' ? flags.deliver : PARCELS_DIR
  const report = createReport({flags})

  $.memo = new Map()
  $.report = report

  report.setStatus('inspecting')

  const tars = await glob(path.join(dir, 'parcel.*.tar'))
  if (!tars.length) {
    return report.setStatus('success').log(`no parcels in ${dir}`)
  }

  report.setStatus('delivering').log(`parcels: ${tars.length}`)

  const result = await deliver(tars, env, {dryRun: flags.dryRun})

  report.set('delivery', result).setStatus('success')
  log.info(`done: ${result.delivered} delivered, ${result.skipped} skipped`)
}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import {$, within, path} from 'zx-extra'

import {log} from '../log.js'
import {traverseQueue} from '../depot/deps.js'
import {contextify} from '../depot/steps/contextify.js'
import {analyze} from '../depot/steps/analyze.js'
import {build} from '../depot/steps/build.js'
import {pack} from '../depot/steps/pack.js'
import {publish} from '../depot/steps/publish.js'
import {clean} from '../depot/steps/clean.js'
import {test} from '../depot/steps/test.js'
import {preflight} from '../depot/reconcile.js'
import {buildDirective} from '../courier/directive.js'

const PARCELS_DIR = 'parcels'

// Pack mode: analyze → preflight → build → test → pack (→ publish) each
// package in dependency-queue order; with --pack, emit a delivery directive
// for the packed parcels instead of publishing directly.
export const runPack = async ({cwd, env, flags}, ctx) => {
  const {report, packages, queue, prev} = ctx

  // Runs cb for each package in queue order, scoped ($.scope) and contextified.
  const forEachPkg = (cb) => traverseQueue({queue, prev, cb: (name) => within(async () => {
    $.scope = name
    await contextify(packages[name], ctx)
    return cb(packages[name])
  })})

  report
    .log('queue:', queue)
    .log('graphs', ctx.graphs)

  try {
    // Pass 1: analysis only — resolves version/releaseType/tag per package.
    await forEachPkg(async (pkg) => {
      report.setStatus('analyzing', pkg.name)
      await analyze(pkg)
      report.set({
        config: pkg.config,
        version: pkg.version,
        prevVersion: pkg.latest.tag?.version || pkg.manifest.version,
        releaseType: pkg.releaseType,
        tag: pkg.tag,
      }, pkg.name)
    })

    report.setStatus('pending')

    // Pass 2: the actual release pipeline per package.
    const packed = []
    await forEachPkg(async (pkg) => {
      // Nothing to release, or preflight says another runner beat us to it.
      if (!pkg.releaseType) { pkg.skipped = true; return report.setStatus('skipped', pkg.name) }
      if (await preflight(pkg, pkg.ctx) === 'skip') { pkg.skipped = true; return report.setStatus('skipped', pkg.name) }
      if (flags.build !== false) { report.setStatus('building', pkg.name); await build(pkg) }
      if (flags.test !== false) { report.setStatus('testing', pkg.name); await test(pkg) }
      // Dry-run / publish disabled: stop after build+test, before any side effects.
      if (flags.dryRun || flags.publish === false) return report.setStatus('success', pkg.name)

      report.setStatus('packing', pkg.name); await pack(pkg)
      // --pack: defer publishing; collect for the directive below.
      if (flags.pack) { packed.push(pkg); return report.setStatus('packed', pkg.name) }

      report.setStatus('publishing', pkg.name); await publish(pkg)
      report.setStatus('success', pkg.name)
    })

    // Emit a delivery directive describing all packed parcels.
    if (flags.pack && packed.length) {
      const outputDir = path.resolve(ctx.cwd, typeof flags.pack === 'string' ? flags.pack : PARCELS_DIR)
      await buildDirective(ctx, packed, outputDir)
    }
  } catch (e) {
    report.error(e, e.stack).set('error', e).setStatus('failure')
    throw e
  } finally {
    // Cleanup runs on success and failure alike.
    await clean(ctx)
  }
  report.setStatus('success').log('Great success!')
}
|
|
@@ -0,0 +1,71 @@
|
|
|
1
|
+
import {$, within} from 'zx-extra'

import {log} from '../log.js'
import {traverseQueue} from '../depot/deps.js'
import {contextify} from '../depot/steps/contextify.js'
import {analyze} from '../depot/steps/analyze.js'
import {clean} from '../depot/steps/clean.js'
import {preflight} from '../depot/reconcile.js'
import {consumeRebuildSignal} from '../courier/semaphore.js'
import {writeContext, buildContext} from '../depot/context.js'
import {getSha} from '../api/git.js'
import {setOutput, isRebuildTrigger} from '../api/gh.js'

// Receive mode: analyzes the repo, writes a .zbr-context.json describing
// what should be released, and sets a CI output of 'proceed' or 'skip'.
// When triggered by a rebuild signal, first claims (consumes) that signal —
// losing the claim to another runner means skipping entirely.
export const runReceive = async ({cwd, env, flags}, ctx) => {
  const {report, packages, queue, prev} = ctx

  const sha = await getSha(cwd)
  const sha7 = sha.slice(0, 7)

  if (isRebuildTrigger(env)) {
    const result = await consumeRebuildSignal(cwd, sha)
    // Deleting the signal tag failed because it's already gone → another
    // process claimed the rebuild; bail out with a skip context.
    if (result?.exitCode !== 0 && result?.stderr?.includes('remote ref does not exist')) {
      log.info(`rebuild signal already consumed by another process`)
      await writeContext(cwd, {status: 'skip', reason: 'rebuild claimed by another process'})
      setOutput('status', 'skip')
      return report.setStatus('success')
    }
    log.info(`consumed rebuild signal for ${sha7}`)
  }

  // Runs cb for each package in queue order, scoped ($.scope) and contextified.
  const forEachPkg = (cb) => traverseQueue({queue, prev, cb: (name) => within(async () => {
    $.scope = name
    await contextify(packages[name], ctx)
    return cb(packages[name])
  })})

  try {
    // Pass 1: resolve release metadata for every package.
    await forEachPkg(async (pkg) => {
      report.setStatus('analyzing', pkg.name)
      await analyze(pkg)
    })

    // Pass 2: mark packages that should not release (no bump, or preflight skip).
    await forEachPkg(async (pkg) => {
      if (!pkg.releaseType) { pkg.skipped = true; return }
      if (await preflight(pkg, pkg.ctx) === 'skip') { pkg.skipped = true; return }
    })

    const context = buildContext(packages, queue, sha, {
      channelNames: ctx.channels,
      snapshot: !!flags.snapshot,
    })
    await writeContext(cwd, context)

    const count = Object.keys(context.packages).length
    if (count === 0) {
      log.info('nothing to release')
      // NOTE(review): this overwrites the full context written just above
      // with a skip stub — confirm the double write is intended.
      await writeContext(cwd, {status: 'skip', reason: 'nothing to release'})
      setOutput('status', 'skip')
    } else {
      log.info(`${count} package(s) to release`)
      setOutput('status', 'proceed')
    }
  } catch (e) {
    report.error(e, e.stack).setStatus('failure')
    throw e
  } finally {
    // Cleanup runs on success and failure alike.
    await clean(ctx)
  }

  report.setStatus('success')
}
|