@take-out/scripts 0.0.93 → 0.0.95
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +6 -4
- package/src/build-initial.ts +76 -81
- package/src/clean.ts +21 -21
- package/src/cmd.ts +85 -0
- package/src/dev-tunnel.ts +141 -159
- package/src/ensure-port.ts +62 -70
- package/src/ensure-tunnel.ts +13 -9
- package/src/env-pull.ts +49 -47
- package/src/env-update.ts +140 -175
- package/src/exec-with-env.ts +14 -11
- package/src/helpers/args.ts +4 -4
- package/src/helpers/get-test-env.ts +5 -3
- package/src/node-version-check.ts +9 -5
- package/src/release.ts +429 -404
- package/src/sst-get-environment.ts +5 -1
- package/src/typecheck.ts +15 -16
- package/src/up.ts +361 -374
- package/src/update-changelog.ts +39 -43
- package/src/update-local-env.ts +139 -158
- package/src/wait-for-dev.ts +21 -20
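
The most visible structural change in this release is the new `package/src/cmd.ts` helper: `release.ts` now declares its flags and receives `$`, `run`, `path`, and `os` through a `cmd` tagged-template builder instead of importing them at the top of the module. The snippet below is a minimal usage sketch inferred only from how `release.ts` calls it in the diff below; the actual `cmd.ts` API is not shown in this diff and may differ.

```ts
// Sketch only — shape inferred from release.ts (see the release.ts diff below), not from cmd.ts itself.
import { cmd } from './cmd'

await cmd`bump the package version` // description of the script
  .args(`--patch boolean --dry-run boolean`) // flag spec; flags arrive camelCased (args.dryRun)
  .run(async ({ args, $, run, path, os }) => {
    // helpers are injected rather than imported: Bun's $, a run() wrapper, node:path, node:os
    const tmp = path.join(os.tmpdir(), 'example')
    if (args.dryRun) {
      console.info('[dry-run] would create', tmp)
      return
    }
    await $`mkdir -p ${tmp}`
    await run(`git status --porcelain`)
  })
```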
package/src/release.ts
CHANGED
@@ -1,483 +1,508 @@
-
-import path, { join } from 'node:path'
+#!/usr/bin/env bun
 
-
-// you can delete this from your own app
-
-import { run } from '@take-out/scripts/helpers/run'
-import { $ } from 'bun'
-import fs, { writeJSON } from 'fs-extra'
-import pMap from 'p-map'
+import { cmd } from './cmd'
 
 // avoid emitter error
 process.setMaxListeners(50)
 process.stderr.setMaxListeners(50)
 process.stdout.setMaxListeners(50)
 
-
-
-
-
-
-
-
-
-const
-
-const
-
-
-const
-const
-
-
-
-
-
-
-
-const
-const
-const
-const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+await cmd`publish takeout packages to npm`
+.args(
+`--patch boolean --minor boolean --major boolean --canary boolean
+--rerun boolean --republish boolean --finish boolean --skip-finish boolean
+--dry-run boolean --skip-test boolean --skip-build boolean --skip-version boolean
+--dirty boolean --tamagui-git-user boolean --sync-on-zero boolean --skip-on-zero-sync boolean`
+)
+.run(async ({ args, $, run, path, os }) => {
+const fs = (await import('fs-extra')).default
+const { writeJSON } = await import('fs-extra')
+const pMap = (await import('p-map')).default
+
+// on-zero sync paths
+const onZeroGithub = path.join(os.homedir(), 'github', 'on-zero')
+const onZeroTakeout = path.join(process.cwd(), 'packages', 'on-zero')
+
+// for failed publishes that need to re-run
+const reRun = args.rerun
+const rePublish = reRun || args.republish
+const finish = args.finish
+const skipFinish = args.skipFinish
+
+const canary = args.canary
+const skipVersion = finish || rePublish || args.skipVersion
+const shouldMajor = args.major
+const shouldMinor = args.minor
+const shouldPatch = args.patch
+const dirty = finish || args.dirty
+const skipTest =
+finish || rePublish || args.skipTest || process.argv.includes('--skip-tests')
+const skipBuild = finish || rePublish || args.skipBuild
+const dryRun = args.dryRun
+const tamaguiGitUser = args.tamaguiGitUser
+const syncOnZeroOnly = args.syncOnZero
+const skipOnZeroSync = args.skipOnZeroSync
+
+async function syncOnZeroIn() {
+if (!(await fs.pathExists(onZeroGithub))) return
+
+// check if there are commits after the last sync commit
+const log = (
+await $`git -C ${onZeroGithub} log --oneline --format=%s`.text()
+).trim()
+const commits = log.split('\n')
+const lastSyncIdx = commits.findIndex((c) => c.startsWith('sync: from takeout'))
+
+// no commits before sync, or first commit is a sync = nothing to pull in
+if (lastSyncIdx <= 0) {
+console.info(' ← on-zero: no new github commits to sync in')
+return
+}
 
-
-
-
+const newCommits = commits
+.slice(0, lastSyncIdx)
+.filter((c) => !c.match(/^v\d+\.\d+\.\d+/))
+if (!newCommits.length) {
+console.info(' ← on-zero: no new github commits to sync in')
+return
+}
 
-
-
-const patchAndCanary = curVersion.split('.')[2]
-const [curPatch] = patchAndCanary.split('-')
-const patchVersion = shouldPatch ? +curPatch + 1 : 0
-const minorVersion = curMinor + (shouldMinor ? 1 : 0)
-const majorVersion = curMajor + (shouldMajor ? 1 : 0)
-const next = `${majorVersion}.${minorVersion}.${patchVersion}`
-
-return next
-})()
-
-if (!skipVersion) {
-console.info(` 🚀 Releasing:`)
-console.info(' Current:', curVersion)
-console.info(` Next: ${nextVersion}`)
-}
-
-try {
-// sync on-zero IN (before release)
-if (!skipOnZeroSync && !finish && !rePublish) {
-await syncOnZeroIn()
-}
+console.info(` ← on-zero: syncing ${newCommits.length} commits from github`)
+for (const c of newCommits) console.info(`  ${c}`)
 
-
-
-
-if ((await run(`git rev-parse --abbrev-ref HEAD`)).stdout.trim() !== 'main') {
-throw new Error(`Not on main`)
+if (dryRun) {
+console.info(' [dry-run] would copy src from github')
+return
 }
-
-
+
+await fs.copy(path.join(onZeroGithub, 'src'), path.join(onZeroTakeout, 'src'), {
+overwrite: true,
+})
+
+const status = (await $`git status --porcelain`.text()).trim()
+if (status) {
+await $`git add packages/on-zero`
+await $`git commit -m "on-zero: sync from github"`
 }
 }
 
-
-
-await loadPackageJsons(packagePaths)
+async function syncOnZeroOut(version: string) {
+if (!(await fs.pathExists(onZeroGithub))) return
 
-
-
-
+// copy src files from takeout to github
+await fs.copy(path.join(onZeroTakeout, 'src'), path.join(onZeroGithub, 'src'), {
+overwrite: true,
+})
+await fs.copy(
+path.join(onZeroTakeout, 'cli.cjs'),
+path.join(onZeroGithub, 'cli.cjs')
+)
+await fs.copy(
+path.join(onZeroTakeout, 'tsconfig.json'),
+path.join(onZeroGithub, 'tsconfig.json')
+)
+
+// update package.json preserving github-specific fields
+const takeoutPkg = await fs.readJSON(path.join(onZeroTakeout, 'package.json'))
+const githubPkg = await fs.readJSON(path.join(onZeroGithub, 'package.json'))
+const convertDeps = (deps: Record<string, string>) =>
+Object.fromEntries(
+Object.entries(deps || {}).map(([k, v]) => [
+k,
+v.startsWith('workspace:') ? `^${version}` : v,
+])
+)
+await fs.writeJSON(
+path.join(onZeroGithub, 'package.json'),
+{
+...takeoutPkg,
+files: githubPkg.files,
+repository: githubPkg.repository,
+homepage: githubPkg.homepage,
+bugs: githubPkg.bugs,
+dependencies: convertDeps(takeoutPkg.dependencies),
+devDependencies: convertDeps(takeoutPkg.devDependencies),
+},
+{ spaces: 2 }
 )
+
+// only commit if there are actual changes
+const status = (await $`git -C ${onZeroGithub} status --porcelain`.text()).trim()
+if (!status) return
+
+console.info(' → on-zero: syncing out to github')
+
+if (dryRun) {
+console.info(` [dry-run] would push: sync: from takeout v${version}`)
+await $`git -C ${onZeroGithub} checkout -- .`
+return
+}
+
+await $`git -C ${onZeroGithub} add -A`
+await $`git -C ${onZeroGithub} commit -m ${'sync: from takeout v' + version}`
+await $`git -C ${onZeroGithub} push origin main`
+}
+
+// sync on-zero: copy src from github to takeout, then takeout to github after release
+async function syncOnZero() {
+if (!(await fs.pathExists(onZeroGithub))) return
+const pkg = await fs.readJSON(path.join(onZeroTakeout, 'package.json'))
+await syncOnZeroIn()
+await syncOnZeroOut(pkg.version)
 }
 
-async function
-
-
-
-
-
-
-
+async function getWorkspacePackages() {
+// read workspaces from root package.json
+const rootPackageJson = await fs.readJSON(path.join(process.cwd(), 'package.json'))
+const workspaceGlobs = rootPackageJson.workspaces || []
+
+// resolve workspace paths
+const packagePaths: { name: string; location: string }[] = []
+for (const glob of workspaceGlobs) {
+if (glob.includes('*')) {
+// handle glob patterns like "./packages/*"
+const baseDir = glob.replace('/*', '')
+const fullPath = path.join(process.cwd(), baseDir)
+if (await fs.pathExists(fullPath)) {
+const dirs = await fs.readdir(fullPath)
+for (const dir of dirs) {
+const pkgPath = path.join(fullPath, dir, 'package.json')
+if (await fs.pathExists(pkgPath)) {
+const pkg = await fs.readJSON(pkgPath)
+packagePaths.push({
+name: pkg.name,
+location: path.join(baseDir, dir),
+})
+}
 }
 }
-}
+} else {
+// handle direct paths like "./src/start"
+const pkgPath = path.join(process.cwd(), glob, 'package.json')
+if (await fs.pathExists(pkgPath)) {
+const pkg = await fs.readJSON(pkgPath)
+packagePaths.push({
+name: pkg.name,
+location: glob,
+})
+}
+}
+}
+
+return packagePaths
+}
+
+async function loadPackageJsons(packagePaths: { name: string; location: string }[]) {
+const allPackageJsons = await Promise.all(
+packagePaths
+.filter((i) => i.location !== '.' && !i.name.startsWith('@takeout'))
+.map(async ({ name, location }) => {
+const cwd = path.join(process.cwd(), location)
+const json = await fs.readJSON(path.join(cwd, 'package.json'))
+return {
+name,
+cwd,
+json,
+path: path.join(cwd, 'package.json'),
+directory: location,
+}
+})
+)
+
+const publishablePackages = allPackageJsons.filter(
+(x) => !x.json.skipPublish && !x.json.private
 )
+
+return { allPackageJsons, publishablePackages }
 }
 
-
-
-
+// handle --sync-on-zero standalone mode
+if (syncOnZeroOnly) {
+try {
+await syncOnZero()
+} catch (err) {
+console.error('sync failed:', err)
+process.exit(1)
+}
+return
 }
 
-
+// main release flow
+const curVersion = fs.readJSONSync('./packages/helpers/package.json').version
 
-
-
+// must specify version (unless republishing):
+if (!rePublish && !skipVersion && !shouldPatch && !shouldMinor && !shouldMajor) {
+console.error(`Must specify one of --patch, --minor, or --major`)
+process.exit(1)
 }
 
-
-
-
-
+const nextVersion = (() => {
+if (rePublish || skipVersion) {
+return curVersion
+}
+
+if (canary) {
+return `${curVersion.replace(/(-\d+)+$/, '')}-${Date.now()}`
+}
+
+const curMajor = +curVersion.split('.')[0] || 0
+const curMinor = +curVersion.split('.')[1] || 0
+const patchAndCanary = curVersion.split('.')[2]
+const [curPatch] = patchAndCanary.split('-')
+const patchVersion = shouldPatch ? +curPatch + 1 : 0
+const minorVersion = curMinor + (shouldMinor ? 1 : 0)
+const majorVersion = curMajor + (shouldMajor ? 1 : 0)
+const next = `${majorVersion}.${minorVersion}.${patchVersion}`
+
+return next
+})()
+
+if (!skipVersion) {
+console.info(` 🚀 Releasing:`)
+console.info(' Current:', curVersion)
+console.info(` Next: ${nextVersion}`)
 }
 
-
-
+try {
+// sync on-zero IN (before release)
+if (!skipOnZeroSync && !finish && !rePublish) {
+await syncOnZeroIn()
+}
 
-
-
-
-
-
+// ensure we are up to date
+// ensure we are on main
+if (!canary) {
+if ((await run(`git rev-parse --abbrev-ref HEAD`)).stdout.trim() !== 'main') {
+throw new Error(`Not on main`)
+}
+if (!dirty && !rePublish && !finish) {
+await run(`git pull --rebase origin main`)
+}
 }
-}
 
-
-const
-
-
+const packagePaths = await getWorkspacePackages()
+const { allPackageJsons, publishablePackages: packageJsons } =
+await loadPackageJsons(packagePaths)
+
+if (!finish) {
+console.info(
+`Publishing in order:\n\n${packageJsons.map((x) => x.name).join('\n')}`
+)
 }
-}
 
-
-
-
-
-
-
-
-
-'dependencies',
-'devDependencies',
-'optionalDependencies',
-'peerDependencies',
-]) {
-const nextDeps = next[field]
-if (!nextDeps) continue
-for (const depName in nextDeps) {
-if (allPackageJsons.some((p) => p.name === depName)) {
-nextDeps[depName] = nextVersion
+async function checkDistDirs() {
+await Promise.all(
+packageJsons.map(async ({ cwd, json }) => {
+const distDir = path.join(cwd, 'dist')
+if (json.scripts?.build) {
+if (!(await fs.pathExists(distDir))) {
+console.warn('no dist dir!', distDir)
+process.exit(1)
 }
 }
-}
+})
+)
+}
 
-
-
-
-
+if (tamaguiGitUser) {
+await run(`git config --global user.name 'Tamagui'`)
+await run(`git config --global user.email 'tamagui@users.noreply.github.com`)
+}
 
-
-await run(`git diff`)
-}
+console.info('install and build')
 
-
-
-
-
-await pMap(
-packageJsons,
-async ({ name, cwd, json }) => {
-const publishOptions = [canary && `--tag canary`, dryRun && `--dry-run`]
-.filter(Boolean)
-.join(' ')
-const tgzPath = join(packDir, `${name.replace('/', '-')}.tgz`)
-
-// pack with bun (properly converts workspace:* to versions)
-// use swap-exports for packages with build scripts, otherwise just pack
-if (json.scripts?.build) {
-await run(
-`bun run build --swap-exports -- bun pm pack --filename ${tgzPath}`,
-{
-cwd,
-silent: true,
-}
-)
-} else {
-await run(`bun pm pack --filename ${tgzPath}`, {
-cwd,
-silent: true,
-})
-}
+if (!rePublish && !finish) {
+await run(`bun install`)
+}
 
-
-
-
-
-
+if (!skipBuild && !finish) {
+await run(`bun clean`)
+await run(`bun run build`)
+await checkDistDirs()
+}
 
-
-
-
-
+if (!finish) {
+console.info('run checks')
+
+if (!skipTest) {
+await run(`bun lint`)
+await run(`bun check:all`)
+// only in packages
+// await run(`bun test`)
 }
-
+}
 
-
+if (!dirty && !dryRun && !rePublish) {
+const out = await run(`git status --porcelain`)
+if (out.stdout) {
+throw new Error(`Has unsaved git changes: ${out.stdout}`)
+}
+}
+
+// snapshot workspace:* deps before mutation (shallow copy mutates originals)
+const workspaceDeps = new Map<string, Record<string, Record<string, string>>>()
+for (const { json, path: pkgPath } of allPackageJsons) {
+const deps: Record<string, Record<string, string>> = {}
+for (const field of [
+'dependencies',
+'devDependencies',
+'optionalDependencies',
+'peerDependencies',
+]) {
+if (!json[field]) continue
+for (const depName in json[field]) {
+if (json[field][depName].startsWith('workspace:')) {
+deps[field] ??= {}
+deps[field][depName] = json[field][depName]
+}
+}
+}
+if (Object.keys(deps).length) workspaceDeps.set(pkgPath, deps)
+}
 
-
-if (!dryRun) {
+if (!skipVersion && !finish) {
 await Promise.all(
-allPackageJsons.map(async ({ json, path }) => {
-const
+allPackageJsons.map(async ({ json, path: pkgPath }) => {
+const next = { ...json }
+
+next.version = nextVersion
+
 for (const field of [
 'dependencies',
 'devDependencies',
 'optionalDependencies',
 'peerDependencies',
 ]) {
-const
-
-
-
-
-currentDeps[depName] = origDeps[depName]
+const nextDeps = next[field]
+if (!nextDeps) continue
+for (const depName in nextDeps) {
+if (allPackageJsons.some((p) => p.name === depName)) {
+nextDeps[depName] = nextVersion
 }
 }
 }
-
+
+await writeJSON(pkgPath, next, { spaces: 2 })
 })
 )
 }
 
-
-
-await run(`git checkout -- packages/*/package.json`, { silent: true })
-console.info('Reverted version changes\n')
+if (!finish && !rePublish) {
+await run(`git diff`)
 }
-}
 
-if (!skipFinish && !dryRun) {
-// then git tag, commit, push
 if (!finish) {
-
-
-
-
-
-
-
-
-
-
-
-
+const packDir = path.join(os.tmpdir(), `takeout-release-${nextVersion}`)
+await fs.ensureDir(packDir)
+
+await pMap(
+packageJsons,
+async ({ name, cwd, json }) => {
+const publishOptions = [canary && `--tag canary`, dryRun && `--dry-run`]
+.filter(Boolean)
+.join(' ')
+const tgzPath = path.join(packDir, `${name.replace('/', '-')}.tgz`)
+
+// pack with bun (properly converts workspace:* to versions)
+// use swap-exports for packages with build scripts, otherwise just pack
+if (json.scripts?.build) {
+await run(
+`bun run build --swap-exports -- bun pm pack --filename ${tgzPath}`,
+{
+cwd,
+silent: true,
+}
+)
+} else {
+await run(`bun pm pack --filename ${tgzPath}`, {
+cwd,
+silent: true,
+})
+}
 
-
-await run(`
-
+// publish the tgz directly
+await run(`npm publish ${tgzPath} ${publishOptions}`.trim(), {
+cwd: packDir,
+silent: true,
+})
 
-
-
-
+console.info(`${dryRun ? '[dry-run] ' : ''}Published ${name}`)
+},
+{
+concurrency: 15,
 }
+)
 
-
-
-
-
+console.info(`✅ ${dryRun ? '[dry-run] ' : ''}Published\n`)
+
+// restore workspace:* protocols after publishing
+if (!dryRun) {
+await Promise.all(
+allPackageJsons.map(async ({ path: pkgPath }) => {
+const saved = workspaceDeps.get(pkgPath)
+if (!saved) return
+const current = await fs.readJSON(pkgPath)
+for (const field in saved) {
+if (!current[field]) continue
+for (const depName in saved[field]) {
+current[field][depName] = saved[field][depName]
+}
+}
+await writeJSON(pkgPath, current, { spaces: 2 })
+})
+)
+}
 
-
+// revert version changes after dry-run
+if (dryRun) {
+await run(`git checkout -- packages/*/package.json`, { silent: true })
+console.info('Reverted version changes\n')
 }
 }
 
-
-
-
-
-
+if (!skipFinish && !dryRun) {
+// then git tag, commit, push
+if (!finish) {
+await run(`bun install`)
+}
+const tagPrefix = canary ? 'canary' : 'v'
+const gitTag = `${tagPrefix}${nextVersion}`
 
-
-} catch (err) {
-console.info('\nError:\n', err)
-process.exit(1)
-}
-}
-
-// sync on-zero: copy src from github to takeout, then takeout to github after release
-async function syncOnZero() {
-if (!(await fs.pathExists(onZeroGithub))) return
-const pkg = await fs.readJSON(join(onZeroTakeout, 'package.json'))
-await syncOnZeroIn()
-await syncOnZeroOut(pkg.version)
-}
-
-async function syncOnZeroIn() {
-if (!(await fs.pathExists(onZeroGithub))) return
-
-// check if there are commits after the last sync commit
-const log = (await $`git -C ${onZeroGithub} log --oneline --format=%s`.text()).trim()
-const commits = log.split('\n')
-const lastSyncIdx = commits.findIndex((c) => c.startsWith('sync: from takeout'))
-
-// no commits before sync, or first commit is a sync = nothing to pull in
-if (lastSyncIdx <= 0) {
-console.info(' ← on-zero: no new github commits to sync in')
-return
-}
-
-const newCommits = commits
-.slice(0, lastSyncIdx)
-.filter((c) => !c.match(/^v\d+\.\d+\.\d+/))
-if (!newCommits.length) {
-console.info(' ← on-zero: no new github commits to sync in')
-return
-}
-
-console.info(` ← on-zero: syncing ${newCommits.length} commits from github`)
-for (const c of newCommits) console.info(`  ${c}`)
-
-if (dryRun) {
-console.info(' [dry-run] would copy src from github')
-return
-}
-
-await fs.copy(join(onZeroGithub, 'src'), join(onZeroTakeout, 'src'), {
-overwrite: true,
-})
+await finishAndCommit()
 
-
-
-
-await $`git commit -m "on-zero: sync from github"`
-}
-}
+async function finishAndCommit(cwd = process.cwd()) {
+if (!rePublish || reRun || finish) {
+await run(`git add -A`, { cwd })
 
-
-if (!(await fs.pathExists(onZeroGithub))) return
+await run(`git commit -m ${gitTag}`, { cwd })
 
-
-
-
-})
-await fs.copy(join(onZeroTakeout, 'cli.cjs'), join(onZeroGithub, 'cli.cjs'))
-await fs.copy(join(onZeroTakeout, 'tsconfig.json'), join(onZeroGithub, 'tsconfig.json'))
-
-// update package.json preserving github-specific fields
-const takeoutPkg = await fs.readJSON(join(onZeroTakeout, 'package.json'))
-const githubPkg = await fs.readJSON(join(onZeroGithub, 'package.json'))
-const convertDeps = (deps: Record<string, string>) =>
-Object.fromEntries(
-Object.entries(deps || {}).map(([k, v]) => [
-k,
-v.startsWith('workspace:') ? `^${version}` : v,
-])
-)
-await fs.writeJSON(
-join(onZeroGithub, 'package.json'),
-{
-...takeoutPkg,
-files: githubPkg.files,
-repository: githubPkg.repository,
-homepage: githubPkg.homepage,
-bugs: githubPkg.bugs,
-dependencies: convertDeps(takeoutPkg.dependencies),
-devDependencies: convertDeps(takeoutPkg.devDependencies),
-},
-{ spaces: 2 }
-)
+if (!canary) {
+await run(`git tag ${gitTag}`, { cwd })
+}
 
-
-
-
-
-
-
-
-
-
-
-
-
-await $`git -C ${onZeroGithub} add -A`
-await $`git -C ${onZeroGithub} commit -m ${'sync: from takeout v' + version}`
-await $`git -C ${onZeroGithub} push origin main`
-}
-
-async function getWorkspacePackages() {
-// read workspaces from root package.json
-const rootPackageJson = await fs.readJSON(join(process.cwd(), 'package.json'))
-const workspaceGlobs = rootPackageJson.workspaces || []
-
-// resolve workspace paths
-const packagePaths: { name: string; location: string }[] = []
-for (const glob of workspaceGlobs) {
-if (glob.includes('*')) {
-// handle glob patterns like "./packages/*"
-const baseDir = glob.replace('/*', '')
-const fullPath = join(process.cwd(), baseDir)
-if (await fs.pathExists(fullPath)) {
-const dirs = await fs.readdir(fullPath)
-for (const dir of dirs) {
-const pkgPath = join(fullPath, dir, 'package.json')
-if (await fs.pathExists(pkgPath)) {
-const pkg = await fs.readJSON(pkgPath)
-packagePaths.push({
-name: pkg.name,
-location: join(baseDir, dir),
-})
+if (!dirty) {
+// pull once more before pushing so if there was a push in interim we get it
+await run(`git pull --rebase origin HEAD`, { cwd })
+}
+
+await run(`git push origin head`, { cwd })
+if (!canary) {
+await run(`git push origin ${gitTag}`, { cwd })
+}
+
+console.info(`✅ Pushed and versioned\n`)
 }
 }
-}
-} else {
-// handle direct paths like "./src/start"
-const pkgPath = join(process.cwd(), glob, 'package.json')
-if (await fs.pathExists(pkgPath)) {
-const pkg = await fs.readJSON(pkgPath)
-packagePaths.push({
-name: pkg.name,
-location: glob,
-})
-}
-}
-}
-
-return packagePaths
-}
-
-async function loadPackageJsons(packagePaths: { name: string; location: string }[]) {
-const allPackageJsons = await Promise.all(
-packagePaths
-.filter((i) => i.location !== '.' && !i.name.startsWith('@takeout'))
-.map(async ({ name, location }) => {
-const cwd = path.join(process.cwd(), location)
-const json = await fs.readJSON(path.join(cwd, 'package.json'))
-return {
-name,
-cwd,
-json,
-path: path.join(cwd, 'package.json'),
-directory: location,
-}
-})
-)
 
-
-
-
+// sync on-zero OUT (after release)
+if (!skipOnZeroSync) {
+await syncOnZeroOut(nextVersion)
+}
+}
 
-
-}
+console.info(`✅ Done\n`)
+} catch (err) {
+console.info('\nError:\n', err)
+process.exit(1)
+}
+})