@take-out/scripts 0.0.93 → 0.0.94
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
- package/package.json +4 -2
- package/src/build-initial.ts +76 -81
- package/src/clean.ts +21 -21
- package/src/cmd.ts +82 -0
- package/src/dev-tunnel.ts +138 -159
- package/src/ensure-port.ts +62 -70
- package/src/ensure-tunnel.ts +13 -9
- package/src/env-pull.ts +49 -47
- package/src/env-update.ts +143 -175
- package/src/exec-with-env.ts +14 -11
- package/src/helpers/args.ts +4 -4
- package/src/helpers/get-test-env.ts +5 -3
- package/src/node-version-check.ts +9 -5
- package/src/release.ts +427 -404
- package/src/sst-get-environment.ts +5 -1
- package/src/typecheck.ts +19 -16
- package/src/up.ts +355 -374
- package/src/update-changelog.ts +39 -43
- package/src/update-local-env.ts +139 -158
- package/src/wait-for-dev.ts +21 -20
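Note on the new file above: package/src/cmd.ts introduces a small cmd builder, and release.ts (the only diff expanded below) now wraps its whole flow in it instead of importing run, $, fs-extra, and p-map at module scope. The following is a minimal sketch of the pattern as it can be inferred from the release.ts diff alone; the cmd implementation itself is not part of this diff, so the flag parsing and the helpers injected into .run() are assumptions based on that usage:

import { cmd } from './cmd'

await cmd`describe what the script does`
  .args(`--patch boolean --dry-run boolean`)
  .run(async ({ args, $, run, path, os }) => {
    // flags declared in .args() show up camelCased on args: --dry-run -> args.dryRun
    if (args.dryRun) {
      console.info('[dry-run] nothing would be published')
      return
    }
    // run executes a command and resolves with { stdout }; it accepts { cwd, silent }
    const branch = (await run(`git rev-parse --abbrev-ref HEAD`)).stdout.trim()
    // $ appears to be Bun's shell tag, e.g. (await $`git status --porcelain`.text()).trim()
    console.info(`releasing from ${branch} in ${path.join(process.cwd(), 'packages')}`)
  })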
package/src/release.ts
CHANGED
@@ -1,483 +1,506 @@
-
-import path, { join } from 'node:path'
+#!/usr/bin/env bun
 
-
-// you can delete this from your own app
-
-import { run } from '@take-out/scripts/helpers/run'
-import { $ } from 'bun'
-import fs, { writeJSON } from 'fs-extra'
-import pMap from 'p-map'
+import { cmd } from './cmd'
 
 // avoid emitter error
 process.setMaxListeners(50)
 process.stderr.setMaxListeners(50)
 process.stdout.setMaxListeners(50)
 
-
-
-
-
-
-
-
-
-const
-
-const
-
-
-const
-const
-
-
-
-
-
-
-
-const
-const
-const
-const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+await cmd`publish takeout packages to npm`
+  .args(
+    `--patch boolean --minor boolean --major boolean --canary boolean
+    --rerun boolean --republish boolean --finish boolean --skip-finish boolean
+    --dry-run boolean --skip-test boolean --skip-build boolean --skip-version boolean
+    --dirty boolean --tamagui-git-user boolean --sync-on-zero boolean --skip-on-zero-sync boolean`
+  )
+  .run(async ({ args, $, run, path, os }) => {
+    const fs = (await import('fs-extra')).default
+    const { writeJSON } = await import('fs-extra')
+    const pMap = (await import('p-map')).default
+
+    // on-zero sync paths
+    const onZeroGithub = path.join(os.homedir(), 'github', 'on-zero')
+    const onZeroTakeout = path.join(process.cwd(), 'packages', 'on-zero')
+
+    // for failed publishes that need to re-run
+    const reRun = args.rerun
+    const rePublish = reRun || args.republish
+    const finish = args.finish
+    const skipFinish = args.skipFinish
+
+    const canary = args.canary
+    const skipVersion = finish || rePublish || args.skipVersion
+    const shouldMajor = args.major
+    const shouldMinor = args.minor
+    const shouldPatch = args.patch
+    const dirty = finish || args.dirty
+    const skipTest =
+      finish ||
+      rePublish ||
+      args.skipTest ||
+      process.argv.includes('--skip-tests')
+    const skipBuild = finish || rePublish || args.skipBuild
+    const dryRun = args.dryRun
+    const tamaguiGitUser = args.tamaguiGitUser
+    const syncOnZeroOnly = args.syncOnZero
+    const skipOnZeroSync = args.skipOnZeroSync
+
+    async function syncOnZeroIn() {
+      if (!(await fs.pathExists(onZeroGithub))) return
+
+      // check if there are commits after the last sync commit
+      const log = (await $`git -C ${onZeroGithub} log --oneline --format=%s`.text()).trim()
+      const commits = log.split('\n')
+      const lastSyncIdx = commits.findIndex((c) => c.startsWith('sync: from takeout'))
+
+      // no commits before sync, or first commit is a sync = nothing to pull in
+      if (lastSyncIdx <= 0) {
+        console.info(' ← on-zero: no new github commits to sync in')
+        return
+      }
 
-
-
-
+      const newCommits = commits
+        .slice(0, lastSyncIdx)
+        .filter((c) => !c.match(/^v\d+\.\d+\.\d+/))
+      if (!newCommits.length) {
+        console.info(' ← on-zero: no new github commits to sync in')
+        return
+      }
 
-
-
-  const patchAndCanary = curVersion.split('.')[2]
-  const [curPatch] = patchAndCanary.split('-')
-  const patchVersion = shouldPatch ? +curPatch + 1 : 0
-  const minorVersion = curMinor + (shouldMinor ? 1 : 0)
-  const majorVersion = curMajor + (shouldMajor ? 1 : 0)
-  const next = `${majorVersion}.${minorVersion}.${patchVersion}`
-
-  return next
-})()
-
-if (!skipVersion) {
-  console.info(` 🚀 Releasing:`)
-  console.info(' Current:', curVersion)
-  console.info(` Next: ${nextVersion}`)
-}
-
-try {
-  // sync on-zero IN (before release)
-  if (!skipOnZeroSync && !finish && !rePublish) {
-    await syncOnZeroIn()
-  }
+      console.info(` ← on-zero: syncing ${newCommits.length} commits from github`)
+      for (const c of newCommits) console.info(`   ${c}`)
 
-
-
-
-    if ((await run(`git rev-parse --abbrev-ref HEAD`)).stdout.trim() !== 'main') {
-      throw new Error(`Not on main`)
+      if (dryRun) {
+        console.info(' [dry-run] would copy src from github')
+        return
       }
-
-
+
+      await fs.copy(path.join(onZeroGithub, 'src'), path.join(onZeroTakeout, 'src'), {
+        overwrite: true,
+      })
+
+      const status = (await $`git status --porcelain`.text()).trim()
+      if (status) {
+        await $`git add packages/on-zero`
+        await $`git commit -m "on-zero: sync from github"`
       }
     }
 
-
-
-    await loadPackageJsons(packagePaths)
+    async function syncOnZeroOut(version: string) {
+      if (!(await fs.pathExists(onZeroGithub))) return
 
-
-
-
+      // copy src files from takeout to github
+      await fs.copy(path.join(onZeroTakeout, 'src'), path.join(onZeroGithub, 'src'), {
+        overwrite: true,
+      })
+      await fs.copy(path.join(onZeroTakeout, 'cli.cjs'), path.join(onZeroGithub, 'cli.cjs'))
+      await fs.copy(
+        path.join(onZeroTakeout, 'tsconfig.json'),
+        path.join(onZeroGithub, 'tsconfig.json')
+      )
+
+      // update package.json preserving github-specific fields
+      const takeoutPkg = await fs.readJSON(path.join(onZeroTakeout, 'package.json'))
+      const githubPkg = await fs.readJSON(path.join(onZeroGithub, 'package.json'))
+      const convertDeps = (deps: Record<string, string>) =>
+        Object.fromEntries(
+          Object.entries(deps || {}).map(([k, v]) => [
+            k,
+            v.startsWith('workspace:') ? `^${version}` : v,
+          ])
+        )
+      await fs.writeJSON(
+        path.join(onZeroGithub, 'package.json'),
+        {
+          ...takeoutPkg,
+          files: githubPkg.files,
+          repository: githubPkg.repository,
+          homepage: githubPkg.homepage,
+          bugs: githubPkg.bugs,
+          dependencies: convertDeps(takeoutPkg.dependencies),
+          devDependencies: convertDeps(takeoutPkg.devDependencies),
+        },
+        { spaces: 2 }
       )
+
+      // only commit if there are actual changes
+      const status = (await $`git -C ${onZeroGithub} status --porcelain`.text()).trim()
+      if (!status) return
+
+      console.info(' → on-zero: syncing out to github')
+
+      if (dryRun) {
+        console.info(`  [dry-run] would push: sync: from takeout v${version}`)
+        await $`git -C ${onZeroGithub} checkout -- .`
+        return
+      }
+
+      await $`git -C ${onZeroGithub} add -A`
+      await $`git -C ${onZeroGithub} commit -m ${'sync: from takeout v' + version}`
+      await $`git -C ${onZeroGithub} push origin main`
+    }
+
+    // sync on-zero: copy src from github to takeout, then takeout to github after release
+    async function syncOnZero() {
+      if (!(await fs.pathExists(onZeroGithub))) return
+      const pkg = await fs.readJSON(path.join(onZeroTakeout, 'package.json'))
+      await syncOnZeroIn()
+      await syncOnZeroOut(pkg.version)
    }
 
-async function
-
-
-
-
-
-
-
+    async function getWorkspacePackages() {
+      // read workspaces from root package.json
+      const rootPackageJson = await fs.readJSON(path.join(process.cwd(), 'package.json'))
+      const workspaceGlobs = rootPackageJson.workspaces || []
+
+      // resolve workspace paths
+      const packagePaths: { name: string; location: string }[] = []
+      for (const glob of workspaceGlobs) {
+        if (glob.includes('*')) {
+          // handle glob patterns like "./packages/*"
+          const baseDir = glob.replace('/*', '')
+          const fullPath = path.join(process.cwd(), baseDir)
+          if (await fs.pathExists(fullPath)) {
+            const dirs = await fs.readdir(fullPath)
+            for (const dir of dirs) {
+              const pkgPath = path.join(fullPath, dir, 'package.json')
+              if (await fs.pathExists(pkgPath)) {
+                const pkg = await fs.readJSON(pkgPath)
+                packagePaths.push({
+                  name: pkg.name,
+                  location: path.join(baseDir, dir),
+                })
+              }
            }
          }
-        }
+        } else {
+          // handle direct paths like "./src/start"
+          const pkgPath = path.join(process.cwd(), glob, 'package.json')
+          if (await fs.pathExists(pkgPath)) {
+            const pkg = await fs.readJSON(pkgPath)
+            packagePaths.push({
+              name: pkg.name,
+              location: glob,
+            })
+          }
+        }
+      }
+
+      return packagePaths
+    }
+
+    async function loadPackageJsons(packagePaths: { name: string; location: string }[]) {
+      const allPackageJsons = await Promise.all(
+        packagePaths
+          .filter((i) => i.location !== '.' && !i.name.startsWith('@takeout'))
+          .map(async ({ name, location }) => {
+            const cwd = path.join(process.cwd(), location)
+            const json = await fs.readJSON(path.join(cwd, 'package.json'))
+            return {
+              name,
+              cwd,
+              json,
+              path: path.join(cwd, 'package.json'),
+              directory: location,
+            }
+          })
+      )
+
+      const publishablePackages = allPackageJsons.filter(
+        (x) => !x.json.skipPublish && !x.json.private
      )
+
+      return { allPackageJsons, publishablePackages }
    }
 
-
-
-
+    // handle --sync-on-zero standalone mode
+    if (syncOnZeroOnly) {
+      try {
+        await syncOnZero()
+      } catch (err) {
+        console.error('sync failed:', err)
+        process.exit(1)
+      }
+      return
    }
 
-
+    // main release flow
+    const curVersion = fs.readJSONSync('./packages/helpers/package.json').version
 
-
-
+    // must specify version (unless republishing):
+    if (!rePublish && !skipVersion && !shouldPatch && !shouldMinor && !shouldMajor) {
+      console.error(`Must specify one of --patch, --minor, or --major`)
+      process.exit(1)
    }
 
-
-
-
-
+    const nextVersion = (() => {
+      if (rePublish || skipVersion) {
+        return curVersion
+      }
+
+      if (canary) {
+        return `${curVersion.replace(/(-\d+)+$/, '')}-${Date.now()}`
+      }
+
+      const curMajor = +curVersion.split('.')[0] || 0
+      const curMinor = +curVersion.split('.')[1] || 0
+      const patchAndCanary = curVersion.split('.')[2]
+      const [curPatch] = patchAndCanary.split('-')
+      const patchVersion = shouldPatch ? +curPatch + 1 : 0
+      const minorVersion = curMinor + (shouldMinor ? 1 : 0)
+      const majorVersion = curMajor + (shouldMajor ? 1 : 0)
+      const next = `${majorVersion}.${minorVersion}.${patchVersion}`
+
+      return next
+    })()
+
+    if (!skipVersion) {
+      console.info(` 🚀 Releasing:`)
+      console.info('  Current:', curVersion)
+      console.info(`  Next: ${nextVersion}`)
    }
 
-
-
+    try {
+      // sync on-zero IN (before release)
+      if (!skipOnZeroSync && !finish && !rePublish) {
+        await syncOnZeroIn()
+      }
 
-
-
-
-
-
+      // ensure we are up to date
+      // ensure we are on main
+      if (!canary) {
+        if ((await run(`git rev-parse --abbrev-ref HEAD`)).stdout.trim() !== 'main') {
+          throw new Error(`Not on main`)
+        }
+        if (!dirty && !rePublish && !finish) {
+          await run(`git pull --rebase origin main`)
+        }
      }
-  }
 
-
-  const
-
-
+      const packagePaths = await getWorkspacePackages()
+      const { allPackageJsons, publishablePackages: packageJsons } =
+        await loadPackageJsons(packagePaths)
+
+      if (!finish) {
+        console.info(
+          `Publishing in order:\n\n${packageJsons.map((x) => x.name).join('\n')}`
+        )
      }
-  }
 
-
-
-
-
-
-
-
-
-        'dependencies',
-        'devDependencies',
-        'optionalDependencies',
-        'peerDependencies',
-      ]) {
-        const nextDeps = next[field]
-        if (!nextDeps) continue
-        for (const depName in nextDeps) {
-          if (allPackageJsons.some((p) => p.name === depName)) {
-            nextDeps[depName] = nextVersion
+      async function checkDistDirs() {
+        await Promise.all(
+          packageJsons.map(async ({ cwd, json }) => {
+            const distDir = path.join(cwd, 'dist')
+            if (json.scripts?.build) {
+              if (!(await fs.pathExists(distDir))) {
+                console.warn('no dist dir!', distDir)
+                process.exit(1)
              }
            }
-          }
+          })
+        )
+      }
 
-
-
-
-
+      if (tamaguiGitUser) {
+        await run(`git config --global user.name 'Tamagui'`)
+        await run(`git config --global user.email 'tamagui@users.noreply.github.com`)
+      }
 
-
-    await run(`git diff`)
-  }
+      console.info('install and build')
 
-
-
-
-
-    await pMap(
-      packageJsons,
-      async ({ name, cwd, json }) => {
-        const publishOptions = [canary && `--tag canary`, dryRun && `--dry-run`]
-          .filter(Boolean)
-          .join(' ')
-        const tgzPath = join(packDir, `${name.replace('/', '-')}.tgz`)
-
-        // pack with bun (properly converts workspace:* to versions)
-        // use swap-exports for packages with build scripts, otherwise just pack
-        if (json.scripts?.build) {
-          await run(
-            `bun run build --swap-exports -- bun pm pack --filename ${tgzPath}`,
-            {
-              cwd,
-              silent: true,
-            }
-          )
-        } else {
-          await run(`bun pm pack --filename ${tgzPath}`, {
-            cwd,
-            silent: true,
-          })
-        }
+      if (!rePublish && !finish) {
+        await run(`bun install`)
+      }
 
-
-
-
-
-
+      if (!skipBuild && !finish) {
+        await run(`bun clean`)
+        await run(`bun run build`)
+        await checkDistDirs()
+      }
 
-
-
-
-
+      if (!finish) {
+        console.info('run checks')
+
+        if (!skipTest) {
+          await run(`bun lint`)
+          await run(`bun check:all`)
+          // only in packages
+          // await run(`bun test`)
        }
-
+      }
 
-
+      if (!dirty && !dryRun && !rePublish) {
+        const out = await run(`git status --porcelain`)
+        if (out.stdout) {
+          throw new Error(`Has unsaved git changes: ${out.stdout}`)
+        }
+      }
+
+      // snapshot workspace:* deps before mutation (shallow copy mutates originals)
+      const workspaceDeps = new Map<string, Record<string, Record<string, string>>>()
+      for (const { json, path: pkgPath } of allPackageJsons) {
+        const deps: Record<string, Record<string, string>> = {}
+        for (const field of [
+          'dependencies',
+          'devDependencies',
+          'optionalDependencies',
+          'peerDependencies',
+        ]) {
+          if (!json[field]) continue
+          for (const depName in json[field]) {
+            if (json[field][depName].startsWith('workspace:')) {
+              deps[field] ??= {}
+              deps[field][depName] = json[field][depName]
+            }
+          }
+        }
+        if (Object.keys(deps).length) workspaceDeps.set(pkgPath, deps)
+      }
 
-
-  if (!dryRun) {
+      if (!skipVersion && !finish) {
         await Promise.all(
-      allPackageJsons.map(async ({ json, path }) => {
-        const
+          allPackageJsons.map(async ({ json, path: pkgPath }) => {
+            const next = { ...json }
+
+            next.version = nextVersion
+
            for (const field of [
              'dependencies',
              'devDependencies',
              'optionalDependencies',
              'peerDependencies',
            ]) {
-          const
-
-
-
-
-              currentDeps[depName] = origDeps[depName]
+              const nextDeps = next[field]
+              if (!nextDeps) continue
+              for (const depName in nextDeps) {
+                if (allPackageJsons.some((p) => p.name === depName)) {
+                  nextDeps[depName] = nextVersion
                }
              }
            }
-
+
+            await writeJSON(pkgPath, next, { spaces: 2 })
          })
        )
      }
 
-
-
-    await run(`git checkout -- packages/*/package.json`, { silent: true })
-    console.info('Reverted version changes\n')
+      if (!finish && !rePublish) {
+        await run(`git diff`)
      }
-  }
 
-  if (!skipFinish && !dryRun) {
-    // then git tag, commit, push
       if (!finish) {
-
-
-
-
-
-
-
-
-
-
-
-
+        const packDir = path.join(os.tmpdir(), `takeout-release-${nextVersion}`)
+        await fs.ensureDir(packDir)
+
+        await pMap(
+          packageJsons,
+          async ({ name, cwd, json }) => {
+            const publishOptions = [canary && `--tag canary`, dryRun && `--dry-run`]
+              .filter(Boolean)
+              .join(' ')
+            const tgzPath = path.join(packDir, `${name.replace('/', '-')}.tgz`)
+
+            // pack with bun (properly converts workspace:* to versions)
+            // use swap-exports for packages with build scripts, otherwise just pack
+            if (json.scripts?.build) {
+              await run(
+                `bun run build --swap-exports -- bun pm pack --filename ${tgzPath}`,
+                {
+                  cwd,
+                  silent: true,
+                }
+              )
+            } else {
+              await run(`bun pm pack --filename ${tgzPath}`, {
+                cwd,
+                silent: true,
+              })
+            }
 
-
-        await run(`
-
+            // publish the tgz directly
+            await run(`npm publish ${tgzPath} ${publishOptions}`.trim(), {
+              cwd: packDir,
+              silent: true,
+            })
 
-
-
-
+            console.info(`${dryRun ? '[dry-run] ' : ''}Published ${name}`)
+          },
+          {
+            concurrency: 15,
          }
+        )
 
-
-
-
-
+        console.info(`✅ ${dryRun ? '[dry-run] ' : ''}Published\n`)
+
+        // restore workspace:* protocols after publishing
+        if (!dryRun) {
+          await Promise.all(
+            allPackageJsons.map(async ({ path: pkgPath }) => {
+              const saved = workspaceDeps.get(pkgPath)
+              if (!saved) return
+              const current = await fs.readJSON(pkgPath)
+              for (const field in saved) {
+                if (!current[field]) continue
+                for (const depName in saved[field]) {
+                  current[field][depName] = saved[field][depName]
+                }
+              }
+              await writeJSON(pkgPath, current, { spaces: 2 })
+            })
+          )
+        }
 
-
+        // revert version changes after dry-run
+        if (dryRun) {
+          await run(`git checkout -- packages/*/package.json`, { silent: true })
+          console.info('Reverted version changes\n')
        }
      }
 
-
-
-
-
-
+      if (!skipFinish && !dryRun) {
+        // then git tag, commit, push
+        if (!finish) {
+          await run(`bun install`)
+        }
+        const tagPrefix = canary ? 'canary' : 'v'
+        const gitTag = `${tagPrefix}${nextVersion}`
 
-
-} catch (err) {
-  console.info('\nError:\n', err)
-  process.exit(1)
-}
-}
-
-// sync on-zero: copy src from github to takeout, then takeout to github after release
-async function syncOnZero() {
-  if (!(await fs.pathExists(onZeroGithub))) return
-  const pkg = await fs.readJSON(join(onZeroTakeout, 'package.json'))
-  await syncOnZeroIn()
-  await syncOnZeroOut(pkg.version)
-}
-
-async function syncOnZeroIn() {
-  if (!(await fs.pathExists(onZeroGithub))) return
-
-  // check if there are commits after the last sync commit
-  const log = (await $`git -C ${onZeroGithub} log --oneline --format=%s`.text()).trim()
-  const commits = log.split('\n')
-  const lastSyncIdx = commits.findIndex((c) => c.startsWith('sync: from takeout'))
-
-  // no commits before sync, or first commit is a sync = nothing to pull in
-  if (lastSyncIdx <= 0) {
-    console.info(' ← on-zero: no new github commits to sync in')
-    return
-  }
-
-  const newCommits = commits
-    .slice(0, lastSyncIdx)
-    .filter((c) => !c.match(/^v\d+\.\d+\.\d+/))
-  if (!newCommits.length) {
-    console.info(' ← on-zero: no new github commits to sync in')
-    return
-  }
-
-  console.info(` ← on-zero: syncing ${newCommits.length} commits from github`)
-  for (const c of newCommits) console.info(`   ${c}`)
-
-  if (dryRun) {
-    console.info('  [dry-run] would copy src from github')
-    return
-  }
-
-  await fs.copy(join(onZeroGithub, 'src'), join(onZeroTakeout, 'src'), {
-    overwrite: true,
-  })
+        await finishAndCommit()
 
-
-
-
-    await $`git commit -m "on-zero: sync from github"`
-  }
-}
+        async function finishAndCommit(cwd = process.cwd()) {
+          if (!rePublish || reRun || finish) {
+            await run(`git add -A`, { cwd })
 
-
-  if (!(await fs.pathExists(onZeroGithub))) return
+            await run(`git commit -m ${gitTag}`, { cwd })
 
-
-
-
-  })
-  await fs.copy(join(onZeroTakeout, 'cli.cjs'), join(onZeroGithub, 'cli.cjs'))
-  await fs.copy(join(onZeroTakeout, 'tsconfig.json'), join(onZeroGithub, 'tsconfig.json'))
-
-  // update package.json preserving github-specific fields
-  const takeoutPkg = await fs.readJSON(join(onZeroTakeout, 'package.json'))
-  const githubPkg = await fs.readJSON(join(onZeroGithub, 'package.json'))
-  const convertDeps = (deps: Record<string, string>) =>
-    Object.fromEntries(
-      Object.entries(deps || {}).map(([k, v]) => [
-        k,
-        v.startsWith('workspace:') ? `^${version}` : v,
-      ])
-    )
-  await fs.writeJSON(
-    join(onZeroGithub, 'package.json'),
-    {
-      ...takeoutPkg,
-      files: githubPkg.files,
-      repository: githubPkg.repository,
-      homepage: githubPkg.homepage,
-      bugs: githubPkg.bugs,
-      dependencies: convertDeps(takeoutPkg.dependencies),
-      devDependencies: convertDeps(takeoutPkg.devDependencies),
-    },
-    { spaces: 2 }
-  )
+            if (!canary) {
+              await run(`git tag ${gitTag}`, { cwd })
+            }
 
-
-
-
-
-
-
-
-
-
-
-
-
-  await $`git -C ${onZeroGithub} add -A`
-  await $`git -C ${onZeroGithub} commit -m ${'sync: from takeout v' + version}`
-  await $`git -C ${onZeroGithub} push origin main`
-}
-
-async function getWorkspacePackages() {
-  // read workspaces from root package.json
-  const rootPackageJson = await fs.readJSON(join(process.cwd(), 'package.json'))
-  const workspaceGlobs = rootPackageJson.workspaces || []
-
-  // resolve workspace paths
-  const packagePaths: { name: string; location: string }[] = []
-  for (const glob of workspaceGlobs) {
-    if (glob.includes('*')) {
-      // handle glob patterns like "./packages/*"
-      const baseDir = glob.replace('/*', '')
-      const fullPath = join(process.cwd(), baseDir)
-      if (await fs.pathExists(fullPath)) {
-        const dirs = await fs.readdir(fullPath)
-        for (const dir of dirs) {
-          const pkgPath = join(fullPath, dir, 'package.json')
-          if (await fs.pathExists(pkgPath)) {
-            const pkg = await fs.readJSON(pkgPath)
-            packagePaths.push({
-              name: pkg.name,
-              location: join(baseDir, dir),
-            })
+            if (!dirty) {
+              // pull once more before pushing so if there was a push in interim we get it
+              await run(`git pull --rebase origin HEAD`, { cwd })
+            }
+
+            await run(`git push origin head`, { cwd })
+            if (!canary) {
+              await run(`git push origin ${gitTag}`, { cwd })
+            }
+
+            console.info(`✅ Pushed and versioned\n`)
          }
        }
-      }
-    } else {
-      // handle direct paths like "./src/start"
-      const pkgPath = join(process.cwd(), glob, 'package.json')
-      if (await fs.pathExists(pkgPath)) {
-        const pkg = await fs.readJSON(pkgPath)
-        packagePaths.push({
-          name: pkg.name,
-          location: glob,
-        })
-      }
-    }
-  }
-
-  return packagePaths
-}
-
-async function loadPackageJsons(packagePaths: { name: string; location: string }[]) {
-  const allPackageJsons = await Promise.all(
-    packagePaths
-      .filter((i) => i.location !== '.' && !i.name.startsWith('@takeout'))
-      .map(async ({ name, location }) => {
-        const cwd = path.join(process.cwd(), location)
-        const json = await fs.readJSON(path.join(cwd, 'package.json'))
-        return {
-          name,
-          cwd,
-          json,
-          path: path.join(cwd, 'package.json'),
-          directory: location,
-        }
-      })
-  )
 
-
-
-
+      // sync on-zero OUT (after release)
+      if (!skipOnZeroSync) {
+        await syncOnZeroOut(nextVersion)
+      }
+    }
 
-
-}
+      console.info(`✅ Done\n`)
+    } catch (err) {
+      console.info('\nError:\n', err)
+      process.exit(1)
+    }
+  })
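A behavioral note on the release.ts changes above: before versions are rewritten, the script now snapshots every workspace:* range into workspaceDeps and writes those ranges back after publishing. The comment in the added lines ("shallow copy mutates originals") is the reason: next = { ...json } copies only the top level of the package.json object, so rewriting next.dependencies also rewrites the json object held in allPackageJsons. A minimal standalone illustration of that pitfall (hypothetical package name, not code from this package):

// A spread copy is shallow: nested objects are shared, not cloned.
const json = { name: 'demo', dependencies: { '@example/helpers': 'workspace:*' } }

const next = { ...json }
next.dependencies['@example/helpers'] = '0.0.94'

// The original was mutated too, so the workspace: protocol would be lost
// without the snapshot-and-restore step the diff adds.
console.log(json.dependencies['@example/helpers']) // prints "0.0.94"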