@take-out/scripts 0.0.92 → 0.0.94

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/release.ts CHANGED
@@ -1,461 +1,506 @@
- import { homedir, tmpdir } from 'node:os'
- import path, { join } from 'node:path'
+ #!/usr/bin/env bun
 
- // note! this is an helper script used by tamagui team for publishing the takeout packages
- // you can delete this from your own app
-
- import { run } from '@take-out/scripts/helpers/run'
- import { $ } from 'bun'
- import fs, { writeJSON } from 'fs-extra'
- import pMap from 'p-map'
+ import { cmd } from './cmd'
 
  // avoid emitter error
  process.setMaxListeners(50)
  process.stderr.setMaxListeners(50)
  process.stdout.setMaxListeners(50)
 
- // on-zero sync paths
- const onZeroGithub = join(homedir(), 'github', 'on-zero')
- const onZeroTakeout = join(process.cwd(), 'packages', 'on-zero')
-
- // for failed publishes that need to re-run
- const reRun = process.argv.includes('--rerun')
- const rePublish = reRun || process.argv.includes('--republish')
- const finish = process.argv.includes('--finish')
- const skipFinish = process.argv.includes('--skip-finish')
-
- const canary = process.argv.includes('--canary')
- const skipVersion = finish || rePublish || process.argv.includes('--skip-version')
- const shouldMajor = process.argv.includes('--major')
- const shouldMinor = process.argv.includes('--minor')
- const shouldPatch = process.argv.includes('--patch')
- const dirty = finish || process.argv.includes('--dirty')
- const skipTest =
- finish ||
- rePublish ||
- process.argv.includes('--skip-test') ||
- process.argv.includes('--skip-tests')
- const skipBuild = finish || rePublish || process.argv.includes('--skip-build')
- const dryRun = process.argv.includes('--dry-run')
- const tamaguiGitUser = process.argv.includes('--tamagui-git-user')
- const syncOnZeroOnly = process.argv.includes('--sync-on-zero')
- const skipOnZeroSync = process.argv.includes('--skip-on-zero-sync')
-
- // handle --sync-on-zero standalone mode
- if (syncOnZeroOnly) {
- syncOnZero().catch((err) => {
- console.error('sync failed:', err)
- process.exit(1)
- })
- } else {
- mainRelease()
- }
-
- async function mainRelease() {
- const curVersion = fs.readJSONSync('./packages/helpers/package.json').version
-
- // must specify version (unless republishing):
- if (!rePublish && !skipVersion && !shouldPatch && !shouldMinor && !shouldMajor) {
- console.error(`Must specify one of --patch, --minor, or --major`)
- process.exit(1)
- }
-
- const nextVersion = (() => {
- if (rePublish || skipVersion) {
- return curVersion
- }
+ await cmd`publish takeout packages to npm`
+ .args(
+ `--patch boolean --minor boolean --major boolean --canary boolean
+ --rerun boolean --republish boolean --finish boolean --skip-finish boolean
+ --dry-run boolean --skip-test boolean --skip-build boolean --skip-version boolean
+ --dirty boolean --tamagui-git-user boolean --sync-on-zero boolean --skip-on-zero-sync boolean`
+ )
+ .run(async ({ args, $, run, path, os }) => {
+ const fs = (await import('fs-extra')).default
+ const { writeJSON } = await import('fs-extra')
+ const pMap = (await import('p-map')).default
+
+ // on-zero sync paths
+ const onZeroGithub = path.join(os.homedir(), 'github', 'on-zero')
+ const onZeroTakeout = path.join(process.cwd(), 'packages', 'on-zero')
+
+ // for failed publishes that need to re-run
+ const reRun = args.rerun
+ const rePublish = reRun || args.republish
+ const finish = args.finish
+ const skipFinish = args.skipFinish
+
+ const canary = args.canary
+ const skipVersion = finish || rePublish || args.skipVersion
+ const shouldMajor = args.major
+ const shouldMinor = args.minor
+ const shouldPatch = args.patch
+ const dirty = finish || args.dirty
+ const skipTest =
+ finish ||
+ rePublish ||
+ args.skipTest ||
+ process.argv.includes('--skip-tests')
+ const skipBuild = finish || rePublish || args.skipBuild
+ const dryRun = args.dryRun
+ const tamaguiGitUser = args.tamaguiGitUser
+ const syncOnZeroOnly = args.syncOnZero
+ const skipOnZeroSync = args.skipOnZeroSync
+
+ async function syncOnZeroIn() {
+ if (!(await fs.pathExists(onZeroGithub))) return
+
+ // check if there are commits after the last sync commit
+ const log = (await $`git -C ${onZeroGithub} log --oneline --format=%s`.text()).trim()
+ const commits = log.split('\n')
+ const lastSyncIdx = commits.findIndex((c) => c.startsWith('sync: from takeout'))
+
+ // no commits before sync, or first commit is a sync = nothing to pull in
+ if (lastSyncIdx <= 0) {
+ console.info(' ← on-zero: no new github commits to sync in')
+ return
+ }
 
- if (canary) {
- return `${curVersion.replace(/(-\d+)+$/, '')}-${Date.now()}`
- }
+ const newCommits = commits
+ .slice(0, lastSyncIdx)
+ .filter((c) => !c.match(/^v\d+\.\d+\.\d+/))
+ if (!newCommits.length) {
+ console.info(' ← on-zero: no new github commits to sync in')
+ return
+ }
 
- const curMajor = +curVersion.split('.')[0] || 0
- const curMinor = +curVersion.split('.')[1] || 0
- const patchAndCanary = curVersion.split('.')[2]
- const [curPatch] = patchAndCanary.split('-')
- const patchVersion = shouldPatch ? +curPatch + 1 : 0
- const minorVersion = curMinor + (shouldMinor ? 1 : 0)
- const majorVersion = curMajor + (shouldMajor ? 1 : 0)
- const next = `${majorVersion}.${minorVersion}.${patchVersion}`
-
- return next
- })()
-
- if (!skipVersion) {
- console.info(` 🚀 Releasing:`)
- console.info(' Current:', curVersion)
- console.info(` Next: ${nextVersion}`)
- }
-
- try {
- // sync on-zero IN (before release)
- if (!skipOnZeroSync && !finish && !rePublish) {
- await syncOnZeroIn()
- }
+ console.info(` ← on-zero: syncing ${newCommits.length} commits from github`)
+ for (const c of newCommits) console.info(` ${c}`)
 
- // ensure we are up to date
- // ensure we are on main
- if (!canary) {
- if ((await run(`git rev-parse --abbrev-ref HEAD`)).stdout.trim() !== 'main') {
- throw new Error(`Not on main`)
+ if (dryRun) {
+ console.info(' [dry-run] would copy src from github')
+ return
  }
- if (!dirty && !rePublish && !finish) {
- await run(`git pull --rebase origin main`)
+
+ await fs.copy(path.join(onZeroGithub, 'src'), path.join(onZeroTakeout, 'src'), {
+ overwrite: true,
+ })
+
+ const status = (await $`git status --porcelain`.text()).trim()
+ if (status) {
+ await $`git add packages/on-zero`
+ await $`git commit -m "on-zero: sync from github"`
  }
  }
 
- const packagePaths = await getWorkspacePackages()
- const { allPackageJsons, publishablePackages: packageJsons } =
- await loadPackageJsons(packagePaths)
+ async function syncOnZeroOut(version: string) {
+ if (!(await fs.pathExists(onZeroGithub))) return
 
- if (!finish) {
- console.info(
- `Publishing in order:\n\n${packageJsons.map((x) => x.name).join('\n')}`
+ // copy src files from takeout to github
+ await fs.copy(path.join(onZeroTakeout, 'src'), path.join(onZeroGithub, 'src'), {
+ overwrite: true,
+ })
+ await fs.copy(path.join(onZeroTakeout, 'cli.cjs'), path.join(onZeroGithub, 'cli.cjs'))
+ await fs.copy(
+ path.join(onZeroTakeout, 'tsconfig.json'),
+ path.join(onZeroGithub, 'tsconfig.json')
  )
- }
 
- async function checkDistDirs() {
- await Promise.all(
- packageJsons.map(async ({ cwd, json }) => {
- const distDir = join(cwd, 'dist')
- if (json.scripts?.build) {
- if (!(await fs.pathExists(distDir))) {
- console.warn('no dist dir!', distDir)
- process.exit(1)
- }
- }
- })
+ // update package.json preserving github-specific fields
+ const takeoutPkg = await fs.readJSON(path.join(onZeroTakeout, 'package.json'))
+ const githubPkg = await fs.readJSON(path.join(onZeroGithub, 'package.json'))
+ const convertDeps = (deps: Record<string, string>) =>
+ Object.fromEntries(
+ Object.entries(deps || {}).map(([k, v]) => [
+ k,
+ v.startsWith('workspace:') ? `^${version}` : v,
+ ])
+ )
+ await fs.writeJSON(
+ path.join(onZeroGithub, 'package.json'),
+ {
+ ...takeoutPkg,
+ files: githubPkg.files,
+ repository: githubPkg.repository,
+ homepage: githubPkg.homepage,
+ bugs: githubPkg.bugs,
+ dependencies: convertDeps(takeoutPkg.dependencies),
+ devDependencies: convertDeps(takeoutPkg.devDependencies),
+ },
+ { spaces: 2 }
  )
- }
 
- if (tamaguiGitUser) {
- await run(`git config --global user.name 'Tamagui'`)
- await run(`git config --global user.email 'tamagui@users.noreply.github.com`)
- }
+ // only commit if there are actual changes
+ const status = (await $`git -C ${onZeroGithub} status --porcelain`.text()).trim()
+ if (!status) return
 
- console.info('install and build')
+ console.info(' on-zero: syncing out to github')
 
- if (!rePublish && !finish) {
- await run(`bun install`)
- }
+ if (dryRun) {
+ console.info(` [dry-run] would push: sync: from takeout v${version}`)
+ await $`git -C ${onZeroGithub} checkout -- .`
+ return
+ }
 
- if (!skipBuild && !finish) {
- await run(`bun clean`)
- await run(`bun run build`)
- await checkDistDirs()
+ await $`git -C ${onZeroGithub} add -A`
+ await $`git -C ${onZeroGithub} commit -m ${'sync: from takeout v' + version}`
+ await $`git -C ${onZeroGithub} push origin main`
  }
 
- if (!finish) {
- console.info('run checks')
-
- if (!skipTest) {
- await run(`bun lint`)
- await run(`bun check:all`)
- // only in packages
- // await run(`bun test`)
- }
+ // sync on-zero: copy src from github to takeout, then takeout to github after release
+ async function syncOnZero() {
+ if (!(await fs.pathExists(onZeroGithub))) return
+ const pkg = await fs.readJSON(path.join(onZeroTakeout, 'package.json'))
+ await syncOnZeroIn()
+ await syncOnZeroOut(pkg.version)
  }
 
- if (!dirty && !dryRun && !rePublish) {
- const out = await run(`git status --porcelain`)
- if (out.stdout) {
- throw new Error(`Has unsaved git changes: ${out.stdout}`)
+ async function getWorkspacePackages() {
+ // read workspaces from root package.json
+ const rootPackageJson = await fs.readJSON(path.join(process.cwd(), 'package.json'))
+ const workspaceGlobs = rootPackageJson.workspaces || []
+
+ // resolve workspace paths
+ const packagePaths: { name: string; location: string }[] = []
+ for (const glob of workspaceGlobs) {
+ if (glob.includes('*')) {
+ // handle glob patterns like "./packages/*"
+ const baseDir = glob.replace('/*', '')
+ const fullPath = path.join(process.cwd(), baseDir)
+ if (await fs.pathExists(fullPath)) {
+ const dirs = await fs.readdir(fullPath)
+ for (const dir of dirs) {
+ const pkgPath = path.join(fullPath, dir, 'package.json')
+ if (await fs.pathExists(pkgPath)) {
+ const pkg = await fs.readJSON(pkgPath)
+ packagePaths.push({
+ name: pkg.name,
+ location: path.join(baseDir, dir),
+ })
+ }
+ }
+ }
+ } else {
+ // handle direct paths like "./src/start"
+ const pkgPath = path.join(process.cwd(), glob, 'package.json')
+ if (await fs.pathExists(pkgPath)) {
+ const pkg = await fs.readJSON(pkgPath)
+ packagePaths.push({
+ name: pkg.name,
+ location: glob,
+ })
+ }
+ }
  }
+
+ return packagePaths
  }
 
- if (!skipVersion && !finish) {
- await Promise.all(
- allPackageJsons.map(async ({ json, path }) => {
- const next = { ...json }
-
- next.version = nextVersion
-
- for (const field of [
- 'dependencies',
- 'devDependencies',
- 'optionalDependencies',
- 'peerDependencies',
- ]) {
- const nextDeps = next[field]
- if (!nextDeps) continue
- for (const depName in nextDeps) {
- // only update non-workspace internal dependencies
- if (!nextDeps[depName].startsWith('workspace:')) {
- if (allPackageJsons.some((p) => p.name === depName)) {
- nextDeps[depName] = nextVersion
- }
- }
+ async function loadPackageJsons(packagePaths: { name: string; location: string }[]) {
+ const allPackageJsons = await Promise.all(
+ packagePaths
+ .filter((i) => i.location !== '.' && !i.name.startsWith('@takeout'))
+ .map(async ({ name, location }) => {
+ const cwd = path.join(process.cwd(), location)
+ const json = await fs.readJSON(path.join(cwd, 'package.json'))
+ return {
+ name,
+ cwd,
+ json,
+ path: path.join(cwd, 'package.json'),
+ directory: location,
  }
- }
+ })
+ )
 
- await writeJSON(path, next, { spaces: 2 })
- })
+ const publishablePackages = allPackageJsons.filter(
+ (x) => !x.json.skipPublish && !x.json.private
  )
- }
 
- if (!finish && !rePublish) {
- await run(`git diff`)
+ return { allPackageJsons, publishablePackages }
  }
 
- if (!finish) {
- const packDir = join(tmpdir(), `takeout-release-${nextVersion}`)
- await fs.ensureDir(packDir)
-
- await pMap(
- packageJsons,
- async ({ name, cwd, json }) => {
- const publishOptions = [canary && `--tag canary`, dryRun && `--dry-run`]
- .filter(Boolean)
- .join(' ')
- const tgzPath = join(packDir, `${name.replace('/', '-')}.tgz`)
-
- // pack with bun (properly converts workspace:* to versions)
- // use swap-exports for packages with build scripts, otherwise just pack
- if (json.scripts?.build) {
- await run(
- `bun run build --swap-exports -- bun pm pack --filename ${tgzPath}`,
- {
- cwd,
- silent: true,
- }
- )
- } else {
- await run(`bun pm pack --filename ${tgzPath}`, {
- cwd,
- silent: true,
- })
- }
+ // handle --sync-on-zero standalone mode
+ if (syncOnZeroOnly) {
+ try {
+ await syncOnZero()
+ } catch (err) {
+ console.error('sync failed:', err)
+ process.exit(1)
+ }
+ return
+ }
 
- // publish the tgz directly
- await run(`npm publish ${tgzPath} ${publishOptions}`.trim(), {
- cwd: packDir,
- silent: true,
- })
+ // main release flow
+ const curVersion = fs.readJSONSync('./packages/helpers/package.json').version
 
- console.info(`${dryRun ? '[dry-run] ' : ''}Published ${name}`)
- },
- {
- concurrency: 15,
- }
- )
+ // must specify version (unless republishing):
+ if (!rePublish && !skipVersion && !shouldPatch && !shouldMinor && !shouldMajor) {
+ console.error(`Must specify one of --patch, --minor, or --major`)
+ process.exit(1)
+ }
 
- console.info(`✅ ${dryRun ? '[dry-run] ' : ''}Published\n`)
+ const nextVersion = (() => {
+ if (rePublish || skipVersion) {
+ return curVersion
+ }
 
- // revert version changes after dry-run
- if (dryRun) {
- await run(`git checkout -- packages/*/package.json`, { silent: true })
- console.info('Reverted version changes\n')
+ if (canary) {
+ return `${curVersion.replace(/(-\d+)+$/, '')}-${Date.now()}`
  }
+
+ const curMajor = +curVersion.split('.')[0] || 0
+ const curMinor = +curVersion.split('.')[1] || 0
+ const patchAndCanary = curVersion.split('.')[2]
+ const [curPatch] = patchAndCanary.split('-')
+ const patchVersion = shouldPatch ? +curPatch + 1 : 0
+ const minorVersion = curMinor + (shouldMinor ? 1 : 0)
+ const majorVersion = curMajor + (shouldMajor ? 1 : 0)
+ const next = `${majorVersion}.${minorVersion}.${patchVersion}`
+
+ return next
+ })()
+
+ if (!skipVersion) {
+ console.info(` 🚀 Releasing:`)
+ console.info(' Current:', curVersion)
+ console.info(` Next: ${nextVersion}`)
  }
 
- if (!skipFinish && !dryRun) {
- // then git tag, commit, push
+ try {
+ // sync on-zero IN (before release)
+ if (!skipOnZeroSync && !finish && !rePublish) {
+ await syncOnZeroIn()
+ }
+
+ // ensure we are up to date
+ // ensure we are on main
+ if (!canary) {
+ if ((await run(`git rev-parse --abbrev-ref HEAD`)).stdout.trim() !== 'main') {
+ throw new Error(`Not on main`)
+ }
+ if (!dirty && !rePublish && !finish) {
+ await run(`git pull --rebase origin main`)
+ }
+ }
+
+ const packagePaths = await getWorkspacePackages()
+ const { allPackageJsons, publishablePackages: packageJsons } =
+ await loadPackageJsons(packagePaths)
+
  if (!finish) {
- await run(`bun install`)
+ console.info(
+ `Publishing in order:\n\n${packageJsons.map((x) => x.name).join('\n')}`
+ )
  }
- const tagPrefix = canary ? 'canary' : 'v'
- const gitTag = `${tagPrefix}${nextVersion}`
 
- await finishAndCommit()
+ async function checkDistDirs() {
+ await Promise.all(
+ packageJsons.map(async ({ cwd, json }) => {
+ const distDir = path.join(cwd, 'dist')
+ if (json.scripts?.build) {
+ if (!(await fs.pathExists(distDir))) {
+ console.warn('no dist dir!', distDir)
+ process.exit(1)
+ }
+ }
+ })
+ )
+ }
 
- async function finishAndCommit(cwd = process.cwd()) {
- if (!rePublish || reRun || finish) {
- await run(`git add -A`, { cwd })
+ if (tamaguiGitUser) {
+ await run(`git config --global user.name 'Tamagui'`)
+ await run(`git config --global user.email 'tamagui@users.noreply.github.com`)
+ }
 
- await run(`git commit -m ${gitTag}`, { cwd })
+ console.info('install and build')
 
- if (!canary) {
- await run(`git tag ${gitTag}`, { cwd })
- }
+ if (!rePublish && !finish) {
+ await run(`bun install`)
+ }
 
- if (!dirty) {
- // pull once more before pushing so if there was a push in interim we get it
- await run(`git pull --rebase origin HEAD`, { cwd })
- }
+ if (!skipBuild && !finish) {
+ await run(`bun clean`)
+ await run(`bun run build`)
+ await checkDistDirs()
+ }
 
- await run(`git push origin head`, { cwd })
- if (!canary) {
- await run(`git push origin ${gitTag}`, { cwd })
- }
+ if (!finish) {
+ console.info('run checks')
 
- console.info(`✅ Pushed and versioned\n`)
+ if (!skipTest) {
+ await run(`bun lint`)
+ await run(`bun check:all`)
+ // only in packages
+ // await run(`bun test`)
  }
  }
 
- // sync on-zero OUT (after release)
- if (!skipOnZeroSync) {
- await syncOnZeroOut(nextVersion)
+ if (!dirty && !dryRun && !rePublish) {
+ const out = await run(`git status --porcelain`)
+ if (out.stdout) {
+ throw new Error(`Has unsaved git changes: ${out.stdout}`)
+ }
  }
- }
 
- console.info(`✅ Done\n`)
- } catch (err) {
- console.info('\nError:\n', err)
- process.exit(1)
- }
- }
-
- // sync on-zero: copy src from github to takeout, then takeout to github after release
- async function syncOnZero() {
- if (!(await fs.pathExists(onZeroGithub))) return
- const pkg = await fs.readJSON(join(onZeroTakeout, 'package.json'))
- await syncOnZeroIn()
- await syncOnZeroOut(pkg.version)
- }
-
- async function syncOnZeroIn() {
- if (!(await fs.pathExists(onZeroGithub))) return
-
- // check if there are commits after the last sync commit
- const log = (await $`git -C ${onZeroGithub} log --oneline --format=%s`.text()).trim()
- const commits = log.split('\n')
- const lastSyncIdx = commits.findIndex((c) => c.startsWith('sync: from takeout'))
-
- // no commits before sync, or first commit is a sync = nothing to pull in
- if (lastSyncIdx <= 0) {
- console.info(' ← on-zero: no new github commits to sync in')
- return
- }
-
- const newCommits = commits
- .slice(0, lastSyncIdx)
- .filter((c) => !c.match(/^v\d+\.\d+\.\d+/))
- if (!newCommits.length) {
- console.info(' ← on-zero: no new github commits to sync in')
- return
- }
-
- console.info(` ← on-zero: syncing ${newCommits.length} commits from github`)
- for (const c of newCommits) console.info(` ${c}`)
-
- if (dryRun) {
- console.info(' [dry-run] would copy src from github')
- return
- }
-
- await fs.copy(join(onZeroGithub, 'src'), join(onZeroTakeout, 'src'), {
- overwrite: true,
- })
+ // snapshot workspace:* deps before mutation (shallow copy mutates originals)
+ const workspaceDeps = new Map<string, Record<string, Record<string, string>>>()
+ for (const { json, path: pkgPath } of allPackageJsons) {
+ const deps: Record<string, Record<string, string>> = {}
+ for (const field of [
+ 'dependencies',
+ 'devDependencies',
+ 'optionalDependencies',
+ 'peerDependencies',
+ ]) {
+ if (!json[field]) continue
+ for (const depName in json[field]) {
+ if (json[field][depName].startsWith('workspace:')) {
+ deps[field] ??= {}
+ deps[field][depName] = json[field][depName]
+ }
+ }
+ }
+ if (Object.keys(deps).length) workspaceDeps.set(pkgPath, deps)
+ }
 
- const status = (await $`git status --porcelain`.text()).trim()
- if (status) {
- await $`git add packages/on-zero`
- await $`git commit -m "on-zero: sync from github"`
- }
- }
+ if (!skipVersion && !finish) {
+ await Promise.all(
+ allPackageJsons.map(async ({ json, path: pkgPath }) => {
+ const next = { ...json }
+
+ next.version = nextVersion
+
+ for (const field of [
+ 'dependencies',
+ 'devDependencies',
+ 'optionalDependencies',
+ 'peerDependencies',
+ ]) {
+ const nextDeps = next[field]
+ if (!nextDeps) continue
+ for (const depName in nextDeps) {
+ if (allPackageJsons.some((p) => p.name === depName)) {
+ nextDeps[depName] = nextVersion
+ }
+ }
+ }
 
- async function syncOnZeroOut(version: string) {
- if (!(await fs.pathExists(onZeroGithub))) return
+ await writeJSON(pkgPath, next, { spaces: 2 })
+ })
+ )
+ }
 
- // copy src files from takeout to github
- await fs.copy(join(onZeroTakeout, 'src'), join(onZeroGithub, 'src'), {
- overwrite: true,
- })
- await fs.copy(join(onZeroTakeout, 'cli.cjs'), join(onZeroGithub, 'cli.cjs'))
- await fs.copy(join(onZeroTakeout, 'tsconfig.json'), join(onZeroGithub, 'tsconfig.json'))
-
- // update package.json preserving github-specific fields
- const takeoutPkg = await fs.readJSON(join(onZeroTakeout, 'package.json'))
- const githubPkg = await fs.readJSON(join(onZeroGithub, 'package.json'))
- const convertDeps = (deps: Record<string, string>) =>
- Object.fromEntries(
- Object.entries(deps || {}).map(([k, v]) => [
- k,
- v.startsWith('workspace:') ? `^${version}` : v,
- ])
- )
- await fs.writeJSON(
- join(onZeroGithub, 'package.json'),
- {
- ...takeoutPkg,
- files: githubPkg.files,
- repository: githubPkg.repository,
- homepage: githubPkg.homepage,
- bugs: githubPkg.bugs,
- dependencies: convertDeps(takeoutPkg.dependencies),
- devDependencies: convertDeps(takeoutPkg.devDependencies),
- },
- { spaces: 2 }
- )
+ if (!finish && !rePublish) {
+ await run(`git diff`)
+ }
 
- // only commit if there are actual changes
- const status = (await $`git -C ${onZeroGithub} status --porcelain`.text()).trim()
- if (!status) return
-
- console.info(' → on-zero: syncing out to github')
-
- if (dryRun) {
- console.info(` [dry-run] would push: sync: from takeout v${version}`)
- await $`git -C ${onZeroGithub} checkout -- .`
- return
- }
-
- await $`git -C ${onZeroGithub} add -A`
- await $`git -C ${onZeroGithub} commit -m ${'sync: from takeout v' + version}`
- await $`git -C ${onZeroGithub} push origin main`
- }
-
- async function getWorkspacePackages() {
- // read workspaces from root package.json
- const rootPackageJson = await fs.readJSON(join(process.cwd(), 'package.json'))
- const workspaceGlobs = rootPackageJson.workspaces || []
-
- // resolve workspace paths
- const packagePaths: { name: string; location: string }[] = []
- for (const glob of workspaceGlobs) {
- if (glob.includes('*')) {
- // handle glob patterns like "./packages/*"
- const baseDir = glob.replace('/*', '')
- const fullPath = join(process.cwd(), baseDir)
- if (await fs.pathExists(fullPath)) {
- const dirs = await fs.readdir(fullPath)
- for (const dir of dirs) {
- const pkgPath = join(fullPath, dir, 'package.json')
- if (await fs.pathExists(pkgPath)) {
- const pkg = await fs.readJSON(pkgPath)
- packagePaths.push({
- name: pkg.name,
- location: join(baseDir, dir),
+ if (!finish) {
+ const packDir = path.join(os.tmpdir(), `takeout-release-${nextVersion}`)
+ await fs.ensureDir(packDir)
+
+ await pMap(
+ packageJsons,
+ async ({ name, cwd, json }) => {
+ const publishOptions = [canary && `--tag canary`, dryRun && `--dry-run`]
+ .filter(Boolean)
+ .join(' ')
+ const tgzPath = path.join(packDir, `${name.replace('/', '-')}.tgz`)
+
+ // pack with bun (properly converts workspace:* to versions)
+ // use swap-exports for packages with build scripts, otherwise just pack
+ if (json.scripts?.build) {
+ await run(
+ `bun run build --swap-exports -- bun pm pack --filename ${tgzPath}`,
+ {
+ cwd,
+ silent: true,
+ }
+ )
+ } else {
+ await run(`bun pm pack --filename ${tgzPath}`, {
+ cwd,
+ silent: true,
+ })
+ }
+
+ // publish the tgz directly
+ await run(`npm publish ${tgzPath} ${publishOptions}`.trim(), {
+ cwd: packDir,
+ silent: true,
  })
+
+ console.info(`${dryRun ? '[dry-run] ' : ''}Published ${name}`)
+ },
+ {
+ concurrency: 15,
  }
+ )
+
+ console.info(`✅ ${dryRun ? '[dry-run] ' : ''}Published\n`)
+
+ // restore workspace:* protocols after publishing
+ if (!dryRun) {
+ await Promise.all(
+ allPackageJsons.map(async ({ path: pkgPath }) => {
+ const saved = workspaceDeps.get(pkgPath)
+ if (!saved) return
+ const current = await fs.readJSON(pkgPath)
+ for (const field in saved) {
+ if (!current[field]) continue
+ for (const depName in saved[field]) {
+ current[field][depName] = saved[field][depName]
+ }
+ }
+ await writeJSON(pkgPath, current, { spaces: 2 })
+ })
+ )
+ }
+
+ // revert version changes after dry-run
+ if (dryRun) {
+ await run(`git checkout -- packages/*/package.json`, { silent: true })
+ console.info('Reverted version changes\n')
  }
  }
- } else {
- // handle direct paths like "./src/start"
- const pkgPath = join(process.cwd(), glob, 'package.json')
- if (await fs.pathExists(pkgPath)) {
- const pkg = await fs.readJSON(pkgPath)
- packagePaths.push({
- name: pkg.name,
- location: glob,
- })
- }
- }
- }
-
- return packagePaths
- }
-
- async function loadPackageJsons(packagePaths: { name: string; location: string }[]) {
- const allPackageJsons = await Promise.all(
- packagePaths
- .filter((i) => i.location !== '.' && !i.name.startsWith('@takeout'))
- .map(async ({ name, location }) => {
- const cwd = path.join(process.cwd(), location)
- const json = await fs.readJSON(path.join(cwd, 'package.json'))
- return {
- name,
- cwd,
- json,
- path: path.join(cwd, 'package.json'),
- directory: location,
+
+ if (!skipFinish && !dryRun) {
+ // then git tag, commit, push
+ if (!finish) {
+ await run(`bun install`)
  }
- })
- )
+ const tagPrefix = canary ? 'canary' : 'v'
+ const gitTag = `${tagPrefix}${nextVersion}`
 
- const publishablePackages = allPackageJsons.filter(
- (x) => !x.json.skipPublish && !x.json.private
- )
+ await finishAndCommit()
+
+ async function finishAndCommit(cwd = process.cwd()) {
+ if (!rePublish || reRun || finish) {
+ await run(`git add -A`, { cwd })
+
+ await run(`git commit -m ${gitTag}`, { cwd })
+
+ if (!canary) {
+ await run(`git tag ${gitTag}`, { cwd })
+ }
+
+ if (!dirty) {
+ // pull once more before pushing so if there was a push in interim we get it
+ await run(`git pull --rebase origin HEAD`, { cwd })
+ }
+
+ await run(`git push origin head`, { cwd })
+ if (!canary) {
+ await run(`git push origin ${gitTag}`, { cwd })
+ }
 
- return { allPackageJsons, publishablePackages }
- }
+ console.info(`✅ Pushed and versioned\n`)
+ }
+ }
+
+ // sync on-zero OUT (after release)
+ if (!skipOnZeroSync) {
+ await syncOnZeroOut(nextVersion)
+ }
+ }
+
+ console.info(`✅ Done\n`)
+ } catch (err) {
+ console.info('\nError:\n', err)
+ process.exit(1)
+ }
+ })
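
The rewritten script delegates flag parsing and its helper context to a local ./cmd module that is not included in this diff. Purely as a reading aid, the sketch below shows the interface the call sites above imply; every name and type in it is inferred from usage and may not match the actual module.

// Hypothetical sketch of the './cmd' surface implied by the call sites above —
// inferred from usage only, not the actual module shipped with the package.
import type { $ as BunShell } from 'bun'

interface CmdContext {
  // flags declared via .args() ("--skip-test boolean"), exposed camelCased (args.skipTest)
  args: Record<string, boolean>
  $: typeof BunShell // Bun's shell template tag
  run: (
    command: string,
    opts?: { cwd?: string; silent?: boolean }
  ) => Promise<{ stdout: string; stderr: string }>
  path: typeof import('node:path')
  os: typeof import('node:os')
}

interface Cmd {
  // whitespace-separated "--flag type" pairs, as in the release script above
  args(spec: string): Cmd
  // runs the handler with the shared context
  run(handler: (ctx: CmdContext) => Promise<void>): Promise<void>
}

// the tagged template carries the command description
declare function cmd(strings: TemplateStringsArray, ...values: unknown[]): Cmd

Under that reading, an invocation such as bun src/release.ts --patch --dry-run would have its flags parsed by the helper rather than read ad hoc from process.argv, which is why the option constants now come from args (the one holdover being the legacy --skip-tests alias, still checked via process.argv).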