@take-out/scripts 0.0.93 → 0.0.95

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/release.ts CHANGED
@@ -1,483 +1,508 @@
- import { homedir, tmpdir } from 'node:os'
- import path, { join } from 'node:path'
+ #!/usr/bin/env bun
 
- // note! this is an helper script used by tamagui team for publishing the takeout packages
- // you can delete this from your own app
-
- import { run } from '@take-out/scripts/helpers/run'
- import { $ } from 'bun'
- import fs, { writeJSON } from 'fs-extra'
- import pMap from 'p-map'
+ import { cmd } from './cmd'
 
  // avoid emitter error
  process.setMaxListeners(50)
  process.stderr.setMaxListeners(50)
  process.stdout.setMaxListeners(50)
 
- // on-zero sync paths
- const onZeroGithub = join(homedir(), 'github', 'on-zero')
- const onZeroTakeout = join(process.cwd(), 'packages', 'on-zero')
-
- // for failed publishes that need to re-run
- const reRun = process.argv.includes('--rerun')
- const rePublish = reRun || process.argv.includes('--republish')
- const finish = process.argv.includes('--finish')
- const skipFinish = process.argv.includes('--skip-finish')
-
- const canary = process.argv.includes('--canary')
- const skipVersion = finish || rePublish || process.argv.includes('--skip-version')
- const shouldMajor = process.argv.includes('--major')
- const shouldMinor = process.argv.includes('--minor')
- const shouldPatch = process.argv.includes('--patch')
- const dirty = finish || process.argv.includes('--dirty')
- const skipTest =
- finish ||
- rePublish ||
- process.argv.includes('--skip-test') ||
- process.argv.includes('--skip-tests')
- const skipBuild = finish || rePublish || process.argv.includes('--skip-build')
- const dryRun = process.argv.includes('--dry-run')
- const tamaguiGitUser = process.argv.includes('--tamagui-git-user')
- const syncOnZeroOnly = process.argv.includes('--sync-on-zero')
- const skipOnZeroSync = process.argv.includes('--skip-on-zero-sync')
-
- // handle --sync-on-zero standalone mode
- if (syncOnZeroOnly) {
- syncOnZero().catch((err) => {
- console.error('sync failed:', err)
- process.exit(1)
- })
- } else {
- mainRelease()
- }
-
- async function mainRelease() {
- const curVersion = fs.readJSONSync('./packages/helpers/package.json').version
-
- // must specify version (unless republishing):
- if (!rePublish && !skipVersion && !shouldPatch && !shouldMinor && !shouldMajor) {
- console.error(`Must specify one of --patch, --minor, or --major`)
- process.exit(1)
- }
-
- const nextVersion = (() => {
- if (rePublish || skipVersion) {
- return curVersion
- }
+ await cmd`publish takeout packages to npm`
+ .args(
+ `--patch boolean --minor boolean --major boolean --canary boolean
+ --rerun boolean --republish boolean --finish boolean --skip-finish boolean
+ --dry-run boolean --skip-test boolean --skip-build boolean --skip-version boolean
+ --dirty boolean --tamagui-git-user boolean --sync-on-zero boolean --skip-on-zero-sync boolean`
+ )
+ .run(async ({ args, $, run, path, os }) => {
+ const fs = (await import('fs-extra')).default
+ const { writeJSON } = await import('fs-extra')
+ const pMap = (await import('p-map')).default
+
+ // on-zero sync paths
+ const onZeroGithub = path.join(os.homedir(), 'github', 'on-zero')
+ const onZeroTakeout = path.join(process.cwd(), 'packages', 'on-zero')
+
+ // for failed publishes that need to re-run
+ const reRun = args.rerun
+ const rePublish = reRun || args.republish
+ const finish = args.finish
+ const skipFinish = args.skipFinish
+
+ const canary = args.canary
+ const skipVersion = finish || rePublish || args.skipVersion
+ const shouldMajor = args.major
+ const shouldMinor = args.minor
+ const shouldPatch = args.patch
+ const dirty = finish || args.dirty
+ const skipTest =
+ finish || rePublish || args.skipTest || process.argv.includes('--skip-tests')
+ const skipBuild = finish || rePublish || args.skipBuild
+ const dryRun = args.dryRun
+ const tamaguiGitUser = args.tamaguiGitUser
+ const syncOnZeroOnly = args.syncOnZero
+ const skipOnZeroSync = args.skipOnZeroSync
+
+ async function syncOnZeroIn() {
+ if (!(await fs.pathExists(onZeroGithub))) return
+
+ // check if there are commits after the last sync commit
+ const log = (
+ await $`git -C ${onZeroGithub} log --oneline --format=%s`.text()
+ ).trim()
+ const commits = log.split('\n')
+ const lastSyncIdx = commits.findIndex((c) => c.startsWith('sync: from takeout'))
+
+ // no commits before sync, or first commit is a sync = nothing to pull in
+ if (lastSyncIdx <= 0) {
+ console.info(' ← on-zero: no new github commits to sync in')
+ return
+ }
 
- if (canary) {
- return `${curVersion.replace(/(-\d+)+$/, '')}-${Date.now()}`
- }
+ const newCommits = commits
+ .slice(0, lastSyncIdx)
+ .filter((c) => !c.match(/^v\d+\.\d+\.\d+/))
+ if (!newCommits.length) {
+ console.info(' ← on-zero: no new github commits to sync in')
+ return
+ }
 
- const curMajor = +curVersion.split('.')[0] || 0
- const curMinor = +curVersion.split('.')[1] || 0
- const patchAndCanary = curVersion.split('.')[2]
- const [curPatch] = patchAndCanary.split('-')
- const patchVersion = shouldPatch ? +curPatch + 1 : 0
- const minorVersion = curMinor + (shouldMinor ? 1 : 0)
- const majorVersion = curMajor + (shouldMajor ? 1 : 0)
- const next = `${majorVersion}.${minorVersion}.${patchVersion}`
-
- return next
- })()
-
- if (!skipVersion) {
- console.info(` 🚀 Releasing:`)
- console.info(' Current:', curVersion)
- console.info(` Next: ${nextVersion}`)
- }
-
- try {
- // sync on-zero IN (before release)
- if (!skipOnZeroSync && !finish && !rePublish) {
- await syncOnZeroIn()
- }
+ console.info(` ← on-zero: syncing ${newCommits.length} commits from github`)
+ for (const c of newCommits) console.info(` ${c}`)
 
- // ensure we are up to date
- // ensure we are on main
- if (!canary) {
- if ((await run(`git rev-parse --abbrev-ref HEAD`)).stdout.trim() !== 'main') {
- throw new Error(`Not on main`)
+ if (dryRun) {
+ console.info(' [dry-run] would copy src from github')
+ return
  }
- if (!dirty && !rePublish && !finish) {
- await run(`git pull --rebase origin main`)
+
+ await fs.copy(path.join(onZeroGithub, 'src'), path.join(onZeroTakeout, 'src'), {
+ overwrite: true,
+ })
+
+ const status = (await $`git status --porcelain`.text()).trim()
+ if (status) {
+ await $`git add packages/on-zero`
+ await $`git commit -m "on-zero: sync from github"`
  }
  }
 
- const packagePaths = await getWorkspacePackages()
- const { allPackageJsons, publishablePackages: packageJsons } =
- await loadPackageJsons(packagePaths)
+ async function syncOnZeroOut(version: string) {
+ if (!(await fs.pathExists(onZeroGithub))) return
 
- if (!finish) {
- console.info(
- `Publishing in order:\n\n${packageJsons.map((x) => x.name).join('\n')}`
+ // copy src files from takeout to github
+ await fs.copy(path.join(onZeroTakeout, 'src'), path.join(onZeroGithub, 'src'), {
+ overwrite: true,
+ })
+ await fs.copy(
+ path.join(onZeroTakeout, 'cli.cjs'),
+ path.join(onZeroGithub, 'cli.cjs')
+ )
+ await fs.copy(
+ path.join(onZeroTakeout, 'tsconfig.json'),
+ path.join(onZeroGithub, 'tsconfig.json')
+ )
+
+ // update package.json preserving github-specific fields
+ const takeoutPkg = await fs.readJSON(path.join(onZeroTakeout, 'package.json'))
+ const githubPkg = await fs.readJSON(path.join(onZeroGithub, 'package.json'))
+ const convertDeps = (deps: Record<string, string>) =>
+ Object.fromEntries(
+ Object.entries(deps || {}).map(([k, v]) => [
+ k,
+ v.startsWith('workspace:') ? `^${version}` : v,
+ ])
+ )
+ await fs.writeJSON(
+ path.join(onZeroGithub, 'package.json'),
+ {
+ ...takeoutPkg,
+ files: githubPkg.files,
+ repository: githubPkg.repository,
+ homepage: githubPkg.homepage,
+ bugs: githubPkg.bugs,
+ dependencies: convertDeps(takeoutPkg.dependencies),
+ devDependencies: convertDeps(takeoutPkg.devDependencies),
+ },
+ { spaces: 2 }
  )
+
+ // only commit if there are actual changes
+ const status = (await $`git -C ${onZeroGithub} status --porcelain`.text()).trim()
+ if (!status) return
+
+ console.info(' → on-zero: syncing out to github')
+
+ if (dryRun) {
+ console.info(` [dry-run] would push: sync: from takeout v${version}`)
+ await $`git -C ${onZeroGithub} checkout -- .`
+ return
+ }
+
+ await $`git -C ${onZeroGithub} add -A`
+ await $`git -C ${onZeroGithub} commit -m ${'sync: from takeout v' + version}`
+ await $`git -C ${onZeroGithub} push origin main`
+ }
+
+ // sync on-zero: copy src from github to takeout, then takeout to github after release
+ async function syncOnZero() {
+ if (!(await fs.pathExists(onZeroGithub))) return
+ const pkg = await fs.readJSON(path.join(onZeroTakeout, 'package.json'))
+ await syncOnZeroIn()
+ await syncOnZeroOut(pkg.version)
  }
 
- async function checkDistDirs() {
- await Promise.all(
- packageJsons.map(async ({ cwd, json }) => {
- const distDir = join(cwd, 'dist')
- if (json.scripts?.build) {
- if (!(await fs.pathExists(distDir))) {
- console.warn('no dist dir!', distDir)
- process.exit(1)
+ async function getWorkspacePackages() {
+ // read workspaces from root package.json
+ const rootPackageJson = await fs.readJSON(path.join(process.cwd(), 'package.json'))
+ const workspaceGlobs = rootPackageJson.workspaces || []
+
+ // resolve workspace paths
+ const packagePaths: { name: string; location: string }[] = []
+ for (const glob of workspaceGlobs) {
+ if (glob.includes('*')) {
+ // handle glob patterns like "./packages/*"
+ const baseDir = glob.replace('/*', '')
+ const fullPath = path.join(process.cwd(), baseDir)
+ if (await fs.pathExists(fullPath)) {
+ const dirs = await fs.readdir(fullPath)
+ for (const dir of dirs) {
+ const pkgPath = path.join(fullPath, dir, 'package.json')
+ if (await fs.pathExists(pkgPath)) {
+ const pkg = await fs.readJSON(pkgPath)
+ packagePaths.push({
+ name: pkg.name,
+ location: path.join(baseDir, dir),
+ })
+ }
  }
  }
- })
+ } else {
+ // handle direct paths like "./src/start"
+ const pkgPath = path.join(process.cwd(), glob, 'package.json')
+ if (await fs.pathExists(pkgPath)) {
+ const pkg = await fs.readJSON(pkgPath)
+ packagePaths.push({
+ name: pkg.name,
+ location: glob,
+ })
+ }
+ }
+ }
+
+ return packagePaths
+ }
+
+ async function loadPackageJsons(packagePaths: { name: string; location: string }[]) {
+ const allPackageJsons = await Promise.all(
+ packagePaths
+ .filter((i) => i.location !== '.' && !i.name.startsWith('@takeout'))
+ .map(async ({ name, location }) => {
+ const cwd = path.join(process.cwd(), location)
+ const json = await fs.readJSON(path.join(cwd, 'package.json'))
+ return {
+ name,
+ cwd,
+ json,
+ path: path.join(cwd, 'package.json'),
+ directory: location,
+ }
+ })
+ )
+
+ const publishablePackages = allPackageJsons.filter(
+ (x) => !x.json.skipPublish && !x.json.private
  )
+
+ return { allPackageJsons, publishablePackages }
  }
 
- if (tamaguiGitUser) {
- await run(`git config --global user.name 'Tamagui'`)
- await run(`git config --global user.email 'tamagui@users.noreply.github.com`)
+ // handle --sync-on-zero standalone mode
+ if (syncOnZeroOnly) {
+ try {
+ await syncOnZero()
+ } catch (err) {
+ console.error('sync failed:', err)
+ process.exit(1)
+ }
+ return
  }
 
- console.info('install and build')
+ // main release flow
+ const curVersion = fs.readJSONSync('./packages/helpers/package.json').version
 
- if (!rePublish && !finish) {
- await run(`bun install`)
+ // must specify version (unless republishing):
+ if (!rePublish && !skipVersion && !shouldPatch && !shouldMinor && !shouldMajor) {
+ console.error(`Must specify one of --patch, --minor, or --major`)
+ process.exit(1)
  }
 
- if (!skipBuild && !finish) {
- await run(`bun clean`)
- await run(`bun run build`)
- await checkDistDirs()
+ const nextVersion = (() => {
+ if (rePublish || skipVersion) {
+ return curVersion
+ }
+
+ if (canary) {
+ return `${curVersion.replace(/(-\d+)+$/, '')}-${Date.now()}`
+ }
+
+ const curMajor = +curVersion.split('.')[0] || 0
+ const curMinor = +curVersion.split('.')[1] || 0
+ const patchAndCanary = curVersion.split('.')[2]
+ const [curPatch] = patchAndCanary.split('-')
+ const patchVersion = shouldPatch ? +curPatch + 1 : 0
+ const minorVersion = curMinor + (shouldMinor ? 1 : 0)
+ const majorVersion = curMajor + (shouldMajor ? 1 : 0)
+ const next = `${majorVersion}.${minorVersion}.${patchVersion}`
+
+ return next
+ })()
+
+ if (!skipVersion) {
+ console.info(` 🚀 Releasing:`)
+ console.info(' Current:', curVersion)
+ console.info(` Next: ${nextVersion}`)
  }
 
- if (!finish) {
- console.info('run checks')
+ try {
+ // sync on-zero IN (before release)
+ if (!skipOnZeroSync && !finish && !rePublish) {
+ await syncOnZeroIn()
+ }
 
- if (!skipTest) {
- await run(`bun lint`)
- await run(`bun check:all`)
- // only in packages
- // await run(`bun test`)
+ // ensure we are up to date
+ // ensure we are on main
+ if (!canary) {
+ if ((await run(`git rev-parse --abbrev-ref HEAD`)).stdout.trim() !== 'main') {
+ throw new Error(`Not on main`)
+ }
+ if (!dirty && !rePublish && !finish) {
+ await run(`git pull --rebase origin main`)
+ }
  }
- }
 
- if (!dirty && !dryRun && !rePublish) {
- const out = await run(`git status --porcelain`)
- if (out.stdout) {
- throw new Error(`Has unsaved git changes: ${out.stdout}`)
+ const packagePaths = await getWorkspacePackages()
+ const { allPackageJsons, publishablePackages: packageJsons } =
+ await loadPackageJsons(packagePaths)
+
+ if (!finish) {
+ console.info(
+ `Publishing in order:\n\n${packageJsons.map((x) => x.name).join('\n')}`
+ )
  }
- }
 
- if (!skipVersion && !finish) {
- await Promise.all(
- allPackageJsons.map(async ({ json, path }) => {
- const next = { ...json }
-
- next.version = nextVersion
-
- for (const field of [
- 'dependencies',
- 'devDependencies',
- 'optionalDependencies',
- 'peerDependencies',
- ]) {
- const nextDeps = next[field]
- if (!nextDeps) continue
- for (const depName in nextDeps) {
- if (allPackageJsons.some((p) => p.name === depName)) {
- nextDeps[depName] = nextVersion
+ async function checkDistDirs() {
+ await Promise.all(
+ packageJsons.map(async ({ cwd, json }) => {
+ const distDir = path.join(cwd, 'dist')
+ if (json.scripts?.build) {
+ if (!(await fs.pathExists(distDir))) {
+ console.warn('no dist dir!', distDir)
+ process.exit(1)
  }
  }
- }
+ })
+ )
+ }
 
- await writeJSON(path, next, { spaces: 2 })
- })
- )
- }
+ if (tamaguiGitUser) {
+ await run(`git config --global user.name 'Tamagui'`)
+ await run(`git config --global user.email 'tamagui@users.noreply.github.com`)
+ }
 
- if (!finish && !rePublish) {
- await run(`git diff`)
- }
+ console.info('install and build')
 
- if (!finish) {
- const packDir = join(tmpdir(), `takeout-release-${nextVersion}`)
- await fs.ensureDir(packDir)
-
- await pMap(
- packageJsons,
- async ({ name, cwd, json }) => {
- const publishOptions = [canary && `--tag canary`, dryRun && `--dry-run`]
- .filter(Boolean)
- .join(' ')
- const tgzPath = join(packDir, `${name.replace('/', '-')}.tgz`)
-
- // pack with bun (properly converts workspace:* to versions)
- // use swap-exports for packages with build scripts, otherwise just pack
- if (json.scripts?.build) {
- await run(
- `bun run build --swap-exports -- bun pm pack --filename ${tgzPath}`,
- {
- cwd,
- silent: true,
- }
- )
- } else {
- await run(`bun pm pack --filename ${tgzPath}`, {
- cwd,
- silent: true,
- })
- }
+ if (!rePublish && !finish) {
+ await run(`bun install`)
+ }
 
- // publish the tgz directly
- await run(`npm publish ${tgzPath} ${publishOptions}`.trim(), {
- cwd: packDir,
- silent: true,
- })
+ if (!skipBuild && !finish) {
+ await run(`bun clean`)
+ await run(`bun run build`)
+ await checkDistDirs()
+ }
 
- console.info(`${dryRun ? '[dry-run] ' : ''}Published ${name}`)
- },
- {
- concurrency: 15,
+ if (!finish) {
+ console.info('run checks')
+
+ if (!skipTest) {
+ await run(`bun lint`)
+ await run(`bun check:all`)
+ // only in packages
+ // await run(`bun test`)
  }
- )
+ }
 
- console.info(`✅ ${dryRun ? '[dry-run] ' : ''}Published\n`)
+ if (!dirty && !dryRun && !rePublish) {
+ const out = await run(`git status --porcelain`)
+ if (out.stdout) {
+ throw new Error(`Has unsaved git changes: ${out.stdout}`)
+ }
+ }
+
+ // snapshot workspace:* deps before mutation (shallow copy mutates originals)
+ const workspaceDeps = new Map<string, Record<string, Record<string, string>>>()
+ for (const { json, path: pkgPath } of allPackageJsons) {
+ const deps: Record<string, Record<string, string>> = {}
+ for (const field of [
+ 'dependencies',
+ 'devDependencies',
+ 'optionalDependencies',
+ 'peerDependencies',
+ ]) {
+ if (!json[field]) continue
+ for (const depName in json[field]) {
+ if (json[field][depName].startsWith('workspace:')) {
+ deps[field] ??= {}
+ deps[field][depName] = json[field][depName]
+ }
+ }
+ }
+ if (Object.keys(deps).length) workspaceDeps.set(pkgPath, deps)
+ }
 
- // restore workspace:* protocols after publishing
- if (!dryRun) {
+ if (!skipVersion && !finish) {
  await Promise.all(
- allPackageJsons.map(async ({ json, path }) => {
- const current = await fs.readJSON(path)
+ allPackageJsons.map(async ({ json, path: pkgPath }) => {
+ const next = { ...json }
+
+ next.version = nextVersion
+
  for (const field of [
  'dependencies',
  'devDependencies',
  'optionalDependencies',
  'peerDependencies',
  ]) {
- const origDeps = json[field]
- const currentDeps = current[field]
- if (!origDeps || !currentDeps) continue
- for (const depName in origDeps) {
- if (origDeps[depName].startsWith('workspace:') && currentDeps[depName]) {
- currentDeps[depName] = origDeps[depName]
+ const nextDeps = next[field]
+ if (!nextDeps) continue
+ for (const depName in nextDeps) {
+ if (allPackageJsons.some((p) => p.name === depName)) {
+ nextDeps[depName] = nextVersion
  }
  }
  }
- await writeJSON(path, current, { spaces: 2 })
+
+ await writeJSON(pkgPath, next, { spaces: 2 })
  })
  )
  }
 
- // revert version changes after dry-run
- if (dryRun) {
- await run(`git checkout -- packages/*/package.json`, { silent: true })
- console.info('Reverted version changes\n')
+ if (!finish && !rePublish) {
+ await run(`git diff`)
  }
- }
 
- if (!skipFinish && !dryRun) {
- // then git tag, commit, push
  if (!finish) {
- await run(`bun install`)
- }
- const tagPrefix = canary ? 'canary' : 'v'
- const gitTag = `${tagPrefix}${nextVersion}`
-
- await finishAndCommit()
-
- async function finishAndCommit(cwd = process.cwd()) {
- if (!rePublish || reRun || finish) {
- await run(`git add -A`, { cwd })
-
- await run(`git commit -m ${gitTag}`, { cwd })
+ const packDir = path.join(os.tmpdir(), `takeout-release-${nextVersion}`)
+ await fs.ensureDir(packDir)
+
+ await pMap(
+ packageJsons,
+ async ({ name, cwd, json }) => {
+ const publishOptions = [canary && `--tag canary`, dryRun && `--dry-run`]
+ .filter(Boolean)
+ .join(' ')
+ const tgzPath = path.join(packDir, `${name.replace('/', '-')}.tgz`)
+
+ // pack with bun (properly converts workspace:* to versions)
+ // use swap-exports for packages with build scripts, otherwise just pack
+ if (json.scripts?.build) {
+ await run(
+ `bun run build --swap-exports -- bun pm pack --filename ${tgzPath}`,
+ {
+ cwd,
+ silent: true,
+ }
+ )
+ } else {
+ await run(`bun pm pack --filename ${tgzPath}`, {
+ cwd,
+ silent: true,
+ })
+ }
 
- if (!canary) {
- await run(`git tag ${gitTag}`, { cwd })
- }
+ // publish the tgz directly
+ await run(`npm publish ${tgzPath} ${publishOptions}`.trim(), {
+ cwd: packDir,
+ silent: true,
+ })
 
- if (!dirty) {
- // pull once more before pushing so if there was a push in interim we get it
- await run(`git pull --rebase origin HEAD`, { cwd })
+ console.info(`${dryRun ? '[dry-run] ' : ''}Published ${name}`)
+ },
+ {
+ concurrency: 15,
  }
+ )
 
- await run(`git push origin head`, { cwd })
- if (!canary) {
- await run(`git push origin ${gitTag}`, { cwd })
- }
+ console.info(`✅ ${dryRun ? '[dry-run] ' : ''}Published\n`)
+
+ // restore workspace:* protocols after publishing
+ if (!dryRun) {
+ await Promise.all(
+ allPackageJsons.map(async ({ path: pkgPath }) => {
+ const saved = workspaceDeps.get(pkgPath)
+ if (!saved) return
+ const current = await fs.readJSON(pkgPath)
+ for (const field in saved) {
+ if (!current[field]) continue
+ for (const depName in saved[field]) {
+ current[field][depName] = saved[field][depName]
+ }
+ }
+ await writeJSON(pkgPath, current, { spaces: 2 })
+ })
+ )
+ }
 
- console.info(`✅ Pushed and versioned\n`)
+ // revert version changes after dry-run
+ if (dryRun) {
+ await run(`git checkout -- packages/*/package.json`, { silent: true })
+ console.info('Reverted version changes\n')
  }
  }
 
- // sync on-zero OUT (after release)
- if (!skipOnZeroSync) {
- await syncOnZeroOut(nextVersion)
- }
- }
+ if (!skipFinish && !dryRun) {
+ // then git tag, commit, push
+ if (!finish) {
+ await run(`bun install`)
+ }
+ const tagPrefix = canary ? 'canary' : 'v'
+ const gitTag = `${tagPrefix}${nextVersion}`
 
- console.info(`✅ Done\n`)
- } catch (err) {
- console.info('\nError:\n', err)
- process.exit(1)
- }
- }
-
- // sync on-zero: copy src from github to takeout, then takeout to github after release
- async function syncOnZero() {
- if (!(await fs.pathExists(onZeroGithub))) return
- const pkg = await fs.readJSON(join(onZeroTakeout, 'package.json'))
- await syncOnZeroIn()
- await syncOnZeroOut(pkg.version)
- }
-
- async function syncOnZeroIn() {
- if (!(await fs.pathExists(onZeroGithub))) return
-
- // check if there are commits after the last sync commit
- const log = (await $`git -C ${onZeroGithub} log --oneline --format=%s`.text()).trim()
- const commits = log.split('\n')
- const lastSyncIdx = commits.findIndex((c) => c.startsWith('sync: from takeout'))
-
- // no commits before sync, or first commit is a sync = nothing to pull in
- if (lastSyncIdx <= 0) {
- console.info(' ← on-zero: no new github commits to sync in')
- return
- }
-
- const newCommits = commits
- .slice(0, lastSyncIdx)
- .filter((c) => !c.match(/^v\d+\.\d+\.\d+/))
- if (!newCommits.length) {
- console.info(' ← on-zero: no new github commits to sync in')
- return
- }
-
- console.info(` ← on-zero: syncing ${newCommits.length} commits from github`)
- for (const c of newCommits) console.info(` ${c}`)
-
- if (dryRun) {
- console.info(' [dry-run] would copy src from github')
- return
- }
-
- await fs.copy(join(onZeroGithub, 'src'), join(onZeroTakeout, 'src'), {
- overwrite: true,
- })
+ await finishAndCommit()
 
- const status = (await $`git status --porcelain`.text()).trim()
- if (status) {
- await $`git add packages/on-zero`
- await $`git commit -m "on-zero: sync from github"`
- }
- }
+ async function finishAndCommit(cwd = process.cwd()) {
+ if (!rePublish || reRun || finish) {
+ await run(`git add -A`, { cwd })
 
- async function syncOnZeroOut(version: string) {
- if (!(await fs.pathExists(onZeroGithub))) return
+ await run(`git commit -m ${gitTag}`, { cwd })
 
- // copy src files from takeout to github
- await fs.copy(join(onZeroTakeout, 'src'), join(onZeroGithub, 'src'), {
- overwrite: true,
- })
- await fs.copy(join(onZeroTakeout, 'cli.cjs'), join(onZeroGithub, 'cli.cjs'))
- await fs.copy(join(onZeroTakeout, 'tsconfig.json'), join(onZeroGithub, 'tsconfig.json'))
-
- // update package.json preserving github-specific fields
- const takeoutPkg = await fs.readJSON(join(onZeroTakeout, 'package.json'))
- const githubPkg = await fs.readJSON(join(onZeroGithub, 'package.json'))
- const convertDeps = (deps: Record<string, string>) =>
- Object.fromEntries(
- Object.entries(deps || {}).map(([k, v]) => [
- k,
- v.startsWith('workspace:') ? `^${version}` : v,
- ])
- )
- await fs.writeJSON(
- join(onZeroGithub, 'package.json'),
- {
- ...takeoutPkg,
- files: githubPkg.files,
- repository: githubPkg.repository,
- homepage: githubPkg.homepage,
- bugs: githubPkg.bugs,
- dependencies: convertDeps(takeoutPkg.dependencies),
- devDependencies: convertDeps(takeoutPkg.devDependencies),
- },
- { spaces: 2 }
- )
+ if (!canary) {
+ await run(`git tag ${gitTag}`, { cwd })
+ }
 
- // only commit if there are actual changes
- const status = (await $`git -C ${onZeroGithub} status --porcelain`.text()).trim()
- if (!status) return
-
- console.info(' → on-zero: syncing out to github')
-
- if (dryRun) {
- console.info(` [dry-run] would push: sync: from takeout v${version}`)
- await $`git -C ${onZeroGithub} checkout -- .`
- return
- }
-
- await $`git -C ${onZeroGithub} add -A`
- await $`git -C ${onZeroGithub} commit -m ${'sync: from takeout v' + version}`
- await $`git -C ${onZeroGithub} push origin main`
- }
-
- async function getWorkspacePackages() {
- // read workspaces from root package.json
- const rootPackageJson = await fs.readJSON(join(process.cwd(), 'package.json'))
- const workspaceGlobs = rootPackageJson.workspaces || []
-
- // resolve workspace paths
- const packagePaths: { name: string; location: string }[] = []
- for (const glob of workspaceGlobs) {
- if (glob.includes('*')) {
- // handle glob patterns like "./packages/*"
- const baseDir = glob.replace('/*', '')
- const fullPath = join(process.cwd(), baseDir)
- if (await fs.pathExists(fullPath)) {
- const dirs = await fs.readdir(fullPath)
- for (const dir of dirs) {
- const pkgPath = join(fullPath, dir, 'package.json')
- if (await fs.pathExists(pkgPath)) {
- const pkg = await fs.readJSON(pkgPath)
- packagePaths.push({
- name: pkg.name,
- location: join(baseDir, dir),
- })
+ if (!dirty) {
+ // pull once more before pushing so if there was a push in interim we get it
+ await run(`git pull --rebase origin HEAD`, { cwd })
+ }
+
+ await run(`git push origin head`, { cwd })
+ if (!canary) {
+ await run(`git push origin ${gitTag}`, { cwd })
+ }
+
+ console.info(`✅ Pushed and versioned\n`)
  }
  }
- }
- } else {
- // handle direct paths like "./src/start"
- const pkgPath = join(process.cwd(), glob, 'package.json')
- if (await fs.pathExists(pkgPath)) {
- const pkg = await fs.readJSON(pkgPath)
- packagePaths.push({
- name: pkg.name,
- location: glob,
- })
- }
- }
- }
-
- return packagePaths
- }
-
- async function loadPackageJsons(packagePaths: { name: string; location: string }[]) {
- const allPackageJsons = await Promise.all(
- packagePaths
- .filter((i) => i.location !== '.' && !i.name.startsWith('@takeout'))
- .map(async ({ name, location }) => {
- const cwd = path.join(process.cwd(), location)
- const json = await fs.readJSON(path.join(cwd, 'package.json'))
- return {
- name,
- cwd,
- json,
- path: path.join(cwd, 'package.json'),
- directory: location,
- }
- })
- )
 
- const publishablePackages = allPackageJsons.filter(
- (x) => !x.json.skipPublish && !x.json.private
- )
+ // sync on-zero OUT (after release)
+ if (!skipOnZeroSync) {
+ await syncOnZeroOut(nextVersion)
+ }
+ }
 
- return { allPackageJsons, publishablePackages }
- }
+ console.info(`✅ Done\n`)
+ } catch (err) {
+ console.info('\nError:\n', err)
+ process.exit(1)
+ }
+ })
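
Note: the headline change in this release is that release.ts no longer parses process.argv by hand; it declares its boolean flags through the cmd tagged-template builder imported from ./cmd, whose .run() callback receives the parsed args along with $, run, path, and os. The ./cmd helper itself is not part of this diff, so the following is only a minimal sketch of the interface the script appears to rely on, assuming a Bun runtime; the flag-spec parsing, the camelCasing rule, and the run() shell wrapper shown here are assumptions for illustration, not the package's actual implementation.

// cmd.sketch.ts — hypothetical approximation of the ./cmd helper used above (not the real implementation)
import os from 'node:os'
import path from 'node:path'
import { $ } from 'bun'

type RunOpts = { cwd?: string; silent?: boolean }
type RunContext = {
  args: Record<string, boolean>
  $: typeof $
  run: (command: string, opts?: RunOpts) => Promise<{ stdout: string }>
  path: typeof path
  os: typeof os
}

export function cmd(strings: TemplateStringsArray, ...values: unknown[]) {
  const description = String.raw(strings, ...values)
  let flagNames: string[] = []

  const builder = {
    // a spec like `--skip-version boolean --dry-run boolean` declares the accepted flags
    args(spec: string) {
      flagNames = [...spec.matchAll(/--([\w-]+)\s+boolean/g)].map((m) => m[1])
      return builder
    },
    async run(fn: (ctx: RunContext) => Promise<void>) {
      // each declared flag becomes a camelCased boolean, e.g. --skip-version -> args.skipVersion
      const args = Object.fromEntries(
        flagNames.map((name) => [
          name.replace(/-(\w)/g, (_, c: string) => c.toUpperCase()),
          process.argv.includes(`--${name}`),
        ])
      )
      // minimal shell runner matching how release.ts calls run(command, { cwd, silent })
      const run = async (command: string, opts: RunOpts = {}) => {
        let proc = $`${{ raw: command }}`.cwd(opts.cwd ?? process.cwd())
        if (opts.silent) proc = proc.quiet()
        const out = await proc
        return { stdout: out.text() }
      }
      console.info(description)
      await fn({ args, $, run, path, os })
    },
  }

  return builder
}

Under those assumptions, an invocation such as `bun src/release.ts --patch --dry-run` would surface inside .run() as args.patch === true and args.dryRun === true, which matches how the rewritten script reads its options.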