@take-out/scripts 0.0.62 → 0.0.65

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,21 +1,21 @@
  {
  "name": "@take-out/scripts",
- "version": "0.0.62",
+ "version": "0.0.65",
  "type": "module",
  "main": "./src/run.ts",
  "sideEffects": false,
  "exports": {
  ".": {
- "types": "./types/run.d.ts",
+ "types": "./src/run.ts",
  "default": "./src/run.ts"
  },
  "./package.json": "./package.json",
  "./helpers/*": {
- "types": "./types/helpers/*.d.ts",
+ "types": "./src/helpers/*.ts",
  "default": "./src/helpers/*.ts"
  },
  "./*": {
- "types": "./types/*.d.ts",
+ "types": "./src/*.ts",
  "default": "./src/*.ts"
  }
  },
@@ -1,6 +1,8 @@
  #!/usr/bin/env bun

- const netstat = Bun.spawnSync(`netstat -rn`.split(' ')).stdout
+ import { spawnSync } from 'node:child_process'
+
+ const netstat = spawnSync('netstat', ['-rn'], { encoding: 'utf-8' }).stdout || ''
  const isTunnelActive = netstat.includes(`10.0.8/22`)

  if (!isTunnelActive) {
@@ -1,5 +1,7 @@
  #!/usr/bin/env bun

+ import { spawnSync } from 'node:child_process'
+
  import { ensureExists } from '@take-out/helpers'
  import { loadEnv } from '@take-out/scripts/helpers/env-load'

@@ -40,10 +42,9 @@ export async function execWithEnvironment(
  }),
  } as any as Record<string, string>

- return Bun.spawnSync(command.split(' '), {
- stdin: 'ignore',
- stdout: 'inherit',
- stderr: 'inherit',
+ const parts = command.split(' ')
+ return spawnSync(parts[0]!, parts.slice(1), {
+ stdio: ['ignore', 'inherit', 'inherit'],
  env,
  })
  }
@@ -53,5 +54,5 @@ if (import.meta.main) {
  (process.env.NODE_ENV as 'development' | 'production') || 'development',
  process.argv.slice(3).join(' ')
  )
- process.exit(result.exitCode)
+ process.exit(result.status ?? 1)
  }
@@ -36,18 +36,27 @@ function getDevDbPort(): string | undefined {
  return url ? getPortFromConnectionString(url) : undefined
  }

+ // extract port from a URL like http://localhost:8081
+ function getPortFromUrl(url: string | undefined): string | undefined {
+ if (!url) return undefined
+ try {
+ const parsed = new URL(url)
+ return parsed.port || undefined
+ } catch {
+ return undefined
+ }
+ }
+
  export async function getTestEnv() {
  const zeroVersion = getZeroVersion()
  const dockerHost = getDockerHost()
  const devEnv = await loadEnv('development')
  const serverEnvFallback = await import(join(process.cwd(), 'src/server/env-server'))

- // determine db port from (in order of priority):
- // 1. DOCKER_DB_PORT env var
- // 2. port from ZERO_UPSTREAM_DB in .env.development file (read directly to avoid
- // bun auto-loading .env.production when NODE_ENV=production)
- // 3. default 5432
- const dbPort = process.env.DOCKER_DB_PORT || getDevDbPort() || '5432'
+ // read ports from PORT_* env vars (set in .env.development) to support per-project configuration
+ const dbPort = process.env.PORT_POSTGRES || getDevEnvVar('PORT_POSTGRES') || '5433'
+ const appPort = process.env.PORT_WEB || getDevEnvVar('PORT_WEB') || '8081'
+ const minioPort = process.env.PORT_MINIO || getDevEnvVar('PORT_MINIO') || '9200'

  const dockerDbBase = `postgresql://user:password@127.0.0.1:${dbPort}`

@@ -64,13 +73,13 @@ export async function getTestEnv() {
  }),
  DO_NOT_TRACK: '1',
  ZERO_VERSION: zeroVersion,
- ZERO_MUTATE_URL: `http://${dockerHost}:8081/api/zero/push`,
- ZERO_QUERY_URL: `http://${dockerHost}:8081/api/zero/pull`,
+ ZERO_MUTATE_URL: `http://${dockerHost}:${appPort}/api/zero/push`,
+ ZERO_QUERY_URL: `http://${dockerHost}:${appPort}/api/zero/pull`,
  ZERO_UPSTREAM_DB: `${dockerDbBase}/postgres`,
  ZERO_CVR_DB: `${dockerDbBase}/zero_cvr`,
  ZERO_CHANGE_DB: `${dockerDbBase}/zero_cdb`,
- CLOUDFLARE_R2_ENDPOINT: 'http://127.0.0.1:9200',
- CLOUDFLARE_R2_PUBLIC_URL: 'http://127.0.0.1:9200',
+ CLOUDFLARE_R2_ENDPOINT: `http://127.0.0.1:${minioPort}`,
+ CLOUDFLARE_R2_PUBLIC_URL: `http://127.0.0.1:${minioPort}`,
  CLOUDFLARE_R2_ACCESS_KEY: 'minio',
  CLOUDFLARE_R2_SECRET_KEY: 'minio_password',
  // ensure auth secret matches dev db keys
@@ -1,9 +1,9 @@
+ import { spawn, type ChildProcess } from 'node:child_process'
  import { cpus } from 'node:os'

  import type { Timer } from '@take-out/helpers'
- import type { ChildProcess } from 'node:child_process'

- export type ProcessType = ChildProcess | Bun.Subprocess
+ export type ProcessType = ChildProcess
  export type ProcessHandler = (process: ProcessType) => void

  // track if we're in cleanup state (another process failed)
@@ -94,11 +94,11 @@ export async function run(
  let didTimeOut = false

  try {
- const shell = Bun.spawn(['bash', '-c', command], {
+ const shell = spawn('bash', ['-c', command], {
  env: { ...process.env, ...env },
  cwd,
- stdout: 'pipe',
- stderr: 'pipe', // always pipe stderr so we can capture it for error messages
+ stdio: ['ignore', 'pipe', 'pipe'],
+ detached: detached ?? false,
  })

  if (detached) {
@@ -128,27 +128,26 @@
  }
  }

- const processStream = async (
- stream: ReadableStream<Uint8Array> | undefined,
+ const processStream = (
+ stream: NodeJS.ReadableStream | null,
  isStderr: boolean
  ): Promise<string> => {
- if (effectiveSilent && !captureOutput) {
- return ''
- }
-
- if (!stream) return ''
+ return new Promise((resolve) => {
+ if (effectiveSilent && !captureOutput) {
+ resolve('')
+ return
+ }

- let buffer = ''
- let captured = ''
- const decoder = new TextDecoder()
- const reader = stream.getReader()
+ if (!stream) {
+ resolve('')
+ return
+ }

- try {
- while (true) {
- const { done, value } = await reader.read()
- if (done) break
+ let buffer = ''
+ let captured = ''

- const text = buffer + decoder.decode(value, { stream: true })
+ stream.on('data', (chunk: Buffer) => {
+ const text = buffer + chunk.toString()
  const lines = text.split('\n')

  // keep last partial line in buffer
@@ -164,32 +163,35 @@ export async function run(
  writeOutput(line + '\n', isStderr)
  }
  }
- }
+ })

- // output any remaining buffer
- if (buffer) {
- captured += buffer
- if (!captureOutput || prefix) {
- writeOutput(buffer + '\n', isStderr)
+ stream.on('end', () => {
+ // output any remaining buffer
+ if (buffer) {
+ captured += buffer
+ if (!captureOutput || prefix) {
+ writeOutput(buffer + '\n', isStderr)
+ }
  }
- }
- } catch (err) {
- console.error(`Error reading stream!`, err)
- } finally {
- reader.releaseLock()
- }
-
- return captured
+ resolve(captured)
+ })
+
+ stream.on('error', (err) => {
+ console.error(`Error reading stream!`, err)
+ resolve(captured)
+ })
+ })
  }

- // always process both streams
- const [stdout, stderr] = await Promise.all([
+ // process both streams and wait for exit
+ const [stdout, stderr, exitCode] = await Promise.all([
  processStream(shell.stdout, false),
  processStream(shell.stderr, true),
+ new Promise<number | null>((resolve) => {
+ shell.on('close', (code) => resolve(code))
+ }),
  ])

- const exitCode = await shell.exited
-
  if (timeoutId) {
  clearTimeout(timeoutId)
  }
package/src/release.ts CHANGED
@@ -1,10 +1,11 @@
+ import { tmpdir } from 'node:os'
  import path, { join } from 'node:path'

  // note! this is an helper script used by tamagui team for publishing the takeout packages
  // you can delete this from your own app

  import { run } from '@take-out/scripts/helpers/run'
- import fs, { ensureDir, writeJSON } from 'fs-extra'
+ import fs, { writeJSON } from 'fs-extra'
  import pMap from 'p-map'

  // avoid emitter error
@@ -170,104 +171,62 @@ async function main() {
  )
  }

- if (!finish && dryRun) {
- console.info(`Dry run, exiting before publish`)
- return
- }
-
  if (!finish && !rePublish) {
  await run(`git diff`)
  }

  if (!finish) {
- const tmpDir = `/tmp/one-publish`
- // clean up from previous runs
- await fs.remove(tmpDir)
- await ensureDir(tmpDir)
+ const packDir = join(tmpdir(), `takeout-release-${nextVersion}`)
+ await fs.ensureDir(packDir)

  await pMap(
  packageJsons,
- async ({ name, cwd }) => {
- const publishOptions = [canary && `--tag canary`].filter(Boolean).join(' ')
-
- // pack with workspace:* converted to versions
- const tmpPackageDir = join(tmpDir, name.replace('/', '_'))
- await fs.copy(cwd, tmpPackageDir, {
- filter: (src) => {
- // exclude node_modules to avoid symlink issues
- return !src.includes('node_modules')
- },
- })
+ async ({ name, cwd, json }) => {
+ const publishOptions = [canary && `--tag canary`, dryRun && `--dry-run`]
+ .filter(Boolean)
+ .join(' ')
+ const tgzPath = join(packDir, `${name.replace('/', '-')}.tgz`)

- // replace workspace:* with version
- const pkgJsonPath = join(tmpPackageDir, 'package.json')
- const pkgJson = await fs.readJSON(pkgJsonPath)
- for (const field of [
- 'dependencies',
- 'devDependencies',
- 'optionalDependencies',
- 'peerDependencies',
- ]) {
- if (!pkgJson[field]) continue
- for (const depName in pkgJson[field]) {
- if (pkgJson[field][depName].startsWith('workspace:')) {
- pkgJson[field][depName] = nextVersion
- }
- }
- }
- await writeJSON(pkgJsonPath, pkgJson, { spaces: 2 })
-
- const filename = `${name.replace('/', '_')}-package.tmp.tgz`
- const absolutePath = `${tmpDir}/${filename}`
-
- // swap exports.types from ./src/*.ts to ./types/*.d.ts for publishing
- if (pkgJson.exports) {
- const swapTypes = (obj: any) => {
- for (const key in obj) {
- const val = obj[key]
- if (typeof val === 'object' && val !== null) {
- swapTypes(val)
- } else if (
- key === 'types' &&
- typeof val === 'string' &&
- val.includes('/src/')
- ) {
- obj[key] = val.replace('/src/', '/types/').replace('.ts', '.d.ts')
- }
+ // pack with bun (properly converts workspace:* to versions)
+ // use swap-exports for packages with build scripts, otherwise just pack
+ if (json.scripts?.build) {
+ await run(
+ `bun run build --swap-exports -- bun pm pack --filename ${tgzPath}`,
+ {
+ cwd,
+ silent: true,
  }
- }
- swapTypes(pkgJson.exports)
- await writeJSON(pkgJsonPath, pkgJson, { spaces: 2 })
+ )
+ } else {
+ await run(`bun pm pack --filename ${tgzPath}`, {
+ cwd,
+ silent: true,
+ })
  }

- await run(`npm pack --pack-destination ${tmpDir}`, {
- cwd: tmpPackageDir,
+ // publish the tgz directly
+ await run(`npm publish ${tgzPath} ${publishOptions}`.trim(), {
+ cwd: packDir,
  silent: true,
  })

- // rename npm's output to our expected filename
- const npmFilename = `${name.replace('@', '').replace('/', '-')}-${nextVersion}.tgz`
- await fs.rename(join(tmpDir, npmFilename), absolutePath)
-
- const publishCommand = ['npm publish', absolutePath, publishOptions]
- .filter(Boolean)
- .join(' ')
-
- console.info(`Publishing ${name}: ${publishCommand}`)
-
- await run(publishCommand, {
- cwd: tmpDir,
- }).catch((err) => console.error(err))
+ console.info(`${dryRun ? '[dry-run] ' : ''}Published ${name}`)
  },
  {
  concurrency: 15,
  }
  )

- console.info(`✅ Published\n`)
+ console.info(`✅ ${dryRun ? '[dry-run] ' : ''}Published\n`)
+
+ // revert version changes after dry-run
+ if (dryRun) {
+ await run(`git checkout -- packages/*/package.json`, { silent: true })
+ console.info('Reverted version changes\n')
+ }
  }

- if (!skipFinish) {
+ if (!skipFinish && !dryRun) {
  // then git tag, commit, push
  if (!finish) {
  await run(`bun install`)
@@ -1,18 +1,21 @@
  #!/usr/bin/env bun

+ import { spawnSync } from 'node:child_process'
+
  export function getEnvironment(resourceName: string) {
  if (!resourceName) {
  console.error(`No resouce name given:
-
+
  bun scripts/get-environment.ts [resourceName]
  `)
  process.exit(1)
  }

  console.info(`Getting environment for ${resourceName}...`)
- const state = JSON.parse(
- Bun.spawnSync(`bun sst state export --stage production`.split(' ')).stdout.toString()
- )
+ const result = spawnSync('bun', ['sst', 'state', 'export', '--stage', 'production'], {
+ encoding: 'utf-8',
+ })
+ const state = JSON.parse(result.stdout || '{}')

  const resource = state.latest.resources.find(
  (x: any) => x.outputs?._dev?.title === resourceName