@ranger1/dx 0.1.80 → 0.1.81
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +79 -0
- package/codex/agents/fixer.toml +1 -4
- package/codex/agents/spark.toml +1 -5
- package/lib/backend-artifact-deploy/artifact-builder.js +240 -0
- package/lib/backend-artifact-deploy/config.js +138 -0
- package/lib/backend-artifact-deploy/path-utils.js +18 -0
- package/lib/backend-artifact-deploy/remote-phases.js +13 -0
- package/lib/backend-artifact-deploy/remote-result.js +42 -0
- package/lib/backend-artifact-deploy/remote-script.js +337 -0
- package/lib/backend-artifact-deploy/remote-transport.js +118 -0
- package/lib/backend-artifact-deploy/rollback.js +5 -0
- package/lib/backend-artifact-deploy/runtime-package.js +41 -0
- package/lib/backend-artifact-deploy.js +32 -0
- package/lib/cli/commands/deploy.js +14 -1
- package/lib/cli/flags.js +8 -0
- package/lib/cli/help.js +15 -5
- package/lib/vercel-deploy.js +33 -3
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -181,6 +181,7 @@ dx 的命令由 `dx/config/commands.json` 驱动,并且内置了一些 interna
|
|
|
181
181
|
|
|
182
182
|
- `internal: sdk-build`:SDK 生成/构建
|
|
183
183
|
- `internal: backend-package`:后端打包
|
|
184
|
+
- `internal: backend-artifact-deploy`:后端制品构建、上传与远端部署
|
|
184
185
|
- `internal: start-dev`:开发环境一键启动
|
|
185
186
|
- `internal: pm2-stack`:PM2 交互式服务栈(支持端口清理/缓存清理配置)
|
|
186
187
|
|
|
@@ -195,6 +196,7 @@ dx db generate
|
|
|
195
196
|
dx db migrate --dev --name init
|
|
196
197
|
dx db deploy --prod -Y
|
|
197
198
|
dx deploy front --staging
|
|
199
|
+
dx deploy backend --prod
|
|
198
200
|
dx lint
|
|
199
201
|
dx test e2e backend apps/backend/e2e/auth
|
|
200
202
|
```
|
|
@@ -284,6 +286,83 @@ dx test e2e backend apps/backend/e2e/auth
|
|
|
284
286
|
- 需要的前置构建(例如 `shared`、`api-contracts`、OpenAPI 导出、后端构建等)应由项目自己的 Nx 依赖图(`dependsOn`/项目依赖)或 Vercel 的 `buildCommand` 负责。
|
|
285
287
|
- 这样 dx deploy 不会强依赖 `apps/sdk` 等目录结构,更容易适配不同 monorepo。
|
|
286
288
|
|
|
289
|
+
### backend 制品发布
|
|
290
|
+
|
|
291
|
+
当 `dx/config/commands.json` 的 `deploy.backend.internal` 配置为 `backend-artifact-deploy` 时,`dx deploy backend` 走内置的后端制品发布流程,而不是 Vercel 部署。
|
|
292
|
+
|
|
293
|
+
常用命令:
|
|
294
|
+
|
|
295
|
+
```bash
|
|
296
|
+
dx deploy backend --prod
|
|
297
|
+
dx deploy backend --build-only
|
|
298
|
+
dx deploy backend --prod --skip-migration
|
|
299
|
+
```
|
|
300
|
+
|
|
301
|
+
最小示例配置:
|
|
302
|
+
|
|
303
|
+
```json
|
|
304
|
+
{
|
|
305
|
+
"deploy": {
|
|
306
|
+
"backend": {
|
|
307
|
+
"internal": "backend-artifact-deploy",
|
|
308
|
+
"backendDeploy": {
|
|
309
|
+
"build": {
|
|
310
|
+
"app": "backend",
|
|
311
|
+
"distDir": "dist/backend",
|
|
312
|
+
"versionFile": "apps/backend/package.json",
|
|
313
|
+
"commands": {
|
|
314
|
+
"development": "npx nx build backend --configuration=development",
|
|
315
|
+
"staging": "npx nx build backend --configuration=production",
|
|
316
|
+
"production": "npx nx build backend --configuration=production"
|
|
317
|
+
}
|
|
318
|
+
},
|
|
319
|
+
"runtime": {
|
|
320
|
+
"appPackage": "apps/backend/package.json",
|
|
321
|
+
"rootPackage": "package.json",
|
|
322
|
+
"lockfile": "pnpm-lock.yaml",
|
|
323
|
+
"prismaSchemaDir": "apps/backend/prisma/schema",
|
|
324
|
+
"prismaConfig": "apps/backend/prisma.config.ts",
|
|
325
|
+
"ecosystemConfig": "ecosystem.config.cjs"
|
|
326
|
+
},
|
|
327
|
+
"artifact": {
|
|
328
|
+
"outputDir": "release/backend",
|
|
329
|
+
"bundleName": "backend-bundle"
|
|
330
|
+
},
|
|
331
|
+
"remote": {
|
|
332
|
+
"host": "deploy.example.com",
|
|
333
|
+
"port": 22,
|
|
334
|
+
"user": "deploy",
|
|
335
|
+
"baseDir": "/srv/example-app"
|
|
336
|
+
},
|
|
337
|
+
"startup": {
|
|
338
|
+
"mode": "pm2",
|
|
339
|
+
"serviceName": "backend"
|
|
340
|
+
},
|
|
341
|
+
"deploy": {
|
|
342
|
+
"keepReleases": 5,
|
|
343
|
+
"installCommand": "pnpm install --prod --no-frozen-lockfile --ignore-workspace",
|
|
344
|
+
"prismaGenerate": true,
|
|
345
|
+
"prismaMigrateDeploy": true
|
|
346
|
+
}
|
|
347
|
+
}
|
|
348
|
+
}
|
|
349
|
+
}
|
|
350
|
+
}
|
|
351
|
+
```
|
|
352
|
+
|
|
353
|
+
固定远端目录协议:
|
|
354
|
+
|
|
355
|
+
- `<baseDir>/releases/<version-name>`
|
|
356
|
+
- `<baseDir>/current`
|
|
357
|
+
- `<baseDir>/shared/.env.<environment>`
|
|
358
|
+
- `<baseDir>/shared/.env.<environment>.local`
|
|
359
|
+
- `<baseDir>/uploads/<bundle-file>`
|
|
360
|
+
|
|
361
|
+
运行时制品约束:
|
|
362
|
+
|
|
363
|
+
- 生成的 release `package.json` 默认只保留运行时依赖;如果应用把 `prisma` 放在 `devDependencies`,dx 会自动把它提升进 release 依赖,保证远端 `prisma generate` / `prisma migrate deploy` 可执行。
|
|
364
|
+
- 打包前会递归扫描整个 staged payload;任意层级出现 `.env*` 文件都会直接失败,避免把环境文件误打进制品。
|
|
365
|
+
|
|
287
366
|
## 依赖关系约定
|
|
288
367
|
|
|
289
368
|
dx 不负责管理「工程之间的构建依赖关系」。如果多个工程之间存在依赖(例如 `front/admin` 依赖 `shared` 或 `api-contracts`),必须由 Nx 的依赖图来表达并自动拉起:
|
package/codex/agents/fixer.toml
CHANGED
|
@@ -1,10 +1,7 @@
|
|
|
1
1
|
model = "gpt-5.3-codex"
|
|
2
2
|
model_reasoning_effort = "medium"
|
|
3
3
|
approval_policy = "never"
|
|
4
|
-
sandbox_mode = "
|
|
5
|
-
|
|
6
|
-
[sandbox_workspace_write]
|
|
7
|
-
network_access = true
|
|
4
|
+
sandbox_mode = "danger-full-access"
|
|
8
5
|
|
|
9
6
|
developer_instructions = '''
|
|
10
7
|
你是 fix 代理。
|
package/codex/agents/spark.toml
CHANGED
|
@@ -1,10 +1,7 @@
|
|
|
1
1
|
model = "gpt-5.3-codex"
|
|
2
2
|
model_reasoning_effort = "medium"
|
|
3
3
|
approval_policy = "never"
|
|
4
|
-
sandbox_mode = "
|
|
5
|
-
|
|
6
|
-
[sandbox_workspace_write]
|
|
7
|
-
network_access = true
|
|
4
|
+
sandbox_mode = "danger-full-access"
|
|
8
5
|
|
|
9
6
|
developer_instructions = '''
|
|
10
7
|
你是一个通用agent 根据输入的提示词,完整遵循他的要求执行任务
|
|
@@ -19,4 +16,3 @@ developer_instructions = '''
|
|
|
19
16
|
prompt是一个文件路径并不存在时,必须返回结构化错误:{"error":"PROMPT_FILE_NOT_FOUND","detail":"<file_path>"}
|
|
20
17
|
|
|
21
18
|
'''
|
|
22
|
-
|
|
@@ -0,0 +1,240 @@
|
|
|
1
|
+
import { execFile } from 'node:child_process'
|
|
2
|
+
import { existsSync } from 'node:fs'
|
|
3
|
+
import { cp, mkdir, readdir, readFile, rm, writeFile } from 'node:fs/promises'
|
|
4
|
+
import { basename, dirname, join, relative } from 'node:path'
|
|
5
|
+
import { promisify } from 'node:util'
|
|
6
|
+
import { execManager } from '../exec.js'
|
|
7
|
+
import { basenameOrThrow, resolveWithinBase } from './path-utils.js'
|
|
8
|
+
import { createRuntimePackage } from './runtime-package.js'
|
|
9
|
+
|
|
10
|
+
const execFileAsync = promisify(execFile)
|
|
11
|
+
|
|
12
|
+
function assertSafeNamePart(value, label) {
|
|
13
|
+
const text = String(value || '').trim()
|
|
14
|
+
if (!text || text.includes('/') || text.includes('\\') || text.includes('..')) {
|
|
15
|
+
throw new Error(`${label} 越界,已拒绝: ${text}`)
|
|
16
|
+
}
|
|
17
|
+
return text
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
function defaultNowTag() {
|
|
21
|
+
const now = new Date()
|
|
22
|
+
const pad = value => String(value).padStart(2, '0')
|
|
23
|
+
return [
|
|
24
|
+
now.getFullYear(),
|
|
25
|
+
pad(now.getMonth() + 1),
|
|
26
|
+
pad(now.getDate()),
|
|
27
|
+
].join('') + `-${pad(now.getHours())}${pad(now.getMinutes())}${pad(now.getSeconds())}`
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
async function defaultReadVersion(versionFile) {
|
|
31
|
+
const pkg = JSON.parse(await readFile(versionFile, 'utf8'))
|
|
32
|
+
return String(pkg.version || '').trim()
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
async function defaultRunBuild(build) {
|
|
36
|
+
if (!build?.command) {
|
|
37
|
+
throw new Error('缺少构建命令: build.command')
|
|
38
|
+
}
|
|
39
|
+
await execManager.executeCommand(build.command, {
|
|
40
|
+
app: build.app || undefined,
|
|
41
|
+
})
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
async function defaultPrepareOutputDir(outputDir) {
|
|
45
|
+
await mkdir(outputDir, { recursive: true })
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
async function copyIntoDir(source, destinationDir) {
|
|
49
|
+
if (!existsSync(source)) {
|
|
50
|
+
throw new Error(`缺少必需文件或目录: ${source}`)
|
|
51
|
+
}
|
|
52
|
+
await cp(source, destinationDir, { recursive: true })
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
async function defaultStageFiles({ config, stageDir, stagePlan }) {
|
|
56
|
+
await rm(stageDir, { recursive: true, force: true })
|
|
57
|
+
await mkdir(stageDir, { recursive: true })
|
|
58
|
+
|
|
59
|
+
for (const entry of await readdir(stagePlan.dist.source)) {
|
|
60
|
+
await copyIntoDir(join(stagePlan.dist.source, entry), join(stageDir, entry))
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
const appPackage = JSON.parse(await readFile(stagePlan.appPackage.source, 'utf8'))
|
|
64
|
+
const rootPackage = JSON.parse(await readFile(stagePlan.rootPackage.source, 'utf8'))
|
|
65
|
+
const runtimePackage = createRuntimePackage({ appPackage, rootPackage })
|
|
66
|
+
await writeFile(join(stageDir, stagePlan.runtimePackage.destination), `${JSON.stringify(runtimePackage, null, 2)}\n`)
|
|
67
|
+
|
|
68
|
+
await copyIntoDir(stagePlan.lockfile.source, join(stageDir, stagePlan.lockfile.destination))
|
|
69
|
+
|
|
70
|
+
if (stagePlan.prismaSchema) {
|
|
71
|
+
await mkdir(join(stageDir, dirname(stagePlan.prismaSchema.destination)), { recursive: true })
|
|
72
|
+
await copyIntoDir(stagePlan.prismaSchema.source, join(stageDir, stagePlan.prismaSchema.destination))
|
|
73
|
+
}
|
|
74
|
+
if (stagePlan.prismaConfig) {
|
|
75
|
+
await mkdir(join(stageDir, dirname(stagePlan.prismaConfig.destination)), { recursive: true })
|
|
76
|
+
await copyIntoDir(stagePlan.prismaConfig.source, join(stageDir, stagePlan.prismaConfig.destination))
|
|
77
|
+
}
|
|
78
|
+
if (stagePlan.ecosystemConfig) {
|
|
79
|
+
await copyIntoDir(stagePlan.ecosystemConfig.source, join(stageDir, stagePlan.ecosystemConfig.destination))
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
async function defaultAssertNoEnvFiles(stageDir) {
|
|
84
|
+
const envFiles = []
|
|
85
|
+
const queue = ['.']
|
|
86
|
+
|
|
87
|
+
while (queue.length > 0) {
|
|
88
|
+
const currentRelativeDir = queue.shift()
|
|
89
|
+
const currentDir = currentRelativeDir === '.' ? stageDir : join(stageDir, currentRelativeDir)
|
|
90
|
+
const entries = await readdir(currentDir, { withFileTypes: true })
|
|
91
|
+
|
|
92
|
+
for (const entry of entries) {
|
|
93
|
+
const entryRelativePath =
|
|
94
|
+
currentRelativeDir === '.' ? entry.name : join(currentRelativeDir, entry.name)
|
|
95
|
+
|
|
96
|
+
if (entry.name.startsWith('.env')) {
|
|
97
|
+
envFiles.push(entryRelativePath.replace(/\\/g, '/'))
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
if (entry.isDirectory()) {
|
|
101
|
+
queue.push(entryRelativePath)
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
if (envFiles.length > 0) {
|
|
107
|
+
throw new Error(`制品目录包含 .env* 文件: ${envFiles.join(', ')}`)
|
|
108
|
+
}
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
async function defaultCreateInnerArchive({ stageDir, innerArchivePath }) {
|
|
112
|
+
await mkdir(dirname(innerArchivePath), { recursive: true })
|
|
113
|
+
await execFileAsync('tar', ['-czf', innerArchivePath, '.'], {
|
|
114
|
+
cwd: stageDir,
|
|
115
|
+
})
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
async function defaultWriteChecksum({ archivePath, checksumPath }) {
|
|
119
|
+
try {
|
|
120
|
+
const { stdout } = await execFileAsync('sha256sum', [archivePath])
|
|
121
|
+
await writeFile(checksumPath, stdout)
|
|
122
|
+
} catch {
|
|
123
|
+
const { stdout } = await execFileAsync('shasum', ['-a', '256', archivePath])
|
|
124
|
+
await writeFile(checksumPath, stdout)
|
|
125
|
+
}
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
async function defaultCreateBundle({ outputDir, bundlePath, innerArchivePath, checksumPath }) {
|
|
129
|
+
await execFileAsync(
|
|
130
|
+
'tar',
|
|
131
|
+
['-czf', bundlePath, basename(innerArchivePath), basename(checksumPath)],
|
|
132
|
+
{ cwd: outputDir },
|
|
133
|
+
)
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
export function createArtifactNames({ version, timeTag, bundleName }) {
|
|
137
|
+
const safeVersion = assertSafeNamePart(version, 'version')
|
|
138
|
+
const safeTimeTag = assertSafeNamePart(timeTag, 'timeTag')
|
|
139
|
+
const safeBundleName = assertSafeNamePart(bundleName, 'bundleName')
|
|
140
|
+
const versionName = `backend-v${safeVersion}-${safeTimeTag}`
|
|
141
|
+
const innerArchiveName = `${versionName}.tgz`
|
|
142
|
+
return {
|
|
143
|
+
versionName,
|
|
144
|
+
innerArchiveName,
|
|
145
|
+
checksumName: `${innerArchiveName}.sha256`,
|
|
146
|
+
bundleName: `${safeBundleName}-v${safeVersion}-${safeTimeTag}.tgz`,
|
|
147
|
+
}
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
export function createStagePlan(config) {
|
|
151
|
+
const projectRoot = config.projectRoot || '/'
|
|
152
|
+
const relativeToProject = targetPath =>
|
|
153
|
+
relative(projectRoot, targetPath).replace(/\\/g, '/').replace(/^repo\//, '')
|
|
154
|
+
const plan = {
|
|
155
|
+
dist: {
|
|
156
|
+
source: config.build.distDir,
|
|
157
|
+
destination: '.',
|
|
158
|
+
},
|
|
159
|
+
runtimePackage: {
|
|
160
|
+
destination: 'package.json',
|
|
161
|
+
},
|
|
162
|
+
lockfile: {
|
|
163
|
+
source: config.runtime.lockfile,
|
|
164
|
+
destination: 'pnpm-lock.yaml',
|
|
165
|
+
},
|
|
166
|
+
appPackage: {
|
|
167
|
+
source: config.runtime.appPackage,
|
|
168
|
+
},
|
|
169
|
+
rootPackage: {
|
|
170
|
+
source: config.runtime.rootPackage,
|
|
171
|
+
},
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
if (config.runtime.prismaSchemaDir) {
|
|
175
|
+
plan.prismaSchema = {
|
|
176
|
+
source: config.runtime.prismaSchemaDir,
|
|
177
|
+
destination: relativeToProject(config.runtime.prismaSchemaDir),
|
|
178
|
+
}
|
|
179
|
+
}
|
|
180
|
+
if (config.runtime.prismaConfig) {
|
|
181
|
+
plan.prismaConfig = {
|
|
182
|
+
source: config.runtime.prismaConfig,
|
|
183
|
+
destination: relativeToProject(config.runtime.prismaConfig),
|
|
184
|
+
}
|
|
185
|
+
}
|
|
186
|
+
if (config.runtime.ecosystemConfig) {
|
|
187
|
+
plan.ecosystemConfig = {
|
|
188
|
+
source: config.runtime.ecosystemConfig,
|
|
189
|
+
destination: basenameOrThrow(config.runtime.ecosystemConfig, 'runtime.ecosystemConfig'),
|
|
190
|
+
}
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
return plan
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
export async function buildBackendArtifact(config, deps = {}) {
|
|
197
|
+
const nowTag = deps.nowTag || defaultNowTag
|
|
198
|
+
const readVersion = deps.readVersion || defaultReadVersion
|
|
199
|
+
const runBuild = deps.runBuild || defaultRunBuild
|
|
200
|
+
const prepareOutputDir = deps.prepareOutputDir || defaultPrepareOutputDir
|
|
201
|
+
const stageFiles = deps.stageFiles || defaultStageFiles
|
|
202
|
+
const assertNoEnvFiles = deps.assertNoEnvFiles || defaultAssertNoEnvFiles
|
|
203
|
+
const createInnerArchive = deps.createInnerArchive || defaultCreateInnerArchive
|
|
204
|
+
const writeChecksum = deps.writeChecksum || defaultWriteChecksum
|
|
205
|
+
const createBundle = deps.createBundle || defaultCreateBundle
|
|
206
|
+
const version = await readVersion(config.build.versionFile)
|
|
207
|
+
const timeTag = nowTag()
|
|
208
|
+
const names = createArtifactNames({
|
|
209
|
+
version,
|
|
210
|
+
timeTag,
|
|
211
|
+
bundleName: config.artifact.bundleName,
|
|
212
|
+
})
|
|
213
|
+
|
|
214
|
+
const outputDir = resolveWithinBase(config.artifact.outputDir, '.', 'artifact.outputDir')
|
|
215
|
+
const stageDir = resolveWithinBase(outputDir, names.versionName, 'stageDir')
|
|
216
|
+
const innerArchivePath = resolveWithinBase(outputDir, names.innerArchiveName, 'innerArchivePath')
|
|
217
|
+
const checksumPath = resolveWithinBase(outputDir, names.checksumName, 'checksumPath')
|
|
218
|
+
const bundlePath = resolveWithinBase(outputDir, names.bundleName, 'bundlePath')
|
|
219
|
+
|
|
220
|
+
await runBuild(config.build)
|
|
221
|
+
await prepareOutputDir(outputDir)
|
|
222
|
+
await stageFiles({
|
|
223
|
+
config,
|
|
224
|
+
stageDir,
|
|
225
|
+
stagePlan: createStagePlan(config),
|
|
226
|
+
})
|
|
227
|
+
await assertNoEnvFiles(stageDir)
|
|
228
|
+
await createInnerArchive({ stageDir, innerArchivePath })
|
|
229
|
+
await writeChecksum({ archivePath: innerArchivePath, checksumPath })
|
|
230
|
+
await createBundle({ outputDir, bundlePath, innerArchivePath, checksumPath })
|
|
231
|
+
|
|
232
|
+
return {
|
|
233
|
+
version,
|
|
234
|
+
timeTag,
|
|
235
|
+
versionName: names.versionName,
|
|
236
|
+
bundlePath,
|
|
237
|
+
innerArchivePath,
|
|
238
|
+
checksumPath,
|
|
239
|
+
}
|
|
240
|
+
}
|
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
import { isAbsolute } from 'node:path'
|
|
2
|
+
import { resolveWithinBase } from './path-utils.js'
|
|
3
|
+
|
|
4
|
+
function requireString(value, fieldPath) {
|
|
5
|
+
if (typeof value !== 'string' || value.trim() === '') {
|
|
6
|
+
throw new Error(`缺少必填配置: ${fieldPath}`)
|
|
7
|
+
}
|
|
8
|
+
return value.trim()
|
|
9
|
+
}
|
|
10
|
+
|
|
11
|
+
function requirePositiveInteger(value, fieldPath) {
|
|
12
|
+
const parsed = Number(value)
|
|
13
|
+
if (!Number.isInteger(parsed) || parsed <= 0) {
|
|
14
|
+
throw new Error(`缺少必填配置: ${fieldPath}`)
|
|
15
|
+
}
|
|
16
|
+
return parsed
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
function resolveBuildCommand(buildConfig, environment) {
|
|
20
|
+
if (buildConfig?.commands && typeof buildConfig.commands === 'object') {
|
|
21
|
+
const selected = buildConfig.commands[environment]
|
|
22
|
+
if (!selected || typeof selected !== 'string' || selected.trim() === '') {
|
|
23
|
+
throw new Error(`缺少必填配置: build.commands.${environment}`)
|
|
24
|
+
}
|
|
25
|
+
return selected.trim()
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
return requireString(buildConfig?.command, 'build.command')
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
function resolveProjectPath(projectRoot, relativePath, fieldPath) {
|
|
32
|
+
return resolveWithinBase(projectRoot, requireString(relativePath, fieldPath), fieldPath)
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
function requireRemoteBaseDir(value, fieldPath) {
|
|
36
|
+
const baseDir = requireString(value, fieldPath)
|
|
37
|
+
if (!isAbsolute(baseDir)) {
|
|
38
|
+
throw new Error(`${fieldPath} 必须是绝对路径: ${baseDir}`)
|
|
39
|
+
}
|
|
40
|
+
if (!/^\/[A-Za-z0-9._/-]*$/.test(baseDir)) {
|
|
41
|
+
throw new Error(`${fieldPath} 包含非法字符: ${baseDir}`)
|
|
42
|
+
}
|
|
43
|
+
return baseDir.replace(/\/+$/, '') || '/'
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
export function resolveBackendDeployConfig({ cli, targetConfig, environment, flags = {} }) {
|
|
47
|
+
const deployConfig = targetConfig?.backendDeploy
|
|
48
|
+
if (!deployConfig || typeof deployConfig !== 'object') {
|
|
49
|
+
throw new Error('缺少必填配置: backendDeploy')
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
const buildConfig = deployConfig.build || {}
|
|
53
|
+
const runtimeConfig = deployConfig.runtime || {}
|
|
54
|
+
const artifactConfig = deployConfig.artifact || {}
|
|
55
|
+
const remoteConfig = deployConfig.remote || null
|
|
56
|
+
const startupConfig = deployConfig.startup || {}
|
|
57
|
+
const runConfig = deployConfig.deploy || {}
|
|
58
|
+
const buildOnly = Boolean(flags.buildOnly)
|
|
59
|
+
const startupMode = String(startupConfig.mode || 'pm2').trim()
|
|
60
|
+
const prismaGenerate = runConfig.prismaGenerate !== false
|
|
61
|
+
const prismaMigrateDeploy = runConfig.prismaMigrateDeploy !== false
|
|
62
|
+
|
|
63
|
+
const normalized = {
|
|
64
|
+
projectRoot: cli.projectRoot,
|
|
65
|
+
environment,
|
|
66
|
+
build: {
|
|
67
|
+
app: typeof buildConfig.app === 'string' && buildConfig.app.trim() ? buildConfig.app.trim() : null,
|
|
68
|
+
command: resolveBuildCommand(buildConfig, environment),
|
|
69
|
+
distDir: resolveProjectPath(cli.projectRoot, buildConfig.distDir, 'build.distDir'),
|
|
70
|
+
versionFile: resolveProjectPath(cli.projectRoot, buildConfig.versionFile, 'build.versionFile'),
|
|
71
|
+
},
|
|
72
|
+
runtime: {
|
|
73
|
+
appPackage: resolveProjectPath(cli.projectRoot, runtimeConfig.appPackage, 'runtime.appPackage'),
|
|
74
|
+
rootPackage: resolveProjectPath(cli.projectRoot, runtimeConfig.rootPackage, 'runtime.rootPackage'),
|
|
75
|
+
lockfile: resolveProjectPath(cli.projectRoot, runtimeConfig.lockfile, 'runtime.lockfile'),
|
|
76
|
+
prismaSchemaDir: runtimeConfig.prismaSchemaDir
|
|
77
|
+
? resolveProjectPath(cli.projectRoot, runtimeConfig.prismaSchemaDir, 'runtime.prismaSchemaDir')
|
|
78
|
+
: null,
|
|
79
|
+
prismaConfig: runtimeConfig.prismaConfig
|
|
80
|
+
? resolveProjectPath(cli.projectRoot, runtimeConfig.prismaConfig, 'runtime.prismaConfig')
|
|
81
|
+
: null,
|
|
82
|
+
ecosystemConfig: runtimeConfig.ecosystemConfig
|
|
83
|
+
? resolveProjectPath(cli.projectRoot, runtimeConfig.ecosystemConfig, 'runtime.ecosystemConfig')
|
|
84
|
+
: null,
|
|
85
|
+
},
|
|
86
|
+
artifact: {
|
|
87
|
+
outputDir: resolveProjectPath(cli.projectRoot, artifactConfig.outputDir, 'artifact.outputDir'),
|
|
88
|
+
bundleName: requireString(artifactConfig.bundleName, 'artifact.bundleName'),
|
|
89
|
+
},
|
|
90
|
+
remote: buildOnly
|
|
91
|
+
? null
|
|
92
|
+
: {
|
|
93
|
+
host: requireString(remoteConfig?.host, 'remote.host'),
|
|
94
|
+
port: remoteConfig?.port == null ? 22 : requirePositiveInteger(remoteConfig.port, 'remote.port'),
|
|
95
|
+
user: requireString(remoteConfig?.user, 'remote.user'),
|
|
96
|
+
baseDir: requireRemoteBaseDir(remoteConfig?.baseDir, 'remote.baseDir'),
|
|
97
|
+
},
|
|
98
|
+
startup: {
|
|
99
|
+
mode: startupMode,
|
|
100
|
+
serviceName:
|
|
101
|
+
typeof startupConfig.serviceName === 'string' && startupConfig.serviceName.trim()
|
|
102
|
+
? startupConfig.serviceName.trim()
|
|
103
|
+
: null,
|
|
104
|
+
entry:
|
|
105
|
+
typeof startupConfig.entry === 'string' && startupConfig.entry.trim()
|
|
106
|
+
? startupConfig.entry.trim()
|
|
107
|
+
: null,
|
|
108
|
+
},
|
|
109
|
+
deploy: {
|
|
110
|
+
keepReleases:
|
|
111
|
+
runConfig.keepReleases == null ? 5 : requirePositiveInteger(runConfig.keepReleases, 'deploy.keepReleases'),
|
|
112
|
+
installCommand: requireString(
|
|
113
|
+
runConfig.installCommand || 'pnpm install --prod --no-frozen-lockfile --ignore-workspace',
|
|
114
|
+
'deploy.installCommand',
|
|
115
|
+
),
|
|
116
|
+
prismaGenerate,
|
|
117
|
+
prismaMigrateDeploy,
|
|
118
|
+
skipMigration: Boolean(flags.skipMigration),
|
|
119
|
+
},
|
|
120
|
+
}
|
|
121
|
+
|
|
122
|
+
if (!['pm2', 'direct'].includes(normalized.startup.mode)) {
|
|
123
|
+
throw new Error('缺少必填配置: startup.mode')
|
|
124
|
+
}
|
|
125
|
+
if (normalized.startup.mode === 'pm2') {
|
|
126
|
+
requireString(normalized.startup.serviceName, 'startup.serviceName')
|
|
127
|
+
requireString(normalized.runtime.ecosystemConfig, 'runtime.ecosystemConfig')
|
|
128
|
+
} else {
|
|
129
|
+
requireString(normalized.startup.entry, 'startup.entry')
|
|
130
|
+
}
|
|
131
|
+
|
|
132
|
+
if (normalized.deploy.prismaGenerate || normalized.deploy.prismaMigrateDeploy) {
|
|
133
|
+
requireString(normalized.runtime.prismaSchemaDir, 'runtime.prismaSchemaDir')
|
|
134
|
+
requireString(normalized.runtime.prismaConfig, 'runtime.prismaConfig')
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
return normalized
|
|
138
|
+
}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import { basename, resolve, sep } from 'node:path'
|
|
2
|
+
|
|
3
|
+
export function resolveWithinBase(baseDir, targetPath, label = 'path') {
|
|
4
|
+
const absoluteBase = resolve(baseDir)
|
|
5
|
+
const absoluteTarget = resolve(absoluteBase, targetPath)
|
|
6
|
+
if (absoluteTarget !== absoluteBase && !absoluteTarget.startsWith(`${absoluteBase}${sep}`)) {
|
|
7
|
+
throw new Error(`${label} 越界,已拒绝: ${absoluteTarget}`)
|
|
8
|
+
}
|
|
9
|
+
return absoluteTarget
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
export function basenameOrThrow(filePath, label = 'path') {
|
|
13
|
+
const name = basename(String(filePath || '').trim())
|
|
14
|
+
if (!name || name === '.' || name === '..') {
|
|
15
|
+
throw new Error(`无效的 ${label}: ${filePath}`)
|
|
16
|
+
}
|
|
17
|
+
return name
|
|
18
|
+
}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
export function createRemotePhaseModel(payload) {
|
|
2
|
+
return [
|
|
3
|
+
{ phase: 'lock', payload },
|
|
4
|
+
{ phase: 'extract', payload },
|
|
5
|
+
{ phase: 'env', payload },
|
|
6
|
+
{ phase: 'install', payload },
|
|
7
|
+
{ phase: 'prisma-generate', payload },
|
|
8
|
+
{ phase: 'prisma-migrate', payload },
|
|
9
|
+
{ phase: 'switch-current', payload },
|
|
10
|
+
{ phase: 'startup', payload },
|
|
11
|
+
{ phase: 'cleanup', payload },
|
|
12
|
+
]
|
|
13
|
+
}
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
function parseResultLine(line) {
|
|
2
|
+
if (!line.startsWith('DX_REMOTE_RESULT=')) return null
|
|
3
|
+
return JSON.parse(line.slice('DX_REMOTE_RESULT='.length))
|
|
4
|
+
}
|
|
5
|
+
|
|
6
|
+
function getLastPhase(output = '') {
|
|
7
|
+
const lines = String(output).split('\n')
|
|
8
|
+
let phase = 'cleanup'
|
|
9
|
+
for (const line of lines) {
|
|
10
|
+
if (line.startsWith('DX_REMOTE_PHASE=')) {
|
|
11
|
+
phase = line.slice('DX_REMOTE_PHASE='.length).trim() || phase
|
|
12
|
+
}
|
|
13
|
+
}
|
|
14
|
+
return phase
|
|
15
|
+
}
|
|
16
|
+
|
|
17
|
+
export function parseRemoteResult({ stdout = '', stderr = '', exitCode = 0 }) {
|
|
18
|
+
const allLines = `${stdout}\n${stderr}`.trim().split('\n').filter(Boolean)
|
|
19
|
+
for (let index = allLines.length - 1; index >= 0; index -= 1) {
|
|
20
|
+
const parsed = parseResultLine(allLines[index])
|
|
21
|
+
if (parsed) return parsed
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
if (exitCode === 0) {
|
|
25
|
+
return {
|
|
26
|
+
ok: true,
|
|
27
|
+
phase: getLastPhase(stdout),
|
|
28
|
+
message: 'ok',
|
|
29
|
+
rollbackAttempted: false,
|
|
30
|
+
rollbackSucceeded: null,
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
const message = [stderr, stdout].filter(Boolean).join('\n').trim() || 'remote execution failed'
|
|
35
|
+
return {
|
|
36
|
+
ok: false,
|
|
37
|
+
phase: getLastPhase(`${stdout}\n${stderr}`),
|
|
38
|
+
message,
|
|
39
|
+
rollbackAttempted: false,
|
|
40
|
+
rollbackSucceeded: null,
|
|
41
|
+
}
|
|
42
|
+
}
|
|
@@ -0,0 +1,337 @@
|
|
|
1
|
+
function escapeShell(value) {
|
|
2
|
+
return `'${String(value).replace(/'/g, `'\\''`)}'`
|
|
3
|
+
}
|
|
4
|
+
|
|
5
|
+
/**
 * Render the bash script that is executed on the remote host (via `bash -s`)
 * to roll out an uploaded backend bundle. The generated script runs the
 * phases lock → extract → env → install → prisma-generate → prisma-migrate →
 * switch-current → startup → cleanup, printing a `DX_REMOTE_PHASE=<name>`
 * marker before each phase and exactly one final
 * `DX_REMOTE_RESULT={...}` JSON line that the caller parses.
 *
 * @param {Array<{payload?: object}>} phaseModel - phase model; only the first
 *   entry's payload is read (remote/runtime/startup/deploy settings).
 * @returns {string} a self-contained bash script.
 */
export function buildRemoteDeployScript(phaseModel = []) {
  // All settings come from the first phase's payload; every field falls back
  // to a conservative default so a partial payload still yields a runnable script.
  const payload = phaseModel[0]?.payload || {}
  const remote = payload.remote || {}
  const runtime = payload.runtime || {}
  const startup = payload.startup || {}
  const deploy = payload.deploy || {}
  const environment = String(payload.environment || 'production')
  const baseDir = String(remote.baseDir || '.')
  // Release layout: <baseDir>/releases/<version> with <baseDir>/current symlink.
  const releaseDir = `${baseDir}/releases/${payload.versionName || 'unknown'}`
  const currentLink = `${baseDir}/current`
  const uploadsDir = `${baseDir}/uploads`
  const uploadedBundlePath = String(payload.uploadedBundlePath || '')
  const envFileName = `.env.${environment}`
  const envLocalFileName = `.env.${environment}.local`
  const prismaSchema = runtime.prismaSchemaDir ? `./${runtime.prismaSchemaDir}` : ''
  const prismaConfig = runtime.prismaConfig ? `./${runtime.prismaConfig}` : ''
  // pm2 config path inside the release; falls back to the conventional filename.
  const ecosystemConfig = runtime.ecosystemConfig ? `./${runtime.ecosystemConfig}` : './ecosystem.config.cjs'
  const installCommand = String(deploy.installCommand || 'pnpm install --prod --no-frozen-lockfile --ignore-workspace')
  const startupEntry = String(startup.entry || '')
  const startupMode = String(startup.mode || 'pm2')
  const serviceName = String(startup.serviceName || 'backend')
  const keepReleases = Number(deploy.keepReleases || 5)
  const shouldGenerate = deploy.prismaGenerate !== false
  // Migrations run unless explicitly disabled in config or skipped via --skip-migration.
  const shouldMigrate = deploy.prismaMigrateDeploy !== false && deploy.skipMigration !== true

  // Every config value is single-quote escaped before interpolation so
  // arbitrary strings cannot break out of the shell assignments below.
  // NOTE: inside this JS template literal, `\${...}` and `\\` are escapes that
  // emit literal bash `${...}` expansions and `\` line continuations.
  return `#!/usr/bin/env bash
set -euo pipefail

APP_ROOT=${escapeShell(baseDir)}
UPLOADS_DIR=${escapeShell(uploadsDir)}
ARCHIVE=${escapeShell(uploadedBundlePath)}
RELEASE_DIR=${escapeShell(releaseDir)}
CURRENT_LINK=${escapeShell(currentLink)}
ENV_NAME=${escapeShell(environment)}
ENV_FILE_NAME=${escapeShell(envFileName)}
ENV_LOCAL_FILE_NAME=${escapeShell(envLocalFileName)}
PRISMA_SCHEMA=${escapeShell(prismaSchema)}
PRISMA_CONFIG=${escapeShell(prismaConfig)}
ECOSYSTEM_CONFIG=${escapeShell(ecosystemConfig)}
INSTALL_COMMAND=${escapeShell(installCommand)}
START_MODE=${escapeShell(startupMode)}
SERVICE_NAME=${escapeShell(serviceName)}
START_ENTRY=${escapeShell(startupEntry)}
KEEP_RELEASES=${keepReleases}
SHOULD_GENERATE=${shouldGenerate ? '1' : '0'}
SHOULD_MIGRATE=${shouldMigrate ? '1' : '0'}

LOCK_FILE="$APP_ROOT/.deploy.lock"
LOCK_DIR="$APP_ROOT/.deploy.lock.d"
SHARED_DIR="$APP_ROOT/shared"
RELEASES_DIR="$APP_ROOT/releases"
PREVIOUS_CURRENT_TARGET=""
BUNDLE_TEMP_DIR=""
INNER_ARCHIVE=""
INNER_ARCHIVE_SHA256_FILE=""
VERSION_NAME=""
DOTENV_BIN=""
PRISMA_BIN=""
CURRENT_PHASE="init"
RESULT_EMITTED=0
ROLLBACK_ATTEMPTED=false
ROLLBACK_SUCCEEDED=null
MIGRATION_EXECUTED=0

emit_result() {
  local ok="$1"
  local phase="$2"
  local message="$3"
  local rollback_attempted="$4"
  local rollback_succeeded="$5"
  if [[ "$RESULT_EMITTED" -eq 1 ]]; then
    return
  fi
  RESULT_EMITTED=1
  message="\${message//\\\\/\\\\\\\\}"
  message="\${message//\"/\\\\\"}"
  message="\${message//$'\\n'/\\\\n}"
  printf 'DX_REMOTE_RESULT={"ok":%s,"phase":"%s","message":"%s","rollbackAttempted":%s,"rollbackSucceeded":%s}\\n' \\
    "$ok" "$phase" "$message" "$rollback_attempted" "$rollback_succeeded"
}

cleanup() {
  rm -rf "$BUNDLE_TEMP_DIR" 2>/dev/null || true
  if [[ -n "$LOCK_DIR" ]]; then
    rmdir "$LOCK_DIR" 2>/dev/null || true
  fi
}

on_error() {
  local code=$?
  emit_result false "$CURRENT_PHASE" "phase failed (exit $code)" "$ROLLBACK_ATTEMPTED" "$ROLLBACK_SUCCEEDED"
  exit "$code"
}

trap cleanup EXIT
trap on_error ERR

validate_path_within_base() {
  local base="$1"
  local target="$2"
  case "$target" in
    "$base"/*|"$base") ;;
    *)
      echo "目标路径越界: $target" >&2
      exit 1
      ;;
  esac
}

validate_archive_entries() {
  local archive="$1"
  local entry
  local tar_line
  local link_target

  while IFS= read -r entry; do
    if [[ "$entry" == /* ]]; then
      echo "包含绝对路径条目: $entry" >&2
      exit 1
    fi
    if [[ "$entry" =~ (^|/)\\.\\.(/|$) || "$entry" =~ \\.\\.\\\\ ]]; then
      echo "包含可疑路径条目: $entry" >&2
      exit 1
    fi
  done < <(tar -tzf "$archive")

  while IFS= read -r tar_line; do
    if [[ "$tar_line" == *" -> "* ]]; then
      link_target="\${tar_line##* -> }"
      if [[ "$link_target" == /* ]]; then
        echo "包含可疑链接目标: $link_target" >&2
        exit 1
      fi
      if [[ "$link_target" =~ (^|/)\\.\\.(/|$) || "$link_target" =~ \\.\\.\\\\ ]]; then
        echo "包含可疑链接目标: $link_target" >&2
        exit 1
      fi
    fi
  done < <(tar -tvzf "$archive")
}

find_single_bundle_file() {
  local bundle_dir="$1"
  local pattern="$2"
  shopt -s nullglob
  local matches=("$bundle_dir"/$pattern)
  shopt -u nullglob
  if [[ "\${#matches[@]}" -ne 1 ]]; then
    return 1
  fi
  printf '%s\\n' "\${matches[0]}"
}

sha256_check() {
  local checksum_file="$1"
  if command -v sha256sum >/dev/null 2>&1; then
    sha256sum -c "$checksum_file"
    return
  fi
  local checksum expected file
  checksum="$(awk '{print $1}' "$checksum_file")"
  file="$(awk '{print $2}' "$checksum_file")"
  expected="$(shasum -a 256 "$file" | awk '{print $1}')"
  [[ "$checksum" == "$expected" ]]
}

run_with_env() {
  local cwd="$1"
  shift
  (
    cd "$cwd"
    APP_ENV="$ENV_NAME" "$DOTENV_BIN" -o -e "$ENV_FILE_NAME" -e "$ENV_LOCAL_FILE_NAME" -- "$@"
  )
}

attempt_pm2_restore() {
  if [[ -z "$PREVIOUS_CURRENT_TARGET" || ! -e "$PREVIOUS_CURRENT_TARGET/$ECOSYSTEM_CONFIG" ]]; then
    ROLLBACK_SUCCEEDED=false
    return
  fi
  if (
    cd "$PREVIOUS_CURRENT_TARGET"
    APP_ENV="$ENV_NAME" "$DOTENV_BIN" -o -e "$ENV_FILE_NAME" -e "$ENV_LOCAL_FILE_NAME" -- \\
      pm2 start "$ECOSYSTEM_CONFIG" --only "$SERVICE_NAME" --update-env
    pm2 save
  ); then
    ROLLBACK_SUCCEEDED=true
  else
    ROLLBACK_SUCCEEDED=false
  fi
}

CURRENT_PHASE="lock"
echo "DX_REMOTE_PHASE=lock"
mkdir -p "$RELEASES_DIR" "$SHARED_DIR" "$UPLOADS_DIR"
validate_path_within_base "$APP_ROOT" "$ARCHIVE"
validate_path_within_base "$APP_ROOT" "$RELEASE_DIR"

PREVIOUS_CURRENT_TARGET="$(readlink "$CURRENT_LINK" 2>/dev/null || true)"

if command -v flock >/dev/null 2>&1; then
  exec 9>"$LOCK_FILE"
  flock -n 9
else
  mkdir "$LOCK_DIR"
fi

CURRENT_PHASE="extract"
echo "DX_REMOTE_PHASE=extract"
validate_archive_entries "$ARCHIVE"
BUNDLE_TEMP_DIR="$(mktemp -d "$APP_ROOT/.bundle-extract.XXXXXX")"
tar -xzf "$ARCHIVE" -C "$BUNDLE_TEMP_DIR" --strip-components=1

INNER_ARCHIVE="$(find_single_bundle_file "$BUNDLE_TEMP_DIR" 'backend-v*.tgz')"
INNER_ARCHIVE_SHA256_FILE="$(find_single_bundle_file "$BUNDLE_TEMP_DIR" 'backend-v*.tgz.sha256')"
VERSION_NAME="$(basename "$INNER_ARCHIVE" .tgz)"
validate_path_within_base "$RELEASES_DIR" "$RELEASE_DIR"

(cd "$BUNDLE_TEMP_DIR" && sha256_check "$(basename "$INNER_ARCHIVE_SHA256_FILE")")
validate_archive_entries "$INNER_ARCHIVE"
rm -rf "$RELEASE_DIR"
mkdir -p "$RELEASE_DIR"
tar -xzf "$INNER_ARCHIVE" -C "$RELEASE_DIR" --strip-components=1

CURRENT_PHASE="env"
echo "DX_REMOTE_PHASE=env"
if [[ ! -f "$SHARED_DIR/$ENV_FILE_NAME" ]]; then
  echo "未找到基础环境文件: $SHARED_DIR/$ENV_FILE_NAME" >&2
  exit 1
fi
if [[ ! -f "$SHARED_DIR/$ENV_LOCAL_FILE_NAME" ]]; then
  echo "未找到本地覆盖环境文件: $SHARED_DIR/$ENV_LOCAL_FILE_NAME" >&2
  exit 1
fi
ln -sfn "$SHARED_DIR/$ENV_FILE_NAME" "$RELEASE_DIR/$ENV_FILE_NAME"
ln -sfn "$SHARED_DIR/$ENV_LOCAL_FILE_NAME" "$RELEASE_DIR/$ENV_LOCAL_FILE_NAME"

CURRENT_PHASE="install"
echo "DX_REMOTE_PHASE=install"
command -v node >/dev/null 2>&1
command -v pnpm >/dev/null 2>&1
if [[ "$START_MODE" == "pm2" ]]; then
  command -v pm2 >/dev/null 2>&1
fi
(
  cd "$RELEASE_DIR"
  bash -lc "$INSTALL_COMMAND"
)

DOTENV_BIN="$RELEASE_DIR/node_modules/.bin/dotenv"
if [[ ! -x "$DOTENV_BIN" ]]; then
  echo "缺少可执行文件: $DOTENV_BIN" >&2
  exit 1
fi

if [[ "$SHOULD_GENERATE" == "1" ]]; then
  CURRENT_PHASE="prisma-generate"
  echo "DX_REMOTE_PHASE=prisma-generate"
  PRISMA_BIN="$RELEASE_DIR/node_modules/.bin/prisma"
  if [[ ! -x "$PRISMA_BIN" ]]; then
    echo "缺少可执行文件: $PRISMA_BIN" >&2
    exit 1
  fi
  run_with_env "$RELEASE_DIR" "$PRISMA_BIN" generate --schema="$PRISMA_SCHEMA" --config="$PRISMA_CONFIG"
fi

if [[ "$SHOULD_MIGRATE" == "1" ]]; then
  CURRENT_PHASE="prisma-migrate"
  echo "DX_REMOTE_PHASE=prisma-migrate"
  PRISMA_BIN="$RELEASE_DIR/node_modules/.bin/prisma"
  if [[ ! -x "$PRISMA_BIN" ]]; then
    echo "缺少可执行文件: $PRISMA_BIN" >&2
    exit 1
  fi
  run_with_env "$RELEASE_DIR" "$PRISMA_BIN" migrate deploy --schema="$PRISMA_SCHEMA" --config="$PRISMA_CONFIG"
  MIGRATION_EXECUTED=1
fi

CURRENT_PHASE="switch-current"
echo "DX_REMOTE_PHASE=switch-current"
ln -sfn "$RELEASE_DIR" "$CURRENT_LINK"

CURRENT_PHASE="startup"
echo "DX_REMOTE_PHASE=startup"
if [[ "$START_MODE" == "pm2" ]]; then
  if ! (
    cd "$CURRENT_LINK"
    pm2 delete "$SERVICE_NAME" || true
    APP_ENV="$ENV_NAME" "$DOTENV_BIN" -o -e "$ENV_FILE_NAME" -e "$ENV_LOCAL_FILE_NAME" -- \\
      pm2 start "$ECOSYSTEM_CONFIG" --only "$SERVICE_NAME" --update-env
    pm2 save
  ); then
    if [[ "$MIGRATION_EXECUTED" -eq 0 && -n "$PREVIOUS_CURRENT_TARGET" ]]; then
      ROLLBACK_ATTEMPTED=true
      ln -sfn "$PREVIOUS_CURRENT_TARGET" "$CURRENT_LINK"
      attempt_pm2_restore
    fi
    emit_result false "startup" "pm2 startup failed" "$ROLLBACK_ATTEMPTED" "$ROLLBACK_SUCCEEDED"
    exit 1
  fi
else
  if ! (
    cd "$CURRENT_LINK"
    APP_ENV="$ENV_NAME" "$DOTENV_BIN" -o -e "$ENV_FILE_NAME" -e "$ENV_LOCAL_FILE_NAME" -- \\
      node "$START_ENTRY"
  ); then
    emit_result false "startup" "direct startup failed" false null
    exit 1
  fi
  emit_result true "startup" "direct mode attached" false null
  exit 0
fi

CURRENT_PHASE="cleanup"
echo "DX_REMOTE_PHASE=cleanup"
release_count=0
shopt -s nullglob
release_dirs=("$RELEASES_DIR"/*)
shopt -u nullglob
while IFS= read -r old_release; do
  release_count=$((release_count + 1))
  if [[ "$release_count" -gt "$KEEP_RELEASES" ]]; then
    rm -rf "$old_release"
  fi
done < <(
  if [[ "\${#release_dirs[@]}" -gt 0 ]]; then
    ls -1dt "\${release_dirs[@]}"
  fi
)

emit_result true "cleanup" "ok" false null
`
}
|
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
import { spawn } from 'node:child_process'
|
|
2
|
+
import { basename, relative } from 'node:path'
|
|
3
|
+
import { buildRemoteDeployScript } from './remote-script.js'
|
|
4
|
+
import { createRemotePhaseModel } from './remote-phases.js'
|
|
5
|
+
import { parseRemoteResult } from './remote-result.js'
|
|
6
|
+
|
|
7
|
+
/**
 * Spawn a child process and resolve with its captured output.
 *
 * @param {string} command - executable name or path.
 * @param {string[]} args - argv passed to the executable.
 * @param {object} [options] - extra `spawn` options, merged over the defaults.
 * @returns {Promise<{stdout: string, stderr: string, exitCode: number|null}>}
 *   resolves on process close; rejects only on spawn errors (e.g. ENOENT).
 */
function runProcess(command, args, options = {}) {
  return new Promise((resolve, reject) => {
    const child = spawn(command, args, {
      stdio: ['pipe', 'pipe', 'pipe'],
      ...options,
    })

    let stdout = ''
    let stderr = ''
    // Optional chaining: callers may override `stdio` via options (e.g.
    // 'inherit' / 'ignore'), in which case these streams are null.
    child.stdout?.on('data', chunk => {
      stdout += String(chunk)
    })
    child.stderr?.on('data', chunk => {
      stderr += String(chunk)
    })
    child.on('error', reject)
    child.on('close', exitCode => resolve({ stdout, stderr, exitCode }))
  })
}
|
|
26
|
+
|
|
27
|
+
/**
 * Quote a value for a POSIX shell: wrap in single quotes, replacing embedded
 * quotes with the close/escape/reopen sequence `'\''`.
 */
function escapeShellArg(value) {
  return `'` + String(value).split(`'`).join(`'\\''`) + `'`
}

/**
 * Strip trailing slashes from the remote base directory. An empty result
 * (input was '' or only slashes) collapses to the filesystem root.
 */
function normalizeRemoteBaseDir(baseDir) {
  const trimmed = String(baseDir).replace(/\/+$/, '')
  return trimmed === '' ? '/' : trimmed
}

/**
 * Build the `mkdir -p` command that creates the release layout
 * (releases/, shared/, uploads/) under the remote base directory.
 *
 * @param {string} baseDir - remote deployment root.
 * @returns {string} a single shell command with each path safely quoted.
 */
export function buildEnsureRemoteBaseDirsCommand(baseDir) {
  const root = normalizeRemoteBaseDir(baseDir)
  const quotedDirs = ['releases', 'shared', 'uploads'].map(name =>
    escapeShellArg(`${root}/${name}`)
  )
  return `mkdir -p ${quotedDirs.join(' ')}`
}
|
|
40
|
+
|
|
41
|
+
/**
 * Default transport step: create releases/shared/uploads under the remote
 * base dir over ssh before anything is uploaded.
 *
 * @param {{user: string, host: string, port?: number, baseDir: string}} remote
 * @throws {Error} when ssh exits non-zero (stderr used as the message).
 */
async function defaultEnsureRemoteBaseDirs(remote) {
  const sshArgs = [
    '-p',
    String(remote.port || 22),
    `${remote.user}@${remote.host}`,
    buildEnsureRemoteBaseDirsCommand(remote.baseDir),
  ]
  const { exitCode, stderr } = await runProcess('ssh', sshArgs)
  if (exitCode !== 0) {
    throw new Error(stderr || `ssh mkdir failed (${exitCode})`)
  }
}
|
|
54
|
+
|
|
55
|
+
/**
 * Default transport step: scp the packed bundle into `<baseDir>/uploads/`
 * on the remote host, keeping its local file name.
 *
 * @param {{user: string, host: string, port?: number, baseDir: string}} remote
 * @param {string} bundlePath - local path of the bundle to upload.
 * @throws {Error} when scp exits non-zero (stderr used as the message).
 */
async function defaultUploadBundle(remote, bundlePath) {
  const destination = `${remote.user}@${remote.host}:${remote.baseDir}/uploads/${basename(bundlePath)}`
  const scpArgs = ['-P', String(remote.port || 22), bundlePath, destination]
  const { exitCode, stderr } = await runProcess('scp', scpArgs)
  if (exitCode !== 0) {
    throw new Error(stderr || `scp failed (${exitCode})`)
  }
}
|
|
62
|
+
|
|
63
|
+
/**
 * Default transport step: stream the generated deploy script into `bash -s`
 * on the remote host over ssh and collect its output.
 *
 * @param {{user: string, host: string, port?: number}} remote
 * @param {string} script - bash script piped to the remote shell's stdin.
 * @returns {Promise<{stdout: string, stderr: string, exitCode: number|null}>}
 */
async function defaultRunRemoteScript(remote, script) {
  const target = `${remote.user}@${remote.host}`
  return new Promise((resolve, reject) => {
    const child = spawn('ssh', ['-p', String(remote.port || 22), target, 'bash -s'], {
      stdio: ['pipe', 'pipe', 'pipe'],
    })
    const outChunks = []
    const errChunks = []
    child.stdout.on('data', chunk => outChunks.push(String(chunk)))
    child.stderr.on('data', chunk => errChunks.push(String(chunk)))
    child.on('error', reject)
    child.on('close', exitCode =>
      resolve({ stdout: outChunks.join(''), stderr: errChunks.join(''), exitCode })
    )
    // Feed the script via stdin; closing stdin lets the remote bash run it.
    child.stdin.write(script)
    child.stdin.end()
  })
}
|
|
83
|
+
|
|
84
|
+
/**
 * Assemble the payload consumed by the remote deploy script builder.
 * Runtime paths are rewritten relative to the project root (so they resolve
 * inside the extracted release) with Windows separators normalized to '/'.
 *
 * @param {object} config - resolved backend deploy configuration.
 * @param {{versionName: string, bundlePath: string}} bundle - local artifact.
 * @returns {object} plain payload with remote/runtime/startup/deploy sections.
 */
function createRemotePayload(config, bundle) {
  const toReleaseRelativePath = targetPath => {
    if (!targetPath) return null
    if (!config.projectRoot) return targetPath
    return relative(config.projectRoot, targetPath).replace(/\\/g, '/')
  }

  const { remote, runtime, startup, deploy, environment } = config
  const payload = {
    environment,
    versionName: bundle.versionName,
    uploadedBundlePath: `${remote.baseDir}/uploads/${basename(bundle.bundlePath)}`,
    remote,
    runtime: {
      prismaSchemaDir: toReleaseRelativePath(runtime.prismaSchemaDir),
      prismaConfig: toReleaseRelativePath(runtime.prismaConfig),
      // Only the file name is kept: pm2 config sits at the release root.
      ecosystemConfig: runtime.ecosystemConfig ? basename(runtime.ecosystemConfig) : null,
    },
    startup,
    deploy,
  }
  return payload
}
|
|
105
|
+
|
|
106
|
+
/**
 * Run the remote half of a backend artifact deploy: ensure the remote
 * directory layout, upload the bundle, then execute the generated deploy
 * script over ssh and parse its DX_REMOTE_RESULT line.
 *
 * @param {object} config - resolved deploy configuration (with `remote`).
 * @param {{versionName: string, bundlePath: string}} bundle - built artifact.
 * @param {object} [deps] - injectable transport steps for testing:
 *   `ensureRemoteBaseDirs`, `uploadBundle`, `runRemoteScript`.
 * @returns {Promise<object>} the parsed remote result.
 */
export async function deployBackendArtifactRemotely(config, bundle, deps = {}) {
  const ensureBaseDirs = deps.ensureRemoteBaseDirs || defaultEnsureRemoteBaseDirs
  const upload = deps.uploadBundle || defaultUploadBundle
  const execute = deps.runRemoteScript || defaultRunRemoteScript

  await ensureBaseDirs(config.remote)
  await upload(config.remote, bundle.bundlePath)

  const remotePayload = createRemotePayload(config, bundle)
  const script = buildRemoteDeployScript(createRemotePhaseModel(remotePayload))
  const commandResult = await execute(config.remote, script)
  return parseRemoteResult(commandResult)
}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
// Version specifiers that only resolve inside the monorepo and therefore
// cannot be installed from a registry on the remote host.
const UNSUPPORTED_LOCAL_DEP_PATTERN = /^(workspace:|file:|link:)/
// Dev dependencies the deployed runtime still needs (prisma CLI is invoked
// remotely for generate/migrate).
const REQUIRED_DEPENDENCIES_FROM_DEV = ['prisma']

/**
 * Reject any dependency whose version uses a local-only specifier
 * (workspace:/file:/link:). Non-string versions are ignored.
 *
 * @throws {Error} naming the first offending dependency.
 */
function assertSupportedDependencies(dependencies = {}) {
  for (const [name, version] of Object.entries(dependencies)) {
    if (typeof version !== 'string') continue
    if (UNSUPPORTED_LOCAL_DEP_PATTERN.test(version)) {
      throw new Error(`检测到不支持的本地依赖引用: ${name} -> ${version}`)
    }
  }
}

/**
 * Build the minimal package.json written into the deploy artifact: the app's
 * runtime dependencies (plus required tools promoted from devDependencies),
 * with packageManager/engines inherited from the workspace root.
 *
 * @param {{appPackage?: object, rootPackage?: object}} input
 * @returns {object} the runtime package.json contents.
 * @throws {Error} when a dependency uses a local-only specifier.
 */
export function createRuntimePackage({ appPackage, rootPackage }) {
  const dependencies = { ...(appPackage?.dependencies || {}) }
  const devDependencies = appPackage?.devDependencies || {}

  // Promote required tools from devDependencies only when not already present.
  for (const name of REQUIRED_DEPENDENCIES_FROM_DEV) {
    if (!dependencies[name] && devDependencies[name]) {
      dependencies[name] = devDependencies[name]
    }
  }

  assertSupportedDependencies(dependencies)

  const pkg = {
    name: appPackage?.name,
    version: appPackage?.version,
    dependencies,
  }

  if (appPackage?.private !== undefined) pkg.private = appPackage.private
  if (appPackage?.type) pkg.type = appPackage.type
  if (rootPackage?.packageManager) pkg.packageManager = rootPackage.packageManager

  // Node engine constraint: workspace root wins, app-level is the fallback.
  const nodeEngine = rootPackage?.engines?.node || appPackage?.engines?.node
  if (nodeEngine) {
    pkg.engines = { node: nodeEngine }
  }

  return pkg
}
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import { buildBackendArtifact } from './backend-artifact-deploy/artifact-builder.js'
|
|
2
|
+
import { resolveBackendDeployConfig } from './backend-artifact-deploy/config.js'
|
|
3
|
+
import { deployBackendArtifactRemotely } from './backend-artifact-deploy/remote-transport.js'
|
|
4
|
+
|
|
5
|
+
/**
 * Entry point for `dx deploy backend`: resolve the deploy configuration,
 * build the local artifact, and (unless --build-only) deploy it remotely.
 *
 * @param {object} params
 * @param {object} params.cli - CLI context (commands, flags, repo helpers).
 * @param {string} params.target - deploy target name (e.g. 'backend').
 * @param {string[]} params.args - remaining CLI arguments.
 * @param {string} params.environment - selected environment name.
 * @param {object} [params.deps] - injectable phases for testing:
 *   `resolveConfig`, `buildArtifact`, `deployRemotely`.
 * @returns {Promise<object>} the bundle when --build-only, otherwise the
 *   remote deploy result.
 */
export async function runBackendArtifactDeploy({
  cli,
  target,
  args,
  environment,
  deps = {},
}) {
  const resolveConfig = deps.resolveConfig || resolveBackendDeployConfig
  const buildArtifact = deps.buildArtifact || buildBackendArtifact
  const deployRemotely = deps.deployRemotely || deployBackendArtifactRemotely

  const config = resolveConfig({
    cli,
    targetConfig: cli?.commands?.deploy?.[target],
    environment,
    flags: cli?.flags || {},
    args,
  })

  const bundle = await buildArtifact(config, deps)

  // --build-only stops after packaging: nothing is uploaded or run remotely.
  if (cli?.flags?.buildOnly) {
    return bundle
  }

  return deployRemotely(config, bundle, deps)
}
|
|
@@ -95,7 +95,20 @@ export async function handleDeploy(cli, args) {
|
|
|
95
95
|
return
|
|
96
96
|
}
|
|
97
97
|
|
|
98
|
-
const
|
|
98
|
+
const targetConfig = cli.commands.deploy?.[normalizedTarget]
|
|
99
|
+
const isBackendArtifactDeploy = targetConfig?.internal === 'backend-artifact-deploy'
|
|
100
|
+
const environment = selectedEnvs[0] || (isBackendArtifactDeploy ? 'development' : 'staging')
|
|
101
|
+
|
|
102
|
+
if (isBackendArtifactDeploy) {
|
|
103
|
+
const { runBackendArtifactDeploy } = await import('../../backend-artifact-deploy.js')
|
|
104
|
+
await runBackendArtifactDeploy({
|
|
105
|
+
cli,
|
|
106
|
+
target: normalizedTarget,
|
|
107
|
+
args,
|
|
108
|
+
environment,
|
|
109
|
+
})
|
|
110
|
+
return
|
|
111
|
+
}
|
|
99
112
|
|
|
100
113
|
cli.ensureRepoRoot()
|
|
101
114
|
|
package/lib/cli/flags.js
CHANGED
|
@@ -38,6 +38,8 @@ export const FLAG_DEFINITIONS = {
|
|
|
38
38
|
{ flag: '--fix' },
|
|
39
39
|
],
|
|
40
40
|
deploy: [
|
|
41
|
+
{ flag: '--build-only' },
|
|
42
|
+
{ flag: '--skip-migration' },
|
|
41
43
|
{ flag: '--webhook-path', expectsValue: true },
|
|
42
44
|
{ flag: '--webhook-dry-run' },
|
|
43
45
|
{ flag: '--strict-webhook' },
|
|
@@ -99,6 +101,12 @@ export function parseFlags(args = []) {
|
|
|
99
101
|
case '--all':
|
|
100
102
|
flags.all = true
|
|
101
103
|
break
|
|
104
|
+
case '--build-only':
|
|
105
|
+
flags.buildOnly = true
|
|
106
|
+
break
|
|
107
|
+
case '--skip-migration':
|
|
108
|
+
flags.skipMigration = true
|
|
109
|
+
break
|
|
102
110
|
default:
|
|
103
111
|
break
|
|
104
112
|
}
|
package/lib/cli/help.js
CHANGED
|
@@ -20,9 +20,9 @@ export function showHelp() {
|
|
|
20
20
|
' target: backend, shared, front, admin, mobile, all, sdk, affected (默认: all)',
|
|
21
21
|
' 环境标志: --dev, --staging, --prod, --test, --e2e(未指定时默认 --dev)',
|
|
22
22
|
'',
|
|
23
|
-
' deploy <target> [环境标志] 部署前端到 Vercel',
|
|
24
|
-
' target: front, admin, merchant, telegram-bot, all',
|
|
25
|
-
' 环境标志: --dev, --staging, --prod
|
|
23
|
+
' deploy <target> [环境标志] 部署前端到 Vercel 或后端制品到远端主机',
|
|
24
|
+
' target: front, admin, merchant, telegram-bot, all, backend',
|
|
25
|
+
' 环境标志: --dev, --staging, --prod(默认: Vercel 目标为 --staging;backend 制品发布目标默认 --dev)',
|
|
26
26
|
'',
|
|
27
27
|
' install 安装依赖(使用 frozen-lockfile 确保版本一致)',
|
|
28
28
|
'',
|
|
@@ -95,6 +95,8 @@ export function showHelp() {
|
|
|
95
95
|
' dx test e2e backend apps/backend/e2e/activity/activity.admin.e2e-spec.ts # 运行单个E2E测试文件',
|
|
96
96
|
' dx test e2e backend apps/backend/e2e/activity/activity.admin.e2e-spec.ts -t "should list all activity definitions" # 运行特定测试用例',
|
|
97
97
|
' dx deploy front --staging # 部署前端到 Vercel(staging)',
|
|
98
|
+
' dx deploy backend --prod # 构建 backend 制品并上传/部署到远端主机',
|
|
99
|
+
' dx deploy backend --build-only # 仅构建 backend 制品,不执行远端部署',
|
|
98
100
|
' dx worktree make 88 # 为issue #88创建worktree',
|
|
99
101
|
' dx worktree del 88 # 删除issue #88的worktree',
|
|
100
102
|
' dx worktree del 88 89 90 -Y # 批量删除多个worktree(非交互式)',
|
|
@@ -194,8 +196,14 @@ script 子命令:
|
|
|
194
196
|
dx deploy <target> [环境标志] [选项]
|
|
195
197
|
|
|
196
198
|
参数说明:
|
|
197
|
-
target: front, admin, merchant, telegram-bot, all
|
|
198
|
-
环境标志:
|
|
199
|
+
target: front, admin, merchant, telegram-bot, all, backend
|
|
200
|
+
环境标志:
|
|
201
|
+
- Vercel 目标默认 --staging
|
|
202
|
+
- backend 制品发布目标默认 --dev
|
|
203
|
+
|
|
204
|
+
backend 制品发布(target=backend):
|
|
205
|
+
--build-only 仅本地构建并打包制品,不上传不远端部署
|
|
206
|
+
--skip-migration 远端部署时跳过 prisma migrate deploy
|
|
199
207
|
|
|
200
208
|
Telegram Webhook(仅 target=telegram-bot 生效):
|
|
201
209
|
--webhook-path <path> 对外 webhook 路径(默认 /api/webhook)
|
|
@@ -212,6 +220,8 @@ script 子命令:
|
|
|
212
220
|
dx deploy telegram-bot --prod --webhook-dry-run # 仅打印,不实际调用 Telegram
|
|
213
221
|
dx deploy telegram-bot --dev --no-strict-webhook # 开发环境显式降级为仅告警
|
|
214
222
|
dx deploy all --staging # 串行部署 front + admin + merchant
|
|
223
|
+
dx deploy backend --prod # 构建 backend 制品并部署到远端主机
|
|
224
|
+
dx deploy backend --build-only # 仅构建 backend 制品
|
|
215
225
|
`)
|
|
216
226
|
return
|
|
217
227
|
|
package/lib/vercel-deploy.js
CHANGED
|
@@ -236,10 +236,38 @@ export async function deployPrebuiltWithFallback(options) {
|
|
|
236
236
|
baseArgs,
|
|
237
237
|
env,
|
|
238
238
|
cwd,
|
|
239
|
-
run = runVercel
|
|
239
|
+
run = runVercel,
|
|
240
|
+
cleanupArchiveParts = () => {},
|
|
241
|
+
onMissingFiles = () => {},
|
|
240
242
|
} = options || {}
|
|
241
|
-
|
|
242
|
-
|
|
243
|
+
|
|
244
|
+
try {
|
|
245
|
+
const result = await run(baseArgs, { env, cwd })
|
|
246
|
+
return { usedArchive: false, result }
|
|
247
|
+
} catch (error) {
|
|
248
|
+
if (!isMissingFilesError(error)) {
|
|
249
|
+
throw error
|
|
250
|
+
}
|
|
251
|
+
|
|
252
|
+
onMissingFiles(error)
|
|
253
|
+
cleanupArchiveParts()
|
|
254
|
+
|
|
255
|
+
const archiveArgs = []
|
|
256
|
+
let inserted = false
|
|
257
|
+
for (const arg of baseArgs || []) {
|
|
258
|
+
archiveArgs.push(arg)
|
|
259
|
+
if (!inserted && arg === '--prebuilt') {
|
|
260
|
+
archiveArgs.push('--archive=tgz')
|
|
261
|
+
inserted = true
|
|
262
|
+
}
|
|
263
|
+
}
|
|
264
|
+
if (!inserted) {
|
|
265
|
+
archiveArgs.push('--archive=tgz')
|
|
266
|
+
}
|
|
267
|
+
|
|
268
|
+
const result = await run(archiveArgs, { env, cwd })
|
|
269
|
+
return { usedArchive: true, result }
|
|
270
|
+
}
|
|
243
271
|
}
|
|
244
272
|
|
|
245
273
|
export async function deployToVercel(target, options = {}) {
|
|
@@ -251,6 +279,8 @@ export async function deployToVercel(target, options = {}) {
|
|
|
251
279
|
run = runVercel
|
|
252
280
|
} = options
|
|
253
281
|
|
|
282
|
+
process.exitCode = undefined
|
|
283
|
+
|
|
254
284
|
// 校验环境参数
|
|
255
285
|
if (!ALLOWED_ENVIRONMENTS.includes(environment)) {
|
|
256
286
|
logger.error(`不支持的部署环境: ${environment}`)
|