@ranger1/dx 0.1.79 → 0.1.81
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +79 -0
- package/codex/agents/fixer.toml +1 -2
- package/codex/agents/orchestrator.toml +2 -0
- package/codex/agents/reviewer.toml +2 -0
- package/codex/agents/spark.toml +1 -4
- package/codex/skills/doctor/SKILL.md +1 -1
- package/codex/skills/doctor/scripts/doctor.sh +35 -42
- package/lib/backend-artifact-deploy/artifact-builder.js +240 -0
- package/lib/backend-artifact-deploy/config.js +138 -0
- package/lib/backend-artifact-deploy/path-utils.js +18 -0
- package/lib/backend-artifact-deploy/remote-phases.js +13 -0
- package/lib/backend-artifact-deploy/remote-result.js +42 -0
- package/lib/backend-artifact-deploy/remote-script.js +337 -0
- package/lib/backend-artifact-deploy/remote-transport.js +118 -0
- package/lib/backend-artifact-deploy/rollback.js +5 -0
- package/lib/backend-artifact-deploy/runtime-package.js +41 -0
- package/lib/backend-artifact-deploy.js +32 -0
- package/lib/cli/commands/deploy.js +14 -1
- package/lib/cli/flags.js +8 -0
- package/lib/cli/help.js +15 -5
- package/lib/vercel-deploy.js +33 -3
- package/package.json +1 -1
package/README.md
CHANGED
|
@@ -181,6 +181,7 @@ dx 的命令由 `dx/config/commands.json` 驱动,并且内置了一些 interna
|
|
|
181
181
|
|
|
182
182
|
- `internal: sdk-build`:SDK 生成/构建
|
|
183
183
|
- `internal: backend-package`:后端打包
|
|
184
|
+
- `internal: backend-artifact-deploy`:后端制品构建、上传与远端部署
|
|
184
185
|
- `internal: start-dev`:开发环境一键启动
|
|
185
186
|
- `internal: pm2-stack`:PM2 交互式服务栈(支持端口清理/缓存清理配置)
|
|
186
187
|
|
|
@@ -195,6 +196,7 @@ dx db generate
|
|
|
195
196
|
dx db migrate --dev --name init
|
|
196
197
|
dx db deploy --prod -Y
|
|
197
198
|
dx deploy front --staging
|
|
199
|
+
dx deploy backend --prod
|
|
198
200
|
dx lint
|
|
199
201
|
dx test e2e backend apps/backend/e2e/auth
|
|
200
202
|
```
|
|
@@ -284,6 +286,83 @@ dx test e2e backend apps/backend/e2e/auth
|
|
|
284
286
|
- 需要的前置构建(例如 `shared`、`api-contracts`、OpenAPI 导出、后端构建等)应由项目自己的 Nx 依赖图(`dependsOn`/项目依赖)或 Vercel 的 `buildCommand` 负责。
|
|
285
287
|
- 这样 dx deploy 不会强依赖 `apps/sdk` 等目录结构,更容易适配不同 monorepo。
|
|
286
288
|
|
|
289
|
+
### backend 制品发布
|
|
290
|
+
|
|
291
|
+
当 `dx/config/commands.json` 的 `deploy.backend.internal` 配置为 `backend-artifact-deploy` 时,`dx deploy backend` 走内置的后端制品发布流程,而不是 Vercel 部署。
|
|
292
|
+
|
|
293
|
+
常用命令:
|
|
294
|
+
|
|
295
|
+
```bash
|
|
296
|
+
dx deploy backend --prod
|
|
297
|
+
dx deploy backend --build-only
|
|
298
|
+
dx deploy backend --prod --skip-migration
|
|
299
|
+
```
|
|
300
|
+
|
|
301
|
+
最小示例配置:
|
|
302
|
+
|
|
303
|
+
```json
|
|
304
|
+
{
|
|
305
|
+
"deploy": {
|
|
306
|
+
"backend": {
|
|
307
|
+
"internal": "backend-artifact-deploy",
|
|
308
|
+
"backendDeploy": {
|
|
309
|
+
"build": {
|
|
310
|
+
"app": "backend",
|
|
311
|
+
"distDir": "dist/backend",
|
|
312
|
+
"versionFile": "apps/backend/package.json",
|
|
313
|
+
"commands": {
|
|
314
|
+
"development": "npx nx build backend --configuration=development",
|
|
315
|
+
"staging": "npx nx build backend --configuration=production",
|
|
316
|
+
"production": "npx nx build backend --configuration=production"
|
|
317
|
+
}
|
|
318
|
+
},
|
|
319
|
+
"runtime": {
|
|
320
|
+
"appPackage": "apps/backend/package.json",
|
|
321
|
+
"rootPackage": "package.json",
|
|
322
|
+
"lockfile": "pnpm-lock.yaml",
|
|
323
|
+
"prismaSchemaDir": "apps/backend/prisma/schema",
|
|
324
|
+
"prismaConfig": "apps/backend/prisma.config.ts",
|
|
325
|
+
"ecosystemConfig": "ecosystem.config.cjs"
|
|
326
|
+
},
|
|
327
|
+
"artifact": {
|
|
328
|
+
"outputDir": "release/backend",
|
|
329
|
+
"bundleName": "backend-bundle"
|
|
330
|
+
},
|
|
331
|
+
"remote": {
|
|
332
|
+
"host": "deploy.example.com",
|
|
333
|
+
"port": 22,
|
|
334
|
+
"user": "deploy",
|
|
335
|
+
"baseDir": "/srv/example-app"
|
|
336
|
+
},
|
|
337
|
+
"startup": {
|
|
338
|
+
"mode": "pm2",
|
|
339
|
+
"serviceName": "backend"
|
|
340
|
+
},
|
|
341
|
+
"deploy": {
|
|
342
|
+
"keepReleases": 5,
|
|
343
|
+
"installCommand": "pnpm install --prod --no-frozen-lockfile --ignore-workspace",
|
|
344
|
+
"prismaGenerate": true,
|
|
345
|
+
"prismaMigrateDeploy": true
|
|
346
|
+
}
|
|
347
|
+
}
|
|
348
|
+
}
|
|
349
|
+
}
|
|
350
|
+
}
|
|
351
|
+
```
|
|
352
|
+
|
|
353
|
+
固定远端目录协议:
|
|
354
|
+
|
|
355
|
+
- `<baseDir>/releases/<version-name>`
|
|
356
|
+
- `<baseDir>/current`
|
|
357
|
+
- `<baseDir>/shared/.env.<environment>`
|
|
358
|
+
- `<baseDir>/shared/.env.<environment>.local`
|
|
359
|
+
- `<baseDir>/uploads/<bundle-file>`
|
|
360
|
+
|
|
361
|
+
运行时制品约束:
|
|
362
|
+
|
|
363
|
+
- 生成的 release `package.json` 默认只保留运行时依赖;如果应用把 `prisma` 放在 `devDependencies`,dx 会自动把它提升进 release 依赖,保证远端 `prisma generate` / `prisma migrate deploy` 可执行。
|
|
364
|
+
- 打包前会递归扫描整个 staged payload;任意层级出现 `.env*` 文件都会直接失败,避免把环境文件误打进制品。
|
|
365
|
+
|
|
287
366
|
## 依赖关系约定
|
|
288
367
|
|
|
289
368
|
dx 不负责管理「工程之间的构建依赖关系」。如果多个工程之间存在依赖(例如 `front/admin` 依赖 `shared` 或 `api-contracts`),必须由 Nx 的依赖图来表达并自动拉起:
|
package/codex/agents/fixer.toml
CHANGED
package/codex/agents/spark.toml
CHANGED
|
@@ -1,8 +1,7 @@
|
|
|
1
1
|
model = "gpt-5.3-codex"
|
|
2
2
|
model_reasoning_effort = "medium"
|
|
3
3
|
approval_policy = "never"
|
|
4
|
-
sandbox_mode = "
|
|
5
|
-
network_access = true
|
|
4
|
+
sandbox_mode = "danger-full-access"
|
|
6
5
|
|
|
7
6
|
developer_instructions = '''
|
|
8
7
|
你是一个通用agent 根据输入的提示词,完整遵循他的要求执行任务
|
|
@@ -17,5 +16,3 @@ developer_instructions = '''
|
|
|
17
16
|
prompt是一个文件路径并不存在时,必须返回结构化错误:{"error":"PROMPT_FILE_NOT_FOUND","detail":"<file_path>"}
|
|
18
17
|
|
|
19
18
|
'''
|
|
20
|
-
|
|
21
|
-
|
|
@@ -57,7 +57,7 @@ bash "$CODEX_HOME/skills/doctor/scripts/doctor.sh" --max-rounds 3
|
|
|
57
57
|
- 自动修复:确保 `~/.codex/config.toml` 含以下目标值(缺失补齐、值不符覆盖):
|
|
58
58
|
- `[features] multi_agent = true`
|
|
59
59
|
- `[agents] max_threads = 15`
|
|
60
|
-
- `[agents.fixer] description/
|
|
60
|
+
- `[agents.fixer] description/config_file`
|
|
61
61
|
- `[agents.orchestrator] description/config_file`
|
|
62
62
|
- `[agents.reviewer] description/config_file`
|
|
63
63
|
- `[agents.spark] description/config_file`
|
|
@@ -233,7 +233,7 @@ ensure_codex_config() {
|
|
|
233
233
|
in_features=0; in_agents=0; in_fixer=0; in_orch=0; in_reviewer=0; in_spark=0;
|
|
234
234
|
features_emitted=0; agents_emitted=0; fixer_emitted=0; orch_emitted=0; reviewer_emitted=0; spark_emitted=0;
|
|
235
235
|
features_multi_written=0; agents_max_threads_written=0;
|
|
236
|
-
fixer_desc_written=0;
|
|
236
|
+
fixer_desc_written=0; fixer_cfg_written=0;
|
|
237
237
|
orch_desc_written=0; orch_cfg_written=0;
|
|
238
238
|
reviewer_desc_written=0; reviewer_cfg_written=0;
|
|
239
239
|
spark_desc_written=0; spark_cfg_written=0;
|
|
@@ -252,24 +252,23 @@ ensure_codex_config() {
|
|
|
252
252
|
}
|
|
253
253
|
function flush_fixer() {
|
|
254
254
|
if (!fixer_emitted) return;
|
|
255
|
-
if (!fixer_desc_written) print "description = \"
|
|
256
|
-
if (!
|
|
257
|
-
if (!fixer_cfg_written) print "config_file = \"~/.codex/agents/fixer.toml\"";
|
|
255
|
+
if (!fixer_desc_written) print "description = \"bugfix 代理\"";
|
|
256
|
+
if (!fixer_cfg_written) print "config_file = \"agents/fixer.toml\"";
|
|
258
257
|
}
|
|
259
258
|
function flush_orch() {
|
|
260
259
|
if (!orch_emitted) return;
|
|
261
|
-
if (!orch_desc_written) print "description = \"
|
|
262
|
-
if (!orch_cfg_written) print "config_file = \"
|
|
260
|
+
if (!orch_desc_written) print "description = \"pr 修复流程编排代理\"";
|
|
261
|
+
if (!orch_cfg_written) print "config_file = \"agents/orchestrator.toml\"";
|
|
263
262
|
}
|
|
264
263
|
function flush_reviewer() {
|
|
265
264
|
if (!reviewer_emitted) return;
|
|
266
|
-
if (!reviewer_desc_written) print "description = \"
|
|
267
|
-
if (!reviewer_cfg_written) print "config_file = \"
|
|
265
|
+
if (!reviewer_desc_written) print "description = \"代码评审代理\"";
|
|
266
|
+
if (!reviewer_cfg_written) print "config_file = \"agents/reviewer.toml\"";
|
|
268
267
|
}
|
|
269
268
|
function flush_spark() {
|
|
270
269
|
if (!spark_emitted) return;
|
|
271
|
-
if (!spark_desc_written) print "description = \"
|
|
272
|
-
if (!spark_cfg_written) print "config_file = \"
|
|
270
|
+
if (!spark_desc_written) print "description = \"通用执行代理\"";
|
|
271
|
+
if (!spark_cfg_written) print "config_file = \"agents/spark.toml\"";
|
|
273
272
|
}
|
|
274
273
|
function flush_active_section() {
|
|
275
274
|
if (in_features) flush_features();
|
|
@@ -332,42 +331,38 @@ ensure_codex_config() {
|
|
|
332
331
|
}
|
|
333
332
|
|
|
334
333
|
if (in_fixer && match(t, /^description[[:space:]]*=/)) {
|
|
335
|
-
if (!fixer_desc_written) { print "description = \"
|
|
336
|
-
next;
|
|
337
|
-
}
|
|
338
|
-
if (in_fixer && match(t, /^model_reasoning_effort[[:space:]]*=/)) {
|
|
339
|
-
if (!fixer_reasoning_written) { print "model_reasoning_effort = \"medium\""; fixer_reasoning_written=1; }
|
|
334
|
+
if (!fixer_desc_written) { print "description = \"bugfix 代理\""; fixer_desc_written=1; }
|
|
340
335
|
next;
|
|
341
336
|
}
|
|
342
337
|
if (in_fixer && match(t, /^config_file[[:space:]]*=/)) {
|
|
343
|
-
if (!fixer_cfg_written) { print "config_file = \"
|
|
338
|
+
if (!fixer_cfg_written) { print "config_file = \"agents/fixer.toml\""; fixer_cfg_written=1; }
|
|
344
339
|
next;
|
|
345
340
|
}
|
|
346
341
|
|
|
347
342
|
if (in_orch && match(t, /^description[[:space:]]*=/)) {
|
|
348
|
-
if (!orch_desc_written) { print "description = \"
|
|
343
|
+
if (!orch_desc_written) { print "description = \"pr 修复流程编排代理\""; orch_desc_written=1; }
|
|
349
344
|
next;
|
|
350
345
|
}
|
|
351
346
|
if (in_orch && match(t, /^config_file[[:space:]]*=/)) {
|
|
352
|
-
if (!orch_cfg_written) { print "config_file = \"
|
|
347
|
+
if (!orch_cfg_written) { print "config_file = \"agents/orchestrator.toml\""; orch_cfg_written=1; }
|
|
353
348
|
next;
|
|
354
349
|
}
|
|
355
350
|
|
|
356
351
|
if (in_reviewer && match(t, /^description[[:space:]]*=/)) {
|
|
357
|
-
if (!reviewer_desc_written) { print "description = \"
|
|
352
|
+
if (!reviewer_desc_written) { print "description = \"代码评审代理\""; reviewer_desc_written=1; }
|
|
358
353
|
next;
|
|
359
354
|
}
|
|
360
355
|
if (in_reviewer && match(t, /^config_file[[:space:]]*=/)) {
|
|
361
|
-
if (!reviewer_cfg_written) { print "config_file = \"
|
|
356
|
+
if (!reviewer_cfg_written) { print "config_file = \"agents/reviewer.toml\""; reviewer_cfg_written=1; }
|
|
362
357
|
next;
|
|
363
358
|
}
|
|
364
359
|
|
|
365
360
|
if (in_spark && match(t, /^description[[:space:]]*=/)) {
|
|
366
|
-
if (!spark_desc_written) { print "description = \"
|
|
361
|
+
if (!spark_desc_written) { print "description = \"通用执行代理\""; spark_desc_written=1; }
|
|
367
362
|
next;
|
|
368
363
|
}
|
|
369
364
|
if (in_spark && match(t, /^config_file[[:space:]]*=/)) {
|
|
370
|
-
if (!spark_cfg_written) { print "config_file = \"
|
|
365
|
+
if (!spark_cfg_written) { print "config_file = \"agents/spark.toml\""; spark_cfg_written=1; }
|
|
371
366
|
next;
|
|
372
367
|
}
|
|
373
368
|
|
|
@@ -389,27 +384,26 @@ ensure_codex_config() {
|
|
|
389
384
|
if (!fixer_emitted) {
|
|
390
385
|
print "";
|
|
391
386
|
print "[agents.fixer]";
|
|
392
|
-
print "description = \"
|
|
393
|
-
print "
|
|
394
|
-
print "config_file = \"~/.codex/agents/fixer.toml\"";
|
|
387
|
+
print "description = \"bugfix 代理\"";
|
|
388
|
+
print "config_file = \"agents/fixer.toml\"";
|
|
395
389
|
}
|
|
396
390
|
if (!orch_emitted) {
|
|
397
391
|
print "";
|
|
398
392
|
print "[agents.orchestrator]";
|
|
399
|
-
print "description = \"
|
|
400
|
-
print "config_file = \"
|
|
393
|
+
print "description = \"pr 修复流程编排代理\"";
|
|
394
|
+
print "config_file = \"agents/orchestrator.toml\"";
|
|
401
395
|
}
|
|
402
396
|
if (!reviewer_emitted) {
|
|
403
397
|
print "";
|
|
404
398
|
print "[agents.reviewer]";
|
|
405
|
-
print "description = \"
|
|
406
|
-
print "config_file = \"
|
|
399
|
+
print "description = \"代码评审代理\"";
|
|
400
|
+
print "config_file = \"agents/reviewer.toml\"";
|
|
407
401
|
}
|
|
408
402
|
if (!spark_emitted) {
|
|
409
403
|
print "";
|
|
410
404
|
print "[agents.spark]";
|
|
411
|
-
print "description = \"
|
|
412
|
-
print "config_file = \"
|
|
405
|
+
print "description = \"通用执行代理\"";
|
|
406
|
+
print "config_file = \"agents/spark.toml\"";
|
|
413
407
|
}
|
|
414
408
|
}' "$cfg_file" >"$tmp_file"
|
|
415
409
|
|
|
@@ -424,7 +418,7 @@ check_codex_config() {
|
|
|
424
418
|
awk '
|
|
425
419
|
BEGIN {
|
|
426
420
|
in_features=0; in_agents=0; in_fixer=0; in_orch=0; in_reviewer=0; in_spark=0;
|
|
427
|
-
ok_features=0; ok_threads=0; ok_fixer_desc=0;
|
|
421
|
+
ok_features=0; ok_threads=0; ok_fixer_desc=0; ok_fixer_cfg=0;
|
|
428
422
|
ok_orch_desc=0; ok_orch_cfg=0; ok_reviewer_desc=0; ok_reviewer_cfg=0; ok_spark_desc=0; ok_spark_cfg=0;
|
|
429
423
|
}
|
|
430
424
|
function trim(s) { gsub(/^[[:space:]]+|[[:space:]]+$/, "", s); return s }
|
|
@@ -442,19 +436,18 @@ check_codex_config() {
|
|
|
442
436
|
}
|
|
443
437
|
if (in_features && line ~ /^multi_agent[[:space:]]*=[[:space:]]*true$/) ok_features=1;
|
|
444
438
|
if (in_agents && line ~ /^max_threads[[:space:]]*=[[:space:]]*15$/) ok_threads=1;
|
|
445
|
-
if (in_fixer && line ~ /^description[[:space:]]*=[[:space:]]*"
|
|
446
|
-
if (in_fixer && line ~ /^
|
|
447
|
-
if (
|
|
448
|
-
if (in_orch && line ~ /^
|
|
449
|
-
if (
|
|
450
|
-
if (in_reviewer && line ~ /^
|
|
451
|
-
if (
|
|
452
|
-
if (in_spark && line ~ /^
|
|
453
|
-
if (in_spark && line ~ /^config_file[[:space:]]*=[[:space:]]*"~\/\.codex\/agents\/spark\.toml"$/) ok_spark_cfg=1;
|
|
439
|
+
if (in_fixer && line ~ /^description[[:space:]]*=[[:space:]]*"bugfix 代理"$/) ok_fixer_desc=1;
|
|
440
|
+
if (in_fixer && line ~ /^config_file[[:space:]]*=[[:space:]]*"agents\/fixer\.toml"$/) ok_fixer_cfg=1;
|
|
441
|
+
if (in_orch && line ~ /^description[[:space:]]*=[[:space:]]*"pr 修复流程编排代理"$/) ok_orch_desc=1;
|
|
442
|
+
if (in_orch && line ~ /^config_file[[:space:]]*=[[:space:]]*"agents\/orchestrator\.toml"$/) ok_orch_cfg=1;
|
|
443
|
+
if (in_reviewer && line ~ /^description[[:space:]]*=[[:space:]]*"代码评审代理"$/) ok_reviewer_desc=1;
|
|
444
|
+
if (in_reviewer && line ~ /^config_file[[:space:]]*=[[:space:]]*"agents\/reviewer\.toml"$/) ok_reviewer_cfg=1;
|
|
445
|
+
if (in_spark && line ~ /^description[[:space:]]*=[[:space:]]*"通用执行代理"$/) ok_spark_desc=1;
|
|
446
|
+
if (in_spark && line ~ /^config_file[[:space:]]*=[[:space:]]*"agents\/spark\.toml"$/) ok_spark_cfg=1;
|
|
454
447
|
}
|
|
455
448
|
END {
|
|
456
449
|
ok = ok_features && ok_threads &&
|
|
457
|
-
ok_fixer_desc &&
|
|
450
|
+
ok_fixer_desc && ok_fixer_cfg &&
|
|
458
451
|
ok_orch_desc && ok_orch_cfg &&
|
|
459
452
|
ok_reviewer_desc && ok_reviewer_cfg &&
|
|
460
453
|
ok_spark_desc && ok_spark_cfg;
|
|
@@ -0,0 +1,240 @@
|
|
|
1
|
+
import { execFile } from 'node:child_process'
|
|
2
|
+
import { existsSync } from 'node:fs'
|
|
3
|
+
import { cp, mkdir, readdir, readFile, rm, writeFile } from 'node:fs/promises'
|
|
4
|
+
import { basename, dirname, join, relative } from 'node:path'
|
|
5
|
+
import { promisify } from 'node:util'
|
|
6
|
+
import { execManager } from '../exec.js'
|
|
7
|
+
import { basenameOrThrow, resolveWithinBase } from './path-utils.js'
|
|
8
|
+
import { createRuntimePackage } from './runtime-package.js'
|
|
9
|
+
|
|
10
|
+
const execFileAsync = promisify(execFile)
|
|
11
|
+
|
|
12
|
+
/**
 * Validate that `value` is a single safe path segment.
 * Rejects empty strings and anything containing a path separator or `..`,
 * so artifact names can never escape their target directory.
 * Returns the trimmed text; throws on rejection.
 */
function assertSafeNamePart(value, label) {
  const text = String(value || '').trim()
  const looksUnsafe =
    text === '' || ['/', '\\', '..'].some(token => text.includes(token))
  if (looksUnsafe) {
    throw new Error(`${label} 越界,已拒绝: ${text}`)
  }
  return text
}
|
|
19
|
+
|
|
20
|
+
/**
 * Build a timestamp tag from the current local time,
 * shaped like `20240131-235959` (`YYYYMMDD-HHMMSS`).
 */
function defaultNowTag() {
  const pad = value => String(value).padStart(2, '0')
  const now = new Date()
  const datePart = `${now.getFullYear()}${pad(now.getMonth() + 1)}${pad(now.getDate())}`
  const timePart = `${pad(now.getHours())}${pad(now.getMinutes())}${pad(now.getSeconds())}`
  return `${datePart}-${timePart}`
}
|
|
29
|
+
|
|
30
|
+
/**
 * Read the `version` field from a package.json-style file.
 * Returns the trimmed version string, or '' when the field is absent.
 */
async function defaultReadVersion(versionFile) {
  const raw = await readFile(versionFile, 'utf8')
  const { version } = JSON.parse(raw)
  return String(version || '').trim()
}
|
|
34
|
+
|
|
35
|
+
/**
 * Run the configured build command through execManager.
 * Throws up front when `build.command` is missing.
 */
async function defaultRunBuild(build) {
  const command = build?.command
  if (!command) {
    throw new Error('缺少构建命令: build.command')
  }
  await execManager.executeCommand(command, { app: build.app || undefined })
}
|
|
43
|
+
|
|
44
|
+
/**
 * Ensure the artifact output directory exists, creating parents as needed.
 */
async function defaultPrepareOutputDir(outputDir) {
  await mkdir(outputDir, { recursive: true })
}
|
|
47
|
+
|
|
48
|
+
/**
 * Copy a file or directory tree to the given destination path,
 * failing fast when the source does not exist.
 * Note: despite the name, `destinationDir` is the full target path.
 */
async function copyIntoDir(source, destinationDir) {
  if (existsSync(source)) {
    await cp(source, destinationDir, { recursive: true })
    return
  }
  throw new Error(`缺少必需文件或目录: ${source}`)
}
|
|
54
|
+
|
|
55
|
+
/**
 * Populate a fresh stage directory with everything the release needs:
 * the built dist output, a runtime-only package.json synthesized from the
 * app + root manifests, the lockfile, and the optional prisma / pm2
 * ecosystem files described by the stage plan.
 */
async function defaultStageFiles({ config, stageDir, stagePlan }) {
  // Always start from a clean stage directory.
  await rm(stageDir, { recursive: true, force: true })
  await mkdir(stageDir, { recursive: true })

  // Dist output lands entry-by-entry in the stage root.
  const distEntries = await readdir(stagePlan.dist.source)
  for (const entry of distEntries) {
    await copyIntoDir(join(stagePlan.dist.source, entry), join(stageDir, entry))
  }

  // Synthesize the runtime-only package.json.
  const appPackage = JSON.parse(await readFile(stagePlan.appPackage.source, 'utf8'))
  const rootPackage = JSON.parse(await readFile(stagePlan.rootPackage.source, 'utf8'))
  const runtimePackage = createRuntimePackage({ appPackage, rootPackage })
  await writeFile(
    join(stageDir, stagePlan.runtimePackage.destination),
    `${JSON.stringify(runtimePackage, null, 2)}\n`,
  )

  await copyIntoDir(stagePlan.lockfile.source, join(stageDir, stagePlan.lockfile.destination))

  // Optional prisma pieces keep their project-relative destinations,
  // so parent directories may need to be created first.
  for (const item of [stagePlan.prismaSchema, stagePlan.prismaConfig]) {
    if (!item) continue
    await mkdir(join(stageDir, dirname(item.destination)), { recursive: true })
    await copyIntoDir(item.source, join(stageDir, item.destination))
  }

  // The pm2 ecosystem file is copied flat into the stage root.
  if (stagePlan.ecosystemConfig) {
    await copyIntoDir(stagePlan.ecosystemConfig.source, join(stageDir, stagePlan.ecosystemConfig.destination))
  }
}
|
|
82
|
+
|
|
83
|
+
/**
 * Walk the staged payload breadth-first and fail if any `.env*` entry is
 * present at any depth, so environment files can never be packed into the
 * release artifact. Throws with a forward-slash list of offenders.
 */
async function defaultAssertNoEnvFiles(stageDir) {
  const offenders = []
  const pending = ['.']

  while (pending.length > 0) {
    const relativeDir = pending.shift()
    const absoluteDir = relativeDir === '.' ? stageDir : join(stageDir, relativeDir)

    for (const entry of await readdir(absoluteDir, { withFileTypes: true })) {
      const relativePath = relativeDir === '.' ? entry.name : join(relativeDir, entry.name)
      if (entry.name.startsWith('.env')) {
        offenders.push(relativePath.replace(/\\/g, '/'))
      }
      if (entry.isDirectory()) {
        pending.push(relativePath)
      }
    }
  }

  if (offenders.length > 0) {
    throw new Error(`制品目录包含 .env* 文件: ${offenders.join(', ')}`)
  }
}
|
|
110
|
+
|
|
111
|
+
/**
 * Pack the stage directory contents into a gzipped tarball at
 * `innerArchivePath`, creating the archive's parent directory on demand.
 */
async function defaultCreateInnerArchive({ stageDir, innerArchivePath }) {
  await mkdir(dirname(innerArchivePath), { recursive: true })
  await execFileAsync('tar', ['-czf', innerArchivePath, '.'], { cwd: stageDir })
}
|
|
117
|
+
|
|
118
|
+
/**
 * Write a sha256 checksum file for `archivePath` in `sha256sum` output
 * format (`<hex>  <path>`, two spaces), so any remote `sha256sum -c` /
 * `shasum -c` verification keeps working.
 *
 * Uses node:crypto with a streamed read instead of shelling out to
 * `sha256sum` / `shasum`: those binaries are not available on every host
 * (e.g. Windows, slim containers), and the previous bare try/catch
 * swallowed the first tool's real failures (such as a missing archive)
 * before blindly retrying the second.
 */
async function defaultWriteChecksum({ archivePath, checksumPath }) {
  const { createHash } = await import('node:crypto')
  const { createReadStream } = await import('node:fs')
  const { pipeline } = await import('node:stream/promises')

  const hash = createHash('sha256')
  // Stream the archive through the hash so large bundles never need to
  // fit in memory.
  await pipeline(createReadStream(archivePath), hash)
  await writeFile(checksumPath, `${hash.digest('hex')}  ${archivePath}\n`)
}
|
|
127
|
+
|
|
128
|
+
/**
 * Wrap the inner archive and its checksum file into the final upload
 * bundle tarball, referencing both members relative to the output
 * directory so the bundle contains bare file names.
 */
async function defaultCreateBundle({ outputDir, bundlePath, innerArchivePath, checksumPath }) {
  const members = [basename(innerArchivePath), basename(checksumPath)]
  await execFileAsync('tar', ['-czf', bundlePath, ...members], { cwd: outputDir })
}
|
|
135
|
+
|
|
136
|
+
/**
 * Derive all artifact file names for one release from the version, the
 * timestamp tag, and the configured bundle base name. Each part is
 * validated as a single safe path segment (no separators, no `..`)
 * before being embedded in a file name.
 */
export function createArtifactNames({ version, timeTag, bundleName }) {
  const safePart = (value, label) => {
    const text = String(value || '').trim()
    if (!text || text.includes('/') || text.includes('\\') || text.includes('..')) {
      throw new Error(`${label} 越界,已拒绝: ${text}`)
    }
    return text
  }

  const safeVersion = safePart(version, 'version')
  const safeTimeTag = safePart(timeTag, 'timeTag')
  const safeBundleName = safePart(bundleName, 'bundleName')

  const versionName = `backend-v${safeVersion}-${safeTimeTag}`
  const innerArchiveName = `${versionName}.tgz`
  return {
    versionName,
    innerArchiveName,
    checksumName: `${innerArchiveName}.sha256`,
    bundleName: `${safeBundleName}-v${safeVersion}-${safeTimeTag}.tgz`,
  }
}
|
|
149
|
+
|
|
150
|
+
/**
 * Describe where each payload piece comes from and where it lands inside
 * the stage directory. Mandatory pieces (dist output, runtime
 * package.json, lockfile, app/root manifests) are always present;
 * prisma schema/config and the pm2 ecosystem file are included only when
 * configured.
 */
export function createStagePlan(config) {
  const projectRoot = config.projectRoot || '/'
  // Project-relative, forward-slash destination; also strips a leading
  // `repo/` segment when the relative path happens to start with one.
  const relativeToProject = targetPath =>
    relative(projectRoot, targetPath).replace(/\\/g, '/').replace(/^repo\//, '')

  const plan = {
    dist: { source: config.build.distDir, destination: '.' },
    runtimePackage: { destination: 'package.json' },
    lockfile: { source: config.runtime.lockfile, destination: 'pnpm-lock.yaml' },
    appPackage: { source: config.runtime.appPackage },
    rootPackage: { source: config.runtime.rootPackage },
  }

  const { prismaSchemaDir, prismaConfig, ecosystemConfig } = config.runtime
  if (prismaSchemaDir) {
    plan.prismaSchema = {
      source: prismaSchemaDir,
      destination: relativeToProject(prismaSchemaDir),
    }
  }
  if (prismaConfig) {
    plan.prismaConfig = {
      source: prismaConfig,
      destination: relativeToProject(prismaConfig),
    }
  }
  if (ecosystemConfig) {
    plan.ecosystemConfig = {
      source: ecosystemConfig,
      // basenameOrThrow rejects paths that are not plain file names.
      destination: basenameOrThrow(ecosystemConfig, 'runtime.ecosystemConfig'),
    }
  }

  return plan
}
|
|
195
|
+
|
|
196
|
+
/**
 * Build the backend release artifact end to end:
 * build → stage payload → env-file guard → inner tgz → checksum →
 * upload bundle.
 *
 * Every step can be overridden through `deps` (for testing or
 * customization); unspecified steps fall back to this module's defaults.
 *
 * Returns the version, time tag, release name, and the paths of the
 * produced bundle, inner archive, and checksum file.
 */
export async function buildBackendArtifact(config, deps = {}) {
  const steps = {
    nowTag: deps.nowTag || defaultNowTag,
    readVersion: deps.readVersion || defaultReadVersion,
    runBuild: deps.runBuild || defaultRunBuild,
    prepareOutputDir: deps.prepareOutputDir || defaultPrepareOutputDir,
    stageFiles: deps.stageFiles || defaultStageFiles,
    assertNoEnvFiles: deps.assertNoEnvFiles || defaultAssertNoEnvFiles,
    createInnerArchive: deps.createInnerArchive || defaultCreateInnerArchive,
    writeChecksum: deps.writeChecksum || defaultWriteChecksum,
    createBundle: deps.createBundle || defaultCreateBundle,
  }

  const version = await steps.readVersion(config.build.versionFile)
  const timeTag = steps.nowTag()
  const names = createArtifactNames({
    version,
    timeTag,
    bundleName: config.artifact.bundleName,
  })

  // Every artifact path is confined to the configured output directory.
  const outputDir = resolveWithinBase(config.artifact.outputDir, '.', 'artifact.outputDir')
  const stageDir = resolveWithinBase(outputDir, names.versionName, 'stageDir')
  const innerArchivePath = resolveWithinBase(outputDir, names.innerArchiveName, 'innerArchivePath')
  const checksumPath = resolveWithinBase(outputDir, names.checksumName, 'checksumPath')
  const bundlePath = resolveWithinBase(outputDir, names.bundleName, 'bundlePath')

  await steps.runBuild(config.build)
  await steps.prepareOutputDir(outputDir)
  await steps.stageFiles({ config, stageDir, stagePlan: createStagePlan(config) })
  await steps.assertNoEnvFiles(stageDir)
  await steps.createInnerArchive({ stageDir, innerArchivePath })
  await steps.writeChecksum({ archivePath: innerArchivePath, checksumPath })
  await steps.createBundle({ outputDir, bundlePath, innerArchivePath, checksumPath })

  return {
    version,
    timeTag,
    versionName: names.versionName,
    bundlePath,
    innerArchivePath,
    checksumPath,
  }
}
|