@ranger1/dx 0.1.85 → 0.1.87
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +61 -1
- package/codex/skills/e2e-audit-fixer/SKILL.md +76 -0
- package/codex/skills/e2e-audit-fixer/agents/openai.yaml +4 -0
- package/codex/skills/e2e-audit-fixer/scripts/e2e_e2e_audit.py +523 -0
- package/codex/skills/env-accessor-audit-fixer/SKILL.md +149 -0
- package/codex/skills/env-accessor-audit-fixer/agents/openai.yaml +7 -0
- package/codex/skills/env-accessor-audit-fixer/references/bootstrap-env-foundation.md +156 -0
- package/codex/skills/env-accessor-audit-fixer/scripts/env_accessor_audit.py +250 -0
- package/codex/skills/error-handling-audit-fixer/SKILL.md +150 -0
- package/codex/skills/error-handling-audit-fixer/agents/openai.yaml +7 -0
- package/codex/skills/error-handling-audit-fixer/references/error-handling-standard.md +152 -0
- package/codex/skills/error-handling-audit-fixer/references/foundation-bootstrap.md +85 -0
- package/codex/skills/error-handling-audit-fixer/scripts/error_handling_audit.py +537 -0
- package/codex/skills/pagination-dto-audit-fixer/SKILL.md +69 -0
- package/codex/skills/pagination-dto-audit-fixer/agents/openai.yaml +7 -0
- package/codex/skills/pagination-dto-audit-fixer/references/pagination-standard.md +67 -0
- package/codex/skills/pagination-dto-audit-fixer/scripts/pagination_dto_audit.py +244 -0
- package/lib/cli/commands/core.js +24 -8
- package/lib/cli/dx-cli.js +19 -9
- package/lib/cli/help.js +11 -6
- package/lib/codex-initial.js +155 -3
- package/lib/exec.js +21 -2
- package/lib/run-with-version-env.js +2 -1
- package/package.json +1 -1
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
import argparse
|
|
3
|
+
import json
|
|
4
|
+
import re
|
|
5
|
+
from dataclasses import asdict, dataclass
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Iterable
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
# Field names that signal a paginated payload.  A class body or return
# expression is only flagged when several of these occur together — see
# has_pagination_signal for the exact rule.
PAGINATION_KEYS = (
    "items",
    "data",
    "total",
    "page",
    "limit",
    "pageSize",
    "currentPage",
)
|
|
19
|
+
|
|
20
|
+
# Substrings of class names that suggest a pagination response DTO even
# when the class body itself does not expose enough pagination fields.
RESPONSE_HINTS = (
    "Pagination",
    "Paginated",
    "ListResponse",
    "PageResult",
)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@dataclass
class Finding:
    """One audit violation; serialized verbatim into the JSON report via asdict()."""

    kind: str  # finding category, e.g. "request-dto-not-standard"
    path: str  # path of the offending file (str(path) as produced by the scanners)
    line: int  # 1-based line number of the flagged symbol
    symbol: str  # class name, or "return" for manual return structures
    message: str  # human-readable description (Chinese, printed verbatim)
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
@dataclass
class ClassBlock:
    """An `export class` declaration extracted from a TypeScript source file."""

    name: str  # class identifier
    extends_name: str  # text of the extends clause, "" when absent
    body: str  # source text between the class's outermost braces
    start: int  # character offset of the declaration within the file content
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def parse_args() -> argparse.Namespace:
    """Build and evaluate the CLI options for the pagination DTO audit."""
    cli = argparse.ArgumentParser(description="审计 backend 分页 DTO 规范")
    cli.add_argument("--workspace", required=True, help="仓库根目录")
    # Repeatable flag.  Note: with action="append" argparse appends any
    # user-supplied values onto this default list, so the backend glob is
    # always part of the scan.
    cli.add_argument(
        "--include-glob",
        help="附加扫描 glob,可重复传入",
        action="append",
        default=["apps/backend/src/**/*.ts"],
    )
    cli.add_argument("--output-json", help="输出 JSON 文件路径")
    return cli.parse_args()
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def iter_files(workspace: Path, globs: Iterable[str]) -> list[Path]:
    """Expand every glob under *workspace*, dropping duplicates and non-files.

    The result is sorted so that reports are stable between runs even when
    globs overlap.
    """
    matched: set[Path] = set()
    ordered: list[Path] = []
    for glob_pattern in globs:
        for candidate in workspace.glob(glob_pattern):
            if candidate.is_file() and candidate not in matched:
                matched.add(candidate)
                ordered.append(candidate)
    return sorted(ordered)
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def has_pagination_signal(block: str) -> bool:
    """Heuristic: does *block* read like a paginated payload?

    Requires at least three distinct PAGINATION_KEYS overall, and at the
    same time a total counter, an items/data collection, and a page-position
    field — a single stray ``page`` or ``total`` is not enough.
    """
    def present(pattern: str) -> bool:
        return re.search(rf"\b{pattern}\b", block) is not None

    distinct_hits = [key for key in PAGINATION_KEYS if present(re.escape(key))]
    if len(distinct_hits) < 3:
        return False
    return (
        present("total")
        and present("(items|data)")
        and present("(page|limit|pageSize|currentPage)")
    )
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def line_no(content: str, index: int) -> int:
    """Translate a character offset in *content* into a 1-based line number."""
    return 1 + content[:index].count("\n")
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def iter_export_classes(content: str) -> list[ClassBlock]:
    """Extract every ``export class`` declaration from TypeScript source.

    The previous header pattern required ``{`` to follow the class name or a
    bare ``extends`` clause, so classes using generics (``class Foo<T>``) or
    an ``implements`` clause without ``extends`` were silently skipped, and
    ``extends Base implements X`` leaked the implements clause into
    ``extends_name`` (breaking the exact-match check in scan_request_dtos).
    The heritage text is now captured loosely and the extends base is
    isolated afterwards.

    Returns one ClassBlock per class with its name, extends base (without
    any trailing ``implements`` clause), brace-delimited body, and the
    character offset of the declaration.
    """
    header_pattern = re.compile(
        r"export\s+class\s+(?P<name>\w+)(?P<heritage>[^{\n]*)\s*\{",
        re.MULTILINE,
    )
    extends_pattern = re.compile(r"\bextends\s+(?P<base>.+?)(?:\s+implements\b.*)?$")
    classes: list[ClassBlock] = []
    for match in header_pattern.finditer(content):
        heritage = match.group("heritage").strip()
        extends_match = extends_pattern.search(heritage)
        extends_name = extends_match.group("base").strip() if extends_match else ""
        # Walk braces to find the matching close.  NOTE(review): braces
        # inside strings/comments are counted too — same limitation as the
        # original implementation, acceptable for an audit heuristic.
        brace_start = match.end() - 1
        depth = 0
        index = brace_start
        while index < len(content):
            char = content[index]
            if char == "{":
                depth += 1
            elif char == "}":
                depth -= 1
                if depth == 0:
                    classes.append(
                        ClassBlock(
                            name=match.group("name"),
                            extends_name=extends_name,
                            body=content[brace_start + 1 : index],
                            start=match.start(),
                        )
                    )
                    break
            index += 1
    return classes
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def scan_request_dtos(path: Path, content: str) -> list[Finding]:
    """Flag request DTO classes that hand-roll pagination fields.

    A class is reported when it lives in a DTO file, is named ``*Dto``, is
    not a response DTO, declares page/limit style fields in its body, and
    does not extend ``BasePaginationRequestDto``.
    """
    # Use as_posix() so the "/dto/" and "/responses/" segment checks also
    # work on Windows, where str(path) uses backslash separators.
    path_text = path.as_posix()
    is_dto_file = "/dto/" in path_text or path.name.endswith(".dto.ts")
    if not is_dto_file:
        # Non-DTO files can never produce request findings; skip parsing.
        return []
    findings: list[Finding] = []
    for class_block in iter_export_classes(content):
        name = class_block.name
        # Response-side files/classes are handled by scan_response_dtos.
        if "/responses/" in path_text or ".response." in path.name or "Response" in name:
            continue
        if not name.endswith("Dto"):
            continue
        # The base class itself, and classes already extending it, conform.
        if name == "BasePaginationRequestDto":
            continue
        if class_block.extends_name == "BasePaginationRequestDto":
            continue
        if not re.search(r"\b(page|limit|pageSize|currentPage)\b", class_block.body):
            continue
        findings.append(
            Finding(
                kind="request-dto-not-standard",
                path=str(path),
                line=line_no(content, class_block.start),
                symbol=name,
                message="请求 DTO 包含分页字段,但未继承 BasePaginationRequestDto",
            )
        )
    return findings
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def scan_response_dtos(path: Path, content: str) -> list[Finding]:
    """Flag response DTO classes that look paginated but skip the base class.

    A class is reported when it lives in a DTO file, is not request-side,
    does not extend ``BasePaginationResponseDto``, and either its body has a
    strong pagination signal or its name contains a RESPONSE_HINTS entry.
    """
    # as_posix() keeps the "/dto/" and "/requests/" segment checks portable
    # to Windows path separators.
    path_text = path.as_posix()
    is_dto_file = "/dto/" in path_text or path.name.endswith(".dto.ts")
    if not is_dto_file:
        # Non-DTO files can never produce response findings; skip parsing.
        return []
    findings: list[Finding] = []
    for class_block in iter_export_classes(content):
        name = class_block.name
        is_request_side = (
            "/requests/" in path_text
            or ".request." in path.name
            or "Request" in name
            or name == "BasePaginationRequestDto"
        )
        if is_request_side:
            continue
        if "BasePaginationResponseDto" in class_block.extends_name:
            continue
        looks_paginated = has_pagination_signal(class_block.body) or any(
            hint in name for hint in RESPONSE_HINTS
        )
        if not looks_paginated:
            continue
        findings.append(
            Finding(
                kind="response-dto-not-standard",
                path=str(path),
                line=line_no(content, class_block.start),
                symbol=name,
                message="响应 DTO 命中分页信号,但未继承 BasePaginationResponseDto",
            )
        )
    return findings
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def scan_manual_returns(path: Path, content: str) -> list[Finding]:
    """Flag hand-assembled pagination objects returned from functions.

    The previous lazy regex (``return\\s*{[\\s\\S]*?}``) stopped at the first
    ``}``, so a nested object literal such as
    ``return { meta: { total, page }, items }`` was truncated and could
    escape detection.  The object literal is now delimited by balanced-brace
    scanning before the pagination heuristic runs.
    """
    findings: list[Finding] = []
    for match in re.finditer(r"return\s*{", content):
        open_index = match.end() - 1
        # Balanced-brace walk.  NOTE(review): braces inside strings or
        # comments are counted too — acceptable for this heuristic.
        depth = 0
        close_index = -1
        for index in range(open_index, len(content)):
            char = content[index]
            if char == "{":
                depth += 1
            elif char == "}":
                depth -= 1
                if depth == 0:
                    close_index = index
                    break
        if close_index == -1:
            # Unbalanced braces (truncated file); nothing reliable to scan.
            continue
        body = content[open_index + 1 : close_index]
        if not has_pagination_signal(body):
            continue
        findings.append(
            Finding(
                kind="manual-pagination-return",
                path=str(path),
                line=line_no(content, match.start()),
                symbol="return",
                message="检测到手工拼装分页返回结构,建议改为统一分页 DTO",
            )
        )
    return findings
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
def scan_file(path: Path) -> list[Finding]:
    """Run every audit pass over a single file; skip files that are not UTF-8."""
    try:
        content = path.read_text(encoding="utf-8")
    except UnicodeDecodeError:
        # Binary or mis-encoded file — nothing to audit.
        return []
    collected: list[Finding] = []
    # Run the passes in the same order the report groups were designed for.
    for scanner in (scan_request_dtos, scan_response_dtos, scan_manual_returns):
        collected.extend(scanner(path, content))
    return collected
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
def print_report(findings: list[Finding]) -> None:
    """Print a human-readable summary of findings, grouped by kind."""
    if not findings:
        print("未发现疑似非标准分页 DTO 或手工分页返回结构。")
        return
    by_kind: dict[str, list[Finding]] = {}
    for item in findings:
        by_kind.setdefault(item.kind, []).append(item)
    print(f"共发现 {len(findings)} 个问题:")
    for kind in sorted(by_kind):
        bucket = by_kind[kind]
        print(f"\n[{kind}] {len(bucket)} 个")
        for item in bucket:
            print(f"- {item.path}:{item.line} {item.symbol} -> {item.message}")
|
|
223
|
+
|
|
224
|
+
|
|
225
|
+
def main() -> int:
    """Entry point: scan the workspace, print a report, optionally dump JSON.

    Always returns 0 — the audit reports findings but does not fail the run.
    """
    args = parse_args()
    workspace = Path(args.workspace).resolve()
    findings: list[Finding] = []
    for file_path in iter_files(workspace, args.include_glob):
        findings.extend(scan_file(file_path))
    print_report(findings)
    if args.output_json:
        output = Path(args.output_json)
        payload = json.dumps(
            [asdict(finding) for finding in findings], ensure_ascii=False, indent=2
        )
        output.write_text(payload + "\n", encoding="utf-8")
        print(f"\nJSON 已输出到 {output}")
    return 0
|
|
241
|
+
|
|
242
|
+
|
|
243
|
+
if __name__ == "__main__":
    # Propagate main()'s return code as the process exit status.
    raise SystemExit(main())
|
package/lib/cli/commands/core.js
CHANGED
|
@@ -94,22 +94,34 @@ export async function handleTest(cli, args) {
|
|
|
94
94
|
cli.flags.test = true
|
|
95
95
|
}
|
|
96
96
|
|
|
97
|
-
|
|
97
|
+
const typeConfig = cli.commands.test[type]
|
|
98
|
+
let testConfig = typeConfig?.[target]
|
|
99
|
+
if (!testConfig && typeConfig?.command) {
|
|
100
|
+
testConfig = typeConfig
|
|
101
|
+
}
|
|
98
102
|
|
|
99
103
|
if (!testConfig) {
|
|
100
104
|
logger.error(`未找到测试配置: ${type}.${target}`)
|
|
101
|
-
process.
|
|
105
|
+
process.exit(1)
|
|
102
106
|
return
|
|
103
107
|
}
|
|
104
108
|
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
109
|
+
if (type === 'e2e' && testConfig.requiresPath && testPath) {
|
|
110
|
+
if (!testConfig.fileCommand) {
|
|
111
|
+
logger.error(`测试配置错误: test.${type}.${target} 已启用 requiresPath,必须配置 fileCommand`)
|
|
112
|
+
process.exit(1)
|
|
113
|
+
}
|
|
114
|
+
|
|
115
|
+
const fileCommand = String(testConfig.fileCommand)
|
|
116
|
+
if (!fileCommand.includes('{TEST_PATH}')) {
|
|
117
|
+
logger.error(`测试配置错误: test.${type}.${target} 的 fileCommand 必须包含 {TEST_PATH}`)
|
|
118
|
+
process.exit(1)
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
let command = fileCommand.replace('{TEST_PATH}', shellEscape(testPath))
|
|
108
122
|
|
|
109
|
-
// 如果指定了测试用例名称,添加 -t 参数
|
|
110
123
|
if (testNamePattern) {
|
|
111
|
-
|
|
112
|
-
command += ` -t "${escapedPattern}"`
|
|
124
|
+
command += ` -t ${shellEscape(testNamePattern)}`
|
|
113
125
|
}
|
|
114
126
|
|
|
115
127
|
testConfig = {
|
|
@@ -132,6 +144,10 @@ export async function handleTest(cli, args) {
|
|
|
132
144
|
await cli.executeCommand(testConfig)
|
|
133
145
|
}
|
|
134
146
|
|
|
147
|
+
function shellEscape(value) {
|
|
148
|
+
return `'${String(value).replace(/'/g, `'\\''`)}'`
|
|
149
|
+
}
|
|
150
|
+
|
|
135
151
|
export async function handleLint(cli, args) {
|
|
136
152
|
void args
|
|
137
153
|
const baseConfig = cli.commands.lint
|
package/lib/cli/dx-cli.js
CHANGED
|
@@ -608,17 +608,27 @@ class DxCli {
|
|
|
608
608
|
}
|
|
609
609
|
|
|
610
610
|
validateTestPositionals(positionalArgs) {
|
|
611
|
-
if (positionalArgs.length === 0) return
|
|
612
|
-
|
|
613
611
|
const [type = 'e2e', target = 'all', testPath] = positionalArgs
|
|
614
612
|
|
|
615
|
-
if (type
|
|
616
|
-
|
|
617
|
-
|
|
618
|
-
logger.
|
|
619
|
-
|
|
620
|
-
|
|
621
|
-
|
|
613
|
+
if (type !== 'e2e') return
|
|
614
|
+
|
|
615
|
+
if (target === 'all') {
|
|
616
|
+
logger.error('dx test e2e all 不受支持,请指定 target 和测试文件或目录路径')
|
|
617
|
+
process.exit(1)
|
|
618
|
+
}
|
|
619
|
+
|
|
620
|
+
const testConfig = this.commands?.test?.[type]?.[target]
|
|
621
|
+
if (!testConfig) return
|
|
622
|
+
if (!testConfig.requiresPath) return
|
|
623
|
+
|
|
624
|
+
if (!testPath) {
|
|
625
|
+
logger.error(`dx test e2e ${target} 必须提供测试文件或目录路径`)
|
|
626
|
+
logger.info(`示例: ${this.invocation} test e2e ${target} apps/${target}/e2e/health`)
|
|
627
|
+
process.exit(1)
|
|
628
|
+
}
|
|
629
|
+
|
|
630
|
+
if (testPath === 'all') {
|
|
631
|
+
logger.error(`dx test e2e ${target} 不支持 all,必须提供测试文件或目录路径`)
|
|
622
632
|
process.exit(1)
|
|
623
633
|
}
|
|
624
634
|
}
|
package/lib/cli/help.js
CHANGED
|
@@ -43,10 +43,10 @@ export function showHelp() {
|
|
|
43
43
|
'',
|
|
44
44
|
' test [type] [target] [path] [-t pattern] 运行测试',
|
|
45
45
|
' type: e2e, unit (默认: e2e)',
|
|
46
|
-
' target:
|
|
47
|
-
' path: 测试文件或目录路径 (
|
|
46
|
+
' target: 由 commands.json 的 test.<type>.<target> 决定(e2e 默认会拒绝隐式 all)',
|
|
47
|
+
' path: 测试文件或目录路径 (guarded e2e target 必填,例如 backend/quantify)',
|
|
48
48
|
' -t pattern: 指定测试用例名称模式 (可选,需要和 path 一起使用)',
|
|
49
|
-
' 说明:
|
|
49
|
+
' 说明: guarded E2E target 禁止无路径或 all 全量执行,dx test e2e all 也不受支持',
|
|
50
50
|
'',
|
|
51
51
|
' worktree [action] [num...] Git Worktree管理',
|
|
52
52
|
' action: make, del, list, clean',
|
|
@@ -94,6 +94,8 @@ export function showHelp() {
|
|
|
94
94
|
' dx test e2e backend apps/backend/e2e/auth # 按目录运行后端 E2E',
|
|
95
95
|
' dx test e2e backend apps/backend/e2e/activity/activity.admin.e2e-spec.ts # 运行单个E2E测试文件',
|
|
96
96
|
' dx test e2e backend apps/backend/e2e/activity/activity.admin.e2e-spec.ts -t "should list all activity definitions" # 运行特定测试用例',
|
|
97
|
+
' dx test e2e quantify apps/quantify/e2e/health/health.e2e-spec.ts # 运行 Quantify E2E 文件',
|
|
98
|
+
' dx test e2e all # 不受支持,必须指定 target 和 path',
|
|
97
99
|
' dx deploy front --staging # 部署前端到 Vercel(staging)',
|
|
98
100
|
' dx deploy backend --prod # 构建 backend 制品并上传/部署到远端主机',
|
|
99
101
|
' dx deploy backend --build-only # 仅构建 backend 制品,不执行远端部署',
|
|
@@ -255,17 +257,20 @@ test 命令用法:
|
|
|
255
257
|
|
|
256
258
|
参数说明:
|
|
257
259
|
type: e2e, unit (默认: e2e)
|
|
258
|
-
target:
|
|
259
|
-
path:
|
|
260
|
+
target: 由 commands.json 的 test.<type>.<target> 决定
|
|
261
|
+
path: guarded e2e target 必须提供文件或目录路径
|
|
260
262
|
-t pattern: 指定测试用例名称模式,需要和 path 一起使用
|
|
261
263
|
|
|
262
264
|
限制说明:
|
|
263
|
-
|
|
265
|
+
guarded E2E target 禁止无路径全量执行。
|
|
266
|
+
guarded E2E target 也不支持把 path 写成 all。
|
|
267
|
+
dx test e2e all 不受支持,必须显式指定 target 和 path。
|
|
264
268
|
|
|
265
269
|
常见示例:
|
|
266
270
|
dx test e2e backend apps/backend/e2e/auth
|
|
267
271
|
dx test e2e backend apps/backend/e2e/auth/auth.login.e2e-spec.ts
|
|
268
272
|
dx test e2e backend apps/backend/e2e/auth/auth.login.e2e-spec.ts -t "should login"
|
|
273
|
+
dx test e2e quantify apps/quantify/e2e/health/health.e2e-spec.ts
|
|
269
274
|
dx test unit front
|
|
270
275
|
dx test unit admin
|
|
271
276
|
`)
|
package/lib/codex-initial.js
CHANGED
|
@@ -4,6 +4,49 @@ import os from 'node:os'
|
|
|
4
4
|
|
|
5
5
|
import { logger } from './logger.js'
|
|
6
6
|
|
|
7
|
+
const REQUIRED_CODEX_CONFIG = [
|
|
8
|
+
{
|
|
9
|
+
section: 'features',
|
|
10
|
+
values: {
|
|
11
|
+
multi_agent: 'true',
|
|
12
|
+
},
|
|
13
|
+
},
|
|
14
|
+
{
|
|
15
|
+
section: 'agents',
|
|
16
|
+
values: {
|
|
17
|
+
max_threads: '15',
|
|
18
|
+
},
|
|
19
|
+
},
|
|
20
|
+
{
|
|
21
|
+
section: 'agents.fixer',
|
|
22
|
+
values: {
|
|
23
|
+
description: '"bugfix 代理"',
|
|
24
|
+
config_file: '"agents/fixer.toml"',
|
|
25
|
+
},
|
|
26
|
+
},
|
|
27
|
+
{
|
|
28
|
+
section: 'agents.orchestrator',
|
|
29
|
+
values: {
|
|
30
|
+
description: '"pr 修复流程编排代理"',
|
|
31
|
+
config_file: '"agents/orchestrator.toml"',
|
|
32
|
+
},
|
|
33
|
+
},
|
|
34
|
+
{
|
|
35
|
+
section: 'agents.reviewer',
|
|
36
|
+
values: {
|
|
37
|
+
description: '"代码评审代理"',
|
|
38
|
+
config_file: '"agents/reviewer.toml"',
|
|
39
|
+
},
|
|
40
|
+
},
|
|
41
|
+
{
|
|
42
|
+
section: 'agents.spark',
|
|
43
|
+
values: {
|
|
44
|
+
description: '"通用执行代理"',
|
|
45
|
+
config_file: '"agents/spark.toml"',
|
|
46
|
+
},
|
|
47
|
+
},
|
|
48
|
+
]
|
|
49
|
+
|
|
7
50
|
async function collectAllFiles(dir) {
|
|
8
51
|
const out = []
|
|
9
52
|
|
|
@@ -39,6 +82,110 @@ async function ensureDir(path) {
|
|
|
39
82
|
await fs.mkdir(path, { recursive: true })
|
|
40
83
|
}
|
|
41
84
|
|
|
85
|
+
function escapeRegExp(input) {
|
|
86
|
+
return String(input).replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
function ensureTrailingNewline(text) {
|
|
90
|
+
if (!text) return ''
|
|
91
|
+
return text.endsWith('\n') ? text : `${text}\n`
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
function upsertTomlSection(content, { section, values }) {
|
|
95
|
+
const header = `[${section}]`
|
|
96
|
+
const sectionPattern = new RegExp(`^\\[${escapeRegExp(section)}\\]\\s*$`, 'm')
|
|
97
|
+
const sectionHeaderMatch = content.match(sectionPattern)
|
|
98
|
+
let nextContent = content
|
|
99
|
+
let changedKeys = 0
|
|
100
|
+
let createdSection = false
|
|
101
|
+
|
|
102
|
+
if (!sectionHeaderMatch) {
|
|
103
|
+
const blockLines = [header, ...Object.entries(values).map(([key, value]) => `${key} = ${value}`), '']
|
|
104
|
+
nextContent = ensureTrailingNewline(content)
|
|
105
|
+
if (nextContent.length > 0 && !nextContent.endsWith('\n\n')) {
|
|
106
|
+
nextContent += '\n'
|
|
107
|
+
}
|
|
108
|
+
nextContent += `${blockLines.join('\n')}\n`
|
|
109
|
+
return {
|
|
110
|
+
content: nextContent,
|
|
111
|
+
changedKeys: Object.keys(values).length,
|
|
112
|
+
createdSection: true,
|
|
113
|
+
}
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
const sectionStart = sectionHeaderMatch.index
|
|
117
|
+
const sectionBodyStart = sectionStart + sectionHeaderMatch[0].length
|
|
118
|
+
const remaining = content.slice(sectionBodyStart)
|
|
119
|
+
const nextHeaderMatch = remaining.match(/\n(?=\[[^\]]+\]\s*$)/m)
|
|
120
|
+
const sectionEnd =
|
|
121
|
+
nextHeaderMatch && typeof nextHeaderMatch.index === 'number'
|
|
122
|
+
? sectionBodyStart + nextHeaderMatch.index + 1
|
|
123
|
+
: content.length
|
|
124
|
+
|
|
125
|
+
const beforeSection = content.slice(0, sectionStart)
|
|
126
|
+
const originalSectionText = content.slice(sectionStart, sectionEnd)
|
|
127
|
+
const trailing = content.slice(sectionEnd)
|
|
128
|
+
const sectionLines = originalSectionText.split('\n')
|
|
129
|
+
|
|
130
|
+
for (const [key, value] of Object.entries(values)) {
|
|
131
|
+
const desiredLine = `${key} = ${value}`
|
|
132
|
+
const keyPattern = new RegExp(`^${escapeRegExp(key)}\\s*=`, 'm')
|
|
133
|
+
const lineIndex = sectionLines.findIndex(line => keyPattern.test(line.trim()))
|
|
134
|
+
|
|
135
|
+
if (lineIndex === -1) {
|
|
136
|
+
let insertIndex = sectionLines.length
|
|
137
|
+
while (insertIndex > 1 && sectionLines[insertIndex - 1] === '') {
|
|
138
|
+
insertIndex--
|
|
139
|
+
}
|
|
140
|
+
sectionLines.splice(insertIndex, 0, desiredLine)
|
|
141
|
+
changedKeys++
|
|
142
|
+
continue
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
if (sectionLines[lineIndex].trim() !== desiredLine) {
|
|
146
|
+
sectionLines[lineIndex] = desiredLine
|
|
147
|
+
changedKeys++
|
|
148
|
+
}
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
const updatedSectionText = ensureTrailingNewline(sectionLines.join('\n'))
|
|
152
|
+
nextContent = `${beforeSection}${updatedSectionText}${trailing}`
|
|
153
|
+
|
|
154
|
+
return { content: nextContent, changedKeys, createdSection }
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
async function ensureCodexConfig({ codexDir }) {
|
|
158
|
+
const configPath = join(codexDir, 'config.toml')
|
|
159
|
+
let content = ''
|
|
160
|
+
|
|
161
|
+
try {
|
|
162
|
+
content = await fs.readFile(configPath, 'utf8')
|
|
163
|
+
} catch (error) {
|
|
164
|
+
if (error?.code !== 'ENOENT') throw error
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
let changedKeys = 0
|
|
168
|
+
let createdSections = 0
|
|
169
|
+
let nextContent = content
|
|
170
|
+
|
|
171
|
+
for (const sectionConfig of REQUIRED_CODEX_CONFIG) {
|
|
172
|
+
const result = upsertTomlSection(nextContent, sectionConfig)
|
|
173
|
+
nextContent = result.content
|
|
174
|
+
changedKeys += result.changedKeys
|
|
175
|
+
if (result.createdSection) createdSections++
|
|
176
|
+
}
|
|
177
|
+
|
|
178
|
+
if (nextContent !== content || content === '') {
|
|
179
|
+
await fs.writeFile(configPath, ensureTrailingNewline(nextContent), 'utf8')
|
|
180
|
+
}
|
|
181
|
+
|
|
182
|
+
return {
|
|
183
|
+
configPath,
|
|
184
|
+
changedKeys,
|
|
185
|
+
createdSections,
|
|
186
|
+
}
|
|
187
|
+
}
|
|
188
|
+
|
|
42
189
|
async function assertDirExists(path, label) {
|
|
43
190
|
try {
|
|
44
191
|
const st = await fs.stat(path)
|
|
@@ -116,10 +263,11 @@ export async function runCodexInitial(options = {}) {
|
|
|
116
263
|
if (!packageRoot) throw new Error('runCodexInitial: 缺少 packageRoot')
|
|
117
264
|
|
|
118
265
|
const homeDir = options.homeDir || os.homedir()
|
|
266
|
+
const codexDir = join(homeDir, '.codex')
|
|
119
267
|
const srcSkills = join(packageRoot, 'codex', 'skills')
|
|
120
|
-
const dstSkills = join(
|
|
268
|
+
const dstSkills = join(codexDir, 'skills')
|
|
121
269
|
const srcCodexAgents = join(packageRoot, 'codex', 'agents')
|
|
122
|
-
const dstCodexAgents = join(
|
|
270
|
+
const dstCodexAgents = join(codexDir, 'agents')
|
|
123
271
|
|
|
124
272
|
await assertDirExists(srcSkills, '模板目录 codex/skills')
|
|
125
273
|
await assertDirExists(srcCodexAgents, '模板目录 codex/agents')
|
|
@@ -129,8 +277,12 @@ export async function runCodexInitial(options = {}) {
|
|
|
129
277
|
|
|
130
278
|
const skillsStats = await copySkillsDirectories({ srcSkillsDir: srcSkills, dstSkillsDir: dstSkills })
|
|
131
279
|
const codexAgentsStats = await copyDirMerge({ srcDir: srcCodexAgents, dstDir: dstCodexAgents })
|
|
280
|
+
const configStats = await ensureCodexConfig({ codexDir })
|
|
132
281
|
|
|
133
|
-
logger.success(`已初始化 Codex 模板到: ${
|
|
282
|
+
logger.success(`已初始化 Codex 模板到: ${codexDir}`)
|
|
134
283
|
logger.info(`skills: ${skillsStats.copiedDirs} 个目录,覆盖复制 ${skillsStats.copiedFiles} 个文件 -> ${dstSkills}`)
|
|
135
284
|
logger.info(`codex agents: 覆盖复制 ${codexAgentsStats.fileCount} 个文件 -> ${dstCodexAgents}`)
|
|
285
|
+
logger.info(
|
|
286
|
+
`config.toml: 修复 ${configStats.changedKeys} 个配置项,新增 ${configStats.createdSections} 个分组 -> ${configStats.configPath}`,
|
|
287
|
+
)
|
|
136
288
|
}
|
package/lib/exec.js
CHANGED
|
@@ -12,6 +12,25 @@ import { confirmManager } from './confirm.js'
|
|
|
12
12
|
|
|
13
13
|
const execPromise = promisify(nodeExec)
|
|
14
14
|
|
|
15
|
+
export function sanitizeChildEnv(inputEnv = {}) {
|
|
16
|
+
const env = { ...inputEnv }
|
|
17
|
+
const noColor = env.NO_COLOR
|
|
18
|
+
const forceColor = env.FORCE_COLOR
|
|
19
|
+
|
|
20
|
+
// 某些工具链(npm/pnpm/chalk)会在子进程链路里自动打开 FORCE_COLOR,
|
|
21
|
+
// 这会与继承下去的 NO_COLOR 冲突并触发 "NO_COLOR is ignored" 警告。
|
|
22
|
+
// 这里优先移除 NO_COLOR,避免在 dx 执行链路中重复打印噪音告警。
|
|
23
|
+
if (noColor !== undefined && noColor !== null && String(noColor) !== '') {
|
|
24
|
+
delete env.NO_COLOR
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
if (forceColor === undefined || forceColor === null || String(forceColor) === '') {
|
|
28
|
+
delete env.FORCE_COLOR
|
|
29
|
+
}
|
|
30
|
+
|
|
31
|
+
return env
|
|
32
|
+
}
|
|
33
|
+
|
|
15
34
|
export class ExecManager {
|
|
16
35
|
constructor() {
|
|
17
36
|
this.runningProcesses = new Map()
|
|
@@ -222,12 +241,12 @@ export class ExecManager {
|
|
|
222
241
|
const result = await this.spawnCommand(fullCommand, {
|
|
223
242
|
cwd: cwd || process.cwd(),
|
|
224
243
|
stdio,
|
|
225
|
-
env: {
|
|
244
|
+
env: sanitizeChildEnv({
|
|
226
245
|
...process.env,
|
|
227
246
|
NODE_ENV: nodeEnvForProcess,
|
|
228
247
|
...forcedEnv,
|
|
229
248
|
...extraEnv,
|
|
230
|
-
},
|
|
249
|
+
}),
|
|
231
250
|
timeout,
|
|
232
251
|
})
|
|
233
252
|
|
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import { spawn, execSync } from 'node:child_process'
|
|
2
|
+
import { sanitizeChildEnv } from './exec.js'
|
|
2
3
|
import { readFile } from 'node:fs/promises'
|
|
3
4
|
import { resolve, join, dirname } from 'node:path'
|
|
4
5
|
import { existsSync } from 'node:fs'
|
|
@@ -147,7 +148,7 @@ export async function runWithVersionEnv(argv = []) {
|
|
|
147
148
|
|
|
148
149
|
const child = spawn(command[0], command.slice(1), {
|
|
149
150
|
stdio: 'inherit',
|
|
150
|
-
env,
|
|
151
|
+
env: sanitizeChildEnv(env),
|
|
151
152
|
shell: false
|
|
152
153
|
})
|
|
153
154
|
|