cc-devflow 4.1.5 → 4.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/CLAUDE.md +87 -1091
- package/.claude/commands/core/architecture.md +32 -2
- package/.claude/commands/core/guidelines.md +27 -2
- package/.claude/commands/core/roadmap.md +33 -4
- package/.claude/commands/core/style.md +53 -263
- package/.claude/commands/flow/CLAUDE.md +28 -0
- package/.claude/commands/flow/archive.md +2 -2
- package/.claude/commands/flow/checklist.md +9 -251
- package/.claude/commands/flow/clarify.md +9 -127
- package/.claude/commands/flow/constitution.md +1 -1
- package/.claude/commands/flow/context.md +1 -1
- package/.claude/commands/flow/dev.md +19 -395
- package/.claude/commands/flow/ideate.md +13 -13
- package/.claude/commands/flow/init.md +19 -30
- package/.claude/commands/flow/new.md +12 -268
- package/.claude/commands/flow/quality.md +10 -153
- package/.claude/commands/flow/release.md +18 -81
- package/.claude/commands/flow/restart.md +15 -16
- package/.claude/commands/flow/spec.md +14 -164
- package/.claude/commands/flow/status.md +12 -12
- package/.claude/commands/flow/update.md +4 -4
- package/.claude/commands/flow/upgrade.md +6 -6
- package/.claude/commands/flow/verify.md +19 -78
- package/.claude/commands/flow/workspace.md +1 -1
- package/.claude/docs/guides/INIT_TROUBLESHOOTING.md +7 -7
- package/.claude/docs/guides/NEW_TROUBLESHOOTING.md +44 -96
- package/.claude/docs/guides/ROADMAP_TROUBLESHOOTING.md +1 -1
- package/.claude/docs/guides/TASK_COMPLETION_MARKING.md +5 -5
- package/.claude/docs/templates/ATTEMPT_TEMPLATE.md +1 -1
- package/.claude/docs/templates/BACKLOG_TEMPLATE.md +3 -3
- package/.claude/docs/templates/CLARIFICATION_REPORT_TEMPLATE.md +5 -5
- package/.claude/docs/templates/ERROR_LOG_TEMPLATE.md +2 -2
- package/.claude/docs/templates/INIT_FLOW_TEMPLATE.md +3 -3
- package/.claude/docs/templates/NEW_ORCHESTRATION_TEMPLATE.md +33 -64
- package/.claude/docs/templates/RESEARCH_TEMPLATE.md +3 -3
- package/.claude/docs/templates/ROADMAP_DIALOGUE_TEMPLATE.md +2 -2
- package/.claude/docs/templates/ROADMAP_TEMPLATE.md +2 -2
- package/.claude/docs/templates/STYLE_TEMPLATE.md +3 -3
- package/.claude/docs/templates/UI_PROTOTYPE_TEMPLATE.md +8 -9
- package/.claude/guides/workflow-guides/flow-orchestrator.md +31 -265
- package/.claude/hooks/CLAUDE.md +1 -1
- package/.claude/hooks/checklist-gate.js +4 -4
- package/.claude/hooks/inject-agent-context.ts +2 -2
- package/.claude/scripts/calculate-checklist-completion.sh +2 -2
- package/.claude/scripts/check-prerequisites.sh +2 -2
- package/.claude/scripts/checklist-errors.sh +4 -4
- package/.claude/scripts/flow-quality-full.sh +5 -5
- package/.claude/scripts/flow-quality-quick.sh +4 -4
- package/.claude/scripts/flow-workspace-init.sh +2 -2
- package/.claude/scripts/generate-clarification-report.sh +4 -4
- package/.claude/scripts/recover-workflow.sh +70 -73
- package/.claude/scripts/run-quality-gates.sh +1 -1
- package/.claude/scripts/setup-epic.sh +2 -2
- package/.claude/scripts/setup-ralph-loop.sh +2 -2
- package/.claude/scripts/validate-research.sh +1 -1
- package/.claude/scripts/verify-setup.sh +1 -1
- package/.claude/skills/cc-devflow-orchestrator/SKILL.md +113 -108
- package/.claude/skills/workflow/CLAUDE.md +24 -0
- package/.claude/skills/workflow/flow-dev/CLAUDE.md +14 -76
- package/.claude/skills/workflow/flow-dev/SKILL.md +58 -60
- package/.claude/skills/workflow/flow-dev/context.jsonl +4 -8
- package/.claude/skills/workflow/flow-init/SKILL.md +46 -144
- package/.claude/skills/workflow/flow-init/assets/RESEARCH_TEMPLATE.md +1 -1
- package/.claude/skills/workflow/flow-init/context.jsonl +3 -3
- package/.claude/skills/workflow/flow-init/scripts/check-prerequisites.sh +1 -1
- package/.claude/skills/workflow/flow-init/scripts/validate-research.sh +1 -1
- package/.claude/skills/workflow/flow-release/SKILL.md +23 -56
- package/.claude/skills/workflow/flow-release/context.jsonl +5 -7
- package/.claude/skills/workflow/flow-spec/CLAUDE.md +15 -101
- package/.claude/skills/workflow/flow-spec/SKILL.md +40 -511
- package/.claude/skills/workflow/flow-spec/context.jsonl +5 -7
- package/.claude/skills/workflow/flow-verify/CLAUDE.md +10 -0
- package/.claude/skills/workflow/flow-verify/SKILL.md +53 -0
- package/.claude/skills/workflow/flow-verify/context.jsonl +5 -0
- package/.claude/skills/workflow.yaml +72 -267
- package/CHANGELOG.md +72 -0
- package/README.md +96 -69
- package/README.zh-CN.md +95 -67
- package/bin/cc-devflow-cli.js +154 -0
- package/bin/harness.js +22 -0
- package/docs/commands/README.md +34 -38
- package/docs/commands/README.zh-CN.md +34 -36
- package/docs/commands/core-roadmap.md +2 -2
- package/docs/commands/core-roadmap.zh-CN.md +2 -2
- package/docs/commands/core-style.md +29 -381
- package/docs/commands/core-style.zh-CN.md +29 -381
- package/docs/commands/flow-init.md +10 -10
- package/docs/commands/flow-init.zh-CN.md +11 -11
- package/docs/commands/flow-new.md +25 -260
- package/docs/commands/flow-new.zh-CN.md +26 -257
- package/docs/guides/getting-started.md +16 -15
- package/docs/guides/getting-started.zh-CN.md +10 -12
- package/lib/compiler/__tests__/manifest.test.js +156 -0
- package/lib/compiler/__tests__/parser.test.js +21 -0
- package/lib/compiler/index.js +17 -1
- package/lib/compiler/manifest.js +68 -6
- package/lib/compiler/parser.js +5 -0
- package/lib/harness/CLAUDE.md +21 -0
- package/lib/harness/cli.js +208 -0
- package/lib/harness/index.js +16 -0
- package/lib/harness/operations/dispatch.js +285 -0
- package/lib/harness/operations/init.js +48 -0
- package/lib/harness/operations/janitor.js +74 -0
- package/lib/harness/operations/pack.js +100 -0
- package/lib/harness/operations/plan.js +29 -0
- package/lib/harness/operations/release.js +83 -0
- package/lib/harness/operations/resume.js +44 -0
- package/lib/harness/operations/verify.js +163 -0
- package/lib/harness/planner.js +141 -0
- package/lib/harness/schemas.js +108 -0
- package/lib/harness/store.js +240 -0
- package/package.json +9 -1
|
@@ -0,0 +1,163 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* [INPUT]: 依赖 manifest 与 package scripts,依赖 shell gates(lint/typecheck/test/audit/review)。
|
|
3
|
+
* [OUTPUT]: 生成 report-card.json,给出 quick/strict/review 门禁结论。
|
|
4
|
+
* [POS]: harness 质量门禁入口,被 CLI `harness:verify` 调用。
|
|
5
|
+
* [PROTOCOL]: 变更时更新此头部,然后检查 CLAUDE.md
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
const {
|
|
9
|
+
nowIso,
|
|
10
|
+
getPackageScripts,
|
|
11
|
+
readJson,
|
|
12
|
+
writeJson,
|
|
13
|
+
runCommand,
|
|
14
|
+
getTaskManifestPath,
|
|
15
|
+
getReportCardPath
|
|
16
|
+
} = require('../store');
|
|
17
|
+
const { parseManifest, parseReportCard } = require('../schemas');
|
|
18
|
+
|
|
19
|
+
/**
 * Run a shell gate command and normalize the outcome into a gate result.
 *
 * @param {string} command - Shell command to execute.
 * @param {string} cwd - Working directory for the command.
 * @param {number} [timeoutMs] - Kill the command after this many ms (default 20 min).
 * @returns {Promise<{status: 'pass'|'fail', durationMs: number, details: string}>}
 */
async function runGate(command, cwd, timeoutMs = 20 * 60 * 1000) {
  const result = await runCommand(command, { cwd, timeoutMs });
  if (result.code === 0) {
    return { status: 'pass', durationMs: result.durationMs, details: 'ok' };
  }

  // Surface timeouts explicitly: a SIGTERM'd process often produces no
  // output at all, which previously collapsed into the generic
  // "command failed" message and hid the real cause.
  const fallback = result.killedByTimeout
    ? `command timed out after ${timeoutMs}ms`
    : 'command failed';

  return {
    status: 'fail',
    durationMs: result.durationMs,
    // Cap details so report cards stay small even for noisy tools.
    details: (result.stderr || result.stdout || fallback).trim().slice(0, 400)
  };
}
|
|
29
|
+
|
|
30
|
+
/**
 * Execute one package.json script as a quality gate.
 * Scripts that are not defined are reported as "skipped" rather than failed,
 * so repositories without e.g. a "typecheck" script still pass verify.
 *
 * @param {Record<string, string>} scripts - The package.json "scripts" map.
 * @param {string} scriptName - Script key to run (e.g. "lint").
 * @param {string} repoRoot - Directory in which npm is invoked.
 * @returns {Promise<object>} Gate result: name/command/status/durationMs/details.
 */
async function runNpmScriptGate(scripts, scriptName, repoRoot) {
  const command = `npm run ${scriptName}`;

  const isDefined = Boolean(scripts[scriptName]);
  if (!isDefined) {
    return {
      name: scriptName,
      command,
      status: 'skipped',
      durationMs: 0,
      details: `Script ${scriptName} is not defined`
    };
  }

  const outcome = await runGate(command, repoRoot);
  return { name: scriptName, command, ...outcome };
}
|
|
48
|
+
|
|
49
|
+
/**
 * Run the codex-based review gate.
 *
 * Non-strict runs and explicit --skip-review both short-circuit to "skipped".
 * A missing codex binary is treated as a hard failure so CI cannot silently
 * drop the review step.
 *
 * @param {{repoRoot: string, strict: boolean, skipReview: boolean}} opts
 * @returns {Promise<{status: 'pass'|'fail'|'skipped', details: string}>}
 */
async function runReviewGate({ repoRoot, strict, skipReview }) {
  if (!strict) {
    return { status: 'skipped', details: 'Strict mode disabled' };
  }

  if (skipReview) {
    return { status: 'skipped', details: 'Skipped by --skip-review' };
  }

  // `command -v` probes the shell PATH for the codex binary.
  const probe = await runCommand('command -v codex', { cwd: repoRoot });
  if (probe.code !== 0) {
    return {
      status: 'fail',
      details: 'codex binary not found; rerun with --skip-review if intentional'
    };
  }

  const review = await runCommand('codex review --base main', {
    cwd: repoRoot,
    timeoutMs: 30 * 60 * 1000
  });

  if (review.code === 0) {
    return { status: 'pass', details: 'codex review passed' };
  }

  const reason = (review.stderr || review.stdout || 'codex review failed').trim();
  return { status: 'fail', details: reason.slice(0, 400) };
}
|
|
89
|
+
|
|
90
|
+
/**
 * Gather the human-readable reasons a verify run must block, drawn from
 * failed manifest tasks, failed quick/strict gates, and a failed review.
 *
 * @returns {string[]} One "<source>: <detail>" line per blocking problem,
 *   in task → gate → review order.
 */
function collectBlockingFindings(manifest, quickGates, strictGates, reviewGate) {
  const failedTasks = manifest.tasks
    .filter((task) => task.status === 'failed')
    .map((task) => `${task.id}: ${task.lastError || 'task failed'}`);

  const failedGates = [...quickGates, ...strictGates]
    .filter((gate) => gate.status === 'fail')
    .map((gate) => `${gate.name}: ${gate.details}`);

  const findings = [...failedTasks, ...failedGates];
  if (reviewGate.status === 'fail') {
    findings.push(`review: ${reviewGate.details}`);
  }

  return findings;
}
|
|
111
|
+
|
|
112
|
+
/**
 * Entry point for `harness:verify`: run quality gates against a change,
 * persist a report-card.json, and return a summary for the CLI.
 *
 * Quick gates (lint/typecheck/test) always run; strict mode adds
 * test:integration, npm audit, and the codex review gate. Gates run
 * sequentially on purpose — they share the working tree and npm cache.
 *
 * @param {{repoRoot: string, changeId: string, strict?: boolean, skipReview?: boolean}} opts
 * @returns {Promise<{changeId: string, outputPath: string, overall: string, blockingFindings: string[]}>}
 */
async function runVerify({ repoRoot, changeId, strict = false, skipReview = false }) {
  const manifest = parseManifest(await readJson(getTaskManifestPath(repoRoot, changeId)));
  const scripts = await getPackageScripts(repoRoot);

  const quickGates = [];
  for (const gateName of ['lint', 'typecheck', 'test']) {
    quickGates.push(await runNpmScriptGate(scripts, gateName, repoRoot));
  }

  const strictGates = [];
  if (strict) {
    strictGates.push(await runNpmScriptGate(scripts, 'test:integration', repoRoot));

    const auditCommand = 'npm audit --audit-level=high';
    const auditResult = await runGate(auditCommand, repoRoot, 20 * 60 * 1000);
    strictGates.push({ name: 'audit', command: auditCommand, ...auditResult });
  }

  const review = await runReviewGate({ repoRoot, strict, skipReview });
  const blockingFindings = collectBlockingFindings(manifest, quickGates, strictGates, review);

  // Validate the assembled report before persisting it.
  const report = parseReportCard({
    changeId,
    overall: blockingFindings.length > 0 ? 'fail' : 'pass',
    quickGates,
    strictGates,
    review,
    blockingFindings,
    timestamp: nowIso()
  });

  const outputPath = getReportCardPath(repoRoot, changeId);
  await writeJson(outputPath, report);

  return {
    changeId,
    outputPath,
    overall: report.overall,
    blockingFindings: report.blockingFindings
  };
}
|
|
160
|
+
|
|
161
|
+
module.exports = {
|
|
162
|
+
runVerify
|
|
163
|
+
};
|
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* [INPUT]: 依赖 store 读取 TASKS.md 与输出路径,依赖 schemas 校验 manifest。
|
|
3
|
+
* [OUTPUT]: 对外提供 TASKS.md → task-manifest.json 的解析与生成能力。
|
|
4
|
+
* [POS]: harness 计划编排层,被 operations/plan 直接调用。
|
|
5
|
+
* [PROTOCOL]: 变更时更新此头部,然后检查 CLAUDE.md
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
const {
|
|
9
|
+
nowIso,
|
|
10
|
+
readText,
|
|
11
|
+
readJson,
|
|
12
|
+
writeJson,
|
|
13
|
+
exists,
|
|
14
|
+
getTaskManifestPath,
|
|
15
|
+
getTasksMarkdownPath
|
|
16
|
+
} = require('./store');
|
|
17
|
+
const { parseManifest } = require('./schemas');
|
|
18
|
+
|
|
19
|
+
const TASK_LINE = /^- \[( |x|X)\]\s+(T\d{3})\s*(.*)$/;
|
|
20
|
+
const TRAILING_PATHS = /\(([^)]+)\)\s*$/;
|
|
21
|
+
const DEPENDS_TAG = /dependsOn:([A-Za-z0-9_,-]+)/i;
|
|
22
|
+
|
|
23
|
+
/**
 * Strip task-line markup ([P] parallel markers and the dependsOn: tag)
 * from a raw task title, leaving the human-readable text.
 */
function normalizeTitle(rawTitle) {
  const withoutParallel = rawTitle.replace(/\[P\]/g, '');
  const withoutDepends = withoutParallel.replace(DEPENDS_TAG, '');
  return withoutDepends.trim();
}
|
|
29
|
+
|
|
30
|
+
/**
 * Extract the comma-separated file paths from a trailing "(a, b)" group
 * on a task line. Returns [] when no trailing group is present.
 */
function parseTouches(rawTail) {
  const group = TRAILING_PATHS.exec(rawTail);
  if (group === null) {
    return [];
  }

  const parts = group[1].split(',');
  return parts.map((part) => part.trim()).filter(Boolean);
}
|
|
41
|
+
|
|
42
|
+
/**
 * Resolve a task's dependencies. An explicit "dependsOn:A,B" tag wins;
 * otherwise sequential tasks implicitly depend on the previous task,
 * while [P]-marked (parallel) tasks depend on nothing.
 */
function parseDependsOn(rawTail, fallbackDependsOn, isParallel) {
  const tagged = DEPENDS_TAG.exec(rawTail);
  if (tagged) {
    return tagged[1].split(',').map((id) => id.trim()).filter(Boolean);
  }

  return isParallel || !fallbackDependsOn ? [] : [fallbackDependsOn];
}
|
|
57
|
+
|
|
58
|
+
/**
 * Parse a TASKS.md checklist into manifest task records.
 *
 * Recognized lines look like:
 *   - [ ] T001 Title [P] dependsOn:T000 (path/a.js, path/b.js)
 * A checked box ([x] or [X]) maps to status "passed"; everything else is
 * "pending". Lines that do not match the checklist shape are ignored.
 */
function parseTasksMarkdown(content) {
  const tasks = [];
  let previousTaskId = null;

  for (const line of content.split(/\r?\n/)) {
    const parsed = line.match(TASK_LINE);
    if (parsed === null) {
      continue;
    }

    const [, doneMark, taskId, tail] = parsed;
    const isParallel = tail.includes('[P]');
    const cleanTitle = normalizeTitle(tail).replace(TRAILING_PATHS, '').trim();
    const title = cleanTitle || `Task ${taskId}`;

    tasks.push({
      id: taskId,
      title,
      dependsOn: parseDependsOn(tail, previousTaskId, isParallel),
      touches: parseTouches(tail),
      // Default run command simply echoes the task.
      run: [`echo "[TASK ${taskId}] ${title}"`],
      checks: [],
      status: doneMark.toLowerCase() === 'x' ? 'passed' : 'pending',
      attempts: 0,
      maxRetries: 1
    });

    previousTaskId = taskId;
  }

  return tasks;
}
|
|
92
|
+
|
|
93
|
+
/**
 * Fallback plan used when no TASKS.md exists: a single bootstrap task.
 */
function buildDefaultTasks(changeId) {
  const bootstrap = {
    id: 'T001',
    title: `Bootstrap ${changeId}`,
    dependsOn: [],
    touches: [],
    run: [`echo "[TASK T001] Bootstrap ${changeId}"`],
    checks: [],
    status: 'pending',
    attempts: 0,
    maxRetries: 1
  };
  return [bootstrap];
}
|
|
108
|
+
|
|
109
|
+
/**
 * Create (or load) the task manifest for a change.
 *
 * An existing manifest is validated and returned as-is unless `overwrite`
 * is set. New manifests are derived from TASKS.md when present (falling
 * back to a single default bootstrap task when the file is absent or
 * yields no tasks), validated, and persisted.
 *
 * @param {{repoRoot: string, changeId: string, goal?: string, overwrite?: boolean}} opts
 * @returns {Promise<object>} The validated manifest.
 */
async function createTaskManifest({ repoRoot, changeId, goal, overwrite = false }) {
  const manifestPath = getTaskManifestPath(repoRoot, changeId);

  const keepExisting = !overwrite && (await exists(manifestPath));
  if (keepExisting) {
    return parseManifest(await readJson(manifestPath));
  }

  const tasksPath = getTasksMarkdownPath(repoRoot, changeId);
  const hasTasksFile = await exists(tasksPath);
  let tasks = hasTasksFile ? parseTasksMarkdown(await readText(tasksPath)) : [];
  if (tasks.length === 0) {
    tasks = buildDefaultTasks(changeId);
  }

  const manifest = parseManifest({
    changeId,
    goal: goal || `Deliver ${changeId} safely with auditable checkpoints.`,
    createdAt: nowIso(),
    updatedAt: nowIso(),
    tasks,
    metadata: {
      source: hasTasksFile ? 'TASKS.md' : 'default',
      generatedBy: 'harness:plan'
    }
  });

  await writeJson(manifestPath, manifest);
  return manifest;
}
|
|
137
|
+
|
|
138
|
+
module.exports = {
|
|
139
|
+
parseTasksMarkdown,
|
|
140
|
+
createTaskManifest
|
|
141
|
+
};
|
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* [INPUT]: 依赖 zod 进行运行时 schema 校验,依赖调用方提供 manifest/report/checkpoint 原始对象。
|
|
3
|
+
* [OUTPUT]: 对外提供 Manifest/Task/ReportCard/Checkpoint schema 与 parse 校验函数。
|
|
4
|
+
* [POS]: harness 内核的类型边界层,被 planner/dispatcher/verifier/release 复用。
|
|
5
|
+
* [PROTOCOL]: 变更时更新此头部,然后检查 CLAUDE.md
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
const { z } = require('zod');
|
|
9
|
+
|
|
10
|
+
// Change IDs follow the devflow requirement convention, e.g. "REQ-123" / "BUG-7".
const CHANGE_ID_PATTERN = /^(REQ|BUG)-\d+$/;
// Task IDs: an uppercase letter followed by 1-31 uppercase letters, digits,
// underscores, or hyphens (covers the planner's "T001"-style IDs).
const TASK_ID_PATTERN = /^[A-Z][A-Z0-9_-]{1,31}$/;

// Reusable validator for a change identifier.
const ChangeIdSchema = z.string().regex(CHANGE_ID_PATTERN, 'Invalid changeId format');

// Lifecycle states a task may be in.
const TaskStatusSchema = z.enum(['pending', 'running', 'passed', 'failed', 'skipped']);

// One unit of work inside a manifest.
const TaskSchema = z.object({
  id: z.string().regex(TASK_ID_PATTERN, 'Invalid task id'),
  title: z.string().min(1, 'Task title is required'),
  // IDs of tasks that must complete before this one may run.
  dependsOn: z.array(z.string().regex(TASK_ID_PATTERN)).default([]),
  // File paths this task is expected to modify.
  touches: z.array(z.string().min(1)).default([]),
  // Shell commands executed for the task; at least one is mandatory.
  run: z.array(z.string().min(1)).min(1, 'At least one run command is required'),
  // Optional follow-up check commands.
  checks: z.array(z.string().min(1)).default([]),
  status: TaskStatusSchema.default('pending'),
  attempts: z.number().int().min(0).default(0),
  maxRetries: z.number().int().min(0).default(1),
  // Populated when the most recent attempt failed.
  lastError: z.string().optional()
});

// The on-disk task-manifest.json document.
const ManifestSchema = z.object({
  changeId: ChangeIdSchema,
  goal: z.string().min(1).default('Deliver planned requirement changes safely.'),
  // ISO-8601 timestamps.
  createdAt: z.string().datetime(),
  updatedAt: z.string().datetime(),
  tasks: z.array(TaskSchema),
  metadata: z.object({
    // Where the plan came from: a parsed TASKS.md or the built-in default plan.
    source: z.enum(['TASKS.md', 'default']).default('default'),
    generatedBy: z.string().min(1).default('harness:plan')
  }).default({ source: 'default', generatedBy: 'harness:plan' })
});

// Snapshot of a single task attempt (see store's checkpoint.json path).
const CheckpointSchema = z.object({
  changeId: ChangeIdSchema,
  taskId: z.string().regex(TASK_ID_PATTERN),
  sessionId: z.string().min(1),
  status: TaskStatusSchema,
  summary: z.string().min(1),
  timestamp: z.string().datetime(),
  attempt: z.number().int().min(0).default(0)
});

// Result of one quality-gate command (lint/typecheck/test/audit/...).
const GateResultSchema = z.object({
  name: z.string().min(1),
  status: z.enum(['pass', 'fail', 'skipped']),
  command: z.string().min(1),
  durationMs: z.number().int().min(0),
  details: z.string().default('')
});

// The report-card.json document produced by the verify operation.
const ReportCardSchema = z.object({
  changeId: ChangeIdSchema,
  overall: z.enum(['pass', 'fail']),
  quickGates: z.array(GateResultSchema),
  strictGates: z.array(GateResultSchema),
  // Review gate result; unlike other gates it carries no command/duration.
  review: z.object({
    status: z.enum(['pass', 'fail', 'skipped']),
    details: z.string().default('')
  }),
  // Human-readable reasons the change is blocked (empty when overall is pass).
  blockingFindings: z.array(z.string()),
  timestamp: z.string().datetime()
});
|
|
72
|
+
|
|
73
|
+
/**
 * Validate `input` against a zod `schema`, returning the parsed value or
 * throwing a single Error that aggregates every validation issue.
 *
 * @throws {Error} "<label> validation failed: <path>: <msg>; ..."
 */
function parseWithSchema(schema, input, label) {
  const result = schema.safeParse(input);
  if (!result.success) {
    const summary = result.error.issues
      .map(({ path, message }) => `${path.join('.') || '<root>'}: ${message}`)
      .join('; ');
    throw new Error(`${label} validation failed: ${summary}`);
  }

  return result.data;
}
|
|
84
|
+
|
|
85
|
+
/** Validate a raw object as a task manifest. */
const parseManifest = (input) => parseWithSchema(ManifestSchema, input, 'Manifest');

/** Validate a raw object as a task checkpoint. */
const parseCheckpoint = (input) => parseWithSchema(CheckpointSchema, input, 'Checkpoint');

/** Validate a raw object as a verify report card. */
const parseReportCard = (input) => parseWithSchema(ReportCardSchema, input, 'ReportCard');
|
|
96
|
+
|
|
97
|
+
module.exports = {
|
|
98
|
+
ChangeIdSchema,
|
|
99
|
+
TaskSchema,
|
|
100
|
+
TaskStatusSchema,
|
|
101
|
+
ManifestSchema,
|
|
102
|
+
CheckpointSchema,
|
|
103
|
+
GateResultSchema,
|
|
104
|
+
ReportCardSchema,
|
|
105
|
+
parseManifest,
|
|
106
|
+
parseCheckpoint,
|
|
107
|
+
parseReportCard
|
|
108
|
+
};
|
|
@@ -0,0 +1,240 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* [INPUT]: 依赖 fs/path/child_process,依赖调用方提供 changeId 与命令参数。
|
|
3
|
+
* [OUTPUT]: 对外提供 harness 路径约定、JSON/文本读写、JSONL 追加、命令执行工具。
|
|
4
|
+
* [POS]: harness 内核的数据与 IO 基础设施层,被全部 operations 复用。
|
|
5
|
+
* [PROTOCOL]: 变更时更新此头部,然后检查 CLAUDE.md
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
const fs = require('fs');
|
|
9
|
+
const fsp = fs.promises;
|
|
10
|
+
const path = require('path');
|
|
11
|
+
const { spawn } = require('child_process');
|
|
12
|
+
|
|
13
|
+
/** Current wall-clock time as an ISO-8601 UTC string. */
const nowIso = () => new Date().toISOString();
|
|
16
|
+
|
|
17
|
+
/**
 * Walk upward from `startDir` looking for the repository root: the first
 * directory containing package.json alongside either a .git directory or
 * a devflow/ directory. Falls back to the resolved start directory when
 * no such ancestor is found.
 *
 * @param {string} [startDir=process.cwd()]
 * @returns {string} Absolute path of the detected root (or of startDir).
 */
function resolveRepoRoot(startDir = process.cwd()) {
  let candidate = path.resolve(startDir);

  for (;;) {
    const hasPackage = fs.existsSync(path.join(candidate, 'package.json'));
    const hasMarker =
      fs.existsSync(path.join(candidate, '.git')) ||
      fs.existsSync(path.join(candidate, 'devflow'));

    if (hasPackage && hasMarker) {
      return candidate;
    }

    const parent = path.dirname(candidate);
    if (parent === candidate) {
      // Reached the filesystem root without finding a repo marker.
      return path.resolve(startDir);
    }
    candidate = parent;
  }
}
|
|
36
|
+
|
|
37
|
+
/**
 * Path conventions for harness artifacts.
 *
 * Durable artifacts live under devflow/requirements/<changeId>/; transient
 * per-task runtime state lives under .harness/runtime/<changeId>/<taskId>/.
 */
function getRequirementDir(repoRoot, changeId) {
  return path.join(repoRoot, 'devflow', 'requirements', changeId);
}

// Internal helper: a named file inside the requirement directory.
const requirementFile = (repoRoot, changeId, fileName) =>
  path.join(getRequirementDir(repoRoot, changeId), fileName);

/** Context package handed to agents for this change. */
function getContextPackagePath(repoRoot, changeId) {
  return requirementFile(repoRoot, changeId, 'context-package.md');
}

/** Machine-readable task plan produced by the planner. */
function getTaskManifestPath(repoRoot, changeId) {
  return requirementFile(repoRoot, changeId, 'task-manifest.json');
}

/** Quality-gate results written by the verify operation. */
function getReportCardPath(repoRoot, changeId) {
  return requirementFile(repoRoot, changeId, 'report-card.json');
}

/** Release note for this change. */
function getReleaseNotePath(repoRoot, changeId) {
  return requirementFile(repoRoot, changeId, 'RELEASE_NOTE.md');
}

/** Persistent harness bookkeeping state. */
function getHarnessStatePath(repoRoot, changeId) {
  return requirementFile(repoRoot, changeId, 'harness-state.json');
}

/** Human-authored task checklist consumed by the planner. */
function getTasksMarkdownPath(repoRoot, changeId) {
  return requirementFile(repoRoot, changeId, 'TASKS.md');
}

/** Root of all transient runtime state. */
function getRuntimeRoot(repoRoot) {
  return path.join(repoRoot, '.harness', 'runtime');
}

/** Runtime state directory for one change. */
function getRuntimeChangeDir(repoRoot, changeId) {
  return path.join(getRuntimeRoot(repoRoot), changeId);
}

/** Runtime state directory for one task within a change. */
function getRuntimeTaskDir(repoRoot, changeId, taskId) {
  return path.join(getRuntimeChangeDir(repoRoot, changeId), taskId);
}

/** Append-only JSONL event log for a task run. */
function getEventsPath(repoRoot, changeId, taskId) {
  return path.join(getRuntimeTaskDir(repoRoot, changeId, taskId), 'events.jsonl');
}

/** Latest checkpoint snapshot for a task run. */
function getCheckpointPath(repoRoot, changeId, taskId) {
  return path.join(getRuntimeTaskDir(repoRoot, changeId, taskId), 'checkpoint.json');
}
|
|
84
|
+
|
|
85
|
+
/**
 * Check whether a filesystem entry exists at `filePath` (any entry type).
 * @returns {Promise<boolean>}
 */
async function exists(filePath) {
  return fsp.access(filePath).then(
    () => true,
    () => false
  );
}

/** Create `dirPath` (with any missing parents); no-op when already present. */
async function ensureDir(dirPath) {
  await fsp.mkdir(dirPath, { recursive: true });
}
|
|
97
|
+
|
|
98
|
+
/**
 * Read a UTF-8 text file, returning `fallback` when the file is absent.
 */
async function readText(filePath, fallback = '') {
  const present = await exists(filePath);
  return present ? fsp.readFile(filePath, 'utf8') : fallback;
}

/**
 * Write UTF-8 text to `filePath`, creating parent directories as needed.
 */
async function writeText(filePath, content) {
  await ensureDir(path.dirname(filePath));
  await fsp.writeFile(filePath, content, 'utf8');
}
|
|
109
|
+
|
|
110
|
+
/**
 * Read and parse a JSON file; returns `fallback` when the file is absent.
 * Malformed JSON still throws so corruption is not silently ignored.
 */
async function readJson(filePath, fallback = null) {
  if (!(await exists(filePath))) {
    return fallback;
  }

  const raw = await readText(filePath);
  return JSON.parse(raw);
}

/** Serialize `value` as pretty-printed JSON with a trailing newline. */
async function writeJson(filePath, value) {
  await ensureDir(path.dirname(filePath));
  const serialized = `${JSON.stringify(value, null, 2)}\n`;
  await fsp.writeFile(filePath, serialized, 'utf8');
}

/** Append one compact-JSON line to a JSONL log file. */
async function appendJsonl(filePath, value) {
  await ensureDir(path.dirname(filePath));
  await fsp.appendFile(filePath, `${JSON.stringify(value)}\n`, 'utf8');
}
|
|
127
|
+
|
|
128
|
+
/**
 * List the absolute paths of the immediate subdirectories of `dirPath`.
 * Returns [] when the directory itself does not exist.
 */
async function listDirectories(dirPath) {
  if (!(await exists(dirPath))) {
    return [];
  }

  const entries = await fsp.readdir(dirPath, { withFileTypes: true });
  const directories = [];
  for (const entry of entries) {
    if (entry.isDirectory()) {
      directories.push(path.join(dirPath, entry.name));
    }
  }
  return directories;
}
|
|
138
|
+
|
|
139
|
+
/**
 * Run a shell command and resolve with its outcome.
 *
 * The returned promise ALWAYS resolves (never rejects): spawn-level errors
 * and non-zero exits are folded into the result object so callers can
 * branch on `code` without try/catch.
 *
 * @param {string} command - Full command line, executed via the system shell.
 * @param {object} [options]
 * @param {string} [options.cwd=process.cwd()] - Working directory.
 * @param {object} [options.env={}] - Extra env vars merged over process.env.
 * @param {number} [options.timeoutMs=0] - When > 0, SIGTERM the child after this delay.
 * @param {boolean} [options.stream=false] - Inherit stdio instead of capturing output.
 * @returns {Promise<{code: number, stdout: string, stderr: string, durationMs: number, killedByTimeout: boolean}>}
 *   In stream mode stdout/stderr stay empty (output went to the terminal).
 */
async function runCommand(command, options = {}) {
  const {
    cwd = process.cwd(),
    env = {},
    timeoutMs = 0,
    stream = false
  } = options;

  const startedAt = Date.now();

  return new Promise((resolve) => {
    // shell: true — `command` is interpreted by the shell, so callers must
    // never pass untrusted input here.
    // NOTE(review): with shell: true, SIGTERM targets the shell wrapper;
    // grandchild processes may survive a timeout kill — confirm if this
    // matters for long-running gates.
    const child = spawn(command, {
      cwd,
      env: { ...process.env, ...env },
      shell: true,
      stdio: stream ? 'inherit' : 'pipe'
    });

    let stdout = '';
    let stderr = '';
    let killedByTimeout = false;
    let timeoutId = null;

    // In stream mode stdio is inherited, so there are no pipes to read.
    if (!stream) {
      child.stdout.on('data', (chunk) => {
        stdout += String(chunk);
      });

      child.stderr.on('data', (chunk) => {
        stderr += String(chunk);
      });
    }

    if (timeoutMs > 0) {
      timeoutId = setTimeout(() => {
        killedByTimeout = true;
        child.kill('SIGTERM');
      }, timeoutMs);
    }

    child.on('close', (code) => {
      if (timeoutId) {
        clearTimeout(timeoutId);
      }

      resolve({
        // `code` is null when the child was terminated by a signal
        // (e.g. our timeout SIGTERM); normalize that to a failure code.
        code: typeof code === 'number' ? code : 1,
        stdout,
        stderr,
        durationMs: Date.now() - startedAt,
        killedByTimeout
      });
    });

    // Spawn-level failure (e.g. shell executable not found): report it as a
    // failed run with the error message appended to stderr.
    child.on('error', (error) => {
      if (timeoutId) {
        clearTimeout(timeoutId);
      }

      resolve({
        code: 1,
        stdout,
        stderr: `${stderr}\n${error.message}`.trim(),
        durationMs: Date.now() - startedAt,
        killedByTimeout
      });
    });
  });
}
|
|
208
|
+
|
|
209
|
+
/**
 * Load the "scripts" map from the repo's package.json.
 * Returns {} when the file is missing or has no scripts section.
 */
async function getPackageScripts(repoRoot) {
  const pkg = await readJson(path.join(repoRoot, 'package.json'), {});
  return (pkg && pkg.scripts) || {};
}
|
|
214
|
+
|
|
215
|
+
module.exports = {
|
|
216
|
+
nowIso,
|
|
217
|
+
resolveRepoRoot,
|
|
218
|
+
getRequirementDir,
|
|
219
|
+
getContextPackagePath,
|
|
220
|
+
getTaskManifestPath,
|
|
221
|
+
getReportCardPath,
|
|
222
|
+
getReleaseNotePath,
|
|
223
|
+
getHarnessStatePath,
|
|
224
|
+
getTasksMarkdownPath,
|
|
225
|
+
getRuntimeRoot,
|
|
226
|
+
getRuntimeChangeDir,
|
|
227
|
+
getRuntimeTaskDir,
|
|
228
|
+
getEventsPath,
|
|
229
|
+
getCheckpointPath,
|
|
230
|
+
exists,
|
|
231
|
+
ensureDir,
|
|
232
|
+
readText,
|
|
233
|
+
writeText,
|
|
234
|
+
readJson,
|
|
235
|
+
writeJson,
|
|
236
|
+
appendJsonl,
|
|
237
|
+
listDirectories,
|
|
238
|
+
runCommand,
|
|
239
|
+
getPackageScripts
|
|
240
|
+
};
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "cc-devflow",
|
|
3
|
-
"version": "4.1.5",
|
|
3
|
+
"version": "4.2.0",
|
|
4
4
|
"description": "DevFlow CLI tool",
|
|
5
5
|
"main": "bin/cc-devflow.js",
|
|
6
6
|
"bin": {
|
|
@@ -26,6 +26,14 @@
|
|
|
26
26
|
"prepublishOnly": "node scripts/validate-publish.js",
|
|
27
27
|
"test": "jest",
|
|
28
28
|
"start": "node bin/cc-devflow.js",
|
|
29
|
+
"harness:init": "node bin/harness.js init",
|
|
30
|
+
"harness:pack": "node bin/harness.js pack",
|
|
31
|
+
"harness:plan": "node bin/harness.js plan",
|
|
32
|
+
"harness:dispatch": "node bin/harness.js dispatch",
|
|
33
|
+
"harness:verify": "node bin/harness.js verify",
|
|
34
|
+
"harness:release": "node bin/harness.js release",
|
|
35
|
+
"harness:resume": "node bin/harness.js resume",
|
|
36
|
+
"harness:janitor": "node bin/harness.js janitor",
|
|
29
37
|
"adapt": "node bin/adapt.js",
|
|
30
38
|
"adapt:check": "node bin/adapt.js --check",
|
|
31
39
|
"adapt:skills": "node bin/adapt.js --skills",
|