principles-disciple 1.8.2 → 1.8.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/openclaw.plugin.json +4 -4
- package/package.json +1 -1
- package/templates/langs/en/skills/ai-sprint-orchestration/EXAMPLES.md +63 -0
- package/templates/langs/en/skills/ai-sprint-orchestration/REFERENCE.md +136 -0
- package/templates/langs/en/skills/ai-sprint-orchestration/SKILL.md +67 -0
- package/templates/langs/en/skills/ai-sprint-orchestration/references/agent-registry.json +214 -0
- package/templates/langs/en/skills/ai-sprint-orchestration/references/specs/bugfix-complex-template.json +107 -0
- package/templates/langs/en/skills/ai-sprint-orchestration/references/specs/feature-complex-template.json +107 -0
- package/templates/langs/en/skills/ai-sprint-orchestration/references/specs/workflow-validation-minimal-verify.json +105 -0
- package/templates/langs/en/skills/ai-sprint-orchestration/references/specs/workflow-validation-minimal.json +108 -0
- package/templates/langs/en/skills/ai-sprint-orchestration/references/workflow-v1-acceptance-checklist.md +58 -0
- package/templates/langs/en/skills/ai-sprint-orchestration/references/workflow-v1.4-work-unit-handoff.md +190 -0
- package/templates/langs/en/skills/ai-sprint-orchestration/runtime/.gitignore +2 -0
- package/templates/langs/en/skills/ai-sprint-orchestration/scripts/lib/archive.mjs +310 -0
- package/templates/langs/en/skills/ai-sprint-orchestration/scripts/lib/contract-enforcement.mjs +683 -0
- package/templates/langs/en/skills/ai-sprint-orchestration/scripts/lib/decision.mjs +604 -0
- package/templates/langs/en/skills/ai-sprint-orchestration/scripts/lib/state-store.mjs +32 -0
- package/templates/langs/en/skills/ai-sprint-orchestration/scripts/lib/task-specs.mjs +707 -0
- package/templates/langs/en/skills/ai-sprint-orchestration/scripts/run.mjs +3419 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/EXAMPLES.md +63 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/REFERENCE.md +136 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/SKILL.md +67 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/references/agent-registry.json +214 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/references/specs/bugfix-complex-template.json +107 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/references/specs/feature-complex-template.json +107 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/references/specs/workflow-validation-minimal-verify.json +105 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/references/specs/workflow-validation-minimal.json +108 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/references/workflow-v1-acceptance-checklist.md +58 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/references/workflow-v1.4-work-unit-handoff.md +190 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/runtime/.gitignore +2 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/scripts/lib/archive.mjs +310 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/scripts/lib/contract-enforcement.mjs +683 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/scripts/lib/decision.mjs +604 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/scripts/lib/state-store.mjs +32 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/scripts/lib/task-specs.mjs +707 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/scripts/run.mjs +3419 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/test/archive.test.mjs +230 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/test/contract-enforcement.test.mjs +672 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/test/decision.test.mjs +1321 -0
- package/templates/langs/zh/skills/ai-sprint-orchestration/test/run.test.mjs +1419 -0
|
@@ -0,0 +1,310 @@
|
|
|
1
|
+
import fs from 'fs';
|
|
2
|
+
import path from 'path';
|
|
3
|
+
import { fileURLToPath } from 'url';
|
|
4
|
+
import { ensureDir, readJson, writeJson, writeText, fileExists } from './state-store.mjs';
|
|
5
|
+
|
|
6
|
+
// ESM has no __dirname global; derive it from this module's URL.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// Package root is two levels above this file (scripts/lib/ -> package root).
const packageRoot = path.resolve(__dirname, '..', '..');
|
|
8
|
+
|
|
9
|
+
/**
 * Compute the runtime directory layout for sprint runs.
 * The AI_SPRINT_RUNTIME_ROOT environment variable overrides the default
 * location of <packageRoot>/runtime.
 * @returns {{runtimeRoot: string, sprintRoot: string, archiveRoot: string}}
 */
function resolveRoots() {
  const override = process.env.AI_SPRINT_RUNTIME_ROOT;
  let runtimeRoot;
  if (override) {
    runtimeRoot = path.resolve(override);
  } else {
    runtimeRoot = path.join(packageRoot, 'runtime');
  }
  const sprintRoot = path.join(runtimeRoot, 'runs');
  const archiveRoot = path.join(runtimeRoot, 'archive');
  return { runtimeRoot, sprintRoot, archiveRoot };
}
|
|
19
|
+
|
|
20
|
+
/** @returns {string} The current time as an ISO-8601 UTC timestamp. */
function nowIso() {
  const now = new Date();
  return now.toISOString();
}
|
|
23
|
+
|
|
24
|
+
/**
 * Decide whether a filesystem entry inside a run directory belongs in the
 * archive copy. Lock files, temporary prompt files, and the worktrees/ and
 * runtime/ subtrees are excluded; everything else is archived.
 * @param {string} srcPath - Path of the entry being considered.
 * @param {string} runDir - Root of the sprint run directory.
 * @returns {boolean} true when the entry should be copied into the archive.
 */
function shouldArchiveEntry(srcPath, runDir) {
  const rel = path.relative(runDir, srcPath);
  // The run directory itself always participates in the copy.
  if (!rel) return true;

  const name = path.basename(srcPath);
  if (name === 'orchestrator.lock' || name.startsWith('.ai-sprint-prompt-')) {
    return false;
  }

  // Normalize to forward slashes so subtree checks work on any platform.
  const posixRel = rel.split(path.sep).join('/');
  const excludedSubtrees = ['worktrees', 'runtime'];
  const inExcludedSubtree = excludedSubtrees.some(
    (dir) => posixRel.startsWith(`${dir}/`) || posixRel.includes(`/${dir}/`)
  );
  return !inExcludedSubtree;
}
|
|
40
|
+
|
|
41
|
+
/**
 * Archive a completed/halted sprint run by run ID.
 * CLI entry point for --archive <run-id>.
 * @param {string} runId - Name of the run directory under runs/.
 * @returns {string} Path to the archive directory.
 * @throws {Error} When no sprint.json exists for the given run ID.
 */
export function archiveRunById(runId) {
  const roots = resolveRoots();
  const runDir = path.join(roots.sprintRoot, runId);
  const hasState = fileExists(path.join(runDir, 'sprint.json'));
  if (!hasState) {
    throw new Error(`Run not found: ${runId}`);
  }
  return archiveRun(runDir, runId);
}
|
|
54
|
+
|
|
55
|
+
/**
 * Core archive function. Copies all artifacts, captures git info, generates summary.
 * Idempotent: a previously completed archive raises, while a partial/failed
 * one is wiped and rebuilt. The metadata file is written last so that its
 * presence with status 'completed' marks a finished archive.
 * @param {string} runDir - Path to the sprint run directory
 * @param {string} runId - The run ID
 * @returns {string} Path to the archive directory
 * @throws {Error} When the run has already been archived successfully.
 */
export function archiveRun(runDir, runId) {
  const { archiveRoot } = resolveRoots();
  const destDir = path.join(archiveRoot, runId);
  const metaPath = path.join(destDir, 'archive-meta.json');

  // Idempotency: never overwrite a completed archive; clean up a partial one.
  if (fileExists(metaPath)) {
    if (readJson(metaPath).status === 'completed') {
      throw new Error(`Already archived: ${destDir}`);
    }
    fs.rmSync(destDir, { recursive: true, force: true });
  }

  const state = readJson(path.join(runDir, 'sprint.json'));

  // Archiving a live run is permitted but warned about.
  const isLive = state.status === 'running' || state.status === 'paused';
  if (isLive) {
    console.error(`Warning: run ${runId} has status '${state.status}'. Archiving a live run may produce incomplete artifacts.`);
  }

  // Step 1: copy all run artifacts recursively, honoring the archive filter.
  ensureDir(destDir);
  try {
    const filter = (src) => shouldArchiveEntry(src, runDir);
    fs.cpSync(runDir, destDir, { recursive: true, filter });
  } catch {
    // Fallback for Node < 16.7: manual recursive copy
    copyDirRecursive(runDir, destDir, runDir);
  }

  // Steps 2-3: enrich the archive with git context and a readable summary.
  captureGitInfo(destDir, state);
  generateSummary(destDir, state);

  // Step 4: write archive metadata (last — marks archive as complete).
  writeJson(metaPath, {
    runId,
    archivedAt: nowIso(),
    status: 'completed',
    sourceStatus: state.status,
  });

  return destDir;
}
|
|
112
|
+
|
|
113
|
+
/**
 * Capture git information into the archive's git/ directory.
 * All data is derived from the most recent stage git-status.json found in
 * the archived run; no git commands are executed here, so one missing
 * field never blocks the rest.
 * @param {object} destDir - Archive destination directory.
 * @param {object} state - Parsed sprint.json state (used for capturedAt).
 */
function captureGitInfo(destDir, state) {
  const gitDir = path.join(destDir, 'git');
  ensureDir(gitDir);
  const latestGitStatus = findLatestStageGitStatus(destDir);

  if (!latestGitStatus) {
    // No stage recorded git state; leave explanatory placeholder files.
    writeText(path.join(gitDir, 'branch.txt'), '# No stage git-status.json found in archive\n');
    writeText(path.join(gitDir, 'status.txt'), '# No stage git-status.json found in archive\n');
    writeText(path.join(gitDir, 'modified-files.txt'), '# No stage git-status.json found in archive\n');
    writeText(path.join(gitDir, 'diff.patch'), '# Diff omitted from archive; inspect archived stage artifacts instead.\n');
    writeText(path.join(gitDir, 'log.txt'), '# Git log omitted from archive; inspect archived stage artifacts instead.\n');
    return;
  }

  const dirtyLines = Array.isArray(latestGitStatus.dirtyFiles) ? latestGitStatus.dirtyFiles : [];
  // Dirty lines are presumably git porcelain output: a 1-2 character status
  // code (" M", "??", "A ", ...) followed by whitespace and the path.
  // Strip only that prefix. The previous pattern /^[ MARCUD?!]+/ had no
  // length bound, so it also consumed leading filename characters
  // (e.g. " M README.md" -> "EADME.md", " A AUTHORS" -> "UTHORS").
  const modifiedFiles = dirtyLines
    .map((line) => String(line).replace(/^[ MARCUD?!]{1,2}\s+/, '').trim())
    .filter(Boolean);

  writeText(path.join(gitDir, 'branch.txt'), `${latestGitStatus.branch || 'unknown'}\n`);
  writeText(path.join(gitDir, 'status.txt'), `${dirtyLines.length ? dirtyLines.join('\n') : '# clean'}\n`);
  writeText(path.join(gitDir, 'modified-files.txt'), `${modifiedFiles.length ? modifiedFiles.join('\n') : '# none'}\n`);
  writeText(path.join(gitDir, 'diff.patch'), '# Diff omitted from archive; inspect archived stage artifacts and git-status.json.\n');
  writeText(path.join(gitDir, 'log.txt'), [
    `headSha: ${latestGitStatus.headSha || 'unknown'}`,
    `baseBranch: ${latestGitStatus.baseBranch || 'unknown'}`,
    `remoteBranch: ${latestGitStatus.remoteBranch || 'unknown'}`,
    `capturedAt: ${state.updatedAt || nowIso()}`,
  ].join('\n') + '\n');
}
|
|
147
|
+
|
|
148
|
+
/**
 * Locate the most recent stage's git-status.json inside an archived run.
 * Stage directory names sort lexicographically, so the first hit in
 * reverse-sorted order is the latest stage that recorded git state.
 * @param {string} destDir - Archive destination directory.
 * @returns {object|null} Parsed git-status.json, or null when none exists.
 */
function findLatestStageGitStatus(destDir) {
  const stagesDir = path.join(destDir, 'stages');
  if (!fileExists(stagesDir)) return null;

  const newestFirst = fs.readdirSync(stagesDir).sort().reverse();
  for (const stageName of newestFirst) {
    const candidate = path.join(stagesDir, stageName, 'git-status.json');
    if (!fileExists(candidate)) continue;
    return readJson(candidate);
  }
  return null;
}
|
|
161
|
+
|
|
162
|
+
/**
 * Generate a human-readable archive summary.
 * Builds archive-summary.md in destDir from the sprint state plus the
 * already-archived stage scorecards and git/ text files. Sections:
 * Identity, Timeline, Stage Progress, Git Context, optional Halt Reason,
 * and optional Open Risks pulled from the last verify stage's producer.md.
 * @param {string} destDir - Archive destination directory (already populated).
 * @param {object} state - Parsed sprint.json state.
 */
function generateSummary(destDir, state) {
  const lines = [];

  // Identity
  lines.push(`# Sprint Archive: ${state.title || state.taskId}`);
  lines.push('');
  lines.push('## Identity');
  lines.push(`- Run ID: ${state.runId}`);
  lines.push(`- Task: ${state.taskId}`);
  lines.push(`- Status: ${state.status}`);
  lines.push(`- Archived at: ${nowIso()}`);
  lines.push('');

  // Timeline
  const created = state.createdAt || '';
  const updated = state.updatedAt || '';
  // If updatedAt is missing/unparseable, fall back to "now" for elapsed time;
  // with no createdAt at all the wall time is reported as 0.0 minutes.
  const elapsedMs = created ? (Date.parse(updated) || Date.now()) - Date.parse(created) : 0;
  const elapsedMin = (elapsedMs / 60_000).toFixed(1);
  lines.push('## Timeline');
  lines.push(`- Created: ${created}`);
  lines.push(`- Updated: ${updated}`);
  lines.push(`- Total wall time: ${elapsedMin} minutes`);
  lines.push(`- Final stage: ${state.currentStage} (index ${state.currentStageIndex})`);
  lines.push(`- Final round: ${state.currentRound}`);
  lines.push('');

  // Stage progress table from scorecards (stages without a scorecard are skipped)
  lines.push('## Stage Progress');
  const stagesDir = path.join(destDir, 'stages');
  const stageEntries = [];
  if (fileExists(stagesDir)) {
    const dirs = fs.readdirSync(stagesDir).sort();
    for (const dir of dirs) {
      const scorecardPath = path.join(stagesDir, dir, 'scorecard.json');
      if (fileExists(scorecardPath)) {
        const sc = readJson(scorecardPath);
        // '?' marks missing fields; blockerCount defaults to 0 (?? keeps a real 0).
        stageEntries.push({
          dir,
          outcome: sc.outcome || '?',
          round: sc.round || '?',
          approvals: sc.approvalCount ?? '?',
          blockers: sc.blockerCount ?? 0,
          reviewerA: sc.reviewerAVerdict || '?',
          reviewerB: sc.reviewerBVerdict || '?',
        });
      }
    }
  }

  if (stageEntries.length > 0) {
    lines.push('| Stage | Outcome | Round | Approvals | Blockers | Reviewer A | Reviewer B |');
    lines.push('|-------|---------|-------|-----------|----------|-----------|-----------|');
    for (const s of stageEntries) {
      lines.push(`| ${s.dir} | ${s.outcome} | ${s.round} | ${s.approvals}/2 | ${s.blockers} | ${s.reviewerA} | ${s.reviewerB} |`);
    }
  } else {
    lines.push('No stage scorecards found.');
  }
  lines.push('');

  // Git context — re-reads the text files written by captureGitInfo.
  // Lines starting with '#' are placeholder markers and are not summarized.
  lines.push('## Git Context');
  const gitDir = path.join(destDir, 'git');
  if (fileExists(path.join(gitDir, 'branch.txt'))) {
    lines.push(`- Branch: ${fs.readFileSync(path.join(gitDir, 'branch.txt'), 'utf8').trim()}`);
  }
  if (fileExists(path.join(gitDir, 'log.txt'))) {
    const log = fs.readFileSync(path.join(gitDir, 'log.txt'), 'utf8').trim();
    if (log && !log.startsWith('#')) {
      lines.push('');
      lines.push('### Commits');
      for (const line of log.split('\n').filter(Boolean)) {
        lines.push(`- ${line}`);
      }
    }
  }
  if (fileExists(path.join(gitDir, 'modified-files.txt'))) {
    const files = fs.readFileSync(path.join(gitDir, 'modified-files.txt'), 'utf8').trim();
    if (files && !files.startsWith('#')) {
      lines.push('');
      lines.push('### Modified Files');
      for (const f of files.split('\n').filter(Boolean)) {
        lines.push(`- ${f}`);
      }
    }
  }
  lines.push('');

  // Halt reason (only present on halted runs)
  if (state.haltReason) {
    lines.push('## Halt Reason');
    lines.push(`- Type: ${state.haltReason.type}`);
    lines.push(`- Details: ${state.haltReason.details}`);
    if (state.haltReason.blockers?.length) {
      lines.push('### Blockers');
      for (const b of state.haltReason.blockers) {
        lines.push(`- ${b}`);
      }
    }
    lines.push('');
  }

  // Open risks from the last verify stage's producer.md, if it has an
  // OPEN_RISKS section.
  const verifyDirs = stageEntries.filter((s) => s.dir.includes('verify'));
  if (verifyDirs.length > 0) {
    const verifyDir = path.join(stagesDir, verifyDirs[verifyDirs.length - 1].dir);
    const producerPath = path.join(verifyDir, 'producer.md');
    if (fileExists(producerPath)) {
      const producerText = fs.readFileSync(producerPath, 'utf8');
      const risksSection = extractSection(producerText, 'OPEN_RISKS');
      if (risksSection) {
        lines.push('## Open Risks (from verify producer)');
        lines.push(risksSection);
        lines.push('');
      }
    }
  }

  writeText(path.join(destDir, 'archive-summary.md'), lines.join('\n') + '\n');
}
|
|
285
|
+
|
|
286
|
+
/**
 * Extract a markdown section body by heading name.
 * Matches a level-2 heading (`## <heading>`) and returns everything up to
 * the next level-2 heading or the end of the document.
 * @param {string} text - Markdown document to search.
 * @param {string} heading - Heading text, matched literally (not as a regex).
 * @returns {string|null} Trimmed section body, or null when absent.
 */
function extractSection(text, heading) {
  // Escape regex metacharacters so headings like "Q&A (v1.2)" match
  // literally instead of being interpreted (or shifting capture groups).
  const escaped = heading.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  const pattern = new RegExp(`^##\\s+${escaped}\\s*\\n([\\s\\S]*?)(?=^##\\s|$(?!\\n))`, 'm');
  const match = text.match(pattern);
  return match ? match[1].trim() : null;
}
|
|
294
|
+
|
|
295
|
+
/**
 * Fallback recursive directory copy for Node < 16.7.
 * Applies the same shouldArchiveEntry filter as the fs.cpSync path.
 * @param {string} src - Directory to copy from.
 * @param {string} dest - Directory to copy into (created if missing).
 * @param {string} [rootDir] - Run root used for filter decisions; defaults to src.
 */
function copyDirRecursive(src, dest, rootDir = src) {
  ensureDir(dest);
  const entries = fs.readdirSync(src, { withFileTypes: true });
  for (const entry of entries) {
    const from = path.join(src, entry.name);
    if (!shouldArchiveEntry(from, rootDir)) continue;
    const to = path.join(dest, entry.name);
    if (entry.isDirectory()) {
      copyDirRecursive(from, to, rootDir);
    } else {
      fs.copyFileSync(from, to);
    }
  }
}
|