speccrew 0.7.4 → 0.7.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.speccrew/agents/speccrew-feature-designer.md +6 -0
- package/.speccrew/agents/speccrew-product-manager.md +6 -0
- package/.speccrew/agents/speccrew-system-deployer.md +6 -0
- package/.speccrew/agents/speccrew-system-designer.md +6 -0
- package/.speccrew/agents/speccrew-system-developer.md +6 -0
- package/.speccrew/agents/speccrew-task-worker.md +5 -0
- package/.speccrew/agents/speccrew-test-manager.md +6 -0
- package/.speccrew/skills/speccrew-feature-designer-orchestration/SKILL.md +117 -0
- package/.speccrew/skills/speccrew-feature-designer-orchestration/workflow.agentflow.xml +463 -0
- package/.speccrew/skills/speccrew-knowledge-bizs-api-analyze/workflow.agentflow.xml +119 -10
- package/.speccrew/skills/speccrew-knowledge-bizs-dispatch/SKILL.md +3 -3
- package/.speccrew/skills/speccrew-knowledge-bizs-init-features/SKILL.md +1 -1
- package/.speccrew/skills/speccrew-knowledge-bizs-init-features/scripts/generate-inventory.js +289 -0
- package/.speccrew/skills/speccrew-knowledge-bizs-module-classify/scripts/reindex-modules.js +1 -1
- package/.speccrew/skills/speccrew-knowledge-bizs-ui-analyze/workflow.agentflow.xml +170 -3
- package/.speccrew/skills/speccrew-knowledge-graph-write/SKILL.md +1 -1
- package/.speccrew/skills/speccrew-pm-module-initializer/SKILL.md +1 -1
- package/.speccrew/skills/speccrew-pm-module-initializer/workflow.agentflow.xml +1 -1
- package/.speccrew/skills/speccrew-product-manager-orchestration/SKILL.md +111 -0
- package/.speccrew/skills/speccrew-product-manager-orchestration/workflow.agentflow.xml +534 -0
- package/.speccrew/skills/speccrew-system-deployer-orchestration/SKILL.md +91 -0
- package/.speccrew/skills/speccrew-system-deployer-orchestration/workflow.agentflow.xml +309 -0
- package/.speccrew/skills/speccrew-system-designer-orchestration/SKILL.md +102 -0
- package/.speccrew/skills/speccrew-system-designer-orchestration/workflow.agentflow.xml +375 -0
- package/.speccrew/skills/speccrew-system-developer-orchestration/SKILL.md +110 -0
- package/.speccrew/skills/speccrew-system-developer-orchestration/workflow.agentflow.xml +422 -0
- package/.speccrew/skills/speccrew-task-worker-execution/SKILL.md +106 -0
- package/.speccrew/skills/speccrew-task-worker-execution/workflow.agentflow.xml +177 -0
- package/.speccrew/skills/speccrew-test-manager-orchestration/SKILL.md +106 -0
- package/.speccrew/skills/speccrew-test-manager-orchestration/workflow.agentflow.xml +442 -0
- package/package.json +1 -1
|
@@ -0,0 +1,289 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
* generate-inventory.js
|
|
4
|
+
*
|
|
5
|
+
* Generate features.json inventory for a single platform.
|
|
6
|
+
* This script is called by speccrew-knowledge-bizs-init-features workflow.
|
|
7
|
+
*
|
|
8
|
+
* Usage:
|
|
9
|
+
* node generate-inventory.js --entryDirsFile <path> --outputDir <path>
|
|
10
|
+
*
|
|
11
|
+
* Arguments:
|
|
12
|
+
* --entryDirsFile Path to entry-dirs JSON file
|
|
13
|
+
* --outputDir Output directory for features.json
|
|
14
|
+
*/
|
|
15
|
+
|
|
16
|
+
const fs = require('fs');
|
|
17
|
+
const path = require('path');
|
|
18
|
+
|
|
19
|
+
// Parse command line arguments
/**
 * Parse `--key value` pairs and bare `--flag` switches from process.argv.
 *
 * A `--key` followed by a token that is truthy and does not itself start
 * with `--` consumes that token as its value; otherwise the key becomes a
 * boolean `true` flag.
 *
 * @returns {Object} map of option name (without the leading dashes) to its
 *          string value, or `true` for value-less flags
 */
function parseArgs() {
  const parsed = {};
  const argv = process.argv.slice(2);

  let idx = 0;
  while (idx < argv.length) {
    const token = argv[idx];
    if (token.startsWith('--')) {
      const optionName = token.slice(2);
      const next = argv[idx + 1];
      // Truthiness check mirrors the CLI convention: an empty string or a
      // following `--option` means this switch has no value.
      if (next && !next.startsWith('--')) {
        parsed[optionName] = next;
        idx += 2;
        continue;
      }
      parsed[optionName] = true;
    }
    idx += 1;
  }

  return parsed;
}
|
|
40
|
+
|
|
41
|
+
// Generate timestamp in format YYYY-MM-DD-HHMMSS
/**
 * Build a local-time stamp shaped like `YYYY-MM-DD-HHMMSS`.
 *
 * @returns {string} e.g. "2024-03-07-091530"
 */
function generateTimestamp() {
  const pad2 = (n) => String(n).padStart(2, '0');
  const t = new Date();
  const datePart = `${t.getFullYear()}-${pad2(t.getMonth() + 1)}-${pad2(t.getDate())}`;
  const timePart = `${pad2(t.getHours())}${pad2(t.getMinutes())}${pad2(t.getSeconds())}`;
  return `${datePart}-${timePart}`;
}
|
|
52
|
+
|
|
53
|
+
// Convert absolute path to project-relative path
/**
 * Convert an absolute path to a path relative to the project root.
 *
 * Both inputs are normalized to forward slashes so Windows and POSIX
 * paths are handled uniformly. If the path does not live under the
 * project root, it is returned unchanged (normalized).
 *
 * @param {string} absolutePath - absolute path to a file or directory
 * @param {string} projectRoot - absolute path of the project root
 * @returns {string} project-relative path without a leading slash, or the
 *          normalized absolute path when it is outside the root
 */
function toRelativePath(absolutePath, projectRoot) {
  // Normalize paths to forward slashes for cross-platform comparison
  const normalizedAbs = path.normalize(absolutePath).replace(/\\/g, '/');
  const normalizedRoot = path
    .normalize(projectRoot)
    .replace(/\\/g, '/')
    .replace(/\/+$/, ''); // drop trailing slashes so the boundary check below is exact

  if (normalizedAbs === normalizedRoot) {
    return '';
  }

  // Fix: require a path-separator boundary after the root. A plain
  // startsWith(normalizedRoot) would wrongly match "/a/bb/x" against
  // root "/a/b" and return the mangled relative path "b/x".
  if (normalizedAbs.startsWith(normalizedRoot + '/')) {
    return normalizedAbs.slice(normalizedRoot.length).replace(/^\/+/, '');
  }

  return normalizedAbs;
}
|
|
65
|
+
|
|
66
|
+
// Generate document path for a feature
// Format: speccrew-workspace/knowledges/bizs/{platformId}/{module}/{subpath}/{fileName}.md
/**
 * Build the knowledge-base markdown document path for a source file.
 *
 * The project-relative source path is expected to look like
 * `{platformSourceRoot}/{module}/{subpath}/{file}`; anything between the
 * first occurrence of the module directory and the file itself becomes the
 * subpath. When the module segment cannot be found, the subpath is empty.
 *
 * @param {string} platformId - e.g. "backend-fastapi", "web-vue3"
 * @param {string} module - business module directory name
 * @param {string} sourcePath - absolute path of the source file
 * @param {string} projectRoot - absolute project root, used to relativize
 * @returns {string} forward-slash document path under speccrew-workspace
 */
function generateDocumentPath(platformId, module, sourcePath, projectRoot) {
  // Filename without extension becomes "{name}.md"
  const docName = `${path.basename(sourcePath, path.extname(sourcePath))}.md`;

  // Locate the first occurrence of the module segment in the relative path
  const segments = toRelativePath(sourcePath, projectRoot).split('/');
  const moduleIndex = segments.indexOf(module);

  // Directories strictly between the module and the filename (may be empty)
  const between =
    moduleIndex >= 0 ? segments.slice(moduleIndex + 1, segments.length - 1) : [];

  return [
    'speccrew-workspace',
    'knowledges',
    'bizs',
    platformId,
    module,
    ...between,
    docName,
  ].join('/');
}
|
|
107
|
+
|
|
108
|
+
// Main function
/**
 * Entry point: read the entry-dirs JSON, scan each module's entry
 * directories for platform source files, and write a
 * `features-{platformId}.json` inventory into `--outputDir`.
 *
 * Exits with code 1 on any unrecoverable error: missing arguments,
 * missing/unparseable entry-dirs file, invalid structure, or a failed
 * output write. Per-directory scan failures are warnings, not fatal.
 */
function main() {
  const args = parseArgs();

  // Validate required arguments
  if (!args.entryDirsFile) {
    console.error('Error: --entryDirsFile is required');
    process.exit(1);
  }

  if (!args.outputDir) {
    console.error('Error: --outputDir is required');
    process.exit(1);
  }

  const entryDirsFile = path.resolve(args.entryDirsFile);
  const outputDir = path.resolve(args.outputDir);

  // Check if entry-dirs file exists
  if (!fs.existsSync(entryDirsFile)) {
    console.error(`Error: Entry-dirs file not found: ${entryDirsFile}`);
    process.exit(1);
  }

  // Read and parse entry-dirs JSON
  let entryDirsData;
  try {
    const content = fs.readFileSync(entryDirsFile, 'utf-8');
    entryDirsData = JSON.parse(content);
  } catch (error) {
    console.error(`Error: Failed to parse entry-dirs file: ${error.message}`);
    process.exit(1);
  }

  // Validate entry-dirs structure
  if (!entryDirsData.modules || !Array.isArray(entryDirsData.modules) || entryDirsData.modules.length === 0) {
    console.error('Error: entry-dirs JSON must have non-empty modules array');
    process.exit(1);
  }

  // Extract platform info from entry-dirs data (with safe fallbacks)
  const platformId = entryDirsData.platformId || 'unknown-platform';
  const platformName = entryDirsData.platformName || platformId;
  const platformType = entryDirsData.platformType || 'unknown';
  const platformSubtype = entryDirsData.platformSubtype || '';
  const techStack = entryDirsData.techStack || [];
  const sourceRoot = entryDirsData.sourcePath || '';

  // Project root is assumed to be three levels above outputDir, i.e. the
  // parent of speccrew-workspace — TODO confirm against workspace layout.
  const projectRoot = path.resolve(outputDir, '..', '..', '..');

  // Recursively collect source files for this platform under `dir`.
  // Hoisted out of the module loop (the previous version re-created this
  // closure for every entryDir of every module); scan errors are downgraded
  // to warnings so one unreadable directory does not abort the inventory.
  const scanDirectory = (dir) => {
    const files = [];
    try {
      const entries = fs.readdirSync(dir, { withFileTypes: true });
      for (const entry of entries) {
        const fullPath = path.join(dir, entry.name);
        if (entry.isDirectory()) {
          // Recursively scan subdirectories
          files.push(...scanDirectory(fullPath));
        } else if (entry.isFile()) {
          // Keep only extensions valid for this platform type
          const ext = path.extname(entry.name).toLowerCase();
          if (isValidSourceFile(ext, platformType)) {
            files.push(fullPath);
          }
        }
      }
    } catch (error) {
      console.warn(`Warning: Failed to scan directory ${dir}: ${error.message}`);
    }
    return files;
  };

  // Generate features for each module
  const features = [];
  const modules = [];

  for (const moduleData of entryDirsData.modules) {
    const moduleName = moduleData.name;
    const entryDirs = moduleData.entryDirs || [];

    let moduleFeatureCount = 0;

    for (const entryDir of entryDirs) {
      for (const sourceFile of scanDirectory(entryDir)) {
        features.push({
          fileName: path.basename(sourceFile, path.extname(sourceFile)),
          sourcePath: toRelativePath(sourceFile, projectRoot),
          documentPath: generateDocumentPath(platformId, moduleName, sourceFile, projectRoot),
          module: moduleName,
          analyzed: false,
          startedAt: null,
          completedAt: null,
          analysisNotes: null
        });

        moduleFeatureCount++;
      }
    }

    modules.push({
      name: moduleName,
      featureCount: moduleFeatureCount
    });
  }

  // Build output JSON
  const outputData = {
    platformName: platformName,
    platformType: platformType,
    platformSubtype: platformSubtype,
    platformId: platformId,
    sourcePath: sourceRoot,
    techStack: techStack,
    modules: modules,
    totalFiles: features.length,
    analyzedCount: 0,
    pendingCount: features.length,
    generatedAt: generateTimestamp(),
    features: features
  };

  // Ensure output directory exists. Recursive mkdir is a no-op when the
  // directory already exists, avoiding the existsSync/mkdirSync race of
  // the previous check-then-create sequence.
  fs.mkdirSync(outputDir, { recursive: true });

  // Write output file
  const outputFile = path.join(outputDir, `features-${platformId}.json`);
  try {
    fs.writeFileSync(outputFile, JSON.stringify(outputData, null, 2), 'utf-8');
    console.log(`Generated: ${outputFile}`);
    console.log(`  Platform: ${platformName} (${platformId})`);
    console.log(`  Type: ${platformType}${platformSubtype ? '/' + platformSubtype : ''}`);
    console.log(`  Features: ${features.length}`);
    console.log(`  Modules: ${modules.map(m => m.name).join(', ')}`);
  } catch (error) {
    console.error(`Error: Failed to write output file: ${error.message}`);
    process.exit(1);
  }
}
|
|
265
|
+
|
|
266
|
+
// Check if file extension is valid for the platform type
/**
 * Decide whether a (lowercased, dot-prefixed) file extension counts as a
 * source file for the given platform type. Unknown platform types accept
 * the union of every known extension set.
 *
 * @param {string} ext - extension including the leading dot, e.g. ".ts"
 * @param {string} platformType - "backend" | "web" | "mobile" | "desktop" | other
 * @returns {boolean} true when the extension is recognized for the platform
 */
function isValidSourceFile(ext, platformType) {
  const extsByPlatform = {
    backend: ['.java', '.kt', '.py', '.go', '.rs', '.cs', '.php', '.rb'],
    web: ['.vue', '.tsx', '.jsx', '.ts', '.js', '.svelte'],
    mobile: ['.vue', '.tsx', '.jsx', '.ts', '.js', '.dart', '.swift', '.kt', '.java'],
    desktop: ['.vue', '.tsx', '.jsx', '.ts', '.js', '.cs', '.xaml']
  };

  // Own-property check so inherited keys (e.g. "toString") fall through
  // to the default branch, matching the original switch semantics.
  if (Object.hasOwn(extsByPlatform, platformType)) {
    return extsByPlatform[platformType].includes(ext);
  }

  // Default: accept any extension known to any platform type
  return Object.values(extsByPlatform).some((list) => list.includes(ext));
}
|
|
287
|
+
|
|
288
|
+
// Run main function
|
|
289
|
+
main();
|
|
@@ -3,7 +3,7 @@
|
|
|
3
3
|
* reindex-modules.js - Deterministic script to re-extract module names from existing features JSON with updated exclude_dirs
|
|
4
4
|
*
|
|
5
5
|
* Usage:
|
|
6
|
-
* node reindex-modules.js --featuresFile "path/to/features-backend-
|
|
6
|
+
* node reindex-modules.js --featuresFile "path/to/features-backend-fastapi.json" --projectRoot "d:\dev\ruoyi-vue-pro"
|
|
7
7
|
*
|
|
8
8
|
* Optional parameters:
|
|
9
9
|
* --platformType "backend" - If not provided, read from features JSON's platformType field
|
|
@@ -376,12 +376,179 @@
|
|
|
376
376
|
<block type="checkpoint" id="CP7" name="marker-written" desc="Marker file written">
|
|
377
377
|
<field name="verify" value="file.exists(${completed_dir}/${markerName}.done.json)"/>
|
|
378
378
|
</block>
|
|
379
|
-
|
|
379
|
+
|
|
380
|
+
<!-- Step 7b: Construct and Append Graph Data -->
|
|
381
|
+
<!-- Construct Graph Nodes from UI Analysis -->
|
|
382
|
+
<block type="task" id="B32a" action="analyze" desc="Construct page node">
|
|
383
|
+
<field name="type" value="page"/>
|
|
384
|
+
<field name="id" value="page-${module}-${fileName}"/>
|
|
385
|
+
<field name="name" value="${fileName}"/>
|
|
386
|
+
<field name="module" value="${module}"/>
|
|
387
|
+
<field name="sourcePath" value="${sourcePath}"/>
|
|
388
|
+
<field name="documentPath" value="${documentPath}"/>
|
|
389
|
+
<field name="platform" value="${platform_type}-${platform_subtype}"/>
|
|
390
|
+
<field name="output" var="pageNode"/>
|
|
391
|
+
</block>
|
|
392
|
+
|
|
393
|
+
<block type="task" id="B32b" action="analyze" desc="Construct component nodes">
|
|
394
|
+
<field name="components" value="${analysisResult.components}"/>
|
|
395
|
+
<field name="module" value="${module}"/>
|
|
396
|
+
<field name="documentPath" value="${documentPath}"/>
|
|
397
|
+
<field name="output" var="componentNodes"/>
|
|
398
|
+
</block>
|
|
399
|
+
|
|
400
|
+
<block type="task" id="B32c" action="analyze" desc="Construct route nodes">
|
|
401
|
+
<field name="routes" value="${analysisResult.routes}"/>
|
|
402
|
+
<field name="module" value="${module}"/>
|
|
403
|
+
<field name="sourcePath" value="${sourcePath}"/>
|
|
404
|
+
<field name="output" var="routeNodes"/>
|
|
405
|
+
</block>
|
|
406
|
+
|
|
407
|
+
<!-- Construct Graph Edges -->
|
|
408
|
+
<block type="task" id="B32d" action="analyze" desc="Construct navigates edges">
|
|
409
|
+
<field name="pageId" value="page-${module}-${fileName}"/>
|
|
410
|
+
<field name="navigations" value="${analysisResult.navigations}"/>
|
|
411
|
+
<field name="module" value="${module}"/>
|
|
412
|
+
<field name="output" var="navigatesEdges"/>
|
|
413
|
+
</block>
|
|
414
|
+
|
|
415
|
+
<block type="task" id="B32e" action="analyze" desc="Construct contains edges">
|
|
416
|
+
<field name="pageId" value="page-${module}-${fileName}"/>
|
|
417
|
+
<field name="components" value="${analysisResult.components}"/>
|
|
418
|
+
<field name="module" value="${module}"/>
|
|
419
|
+
<field name="output" var="containsEdges"/>
|
|
420
|
+
</block>
|
|
421
|
+
|
|
422
|
+
<block type="task" id="B32f" action="analyze" desc="Construct calls-api edges">
|
|
423
|
+
<field name="pageId" value="page-${module}-${fileName}"/>
|
|
424
|
+
<field name="apiCalls" value="${analysisResult.apis}"/>
|
|
425
|
+
<field name="module" value="${module}"/>
|
|
426
|
+
<field name="output" var="callsApiEdges"/>
|
|
427
|
+
</block>
|
|
428
|
+
|
|
429
|
+
<!-- Append Graph Data to nodes.json and edges.json -->
|
|
430
|
+
<block type="task" id="B32g" action="run-script" desc="Append graph nodes to nodes.json">
|
|
431
|
+
<field name="command">node -e "
|
|
432
|
+
const fs = require('fs');
|
|
433
|
+
const path = require('path');
|
|
434
|
+
const graphDir = path.join('${workspace_path}', 'speccrew-workspace', 'knowledges', 'bizs', 'graph');
|
|
435
|
+
const nodesFile = path.join(graphDir, 'nodes.json');
|
|
436
|
+
|
|
437
|
+
// Ensure directory exists
|
|
438
|
+
if (!fs.existsSync(graphDir)) {
|
|
439
|
+
fs.mkdirSync(graphDir, { recursive: true });
|
|
440
|
+
}
|
|
441
|
+
|
|
442
|
+
// Read existing nodes or initialize empty array
|
|
443
|
+
let existingNodes = [];
|
|
444
|
+
if (fs.existsSync(nodesFile)) {
|
|
445
|
+
try {
|
|
446
|
+
existingNodes = JSON.parse(fs.readFileSync(nodesFile, 'utf8'));
|
|
447
|
+
if (!Array.isArray(existingNodes)) existingNodes = [];
|
|
448
|
+
} catch (e) {
|
|
449
|
+
existingNodes = [];
|
|
450
|
+
}
|
|
451
|
+
}
|
|
452
|
+
|
|
453
|
+
// Parse new nodes from input
|
|
454
|
+
const newNodes = [];
|
|
455
|
+
if ('${pageNode}') {
|
|
456
|
+
try {
|
|
457
|
+
const page = JSON.parse('${pageNode}');
|
|
458
|
+
if (page && page.id) newNodes.push(page);
|
|
459
|
+
} catch (e) {}
|
|
460
|
+
}
|
|
461
|
+
if ('${componentNodes}') {
|
|
462
|
+
try {
|
|
463
|
+
const comps = JSON.parse('${componentNodes}');
|
|
464
|
+
if (Array.isArray(comps)) newNodes.push(...comps);
|
|
465
|
+
} catch (e) {}
|
|
466
|
+
}
|
|
467
|
+
if ('${routeNodes}') {
|
|
468
|
+
try {
|
|
469
|
+
const routes = JSON.parse('${routeNodes}');
|
|
470
|
+
if (Array.isArray(routes)) newNodes.push(...routes);
|
|
471
|
+
} catch (e) {}
|
|
472
|
+
}
|
|
473
|
+
|
|
474
|
+
// Deduplicate by id
|
|
475
|
+
const nodeMap = new Map();
|
|
476
|
+
existingNodes.forEach(n => nodeMap.set(n.id, n));
|
|
477
|
+
newNodes.forEach(n => nodeMap.set(n.id, n));
|
|
478
|
+
|
|
479
|
+
// Write back
|
|
480
|
+
fs.writeFileSync(nodesFile, JSON.stringify(Array.from(nodeMap.values()), null, 2));
|
|
481
|
+
console.log('Nodes appended:', newNodes.length);
|
|
482
|
+
"
|
|
483
|
+
</field>
|
|
484
|
+
</block>
|
|
485
|
+
|
|
486
|
+
<block type="task" id="B32h" action="run-script" desc="Append graph edges to edges.json">
|
|
487
|
+
<field name="command">node -e "
|
|
488
|
+
const fs = require('fs');
|
|
489
|
+
const path = require('path');
|
|
490
|
+
const graphDir = path.join('${workspace_path}', 'speccrew-workspace', 'knowledges', 'bizs', 'graph');
|
|
491
|
+
const edgesFile = path.join(graphDir, 'edges.json');
|
|
492
|
+
|
|
493
|
+
// Read existing edges or initialize empty array
|
|
494
|
+
let existingEdges = [];
|
|
495
|
+
if (fs.existsSync(edgesFile)) {
|
|
496
|
+
try {
|
|
497
|
+
existingEdges = JSON.parse(fs.readFileSync(edgesFile, 'utf8'));
|
|
498
|
+
if (!Array.isArray(existingEdges)) existingEdges = [];
|
|
499
|
+
} catch (e) {
|
|
500
|
+
existingEdges = [];
|
|
501
|
+
}
|
|
502
|
+
}
|
|
503
|
+
|
|
504
|
+
// Parse new edges from input
|
|
505
|
+
const newEdges = [];
|
|
506
|
+
if ('${navigatesEdges}') {
|
|
507
|
+
try {
|
|
508
|
+
const edges = JSON.parse('${navigatesEdges}');
|
|
509
|
+
if (Array.isArray(edges)) newEdges.push(...edges);
|
|
510
|
+
} catch (e) {}
|
|
511
|
+
}
|
|
512
|
+
if ('${containsEdges}') {
|
|
513
|
+
try {
|
|
514
|
+
const edges = JSON.parse('${containsEdges}');
|
|
515
|
+
if (Array.isArray(edges)) newEdges.push(...edges);
|
|
516
|
+
} catch (e) {}
|
|
517
|
+
}
|
|
518
|
+
if ('${callsApiEdges}') {
|
|
519
|
+
try {
|
|
520
|
+
const edges = JSON.parse('${callsApiEdges}');
|
|
521
|
+
if (Array.isArray(edges)) newEdges.push(...edges);
|
|
522
|
+
} catch (e) {}
|
|
523
|
+
}
|
|
524
|
+
|
|
525
|
+
// Deduplicate by composite key (source+target+type)
|
|
526
|
+
const edgeMap = new Map();
|
|
527
|
+
existingEdges.forEach(e => {
|
|
528
|
+
const key = (e.source || '') + '|' + (e.target || '') + '|' + (e.type || '');
|
|
529
|
+
edgeMap.set(key, e);
|
|
530
|
+
});
|
|
531
|
+
newEdges.forEach(e => {
|
|
532
|
+
const key = (e.source || '') + '|' + (e.target || '') + '|' + (e.type || '');
|
|
533
|
+
edgeMap.set(key, e);
|
|
534
|
+
});
|
|
535
|
+
|
|
536
|
+
// Write back
|
|
537
|
+
fs.writeFileSync(edgesFile, JSON.stringify(Array.from(edgeMap.values()), null, 2));
|
|
538
|
+
console.log('Edges appended:', newEdges.length);
|
|
539
|
+
"
|
|
540
|
+
</field>
|
|
541
|
+
</block>
|
|
542
|
+
|
|
543
|
+
<block type="checkpoint" id="CP-graph" name="graph-written" desc="Graph data written">
|
|
544
|
+
<field name="verify" value="true"/>
|
|
545
|
+
</block>
|
|
546
|
+
|
|
380
547
|
<block type="event" id="E10" action="log" level="info" desc="Log marker status">
|
|
381
|
-
<field name="message" value="Step 7 Status: COMPLETED - Done marker
|
|
548
|
+
<field name="message" value="Step 7 Status: COMPLETED - Done marker and graph data written to ${completed_dir}/${markerName}.done.json"/>
|
|
382
549
|
</block>
|
|
383
550
|
|
|
384
|
-
<!-- ==================== FINAL OUTPUT
|
|
551
|
+
<!-- ==================== FINAL OUTPUT ==================== -->
|
|
385
552
|
<block type="output" id="O1" desc="UI feature analysis output results">
|
|
386
553
|
<field name="status" value="success"/>
|
|
387
554
|
<field name="feature_name" from="${fileName}"/>
|
|
@@ -25,7 +25,7 @@ Dispatch Agent (speccrew-knowledge-dispatch)
|
|
|
25
25
|
| Variable | Type | Description | Example |
|
|
26
26
|
|----------|------|-------------|---------|
|
|
27
27
|
| `{{action}}` | string | Write action to perform | `"batch-write"`, `"init-module"`, `"update-node"`, `"remove-node"` |
|
|
28
|
-
| `{{platformId}}` | string | Platform identifier for directory segregation | `"backend-
|
|
28
|
+
| `{{platformId}}` | string | Platform identifier for directory segregation | `"backend-fastapi"`, `"backend-spring"`, `"web-vue"`, `"mobile-uniapp"` |
|
|
29
29
|
| `{{module}}` | string | Target business module | `"system"`, `"trade"`, `"infra"` |
|
|
30
30
|
| `{{graphData}}` | object | Graph data from skill output (for batch-write) | `{ "nodes": [...], "edges": [...] }` |
|
|
31
31
|
| `{{nodeId}}` | string | Node ID (for update-node / remove-node) | `"api-system-user-list"` |
|
|
@@ -39,7 +39,7 @@ Generate analyze task plan for a single business module. Reads features-*.json,
|
|
|
39
39
|
| `features_file` | string | Yes | Path to the platform's features-{platform}.json file |
|
|
40
40
|
| `output_path` | string | Yes | Knowledge base output root path (e.g., speccrew-workspace/knowledges) |
|
|
41
41
|
| `completed_dir` | string | Yes | Marker file output directory for api-analyze .done.json markers. Value from PM Agent: `{sync_state_bizs_dir}/completed` |
|
|
42
|
-
| `sourceFile` | string | Yes | Features JSON filename (e.g., "features-backend-
|
|
42
|
+
| `sourceFile` | string | Yes | Features JSON filename (e.g., "features-backend-fastapi.json"), used for api-analyze marking |
|
|
43
43
|
| `language` | string | Yes | Output language (zh / en) |
|
|
44
44
|
| `workspace_path` | string | Yes | Workspace root path for constructing absolute paths |
|
|
45
45
|
|
|
@@ -14,7 +14,7 @@
|
|
|
14
14
|
<field name="features_file" required="true" type="string" desc="Path to the platform's features-{platform}.json file"/>
|
|
15
15
|
<field name="output_path" required="true" type="string" desc="Knowledge base output root path"/>
|
|
16
16
|
<field name="completed_dir" required="true" type="string" desc="Marker file output directory for api-analyze .done.json markers"/>
|
|
17
|
-
<field name="sourceFile" required="true" type="string" desc="Features JSON filename (e.g., features-backend-
|
|
17
|
+
<field name="sourceFile" required="true" type="string" desc="Features JSON filename (e.g., features-backend-fastapi.json)"/>
|
|
18
18
|
<field name="language" required="true" type="string" desc="Output language (zh / en)"/>
|
|
19
19
|
<field name="workspace_path" required="true" type="string" desc="Workspace root path for constructing absolute paths"/>
|
|
20
20
|
</block>
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
---
|
|
2
|
+
name: speccrew-product-manager-orchestration
|
|
3
|
+
version: 1.0.0
|
|
4
|
+
description: Product Manager 的核心编排技能,负责需求澄清、复杂度评估、PRD 生成协调与验证。处理简单需求(单 PRD)和复杂需求(Master-Sub PRD)两种工作流路径。
|
|
5
|
+
tools: Read, Write, Glob, Grep, Bash, Agent
|
|
6
|
+
---
|
|
7
|
+
|
|
8
|
+
> **⚠️ MANDATORY EXECUTION PROTOCOL — READ BEFORE EXECUTING ANY BLOCK**
|
|
9
|
+
>
|
|
10
|
+
> **Step 1**: Load XML workflow specification: `speccrew-workspace/docs/rules/agentflow-spec.md` — this defines all block types and action-to-tool mappings
|
|
11
|
+
>
|
|
12
|
+
> **Step 2**: Execute this SKILL.md's XML workflow **block by block in document order**. For EVERY block, you MUST follow this 3-step cycle:
|
|
13
|
+
>
|
|
14
|
+
> ```
|
|
15
|
+
> 📋 Block [ID] (action=[action]) — [desc]
|
|
16
|
+
> 🔧 Tool: [which IDE tool to call]
|
|
17
|
+
> ✅ Result: [output or status]
|
|
18
|
+
> ```
|
|
19
|
+
>
|
|
20
|
+
> Action-to-tool mapping:
|
|
21
|
+
> - `action="run-skill"` → Invoke via **Skill tool** (pass the `<field name="skill">` value EXACTLY)
|
|
22
|
+
> - `action="run-script"` → Execute via **Terminal tool** (pass the `<field name="command">` value EXACTLY)
|
|
23
|
+
> - `action="dispatch-to-worker"` → Create **Task** via **Task tool** for `speccrew-task-worker`
|
|
24
|
+
> - `action="read-file"` → Read via **Read tool**
|
|
25
|
+
> - `action="log"` → Output message directly
|
|
26
|
+
> - `action="confirm"` → Present to user and wait for response
|
|
27
|
+
>
|
|
28
|
+
> **Step 3**: Execute ALL blocks sequentially without pausing (only stop at explicit `<event action="confirm">` blocks)
|
|
29
|
+
|
|
30
|
+
# Product Manager Orchestration
|
|
31
|
+
|
|
32
|
+
Product Manager 的核心编排技能,负责:
|
|
33
|
+
|
|
34
|
+
1. **Pipeline Progress Management** - 创建/定位迭代目录,管理工作流进度
|
|
35
|
+
2. **Knowledge Base Detection** - 检测知识库状态并按需初始化
|
|
36
|
+
3. **Complexity Assessment** - 评估需求复杂度,决定简单/复杂路径
|
|
37
|
+
4. **Requirement Clarification** - 调用澄清技能,收集需求细节
|
|
38
|
+
5. **PRD Generation Orchestration** - 协调 PRD 生成(简单路径直接调用,复杂路径分发 Worker)
|
|
39
|
+
6. **Verification & Confirmation** - 验证 PRD 完整性,等待用户确认
|
|
40
|
+
|
|
41
|
+
## Invocation Method
|
|
42
|
+
|
|
43
|
+
**CRITICAL**: This skill is loaded directly by Product Manager Agent — do NOT invoke via Worker Agent.
|
|
44
|
+
|
|
45
|
+
```xml
|
|
46
|
+
<block type="task" action="run-skill" desc="Product Manager orchestration workflow">
|
|
47
|
+
<field name="skill">speccrew-product-manager-orchestration</field>
|
|
48
|
+
</block>
|
|
49
|
+
```
|
|
50
|
+
|
|
51
|
+
## Input Parameters
|
|
52
|
+
|
|
53
|
+
| Parameter | Type | Required | Description |
|
|
54
|
+
|-----------|------|----------|-------------|
|
|
55
|
+
| `user_requirement` | string | Yes | 用户需求描述或需求文档路径 |
|
|
56
|
+
| `workspace_root` | string | Yes | speccrew-workspace 根目录路径 |
|
|
57
|
+
| `source_path` | string | No | 项目源代码根目录(从 .speccrewrc 读取) |
|
|
58
|
+
| `language` | string | No | 用户语言(默认自动检测) |
|
|
59
|
+
|
|
60
|
+
## Output
|
|
61
|
+
|
|
62
|
+
- `status` - 执行状态 (success / partial / failed)
|
|
63
|
+
- `prd_files` - 生成的 PRD 文件列表
|
|
64
|
+
- `feature_list` - 功能清单文件路径
|
|
65
|
+
- `workflow_stage` - 当前工作流阶段状态
|
|
66
|
+
- `next_agent` - 下一步建议的 Agent
|
|
67
|
+
|
|
68
|
+
---
|
|
69
|
+
|
|
70
|
+
## AgentFlow Definition
|
|
71
|
+
|
|
72
|
+
<!-- @agentflow: workflow.agentflow.xml -->
|
|
73
|
+
|
|
74
|
+
---
|
|
75
|
+
|
|
76
|
+
## CONTINUOUS EXECUTION RULES
|
|
77
|
+
|
|
78
|
+
This skill MUST execute tasks continuously without unnecessary interruptions.
|
|
79
|
+
|
|
80
|
+
### FORBIDDEN Interruptions
|
|
81
|
+
|
|
82
|
+
1. DO NOT ask user "Should I continue?" after completing a subtask
|
|
83
|
+
2. DO NOT suggest "Let me split this into batches" or "Let's do this in parts"
|
|
84
|
+
3. DO NOT pause to list what you plan to do next — just do it
|
|
85
|
+
4. DO NOT ask for confirmation before generating output files
|
|
86
|
+
5. DO NOT warn about "large number of files" — proceed with generation
|
|
87
|
+
6. DO NOT offer "Should I proceed with the remaining items?"
|
|
88
|
+
|
|
89
|
+
### When to Pause (ONLY these cases)
|
|
90
|
+
|
|
91
|
+
1. CHECKPOINT gates defined in workflow (user confirmation required by design)
|
|
92
|
+
2. Ambiguous requirements that genuinely need clarification
|
|
93
|
+
3. Unrecoverable errors that prevent further progress
|
|
94
|
+
4. Security-sensitive operations (e.g., deleting existing files)
|
|
95
|
+
|
|
96
|
+
### Orchestrator Principle
|
|
97
|
+
|
|
98
|
+
This agent is an **orchestrator/dispatcher**. Key constraints:
|
|
99
|
+
|
|
100
|
+
| Phase | Skill | ORCHESTRATOR Rule |
|
|
101
|
+
|-------|-------|-------------------|
|
|
102
|
+
| Phase 3 | `speccrew-pm-requirement-clarify` | DO NOT clarify requirements yourself — Skill handles all clarification rounds |
|
|
103
|
+
| Phase 4a | `speccrew-pm-requirement-model` | DO NOT perform ISA-95 analysis or module decomposition yourself |
|
|
104
|
+
| Phase 4b | `speccrew-pm-requirement-analysis` | DO NOT generate Master PRD or Dispatch Plan yourself |
|
|
105
|
+
| Phase 5 | `speccrew-pm-sub-prd-generate` (via workers) | DO NOT generate Sub-PRD content yourself |
|
|
106
|
+
| Phase 6 | PM Agent verification | DO NOT modify PRD content — only verify and present |
|
|
107
|
+
|
|
108
|
+
**UNIVERSAL ABORT RULE:**
|
|
109
|
+
- IF ANY skill fails → STOP and report to user
|
|
110
|
+
- DO NOT generate content as fallback
|
|
111
|
+
- DO NOT proceed to next phase
|