speccrew 0.7.5 → 0.7.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.speccrew/skills/speccrew-knowledge-bizs-api-analyze/workflow.agentflow.xml +119 -10
- package/.speccrew/skills/speccrew-knowledge-bizs-dispatch/SKILL.md +3 -3
- package/.speccrew/skills/speccrew-knowledge-bizs-dispatch/scripts/batch-orchestrator.js +294 -0
- package/.speccrew/skills/speccrew-knowledge-bizs-dispatch/workflow.agentflow.xml +4 -0
- package/.speccrew/skills/speccrew-knowledge-bizs-identify-entries/workflow.agentflow.xml +6 -2
- package/.speccrew/skills/speccrew-knowledge-bizs-init-features/SKILL.md +1 -1
- package/.speccrew/skills/speccrew-knowledge-bizs-init-features/scripts/generate-inventory.js +289 -0
- package/.speccrew/skills/speccrew-knowledge-bizs-module-classify/scripts/reindex-modules.js +1 -1
- package/.speccrew/skills/speccrew-knowledge-bizs-ui-analyze/workflow.agentflow.xml +170 -3
- package/.speccrew/skills/speccrew-knowledge-graph-write/SKILL.md +1 -1
- package/.speccrew/skills/speccrew-pm-module-initializer/SKILL.md +1 -1
- package/.speccrew/skills/speccrew-pm-module-initializer/workflow.agentflow.xml +1 -1
- package/package.json +1 -1
@@ -15,6 +15,7 @@
     <field name="completed_dir" required="true" type="string" desc="Marker files output directory"/>
     <field name="sourceFile" required="true" type="string" desc="Source features JSON file name"/>
     <field name="language" required="true" type="string" desc="Target language for content"/>
+    <field name="workspace_path" required="true" type="string" desc="Workspace root path"/>
   </block>
 
   <!-- ==================== CONSTRAINT RULES ==================== -->
@@ -248,7 +249,7 @@
     <field name="text">
       The output document file MUST be created at the EXACT path specified by ${documentPath} input parameter.
       DO NOT use the template file name (e.g., FEATURE-DETAIL-TEMPLATE-*.md) as the output file name.
-      The documentPath already contains the correct target path including file name (e.g., speccrew-workspace/knowledges/bizs/backend-
+      The documentPath already contains the correct target path including file name (e.g., speccrew-workspace/knowledges/bizs/backend-fastapi/admin/cache/cache_controller.md).
       Before creating the file, ensure the parent directory exists (create if necessary).
     </field>
   </block>
@@ -478,17 +479,125 @@
     <field name="verify" value="file.exists(${completed_dir}/${markerName}.done.json)"/>
   </block>
 
-  <!--
-
-
-  <field name="
-
-
-
-
-
+  <!-- Step 7b: Construct and Append Graph Data -->
+  <!-- Construct Graph Nodes from API Analysis -->
+  <block type="task" id="B32a" action="analyze" desc="Construct API endpoint nodes">
+    <field name="endpoints" value="${endpoints}"/>
+    <field name="module" value="${module}"/>
+    <field name="sourcePath" value="${sourcePath}"/>
+    <field name="documentPath" value="${documentPath}"/>
+    <field name="output" var="apiNodes"/>
+  </block>
+
+  <block type="task" id="B32b" action="analyze" desc="Construct service nodes">
+    <field name="services" value="${services}"/>
+    <field name="module" value="${module}"/>
+    <field name="sourcePath" value="${sourcePath}"/>
+    <field name="documentPath" value="${documentPath}"/>
+    <field name="output" var="serviceNodes"/>
+  </block>
+
+  <block type="task" id="B32c" action="analyze" desc="Construct table nodes">
+    <field name="tables" value="${databaseTables}"/>
+    <field name="module" value="${module}"/>
+    <field name="output" var="tableNodes"/>
+  </block>
+
+  <!-- Construct Graph Edges -->
+  <block type="task" id="B32d" action="analyze" desc="Construct API-to-Service edges">
+    <field name="endpoints" value="${endpoints}"/>
+    <field name="module" value="${module}"/>
+    <field name="output" var="invokesEdges"/>
+  </block>
+
+  <block type="task" id="B32e" action="analyze" desc="Construct API-to-Table edges">
+    <field name="endpoints" value="${endpoints}"/>
+    <field name="tables" value="${databaseTables}"/>
+    <field name="module" value="${module}"/>
+    <field name="output" var="operatesEdges"/>
+  </block>
+
+  <!-- Append Graph Data to nodes.json and edges.json -->
+  <block type="task" id="B32f" action="run-script" desc="Append graph nodes to nodes.json">
+    <field name="command">node -e "
+      const fs = require('fs');
+      const path = require('path');
+      const graphDir = path.join('${workspace_path}', 'speccrew-workspace', 'knowledges', 'bizs', 'graph');
+      const nodesFile = path.join(graphDir, 'nodes.json');
+
+      // Ensure directory exists
+      if (!fs.existsSync(graphDir)) {
+        fs.mkdirSync(graphDir, { recursive: true });
+      }
+
+      // Read existing nodes or initialize empty array
+      let existingNodes = [];
+      if (fs.existsSync(nodesFile)) {
+        try {
+          existingNodes = JSON.parse(fs.readFileSync(nodesFile, 'utf8'));
+          if (!Array.isArray(existingNodes)) existingNodes = [];
+        } catch (e) {
+          existingNodes = [];
+        }
+      }
+
+      // Parse new nodes from input
+      const newNodes = JSON.parse('${apiNodes}' || '[]').concat(JSON.parse('${serviceNodes}' || '[]')).concat(JSON.parse('${tableNodes}' || '[]'));
+
+      // Deduplicate by id
+      const nodeMap = new Map();
+      existingNodes.forEach(n => nodeMap.set(n.id, n));
+      newNodes.forEach(n => nodeMap.set(n.id, n));
+
+      // Write back
+      fs.writeFileSync(nodesFile, JSON.stringify(Array.from(nodeMap.values()), null, 2));
+      console.log('Nodes appended:', newNodes.length);
+      "
     </field>
   </block>
+
+  <block type="task" id="B32g" action="run-script" desc="Append graph edges to edges.json">
+    <field name="command">node -e "
+      const fs = require('fs');
+      const path = require('path');
+      const graphDir = path.join('${workspace_path}', 'speccrew-workspace', 'knowledges', 'bizs', 'graph');
+      const edgesFile = path.join(graphDir, 'edges.json');
+
+      // Read existing edges or initialize empty array
+      let existingEdges = [];
+      if (fs.existsSync(edgesFile)) {
+        try {
+          existingEdges = JSON.parse(fs.readFileSync(edgesFile, 'utf8'));
+          if (!Array.isArray(existingEdges)) existingEdges = [];
+        } catch (e) {
+          existingEdges = [];
+        }
+      }
+
+      // Parse new edges from input
+      const newEdges = JSON.parse('${invokesEdges}' || '[]').concat(JSON.parse('${operatesEdges}' || '[]'));
+
+      // Deduplicate by composite key (source+target+type)
+      const edgeMap = new Map();
+      existingEdges.forEach(e => {
+        const key = e.source + '|' + e.target + '|' + e.type;
+        edgeMap.set(key, e);
+      });
+      newEdges.forEach(e => {
+        const key = e.source + '|' + e.target + '|' + e.type;
+        edgeMap.set(key, e);
+      });
+
+      // Write back
+      fs.writeFileSync(edgesFile, JSON.stringify(Array.from(edgeMap.values()), null, 2));
+      console.log('Edges appended:', newEdges.length);
+      "
+    </field>
+  </block>
+
+  <block type="checkpoint" id="CP-graph" name="graph-written" desc="Graph data written">
+    <field name="verify" value="true"/>
+  </block>
 
   <block type="event" id="E10" action="log" level="info" desc="Log marker status">
     <field name="message" value="Step 7 Status: COMPLETED - Marker file written to ${completed_dir}"/>
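Note on the two run-script blocks above: the append is idempotent. nodes.json is upserted by node id and edges.json by the composite key source|target|type, so re-running api-analyze for the same feature overwrites rather than duplicates graph entries. A minimal standalone sketch of the same merge logic (hypothetical helper and file names; the workflow itself inlines this via node -e as shown):

    // append-graph-sketch.js (illustrative only; mirrors the inline scripts above)
    const fs = require('fs');
    const path = require('path');

    function readArray(file) {
      // Missing or invalid JSON degrades to an empty array, as in the workflow scripts
      try {
        const data = JSON.parse(fs.readFileSync(file, 'utf8'));
        return Array.isArray(data) ? data : [];
      } catch (e) {
        return [];
      }
    }

    function appendGraph(graphDir, newNodes, newEdges) {
      fs.mkdirSync(graphDir, { recursive: true });
      const nodesFile = path.join(graphDir, 'nodes.json');
      const edgesFile = path.join(graphDir, 'edges.json');

      // Upsert nodes by id
      const nodeMap = new Map(readArray(nodesFile).map(n => [n.id, n]));
      newNodes.forEach(n => nodeMap.set(n.id, n));
      fs.writeFileSync(nodesFile, JSON.stringify(Array.from(nodeMap.values()), null, 2));

      // Upsert edges by source|target|type
      const keyOf = e => e.source + '|' + e.target + '|' + e.type;
      const edgeMap = new Map(readArray(edgesFile).map(e => [keyOf(e), e]));
      newEdges.forEach(e => edgeMap.set(keyOf(e), e));
      fs.writeFileSync(edgesFile, JSON.stringify(Array.from(edgeMap.values()), null, 2));
    }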
@@ -1001,11 +1001,11 @@ Requirements:
       "fileName": "UserController",
       "sourcePath": "controller/admin/user/UserController.java",
       "module": "user",
-      "documentPath": "speccrew-workspace/knowledges/bizs/backend-
+      "documentPath": "speccrew-workspace/knowledges/bizs/backend-spring/user/UserController.md",
       "platformType": "backend",
       "platformSubtype": "spring",
-      "platformId": "backend-
-      "sourceFile": "features-backend-
+      "platformId": "backend-spring",
+      "sourceFile": "features-backend-spring.json"
     }
   ]
 }
@@ -0,0 +1,294 @@
+#!/usr/bin/env node
+/**
+ * batch-orchestrator.js - Batch management for Stage 2 feature processing
+ *
+ * Subcommands:
+ *   get-batch - Get next batch of pending features
+ *   process-results - Process completed batch results and merge graph data
+ *
+ * Usage:
+ *   node batch-orchestrator.js get-batch --syncStatePath <path> --batchSize <number>
+ *   node batch-orchestrator.js process-results --syncStatePath <path> --graphRoot <path> --completedDir <path>
+ */
+
+const fs = require('fs');
+const path = require('path');
+
+// Parse command line arguments
+function parseArgs() {
+  const args = {
+    command: null,
+    syncStatePath: null,
+    batchSize: 5,
+    graphRoot: null,
+    completedDir: null
+  };
+
+  const argv = process.argv.slice(2);
+  args.command = argv[0];
+
+  for (let i = 1; i < argv.length; i++) {
+    if (argv[i] === '--syncStatePath' && i + 1 < argv.length) {
+      args.syncStatePath = argv[++i];
+    } else if (argv[i] === '--batchSize' && i + 1 < argv.length) {
+      args.batchSize = parseInt(argv[++i], 10) || 5;
+    } else if (argv[i] === '--graphRoot' && i + 1 < argv.length) {
+      args.graphRoot = argv[++i];
+    } else if (argv[i] === '--completedDir' && i + 1 < argv.length) {
+      args.completedDir = argv[++i];
+    }
+  }
+
+  return args;
+}
+
+// Ensure directory exists
+function ensureDir(dirPath) {
+  if (!fs.existsSync(dirPath)) {
+    fs.mkdirSync(dirPath, { recursive: true });
+  }
+}
+
+// Read JSON file safely
+function readJsonSafe(filePath) {
+  try {
+    if (fs.existsSync(filePath)) {
+      const content = fs.readFileSync(filePath, 'utf8');
+      return JSON.parse(content);
+    }
+  } catch (e) {
+    // Skip invalid JSON
+  }
+  return null;
+}
+
+// Get all features from syncStatePath
+function getAllFeatures(syncStatePath) {
+  const features = [];
+
+  if (!fs.existsSync(syncStatePath)) {
+    return features;
+  }
+
+  const files = fs.readdirSync(syncStatePath);
+  const featureFiles = files.filter(f => f.startsWith('features-') && f.endsWith('.json'));
+
+  for (const file of featureFiles) {
+    const filePath = path.join(syncStatePath, file);
+    const data = readJsonSafe(filePath);
+
+    if (data && Array.isArray(data.features)) {
+      for (const feature of data.features) {
+        // Generate feature ID: {platformId}-{module}-{fileName} (without extension)
+        const fileName = feature.fileName || feature.sourceFile || 'unknown';
+        const fileNameWithoutExt = fileName.replace(/\.[^.]+$/, '');
+        const featureId = `${feature.platformId}-${feature.module}-${fileNameWithoutExt}`;
+
+        features.push({
+          id: featureId,
+          ...feature
+        });
+      }
+    }
+  }
+
+  return features;
+}
+
+// Get completed feature IDs from completed directory
+function getCompletedFeatureIds(completedDir) {
+  const completedIds = new Set();
+
+  if (!fs.existsSync(completedDir)) {
+    return completedIds;
+  }
+
+  const files = fs.readdirSync(completedDir);
+  const doneFiles = files.filter(f => f.endsWith('.done.json'));
+
+  for (const file of doneFiles) {
+    // Extract feature ID from filename: {featureId}.done.json
+    const featureId = file.replace('.done.json', '');
+    completedIds.add(featureId);
+  }
+
+  return completedIds;
+}
+
+// get-batch subcommand
+function getBatch(args) {
+  const { syncStatePath, batchSize } = args;
+
+  ensureDir(syncStatePath);
+
+  // Determine completed directory (sibling to syncStatePath)
+  const completedDir = path.join(path.dirname(syncStatePath), 'completed');
+  ensureDir(completedDir);
+
+  // Get all features and completed IDs
+  const allFeatures = getAllFeatures(syncStatePath);
+  const completedIds = getCompletedFeatureIds(completedDir);
+
+  // Filter pending features
+  const pendingFeatures = allFeatures.filter(f => !completedIds.has(f.id));
+  const total = allFeatures.length;
+  const completed = completedIds.size;
+  const remaining = pendingFeatures.length;
+
+  if (remaining === 0) {
+    // All done
+    console.log(JSON.stringify({
+      action: 'done',
+      total,
+      completed
+    }));
+  } else {
+    // Get next batch
+    const batch = pendingFeatures.slice(0, batchSize);
+
+    console.log(JSON.stringify({
+      action: 'process',
+      batch,
+      remaining,
+      total
+    }));
+  }
+}
+
+// process-results subcommand
+function processResults(args) {
+  const { syncStatePath, graphRoot, completedDir } = args;
+
+  ensureDir(completedDir);
+  ensureDir(graphRoot);
+
+  let success = 0;
+  let failed = 0;
+  let graphUpdated = false;
+
+  if (!fs.existsSync(completedDir)) {
+    console.log(JSON.stringify({ success, failed, graphUpdated }));
+    return;
+  }
+
+  const files = fs.readdirSync(completedDir);
+
+  // Count done files
+  const doneFiles = files.filter(f => f.endsWith('.done.json'));
+  for (const file of doneFiles) {
+    const filePath = path.join(completedDir, file);
+    const data = readJsonSafe(filePath);
+    if (data) {
+      if (data.status === 'success' || data.status === 'completed') {
+        success++;
+      } else if (data.status === 'failed' || data.status === 'error') {
+        failed++;
+      } else {
+        // Default to success if no status field
+        success++;
+      }
+    } else {
+      // Invalid JSON, assume success
+      success++;
+    }
+  }
+
+  // Process graph files and merge
+  const graphFiles = files.filter(f => f.endsWith('.graph.json'));
+
+  if (graphFiles.length > 0) {
+    const nodesPath = path.join(graphRoot, 'nodes.json');
+    const edgesPath = path.join(graphRoot, 'edges.json');
+
+    // Load existing graph data
+    let existingNodes = [];
+    let existingEdges = [];
+
+    if (fs.existsSync(nodesPath)) {
+      const data = readJsonSafe(nodesPath);
+      if (Array.isArray(data)) {
+        existingNodes = data;
+      }
+    }
+
+    if (fs.existsSync(edgesPath)) {
+      const data = readJsonSafe(edgesPath);
+      if (Array.isArray(data)) {
+        existingEdges = data;
+      }
+    }
+
+    // Merge graph data with deduplication
+    const nodeIds = new Set(existingNodes.map(n => n.id));
+    const edgeKeys = new Set(existingEdges.map(e => `${e.source}-${e.target}-${e.type}`));
+
+    for (const file of graphFiles) {
+      const filePath = path.join(completedDir, file);
+      const graphData = readJsonSafe(filePath);
+
+      if (graphData) {
+        // Add nodes
+        if (Array.isArray(graphData.nodes)) {
+          for (const node of graphData.nodes) {
+            if (node.id && !nodeIds.has(node.id)) {
+              existingNodes.push(node);
+              nodeIds.add(node.id);
+            }
+          }
+        }
+
+        // Add edges
+        if (Array.isArray(graphData.edges)) {
+          for (const edge of graphData.edges) {
+            const edgeKey = `${edge.source}-${edge.target}-${edge.type}`;
+            if (edge.source && edge.target && !edgeKeys.has(edgeKey)) {
+              existingEdges.push(edge);
+              edgeKeys.add(edgeKey);
+            }
+          }
+        }
+
+        graphUpdated = true;
+      }
+    }
+
+    // Write merged graph data
+    fs.writeFileSync(nodesPath, JSON.stringify(existingNodes, null, 2));
+    fs.writeFileSync(edgesPath, JSON.stringify(existingEdges, null, 2));
+  }
+
+  console.log(JSON.stringify({
+    success,
+    failed,
+    graphUpdated
+  }));
+}
+
+// Main
+function main() {
+  const args = parseArgs();
+
+  if (!args.command) {
+    console.error(JSON.stringify({ error: 'No command specified' }));
+    process.exit(1);
+  }
+
+  if (args.command === 'get-batch') {
+    if (!args.syncStatePath) {
+      console.error(JSON.stringify({ error: '--syncStatePath is required' }));
+      process.exit(1);
+    }
+    getBatch(args);
+  } else if (args.command === 'process-results') {
+    if (!args.syncStatePath || !args.graphRoot || !args.completedDir) {
+      console.error(JSON.stringify({ error: '--syncStatePath, --graphRoot, and --completedDir are required' }));
+      process.exit(1);
+    }
+    processResults(args);
+  } else {
+    console.error(JSON.stringify({ error: `Unknown command: ${args.command}` }));
+    process.exit(1);
+  }
+}
+
+main();
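get-batch prints a single JSON object: either {action: "done", total, completed} when every feature has a .done.json marker, or {action: "process", batch, remaining, total} with the next slice of pending features. A sketch of the driving loop, assuming the CLI contract in the header comment (paths and batch size are placeholders; the worker dispatch that writes the {featureId}.done.json markers is elided):

    // drive-batches.js (illustrative caller, not part of the package)
    const { execFileSync } = require('child_process');

    const SYNC_STATE = 'speccrew-workspace/sync-state/bizs';  // placeholder path

    function orchestrate(command, extraArgs) {
      const out = execFileSync('node', ['batch-orchestrator.js', command, ...extraArgs], { encoding: 'utf8' });
      return JSON.parse(out);
    }

    let result = orchestrate('get-batch', ['--syncStatePath', SYNC_STATE, '--batchSize', '5']);
    while (result.action === 'process') {
      // Each feature in result.batch carries id, sourcePath, documentPath, module, ...
      // Workers must write {featureId}.done.json into the sibling completed/ directory,
      // otherwise the same batch is returned again on the next call.
      console.log('batch of ' + result.batch.length + ', ' + result.remaining + ' of ' + result.total + ' remaining');
      // ...dispatch analyze workers here...
      result = orchestrate('get-batch', ['--syncStatePath', SYNC_STATE, '--batchSize', '5']);
    }
    console.log('done: ' + result.completed + '/' + result.total);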
@@ -44,6 +44,10 @@
     <field name="text">DO NOT estimate workload and suggest breaking it into phases — execute ALL phases in sequence</field>
     <field name="text">When many features need analysis, dispatch ALL of them — do NOT skip or defer any</field>
     <field name="text">Context window management: if approaching limit, save progress to checkpoint file and resume — do NOT ask user for guidance</field>
+    <field name="text">WHEN batch-orchestrator.js returns action=process, you MUST dispatch ALL workers in the batch and continue to the next iteration — DO NOT stop to report progress or ask for guidance</field>
+    <field name="text">DO NOT estimate total workload (e.g., "52 batches needed") and present it as a reason to pause — just execute batch by batch until done</field>
+    <field name="text">DO NOT present "progress reports" with options to continue/skip/defer — the ONLY acceptable output between batches is a one-line log of completed count</field>
+    <field name="text">If a required script (e.g., batch-orchestrator.js) is missing, report the error and STOP — DO NOT attempt manual batch management as a workaround</field>
   </block>
 
   <!-- ============================================================
@@ -63,9 +63,10 @@
     <field name="tech_stack" value="${platform.techStack}"/>
     <field name="logic_module_scan" value="Read tech-stack-mappings.json for the techStack's module_scan configuration. Use module_scan.root as the scan starting point and module_scan.depth as the grouping level (depth=1 means first-level subdirectories = one module each)"/>
     <field name="logic_backend" value="Find all directories containing *Controller.java or *Controller.kt files under module_scan.root. These are API entry directories. Module name = the business package name of the entry directory. Apply module_scan.depth for grouping level"/>
-    <field name="logic_frontend_vue_react" value="
+    <field name="logic_frontend_vue_react" value="STRICT DEPTH RULE: Read module_scan.depth from tech-stack-mappings.json. When depth=1, ONLY the first-level subdirectories directly under module_scan.root are modules. DO NOT recurse into subdirectories. Example for depth=1 with root=src/views: if directories are src/views/system/, src/views/bpm/, src/views/ai/, src/views/dashboard/ — output modules: system, bpm, ai, dashboard. WRONG: system-config, system-dept, bpm-category, bpm-form. Each module's entryDirs = ["src/views/{module_name}"]."/>
     <field name="logic_mobile_uniapp" value="Find first-level subdirectories under module_scan.root (e.g., src/pages). Plus top-level pages-* directories (module name = directory name without pages- prefix)"/>
     <field name="logic_mobile_miniprogram" value="Find first-level subdirectories under module_scan.root (e.g., pages) as modules"/>
+    <field name="depth_enforcement" value="CRITICAL: When module_scan.depth=1, the module name MUST be exactly the first-level directory name (e.g. 'system', 'bpm', 'monitor'). NEVER concatenate parent-child directory names with hyphens (e.g. NEVER 'system-config', 'bpm-form'). Count directory levels from module_scan.root: root/X is depth=1, root/X/Y is depth=2."/>
     <field name="output" var="identified_entries"/>
   </block>
 
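Under depth=1 the grouping reduces to a first-level directory listing; a sketch of what the rule amounts to in code (root value taken from the example in the rule text; the workflow itself applies this via agent reasoning, not a script):

    // list-modules-sketch.js (illustrative)
    const fs = require('fs');

    function listModules(root) {
      // Every first-level subdirectory of module_scan.root is exactly one module.
      // Nothing deeper creates a module, so names like "system-config" can never appear.
      return fs.readdirSync(root, { withFileTypes: true })
        .filter(entry => entry.isDirectory())
        .map(entry => ({ name: entry.name, entryDirs: [root + '/' + entry.name] }));
    }

    // e.g. src/views/{system,bpm,ai,dashboard} -> modules system, bpm, ai, dashboard
    console.log(JSON.stringify(listModules('src/views'), null, 2));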
@@ -103,7 +104,8 @@
       "sourcePath": "${platform.sourcePath}",
       "techStack": "${platform.techStack}",
       "modules": [
-
+        IMPORTANT: When depth=1, each module = one first-level subdirectory.
+        Example: {"name": "system", "entryDirs": ["src/views/system"]}, NOT {"name": "system-config", "entryDirs": ["src/views/system/config"]}
       ]
     }
   </field>
@@ -143,6 +145,8 @@
     - module names are business-meaningful (not technical terms like config, util)
     - entryDirs paths are correct and accessible
     - JSON format is valid
+    - DEPTH CHECK: If module_scan.depth=1, verify NO module name contains a hyphen that corresponds to parent-child directory structure (e.g., "system-config" is invalid if "system" is a first-level directory under module_scan.root)
+    - Module count should roughly match the number of first-level subdirectories under module_scan.root (not second-level)
   </field>
   <field name="output" var="validation_result"/>
 </block>
@@ -26,7 +26,7 @@ All generated documents must match the user's language. Detect the language from
 
 | Parameter | Type | Required | Description |
 |-----------|------|----------|-------------|
-| `platformId` | string | Yes | Platform identifier (e.g., "backend-
+| `platformId` | string | Yes | Platform identifier (e.g., "backend-fastapi", "web-vue3", "mobile-uniapp") |
 | `platformName` | string | Yes | Platform display name |
 | `platformType` | string | Yes | Platform type: backend, web, mobile |
 | `platformSubtype` | string | No | Platform subtype (e.g., vue, react, uniapp) |
@@ -0,0 +1,289 @@
+#!/usr/bin/env node
+/**
+ * generate-inventory.js
+ *
+ * Generate features.json inventory for a single platform.
+ * This script is called by speccrew-knowledge-bizs-init-features workflow.
+ *
+ * Usage:
+ *   node generate-inventory.js --entryDirsFile <path> --outputDir <path>
+ *
+ * Arguments:
+ *   --entryDirsFile Path to entry-dirs JSON file
+ *   --outputDir Output directory for features.json
+ */
+
+const fs = require('fs');
+const path = require('path');
+
+// Parse command line arguments
+function parseArgs() {
+  const args = process.argv.slice(2);
+  const parsed = {};
+
+  for (let i = 0; i < args.length; i++) {
+    const arg = args[i];
+    if (arg.startsWith('--')) {
+      const key = arg.slice(2);
+      const value = args[i + 1];
+      if (value && !value.startsWith('--')) {
+        parsed[key] = value;
+        i++;
+      } else {
+        parsed[key] = true;
+      }
+    }
+  }
+
+  return parsed;
+}
+
+// Generate timestamp in format YYYY-MM-DD-HHMMSS
+function generateTimestamp() {
+  const now = new Date();
+  const year = now.getFullYear();
+  const month = String(now.getMonth() + 1).padStart(2, '0');
+  const day = String(now.getDate()).padStart(2, '0');
+  const hours = String(now.getHours()).padStart(2, '0');
+  const minutes = String(now.getMinutes()).padStart(2, '0');
+  const seconds = String(now.getSeconds()).padStart(2, '0');
+  return `${year}-${month}-${day}-${hours}${minutes}${seconds}`;
+}
+
+// Convert absolute path to project-relative path
+function toRelativePath(absolutePath, projectRoot) {
+  // Normalize paths
+  const normalizedAbs = path.normalize(absolutePath).replace(/\\/g, '/');
+  const normalizedRoot = path.normalize(projectRoot).replace(/\\/g, '/');
+
+  if (normalizedAbs.startsWith(normalizedRoot)) {
+    const relative = normalizedAbs.slice(normalizedRoot.length).replace(/^\/+/, '');
+    return relative;
+  }
+  return normalizedAbs;
+}
+
+// Generate document path for a feature
+// Format: speccrew-workspace/knowledges/bizs/{platformId}/{module}/{subpath}/{filename}.md
+function generateDocumentPath(platformId, module, sourcePath, projectRoot) {
+  // Extract filename without extension
+  const basename = path.basename(sourcePath, path.extname(sourcePath));
+
+  // Get directory relative to module root
+  const relativePath = toRelativePath(sourcePath, projectRoot);
+
+  // Parse the source path to extract module and subpath
+  // Expected format: {platformSourceRoot}/{module}/{subpath}/{file}
+  const pathParts = relativePath.split('/');
+
+  // Find module index
+  let moduleIndex = -1;
+  for (let i = 0; i < pathParts.length; i++) {
+    if (pathParts[i] === module) {
+      moduleIndex = i;
+      break;
+    }
+  }
+
+  // Build subpath (everything between module and filename)
+  let subpath = '';
+  if (moduleIndex >= 0 && moduleIndex < pathParts.length - 2) {
+    // There are directories between module and filename
+    subpath = pathParts.slice(moduleIndex + 1, pathParts.length - 1).join('/');
+  }
+
+  // Construct document path using platformId (which follows {platformType}-{techStack} format)
+  // e.g., backend-fastapi, web-vue3, mobile-uniapp
+  const docPathParts = ['speccrew-workspace', 'knowledges', 'bizs', platformId, module];
+
+  if (subpath) {
+    docPathParts.push(subpath);
+  }
+
+  docPathParts.push(`${basename}.md`);
+
+  return docPathParts.join('/');
+}
+
+// Main function
+function main() {
+  const args = parseArgs();
+
+  // Validate required arguments
+  if (!args.entryDirsFile) {
+    console.error('Error: --entryDirsFile is required');
+    process.exit(1);
+  }
+
+  if (!args.outputDir) {
+    console.error('Error: --outputDir is required');
+    process.exit(1);
+  }
+
+  const entryDirsFile = path.resolve(args.entryDirsFile);
+  const outputDir = path.resolve(args.outputDir);
+
+  // Check if entry-dirs file exists
+  if (!fs.existsSync(entryDirsFile)) {
+    console.error(`Error: Entry-dirs file not found: ${entryDirsFile}`);
+    process.exit(1);
+  }
+
+  // Read and parse entry-dirs JSON
+  let entryDirsData;
+  try {
+    const content = fs.readFileSync(entryDirsFile, 'utf-8');
+    entryDirsData = JSON.parse(content);
+  } catch (error) {
+    console.error(`Error: Failed to parse entry-dirs file: ${error.message}`);
+    process.exit(1);
+  }
+
+  // Validate entry-dirs structure
+  if (!entryDirsData.modules || !Array.isArray(entryDirsData.modules) || entryDirsData.modules.length === 0) {
+    console.error('Error: entry-dirs JSON must have non-empty modules array');
+    process.exit(1);
+  }
+
+  // Extract platform info from entry-dirs data
+  const platformId = entryDirsData.platformId || 'unknown-platform';
+  const platformName = entryDirsData.platformName || platformId;
+  const platformType = entryDirsData.platformType || 'unknown';
+  const platformSubtype = entryDirsData.platformSubtype || '';
+  const techStack = entryDirsData.techStack || [];
+  const sourceRoot = entryDirsData.sourcePath || '';
+
+  // Project root is the parent of speccrew-workspace
+  const projectRoot = path.resolve(outputDir, '..', '..', '..');
+
+  // Generate features for each module
+  const features = [];
+  const modules = [];
+
+  for (const moduleData of entryDirsData.modules) {
+    const moduleName = moduleData.name;
+    const entryDirs = moduleData.entryDirs || [];
+
+    let moduleFeatureCount = 0;
+
+    for (const entryDir of entryDirs) {
+      // Scan files in entry directory
+      const scanDirectory = (dir) => {
+        const files = [];
+
+        try {
+          const entries = fs.readdirSync(dir, { withFileTypes: true });
+
+          for (const entry of entries) {
+            const fullPath = path.join(dir, entry.name);
+
+            if (entry.isDirectory()) {
+              // Recursively scan subdirectories
+              files.push(...scanDirectory(fullPath));
+            } else if (entry.isFile()) {
+              // Check if file is a source file based on platform type
+              const ext = path.extname(entry.name).toLowerCase();
+              const isSourceFile = isValidSourceFile(ext, platformType);
+
+              if (isSourceFile) {
+                files.push(fullPath);
+              }
+            }
+          }
+        } catch (error) {
+          console.warn(`Warning: Failed to scan directory ${dir}: ${error.message}`);
+        }
+
+        return files;
+      };
+
+      const sourceFiles = scanDirectory(entryDir);
+
+      for (const sourceFile of sourceFiles) {
+        const relativeSourcePath = toRelativePath(sourceFile, projectRoot);
+        const fileName = path.basename(sourceFile, path.extname(sourceFile));
+
+        // Generate document path using platformId
+        const documentPath = generateDocumentPath(platformId, moduleName, sourceFile, projectRoot);
+
+        features.push({
+          fileName: fileName,
+          sourcePath: relativeSourcePath,
+          documentPath: documentPath,
+          module: moduleName,
+          analyzed: false,
+          startedAt: null,
+          completedAt: null,
+          analysisNotes: null
+        });
+
+        moduleFeatureCount++;
+      }
+    }
+
+    modules.push({
+      name: moduleName,
+      featureCount: moduleFeatureCount
+    });
+  }
+
+  // Build output JSON
+  const outputData = {
+    platformName: platformName,
+    platformType: platformType,
+    platformSubtype: platformSubtype,
+    platformId: platformId,
+    sourcePath: sourceRoot,
+    techStack: techStack,
+    modules: modules,
+    totalFiles: features.length,
+    analyzedCount: 0,
+    pendingCount: features.length,
+    generatedAt: generateTimestamp(),
+    features: features
+  };
+
+  // Ensure output directory exists
+  if (!fs.existsSync(outputDir)) {
+    fs.mkdirSync(outputDir, { recursive: true });
+  }
+
+  // Write output file
+  const outputFile = path.join(outputDir, `features-${platformId}.json`);
+  try {
+    fs.writeFileSync(outputFile, JSON.stringify(outputData, null, 2), 'utf-8');
+    console.log(`Generated: ${outputFile}`);
+    console.log(`  Platform: ${platformName} (${platformId})`);
+    console.log(`  Type: ${platformType}${platformSubtype ? '/' + platformSubtype : ''}`);
+    console.log(`  Features: ${features.length}`);
+    console.log(`  Modules: ${modules.map(m => m.name).join(', ')}`);
+  } catch (error) {
+    console.error(`Error: Failed to write output file: ${error.message}`);
+    process.exit(1);
+  }
+}
+
+// Check if file extension is valid for the platform type
+function isValidSourceFile(ext, platformType) {
+  const backendExts = ['.java', '.kt', '.py', '.go', '.rs', '.cs', '.php', '.rb'];
+  const webExts = ['.vue', '.tsx', '.jsx', '.ts', '.js', '.svelte'];
+  const mobileExts = ['.vue', '.tsx', '.jsx', '.ts', '.js', '.dart', '.swift', '.kt', '.java'];
+  const desktopExts = ['.vue', '.tsx', '.jsx', '.ts', '.js', '.cs', '.xaml'];
+
+  switch (platformType) {
+    case 'backend':
+      return backendExts.includes(ext);
+    case 'web':
+      return webExts.includes(ext);
+    case 'mobile':
+      return mobileExts.includes(ext);
+    case 'desktop':
+      return desktopExts.includes(ext);
+    default:
+      // Accept all common source file extensions
+      return [...backendExts, ...webExts, ...mobileExts, ...desktopExts].includes(ext);
+  }
+}
+
+// Run main function
+main();
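The script documents only its two flags; the shape of the entry-dirs file it expects can be read off the fields main() consumes (platformId, platformName, platformType, platformSubtype, techStack, sourcePath, and modules[] with name and entryDirs). A hedged example input and invocation, with all values hypothetical:

    // make-entry-dirs-example.js (illustrative; writes a sample input for generate-inventory.js)
    const fs = require('fs');

    const exampleEntryDirs = {
      platformId: 'web-vue3',
      platformName: 'Admin Web',
      platformType: 'web',
      platformSubtype: 'vue',
      techStack: ['vue3'],
      sourcePath: 'web-admin',
      modules: [
        { name: 'system', entryDirs: ['web-admin/src/views/system'] },
        { name: 'bpm', entryDirs: ['web-admin/src/views/bpm'] }
      ]
    };

    fs.writeFileSync('entry-dirs-web-vue3.json', JSON.stringify(exampleEntryDirs, null, 2));
    // Then, per the usage comment above (outputDir is expected to sit three levels below the
    // project root, e.g. speccrew-workspace/sync-state/bizs, since projectRoot is resolved as outputDir/../../..):
    //   node generate-inventory.js --entryDirsFile entry-dirs-web-vue3.json --outputDir speccrew-workspace/sync-state/bizs
    // This produces features-web-vue3.json with one pending feature entry per scanned source file.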
@@ -3,7 +3,7 @@
  * reindex-modules.js - Deterministic script to re-extract module names from existing features JSON with updated exclude_dirs
  *
  * Usage:
- *   node reindex-modules.js --featuresFile "path/to/features-backend-
+ *   node reindex-modules.js --featuresFile "path/to/features-backend-fastapi.json" --projectRoot "d:\dev\ruoyi-vue-pro"
  *
  * Optional parameters:
  *   --platformType "backend" - If not provided, read from features JSON's platformType field
@@ -376,12 +376,179 @@
   <block type="checkpoint" id="CP7" name="marker-written" desc="Marker file written">
     <field name="verify" value="file.exists(${completed_dir}/${markerName}.done.json)"/>
   </block>
-
+
+  <!-- Step 7b: Construct and Append Graph Data -->
+  <!-- Construct Graph Nodes from UI Analysis -->
+  <block type="task" id="B32a" action="analyze" desc="Construct page node">
+    <field name="type" value="page"/>
+    <field name="id" value="page-${module}-${fileName}"/>
+    <field name="name" value="${fileName}"/>
+    <field name="module" value="${module}"/>
+    <field name="sourcePath" value="${sourcePath}"/>
+    <field name="documentPath" value="${documentPath}"/>
+    <field name="platform" value="${platform_type}-${platform_subtype}"/>
+    <field name="output" var="pageNode"/>
+  </block>
+
+  <block type="task" id="B32b" action="analyze" desc="Construct component nodes">
+    <field name="components" value="${analysisResult.components}"/>
+    <field name="module" value="${module}"/>
+    <field name="documentPath" value="${documentPath}"/>
+    <field name="output" var="componentNodes"/>
+  </block>
+
+  <block type="task" id="B32c" action="analyze" desc="Construct route nodes">
+    <field name="routes" value="${analysisResult.routes}"/>
+    <field name="module" value="${module}"/>
+    <field name="sourcePath" value="${sourcePath}"/>
+    <field name="output" var="routeNodes"/>
+  </block>
+
+  <!-- Construct Graph Edges -->
+  <block type="task" id="B32d" action="analyze" desc="Construct navigates edges">
+    <field name="pageId" value="page-${module}-${fileName}"/>
+    <field name="navigations" value="${analysisResult.navigations}"/>
+    <field name="module" value="${module}"/>
+    <field name="output" var="navigatesEdges"/>
+  </block>
+
+  <block type="task" id="B32e" action="analyze" desc="Construct contains edges">
+    <field name="pageId" value="page-${module}-${fileName}"/>
+    <field name="components" value="${analysisResult.components}"/>
+    <field name="module" value="${module}"/>
+    <field name="output" var="containsEdges"/>
+  </block>
+
+  <block type="task" id="B32f" action="analyze" desc="Construct calls-api edges">
+    <field name="pageId" value="page-${module}-${fileName}"/>
+    <field name="apiCalls" value="${analysisResult.apis}"/>
+    <field name="module" value="${module}"/>
+    <field name="output" var="callsApiEdges"/>
+  </block>
+
+  <!-- Append Graph Data to nodes.json and edges.json -->
+  <block type="task" id="B32g" action="run-script" desc="Append graph nodes to nodes.json">
+    <field name="command">node -e "
+      const fs = require('fs');
+      const path = require('path');
+      const graphDir = path.join('${workspace_path}', 'speccrew-workspace', 'knowledges', 'bizs', 'graph');
+      const nodesFile = path.join(graphDir, 'nodes.json');
+
+      // Ensure directory exists
+      if (!fs.existsSync(graphDir)) {
+        fs.mkdirSync(graphDir, { recursive: true });
+      }
+
+      // Read existing nodes or initialize empty array
+      let existingNodes = [];
+      if (fs.existsSync(nodesFile)) {
+        try {
+          existingNodes = JSON.parse(fs.readFileSync(nodesFile, 'utf8'));
+          if (!Array.isArray(existingNodes)) existingNodes = [];
+        } catch (e) {
+          existingNodes = [];
+        }
+      }
+
+      // Parse new nodes from input
+      const newNodes = [];
+      if ('${pageNode}') {
+        try {
+          const page = JSON.parse('${pageNode}');
+          if (page && page.id) newNodes.push(page);
+        } catch (e) {}
+      }
+      if ('${componentNodes}') {
+        try {
+          const comps = JSON.parse('${componentNodes}');
+          if (Array.isArray(comps)) newNodes.push(...comps);
+        } catch (e) {}
+      }
+      if ('${routeNodes}') {
+        try {
+          const routes = JSON.parse('${routeNodes}');
+          if (Array.isArray(routes)) newNodes.push(...routes);
+        } catch (e) {}
+      }
+
+      // Deduplicate by id
+      const nodeMap = new Map();
+      existingNodes.forEach(n => nodeMap.set(n.id, n));
+      newNodes.forEach(n => nodeMap.set(n.id, n));
+
+      // Write back
+      fs.writeFileSync(nodesFile, JSON.stringify(Array.from(nodeMap.values()), null, 2));
+      console.log('Nodes appended:', newNodes.length);
+      "
+    </field>
+  </block>
+
+  <block type="task" id="B32h" action="run-script" desc="Append graph edges to edges.json">
+    <field name="command">node -e "
+      const fs = require('fs');
+      const path = require('path');
+      const graphDir = path.join('${workspace_path}', 'speccrew-workspace', 'knowledges', 'bizs', 'graph');
+      const edgesFile = path.join(graphDir, 'edges.json');
+
+      // Read existing edges or initialize empty array
+      let existingEdges = [];
+      if (fs.existsSync(edgesFile)) {
+        try {
+          existingEdges = JSON.parse(fs.readFileSync(edgesFile, 'utf8'));
+          if (!Array.isArray(existingEdges)) existingEdges = [];
+        } catch (e) {
+          existingEdges = [];
+        }
+      }
+
+      // Parse new edges from input
+      const newEdges = [];
+      if ('${navigatesEdges}') {
+        try {
+          const edges = JSON.parse('${navigatesEdges}');
+          if (Array.isArray(edges)) newEdges.push(...edges);
+        } catch (e) {}
+      }
+      if ('${containsEdges}') {
+        try {
+          const edges = JSON.parse('${containsEdges}');
+          if (Array.isArray(edges)) newEdges.push(...edges);
+        } catch (e) {}
+      }
+      if ('${callsApiEdges}') {
+        try {
+          const edges = JSON.parse('${callsApiEdges}');
+          if (Array.isArray(edges)) newEdges.push(...edges);
+        } catch (e) {}
+      }
+
+      // Deduplicate by composite key (source+target+type)
+      const edgeMap = new Map();
+      existingEdges.forEach(e => {
+        const key = (e.source || '') + '|' + (e.target || '') + '|' + (e.type || '');
+        edgeMap.set(key, e);
+      });
+      newEdges.forEach(e => {
+        const key = (e.source || '') + '|' + (e.target || '') + '|' + (e.type || '');
+        edgeMap.set(key, e);
+      });
+
+      // Write back
+      fs.writeFileSync(edgesFile, JSON.stringify(Array.from(edgeMap.values()), null, 2));
+      console.log('Edges appended:', newEdges.length);
+      "
+    </field>
+  </block>
+
+  <block type="checkpoint" id="CP-graph" name="graph-written" desc="Graph data written">
+    <field name="verify" value="true"/>
+  </block>
+
   <block type="event" id="E10" action="log" level="info" desc="Log marker status">
-    <field name="message" value="Step 7 Status: COMPLETED - Done marker
+    <field name="message" value="Step 7 Status: COMPLETED - Done marker and graph data written to ${completed_dir}/${markerName}.done.json"/>
   </block>
 
-  <!-- ==================== FINAL OUTPUT
+  <!-- ==================== FINAL OUTPUT ==================== -->
   <block type="output" id="O1" desc="UI feature analysis output results">
     <field name="status" value="success"/>
     <field name="feature_name" from="${fileName}"/>
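The analyze blocks above leave the exact node and edge JSON to the agent; only the id pattern (page-${module}-${fileName}), the fields passed in, and the source|target|type dedup key are fixed by the workflow. A plausible shape for one appended page node and one calls-api edge, offered as an assumption rather than a schema the package defines:

    // Hypothetical records, inferred from the B32a-B32f inputs and the dedup key in B32h
    const pageNode = {
      id: 'page-system-index',                 // "page-${module}-${fileName}"
      type: 'page',
      name: 'index',
      module: 'system',
      sourcePath: 'src/views/system/user/index.vue',
      documentPath: 'speccrew-workspace/knowledges/bizs/web-vue3/system/user/index.md',
      platform: 'web-vue'                      // "${platform_type}-${platform_subtype}"
    };

    const callsApiEdge = {
      source: 'page-system-index',
      target: 'api-system-user-list',          // node id style shown in the graph-write SKILL.md table below
      type: 'calls-api'
    };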
@@ -25,7 +25,7 @@ Dispatch Agent (speccrew-knowledge-dispatch)
 | Variable | Type | Description | Example |
 |----------|------|-------------|---------|
 | `{{action}}` | string | Write action to perform | `"batch-write"`, `"init-module"`, `"update-node"`, `"remove-node"` |
-| `{{platformId}}` | string | Platform identifier for directory segregation | `"backend-
+| `{{platformId}}` | string | Platform identifier for directory segregation | `"backend-fastapi"`, `"backend-spring"`, `"web-vue"`, `"mobile-uniapp"` |
 | `{{module}}` | string | Target business module | `"system"`, `"trade"`, `"infra"` |
 | `{{graphData}}` | object | Graph data from skill output (for batch-write) | `{ "nodes": [...], "edges": [...] }` |
 | `{{nodeId}}` | string | Node ID (for update-node / remove-node) | `"api-system-user-list"` |
@@ -39,7 +39,7 @@ Generate analyze task plan for a single business module. Reads features-*.json,
 | `features_file` | string | Yes | Path to the platform's features-{platform}.json file |
 | `output_path` | string | Yes | Knowledge base output root path (e.g., speccrew-workspace/knowledges) |
 | `completed_dir` | string | Yes | Marker file output directory for api-analyze .done.json markers. Value from PM Agent: `{sync_state_bizs_dir}/completed` |
-| `sourceFile` | string | Yes | Features JSON filename (e.g., "features-backend-
+| `sourceFile` | string | Yes | Features JSON filename (e.g., "features-backend-fastapi.json"), used for api-analyze marking |
 | `language` | string | Yes | Output language (zh / en) |
 | `workspace_path` | string | Yes | Workspace root path for constructing absolute paths |
 
@@ -14,7 +14,7 @@
     <field name="features_file" required="true" type="string" desc="Path to the platform's features-{platform}.json file"/>
     <field name="output_path" required="true" type="string" desc="Knowledge base output root path"/>
     <field name="completed_dir" required="true" type="string" desc="Marker file output directory for api-analyze .done.json markers"/>
-    <field name="sourceFile" required="true" type="string" desc="Features JSON filename (e.g., features-backend-
+    <field name="sourceFile" required="true" type="string" desc="Features JSON filename (e.g., features-backend-fastapi.json)"/>
     <field name="language" required="true" type="string" desc="Output language (zh / en)"/>
    <field name="workspace_path" required="true" type="string" desc="Workspace root path for constructing absolute paths"/>
   </block>